index
int64 0
0
| repo_id
stringlengths 26
205
| file_path
stringlengths 51
246
| content
stringlengths 8
433k
| __index_level_0__
int64 0
10k
|
---|---|---|---|---|
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/TimeUtil.java
|
package com.netflix.suro.routing.filter;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
public class TimeUtil {
    /**
     * Converts the given time format string to a {@link org.joda.time.format.DateTimeFormatter} instance.
     *
     * @param formatName A name used to identify the given time format. This is mainly used for error reporting.
     * @param timeFormat The date time format to be converted.
     *
     * @return A {@link org.joda.time.format.DateTimeFormatter} instance of the given time format.
     *
     * @throws IllegalArgumentException if the given time format is invalid.
     */
    public static DateTimeFormatter toDateTimeFormatter(String formatName, String timeFormat) {
        try {
            return DateTimeFormat.forPattern(timeFormat);
        } catch (IllegalArgumentException e) {
            // Joda's error message doesn't name the offending property, so build a more
            // descriptive one. Chain the caught exception itself as the cause: the
            // original code passed e.getCause(), which is typically null and therefore
            // silently dropped Joda's own diagnostic message from the chain.
            IllegalArgumentException iae = new IllegalArgumentException(
                String.format("Invalid time format for the property %s: '%s'",
                    formatName,
                    timeFormat),
                e);
            iae.setStackTrace(e.getStackTrace());
            throw iae;
        }
    }

    /**
     * Converts the given epoch time in milliseconds to a string according to the given format. Note
     * each invocation creates a new {@link org.joda.time.format.DateTimeFormatter} instance, which is pretty costly.
     * This method is suitable for testing and calls that are not on a hot path.
     *
     * @param millis the epoch time to be converted
     * @param format the format of the returned time string
     * @return a string representation of the given epoch time in the given format
     */
    public static String toString(long millis, String format){
        return DateTimeFormat.forPattern(format).print(millis);
    }
}
| 1,500 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/PathExistsMessageFilter.java
|
package com.netflix.suro.routing.filter;
import org.apache.commons.jxpath.JXPathContext;
import org.apache.commons.jxpath.Pointer;
import org.apache.commons.jxpath.ri.model.beans.NullPointer;
/**
 * A {@link MessageFilter} that is satisfied when the configured XPath resolves
 * to an existing, non-null value on the input object.
 */
public class PathExistsMessageFilter extends BaseMessageFilter {
    private String xpath;

    /**
     * @param path the XPath expression whose existence this filter tests.
     */
    public PathExistsMessageFilter(String path) {
        this.xpath = path;
    }

    /**
     * @return {@code true} iff the configured path points at a real, non-null
     *         value inside {@code input}.
     */
    @Override
    public boolean apply(Object input) {
        JXPathContext context = JXPathContext.newContext(input);
        // Lenient mode: a missing path yields a NullPointer rather than an
        // exception, and we translate that into a non-match below.
        context.setLenient(true);
        Pointer found = context.getPointer(xpath);
        if (found == null || found instanceof NullPointer) {
            return false;
        }
        return found.getValue() != null;
    }

    public String getXpath() {
        return xpath;
    }

    @Override
    public String toString() {
        return "PathExistsMessageFilter [xpath=" + xpath + "]";
    }

    @Override
    public int hashCode() {
        // Same value as the conventional 31 * 1 + hash(xpath) scheme.
        return 31 + (xpath == null ? 0 : xpath.hashCode());
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (obj == null || obj.getClass() != getClass()) {
            return false;
        }
        PathExistsMessageFilter that = (PathExistsMessageFilter) obj;
        return xpath == null ? that.xpath == null : xpath.equals(that.xpath);
    }
}
| 1,501 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/OrMessageFilter.java
|
package com.netflix.suro.routing.filter;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
/**
 * A {@link MessageFilter} that matches when at least one of its component
 * filters matches; evaluation delegates to a Guava disjunction.
 */
public class OrMessageFilter extends BaseMessageFilter {
    final private Predicate<Object> orPredicate;

    /** Builds a disjunction over the given filters. */
    public OrMessageFilter(MessageFilter... filters) {
        this.orPredicate = Predicates.or(filters);
    }

    /** Builds a disjunction over the given filters. */
    public OrMessageFilter(Iterable<? extends MessageFilter> filters) {
        this.orPredicate = Predicates.or(filters);
    }

    @Override
    public boolean apply(Object input) {
        return orPredicate.apply(input);
    }

    @Override
    public String toString() {
        return "OrMessageFilter{orPredicate=" + orPredicate + '}';
    }

    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (o == null || o.getClass() != getClass()) {
            return false;
        }
        OrMessageFilter that = (OrMessageFilter) o;
        return orPredicate == null
                ? that.orPredicate == null
                : orPredicate.equals(that.orPredicate);
    }

    @Override
    public int hashCode() {
        return orPredicate == null ? 0 : orPredicate.hashCode();
    }
}
| 1,502 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/AlwaysFalseMessageFilter.java
|
package com.netflix.suro.routing.filter;
/**
 * A {@link MessageFilter} that rejects every input. Stateless; use the shared
 * {@link #INSTANCE} rather than constructing new objects.
 */
final public class AlwaysFalseMessageFilter extends BaseMessageFilter {
    /** The single shared instance; there's no point creating more than one. */
    public static final AlwaysFalseMessageFilter INSTANCE = new AlwaysFalseMessageFilter();

    private AlwaysFalseMessageFilter() {
        setOriginalDslString("false");
    }

    /** Always {@code false}, regardless of input. */
    @Override
    public boolean apply(Object input) {
        return false;
    }

    @Override
    public String toString() {
        return "AlwaysFalseMessageFilter []";
    }

    @Override
    public int hashCode() {
        return Boolean.FALSE.hashCode();
    }

    @Override
    public boolean equals(Object obj) {
        // All instances are interchangeable, so type membership is sufficient.
        return obj instanceof AlwaysFalseMessageFilter;
    }
}
| 1,503 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/MessageFilter.java
|
package com.netflix.suro.routing.filter;
import com.google.common.base.Predicate;
/**
 * A contract for filtering events. These filters can be applied/defined both at the publisher and subscriber level.<p/>
 * It is recommended to use a filter language as specified in {@link com.netflix.suro.routing.filter.lang}, which provides
 * flexible ways of defining filters. However, for programmatic creation of simple or custom filters it may be easier
 * to directly implement this interface. <p/>
 * The structure of the event filters is entirely opaque to the event bus and all processing related to evaluation of
 * the same is left to the implementations.
 */
public interface MessageFilter extends Predicate<Object> {
// hashCode() and equals(Object) are redeclared here to emphasize that every
// {@code MessageFilter} instance must implement them consistently, so filters
// can be used as keys in hash-based collections (e.g. for de-duplication).
public int hashCode();
public boolean equals(Object o);
}
| 1,504 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/NullValuePredicate.java
|
package com.netflix.suro.routing.filter;
/**
 * Identifies {@code null} objects: {@link #apply(Object)} returns {@code true}
 * if and only if its input is {@code null}. Stateless singleton; use
 * {@link #INSTANCE}.
 *
 * NOTE(review): this class implements the raw {@code ValuePredicate} type
 * (no type argument) — presumably so it can stand in for any value type;
 * confirm before parameterizing it.
 */
final public class NullValuePredicate implements ValuePredicate {
    /** The single shared instance of this stateless predicate. */
    public static final NullValuePredicate INSTANCE = new NullValuePredicate();

    // Hash key shared by the singleton; all instances are interchangeable.
    private static final byte KEY = 0x00;

    private NullValuePredicate() {}

    @Override
    public boolean apply(final Object input) {
        return null == input;
    }

    @Override
    public String toString() {
        return "NullValuePredicate []";
    }

    @Override
    public final int hashCode() {
        return KEY;
    }

    @Override
    public final boolean equals(Object obj) {
        return obj instanceof NullValuePredicate;
    }
}
| 1,505 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/TimeMillisValuePredicate.java
|
package com.netflix.suro.routing.filter;
import org.joda.time.format.DateTimeFormatter;
import javax.annotation.Nullable;
/**
 * A {@link ValuePredicate} over epoch-millisecond timestamps. The supplied
 * time literal is parsed once, using the supplied Joda-Time pattern, into a
 * millisecond value; evaluation then delegates to a
 * {@link NumericValuePredicate} built from that value and the comparison
 * function name.
 */
public class TimeMillisValuePredicate implements ValuePredicate<Long> {
    private String timeFormat;
    private String value;
    private String fnName;
    private NumericValuePredicate longPredicate;

    /**
     * @param timeFormat Joda-Time pattern used to parse {@code value}.
     * @param value      the time literal to compare inputs against.
     * @param fnName     name of the comparison function to apply.
     * @throws IllegalArgumentException if {@code timeFormat} is not a valid pattern.
     */
    public TimeMillisValuePredicate(String timeFormat, String value, String fnName){
        this.timeFormat = timeFormat;
        this.value = value;
        this.fnName = fnName;
        DateTimeFormatter parser = TimeUtil.toDateTimeFormatter("time format", timeFormat);
        this.longPredicate = new NumericValuePredicate(parser.parseMillis(value), fnName);
    }

    @Override
    public boolean apply(@Nullable Long input) {
        return longPredicate.apply(input);
    }

    public String getValue(){
        return value;
    }

    public String getTimeFormat(){
        return this.timeFormat;
    }

    String getFnName() {
        return this.fnName;
    }

    @Override
    public String toString() {
        return "TimeMillisValuePredicate [timeFormat=" + timeFormat
                + ", value=" + value
                + ", fnName=" + fnName
                + ", longPredicate=" + longPredicate
                + "]";
    }

    @Override
    public int hashCode() {
        // longPredicate is derived from the other fields, so it is excluded.
        int h = 1;
        h = 31 * h + (fnName == null ? 0 : fnName.hashCode());
        h = 31 * h + (timeFormat == null ? 0 : timeFormat.hashCode());
        h = 31 * h + (value == null ? 0 : value.hashCode());
        return h;
    }

    // Null-safe equality helper for the field comparisons below.
    private static boolean same(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (obj == null || obj.getClass() != getClass()) {
            return false;
        }
        TimeMillisValuePredicate that = (TimeMillisValuePredicate) obj;
        return same(fnName, that.fnName)
                && same(timeFormat, that.timeFormat)
                && same(value, that.value);
    }
}
| 1,506 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/StringValuePredicate.java
|
package com.netflix.suro.routing.filter;
import com.google.common.base.Objects;
import javax.annotation.Nullable;
/**
 * A {@link ValuePredicate} that matches when the input string equals the
 * configured value; two {@code null}s are considered equal.
 */
public class StringValuePredicate implements ValuePredicate<String> {
    private String value;

    public StringValuePredicate(@Nullable String value){
        this.value = value;
    }

    /** Null-safe equality between the configured value and the input. */
    @Override
    public boolean apply(@Nullable String input) {
        return Objects.equal(value, input);
    }

    String getValue(){
        return value;
    }

    @Override
    public String toString() {
        return "StringValuePredicate [value=" + value + "]";
    }

    @Override
    public int hashCode() {
        // Same value as the conventional 31 * 1 + hash(value) scheme.
        return 31 + (value == null ? 0 : value.hashCode());
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (obj == null || obj.getClass() != getClass()) {
            return false;
        }
        return Objects.equal(value, ((StringValuePredicate) obj).value);
    }
}
| 1,507 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/PathValuePredicate.java
|
package com.netflix.suro.routing.filter;
import com.google.common.base.Objects;
import org.apache.commons.jxpath.JXPathContext;
import javax.annotation.Nullable;
/**
 * A {@link ValuePredicate} that compares the values found at two XPath
 * locations of the same input: it matches when the value at
 * {@code valueXpath} equals the value at {@code inputXpath}.
 */
public class PathValuePredicate implements ValuePredicate<String> {
    private String valueXpath;
    private String inputXpath;

    public PathValuePredicate(String valueXpath, String inputXpath){
        this.valueXpath = valueXpath;
        this.inputXpath = inputXpath;
    }

    @Override
    public boolean apply(@Nullable String input) {
        JXPathContext context = JXPathContext.newContext(input);
        // Lenient: a missing path evaluates to null rather than throwing.
        context.setLenient(true);
        Object lhs = context.getValue(valueXpath);
        Object rhs = context.getValue(inputXpath);
        return Objects.equal(lhs, rhs);
    }

    public String getInputXpath(){
        return inputXpath;
    }

    public String getValueXpath() {
        return valueXpath;
    }

    @Override
    public int hashCode() {
        int h = 31 + (inputXpath == null ? 0 : inputXpath.hashCode());
        return 31 * h + (valueXpath == null ? 0 : valueXpath.hashCode());
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (obj == null || obj.getClass() != getClass()) {
            return false;
        }
        PathValuePredicate that = (PathValuePredicate) obj;
        return Objects.equal(inputXpath, that.inputXpath)
                && Objects.equal(valueXpath, that.valueXpath);
    }

    @Override
    public String toString() {
        return "PathValuePredicate [valueXpath=" + valueXpath
                + ", inputXpath=" + inputXpath + "]";
    }
}
| 1,508 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/lang/FilterLanguageSupport.java
|
package com.netflix.suro.routing.filter.lang;
import com.netflix.suro.routing.filter.MessageFilter;
/**
 * General contract for any filter language: a methodology of converting a language expression to a
 * valid {@link com.netflix.suro.routing.filter.MessageFilter} instance consumable by the
 * {@link com.netflix.suro.routing.filter} framework.
 *
 * @author Nitesh Kant ([email protected])
 */
public interface FilterLanguageSupport<T> {
/**
 * Converts the passed filter object to a valid {@link MessageFilter}.
 *
 * @param filter Filter object to convert.
 *
 * @return {@link MessageFilter} corresponding to the passed filter.
 *
 * @throws InvalidFilterException If the passed filter was invalid.
 */
MessageFilter convert(T filter) throws InvalidFilterException;
}
| 1,509 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/lang/MessageFilterParser.java
|
// $ANTLR 3.4 MessageFilter.g 2012-08-22 11:55:58
package com.netflix.suro.routing.filter.lang;
import org.antlr.runtime.*;
import org.antlr.runtime.tree.*;
@SuppressWarnings({"all", "warnings", "unchecked"})
public class MessageFilterParser extends Parser {
// Token display names and numeric token types generated by ANTLR 3.4 from
// MessageFilter.g. Do not edit by hand; regenerate from the grammar instead.
public static final String[] tokenNames = new String[] {
"<invalid>", "<EOR>", "<DOWN>", "<UP>", "AND", "BETWEEN", "COMMENT", "EQUALS", "ESC_SEQ", "EXISTS", "EXPONENT", "FALSE", "GE", "GT", "HEX_DIGIT", "IN", "IS", "LE", "LT", "MATCHES", "NOT", "NOT_EQUALS", "NULL", "NUMBER", "OCTAL_ESC", "OR", "STRING", "TIME_MILLIS_FUN_NAME", "TIME_STRING_FUN_NAME", "TRUE", "UNICODE_ESC", "WS", "XPATH_FUN_NAME", "'('", "')'", "','"
};
public static final int EOF=-1;
public static final int T__33=33;
public static final int T__34=34;
public static final int T__35=35;
public static final int AND=4;
public static final int BETWEEN=5;
public static final int COMMENT=6;
public static final int EQUALS=7;
public static final int ESC_SEQ=8;
public static final int EXISTS=9;
public static final int EXPONENT=10;
public static final int FALSE=11;
public static final int GE=12;
public static final int GT=13;
public static final int HEX_DIGIT=14;
public static final int IN=15;
public static final int IS=16;
public static final int LE=17;
public static final int LT=18;
public static final int MATCHES=19;
public static final int NOT=20;
public static final int NOT_EQUALS=21;
public static final int NULL=22;
public static final int NUMBER=23;
public static final int OCTAL_ESC=24;
public static final int OR=25;
public static final int STRING=26;
public static final int TIME_MILLIS_FUN_NAME=27;
public static final int TIME_STRING_FUN_NAME=28;
public static final int TRUE=29;
public static final int UNICODE_ESC=30;
public static final int WS=31;
public static final int XPATH_FUN_NAME=32;
// delegates: this grammar imports no other grammars, so the list is empty.
public Parser[] getDelegates() {
return new Parser[] {};
}
// delegators: none. Convenience constructor using fresh shared recognizer state.
public MessageFilterParser(TokenStream input) {
this(input, new RecognizerSharedState());
}
// Constructor allowing an externally supplied RecognizerSharedState.
public MessageFilterParser(TokenStream input, RecognizerSharedState state) {
super(input, state);
}
// Adaptor used to build AST nodes; defaults to CommonTreeAdaptor and can be
// replaced (e.g. to produce custom tree node types).
protected TreeAdaptor adaptor = new CommonTreeAdaptor();
public void setTreeAdaptor(TreeAdaptor adaptor) {
this.adaptor = adaptor;
}
public TreeAdaptor getTreeAdaptor() {
return adaptor;
}
// Standard ANTLR recognizer metadata accessors.
public String[] getTokenNames() { return MessageFilterParser.tokenNames; }
public String getGrammarFileName() { return "MessageFilter.g"; }
/**
 * Creates a new parser that parses the given input string.
 *
 * @param input the filter DSL expression to parse.
 * @return a parser reading tokens lexed from {@code input}.
 */
public static MessageFilterParser createParser(String input) {
ANTLRStringStream inputStream = new ANTLRStringStream(input);
MessageFilterLexer lexer = new MessageFilterLexer(inputStream);
CommonTokenStream tokens = new CommonTokenStream(lexer);
return new MessageFilterParser(tokens);
}
// Overridden so parse errors surface to the caller as exceptions (via
// throwParsingError) instead of being printed by the default implementation.
@Override
public void reportError(RecognitionException e) {
// if we've already reported an error and have not matched a token
// yet successfully, don't report any errors.
if ( state.errorRecovery ) {
return;
}
state.syntaxErrors++; // don't count spurious
state.errorRecovery = true;
throwParsingError(this.getTokenNames(), e);
}
// A slight modification of #displayRecognitionError(String[], RecognitionException):
// builds the same header+message text but throws it (with the original
// RecognitionException as cause) instead of printing it.
private void throwParsingError(String[] tokenNames, RecognitionException e) {
String hdr = getErrorHeader(e);
String msg = getErrorMessage(e, tokenNames);
throw new MessageFilterParsingException(String.format("%s %s", hdr, msg), e);
}
// Return scope for the "filter" rule; carries the rewritten AST subtree.
public static class filter_return extends ParserRuleReturnScope {
CommonTree tree;
public Object getTree() { return tree; }
};
// $ANTLR start "filter"
// MessageFilter.g:102:1: filter : (a= boolean_expr -> $a) ( OR b= boolean_expr -> ^( OR $filter $b) )* ( EOF )? ;
public final filter_return filter() throws RecognitionException {
filter_return retval = new filter_return();
retval.start = input.LT(1);
CommonTree root_0 = null;
Token OR1=null;
Token EOF2=null;
boolean_expr_return a =null;
boolean_expr_return b =null;
CommonTree OR1_tree=null;
CommonTree EOF2_tree=null;
RewriteRuleTokenStream stream_EOF=new RewriteRuleTokenStream(adaptor,"token EOF");
RewriteRuleTokenStream stream_OR=new RewriteRuleTokenStream(adaptor,"token OR");
RewriteRuleSubtreeStream stream_boolean_expr=new RewriteRuleSubtreeStream(adaptor,"rule boolean_expr");
try {
// MessageFilter.g:103:2: ( (a= boolean_expr -> $a) ( OR b= boolean_expr -> ^( OR $filter $b) )* ( EOF )? )
// MessageFilter.g:103:4: (a= boolean_expr -> $a) ( OR b= boolean_expr -> ^( OR $filter $b) )* ( EOF )?
{
// MessageFilter.g:103:4: (a= boolean_expr -> $a)
// MessageFilter.g:103:5: a= boolean_expr
{
pushFollow(FOLLOW_boolean_expr_in_filter323);
a=boolean_expr();
state._fsp--;
stream_boolean_expr.add(a.getTree());
// AST REWRITE
// elements: a
// token labels:
// rule labels: retval, a
// token list labels:
// rule list labels:
// wildcard labels:
retval.tree = root_0;
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
RewriteRuleSubtreeStream stream_a=new RewriteRuleSubtreeStream(adaptor,"rule a",a!=null?a.tree:null);
root_0 = (CommonTree)adaptor.nil();
// 103:19: -> $a
{
adaptor.addChild(root_0, stream_a.nextTree());
}
retval.tree = root_0;
}
// MessageFilter.g:103:25: ( OR b= boolean_expr -> ^( OR $filter $b) )*
loop1:
do {
int alt1=2;
int LA1_0 = input.LA(1);
if ( (LA1_0==OR) ) {
alt1=1;
}
switch (alt1) {
case 1 :
// MessageFilter.g:103:26: OR b= boolean_expr
{
OR1=(Token)match(input,OR,FOLLOW_OR_in_filter330);
stream_OR.add(OR1);
pushFollow(FOLLOW_boolean_expr_in_filter334);
b=boolean_expr();
state._fsp--;
stream_boolean_expr.add(b.getTree());
// AST REWRITE
// elements: OR, filter, b
// token labels:
// rule labels: retval, b
// token list labels:
// rule list labels:
// wildcard labels:
retval.tree = root_0;
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
RewriteRuleSubtreeStream stream_b=new RewriteRuleSubtreeStream(adaptor,"rule b",b!=null?b.tree:null);
root_0 = (CommonTree)adaptor.nil();
// 103:44: -> ^( OR $filter $b)
{
// MessageFilter.g:103:47: ^( OR $filter $b)
{
CommonTree root_1 = (CommonTree)adaptor.nil();
root_1 = (CommonTree)adaptor.becomeRoot(
new OrTreeNode(stream_OR.nextToken())
, root_1);
adaptor.addChild(root_1, stream_retval.nextTree());
adaptor.addChild(root_1, stream_b.nextTree());
adaptor.addChild(root_0, root_1);
}
}
retval.tree = root_0;
}
break;
default :
break loop1;
}
} while (true);
// MessageFilter.g:103:79: ( EOF )?
int alt2=2;
int LA2_0 = input.LA(1);
if ( (LA2_0==EOF) ) {
alt2=1;
}
switch (alt2) {
case 1 :
// MessageFilter.g:103:79: EOF
{
EOF2=(Token)match(input,EOF,FOLLOW_EOF_in_filter354);
stream_EOF.add(EOF2);
}
break;
}
}
retval.stop = input.LT(-1);
retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
retval.tree = (CommonTree)adaptor.errorNode(input, retval.start, input.LT(-1), re);
}
finally {
// do for sure before leaving
}
return retval;
}
// $ANTLR end "filter"
// Return scope for the "boolean_expr" rule; carries the rewritten AST subtree.
public static class boolean_expr_return extends ParserRuleReturnScope {
CommonTree tree;
public Object getTree() { return tree; }
};
// $ANTLR start "boolean_expr"
// MessageFilter.g:106:1: boolean_expr : (a= boolean_factor -> $a) ( AND b= boolean_factor -> ^( AND $boolean_expr $b) )* ;
public final boolean_expr_return boolean_expr() throws RecognitionException {
boolean_expr_return retval = new boolean_expr_return();
retval.start = input.LT(1);
CommonTree root_0 = null;
Token AND3=null;
boolean_factor_return a =null;
boolean_factor_return b =null;
CommonTree AND3_tree=null;
RewriteRuleTokenStream stream_AND=new RewriteRuleTokenStream(adaptor,"token AND");
RewriteRuleSubtreeStream stream_boolean_factor=new RewriteRuleSubtreeStream(adaptor,"rule boolean_factor");
try {
// MessageFilter.g:107:2: ( (a= boolean_factor -> $a) ( AND b= boolean_factor -> ^( AND $boolean_expr $b) )* )
// MessageFilter.g:107:4: (a= boolean_factor -> $a) ( AND b= boolean_factor -> ^( AND $boolean_expr $b) )*
{
// MessageFilter.g:107:4: (a= boolean_factor -> $a)
// MessageFilter.g:107:5: a= boolean_factor
{
pushFollow(FOLLOW_boolean_factor_in_boolean_expr370);
a=boolean_factor();
state._fsp--;
stream_boolean_factor.add(a.getTree());
// AST REWRITE
// elements: a
// token labels:
// rule labels: retval, a
// token list labels:
// rule list labels:
// wildcard labels:
retval.tree = root_0;
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
RewriteRuleSubtreeStream stream_a=new RewriteRuleSubtreeStream(adaptor,"rule a",a!=null?a.tree:null);
root_0 = (CommonTree)adaptor.nil();
// 107:21: -> $a
{
adaptor.addChild(root_0, stream_a.nextTree());
}
retval.tree = root_0;
}
// MessageFilter.g:107:27: ( AND b= boolean_factor -> ^( AND $boolean_expr $b) )*
loop3:
do {
int alt3=2;
int LA3_0 = input.LA(1);
if ( (LA3_0==AND) ) {
alt3=1;
}
switch (alt3) {
case 1 :
// MessageFilter.g:107:28: AND b= boolean_factor
{
AND3=(Token)match(input,AND,FOLLOW_AND_in_boolean_expr377);
stream_AND.add(AND3);
pushFollow(FOLLOW_boolean_factor_in_boolean_expr381);
b=boolean_factor();
state._fsp--;
stream_boolean_factor.add(b.getTree());
// AST REWRITE
// elements: b, boolean_expr, AND
// token labels:
// rule labels: retval, b
// token list labels:
// rule list labels:
// wildcard labels:
retval.tree = root_0;
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
RewriteRuleSubtreeStream stream_b=new RewriteRuleSubtreeStream(adaptor,"rule b",b!=null?b.tree:null);
root_0 = (CommonTree)adaptor.nil();
// 107:49: -> ^( AND $boolean_expr $b)
{
// MessageFilter.g:107:52: ^( AND $boolean_expr $b)
{
CommonTree root_1 = (CommonTree)adaptor.nil();
root_1 = (CommonTree)adaptor.becomeRoot(
new AndTreeNode(stream_AND.nextToken())
, root_1);
adaptor.addChild(root_1, stream_retval.nextTree());
adaptor.addChild(root_1, stream_b.nextTree());
adaptor.addChild(root_0, root_1);
}
}
retval.tree = root_0;
}
break;
default :
break loop3;
}
} while (true);
}
retval.stop = input.LT(-1);
retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
retval.tree = (CommonTree)adaptor.errorNode(input, retval.start, input.LT(-1), re);
}
finally {
// do for sure before leaving
}
return retval;
}
// $ANTLR end "boolean_expr"
// Return scope for the "boolean_factor" rule; carries the rewritten AST subtree.
public static class boolean_factor_return extends ParserRuleReturnScope {
CommonTree tree;
public Object getTree() { return tree; }
};
// $ANTLR start "boolean_factor"
// MessageFilter.g:111:1: boolean_factor : ( predicate | NOT predicate -> ^( NOT predicate ) );
public final boolean_factor_return boolean_factor() throws RecognitionException {
boolean_factor_return retval = new boolean_factor_return();
retval.start = input.LT(1);
CommonTree root_0 = null;
Token NOT5=null;
predicate_return predicate4 =null;
predicate_return predicate6 =null;
CommonTree NOT5_tree=null;
RewriteRuleTokenStream stream_NOT=new RewriteRuleTokenStream(adaptor,"token NOT");
RewriteRuleSubtreeStream stream_predicate=new RewriteRuleSubtreeStream(adaptor,"rule predicate");
try {
// MessageFilter.g:112:2: ( predicate | NOT predicate -> ^( NOT predicate ) )
int alt4=2;
int LA4_0 = input.LA(1);
if ( (LA4_0==EXISTS||LA4_0==FALSE||LA4_0==TRUE||(LA4_0 >= XPATH_FUN_NAME && LA4_0 <= 33)) ) {
alt4=1;
}
else if ( (LA4_0==NOT) ) {
alt4=2;
}
else {
NoViableAltException nvae =
new NoViableAltException("", 4, 0, input);
throw nvae;
}
switch (alt4) {
case 1 :
// MessageFilter.g:112:4: predicate
{
root_0 = (CommonTree)adaptor.nil();
pushFollow(FOLLOW_predicate_in_boolean_factor414);
predicate4=predicate();
state._fsp--;
adaptor.addChild(root_0, predicate4.getTree());
}
break;
case 2 :
// MessageFilter.g:113:6: NOT predicate
{
NOT5=(Token)match(input,NOT,FOLLOW_NOT_in_boolean_factor423);
stream_NOT.add(NOT5);
pushFollow(FOLLOW_predicate_in_boolean_factor425);
predicate6=predicate();
state._fsp--;
stream_predicate.add(predicate6.getTree());
// AST REWRITE
// elements: NOT, predicate
// token labels:
// rule labels: retval
// token list labels:
// rule list labels:
// wildcard labels:
retval.tree = root_0;
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
root_0 = (CommonTree)adaptor.nil();
// 113:20: -> ^( NOT predicate )
{
// MessageFilter.g:113:23: ^( NOT predicate )
{
CommonTree root_1 = (CommonTree)adaptor.nil();
root_1 = (CommonTree)adaptor.becomeRoot(
new NotTreeNode(stream_NOT.nextToken())
, root_1);
adaptor.addChild(root_1, stream_predicate.nextTree());
adaptor.addChild(root_0, root_1);
}
}
retval.tree = root_0;
}
break;
}
retval.stop = input.LT(-1);
retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
retval.tree = (CommonTree)adaptor.errorNode(input, retval.start, input.LT(-1), re);
}
finally {
// do for sure before leaving
}
return retval;
}
// $ANTLR end "boolean_factor"
// Return scope for the "predicate" rule; carries the rewritten AST subtree.
public static class predicate_return extends ParserRuleReturnScope {
CommonTree tree;
public Object getTree() { return tree; }
};
// $ANTLR start "predicate"
// MessageFilter.g:116:1: predicate : ( '(' filter ')' -> filter | comparison_function | between_predicate | in_predicate | null_predicate | regex_predicate | exists_predicate | TRUE -> TRUE | FALSE -> FALSE );
public final predicate_return predicate() throws RecognitionException {
predicate_return retval = new predicate_return();
retval.start = input.LT(1);
CommonTree root_0 = null;
Token char_literal7=null;
Token char_literal9=null;
Token TRUE16=null;
Token FALSE17=null;
filter_return filter8 =null;
comparison_function_return comparison_function10 =null;
between_predicate_return between_predicate11 =null;
in_predicate_return in_predicate12 =null;
null_predicate_return null_predicate13 =null;
regex_predicate_return regex_predicate14 =null;
exists_predicate_return exists_predicate15 =null;
CommonTree char_literal7_tree=null;
CommonTree char_literal9_tree=null;
CommonTree TRUE16_tree=null;
CommonTree FALSE17_tree=null;
RewriteRuleTokenStream stream_FALSE=new RewriteRuleTokenStream(adaptor,"token FALSE");
RewriteRuleTokenStream stream_TRUE=new RewriteRuleTokenStream(adaptor,"token TRUE");
RewriteRuleTokenStream stream_33=new RewriteRuleTokenStream(adaptor,"token 33");
RewriteRuleTokenStream stream_34=new RewriteRuleTokenStream(adaptor,"token 34");
RewriteRuleSubtreeStream stream_filter=new RewriteRuleSubtreeStream(adaptor,"rule filter");
try {
// MessageFilter.g:117:2: ( '(' filter ')' -> filter | comparison_function | between_predicate | in_predicate | null_predicate | regex_predicate | exists_predicate | TRUE -> TRUE | FALSE -> FALSE )
int alt5=9;
switch ( input.LA(1) ) {
case 33:
{
alt5=1;
}
break;
case XPATH_FUN_NAME:
{
int LA5_2 = input.LA(2);
if ( (LA5_2==33) ) {
int LA5_6 = input.LA(3);
if ( (LA5_6==STRING) ) {
int LA5_7 = input.LA(4);
if ( (LA5_7==34) ) {
switch ( input.LA(5) ) {
case EQUALS:
case GE:
case GT:
case LE:
case LT:
case NOT_EQUALS:
{
alt5=2;
}
break;
case BETWEEN:
{
alt5=3;
}
break;
case IN:
{
alt5=4;
}
break;
case IS:
{
alt5=5;
}
break;
case MATCHES:
{
alt5=6;
}
break;
case EXISTS:
{
alt5=7;
}
break;
default:
NoViableAltException nvae =
new NoViableAltException("", 5, 8, input);
throw nvae;
}
}
else {
NoViableAltException nvae =
new NoViableAltException("", 5, 7, input);
throw nvae;
}
}
else {
NoViableAltException nvae =
new NoViableAltException("", 5, 6, input);
throw nvae;
}
}
else {
NoViableAltException nvae =
new NoViableAltException("", 5, 2, input);
throw nvae;
}
}
break;
case EXISTS:
{
alt5=7;
}
break;
case TRUE:
{
alt5=8;
}
break;
case FALSE:
{
alt5=9;
}
break;
default:
NoViableAltException nvae =
new NoViableAltException("", 5, 0, input);
throw nvae;
}
switch (alt5) {
case 1 :
// MessageFilter.g:117:4: '(' filter ')'
{
char_literal7=(Token)match(input,33,FOLLOW_33_in_predicate448);
stream_33.add(char_literal7);
pushFollow(FOLLOW_filter_in_predicate450);
filter8=filter();
state._fsp--;
stream_filter.add(filter8.getTree());
char_literal9=(Token)match(input,34,FOLLOW_34_in_predicate452);
stream_34.add(char_literal9);
// AST REWRITE
// elements: filter
// token labels:
// rule labels: retval
// token list labels:
// rule list labels:
// wildcard labels:
retval.tree = root_0;
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
root_0 = (CommonTree)adaptor.nil();
// 117:19: -> filter
{
adaptor.addChild(root_0, stream_filter.nextTree());
}
retval.tree = root_0;
}
break;
case 2 :
// MessageFilter.g:118:3: comparison_function
{
root_0 = (CommonTree)adaptor.nil();
pushFollow(FOLLOW_comparison_function_in_predicate463);
comparison_function10=comparison_function();
state._fsp--;
adaptor.addChild(root_0, comparison_function10.getTree());
}
break;
case 3 :
// MessageFilter.g:119:3: between_predicate
{
root_0 = (CommonTree)adaptor.nil();
pushFollow(FOLLOW_between_predicate_in_predicate469);
between_predicate11=between_predicate();
state._fsp--;
adaptor.addChild(root_0, between_predicate11.getTree());
}
break;
case 4 :
// MessageFilter.g:120:3: in_predicate
{
root_0 = (CommonTree)adaptor.nil();
pushFollow(FOLLOW_in_predicate_in_predicate475);
in_predicate12=in_predicate();
state._fsp--;
adaptor.addChild(root_0, in_predicate12.getTree());
}
break;
case 5 :
// MessageFilter.g:121:3: null_predicate
{
root_0 = (CommonTree)adaptor.nil();
pushFollow(FOLLOW_null_predicate_in_predicate481);
null_predicate13=null_predicate();
state._fsp--;
adaptor.addChild(root_0, null_predicate13.getTree());
}
break;
case 6 :
// MessageFilter.g:122:3: regex_predicate
{
root_0 = (CommonTree)adaptor.nil();
pushFollow(FOLLOW_regex_predicate_in_predicate487);
regex_predicate14=regex_predicate();
state._fsp--;
adaptor.addChild(root_0, regex_predicate14.getTree());
}
break;
case 7 :
// MessageFilter.g:123:3: exists_predicate
{
root_0 = (CommonTree)adaptor.nil();
pushFollow(FOLLOW_exists_predicate_in_predicate493);
exists_predicate15=exists_predicate();
state._fsp--;
adaptor.addChild(root_0, exists_predicate15.getTree());
}
break;
case 8 :
// MessageFilter.g:124:3: TRUE
{
TRUE16=(Token)match(input,TRUE,FOLLOW_TRUE_in_predicate499);
stream_TRUE.add(TRUE16);
// AST REWRITE
// elements: TRUE
// token labels:
// rule labels: retval
// token list labels:
// rule list labels:
// wildcard labels:
retval.tree = root_0;
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
root_0 = (CommonTree)adaptor.nil();
// 124:8: -> TRUE
{
adaptor.addChild(root_0,
new TrueValueTreeNode(stream_TRUE.nextToken())
);
}
retval.tree = root_0;
}
break;
case 9 :
// MessageFilter.g:125:3: FALSE
{
FALSE17=(Token)match(input,FALSE,FOLLOW_FALSE_in_predicate511);
stream_FALSE.add(FALSE17);
// AST REWRITE
// elements: FALSE
// token labels:
// rule labels: retval
// token list labels:
// rule list labels:
// wildcard labels:
retval.tree = root_0;
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
root_0 = (CommonTree)adaptor.nil();
// 125:9: -> FALSE
{
adaptor.addChild(root_0,
new FalseValueTreeNode(stream_FALSE.nextToken())
);
}
retval.tree = root_0;
}
break;
}
retval.stop = input.LT(-1);
retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
retval.tree = (CommonTree)adaptor.errorNode(input, retval.start, input.LT(-1), re);
}
finally {
// do for sure before leaving
}
return retval;
}
// $ANTLR end "predicate"
// Return scope for the generated "comparison_function" rule: carries the
// matched token range (via ParserRuleReturnScope) plus the rewritten AST.
// NOTE(review): ANTLR-generated — regenerate from MessageFilter.g, do not hand-edit.
public static class comparison_function_return extends ParserRuleReturnScope {
    // Root of the AST produced by this rule's "->" rewrite.
    CommonTree tree;
    public Object getTree() { return tree; }
};
// $ANTLR start "comparison_function"
// MessageFilter.g:128:1: comparison_function : ( path_function EQUALS value_function -> ^( EQUALS path_function value_function ) | path_function NOT_EQUALS value_function -> ^( NOT_EQUALS path_function value_function ) | path_function GT compariable_value_function -> ^( GT path_function compariable_value_function ) | path_function GE compariable_value_function -> ^( GE path_function compariable_value_function ) | path_function LT compariable_value_function -> ^( LT path_function compariable_value_function ) | path_function LE compariable_value_function -> ^( LE path_function compariable_value_function ) );
/**
 * ANTLR-generated implementation of the "comparison_function" rule
 * (MessageFilter.g:128):
 *
 *   path_function (EQUALS | NOT_EQUALS | GT | GE | LT | LE) value
 *
 * Each alternative is rewritten into an AST whose root is a custom tree node
 * (EqualsTreeNode, NotEqualsTreeNode, or ComparableTreeNode) so the filter
 * evaluator can dispatch on node type.
 *
 * NOTE(review): generated code — change MessageFilter.g and regenerate rather
 * than editing by hand. Token types 33 and 34 appear to be the '(' and ')'
 * literals of the path_function call syntax — TODO confirm against the
 * generated token vocabulary.
 *
 * @return rule return scope whose {@code tree} holds the rewritten subtree
 * @throws RecognitionException if the input cannot be matched (re-reported
 *         and recovered internally; an error node is returned in that case)
 */
public final comparison_function_return comparison_function() throws RecognitionException {
    comparison_function_return retval = new comparison_function_return();
    retval.start = input.LT(1);

    CommonTree root_0 = null;

    // Tokens and sub-rule results, one set per alternative (numbered by ANTLR).
    Token EQUALS19=null;
    Token NOT_EQUALS22=null;
    Token GT25=null;
    Token GE28=null;
    Token LT31=null;
    Token LE34=null;
    path_function_return path_function18 =null;
    value_function_return value_function20 =null;
    path_function_return path_function21 =null;
    value_function_return value_function23 =null;
    path_function_return path_function24 =null;
    compariable_value_function_return compariable_value_function26 =null;
    path_function_return path_function27 =null;
    compariable_value_function_return compariable_value_function29 =null;
    path_function_return path_function30 =null;
    compariable_value_function_return compariable_value_function32 =null;
    path_function_return path_function33 =null;
    compariable_value_function_return compariable_value_function35 =null;

    CommonTree EQUALS19_tree=null;
    CommonTree NOT_EQUALS22_tree=null;
    CommonTree GT25_tree=null;
    CommonTree GE28_tree=null;
    CommonTree LT31_tree=null;
    CommonTree LE34_tree=null;

    // Rewrite streams buffer matched tokens/subtrees for the "->" AST rewrite.
    RewriteRuleTokenStream stream_GE=new RewriteRuleTokenStream(adaptor,"token GE");
    RewriteRuleTokenStream stream_GT=new RewriteRuleTokenStream(adaptor,"token GT");
    RewriteRuleTokenStream stream_LT=new RewriteRuleTokenStream(adaptor,"token LT");
    RewriteRuleTokenStream stream_EQUALS=new RewriteRuleTokenStream(adaptor,"token EQUALS");
    RewriteRuleTokenStream stream_NOT_EQUALS=new RewriteRuleTokenStream(adaptor,"token NOT_EQUALS");
    RewriteRuleTokenStream stream_LE=new RewriteRuleTokenStream(adaptor,"token LE");
    RewriteRuleSubtreeStream stream_compariable_value_function=new RewriteRuleSubtreeStream(adaptor,"rule compariable_value_function");
    RewriteRuleSubtreeStream stream_value_function=new RewriteRuleSubtreeStream(adaptor,"rule value_function");
    RewriteRuleSubtreeStream stream_path_function=new RewriteRuleSubtreeStream(adaptor,"rule path_function");

    try {
        // MessageFilter.g:129:2: ( path_function EQUALS value_function -> ^( EQUALS path_function value_function ) | path_function NOT_EQUALS value_function -> ^( NOT_EQUALS path_function value_function ) | path_function GT compariable_value_function -> ^( GT path_function compariable_value_function ) | path_function GE compariable_value_function -> ^( GE path_function compariable_value_function ) | path_function LT compariable_value_function -> ^( LT path_function compariable_value_function ) | path_function LE compariable_value_function -> ^( LE path_function compariable_value_function ) )
        // Hand-unrolled LL(5) lookahead: all six alternatives share the
        // path_function prefix, so the comparison operator at LA(5) decides.
        int alt6=6;
        int LA6_0 = input.LA(1);
        if ( (LA6_0==XPATH_FUN_NAME) ) {
            int LA6_1 = input.LA(2);
            if ( (LA6_1==33) ) {
                int LA6_2 = input.LA(3);
                if ( (LA6_2==STRING) ) {
                    int LA6_3 = input.LA(4);
                    if ( (LA6_3==34) ) {
                        switch ( input.LA(5) ) {
                        case EQUALS:
                            {
                            alt6=1;
                            }
                            break;
                        case NOT_EQUALS:
                            {
                            alt6=2;
                            }
                            break;
                        case GT:
                            {
                            alt6=3;
                            }
                            break;
                        case GE:
                            {
                            alt6=4;
                            }
                            break;
                        case LT:
                            {
                            alt6=5;
                            }
                            break;
                        case LE:
                            {
                            alt6=6;
                            }
                            break;
                        default:
                            NoViableAltException nvae =
                                new NoViableAltException("", 6, 4, input);
                            throw nvae;
                        }
                    }
                    else {
                        NoViableAltException nvae =
                            new NoViableAltException("", 6, 3, input);
                        throw nvae;
                    }
                }
                else {
                    NoViableAltException nvae =
                        new NoViableAltException("", 6, 2, input);
                    throw nvae;
                }
            }
            else {
                NoViableAltException nvae =
                    new NoViableAltException("", 6, 1, input);
                throw nvae;
            }
        }
        else {
            NoViableAltException nvae =
                new NoViableAltException("", 6, 0, input);
            throw nvae;
        }

        switch (alt6) {
            case 1 :
                // MessageFilter.g:129:4: path_function EQUALS value_function
                {
                pushFollow(FOLLOW_path_function_in_comparison_function529);
                path_function18=path_function();
                state._fsp--;
                stream_path_function.add(path_function18.getTree());
                EQUALS19=(Token)match(input,EQUALS,FOLLOW_EQUALS_in_comparison_function531);
                stream_EQUALS.add(EQUALS19);
                pushFollow(FOLLOW_value_function_in_comparison_function533);
                value_function20=value_function();
                state._fsp--;
                stream_value_function.add(value_function20.getTree());
                // AST REWRITE
                // elements: value_function, path_function, EQUALS
                // token labels:
                // rule labels: retval
                // token list labels:
                // rule list labels:
                // wildcard labels:
                retval.tree = root_0;
                RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                root_0 = (CommonTree)adaptor.nil();
                // 129:40: -> ^( EQUALS path_function value_function )
                {
                    // MessageFilter.g:129:43: ^( EQUALS path_function value_function )
                    {
                    CommonTree root_1 = (CommonTree)adaptor.nil();
                    // EQUALS becomes the root as an EqualsTreeNode (custom node type).
                    root_1 = (CommonTree)adaptor.becomeRoot(
                    new EqualsTreeNode(stream_EQUALS.nextToken())
                    , root_1);
                    adaptor.addChild(root_1, stream_path_function.nextTree());
                    adaptor.addChild(root_1, stream_value_function.nextTree());
                    adaptor.addChild(root_0, root_1);
                    }
                }

                retval.tree = root_0;
                }
                break;
            case 2 :
                // MessageFilter.g:130:3: path_function NOT_EQUALS value_function
                {
                pushFollow(FOLLOW_path_function_in_comparison_function552);
                path_function21=path_function();
                state._fsp--;
                stream_path_function.add(path_function21.getTree());
                NOT_EQUALS22=(Token)match(input,NOT_EQUALS,FOLLOW_NOT_EQUALS_in_comparison_function554);
                stream_NOT_EQUALS.add(NOT_EQUALS22);
                pushFollow(FOLLOW_value_function_in_comparison_function556);
                value_function23=value_function();
                state._fsp--;
                stream_value_function.add(value_function23.getTree());
                // AST REWRITE
                // elements: value_function, path_function, NOT_EQUALS
                // token labels:
                // rule labels: retval
                // token list labels:
                // rule list labels:
                // wildcard labels:
                retval.tree = root_0;
                RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                root_0 = (CommonTree)adaptor.nil();
                // 130:43: -> ^( NOT_EQUALS path_function value_function )
                {
                    // MessageFilter.g:130:46: ^( NOT_EQUALS path_function value_function )
                    {
                    CommonTree root_1 = (CommonTree)adaptor.nil();
                    root_1 = (CommonTree)adaptor.becomeRoot(
                    new NotEqualsTreeNode(stream_NOT_EQUALS.nextToken())
                    , root_1);
                    adaptor.addChild(root_1, stream_path_function.nextTree());
                    adaptor.addChild(root_1, stream_value_function.nextTree());
                    adaptor.addChild(root_0, root_1);
                    }
                }

                retval.tree = root_0;
                }
                break;
            case 3 :
                // MessageFilter.g:131:3: path_function GT compariable_value_function
                {
                pushFollow(FOLLOW_path_function_in_comparison_function575);
                path_function24=path_function();
                state._fsp--;
                stream_path_function.add(path_function24.getTree());
                GT25=(Token)match(input,GT,FOLLOW_GT_in_comparison_function577);
                stream_GT.add(GT25);
                pushFollow(FOLLOW_compariable_value_function_in_comparison_function579);
                compariable_value_function26=compariable_value_function();
                state._fsp--;
                stream_compariable_value_function.add(compariable_value_function26.getTree());
                // AST REWRITE
                // elements: path_function, GT, compariable_value_function
                // token labels:
                // rule labels: retval
                // token list labels:
                // rule list labels:
                // wildcard labels:
                retval.tree = root_0;
                RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                root_0 = (CommonTree)adaptor.nil();
                // 131:47: -> ^( GT path_function compariable_value_function )
                {
                    // MessageFilter.g:131:50: ^( GT path_function compariable_value_function )
                    {
                    CommonTree root_1 = (CommonTree)adaptor.nil();
                    // GT/GE/LT/LE all map onto ComparableTreeNode; the node keeps
                    // the operator token to distinguish them at evaluation time.
                    root_1 = (CommonTree)adaptor.becomeRoot(
                    new ComparableTreeNode(stream_GT.nextToken())
                    , root_1);
                    adaptor.addChild(root_1, stream_path_function.nextTree());
                    adaptor.addChild(root_1, stream_compariable_value_function.nextTree());
                    adaptor.addChild(root_0, root_1);
                    }
                }

                retval.tree = root_0;
                }
                break;
            case 4 :
                // MessageFilter.g:132:3: path_function GE compariable_value_function
                {
                pushFollow(FOLLOW_path_function_in_comparison_function598);
                path_function27=path_function();
                state._fsp--;
                stream_path_function.add(path_function27.getTree());
                GE28=(Token)match(input,GE,FOLLOW_GE_in_comparison_function600);
                stream_GE.add(GE28);
                pushFollow(FOLLOW_compariable_value_function_in_comparison_function602);
                compariable_value_function29=compariable_value_function();
                state._fsp--;
                stream_compariable_value_function.add(compariable_value_function29.getTree());
                // AST REWRITE
                // elements: compariable_value_function, path_function, GE
                // token labels:
                // rule labels: retval
                // token list labels:
                // rule list labels:
                // wildcard labels:
                retval.tree = root_0;
                RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                root_0 = (CommonTree)adaptor.nil();
                // 132:47: -> ^( GE path_function compariable_value_function )
                {
                    // MessageFilter.g:132:50: ^( GE path_function compariable_value_function )
                    {
                    CommonTree root_1 = (CommonTree)adaptor.nil();
                    root_1 = (CommonTree)adaptor.becomeRoot(
                    new ComparableTreeNode(stream_GE.nextToken())
                    , root_1);
                    adaptor.addChild(root_1, stream_path_function.nextTree());
                    adaptor.addChild(root_1, stream_compariable_value_function.nextTree());
                    adaptor.addChild(root_0, root_1);
                    }
                }

                retval.tree = root_0;
                }
                break;
            case 5 :
                // MessageFilter.g:133:3: path_function LT compariable_value_function
                {
                pushFollow(FOLLOW_path_function_in_comparison_function621);
                path_function30=path_function();
                state._fsp--;
                stream_path_function.add(path_function30.getTree());
                LT31=(Token)match(input,LT,FOLLOW_LT_in_comparison_function623);
                stream_LT.add(LT31);
                pushFollow(FOLLOW_compariable_value_function_in_comparison_function625);
                compariable_value_function32=compariable_value_function();
                state._fsp--;
                stream_compariable_value_function.add(compariable_value_function32.getTree());
                // AST REWRITE
                // elements: path_function, compariable_value_function, LT
                // token labels:
                // rule labels: retval
                // token list labels:
                // rule list labels:
                // wildcard labels:
                retval.tree = root_0;
                RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                root_0 = (CommonTree)adaptor.nil();
                // 133:47: -> ^( LT path_function compariable_value_function )
                {
                    // MessageFilter.g:133:50: ^( LT path_function compariable_value_function )
                    {
                    CommonTree root_1 = (CommonTree)adaptor.nil();
                    root_1 = (CommonTree)adaptor.becomeRoot(
                    new ComparableTreeNode(stream_LT.nextToken())
                    , root_1);
                    adaptor.addChild(root_1, stream_path_function.nextTree());
                    adaptor.addChild(root_1, stream_compariable_value_function.nextTree());
                    adaptor.addChild(root_0, root_1);
                    }
                }

                retval.tree = root_0;
                }
                break;
            case 6 :
                // MessageFilter.g:134:3: path_function LE compariable_value_function
                {
                pushFollow(FOLLOW_path_function_in_comparison_function644);
                path_function33=path_function();
                state._fsp--;
                stream_path_function.add(path_function33.getTree());
                LE34=(Token)match(input,LE,FOLLOW_LE_in_comparison_function646);
                stream_LE.add(LE34);
                pushFollow(FOLLOW_compariable_value_function_in_comparison_function648);
                compariable_value_function35=compariable_value_function();
                state._fsp--;
                stream_compariable_value_function.add(compariable_value_function35.getTree());
                // AST REWRITE
                // elements: compariable_value_function, LE, path_function
                // token labels:
                // rule labels: retval
                // token list labels:
                // rule list labels:
                // wildcard labels:
                retval.tree = root_0;
                RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                root_0 = (CommonTree)adaptor.nil();
                // 134:47: -> ^( LE path_function compariable_value_function )
                {
                    // MessageFilter.g:134:50: ^( LE path_function compariable_value_function )
                    {
                    CommonTree root_1 = (CommonTree)adaptor.nil();
                    root_1 = (CommonTree)adaptor.becomeRoot(
                    new ComparableTreeNode(stream_LE.nextToken())
                    , root_1);
                    adaptor.addChild(root_1, stream_path_function.nextTree());
                    adaptor.addChild(root_1, stream_compariable_value_function.nextTree());
                    adaptor.addChild(root_0, root_1);
                    }
                }

                retval.tree = root_0;
                }
                break;
        }
        retval.stop = input.LT(-1);
        retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
        adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
    }
    catch (RecognitionException re) {
        // Standard ANTLR recovery: report, resync, and return an error node
        // instead of propagating the exception to the caller.
        reportError(re);
        recover(input,re);
        retval.tree = (CommonTree)adaptor.errorNode(input, retval.start, input.LT(-1), re);
    }
    finally {
        // do for sure before leaving
    }
    return retval;
}
// $ANTLR end "comparison_function"
// Return scope for the generated "between_predicate" rule: matched token
// range (via ParserRuleReturnScope) plus the rewritten AST.
// NOTE(review): ANTLR-generated — regenerate from MessageFilter.g, do not hand-edit.
public static class between_predicate_return extends ParserRuleReturnScope {
    // Root of the AST produced by this rule's "->" rewrite.
    CommonTree tree;
    public Object getTree() { return tree; }
};
// $ANTLR start "between_predicate"
// MessageFilter.g:137:1: between_predicate : ( path_function BETWEEN '(' NUMBER ',' NUMBER ')' -> ^( BETWEEN path_function NUMBER NUMBER ) | path_function BETWEEN '(' time_millis_function ',' time_millis_function ')' -> ^( BETWEEN path_function time_millis_function time_millis_function ) | path_function BETWEEN '(' time_string_function ',' time_string_function ')' -> ^( BETWEEN path_function time_string_function time_string_function ) );
/**
 * ANTLR-generated implementation of the "between_predicate" rule
 * (MessageFilter.g:137):
 *
 *   path_function BETWEEN '(' NUMBER ',' NUMBER ')'
 * | path_function BETWEEN '(' time_millis_function ',' time_millis_function ')'
 * | path_function BETWEEN '(' time_string_function ',' time_string_function ')'
 *
 * Rewrites each alternative into an AST rooted at a BETWEEN-flavored custom
 * node (BetweenTreeNode / BetweenTimeMillisTreeNode / BetweenTimeStringTreeNode)
 * with the path and the two range bounds as children.
 *
 * NOTE(review): generated code — change MessageFilter.g and regenerate rather
 * than editing by hand. Token types 33/34/35 appear to be '(' / ')' / ','
 * literals — TODO confirm against the generated token vocabulary.
 *
 * @return rule return scope whose {@code tree} holds the rewritten subtree
 * @throws RecognitionException if the input cannot be matched (re-reported
 *         and recovered internally; an error node is returned in that case)
 */
public final between_predicate_return between_predicate() throws RecognitionException {
    between_predicate_return retval = new between_predicate_return();
    retval.start = input.LT(1);

    CommonTree root_0 = null;

    // Tokens and sub-rule results, one set per alternative (numbered by ANTLR).
    Token BETWEEN37=null;
    Token char_literal38=null;
    Token NUMBER39=null;
    Token char_literal40=null;
    Token NUMBER41=null;
    Token char_literal42=null;
    Token BETWEEN44=null;
    Token char_literal45=null;
    Token char_literal47=null;
    Token char_literal49=null;
    Token BETWEEN51=null;
    Token char_literal52=null;
    Token char_literal54=null;
    Token char_literal56=null;
    path_function_return path_function36 =null;
    path_function_return path_function43 =null;
    time_millis_function_return time_millis_function46 =null;
    time_millis_function_return time_millis_function48 =null;
    path_function_return path_function50 =null;
    time_string_function_return time_string_function53 =null;
    time_string_function_return time_string_function55 =null;

    CommonTree BETWEEN37_tree=null;
    CommonTree char_literal38_tree=null;
    CommonTree NUMBER39_tree=null;
    CommonTree char_literal40_tree=null;
    CommonTree NUMBER41_tree=null;
    CommonTree char_literal42_tree=null;
    CommonTree BETWEEN44_tree=null;
    CommonTree char_literal45_tree=null;
    CommonTree char_literal47_tree=null;
    CommonTree char_literal49_tree=null;
    CommonTree BETWEEN51_tree=null;
    CommonTree char_literal52_tree=null;
    CommonTree char_literal54_tree=null;
    CommonTree char_literal56_tree=null;

    // Rewrite streams buffer matched tokens/subtrees for the "->" AST rewrite.
    RewriteRuleTokenStream stream_35=new RewriteRuleTokenStream(adaptor,"token 35");
    RewriteRuleTokenStream stream_33=new RewriteRuleTokenStream(adaptor,"token 33");
    RewriteRuleTokenStream stream_BETWEEN=new RewriteRuleTokenStream(adaptor,"token BETWEEN");
    RewriteRuleTokenStream stream_34=new RewriteRuleTokenStream(adaptor,"token 34");
    RewriteRuleTokenStream stream_NUMBER=new RewriteRuleTokenStream(adaptor,"token NUMBER");
    RewriteRuleSubtreeStream stream_time_string_function=new RewriteRuleSubtreeStream(adaptor,"rule time_string_function");
    RewriteRuleSubtreeStream stream_time_millis_function=new RewriteRuleSubtreeStream(adaptor,"rule time_millis_function");
    RewriteRuleSubtreeStream stream_path_function=new RewriteRuleSubtreeStream(adaptor,"rule path_function");

    try {
        // MessageFilter.g:138:2: ( path_function BETWEEN '(' NUMBER ',' NUMBER ')' -> ^( BETWEEN path_function NUMBER NUMBER ) | path_function BETWEEN '(' time_millis_function ',' time_millis_function ')' -> ^( BETWEEN path_function time_millis_function time_millis_function ) | path_function BETWEEN '(' time_string_function ',' time_string_function ')' -> ^( BETWEEN path_function time_string_function time_string_function ) )
        // Hand-unrolled LL(7) lookahead: all three alternatives share the
        // "path_function BETWEEN '('" prefix, so the first range-bound token
        // at LA(7) decides which alternative to take.
        int alt7=3;
        int LA7_0 = input.LA(1);
        if ( (LA7_0==XPATH_FUN_NAME) ) {
            int LA7_1 = input.LA(2);
            if ( (LA7_1==33) ) {
                int LA7_2 = input.LA(3);
                if ( (LA7_2==STRING) ) {
                    int LA7_3 = input.LA(4);
                    if ( (LA7_3==34) ) {
                        int LA7_4 = input.LA(5);
                        if ( (LA7_4==BETWEEN) ) {
                            int LA7_5 = input.LA(6);
                            if ( (LA7_5==33) ) {
                                switch ( input.LA(7) ) {
                                case NUMBER:
                                    {
                                    alt7=1;
                                    }
                                    break;
                                case TIME_MILLIS_FUN_NAME:
                                case 35:
                                    {
                                    alt7=2;
                                    }
                                    break;
                                case TIME_STRING_FUN_NAME:
                                    {
                                    alt7=3;
                                    }
                                    break;
                                default:
                                    NoViableAltException nvae =
                                        new NoViableAltException("", 7, 6, input);
                                    throw nvae;
                                }
                            }
                            else {
                                NoViableAltException nvae =
                                    new NoViableAltException("", 7, 5, input);
                                throw nvae;
                            }
                        }
                        else {
                            NoViableAltException nvae =
                                new NoViableAltException("", 7, 4, input);
                            throw nvae;
                        }
                    }
                    else {
                        NoViableAltException nvae =
                            new NoViableAltException("", 7, 3, input);
                        throw nvae;
                    }
                }
                else {
                    NoViableAltException nvae =
                        new NoViableAltException("", 7, 2, input);
                    throw nvae;
                }
            }
            else {
                NoViableAltException nvae =
                    new NoViableAltException("", 7, 1, input);
                throw nvae;
            }
        }
        else {
            NoViableAltException nvae =
                new NoViableAltException("", 7, 0, input);
            throw nvae;
        }

        switch (alt7) {
            case 1 :
                // MessageFilter.g:138:4: path_function BETWEEN '(' NUMBER ',' NUMBER ')'
                {
                pushFollow(FOLLOW_path_function_in_between_predicate673);
                path_function36=path_function();
                state._fsp--;
                stream_path_function.add(path_function36.getTree());
                BETWEEN37=(Token)match(input,BETWEEN,FOLLOW_BETWEEN_in_between_predicate675);
                stream_BETWEEN.add(BETWEEN37);
                char_literal38=(Token)match(input,33,FOLLOW_33_in_between_predicate677);
                stream_33.add(char_literal38);
                NUMBER39=(Token)match(input,NUMBER,FOLLOW_NUMBER_in_between_predicate679);
                stream_NUMBER.add(NUMBER39);
                char_literal40=(Token)match(input,35,FOLLOW_35_in_between_predicate681);
                stream_35.add(char_literal40);
                NUMBER41=(Token)match(input,NUMBER,FOLLOW_NUMBER_in_between_predicate683);
                stream_NUMBER.add(NUMBER41);
                char_literal42=(Token)match(input,34,FOLLOW_34_in_between_predicate685);
                stream_34.add(char_literal42);
                // AST REWRITE
                // elements: NUMBER, path_function, NUMBER, BETWEEN
                // token labels:
                // rule labels: retval
                // token list labels:
                // rule list labels:
                // wildcard labels:
                retval.tree = root_0;
                RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                root_0 = (CommonTree)adaptor.nil();
                // 139:4: -> ^( BETWEEN path_function NUMBER NUMBER )
                {
                    // MessageFilter.g:139:7: ^( BETWEEN path_function NUMBER NUMBER )
                    {
                    CommonTree root_1 = (CommonTree)adaptor.nil();
                    root_1 = (CommonTree)adaptor.becomeRoot(
                    new BetweenTreeNode(stream_BETWEEN.nextToken())
                    , root_1);
                    adaptor.addChild(root_1, stream_path_function.nextTree());
                    // The two nextToken() calls consume the lower then the upper
                    // bound in match order from the shared NUMBER stream.
                    adaptor.addChild(root_1,
                    new NumberTreeNode(stream_NUMBER.nextToken())
                    );
                    adaptor.addChild(root_1,
                    new NumberTreeNode(stream_NUMBER.nextToken())
                    );
                    adaptor.addChild(root_0, root_1);
                    }
                }

                retval.tree = root_0;
                }
                break;
            case 2 :
                // MessageFilter.g:140:3: path_function BETWEEN '(' time_millis_function ',' time_millis_function ')'
                {
                pushFollow(FOLLOW_path_function_in_between_predicate716);
                path_function43=path_function();
                state._fsp--;
                stream_path_function.add(path_function43.getTree());
                BETWEEN44=(Token)match(input,BETWEEN,FOLLOW_BETWEEN_in_between_predicate718);
                stream_BETWEEN.add(BETWEEN44);
                char_literal45=(Token)match(input,33,FOLLOW_33_in_between_predicate720);
                stream_33.add(char_literal45);
                pushFollow(FOLLOW_time_millis_function_in_between_predicate722);
                time_millis_function46=time_millis_function();
                state._fsp--;
                stream_time_millis_function.add(time_millis_function46.getTree());
                char_literal47=(Token)match(input,35,FOLLOW_35_in_between_predicate724);
                stream_35.add(char_literal47);
                pushFollow(FOLLOW_time_millis_function_in_between_predicate726);
                time_millis_function48=time_millis_function();
                state._fsp--;
                stream_time_millis_function.add(time_millis_function48.getTree());
                char_literal49=(Token)match(input,34,FOLLOW_34_in_between_predicate728);
                stream_34.add(char_literal49);
                // AST REWRITE
                // elements: time_millis_function, BETWEEN, time_millis_function, path_function
                // token labels:
                // rule labels: retval
                // token list labels:
                // rule list labels:
                // wildcard labels:
                retval.tree = root_0;
                RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                root_0 = (CommonTree)adaptor.nil();
                // 141:4: -> ^( BETWEEN path_function time_millis_function time_millis_function )
                {
                    // MessageFilter.g:141:7: ^( BETWEEN path_function time_millis_function time_millis_function )
                    {
                    CommonTree root_1 = (CommonTree)adaptor.nil();
                    root_1 = (CommonTree)adaptor.becomeRoot(
                    new BetweenTimeMillisTreeNode(stream_BETWEEN.nextToken())
                    , root_1);
                    adaptor.addChild(root_1, stream_path_function.nextTree());
                    adaptor.addChild(root_1, stream_time_millis_function.nextTree());
                    adaptor.addChild(root_1, stream_time_millis_function.nextTree());
                    adaptor.addChild(root_0, root_1);
                    }
                }

                retval.tree = root_0;
                }
                break;
            case 3 :
                // MessageFilter.g:142:3: path_function BETWEEN '(' time_string_function ',' time_string_function ')'
                {
                pushFollow(FOLLOW_path_function_in_between_predicate753);
                path_function50=path_function();
                state._fsp--;
                stream_path_function.add(path_function50.getTree());
                BETWEEN51=(Token)match(input,BETWEEN,FOLLOW_BETWEEN_in_between_predicate755);
                stream_BETWEEN.add(BETWEEN51);
                char_literal52=(Token)match(input,33,FOLLOW_33_in_between_predicate757);
                stream_33.add(char_literal52);
                pushFollow(FOLLOW_time_string_function_in_between_predicate759);
                time_string_function53=time_string_function();
                state._fsp--;
                stream_time_string_function.add(time_string_function53.getTree());
                char_literal54=(Token)match(input,35,FOLLOW_35_in_between_predicate761);
                stream_35.add(char_literal54);
                pushFollow(FOLLOW_time_string_function_in_between_predicate763);
                time_string_function55=time_string_function();
                state._fsp--;
                stream_time_string_function.add(time_string_function55.getTree());
                char_literal56=(Token)match(input,34,FOLLOW_34_in_between_predicate765);
                stream_34.add(char_literal56);
                // AST REWRITE
                // elements: time_string_function, path_function, BETWEEN, time_string_function
                // token labels:
                // rule labels: retval
                // token list labels:
                // rule list labels:
                // wildcard labels:
                retval.tree = root_0;
                RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                root_0 = (CommonTree)adaptor.nil();
                // 143:4: -> ^( BETWEEN path_function time_string_function time_string_function )
                {
                    // MessageFilter.g:143:7: ^( BETWEEN path_function time_string_function time_string_function )
                    {
                    CommonTree root_1 = (CommonTree)adaptor.nil();
                    root_1 = (CommonTree)adaptor.becomeRoot(
                    new BetweenTimeStringTreeNode(stream_BETWEEN.nextToken())
                    , root_1);
                    adaptor.addChild(root_1, stream_path_function.nextTree());
                    adaptor.addChild(root_1, stream_time_string_function.nextTree());
                    adaptor.addChild(root_1, stream_time_string_function.nextTree());
                    adaptor.addChild(root_0, root_1);
                    }
                }

                retval.tree = root_0;
                }
                break;
        }
        retval.stop = input.LT(-1);
        retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
        adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
    }
    catch (RecognitionException re) {
        // Standard ANTLR recovery: report, resync, and return an error node
        // instead of propagating the exception to the caller.
        reportError(re);
        recover(input,re);
        retval.tree = (CommonTree)adaptor.errorNode(input, retval.start, input.LT(-1), re);
    }
    finally {
        // do for sure before leaving
    }
    return retval;
}
// $ANTLR end "between_predicate"
// Return scope for the generated "in_predicate" rule: matched token range
// (via ParserRuleReturnScope) plus the rewritten AST.
// NOTE(review): ANTLR-generated — regenerate from MessageFilter.g, do not hand-edit.
public static class in_predicate_return extends ParserRuleReturnScope {
    // Root of the AST produced by this rule's "->" rewrite.
    CommonTree tree;
    public Object getTree() { return tree; }
};
// $ANTLR start "in_predicate"
// MessageFilter.g:146:1: in_predicate : ( path_function IN '(' STRING ( ',' STRING )* ')' -> ^( IN path_function ( STRING )+ ) | path_function IN '(' NUMBER ( ',' NUMBER )* ')' -> ^( IN path_function ( NUMBER )+ ) );
/**
 * ANTLR-generated implementation of the "in_predicate" rule
 * (MessageFilter.g:146):
 *
 *   path_function IN '(' STRING ( ',' STRING )* ')'
 * | path_function IN '(' NUMBER ( ',' NUMBER )* ')'
 *
 * Rewrites the match into an AST rooted at StringInTreeNode or
 * NumericInTreeNode (depending on the element type of the IN list) with the
 * path and every list element as children.
 *
 * NOTE(review): generated code — change MessageFilter.g and regenerate rather
 * than editing by hand. Token types 33/34/35 appear to be '(' / ')' / ','
 * literals — TODO confirm against the generated token vocabulary.
 *
 * @return rule return scope whose {@code tree} holds the rewritten subtree
 * @throws RecognitionException if the input cannot be matched (re-reported
 *         and recovered internally; an error node is returned in that case)
 */
public final in_predicate_return in_predicate() throws RecognitionException {
    in_predicate_return retval = new in_predicate_return();
    retval.start = input.LT(1);

    CommonTree root_0 = null;

    // Tokens and sub-rule results, one set per alternative (numbered by ANTLR).
    Token IN58=null;
    Token char_literal59=null;
    Token STRING60=null;
    Token char_literal61=null;
    Token STRING62=null;
    Token char_literal63=null;
    Token IN65=null;
    Token char_literal66=null;
    Token NUMBER67=null;
    Token char_literal68=null;
    Token NUMBER69=null;
    Token char_literal70=null;
    path_function_return path_function57 =null;
    path_function_return path_function64 =null;

    CommonTree IN58_tree=null;
    CommonTree char_literal59_tree=null;
    CommonTree STRING60_tree=null;
    CommonTree char_literal61_tree=null;
    CommonTree STRING62_tree=null;
    CommonTree char_literal63_tree=null;
    CommonTree IN65_tree=null;
    CommonTree char_literal66_tree=null;
    CommonTree NUMBER67_tree=null;
    CommonTree char_literal68_tree=null;
    CommonTree NUMBER69_tree=null;
    CommonTree char_literal70_tree=null;

    // Rewrite streams buffer matched tokens/subtrees for the "->" AST rewrite.
    RewriteRuleTokenStream stream_IN=new RewriteRuleTokenStream(adaptor,"token IN");
    RewriteRuleTokenStream stream_35=new RewriteRuleTokenStream(adaptor,"token 35");
    RewriteRuleTokenStream stream_33=new RewriteRuleTokenStream(adaptor,"token 33");
    RewriteRuleTokenStream stream_34=new RewriteRuleTokenStream(adaptor,"token 34");
    RewriteRuleTokenStream stream_STRING=new RewriteRuleTokenStream(adaptor,"token STRING");
    RewriteRuleTokenStream stream_NUMBER=new RewriteRuleTokenStream(adaptor,"token NUMBER");
    RewriteRuleSubtreeStream stream_path_function=new RewriteRuleSubtreeStream(adaptor,"rule path_function");

    try {
        // MessageFilter.g:147:2: ( path_function IN '(' STRING ( ',' STRING )* ')' -> ^( IN path_function ( STRING )+ ) | path_function IN '(' NUMBER ( ',' NUMBER )* ')' -> ^( IN path_function ( NUMBER )+ ) )
        // Hand-unrolled LL(7) lookahead: both alternatives share the
        // "path_function IN '('" prefix, so the type of the first list
        // element at LA(7) (STRING vs NUMBER) decides.
        int alt10=2;
        int LA10_0 = input.LA(1);
        if ( (LA10_0==XPATH_FUN_NAME) ) {
            int LA10_1 = input.LA(2);
            if ( (LA10_1==33) ) {
                int LA10_2 = input.LA(3);
                if ( (LA10_2==STRING) ) {
                    int LA10_3 = input.LA(4);
                    if ( (LA10_3==34) ) {
                        int LA10_4 = input.LA(5);
                        if ( (LA10_4==IN) ) {
                            int LA10_5 = input.LA(6);
                            if ( (LA10_5==33) ) {
                                int LA10_6 = input.LA(7);
                                if ( (LA10_6==STRING) ) {
                                    alt10=1;
                                }
                                else if ( (LA10_6==NUMBER) ) {
                                    alt10=2;
                                }
                                else {
                                    NoViableAltException nvae =
                                        new NoViableAltException("", 10, 6, input);
                                    throw nvae;
                                }
                            }
                            else {
                                NoViableAltException nvae =
                                    new NoViableAltException("", 10, 5, input);
                                throw nvae;
                            }
                        }
                        else {
                            NoViableAltException nvae =
                                new NoViableAltException("", 10, 4, input);
                            throw nvae;
                        }
                    }
                    else {
                        NoViableAltException nvae =
                            new NoViableAltException("", 10, 3, input);
                        throw nvae;
                    }
                }
                else {
                    NoViableAltException nvae =
                        new NoViableAltException("", 10, 2, input);
                    throw nvae;
                }
            }
            else {
                NoViableAltException nvae =
                    new NoViableAltException("", 10, 1, input);
                throw nvae;
            }
        }
        else {
            NoViableAltException nvae =
                new NoViableAltException("", 10, 0, input);
            throw nvae;
        }

        switch (alt10) {
            case 1 :
                // MessageFilter.g:147:4: path_function IN '(' STRING ( ',' STRING )* ')'
                {
                pushFollow(FOLLOW_path_function_in_in_predicate796);
                path_function57=path_function();
                state._fsp--;
                stream_path_function.add(path_function57.getTree());
                IN58=(Token)match(input,IN,FOLLOW_IN_in_in_predicate798);
                stream_IN.add(IN58);
                char_literal59=(Token)match(input,33,FOLLOW_33_in_in_predicate800);
                stream_33.add(char_literal59);
                STRING60=(Token)match(input,STRING,FOLLOW_STRING_in_in_predicate802);
                stream_STRING.add(STRING60);
                // MessageFilter.g:147:32: ( ',' STRING )*
                // Consume the remaining comma-separated STRING list elements.
                loop8:
                do {
                    int alt8=2;
                    int LA8_0 = input.LA(1);
                    if ( (LA8_0==35) ) {
                        alt8=1;
                    }
                    switch (alt8) {
                    case 1 :
                        // MessageFilter.g:147:33: ',' STRING
                        {
                        char_literal61=(Token)match(input,35,FOLLOW_35_in_in_predicate805);
                        stream_35.add(char_literal61);
                        STRING62=(Token)match(input,STRING,FOLLOW_STRING_in_in_predicate807);
                        stream_STRING.add(STRING62);
                        }
                        break;
                    default :
                        break loop8;
                    }
                } while (true);
                char_literal63=(Token)match(input,34,FOLLOW_34_in_in_predicate811);
                stream_34.add(char_literal63);
                // AST REWRITE
                // elements: IN, STRING, path_function
                // token labels:
                // rule labels: retval
                // token list labels:
                // rule list labels:
                // wildcard labels:
                retval.tree = root_0;
                RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                root_0 = (CommonTree)adaptor.nil();
                // 147:50: -> ^( IN path_function ( STRING )+ )
                {
                    // MessageFilter.g:147:53: ^( IN path_function ( STRING )+ )
                    {
                    CommonTree root_1 = (CommonTree)adaptor.nil();
                    root_1 = (CommonTree)adaptor.becomeRoot(
                    new StringInTreeNode(stream_IN.nextToken())
                    , root_1);
                    adaptor.addChild(root_1, stream_path_function.nextTree());
                    // ( STRING )+ rewrite: at least one element is required.
                    if ( !(stream_STRING.hasNext()) ) {
                        throw new RewriteEarlyExitException();
                    }
                    while ( stream_STRING.hasNext() ) {
                        adaptor.addChild(root_1,
                        new StringTreeNode(stream_STRING.nextToken())
                        );
                    }
                    stream_STRING.reset();
                    adaptor.addChild(root_0, root_1);
                    }
                }

                retval.tree = root_0;
                }
                break;
            case 2 :
                // MessageFilter.g:148:3: path_function IN '(' NUMBER ( ',' NUMBER )* ')'
                {
                pushFollow(FOLLOW_path_function_in_in_predicate836);
                path_function64=path_function();
                state._fsp--;
                stream_path_function.add(path_function64.getTree());
                IN65=(Token)match(input,IN,FOLLOW_IN_in_in_predicate838);
                stream_IN.add(IN65);
                char_literal66=(Token)match(input,33,FOLLOW_33_in_in_predicate840);
                stream_33.add(char_literal66);
                NUMBER67=(Token)match(input,NUMBER,FOLLOW_NUMBER_in_in_predicate842);
                stream_NUMBER.add(NUMBER67);
                // MessageFilter.g:148:31: ( ',' NUMBER )*
                // Consume the remaining comma-separated NUMBER list elements.
                loop9:
                do {
                    int alt9=2;
                    int LA9_0 = input.LA(1);
                    if ( (LA9_0==35) ) {
                        alt9=1;
                    }
                    switch (alt9) {
                    case 1 :
                        // MessageFilter.g:148:32: ',' NUMBER
                        {
                        char_literal68=(Token)match(input,35,FOLLOW_35_in_in_predicate845);
                        stream_35.add(char_literal68);
                        NUMBER69=(Token)match(input,NUMBER,FOLLOW_NUMBER_in_in_predicate847);
                        stream_NUMBER.add(NUMBER69);
                        }
                        break;
                    default :
                        break loop9;
                    }
                } while (true);
                char_literal70=(Token)match(input,34,FOLLOW_34_in_in_predicate851);
                stream_34.add(char_literal70);
                // AST REWRITE
                // elements: NUMBER, path_function, IN
                // token labels:
                // rule labels: retval
                // token list labels:
                // rule list labels:
                // wildcard labels:
                retval.tree = root_0;
                RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);

                root_0 = (CommonTree)adaptor.nil();
                // 148:49: -> ^( IN path_function ( NUMBER )+ )
                {
                    // MessageFilter.g:148:52: ^( IN path_function ( NUMBER )+ )
                    {
                    CommonTree root_1 = (CommonTree)adaptor.nil();
                    root_1 = (CommonTree)adaptor.becomeRoot(
                    new NumericInTreeNode(stream_IN.nextToken())
                    , root_1);
                    adaptor.addChild(root_1, stream_path_function.nextTree());
                    // ( NUMBER )+ rewrite: at least one element is required.
                    if ( !(stream_NUMBER.hasNext()) ) {
                        throw new RewriteEarlyExitException();
                    }
                    while ( stream_NUMBER.hasNext() ) {
                        adaptor.addChild(root_1,
                        new NumberTreeNode(stream_NUMBER.nextToken())
                        );
                    }
                    stream_NUMBER.reset();
                    adaptor.addChild(root_0, root_1);
                    }
                }

                retval.tree = root_0;
                }
                break;
        }
        retval.stop = input.LT(-1);
        retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
        adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
    }
    catch (RecognitionException re) {
        // Standard ANTLR recovery: report, resync, and return an error node
        // instead of propagating the exception to the caller.
        reportError(re);
        recover(input,re);
        retval.tree = (CommonTree)adaptor.errorNode(input, retval.start, input.LT(-1), re);
    }
    finally {
        // do for sure before leaving
    }
    return retval;
}
// $ANTLR end "in_predicate"
// Return scope for the "null_predicate" rule; carries the rewritten AST subtree.
public static class null_predicate_return extends ParserRuleReturnScope {
CommonTree tree;
// The AST built for this rule invocation (or an error node on failure).
public Object getTree() { return tree; }
};
// $ANTLR start "null_predicate"
// MessageFilter.g:151:1: null_predicate : path_function IS NULL -> ^( NULL path_function ) ;
// Parses `path_function IS NULL` and rewrites it into the AST
// ^( NULL path_function ) rooted at a NullTreeNode.
// Generated by ANTLR; on RecognitionException it reports, recovers, and
// returns an error node rather than propagating the exception.
public final null_predicate_return null_predicate() throws RecognitionException {
null_predicate_return retval = new null_predicate_return();
retval.start = input.LT(1);
CommonTree root_0 = null;
Token IS72=null;
Token NULL73=null;
path_function_return path_function71 =null;
CommonTree IS72_tree=null;
CommonTree NULL73_tree=null;
RewriteRuleTokenStream stream_IS=new RewriteRuleTokenStream(adaptor,"token IS");
RewriteRuleTokenStream stream_NULL=new RewriteRuleTokenStream(adaptor,"token NULL");
RewriteRuleSubtreeStream stream_path_function=new RewriteRuleSubtreeStream(adaptor,"rule path_function");
try {
// MessageFilter.g:152:2: ( path_function IS NULL -> ^( NULL path_function ) )
// MessageFilter.g:152:4: path_function IS NULL
{
// Match the rule body, collecting tokens/subtrees into rewrite streams.
pushFollow(FOLLOW_path_function_in_null_predicate881);
path_function71=path_function();
state._fsp--;
stream_path_function.add(path_function71.getTree());
IS72=(Token)match(input,IS,FOLLOW_IS_in_null_predicate883);
stream_IS.add(IS72);
NULL73=(Token)match(input,NULL,FOLLOW_NULL_in_null_predicate885);
stream_NULL.add(NULL73);
// AST REWRITE
// elements: path_function, NULL
// token labels:
// rule labels: retval
// token list labels:
// rule list labels:
// wildcard labels:
retval.tree = root_0;
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
root_0 = (CommonTree)adaptor.nil();
// 152:26: -> ^( NULL path_function )
{
// MessageFilter.g:152:29: ^( NULL path_function )
{
// Build the rewritten subtree: NullTreeNode root with the path_function child.
CommonTree root_1 = (CommonTree)adaptor.nil();
root_1 = (CommonTree)adaptor.becomeRoot(
new NullTreeNode(stream_NULL.nextToken())
, root_1);
adaptor.addChild(root_1, stream_path_function.nextTree());
adaptor.addChild(root_0, root_1);
}
}
retval.tree = root_0;
}
retval.stop = input.LT(-1);
retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
retval.tree = (CommonTree)adaptor.errorNode(input, retval.start, input.LT(-1), re);
}
finally {
// do for sure before leaving
}
return retval;
}
// Return scope for the "regex_predicate" rule; carries the rewritten AST subtree.
public static class regex_predicate_return extends ParserRuleReturnScope {
CommonTree tree;
// The AST built for this rule invocation (or an error node on failure).
public Object getTree() { return tree; }
};
// $ANTLR start "regex_predicate"
// MessageFilter.g:155:1: regex_predicate : path_function MATCHES STRING -> ^( MATCHES path_function STRING ) ;
// Parses `path_function MATCHES STRING` and rewrites it into the AST
// ^( MATCHES path_function STRING ) rooted at a MatchesTreeNode, with the
// pattern wrapped in a StringTreeNode.
// Generated by ANTLR; on RecognitionException it reports, recovers, and
// returns an error node rather than propagating the exception.
public final regex_predicate_return regex_predicate() throws RecognitionException {
regex_predicate_return retval = new regex_predicate_return();
retval.start = input.LT(1);
CommonTree root_0 = null;
Token MATCHES75=null;
Token STRING76=null;
path_function_return path_function74 =null;
CommonTree MATCHES75_tree=null;
CommonTree STRING76_tree=null;
RewriteRuleTokenStream stream_MATCHES=new RewriteRuleTokenStream(adaptor,"token MATCHES");
RewriteRuleTokenStream stream_STRING=new RewriteRuleTokenStream(adaptor,"token STRING");
RewriteRuleSubtreeStream stream_path_function=new RewriteRuleSubtreeStream(adaptor,"rule path_function");
try {
// MessageFilter.g:156:2: ( path_function MATCHES STRING -> ^( MATCHES path_function STRING ) )
// MessageFilter.g:156:6: path_function MATCHES STRING
{
// Match the rule body, collecting tokens/subtrees into rewrite streams.
pushFollow(FOLLOW_path_function_in_regex_predicate909);
path_function74=path_function();
state._fsp--;
stream_path_function.add(path_function74.getTree());
MATCHES75=(Token)match(input,MATCHES,FOLLOW_MATCHES_in_regex_predicate911);
stream_MATCHES.add(MATCHES75);
STRING76=(Token)match(input,STRING,FOLLOW_STRING_in_regex_predicate913);
stream_STRING.add(STRING76);
// AST REWRITE
// elements: MATCHES, path_function, STRING
// token labels:
// rule labels: retval
// token list labels:
// rule list labels:
// wildcard labels:
retval.tree = root_0;
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
root_0 = (CommonTree)adaptor.nil();
// 156:35: -> ^( MATCHES path_function STRING )
{
// MessageFilter.g:156:38: ^( MATCHES path_function STRING )
{
// Build the rewritten subtree: MatchesTreeNode root with two children.
CommonTree root_1 = (CommonTree)adaptor.nil();
root_1 = (CommonTree)adaptor.becomeRoot(
new MatchesTreeNode(stream_MATCHES.nextToken())
, root_1);
adaptor.addChild(root_1, stream_path_function.nextTree());
adaptor.addChild(root_1,
new StringTreeNode(stream_STRING.nextToken())
);
adaptor.addChild(root_0, root_1);
}
}
retval.tree = root_0;
}
retval.stop = input.LT(-1);
retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
retval.tree = (CommonTree)adaptor.errorNode(input, retval.start, input.LT(-1), re);
}
finally {
// do for sure before leaving
}
return retval;
}
// Return scope for the "exists_predicate" rule; carries the rewritten AST subtree.
public static class exists_predicate_return extends ParserRuleReturnScope {
CommonTree tree;
// The AST built for this rule invocation (or an error node on failure).
public Object getTree() { return tree; }
};
// $ANTLR start "exists_predicate"
// MessageFilter.g:159:1: exists_predicate : ( path_function EXISTS -> ^( EXISTS path_function ) | EXISTS path_function -> ^( EXISTS path_function ) );
// Parses either the postfix form `path_function EXISTS` or the prefix form
// `EXISTS path_function`; both alternatives rewrite to the same AST
// ^( EXISTS path_function ) rooted at an ExistsTreeNode.
// Alternative selection uses one token of lookahead: XPATH_FUN_NAME selects
// the postfix form, EXISTS selects the prefix form; anything else is an error.
public final exists_predicate_return exists_predicate() throws RecognitionException {
exists_predicate_return retval = new exists_predicate_return();
retval.start = input.LT(1);
CommonTree root_0 = null;
Token EXISTS78=null;
Token EXISTS79=null;
path_function_return path_function77 =null;
path_function_return path_function80 =null;
CommonTree EXISTS78_tree=null;
CommonTree EXISTS79_tree=null;
RewriteRuleTokenStream stream_EXISTS=new RewriteRuleTokenStream(adaptor,"token EXISTS");
RewriteRuleSubtreeStream stream_path_function=new RewriteRuleSubtreeStream(adaptor,"rule path_function");
try {
// MessageFilter.g:160:2: ( path_function EXISTS -> ^( EXISTS path_function ) | EXISTS path_function -> ^( EXISTS path_function ) )
int alt11=2;
int LA11_0 = input.LA(1);
if ( (LA11_0==XPATH_FUN_NAME) ) {
alt11=1;
}
else if ( (LA11_0==EXISTS) ) {
alt11=2;
}
else {
NoViableAltException nvae =
new NoViableAltException("", 11, 0, input);
throw nvae;
}
switch (alt11) {
case 1 :
// MessageFilter.g:160:4: path_function EXISTS
{
pushFollow(FOLLOW_path_function_in_exists_predicate940);
path_function77=path_function();
state._fsp--;
stream_path_function.add(path_function77.getTree());
EXISTS78=(Token)match(input,EXISTS,FOLLOW_EXISTS_in_exists_predicate942);
stream_EXISTS.add(EXISTS78);
// AST REWRITE
// elements: EXISTS, path_function
// token labels:
// rule labels: retval
// token list labels:
// rule list labels:
// wildcard labels:
retval.tree = root_0;
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
root_0 = (CommonTree)adaptor.nil();
// 160:25: -> ^( EXISTS path_function )
{
// MessageFilter.g:160:28: ^( EXISTS path_function )
{
CommonTree root_1 = (CommonTree)adaptor.nil();
root_1 = (CommonTree)adaptor.becomeRoot(
new ExistsTreeNode(stream_EXISTS.nextToken())
, root_1);
adaptor.addChild(root_1, stream_path_function.nextTree());
adaptor.addChild(root_0, root_1);
}
}
retval.tree = root_0;
}
break;
case 2 :
// MessageFilter.g:161:3: EXISTS path_function
{
EXISTS79=(Token)match(input,EXISTS,FOLLOW_EXISTS_in_exists_predicate959);
stream_EXISTS.add(EXISTS79);
pushFollow(FOLLOW_path_function_in_exists_predicate961);
path_function80=path_function();
state._fsp--;
stream_path_function.add(path_function80.getTree());
// AST REWRITE
// elements: path_function, EXISTS
// token labels:
// rule labels: retval
// token list labels:
// rule list labels:
// wildcard labels:
retval.tree = root_0;
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
root_0 = (CommonTree)adaptor.nil();
// 161:24: -> ^( EXISTS path_function )
{
// MessageFilter.g:161:27: ^( EXISTS path_function )
{
CommonTree root_1 = (CommonTree)adaptor.nil();
root_1 = (CommonTree)adaptor.becomeRoot(
new ExistsTreeNode(stream_EXISTS.nextToken())
, root_1);
adaptor.addChild(root_1, stream_path_function.nextTree());
adaptor.addChild(root_0, root_1);
}
}
retval.tree = root_0;
}
break;
}
retval.stop = input.LT(-1);
retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
retval.tree = (CommonTree)adaptor.errorNode(input, retval.start, input.LT(-1), re);
}
finally {
// do for sure before leaving
}
return retval;
}
// Return scope for the "path_function" rule; carries the rewritten AST subtree.
public static class path_function_return extends ParserRuleReturnScope {
CommonTree tree;
// The AST built for this rule invocation (or an error node on failure).
public Object getTree() { return tree; }
};
// $ANTLR start "path_function"
// MessageFilter.g:164:1: path_function : XPATH_FUN_NAME '(' STRING ')' -> ^( XPATH_FUN_NAME STRING ) ;
// Parses an xpath-style function call `XPATH_FUN_NAME '(' STRING ')'` and
// rewrites it into the AST ^( XPATH_FUN_NAME STRING ) — an XPathTreeNode root
// with the path argument wrapped in a StringTreeNode. The parentheses
// (token types 33 and 34) are consumed but dropped from the AST.
public final path_function_return path_function() throws RecognitionException {
path_function_return retval = new path_function_return();
retval.start = input.LT(1);
CommonTree root_0 = null;
Token XPATH_FUN_NAME81=null;
Token char_literal82=null;
Token STRING83=null;
Token char_literal84=null;
CommonTree XPATH_FUN_NAME81_tree=null;
CommonTree char_literal82_tree=null;
CommonTree STRING83_tree=null;
CommonTree char_literal84_tree=null;
RewriteRuleTokenStream stream_XPATH_FUN_NAME=new RewriteRuleTokenStream(adaptor,"token XPATH_FUN_NAME");
RewriteRuleTokenStream stream_33=new RewriteRuleTokenStream(adaptor,"token 33");
RewriteRuleTokenStream stream_34=new RewriteRuleTokenStream(adaptor,"token 34");
RewriteRuleTokenStream stream_STRING=new RewriteRuleTokenStream(adaptor,"token STRING");
try {
// MessageFilter.g:165:2: ( XPATH_FUN_NAME '(' STRING ')' -> ^( XPATH_FUN_NAME STRING ) )
// MessageFilter.g:165:4: XPATH_FUN_NAME '(' STRING ')'
{
XPATH_FUN_NAME81=(Token)match(input,XPATH_FUN_NAME,FOLLOW_XPATH_FUN_NAME_in_path_function990);
stream_XPATH_FUN_NAME.add(XPATH_FUN_NAME81);
char_literal82=(Token)match(input,33,FOLLOW_33_in_path_function992);
stream_33.add(char_literal82);
STRING83=(Token)match(input,STRING,FOLLOW_STRING_in_path_function994);
stream_STRING.add(STRING83);
char_literal84=(Token)match(input,34,FOLLOW_34_in_path_function996);
stream_34.add(char_literal84);
// AST REWRITE
// elements: XPATH_FUN_NAME, STRING
// token labels:
// rule labels: retval
// token list labels:
// rule list labels:
// wildcard labels:
retval.tree = root_0;
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
root_0 = (CommonTree)adaptor.nil();
// 165:34: -> ^( XPATH_FUN_NAME STRING )
{
// MessageFilter.g:165:37: ^( XPATH_FUN_NAME STRING )
{
CommonTree root_1 = (CommonTree)adaptor.nil();
root_1 = (CommonTree)adaptor.becomeRoot(
new XPathTreeNode(stream_XPATH_FUN_NAME.nextToken())
, root_1);
adaptor.addChild(root_1,
new StringTreeNode(stream_STRING.nextToken())
);
adaptor.addChild(root_0, root_1);
}
}
retval.tree = root_0;
}
retval.stop = input.LT(-1);
retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
retval.tree = (CommonTree)adaptor.errorNode(input, retval.start, input.LT(-1), re);
}
finally {
// do for sure before leaving
}
return retval;
}
// Return scope for the "value_function" rule; carries the rewritten AST subtree.
public static class value_function_return extends ParserRuleReturnScope {
CommonTree tree;
// The AST built for this rule invocation (or an error node on failure).
public Object getTree() { return tree; }
};
// $ANTLR start "value_function"
// MessageFilter.g:168:1: value_function : ( equality_value_function | compariable_value_function );
// Dispatches to one of two value-expression sub-rules based on one token of
// lookahead: string/boolean/null/xpath tokens select equality_value_function;
// numbers, time-function names, and follow-set tokens (EOF, AND, OR, ')' , ',')
// select compariable_value_function. The chosen sub-rule's tree becomes this
// rule's tree unchanged.
public final value_function_return value_function() throws RecognitionException {
value_function_return retval = new value_function_return();
retval.start = input.LT(1);
CommonTree root_0 = null;
equality_value_function_return equality_value_function85 =null;
compariable_value_function_return compariable_value_function86 =null;
try {
// MessageFilter.g:169:2: ( equality_value_function | compariable_value_function )
int alt12=2;
int LA12_0 = input.LA(1);
if ( (LA12_0==FALSE||LA12_0==NULL||LA12_0==STRING||LA12_0==TRUE||LA12_0==XPATH_FUN_NAME) ) {
alt12=1;
}
else if ( (LA12_0==EOF||LA12_0==AND||LA12_0==NUMBER||LA12_0==OR||(LA12_0 >= TIME_MILLIS_FUN_NAME && LA12_0 <= TIME_STRING_FUN_NAME)||(LA12_0 >= 34 && LA12_0 <= 35)) ) {
alt12=2;
}
else {
NoViableAltException nvae =
new NoViableAltException("", 12, 0, input);
throw nvae;
}
switch (alt12) {
case 1 :
// MessageFilter.g:169:4: equality_value_function
{
root_0 = (CommonTree)adaptor.nil();
pushFollow(FOLLOW_equality_value_function_in_value_function1022);
equality_value_function85=equality_value_function();
state._fsp--;
adaptor.addChild(root_0, equality_value_function85.getTree());
}
break;
case 2 :
// MessageFilter.g:169:30: compariable_value_function
{
root_0 = (CommonTree)adaptor.nil();
pushFollow(FOLLOW_compariable_value_function_in_value_function1026);
compariable_value_function86=compariable_value_function();
state._fsp--;
adaptor.addChild(root_0, compariable_value_function86.getTree());
}
break;
}
retval.stop = input.LT(-1);
retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
retval.tree = (CommonTree)adaptor.errorNode(input, retval.start, input.LT(-1), re);
}
finally {
// do for sure before leaving
}
return retval;
}
// Return scope for the "equality_value_function" rule; carries the rewritten AST subtree.
public static class equality_value_function_return extends ParserRuleReturnScope {
CommonTree tree;
// The AST built for this rule invocation (or an error node on failure).
public Object getTree() { return tree; }
};
// $ANTLR start "equality_value_function"
// MessageFilter.g:172:1: equality_value_function : ( STRING -> STRING | TRUE -> TRUE | FALSE -> FALSE | NULL -> NULL | path_function );
// Parses a value usable in equality comparisons. Single-token alternatives are
// rewritten into typed leaf nodes (StringTreeNode, TrueValueTreeNode,
// FalseValueTreeNode, NullValueTreeNode); an XPATH_FUN_NAME lookahead instead
// delegates to path_function and adopts its tree as-is.
public final equality_value_function_return equality_value_function() throws RecognitionException {
equality_value_function_return retval = new equality_value_function_return();
retval.start = input.LT(1);
CommonTree root_0 = null;
Token STRING87=null;
Token TRUE88=null;
Token FALSE89=null;
Token NULL90=null;
path_function_return path_function91 =null;
CommonTree STRING87_tree=null;
CommonTree TRUE88_tree=null;
CommonTree FALSE89_tree=null;
CommonTree NULL90_tree=null;
RewriteRuleTokenStream stream_FALSE=new RewriteRuleTokenStream(adaptor,"token FALSE");
RewriteRuleTokenStream stream_TRUE=new RewriteRuleTokenStream(adaptor,"token TRUE");
RewriteRuleTokenStream stream_NULL=new RewriteRuleTokenStream(adaptor,"token NULL");
RewriteRuleTokenStream stream_STRING=new RewriteRuleTokenStream(adaptor,"token STRING");
try {
// MessageFilter.g:173:2: ( STRING -> STRING | TRUE -> TRUE | FALSE -> FALSE | NULL -> NULL | path_function )
// Alternative selection on one token of lookahead.
int alt13=5;
switch ( input.LA(1) ) {
case STRING:
{
alt13=1;
}
break;
case TRUE:
{
alt13=2;
}
break;
case FALSE:
{
alt13=3;
}
break;
case NULL:
{
alt13=4;
}
break;
case XPATH_FUN_NAME:
{
alt13=5;
}
break;
default:
NoViableAltException nvae =
new NoViableAltException("", 13, 0, input);
throw nvae;
}
switch (alt13) {
case 1 :
// MessageFilter.g:173:4: STRING
{
STRING87=(Token)match(input,STRING,FOLLOW_STRING_in_equality_value_function1038);
stream_STRING.add(STRING87);
// AST REWRITE
// elements: STRING
// token labels:
// rule labels: retval
// token list labels:
// rule list labels:
// wildcard labels:
retval.tree = root_0;
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
root_0 = (CommonTree)adaptor.nil();
// 173:11: -> STRING
{
adaptor.addChild(root_0,
new StringTreeNode(stream_STRING.nextToken())
);
}
retval.tree = root_0;
}
break;
case 2 :
// MessageFilter.g:174:3: TRUE
{
TRUE88=(Token)match(input,TRUE,FOLLOW_TRUE_in_equality_value_function1051);
stream_TRUE.add(TRUE88);
// AST REWRITE
// elements: TRUE
// token labels:
// rule labels: retval
// token list labels:
// rule list labels:
// wildcard labels:
retval.tree = root_0;
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
root_0 = (CommonTree)adaptor.nil();
// 174:8: -> TRUE
{
adaptor.addChild(root_0,
new TrueValueTreeNode(stream_TRUE.nextToken())
);
}
retval.tree = root_0;
}
break;
case 3 :
// MessageFilter.g:175:3: FALSE
{
FALSE89=(Token)match(input,FALSE,FOLLOW_FALSE_in_equality_value_function1064);
stream_FALSE.add(FALSE89);
// AST REWRITE
// elements: FALSE
// token labels:
// rule labels: retval
// token list labels:
// rule list labels:
// wildcard labels:
retval.tree = root_0;
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
root_0 = (CommonTree)adaptor.nil();
// 175:9: -> FALSE
{
adaptor.addChild(root_0,
new FalseValueTreeNode(stream_FALSE.nextToken())
);
}
retval.tree = root_0;
}
break;
case 4 :
// MessageFilter.g:176:3: NULL
{
NULL90=(Token)match(input,NULL,FOLLOW_NULL_in_equality_value_function1077);
stream_NULL.add(NULL90);
// AST REWRITE
// elements: NULL
// token labels:
// rule labels: retval
// token list labels:
// rule list labels:
// wildcard labels:
retval.tree = root_0;
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
root_0 = (CommonTree)adaptor.nil();
// 176:8: -> NULL
{
adaptor.addChild(root_0,
new NullValueTreeNode(stream_NULL.nextToken())
);
}
retval.tree = root_0;
}
break;
case 5 :
// MessageFilter.g:177:3: path_function
{
root_0 = (CommonTree)adaptor.nil();
pushFollow(FOLLOW_path_function_in_equality_value_function1090);
path_function91=path_function();
state._fsp--;
adaptor.addChild(root_0, path_function91.getTree());
}
break;
}
retval.stop = input.LT(-1);
retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
retval.tree = (CommonTree)adaptor.errorNode(input, retval.start, input.LT(-1), re);
}
finally {
// do for sure before leaving
}
return retval;
}
// Return scope for the "compariable_value_function" rule (spelling follows the
// grammar's rule name); carries the rewritten AST subtree.
public static class compariable_value_function_return extends ParserRuleReturnScope {
CommonTree tree;
// The AST built for this rule invocation (or an error node on failure).
public Object getTree() { return tree; }
};
// $ANTLR start "compariable_value_function"
// MessageFilter.g:180:1: compariable_value_function : ( NUMBER -> NUMBER | time_millis_function | time_string_function );
// Parses a value usable in ordering comparisons: a NUMBER (rewritten to a
// NumberTreeNode leaf), a time_millis_function, or a time_string_function.
// The "compariable" spelling mirrors the grammar rule name and cannot be
// changed here without regenerating from MessageFilter.g.
// Note: follow-set tokens (EOF, AND, OR, ')', ',') also select alternative 2
// because time_millis_function has an empty alternative in the grammar.
public final compariable_value_function_return compariable_value_function() throws RecognitionException {
compariable_value_function_return retval = new compariable_value_function_return();
retval.start = input.LT(1);
CommonTree root_0 = null;
Token NUMBER92=null;
time_millis_function_return time_millis_function93 =null;
time_string_function_return time_string_function94 =null;
CommonTree NUMBER92_tree=null;
RewriteRuleTokenStream stream_NUMBER=new RewriteRuleTokenStream(adaptor,"token NUMBER");
try {
// MessageFilter.g:181:2: ( NUMBER -> NUMBER | time_millis_function | time_string_function )
int alt14=3;
switch ( input.LA(1) ) {
case NUMBER:
{
alt14=1;
}
break;
case EOF:
case AND:
case OR:
case TIME_MILLIS_FUN_NAME:
case 34:
case 35:
{
alt14=2;
}
break;
case TIME_STRING_FUN_NAME:
{
alt14=3;
}
break;
default:
NoViableAltException nvae =
new NoViableAltException("", 14, 0, input);
throw nvae;
}
switch (alt14) {
case 1 :
// MessageFilter.g:181:4: NUMBER
{
NUMBER92=(Token)match(input,NUMBER,FOLLOW_NUMBER_in_compariable_value_function1102);
stream_NUMBER.add(NUMBER92);
// AST REWRITE
// elements: NUMBER
// token labels:
// rule labels: retval
// token list labels:
// rule list labels:
// wildcard labels:
retval.tree = root_0;
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
root_0 = (CommonTree)adaptor.nil();
// 181:11: -> NUMBER
{
adaptor.addChild(root_0,
new NumberTreeNode(stream_NUMBER.nextToken())
);
}
retval.tree = root_0;
}
break;
case 2 :
// MessageFilter.g:182:3: time_millis_function
{
root_0 = (CommonTree)adaptor.nil();
pushFollow(FOLLOW_time_millis_function_in_compariable_value_function1114);
time_millis_function93=time_millis_function();
state._fsp--;
adaptor.addChild(root_0, time_millis_function93.getTree());
}
break;
case 3 :
// MessageFilter.g:183:3: time_string_function
{
root_0 = (CommonTree)adaptor.nil();
pushFollow(FOLLOW_time_string_function_in_compariable_value_function1120);
time_string_function94=time_string_function();
state._fsp--;
adaptor.addChild(root_0, time_string_function94.getTree());
}
break;
}
retval.stop = input.LT(-1);
retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
retval.tree = (CommonTree)adaptor.errorNode(input, retval.start, input.LT(-1), re);
}
finally {
// do for sure before leaving
}
return retval;
}
// Return scope for the "time_millis_function" rule; carries the rewritten AST subtree.
public static class time_millis_function_return extends ParserRuleReturnScope {
CommonTree tree;
// The AST built for this rule invocation (or an error node on failure).
public Object getTree() { return tree; }
};
// $ANTLR start "time_millis_function"
// MessageFilter.g:186:1: time_millis_function : ( TIME_MILLIS_FUN_NAME '(' STRING ',' STRING ')' -> ^( TIME_MILLIS_FUN_NAME STRING STRING ) |);
// Parses `TIME_MILLIS_FUN_NAME '(' STRING ',' STRING ')'` into the AST
// ^( TIME_MILLIS_FUN_NAME STRING STRING ) — a TimeMillisValueTreeNode root
// with the two string arguments as StringTreeNode children. The grammar rule
// also has an empty alternative (selected on EOF/AND/OR/')'/','), which
// produces an empty (nil) tree.
public final time_millis_function_return time_millis_function() throws RecognitionException {
time_millis_function_return retval = new time_millis_function_return();
retval.start = input.LT(1);
CommonTree root_0 = null;
Token TIME_MILLIS_FUN_NAME95=null;
Token char_literal96=null;
Token STRING97=null;
Token char_literal98=null;
Token STRING99=null;
Token char_literal100=null;
CommonTree TIME_MILLIS_FUN_NAME95_tree=null;
CommonTree char_literal96_tree=null;
CommonTree STRING97_tree=null;
CommonTree char_literal98_tree=null;
CommonTree STRING99_tree=null;
CommonTree char_literal100_tree=null;
RewriteRuleTokenStream stream_35=new RewriteRuleTokenStream(adaptor,"token 35");
RewriteRuleTokenStream stream_33=new RewriteRuleTokenStream(adaptor,"token 33");
RewriteRuleTokenStream stream_34=new RewriteRuleTokenStream(adaptor,"token 34");
RewriteRuleTokenStream stream_TIME_MILLIS_FUN_NAME=new RewriteRuleTokenStream(adaptor,"token TIME_MILLIS_FUN_NAME");
RewriteRuleTokenStream stream_STRING=new RewriteRuleTokenStream(adaptor,"token STRING");
try {
// MessageFilter.g:187:2: ( TIME_MILLIS_FUN_NAME '(' STRING ',' STRING ')' -> ^( TIME_MILLIS_FUN_NAME STRING STRING ) |)
int alt15=2;
int LA15_0 = input.LA(1);
if ( (LA15_0==TIME_MILLIS_FUN_NAME) ) {
alt15=1;
}
else if ( (LA15_0==EOF||LA15_0==AND||LA15_0==OR||(LA15_0 >= 34 && LA15_0 <= 35)) ) {
alt15=2;
}
else {
NoViableAltException nvae =
new NoViableAltException("", 15, 0, input);
throw nvae;
}
switch (alt15) {
case 1 :
// MessageFilter.g:187:4: TIME_MILLIS_FUN_NAME '(' STRING ',' STRING ')'
{
TIME_MILLIS_FUN_NAME95=(Token)match(input,TIME_MILLIS_FUN_NAME,FOLLOW_TIME_MILLIS_FUN_NAME_in_time_millis_function1132);
stream_TIME_MILLIS_FUN_NAME.add(TIME_MILLIS_FUN_NAME95);
char_literal96=(Token)match(input,33,FOLLOW_33_in_time_millis_function1134);
stream_33.add(char_literal96);
STRING97=(Token)match(input,STRING,FOLLOW_STRING_in_time_millis_function1136);
stream_STRING.add(STRING97);
char_literal98=(Token)match(input,35,FOLLOW_35_in_time_millis_function1138);
stream_35.add(char_literal98);
STRING99=(Token)match(input,STRING,FOLLOW_STRING_in_time_millis_function1140);
stream_STRING.add(STRING99);
char_literal100=(Token)match(input,34,FOLLOW_34_in_time_millis_function1142);
stream_34.add(char_literal100);
// AST REWRITE
// elements: TIME_MILLIS_FUN_NAME, STRING, STRING
// token labels:
// rule labels: retval
// token list labels:
// rule list labels:
// wildcard labels:
retval.tree = root_0;
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
root_0 = (CommonTree)adaptor.nil();
// 187:51: -> ^( TIME_MILLIS_FUN_NAME STRING STRING )
{
// MessageFilter.g:187:54: ^( TIME_MILLIS_FUN_NAME STRING STRING )
{
CommonTree root_1 = (CommonTree)adaptor.nil();
root_1 = (CommonTree)adaptor.becomeRoot(
new TimeMillisValueTreeNode(stream_TIME_MILLIS_FUN_NAME.nextToken())
, root_1);
// Successive nextToken() calls yield the two STRING arguments in order.
adaptor.addChild(root_1,
new StringTreeNode(stream_STRING.nextToken())
);
adaptor.addChild(root_1,
new StringTreeNode(stream_STRING.nextToken())
);
adaptor.addChild(root_0, root_1);
}
}
retval.tree = root_0;
}
break;
case 2 :
// MessageFilter.g:188:2:
{
// Empty alternative: return a nil tree.
root_0 = (CommonTree)adaptor.nil();
}
break;
}
retval.stop = input.LT(-1);
retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
retval.tree = (CommonTree)adaptor.errorNode(input, retval.start, input.LT(-1), re);
}
finally {
// do for sure before leaving
}
return retval;
}
// Return scope for the "time_string_function" rule; carries the rewritten AST subtree.
public static class time_string_function_return extends ParserRuleReturnScope {
CommonTree tree;
// The AST built for this rule invocation (or an error node on failure).
public Object getTree() { return tree; }
};
// $ANTLR start "time_string_function"
// MessageFilter.g:193:1: time_string_function : TIME_STRING_FUN_NAME '(' STRING ',' STRING ',' STRING ')' -> ^( TIME_STRING_FUN_NAME STRING STRING STRING ) ;
// Parses `TIME_STRING_FUN_NAME '(' STRING ',' STRING ',' STRING ')'` into the
// AST ^( TIME_STRING_FUN_NAME STRING STRING STRING ) — a
// TimeStringValueTreeNode root with the three string arguments as
// StringTreeNode children, emitted in source order.
public final time_string_function_return time_string_function() throws RecognitionException {
time_string_function_return retval = new time_string_function_return();
retval.start = input.LT(1);
CommonTree root_0 = null;
Token TIME_STRING_FUN_NAME101=null;
Token char_literal102=null;
Token STRING103=null;
Token char_literal104=null;
Token STRING105=null;
Token char_literal106=null;
Token STRING107=null;
Token char_literal108=null;
CommonTree TIME_STRING_FUN_NAME101_tree=null;
CommonTree char_literal102_tree=null;
CommonTree STRING103_tree=null;
CommonTree char_literal104_tree=null;
CommonTree STRING105_tree=null;
CommonTree char_literal106_tree=null;
CommonTree STRING107_tree=null;
CommonTree char_literal108_tree=null;
RewriteRuleTokenStream stream_35=new RewriteRuleTokenStream(adaptor,"token 35");
RewriteRuleTokenStream stream_TIME_STRING_FUN_NAME=new RewriteRuleTokenStream(adaptor,"token TIME_STRING_FUN_NAME");
RewriteRuleTokenStream stream_33=new RewriteRuleTokenStream(adaptor,"token 33");
RewriteRuleTokenStream stream_34=new RewriteRuleTokenStream(adaptor,"token 34");
RewriteRuleTokenStream stream_STRING=new RewriteRuleTokenStream(adaptor,"token STRING");
try {
// MessageFilter.g:194:2: ( TIME_STRING_FUN_NAME '(' STRING ',' STRING ',' STRING ')' -> ^( TIME_STRING_FUN_NAME STRING STRING STRING ) )
// MessageFilter.g:194:5: TIME_STRING_FUN_NAME '(' STRING ',' STRING ',' STRING ')'
{
TIME_STRING_FUN_NAME101=(Token)match(input,TIME_STRING_FUN_NAME,FOLLOW_TIME_STRING_FUN_NAME_in_time_string_function1179);
stream_TIME_STRING_FUN_NAME.add(TIME_STRING_FUN_NAME101);
char_literal102=(Token)match(input,33,FOLLOW_33_in_time_string_function1181);
stream_33.add(char_literal102);
STRING103=(Token)match(input,STRING,FOLLOW_STRING_in_time_string_function1183);
stream_STRING.add(STRING103);
char_literal104=(Token)match(input,35,FOLLOW_35_in_time_string_function1185);
stream_35.add(char_literal104);
STRING105=(Token)match(input,STRING,FOLLOW_STRING_in_time_string_function1187);
stream_STRING.add(STRING105);
char_literal106=(Token)match(input,35,FOLLOW_35_in_time_string_function1189);
stream_35.add(char_literal106);
STRING107=(Token)match(input,STRING,FOLLOW_STRING_in_time_string_function1191);
stream_STRING.add(STRING107);
char_literal108=(Token)match(input,34,FOLLOW_34_in_time_string_function1193);
stream_34.add(char_literal108);
// AST REWRITE
// elements: STRING, TIME_STRING_FUN_NAME, STRING, STRING
// token labels:
// rule labels: retval
// token list labels:
// rule list labels:
// wildcard labels:
retval.tree = root_0;
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
root_0 = (CommonTree)adaptor.nil();
// 194:63: -> ^( TIME_STRING_FUN_NAME STRING STRING STRING )
{
// MessageFilter.g:194:66: ^( TIME_STRING_FUN_NAME STRING STRING STRING )
{
CommonTree root_1 = (CommonTree)adaptor.nil();
root_1 = (CommonTree)adaptor.becomeRoot(
new TimeStringValueTreeNode(stream_TIME_STRING_FUN_NAME.nextToken())
, root_1);
// Successive nextToken() calls yield the three STRING arguments in order.
adaptor.addChild(root_1,
new StringTreeNode(stream_STRING.nextToken())
);
adaptor.addChild(root_1,
new StringTreeNode(stream_STRING.nextToken())
);
adaptor.addChild(root_1,
new StringTreeNode(stream_STRING.nextToken())
);
adaptor.addChild(root_0, root_1);
}
}
retval.tree = root_0;
}
retval.stop = input.LT(-1);
retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
retval.tree = (CommonTree)adaptor.errorNode(input, retval.start, input.LT(-1), re);
}
finally {
// do for sure before leaving
}
return retval;
}
// Delegated rules
// ANTLR-generated FOLLOW sets, one per token/rule reference site in the
// grammar, encoded as 64-bit words. They are passed to match()/pushFollow()
// above and are used by the ANTLR runtime during error recovery. Do not edit
// by hand; regenerate from MessageFilter.g.
public static final BitSet FOLLOW_boolean_expr_in_filter323 = new BitSet(new long[]{0x0000000002000002L});
public static final BitSet FOLLOW_OR_in_filter330 = new BitSet(new long[]{0x0000000320100A00L});
public static final BitSet FOLLOW_boolean_expr_in_filter334 = new BitSet(new long[]{0x0000000002000002L});
public static final BitSet FOLLOW_EOF_in_filter354 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_boolean_factor_in_boolean_expr370 = new BitSet(new long[]{0x0000000000000012L});
public static final BitSet FOLLOW_AND_in_boolean_expr377 = new BitSet(new long[]{0x0000000320100A00L});
public static final BitSet FOLLOW_boolean_factor_in_boolean_expr381 = new BitSet(new long[]{0x0000000000000012L});
public static final BitSet FOLLOW_predicate_in_boolean_factor414 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_NOT_in_boolean_factor423 = new BitSet(new long[]{0x0000000320000A00L});
public static final BitSet FOLLOW_predicate_in_boolean_factor425 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_33_in_predicate448 = new BitSet(new long[]{0x0000000320100A00L});
public static final BitSet FOLLOW_filter_in_predicate450 = new BitSet(new long[]{0x0000000400000000L});
public static final BitSet FOLLOW_34_in_predicate452 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_comparison_function_in_predicate463 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_between_predicate_in_predicate469 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_in_predicate_in_predicate475 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_null_predicate_in_predicate481 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_regex_predicate_in_predicate487 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_exists_predicate_in_predicate493 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_TRUE_in_predicate499 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_FALSE_in_predicate511 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_path_function_in_comparison_function529 = new BitSet(new long[]{0x0000000000000080L});
public static final BitSet FOLLOW_EQUALS_in_comparison_function531 = new BitSet(new long[]{0x000000013CC00800L});
public static final BitSet FOLLOW_value_function_in_comparison_function533 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_path_function_in_comparison_function552 = new BitSet(new long[]{0x0000000000200000L});
public static final BitSet FOLLOW_NOT_EQUALS_in_comparison_function554 = new BitSet(new long[]{0x000000013CC00800L});
public static final BitSet FOLLOW_value_function_in_comparison_function556 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_path_function_in_comparison_function575 = new BitSet(new long[]{0x0000000000002000L});
public static final BitSet FOLLOW_GT_in_comparison_function577 = new BitSet(new long[]{0x0000000018800000L});
public static final BitSet FOLLOW_compariable_value_function_in_comparison_function579 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_path_function_in_comparison_function598 = new BitSet(new long[]{0x0000000000001000L});
public static final BitSet FOLLOW_GE_in_comparison_function600 = new BitSet(new long[]{0x0000000018800000L});
public static final BitSet FOLLOW_compariable_value_function_in_comparison_function602 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_path_function_in_comparison_function621 = new BitSet(new long[]{0x0000000000040000L});
public static final BitSet FOLLOW_LT_in_comparison_function623 = new BitSet(new long[]{0x0000000018800000L});
public static final BitSet FOLLOW_compariable_value_function_in_comparison_function625 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_path_function_in_comparison_function644 = new BitSet(new long[]{0x0000000000020000L});
public static final BitSet FOLLOW_LE_in_comparison_function646 = new BitSet(new long[]{0x0000000018800000L});
public static final BitSet FOLLOW_compariable_value_function_in_comparison_function648 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_path_function_in_between_predicate673 = new BitSet(new long[]{0x0000000000000020L});
public static final BitSet FOLLOW_BETWEEN_in_between_predicate675 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_33_in_between_predicate677 = new BitSet(new long[]{0x0000000000800000L});
public static final BitSet FOLLOW_NUMBER_in_between_predicate679 = new BitSet(new long[]{0x0000000800000000L});
public static final BitSet FOLLOW_35_in_between_predicate681 = new BitSet(new long[]{0x0000000000800000L});
public static final BitSet FOLLOW_NUMBER_in_between_predicate683 = new BitSet(new long[]{0x0000000400000000L});
public static final BitSet FOLLOW_34_in_between_predicate685 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_path_function_in_between_predicate716 = new BitSet(new long[]{0x0000000000000020L});
public static final BitSet FOLLOW_BETWEEN_in_between_predicate718 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_33_in_between_predicate720 = new BitSet(new long[]{0x0000000808000000L});
public static final BitSet FOLLOW_time_millis_function_in_between_predicate722 = new BitSet(new long[]{0x0000000800000000L});
public static final BitSet FOLLOW_35_in_between_predicate724 = new BitSet(new long[]{0x0000000408000000L});
public static final BitSet FOLLOW_time_millis_function_in_between_predicate726 = new BitSet(new long[]{0x0000000400000000L});
public static final BitSet FOLLOW_34_in_between_predicate728 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_path_function_in_between_predicate753 = new BitSet(new long[]{0x0000000000000020L});
public static final BitSet FOLLOW_BETWEEN_in_between_predicate755 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_33_in_between_predicate757 = new BitSet(new long[]{0x0000000010000000L});
public static final BitSet FOLLOW_time_string_function_in_between_predicate759 = new BitSet(new long[]{0x0000000800000000L});
public static final BitSet FOLLOW_35_in_between_predicate761 = new BitSet(new long[]{0x0000000010000000L});
public static final BitSet FOLLOW_time_string_function_in_between_predicate763 = new BitSet(new long[]{0x0000000400000000L});
public static final BitSet FOLLOW_34_in_between_predicate765 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_path_function_in_in_predicate796 = new BitSet(new long[]{0x0000000000008000L});
public static final BitSet FOLLOW_IN_in_in_predicate798 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_33_in_in_predicate800 = new BitSet(new long[]{0x0000000004000000L});
public static final BitSet FOLLOW_STRING_in_in_predicate802 = new BitSet(new long[]{0x0000000C00000000L});
public static final BitSet FOLLOW_35_in_in_predicate805 = new BitSet(new long[]{0x0000000004000000L});
public static final BitSet FOLLOW_STRING_in_in_predicate807 = new BitSet(new long[]{0x0000000C00000000L});
public static final BitSet FOLLOW_34_in_in_predicate811 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_path_function_in_in_predicate836 = new BitSet(new long[]{0x0000000000008000L});
public static final BitSet FOLLOW_IN_in_in_predicate838 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_33_in_in_predicate840 = new BitSet(new long[]{0x0000000000800000L});
public static final BitSet FOLLOW_NUMBER_in_in_predicate842 = new BitSet(new long[]{0x0000000C00000000L});
public static final BitSet FOLLOW_35_in_in_predicate845 = new BitSet(new long[]{0x0000000000800000L});
public static final BitSet FOLLOW_NUMBER_in_in_predicate847 = new BitSet(new long[]{0x0000000C00000000L});
public static final BitSet FOLLOW_34_in_in_predicate851 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_path_function_in_null_predicate881 = new BitSet(new long[]{0x0000000000010000L});
public static final BitSet FOLLOW_IS_in_null_predicate883 = new BitSet(new long[]{0x0000000000400000L});
public static final BitSet FOLLOW_NULL_in_null_predicate885 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_path_function_in_regex_predicate909 = new BitSet(new long[]{0x0000000000080000L});
public static final BitSet FOLLOW_MATCHES_in_regex_predicate911 = new BitSet(new long[]{0x0000000004000000L});
public static final BitSet FOLLOW_STRING_in_regex_predicate913 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_path_function_in_exists_predicate940 = new BitSet(new long[]{0x0000000000000200L});
public static final BitSet FOLLOW_EXISTS_in_exists_predicate942 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_EXISTS_in_exists_predicate959 = new BitSet(new long[]{0x0000000100000000L});
public static final BitSet FOLLOW_path_function_in_exists_predicate961 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_XPATH_FUN_NAME_in_path_function990 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_33_in_path_function992 = new BitSet(new long[]{0x0000000004000000L});
public static final BitSet FOLLOW_STRING_in_path_function994 = new BitSet(new long[]{0x0000000400000000L});
public static final BitSet FOLLOW_34_in_path_function996 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_equality_value_function_in_value_function1022 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_compariable_value_function_in_value_function1026 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_STRING_in_equality_value_function1038 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_TRUE_in_equality_value_function1051 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_FALSE_in_equality_value_function1064 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_NULL_in_equality_value_function1077 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_path_function_in_equality_value_function1090 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_NUMBER_in_compariable_value_function1102 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_time_millis_function_in_compariable_value_function1114 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_time_string_function_in_compariable_value_function1120 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_TIME_MILLIS_FUN_NAME_in_time_millis_function1132 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_33_in_time_millis_function1134 = new BitSet(new long[]{0x0000000004000000L});
public static final BitSet FOLLOW_STRING_in_time_millis_function1136 = new BitSet(new long[]{0x0000000800000000L});
public static final BitSet FOLLOW_35_in_time_millis_function1138 = new BitSet(new long[]{0x0000000004000000L});
public static final BitSet FOLLOW_STRING_in_time_millis_function1140 = new BitSet(new long[]{0x0000000400000000L});
public static final BitSet FOLLOW_34_in_time_millis_function1142 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_TIME_STRING_FUN_NAME_in_time_string_function1179 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_33_in_time_string_function1181 = new BitSet(new long[]{0x0000000004000000L});
public static final BitSet FOLLOW_STRING_in_time_string_function1183 = new BitSet(new long[]{0x0000000800000000L});
public static final BitSet FOLLOW_35_in_time_string_function1185 = new BitSet(new long[]{0x0000000004000000L});
public static final BitSet FOLLOW_STRING_in_time_string_function1187 = new BitSet(new long[]{0x0000000800000000L});
public static final BitSet FOLLOW_35_in_time_string_function1189 = new BitSet(new long[]{0x0000000004000000L});
public static final BitSet FOLLOW_STRING_in_time_string_function1191 = new BitSet(new long[]{0x0000000400000000L});
public static final BitSet FOLLOW_34_in_time_string_function1193 = new BitSet(new long[]{0x0000000000000002L});
}
| 1,510 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/lang/BetweenTimeMillisTreeNode.java
|
package com.netflix.suro.routing.filter.lang;
import com.netflix.suro.routing.filter.*;
import org.antlr.runtime.Token;
import org.antlr.runtime.tree.Tree;
public class BetweenTimeMillisTreeNode extends MessageFilterBaseTreeNode implements MessageFilterTranslatable {

    /**
     * Translates {@code xpath between (time-millis(...), time-millis(...))} into the
     * conjunction of a lower-bound and an upper-bound time comparison.
     * The range is half-open: {@code value >= lower AND value < upper}.
     */
    @Override
    public MessageFilter translate() {
        // Child 0 is the path; children 1 and 2 are the lower/upper time-millis bounds.
        ValueTreeNode xpathNode = (ValueTreeNode)getChild(0);
        String xpath = (String)xpathNode.getValue();

        TimeMillisValueTreeNode lowerBoundNode = (TimeMillisValueTreeNode)getChild(1);
        TimeMillisValueTreeNode upperBoundNode = (TimeMillisValueTreeNode)getChild(2);

        return MessageFilters.and(
            new PathValueMessageFilter(
                xpath,
                new TimeMillisValuePredicate(lowerBoundNode.getValueFormat(), lowerBoundNode.getValue(), ">=")),
            new PathValueMessageFilter(
                xpath,
                new TimeMillisValuePredicate(upperBoundNode.getValueFormat(), upperBoundNode.getValue(), "<"))
        );
    }

    public BetweenTimeMillisTreeNode(Token t) {
        super(t);
    }

    public BetweenTimeMillisTreeNode(BetweenTimeMillisTreeNode node) {
        super(node);
    }

    /** {@inheritDoc} */
    @Override
    public Tree dupNode() {
        return new BetweenTimeMillisTreeNode(this);
    }
}
| 1,511 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/lang/NullValueTreeNode.java
|
package com.netflix.suro.routing.filter.lang;
import org.antlr.runtime.Token;
import org.antlr.runtime.tree.Tree;
public class NullValueTreeNode extends MessageFilterBaseTreeNode implements ValueTreeNode {

    /** Always returns {@code null}: this node represents the NULL literal in a filter expression. */
    @Override
    public Object getValue() {
        return null;
    }

    public NullValueTreeNode(Token t) {
        super(t);
    }

    public NullValueTreeNode(NullValueTreeNode node) {
        super(node);
    }

    /** {@inheritDoc} */
    @Override
    public Tree dupNode() {
        return new NullValueTreeNode(this);
    }
}
| 1,512 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/lang/TreeNodeUtil.java
|
package com.netflix.suro.routing.filter.lang;
import org.antlr.runtime.tree.Tree;
/**
 * Package-private helpers shared by the filter-language tree nodes.
 */
class TreeNodeUtil {

    private TreeNodeUtil() {
        // Static utility; never instantiated.
    }

    /**
     * Extracts the XPath expression string carried by the given path node.
     *
     * @param pathNode an AST node that implements {@link ValueTreeNode} and holds a string value
     * @return the XPath expression held by {@code pathNode}
     */
    public static String getXPath(Tree pathNode) {
        return (String) ((ValueTreeNode) pathNode).getValue();
    }
}
| 1,513 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/lang/UnexpectedTokenException.java
|
package com.netflix.suro.routing.filter.lang;
import com.google.common.base.Joiner;
import org.antlr.runtime.tree.Tree;
/**
 * Thrown while translating a filter AST when a node's token type is not one of the
 * kinds the translator expects at that position.
 */
public class UnexpectedTokenException extends RuntimeException {

    private static final long serialVersionUID = 1L;

    // Shared, stateless joiner for rendering the expected alternatives ("A or B or C");
    // there is no reason to allocate one per exception instance.
    private static final Joiner EXPECTED_JOINER = Joiner.on(" or ");

    private final Tree unexpected;
    private final String[] expected;

    /**
     * @param unexpected the offending AST node
     * @param expected   human-readable names of the token kinds that would have been valid
     */
    public UnexpectedTokenException(Tree unexpected, String... expected){
        this.unexpected = unexpected;
        this.expected = expected;
    }

    @Override
    public String toString() {
        return String.format(
            "Unexpected token %s at %d:%d. Expected: %s",
            unexpected.getText(),
            unexpected.getLine(),
            unexpected.getCharPositionInLine(),
            EXPECTED_JOINER.join(expected));
    }

    @Override
    public String getMessage(){
        return toString();
    }
}
| 1,514 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/lang/NumericInTreeNode.java
|
package com.netflix.suro.routing.filter.lang;
import com.google.common.base.Function;
import com.google.common.collect.Lists;
import com.netflix.suro.routing.filter.MessageFilter;
import com.netflix.suro.routing.filter.MessageFilters;
import com.netflix.suro.routing.filter.NumericValuePredicate;
import com.netflix.suro.routing.filter.PathValueMessageFilter;
import org.antlr.runtime.Token;
import org.antlr.runtime.tree.Tree;
import java.util.List;
import static com.netflix.suro.routing.filter.lang.TreeNodeUtil.getXPath;
public class NumericInTreeNode extends MessageFilterBaseTreeNode implements MessageFilterTranslatable {

    /**
     * Translates {@code xpath in (n1, n2, ...)} into a disjunction of equality filters,
     * one {@code xpath = nI} filter per listed numeric literal.
     */
    @SuppressWarnings("unchecked")
    @Override
    public MessageFilter translate() {
        // Child 0 is the path; the remaining children are the numeric literals.
        final String xpath = getXPath(getChild(0));

        List children = getChildren();
        return MessageFilters.or(
            Lists.transform(children.subList(1, children.size()), new Function<Object, MessageFilter>() {
                @Override
                public MessageFilter apply(Object node) {
                    Number value = ((NumberTreeNode) node).getValue();
                    return new PathValueMessageFilter(xpath, new NumericValuePredicate(value, "="));
                }
            })
        );
    }

    public NumericInTreeNode(Token t) {
        super(t);
    }

    public NumericInTreeNode(NumericInTreeNode node) {
        super(node);
    }

    /** {@inheritDoc} */
    @Override
    public Tree dupNode() {
        return new NumericInTreeNode(this);
    }
}
| 1,515 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/lang/BetweenTimeStringTreeNode.java
|
package com.netflix.suro.routing.filter.lang;
import com.netflix.suro.routing.filter.*;
import org.antlr.runtime.Token;
import org.antlr.runtime.tree.Tree;
public class BetweenTimeStringTreeNode extends MessageFilterBaseTreeNode implements MessageFilterTranslatable {

    /**
     * Translates {@code xpath between (time-string(...), time-string(...))} into the
     * conjunction of a lower-bound and an upper-bound time comparison.
     * The range is half-open: {@code value >= lower AND value < upper}.
     */
    @Override
    public MessageFilter translate() {
        // Child 0 is the path; children 1 and 2 are the lower/upper time-string bounds.
        ValueTreeNode xpathNode = (ValueTreeNode)getChild(0);
        String xpath = (String)xpathNode.getValue();

        TimeStringValueTreeNode lowerBoundNode = (TimeStringValueTreeNode)getChild(1);
        TimeStringValueTreeNode upperBoundNode = (TimeStringValueTreeNode)getChild(2);

        return MessageFilters.and(
            new PathValueMessageFilter(
                xpath,
                new TimeStringValuePredicate(
                    lowerBoundNode.getValueTimeFormat(),
                    lowerBoundNode.getInputTimeFormat(),
                    lowerBoundNode.getValue(),
                    ">=")),
            new PathValueMessageFilter(
                xpath,
                new TimeStringValuePredicate(
                    upperBoundNode.getValueTimeFormat(),
                    upperBoundNode.getInputTimeFormat(),
                    upperBoundNode.getValue(),
                    "<"))
        );
    }

    public BetweenTimeStringTreeNode(Token t) {
        super(t);
    }

    public BetweenTimeStringTreeNode(BetweenTimeStringTreeNode node) {
        super(node);
    }

    /** {@inheritDoc} */
    @Override
    public Tree dupNode() {
        return new BetweenTimeStringTreeNode(this);
    }
}
| 1,516 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/lang/InvalidFilterException.java
|
package com.netflix.suro.routing.filter.lang;
/**
 * A generic exception representing an invalid filter expression.
 *
 * @author Nitesh Kant ([email protected])
 */
public class InvalidFilterException extends Exception {

    private static final long serialVersionUID = -5878696854757828678L;

    // The filter expression (or its AST) that failed validation; kept for diagnostics.
    private final Object filter;

    /**
     * @param message description of why the filter is invalid
     * @param cause   underlying parse/translation failure, may be {@code null}
     * @param filter  the offending filter expression
     */
    public InvalidFilterException(String message, Throwable cause, Object filter) {
        super(String.format("Invalid filter %s. Error: %s", filter, message), cause);
        this.filter = filter;
    }

    /**
     * @param cause  underlying parse/translation failure
     * @param filter the offending filter expression
     */
    public InvalidFilterException(Throwable cause, Object filter) {
        super(String.format("Invalid filter %s.", filter), cause);
        this.filter = filter;
    }

    /** Returns the filter expression that caused this exception. */
    public Object getFilter() {
        return filter;
    }
}
| 1,517 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/lang/TimeMillisValueTreeNode.java
|
package com.netflix.suro.routing.filter.lang;
import org.antlr.runtime.Token;
import org.antlr.runtime.tree.Tree;
public class TimeMillisValueTreeNode extends MessageFilterBaseTreeNode implements ValueTreeNode {

    /** Returns the time value literal (child 1) of the {@code time-millis(format, value)} function. */
    @Override
    public String getValue() {
        return (String)((ValueTreeNode)getChild(1)).getValue();
    }

    /** Returns the time format pattern (child 0) used to interpret {@link #getValue()}. */
    public String getValueFormat() {
        return (String)((ValueTreeNode)getChild(0)).getValue();
    }

    public TimeMillisValueTreeNode(Token t) {
        super(t);
    }

    public TimeMillisValueTreeNode(TimeMillisValueTreeNode node) {
        super(node);
    }

    /** {@inheritDoc} */
    @Override
    public Tree dupNode() {
        return new TimeMillisValueTreeNode(this);
    }
}
| 1,518 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/lang/NullTreeNode.java
|
package com.netflix.suro.routing.filter.lang;
import com.netflix.suro.routing.filter.NullValuePredicate;
import com.netflix.suro.routing.filter.PathValueMessageFilter;
import com.netflix.suro.routing.filter.MessageFilter;
import org.antlr.runtime.Token;
import org.antlr.runtime.tree.Tree;
import static com.netflix.suro.routing.filter.lang.TreeNodeUtil.getXPath;
public class NullTreeNode extends MessageFilterBaseTreeNode implements MessageFilterTranslatable {

    /** Translates {@code xpath is null} into a path filter using the shared null-value predicate. */
    @Override
    public MessageFilter translate() {
        String xpath = getXPath(getChild(0));
        return new PathValueMessageFilter(xpath, NullValuePredicate.INSTANCE);
    }

    public NullTreeNode(Token t) {
        super(t);
    }

    public NullTreeNode(NullTreeNode node) {
        super(node);
    }

    /** {@inheritDoc} */
    @Override
    public Tree dupNode() {
        return new NullTreeNode(this);
    }
}
| 1,519 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/lang/ValueTreeNode.java
|
package com.netflix.suro.routing.filter.lang;
/**
 * An AST node that carries a literal value (string, number, time, xpath, ...).
 */
public interface ValueTreeNode {
    /** Returns the value this node represents; may be {@code null} (see NullValueTreeNode). */
    public Object getValue();
}
| 1,520 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/lang/XPathTreeNode.java
|
package com.netflix.suro.routing.filter.lang;
import org.antlr.runtime.Token;
import org.antlr.runtime.tree.Tree;
public class XPathTreeNode extends MessageFilterBaseTreeNode implements ValueTreeNode {

    /** Returns the xpath expression text carried by this node's single child. */
    @Override
    public Object getValue() {
        return getChild(0).getText();
    }

    public XPathTreeNode(Token t) {
        super(t);
    }

    public XPathTreeNode(XPathTreeNode node) {
        super(node);
    }

    /** {@inheritDoc} */
    @Override
    public Tree dupNode() {
        return new XPathTreeNode(this);
    }
}
| 1,521 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/lang/StringInTreeNode.java
|
package com.netflix.suro.routing.filter.lang;
import com.google.common.base.Function;
import com.google.common.collect.Lists;
import com.netflix.suro.routing.filter.MessageFilters;
import com.netflix.suro.routing.filter.MessageFilter;
import com.netflix.suro.routing.filter.PathValueMessageFilter;
import com.netflix.suro.routing.filter.StringValuePredicate;
import org.antlr.runtime.Token;
import org.antlr.runtime.tree.Tree;
import java.util.List;
import static com.netflix.suro.routing.filter.lang.TreeNodeUtil.getXPath;
public class StringInTreeNode extends MessageFilterBaseTreeNode implements MessageFilterTranslatable {

    /**
     * Translates {@code xpath in ("s1", "s2", ...)} into a disjunction of equality filters,
     * one {@code xpath = sI} filter per listed string literal.
     */
    @SuppressWarnings("unchecked")
    @Override
    public MessageFilter translate() {
        // Child 0 is the path; the remaining children are the string literals.
        final String xpath = getXPath(getChild(0));

        List children = getChildren();
        return MessageFilters.or(
            Lists.transform(children.subList(1, children.size()), new Function<Object, MessageFilter>() {
                @Override
                public MessageFilter apply(Object node) {
                    String value = ((StringTreeNode) node).getValue();
                    return new PathValueMessageFilter(xpath, new StringValuePredicate(value));
                }
            })
        );
    }

    public StringInTreeNode(Token t) {
        super(t);
    }

    public StringInTreeNode(StringInTreeNode node) {
        super(node);
    }

    /** {@inheritDoc} */
    @Override
    public Tree dupNode() {
        return new StringInTreeNode(this);
    }
}
| 1,522 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/lang/MessageFilterParsingException.java
|
package com.netflix.suro.routing.filter.lang;
/**
 * Thrown when a filter expression cannot be parsed into an AST.
 */
public class MessageFilterParsingException extends RuntimeException {

    private static final long serialVersionUID = 1L;

    /**
     * @param msg   description of the parse failure
     * @param cause the underlying parser error
     */
    public MessageFilterParsingException(String msg, Throwable cause){
        super(msg, cause);
    }
}
| 1,523 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/lang/MatchesTreeNode.java
|
package com.netflix.suro.routing.filter.lang;
import com.netflix.suro.routing.filter.PathValueMessageFilter;
import com.netflix.suro.routing.filter.RegexValuePredicate;
import com.netflix.suro.routing.filter.MessageFilter;
import org.antlr.runtime.Token;
import org.antlr.runtime.tree.Tree;
import static com.netflix.suro.routing.filter.lang.TreeNodeUtil.getXPath;
public class MatchesTreeNode extends MessageFilterBaseTreeNode implements MessageFilterTranslatable {

    /**
     * Translates {@code xpath =~ "regex"} into a regex path filter.
     * Uses partial-match policy, i.e. the regex may match a substring of the value.
     */
    @Override
    public MessageFilter translate() {
        String xpath = getXPath(getChild(0));
        StringTreeNode valueNode = (StringTreeNode)getChild(1);
        return new PathValueMessageFilter(xpath, new RegexValuePredicate(valueNode.getValue(), RegexValuePredicate.MatchPolicy.PARTIAL));
    }

    public MatchesTreeNode(Token t) {
        super(t);
    }

    public MatchesTreeNode(MatchesTreeNode node) {
        super(node);
    }

    /** {@inheritDoc} */
    @Override
    public Tree dupNode() {
        return new MatchesTreeNode(this);
    }
}
| 1,524 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/lang/NotEqualsTreeNode.java
|
package com.netflix.suro.routing.filter.lang;
import com.netflix.suro.routing.filter.MessageFilters;
import com.netflix.suro.routing.filter.MessageFilter;
import org.antlr.runtime.Token;
import org.antlr.runtime.tree.Tree;
public class NotEqualsTreeNode extends EqualityComparisonBaseTreeNode implements MessageFilterTranslatable {

    /** Translates {@code xpath != value} by negating the corresponding equality filter. */
    @Override
    public MessageFilter translate() {
        return MessageFilters.not(getEqualFilter());
    }

    public NotEqualsTreeNode(Token t) {
        super(t);
    }

    public NotEqualsTreeNode(NotEqualsTreeNode node) {
        super(node);
    }

    /** {@inheritDoc} */
    @Override
    public Tree dupNode() {
        return new NotEqualsTreeNode(this);
    }
}
| 1,525 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/lang/EqualsTreeNode.java
|
package com.netflix.suro.routing.filter.lang;
import com.netflix.suro.routing.filter.MessageFilter;
import org.antlr.runtime.Token;
import org.antlr.runtime.tree.Tree;
public class EqualsTreeNode extends EqualityComparisonBaseTreeNode implements MessageFilterTranslatable {

    /** Translates {@code xpath = value} into the equality filter built by the base class. */
    @Override
    public MessageFilter translate() {
        return getEqualFilter();
    }

    public EqualsTreeNode(Token t) {
        super(t);
    }

    public EqualsTreeNode(EqualsTreeNode node) {
        super(node);
    }

    /** {@inheritDoc} */
    @Override
    public Tree dupNode() {
        return new EqualsTreeNode(this);
    }
}
| 1,526 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/lang/TimeStringValueTreeNode.java
|
package com.netflix.suro.routing.filter.lang;
import org.antlr.runtime.Token;
import org.antlr.runtime.tree.Tree;
public class TimeStringValueTreeNode extends MessageFilterBaseTreeNode implements ValueTreeNode {

    /** Returns the time value literal (child 2) of the {@code time-string(...)} function. */
    @Override
    public String getValue() {
        return (String)((ValueTreeNode)getChild(2)).getValue();
    }

    /** Returns the format (child 1) of the value being compared against. */
    public String getValueTimeFormat() {
        return (String)((ValueTreeNode)getChild(1)).getValue();
    }

    /** Returns the format (child 0) used to parse {@link #getValue()}. */
    public String getInputTimeFormat() {
        return (String)((ValueTreeNode)getChild(0)).getValue();
    }

    public TimeStringValueTreeNode(Token t) {
        super(t);
    }

    public TimeStringValueTreeNode(TimeStringValueTreeNode node) {
        super(node);
    }

    /** {@inheritDoc} */
    @Override
    public Tree dupNode() {
        return new TimeStringValueTreeNode(this);
    }
}
| 1,527 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/lang/NotTreeNode.java
|
package com.netflix.suro.routing.filter.lang;
import com.netflix.suro.routing.filter.MessageFilters;
import com.netflix.suro.routing.filter.MessageFilter;
import org.antlr.runtime.Token;
import org.antlr.runtime.tree.Tree;
public class NotTreeNode extends MessageFilterBaseTreeNode implements MessageFilterTranslatable {

    /** Translates {@code not <expr>} by negating the filter produced by the single child. */
    @Override
    public MessageFilter translate() {
        MessageFilter filter = ((MessageFilterTranslatable)getChild(0)).translate();
        return MessageFilters.not(filter);
    }

    public NotTreeNode(Token t) {
        super(t);
    }

    public NotTreeNode(NotTreeNode node) {
        super(node);
    }

    /** {@inheritDoc} */
    @Override
    public Tree dupNode() {
        return new NotTreeNode(this);
    }
}
| 1,528 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/lang/EqualityComparisonBaseTreeNode.java
|
package com.netflix.suro.routing.filter.lang;
import com.netflix.suro.routing.filter.*;
import org.antlr.runtime.Token;
import org.antlr.runtime.tree.Tree;
import static com.netflix.suro.routing.filter.lang.MessageFilterParser.*;
import static com.netflix.suro.routing.filter.lang.TreeNodeUtil.getXPath;
/**
 * Base class for the {@code =} and {@code !=} comparison nodes. Builds the shared
 * equality filter; subclasses either return it as-is (equals) or negate it (not-equals).
 */
public abstract class EqualityComparisonBaseTreeNode extends MessageFilterBaseTreeNode {
    public EqualityComparisonBaseTreeNode(Token t) {
        super(t);
    }
    public EqualityComparisonBaseTreeNode(MessageFilterBaseTreeNode node) {
        super(node);
    }
    // TODO this is an ugly workaround. We should really generate ^(NOT ^(Equals...) for NOT_EQUAL
    // but I can't get ANTLR to generated nested tree with added node.
    /**
     * Builds an equality ("=") filter for this comparison node: child 0 is the path,
     * child 1 is the value node whose token type selects the predicate implementation.
     *
     * @throws UnexpectedTokenException if the value node's token type is not a supported literal
     */
    protected MessageFilter getEqualFilter() {
        String xpath = getXPath(getChild(0));
        Tree valueNode = getChild(1);
        // Dispatch on the literal kind of the right-hand side (token types from the generated parser).
        switch(valueNode.getType()){
        case NUMBER:
            Number value = (Number)((ValueTreeNode)valueNode).getValue();
            return new PathValueMessageFilter(xpath, new NumericValuePredicate(value, "="));
        case STRING:
            String sValue = (String)((ValueTreeNode)valueNode).getValue();
            return new PathValueMessageFilter(xpath, new StringValuePredicate(sValue));
        case TRUE:
            return new PathValueMessageFilter(xpath, BooleanValuePredicate.TRUE);
        case FALSE:
            return new PathValueMessageFilter(xpath, BooleanValuePredicate.FALSE);
        case NULL:
            return new PathValueMessageFilter(xpath, NullValuePredicate.INSTANCE);
        case XPATH_FUN_NAME:
            // Compare the value at this xpath to the value at another xpath.
            String aPath = (String)((ValueTreeNode)valueNode).getValue();
            return new PathValueMessageFilter(xpath, new PathValuePredicate(aPath, xpath));
        case TIME_MILLIS_FUN_NAME:
            TimeMillisValueTreeNode timeNode = (TimeMillisValueTreeNode)valueNode;
            return new PathValueMessageFilter(xpath,
                new TimeMillisValuePredicate(
                    timeNode.getValueFormat(),
                    timeNode.getValue(),
                    "="));
        case TIME_STRING_FUN_NAME:
            TimeStringValueTreeNode timeStringNode = (TimeStringValueTreeNode)valueNode;
            return new PathValueMessageFilter(xpath,
                new TimeStringValuePredicate(
                    timeStringNode.getValueTimeFormat(),
                    timeStringNode.getInputTimeFormat(),
                    timeStringNode.getValue(),
                    "="));
        default:
            throw new UnexpectedTokenException(valueNode, "Number", "String", "TRUE", "FALSE");
        }
    }
}
| 1,529 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/lang/StringTreeNode.java
|
package com.netflix.suro.routing.filter.lang;
import org.antlr.runtime.Token;
import org.antlr.runtime.tree.Tree;
public class StringTreeNode extends MessageFilterBaseTreeNode implements ValueTreeNode {

    /** Returns this node's token text as the string literal value. */
    @Override
    public String getValue() {
        return getText();
    }

    public StringTreeNode(Token t) {
        super(t);
    }

    public StringTreeNode(StringTreeNode node) {
        super(node);
    }

    /** {@inheritDoc} */
    @Override
    public Tree dupNode() {
        return new StringTreeNode(this);
    }
}
| 1,530 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/lang/TrueValueTreeNode.java
|
package com.netflix.suro.routing.filter.lang;
import com.netflix.suro.routing.filter.MessageFilters;
import com.netflix.suro.routing.filter.MessageFilter;
import org.antlr.runtime.Token;
import org.antlr.runtime.tree.Tree;
public class TrueValueTreeNode extends MessageFilterBaseTreeNode implements MessageFilterTranslatable {

    public TrueValueTreeNode(Token t) {
        super(t);
    }

    public TrueValueTreeNode(TrueValueTreeNode node) {
        super(node);
    }

    /** {@inheritDoc} */
    @Override
    public Tree dupNode() {
        return new TrueValueTreeNode(this);
    }

    /** Translates the {@code true} literal into the always-true filter. */
    @Override
    public MessageFilter translate() {
        return MessageFilters.alwaysTrue();
    }
}
| 1,531 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/lang/MessageFilterTranslatable.java
|
package com.netflix.suro.routing.filter.lang;
import com.netflix.suro.routing.filter.MessageFilter;
/**
 * An AST node that can be translated into an executable message filter.
 */
public interface MessageFilterTranslatable {
    /** Translates this node (and its subtree) into a {@link MessageFilter}. */
    public MessageFilter translate();
}
| 1,532 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/lang/BetweenTreeNode.java
|
package com.netflix.suro.routing.filter.lang;
import com.netflix.suro.routing.filter.*;
import org.antlr.runtime.Token;
import org.antlr.runtime.tree.Tree;
public class BetweenTreeNode extends MessageFilterBaseTreeNode implements MessageFilterTranslatable {

    /**
     * Translates {@code xpath between (lo, hi)} into the conjunction of two numeric
     * comparisons. The range is half-open: {@code value >= lo AND value < hi}.
     */
    @Override
    public MessageFilter translate() {
        // Child 0 is the path; children 1 and 2 are the numeric lower/upper bounds.
        ValueTreeNode xpathNode = (ValueTreeNode)getChild(0);
        String xpath = (String)xpathNode.getValue();

        ValueTreeNode lowerBoundNode = (ValueTreeNode)getChild(1);
        Number lowerBound = (Number)lowerBoundNode.getValue();

        ValueTreeNode upperBoundNode = (ValueTreeNode)getChild(2);
        Number upperBound = (Number)upperBoundNode.getValue();

        return MessageFilters.and(
            new PathValueMessageFilter(xpath, new NumericValuePredicate(lowerBound, ">=")),
            new PathValueMessageFilter(xpath, new NumericValuePredicate(upperBound, "<"))
        );
    }

    public BetweenTreeNode(Token t) {
        super(t);
    }

    public BetweenTreeNode(BetweenTreeNode node) {
        super(node);
    }

    /** {@inheritDoc} */
    @Override
    public Tree dupNode() {
        return new BetweenTreeNode(this);
    }
}
| 1,533 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/lang/MessageFilterBaseTreeNode.java
|
package com.netflix.suro.routing.filter.lang;
import org.antlr.runtime.Token;
import org.antlr.runtime.tree.CommonTree;
public abstract class MessageFilterBaseTreeNode extends CommonTree {
public MessageFilterBaseTreeNode(Token t) {
super(t);
}
public MessageFilterBaseTreeNode(MessageFilterBaseTreeNode node) {
super(node);
}
public String toString() {
return String.format("%s<%s>", getText(), getClass().getSimpleName());
}
}
| 1,534 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/lang/MessageFilterLexer.java
|
// $ANTLR 3.4 MessageFilter.g 2012-08-22 11:55:59
package com.netflix.suro.routing.filter.lang;
import org.antlr.runtime.*;
@SuppressWarnings({"all", "warnings", "unchecked"})
public class MessageFilterLexer extends Lexer {
public static final int EOF=-1;
public static final int T__33=33;
public static final int T__34=34;
public static final int T__35=35;
public static final int AND=4;
public static final int BETWEEN=5;
public static final int COMMENT=6;
public static final int EQUALS=7;
public static final int ESC_SEQ=8;
public static final int EXISTS=9;
public static final int EXPONENT=10;
public static final int FALSE=11;
public static final int GE=12;
public static final int GT=13;
public static final int HEX_DIGIT=14;
public static final int IN=15;
public static final int IS=16;
public static final int LE=17;
public static final int LT=18;
public static final int MATCHES=19;
public static final int NOT=20;
public static final int NOT_EQUALS=21;
public static final int NULL=22;
public static final int NUMBER=23;
public static final int OCTAL_ESC=24;
public static final int OR=25;
public static final int STRING=26;
public static final int TIME_MILLIS_FUN_NAME=27;
public static final int TIME_STRING_FUN_NAME=28;
public static final int TRUE=29;
public static final int UNICODE_ESC=30;
public static final int WS=31;
public static final int XPATH_FUN_NAME=32;
/**
 * Overridden ANTLR error hook: instead of printing to stderr (the ANTLR
 * default), counts the error and delegates to
 * {@link #throwLexerException(String[], RecognitionException)}, which
 * surfaces the failure as an exception to the caller.
 *
 * @param e the recognition error raised while lexing
 */
public void reportError(RecognitionException e) {
// if we've already reported an error and have not matched a token
// yet successfully, don't report any errors.
if ( state.errorRecovery ) {
//System.err.print("[SPURIOUS] ");
return;
}
state.syntaxErrors++; // don't count spurious
state.errorRecovery = true;
throwLexerException(this.getTokenNames(), e);
}
/**
 * Converts an ANTLR {@link RecognitionException} into a
 * {@code MessageFilterParsingException} whose message combines the
 * standard ANTLR error header (line/position) and error description.
 * The original exception is preserved as the cause.
 *
 * @param tokenNames display names of the tokens, used in the error message
 * @param e the underlying recognition error
 */
public void throwLexerException(String[] tokenNames, RecognitionException e) {
String hdr = getErrorHeader(e);
String msg = getErrorMessage(e, tokenNames);
throw new MessageFilterParsingException(hdr+" "+msg, e);
}
// delegates
// delegators
public Lexer[] getDelegates() {
return new Lexer[] {};
}
public MessageFilterLexer() {}
public MessageFilterLexer(CharStream input) {
this(input, new RecognizerSharedState());
}
public MessageFilterLexer(CharStream input, RecognizerSharedState state) {
super(input,state);
}
public String getGrammarFileName() { return "MessageFilter.g"; }
// $ANTLR start "AND"
public final void mAND() throws RecognitionException {
try {
int _type = AND;
int _channel = DEFAULT_TOKEN_CHANNEL;
// MessageFilter.g:33:5: ( 'and' )
// MessageFilter.g:33:7: 'and'
{
match("and");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "AND"
// $ANTLR start "BETWEEN"
public final void mBETWEEN() throws RecognitionException {
try {
int _type = BETWEEN;
int _channel = DEFAULT_TOKEN_CHANNEL;
// MessageFilter.g:34:9: ( 'between' )
// MessageFilter.g:34:11: 'between'
{
match("between");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "BETWEEN"
// $ANTLR start "EQUALS"
public final void mEQUALS() throws RecognitionException {
try {
int _type = EQUALS;
int _channel = DEFAULT_TOKEN_CHANNEL;
// MessageFilter.g:35:8: ( '=' )
// MessageFilter.g:35:10: '='
{
match('=');
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "EQUALS"
// $ANTLR start "EXISTS"
public final void mEXISTS() throws RecognitionException {
try {
int _type = EXISTS;
int _channel = DEFAULT_TOKEN_CHANNEL;
// MessageFilter.g:36:8: ( 'exists' )
// MessageFilter.g:36:10: 'exists'
{
match("exists");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "EXISTS"
// $ANTLR start "FALSE"
public final void mFALSE() throws RecognitionException {
try {
int _type = FALSE;
int _channel = DEFAULT_TOKEN_CHANNEL;
// MessageFilter.g:37:7: ( 'false' )
// MessageFilter.g:37:9: 'false'
{
match("false");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "FALSE"
// $ANTLR start "GE"
public final void mGE() throws RecognitionException {
try {
int _type = GE;
int _channel = DEFAULT_TOKEN_CHANNEL;
// MessageFilter.g:38:4: ( '>=' )
// MessageFilter.g:38:6: '>='
{
match(">=");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "GE"
// $ANTLR start "GT"
public final void mGT() throws RecognitionException {
try {
int _type = GT;
int _channel = DEFAULT_TOKEN_CHANNEL;
// MessageFilter.g:39:4: ( '>' )
// MessageFilter.g:39:6: '>'
{
match('>');
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "GT"
// $ANTLR start "IN"
public final void mIN() throws RecognitionException {
try {
int _type = IN;
int _channel = DEFAULT_TOKEN_CHANNEL;
// MessageFilter.g:40:4: ( 'in' )
// MessageFilter.g:40:6: 'in'
{
match("in");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "IN"
// $ANTLR start "IS"
public final void mIS() throws RecognitionException {
try {
int _type = IS;
int _channel = DEFAULT_TOKEN_CHANNEL;
// MessageFilter.g:41:4: ( 'is' )
// MessageFilter.g:41:6: 'is'
{
match("is");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "IS"
// $ANTLR start "LE"
public final void mLE() throws RecognitionException {
try {
int _type = LE;
int _channel = DEFAULT_TOKEN_CHANNEL;
// MessageFilter.g:42:4: ( '<=' )
// MessageFilter.g:42:6: '<='
{
match("<=");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "LE"
// $ANTLR start "LT"
public final void mLT() throws RecognitionException {
try {
int _type = LT;
int _channel = DEFAULT_TOKEN_CHANNEL;
// MessageFilter.g:43:4: ( '<' )
// MessageFilter.g:43:6: '<'
{
match('<');
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "LT"
// $ANTLR start "MATCHES"
public final void mMATCHES() throws RecognitionException {
try {
int _type = MATCHES;
int _channel = DEFAULT_TOKEN_CHANNEL;
// MessageFilter.g:44:9: ( '=~' )
// MessageFilter.g:44:11: '=~'
{
match("=~");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "MATCHES"
// $ANTLR start "NOT"
public final void mNOT() throws RecognitionException {
try {
int _type = NOT;
int _channel = DEFAULT_TOKEN_CHANNEL;
// MessageFilter.g:45:5: ( 'not' )
// MessageFilter.g:45:7: 'not'
{
match("not");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "NOT"
// $ANTLR start "NOT_EQUALS"
public final void mNOT_EQUALS() throws RecognitionException {
try {
int _type = NOT_EQUALS;
int _channel = DEFAULT_TOKEN_CHANNEL;
// MessageFilter.g:46:12: ( '!=' )
// MessageFilter.g:46:14: '!='
{
match("!=");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "NOT_EQUALS"
// $ANTLR start "NULL"
public final void mNULL() throws RecognitionException {
try {
int _type = NULL;
int _channel = DEFAULT_TOKEN_CHANNEL;
// MessageFilter.g:47:6: ( 'null' )
// MessageFilter.g:47:8: 'null'
{
match("null");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "NULL"
// $ANTLR start "OR"
public final void mOR() throws RecognitionException {
try {
int _type = OR;
int _channel = DEFAULT_TOKEN_CHANNEL;
// MessageFilter.g:48:4: ( 'or' )
// MessageFilter.g:48:6: 'or'
{
match("or");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "OR"
// $ANTLR start "TIME_MILLIS_FUN_NAME"
public final void mTIME_MILLIS_FUN_NAME() throws RecognitionException {
try {
int _type = TIME_MILLIS_FUN_NAME;
int _channel = DEFAULT_TOKEN_CHANNEL;
// MessageFilter.g:49:22: ( 'time-millis' )
// MessageFilter.g:49:24: 'time-millis'
{
match("time-millis");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "TIME_MILLIS_FUN_NAME"
// $ANTLR start "TIME_STRING_FUN_NAME"
public final void mTIME_STRING_FUN_NAME() throws RecognitionException {
try {
int _type = TIME_STRING_FUN_NAME;
int _channel = DEFAULT_TOKEN_CHANNEL;
// MessageFilter.g:50:22: ( 'time-string' )
// MessageFilter.g:50:24: 'time-string'
{
match("time-string");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "TIME_STRING_FUN_NAME"
// $ANTLR start "TRUE"
public final void mTRUE() throws RecognitionException {
try {
int _type = TRUE;
int _channel = DEFAULT_TOKEN_CHANNEL;
// MessageFilter.g:51:6: ( 'true' )
// MessageFilter.g:51:8: 'true'
{
match("true");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "TRUE"
// $ANTLR start "XPATH_FUN_NAME"
public final void mXPATH_FUN_NAME() throws RecognitionException {
try {
int _type = XPATH_FUN_NAME;
int _channel = DEFAULT_TOKEN_CHANNEL;
// MessageFilter.g:52:16: ( 'xpath' )
// MessageFilter.g:52:18: 'xpath'
{
match("xpath");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "XPATH_FUN_NAME"
// $ANTLR start "T__33"
public final void mT__33() throws RecognitionException {
try {
int _type = T__33;
int _channel = DEFAULT_TOKEN_CHANNEL;
// MessageFilter.g:53:7: ( '(' )
// MessageFilter.g:53:9: '('
{
match('(');
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "T__33"
// $ANTLR start "T__34"
public final void mT__34() throws RecognitionException {
try {
int _type = T__34;
int _channel = DEFAULT_TOKEN_CHANNEL;
// MessageFilter.g:54:7: ( ')' )
// MessageFilter.g:54:9: ')'
{
match(')');
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "T__34"
// $ANTLR start "T__35"
public final void mT__35() throws RecognitionException {
try {
int _type = T__35;
int _channel = DEFAULT_TOKEN_CHANNEL;
// MessageFilter.g:55:7: ( ',' )
// MessageFilter.g:55:9: ','
{
match(',');
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "T__35"
// $ANTLR start "NUMBER"
public final void mNUMBER() throws RecognitionException {
try {
int _type = NUMBER;
int _channel = DEFAULT_TOKEN_CHANNEL;
// MessageFilter.g:198:5: ( ( '+' | '-' )? ( '0' .. '9' )+ ( '.' ( '0' .. '9' )* ( EXPONENT )? )? | ( '+' | '-' )? '.' ( '0' .. '9' )+ ( EXPONENT )? | ( '+' | '-' )? ( '0' .. '9' )+ EXPONENT )
int alt11=3;
alt11 = dfa11.predict(input);
switch (alt11) {
case 1 :
// MessageFilter.g:198:9: ( '+' | '-' )? ( '0' .. '9' )+ ( '.' ( '0' .. '9' )* ( EXPONENT )? )?
{
// MessageFilter.g:198:9: ( '+' | '-' )?
int alt1=2;
int LA1_0 = input.LA(1);
if ( (LA1_0=='+'||LA1_0=='-') ) {
alt1=1;
}
switch (alt1) {
case 1 :
// MessageFilter.g:
{
if ( input.LA(1)=='+'||input.LA(1)=='-' ) {
input.consume();
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
recover(mse);
throw mse;
}
}
break;
}
// MessageFilter.g:198:20: ( '0' .. '9' )+
int cnt2=0;
loop2:
do {
int alt2=2;
int LA2_0 = input.LA(1);
if ( ((LA2_0 >= '0' && LA2_0 <= '9')) ) {
alt2=1;
}
switch (alt2) {
case 1 :
// MessageFilter.g:
{
if ( (input.LA(1) >= '0' && input.LA(1) <= '9') ) {
input.consume();
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
recover(mse);
throw mse;
}
}
break;
default :
if ( cnt2 >= 1 ) break loop2;
EarlyExitException eee =
new EarlyExitException(2, input);
throw eee;
}
cnt2++;
} while (true);
// MessageFilter.g:198:32: ( '.' ( '0' .. '9' )* ( EXPONENT )? )?
int alt5=2;
int LA5_0 = input.LA(1);
if ( (LA5_0=='.') ) {
alt5=1;
}
switch (alt5) {
case 1 :
// MessageFilter.g:198:33: '.' ( '0' .. '9' )* ( EXPONENT )?
{
match('.');
// MessageFilter.g:198:37: ( '0' .. '9' )*
loop3:
do {
int alt3=2;
int LA3_0 = input.LA(1);
if ( ((LA3_0 >= '0' && LA3_0 <= '9')) ) {
alt3=1;
}
switch (alt3) {
case 1 :
// MessageFilter.g:
{
if ( (input.LA(1) >= '0' && input.LA(1) <= '9') ) {
input.consume();
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
recover(mse);
throw mse;
}
}
break;
default :
break loop3;
}
} while (true);
// MessageFilter.g:198:49: ( EXPONENT )?
int alt4=2;
int LA4_0 = input.LA(1);
if ( (LA4_0=='E'||LA4_0=='e') ) {
alt4=1;
}
switch (alt4) {
case 1 :
// MessageFilter.g:198:49: EXPONENT
{
mEXPONENT();
}
break;
}
}
break;
}
}
break;
case 2 :
// MessageFilter.g:199:9: ( '+' | '-' )? '.' ( '0' .. '9' )+ ( EXPONENT )?
{
// MessageFilter.g:199:9: ( '+' | '-' )?
int alt6=2;
int LA6_0 = input.LA(1);
if ( (LA6_0=='+'||LA6_0=='-') ) {
alt6=1;
}
switch (alt6) {
case 1 :
// MessageFilter.g:
{
if ( input.LA(1)=='+'||input.LA(1)=='-' ) {
input.consume();
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
recover(mse);
throw mse;
}
}
break;
}
match('.');
// MessageFilter.g:199:24: ( '0' .. '9' )+
int cnt7=0;
loop7:
do {
int alt7=2;
int LA7_0 = input.LA(1);
if ( ((LA7_0 >= '0' && LA7_0 <= '9')) ) {
alt7=1;
}
switch (alt7) {
case 1 :
// MessageFilter.g:
{
if ( (input.LA(1) >= '0' && input.LA(1) <= '9') ) {
input.consume();
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
recover(mse);
throw mse;
}
}
break;
default :
if ( cnt7 >= 1 ) break loop7;
EarlyExitException eee =
new EarlyExitException(7, input);
throw eee;
}
cnt7++;
} while (true);
// MessageFilter.g:199:36: ( EXPONENT )?
int alt8=2;
int LA8_0 = input.LA(1);
if ( (LA8_0=='E'||LA8_0=='e') ) {
alt8=1;
}
switch (alt8) {
case 1 :
// MessageFilter.g:199:36: EXPONENT
{
mEXPONENT();
}
break;
}
}
break;
case 3 :
// MessageFilter.g:200:9: ( '+' | '-' )? ( '0' .. '9' )+ EXPONENT
{
// MessageFilter.g:200:9: ( '+' | '-' )?
int alt9=2;
int LA9_0 = input.LA(1);
if ( (LA9_0=='+'||LA9_0=='-') ) {
alt9=1;
}
switch (alt9) {
case 1 :
// MessageFilter.g:
{
if ( input.LA(1)=='+'||input.LA(1)=='-' ) {
input.consume();
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
recover(mse);
throw mse;
}
}
break;
}
// MessageFilter.g:200:20: ( '0' .. '9' )+
int cnt10=0;
loop10:
do {
int alt10=2;
int LA10_0 = input.LA(1);
if ( ((LA10_0 >= '0' && LA10_0 <= '9')) ) {
alt10=1;
}
switch (alt10) {
case 1 :
// MessageFilter.g:
{
if ( (input.LA(1) >= '0' && input.LA(1) <= '9') ) {
input.consume();
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
recover(mse);
throw mse;
}
}
break;
default :
if ( cnt10 >= 1 ) break loop10;
EarlyExitException eee =
new EarlyExitException(10, input);
throw eee;
}
cnt10++;
} while (true);
mEXPONENT();
}
break;
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "NUMBER"
// $ANTLR start "COMMENT"
public final void mCOMMENT() throws RecognitionException {
try {
int _type = COMMENT;
int _channel = DEFAULT_TOKEN_CHANNEL;
// MessageFilter.g:204:5: ( '//' (~ ( '\\n' | '\\r' ) )* ( '\\r' )? '\\n' | '/*' ( options {greedy=false; } : . )* '*/' )
int alt15=2;
int LA15_0 = input.LA(1);
if ( (LA15_0=='/') ) {
int LA15_1 = input.LA(2);
if ( (LA15_1=='/') ) {
alt15=1;
}
else if ( (LA15_1=='*') ) {
alt15=2;
}
else {
NoViableAltException nvae =
new NoViableAltException("", 15, 1, input);
throw nvae;
}
}
else {
NoViableAltException nvae =
new NoViableAltException("", 15, 0, input);
throw nvae;
}
switch (alt15) {
case 1 :
// MessageFilter.g:204:9: '//' (~ ( '\\n' | '\\r' ) )* ( '\\r' )? '\\n'
{
match("//");
// MessageFilter.g:204:14: (~ ( '\\n' | '\\r' ) )*
loop12:
do {
int alt12=2;
int LA12_0 = input.LA(1);
if ( ((LA12_0 >= '\u0000' && LA12_0 <= '\t')||(LA12_0 >= '\u000B' && LA12_0 <= '\f')||(LA12_0 >= '\u000E' && LA12_0 <= '\uFFFF')) ) {
alt12=1;
}
switch (alt12) {
case 1 :
// MessageFilter.g:
{
if ( (input.LA(1) >= '\u0000' && input.LA(1) <= '\t')||(input.LA(1) >= '\u000B' && input.LA(1) <= '\f')||(input.LA(1) >= '\u000E' && input.LA(1) <= '\uFFFF') ) {
input.consume();
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
recover(mse);
throw mse;
}
}
break;
default :
break loop12;
}
} while (true);
// MessageFilter.g:204:28: ( '\\r' )?
int alt13=2;
int LA13_0 = input.LA(1);
if ( (LA13_0=='\r') ) {
alt13=1;
}
switch (alt13) {
case 1 :
// MessageFilter.g:204:28: '\\r'
{
match('\r');
}
break;
}
match('\n');
_channel=HIDDEN;
}
break;
case 2 :
// MessageFilter.g:205:9: '/*' ( options {greedy=false; } : . )* '*/'
{
match("/*");
// MessageFilter.g:205:14: ( options {greedy=false; } : . )*
loop14:
do {
int alt14=2;
int LA14_0 = input.LA(1);
if ( (LA14_0=='*') ) {
int LA14_1 = input.LA(2);
if ( (LA14_1=='/') ) {
alt14=2;
}
else if ( ((LA14_1 >= '\u0000' && LA14_1 <= '.')||(LA14_1 >= '0' && LA14_1 <= '\uFFFF')) ) {
alt14=1;
}
}
else if ( ((LA14_0 >= '\u0000' && LA14_0 <= ')')||(LA14_0 >= '+' && LA14_0 <= '\uFFFF')) ) {
alt14=1;
}
switch (alt14) {
case 1 :
// MessageFilter.g:205:42: .
{
matchAny();
}
break;
default :
break loop14;
}
} while (true);
match("*/");
_channel=HIDDEN;
}
break;
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "COMMENT"
// $ANTLR start "WS"
public final void mWS() throws RecognitionException {
try {
int _type = WS;
int _channel = DEFAULT_TOKEN_CHANNEL;
// MessageFilter.g:208:5: ( ( ' ' | '\\t' | '\\r' | '\\n' ) )
// MessageFilter.g:208:9: ( ' ' | '\\t' | '\\r' | '\\n' )
{
if ( (input.LA(1) >= '\t' && input.LA(1) <= '\n')||input.LA(1)=='\r'||input.LA(1)==' ' ) {
input.consume();
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
recover(mse);
throw mse;
}
_channel=HIDDEN;
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "WS"
// $ANTLR start "STRING"
public final void mSTRING() throws RecognitionException {
try {
int _type = STRING;
int _channel = DEFAULT_TOKEN_CHANNEL;
// MessageFilter.g:216:5: ( '\"' ( ESC_SEQ |~ ( '\\\\' | '\"' ) )* '\"' )
// MessageFilter.g:216:8: '\"' ( ESC_SEQ |~ ( '\\\\' | '\"' ) )* '\"'
{
match('\"');
// MessageFilter.g:216:12: ( ESC_SEQ |~ ( '\\\\' | '\"' ) )*
loop16:
do {
int alt16=3;
int LA16_0 = input.LA(1);
if ( (LA16_0=='\\') ) {
alt16=1;
}
else if ( ((LA16_0 >= '\u0000' && LA16_0 <= '!')||(LA16_0 >= '#' && LA16_0 <= '[')||(LA16_0 >= ']' && LA16_0 <= '\uFFFF')) ) {
alt16=2;
}
switch (alt16) {
case 1 :
// MessageFilter.g:216:14: ESC_SEQ
{
mESC_SEQ();
}
break;
case 2 :
// MessageFilter.g:216:24: ~ ( '\\\\' | '\"' )
{
if ( (input.LA(1) >= '\u0000' && input.LA(1) <= '!')||(input.LA(1) >= '#' && input.LA(1) <= '[')||(input.LA(1) >= ']' && input.LA(1) <= '\uFFFF') ) {
input.consume();
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
recover(mse);
throw mse;
}
}
break;
default :
break loop16;
}
} while (true);
match('\"');
setText(getText().substring(1, getText().length()-1));
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "STRING"
// $ANTLR start "HEX_DIGIT"
public final void mHEX_DIGIT() throws RecognitionException {
try {
// MessageFilter.g:221:11: ( ( '0' .. '9' | 'a' .. 'f' | 'A' .. 'F' ) )
// MessageFilter.g:
{
if ( (input.LA(1) >= '0' && input.LA(1) <= '9')||(input.LA(1) >= 'A' && input.LA(1) <= 'F')||(input.LA(1) >= 'a' && input.LA(1) <= 'f') ) {
input.consume();
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
recover(mse);
throw mse;
}
}
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "HEX_DIGIT"
// $ANTLR start "ESC_SEQ"
public final void mESC_SEQ() throws RecognitionException {
try {
// MessageFilter.g:225:5: ( '\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\"' | '\\'' | '\\\\' ) | UNICODE_ESC | OCTAL_ESC )
int alt17=3;
int LA17_0 = input.LA(1);
if ( (LA17_0=='\\') ) {
switch ( input.LA(2) ) {
case '\"':
case '\'':
case '\\':
case 'b':
case 'f':
case 'n':
case 'r':
case 't':
{
alt17=1;
}
break;
case 'u':
{
alt17=2;
}
break;
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
{
alt17=3;
}
break;
default:
NoViableAltException nvae =
new NoViableAltException("", 17, 1, input);
throw nvae;
}
}
else {
NoViableAltException nvae =
new NoViableAltException("", 17, 0, input);
throw nvae;
}
switch (alt17) {
case 1 :
// MessageFilter.g:225:9: '\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\"' | '\\'' | '\\\\' )
{
match('\\');
if ( input.LA(1)=='\"'||input.LA(1)=='\''||input.LA(1)=='\\'||input.LA(1)=='b'||input.LA(1)=='f'||input.LA(1)=='n'||input.LA(1)=='r'||input.LA(1)=='t' ) {
input.consume();
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
recover(mse);
throw mse;
}
}
break;
case 2 :
// MessageFilter.g:226:9: UNICODE_ESC
{
mUNICODE_ESC();
}
break;
case 3 :
// MessageFilter.g:227:9: OCTAL_ESC
{
mOCTAL_ESC();
}
break;
}
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "ESC_SEQ"
// $ANTLR start "OCTAL_ESC"
public final void mOCTAL_ESC() throws RecognitionException {
try {
// MessageFilter.g:232:5: ( '\\\\' ( '0' .. '3' ) ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) )
int alt18=3;
int LA18_0 = input.LA(1);
if ( (LA18_0=='\\') ) {
int LA18_1 = input.LA(2);
if ( ((LA18_1 >= '0' && LA18_1 <= '3')) ) {
int LA18_2 = input.LA(3);
if ( ((LA18_2 >= '0' && LA18_2 <= '7')) ) {
int LA18_4 = input.LA(4);
if ( ((LA18_4 >= '0' && LA18_4 <= '7')) ) {
alt18=1;
}
else {
alt18=2;
}
}
else {
alt18=3;
}
}
else if ( ((LA18_1 >= '4' && LA18_1 <= '7')) ) {
int LA18_3 = input.LA(3);
if ( ((LA18_3 >= '0' && LA18_3 <= '7')) ) {
alt18=2;
}
else {
alt18=3;
}
}
else {
NoViableAltException nvae =
new NoViableAltException("", 18, 1, input);
throw nvae;
}
}
else {
NoViableAltException nvae =
new NoViableAltException("", 18, 0, input);
throw nvae;
}
switch (alt18) {
case 1 :
// MessageFilter.g:232:9: '\\\\' ( '0' .. '3' ) ( '0' .. '7' ) ( '0' .. '7' )
{
match('\\');
if ( (input.LA(1) >= '0' && input.LA(1) <= '3') ) {
input.consume();
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
recover(mse);
throw mse;
}
if ( (input.LA(1) >= '0' && input.LA(1) <= '7') ) {
input.consume();
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
recover(mse);
throw mse;
}
if ( (input.LA(1) >= '0' && input.LA(1) <= '7') ) {
input.consume();
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
recover(mse);
throw mse;
}
}
break;
case 2 :
// MessageFilter.g:233:9: '\\\\' ( '0' .. '7' ) ( '0' .. '7' )
{
match('\\');
if ( (input.LA(1) >= '0' && input.LA(1) <= '7') ) {
input.consume();
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
recover(mse);
throw mse;
}
if ( (input.LA(1) >= '0' && input.LA(1) <= '7') ) {
input.consume();
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
recover(mse);
throw mse;
}
}
break;
case 3 :
// MessageFilter.g:234:9: '\\\\' ( '0' .. '7' )
{
match('\\');
if ( (input.LA(1) >= '0' && input.LA(1) <= '7') ) {
input.consume();
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
recover(mse);
throw mse;
}
}
break;
}
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "OCTAL_ESC"
// $ANTLR start "UNICODE_ESC"
public final void mUNICODE_ESC() throws RecognitionException {
try {
// MessageFilter.g:239:5: ( '\\\\' 'u' HEX_DIGIT HEX_DIGIT HEX_DIGIT HEX_DIGIT )
// MessageFilter.g:239:9: '\\\\' 'u' HEX_DIGIT HEX_DIGIT HEX_DIGIT HEX_DIGIT
{
match('\\');
match('u');
mHEX_DIGIT();
mHEX_DIGIT();
mHEX_DIGIT();
mHEX_DIGIT();
}
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "UNICODE_ESC"
// $ANTLR start "EXPONENT"
public final void mEXPONENT() throws RecognitionException {
try {
// MessageFilter.g:243:10: ( ( 'e' | 'E' ) ( '+' | '-' )? ( '0' .. '9' )+ )
// MessageFilter.g:243:12: ( 'e' | 'E' ) ( '+' | '-' )? ( '0' .. '9' )+
{
if ( input.LA(1)=='E'||input.LA(1)=='e' ) {
input.consume();
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
recover(mse);
throw mse;
}
// MessageFilter.g:243:22: ( '+' | '-' )?
int alt19=2;
int LA19_0 = input.LA(1);
if ( (LA19_0=='+'||LA19_0=='-') ) {
alt19=1;
}
switch (alt19) {
case 1 :
// MessageFilter.g:
{
if ( input.LA(1)=='+'||input.LA(1)=='-' ) {
input.consume();
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
recover(mse);
throw mse;
}
}
break;
}
// MessageFilter.g:243:33: ( '0' .. '9' )+
int cnt20=0;
loop20:
do {
int alt20=2;
int LA20_0 = input.LA(1);
if ( ((LA20_0 >= '0' && LA20_0 <= '9')) ) {
alt20=1;
}
switch (alt20) {
case 1 :
// MessageFilter.g:
{
if ( (input.LA(1) >= '0' && input.LA(1) <= '9') ) {
input.consume();
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
recover(mse);
throw mse;
}
}
break;
default :
if ( cnt20 >= 1 ) break loop20;
EarlyExitException eee =
new EarlyExitException(20, input);
throw eee;
}
cnt20++;
} while (true);
}
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "EXPONENT"
public void mTokens() throws RecognitionException {
// MessageFilter.g:1:8: ( AND | BETWEEN | EQUALS | EXISTS | FALSE | GE | GT | IN | IS | LE | LT | MATCHES | NOT | NOT_EQUALS | NULL | OR | TIME_MILLIS_FUN_NAME | TIME_STRING_FUN_NAME | TRUE | XPATH_FUN_NAME | T__33 | T__34 | T__35 | NUMBER | COMMENT | WS | STRING )
int alt21=27;
switch ( input.LA(1) ) {
case 'a':
{
alt21=1;
}
break;
case 'b':
{
alt21=2;
}
break;
case '=':
{
int LA21_3 = input.LA(2);
if ( (LA21_3=='~') ) {
alt21=12;
}
else {
alt21=3;
}
}
break;
case 'e':
{
alt21=4;
}
break;
case 'f':
{
alt21=5;
}
break;
case '>':
{
int LA21_6 = input.LA(2);
if ( (LA21_6=='=') ) {
alt21=6;
}
else {
alt21=7;
}
}
break;
case 'i':
{
int LA21_7 = input.LA(2);
if ( (LA21_7=='n') ) {
alt21=8;
}
else if ( (LA21_7=='s') ) {
alt21=9;
}
else {
NoViableAltException nvae =
new NoViableAltException("", 21, 7, input);
throw nvae;
}
}
break;
case '<':
{
int LA21_8 = input.LA(2);
if ( (LA21_8=='=') ) {
alt21=10;
}
else {
alt21=11;
}
}
break;
case 'n':
{
int LA21_9 = input.LA(2);
if ( (LA21_9=='o') ) {
alt21=13;
}
else if ( (LA21_9=='u') ) {
alt21=15;
}
else {
NoViableAltException nvae =
new NoViableAltException("", 21, 9, input);
throw nvae;
}
}
break;
case '!':
{
alt21=14;
}
break;
case 'o':
{
alt21=16;
}
break;
case 't':
{
int LA21_12 = input.LA(2);
if ( (LA21_12=='i') ) {
int LA21_31 = input.LA(3);
if ( (LA21_31=='m') ) {
int LA21_33 = input.LA(4);
if ( (LA21_33=='e') ) {
int LA21_34 = input.LA(5);
if ( (LA21_34=='-') ) {
int LA21_35 = input.LA(6);
if ( (LA21_35=='m') ) {
alt21=17;
}
else if ( (LA21_35=='s') ) {
alt21=18;
}
else {
NoViableAltException nvae =
new NoViableAltException("", 21, 35, input);
throw nvae;
}
}
else {
NoViableAltException nvae =
new NoViableAltException("", 21, 34, input);
throw nvae;
}
}
else {
NoViableAltException nvae =
new NoViableAltException("", 21, 33, input);
throw nvae;
}
}
else {
NoViableAltException nvae =
new NoViableAltException("", 21, 31, input);
throw nvae;
}
}
else if ( (LA21_12=='r') ) {
alt21=19;
}
else {
NoViableAltException nvae =
new NoViableAltException("", 21, 12, input);
throw nvae;
}
}
break;
case 'x':
{
alt21=20;
}
break;
case '(':
{
alt21=21;
}
break;
case ')':
{
alt21=22;
}
break;
case ',':
{
alt21=23;
}
break;
case '+':
case '-':
case '.':
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
{
alt21=24;
}
break;
case '/':
{
alt21=25;
}
break;
case '\t':
case '\n':
case '\r':
case ' ':
{
alt21=26;
}
break;
case '\"':
{
alt21=27;
}
break;
default:
NoViableAltException nvae =
new NoViableAltException("", 21, 0, input);
throw nvae;
}
switch (alt21) {
case 1 :
// MessageFilter.g:1:10: AND
{
mAND();
}
break;
case 2 :
// MessageFilter.g:1:14: BETWEEN
{
mBETWEEN();
}
break;
case 3 :
// MessageFilter.g:1:22: EQUALS
{
mEQUALS();
}
break;
case 4 :
// MessageFilter.g:1:29: EXISTS
{
mEXISTS();
}
break;
case 5 :
// MessageFilter.g:1:36: FALSE
{
mFALSE();
}
break;
case 6 :
// MessageFilter.g:1:42: GE
{
mGE();
}
break;
case 7 :
// MessageFilter.g:1:45: GT
{
mGT();
}
break;
case 8 :
// MessageFilter.g:1:48: IN
{
mIN();
}
break;
case 9 :
// MessageFilter.g:1:51: IS
{
mIS();
}
break;
case 10 :
// MessageFilter.g:1:54: LE
{
mLE();
}
break;
case 11 :
// MessageFilter.g:1:57: LT
{
mLT();
}
break;
case 12 :
// MessageFilter.g:1:60: MATCHES
{
mMATCHES();
}
break;
case 13 :
// MessageFilter.g:1:68: NOT
{
mNOT();
}
break;
case 14 :
// MessageFilter.g:1:72: NOT_EQUALS
{
mNOT_EQUALS();
}
break;
case 15 :
// MessageFilter.g:1:83: NULL
{
mNULL();
}
break;
case 16 :
// MessageFilter.g:1:88: OR
{
mOR();
}
break;
case 17 :
// MessageFilter.g:1:91: TIME_MILLIS_FUN_NAME
{
mTIME_MILLIS_FUN_NAME();
}
break;
case 18 :
// MessageFilter.g:1:112: TIME_STRING_FUN_NAME
{
mTIME_STRING_FUN_NAME();
}
break;
case 19 :
// MessageFilter.g:1:133: TRUE
{
mTRUE();
}
break;
case 20 :
// MessageFilter.g:1:138: XPATH_FUN_NAME
{
mXPATH_FUN_NAME();
}
break;
case 21 :
// MessageFilter.g:1:153: T__33
{
mT__33();
}
break;
case 22 :
// MessageFilter.g:1:159: T__34
{
mT__34();
}
break;
case 23 :
// MessageFilter.g:1:165: T__35
{
mT__35();
}
break;
case 24 :
// MessageFilter.g:1:171: NUMBER
{
mNUMBER();
}
break;
case 25 :
// MessageFilter.g:1:178: COMMENT
{
mCOMMENT();
}
break;
case 26 :
// MessageFilter.g:1:186: WS
{
mWS();
}
break;
case 27 :
// MessageFilter.g:1:189: STRING
{
mSTRING();
}
break;
}
}
protected DFA11 dfa11 = new DFA11(this);
static final String DFA11_eotS =
"\2\uffff\1\4\3\uffff";
static final String DFA11_eofS =
"\6\uffff";
static final String DFA11_minS =
"\1\53\1\56\1\60\3\uffff";
static final String DFA11_maxS =
"\2\71\1\145\3\uffff";
static final String DFA11_acceptS =
"\3\uffff\1\2\1\1\1\3";
static final String DFA11_specialS =
"\6\uffff}>";
static final String[] DFA11_transitionS = {
"\1\1\1\uffff\1\1\1\3\1\uffff\12\2",
"\1\3\1\uffff\12\2",
"\12\2\13\uffff\1\5\37\uffff\1\5",
"",
"",
""
};
static final short[] DFA11_eot = DFA.unpackEncodedString(DFA11_eotS);
static final short[] DFA11_eof = DFA.unpackEncodedString(DFA11_eofS);
static final char[] DFA11_min = DFA.unpackEncodedStringToUnsignedChars(DFA11_minS);
static final char[] DFA11_max = DFA.unpackEncodedStringToUnsignedChars(DFA11_maxS);
static final short[] DFA11_accept = DFA.unpackEncodedString(DFA11_acceptS);
static final short[] DFA11_special = DFA.unpackEncodedString(DFA11_specialS);
static final short[][] DFA11_transition;
static {
int numStates = DFA11_transitionS.length;
DFA11_transition = new short[numStates][];
for (int i=0; i<numStates; i++) {
DFA11_transition[i] = DFA.unpackEncodedString(DFA11_transitionS[i]);
}
}
    // NOTE: machine-generated by ANTLR from MessageFilter.g — do not edit by hand.
    // DFA for lexer decision 11 (the NUMBER rule); the packed tables above
    // (DFA11_eot/eof/min/max/accept/special/transition) drive the state machine.
    class DFA11 extends DFA {

        public DFA11(BaseRecognizer recognizer) {
            this.recognizer = recognizer;
            this.decisionNumber = 11;
            this.eot = DFA11_eot;
            this.eof = DFA11_eof;
            this.min = DFA11_min;
            this.max = DFA11_max;
            this.accept = DFA11_accept;
            this.special = DFA11_special;
            this.transition = DFA11_transition;
        }
        // Human-readable description of the grammar decision this DFA implements.
        public String getDescription() {
            return "197:1: NUMBER : ( ( '+' | '-' )? ( '0' .. '9' )+ ( '.' ( '0' .. '9' )* ( EXPONENT )? )? | ( '+' | '-' )? '.' ( '0' .. '9' )+ ( EXPONENT )? | ( '+' | '-' )? ( '0' .. '9' )+ EXPONENT );";
        }
    }
}
| 1,535 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/lang/NumberTreeNode.java
|
package com.netflix.suro.routing.filter.lang;
import org.antlr.runtime.Token;
import org.antlr.runtime.tree.Tree;
/**
 * AST node representing a numeric literal in the message-filter language.
 * All numbers are exposed uniformly as {@link Double} values parsed from
 * the token text.
 */
public class NumberTreeNode extends MessageFilterBaseTreeNode implements ValueTreeNode {

    /**
     * @return the token text parsed as a {@link Double}.
     * @throws NumberFormatException if the token text is not a valid number
     *         (should not happen for tokens produced by the lexer's NUMBER rule).
     */
    @Override
    public Number getValue() {
        return Double.valueOf(getText());
    }

    public NumberTreeNode(Token t) {
        super(t);
    }

    /** Copy constructor used by {@link #dupNode()}. */
    public NumberTreeNode(NumberTreeNode node) {
        super(node);
    }

    @Override
    public Tree dupNode() {
        return new NumberTreeNode(this);
    }
}
| 1,536 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/lang/AndTreeNode.java
|
package com.netflix.suro.routing.filter.lang;
import com.google.common.base.Function;
import com.google.common.collect.Lists;
import com.netflix.suro.routing.filter.MessageFilters;
import com.netflix.suro.routing.filter.MessageFilter;
import org.antlr.runtime.Token;
import org.antlr.runtime.tree.Tree;
/**
 * AST node for the logical AND operator. Translates to a conjunction of the
 * filters produced by translating each child node.
 */
public class AndTreeNode extends MessageFilterBaseTreeNode implements MessageFilterTranslatable {

    /**
     * Builds a composite AND filter from the children. Note that
     * {@link Lists#transform} returns a lazy view, so each child is translated
     * when the resulting filter's child list is first iterated.
     */
    @Override
    @SuppressWarnings("unchecked")
    public MessageFilter translate() {
        return MessageFilters.and(
            Lists.transform(getChildren(), new Function<Object, MessageFilter>() {
                @Override
                public MessageFilter apply(Object input) {
                    // Children of a boolean operator are always translatable nodes.
                    MessageFilterTranslatable node = (MessageFilterTranslatable) input;
                    return node.translate();
                }
            })
        );
    }

    public AndTreeNode(Token t) {
        super(t);
    }

    /** Copy constructor used by {@link #dupNode()}. */
    public AndTreeNode(AndTreeNode node) {
        super(node);
    }

    @Override
    public Tree dupNode() {
        return new AndTreeNode(this);
    }
}
| 1,537 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/lang/ComparableTreeNode.java
|
package com.netflix.suro.routing.filter.lang;
import com.netflix.suro.routing.filter.*;
import org.antlr.runtime.Token;
import org.antlr.runtime.tree.Tree;
import static com.netflix.suro.routing.filter.lang.TreeNodeUtil.getXPath;
import static com.netflix.suro.routing.filter.lang.MessageFilterParser.*;
/**
 * AST node for a comparison operator (e.g. {@code >}, {@code <=}) applied to
 * an xpath on the left and a comparable value on the right. The right-hand
 * side may be a numeric literal, a time-millis() call, or a time-string() call.
 */
public class ComparableTreeNode extends MessageFilterBaseTreeNode implements MessageFilterTranslatable {

    /**
     * Translates this comparison into a {@link PathValueMessageFilter} whose
     * predicate type depends on the kind of the right-hand operand.
     *
     * @throws UnexpectedTokenException if the operand is not a number,
     *         time-millis or time-string expression.
     */
    @Override
    public MessageFilter translate() {
        final String path = getXPath(getChild(0));
        final Tree operand = getChild(1);
        // The comparison operator itself (the token this node was built from).
        final String operator = getToken().getText();

        final int operandType = operand.getType();
        if (operandType == NUMBER) {
            Number number = (Number) ((ValueTreeNode) operand).getValue();
            return new PathValueMessageFilter(path, new NumericValuePredicate(number, operator));
        }
        if (operandType == TIME_MILLIS_FUN_NAME) {
            TimeMillisValueTreeNode millisNode = (TimeMillisValueTreeNode) operand;
            return new PathValueMessageFilter(
                path,
                new TimeMillisValuePredicate(
                    millisNode.getValueFormat(),
                    millisNode.getValue(),
                    operator));
        }
        if (operandType == TIME_STRING_FUN_NAME) {
            TimeStringValueTreeNode stringNode = (TimeStringValueTreeNode) operand;
            return new PathValueMessageFilter(
                path,
                new TimeStringValuePredicate(
                    stringNode.getValueTimeFormat(),
                    stringNode.getInputTimeFormat(),
                    stringNode.getValue(),
                    operator));
        }
        throw new UnexpectedTokenException(operand, "Number", "time-millis", "time-string");
    }

    public ComparableTreeNode(Token t) {
        super(t);
    }

    /** Copy constructor used by {@link #dupNode()}. */
    public ComparableTreeNode(ComparableTreeNode node) {
        super(node);
    }

    @Override
    public Tree dupNode() {
        return new ComparableTreeNode(this);
    }
}
| 1,538 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/lang/OrTreeNode.java
|
package com.netflix.suro.routing.filter.lang;
import com.google.common.base.Function;
import com.google.common.collect.Lists;
import com.netflix.suro.routing.filter.MessageFilters;
import com.netflix.suro.routing.filter.MessageFilter;
import org.antlr.runtime.Token;
import org.antlr.runtime.tree.CommonTree;
import org.antlr.runtime.tree.Tree;
/**
 * AST node for the logical OR operator. Translates to a disjunction of the
 * filters produced by translating each child node.
 *
 * Now extends {@link MessageFilterBaseTreeNode} for consistency with the
 * sibling operator nodes (e.g. AndTreeNode), which also removes the
 * hand-duplicated {@code toString()} the base class already provides.
 */
public class OrTreeNode extends MessageFilterBaseTreeNode implements MessageFilterTranslatable {

    /**
     * Builds a composite OR filter from the children. {@link Lists#transform}
     * returns a lazy view, so each child is translated when the resulting
     * filter's child list is first iterated.
     */
    @Override
    @SuppressWarnings("unchecked")
    public MessageFilter translate() {
        return MessageFilters.or(
            Lists.transform(getChildren(), new Function<Object, MessageFilter>() {
                @Override
                public MessageFilter apply(Object input) {
                    // Children of a boolean operator are always translatable nodes.
                    MessageFilterTranslatable node = (MessageFilterTranslatable) input;
                    return node.translate();
                }
            })
        );
    }

    public OrTreeNode(Token t) {
        super(t);
    }

    /** Copy constructor used by {@link #dupNode()}. */
    public OrTreeNode(OrTreeNode node) {
        super(node);
    }

    @Override
    public Tree dupNode() {
        return new OrTreeNode(this);
    }
}
| 1,539 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/lang/FalseValueTreeNode.java
|
package com.netflix.suro.routing.filter.lang;
import com.netflix.suro.routing.filter.MessageFilters;
import com.netflix.suro.routing.filter.MessageFilter;
import org.antlr.runtime.Token;
import org.antlr.runtime.tree.Tree;
/**
 * AST node for the boolean literal {@code false}. As a value it yields
 * {@link Boolean#FALSE}; as a filter it translates to the always-false filter.
 */
public class FalseValueTreeNode extends MessageFilterBaseTreeNode implements ValueTreeNode, MessageFilterTranslatable {

    /** @return {@link Boolean#FALSE}, always. */
    @Override
    public Object getValue() {
        return Boolean.FALSE;
    }

    public FalseValueTreeNode(Token t) {
        super(t);
    }

    /** Copy constructor used by {@link #dupNode()}. */
    public FalseValueTreeNode(FalseValueTreeNode node) {
        super(node);
    }

    @Override
    public Tree dupNode() {
        return new FalseValueTreeNode(this);
    }

    /** @return a filter that rejects every message. */
    @Override
    public MessageFilter translate() {
        return MessageFilters.alwaysFalse();
    }
}
| 1,540 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/routing/filter/lang/ExistsTreeNode.java
|
package com.netflix.suro.routing.filter.lang;
import com.netflix.suro.routing.filter.PathExistsMessageFilter;
import com.netflix.suro.routing.filter.MessageFilter;
import org.antlr.runtime.Token;
import org.antlr.runtime.tree.Tree;
import static com.netflix.suro.routing.filter.lang.TreeNodeUtil.getXPath;
/**
 * AST node for the {@code exists} predicate: matches messages where the
 * xpath given as the single child resolves to a value.
 */
public class ExistsTreeNode extends MessageFilterBaseTreeNode implements MessageFilterTranslatable {

    /** @return a filter that accepts a message iff the child xpath exists in it. */
    @Override
    public MessageFilter translate() {
        return new PathExistsMessageFilter(getXPath(getChild(0)));
    }

    public ExistsTreeNode(Token t) {
        super(t);
    }

    /** Copy constructor used by {@link #dupNode()}. */
    public ExistsTreeNode(ExistsTreeNode node) {
        super(node);
    }

    @Override
    public Tree dupNode() {
        return new ExistsTreeNode(this);
    }
}
| 1,541 |
0 |
Create_ds/suro/suro-core/src/main/java/com/netflix/suro
|
Create_ds/suro/suro-core/src/main/java/com/netflix/suro/jackson/DefaultObjectMapper.java
|
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.suro.jackson;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.*;
import com.fasterxml.jackson.databind.jsontype.NamedType;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.fasterxml.jackson.databind.ser.std.ToStringSerializer;
import com.fasterxml.jackson.datatype.guava.GuavaModule;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.Singleton;
import com.google.inject.name.Names;
import com.netflix.suro.TypeHolder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.nio.ByteOrder;
import java.util.Set;
/**
* The default {@link ObjectMapper} used for serializing and de-serializing JSON objects.
*/
@Singleton
public class DefaultObjectMapper extends ObjectMapper {
    private static final Logger LOG = LoggerFactory.getLogger(DefaultObjectMapper.class);

    /** No-injection constructor; Guice-based injectable values are disabled. */
    public DefaultObjectMapper() {
        this(null, null);
    }

    /**
     * Configures the mapper with:
     * <ul>
     *   <li>ByteOrder (de)serialization support;</li>
     *   <li>Guava type support;</li>
     *   <li>Jackson injectable values resolved from the Guice injector —
     *       first by {@code @Named(valueId)} binding, then by raw type, then null;</li>
     *   <li>annotation-driven (rather than reflective) property detection;</li>
     *   <li>registration of named subtypes for polymorphic deserialization.</li>
     * </ul>
     *
     * @param injector         Guice injector used to resolve {@code @JacksonInject}
     *                         values; may be null to disable injection.
     * @param crossInjectable  named subtypes to register; may be null.
     */
    @Inject
    public DefaultObjectMapper(final Injector injector, Set<TypeHolder> crossInjectable)
    {
        SimpleModule serializerModule = new SimpleModule("SuroServer default serializers");
        serializerModule.addSerializer(ByteOrder.class, ToStringSerializer.instance);
        serializerModule.addDeserializer(
                ByteOrder.class,
                new JsonDeserializer<ByteOrder>()
                {
                    @Override
                    public ByteOrder deserialize(
                            JsonParser jp, DeserializationContext ctxt
                    ) throws IOException, JsonProcessingException
                    {
                        // Anything that is not the BIG_ENDIAN string falls back to LITTLE_ENDIAN.
                        if (ByteOrder.BIG_ENDIAN.toString().equals(jp.getText())) {
                            return ByteOrder.BIG_ENDIAN;
                        }
                        return ByteOrder.LITTLE_ENDIAN;
                    }
                }
        );
        registerModule(serializerModule);
        registerModule(new GuavaModule());

        if (injector != null) {
            setInjectableValues(new InjectableValues() {
                @Override
                public Object findInjectableValue(
                        Object valueId,
                        DeserializationContext ctxt,
                        BeanProperty forProperty,
                        Object beanInstance
                ) {
                    // Parameterized logging avoids string concatenation on every lookup.
                    LOG.info("Looking for {}", valueId);
                    try {
                        // Prefer a binding annotated with @Named(valueId).
                        return injector.getInstance(Key.get(forProperty.getType().getRawClass(), Names.named((String) valueId)));
                    } catch (Exception e) {
                        LOG.debug("No named binding for {}; falling back to raw type lookup", valueId, e);
                        try {
                            return injector.getInstance(forProperty.getType().getRawClass());
                        } catch (Exception ex) {
                            // Deliberate best-effort: absent bindings resolve to null rather than failing.
                            LOG.info("No implementation found for {}, returning null", valueId);
                        }
                        return null;
                    }
                }
            });
        }

        configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
        configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        // Disable reflective auto-detection; properties must be explicitly annotated.
        configure(MapperFeature.AUTO_DETECT_GETTERS, false);
        configure(MapperFeature.AUTO_DETECT_CREATORS, false);
        configure(MapperFeature.AUTO_DETECT_FIELDS, false);
        configure(MapperFeature.AUTO_DETECT_IS_GETTERS, false);
        configure(MapperFeature.AUTO_DETECT_SETTERS, false);
        configure(SerializationFeature.INDENT_OUTPUT, false);

        if (crossInjectable != null) {
            for (TypeHolder entry : crossInjectable) {
                LOG.info("Registering subtype : {} -> {}", entry.getName(), entry.getRawType().getCanonicalName());
                registerSubtypes(new NamedType(entry.getRawType(), entry.getName()));
            }
        }
    }
}
| 1,542 |
0 |
Create_ds/suro/suro-kafka-consumer/src/test/java/com/netflix/suro/input
|
Create_ds/suro/suro-kafka-consumer/src/test/java/com/netflix/suro/input/kafka/TestKafkaConsumer.java
|
package com.netflix.suro.input.kafka;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableMap;
import com.netflix.suro.input.SuroInput;
import com.netflix.suro.jackson.DefaultObjectMapper;
import com.netflix.suro.message.MessageContainer;
import com.netflix.suro.routing.MessageRouter;
import com.netflix.suro.sink.kafka.KafkaServerExternalResource;
import com.netflix.suro.sink.kafka.ZkExternalResource;
import kafka.admin.TopicCommand;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.rules.RuleChain;
import org.junit.rules.TestRule;
import org.mockito.ArgumentCaptor;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import java.util.Properties;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import static org.junit.Assert.*;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.*;
/**
 * Integration test for {@link KafkaConsumer}: spins up an embedded ZK +
 * two-broker Kafka cluster, produces messages, and verifies routing and the
 * pause/back-pressure behavior.
 */
public class TestKafkaConsumer {
    public static ZkExternalResource zk = new ZkExternalResource();
    public static KafkaServerExternalResource kafkaServer = new KafkaServerExternalResource(zk);

    // ZK must start before (and stop after) the brokers.
    @ClassRule
    public static TestRule chain = RuleChain
            .outerRule(zk)
            .around(kafkaServer);

    private static final String TOPIC_NAME = "testkafkaconsumer";

    @Test
    public void test() throws Exception {
        int numPartitions = 6;
        int messageCount = 10;

        TopicCommand.createTopic(zk.getZkClient(),
                new TopicCommand.TopicCommandOptions(new String[]{
                        "--zookeeper", "dummy", "--create", "--topic", TOPIC_NAME,
                        "--replication-factor", "2", "--partitions", Integer.toString(numPartitions)}));

        ObjectMapper jsonMapper = new DefaultObjectMapper();

        sendKafkaMessage(kafkaServer.getBrokerListStr(), TOPIC_NAME, numPartitions, messageCount);

        final CountDownLatch latch = new CountDownLatch(numPartitions * messageCount);
        MessageRouter router = mock(MessageRouter.class);
        doAnswer(new Answer() {
            @Override
            public Object answer(InvocationOnMock invocation) throws Throwable {
                latch.countDown();
                return null;
            }
        }).when(router).process(any(SuroInput.class), any(MessageContainer.class));

        Properties properties = new Properties();
        properties.setProperty("group.id", "testkafkaconsumer");
        properties.setProperty("zookeeper.connect", zk.getConnectionString());
        properties.setProperty("auto.offset.reset", "smallest");

        // consumer.timeout.ms is mandatory: construction must fail without it.
        try {
            new KafkaConsumer(properties, TOPIC_NAME, numPartitions, router, jsonMapper);
            fail("should have failed without timeout");
        } catch (Exception e) {
            // expected: missing consumer.timeout.ms
        }
        properties.setProperty("consumer.timeout.ms", "1000");
        KafkaConsumer consumer = new KafkaConsumer(properties, TOPIC_NAME, numPartitions, router, jsonMapper);
        KafkaConsumer.MAX_PAUSE = 10000; // for testing
        consumer.start();

        latch.await(1000 * 5, TimeUnit.MILLISECONDS);

        ArgumentCaptor<MessageContainer> msgContainers = ArgumentCaptor.forClass(MessageContainer.class);
        verify(router, times(numPartitions * messageCount)).process(any(SuroInput.class), msgContainers.capture());
        for (MessageContainer container : msgContainers.getAllValues()) {
            assertEquals(container.getRoutingKey(), TOPIC_NAME);
            assertTrue(container.getEntity(String.class).startsWith("testMessage"));
        }

        // Second round: pause the consumer and check consumption is delayed
        // by at least the pause duration.
        final CountDownLatch latch1 = new CountDownLatch(numPartitions * messageCount);
        doAnswer(new Answer() {
            @Override
            public Object answer(InvocationOnMock invocation) throws Throwable {
                latch1.countDown();
                return null;
            }
        }).when(router).process(any(SuroInput.class), any(MessageContainer.class));

        long pauseTime = 5000;
        consumer.setPause(pauseTime);
        long start = System.currentTimeMillis();
        sendKafkaMessage(kafkaServer.getBrokerListStr(), TOPIC_NAME, numPartitions, messageCount);
        latch1.await(1000 * 5 + pauseTime, TimeUnit.MILLISECONDS);
        long end = System.currentTimeMillis();

        assertTrue(end - start > pauseTime);

        msgContainers = ArgumentCaptor.forClass(MessageContainer.class);
        verify(router, times(numPartitions * messageCount * 2)).process(any(SuroInput.class), msgContainers.capture());
        for (MessageContainer container : msgContainers.getAllValues()) {
            assertEquals(container.getRoutingKey(), TOPIC_NAME);
            assertTrue(container.getEntity(String.class).startsWith("testMessage"));
        }

        consumer.shutdown();
    }

    /**
     * Produces {@code messageCount} messages to every one of the first
     * {@code partitionCount} partitions of {@code topicName}.
     */
    public static void sendKafkaMessage(String brokerList, String topicName, int partitionCount, int messageCount) throws java.io.IOException, InterruptedException {
        KafkaProducer<byte[], byte[]> producer = new KafkaProducer<>
                (new ImmutableMap.Builder<String, Object>()
                        .put("client.id", "kakasink")
                        .put("bootstrap.servers", brokerList).build(),
                        new ByteArraySerializer(), new ByteArraySerializer());
        try {
            for (int i = 0; i < messageCount; ++i) {
                for (int j = 0; j < partitionCount; ++j) {
                    // Explicit UTF-8: getBytes() without a charset uses the platform default.
                    producer.send(new ProducerRecord<byte[], byte[]>(topicName, j, null, "testMessage1".getBytes("UTF-8")));
                }
            }
        } finally {
            // close() flushes buffered sends so every message reaches the broker
            // before the caller starts asserting, and releases producer threads.
            producer.close();
        }
    }
}
| 1,543 |
0 |
Create_ds/suro/suro-kafka-consumer/src/test/java/com/netflix/suro/sink
|
Create_ds/suro/suro-kafka-consumer/src/test/java/com/netflix/suro/sink/kafka/ZkExternalResource.java
|
package com.netflix.suro.sink.kafka;
import org.I0Itec.zkclient.ZkClient;
import org.I0Itec.zkclient.exception.ZkMarshallingError;
import org.I0Itec.zkclient.serialize.ZkSerializer;
import org.apache.curator.test.TestingServer;
import org.junit.rules.ExternalResource;
import org.junit.rules.TemporaryFolder;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
/**
 * JUnit external resource that runs an in-process ZooKeeper test server and a
 * connected {@link ZkClient} (string-serializing) for the duration of a test.
 */
public class ZkExternalResource extends ExternalResource {
    private TestingServer zkServer;
    private ZkClient      zkClient;
    private TemporaryFolder tempDir = new TemporaryFolder();

    @Override
    protected void before() throws Throwable {
        tempDir.create();
        zkServer = new TestingServer();
        // Client (de)serializes znode data as UTF-8 strings.
        zkClient = new ZkClient("localhost:" + zkServer.getPort(), 20000, 20000, new ZkSerializer() {
            @Override
            public byte[] serialize(Object data) throws ZkMarshallingError {
                try {
                    return ((String) data).getBytes("UTF-8");
                } catch (UnsupportedEncodingException e) {
                    throw new RuntimeException(e);
                }
            }

            @Override
            public Object deserialize(byte[] bytes) throws ZkMarshallingError {
                if (bytes == null)
                    return null;
                try {
                    return new String(bytes, "UTF-8");
                } catch (UnsupportedEncodingException e) {
                    throw new RuntimeException(e);
                }
            }
        });
    }

    @Override
    protected void after() {
        // Close the client before shutting down the server it talks to.
        if (zkClient != null) {
            try {
                zkClient.close();
            } catch (RuntimeException e) {
                e.printStackTrace();
            }
        }
        if (zkServer != null) {
            try {
                zkServer.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        tempDir.delete();
    }

    public ZkClient getZkClient() {
        return zkClient;
    }

    public int getServerPort() { return zkServer.getPort(); }

    public String getConnectionString() { return zkServer.getConnectString(); }
}
| 1,544 |
0 |
Create_ds/suro/suro-kafka-consumer/src/test/java/com/netflix/suro/sink
|
Create_ds/suro/suro-kafka-consumer/src/test/java/com/netflix/suro/sink/kafka/KafkaServerExternalResource.java
|
package com.netflix.suro.sink.kafka;
import com.google.common.collect.Lists;
import kafka.server.KafkaConfig;
import kafka.server.KafkaServer;
import org.apache.commons.lang.StringUtils;
import org.junit.rules.ExternalResource;
import org.junit.rules.TemporaryFolder;
import java.io.IOException;
import java.net.ServerSocket;
import java.util.List;
import java.util.Properties;
/**
 * JUnit external resource that runs a two-broker embedded Kafka cluster
 * backed by the supplied {@link ZkExternalResource}. Brokers listen on
 * ephemeral ports chosen at startup.
 */
public class KafkaServerExternalResource extends ExternalResource {
    public static final int BROKER_ID1 = 0;
    public static final int BROKER_ID2 = 1;
    public static final long TIMEOUT = 10000;

    private KafkaConfig config1;
    private KafkaConfig config2;
    private KafkaServer server1;
    private KafkaServer server2;
    private List<KafkaServer> servers;
    private List<KafkaConfig> configs;

    private final ZkExternalResource zk;
    private final TemporaryFolder tempDir = new TemporaryFolder();

    public KafkaServerExternalResource(ZkExternalResource zk) {
        this.zk = zk;
    }

    /**
     * Picks a currently-free ephemeral port. Note the port could in principle
     * be taken by another process between this call and the broker bind.
     */
    private static int getUnusedPort() throws IOException {
        // try-with-resources guarantees the probe socket is closed even if
        // setReuseAddress/getLocalPort throws.
        try (ServerSocket ss = new ServerSocket(0)) {
            ss.setReuseAddress(false);
            return ss.getLocalPort();
        }
    }

    @Override
    protected void before() throws Throwable {
        startServer(getUnusedPort(), getUnusedPort());
    }

    /** Starts both brokers on the given ports with fresh temp log dirs. */
    public void startServer(int port1, int port2) throws IOException {
        tempDir.create();
        config1 = new KafkaConfig(
                createBrokerConfig(BROKER_ID1, port1, zk.getServerPort(), tempDir.newFolder().getAbsolutePath()));
        server1 = createServer(config1);

        config2 = new KafkaConfig(
                createBrokerConfig(BROKER_ID2, port2, zk.getServerPort(), tempDir.newFolder().getAbsolutePath()));
        server2 = createServer(config2);

        servers = Lists.newArrayList(server1, server2);
        configs = Lists.newArrayList(config1, config2);
    }

    @Override
    protected void after() {
        shutdown();
    }

    /** Shuts down both brokers, blocking until each has fully stopped. */
    public void shutdown() {
        if (server1 != null) {
            server1.shutdown();
            server1.awaitShutdown();
        }
        if (server2 != null) {
            server2.shutdown();
            server2.awaitShutdown();
        }
        tempDir.delete();
    }

    /** @return the comma-separated {@code host:port} list of both brokers. */
    public String getBrokerListStr() {
        List<String> str = Lists.newArrayList();
        for (KafkaConfig config : configs) {
            str.add(config.hostName() + ":" + config.port());
        }
        return StringUtils.join(str, ",");
    }

    public KafkaServer getServer(int index) {
        return servers.get(index);
    }

    /** Creates and starts a broker for the given config. */
    public static KafkaServer createServer(KafkaConfig config) {
        KafkaServer server = new KafkaServer(config, kafka.utils.SystemTime$.MODULE$);
        server.startup();
        return server;
    }

    /** Builds the broker property set for one embedded Kafka node. */
    public static Properties createBrokerConfig(int nodeId, int port, int zkPort, String dir) {
        Properties props = new Properties();
        props.put("broker.id", Integer.toString(nodeId));
        props.put("brokerId", Integer.toString(nodeId));
        props.put("host.name", "localhost");
        props.put("port", Integer.toString(port));
        props.put("log.dir", dir);
        // Flush every message so tests observe writes immediately.
        props.put("log.flush.interval.messages", "1");
        props.put("zookeeper.connect", "localhost:" + zkPort);
        props.put("replica.socket.timeout.ms", "1500");
        props.put("hostName", "localhost");
        props.put("numPartitions", "1");
        return props;
    }
}
| 1,545 |
0 |
Create_ds/suro/suro-kafka-consumer/src/main/java/com/netflix/suro/input
|
Create_ds/suro/suro-kafka-consumer/src/main/java/com/netflix/suro/input/kafka/KafkaConsumer.java
|
package com.netflix.suro.input.kafka;
import com.fasterxml.jackson.annotation.JacksonInject;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.netflix.suro.input.SuroInput;
import com.netflix.suro.message.DefaultMessageContainer;
import com.netflix.suro.message.Message;
import com.netflix.suro.routing.MessageRouter;
import kafka.consumer.*;
import kafka.javaapi.consumer.ConsumerConnector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicLong;
/**
 * Suro input that consumes a Kafka topic via the high-level consumer and
 * feeds every message to the {@link MessageRouter}. One reader thread is run
 * per requested stream; {@link #setPause(long)} applies cooperative
 * back-pressure by accumulating sleep time for the reader threads.
 */
public class KafkaConsumer implements SuroInput {
    public static final String TYPE = "kafka";

    private static final Logger log = LoggerFactory.getLogger(KafkaConsumer.class);

    protected final Properties consumerProps;
    private final String topic;
    private final MessageRouter router;
    private final ObjectMapper jsonMapper;
    private ConsumerConnector connector;
    private ExecutorService executor;
    private final int readers;

    private List<Future<?>> runners = new ArrayList<Future<?>>();
    // Flag checked by reader loops; flipped by stop().
    private volatile boolean running = false;

    /**
     * @param consumerProps Kafka high-level consumer properties; must contain
     *                      group.id, zookeeper.connect, and a positive
     *                      consumer.timeout.ms (the timeout keeps reader loops
     *                      responsive to shutdown and pause requests).
     * @param topic         topic to consume; also used as the routing key.
     * @param readers       number of reader streams/threads; 0 defaults to 1.
     * @param router        destination for consumed messages.
     * @param jsonMapper    mapper used by the message containers.
     */
    @JsonCreator
    public KafkaConsumer(
            @JsonProperty("consumerProps") Properties consumerProps,
            @JsonProperty("topic") String topic,
            @JsonProperty("readers") int readers,
            @JacksonInject MessageRouter router,
            @JacksonInject ObjectMapper jsonMapper
    ) {
        Preconditions.checkNotNull(consumerProps);
        Preconditions.checkNotNull(topic);
        Preconditions.checkNotNull(consumerProps.getProperty("group.id"));
        Preconditions.checkNotNull(consumerProps.getProperty("zookeeper.connect"));
        String timeoutStr = consumerProps.getProperty("consumer.timeout.ms");
        Preconditions.checkNotNull(timeoutStr);
        Preconditions.checkArgument(Long.parseLong(timeoutStr) > 0);

        this.consumerProps = consumerProps;
        this.topic = topic;
        this.readers = readers == 0 ? 1 : readers;
        this.router = router;
        this.jsonMapper = jsonMapper;
    }

    /** Identity is topic + consumer group, matching {@link #equals(Object)}. */
    @Override
    public String getId() {
        return topic + "-" + consumerProps.getProperty("group.id");
    }

    // Accumulated pause requested via setPause(); consumed by reader loops.
    private AtomicLong pausedTime = new AtomicLong(0);
    public static long MAX_PAUSE = 1000; // not final for the test

    @Override
    public void start() throws Exception {
        executor = Executors.newCachedThreadPool(
                new ThreadFactoryBuilder().setNameFormat("KafkaConsumer-%d").build());

        connector = Consumer.createJavaConsumerConnector(new ConsumerConfig(consumerProps));
        final Map<String, List<KafkaStream<byte[], byte[]>>> streams = connector.createMessageStreams(ImmutableMap.of(topic, readers));
        final List<KafkaStream<byte[], byte[]>> streamList = streams.get(topic);
        if (streamList == null) {
            throw new RuntimeException(topic + " is not valid");
        }
        running = true;
        for (KafkaStream<byte[], byte[]> stream : streamList) {
            final ConsumerIterator<byte[], byte[]> iterator = stream.iterator();
            runners.add(
                    executor.submit(new Runnable() {
                        @Override
                        public void run() {
                            while (running) {
                                try {
                                    // Honor any requested pause, capped at MAX_PAUSE per iteration.
                                    long pause = Math.min(pausedTime.get(), MAX_PAUSE);
                                    if (pause > 0) {
                                        Thread.sleep(pause);
                                        pausedTime.set(0);
                                    }
                                    byte[] message = iterator.next().message();
                                    router.process(
                                            KafkaConsumer.this,
                                            new DefaultMessageContainer(new Message(topic, message), jsonMapper));
                                } catch (ConsumerTimeoutException timeoutException) {
                                    // Expected: consumer.timeout.ms elapsed with no data;
                                    // loop again so we can observe 'running' and pauses.
                                } catch (Exception e) {
                                    log.error("Exception on consuming kafka with topic: " + topic, e);
                                }
                            }
                        }
                    })
            );
        }
    }

    @Override
    public void shutdown() {
        stop();
        connector.shutdown();
    }

    /** Adds {@code ms} to the pause budget consumed by the reader threads. */
    @Override
    public void setPause(long ms) {
        pausedTime.addAndGet(ms);
    }

    /** Signals reader loops to exit and waits for them to finish. */
    private void stop() {
        running = false;
        try {
            for (Future<?> runner : runners) {
                runner.get();
            }
        } catch (InterruptedException e) {
            // Restore the interrupt status so callers up the stack can see it.
            Thread.currentThread().interrupt();
        } catch (ExecutionException e) {
            log.error("Exception on stopping the task", e);
        }
    }

    /** Two consumers are equal iff they share both topic and consumer group. */
    @Override
    public boolean equals(Object o) {
        if (o instanceof KafkaConsumer) {
            KafkaConsumer kafkaConsumer = (KafkaConsumer) o;
            boolean topicEquals = topic.equals(kafkaConsumer.topic);
            if (topicEquals) {
                return consumerProps.getProperty("group.id").equals(kafkaConsumer.consumerProps.getProperty("group.id"));
            } else {
                return false;
            }
        } else {
            return false;
        }
    }

    @Override
    public int hashCode() {
        // Consistent with equals: getId() is topic + "-" + group.id.
        return (getId()).hashCode();
    }
}
| 1,546 |
0 |
Create_ds/sagemaker-sparkml-serving-container/src/test/java/com/amazonaws/sagemaker
|
Create_ds/sagemaker-sparkml-serving-container/src/test/java/com/amazonaws/sagemaker/dto/JsonlinesStandardOutputTest.java
|
/*
* Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package com.amazonaws.sagemaker.dto;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Lists;
import java.io.IOException;
import java.util.List;
import org.apache.commons.io.IOUtils;
import org.junit.Assert;
import org.junit.Test;
/** Tests for {@link JsonlinesStandardOutput}: construction, null handling, and JSON parsing. */
public class JsonlinesStandardOutputTest {

    private ObjectMapper mapper = new ObjectMapper();

    @Test
    public void testStandardJsonOutputObjectCreation() {
        // valueOf/autobox instead of the deprecated boxed-type constructors.
        List<Object> featureList = Lists.newArrayList(Integer.valueOf(1), Double.valueOf(2.0), "3");
        JsonlinesStandardOutput jsonlinesStandardOutputTest = new JsonlinesStandardOutput(featureList);
        Assert.assertNotNull(jsonlinesStandardOutputTest.getFeatures());
        // Heterogeneous element types must be preserved as-is.
        Assert.assertTrue(jsonlinesStandardOutputTest.getFeatures().get(0) instanceof Integer);
        Assert.assertTrue(jsonlinesStandardOutputTest.getFeatures().get(1) instanceof Double);
        Assert.assertTrue(jsonlinesStandardOutputTest.getFeatures().get(2) instanceof String);
    }

    @Test(expected = NullPointerException.class)
    public void testNullInputPassedToConstructor() {
        new JsonlinesStandardOutput(null);
    }

    @Test
    public void testParseStandardJsonOutput() throws IOException {
        String inputJson = IOUtils.toString(this.getClass().getResourceAsStream("standard_json_out.json"), "UTF-8");
        JsonlinesStandardOutput sjo = mapper.readValue(inputJson, JsonlinesStandardOutput.class);
        Assert.assertEquals(sjo.getFeatures(), Lists.newArrayList(1.0, 2.0, 4.0));
    }
}
| 1,547 |
0 |
Create_ds/sagemaker-sparkml-serving-container/src/test/java/com/amazonaws/sagemaker
|
Create_ds/sagemaker-sparkml-serving-container/src/test/java/com/amazonaws/sagemaker/dto/SageMakerRequestObjectTest.java
|
/*
* Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package com.amazonaws.sagemaker.dto;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Lists;
import org.apache.commons.io.IOUtils;
import org.junit.Assert;
import org.junit.Test;
import java.io.IOException;
/**
 * Tests for {@link SageMakerRequestObject}: direct construction, null
 * rejection, and parsing of both basic and complete request JSON payloads.
 */
public class SageMakerRequestObjectTest {

    private ObjectMapper mapper = new ObjectMapper();

    @Test
    public void testSageMakerRequestObjectCreation() throws IOException {
        String inputJson = IOUtils.toString(this.getClass().getResourceAsStream("basic_input_schema.json"), "UTF-8");
        DataSchema schema = mapper.readValue(inputJson, DataSchema.class);
        SageMakerRequestObject sro = new SageMakerRequestObject(schema, Lists.newArrayList(1, "C", 38.0));
        Assert.assertEquals(sro.getSchema().getInput().size(), 3);
        Assert.assertEquals(sro.getSchema().getInput().get(0).getName(), "name_1");
        Assert.assertEquals(sro.getSchema().getInput().get(1).getName(), "name_2");
        Assert.assertEquals(sro.getSchema().getInput().get(2).getName(), "name_3");
        Assert.assertEquals(sro.getSchema().getInput().get(0).getType(), "int");
        Assert.assertEquals(sro.getSchema().getInput().get(1).getType(), "string");
        Assert.assertEquals(sro.getSchema().getInput().get(2).getType(), "double");
        Assert.assertEquals(sro.getSchema().getInput().get(0).getStruct(), "basic");
        Assert.assertEquals(sro.getSchema().getInput().get(1).getStruct(), "basic");
        Assert.assertEquals(sro.getSchema().getInput().get(2).getStruct(), "basic");
        Assert.assertEquals(sro.getData(), Lists.newArrayList(1, "C", 38.0));
        Assert.assertEquals(sro.getSchema().getOutput().getName(), "features");
        Assert.assertEquals(sro.getSchema().getOutput().getType(), "double");
    }

    @Test(expected = NullPointerException.class)
    public void testNullDataPassedToConstructor() {
        new SageMakerRequestObject(null, null);
    }

    @Test
    public void testParseBasicInputJson() throws IOException {
        String inputJson = IOUtils.toString(this.getClass().getResourceAsStream("basic_input.json"), "UTF-8");
        SageMakerRequestObject sro = mapper.readValue(inputJson, SageMakerRequestObject.class);
        Assert.assertEquals(sro.getSchema().getInput().size(), 3);
        Assert.assertEquals(sro.getSchema().getInput().get(0).getName(), "name_1");
        Assert.assertEquals(sro.getSchema().getInput().get(1).getName(), "name_2");
        Assert.assertEquals(sro.getSchema().getInput().get(2).getName(), "name_3");
        Assert.assertEquals(sro.getSchema().getInput().get(0).getType(), "int");
        Assert.assertEquals(sro.getSchema().getInput().get(1).getType(), "string");
        Assert.assertEquals(sro.getSchema().getInput().get(2).getType(), "double");
        Assert.assertEquals(sro.getSchema().getInput().get(0).getStruct(), "basic");
        Assert.assertEquals(sro.getSchema().getInput().get(1).getStruct(), "basic");
        Assert.assertEquals(sro.getSchema().getInput().get(2).getStruct(), "basic");
        Assert.assertEquals(sro.getData(), Lists.newArrayList(1, "C", 38.0));
        Assert.assertEquals(sro.getSchema().getOutput().getName(), "features");
        Assert.assertEquals(sro.getSchema().getOutput().getType(), "double");
    }

    @Test
    public void testParseCompleteInputJson() throws IOException {
        String inputJson = IOUtils.toString(this.getClass().getResourceAsStream("complete_input.json"), "UTF-8");
        SageMakerRequestObject sro = mapper.readValue(inputJson, SageMakerRequestObject.class);
        // Duplicated size assertion removed (was asserted twice in a row).
        Assert.assertEquals(sro.getSchema().getInput().size(), 3);
        Assert.assertEquals(sro.getSchema().getInput().get(0).getName(), "name_1");
        Assert.assertEquals(sro.getSchema().getInput().get(1).getName(), "name_2");
        Assert.assertEquals(sro.getSchema().getInput().get(2).getName(), "name_3");
        Assert.assertEquals(sro.getSchema().getInput().get(0).getType(), "double");
        Assert.assertEquals(sro.getSchema().getInput().get(1).getType(), "string");
        Assert.assertEquals(sro.getSchema().getInput().get(2).getType(), "double");
        Assert.assertEquals(sro.getSchema().getInput().get(0).getStruct(), "vector");
        Assert.assertEquals(sro.getSchema().getInput().get(1).getStruct(), "basic");
        Assert.assertEquals(sro.getSchema().getInput().get(2).getStruct(), "array");
        Assert.assertEquals(sro.getData(),
                Lists.newArrayList(Lists.newArrayList(1.0, 2.0, 3.0), "C", Lists.newArrayList(38.0, 24.0)));
        Assert.assertEquals(sro.getSchema().getOutput().getName(), "features");
        Assert.assertEquals(sro.getSchema().getOutput().getType(), "double");
    }
}
| 1,548 |
0 |
Create_ds/sagemaker-sparkml-serving-container/src/test/java/com/amazonaws/sagemaker
|
Create_ds/sagemaker-sparkml-serving-container/src/test/java/com/amazonaws/sagemaker/dto/JsonlinesTextOutputTest.java
|
/*
* Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package com.amazonaws.sagemaker.dto;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import org.apache.commons.io.IOUtils;
import org.junit.Assert;
import org.junit.Test;
/**
 * Unit tests for {@link JsonlinesTextOutput}: constructor behavior,
 * null-argument rejection, and Jackson deserialization from a resource file.
 */
public class JsonlinesTextOutputTest {

    private ObjectMapper mapper = new ObjectMapper();

    @Test
    public void testStandardJsonOutputObjectCreation() {
        JsonlinesTextOutput jsonlinesTextOutputTest = new JsonlinesTextOutput("this is spark ml server");
        // assertEquals(expected, actual) per JUnit convention for clear failure messages.
        Assert.assertEquals("this is spark ml server", jsonlinesTextOutputTest.getSource());
    }

    // Constructor performs a null check on its argument.
    @Test(expected = NullPointerException.class)
    public void testNullInputPassedToConstructor() {
        new JsonlinesTextOutput(null);
    }

    @Test
    public void testParseStandardJsonOutput() throws IOException {
        String inputJson = IOUtils.toString(this.getClass().getResourceAsStream("text_json_out.json"), "UTF-8");
        JsonlinesTextOutput sjo = mapper.readValue(inputJson, JsonlinesTextOutput.class);
        Assert.assertEquals("this is spark ml server", sjo.getSource());
    }
}
| 1,549 |
0 |
Create_ds/sagemaker-sparkml-serving-container/src/test/java/com/amazonaws/sagemaker
|
Create_ds/sagemaker-sparkml-serving-container/src/test/java/com/amazonaws/sagemaker/dto/DataSchemaTest.java
|
/*
* Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package com.amazonaws.sagemaker.dto;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Lists;
import java.io.IOException;
import java.util.List;
import org.apache.commons.io.IOUtils;
import org.junit.Assert;
import org.junit.Test;
/**
 * Unit tests for {@link DataSchema}: direct construction, null-argument
 * rejection, and Jackson deserialization of the basic input schema resource.
 */
public class DataSchemaTest {

    private ObjectMapper mapper = new ObjectMapper();
    List<ColumnSchema> inputCols = Lists.newArrayList(new ColumnSchema("name_1", "type_1", "struct_1"),
        new ColumnSchema("name_2", "type_2", "struct_2"));
    ColumnSchema outputCol = new ColumnSchema("name_out_1", "type_out_1", "struct_out_1");

    @Test
    public void testDataSchemaObjectCreation() {
        DataSchema ds = new DataSchema(inputCols, outputCol);
        // assertEquals(expected, actual) per JUnit convention.
        Assert.assertEquals("name_1", ds.getInput().get(0).getName());
        Assert.assertEquals("type_1", ds.getInput().get(0).getType());
        Assert.assertEquals("struct_1", ds.getInput().get(0).getStruct());
        Assert.assertEquals("name_2", ds.getInput().get(1).getName());
        Assert.assertEquals("type_2", ds.getInput().get(1).getType());
        Assert.assertEquals("struct_2", ds.getInput().get(1).getStruct());
        Assert.assertEquals("name_out_1", ds.getOutput().getName());
        Assert.assertEquals("type_out_1", ds.getOutput().getType());
        Assert.assertEquals("struct_out_1", ds.getOutput().getStruct());
    }

    // NOTE(review): despite the name, this passes null (not an empty list);
    // the constructor's null check is what throws here.
    @Test(expected = NullPointerException.class)
    public void testEmptyInputColumnsPassedToConstructor() {
        new DataSchema(null, outputCol);
    }

    // NOTE(review): same as above — null output column, not an empty one.
    @Test(expected = NullPointerException.class)
    public void testEmptyOutputColumnsPassedToConstructor() {
        new DataSchema(inputCols, null);
    }

    @Test
    public void testParseBasicInputJson() throws IOException {
        String inputJson = IOUtils.toString(this.getClass().getResourceAsStream("basic_input_schema.json"), "UTF-8");
        DataSchema schema = mapper.readValue(inputJson, DataSchema.class);
        Assert.assertEquals(3, schema.getInput().size());
        Assert.assertEquals("name_1", schema.getInput().get(0).getName());
        Assert.assertEquals("name_2", schema.getInput().get(1).getName());
        Assert.assertEquals("name_3", schema.getInput().get(2).getName());
        Assert.assertEquals("int", schema.getInput().get(0).getType());
        Assert.assertEquals("string", schema.getInput().get(1).getType());
        Assert.assertEquals("double", schema.getInput().get(2).getType());
        Assert.assertEquals("basic", schema.getInput().get(0).getStruct());
        Assert.assertEquals("basic", schema.getInput().get(1).getStruct());
        Assert.assertEquals("basic", schema.getInput().get(2).getStruct());
        Assert.assertEquals("features", schema.getOutput().getName());
        Assert.assertEquals("double", schema.getOutput().getType());
    }
}
| 1,550 |
0 |
Create_ds/sagemaker-sparkml-serving-container/src/test/java/com/amazonaws/sagemaker
|
Create_ds/sagemaker-sparkml-serving-container/src/test/java/com/amazonaws/sagemaker/dto/ColumnSchemaTest.java
|
/*
* Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package com.amazonaws.sagemaker.dto;
import com.amazonaws.sagemaker.type.DataStructureType;
import org.junit.Assert;
import org.junit.Test;
/**
 * Unit tests for {@link ColumnSchema}: field accessors, null checks on the
 * required name/type arguments, and the default struct for a null struct.
 */
public class ColumnSchemaTest {

    @Test
    public void testSingleColumnObjectCreation() {
        ColumnSchema columnSchemaTest = new ColumnSchema("test_name", "test_type", "test_struct");
        // assertEquals(expected, actual) per JUnit convention.
        Assert.assertEquals("test_name", columnSchemaTest.getName());
        Assert.assertEquals("test_type", columnSchemaTest.getType());
        Assert.assertEquals("test_struct", columnSchemaTest.getStruct());
    }

    @Test(expected = NullPointerException.class)
    public void testNullNamePassedToConstructor() {
        new ColumnSchema(null, "test_type", "test_struct");
    }

    @Test(expected = NullPointerException.class)
    public void testNullTypePassedToConstructor() {
        new ColumnSchema("test_name", null, "test_struct");
    }

    // struct is optional: a null struct falls back to DataStructureType.BASIC.
    @Test
    public void testNullStructPassedToConstructor() {
        ColumnSchema columnSchemaTest = new ColumnSchema("test_name", "test_type", null);
        Assert.assertEquals(DataStructureType.BASIC, columnSchemaTest.getStruct());
    }
}
| 1,551 |
0 |
Create_ds/sagemaker-sparkml-serving-container/src/test/java/com/amazonaws/sagemaker
|
Create_ds/sagemaker-sparkml-serving-container/src/test/java/com/amazonaws/sagemaker/dto/BatchExecutionParameterTest.java
|
/*
* Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package com.amazonaws.sagemaker.dto;
import org.junit.Assert;
import org.junit.Test;
/**
 * Unit tests for {@link BatchExecutionParameter}: accessor round trip and
 * null checks on each constructor argument.
 */
public class BatchExecutionParameterTest {

    @Test
    public void testBatchExecutionParameterObjectCreation() {
        BatchExecutionParameter testBatchExecution = new BatchExecutionParameter(1, "SINGLE_RECORD", 5);
        // Integer.valueOf replaces the deprecated new Integer(String) constructor;
        // assertEquals(expected, actual) per JUnit convention.
        Assert.assertEquals("SINGLE_RECORD", testBatchExecution.getBatchStrategy());
        Assert.assertEquals(Integer.valueOf(1), testBatchExecution.getMaxConcurrentTransforms());
        Assert.assertEquals(Integer.valueOf(5), testBatchExecution.getMaxPayloadInMB());
    }

    @Test(expected = NullPointerException.class)
    public void testNullBatchStrategyPassedToConstructor() {
        new BatchExecutionParameter(1, null, 5);
    }

    @Test(expected = NullPointerException.class)
    public void testNullConcurrentTransformsPassedToConstructor() {
        new BatchExecutionParameter(null, "SINGLE_RECORD", 5);
    }

    @Test(expected = NullPointerException.class)
    public void testNullMaxPayloadPassedToConstructor() {
        new BatchExecutionParameter(1, "SINGLE_RECORD", null);
    }
}
| 1,552 |
0 |
Create_ds/sagemaker-sparkml-serving-container/src/test/java/com/amazonaws/sagemaker
|
Create_ds/sagemaker-sparkml-serving-container/src/test/java/com/amazonaws/sagemaker/dto/SageMakerRequestListObjectTest.java
|
/*
* Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package com.amazonaws.sagemaker.dto;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Lists;
import org.apache.commons.io.IOUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
 * Unit tests for {@link SageMakerRequestListObject}: construction from a
 * parsed schema plus list-of-rows data, null rejection, and Jackson
 * deserialization of multi-line (batch) request payloads.
 */
public class SageMakerRequestListObjectTest {

    private ObjectMapper mapper = new ObjectMapper();
    // Two data rows matching the "basic" schema (int, string, double columns).
    private List<List<Object>> listOfListInputForBasicInput;
    // Two data rows mixing vector, basic and array structs.
    private List<List<Object>> listOfListInputForMultipleInput;

    @Before
    public void setup() {
        listOfListInputForBasicInput = new ArrayList<>();
        listOfListInputForBasicInput.add(Lists.newArrayList(1, "C", 38.0));
        listOfListInputForBasicInput.add(Lists.newArrayList(2, "D", 39.0));
        listOfListInputForMultipleInput = new ArrayList<>();
        listOfListInputForMultipleInput.add(Lists.newArrayList(Lists.newArrayList(1, 2, 3), "C",
            Lists.newArrayList(38.0, 24.0)));
        listOfListInputForMultipleInput.add(Lists.newArrayList(Lists.newArrayList(4, 5, 6), "D",
            Lists.newArrayList(39.0, 25.0)));
    }

    @Test
    public void testSageMakerRequestListObjectCreation() throws IOException {
        String inputJson = IOUtils.toString(this.getClass().getResourceAsStream("basic_input_schema.json"), "UTF-8");
        DataSchema schema = mapper.readValue(inputJson, DataSchema.class);
        SageMakerRequestListObject sro = new SageMakerRequestListObject(schema, listOfListInputForBasicInput);
        // assertEquals(expected, actual) per JUnit convention.
        Assert.assertEquals(3, sro.getSchema().getInput().size());
        Assert.assertEquals("name_1", sro.getSchema().getInput().get(0).getName());
        Assert.assertEquals("name_2", sro.getSchema().getInput().get(1).getName());
        Assert.assertEquals("name_3", sro.getSchema().getInput().get(2).getName());
        Assert.assertEquals("int", sro.getSchema().getInput().get(0).getType());
        Assert.assertEquals("string", sro.getSchema().getInput().get(1).getType());
        Assert.assertEquals("double", sro.getSchema().getInput().get(2).getType());
        Assert.assertEquals("basic", sro.getSchema().getInput().get(0).getStruct());
        Assert.assertEquals("basic", sro.getSchema().getInput().get(1).getStruct());
        Assert.assertEquals("basic", sro.getSchema().getInput().get(2).getStruct());
        Assert.assertEquals(listOfListInputForBasicInput, sro.getData());
        Assert.assertEquals("features", sro.getSchema().getOutput().getName());
        Assert.assertEquals("double", sro.getSchema().getOutput().getType());
    }

    @Test(expected = NullPointerException.class)
    public void testNullDataPassedToConstructor() {
        new SageMakerRequestListObject(null, null);
    }

    @Test
    public void testParseBasicMultipleLinesInputJson() throws IOException {
        String inputJson = IOUtils.toString(this.getClass().getResourceAsStream("basic_multipleLines_input.json"), "UTF-8");
        SageMakerRequestListObject sro = mapper.readValue(inputJson, SageMakerRequestListObject.class);
        Assert.assertEquals(3, sro.getSchema().getInput().size());
        Assert.assertEquals("name_1", sro.getSchema().getInput().get(0).getName());
        Assert.assertEquals("name_2", sro.getSchema().getInput().get(1).getName());
        Assert.assertEquals("name_3", sro.getSchema().getInput().get(2).getName());
        Assert.assertEquals("int", sro.getSchema().getInput().get(0).getType());
        Assert.assertEquals("string", sro.getSchema().getInput().get(1).getType());
        Assert.assertEquals("double", sro.getSchema().getInput().get(2).getType());
        Assert.assertEquals("basic", sro.getSchema().getInput().get(0).getStruct());
        Assert.assertEquals("basic", sro.getSchema().getInput().get(1).getStruct());
        Assert.assertEquals("basic", sro.getSchema().getInput().get(2).getStruct());
        Assert.assertEquals(listOfListInputForBasicInput, sro.getData());
        Assert.assertEquals("features", sro.getSchema().getOutput().getName());
        Assert.assertEquals("double", sro.getSchema().getOutput().getType());
    }

    @Test
    public void testParseCompleteMultipleLinesInputJson() throws IOException {
        String inputJson = IOUtils.toString(this.getClass().getResourceAsStream("complete_multipleLines_input.json"), "UTF-8");
        SageMakerRequestListObject sro = mapper.readValue(inputJson, SageMakerRequestListObject.class);
        // Original had this size assertion duplicated; one is enough.
        Assert.assertEquals(3, sro.getSchema().getInput().size());
        Assert.assertEquals("name_1", sro.getSchema().getInput().get(0).getName());
        Assert.assertEquals("name_2", sro.getSchema().getInput().get(1).getName());
        Assert.assertEquals("name_3", sro.getSchema().getInput().get(2).getName());
        Assert.assertEquals("int", sro.getSchema().getInput().get(0).getType());
        Assert.assertEquals("string", sro.getSchema().getInput().get(1).getType());
        Assert.assertEquals("double", sro.getSchema().getInput().get(2).getType());
        Assert.assertEquals("vector", sro.getSchema().getInput().get(0).getStruct());
        Assert.assertEquals("basic", sro.getSchema().getInput().get(1).getStruct());
        Assert.assertEquals("array", sro.getSchema().getInput().get(2).getStruct());
        Assert.assertEquals(listOfListInputForMultipleInput, sro.getData());
        Assert.assertEquals("features", sro.getSchema().getOutput().getName());
        Assert.assertEquals("double", sro.getSchema().getOutput().getType());
        Assert.assertEquals("vector", sro.getSchema().getOutput().getStruct());
    }
}
| 1,553 |
0 |
Create_ds/sagemaker-sparkml-serving-container/src/test/java/com/amazonaws/sagemaker
|
Create_ds/sagemaker-sparkml-serving-container/src/test/java/com/amazonaws/sagemaker/configuration/ContextLoaderTest.java
|
/*
* Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package com.amazonaws.sagemaker.configuration;
import com.amazonaws.sagemaker.configuration.ContextLoaderTest.TestConfig;
import java.io.File;
import ml.combust.mleap.runtime.javadsl.LeapFrameBuilder;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(classes = {TestConfig.class})
// Verifies the Spring ApplicationContext wires up from BeanConfiguration.
// TestConfig overrides only the model-file bean so the context can start
// without the production /opt/ml/model path being present.
public class ContextLoaderTest {
@Autowired
ApplicationContext context;
@Configuration
@Import(BeanConfiguration.class) // the actual configuration
public static class TestConfig {
// Overrides BeanConfiguration's model file with a test resource.
@Bean
public File provideModelFile() {
return new File(this.getClass().getResource("model").getFile());
}
}
@Test
public void testApplicationContextSetup() {
//Checks ApplicationContext is initialized and a random Bean is instantiated properly
Assert.assertNotNull(context.getBean(LeapFrameBuilder.class));
}
}
| 1,554 |
0 |
Create_ds/sagemaker-sparkml-serving-container/src/test/java/com/amazonaws/sagemaker
|
Create_ds/sagemaker-sparkml-serving-container/src/test/java/com/amazonaws/sagemaker/configuration/BeanConfigurationTest.java
|
/*
* Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package com.amazonaws.sagemaker.configuration;
import com.amazonaws.sagemaker.utils.SystemUtils;
import java.io.File;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.springframework.boot.web.embedded.jetty.JettyServletWebServerFactory;
@RunWith(PowerMockRunner.class)
@PrepareForTest(SystemUtils.class)
/**
 * Unit tests for {@link BeanConfiguration}: every provided bean is non-null,
 * the Jetty listener port honors SAGEMAKER_BIND_TO_PORT, and the port can be
 * read from a mocked environment.
 */
public class BeanConfigurationTest {

    // PowerMock requires a public zero-argument constructor on the test class.
    public BeanConfigurationTest() {
    }

    private BeanConfiguration configuration = new BeanConfiguration();

    @Test
    public void testModelLocationNotNull() {
        Assert.assertNotNull(configuration.provideModelFile());
        // assertEquals(expected, actual) per JUnit convention.
        Assert.assertEquals(new File("/opt/ml/model"), configuration.provideModelFile());
    }

    @Test
    public void testContextBuilderNotNull() {
        Assert.assertNotNull(configuration.provideContextBuilder());
    }

    @Test
    public void testBundleBuilderNotNull() {
        Assert.assertNotNull(configuration.provideBundleBuilder());
    }

    @Test
    public void testMleapContextNotNull() {
        Assert.assertNotNull(configuration.provideMleapContext(configuration.provideContextBuilder()));
    }

    @Test
    public void testLeapFrameBuilderNotNull() {
        Assert.assertNotNull(configuration.provideLeapFrameBuilder());
    }

    @Test
    public void testLeapFrameBuilderSupportNotNull() {
        Assert.assertNotNull(configuration.provideLeapFrameBuilderSupport());
    }

    @Test
    public void testTransformerNotNull() {
        File dummyMLeapFile = new File(this.getClass().getResource("model").getFile());
        Assert.assertNotNull(configuration.provideTransformer(dummyMLeapFile, configuration.provideBundleBuilder(),
            configuration.provideMleapContext(configuration.provideContextBuilder())));
    }

    @Test
    public void testObjectMapperNotNull() {
        Assert.assertNotNull(configuration.provideObjectMapper());
    }

    @Test
    public void testJettyServletWebServerFactoryNotNull() {
        JettyServletWebServerFactory jettyServletTest = configuration.provideJettyServletWebServerFactory();
        final String listenerPort =
            (System.getenv("SAGEMAKER_BIND_TO_PORT") != null) ? System.getenv("SAGEMAKER_BIND_TO_PORT") : "8080";
        // Integer.parseInt replaces the deprecated new Integer(String) constructor.
        Assert.assertEquals(Integer.parseInt(listenerPort), jettyServletTest.getPort());
        Assert.assertNotNull(jettyServletTest.getServerCustomizers());
    }

    @Test
    public void testParsePortFromEnvironment() {
        // Mock the class that actually owns the stubbed static method; the
        // original mocked System.class, which is not the @PrepareForTest target
        // and leaves SystemUtils.getEnvironmentVariable unmocked.
        PowerMockito.mockStatic(SystemUtils.class);
        PowerMockito.when(SystemUtils.getEnvironmentVariable("SAGEMAKER_BIND_TO_PORT")).thenReturn("7070");
        Assert.assertEquals("7070", configuration.getHttpListenerPort());
    }
}
| 1,555 |
0 |
Create_ds/sagemaker-sparkml-serving-container/src/test/java/com/amazonaws/sagemaker
|
Create_ds/sagemaker-sparkml-serving-container/src/test/java/com/amazonaws/sagemaker/utils/SystemUtilsTest.java
|
/*
* Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package com.amazonaws.sagemaker.utils;
import org.junit.Assert;
import org.junit.Test;
// Verifies SystemUtils.getNumberOfThreads scales the requested multiplier
// by the number of available processors (here: multiplier 2).
public class SystemUtilsTest {
@Test
public void testGetNumberOfThreads() {
Assert.assertEquals(2 * Runtime.getRuntime().availableProcessors(), SystemUtils.getNumberOfThreads(2));
}
}
| 1,556 |
0 |
Create_ds/sagemaker-sparkml-serving-container/src/test/java/com/amazonaws/sagemaker
|
Create_ds/sagemaker-sparkml-serving-container/src/test/java/com/amazonaws/sagemaker/controller/ServingControllerTest.java
|
/*
* Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package com.amazonaws.sagemaker.controller;
import com.amazonaws.sagemaker.dto.BatchExecutionParameter;
import com.amazonaws.sagemaker.dto.ColumnSchema;
import com.amazonaws.sagemaker.dto.DataSchema;
import com.amazonaws.sagemaker.dto.SageMakerRequestObject;
import com.amazonaws.sagemaker.helper.DataConversionHelper;
import com.amazonaws.sagemaker.helper.ResponseHelper;
import com.amazonaws.sagemaker.type.AdditionalMediaType;
import com.amazonaws.sagemaker.utils.ScalaUtils;
import com.amazonaws.sagemaker.utils.SystemUtils;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Lists;
import java.io.IOException;
import java.util.List;
import java.util.Objects;
import ml.combust.mleap.runtime.frame.ArrayRow;
import ml.combust.mleap.runtime.frame.DefaultLeapFrame;
import ml.combust.mleap.runtime.frame.Transformer;
import ml.combust.mleap.runtime.javadsl.LeapFrameBuilder;
import ml.combust.mleap.runtime.javadsl.LeapFrameBuilderSupport;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
@RunWith(PowerMockRunner.class)
@PrepareForTest({ScalaUtils.class, SystemUtils.class})
class ServingControllerTest {
private ServingController controller;
private DataConversionHelper converter = new DataConversionHelper(new LeapFrameBuilderSupport(),
new LeapFrameBuilder());
private Transformer mleapTransformerMock;
private SageMakerRequestObject sro;
private DefaultLeapFrame responseLeapFrame;
private ArrayRow outputArrayRow;
private List<ColumnSchema> inputColumns;
private ColumnSchema outputColumn;
private List<Object> inputData;
private String schemaInJson;
private ObjectMapper mapper = new ObjectMapper();
private ResponseHelper responseHelper = new ResponseHelper(mapper);
// PowerMock requires a public zero-argument constructor on the test class.
public ServingControllerTest() {
}
/**
 * Builds the default two-column (int, double) request object and the matching
 * schema JSON used across the tests.
 */
private void buildDefaultSageMakerRequestObject() {
    schemaInJson = "{\"input\":[{\"name\":\"test_name_1\",\"type\":\"int\"},{\"name\":\"test_name_2\","
        + "\"type\":\"double\"}],\"output\":{\"name\":\"out_name\",\"type\":\"int\"}}";
    inputColumns = Lists.newArrayList(new ColumnSchema("test_name_1", "int", null),
        new ColumnSchema("test_name_2", "double", null));
    outputColumn = new ColumnSchema("out_name", "int", null);
    // valueOf replaces the deprecated new Integer(String)/new Double(String)
    // boxing constructors; the boxed types are preserved.
    inputData = Lists.newArrayList(Integer.valueOf(1), Double.valueOf(2.0));
    sro = new SageMakerRequestObject(new DataSchema(inputColumns, outputColumn), inputData);
}
/**
 * Converts the default request into the MLeap leap frame the mocked
 * transformer will return, plus a single-value output row.
 */
private void buildResponseLeapFrame() {
    responseLeapFrame = new DataConversionHelper(new LeapFrameBuilderSupport(), new LeapFrameBuilder())
        .convertInputToLeapFrame(sro.getSchema(), sro.getData());
    // Integer.valueOf replaces the deprecated new Integer(String) constructor.
    outputArrayRow = new ArrayRow(Lists.newArrayList(Integer.valueOf(1)));
}
// Wires the controller with a mocked MLeap Transformer and stubs the static
// ScalaUtils helpers so every transform/select returns the canned leap frame
// and output row. mockStatic must run before the when(...) stubs.
@Before
public void setup() {
responseHelper = new ResponseHelper(mapper);
mleapTransformerMock = Mockito.mock(Transformer.class);
this.buildDefaultSageMakerRequestObject();
this.buildResponseLeapFrame();
controller = new ServingController(mleapTransformerMock, responseHelper, converter, mapper);
PowerMockito.mockStatic(ScalaUtils.class);
PowerMockito.mockStatic(SystemUtils.class);
PowerMockito
.when(ScalaUtils.transformLeapFrame(Mockito.any(Transformer.class), Mockito.any(DefaultLeapFrame.class)))
.thenReturn(responseLeapFrame);
PowerMockito.when(ScalaUtils.selectFromLeapFrame(Mockito.any(DefaultLeapFrame.class), Mockito.anyString()))
.thenReturn(responseLeapFrame);
PowerMockito.when(ScalaUtils.getOutputArrayRow(Mockito.any(DefaultLeapFrame.class))).thenReturn(outputArrayRow);
}
@Test
public void testPerformShallowHealthCheck() {
Assert.assertEquals(controller.performShallowHealthCheck().getStatusCode(), HttpStatus.OK);
}
@Test
public void testReturnBatchExecutionParameter() throws Exception {
ResponseEntity response = controller.returnBatchExecutionParameter();
Assert.assertEquals(response.getStatusCode(), HttpStatus.OK);
BatchExecutionParameter batchParam = new ObjectMapper()
.readValue(Objects.requireNonNull(response.getBody()).toString(), BatchExecutionParameter.class);
Assert.assertEquals((int) batchParam.getMaxConcurrentTransforms(), SystemUtils.getNumberOfThreads(1));
Assert.assertEquals(batchParam.getBatchStrategy(), "SINGLE_RECORD");
Assert.assertEquals((int) batchParam.getMaxPayloadInMB(), 5);
}
@Test
public void testSingleValueCsvAcceptResponse() {
final ResponseEntity<String> output = controller.transformRequestJson(sro, AdditionalMediaType.TEXT_CSV_VALUE);
Assert.assertEquals(output.getBody(), "1");
Assert.assertEquals(Objects.requireNonNull(output.getHeaders().getContentType()).toString(),
AdditionalMediaType.TEXT_CSV_VALUE);
}
@Test
public void testSingleValueJsonlinesAcceptResponse() {
final ResponseEntity<String> output = controller
.transformRequestJson(sro, AdditionalMediaType.APPLICATION_JSONLINES_VALUE);
Assert.assertEquals(output.getBody(), "1");
Assert.assertEquals(Objects.requireNonNull(output.getHeaders().getContentType()).toString(),
AdditionalMediaType.APPLICATION_JSONLINES_VALUE);
}
@Test
public void testSingleValueNoAcceptResponse() {
final ResponseEntity<String> output = controller.transformRequestJson(sro, null);
Assert.assertEquals(output.getBody(), "1");
Assert.assertEquals(Objects.requireNonNull(output.getHeaders().getContentType()).toString(),
AdditionalMediaType.TEXT_CSV_VALUE);
}
@Test
public void testListValueCsvAcceptResponse() {
outputColumn = new ColumnSchema("out_name", "int", "array");
List<Object> outputResponse = Lists.newArrayList(1, 2);
sro = new SageMakerRequestObject(new DataSchema(inputColumns, outputColumn), inputData);
PowerMockito
.when(ScalaUtils.getJavaObjectIteratorFromArrayRow(Mockito.any(ArrayRow.class), Mockito.anyString()))
.thenReturn(outputResponse.iterator());
final ResponseEntity<String> output = controller.transformRequestJson(sro, "text/csv");
Assert.assertEquals(output.getBody(), "1,2");
}
@Test
public void testListValueJsonLinesAcceptResponse() {
outputColumn = new ColumnSchema("out_name", "int", "vector");
List<Object> outputResponse = Lists.newArrayList(1, 2);
sro = new SageMakerRequestObject(new DataSchema(inputColumns, outputColumn), inputData);
PowerMockito
.when(ScalaUtils.getJavaObjectIteratorFromArrayRow(Mockito.any(ArrayRow.class), Mockito.anyString()))
.thenReturn(outputResponse.iterator());
final ResponseEntity<String> output = controller.transformRequestJson(sro, "application/jsonlines");
Assert.assertEquals(output.getBody(), "{\"features\":[1,2]}");
}
@Test
public void testListValueNoAcceptResponse() {
outputColumn = new ColumnSchema("out_name", "int", "array");
List<Object> outputResponse = Lists.newArrayList(1, 2);
sro = new SageMakerRequestObject(new DataSchema(inputColumns, outputColumn), inputData);
PowerMockito
.when(ScalaUtils.getJavaObjectIteratorFromArrayRow(Mockito.any(ArrayRow.class), Mockito.anyString()))
.thenReturn(outputResponse.iterator());
final ResponseEntity<String> output = controller.transformRequestJson(sro, null);
Assert.assertEquals(output.getBody(), "1,2");
}
@Test
public void testListValueMLeapThrowsException() {
outputColumn = new ColumnSchema("out_name", "int", "array");
sro = new SageMakerRequestObject(new DataSchema(inputColumns, outputColumn), inputData);
PowerMockito
.when(ScalaUtils.getJavaObjectIteratorFromArrayRow(Mockito.any(ArrayRow.class), Mockito.anyString()))
.thenThrow(new RuntimeException("input data is not valid"));
final ResponseEntity<String> output = controller.transformRequestJson(sro, "text/csv");
Assert.assertEquals(output.getStatusCode(), HttpStatus.BAD_REQUEST);
Assert.assertEquals(output.getBody(), "input data is not valid");
}
@Test
public void testInputNull() {
final ResponseEntity<String> output = controller.transformRequestJson(null, "text/csv");
Assert.assertEquals(output.getStatusCode(), HttpStatus.NO_CONTENT);
}
@Test
public void testCsvApiWithListInput() {
schemaInJson = "{\"input\":[{\"name\":\"test_name_1\",\"type\":\"int\"},{\"name\":\"test_name_2\","
+ "\"type\":\"double\"}],\"output\":{\"name\":\"out_name\",\"type\":\"int\",\"struct\":\"vector\"}}";
List<Object> outputResponse = Lists.newArrayList(1, 2);
PowerMockito.when(SystemUtils.getEnvironmentVariable("SAGEMAKER_SPARKML_SCHEMA")).thenReturn(schemaInJson);
PowerMockito
.when(ScalaUtils.getJavaObjectIteratorFromArrayRow(Mockito.any(ArrayRow.class), Mockito.anyString()))
.thenReturn(outputResponse.iterator());
final ResponseEntity<String> output = controller.transformRequestCsv("1,2.0".getBytes(), "text/csv");
Assert.assertEquals(output.getBody(), "1,2");
}
@Test
public void testCsvApiWithNullInput() {
    // Null CSV payload should yield 204 NO_CONTENT.
    PowerMockito.when(SystemUtils.getEnvironmentVariable("SAGEMAKER_SPARKML_SCHEMA")).thenReturn(schemaInJson);
    final ResponseEntity<String> output = controller.transformRequestCsv(null, "text/csv");
    // assertEquals(expected, actual) — expected value goes first.
    Assert.assertEquals(HttpStatus.NO_CONTENT, output.getStatusCode());
}
@Test
public void testListValueMLeapThrowsExceptionCsvApi() {
    // Same MLeap failure path as the JSON endpoint, exercised through the CSV endpoint.
    schemaInJson = "{\"input\":[{\"name\":\"test_name_1\",\"type\":\"int\"},{\"name\":\"test_name_2\","
        + "\"type\":\"double\"}],\"output\":{\"name\":\"out_name\",\"type\":\"int\",\"struct\":\"vector\"}}";
    PowerMockito.when(SystemUtils.getEnvironmentVariable("SAGEMAKER_SPARKML_SCHEMA")).thenReturn(schemaInJson);
    PowerMockito
        .when(ScalaUtils.getJavaObjectIteratorFromArrayRow(Mockito.any(ArrayRow.class), Mockito.anyString()))
        .thenThrow(new RuntimeException("input data is not valid"));
    final ResponseEntity<String> output = controller.transformRequestCsv("1,2.0".getBytes(), "text/csv");
    // assertEquals(expected, actual) — the original had the arguments swapped.
    Assert.assertEquals(HttpStatus.BAD_REQUEST, output.getStatusCode());
    Assert.assertEquals("input data is not valid", output.getBody());
}
@Test
public void testJsonLinesApiWithListInputCsvOutput() {
    // Three-column schema (int, double, string) producing a vector output, supplied via the environment.
    schemaInJson = "{"
        + "\"input\":["
        + "{\"name\":\"test_name_1\",\"type\":\"int\"},"
        + "{\"name\":\"test_name_2\",\"type\":\"double\"},"
        + "{\"name\":\"test_name_3\",\"type\":\"string\"}"
        + "],"
        + "\"output\":"
        + "{\"name\":\"out_name\",\"type\":\"int\",\"struct\":\"vector\"}"
        + "}";
    PowerMockito.when(SystemUtils.getEnvironmentVariable("SAGEMAKER_SPARKML_SCHEMA"))
        .thenReturn(schemaInJson);
    // Eight stubbed model outputs, consumed one per transformed row across the three requests below;
    // the chained thenReturn order must match the row order.
    List<Object> r1 = Lists.newArrayList(1, 2);
    List<Object> r2 = Lists.newArrayList(3, 4);
    List<Object> r3 = Lists.newArrayList(5, 6);
    List<Object> r4 = Lists.newArrayList(7, 8);
    List<Object> r5 = Lists.newArrayList(9, 10);
    List<Object> r6 = Lists.newArrayList(11, 12);
    List<Object> r7 = Lists.newArrayList(13, 14);
    List<Object> r8 = Lists.newArrayList(15, 16);
    PowerMockito
        .when(ScalaUtils.getJavaObjectIteratorFromArrayRow(Mockito.any(ArrayRow.class), Mockito.anyString()))
        .thenReturn(r1.iterator())
        .thenReturn(r2.iterator())
        .thenReturn(r3.iterator())
        .thenReturn(r4.iterator())
        .thenReturn(r5.iterator())
        .thenReturn(r6.iterator())
        .thenReturn(r7.iterator())
        .thenReturn(r8.iterator());
    // One JSON line carrying two rows.
    final ResponseEntity<String> batchOutput = controller.transformRequestJsonLines(
        "{\"data\":[[1,2.0,\"TEST1\"], [2,3.0,\"TEST\"]]}".getBytes(), "text/csv");
    Assert.assertEquals("[[1,2], [3,4]]", batchOutput.getBody());
    // Two JSON lines, one row each.
    final ResponseEntity<String> perLineOutput = controller.transformRequestJsonLines(
        "{\"data\":[1,2.0,\"TEST1\"]}\n{\"data\":[2,3.0,\"TEST\"]}".getBytes(), "text/csv");
    Assert.assertEquals("[[5,6], [7,8]]", perLineOutput.getBody());
    // Mixed: a two-row line followed by two single-row lines.
    final ResponseEntity<String> mixedOutput = controller.transformRequestJsonLines(
        ("{\"data\":[[1,2.0,\"TEST1\"], [2,3.0,\"TEST\"]]}\n"
            + "{\"data\":[1,2.0,\"TEST1\"]}\n"
            + "{\"data\":[2,3.0,\"TEST\"]}"
        ).getBytes(),
        "text/csv");
    Assert.assertEquals("[[9,10], [11,12], [13,14], [15,16]]", mixedOutput.getBody());
}
@Test
public void testJsonLinesApiWithListInputJsonOutput() {
    // Same three-column schema, but the response is requested as application/jsonlines.
    schemaInJson = "{"
        + "\"input\":["
        + "{\"name\":\"test_name_1\",\"type\":\"int\"},"
        + "{\"name\":\"test_name_2\",\"type\":\"double\"},"
        + "{\"name\":\"test_name_3\",\"type\":\"string\"}"
        + "],"
        + "\"output\":"
        + "{\"name\":\"out_name\",\"type\":\"int\",\"struct\":\"vector\"}"
        + "}";
    PowerMockito.when(SystemUtils.getEnvironmentVariable("SAGEMAKER_SPARKML_SCHEMA"))
        .thenReturn(schemaInJson);
    // Eight stubbed outputs, consumed in order by the rows of the three requests below.
    List<Object> r1 = Lists.newArrayList(1, 2);
    List<Object> r2 = Lists.newArrayList(3, 4);
    List<Object> r3 = Lists.newArrayList(5, 6);
    List<Object> r4 = Lists.newArrayList(7, 8);
    List<Object> r5 = Lists.newArrayList(9, 10);
    List<Object> r6 = Lists.newArrayList(11, 12);
    List<Object> r7 = Lists.newArrayList(13, 14);
    List<Object> r8 = Lists.newArrayList(15, 16);
    PowerMockito
        .when(ScalaUtils.getJavaObjectIteratorFromArrayRow(Mockito.any(ArrayRow.class), Mockito.anyString()))
        .thenReturn(r1.iterator())
        .thenReturn(r2.iterator())
        .thenReturn(r3.iterator())
        .thenReturn(r4.iterator())
        .thenReturn(r5.iterator())
        .thenReturn(r6.iterator())
        .thenReturn(r7.iterator())
        .thenReturn(r8.iterator());
    // One line holding two rows.
    final ResponseEntity<String> batchOutput = controller.transformRequestJsonLines(
        "{\"data\":[[1,2.0,\"TEST1\"], [2,3.0,\"TEST\"]]}".getBytes(),
        "application/jsonlines");
    Assert.assertEquals("[[{\"features\":[1,2]}], [{\"features\":[3,4]}]]", batchOutput.getBody());
    // Two lines, one row each.
    final ResponseEntity<String> perLineOutput = controller.transformRequestJsonLines(
        "{\"data\":[1,2.0,\"TEST1\"]}\n{\"data\":[2,3.0,\"TEST\"]}".getBytes(),
        "application/jsonlines");
    Assert.assertEquals("[[{\"features\":[5,6]}], [{\"features\":[7,8]}]]", perLineOutput.getBody());
    // Mixed: a two-row line followed by two single-row lines.
    final ResponseEntity<String> mixedOutput = controller.transformRequestJsonLines(
        ("{\"data\":[[1,2.0,\"TEST1\"], [2,3.0,\"TEST\"]]}\n"
            + "{\"data\":[1,2.0,\"TEST1\"]}\n"
            + "{\"data\":[2,3.0,\"TEST\"]}"
        ).getBytes(),
        "application/jsonlines");
    Assert.assertEquals(
        "[[{\"features\":[9,10]}], [{\"features\":[11,12]}], "
            + "[{\"features\":[13,14]}], [{\"features\":[15,16]}]]",
        mixedOutput.getBody());
}
@Test
public void testJsonLinesApiWithListInputJsonTextOutput() {
    // Same schema, with the text flavour of jsonlines output (space-joined "source" strings).
    schemaInJson = "{"
        + "\"input\":["
        + "{\"name\":\"test_name_1\",\"type\":\"int\"},"
        + "{\"name\":\"test_name_2\",\"type\":\"double\"},"
        + "{\"name\":\"test_name_3\",\"type\":\"string\"}"
        + "],"
        + "\"output\":"
        + "{\"name\":\"out_name\",\"type\":\"int\",\"struct\":\"vector\"}"
        + "}";
    PowerMockito.when(SystemUtils.getEnvironmentVariable("SAGEMAKER_SPARKML_SCHEMA"))
        .thenReturn(schemaInJson);
    // Eight stubbed outputs, consumed in order by the rows of the three requests below.
    List<Object> r1 = Lists.newArrayList(1, 2);
    List<Object> r2 = Lists.newArrayList(3, 4);
    List<Object> r3 = Lists.newArrayList(5, 6);
    List<Object> r4 = Lists.newArrayList(7, 8);
    List<Object> r5 = Lists.newArrayList(9, 10);
    List<Object> r6 = Lists.newArrayList(11, 12);
    List<Object> r7 = Lists.newArrayList(13, 14);
    List<Object> r8 = Lists.newArrayList(15, 16);
    PowerMockito
        .when(ScalaUtils.getJavaObjectIteratorFromArrayRow(Mockito.any(ArrayRow.class), Mockito.anyString()))
        .thenReturn(r1.iterator())
        .thenReturn(r2.iterator())
        .thenReturn(r3.iterator())
        .thenReturn(r4.iterator())
        .thenReturn(r5.iterator())
        .thenReturn(r6.iterator())
        .thenReturn(r7.iterator())
        .thenReturn(r8.iterator());
    // One line holding two rows.
    final ResponseEntity<String> batchOutput = controller.transformRequestJsonLines(
        "{\"data\":[[1,2.0,\"TEST1\"], [2,3.0,\"TEST\"]]}".getBytes(),
        "application/jsonlines;data=text");
    Assert.assertEquals("[[{\"source\":\"1 2\"}], [{\"source\":\"3 4\"}]]", batchOutput.getBody());
    // Two lines, one row each.
    final ResponseEntity<String> perLineOutput = controller.transformRequestJsonLines(
        "{\"data\":[1,2.0,\"TEST1\"]}\n{\"data\":[2,3.0,\"TEST\"]}".getBytes(),
        "application/jsonlines;data=text");
    Assert.assertEquals("[[{\"source\":\"5 6\"}], [{\"source\":\"7 8\"}]]", perLineOutput.getBody());
    // Mixed: a two-row line followed by two single-row lines.
    final ResponseEntity<String> mixedOutput = controller.transformRequestJsonLines(
        ("{\"data\":[[1,2.0,\"TEST1\"], [2,3.0,\"TEST\"]]}\n{\"data\":[1,2.0,\"TEST1\"]}\n{\"data\":[2,3.0,\"TEST\"]}"
        ).getBytes(),
        "application/jsonlines;data=text");
    Assert.assertEquals(
        "[[{\"source\":\"9 10\"}], [{\"source\":\"11 12\"}], "
            + "[{\"source\":\"13 14\"}], [{\"source\":\"15 16\"}]]",
        mixedOutput.getBody());
}
@Test
public void testProcessInputDataForJsonLines() throws IOException {
    // The first jsonlines record embeds the schema inline; subsequent records reuse it.
    String jsonLines =
        "{\"schema\":"
            + "{\"input\":[{\"name\":\"test_name_1\",\"type\":\"int\"},{\"name\":\"test_name_2\","
            + "\"type\":\"double\"},{\"name\":\"test_name_3\",\"type\":\"string\"}],"
            + "\"output\":{\"name\":\"out_name\",\"type\":\"int\",\"struct\":\"vector\"}},"
            + "\"data\":[[1,2.0,\"TEST1\"], [2,3.0,\"TEST\"]]}"
            + "\n{\"data\":[1,2.0,\"TEST1\"]}"
            + "\n{\"data\":[2,3.0,\"TEST\"]}";
    // Four stubbed outputs, one per row, consumed in order.
    List<Object> r1 = Lists.newArrayList(1, 2);
    List<Object> r2 = Lists.newArrayList(3, 4);
    List<Object> r3 = Lists.newArrayList(5, 6);
    List<Object> r4 = Lists.newArrayList(7, 8);
    PowerMockito
        .when(ScalaUtils.getJavaObjectIteratorFromArrayRow(Mockito.any(ArrayRow.class), Mockito.anyString()))
        .thenReturn(r1.iterator())
        .thenReturn(r2.iterator())
        .thenReturn(r3.iterator())
        .thenReturn(r4.iterator());
    final ResponseEntity<String> response = controller.transformRequestJsonLines(
        jsonLines.getBytes(), "text/csv");
    Assert.assertEquals("[[1,2], [3,4], [5,6], [7,8]]", response.getBody());
}
@Test
public void testJsonLinesApiWithListInputThrowsException() {
    // An MLeap failure during jsonlines transformation must surface as 400 with the error text.
    schemaInJson = "{\"input\":[{\"name\":\"test_name_1\",\"type\":\"int\"},{\"name\":\"test_name_2\","
        + "\"type\":\"double\"},{\"name\":\"test_name_3\",\"type\":\"string\"}],\"output\":{\"name\":\"out_name\",\"type\":\"int\",\"struct\":\"vector\"}}";
    PowerMockito.when(SystemUtils.getEnvironmentVariable("SAGEMAKER_SPARKML_SCHEMA")).thenReturn(schemaInJson);
    PowerMockito
        .when(ScalaUtils.getJavaObjectIteratorFromArrayRow(Mockito.any(ArrayRow.class), Mockito.anyString()))
        .thenThrow(new RuntimeException("input data is not valid"));
    final ResponseEntity<String> output = controller.transformRequestJsonLines(
        "{\"data\":[[1,2.0,\"TEST1\"], [2,3.0,\"TEST\"]]}".getBytes(), "text/csv");
    // assertEquals(expected, actual) — the original had the arguments swapped.
    Assert.assertEquals(HttpStatus.BAD_REQUEST, output.getStatusCode());
    Assert.assertEquals("input data is not valid", output.getBody());
}
@Test
public void testJsonLinesApiWithNullInput() {
    // A null jsonlines payload is rejected as 400 BAD_REQUEST.
    PowerMockito.when(SystemUtils.getEnvironmentVariable("SAGEMAKER_SPARKML_SCHEMA")).thenReturn(schemaInJson);
    final ResponseEntity<String> output = controller.transformRequestJsonLines(null, "text/csv");
    // assertEquals(expected, actual) — expected value goes first.
    Assert.assertEquals(HttpStatus.BAD_REQUEST, output.getStatusCode());
}
@Test
public void testJsonLinesApiWithEmptyInput() {
    // An empty (zero-byte) payload yields 204 NO_CONTENT, unlike null which is 400.
    PowerMockito.when(SystemUtils.getEnvironmentVariable("SAGEMAKER_SPARKML_SCHEMA")).thenReturn(schemaInJson);
    final ResponseEntity<String> output = controller.transformRequestJsonLines(new byte[0], "text/csv");
    // assertEquals(expected, actual) — expected value goes first.
    Assert.assertEquals(HttpStatus.NO_CONTENT, output.getStatusCode());
}
@Test
public void testParseAcceptEmptyFromRequestEnvironmentPresent() {
    // With no Accept header, the environment default wins.
    PowerMockito.when(SystemUtils.getEnvironmentVariable("SAGEMAKER_DEFAULT_INVOCATIONS_ACCEPT"))
        .thenReturn("application/jsonlines;data=text");
    // assertEquals(expected, actual) — the original had the arguments swapped.
    Assert.assertEquals("application/jsonlines;data=text", controller.retrieveAndVerifyAccept(null));
}
@Test
public void testParseAcceptAnyFromRequestEnvironmentPresent() {
    // A wildcard Accept header also falls back to the environment default.
    PowerMockito.when(SystemUtils.getEnvironmentVariable("SAGEMAKER_DEFAULT_INVOCATIONS_ACCEPT"))
        .thenReturn("application/jsonlines;data=text");
    // assertEquals(expected, actual) — the original had the arguments swapped.
    Assert.assertEquals("application/jsonlines;data=text", controller.retrieveAndVerifyAccept("*/*"));
}
@Test
public void testParseAcceptEmptyFromRequestEnvironmentNotPresent() {
    // No Accept header and no environment default: text/csv is the hard-coded fallback.
    Assert.assertEquals("text/csv", controller.retrieveAndVerifyAccept(null));
}
@Test
public void testParseAcceptAnyFromRequestEnvironmentNotPresent() {
    // Wildcard Accept with no environment default also falls back to text/csv.
    Assert.assertEquals("text/csv", controller.retrieveAndVerifyAccept("*/*"));
}
@Test(expected = IllegalArgumentException.class)
public void testInvalidAcceptInEnvironment() {
    // "application/json" is not a supported Accept value, so verification must throw.
    PowerMockito.when(SystemUtils.getEnvironmentVariable("SAGEMAKER_DEFAULT_INVOCATIONS_ACCEPT"))
        .thenReturn("application/json");
    controller.retrieveAndVerifyAccept("application/json");
}
@Test
public void testSchemaPresentInRequestAndEnvironment() throws IOException {
    // When the request carries a schema, it takes precedence over the environment copy.
    inputColumns = Lists.newArrayList(new ColumnSchema("name_1", "type_1", "struct_1"),
        new ColumnSchema("name_2", "type_2", "struct_2"));
    outputColumn = new ColumnSchema("name_out_1", "type_out_1", "struct_out_1");
    DataSchema ds = new DataSchema(inputColumns, outputColumn);
    PowerMockito.when(SystemUtils.getEnvironmentVariable("SAGEMAKER_SPARKML_SCHEMA")).thenReturn(schemaInJson);
    DataSchema outputSchema = controller.retrieveAndVerifySchema(ds, mapper);
    // assertEquals(expected, actual) — the original had the arguments swapped.
    Assert.assertEquals(2, outputSchema.getInput().size());
    Assert.assertEquals("name_1", outputSchema.getInput().get(0).getName());
    Assert.assertEquals("name_out_1", outputSchema.getOutput().getName());
}
@Test
public void testSchemaPresentOnlyInEnvironment() throws IOException {
    // With no schema in the request, the environment-provided schema is parsed and used.
    schemaInJson = "{\"input\":[{\"name\":\"i_1\",\"type\":\"int\"}],\"output\":{\"name\":\"o_1\","
        + "\"type\":\"double\"}}";
    PowerMockito.when(SystemUtils.getEnvironmentVariable("SAGEMAKER_SPARKML_SCHEMA")).thenReturn(schemaInJson);
    DataSchema outputSchema = controller.retrieveAndVerifySchema(null, mapper);
    // assertEquals(expected, actual) — the original had the arguments swapped.
    Assert.assertEquals(1, outputSchema.getInput().size());
    Assert.assertEquals("i_1", outputSchema.getInput().get(0).getName());
    Assert.assertEquals("o_1", outputSchema.getOutput().getName());
}
@Test(expected = RuntimeException.class)
public void testSchemaAbsentEverywhere() throws IOException {
    // No schema in the request and none in the environment — retrieval must fail.
    controller.retrieveAndVerifySchema(null, mapper);
}
}
| 1,557 |
0 |
Create_ds/sagemaker-sparkml-serving-container/src/test/java/com/amazonaws/sagemaker
|
Create_ds/sagemaker-sparkml-serving-container/src/test/java/com/amazonaws/sagemaker/type/BasicDataTypeTest.java
|
/*
* Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package com.amazonaws.sagemaker.type;
import org.junit.Assert;
import org.junit.Test;
/**
 * Guards the wire names of the {@code BasicDataType} string constants used in request/response
 * schemas; a rename would silently break schema parsing for existing clients.
 */
public class BasicDataTypeTest {

    @Test
    public void testBasicDataType() {
        // assertEquals(expected, actual): the literal is the expected value, the constant the actual.
        // The original had the arguments swapped, producing misleading failure messages.
        Assert.assertEquals("boolean", BasicDataType.BOOLEAN);
        Assert.assertEquals("int", BasicDataType.INTEGER);
        Assert.assertEquals("float", BasicDataType.FLOAT);
        Assert.assertEquals("long", BasicDataType.LONG);
        Assert.assertEquals("double", BasicDataType.DOUBLE);
        Assert.assertEquals("short", BasicDataType.SHORT);
        Assert.assertEquals("byte", BasicDataType.BYTE);
        Assert.assertEquals("string", BasicDataType.STRING);
    }
}
| 1,558 |
0 |
Create_ds/sagemaker-sparkml-serving-container/src/test/java/com/amazonaws/sagemaker
|
Create_ds/sagemaker-sparkml-serving-container/src/test/java/com/amazonaws/sagemaker/type/AdditionalMediaTypeTest.java
|
/*
* Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package com.amazonaws.sagemaker.type;
import org.junit.Assert;
import org.junit.Test;
/**
 * Guards the literal values of the {@code AdditionalMediaType} constants; these are part of the
 * HTTP contract (Content-Type / Accept values) and must not drift.
 */
public class AdditionalMediaTypeTest {

    @Test
    public void testAdditionalMimeType() {
        // assertEquals(expected, actual): the literal is the expected value, the constant the actual.
        // The original had the arguments swapped, producing misleading failure messages.
        Assert.assertEquals("text/csv", AdditionalMediaType.TEXT_CSV_VALUE);
        Assert.assertEquals("application/jsonlines", AdditionalMediaType.APPLICATION_JSONLINES_VALUE);
        Assert.assertEquals("application/jsonlines;data=text", AdditionalMediaType.APPLICATION_JSONLINES_TEXT_VALUE);
    }
}
| 1,559 |
0 |
Create_ds/sagemaker-sparkml-serving-container/src/test/java/com/amazonaws/sagemaker
|
Create_ds/sagemaker-sparkml-serving-container/src/test/java/com/amazonaws/sagemaker/type/DataStructureTypeTest.java
|
/*
* Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package com.amazonaws.sagemaker.type;
import org.junit.Assert;
import org.junit.Test;
/**
 * Guards the wire names of the {@code DataStructureType} constants used in schema "struct" fields.
 */
public class DataStructureTypeTest {

    @Test
    public void testStructureType() {
        // assertEquals(expected, actual): the literal is the expected value, the constant the actual.
        // The original had the arguments swapped, producing misleading failure messages.
        Assert.assertEquals("basic", DataStructureType.BASIC);
        Assert.assertEquals("vector", DataStructureType.VECTOR);
        Assert.assertEquals("array", DataStructureType.ARRAY);
    }
}
| 1,560 |
0 |
Create_ds/sagemaker-sparkml-serving-container/src/test/java/com/amazonaws/sagemaker
|
Create_ds/sagemaker-sparkml-serving-container/src/test/java/com/amazonaws/sagemaker/helper/DataConversionHelperTest.java
|
/*
* Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package com.amazonaws.sagemaker.helper;
import com.amazonaws.sagemaker.dto.DataSchema;
import com.amazonaws.sagemaker.dto.SageMakerRequestObject;
import com.amazonaws.sagemaker.type.BasicDataType;
import com.amazonaws.sagemaker.type.DataStructureType;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Lists;
import ml.combust.mleap.core.types.ListType;
import ml.combust.mleap.core.types.ScalarType;
import ml.combust.mleap.core.types.TensorType;
import ml.combust.mleap.runtime.frame.ArrayRow;
import ml.combust.mleap.runtime.frame.DefaultLeapFrame;
import ml.combust.mleap.runtime.javadsl.LeapFrameBuilder;
import ml.combust.mleap.runtime.javadsl.LeapFrameBuilderSupport;
import org.apache.commons.io.IOUtils;
import org.apache.spark.ml.linalg.Vectors;
import org.junit.Assert;
import org.junit.Test;
import java.io.IOException;
import java.util.List;
/**
 * Unit tests for {@code DataConversionHelper}: CSV parsing, leap-frame construction and the
 * conversions between schema-declared types and MLeap/Java types.
 *
 * <p>Improvements over the original: the deprecated boxed-primitive constructors
 * ({@code new Integer("1")} etc., deprecated since Java 9) are replaced with literals /
 * {@code valueOf}, and {@code assertEquals} arguments are in (expected, actual) order.
 */
public class DataConversionHelperTest {

    private ObjectMapper mapper = new ObjectMapper();
    private DataConversionHelper dataConversionHelper = new DataConversionHelper(new LeapFrameBuilderSupport(),
        new LeapFrameBuilder());

    @Test
    public void testParseCsvToObjectList() throws IOException {
        String csvInput = "2,C,34.5";
        String inputJson = IOUtils
            .toString(this.getClass().getResourceAsStream("../dto/basic_input_schema.json"), "UTF-8");
        DataSchema schema = mapper.readValue(inputJson, DataSchema.class);
        List<Object> expectedOutput = Lists.newArrayList(2, "C", 34.5);
        Assert.assertEquals(expectedOutput, dataConversionHelper.convertCsvToObjectList(csvInput, schema));
    }

    @Test
    public void testParseCsvQuotesToObjectList() throws IOException {
        // Quoted fields must parse identically to unquoted ones.
        String csvInput = "2,\"C\",34.5";
        String inputJson = IOUtils
            .toString(this.getClass().getResourceAsStream("../dto/basic_input_schema.json"), "UTF-8");
        DataSchema schema = mapper.readValue(inputJson, DataSchema.class);
        List<Object> expectedOutput = Lists.newArrayList(2, "C", 34.5);
        Assert.assertEquals(expectedOutput, dataConversionHelper.convertCsvToObjectList(csvInput, schema));
    }

    @Test
    public void testCastingInputToLeapFrame() throws Exception {
        // Smoke test: a complete request converts into a leap frame with a non-null schema/dataset.
        String inputJson = IOUtils
            .toString(this.getClass().getResourceAsStream("../dto/complete_input.json"), "UTF-8");
        SageMakerRequestObject sro = mapper.readValue(inputJson, SageMakerRequestObject.class);
        DefaultLeapFrame leapframeTest = dataConversionHelper.convertInputToLeapFrame(sro.getSchema(), sro.getData());
        Assert.assertNotNull(leapframeTest.schema());
        Assert.assertNotNull(leapframeTest.dataset());
    }

    @Test
    public void testCastingMLeapBasicTypeToJavaType() {
        ArrayRow testRow = new ArrayRow(Lists.newArrayList(Integer.valueOf(1)));
        Assert.assertEquals(Integer.valueOf(1),
            dataConversionHelper.convertMLeapBasicTypeToJavaType(testRow, BasicDataType.INTEGER));
        // NOTE(review): the FLOAT path is exercised with a Double-backed row and a Double expected
        // value, mirroring the original test — confirm the helper really yields Double for FLOAT.
        testRow = new ArrayRow(Lists.newArrayList(Double.valueOf(1.0)));
        Assert.assertEquals(Double.valueOf(1.0),
            dataConversionHelper.convertMLeapBasicTypeToJavaType(testRow, BasicDataType.FLOAT));
        testRow = new ArrayRow(Lists.newArrayList(Long.valueOf(1L)));
        Assert.assertEquals(Long.valueOf(1L),
            dataConversionHelper.convertMLeapBasicTypeToJavaType(testRow, BasicDataType.LONG));
        testRow = new ArrayRow(Lists.newArrayList(Double.valueOf(1.0)));
        Assert.assertEquals(Double.valueOf(1.0),
            dataConversionHelper.convertMLeapBasicTypeToJavaType(testRow, BasicDataType.DOUBLE));
        testRow = new ArrayRow(Lists.newArrayList(Short.valueOf((short) 1)));
        Assert.assertEquals(Short.valueOf((short) 1),
            dataConversionHelper.convertMLeapBasicTypeToJavaType(testRow, BasicDataType.SHORT));
        testRow = new ArrayRow(Lists.newArrayList(Byte.valueOf((byte) 1)));
        Assert.assertEquals(Byte.valueOf((byte) 1),
            dataConversionHelper.convertMLeapBasicTypeToJavaType(testRow, BasicDataType.BYTE));
        // Boolean.valueOf("1") is false — only the string "true" maps to true; value kept as in the
        // original test.
        testRow = new ArrayRow(Lists.newArrayList(Boolean.valueOf("1")));
        Assert.assertEquals(Boolean.valueOf("1"),
            dataConversionHelper.convertMLeapBasicTypeToJavaType(testRow, BasicDataType.BOOLEAN));
        testRow = new ArrayRow(Lists.newArrayList("1"));
        Assert.assertEquals("1", dataConversionHelper.convertMLeapBasicTypeToJavaType(testRow, BasicDataType.STRING));
    }

    @Test(expected = IllegalArgumentException.class)
    public void testCastingMleapBasicTypeToJavaTypeWrongInput() {
        // An unknown type name must be rejected.
        ArrayRow testRow = new ArrayRow(Lists.newArrayList(Integer.valueOf(1)));
        Assert.assertEquals(Integer.valueOf(1),
            dataConversionHelper.convertMLeapBasicTypeToJavaType(testRow, "intvalue"));
    }

    @Test
    public void testCastingInputToJavaTypeSingle() {
        Assert.assertEquals(Integer.valueOf(1), dataConversionHelper
            .convertInputDataToJavaType(BasicDataType.INTEGER, DataStructureType.BASIC, Integer.valueOf(1)));
        Assert.assertEquals(Float.valueOf(1.0f), dataConversionHelper
            .convertInputDataToJavaType(BasicDataType.FLOAT, DataStructureType.BASIC, Float.valueOf(1.0f)));
        Assert.assertEquals(Double.valueOf(1.0), dataConversionHelper
            .convertInputDataToJavaType(BasicDataType.DOUBLE, DataStructureType.BASIC, Double.valueOf(1.0)));
        Assert.assertEquals(Byte.valueOf((byte) 1),
            dataConversionHelper
                .convertInputDataToJavaType(BasicDataType.BYTE, DataStructureType.BASIC, Byte.valueOf((byte) 1)));
        // A null structure should behave like "basic".
        Assert.assertEquals(Long.valueOf(1L),
            dataConversionHelper.convertInputDataToJavaType(BasicDataType.LONG, null, Long.valueOf(1L)));
        Assert.assertEquals(Short.valueOf((short) 1),
            dataConversionHelper.convertInputDataToJavaType(BasicDataType.SHORT, null, Short.valueOf((short) 1)));
        Assert.assertEquals("1", dataConversionHelper.convertInputDataToJavaType(BasicDataType.STRING, null, "1"));
        Assert.assertEquals(Boolean.valueOf("1"),
            dataConversionHelper.convertInputDataToJavaType(BasicDataType.BOOLEAN, null, Boolean.valueOf("1")));
    }

    @Test
    public void testCastingInputToJavaTypeList() {
        // Vector structure with double type must yield a Spark dense vector.
        Assert.assertEquals(Vectors.dense(new double[]{1.0, 2.0}), dataConversionHelper
            .convertInputDataToJavaType(BasicDataType.DOUBLE, DataStructureType.VECTOR,
                Lists.newArrayList(Double.valueOf(1.0), Double.valueOf(2.0))));
        Assert.assertEquals(Lists.newArrayList(1L, 2L), dataConversionHelper
            .convertInputDataToJavaType(BasicDataType.LONG, DataStructureType.ARRAY,
                Lists.newArrayList(Long.valueOf(1L), Long.valueOf(2L))));
        Assert.assertEquals(Lists.newArrayList(Short.valueOf((short) 1)), dataConversionHelper
            .convertInputDataToJavaType(BasicDataType.SHORT, DataStructureType.ARRAY,
                Lists.newArrayList(Short.valueOf((short) 1))));
        Assert.assertEquals(Lists.newArrayList("1"), dataConversionHelper
            .convertInputDataToJavaType(BasicDataType.STRING, DataStructureType.ARRAY, Lists.newArrayList("1")));
        Assert.assertEquals(Lists.newArrayList(Boolean.valueOf("1")), dataConversionHelper
            .convertInputDataToJavaType(BasicDataType.BOOLEAN, DataStructureType.ARRAY,
                Lists.newArrayList(Boolean.valueOf("1"))));
    }

    @Test(expected = IllegalArgumentException.class)
    public void testConvertInputToJavaTypeNonDoibleVector() {
        // NOTE(review): duplicate of testCastingInputToJavaTypeNonList, and "Doible" is a typo for
        // "Double"; the name is kept unchanged to preserve the externally visible test set.
        dataConversionHelper
            .convertInputDataToJavaType(BasicDataType.INTEGER, DataStructureType.VECTOR, Integer.valueOf(1));
    }

    @Test(expected = IllegalArgumentException.class)
    public void testCastingInputToJavaTypeNonList() {
        // A scalar payload declared with a VECTOR structure is invalid.
        dataConversionHelper
            .convertInputDataToJavaType(BasicDataType.INTEGER, DataStructureType.VECTOR, Integer.valueOf(1));
    }

    @Test(expected = IllegalArgumentException.class)
    public void testCastingInputToJavaTypeWrongType() {
        dataConversionHelper.convertInputDataToJavaType("intvalue", DataStructureType.BASIC, Integer.valueOf(1));
    }

    @Test(expected = IllegalArgumentException.class)
    public void testCastingInputToJavaTypeListWrongType() {
        dataConversionHelper.convertInputDataToJavaType("intvalue", DataStructureType.VECTOR, Lists.newArrayList(1, 2));
    }

    @Test
    public void testCastingInputToMLeapType() {
        // basic/null structure -> ScalarType, vector -> TensorType, array -> ListType.
        Assert.assertTrue(dataConversionHelper
            .convertInputToMLeapInputType(BasicDataType.INTEGER, DataStructureType.BASIC) instanceof ScalarType);
        Assert.assertTrue(
            dataConversionHelper.convertInputToMLeapInputType(BasicDataType.FLOAT, null) instanceof ScalarType);
        Assert.assertTrue(dataConversionHelper
            .convertInputToMLeapInputType(BasicDataType.DOUBLE, DataStructureType.VECTOR) instanceof TensorType);
        Assert.assertTrue(dataConversionHelper
            .convertInputToMLeapInputType(BasicDataType.LONG, DataStructureType.ARRAY) instanceof ListType);
        Assert.assertTrue(dataConversionHelper
            .convertInputToMLeapInputType(BasicDataType.STRING, DataStructureType.BASIC) instanceof ScalarType);
        Assert.assertTrue(
            dataConversionHelper.convertInputToMLeapInputType(BasicDataType.SHORT, null) instanceof ScalarType);
        Assert.assertTrue(dataConversionHelper
            .convertInputToMLeapInputType(BasicDataType.BYTE, DataStructureType.ARRAY) instanceof ListType);
        Assert.assertTrue(dataConversionHelper
            .convertInputToMLeapInputType(BasicDataType.BOOLEAN, DataStructureType.VECTOR) instanceof TensorType);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testCastingInputToMLeapTypeWrongType() {
        dataConversionHelper.convertInputToMLeapInputType("intvalue", DataStructureType.VECTOR);
    }
}
| 1,561 |
0 |
Create_ds/sagemaker-sparkml-serving-container/src/test/java/com/amazonaws/sagemaker
|
Create_ds/sagemaker-sparkml-serving-container/src/test/java/com/amazonaws/sagemaker/helper/ResponseHelperTest.java
|
/*
* Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package com.amazonaws.sagemaker.helper;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Lists;
import java.util.List;
import java.util.Objects;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.springframework.http.HttpHeaders;
import org.springframework.http.ResponseEntity;
/**
 * Unit tests for {@code ResponseHelper}: single-value and list responses across the supported
 * Accept types, including fallback behaviour for unsupported Accept values.
 *
 * <p>Improvements over the original: deprecated boxed-primitive constructors replaced with
 * {@code valueOf}, and {@code assertEquals} arguments put in (expected, actual) order.
 */
public class ResponseHelperTest {

    private List<Object> dummyResponse = Lists.newArrayList();
    private ResponseHelper responseHelperTest = new ResponseHelper(new ObjectMapper());

    @Before
    public void setup() {
        dummyResponse = Lists.newArrayList(Integer.valueOf(1), Float.valueOf(0.2f));
    }

    @Test
    public void testSingleOutput() {
        ResponseEntity<String> outputTest = responseHelperTest.sendResponseForSingleValue("1", "text/csv");
        Assert.assertEquals("text/csv",
            Objects.requireNonNull(outputTest.getHeaders().get(HttpHeaders.CONTENT_TYPE)).get(0));
        Assert.assertEquals("1", outputTest.getBody());
    }

    @Test
    public void testSingleJsonlines() {
        ResponseEntity<String> outputTest = responseHelperTest
            .sendResponseForSingleValue("1", "application/jsonlines");
        Assert.assertEquals("application/jsonlines",
            Objects.requireNonNull(outputTest.getHeaders().get(HttpHeaders.CONTENT_TYPE)).get(0));
        Assert.assertEquals("1", outputTest.getBody());
    }

    @Test
    public void testSingleOutputNoContentType() {
        // No Accept given: text/csv is the default content type.
        ResponseEntity<String> outputTest = responseHelperTest.sendResponseForSingleValue("1", null);
        Assert.assertEquals("text/csv",
            Objects.requireNonNull(outputTest.getHeaders().get(HttpHeaders.CONTENT_TYPE)).get(0));
        Assert.assertEquals("1", outputTest.getBody());
    }

    @Test
    public void testListOutputCsv() throws JsonProcessingException {
        ResponseEntity<String> outputTest = responseHelperTest
            .sendResponseForList(dummyResponse.iterator(), "text/csv");
        Assert.assertEquals("1,0.2", outputTest.getBody());
        Assert.assertEquals("text/csv",
            Objects.requireNonNull(outputTest.getHeaders().get(HttpHeaders.CONTENT_TYPE)).get(0));
    }

    @Test
    public void testListOutputJsonlines() throws JsonProcessingException {
        ResponseEntity<String> outputTest = responseHelperTest
            .sendResponseForList(dummyResponse.iterator(), "application/jsonlines");
        Assert.assertEquals("{\"features\":[1,0.2]}", outputTest.getBody());
        Assert.assertEquals("application/jsonlines",
            Objects.requireNonNull(outputTest.getHeaders().get(HttpHeaders.CONTENT_TYPE)).get(0));
    }

    @Test
    public void testTextOutputJsonlines() throws JsonProcessingException {
        dummyResponse = Lists.newArrayList("this", "is", "spark", "ml", "server");
        ResponseEntity<String> outputTest = responseHelperTest
            .sendResponseForList(dummyResponse.iterator(), "application/jsonlines;data=text");
        Assert.assertEquals("{\"source\":\"this is spark ml server\"}", outputTest.getBody());
        // The data=text parameter is not echoed back in the response Content-Type.
        Assert.assertEquals("application/jsonlines",
            Objects.requireNonNull(outputTest.getHeaders().get(HttpHeaders.CONTENT_TYPE)).get(0));
    }

    @Test
    public void testListOutputInvalidAccept() throws JsonProcessingException {
        // Unsupported Accept values fall back to text/csv.
        ResponseEntity<String> outputTest = responseHelperTest
            .sendResponseForList(dummyResponse.iterator(), "application/json");
        Assert.assertEquals("1,0.2", outputTest.getBody());
        Assert.assertEquals("text/csv",
            Objects.requireNonNull(outputTest.getHeaders().get(HttpHeaders.CONTENT_TYPE)).get(0));
    }

    @Test
    public void testTextOutputInvalidAccept() throws JsonProcessingException {
        dummyResponse = Lists.newArrayList("this", "is", "spark", "ml", "server");
        ResponseEntity<String> outputTest = responseHelperTest
            .sendResponseForList(dummyResponse.iterator(), "application/json");
        Assert.assertEquals("this,is,spark,ml,server", outputTest.getBody());
        Assert.assertEquals("text/csv",
            Objects.requireNonNull(outputTest.getHeaders().get(HttpHeaders.CONTENT_TYPE)).get(0));
    }
}
| 1,562 |
0 |
Create_ds/sagemaker-sparkml-serving-container/src/main/java/com/amazonaws
|
Create_ds/sagemaker-sparkml-serving-container/src/main/java/com/amazonaws/sagemaker/App.java
|
/*
* Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package com.amazonaws.sagemaker;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/**
 * Spring Boot starter application
 */
@SpringBootApplication
public class App {
    /**
     * Application entry point; boots the Spring context and embedded server.
     *
     * @param args command-line arguments forwarded to Spring Boot
     */
    public static void main(String[] args) {
        SpringApplication.run(App.class, args);
    }
}
| 1,563 |
0 |
Create_ds/sagemaker-sparkml-serving-container/src/main/java/com/amazonaws/sagemaker
|
Create_ds/sagemaker-sparkml-serving-container/src/main/java/com/amazonaws/sagemaker/dto/SageMakerRequestListObject.java
|
/*
* Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package com.amazonaws.sagemaker.dto;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import java.util.List;
/**
 * Request object POJO to which data field of input request in JSONLINES format will be mapped to by Spring (using Jackson).
 * For sample input, please see test/resources/com/amazonaws/sagemaker/dto
 */
public class SageMakerRequestListObject {

    private DataSchema schema;
    private List<List<Object>> data;

    /**
     * Builds the request object from the deserialized JSON payload.
     *
     * @param schema optional schema; may be null because it can also come from an environment variable
     * @param data list of data rows, one inner list per data point; must not be null
     */
    @JsonCreator
    public SageMakerRequestListObject(@JsonProperty("schema") final DataSchema schema,
        @JsonProperty("data") final List<List<Object>> data) {
        // schema can be retrieved from environment variable as well, hence it is not enforced to be null
        this.schema = schema;
        // message makes the resulting NullPointerException identify the offending field
        this.data = Preconditions.checkNotNull(data, "data cannot be null");
    }

    public DataSchema getSchema() {
        return schema;
    }

    public List<List<Object>> getData() {
        return data;
    }
}
| 1,564 |
0 |
Create_ds/sagemaker-sparkml-serving-container/src/main/java/com/amazonaws/sagemaker
|
Create_ds/sagemaker-sparkml-serving-container/src/main/java/com/amazonaws/sagemaker/dto/ColumnSchema.java
|
/*
* Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package com.amazonaws.sagemaker.dto;
import com.amazonaws.sagemaker.type.DataStructureType;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import java.util.Optional;
/**
 * POJO to represent single column of Spark data that MLeap will transform. Each column can be a basic value or a List
 * of basic values (for Spark Array or Vector).
 */
public class ColumnSchema {

    private String name;
    private String type;
    private String struct;

    /**
     * Builds a column description from the deserialized JSON payload.
     *
     * @param name column name; must not be null
     * @param type basic data type of the column; must not be null
     * @param struct structure of the column (basic/array/vector); defaults to basic when absent
     */
    @JsonCreator
    public ColumnSchema(@JsonProperty("name") final String name, @JsonProperty("type") final String type,
        @JsonProperty("struct") final String struct) {
        // messages make the resulting NullPointerException identify the offending field
        this.name = Preconditions.checkNotNull(name, "name cannot be null");
        this.type = Preconditions.checkNotNull(type, "type cannot be null");
        // struct is optional and falls back to the basic (scalar) structure
        this.struct = Optional.ofNullable(struct).orElse(DataStructureType.BASIC);
    }

    public String getName() {
        return name;
    }

    public String getType() {
        return type;
    }

    public String getStruct() {
        return struct;
    }
}
| 1,565 |
0 |
Create_ds/sagemaker-sparkml-serving-container/src/main/java/com/amazonaws/sagemaker
|
Create_ds/sagemaker-sparkml-serving-container/src/main/java/com/amazonaws/sagemaker/dto/SageMakerRequestObject.java
|
/*
* Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package com.amazonaws.sagemaker.dto;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import java.util.List;
/**
 * Request object POJO to which input request in JSON format will be mapped to by Spring (using Jackson). For sample
 * input, please see test/resources/com/amazonaws/sagemaker/dto
 */
public class SageMakerRequestObject {

    private DataSchema schema;
    private List<Object> data;

    /**
     * Builds the request object from the deserialized JSON payload.
     *
     * @param schema optional schema; may be null because it can also come from an environment variable
     * @param data single data point as a list of column values; must not be null
     */
    @JsonCreator
    public SageMakerRequestObject(@JsonProperty("schema") final DataSchema schema,
        @JsonProperty("data") final List<Object> data) {
        // schema can be retrieved from environment variable as well, hence it is not enforced to be null
        this.schema = schema;
        // message makes the resulting NullPointerException identify the offending field
        this.data = Preconditions.checkNotNull(data, "data cannot be null");
    }

    public DataSchema getSchema() {
        return schema;
    }

    public List<Object> getData() {
        return data;
    }
}
| 1,566 |
0 |
Create_ds/sagemaker-sparkml-serving-container/src/main/java/com/amazonaws/sagemaker
|
Create_ds/sagemaker-sparkml-serving-container/src/main/java/com/amazonaws/sagemaker/dto/DataSchema.java
|
/*
* Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package com.amazonaws.sagemaker.dto;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import java.util.List;
/**
 * Input schema for the request paylod. This can either be passed via an environment variable or part of a request.
 * If the schema is present in both the environment variable and the request, the one in request will take precedence.
 */
public class DataSchema {

    private List<ColumnSchema> input;
    private ColumnSchema output;

    /**
     * Builds the schema from the deserialized JSON payload.
     *
     * @param input schema of each input column; must not be null
     * @param output schema of the single output column; must not be null
     */
    @JsonCreator
    public DataSchema(@JsonProperty("input") final List<ColumnSchema> input,
        @JsonProperty("output") final ColumnSchema output) {
        // messages make the resulting NullPointerException identify the offending field
        this.input = Preconditions.checkNotNull(input, "input cannot be null");
        this.output = Preconditions.checkNotNull(output, "output cannot be null");
    }

    public List<ColumnSchema> getInput() {
        return input;
    }

    public ColumnSchema getOutput() {
        return output;
    }
}
| 1,567 |
0 |
Create_ds/sagemaker-sparkml-serving-container/src/main/java/com/amazonaws/sagemaker
|
Create_ds/sagemaker-sparkml-serving-container/src/main/java/com/amazonaws/sagemaker/dto/JsonlinesTextOutput.java
|
/*
* Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package com.amazonaws.sagemaker.dto;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
/**
 * POJO class to represent the standard JSONlines output format for SageMaker NLP algorithms (BlazingText, Seq2Seq)
 */
public class JsonlinesTextOutput {

    private String source;

    /**
     * Builds the text output wrapper.
     *
     * @param source the space-joined output text; must not be null
     */
    @JsonCreator
    public JsonlinesTextOutput(@JsonProperty("source") final String source) {
        // message makes the resulting NullPointerException identify the offending field
        this.source = Preconditions.checkNotNull(source, "source cannot be null");
    }

    public String getSource() {
        return source;
    }
}
| 1,568 |
0 |
Create_ds/sagemaker-sparkml-serving-container/src/main/java/com/amazonaws/sagemaker
|
Create_ds/sagemaker-sparkml-serving-container/src/main/java/com/amazonaws/sagemaker/dto/BatchExecutionParameter.java
|
/*
* Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package com.amazonaws.sagemaker.dto;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
/**
 * POJO class corresponding to the execution-parameters API call that Batch requires
 */
public class BatchExecutionParameter {

    @JsonProperty("MaxConcurrentTransforms")
    private Integer maxConcurrentTransforms;

    @JsonProperty("BatchStrategy")
    private String batchStrategy;

    @JsonProperty("MaxPayloadInMB")
    private Integer maxPayloadInMB;

    /**
     * Builds the execution parameters payload.
     *
     * @param maxConcurrentTransforms maximum parallel transform requests; must not be null
     * @param batchStrategy batching strategy name (e.g. SINGLE_RECORD); must not be null
     * @param maxPayloadInMB maximum request payload size in megabytes; must not be null
     */
    @JsonCreator
    public BatchExecutionParameter(@JsonProperty("MaxConcurrentTransforms") Integer maxConcurrentTransforms,
        @JsonProperty("BatchStrategy") String batchStrategy, @JsonProperty("MaxPayloadInMB") Integer maxPayloadInMB) {
        // messages make the resulting NullPointerException identify the offending field
        this.maxConcurrentTransforms = Preconditions.checkNotNull(maxConcurrentTransforms,
            "maxConcurrentTransforms cannot be null");
        this.batchStrategy = Preconditions.checkNotNull(batchStrategy, "batchStrategy cannot be null");
        this.maxPayloadInMB = Preconditions.checkNotNull(maxPayloadInMB, "maxPayloadInMB cannot be null");
    }

    public Integer getMaxConcurrentTransforms() {
        return maxConcurrentTransforms;
    }

    public String getBatchStrategy() {
        return batchStrategy;
    }

    public Integer getMaxPayloadInMB() {
        return maxPayloadInMB;
    }
}
| 1,569 |
0 |
Create_ds/sagemaker-sparkml-serving-container/src/main/java/com/amazonaws/sagemaker
|
Create_ds/sagemaker-sparkml-serving-container/src/main/java/com/amazonaws/sagemaker/dto/JsonlinesStandardOutput.java
|
/*
* Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package com.amazonaws.sagemaker.dto;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import java.util.List;
/**
 * POJO class to represent the standard JSONlines output format for SageMaker built-in algorithms.
 */
public class JsonlinesStandardOutput {

    private List<Object> features;

    /**
     * Builds the standard output wrapper.
     *
     * @param features list of output feature values; must not be null
     */
    @JsonCreator
    public JsonlinesStandardOutput(@JsonProperty("features") final List<Object> features) {
        // message makes the resulting NullPointerException identify the offending field
        this.features = Preconditions.checkNotNull(features, "features cannot be null");
    }

    public List<Object> getFeatures() {
        return features;
    }
}
| 1,570 |
0 |
Create_ds/sagemaker-sparkml-serving-container/src/main/java/com/amazonaws/sagemaker
|
Create_ds/sagemaker-sparkml-serving-container/src/main/java/com/amazonaws/sagemaker/configuration/BeanConfiguration.java
|
/*
* Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package com.amazonaws.sagemaker.configuration;
import com.amazonaws.sagemaker.utils.SystemUtils;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists;
import java.io.File;
import java.util.List;
import ml.combust.mleap.runtime.MleapContext;
import ml.combust.mleap.runtime.frame.Transformer;
import ml.combust.mleap.runtime.javadsl.BundleBuilder;
import ml.combust.mleap.runtime.javadsl.ContextBuilder;
import ml.combust.mleap.runtime.javadsl.LeapFrameBuilder;
import ml.combust.mleap.runtime.javadsl.LeapFrameBuilderSupport;
import org.eclipse.jetty.util.thread.QueuedThreadPool;
import org.springframework.boot.SpringBootConfiguration;
import org.springframework.boot.web.embedded.jetty.JettyServerCustomizer;
import org.springframework.boot.web.embedded.jetty.JettyServletWebServerFactory;
import org.springframework.context.annotation.Bean;
/**
 * Contains all Spring bean configurations
 */
@SpringBootConfiguration
public class BeanConfiguration {

    private static final String DEFAULT_HTTP_LISTENER_PORT = "8080";
    private static final String DEFAULT_MODEL_LOCATION = "/opt/ml/model";
    // thread pool bounds are expressed as threads-per-CPU-core ratios
    private static final Integer MAX_CORE_TO_THREAD_RATIO = 10;
    private static final Integer MIN_CORE_TO_THREAD_RATIO = 2;

    /** Location of the MLeap model bundle baked into the container image. */
    @Bean
    public File provideModelFile() {
        return new File(DEFAULT_MODEL_LOCATION);
    }

    @Bean
    public ContextBuilder provideContextBuilder() {
        return new ContextBuilder();
    }

    @Bean
    public MleapContext provideMleapContext(ContextBuilder contextBuilder) {
        return contextBuilder.createMleapContext();
    }

    @Bean
    public BundleBuilder provideBundleBuilder() {
        return new BundleBuilder();
    }

    @Bean
    public LeapFrameBuilder provideLeapFrameBuilder() {
        return new LeapFrameBuilder();
    }

    @Bean
    public LeapFrameBuilderSupport provideLeapFrameBuilderSupport() {
        return new LeapFrameBuilderSupport();
    }

    /** Loads the MLeap model bundle once at startup; the root transformer performs all inference. */
    @Bean
    public Transformer provideTransformer(final File modelFile, final BundleBuilder bundleBuilder,
        final MleapContext mleapContext) {
        return bundleBuilder.load(modelFile, mleapContext).root();
    }

    @Bean
    public ObjectMapper provideObjectMapper() {
        return new ObjectMapper();
    }

    /**
     * Configures the embedded Jetty server: listener port (overridable via SAGEMAKER_BIND_TO_PORT)
     * and a thread pool sized relative to the number of CPU cores.
     */
    @Bean
    public JettyServletWebServerFactory provideJettyServletWebServerFactory() {
        // Integer.parseInt replaces the deprecated new Integer(String) boxing constructor
        final JettyServletWebServerFactory jettyServlet = new JettyServletWebServerFactory(
            Integer.parseInt(this.getHttpListenerPort()));
        final List<JettyServerCustomizer> serverCustomizerList = Lists.newArrayList();
        final JettyServerCustomizer serverCustomizer = server -> {
            final QueuedThreadPool threadPool = server.getBean(QueuedThreadPool.class);
            threadPool.setMinThreads(SystemUtils.getNumberOfThreads(MIN_CORE_TO_THREAD_RATIO));
            threadPool.setMaxThreads(SystemUtils.getNumberOfThreads(MAX_CORE_TO_THREAD_RATIO));
        };
        serverCustomizerList.add(serverCustomizer);
        jettyServlet.setServerCustomizers(serverCustomizerList);
        return jettyServlet;
    }

    @VisibleForTesting
    protected String getHttpListenerPort() {
        return (SystemUtils.getEnvironmentVariable("SAGEMAKER_BIND_TO_PORT") != null) ? SystemUtils
            .getEnvironmentVariable("SAGEMAKER_BIND_TO_PORT") : DEFAULT_HTTP_LISTENER_PORT;
    }
}
| 1,571 |
0 |
Create_ds/sagemaker-sparkml-serving-container/src/main/java/com/amazonaws/sagemaker
|
Create_ds/sagemaker-sparkml-serving-container/src/main/java/com/amazonaws/sagemaker/utils/SystemUtils.java
|
/*
* Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package com.amazonaws.sagemaker.utils;
/**
 * Utility class for dealing with System or Environment related functionalities. These methods are moved to this class
 * so that they can be easily mocked out by PowerMockito.mockStatic while testing the actual classes.
 */
public class SystemUtils {

    // Static utility class: prevent instantiation
    private SystemUtils() {
    }

    /**
     * Computes the number of threads to use based on number of available processors in the host
     *
     * @param coreToThreadRatio, the multiplicative factor per core
     * @return coreToThreadRatio multiplied by available cores in the host
     */
    public static int getNumberOfThreads(final Integer coreToThreadRatio) {
        final int numberOfCores = Runtime.getRuntime().availableProcessors();
        return coreToThreadRatio * numberOfCores;
    }

    /**
     * Retrieves environment variable pertaining to a key
     *
     * @param key, the environment variable key
     * @return the value corresponding to the key from environment settings, or null if the variable is not set
     */
    public static String getEnvironmentVariable(final String key) {
        return System.getenv(key);
    }
}
| 1,572 |
0 |
Create_ds/sagemaker-sparkml-serving-container/src/main/java/com/amazonaws/sagemaker
|
Create_ds/sagemaker-sparkml-serving-container/src/main/java/com/amazonaws/sagemaker/utils/ScalaUtils.java
|
/*
* Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package com.amazonaws.sagemaker.utils;
import com.amazonaws.sagemaker.type.DataStructureType;
import java.util.Collections;
import java.util.Iterator;
import ml.combust.mleap.runtime.frame.ArrayRow;
import ml.combust.mleap.runtime.frame.DefaultLeapFrame;
import ml.combust.mleap.runtime.frame.Row;
import ml.combust.mleap.runtime.frame.Transformer;
import ml.combust.mleap.runtime.javadsl.LeapFrameSupport;
import org.apache.commons.lang3.StringUtils;
import scala.collection.JavaConverters;
/**
* Utility class for dealing with Scala to Java conversion related issues. These functionalities are moved to this
* class so that they can be easily mocked out by PowerMockito.mockStatic while testing the actual classes.
*/
public class ScalaUtils {
private final static LeapFrameSupport leapFrameSupport = new LeapFrameSupport();
/**
* Invokes MLeap transformer object with DefaultLeapFrame and returns DefaultLeapFrame from MLeap helper Try Monad
*
* @param transformer, the MLeap transformer which performs the inference
* @param leapFrame, input to MLeap
* @return the DefaultLeapFrame in helper
*/
public static DefaultLeapFrame transformLeapFrame(final Transformer transformer, final DefaultLeapFrame leapFrame) {
return transformer.transform(leapFrame).get();
}
/**
* Selects a value corresponding to a key from DefaultLeapFrame and returns DefaultLeapFrame from MLeap helper Try
* Monad
*
* @param key, the value corresponding to key to be retrieved
* @param leapFrame, input to MLeap
* @return the DefaultLeapFrame in helper
*/
public static DefaultLeapFrame selectFromLeapFrame(final DefaultLeapFrame leapFrame, final String key) {
return leapFrameSupport.select(leapFrame, Collections.singletonList(key));
}
/**
* Returns an ArrayRow object from DefaultLeapFrame Try Monad after converting Scala collections to Java
* collections
*
* @param leapFrame, the DefaultLeapFrame from which output to be extracted
* @return ArrayRow which can be used to retrieve the original output
*/
public static ArrayRow getOutputArrayRow(final DefaultLeapFrame leapFrame) {
final Iterator<Row> rowIterator = leapFrameSupport.collect(leapFrame).iterator();
// SageMaker input structure only allows to call MLeap transformer for single data point
return (ArrayRow) (rowIterator.next());
}
/**
* Retrieves the raw output value from ArrayRow for Vector/Array use cases.
*
* @param predictionRow, the output ArrayRow
* @param structure, whether it is Spark Vector or Array
* @return Iterator to raw values of the Vector or Array
*/
public static Iterator<Object> getJavaObjectIteratorFromArrayRow(final ArrayRow predictionRow,
final String structure) {
return (StringUtils.equals(structure, DataStructureType.VECTOR)) ? JavaConverters
.asJavaIteratorConverter(predictionRow.getTensor(0).toDense().rawValuesIterator()).asJava()
: predictionRow.getList(0).iterator();
}
}
| 1,573 |
0 |
Create_ds/sagemaker-sparkml-serving-container/src/main/java/com/amazonaws/sagemaker
|
Create_ds/sagemaker-sparkml-serving-container/src/main/java/com/amazonaws/sagemaker/controller/ServingController.java
|
/*
* Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package com.amazonaws.sagemaker.controller;
import static org.springframework.web.bind.annotation.RequestMethod.GET;
import static org.springframework.web.bind.annotation.RequestMethod.POST;
import com.amazonaws.sagemaker.dto.BatchExecutionParameter;
import com.amazonaws.sagemaker.dto.DataSchema;
import com.amazonaws.sagemaker.dto.SageMakerRequestListObject;
import com.amazonaws.sagemaker.dto.SageMakerRequestObject;
import com.amazonaws.sagemaker.helper.DataConversionHelper;
import com.amazonaws.sagemaker.helper.ResponseHelper;
import com.amazonaws.sagemaker.type.AdditionalMediaType;
import com.amazonaws.sagemaker.type.DataStructureType;
import com.amazonaws.sagemaker.utils.ScalaUtils;
import com.amazonaws.sagemaker.utils.SystemUtils;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import ml.combust.mleap.runtime.frame.ArrayRow;
import ml.combust.mleap.runtime.frame.DefaultLeapFrame;
import ml.combust.mleap.runtime.frame.Transformer;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestHeader;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
* The Spring controller class which implements the APIs
*/
@RestController
public class ServingController {
private static final Logger LOG = LogManager.getLogger(ServingController.class);
private static final List<String> VALID_ACCEPT_LIST = Lists
.newArrayList(AdditionalMediaType.TEXT_CSV_VALUE, AdditionalMediaType.APPLICATION_JSONLINES_VALUE,
AdditionalMediaType.APPLICATION_JSONLINES_TEXT_VALUE);
private final Transformer mleapTransformer;
private final ResponseHelper responseHelper;
private final DataConversionHelper dataConversionHelper;
private final ObjectMapper mapper;
@Autowired
public ServingController(final Transformer mleapTransformer, final ResponseHelper responseHelper,
final DataConversionHelper dataConversionHelper, final ObjectMapper mapper) {
this.mleapTransformer = Preconditions.checkNotNull(mleapTransformer);
this.responseHelper = Preconditions.checkNotNull(responseHelper);
this.dataConversionHelper = Preconditions.checkNotNull(dataConversionHelper);
this.mapper = Preconditions.checkNotNull(mapper);
}
/**
* Implements the health check GET API
*
* @return ResponseEntity with status 200
*/
@RequestMapping(path = "/ping", method = GET)
public ResponseEntity performShallowHealthCheck() {
return ResponseEntity.ok().build();
}
/**
* Implements the Batch Execution GET Parameter API
*
* @return ResponseEntity with body as the expected payload JSON & status 200
*/
@RequestMapping(path = "/execution-parameters", method = GET, produces = MediaType.APPLICATION_JSON_VALUE)
public ResponseEntity returnBatchExecutionParameter() throws JsonProcessingException {
final BatchExecutionParameter batchParam = new BatchExecutionParameter(SystemUtils.getNumberOfThreads(1),
"SINGLE_RECORD", 5);
final String responseStr = mapper.writeValueAsString(batchParam);
return ResponseEntity.ok(responseStr);
}
    /**
     * Implements the invocations POST API for application/json input
     *
     * @param sro, the request object; the schema inside it (if any) overrides the environment-variable schema
     * @param accept, indicates the content types that the http method is able to understand
     * @return ResponseEntity with body as the expected payload JSON & proper statuscode based on the input
     */
    @RequestMapping(path = "/invocations", method = POST, consumes = MediaType.APPLICATION_JSON_VALUE)
    public ResponseEntity<String> transformRequestJson(@RequestBody final SageMakerRequestObject sro,
        @RequestHeader(value = HttpHeaders.ACCEPT, required = false) final String accept) {
        if (sro == null) {
            // No usable body: respond 204 rather than attempting inference
            LOG.error("Input passed to the request is empty");
            return ResponseEntity.noContent().build();
        }
        try {
            final String acceptVal = this.retrieveAndVerifyAccept(accept);
            final DataSchema schema = this.retrieveAndVerifySchema(sro.getSchema(), mapper);
            return this.processInputData(sro.getData(), schema, acceptVal);
        } catch (final Exception ex) {
            // Any failure (bad accept, missing schema, inference error) surfaces as 400 with the message as body
            LOG.error("Error in processing current request", ex);
            return ResponseEntity.badRequest().body(ex.getMessage());
        }
    }
/**
* Implements the invocations POST API for text/csv input
*
* @param csvRow, data in row format in CSV
* @param accept, indicates the content types that the http method is able to understand
* @return ResponseEntity with body as the expected payload JSON & proper statuscode based on the input
*/
@RequestMapping(path = "/invocations", method = POST, consumes = AdditionalMediaType.TEXT_CSV_VALUE)
public ResponseEntity<String> transformRequestCsv(@RequestBody final byte[] csvRow,
@RequestHeader(value = HttpHeaders.ACCEPT, required = false) String accept) {
if (csvRow == null) {
LOG.error("Input passed to the request is empty");
return ResponseEntity.noContent().build();
}
try {
final String acceptVal = this.retrieveAndVerifyAccept(accept);
final DataSchema schema = this.retrieveAndVerifySchema(null, mapper);
return this
.processInputData(dataConversionHelper.convertCsvToObjectList(new String(csvRow), schema), schema,
acceptVal);
} catch (final Exception ex) {
LOG.error("Error in processing current request", ex);
return ResponseEntity.badRequest().body(ex.getMessage());
}
}
/**
* Implements the invocations POST API for application/jsonlines input
*
* @param jsonLines, lines of json values
* @param accept, indicates the content types that the http method is able to understand
* @return ResponseEntity with body as the expected payload JSON & proper statuscode based on the input
*/
@RequestMapping(path = "/invocations", method = POST, consumes = AdditionalMediaType.APPLICATION_JSONLINES_VALUE)
public ResponseEntity<String> transformRequestJsonLines(
@RequestBody final byte[] jsonLines,
@RequestHeader(value = HttpHeaders.ACCEPT, required = false)
final String accept) {
if (jsonLines == null) {
LOG.error("Input passed to the request is null");
return ResponseEntity.badRequest().build();
} else if (jsonLines.length == 0) {
LOG.error("Input passed to the request is empty");
return ResponseEntity.noContent().build();
}
try {
final String acceptVal = this.retrieveAndVerifyAccept(accept);
return this.processInputDataForJsonLines(new String(jsonLines), acceptVal);
} catch (final Exception ex) {
LOG.error("Error in processing current request", ex);
return ResponseEntity.badRequest().body(ex.getMessage());
}
}
@VisibleForTesting
protected String retrieveAndVerifyAccept(final String acceptFromRequest) {
final String acceptVal = checkEmptyAccept(acceptFromRequest) ? SystemUtils
.getEnvironmentVariable("SAGEMAKER_DEFAULT_INVOCATIONS_ACCEPT") : acceptFromRequest;
if (StringUtils.isNotEmpty(acceptVal) && !VALID_ACCEPT_LIST.contains(acceptVal)) {
throw new IllegalArgumentException("Accept value passed via request or environment variable is not valid");
}
return StringUtils.isNotEmpty(acceptVal) ? acceptVal : AdditionalMediaType.TEXT_CSV_VALUE;
}
@VisibleForTesting
protected DataSchema retrieveAndVerifySchema(final DataSchema schemaFromPayload, final ObjectMapper mapper)
throws IOException {
if ((schemaFromPayload == null) && (SystemUtils.getEnvironmentVariable("SAGEMAKER_SPARKML_SCHEMA") == null)) {
throw new RuntimeException(
"Input schema has to be provided either via environment variable or " + "via the request");
}
return (schemaFromPayload != null) ? schemaFromPayload
: mapper.readValue(SystemUtils.getEnvironmentVariable("SAGEMAKER_SPARKML_SCHEMA"), DataSchema.class);
}
private ResponseEntity<String> processInputData(final List<Object> inputData, final DataSchema schema,
    final String acceptVal) throws JsonProcessingException {
    // Build the MLeap frame from the raw input values and the schema.
    final DefaultLeapFrame inputFrame = dataConversionHelper.convertInputToLeapFrame(schema, inputData);
    // Run the MLeap transformer, then keep only the configured output column.
    final DefaultLeapFrame transformedFrame = ScalaUtils.transformLeapFrame(mleapTransformer, inputFrame);
    final DefaultLeapFrame outputFrame =
        ScalaUtils.selectFromLeapFrame(transformedFrame, schema.getOutput().getName());
    final ArrayRow predictionRow = ScalaUtils.getOutputArrayRow(outputFrame);
    return transformToHttpResponse(schema, predictionRow, acceptVal);
}
/**
 * Interprets JSONLines input (one JSON document per line) and returns the merged response in the
 * requested output format.
 *
 * @param jsonLinesAsString the JSONLines payload; the first line may also carry the schema
 * @param acceptVal the output format in which the response is to be returned
 * @return the transformed output for the JSONLines input, one body entry per input row
 * @throws IOException if there is an exception during object mapping and validation
 */
ResponseEntity<String> processInputDataForJsonLines(
    final String jsonLinesAsString, final String acceptVal) throws IOException {
    final String[] lines = jsonLinesAsString.split("\\r?\\n");
    final ObjectMapper mapper = new ObjectMapper();
    // The first line is special since it could contain the schema as well. Extract and validate it once.
    final SageMakerRequestObject firstLine = mapper.readValue(lines[0], SageMakerRequestObject.class);
    final DataSchema schema = this.retrieveAndVerifySchema(firstLine.getSchema(), mapper);
    final List<List<Object>> inputDatas = Lists.newArrayList();
    for (final String jsonStringLine : lines) {
        try {
            // A line may hold multiple rows (SageMakerRequestListObject) ...
            final SageMakerRequestListObject sro =
                mapper.readValue(jsonStringLine, SageMakerRequestListObject.class);
            inputDatas.addAll(sro.getData());
        } catch (final JsonMappingException ex) {
            // ... or a single row (SageMakerRequestObject); fall back when list mapping fails.
            final SageMakerRequestObject sro = mapper.readValue(jsonStringLine, SageMakerRequestObject.class);
            inputDatas.add(sro.getData());
        }
    }
    // Process each input row separately and collect the per-row responses.
    final List<ResponseEntity<String>> responseList = Lists.newArrayList();
    for (final List<Object> inputData : inputDatas) {
        responseList.add(this.processInputData(inputData, schema, acceptVal));
    }
    // No exception means every response is valid, and all were produced with the same accept value,
    // so every response carries identical headers; reuse the first response's headers.
    final HttpHeaders headers = responseList.get(0).getHeaders();
    // Merge all response bodies into a single list-of-lists body.
    final List<List<String>> bodyList = Lists.newArrayList();
    for (final ResponseEntity<String> response : responseList) {
        bodyList.add(Lists.newArrayList(response.getBody()));
    }
    return ResponseEntity.ok().headers(headers).body(bodyList.toString());
}
private boolean checkEmptyAccept(final String acceptFromRequest) {
    // Spring may report Accept as "*/*" when the client did not send the header at all,
    // so the wildcard is treated the same as an absent/blank value.
    final boolean isWildcard = StringUtils.equals(acceptFromRequest, MediaType.ALL_VALUE);
    return StringUtils.isBlank(acceptFromRequest) || isWildcard;
}
private ResponseEntity<String> transformToHttpResponse(final DataSchema schema, final ArrayRow predictionRow,
    final String accept) throws JsonProcessingException {
    final String outputStruct = schema.getOutput().getStruct();
    if (StringUtils.equals(outputStruct, DataStructureType.BASIC)) {
        // Scalar prediction: convert the MLeap value to a Java type and serialize it directly.
        final Object singleOutput =
            dataConversionHelper.convertMLeapBasicTypeToJavaType(predictionRow, schema.getOutput().getType());
        return responseHelper.sendResponseForSingleValue(singleOutput.toString(), accept);
    }
    // Non-basic output is a Spark vector or array type; stream its elements into the response.
    return responseHelper.sendResponseForList(
        ScalaUtils.getJavaObjectIteratorFromArrayRow(predictionRow, outputStruct), accept);
}
}
| 1,574 |
0 |
Create_ds/sagemaker-sparkml-serving-container/src/main/java/com/amazonaws/sagemaker
|
Create_ds/sagemaker-sparkml-serving-container/src/main/java/com/amazonaws/sagemaker/type/DataStructureType.java
|
/*
* Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package com.amazonaws.sagemaker.type;
/**
* Each column in the input and output can be a single value (basic), Spark ArrayType(array) or Spark Vector type
* (vector)
*/
/**
 * Each column in the input and output can be a single value (basic), Spark ArrayType (array) or
 * Spark Vector type (vector). These constants are the structure discriminators used in schemas.
 */
public final class DataStructureType {

    // Utility constant holder; never instantiated.
    private DataStructureType() {
    }

    /** Column holds a single scalar value. */
    public static final String BASIC = "basic";
    /** Column holds a Spark Vector. */
    public static final String VECTOR = "vector";
    /** Column holds a Spark ArrayType value. */
    public static final String ARRAY = "array";
}
| 1,575 |
0 |
Create_ds/sagemaker-sparkml-serving-container/src/main/java/com/amazonaws/sagemaker
|
Create_ds/sagemaker-sparkml-serving-container/src/main/java/com/amazonaws/sagemaker/type/BasicDataType.java
|
/*
* Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package com.amazonaws.sagemaker.type;
/**
* Basic data types supported for each column in the input. Each column can be an individual value or an Array/Vector
* (List) * of this.
*/
/**
 * Basic data types supported for each column in the input. Each column can be an individual value
 * or an Array/Vector (List) of one of these types.
 */
public final class BasicDataType {

    // Utility constant holder; never instantiated.
    private BasicDataType() {
    }

    public static final String BOOLEAN = "boolean";
    public static final String BYTE = "byte";
    public static final String SHORT = "short";
    // NB: the wire name for integers is "int", not "integer".
    public static final String INTEGER = "int";
    public static final String FLOAT = "float";
    public static final String LONG = "long";
    public static final String DOUBLE = "double";
    public static final String STRING = "string";
}
| 1,576 |
0 |
Create_ds/sagemaker-sparkml-serving-container/src/main/java/com/amazonaws/sagemaker
|
Create_ds/sagemaker-sparkml-serving-container/src/main/java/com/amazonaws/sagemaker/type/AdditionalMediaType.java
|
/*
* Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package com.amazonaws.sagemaker.type;
/**
* This class contains MIME types which are not part of Spring officially provided MIME types
*/
/**
 * MIME types used by this container which are not part of Spring's officially provided MIME types.
 */
public final class AdditionalMediaType {

    // Utility constant holder; never instantiated.
    private AdditionalMediaType() {
    }

    /** Default output format: comma-separated values. */
    public static final String TEXT_CSV_VALUE = "text/csv";
    /** Standard JSONLines output. */
    public static final String APPLICATION_JSONLINES_VALUE = "application/jsonlines";
    /** JSONLines-for-text output (space-joined values in a single field). */
    public static final String APPLICATION_JSONLINES_TEXT_VALUE = "application/jsonlines;data=text";
}
| 1,577 |
0 |
Create_ds/sagemaker-sparkml-serving-container/src/main/java/com/amazonaws/sagemaker
|
Create_ds/sagemaker-sparkml-serving-container/src/main/java/com/amazonaws/sagemaker/helper/ResponseHelper.java
|
/*
* Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package com.amazonaws.sagemaker.helper;
import com.amazonaws.sagemaker.dto.JsonlinesStandardOutput;
import com.amazonaws.sagemaker.dto.JsonlinesTextOutput;
import com.amazonaws.sagemaker.type.AdditionalMediaType;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import java.util.Iterator;
import java.util.List;
import java.util.StringJoiner;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpHeaders;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Component;
/**
* This class contains the logic for converting MLeap helper into SageMaker specific helper along with status-codes
*/
@Component
public class ResponseHelper {
private final ObjectMapper mapper;
@Autowired
public ResponseHelper(final ObjectMapper mapper) {
this.mapper = Preconditions.checkNotNull(mapper);
}
/**
* Sends the helper when the output is a single value (e.g. prediction)
*
* @param value, the helper value
* @param acceptVal, the accept customer has passed or default (text/csv) if not passed
* @return Spring ResponseEntity which contains the body and the header
*/
public ResponseEntity<String> sendResponseForSingleValue(final String value, String acceptVal) {
if (StringUtils.isEmpty(acceptVal)) {
acceptVal = AdditionalMediaType.TEXT_CSV_VALUE;
}
return StringUtils.equals(acceptVal, AdditionalMediaType.TEXT_CSV_VALUE) ? this.getCsvOkResponse(value)
: this.getJsonlinesOkResponse(value);
}
/**
* This method is responsible for sending the values in the appropriate format so that it can be parsed by other 1P
* algorithms. Currently, it supports two formats, standard jsonlines and jsonlines for text. Please see
* test/resources/com/amazonaws/sagemaker/dto for example output format or SageMaker built-in algorithms
* documentaiton to know about the output format.
*
* @param outputDataIterator, data iterator for raw output values in case output is an Array or Vector
* @param acceptVal, the accept customer has passed or default (text/csv) if not passed
* @return Spring ResponseEntity which contains the body and the header.
*/
public ResponseEntity<String> sendResponseForList(final Iterator<Object> outputDataIterator, String acceptVal)
throws JsonProcessingException {
if (StringUtils.equals(acceptVal, AdditionalMediaType.APPLICATION_JSONLINES_VALUE)) {
return this.buildStandardJsonOutputForList(outputDataIterator);
} else if (StringUtils.equals(acceptVal, AdditionalMediaType.APPLICATION_JSONLINES_TEXT_VALUE)) {
return this.buildTextJsonOutputForList(outputDataIterator);
} else {
return this.buildCsvOutputForList(outputDataIterator);
}
}
private ResponseEntity<String> buildCsvOutputForList(final Iterator<Object> outputDataIterator) {
final StringJoiner sj = new StringJoiner(",");
while (outputDataIterator.hasNext()) {
sj.add(outputDataIterator.next().toString());
}
return this.getCsvOkResponse(sj.toString());
}
private ResponseEntity<String> buildStandardJsonOutputForList(final Iterator<Object> outputDataIterator)
throws JsonProcessingException {
final List<Object> columns = Lists.newArrayList();
while (outputDataIterator.hasNext()) {
columns.add(outputDataIterator.next());
}
final JsonlinesStandardOutput jsonOutput = new JsonlinesStandardOutput(columns);
final String jsonRepresentation = mapper.writeValueAsString(jsonOutput);
return this.getJsonlinesOkResponse(jsonRepresentation);
}
private ResponseEntity<String> buildTextJsonOutputForList(final Iterator<Object> outputDataIterator)
throws JsonProcessingException {
final StringJoiner stringJoiner = new StringJoiner(" ");
while (outputDataIterator.hasNext()) {
stringJoiner.add(outputDataIterator.next().toString());
}
final JsonlinesTextOutput jsonOutput = new JsonlinesTextOutput(stringJoiner.toString());
final String jsonRepresentation = mapper.writeValueAsString(jsonOutput);
return this.getJsonlinesOkResponse(jsonRepresentation);
}
private ResponseEntity<String> getCsvOkResponse(final String responseBody) {
final HttpHeaders headers = new HttpHeaders();
headers.set(HttpHeaders.CONTENT_TYPE, AdditionalMediaType.TEXT_CSV_VALUE);
return ResponseEntity.ok().headers(headers).body(responseBody);
}
// We are always responding with the valid format for application/jsonlines, whicth is a valid JSON
private ResponseEntity<String> getJsonlinesOkResponse(final String responseBody) {
final HttpHeaders headers = new HttpHeaders();
headers.set(HttpHeaders.CONTENT_TYPE, AdditionalMediaType.APPLICATION_JSONLINES_VALUE);
return ResponseEntity.ok().headers(headers).body(responseBody);
}
}
| 1,578 |
0 |
Create_ds/sagemaker-sparkml-serving-container/src/main/java/com/amazonaws/sagemaker
|
Create_ds/sagemaker-sparkml-serving-container/src/main/java/com/amazonaws/sagemaker/helper/DataConversionHelper.java
|
/*
* Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package com.amazonaws.sagemaker.helper;
import com.amazonaws.sagemaker.dto.ColumnSchema;
import com.amazonaws.sagemaker.dto.DataSchema;
import com.amazonaws.sagemaker.type.BasicDataType;
import com.amazonaws.sagemaker.type.DataStructureType;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import ml.combust.mleap.core.types.*;
import ml.combust.mleap.runtime.frame.ArrayRow;
import ml.combust.mleap.runtime.frame.DefaultLeapFrame;
import ml.combust.mleap.runtime.frame.Row;
import ml.combust.mleap.runtime.javadsl.LeapFrameBuilder;
import ml.combust.mleap.runtime.javadsl.LeapFrameBuilderSupport;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;
import org.apache.commons.lang3.StringUtils;
import org.apache.spark.ml.linalg.Vectors;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import java.io.IOException;
import java.io.StringReader;
import java.util.List;
import java.util.stream.Collectors;
/**
* Converter class to convert data between input to MLeap expected types and convert back MLeap helper to Java types
* for output.
*/
@Component
public class DataConversionHelper {
private final LeapFrameBuilderSupport support;
private final LeapFrameBuilder leapFrameBuilder;
@Autowired
public DataConversionHelper(final LeapFrameBuilderSupport support, final LeapFrameBuilder leapFrameBuilder) {
this.support = Preconditions.checkNotNull(support);
this.leapFrameBuilder = Preconditions.checkNotNull(leapFrameBuilder);
}
/**
* Parses the input payload in CSV format to a list of Objects
* @param csvInput, the input received from the request in CSV format
* @param schema, the data schema retrieved from environment variable
* @return List of Objects, where each Object correspond to one feature of the input data
* @throws IOException, if there is an exception thrown in the try-with-resources block
*/
public List<Object> convertCsvToObjectList(final String csvInput, final DataSchema schema) throws IOException {
try (final StringReader sr = new StringReader(csvInput)) {
final List<Object> valueList = Lists.newArrayList();
final CSVParser parser = CSVFormat.DEFAULT.parse(sr);
// We don not supporting multiple CSV lines as input currently
final CSVRecord record = parser.getRecords().get(0);
final int inputLength = schema.getInput().size();
for (int idx = 0; idx < inputLength; ++idx) {
ColumnSchema sc = schema.getInput().get(idx);
// For CSV input, each value is treated as an individual feature by default
valueList.add(this.convertInputDataToJavaType(sc.getType(), DataStructureType.BASIC, record.get(idx)));
}
return valueList;
}
}
/**
* Convert input object to DefaultLeapFrame
*
* @param schema, the input schema received from request or environment variable
* @param data , the input data received from request as a list of objects
* @return the DefaultLeapFrame object which MLeap transformer expects
*/
public DefaultLeapFrame convertInputToLeapFrame(final DataSchema schema, final List<Object> data) {
final int inputLength = schema.getInput().size();
final List<StructField> structFieldList = Lists.newArrayList();
final List<Object> valueList = Lists.newArrayList();
for (int idx = 0; idx < inputLength; ++idx) {
ColumnSchema sc = schema.getInput().get(idx);
structFieldList
.add(new StructField(sc.getName(), this.convertInputToMLeapInputType(sc.getType(), sc.getStruct())));
valueList.add(this.convertInputDataToJavaType(sc.getType(), sc.getStruct(), data.get(idx)));
}
final StructType mleapSchema = leapFrameBuilder.createSchema(structFieldList);
final Row currentRow = support.createRowFromIterable(valueList);
final List<Row> rows = Lists.newArrayList();
rows.add(currentRow);
return leapFrameBuilder.createFrame(mleapSchema, rows);
}
/**
* Convert basic types in the MLeap helper to Java types for output.
*
* @param predictionRow, the ArrayRow from MLeapResponse
* @param type, the basic type to which the helper should be casted, provided by user via input
* @return the proper Java type
*/
public Object convertMLeapBasicTypeToJavaType(final ArrayRow predictionRow, final String type) {
switch (type) {
case BasicDataType.INTEGER:
return predictionRow.getInt(0);
case BasicDataType.LONG:
return predictionRow.getLong(0);
case BasicDataType.FLOAT:
case BasicDataType.DOUBLE:
return predictionRow.getDouble(0);
case BasicDataType.BOOLEAN:
return predictionRow.getBool(0);
case BasicDataType.BYTE:
return predictionRow.getByte(0);
case BasicDataType.SHORT:
return predictionRow.getShort(0);
case BasicDataType.STRING:
return predictionRow.getString(0);
default:
throw new IllegalArgumentException("Given type is not supported");
}
}
@SuppressWarnings("unchecked")
@VisibleForTesting
protected Object convertInputDataToJavaType(final String type, final String structure, final Object value) {
if (StringUtils.isBlank(structure) || StringUtils.equals(structure, DataStructureType.BASIC)) {
switch (type) {
case BasicDataType.INTEGER:
return new Integer(value.toString());
case BasicDataType.LONG:
return new Long(value.toString());
case BasicDataType.FLOAT:
return new Float(value.toString());
case BasicDataType.DOUBLE:
return new Double(value.toString());
case BasicDataType.BOOLEAN:
return Boolean.valueOf(value.toString());
case BasicDataType.BYTE:
return new Byte(value.toString());
case BasicDataType.SHORT:
return new Short(value.toString());
case BasicDataType.STRING:
return value.toString();
default:
throw new IllegalArgumentException("Given type is not supported");
}
} else if (!StringUtils.isBlank(structure) && StringUtils.equals(structure, DataStructureType.ARRAY)) {
List<Object> listOfObjects;
try {
listOfObjects = (List<Object>) value;
} catch (ClassCastException cce) {
throw new IllegalArgumentException("Input val is not a list but struct passed is array");
}
switch (type) {
case BasicDataType.INTEGER:
return listOfObjects.stream().map(elem -> (Integer) elem).collect(Collectors.toList());
case BasicDataType.LONG:
return listOfObjects.stream().map(elem -> (Long) elem).collect(Collectors.toList());
case BasicDataType.FLOAT:
case BasicDataType.DOUBLE:
return listOfObjects.stream().map(elem -> (Double) elem).collect(Collectors.toList());
case BasicDataType.BOOLEAN:
return listOfObjects.stream().map(elem -> (Boolean) elem).collect(Collectors.toList());
case BasicDataType.BYTE:
return listOfObjects.stream().map(elem -> (Byte) elem).collect(Collectors.toList());
case BasicDataType.SHORT:
return listOfObjects.stream().map(elem -> (Short) elem).collect(Collectors.toList());
case BasicDataType.STRING:
return listOfObjects.stream().map(elem -> (String) elem).collect(Collectors.toList());
default:
throw new IllegalArgumentException("Given type is not supported");
}
} else {
if(!type.equals(BasicDataType.DOUBLE))
throw new IllegalArgumentException("Only Double type is supported for vector");
List<Double> vectorValues;
try {
vectorValues = (List<Double>)value;
} catch (ClassCastException cce) {
throw new IllegalArgumentException("Input val is not a list but struct passed is vector");
}
double[] primitiveVectorValues = vectorValues.stream().mapToDouble(d -> d).toArray();
return Vectors.dense(primitiveVectorValues);
}
}
@VisibleForTesting
protected DataType convertInputToMLeapInputType(final String type, final String structure) {
BasicType basicType;
switch (type) {
case BasicDataType.INTEGER:
basicType = support.createInt();
break;
case BasicDataType.LONG:
basicType = support.createLong();
break;
case BasicDataType.FLOAT:
basicType = support.createFloat();
break;
case BasicDataType.DOUBLE:
basicType = support.createDouble();
break;
case BasicDataType.BOOLEAN:
basicType = support.createBoolean();
break;
case BasicDataType.BYTE:
basicType = support.createByte();
break;
case BasicDataType.SHORT:
basicType = support.createShort();
break;
case BasicDataType.STRING:
basicType = support.createString();
break;
default:
basicType = null;
}
if (basicType == null) {
throw new IllegalArgumentException("Data type passed in the request is wrong for one or more columns");
}
if (StringUtils.isNotBlank(structure)) {
switch (structure) {
case DataStructureType.VECTOR:
return new TensorType(basicType, true);
case DataStructureType.ARRAY:
return new ListType(basicType, true);
case DataStructureType.BASIC:
return new ScalarType(basicType, true);
}
}
// if structure field is not passed, it is by default basic
return new ScalarType(basicType, true);
}
}
| 1,579 |
0 |
Create_ds/netflix-graph/src/test/java/com/netflix
|
Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph/NFGraphTest.java
|
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Random;
import org.junit.Before;
import org.junit.Test;
import com.netflix.nfgraph.compressed.NFCompressedGraph;
public class NFGraphTest {

    RandomizedGraphBuilder randomizedGraphBuilder;
    private long seed;
    private NFGraph graph;

    @Before
    public void setUp() throws IOException {
        // Randomize the graph dimensions, but remember the seed so the build can be replayed.
        final Random sizeRand = new Random();
        final int numANodes = sizeRand.nextInt(10000);
        final int numBNodes = sizeRand.nextInt(10000);
        seed = System.currentTimeMillis();
        randomizedGraphBuilder = new RandomizedGraphBuilder(numANodes, numBNodes);
        final NFCompressedGraph built = randomizedGraphBuilder.build(new Random(seed));
        // Round-trip through serialization so both the write and read paths are exercised.
        final ByteArrayOutputStream serialized = new ByteArrayOutputStream();
        built.writeTo(serialized);
        final ByteArrayInputStream deserializedStream = new ByteArrayInputStream(serialized.toByteArray());
        this.graph = NFCompressedGraph.readFrom(deserializedStream);
    }

    @Test
    public void randomizedTest() {
        // Replaying the same seed regenerates the expected connections for verification.
        randomizedGraphBuilder.assertGraph(graph, new Random(seed));
    }
}
| 1,580 |
0 |
Create_ds/netflix-graph/src/test/java/com/netflix
|
Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph/NFGraphMemoryPoolTest.java
|
package com.netflix.nfgraph;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Random;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import com.netflix.nfgraph.compressed.NFCompressedGraph;
import com.netflix.nfgraph.util.ByteSegmentPool;
public class NFGraphMemoryPoolTest {

    // Shared pool used by roundTripGraph(); recreated before each test.
    private ByteSegmentPool memoryPool;

    @Before
    public void setUp() {
        memoryPool = new ByteSegmentPool(8);
    }

    /**
     * Builds several graphs backed by the shared memory pool, destroys one, and verifies that a
     * graph whose memory was returned to the pool no longer validates once its segments are reused.
     */
    @Test
    public void swapBackAndForth() throws IOException {
        // NOTE: a local ByteSegmentPool previously shadowed the field initialized in setUp();
        // roundTripGraph() only ever used the field, so the unused local has been removed.
        RandomizedGraphBuilder graphBuilder = new RandomizedGraphBuilder(10000, 10000);
        long seed = System.currentTimeMillis();
        NFCompressedGraph graph1 = graphBuilder.build(new Random(seed));
        graph1 = roundTripGraph(graph1);
        graphBuilder.assertGraph(graph1, new Random(seed));
        NFCompressedGraph graph2 = graphBuilder.build(new Random(seed+1));
        graph2 = roundTripGraph(graph2);
        graphBuilder.assertGraph(graph2, new Random(seed+1));
        // Returning graph1's segments to the pool makes them available for the next graph.
        graph1.destroy();
        NFCompressedGraph graph3 = graphBuilder.build(new Random(seed+2));
        graph3 = roundTripGraph(graph3);
        graphBuilder.assertGraph(graph3, new Random(seed+2));
        try {
            /// this shouldn't work -- we have reused this memory now.
            graphBuilder.assertGraph(graph1, new Random(seed));
            Assert.fail();
        } catch(AssertionError expected) { }
    }

    // Serializes the graph and reads it back through the shared memory pool.
    private NFCompressedGraph roundTripGraph(NFCompressedGraph graph) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        graph.writeTo(baos);
        ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
        return NFCompressedGraph.readFrom(bais, memoryPool);
    }
}
| 1,581 |
0 |
Create_ds/netflix-graph/src/test/java/com/netflix
|
Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph/RandomizedGraphBuilder.java
|
package com.netflix.nfgraph;
import static com.netflix.nfgraph.OrdinalIterator.NO_MORE_ORDINALS;
import static com.netflix.nfgraph.spec.NFPropertySpec.COMPACT;
import static com.netflix.nfgraph.spec.NFPropertySpec.GLOBAL;
import static com.netflix.nfgraph.spec.NFPropertySpec.HASH;
import static com.netflix.nfgraph.spec.NFPropertySpec.MODEL_SPECIFIC;
import static com.netflix.nfgraph.spec.NFPropertySpec.MULTIPLE;
import static com.netflix.nfgraph.spec.NFPropertySpec.SINGLE;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.util.HashSet;
import java.util.Random;
import com.netflix.nfgraph.build.NFBuildGraph;
import com.netflix.nfgraph.compressed.NFCompressedGraph;
import com.netflix.nfgraph.spec.NFGraphSpec;
import com.netflix.nfgraph.spec.NFNodeSpec;
import com.netflix.nfgraph.spec.NFPropertySpec;
/**
 * Builds an NFCompressedGraph with pseudo-random connections and verifies it by replaying the
 * same random sequence.
 *
 * IMPORTANT: build(rand) and assertGraph(graph, rand) must consume the Random stream in exactly
 * the same order (same sequence of nextBoolean()/nextInt() calls). assertGraph recomputes the
 * expected connections by replaying a Random seeded identically to the one used for build; any
 * reordering of the calls in either method breaks the correspondence.
 */
public class RandomizedGraphBuilder {

    // Fixed graph spec exercising every property flavor: single/multiple, compact/hashed,
    // global/model-specific.
    public static final NFGraphSpec RANDOM_GRAPH_SPEC = new NFGraphSpec(
        new NFNodeSpec("node-type-a",
            new NFPropertySpec("a-to-one-b-global", "node-type-b", SINGLE | GLOBAL),
            new NFPropertySpec("a-to-one-b-per-model", "node-type-b", SINGLE | MODEL_SPECIFIC)
        ),
        new NFNodeSpec("node-type-b",
            new NFPropertySpec("b-to-many-a-compact-global", "node-type-a", MULTIPLE | COMPACT | GLOBAL),
            new NFPropertySpec("b-to-many-a-hashed-global", "node-type-a", MULTIPLE | HASH | GLOBAL),
            new NFPropertySpec("b-to-many-a-compact-per-model", "node-type-a", MULTIPLE | COMPACT | MODEL_SPECIFIC),
            new NFPropertySpec("b-to-many-a-hashed-per-model", "node-type-a", MULTIPLE | HASH | MODEL_SPECIFIC)
        )
    );

    private final int numANodes;
    private final int numBNodes;

    public RandomizedGraphBuilder(int numANodes, int numBNodes) {
        this.numANodes = numANodes;
        this.numBNodes = numBNodes;
    }

    /**
     * Builds and compresses a graph whose connections are drawn from the given Random.
     * The sequence of rand calls here mirrors assertGraph exactly (see class comment).
     */
    public NFCompressedGraph build(Random rand) {
        NFBuildGraph graph = new NFBuildGraph(RANDOM_GRAPH_SPEC);
        graph.addConnectionModel("model-1");
        graph.addConnectionModel("model-2");
        // For each a-node: coin flip per property, then a random b-node target when heads.
        for(int i=0; i < numANodes;i++) {
            if(rand.nextBoolean())
                graph.addConnection("node-type-a", i, "a-to-one-b-global", rand.nextInt(numBNodes));
            if(rand.nextBoolean())
                graph.addConnection("model-1", "node-type-a", i, "a-to-one-b-per-model", rand.nextInt(numBNodes));
            if(rand.nextBoolean())
                graph.addConnection("model-2", "node-type-a", i, "a-to-one-b-per-model", rand.nextInt(numBNodes));
        }
        // For each b-node: possibly add a random connection set per property/model combination.
        for(int i=0; i < numBNodes;i++) {
            addMultipleRandomConnections(rand, graph, i, "global", "b-to-many-a-compact-global");
            addMultipleRandomConnections(rand, graph, i, "global", "b-to-many-a-hashed-global");
            addMultipleRandomConnections(rand, graph, i, "model-1", "b-to-many-a-compact-per-model");
            addMultipleRandomConnections(rand, graph, i, "model-2", "b-to-many-a-compact-per-model");
            addMultipleRandomConnections(rand, graph, i, "model-1", "b-to-many-a-hashed-per-model");
            addMultipleRandomConnections(rand, graph, i, "model-2", "b-to-many-a-hashed-per-model");
        }
        return graph.compress();
    }

    // Coin flip; on heads, adds a random set of a-node connections for the given property.
    // Consumes rand identically to assertMultipleConnections.
    private void addMultipleRandomConnections(Random rand, NFBuildGraph graph, int fromOrdinal, String model, String propertyName) {
        if(rand.nextBoolean()) {
            HashSet<Integer> connections = buildRandomConnectionSet(rand);
            for(Integer connection : connections) {
                graph.addConnection(model, "node-type-b", fromOrdinal, propertyName, connection.intValue());
            }
        }
    }

    /**
     * Verifies the graph against a replayed Random. Must be seeded identically to the Random
     * passed to build; the call order below mirrors build exactly.
     */
    public void assertGraph(NFGraph graph, Random rand) {
        for(int i=0;i<numANodes;i++) {
            int conn = graph.getConnection("node-type-a", i, "a-to-one-b-global");
            // Replay build's coin flip: heads means a connection to rand.nextInt(numBNodes),
            // tails means no connection (-1).
            int expected = rand.nextBoolean() ? rand.nextInt(numBNodes) : -1;
            assertEquals(expected, conn);
            conn = graph.getConnection("model-1", "node-type-a", i, "a-to-one-b-per-model");
            expected = rand.nextBoolean() ? rand.nextInt(numBNodes) : -1;
            assertEquals(expected, conn);
            conn = graph.getConnection("model-2", "node-type-a", i, "a-to-one-b-per-model");
            expected = rand.nextBoolean() ? rand.nextInt(numBNodes) : -1;
            assertEquals(expected, conn);
        }
        for(int i=0;i<numBNodes;i++) {
            assertMultipleConnections(graph, rand, "global", i, "b-to-many-a-compact-global");
            assertMultipleConnections(graph, rand, "global", i, "b-to-many-a-hashed-global");
            assertMultipleConnections(graph, rand, "model-1", i, "b-to-many-a-compact-per-model");
            assertMultipleConnections(graph, rand, "model-2", i, "b-to-many-a-compact-per-model");
            assertMultipleConnections(graph, rand, "model-1", i, "b-to-many-a-hashed-per-model");
            assertMultipleConnections(graph, rand, "model-2", i, "b-to-many-a-hashed-per-model");
        }
    }

    // Replays addMultipleRandomConnections' rand consumption and checks the stored set matches.
    private void assertMultipleConnections(NFGraph graph, Random rand, String model, int fromOrdinal, String propertyName) {
        OrdinalSet set = graph.getConnectionSet(model, "node-type-b", fromOrdinal, propertyName);
        if(!rand.nextBoolean()) {
            assertEquals(0, set.size());
            return;
        }
        HashSet<Integer> connections = buildRandomConnectionSet(rand);
        OrdinalIterator iter = set.iterator();
        int actualOrdinal = iter.nextOrdinal();
        while(actualOrdinal != NO_MORE_ORDINALS) {
            assertTrue(String.valueOf(actualOrdinal), connections.contains(actualOrdinal));
            actualOrdinal = iter.nextOrdinal();
        }
        // Size check plus membership check above together prove set equality.
        assertEquals(connections.size(), set.size());
    }

    // Draws a set of up to 99 distinct a-node ordinals. Deterministic for a given rand state:
    // duplicates are re-drawn, and both build and assert paths replay the same draws.
    private HashSet<Integer> buildRandomConnectionSet(Random rand) {
        int numConnections = rand.nextInt(100);
        HashSet<Integer> connections = new HashSet<Integer>();
        for(int j=0;j<numConnections;j++) {
            int connectedTo = rand.nextInt(numANodes);
            while(connections.contains(connectedTo))
                connectedTo = rand.nextInt(numANodes);
            connections.add(connectedTo);
        }
        return connections;
    }
}
| 1,582 |
0 |
Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph
|
Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph/serializer/NFCompressedGraphPointersSerializerTest.java
|
/*
* Copyright 2014-2022 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.serializer;
import com.netflix.nfgraph.compressed.NFCompressedGraphIntPointers;
import com.netflix.nfgraph.compressed.NFCompressedGraphLongPointers;
import com.netflix.nfgraph.compressed.NFCompressedGraphPointers;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.junit.Assert;
import org.junit.Test;
/**
 * Tests for the pointer serializer/deserializer pair.
 *
 * The serializer chooses an int-based pointer representation when the total data
 * length fits in an unsigned 32-bit value (&lt; 4GB) and a long-based representation
 * otherwise. These tests pin that threshold and verify pointer values survive a
 * round trip on both sides of it.
 */
public class NFCompressedGraphPointersSerializerTest {
@Test
public void dataLengthLessthan4GBUsesIntegerPointers() throws IOException {
NFCompressedGraphLongPointers pointers = new NFCompressedGraphLongPointers();
pointers.addPointers("test", new long[] { 1, 2, 3 });
// 2 * Integer.MAX_VALUE is just under 4GB, so unsigned int pointers still suffice.
NFCompressedGraphPointersSerializer serializer = new NFCompressedGraphPointersSerializer(pointers, (long)Integer.MAX_VALUE * 2);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(baos);
serializer.serializePointers(dos);
DataInputStream dis = new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
NFCompressedGraphPointers deserialized = new NFCompressedGraphPointersDeserializer().deserializePointers(dis);
Assert.assertTrue(deserialized instanceof NFCompressedGraphIntPointers);
}
@Test
public void dataLengthGreaterThan4GBUsesLongPointers() throws IOException {
NFCompressedGraphLongPointers pointers = new NFCompressedGraphLongPointers();
pointers.addPointers("test", new long[] { 1, 2, 3 });
// 3 * Integer.MAX_VALUE exceeds 4GB, forcing the long representation.
NFCompressedGraphPointersSerializer serializer = new NFCompressedGraphPointersSerializer(pointers, (long)Integer.MAX_VALUE * 3);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(baos);
serializer.serializePointers(dos);
DataInputStream dis = new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
NFCompressedGraphPointers deserialized = new NFCompressedGraphPointersDeserializer().deserializePointers(dis);
Assert.assertTrue(deserialized instanceof NFCompressedGraphLongPointers);
}
// Pointers between 2GB and 4GB: they overflow a signed int but still fit an
// unsigned int, so the compact int representation must round-trip them.
@Test
public void pointersMightStartGreaterThan2GB() throws IOException {
NFCompressedGraphLongPointers pointers = new NFCompressedGraphLongPointers();
long bigStartVal = Integer.MAX_VALUE;
bigStartVal += 5;
long[] ptrs = new long[] { bigStartVal, bigStartVal + 10, bigStartVal + 20, bigStartVal + 100 };
pointers.addPointers("Test", ptrs);
NFCompressedGraphPointersSerializer serializer = new NFCompressedGraphPointersSerializer(pointers, bigStartVal + 125);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
serializer.serializePointers(new DataOutputStream(baos));
NFCompressedGraphPointersDeserializer deserializer = new NFCompressedGraphPointersDeserializer();
NFCompressedGraphPointers deserialized = deserializer.deserializePointers(new DataInputStream(new ByteArrayInputStream(baos.toByteArray())));
for(int i=0;i<ptrs.length;i++) {
Assert.assertEquals(ptrs[i], deserialized.getPointer("Test", i));
}
Assert.assertTrue(deserialized instanceof NFCompressedGraphIntPointers);
}
// Pointers beyond 4GB cannot fit in an unsigned int at all, so the long
// representation is required and must round-trip exactly.
@Test
public void pointersMightStartGreaterThan4GB() throws IOException {
NFCompressedGraphLongPointers pointers = new NFCompressedGraphLongPointers();
long bigStartVal = Integer.MAX_VALUE;
bigStartVal *= 5;
long[] ptrs = new long[] { bigStartVal, bigStartVal + 10, bigStartVal + 20, bigStartVal + 100 };
pointers.addPointers("Test", ptrs);
NFCompressedGraphPointersSerializer serializer = new NFCompressedGraphPointersSerializer(pointers, bigStartVal + 125);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
serializer.serializePointers(new DataOutputStream(baos));
NFCompressedGraphPointersDeserializer deserializer = new NFCompressedGraphPointersDeserializer();
NFCompressedGraphPointers deserialized = deserializer.deserializePointers(new DataInputStream(new ByteArrayInputStream(baos.toByteArray())));
for(int i=0;i<ptrs.length;i++) {
Assert.assertEquals(ptrs[i], deserialized.getPointer("Test", i));
}
Assert.assertTrue(deserialized instanceof NFCompressedGraphLongPointers);
}
}
| 1,583 |
0 |
Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph
|
Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph/util/OrdinalMapTest.java
|
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.util;
import static org.junit.Assert.assertEquals;
import org.junit.Before;
import org.junit.Test;
import com.netflix.nfgraph.util.OrdinalMap;
/**
 * Tests for {@link OrdinalMap}: ordinals are assigned sequentially at insertion,
 * and the mapping is retained in both directions (object to ordinal, ordinal to object).
 */
public class OrdinalMapTest {

    private OrdinalMap<String> ordinalMap;

    @Before
    public void setUp() {
        ordinalMap = new OrdinalMap<String>();
    }

    /** add() hands out ordinals 0, 1, 2, ... in insertion order. */
    @Test
    public void assignsOrdinalsInOrder() {
        for(int i=0; i<10; i++)
            assertEquals(i, ordinalMap.add(String.valueOf(i)));
    }

    /** Each inserted object maps back to the ordinal it was assigned. */
    @Test
    public void retainsMappingFromObjectToOrdinal() {
        addValues(1000);
        for(int i=0; i<1000; i++)
            assertEquals(i, ordinalMap.get(String.valueOf(i)));
    }

    /** Each assigned ordinal maps back to the object it was assigned to. */
    @Test
    public void retainsMappingFromOrdinalToObject() {
        addValues(1000);
        for(int i=0; i<1000; i++)
            assertEquals(String.valueOf(i), ordinalMap.get(i));
    }

    // Inserts the string forms of 0..count-1, in order.
    private void addValues(int count) {
        for(int i=0; i<count; i++)
            ordinalMap.add(String.valueOf(i));
    }
}
| 1,584 |
0 |
Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph
|
Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph/spec/NFPropertySpecTest.java
|
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.spec;
import static com.netflix.nfgraph.spec.NFPropertySpec.HASH;
import static com.netflix.nfgraph.spec.NFPropertySpec.MODEL_SPECIFIC;
import static com.netflix.nfgraph.spec.NFPropertySpec.SINGLE;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
/**
 * Tests for {@link NFPropertySpec} flag handling.
 */
public class NFPropertySpecTest {

    /** With no flags set, a property defaults to global, multiple, and un-hashed. */
    @Test
    public void testInstantiateWithFlagsDefault() {
        NFPropertySpec spec = new NFPropertySpec(null, null, 0);
        assertTrue(spec.isGlobal());
        assertTrue(spec.isMultiple());
        assertFalse(spec.isHashed());
    }

    /** MODEL_SPECIFIC, HASH, and SINGLE each invert the corresponding default. */
    @Test
    public void testInstantiateWithFlags() {
        NFPropertySpec spec = new NFPropertySpec(null, null, MODEL_SPECIFIC | HASH | SINGLE);
        assertFalse(spec.isGlobal());
        assertFalse(spec.isMultiple());
        assertTrue(spec.isHashed());
    }
}
| 1,585 |
0 |
Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph
|
Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph/compressed/HashSetTest.java
|
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.compressed;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import com.netflix.nfgraph.OrdinalSet;
import com.netflix.nfgraph.compressor.HashedPropertyBuilder;
import com.netflix.nfgraph.util.ByteArrayBuffer;
import com.netflix.nfgraph.util.ByteArrayReader;
import com.netflix.nfgraph.util.ByteData;
import org.junit.Test;
/**
 * Exercises the hashed OrdinalSet encoding through the shared
 * {@link EncodedConnectionSetTest} harness.
 */
public class HashSetTest extends EncodedConnectionSetTest {

    @Override
    protected ByteData generateCompressedData(OrdinalSet ordinals) {
        ByteArrayBuffer buffer = new ByteArrayBuffer();
        new HashedPropertyBuilder(buffer).buildProperty(ordinals);
        dataLength = buffer.length();
        return buffer.getData();
    }

    @Override
    protected OrdinalSet createOrdinalSet() {
        return new HashSetOrdinalSet(new ByteArrayReader(data, 0, dataLength));
    }

    @Override
    protected int maximumTotalOrdinals() {
        return 100000;
    }

    /** A set containing only ordinal 127 must encode and probe correctly at the byte boundary. */
    @Test
    public void singleOrdinal127IsSizedAppropriately() {
        ByteArrayBuffer buffer = new ByteArrayBuffer();
        new HashedPropertyBuilder(buffer).buildProperty(new SingleOrdinalSet(127));
        OrdinalSet set = new HashSetOrdinalSet(new ByteArrayReader(buffer.getData(), 0, buffer.length()));
        assertTrue(set.contains(127));
        assertFalse(set.contains(128));
    }
}
| 1,586 |
0 |
Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph
|
Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph/compressed/NFCompressedGraphTest.java
|
package com.netflix.nfgraph.compressed;
import static com.netflix.nfgraph.OrdinalIterator.NO_MORE_ORDINALS;
import static com.netflix.nfgraph.spec.NFPropertySpec.GLOBAL;
import static com.netflix.nfgraph.spec.NFPropertySpec.MULTIPLE;
import static com.netflix.nfgraph.spec.NFPropertySpec.SINGLE;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import com.netflix.nfgraph.OrdinalIterator;
import com.netflix.nfgraph.OrdinalSet;
import com.netflix.nfgraph.build.NFBuildGraph;
import com.netflix.nfgraph.exception.NFGraphException;
import com.netflix.nfgraph.spec.NFGraphSpec;
import com.netflix.nfgraph.spec.NFNodeSpec;
import com.netflix.nfgraph.spec.NFPropertySpec;
/**
 * Tests for {@link NFCompressedGraph} query behavior: single- and
 * multiple-connection lookups, empty results for undefined ordinals, and
 * {@link NFGraphException} for unknown node types or properties.
 */
public class NFCompressedGraphTest {
private NFCompressedGraph compressedGraph;
@Before
public void setUp() {
// Node type "a" has one MULTIPLE and one SINGLE property, both pointing at "b".
NFGraphSpec spec = new NFGraphSpec(
new NFNodeSpec(
"a",
new NFPropertySpec("multiple", "b", GLOBAL | MULTIPLE),
new NFPropertySpec("single", "b", GLOBAL | SINGLE)
),
new NFNodeSpec("b")
);
NFBuildGraph graph = new NFBuildGraph(spec);
graph.addConnection("a", 0, "multiple", 0);
graph.addConnection("a", 0, "multiple", 1);
graph.addConnection("a", 0, "single", 0);
compressedGraph = graph.compress();
}
// A SINGLE property still exposes its one connection through the set API.
@Test
public void returnsValidOrdinalSetForSingleConnections() {
OrdinalSet set = compressedGraph.getConnectionSet("a", 0, "single");
assertEquals(1, set.size());
assertEquals(true, set.contains(0));
assertArrayEquals(new int[] { 0 }, set.asArray());
}
// The iterator over a SINGLE property yields the ordinal once, and again after reset().
@Test
public void returnsValidOrdinalIteratorForSingleConnections() {
OrdinalIterator iter = compressedGraph.getConnectionIterator("a", 0, "single");
assertEquals(0, iter.nextOrdinal());
assertEquals(NO_MORE_ORDINALS, iter.nextOrdinal());
iter.reset();
assertEquals(0, iter.nextOrdinal());
assertEquals(NO_MORE_ORDINALS, iter.nextOrdinal());
}
// getConnection() on a MULTIPLE property returns just the first connected ordinal.
@Test
public void returnsFirstOrdinalForMultipleConnections() {
int ordinal = compressedGraph.getConnection("a", 0, "multiple");
assertEquals(0, ordinal);
}
// Node ordinal 1 was never given connections, so lookups return the empty sentinels.
@Test
public void returnsNegativeOneForUndefinedConnections() {
int ordinal = compressedGraph.getConnection("a", 1, "multiple");
assertEquals(-1, ordinal);
}
@Test
public void returnsEmptySetForUndefinedConnections() {
OrdinalSet set = compressedGraph.getConnectionSet("a", 1, "multiple");
assertEquals(0, set.size());
}
@Test
public void returnsEmptyIteratorForUndefinedConnections() {
OrdinalIterator iter = compressedGraph.getConnectionIterator("a", 1, "multiple");
assertEquals(NO_MORE_ORDINALS, iter.nextOrdinal());
}
// Unknown node types and unknown properties are programming errors and must throw.
@Test
public void throwsNFGraphExceptionWhenQueryingForUndefinedNodeType() {
try {
compressedGraph.getConnectionSet("undefined", 0, "multiple");
Assert.fail("NFGraphException should have been thrown");
} catch(NFGraphException expected) { }
}
@Test
public void throwsNFGraphExceptionWhenQueryingForUndefinedProperty() {
try {
compressedGraph.getConnectionIterator("a", 0, "undefined");
Assert.fail("NFGraphException should have been thrown");
} catch(NFGraphException expected) { }
}
}
| 1,587 |
0 |
Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph
|
Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph/compressed/EncodedConnectionSetTest.java
|
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.compressed;
import static com.netflix.nfgraph.OrdinalIterator.NO_MORE_ORDINALS;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.fail;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Random;
import java.util.Set;
import org.junit.Before;
import org.junit.Test;
import com.netflix.nfgraph.OrdinalIterator;
import com.netflix.nfgraph.OrdinalSet;
import com.netflix.nfgraph.build.NFBuildGraphOrdinalSet;
import com.netflix.nfgraph.util.ByteData;
/**
 * Base harness for the encoded {@link OrdinalSet} implementation tests.
 *
 * <p>{@code setUp()} draws a random universe of ordinals, picks a random subset as
 * the expected contents, compresses it via the subclass's builder, and each test
 * decodes the data and checks it against the expectation. The time-based random
 * seed is included in every assertion message so failures are reproducible.
 */
public abstract class EncodedConnectionSetTest {

    protected int totalOrdinals;
    protected Set<Integer> expectedOrdinals;
    protected ByteData data;
    protected long dataLength;
    protected Random rand;
    protected long seed;

    @Before
    public void setUp() {
        createRandom();
        // Guarantee at least one ordinal in the universe: Random.nextInt(0)
        // throws IllegalArgumentException, so a bound of 0 from the plain
        // rand.nextInt(maximumTotalOrdinals()) would make this test flaky.
        totalOrdinals = rand.nextInt(maximumTotalOrdinals() - 1) + 1;
        OrdinalSet ordinals = generateExpectedOrdinals(totalOrdinals);
        data = generateCompressedData(ordinals);
    }

    /** Compresses the given ordinals with the encoding under test. */
    protected abstract ByteData generateCompressedData(OrdinalSet ordinals);

    /** Decodes {@link #data} back into the OrdinalSet implementation under test. */
    protected abstract OrdinalSet createOrdinalSet();

    /** Upper bound (exclusive) on the size of the ordinal universe for this encoding. */
    protected abstract int maximumTotalOrdinals();

    private void createRandom() {
        seed = System.currentTimeMillis();
        rand = new Random(seed);
    }

    private OrdinalSet generateExpectedOrdinals(int totalOrdinals) {
        expectedOrdinals = new HashSet<Integer>();
        // Strictly fewer ordinals than the universe, so generateUnexpectedOrdinal()
        // below always has at least one ordinal left to find.
        int numOrdinalsInSet = rand.nextInt(totalOrdinals);
        int ordinals[] = new int[numOrdinalsInSet];
        for(int i=0; i<ordinals.length; i++) {
            int ordinal = rand.nextInt(totalOrdinals);
            while(expectedOrdinals.contains(ordinal))
                ordinal = rand.nextInt(totalOrdinals);
            ordinals[i] = ordinal;
            expectedOrdinals.add(ordinal);
        }
        return new NFBuildGraphOrdinalSet(ordinals, ordinals.length);
    }

    @Test
    public void ordinalSetSizeIsCorrect() {
        OrdinalSet ordinalSet = createOrdinalSet();
        assertEquals("seed: " + seed, expectedOrdinals.size(), ordinalSet.size());
    }

    @Test
    public void ordinalSetContainsExpectedOrdinals() {
        OrdinalSet ordinalSet = createOrdinalSet();
        for(Integer expected : expectedOrdinals) {
            assertTrue("expected: " + expected.intValue() + " seed: " + seed, ordinalSet.contains(expected.intValue()));
        }
    }

    @Test
    public void ordinalSetContainsAll() {
        OrdinalSet ordinalSet = createOrdinalSet();
        int expected[] = new int[expectedOrdinals.size()];
        int expectedIdx = 0;
        // Randomly sample roughly half of the expected ordinals.
        for(Integer expect : expectedOrdinals) {
            if(rand.nextBoolean()) {
                expected[expectedIdx++] = expect.intValue();
            }
        }
        assertTrue("seed: " + seed, ordinalSet.containsAll(Arrays.copyOf(expected, expectedIdx)));
    }

    @Test
    public void ordinalSetContainsMostButNotAll() {
        OrdinalSet ordinalSet = createOrdinalSet();
        // Each loop iteration may append up to TWO entries (one expected ordinal
        // plus one unexpected one) and a final entry may be appended after the
        // loop, so size the scratch array for that worst case. The previous
        // size of expectedOrdinals.size() + 1 could overflow and throw
        // ArrayIndexOutOfBoundsException on unlucky random draws.
        int unexpected[] = new int[2 * expectedOrdinals.size() + 2];
        int unexpectedIdx = 0;
        boolean addedUnexpected = false;
        for(Integer expect : expectedOrdinals) {
            if(rand.nextBoolean()) {
                unexpected[unexpectedIdx++] = expect.intValue();
            }
            if(rand.nextInt(5) == 0) {
                unexpected[unexpectedIdx++] = generateUnexpectedOrdinal();
                addedUnexpected = true;
            }
        }
        // Ensure at least one ordinal outside the set so containsAll must fail.
        if(!addedUnexpected) {
            unexpected[unexpectedIdx++] = generateUnexpectedOrdinal();
        }
        assertFalse("seed: " + seed, ordinalSet.containsAll(Arrays.copyOf(unexpected, unexpectedIdx)));
    }

    @Test
    public void ordinalSetDoesNotContainUnexpectedOrdinals() {
        OrdinalSet ordinalSet = createOrdinalSet();
        for(int i=0;i<totalOrdinals;i++) {
            if(!expectedOrdinals.contains(i)) {
                assertFalse("seed: " + seed, ordinalSet.contains(i));
            }
        }
    }

    @Test
    public void iteratorReturnsArray() {
        OrdinalSet ordinalSet = createOrdinalSet();
        int arr[] = ordinalSet.asArray();
        for(int ordinal : arr) {
            assertTrue("seed: " + seed, expectedOrdinals.contains(Integer.valueOf(ordinal)));
        }
        assertEquals(expectedOrdinals.size(), arr.length);
    }

    @Test
    public void iteratorReturnsAllExpectedOrdinalsOnce() {
        OrdinalIterator iter = createOrdinalSet().iterator();
        Set<Integer> returnedOrdinals = new HashSet<Integer>();
        int counter = 0;
        try {
            int ordinal = iter.nextOrdinal();
            while(ordinal != NO_MORE_ORDINALS) {
                counter++;
                assertTrue("seed: " + seed, expectedOrdinals.contains(ordinal));
                returnedOrdinals.add(ordinal);
                ordinal = iter.nextOrdinal();
            }
        } catch(Throwable t) {
            t.printStackTrace();
            fail("seed: " + seed);
        }
        // counter == distinct count proves no ordinal was returned twice.
        assertEquals("seed: " + seed, expectedOrdinals.size(), returnedOrdinals.size());
        assertEquals("seed: " + seed, expectedOrdinals.size(), counter);
    }

    // Returns an ordinal inside the universe that is NOT in the expected set.
    // Always terminates because the expected set is strictly smaller than the universe.
    private int generateUnexpectedOrdinal() {
        int unexpectedOrdinal = rand.nextInt(totalOrdinals);
        while(expectedOrdinals.contains(unexpectedOrdinal))
            unexpectedOrdinal = rand.nextInt(totalOrdinals);
        return unexpectedOrdinal;
    }
}
| 1,588 |
0 |
Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph
|
Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph/compressed/BitSetTest.java
|
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.compressed;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import com.netflix.nfgraph.OrdinalSet;
import com.netflix.nfgraph.compressor.BitSetPropertyBuilder;
import com.netflix.nfgraph.compressor.HashedPropertyBuilder;
import com.netflix.nfgraph.util.ByteArrayBuffer;
import com.netflix.nfgraph.util.ByteArrayReader;
import com.netflix.nfgraph.util.ByteData;
import com.netflix.nfgraph.util.SimpleByteArray;
import org.junit.Assert;
import org.junit.Test;
/**
 * Exercises the bit-set OrdinalSet encoding through the shared
 * {@link EncodedConnectionSetTest} harness.
 */
public class BitSetTest extends EncodedConnectionSetTest {

    @Override
    protected ByteData generateCompressedData(OrdinalSet ordinals) {
        ByteArrayBuffer buffer = new ByteArrayBuffer();
        new BitSetPropertyBuilder(buffer).buildProperty(ordinals, totalOrdinals);
        dataLength = buffer.length();
        return buffer.getData();
    }

    @Override
    protected OrdinalSet createOrdinalSet() {
        return new BitSetOrdinalSet(new ByteArrayReader(data, 0, dataLength));
    }

    @Override
    protected int maximumTotalOrdinals() {
        return 100000;
    }

    /** A reader limited to a sub-range of the backing array must never read beyond it. */
    @Test
    public void bitSetDoesNotAttemptToReadPastRange() {
        byte[] bytes = new byte[] { 1, 1, 1 };
        BitSetOrdinalSet set = new BitSetOrdinalSet(new ByteArrayReader(new SimpleByteArray(bytes), 1, 2));
        Assert.assertEquals(1, set.size());
        Assert.assertTrue(set.contains(0));
        // Ordinal 8 would live in the byte just past the reader's range.
        Assert.assertFalse(set.contains(8));
    }
}
| 1,589 |
0 |
Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph
|
Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph/compressed/CompactSetTest.java
|
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.compressed;
import com.netflix.nfgraph.OrdinalSet;
import com.netflix.nfgraph.compressor.CompactPropertyBuilder;
import com.netflix.nfgraph.util.ByteArrayBuffer;
import com.netflix.nfgraph.util.ByteArrayReader;
import com.netflix.nfgraph.util.ByteData;
/**
 * Exercises the compact (delta-encoded) OrdinalSet encoding through the shared
 * {@link EncodedConnectionSetTest} harness.
 */
public class CompactSetTest extends EncodedConnectionSetTest {

    @Override
    protected ByteData generateCompressedData(OrdinalSet ordinals) {
        ByteArrayBuffer buffer = new ByteArrayBuffer();
        new CompactPropertyBuilder(buffer).buildProperty(ordinals);
        dataLength = buffer.length();
        return buffer.getData();
    }

    @Override
    protected OrdinalSet createOrdinalSet() {
        return new CompactOrdinalSet(new ByteArrayReader(data, 0, dataLength));
    }

    @Override
    protected int maximumTotalOrdinals() {
        return 1000;
    }
}
| 1,590 |
0 |
Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph
|
Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph/compressed/NFCompressedGraphIntPointersTest.java
|
/*
* Copyright 2014 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.compressed;
import org.junit.Assert;
import org.junit.Test;
/**
 * Verifies that int-backed pointers are treated as unsigned: values above
 * Integer.MAX_VALUE must survive the int round trip.
 */
public class NFCompressedGraphIntPointersTest {

    @Test
    public void test() {
        long first = (long)Integer.MAX_VALUE + 1000;
        long second = (long)Integer.MAX_VALUE * 2;
        NFCompressedGraphIntPointers pointers = new NFCompressedGraphIntPointers();
        // Both values overflow a signed int; the cast relies on unsigned interpretation.
        pointers.addPointers("test", new int[] { (int)first, (int)second });
        Assert.assertEquals(first, pointers.getPointer("test", 0));
        Assert.assertEquals(second, pointers.getPointer("test", 1));
    }
}
| 1,591 |
0 |
Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph
|
Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph/compressed/VIntTest.java
|
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.compressed;
import static org.junit.Assert.assertEquals;
import java.util.Random;
import org.junit.Before;
import org.junit.Test;
import com.netflix.nfgraph.util.ByteArrayBuffer;
import com.netflix.nfgraph.util.ByteArrayReader;
import com.netflix.nfgraph.util.ByteData;
/**
 * Round-trip test for variable-length integer encoding: a random sequence of
 * non-negative ints is written with writeVInt and must read back identically
 * with readVInt. The time-based seed is reported on failure for reproducibility.
 */
public class VIntTest {

    private int randomValues[];
    private ByteData data;
    private long seed;

    @Before
    public void setUp() {
        seed = System.currentTimeMillis();
        Random rand = new Random(seed);
        ByteArrayBuffer buf = new ByteArrayBuffer();
        // Always encode at least one value so the round-trip test below can
        // never pass vacuously on an empty sequence.
        randomValues = new int[rand.nextInt(10000) + 1];
        for(int i=0;i<randomValues.length;i++) {
            randomValues[i] = rand.nextInt(Integer.MAX_VALUE);
            buf.writeVInt(randomValues[i]);
        }
        data = buf.getData();
    }

    @Test
    public void decodedValuesAreSameAsEncodedValues() {
        ByteArrayReader reader = new ByteArrayReader(data, 0);
        for(int i=0;i<randomValues.length;i++) {
            assertEquals("seed: " + seed, randomValues[i], reader.readVInt());
        }
    }
}
| 1,592 |
0 |
Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph
|
Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph/build/NFBuildGraphOrdinalIteratorTest.java
|
package com.netflix.nfgraph.build;
import static com.netflix.nfgraph.OrdinalIterator.NO_MORE_ORDINALS;
import static org.junit.Assert.assertEquals;
import org.junit.Before;
import org.junit.Test;
import com.netflix.nfgraph.OrdinalIterator;
/**
 * Tests for {@link NFBuildGraphOrdinalIterator}: the iterator sorts, deduplicates,
 * supports reset(), and produces equivalent copies.
 */
public class NFBuildGraphOrdinalIteratorTest {

    private NFBuildGraphOrdinalIterator iter;

    @Before
    public void setUp() {
        // Backing array has 5 slots but only the first 4 entries are live;
        // the duplicate 2 must be collapsed to a single occurrence.
        iter = new NFBuildGraphOrdinalIterator(new int[] { 2, 1, 2, 3, 4 }, 4);
    }

    @Test
    public void iteratesOverOrdinalsInOrder() {
        assertYieldsOneTwoThree(iter);
        // Remains exhausted on subsequent calls.
        assertEquals(NO_MORE_ORDINALS, iter.nextOrdinal());
    }

    @Test
    public void canBeReset() {
        for(int i=0; i<10; i++)
            iter.nextOrdinal();
        iter.reset();
        assertYieldsOneTwoThree(iter);
    }

    @Test
    public void copyContainsSameOrdinals() {
        OrdinalIterator copy = iter.copy();
        assertYieldsOneTwoThree(copy);
    }

    @Test
    public void isOrdered() {
        assertEquals(true, iter.isOrdered());
    }

    // Asserts the canonical deduplicated, sorted sequence 1, 2, 3 then exhaustion.
    private void assertYieldsOneTwoThree(OrdinalIterator it) {
        assertEquals(1, it.nextOrdinal());
        assertEquals(2, it.nextOrdinal());
        assertEquals(3, it.nextOrdinal());
        assertEquals(NO_MORE_ORDINALS, it.nextOrdinal());
    }
}
| 1,593 |
0 |
Create_ds/netflix-graph/src/main/java/com/netflix
|
Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/NFGraphModelHolder.java
|
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph;
import java.util.Iterator;
import com.netflix.nfgraph.util.OrdinalMap;
/**
* <code>NFGraphModelHolder</code> maintains an ordering over the models in a given NFGraph.<p>
*
* An {@link NFGraph} may contain one or more "connection models". A "connection model" is a grouping of the set of connections
* between nodes in the graph.<p>
*
* Connections added for a connection model will be visible only for that model. Use of multiple connection models will
* add a minimum of one byte per model-specific connection set per node. As a result, this feature should be used only
* when the number of connection models is and will remain low.<p>
*
* It is unlikely that this class will need to be used externally.
*/
public class NFGraphModelHolder implements Iterable<String> {

    /** The default connection model, always present with model index 0. */
    public static final String CONNECTION_MODEL_GLOBAL = "global";

    // Immutable reference: the map itself grows, but it is never replaced.
    private final OrdinalMap<String> modelMap;

    public NFGraphModelHolder() {
        modelMap = new OrdinalMap<String>();
        // Register the global model first so it is guaranteed index 0.
        modelMap.add(CONNECTION_MODEL_GLOBAL);
    }

    /** @return the number of connection models registered, including the global model. */
    public int size() {
        return modelMap.size();
    }

    /**
     * Returns the index for the given connection model, registering the model
     * with a new index if it has not been seen before.
     */
    public int getModelIndex(String connectionModel) {
        return modelMap.add(connectionModel);
    }

    /** @return the model name registered at the given index. */
    public String getModel(int modelIndex) {
        return modelMap.get(modelIndex);
    }

    /** Iterates over all registered model names, beginning with the global model. */
    @Override
    public Iterator<String> iterator() {
        return modelMap.iterator();
    }
}
| 1,594 |
0 |
Create_ds/netflix-graph/src/main/java/com/netflix
|
Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/OrdinalIterator.java
|
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph;
import com.netflix.nfgraph.compressed.BitSetOrdinalIterator;
import com.netflix.nfgraph.compressed.CompactOrdinalIterator;
import com.netflix.nfgraph.compressed.HashSetOrdinalIterator;
/**
* <code>OrdinalIterator</code> is the interface used to iterate over a set of connections.<p>
*
* An <code>OrdinalIterator</code> may be obtained for a set of connections directly from an {@link NFGraph} or via an {@link OrdinalSet}
* obtained from an {@link NFGraph}.
*
* @see CompactOrdinalIterator
* @see HashSetOrdinalIterator
* @see BitSetOrdinalIterator
*
*/
public interface OrdinalIterator {
/**
 * This value will be returned from <code>nextOrdinal()</code> after the iteration is completed.
 * Integer.MAX_VALUE is safe as a sentinel because it exceeds any valid ordinal.
 */
public static final int NO_MORE_ORDINALS = Integer.MAX_VALUE;
/**
 * @return the next ordinal in this set, or {@link #NO_MORE_ORDINALS} when the set is exhausted.
 */
public int nextOrdinal();
/**
 * Rewinds this <code>OrdinalIterator</code> to the beginning of the set.
 */
public void reset();
/**
 * Obtain a copy of this <code>OrdinalIterator</code>. The returned <code>OrdinalIterator</code> will be reset to the beginning of the set.
 *
 * @return an independent iterator over the same set, positioned at the beginning
 */
public OrdinalIterator copy();
/**
 * @return <code>true</code> if the ordinals returned from this set are guaranteed to be in ascending order. Returns <code>false</code> otherwise.
 */
public boolean isOrdered();
/**
 * An iterator which always return <code>OrdinalIterator.NO_MORE_ORDINALS</code>.
 * Stateless and immutable, so a single shared instance is safe; copy() returns itself.
 */
public static final OrdinalIterator EMPTY_ITERATOR = new OrdinalIterator() {
@Override public int nextOrdinal() { return NO_MORE_ORDINALS; }
@Override public void reset() { }
@Override public OrdinalIterator copy() { return this; }
@Override public boolean isOrdered() { return true; }
};
}
| 1,595 |
0 |
Create_ds/netflix-graph/src/main/java/com/netflix
|
Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/OrdinalSet.java
|
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph;
import static com.netflix.nfgraph.OrdinalIterator.EMPTY_ITERATOR;
import static com.netflix.nfgraph.OrdinalIterator.NO_MORE_ORDINALS;
import com.netflix.nfgraph.build.NFBuildGraphOrdinalSet;
import com.netflix.nfgraph.compressed.BitSetOrdinalSet;
import com.netflix.nfgraph.compressed.CompactOrdinalSet;
import com.netflix.nfgraph.compressed.HashSetOrdinalSet;
/**
* <code>OrdinalSet</code> is the interface used to represent a set of connections.<p>
*
* An <code>OrdinalSet</code> is obtained directly from an {@link NFGraph}.
*
* @see NFGraph
*
*/
/**
 * <code>OrdinalSet</code> is the interface used to represent a set of connections.<p>
 *
 * An <code>OrdinalSet</code> is obtained directly from an {@link NFGraph}.
 *
 * @see NFGraph
 */
public abstract class OrdinalSet {

    /**
     * Returns <code>true</code> when the specified value is contained in this set.
     * Depending on the implementation, this operation will have one of two performance
     * characteristics:<p>
     *
     * <code>O(1)</code> for {@link HashSetOrdinalSet} and {@link BitSetOrdinalSet}<br>
     * <code>O(n)</code> for {@link CompactOrdinalSet} and {@link NFBuildGraphOrdinalSet}
     */
    public abstract boolean contains(int value);

    /**
     * Returns <code>true</code> when all specified values are contained in this set.
     * Depending on the implementation, this operation will have one of two performance
     * characteristics:<p>
     *
     * <code>O(m)</code> for {@link HashSetOrdinalSet} and {@link BitSetOrdinalSet}, where <code>m</code> is the number of specified elements.<br>
     * <code>O(n)</code> for {@link CompactOrdinalSet}, where <code>n</code> is the number of elements in the set.<br>
     * <code>O(n * m)</code> for {@link NFBuildGraphOrdinalSet}.
     */
    public boolean containsAll(int... values) {
        for (int candidate : values) {
            if (!contains(candidate)) {
                return false;
            }
        }
        return true;
    }

    /**
     * Returns an array containing all elements in the set.
     */
    public int[] asArray() {
        int[] elements = new int[size()];
        OrdinalIterator iter = iterator();

        int idx = 0;
        // Drain the iterator until its NO_MORE_ORDINALS sentinel; size() guarantees capacity.
        for (int ordinal = iter.nextOrdinal(); ordinal != NO_MORE_ORDINALS; ordinal = iter.nextOrdinal()) {
            elements[idx++] = ordinal;
        }

        return elements;
    }

    /**
     * @return an {@link OrdinalIterator} over this set.
     */
    public abstract OrdinalIterator iterator();

    /**
     * @return the number of ordinals in this set.
     */
    public abstract int size();

    private static final int EMPTY_ORDINAL_ARRAY[] = new int[0];

    /**
     * An empty <code>OrdinalSet</code>.  Stateless, so shared as a singleton.
     */
    public static final OrdinalSet EMPTY_SET = new OrdinalSet() {
        @Override public boolean contains(int value) { return false; }
        @Override public int[] asArray() { return EMPTY_ORDINAL_ARRAY; }
        @Override public OrdinalIterator iterator() { return EMPTY_ITERATOR; }
        @Override public int size() { return 0; }
    };
}
| 1,596 |
0 |
Create_ds/netflix-graph/src/main/java/com/netflix
|
Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/NFGraph.java
|
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph;
import com.netflix.nfgraph.build.NFBuildGraph;
import com.netflix.nfgraph.compressed.NFCompressedGraph;
import com.netflix.nfgraph.spec.NFGraphSpec;
import com.netflix.nfgraph.util.OrdinalMap;
/**
* <code>NFGraph</code> represents a directed graph and is the base class for the two flavors of NetflixGraph
* ({@link NFBuildGraph} and {@link NFCompressedGraph}). It defines the operations for retrieving connections
* in the graph, given some node and property.<p>
*
* In the NetflixGraph library, each node in your graph is expected to be uniquely represented as a "type" and "ordinal".
* Each "type" will be referred to by some String. An "ordinal", in this sense, is a number that uniquely defines the node
* given its type. If a type of node has "n" instances, then each instance should be representable by some unique value
* from 0 through (n-1). If nodes in the graph are represented as Objects externally to the NetflixGraph library, then
* developers may find it helpful to use an {@link OrdinalMap} for each type to create and maintain a mapping between objects
* and their ordinals. The {@link OrdinalMap} has been optimized with this use case in mind. <p>
*
* Use of the NFGraph is expected to generally follow some lifecycle:<p>
* <ol>
* <li>Define an {@link NFGraphSpec}, which serves as the schema for the graph data.</li>
* <li>Instantiate an {@link NFBuildGraph}, then populate it with connections.</li>
* <li>Compress the {@link NFBuildGraph}, which will return a representation of the data as an {@link NFCompressedGraph}.</li>
* <li>Serialize the {@link NFCompressedGraph} to a stream. Netflix, for example, has a use case which streams this graph to Amazon Web Service's S3.</li>
* <li>Deserialize the stream where the compact in-memory representation of the graph data is necessary.</li>
* </ol><p>
*
* In some cases, the location where the compact in-memory representation is necessary is the same as the location where this
* representation will be built. In these cases, steps (4) and (5) above will be omitted.<p>
*
* If there will be a producer of this graph and one or more consumers, then your producer code will resemble:<p>
*
* <pre>
* {@code
* NFGraphSpec spec = new NFGraphSpec( ... );
*
* NFBuildGraph buildGraph = new NFBuildGraph(spec);
*
* for( ... each connection between nodes ... ) {
* graph.addConnection( ... );
* }
*
* NFCompressedGraph compressedGraph = buildGraph.compress();
*
* OutputStream os = ... stream to where you want the serialized data ...;
*
* compressedGraph.writeTo(os);
* }
* </pre>
*
* And your consumer code will resemble:<p>
*
* <pre>
* {@code
* InputStream is = ... stream from where the serialized data was written ...;
*
* NFGraph graph = NFCompressedGraph.readFrom(is);
* }
* </pre>
*
* @see NFGraphSpec
* @see NFBuildGraph
* @see NFCompressedGraph
*
* @author dkoszewnik
*/
/**
 * <code>NFGraph</code> represents a directed graph and is the base class for the two flavors of
 * NetflixGraph ({@link NFBuildGraph} and {@link NFCompressedGraph}).  It defines the operations for
 * retrieving connections in the graph, given some node and property.<p>
 *
 * Each node in the graph is uniquely identified by a "type" (a String) and an "ordinal" (a number
 * from 0 through n-1 for a type with n instances).  An {@link OrdinalMap} can be used to maintain
 * the mapping between application objects and their ordinals.<p>
 *
 * Typical lifecycle: define an {@link NFGraphSpec} (the schema), populate an {@link NFBuildGraph},
 * compress it into an {@link NFCompressedGraph}, optionally serialize the compressed graph to a
 * stream and deserialize it wherever the compact in-memory representation is needed.
 *
 * @see NFGraphSpec
 * @see NFBuildGraph
 * @see NFCompressedGraph
 *
 * @author dkoszewnik
 */
public abstract class NFGraph {

    protected final NFGraphSpec graphSpec;
    protected final NFGraphModelHolder modelHolder;

    protected NFGraph(NFGraphSpec graphSpec) {
        // Delegate to the two-argument constructor with a fresh model holder.
        this(graphSpec, new NFGraphModelHolder());
    }

    protected NFGraph(NFGraphSpec graphSpec, NFGraphModelHolder modelHolder) {
        this.graphSpec = graphSpec;
        this.modelHolder = modelHolder;
    }

    /**
     * Retrieve a single connected ordinal, given the type and ordinal of the originating node, and the property by which this node is connected.
     *
     * @return the connected ordinal, or -1 if there is no such ordinal
     */
    public int getConnection(String nodeType, int ordinal, String propertyName) {
        // Model index 0 is the default connection model.
        return getConnection(0, nodeType, ordinal, propertyName);
    }

    /**
     * Retrieve a single connected ordinal in a given connection model, given the type and ordinal of the originating node, and the property by which this node is connected.
     *
     * @return the connected ordinal, or -1 if there is no such ordinal
     */
    public int getConnection(String connectionModel, String nodeType, int ordinal, String propertyName) {
        return getConnection(modelHolder.getModelIndex(connectionModel), nodeType, ordinal, propertyName);
    }

    /**
     * Retrieve an {@link OrdinalIterator} over all connected ordinals, given the type and ordinal of the originating node, and the property by which this node is connected.
     *
     * @return an {@link OrdinalIterator} over all connected ordinals
     */
    public OrdinalIterator getConnectionIterator(String nodeType, int ordinal, String propertyName) {
        return getConnectionIterator(0, nodeType, ordinal, propertyName);
    }

    /**
     * Retrieve an {@link OrdinalIterator} over all connected ordinals in a given connection model, given the type and ordinal of the originating node, and the property by which this node is connected.
     *
     * @return an {@link OrdinalIterator} over all connected ordinals
     */
    public OrdinalIterator getConnectionIterator(String connectionModel, String nodeType, int ordinal, String propertyName) {
        return getConnectionIterator(modelHolder.getModelIndex(connectionModel), nodeType, ordinal, propertyName);
    }

    /**
     * Retrieve an {@link OrdinalSet} over all connected ordinals, given the type and ordinal of the originating node, and the property by which this node is connected.
     *
     * @return an {@link OrdinalSet} over all connected ordinals
     */
    public OrdinalSet getConnectionSet(String nodeType, int ordinal, String propertyName) {
        return getConnectionSet(0, nodeType, ordinal, propertyName);
    }

    /**
     * Retrieve an {@link OrdinalSet} over all connected ordinals in a given connection model, given the type and ordinal of the originating node, and the property by which this node is connected.
     *
     * @return an {@link OrdinalSet} over all connected ordinals
     */
    public OrdinalSet getConnectionSet(String connectionModel, String nodeType, int ordinal, String propertyName) {
        return getConnectionSet(modelHolder.getModelIndex(connectionModel), nodeType, ordinal, propertyName);
    }

    protected abstract int getConnection(int connectionModelIndex, String nodeType, int ordinal, String propertyName);

    protected abstract OrdinalSet getConnectionSet(int connectionModelIndex, String nodeType, int ordinal, String propertyName);

    protected abstract OrdinalIterator getConnectionIterator(int connectionModelIndex, String nodeType, int ordinal, String propertyName);
}
| 1,597 |
0 |
Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph
|
Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/serializer/NFCompressedGraphPointersDeserializer.java
|
/*
* Copyright 2014-2022 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.serializer;
import com.netflix.nfgraph.compressed.NFCompressedGraphIntPointers;
import com.netflix.nfgraph.compressed.NFCompressedGraphLongPointers;
import com.netflix.nfgraph.compressed.NFCompressedGraphPointers;
import com.netflix.nfgraph.util.ByteArrayReader;
import com.netflix.nfgraph.util.SimpleByteArray;
import java.io.DataInputStream;
import java.io.IOException;
/**
 * Deserializes the per-node-type pointer arrays of a serialized {@link NFCompressedGraphPointers}.
 * Pointers are delta-encoded as variable-length integers; -1 marks a node with no pointer.
 */
public class NFCompressedGraphPointersDeserializer {

    NFCompressedGraphPointers deserializePointers(DataInputStream dis) throws IOException {
        int numTypes = dis.readInt();

        /// Backwards compatibility: if more than 32 bits is required to represent the
        /// pointers, the producer flags the sign bit in the serialized number of node types.
        if((numTypes & Integer.MIN_VALUE) != 0) {
            // Fix: clear the flag bit exactly once.  The original cleared it with
            // `numTypes &= Integer.MAX_VALUE` and then redundantly re-masked the
            // already-cleared value when passing it along (a dead store + duplicate mask).
            return deserializeLongPointers(dis, numTypes & Integer.MAX_VALUE);
        }

        return deserializeIntPointers(dis, numTypes);
    }

    /** Reads one long-pointer array per node type, keyed by the type's name. */
    private NFCompressedGraphLongPointers deserializeLongPointers(DataInputStream dis, int numTypes) throws IOException {
        NFCompressedGraphLongPointers pointers = new NFCompressedGraphLongPointers();

        for(int i=0;i<numTypes;i++) {
            String nodeType = dis.readUTF();
            pointers.addPointers(nodeType, deserializeLongPointerArray(dis));
        }

        return pointers;
    }

    /**
     * Decodes a delta-encoded array of long pointers.  Each entry is a VLong delta
     * from the previous pointer; a raw value of -1 means "no pointer" for that node.
     */
    private long[] deserializeLongPointerArray(DataInputStream dis) throws IOException {
        int numNodes = dis.readInt();
        int numBytes = dis.readInt();

        byte[] data = new byte[numBytes];
        long[] pointers = new long[numNodes];

        dis.readFully(data);

        ByteArrayReader reader = new ByteArrayReader(new SimpleByteArray(data), 0);

        long currentPointer = 0;
        for(int i=0;i<numNodes;i++) {
            long vLong = reader.readVLong();
            if(vLong == -1) {
                pointers[i] = -1;
            } else {
                currentPointer += vLong;
                pointers[i] = currentPointer;
            }
        }

        return pointers;
    }

    /** Reads one int-pointer array per node type, keyed by the type's name. */
    private NFCompressedGraphIntPointers deserializeIntPointers(DataInputStream dis, int numTypes) throws IOException {
        NFCompressedGraphIntPointers pointers = new NFCompressedGraphIntPointers();

        for(int i=0;i<numTypes;i++) {
            String nodeType = dis.readUTF();
            pointers.addPointers(nodeType, deserializeIntPointerArray(dis));
        }

        return pointers;
    }

    /**
     * Decodes a delta-encoded array of int pointers.  Deltas are accumulated in a long
     * to avoid transient int overflow before the final narrowing cast; a raw value of
     * -1 means "no pointer" for that node.
     */
    private int[] deserializeIntPointerArray(DataInputStream dis) throws IOException {
        int numNodes = dis.readInt();
        int numBytes = dis.readInt();

        byte[] data = new byte[numBytes];
        int[] pointers = new int[numNodes];

        dis.readFully(data);

        ByteArrayReader reader = new ByteArrayReader(new SimpleByteArray(data), 0);

        long currentPointer = 0;
        for(int i=0;i<numNodes;i++) {
            int vInt = reader.readVInt();
            if(vInt == -1) {
                pointers[i] = -1;
            } else {
                currentPointer += vInt;
                pointers[i] = (int)currentPointer;
            }
        }

        return pointers;
    }
}
| 1,598 |
0 |
Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph
|
Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/serializer/NFCompressedGraphDeserializer.java
|
/*
* Copyright 2013-2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.serializer;
import com.netflix.nfgraph.NFGraphModelHolder;
import com.netflix.nfgraph.compressed.NFCompressedGraph;
import com.netflix.nfgraph.compressed.NFCompressedGraphPointers;
import com.netflix.nfgraph.spec.NFGraphSpec;
import com.netflix.nfgraph.spec.NFNodeSpec;
import com.netflix.nfgraph.spec.NFPropertySpec;
import com.netflix.nfgraph.util.ByteData;
import com.netflix.nfgraph.util.ByteSegmentPool;
import com.netflix.nfgraph.util.SegmentedByteArray;
import com.netflix.nfgraph.util.SimpleByteArray;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
/**
* This class is used by {@link NFCompressedGraph#readFrom(InputStream)}.<p>
*
* It is unlikely that this class will need to be used externally.
*/
/**
 * This class is used by {@link NFCompressedGraph#readFrom(InputStream)}.<p>
 *
 * It is unlikely that this class will need to be used externally.
 */
public class NFCompressedGraphDeserializer {

    private final NFCompressedGraphPointersDeserializer pointersDeserializer = new NFCompressedGraphPointersDeserializer();

    public NFCompressedGraph deserialize(InputStream is) throws IOException {
        // No byte segment pool: small graphs are deserialized into a single array.
        return deserialize(is, null);
    }

    public NFCompressedGraph deserialize(InputStream is, ByteSegmentPool byteSegmentPool) throws IOException {
        DataInputStream dis = new DataInputStream(is);

        // Stream layout: spec, connection models, pointers, data length, raw data.
        NFGraphSpec graphSpec = deserializeSpec(dis);
        NFGraphModelHolder models = deserializeModels(dis);
        NFCompressedGraphPointers graphPointers = pointersDeserializer.deserializePointers(dis);
        long length = deserializeDataLength(dis);
        ByteData byteData = deserializeData(dis, length, byteSegmentPool);

        return new NFCompressedGraph(graphSpec, models, byteData, length, graphPointers);
    }

    /** Rebuilds the {@link NFGraphSpec} (node types and their property specs) from the stream. */
    private NFGraphSpec deserializeSpec(DataInputStream dis) throws IOException {
        int nodeTypeCount = dis.readInt();
        NFNodeSpec[] nodeSpecs = new NFNodeSpec[nodeTypeCount];

        for(int typeIdx = 0; typeIdx < nodeTypeCount; typeIdx++) {
            String nodeTypeName = dis.readUTF();
            int propertyCount = dis.readInt();
            NFPropertySpec[] propertySpecs = new NFPropertySpec[propertyCount];

            for(int propIdx = 0; propIdx < propertyCount; propIdx++) {
                String propertyName = dis.readUTF();
                String toNodeType = dis.readUTF();
                boolean isGlobal = dis.readBoolean();
                boolean isMultiple = dis.readBoolean();
                boolean isHashed = dis.readBoolean();

                propertySpecs[propIdx] = new NFPropertySpec(propertyName, toNodeType, isGlobal, isMultiple, isHashed);
            }

            nodeSpecs[typeIdx] = new NFNodeSpec(nodeTypeName, propertySpecs);
        }

        return new NFGraphSpec(nodeSpecs);
    }

    /** Rebuilds the connection-model holder; registering each name assigns it an index. */
    private NFGraphModelHolder deserializeModels(DataInputStream dis) throws IOException {
        int modelCount = dis.readInt();
        NFGraphModelHolder modelHolder = new NFGraphModelHolder();

        while(modelCount-- > 0) {
            modelHolder.getModelIndex(dis.readUTF());
        }

        return modelHolder;
    }

    /// Backwards compatibility: If the data length is greater than Integer.MAX_VALUE, then
    /// -1 is serialized as an int before a long containing the actual length.
    private long deserializeDataLength(DataInputStream dis) throws IOException {
        int lengthAsInt = dis.readInt();
        return lengthAsInt == -1 ? dis.readLong() : lengthAsInt;
    }

    /**
     * Reads the raw graph data.  Small payloads (and no pool) go into a single byte array;
     * large payloads, or any deserialization backed by a {@link ByteSegmentPool}, use a
     * {@link SegmentedByteArray}.
     */
    private ByteData deserializeData(DataInputStream dis, long dataLength, ByteSegmentPool memoryPool) throws IOException {
        if(dataLength < 0x20000000 && memoryPool == null) {
            byte[] buf = new byte[(int)dataLength];
            dis.readFully(buf);
            return new SimpleByteArray(buf);
        }

        SegmentedByteArray segmented = (memoryPool == null) ? new SegmentedByteArray(14) : new SegmentedByteArray(memoryPool);
        segmented.readFrom(dis, dataLength);
        return segmented;
    }
}
| 1,599 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.