column      type            values
code        stringlengths   5 to 1M
repo_name   stringlengths   5 to 109
path        stringlengths   6 to 208
language    stringclasses   1 value
license     stringclasses   15 values
size        int64           5 to 1M
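Read together, the columns above describe one record per source file. The sketch below models that record shape in Scala purely for illustration; the class and field names are invented here and do not come from the dataset itself, while the comments and example values are taken from the schema and from the first record listed below.

// Hypothetical record type mirroring the columns above (illustrative only).
final case class CodeSample(
  code: String,     // full source text, 5 to 1M characters
  repoName: String, // e.g. "rspieldenner/atlas"
  path: String,     // file path within the repository, 6 to 208 characters
  language: String, // a single class in this dump: "Scala"
  license: String,  // one of 15 license identifiers, e.g. "apache-2.0"
  size: Long        // file size in bytes
)

// Example built from the metadata of the first record below:
// CodeSample(callSuiteSource, "rspieldenner/atlas",
//   "atlas-core/src/test/scala/com/netflix/atlas/core/stacklang/CallSuite.scala",
//   "Scala", "apache-2.0", 1014)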
/*
 * Copyright 2015 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.atlas.core.stacklang

class CallSuite extends BaseWordSuite {

  def interpreter: Interpreter = Interpreter(StandardVocabulary.allWords)

  def word: Word = StandardVocabulary.Call

  def shouldMatch: List[(String, List[Any])] = List(
    "(,)" -> List.empty[Any],
    "(,a,:dup,)" -> List("a", "a"),
    "b,(,a,:swap,)" -> List("b", "a")
  )

  def shouldNotMatch: List[String] = List("", "a")
}
rspieldenner/atlas
atlas-core/src/test/scala/com/netflix/atlas/core/stacklang/CallSuite.scala
Scala
apache-2.0
1,014
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.api.scala import java.math.{BigDecimal => JBigDecimal} import java.sql.{Date, Time, Timestamp} import org.apache.calcite.avatica.util.DateTimeUtils._ import org.apache.flink.api.common.typeinfo.{SqlTimeTypeInfo, TypeInformation} import org.apache.flink.table.api.{CurrentRange, CurrentRow, TableException, UnboundedRange, UnboundedRow} import org.apache.flink.table.expressions.ExpressionUtils.{convertArray, toMilliInterval, toMonthInterval, toRowInterval} import org.apache.flink.table.api.Table import org.apache.flink.table.expressions.TimeIntervalUnit.TimeIntervalUnit import org.apache.flink.table.expressions._ import org.apache.flink.table.functions.{AggregateFunction, DistinctAggregateFunction} import scala.language.implicitConversions /** * These are all the operations that can be used to construct an [[Expression]] AST for expression * operations. * * These operations must be kept in sync with the parser in * [[org.apache.flink.table.expressions.ExpressionParser]]. */ trait ImplicitExpressionOperations { private[flink] def expr: Expression /** * Enables literals on left side of binary expressions. * * e.g. 12.toExpr % 'a * * @return expression */ def toExpr: Expression = expr /** * Boolean AND in three-valued logic. */ def && (other: Expression) = And(expr, other) /** * Boolean OR in three-valued logic. */ def || (other: Expression) = Or(expr, other) /** * Greater than. */ def > (other: Expression) = GreaterThan(expr, other) /** * Greater than or equal. */ def >= (other: Expression) = GreaterThanOrEqual(expr, other) /** * Less than. */ def < (other: Expression) = LessThan(expr, other) /** * Less than or equal. */ def <= (other: Expression) = LessThanOrEqual(expr, other) /** * Equals. */ def === (other: Expression) = EqualTo(expr, other) /** * Not equal. */ def !== (other: Expression) = NotEqualTo(expr, other) /** * Whether boolean expression is not true; returns null if boolean is null. */ def unary_! = Not(expr) /** * Returns negative numeric. */ def unary_- = UnaryMinus(expr) /** * Returns numeric. */ def unary_+ = expr /** * Returns true if the given expression is null. */ def isNull = IsNull(expr) /** * Returns true if the given expression is not null. */ def isNotNull = IsNotNull(expr) /** * Returns true if given boolean expression is true. False otherwise (for null and false). */ def isTrue = IsTrue(expr) /** * Returns true if given boolean expression is false. False otherwise (for null and true). */ def isFalse = IsFalse(expr) /** * Returns true if given boolean expression is not true (for null and false). False otherwise. */ def isNotTrue = IsNotTrue(expr) /** * Returns true if given boolean expression is not false (for null and true). False otherwise. 
*/ def isNotFalse = IsNotFalse(expr) /** * Returns left plus right. */ def + (other: Expression) = Plus(expr, other) /** * Returns left minus right. */ def - (other: Expression) = Minus(expr, other) /** * Returns left divided by right. */ def / (other: Expression) = Div(expr, other) /** * Returns left multiplied by right. */ def * (other: Expression) = Mul(expr, other) /** * Returns the remainder (modulus) of left divided by right. * The result is negative only if left is negative. */ def % (other: Expression) = mod(other) /** * Returns the sum of the numeric field across all input values. * If all values are null, null is returned. */ def sum = Sum(expr) /** * Returns the sum of the numeric field across all input values. * If all values are null, 0 is returned. */ def sum0 = Sum0(expr) /** * Returns the minimum value of field across all input values. */ def min = Min(expr) /** * Returns the maximum value of field across all input values. */ def max = Max(expr) /** * Returns the number of input rows for which the field is not null. */ def count = Count(expr) /** * Returns the average (arithmetic mean) of the numeric field across all input values. */ def avg = Avg(expr) /** * Returns the population standard deviation of an expression (the square root of varPop()). */ def stddevPop = StddevPop(expr) /** * Returns the sample standard deviation of an expression (the square root of varSamp()). */ def stddevSamp = StddevSamp(expr) /** * Returns the population standard variance of an expression. */ def varPop = VarPop(expr) /** * Returns the sample variance of a given expression. */ def varSamp = VarSamp(expr) /** * Returns multiset aggregate of a given expression. */ def collect = Collect(expr) /** * Converts a value to a given type. * * e.g. "42".cast(Types.INT) leads to 42. * * @return casted expression */ def cast(toType: TypeInformation[_]) = Cast(expr, toType) /** * Specifies a name for an expression i.e. a field. * * @param name name for one field * @param extraNames additional names if the expression expands to multiple fields * @return field with an alias */ def as(name: Symbol, extraNames: Symbol*) = Alias(expr, name.name, extraNames.map(_.name)) /** * Specifies ascending order of an expression i.e. a field for orderBy call. * * @return ascend expression */ def asc = Asc(expr) /** * Specifies descending order of an expression i.e. a field for orderBy call. * * @return descend expression */ def desc = Desc(expr) /** * Returns true if an expression exists in a given list of expressions. This is a shorthand * for multiple OR conditions. * * If the testing set contains null, the result will be null if the element can not be found * and true if it can be found. If the element is null, the result is always null. * * e.g. "42".in(1, 2, 3) leads to false. */ def in(elements: Expression*) = In(expr, elements) /** * Returns true if an expression exists in a given table sub-query. The sub-query table * must consist of one column. This column must have the same data type as the expression. * * Note: This operation is not supported in a streaming environment yet. */ def in(table: Table) = In(expr, Seq(TableReference(table.toString, table))) /** * Returns the start time (inclusive) of a window when applied on a window reference. */ def start = WindowStart(expr) /** * Returns the end time (exclusive) of a window when applied on a window reference. * * e.g. if a window ends at 10:59:59.999 this property will return 11:00:00.000. 
*/ def end = WindowEnd(expr) /** * Ternary conditional operator that decides which of two other expressions should be evaluated * based on a evaluated boolean condition. * * e.g. (42 > 5).?("A", "B") leads to "A" * * @param ifTrue expression to be evaluated if condition holds * @param ifFalse expression to be evaluated if condition does not hold */ def ?(ifTrue: Expression, ifFalse: Expression) = { If(expr, ifTrue, ifFalse) } // scalar functions /** * Calculates the remainder of division the given number by another one. */ def mod(other: Expression) = Mod(expr, other) /** * Calculates the Euler's number raised to the given power. */ def exp() = Exp(expr) /** * Calculates the base 10 logarithm of the given value. */ def log10() = Log10(expr) /** * Calculates the base 2 logarithm of the given value. */ def log2() = Log2(expr) /** * Calculates the natural logarithm of the given value. */ def ln() = Ln(expr) /** * Calculates the natural logarithm of the given value. */ def log() = Log(null, expr) /** * Calculates the logarithm of the given value to the given base. */ def log(base: Expression) = Log(base, expr) /** * Calculates the given number raised to the power of the other value. */ def power(other: Expression) = Power(expr, other) /** * Calculates the square root of a given value. */ def sqrt() = Sqrt(expr) /** * Calculates the absolute value of given value. */ def abs() = Abs(expr) /** * Calculates the largest integer less than or equal to a given number. */ def floor() = Floor(expr) /** * Calculates the smallest integer greater than or equal to a given number. */ def ceil() = Ceil(expr) /** * Calculates the sine of a given number. */ def sin() = Sin(expr) /** * Calculates the cosine of a given number. */ def cos() = Cos(expr) /** * Calculates the tangent of a given number. */ def tan() = Tan(expr) /** * Calculates the cotangent of a given number. */ def cot() = Cot(expr) /** * Calculates the arc sine of a given number. */ def asin() = Asin(expr) /** * Calculates the arc cosine of a given number. */ def acos() = Acos(expr) /** * Calculates the arc tangent of a given number. */ def atan() = Atan(expr) /** * Converts numeric from radians to degrees. */ def degrees() = Degrees(expr) /** * Converts numeric from degrees to radians. */ def radians() = Radians(expr) /** * Calculates the signum of a given number. */ def sign() = Sign(expr) /** * Rounds the given number to integer places right to the decimal point. */ def round(places: Expression) = Round(expr, places) /** * Returns a string representation of an integer numeric value in binary format. Returns null if * numeric is null. E.g. "4" leads to "100", "12" leads to "1100". */ def bin() = Bin(expr) /** * Returns a string representation of an integer numeric value or a string in hex format. Returns * null if numeric or string is null. * * E.g. a numeric 20 leads to "14", a numeric 100 leads to "64", and a string "hello,world" leads * to "68656c6c6f2c776f726c64". */ def hex() = Hex(expr) // String operations /** * Creates a substring of the given string at given index for a given length. * * @param beginIndex first character of the substring (starting at 1, inclusive) * @param length number of characters of the substring * @return substring */ def substring(beginIndex: Expression, length: Expression) = Substring(expr, beginIndex, length) /** * Creates a substring of the given string beginning at the given index to the end. 
* * @param beginIndex first character of the substring (starting at 1, inclusive) * @return substring */ def substring(beginIndex: Expression) = new Substring(expr, beginIndex) /** * Removes leading and/or trailing characters from the given string. * * @param removeLeading if true, remove leading characters (default: true) * @param removeTrailing if true, remove trailing characters (default: true) * @param character string containing the character (default: " ") * @return trimmed string */ def trim( removeLeading: Boolean = true, removeTrailing: Boolean = true, character: Expression = TrimConstants.TRIM_DEFAULT_CHAR) = { if (removeLeading && removeTrailing) { Trim(TrimMode.BOTH, character, expr) } else if (removeLeading) { Trim(TrimMode.LEADING, character, expr) } else if (removeTrailing) { Trim(TrimMode.TRAILING, character, expr) } else { expr } } /** * Returns the length of a string. */ def charLength() = CharLength(expr) /** * Returns all of the characters in a string in upper case using the rules of * the default locale. */ def upperCase() = Upper(expr) /** * Returns all of the characters in a string in lower case using the rules of * the default locale. */ def lowerCase() = Lower(expr) /** * Converts the initial letter of each word in a string to uppercase. * Assumes a string containing only [A-Za-z0-9], everything else is treated as whitespace. */ def initCap() = InitCap(expr) /** * Returns true, if a string matches the specified LIKE pattern. * * e.g. "Jo_n%" matches all strings that start with "Jo(arbitrary letter)n" */ def like(pattern: Expression) = Like(expr, pattern) /** * Returns true, if a string matches the specified SQL regex pattern. * * e.g. "A+" matches all strings that consist of at least one A */ def similar(pattern: Expression) = Similar(expr, pattern) /** * Returns the position of string in an other string starting at 1. * Returns 0 if string could not be found. * * e.g. "a".position("bbbbba") leads to 6 */ def position(haystack: Expression) = Position(expr, haystack) /** * Returns a string left-padded with the given pad string to a length of len characters. If * the string is longer than len, the return value is shortened to len characters. * * e.g. "hi".lpad(4, '??') returns "??hi", "hi".lpad(1, '??') returns "h" */ def lpad(len: Expression, pad: Expression) = Lpad(expr, len, pad) /** * Returns a string right-padded with the given pad string to a length of len characters. If * the string is longer than len, the return value is shortened to len characters. * * e.g. "hi".rpad(4, '??') returns "hi??", "hi".rpad(1, '??') returns "h" */ def rpad(len: Expression, pad: Expression) = Rpad(expr, len, pad) /** * For windowing function to config over window * e.g.: * table * .window(Over partitionBy 'c orderBy 'rowtime preceding 2.rows following CURRENT_ROW as 'w) * .select('c, 'a, 'a.count over 'w, 'a.sum over 'w) */ def over(alias: Expression): Expression = { expr match { case _: Aggregation => UnresolvedOverCall( expr.asInstanceOf[Aggregation], alias) case _ => throw new TableException( "The over method can only using with aggregation expression.") } } /** * Replaces a substring of string with a string starting at a position (starting at 1). * * e.g. "xxxxxtest".overlay("xxxx", 6) leads to "xxxxxxxxx" */ def overlay(newString: Expression, starting: Expression) = new Overlay(expr, newString, starting) /** * Replaces a substring of string with a string starting at a position (starting at 1). * The length specifies how many characters should be removed. * * e.g. 
"xxxxxtest".overlay("xxxx", 6, 2) leads to "xxxxxxxxxst" */ def overlay(newString: Expression, starting: Expression, length: Expression) = Overlay(expr, newString, starting, length) /** * Returns a string with all substrings that match the regular expression consecutively * being replaced. */ def regexpReplace(regex: Expression, replacement: Expression) = RegexpReplace(expr, regex, replacement) /** * Returns the base string decoded with base64. */ def fromBase64() = FromBase64(expr) /** * Returns the base64-encoded result of the input string. */ def toBase64() = ToBase64(expr) /** * Returns a string that removes the left whitespaces from the given string. */ def ltrim() = LTrim(expr) /** * Returns a string that removes the right whitespaces from the given string. */ def rtrim() = RTrim(expr) /** * Returns a string that repeats the base string n times. */ def repeat(n: Expression) = Repeat(expr, n) // Temporal operations /** * Parses a date string in the form "yyyy-MM-dd" to a SQL Date. */ def toDate = Cast(expr, SqlTimeTypeInfo.DATE) /** * Parses a time string in the form "HH:mm:ss" to a SQL Time. */ def toTime = Cast(expr, SqlTimeTypeInfo.TIME) /** * Parses a timestamp string in the form "yyyy-MM-dd HH:mm:ss[.SSS]" to a SQL Timestamp. */ def toTimestamp = Cast(expr, SqlTimeTypeInfo.TIMESTAMP) /** * Extracts parts of a time point or time interval. Returns the part as a long value. * * e.g. "2006-06-05".toDate.extract(DAY) leads to 5 */ def extract(timeIntervalUnit: TimeIntervalUnit) = Extract(timeIntervalUnit, expr) /** * Rounds down a time point to the given unit. * * e.g. "12:44:31".toDate.floor(MINUTE) leads to 12:44:00 */ def floor(timeIntervalUnit: TimeIntervalUnit) = TemporalFloor(timeIntervalUnit, expr) /** * Rounds up a time point to the given unit. * * e.g. "12:44:31".toDate.ceil(MINUTE) leads to 12:45:00 */ def ceil(timeIntervalUnit: TimeIntervalUnit) = TemporalCeil(timeIntervalUnit, expr) // Interval types /** * Creates an interval of the given number of years. * * @return interval of months */ def year: Expression = toMonthInterval(expr, 12) /** * Creates an interval of the given number of years. * * @return interval of months */ def years: Expression = year /** * Creates an interval of the given number of quarters. * * @return interval of months */ def quarter: Expression = toMonthInterval(expr, 3) /** * Creates an interval of the given number of quarters. * * @return interval of months */ def quarters: Expression = quarter /** * Creates an interval of the given number of months. * * @return interval of months */ def month: Expression = toMonthInterval(expr, 1) /** * Creates an interval of the given number of months. * * @return interval of months */ def months: Expression = month /** * Creates an interval of the given number of weeks. * * @return interval of milliseconds */ def week: Expression = toMilliInterval(expr, 7 * MILLIS_PER_DAY) /** * Creates an interval of the given number of weeks. * * @return interval of milliseconds */ def weeks: Expression = week /** * Creates an interval of the given number of days. * * @return interval of milliseconds */ def day: Expression = toMilliInterval(expr, MILLIS_PER_DAY) /** * Creates an interval of the given number of days. * * @return interval of milliseconds */ def days: Expression = day /** * Creates an interval of the given number of hours. * * @return interval of milliseconds */ def hour: Expression = toMilliInterval(expr, MILLIS_PER_HOUR) /** * Creates an interval of the given number of hours. 
* * @return interval of milliseconds */ def hours: Expression = hour /** * Creates an interval of the given number of minutes. * * @return interval of milliseconds */ def minute: Expression = toMilliInterval(expr, MILLIS_PER_MINUTE) /** * Creates an interval of the given number of minutes. * * @return interval of milliseconds */ def minutes: Expression = minute /** * Creates an interval of the given number of seconds. * * @return interval of milliseconds */ def second: Expression = toMilliInterval(expr, MILLIS_PER_SECOND) /** * Creates an interval of the given number of seconds. * * @return interval of milliseconds */ def seconds: Expression = second /** * Creates an interval of the given number of milliseconds. * * @return interval of milliseconds */ def milli: Expression = toMilliInterval(expr, 1) /** * Creates an interval of the given number of milliseconds. * * @return interval of milliseconds */ def millis: Expression = milli // Row interval type /** * Creates an interval of rows. * * @return interval of rows */ def rows: Expression = toRowInterval(expr) // Advanced type helper functions /** * Accesses the field of a Flink composite type (such as Tuple, POJO, etc.) by name and * returns it's value. * * @param name name of the field (similar to Flink's field expressions) * @return value of the field */ def get(name: String) = GetCompositeField(expr, name) /** * Accesses the field of a Flink composite type (such as Tuple, POJO, etc.) by index and * returns it's value. * * @param index position of the field * @return value of the field */ def get(index: Int) = GetCompositeField(expr, index) /** * Converts a Flink composite type (such as Tuple, POJO, etc.) and all of its direct subtypes * into a flat representation where every subtype is a separate field. */ def flatten() = Flattening(expr) /** * Accesses the element of an array or map based on a key or an index (starting at 1). * * @param index key or position of the element (array index starting at 1) * @return value of the element */ def at(index: Expression) = ItemAt(expr, index) /** * Returns the number of elements of an array or number of entries of a map. * * @return number of elements or entries */ def cardinality() = Cardinality(expr) /** * Returns the sole element of an array with a single element. Returns null if the array is * empty. Throws an exception if the array has more than one element. * * @return the first and only element of an array with a single element */ def element() = ArrayElement(expr) // Time definition /** * Declares a field as the rowtime attribute for indicating, accessing, and working in * Flink's event time. */ def rowtime = RowtimeAttribute(expr) /** * Declares a field as the proctime attribute for indicating, accessing, and working in * Flink's processing time. */ def proctime = ProctimeAttribute(expr) // Hash functions /** * Returns the MD5 hash of the string argument; null if string is null. * * @return string of 32 hexadecimal digits or null */ def md5() = Md5(expr) /** * Returns the SHA-1 hash of the string argument; null if string is null. * * @return string of 40 hexadecimal digits or null */ def sha1() = Sha1(expr) /** * Returns the SHA-224 hash of the string argument; null if string is null. * * @return string of 56 hexadecimal digits or null */ def sha224() = Sha224(expr) /** * Returns the SHA-256 hash of the string argument; null if string is null. 
* * @return string of 64 hexadecimal digits or null */ def sha256() = Sha256(expr) /** * Returns the SHA-384 hash of the string argument; null if string is null. * * @return string of 96 hexadecimal digits or null */ def sha384() = Sha384(expr) /** * Returns the SHA-512 hash of the string argument; null if string is null. * * @return string of 128 hexadecimal digits or null */ def sha512() = Sha512(expr) /** * Returns the hash for the given string expression using the SHA-2 family of hash * functions (SHA-224, SHA-256, SHA-384, or SHA-512). * * @param hashLength bit length of the result (either 224, 256, 384, or 512) * @return string or null if one of the arguments is null. */ def sha2(hashLength: Expression) = Sha2(expr, hashLength) /** * Returns true if the given expression is between lowerBound and upperBound (both inclusive). * False otherwise. The parameters must be numeric types or identical comparable types. * * @param lowerBound numeric or comparable expression * @param upperBound numeric or comparable expression * @return boolean or null */ def between(lowerBound: Expression, upperBound: Expression) = Between(expr, lowerBound, upperBound) /** * Returns true if the given expression is not between lowerBound and upperBound (both * inclusive). False otherwise. The parameters must be numeric types or identical * comparable types. * * @param lowerBound numeric or comparable expression * @param upperBound numeric or comparable expression * @return boolean or null */ def notBetween(lowerBound: Expression, upperBound: Expression) = NotBetween(expr, lowerBound, upperBound) } /** * Implicit conversions from Scala Literals to Expression [[Literal]] and from [[Expression]] * to [[ImplicitExpressionOperations]]. */ trait ImplicitExpressionConversions { implicit val UNBOUNDED_ROW = UnboundedRow() implicit val UNBOUNDED_RANGE = UnboundedRange() implicit val CURRENT_ROW = CurrentRow() implicit val CURRENT_RANGE = CurrentRange() implicit class WithOperations(e: Expression) extends ImplicitExpressionOperations { def expr = e } implicit class UnresolvedFieldExpression(s: Symbol) extends ImplicitExpressionOperations { def expr = UnresolvedFieldReference(s.name) } implicit class LiteralLongExpression(l: Long) extends ImplicitExpressionOperations { def expr = Literal(l) } implicit class LiteralByteExpression(b: Byte) extends ImplicitExpressionOperations { def expr = Literal(b) } implicit class LiteralShortExpression(s: Short) extends ImplicitExpressionOperations { def expr = Literal(s) } implicit class LiteralIntExpression(i: Int) extends ImplicitExpressionOperations { def expr = Literal(i) } implicit class LiteralFloatExpression(f: Float) extends ImplicitExpressionOperations { def expr = Literal(f) } implicit class LiteralDoubleExpression(d: Double) extends ImplicitExpressionOperations { def expr = Literal(d) } implicit class LiteralStringExpression(str: String) extends ImplicitExpressionOperations { def expr = Literal(str) } implicit class LiteralBooleanExpression(bool: Boolean) extends ImplicitExpressionOperations { def expr = Literal(bool) } implicit class LiteralJavaDecimalExpression(javaDecimal: java.math.BigDecimal) extends ImplicitExpressionOperations { def expr = Literal(javaDecimal) } implicit class LiteralScalaDecimalExpression(scalaDecimal: scala.math.BigDecimal) extends ImplicitExpressionOperations { def expr = Literal(scalaDecimal.bigDecimal) } implicit class LiteralSqlDateExpression(sqlDate: Date) extends ImplicitExpressionOperations { def expr = Literal(sqlDate) } implicit class 
LiteralSqlTimeExpression(sqlTime: Time) extends ImplicitExpressionOperations { def expr = Literal(sqlTime) } implicit class LiteralSqlTimestampExpression(sqlTimestamp: Timestamp) extends ImplicitExpressionOperations { def expr = Literal(sqlTimestamp) } implicit def symbol2FieldExpression(sym: Symbol): Expression = UnresolvedFieldReference(sym.name) implicit def byte2Literal(b: Byte): Expression = Literal(b) implicit def short2Literal(s: Short): Expression = Literal(s) implicit def int2Literal(i: Int): Expression = Literal(i) implicit def long2Literal(l: Long): Expression = Literal(l) implicit def double2Literal(d: Double): Expression = Literal(d) implicit def float2Literal(d: Float): Expression = Literal(d) implicit def string2Literal(str: String): Expression = Literal(str) implicit def boolean2Literal(bool: Boolean): Expression = Literal(bool) implicit def javaDec2Literal(javaDec: JBigDecimal): Expression = Literal(javaDec) implicit def scalaDec2Literal(scalaDec: BigDecimal): Expression = Literal(scalaDec.bigDecimal) implicit def sqlDate2Literal(sqlDate: Date): Expression = Literal(sqlDate) implicit def sqlTime2Literal(sqlTime: Time): Expression = Literal(sqlTime) implicit def sqlTimestamp2Literal(sqlTimestamp: Timestamp): Expression = Literal(sqlTimestamp) implicit def array2ArrayConstructor(array: Array[_]): Expression = convertArray(array) implicit def userDefinedAggFunctionConstructor[T: TypeInformation, ACC: TypeInformation] (udagg: AggregateFunction[T, ACC]): UDAGGExpression[T, ACC] = UDAGGExpression(udagg) implicit def toDistinct(agg: Aggregation): DistinctAgg = DistinctAgg(agg) implicit def toDistinct[T: TypeInformation, ACC: TypeInformation] (agg: AggregateFunction[T, ACC]): DistinctAggregateFunction[T, ACC] = DistinctAggregateFunction(agg) } // ------------------------------------------------------------------------------------------------ // Expressions with no parameters // ------------------------------------------------------------------------------------------------ // we disable the object checker here as it checks for capital letters of objects // but we want that objects look like functions in certain cases e.g. array(1, 2, 3) // scalastyle:off object.name /** * Returns the current SQL date in UTC time zone. */ object currentDate { /** * Returns the current SQL date in UTC time zone. */ def apply(): Expression = { CurrentDate() } } /** * Returns the current SQL time in UTC time zone. */ object currentTime { /** * Returns the current SQL time in UTC time zone. */ def apply(): Expression = { CurrentTime() } } /** * Returns the current SQL timestamp in UTC time zone. */ object currentTimestamp { /** * Returns the current SQL timestamp in UTC time zone. */ def apply(): Expression = { CurrentTimestamp() } } /** * Returns the current SQL time in local time zone. */ object localTime { /** * Returns the current SQL time in local time zone. */ def apply(): Expression = { LocalTime() } } /** * Returns the current SQL timestamp in local time zone. */ object localTimestamp { /** * Returns the current SQL timestamp in local time zone. */ def apply(): Expression = { LocalTimestamp() } } /** * Determines whether two anchored time intervals overlap. Time point and temporal are * transformed into a range defined by two time points (start, end). The function * evaluates <code>leftEnd >= rightStart && rightEnd >= leftStart</code>. * * It evaluates: leftEnd >= rightStart && rightEnd >= leftStart * * e.g. 
temporalOverlaps("2:55:00".toTime, 1.hour, "3:30:00".toTime, 2.hour) leads to true */ object temporalOverlaps { /** * Determines whether two anchored time intervals overlap. Time point and temporal are * transformed into a range defined by two time points (start, end). * * It evaluates: leftEnd >= rightStart && rightEnd >= leftStart * * e.g. temporalOverlaps("2:55:00".toTime, 1.hour, "3:30:00".toTime, 2.hour) leads to true */ def apply( leftTimePoint: Expression, leftTemporal: Expression, rightTimePoint: Expression, rightTemporal: Expression): Expression = { TemporalOverlaps(leftTimePoint, leftTemporal, rightTimePoint, rightTemporal) } } /** * Formats a timestamp as a string using a specified format. * The format must be compatible with MySQL's date formatting syntax as used by the * date_parse function. * * For example <code>dataFormat('time, "%Y, %d %M")</code> results in strings * formatted as "2017, 05 May". */ object dateFormat { /** * Formats a timestamp as a string using a specified format. * The format must be compatible with MySQL's date formatting syntax as used by the * date_parse function. * * For example dataFormat('time, "%Y, %d %M") results in strings formatted as "2017, 05 May". * * @param timestamp The timestamp to format as string. * @param format The format of the string. * @return The formatted timestamp as string. */ def apply( timestamp: Expression, format: Expression ): Expression = { DateFormat(timestamp, format) } } /** * Creates an array of literals. The array will be an array of objects (not primitives). */ object array { /** * Creates an array of literals. The array will be an array of objects (not primitives). */ def apply(head: Expression, tail: Expression*): Expression = { ArrayConstructor(head +: tail.toSeq) } } /** * Creates a row of expressions. */ object row { /** * Creates a row of expressions. */ def apply(head: Expression, tail: Expression*): Expression = { RowConstructor(head +: tail.toSeq) } } /** * Creates a map of expressions. The map will be a map between two objects (not primitives). */ object map { /** * Creates a map of expressions. The map will be a map between two objects (not primitives). */ def apply(key: Expression, value: Expression, tail: Expression*): Expression = { MapConstructor(Seq(key, value) ++ tail.toSeq) } } /** * Returns a value that is closer than any other value to pi. */ object pi { /** * Returns a value that is closer than any other value to pi. */ def apply(): Expression = { Pi() } } /** * Returns a value that is closer than any other value to e. */ object e { /** * Returns a value that is closer than any other value to e. */ def apply(): Expression = { E() } } /** * Returns a pseudorandom double value between 0.0 (inclusive) and 1.0 (exclusive). */ object rand { /** * Returns a pseudorandom double value between 0.0 (inclusive) and 1.0 (exclusive). */ def apply(): Expression = { new Rand() } /** * Returns a pseudorandom double value between 0.0 (inclusive) and 1.0 (exclusive) with a * initial seed. Two rand() functions will return identical sequences of numbers if they * have same initial seed. */ def apply(seed: Expression): Expression = { Rand(seed) } } /** * Returns a pseudorandom integer value between 0.0 (inclusive) and the specified * value (exclusive). */ object randInteger { /** * Returns a pseudorandom integer value between 0.0 (inclusive) and the specified * value (exclusive). 
*/ def apply(bound: Expression): Expression = { new RandInteger(bound) } /** * Returns a pseudorandom integer value between 0.0 (inclusive) and the specified value * (exclusive) with a initial seed. Two randInteger() functions will return identical sequences * of numbers if they have same initial seed and same bound. */ def apply(seed: Expression, bound: Expression): Expression = { RandInteger(seed, bound) } } /** * Returns the string that results from concatenating the arguments. * Returns NULL if any argument is NULL. */ object concat { /** * Returns the string that results from concatenating the arguments. * Returns NULL if any argument is NULL. */ def apply(string: Expression, strings: Expression*): Expression = { Concat(Seq(string) ++ strings) } } /** * Calculates the arc tangent of a given coordinate. */ object atan2 { /** * Calculates the arc tangent of a given coordinate. */ def apply(y: Expression, x: Expression): Expression = { Atan2(y, x) } } /** * Returns the string that results from concatenating the arguments and separator. * Returns NULL If the separator is NULL. * * Note: this user-defined function does not skip empty strings. However, it does skip any NULL * values after the separator argument. **/ object concat_ws { def apply(separator: Expression, string: Expression, strings: Expression*): Expression = { ConcatWs(separator, Seq(string) ++ strings) } } /** * Returns an UUID (Universally Unique Identifier) string (e.g., * "3d3c68f7-f608-473f-b60c-b0c44ad4cc4e") according to RFC 4122 type 4 (pseudo randomly * generated) UUID. The UUID is generated using a cryptographically strong pseudo random number * generator. */ object uuid { /** * Returns an UUID (Universally Unique Identifier) string (e.g., * "3d3c68f7-f608-473f-b60c-b0c44ad4cc4e") according to RFC 4122 type 4 (pseudo randomly * generated) UUID. The UUID is generated using a cryptographically strong pseudo random number * generator. */ def apply(): Expression = { UUID() } } // scalastyle:on object.name
yew1eb/flink
flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/scala/expressionDsl.scala
Scala
apache-2.0
36,774
package net.liftmodules.ng
package test.snippet

import Angular._
import net.liftweb.common.Empty
import net.liftweb.http.{S, SessionVar}
import net.liftweb.json.DefaultFormats
import net.liftweb.util.Schedule
import net.liftweb.util.Helpers.TimeSpan

object Server2ClientBindTests {
  case class ListWrap(l: List[String] = List.empty[String]) {
    def :+ (a: String) = ListWrap(l :+ a)
  }
  case class Counter(current: Int)

  object array extends SessionVar[ListWrap](ListWrap())

  def optimized = render("ArrayOptimizedBindActor")
  def standard = render("ArrayStandardBindActor")

  def render(cometName: String) = {
    implicit val formats = DefaultFormats

    var counting = false
    var count = 0
    val session = S.session.openOrThrowException("Piss off, Lou!")

    def schedule: Unit = Schedule(() => {
      if (counting) {
        session.findComet("CounterSessionBindActor", Empty).foreach(_ ! Counter(count))
        count += 1
      }
      schedule
    }, TimeSpan(1000))
    schedule

    renderIfNotAlreadyDefined(angular.module("S2cBindServices")
      .factory("counterService", jsObjFactory()
        .defAny("toggle", {
          counting = !counting
          Empty
        })
      )
      .factory("arrSvc", jsObjFactory()
        .defAny("next", {
          array.update(_ :+ (new java.util.Date().toString))
          session.findComet(cometName, Empty).foreach(_ ! array.is)
          Empty
        })
      ))
  }
}
joescii/lift-ng
test-project/src/main/scala/net/liftmodules/ng/test/snippet/Server2ClientBindTests.scala
Scala
apache-2.0
1,415
package com.thenewmotion.externalconfiguration
package examples

/**
 * @author Yaroslav Klymko
 */
object ExternalConfigurationExample {

  // by default it will configure logback, typesafe config, lift props
  ExternalConfiguration.load()

  // if default options are not enough for you
  val myConfConfigurable: ExternalConfiguration.Configurable = propsDirPath => {
    val myConf = propsDirPath + "/myconf.myconf"
    // use myconf
  }
  ExternalConfiguration.load(myConfConfigurable :: ExternalConfiguration.defaultConfigurables)
}
thenewmotion/externalconfiguration
src/main/scala/com.thenewmotion.externalconfiguration/examples/ExternalConfigurationExample.scala
Scala
apache-2.0
539
package io.scalaland.chimney.examples

package trip {

  case class PersonForm(name: String, age: String, height: String)
  case class Person(name: String, age: Int, height: Double)

  case class TripForm(tripId: String, people: List[PersonForm])
  case class Trip(id: Int, people: Vector[Person])

  case class User(name: String, age: Int, height: Double)
}
scalalandio/chimney
chimney/src/test/scala/io/scalaland/chimney/examples/Trip.scala
Scala
apache-2.0
359
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package Yosemite.framework.slave.ui

import akka.util.Timeout
import java.io.File
import javax.servlet.http.HttpServletRequest
import org.eclipse.jetty.server.{Handler, Server}
import scala.concurrent.duration.Duration

import Yosemite.framework.slave.SlaveActor
import Yosemite.{Logging, Utils}
import Yosemite.ui.JettyUtils
import Yosemite.ui.JettyUtils._
import Yosemite.ui.UIUtils

/**
 * Web UI server for the standalone slave.
 */
private[Yosemite] class SlaveWebUI(val slave: SlaveActor, val workDir: File,
    var isDNS: Boolean = true, requestedPort: Option[Int] = None) extends Logging {

  implicit val timeout = Timeout(
    Duration.create(System.getProperty("Yosemite.akka.askTimeout", "10").toLong, "seconds"))

  val port = requestedPort.getOrElse(
    System.getProperty("slave.ui.port", SlaveWebUI.DEFAULT_PORT).toInt)

  if (isDNS == false) {
    host = Utils.localIpAddress
  }

  val indexPage = new IndexPage(this)

  val handlers = Array[(String, Handler)](
    ("/static", createStaticHandler(SlaveWebUI.STATIC_RESOURCE_DIR)),
    ("/log", (request: HttpServletRequest) => log(request)),
    ("/logPage", (request: HttpServletRequest) => logPage(request)),
    ("/json", (request: HttpServletRequest) => indexPage.renderJson(request)),
    ("*", (request: HttpServletRequest) => indexPage.render(request))
  )

  var host = Utils.localHostName()
  var server: Option[Server] = None
  var boundPort: Option[Int] = None

  def start() {
    try {
      val (srv, bPort) = JettyUtils.startJettyServer("0.0.0.0", port, handlers)
      server = Some(srv)
      boundPort = Some(bPort)
      logInfo("Started Slave web UI at http://%s:%d".format(host, bPort))
    } catch {
      case e: Exception =>
        logError("Failed to create Slave JettyUtils", e)
        System.exit(1)
    }
  }

  def log(request: HttpServletRequest): String = {
    val defaultBytes = 100 * 1024
    val coflowId = request.getParameter("coflowId")
    val logType = request.getParameter("logType")
    val offset = Option(request.getParameter("offset")).map(_.toLong)
    val byteLength = Option(request.getParameter("byteLength")).map(_.toInt).getOrElse(defaultBytes)

    val path = "%s/%s/%s/%s".format(workDir.getPath, coflowId, logType)
    val (startByte, endByte) = getByteRange(path, offset, byteLength)
    val file = new File(path)
    val logLength = file.length

    val pre = "==== Bytes %s-%s of %s of %s/%s ====\\n"
      .format(startByte, endByte, logLength, coflowId, logType)
    pre + Utils.offsetBytes(path, startByte, endByte)
  }

  def logPage(request: HttpServletRequest): Seq[scala.xml.Node] = {
    val defaultBytes = 100 * 1024
    val coflowId = request.getParameter("coflowId")
    val logType = request.getParameter("logType")
    val offset = Option(request.getParameter("offset")).map(_.toLong)
    val byteLength = Option(request.getParameter("byteLength")).map(_.toInt).getOrElse(defaultBytes)

    val path = "%s/%s/%s/%s".format(workDir.getPath, coflowId, logType)
    val (startByte, endByte) = getByteRange(path, offset, byteLength)
    val file = new File(path)
    val logLength = file.length

    val logText = <node> {Utils.offsetBytes(path, startByte, endByte)} </node>

    val linkToMaster = <p> <a href={slave.masterWebUiUrl}>Back to Master</a> </p>

    val range = <span>Bytes {startByte.toString} - {endByte.toString} of {logLength} </span>

    val backButton =
      if (startByte > 0) {
        <a href={"?coflowId=%s&logType=%s&offset=%s&byteLength=%s"
          .format(coflowId, logType, math.max(startByte - byteLength, 0), byteLength)}>
          <button type="button" class="btn btn-default">
            Previous {Utils.bytesToString(math.min(byteLength, startByte))}
          </button>
        </a>
      } else {
        <button type="button" class="btn btn-default" disabled="disabled">
          Previous 0 B
        </button>
      }

    val nextButton =
      if (endByte < logLength) {
        <a href={"?coflowId=%s&logType=%s&offset=%s&byteLength=%s".
          format(coflowId, logType, endByte, byteLength)}>
          <button type="button" class="btn btn-default">
            Next {Utils.bytesToString(math.min(byteLength, logLength - endByte))}
          </button>
        </a>
      } else {
        <button type="button" class="btn btn-default" disabled="disabled">
          Next 0 B
        </button>
      }

    val content =
      <html>
        <body>
          {linkToMaster}<div>
            <div style="float:left;width:40%"> {backButton} </div>
            <div style="float:left;"> {range} </div>
            <div style="float:right;"> {nextButton} </div>
          </div>
          <br/>
          <div style="height:500px;overflow:auto;padding:5px;">
            <pre> {logText} </pre>
          </div>
        </body>
      </html>

    UIUtils.basicVarysPage(content, logType + " log page for " + coflowId)
  }

  /** Determine the byte range for a log or log page. */
  def getByteRange(path: String, offset: Option[Long], byteLength: Int): (Long, Long) = {
    val defaultBytes = 100 * 1024
    val maxBytes = 1024 * 1024

    val file = new File(path)
    val logLength = file.length()
    val getOffset = offset.getOrElse(logLength - defaultBytes)

    val startByte =
      if (getOffset < 0) 0L
      else if (getOffset > logLength) logLength
      else getOffset

    val logPageLength = math.min(byteLength, maxBytes)
    val endByte = math.min(startByte + logPageLength, logLength)

    (startByte, endByte)
  }

  def stop() {
    server.foreach(_.stop())
  }
}

private[Yosemite] object SlaveWebUI {
  val STATIC_RESOURCE_DIR = "Yosemite/ui/static"
  val DEFAULT_PORT = "16017"
}
zhanghan1990/Yosemite
core/src/main/scala/Yosemite/framework/slave/ui/SlaveWebUI.scala
Scala
apache-2.0
6,672
package ex8.cs

class ex7 {
  var counter = 0

  def trueLabalCounter = { counter += 1 }

  def parser(s: String) = {
    val a = s.split(" ")
    a.length match {
      case 3 => findOpration3(a(0), a(1), a(2))
      case 1 => findOpration1(a(0))
    }
  }

  def moveStackPointer = {}

  def arrOp(o: String) = f"""@SP \\nAM=M-1\\nD=M\\n@SP\\nA=M-1\\nM=D$o%sM\\n"""

  def negOp = s"""@SP\\nA=M-1\\nM=-M\\n"""

  def compOper(o: String) = s"""@SP\\nAM=M-1\\nD=M\\n@SP\\nA=M-1\\nD=M-D\\nM=-1\\n@TRUE$counter\\nD;$o\\n@SP\\nA=M-1\\nM=0\\n(TRUE$counter)"""; trueLabalCounter

  def bitOper(o: String) = f"""@SP\\nAM=M-1\\nD=M\\n@SP\\nA=M-1\\nM=D$o%sM"""

  def notOp = s"""@SP\\nA=M-1\\nM=!M\\n"""

  def findOpration1(s: String) = {
    s match {
      case "add" => arrOp("+")
      case "sub" => arrOp("-")
      case "neg" => negOp
      case "eq"  => compOper("JEQ")
      case "gt"  => compOper("JGT")
      case "lt"  => compOper("JLT")
      case "and" => bitOper("&")
      case "or"  => bitOper("|")
    }
  }

  def findOpration3(op: String, sigment: String, offset: String) = {
    sigment match {
      case "constant" => s"""@$offset \\nD=A"""
      case "push"     => 0
      case "pop"      => 0
    }
  }
}
semlie/TheElemOfCompSys
ex8/cs/ex7.scala
Scala
gpl-2.0
1,229
/** * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * */ package com.ksmpartners.ernie.engine import org.testng.annotations._ import java.io._ import report._ import java.net.URL import org.testng.Assert import com.ksmpartners.ernie.model._ import org.joda.time.DateTime //import com.ksmpartners.common.annotations.tracematrix.{ TestSpecs, TestSpec } import org.slf4j.{ LoggerFactory, Logger } import scala.collection.{ JavaConversions, mutable } import com.ksmpartners.ernie.util.Utility._ import com.ksmpartners.ernie.util.TestLogger import org.eclipse.birt.report.engine.api.UnsupportedFormatException import akka.actor.{ ActorSystem, ActorRef, ActorDSL } import akka.pattern.ask import akka.util.Timeout import concurrent.duration._ import DurationConversions._ import scala.concurrent.{ Await, ExecutionContext, Future } import ExecutionContext.Implicits.global import scala.Some class ActorsTest extends TestLogger { val system = ActorSystem("actors-test-system") private var reportManager: MemoryReportManager = null private var coordinator: ActorRef = null private val log: Logger = LoggerFactory.getLogger("com.ksmpartners.ernie.engine.ActorsTest") implicit val timeout: Timeout = Timeout(365 days) @BeforeClass def setup() { reportManager = new MemoryReportManager val url: URL = Thread.currentThread().getContextClassLoader.getResource("test_def.rptdesign") val file = new File(url.getPath) var fis: FileInputStream = null try { fis = new FileInputStream(file) val byteArr = new Array[Byte](file.length().asInstanceOf[Int]) fis.read(byteArr) reportManager.putDefinition("test_def", byteArr, new DefinitionEntity(DateTime.now(), "test_def", "default", null, "", JavaConversions.asJavaList(List(ReportType.CSV)), null)) coordinator = ActorDSL.actor(system)(new Coordinator(Some(createTempDirectory.getAbsolutePath), reportManager, None, 5) with TestReportGeneratorFactory) } finally { try { fis.close() } catch { case e => } } } @AfterClass def shutdown() { coordinator ! (ShutDownRequest()) } @Test def canRequestReportAndRetrieveStatus() { import com.ksmpartners.ernie.engine._ val jobId = Await.result((coordinator ? (ReportRequest("test_def", ReportType.PDF, None, Map.empty[String, String], "testUser"))).mapTo[ReportResponse], timeout.duration).jobId Assert.assertNotSame(Await.result((coordinator ? (StatusRequest(jobId))).mapTo[StatusResponse], timeout.duration).jobStatus, JobStatus.NO_SUCH_JOB) } @Test def statusForMissingJobIsNoSuchJob() { import com.ksmpartners.ernie.engine._ Assert.assertEquals(Await.result((coordinator ? (StatusRequest(0))).mapTo[StatusResponse], timeout.duration).jobStatus, JobStatus.NO_SUCH_JOB) } @Test def unsupportedOutputFormatJobHasCorrectStatus() { import com.ksmpartners.ernie.engine._ val jobId = Await.result((coordinator ? (ReportRequest("test_def", ReportType.CSV, None, Map.empty[String, String], "testUser"))).mapTo[ReportResponse], timeout.duration).jobId Assert.assertEquals(Await.result((coordinator ? 
(StatusRequest(jobId))).mapTo[StatusResponse], timeout.duration).jobStatus, JobStatus.FAILED_UNSUPPORTED_FORMAT) } @Test(dependsOnMethods = Array("reportEntitiesIncludeAllRequiredMetadata")) def canDeleteJobOutput() { import com.ksmpartners.ernie.engine._ Assert.assertEquals(Await.result((coordinator ? (DeleteRequest(jobId))).mapTo[DeleteResponse], timeout.duration).deleteStatus, DeleteStatus.SUCCESS) } @Test def canRequestJobCatalogs() { import com.ksmpartners.ernie.engine._ val catalogs = List((coordinator ? (JobsCatalogRequest(Some(JobCatalog.COMPLETE)))).mapTo[JobsCatalogResponse], (coordinator ? (JobsCatalogRequest(Some(JobCatalog.DELETED)))).mapTo[JobsCatalogResponse], (coordinator ? (JobsCatalogRequest(Some(JobCatalog.EXPIRED)))).mapTo[JobsCatalogResponse], (coordinator ? (JobsCatalogRequest(Some(JobCatalog.FAILED)))).mapTo[JobsCatalogResponse], (coordinator ? (JobsCatalogRequest(None))).mapTo[JobsCatalogResponse], (coordinator ? (JobsCatalogRequest(Some(JobCatalog.IN_PROGRESS)))).mapTo[JobsCatalogResponse]) Await.result(Future.sequence(catalogs), timeout.duration).foreach { c => Assert.assertNotSame(c.catalog, null) } } @Test def canSpawnWorker() { coordinator ! NewWorkerRequest() } @Test def canRequestJobMap() { import com.ksmpartners.ernie.engine._ val jobId = Await.result((coordinator ? (ReportRequest("test_def", ReportType.PDF, None, Map.empty[String, String], "testUser"))).mapTo[ReportResponse], timeout.duration).jobId Assert.assertTrue(Await.result((coordinator ? (JobsListRequest())).mapTo[JobsListResponse], timeout.duration).jobsList.contains(jobId.toString)) } var jobId = -1L @Test def canGetResult() { import com.ksmpartners.ernie.engine._ val rptResp: ReportResponse = Await.result((coordinator ? (ReportRequest("test_def", ReportType.PDF, None, Map.empty[String, String], "testUser"))).mapTo[ReportResponse], timeout.duration) var status = rptResp.jobStatus do { status = Await.result((coordinator ? (StatusRequest(rptResp.jobId))).mapTo[StatusResponse], timeout.duration).jobStatus } while (status == JobStatus.IN_PROGRESS) val r = Await.result((coordinator ? 
(ResultRequest(rptResp.jobId))).mapTo[ResultResponse], timeout.duration) Assert.assertTrue(r.rptId isDefined) Assert.assertTrue(reportManager.getReport(r.rptId.get) isDefined) jobId = rptResp.jobId } @Test def canAddDefWithEmptyOrWhitespaceDescription() { val defEntEmptyDesc = new DefinitionEntity(DateTime.now, null, "default", null, "", null, null) Assert.assertEquals(reportManager.putDefinition(defEntEmptyDesc)._1.getCreatedUser, "default") val defEntWhiteSpaceDesc = new DefinitionEntity(DateTime.now, null, "default", null, " ", null, null) Assert.assertTrue((reportManager.putDefinition(defEntEmptyDesc)._1.getDefDescription == null) || (reportManager.putDefinition(defEntEmptyDesc)._1.getDefDescription == "")) Assert.assertEquals(reportManager.putDefinition(defEntEmptyDesc)._1.getCreatedUser, "default") } @Test def defDescriptionWhitespaceIsTrimmed() { val defEntEmptyDesc = new DefinitionEntity(DateTime.now, null, "default", null, " test ", null, null) Assert.assertEquals(reportManager.putDefinition(defEntEmptyDesc)._1.getDefDescription, "test") } @Test def nonUniqueDefDescriptionsAreAllowed() { val defEntEmptyDesc = new DefinitionEntity(DateTime.now, null, "default", null, "", null, null) Assert.assertEquals(reportManager.putDefinition(defEntEmptyDesc)._1.getCreatedUser, "default") Assert.assertEquals(reportManager.putDefinition(defEntEmptyDesc)._1.getCreatedUser, "default") } @Test(dependsOnMethods = Array("canGetResult")) def reportEntitiesIncludeAllRequiredMetadata() { Assert.assertTrue(jobId > 0L) val rptRsp = Await.result((coordinator ? (ReportDetailRequest(jobId))).mapTo[ReportDetailResponse], timeout.duration) Assert.assertTrue(rptRsp.rptEntity.isDefined) val rptEnt = rptRsp.rptEntity.get Assert.assertTrue(rptEnt.getCreatedDate != null) Assert.assertTrue(rptEnt.getCreatedUser != null) Assert.assertTrue(rptEnt.getFinishDate != null) Assert.assertTrue(rptEnt.getReportType != null) Assert.assertTrue(rptEnt.getRetentionDate != null) Assert.assertTrue(rptEnt.getRptId != null) Assert.assertTrue(rptEnt.getSourceDefId != null) Assert.assertTrue(rptEnt.getStartDate != null) } @Test def jobWithoutRetentionDateUsesDefault() { import com.ksmpartners.ernie.engine._ val rsp = Await.result((coordinator ? (ReportRequest("test_def", ReportType.PDF, None, Map.empty[String, String], "testUser"))).mapTo[ReportResponse], timeout.duration) val defaultRetentionDate = DateTime.now().plusDays(reportManager.getDefaultRetentionDays) // var statusRespOp = rsp.jobStatus /*do { statusRespOp = Await.result((coordinator ? (StatusRequest(rsp.jobId))).mapTo[StatusResponse], timeout.duration).jobStatus } while (statusRespOp != JobStatus.COMPLETE)*/ try { Await.result((coordinator ? JobNotificationRequest(rsp.jobId, Some(JobStatus.COMPLETE))).mapTo[JobNotificationResponse], timeout.duration) val r = Await.result((coordinator ? 
(ResultRequest(rsp.jobId))).mapTo[ResultResponse], timeout.duration) Assert.assertTrue(r.rptId.isDefined) Assert.assertTrue(reportManager.getReport(r.rptId.get).isDefined) Assert.assertTrue(reportManager.getReport(r.rptId.get).get.getRetentionDate.dayOfYear == defaultRetentionDate.dayOfYear) } catch { case t: Throwable => Assert.assertTrue(false) } } } // Stubs used for testing: trait TestReportGeneratorFactory extends ReportGeneratorFactory { private var rptGen: Option[TestReportGenerator] = None def getReportGenerator(reportManager: ReportManager): ReportGenerator = rptGen getOrElse { rptGen = Some(new TestReportGenerator(reportManager)) rptGen.get } } class TestReportGenerator(reportManager: ReportManager) extends ReportGenerator { protected var running = false private val log: Logger = LoggerFactory.getLogger("com.ksmpartners.ernie.engine.TestReportGenerator") override def startup = if (!running) running = true override def getAvailableRptDefs: List[String] = { if (!running) throw new IllegalStateException("ReportGenerator is not started") List("test_def") } def runReport(defId: String, rptId: String, rptType: ReportType, retentionDays: Option[Int], userName: String) = runReport(defId, rptId, rptType, retentionDays, Map.empty[String, String], userName) def runReport(defId: String, rptId: String, rptType: ReportType, retentionDays: Option[Int], reportParameters: scala.collection.Map[String, String], userName: String) { if (!running) throw new IllegalStateException("ReportGenerator is not started") if (reportManager.getDefinition(defId).get.getUnsupportedReportTypes.contains(rptType)) throw new UnsupportedFormatException("Unsupported format", Unit) var entity = new mutable.HashMap[String, Any]() entity += (ReportManager.rptId -> rptId) entity += (ReportManager.sourceDefId -> "test_def") entity += (ReportManager.reportType -> rptType) entity += (ReportManager.createdUser -> userName) entity += (ReportManager.startDate -> DateTime.now) try_(reportManager.putReport(entity)) { os => os.write(rptId.getBytes) } entity += (ReportManager.finishDate -> DateTime.now) reportManager.updateReportEntity(entity) } override def shutdown() { if (running) running = false } }
ksmpartners/ernie
ernie-engine/src/test/scala/com/ksmpartners/ernie/engine/ActorsTest.scala
Scala
apache-2.0
11,295
package screens

import app.AppRouter.Page
import autowire._
import chandu0101.scalajs.react.components.Implicits._
import chandu0101.scalajs.react.components.materialui.{ MuiCheckbox, MuiList, MuiListItem, MuiMuiThemeProvider, MuiRaisedButton, MuiTextField }
import japgolly.scalajs.react.extra.router.RouterCtl
import japgolly.scalajs.react.vdom.prefix_<^._
import japgolly.scalajs.react.{ ReactComponentB, _ }
import services.AjaxClient
import shared.models.SharedDefault.SharedAccount
import shared.services.AdminApi

import scala.collection.mutable
import scala.language.{ existentials, implicitConversions, postfixOps }

/**
 * allow the admin to delete (user, business, admin) accounts
 */
object DeleteAccountsScreen {

  case class Props(page: RouterCtl[Page])

  case class State(accList: List[SharedAccount])

  class Backend(scope: BackendScope[Props, State]) {

    val accSelection = mutable.Set[Int]()

    // request the list of all accounts, then filter to exclude admin accounts for the initial state
    def mounted() = {
      AjaxClient[AdminApi].getAccounts("all").call().map {
        case Some(accounts) =>
          scope.setState(State(accounts.filter(!_.isAdmin).sortWith(_.role < _.role).toList)).runNow()
        case None =>
          scope.setState(State(List.empty)).runNow()
      }
      Callback.empty
    }

    def collect(accId: Int)(evt: ReactEventH, isChecked: Boolean) = {
      if (isChecked) accSelection += accId else accSelection -= accId
      Callback.empty
    }

    // delete user/business accounts
    def deleteAction(e: ReactEventH) = {
      // send the list of Accounts to be deleted
      AjaxClient[AdminApi].deleteAccounts(accSelection.toList).call().map { x =>
        // when done clear the selections and get the new accounts list
        accSelection.clear()
        AjaxClient[AdminApi].getAccounts("all").call().map {
          case Some(accs) =>
            scope.modState(s => s.copy(accList = accs.filter(!_.isAdmin).sortWith(_.role < _.role).toList)).runNow()
          case None =>
            println("in DeleteAccountsScreen deleteAction NO DATA ")
        }
      }
      Callback.empty
    }

    def render(props: Props, state: State) =
      MuiMuiThemeProvider()(
        <.div(
          ^.`class` := "jumbotron",
          MuiRaisedButton(onClick = deleteAction _, label = "削除", primary = true)(),
          <.br(),
          MuiList()(
            for (acc <- state.accList) yield MuiListItem(
              primaryText = acc.role + " (" + acc.number + ") " + acc.name + " " + acc.address,
              leftCheckbox = MuiCheckbox(onCheck = collect(acc.id) _)()
            )()
          )
        )
      )
  }

  val component = ReactComponentB[Props]("DeleteAccountsScreen")
    .initialState(State(List.empty))
    .renderBackend[Backend]
    .componentDidMount(_.backend.mounted())
    .build

  def apply(page: RouterCtl[Page]) = component(Props(page))
}
workingDog/SilverworkReact
clientAdmin/src/main/scala/screens/DeleteAccountsScreen.scala
Scala
apache-2.0
2,912
package com.catinthedark.shapeshift import com.badlogic.gdx.{Gdx, Input} import com.badlogic.gdx.graphics.{GL20, Texture} import com.badlogic.gdx.graphics.g2d.SpriteBatch import com.catinthedark.lib.{KeyAwaitState, Stub, TextureState} import com.catinthedark.lib.Magic._ class GameWinScreen(val shared: Shared0) extends Stub("GameWin") with KeyAwaitState { override val keycode: Int = Input.Keys.ENTER val batch = new SpriteBatch override def onActivate(data: Any): Unit = { super.onActivate() } override def run(delta: Float): (Option[Unit], Any) = { batch.managed { self: SpriteBatch => self.draw(Assets.Textures.won, 0, 0) } super.run(delta) } }
cat-in-the-dark/old48_35_game
src/main/scala/com/catinthedark/shapeshift/GameWinScreen.scala
Scala
mit
688
package sri.sangria.mongoserver import java.time._ import cats.data.Xor import io.circe.{DecodingFailure, Encoder, Decoder, Json} import reactivemongo.bson.BSONObjectID import scala.util.Try /** * Circe Json to BSON conversion */ package object mongo2circe { val $OID = "$oid" val $DATE_TIME = "$datetime" implicit final def decodeBSONObjectID: Decoder[BSONObjectID] = Decoder.instance[BSONObjectID](c => { Try(Xor.Right(BSONObjectID(c.focus.asObject.get($OID).get.asString.get))).toOption.getOrElse(Xor.Left(DecodingFailure("failed in decoding BSONObjectId(oid)", c.history))) }) implicit final def encodeBSONObjectID: Encoder[BSONObjectID] = Encoder.instance[BSONObjectID](bid => Json.obj($OID -> Json.string(bid.stringify))) implicit final def decodeLocalDateTime: Decoder[LocalDateTime] = Decoder.instance[LocalDateTime](c => { Try(Xor.Right(LocalDateTime.ofInstant(Instant.ofEpochMilli(c.focus.asObject.get($DATE_TIME).get.asNumber.get.toLong.get), ZoneOffset.ofHoursMinutes(5, 30)))).toOption.getOrElse(Xor.Left(DecodingFailure("failed in decoding LocalDateTime", c.history))) }) implicit final def encodeLocalDateTime: Encoder[LocalDateTime] = Encoder.instance[LocalDateTime](dt => Json.obj($DATE_TIME -> Json.long(dt.toInstant(ZoneOffset.ofHoursMinutes(5, 30)).toEpochMilli))) }
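// Illustrative usage sketch of the implicit codecs above; assumes the same circe/cats-Xor and
// reactivemongo versions this package targets. The object and value names below are hypothetical.
object Mongo2CirceUsageSketch {
  import io.circe.syntax._
  import reactivemongo.bson.BSONObjectID
  import mongo2circe._ // brings the implicit Encoder/Decoder defined above into scope

  val oid: BSONObjectID = BSONObjectID.generate
  val json = oid.asJson // {"$oid": "<hex string>"} via encodeBSONObjectID above
  // decoding back: json.as[BSONObjectID] should yield Xor.Right(oid) via decodeBSONObjectID above
}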
chandu0101/sri-sangria-example
server/src/main/scala/sri/sangria/mongoserver/mongo2circe/package.scala
Scala
apache-2.0
1,349
package org.bruchez.tessel

import scala.concurrent.duration._
import scala.scalajs.js
import scala.scalajs.js.Dynamic.{global ⇒ g}

/**
 * This is a quick demo of using Tessel 2 from Scala. It uses the `relay-mono` module and the on-board LEDs.
 */
object Demo extends js.JSApp {

  def main(): Unit = {

    println(s"starting `main()` at ${new js.Date()} with node version ${g.process.version}")

    val tessel    = Tessel()
    val relayMono = RelayMono()

    val relay = relayMono.use(tessel.port.A)

    relay.onReady {
      println("Relay ready!")

      js.timers.setInterval(2.seconds) {
        relay.toggle(1)
      }

      js.timers.setInterval(1.seconds) {
        relay.toggle(2)
      }
    }

    relay.onLatch { (channel, value) ⇒
      println(s"Latch on relay channel $channel switched to $value")

      if (value)
        tessel.led(channel + 1).on()
      else
        tessel.led(channel + 1).off()
    }
  }
}
ebruchez/tessel-scala
src/main/scala/org/bruchez/tessel/Demo.scala
Scala
mit
948
package org.vds.discoverscala.core.ch02

import org.scalatest.{Matchers, WordSpec}

/**
 * Examples of loops.
 */
class LoopExampleTest extends WordSpec with Matchers {

  "Type of loop" when {
    "we use a while loop" should {
      "increment a counter and return a string containing the numbers 1,2,3,4,5" in {
        var results: String = ""
        var i = 1
        while (i < 6) {
          results += i
          i += 1
        }
        results should equal("12345")
      }
    }

    "we use a foreach loop to concatenate list elements into a string" should {
      val expectResult = "12345"
      "return string " + expectResult in {
        val list = List(1, 2, 3, 4, 5)
        var res: String = ""
        list.foreach(res += _)
        res should equal(expectResult)
      }
    }
  }
}
dawid-swist/discover-scala
src/test/scala/org/vds/discoverscala/core/ch02/LoopExampleTest.scala
Scala
gpl-3.0
804
package multip; import scala.collection.mutable.HashMap import math._ import scala.io._ import java.io._ import java.io.FileWriter import scala.util.Random object Utils { def deepCopy[A](a: A)(implicit m: reflect.Manifest[A]): A = util.Marshal.load[A](util.Marshal.dump(a)) def count[A](xs:List[A]): HashMap[A,Int] = { val result = new HashMap[A,Int]() for(x <- xs) { result(x) = result.getOrElse(x, 0) + 1 } return result } def bin2int(b:Array[Double]):List[Int] = { var result = List[Int]() for(i <- 0 until b.length) { if(b(i) == 1.0) { result ::= i } } return result.reverse } object Timer { var begin = new HashMap[String,Long] var sum = new HashMap[String,Long] def reset { begin = new HashMap[String,Long] sum = new HashMap[String,Long] } def reset(s:String):Double = { val time = stop(s) begin.remove(s) sum.remove(s) time } def start(s:String) = { if(Constants.DEBUG) { println("start " + s) } begin(s) = System.currentTimeMillis } def stop(s:String):Double = { if(Constants.DEBUG) { println("stop " + s) } val end = System.currentTimeMillis sum(s) = sum.getOrElse(s, 0L) + (end - begin(s)) sum(s) / 1000.0 } def print { for((s,t) <- sum.toList.sortBy(_._2).reverse) { println(s + "\\t" + t / 1000.0 + " s") } } } } object StringUtils { def chomp(str:String) : String = { str.substring(0, str.lastIndexOf("\\n")) } /** * stripWS * Strips leading/trailing whitespace */ def stripWS(str:String) : String = { str.replaceFirst("""^\\s+""", "").replaceFirst("""[\\s\\n]+$""", "") } } object MathUtils { val rnd = new Random def ArgMax(d:Array[Double]):Int = { var result = 0 var max = d(0) for(i <- 1 to d.length-1) { if(d(i) > max) { result = i max = d(i) } } return(result) } //Sample from a discrete distribution def Sample(d:Array[Double]):Int = { var sum = 0.0 val target = rnd.nextDouble * d.sum.toDouble for(i <- 0 to d.length-1) { sum += d(i) if(sum > target) { return(i) } } 0 } //Not the most efficient... def Mode[T](d:Array[T]) { var maxCount = 0 var maxVal = d(0) for(i <- 0 until d.length) { var count = 0 for(j <- (i+1) until d.length) { if(d(j) == d(i)) { count += 1 } } if(count > maxCount) { maxCount = count maxVal = d(i) } } } //NOTE: seems to work... might be a couple boundary cases. def LogNormalize(d:Array[Double]) { //Log Exp Sum val max = d.max val logSum = max + log(d.map(x => exp(x - max)).sum) //Normalize for(i <- 0 to d.length-1) { d(i) -= logSum } } def LogExpSum(d:Array[Double]):Double = { val max = d.max return(max + log(d.map(x => exp(x - max)).sum)) } def Normalize(d:Array[Double]):Array[Double] = { val sum = d.sum for(i <- 0 to d.length-1) { d(i) /= sum } d } def LogFactorial(n:Double):Double = { var result = 0.0 for(i <- 2 to n.toInt) { result += log(i) } return result } def LogFactorial_old(n:Double):Double = { if(n <= 1) { return 0.0; } else { return log(n) + LogFactorial(n-1); } } //Rising factorial function def LogRff(n:Double, alpha:Double):Double = { if(n <= 1) { return 0.0; } else { return log(alpha + n) + LogFactorial(n-1); } } }
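// Illustrative usage sketch of the helpers above: LogNormalize works in place on unnormalized
// log-weights, and Sample draws an index from (unnormalized) probabilities. The object name and
// values below are only for illustration.
object MathUtilsUsageSketch {
  def main(args: Array[String]): Unit = {
    val logWeights = Array(math.log(1.0), math.log(3.0)) // unnormalized log-probabilities
    MathUtils.LogNormalize(logWeights)                   // in place -> log(0.25), log(0.75)
    val probs = logWeights.map(math.exp)                 // Array(0.25, 0.75)
    val drawn = MathUtils.Sample(probs)                  // 0 with p=0.25, 1 with p=0.75
    println(s"drawn=$drawn argMax=${MathUtils.ArgMax(probs)} logSum=${MathUtils.LogExpSum(logWeights)}")
  }
}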
cocoxu/multip
src/main/scala/Common.scala
Scala
gpl-3.0
3,573
package tutorial.webapp import lib._ import org.scalajs.dom import org.scalajs.dom.html.{Element} import rx.core.{Var, Rx} import scala.scalajs.js.{JSApp} import scala.util.{Failure, Success} import scala.scalajs.concurrent.JSExecutionContext.Implicits.runNow import scalatags.JsDom.tags2.section import scalatags.JsDom.all._ object TutorialApp extends JSApp { import Framework._ val repositories: Var[Seq[Repository]] = Var(Seq[Repository]()) val userInputBox = input( `id`:="userInputBox", `class`:= "form-control", autofocus:=true, autocomplete:=false, placeholder := "user name here.", value:="YusukeKokubo" ).render val userSubmit = button( `type`:="submit", `class`:="btn btn-default", onclick:={ () => getRepositories(Var(userInputBox.value)()) false })("send").render val errorMessage = Var("") case class Debug(url: String, res: String) val debug = Var(List[Debug]()) def main(): Unit = { dom.document.getElementById("error").appendChild(showError()) dom.document.getElementById("repositories").appendChild(setupUI()) dom.document.getElementById("debug").appendChild(setupDebug()) GitHub.hook = (url: String, res: String) => { debug() = Debug(url, res) :: debug() } } def setupUI(): Element = { section( form(`class`:="form-inline")(div(`class`:="form-group", i(`class`:="fa fa-github-alt fa-3"), userInputBox), userSubmit), Rx { ul( repositories().map { showRepository } ) } ).render } def setupDebug(): Element = { div(`class`:="panel-group", role:="tablist", id:="accordion", aria.multiselectable:=true)( Rx { debug().zipWithIndex.map { case(d, i) => div(`class` := "panel panel-default")( div(`class` := "panel-heading", role := "tab", id:="hedding" + i)( h4(`class` := "panel-title")( a(data.toggle := "collapse", data.parent := "#accordion", aria.expanded := false, aria.controls := "collapse" + i, href:="#collapse" + i)(d.url) ) ), div(`class`:="panel-collapse collapse " + (if(i == 0) "in" else ""), role:="tabpanel", aria.labelledby:="hedding" + i, id:="collapse" + i, aria.expanded:=false)( div(`class`:="panel-body")(pre(d.res)) ) ) } } ).render } def showError(): Element = { div(`class`:="error")( Rx { p(errorMessage()) } ).render } def showRepository(r: Repository): Element = { val refs = Var(Seq[Reference]()) li(referenceAnchor(r, refs), Rx { ul( refs().map { showReference(r, _) } ) } ).render } def showReference(r: Repository, ref: Reference): Element = { val commits = Var(List[Commit]()) li(commitAnchor(r, ref.`object`.sha, ref.ref, commits), Rx { ul( commits().map { showCommit(r, _) }, for (p <- if (!commits().isEmpty) commits().reverse.head.parents else Seq()) yield { li(commitAnchor(r, p.sha, p.sha.substring(0, 6), commits)) } ) } ).render } def showCommit(repo: Repository, commit: Commit): Element = { val trees = Var[Option[Trees]](None) li(`class`:="commit")( label(commit.author.date), label(commit.author.name), label(commit.message), a(href:="#")(span(`class`:="glyphicon glyphicon-plus", aria.hidden:=true))(onclick:={() => getTrees(Var(userInputBox.value)(), repo.name, commit.sha, trees) }), Rx { ul( trees() match { case Some(ts) => ts.tree.map{t => if (t.`type` == "tree") { li(t.path)(span(`class`:="glyphicon glyphicon-tree-deciduous")) } else { li(a(t.path)(onclick:={() => getBlob(Var(userInputBox.value)(), repo.name, t.sha) })) } } case None => {} } ) } ).render } def commitAnchor(repo: Repository, sha: String, caption: String, commits: Var[List[Commit]]): Element = { a(href:="#")(onclick:={() => getCommit(Var(userInputBox.value)(), repo.name, sha, commits) false })(caption).render } def 
referenceAnchor(repo: Repository, refs: Var[Seq[Reference]]): Element = { a(href:="#")(onclick:={() => getReferences(Var(userInputBox.value)(), repo.name, refs) false })(repo.name).render } def getRepositories(user: String): Unit = { if(user.isEmpty) { errorMessage() = "input user name." return } GitHub.repos(user).onComplete { case Success(msg) => repositories() = msg case Failure(t) => errorMessage() = t.getMessage } } def getReferences(owner: String, repo: String, result: Var[Seq[Reference]]): Unit = { GitHub.refs(owner, repo).onComplete { case Success(msg) => result() = msg case Failure(t) => errorMessage() = t.getMessage } } def getCommit(owner: String, repo: String, sha: String, result: Var[List[Commit]]): Unit = { GitHub.commit(owner, repo, sha).onComplete { case Success(msg) => result() = result() :+ msg case Failure(t) => errorMessage() = t.getMessage } } def getTrees(owner: String, repo: String, sha: String, result: Var[Option[Trees]]): Unit = { GitHub.trees(owner, repo, sha).onComplete { case Success(msg) => result() = Some(msg) case Failure(t) => errorMessage() = t.getMessage } } def getBlob(owner: String, repo: String, sha: String): Unit = { GitHub.blob(owner, repo, sha).onComplete { case Success(msg) => {} case Failure(t) => errorMessage() = t.getMessage } } }
YusukeKokubo/understanding-git-data-structure
src/main/scala/tutorial/webapp/TutorialApp.scala
Scala
mit
5,829
package uk.co.bocuma.mailinator import net.ceedubs.ficus.Ficus._ import com.typesafe.config.ConfigFactory object Mailinator extends MailinatorImpl trait MailinatorImpl { val config = ConfigFactory.load() val environment = Option(System.getenv("SCALA_ENV")).getOrElse("development") val baseUrl = "https://api.mailinator.com/api/" val appKey = Option(System.getenv("MAILINATOR_API_KEY")) match { case None => config.as[Option[String]]( s"$environment.appKey" ) match { case Some(value) => value case _ => throw new ConfigurationException("appKey") } case Some(value) => value } class ConfigurationException(method: String) extends Throwable { override def toString = "You must supply "+ method +" in the configuration file" } }
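// Illustrative configuration sketch: the lookup above prefers the MAILINATOR_API_KEY environment
// variable and otherwise falls back to `<SCALA_ENV>.appKey` in the Typesafe config. A hypothetical
// application.conf could therefore look like this (the key value is a placeholder):
//
//   development {
//     appKey = "your-mailinator-api-key"
//   }
//
// or, equivalently, run with: MAILINATOR_API_KEY=your-mailinator-api-key SCALA_ENV=development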
cammellos/mailinator-scala-api
src/main/scala/uk/co/bocuma/mailinator/Mailinator.scala
Scala
lgpl-3.0
779
package com.github.aselab.activerecord.validations import java.util.{Date, UUID} import java.sql.Timestamp import com.github.aselab.activerecord._ import inner._ object Models { import com.github.aselab.activerecord.dsl._ import annotation.meta._ type CustomAnnotation = sample.CustomAnnotation @field case class CustomAnnotationModel( @CustomAnnotation("match") value: String ) extends ActiveRecord case class ValidationModel( @Length(min=3, max=10) length: String = "aaaaa", @Range(max=5.3) maxValue: Double = 0, @Range(min=0) minValue: Long = 1, @Range(min = 5, max = 10) range: Int = 7, @Accepted accepted: Boolean = true, @Email email: String = "[email protected]", @Format("""^\\d+$""") format: String = "100", @StringEnum(Array("a", "b")) stringEnum: String = "a", @NumberEnum(Array(1, 2)) numberEnum: Int = 1, @Length(min=3, max=10) lengthOption: Option[String] = Some("aaaaa"), @Range(max=5.3) maxValueOption: Option[Double] = Some(0), @Range(min=0) minValueOption: Option[Long] = Some(1), @Range(min = 5, max = 10) rangeOption: Option[Int] = Some(7), @Accepted acceptedOption: Option[Boolean] = Some(true), @Email emailOption: Option[String] = Some("[email protected]"), @Format("""^\\d+$""") formatOption: Option[String] = Some("100") ) extends ActiveModel case class UserModel( @Transient @Confirmation var password: String, @Transient var passwordConfirmation: String ) extends ActiveRecord case class MissingConfirmationField( @Confirmation test: String ) extends ActiveModel case class ValidationSupportModel( @Email email: String = "" ) extends ProductModel with ValidationSupport { val isNewRecord = true } case class AnnotationOptionModel( @Required(message="custom message") message: String = "a", @Required(on="save") onSave: String = "a", @Required(on="create") onCreate: String = "a", @Required(on="update") onUpdate: String = "a", persisted: Boolean = false ) extends ActiveModel { override def isNewRecord = !persisted } object CustomAnnotationModel extends ActiveModelCompanion[CustomAnnotationModel] object ValidationModel extends ActiveModelCompanion[ValidationModel] object UserModel extends ActiveRecordCompanion[UserModel] object MissingConfirmationField extends ActiveModelCompanion[MissingConfirmationField] object ValidationSupportModel extends ProductModelCompanion[ValidationSupportModel] object AnnotationOptionModel extends ActiveModelCompanion[AnnotationOptionModel] } class ValidationSupportSpec extends DatabaseSpecification { import Models._ "ValidationSupport" should { "doValidate" in { "with custom annotation" in { val customValidator = new Validator[sample.CustomAnnotation]{ def validate(value: Any) = if (value != annotation.value) errors.add(fieldName, "custom") }.register val c = classOf[CustomAnnotationModel] val m1 = CustomAnnotationModel("not match") val m2 = CustomAnnotationModel("match") m1.validate() must beFalse m2.validate() must beTrue customValidator.unregister m1.errors must contain(ValidationError(c, "value", "custom")) m2.errors must beEmpty } "@Confirmation" in { val c = classOf[UserModel] "not equals confirmation field" in { val m = UserModel("aaa", "bbb") m.validate() must beFalse } "equals confirmation field" in { val m = UserModel("aaa", "aaa") m.validate() must beTrue } "throws exception when confirmation field is not defined" in { val m = MissingConfirmationField("aaa") m.validate() must throwA(ActiveRecordException.notFoundConfirmationField("testConfirmation")) } } "@Length" in { val c = classOf[ValidationModel] val m1 = ValidationModel(length = "a") val m2 = 
ValidationModel(length = "a" * 5) val m3 = ValidationModel(length = "a" * 11) m1.validate() m2.validate() m3.validate() m1.errors must contain(ValidationError(c, "length", "activerecord.errors.minLength", 3)) m2.errors must beEmpty m3.errors must contain(ValidationError(c, "length", "activerecord.errors.maxLength", 10)) } "@Length (Option)" in { val c = classOf[ValidationModel] val m1 = ValidationModel(lengthOption = Some("a")) val m2 = ValidationModel(lengthOption = Some("a" * 5)) val m3 = ValidationModel(lengthOption = Some("a" * 11)) val m4 = ValidationModel(lengthOption = None) m1.validate() m2.validate() m3.validate() m4.validate() m1.errors must contain(ValidationError(c, "lengthOption", "activerecord.errors.minLength", 3)) m2.errors must beEmpty m3.errors must contain(ValidationError(c, "lengthOption", "activerecord.errors.maxLength", 10)) m4.errors must beEmpty } "@Range max" in { val c = classOf[ValidationModel] val m1 = ValidationModel(maxValue = 5) val m2 = ValidationModel(maxValue = 4) val m3 = ValidationModel(maxValue = 6) m1.validate() m2.validate() m3.validate() m1.errors must beEmpty m2.errors must beEmpty m3.errors must contain(ValidationError(c, "maxValue", "activerecord.errors.maxValue", 5.3)) } "@Range min" in { val c = classOf[ValidationModel] val m1 = ValidationModel(minValue = 0) val m2 = ValidationModel(minValue = 1) val m3 = ValidationModel(minValue = -1) m1.validate() m2.validate() m3.validate() m1.errors must beEmpty m2.errors must beEmpty m3.errors.messages.toList must contain(ValidationError(c, "minValue", "activerecord.errors.minValue", 0).toString) } "@Range" in { val c = classOf[ValidationModel] val models = List( ValidationModel(range = 4), ValidationModel(range = 5), ValidationModel(range = 6), ValidationModel(range = 9), ValidationModel(range = 10), ValidationModel(range = 11)) models.foreach(_.validate()) models.map(_.errors.toList) must equalTo(List( List(ValidationError(c, "range", "activerecord.errors.minValue", 5)), Nil, Nil, Nil, Nil, List(ValidationError(c, "range", "activerecord.errors.maxValue", 10)) )) } "@Range max (Option)" in { val c = classOf[ValidationModel] val m1 = ValidationModel(maxValueOption = Some(5)) val m2 = ValidationModel(maxValueOption = Some(4)) val m3 = ValidationModel(maxValueOption = Some(6)) val m4 = ValidationModel(maxValueOption = None) m1.validate() m2.validate() m3.validate() m4.validate() m1.errors must beEmpty m2.errors must beEmpty m3.errors must contain(ValidationError(c, "maxValueOption", "activerecord.errors.maxValue", 5.3)) m4.errors must beEmpty } "@Range min (Option)" in { val c = classOf[ValidationModel] val m1 = ValidationModel(minValueOption = Some(0)) val m2 = ValidationModel(minValueOption = Some(1)) val m3 = ValidationModel(minValueOption = Some(-1)) val m4 = ValidationModel(minValueOption = None) m1.validate() m2.validate() m3.validate() m4.validate() m1.errors must beEmpty m2.errors must beEmpty m3.errors.messages.toList must contain(ValidationError(c, "minValueOption", "activerecord.errors.minValue", 0).toString) m4.errors must beEmpty } "@Range (Option)" in { val c = classOf[ValidationModel] val models = List( ValidationModel(rangeOption = Some(4)), ValidationModel(rangeOption = Some(5)), ValidationModel(rangeOption = Some(6)), ValidationModel(rangeOption = Some(9)), ValidationModel(rangeOption = Some(10)), ValidationModel(rangeOption = Some(11)), ValidationModel(rangeOption = None) ) models.foreach(_.validate()) models.map(_.errors.toList) must equalTo(List( List(ValidationError(c, "rangeOption", 
"activerecord.errors.minValue", 5)), Nil, Nil, Nil, Nil, List(ValidationError(c, "rangeOption", "activerecord.errors.maxValue", 10)), Nil )) } "@Accepted" in { val c = classOf[ValidationModel] val m1 = ValidationModel(accepted = true) val m2 = ValidationModel(accepted = false) m1.validate() m2.validate() m1.errors must beEmpty m2.errors must contain(ValidationError(c, "accepted", "activerecord.errors.accepted")) } "@Accepted (Option)" in { val c = classOf[ValidationModel] val m1 = ValidationModel(acceptedOption = Some(true)) val m2 = ValidationModel(acceptedOption = Some(false)) val m3 = ValidationModel(acceptedOption = None) m1.validate() m2.validate() m1.errors must beEmpty m2.errors must contain(ValidationError(c, "acceptedOption", "activerecord.errors.accepted")) m3.errors must beEmpty } "@Email" in { val c = classOf[ValidationModel] val m1 = ValidationModel(email = "[email protected]") val m2 = ValidationModel(email = "aaa") val m3 = ValidationModel(email = null) m1.validate() m2.validate() m3.validate() m1.errors must beEmpty m2.errors must contain(ValidationError(c, "email", "activerecord.errors.invalid")) m3.errors must beEmpty } "@Email (Option)" in { val c = classOf[ValidationModel] val m1 = ValidationModel(emailOption = Some("[email protected]")) val m2 = ValidationModel(emailOption = Some("aaa")) val m3 = ValidationModel(emailOption = None) val m4 = ValidationModel(emailOption = Some(null)) m1.validate() m2.validate() m3.validate() m4.validate() m1.errors must beEmpty m2.errors must contain(ValidationError(c, "emailOption", "activerecord.errors.invalid")) m3.errors must beEmpty m4.errors must beEmpty } "@Format" in { val c = classOf[ValidationModel] val m1 = ValidationModel(format = "200") val m2 = ValidationModel(format = "a1a") val m3 = ValidationModel(format = null) m1.validate() m2.validate() m3.validate() m1.errors must beEmpty m2.errors must contain(ValidationError(c, "format", "activerecord.errors.format")) m3.errors must beEmpty } "@Format (Option)" in { val c = classOf[ValidationModel] val m1 = ValidationModel(formatOption = Some("200")) val m2 = ValidationModel(formatOption = Some("a1a")) val m3 = ValidationModel(formatOption = None) val m4 = ValidationModel(formatOption = Some(null)) m1.validate() m2.validate() m3.validate() m4.validate() m1.errors must beEmpty m2.errors must contain(ValidationError(c, "formatOption", "activerecord.errors.format")) m3.errors must beEmpty m4.errors must beEmpty } "@StringEnum" in { val c = classOf[ValidationModel] val m1 = ValidationModel(stringEnum = "z") val m2 = ValidationModel(stringEnum = null) m1.validate() must beFalse m2.validate() must beFalse } "@NumberEnum" in { val c = classOf[ValidationModel] val m = ValidationModel(numberEnum = 5) m.validate() must beFalse } } "annotation options" in { val c = classOf[AnnotationOptionModel] "message" in { val m = AnnotationOptionModel(message = "") m.validate() must beFalse m.errors must contain(ValidationError(c, "message", "custom message")) } "on save" in { val onCreate = AnnotationOptionModel(onSave = "") onCreate.validate() must beFalse onCreate.errors must not beEmpty val onUpdate = AnnotationOptionModel(onSave = "", persisted = true) onUpdate.validate() must beFalse onUpdate.errors must not beEmpty } "on create" in { val onCreate = AnnotationOptionModel(onCreate = "") onCreate.validate() must beFalse onCreate.errors must not beEmpty val onUpdate = AnnotationOptionModel(onCreate = "", persisted = true) onUpdate.validate() must beTrue onUpdate.errors must beEmpty } "on update" in { 
val onCreate = AnnotationOptionModel(onUpdate = "") onCreate.validate() must beTrue onCreate.errors must beEmpty val onUpdate = AnnotationOptionModel(onUpdate = "", persisted = true) onUpdate.validate() must beFalse onUpdate.errors must not beEmpty } } "validate on save" >> { "not extends ActiveRecord" >> { val v = ValidationSupportModel("aaa") v.save() must beFalse v.errors must not beEmpty } "extends ActiveRecord" >> { val m = UserModel("a", "b") m.save() must beFalse m.errors must not beEmpty } "save(true) throws Exception" >> { val m = UserModel("a", "b") m.save(true) must throwA[RecordInvalidException].like{ case e => e mustEqual ActiveRecordException.saveFailed(m.errors)} } } } }
aselab/scala-activerecord
activerecord/src/test/scala/validations/ValidationSupportSpec.scala
Scala
mit
13,793
package org.sameersingh.scalaplot.metrics /** * @author sameer */ object Stats { def mean(points: Seq[Double]): Double = points.sum / points.size def meanAndVariance(points: Seq[Double]): (Double, Double) = { var n = 0.0 var mv = 0.0 var m2 = 0.0 for (x <- points) { n += 1.0 // delta = x - mean val delta = x - mv // mean = mean + delta/n mv = mv + delta / n // M2 = M2 + delta*(x - mean) m2 = m2 + delta * (x - mv) } (mv, m2 / (n - 1)) } def variance(points: Seq[Double]): Double = meanAndVariance(points)._2 def standardDev(points: Seq[Double]): Double = StrictMath.sqrt(variance(points)) }
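// Illustrative usage sketch of the single-pass (Welford-style) mean/variance above; the object
// name and numbers are only for illustration.
object StatsUsageSketch extends App {
  val xs = Seq(1.0, 2.0, 3.0)
  val (m, v) = Stats.meanAndVariance(xs)
  // m = 2.0, v = ((1-2)^2 + (2-2)^2 + (3-2)^2) / (3 - 1) = 1.0
  println(s"mean=$m variance=$v stddev=${Stats.standardDev(xs)}")
}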
nightscape/scalaplot
src/main/scala/org/sameersingh/scalaplot/metrics/Stats.scala
Scala
bsd-2-clause
677
/* * 10.9.2015 * * Overuse of underscore in Scala. * * There are N uses of the poor '_' in Scala. * * Some of these are not really necessary. * * Ref. http://www.slideshare.net/normation/scala-dreaded */ class Underscores { import collection.{ Map =>_, _ } var count : Int = _ def sum = (_:Int) + (_:Int) def sum2(a:Int)(b:Int) = a+b def offset = sum2(count) _ def sizeOf(l:Traversable[_]) : Unit = l match { case it: Seq[Int @unchecked] => count = (0/:it)(_ + _) case s: Iterable[_] => s.foreach( _ => count + count+1 ) case _ => println(offset(l.size)) } } object OverUnderscore extends App { // Shortcut for one parameter functions // val abc = List("a","b","c") println( abc.map(_.toUpperCase) ) println( abc.map( x => x.toUpperCase) ) println( abc.map( (x) => x.toUpperCase) ) println( abc.map( (x:String) => x.toUpperCase) ) // Accessing tuple internals // val ts = List( ("a",0), ("b",1), ("c",2) ) //println( ts.map(_._1 + _._2) ) // does not compile ('_' cannot be referred twice) println( ts.map( x => x._1 + x._2) ) // compiles println( ts.map{ case (c,n) => c+n } ) println( ts.map( Function.tupled( (c,n) => c+n ) )) //... }
akauppi/HelsinkiScalaClub
src/main/scala/OverUnderscore.scala
Scala
unlicense
1,214
/* * Copyright 2015-2016 Snowflake Computing * Copyright 2015 TouchType Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.snowflake.spark.snowflake import java.io.File import java.net.URI import net.snowflake.client.jdbc.internal.apache.commons.io.FileUtils import org.apache.spark.{SparkConf, SparkContext} import org.scalatest.{FunSuite, Matchers} /** * Unit tests for helper functions */ class UtilsSuite extends FunSuite with Matchers { test("joinUrls preserves protocol information") { Utils.joinUrls("s3n://foo/bar/", "/baz") shouldBe "s3n://foo/bar/baz/" Utils.joinUrls("s3n://foo/bar/", "/baz/") shouldBe "s3n://foo/bar/baz/" Utils.joinUrls("s3n://foo/bar/", "baz/") shouldBe "s3n://foo/bar/baz/" Utils.joinUrls("s3n://foo/bar/", "baz") shouldBe "s3n://foo/bar/baz/" Utils.joinUrls("s3n://foo/bar", "baz") shouldBe "s3n://foo/bar/baz/" } test("fixUrl produces Snowflake-compatible equivalents") { Utils.fixS3Url("s3a://foo/bar/12345") shouldBe "s3://foo/bar/12345" Utils.fixS3Url("s3n://foo/bar/baz") shouldBe "s3://foo/bar/baz" } test("fixUrlForCopyCommand produces Snowflake-compatible equivalents") { Utils.fixUrlForCopyCommand("s3a://foo/bar/12345") shouldBe "s3://foo/bar/12345" Utils.fixUrlForCopyCommand("s3n://foo/bar/baz") shouldBe "s3://foo/bar/baz" Utils.fixUrlForCopyCommand("wasb://[email protected]/path") shouldBe "azure://test.azure.com/container/path" Utils.fixUrlForCopyCommand("wasbs://[email protected]/path") shouldBe "azure://test.azure.com/container/path" } test("temp paths are random subdirectories of root") { val root = "s3n://temp/" val firstTempPath = Utils.makeTempPath(root) Utils.makeTempPath(root) should (startWith(root) and endWith("/") and not equal root and not equal firstTempPath) } test("removeCredentialsFromURI removes AWS access keys") { def removeCreds(uri: String): String = { Utils.removeCredentialsFromURI(URI.create(uri)).toString } assert( removeCreds("s3n://bucket/path/to/temp/dir") === "s3n://bucket/path/to/temp/dir" ) assert( removeCreds("s3n://ACCESSKEY:SECRETKEY@bucket/path/to/temp/dir") === // pragma: allowlist secret "s3n://bucket/path/to/temp/dir" ) } test("test Utils.getSizeString") { assert(Utils.getSizeString(100) === "100 Bytes") assert(Utils.getSizeString(1024) === "1.00 KB") assert(Utils.getSizeString((1.1 * 1024).toLong) === "1.10 KB") assert(Utils.getSizeString((1.25 * 1024 * 1024).toLong) === "1.25 MB") assert(Utils.getSizeString((1.88 * 1024 * 1024 * 1024 + 1).toLong) === "1.88 GB") assert(Utils.getSizeString((3.51 * 1024 * 1024 * 1024 * 1024 + 100).toLong) === "3.51 TB") } test("test Utils.getTimeString") { assert(Utils.getTimeString(100) === "100 ms") assert(Utils.getTimeString(1000) === "1.00 seconds") assert(Utils.getTimeString((1.1 * 1000).toLong) === "1.10 seconds") assert(Utils.getTimeString((1.25 * 1000 * 60).toLong) === "1.25 minutes") assert(Utils.getTimeString((1.88 * 1000 * 60 * 60 + 1).toLong) === "1.88 hours") assert(Utils.getTimeString((188 * 1000 * 60 * 60 + 1).toLong) === "188.00 hours") } private def writeTempFile(content: 
String): (File, String, String) = { val temp_file = File.createTempFile("test_file_", ".csv") val temp_file_full_name = temp_file.getPath val temp_file_name = temp_file.getName FileUtils.write(temp_file, content) (temp_file, temp_file_full_name, temp_file_name) } test("test Utils.readMapFromFile/readMapFromString") { // Test valid map file val mapContentString = "#key0 = value0\\nkey1=value1\\nkey2=value2" val (file, fullName, name) = writeTempFile(mapContentString) try { val conf = new SparkConf() .setMaster("local") .setAppName("SnowflakeSourceSuite") val sc = SparkContext.getOrCreate(conf) val resultMap = Utils.readMapFromFile(sc, fullName) assert(resultMap.size == 2) assert(resultMap("key1").equals("value1")) assert(resultMap("key2").equals("value2")) } finally { FileUtils.deleteQuietly(file) } // negative invalid mapstring. assertThrows[Exception]({ Utils.readMapFromString("invalid_map_string") }) } test("misc in Utils") { assert(Utils.getLastCopyUnload == null) assert(Utils.getLastPutCommand == null) assert(Utils.getLastGetCommand == null) } }
snowflakedb/spark-snowflake
src/test/scala/net/snowflake/spark/snowflake/UtilsSuite.scala
Scala
apache-2.0
5,010
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark private[spark] object ResourceName { // known types of resources final val GPU: String = "gpu" final val FPGA: String = "fpga" }
icexelloss/spark
core/src/test/scala/org/apache/spark/ResourceName.scala
Scala
apache-2.0
963
package org.powlab.jeye.tests

import org.powlab.jeye.tests.member._
import org.powlab.jeye.decompile._

package member {

  class MemberTest1Test extends DecompileTestClass(classOf[MemberTest1]) {}

  // TODO: FAIL: generics and initialization in the constructor
  @org.junit.Ignore
  class MemberTest2Test extends DecompileTestClass(classOf[MemberTest2]) {}

  // TODO: FAIL: generics and initialization in the constructor
  @org.junit.Ignore
  class MemberTest3Test extends DecompileTestClass(classOf[MemberTest3]) {}

  // TODO: FAIL: generics and initialization in the constructor
  @org.junit.Ignore
  class MemberTest4Test extends DecompileTestClass(classOf[MemberTest4]) {}
}
powlab/jeye
src/test/scala/org/powlab/jeye/tests/MemberTests.scala
Scala
apache-2.0
770
package org.skycastle.core.map

/**
 * Describes the ecological type of an area of the map.
 */
trait Ecotype {

  // TODO: creatures inhabiting it, used to initialize habitat zone simulation for some area

  // Terrain surface texture (dry & wet?), slope texture
  // topsoil texture / type when dug?
  // regolith texture?
  // bedrock texture?

  // Tree types etc?

  // Nutrient value, ph, moisture?
  // Topsoil thickness?
}
zzorn/skycastle
src/main/scala/org/skycastle/core/map/Ecotype.scala
Scala
gpl-2.0
390
package scommons.client.ui.popup import scommons.client.ui.popup.raw.NativeModal._ import scommons.react._ case class PopupProps(onClose: () => Unit, closable: Boolean = true, focusable: Boolean = true, onOpen: () => Unit = () => (), overlayClass: String = "scommons-modal-overlay", popupClass: String = "scommons-modal") object Popup extends FunctionComponent[PopupProps] { private[popup] var reactModal: ReactClass = <.ReactModal.reactClass protected def render(compProps: Props): ReactElement = { val props = compProps.wrapped <(reactModal)( ^.isOpen := true, ^.shouldCloseOnOverlayClick := props.closable, ^.shouldFocusAfterRender := props.focusable, ^.shouldReturnFocusAfterClose := props.focusable, ^.onAfterOpen := props.onOpen, ^.onRequestClose := props.onClose, ^.overlayClassName := props.overlayClass, ^.modalClassName := props.popupClass )( compProps.children ) } }
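// Illustrative usage sketch: rendering the Popup from another scommons FunctionComponent. The
// parent props, children and handler below are hypothetical, and `^.wrapped := ...` is the usual
// scommons convention for passing case-class props.
//
//   <(Popup())(^.wrapped := PopupProps(onClose = () => println("closed")))(
//     <.div()("Popup content")
//   )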
viktor-podzigun/scommons
ui/src/main/scala/scommons/client/ui/popup/Popup.scala
Scala
apache-2.0
1,072
import scala.tools.partest.BytecodeTest import scala.jdk.CollectionConverters._ package p1 { package p2 { object Singleton { object Singleton { object Singleton } } } } class A1 { class B1 { class C1 } } class A2 { class B2 { class C2 } def f: B2#C2 = null } object Test extends BytecodeTest { import p1.p2._ def nested(c: Class[_]) = s" ${c.getName}: ${c.getDeclaredClasses.toList}" def nprintln(s: String) = println("\\n"+s) def printInner(cname: String): Unit = { val cnode = loadClassNode(cname) println(cnode.innerClasses.asScala.toList.map(i => s"className[${i.name}] outerClassName[${i.outerName}] innerName[${i.innerName}] access[${i.access}]").mkString(" ", "\\n ", "")) } def show(): Unit = { println("getClass on module gives module class") println(" " + Singleton.Singleton.getClass) nprintln("Nested module classes are found through reflection") println(nested(Singleton.Singleton.getClass)) nprintln("Reflection can find direct nested classes (A1-B1-C1)") println(nested(classOf[A1])) println(nested(classOf[A1#B1])) println(nested(classOf[A1#B1#C1])) nprintln("Reflection can find direct nested classes (A2-B2-C2)") println(nested(classOf[A2])) println(nested(classOf[A2#B2])) println(nested(classOf[A2#B2#C2])) nprintln("The InnerClass attribute of a mirror class contains the members of the module class:") printInner("p1.p2.Singleton") // mirror class println("The module members are not in the InnerClass table of the module class (unless referenced):") printInner("p1.p2.Singleton$") nprintln("An outer class has a InnerClass attribute for direct nested classes") printInner("A1") println("A nested class has an InnerClass attribute for itself (and also for its nested classes)") printInner("A1$B1") println("C1 is a nested class, so it has an InnerClass attribute for itself.\\n"+ "Because that attribute leads to an entry for B1 in the constant pool, C1 needs an InnerClass attribute for B1.") printInner("A1$B1$C1") nprintln("Class A2 mentions class C2 in the constant pool (due to method f), therefore it needs an InnerClass attribute for C1") printInner("A2") println("B2") printInner("A2$B2") println("C2") printInner("A2$B2$C2") } }
martijnhoekstra/scala
test/files/jvm/t8582.scala
Scala
apache-2.0
2,369
/** * Copyright 2015 Adrian Hurtado (adrianhurt) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package views.html package object b3 { import play.api.data.{ Field, FormError } import play.twirl.api.Html import play.api.i18n.MessagesProvider import bs._ import bs.ArgsMap.isTrue import play.api.mvc.Call /** * Class with relevant variables for a field to pass it to the helper and field constructor * - withFeedbak: indicates if the feedback icons are allowed * - withLabelFor: indicates if the label's "for" attribute should be shown * - args: list of available arguments for the helper and field constructor */ case class B3FieldInfo(field: Field, withFeedback: Boolean, withLabelFor: Boolean, args: Seq[(Symbol, Any)], override val msgsProv: MessagesProvider) extends BSFieldInfo(field, args, msgsProv) { /* List with every "info" and its corresponding ARIA id. Ex: ("foo_info_0" -> "foo constraint") */ val infos: Seq[(String, String)] = { val feedbackInfosButErrors = BSFieldInfo.feedbackInfosButErrors(argsMap, msgsProv).zipWithIndex.map { case (info, i) => (id + "_info_" + i, info) } if (feedbackInfosButErrors.size > 0) feedbackInfosButErrors else BSFieldInfo.helpInfos(Some(field), argsMap, msgsProv).zipWithIndex.map { case (info, i) => (id + "_info_" + i, info) } } /* List with the errors and infos */ def errorsAndInfos = errors ++ infos /* The optional validation state ("success", "warning" or "error") */ override lazy val status: Option[String] = B3FieldInfo.status(hasErrors, argsMap) /* Each boolean indicate if a any of the corresponding feedback icons should be shown */ val (showIconError, showIconWarning, showIconValid) = { if (!withFeedback) (false, false, false) else if (hasErrors) (isTrue(argsMap, '_showIconOnError), false, false) else if (isTrue(argsMap, '_showIconWarning)) (false, true, false) else (false, false, isTrue(argsMap, '_showIconValid)) } /* Indicates if any of the previous feedback icons should be shown */ def hasFeedback(implicit fc: B3FieldConstructor): Boolean = withFeedback && (fc.withFeedbackIcons || showIconError || showIconWarning || showIconValid) /* The optional validation state for the form-group ("has-success", "has-warning", "has-error") with the optional "has-feedback" */ def statusWithFeedback(implicit fc: B3FieldConstructor): Option[String] = B3FieldInfo.statusWithFeedback(status, hasFeedback) /* Returns the corresponding icon from the validation status */ def feedbackIcon: Option[String] = status.map { _ match { case "error" => "glyphicon-remove" case "warning" => "glyphicon-warning-sign" case "success" => "glyphicon-ok" } } /* ARIA id for the feedback icons (ex: "foo_status") */ def ariaFeedbackId: String = id + "_status" /* List of every ARIA id */ def ariaIds(implicit fc: B3FieldConstructor): Seq[String] = (if (hasFeedback) Seq(ariaFeedbackId) else Nil) ++ infos.map(_._1) ++ errors.map(_._1) /* * Map with the inner args, i.e. those args for the helper itself removing those ones reserved for the field constructor. 
* It adds the ARIA attributes and removes the underscored reserved for the field constructor and the `id and `value ones that are * managed independently. */ def innerArgsMap(implicit fc: B3FieldConstructor): Map[Symbol, Any] = ( (if (ariaIds.size > 0) Seq(Symbol("aria-describedby") -> ariaIds.mkString(" ")) else Nil) ++ (if (hasErrors) Seq(Symbol("aria-invalid") -> "true") else Nil) ++ BSFieldInfo.constraintsArgs(field, msgsProv) ++ Args.inner( Args.remove(args, 'id, 'value).map { case arg if arg._1 == 'placeholder => Args.msg(arg)(msgsProv.messages) case other => other } ) ).toMap } /** * Companion object for class B3FieldInfo */ object B3FieldInfo { /* The optional validation state ("success", "warning" or "error") */ def status(hasErrors: Boolean, argsMap: Map[Symbol, Any]): Option[String] = { if (hasErrors) Some("error") else if (ArgsMap.isNotFalse(argsMap, '_warning) || isTrue(argsMap, '_showIconWarning)) Some("warning") else if (ArgsMap.isNotFalse(argsMap, '_success) || isTrue(argsMap, '_showIconValid)) Some("success") else None } /* The optional validation state for the form-group ("has-success", "has-warning", "has-error") with the optional "has-feedback" */ def statusWithFeedback(status: Option[String], hasFeedback: Boolean): Option[String] = status.map { "has-" + _ + (if (hasFeedback) " has-feedback" else "") } } /** * Class with relevant variables for the global information of a multifield * - fields: list of Fields * - args: list of available arguments for the helper and the form-group */ case class B3MultifieldInfo(fields: Seq[Field], globalArguments: Seq[(Symbol, Any)], fieldsArguments: Seq[(Symbol, Any)], override val msgsProv: MessagesProvider) extends BSMultifieldInfo(fields, globalArguments, fieldsArguments, msgsProv) { /* List with every "info" */ val infos: Seq[String] = { val globalFeedbackInfosButErrors = BSFieldInfo.feedbackInfosButErrors(argsMap, msgsProv) if (globalFeedbackInfosButErrors.size > 0) globalFeedbackInfosButErrors else { val globalHelpInfos = BSFieldInfo.helpInfos(None, argsMap, msgsProv) if (globalHelpInfos.size > 0) globalHelpInfos else { fields.flatMap { field => BSFieldInfo.helpInfos(Some(field), argsMap, msgsProv) } } } } /* List with the errors and infos */ def errorsAndInfos: Seq[String] = errors ++ infos /* The optional validation state ("success", "warning" or "error") */ override lazy val status: Option[String] = B3FieldInfo.status(hasErrors, argsMap) /* The optional validation state for the form-group ("has-success", "has-warning", "has-error") with the optional "has-feedback" */ def statusWithFeedback: Option[String] = B3FieldInfo.statusWithFeedback(status, hasFeedback = isTrue(argsMap, '_hasFeedback)) override lazy val globalArgs = { val withoutHelp = Args.remove(globalArguments, '_help) val withStatus = status.map(s => Args.withDefault(withoutHelp, '_class -> statusWithFeedback)).getOrElse(withoutHelp) withStatus } } /** * Custom FieldConstructor for the library. Every FieldConstructor must extend this functionality. */ trait B3FieldConstructor extends BSFieldConstructor[B3FieldInfo] { /* Define the class of the corresponding form (ex: "form-horizontal", "form-inline", ...) */ val formClass: String val withFeedbackIcons: Boolean } /** * Renders an input form-group using the B3FieldConstructor. 
* - withFeedbak: indicates if the feedback icons are allowed * - withLabelFor: indicates if the label's "for" attribute should be shown * - args: list of available arguments for the helper and field constructor * - inputDef: function that returns a Html from a B3FieldInfo that contains all the information about the field */ def inputFormGroup(field: Field, withFeedback: Boolean, withLabelFor: Boolean, args: Seq[(Symbol, Any)])(inputDef: B3FieldInfo => Html)(implicit fc: B3FieldConstructor, msgsProv: MessagesProvider) = inputFormField(B3FieldInfo(field, withFeedback, withLabelFor, Args.withoutNones(args), msgsProv))(inputDef)(fc) /** * Renders a form-group using the B3FieldConstructor. * - args: list of available arguments for the helper and the form-group * - contentDef: function that returns a Html from a map of arguments */ def freeFormGroup(args: Seq[(Symbol, Any)])(contentDef: Map[Symbol, Any] => Html)(implicit fc: B3FieldConstructor, msgsProv: MessagesProvider) = freeFormField(args)(contentDef)(fc, msgsProv) def multifieldFormGroup(fields: Seq[Field], globalArgs: Seq[(Symbol, Any)], fieldsArgs: Seq[(Symbol, Any)])(contentDef: B3MultifieldInfo => Html)(implicit fc: B3FieldConstructor, msgsProv: MessagesProvider) = multifieldFormField(B3MultifieldInfo(fields, globalArgs, fieldsArgs, msgsProv))(contentDef)(fc) /** * ********************************************************************************************************************************** * SHORTCUT HELPERS * ********************************************************************************************************************************* */ def inputType(inputType: String, field: Field, args: (Symbol, Any)*)(implicit fc: B3FieldConstructor, msgsProv: MessagesProvider) = inputWrapped(inputType, field, args: _*)(html => html)(fc, msgsProv) def text(field: Field, args: (Symbol, Any)*)(implicit fc: B3FieldConstructor, msgsProv: MessagesProvider) = inputType("text", field, args: _*)(fc, msgsProv) def password(field: Field, args: (Symbol, Any)*)(implicit fc: B3FieldConstructor, msgsProv: MessagesProvider) = inputType("password", field.copy(value = Some("")), args: _*)(fc, msgsProv) def color(field: Field, args: (Symbol, Any)*)(implicit fc: B3FieldConstructor, msgsProv: MessagesProvider) = inputType("color", field, args: _*)(fc, msgsProv) def date(field: Field, args: (Symbol, Any)*)(implicit fc: B3FieldConstructor, msgsProv: MessagesProvider) = inputType("date", field, args: _*)(fc, msgsProv) def datetime(field: Field, args: (Symbol, Any)*)(implicit fc: B3FieldConstructor, msgsProv: MessagesProvider) = inputType("datetime", field, args: _*)(fc, msgsProv) def datetimeLocal(field: Field, args: (Symbol, Any)*)(implicit fc: B3FieldConstructor, msgsProv: MessagesProvider) = inputType("datetime-local", field, args: _*)(fc, msgsProv) def email(field: Field, args: (Symbol, Any)*)(implicit fc: B3FieldConstructor, msgsProv: MessagesProvider) = inputType("email", field, args: _*)(fc, msgsProv) def month(field: Field, args: (Symbol, Any)*)(implicit fc: B3FieldConstructor, msgsProv: MessagesProvider) = inputType("month", field, args: _*)(fc, msgsProv) def number(field: Field, args: (Symbol, Any)*)(implicit fc: B3FieldConstructor, msgsProv: MessagesProvider) = inputType("number", field, args: _*)(fc, msgsProv) def range(field: Field, args: (Symbol, Any)*)(implicit fc: B3FieldConstructor, msgsProv: MessagesProvider) = inputType("range", field, args: _*)(fc, msgsProv) def search(field: Field, args: (Symbol, Any)*)(implicit fc: B3FieldConstructor, 
msgsProv: MessagesProvider) = inputType("search", field, args: _*)(fc, msgsProv) def tel(field: Field, args: (Symbol, Any)*)(implicit fc: B3FieldConstructor, msgsProv: MessagesProvider) = inputType("tel", field, args: _*)(fc, msgsProv) def time(field: Field, args: (Symbol, Any)*)(implicit fc: B3FieldConstructor, msgsProv: MessagesProvider) = inputType("time", field, args: _*)(fc, msgsProv) def url(field: Field, args: (Symbol, Any)*)(implicit fc: B3FieldConstructor, msgsProv: MessagesProvider) = inputType("url", field, args: _*)(fc, msgsProv) def week(field: Field, args: (Symbol, Any)*)(implicit fc: B3FieldConstructor, msgsProv: MessagesProvider) = inputType("week", field, args: _*)(fc, msgsProv) def hidden(name: String, value: Any, args: (Symbol, Any)*) = hiddenInput(name, value, args: _*) def hidden(field: Field, args: (Symbol, Any)*) = hiddenInput(name = field.name, value = field.value.orElse(bs.Args.get(args, 'value)), (bs.Args.inner(bs.Args.remove(args, 'value))): _*) def radio(field: Field, args: (Symbol, Any)*)(content: Tuple3[Boolean, Boolean, B3FieldInfo] => Html)(implicit fc: B3FieldConstructor, msgsProv: MessagesProvider) = radioWithContent(field, args: _*)(content)(fc, msgsProv) def radio(field: Field, options: Seq[(String, Any)], args: (Symbol, Any)*)(implicit fc: B3FieldConstructor, msgsProv: MessagesProvider) = radioWithOptions(field, options, args: _*)(fc, msgsProv) def select(field: Field, args: (Symbol, Any)*)(content: Set[String] => Html)(implicit fc: B3FieldConstructor, msgsProv: MessagesProvider) = selectWithContent(field, args: _*)(content)(fc, msgsProv) def select(field: Field, options: Seq[(String, String)], args: (Symbol, Any)*)(implicit fc: B3FieldConstructor, msgsProv: MessagesProvider) = selectWithOptions(field, options, args: _*)(fc, msgsProv) def submit(args: (Symbol, Any)*)(text: => Html)(implicit fc: B3FieldConstructor, msgsProv: MessagesProvider) = buttonType("submit", args: _*)(text)(fc, msgsProv) def reset(args: (Symbol, Any)*)(text: => Html)(implicit fc: B3FieldConstructor, msgsProv: MessagesProvider) = buttonType("reset", args: _*)(text)(fc, msgsProv) def button(args: (Symbol, Any)*)(text: => Html)(implicit fc: B3FieldConstructor, msgsProv: MessagesProvider) = buttonType("button", args: _*)(text)(fc, msgsProv) def static(args: (Symbol, Any)*)(text: => Html)(implicit fc: B3FieldConstructor, msgsProv: MessagesProvider) = staticBasic(args: _*)(text)(fc, msgsProv) def static(label: String, args: (Symbol, Any)*)(text: => Html)(implicit fc: B3FieldConstructor, msgsProv: MessagesProvider) = staticBasic(Args.withDefault(args, '_label -> label): _*)(text)(fc, msgsProv) def static(label: Html, args: (Symbol, Any)*)(text: => Html)(implicit fc: B3FieldConstructor, msgsProv: MessagesProvider) = staticBasic(Args.withDefault(args, '_label -> label): _*)(text)(fc, msgsProv) def free(args: (Symbol, Any)*)(content: => Html)(implicit fc: B3FieldConstructor, msgsProv: MessagesProvider) = freeFormGroup(args)(_ => content)(fc, msgsProv) }
adrianhurt/play-bootstrap3
play26-bootstrap3/module/app/views/b3/package.scala
Scala
apache-2.0
14,165
package com.samstarling.prometheusfinagle.filter import com.samstarling.prometheusfinagle.UnitTest import com.twitter.finagle.http.{Method, Request, Response, Status} import org.specs2.specification.Scope class HttpServiceLabellerSpec extends UnitTest { trait Context extends Scope { val request = Request(Method.Get, "/foo/bar") val response = Response(Status.Ok) val labeller = new HttpServiceLabeller() val labels = labeller.labelsFor(request, response) } "keys" >> { "returns the keys in the correct order" in new Context { labeller.keys ==== Seq("status", "statusClass", "method") } } "labelsFor" >> { "returns the status code of the response" in new Context { labels(0) ==== "200" } "returns the status class of the request" in new Context { labels(1) ==== "2xx" } "returns the method of the request" in new Context { labels(2) ==== "GET" } } }
samstarling/finagle-prometheus
src/test/scala/com/samstarling/prometheusfinagle/filter/HttpServiceLabellerSpec.scala
Scala
mit
942
package com.twitter.scalding import cascading.flow.FlowDef import cascading.pipe.Pipe import cascading.tuple.Fields import cascading.tuple.Tuple import cascading.tuple.TupleEntry import java.io.Serializable import com.twitter.algebird.{Monoid, Ring, Aggregator} import com.twitter.scalding.typed.{Joiner, CoGrouped2, HashCoGrouped2} /*************** ** WARNING: This is a new an experimental API. Expect API breaks. If you want ** to be conservative, use the fields-based, standard scalding DSL. This is attempting ** to be a type-safe DSL for cascading, that is closer to scoobi, spark and scrunch ****************/ /** implicits for the type-safe DSL * import TDsl._ to get the implicit conversions from Grouping/CoGrouping to Pipe, * to get the .toTypedPipe method on standard cascading Pipes. * to get automatic conversion of Mappable[T] to TypedPipe[T] */ object TDsl extends Serializable { implicit def pipeTExtensions(pipe : Pipe) : PipeTExtensions = new PipeTExtensions(pipe) implicit def mappableToTypedPipe[T](mappable : Mappable[T]) (implicit flowDef : FlowDef, mode : Mode, conv : TupleConverter[T]) : TypedPipe[T] = { TypedPipe.from(mappable)(flowDef, mode, conv) } } /* * This is a type-class pattern of adding methods to Pipe relevant to TypedPipe */ class PipeTExtensions(pipe : Pipe) extends Serializable { /* Give you a syntax (you must put the full type on the TypedPipe, else type inference fails * pipe.typed(('in0, 'in1) -> 'out) { tpipe : TypedPipe[(Int,Int)] => * // let's group all: * tpipe.groupBy { x => 1 } * .mapValues { tup => tup._1 + tup._2 } * .sum * .map { _._2 } //discard the key value, which is 1. * } * The above sums all the tuples and returns a TypedPipe[Int] which has the total sum. */ def typed[T,U](fielddef : (Fields, Fields))(fn : TypedPipe[T] => TypedPipe[U]) (implicit conv : TupleConverter[T], setter : TupleSetter[U]) : Pipe = { fn(TypedPipe.from(pipe, fielddef._1)(conv)).toPipe(fielddef._2)(setter) } def toTypedPipe[T](fields : Fields)(implicit conv : TupleConverter[T]) : TypedPipe[T] = { TypedPipe.from[T](pipe, fields)(conv) } def packToTypedPipe[T](fields : Fields)(implicit tp : TuplePacker[T]) : TypedPipe[T] = { val conv = tp.newConverter(fields) toTypedPipe(fields)(conv) } } /** factory methods for TypedPipe */ object TypedPipe extends Serializable { def from[T](pipe : Pipe, fields : Fields)(implicit conv : TupleConverter[T]) : TypedPipe[T] = { new TypedPipe[T](pipe, fields, {te => Some(conv(te))}) } def from[T](mappable : Mappable[T])(implicit flowDef : FlowDef, mode : Mode, conv : TupleConverter[T]) = { new TypedPipe[T](mappable.read, mappable.sourceFields, {te => Some(conv(te))}) } } /** Represents a phase in a distributed computation on an input data source * Wraps a cascading Pipe object, and holds the transformation done up until that point */ class TypedPipe[T] private (inpipe : Pipe, fields : Fields, flatMapFn : (TupleEntry) => Iterable[T]) extends Serializable { import Dsl._ /** This actually runs all the pure map functions in one Cascading Each * This approach is more efficient than untyped scalding because we * don't use TupleConverters/Setters after each map. * The output pipe has a single item CTuple with an object of type T in position 0 */ protected lazy val pipe : Pipe = { inpipe.flatMapTo(fields -> 0)(flatMapFn)(implicitly[TupleConverter[TupleEntry]], SingleSetter) } /** Same as groupAll.aggregate.values */ def aggregate[B,C](agg: Aggregator[T,B,C]): TypedPipe[C] = groupAll.aggregate(agg).values // Implements a cross project. 
The right side should be tiny def cross[U](tiny : TypedPipe[U]) : TypedPipe[(T,U)] = { val crossedPipe = pipe.rename(0 -> 't) .crossWithTiny(tiny.pipe.rename(0 -> 'u)) TypedPipe.from(crossedPipe, ('t,'u))(implicitly[TupleConverter[(T,U)]]) } def flatMap[U](f : T => Iterable[U]) : TypedPipe[U] = { new TypedPipe[U](inpipe, fields, { te => flatMapFn(te).flatMap(f) }) } def map[U](f : T => U) : TypedPipe[U] = { new TypedPipe[U](inpipe, fields, { te => flatMapFn(te).map(f) }) } def filter( f : T => Boolean) : TypedPipe[T] = { new TypedPipe[T](inpipe, fields, { te => flatMapFn(te).filter(f) }) } /** Force a materialization of this pipe prior to the next operation. * This is useful if you filter almost everything before a hashJoin, for instance. */ lazy val forceToDisk: TypedPipe[T] = TypedPipe.from(pipe.forceToDisk, 0)(singleConverter[T]) def group[K,V](implicit ev : <:<[T,(K,V)], ord : Ordering[K]) : Grouped[K,V] = { //If the type of T is not (K,V), then at compile time, this will fail. It uses implicits to do //a compile time check that one type is equivalent to another. If T is not (K,V), we can't //automatically group. We cast because it is safe to do so, and we need to convert to K,V, but //the ev is not needed for the cast. In fact, you can do the cast with ev(t) and it will return //it as (K,V), but the problem is, ev is not serializable. So we do the cast, which due to ev //being present, will always pass. groupBy { (t : T) => t.asInstanceOf[(K,V)]._1 }(ord) .mapValues { (t : T) => t.asInstanceOf[(K,V)]._2 } } lazy val groupAll : Grouped[Unit,T] = groupBy(x => ()).withReducers(1) def groupBy[K](g : (T => K))(implicit ord : Ordering[K]) : Grouped[K,T] = { // TODO due to type erasure, I'm fairly sure this is not using the primitive TupleGetters // Note, lazy val pipe returns a single count tuple with an object of type T in position 0 val gpipe = pipe.mapTo(0 -> ('key, 'value)) { (t : T) => (g(t), t)} Grouped.fromKVPipe(gpipe, ord) } def ++[U >: T](other : TypedPipe[U]) : TypedPipe[U] = { TypedPipe.from(pipe ++ other.pipe, 0)(singleConverter[U]) } /** Reasonably common shortcut for cases of associative/commutative reduction * returns a typed pipe with only one element. */ def sum(implicit plus: Monoid[T]): TypedPipe[T] = groupAll.sum.values def toPipe(fieldNames : Fields)(implicit setter : TupleSetter[T]) : Pipe = { val conv = implicitly[TupleConverter[TupleEntry]] inpipe.flatMapTo(fields -> fieldNames)(flatMapFn)(conv, setter) } def unpackToPipe(fieldNames : Fields)(implicit up : TupleUnpacker[T]) : Pipe = { val setter = up.newSetter(fieldNames) toPipe(fieldNames)(setter) } /** A convenience method equivalent to toPipe(fieldNames).write(dest) * @return a pipe equivalent to the current pipe. 
*/ def write(fieldNames : Fields, dest : Source) (implicit conv : TupleConverter[T], setter : TupleSetter[T], flowDef : FlowDef, mode : Mode) : TypedPipe[T] = { val pipe = toPipe(fieldNames)(setter) pipe.write(dest) // Now, we have written out, so let's start from here with the new pipe: // If we don't do this, Cascading's flow planner can't see what's happening TypedPipe.from(pipe, fieldNames)(conv) } def write(dest: Source) (implicit conv : TupleConverter[T], setter : TupleSetter[T], flowDef : FlowDef, mode : Mode) : TypedPipe[T] = { write(Dsl.intFields(0 until setter.arity), dest)(conv,setter,flowDef,mode) } def keys[K](implicit ev : <:<[T,(K,_)]) : TypedPipe[K] = map { _._1 } // swap the keys with the values def swap[K,V](implicit ev: <:<[T,(K,V)]) : TypedPipe[(V,K)] = map { tup => val (k,v) = tup.asInstanceOf[(K,V)] (v,k) } def values[V](implicit ev : <:<[T,(_,V)]) : TypedPipe[V] = map { _._2 } } class LtOrdering[T](ltfn : (T,T) => Boolean) extends Ordering[T] with Serializable { override def compare(left : T, right : T) : Int = { if(ltfn(left,right)) { -1 } else { if (ltfn(right, left)) 1 else 0 } } // This is faster than calling compare, which may result in two calls to ltfn override def lt(x : T, y : T) = ltfn(x,y) } class MappedOrdering[B,T](fn : (T) => B, ord : Ordering[B]) extends Ordering[T] with Serializable { override def compare(left : T, right : T) : Int = ord.compare(fn(left), fn(right)) } /** Represents sharded lists of items of type T */ trait KeyedList[K,T] { // These are the fundamental operations def toTypedPipe : TypedPipe[(K,T)] /** Operate on a Stream[T] of all the values for each key at one time. * Avoid accumulating the whole list in memory if you can. Prefer reduce. */ def mapValueStream[V](smfn : Iterator[T] => Iterator[V]) : KeyedList[K,V] /////////// /// The below are all implemented in terms of the above: /////////// /** Use Algebird Aggregator to do the reduction */ def aggregate[B,C](agg: Aggregator[T,B,C]): TypedPipe[(K,C)] = mapValues(agg.prepare _) .reduce(agg.reduce _) .map { kv => (kv._1, agg.present(kv._2)) } /** This is a special case of mapValueStream, but can be optimized because it doesn't need * all the values for a given key at once. An unoptimized implementation is: * mapValueStream { _.map { fn } } * but for Grouped we can avoid resorting to mapValueStream */ def mapValues[V](fn : T => V) : KeyedList[K,V] = mapValueStream { _.map { fn } } /** reduce with fn which must be associative and commutative. * Like the above this can be optimized in some Grouped cases. */ def reduce(fn : (T,T) => T) : TypedPipe[(K,T)] = reduceLeft(fn) // The rest of these methods are derived from above def sum(implicit monoid : Monoid[T]) = reduce(monoid.plus) def product(implicit ring : Ring[T]) = reduce(ring.times) def count(fn : T => Boolean) : TypedPipe[(K,Long)] = { mapValues { t => if (fn(t)) 1L else 0L }.sum } def forall(fn : T => Boolean) : TypedPipe[(K,Boolean)] = { mapValues { fn(_) }.product } def foldLeft[B](z : B)(fn : (B,T) => B) : TypedPipe[(K,B)] = { mapValueStream { stream => Iterator(stream.foldLeft(z)(fn)) } .toTypedPipe } def scanLeft[B](z : B)(fn : (B,T) => B) : KeyedList[K,B] = { // Get the implicit conversion for scala 2.8 to have scanLeft on an iterator: import Dsl._ mapValueStream { _.scanLeft(z)(fn) } } // Similar to reduce but always on the reduce-side (never optimized to mapside), // and named for the scala function. fn need not be associative and/or commutative. // Makes sense when you want to reduce, but in a particular sorted order. 
// the old value comes in on the left. def reduceLeft( fn : (T,T) => T) : TypedPipe[(K,T)] = { mapValueStream[T] { stream => if (stream.isEmpty) { // We have to guard this case, as cascading seems to give empty streams on occasions Iterator.empty } else { Iterator(stream.reduceLeft(fn)) } } .toTypedPipe } def size : TypedPipe[(K,Long)] = mapValues { x => 1L }.sum def toList : TypedPipe[(K,List[T])] = mapValues { List(_) }.sum def toSet : TypedPipe[(K,Set[T])] = mapValues { Set(_) }.sum def max[B >: T](implicit cmp : Ordering[B]) : TypedPipe[(K,T)] = { asInstanceOf[KeyedList[K,B]].reduce(cmp.max).asInstanceOf[TypedPipe[(K,T)]] } def maxBy[B](fn : T => B)(implicit cmp : Ordering[B]) : TypedPipe[(K,T)] = { reduce((new MappedOrdering(fn, cmp)).max) } def min[B >: T](implicit cmp : Ordering[B]) : TypedPipe[(K,T)] = { asInstanceOf[KeyedList[K,B]].reduce(cmp.min).asInstanceOf[TypedPipe[(K,T)]] } def minBy[B](fn : T => B)(implicit cmp : Ordering[B]) : TypedPipe[(K,T)] = { reduce((new MappedOrdering(fn, cmp)).min) } def keys : TypedPipe[K] = toTypedPipe.keys def values : TypedPipe[T] = toTypedPipe.values } object Grouped { // Make a new Grouped from a pipe with two fields: 'key, 'value def fromKVPipe[K,V](pipe : Pipe, ordering : Ordering[K]) (implicit conv : TupleConverter[V]) : Grouped[K,V] = { new Grouped[K,V](pipe, ordering, None, None, -1, false) } def valueSorting[T](implicit ord : Ordering[T]) : Fields = sorting("value", ord) def sorting[T](key : String, ord : Ordering[T]) : Fields = { val f = new Fields(key) f.setComparator(key, ord) f } } /** Represents a grouping which is the transition from map to reduce phase in hadoop. * Grouping is on a key of type K by ordering Ordering[K]. */ class Grouped[K,T] private (private[scalding] val pipe : Pipe, val ordering : Ordering[K], streamMapFn : Option[(Iterator[Tuple]) => Iterator[T]], private[scalding] val valueSort : Option[(Fields,Boolean)], val reducers : Int = -1, val toReducers: Boolean = false) extends KeyedList[K,T] with Serializable { import Dsl._ private[scalding] val groupKey = Grouped.sorting("key", ordering) protected def sortIfNeeded(gb : GroupBuilder) : GroupBuilder = { valueSort.map { fb => val gbSorted = gb.sortBy(fb._1) if (fb._2) gbSorted.reverse else gbSorted }.getOrElse(gb) } def forceToReducers: Grouped[K,T] = new Grouped(pipe, ordering, streamMapFn, valueSort, reducers, true) def withSortOrdering(so : Ordering[T]) : Grouped[K,T] = { // Set the sorting with unreversed assert(valueSort.isEmpty, "Can only call withSortOrdering once") assert(streamMapFn.isEmpty, "Cannot sort after a mapValueStream") val newValueSort = Some(Grouped.valueSorting(so)).map { f => (f,false) } new Grouped(pipe, ordering, None, newValueSort, reducers, toReducers) } def withReducers(red : Int) : Grouped[K,T] = { new Grouped(pipe, ordering, streamMapFn, valueSort, red, toReducers) } def sortBy[B](fn : (T) => B)(implicit ord : Ordering[B]) : Grouped[K,T] = { withSortOrdering(new MappedOrdering(fn, ord)) } // Sorts the values for each key def sorted[B >: T](implicit ord : Ordering[B]) : Grouped[K,T] = { // This cast is okay, because we are using the compare function // which is covariant, but the max/min functions are not, and that // breaks covariance. 
withSortOrdering(ord.asInstanceOf[Ordering[T]]) } def sortWith(lt : (T,T) => Boolean) : Grouped[K,T] = { withSortOrdering(new LtOrdering(lt)) } def reverse : Grouped[K,T] = { assert(streamMapFn.isEmpty, "Cannot reverse after mapValueStream") val newValueSort = valueSort.map { f => (f._1, !(f._2)) } new Grouped(pipe, ordering, None, newValueSort, reducers, toReducers) } protected def operate[T1](fn : GroupBuilder => GroupBuilder) : TypedPipe[(K,T1)] = { val reducedPipe = pipe.groupBy(groupKey) { gb => val out = fn(sortIfNeeded(gb)).reducers(reducers) if(toReducers) out.forceToReducers else out } TypedPipe.from(reducedPipe, ('key, 'value))(implicitly[TupleConverter[(K,T1)]]) } // Here are the required KeyedList methods: override lazy val toTypedPipe : TypedPipe[(K,T)] = { if (streamMapFn.isEmpty && valueSort.isEmpty && (reducers == -1)) { // There was no reduce AND no mapValueStream, no need to groupBy: TypedPipe.from(pipe, ('key, 'value))(implicitly[TupleConverter[(K,T)]]) } else { //Actually execute the mapValueStream: streamMapFn.map { fn => operate[T] { _.mapStream[Tuple,T]('value -> 'value)(fn)(CTupleConverter,SingleSetter) } }.getOrElse { // This case happens when someone does .groupAll.sortBy { }.write // so there is no operation, they are just doing a sorted write operate[T] { identity _ } } } } override def mapValues[V](fn : T => V) : Grouped[K,V] = { if(valueSort.isEmpty && streamMapFn.isEmpty) { // We have no sort defined yet, so we should operate on the pipe so we can sort by V after // if we need to: new Grouped(pipe.map('value -> 'value)(fn)(singleConverter[T], SingleSetter), ordering, None, None, reducers, toReducers) } else { // There is a sorting, which invalidates map-side optimizations, // so we might as well use mapValueStream mapValueStream { iter => iter.map { fn } } } } // If there is no ordering, this operation is pushed map-side override def reduce(fn : (T,T) => T) : TypedPipe[(K,T)] = { if(valueSort.isEmpty && streamMapFn.isEmpty) { // We can optimize mapside: operate[T] { _.reduce[T]('value -> 'value)(fn)(SingleSetter, singleConverter[T]) } } else { // Just fall back to the mapValueStream based implementation: reduceLeft(fn) } } private[scalding] lazy val streamMapping : (Iterator[Tuple]) => Iterator[T] = { streamMapFn.getOrElse { // Set up the initial stream mapping: {(ti : Iterator[Tuple]) => ti.map { _.getObject(0).asInstanceOf[T] }} } } override def mapValueStream[V](nmf : Iterator[T] => Iterator[V]) : Grouped[K,V] = { val newStreamMapFn = Some(streamMapping.andThen(nmf)) new Grouped[K,V](pipe, ordering, newStreamMapFn, valueSort, reducers, toReducers) } // SMALLER PIPE ALWAYS ON THE RIGHT!!!!!!! def cogroup[W,R](smaller: Grouped[K,W])(joiner: (K, Iterator[T], Iterable[W]) => Iterator[R]) : KeyedList[K,R] = new CoGrouped2[K,T,W,R](this, smaller, joiner) def join[W](smaller : Grouped[K,W]) = cogroup(smaller)(Joiner.inner2) def leftJoin[W](smaller : Grouped[K,W]) = cogroup(smaller)(Joiner.left2) def rightJoin[W](smaller : Grouped[K,W]) = cogroup(smaller)(Joiner.right2) def outerJoin[W](smaller : Grouped[K,W]) = cogroup(smaller)(Joiner.outer2) /** WARNING This behaves semantically very differently than cogroup. * this is because we handle (K,T) tuples on the left as we see them. * the iterator on the right is over all elements with a matching key K, and it may be empty * if there are no values for this key K. 
* (because you haven't actually cogrouped, but only read the right hand side into a hashtable) */ def hashCogroup[W,R](smaller: Grouped[K,W])(joiner: (K, T, Iterable[W]) => Iterator[R]) : TypedPipe[(K,R)] = (new HashCoGrouped2[K,T,W,R](this, smaller, joiner)).toTypedPipe def hashJoin[W](smaller : Grouped[K,W]) : TypedPipe[(K,(T,W))] = hashCogroup(smaller)(Joiner.hashInner2) def hashLeftJoin[W](smaller : Grouped[K,W]) : TypedPipe[(K,(T,Option[W]))] = hashCogroup(smaller)(Joiner.hashLeft2) // TODO: implement blockJoin }
AoJ/scalding
src/main/scala/com/twitter/scalding/TypedPipe.scala
Scala
apache-2.0
18,181
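The record above is easier to follow with a concrete caller. The sketch below is not part of the corpus: it wraps the usage example from PipeTExtensions' own comment in a fields-based Job. The paths and the field names 'in0, 'in1, 'total are invented, and the standard scalding Job/Tsv plumbing is assumed.

import com.twitter.scalding._
import com.twitter.scalding.TDsl._

class SumPairsJob(args: Args) extends Job(args) {
  // Mirrors the usage shown in PipeTExtensions' doc comment: sum all (in0, in1)
  // pairs into a single total and write it back out.
  Tsv(args("input"), ('in0, 'in1)).read
    .typed(('in0, 'in1) -> 'total) { tpipe: TypedPipe[(Int, Int)] =>
      tpipe.groupBy { x => 1 }                  // group everything under one key
        .mapValues { tup => tup._1 + tup._2 }   // add the two columns
        .sum                                    // uses algebird's Monoid[Int]
        .map { _._2 }                           // discard the dummy key
    }
    .write(Tsv(args("output")))
}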
package sledtr.source

import scala.collection.mutable.Map
import sledtr.shelf._
import sledtr.section._
import sledtr.MyPreDef._
import sledtr.plugin._
import sledtr.actors._
import sledtr._
import scala.xml._

class Ch2 private(chapter: Chapter, map: ConfigMap) extends Source(chapter, map) {
  val params: Map[String, PluginParam] = Map(
    "url" -> StringParam(None),
    "keyword" -> StringParam(None)
  )
  initParams()

  val url = getStringParam("url")
  val keyword = getStringParam("keyword")

  lazy val down_load = new HtmlDownload(url + "subject.txt", Environ.srcDir + "/" + url.p)

  override def readSource(): Unit = HtmlDownloadManager.addTask(down_load)

  override def createSection(): Unit = {
    var url_map: Map[String, String] = Map()
    down_load.source.split("\n").foreach { s =>
      val (title, surl) = getTitleUrl(s)
      if (keyword.r.findFirstIn(title) != None) url_map(surl) = title
    }
    val url_list = url_map.toList.map { case (a, b) => a }
    val section = chapter.getSection(keyword, url_list)
    (section: @unchecked) match {
      case s: Ch2Section => s.url_map = url_map
    }
    sections += section
  }

  def getTitleUrl(line: String): Tuple2[String, String] = {
    var r = """ \(\d+\)""".r
    val a = line.split("<>")
    (r.replaceAllIn(a(1), ""), url + "dat/" + a(0))
  }
}

object Ch2 extends SourceCompanion {
  def apply(chapter: Chapter, map: ConfigMap): Source = {
    new Ch2(chapter, map)
  }
}
K2Da/sledtr
src/main/scala/sledtr/source/Ch2.scala
Scala
gpl-3.0
1,481
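The parsing step in getTitleUrl above is clearer with a sample line. The standalone sketch below is not the class from the record; it re-implements just that step, and the subject.txt line format ("<dat id><>title (reply count)") is inferred from the parsing code, not stated in the source.

object SubjectLineSketch {
  // boardUrl stands in for the class's `url` field; both arguments are illustrative.
  def getTitleUrl(boardUrl: String, line: String): (String, String) = {
    val r = """ \(\d+\)""".r          // strips a trailing " (123)" reply count
    val a = line.split("<>")
    (r.replaceAllIn(a(1), ""), boardUrl + "dat/" + a(0))
  }

  def main(args: Array[String]): Unit = {
    // hypothetical subject.txt line
    println(getTitleUrl("http://example.com/board/", "1234567890.dat<>Sample thread (123)"))
    // -> (Sample thread,http://example.com/board/dat/1234567890.dat)
  }
}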
/* Stratagem is a model checker for transition systems described using rewriting rules and strategies. Copyright (C) 2013 - SMV@Geneva University. Program written by Edmundo Lopez Bobeda <edmundo [at] lopezbobeda.net>. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package ch.unige.cui.smv.stratagem.adt import org.scalatest.FlatSpec class ADTTest extends FlatSpec { "An ADT" should "not allow to declare the same variable twice" in { intercept[IllegalArgumentException] { val sign = (new Signature) .withSort("nat") .withSort("nznat", "nat") .withSort("zero", "nat") .withGenerator("0", "zero") .withGenerator("suc", "nznat", "nat") val adt = (new ADT("myADT", sign)) .declareVariable("x", "nat") .declareVariable("x", "nat") } } "An ADT" should "not allow to declare the same variable twice, even if the sorts are different" in { intercept[IllegalArgumentException] { val sign = (new Signature) .withSort("nat") .withSort("nznat", "nat") .withSort("zero", "nat") .withGenerator("0", "zero") .withGenerator("suc", "nznat", "nat") val adt = (new ADT("myADT", sign)) .declareVariable("x", "nat") .declareVariable("x", "nznat") } } "An ADT" should "not allow to declare a variable when the sort is not in the signature" in { intercept[IllegalArgumentException] { val sign = (new Signature) .withSort("nat") .withSort("nznat", "nat") .withSort("zero", "nat") .withGenerator("0", "zero") .withGenerator("suc", "nznat", "nat") val adt = (new ADT("myADT", sign)) .declareVariable("x", "nat") .declareVariable("x", "bool") } } "An ADT" should "allow to model the philosopher's problem" in { val signature = (new Signature) .withSort("ph") .withSort("state") .withSort("fork") .withGenerator("eating", "state") .withGenerator("thinking", "state") .withGenerator("waiting", "state") .withGenerator("waitingForLeftFork", "state") .withGenerator("waitingForRightFork", "state") .withGenerator("forkUsed", "fork") .withGenerator("forkFree", "fork") .withGenerator("emptytable", "ph") .withGenerator("philo", "ph", "state", "fork", "ph") val adt = new ADT("philoModel", signature) // definitions to simplify the reading of terms. 
def eating = adt.term("eating") def thinking = adt.term("thinking") def waiting = adt.term("waiting") def waitingLF = adt.term("waitingForLeftFork") def waitingRF = adt.term("waitingForRightFork") def forkUsed = adt.term("forkUsed") def forkFree = adt.term("forkFree") def emptytable = adt.term("emptytable") def philo(state: ATerm, fork: ATerm, ph: ATerm) = adt.term("philo", state, fork, ph) // scalastyle:off assert(philo(thinking, forkFree, philo(thinking, forkFree, philo(thinking, forkFree, emptytable))).toString == "philo(thinking, forkFree, philo(thinking, forkFree, philo(thinking, forkFree, emptytable)))") // scalastyle:on } "And ADT" should "not allow to build term that contains terms from another ADT" in { val signature = (new Signature) .withSort("ph") .withSort("state") .withSort("fork") .withGenerator("eating", "state") .withGenerator("thinking", "state") .withGenerator("waiting", "state") .withGenerator("waitingForLeftFork", "state") .withGenerator("waitingForRightFork", "state") .withGenerator("forkUsed", "fork") .withGenerator("forkFree", "fork") .withGenerator("emptytable", "ph") .withGenerator("philo", "ph", "state", "fork", "ph") val adt = new ADT("philoModel", signature) // definitions to simplify the reading of terms. def eating = adt.term("eating") def thinking = adt.term("thinking") def waiting = adt.term("waiting") def waitingLF = adt.term("waitingForLeftFork") def waitingRF = adt.term("waitingForRightFork") def forkUsed = adt.term("forkUsed") def forkFree = adt.term("forkFree") def emptytable = adt.term("emptytable") def philo(state: ATerm, fork: ATerm, ph: ATerm) = adt.term("philo", state, fork, ph) val strangeADT = new ADT("philoModel", signature) val thrown = intercept[IllegalArgumentException] { philo(thinking, forkFree, philo(thinking, forkFree, philo(thinking, forkFree, strangeADT.term("emptytable")))) } assert(thrown.getMessage().endsWith("It is not allowed to mix adts")) } }
didierbuchs/oldstratagem
src/test/scala/ch/unige/cui/smv/stratagem/adt/ADTTest.scala
Scala
gpl-2.0
5,222
package web.base

import com.sun.net.httpserver.HttpExchange
import scala.collection.mutable.HashMap

/**
 * An HTTP server that routes requests to a PartialFunction and responds with its result.
 *
 * @author Tibor Botos
 */
abstract class SimpleHttpServer extends SimpleHttpServerBase {
  def handle(exchange: HttpExchange) = {
    implicit val e = exchange
    val resp = (get orElse reportProblem)(exchange.getRequestURI().getPath())
    if (resp == None) respond(exchange, 404) else respond(exchange, 200, resp.toString)
  }

  def reportProblem(implicit exchange: HttpExchange): PartialFunction[Any, Any] = {
    case _ => None
  }

  def get(implicit exchange: HttpExchange): PartialFunction[Any, Any]
}
tiborbotos/domino
domino-testweb/src/main/scala/web/base/SimpleHttpServer.scala
Scala
lgpl-3.0
724
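A minimal concrete subclass (illustrative only, not part of the record above) showing the PartialFunction routing that the abstract get method is meant to supply; it relies on SimpleHttpServerBase, which lives in the same repository but is not shown here, for respond and the actual server wiring. The routes are invented.

import com.sun.net.httpserver.HttpExchange
import web.base.SimpleHttpServer

class PingServer extends SimpleHttpServer {
  // Paths matched here are answered with 200 and the returned value's toString;
  // anything else falls through to reportProblem and yields a 404.
  def get(implicit exchange: HttpExchange): PartialFunction[Any, Any] = {
    case "/ping"  => "pong"
    case "/hello" => "hello, world"
  }
}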
package mesosphere.marathon.core.task.termination.impl

import akka.Done
import akka.actor.ActorRef
import mesosphere.marathon.core.task.Task
import mesosphere.marathon.core.task.termination.{ TaskKillReason, TaskKillService }
import org.slf4j.LoggerFactory

import scala.concurrent.{ Future, Promise }
import scala.collection.immutable.Seq

private[termination] class TaskKillServiceDelegate(actorRef: ActorRef) extends TaskKillService {
  import TaskKillServiceDelegate.log
  import TaskKillServiceActor._

  override def killTasks(tasks: Iterable[Task], reason: TaskKillReason): Future[Done] = {
    log.info(
      s"Killing ${tasks.size} tasks for reason: $reason (ids: {} ...)",
      tasks.take(3).map(_.taskId).mkString(","))

    val promise = Promise[Done]
    actorRef ! KillTasks(tasks, promise)
    promise.future
  }

  override def killTask(task: Task, reason: TaskKillReason): Future[Done] = {
    killTasks(Seq(task), reason)
  }

  override def killUnknownTask(taskId: Task.Id, reason: TaskKillReason): Unit = {
    log.info(s"Killing unknown task for reason: $reason (id: $taskId)")
    actorRef ! KillUnknownTaskById(taskId)
  }
}

object TaskKillServiceDelegate {
  private[impl] val log = LoggerFactory.getLogger(getClass)
}
timcharper/marathon
src/main/scala/mesosphere/marathon/core/task/termination/impl/TaskKillServiceDelegate.scala
Scala
apache-2.0
1,247
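The delegate above completes its Future by handing a Promise to TaskKillServiceActor, which is not part of this record. The generic sketch below (plain akka, not marathon code) shows that pattern in isolation: the caller returns promise.future immediately, and the receiving actor completes the promise once the work is done. All names here are made up.

import akka.Done
import akka.actor.{ Actor, ActorSystem, Props }
import scala.concurrent.{ Future, Promise }

object PromiseDelegateSketch {
  final case class DoWork(payload: String, promise: Promise[Done])

  class Worker extends Actor {
    def receive: Receive = {
      case DoWork(payload, promise) =>
        // ... perform the actual work for `payload` here ...
        promise.success(Done) // completes the Future the caller is already holding
    }
  }

  def submit(system: ActorSystem, payload: String): Future[Done] = {
    val promise = Promise[Done]()
    system.actorOf(Props[Worker]) ! DoWork(payload, promise)
    promise.future
  }
}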
package com.tuvistavie.scalog
package models

sealed trait UserInput

case class Import(module: String) extends UserInput

case class Query(formula: Formula, seenRules: List[Rule] = List.empty) extends UserInput {
  def atoms: List[Atom] = formula.atoms
}
tuvistavie/scalog
src/main/scala/com/tuvistavie/scalog/models/Query.scala
Scala
mit
256
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spot.utilities import org.apache.spot.testutils.TestingSparkContextFlatSpec import org.apache.spot.utilities import org.apache.spot.utilities.DomainProcessor._ import org.scalatest.Matchers class DomainProcessorTest extends TestingSparkContextFlatSpec with Matchers { val countryCodesSet = utilities.CountryCodes.CountryCodes "extractDomain" should "return domain when provided a url with top-level domain and country code" in { val url = "fatosdesconhecidos.com.br" val result = DomainProcessor.extractDomain(url) result shouldEqual ("fatosdesconhecidos") } it should "return domain when provided a short url with no top-level domain but only a country code" in { val url = "panasonic.jp" val result = DomainProcessor.extractDomain(url) result shouldEqual ("panasonic") } it should "return domain when provided a long url with no top-level domain but only a country code" in { val url = "get.your.best.electronic.at.panasonic.jp" val result = DomainProcessor.extractDomain(url) result shouldEqual ("panasonic") } it should "return domain when provided a short url with a top-level domain no country code" in { val url = "forrealz.net" val result = DomainProcessor.extractDomain(url) result shouldEqual ("forrealz") } it should "return domain when provided a long url with a top-level domain no country code" in { val url = "wow.its.really.long.super.long.yeah.so.long.long.long.long.forrealz.net" val result = DomainProcessor.extractDomain(url) result shouldEqual ("forrealz") } it should "should return \"None\" when provided an address" in { val url = "123.103.104.10.in-addr.arpa" val result = DomainProcessor.extractDomain(url) result shouldEqual ("None") } it should "return \"None\" when provided a short url with a bad top-level domain / country code" in { val url = "panasonic.c" val result = DomainProcessor.extractDomain(url) result shouldEqual ("None") } "extractDomainInfo" should "handle an in-addr.arpa url" in { val url = "123.103.104.10.in-addr.arpa" val topDomains = sparkSession.sparkContext.broadcast(TopDomains.TopDomains) val userDomain = "intel" // case class DerivedFields(topDomain: String, subdomainLength: Double, subdomainEntropy: Double, numPeriods: Double) val result = extractDomainInfo(url, topDomains, userDomain) result shouldBe DomainInfo(domain = "None", topDomain = 0, subdomain = "None", subdomainLength = 0, subdomainEntropy = 0, numPeriods = 6) } it should "handle an Alexa top 1M domain with a subdomain, top-level domain name and country code" in { val url = "services.amazon.com.mx" val topDomains = sparkSession.sparkContext.broadcast(TopDomains.TopDomains) val userDomain = "intel" val result = extractDomainInfo(url, topDomains, userDomain) result shouldBe DomainInfo(domain = "amazon", topDomain = 1, 
subdomain = "services", subdomainLength = 8, subdomainEntropy = 2.5, numPeriods = 4) } it should "handle an Alexa top 1M domain with a top-level domain name and country code but no subdomain" in { val url = "amazon.com.mx" val countryCodes = sparkSession.sparkContext.broadcast(countryCodesSet) val topDomains = sparkSession.sparkContext.broadcast(TopDomains.TopDomains) val userDomain = "intel" val result = extractDomainInfo(url, topDomains, userDomain) result shouldBe DomainInfo(domain = "amazon", subdomain = "None", topDomain = 1, subdomainLength = 0, subdomainEntropy = 0, numPeriods = 3) } it should "handle an Alexa top 1M domain with a subdomain and top-level domain name but no country code" in { val url = "services.amazon.com" val countryCodes = sparkSession.sparkContext.broadcast(countryCodesSet) val topDomains = sparkSession.sparkContext.broadcast(TopDomains.TopDomains) val userDomain = "intel" val result = extractDomainInfo(url, topDomains, userDomain) result shouldBe DomainInfo(domain = "amazon", subdomain = "services", topDomain = 1, subdomainLength = 8, subdomainEntropy = 2.5, numPeriods = 3) } it should "handle an Alexa top 1M domain with no subdomain or country code" in { val url = "amazon.com" val countryCodes = sparkSession.sparkContext.broadcast(countryCodesSet) val topDomains = sparkSession.sparkContext.broadcast(TopDomains.TopDomains) val userDomain = "intel" val result = extractDomainInfo(url, topDomains, userDomain) result shouldBe DomainInfo(domain = "amazon", subdomain = "None", topDomain = 1, subdomainLength = 0, subdomainEntropy = 0, numPeriods = 2) } it should "not identify the domain as the users domain when both are empty strings" in { val url = "ab..com" val countryCodes = sparkSession.sparkContext.broadcast(countryCodesSet) val topDomains = sparkSession.sparkContext.broadcast(TopDomains.TopDomains) val userDomain = "" val result = extractDomainInfo(url, topDomains, userDomain) result shouldBe DomainInfo(domain = "", subdomain = "ab", topDomain = 0, subdomainLength = 2, subdomainEntropy = 1, numPeriods = 3) } }
brandon-edwards/incubator-spot
spot-ml/src/test/scala/org/apache/spot/utilities/DomainProcessorTest.scala
Scala
apache-2.0
5,964
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.berkeley.cs.amplab.spark.indexedrdd import java.util.UUID import org.scalacheck.Arbitrary import org.scalacheck.Gen import org.scalatest.FunSuite import org.scalatest.Matchers import org.scalatest.prop.GeneratorDrivenPropertyChecks class KeySerializerSuite extends FunSuite with GeneratorDrivenPropertyChecks with Matchers { test("long") { val ser = new LongSerializer forAll { (a: Long) => ser.fromBytes(ser.toBytes(a)) should be === a } } test("string") { val ser = new StringSerializer forAll { (a: String) => ser.fromBytes(ser.toBytes(a)) should be === a } forAll { (a: String, b: String) => whenever (a != b) { val aSer = ser.toBytes(a) val bSer = ser.toBytes(b) assert(!aSer.startsWith(bSer)) assert(!bSer.startsWith(aSer)) } } } test("short") { val ser = new ShortSerializer forAll { (a: Short) => ser.fromBytes(ser.toBytes(a)) should be === a } } test("int") { val ser = new IntSerializer forAll { (a: Int) => ser.fromBytes(ser.toBytes(a)) should be === a } } implicit val arbUUID: Arbitrary[UUID] = Arbitrary(Gen.uuid) test("UUID") { val ser = new UUIDSerializer forAll { (a: UUID) => ser.fromBytes(ser.toBytes(a)) should be === a } } test("bigint") { val ser = new BigIntSerializer forAll { (a: BigInt) => ser.fromBytes(ser.toBytes(a)) should be === a } forAll { (a: BigInt, b: BigInt) => whenever (a != b) { val aSer = ser.toBytes(a) val bSer = ser.toBytes(b) assert(!aSer.startsWith(bSer)) assert(!bSer.startsWith(aSer)) } } } def tuple2Test[A: Arbitrary, B: Arbitrary]( aSer: KeySerializer[A], bSer: KeySerializer[B]): Unit = { val ser = new Tuple2Serializer[A, B]()(aSer, bSer) forAll { (a: A, b: B) => ser.fromBytes(ser.toBytes(Tuple2(a, b))) should be === (a, b) } forAll { (a: (A, B), b: (A, B)) => whenever (a != b) { val aSer = ser.toBytes(a) val bSer = ser.toBytes(b) assert(!aSer.startsWith(bSer)) assert(!bSer.startsWith(aSer)) } } } test("Tuple2") { val stringSer = new StringSerializer val longSer = new LongSerializer val intSer = new IntSerializer val shortSer = new ShortSerializer val bigintSer = new BigIntSerializer val uuidSer = new UUIDSerializer tuple2Test[Long, Long](longSer, longSer) tuple2Test[String, Long](stringSer, longSer) tuple2Test[Long, String](longSer, stringSer) tuple2Test[String, String](stringSer, stringSer) tuple2Test[Short, Short](shortSer, shortSer) tuple2Test[Short, Int](shortSer, intSer) tuple2Test[Int, Int](intSer, intSer) tuple2Test[Int, BigInt](intSer, bigintSer) tuple2Test[BigInt, BigInt](bigintSer, bigintSer) tuple2Test[Int, UUID](intSer, uuidSer) tuple2Test[UUID, UUID](uuidSer, uuidSer) } }
amplab/spark-indexedrdd
src/test/scala/edu/berkeley/cs/amplab/spark/indexedrdd/KeySerializerSuite.scala
Scala
apache-2.0
3,781
package com.twitter.finatra.http.integration.doeverything.main.domain

import com.twitter.finatra.request.Header

case class CreateUserRequest(
  @Header requestId: String,
  name: String,
  age: Int)
joecwu/finatra
http/src/test/scala/com/twitter/finatra/http/integration/doeverything/main/domain/CreateUserRequest.scala
Scala
apache-2.0
202
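A small controller sketch (illustrative, not from the finatra test suite) showing how a custom request case class like the one above is used: finatra fills requestId from a request header via the @Header annotation and the remaining fields from the JSON body. The route and response body are invented.

import com.twitter.finatra.http.Controller

class CreateUserController extends Controller {
  post("/users") { request: CreateUserRequest =>
    // echo back the parsed fields; a real handler would persist the user
    s"created ${request.name} (age ${request.age}) for request ${request.requestId}"
  }
}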
package at.logic.gapt.formats.shlk_parsing import at.logic.gapt.formats.simple.TypeParsers import at.logic.gapt.proofs.HOLSequent import at.logic.gapt.proofs.lk.solve import scala.util.parsing.combinator._ import scala.util.matching.Regex import java.io.InputStreamReader import at.logic.gapt.expr.schema._ import at.logic.gapt.proofs.lk.base._ import collection.mutable.{ Map => MMap } import at.logic.gapt.proofs.shlk._ import scala.Tuple4 import at.logic.gapt.expr._ import scala.Tuple2 import at.logic.gapt.expr.StringSymbol import at.logic.gapt.expr.{ To, FunctionType, Ti } import at.logic.gapt.proofs.lk._ object sFOParser { val nLine = sys.props( "line.separator" ) def parseProofs( input: InputStreamReader ): List[( String, LKProof )] = { // ("p",parseProof(input, "root"))::Nil val m = sFOParser.parseProof( input ) m.foldLeft( List.empty[( String, LKProof )] )( ( res, pair ) => ( pair._1, pair._2._1.get( "root" ).get ) :: ( pair._1, pair._2._2.get( "root" ).get ) :: res ) } //--------------------------------- parse SLK sequent ---------------------- def parseSequent( txt: String ): HOLSequent = { lazy val sp = new SequentParser sp.parseAll( sp.sequent, txt ) match { case res @ sp.Success( result, input ) => { // println( nLine + nLine + "SUCCESS parse :) " + nLine ) return res.result.toHOLSequent } case x: AnyRef => // { println( nLine + nLine + "FAIL parse : " + nLine + error_buffer ); throw new Exception( nLine + nLine + "FAIL parse :( " + nLine ); } throw new Exception( "Error in sFOParser.parseSequent : " + x.toString ) } class SequentParser extends JavaTokenParsers with TypeParsers { def name = """[\\\\]*[a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,_,0,1,2,3,4,5,6,7,8,9]*""".r def term: Parser[SchemaExpression] = ( non_formula | formula ) def formula: Parser[SchemaFormula] = ( atom | neg | big | and | or | indPred | imp | forall | exists | variable | constant ) ^? { case trm: SchemaFormula => trm } def intTerm: Parser[SchemaExpression] = ( index | schemaFormula ) def index: Parser[IntegerTerm] = ( sum | intConst | intVar | succ ) def intConst: Parser[IntegerTerm] = ( intZero | intOne | intTwo | intThree ) def intOne: Parser[IntegerTerm] = "1".r ^^ { case x => { Succ( IntZero() ) } } def intTwo: Parser[IntegerTerm] = "2".r ^^ { case x => { Succ( Succ( IntZero() ) ) } } def intThree: Parser[IntegerTerm] = "3".r ^^ { case x => { Succ( Succ( Succ( IntZero() ) ) ) } } def intZero: Parser[IntegerTerm] = "0".r ^^ { case x => { IntZero() } } def sum: Parser[IntegerTerm] = intVar ~ "+" ~ intConst ^^ { case indV ~ "+" ~ indC => { // println( nLine + nLine + "sum") indC match { case Succ( IntZero() ) => Succ( indV ) case Succ( Succ( IntZero() ) ) => Succ( Succ( indV ) ) case Succ( Succ( Succ( IntZero() ) ) ) => Succ( Succ( Succ( indV ) ) ) } } } def intVar: Parser[IntVar] = "[i,j,m,n,k,x]".r ^^ { case x => { /*println( nLine + nLine + "intVar");*/ IntVar( x ) } } def succ: Parser[IntegerTerm] = "s(" ~ intTerm ~ ")" ^^ { case "s(" ~ intTerm ~ ")" => Succ( intTerm.asInstanceOf[IntegerTerm] ) } def schemaFormula = formula def indPred: Parser[SchemaFormula] = """[A-Z]*[a-z]*[0-9]*""".r ~ "(" ~ index ~ "," ~ s_term ~ ")" ^^ { case x ~ "(" ~ l ~ "," ~ t ~ ")" => { //println( nLine + nLine + "t = "+t) IndexedPredicate( x, l ) } } // nested bigAnd bigOr.... ("""BigAnd""".r | """BigOr""".r) def prefix: Parser[Tuple4[Boolean, IntVar, IntegerTerm, IntegerTerm]] = """[BigAnd]*[BigOr]*""".r ~ "(" ~ intVar ~ "=" ~ index ~ ".." ~ index ~ ")" ^^ { case "BigAnd" ~ "(" ~ intVar1 ~ "=" ~ ind1 ~ ".." 
~ ind2 ~ ")" => { Tuple4( true, intVar1, ind1, ind2 ) } case "BigOr" ~ "(" ~ intVar1 ~ "=" ~ ind1 ~ ".." ~ ind2 ~ ")" => { Tuple4( false, intVar1, ind1, ind2 ) } } def big: Parser[SchemaFormula] = rep1( prefix ) ~ schemaFormula ^^ { case l ~ schemaFormula => { l.reverse.foldLeft( schemaFormula.asInstanceOf[SchemaFormula] )( ( res, triple ) => { if ( triple._1 ) BigAnd( triple._2, res, triple._3, triple._4 ) else BigOr( triple._2, res, triple._3, triple._4 ) } ) } } def non_formula: Parser[SchemaExpression] = ( fo_term | s_term | indexedVar | abs | variable | constant | var_func | const_func ) def s_term: Parser[SchemaExpression] = "[g,h]".r ~ "(" ~ intTerm ~ "," ~ variable ~ ")" ^^ { case name ~ "(" ~ i ~ "," ~ args ~ ")" => { //println( nLine + nLine + "sTerm" + nLine ) sTerm( name, i.asInstanceOf[IntegerTerm], args :: Nil ) } } def fo_term: Parser[SchemaExpression] = "[f]".r ~ "(" ~ variable ~ ")" ^^ { case name ~ "(" ~ arg ~ ")" => { val v = Const( StringSymbol( name ), Ti -> Ti ).asInstanceOf[Const] App( v, arg ).asInstanceOf[SchemaExpression] } } def indexedVar: Parser[Var] = regex( new Regex( "[u-z]" ) ) ~ "(" ~ intTerm ~ ")" ^^ { case x ~ "(" ~ index ~ ")" => Var( StringSymbol( x ), Tindex -> Ti ) } def FOVariable: Parser[Var] = regex( new Regex( "[u-z]" + word ) ) ^^ { case x => foVar( x ) } def variable: Parser[Var] = FOVariable //regex(new Regex("[u-z]" + word)) ^^ {case x => Var(new VariableStringSymbol(x), i->i).asInstanceOf[Var]} def constant: Parser[Const] = regex( new Regex( "[a-tA-Z0-9]" + word ) ) ^^ { case x => Const( StringSymbol( x ), Tindex -> Tindex ) } def and: Parser[SchemaFormula] = "(" ~ repsep( formula, "/\\\\" ) ~ ")" ^^ { case "(" ~ formulas ~ ")" => { formulas.tail.foldLeft( formulas.head )( ( f, res ) => And( f, res ) ) } } def or: Parser[SchemaFormula] = "(" ~ repsep( formula, """\\/""" ) ~ ")" ^^ { case "(" ~ formulas ~ ")" => { formulas.tail.foldLeft( formulas.head )( ( f, res ) => Or( f, res ) ) } } def imp: Parser[SchemaFormula] = "Imp" ~ formula ~ formula ^^ { case "Imp" ~ x ~ y => Imp( x, y ) } def abs: Parser[SchemaExpression] = "Abs" ~ variable ~ term ^^ { case "Abs" ~ v ~ x => Abs( v, x ).asInstanceOf[SchemaExpression] } def neg: Parser[SchemaFormula] = "~" ~ formula ^^ { case "~" ~ x => Neg( x ) } def atom: Parser[SchemaFormula] = ( equality | var_atom | const_atom ) def forall: Parser[SchemaFormula] = "Forall" ~ variable ~ formula ^^ { case "Forall" ~ v ~ x => All( v, x ) } def exists: Parser[SchemaFormula] = "Exists" ~ variable ~ formula ^^ { case "Exists" ~ v ~ x => Ex( v, x ) } def var_atom: Parser[SchemaFormula] = regex( new Regex( "[u-z]" + word ) ) ~ "(" ~ repsep( term, "," ) ~ ")" ^^ { case x ~ "(" ~ params ~ ")" => { SchemaAtom( Var( x, FunctionType( To, params map ( _.exptype ) ) ), params ) } } def const_atom: Parser[SchemaFormula] = regex( new Regex( "P" ) ) ~ "(" ~ repsep( term, "," ) ~ ")" ^^ { case x ~ "(" ~ params ~ ")" => { // println( nLine + nLine + "const_atom") SchemaAtom( Const( x, FunctionType( To, params map ( _.exptype ) ) ), params ) } } def equality: Parser[SchemaFormula] = /*eq_infix | */ eq_prefix // infix is problematic in higher order def eq_prefix: Parser[SchemaFormula] = "=" ~ "(" ~ term ~ "," ~ term ~ ")" ^^ { case "=" ~ "(" ~ x ~ "," ~ y ~ ")" => Eq( x, y ) } def var_func: Parser[SchemaExpression] = regex( new Regex( "[u-z]" + word ) ) ~ "(" ~ repsep( term, "," ) ~ ")" ^^ { case x ~ "(" ~ params ~ ")" => SchemaFunction( Var( x, FunctionType( Tindex -> Tindex, params map ( _.exptype ) ) ), params ) } def 
const_func: Parser[SchemaExpression] = regex( new Regex( "[" + symbols + "a-tA-Z0-9]" + word ) ) ~ "(" ~ repsep( term, "," ) ~ ")" ^^ { case x ~ "(" ~ params ~ ")" => SchemaFunction( Const( x, FunctionType( Tindex -> Tindex, params map ( _.exptype ) ) ), params ) } protected def word: String = """[a-zA-Z0-9$_{}]*""" protected def symbol: Parser[String] = symbols.r def symbols: String = """[\\053\\055\\052\\057\\0134\\0136\\074\\076\\075\\0140\\0176\\077\\0100\\046\\0174\\041\\043\\047\\073\\0173\\0175]+""" // +-*/\\^<>=`~?@&|!#{}'; def sequent: Parser[OccSequent] = repsep( formula, "," ) ~ "|-" ~ repsep( formula, "," ) ^^ { case lfs ~ "|-" ~ rfs => { Axiom( lfs, rfs ).root } } } throw new Exception( nLine + "Error in sFOParser.parseSequent function !" + nLine ) } //--------------------------------- parse SLK proof ----------------------- def parseProofFlat( txt: InputStreamReader ): MMap[String, Tuple2[LKProof, LKProof]] = { val map = parseProof( txt ) map.map( pp => { val name = pp._1 val pair = pp._2 ( name, Tuple2( pair._1.get( "root" ).get, pair._2.get( "root" ).get ) ) } ) } //plabel should return the proof corresponding to this label def parseProof( txt: InputStreamReader ): MMap[String, Tuple2[MMap[String, LKProof], MMap[String, LKProof]]] = { var mapBase = MMap.empty[String, LKProof] var mapStep = MMap.empty[String, LKProof] var map = MMap.empty[String, LKProof] var baseORstep: Int = 1 SchemaProofDB.clear var defMMap = MMap.empty[Const, Tuple2[List[IntegerTerm], SchemaFormula]] var list = List[String]() var error_buffer = "" // lazy val sp2 = new ParserTxt // sp2.parseAll(sp2.line, txt) val bigMMap = MMap.empty[String, Tuple2[MMap[String, LKProof], MMap[String, LKProof]]] val mapPredicateToArity = MMap.empty[String, Int] dbTRS.clear lazy val sp = new SimpleSLKParser // var proofName = "" // sp.parseAll(sp.line, txt) sp.parseAll( sp.slkProofs, txt ) match { case sp.Success( result, input ) => // println( nLine + nLine + "SUCCESS parse :) " + nLine ) case x: AnyRef => // { println( nLine + nLine + "FAIL parse : " + nLine + error_buffer ); throw new Exception( nLine + nLine + "FAIL parse :( " + nLine ); } throw new Exception( x.toString ) } // class ParserTxt extends JavaTokenParsers with at.logic.gapt.expr.Parsers { // // def line: Parser[List[Unit]] = repsep(mapping, nLine) // // def mapping: Parser[Unit] = """*""".r ^^ { // case t => { // list = t :: list // } // } // } class SimpleSLKParser extends JavaTokenParsers with TypeParsers { def line: Parser[List[Unit]] = rep( mappingBase ) def mappingBase: Parser[Unit] = label.r ~ ":" ~ proof ^^ { case l ~ ":" ~ p => { error_buffer = l if ( baseORstep == 2 ) { map = MMap.empty[String, LKProof] baseORstep = 1 } map.put( l, p ) mapBase = map } } def mappingStep: Parser[Unit] = label.r ~ ":" ~ proof ^^ { case l ~ ":" ~ p => { error_buffer = l // mapStep.put(l,p) if ( baseORstep == 1 ) { map = MMap.empty[String, LKProof] baseORstep = 2 } map.put( l, p ) mapStep = map } } def name = """[\\\\]*[a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,_,0,1,2,3,4,5,6,7,8,9]*""".r def slkProof: Parser[Unit] = "proof" ~ name ~ "proves" ~ sequent ~ "base" ~ "{" ~ line ~ "}" ~ "step" ~ "{" ~ rep( mappingStep ) ~ "}" ^^ { case "proof" ~ str ~ str1 ~ seq ~ "base" ~ "{" ~ line1 ~ "}" ~ "step" ~ "{" ~ line2 ~ "}" => { // proofName = str bigMMap.put( str, Tuple2( mapBase, mapStep ) ) SchemaProofDB.put( new SchemaProof( str, IntVar( "k" ) :: Nil, seq.toHOLSequent, mapBase.get( "root" ).get, mapStep.get( "root" ).get ) ) mapBase = MMap.empty[String, LKProof] 
mapStep = MMap.empty[String, LKProof] // println( nLine + nLine + "Parsing is SUCCESSFUL : "+str) } } def slkProofs: Parser[List[Unit]] = rep( trs ) ~ rep( define ) ~ rep( slkProof ) ^^ { case a ~ s => { List.empty[Unit] } } def trs: Parser[Unit] = s_term ~ "->" ~ term ~ s_term ~ "->" ~ term ^^ { case t1 ~ "->" ~ base ~ t2 ~ "->" ~ step => { t1 match { case sTerm( func1, i1, arg1 ) => t2 match { case sTerm( func2, i2, arg2 ) => { // if(func1 == func2) { dbTRS.add( func1.asInstanceOf[Const], Tuple2( t1, base ), Tuple2( t2, step ) ) // } } } } } } def proof: Parser[LKProof] = ax | orL | orR1 | orR | orR2 | negL | negR | cut | pFOLink | andL | andR | andL1 | andL2 | weakL | weakR | contrL | contrR | andEqR1 | andEqR2 | andEqR3 | orEqR1 | orEqR2 | orEqR3 | andEqL1 | andEqL2 | andEqL3 | orEqL1 | orEqL2 | orEqL3 | allL | allR | impL | impR | termDefL1 | termDefR1 | arrowL | arrowR | autoprop def label: String = """[0-9]*[root]*""" def term: Parser[SchemaExpression] = ( non_formula | formula ) def formula: Parser[SchemaFormula] = ( atom | neg | big | and | or | indPred | imp | forall | exists | variable | constant ) ^? { case trm: SchemaFormula => trm } def intTerm: Parser[SchemaExpression] = index //| schemaFormula def index: Parser[IntegerTerm] = ( sum | intConst | intVar | succ ) def intConst: Parser[IntegerTerm] = ( intZero | intOne | intTwo | intThree ) def intOne: Parser[IntegerTerm] = "1".r ^^ { case x => { Succ( IntZero() ) } } def intTwo: Parser[IntegerTerm] = "2".r ^^ { case x => { Succ( Succ( IntZero() ) ) } } def intThree: Parser[IntegerTerm] = "3".r ^^ { case x => { Succ( Succ( Succ( IntZero() ) ) ) } } def intZero: Parser[IntegerTerm] = "0".r ^^ { case x => { IntZero() } } def sum: Parser[IntegerTerm] = intVar ~ "+" ~ intConst ^^ { case indV ~ "+" ~ indC => { // println( nLine + nLine + "sum") indC match { case Succ( IntZero() ) => Succ( indV ) case Succ( Succ( IntZero() ) ) => Succ( Succ( indV ) ) case Succ( Succ( Succ( IntZero() ) ) ) => Succ( Succ( Succ( indV ) ) ) } } } def intVar: Parser[IntVar] = "[i,j,m,n,k,x]".r ^^ { case x => { /*println( nLine + nLine + "intVar");*/ IntVar( x ) } } def succ: Parser[IntegerTerm] = "s(" ~ intTerm ~ ")" ^^ { case "s(" ~ intTerm ~ ")" => Succ( intTerm.asInstanceOf[IntegerTerm] ) } def schemaFormula = formula def indPred: Parser[SchemaFormula] = """[A-Z]*[a-z]*[0-9]*""".r ~ "(" ~ repsep( index, "," ) ~ ")" ^^ { case x ~ "(" ~ l ~ ")" => { if ( !mapPredicateToArity.isDefinedAt( x.toString ) ) mapPredicateToArity.put( x.toString, l.size ) else if ( mapPredicateToArity.get( x.toString ).get != l.size ) { //println( nLine + "Input ERROR : Indexed Predicate '"+x.toString+"' should have arity "+mapPredicateToArity.get(x.toString).get+ ", but not "+l.size+" !" + nLine + nLine ) throw new Exception( nLine + "Input ERROR : Indexed Predicate '" + x.toString + "' should have arity " + mapPredicateToArity.get( x.toString ).get + ", but not " + l.size + " !" 
+ nLine ) } // println( nLine + nLine + "IndexedPredicate"); // val map: MMap[Var, T]) // val subst: SchemaSubstitution1[SchemaExpression] = new SchemaSubstitution1[SchemaExpression]() // val new_ind = subst(ind) // val new_map = (subst.map - subst.map.head._1.asInstanceOf[Var]) + Tuple2(subst.map.head._1.asInstanceOf[Var], Pred(new_ind.asInstanceOf[IntegerTerm]) ) // val new_subst = new SchemaSubstitution1(new_map) IndexedPredicate( x, l ) } } def define: Parser[Any] = indPred ~ ":=" ~ schemaFormula ^^ { case indpred ~ ":=" ~ sf => { indpred match { case IndexedPredicate( f, ls ) => { defMMap.put( f, Tuple2( ls.asInstanceOf[List[IntegerTerm]], sf.asInstanceOf[SchemaFormula] ) ) } } } } // nested bigAnd bigOr.... ("""BigAnd""".r | """BigOr""".r) def prefix: Parser[Tuple4[Boolean, IntVar, IntegerTerm, IntegerTerm]] = """[BigAnd]*[BigOr]*""".r ~ "(" ~ intVar ~ "=" ~ index ~ ".." ~ index ~ ")" ^^ { case "BigAnd" ~ "(" ~ intVar1 ~ "=" ~ ind1 ~ ".." ~ ind2 ~ ")" => { // println( nLine + nLine + "prefix" + nLine + nLine ) Tuple4( true, intVar1, ind1, ind2 ) } case "BigOr" ~ "(" ~ intVar1 ~ "=" ~ ind1 ~ ".." ~ ind2 ~ ")" => { // println( nLine + nLine + "prefix" + nLine + nLine ) Tuple4( false, intVar1, ind1, ind2 ) } } def big: Parser[SchemaFormula] = rep1( prefix ) ~ schemaFormula ^^ { case l ~ schemaFormula => { // println("Works?") l.reverse.foldLeft( schemaFormula.asInstanceOf[SchemaFormula] )( ( res, triple ) => { if ( triple._1 ) BigAnd( triple._2, res, triple._3, triple._4 ) else BigOr( triple._2, res, triple._3, triple._4 ) } ) } } def non_formula: Parser[SchemaExpression] = ( fo_term | s_term | indexedVar | abs | variable | constant | var_func | const_func ) def s_term: Parser[SchemaExpression] = "[g,h]".r ~ "(" ~ intTerm ~ "," ~ non_formula ~ ")" ^^ { case name ~ "(" ~ i ~ "," ~ args ~ ")" => { // println( nLine + "sTerm : "+name+"("+i+","+args+")") // println("args = "+args) // println("args.extype = "+args.exptype) sTerm( name, i, args :: Nil ) } } def fo_term: Parser[SchemaExpression] = "[f]".r ~ "(" ~ non_formula ~ ")" ^^ { case name ~ "(" ~ arg ~ ")" => { // println( nLine + nLine + "foTerm" + nLine + " arg.extype = "+arg.exptype) foTerm( name, arg :: Nil ) } } def indexedVar: Parser[Var] = regex( new Regex( "[z]" ) ) ~ "(" ~ intTerm ~ ")" ^^ { case x ~ "(" ~ index ~ ")" => { indexedFOVar( x, index.asInstanceOf[IntegerTerm] ) } } // TODO: a should be a FOConstant def FOVariable: Parser[Var] = regex( new Regex( "[x,y]" + word ) ) ^^ { case x => foVar( x ) } def FOConstant: Parser[Const] = regex( new Regex( "[a]" + word ) ) ^^ { case x => foConst( x ) } def variable: Parser[Var] = ( indexedVar | FOVariable ) //regex(new Regex("[u-z]" + word)) ^^ {case x => Var(new VariableStringSymbol(x), i->i).asInstanceOf[Var]} def constant: Parser[Const] = FOConstant //regex(new Regex("[a-tA-Z0-9]" + word)) ^^ {case x => Var(new ConstantStringSymbol(x), ind->ind).asInstanceOf[Const]} def and: Parser[SchemaFormula] = "(" ~ repsep( formula, "/\\\\" ) ~ ")" ^^ { case "(" ~ formulas ~ ")" => { formulas.tail.foldLeft( formulas.head )( ( f, res ) => And( f, res ) ) } } def or: Parser[SchemaFormula] = "(" ~ repsep( formula, """\\/""" ) ~ ")" ^^ { case "(" ~ formulas ~ ")" => { formulas.tail.foldLeft( formulas.head )( ( f, res ) => Or( f, res ) ) } } def imp: Parser[SchemaFormula] = "Imp" ~ formula ~ formula ^^ { case "Imp" ~ x ~ y => Imp( x, y ) } def abs: Parser[SchemaExpression] = "Abs" ~ variable ~ term ^^ { case "Abs" ~ v ~ x => Abs( v, x ).asInstanceOf[SchemaExpression] } def neg: 
Parser[SchemaFormula] = "~" ~ formula ^^ { case "~" ~ x => Neg( x ) } def atom: Parser[SchemaFormula] = ( equality | var_atom | const_atom ) def forall: Parser[SchemaFormula] = "Forall" ~ variable ~ formula ^^ { case "Forall" ~ v ~ x => All( v, x ) } def exists: Parser[SchemaFormula] = "Exists" ~ variable ~ formula ^^ { case "Exists" ~ v ~ x => Ex( v, x ) } def var_atom: Parser[SchemaFormula] = regex( new Regex( "[u-z]" + word ) ) ~ "(" ~ repsep( term, "," ) ~ ")" ^^ { case x ~ "(" ~ params ~ ")" => { // println( nLine + nLine + "var_atom") SchemaAtom( Var( x, FunctionType( To, params map ( _.exptype ) ) ), params ) } } // def const_atom: Parser[SchemaFormula] = regex(new Regex("["+symbols+"a-tA-Z0-9]" + word)) ~ "(" ~ repsep(term,",") ~ ")" ^^ {case x ~ "(" ~ params ~ ")" => { def const_atom: Parser[SchemaFormula] = regex( new Regex( "P" ) ) ~ "(" ~ repsep( term, "," ) ~ ")" ^^ { case x ~ "(" ~ params ~ ")" => { // println( nLine + nLine + "const_atom") SchemaAtom( Const( x, FunctionType( To, params map ( _.exptype ) ) ), params ) } } def equality: Parser[SchemaFormula] = /*eq_infix | */ eq_prefix // infix is problematic in higher order //def eq_infix: Parser[SchemaFormula] = term ~ "=" ~ term ^^ {case x ~ "=" ~ y => Equation(x,y)} def eq_prefix: Parser[SchemaFormula] = "=" ~ "(" ~ term ~ "," ~ term ~ ")" ^^ { case "=" ~ "(" ~ x ~ "," ~ y ~ ")" => Eq( x, y ) } def var_func: Parser[SchemaExpression] = regex( new Regex( "[u-z]" + word ) ) ~ "(" ~ repsep( term, "," ) ~ ")" ^^ { case x ~ "(" ~ params ~ ")" => SchemaFunction( Var( x, FunctionType( Tindex -> Tindex, params map ( _.exptype ) ) ), params ) } /*def var_func: Parser[SchemaExpression] = (var_func1 | var_funcn) def var_func1: Parser[SchemaExpression] = regex(new Regex("[u-z]" + word)) ~ "(" ~ repsep(term,",") ~ ")" ~ ":" ~ Type ^^ {case x ~ "(" ~ params ~ ")" ~ ":" ~ tp => Function(new VariableStringSymbol(x), params, tp)} def var_funcn: Parser[SchemaExpression] = regex(new Regex("[u-z]" + word)) ~ "^" ~ decimalNumber ~ "(" ~ repsep(term,",") ~ ")" ~ ":" ~ Type ^^ {case x ~ "^" ~ n ~ "(" ~ params ~ ")" ~ ":" ~ tp => genF(n.toInt, Var(new VariableStringSymbol(x)), params)} */ def const_func: Parser[SchemaExpression] = regex( new Regex( "[" + symbols + "a-tA-Z0-9]" + word ) ) ~ "(" ~ repsep( term, "," ) ~ ")" ^^ { case x ~ "(" ~ params ~ ")" => SchemaFunction( Const( x, FunctionType( Tindex -> Tindex, params map ( _.exptype ) ) ), params ) } protected def word: String = """[a-zA-Z0-9$_{}]*""" protected def symbol: Parser[String] = symbols.r def symbols: String = """[\\053\\055\\052\\057\\0134\\0136\\074\\076\\075\\0140\\0176\\077\\0100\\046\\0174\\041\\043\\047\\073\\0173\\0175]+""" // +-*/\\^<>=`~?@&|!#{}'; // def sequent: Parser[Sequent] = formula ~ "|-" ~ formula ^^ { case lf ~ "|-" ~ rf => { def sequent: Parser[OccSequent] = repsep( formula, "," ) ~ "|-" ~ repsep( formula, "," ) ^^ { case lfs ~ "|-" ~ rfs => { // println( nLine + nLine + "SEQUENT") Axiom( lfs, rfs ).root } } def ax: Parser[LKProof] = "ax(" ~ sequent ~ ")" ^^ { case "ax(" ~ sequent ~ ")" => { // println( nLine + nLine + "AXIOM") Axiom( sequent ) } case _ => { println( "ERROR" ); Axiom( List(), List() ) } } def proof_name: Parser[String] = """[\\\\]*[a-z]*[0-9]*""".r // def pLink: Parser[LKProof] = "pLink(" ~ "(" ~ proof_name ~ "," ~ index ~ ")" ~ sequent ~ ")" ^^ { // case "pLink(" ~ "(" ~ name ~ "," ~ v ~ ")" ~ sequent ~ ")" => { //// println( nLine + nLine + "pLink") // SchemaProofLinkRule(sequent.toHOLSequent, name, v::Nil) // } // } def pFOLink: Parser[LKProof] 
= "pLink(" ~ "(" ~ proof_name ~ "," ~ index ~ ")" ~ sequent ~ ")" ^^ { case "pLink(" ~ "(" ~ name ~ "," ~ v ~ ")" ~ sequent ~ ")" => { // println( nLine + nLine + "pLink") FOSchemaProofLinkRule( sequent.toHOLSequent, name, v :: Nil ) } } def orR1: Parser[LKProof] = "orR1(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ { case "orR1(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => { // println( nLine + nLine + "orR1") OrRight1Rule( map.get( l ).get, f1, f2 ) } } def orR2: Parser[LKProof] = "orR2(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ { case "orR2(" ~ label ~ "," ~ f1 ~ "," ~ f2 ~ ")" => { // println( nLine + nLine + "orR2") OrRight2Rule( map.get( label ).get, f1, f2 ) } } def orR: Parser[LKProof] = "orR(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ { case "orR(" ~ label ~ "," ~ f1 ~ "," ~ f2 ~ ")" => { // println( nLine + nLine + "orR") OrRightRule( map.get( label ).get, f1, f2 ) } } def orL: Parser[LKProof] = "orL(" ~ label.r ~ "," ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ { case "orL(" ~ l1 ~ "," ~ l2 ~ "," ~ f1 ~ "," ~ f2 ~ ")" => { // println( nLine + nLine + "orL") OrLeftRule( map.get( l1 ).get, map.get( l2 ).get, f1, f2 ) } } def andR: Parser[LKProof] = "andR(" ~ label.r ~ "," ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ { case "andR(" ~ l1 ~ "," ~ l2 ~ "," ~ f1 ~ "," ~ f2 ~ ")" => { // println( nLine + nLine + "andR") // println( nLine + "error_buffer = "+error_buffer) // println( nLine + map.get(l).get.root.toString) // println(map.get(l1).get.root) // println( nLine + nLine ) // println(map.get(l2).get.root) // println( nLine + nLine ) val p = AndRightRule( map.get( l1 ).get, map.get( l2 ).get, f1, f2 ) // println(p.root) p } } def cut: Parser[LKProof] = "cut(" ~ label.r ~ "," ~ label.r ~ "," ~ formula ~ ")" ^^ { case "cut(" ~ l1 ~ "," ~ l2 ~ "," ~ f ~ ")" => { // println( nLine + nLine + "cut") // println( nLine + "error_buffer = "+error_buffer) CutRule( map.get( l1 ).get, map.get( l2 ).get, f ) } } def negL: Parser[LKProof] = "negL(" ~ label.r ~ "," ~ formula ~ ")" ^^ { case "negL(" ~ label ~ "," ~ formula ~ ")" => { // println( nLine + nLine + "negL") NegLeftRule( map.get( label ).get, formula ) } case _ => { println( nLine + nLine + "Error!" 
) sys.exit( 10 ) } } def negR: Parser[LKProof] = "negR(" ~ label.r ~ "," ~ formula ~ ")" ^^ { case "negR(" ~ label ~ "," ~ formula ~ ")" => { // println( nLine + nLine + map.get(label).get.root.toString ) // println( nLine + nLine + "negR" ) NegRightRule( map.get( label ).get, formula ) } } def weakR: Parser[LKProof] = "weakR(" ~ label.r ~ "," ~ formula ~ ")" ^^ { case "weakR(" ~ label ~ "," ~ formula ~ ")" => { // println( nLine + nLine + "weakR" ) WeakeningRightRule( map.get( label ).get, formula ) } } def weakL: Parser[LKProof] = "weakL(" ~ label.r ~ "," ~ formula ~ ")" ^^ { case "weakL(" ~ label ~ "," ~ formula ~ ")" => { // println( nLine + nLine + "weakL" ) WeakeningLeftRule( map.get( label ).get, formula ) } } // def eqAnd1: Parser[LKProof] = "eqAnd1(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ { // case "eqAnd1(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => { // AndEquivalenceRule1(map.get(l).get, f1.asInstanceOf[SchemaFormula], f2.asInstanceOf[SchemaFormula]) // } // } def andL1: Parser[LKProof] = "andL1(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ { case "andL1(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => { // println( nLine + nLine + "andL1") AndLeft1Rule( map.get( l ).get, f1, f2 ) } } def andL2: Parser[LKProof] = "andL2(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ { case "andL2(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => { // println( nLine + nLine + "andL2") AndLeft2Rule( map.get( l ).get, f1, f2 ) } } def andL: Parser[LKProof] = "andL(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ { case "andL(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => { // println( nLine + nLine + "andL") // println( nLine + "error_buffer = " + error_buffer ) // println( nLine + map.get(l).get.root.toString ) val p = AndLeftRule( map.get( l ).get, f1, f2 ) p // val and = And(f1,f2) // val aux = p.root.antecedent.tail.head.formula // println( nLine + "p = "+aux ) // println( nLine + "and = "+and) // println( nLine + nLine + aux.syntaxEquals(and)) // println( nLine + "f1 = "+f1) // var res = p // f1 match { // case BigAnd(ind,f,lb,ub) => { // println("ERROR 5") //// sys.exit(1) // res = AndEquivalenceRule1(p, and.asInstanceOf[SchemaFormula], BigAnd(ind,f,lb,Succ(ub)).asInstanceOf[SchemaFormula]) // println( nLine + nLine + "res = "+res.root.antecedent.head.formula) //// return res // res // } // case _ => { // println("ERROR 3") //// sys.exit(1) // res // } // } // println("ERROR 2") // res // sys.exit(1) } } def andEqR1: Parser[LKProof] = "andEqR1(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ { case "andEqR1(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => { AndRightEquivalenceRule1( map.get( l ).get, f1, f2 ) } } def andEqR2: Parser[LKProof] = "andEqR2(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ { case "andEqR2(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => { AndRightEquivalenceRule2( map.get( l ).get, f1, f2 ) } } def andEqR3: Parser[LKProof] = "andEqR3(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ { case "andEqR3(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => { AndRightEquivalenceRule3( map.get( l ).get, f1, f2 ) } } def andEqL1: Parser[LKProof] = "andEqL1(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ { case "andEqL1(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => { AndLeftEquivalenceRule1( map.get( l ).get, f1, f2 ) } } def andEqL2: Parser[LKProof] = "andEqL2(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ { case "andEqL2(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => { AndLeftEquivalenceRule2( map.get( l ).get, f1, f2 ) } } def andEqL3: Parser[LKProof] = "andEqL3(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ { case 
"andEqL3(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => { AndLeftEquivalenceRule3( map.get( l ).get, f1, f2 ) } } def orEqR1: Parser[LKProof] = "orEqR1(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ { case "orEqR1(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => { OrRightEquivalenceRule1( map.get( l ).get, f1.asInstanceOf[SchemaFormula], f2.asInstanceOf[SchemaFormula] ) } } def orEqR2: Parser[LKProof] = "orEqR2(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ { case "orEqR2(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => { OrRightEquivalenceRule2( map.get( l ).get, f1.asInstanceOf[SchemaFormula], f2.asInstanceOf[SchemaFormula] ) } } def orEqR3: Parser[LKProof] = "orEqR3(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ { case "orEqR3(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => { OrRightEquivalenceRule3( map.get( l ).get, f1.asInstanceOf[SchemaFormula], f2.asInstanceOf[SchemaFormula] ) } } def orEqL1: Parser[LKProof] = "orEqL1(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ { case "orEqL1(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => { OrLeftEquivalenceRule1( map.get( l ).get, f1.asInstanceOf[SchemaFormula], f2.asInstanceOf[SchemaFormula] ) } } def orEqL2: Parser[LKProof] = "orEqL2(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ { case "orEqL2(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => { OrLeftEquivalenceRule2( map.get( l ).get, f1.asInstanceOf[SchemaFormula], f2.asInstanceOf[SchemaFormula] ) } } def orEqL3: Parser[LKProof] = "orEqL3(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ { case "orEqL3(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => { OrLeftEquivalenceRule3( map.get( l ).get, f1.asInstanceOf[SchemaFormula], f2.asInstanceOf[SchemaFormula] ) } } def contrL: Parser[LKProof] = "contrL(" ~ label.r ~ "," ~ formula ~ ")" ^^ { case "contrL(" ~ l ~ "," ~ f ~ ")" => { // println( nLine + nLine + "contrL") ContractionLeftRule( map.get( l ).get, f ) } } def contrR: Parser[LKProof] = "contrR(" ~ label.r ~ "," ~ formula ~ ")" ^^ { case "contrR(" ~ l ~ "," ~ f ~ ")" => { // println( nLine + nLine + "contrR") ContractionRightRule( map.get( l ).get, f ) } } def allL: Parser[LKProof] = "allL(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ "," ~ non_formula ~ ")" ^^ { case "allL(" ~ l ~ "," ~ aux ~ "," ~ main ~ "," ~ term ~ ")" => { ForallLeftRule( map.get( l ).get, aux.asInstanceOf[SchemaFormula], main.asInstanceOf[SchemaFormula], term.asInstanceOf[SchemaExpression] ) } } def allR: Parser[LKProof] = "allR(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ "," ~ ( indexedVar | FOVariable ) ~ ")" ^^ { case "allR(" ~ l ~ "," ~ aux ~ "," ~ main ~ "," ~ v ~ ")" => { ForallRightRule( map.get( l ).get, aux.asInstanceOf[SchemaFormula], main.asInstanceOf[SchemaFormula], v.asInstanceOf[Var] ) } } def impL: Parser[LKProof] = "impL(" ~ label.r ~ "," ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ { case "impL(" ~ l1 ~ "," ~ l2 ~ "," ~ f1 ~ "," ~ f2 ~ ")" => { ImpLeftRule( map.get( l1 ).get, map.get( l2 ).get, f1, f2 ) } } def impR: Parser[LKProof] = "impR(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ { case "impR(" ~ label ~ "," ~ f1 ~ "," ~ f2 ~ ")" => { ImpRightRule( map.get( label ).get, f1, f2 ) } } def arrowL: Parser[LKProof] = "arrowL(" ~ label.r ~ "," ~ formula ~ ")" ^^ { case "arrowL(" ~ label ~ "," ~ f1 ~ ")" => { trsArrowLeftRule( map.get( label ).get, f1 ) } } def arrowR: Parser[LKProof] = "arrowR(" ~ label.r ~ "," ~ formula ~ ")" ^^ { case "arrowR(" ~ label ~ "," ~ f1 ~ ")" => { trsArrowRightRule( map.get( label ).get, f1 ) } } def autoprop: Parser[LKProof] = "autoprop(" ~ sequent ~ ")" ^^ { case "autoprop(" ~ seq ~ ")" => 
solve.solvePropositional( seq.toHOLSequent, throwOnError = true ).get } def termDefL1: Parser[LKProof] = "termDefL1(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ { case "termDefL1(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => { TermLeftEquivalenceRule1( map.get( l ).get, f1.asInstanceOf[SchemaFormula], f2.asInstanceOf[SchemaFormula] ) } } def termDefR1: Parser[LKProof] = "termDefR1(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ { case "termDefR1(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => { TermRightEquivalenceRule1( map.get( l ).get, f1.asInstanceOf[SchemaFormula], f2.asInstanceOf[SchemaFormula] ) } } } // println( nLine + nLine + "number of SLK-proofs = "+bigMMap.size) // println( nLine + "defMMapr size = "+defMMap.size) // println( nLine + nLine + nLine + "list = "+list) // if (!bigMMap.get("chi").get._2.isDefinedAt(plabel)) println( nLine + nLine + nLine + "Syntax ERROR after ID : " + error_buffer + nLine + nLine ) // val m = bigMMap.get("chi").get._2.get(plabel).get //// println(m.root.antecedent.head+" |- "+m.root.succedent.head) // m // println( nLine + "SchemaProofDB.size = "+SchemaProofDB.size + nLine ) bigMMap } }
loewenheim/gapt
src/main/scala/at/logic/gapt/formats/shlk/sFOparser.scala
Scala
gpl-3.0
37,153
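The sFOparser record above builds every LK inference with the same parser-combinator shape: a rule keyword, a label that is resolved against a map of previously parsed subproofs, and one or more formulas. The following is an illustrative sketch of that shape only — Proof, the subproof map contents and the formula grammar are hypothetical stand-ins, not gapt types — and it assumes the scala-parser-combinators library is on the classpath.

import scala.util.parsing.combinator.RegexParsers
import scala.collection.mutable

object MiniProofParser extends RegexParsers {
  // Hypothetical stand-in for gapt's LKProof, for illustration only.
  final case class Proof(description: String)

  // Subproofs already built earlier in the input, keyed by label
  // (plays the role of `map` in the record above).
  val subproofs: mutable.Map[String, Proof] = mutable.Map("p1" -> Proof("axiom: A |- A"))

  def label: Parser[String] = """[a-zA-Z0-9]+""".r
  def formula: Parser[String] = """[A-Za-z]+""".r

  // Mirrors the shape of e.g. `negR`: look up the premise by label and wrap it in a new inference.
  def negR: Parser[Proof] =
    "negR(" ~> label ~ ("," ~> formula) <~ ")" ^^ {
      case l ~ f => Proof(s"NegRight(${subproofs(l).description}, $f)")
    }

  def main(args: Array[String]): Unit =
    println(parseAll(negR, "negR(p1,A)")) // prints the successfully parsed Proof
}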
/*
 *
 *    ____ __    ____________  ______
 *   / __/______ _/ /__ /_  __/  _/ //_/_  /
 *  _\ \/ __/ _ `/ / _ `// /  _/ // ,<   / /_
 * /___/\__/\_,_/_/\_,_//_/  /___/_/|_|  /___/
 *
 * A PGF/TIKZ plot library for Scala.
 *
 */

package scalatikz.pgf.automata.enums

import enumeratum._
import scala.collection.immutable._

sealed abstract class EdgeType(override val entryName: String) extends EnumEntry {
  override def toString: String = entryName
}

object EdgeType extends Enum[EdgeType] {

  val values: IndexedSeq[EdgeType] = findValues

  case object STRAIGHT extends EdgeType("")
  case object LOOP_ABOVE extends EdgeType("loop above")
  case object LOOP_BELOW extends EdgeType("loop below")
  case object LOOP_LEFT extends EdgeType("loop left")
  case object LOOP_RIGHT extends EdgeType("loop right")
  case object BEND_LEFT extends EdgeType("bend left")
  case object BEND_RIGHT extends EdgeType("bend right")
}
vagmcs/ScalaTIKZ
src/main/scala/scalatikz/pgf/automata/enums/EdgeType.scala
Scala
lgpl-3.0
929
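A brief usage sketch for the enumeratum-backed EdgeType above (not part of the original record); it assumes the enumeratum library and the EdgeType definition shown in the record are on the classpath.

import scalatikz.pgf.automata.enums.EdgeType

object EdgeTypeDemo extends App {
  val edge = EdgeType.withName("bend left")        // resolves BEND_LEFT by its entryName
  println(edge)                                    // prints "bend left" (toString is the entryName)
  println(EdgeType.values.map(_.entryName).toList) // all entry names; STRAIGHT's is the empty string
}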
package example.base.hook

import example.base.logging.Logger
import scalikejdbc.GlobalSettings

trait Listener { self: Logger =>

  GlobalSettings.queryCompletionListener = (sql: String, params: scala.collection.Seq[Any], millis: Long) => {
    logger.info(
      Map(
        "sql" -> sql,
        "params" -> params.mkString("[", ",", "]"),
        "millis" -> millis.toString
      ).toString
    )
  }

  GlobalSettings.queryFailureListener = (sql: String, params: scala.collection.Seq[Any], e: Throwable) => {
    logger.error(
      Map(
        "sql" -> sql,
        "params" -> params.mkString("[", ",", "]"),
        "message" -> (e.getMessage + "\n" + e.getStackTrace.take(7).mkString("\n"))
      ).toString
    )
  }
}
t-mochizuki/scala-study
template/base/src/main/scala/example/base/hook/Listener.scala
Scala
mit
771
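A sketch of how the Listener trait above is typically wired in. The self-type `self: Logger =>` forces the mixing class to also provide a logger; the exact shape of example.base.logging.Logger is not shown in the record, so treating it as a trait exposing a `logger` member is an assumption here.

import example.base.hook.Listener
import example.base.logging.Logger

// Mixing in both traits registers the scalikejdbc listeners once, at construction time.
// Assumes Logger is a mixable trait providing `logger` (not confirmed by the record above).
object DatabaseBootstrap extends Logger with Listener

// Any query issued through scalikejdbc afterwards is logged on completion or failure, e.g.:
//   DB readOnly { implicit s => sql"select 1".map(_.int(1)).single.apply() }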
package chandu0101.scalajs.react.components.materialui import japgolly.scalajs.react._ import materialui.Mui import scala.scalajs.js import scala.scalajs.js.Dynamic.{literal => json} import scala.scalajs.js.{Array => JArray} /** * key: PropTypes.string, style: PropTypes.js.Any, ref: PropTypes.String, className: React.PropTypes.string, docked: React.PropTypes.bool, header: React.PropTypes.element, menuItems: React.PropTypes.JArray[MuiMenuItem].isRequired, onChange: React.PropTypes.(ReactEvent,Int,js.Object) => Unit, onNavOpen: React.PropTypes.() => Unit, onNavClose: React.PropTypes.() => Unit, openRight: React.PropTypes.bool, selectedIndex: React.PropTypes.number */ object MuiLeftNav { def apply(menuItems: JArray[MuiMenuItem], style: js.UndefOr[js.Any] = js.undefined, onChange: js.UndefOr[(ReactEvent, Int, js.Object) => Unit] = js.undefined, ref: js.UndefOr[String] = js.undefined, onNavClose: js.UndefOr[() => Unit] = js.undefined, onNavOpen: js.UndefOr[() => Unit] = js.undefined, key: js.UndefOr[String] = js.undefined, className: js.UndefOr[String] = js.undefined, docked: js.UndefOr[Boolean] = js.undefined, header: js.UndefOr[ReactElement] = js.undefined, selectedIndex: js.UndefOr[Int] = js.undefined, openRight: js.UndefOr[Boolean] = js.undefined) = { val p = js.Dynamic.literal() p.updateDynamic("menuItems")(menuItems.map(_.toJson)) style.foreach(v => p.updateDynamic("style")(v)) onChange.foreach(v => p.updateDynamic("onChange")(v)) ref.foreach(v => p.updateDynamic("ref")(v)) onNavClose.foreach(v => p.updateDynamic("onNavClose")(v)) onNavOpen.foreach(v => p.updateDynamic("onNavOpen")(v)) key.foreach(v => p.updateDynamic("key")(v)) className.foreach(v => p.updateDynamic("className")(v)) docked.foreach(v => p.updateDynamic("docked")(v)) header.foreach(v => p.updateDynamic("header")(v)) selectedIndex.foreach(v => p.updateDynamic("selectedIndex")(v)) openRight.foreach(v => p.updateDynamic("openRight")(v)) val f = React.asInstanceOf[js.Dynamic].createFactory(Mui.LeftNav) f(p).asInstanceOf[ReactComponentU_] } } trait MuiLeftNavM extends js.Object { def close(): Unit = js.native def toggle(): Unit = js.native } /** * key: PropTypes.string, style: PropTypes.js.Any, ref: PropTypes.String, autoWidth: React.PropTypes.bool, onItemTap: React.PropTypes.(ReactEvent,Int,js.Object) => Unit, onToggle: React.PropTypes.(ReactEvent,Int,Boolean) => Unit, menuItems: React.PropTypes.JArray[MuiMenuItem].isRequired, selectedIndex: React.PropTypes.number, hideable: React.PropTypes.bool, visible: React.PropTypes.bool, zDepth: React.PropTypes.number, menuItemStyle: React.PropTypes.js.Any, menuItemStyleSubheader: React.PropTypes.js.Any, menuItemStyleLink: React.PropTypes.js.Any, menuItemClassName: React.PropTypes.string, menuItemClassNameSubheader: React.PropTypes.string, menuItemClassNameLink: React.PropTypes.string, */ object MuiMenu { def apply(onToggle : js.UndefOr[(ReactEvent,Int,Boolean) => Unit] = js.undefined, menuItems : JArray[MuiMenuItem], visible : js.UndefOr[Boolean]=js.undefined, style : js.UndefOr[js.Any] = js.undefined, ref : js.UndefOr[String] = js.undefined, onItemTap : js.UndefOr[(ReactEvent,Int,js.Object) => Unit] = js.undefined, menuItemStyleLink : js.UndefOr[js.Any] = js.undefined, zDepth : js.UndefOr[Int] = js.undefined, menuItemStyle : js.UndefOr[js.Any] = js.undefined, key : js.UndefOr[String] = js.undefined, autoWidth : js.UndefOr[Boolean]=js.undefined, menuItemClassNameSubheader : js.UndefOr[String] = js.undefined, menuItemStyleSubheader : js.UndefOr[js.Any] = js.undefined, hideable : 
js.UndefOr[Boolean]=js.undefined, menuItemClassNameLink : js.UndefOr[String] = js.undefined, menuItemClassName : js.UndefOr[String] = js.undefined, selectedIndex : js.UndefOr[Int] = js.undefined) = { val p = js.Dynamic.literal() onToggle.foreach(v => p.updateDynamic("onToggle")(v)) p.updateDynamic("menuItems")(menuItems.map(_.toJson)) visible.foreach(v => p.updateDynamic("visible")(v)) style.foreach(v => p.updateDynamic("style")(v)) ref.foreach(v => p.updateDynamic("ref")(v)) onItemTap.foreach(v => p.updateDynamic("onItemTap")(v)) menuItemStyleLink.foreach(v => p.updateDynamic("menuItemStyleLink")(v)) zDepth.foreach(v => p.updateDynamic("zDepth")(v)) menuItemStyle.foreach(v => p.updateDynamic("menuItemStyle")(v)) key.foreach(v => p.updateDynamic("key")(v)) autoWidth.foreach(v => p.updateDynamic("autoWidth")(v)) menuItemClassNameSubheader.foreach(v => p.updateDynamic("menuItemClassNameSubheader")(v)) menuItemStyleSubheader.foreach(v => p.updateDynamic("menuItemStyleSubheader")(v)) hideable.foreach(v => p.updateDynamic("hideable")(v)) menuItemClassNameLink.foreach(v => p.updateDynamic("menuItemClassNameLink")(v)) menuItemClassName.foreach(v => p.updateDynamic("menuItemClassName")(v)) selectedIndex.foreach(v => p.updateDynamic("selectedIndex")(v)) val f = React.asInstanceOf[js.Dynamic].createFactory(Mui.Menu) f(p).asInstanceOf[ReactComponentU_] } } case class MuiMenuItem(payload: js.UndefOr[String] = js.undefined, text: js.UndefOr[String] = js.undefined, id: js.UndefOr[String] = js.undefined, number: js.UndefOr[String] = js.undefined, data: js.UndefOr[String] = js.undefined, iconClassName: js.UndefOr[String] = js.undefined, toggle: js.UndefOr[Boolean] = js.undefined, disabled: js.UndefOr[Boolean] = js.undefined, defaultToggled: js.UndefOr[Boolean] = js.undefined, route: js.UndefOr[String] = js.undefined, `type`: js.UndefOr[MuiMenuItemType] = js.undefined) { def toJson = { val p = json() defaultToggled.foreach(v => p.updateDynamic("defaultToggled")(v)) text.foreach(v => p.updateDynamic("text")(v)) number.foreach(v => p.updateDynamic("number")(v)) data.foreach(v => p.updateDynamic("data")(v)) iconClassName.foreach(v => p.updateDynamic("iconClassName")(v)) `type`.foreach(v => p.updateDynamic("type")(v.name)) route.foreach(v => p.updateDynamic("route")(v)) disabled.foreach(v => p.updateDynamic("disabled")(v)) payload.foreach(v => p.updateDynamic("payload")(v)) toggle.foreach(v => p.updateDynamic("toggle")(v)) id.foreach(v => p.updateDynamic("id")(v)) p } } object MuiMenuItem { def fromJson(obj: js.Dynamic) = MuiMenuItem(defaultToggled = if (js.isUndefined(obj.defaultToggled)) js.undefined else obj.defaultToggled.asInstanceOf[Boolean], text = if (js.isUndefined(obj.text)) js.undefined else obj.text.asInstanceOf[String], data = if (js.isUndefined(obj.data)) js.undefined else obj.text.asInstanceOf[String], number = if (js.isUndefined(obj.number)) js.undefined else obj.number.asInstanceOf[String], iconClassName = if (js.isUndefined(obj.iconClassName)) js.undefined else obj.iconClassName.asInstanceOf[String], route = if (js.isUndefined(obj.route)) js.undefined else obj.route.asInstanceOf[String], disabled = if (js.isUndefined(obj.disabled)) js.undefined else obj.disabled.asInstanceOf[Boolean], payload = if (js.isUndefined(obj.payload)) js.undefined else obj.payload.asInstanceOf[String], toggle = if (js.isUndefined(obj.toggle)) js.undefined else obj.toggle.asInstanceOf[Boolean], id = if (js.isUndefined(obj.id)) js.undefined else obj.id.asInstanceOf[String]) } class MuiMenuItemType private(val name: 
String) extends AnyVal object MuiMenuItemType { val SUBHEADER = new MuiMenuItemType("SUBHEADER") val LINK = new MuiMenuItemType("LINK") val NESTED = new MuiMenuItemType("NESTED") def newType(name: String) = new MuiMenuItemType(name) }
saileshs/scalajs-react-components
core/src/main/scala/chandu0101/scalajs/react/components/materialui/MuiMenus.scala
Scala
apache-2.0
8,234
package at.logic.gapt.provers.sat

import at.logic.gapt.expr._
import org.specs2.mutable._

class MiniSATTest extends Specification {
  if ( !MiniSAT.isInstalled ) skipAll

  "find a model for an atom" in { MiniSAT.solve( SATProblems.getProblem1() ) must beSome }

  "see that Pc and -Pc is unsat" in { MiniSAT.solve( SATProblems.getProblem2() ) must beNone }

  "see that Pc or -Pc is valid" in {
    MiniSAT.isValid( SATProblems.getProblem3a() ) must beTrue
    MiniSAT.isValid( SATProblems.getProblem3b() ) must beTrue
  }

  "see that Pc is not valid" in { MiniSAT.isValid( SATProblems.getProblem4() ) must beFalse }

  "return a correct model" in {
    MiniSAT.solve( SATProblems.getProblem5() ) must beLike {
      case Some( model ) => SATProblems.checkSolution5( model ) must beTrue
    }
  }

  "deal correctly with the pigeonhole problem" in {
    SATProblems.getProblem6a() foreach { f => MiniSAT.isValid( f ) must beFalse }
    SATProblems.getProblem6b() foreach { f => MiniSAT.isValid( f ) must beTrue }
    ok
  }

  "say bottom is unsatisfiable" in { MiniSAT.solve( Bottom() ) must beNone }

  "say top is satisfiable" in { MiniSAT.solve( Top() ) must beSome }

  "empty CNF is sat" in { MiniSAT.solve( Seq() ) must beSome }

  "empty clause is unsat" in { MiniSAT.solve( Seq( Seq() ) ) must beNone }
}
gebner/gapt
tests/src/test/scala/at/logic/gapt/provers/sat/MiniSATTest.scala
Scala
gpl-3.0
1,322
package com.sksamuel.akka.patterns

import akka.actor.{Actor, ActorRef}
import scala.collection.mutable.{Map => MMap}

/** @author Stephen Samuel */
class Resequencer(target: ActorRef, sequenceStart: Int = 1) extends Actor {

  val buffer = MMap.empty[Int, Envelope[_]]
  var expectedSequenceNo = sequenceStart

  def receive = {
    case msg: Envelope[_] =>
      msg.attributes.get(SequenceAttribute) match {
        case Some(seq: Int) if expectedSequenceNo == seq =>
          target ! msg
          expectedSequenceNo += 1
          catchUp()
        case Some(seq: Int) => buffer.put(seq, msg)
        case None => unhandled(msg)
      }
  }

  private def catchUp() {
    while (buffer.contains(expectedSequenceNo)) {
      buffer.remove(expectedSequenceNo) foreach (target !)
      // Advance past the message just flushed; without this the original loop stopped
      // after one element and later buffered messages were never released.
      expectedSequenceNo += 1
    }
  }
}
stoopbrain/akka-patterns
src/main/scala/com/sksamuel/akka/patterns/Resequencer.scala
Scala
apache-2.0
816
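The resequencing idea in the actor above, restated as a standalone sketch without Akka so it can be run directly: messages tagged with sequence numbers are delivered strictly in order, and out-of-order arrivals are parked until the gap is filled. Names here are made up for the example.

import scala.collection.mutable

final class InOrderDelivery[A](sequenceStart: Int = 1)(deliver: A => Unit) {
  private val buffer = mutable.Map.empty[Int, A]
  private var expected = sequenceStart

  def offer(seq: Int, msg: A): Unit =
    if (seq == expected) {
      deliver(msg)
      expected += 1
      // Flush any consecutive messages that were buffered while we waited for the gap.
      while (buffer.contains(expected)) {
        buffer.remove(expected).foreach(deliver)
        expected += 1
      }
    } else buffer.update(seq, msg)
}

object InOrderDeliveryDemo extends App {
  val r = new InOrderDelivery[String]()(m => println(s"delivered $m"))
  r.offer(2, "b"); r.offer(3, "c"); r.offer(1, "a") // prints a, b, c in order
}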
/*
 * Copyright (c) 2012-2016 Snowplow Analytics Ltd. All rights reserved.
 *
 * This program is licensed to you under the Apache License Version 2.0,
 * and you may not use this file except in compliance with the Apache License Version 2.0.
 * You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the Apache License Version 2.0 is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
 */
package com.snowplowanalytics.snowplow.enrich.common
package enrichments.registry
package apirequest

// specs2
import org.specs2.Specification
import org.specs2.scalaz.ValidationMatchers

// json4s
import org.json4s.JObject
import org.json4s.JsonDSL._

class OutputSpec extends Specification with ValidationMatchers {
  def is = s2"""
  This is a specification to test the HTTP API of API Request Enrichment

  Not found value result in Failure                    $e1
  Successfully generate context                        $e2
  Successfully generate context out of complex object  $e3
  """

  def e1 = {
    val output = Output("iglu:com.snowplowanalytics/some_schema/jsonschema/1-0-0", Some(JsonOutput("$.value")))
    output.extract(JObject(Nil)) must beFailing
  }

  def e2 = {
    val output = Output("iglu:com.snowplowanalytics/some_schema/jsonschema/1-0-0", Some(JsonOutput("$.value")))
    output.parse("""{"value": 32}""").flatMap(output.extract).map(output.describeJson) must beSuccessful.like {
      case context => context must be equalTo(("schema", "iglu:com.snowplowanalytics/some_schema/jsonschema/1-0-0") ~ ("data" -> 32))
    }
  }

  def e3 = {
    val output = Output("iglu:com.snowplowanalytics/complex_schema/jsonschema/1-0-0", Some(JsonOutput("$.objects[1].deepNesting[3]")))
    output.parse(
      """
        |{
        |  "value": 32,
        |  "objects":
        |  [
        |    {"wrongValue": 11},
        |    {"deepNesting": [1,2,3,42]},
        |    {"wrongValue": 10}
        |  ]
        |}
      """.stripMargin).flatMap(output.extract).map(output.describeJson) must beSuccessful.like {
      case context => context must be equalTo(("schema", "iglu:com.snowplowanalytics/complex_schema/jsonschema/1-0-0") ~ ("data" -> 42))
    }
  }
}
haensel-ams/snowplow
3-enrich/scala-common-enrich/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/apirequest/OutputSpec.scala
Scala
apache-2.0
2,479
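The spec above exercises Snowplow-specific Output/JsonOutput types, but the json4s DSL used in its expectations is standard. A minimal sketch (assuming the json4s-jackson backend is on the classpath) of how `~` assembles the JObject the test compares against:

import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods.{compact, render}

object JsonDslSketch extends App {
  // Two key/value pairs joined with `~` become a single JObject.
  val ctx = ("schema" -> "iglu:com.snowplowanalytics/some_schema/jsonschema/1-0-0") ~ ("data" -> 32)
  println(compact(render(ctx))) // {"schema":"iglu:com.snowplowanalytics/...","data":32}
}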
package sampler.example.abc.flockMortality.util import scala.language.existentials import org.apache.commons.math3.distribution.NormalDistribution import org.apache.commons.math3.random.MersenneTwister import org.apache.commons.math3.random.RandomGenerator import org.apache.commons.math3.random.SynchronizedRandomGenerator import sampler.abc.Prior import sampler.distribution.Distribution import sampler.maths.Random import sampler._ case class Model(observed: IndexedSeq[Observed], interval: IntervalPrior) extends sampler.abc.Model[Parameters] { val numSheds = observed.size val meanEggs = observed.map(obs => obs.meanEggs) val eggCoeff = observed.map(obs => obs.eggCoeff) // Include baseline mortality // val baselineMortalityRate = observed.map{ obs => 0.0 } // Use this to turn off baseline mortality val baselineMortalityRate = observed.map{ obs => val baselineDeaths = obs.dead.take(obs.infectionFreeDays) val meanDeaths = (baselineDeaths.sum.toDouble / baselineDeaths.length) meanDeaths / obs.flockSize } val baselineDeaths = (0 until observed.length).map{ i => baselineMortalityRate(i) * observed(i).flockSize } // Prior probability density and draw functions val prior = new Prior[Parameters]{ def density(p: Parameters): Double = IntervalPrior.density(p, interval) val distribution = Distribution.from( r => IntervalPrior.draw(numSheds, interval)(r) ) } //=== // Kernel case class Kernel(lower: Double, upper: Double) { val width = upper - lower val kernel = new Prior[Double]{ val normal = { val syncRand: RandomGenerator = new SynchronizedRandomGenerator(new MersenneTwister()) new NormalDistribution(syncRand, 0, width / 20, NormalDistribution.DEFAULT_INVERSE_ABSOLUTE_ACCURACY) } def density(at: Double) = { normal.density(at) } val distribution = Distribution.from{random => val r = normal.sample if(r.isNaN() || r.isInfinite()) { val e = new Exception("here... r = " + r) e.printStackTrace() throw e } r } } } val betaRange = Kernel(interval.beta.head, interval.beta.last) val etaRange = Kernel(interval.eta.head, interval.eta.last) val gammaRange = Kernel(interval.gamma.head, interval.gamma.last) val deltaRange = Kernel(interval.delta.head, interval.delta.last) val sigmaRange = Kernel(interval.sigma.head, interval.sigma.last) val sigma2Range = Kernel(interval.sigma2.head, interval.sigma2.last) val offsetRange = Kernel(interval.offset.head, interval.offset.last) def perturb(p: Parameters) = { implicit val random = Random // TODO can't stay the same? 
val threeDie = Distribution.uniform(-1, 1) /* cf: val threeDie = Distribution.uniform(IndexedSeq(-1,0,1)) private def threeDensity(v: Int) = if(v <= 1 || v >= -1) 1.0 / 3 else 0.0 */ import p._ Parameters( beta + betaRange.kernel.distributionSupportChecked.sample, eta + etaRange.kernel.distributionSupportChecked.sample, gamma + gammaRange.kernel.distributionSupportChecked.sample, delta + deltaRange.kernel.distributionSupportChecked.sample, sigma + sigmaRange.kernel.distributionSupportChecked.sample, sigma2 + sigma2Range.kernel.distributionSupportChecked.sample, offset.map(i => i + threeDie.sample) ) } def perturbDensity(a: Parameters, b: Parameters) = { def threeDensity(v: Int) = if(v <= 1 || v >= -1) 1.0 / 3 else 0.0 val offsetProduct = (0 until a.offset.length).map(i => threeDensity(a.offset(i) - b.offset(i))).product betaRange.kernel.density(a.beta - b.beta) * etaRange.kernel.density(a.eta - b.eta) * gammaRange.kernel.density(a.gamma - b.gamma) * deltaRange.kernel.density(a.delta - b.delta) * sigmaRange.kernel.density(a.sigma - b.sigma) * sigma2Range.kernel.density(a.sigma2 - b.sigma2) * offsetProduct } //=== // Model def distanceToObservations(p: Parameters): Distribution[Double] = modelDistribution(p).map(error => error.distanceToObserved) def modelDistribution(p: Parameters) = { val sheds = 0 until observed.length val numDays = observed.map(i => i.days.length) // Include extra days if offset < 0 val odeDays = if (p.offset.filter(i => i < 0).length > 0) numDays.map(_ - p.offset.min) else numDays def solve(): SimulatedResult = { val maps = sheds.map{ i => val numDeaths = baselineDeaths(i) * (p.offset(i) + 1) val numBirds = observed(i).flockSize - numDeaths - 1 val y0 = Array(numBirds.toDouble, 1.0, 0.0, 0.0, numDeaths) val integrated = ODEState.rounded(ODE.run(y0, odeDays(i), p, eggCoeff(i), baselineMortalityRate(i))) // Create map of ODE solution without offset val unshiftedResultsMap = (0 until odeDays(i)) .foldLeft(Map[Int, ODEState]()){ case (map, day) => map.updated(day, integrated(day)) } // Make appropriate adjustment for offset (shift forwards or backwards) val shiftedResultsMap = if (p.offset(i) == 0) unshiftedResultsMap else if (p.offset(i) > 0) { val tmp = unshiftedResultsMap.map{case (key, value) => (key + p.offset(i)) -> value } val additions = (0 until p.offset(i)).foldLeft(Map[Int, ODEState]()){ case (map, day) => val numDeaths = (day + 1) * baselineDeaths(i) val numBirds = observed(i).flockSize - numDeaths map.updated(day, ODEState(numBirds, 0, 0, 0, numDeaths, unshiftedResultsMap(0).eggs)) } tmp.++(additions) } else if (p.offset(i) < 0) { val tmp = unshiftedResultsMap.map{case (key, value) => (key + p.offset(i)) -> value } tmp.filter{case (key, value) => key >= 0} } else throw new RuntimeException("not supported yet") val finalMap = shiftedResultsMap .filter{ case (key, value) => key < numDays(i) } finalMap } SimulatedResult(maps.toIndexedSeq, observed, p) } // Deterministic model will always return the same answer Distribution.always(solve) } }
tearne/Sampler
sampler-examples/src/main/scala/sampler/example/abc/flockMortality/util/Model.scala
Scala
apache-2.0
6,532
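The Kernel case class in the Model record above wraps a commons-math3 NormalDistribution whose standard deviation is a fraction of the prior interval width; the same distribution supplies the density used for the ABC weight calculation. A reduced sketch of those calls — the interval bounds and parameter values here are made up for illustration:

import org.apache.commons.math3.distribution.NormalDistribution
import org.apache.commons.math3.random.{MersenneTwister, SynchronizedRandomGenerator}

object KernelSketch extends App {
  val (lower, upper) = (0.0, 2.0)   // hypothetical prior interval for one parameter
  val width = upper - lower

  val rng = new SynchronizedRandomGenerator(new MersenneTwister())
  val kernel = new NormalDistribution(rng, 0.0, width / 20,
    NormalDistribution.DEFAULT_INVERSE_ABSOLUTE_ACCURACY)

  val current = 1.3
  val perturbed = current + kernel.sample()             // propose a new particle value
  val weightTerm = kernel.density(perturbed - current)  // density of the jump just made
  println(s"perturbed=$perturbed, kernel density of jump=$weightTerm")
}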
package org.jetbrains.plugins.scala.lang.parameterInfo.functionParameterInfo

class FunctionParameterInfoFunctionTypeTest extends FunctionParameterInfoTestBase {

  override def getTestDataPath: String =
    s"${super.getTestDataPath}functionType/"

  def testFunctionType(): Unit = doTest()

  def testFunctionTypeTwo(): Unit = doTest()

  def testNamingFunctionType(): Unit = doTest()
}
JetBrains/intellij-scala
scala/scala-impl/test/org/jetbrains/plugins/scala/lang/parameterInfo/functionParameterInfo/FunctionParameterInfoFunctionTypeTest.scala
Scala
apache-2.0
387
package scala.collection.immutable import java.lang.Integer.bitCount import java.lang.Math.ceil private[immutable] final object Node { final val HashCodeLength = 32 final val BitPartitionSize = 5 final val BitPartitionMask = (1 << BitPartitionSize) - 1 final val MaxDepth = ceil(HashCodeLength.toDouble / BitPartitionSize).toInt final val SizeEmpty = 0 final val SizeOne = 1 final val SizeMoreThanOne = 2 final def maskFrom(hash: Int, shift: Int): Int = (hash >>> shift) & BitPartitionMask final def bitposFrom(mask: Int): Int = 1 << mask final def indexFrom(bitmap: Int, bitpos: Int): Int = bitCount(bitmap & (bitpos - 1)) final def indexFrom(bitmap: Int, mask: Int, bitpos: Int): Int = if (bitmap == -1) mask else indexFrom(bitmap, bitpos) } private[immutable] trait Node[T <: Node[T]] { def hasNodes: Boolean def nodeArity: Int def getNode(index: Int): T def hasPayload: Boolean def payloadArity: Int def getPayload(index: Int): Any def sizePredicate: Int } /** * Base class for fixed-stack iterators that traverse a hash-trie. The iterator performs a * depth-first pre-order traversal, which yields first all payload elements of the current * node before traversing sub-nodes (left to right). * * @tparam T the trie node type we are iterating over * * @author Michael J. Steindorfer */ private[immutable] abstract class ChampBaseIterator[T <: Node[T]] { import Node.MaxDepth protected var currentValueCursor: Int = 0 protected var currentValueLength: Int = 0 protected var currentValueNode: T = _ private var currentStackLevel: Int = -1 private val nodeCursorsAndLengths: Array[Int] = new Array[Int](MaxDepth * 2) private val nodes: Array[T] = new Array[Node[T]](MaxDepth).asInstanceOf[Array[T]] def this(rootNode: T) = { this() if (rootNode.hasNodes) pushNode(rootNode) if (rootNode.hasPayload) setupPayloadNode(rootNode) } private final def setupPayloadNode(node: T): Unit = { currentValueNode = node currentValueCursor = 0 currentValueLength = node.payloadArity } private final def pushNode(node: T): Unit = { currentStackLevel = currentStackLevel + 1 val cursorIndex = currentStackLevel * 2 val lengthIndex = currentStackLevel * 2 + 1 nodes(currentStackLevel) = node nodeCursorsAndLengths(cursorIndex) = 0 nodeCursorsAndLengths(lengthIndex) = node.nodeArity } private final def popNode(): Unit = { currentStackLevel = currentStackLevel - 1 } /** * Searches for next node that contains payload values, * and pushes encountered sub-nodes on a stack for depth-first traversal. */ private final def searchNextValueNode(): Boolean = { while (currentStackLevel >= 0) { val cursorIndex = currentStackLevel * 2 val lengthIndex = currentStackLevel * 2 + 1 val nodeCursor = nodeCursorsAndLengths(cursorIndex) val nodeLength = nodeCursorsAndLengths(lengthIndex) if (nodeCursor < nodeLength) { nodeCursorsAndLengths(cursorIndex) += 1 val nextNode = nodes(currentStackLevel).getNode(nodeCursor) if (nextNode.hasNodes) { pushNode(nextNode) } if (nextNode.hasPayload) { setupPayloadNode(nextNode) ; return true } } else { popNode() } } return false } final def hasNext = (currentValueCursor < currentValueLength) || searchNextValueNode() } /** * Base class for fixed-stack iterators that traverse a hash-trie in reverse order. The base * iterator performs a depth-first post-order traversal, traversing sub-nodes (right to left). * * @tparam T the trie node type we are iterating over * * @author Michael J. 
Steindorfer */ private[immutable] abstract class ChampBaseReverseIterator[T <: Node[T]] { import Node.MaxDepth protected var currentValueCursor: Int = -1 protected var currentValueNode: T = _ private var currentStackLevel: Int = -1 private val nodeIndex: Array[Int] = new Array[Int](MaxDepth + 1) private val nodeStack: Array[T] = new Array[Node[T]](MaxDepth + 1).asInstanceOf[Array[T]] def this(rootNode: T) = { this() pushNode(rootNode) searchNextValueNode() } private final def setupPayloadNode(node: T): Unit = { currentValueNode = node currentValueCursor = node.payloadArity - 1 } private final def pushNode(node: T): Unit = { currentStackLevel = currentStackLevel + 1 nodeStack(currentStackLevel) = node nodeIndex(currentStackLevel) = node.nodeArity - 1 } private final def popNode(): Unit = { currentStackLevel = currentStackLevel - 1 } /** * Searches for rightmost node that contains payload values, * and pushes encountered sub-nodes on a stack for depth-first traversal. */ private final def searchNextValueNode(): Boolean = { while (currentStackLevel >= 0) { val nodeCursor = nodeIndex(currentStackLevel) ; nodeIndex(currentStackLevel) = nodeCursor - 1 if (nodeCursor >= 0) { val nextNode = nodeStack(currentStackLevel).getNode(nodeCursor) pushNode(nextNode) } else { val currNode = nodeStack(currentStackLevel) popNode() if (currNode.hasPayload) { setupPayloadNode(currNode) ; return true } } } return false } final def hasNext = (currentValueCursor >= 0) || searchNextValueNode() }
rorygraves/perf_tester
corpus/scala-library/src/main/scala/collection/immutable/ChampCommon.scala
Scala
apache-2.0
5,381
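A small sketch of how the bit-twiddling helpers in the Node companion above locate a slot inside one level of a CHAMP hash-trie node. The bitmap and hash used here are made up; the helper definitions are copied from the record.

object ChampIndexingDemo extends App {
  import java.lang.Integer.bitCount

  val BitPartitionSize = 5
  val BitPartitionMask = (1 << BitPartitionSize) - 1

  def maskFrom(hash: Int, shift: Int): Int = (hash >>> shift) & BitPartitionMask
  def bitposFrom(mask: Int): Int = 1 << mask
  def indexFrom(bitmap: Int, bitpos: Int): Int = bitCount(bitmap & (bitpos - 1))

  val hash = "example".hashCode
  val shift = 0                      // root level of the trie
  val mask = maskFrom(hash, shift)   // which of the 32 slots the hash selects at this level
  val bitpos = bitposFrom(mask)      // single-bit flag for that slot
  val bitmap = 0x00000A21            // hypothetical occupancy bitmap of a node
  if ((bitmap & bitpos) != 0)        // slot occupied: index into the node's compressed array
    println(s"payload lives at compressed index ${indexFrom(bitmap, bitpos)}")
  else
    println("slot empty at this level")
}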
package mesosphere.marathon package integration.setup import java.io.File import java.lang.management.ManagementFactory import java.net.{URLDecoder, URLEncoder} import java.nio.charset.Charset import java.nio.file.Files import java.util.UUID import java.util.concurrent.ConcurrentLinkedQueue import akka.Done import akka.actor.{ActorSystem, Cancellable, Scheduler} import akka.http.scaladsl.Http import akka.http.scaladsl.client.RequestBuilding.Get import akka.http.scaladsl.model.{HttpResponse, StatusCodes} import akka.stream.Materializer import akka.stream.scaladsl.Sink import com.fasterxml.jackson.databind.ObjectMapper import com.fasterxml.jackson.module.scala.DefaultScalaModule import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper import com.typesafe.scalalogging.{Logger, StrictLogging} import mesosphere.marathon.Protos.Constraint import mesosphere.marathon.core.pod.{HostNetwork, MesosContainer, PodDefinition} import mesosphere.marathon.integration.facades._ import mesosphere.marathon.raml.{App, AppHealthCheck, AppHostVolume, AppPersistentVolume, AppResidency, AppVolume, Container, EngineType, Network, NetworkMode, PersistentVolumeInfo, PortDefinition, ReadMode, UnreachableDisabled, UpgradeStrategy} import mesosphere.marathon.state.{PathId, PersistentVolume, VolumeMount} import mesosphere.marathon.util.{Lock, Retry, Timeout, ZookeeperServerTest} import mesosphere.util.PortAllocator import mesosphere.{AkkaUnitTestLike, WaitTestSupport} import org.apache.commons.io.FileUtils import org.scalatest.concurrent.{Eventually, ScalaFutures} import org.scalatest.exceptions.TestFailedDueToTimeoutException import org.scalatest.time.{Milliseconds, Seconds, Span} import org.scalatest.{BeforeAndAfterAll, Suite} import play.api.libs.json.{JsObject, Json} import scala.annotation.tailrec import scala.async.Async.{async, await} import scala.collection.{JavaConverters, mutable} import scala.concurrent.duration._ import scala.concurrent.{ExecutionContext, Future} import scala.sys.process.Process import scala.util.Try import scala.util.control.NonFatal trait BaseMarathon extends AutoCloseable with StrictLogging with ScalaFutures { val suiteName: String val masterUrl: String val zkUrl: String val conf: Map[String, String] = Map.empty implicit val system: ActorSystem implicit val mat: Materializer implicit val ctx: ExecutionContext implicit val scheduler: Scheduler lazy val uuid = UUID.randomUUID.toString lazy val httpPort = PortAllocator.ephemeralPort() lazy val url = conf.get("https_port").fold(s"http://localhost:$httpPort")(httpsPort => s"https://localhost:$httpsPort") lazy val client = new MarathonFacade(url, PathId.empty) val workDir = { val f = Files.createTempDirectory(s"marathon-$httpPort").toFile f.deleteOnExit() f } private def write(dir: File, fileName: String, content: String): String = { val file = File.createTempFile(fileName, "", dir) file.deleteOnExit() FileUtils.write(file, content, Charset.defaultCharset) file.setReadable(true) file.getAbsolutePath } val secretPath = write(workDir, fileName = "marathon-secret", content = "secret1") val config = Map( "master" -> masterUrl, "mesos_authentication_principal" -> "principal", "mesos_role" -> "foo", "http_port" -> httpPort.toString, "zk" -> zkUrl, "zk_timeout" -> 20.seconds.toMillis.toString, "zk_connection_timeout" -> 20.seconds.toMillis.toString, "zk_session_timeout" -> 20.seconds.toMillis.toString, "mesos_authentication_secret_file" -> s"$secretPath", "access_control_allow_origin" -> "*", "reconciliation_initial_delay" -> 
5.minutes.toMillis.toString, "min_revive_offers_interval" -> "100", "hostname" -> "localhost", "logging_level" -> "debug", "offer_matching_timeout" -> 10.seconds.toMillis.toString // see https://github.com/mesosphere/marathon/issues/4920 ) ++ conf val args = config.flatMap { case (k, v) => if (v.nonEmpty) { Seq(s"--$k", v) } else { Seq(s"--$k") } }(collection.breakOut) @volatile var marathonProcess = Option.empty[Process] val processBuilder: scala.sys.process.ProcessBuilder def create(): Process = { marathonProcess.getOrElse { val process = processBuilder.run(ProcessOutputToLogStream(s"$suiteName-LocalMarathon-$httpPort")) marathonProcess = Some(process) process } } def start(): Future[Done] = { create() val port = conf.get("http_port").orElse(conf.get("https_port")).map(_.toInt).getOrElse(httpPort) val future = Retry(s"Waiting for Marathon on $port", maxAttempts = Int.MaxValue, minDelay = 1.milli, maxDelay = 5.seconds, maxDuration = 4.minutes) { async { val result = await(Http().singleRequest(Get(s"http://localhost:$port/v2/leader"))) result.discardEntityBytes() // forget about the body if (result.status.isSuccess()) { // linter:ignore //async/await Done } else { throw new Exception(s"Marathon on port=$port hasn't started yet. Giving up waiting..") } } } future } def isRunning(): Boolean = activePids.nonEmpty def exitValue(): Option[Int] = marathonProcess.map(_.exitValue()) def activePids: Seq[String] = { val PIDRE = """^\s*(\d+)\s+\s*(.*)$""".r Process("jps -lv").!!.split("\n").collect { case PIDRE(pid, jvmArgs) if jvmArgs.contains(uuid) => pid }(collection.breakOut) } def stop(): Future[Done] = { marathonProcess.fold(Future.successful(Done)){ p => p.destroy() Timeout.blocking(30.seconds, Some("Marathon")){ p.exitValue(); Done } .recover { case NonFatal(e) => logger.warn(s"Could not shutdown Marathon $suiteName in time", e) if (activePids.nonEmpty) { Process(s"kill -9 ${activePids.mkString(" ")}").! } Done } }.andThen { case _ => marathonProcess = Option.empty[Process] } } def restart(): Future[Done] = { logger.info(s"Restarting Marathon on $httpPort") async { await(stop()) val x = await(start()) logger.info(s"Restarted Marathon on $httpPort") x } } override def close(): Unit = { stop().futureValue(timeout(35.seconds), interval(1.seconds)) Try(FileUtils.deleteDirectory(workDir)) } // lower the memory pressure by limiting threads. 
val akkaJvmArgs = Seq( "-Dakka.actor.default-dispatcher.fork-join-executor.parallelism-min=2", "-Dakka.actor.default-dispatcher.fork-join-executor.factor=1", "-Dakka.actor.default-dispatcher.fork-join-executor.parallelism-max=4", "-Dscala.concurrent.context.minThreads=2", "-Dscala.concurrent.context.maxThreads=32" ) } /** * Runs a marathon server for the given test suite * @param suiteName The test suite that owns this marathon * @param masterUrl The mesos master url * @param zkUrl The ZK url * @param conf any particular configuration * @param mainClass The main class */ case class LocalMarathon( suiteName: String, masterUrl: String, zkUrl: String, override val conf: Map[String, String] = Map.empty, val mainClass: String = "mesosphere.marathon.Main")(implicit val system: ActorSystem, val mat: Materializer, val ctx: ExecutionContext, val scheduler: Scheduler) extends BaseMarathon { // it'd be great to be able to execute in memory, but we can't due to GuiceFilter using a static :( override val processBuilder = { val java = sys.props.get("java.home").fold("java")(_ + "/bin/java") val cp = sys.props.getOrElse("java.class.path", "target/classes") // Get JVM arguments, such as -javaagent:some.jar val runtimeMxBean = ManagementFactory.getRuntimeMXBean val runtimeArguments = JavaConverters.collectionAsScalaIterable(runtimeMxBean.getInputArguments).toSeq val cmd = Seq(java, "-Xmx1024m", "-Xms256m", "-XX:+UseConcMarkSweepGC", "-XX:ConcGCThreads=2") ++ runtimeArguments ++ akkaJvmArgs ++ Seq(s"-DmarathonUUID=$uuid -DtestSuite=$suiteName", "-classpath", cp, "-client", mainClass) ++ args Process(cmd, workDir, sys.env.toSeq: _*) } override def activePids: Seq[String] = { val PIDRE = """^\s*(\d+)\s+(\S*)\s*(.*)$""".r Process("jps -lv").!!.split("\n").collect { case PIDRE(pid, main, jvmArgs) if main.contains(mainClass) && jvmArgs.contains(uuid) => pid }(collection.breakOut) } } trait HealthCheckEndpoint extends StrictLogging with ScalaFutures { protected val healthChecks = Lock(mutable.ListBuffer.empty[IntegrationHealthCheck]) val registeredReadinessChecks = Lock(mutable.ListBuffer.empty[IntegrationReadinessCheck]) implicit val system: ActorSystem implicit val mat: Materializer /** * Note! This is declared as lazy in order to prevent eager evaluation of values on which it depends * We initialize it during the before hook and wait for Marathon to respond. */ protected[setup] lazy val healthEndpoint = { val route = { import akka.http.scaladsl.server.Directives._ val mapper = new ObjectMapper() with ScalaObjectMapper mapper.registerModule(DefaultScalaModule) get { path(Segment / Segment / "health") { (uriEncodedAppId, versionId) => import PathId._ val appId = URLDecoder.decode(uriEncodedAppId, "UTF-8").toRootPath def instance = healthChecks(_.find { c => c.appId == appId && c.versionId == versionId }) val state = instance.fold(true)(_.healthy) logger.info(s"Received health check request: app=$appId, version=$versionId reply=$state") if (state) { complete(HttpResponse(status = StatusCodes.OK)) } else { complete(HttpResponse(status = StatusCodes.InternalServerError)) } } ~ path(Segment / Segment / Segment / "ready") { (uriEncodedAppId, versionId, taskId) => import PathId._ val appId = URLDecoder.decode(uriEncodedAppId, "UTF-8").toRootPath // Find a fitting registred readiness check. If the check has no task id set we ignore it. 
def check: Option[IntegrationReadinessCheck] = registeredReadinessChecks(_.find { c => c.appId == appId && c.versionId == versionId && c.taskId.fold(true)(_ == taskId) }) // An app is not ready by default to avoid race conditions. val isReady = check.fold(false)(_.call) logger.info(s"Received readiness check request: app=$appId, version=$versionId taskId=$taskId reply=$isReady") if (isReady) { complete(HttpResponse(status = StatusCodes.OK)) } else { complete(HttpResponse(status = StatusCodes.InternalServerError)) } } ~ path(Remaining) { path => require(false, s"$path was unmatched!") complete(HttpResponse(status = StatusCodes.InternalServerError)) } } } val port = PortAllocator.ephemeralPort() logger.info(s"Starting health check endpoint on port $port.") val server = Http().bindAndHandle(route, "0.0.0.0", port).futureValue logger.info(s"Listening for health events on $port") server } /** * Add an integration health check to internal health checks. The integration health check is used to control the * health check replies for our app mock. * * @param appId The app id of the app mock * @param versionId The version of the app mock * @param state The initial health status of the app mock * @return The IntegrationHealthCheck object which is used to control the replies. */ def registerAppProxyHealthCheck(appId: PathId, versionId: String, state: Boolean): IntegrationHealthCheck = { val check = new IntegrationHealthCheck(appId, versionId, state) healthChecks { checks => checks.filter(c => c.appId == appId && c.versionId == versionId).foreach(checks -= _) checks += check } check } /** * Adds an integration readiness check to internal readiness checks. The behaviour is similar to integration health * checks. * * @param appId The app id of the app mock * @param versionId The version of the app mock * @param taskId Optional task id to identify the task of the app mock. * @return The IntegrationReadinessCheck object which is used to control replies. */ def registerProxyReadinessCheck(appId: PathId, versionId: String, taskId: Option[String] = None): IntegrationReadinessCheck = { val check = new IntegrationReadinessCheck(appId, versionId, taskId) registeredReadinessChecks { checks => checks.filter(c => c.appId == appId && c.versionId == versionId && c.taskId == taskId).foreach(checks -= _) checks += check } check } } trait MarathonAppFixtures { val testBasePath: PathId implicit class PathIdTestHelper(path: String) { def toRootTestPath: PathId = testBasePath.append(path).canonicalPath() def toTestPath: PathId = testBasePath.append(path) } val healthCheckPort: Int /** * Constructs the proper health proxy endpoint argument for the Python app mock. 
* * @param appId The app id whose health is checked * @param versionId The version of the app * @return URL to health check endpoint */ def healthEndpointFor(appId: PathId, versionId: String): String = { val encodedAppId = URLEncoder.encode(appId.toString, "UTF-8") s"http://$$HOST:$healthCheckPort/$encodedAppId/$versionId" } def appMockCmd(appId: PathId, versionId: String): String = { val projectDir = sys.props.getOrElse("user.dir", ".") val appMock: File = new File(projectDir, "src/test/resources/python/app_mock.py") s"""echo APP PROXY $$MESOS_TASK_ID RUNNING; ${appMock.getAbsolutePath} """ + s"""$$PORT0 $appId $versionId ${healthEndpointFor(appId, versionId)}""" } def appProxyHealthCheck( gracePeriod: FiniteDuration = 1.seconds, interval: FiniteDuration = 1.second, maxConsecutiveFailures: Int = Int.MaxValue, portIndex: Option[Int] = Some(0)): AppHealthCheck = raml.AppHealthCheck( gracePeriodSeconds = gracePeriod.toSeconds.toInt, intervalSeconds = interval.toSeconds.toInt, maxConsecutiveFailures = maxConsecutiveFailures, portIndex = portIndex, protocol = raml.AppHealthCheckProtocol.Http, path = Some("/health") ) def appProxy(appId: PathId, versionId: String, instances: Int, healthCheck: Option[raml.AppHealthCheck] = Some(appProxyHealthCheck()), dependencies: Set[PathId] = Set.empty, gpus: Int = 0): App = { val cmd = appMockCmd(appId, versionId) App( id = appId.toString, cmd = Some(cmd), executor = "//cmd", instances = instances, cpus = 0.01, mem = 32.0, gpus = gpus, healthChecks = healthCheck.toSet, dependencies = dependencies.map(_.toString) ) } def residentApp( id: PathId, containerPath: String = "persistent-volume", cmd: String = "sleep 1000", instances: Int = 1, backoffDuration: FiniteDuration = 1.hour, portDefinitions: Seq[PortDefinition] = Seq.empty, /* prevent problems by randomized port assignment */ constraints: Set[Seq[String]] = Set.empty): App = { val cpus: Double = 0.001 val mem: Double = 1.0 val disk: Double = 1.0 val persistentVolumeSize = 2L val persistentVolume: AppVolume = AppPersistentVolume( containerPath = containerPath, persistent = PersistentVolumeInfo(size = persistentVolumeSize), mode = ReadMode.Rw ) val app = App( id.toString, instances = instances, residency = Some(AppResidency()), constraints = constraints, container = Some(Container( `type` = EngineType.Mesos, volumes = Seq(persistentVolume) )), cmd = Some(cmd), // cpus, mem and disk are really small because otherwise we'll soon run out of reservable resources cpus = cpus, mem = mem, disk = disk, portDefinitions = Some(portDefinitions), backoffSeconds = backoffDuration.toSeconds.toInt, upgradeStrategy = Some(UpgradeStrategy(minimumHealthCapacity = 0.5, maximumOverCapacity = 0.0)), unreachableStrategy = Some(UnreachableDisabled.DefaultValue) ) app } def dockerAppProxy(appId: PathId, versionId: String, instances: Int, healthCheck: Option[AppHealthCheck] = Some(appProxyHealthCheck()), dependencies: Set[PathId] = Set.empty): App = { val projectDir = sys.props.getOrElse("user.dir", ".") val containerDir = "/opt/marathon" val cmd = Some("""echo APP PROXY $$MESOS_TASK_ID RUNNING; /opt/marathon/python/app_mock.py """ + s"""$$PORT0 $appId $versionId ${healthEndpointFor(appId, versionId)}""") App( id = appId.toString, cmd = cmd, container = Some(raml.Container( `type` = raml.EngineType.Docker, docker = Some(raml.DockerContainer( image = "python:3.4.6-alpine" )), volumes = collection.immutable.Seq( AppHostVolume(hostPath = s"$projectDir/src/test/resources/python", containerPath = s"$containerDir/python", mode = 
ReadMode.Ro) ) )), instances = instances, cpus = 0.5, mem = 128, healthChecks = healthCheck.toSet, dependencies = dependencies.map(_.toString), networks = Seq(Network(mode = NetworkMode.Host)) ) } def simplePod(podId: String, constraints: Set[Constraint] = Set.empty, instances: Int = 1): PodDefinition = PodDefinition( id = testBasePath / s"$podId", containers = Seq( MesosContainer( name = "task1", exec = Some(raml.MesosExec(raml.ShellCommand("sleep 1000"))), resources = raml.Resources(cpus = 0.1, mem = 32.0) ) ), networks = Seq(HostNetwork), instances = instances, constraints = constraints ) def residentPod( id: String, mountPath: String = "persistent-volume", cmd: String = "sleep 1000", instances: Int = 1): PodDefinition = { val persistentVolumeSize = 2L val volumeInfo = state.PersistentVolumeInfo(size = persistentVolumeSize) val volumes = Seq(PersistentVolume(name = Some("pst"), persistent = volumeInfo)) val volumeMounts = Seq(VolumeMount(volumeName = Some("pst"), mountPath = mountPath, readOnly = false)) val pod = PodDefinition( id = testBasePath / id, containers = Seq( MesosContainer( name = "task1", exec = Some(raml.MesosExec(raml.ShellCommand(cmd))), resources = raml.Resources(cpus = 0.1, mem = 32.0), volumeMounts = volumeMounts ) ), networks = Seq(HostNetwork), instances = instances, constraints = Set.empty, volumes = volumes, unreachableStrategy = state.UnreachableDisabled, upgradeStrategy = state.UpgradeStrategy(0.0, 0.0) ) pod } } /** * Base trait for tests that need a marathon */ trait MarathonTest extends HealthCheckEndpoint with MarathonAppFixtures with ScalaFutures with Eventually { protected def logger: Logger def marathonUrl: String def marathon: MarathonFacade def leadingMarathon: Future[BaseMarathon] def mesos: MesosFacade def suiteName: String implicit val system: ActorSystem implicit val mat: Materializer implicit val ctx: ExecutionContext implicit val scheduler: Scheduler lazy val healthCheckPort = healthEndpoint.localAddress.getPort case class CallbackEvent(eventType: String, info: Map[String, Any]) object CallbackEvent { def apply(event: ITEvent): CallbackEvent = CallbackEvent(event.eventType, event.info) } implicit class CallbackEventToStatusUpdateEvent(val event: CallbackEvent) { def taskStatus: String = event.info.get("taskStatus").map(_.toString).getOrElse("") def message: String = event.info("message").toString def id: String = event.info("id").toString def running: Boolean = taskStatus == "TASK_RUNNING" def finished: Boolean = taskStatus == "TASK_FINISHED" def failed: Boolean = taskStatus == "TASK_FAILED" } object StatusUpdateEvent { def unapply(event: CallbackEvent): Option[CallbackEvent] = { if (event.eventType == "status_update_event") Some(event) else None } } protected val events = new ConcurrentLinkedQueue[ITSSEEvent]() def waitForTasks(appId: PathId, num: Int, maxWait: FiniteDuration = patienceConfig.timeout.toMillis.millis)(implicit facade: MarathonFacade = marathon): List[ITEnrichedTask] = { eventually(timeout(Span(maxWait.toMillis, Milliseconds))) { val tasks = Try(facade.tasks(appId)).map(_.value).getOrElse(Nil).filter(_.launched) logger.info(s"${tasks.size}/$num tasks launched for $appId") require(tasks.size == num, s"Waiting for $num tasks to be launched") tasks } } // We shouldn't eat exceptions in clenaUp() methods: it's a source of hard to find bugs if // we just move on to the next test, that expects a "clean state". We should fail loud and // proud here and find out why the clean-up fails. 
def cleanUp(): Unit = { logger.info(">>> Starting to CLEAN UP...") events.clear() // Wait for a clean slate in Marathon, if there is a running deployment or a runSpec exists logger.info("Clean Marathon State") //do not fail here, since the require statements will ensure a correct setup and fail otherwise Try(waitForDeployment(eventually(marathon.deleteGroup(testBasePath, force = true)))) val cleanUpPatienceConfig = WaitTestSupport.PatienceConfig(timeout = Span(50, Seconds), interval = Span(1, Seconds)) WaitTestSupport.waitUntil("clean slate in Mesos") { val mesosState = mesos.state.value val occupiedAgents = mesosState.agents.filter { agent => agent.usedResources.nonEmpty || agent.reservedResourcesByRole.nonEmpty } occupiedAgents.foreach { agent => import mesosphere.marathon.integration.facades.MesosFormats._ val usedResources: String = Json.prettyPrint(Json.toJson(agent.usedResources)) val reservedResources: String = Json.prettyPrint(Json.toJson(agent.reservedResourcesByRole)) logger.info(s"""Waiting for blank slate Mesos...\n "used_resources": "$usedResources"\n"reserved_resources": "$reservedResources"""") } if (occupiedAgents.nonEmpty) { val tasks = mesosState.frameworks.flatMap(_.tasks) logger.info(s"Remaining tasks: $tasks") } occupiedAgents.isEmpty }(cleanUpPatienceConfig) val apps = marathon.listAppsInBaseGroup require(apps.value.isEmpty, s"apps weren't empty: ${apps.entityPrettyJsonString}") val pods = marathon.listPodsInBaseGroup require(pods.value.isEmpty, s"pods weren't empty: ${pods.entityPrettyJsonString}") val groups = marathon.listGroupsInBaseGroup require(groups.value.isEmpty, s"groups weren't empty: ${groups.entityPrettyJsonString}") events.clear() healthChecks(_.clear()) logger.info("... CLEAN UP finished <<<") } def waitForHealthCheck(check: IntegrationHealthCheck, maxWait: FiniteDuration = patienceConfig.timeout.toMillis.millis) = { WaitTestSupport.waitUntil("Health check to get queried", maxWait) { check.pinged.get } } def waitForDeploymentId(deploymentId: String, maxWait: FiniteDuration = patienceConfig.timeout.toMillis.millis): CallbackEvent = { waitForEventWith("deployment_success", _.id == deploymentId, s"event deployment_success (id: $deploymentId) to arrive", maxWait) } def waitForStatusUpdates(kinds: String*): Seq[CallbackEvent] = kinds.map { kind => logger.info(s"Wait for status update event with kind: $kind") waitForEventWith( "status_update_event", _.taskStatus == kind, s"event status_update_event (${kinds.mkString(",")}) to arrive") }.to[Seq] def waitForEvent( kind: String, maxWait: FiniteDuration = patienceConfig.timeout.toMillis.millis): CallbackEvent = waitForEventWith(kind, _ => true, s"event $kind to arrive", maxWait) def waitForEventWith( kind: String, fn: CallbackEvent => Boolean, description: String, maxWait: FiniteDuration = patienceConfig.timeout.toMillis.millis): CallbackEvent = { waitForEventMatching(description, maxWait) { event => event.eventType == kind && fn(event) } } /** * Consumes the next event from the events queue within deadline. Does not throw. Returns None if unable to return an * event by that time. 
* * @param deadline The time after which to stop attempting to get an event and return None */ private def nextEvent(deadline: Deadline): Option[ITSSEEvent] = try { eventually(timeout(Span(deadline.timeLeft.toMillis, Milliseconds))) { val r = Option(events.poll) if (r.isEmpty) throw new NoSuchElementException r } } catch { case _: NoSuchElementException => None case _: TestFailedDueToTimeoutException => None } /** * Method waits for events and calls their callbacks independently of the events order. It receives a * map of EventId -> Callback e.g.: * Map("deployment_failed" -> _.id == deploymentId, "deployment_successful" -> _.id == rollbackId)), * checks every event for it's existence in the map and if found, calls it's callback method. If successful, the entry * is removed from the map. Returns if the map is empty. */ def waitForEventsWith( description: String, eventsMap: Map[String, CallbackEvent => Boolean], maxWait: FiniteDuration = patienceConfig.timeout.toMillis.millis) = { val waitingFor = mutable.Map(eventsMap.toSeq: _*) waitForEventMatching(description, maxWait) { event => if (waitingFor.get(event.eventType).fold(false)(fn => fn(event))) { waitingFor -= event.eventType } waitingFor.isEmpty } } /** * Method waits for ANY (and only one) of the given events. It receives a map of EventId -> Callback e.g.: * Map("deployment_failed" -> _.id == deploymentId, "deployment_successful" -> _.id == rollbackId)), * and checks every incoming event for it's existence in the map and if found, calls it's callback method. * Returns if event found and callback returns true. */ def waitForAnyEventWith( description: String, eventsMap: Map[String, CallbackEvent => Boolean], maxWait: FiniteDuration = patienceConfig.timeout.toMillis.millis) = { val waitingForAny = mutable.Map(eventsMap.toSeq: _*) waitForEventMatching(description, maxWait) { event => waitingForAny.get(event.eventType).fold(false)(fn => fn(event)) } } def waitForEventMatching( description: String, maxWait: FiniteDuration = patienceConfig.timeout.toMillis.millis)(fn: CallbackEvent => Boolean): CallbackEvent = { val deadline = maxWait.fromNow @tailrec def iter(): CallbackEvent = { nextEvent(deadline) match { case Some(ITConnected) => throw new MarathonTest.UnexpectedConnect case Some(event: ITEvent) => val cbEvent = CallbackEvent(event) if (fn(cbEvent)) { cbEvent } else { logger.info(s"Event $event did not match criteria skipping to next event") iter() } case None => throw new RuntimeException(s"No events matched <$description>") } } iter() } /** * Blocks until a single connected event is consumed. Discards any events up to that point. * * Not reasoning about SSE connection state will lead to flaky tests. If a master is killed, you should wait for the * SSE stream to reconnect before doing anything else, or you could miss events. */ def waitForSSEConnect(maxWait: FiniteDuration = patienceConfig.timeout.toMillis.millis): Unit = { @tailrec val deadline = maxWait.fromNow def iter(): Unit = { nextEvent(deadline) match { case Some(event: ITEvent) => logger.info(s"Event ${event} was not a connected event; skipping") iter() case Some(ITConnected) => logger.info("ITConnected event consumed") case None => throw new RuntimeException("No connected events") } } iter() } /** * Wait for the events of the given kinds (=types). 
*/ def waitForEvents(kinds: String*)(maxWait: FiniteDuration = patienceConfig.timeout.toMillis.millis): Map[String, Seq[CallbackEvent]] = { val deadline = maxWait.fromNow /** Receive the events for the given kinds (duplicates allowed) in any order. */ val receivedEventsForKinds: Seq[CallbackEvent] = { var eventsToWaitFor = kinds val receivedEvents = Vector.newBuilder[CallbackEvent] while (eventsToWaitFor.nonEmpty) { val event = waitForEventMatching(s"event $eventsToWaitFor to arrive", deadline.timeLeft) { event => eventsToWaitFor.contains(event.eventType) } receivedEvents += event // Remove received event kind. Only remove one element for duplicates. val kindIndex = eventsToWaitFor.indexWhere(_ == event.eventType) assert(kindIndex >= 0) eventsToWaitFor = eventsToWaitFor.patch(kindIndex, Nil, 1) } receivedEvents.result() } receivedEventsForKinds.groupBy(_.eventType) } def waitForDeployment(change: RestResult[_], maxWait: FiniteDuration = patienceConfig.timeout.toMillis.millis): CallbackEvent = { require(change.success, s"Deployment request has not been successful. httpCode=${change.code} body=${change.entityString}") val deploymentId = change.deploymentId.getOrElse(throw new IllegalArgumentException("No deployment id found in Http Header")) waitForDeploymentId(deploymentId, maxWait) } def waitForAppOfferReject(appId: PathId, offerRejectReason: String): Unit = { def queueResult = marathon.launchQueue() def jsQueueResult = queueResult.entityJson def queuedRunspecs = (jsQueueResult \ "queue").as[Seq[JsObject]] def jsonApp = queuedRunspecs.find { spec => (spec \ "app" \ "id").as[String] == appId.toString }.get def unfulfilledConstraintRejectSummary = (jsonApp \ "processedOffersSummary" \ "rejectSummaryLastOffers").as[Seq[JsObject]] .find { e => (e \ "reason").as[String] == offerRejectReason }.get eventually { logger.info("jsApp:" + jsonApp.toString()) assert((unfulfilledConstraintRejectSummary \ "declined").as[Int] >= 1) } } def teardown(): Unit = { Try { val frameworkId = marathon.info.entityJson.as[JsObject].value("frameworkId").as[String] mesos.teardown(frameworkId) eventually(timeout(1.minutes), interval(2.seconds)) { assert(mesos.completedFrameworkIds().value.contains(frameworkId)) } } Try(healthEndpoint.unbind().futureValue) } /** * Connects repeatedly to the Marathon SSE endpoint until cancelled. * Yields each event in order. */ def startEventSubscriber(): Cancellable = { @volatile var cancelled = false def iter(): Unit = { import akka.stream.scaladsl.Source logger.info("SSEStream: Connecting") Source.fromFuture(leadingMarathon) .mapAsync(1) { leader => async { logger.info(s"SSEStream: Acquiring connection to ${leader.url}") val stream = await(leader.client.events()) logger.info(s"SSEStream: Connection acquired to ${leader.url}") /* A potentially impossible edge case exists in which we query the leader, and then before we get a connection * to that instance, it restarts and is no longer a leader. * * By checking the leader again once obtaining a connection to the SSE event stream, we have conclusive proof * that we are consuming from the current leader, and we keep our connected events as deterministic as * possible. 
*/ val leaderAfterConnection = await(leadingMarathon) logger.info(s"SSEStream: ${leader.url} is the leader") if (leader != leaderAfterConnection) { stream.runWith(Sink.cancelled) throw new RuntimeException("Leader status changed since first connecting to stream") } else { stream } } } .flatMapConcat { stream => // We prepend the ITConnected event here in order to avoid emitting an ITConnected event on failed connections stream.prepend(Source.single(ITConnected)) } .runForeach { e: ITSSEEvent => e match { case ITConnected => logger.info(s"SSEStream: Connected") case event: ITEvent => logger.info(s"SSEStream: Received callback event: ${event.eventType} with props ${event.info}") } events.offer(e) } .onComplete { case result => if (!cancelled) { logger.info(s"SSEStream: Leader event stream was closed reason: ${result}") logger.info("Reconnecting") /* There is a small window between Jetty hanging up the event stream, and Jetty not accepting and * responding to new requests. In the tests, under heavy load, retrying within 15 milliseconds is enough * to hit this window. * * 10 times the interval would probably suffice. Timeout is way more time then we need. Half timeout seems * like an okay compromise. */ scheduler.scheduleOnce(patienceConfig.timeout / 2) { iter() } } } } iter() new Cancellable { override def cancel(): Boolean = { cancelled = true true } override def isCancelled: Boolean = cancelled } } } object MarathonTest extends StrictLogging { class UnexpectedConnect extends Exception("Received an unexpected SSE event stream Connection event. This is " + "considered an exception because not thinking about re-connection events properly can lead to race conditions in " + "the tests. You should call waitForSSEConnect() after killing a Marathon leader to ensure no events are dropped.") } /** * Fixture that can be used for a single test case. 
*/ trait MarathonFixture extends AkkaUnitTestLike with MesosClusterTest with ZookeeperServerTest { protected def logger: Logger def withMarathon[T](suiteName: String, marathonArgs: Map[String, String] = Map.empty)(f: (LocalMarathon, MarathonTest) => T): T = { val marathonServer = LocalMarathon(suiteName = suiteName, masterUrl = mesosMasterUrl, zkUrl = s"zk://${zkServer.connectUri}/marathon-$suiteName", conf = marathonArgs) marathonServer.start().futureValue val marathonTest = new MarathonTest { override protected val logger: Logger = MarathonFixture.this.logger override def marathonUrl: String = s"http://localhost:${marathonServer.httpPort}" override def marathon: MarathonFacade = marathonServer.client override def mesos: MesosFacade = MarathonFixture.this.mesos override val testBasePath: PathId = PathId("/") override implicit val system: ActorSystem = MarathonFixture.this.system override implicit val mat: Materializer = MarathonFixture.this.mat override implicit val ctx: ExecutionContext = MarathonFixture.this.ctx override implicit val scheduler: Scheduler = MarathonFixture.this.scheduler override val suiteName: String = MarathonFixture.this.suiteName override implicit def patienceConfig: PatienceConfig = PatienceConfig(MarathonFixture.this.patienceConfig.timeout, MarathonFixture.this.patienceConfig.interval) override def leadingMarathon = Future.successful(marathonServer) } val sseStream = marathonTest.startEventSubscriber() try { marathonTest.healthEndpoint marathonTest.waitForSSEConnect() f(marathonServer, marathonTest) } finally { sseStream.cancel() marathonTest.teardown() marathonServer.stop() } } } object MarathonFixture extends MarathonFixture /** * base trait that spins up/tears down a marathon and has all of the original tooling from * SingleMarathonIntegrationTest. 
*/ trait MarathonSuite extends Suite with StrictLogging with ScalaFutures with BeforeAndAfterAll with Eventually with MarathonTest { abstract override def afterAll(): Unit = { teardown() super.afterAll() } } /** * Base trait that starts a local marathon but doesn't have mesos/zookeeper yet */ trait LocalMarathonTest extends MarathonTest with ScalaFutures with AkkaUnitTestLike with MesosTest with ZookeeperServerTest { def marathonArgs: Map[String, String] = Map.empty lazy val marathonServer = LocalMarathon(suiteName = suiteName, masterUrl = mesosMasterUrl, zkUrl = s"zk://${zkServer.connectUri}/marathon", conf = marathonArgs) lazy val marathonUrl = s"http://localhost:${marathonServer.httpPort}" val testBasePath: PathId = PathId("/") lazy val marathon = marathonServer.client lazy val appMock: AppMockFacade = new AppMockFacade() /** * Return the current leading Marathon * Expected to retry for a significant period of time until succeeds */ override def leadingMarathon: Future[LocalMarathon] = Future.successful(marathonServer) @volatile private var sseStream: Option[Cancellable] = None abstract override def beforeAll(): Unit = { super.beforeAll() marathonServer.start().futureValue sseStream = Some(startEventSubscriber()) waitForSSEConnect() } abstract override def afterAll(): Unit = { sseStream.foreach(_.cancel) teardown() marathonServer.close() super.afterAll() } } /** * trait that has marathon, zk, and a mesos ready to go */ trait EmbeddedMarathonTest extends Suite with StrictLogging with ZookeeperServerTest with MesosClusterTest with LocalMarathonTest { /* disable failover timeout to assist with cleanup ops; terminated marathons are immediately removed from mesos's * list of frameworks * * Until https://issues.apache.org/jira/browse/MESOS-8171 is resolved, we cannot set this value to 0. */ override def marathonArgs: Map[String, String] = Map("failover_timeout" -> "1") } /** * Trait that has a Marathon cluster, zk, and Mesos via mesos-local ready to go. * * It provides multiple Marathon instances. This allows e.g. leadership rotation. */ trait MarathonClusterTest extends Suite with StrictLogging with ZookeeperServerTest with MesosClusterTest with LocalMarathonTest { val numAdditionalMarathons = 2 lazy val additionalMarathons = 0.until(numAdditionalMarathons).map { _ => LocalMarathon(suiteName = suiteName, masterUrl = mesosMasterUrl, zkUrl = s"zk://${zkServer.connectUri}/marathon", conf = marathonArgs) } lazy val marathonFacades = marathon +: additionalMarathons.map(_.client) lazy val allMarathonServers = marathonServer +: additionalMarathons override def leadingMarathon: Future[LocalMarathon] = { val leader = Retry("querying leader", maxAttempts = 50, maxDelay = 1.second, maxDuration = patienceConfig.timeout) { Future.firstCompletedOf(marathonFacades.map(_.leaderAsync())) } leader.map { leader => allMarathonServers.find { _.httpPort == leader.value.port }.head } } override def beforeAll(): Unit = { super.beforeAll() Future.sequence(additionalMarathons.map(_.start())).futureValue } override def afterAll(): Unit = { Try(additionalMarathons.foreach(_.close())) super.afterAll() } override def cleanUp(): Unit = { Future.sequence(marathonServer.start() +: additionalMarathons.map(_.start())).futureValue super.cleanUp() } }
gsantovena/marathon
tests/integration/src/test/scala/mesosphere/marathon/integration/setup/MarathonTest.scala
Scala
apache-2.0
39,834
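The MarathonFixture shown above wires a LocalMarathon, its facades and the SSE subscriber together around a single test body. A minimal usage sketch, assuming a ScalaTest 3.0-style FunSuite and that the Mesos/ZooKeeper infrastructure MarathonFixture mixes in is available; the suite name and assertion are illustrative only:

import mesosphere.marathon.integration.setup.MarathonFixture
import org.scalatest.{FunSuite, Matchers}

class WithMarathonSketch extends FunSuite with Matchers {
  test("a freshly started Marathon exposes its HTTP port") {
    // withMarathon starts the server, waits for health and the SSE connection,
    // runs the block, then cancels the stream and stops the server.
    MarathonFixture.withMarathon(suiteName = "sketch-suite") { (server, f) =>
      f.marathonUrl should startWith("http://localhost:")
    }
  }
}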
package feh.tec.agent.game import feh.tec.util._ import concurrent.{Promise, ExecutionContext, Future, Await} import java.util.UUID import akka.pattern._ import scala.concurrent.duration._ import scala.collection.mutable import akka.actor.{ActorSystem, Props, Actor, ActorRef} import feh.tec.util.HasUUID.AsyncSendMsgHasUUIDWrapper import akka.event.Logging import feh.tec.agent._ import scala.Some import feh.tec.agent.AgentDecision.ExplainedActionStub import feh.tec.agent.AgentId import akka.util.Timeout trait GameEnvironment[Game <: AbstractGame, Env <: GameEnvironment[Game, Env]] extends Environment[Env]{ self : Env => final type Coordinate = Null final type State = Null final type Global = GameScore[Game] type Action <: GameAction type Ref <: GameRef[Game, Env] final type Choice = StrategicChoice[Game#Player] final type Score = GameScore[Game] def game: Game def play(choices: StrategicChoices[Game#Player]): Score implicit def utilityIsNumeric = game.utilityIsNumeric.asInstanceOf[Numeric[Game#Utility]] def updateScores(scoresUpdate: Score) def setScore(score: Score) def bulkAffected(act: StrategicChoices[Game#Player]): SideEffect[Env] // those are not used def states: PartialFunction[Null, Null] = PartialFunction.empty def definedAt = Nil def stateOf(c: Null) = None def visibleStates = Map() def agentPosition(ag: AgentId) = None def effects: PartialFunction[GameAction, Env => Env] = PartialFunction.empty } trait GameAction extends AbstractAction case class StrategicChoice[P <: AbstractGame#Player](player: P, strategy: P#Strategy) extends GameAction case class StrategicChoices[P <: AbstractGame#Player](choices: Set[StrategicChoice[P]]) extends GameAction{ def toMap: Map[P, P#Strategy] = choices.map(ch => ch.player -> ch.strategy).toMap } case class GameScore[Game <: AbstractGame](utility: Map[Game#Player, Game#Utility])(implicit num: Numeric[Game#Utility]){ def update(scoreUpdates: Map[Game#Player, Game#Utility]): GameScore[Game] = GameScore(utility.zipByKey(scoreUpdates).mapValues((num.plus _).tupled)) def update(scoreUpdate: GameScore[Game]): GameScore[Game] = update(scoreUpdate.utility) } object GameScore{ def zero[Game <: AbstractGame](strategy: Game) = GameScore[Game](strategy.players.map(_ -> strategy.utilityIsNumeric.zero).toMap)(strategy.utilityIsNumeric.asInstanceOf[Numeric[Game#Utility]]) } trait DeterministicGameEnvironment[Game <: AbstractDeterministicGame, Env <: DeterministicGameEnvironment[Game, Env]] extends GameEnvironment[Game, Env] with Deterministic[Env] { self: Env => // Map[PlayersChoices, PlayersUtility] // type PlayersChoices = Map[Player, Player#Game] // type PlayersUtility = Map[Player, Utility] // final type Player = Game#Player private def strategies = game.layout.asInstanceOf[PartialFunction[Map[Player, Player#Strategy], Map[Player, Game#Utility]]] def play(choices: StrategicChoices[Game#Player]): Score = GameScore[Game](strategies(choices.toMap)) } trait MutableGameEnvironmentImpl[Game <: AbstractGame, Env <: MutableGameEnvironmentImpl[Game, Env]] extends GameEnvironment[Game, Env] with MutableEnvironment[Env] { self: Env => def initGlobalState: GameScore[Game] = GameScore.zero(game) def affected(act: Env#Action): SideEffect[Env] = ??? 
def bulkAffected(act: StrategicChoices[Game#Player]): SideEffect[Env] = SideEffect{ val score = play(act) _lastScore = Option(score) updateScores(score) this } protected var _lastScore: Option[GameScore[Game]] = None def lastScore: Option[GameScore[Game]] = _lastScore def updateScores(scoresUpdate: Score){ globalState = globalState.update(scoresUpdate) } def setScore(score: Score){ globalState = score } def initStates: PartialFunction[Null, Null] = null override def states = super[GameEnvironment].states } case class Turn protected (id: Int){ def next: Turn = copy(id+1) } object Turn{ def first = Turn(0) } trait GameRef[Game <: AbstractGame, Env <: GameEnvironment[Game, Env]] extends EnvironmentRef[Env] { def turn: Turn // def asyncTurn: Future[Turn] def choose(choice: StrategicChoice[Game#Player]) def chooseAndWait(choice: StrategicChoice[Game#Player]) def strategies: Game def listenToEndOfTurn(f: (Turn, Game#PlayersChoices, Game#PlayersUtility) => Unit) // def blocking: BlockingApi = ??? // def async: AsyncApi = ??? } trait GameCoordinator[Game <: AbstractGame, Env <: GameEnvironment[Game, Env]] extends EnvironmentOverseer[Env]{ def currentTurn: Turn def registerChoice(choice: StrategicChoice[Game#Player]) def registerChoiceAndWait(choice: StrategicChoice[Game#Player]) def reset() // no snapshots def snapshot: EnvironmentSnapshot[Env] with Env = ??? } trait GameCoordinatorWithActor[Game <: AbstractGame, Env <: GameEnvironment[Game, Env]] extends GameCoordinator[Game, Env] with EnvironmentOverseerWithActor[Env] { coordinator => import GameCoordinatorActor._ def awaitEndOfTurnTimeout: FiniteDuration def bulkAffect = env.bulkAffected _ def currentTurn: Turn = Await.result(asyncCurrentTurn, defaultBlockingTimeout millis) def asyncCurrentTurn: Future[Turn] = GetTurn() |> { msg => actorRef.send(msg).awaitingResponse[TurnResponse](defaultFutureTimeout millis).map(_.turn) //(actorRef ? msg)(defaultFutureTimeout).mapTo[TurnResponse].havingSameUUID(msg).map(_.turn) } def registerChoice(choice: StrategicChoice[Game#Player]): Unit = actorRef ! RegisterChoice(choice) def registerChoiceAndWait(choice: StrategicChoice[Game#Player]): Unit = RegisterChoiceAndWait(choice) |> { msg => implicit def timeout = awaitEndOfTurnTimeout: Timeout Await .result(actorRef ? msg, awaitEndOfTurnTimeout) .tryAs[TurnEnded].havingSameUUID(msg).ensuring(_.nonEmpty) } trait GameRefBaseImpl extends GameRef[Game, Env] with BaseEnvironmentRef{ def turn: Turn = currentTurn def choose(choice: StrategicChoice[Game#Player]): Unit = registerChoice(choice) def chooseAndWait(choice: StrategicChoice[Game#Player]): Unit = registerChoiceAndWait(choice) def strategies: Game = coordinator.env.game def listenToEndOfTurn(f: (Turn, Game#PlayersChoices, Game#PlayersUtility) => Unit): Unit = coordinator.listenToEndOfTurn(f) } def listenToEndOfTurn(f: (Turn, Game#PlayersChoices, Game#PlayersUtility) => Unit) { _endOfTurnListeners :+= f } protected var _endOfTurnListeners: Seq[(Turn, Game#PlayersChoices, Game#PlayersUtility) => Unit] = Nil def endOfTurnListeners = _endOfTurnListeners protected def actorProps = Props(classOf[GameCoordinatorActor[Game, Env]], coordinator) def actorSystem: ActorSystem lazy val actorRef: ActorRef = actorSystem.actorOf(actorProps) def lastScore: Option[GameScore[Game]] def reset(): Unit = Await.ready((actorRef ? 
Reset)(defaultFutureTimeout), defaultFutureTimeout millis) } object GameCoordinatorActor{ case class GetTurn() extends UUIDed case class TurnResponse(uuid: UUID, turn: Turn) extends HasUUID case class RegisterChoice[Game <: AbstractGame](choice: StrategicChoice[Game#Player]) extends UUIDed case class RegisterChoiceAndWait[Game <: AbstractGame](choice: StrategicChoice[Game#Player]) extends UUIDed case class TurnEnded(uuid: UUID) extends HasUUID case object Reset } class GameCoordinatorActor[Game <: AbstractGame, Env <: GameEnvironment[Game, Env] { type Action = StrategicChoices[Game#Player] }] (coordinator: GameCoordinatorWithActor[Game, Env]) extends Actor { import GameCoordinatorActor._ val log = Logging(context.system, this) private var turn = Turn.first private val currentTurnChoicesMap = mutable.HashMap.empty[Game#Player, Game#Player#Strategy] private val awaitingEndOfTurn = mutable.HashMap.empty[ActorRef, UUID] protected def currentTurnChoices = currentTurnChoicesMap.map((StrategicChoice.apply[Game#Player] _).tupled).toSet protected def newChoice(choice: StrategicChoice[Game#Player]) = { assert(! currentTurnChoicesMap.keySet.contains(choice.player), s"${choice.player}'s choice has already been registered") currentTurnChoicesMap += choice.player -> choice.strategy } protected def awaiting(waiting: ActorRef, id: UUID) = { assert(!awaitingEndOfTurn.keySet.contains(waiting), s"$waiting is already waiting end of turn") awaitingEndOfTurn += waiting -> id } protected def nextTurn() = { val next = turn.next turn = next currentTurnChoicesMap.clear() awaitingEndOfTurn.clear() next } protected def turnFinished_? = currentTurnChoicesMap.keySet == coordinator.env.game.players protected def endTurn() = { coordinator.bulkAffect(StrategicChoices(currentTurnChoices)).flatExec coordinator.lastScore } protected def notifyAwaiting() = awaitingEndOfTurn.foreach{ case (waiting, id) => waiting ! TurnEnded(id) } protected val history = mutable.Map.empty[Turn, (Game#PlayersChoices, Game#PlayersUtility)] protected def guardHistory(score: GameScore[Game]){ val choices = currentTurnChoicesMap.toMap.asInstanceOf[Game#PlayersChoices] val utility = score.utility.asInstanceOf[Game#PlayersUtility] history += turn-> ( choices -> utility) lastHistory = (turn, choices, utility) } private var lastHistory: (Turn, Game#PlayersChoices, Game#PlayersUtility) = _ protected def notifyEndOfTurnListeners() = coordinator.endOfTurnListeners.foreach(t => Future{ t.tupled(lastHistory) }(context.dispatcher)) private case class TurnFinished(id: Long) def receive: Actor.Receive = { case msg@GetTurn() => sender ! TurnResponse(msg.uuid, turn) case TurnFinished(id) if id == turn.id => val Some(score) = endTurn() guardHistory(score) notifyAwaiting() notifyEndOfTurnListeners() nextTurn() case RegisterChoice(choice) => newChoice(choice.asInstanceOf[StrategicChoice[Game#Player]]) case msg@RegisterChoiceAndWait(choice) => newChoice(choice.asInstanceOf[StrategicChoice[Game#Player]]) awaiting(sender, msg.uuid) if(turnFinished_?) self ! TurnFinished(turn.id) case Reset => notifyAwaiting() turn = Turn.first currentTurnChoicesMap.clear() awaitingEndOfTurn.clear() history.clear() lastHistory = null coordinator.env.setScore(GameScore.zero(coordinator.env.game)) notifyEndOfTurnListeners() sender ! 
Reset } } trait MutableGameCoordinator[Game <: AbstractGame, Env <: MutableGameEnvironmentImpl[Game, Env]] extends GameCoordinator[Game, Env] with MutableEnvironmentOverseer[Env] { def lastScore: Option[GameScore[Game]] = env.lastScore } trait AbstractGame{ type Utility implicit def utilityIsNumeric: Numeric[Utility] lazy val playerNameRegex = """.*\\$(\\w+)\\$(\\w+)\\$.*""".r trait Player{ def name: String type Strategy def availableStrategies: Set[Strategy] override def toString: String = playerNameRegex.findAllIn(getClass.getName).matchData.toSeq.head |> { mtch => mtch.group(1) + "#" + mtch.group(2) } } type PlayersChoices = Map[Player, Player#Strategy] type PlayersUtility = Map[Player, Utility] // case class PlayerChoice[P <: PlayerRef](player: P, strategy: P#Strategy) // case class PlayerUtility[P <: PlayerRef](player: P, utility: Utility) sealed trait Target case object Max extends Target case object Min extends Target def target: Target def nPlayers: Int def players: Set[Player] def layout: PlayersChoices => PlayersUtility } trait AbstractDeterministicGame extends AbstractGame{ // override val layout: Map[PlayersChoices, PlayersUtility] } trait AbstractTurnBasedGame extends AbstractGame{ def playersTurnOrdering: Ordering[Player] def playersInTurnOrder = players.toList.sorted(playersTurnOrdering) } object PlayerAgent { type Exec[Game <: AbstractGame, Env <: GameEnvironment[Game, Env]] = SimultaneousAgentsExecutor trait Resettable[Game <: AbstractGame, Env <: GameEnvironment[Game, Env]] extends PlayerAgent[Game, Env]{ agent: DecisiveAgent[Env, PlayerAgent.Exec[Game, Env]] => def reset() } trait RandomBehaviour[Game <: AbstractGame, Env <: GameEnvironment[Game, Env]] extends PlayerAgent[Game, Env]{ agent: DecisiveAgent[Env, PlayerAgent.Exec[Game, Env]] => def randomChance: InUnitInterval def randomChance_=(p: InUnitInterval) def preference: collection.Map[Game#Player#Strategy, Double] with ValueSumInUnitInterval[Game#Player#Strategy] def preference_=(pref: collection.Map[Game#Player#Strategy, Double] with ValueSumInUnitInterval[Game#Player#Strategy]) def updatePreference(strategy: Game#Player#Strategy, prob: Double) } } trait PlayerAgent[Game <: AbstractGame, Env <: GameEnvironment[Game, Env]] extends Agent[Env, PlayerAgent.Exec[Game, Env]] with SimultaneousAgentExecution[Env, PlayerAgent.Exec[Game, Env]] { agent: DecisiveAgent[Env, PlayerAgent.Exec[Game, Env]] => def player: Game#Player executionLoop.register(this) protected def actionToStrategicChoice(a: Env#Action): Option[StrategicChoice[Game#Player]] override def act(a: Env#Action): SideEffect[Env#Ref] = SideEffect{ actionToStrategicChoice(a).map(env.chooseAndWait) env }.flatExec } trait SimplePlayer[Game <: AbstractGame, Env <: GameEnvironment[Game, Env]] extends PlayerAgent[Game, Env] with SimpleAgent[Env, PlayerAgent.Exec[Game, Env]] { type ActionExplanation = ExplainedActionStub[Env#Action] type DetailedPerception = AbstractDetailedPerception type Perception = Game def sense(env: EnvRef): Perception = env.strategies def detailed(env: EnvRef, c: Null): Option[AbstractDetailedPerception] = None } trait WiserPlayer[Game <: AbstractGame, Env <: GameEnvironment[Game, Env]] extends PlayerAgent[Game, Env] with WiserAgent[Env, PlayerAgent.Exec[Game, Env]] { type Perception = (Game, P) def sense(env: EnvRef): Perception = env.strategies -> past def detailed(env: EnvRef, c: Env#Coordinate) = None } trait StatefulPlayer[Game <: AbstractGame, Env <: GameEnvironment[Game, Env], State] extends PlayerAgent[Game, Env] with StatefulAgent[Env, 
PlayerAgent.Exec[Game, Env], State] { self: AgentWithActor[Env, PlayerAgent.Exec[Game, Env]] => type Perception = (Game, State) def sense(env: EnvRef): Perception = env.strategies -> state def detailed(env: EnvRef, c: Env#Coordinate) = None } trait ByTurnExec[Game <: AbstractGame, Env <: GameEnvironment[Game, Env]] extends PlayerAgent.Exec[Game, Env]{ type Execution = Exec trait Exec{ def nextTurn(): Future[Exec] } implicit def executionContext: ExecutionContext def isCurrentlyExecuting: Boolean = executing_? private var executing_? = false lazy val execution = new Exec { exc => def nextTurn(): Future[Exec] = { if (executing_?) return Promise.failed[Exec](GameException("still waiting for all players to finish the previous turn")).future println("executing next turn") executing_? = true val f = exec() f onComplete { case _ => executing_? = false } f onFailure { case thr => throw thr } f map { _ => onSuccess() exc //Await.result( } } } def onSuccess: () => Unit protected def exec() = Future.sequence[SideEffect[Env#Ref], Set](agents.map(ag => { Future { ag.lifetimeCycle(ag.env).andThen(_.asInstanceOf[Env#Ref]) } })) private val _agents = mutable.HashSet.empty[Ag] def register(agent: Ag*) { _agents ++= agent } protected def agents: Set[Ag] = _agents.toSet def pauseBetweenExecs: FiniteDuration = null } case class GameException(msg: String) extends Exception(msg)
fehu/agent-tareas
agent/src/main/scala/feh/tec/agent/game/Game.scala
Scala
mit
15,721
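GameScore.update above folds a turn's per-player utility deltas into the running score by pairing the two maps key by key and summing with the game's Numeric. A self-contained analogue of that merge (it does not use the library's zipByKey helper, and the player names and numbers are made up):

object ScoreMergeSketch {
  // Mirrors the intent of GameScore.update: combine two per-player utility maps with Numeric#plus.
  def merge[P, U](total: Map[P, U], delta: Map[P, U])(implicit num: Numeric[U]): Map[P, U] =
    total.map { case (player, u) => player -> num.plus(u, delta.getOrElse(player, num.zero)) }

  def main(args: Array[String]): Unit = {
    val before = Map("row" -> 3, "col" -> 1)
    val turn   = Map("row" -> 0, "col" -> 5)
    println(merge(before, turn)) // Map(row -> 3, col -> 6)
  }
}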
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.catalyst.expressions import java.util.{Objects, UUID} import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.analysis.UnresolvedAttribute import org.apache.spark.sql.catalyst.expressions.codegen._ import org.apache.spark.sql.catalyst.plans.logical.EventTimeWatermark import org.apache.spark.sql.catalyst.trees.TreePattern import org.apache.spark.sql.catalyst.trees.TreePattern.ATTRIBUTE_REFERENCE import org.apache.spark.sql.catalyst.util.quoteIfNeeded import org.apache.spark.sql.errors.QueryExecutionErrors import org.apache.spark.sql.types._ import org.apache.spark.util.collection.BitSet import org.apache.spark.util.collection.ImmutableBitSet object NamedExpression { private val curId = new java.util.concurrent.atomic.AtomicLong() private[expressions] val jvmId = UUID.randomUUID() def newExprId: ExprId = ExprId(curId.getAndIncrement(), jvmId) def unapply(expr: NamedExpression): Option[(String, DataType)] = Some((expr.name, expr.dataType)) } /** * A globally unique id for a given named expression. * Used to identify which attribute output by a relation is being * referenced in a subsequent computation. * * The `id` field is unique within a given JVM, while the `uuid` is used to uniquely identify JVMs. */ case class ExprId(id: Long, jvmId: UUID) { override def equals(other: Any): Boolean = other match { case ExprId(id, jvmId) => this.id == id && this.jvmId == jvmId case _ => false } override def hashCode(): Int = id.hashCode() } object ExprId { def apply(id: Long): ExprId = ExprId(id, NamedExpression.jvmId) } /** * An [[Expression]] that is named. */ trait NamedExpression extends Expression { /** We should never fold named expressions in order to not remove the alias. */ override def foldable: Boolean = false def name: String def exprId: ExprId /** * Returns a dot separated fully qualified name for this attribute. Given that there can be * multiple qualifiers, it is possible that there are other possible way to refer to this * attribute. */ def qualifiedName: String = (qualifier :+ name).mkString(".") /** * Optional qualifier for the expression. * Qualifier can also contain the fully qualified information, for e.g, Sequence of string * containing the database and the table name * * For now, since we do not allow using original table name to qualify a column name once the * table is aliased, this can only be: * * 1. Empty Seq: when an attribute doesn't have a qualifier, * e.g. top level attributes aliased in the SELECT clause, or column from a LocalRelation. * 2. Seq with a Single element: either the table name or the alias name of the table. * 3. Seq with 2 elements: database name and table name * 4. 
Seq with 3 elements: catalog name, database name and table name */ def qualifier: Seq[String] def toAttribute: Attribute /** Returns the metadata when an expression is a reference to another expression with metadata. */ def metadata: Metadata = Metadata.empty /** Returns a copy of this expression with a new `exprId`. */ def newInstance(): NamedExpression protected def typeSuffix = if (resolved) { dataType match { case LongType => "L" case _ => "" } } else { "" } } abstract class Attribute extends LeafExpression with NamedExpression with NullIntolerant { @transient override lazy val references: AttributeSet = AttributeSet(this) def withNullability(newNullability: Boolean): Attribute def withQualifier(newQualifier: Seq[String]): Attribute def withName(newName: String): Attribute def withMetadata(newMetadata: Metadata): Attribute def withExprId(newExprId: ExprId): Attribute override def toAttribute: Attribute = this def newInstance(): Attribute } /** * Used to assign a new name to a computation. * For example the SQL expression "1 + 1 AS a" could be represented as follows: * Alias(Add(Literal(1), Literal(1)), "a")() * * Note that exprId and qualifiers are in a separate parameter list because * we only pattern match on child and name. * * Note that when creating a new Alias, all the [[AttributeReference]] that refer to * the original alias should be updated to the new one. * * @param child The computation being performed * @param name The name to be associated with the result of computing [[child]]. * @param exprId A globally unique id used to check if an [[AttributeReference]] refers to this * alias. Auto-assigned if left blank. * @param qualifier An optional Seq of string that can be used to refer to this attribute in a * fully qualified way. Consider the examples tableName.name, subQueryAlias.name. * tableName and subQueryAlias are possible qualifiers. * @param explicitMetadata Explicit metadata associated with this alias that overwrites child's. * @param nonInheritableMetadataKeys Keys of metadata entries that are supposed to be removed when * inheriting the metadata from the child. */ case class Alias(child: Expression, name: String)( val exprId: ExprId = NamedExpression.newExprId, val qualifier: Seq[String] = Seq.empty, val explicitMetadata: Option[Metadata] = None, val nonInheritableMetadataKeys: Seq[String] = Seq.empty) extends UnaryExpression with NamedExpression { // Alias(Generator, xx) need to be transformed into Generate(generator, ...) override lazy val resolved = childrenResolved && checkInputDataTypes().isSuccess && !child.isInstanceOf[Generator] override def eval(input: InternalRow): Any = child.eval(input) /** Just a simple passthrough for code generation. 
*/ override def genCode(ctx: CodegenContext): ExprCode = child.genCode(ctx) override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { throw QueryExecutionErrors.doGenCodeOfAliasShouldNotBeCalledError } override def dataType: DataType = child.dataType override def nullable: Boolean = child.nullable override def metadata: Metadata = { explicitMetadata.getOrElse { child match { case named: NamedExpression => val builder = new MetadataBuilder().withMetadata(named.metadata) nonInheritableMetadataKeys.foreach(builder.remove) builder.build() case _ => Metadata.empty } } } def newInstance(): NamedExpression = Alias(child, name)( qualifier = qualifier, explicitMetadata = explicitMetadata, nonInheritableMetadataKeys = nonInheritableMetadataKeys) override def toAttribute: Attribute = { if (resolved) { AttributeReference(name, child.dataType, child.nullable, metadata)(exprId, qualifier) } else { UnresolvedAttribute(name) } } /** Used to signal the column used to calculate an eventTime watermark (e.g. a#1-T{delayMs}) */ private def delaySuffix = if (metadata.contains(EventTimeWatermark.delayKey)) { s"-T${metadata.getLong(EventTimeWatermark.delayKey)}ms" } else { "" } override def toString: String = s"$child AS $name#${exprId.id}$typeSuffix$delaySuffix" override protected final def otherCopyArgs: Seq[AnyRef] = { exprId :: qualifier :: explicitMetadata :: nonInheritableMetadataKeys :: Nil } override def hashCode(): Int = { val state = Seq(name, exprId, child, qualifier, explicitMetadata) state.map(Objects.hashCode).foldLeft(0)((a, b) => 31 * a + b) } override def equals(other: Any): Boolean = other match { case a: Alias => name == a.name && exprId == a.exprId && child == a.child && qualifier == a.qualifier && explicitMetadata == a.explicitMetadata && nonInheritableMetadataKeys == a.nonInheritableMetadataKeys case _ => false } override def sql: String = { val qualifierPrefix = if (qualifier.nonEmpty) qualifier.map(quoteIfNeeded).mkString(".") + "." else "" s"${child.sql} AS $qualifierPrefix${quoteIfNeeded(name)}" } override protected def withNewChildInternal(newChild: Expression): Alias = copy(child = newChild)(exprId, qualifier, explicitMetadata, nonInheritableMetadataKeys) } // Singleton tree pattern BitSet for all AttributeReference instances. object AttributeReferenceTreeBits { val bits: BitSet = new ImmutableBitSet(TreePattern.maxId, ATTRIBUTE_REFERENCE.id) } /** * A reference to an attribute produced by another operator in the tree. * * @param name The name of this attribute, should only be used during analysis or for debugging. * @param dataType The [[DataType]] of this attribute. * @param nullable True if null is a valid value for this attribute. * @param metadata The metadata of this attribute. * @param exprId A globally unique id used to check if different AttributeReferences refer to the * same attribute. * @param qualifier An optional string that can be used to referred to this attribute in a fully * qualified way. Consider the examples tableName.name, subQueryAlias.name. * tableName and subQueryAlias are possible qualifiers. */ case class AttributeReference( name: String, dataType: DataType, nullable: Boolean = true, override val metadata: Metadata = Metadata.empty)( val exprId: ExprId = NamedExpression.newExprId, val qualifier: Seq[String] = Seq.empty[String]) extends Attribute with Unevaluable { override lazy val treePatternBits: BitSet = AttributeReferenceTreeBits.bits /** * Returns true iff the expression id is the same for both attributes. 
*/ def sameRef(other: AttributeReference): Boolean = this.exprId == other.exprId override def equals(other: Any): Boolean = other match { case ar: AttributeReference => name == ar.name && dataType == ar.dataType && nullable == ar.nullable && metadata == ar.metadata && exprId == ar.exprId && qualifier == ar.qualifier case _ => false } override def semanticEquals(other: Expression): Boolean = other match { case ar: AttributeReference => sameRef(ar) case _ => false } override def semanticHash(): Int = { this.exprId.hashCode() } override def hashCode: Int = { // See http://stackoverflow.com/questions/113511/hash-code-implementation var h = 17 h = h * 37 + name.hashCode() h = h * 37 + dataType.hashCode() h = h * 37 + nullable.hashCode() h = h * 37 + metadata.hashCode() h = h * 37 + exprId.hashCode() h = h * 37 + qualifier.hashCode() h } override def newInstance(): AttributeReference = AttributeReference(name, dataType, nullable, metadata)(qualifier = qualifier) /** * Returns a copy of this [[AttributeReference]] with changed nullability. */ override def withNullability(newNullability: Boolean): AttributeReference = { if (nullable == newNullability) { this } else { AttributeReference(name, dataType, newNullability, metadata)(exprId, qualifier) } } override def withName(newName: String): AttributeReference = { if (name == newName) { this } else { AttributeReference(newName, dataType, nullable, metadata)(exprId, qualifier) } } /** * Returns a copy of this [[AttributeReference]] with new qualifier. */ override def withQualifier(newQualifier: Seq[String]): AttributeReference = { if (newQualifier == qualifier) { this } else { AttributeReference(name, dataType, nullable, metadata)(exprId, newQualifier) } } override def withExprId(newExprId: ExprId): AttributeReference = { if (exprId == newExprId) { this } else { AttributeReference(name, dataType, nullable, metadata)(newExprId, qualifier) } } override def withMetadata(newMetadata: Metadata): AttributeReference = { AttributeReference(name, dataType, nullable, newMetadata)(exprId, qualifier) } override protected final def otherCopyArgs: Seq[AnyRef] = { exprId :: qualifier :: Nil } /** Used to signal the column used to calculate an eventTime watermark (e.g. a#1-T{delayMs}) */ private def delaySuffix = if (metadata.contains(EventTimeWatermark.delayKey)) { s"-T${metadata.getLong(EventTimeWatermark.delayKey)}ms" } else { "" } override def toString: String = s"$name#${exprId.id}$typeSuffix$delaySuffix" // Since the expression id is not in the first constructor it is missing from the default // tree string. override def simpleString(maxFields: Int): String = { s"$name#${exprId.id}: ${dataType.simpleString(maxFields)}" } override def sql: String = { val qualifierPrefix = if (qualifier.nonEmpty) qualifier.map(quoteIfNeeded).mkString(".") + "." else "" s"$qualifierPrefix${quoteIfNeeded(name)}" } } /** * A place holder used when printing expressions without debugging information such as the * expression id or the unresolved indicator. 
*/ case class PrettyAttribute( name: String, dataType: DataType = NullType) extends Attribute with Unevaluable { def this(attribute: Attribute) = this(attribute.name, attribute match { case a: AttributeReference => a.dataType case a: PrettyAttribute => a.dataType case _ => NullType }) override def toString: String = name override def sql: String = toString override def withNullability(newNullability: Boolean): Attribute = throw new UnsupportedOperationException override def newInstance(): Attribute = throw new UnsupportedOperationException override def withQualifier(newQualifier: Seq[String]): Attribute = throw new UnsupportedOperationException override def withName(newName: String): Attribute = throw new UnsupportedOperationException override def withMetadata(newMetadata: Metadata): Attribute = throw new UnsupportedOperationException override def qualifier: Seq[String] = throw new UnsupportedOperationException override def exprId: ExprId = throw new UnsupportedOperationException override def withExprId(newExprId: ExprId): Attribute = throw new UnsupportedOperationException override def nullable: Boolean = true } /** * A place holder used to hold a reference that has been resolved to a field outside of the current * plan. This is used for correlated subqueries. */ case class OuterReference(e: NamedExpression) extends LeafExpression with NamedExpression with Unevaluable { override def dataType: DataType = e.dataType override def nullable: Boolean = e.nullable override def prettyName: String = "outer" override def sql: String = s"$prettyName(${e.sql})" override def name: String = e.name override def qualifier: Seq[String] = e.qualifier override def exprId: ExprId = e.exprId override def toAttribute: Attribute = e.toAttribute override def newInstance(): NamedExpression = OuterReference(e.newInstance()) } object VirtualColumn { // The attribute name used by Hive, which has different result than Spark, deprecated. val hiveGroupingIdName: String = "grouping__id" val groupingIdName: String = "spark_grouping_id" val groupingIdAttribute: UnresolvedAttribute = UnresolvedAttribute(groupingIdName) }
BryanCutler/spark
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala
Scala
apache-2.0
15,984
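The Alias scaladoc above gives "1 + 1 AS a" as Alias(Add(Literal(1), Literal(1)), "a")(). A small sketch along those lines, using only Catalyst's internal API (not a stable public interface, so the exact printed output can vary between Spark versions):

import org.apache.spark.sql.catalyst.expressions.{Add, Alias, Literal}

object AliasSketch {
  def main(args: Array[String]): Unit = {
    // "1 + 1 AS a" as a Catalyst expression tree.
    val aliased = Alias(Add(Literal(1), Literal(1)), "a")()
    // The child is resolved, so toAttribute yields an AttributeReference
    // carrying the alias' name and exprId.
    println(aliased.toAttribute) // e.g. a#0
    println(aliased.sql)         // e.g. (1 + 1) AS a
  }
}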
package com.github.izmailoff.marshalling.objectid import org.bson.types.ObjectId import spray.httpx.unmarshalling.{Deserializer, MalformedContent} import spray.routing.PathMatcher trait ObjectIdSupport { implicit val String2ObjectIdDeserializer = new Deserializer[String, ObjectId] { def apply(value: String) = if (ObjectId.isValid(value)) Right(new ObjectId(value)) else Left(MalformedContent("'" + value + "' is not a valid ObjectId value")) } /** * A PathMatcher that matches and extracts an ObjectId instance. */ val ObjectIdSegment = PathMatcher( """^[0-9a-fA-F]{24}$""".r).flatMap { str => // TODO: no need for regex! - double check if (ObjectId.isValid(str)) Some(new ObjectId(str)) else None } }
izmailoff/Spray-mongo-record
marshallers/src/main/scala/com/github/izmailoff/marshalling/objectid/ObjectIdSupport.scala
Scala
mit
764
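A sketch of how the deserializer and the ObjectIdSegment matcher above might be wired into a spray route; the service trait, path and response text are illustrative and not part of the library:

import org.bson.types.ObjectId
import spray.routing.HttpService
import com.github.izmailoff.marshalling.objectid.ObjectIdSupport

trait UserRoutesSketch extends HttpService with ObjectIdSupport {
  // The request is rejected unless the path segment parses as a valid 24-hex-char ObjectId.
  val userRoute =
    path("users" / ObjectIdSegment) { id: ObjectId =>
      get {
        complete(s"found user ${id.toHexString}")
      }
    }
}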
package controllers import javax.inject.{Inject, Singleton} import io.flow.healthcheck.v0.models.json._ import io.flow.healthcheck.v0.models.Healthcheck import play.api.mvc._ import play.api.libs.json._ @Singleton class Healthchecks @Inject() ( val controllerComponents: ControllerComponents ) extends BaseController { private val HealthyJson = Json.toJson(Healthcheck(status = "healthy")) def getHealthcheck() = Action { _ => Ok(HealthyJson) } }
flowcommerce/dependency
api/app/controllers/Healthchecks.scala
Scala
mit
465
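A hypothetical spec sketch exercising the controller above with Play's test helpers; the spec class, the route string and the scalatestplus-play dependency are assumptions, not part of the project shown:

import controllers.Healthchecks
import play.api.test.{FakeRequest, Helpers}
import play.api.test.Helpers._
import org.scalatestplus.play.PlaySpec

class HealthchecksSketchSpec extends PlaySpec {
  "getHealthcheck" must {
    "answer with a healthy status" in {
      // Build the controller directly with stub components instead of a full application.
      val controller = new Healthchecks(Helpers.stubControllerComponents())
      val result = controller.getHealthcheck().apply(FakeRequest("GET", "/_internal_/healthcheck"))
      status(result) mustBe OK
      contentAsString(result) must include("healthy")
    }
  }
}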
package tech.sda.arcana.spark.classification.cnn

import com.intel.analytics.bigdl.nn.ClassNLLCriterion
import tech.sda.arcana.spark.neuralnetwork.model.DyLeNet5Model
import com.intel.analytics.bigdl.nn.L1Cost
import tech.sda.arcana.spark.neuralnetwork.model.AlexNetModel
import tech.sda.arcana.spark.neuralnetwork.model.GoogLeNetModel
import tech.sda.arcana.spark.neuralnetwork.model.LeNet5Model
import com.intel.analytics.bigdl.optim._
import org.apache.spark.rdd.RDD
import com.intel.analytics.bigdl.dataset.Sample
import com.intel.analytics.bigdl.convCriterion
import com.intel.analytics.bigdl.dataset.MiniBatch
import com.intel.analytics.bigdl.visualization._

/** A class that trains a chosen neural network model with a chosen loss function
  * @param lossfun 1 for L1Cost, 2 for ClassNLLCriterion
  * @param model 1 for AlexNetModel, 2 for GoogLeNetModel, 3 for dynamic LeNet5Model
  * @param height the longest question word sequence (the height of the network input)
  * @param width the length of the vector representation of each word
  * @param classNum the number of output classes
  * @param validation whether to evaluate the model on the test samples
  * @param learningrate the learning rate used in back propagation
  */
class Trainer(lossfun: Int, model: Int, height: Int, width: Int, classNum: Int, validation: Boolean, learningrate: Float) extends Serializable {
  val lossFunctions = Array(L1Cost[Float](), ClassNLLCriterion[Float]())
  var logdir: String = ""
  var appName: String = ""
  var testData: RDD[Sample[Float]] = null
  var batchS = 0
  var visual: Boolean = false
  var epochNum: Int = 0
  var accuValid: Float = 0.0f

  /** Builds an optimizer that trains the selected neural network model
    * on a training set with a given batch size
    * @param samples the RDD of training samples
    * @param batch the mini-batch size
    */
  def build(samples: RDD[Sample[Float]], batch: Int): Optimizer[Float, MiniBatch[Float]] = {
    // The code below is repetitive because the models are Scala singleton objects:
    // an abstract parent class does not help, since the return type would then be the
    // parent's, and a match expression does not help either, since its result type
    // would widen to a common supertype.
    if (model == 1) {
      val optimizer = Optimizer(
        model = AlexNetModel.build(height, width, classNum),
        sampleRDD = samples,
        criterion = lossFunctions(lossfun - 1),
        batchSize = batch
      )
      if (visual) setMonitorPara(optimizer)
      return optimizer
    }
    if (model == 2) {
      val optimizer = Optimizer(
        model = GoogLeNetModel.build(height, width, classNum),
        sampleRDD = samples,
        criterion = lossFunctions(lossfun - 1),
        batchSize = batch
      )
      if (visual) setMonitorPara(optimizer)
      return optimizer
    }
    if (model == 3) {
      val optimizer = Optimizer(
        model = DyLeNet5Model.build(height, width, classNum),
        sampleRDD = samples,
        criterion = lossFunctions(lossfun - 1),
        batchSize = batch
      )
      if (visual) setMonitorPara(optimizer)
      return optimizer
    }
    // The general (fallback) case, which also fixes the return type
    val optimizer = Optimizer(
      model = LeNet5Model.build(5),
      sampleRDD = samples,
      criterion = lossFunctions(lossfun - 1),
      batchSize = batch
    )
    if (visual) setMonitorPara(optimizer)
    return optimizer
  }

  /** Sets the monitoring (TensorBoard) parameters and the end trigger on the given optimizer
    * @param optimizer the optimizer to be altered
    */
  def setMonitorPara(optimizer: Optimizer[Float, MiniBatch[Float]]) {
    val trainSummary = TrainSummary(logdir, appName)
    optimizer.setTrainSummary(trainSummary)
    if (validation) {
      val validationSummary = ValidationSummary(logdir, appName)
      optimizer.setValidationSummary(validationSummary)
      optimizer.setValidation(Trigger.everyEpoch, testData, Array(new Top1Accuracy), batchS)
      if (epochNum == 0) {
        optimizer.setEndWhen(Trigger.minLoss(accuValid))
      } else {
        optimizer.setEndWhen(Trigger.maxEpoch(epochNum))
      }
    } else {
      optimizer.setEndWhen(Trigger.maxEpoch(epochNum))
    }
    // the learning rate is configured here (e.g. 0.001)
    val optimMethod = new SGD[Float](learningRate = learningrate, learningRateDecay = 0.0002)
    optimizer.setOptimMethod(optimMethod)
  }

  /** Enables the visualisation and validation data used in TensorBoard later on
    * @param logdir the directory that holds the TensorBoard data
    * @param appName the name under which this visualisation data is stored
    * @param testData the RDD used for validation
    * @param batchS the batch size used for validation
    * @param minloss the loss value at which training stops (used when maxEpochs is 0)
    * @param maxEpochs the maximum number of epochs
    */
  def visualiseAndValidate(logdir: String, appName: String, testData: RDD[Sample[Float]], batchS: Int, minloss: Float, maxEpochs: Int) {
    visual = true
    this.logdir = logdir
    this.appName = appName
    this.testData = testData
    this.batchS = batchS
    accuValid = minloss
    epochNum = maxEpochs
  }

  /** Enables the visualisation data used in TensorBoard later on
    * @param logdir the directory that holds the TensorBoard data
    * @param appName the name under which this visualisation data is stored
    * @param maxEpochs the maximum number of epochs
    */
  def visualise(logdir: String, appName: String, maxEpochs: Int) {
    visual = true
    this.logdir = logdir
    this.appName = appName
    epochNum = maxEpochs
  }
}
SmartDataAnalytics/ARCANA
src/main/scala/tech/sda/arcana/spark/classification/cnn/Trainer.scala
Scala
gpl-3.0
5,800
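A hypothetical driver for the Trainer above: loss code 2 selects ClassNLLCriterion, model code 3 selects the dynamic LeNet-5, and the dimensions, class count, paths, batch size and epoch count are placeholders. Note that visualise is called before build so the TensorBoard summaries and the end trigger are attached to the optimizer:

import org.apache.spark.rdd.RDD
import com.intel.analytics.bigdl.dataset.Sample
import tech.sda.arcana.spark.classification.cnn.Trainer

object TrainerSketch {
  def run(trainSamples: RDD[Sample[Float]]): Unit = {
    // 2 = ClassNLLCriterion, 3 = dynamic LeNet-5; the remaining numbers are placeholders.
    val trainer = new Trainer(lossfun = 2, model = 3, height = 30, width = 200,
      classNum = 6, validation = false, learningrate = 0.001f)
    // Attach TensorBoard summaries and cap training at 10 epochs before building.
    trainer.visualise(logdir = "/tmp/arcana-logs", appName = "lenet5-sketch", maxEpochs = 10)
    val optimizer = trainer.build(trainSamples, batch = 32)
    optimizer.optimize() // returns the trained module
  }
}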
package forms import com.google.inject.Inject import models.token.JwtToken import play.api.data.Form import play.api.data.Forms._ import services.jwt.authenticator.JwtAuthenticator class JwtTokenForm @Inject()(jwtAuthenticator: JwtAuthenticator) { val form = Form( mapping( "jwtToken" -> text )(jwtAuthenticator.fromString)(jwtToken => Some(jwtToken.token)) ) }
GMadorell/play-jwt
app/forms/JwtTokenForm.scala
Scala
mit
382
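A sketch of binding the form above from raw request data; the surrounding class and the error handling are illustrative, and in the real application JwtTokenForm would arrive via dependency injection:

import forms.JwtTokenForm
import models.token.JwtToken

class TokenBindingSketch(jwtForm: JwtTokenForm) {
  // Binds the single "jwtToken" field and folds the result into an Either.
  def parse(raw: String): Either[Seq[String], JwtToken] =
    jwtForm.form.bind(Map("jwtToken" -> raw)).fold(
      formWithErrors => Left(formWithErrors.errors.map(_.message)),
      jwtToken => Right(jwtToken)
    )
}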
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.aggregate import java.util.concurrent.TimeUnit._ import scala.collection.mutable import org.apache.spark.TaskContext import org.apache.spark.memory.{SparkOutOfMemoryError, TaskMemoryManager} import org.apache.spark.rdd.RDD import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.errors._ import org.apache.spark.sql.catalyst.expressions._ import org.apache.spark.sql.catalyst.expressions.BindReferences.bindReferences import org.apache.spark.sql.catalyst.expressions.aggregate._ import org.apache.spark.sql.catalyst.expressions.codegen._ import org.apache.spark.sql.catalyst.expressions.codegen.Block._ import org.apache.spark.sql.catalyst.plans.physical._ import org.apache.spark.sql.catalyst.util.DateTimeConstants.NANOS_PER_MILLIS import org.apache.spark.sql.catalyst.util.truncatedString import org.apache.spark.sql.execution._ import org.apache.spark.sql.execution.metric.{SQLMetric, SQLMetrics} import org.apache.spark.sql.execution.vectorized.MutableColumnarRow import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.types.{CalendarIntervalType, DecimalType, StringType, StructType} import org.apache.spark.unsafe.KVIterator import org.apache.spark.util.Utils /** * Hash-based aggregate operator that can also fallback to sorting when data exceeds memory size. 
*/ case class HashAggregateExec( requiredChildDistributionExpressions: Option[Seq[Expression]], groupingExpressions: Seq[NamedExpression], aggregateExpressions: Seq[AggregateExpression], aggregateAttributes: Seq[Attribute], initialInputBufferOffset: Int, resultExpressions: Seq[NamedExpression], child: SparkPlan) extends BaseAggregateExec with BlockingOperatorWithCodegen with AliasAwareOutputPartitioning { require(HashAggregateExec.supportsAggregate(aggregateBufferAttributes)) override lazy val allAttributes: AttributeSeq = child.output ++ aggregateBufferAttributes ++ aggregateAttributes ++ aggregateExpressions.flatMap(_.aggregateFunction.inputAggBufferAttributes) override lazy val metrics = Map( "numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"), "peakMemory" -> SQLMetrics.createSizeMetric(sparkContext, "peak memory"), "spillSize" -> SQLMetrics.createSizeMetric(sparkContext, "spill size"), "aggTime" -> SQLMetrics.createTimingMetric(sparkContext, "time in aggregation build"), "avgHashProbe" -> SQLMetrics.createAverageMetric(sparkContext, "avg hash probe bucket list iters")) override def output: Seq[Attribute] = resultExpressions.map(_.toAttribute) override protected def outputExpressions: Seq[NamedExpression] = resultExpressions override def requiredChildDistribution: List[Distribution] = { requiredChildDistributionExpressions match { case Some(exprs) if exprs.isEmpty => AllTuples :: Nil case Some(exprs) if exprs.nonEmpty => ClusteredDistribution(exprs) :: Nil case None => UnspecifiedDistribution :: Nil } } // This is for testing. We force TungstenAggregationIterator to fall back to the unsafe row hash // map and/or the sort-based aggregation once it has processed a given number of input rows. private val testFallbackStartsAt: Option[(Int, Int)] = { sqlContext.getConf("spark.sql.TungstenAggregate.testFallbackStartsAt", null) match { case null | "" => None case fallbackStartsAt => val splits = fallbackStartsAt.split(",").map(_.trim) Some((splits.head.toInt, splits.last.toInt)) } } protected override def doExecute(): RDD[InternalRow] = attachTree(this, "execute") { val numOutputRows = longMetric("numOutputRows") val peakMemory = longMetric("peakMemory") val spillSize = longMetric("spillSize") val avgHashProbe = longMetric("avgHashProbe") val aggTime = longMetric("aggTime") child.execute().mapPartitionsWithIndex { (partIndex, iter) => val beforeAgg = System.nanoTime() val hasInput = iter.hasNext val res = if (!hasInput && groupingExpressions.nonEmpty) { // This is a grouped aggregate and the input iterator is empty, // so return an empty iterator. 
Iterator.empty } else { val aggregationIterator = new TungstenAggregationIterator( partIndex, groupingExpressions, aggregateExpressions, aggregateAttributes, initialInputBufferOffset, resultExpressions, (expressions, inputSchema) => MutableProjection.create(expressions, inputSchema), inputAttributes, iter, testFallbackStartsAt, numOutputRows, peakMemory, spillSize, avgHashProbe) if (!hasInput && groupingExpressions.isEmpty) { numOutputRows += 1 Iterator.single[UnsafeRow](aggregationIterator.outputForEmptyGroupingKeyWithoutInput()) } else { aggregationIterator } } aggTime += NANOSECONDS.toMillis(System.nanoTime() - beforeAgg) res } } // all the mode of aggregate expressions private val modes = aggregateExpressions.map(_.mode).distinct override def usedInputs: AttributeSet = inputSet override def supportCodegen: Boolean = { // ImperativeAggregate and filter predicate are not supported right now // TODO: SPARK-30027 Support codegen for filter exprs in HashAggregateExec !(aggregateExpressions.exists(_.aggregateFunction.isInstanceOf[ImperativeAggregate]) || aggregateExpressions.exists(_.filter.isDefined)) } override def inputRDDs(): Seq[RDD[InternalRow]] = { child.asInstanceOf[CodegenSupport].inputRDDs() } protected override def doProduce(ctx: CodegenContext): String = { if (groupingExpressions.isEmpty) { doProduceWithoutKeys(ctx) } else { doProduceWithKeys(ctx) } } override def doConsume(ctx: CodegenContext, input: Seq[ExprCode], row: ExprCode): String = { if (groupingExpressions.isEmpty) { doConsumeWithoutKeys(ctx, input) } else { doConsumeWithKeys(ctx, input) } } // The variables are used as aggregation buffers and each aggregate function has one or more // ExprCode to initialize its buffer slots. Only used for aggregation without keys. private var bufVars: Seq[Seq[ExprCode]] = _ private def doProduceWithoutKeys(ctx: CodegenContext): String = { val initAgg = ctx.addMutableState(CodeGenerator.JAVA_BOOLEAN, "initAgg") // The generated function doesn't have input row in the code context. 
ctx.INPUT_ROW = null // generate variables for aggregation buffer val functions = aggregateExpressions.map(_.aggregateFunction.asInstanceOf[DeclarativeAggregate]) val initExpr = functions.map(f => f.initialValues) bufVars = initExpr.map { exprs => exprs.map { e => val isNull = ctx.addMutableState(CodeGenerator.JAVA_BOOLEAN, "bufIsNull") val value = ctx.addMutableState(CodeGenerator.javaType(e.dataType), "bufValue") // The initial expression should not access any column val ev = e.genCode(ctx) val initVars = code""" |$isNull = ${ev.isNull}; |$value = ${ev.value}; """.stripMargin ExprCode( ev.code + initVars, JavaCode.isNullGlobal(isNull), JavaCode.global(value, e.dataType)) } } val flatBufVars = bufVars.flatten val initBufVar = evaluateVariables(flatBufVars) // generate variables for output val (resultVars, genResult) = if (modes.contains(Final) || modes.contains(Complete)) { // evaluate aggregate results ctx.currentVars = flatBufVars val aggResults = bindReferences( functions.map(_.evaluateExpression), aggregateBufferAttributes).map(_.genCode(ctx)) val evaluateAggResults = evaluateVariables(aggResults) // evaluate result expressions ctx.currentVars = aggResults val resultVars = bindReferences(resultExpressions, aggregateAttributes).map(_.genCode(ctx)) (resultVars, s""" |$evaluateAggResults |${evaluateVariables(resultVars)} """.stripMargin) } else if (modes.contains(Partial) || modes.contains(PartialMerge)) { // output the aggregate buffer directly (flatBufVars, "") } else { // no aggregate function, the result should be literals val resultVars = resultExpressions.map(_.genCode(ctx)) (resultVars, evaluateVariables(resultVars)) } val doAgg = ctx.freshName("doAggregateWithoutKey") val doAggFuncName = ctx.addNewFunction(doAgg, s""" |private void $doAgg() throws java.io.IOException { | // initialize aggregation buffer | $initBufVar | | ${child.asInstanceOf[CodegenSupport].produce(ctx, this)} |} """.stripMargin) val numOutput = metricTerm(ctx, "numOutputRows") val aggTime = metricTerm(ctx, "aggTime") val beforeAgg = ctx.freshName("beforeAgg") s""" |while (!$initAgg) { | $initAgg = true; | long $beforeAgg = System.nanoTime(); | $doAggFuncName(); | $aggTime.add((System.nanoTime() - $beforeAgg) / $NANOS_PER_MILLIS); | | // output the result | ${genResult.trim} | | $numOutput.add(1); | ${consume(ctx, resultVars).trim} |} """.stripMargin } // Splits aggregate code into small functions because the most of JVM implementations // can not compile too long functions. Returns None if we are not able to split the given code. // // Note: The difference from `CodeGenerator.splitExpressions` is that we define an individual // function for each aggregation function (e.g., SUM and AVG). For example, in a query // `SELECT SUM(a), AVG(a) FROM VALUES(1) t(a)`, we define two functions // for `SUM(a)` and `AVG(a)`. private def splitAggregateExpressions( ctx: CodegenContext, aggNames: Seq[String], aggBufferUpdatingExprs: Seq[Seq[Expression]], aggCodeBlocks: Seq[Block], subExprs: Map[Expression, SubExprEliminationState]): Option[String] = { val exprValsInSubExprs = subExprs.flatMap { case (_, s) => s.value :: s.isNull :: Nil } if (exprValsInSubExprs.exists(_.isInstanceOf[SimpleExprValue])) { // `SimpleExprValue`s cannot be used as an input variable for split functions, so // we give up splitting functions if it exists in `subExprs`. 
None } else { val inputVars = aggBufferUpdatingExprs.map { aggExprsForOneFunc => val inputVarsForOneFunc = aggExprsForOneFunc.map( CodeGenerator.getLocalInputVariableValues(ctx, _, subExprs)).reduce(_ ++ _).toSeq val paramLength = CodeGenerator.calculateParamLengthFromExprValues(inputVarsForOneFunc) // Checks if a parameter length for the `aggExprsForOneFunc` does not go over the JVM limit if (CodeGenerator.isValidParamLength(paramLength)) { Some(inputVarsForOneFunc) } else { None } } // Checks if all the aggregate code can be split into pieces. // If the parameter length of at lease one `aggExprsForOneFunc` goes over the limit, // we totally give up splitting aggregate code. if (inputVars.forall(_.isDefined)) { val splitCodes = inputVars.flatten.zipWithIndex.map { case (args, i) => val doAggFunc = ctx.freshName(s"doAggregate_${aggNames(i)}") val argList = args.map { v => s"${CodeGenerator.typeName(v.javaType)} ${v.variableName}" }.mkString(", ") val doAggFuncName = ctx.addNewFunction(doAggFunc, s""" |private void $doAggFunc($argList) throws java.io.IOException { | ${aggCodeBlocks(i)} |} """.stripMargin) val inputVariables = args.map(_.variableName).mkString(", ") s"$doAggFuncName($inputVariables);" } Some(splitCodes.mkString("\\n").trim) } else { val errMsg = "Failed to split aggregate code into small functions because the parameter " + "length of at least one split function went over the JVM limit: " + CodeGenerator.MAX_JVM_METHOD_PARAMS_LENGTH if (Utils.isTesting) { throw new IllegalStateException(errMsg) } else { logInfo(errMsg) None } } } } private def doConsumeWithoutKeys(ctx: CodegenContext, input: Seq[ExprCode]): String = { // only have DeclarativeAggregate val functions = aggregateExpressions.map(_.aggregateFunction.asInstanceOf[DeclarativeAggregate]) val inputAttrs = functions.flatMap(_.aggBufferAttributes) ++ inputAttributes // To individually generate code for each aggregate function, an element in `updateExprs` holds // all the expressions for the buffer of an aggregation function. val updateExprs = aggregateExpressions.map { e => e.mode match { case Partial | Complete => e.aggregateFunction.asInstanceOf[DeclarativeAggregate].updateExpressions case PartialMerge | Final => e.aggregateFunction.asInstanceOf[DeclarativeAggregate].mergeExpressions } } ctx.currentVars = bufVars.flatten ++ input val boundUpdateExprs = updateExprs.map { updateExprsForOneFunc => bindReferences(updateExprsForOneFunc, inputAttrs) } val subExprs = ctx.subexpressionEliminationForWholeStageCodegen(boundUpdateExprs.flatten) val effectiveCodes = subExprs.codes.mkString("\\n") val bufferEvals = boundUpdateExprs.map { boundUpdateExprsForOneFunc => ctx.withSubExprEliminationExprs(subExprs.states) { boundUpdateExprsForOneFunc.map(_.genCode(ctx)) } } val aggNames = functions.map(_.prettyName) val aggCodeBlocks = bufferEvals.zipWithIndex.map { case (bufferEvalsForOneFunc, i) => val bufVarsForOneFunc = bufVars(i) // All the update code for aggregation buffers should be placed in the end // of each aggregation function code. 
val updates = bufferEvalsForOneFunc.zip(bufVarsForOneFunc).map { case (ev, bufVar) => s""" |${bufVar.isNull} = ${ev.isNull}; |${bufVar.value} = ${ev.value}; """.stripMargin } code""" |${ctx.registerComment(s"do aggregate for ${aggNames(i)}")} |${ctx.registerComment("evaluate aggregate function")} |${evaluateVariables(bufferEvalsForOneFunc)} |${ctx.registerComment("update aggregation buffers")} |${updates.mkString("\\n").trim} """.stripMargin } val codeToEvalAggFunc = if (conf.codegenSplitAggregateFunc && aggCodeBlocks.map(_.length).sum > conf.methodSplitThreshold) { val maybeSplitCode = splitAggregateExpressions( ctx, aggNames, boundUpdateExprs, aggCodeBlocks, subExprs.states) maybeSplitCode.getOrElse { aggCodeBlocks.fold(EmptyBlock)(_ + _).code } } else { aggCodeBlocks.fold(EmptyBlock)(_ + _).code } s""" |// do aggregate |// common sub-expressions |$effectiveCodes |// evaluate aggregate functions and update aggregation buffers |$codeToEvalAggFunc """.stripMargin } private val groupingAttributes = groupingExpressions.map(_.toAttribute) private val groupingKeySchema = StructType.fromAttributes(groupingAttributes) private val declFunctions = aggregateExpressions.map(_.aggregateFunction) .filter(_.isInstanceOf[DeclarativeAggregate]) .map(_.asInstanceOf[DeclarativeAggregate]) private val bufferSchema = StructType.fromAttributes(aggregateBufferAttributes) // The name for Fast HashMap private var fastHashMapTerm: String = _ private var isFastHashMapEnabled: Boolean = false // whether a vectorized hashmap is used instead // we have decided to always use the row-based hashmap, // but the vectorized hashmap can still be switched on for testing and benchmarking purposes. private var isVectorizedHashMapEnabled: Boolean = false // The name for UnsafeRow HashMap private var hashMapTerm: String = _ private var sorterTerm: String = _ /** * This is called by generated Java class, should be public. */ def createHashMap(): UnsafeFixedWidthAggregationMap = { // create initialized aggregate buffer val initExpr = declFunctions.flatMap(f => f.initialValues) val initialBuffer = UnsafeProjection.create(initExpr)(EmptyRow) // create hashMap new UnsafeFixedWidthAggregationMap( initialBuffer, bufferSchema, groupingKeySchema, TaskContext.get(), 1024 * 16, // initial capacity TaskContext.get().taskMemoryManager().pageSizeBytes ) } def getTaskMemoryManager(): TaskMemoryManager = { TaskContext.get().taskMemoryManager() } def getEmptyAggregationBuffer(): InternalRow = { val initExpr = declFunctions.flatMap(f => f.initialValues) val initialBuffer = UnsafeProjection.create(initExpr)(EmptyRow) initialBuffer } /** * This is called by generated Java class, should be public. */ def createUnsafeJoiner(): UnsafeRowJoiner = { GenerateUnsafeRowJoiner.create(groupingKeySchema, bufferSchema) } /** * Called by generated Java class to finish the aggregate and return a KVIterator. 
*/ def finishAggregate( hashMap: UnsafeFixedWidthAggregationMap, sorter: UnsafeKVExternalSorter, peakMemory: SQLMetric, spillSize: SQLMetric, avgHashProbe: SQLMetric): KVIterator[UnsafeRow, UnsafeRow] = { // update peak execution memory val mapMemory = hashMap.getPeakMemoryUsedBytes val sorterMemory = Option(sorter).map(_.getPeakMemoryUsedBytes).getOrElse(0L) val maxMemory = Math.max(mapMemory, sorterMemory) val metrics = TaskContext.get().taskMetrics() peakMemory.add(maxMemory) metrics.incPeakExecutionMemory(maxMemory) // Update average hashmap probe avgHashProbe.set(hashMap.getAvgHashProbeBucketListIterations) if (sorter == null) { // not spilled return hashMap.iterator() } // merge the final hashMap into sorter sorter.merge(hashMap.destructAndCreateExternalSorter()) hashMap.free() val sortedIter = sorter.sortedIterator() // Create a KVIterator based on the sorted iterator. new KVIterator[UnsafeRow, UnsafeRow] { // Create a MutableProjection to merge the rows of same key together val mergeExpr = declFunctions.flatMap(_.mergeExpressions) val mergeProjection = MutableProjection.create( mergeExpr, aggregateBufferAttributes ++ declFunctions.flatMap(_.inputAggBufferAttributes)) val joinedRow = new JoinedRow() var currentKey: UnsafeRow = null var currentRow: UnsafeRow = null var nextKey: UnsafeRow = if (sortedIter.next()) { sortedIter.getKey } else { null } override def next(): Boolean = { if (nextKey != null) { currentKey = nextKey.copy() currentRow = sortedIter.getValue.copy() nextKey = null // use the first row as aggregate buffer mergeProjection.target(currentRow) // merge the following rows with same key together var findNextGroup = false while (!findNextGroup && sortedIter.next()) { val key = sortedIter.getKey if (currentKey.equals(key)) { mergeProjection(joinedRow(currentRow, sortedIter.getValue)) } else { // We find a new group. findNextGroup = true nextKey = key } } true } else { spillSize.add(sorter.getSpillSize) false } } override def getKey: UnsafeRow = currentKey override def getValue: UnsafeRow = currentRow override def close(): Unit = { sortedIter.close() } } } /** * Generate the code for output. * @return function name for the result code. 
*/ private def generateResultFunction(ctx: CodegenContext): String = { val funcName = ctx.freshName("doAggregateWithKeysOutput") val keyTerm = ctx.freshName("keyTerm") val bufferTerm = ctx.freshName("bufferTerm") val numOutput = metricTerm(ctx, "numOutputRows") val body = if (modes.contains(Final) || modes.contains(Complete)) { // generate output using resultExpressions ctx.currentVars = null ctx.INPUT_ROW = keyTerm val keyVars = groupingExpressions.zipWithIndex.map { case (e, i) => BoundReference(i, e.dataType, e.nullable).genCode(ctx) } val evaluateKeyVars = evaluateVariables(keyVars) ctx.INPUT_ROW = bufferTerm val bufferVars = aggregateBufferAttributes.zipWithIndex.map { case (e, i) => BoundReference(i, e.dataType, e.nullable).genCode(ctx) } val evaluateBufferVars = evaluateVariables(bufferVars) // evaluate the aggregation result ctx.currentVars = bufferVars val aggResults = bindReferences( declFunctions.map(_.evaluateExpression), aggregateBufferAttributes).map(_.genCode(ctx)) val evaluateAggResults = evaluateVariables(aggResults) // generate the final result ctx.currentVars = keyVars ++ aggResults val inputAttrs = groupingAttributes ++ aggregateAttributes val resultVars = bindReferences[Expression]( resultExpressions, inputAttrs).map(_.genCode(ctx)) val evaluateNondeterministicResults = evaluateNondeterministicVariables(output, resultVars, resultExpressions) s""" |$evaluateKeyVars |$evaluateBufferVars |$evaluateAggResults |$evaluateNondeterministicResults |${consume(ctx, resultVars)} """.stripMargin } else if (modes.contains(Partial) || modes.contains(PartialMerge)) { // resultExpressions are Attributes of groupingExpressions and aggregateBufferAttributes. assert(resultExpressions.forall(_.isInstanceOf[Attribute])) assert(resultExpressions.length == groupingExpressions.length + aggregateBufferAttributes.length) ctx.currentVars = null ctx.INPUT_ROW = keyTerm val keyVars = groupingExpressions.zipWithIndex.map { case (e, i) => BoundReference(i, e.dataType, e.nullable).genCode(ctx) } val evaluateKeyVars = evaluateVariables(keyVars) ctx.INPUT_ROW = bufferTerm val resultBufferVars = aggregateBufferAttributes.zipWithIndex.map { case (e, i) => BoundReference(i, e.dataType, e.nullable).genCode(ctx) } val evaluateResultBufferVars = evaluateVariables(resultBufferVars) ctx.currentVars = keyVars ++ resultBufferVars val inputAttrs = resultExpressions.map(_.toAttribute) val resultVars = bindReferences[Expression]( resultExpressions, inputAttrs).map(_.genCode(ctx)) s""" |$evaluateKeyVars |$evaluateResultBufferVars |${consume(ctx, resultVars)} """.stripMargin } else { // generate result based on grouping key ctx.INPUT_ROW = keyTerm ctx.currentVars = null val resultVars = bindReferences[Expression]( resultExpressions, groupingAttributes).map(_.genCode(ctx)) val evaluateNondeterministicResults = evaluateNondeterministicVariables(output, resultVars, resultExpressions) s""" |$evaluateNondeterministicResults |${consume(ctx, resultVars)} """.stripMargin } ctx.addNewFunction(funcName, s""" |private void $funcName(UnsafeRow $keyTerm, UnsafeRow $bufferTerm) | throws java.io.IOException { | $numOutput.add(1); | $body |} """.stripMargin) } /** * A required check for any fast hash map implementation (basically the common requirements * for row-based and vectorized). * Currently fast hash map is supported for primitive data types during partial aggregation. * This list of supported use-cases should be expanded over time. 
*/ private def checkIfFastHashMapSupported(ctx: CodegenContext): Boolean = { val isSupported = (groupingKeySchema ++ bufferSchema).forall(f => CodeGenerator.isPrimitiveType(f.dataType) || f.dataType.isInstanceOf[DecimalType] || f.dataType.isInstanceOf[StringType] || f.dataType.isInstanceOf[CalendarIntervalType]) && bufferSchema.nonEmpty && modes.forall(mode => mode == Partial || mode == PartialMerge) // For vectorized hash map, We do not support byte array based decimal type for aggregate values // as ColumnVector.putDecimal for high-precision decimals doesn't currently support in-place // updates. Due to this, appending the byte array in the vectorized hash map can turn out to be // quite inefficient and can potentially OOM the executor. // For row-based hash map, while decimal update is supported in UnsafeRow, we will just act // conservative here, due to lack of testing and benchmarking. val isNotByteArrayDecimalType = bufferSchema.map(_.dataType).filter(_.isInstanceOf[DecimalType]) .forall(!DecimalType.isByteArrayDecimalType(_)) isSupported && isNotByteArrayDecimalType } private def enableTwoLevelHashMap(ctx: CodegenContext): Unit = { if (!checkIfFastHashMapSupported(ctx)) { if (modes.forall(mode => mode == Partial || mode == PartialMerge) && !Utils.isTesting) { logInfo(s"${SQLConf.ENABLE_TWOLEVEL_AGG_MAP.key} is set to true, but" + " current version of codegened fast hashmap does not support this aggregate.") } } else { isFastHashMapEnabled = true // This is for testing/benchmarking only. // We enforce to first level to be a vectorized hashmap, instead of the default row-based one. isVectorizedHashMapEnabled = sqlContext.conf.enableVectorizedHashMap } } private def doProduceWithKeys(ctx: CodegenContext): String = { val initAgg = ctx.addMutableState(CodeGenerator.JAVA_BOOLEAN, "initAgg") if (sqlContext.conf.enableTwoLevelAggMap) { enableTwoLevelHashMap(ctx) } else if (sqlContext.conf.enableVectorizedHashMap) { logWarning("Two level hashmap is disabled but vectorized hashmap is enabled.") } val bitMaxCapacity = sqlContext.conf.fastHashAggregateRowMaxCapacityBit val thisPlan = ctx.addReferenceObj("plan", this) // Create a name for the iterator from the fast hash map, and the code to create fast hash map. val (iterTermForFastHashMap, createFastHashMap) = if (isFastHashMapEnabled) { // Generates the fast hash map class and creates the fast hash map term. 
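      // The generated class is either the vectorized variant (kept only for testing/benchmarking)
      // or the default row-based variant; both are emitted as an inner class of the produced code
      // and referenced through an inlined mutable state field.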
val fastHashMapClassName = ctx.freshName("FastHashMap") if (isVectorizedHashMapEnabled) { val generatedMap = new VectorizedHashMapGenerator(ctx, aggregateExpressions, fastHashMapClassName, groupingKeySchema, bufferSchema, bitMaxCapacity).generate() ctx.addInnerClass(generatedMap) // Inline mutable state since not many aggregation operations in a task fastHashMapTerm = ctx.addMutableState( fastHashMapClassName, "vectorizedFastHashMap", forceInline = true) val iter = ctx.addMutableState( "java.util.Iterator<InternalRow>", "vectorizedFastHashMapIter", forceInline = true) val create = s"$fastHashMapTerm = new $fastHashMapClassName();" (iter, create) } else { val generatedMap = new RowBasedHashMapGenerator(ctx, aggregateExpressions, fastHashMapClassName, groupingKeySchema, bufferSchema, bitMaxCapacity).generate() ctx.addInnerClass(generatedMap) // Inline mutable state since not many aggregation operations in a task fastHashMapTerm = ctx.addMutableState( fastHashMapClassName, "fastHashMap", forceInline = true) val iter = ctx.addMutableState( "org.apache.spark.unsafe.KVIterator<UnsafeRow, UnsafeRow>", "fastHashMapIter", forceInline = true) val create = s"$fastHashMapTerm = new $fastHashMapClassName(" + s"$thisPlan.getTaskMemoryManager(), $thisPlan.getEmptyAggregationBuffer());" (iter, create) } } else ("", "") // Create a name for the iterator from the regular hash map. // Inline mutable state since not many aggregation operations in a task val iterTerm = ctx.addMutableState(classOf[KVIterator[UnsafeRow, UnsafeRow]].getName, "mapIter", forceInline = true) // create hashMap val hashMapClassName = classOf[UnsafeFixedWidthAggregationMap].getName hashMapTerm = ctx.addMutableState(hashMapClassName, "hashMap", forceInline = true) sorterTerm = ctx.addMutableState(classOf[UnsafeKVExternalSorter].getName, "sorter", forceInline = true) val doAgg = ctx.freshName("doAggregateWithKeys") val peakMemory = metricTerm(ctx, "peakMemory") val spillSize = metricTerm(ctx, "spillSize") val avgHashProbe = metricTerm(ctx, "avgHashProbe") val finishRegularHashMap = s"$iterTerm = $thisPlan.finishAggregate(" + s"$hashMapTerm, $sorterTerm, $peakMemory, $spillSize, $avgHashProbe);" val finishHashMap = if (isFastHashMapEnabled) { s""" |$iterTermForFastHashMap = $fastHashMapTerm.rowIterator(); |$finishRegularHashMap """.stripMargin } else { finishRegularHashMap } val doAggFuncName = ctx.addNewFunction(doAgg, s""" |private void $doAgg() throws java.io.IOException { | ${child.asInstanceOf[CodegenSupport].produce(ctx, this)} | $finishHashMap |} """.stripMargin) // generate code for output val keyTerm = ctx.freshName("aggKey") val bufferTerm = ctx.freshName("aggBuffer") val outputFunc = generateResultFunction(ctx) def outputFromFastHashMap: String = { if (isFastHashMapEnabled) { if (isVectorizedHashMapEnabled) { outputFromVectorizedMap } else { outputFromRowBasedMap } } else "" } def outputFromRowBasedMap: String = { s""" |while ($iterTermForFastHashMap.next()) { | UnsafeRow $keyTerm = (UnsafeRow) $iterTermForFastHashMap.getKey(); | UnsafeRow $bufferTerm = (UnsafeRow) $iterTermForFastHashMap.getValue(); | $outputFunc($keyTerm, $bufferTerm); | | if (shouldStop()) return; |} |$fastHashMapTerm.close(); """.stripMargin } // Iterate over the aggregate rows and convert them from InternalRow to UnsafeRow def outputFromVectorizedMap: String = { val row = ctx.freshName("fastHashMapRow") ctx.currentVars = null ctx.INPUT_ROW = row val generateKeyRow = GenerateUnsafeProjection.createCode(ctx, groupingKeySchema.toAttributes.zipWithIndex .map { 
case (attr, i) => BoundReference(i, attr.dataType, attr.nullable) } ) val generateBufferRow = GenerateUnsafeProjection.createCode(ctx, bufferSchema.toAttributes.zipWithIndex.map { case (attr, i) => BoundReference(groupingKeySchema.length + i, attr.dataType, attr.nullable) }) s""" |while ($iterTermForFastHashMap.hasNext()) { | InternalRow $row = (InternalRow) $iterTermForFastHashMap.next(); | ${generateKeyRow.code} | ${generateBufferRow.code} | $outputFunc(${generateKeyRow.value}, ${generateBufferRow.value}); | | if (shouldStop()) return; |} | |$fastHashMapTerm.close(); """.stripMargin } def outputFromRegularHashMap: String = { s""" |while ($limitNotReachedCond $iterTerm.next()) { | UnsafeRow $keyTerm = (UnsafeRow) $iterTerm.getKey(); | UnsafeRow $bufferTerm = (UnsafeRow) $iterTerm.getValue(); | $outputFunc($keyTerm, $bufferTerm); | if (shouldStop()) return; |} |$iterTerm.close(); |if ($sorterTerm == null) { | $hashMapTerm.free(); |} """.stripMargin } val aggTime = metricTerm(ctx, "aggTime") val beforeAgg = ctx.freshName("beforeAgg") s""" |if (!$initAgg) { | $initAgg = true; | $createFastHashMap | $hashMapTerm = $thisPlan.createHashMap(); | long $beforeAgg = System.nanoTime(); | $doAggFuncName(); | $aggTime.add((System.nanoTime() - $beforeAgg) / $NANOS_PER_MILLIS); |} |// output the result |$outputFromFastHashMap |$outputFromRegularHashMap """.stripMargin } private def doConsumeWithKeys(ctx: CodegenContext, input: Seq[ExprCode]): String = { // create grouping key val unsafeRowKeyCode = GenerateUnsafeProjection.createCode( ctx, bindReferences[Expression](groupingExpressions, child.output)) val fastRowKeys = ctx.generateExpressions( bindReferences[Expression](groupingExpressions, child.output)) val unsafeRowKeys = unsafeRowKeyCode.value val unsafeRowKeyHash = ctx.freshName("unsafeRowKeyHash") val unsafeRowBuffer = ctx.freshName("unsafeRowAggBuffer") val fastRowBuffer = ctx.freshName("fastAggBuffer") // To individually generate code for each aggregate function, an element in `updateExprs` holds // all the expressions for the buffer of an aggregation function. val updateExprs = aggregateExpressions.map { e => // only have DeclarativeAggregate e.mode match { case Partial | Complete => e.aggregateFunction.asInstanceOf[DeclarativeAggregate].updateExpressions case PartialMerge | Final => e.aggregateFunction.asInstanceOf[DeclarativeAggregate].mergeExpressions } } val (checkFallbackForGeneratedHashMap, checkFallbackForBytesToBytesMap, resetCounter, incCounter) = if (testFallbackStartsAt.isDefined) { val countTerm = ctx.addMutableState(CodeGenerator.JAVA_INT, "fallbackCounter") (s"$countTerm < ${testFallbackStartsAt.get._1}", s"$countTerm < ${testFallbackStartsAt.get._2}", s"$countTerm = 0;", s"$countTerm += 1;") } else { ("true", "true", "", "") } val oomeClassName = classOf[SparkOutOfMemoryError].getName val findOrInsertRegularHashMap: String = s""" |// generate grouping key |${unsafeRowKeyCode.code} |int $unsafeRowKeyHash = ${unsafeRowKeyCode.value}.hashCode(); |if ($checkFallbackForBytesToBytesMap) { | // try to get the buffer from hash map | $unsafeRowBuffer = | $hashMapTerm.getAggregationBufferFromUnsafeRow($unsafeRowKeys, $unsafeRowKeyHash); |} |// Can't allocate buffer from the hash map. Spill the map and fallback to sort-based |// aggregation after processing all input rows. 
|if ($unsafeRowBuffer == null) { | if ($sorterTerm == null) { | $sorterTerm = $hashMapTerm.destructAndCreateExternalSorter(); | } else { | $sorterTerm.merge($hashMapTerm.destructAndCreateExternalSorter()); | } | $resetCounter | // the hash map had be spilled, it should have enough memory now, | // try to allocate buffer again. | $unsafeRowBuffer = $hashMapTerm.getAggregationBufferFromUnsafeRow( | $unsafeRowKeys, $unsafeRowKeyHash); | if ($unsafeRowBuffer == null) { | // failed to allocate the first page | throw new $oomeClassName("No enough memory for aggregation"); | } |} """.stripMargin val findOrInsertHashMap: String = { if (isFastHashMapEnabled) { // If fast hash map is on, we first generate code to probe and update the fast hash map. // If the probe is successful the corresponding fast row buffer will hold the mutable row. s""" |if ($checkFallbackForGeneratedHashMap) { | ${fastRowKeys.map(_.code).mkString("\\n")} | if (${fastRowKeys.map("!" + _.isNull).mkString(" && ")}) { | $fastRowBuffer = $fastHashMapTerm.findOrInsert( | ${fastRowKeys.map(_.value).mkString(", ")}); | } |} |// Cannot find the key in fast hash map, try regular hash map. |if ($fastRowBuffer == null) { | $findOrInsertRegularHashMap |} """.stripMargin } else { findOrInsertRegularHashMap } } val inputAttr = aggregateBufferAttributes ++ inputAttributes // Here we set `currentVars(0)` to `currentVars(numBufferSlots)` to null, so that when // generating code for buffer columns, we use `INPUT_ROW`(will be the buffer row), while // generating input columns, we use `currentVars`. ctx.currentVars = new Array[ExprCode](aggregateBufferAttributes.length) ++ input val aggNames = aggregateExpressions.map(_.aggregateFunction.prettyName) // Computes start offsets for each aggregation function code // in the underlying buffer row. val bufferStartOffsets = { val offsets = mutable.ArrayBuffer[Int]() var curOffset = 0 updateExprs.foreach { exprsForOneFunc => offsets += curOffset curOffset += exprsForOneFunc.length } offsets.toArray } val updateRowInRegularHashMap: String = { ctx.INPUT_ROW = unsafeRowBuffer val boundUpdateExprs = updateExprs.map { updateExprsForOneFunc => bindReferences(updateExprsForOneFunc, inputAttr) } val subExprs = ctx.subexpressionEliminationForWholeStageCodegen(boundUpdateExprs.flatten) val effectiveCodes = subExprs.codes.mkString("\\n") val unsafeRowBufferEvals = boundUpdateExprs.map { boundUpdateExprsForOneFunc => ctx.withSubExprEliminationExprs(subExprs.states) { boundUpdateExprsForOneFunc.map(_.genCode(ctx)) } } val aggCodeBlocks = updateExprs.indices.map { i => val rowBufferEvalsForOneFunc = unsafeRowBufferEvals(i) val boundUpdateExprsForOneFunc = boundUpdateExprs(i) val bufferOffset = bufferStartOffsets(i) // All the update code for aggregation buffers should be placed in the end // of each aggregation function code. 
val updateRowBuffers = rowBufferEvalsForOneFunc.zipWithIndex.map { case (ev, j) => val updateExpr = boundUpdateExprsForOneFunc(j) val dt = updateExpr.dataType val nullable = updateExpr.nullable CodeGenerator.updateColumn(unsafeRowBuffer, dt, bufferOffset + j, ev, nullable) } code""" |${ctx.registerComment(s"evaluate aggregate function for ${aggNames(i)}")} |${evaluateVariables(rowBufferEvalsForOneFunc)} |${ctx.registerComment("update unsafe row buffer")} |${updateRowBuffers.mkString("\\n").trim} """.stripMargin } val codeToEvalAggFunc = if (conf.codegenSplitAggregateFunc && aggCodeBlocks.map(_.length).sum > conf.methodSplitThreshold) { val maybeSplitCode = splitAggregateExpressions( ctx, aggNames, boundUpdateExprs, aggCodeBlocks, subExprs.states) maybeSplitCode.getOrElse { aggCodeBlocks.fold(EmptyBlock)(_ + _).code } } else { aggCodeBlocks.fold(EmptyBlock)(_ + _).code } s""" |// common sub-expressions |$effectiveCodes |// evaluate aggregate functions and update aggregation buffers |$codeToEvalAggFunc """.stripMargin } val updateRowInHashMap: String = { if (isFastHashMapEnabled) { if (isVectorizedHashMapEnabled) { ctx.INPUT_ROW = fastRowBuffer val boundUpdateExprs = updateExprs.map { updateExprsForOneFunc => bindReferences(updateExprsForOneFunc, inputAttr) } val subExprs = ctx.subexpressionEliminationForWholeStageCodegen(boundUpdateExprs.flatten) val effectiveCodes = subExprs.codes.mkString("\\n") val fastRowEvals = boundUpdateExprs.map { boundUpdateExprsForOneFunc => ctx.withSubExprEliminationExprs(subExprs.states) { boundUpdateExprsForOneFunc.map(_.genCode(ctx)) } } val aggCodeBlocks = fastRowEvals.zipWithIndex.map { case (fastRowEvalsForOneFunc, i) => val boundUpdateExprsForOneFunc = boundUpdateExprs(i) val bufferOffset = bufferStartOffsets(i) // All the update code for aggregation buffers should be placed in the end // of each aggregation function code. val updateRowBuffer = fastRowEvalsForOneFunc.zipWithIndex.map { case (ev, j) => val updateExpr = boundUpdateExprsForOneFunc(j) val dt = updateExpr.dataType val nullable = updateExpr.nullable CodeGenerator.updateColumn(fastRowBuffer, dt, bufferOffset + j, ev, nullable, isVectorized = true) } code""" |${ctx.registerComment(s"evaluate aggregate function for ${aggNames(i)}")} |${evaluateVariables(fastRowEvalsForOneFunc)} |${ctx.registerComment("update fast row")} |${updateRowBuffer.mkString("\\n").trim} """.stripMargin } val codeToEvalAggFunc = if (conf.codegenSplitAggregateFunc && aggCodeBlocks.map(_.length).sum > conf.methodSplitThreshold) { val maybeSplitCode = splitAggregateExpressions( ctx, aggNames, boundUpdateExprs, aggCodeBlocks, subExprs.states) maybeSplitCode.getOrElse { aggCodeBlocks.fold(EmptyBlock)(_ + _).code } } else { aggCodeBlocks.fold(EmptyBlock)(_ + _).code } // If vectorized fast hash map is on, we first generate code to update row // in vectorized fast hash map, if the previous loop up hit vectorized fast hash map. // Otherwise, update row in regular hash map. s""" |if ($fastRowBuffer != null) { | // common sub-expressions | $effectiveCodes | // evaluate aggregate functions and update aggregation buffers | $codeToEvalAggFunc |} else { | $updateRowInRegularHashMap |} """.stripMargin } else { // If row-based hash map is on and the previous loop up hit fast hash map, // we reuse regular hash buffer to update row of fast hash map. // Otherwise, update row in regular hash map. 
s""" |// Updates the proper row buffer |if ($fastRowBuffer != null) { | $unsafeRowBuffer = $fastRowBuffer; |} |$updateRowInRegularHashMap """.stripMargin } } else { updateRowInRegularHashMap } } val declareRowBuffer: String = if (isFastHashMapEnabled) { val fastRowType = if (isVectorizedHashMapEnabled) { classOf[MutableColumnarRow].getName } else { "UnsafeRow" } s""" |UnsafeRow $unsafeRowBuffer = null; |$fastRowType $fastRowBuffer = null; """.stripMargin } else { s"UnsafeRow $unsafeRowBuffer = null;" } // We try to do hash map based in-memory aggregation first. If there is not enough memory (the // hash map will return null for new key), we spill the hash map to disk to free memory, then // continue to do in-memory aggregation and spilling until all the rows had been processed. // Finally, sort the spilled aggregate buffers by key, and merge them together for same key. s""" |$declareRowBuffer |$findOrInsertHashMap |$incCounter |$updateRowInHashMap """.stripMargin } override def verboseString(maxFields: Int): String = toString(verbose = true, maxFields) override def simpleString(maxFields: Int): String = toString(verbose = false, maxFields) private def toString(verbose: Boolean, maxFields: Int): String = { val allAggregateExpressions = aggregateExpressions testFallbackStartsAt match { case None => val keyString = truncatedString(groupingExpressions, "[", ", ", "]", maxFields) val functionString = truncatedString(allAggregateExpressions, "[", ", ", "]", maxFields) val outputString = truncatedString(output, "[", ", ", "]", maxFields) if (verbose) { s"HashAggregate(keys=$keyString, functions=$functionString, output=$outputString)" } else { s"HashAggregate(keys=$keyString, functions=$functionString)" } case Some(fallbackStartsAt) => s"HashAggregateWithControlledFallback $groupingExpressions " + s"$allAggregateExpressions $resultExpressions fallbackStartsAt=$fallbackStartsAt" } } } object HashAggregateExec { def supportsAggregate(aggregateBufferAttributes: Seq[Attribute]): Boolean = { val aggregationBufferSchema = StructType.fromAttributes(aggregateBufferAttributes) UnsafeFixedWidthAggregationMap.supportsAggregationBufferSchema(aggregationBufferSchema) } }
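
// Illustrative note (not part of the original file): the planner-side code that chooses between
// hash-based and sort-based aggregation consults the check above. A minimal sketch, assuming a
// helper that already holds the aggregate expressions, might look like:
//
//   val aggBufferAttributes = aggregateExpressions.flatMap(_.aggregateFunction.aggBufferAttributes)
//   val useHashAggregate = HashAggregateExec.supportsAggregate(aggBufferAttributes)
//   // when false, a sort-based aggregate operator is used instead
//
// The real call sites and fallback operators live elsewhere in Spark.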
ConeyLiu/spark
sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/HashAggregateExec.scala
Scala
apache-2.0
46,184
package com.cloudwick.generator.benchmarks

import com.cloudwick.generator.utils.Utils

/**
 * Benchmark runs (internal only)
 * @author ashrith
 */
object Benchmark extends App {
  val utils = new Utils

  // Build 10,000 random dotted-quad addresses by mapping the last three octets and joining them
  // with mkString, prepending the first octet via plain string concatenation.
  utils.time("concatenating random ip addresses using mkString()") {
    val random = scala.util.Random
    (1 to 10000).foreach { _ =>
      (random.nextInt(223) + 1) + "." + (1 to 3).map { _ => random.nextInt(255) }.mkString(".")
    }
  }

  // Same workload using a single reused StringBuilder; the builder is cleared on every iteration
  // so each run builds exactly one address, keeping the comparison with mkString fair.
  utils.time("concatenating random ip addresses using StringBuilder") {
    val random = scala.util.Random
    val sb: StringBuilder = new StringBuilder
    (1 to 10000).foreach { _ =>
      sb.clear()
      sb.append(random.nextInt(223) + 1)
      (1 to 3).foreach { _ =>
        sb.append(".")
        sb.append(random.nextInt(255))
      }
      sb.toString()
    }
  }
}
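
// Illustrative sketch (not part of the original benchmark), assuming Utils.time takes a label and
// a by-name block and reports the elapsed time: a third variant using String.format could be timed
// the same way for comparison.
//
//   utils.time("concatenating random ip addresses using format()") {
//     val random = scala.util.Random
//     (1 to 10000).foreach { _ =>
//       "%d.%d.%d.%d".format(random.nextInt(223) + 1,
//         random.nextInt(255), random.nextInt(255), random.nextInt(255))
//     }
//   }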
vaagrawa/generator
src/main/scala/com/cloudwick/generator/benchmarks/Benchmark.scala
Scala
apache-2.0
807
package octopus.example.domain

import octopus.dsl._
import octopus.{AppError, AsyncValidatorM}

import scala.language.higherKinds

trait EmailService[M[_]] {
  def isEmailTaken(email: String): M[Boolean]
  def doesDomainExists(email: String): M[Boolean]
}

trait GeoService[M[_]] {
  def doesPostalCodeExist(postalCode: PostalCode.T): M[Boolean]
  def isPostalCodeValidForCity(postalCode: PostalCode.T, city: String): M[Boolean]
}

class AsyncValidators[M[_]: AppError](emailService: EmailService[M],
                                      geoService: GeoService[M]) {

  val Email_Err_AlreadyTaken = "email is already taken by someone else"
  val Email_Err_DomainDoesNotExists = "domain does not exist"

  implicit val emailAsyncValidator: AsyncValidatorM[M, Email] =
    Validator
      .derived[Email]
      .asyncM[M].ruleVC(emailService.isEmailTaken, Email_Err_AlreadyTaken)
      .async.rule(_.address, emailService.doesDomainExists, Email_Err_DomainDoesNotExists)
      .rule(_.address, (_: String).nonEmpty, Email.Err_MustNotBeEmpty) // repeated to check dsl behavior & to ensure that it keeps the validator in the asynchronous world

  val PostalCode_Err_DoesNotExist = "postal code does not exist"

  implicit val postalCodeAsyncValidator: AsyncValidatorM[M, PostalCode.T] =
    AsyncValidatorM[M, PostalCode.T]
      .async.rule(geoService.doesPostalCodeExist, PostalCode_Err_DoesNotExist)

  val PostalCode_Err_NotValidForCity = "postal code is not valid for the city"

  implicit val addressValidator: AsyncValidatorM[M, Address] =
    Validator[Address]
      .asyncM[M].ruleCatchNonFatal(addr =>
        geoService.isPostalCodeValidForCity(addr.postalCode, addr.city),
        PostalCode_Err_NotValidForCity, _.getMessage)
      .async.composeDerived
}
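
// Illustrative addition (not part of the original example): a minimal effect-polymorphic stub of
// the EmailService trait above, handy when instantiating AsyncValidators in tests. The object name
// and its always-permissive behaviour are assumptions made purely for demonstration.
import scala.concurrent.Future

object AlwaysAvailableEmailService extends EmailService[Future] {
  // Pretend no address is ever taken and every domain resolves.
  def isEmailTaken(email: String): Future[Boolean] = Future.successful(false)
  def doesDomainExists(email: String): Future[Boolean] = Future.successful(true)
}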
krzemin/octopus
octopus/src/test/scala/octopus/example/domain/AsyncValidators.scala
Scala
apache-2.0
1,816
package com.twitter.scrooge.goldfile import com.twitter.io.Files import com.twitter.scrooge.Main import com.twitter.scrooge.testutil.TempDirectory import java.io.File import java.util.regex.Pattern import org.scalatest.{BeforeAndAfter, FunSuite} import scala.io.Source /** * Compares the output of a generator to a set of "golden files". * * This helps to viscerally see how changes to the generator affect the * generated code at code review time. Most updates to the gold files are * intentional due to the corresponding changes in the generator code or * templates. */ abstract class GoldFileTest extends FunSuite with BeforeAndAfter { private var tempDir: File = _ before { tempDir = TempDirectory.create(None, deleteAtExit = false) } after { if (!Files.delete(tempDir)) fail(s"Failed to delete $tempDir") } protected def language: String private val version = " \\* version: .*".r private val rev = " \\* rev: .*".r private val builtAt = " \\* built at: .*".r private val trailingSpaces = Pattern.compile("[ ]+$", Pattern.MULTILINE) private def generatedFiles(f: File): Seq[File] = { def accumulate(f: File, buf: Vector[File]): Vector[File] = { if (f.isFile) { buf :+ f } else { var bb = buf f.listFiles.foreach { f2 => bb = accumulate(f2, bb) } bb } } accumulate(f, Vector.empty) } protected def testThriftFiles = Seq("gold_file_input/gold.thrift") test("generated output looks as expected") { val ccl = Thread.currentThread().getContextClassLoader val inputThrifts = testThriftFiles.map(ccl.getResource(_).getPath) val args = Seq( "--language", language, "--finagle", "--dest", tempDir.getPath) ++ inputThrifts Main.main(args.toArray) def generatedDataFor(file: File): String = { val gen = Source.fromFile(file, "UTF-8").mkString // normalize the headers val headersNormalized = builtAt.replaceFirstIn( rev.replaceFirstIn( version.replaceFirstIn(gen, " * version: ?"), " * rev: ?"), " * built at: ?") trailingSpaces.matcher(headersNormalized).replaceAll("") } def goldDataFor(suffix: String): String = { val is = ccl.getResourceAsStream(s"gold_file_output_$language/$suffix") if (is == null) return "" Source.fromInputStream(is, "UTF-8").mkString } val gens = generatedFiles(tempDir) gens.foreach { gen => // we want to take the path after tempDir and compare to suffixed gold file // in our resources dir. // the +1 removes what would be a leading slash from suffix val suffix = gen.toString.drop(tempDir.toString.length + 1) val genStr = generatedDataFor(gen) val expected = goldDataFor(suffix) withClue(suffix) { if (genStr != expected) { val msg = s""" |The generated file ${gen.getName} did not match gold file |"scrooge/scrooge-generator-tests/src/test/resources/gold_file_output_$language/$suffix". |Compare the output in stdout to the gold file and |either fix the generator or update the gold file to match. """.stripMargin println(msg) println(s"Generated file $suffix:\n${genStr}<<<EOF") fail(msg) } } } } }
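
// Illustrative sketch (not part of the original file): a concrete gold-file suite only needs to
// pin the target generator language. The class name below is hypothetical; the real subclasses
// live elsewhere in the scrooge test tree.
//
//   class ScalaGoldFileTest extends GoldFileTest {
//     protected def language: String = "scala"
//   }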
thirstycrow/scrooge
scrooge-generator-tests/src/test/scala/com/twitter/scrooge/goldfile/GoldFileTest.scala
Scala
apache-2.0
3,442
package com.realitygames.couchbase import com.couchbase.client.java.view.{Stale, ViewQuery} import com.realitygames.couchbase.json.circe._ import com.realitygames.couchbase.models.TestStructure import com.realitygames.couchbase.query.QueryResult.SuccessQueryResult import io.circe.generic.auto._ import org.scalatest.concurrent.ScalaFutures import org.scalatest.{AsyncWordSpec, _} class ScalaAsyncBucketViewQueryTest extends AsyncWordSpec with MustMatchers with BucketTesting with ScalaFutures with RecoverMethods with Inside { val correctTestValueId = "example" val incorrectTestValueId = "example2" override def bucketName: String = "viewQuery" "AsyncBucket.query" should { "view get_object should return User value" in { for { result <- bucket.query[TestStructure](ViewQuery.from("testStructure", "get_object").key(correctTestValueId).limit(1).stale(Stale.FALSE)) } yield { inside(result){ case SuccessQueryResult(documents, _, _) => documents.head.content mustEqual TestStructure("string", 1, 2, 3, true, 4.5f, 5.5) } } } "view get_string should return string value" in { for { result <- bucket.query[String](ViewQuery.from("testStructure", "get_string").key(correctTestValueId).limit(1).stale(Stale.FALSE)) } yield { inside(result){ case SuccessQueryResult(documents, _, _) => documents.head.content mustEqual "string" } } } "view get_int should return int value" in { for { result <- bucket.query[Int](ViewQuery.from("testStructure", "get_int").key(correctTestValueId).limit(1).stale(Stale.FALSE)) } yield { inside(result){ case SuccessQueryResult(documents, _, _) => documents.head.content mustEqual 1 } } } "view get_long should return byte value" in { for { result <- bucket.query[Long](ViewQuery.from("testStructure", "get_long").key(correctTestValueId).limit(1).stale(Stale.FALSE)) } yield { inside(result){ case SuccessQueryResult(documents, _, _) => documents.head.content mustEqual 2l } } } "view get_byte should return byte value" in { for { result <- bucket.query[Byte](ViewQuery.from("testStructure", "get_byte").key(correctTestValueId).limit(1).stale(Stale.FALSE)) } yield { inside(result){ case SuccessQueryResult(documents, _, _) => documents.head.content mustEqual 3 } } } "view get_boolean should return boolean value" in { for { result <- bucket.query[Boolean](ViewQuery.from("testStructure", "get_boolean").key(correctTestValueId).limit(1).stale(Stale.FALSE)) } yield { inside(result){ case SuccessQueryResult(documents, _, _) => documents.head.content mustEqual true } } } "view get_float should return float value" in { for { result <- bucket.query[Float](ViewQuery.from("testStructure", "get_float").key(correctTestValueId).limit(1).stale(Stale.FALSE)) } yield { inside(result){ case SuccessQueryResult(documents, _, _) => documents.head.content mustEqual 4.5f } } } "view get_double should return double value" in { for { result <- bucket.query[Double](ViewQuery.from("testStructure", "get_double").key(correctTestValueId).limit(1).stale(Stale.FALSE)) } yield { inside(result){ case SuccessQueryResult(documents, _, _) => documents.head.content mustEqual 5.5 } } } "return all (2) documents for viewQuery bucket: 1 correct and 1 failed" in { for { result <- bucket.query[TestStructure](ViewQuery.from("testStructure", "get_object").stale(Stale.FALSE)) } yield { inside(result){ case SuccessQueryResult(documents, _, failedDocuments) => documents.size mustBe 1 failedDocuments.size mustBe 1 documents.head.content mustEqual TestStructure("string", 1, 2, 3, true, 4.5f, 5.5) failedDocuments.head.errors must not be empty // 
failedDocuments.head.raw mustEqual Json.parse( // """{ // | "string": 123, // | "int": 1, // | "long": 2, // | "byte": 3, // | "boolean": "true", // | "float": 4.5, // | "double": 5.5 // |} // """.stripMargin) } } } } }
RealityGamesLtd/couchbase-java-sdk-scala-wrapper
src/test/scala/com/realitygames/couchbase/ScalaAsyncBucketViewQueryTest.scala
Scala
mit
4,638
/* * #%L * GatlingCql * %% * Copyright (C) 2014 Mikhail Stepura * %% * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. * #L% */ package io.github.gatling.cql import scala.concurrent.duration.DurationInt import com.datastax.driver.core.Cluster import com.datastax.driver.core.ConsistencyLevel import io.gatling.core.Predef._ import io.github.gatling.cql.Predef._; import io.gatling.core.scenario.Simulation class CqlCompileTest extends Simulation { val keyspace = "test" val table_name = "test_table" val session = Cluster.builder().addContactPoint("127.0.0.1").build().connect(s"$keyspace") val cqlConfig = cql.session(session) session.execute(s"CREATE KEYSPACE IF NOT EXISTS $keyspace WITH replication = { 'class' : 'SimpleStrategy', 'replication_factor': '1'}") session.execute(s"""CREATE TABLE IF NOT EXISTS $table_name ( id timeuuid, num int, str text, PRIMARY KEY (id) ); """) session.execute(f"CREATE INDEX IF NOT EXISTS $table_name%s_num_idx ON $table_name%s (num)") val prepared = session.prepare(s"INSERT INTO $table_name (id, num, str) values (now(), ?, ?)") val random = new util.Random val feeder = Iterator.continually( Map( "randomString" -> random.nextString(20), "randomNum" -> random.nextInt() )) val scn = scenario("Two statements").repeat(1) { feed(feeder) .exec(cql("simple SELECT") .execute("SELECT * FROM test_table WHERE num = ${randomNum}")) //Gatling EL for ${randomNum}" .exec(cql("prepared INSERT") .execute(prepared) .withParams(Integer.valueOf(random.nextInt()), "${randomString}") .consistencyLevel(ConsistencyLevel.ANY)) } setUp(scn.inject(rampUsersPerSec(10) to 100 during (30 seconds))) .protocols(cqlConfig) }
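
// Illustrative note (not part of the original simulation): any standard Gatling injection profile
// can be swapped into setUp the same way, e.g. a constant arrival rate instead of a ramp:
//
//   setUp(scn.inject(constantUsersPerSec(20) during (30 seconds))).protocols(cqlConfig)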
infomaven/GatlingCql
src/test/scala/io/github/gatling/cql/CqlCompileTest.scala
Scala
mit
2,826
package lila.common import org.joda.time.{ DateTime, DateTimeZone } import play.api.data.format.Formats._ import play.api.data.format.Formatter import play.api.data.FormError import play.api.data.Forms._ import play.api.libs.json._ object Form { def options(it: Iterable[Int], pattern: String) = it map { d => d -> (pluralize(pattern, d) format d) } def options(it: Iterable[Int], transformer: Int => Int, pattern: String) = it map { d => d -> (pluralize(pattern, transformer(d)) format transformer(d)) } def options(it: Iterable[Int], code: String, pattern: String) = it map { d => (d + code) -> (pluralize(pattern, d) format d) } def optionsDouble(it: Iterable[Double], format: Double => String) = it map { d => d -> format(d) } def numberIn(choices: Iterable[(Int, String)]) = number.verifying(hasKey(choices, _)) def numberInDouble(choices: Iterable[(Double, String)]) = of[Double].verifying(hasKey(choices, _)) def stringIn(choices: Iterable[(String, String)]) = text.verifying(hasKey(choices, _)) def hasKey[A](choices: Iterable[(A, _)], key: A) = choices.map(_._1).toList contains key private def pluralize(pattern: String, nb: Int) = pattern.replace("{s}", (nb != 1).fold("s", "")) private def pluralize(pattern: String, nb: Double) = pattern.replace("{s}", (nb < 1).fold("s", "")) private val jsonGlobalErrorRenamer = __.json update ( (__ \\ "global").json copyFrom (__ \\ "").json.pick ) andThen (__ \\ "").json.prune def errorsAsJson(form: play.api.data.Form[_])(implicit lang: play.api.i18n.Messages) = form.errorsAsJson validate jsonGlobalErrorRenamer getOrElse form.errorsAsJson object formatter { def stringFormatter[A](from: A => String, to: String => A): Formatter[A] = new Formatter[A] { def bind(key: String, data: Map[String, String]) = stringFormat.bind(key, data).right map to def unbind(key: String, value: A) = stringFormat.unbind(key, from(value)) } def intFormatter[A](from: A => Int, to: Int => A): Formatter[A] = new Formatter[A] { def bind(key: String, data: Map[String, String]) = intFormat.bind(key, data).right map to def unbind(key: String, value: A) = intFormat.unbind(key, from(value)) } } object UTCDate { val dateTimePattern = "yyyy-MM-dd HH:mm" val utcDate = jodaDate(dateTimePattern, DateTimeZone.UTC) implicit val dateTimeFormat = jodaDateTimeFormat(dateTimePattern) } }
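
// Illustrative usage sketch (not part of the original file): the helpers above plug into standard
// Play form mappings, e.g. a numeric field whose value must come from a fixed choice list:
//
//   val sizeChoices = Form.options(List(1, 2, 3, 5, 10), "%d game{s}")
//   val sizeMapping = Form.numberIn(sizeChoices)  // a play.api.data.Mapping[Int]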
clarkerubber/lila
modules/common/src/main/Form.scala
Scala
agpl-3.0
2,466
package nasa.nccs.cdas.kernels import java.io._ import scala.xml import java.nio.{ByteBuffer, ByteOrder, FloatBuffer} import nasa.nccs.cdapi.cdm._ import nasa.nccs.cdapi.data.{HeapFltArray, _} import nasa.nccs.cdapi.tensors.CDFloatArray.{ReduceNOpFlt, ReduceOpFlt, ReduceWNOpFlt} import nasa.nccs.cdapi.tensors.{CDArray, CDCoordMap, CDFloatArray, CDTimeCoordMap} import nasa.nccs.cdas.engine.spark.RecordKey import nasa.nccs.cdas.modules.CDSpark.BinKeyUtils import nasa.nccs.cdas.workers.TransVar import nasa.nccs.cdas.workers.python.{PythonWorker, PythonWorkerPortal} import nasa.nccs.cdas.utilities.{appParameters, runtime} import nasa.nccs.esgf.process._ import nasa.nccs.utilities.{Loggable, ProfilingTool} import nasa.nccs.wps.{WPSProcess, WPSProcessOutput} import org.apache.spark.rdd.RDD import ucar.nc2.Attribute import ucar.{ma2, nc2} import scala.collection.JavaConversions._ import scala.collection.immutable.{SortedMap, TreeMap} import scala.collection.mutable import scala.collection.mutable.SortedSet object Port { def apply( name: String, cardinality: String, description: String="", datatype: String="", identifier: String="" ) = { new Port( name, cardinality, description, datatype, identifier ) } } class Port( val name: String, val cardinality: String, val description: String, val datatype: String, val identifier: String ) extends Serializable { def toXml = { <port name={name} cardinality={cardinality}> { if ( description.nonEmpty ) <description> {description} </description> } { if ( datatype.nonEmpty ) <datatype> {datatype} </datatype> } { if ( identifier.nonEmpty ) <identifier> {identifier} </identifier> } </port> } } class KernelContext( val operation: OperationContext, val grids: Map[String,Option[GridContext]], val sectionMap: Map[String,Option[CDSection]], val domains: Map[String,DomainContainer], _configuration: Map[String,String], val profiler: ProfilingTool ) extends Loggable with Serializable with ScopeContext { val crsOpt = getCRS val trsOpt = getTRS val timings: SortedSet[(Float,String)] = SortedSet.empty val configuration = crsOpt.map( crs => _configuration + ("crs" -> crs ) ) getOrElse( _configuration ) lazy val grid: GridContext = getTargetGridContext def findGrid( varUid: String ): Option[GridContext] = grids.find( item => item._1.split('-')(0).equals(varUid) ).flatMap( _._2 ) def getConfiguration = configuration ++ operation.getConfiguration def getAxes: AxisIndices = grid.getAxisIndices( config("axes", "") ) def getContextStr: String = getConfiguration map { case ( key, value ) => key + ":" + value } mkString ";" def getDomainMetadata(domId: String): Map[String,String] = domains.get(domId) match { case Some(dc) => dc.metadata; case None => Map.empty } def findAnyGrid: GridContext = (grids.find { case (k, v) => v.isDefined }).getOrElse(("", None))._2.getOrElse(throw new Exception("Undefined grid in KernelContext for op " + operation.identifier)) private def getCRS: Option[String] = operation.getDomain flatMap ( domId => domains.get( domId ).flatMap ( dc => dc.metadata.get("crs") ) ) private def getTRS: Option[String] = operation.getDomain flatMap ( domId => domains.get( domId ).flatMap ( dc => dc.metadata.get("trs") ) ) def conf( params: Map[String,String] ): KernelContext = new KernelContext( operation, grids, sectionMap, domains, configuration ++ params, profiler ) def commutativeReduction: Boolean = if( getAxes.includes(0) ) { true } else { false } def doesTimeReduction: Boolean = getAxes.includes(0) def addTimestamp( label: String, log: Boolean = false ): Unit = { 
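    // Record the label against the shared profiler; optionally echo it to the logger as well.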
profiler.timestamp(label) if( log ) { logger.info(label) } } private def getTargetGridContext: GridContext = crsOpt match { case Some( crs ) => if( crs.startsWith("~") ) { findGrid( crs.substring(1) ).getOrElse( throw new Exception(s"Unsupported grid specification '$crs' in KernelContext for op '$operation'" ) ) } else if( crs.contains('~') ) { findAnyGrid } else { throw new Exception( "Currently unsupported crs specification") } case None => findAnyGrid } // def getGridSection( inputId: String ): Option[GridSection] = sectionMap.getOrElse(None).map( section => GridSection()) } //class CDASExecutionContext( val operation: OperationContext, val request: RequestContext, val server: ServerContext ) extends Loggable { // val targetGrids = request.getTargetGrids // // def getOpSections(uid: String): Option[ IndexedSeq[ma2.Section] ] = { // val optargs: Map[String, String] = operation.getConfiguration // val domains: IndexedSeq[DomainContainer] = optargs.get("domain") match { // case Some(domainIds) => domainIds.split(",").map(request.getDomain(_)) // case None => return Some( IndexedSeq.empty[ma2.Section] ) // } //// logger.info( "OPT DOMAIN Arg: " + optargs.getOrElse( "domain", "None" ) ) //// logger.info( "OPT Domains: " + domains.map(_.toString).mkString( ", " ) ) // targetGrids.get(uid).flatMap( _.map ( targetGrid => domains.map( dc => targetGrid.grid.getSubSection(dc.axes).getOrElse(new ma2.Section(List.empty)) ) ) ) // } // // def toKernelContext: KernelContext = { // val sectionMap: Map[String,Option[CDSection]] = request.inputs.mapValues( _.map( _.cdsection ) ).map(identity) // new KernelContext( operation, targetGrids.mapValues(_.map(GridContext(_))), sectionMap, request.getConfiguration ) // } // // def getOpSectionIntersection(uid: String): Option[ ma2.Section ] = getOpSections(uid) match { // case None => return None // case Some( sections ) => // if( sections.isEmpty ) None // else { // val result = sections.foldLeft(sections.head)( _.intersect(_) ) // if (result.computeSize() > 0) { Some(result) } // else return None // } // } // def getOpCDSectionIntersection: Option[ CDSection ] = getOpSectionIntersection().map( CDSection( _ ) ) //} case class ResultManifest( val name: String, val dataset: String, val description: String, val units: String ) class AxisIndices( private val axisIds: Set[Int] = Set.empty ) { def getAxes: Seq[Int] = axisIds.toSeq def args = axisIds.toArray def includes( axisIndex: Int ): Boolean = axisIds.contains( axisIndex ) override def toString = axisIds.mkString(",") } object Kernel extends Loggable { val customKernels = List[Kernel]( new CDMSRegridKernel() ) type RDDKeyValPair = ( RecordKey, RDDRecord ) def getResultFile( resultId: String, deleteExisting: Boolean = false ): File = { val resultsDirPath = appParameters("wps.results.dir", "~/.wps/results").replace( "~", System.getProperty("user.home") ).replaceAll("[()]","-").replace("=","~") val resultsDir = new File(resultsDirPath); resultsDir.mkdirs() val resultFile = new File( resultsDirPath + s"/$resultId.nc" ) if( deleteExisting && resultFile.exists ) resultFile.delete resultFile } def mergeRDD(context: KernelContext)(a0: RDDKeyValPair, a1: RDDKeyValPair ): RDDKeyValPair = { val ( rdd0, rdd1 ) = ( a0._2, a1._2 ) val ( k0, k1 ) = ( a0._1, a1._1 ) val t0 = System.nanoTime logger.info("&MERGE: start (%s <-> %s), sample rdd0 = %s, rdd1 = %s".format( k0.toString, k1.toString, rdd0.head._2.getSampleDataStr(10,0), rdd1.head._2.getSampleDataStr(10,0) ) ) val new_key = k0 + k1 val new_elements = 
rdd0.elements.flatMap { case (elkey, element0) => rdd1.elements.get(elkey).map( element1 => elkey -> { if( k0.start <= k1.start ) { element0.append(element1) } else { element1.append(element0) } } ) } val dt = (System.nanoTime - t0) / 1.0E9 logger.info("&MERGE: complete in time = %.4f s, result sample = %s".format( dt, new_elements.head._2.getSampleDataStr(10,0) ) ) context.addTimestamp("&MERGE: complete, time = %.4f s, key = ".format( dt, new_key.toString ) ) new_key -> RDDRecord( new_elements, rdd0.mergeMetadata("merge", rdd1) ) } def orderedMergeRDD(context: KernelContext)(a0: RDDKeyValPair, a1: RDDKeyValPair ): RDDKeyValPair = { val ( rdd0, rdd1 ) = ( a0._2, a1._2 ) val ( k0, k1 ) = ( a0._1, a1._1 ) val t0 = System.nanoTime val new_key = k0 + k1 val new_elements = rdd0.elements.map { case (key, array) => (key+"%"+k0.elemStart, array) } ++ rdd1.elements.map { case (key, array) => (key+"%"+k1.elemStart, array) } val dt = (System.nanoTime - t0) / 1.0E9 context.addTimestamp("&MERGE: complete, time = %.4f s, key = ".format( dt, new_key.toString ) ) new_key -> RDDRecord( new_elements, rdd0.mergeMetadata("merge", rdd1) ) } def apply(module: String, kernelSpec: String, api: String): Kernel = { val specToks = kernelSpec.split("[;]") customKernels.find(_.matchesSpecs(specToks)) match { case Some(kernel) => kernel case None => api match { case "python" => new zmqPythonKernel(module, specToks(0), specToks(1), specToks(2), str2Map(specToks(3))) } case wtf => throw new Exception("Unrecognized kernel api: " + api) } } private def str2Map( metadata: String ): Map[String,String] = Map( metadata.stripPrefix("{").stripSuffix("}").split("[,]").toSeq map { pair => pair.split("[:]") } map { a => ( a(0).replaceAll("[\"' ]",""), a(1).replaceAll("[\"' ]","") ) }: _* ) } object KernelUtilities extends Loggable { def getWeights( inputId: String, context: KernelContext, weighting_type_opt: Option[String]=None, broadcast: Boolean = true ): CDFloatArray = { val weighting_type = weighting_type_opt.getOrElse( context.config("weights", if (context.config("axes", "").contains('y')) "cosine" else "") ) val t0 = System.nanoTime val weights = context.sectionMap.get( inputId ).flatten match { case Some(section) => weighting_type match { case "cosine" => context.grid.getSpatialAxisData('y', section) match { case Some(axis_data) => computeWeights( weighting_type, Map('y' -> axis_data), section.getShape, Float.MaxValue, broadcast ) case None => logger.warn("Can't access AxisData for variable %s => Using constant weighting.".format(inputId)); CDFloatArray.const(section.getShape, 1f) } case x => if (!x.isEmpty) { logger.warn("Can't recognize weighting method: %s => Using constant weighting.".format(x)) } CDFloatArray.const(section.getShape, 1f) } case None => CDFloatArray.empty } logger.info( "Computed weights in time %.4f s".format( (System.nanoTime - t0) / 1.0E9 ) ) weights } def computeWeights( weighting_type: String, axisDataMap: Map[ Char, ( Int, ma2.Array ) ], shape: Array[Int], invalid: Float, broadcast: Boolean ) : CDFloatArray = { weighting_type match { case "cosine" => axisDataMap.get('y') match { case Some( ( axisIndex, yAxisData ) ) => val axis_length = yAxisData.getSize val axis_data = CDFloatArray.factory( yAxisData, Float.MaxValue ) assert( axis_length == shape(axisIndex), "Y Axis data mismatch, %d vs %d".format(axis_length,shape(axisIndex) ) ) val cosineWeights: CDFloatArray = axis_data.map( x => Math.cos( Math.toRadians(x) ).toFloat ) val base_shape: Array[Int] = Array( shape.indices.map(i => if(i==axisIndex) 
shape(axisIndex) else 1 ): _* ) val weightsArray: CDArray[Float] = CDArray( base_shape, cosineWeights.getStorage, invalid ) if(broadcast) { weightsArray.broadcast( shape ) } weightsArray case None => throw new NoSuchElementException( "Missing axis data in weights computation, type: %s".format( weighting_type )) } case x => throw new NoSuchElementException( "Can't recognize weighting method: %s".format( x )) } } } class KIType { val Op = 0; val MData = 1 } object PartSortUtils { implicit object PartSortOrdering extends Ordering[String] { def compare( k1: String, k2: String ) = k1.split('%')(1).toInt - k2.split('%')(1).toInt } } abstract class Kernel( val options: Map[String,String] = Map.empty ) extends Loggable with Serializable with WPSProcess { import Kernel._ val identifiers = this.getClass.getName.split('$').flatMap(_.split('.')) def operation: String = identifiers.last.toLowerCase def module: String = identifiers.dropRight(1).mkString(".") def id = identifiers.mkString(".") def name = identifiers.takeRight(2).mkString(".") val extInputs: Boolean = options.getOrElse("handlesInput","false").toBoolean val parallelizable: Boolean = options.getOrElse( "parallelize", (!extInputs).toString ).toBoolean val identifier = name def matchesSpecs( specs: Array[String] ): Boolean = { (specs.size >= 2) && specs(0).equals(module) && specs(1).equals(operation) } val nOutputsPerInput: Int = options.getOrElse("nOutputsPerInput","1").toInt val weightsOpt: Option[String] = options.get("weights") val mapCombineOp: Option[ReduceOpFlt] = options.get("mapOp").fold (options.get("mapreduceOp")) (Some(_)) map CDFloatArray.getOp val mapCombineNOp: Option[ReduceNOpFlt] = None val mapCombineWNOp: Option[ReduceWNOpFlt] = None val reduceCombineOp: Option[ReduceOpFlt] = options.get("reduceOp").fold (options.get("mapreduceOp")) (Some(_)) map CDFloatArray.getOp val initValue: Float = 0f def cleanUp() = {} def addWeights( context: KernelContext ): Boolean = { weightsOpt match { case Some( weights ) => val axes = context.operation.getConfiguration("axes") if( weights == "cosine" ) { axes.indexOf( "y" ) > -1 } else throw new Exception( "Unrecognized weights type: " + weights ) case None => false } } def getReduceOp(context: KernelContext): (RDDKeyValPair,RDDKeyValPair)=>RDDKeyValPair = if (context.doesTimeReduction) { reduceCombineOp match { case Some(redOp) => redOp match { case CDFloatArray.customOp => customReduceRDD(context) case op => reduceRDDOp(context) } case None => reduceRDDOp(context) } } else { orderedMergeRDD(context) } def getOpName(context: KernelContext): String = "%s(%s)".format(name, context.operation.inputs.mkString(",")) def map(partIndex: Int, inputs: List[Option[DataFragment]], context: KernelContext): Option[DataFragment] = inputs.head def map(context: KernelContext )( rdd: RDDRecord ): RDDRecord = { rdd } def aggregate(context: KernelContext )( rdd0: RDDRecord, rdd1: RDDRecord ): RDDRecord = { rdd0 } def keyMapper( partIndex: Int, agg_inputs: Iterator[(RecordKey,RDDRecord)] ): Iterator[(RecordKey,RDDRecord)] = { val result = agg_inputs.flatMap { case ( agg_key, agg_record ) => ( 0 until agg_record.getShape(0) ) map ( tindex => ( agg_key.singleElementKey(tindex), agg_record.slice(tindex,1) ) ) } logger.info( s"KeyMapper for part ${partIndex}, size = " + result.length ) result } def combine(context: KernelContext)(a0: DataFragment, a1: DataFragment, axes: AxisIndices): DataFragment = reduceCombineOp match { case Some(combineOp) => if (axes.includes(0)) DataFragment(a0.spec, 
CDFloatArray.combine(combineOp, a0.data, a1.data)) else { a0 ++ a1 } case None => { a0 ++ a1 } } def getCombinedGridfile( inputs: Map[String,ArrayBase[Float]] ): String = { for ( ( id, array ) <- inputs ) array.metadata.get("gridfile") match { case Some(gridfile) => return gridfile; case None => Unit } throw new Exception( " Missing gridfile in kernel inputs: " + name ) } def combineRDDLegacy(context: KernelContext)(rdd0: RDDRecord, rdd1: RDDRecord ): RDDRecord = { val t0 = System.nanoTime val axes = context.getAxes val key_group_prefixes: Set[String] = rdd0.elements.keys.map( key => key.split("~").head ).toSet val key_groups: Set[(String,IndexedSeq[String])] = key_group_prefixes map ( key_prefix => key_prefix -> rdd0.elements.keys.filter( _.split("~").head.equals( key_prefix ) ).toIndexedSeq ) val new_elements: IndexedSeq[(String,HeapFltArray)] = key_groups.toIndexedSeq.flatMap { case ( group_key, key_group ) => val elements0: IndexedSeq[(String,HeapFltArray)] = key_group flatMap ( key => rdd0.elements.filter { case (k,v) => if(key.contains("_WEIGHTS_")) k.equals(key) else k.startsWith(key) } ) val elements1: IndexedSeq[(String,HeapFltArray)] = key_group flatMap ( key => rdd1.elements.filter { case (k,v) => if(key.contains("_WEIGHTS_")) k.equals(key) else k.startsWith(key) } ) if( elements0.size != elements1.size ) { throw new Exception( s"Mismatched rdds in reduction for kernel ${context.operation.identifier}: ${elements0.size} != ${elements1.size}" ) } if( elements0.size != nOutputsPerInput ) { throw new Exception( s"Wrong number of elements in reduction rdds for kernel ${context.operation.identifier}: ${elements0.size} != ${nOutputsPerInput}, element keys = [${elements0.map(_._1).mkString(",")}]" ) } if( elements0.size != elements1.size ) { throw new Exception( s"Mismatched rdds in reduction for kernel ${context.operation.identifier}: ${elements0.size} != ${elements1.size}" ) } if( elements0.size == 1 ) { reduceCombineOp match { case Some(combineOp) => if (axes.includes(0)) IndexedSeq(group_key -> elements0(0)._2.combine(combineOp, elements1(0)._2)) else IndexedSeq(group_key -> elements0(0)._2.append(elements1(0)._2)) case None => IndexedSeq(group_key -> elements0(0)._2.append(elements1(0)._2)) } } else { reduceCombineOp match { case Some(combineOp) => if (axes.includes(0)) combineElements( group_key, Map(elements0:_*), Map(elements1:_*) ) else appendElements( group_key, Map(elements0:_*), Map(elements1:_*) ) case None => appendElements( group_key, Map(elements0:_*), Map(elements1:_*) ) } } } // logger.debug("&COMBINE: %s, time = %.4f s".format( context.operation.name, (System.nanoTime - t0) / 1.0E9 ) ) context.addTimestamp( "combineRDD complete" ) RDDRecord( TreeMap(new_elements:_*), rdd0.mergeMetadata(context.operation.name, rdd1) ) } def combineRDD(context: KernelContext)(rdd0: RDDRecord, rdd1: RDDRecord ): RDDRecord = { val t0 = System.nanoTime val axes = context.getAxes val elements0: IndexedSeq[(String,HeapFltArray)] = rdd0.elements.toIndexedSeq val keys = rdd0.elements.keys if( keys.size != nOutputsPerInput ) { throw new Exception( s"Wrong number of elements in reduction rdds for kernel ${context.operation.identifier}: ${keys.size} != ${nOutputsPerInput}, element keys = [${keys.mkString(",")}]" ) } val new_elements: IndexedSeq[(String,HeapFltArray)] = elements0 flatMap { case ( key0, array0 ) => rdd1.elements.get(key0) match { case Some(array1) => reduceCombineOp match { case Some(combineOp) => if (axes.includes(0)) Some( key0 -> array0.combine( combineOp, array1 ) ) else 
Some(key0 -> array0.append(array1)) case None => Some(key0 -> array0.append(array1)) } case None => None } } // logger.debug("&COMBINE: %s, time = %.4f s".format( context.operation.name, (System.nanoTime - t0) / 1.0E9 ) ) context.addTimestamp( "combineRDD complete" ) RDDRecord( TreeMap(new_elements:_*), rdd0.mergeMetadata(context.operation.name, rdd1) ) } def combineElements( key: String, elements0: Map[String,HeapFltArray], elements1: Map[String,HeapFltArray] ): IndexedSeq[(String,HeapFltArray)] = { options.get("reduceOp") match { case Some( reduceOp ) => if( reduceOp.toLowerCase == "sumw" ) { weightedSumReduction( key, elements0, elements1 ) } else if( reduceOp.toLowerCase == "avew" ) { weightedAveReduction( key, elements0, elements1 ) } else { throw new Exception( s"Unimplemented multi-input reduce op for kernel ${identifier}: " + reduceOp ) } case None => logger.warn( s"No reduce op defined for kernel ${identifier}, appending elements" ) appendElements( key, elements0, elements1 ) } } def missing_element( key: String ) = throw new Exception( s"Missing element in weightedSumReduction for Kernel ${identifier}, key: " + key ) def getFloatBuffer( size: Int ): FloatBuffer = { val vbb: ByteBuffer = ByteBuffer.allocateDirect( size * 4 ) vbb.order( ByteOrder.nativeOrder() ); // use the device hardware's native byte order vbb.asFloatBuffer(); } def weightedSumReduction( key: String, elements0: Map[String,HeapFltArray], elements1: Map[String,HeapFltArray] ): IndexedSeq[(String,HeapFltArray)] = { val key_lists = elements0.keys.partition( _.endsWith("_WEIGHTS_") ) val weights_key = key_lists._1.headOption.getOrElse( throw new Exception( s"Can't find weignts key in weightedSumReduction for Kernel ${identifier}, keys: " + elements0.keys.mkString(",") ) ) val values_key = key_lists._2.headOption.getOrElse( throw new Exception( s"Can't find values key in weightedSumReduction for Kernel ${identifier}, keys: " + elements0.keys.mkString(",") ) ) val weights0 = elements0.getOrElse( weights_key, missing_element(key) ) val weights1 = elements1.getOrElse( weights_key, missing_element(key) ) val values0 = elements0.getOrElse( values_key, missing_element(key) ) val values1 = elements1.getOrElse( values_key, missing_element(key) ) val t0 = System.nanoTime() val resultWeights = FloatBuffer.allocate( values0.data.length ) val resultValues = FloatBuffer.allocate( weights0.data.length ) values0.missing match { case Some( undef ) => for( index <- values0.data.indices; v0 = values0.data(index); v1 = values1.data(index) ) { if( v0 == undef || v0.isNaN ) { if( v1 == undef || v1.isNaN ) { resultValues.put( index, undef ) } else { resultValues.put( index, v1 ) resultWeights.put( index, weights1.data(index) ) } } else if( v1 == undef || v1.isNaN ) { resultValues.put( index, v0 ) resultWeights.put( index, weights0.data(index) ) } else { val w0 = weights0.data(index) val w1 = weights1.data(index) resultValues.put( index, v0 + v1 ) resultWeights.put( index, w0 + w1 ) } } case None => for( index <- values0.data.indices ) { resultValues.put( values0.data(index) + values1.data(index) ) resultWeights.put( weights0.data(index) + weights1.data(index) ) } } val valuesArray = HeapFltArray( CDFloatArray( values0.shape, resultValues.array, values0.missing.getOrElse(Float.MaxValue) ), values0.origin, values0.metadata, values0.weights ) val weightsArray = HeapFltArray( CDFloatArray( weights0.shape, resultWeights.array, weights0.missing.getOrElse(Float.MaxValue) ), weights0.origin, weights0.metadata, weights0.weights ) 
logger.info("Completed weightedSumReduction '%s' in %.4f sec, shape = %s".format(identifier, ( System.nanoTime() - t0 ) / 1.0E9, values0.shape.mkString(",") ) ) IndexedSeq( values_key -> valuesArray, weights_key -> weightsArray ) } def weightedAveReduction( key: String, elements0: Map[String,HeapFltArray], elements1: Map[String,HeapFltArray] ): IndexedSeq[(String,HeapFltArray)] = { val key_lists = elements0.keys.partition( _.endsWith("_WEIGHTS_") ) val weights_key = key_lists._1.headOption.getOrElse( throw new Exception( s"Can't find weignts key in weightedSumReduction for Kernel ${identifier}, keys: " + elements0.keys.mkString(",") ) ) val values_key = key_lists._2.headOption.getOrElse( throw new Exception( s"Can't find values key in weightedSumReduction for Kernel ${identifier}, keys: " + elements0.keys.mkString(",") ) ) val weights0 = elements0.getOrElse( weights_key, missing_element(key) ) val weights1 = elements1.getOrElse( weights_key, missing_element(key) ) val values0 = elements0.getOrElse( values_key, missing_element(key) ) val values1 = elements1.getOrElse( values_key, missing_element(key) ) val t0 = System.nanoTime() val weightsSum = FloatBuffer.allocate( values0.data.length ) val weightedValues0 = FloatBuffer.allocate( values0.data.length ) val weightedValues1 = FloatBuffer.allocate( values0.data.length ) values0.missing match { case Some( undef ) => for( index <- values0.data.indices; v0 = values0.data(index); v1 = values1.data(index) ) { if( v0 == undef || v0.isNaN ) { if( v1 == undef || v1.isNaN ) { weightedValues0.put( index, undef ) weightedValues1.put( index, undef ) } else { weightedValues0.put( index, undef ) weightedValues1.put( index, v1*weights1.data(index) ) weightsSum.put( index, weights1.data(index) ) } } else if( v1 == undef || v1.isNaN ) { weightedValues0.put( index, v0*weights0.data(index) ) weightedValues1.put( index, undef ) weightsSum.put( index, weights0.data(index) ) } else { weightedValues0.put( index, values0.data(index) * weights0.data(index) ) weightedValues1.put( index, values1.data(index) * weights1.data(index) ) weightsSum.put( index, weights0.data(index) + weights1.data(index) ) } } for( index <- values0.data.indices; wv0 = weightedValues0.get(index); wv1 = weightedValues1.get(index); ws = weightsSum.get(index) ) { if( wv0 == undef ) { if (wv1 == undef) { weightedValues0.put(index, undef) } else { weightedValues0.put(index, wv1/ws) } } else if (wv1 == undef) { weightedValues0.put(index, wv0 / ws) } else { weightedValues0.put( index, (wv0 + wv1) / ws ) } } case None => for( index <- values0.data.indices ) { weightedValues0.put( index, values0.data(index) * weights0.data(index) ) weightedValues1.put( index, values1.data(index) * weights1.data(index) ) weightsSum.put( index, weights0.data(index) + weights1.data(index) ) } for( index <- values0.data.indices ) { weightedValues0.put( index, (weightedValues0.get(index) + weightedValues1.get(index)) / weightsSum.get(index) ) } } val valuesArray = HeapFltArray( CDFloatArray( values0.shape, weightedValues0.array, values0.missing.getOrElse(Float.MaxValue) ), values0.origin, values0.metadata, values0.weights ) val weightsArray = HeapFltArray( CDFloatArray( weights0.shape, weightsSum.array, weights0.missing.getOrElse(Float.MaxValue) ), weights0.origin, weights0.metadata, weights0.weights ) logger.info("Completed weightedAveReduction '%s' in %.4f sec, shape = %s".format(identifier, ( System.nanoTime() - t0 ) / 1.0E9, values0.shape.mkString(",") ) ) IndexedSeq( values_key -> valuesArray, weights_key -> 
weightsArray ) } def appendElements( key: String, elements0: Map[String,HeapFltArray], elements1: Map[String,HeapFltArray] ): IndexedSeq[(String,HeapFltArray)] = { elements0 flatMap { case (key,fltArray) => elements1.get(key) map ( fltArray1 => key -> fltArray.append(fltArray1) ) } toIndexedSeq } def customReduceRDD(context: KernelContext)(a0: RDDKeyValPair, a1: RDDKeyValPair ): RDDKeyValPair = { logger.warn( s"No reducer defined for parallel op '$name', executing simple merge." ) mergeRDD(context)( a0, a1 ) } def postOp(result: DataFragment, context: KernelContext): DataFragment = result def orderElements( op_result: RDDRecord, context: KernelContext ): RDDRecord = if ( context.doesTimeReduction ) { op_result } else { val sorted_keys = op_result.elements.keys.toIndexedSeq.sortBy( key => key.split('%')(1).toInt ) val resultMap = mutable.HashMap.empty[String,HeapFltArray] sorted_keys.foreach( key => op_result.elements.get(key) match { case Some( array ) => val base_key = key.split('%').head resultMap.get( base_key ) match { case Some( existing_array ) => resultMap.put( base_key, existing_array.append(array) ) case None => resultMap.put( base_key, array ) } case None => Unit }) new RDDRecord( TreeMap( resultMap.toIndexedSeq:_* ), op_result.metadata ) } def postRDDOp( pre_result: RDDRecord, context: KernelContext ): RDDRecord = { options.get("postOp") match { case Some( postOp ) => if( postOp == "normw") { val key_lists = pre_result.elements.keys.partition( _.endsWith("_WEIGHTS_") ) val weights_key = key_lists._1.headOption.getOrElse( throw new Exception( s"Can't find weignts key in postRDDOp for Kernel ${identifier}, keys: " + pre_result.elements.keys.mkString(",") ) ) val values_key = key_lists._2.headOption.getOrElse( throw new Exception( s"Can't find values key in postRDDOp for Kernel ${identifier}, keys: " + pre_result.elements.keys.mkString(",") ) ) val weights = pre_result.elements.getOrElse( weights_key, missing_element(weights_key) ) val values = pre_result.elements.getOrElse( values_key, missing_element(values_key) ) val averageValues = FloatBuffer.allocate( values.data.length ) values.missing match { case Some( undef ) => for( index <- values.data.indices; value = values.data(index) ) { if( value == undef || value.isNaN ) { undef } else { val wval = weights.data(index) averageValues.put( value / wval ) } } case None => for( index <- values.data.indices ) { averageValues.put( values.data(index) / weights.data(index) ) } } val valuesArray = HeapFltArray( CDFloatArray( values.shape, averageValues.array, values.missing.getOrElse(Float.MaxValue) ), values.origin, values.metadata, values.weights ) context.addTimestamp( "postRDDOp complete" ) new RDDRecord( TreeMap( values_key -> valuesArray ), pre_result.metadata ) } else if( (postOp == "sqrt") || (postOp == "rms") ) { val new_elements = pre_result.elements map { case (values_key, values) => val averageValues = FloatBuffer.allocate(values.data.length) values.missing match { case Some(undef) => if( postOp == "sqrt" ) { for (index <- values.data.indices; value = values.data(index)) { if (value == undef || value.isNaN ) { undef } else { averageValues.put(Math.sqrt(value).toFloat) } } } else if( postOp == "rms" ) { val axes = context.config("axes", "").toUpperCase // values.metadata.getOrElse("axes","") val roi: ma2.Section = CDSection.deserialize( values.metadata.getOrElse("roi","") ) val reduce_ranges = axes.flatMap( axis => CDSection.getRange( roi, axis.toString ) ) val norm_factor = reduce_ranges.map( _.length() ).fold(1)(_ * _) - 1 if( 
norm_factor == 0 ) { throw new Exception( "Missing or unrecognized 'axes' parameter in rms reduce op")} for (index <- values.data.indices; value = values.data(index)) { if (value == undef || value.isNaN ) { undef } else { averageValues.put(Math.sqrt(value/norm_factor).toFloat ) } } } case None => if( postOp == "sqrt" ) { for (index <- values.data.indices) { averageValues.put(Math.sqrt(values.data(index)).toFloat) } } else if( postOp == "rms" ) { val norm_factor = values.metadata.getOrElse("N", "1").toInt - 1 if( norm_factor == 1 ) { logger.error( "Missing norm factor in rms") } for (index <- values.data.indices) { averageValues.put( Math.sqrt(values.data(index)/norm_factor).toFloat ) } } } val newValuesArray = HeapFltArray(CDFloatArray(values.shape, averageValues.array, values.missing.getOrElse(Float.MaxValue)), values.origin, values.metadata, values.weights) ( values_key -> newValuesArray ) } context.addTimestamp( "postRDDOp complete" ) new RDDRecord( new_elements, pre_result.metadata ) } else { throw new Exception( "Unrecognized postOp configuration: " + postOp ) } case None => pre_result } } def reduceOp(context: KernelContext)(a0op: Option[DataFragment], a1op: Option[DataFragment]): Option[DataFragment] = { val t0 = System.nanoTime val axes: AxisIndices = context.grid.getAxisIndices(context.config("axes", "")) val rv = a0op match { case Some(a0) => a1op match { case Some(a1) => Some(combine(context)(a0, a1, axes)) case None => Some(a0) } case None => a1op match { case Some(a1) => Some(a1) case None => None } } // logger.info("Executed %s reduce op, time = %.4f s".format( context.operation.name, (System.nanoTime - t0) / 1.0E9 ) ) rv } def reduceRDDOp(context: KernelContext)(a0: RDDKeyValPair, a1: RDDKeyValPair ): RDDKeyValPair = (a0._1 + a1._1) -> combineRDD(context)( a0._2, a1._2 ) def getDataSample(result: CDFloatArray, sample_size: Int = 20): Array[Float] = { val result_array = result.floatStorage.array val start_value = result_array.size / 3 result_array.slice(start_value, Math.min(start_value + sample_size, result_array.size)) } def toXmlHeader = <kernel module={module} name={name}> {if (title.nonEmpty) <title> {title} </title>} {if (description.nonEmpty) <description> {description} </description>} </kernel> def getStringArg(args: Map[String, String], argname: String, defaultVal: Option[String] = None): String = { args.get(argname) match { case Some(sval) => sval case None => defaultVal match { case None => throw new Exception(s"Parameter $argname (int) is reqired for operation " + this.id); case Some(sval) => sval } } } def getIntArg(args: Map[String, String], argname: String, defaultVal: Option[Int] = None): Int = { args.get(argname) match { case Some(sval) => try { sval.toInt } catch { case err: NumberFormatException => throw new Exception(s"Parameter $argname must ba an integer: $sval") } case None => defaultVal match { case None => throw new Exception(s"Parameter $argname (int) is reqired for operation " + this.id); case Some(ival) => ival } } } def getFloatArg(args: Map[String, String], argname: String, defaultVal: Option[Float] = None): Float = { args.get(argname) match { case Some(sval) => try { sval.toFloat } catch { case err: NumberFormatException => throw new Exception(s"Parameter $argname must ba a float: $sval") } case None => defaultVal match { case None => throw new Exception(s"Parameter $argname (float) is reqired for operation " + this.id); case Some(fval) => fval } } } // def weightedValueSumCombiner(context: KernelContext)(a0: DataFragment, a1: DataFragment, 
axes: AxisIndices): DataFragment = { // if (axes.includes(0)) { // val vTot: CDFloatArray = a0.data + a1.data // val wTotOpt: Option[CDFloatArray] = a0.weights.map(w => w + a1.weights.get) // val dataMap = wTotOpt match { // case Some(wTot) => Map("value" -> vTot, "weights" -> wTot) // case None => Map("value" -> vTot) // } // logger.info("weightedValueSumCombiner, values shape = %s, result spec = %s".format(vTot.getShape.mkString(","), a0.spec.toString)) // new DataFragment(a0.spec, dataMap, DataFragment.combineCoordMaps(a0, a1)) // } // else { // a0 ++ a1 // } // } // // def weightedValueSumPostOp(result: DataFragment, context: KernelContext): DataFragment = result.weights match { // case Some(weights_sum) => // logger.info("weightedValueSumPostOp, values shape = %s, weights shape = %s, result spec = %s".format(result.data.getShape.mkString(","), weights_sum.getShape.mkString(","), result.spec.toString)) // new DataFragment(result.spec, Map("value" -> result.data / weights_sum, "weights" -> weights_sum), result.optCoordMap) // case None => // result // } def fltArray(a0: RDDRecord, elem: String): ( CDFloatArray, Float ) = a0.element(elem) match { case Some(data) => ( data.toCDFloatArray, data.getMissing() ); case None => throw new Exception("Error missing array element: " + elem) } def toFastMaskedArray(a0: RDDRecord, elem: String): FastMaskedArray = a0.element(elem) match { case Some(data) => data.toFastMaskedArray case None => throw new Exception("Error missing array element: " + elem) } def optFltArray(a0: RDDRecord, elem: String): Option[CDFloatArray] = a0.element(elem).map(_.toCDFloatArray) def wtArray(a0: RDDRecord, elem: String): Option[CDFloatArray] = a0.element(elem).flatMap( _.toCDWeightsArray ) def wtFastMaskedArray(a0: RDDRecord, elem: String): Option[FastMaskedArray] = a0.element(elem).flatMap( _.toMa2WeightsArray ) def originArray(a0: RDDRecord, elem: String): Array[Int] = a0.element(elem) match { case Some(data) => data.origin; case None => throw new Exception("Error missing array element: " + elem) } def arrayMdata(a0: RDDRecord, elem: String): Map[String, String] = a0.element(elem) match { case Some(data) => data.metadata; case None => Map.empty } def weightedValueSumRDDCombiner( context: KernelContext)(a0: RDDRecord, a1: RDDRecord ): RDDRecord = { val axes = context.getAxes if (axes.includes(0)) { val t0 = System.nanoTime val elems = a0.elements flatMap { case (key, data0) => a1.elements.get( key ) match { case Some( data1 ) => val vTot: FastMaskedArray = data0.toFastMaskedArray + data1.toFastMaskedArray val t1 = System.nanoTime val wTotOpt: Option[Array[Float]] = data0.toMa2WeightsArray flatMap { wtsArray0 => data1.toMa2WeightsArray map { wtsArray1 => (wtsArray0 + wtsArray1).toFloatArray } } val t2 = System.nanoTime val array_mdata = MetadataOps.mergeMetadata (context.operation.name) (data0.metadata, data1.metadata ) Some( key -> HeapFltArray (vTot.toCDFloatArray, data0.origin, array_mdata, wTotOpt) ) case None => logger.warn("Missing elemint in Record combine: " + key); None } } val part_mdata = MetadataOps.mergeMetadata( context.operation.name )( a0.metadata, a1.metadata ) val t3 = System.nanoTime context.addTimestamp( "weightedValueSumCombiner complete" ) new RDDRecord( elems, part_mdata ) } else { a0 ++ a1 } } // def weightedValueSumRDDPostOpLegacy(result: RDDRecord, context: KernelContext): RDDRecord = { // val rid = context.operation.rid // wtFastMaskedArray( result, rid ) match { // case Some(w0) => // val v0 = toFastMaskedArray(result, rid) // val vOrigin: 
Array[Int] = originArray(result, rid) // logger.info("weightedValueSumPostOp, values shape = %s, weights shape = %s, result spec = %s, values sample = [ %s ], weights sample = [ %s ]".format(v0.array.getShape.mkString(","), w0.array.getShape.mkString(","), result.metadata.toString, v0.toCDFloatArray.mkBoundedDataString(", ",16), w0.toCDFloatArray.mkBoundedDataString(", ",16))) // context.addTimestamp( "weightedValueSumPostOp complete" ) // new RDDRecord( Map(rid -> HeapFltArray( (v0 / w0).toCDFloatArray, vOrigin, arrayMdata(result, "value"), Some( w0.toCDFloatArray.getArrayData() ) ) ), result.metadata ) // case None => // logger.info("weightedValueSumPostOp: NO WEIGHTS!, Elems:") // result.elements.foreach { case (key, heapFltArray) => logger.info(" ** key: %s, values sample = [ %s ]".format( key, heapFltArray.toCDFloatArray.mkBoundedDataString(", ",16)) ) } // result // } // } def weightedValueSumRDDPostOp(result: RDDRecord, context: KernelContext): RDDRecord = { val new_elements = result.elements map { case (key, fltArray ) => fltArray.toMa2WeightsArray match { case Some( wtsArray ) => (key, HeapFltArray( ( fltArray.toFastMaskedArray / wtsArray ).toCDFloatArray, fltArray.origin, fltArray.metadata, None ) ) case None => (key, fltArray ) } } // logger.info( "weightedValueSumPostOp:, Elems:" ) // new_elements.foreach { case (key, heapFltArray) => logger.info(" ** key: %s, values sample = [ %s ]".format( key, heapFltArray.toCDFloatArray.mkBoundedDataString(", ",16)) ) } new RDDRecord( new_elements, result.metadata ) } def getMontlyBinMap(id: String, context: KernelContext): CDCoordMap = { context.sectionMap.get(id).flatten.map( _.toSection ) match { case Some( section ) => val cdTimeCoordMap: CDTimeCoordMap = new CDTimeCoordMap( context.grid, section ) cdTimeCoordMap.getMontlyBinMap( section ) case None => throw new Exception( "Error, can't get section for input " + id ) } } } //abstract class MultiKernel extends Kernel { // val kernels: List[Kernel] // // def execute( context: CDASExecutionContext, nprocs: Int ): WPSResponse = { // val inputs: List[PartitionedFragment] = inputVars( context ) // for( kernel: Kernel <- kernels ) { // val result = kernel.mapReduce( inputs, context, nprocs ) // } // } //} /* abstract class DualOperationKernel extends Kernel { def mapReduce( inputs: List[PartitionedFragment], context: CDASExecutionContext, nprocs: Int ): Future[Option[DataFragment]] = { val future_results1: IndexedSeq[Future[Option[DataFragment]]] = (0 until nprocs).map( iproc => Future { map1(iproc,inputs,context) } ) reduce1( future_results1, context ) val future_results2: IndexedSeq[Future[Option[DataFragment]]] = (0 until nprocs).map2( iproc => Future { map(iproc,inputs,context) } ) reduce2( future_results2, context ) } def map( partIndex: Int, inputs: List[PartitionedFragment], context: CDASExecutionContext ): Option[DataFragment] = { val t0 = System.nanoTime val inputVar = inputs.head val axes: AxisIndices = context.request.getAxisIndices( context.operation.config("axes","") ) inputVar.domainDataFragment(partIndex).map { (dataFrag) => val async = context.config("async", "false").toBoolean val resultFragSpec = dataFrag.getReducedSpec(axes) val result_val_masked: CDFloatArray = mapCombineOpt match { case Some( combineOp ) => dataFrag.data.reduce( combineOp, axes.args, initValue ) case None => dataFrag.data } logger.info("Executed Kernel %s[%d] map op, time = %.4f s".format(name, partIndex, (System.nanoTime - t0) / 1.0E9)) new DataFragment(resultFragSpec, result_val_masked) } } def 
weightedValueSumCombiner(context: CDASExecutionContext)(a0: DataFragment, a1: DataFragment, axes: AxisIndices ): DataFragment = { if ( axes.includes(0) ) { val vTot = a0.data + a1.data val wTot = a0.optData.map( w => w + a1.optData.get ) new DataFragment( a0.spec, vTot, wTot ) } else { a0 ++ a1 } } def weightedValueSumPostOp( future_result: Future[Option[DataFragment]], context: CDASExecutionContext ): Future[Option[DataFragment]] = { future_result.map( _.map( (result: DataFragment) => result.optData match { case Some( weights_sum ) => new DataFragment( result.spec, result.data / weights_sum, result.optData ) case None => result } ) ) } } */ //abstract class SingularKernel extends Kernel { // override def map( partIndex: Int, inputs: List[Option[DataFragment]], context: KernelContext ): Option[DataFragment] = { // val t0 = System.nanoTime // val axes: AxisIndices = context.grid.getAxisIndices( context.config("axes","") ) // logger.info("\n\n ****** SingularKernel-reduceOp START, axes = " + axes.getAxes.mkString(",") + "\n") // inputs.head.map( dataFrag => { // val async = context.config("async", "false").toBoolean // val resultFragSpec = dataFrag.getReducedSpec(axes) // val result_val_masked: CDFloatArray = mapCombineOpt match { // case Some(combineOp) => // val result = dataFrag.data.reduce(combineOp, axes.args, initValue) // logger.info(" ****** SingularKernel-reduceOp, shape = " + result.getShape.mkString(",")) // result // case None => // logger.info(" ****** SingularKernel-No-Op") // dataFrag.data // } // logger.info("Executed Kernel %s[%d] map op, time = %.4f s".format(name, partIndex, (System.nanoTime - t0) / 1.0E9)) // DataFragment(resultFragSpec, result_val_masked) // } ) // } //} // //abstract class DualKernel extends Kernel { // override def map( partIndex: Int, inputs: List[Option[DataFragment]], context: KernelContext ): Option[DataFragment] = { // val t0 = System.nanoTime // val axes: AxisIndices = context.grid.getAxisIndices( context.config("axes","") ) // assert( inputs.length > 1, "Missing input(s) to dual input operation " + id ) // inputs(0).flatMap( dataFrag0 => { // inputs(1).map( dataFrag1 => { // logger.info("DualKernel: %s[%s] + %s[%s]".format( dataFrag0.spec.longname, dataFrag0.data.getShape.mkString(","), dataFrag1.spec.longname, dataFrag1.data.getShape.mkString(",") ) ) // val async = context.config("async", "false").toBoolean // val result_val_masked: DataFragment = mapCombineOpt match { // case Some(combineOp) => // logger.info( "DIFF2: dataFrag0 coordMap = %s".format( dataFrag0.optCoordMap.map( _.toString ).getOrElse("") ) ) // logger.info( "DIFF2: dataFrag1 coordMap = %s".format( dataFrag1.optCoordMap.map( _.toString ).getOrElse("") ) ) // DataFragment.combine( combineOp, dataFrag0, dataFrag1 ) // case None => dataFrag0 // } // logger.info("\nExecuted Kernel %s[%d] map op, time = %.4f s".format(name, partIndex, (System.nanoTime - t0) / 1.0E9)) //// logger.info("->> input0(%s): %s".format(dataFrag0.spec.varname, dataFrag0.data.mkDataString(","))) //// logger.info("->> input1(%s): %s".format(dataFrag1.spec.varname, dataFrag1.data.mkDataString(","))) //// logger.info("->> result: %s".format(result_val_masked.data.mkDataString(","))) // result_val_masked // }) // }) // } //} abstract class SingularRDDKernel( options: Map[String,String] = Map.empty ) extends Kernel(options) { override def map ( context: KernelContext ) ( inputs: RDDRecord ): RDDRecord = { val t0 = System.nanoTime val axes: AxisIndices = context.grid.getAxisIndices( context.config("axes","") ) val 
async = context.config("async", "false").toBoolean val inputId = context.operation.inputs.headOption.getOrElse("NULL") val shape = inputs.elements.head._2.shape logger.debug(" ##### KERNEL [%s]: Map Op: combine, input shape = %s".format( name, shape.mkString(",") ) ) runtime.printMemoryUsage val elem = inputs.findElements(inputId).headOption match { case Some( input_array ) => mapCombineOp match { case Some(combineOp) => val cdinput = input_array.toFastMaskedArray val result = cdinput.reduce(combineOp, axes.args, initValue).toCDFloatArray // logger.info( "Input data sample = [ %s ]".format(cdinput.toCDFloatArray.getArrayData(30).map( _.toString ).mkString(", ") ) ) logger.info(" ##### KERNEL [%s]: Map Op: combine, axes = %s, result shape = %s, result value[0] = %.4f".format( name, axes, result.getShape.mkString(","), result.getArrayData(1)(0) ) ) val result_metadata = inputs.metadata ++ input_array.metadata ++ List("uid" -> context.operation.rid, "gridfile" -> getCombinedGridfile(inputs.elements)) context.operation.rid -> HeapFltArray( result, input_array.origin, result_metadata, None ) case None => logger.info(" ##### KERNEL [%s]: Map Op: NONE".format( name ) ) context.operation.rid -> HeapFltArray( input_array.toCDFloatArray, input_array.origin, input_array.metadata, None ) } case None => throw new Exception( "Missing input to '" + this.getClass.getName + "' map op: " + inputId + ", available inputs = " + inputs.elements.keySet.mkString(",") ) } val dt = (System.nanoTime - t0) / 1.0E9 logger.info("Executed Kernel %s map op, time = %.4f s".format(name, dt )) context.addTimestamp( "Map Op complete, time = %.4f s, shape = (%s), record mdata = %s".format( dt, shape.mkString(","), inputs.metadata.mkString(";") ) ) RDDRecord( TreeMap( elem ), inputs.metadata ) } } abstract class DualRDDKernel( options: Map[String,String] ) extends Kernel(options) { override def map ( context: KernelContext ) (inputs: RDDRecord ): RDDRecord = { if( mapCombineOp.isDefined ) { val t0 = System.nanoTime val input_arrays: List[ArrayBase[Float]] = context.operation.inputs.map(id => inputs.findElements(id)).foldLeft(List[ArrayBase[Float]]())(_ ++ _) assert(input_arrays.size > 1, "Missing input(s) to dual input operation " + id + ": required inputs=(%s), available inputs=(%s)".format(context.operation.inputs.mkString(","), inputs.elements.keySet.mkString(","))) val ma2_input_arrays = input_arrays.map( _.toFastMaskedArray ) val result_array: CDFloatArray = ma2_input_arrays(0).merge( ma2_input_arrays(1), mapCombineOp.get ).toCDFloatArray val result_metadata = input_arrays.head.metadata ++ inputs.metadata ++ List("uid" -> context.operation.rid, "gridfile" -> getCombinedGridfile(inputs.elements)) logger.info("Executed Kernel %s map op, time = %.4f s".format(name, (System.nanoTime - t0) / 1.0E9)) context.addTimestamp("Map Op complete") RDDRecord( TreeMap(context.operation.rid -> HeapFltArray(result_array, input_arrays(0).origin, result_metadata, None)), inputs.metadata) } else { inputs } } } //abstract class MultiRDDKernel( options: Map[String,String] ) extends Kernel(options) { // // override def map ( context: KernelContext ) (inputs: RDDRecord ): RDDRecord = { // val t0 = System.nanoTime // val axes: AxisIndices = context.grid.getAxisIndices( context.config("axes","") ) // val async = context.config("async", "false").toBoolean // val input_arrays: List[ArrayBase[Float]] = context.operation.inputs.map( id => inputs.findElements(id) ).foldLeft(List[ArrayBase[Float]]())( _ ++ _ ) // assert( input_arrays.size > 1, "Missing 
input(s) to operation " + id + ": required inputs=(%s), available inputs=(%s)".format( context.operation.inputs.mkString(","), inputs.elements.keySet.mkString(",") ) ) // val cdFloatArrays = input_arrays.map( _.toCDFloatArray ).toArray // val final_result: CDFloatArray = if( mapCombineNOp.isDefined ) { // CDFloatArray.combine( mapCombineNOp.get, cdFloatArrays ) // } else if( mapCombineWNOp.isDefined ) { // val (result_array, countArray) = CDFloatArray.combine( mapCombineWNOp.get, cdFloatArrays ) // result_array / countArray // } else { throw new Exception("Undefined operation in MultiRDDKernel") } // logger.info("&MAP: Finished Kernel %s, time = %.4f s".format(name, (System.nanoTime - t0) / 1.0E9)) // context.addTimestamp( "Map Op complete" ) // val result_metadata = input_arrays.head.metadata ++ List( "uid" -> context.operation.rid, "gridfile" -> getCombinedGridfile( inputs.elements ) ) // RDDRecord( Map( context.operation.rid -> HeapFltArray(final_result, input_arrays(0).origin, result_metadata, None) ), inputs.metadata ) // } //} class CDMSRegridKernel extends zmqPythonKernel( "python.cdmsmodule", "regrid", "Regridder", "Regrids the inputs using UVCDAT", Map( "parallelize" -> "True" ) ) { override def map ( context: KernelContext ) (inputs: RDDRecord ): RDDRecord = { logger.info("&&MAP&&") val t0 = System.nanoTime val workerManager: PythonWorkerPortal = PythonWorkerPortal.getInstance val worker: PythonWorker = workerManager.getPythonWorker try { val targetGridSpec: String = context.config("gridSpec", inputs.elements.values.head.gridSpec) val input_arrays: List[HeapFltArray] = context.operation.inputs.map(id => inputs.findElements(id)).foldLeft(List[HeapFltArray]())(_ ++ _) assert(input_arrays.nonEmpty, "Missing input(s) to operation " + id + ": required inputs=(%s), available inputs=(%s)".format(context.operation.inputs.mkString(","), inputs.elements.keySet.mkString(","))) val (acceptable_arrays, regrid_arrays) = input_arrays.partition(_.gridSpec.equals(targetGridSpec)) if (regrid_arrays.isEmpty) { logger.info("&MAP: NoOp for Kernel %s".format(name)) inputs } else { for (input_array <- acceptable_arrays) { worker.sendArrayMetadata( input_array.uid, input_array) } for (input_array <- regrid_arrays) { worker.sendRequestInput(input_array.uid, input_array) } val acceptable_array_map = Map(acceptable_arrays.map(array => array.uid -> array): _*) logger.info("Gateway: Executing operation %s".format( context.operation.identifier ) ) val context_metadata = indexAxisConf(context.getConfiguration, context.grid.axisIndexMap) + ("gridSpec" -> targetGridSpec ) val rID = UID() worker.sendRequest("python.cdmsModule.regrid-" + rID, regrid_arrays.map(_.uid).toArray, context_metadata ) val resultItems = for (input_array <- regrid_arrays) yield { val tvar = worker.getResult val result = HeapFltArray( tvar, Some(targetGridSpec) ) context.operation.rid + ":" + input_array.uid -> result } val array_metadata = inputs.metadata ++ input_arrays.head.metadata ++ List("uid" -> context.operation.rid, "gridSpec" -> targetGridSpec ) val array_metadata_crs = context.crsOpt.map( crs => array_metadata + ( "crs" -> crs ) ).getOrElse( array_metadata ) logger.info("&MAP: Finished Kernel %s, time = %.4f s, metadata = %s".format(name, (System.nanoTime - t0) / 1.0E9, array_metadata_crs.mkString(";"))) context.addTimestamp( "Map Op complete" ) RDDRecord(TreeMap(resultItems: _*) ++ acceptable_array_map, array_metadata_crs) } } finally { workerManager.releaseWorker( worker ) } } } class zmqPythonKernel( _module: String, 
_operation: String, _title: String, _description: String, options: Map[String,String] ) extends Kernel(options) { override def operation: String = _operation override def module = _module override def name = _module.split('.').last + "." + _operation override def id = _module + "." + _operation override val identifier = name val outputs = List( WPSProcessOutput( "operation result" ) ) val title = _title val description = _description override def cleanUp(): Unit = PythonWorkerPortal.getInstance.shutdown() override def map ( context: KernelContext ) ( inputs: RDDRecord ): RDDRecord = { val t0 = System.nanoTime val workerManager: PythonWorkerPortal = PythonWorkerPortal.getInstance() val worker: PythonWorker = workerManager.getPythonWorker try { val input_arrays: List[HeapFltArray] = context.operation.inputs.map( id => inputs.findElements(id) ).foldLeft(List[HeapFltArray]())( _ ++ _ ) assert( input_arrays.nonEmpty, "Missing input(s) to operation " + id + ": required inputs=(%s), available inputs=(%s)".format(context.operation.inputs.mkString(","), inputs.elements.keySet.mkString(","))) val operation_input_arrays = context.operation.inputs.flatMap( input_id => inputs.element( input_id ) ) val t1 = System.nanoTime for( input_id <- context.operation.inputs ) inputs.element(input_id) match { case Some( input_array ) => if( addWeights( context ) ) { val weights: CDFloatArray = KernelUtilities.getWeights(input_id, context, weightsOpt, false ) worker.sendRequestInput(input_id, HeapFltArray(input_array, weights)) } else { worker.sendRequestInput(input_id, input_array) } case None => worker.sendUtility( List( "input", input_id ).mkString(";") ) } val metadata = indexAxisConf( context.getConfiguration, context.grid.axisIndexMap ) worker.sendRequest(context.operation.identifier, context.operation.inputs.toArray, metadata ) val resultItems = for( iInput <- 0 until (operation_input_arrays.length * nOutputsPerInput) ) yield { val tvar: TransVar = worker.getResult val uid = tvar.getMetaData.get( "uid" ) val result = HeapFltArray( tvar ) logger.info( "Received result Var: " + tvar.toString + ", first = " + result.data(0).toString + " undef = " + result.missing.getOrElse(0.0)) context.operation.rid + ":" + uid + "~" + tvar.id() -> result } logger.info( "Gateway: Executing operation %s in time %.4f s".format( context.operation.identifier, (System.nanoTime - t1) / 1.0E9 ) ) val result_metadata = inputs.metadata ++ input_arrays.head.metadata ++ List( "uid" -> context.operation.rid, "gridfile" -> getCombinedGridfile( inputs.elements ) ) logger.info("&MAP: Finished zmqPythonKernel %s, time = %.4f s, metadata = %s".format(name, (System.nanoTime - t0) / 1.0E9, result_metadata.mkString(";") ) ) context.addTimestamp( "Map Op complete" ) RDDRecord( TreeMap(resultItems:_*), result_metadata ) } finally { workerManager.releaseWorker( worker ) } } override def customReduceRDD(context: KernelContext)(a0: ( RecordKey, RDDRecord ), a1: ( RecordKey, RDDRecord ) ): ( RecordKey, RDDRecord ) = { val ( rdd0, rdd1 ) = ( a0._2, a1._2 ) val ( k0, k1 ) = ( a0._1, a1._1 ) val t0 = System.nanoTime val workerManager: PythonWorkerPortal = PythonWorkerPortal.getInstance val worker: PythonWorker = workerManager.getPythonWorker val ascending = k0 < k1 val new_key = if(ascending) { k0 + k1 } else { k1 + k0 } val op_metadata = indexAxisConf( context.getConfiguration, context.grid.axisIndexMap ) rdd0.elements.map { case (key, element0) => rdd1.elements.get(key).map( element1 => key -> { val (array0, array1) = if (ascending) (element0, element1) 
else (element1, element0) val uids = Array( s"${array0.uid}", s"${array1.uid}" ) worker.sendRequestInput( uids(0), array0 ) worker.sendRequestInput( uids(1), array1 ) worker.sendRequest( context.operation.identifier, uids, Map( "action" -> "reduce", "axes" -> context.getAxes.getAxes.mkString(",") ) ) }) } val resultItems = rdd0.elements.map { case (key, element0) => val tvar = worker.getResult val result = HeapFltArray( tvar ) context.operation.rid + ":" + element0.uid -> result } logger.debug("&MERGE %s: finish, time = %.4f s".format( context.operation.identifier, (System.nanoTime - t0) / 1.0E9 ) ) context.addTimestamp( "Custom Reduce Op complete" ) new_key -> RDDRecord( resultItems, rdd0.mergeMetadata("merge", rdd1) ) } def indexAxisConf( metadata: Map[String,String], axisIndexMap: Map[String,Int] ): Map[String,String] = { try { metadata.get("axes") match { case None => metadata case Some(axis_spec) => val axisIndices = axis_spec.map( _.toString).map( axis => axisIndexMap(axis) ) metadata + ( "axes" -> axisIndices.mkString("")) } } catch { case e: Exception => throw new Exception( "Error converting axis spec %s to indices using axisIndexMap {%s}: %s".format( metadata.get("axes"), axisIndexMap.mkString(","), e.toString ) ) } } } class TransientFragment( val dataFrag: DataFragment, val request: RequestContext, val varMetadata: Map[String,nc2.Attribute] ) extends OperationDataInput( dataFrag.spec, varMetadata ) { def toXml(id: String): xml.Elem = { val units = varMetadata.get("units") match { case Some(attr) => attr.getStringValue; case None => "" } val long_name = varMetadata.getOrElse("long_name",varMetadata.getOrElse("fullname",varMetadata.getOrElse("varname", new Attribute("varname","UNDEF")))).getStringValue val description = varMetadata.get("description") match { case Some(attr) => attr.getStringValue; case None => "" } val axes = varMetadata.get("axes") match { case Some(attr) => attr.getStringValue; case None => "" } <result id={id} missing_value={dataFrag.data.getInvalid.toString} shape={dataFrag.data.getShape.mkString("(",",",")")} units={units} long_name={long_name} description={description} axes={axes}> { dataFrag.data.mkBoundedDataString( ", ", 1100 ) } </result> } def domainDataFragment( partIndex: Int, optSection: Option[ma2.Section] ): Option[DataFragment] = Some(dataFrag) def data(partIndex: Int ): CDFloatArray = dataFrag.data def delete() = {;} } class SerializeTest { val input_array: CDFloatArray = CDFloatArray.const( Array(4), 2.5f ) val ucar_array = CDFloatArray.toUcarArray( input_array ) val byte_data = ucar_array.getDataAsByteBuffer().array() println( "Byte data: %x %x %x %x".format( byte_data(0),byte_data(1), byte_data(2), byte_data(3) )) val tvar = new TransVar( " | |0|4| ", byte_data ) val result = HeapFltArray( tvar, None ) println( "Float data: %f %f %f %f".format( result.data(0), result.data(1), result.data(2), result.data(3) )) } class zmqSerializeTest { import nasa.nccs.cdas.workers.test.floatClient val input_array: CDFloatArray = CDFloatArray.const( Array(4), 2.5f ) val ucar_array = CDFloatArray.toUcarArray( input_array ) val byte_data = ucar_array.getDataAsByteBuffer().array() println( "Byte data: %d %d %d %d".format( byte_data(0),byte_data(1), byte_data(2), byte_data(3) )) floatClient.run( byte_data ) }
nasa-nccs-cds/CDAS2
src/main/scala/nasa/nccs/cdas/kernels/process.scala
Scala
gpl-2.0
62,317
package com.bwsw.commitlog

import java.io._
import java.security.{DigestOutputStream, MessageDigest}
import java.util.concurrent.TimeUnit
import java.util.concurrent.atomic.{AtomicInteger, AtomicLong, AtomicReference}
import javax.xml.bind.DatatypeConverter

import com.bwsw.commitlog.CommitLogFlushPolicy.{ICommitLogFlushPolicy, OnCountInterval, OnRotation, OnTimeInterval}
import com.bwsw.commitlog.filesystem.FilePathManager

/** Logger which stores records continuously in files in specified location.
  *
  * Stores data in files placed YYYY/mm/dd/{serial number}.dat. If it works correctly, md5-files placed
  * YYYY/mm/dd/{serial number}.md5 shall be generated as well. New file starts on user request or when configured time
  * was exceeded.
  *
  * @param seconds period of time to write records into the same file, then start new file
  * @param path    location to store files at
  * @param policy  policy to flush data into file (OnRotation by default)
  */
class CommitLog(seconds: Int, path: String, policy: ICommitLogFlushPolicy = OnRotation, nextFileID: => Long) {
  require(seconds > 0, "Seconds cannot be less than 1")

  private val millisInterval: Long = TimeUnit.SECONDS.toMillis(seconds)
  private val chunkWriteCount: AtomicInteger = new AtomicInteger(0)
  private val chunkOpenTime: AtomicLong = new AtomicLong(0L)
  private val pathWithSeparator = s"$path${java.io.File.separatorChar}"

  private class CommitLogFile(val id: Long) {
    private[CommitLog] val absolutePath: String = new StringBuilder(pathWithSeparator)
      .append(id).append(FilePathManager.DATAEXTENSION).toString

    private val recordIDGen = new AtomicLong(0L)
    private val md5: MessageDigest = MessageDigest.getInstance("MD5")

    private def writeMD5File() = {
      val fileMD5 = DatatypeConverter.printHexBinary(md5.digest()).getBytes
      new FileOutputStream(new StringBuilder(pathWithSeparator)
        .append(id).append(FilePathManager.MD5EXTENSION).toString) {
        write(fileMD5)
        close()
      }
    }

    private val fileStream = new FileOutputStream(absolutePath)
    private val outputStream = new BufferedOutputStream(fileStream)
    private val digestOutputStream = new DigestOutputStream(outputStream, md5)
    private[CommitLog] val creationTime: Long = System.currentTimeMillis()

    private[CommitLog] def put(messageType: Byte, message: Array[Byte]): Unit = {
      val commitLogRecord = CommitLogRecord(
        recordIDGen.getAndIncrement(),
        messageType,
        message,
        System.currentTimeMillis()
      )
      val recordToBinary = commitLogRecord.toByteArray
      digestOutputStream.write(recordToBinary)
    }

    private[CommitLog] def flush(): Unit = {
      digestOutputStream.flush()
      outputStream.flush()
      fileStream.flush()
    }

    private[CommitLog] def close(withMD5: Boolean = true): Unit = this.synchronized {
      digestOutputStream.on(false)
      digestOutputStream.close()
      outputStream.close()
      fileStream.close()
      if (withMD5) {
        writeMD5File()
      }
    }
  }

  private val currentCommitLogFileToPut = new AtomicReference[CommitLogFile](new CommitLogFile(nextFileID))

  /** Puts record and its type to an appropriate file.
    *
    * Writes data to file in format (delimiter)(BASE64-encoded type and message). When writing to one file finished,
    * md5-sum file generated.
    *
    * @param message     message to store
    * @param messageType type of message to store
    * @param startNew    start new file if true
    * @return name of file record was saved in
    */
  def putRec(message: Array[Byte], messageType: Byte, startNew: Boolean = false): String = this.synchronized {
    val now: Long = System.currentTimeMillis()
    policy match {
      case interval: OnTimeInterval if interval.seconds * 1000 + chunkOpenTime.get() < now =>
        chunkOpenTime.set(now)
        currentCommitLogFileToPut.get().flush()
      case interval: OnCountInterval if interval.count == chunkWriteCount.get() =>
        chunkWriteCount.set(0)
        currentCommitLogFileToPut.get().flush()
      case _ =>
    }

    // If we want to open new File or if time between creation new files exceeds `millisInterval`
    if (startNew || timeExceeded()) close()

    val currentFile = currentCommitLogFileToPut.get()
    currentFile.put(messageType, message)
    chunkWriteCount.incrementAndGet()
    currentFile.absolutePath
  }

  /** Finishes work with current file. */
  def close(createNewFile: Boolean = true, withMD5: Boolean = true): String = this.synchronized {
    val currentCommitLogFile = currentCommitLogFileToPut.get()
    val path = currentCommitLogFile.absolutePath
    if (createNewFile) {
      currentCommitLogFileToPut.set(new CommitLogFile(nextFileID))
    }
    currentCommitLogFile.close()
    resetCounters()
    path
  }

  final def currentFileID: Long = currentCommitLogFileToPut.get().id

  private def resetCounters(): Unit = {
    chunkWriteCount.set(0)
    chunkOpenTime.set(System.currentTimeMillis())
  }

  private def timeExceeded(): Boolean = {
    (System.currentTimeMillis() - currentCommitLogFileToPut.get().creationTime) >= millisInterval
  }
}
bwsw/tstreams-transaction-server
src/main/scala/com/bwsw/commitlog/CommitLog.scala
Scala
apache-2.0
5,184
/**
 * Copyright 2015 Yahoo Inc. Licensed under the Apache License, Version 2.0
 * See accompanying LICENSE file.
 */
package kafka.manager

import java.util.Properties

import akka.actor.{ActorRef, ActorSystem, Props}
import akka.pattern._
import akka.util.Timeout
import com.typesafe.config.{Config, ConfigFactory}
import kafka.manager.utils.CuratorAwareTest
import ActorModel._
import kafka.test.SeededBroker

import scala.concurrent.Await
import scala.concurrent.duration._
import scala.reflect.ClassTag
import scala.util.Try

/**
 * @author hiral
 */
class TestKafkaManagerActor extends CuratorAwareTest {

  private[this] val akkaConfig: Properties = new Properties()
  akkaConfig.setProperty("pinned-dispatcher.type","PinnedDispatcher")
  akkaConfig.setProperty("pinned-dispatcher.executor","thread-pool-executor")
  private[this] val config : Config = ConfigFactory.parseProperties(akkaConfig)
  private[this] val system = ActorSystem("test-kafka-state-actor",config)
  private[this] val broker = new SeededBroker("km-test",4)
  private[this] val kafkaServerZkPath = broker.getZookeeperConnectionString
  private[this] var kafkaManagerActor : Option[ActorRef] = None
  private[this] implicit val timeout: Timeout = 10.seconds

  override protected def beforeAll(): Unit = {
    super.beforeAll()
    val curatorConfig = CuratorConfig(testServer.getConnectString)
    val config = KafkaManagerActorConfig(
      curatorConfig = curatorConfig,
      kafkaManagerUpdatePeriod = FiniteDuration(1,SECONDS),
      deleteClusterUpdatePeriod = FiniteDuration(1,SECONDS)
    )
    val props = Props(classOf[KafkaManagerActor],config)
    kafkaManagerActor = Some(system.actorOf(props,"kafka-manager"))
    Thread.sleep(1000)
  }

  override protected def afterAll(): Unit = {
    kafkaManagerActor.foreach( _ ! KMShutdown)
    system.shutdown()
    Try(broker.shutdown())
    super.afterAll()
  }

  private[this] def withKafkaManagerActor[Input,Output,FOutput](msg: Input)(fn: Output => FOutput)(implicit tag: ClassTag[Output]) : FOutput = {
    require(kafkaManagerActor.isDefined, "kafkaManagerActor undefined!")
    val future = ask(kafkaManagerActor.get, msg).mapTo[Output]
    val result = Await.result(future,10.seconds)
    fn(result)
  }

  test("add cluster") {
    val cc = ClusterConfig("dev","0.8.1.1",testServer.getConnectString)
    withKafkaManagerActor(KMAddCluster(cc)) { result: KMCommandResult =>
      result.result.get
      Thread.sleep(1000)
    }
    withKafkaManagerActor(KMClusterQueryRequest("dev",KSGetTopics)) { result: TopicList =>
      result.list.isEmpty
    }
  }

  test("update cluster zkhost") {
    val cc2 = ClusterConfig("dev","0.8.1.1",kafkaServerZkPath)
    withKafkaManagerActor(KMUpdateCluster(cc2)) { result: KMCommandResult =>
      result.result.get
      Thread.sleep(3000)
    }
    withKafkaManagerActor(KMClusterQueryRequest("dev",KSGetTopics)) { result: TopicList =>
      result.list.nonEmpty
    }
  }

  test("disable cluster") {
    withKafkaManagerActor(KMDisableCluster("dev")) { result: KMCommandResult =>
      result.result.get
      Thread.sleep(1000)
    }
    withKafkaManagerActor(KMClusterQueryRequest("dev",KSGetTopics)) { result: ActorErrorResponse =>
      println(result)
      result.msg.contains("dev")
    }
  }

  test("enable cluster") {
    withKafkaManagerActor(KMEnableCluster("dev")) { result: KMCommandResult =>
      result.result.get
      Thread.sleep(1000)
    }
    withKafkaManagerActor(KMClusterQueryRequest("dev",KSGetTopics)) { result: TopicList =>
      result.list.nonEmpty
    }
  }

  test("update cluster version") {
    val cc2 = ClusterConfig("dev","0.8.2-beta",kafkaServerZkPath)
    withKafkaManagerActor(KMUpdateCluster(cc2)) { result: KMCommandResult =>
      result.result.get
      Thread.sleep(3000)
    }
    withKafkaManagerActor(KMClusterQueryRequest("dev",KSGetTopics)) { result: TopicList =>
      result.list.nonEmpty
    }
  }

  test("delete cluster") {
    withKafkaManagerActor(KMDisableCluster("dev")) { result: KMCommandResult =>
      result.result.get
      Thread.sleep(1000)
    }
    withKafkaManagerActor(KMClusterQueryRequest("dev",KSGetTopics)) { result: ActorErrorResponse =>
      println(result)
      result.msg.contains("dev")
    }
    withKafkaManagerActor(KMDeleteCluster("dev")) { result: KMCommandResult =>
      result.result.get
      Thread.sleep(2000)
    }
    withKafkaManagerActor(KMClusterQueryRequest("dev",KSGetTopics)) { result: ActorErrorResponse =>
      println(result)
      result.msg.contains("dev")
    }
    val cc2 = ClusterConfig("dev","0.8.2-beta",kafkaServerZkPath)
    withKafkaManagerActor(KMAddCluster(cc2)) { result: KMCommandResult =>
      result.result.get
      Thread.sleep(1000)
    }
    withKafkaManagerActor(KMClusterQueryRequest("dev",KSGetTopics)) { result: TopicList =>
      result.list.nonEmpty
    }
  }
}
wking1986/kafka-manager
test/kafka/manager/TestKafkaManagerActor.scala
Scala
apache-2.0
4,882
package lv.ddgatve.math

class Chunk(val tstamp: String, val text: String) {
  val cc = tstamp.split(":")
  val seconds = 60 * cc(0).toInt + cc(1).toInt

  def getSeconds(): Int = seconds
}
kapsitis/ddgatve-stat
src/main/scala/lv/ddgatve/math/Chunk.scala
Scala
apache-2.0
191
/* * Copyright 2013 Twitter Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.twitter.storehaus.mysql import com.twitter.finagle.Mysql import com.twitter.finagle.mysql.Client import com.twitter.storehaus.testing.SelfAggregatingCloseableCleanup import com.twitter.storehaus.testing.generator.NonEmpty import com.twitter.util.{Await, Future} import org.scalacheck.{Prop, Gen, Properties} import org.scalacheck.Prop.forAll object MySqlStoreProperties extends Properties("MySqlStore") with SelfAggregatingCloseableCleanup[MySqlStore] { private[this] class PropertyCached(ps: PropertySpecifier) { def update(propName: String, p: Prop) = { ps(propName) = p } } /** * Property specification is used by name since scalacheck 1.13.4, which breaks * tests here. This simulates the old behavior. */ private[this] val propertyCached = new PropertyCached(property) def put(s: MySqlStore, pairs: List[(MySqlValue, Option[MySqlValue])]) { pairs.foreach { case (k, v) => Await.result(s.put((k, v))) } } def multiPut(s: MySqlStore, pairs: List[(MySqlValue, Option[MySqlValue])]) { Await.result(Future.collect(s.multiPut(pairs.toMap).values.toSeq)) } /** invert any type to MySql String values. Because most mysql configuraions are case * insensitive by default, we lowercase key's here for normalization */ def stringify(examples: List[(Any, Option[Any])]): List[(MySqlValue, Option[MySqlValue])] = examples.map { case (k, v) => (String2MySqlValueInjection(k.toString.toLowerCase), v.flatMap { d => Option(String2MySqlValueInjection(d.toString)) }) } def putAndGetStoreTest( store: MySqlStore, pairs: Gen[List[(Any, Option[Any])]] = NonEmpty.Pairing.alphaStrs() ): Prop = forAll(pairs) { (examples: List[(Any, Option[Any])]) => val stringified = stringify(examples) put(store, stringified) stringified.toMap.forall { case (k, optV) => val foundOptV = Await.result(store.get(k)) compareValues(k, optV, foundOptV) } } def multiPutAndMultiGetStoreTest( store: MySqlStore, pairs: Gen[List[(Any, Option[Any])]] = NonEmpty.Pairing.alphaStrs() ): Prop = forAll(pairs) { (examples: List[(Any, Option[Any])]) => val stringified = stringify(examples) multiPut(store, stringified) val data = stringified.toMap val result = store.multiGet(data.keySet) data.forall { case (k, optV) => // result.get(k) returns Option[Future[Option[MySqlValue]]] val foundOptV = result.get(k) match { case Some(v) => Await.result(v) case None => None } compareValues(k, optV, foundOptV) } } def compareValues( k: MySqlValue, expectedOptV: Option[MySqlValue], foundOptV: Option[MySqlValue]): Boolean = { val isMatch = expectedOptV match { case Some(value) => foundOptV.isDefined && foundOptV.get == value case None => foundOptV.isEmpty } if (!isMatch) printErr(k, expectedOptV, foundOptV) isMatch } def printErr(k: MySqlValue, expectedOptV: Option[MySqlValue], foundOptV: Option[MySqlValue]) { val expected = if (expectedOptV.isEmpty) expectedOptV else String2MySqlValueInjection.invert(expectedOptV.get).toOption val found = if (foundOptV.isEmpty) foundOptV else 
String2MySqlValueInjection.invert(foundOptV.get).toOption println(s"""FAILURE: Key "${String2MySqlValueInjection.invert(k)}" -""" + s"expected value $expected, but found $found") } propertyCached("MySqlStore text->text") = withStore(putAndGetStoreTest(_), "text", "text") propertyCached("MySqlStore blob->blob") = withStore(putAndGetStoreTest(_), "blob", "blob") propertyCached("MySqlStore text->blob") = withStore(putAndGetStoreTest(_), "text", "blob") propertyCached("MySqlStore text->text multiget") = withStore(multiPutAndMultiGetStoreTest(_), "text", "text", multiGet = true) propertyCached("MySqlStore blob->blob multiget") = withStore(multiPutAndMultiGetStoreTest(_), "blob", "blob", multiGet = true) propertyCached("MySqlStore text->blob multiget") = withStore(multiPutAndMultiGetStoreTest(_), "text", "blob", multiGet = true) propertyCached("MySqlStore int->int") = withStore(putAndGetStoreTest(_, NonEmpty.Pairing.numerics[Int]()), "int", "int") propertyCached("MySqlStore int->int multiget") = withStore(multiPutAndMultiGetStoreTest(_, NonEmpty.Pairing.numerics[Int]()), "int", "int", multiGet = true) propertyCached("MySqlStore bigint->bigint") = withStore(putAndGetStoreTest(_, NonEmpty.Pairing.numerics[Long]()), "bigint", "bigint") propertyCached("MySqlStore bigint->bigint multiget") = withStore(multiPutAndMultiGetStoreTest(_, NonEmpty.Pairing.numerics[Long]()), "bigint", "bigint", multiGet = true) propertyCached("MySqlStore smallint->smallint") = withStore(putAndGetStoreTest(_, NonEmpty.Pairing.numerics[Short]()), "smallint", "smallint") propertyCached("MySqlStore smallint->smallint multiget") = withStore(multiPutAndMultiGetStoreTest(_, NonEmpty.Pairing.numerics[Short]()), "smallint", "smallint", multiGet = true) private def withStore[T](f: MySqlStore => T, kColType: String, vColType: String, multiGet: Boolean = false): T = { val client = Mysql.client .withCredentials("storehaususer", "test1234") .withDatabase("storehaus_test") .newRichClient("127.0.0.1:3306") // these should match mysql setup used in .travis.yml val tableName = s"storehaus-mysql-$kColType-$vColType${if (multiGet) "-multiget" else ""}" val schema = s"CREATE TEMPORARY TABLE IF NOT EXISTS `$tableName` (`key` $kColType " + s"DEFAULT NULL, `value` $vColType DEFAULT NULL) ENGINE=InnoDB DEFAULT CHARSET=utf8;" Await.result(client.query(schema)) f(newStore(client, tableName)) } def newStore(client: Client, tableName: String): MySqlStore = aggregateCloseable(MySqlStore(client, tableName, "key", "value")) }
twitter/storehaus
storehaus-mysql/src/test/scala/com/twitter/storehaus/mysql/MySqlStoreProperties.scala
Scala
apache-2.0
6,572
/* *\\ ** \\ \\ / _) \\ \\ / \\ | ** ** \\ \\ / | __ \\ _ \\ __| \\ \\ / |\\/ | ** ** \\ \\ / | | | __/ | \\ \\ / | | ** ** \\_/ _| .__/ \\___| _| \\_/ _| _| ** ** _| ** ** ** ** ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ** ** ** ** http://www.vipervm.org ** ** GPLv3 ** \\* */ package org.vipervm.runtime.mm.data.matrix import java.nio.ByteOrder import org.vipervm.platform._ import org.vipervm.runtime.mm._ import org.vipervm.runtime.mm.data._ import org.vipervm.runtime.mm.data.vector._ class Matrix(val typ:MatrixType, val metadata:MatrixMetaData) extends MatrixData { val (width,height) = (metadata.width,metadata.height) protected val elemSize = Primitive.sizeOf(typ.elem) def allocate(memory:MemoryNode,repr:MatrixRepr):Either[AllocationFailure,MatrixInstance] = { repr match { case r@DenseMatrixRepr(_,_) => allocateDenseMatrix(memory,r) case r@StridedMatrixRepr(_,_,_) => allocateStridedMatrix(memory,r) case _ => Left(DataRepresentationNotSupported) } } protected def allocateDenseMatrix(memory:MemoryNode,repr:DenseMatrixRepr):Either[AllocationFailure,DenseMatrixInstance] = { val size = width * height * elemSize val buffer = memory.allocate(size) val view = new BufferView1D(buffer, 0, size) val instance = DenseMatrixInstance(repr, view) Right(instance) } protected def allocateStridedMatrix(memory:MemoryNode,repr:StridedMatrixRepr):Either[AllocationFailure,StridedMatrixInstance] = { val size = (width * elemSize + repr.padding) * height val buffer = memory.allocate(size) val view = new BufferView2D(buffer, 0, width * elemSize, height, repr.padding) val instance = StridedMatrixInstance(repr, view) Right(instance) } } object Matrix { def create[A](dataManager:DataManager,width:Long,height:Long,f:(Long,Long)=>A,major:Major = RowMajor)(implicit prim:PrimType[A]): Matrix = { /* Create data */ val typ = MatrixType(prim.typ) val metadata = MatrixMetaData(width,height) val data = new Matrix(typ,metadata) dataManager.register(data) /* Create instance */ val repr = DenseMatrixRepr(PrimitiveRepr(ByteOrder.nativeOrder),major) val mem = dataManager.platform.hostMemory val instance = data.allocateDenseMatrix(mem,repr) match { case Left(err) => throw new Exception(err.toString) case Right(inst) => inst } /* Initialize instance */ val view = instance.view for (y <- 0L until height; x <- 0L until width) { val index = x*4 + y * width * 4 + view.offset prim.set(view.buffer.asInstanceOf[HostBuffer], index, f(x,y)) } /* Associate instance to the data */ dataManager.associate(instance,data) /* Return the data */ data } }
hsyl20/Scala_ViperVM
src/main/scala/org/vipervm/runtime/mm/data/Matrix.scala
Scala
gpl-3.0
3,125
/*
 * Copyright 2017 Datamountaineer.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.datamountaineer.streamreactor.connect.hazelcast.sink

import com.datamountaineer.streamreactor.connect.hazelcast.TestBase
import com.datamountaineer.streamreactor.connect.hazelcast.config.HazelCastSinkConfigConstants

import scala.collection.JavaConverters._

/**
  * Created by [email protected] on 10/08/16.
  * stream-reactor
  */
class TestHazelCastSinkConnector extends TestBase {
  "should start a Hazelcast sink connector" in {
    val props = getProps
    val connector = new HazelCastSinkConnector
    connector.start(props)
    val taskConfigs = connector.taskConfigs(1)
    taskConfigs.asScala.head.get(HazelCastSinkConfigConstants.KCQL) shouldBe KCQL_MAP
    taskConfigs.asScala.head.get(HazelCastSinkConfigConstants.GROUP_NAME) shouldBe TESTS_GROUP_NAME
    connector.stop()
  }
}
datamountaineer/stream-reactor
kafka-connect-hazelcast/src/test/scala/com/datamountaineer/streamreactor/connect/hazelcast/sink/TestHazelCastSinkConnector.scala
Scala
apache-2.0
1,413
package io.udash.bootstrap package dropdown import com.avsystem.commons.misc.{AbstractCase, AbstractValueEnum, AbstractValueEnumCompanion, EnumCtx} import io.udash._ import io.udash.bindings.modifiers.Binding import io.udash.bindings.modifiers.Binding.NestedInterceptor import io.udash.bootstrap.button.UdashButton import io.udash.bootstrap.utils.{BootstrapStyles, UdashBootstrapComponent} import io.udash.properties.seq import io.udash.properties.single.ReadableProperty import io.udash.wrappers.jquery._ import org.scalajs.dom.{Element, Event} import scalatags.JsDom.all._ import scala.scalajs.js final class UdashDropdown[ItemType, ElemType <: ReadableProperty[ItemType]] private( items: seq.ReadableSeqProperty[ItemType, ElemType], dropDirection: ReadableProperty[UdashDropdown.Direction], rightAlignMenu: ReadableProperty[Boolean], buttonToggle: ReadableProperty[Boolean], override val componentId: ComponentId )( itemBindingFactory: UdashDropdown[ItemType, ElemType] => Binding, buttonContent: Binding.NestedInterceptor => Modifier, buttonFactory: (NestedInterceptor => Modifier) => UdashButton ) extends UdashBootstrapComponent with Listenable { import UdashDropdown._ import io.udash.bootstrap.dropdown.UdashDropdown.DropdownEvent._ import io.udash.css.CssView._ override type EventType = UdashDropdown.DropdownEvent[ItemType, ElemType] /** Dropdown menu list ID. */ val menuId: ComponentId = componentId.withSuffix("menu") /** Dropdown button ID. */ val buttonId: ComponentId = componentId.withSuffix("button") /** Toggles menu visibility. */ def toggle(): Unit = jQSelector().dropdown("toggle") /** Updates the position of an element’s dropdown. */ def update(): Unit = jQSelector().dropdown("update") propertyListeners += items.listen(_ => update()) override lazy val render: Element = { import io.udash.bootstrap.utils.BootstrapTags._ val el = div( componentId, nestedInterceptor( ((direction: Direction) => direction match { case Direction.Up => BootstrapStyles.Dropdown.dropup case Direction.Down => BootstrapStyles.Dropdown.dropdown case Direction.Left => BootstrapStyles.Dropdown.dropleft case Direction.Right => BootstrapStyles.Dropdown.dropright }).reactiveApply(dropDirection) ) )( nestedInterceptor(buttonFactory { nested => Seq[Modifier]( nested(BootstrapStyles.Dropdown.toggle.styleIf(buttonToggle)), buttonId, dataToggle := "dropdown", aria.haspopup := true, aria.expanded := false, buttonContent(nested), span(BootstrapStyles.Dropdown.caret) ) }), div( BootstrapStyles.Dropdown.menu, nestedInterceptor(BootstrapStyles.Dropdown.menuRight.styleIf(rightAlignMenu)), aria.labelledby := buttonId, menuId )(nestedInterceptor(itemBindingFactory(this))) ).render val jQEl = jQ(el) nestedInterceptor(new JQueryOnBinding(jQEl, "show.bs.dropdown", (_: Element, _: JQueryEvent) => fire(VisibilityChangeEvent(this, EventType.Show)))) nestedInterceptor(new JQueryOnBinding(jQEl, "shown.bs.dropdown", (_: Element, _: JQueryEvent) => fire(VisibilityChangeEvent(this, EventType.Shown)))) nestedInterceptor(new JQueryOnBinding(jQEl, "hide.bs.dropdown", (_: Element, _: JQueryEvent) => fire(VisibilityChangeEvent(this, EventType.Hide)))) nestedInterceptor(new JQueryOnBinding(jQEl, "hidden.bs.dropdown", (_: Element, _: JQueryEvent) => fire(VisibilityChangeEvent(this, EventType.Hidden)))) el } override def kill(): Unit = { super.kill() jQSelector().dropdown("dispose") } private def jQSelector(): UdashDropdownJQuery = jQ(s"#${buttonId.value}").asInstanceOf[UdashDropdownJQuery] } object UdashDropdown { /** More: <a 
href="http://getbootstrap.com/docs/4.1/components/dropdowns/#events">Bootstrap Docs</a> */ sealed trait DropdownEvent[ItemType, ElemType <: ReadableProperty[ItemType]] extends AbstractCase with ListenableEvent { def tpe: DropdownEvent.EventType } object DropdownEvent { final class EventType(implicit enumCtx: EnumCtx) extends AbstractValueEnum object EventType extends AbstractValueEnumCompanion[EventType] { /** This event fires immediately when the show instance method is called. */ final val Show: Value = new EventType /** This event is fired when the dropdown has been made visible to the user (will wait for CSS transitions, to complete). */ final val Shown: Value = new EventType /** This event is fired immediately when the hide instance method has been called. */ final val Hide: Value = new EventType /** This event is fired when the dropdown has finished being hidden from the user (will wait for CSS transitions, to complete). */ final val Hidden: Value = new EventType /** This event is fired on selection of any (except disabled ones) element from the dropdown. */ final val Selection: Value = new EventType } final case class VisibilityChangeEvent[ItemType, ElemType <: ReadableProperty[ItemType]]( override val source: UdashDropdown[ItemType, ElemType], override val tpe: DropdownEvent.EventType ) extends DropdownEvent[ItemType, ElemType] final case class SelectionEvent[ItemType, ElemType <: ReadableProperty[ItemType]]( override val source: UdashDropdown[ItemType, ElemType], item: ItemType ) extends DropdownEvent[ItemType, ElemType] { override def tpe: EventType = EventType.Selection } } final class Direction(implicit enumCtx: EnumCtx) extends AbstractValueEnum object Direction extends AbstractValueEnumCompanion[Direction] { final val Up, Down, Left, Right: Value = new Direction() } /** Default dropdown elements. */ sealed trait DefaultDropdownItem extends AbstractCase object DefaultDropdownItem { case class Text(text: String) extends DefaultDropdownItem case class Link(title: String, url: Url) extends DefaultDropdownItem case class Button(title: String, clickCallback: () => Any) extends DefaultDropdownItem case class Header(title: String) extends DefaultDropdownItem case class Disabled(item: DefaultDropdownItem) extends DefaultDropdownItem case class Raw(element: Element) extends DefaultDropdownItem case class Dynamic(factory: Binding.NestedInterceptor => Element) extends DefaultDropdownItem case object Divider extends DefaultDropdownItem } /** Renders DOM element for [[io.udash.bootstrap.dropdown.UdashDropdown.DefaultDropdownItem]]. */ def defaultItemFactory(item: DefaultDropdownItem, nested: Binding.NestedInterceptor): Element = { import DefaultDropdownItem._ import io.udash.css.CssView._ item match { case Text(text) => span(BootstrapStyles.Dropdown.itemText, text).render case Link(title, url) => a(BootstrapStyles.Dropdown.item, href := url.value)(title).render case Button(title, callback) => button(BootstrapStyles.Dropdown.item, onclick :+= ((_: Event) => { callback() }))(title).render case Header(title) => h6(BootstrapStyles.Dropdown.header)(title).render case Disabled(item) => val res = defaultItemFactory(item, nested).styles(BootstrapStyles.disabled) res.addEventListener("click", (ev: Event) => { ev.preventDefault() ev.stopPropagation() }) res case Raw(element) => element case Dynamic(produce) => produce(nested) case Divider => div(BootstrapStyles.Dropdown.divider, role := "separator").render } } /** * Creates a dropdown component. 
* More: <a href="http://getbootstrap.com/docs/4.1/components/dropdowns/">Bootstrap Docs</a>. * * @param items Data items which will be represented as the elements in this dropdown. * @param dropDirection A direction of the menu expansion. * @param rightAlignMenu If true, the menu will be aligned to the right side of button. * @param buttonToggle If true, the toggle arrow will be displayed. * @param itemFactory Creates DOM element for each item which is inserted into the dropdown menu. * Use the provided interceptor to properly clean up bindings inside the content. * Usually you should add the `BootstrapStyles.Dropdown.item` style to your element. * @param buttonContent Content of the element opening the dropdown. * Use the provided interceptor to properly clean up bindings inside the content. * @param buttonFactory Allows to customize button options. * @tparam ItemType A single element's type in the `items` sequence. * @tparam ElemType A type of a property containing an element in the `items` sequence. * @return A `UdashDropdown` component, call `render` to create a DOM element. */ def apply[ItemType, ElemType <: ReadableProperty[ItemType]]( items: seq.ReadableSeqProperty[ItemType, ElemType], dropDirection: ReadableProperty[Direction] = Direction.Down.toProperty, rightAlignMenu: ReadableProperty[Boolean] = UdashBootstrap.False, buttonToggle: ReadableProperty[Boolean] = UdashBootstrap.True, componentId: ComponentId = ComponentId.generate() )( itemFactory: (ElemType, Binding.NestedInterceptor) => Element, buttonContent: Binding.NestedInterceptor => Modifier, buttonFactory: (NestedInterceptor => Modifier) => UdashButton = UdashButton() ): UdashDropdown[ItemType, ElemType] = { val itemBindingFactory = (dropdown: UdashDropdown[ItemType, ElemType]) => repeatWithNested(items) { case (item, nested) => withSelectionListener(itemFactory(item, nested), item.get, dropdown) } new UdashDropdown(items, dropDirection, rightAlignMenu, buttonToggle, componentId)(itemBindingFactory, buttonContent, buttonFactory) } /** * Creates a dropdown component with [[DefaultDropdownItem]] as items. * More: <a href="http://getbootstrap.com/docs/4.1/components/dropdowns/">Bootstrap Docs</a>. * * @param items Data items which will be represented as the elements in this dropdown. * @param dropDirection A direction of the menu expansion. * @param rightAlignMenu If true, the menu will be aligned to the right side of button. * @param buttonToggle If true, the toggle arrow will be displayed. * @param buttonContent Content of the element opening the dropdown. * Use the provided interceptor to properly clean up bindings inside the content. * @return A `UdashDropdown` component, call `render` to create a DOM element. 
*/ def default[ElemType <: ReadableProperty[DefaultDropdownItem]]( items: seq.ReadableSeqProperty[DefaultDropdownItem, ElemType], dropDirection: ReadableProperty[Direction] = Direction.Down.toProperty, rightAlignMenu: ReadableProperty[Boolean] = UdashBootstrap.False, buttonToggle: ReadableProperty[Boolean] = UdashBootstrap.True, componentId: ComponentId = ComponentId.generate() )( buttonContent: Binding.NestedInterceptor => Modifier ): UdashDropdown[DefaultDropdownItem, ElemType] = { val itemBindingFactory: UdashDropdown[DefaultDropdownItem, ElemType] => Binding = dropdown => produceWithNested(items)((items, nested) => items.map(item => withSelectionListener[DefaultDropdownItem, ElemType](defaultItemFactory(item, nested), item, dropdown))) new UdashDropdown(items, dropDirection, rightAlignMenu, buttonToggle, componentId)( itemBindingFactory, buttonContent, UdashButton() ) } private def withSelectionListener[ItemType, ElemType <: ReadableProperty[ItemType]](elem: Element, item: => ItemType, source: UdashDropdown[ItemType, ElemType]): Element = { source.nestedInterceptor(new source.JQueryOnBinding(jQ(elem), EventName.click, (_: Element, _: JQueryEvent) => source.fire(DropdownEvent.SelectionEvent(source, item)))) elem } @js.native private trait UdashDropdownJQuery extends JQuery { def dropdown(arg: String): UdashDropdownJQuery = js.native } }
UdashFramework/udash-core
bootstrap4/.js/src/main/scala/io/udash/bootstrap/dropdown/UdashDropdown.scala
Scala
apache-2.0
12,118
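A minimal usage sketch for the dropdown component above, assuming a Udash Scala.js frontend; the item list, button label and listener body are illustrative only and the exact factory signatures should be checked against the Udash version in use:

import io.udash._
import io.udash.bootstrap.dropdown.UdashDropdown
import io.udash.bootstrap.dropdown.UdashDropdown.DefaultDropdownItem
import scalatags.JsDom.all._

// Illustrative items; a real application would usually bind these to application state.
val items = SeqProperty[DefaultDropdownItem](
  DefaultDropdownItem.Header("Links"),
  DefaultDropdownItem.Link("Udash homepage", Url("https://udash.io/")),
  DefaultDropdownItem.Divider,
  DefaultDropdownItem.Button("Say hello", () => println("hello"))
)

val dropdown = UdashDropdown.default(items)(_ => span("Open menu"))
val element = dropdown.render // DOM element to attach to the page

// Selection events fire for every enabled item, as described in the scaladoc above.
dropdown.listen { case UdashDropdown.DropdownEvent.SelectionEvent(_, item) =>
  println(s"Selected: $item")
}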
package ch.ninecode.model import com.esotericsoftware.kryo.Kryo import com.esotericsoftware.kryo.Serializer import com.esotericsoftware.kryo.io.Input import com.esotericsoftware.kryo.io.Output import org.apache.spark.sql.Row import ch.ninecode.cim.CIMClassInfo import ch.ninecode.cim.CIMContext import ch.ninecode.cim.CIMParseable import ch.ninecode.cim.CIMRelationship import ch.ninecode.cim.CIMSerializer /** * Financial Transmission Rights (FTR) regarding transmission capacity at a flowgate. * * @param Agreement [[ch.ninecode.model.Agreement Agreement]] Reference to the superclass object. * @param action Buy, Sell * @param baseEnergy Quantity, typically MWs - Seller owns all rights being offered, MWs over time on same Point of Receipt, Point of Delivery, or Resource. * @param class Peak, Off-peak, 24-hour * @param ftrType Type of rights being offered (product) allowed to be auctioned (option, obligation). * @param optimized Fixed (covers re-configuration, grandfathering) or Optimized (up for sale/purchase * @param EnergyPriceCurve [[ch.ninecode.model.EnergyPriceCurve EnergyPriceCurve]] <em>undocumented</em> * @param Flowgate [[ch.ninecode.model.Flowgate Flowgate]] <em>undocumented</em> * @param Pnodes [[ch.ninecode.model.Pnode Pnode]] <em>undocumented</em> * @group InfCongestionRevenueRights * @groupname InfCongestionRevenueRights Package InfCongestionRevenueRights */ final case class FTR ( Agreement: Agreement = null, action: String = null, baseEnergy: Double = 0.0, `class`: String = null, ftrType: String = null, optimized: String = null, EnergyPriceCurve: String = null, Flowgate: String = null, Pnodes: List[String] = null ) extends Element { /** * Return the superclass object. * * @return The typed superclass nested object. * @group Hierarchy * @groupname Hierarchy Class Hierarchy Related * @groupdesc Hierarchy Members related to the nested hierarchy of CIM classes. */ override def sup: Agreement = Agreement // // Row overrides // /** * Return a copy of this object as a Row. * * Creates a clone of this object for use in Row manipulations. * * @return The copy of the object. 
* @group Row * @groupname Row SQL Row Implementation * @groupdesc Row Members related to implementing the SQL Row interface */ override def copy (): Row = { clone().asInstanceOf[Row] } override def export_fields: String = { implicit val s: StringBuilder = new StringBuilder(sup.export_fields) implicit val clz: String = FTR.cls def emitelem (position: Int, value: Any): Unit = if (mask(position)) emit_element(FTR.fields(position), value) def emitattr (position: Int, value: Any): Unit = if (mask(position)) emit_attribute(FTR.fields(position), value) def emitattrs (position: Int, value: List[String]): Unit = if (mask(position) && (null != value)) value.foreach(x => emit_attribute(FTR.fields(position), x)) emitelem(0, action) emitelem(1, baseEnergy) emitelem(2, `class`) emitelem(3, ftrType) emitelem(4, optimized) emitattr(5, EnergyPriceCurve) emitattr(6, Flowgate) emitattrs(7, Pnodes) s.toString } override def export: String = { "\\t<cim:FTR rdf:%s=\\"%s\\">\\n%s\\t</cim:FTR>".format(if (about) "about" else "ID", id, export_fields) } } object FTR extends CIMParseable[FTR] { override val fields: Array[String] = Array[String]( "action", "baseEnergy", "class", "ftrType", "optimized", "EnergyPriceCurve", "Flowgate", "Pnodes" ) override val relations: List[CIMRelationship] = List( CIMRelationship("EnergyPriceCurve", "EnergyPriceCurve", "0..1", "0..*"), CIMRelationship("Flowgate", "Flowgate", "0..1", "0..*"), CIMRelationship("Pnodes", "Pnode", "0..*", "0..*") ) val action: Fielder = parse_element(element(cls, fields(0))) val baseEnergy: Fielder = parse_element(element(cls, fields(1))) val `class`: Fielder = parse_element(element(cls, fields(2))) val ftrType: Fielder = parse_element(element(cls, fields(3))) val optimized: Fielder = parse_element(element(cls, fields(4))) val EnergyPriceCurve: Fielder = parse_attribute(attribute(cls, fields(5))) val Flowgate: Fielder = parse_attribute(attribute(cls, fields(6))) val Pnodes: FielderMultiple = parse_attributes(attribute(cls, fields(7))) def parse (context: CIMContext): FTR = { implicit val ctx: CIMContext = context implicit val bitfields: Array[Int] = Array(0) val ret = FTR( Agreement.parse(context), mask(action(), 0), toDouble(mask(baseEnergy(), 1)), mask(`class`(), 2), mask(ftrType(), 3), mask(optimized(), 4), mask(EnergyPriceCurve(), 5), mask(Flowgate(), 6), masks(Pnodes(), 7) ) ret.bitfields = bitfields ret } def serializer: Serializer[FTR] = FTRSerializer } object FTRSerializer extends CIMSerializer[FTR] { def write (kryo: Kryo, output: Output, obj: FTR): Unit = { val toSerialize: Array[() => Unit] = Array( () => output.writeString(obj.action), () => output.writeDouble(obj.baseEnergy), () => output.writeString(obj.`class`), () => output.writeString(obj.ftrType), () => output.writeString(obj.optimized), () => output.writeString(obj.EnergyPriceCurve), () => output.writeString(obj.Flowgate), () => writeList(obj.Pnodes, output) ) AgreementSerializer.write(kryo, output, obj.sup) implicit val bitfields: Array[Int] = obj.bitfields writeBitfields(output) writeFields(toSerialize) } def read (kryo: Kryo, input: Input, cls: Class[FTR]): FTR = { val parent = AgreementSerializer.read(kryo, input, classOf[Agreement]) implicit val bitfields: Array[Int] = readBitfields(input) val obj = FTR( parent, if (isSet(0)) input.readString else null, if (isSet(1)) input.readDouble else 0.0, if (isSet(2)) input.readString else null, if (isSet(3)) input.readString else null, if (isSet(4)) input.readString else null, if (isSet(5)) input.readString else null, if (isSet(6)) 
input.readString else null, if (isSet(7)) readList(input) else null ) obj.bitfields = bitfields obj } } /** * A type of limit that indicates if it is enforced and, through association, the organisation responsible for setting the limit. * * @param Limit [[ch.ninecode.model.Limit Limit]] Reference to the superclass object. * @param enforced True if limit is enforced. * @param Flowgate [[ch.ninecode.model.Flowgate Flowgate]] <em>undocumented</em> * @param MktMeasurement [[ch.ninecode.model.MktMeasurement MktMeasurement]] <em>undocumented</em> * @group InfCongestionRevenueRights * @groupname InfCongestionRevenueRights Package InfCongestionRevenueRights */ final case class ViolationLimit ( Limit: Limit = null, enforced: Boolean = false, Flowgate: String = null, MktMeasurement: String = null ) extends Element { /** * Return the superclass object. * * @return The typed superclass nested object. * @group Hierarchy * @groupname Hierarchy Class Hierarchy Related * @groupdesc Hierarchy Members related to the nested hierarchy of CIM classes. */ override def sup: Limit = Limit // // Row overrides // /** * Return a copy of this object as a Row. * * Creates a clone of this object for use in Row manipulations. * * @return The copy of the object. * @group Row * @groupname Row SQL Row Implementation * @groupdesc Row Members related to implementing the SQL Row interface */ override def copy (): Row = { clone().asInstanceOf[Row] } override def export_fields: String = { implicit val s: StringBuilder = new StringBuilder(sup.export_fields) implicit val clz: String = ViolationLimit.cls def emitelem (position: Int, value: Any): Unit = if (mask(position)) emit_element(ViolationLimit.fields(position), value) def emitattr (position: Int, value: Any): Unit = if (mask(position)) emit_attribute(ViolationLimit.fields(position), value) emitelem(0, enforced) emitattr(1, Flowgate) emitattr(2, MktMeasurement) s.toString } override def export: String = { "\\t<cim:ViolationLimit rdf:%s=\\"%s\\">\\n%s\\t</cim:ViolationLimit>".format(if (about) "about" else "ID", id, export_fields) } } object ViolationLimit extends CIMParseable[ViolationLimit] { override val fields: Array[String] = Array[String]( "enforced", "Flowgate", "MktMeasurement" ) override val relations: List[CIMRelationship] = List( CIMRelationship("Flowgate", "Flowgate", "0..1", "0..*"), CIMRelationship("MktMeasurement", "MktMeasurement", "0..1", "0..*") ) val enforced: Fielder = parse_element(element(cls, fields(0))) val Flowgate: Fielder = parse_attribute(attribute(cls, fields(1))) val MktMeasurement: Fielder = parse_attribute(attribute(cls, fields(2))) def parse (context: CIMContext): ViolationLimit = { implicit val ctx: CIMContext = context implicit val bitfields: Array[Int] = Array(0) val ret = ViolationLimit( Limit.parse(context), toBoolean(mask(enforced(), 0)), mask(Flowgate(), 1), mask(MktMeasurement(), 2) ) ret.bitfields = bitfields ret } def serializer: Serializer[ViolationLimit] = ViolationLimitSerializer } object ViolationLimitSerializer extends CIMSerializer[ViolationLimit] { def write (kryo: Kryo, output: Output, obj: ViolationLimit): Unit = { val toSerialize: Array[() => Unit] = Array( () => output.writeBoolean(obj.enforced), () => output.writeString(obj.Flowgate), () => output.writeString(obj.MktMeasurement) ) LimitSerializer.write(kryo, output, obj.sup) implicit val bitfields: Array[Int] = obj.bitfields writeBitfields(output) writeFields(toSerialize) } def read (kryo: Kryo, input: Input, cls: Class[ViolationLimit]): ViolationLimit = { val parent = 
LimitSerializer.read(kryo, input, classOf[Limit]) implicit val bitfields: Array[Int] = readBitfields(input) val obj = ViolationLimit( parent, if (isSet(0)) input.readBoolean else false, if (isSet(1)) input.readString else null, if (isSet(2)) input.readString else null ) obj.bitfields = bitfields obj } } private[ninecode] object _InfCongestionRevenueRights { def register: List[CIMClassInfo] = { List( FTR.register, ViolationLimit.register ) } }
derrickoswald/CIMScala
CIMReader/src/main/scala/ch/ninecode/model/InfCongestionRevenueRights.scala
Scala
mit
11,442
package com.avsystem.commons
package mongo.core.ops

import com.avsystem.commons.mongo.BsonRef

class BsonRefUpdating[T](val bsonRef: BsonRef[_, T]) extends AnyVal
  with BaseUpdating[T]
  with BsonRefKeyValueHandling[T]
AVSystem/scala-commons
commons-mongo/jvm/src/main/scala/com/avsystem/commons/mongo/core/ops/BsonRefUpdating.scala
Scala
mit
227
package wav.devtools.sbt.karaf

import sbt._

object Dependencies {

  val slf4j = "org.slf4j" % "slf4j-api" % "1.7.12"
  val slf4jSimple = "org.slf4j" % "slf4j-simple" % "1.7.12"
  val commonsLang = "org.apache.commons" % "commons-lang3" % "3.4"
  val commonsIo = "commons-io" % "commons-io" % "2.4"
  val osgiCore = "org.osgi" % "org.osgi.core" % "6.0.0"
  val junit = "junit" % "junit" % "4.11" % "test"
  val junitInterface = "com.novocode" % "junit-interface" % "0.11" % "test"
  val scalaTest = "org.scalatest" %% "scalatest" % "2.2.4" % "test"
  val ariesJmx = "org.apache.aries.jmx" % "org.apache.aries.jmx" % "1.1.5"
  val jarchivelib = "org.rauschig" % "jarchivelib" % "0.7.1"

  object Karaf {
    val Version = "4.0.2"
    val PaxWebVersion = "4.2.2"

    // when this is changed, update the sbt-karaf-packaging tests
    lazy val testFeatures = Seq(
      featureID("org.apache.camel.karaf", "apache-camel", "2.16.0"),
      featureID("org.apache.karaf.features", "standard", Version),
      featureID("org.apache.karaf.features", "enterprise", Version),
      featureID("org.ops4j.pax.web", "pax-web-features", PaxWebVersion)).map(_ % "test")

    private def featureID(o: String, n: String, v: String, a: Option[String] = None) =
      ModuleID(o, n, v, isTransitive = false,
        explicitArtifacts = Seq(Artifact(a getOrElse n, "xml", "xml", "features")))

    lazy val jmxDependencies = Seq(
      Karaf.bundle,
      Karaf.config,
      Karaf.features,
      Karaf.system)

    // Karaf's MBean dependencies, see: http://karaf.apache.org/manual/latest/users-guide/monitoring.html

    // {{org.apache.karaf:type=config,name=*}}: management of the configurations.
    val config = kmodule("config")

    // {{org.apache.karaf:type=bundle,name=*}}: management of the OSGi bundles.
    val bundle = kmodule("bundle")

    // {{org.apache.karaf:type=feature,name=*}}: management of the Apache Karaf features.
    val features = kmodule("features")

    // {{org.apache.karaf:type=system,name=*}}: management of the Apache Karaf container.
    val system = kmodule("system")

    val profile = kmodule("profile")

    private def kmodule(module: String) =
      s"org.apache.karaf.$module" % s"org.apache.karaf.$module.core" % Version withSources() notTransitive()
  }
}
wav/osgi-tooling
project/Dependencies.scala
Scala
apache-2.0
2,335
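As a sketch of how such a dependency catalogue is typically consumed from an sbt build definition (the selection of modules here is illustrative, not taken from the repository):

// build.sbt (illustrative)
import wav.devtools.sbt.karaf.Dependencies

libraryDependencies ++= Seq(
  Dependencies.slf4j,
  Dependencies.osgiCore,
  Dependencies.scalaTest
) ++ Dependencies.Karaf.jmxDependencies ++ Dependencies.Karaf.testFeatures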
package com.sprayed.chat

import akka.actor.ActorSystem
import akka.event.Logging
import spray.can.Http
import akka.actor.Props
import akka.io.IO

object Boot extends App {

  implicit val system = ActorSystem("spray-api-service")

  val log = Logging(system, getClass)

  val service = system.actorOf(Props[SprayApiDemoServiceActor], "spray-service")

  IO(Http) ! Http.Bind(service, interface = "localhost", port = 8080)
}
Wirwing/spray-sandbox
src/main/scala/com/sprayed/chat/Boot.scala
Scala
mit
423
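The SprayApiDemoServiceActor referenced by Boot is not part of this file; a minimal spray-routing actor compatible with the bootstrap code might look like the following (purely illustrative, the route is an assumption):

import spray.routing.HttpServiceActor

class SprayApiDemoServiceActor extends HttpServiceActor {
  // Delegate incoming HTTP requests to the routing DSL.
  def receive = runRoute {
    path("ping") {
      get {
        complete("pong")
      }
    }
  }
}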
package controllers import javax.inject.{Inject, Singleton} import akka.stream.scaladsl.{FileIO, Source} import be.objectify.deadbolt.scala.{ActionBuilders, AuthenticatedRequest} import model.auth.AdminRole import model.config.{DeadboltConfig, QuestionsServiceConfig} import model.questions.Question import play.api.libs.Files import play.api.libs.json._ import play.api.libs.ws.WSClient import play.api.libs.ws.StreamedBody import play.api.mvc.{Controller, MultipartFormData} import play.api.mvc.MultipartFormData.DataPart import scala.concurrent.{ExecutionContext, Future} import scala.util.{Failure, Success, Try} @Singleton class AdminController @Inject()( actionBuilders: ActionBuilders, ws: WSClient, deadboltConfig: DeadboltConfig, questionsServiceConfig: QuestionsServiceConfig)(implicit ec: ExecutionContext) extends Controller { private def authAction = actionBuilders.RestrictAction(AdminRole.name).defaultHandler() def showCreatingForm = authAction { _ => Future.successful(Ok(views.html.question_add())) } def createQuestion = authAction(parse.multipartFormData) { request => val multipartData = multipartToSource(request) val responseF = ws.url(s"${questionsServiceConfig.url}/admin/questions/new").post(Source(multipartData)) responseF.map { response => response.status match { case OK => Redirect(routes.AdminController.getAllTests()) case INTERNAL_SERVER_ERROR => println(response.body) InternalServerError(Json.obj("result" -> "error", "error" -> "Questions service internal error")) } } } def showQuestion(testId: String, questionId: Int) = authAction { _ => val responseF = ws.url(s"${questionsServiceConfig.url}/admin/questions/${testId}/${questionId}").get() responseF .map { response => Try { val rjson = response.json rjson.as[Question] } } .map { case Success(question) => Ok(views.html.question_details(question)) case Failure(f) => InternalServerError(Json.obj("error" -> f.toString)) } } def deleteQuestion(testId: String, questionId: Int) = authAction { _ => val responseF = ws.url(s"${questionsServiceConfig.url}/admin/questions/${testId}/${questionId}").delete() responseF.map { _ => Redirect(routes.AdminController.getAllTests()) } } def processForm(testId: String, questionId: Int) = authAction(parse.multipartFormData) { request => val multipartData = multipartToSource(request) val responseF = ws.url(s"${questionsServiceConfig.url}/admin/questions/${testId}/${questionId}").put(Source(multipartData)) responseF.map { response => response.status match { case OK => Redirect(routes.AdminController.getAllTests()) case INTERNAL_SERVER_ERROR => InternalServerError(Json.obj("result" -> "error", "error" -> "Questions service internal error")) } } } def getAllTests = authAction { implicit request => val responseF = ws.url(s"${questionsServiceConfig.url}/admin/tests").get() responseF .map { implicit response => Try { val rjson = response.json (rjson \\ "tests").as[List[String]] } } .map { case Success(tests) => Ok(views.html.questions_admin(tests)) case Failure(f) => InternalServerError(Json.obj("error" -> f.toString)) } } def getQuestionsForTest(testId: String) = authAction { implicit request => val responseF = ws.url(s"${questionsServiceConfig.url}/admin/questions/$testId").get() responseF .map { implicit response => Try { val rjson = response.json (rjson \\ "questions").as[List[Question]] } } .map { case Success(tests) => Ok(Json.obj("questions" -> Json.toJson(tests))) case Failure(f) => InternalServerError(Json.obj("error" -> f.toString)) } } private def multipartToSource(request: 
AuthenticatedRequest[MultipartFormData[Files.TemporaryFile]]) = { val b = request.body b.files.toList.map(file => file.copy(ref = FileIO.fromPath(file.ref.file.toPath))) ::: b.dataParts.toList.map { case (key, values) => DataPart(key, values.head) } } }
BandOf3/assignment-system-web
app/controllers/AdminController.scala
Scala
mit
4,274
package utils

import scala.concurrent.Await
import scala.concurrent.Future
import scala.concurrent.duration._

/**
 * Utility to synchronously await the result of a Future, with a timeout given in seconds.
 */
object Awaits {

  def get[T](sec: Int, f: Future[T]): T =
    Await.result[T](f, sec.seconds)
}
tnddn/iv-web
portal/rest-portal/app/utils/Awaits.scala
Scala
apache-2.0
278
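Usage is straightforward; for example, blocking for at most five seconds on a computed Future (illustrative values):

import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

// Blocks the calling thread for up to 5 seconds, then returns 42 or throws a TimeoutException.
val answer: Int = Awaits.get(5, Future(21 * 2))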
package com.twitter.util import com.twitter.conversions.DurationOps._ import org.scalatest.funsuite.AnyFunSuite class ClosableOnceTest extends AnyFunSuite { private def ready[T <: Awaitable[_]](awaitable: T): T = Await.ready(awaitable, 10.seconds) test("wrap") { var closedCalls = 0 val underlying = new Closable { def close(deadline: Time): Future[Unit] = { closedCalls += 1 Future.Done } } val closableOnce = ClosableOnce.of(underlying) assert(closableOnce.isClosed == false) closableOnce.close() assert(closableOnce.isClosed == true) closableOnce.close() assert(closedCalls == 1) } test("if closeOnce throws an exception, the closeable is closed with that exception") { val ex = new Exception("boom") var closedCalls = 0 val closableOnce = new ClosableOnce { protected def closeOnce(deadline: Time): Future[Unit] = { closedCalls += 1 throw ex } } assert(closableOnce.isClosed == false) assert(ready(closableOnce.close()).poll.get.throwable == ex) assert(closedCalls == 1) assert(closableOnce.isClosed == true) assert(ready(closableOnce.close()).poll.get.throwable == ex) assert(closedCalls == 1) } }
twitter/util
util-core/src/test/scala/com/twitter/util/ClosableOnceTest.scala
Scala
apache-2.0
1,258
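The behaviour exercised by this test translates directly into usage: wrapping a Closable with ClosableOnce.of guarantees the underlying close logic runs at most once, regardless of how many times close() is invoked (a sketch based on the assertions above):

import com.twitter.util.{Closable, ClosableOnce, Future, Time}

val underlying = Closable.make { _: Time =>
  println("releasing resources") // runs only on the first close()
  Future.Done
}

val once = ClosableOnce.of(underlying)
once.close()
once.close() // the wrapped close already ran; this call does not run it again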
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.command.mutation import java.util import scala.collection.JavaConverters._ import scala.reflect.ClassTag import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.Path import org.apache.hadoop.mapred.JobConf import org.apache.hadoop.mapreduce.Job import org.apache.hadoop.mapreduce.lib.input.FileInputFormat import org.apache.spark.rdd.RDD import org.apache.spark.sql.{CarbonEnv, Row, SparkSession} import org.apache.spark.sql.catalyst.TableIdentifier import org.apache.spark.sql.execution.command.ExecutionErrors import org.apache.spark.sql.optimizer.CarbonFilters import org.apache.carbondata.common.logging.{LogService, LogServiceFactory} import org.apache.carbondata.core.constants.CarbonCommonConstants import org.apache.carbondata.core.datamap.Segment import org.apache.carbondata.core.datastore.impl.FileFactory import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier import org.apache.carbondata.core.mutate.{CarbonUpdateUtil, DeleteDeltaBlockDetails, SegmentUpdateDetails, TupleIdEnum} import org.apache.carbondata.core.mutate.data.RowCountDetailsVO import org.apache.carbondata.core.statusmanager.{SegmentStatus, SegmentStatusManager, SegmentUpdateStatusManager} import org.apache.carbondata.core.util.{CarbonUtil, ThreadLocalSessionInfo} import org.apache.carbondata.core.util.path.CarbonTablePath import org.apache.carbondata.core.writer.CarbonDeleteDeltaWriterImpl import org.apache.carbondata.hadoop.api.{CarbonInputFormat, CarbonTableInputFormat} import org.apache.carbondata.hadoop.util.CarbonInputFormatUtil import org.apache.carbondata.processing.exception.MultipleMatchingException import org.apache.carbondata.processing.loading.FailureCauses import org.apache.carbondata.spark.DeleteDelataResultImpl import org.apache.carbondata.spark.rdd.SerializableConfiguration object DeleteExecution { val LOGGER: LogService = LogServiceFactory.getLogService(this.getClass.getName) /** * generate the delete delta files in each segment as per the RDD. * @return it gives the segments which needs to be deleted. 
*/ def deleteDeltaExecution( databaseNameOp: Option[String], tableName: String, sparkSession: SparkSession, dataRdd: RDD[Row], timestamp: String, isUpdateOperation: Boolean, executorErrors: ExecutionErrors): Seq[Segment] = { var res: Array[List[(SegmentStatus, (SegmentUpdateDetails, ExecutionErrors))]] = null val database = CarbonEnv.getDatabaseName(databaseNameOp)(sparkSession) val carbonTable = CarbonEnv.getCarbonTable(databaseNameOp, tableName)(sparkSession) val absoluteTableIdentifier = carbonTable.getAbsoluteTableIdentifier val tablePath = absoluteTableIdentifier.getTablePath var segmentsTobeDeleted = Seq.empty[Segment] val deleteRdd = if (isUpdateOperation) { val schema = org.apache.spark.sql.types.StructType(Seq(org.apache.spark.sql.types.StructField( CarbonCommonConstants.CARBON_IMPLICIT_COLUMN_TUPLEID, org.apache.spark.sql.types.StringType))) val rdd = dataRdd .map(row => Row(row.get(row.fieldIndex( CarbonCommonConstants.CARBON_IMPLICIT_COLUMN_TUPLEID)))) sparkSession.createDataFrame(rdd, schema).rdd } else { dataRdd } val (carbonInputFormat, job) = createCarbonInputFormat(absoluteTableIdentifier) CarbonInputFormat.setTableInfo(job.getConfiguration, carbonTable.getTableInfo) val keyRdd = deleteRdd.map({ row => val tupleId: String = row .getString(row.fieldIndex(CarbonCommonConstants.CARBON_IMPLICIT_COLUMN_TUPLEID)) val key = CarbonUpdateUtil.getSegmentWithBlockFromTID(tupleId) (key, row) }).groupByKey() // if no loads are present then no need to do anything. if (keyRdd.partitions.length == 0) { return segmentsTobeDeleted } val blockMappingVO = carbonInputFormat.getBlockRowCount( job, carbonTable, CarbonFilters.getPartitions( Seq.empty, sparkSession, TableIdentifier(tableName, databaseNameOp)).map(_.asJava).orNull) val segmentUpdateStatusMngr = new SegmentUpdateStatusManager(carbonTable) CarbonUpdateUtil .createBlockDetailsMap(blockMappingVO, segmentUpdateStatusMngr) val metadataDetails = SegmentStatusManager.readTableStatusFile( CarbonTablePath.getTableStatusFilePath(carbonTable.getTablePath)) val isStandardTable = CarbonUtil.isStandardCarbonTable(carbonTable) val rowContRdd = sparkSession.sparkContext.parallelize( blockMappingVO.getCompleteBlockRowDetailVO.asScala.toSeq, keyRdd.partitions.length) val conf = sparkSession.sparkContext.broadcast(new SerializableConfiguration(sparkSession .sessionState.newHadoopConf())) val rdd = rowContRdd.join(keyRdd) res = rdd.mapPartitionsWithIndex( (index: Int, records: Iterator[((String), (RowCountDetailsVO, Iterable[Row]))]) => Iterator[List[(SegmentStatus, (SegmentUpdateDetails, ExecutionErrors))]] { ThreadLocalSessionInfo.setConfigurationToCurrentThread(conf.value.value) var result = List[(SegmentStatus, (SegmentUpdateDetails, ExecutionErrors))]() while (records.hasNext) { val ((key), (rowCountDetailsVO, groupedRows)) = records.next val segmentId = key.substring(0, key.indexOf(CarbonCommonConstants.FILE_SEPARATOR)) result = result ++ deleteDeltaFunc(index, key, groupedRows.toIterator, timestamp, rowCountDetailsVO, isStandardTable) } result }).collect() // if no loads are present then no need to do anything. if (res.flatten.isEmpty) { return segmentsTobeDeleted } // update new status file checkAndUpdateStatusFiles() // all or none : update status file, only if complete delete opeartion is successfull. 
def checkAndUpdateStatusFiles(): Unit = { val blockUpdateDetailsList = new util.ArrayList[SegmentUpdateDetails]() val segmentDetails = new util.HashSet[Segment]() res.foreach(resultOfSeg => resultOfSeg.foreach( resultOfBlock => { if (resultOfBlock._1 == SegmentStatus.SUCCESS) { blockUpdateDetailsList.add(resultOfBlock._2._1) segmentDetails.add(new Segment(resultOfBlock._2._1.getSegmentName)) // if this block is invalid then decrement block count in map. if (CarbonUpdateUtil.isBlockInvalid(resultOfBlock._2._1.getSegmentStatus)) { CarbonUpdateUtil.decrementDeletedBlockCount(resultOfBlock._2._1, blockMappingVO.getSegmentNumberOfBlockMapping) } } else { // In case of failure , clean all related delete delta files CarbonUpdateUtil.cleanStaleDeltaFiles(carbonTable, timestamp) LOGGER.audit(s"Delete data operation is failed for ${ database }.${ tableName }") val errorMsg = "Delete data operation is failed due to failure in creating delete delta file for " + "segment : " + resultOfBlock._2._1.getSegmentName + " block : " + resultOfBlock._2._1.getBlockName executorErrors.failureCauses = resultOfBlock._2._2.failureCauses executorErrors.errorMsg = resultOfBlock._2._2.errorMsg if (executorErrors.failureCauses == FailureCauses.NONE) { executorErrors.failureCauses = FailureCauses.EXECUTOR_FAILURE executorErrors.errorMsg = errorMsg } LOGGER.error(errorMsg) return } } ) ) val listOfSegmentToBeMarkedDeleted = CarbonUpdateUtil .getListOfSegmentsToMarkDeleted(blockMappingVO.getSegmentNumberOfBlockMapping) segmentsTobeDeleted = listOfSegmentToBeMarkedDeleted.asScala // this is delete flow so no need of putting timestamp in the status file. if (CarbonUpdateUtil .updateSegmentStatus(blockUpdateDetailsList, carbonTable, timestamp, false) && CarbonUpdateUtil .updateTableMetadataStatus(segmentDetails, carbonTable, timestamp, !isUpdateOperation, listOfSegmentToBeMarkedDeleted) ) { LOGGER.info(s"Delete data operation is successful for ${ database }.${ tableName }") LOGGER.audit(s"Delete data operation is successful for ${ database }.${ tableName }") } else { // In case of failure , clean all related delete delta files CarbonUpdateUtil.cleanStaleDeltaFiles(carbonTable, timestamp) val errorMessage = "Delete data operation is failed due to failure " + "in table status updation." 
LOGGER.audit(s"Delete data operation is failed for ${ database }.${ tableName }") LOGGER.error("Delete data operation is failed due to failure in table status updation.") executorErrors.failureCauses = FailureCauses.STATUS_FILE_UPDATION_FAILURE executorErrors.errorMsg = errorMessage // throw new Exception(errorMessage) } } def deleteDeltaFunc(index: Int, key: String, iter: Iterator[Row], timestamp: String, rowCountDetailsVO: RowCountDetailsVO, isStandardTable: Boolean ): Iterator[(SegmentStatus, (SegmentUpdateDetails, ExecutionErrors))] = { val result = new DeleteDelataResultImpl() var deleteStatus = SegmentStatus.LOAD_FAILURE val LOGGER = LogServiceFactory.getLogService(this.getClass.getName) // here key = segment/blockName val blockName = CarbonUpdateUtil .getBlockName( CarbonTablePath.addDataPartPrefix(key.split(CarbonCommonConstants.FILE_SEPARATOR)(1))) val segmentId = key.split(CarbonCommonConstants.FILE_SEPARATOR)(0) val deleteDeltaBlockDetails: DeleteDeltaBlockDetails = new DeleteDeltaBlockDetails(blockName) val resultIter = new Iterator[(SegmentStatus, (SegmentUpdateDetails, ExecutionErrors))] { val segmentUpdateDetails = new SegmentUpdateDetails() var TID = "" var countOfRows = 0 try { while (iter.hasNext) { val oneRow = iter.next TID = oneRow .get(oneRow.fieldIndex(CarbonCommonConstants.CARBON_IMPLICIT_COLUMN_TUPLEID)).toString val offset = CarbonUpdateUtil.getRequiredFieldFromTID(TID, TupleIdEnum.OFFSET) val blockletId = CarbonUpdateUtil .getRequiredFieldFromTID(TID, TupleIdEnum.BLOCKLET_ID) val pageId = Integer.parseInt(CarbonUpdateUtil .getRequiredFieldFromTID(TID, TupleIdEnum.PAGE_ID)) val IsValidOffset = deleteDeltaBlockDetails.addBlocklet(blockletId, offset, pageId) // stop delete operation if(!IsValidOffset) { executorErrors.failureCauses = FailureCauses.MULTIPLE_INPUT_ROWS_MATCHING executorErrors.errorMsg = "Multiple input rows matched for same row." throw new MultipleMatchingException("Multiple input rows matched for same row.") } countOfRows = countOfRows + 1 } val blockPath = CarbonUpdateUtil.getTableBlockPath(TID, tablePath, isStandardTable) val completeBlockName = CarbonTablePath .addDataPartPrefix(CarbonUpdateUtil.getRequiredFieldFromTID(TID, TupleIdEnum.BLOCK_ID) + CarbonCommonConstants.FACT_FILE_EXT) val deleteDeletaPath = CarbonUpdateUtil .getDeleteDeltaFilePath(blockPath, blockName, timestamp) val carbonDeleteWriter = new CarbonDeleteDeltaWriterImpl(deleteDeletaPath, FileFactory.getFileType(deleteDeletaPath)) segmentUpdateDetails.setBlockName(blockName) segmentUpdateDetails.setActualBlockName(completeBlockName) segmentUpdateDetails.setSegmentName(segmentId) segmentUpdateDetails.setDeleteDeltaEndTimestamp(timestamp) segmentUpdateDetails.setDeleteDeltaStartTimestamp(timestamp) val alreadyDeletedRows: Long = rowCountDetailsVO.getDeletedRowsInBlock val totalDeletedRows: Long = alreadyDeletedRows + countOfRows segmentUpdateDetails.setDeletedRowsInBlock(totalDeletedRows.toString) if (totalDeletedRows == rowCountDetailsVO.getTotalNumberOfRows) { segmentUpdateDetails.setSegmentStatus(SegmentStatus.MARKED_FOR_DELETE) } else { // write the delta file carbonDeleteWriter.write(deleteDeltaBlockDetails) } deleteStatus = SegmentStatus.SUCCESS } catch { case e : MultipleMatchingException => LOGGER.audit(e.getMessage) LOGGER.error(e.getMessage) // dont throw exception here. case e: Exception => val errorMsg = s"Delete data operation is failed for ${ database }.${ tableName }." 
LOGGER.audit(errorMsg) LOGGER.error(errorMsg + e.getMessage) throw e } var finished = false override def hasNext: Boolean = { if (!finished) { finished = true finished } else { !finished } } override def next(): (SegmentStatus, (SegmentUpdateDetails, ExecutionErrors)) = { finished = true result.getKey(deleteStatus, (segmentUpdateDetails, executorErrors)) } } resultIter } segmentsTobeDeleted } private def createCarbonInputFormat(absoluteTableIdentifier: AbsoluteTableIdentifier) : (CarbonTableInputFormat[Array[Object]], Job) = { val carbonInputFormat = new CarbonTableInputFormat[Array[Object]]() val jobConf: JobConf = new JobConf(FileFactory.getConfiguration) val job: Job = new Job(jobConf) FileInputFormat.addInputPath(job, new Path(absoluteTableIdentifier.getTablePath)) (carbonInputFormat, job) } }
sgururajshetty/carbondata
integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/DeleteExecution.scala
Scala
apache-2.0
14,986
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.jdbc import java.sql.{Date, Timestamp, Types} import org.apache.spark.sql.types._ private case object OracleDialect extends JdbcDialect { override def canHandle(url: String): Boolean = url.startsWith("jdbc:oracle") override def getCatalystType( sqlType: Int, typeName: String, size: Int, md: MetadataBuilder): Option[DataType] = { if (sqlType == Types.NUMERIC) { val scale = if (null != md) md.build().getLong("scale") else 0L size match { // Handle NUMBER fields that have no precision/scale in special way // because JDBC ResultSetMetaData converts this to 0 precision and -127 scale // For more details, please see // https://github.com/apache/spark/pull/8780#issuecomment-145598968 // and // https://github.com/apache/spark/pull/8780#issuecomment-144541760 case 0 => Option(DecimalType(DecimalType.MAX_PRECISION, 10)) // Handle FLOAT fields in a special way because JDBC ResultSetMetaData converts // this to NUMERIC with -127 scale // Not sure if there is a more robust way to identify the field as a float (or other // numeric types that do not specify a scale. case _ if scale == -127L => Option(DecimalType(DecimalType.MAX_PRECISION, 10)) case _ => None } } else { None } } override def getJDBCType(dt: DataType): Option[JdbcType] = dt match { // For more details, please see // https://docs.oracle.com/cd/E19501-01/819-3659/gcmaz/ case BooleanType => Some(JdbcType("NUMBER(1)", java.sql.Types.BOOLEAN)) case IntegerType => Some(JdbcType("NUMBER(10)", java.sql.Types.INTEGER)) case LongType => Some(JdbcType("NUMBER(19)", java.sql.Types.BIGINT)) case FloatType => Some(JdbcType("NUMBER(19, 4)", java.sql.Types.FLOAT)) case DoubleType => Some(JdbcType("NUMBER(19, 4)", java.sql.Types.DOUBLE)) case ByteType => Some(JdbcType("NUMBER(3)", java.sql.Types.SMALLINT)) case ShortType => Some(JdbcType("NUMBER(5)", java.sql.Types.SMALLINT)) case StringType => Some(JdbcType("VARCHAR2(255)", java.sql.Types.VARCHAR)) case _ => None } override def compileValue(value: Any): Any = value match { // The JDBC drivers support date literals in SQL statements written in the // format: {d 'yyyy-mm-dd'} and timestamp literals in SQL statements written // in the format: {ts 'yyyy-mm-dd hh:mm:ss.f...'}. For details, see // 'Oracle Database JDBC Developer’s Guide and Reference, 11g Release 1 (11.1)' // Appendix A Reference Information. case stringValue: String => s"'${escapeSql(stringValue)}'" case timestampValue: Timestamp => "{ts '" + timestampValue + "'}" case dateValue: Date => "{d '" + dateValue + "'}" case arrayValue: Array[Any] => arrayValue.map(compileValue).mkString(", ") case _ => value } override def isCascadingTruncateTable(): Option[Boolean] = Some(false) }
mike0sv/spark
sql/core/src/main/scala/org/apache/spark/sql/jdbc/OracleDialect.scala
Scala
apache-2.0
3,758
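Dialects like this one are looked up through JdbcDialects based on the JDBC URL; a user-defined dialect follows the same shape and can be registered explicitly. A minimal sketch (the dialect name and type mapping are assumptions, not Spark's built-in Oracle handling):

import java.util.Locale
import org.apache.spark.sql.jdbc.{JdbcDialect, JdbcDialects, JdbcType}
import org.apache.spark.sql.types._

object MyWarehouseDialect extends JdbcDialect {
  // Claim URLs for a hypothetical "mywarehouse" JDBC driver.
  override def canHandle(url: String): Boolean =
    url.toLowerCase(Locale.ROOT).startsWith("jdbc:mywarehouse")

  // Map Catalyst StringType to a wider VARCHAR than the usual 255-character default.
  override def getJDBCType(dt: DataType): Option[JdbcType] = dt match {
    case StringType => Some(JdbcType("VARCHAR(4000)", java.sql.Types.VARCHAR))
    case _          => None
  }
}

JdbcDialects.registerDialect(MyWarehouseDialect)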
package gust.linalg.cuda import breeze.generic.UFunc import jcuda.jcublas.{JCublas2, cublasHandle} import breeze.linalg.{DenseMatrix, DenseVector} import org.netlib.util.intW import jcuda.driver.{CUfunction, CUmodule, JCudaDriver} import gust.util.cuda.{CuContext, CuDevice} import spire.syntax.cfor._ import gust.linalg.cuda.CuWrapperMethods._ import com.github.fommil.netlib.LAPACK.{getInstance => lapack} /** * Copyright 2014 Piotr Moczurad * */ object CuQR extends UFunc { /** * Given a matrix containing both the upper triangular matrix and the householder vectors, and * a vector containing the householder quantities, constructs full Q and R matrices. * @param A * @param tau * @param handle * @return */ def QRFactorsFloat(A: CuMatrix[Float], tau: DenseVector[Float])(implicit handle: cublasHandle): (CuMatrix[Float], CuMatrix[Float]) = { if (A.rows < A.cols) { println("Number of rows of matrix A cannot be smaller than the number of columns.") return (A, null) } val m = A.rows val n = A.cols val es = A.elemSize.toInt val d_R = CuMatrix.create[Float](m, n); d_R := A // triangular factor val d_A = CuMatrix.create[Float](m, n); d_A := A // copy of A val d_Q = CuMatrix.fromDense(DenseMatrix.eye[Float](m)) // orthogonal factor val d_Q1 = CuMatrix.create[Float](m, m) val d_H = CuMatrix.create[Float](m, m) // placeholder for reflectors val d_diag = CuMatrix.ones[Float](m, 1) // we'll use it to set/update the diagonal val tauArr = Array(0.0f) val tauPtr = jcuda.Pointer.to(tauArr) val zeroArr = Array(0.0f) val zero = jcuda.Pointer.to(zeroArr) val oneArr = Array(1.0f) val one = jcuda.Pointer.to(oneArr) val minusOneArr = Array(1.0f) val minusOne = jcuda.Pointer.to(minusOneArr) // zero out everything below the diagonal in d_R // and everything above (including) diagonal in d_A // TODO: kernels to copy the triangles zeroOutFloat(d_R, 'L') zeroOutFloat(d_A, 'U', true) // set the diagonal in d_A to ones: JCublas2.cublasScopy(handle, n, d_diag.offsetPointer, 1, d_A.offsetPointer, d_A.majorStride+1) cfor(0)(_ < tau.length, _ + 1) { i => { tauArr(0) = -tau(i) // d_H = -tau(i) * d_A(:, i) * d_A(:, i)' SgemmNT(m, m, 1, tauPtr, d_A, 0, i, d_A, 0, i, zero, d_H, 0, 0) // d_H = d_H + I JCublas2.cublasSaxpy(handle, m, minusOne, d_diag.offsetPointer, 1, d_H.offsetPointer, d_H.majorStride+1) // d_Q *= d_H SgemmNN(m, m, m, one, d_Q, 0, 0, d_H, 0, 0, zero, d_Q1, 0, 0) d_Q := d_Q1 }} (d_Q, d_R) } def QRFactorsDouble(A: CuMatrix[Double], tau: DenseVector[Double])(implicit handle: cublasHandle): (CuMatrix[Double], CuMatrix[Double]) = { if (A.rows < A.cols) { println("Number of rows of matrix A cannot be smaller than the number of columns.") return (A, null) } val m = A.rows val n = A.cols val es = A.elemSize.toInt val d_R = CuMatrix.create[Double](m, n); d_R := A // triangular factor val d_A = CuMatrix.create[Double](m, n); d_A := A // copy of A val d_Q = CuMatrix.fromDense(DenseMatrix.eye[Double](m)) // orthogonal factor val d_Q1 = CuMatrix.create[Double](m, m) val d_H = CuMatrix.create[Double](m, m) // placeholder for reflectors val d_diag = CuMatrix.fromDense(DenseMatrix.ones[Double](m, 1)) // we'll use it to set/update the diagonal val tauArr = Array(0.0) val tauPtr = jcuda.Pointer.to(tauArr) val zeroArr = Array(0.0) val zero = jcuda.Pointer.to(zeroArr) val oneArr = Array(1.0) val one = jcuda.Pointer.to(oneArr) val minusOneArr = Array(1.0) val minusOne = jcuda.Pointer.to(minusOneArr) // zero out everything below the diagonal in d_R // and everything above (including) diagonal in d_A // TODO: kernels to copy the triangles 
zeroOutDouble(d_R, 'L') zeroOutDouble(d_A, 'U', true) // set the diagonal in d_A to ones: JCublas2.cublasDcopy(handle, n, d_diag.offsetPointer, 1, d_A.offsetPointer, d_A.majorStride+1) cfor(0)(_ < tau.length, _ + 1) { i => { tauArr(0) = -tau(i) // d_H = -tau(i) * d_A(:, i) * d_A(:, i)' DgemmNT(m, m, 1, tauPtr, d_A, 0, i, d_A, 0, i, zero, d_H, 0, 0) // d_H = d_H + I JCublas2.cublasDaxpy(handle, m, minusOne, d_diag.offsetPointer, 1, d_H.offsetPointer, d_H.majorStride+1) // d_Q *= d_H DgemmNN(m, m, m, one, d_Q, 0, 0, d_H, 0, 0, zero, d_Q1, 0, 0) d_Q := d_Q1 }} (d_Q, d_R) } /** * QR factorization for matrices of size m x n (where m >= n) (Float version) * The result is returned (as in LAPACK) in form of a matrix containing both * the triangular factor and the reflectors and a vector tau. * To construct the full Q and R factors, one has to use the 'QRFactors' method. * @param A * @param handle * @return */ def QRFloatMN(A: CuMatrix[Float])(implicit handle: cublasHandle): (CuMatrix[Float], DenseVector[Float]) = { if (A.rows < A.cols) { println("Number of rows of matrix A cannot be smaller than the number of columns.") return (A, null) } // pointers to scalars (for dgemm): val oneArr = Array(1.0f) val hostOne = jcuda.Pointer.to(oneArr) val zeroArr = Array(0.0f) val hostZero = jcuda.Pointer.to(zeroArr) val minusOneArr = Array(-1.0f) val hostMinusOne = jcuda.Pointer.to(minusOneArr) val nb = if (A.cols < 2) A.cols else 2 val ldaArr = Array(A.majorStride) val lda = jcuda.Pointer.to(ldaArr) val m = A.rows val n = A.cols // gpu matrices: val gpu_matrix = CuMatrix.create[Float](m, n); gpu_matrix := A val gpu_TV = CuMatrix.create[Float](nb, m) val gpu_TVA = CuMatrix.create[Float](nb, m) // cpu matrices: val cpu_matrix = A.toDense val cpu_tau = DenseVector.zeros[Float](n) val cpu_work = Array.ofDim[Float](m * n * nb) val cpu_T = DenseMatrix.zeros[Float](nb, nb) var h, w = 0 val es = gpu_matrix.elemSize val info = new intW(0) val lwork = cpu_work.length // prep for launching the kernel: JCudaDriver.setExceptionsEnabled(true) implicit val dev = CuDevice(0) val ctx = CuContext.ensureContext val module = new CUmodule() JCudaDriver.cuModuleLoad(module, "src/main/resources/gust/linalg/cuda/enforceLUFloat.ptx") val enfLU = new CUfunction() JCudaDriver.cuModuleGetFunction(enfLU, module, "_Z9enforceLUPfi") // we don't need to upload anything onto the GPU -- it's already there // not really sure about the 'm' here cfor(0)(_ < n, _ + nb) { i => { h = m - i w = if (n - i < nb) n - i else nb if (i > 0) { SgemmNN(nb, w, h + nb, hostOne, gpu_TV, 0, 0, gpu_matrix, i - nb, i, hostZero, gpu_TVA, 0, 0) SgemmNN(h + nb, w, nb, hostMinusOne, gpu_matrix, i - nb, i - nb, gpu_TVA, 0, 0, hostOne, gpu_matrix, i - nb, i) downloadFloat(m, w, cpu_matrix, 0, i, gpu_matrix, 0, i) SgemmNN(nb, h - nb, h + nb, hostOne, gpu_TV, 0, 0, gpu_matrix, i - nb, i + nb, hostZero, gpu_TVA, 0, 0) SgemmNN(h + nb, h - nb, nb, hostMinusOne, gpu_matrix, i - nb, i - nb, gpu_TVA, 0, 0, hostOne, gpu_matrix, i - nb, i + nb) } // factorization on CPU // additional params after matrices are offsets (like i in float *A; A+i) lapack.sgeqrf(h, w, cpu_matrix.data, cpu_matrix.linearIndex(i, i), cpu_matrix.majorStride, cpu_tau.data, i, cpu_work, 0, lwork, info) if (h > nb) { lapack.slarft("F", "C", h, w, cpu_matrix.data, cpu_matrix.linearIndex(i, i), cpu_matrix.majorStride, cpu_tau.data, i, cpu_T.data, 0, cpu_T.majorStride) // transpose cpu_T: cfor(0)(_ < nb, _ + 1) { j => { cfor(0)(_ < j, _ + 1) { k => { cpu_T(j, k) = cpu_T(k, j) cpu_T(k, j) = 0.0f } } } } // upload 
to GPU: uploadFloat(nb, nb, gpu_TVA, 0, 0, cpu_T, 0, 0) uploadFloat(h, nb, gpu_matrix, i, i, cpu_matrix, i, i) // enforceLU, kernel launch: val params = jcuda.Pointer.to( jcuda.Pointer.to(gpu_matrix.offsetPointer.withByteOffset(gpu_matrix.linearIndex(i, i) * es)), lda ) JCudaDriver.cuLaunchKernel(enfLU, nb, 1, 1, nb, 1, 1, 0, null, params, null) JCudaDriver.cuCtxSynchronize() SgemmNT(nb, h, nb, hostOne, gpu_TVA, 0, 0, gpu_matrix, i, i, hostZero, gpu_TV, 0, 0) } } } (CuMatrix.fromDense(cpu_matrix), cpu_tau) } /** * QR factorization for matrices of size m x n (where m >= n) (Double version) * @param A * @param handle * @return */ def QRDoubleMN(A: CuMatrix[Double])(implicit handle: cublasHandle): (CuMatrix[Double], DenseVector[Double]) = { if (A.rows < A.cols) { println("Number of rows of matrix A cannot be smaller than the number of columns.") return (A, null) } // pointers to scalars (for dgemm): val oneArr = Array(1.0) val hostOne = jcuda.Pointer.to(oneArr) val zeroArr = Array(0.0) val hostZero = jcuda.Pointer.to(zeroArr) val minusOneArr = Array(-1.0) val hostMinusOne = jcuda.Pointer.to(minusOneArr) val nb = if (A.cols < 64) A.cols else 64 val ldaArr = Array(A.majorStride) val lda = jcuda.Pointer.to(ldaArr) val m = A.rows val n = A.cols // gpu matrices: val gpu_matrix = CuMatrix.create[Double](m, n); gpu_matrix := A val gpu_TV = CuMatrix.create[Double](nb, m) val gpu_TVA = CuMatrix.create[Double](nb, m) // cpu matrices: val cpu_matrix = A.toDense val cpu_tau = DenseVector.zeros[Double](n) val cpu_work = Array.ofDim[Double](m * n * nb) val cpu_T = DenseMatrix.zeros[Double](nb, nb) var h, w = 0 val es = gpu_matrix.elemSize val info = new intW(0) val lwork = cpu_work.length // prep for launching the kernel: JCudaDriver.setExceptionsEnabled(true) implicit val dev = CuDevice(0) val ctx = CuContext.ensureContext val module = new CUmodule() JCudaDriver.cuModuleLoad(module, "src/main/resources/gust/linalg/cuda/enforceLUDouble.ptx") val enfLU = new CUfunction() JCudaDriver.cuModuleGetFunction(enfLU, module, "_Z9enforceLUPdi") // we don't need to upload anything onto the GPU -- it's already there cfor(0)(_ < n, _ + nb) { i => { h = m - i w = if (n - i < nb) n - i else nb if (i > 0) { DgemmNN(nb, w, h + nb, hostOne, gpu_TV, 0, 0, gpu_matrix, i - nb, i, hostZero, gpu_TVA, 0, 0) DgemmNN(h + nb, w, nb, hostMinusOne, gpu_matrix, i - nb, i - nb, gpu_TVA, 0, 0, hostOne, gpu_matrix, i - nb, i) downloadDouble(m, w, cpu_matrix, 0, i, gpu_matrix, 0, i) DgemmNN(nb, h - nb, h + nb, hostOne, gpu_TV, 0, 0, gpu_matrix, i - nb, i + nb, hostZero, gpu_TVA, 0, 0) DgemmNN(h + nb, h - nb, nb, hostMinusOne, gpu_matrix, i - nb, i - nb, gpu_TVA, 0, 0, hostOne, gpu_matrix, i - nb, i + nb) } // factorization on CPU // additional params after matrices are offsets (like i in float *A; A+i) lapack.dgeqrf(h, w, cpu_matrix.data, cpu_matrix.linearIndex(i, i), cpu_matrix.majorStride, cpu_tau.data, i, cpu_work, 0, lwork, info) if (h > nb) { lapack.dlarft("F", "C", h, w, cpu_matrix.data, cpu_matrix.linearIndex(i, i), cpu_matrix.majorStride, cpu_tau.data, i, cpu_T.data, 0, cpu_T.majorStride) // transpose cpu_T: cfor(0)(_ < nb, _ + 1) { j => { cfor(0)(_ < j, _ + 1) { k => { cpu_T(j, k) = cpu_T(k, j) cpu_T(k, j) = 0.0 } } } } // upload to GPU: uploadDouble(nb, nb, gpu_TVA, 0, 0, cpu_T, 0, 0) uploadDouble(h, nb, gpu_matrix, i, i, cpu_matrix, i, i) // enforceLU, kernel launch: val params = jcuda.Pointer.to( jcuda.Pointer.to(gpu_matrix.offsetPointer.withByteOffset(gpu_matrix.linearIndex(i, i) * es)), lda ) JCudaDriver.cuLaunchKernel(enfLU, 
nb, 1, 1, nb, 1, 1, 0, null, params, null) JCudaDriver.cuCtxSynchronize() DgemmNT(nb, h, nb, hostOne, gpu_TVA, 0, 0, gpu_matrix, i, i, hostZero, gpu_TV, 0, 0) } } } (CuMatrix.fromDense(cpu_matrix), cpu_tau) } }
piotrMocz/gust
src/main/scala/gust/linalg/cuda/CuQR.scala
Scala
apache-2.0
12,536
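A rough calling sketch for the QR routines above, assuming a working CUDA/cuBLAS setup and the CuMatrix API used in this file; the matrix contents are illustrative only:

import breeze.linalg.DenseMatrix
import gust.linalg.cuda.{CuMatrix, CuQR}
import jcuda.jcublas.{JCublas2, cublasHandle}

implicit val handle: cublasHandle = new cublasHandle()
JCublas2.cublasCreate(handle)

// Tall matrix (rows >= cols), as required by the QR routines above.
val hostA = DenseMatrix((4.0f, 1.0f), (2.0f, 3.0f), (0.0f, 1.0f))
val deviceA = CuMatrix.fromDense(hostA)

val (packedQR, tau) = CuQR.QRFloatMN(deviceA)   // LAPACK-style packed factors plus tau
val (q, r) = CuQR.QRFactorsFloat(packedQR, tau) // expand into explicit Q and R

JCublas2.cublasDestroy(handle)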
package com.typesafe.slick.testkit.tests import com.typesafe.slick.testkit.util.{AsyncTest, JdbcTestDB} import org.junit.Assert._ import slick.ast.ColumnOption import slick.model._ import slick.jdbc.SQLiteProfile import slick.jdbc.meta.MTable import slick.relational.RelationalProfile import slick.sql.SqlProfile @deprecated("Using deprecated .simple API", "3.0") class ModelBuilderTest extends AsyncTest[JdbcTestDB] { import tdb.profile.api._ class Categories(tag: Tag) extends Table[(Int, String)](tag, "categories") { def id = column[Int]("id", O.PrimaryKey, O.AutoInc) def name = column[String]("name", O.SqlType("VARCHAR(123)")) def * = (id, name) def idx = index("IDX_NAME",name) } val categories = TableQuery[Categories] class Posts(tag: Tag) extends Table[(Int, String, Option[Int], Boolean, String)](tag, "posts") { def id = column[Int]("id") def title = column[String]("title",O.Length(99,varying=false)) // tests Length produces valid SQL def category = column[Option[Int]]("category",O.Default(Some(531))) def someBool = column[Boolean]("some_bool",O.Default(true)) // tests boolean default values parsing def someString = column[String]("some_string",O.Length(111,varying=true)) // tests Length produces valid SQL def * = (id, title, category, someBool, someString) def pk = primaryKey("posts_pk", (id,title)) def categoryFK = foreignKey("category_fk", category, categories)(_.id.?) } val posts = TableQuery[Posts] class DefaultTest(tag: Tag) extends Table[(Boolean, Boolean, Boolean, Option[Boolean], Option[Boolean], Option[Boolean], String, String, String, Option[String], Option[String], Option[String], Option[String])](tag, "default_test") { def someBool = column[Boolean]("some_bool") def someBoolDefaultTrue = column[Boolean]("some_bool_default_true",O.Default(true)) def someBoolDefaultFalse = column[Boolean]("some_bool_default_false",O.Default(false)) def someBoolOption = column[Option[Boolean]]("some_bool_option") def someBoolOptionDefaultSome = column[Option[Boolean]]("some_bool_option_default_some",O.Default(Some(true))) def someBoolOptionDefaultNone = column[Option[Boolean]]("some_bool_option_default_none",O.Default(None)) def someString = column[String]("some_string") def someStringDefaultNonEmpty = column[String]("some_string_default_non_empty",O.Default("bar"),O.Length(254)) def someStringDefaultEmpty = column[String]("some_string_default_empty",O.Default(""),O.Length(254)) def someStringOption = column[Option[String]]("some_string_option") def someStringOptionDefaultEmpty = column[Option[String]]("str_option_default_empty",O.Default(Some("")),O.Length(254)) def someStringOptionDefaultNone = column[Option[String]]("str_option_default_none",O.Default(None)) def someStringOptionDefaultNonEmpty = column[Option[String]]("str_option_default_non_empty",O.Default(Some("foo")),O.Length(254)) def * = (someBool,someBoolDefaultTrue,someBoolDefaultFalse,someBoolOption,someBoolOptionDefaultSome,someBoolOptionDefaultNone,someString,someStringDefaultNonEmpty,someStringDefaultEmpty,someStringOption,someStringOptionDefaultEmpty,someStringOptionDefaultNonEmpty,someStringOptionDefaultNone) } val defaultTest = TableQuery[DefaultTest] class NoDefaultTest(tag: Tag) extends Table[(Int,Option[String],Option[String])](tag, "no_default_test") { def int = column[Int]("int") def stringOption = column[Option[String]]("stringOption") def stringOptionDefaultNone = column[Option[String]]("stringOptionDefaultNone",O.Default(None)) def * = (int,stringOption,stringOptionDefaultNone) } val noDefaultTest = 
TableQuery[NoDefaultTest] class TypeTest(tag: Tag) extends Table[( String,Boolean,Byte,Short,Int,Long,Float,Double,String,java.sql.Date,java.sql.Time,java.sql.Timestamp,java.sql.Blob//,java.sql.Clob ,Option[Int] ,( Option[Boolean],Option[Byte],Option[Short],Option[Int],Option[Long],Option[Float],Option[Double],Option[String],Option[java.sql.Date],Option[java.sql.Time],Option[java.sql.Timestamp],Option[java.sql.Blob]//,Option[java.sql.Clob] ) )](tag, "TYPE_TEST") { def `type` = column[String]("type") // <- test escaping of keywords def Boolean = column[Boolean]("Boolean",O.Default(true)) def Byte = column[Byte]("Byte") def Short = column[Short]("Short") def Int = column[Int]("Int",O.Default(-5)) def Long = column[Long]("Long",O.Default(5L)) //def java_math_BigInteger = column[java.math.BigInteger]("java_math_BigInteger") def Float = column[Float]("Float",O.Default(9.999F)) def Double = column[Double]("Double",O.Default(9.999)) //def java_math_BigDecimal = column[java.math.BigDecimal]("java_math_BigDecimal") def String = column[String]("String",O.Default("someDefaultString"), O.Length(254)) def java_sql_Date = column[java.sql.Date]("java_sql_Date") def java_sql_Time = column[java.sql.Time]("java_sql_Time") def java_sql_Timestamp = column[java.sql.Timestamp]("java_sql_Timestamp") def java_sql_Blob = column[java.sql.Blob]("java_sql_Blob") //def java_sql_Clob = column[java.sql.Clob]("java_sql_Clob") def None_Int = column[Option[Int]]("None_Int",O.Default(None)) def Option_Boolean = column[Option[Boolean]]("Option_Boolean",O.Default(Some(true))) def Option_Byte = column[Option[Byte]]("Option_Byte") def Option_Short = column[Option[Short]]("Option_Short") def Option_Int = column[Option[Int]]("Option_Int",O.Default(Some(5))) def Option_Long = column[Option[Long]]("Option_Long",O.Default(Some(-5L))) //def java_math_BigInteger = column[Option[java.math.BigInteger]]("java_math_BigInteger") def Option_Float = column[Option[Float]]("Option_Float",O.Default(Some(9.999F))) def Option_Double = column[Option[Double]]("Option_Double",O.Default(Some(9.999))) //def java_math_BigDecimal = column[Option[java.math.BigDecimal]]("java_math_BigDecimal") def Option_String = column[Option[String]]("Option_String",O.Default(Some("someDefaultString")), O.Length(254)) def Option_java_sql_Date = column[Option[java.sql.Date]]("Option_java_sql_Date") def Option_java_sql_Time = column[Option[java.sql.Time]]("Option_java_sql_Time") def Option_java_sql_Timestamp = column[Option[java.sql.Timestamp]]("Option_java_sql_Timestamp") def Option_java_sql_Blob = column[Option[java.sql.Blob]]("Option_java_sql_Blob") def Option_java_sql_Option_Blob = column[Option[Option[java.sql.Blob]]]("Option_java_sql_Blob") //def Option_java_sql_Clob = column[Option[java.sql.Clob]]("Option_java_sql_Clob") def * = ( `type`, Boolean,Byte,Short,Int,Long,Float,Double,String,java_sql_Date,java_sql_Time,java_sql_Timestamp,java_sql_Blob//,java_sql_Clob ,None_Int ,( Option_Boolean,Option_Byte,Option_Short,Option_Int,Option_Long,Option_Float,Option_Double,Option_String,Option_java_sql_Date,Option_java_sql_Time,Option_java_sql_Timestamp,Option_java_sql_Blob//,Option_java_sql_Clob ) ) def pk = primaryKey("PK", (Int,Long)) } val typeTest = TableQuery[TypeTest] def test = ifCap(jcap.createModel) { def createModel(tables: Option[Seq[MTable]] = None, ignoreInvalidDefaults: Boolean = true) = tdb.profile.createModel(tables.map(DBIO.successful), ignoreInvalidDefaults) // postgres uses lower case and things like int4 // seen in jtds: int identity // seen in oracle: 
VARCHAR2 val DBTypePattern = "^[a-zA-Z][a-zA-Z0-9 ]*$".r for { _ <- (posts.schema ++ categories.schema ++ defaultTest.schema ++ noDefaultTest.schema ++ typeTest.schema).create _ <- createModel(ignoreInvalidDefaults=false).map(_.assertConsistency) tables <- tdb.profile.defaultTables _ <- createModel(Some(tables), ignoreInvalidDefaults = false).map(_.assertConsistency) // checks that createModel filters out foreign keys pointing out _ <- createModel(Some(tables.filter(_.name.name.toUpperCase=="POSTS")), ignoreInvalidDefaults = false).map { model => model.assertConsistency assertEquals( 0, model.tables.map(_.foreignKeys.size).sum ) } _ <- createModel(Some(tables.filter(_.name.name.toUpperCase=="CATEGORIES")), ignoreInvalidDefaults = false).map(_.assertConsistency) // checks that assertConsistency fails when manually feeding the model with inconsistent tables _ <- createModel(Some(tables), ignoreInvalidDefaults = false).map { m => Model(m.tables.filter(_.name.table.toUpperCase=="POSTS")).shouldFail(_.assertConsistency) } model <- createModel(ignoreInvalidDefaults=false) _ = { // check that the model matches the table classes assertEquals( model.tables.toString, 5, model.tables.size ) val categories = model.tables.filter(_.name.table.toUpperCase=="CATEGORIES").head assertEquals( 2, categories.columns.size ) assertEquals( None, categories.primaryKey ) assertEquals( 0, categories.foreignKeys.size ) assertEquals( List("id"), categories.columns.filter(_.options.exists(_ == ColumnOption.PrimaryKey)).map(_.name).toList ) assertEquals( (123,true), categories.columns.filter(_.name == "name").head .options.collect{case RelationalProfile.ColumnOption.Length(length,varying) => (length,varying)}.head ) //assertEquals( categories.indices.toString, 1, categories.indices.size ) // Removed until made sure all dbs actually produce indices model //assertEquals( "IDX_NAME", categories.indices.head.name.get.toUpperCase ) categories.columns.foreach{ _.options.foreach{ case RelationalProfile.ColumnOption.Length(length,varying) => length < 256 case SqlProfile.ColumnOption.SqlType(DBTypePattern()) => case SqlProfile.ColumnOption.SqlType(dbType) => assert(false, "invalid DBType: "+dbType) case _ => } } } _ = { val posts = model.tables.filter(_.name.table.toUpperCase=="POSTS").head assertEquals( 5, posts.columns.size ) assertEquals( posts.indices.toString, 0, posts.indices.size ) if(tdb.profile != SQLiteProfile) { // Reporting of multi-column primary keys through JDBC metadata is broken in Xerial SQLite 3.8: // https://bitbucket.org/xerial/sqlite-jdbc/issue/107/databasemetadatagetprimarykeys-does-not assertEquals( Some(2), posts.primaryKey.map(_.columns.size) ) assert( !posts.columns.exists(_.options.exists(_ == ColumnOption.PrimaryKey)) ) } assertEquals( 1, posts.foreignKeys.size ) if(tdb.profile != slick.jdbc.SQLiteProfile){ assertEquals( "CATEGORY_FK", posts.foreignKeys.head.name.get.toUpperCase ) } def tpe(col:String) = posts.columns.filter(_.name == col).head .options.collect{case SqlProfile.ColumnOption.SqlType(tpe) => tpe}.head assert( Seq( "CHAR","CHARACTER", "BPCHAR" // bpchar: postgres //"char" // jtds ) contains tpe("title").toUpperCase, tpe("title") ) assert( Seq( "VARCHAR", "VARCHAR2" // oracle ) contains tpe("some_string").toUpperCase, tpe("title") ) assertEquals( (99,false), posts.columns.filter(_.name == "title").head .options.collect{case RelationalProfile.ColumnOption.Length(length,varying) => (length,varying)}.head ) assertEquals( (111,true), posts.columns.filter(_.name == "some_string").head 
.options.collect{case RelationalProfile.ColumnOption.Length(length,varying) => (length,varying)}.head ) posts.columns.foreach{ _.options.foreach{ case RelationalProfile.ColumnOption.Length(length,varying) => length < 256 case SqlProfile.ColumnOption.SqlType(DBTypePattern()) => case SqlProfile.ColumnOption.SqlType(dbType) => assert(false, "invalid DBType: "+dbType) case _ => } } } _ = { val defaultTest = model.tables.filter(_.name.table.toUpperCase=="DEFAULT_TEST").head assert(Some("PUBLIC") != defaultTest.name.schema.map(_.toUpperCase)) assert(Some("PUBLIC") != defaultTest.name.catalog.map(_.toUpperCase)) ifCapU(jcap.defaultValueMetaData){ def column(name: String) = defaultTest.columns.filter(_.name == name).head def columnDefault(name: String) = column(name) .options.collect{case RelationalProfile.ColumnOption.Default(v) => v} .headOption assertEquals(None, columnDefault("some_bool")) ifCapU(jcap.booleanMetaData){ assertEquals(Some(true), columnDefault("some_bool_default_true")) } ifNotCapU(jcap.booleanMetaData){ assertEquals(false,column("some_bool_default_true").nullable) assert( Seq(Some(1),Some('1')).contains( columnDefault("some_bool_default_true") ), columnDefault("some_bool_default_true").toString ) } ifCapU(jcap.booleanMetaData){ assertEquals(Some(false), columnDefault("some_bool_default_false")) } ifNotCapU(jcap.booleanMetaData){ assert( Seq(Some(0),Some('0')).contains( columnDefault("some_bool_default_false") ), columnDefault("some_bool_default_false").toString ) } ifCapU(jcap.nullableNoDefault){ assertEquals(None,columnDefault("some_bool_option")) } ifNotCapU(jcap.nullableNoDefault){ assertEquals(Some(None),columnDefault("some_bool_option")) } ifCapU(jcap.booleanMetaData){ assertEquals(Some(Some(true)), columnDefault("some_bool_option_default_some")) } ifNotCapU(jcap.booleanMetaData){ assert( Seq(Some(Some(1)),Some(Some('1'))).contains( columnDefault("some_bool_option_default_some") ), columnDefault("some_bool_option_default_some").toString ) } assertEquals(Some(None),columnDefault("some_bool_option_default_none")) assertEquals(None,columnDefault("some_string")) assertEquals(Some("bar"),columnDefault("some_string_default_non_empty")) assertEquals(Some(""),columnDefault("some_string_default_empty")) ifCapU(jcap.nullableNoDefault){ assertEquals(None,columnDefault("some_string_option")) } ifNotCapU(jcap.nullableNoDefault){ assertEquals(Some(None),columnDefault("some_string_option")) } assertEquals(Some(Some("")),columnDefault("str_option_default_empty")) assertEquals(Some(None),columnDefault("str_option_default_none")) assertEquals(Some(Some("foo")),columnDefault("str_option_default_non_empty")) } } _ = { val typeTest = model.tables.filter(_.name.table.toUpperCase=="TYPE_TEST").head def column(name: String) = typeTest.columns.filter(_.name.toUpperCase == name.toUpperCase).head def columnDefault(name: String) = column(name) .options.collect{case RelationalProfile.ColumnOption.Default(v) => v} .headOption ifCapU(jcap.booleanMetaData){ assertEquals("Boolean",column("Boolean").tpe) assertEquals("Boolean",column("Option_Boolean").tpe) } assertEquals(false,column("Boolean").nullable) assertEquals(true,column("Option_Boolean").nullable) ifCapU(jcap.supportsByte){ assertEquals("Byte",column("Byte").tpe) assertEquals("Byte",column("Option_Byte").tpe) } assertEquals(false,column("Byte").nullable) assertEquals(true,column("Option_Byte").nullable) ifCapU(jcap.distinguishesIntTypes){ assertEquals("Short",column("Short").tpe) assertEquals("Short",column("Option_Short").tpe) } 
assertEquals(false,column("Short").nullable) assertEquals(true,column("Option_Short").nullable) assertEquals(false,column("Int").nullable) assertEquals(true,column("Option_Int").nullable) assertEquals(false,column("Long").nullable) assertEquals(true,column("Option_Long").nullable) if(!tdb.profile.toString.contains("OracleProfile")){// FIXME: we should probably solve this somewhat cleaner assertEquals("Int",column("Int").tpe) assertEquals("Int",column("Option_Int").tpe) ifCapU(jcap.defaultValueMetaData){ assertEquals(Some(-5), columnDefault("Int")) assertEquals(Some(Some(5)), columnDefault("Option_Int")) } ifCapU(jcap.distinguishesIntTypes){ assertEquals("Long",column("Long").tpe) assertEquals("Long",column("Option_Long").tpe) } ifCapU(jcap.defaultValueMetaData){ assertEquals(Some(5L), columnDefault("Long")) assertEquals(Some(Some(-5L)), columnDefault("Option_Long")) } } /* h2 and hsqldb map this to Double assertEquals("Float",column("Float").tpe) assertEquals("Float",column("Option_Float").tpe) assertEquals(false,column("Float").nullable) assertEquals(true,column("Option_Float").nullable) */ assertEquals("Double",column("Double").tpe) assertEquals("Double",column("Option_Double").tpe) assertEquals(false,column("Double").nullable) assertEquals(true,column("Option_Double").nullable) assertEquals("String",column("String").tpe) assertEquals("String",column("Option_String").tpe) assertEquals(false,column("String").nullable) assertEquals(true,column("Option_String").nullable) assertEquals(false,column("java_sql_Date").nullable) assertEquals(true,column("Option_java_sql_Date").nullable) assertEquals(false,column("java_sql_Time").nullable) assertEquals(true,column("Option_java_sql_Time").nullable) assertEquals(false,column("java_sql_Timestamp").nullable) assertEquals(true,column("Option_java_sql_Timestamp").nullable) if(!tdb.profile.toString.contains("OracleProfile")){// FIXME: we should probably solve this somewhat cleaner assertEquals("java.sql.Date",column("java_sql_Date").tpe) assertEquals("java.sql.Date",column("Option_java_sql_Date").tpe) assertEquals("java.sql.Time",column("java_sql_Time").tpe) assertEquals("java.sql.Time",column("Option_java_sql_Time").tpe) assertEquals("java.sql.Timestamp",column("java_sql_Timestamp").tpe) assertEquals("java.sql.Timestamp",column("Option_java_sql_Timestamp").tpe) } assertEquals("java.sql.Blob",column("java_sql_Blob").tpe) assertEquals("java.sql.Blob",column("Option_java_sql_Blob").tpe) assertEquals(false,column("java_sql_Blob").nullable) assertEquals(true,column("Option_java_sql_Blob").nullable) } _ <- ifCap(jcap.defaultValueMetaData) { val typeTest = model.tables.filter(_.name.table.toUpperCase=="NO_DEFAULT_TEST").head def column(name: String) = typeTest.columns.filter(_.name.toUpperCase == name.toUpperCase).head def columnDefault(name: String) = column(name) .options.collect{case RelationalProfile.ColumnOption.Default(v) => v} .headOption ifCapU(jcap.nullableNoDefault){ assertEquals( None, columnDefault("stringOption") ) } assertEquals( Some(None), columnDefault("stringOptionDefaultNone") ) DBIO.seq( noDefaultTest.map(_.int) += 1, noDefaultTest.map(_.stringOption).result.head.map(_ shouldBe None) ) } } yield () } }
nafg/slick
slick-testkit/src/main/scala/com/typesafe/slick/testkit/tests/ModelBuilderTest.scala
Scala
bsd-2-clause
20,040
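The test above drives Slick's model builder against live JDBC metadata. As a minimal sketch of that entry point outside the testkit harness (the H2 profile, in-memory JDBC URL, and demo object are illustrative assumptions, not part of the test):

import slick.jdbc.H2Profile
import slick.jdbc.H2Profile.api._

import scala.concurrent.Await
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration

object ModelBuilderSketch extends App {
  // Hypothetical in-memory database; any JDBC URL supported by the profile would do.
  val db = Database.forURL("jdbc:h2:mem:model_demo;DB_CLOSE_DELAY=-1", driver = "org.h2.Driver")

  // createModel reads the JDBC metadata and builds a slick.model.Model,
  // the same structure the test above asserts on.
  val modelAction = H2Profile.createModel(ignoreInvalidDefaults = false)

  val model = Await.result(db.run(modelAction), Duration.Inf)
  model.tables.foreach(t => println(t.name))
}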
/* * * * Copyright 2015 Skymind,Inc. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * */ package org.dhira.core.util import java.io.Serializable import java.util._ /** * Clusters strings based on fingerprint: for example * Two words and TWO words or WORDS TWO would be put together * @author Adam Gibson * */ @SerialVersionUID(-4120559428585520276L) object StringCluster { @SerialVersionUID(-1390696157208674054L) class SizeComparator extends Comparator[Map[String, Integer]] with Serializable { def compare(o1: Map[String, Integer], o2: Map[String, Integer]): Int = { val s1: Int = o1.size val s2: Int = o2.size if (s1 == s2) { var total1: Int = 0 import scala.collection.JavaConversions._ for (i <- o1.values) { total1 += i } var total2: Int = 0 import scala.collection.JavaConversions._ for (i <- o2.values) { total2 += i } if (total2 < total1) return -1 if (total2 > total1) return 1 return 0 } else if (s2 < s1) { return -1 } else { return 1 } } } } @SerialVersionUID(-4120559428585520276L) class StringCluster extends HashMap[String, Map[String, Integer]] { def this(list: List[String]) { this() { var i: Int = 0 while (i < list.size) { { val s: String = list.get(i) val keyer: FingerPrintKeyer = new FingerPrintKeyer val key: String = keyer.key(s) if (containsKey(key)) { val m: Map[String, Integer] = get(key) if (m.containsKey(s)) { m.put(s, m.get(s) + 1) } else { m.put(s, 1) } } else { val m: Map[String, Integer] = new TreeMap[String, Integer] m.put(s, 1) put(key, m) } } ({ i += 1; i - 1 }) } } } def getClusters: List[Map[String, Integer]] = { val _clusters: List[Map[String, Integer]] = new ArrayList[Map[String, Integer]](values) Collections.sort(_clusters, new StringCluster.SizeComparator) return _clusters } def sort { Collections.sort(new ArrayList[Map[String, Integer]](values), new StringCluster.SizeComparator) } }
Mageswaran1989/aja
src/main/scala/org/aja/dhira/src/main/scala/org/dhira/core/util/StringCluster.scala
Scala
apache-2.0
2,908
class Foo[T[_, _], F[_], A, B](val fa: T[F[A], F[B]])

object Test {
  def x[T[_, _]](tmab: T[Either[Int, String], Either[Int, Int]]) = new Foo(tmab)
}
lampepfl/dotty
tests/pos/i9478.scala
Scala
apache-2.0
156
package mapmartadero
package model

import net.liftweb.record.{Field, MetaRecord, Record}
import net.liftweb.squerylrecord.KeyedRecord
import org.squeryl.annotations._
import net.liftweb.record.field._
import net.liftweb.http.{Templates, SHtml, S, RequestVar}
import org.squeryl.dsl.{OneToMany, ManyToOne}
import net.liftweb.util.{CssSel, FieldError}
import net.liftweb.squerylrecord.RecordTypeMode._
import net.liftweb.util.Helpers._
import org.squeryl.{Session, Schema}
import net.liftweb.json.JsonAST._
import net.liftweb.http.js.JsCmd
import net.liftweb.http.js.JsCmds.{RedirectTo, Noop}
import xml.{Text, NodeSeq}
import net.liftweb.common._
import net.liftweb.json.JsonAST.JObject
import net.liftweb.json.JsonAST.JField

class ActivityType private() extends Record[ActivityType] with KeyedRecord[Long] {
  override def meta: MetaRecord[ActivityType] = ActivityType

  @Column(name="ID_TIPOACTIVIDAD")
  override lazy val idField = new LongField(this, 0L) {
    override def shouldDisplay_? = false
  }

  @Column(name="NOMBRE_TIPOACTIVIDAD")
  lazy val name = new StringField(this, 200) {
    override def shouldDisplay_? = false
  }
}

object ActivityType extends ActivityType with MetaRecord[ActivityType]
jgenso/mapamartadero
src/main/scala/mapmartadero/model/ActivityType.scala
Scala
apache-2.0
1,215
/*
 * Part of NDLA learningpath-api.
 * Copyright (C) 2016 NDLA
 *
 * See LICENSE
 *
 */

package no.ndla.learningpathapi.validation

import io.lemonlabs.uri.dsl._
import no.ndla.learningpathapi.model.api.ValidationMessage

class UrlValidator() {
  val noHtmlTextValidator = new TextValidator(allowHtml = false)

  def validate(fieldPath: String, url: String): Seq[ValidationMessage] = {
    nonEmptyText(fieldPath, url) ++
      noHtmlInText(fieldPath, url) ++
      urlIsValid(fieldPath, url)
  }

  private def nonEmptyText(fieldPath: String, url: String): Seq[ValidationMessage] = {
    if (url.isEmpty) {
      return List(ValidationMessage(fieldPath, "Required field is empty."))
    }
    List()
  }

  private def noHtmlInText(fieldPath: String, url: String): Seq[ValidationMessage] = {
    noHtmlTextValidator.validate(fieldPath, url) match {
      case Some(x) => List(x)
      case _ => List()
    }
  }

  private def urlIsValid(fieldPath: String, url: String): Seq[ValidationMessage] = {
    if (url.path.nonEmpty && url.schemeOption.isEmpty && url.hostOption.isEmpty)
      List.empty
    else if (!url.startsWith("https"))
      List(ValidationMessage(fieldPath, "Illegal Url. All Urls must start with https."))
    else
      List.empty
  }
}
NDLANO/learningpath-api
src/main/scala/no/ndla/learningpathapi/validation/UrlValidator.scala
Scala
gpl-3.0
1,265
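A short usage sketch for the UrlValidator above (the field name and URLs are made up, and it assumes TextValidator, which lives in the same package in the real project, finds no HTML):

import no.ndla.learningpathapi.validation.UrlValidator

object UrlValidatorExample extends App {
  val validator = new UrlValidator()

  // Expected to pass all three checks: non-empty, no HTML, https scheme.
  println(validator.validate("coverPhotoUrl", "https://example.com/image.png"))

  // Expected to fail urlIsValid, because the scheme is not https.
  println(validator.validate("coverPhotoUrl", "http://example.com/image.png"))
}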
package at.logic.gapt.proofs import at.logic.gapt.expr.Polarity.{ Negative, Positive } import at.logic.gapt.expr.{ Formula, Polarity } import at.logic.gapt.formats.babel.{ BabelExporter, BabelSignature } import cats.Functor import cats.kernel.Monoid import scala.collection.GenTraversable /** * Represents an index of an element in a sequent. * * In a sequent, the elements have the following indices: * Ant(0), Ant(1), ..., Ant(m) :- Suc(0), Suc(1), ..., Suc(n) */ sealed abstract class SequentIndex extends Ordered[SequentIndex] { def compare( that: SequentIndex ) = ( this, that ) match { case ( Ant( _ ), Suc( _ ) ) => -1 case ( Suc( _ ), Ant( _ ) ) => 1 case ( Ant( i ), Ant( j ) ) => i - j case ( Suc( i ), Suc( j ) ) => i - j } /** * Increments the index by a natural number. * * @param i */ def +( i: Int ): SequentIndex /** * Decrements the index by a natural number. * * @param i */ def -( i: Int ): SequentIndex def polarity: Polarity def isAnt = polarity.inAnt def isSuc = polarity.inSuc def sameSideAs( that: SequentIndex ): Boolean = this.polarity == that.polarity /** Injective conversion to integers. */ def toInt: Int def withinSizes( p: ( Int, Int ) ): Boolean } object SequentIndex { def apply( polarity: Polarity, k: Int ): SequentIndex = polarity match { case Positive => Suc( k ) case Negative => Ant( k ) } } case class Ant( k: Int ) extends SequentIndex { require( k >= 0, "Indices < 0 are not supported." ) def +( i: Int ) = Ant( k + i ) def -( i: Int ) = Ant( k - i ) def polarity = Polarity.InAntecedent def toInt = -k - 1 def withinSizes( p: ( Int, Int ) ): Boolean = k < p._1 } case class Suc( k: Int ) extends SequentIndex { require( k >= 0, "Indices < 0 are not supported." ) def +( i: Int ) = Suc( k + i ) def -( i: Int ) = Suc( k - i ) def polarity = Polarity.InSuccedent def toInt = k def withinSizes( p: ( Int, Int ) ): Boolean = k < p._2 } /** * Used for clause set extraction * @param sequent A sequent. */ case class SetSequent[+A]( sequent: Sequent[A] ) { override def equals( that: Any ): Boolean = that match { case SetSequent( Sequent( ante, suc ) ) => this.sequent.antecedent.toSet == ante.toSet && this.sequent.succedent.toSet == suc.toSet case _ => false } override def hashCode = this.sequent.antecedent.distinct.toSet.hashCode() + this.sequent.succedent.distinct.toSet.hashCode() // permutation-invariant hashcode } /** * A sequent is a pair of sequences of elements of type A, typically written as a,,1,,,…,a,,m,, :- b,,1,,,…,b,,n,,. * * @param antecedent The first list. * @param succedent The second list. * @tparam A The type of the elements of the sequent. */ case class Sequent[+A]( antecedent: Vector[A], succedent: Vector[A] ) { override def toString = toSigRelativeString def toSigRelativeString( implicit sig: BabelSignature ): String = if ( forall { _.isInstanceOf[Formula] } ) { new BabelExporter( unicode = true, sig = sig ).export( this.asInstanceOf[HOLSequent] ) } else { val stringified = this map { _.toString } val multiLine = stringified.exists { _ contains "\\n" } || stringified.elements.map { _.length + 2 }.sum > 80 if ( multiLine ) s"${stringified.antecedent.mkString( ",\\n" )}\\n:-\\n${stringified.succedent.mkString( ",\\n" )}" else s"${stringified.antecedent.mkString( ", " )} :- ${stringified.succedent.mkString( ", " )}" } /** * Equality treating each side of the sequent as a set. */ def setEquals[B]( other: Sequent[B] ): Boolean = ( other isSubsetOf this ) && ( this isSubsetOf other ) /** * Equality treating each side of the sequent as a multiset. 
*/ def multiSetEquals[B]( other: Sequent[B] ): Boolean = ( other isSubMultisetOf this ) && ( this isSubMultisetOf other ) /** * Sequence of elements of the sequent. * * @return Antecedent concatenated with succedent. */ def elements: Vector[A] = antecedent ++ succedent /** * Sequence of elements together with polarities of type Boolean signifying whether an element is in the antecedent or succedent. * * @return */ def polarizedElements: Vector[( A, Polarity )] = map( _ -> Polarity.InAntecedent, _ -> Polarity.InSuccedent ).elements /** * Returns true iff both cedents are empty. * * @return */ def isEmpty: Boolean = antecedent.isEmpty && succedent.isEmpty def nonEmpty: Boolean = !isEmpty /** * Takes the multiset difference between two sequents, i.e. each side separately. */ def diff[B >: A]( other: Sequent[B] ) = Sequent( this.antecedent diff other.antecedent, this.succedent diff other.succedent ) /** * Computes the intersection of two sequents. * * @param other * @return */ def intersect[B >: A]( other: Sequent[B] ) = Sequent( antecedent intersect other.antecedent, succedent intersect other.succedent ) /** * Removes duplicate formulas from both cedents. * * @return */ def distinct = Sequent( antecedent.distinct, succedent.distinct ) def isSubMultisetOf[B >: A]( other: Sequent[B] ) = ( this diff other ).isEmpty /** * @param other Another Sequent. * @return True iff other contains this pair of sets. */ def isSubsetOf[B >: A]( other: Sequent[B] ) = ( this.distinct diff other.distinct ).isEmpty def isTaut: Boolean = antecedent intersect succedent nonEmpty /** * * @return The sequent in tuple form. */ def toTuple = ( antecedent, succedent ) /** * Adds an element to the antecedent. New elements are always outermost, i.e. on the very left. * * @param e An element of type B > A * @return The sequent with e added to the antecedent */ def +:[B >: A]( e: B ): Sequent[B] = copy( antecedent = e +: this.antecedent ) /** * Adds a sequent of elements to the antecedent. New elements are always outermost, i.e. on the very left. * * @param es A collection of elements of type B > A. * @return The sequent with es added to the antecedent. */ def ++:[B >: A]( es: Traversable[B] ): Sequent[B] = es.foldRight[Sequent[B]]( this )( _ +: _ ) /** * Adds an element to the succedent. New elements are always outermost, i.e. on the very right. * * @param e An element of type B > A * @return The sequent with e added to the succedent */ def :+[B >: A]( e: B ): Sequent[B] = copy( succedent = this.succedent :+ e ) /** * Adds a sequence of elements to the succedent. New elements are always outermost, i.e. on the very right. * * @param es A collection of elements of type B > A. * @return The sequent with es added to the succedent. */ def :++[B >: A]( es: Traversable[B] ): Sequent[B] = es.foldLeft[Sequent[B]]( this )( _ :+ _ ) def ++[B >: A]( that: Sequent[B] ) = Sequent( this.antecedent ++ that.antecedent, this.succedent ++ that.succedent ) def removeFromAntecedent[B]( e: B ) = Sequent( antecedent filterNot ( _ == e ), succedent ) def removeFromSuccedent[B]( e: B ) = Sequent( antecedent, succedent filterNot ( _ == e ) ) /** * Maps a function over both cedents * * @param f A function of type A => B * @tparam B The return type of f * @return The sequent of type B that results from mapping f over both cedents. 
*/ def map[B]( f: ( A ) => B ): Sequent[B] = this map ( f, f ) def flatMap[B]( f: A => TraversableOnce[B] ): Sequent[B] = flatMap( f, f ) def collect[B]( f: PartialFunction[A, B] ): Sequent[B] = Sequent( antecedent collect f, succedent collect f ) /** * Maps two functions over the antecedent and succedent, respectively. * * @param f The function to map over the antecedent. * @param g The function to map over the succedent. * @tparam B The return type of f and g. * @return The sequent of type B that results from mapping f and g over the antecedent and succedent, respectively. */ def map[B]( f: ( A ) => B, g: ( A ) => B ) = Sequent( antecedent map f, succedent map g ) def flatMap[B]( f: A => TraversableOnce[B], g: A => TraversableOnce[B] ): Sequent[B] = Sequent( antecedent flatMap f, succedent flatMap g ) /** * The sub-sequent of elements satisfying some predicate. * * @param p A function of type A => Boolean. * @return The sequent consisting of only those elements satisfying p. */ def filter( p: A => Boolean ): Sequent[A] = Sequent( antecedent filter p, succedent filter p ) /** * The sub-sequent of elements not satisfying some predicate. * * @param p A function of type A => Boolean. * @return The sequent consisting of only those elements not satisfying p. */ def filterNot( p: A => Boolean ): Sequent[A] = this filter ( !p( _ ) ) /** * The number of elements in the sequent. * * @return */ def length = antecedent.length + succedent.length /** * Synonym for length. * * @return */ def size = length /** * A pair consisting of the lengths of the cedents. * * @return */ def lengths = ( antecedent.length, succedent.length ) /** * Synonym for lengths. * * @return */ def sizes = lengths def sorted[B >: A]( implicit ordering: Ordering[B] ) = Sequent( antecedent.sorted( ordering ), succedent.sorted( ordering ) ) def sortBy[B]( f: A => B )( implicit ord: Ordering[B] ): Sequent[A] = sorted( ord on f ) /** * Returns true iff the sequent contains some element in either cedent. * * @param el * @tparam B * @return */ def contains[B]( el: B ): Boolean = elements contains el def cedent( polarity: Polarity ) = polarity match { case Positive => succedent case Negative => antecedent } def contains[B]( el: B, polarity: Polarity ): Boolean = cedent( polarity ).contains( el ) /** * Returns the element at some SequentIndex. * * @param i A SequentIndex, i.e. Ant(k) or Suc(k) * @return The k-th element of the antecedent or succedent, depending on the type of i. */ def apply( i: SequentIndex ): A = { try { i match { case Ant( k ) => antecedent( k ) case Suc( k ) => succedent( k ) } } catch { case _: IndexOutOfBoundsException => throw new IndexOutOfBoundsException( s"Sequent $this not defined at index $i." ) } } def apply( is: Seq[SequentIndex] ): Seq[A] = is map this.apply /** * Tests whether the sequent is defined at the supplied SequentIndex. * * @param i * @return */ def isDefinedAt( i: SequentIndex ): Boolean = i match { case Ant( k ) => antecedent.isDefinedAt( k ) case Suc( k ) => succedent.isDefinedAt( k ) } /** * Returns the range of indices of the sequent as a sequence. * * @return */ def indices: Vector[SequentIndex] = indicesSequent.elements /** * Returns the range of indices of the sequent as a sequent. * * @return */ def indicesSequent: Sequent[SequentIndex] = Sequent( sizes._1, sizes._2 ) /** * Returns the list of indices of elements satisfying some predicate. * * @param p A function of type A => Boolean. 
* @return */ def indicesWhere( p: A => Boolean ): Vector[SequentIndex] = indices filter { i => p( this( i ) ) } def indicesWherePol( p: A => Boolean, pol: Polarity ): Vector[SequentIndex] = indices filter { i => ( i.polarity == pol ) && p( this( i ) ) } /** * Focuses on one element of the sequent, i.e. returns element at index and the rest of the sequent. * * @param i A SequentIndex. * @return A pair consisting of this(i) and the rest of this. */ def focus( i: SequentIndex ): ( A, Sequent[A] ) = { def listFocus( xs: Vector[A] )( i: Int ): ( A, Vector[A] ) = ( xs( i ), xs.take( i ) ++ xs.drop( i + 1 ) ) i match { case Ant( k ) => val ( x, antNew ) = listFocus( antecedent )( k ) ( x, new Sequent( antNew, succedent ) ) case Suc( k ) => val ( x, sucNew ) = listFocus( succedent )( k ) ( x, new Sequent( antecedent, sucNew ) ) } } def delete( i: SequentIndex ): Sequent[A] = delete( Seq( i ) ) def delete( is: Seq[SequentIndex] ): Sequent[A] = zipWithIndex filterNot { is contains _._2 } map { _._1 } def delete( is: SequentIndex* )( implicit d: DummyImplicit ): Sequent[A] = delete( is ) def zipWithIndex: Sequent[( A, SequentIndex )] = Sequent( antecedent.zipWithIndex.map { case ( a, i ) => a -> Ant( i ) }, succedent.zipWithIndex.map { case ( b, i ) => b -> Suc( i ) } ) def find( pred: A => Boolean ): Option[SequentIndex] = indicesWhere( pred ).headOption def updated[B >: A]( index: SequentIndex, elem: B ): Sequent[B] = index match { case Ant( i ) => Sequent( antecedent.updated( i, elem ), succedent ) case Suc( j ) => Sequent( antecedent, succedent.updated( j, elem ) ) } def indexOfOption[B >: A]( elem: B ): Option[SequentIndex] = find( _ == elem ) def indexOf[B >: A]( elem: B ): SequentIndex = indexOfOption( elem ) get def indexOfPol[B >: A]( elem: B, polarity: Polarity ): SequentIndex = SequentIndex( polarity, cedent( polarity ).indexOf( elem ) ) def indexOfInAnt[B >: A]( elem: B ): SequentIndex = indexOfPol( elem, Polarity.InAntecedent ) def indexOfInSuc[B >: A]( elem: B ): SequentIndex = indexOfPol( elem, Polarity.InSuccedent ) def indexOfPolOption[B >: A]( elem: B, pol: Polarity ): Option[SequentIndex] = cedent( pol ).indexOf( elem ) match { case -1 => None case idx => Some( SequentIndex( pol, idx ) ) } def swapped: Sequent[A] = Sequent( succedent, antecedent ) def exists( p: A => Boolean ): Boolean = antecedent.exists( p ) || succedent.exists( p ) def forall( p: A => Boolean ): Boolean = antecedent.forall( p ) && succedent.forall( p ) def zip[B]( that: Sequent[B] ): Sequent[( A, B )] = Sequent( this.antecedent zip that.antecedent, this.succedent zip that.succedent ) def replaceAt[B >: A]( i: SequentIndex, el: B ) = delete( i ).insertAt( i, el ) def insertAt[B >: A]( i: SequentIndex, el: B ) = i match { case Ant( j ) => Sequent( antecedent.take( j ) ++ Seq( el ) ++ antecedent.drop( j ), succedent ) case Suc( j ) => Sequent( antecedent, succedent.take( j ) ++ Seq( el ) ++ succedent.drop( j ) ) } def foreach[U]( f: A => U ): Unit = { antecedent foreach f succedent foreach f } def withFilter( p: A => Boolean ): Sequent[A] = filter( p ) def groupBy[B]( f: A => B ): Sequent[( B, Vector[A] )] = Sequent( antecedent groupBy f toVector, succedent groupBy f toVector ) } object Sequent { def apply[A](): Sequent[A] = Sequent( Vector(), Vector() ) def apply[A]( ant: Traversable[A], suc: Traversable[A] ): Sequent[A] = Sequent( ant.toVector, suc.toVector ) def apply[A]( polarizedElements: Traversable[( A, Polarity )] ): Sequent[A] = { val ( ant, suc ) = polarizedElements.view.partition( _._2.inAnt ) Sequent( 
ant.map( _._1 ), suc.map( _._1 ) ) } /** * Returns a generic sequent of sizes (m, n): Ant(0),…,Ant(m-1) :- Suc(0),…,Suc(n-1) */ def apply( m: Int, n: Int ): Sequent[SequentIndex] = ( 0 until m ).map { Ant } ++: Sequent() :++ ( 0 until n ).map { Suc } implicit val SequentFunctor = new Functor[Sequent] { def map[A, B]( fa: Sequent[A] )( f: A => B ): Sequent[B] = fa.map( f ) } implicit def SequentMonoid[A] = new Monoid[Sequent[A]] { override def empty = Sequent() override def combine( s1: Sequent[A], s2: Sequent[A] ): Sequent[A] = s1 ++ s2 } }
gebner/gapt
core/src/main/scala/at/logic/gapt/proofs/sequents.scala
Scala
gpl-3.0
15,594
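To make the Sequent API above concrete, a small sketch that builds a sequent, adds formulas to each cedent, and addresses elements by index (the String element type is only for illustration; real code would use formulas):

import at.logic.gapt.proofs.{Ant, Sequent, Suc}

object SequentExample extends App {
  // a1, a2 :- b1
  val s: Sequent[String] = Sequent(Vector("a1", "a2"), Vector("b1"))

  // +: prepends to the antecedent, :+ appends to the succedent
  val s2 = "a0" +: s
  val s3 = s2 :+ "b2"

  println(s3(Ant(0)))       // a0
  println(s3(Suc(1)))       // b2
  println(s3.indices)       // Vector(Ant(0), Ant(1), Ant(2), Suc(0), Suc(1))
  println(s3.map(_.length)) // the same shape, with string lengths as elements
}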
package org.apache.flink.contrib.tensorflow.graphs

import org.tensorflow.Graph

/**
 * Abstract loader to inspect and load a graph.
 */
trait GraphLoader {

  /**
   * Load the graph.
   */
  def load(): Graph
}
cookieai/flink-tensorflow
flink-tensorflow/src/main/scala/org/apache/flink/contrib/tensorflow/graphs/GraphLoader.scala
Scala
apache-2.0
209
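A hedged sketch of one possible implementer of the GraphLoader trait above, reading a frozen GraphDef from disk (loading from a file path is an assumption for illustration, not something flink-tensorflow prescribes):

import java.nio.file.{Files, Paths}

import org.apache.flink.contrib.tensorflow.graphs.GraphLoader
import org.tensorflow.Graph

class FileGraphLoader(path: String) extends GraphLoader {
  override def load(): Graph = {
    val graph = new Graph()
    // importGraphDef takes the serialized GraphDef protobuf bytes
    graph.importGraphDef(Files.readAllBytes(Paths.get(path)))
    graph
  }
}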
package com.arcusys.learn.models.request

import com.arcusys.learn.service.util.Parameter
import com.arcusys.valamis.model.SkipTake
import org.scalatra.ScalatraBase
import com.arcusys.learn.models.BaseCollectionRequest

object UserRequest extends BaseCollectionRequest {
  val UserId = "userID"
  val ResultAs = "resultAs"
  val CompanyId = "companyID"
  val OrgId = "orgId"
  val Scope = "scope"
  val ModuleId = "moduleId"
  val Available = "available"
  val IsOnlyPublished = "isOnlyPublished"
  val CertificateId = "certificateId"
  val WithOpenbadges = "withOpenBadges"

  def apply(scalatra: ScalatraBase) = new Model(scalatra)

  class Model(val scalatra: ScalatraBase) extends BaseCollectionFilteredRequestModel(scalatra) with OAuthModel {
    def requestedUserId = Parameter(UserId).intRequired

    def isUserIdRequest = Parameter(UserId).option.isDefined

    def skipTake = {
      val page = Parameter(Page).option
      val count = Parameter(Count).intOption
      if (page.isDefined && count.isDefined)
        Some(SkipTake(skip, count.get))
      else None
    }

    def orgId = Parameter(OrgId).longOption.filterNot(_ == -1)

    def moduleID = Parameter(ModuleId).intRequired

    def isShortResult = Parameter(ResultAs).option match {
      case Some(value) => value == "short"
      case None => true
    }

    def available = Parameter(Available).booleanOption match {
      case Some(value) => value
      case None => false
    }

    def scope = Parameter(Scope).longOption

    def isOnlyPublished = Parameter(IsOnlyPublished).booleanOption match {
      case Some(value) => value
      case None => true
    }

    def certificateId = Parameter(CertificateId).intRequired

    def withOpenBadges = Parameter(WithOpenbadges).booleanOption match {
      case Some(value) => value
      case None => false
    }
  }
}
ViLPy/Valamis
learn-portlet/src/main/scala/com/arcusys/learn/models/request/UserRequest.scala
Scala
lgpl-3.0
1,882
/*
 * Copyright (C) 2015 Stratio (http://stratio.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.stratio.sparta.sdk

import java.io.{Serializable => JSerializable}

import org.apache.spark.sql.Row
import org.apache.spark.sql.types.{StructField, StringType, StructType}
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{Matchers, WordSpec}

import com.stratio.sparta.sdk.test.OperatorEntityCountMock

@RunWith(classOf[JUnitRunner])
class OperatorEntityCountTest extends WordSpec with Matchers {

  "EntityCount" should {
    val props = Map(
      "inputField" -> "inputField".asInstanceOf[JSerializable],
      "split" -> ",".asInstanceOf[JSerializable])
    val schema = StructType(Seq(StructField("inputField", StringType)))
    val entityCount = new OperatorEntityCountMock("op1", schema, props)
    val inputFields = Row("hello,bye")

    "Return the associated precision name" in {
      val expected = Option(Seq("hello", "bye"))
      val result = entityCount.processMap(inputFields)
      result should be(expected)
    }

    "Return empty list" in {
      val expected = None
      val result = entityCount.processMap(Row())
      result should be(expected)
    }
  }
}
danielcsant/sparta
sdk/src/test/scala/com/stratio/sparta/sdk/OperatorEntityCountTest.scala
Scala
apache-2.0
1,758
package processes.freeMonads.vanillaScala.multiple

import scala.concurrent.Future
import play.api.mvc.AnyContent
import play.api.mvc.Request
import play.api.mvc.Result
import processes.PatchAssignment
import processes.Services
import processes.freeMonads.Id
import processes.freeMonads.multiple.HappyFlowOnlyProgramParts
import processes.freeMonads.multiple.HappyFlowOnlyProgramRunner
import processes.freeMonads.vanillaScala.MultipleMachinery

class HappyFlowOnly(protected val services: Services) extends PatchAssignment
  with MultipleMachinery
  with HappyFlowOnlyProgramParts
  with HappyFlowOnlyProgramRunner { self =>

  protected def handlePatchRequest(id: String, request: Request[AnyContent]): Future[Result] = {
    implicit val programType = ProgramType[Json +: Utilities +: Store +: Nil]

    val patchProgram = for {
      json          <- ParseJson(request)
      newProfile    <- JsonToProfile(json)
      oldProfile    <- GetProfileById(id)
      mergedProfile <- MergeProfile(oldProfile, newProfile)
      _             <- UpdateProfile(id, mergedProfile)
    } yield results.noContent

    patchProgram.run(patchProgramRunner).map(_.merge)
  }

  val patchProgramRunner = {
    val storeRunner = StoreRunner
    val utilitiesRunner = UtilitiesRunner andThen IdToResultBranch andThen ResultBranchToHttpResult
    val jsonRunner = JsonRunner andThen ResultBranchToHttpResult

    storeRunner or utilitiesRunner or jsonRunner
  }

  object JsonRunner extends (Json ~> ResultBranch) {
    def apply[A](ja: Json[A]) = jsonProgramRunner(ja)
  }

  object StoreRunner extends (Store ~> HttpResult) {
    def apply[A](sa: Store[A]) = storeProgramRunner(sa)
  }

  object UtilitiesRunner extends (Utilities ~> Id) {
    def apply[A](ua: Utilities[A]) = utilitiesProgramRunner(ua)
  }

  object ResultBranchToHttpResult extends (ResultBranch ~> HttpResult) {
    def apply[A](ra: ResultBranch[A]) = resultBranchToHttpResult(ra)
  }

  object IdToResultBranch extends (Id ~> ResultBranch) {
    def apply[A](ia: Id[A]) = idToResultBranch(ia)
  }
}
EECOLOR/scala-clean-code-patterns
src/main/scala/processes/freeMonads/vanillaScala/multiple/HappyFlowOnly.scala
Scala
mit
2,045
package smtlib package theories package experimental import trees.Terms._ import Strings._ import Ints.NumeralLit import org.scalatest.funsuite.AnyFunSuite import org.scalatest.matchers.should.Matchers class StringsTests extends AnyFunSuite with Matchers { override def suiteName = "Strings theory test suite" test("String sort correctly constructed and extracted") { StringSort() match { case StringSort() => assert(true) case _ => assert(false) } StringSort() match { case FixedSizeBitVectors.BitVectorSort(n) if n == 14 => assert(false) case FixedSizeBitVectors.BitVectorSort(n) if n == 32 => assert(false) case Ints.IntSort() => assert(false) case Reals.RealSort() => assert(false) case StringSort() => assert(true) case _ => assert(false) } } test("literals are correctly constructed and extracted") { val l1 = StringLit("abc") l1 match { case StringLit(n) => assert(n === "abc") case _ => assert(false) } val l2 = StringLit("") l2 match { case StringLit(n) => assert(n === "") case _ => assert(false) } val l3 = StringLit("with space") l3 match { case StringLit(n) => assert(n === "with space") case _ => assert(false) } } test("Length is correctly constructed and extracted") { val l1 = Length(StringLit("abcd")) l1 match { case Length(StringLit("abcd")) => assert(true) case _ => assert(false) } val l2 = Length(StringLit("aaaa")) l2 match { case Length(StringLit("aaaa")) => assert(true) case _ => assert(false) } } test("Concat is correctly constructed and extracted") { val c1 = Concat(StringLit("ab"), StringLit("cd")) c1 match { case Concat(StringLit("ab"), StringLit("cd")) => assert(true) case _ => assert(false) } val c2 = Concat(StringLit("ab"), StringLit("cd"), StringLit("ef")) c2 match { case Concat(StringLit("ab"), StringLit("cd"), StringLit("ef")) => assert(true) case _ => assert(false) } val c3 = Concat(StringLit("ab"), StringLit("cd"), StringLit("ef")) c3 match { case Concat(StringLit("ab")) => assert(false) case Concat(StringLit("ab"), StringLit("cd")) => assert(false) case Concat(StringLit("ab"), StringLit("cd"), StringLit("ef")) => assert(true) case _ => assert(false) } val c4 = Concat(StringLit("ab"), StringLit("cd"), StringLit("ef")) c4 match { case Concat(ts@_*) => { assert(ts(0) === StringLit("ab")) assert(ts(1) === StringLit("cd")) assert(ts(2) === StringLit("ef")) } case _ => assert(false) } } test("At is correctly constructed and extracted") { val a = At(StringLit("xxx"), NumeralLit(2)) a match { case At(StringLit("xxx"), NumeralLit(two)) => assert(two === 2) case _ => assert(false) } } test("Substring is correctly constructed and extracted") { val s = Substring(StringLit("abcdef"), NumeralLit(2), NumeralLit(5)) s match { case Substring(StringLit("abcdef"), NumeralLit(two), NumeralLit(five)) => { assert(two === 2) assert(five === 5) } case _ => assert(false) } } test("smtlib string format") { import parser.Parser implicit class TestParse(s: String) { def shouldParse(f: PartialFunction[Term, Any]) = { val term = Parser.fromString(s).parseTerm if(f.isDefinedAt(term)) f(term) else { sys.error("Term " + s + " wrongly parsed as " + term) } } def shouldParseTo(p: Term) = { Parser.fromString(s).parseTerm should equal(p) } } "\\"abc\\"" shouldParseTo StringLit("abc") "(str.++ \\"a\\" \\"bc\\" )" shouldParseTo Concat(StringLit("a"), StringLit("bc")) "(str.++ \\"a\\" \\"bc\\" \\"def\\" )" shouldParseTo Concat(StringLit("a"), StringLit("bc"), StringLit("def")) "(str.len \\"abcd\\")" shouldParseTo Length(StringLit("abcd")) "(str.at \\"abcd\\" 1)" shouldParseTo At(StringLit("abcd"), NumeralLit(1)) 
"(str.substr \\"abcdef\\" 2 5)" shouldParseTo Substring(StringLit("abcdef"), NumeralLit(2), NumeralLit(5)) } }
regb/scala-smtlib
src/test/scala/smtlib/theories/experimental/StringsTests.scala
Scala
mit
4,264
package gdg.blaze

import gdg.blaze.codec.PlainCodec
import org.apache.spark.streaming.dstream.DStream

trait Plugin extends Serializable

trait Filter extends Plugin with ((DStream[Message]) => DStream[Message])

trait Input extends Plugin with (() => DStream[Message])

trait CodecFactory[T <: Codec] extends ((PluginConfig) => T)

trait Codec extends Plugin {
  def encode(message: Message): String
  def decode(str: String): Traversable[Message]
}

trait Output extends Plugin with (DStream[Message] => Unit)

trait PluginFactory[T <: Plugin] extends ((PluginConfig, BlazeContext) => T)

case class BasicConfig(var codec: Codec = PlainCodec.single)

abstract class BasicInput(config: BasicConfig) extends Input {
  override def apply(): DStream[Message] = {
    input().flatMap(config.codec.decode)
  }

  def input(): DStream[String]
}

abstract class BasicFilter extends Filter {
  val filter: MessageFilter

  override def apply(dStream: DStream[Message]): DStream[Message] = {
    dStream.flatMap { msg =>
      if (filter(msg)) {
        transform(msg)
      } else {
        Some(msg)
      }
    }
  }

  def transform(msg: Message): Traversable[Message]
}

trait FilterFunction extends ((Message) => Boolean) with Serializable

class MessageFilter(filters: Seq[FilterFunction] = Seq()) extends ((Message) => Boolean) with Serializable {
  def apply(msg: Message): Boolean = {
    for (f <- filters) {
      if (!f.apply(msg)) {
        return false
      }
    }
    true
  }
}
micahrupersburg/blaze-of-glory
src/main/scala/gdg/blaze/Plugin.scala
Scala
apache-2.0
1,485
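As a sketch of how the pieces above compose, a trivial filter built on BasicFilter that accepts every message and emits it unchanged (Message is defined elsewhere in the gdg.blaze project and its fields are not shown here, so the transform deliberately does not touch them):

import gdg.blaze.{BasicFilter, FilterFunction, Message, MessageFilter}

// Accept everything; a real FilterFunction would inspect the Message.
object AcceptAll extends FilterFunction {
  def apply(msg: Message): Boolean = true
}

class PassThroughFilter extends BasicFilter {
  val filter = new MessageFilter(Seq(AcceptAll))

  // Matching messages are emitted unchanged; BasicFilter already forwards non-matching ones.
  def transform(msg: Message): Traversable[Message] = Seq(msg)
}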
package pureconfig.module

import org.joda.time.format.{DateTimeFormat, DateTimeFormatter}
import org.joda.time.{DateTimeZone, Duration, Instant, Interval}
import pureconfig.ConfigConvert.{catchReadError, viaNonEmptyString}
import pureconfig.{ConfigConvert, ConfigReader}

package object joda {
  implicit def instantConfigConvert: ConfigConvert[Instant] =
    ConfigConvert[Long].xmap(new Instant(_), _.getMillis)

  implicit def intervalConfigConvert: ConfigConvert[Interval] =
    viaNonEmptyString[Interval](catchReadError(Interval.parseWithOffset), _.toString)

  implicit def durationConfigConvert: ConfigConvert[Duration] =
    viaNonEmptyString[Duration](catchReadError(Duration.parse), _.toString)

  implicit def dateTimeFormatterConfigConvert: ConfigReader[DateTimeFormatter] =
    ConfigReader.fromNonEmptyString[DateTimeFormatter](catchReadError(DateTimeFormat.forPattern))

  implicit def dateTimeZoneConfigConvert: ConfigConvert[DateTimeZone] =
    viaNonEmptyString[DateTimeZone](catchReadError(DateTimeZone.forID), _.getID)
}
melrief/pureconfig
modules/joda/src/main/scala/pureconfig/module/joda/package.scala
Scala
mpl-2.0
1,044
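A usage sketch for the converters above. The loading entry point has changed across pureconfig versions; this assumes a version of the library with the classic loadConfig call and automatic derivation, and the case class and config key are invented:

import com.typesafe.config.ConfigFactory
import org.joda.time.Duration
import pureconfig.module.joda._

case class RetryConf(interval: Duration)

object JodaConfigExample extends App {
  val conf = ConfigFactory.parseString("interval = PT30S")

  // durationConfigConvert parses the ISO-8601 string via Joda's Duration.parse
  println(pureconfig.loadConfig[RetryConf](conf))
}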
/*
 * Copyright 2012 Comcast Cable Communications Management, LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.comcast.money.core.async

import com.typesafe.config.Config

trait ConfigurableNotificationHandler extends AsyncNotificationHandler {
  def configure(config: Config): Unit
}
Comcast/money
money-core/src/main/scala/com/comcast/money/core/async/ConfigurableNotificationHandler.scala
Scala
apache-2.0
816
/* * Copyright (c) 2016 SnappyData, Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you * may not use this file except in compliance with the License. You * may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. See accompanying * LICENSE file. */ package org.apache.spark.sql import java.util.TimeZone import com.pivotal.gemfirexd.internal.engine.db.FabricDatabase import io.snappydata.benchmark.snappy.{SnappyAdapter, SnappyTPCH, TPCH, TPCH_Snappy} import io.snappydata.{PlanTest, SnappyFunSuite} import org.scalatest.BeforeAndAfterEach import org.apache.spark.sql.catalyst.TableIdentifier import org.apache.spark.sql.catalyst.analysis.UnresolvedRelation import org.apache.spark.sql.catalyst.expressions.SubqueryExpression import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Sort} import org.apache.spark.util.Benchmark class IndexTest extends SnappyFunSuite with PlanTest with BeforeAndAfterEach { var existingSkipSPSCompile = false override def beforeAll(): Unit = { System.setProperty("org.codehaus.janino.source_debugging.enable", "true") System.setProperty("spark.sql.codegen.comments", "true") System.setProperty("spark.testing", "true") existingSkipSPSCompile = FabricDatabase.SKIP_SPS_PRECOMPILE FabricDatabase.SKIP_SPS_PRECOMPILE = true super.beforeAll() } override def afterAll(): Unit = { System.clearProperty("org.codehaus.janino.source_debugging.enable") System.clearProperty("spark.sql.codegen.comments") System.clearProperty("spark.testing") System.clearProperty("DISABLE_PARTITION_PRUNING") FabricDatabase.SKIP_SPS_PRECOMPILE = existingSkipSPSCompile super.afterAll() } /* test("dd") { // scalastyle:off println val toks = Seq("[dd]", "[dd1]", "date '[DATE]'", "date '[DATE]' + interval '1' year", "[Quantity]", "[dd2]") val args = Seq("y", "1-1-1999", "1", "zz") val newArgs = toks.zipWithIndex.sliding(2).flatMap(_.toList match { case (l, i) :: (r, _) :: Nil if l.indexOf("date '[DATE]'") >= 0 && r.indexOf("date '[DATE]' ") >= 0 => Seq(args(i), args(i)) case (_, i) :: _ if i < args.length => Seq(args(i)) case x => Seq.empty }).toList def sideBySide(left: Seq[String], right: Seq[String]): Seq[String] = { val maxLeftSize = left.map(_.length).max val leftPadded = left ++ Seq.fill(math.max(right.size - left.size, 0))(" ") val rightPadded = right ++ Seq.fill(math.max(left.size - right.size, 0))(" ") leftPadded.zip(rightPadded).map { case (l, r) => l + (" " * ((maxLeftSize - l.length) + 3)) + r } } if(toks.length != newArgs.length) { println(sideBySide(toks, newArgs).mkString("\\n")) } println(newArgs) // scalastyle:on println } */ test("tpch queries") { // scalastyle:off println val qryProvider = new TPCH with SnappyAdapter val queries = Array("1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "16", "17", "18", "19", "20", "21", "22") TPCHUtils.createAndLoadTables(snc, true) val existing = snc.getConf(io.snappydata.Property.EnableExperimentalFeatures.name) snc.setConf(io.snappydata.Property.EnableExperimentalFeatures.name, "true") for ((q, i) <- queries.zipWithIndex) { val qNum = i + 1 val (expectedAnswer, _) = qryProvider.execute(qNum, str => { snc.sql(str) }) val (newAnswer, df) 
= TPCH_Snappy.queryExecution(q, snc, false, false) val isSorted = df.logicalPlan.collect { case s: Sort => s }.nonEmpty QueryTest.sameRows(expectedAnswer, newAnswer, isSorted).map { results => s""" |Results do not match for query: $qNum |Timezone: ${TimeZone.getDefault} |Timezone Env: ${sys.env.getOrElse("TZ", "")} | |${df.queryExecution} |== Results == |$results """.stripMargin } println(s"Done $qNum") } snc.setConf(io.snappydata.Property.EnableExperimentalFeatures.name, existing) } ignore("Benchmark tpch") { try { val queries = Array("1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "16", "17", "18", "19", "20", "21", "22") sc(c => c.set("spark.local.dir", "/data/temp")) TPCHUtils.createAndLoadTables(snc, true) snc.sql( s"""CREATE INDEX idx_orders_cust ON orders(o_custkey) options (COLOCATE_WITH 'customer') """) snc.sql( s"""CREATE INDEX idx_lineitem_part ON lineitem(l_partkey) options (COLOCATE_WITH 'part') """) val tables = Seq("nation", "region", "supplier", "customer", "orders", "lineitem", "part", "partsupp") val tableSizes = tables.map { tableName => (tableName, snc.table(tableName).count()) }.toMap tableSizes.foreach(println) runBenchmark("select o_orderkey from orders where o_orderkey = 1", tableSizes, 2) runBenchmark("select o_orderkey from orders where o_orderkey = 32", tableSizes) runBenchmark("select o_orderkey from orders where o_orderkey = 801", tableSizes) runBenchmark("select o_orderkey from orders where o_orderkey = 1409", tableSizes) // queries.foreach(q => benchmark(q, tableSizes)) } finally { snc.sql(s"DROP INDEX if exists idx_orders_cust") snc.sql(s"DROP INDEX if exists idx_lineitem_part") } } private def togglePruning(onOff: Boolean) = System.setProperty("DISABLE_PARTITION_PRUNING", onOff.toString) def runBenchmark(queryString: String, tableSizes: Map[String, Long], numSecs: Int = 0): Unit = { // This is an indirect hack to estimate the size of each query's input by traversing the // logical plan and adding up the sizes of all tables that appear in the plan. Note that this // currently doesn't take WITH subqueries into account which might lead to fairly inaccurate // per-row processing time for those cases. 
val queryRelations = scala.collection.mutable.HashSet[String]() snc.sql(queryString).queryExecution.logical.map { case ur@UnresolvedRelation(t: TableIdentifier, _) => queryRelations.add(t.table.toLowerCase) case lp: LogicalPlan => lp.expressions.foreach { _ foreach { case subquery: SubqueryExpression => subquery.plan.foreach { case ur@UnresolvedRelation(t: TableIdentifier, _) => queryRelations.add(t.table.toLowerCase) case _ => } case _ => } } case _ => } val size = queryRelations.map(tableSizes.getOrElse(_, 0L)).sum import scala.concurrent.duration._ val b = new Benchmark(s"JoinOrder optimization", size, warmupTime = numSecs.seconds) b.addCase("WithOut Partition Pruning", prepare = () => togglePruning(true))(_ => snc.sql(queryString).collect().foreach(_ => ())) b.addCase("With Partition Pruning", prepare = () => togglePruning(false)) (_ => snc.sql(queryString).collect().foreach(_ => ())) b.run() } def benchmark(qNum: String, tableSizes: Map[String, Long]): Unit = { val qryProvider = new TPCH with SnappyAdapter val query = qNum.toInt def executor(str: String) = snc.sql(str) val size = qryProvider.estimateSizes(query, tableSizes, executor) println(s"$qNum size $size") val b = new Benchmark(s"JoinOrder optimization", size, minNumIters = 10) def case1(): Unit = snc.setConf(io.snappydata.Property.EnableExperimentalFeatures.name, "false") def case2(): Unit = snc.setConf(io.snappydata.Property.EnableExperimentalFeatures.name, "true") def case3(): Unit = { snc.setConf(io.snappydata.Property.EnableExperimentalFeatures.name, "true") } def evalSnappyMods(genPlan: Boolean) = TPCH_Snappy.queryExecution(qNum, snc, useIndex = false, genPlan = genPlan)._1.foreach(_ => ()) def evalBaseTPCH = qryProvider.execute(query, executor)._1.foreach(_ => ()) // b.addCase(s"$qNum baseTPCH index = F", prepare = case1)(i => evalBaseTPCH) // b.addCase(s"$qNum baseTPCH joinOrder = T", prepare = case2)(i => evalBaseTPCH) b.addCase(s"$qNum without PartitionPruning", prepare = () => togglePruning(true))(_ => evalSnappyMods(false)) b.addCase(s"$qNum with PartitionPruning", prepare = () => togglePruning(false))(_ => evalSnappyMods(false)) /* b.addCase(s"$qNum snappyMods joinOrder = T", prepare = case2)(i => evalSnappyMods(false)) b.addCase(s"$qNum baseTPCH index = T", prepare = case3)(i => evalBaseTPCH) */ b.run() } test("northwind queries") { println("") // val sctx = sc(c => c.set("spark.sql.inMemoryColumnarStorage.batchSize", "40000")) // val snc = getOrCreate(sctx) // NorthWindDUnitTest.createAndLoadColumnTables(snc) // val s = "select distinct shipcountry from orders" // snc.sql(s).show() // NWQueries.assertJoin(snc, NWQueries.Q42, "Q42", 22, 1, classOf[LocalJoin]) /* Thread.sleep(1000 * 60 * 60) NWQueries.assertJoin(snc, NWQueries.Q42, "Q42", 22, 1, classOf[LocalJoin]) */ } }
vjr/snappydata
cluster/src/test/scala/org/apache/spark/sql/IndexTest.scala
Scala
apache-2.0
9,666
package net.andimiller.iql import scala.util.Try import cats.syntax._ import cats.implicits._ import cats._ import cats.data._ object Parser { import fastparse.all._ case class NamedFunction[T, V](f: T => V, name: String) extends (T => V) { def apply(t: T) = f(t) override def toString() = name } // basics val Whitespace = NamedFunction(" \\n".contains(_: Char), "Whitespace") val Digits = NamedFunction('0' to '9' contains (_: Char), "Digits") val StringChars = NamedFunction(!"\\"\\\\".contains(_: Char), "StringChars") val space = P(CharsWhile(Whitespace).?) val digits = P(CharsWhile(Digits)) val lowercase = P(CharIn('a' to 'z')) val uppercase = P(CharIn('A' to 'Z')) val letter = P(lowercase | uppercase) val equals = P("=") val is = P(":") val hexDigit = P(CharIn('0' to '9', 'a' to 'f', 'A' to 'F')) val unicodeEscape = P("u" ~ hexDigit ~ hexDigit ~ hexDigit ~ hexDigit) val escape = P("\\\\" ~ (CharIn("\\"/\\\\bfnrt") | unicodeEscape)) // data structures val array = P("[" ~/ Expression.? ~ ("," ~ space.? ~ Expression).rep.? ~ "]") .map { case (a, b) => a.toList ++ b.toList.flatten } .map(Ast.Array) // types val strChars = P(CharsWhile(StringChars)) val string = P(space ~ "\\"" ~/ (strChars | escape).rep.! ~ "\\"").map(Ast.Text) val wildcard = P("*") val squarebrackets = P("[" | "]") val referenceChars = P(letter | digits | wildcard | squarebrackets) val reference = P(&(".") ~/ ("." ~/ referenceChars.rep.!) ~/ ("." ~/ referenceChars.rep.!).rep).map { case (h, t) => Ast.Field.apply(NonEmptyList(h, t.toList)) } val outputReferenceChars = P(letter | digits) val outputReference = P(&(".") ~/ ("." ~/ outputReferenceChars.rep.!).rep).map(t => Ast.OutputField.apply(t.toList)) val number = P("-".? ~ digits ~ digits.rep ~ !".").!.map(s => Ast.Integer(Integer.parseInt(s.toString))) val float = P("-".? ~ P(digits ~ digits.rep) ~ "." ~ P(digits ~ digits.rep) ~ P("E" ~ "-".? ~ digits ~ digits.rep).?).!.map { s => Ast.Float(Try { s.toDouble }.getOrElse(0.0d)) } val boolean = P("true" | "false").!.map(_ match { case "true" => Ast.Bool(true) case "false" => Ast.Bool(false) }) // Nots val Notted = P("!" ~ space.? ~/ Expression).map(Ast.Not) // code val Expression: Parser[Ast.Expression] = P(Notted | number | float | string | reference | boolean | array | bracketedExpression) val OperatorExpression: Parser[Ast.Expression] = P(Expression ~/ space.? ~/ (("==" | "<" | ">" | "&&" | "||" | "^" | "in" | "+" | "|").! ~/ space.? ~/ Expression).rep(min = 1)) .map { case (l, exps) => exps.foldLeft(l) { case (acc, (operator, exp)) => operator match { case "==" => Ast.Equals(acc, exp) case "<" => Ast.LessThan(acc, exp) case ">" => Ast.MoreThan(acc, exp) case "&&" => Ast.AND(acc, exp) case "||" => Ast.OR(acc, exp) case "^" => Ast.XOR(acc, exp) case "in" => Ast.In(acc, exp) case "+" => Ast.Plus(acc, exp) case "|" => Ast.Coalesce(acc, exp) } } } val bracketedExpression: Parser[Ast.Expression] = P("(" ~/ OperatorExpression ~ ")") val toplevelExpression: Parser[Ast.Expression] = P(P(Expression ~ newline) | P(OperatorExpression ~ newline)) val function = P("required" | "int" | "bool" | "string").! // transforms and validation val assignment = P(outputReference ~ space.? ~ equals ~/ space.? ~ toplevelExpression) .map(Ast.Assignment.tupled) val validation = P(outputReference ~ space.? ~ is ~ space.? ~/ function) .map(Ast.Validation.tupled) // and let binds val let = P(P("let") ~ space.? ~ outputReference ~ equals ~/ space.? 
~ toplevelExpression).map(Ast.Let.tupled) // full programs val newline = P("\\n" | "\\r\\n" | "\\r" | "\\f" | End) val program = P(assignment.rep).map(t => Ast.Program(t.toList)) val validationProgram = P(validation ~ newline ~/ (validation ~/ newline).rep).map { case (v, vs) => Ast.VProgram(NonEmptyList(v, vs.toList)) } }
andimiller/integrated-query-language
core/shared/src/main/scala/net/andimiller/iql/Parser.scala
Scala
mit
4,246
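Given the fastparse definitions above, a small sketch of running the program parser over a two-assignment script (fastparse 0.x/1.x style .parse; the field names are invented and operator expressions are deliberately avoided to keep the example simple):

import fastparse.all._
import net.andimiller.iql.Parser

object IqlParserExample extends App {
  val script = ".out.name = .in.name\n.out.flag = true\n"

  Parser.program.parse(script) match {
    case Parsed.Success(program, index) => println(s"consumed $index chars: $program")
    case failure: Parsed.Failure        => println(s"parse error: $failure")
  }
}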
package chat.tox.antox.callbacks

import android.content.Context
import chat.tox.antox.data.State
import chat.tox.antox.wrapper.FriendInfo
import im.tox.tox4j.core.enums.ToxUserStatus

class AntoxOnUserStatusCallback(private var ctx: Context) {

  def friendStatus(friendInfo: FriendInfo, status: ToxUserStatus)(state: Unit): Unit = {
    val db = State.db
    db.updateContactStatus(friendInfo.key, status)
  }
}
wiiam/Antox
app/src/main/scala/chat/tox/antox/callbacks/AntoxOnUserStatusCallback.scala
Scala
gpl-3.0
414
package inloopio.indicator import inloopio.collection.ArrayList import inloopio.math.timeseries.Null import inloopio.math.timeseries.TBaseSer import inloopio.math.timeseries.TStampedMapBasedList import inloopio.math.timeseries.TVar import inloopio.math.indicator.Plot import java.awt.Color import scala.collection.immutable import scala.reflect.ClassTag /** * * @author Caoyuan Deng */ abstract class SpotIndicator(_baseSer: TBaseSer) extends Indicator(_baseSer) with inloopio.math.indicator.SpotIndicator { def this() = this(null) /** * @todo Also override existsFromHead and existsFromTail? */ override def exists(time: Long): Boolean = true override def computeFrom(fromTime: Long) { // do nothing } protected def compute(fromIdx: Int, size: Int) { // do nothing } def computeSpot(time: Long) { /** get baseIdx before preComputeFrom(), which may clear this data */ val baseIdx = baseSer.indexOfOccurredTime(time) computeSpot(time, baseIdx) } /** * @param time * @param baseIdx baseIdx may be < 0, means there is no timestamps for this * time yet, time could be future. */ protected def computeSpot(time: Long, baseIdx: Int) object STVar { def apply[V: ClassTag](): TVar[V] = new SpotTVar[V]("", TVar.Kind.Close) def apply[V: ClassTag](name: String): TVar[V] = new SpotTVar[V](name, TVar.Kind.Close) def apply[V: ClassTag](name: String, kind: TVar.Kind): TVar[V] = new SpotTVar[V](name, kind) } final protected class SpotTVar[V: ClassTag](var name: String, val kind: TVar.Kind) extends TVar[V] { addVar(this) def timestamps = SpotIndicator.this.timestamps var plot: Plot = Plot.None var layer = -1 // -1 means not set // @todo: timestamps may be null when go here, use lazy val as a quick fix now, shoule review it private lazy val colors = new TStampedMapBasedList[Color](timestamps) def getColor(idx: Int) = colors(idx) def setColor(idx: Int, color: Color) { colors(idx) = color } private var timeToValue = immutable.TreeMap[Long, V]() // must sort by time def values: ArrayList[V] = { throw new UnsupportedOperationException() } def put(time: Long, value: V): Boolean = { timeToValue += time -> value true } def put(time: Long, fromHeadOrTail: Boolean, value: V): Boolean = { throw new UnsupportedOperationException("Can only be accessed via time.") } def apply(time: Long): V = { if (!timeToValue.contains(time)) { computeSpot(time) } timeToValue.getOrElse(time, Null.value) } def apply(time: Long, fromHeadOrTail: Boolean): V = { throw new UnsupportedOperationException("Can only be accessed via time.") } def update(time: Long, value: V) { timeToValue += time -> value } override def apply(idx: Int): V = { throw new UnsupportedOperationException("Can only be accessed via time.") } override def update(idx: Int, value: V) { throw new UnsupportedOperationException("Can only be accessed via time.") } override def reset(idx: Int) { throw new UnsupportedOperationException("Can only be accessed via time.") } override def reset(time: Long) { timeToValue -= time } def timesIterator: Iterator[Long] = timeToValue.keysIterator def valuesIterator: Iterator[V] = timeToValue.valuesIterator } }
dcaoyuan/inloopio-libs
inloopio-indicator/src/main/scala/inloopio/indicator/SpotIndicator.scala
Scala
bsd-3-clause
3,445
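The abstract class above leaves only computeSpot(time, baseIdx) for concrete indicators to implement. The sketch below shows what such a subclass might look like; it assumes the inloopio-indicator and inloopio-math artifacts are on the classpath, the name ConstantSpotIndicator, the variable name "CONST", and the constant 42.0 are purely illustrative, and put(time, value) is assumed to be part of the TVar contract implemented by SpotTVar above.

package inloopio.indicator

import inloopio.math.timeseries.TBaseSer

// Hypothetical subclass: produces the same value for any queried time.
// Reading spot(someTime) triggers computeSpot lazily for times not yet computed.
class ConstantSpotIndicator(baseSer: TBaseSer) extends SpotIndicator(baseSer) {

  // STVar is the factory defined by SpotIndicator for time-keyed variables.
  val spot = STVar[Double]("CONST")

  protected def computeSpot(time: Long, baseIdx: Int) {
    // Store the computed value in the time-sorted map kept by SpotTVar,
    // so later spot(time) lookups return it instead of Null.value.
    spot.put(time, 42.0)
  }
}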
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.ml.linalg

import org.json4s.DefaultFormats
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods.{compact, parse => parseJson, render}

private[ml] object JsonMatrixConverter {

  /** Unique class name for identifying JSON object encoded by this class. */
  val className = "matrix"

  /**
   * Parses the JSON representation of a Matrix into a [[Matrix]].
   */
  def fromJson(json: String): Matrix = {
    implicit val formats = DefaultFormats
    val jValue = parseJson(json)
    (jValue \ "type").extract[Int] match {
      case 0 => // sparse
        val numRows = (jValue \ "numRows").extract[Int]
        val numCols = (jValue \ "numCols").extract[Int]
        val colPtrs = (jValue \ "colPtrs").extract[Seq[Int]].toArray
        val rowIndices = (jValue \ "rowIndices").extract[Seq[Int]].toArray
        val values = (jValue \ "values").extract[Seq[Double]].toArray
        val isTransposed = (jValue \ "isTransposed").extract[Boolean]
        new SparseMatrix(numRows, numCols, colPtrs, rowIndices, values, isTransposed)
      case 1 => // dense
        val numRows = (jValue \ "numRows").extract[Int]
        val numCols = (jValue \ "numCols").extract[Int]
        val values = (jValue \ "values").extract[Seq[Double]].toArray
        val isTransposed = (jValue \ "isTransposed").extract[Boolean]
        new DenseMatrix(numRows, numCols, values, isTransposed)
      case _ =>
        throw new IllegalArgumentException(s"Cannot parse $json into a Matrix.")
    }
  }

  /**
   * Converts the Matrix to a JSON string.
   */
  def toJson(m: Matrix): String = {
    m match {
      case SparseMatrix(numRows, numCols, colPtrs, rowIndices, values, isTransposed) =>
        val jValue = ("class" -> className) ~
          ("type" -> 0) ~
          ("numRows" -> numRows) ~
          ("numCols" -> numCols) ~
          ("colPtrs" -> colPtrs.toSeq) ~
          ("rowIndices" -> rowIndices.toSeq) ~
          ("values" -> values.toSeq) ~
          ("isTransposed" -> isTransposed)
        compact(render(jValue))

      case DenseMatrix(numRows, numCols, values, isTransposed) =>
        val jValue = ("class" -> className) ~
          ("type" -> 1) ~
          ("numRows" -> numRows) ~
          ("numCols" -> numCols) ~
          ("values" -> values.toSeq) ~
          ("isTransposed" -> isTransposed)
        compact(render(jValue))
    }
  }
}
bravo-zhang/spark
mllib/src/main/scala/org/apache/spark/ml/linalg/JsonMatrixConverter.scala
Scala
apache-2.0
3,188
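Because JsonMatrixConverter is private[ml], code exercising it has to live under the org.apache.spark.ml package. The round-trip sketch below is hypothetical (the object name and the 2x2 matrix are made up) and assumes spark-mllib is on the classpath; it compares shape and column-major contents via toArray rather than relying on Matrix equality.

package org.apache.spark.ml.linalg

// Hypothetical round-trip check for the converter above; not part of Spark itself.
object JsonMatrixRoundTrip {
  def main(args: Array[String]): Unit = {
    val dense = new DenseMatrix(2, 2, Array(1.0, 2.0, 3.0, 4.0))

    val json = JsonMatrixConverter.toJson(dense)   // e.g. {"class":"matrix","type":1,...}
    val parsed = JsonMatrixConverter.fromJson(json)

    // Compare shape and column-major contents of the original and parsed matrices.
    assert(parsed.numRows == dense.numRows && parsed.numCols == dense.numCols)
    assert(parsed.toArray.sameElements(dense.toArray))

    println(json)
  }
}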
package fr.neuville.lombok import com.intellij.codeInspection.LocalQuickFix import de.plushnikov.intellij.lombok.problem.ProblemBuilder sealed trait Problem { def message: String def fixes: Seq[LocalQuickFix] } case class Error(message: String, fixes: LocalQuickFix*) extends Problem case class Warning(message: String, fixes: LocalQuickFix*) extends Problem object Problem { def addToPbBuilder(pbb: ProblemBuilder)(pb: Problem): Unit = pb match { case Warning(msg, fixes @ _*) => pbb.addWarning(msg, fixes: _*) case Error(msg, fixes @ _*) => pbb.addError(msg, fixes: _*) } }
gneuvill/lombok-intellij-plugin
processor-core/src/main/scala/fr/neuville/lombok/Problem.scala
Scala
bsd-2-clause
604
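A hedged usage sketch for the Problem ADT above: it assumes the plugin's dependencies (IntelliJ's LocalQuickFix and the lombok-plugin ProblemBuilder) are on the classpath, and the object name, messages, and quick fix are invented for illustration. It shows how the curried addToPbBuilder partially applies to the builder and is then used as a Problem => Unit.

import com.intellij.codeInspection.LocalQuickFix
import de.plushnikov.intellij.lombok.problem.ProblemBuilder
import fr.neuville.lombok.{Error, Problem, Warning}

// Hypothetical validation step: accumulate problems, then replay them onto a builder.
object ProblemExample {
  def report(builder: ProblemBuilder, removeAnnotationFix: LocalQuickFix): Unit = {
    val problems: Seq[Problem] = Seq(
      Warning("annotation is redundant here", removeAnnotationFix),
      Error("conflicting Lombok annotations"))

    // addToPbBuilder(builder) eta-expands to a Problem => Unit for foreach.
    problems.foreach(Problem.addToPbBuilder(builder))
  }
}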
/* * Copyright 2001-2019 Artima, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.scalatest.matchers /** * Classes and traits supporting ScalaTest's matchers DSL. * * This package is released as part of the `scalatest-matchers-core` module. */ package object dsl
dotty-staging/scalatest
scalatest/src/main/scala/org/scalatest/matchers/dsl/package.scala
Scala
apache-2.0
799
/* Copyright 2012 Twitter, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.twitter.scalding.serialization import java.io.InputStream import java.io.OutputStream import java.io.Serializable import java.nio.ByteBuffer import org.apache.hadoop.io.serializer.{Serialization, Deserializer, Serializer, WritableSerialization} import com.esotericsoftware.kryo.Kryo import com.esotericsoftware.kryo.{Serializer => KSerializer} import com.esotericsoftware.kryo.io.{Input, Output} import cascading.kryo.KryoSerialization; import cascading.tuple.hadoop.TupleSerialization import cascading.tuple.hadoop.io.BufferedInputStream import scala.annotation.tailrec import scala.collection.immutable.ListMap import scala.collection.immutable.HashMap import com.twitter.scalding.DateRange import com.twitter.scalding.RichDate import com.twitter.scalding.Args import com.twitter.chill._ import org.objenesis.strategy.StdInstantiatorStrategy; class KryoHadoop extends KryoSerialization { /** TODO!!! * Deal with this issue. The problem is grouping by Kryo serialized * objects silently breaks the results. If Kryo gets in front of TupleSerialization * (and possibly Writable, unclear at this time), grouping is broken. * There are two issues here: * 1) Kryo objects not being compared properly. * 2) Kryo being used instead of cascading. * * We must identify each and fix these bugs. */ val highPrioritySerializations = List(new WritableSerialization, new TupleSerialization) override def accept(klass : Class[_]) = { highPrioritySerializations.forall { !_.accept(klass) } } override def newKryo() : Kryo = { val k = new Kryo { lazy val objSer = new ObjectSerializer[AnyRef] override def newDefaultSerializer(cls : Class[_]) : KSerializer[_] = { if(objSer.accepts(cls)) { objSer } else { super.newDefaultSerializer(cls) } } } k.setInstantiatorStrategy(new StdInstantiatorStrategy()); k } override def decorateKryo(newK : Kryo) { // These are scalding objects: newK.register(classOf[RichDate], new RichDateSerializer()) newK.register(classOf[DateRange], new DateRangeSerializer()) newK.register(classOf[Args], new ArgsSerializer) // Some of the monoids from Algebird that we use: newK.register(classOf[com.twitter.algebird.AveragedValue], new AveragedValueSerializer) newK.register(classOf[com.twitter.algebird.DecayedValue], new DecayedValueSerializer) newK.register(classOf[com.twitter.algebird.HyperLogLogMonoid], new HLLMonoidSerializer) newK.register(classOf[com.twitter.algebird.Moments], new MomentsSerializer) newK.addDefaultSerializer(classOf[com.twitter.algebird.HLL], new HLLSerializer) // Register all the chill serializers: KryoSerializer.registerAll(newK) //Add commonly used types with Fields serializer: registeredTypes.foreach { cls => newK.register(cls) } /** * Pipes can be swept up into closures inside of case classes. This can generally * be safely ignored. If the case class has a method that actually accesses something * in the job, you will get a null pointer exception, so it shouldn't cause data corruption. 
   * A more robust solution is to use Spark's closure cleaner approach on every object that
   * is serialized, but that's very expensive.
   */
    newK.addDefaultSerializer(classOf[cascading.pipe.Pipe], new SingletonSerializer(null))

    // keeping track of references is costly for memory, and often triggers OOM on Hadoop
    val useRefs = getConf.getBoolean("scalding.kryo.setreferences", false)
    newK.setReferences(useRefs)
  }

  // Types to pre-register.
  // TODO: this was cargo-culted from spark. We should actually measure to see the best
  // choices for the common use cases. Since Hadoop tells us the class we are deserializing,
  // the benefit of this is much less than in Spark.
  def registeredTypes : List[Class[_]] = {
    List(
      // Arrays
      Array(1), Array(1.0), Array(1.0f), Array(1L), Array(""), Array(("", "")),
      Array(new java.lang.Object), Array(1.toByte), Array(true), Array('c'),
      // Options and Either
      Some(1), Left(1), Right(1)
    ).map { _.getClass }
  }
}
AoJ/scalding
src/main/scala/com/twitter/scalding/serialization/KryoHadoop.scala
Scala
apache-2.0
4,750
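For context, a hypothetical job-setup sketch showing how a serialization like this is typically wired in: Hadoop's io.serializations property lists Serialization implementations in priority order, and scalding.kryo.setreferences is the flag read in decorateKryo above. The object name and the manual wiring are illustrative only; a real scalding Job registers these itself.

import org.apache.hadoop.conf.Configuration

// Hypothetical manual configuration; shown here only to make the property names concrete.
object KryoHadoopConfigSketch {
  def configure(conf: Configuration): Unit = {
    // Keep the high-priority Writable/Tuple serializations ahead of KryoHadoop,
    // matching the accept() guard above.
    conf.set("io.serializations",
      "org.apache.hadoop.io.serializer.WritableSerialization," +
        "cascading.tuple.hadoop.TupleSerialization," +
        "com.twitter.scalding.serialization.KryoHadoop")

    // Enable Kryo reference tracking (default false; costs memory and can trigger OOM).
    conf.setBoolean("scalding.kryo.setreferences", true)
  }
}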