Dataset columns:
  code       string  (lengths 5–1M)
  repo_name  string  (lengths 5–109)
  path       string  (lengths 6–208)
  language   string  (1 class)
  license    string  (15 classes)
  size       int64   (values 5–1M)
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package com.github.filosganga.play.predictionio

import play.api._
import io.prediction.Client

/**
 * @author Filippo De Luca - [email protected]
 */
class PredictionIoPlugin(app: Application) extends Plugin with HasApi {

  private lazy val cfg = app.configuration.getConfig("prediction")
    .getOrElse(throw app.configuration.reportError("prediction", "prediction is required"))

  private lazy val clientProvider = new ConfigClientProvider(cfg)

  private lazy val predictionIoApi = new Api {
    protected def withClient[T](f: (Client) => T): T =
      clientProvider.withClient(f)
  }

  def api: Api = predictionIoApi

  override def onStart() {
    // To init lazy field api
    Logger.info("PredictionIO Plugin started.")
  }

  override def onStop() {
    clientProvider.shutdown()
    Logger.info("PredictionIO Plugin stopped.")
  }
}

class ConfigClientProvider(cfg: Configuration) extends ClientProvider {

  private lazy val client: Client = initClient()

  private def initClient(): Client = new Client(
    cfg.getString("app-key").getOrElse(throw cfg.reportError("app-key", "app-key is required")),
    cfg.getString("uri").getOrElse(throw cfg.reportError("uri", "uri is required")),
    cfg.getInt("thread-limit").getOrElse(100)
  )

  def withClient[T](f: (Client) => T): T = {
    f(client)
  }

  def shutdown() {
    client.close()
  }
}
filosganga/play-predictionio
src/main/scala/com/github/filosganga/play/predictionio/PredictionIoPlugin.scala
Scala
apache-2.0
2,183
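For reference, a minimal usage sketch of the record above, assuming the Play 2.x plugin lookup API (`Application.plugin[T]`) and an implicit `play.api.Play.current` in scope; the methods exposed by `Api` are defined elsewhere in that repository.

import play.api.Play.current
import com.github.filosganga.play.predictionio.{Api, PredictionIoPlugin}

// Look up the registered plugin and obtain its lazily initialized Api facade.
val api: Option[Api] = current.plugin[PredictionIoPlugin].map(_.api)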
package TAPLcomp2.simplebool

import scala.util.parsing.combinator.{ImplicitConversions, PackratParsers}
import scala.util.parsing.combinator.syntactical.StandardTokenParsers

sealed trait Term
case class TmVar(i: String) extends Term
case class TmAbs(v: String, ty: Ty, t: Term) extends Term
case class TmApp(t1: Term, t2: Term) extends Term
case object TmTrue extends Term
case object TmFalse extends Term
case class TmIf(cond: Term, t1: Term, t2: Term) extends Term

sealed trait Ty
case class TyArr(t1: Ty, t2: Ty) extends Ty
case object TyBool extends Ty

object SimpleBoolParsers extends StandardTokenParsers with PackratParsers
  with ImplicitConversions {
  lexical.reserved += ("Bool", "true", "false", "if", "then", "else", "_")
  lexical.delimiters += ("(", ")", ";", "/", ".", ":", "->", "\\")

  // lower-case identifier
  lazy val lcid: PackratParser[String] = ident ^? { case id if id.charAt(0).isLower => id }
  // upper-case identifier
  lazy val ucid: PackratParser[String] = ident ^? { case id if id.charAt(0).isUpper => id }

  lazy val `type`: PackratParser[Ty] = arrowType
  lazy val arrowType: PackratParser[Ty] =
    (aType <~ "->") ~ arrowType ^^ { case t1 ~ t2 => TyArr(t1, t2) } ||| aType
  lazy val aType: PackratParser[Ty] =
    "(" ~> `type` <~ ")" |||
      "Bool" ^^ { _ => TyBool }

  lazy val term: PackratParser[Term] =
    appTerm |||
      ("\\" ~> lcid) ~ (":" ~> `type`) ~ ("." ~> term) ^^ { case v ~ ty ~ t => TmAbs(v, ty, t) } |||
      ("if" ~> term) ~ ("then" ~> term) ~ ("else" ~> term) ^^ { case t1 ~ t2 ~ t3 => TmIf(t1, t2, t3) }

  lazy val appTerm: PackratParser[Term] =
    (appTerm ~ aTerm) ^^ { case t1 ~ t2 => TmApp(t1, t2) } ||| aTerm

  lazy val aTerm: PackratParser[Term] =
    "(" ~> term <~ ")" |||
      lcid ^^ { i => TmVar(i) } |||
      "true" ^^ { _ => TmTrue } |||
      "false" ^^ { _ => TmFalse }

  def input(s: String) = phrase(term)(new lexical.Scanner(s)) match {
    case t if t.successful => t.get
    case t => sys.error(t.toString)
  }
}
hy-zhang/parser
Scala/Parser/src/TAPLcomp2/simplebool/parser.scala
Scala
bsd-3-clause
2,029
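A quick sketch of driving the parser above through its own `input` entry point; the input term follows the grammar as written (backslash abstraction, `if`/`then`/`else`).

object SimpleBoolParsersExample extends App {
  // "\x:Bool. if x then false else true" parses to
  // TmAbs("x", TyBool, TmIf(TmVar("x"), TmFalse, TmTrue))
  println(SimpleBoolParsers.input("\\x:Bool. if x then false else true"))
}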
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.flink.table.planner.plan.batch.sql

import org.apache.flink.api.scala._
import org.apache.flink.table.api.{SqlParserException, _}
import org.apache.flink.table.planner.utils.TableTestBase

import org.junit.{Before, Test}

class LegacyLimitTest extends TableTestBase {

  protected val util = batchTestUtil()

  @Before
  def setup(): Unit = {
    util.addTableSource[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
    val ddl =
      s"""
         |CREATE TABLE LimitTable (
         |  a int,
         |  b bigint,
         |  c string
         |) WITH (
         |  'connector.type' = 'TestLimitableTableSource',
         |  'is-bounded' = 'true'
         |)
       """.stripMargin
    util.tableEnv.executeSql(ddl)
  }

  @Test
  def testLimitWithoutOffset(): Unit = {
    util.verifyExecPlan("SELECT * FROM MyTable LIMIT 5")
  }

  @Test
  def testLimit0WithoutOffset(): Unit = {
    util.verifyExecPlan("SELECT * FROM MyTable LIMIT 0")
  }

  @Test(expected = classOf[SqlParserException])
  def testNegativeLimitWithoutOffset(): Unit = {
    util.verifyExecPlan("SELECT * FROM MyTable LIMIT -1")
  }

  @Test
  def testLimitWithOffset(): Unit = {
    util.verifyExecPlan("SELECT a, c FROM MyTable LIMIT 10 OFFSET 1")
  }

  @Test
  def testLimitWithOffset0(): Unit = {
    util.verifyExecPlan("SELECT a, c FROM MyTable LIMIT 10 OFFSET 0")
  }

  @Test
  def testLimit0WithOffset0(): Unit = {
    util.verifyExecPlan("SELECT a, c FROM MyTable LIMIT 0 OFFSET 0")
  }

  @Test
  def testLimit0WithOffset(): Unit = {
    util.verifyExecPlan("SELECT a, c FROM MyTable LIMIT 0 OFFSET 10")
  }

  @Test(expected = classOf[SqlParserException])
  def testLimitWithNegativeOffset(): Unit = {
    util.verifyExecPlan("SELECT a, c FROM MyTable LIMIT 10 OFFSET -1")
  }

  @Test
  def testFetchWithOffset(): Unit = {
    util.verifyExecPlan("SELECT a, c FROM MyTable OFFSET 10 ROWS FETCH NEXT 10 ROWS ONLY")
  }

  @Test
  def testFetchWithoutOffset(): Unit = {
    util.verifyExecPlan("SELECT a, c FROM MyTable FETCH FIRST 10 ROWS ONLY")
  }

  @Test
  def testFetch0WithoutOffset(): Unit = {
    util.verifyExecPlan("SELECT a, c FROM MyTable FETCH FIRST 0 ROWS ONLY")
  }

  @Test
  def testOnlyOffset(): Unit = {
    util.verifyExecPlan("SELECT a, c FROM MyTable OFFSET 10 ROWS")
  }

  @Test
  def testFetchWithLimitSource(): Unit = {
    val sqlQuery = "SELECT a, c FROM LimitTable FETCH FIRST 10 ROWS ONLY"
    util.verifyExecPlan(sqlQuery)
  }

  @Test
  def testOrderByWithLimitSource(): Unit = {
    val sqlQuery = "SELECT a, c FROM LimitTable ORDER BY c LIMIT 10"
    util.verifyExecPlan(sqlQuery)
  }

  @Test
  def testLimitWithLimitSource(): Unit = {
    val sqlQuery = "SELECT a, c FROM LimitTable LIMIT 10"
    util.verifyExecPlan(sqlQuery)
  }

  @Test
  def testLimitWithOffsetAndLimitSource(): Unit = {
    val sqlQuery = "SELECT a, c FROM LimitTable LIMIT 10 OFFSET 1"
    util.verifyExecPlan(sqlQuery)
  }

  @Test
  def testFetchWithOffsetAndLimitSource(): Unit = {
    val sqlQuery = "SELECT a, c FROM LimitTable OFFSET 10 ROWS FETCH NEXT 10 ROWS ONLY"
    util.verifyExecPlan(sqlQuery)
  }
}
tillrohrmann/flink
flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/plan/batch/sql/LegacyLimitTest.scala
Scala
apache-2.0
3,930
package io.getquill.idiom

import io.getquill.ast._
import io.getquill.util.Interleave
import io.getquill.util.Messages._

import scala.collection.mutable.ListBuffer

object StatementInterpolator {

  trait Tokenizer[T] {
    def token(v: T): Token
  }

  object Tokenizer {
    def apply[T](f: T => Token) = new Tokenizer[T] {
      def token(v: T) = f(v)
    }
    def withFallback[T](
      fallback: Tokenizer[T] => Tokenizer[T]
    )(pf: PartialFunction[T, Token]) =
      new Tokenizer[T] {
        private val stable = fallback(this)
        override def token(v: T) = pf.applyOrElse(v, stable.token)
      }
  }

  implicit class TokenImplicit[T](v: T)(implicit tokenizer: Tokenizer[T]) {
    def token = tokenizer.token(v)
  }

  implicit def stringTokenizer: Tokenizer[String] =
    Tokenizer[String] {
      case string => StringToken(string)
    }

  implicit def externalTokenizer(
    implicit
    tagTokenizer:  Tokenizer[Tag],
    liftTokenizer: Tokenizer[Lift]
  ): Tokenizer[External] =
    Tokenizer[External] {
      case tag: Tag   => tagTokenizer.token(tag)
      case lift: Lift => liftTokenizer.token(lift)
    }

  implicit def tagTokenizer: Tokenizer[Tag] =
    Tokenizer[Tag] {
      case tag: ScalarTag    => ScalarTagToken(tag)
      case tag: QuotationTag => QuotationTagToken(tag)
    }

  implicit def liftTokenizer: Tokenizer[Lift] =
    Tokenizer[Lift] {
      case tag: ScalarTag    => ScalarTagToken(tag)
      case tag: QuotationTag => QuotationTagToken(tag)
      case lift: ScalarLift  => ScalarLiftToken(lift)
      // TODO Longer explanation
      case lift: Tag         => fail("Cannot tokenize a non-scalar tagging.")
      case lift: Lift =>
        fail(
          s"Can't tokenize a non-scalar lifting. ${lift.name}\n" +
            s"\n" +
            s"This might happen because:\n" +
            s"* You are trying to insert or update an `Option[A]` field, but Scala infers the type\n" +
            s"  to `Some[A]` or `None.type`. For example:\n" +
            s"  run(query[Users].update(_.optionalField -> lift(Some(value))))" +
            s"  In that case, make sure the type is `Option`:\n" +
            s"  run(query[Users].update(_.optionalField -> lift(Some(value): Option[Int])))\n" +
            s"  or\n" +
            s"  run(query[Users].update(_.optionalField -> lift(Option(value))))\n" +
            s"\n" +
            s"* You are trying to insert or update whole Embedded case class. For example:\n" +
            s"  run(query[Users].update(_.embeddedCaseClass -> lift(someInstance)))\n" +
            s"  In that case, make sure you are updating individual columns, for example:\n" +
            s"  run(query[Users].update(\n" +
            s"    _.embeddedCaseClass.a -> lift(someInstance.a),\n" +
            s"    _.embeddedCaseClass.b -> lift(someInstance.b)\n" +
            s"  ))" +
            s"\n" +
            s"* You are trying to insert or update an ADT field, but Scala infers the specific type\n" +
            s"  instead of the ADT type. For example:\n" +
            s"  case class User(role: UserRole)\n" +
            s"  sealed trait UserRole extends Product with Serializable\n" +
            s"  object UserRole {\n" +
            s"    case object Writer extends UserRole\n" +
            s"    case object Reader extends UserRole\n" +
            s"    implicit val encodeStatus: MappedEncoding[UserRole, String] = ...\n" +
            s"    implicit val decodeStatus: MappedEncoding[String, UserRole] = ...\n" +
            s"  }\n" +
            s"  run(query[User].update(_.role -> lift(UserRole.Writer)))\n" +
            s"  In that case, make sure you are uplifting to ADT type, for example:\n" +
            s"  run(query[User].update(_.role -> lift(UserRole.Writer: UserRole)))\n"
        )
    }

  implicit def tokenTokenizer: Tokenizer[Token] = Tokenizer[Token](identity)
  implicit def statementTokenizer: Tokenizer[Statement] = Tokenizer[Statement](identity)
  implicit def stringTokenTokenizer: Tokenizer[StringToken] = Tokenizer[StringToken](identity)
  implicit def liftingTokenTokenizer: Tokenizer[ScalarLiftToken] = Tokenizer[ScalarLiftToken](identity)

  implicit class TokenList[T](list: List[T]) {
    def mkStmt(sep: String = ", ")(implicit tokenize: Tokenizer[T]) = {
      val l1 = list.map(_.token)
      val l2 = List.fill(l1.size - 1)(StringToken(sep))
      Statement(Interleave(l1, l2))
    }
  }

  implicit def listTokenizer[T](
    implicit tokenize: Tokenizer[T]
  ): Tokenizer[List[T]] =
    Tokenizer[List[T]] {
      case list => list.mkStmt()
    }

  implicit class Impl(sc: StringContext) {

    private def flatten(tokens: List[Token]): List[Token] = {

      def unestStatements(tokens: List[Token]): List[Token] = {
        tokens.flatMap {
          case Statement(innerTokens) => unestStatements(innerTokens)
          case token                  => token :: Nil
        }
      }

      def mergeStringTokens(tokens: List[Token]): List[Token] = {
        val (resultBuilder, leftTokens) =
          tokens.foldLeft((new ListBuffer[Token], new ListBuffer[String])) {
            case ((builder, acc), stringToken: StringToken) =>
              val str = stringToken.string
              if (str.nonEmpty)
                acc += stringToken.string
              (builder, acc)
            case ((builder, prev), b) if prev.isEmpty =>
              (builder += b.token, prev)
            case ((builder, prev), b) /* if prev.nonEmpty */ =>
              builder += StringToken(prev.result().mkString)
              builder += b.token
              (builder, new ListBuffer[String])
          }
        if (leftTokens.nonEmpty)
          resultBuilder += StringToken(leftTokens.result().mkString)
        resultBuilder.result()
      }

      (unestStatements _)
        .andThen(mergeStringTokens _)
        .apply(tokens)
    }

    private def checkLengths(
      args:  scala.collection.Seq[Any],
      parts: Seq[String]
    ): Unit =
      if (parts.length != args.length + 1)
        throw new IllegalArgumentException(
          "wrong number of arguments (" + args.length + ") for interpolated string with " +
            parts.length + " parts"
        )

    def stmt(args: Token*): Statement = {
      checkLengths(args, sc.parts)
      val partsIterator = sc.parts.iterator
      val argsIterator = args.iterator
      val bldr = List.newBuilder[Token]
      bldr += StringToken(partsIterator.next())
      while (argsIterator.hasNext) {
        bldr += argsIterator.next()
        bldr += StringToken(partsIterator.next())
      }
      val tokens = flatten(bldr.result())
      Statement(tokens)
    }
  }
}
getquill/quill
quill-core-portable/src/main/scala/io/getquill/idiom/StatementInterpolator.scala
Scala
apache-2.0
6,718
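A small usage sketch of the `stmt` interpolator defined above: literal parts become `StringToken`s, interpolated `Token` arguments are spliced between them, and `flatten` unnests `Statement`s and merges adjacent string tokens. It assumes `Token`, `StringToken`, and `Statement` live in the same `io.getquill.idiom` package, as the imports in the file suggest.

import io.getquill.idiom.{Statement, StringToken, Token}
import io.getquill.idiom.StatementInterpolator._

val table: Token = StringToken("users")
val select: Statement = stmt"SELECT * FROM $table WHERE id = ?"
// After flattening, `select` carries a single merged
// StringToken("SELECT * FROM users WHERE id = ?").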
class A(a: Int)

object A {
  val x = 3
}
dotty-staging/dotty
sbt-test/source-dependencies/constructors-unrelated/A.scala
Scala
apache-2.0
39
/*
 * Copyright (c) 2021, salesforce.com, inc.
 * All rights reserved.
 * SPDX-License-Identifier: BSD-3-Clause
 * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause
 */

package com.krux.hyperion.activity

import com.krux.hyperion.adt._
import com.krux.hyperion.expression.{EncryptedParameter, Format}

case class SftpActivityFields(
  host: HString,
  port: Option[HInt] = None,
  username: Option[HString] = None,
  password: Option[EncryptedParameter[String]] = None,
  identity: Option[HS3Uri] = None,
  pattern: Option[HString] = None,
  sinceDate: Option[HDateTime] = None,
  untilDate: Option[HDateTime] = None,
  skipEmpty: HBoolean = false,
  markSuccessfulJobs: HBoolean = false
)

trait SftpActivity extends BaseShellCommandActivity {

  type Self <: SftpActivity

  def sftpActivityFields: SftpActivityFields
  def updateSftpActivityFields(fields: SftpActivityFields): Self

  def direction: HString

  def sftpPath: Option[HString]

  def host = sftpActivityFields.host

  def sinceDate = sftpActivityFields.sinceDate
  def since(date: HDateTime) = updateSftpActivityFields(
    sftpActivityFields.copy(sinceDate = Option(date))
  )

  def untilDate = sftpActivityFields.untilDate
  def until(date: HDateTime) = updateSftpActivityFields(
    sftpActivityFields.copy(untilDate = Option(date))
  )

  def port = sftpActivityFields.port
  def withPort(port: HInt) = updateSftpActivityFields(
    sftpActivityFields.copy(port = Option(port))
  )

  def username = sftpActivityFields.username
  def withUsername(username: HString) = updateSftpActivityFields(
    sftpActivityFields.copy(username = Option(username))
  )

  def password = sftpActivityFields.password
  def withPassword(password: EncryptedParameter[String]) = updateSftpActivityFields(
    sftpActivityFields.copy(password = Option(password))
  )

  def identity = sftpActivityFields.identity
  def withIdentity(identity: HS3Uri) = updateSftpActivityFields(
    sftpActivityFields.copy(identity = Option(identity))
  )

  def pattern = sftpActivityFields.pattern
  def withPattern(pattern: HString) = updateSftpActivityFields(
    sftpActivityFields.copy(pattern = Option(pattern))
  )

  def skipEmpty = sftpActivityFields.skipEmpty
  def skippingEmpty() = updateSftpActivityFields(
    sftpActivityFields.copy(skipEmpty = true)
  )

  def markSuccessfulJobs = sftpActivityFields.markSuccessfulJobs
  def markingSuccessfulJobs() = updateSftpActivityFields(
    sftpActivityFields.copy(markSuccessfulJobs = true)
  )

  private val DateTimeFormat = "yyyy-MM-dd\\'T\\'HH:mm:ssZZ"

  private def arguments: Seq[HType] = Seq(
    Option(Seq[HString](direction)),
    Option(Seq[HString]("--host", host)),
    port.map(p => Seq[HType]("--port", p)),
    username.map(u => Seq[HString]("--user", u)),
    password.map(p => Seq[HString]("--password", p)),
    identity.map(i => Seq[HType]("--identity", i)),
    pattern.map(p => Seq[HString]("--pattern", p)),
    sinceDate.map(d => Seq[HString]("--since", Format(d, DateTimeFormat))),
    untilDate.map(d => Seq[HString]("--until", Format(d, DateTimeFormat))),
    if (skipEmpty) Option(Seq[HString]("--skip-empty")) else None,
    if (markSuccessfulJobs) Option(Seq[HString]("--mark-successful-jobs")) else None,
    Option(sftpPath.toSeq)
  ).flatten.flatten

  val mainClass: HString = "com.krux.hyperion.contrib.activity.sftp.SftpActivity"

  override def scriptArguments = (jarUri.serialize: HString) +: mainClass +: arguments

  def scriptUriBase: HString

  def jarUri: HString = s"${scriptUriBase}activities/hyperion-sftp-activity-current-assembly.jar"
}
realstraw/hyperion
contrib/activity/definition/src/main/scala/com/krux/hyperion/activity/SftpActivity.scala
Scala
bsd-3-clause
3,640
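A standalone sketch of the copy-based fluent-builder pattern the trait above uses: every `withX`/`since` call copies an immutable fields case class instead of mutating the activity. Names here are illustrative only, not part of the hyperion API.

case class Fields(port: Option[Int] = None, username: Option[String] = None)

trait Builder {
  type Self <: Builder
  def fields: Fields
  def updateFields(f: Fields): Self
  // Each setter returns a fresh copy, so configuration chains stay immutable.
  def withPort(p: Int): Self = updateFields(fields.copy(port = Some(p)))
  def withUsername(u: String): Self = updateFields(fields.copy(username = Some(u)))
}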
package sigmastate.serialization

import sigmastate._
import sigmastate.utxo._

class TransformersSerializationSpec extends SerializationSpecification {

  property("MapCollection: Serializer round trip") {
    forAll { mc: MapCollection[SInt.type, SInt.type] => roundTripTest(mc) }
  }

  property("Exists: Serializer round trip") {
    forAll { e: Exists[SInt.type] => roundTripTest(e) }
  }

  property("ForAll: Serializer round trip") {
    forAll { e: ForAll[SInt.type] => roundTripTest(e) }
  }

  property("Fold: Serializer round trip") {
    forAll { f: Fold[SInt.type, SBoolean.type] => roundTripTest(f) }
  }

  property("Slice: Serializer round trip") {
    forAll { f: Slice[SInt.type] => roundTripTest(f) }
  }

  property("AtLeast: Serializer round trip") {
    forAll { f: AtLeast => roundTripTest(f) }
  }

  property("Append: Serializer round trip") {
    forAll { f: Append[SInt.type] => roundTripTest(f) }
  }

  property("Filter: Serializer round trip") {
    forAll { f: Filter[SInt.type] => roundTripTest(f) }
  }

  property("SizeOf: Serializer round trip") {
    forAll { s: SizeOf[SInt.type] => roundTripTest(s) }
  }

  property("ExtractAmount: Serializer round trip") {
    forAll { e: ExtractAmount => roundTripTest(e) }
  }

  property("ExtractScriptBytes: Serializer round trip") {
    forAll { e: ExtractScriptBytes => roundTripTest(e) }
  }

  property("ExtractBytes: Serializer round trip") {
    forAll { e: ExtractBytes => roundTripTest(e) }
  }

  property("ExtractBytesWithNoRef: Serializer round trip") {
    forAll { e: ExtractBytesWithNoRef => roundTripTest(e) }
  }

  property("ExtractId: Serializer round trip") {
    forAll { e: ExtractId => roundTripTest(e) }
  }

  property("ExtractCreationInfo: Serializer round trip") {
    forAll() { e: ExtractCreationInfo => roundTripTest(e) }
  }

  property("ExtractRegisterAs: Serializer round trip") {
    forAll { e: ExtractRegisterAs[SInt.type] => roundTripTest(e) }
  }

  property("IntToByteArray: Serializer round trip") {
    forAll { itba: LongToByteArray => roundTripTest(itba) }
  }

  property("DeserializeContext: Serializer round trip") {
    forAll { itba: DeserializeContext[SBoolean.type] => roundTripTest(itba) }
  }

  property("DeserializeRegister: Serializer round trip") {
    forAll { itba: DeserializeRegister[SBoolean.type] => roundTripTest(itba) }
  }

  property("ByteArrayToBigInt: Serializer round trip") {
    forAll { batbi: ByteArrayToBigInt => roundTripTest(batbi) }
  }

  property("CalcBlake2b256: Serializer round trip") {
    forAll { b256: CalcBlake2b256 => roundTripTest(b256) }
  }

  property("CalcSha256: Serializer round trip") {
    forAll { s256: CalcSha256 => roundTripTest(s256) }
  }

  property("ByIndex: Serializer round trip") {
    forAll { bi: ByIndex[SInt.type] => roundTripTest(bi) }
  }

  property("Downcast: Serializer round trip") {
    forAll(downcastGen) { v => roundTripTest(v) }
  }

  property("GetVar: Serializer round trip") {
    forAll(getVarIntGen) { v => roundTripTest(v) }
  }

  property("OptionGet: Serializer round trip") {
    forAll(optionGetGen) { v => roundTripTest(v) }
  }

  property("OptionGetOrElse: Serializer round trip") {
    forAll(optionGetOrElseGen) { v => roundTripTest(v) }
  }

  property("OptionIsDefined: Serializer round trip") {
    forAll(optionIsDefinedGen) { v => roundTripTest(v) }
  }

  property("SigmaAnd: Serializer round trip") {
    forAll(sigmaAndGen) { v => roundTripTest(v) }
  }

  property("SigmaOr: Serializer round trip") {
    forAll(sigmaOrGen) { v => roundTripTest(v) }
  }

  property("BoolToSigmaProp: Serializer round trip") {
    forAll(boolToSigmaPropGen) { v => roundTripTest(v) }
  }

  property("ByteArrayToLong: Serializer round trip") {
    forAll(byteArrayToLongGen) { roundTripTest(_) }
  }

  property("XorOf: Serializer round trip") {
    forAll(logicalExprTreeNodeGen(Seq(XorOf.apply))) { tree => roundTripTest(tree) }
  }
}
ScorexFoundation/sigmastate-interpreter
sigmastate/src/test/scala/sigmastate/serialization/TransformersSerializationSpec.scala
Scala
mit
4,188
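The shape of the round-trip property exercised throughout the spec above, sketched directly with ScalaCheck; `serialize` and `deserialize` stand in for the sigmastate serializers and are assumptions, not the real API.

import org.scalacheck.{Arbitrary, Prop}

// A value survives a serialize/deserialize cycle unchanged.
def roundTrip[T: Arbitrary](serialize: T => Array[Byte],
                            deserialize: Array[Byte] => T): Prop =
  Prop.forAll { (v: T) => deserialize(serialize(v)) == v }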
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * This code is based on code org.apache.spark.repl.SparkIMain released under Apache 2.0" * Link on Github: https://github.com/apache/spark/blob/master/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkIMain.scala * Author: Martin Odersky */ package org.apache.spark.repl import java.io.{StringWriter, File} import java.net.URL import java.util.concurrent.Future import org.apache.spark._ import org.apache.spark.annotation.DeveloperApi import org.apache.spark.repl.H2OIMain.CodeAssembler import scala.Predef.{println => _, _} import scala.collection.mutable import scala.language.implicitConversions import scala.reflect.internal.util._ import scala.reflect.io.{AbstractFile, PlainFile} import scala.reflect.runtime.{universe => ru} import scala.reflect.{ClassTag, classTag} import scala.sys.BooleanProp import scala.tools.nsc.backend.JavaPlatform import scala.tools.nsc.interpreter._ import scala.tools.nsc.reporters._ import scala.tools.nsc.util.Exceptional.unwrap import scala.tools.nsc.util.ScalaClassLoader.URLClassLoader import scala.tools.nsc.util.{MergedClassPath, ScalaClassLoader, stackTraceString, stringFromWriter} import scala.tools.nsc.{io, util, _} import scala.tools.reflect.StdRuntimeTags._ import scala.tools.util.PathResolver import scala.util.control.ControlThrowable // scalastyle:off /** An interpreter for Scala code. It is slighlty modified for H2O purposes so it can be used in the * environemnt where multiple interpretrs exist and are using the same http class server. * * The main public entry points are compile(), interpret(), and bind(). * The compile() method loads a complete Scala file. The interpret() method * executes one line of Scala code at the request of the user. The bind() * method binds an object to a variable that can then be used by later * interpreted code. * * The overall approach is based on compiling the requested code and then * using a Java classloader and Java reflection to run the code * and access its results. * * In more detail, a single compiler instance is used * to accumulate all successfully compiled or interpreted Scala code. To * "interpret" a line of code, the compiler generates a fresh object that * includes the line of code and which has public member(s) to export * all variables defined by that code. To extract the result of an * interpreted line to show the user, a second "result object" is created * which imports the variables exported by the above object and then * exports members called "$eval" and "$print". To accomodate user expressions * that read from variables or methods defined in previous statements, "import" * statements are used. * * This interpreter shares the strengths and weaknesses of using the * full compiler-to-Java. 
The main strength is that interpreted code * behaves exactly as does compiled code, including running at full speed. * The main weakness is that redefining classes and methods is not handled * properly, because rebinding at the Java level is technically difficult. * * @author Moez A. Abdel-Gawad * @author Lex Spoon */ @DeveloperApi class H2OIMain(val sharedCLHelper: ClassLoaderHelper, initialSettings: Settings, val out: JPrintWriter, val outWriter: StringWriter, val sessionID: Int, propagateExceptions: Boolean = false) extends H2OImports with Logging { imain => private val SPARK_DEBUG_REPL: Boolean = System.getenv("SPARK_DEBUG_REPL") == "1" /** Local directory to save .class files too */ private lazy val outputDir = { REPLClassServerUtils.getClassOutputDir } if (SPARK_DEBUG_REPL) { echo("Output directory: " + outputDir) } /** * Returns the path to the output directory containing all generated * class files that will be served by the REPL class server. */ @DeveloperApi lazy val getClassOutputDirectory = outputDir private val virtualDirectory = new PlainFile(outputDir) private var currentSettings: Settings = initialSettings private var printResults = true // whether to print result lines private var totalSilence = false // whether to print anything private var _initializeComplete = false // compiler is initialized private var _isInitialized: Future[Boolean] = null // set up initialization future private var bindExceptions = true // whether to bind the lastException variable private var _executionWrapper = "" // code to be wrapped around all lines /** * URI of the class server used to feed REPL compiled classes. * * @return The string representing the class server uri */ @DeveloperApi def classServerUri = { REPLClassServerUtils.classServerUri } /** We're going to go to some trouble to initialize the compiler asynchronously. * It's critical that nothing call into it until it's been initialized or we will * run into unrecoverable issues, but the perceived repl startup time goes * through the roof if we wait for it. So we initialize it with a future and * use a lazy val to ensure that any attempt to use the compiler object waits * on the future. */ // our private compiler private val _compiler: Global = newCompiler(settings, reporter) private val nextReqId = { var counter = 0 () => { counter += 1; counter } } private def compilerClasspath: Seq[URL] = { if (isInitializeComplete) global.classPath.asURLs else new PathResolver(settings).result.asURLs // the compiler's classpath } // NOTE: Exposed to repl package since accessed indirectly from SparkIMain private[repl] def settings = currentSettings private def mostRecentLine = prevRequestList match { case Nil => "" case req :: _ => req.originalLine } // Run the code body with the given boolean settings flipped to true. 
private def withoutWarnings[T](body: => T): T = beQuietDuring { val saved = settings.nowarn.value if (!saved) settings.nowarn.value = true try body finally if (!saved) settings.nowarn.value = false } /* /** construct an interpreter that reports to Console */ def this(settings: Settings) = this(new SparkConf,settings, new NewLinePrintWriter(new ConsoleWriter, true)) def this() = this(new Settings()) */ private lazy val repllog: Logger = new Logger { val out: JPrintWriter = imain.out val isInfo: Boolean = BooleanProp keyExists "scala.repl.info" val isDebug: Boolean = BooleanProp keyExists "scala.repl.debug" val isTrace: Boolean = BooleanProp keyExists "scala.repl.trace" } private[repl] lazy val formatting: Formatting = new Formatting { val prompt = Properties.shellPromptString } // NOTE: Exposed to repl package since used by SparkExprTyper and SparkILoop private[repl] lazy val reporter: ConsoleReporter = new H2OIMain.ReplReporter(this) /** * Determines if errors were reported (typically during compilation). * * @note This is not for runtime errors * * @return True if had errors, otherwise false */ @DeveloperApi def isReportingErrors = reporter.hasErrors import formatting._ import reporter.{printMessage, withoutTruncating} // This exists mostly because using the reporter too early leads to deadlock. private def echo(msg: String) { Console println msg } private def _initSources = List(new BatchSourceFile("<init>", "package intp_id_" + sessionID + " \\n class $repl_$init { }")) //private def _initSources = List(new BatchSourceFile("<init>", "class $repl_$init { }")) private def _initialize() = { try { // todo. if this crashes, REPL will hang new _compiler.Run() compileSources _initSources _initializeComplete = true true } catch AbstractOrMissingHandler() } private def tquoted(s: String) = "\\"\\"\\"" + s + "\\"\\"\\"" // argument is a thunk to execute after init is done // NOTE: Exposed to repl package since used by SparkILoop private[repl] def initialize(postInitSignal: => Unit) { synchronized { if (_isInitialized == null) { _isInitialized = io.spawn { try _initialize() finally postInitSignal } } } } /** * Initializes the underlying compiler/interpreter in a blocking fashion. * * @note Must be executed before using SparkIMain! */ @DeveloperApi def initializeSynchronous(): Unit = { if (!isInitializeComplete) { _initialize() assert(global != null, global) } } private def isInitializeComplete = _initializeComplete /** the public, go through the future compiler */ /** * The underlying compiler used to generate ASTs and execute code. */ @DeveloperApi lazy val global: Global = { if (isInitializeComplete) _compiler else { // If init hasn't been called yet you're on your own. if (_isInitialized == null) { logWarning("Warning: compiler accessed before init set up. Assuming no postInit code.") initialize(()) } // // blocks until it is ; false means catastrophic failure if (_isInitialized.get()) _compiler else null } } @deprecated("Use `global` for access to the compiler instance.", "2.9.0") private lazy val compiler: global.type = global import global._ import definitions.termMember import rootMirror.{RootClass, getClassIfDefined, getModuleIfDefined} private implicit class ReplTypeOps(tp: Type) { def orElse(other: => Type): Type = if (tp ne NoType) tp else other def andAlso(fn: Type => Type): Type = if (tp eq NoType) tp else fn(tp) } // TODO: If we try to make naming a lazy val, we run into big time // scalac unhappiness with what look like cycles. 
It has not been easy to // reduce, but name resolution clearly takes different paths. // NOTE: Exposed to repl package since used by SparkExprTyper private[repl] object naming extends { val global: imain.global.type = imain.global } with Naming { // make sure we don't overwrite their unwisely named res3 etc. def freshUserTermName(): TermName = { val name = newTermName(freshUserVarName()) if (definedNameMap contains name) freshUserTermName() else name } def isUserTermName(name: Name) = isUserVarName("" + name) def isInternalTermName(name: Name) = isInternalVarName("" + name) } import naming._ // NOTE: Exposed to repl package since used by H2OILoop private[repl] object deconstruct extends { val global: imain.global.type = imain.global } with StructuredTypeStrings // NOTE: Exposed to repl package since used by H2OImports private[repl] lazy val memberHandlers = new { val intp: imain.type = imain } with H2OMemberHandlers import memberHandlers._ /** * Suppresses overwriting print results during the operation. * * @param body The block to execute * @tparam T The return type of the block * * @return The result from executing the block */ @DeveloperApi def beQuietDuring[T](body: => T): T = { val saved = printResults printResults = false try body finally printResults = saved } /** * Completely masks all output during the operation (minus JVM standard * out and error). * * @param operation The block to execute * @tparam T The return type of the block * * @return The result from executing the block */ @DeveloperApi def beSilentDuring[T](operation: => T): T = { val saved = totalSilence totalSilence = true try operation finally totalSilence = saved } // NOTE: Exposed to repl package since used by SparkILoop private[repl] def quietRun[T](code: String) = beQuietDuring(interpret(code)) private def logAndDiscard[T](label: String, alt: => T): PartialFunction[Throwable, T] = { case t: ControlThrowable => throw t case t: Throwable => logDebug(label + ": " + unwrap(t)) logDebug(stackTraceString(unwrap(t))) alt } /** takes AnyRef because it may be binding a Throwable or an Exceptional */ private def withLastExceptionLock[T](body: => T, alt: => T): T = { assert(bindExceptions, "withLastExceptionLock called incorrectly.") bindExceptions = false try beQuietDuring(body) catch logAndDiscard("withLastExceptionLock", alt) finally bindExceptions = true } /** * Contains the code (in string form) representing a wrapper around all * code executed by this instance. * * @return The wrapper code as a string */ @DeveloperApi def executionWrapper = _executionWrapper /** * Sets the code to use as a wrapper around all code executed by this * instance. * * @param code The wrapper code as a string */ @DeveloperApi def setExecutionWrapper(code: String) = _executionWrapper = code /** * Clears the code used as a wrapper around all code executed by * this instance. */ @DeveloperApi def clearExecutionWrapper() = _executionWrapper = "" /** interpreter settings */ private lazy val isettings = new H2OISettings(this) /** * Instantiates a new compiler used by SparkIMain. Overridable to provide * own instance of a compiler. 
* * @param settings The settings to provide the compiler * @param reporter The reporter to use for compiler output * * @return The compiler as a Global */ @DeveloperApi protected def newCompiler(settings: Settings, reporter: Reporter): ReplGlobal = { settings.outputDirs setSingleOutput virtualDirectory settings.exposeEmptyPackage.value = true new Global(settings, reporter) with ReplGlobal { override def toString: String = "<global>" } } /** * Adds any specified jars to the compile and runtime classpaths. * * @note Currently only supports jars, not directories * @param urls The list of items to add to the compile and runtime classpaths */ @DeveloperApi def addUrlsToClassPath(urls: URL*): Unit = { new Run // Needed to force initialization of "something" to correctly load Scala classes from jars sharedCLHelper.addUrlsToClasspath(urls: _*) // Add jars/classes to runtime for execution updateCompilerClassPath(urls: _*) // Add jars/classes to compile time for compiling } private def updateCompilerClassPath(urls: URL*): Unit = { require(!global.forMSIL) // Only support JavaPlatform val platform = global.platform.asInstanceOf[JavaPlatform] val newClassPath = mergeUrlsIntoClassPath(platform, urls: _*) // NOTE: Must use reflection until this is exposed/fixed upstream in Scala val fieldSetter = platform.getClass.getMethods .find(_.getName.endsWith("currentClassPath_$eq")).get fieldSetter.invoke(platform, Some(newClassPath)) // Reload all jars specified into our compiler global.invalidateClassPathEntries(urls.map(_.getPath): _*) } private def mergeUrlsIntoClassPath(platform: JavaPlatform, urls: URL*): MergedClassPath[AbstractFile] = { // Collect our new jars/directories and add them to the existing set of classpaths val allClassPaths = ( platform.classPath.asInstanceOf[MergedClassPath[AbstractFile]].entries ++ urls.map(url => { platform.classPath.context.newClassPath( if (url.getProtocol == "file") { val f = new File(url.getPath) if (f.isDirectory) io.AbstractFile.getDirectory(f) else io.AbstractFile.getFile(f) } else { io.AbstractFile.getURL(url) } ) }) ).distinct // Combine all of our classpaths (old and new) into one merged classpath new MergedClassPath(allClassPaths, platform.classPath.context) } /** * Represents the parent classloader used by this instance. Can be * overridden to provide alternative classloader. * * @return The classloader used as the parent loader of this instance */ @DeveloperApi protected def parentClassLoader: ClassLoader = SparkHelper.explicitParentLoader(settings).getOrElse(this.getClass.getClassLoader) /* A single class loader is used for all commands interpreted by this Interpreter. It would also be possible to create a new class loader for each command to interpret. The advantages of the current approach are: - Expressions are only evaluated one time. This is especially significant for I/O, e.g. "val x = Console.readLine" The main disadvantage is: - Objects, classes, and methods cannot be rebound. Instead, definitions shadow the old ones, and old code objects refer to the old definitions. 
*/ private def resetClassLoader() = { logDebug("Setting new classloader: was " + sharedCLHelper.REPLCLassLoader) sharedCLHelper.resetREPLCLassLoader() ensureClassLoader() } private final def ensureClassLoader() { sharedCLHelper.ensureREPLClassLoader(makeClassLoader()) } // NOTE: Exposed to repl package since used by SparkILoop private[repl] def classLoader: AbstractFileClassLoader = { ensureClassLoader() sharedCLHelper.REPLCLassLoader } private class TranslatingClassLoader(parent: ClassLoader) extends AbstractFileClassLoader(virtualDirectory, parent) { /** Overridden here to try translating a simple name to the generated * class name if the original attempt fails. This method is used by * getResourceAsStream as well as findClass. */ override protected def findAbstractFile(name: String): AbstractFile = { super.findAbstractFile(name) match { // deadlocks on startup if we try to translate names too early case null if isInitializeComplete => generatedName(name) map (x => super.findAbstractFile(x)) orNull case file => file } } } private def makeClassLoader(): AbstractFileClassLoader = new TranslatingClassLoader(parentClassLoader match { case null => ScalaClassLoader fromURLs compilerClasspath case p => sharedCLHelper.ensureRuntimeCLassLoader(new URLClassLoader(compilerClasspath, p) with ExposeAddUrl) sharedCLHelper.runtimeClassLoader }) private def getInterpreterClassLoader = classLoader // Set the current Java "context" class loader to this interpreter's class loader // NOTE: Exposed to repl package since used by SparkILoopInit private[repl] def setContextClassLoader() = classLoader.setAsContext() /** * Returns the real name of a class based on its repl-defined name. * * ==Example== * Given a simple repl-defined name, returns the real name of * the class representing it, e.g. for "Bippy" it may return * {{{ * $line19.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$Bippy * }}} * * @param simpleName The repl-defined name whose real name to retrieve * * @return Some real name if the simple name exists, else None */ @DeveloperApi def generatedName(simpleName: String): Option[String] = { if (simpleName endsWith nme.MODULE_SUFFIX_STRING) optFlatName(simpleName.init) map (_ + nme.MODULE_SUFFIX_STRING) else optFlatName(simpleName) } // NOTE: Exposed to repl package since used by SparkILoop private[repl] def flatName(id: String) = optFlatName(id) getOrElse id // NOTE: Exposed to repl package since used by SparkILoop private[repl] def optFlatName(id: String) = requestForIdent(id) map (_ fullFlatName id) /** * Retrieves all simple names contained in the current instance. * * @return A list of sorted names */ @DeveloperApi def allDefinedNames = definedNameMap.keys.toList.sorted private def pathToType(id: String): String = pathToName(newTypeName(id)) // NOTE: Exposed to repl package since used by SparkILoop private[repl] def pathToTerm(id: String): String = pathToName(newTermName(id)) /** * Retrieves the full code path to access the specified simple name * content. * * @param name The simple name of the target whose path to determine * * @return The full path used to access the specified target (name) */ @DeveloperApi def pathToName(name: Name): String = { if (definedNameMap contains name) definedNameMap(name) fullPath name else name.toString } /** Most recent tree handled which wasn't wholly synthetic. 
*/ private def mostRecentlyHandledTree: Option[Tree] = { prevRequests.reverse foreach { req => req.handlers.reverse foreach { case x: MemberDefHandler if x.definesValue && !isInternalTermName(x.name) => return Some(x.member) case _ => () } } None } /** Stubs for work in progress. */ private def handleTypeRedefinition(name: TypeName, old: Request, req: Request) = { for (t1 <- old.simpleNameOfType(name); t2 <- req.simpleNameOfType(name)) { logDebug("Redefining type '%s'\\n %s -> %s".format(name, t1, t2)) } } private def handleTermRedefinition(name: TermName, old: Request, req: Request) = { for (t1 <- old.compilerTypeOf get name; t2 <- req.compilerTypeOf get name) { // Printing the types here has a tendency to cause assertion errors, like // assertion failed: fatal: <refinement> has owner value x, but a class owner is required // so DBG is by-name now to keep it in the family. (It also traps the assertion error, // but we don't want to unnecessarily risk hosing the compiler's internal state.) logDebug("Redefining term '%s'\\n %s -> %s".format(name, t1, t2)) } } private def recordRequest(req: Request) { if (req == null || referencedNameMap == null) return prevRequests += req req.referencedNames foreach (x => referencedNameMap(x) = req) // warning about serially defining companions. It'd be easy // enough to just redefine them together but that may not always // be what people want so I'm waiting until I can do it better. for { name <- req.definedNames filterNot (x => req.definedNames contains x.companionName) oldReq <- definedNameMap get name.companionName newSym <- req.definedSymbols get name oldSym <- oldReq.definedSymbols get name.companionName if Seq(oldSym, newSym).permutations exists { case Seq(s1, s2) => s1.isClass && s2.isModule } } { afterTyper(replwarn(s"warning: previously defined $oldSym is not a companion to $newSym.")) replwarn("Companions must be defined together; you may wish to use :paste mode for this.") } // Updating the defined name map req.definedNames foreach { name => if (definedNameMap contains name) { if (name.isTypeName) handleTypeRedefinition(name.toTypeName, definedNameMap(name), req) else handleTermRedefinition(name.toTermName, definedNameMap(name), req) } definedNameMap(name) = req } } private def replwarn(msg: => String) { if (!settings.nowarnings.value) printMessage(msg) } private def isParseable(line: String): Boolean = { beSilentDuring { try parse(line) match { case Some(xs) => xs.nonEmpty // parses as-is case None => true // incomplete } catch { case x: Exception => // crashed the compiler replwarn("Exception in isParseable(\\"" + line + "\\"): " + x) false } } } private def compileSourcesKeepingRun(sources: SourceFile*) = { val run = new Run() reporter.reset() run compileSources sources.toList (!reporter.hasErrors, run) } /** * Compiles specified source files. * * @param sources The sequence of source files to compile * * @return True if successful, otherwise false */ @DeveloperApi def compileSources(sources: SourceFile*): Boolean = compileSourcesKeepingRun(sources: _*)._1 /** * Compiles a string of code. * * @param code The string of code to compile * * @return True if successful, otherwise false */ @DeveloperApi def compileString(code: String): Boolean = compileSources(new BatchSourceFile("<script>", code)) /** Build a request from the user. `trees` is `line` after being parsed. 
*/ private def buildRequest(line: String, trees: List[Tree]): Request = { executingRequest = new Request(line, trees) executingRequest } // rewriting "5 // foo" to "val x = { 5 // foo }" creates broken code because // the close brace is commented out. Strip single-line comments. // ... but for error message output reasons this is not used, and rather than // enclosing in braces it is constructed like "val x =\\n5 // foo". private def removeComments(line: String): String = { showCodeIfDebugging(line) // as we're about to lose our // show line.lines map (s => s indexOf "//" match { case -1 => s case idx => s take idx }) mkString "\\n" } private def safePos(t: Tree, alt: Int): Int = try t.pos.startOrPoint catch { case _: UnsupportedOperationException => alt } // Given an expression like 10 * 10 * 10 we receive the parent tree positioned // at a '*'. So look at each subtree and find the earliest of all positions. private def earliestPosition(tree: Tree): Int = { var pos = Int.MaxValue tree foreach { t => pos = math.min(pos, safePos(t, Int.MaxValue)) } pos } private def requestFromLine(line: String, synthetic: Boolean): Either[IR.Result, Request] = { val content = indentCode(line) val trees = parse(content) match { case None => return Left(IR.Incomplete) case Some(Nil) => return Left(IR.Error) // parse error or empty input case Some(trees) => trees } logDebug( trees map (t => { // [Eugene to Paul] previously it just said `t map ...` // because there was an implicit conversion from Tree to a list of Trees // however Martin and I have removed the conversion // (it was conflicting with the new reflection API), // so I had to rewrite this a bit val subs = t collect { case sub => sub } subs map (t0 => " " + safePos(t0, -1) + ": " + t0.shortClass + "\\n" ) mkString "" }) mkString "\\n" ) // If the last tree is a bare expression, pinpoint where it begins using the // AST node position and snap the line off there. Rewrite the code embodied // by the last tree as a ValDef instead, so we can access the value. trees.last match { case _: Assign => // we don't want to include assignments case _: TermTree | _: Ident | _: Select => // ... but do want other unnamed terms. val varName = if (synthetic) freshInternalVarName() else freshUserVarName() val rewrittenLine = { // In theory this would come out the same without the 1-specific test, but // it's a cushion against any more sneaky parse-tree position vs. code mismatches: // this way such issues will only arise on multiple-statement repl input lines, // which most people don't use. if (trees.size == 1) "val " + varName + " =\\n" + content else { // The position of the last tree val lastpos0 = earliestPosition(trees.last) // Oh boy, the parser throws away parens so "(2+2)" is mispositioned, // with increasingly hard to decipher positions as we move on to "() => 5", // (x: Int) => x + 1, and more. So I abandon attempts to finesse and just // look for semicolons and newlines, which I'm sure is also buggy. val (raw1, raw2) = content splitAt lastpos0 logDebug("[raw] " + raw1 + " <---> " + raw2) val adjustment = (raw1.reverse takeWhile (ch => (ch != ';') && (ch != '\\n'))).size val lastpos = lastpos0 - adjustment // the source code split at the laboriously determined position. val (l1, l2) = content splitAt lastpos logDebug("[adj] " + l1 + " <---> " + l2) val prefix = if (l1.trim == "") "" else l1 + ";\\n" // Note to self: val source needs to have this precise structure so that // error messages print the user-submitted part without the "val res0 = " part. 
val combined = prefix + "val " + varName + " =\\n" + l2 logDebug(List( " line" -> line, " content" -> content, " was" -> l2, "combined" -> combined) map { case (label, s) => label + ": '" + s + "'" } mkString "\\n" ) combined } } // Rewriting "foo ; bar ; 123" // to "foo ; bar ; val resXX = 123" requestFromLine(rewrittenLine, synthetic) match { case Right(req) => return Right(req withOriginalLine line) case x => return x } case _ => } Right(buildRequest(line, trees)) } // normalize non-public types so we don't see protected aliases like Self private def normalizeNonPublic(tp: Type) = tp match { case TypeRef(_, sym, _) if sym.isAliasType && !sym.isPublic => tp.dealias case _ => tp } /** * Interpret one line of input. All feedback, including parse errors * and evaluation results, are printed via the supplied compiler's * reporter. Values defined are available for future interpreted strings. * * @note This assigns variables with user name structure like "res0" * * @param line The line representing the code to interpret * * @return Whether the line was interpreted successfully, or failed due to * incomplete code, compilation error, or runtime error */ @DeveloperApi def interpret(line: String): IR.Result = interpret(line, false) /** * Interpret one line of input. All feedback, including parse errors * and evaluation results, are printed via the supplied compiler's * reporter. Values defined are available for future interpreted strings. * * @note This assigns variables with synthetic (generated) name structure * like "$ires0" * * @param line The line representing the code to interpret * * @return Whether the line was interpreted successfully, or failed due to * incomplete code, compilation error, or runtime error */ @DeveloperApi def interpretSynthetic(line: String): IR.Result = interpret(line, true) private def interpret(line: String, synthetic: Boolean): IR.Result = { def loadAndRunReq(req: Request) = { classLoader.setAsContext() val (result, succeeded) = req.loadAndRun /** To our displeasure, ConsoleReporter offers only printMessage, * which tacks a newline on the end. Since that breaks all the * output checking, we have to take one off to balance. */ if (succeeded) { if (printResults && result != "") printMessage(result stripSuffix "\\n") else if (isReplDebug) // show quiet-mode activity printMessage(result.trim.lines map ("[quiet] " + _) mkString "\\n") // Book-keeping. Have to record synthetic requests too, // as they may have been issued for information, e.g. :type recordRequest(req) IR.Success } else { // don't truncate stack traces withoutTruncating(printMessage(result)) IR.Error } } if (global == null) IR.Error else requestFromLine(line, synthetic) match { case Left(result) => result case Right(req) => // null indicates a disallowed statement type; otherwise compile and // fail if false (implying e.g. a type error) if (req == null || !req.compile) IR.Error else loadAndRunReq(req) } } /** * Bind a specified name to a specified value. The name may * later be used by expressions passed to interpret. 
* * @note This binds via compilation and interpretation * * @param name The variable name to bind * @param boundType The type of the variable, as a string * @param value The object value to bind to it * * @return An indication of whether the binding succeeded or failed * using interpreter results */ @DeveloperApi def bind(name: String, boundType: String, value: Any, modifiers: List[String] = Nil): IR.Result = { val bindRep = new ReadEvalPrint() val run = bindRep.compile( """ |object %s { | var value: %s = _ | def set(x: Any) = value = x.asInstanceOf[%s] |} """.stripMargin.format(bindRep.evalName, boundType, boundType) ) bindRep.callEither("set", value) match { case Left(ex) => logDebug("Set failed in bind(%s, %s, %s)".format(name, boundType, value)) logDebug(util.stackTraceString(ex)) IR.Error case Right(_) => val line = "%sval %s = %s.value".format(modifiers map (_ + " ") mkString, name, bindRep.evalPath) logDebug("Interpreting: " + line) interpret(line) } } /** * Bind a specified name to a specified value directly. * * @note This updates internal bound names directly * * @param name The variable name to bind * @param boundType The type of the variable, as a string * @param value The object value to bind to it * * @return An indication of whether the binding succeeded or failed * using interpreter results */ @DeveloperApi def directBind(name: String, boundType: String, value: Any): IR.Result = { val result = bind(name, boundType, value) if (result == IR.Success) directlyBoundNames += newTermName(name) result } private def directBind(p: NamedParam): IR.Result = directBind(p.name, p.tpe, p.value) private def directBind[T: ru.TypeTag : ClassTag](name: String, value: T): IR.Result = directBind((name, value)) /** * Overwrites previously-bound val with a new instance. * * @param p The named parameters used to provide the name, value, and type * * @return The results of rebinding the named val */ @DeveloperApi def rebind(p: NamedParam): IR.Result = { val name = p.name val oldType = typeOfTerm(name) orElse { return IR.Error } val newType = p.tpe val tempName = freshInternalVarName() quietRun("val %s = %s".format(tempName, name)) quietRun("val %s = %s.asInstanceOf[%s]".format(name, tempName, newType)) } private def quietImport(ids: String*): IR.Result = beQuietDuring(addImports(ids: _*)) /** * Executes an import statement per "id" provided * * @example addImports("org.apache.spark.SparkContext") * * @param ids The series of "id" strings used for import statements * * @return The results of importing the series of "id" strings */ @DeveloperApi def addImports(ids: String*): IR.Result = if (ids.isEmpty) IR.Success else interpret("import " + ids.mkString(", ")) // NOTE: Exposed to repl package since used by SparkILoop private[repl] def quietBind(p: NamedParam): IR.Result = beQuietDuring(bind(p)) private def bind(p: NamedParam): IR.Result = bind(p.name, p.tpe, p.value) private def bind[T: ru.TypeTag : ClassTag](name: String, value: T): IR.Result = bind((name, value)) private def bindSyntheticValue(x: Any): IR.Result = bindValue(freshInternalVarName(), x) private def bindValue(x: Any): IR.Result = bindValue(freshUserVarName(), x) private def bindValue(name: String, x: Any): IR.Result = bind(name, TypeStrings.fromValue(x), x) /** * Reset this interpreter, forgetting all user-specified requests. 
*/ @DeveloperApi def reset() { clearExecutionWrapper() resetClassLoader() resetAllCreators() prevRequests.clear() referencedNameMap.clear() definedNameMap.clear() virtualDirectory.delete() virtualDirectory.create() } /** * Stops the underlying REPL class server and flushes the reporter used * for compiler output. */ @DeveloperApi def close() { reporter.flush() } /** * Captures the session names (which are set by system properties) once, instead of for each line. */ @DeveloperApi object FixedSessionNames { val lineName = sessionNames.line val readName = sessionNames.read val evalName = sessionNames.eval val printName = sessionNames.print val resultName = sessionNames.result } /** Here is where we: * * 1) Read some source code, and put it in the "read" object. * 2) Evaluate the read object, and put the result in the "eval" object. * 3) Create a String for human consumption, and put it in the "print" object. * * Read! Eval! Print! Some of that not yet centralized here. */ class ReadEvalPrint(val lineId: Int) { def this() = this(freshLineId()) private var lastRun: Run = _ private var evalCaught: Option[Throwable] = None private var conditionalWarnings: List[ConditionalWarning] = Nil val packageName = "intp_id_" + sessionID + "." + FixedSessionNames.lineName + lineId // val packageName = FixedSessionNames.lineName + lineId val readName = FixedSessionNames.readName val evalName = FixedSessionNames.evalName val printName = FixedSessionNames.printName val resultName = FixedSessionNames.resultName def bindError(t: Throwable) = { // Immediately throw the exception if we are asked to propagate them if (propagateExceptions) { throw unwrap(t) } if (!bindExceptions) // avoid looping if already binding throw t val unwrapped = unwrap(t) withLastExceptionLock[String]({ directBind[Throwable]("lastException", unwrapped)(tagOfThrowable, classTag[Throwable]) util.stackTraceString(unwrapped) }, util.stackTraceString(unwrapped)) } // TODO: split it out into a package object and a regular // object and we can do that much less wrapping. def packageDecl = "package " + packageName def pathTo(name: String) = packageName + "." + name def packaged(code: String) = packageDecl + "\\n\\n" + code def readPath = pathTo(readName) def evalPath = pathTo(evalName) def printPath = pathTo(printName) def call(name: String, args: Any*): AnyRef = { val m = evalMethod(name) logDebug("Invoking: " + m) if (args.nonEmpty) logDebug(" with args: " + args.mkString(", ")) m.invoke(evalClass, args.map(_.asInstanceOf[AnyRef]): _*) } def callEither(name: String, args: Any*): Either[Throwable, AnyRef] = try Right(call(name, args: _*)) catch { case ex: Throwable => Left(ex) } def callOpt(name: String, args: Any*): Option[AnyRef] = try Some(call(name, args: _*)) catch { case ex: Throwable => bindError(ex); None } class EvalException(msg: String, cause: Throwable) extends RuntimeException(msg, cause) {} private def evalError(path: String, ex: Throwable) = throw new EvalException("Failed to load '" + path + "': " + ex.getMessage, ex) private def load(path: String): Class[_] = { try Class.forName(path, true, classLoader) catch { case ex: Throwable => evalError(path, unwrap(ex)) } } lazy val evalClass = load(evalPath) lazy val evalValue = callEither(resultName) match { case Left(ex) => evalCaught = Some(ex); None case Right(result) => Some(result) } def compile(source: String): Boolean = compileAndSaveRun("<console>", source) /** The innermost object inside the wrapper, found by * following accessPath into the outer one. 
*/
    def resolvePathToSymbol(accessPath: String): Symbol = {
      // val readRoot = getRequiredModule(readPath) // the outermost wrapper
      // MATEI: Changed this to getClass because the root object is no longer a module (Scala singleton object)
      val readRoot = rootMirror.getClassByName(newTypeName(readPath)) // the outermost wrapper
      (accessPath split '.').foldLeft(readRoot: Symbol) {
        case (sym, "") => sym
        case (sym, name) => afterTyper(termMember(sym, name))
      }
    }

    /** We get a bunch of repeated warnings for reasons I haven't
      * entirely figured out yet. For now, squash.
      */
    private def updateRecentWarnings(run: Run) {
      def loop(xs: List[(Position, String)]): List[(Position, String)] = xs match {
        case Nil => Nil
        case ((pos, msg)) :: rest =>
          val filtered = rest filter { case (pos0, msg0) =>
            (msg != msg0) || (pos.lineContent.trim != pos0.lineContent.trim) || {
              // same messages and same line content after whitespace removal
              // but we want to let through multiple warnings on the same line
              // from the same run. The untrimmed line will be the same since
              // there's no whitespace indenting blowing it.
              pos.lineContent == pos0.lineContent
            }
          }
          (pos, msg) :: loop(filtered)
      }
      // PRASHANT: This leads to a NoSuchMethodError for _.warnings. Yet to figure out its purpose.
      // val warnings = loop(run.allConditionalWarnings flatMap (_.warnings))
      // if (warnings.nonEmpty)
      //   mostRecentWarnings = warnings
    }

    private def evalMethod(name: String) = evalClass.getMethods filter (_.getName == name) match {
      case Array(method) => method
      case xs => sys.error("Internal error: eval object " + evalClass + ", " + xs.mkString("\n", "\n", ""))
    }

    private def compileAndSaveRun(label: String, code: String) = {
      showCodeIfDebugging(code)
      val (success, run) = compileSourcesKeepingRun(new BatchSourceFile(label, packaged(code)))
      updateRecentWarnings(run)
      lastRun = run
      success
    }
  }

  /** One line of code submitted by the user for interpretation */
  // private
  class Request(val line: String, val trees: List[Tree]) {
    val reqId = nextReqId()
    val lineRep = new ReadEvalPrint()

    private var _originalLine: String = null

    def withOriginalLine(s: String): this.type = { _originalLine = s; this }

    def originalLine = if (_originalLine == null) line else _originalLine

    /** handlers for each tree in this request */
    val handlers: List[MemberHandler] = trees map (memberHandlers chooseHandler _)

    def defHandlers = handlers collect { case x: MemberDefHandler => x }

    /** all (public) names defined by these statements */
    val definedNames = handlers flatMap (_.definedNames)

    /** list of names used by this expression */
    val referencedNames: List[Name] = handlers flatMap (_.referencedNames)

    /** def and val names */
    def termNames = handlers flatMap (_.definesTerm)
    def typeNames = handlers flatMap (_.definesType)
    def definedOrImported = handlers flatMap (_.definedOrImported)
    def definedSymbolList = defHandlers flatMap (_.definedSymbols)

    def definedTypeSymbol(name: String) = definedSymbols(newTypeName(name))
    def definedTermSymbol(name: String) = definedSymbols(newTermName(name))

    val definedClasses = handlers.exists {
      case _: ClassHandler => true
      case _ => false
    }

    /** Code to import bound names from previous lines - accessPath is code to
      * append to objectName to access anything bound by request.
*/
    val SparkComputedImports(importsPreamble, importsTrailer, accessPath) =
      importsCode(referencedNames.toSet, definedClasses)

    /** Code to access a variable with the specified name */
    def fullPath(vname: String) = {
      // lineRep.readPath + accessPath + ".`%s`".format(vname)
      lineRep.readPath + ".INSTANCE" + accessPath + ".`%s`".format(vname)
    }

    /** Same as fullpath, but after it has been flattened, so:
      *  $line5.$iw.$iw.$iw.Bippy      // fullPath
      *  $line5.$iw$$iw$$iw$Bippy      // fullFlatName
      */
    def fullFlatName(name: String) =
      // lineRep.readPath + accessPath.replace('.', '$') + nme.NAME_JOIN_STRING + name
      lineRep.readPath + ".INSTANCE" + accessPath.replace('.', '$') + nme.NAME_JOIN_STRING + name

    /** The unmangled symbol name, but supplemented with line info. */
    def disambiguated(name: Name): String = name + " (in " + lineRep + ")"

    /** Code to access a variable with the specified name */
    def fullPath(vname: Name): String = fullPath(vname.toString)

    /** the line of code to compute */
    def toCompute = line

    /** generate the source code for the object that computes this request */
    private object ObjectSourceCode extends CodeAssembler[MemberHandler] {
      def path = pathToTerm("$intp")
      def envLines = {
        if (!isReplPower) Nil // power mode only for now
        // $intp is not bound; punt, but include the line.
        else if (path == "$intp") List(
          "def $line = " + tquoted(originalLine),
          "def $trees = Nil"
        )
        else List(
          "def $line = " + tquoted(originalLine),
          "def $req = %s.requestForReqId(%s).orNull".format(path, reqId),
          "def $trees = if ($req eq null) Nil else $req.trees".format(lineRep.readName, path, reqId)
        )
      }

      val preamble = """
        |class %s extends Serializable {
        |  %s%s%s
      """.stripMargin.format(lineRep.readName, envLines.map("  " + _ + ";\n").mkString,
        importsPreamble, indentCode(toCompute))

      val postamble = importsTrailer + "\n}" + "\n" +
        "object " + lineRep.readName + " {\n" +
        " val INSTANCE = new " + lineRep.readName + "();\n" +
        "}\n"
      val generate = (m: MemberHandler) => m extraCodeToEvaluate Request.this

      /*
      val preamble = """
        |object %s extends Serializable {
        |%s%s%s
      """.stripMargin.format(lineRep.readName, envLines.map("  " + _ + ";\n").mkString,
        importsPreamble, indentCode(toCompute))
      val postamble = importsTrailer + "\n}"
      val generate = (m: MemberHandler) => m extraCodeToEvaluate Request.this
      */
    }

    private object ResultObjectSourceCode extends CodeAssembler[MemberHandler] {
      /** We only want to generate this code when the result
        * is a value which can be referred to as-is.
        */
      val evalResult =
        if (!handlers.last.definesValue) ""
        else handlers.last.definesTerm match {
          case Some(vname) if typeOf contains vname =>
            "lazy val %s = %s".format(lineRep.resultName, fullPath(vname))
          case _ => ""
        }

      // first line evaluates object to make sure constructor is run
      // initial "" so later code can uniformly be: + etc
      val preamble = """
      |object %s {
      |  %s
      |  val %s: String = %s {
      |    %s
      |    ("" """.stripMargin.format(
        lineRep.evalName, evalResult, lineRep.printName,
        executionWrapper, lineRep.readName + ".INSTANCE" + accessPath
      )

      val postamble = """
      |    )
      |  }
      |}
      """.stripMargin
      val generate = (m: MemberHandler) => m resultExtractionCode Request.this
    }

    // get it
    def getEvalTyped[T]: Option[T] = getEval map (_.asInstanceOf[T])

    def getEval: Option[AnyRef] = {
      // ensure it has been compiled
      compile
      // try to load it and call the value method
      lineRep.evalValue filterNot (_ == null)
    }

    /** Compile the object file. Returns whether the compilation succeeded.
      *  If all goes well, the "types" map is computed.
*/
    lazy val compile: Boolean = {
      // error counting is wrong, hence interpreter may overlook failure - so we reset
      reporter.reset()

      // compile the object containing the user's code
      lineRep.compile(ObjectSourceCode(handlers)) && {
        // extract and remember types
        typeOf
        typesOfDefinedTerms

        // Assign symbols to the original trees
        // TODO - just use the new trees.
        defHandlers foreach { dh =>
          val name = dh.member.name
          definedSymbols get name foreach { sym =>
            dh.member setSymbol sym
            logDebug("Set symbol of " + name + " to " + sym.defString)
          }
        }

        // compile the result-extraction object
        withoutWarnings(lineRep compile ResultObjectSourceCode(handlers))
      }
    }

    lazy val resultSymbol = lineRep.resolvePathToSymbol(accessPath)

    def applyToResultMember[T](name: Name, f: Symbol => T) =
      afterTyper(f(resultSymbol.info.nonPrivateDecl(name)))

    /* typeOf lookup with encoding */
    def lookupTypeOf(name: Name) =
      typeOf.getOrElse(name, typeOf(global.encode(name.toString)))

    def simpleNameOfType(name: TypeName) =
      (compilerTypeOf get name) map (_.typeSymbol.simpleName)

    private def typeMap[T](f: Type => T) =
      mapFrom[Name, Name, T](termNames ++ typeNames)(x => f(cleanMemberDecl(resultSymbol, x)))

    /** Types of variables defined by this request. */
    lazy val compilerTypeOf = typeMap[Type](x => x) withDefaultValue NoType

    /** String representations of same. */
    lazy val typeOf = typeMap[String](tp => afterTyper(tp.toString()))

    // lazy val definedTypes: Map[Name, Type] = {
    //   typeNames map (x => x -> afterTyper(resultSymbol.info.nonPrivateDecl(x).tpe)) toMap
    // }
    lazy val definedSymbols = (
      termNames.map(x => x -> applyToResultMember(x, x => x)) ++
      typeNames.map(x => x -> compilerTypeOf(x).typeSymbolDirect)
    ).toMap[Name, Symbol] withDefaultValue NoSymbol

    lazy val typesOfDefinedTerms =
      mapFrom[Name, Name, Type](termNames)(x => applyToResultMember(x, _.tpe))

    /** load and run the code using reflection */
    def loadAndRun: (String, Boolean) = {
      try { ("" + (lineRep call sessionNames.print), true) }
      catch { case ex: Throwable => (lineRep.bindError(ex), false) }
    }

    override def toString = "Request(line=%s, %s trees)".format(line, trees.size)
  }

  /**
   * Returns the name of the most recent interpreter result. Useful for
   * extracting information regarding the previous result.
   *
   * @return The simple name of the result (such as res0)
   */
  @DeveloperApi
  def mostRecentVar: String =
    if (mostRecentlyHandledTree.isEmpty) ""
    else "" + (mostRecentlyHandledTree.get match {
      case x: ValOrDefDef => x.name
      case Assign(Ident(name), _) => name
      case ModuleDef(_, name, _) => name
      case _ => naming.mostRecentVar
    })

  private var mostRecentWarnings: List[(global.Position, String)] = Nil

  /**
   * Returns a list of recent warnings from compiler execution.
* * @return The list of tuples (compiler position, warning) */ @DeveloperApi def lastWarnings = mostRecentWarnings def treesForRequestId(id: Int): List[Tree] = requestForReqId(id).toList flatMap (_.trees) def requestForReqId(id: Int): Option[Request] = if (executingRequest != null && executingRequest.reqId == id) Some(executingRequest) else prevRequests find (_.reqId == id) def requestForName(name: Name): Option[Request] = { assert(definedNameMap != null, "definedNameMap is null") definedNameMap get name } def requestForIdent(line: String): Option[Request] = requestForName(newTermName(line)) orElse requestForName(newTypeName(line)) def requestHistoryForName(name: Name): List[Request] = prevRequests.toList.reverse filter (_.definedNames contains name) def definitionForName(name: Name): Option[MemberHandler] = requestForName(name) flatMap { req => req.handlers find (_.definedNames contains name) } /** * Retrieves the object representing the id (variable name, method name, * class name, etc) provided. * * @param id The id (variable name, method name, class name, etc) whose * associated content to retrieve * * @return Some containing term name (id) representation if exists, else None */ @DeveloperApi def valueOfTerm(id: String): Option[AnyRef] = requestForName(newTermName(id)) flatMap (_.getEval) /** * Retrieves the class representing the id (variable name, method name, * class name, etc) provided. * * @param id The id (variable name, method name, class name, etc) whose * associated class to retrieve * * @return Some containing term name (id) class if exists, else None */ @DeveloperApi def classOfTerm(id: String): Option[JClass] = valueOfTerm(id) map (_.getClass) /** * Retrieves the type representing the id (variable name, method name, * class name, etc) provided. * * @param id The id (variable name, method name, class name, etc) whose * associated type to retrieve * * @return The Type information about the term name (id) provided */ @DeveloperApi def typeOfTerm(id: String): Type = newTermName(id) match { case nme.ROOTPKG => RootClass.tpe case name => requestForName(name).fold(NoType: Type)(_ compilerTypeOf name) } /** * Retrieves the symbol representing the id (variable name, method name, * class name, etc) provided. * * @param id The id (variable name, method name, class name, etc) whose * associated symbol to retrieve * * @return The Symbol information about the term name (id) provided */ @DeveloperApi def symbolOfTerm(id: String): Symbol = requestForIdent(newTermName(id)).fold(NoSymbol: Symbol)(_ definedTermSymbol id) // TODO: No use yet, but could be exposed as a DeveloperApi private def symbolOfType(id: String): Symbol = requestForName(newTypeName(id)).fold(NoSymbol: Symbol)(_ definedTypeSymbol id) /** * Retrieves the runtime class and type representing the id (variable name, * method name, class name, etc) provided. * * @param id The id (variable name, method name, class name, etc) whose * associated runtime class and type to retrieve * * @return Some runtime class and Type information as a tuple for the * provided term name if it exists, else None */ @DeveloperApi def runtimeClassAndTypeOfTerm(id: String): Option[(JClass, Type)] = { classOfTerm(id) flatMap { clazz => new RichClass(clazz).supers find (c => !new RichClass(c).isScalaAnonymous) map { nonAnon => (nonAnon, runtimeTypeOfTerm(id)) } } } /** * Retrieves the runtime type representing the id (variable name, * method name, class name, etc) provided. 
*
   * @param id The id (variable name, method name, class name, etc) whose
   *           associated runtime type to retrieve
   *
   * @return The runtime Type information about the term name (id) provided
   */
  @DeveloperApi
  def runtimeTypeOfTerm(id: String): Type = {
    typeOfTerm(id) andAlso { tpe =>
      val clazz = classOfTerm(id) getOrElse { return NoType }
      val staticSym = tpe.typeSymbol
      val runtimeSym = getClassIfDefined(clazz.getName)

      if ((runtimeSym != NoSymbol) &&
        (runtimeSym != staticSym) &&
        (runtimeSym isSubClass staticSym))
        runtimeSym.info
      else NoType
    }
  }

  private def cleanMemberDecl(owner: Symbol, member: Name): Type = afterTyper {
    normalizeNonPublic {
      owner.info.nonPrivateDecl(member).tpe match {
        case NullaryMethodType(tp) => tp
        case tp => tp
      }
    }
  }

  private object exprTyper extends {
    val repl: H2OIMain.this.type = imain
  } with H2OExprTyper {}

  /**
   * Constructs a list of abstract syntax trees representing the provided code.
   *
   * @param line The line of code to parse and construct into ASTs
   *
   * @return Some list of ASTs if the line is valid, else None
   */
  @DeveloperApi
  def parse(line: String): Option[List[Tree]] = exprTyper.parse(line)

  /**
   * Constructs a Symbol representing the final result of the expression
   * provided or representing the definition provided.
   *
   * @param code The line of code
   *
   * @return The Symbol or NoSymbol (found under scala.reflect.internal)
   */
  @DeveloperApi
  def symbolOfLine(code: String): Symbol = exprTyper.symbolOfLine(code)

  /**
   * Constructs type information based on the provided expression's final
   * result or the definition provided.
   *
   * @param expr The expression or definition
   *
   * @param silent Whether to output information while constructing the type
   *
   * @return The type information or an error
   */
  @DeveloperApi
  def typeOfExpression(expr: String, silent: Boolean = true): Type =
    exprTyper.typeOfExpression(expr, silent)

  protected def onlyTerms(xs: List[Name]) = xs collect { case x: TermName => x }
  protected def onlyTypes(xs: List[Name]) = xs collect { case x: TypeName => x }

  /**
   * Retrieves the defined, public names in the compiler.
   *
   * @return The list of matching "term" names
   */
  @DeveloperApi
  def definedTerms = onlyTerms(allDefinedNames) filterNot isInternalTermName

  /**
   * Retrieves the defined type names in the compiler.
   *
   * @return The list of matching type names
   */
  @DeveloperApi
  def definedTypes = onlyTypes(allDefinedNames)

  /**
   * Retrieves the defined symbols in the compiler.
   *
   * @return The set of matching Symbol instances
   */
  @DeveloperApi
  def definedSymbols = prevRequestList.flatMap(_.definedSymbols.values).toSet[Symbol]

  /**
   * Retrieves the list of public symbols in the compiler.
   *
   * @return The list of public Symbol instances
   */
  @DeveloperApi
  def definedSymbolList = prevRequestList flatMap (_.definedSymbolList) filterNot (s => isInternalTermName(s.name))

  // Terms with user-given names (i.e. not res0 and not synthetic)
  /**
   * Retrieves defined, public names that are not res0 or the result of a direct bind.
   *
   * @return The list of matching "term" names
   */
  @DeveloperApi
  def namedDefinedTerms = definedTerms filterNot (x => isUserVarName("" + x) || directlyBoundNames(x))

  private def findName(name: Name) = definedSymbols find (_.name == name) getOrElse NoSymbol

  /** Translate a repl-defined identifier into a Symbol.
*/
  private def apply(name: String): Symbol = types(name) orElse terms(name)

  private def types(name: String): Symbol = {
    val tpname = newTypeName(name)
    findName(tpname) orElse getClassIfDefined(tpname)
  }

  private def terms(name: String): Symbol = {
    val termname = newTermName(name)
    findName(termname) orElse getModuleIfDefined(termname)
  }

  // [Eugene to Paul] possibly you could make use of TypeTags here
  private def types[T: ClassTag]: Symbol = types(classTag[T].runtimeClass.getName)

  private def terms[T: ClassTag]: Symbol = terms(classTag[T].runtimeClass.getName)

  private def apply[T: ClassTag]: Symbol = apply(classTag[T].runtimeClass.getName)

  /**
   * Retrieves the Symbols representing classes in the compiler.
   *
   * @return The list of matching ClassSymbol instances
   */
  @DeveloperApi
  def classSymbols = allDefSymbols collect { case x: ClassSymbol => x }

  /**
   * Retrieves the Symbols representing methods in the compiler.
   *
   * @return The list of matching MethodSymbol instances
   */
  @DeveloperApi
  def methodSymbols = allDefSymbols collect { case x: MethodSymbol => x }

  /** the previous requests this interpreter has processed */
  private var executingRequest: Request = _
  private val prevRequests = mutable.ListBuffer[Request]()
  private val referencedNameMap = mutable.Map[Name, Request]()
  private val definedNameMap = mutable.Map[Name, Request]()
  private val directlyBoundNames = mutable.Set[Name]()

  private def allHandlers = prevRequestList flatMap (_.handlers)
  private def allDefHandlers = allHandlers collect { case x: MemberDefHandler => x }
  private def allDefSymbols = allDefHandlers map (_.symbol) filter (_ ne NoSymbol)

  private def lastRequest = if (prevRequests.isEmpty) null else prevRequests.last

  // NOTE: Exposed to repl package since used by SparkImports
  private[repl] def prevRequestList = prevRequests.toList

  private def allSeenTypes = prevRequestList flatMap (_.typeOf.values.toList) distinct

  private def allImplicits = allHandlers filter (_.definesImplicit) flatMap (_.definedNames)

  // NOTE: Exposed to repl package since used by SparkILoop and SparkImports
  private[repl] def importHandlers = allHandlers collect { case x: ImportHandler => x }

  /**
   * Retrieves a list of unique defined and imported names in the compiler.
   *
   * @return The list of "term" names
   */
  def visibleTermNames: List[Name] = definedTerms ++ importedTerms distinct

  /** Another entry point for tab-completion, ids in scope */
  // NOTE: Exposed to repl package since used by SparkJLineCompletion
  private[repl] def unqualifiedIds = visibleTermNames map (_.toString) filterNot (_ contains "$") sorted

  /** Parse the ScalaSig to find type aliases */
  private def aliasForType(path: String) = ByteCode.aliasForType(path)

  private def withoutUnwrapping(op: => Unit): Unit = {
    val saved = isettings.unwrapStrings
    isettings.unwrapStrings = false
    try op
    finally isettings.unwrapStrings = saved
  }

  // NOTE: Exposed to repl package since used by SparkILoop
  private[repl] def symbolDefString(sym: Symbol) = {
    TypeStrings.quieter(
      afterTyper(sym.defString),
      sym.owner.name + ".this.",
      sym.owner.fullName + "."
    )
  }

  private def showCodeIfDebugging(code: String) {
    /** Secret bookcase entrance for repl debuggers: end the line
      * with "// show" and see what's going on.
*/
    def isShow = code.lines exists (_.trim endsWith "// show")
    def isShowRaw = code.lines exists (_.trim endsWith "// raw")

    // old style
    beSilentDuring(parse(code)) foreach { ts =>
      ts foreach { t =>
        if (isShow || isShowRaw)
          withoutUnwrapping(echo(asCompactString(t)))
        else
          withoutUnwrapping(logDebug(asCompactString(t)))
      }
    }
  }

  // debugging
  // NOTE: Exposed to repl package since accessed indirectly from SparkIMain
  //       and SparkJLineCompletion
  private[repl] def debugging[T](msg: String)(res: T) = {
    logDebug(msg + " " + res)
    res
  }
}

/** Utility methods for the Interpreter. */
object H2OIMain {
  // The two name forms this is catching are the two sides of this assignment:
  //
  // $line3.$read.$iw.$iw.Bippy =
  //   $line3.$read$$iw$$iw$Bippy@4a6a00ca
  private def removeLineWrapper(s: String) =
    s.replaceAll("""\$line\d+[./]\$(read|eval|print)[$.]""", "")

  private def removeIWPackages(s: String) =
    s.replaceAll("""\$(iw|iwC|read|eval|print)[$.]""", "")

  private def removeSparkVals(s: String) =
    s.replaceAll("""\$VAL[0-9]+[$.]""", "")

  def stripString(s: String) =
    removeSparkVals(removeIWPackages(removeLineWrapper(s)))

  trait CodeAssembler[T] {
    def preamble: String
    def generate: T => String
    def postamble: String

    def apply(contributors: List[T]): String = stringFromWriter { code =>
      code println preamble
      contributors map generate foreach (code println _)
      code println postamble
    }
  }

  trait StrippingWriter {
    def isStripping: Boolean
    def stripImpl(str: String): String
    def strip(str: String): String = if (isStripping) stripImpl(str) else str
  }

  trait TruncatingWriter {
    def maxStringLength: Int
    def isTruncating: Boolean
    def truncate(str: String): String = {
      if (isTruncating && (maxStringLength != 0 && str.length > maxStringLength))
        (str take maxStringLength - 3) + "..."
      else str
    }
  }

  abstract class StrippingTruncatingWriter(out: JPrintWriter, outWriter: StringWriter)
    extends JPrintWriter(out) with StrippingWriter with TruncatingWriter {
    self =>

    def clean(str: String): String = truncate(strip(str))
    override def write(str: String) = {
      // when testing print whole interpreter response
      if (!sys.props.contains("spark.testing")) {
        super.flush()
        outWriter.getBuffer.setLength(0)
      }
      super.write(clean(str))
    }
  }

  class ReplStrippingWriter(intp: H2OIMain) extends StrippingTruncatingWriter(intp.out, intp.outWriter) {
    import intp._
    def maxStringLength = isettings.maxPrintString
    def isStripping = isettings.unwrapStrings
    def isTruncating = reporter.truncationOK
    def stripImpl(str: String): String = naming.unmangle(str)
  }

  class ReplReporter(intp: H2OIMain) extends ConsoleReporter(intp.settings, null, new ReplStrippingWriter(intp)) {
    override def printMessage(msg: String) {
      // Avoiding deadlock when the compiler starts logging before
      // the lazy val is done.
      if (intp.isInitializeComplete) {
        if (intp.totalSilence) ()
        else super.printMessage(msg)
      }
      else Console.println(msg)
    }
  }
}

class H2OISettings(intp: H2OIMain) extends Logging {
  /** A list of paths where :load should look */
  var loadPath = List(".")

  /** Set this to true to see repl machinery under -Yrich-exceptions. */
  var showInternalStackTraces = true

  /** The maximum length of toString to use when printing the result
    *  of an evaluation. 0 means no maximum. If a printout requires
    *  more than this number of characters, then the printout is
    *  truncated.
    */
  var maxPrintString = 800

  /** The maximum number of completion candidates to print for tab
    *  completion without requiring confirmation.
    */
  var maxAutoprintCompletion = 250

  /** String unwrapping can be disabled if it is causing issues.
* Setting this to false means you will see Strings like "$iw.$iw.".
    */
  var unwrapStrings = false

  def deprecation_=(x: Boolean) = {
    val old = intp.settings.deprecation.value
    intp.settings.deprecation.value = x
    if (!old && x) logDebug("Enabled -deprecation output.")
    else if (old && !x) logDebug("Disabled -deprecation output.")
  }

  def deprecation: Boolean = intp.settings.deprecation.value

  def allSettings = Map(
    "maxPrintString" -> maxPrintString,
    "maxAutoprintCompletion" -> maxAutoprintCompletion,
    "unwrapStrings" -> unwrapStrings,
    "deprecation" -> deprecation
  )

  private def allSettingsString =
    allSettings.toList sortBy (_._1) map { case (k, v) => "  " + k + " = " + v + "\n" } mkString

  override def toString = """
    | H2OISettings {
    | %s
    | }""".stripMargin.format(allSettingsString)
}
// scalastyle:on
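
// ---------------------------------------------------------------------------
// Editor's note: a minimal, hedged usage sketch of the bind()/interpret()
// pattern implemented above. It uses the stock scala.tools.nsc IMain API
// (not H2OIMain itself) and assumes scala-compiler is on the classpath;
// "answer" and the interpreted snippet are invented for illustration.
// ---------------------------------------------------------------------------
import scala.tools.nsc.Settings
import scala.tools.nsc.interpreter.IMain

object BindUsageSketch extends App {
  val settings = new Settings
  settings.usejavacp.value = true          // let the interpreter see our classpath
  val intp = new IMain(settings)
  intp.bind("answer", "Int", 42)           // same (name, boundType, value) shape as bind() above
  intp.interpret("println(answer + 1)")    // the interpreted line sees the bound value; prints 43
  intp.close()
}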
nilbody/sparkling-water
core/src/main/scala/org/apache/spark/repl/H2OIMain.scala
Scala
apache-2.0
67,550
/* * Copyright © 2014 TU Berlin ([email protected]) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.emmalanguage package compiler.lang.core import compiler.BaseCompilerSpec /** A spec for the `LNF.dce` transformation. */ class DCESpec extends BaseCompilerSpec { import compiler._ import u.reify val dcePipeline: u.Expr[Any] => u.Tree = pipeline(typeCheck = true)( Core.lnf, tree => time(DCE.transform(tree), "dce") ).compose(_.tree) "eliminate unused valdefs" - { "directly" in { //noinspection ScalaUnusedSymbol val act = dcePipeline(reify { val x = 15 15 * t._1 }) val exp = idPipeline(reify { val x$1: this.t.type = t val x$2 = x$1._1 val x$3 = 15 * x$2 x$3 }) act shouldBe alphaEqTo(exp) } "transitively" in { //noinspection ScalaUnusedSymbol val act = dcePipeline(reify { val x = 15 val y = 2 * x 15 * t._1 }) val exp = idPipeline(reify { val x$1: this.t.type = t val x$2 = x$1._1 val x$3 = 15 * x$2 x$3 }) act shouldBe alphaEqTo(exp) } } "don't eliminate unit valdefs" - { "println" in { val act = dcePipeline(reify { println("alma") val x = 5 x }) //noinspection ScalaUnusedSymbol val exp = idPipeline(reify { val res = println("alma") val x = 5 x }) act shouldBe alphaEqTo(exp) } } }
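
// ---------------------------------------------------------------------------
// Editor's note: a standalone sketch of the idea the spec above exercises.
// Dead-code elimination over a let-normal form is a liveness fixed point on
// the valdef dependency graph; everything not reachable from the result is
// dropped. All names here (deps, live, "x", "y", "r") are invented.
// ---------------------------------------------------------------------------
object DceLivenessSketch extends App {
  // "x" is unused and "y" depends only on "x", mirroring the transitive case
  val deps = Map("x" -> Set.empty[String], "y" -> Set("x"), "r" -> Set.empty[String])

  def live(roots: Set[String]): Set[String] = {
    val next = roots ++ roots.flatMap(deps.getOrElse(_, Set.empty[String]))
    if (next == roots) roots else live(next)
  }

  println(live(Set("r"))) // Set(r): "x" and "y" are dead and can be eliminated
}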
aalexandrov/emma
emma-language/src/test/scala/org/emmalanguage/compiler/lang/core/DCESpec.scala
Scala
apache-2.0
2,058
package lila.simul import play.api.libs.json._ import lila.common.LightUser import lila.game.{ Game, GameRepo } import lila.user.User final class JsonView( gameRepo: GameRepo, getLightUser: LightUser.Getter, proxyRepo: lila.round.GameProxyRepo )(implicit ec: scala.concurrent.ExecutionContext) { implicit private val colorWriter: Writes[chess.Color] = Writes { c => JsString(c.name) } implicit private val simulTeamWriter = Json.writes[SimulTeam] private def fetchGames(simul: Simul) = if (simul.isFinished) gameRepo gamesFromSecondary simul.gameIds else simul.gameIds.map(proxyRepo.game).sequenceFu.dmap(_.flatten) def apply(simul: Simul, team: Option[SimulTeam]): Fu[JsObject] = for { games <- fetchGames(simul) lightHost <- getLightUser(simul.hostId) applicants <- simul.applicants.sortBy(-_.player.rating).map(applicantJson).sequenceFu pairingOptions <- simul.pairings .sortBy(-_.player.rating) .map(pairingJson(games, simul.hostId)) .sequenceFu pairings = pairingOptions.flatten } yield baseSimul(simul, lightHost) ++ Json .obj( "applicants" -> applicants, "pairings" -> pairings ) .add("team", team) .add("quote" -> simul.isCreated.option(lila.quote.Quote.one(simul.id))) def api(simul: Simul): Fu[JsObject] = getLightUser(simul.hostId) map { lightHost => baseSimul(simul, lightHost) ++ Json.obj( "nbApplicants" -> simul.applicants.size, "nbPairings" -> simul.pairings.size ) } def api(simuls: List[Simul]): Fu[JsArray] = simuls.map(api).sequenceFu map JsArray.apply def apiAll( pending: List[Simul], created: List[Simul], started: List[Simul], finished: List[Simul] ): Fu[JsObject] = for { pendingJson <- api(pending) createdJson <- api(created) startedJson <- api(started) finishedJson <- api(finished) } yield Json.obj( "pending" -> pendingJson, "created" -> createdJson, "started" -> startedJson, "finished" -> finishedJson ) private def baseSimul(simul: Simul, lightHost: Option[LightUser]) = Json.obj( "id" -> simul.id, "host" -> lightHost.map { host => Json .obj( "id" -> host.id, "name" -> host.name, "rating" -> simul.hostRating ) .add("gameId" -> simul.hostGameId.ifTrue(simul.isRunning)) .add("title" -> host.title) .add("patron" -> host.isPatron) }, "name" -> simul.name, "fullName" -> simul.fullName, "variants" -> simul.variants.map(variantJson(chess.Speed(simul.clock.config.some))), "isCreated" -> simul.isCreated, "isRunning" -> simul.isRunning, "isFinished" -> simul.isFinished, "text" -> simul.text ) private def variantJson(speed: chess.Speed)(v: chess.variant.Variant) = Json.obj( "key" -> v.key, "icon" -> lila.game.PerfPicker.perfType(speed, v, none).map(_.iconChar.toString), "name" -> v.name ) private def playerJson(player: SimulPlayer): Fu[JsObject] = getLightUser(player.user) map { light => Json .obj( "id" -> player.user, "rating" -> player.rating ) .add("name" -> light.map(_.name)) .add("title" -> light.map(_.title)) .add("provisional" -> ~player.provisional) .add("patron" -> light.??(_.isPatron)) } private def applicantJson(app: SimulApplicant): Fu[JsObject] = playerJson(app.player) map { player => Json.obj( "player" -> player, "variant" -> app.player.variant.key, "accepted" -> app.accepted ) } private def gameJson(hostId: User.ID, g: Game) = Json .obj( "id" -> g.id, "status" -> g.status.id, "fen" -> (chess.format.Forsyth boardAndColor g.situation), "lastMove" -> ~g.lastMoveKeys, "orient" -> g.playerByUserId(hostId).map(_.color) ) .add( "clock" -> g.clock.ifTrue(g.isBeingPlayed).map { c => Json.obj( "white" -> c.remainingTime(chess.White).roundSeconds, "black" -> 
c.remainingTime(chess.Black).roundSeconds ) } ) .add("winner" -> g.winnerColor.map(_.name)) private def pairingJson(games: List[Game], hostId: String)(p: SimulPairing): Fu[Option[JsObject]] = games.find(_.id == p.gameId) ?? { game => playerJson(p.player) map { player => Json .obj( "player" -> player, "variant" -> p.player.variant.key, "hostColor" -> p.hostColor, "game" -> gameJson(hostId, game) ) .some } } }
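
// ---------------------------------------------------------------------------
// Editor's note: the `.add` calls above come from lila's JSON helpers; this
// is a hedged, minimal stand-in showing the pattern (append a field only
// when the Option is defined). JsObjectOps and the field names are invented,
// not lila's actual API.
// ---------------------------------------------------------------------------
import play.api.libs.json._

object OptionalFieldSketch extends App {
  implicit class JsObjectOps(obj: JsObject) {
    def add[A: Writes](field: (String, Option[A])): JsObject =
      field._2.fold(obj)(a => obj + (field._1 -> Json.toJson(a)))
  }

  println(Json.obj("id" -> "sim1").add("team" -> Some("team-a")))        // field present
  println(Json.obj("id" -> "sim1").add("team" -> Option.empty[String]))  // field omitted
}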
luanlv/lila
modules/simul/src/main/JsonView.scala
Scala
mit
4,865
/* * Copyright 2016 by Simba Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.simba import java.util.concurrent.locks.ReentrantReadWriteLock import org.apache.spark.internal.Logging import org.apache.spark.sql.simba.index._ import org.apache.spark.sql.{Dataset => SQLDataset} import org.apache.spark.sql.catalyst.expressions.Attribute import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, SubqueryAlias} import org.apache.spark.storage.StorageLevel import org.apache.spark.storage.StorageLevel._ import scala.collection.mutable.ArrayBuffer /** * Created by dong on 1/20/16. * Index Manager for Simba */ private case class IndexedData(name: String, plan: LogicalPlan, indexedData: IndexedRelation) case class IndexInfo(tableName: String, indexName: String, attributes: Seq[Attribute], indexType: IndexType, location: String, storageLevel: StorageLevel) extends Serializable private[simba] class IndexManager extends Logging { @transient private val indexedData = new ArrayBuffer[IndexedData] @transient private val indexLock = new ReentrantReadWriteLock @transient private val indexInfos = new ArrayBuffer[IndexInfo] def getIndexInfo: Array[IndexInfo] = indexInfos.toArray private def readLock[A](f: => A): A = { val lock = indexLock.readLock() lock.lock() try f finally { lock.unlock() } } private def writeLock[A](f: => A): A = { val lock = indexLock.writeLock() lock.lock() try f finally { lock.unlock() } } private[simba] def isEmpty: Boolean = readLock { indexedData.isEmpty } private[simba] def lookupIndexedData(query: SQLDataset[_]): Option[IndexedData] = readLock { val tmp_res = indexedData.find(cd => query.queryExecution.analyzed.sameResult(cd.plan)) if (tmp_res.nonEmpty) return tmp_res else { indexedData.find(cd => { cd.plan match { case tmp_plan: SubqueryAlias => query.queryExecution.analyzed.sameResult(tmp_plan.child) case _ => false } }) } } private[simba] def lookupIndexedData(plan: LogicalPlan): Option[IndexedData] = readLock { val tmp_res = indexedData.find(cd => plan.sameResult(cd.plan)) if (tmp_res.nonEmpty) return tmp_res else { indexedData.find(cd => { cd.plan match { case tmp_plan: SubqueryAlias => plan.sameResult(tmp_plan.child) case _ => false } }) } } private[simba] def lookupIndexedData(query: SQLDataset[_], indexName: String): Option[IndexedData] = readLock { lookupIndexedData(query.queryExecution.analyzed, indexName) } private[simba] def lookupIndexedData(plan: LogicalPlan, indexName: String): Option[IndexedData] = readLock { val tmp_res = indexedData.find(cd => plan.sameResult(cd.plan) && cd.name.equals(indexName)) if (tmp_res.nonEmpty) return tmp_res else { indexedData.find(cd => { cd.plan match { case tmp_plan: SubqueryAlias => plan.sameResult(tmp_plan.child) && cd.name.equals(indexName) case _ => false } }) } } private[simba] def persistIndex(simbaSession: SimbaSession, indexName: String, fileName: String): Unit = { val dataIndex = indexedData.indexWhere(cd => cd.name.equals(indexName)) require(dataIndex >= 0, "Index not found!") val preData = 
indexInfos(dataIndex) val indexedItem = indexedData(dataIndex) val sparkContext = simbaSession.sparkContext sparkContext.parallelize(Array(indexedItem.plan)).saveAsObjectFile(fileName + "/plan") sparkContext.parallelize(Array(preData)).saveAsObjectFile(fileName + "/indexInfo") if (preData.indexType == RTreeType) { val rtreeRelation = indexedItem.indexedData.asInstanceOf[RTreeIndexedRelation] sparkContext.parallelize(Array(rtreeRelation)).saveAsObjectFile(fileName + "/rtreeRelation") rtreeRelation._indexedRDD.saveAsObjectFile(fileName + "/rdd") } else if (preData.indexType == TreeMapType) { val treeMapRelation = indexedItem.indexedData.asInstanceOf[TreeMapIndexedRelation] sparkContext.parallelize(Array(treeMapRelation)).saveAsObjectFile(fileName + "/treeMapRelation") treeMapRelation._indexedRDD.saveAsObjectFile(fileName + "/rdd") } else if (preData.indexType == TreapType) { val treapRelation = indexedItem.indexedData.asInstanceOf[TreapIndexedRelation] sparkContext.parallelize(Array(treapRelation)).saveAsObjectFile(fileName + "/treapRelation") treapRelation._indexedRDD.saveAsObjectFile(fileName + "/rdd") } indexInfos(dataIndex) = IndexInfo(preData.tableName, preData.indexName, preData.attributes, preData.indexType, fileName, preData.storageLevel) } private[simba] def loadIndex(simbaSession: SimbaSession, indexName: String, fileName: String): Unit = { val sparkContext = simbaSession.sparkContext val info = sparkContext.objectFile[IndexInfo](fileName + "/indexInfo").collect().head val plan = sparkContext.objectFile[LogicalPlan](fileName + "/plan").collect().head val rdd = sparkContext.objectFile[IPartition](fileName + "/rdd") if (info.indexType == RTreeType){ val rtreeRelation = sparkContext.objectFile[RTreeIndexedRelation](fileName + "/rtreeRelation").collect().head indexedData += IndexedData(indexName, plan, RTreeIndexedRelation(rtreeRelation.output, rtreeRelation.child, rtreeRelation.table_name, rtreeRelation.column_keys, rtreeRelation.index_name)(rdd, rtreeRelation.global_rtree)) } else if (info.indexType == TreeMapType) { val treeMapRelation = sparkContext.objectFile[TreeMapIndexedRelation](fileName + "/treeMapRelation").collect().head indexedData += IndexedData(indexName, plan, TreeMapIndexedRelation(treeMapRelation.output, treeMapRelation.child, treeMapRelation.table_name, treeMapRelation.column_keys, treeMapRelation.index_name)(rdd, treeMapRelation.range_bounds)) } else if (info.indexType == TreapType) { val treapRelation = sparkContext.objectFile[TreapIndexedRelation](fileName + "/treapRelation").collect().head indexedData += IndexedData(indexName, plan, TreapIndexedRelation(treapRelation.output, treapRelation.child, treapRelation.table_name, treapRelation.column_keys, treapRelation.index_name)(rdd, treapRelation.range_bounds)) } indexInfos += info } private[simba] def setStorageLevel(query: SQLDataset[_], indexName: String, newLevel: StorageLevel): Unit = writeLock { val dataIndex = indexedData.indexWhere { cd => query.queryExecution.analyzed.sameResult(cd.plan) && cd.name.equals(indexName) } require(dataIndex >= 0, "Index not found!") val preData = indexInfos(dataIndex) indexInfos(dataIndex) = IndexInfo(preData.tableName, preData.indexName, preData.attributes, preData.indexType, preData.location, newLevel) } private[simba] def createIndexQuery(query: SQLDataset[_], indexType: IndexType, indexName: String, column: List[Attribute], tableName: Option[String] = None, storageLevel: StorageLevel = MEMORY_AND_DISK): Unit = writeLock { val planToIndex = query.queryExecution.analyzed if 
(lookupIndexedData(planToIndex).nonEmpty) {
      // scalastyle:off println
      println("Index for the data has already been built.")
      // scalastyle:on println
    } else {
      indexedData += IndexedData(indexName, planToIndex,
        IndexedRelation(query.queryExecution.executedPlan, tableName, indexType, column, indexName))
      indexInfos += IndexInfo(tableName.getOrElse("anonymous"), indexName,
        column, indexType, "", storageLevel)
    }
  }

  private[simba] def showQuery(tableName: String): Unit = readLock {
    indexInfos.map(row => {
      if (row.tableName.equals(tableName)) {
        // scalastyle:off println
        println("Index " + row.indexName + " {")
        println("\tTable: " + tableName)
        print("\tOn column: (")
        for (i <- row.attributes.indices)
          if (i != row.attributes.length - 1) {
            print(row.attributes(i).name + ", ")
          } else println(row.attributes(i).name + ")")
        println("\tIndex Type: " + row.indexType.toString)
        println("}")
        // scalastyle:on println
      }
      row
    })
  }

  private[simba] def dropIndexQuery(query: Dataset[_], blocking: Boolean = true): Unit = writeLock {
    val planToIndex = query.queryExecution.analyzed
    var hasFound = false
    var found = true
    while (found) {
      val dataIndex = indexedData.indexWhere(cd => planToIndex.sameResult(cd.plan))
      if (dataIndex < 0) found = false
      else {
        hasFound = true
        indexedData(dataIndex).indexedData.indexedRDD.unpersist(blocking)
        indexedData.remove(dataIndex)
        indexInfos.remove(dataIndex)
      }
    }
  }

  private[simba] def dropIndexByNameQuery(query: SQLDataset[_],
                                          indexName: String,
                                          blocking: Boolean = true): Unit = writeLock {
    val planToIndex = query.queryExecution.analyzed
    val dataIndex = indexedData.indexWhere { cd =>
      planToIndex.sameResult(cd.plan) && cd.name.equals(indexName)
    }
    require(dataIndex >= 0, s"Table $query or index $indexName is not indexed.")
    indexedData(dataIndex).indexedData.indexedRDD.unpersist(blocking)
    indexedData.remove(dataIndex)
    indexInfos.remove(dataIndex)
  }

  private[simba] def dropIndexByColumnQuery(query: SQLDataset[_],
                                            column: List[Attribute],
                                            blocking: Boolean = true): Unit = writeLock {
    val planToIndex = query.queryExecution.analyzed
    var dataIndex = -1
    for (i <- indexInfos.indices) {
      val cd = indexedData(i)
      val row = indexInfos(i)
      if (planToIndex.sameResult(cd.plan) && row.attributes.equals(column)) {
        dataIndex = i
      }
    }
    require(dataIndex >= 0, s"Table $query or Index on $column is not indexed.")
    indexedData(dataIndex).indexedData.indexedRDD.unpersist(blocking)
    indexedData.remove(dataIndex)
    indexInfos.remove(dataIndex)
  }

  private[simba] def tryDropIndexQuery(query: SQLDataset[_],
                                       blocking: Boolean = true): Boolean = writeLock {
    val planToIndex = query.queryExecution.analyzed
    var found = true
    var hasFound = false
    while (found) {
      val dataIndex = indexedData.indexWhere(cd => planToIndex.sameResult(cd.plan))
      found = dataIndex >= 0
      if (found) {
        hasFound = true
        indexedData(dataIndex).indexedData.indexedRDD.unpersist(blocking)
        indexedData.remove(dataIndex)
        indexInfos.remove(dataIndex)
      }
    }
    hasFound
  }

  private[simba] def tryDropIndexByNameQuery(query: SQLDataset[_],
                                             indexName: String,
                                             blocking: Boolean = true): Boolean = writeLock {
    val planToCache = query.queryExecution.analyzed
    val dataIndex = indexedData.indexWhere(cd => planToCache.sameResult(cd.plan))
    val found = dataIndex >= 0
    if (found) {
      indexedData(dataIndex).indexedData.indexedRDD.unpersist(blocking)
      indexedData.remove(dataIndex)
      indexInfos.remove(dataIndex)
    }
    found
  }

  private[simba] def clearIndex(): Unit = writeLock {
    indexedData.foreach(_.indexedData.indexedRDD.unpersist())
    indexedData.clear()
    indexInfos.clear()
  }

  private[simba] def
useIndexedData(plan: LogicalPlan): LogicalPlan = { plan transformDown { case currentFragment => lookupIndexedData(currentFragment) .map(_.indexedData.withOutput(currentFragment.output)) .getOrElse(currentFragment) } } }
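
// ---------------------------------------------------------------------------
// Editor's note: a self-contained sketch of the readLock/writeLock helpers
// used throughout IndexManager; wrapping ReentrantReadWriteLock in by-name
// helpers guarantees unlock on every exit path. LockDemo and its registry
// map are invented for illustration.
// ---------------------------------------------------------------------------
import java.util.concurrent.locks.ReentrantReadWriteLock

object LockDemo extends App {
  private val rw = new ReentrantReadWriteLock
  private var registry = Map.empty[String, Int]

  def readLock[A](f: => A): A = { val l = rw.readLock(); l.lock(); try f finally l.unlock() }
  def writeLock[A](f: => A): A = { val l = rw.writeLock(); l.lock(); try f finally l.unlock() }

  writeLock { registry += ("rtree" -> 1) }      // mutation under the write lock
  println(readLock { registry.get("rtree") })   // Some(1), read under the read lock
}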
InitialDLab/Simba
src/main/scala/org/apache/spark/sql/simba/IndexManager.scala
Scala
apache-2.0
12,660
/* * Copyright 2015 Daniel W. H. James * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package play.filters.cors import scala.concurrent.Future import play.api.{ Configuration, Play } import play.api.libs.concurrent.Execution.Implicits.defaultContext import play.api.mvc.{ ActionBuilder, Request, Result } /** An [[play.api.mvc.ActionBuilder ActionBuilder]] that implements Cross-Origin Resource Sharing (CORS) * * @see [[CORSFilter]] * @see [[http://www.w3.org/TR/cors/ CORS specification]] */ trait CORSActionBuilder extends ActionBuilder[Request] with AbstractCORSPolicy { override protected val logger = Play.logger override def invokeBlock[A](request: Request[A], block: Request[A] => Future[Result]): Future[Result] = { filterRequest(() => block(request), request) } } /** An [[play.api.mvc.ActionBuilder ActionBuilder]] that implements Cross-Origin Resource Sharing (CORS) * * It can be configured to... * * - allow only requests with origins from a whitelist (by default all origins are allowed) * - allow only HTTP methods from a whitelist for preflight requests (by default all methods are allowed) * - allow only HTTP headers from a whitelist for preflight requests (by default all methods are allowed) * - set custom HTTP headers to be exposed in the response (by default no headers are exposed) * - disable/enable support for credentials (by default credentials support is enabled) * - set how long (in seconds) the results of a preflight request can be cached in a preflight result cache (by default 3600 seconds, 1 hour) * * @example * {{{ * CORSActionBuilder { Ok } // an action that uses the application configuration * * CORSActionBuilder("my-conf-path") { Ok } // an action that uses a subtree of the application configuration * * val corsConfig: CORSConfig = ... * CORSActionBuilder(conf) { Ok } // an action that uses a locally defined configuration * }}} * * @see [[CORSFilter]] * @see [[http://www.w3.org/TR/cors/ CORS specification]] */ object CORSActionBuilder extends CORSActionBuilder { private def globalConf = Play.maybeApplication.map(_.configuration).getOrElse(Configuration.empty) override protected def corsConfig = CORSConfig.fromConfiguration(globalConf) /** Construct an action builder that uses a subtree of the application configuration. * * @param configPath The path to the subtree of the application configuration. */ def apply(configPath: String): CORSActionBuilder = new CORSActionBuilder { override protected def corsConfig = CORSConfig.fromConfiguration( Play.maybeApplication.flatMap( _.configuration.getConfig(configPath)).getOrElse(Configuration.empty)) } /** Construct an action builder that uses locally defined configuration. * * @param config The local configuration to use in place of the global configuration. * @see [[CORSConfig]] */ def apply(config: CORSConfig): CORSActionBuilder = new CORSActionBuilder { override protected val corsConfig = config } }
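
// ---------------------------------------------------------------------------
// Editor's note: a hedged usage sketch wiring the construction styles from
// the scaladoc above into a controller. ExampleController and the "cors"
// config path are invented; a running Play 2.x application is assumed.
// ---------------------------------------------------------------------------
import play.api.mvc.Controller
import play.filters.cors.CORSActionBuilder

object ExampleController extends Controller {
  // uses the global application configuration
  def fromGlobalConf = CORSActionBuilder { Ok("CORS via application configuration") }

  // uses only the (hypothetical) "cors" subtree of the configuration
  def fromSubtree = CORSActionBuilder("cors") { Ok("CORS via a configuration subtree") }
}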
dwhjames/play-cors
src/main/scala/play/filters/cors/CORSActionBuilder.scala
Scala
apache-2.0
3,604
package com.github.log0ymxm.mapper class UnionFind(val n: Int) { private[this] val comp = (0 until n).toArray[Int] private[this] val compSize = Array.fill[Int](n)(1) private[this] var nComp: Int = n def add(u: Int, v: Int): Int = { val uRoot = find(u) val vRoot = find(v) if (uRoot != vRoot) { if (compSize(uRoot) < compSize(vRoot)) { comp(uRoot) = vRoot compSize(vRoot) += compSize(uRoot) } else { comp(vRoot) = uRoot compSize(uRoot) += compSize(vRoot) } nComp -= 1 } return compSize(find(u)) } def find(u: Int): Int = { if (u == comp(u)) { u } else { var compIdx = u while (compIdx != comp(compIdx)) { comp(compIdx) = comp(comp(compIdx)) compIdx = comp(compIdx) } compIdx } } def nComponents: Int = nComp def connected(u: Int, v: Int): Boolean = find(u) == find(v) }
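
// ---------------------------------------------------------------------------
// Editor's note: a quick usage sketch for the structure above. add() merges
// the components of its two arguments (union by size, with path compression
// in find); the values below are illustrative only.
// ---------------------------------------------------------------------------
import com.github.log0ymxm.mapper.UnionFind

object UnionFindDemo extends App {
  val uf = new UnionFind(5)     // components: {0} {1} {2} {3} {4}
  uf.add(0, 1)                  // merge 0 and 1
  uf.add(3, 4)                  // merge 3 and 4
  println(uf.connected(0, 1))   // true
  println(uf.connected(1, 3))   // false
  println(uf.nComponents)       // 3, i.e. {0,1} {2} {3,4}
}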
log0ymxm/spark-mapper
src/main/scala/com/github/log0ymxm/mapper/UnionFind.scala
Scala
apache-2.0
927
package me.invkrh.raft.core

import scala.collection.mutable.ArrayBuffer
import scala.concurrent.ExecutionContextExecutor
import scala.concurrent.duration._
import scala.language.postfixOps

import akka.actor.{Actor, ActorRef, ActorSystem, Props, Scheduler}
import akka.pattern.pipe

import me.invkrh.raft.deploy.raftServerName
import me.invkrh.raft.exception._
import me.invkrh.raft.message._
import me.invkrh.raft.message.AdminMessage._
import me.invkrh.raft.message.ClientMessage._
import me.invkrh.raft.message.RPCMessage._
import me.invkrh.raft.message.TimerMessage._
import me.invkrh.raft.storage.DataStore
import me.invkrh.raft.util._

// TODO: reduce resource usage, cpu and network
object Server {
  def props(
    id: Int,
    minElectionTime: FiniteDuration,
    maxElectionTime: FiniteDuration,
    tickTime: FiniteDuration,
    rcpRetries: Int,
    dataStore: DataStore): Props = {
    if (minElectionTime <= tickTime) {
      throw HeartbeatIntervalException()
    }
    Props(new Server(id, minElectionTime, maxElectionTime, tickTime, rcpRetries, dataStore))
  }

  def props(id: Int, conf: ServerConf): Props = {
    props(
      id,
      conf.minElectionTime,
      conf.maxElectionTime,
      conf.tickTime,
      conf.rpcRetries,
      conf.dataStore)
  }

  def run(
    id: Int,
    minElectionTime: FiniteDuration,
    maxElectionTime: FiniteDuration,
    tickTime: FiniteDuration,
    rpcRetries: Int,
    dataStore: DataStore,
    name: String)(implicit system: ActorSystem): ActorRef = {
    system.actorOf(
      props(id, minElectionTime, maxElectionTime, tickTime, rpcRetries, dataStore), name)
  }

  def run(id: Int, serverConf: ServerConf)(implicit system: ActorSystem): ActorRef = {
    system.actorOf(props(id, serverConf), s"$raftServerName-$id")
  }

  def syncLogsFromLeader(request: AppendEntries, logs: List[LogEntry]): (List[LogEntry], Int) = {
    def logMerge(requestLogs: List[LogEntry], localLogs: List[LogEntry]): List[LogEntry] = {
      (requestLogs, localLogs) match {
        case (x +: xs, y +: ys) =>
          if (x.term == y.term) {
            if (x.command != y.command) {
              throw LogMatchingPropertyException(x.command, y.command)
            }
            x +: logMerge(xs, ys)
          } else {
            requestLogs
          }
        case (Nil, _) => localLogs
        case (_, Nil) => requestLogs
      }
    }
    val (before, after) = logs.splitAt(request.prevLogIndex + 1)
    val lastNewEntryIndex = request.entries.size + request.prevLogIndex
    val mergedLogs = before ++ logMerge(request.entries, after)
    (mergedLogs, lastNewEntryIndex)
  }

  def findNewCommitIndex(
    commitIndex: Int,
    matchIndexValue: Iterable[Int],
    logs: List[LogEntry],
    curTerm: Int): Option[Int] = {
    val maj = matchIndexValue.size / 2 + 1 // self is included
    val eligible = matchIndexValue.filter(_ > commitIndex).toList.sortBy(-_).lift(maj - 1)
    eligible.filter(e => logs(e).term == curTerm)
  }
}

class Server(
  val id: Int,
  minElectionTime: FiniteDuration,
  maxElectionTime: FiniteDuration,
  tickTime: FiniteDuration,
  rcpRetries: Int,
  dataStore: DataStore)
  extends Actor with Logging {

  // implicit context variable
  // there exists an implicit ActorRef for this actor: self
  implicit val executor: ExecutionContextExecutor = context.dispatcher
  implicit val scheduler: Scheduler = context.system.scheduler

  // Persistent state on all servers
  private var curTerm = 0
  private var votedFor: Option[Int] = None
  private var logs: List[LogEntry] = List(LogEntry(0, Init, null))

  // Volatile state on all servers
  @volatile private var commitIndex = 0
  @volatile private var lastApplied = 0

  // Volatile state on leaders
  @volatile private var nextIndex = Map[Int, Int]()
  @volatile private var matchIndex = Map[Int, Int]()

  // Additional state
  private implicit var members: Map[Int, ActorRef] = Map()
  private var curState:
ServerState.Value = ServerState.Bootstrap private var curLeaderId: Option[Int] = None private val dataStoreManager = context.actorOf(DataStoreManager.props(dataStore), "dsm") private val clientMessageCache = new MessageCache[Command](id) private val electionTimer = new RandomizedTimer(minElectionTime, maxElectionTime, StartElection) private val heartBeatTimer = new PeriodicTimer(tickTime, Tick) override def loggingPrefix: String = s"[$id-$curState]" override def preStart(): Unit = {} override def postStop(): Unit = { logInfo(s"Server $id stops and cancel all timer scheduled tasks") electionTimer.stop() heartBeatTimer.stop() } def requestSetCommitIndex(commitIndex: Int): Unit = { this.commitIndex = commitIndex if (commitIndex > lastApplied) { logDebug("Applying command to state machine") logDebug("dataStoreManager: " + dataStoreManager) dataStoreManager ! ApplyLogsRequest( logs.slice(lastApplied + 1, commitIndex + 1), // end is exclusive commitIndex) } } ////////////////////////////////////////////////////////////////////////////////////////////////// // Processing batch of approved responses with Majority ////////////////////////////////////////////////////////////////////////////////////////////////// def processConversation(cvs: Conversation): Unit = { var maxTerm = -1 val validateExchange = new ArrayBuffer[Exchange]() cvs.content foreach { case ex @ Exchange(request, response, fid) => val ref = members(fid) response match { // Reply to leader case AppendEntriesResult(term, false) if term == curTerm => logDebug(s"AppendEntries is failed on server $fid at term $term (address: $ref)") nextIndex = nextIndex.updated(fid, Math.max(1, nextIndex(fid) - 1)) // at least 1 case AppendEntriesResult(term, true) if term == curTerm => logDebug(s"AppendEntries is succeeded on server $fid at term $term (address: $ref)") val req = request.asInstanceOf[AppendEntries] val matchIndexValue = req.prevLogIndex + req.entries.size matchIndex = matchIndex.updated(fid, matchIndexValue) nextIndex = nextIndex.updated(fid, matchIndexValue + 1) validateExchange.append(ex) // Reply to candidate case RequestVoteResult(term, false) if term == curTerm => logDebug(s"Vote rejected by server $fid at term $term (address: $ref)") case RequestVoteResult(term, true) if term == curTerm => logDebug(s"Vote granted by server $fid at term $term (address: $ref)") validateExchange.append(ex) // Timeout Response to both of candidate and leader case _: RequestTimeout => logInfo(s"Request $request is time out when connecting server $fid (address: $ref)") case _: RPCResponse => if (response.term > curTerm && !response.success) { logInfo(s"Higher term is detected by $response from server $fid (address: $ref)") maxTerm = Math.max(response.term, maxTerm) } else { throw InvalidResponseException(response, curTerm) } } } if (maxTerm > curTerm) { becomeFollower(maxTerm) } else { val validateCnt = validateExchange.size + 1 val hint = s"$validateCnt / ${members.size}" if (validateCnt > members.size / 2) { logInfo(s"Majority is reached ($hint) at term $curTerm") cvs match { case _: AppendEntriesConversation => logInfo("Applying logs on leader") Server .findNewCommitIndex(commitIndex, matchIndex.values, logs, curTerm) .foreach(requestSetCommitIndex) case _: RequestVoteConversation => becomeLeader() } } else { logInfo(s"Majority is NOT reached ($hint) at term $curTerm") } } } ////////////////////////////////////////////////////////////////////////////////////////////////// // End Point 
////////////////////////////////////////////////////////////////////////////////////////////////// def adminEndpoint: Receive = { case GetStatus => sender ! Status( id, curTerm, curState, curLeaderId, nextIndex, matchIndex, commitIndex, lastApplied) } def startElectionEndpoint: Receive = { case StartElection => becomeCandidate() } def tickEndPoint: Receive = { case Tick => LeaderMessageHub(curTerm, id, commitIndex, nextIndex, logs, members) .distributeRPCRequest(tickTime, rcpRetries) .map(AppendEntriesConversation) pipeTo self } def appendEntriesEndPoint: Receive = { case request: AppendEntries => if (request.term < curTerm) { sender ! AppendEntriesResult(curTerm, success = false) } else { // Same term with different leader if (request.term == curTerm && curLeaderId.exists(_ != request.leaderId)) { throw MultiLeaderException(id, request.leaderId, request.term) } becomeFollower(request.term, Some(request.leaderId)) if (logs.size - 1 >= request.prevLogIndex && logs(request.prevLogIndex).term == request.prevLogTerm) { val (mergedLogs, lastNewEntryIndex) = Server.syncLogsFromLeader(request, logs) logs = mergedLogs if (request.leaderCommit > commitIndex) { logDebug("Updating local commit index") requestSetCommitIndex(Math.min(request.leaderCommit, lastNewEntryIndex)) } sender ! AppendEntriesResult(curTerm, success = true) } else { sender ! AppendEntriesResult(curTerm, success = false) } } } def requestVoteEndPoint: Receive = { case request: RequestVote => if (curTerm > request.term) { sender ! RequestVoteResult(curTerm, success = false) } else { if (curTerm < request.term) { becomeFollower(request.term) } val isUpToDate: Boolean = if (request.lastLogTerm > logs.last.term) { true } else if (request.lastLogTerm == logs.last.term) { request.lastLogIndex >= logs.size - 1 } else { false } if ((votedFor.isEmpty || votedFor.get == request.candidateId) && isUpToDate) { votedFor = Some(request.candidateId) sender ! RequestVoteResult(request.term, success = true) electionTimer.restart() } else { sender ! 
RequestVoteResult(request.term, success = false) } } } def commandEndPoint: Receive = { case cmd: Command => curLeaderId match { case Some(leaderId) => if (leaderId == id) { // if this server is leader logDebug("Leader received cmd: " + cmd) logs = logs :+ LogEntry(curTerm, cmd, sender) } else { val leaderRef = members.getOrElse(leaderId, throw new Exception("Unknown leader id: " + leaderId)) leaderRef forward cmd } case None => clientMessageCache.add(sender, cmd) } } def conversationEndPoint: Receive = { case c: Conversation => processConversation(c) } def commitIndexACKEndPoint: Receive = { case CommandApplied(n) => logDebug("command applied") this.lastApplied = n } def irrelevantMsgEndPoint: Receive = { case msg: RaftMessage => logWarn(s"Irrelevant messages found: $msg, from ${sender.path}") } ////////////////////////////////////////////////////////////////////////////////////////////////// // Server state conversion ////////////////////////////////////////////////////////////////////////////////////////////////// override def receive: Receive = // Initial state adminEndpoint orElse { case Membership(index) => if (index.nonEmpty) { logInfo(s"Server $id initialized") members = index becomeFollower(0) } else { throw EmptyMembershipException() } } def follower: Receive = commandEndPoint orElse startElectionEndpoint orElse appendEntriesEndPoint orElse requestVoteEndPoint orElse adminEndpoint orElse commitIndexACKEndPoint orElse irrelevantMsgEndPoint def candidate: Receive = conversationEndPoint orElse commandEndPoint orElse startElectionEndpoint orElse appendEntriesEndPoint orElse requestVoteEndPoint orElse adminEndpoint orElse irrelevantMsgEndPoint def leader: Receive = conversationEndPoint orElse commandEndPoint orElse tickEndPoint orElse appendEntriesEndPoint orElse requestVoteEndPoint orElse adminEndpoint orElse commitIndexACKEndPoint orElse irrelevantMsgEndPoint def becomeFollower(newTerm: Int, newLeader: Option[Int] = None): Unit = { curTerm = newTerm votedFor = None newLeader match { case Some(newLeaderId) => if (!curLeaderId.contains(newLeaderId)) { // new leader is not current leader logInfo(s"At term $curTerm, new leader (id = $newLeaderId) detected, become follower") curLeaderId = newLeader } members.get(newLeaderId) foreach clientMessageCache.flushTo case None => if (curTerm == 0) { logInfo(s"At term $curTerm, start up as follower") } else { logInfo(s"At term $curTerm, request with higher term detected, become follower") } curLeaderId = None } curState = ServerState.Follower context.become(follower) heartBeatTimer.stop() electionTimer.restart() } def becomeCandidate(): Unit = { curTerm = curTerm + 1 votedFor = Some(id) curLeaderId = None logInfo(s"Election for term $curTerm started, server $id becomes candidate") curState = ServerState.Candidate context.become(candidate) CandidateMessageHub(curTerm, id, logs, members) .distributeRPCRequest(minElectionTime, rcpRetries) .map(RequestVoteConversation.apply) pipeTo self electionTimer.restart() } def becomeLeader(): Unit = { // term not changed curLeaderId = Some(id) votedFor = None // (re)initialize after election members foreach { case (svrId, _) => nextIndex = nextIndex.updated(svrId, logs.size) matchIndex = matchIndex.updated(svrId, 0) } logInfo(s"Server $id becomes leader") curState = ServerState.Leader context.become(leader) clientMessageCache.flushTo(self) electionTimer.stop() heartBeatTimer.restart() } }
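
// ---------------------------------------------------------------------------
// Editor's note: a standalone worked example of the majority rule coded in
// Server.findNewCommitIndex above: the new commit index is the largest index
// replicated on a majority whose entry was written in the current term. The
// log contents and match indices below are invented.
// ---------------------------------------------------------------------------
object CommitIndexSketch extends App {
  case class Entry(term: Int)

  val logs = Vector(Entry(0), Entry(1), Entry(1), Entry(2), Entry(2))
  val curTerm = 2
  val commitIndex = 1
  val matchIndex = Seq(4, 2, 4)     // highest replicated index per server, self included

  val maj = matchIndex.size / 2 + 1 // 2 of 3 here
  val eligible = matchIndex.filter(_ > commitIndex).sortBy(-_).lift(maj - 1)
  println(eligible.filter(i => logs(i).term == curTerm)) // Some(4): index 4 can be committed
}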
invkrh/akka-raft
src/main/scala/me/invkrh/raft/core/Server.scala
Scala
mit
14,776
package com.karasiq.shadowcloud.javafx import akka.actor.ActorSystem import com.karasiq.shadowcloud.providers.LifecycleHook import scala.concurrent.Await private[javafx] final class SCJavaFXLifecycleHook(actorSystem: ActorSystem) extends LifecycleHook { override def initialize(): Unit = { import scala.concurrent.duration._ Await.ready(JavaFXContext(actorSystem).initFuture, 30 seconds) } override def shutdown(): Unit = () }
Karasiq/shadowcloud
javafx/src/main/scala/com/karasiq/shadowcloud/javafx/SCJavaFXLifecycleHook.scala
Scala
apache-2.0
445
package model /** * Created by lukasz on 11.11.16. */ case class UserData(id: Option[Long], email: String, firstName: Option[String], lastName: Option[String]) object UserData { def tupled = (UserData.apply _).tupled }
lszku/ProductDatabase
app/model/UserData.scala
Scala
bsd-3-clause
229
package methods

/**
  * Created by rapolu on 17/02/2017.
  */
object singleton extends App {
  println("Welcome to First Object.")
}
java8dev/scala4impatient
src/main/scala/methods/singleton.scala
Scala
gpl-3.0
134
package nl.gideondk.sentinel.protocol import akka.stream.Materializer import akka.stream.scaladsl.{ BidiFlow, Framing, Sink, Source } import akka.util.{ ByteString, ByteStringBuilder } import nl.gideondk.sentinel.pipeline.Resolver import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.Future sealed trait SimpleMessageFormat { def payload: String } case class SimpleCommand(cmd: Int, payload: String) extends SimpleMessageFormat // 1 case class SimpleReply(payload: String) extends SimpleMessageFormat // 2 case class SimpleStreamChunk(payload: String) extends SimpleMessageFormat // 3 case class SimpleError(payload: String) extends SimpleMessageFormat object SimpleMessage { val PING_PONG = 1 val TOTAL_CHUNK_SIZE = 2 val GENERATE_NUMBERS = 3 val CHUNK_LENGTH = 4 val ECHO = 5 implicit val byteOrder = java.nio.ByteOrder.BIG_ENDIAN def deserialize(bs: ByteString): SimpleMessageFormat = { val iter = bs.iterator iter.getByte.toInt match { case 1 ⇒ SimpleCommand(iter.getInt, new String(iter.toByteString.toArray)) case 2 ⇒ SimpleReply(new String(iter.toByteString.toArray)) case 3 ⇒ SimpleStreamChunk(new String(iter.toByteString.toArray)) case 4 ⇒ SimpleError(new String(iter.toByteString.toArray)) } } def serialize(m: SimpleMessageFormat): ByteString = { val bsb = new ByteStringBuilder() m match { case x: SimpleCommand ⇒ bsb.putByte(1.toByte) bsb.putInt(x.cmd) bsb.putBytes(x.payload.getBytes) case x: SimpleReply ⇒ bsb.putByte(2.toByte) bsb.putBytes(x.payload.getBytes) case x: SimpleStreamChunk ⇒ bsb.putByte(3.toByte) bsb.putBytes(x.payload.getBytes) case x: SimpleError ⇒ bsb.putByte(4.toByte) bsb.putBytes(x.payload.getBytes) case _ ⇒ } bsb.result } val flow = BidiFlow.fromFunctions(serialize, deserialize) def protocol = flow.atop(Framing.simpleFramingProtocol(1024)) } import nl.gideondk.sentinel.protocol.SimpleMessage._ object SimpleHandler extends Resolver[SimpleMessageFormat] { def process(implicit mat: Materializer): PartialFunction[SimpleMessageFormat, Action] = { case SimpleStreamChunk(x) ⇒ if (x.length > 0) ConsumerAction.ConsumeStreamChunk else ConsumerAction.EndStream case x: SimpleError ⇒ ConsumerAction.AcceptError case x: SimpleReply ⇒ ConsumerAction.AcceptSignal case SimpleCommand(PING_PONG, payload) ⇒ ProducerAction.Signal { x: SimpleCommand ⇒ Future(SimpleReply("PONG")) } case x ⇒ println("Unhandled: " + x); ConsumerAction.Ignore } } object SimpleServerHandler extends Resolver[SimpleMessageFormat] { def process(implicit mat: Materializer): PartialFunction[SimpleMessageFormat, Action] = { case SimpleStreamChunk(x) ⇒ if (x.length > 0) ConsumerAction.ConsumeStreamChunk else ConsumerAction.EndStream case SimpleCommand(PING_PONG, payload) ⇒ ProducerAction.Signal { x: SimpleCommand ⇒ Future(SimpleReply("PONG")) } case SimpleCommand(TOTAL_CHUNK_SIZE, payload) ⇒ ProducerAction.ConsumeStream { x: Source[SimpleStreamChunk, Any] ⇒ x.runWith(Sink.fold[Int, SimpleMessageFormat](0) { (b, a) ⇒ b + a.payload.length }).map(x ⇒ SimpleReply(x.toString)) } case SimpleCommand(GENERATE_NUMBERS, payload) ⇒ ProducerAction.ProduceStream { x: SimpleCommand ⇒ val count = payload.toInt Future(Source(List.range(0, count)).map(x ⇒ SimpleStreamChunk(x.toString)) ++ Source.single(SimpleStreamChunk(""))) } case SimpleCommand(ECHO, payload) ⇒ ProducerAction.Signal { x: SimpleCommand ⇒ Future(SimpleReply(x.payload)) } case x ⇒ println("Unhandled: " + x); ConsumerAction.Ignore } }
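// A minimal round-trip sketch appended for illustration (not part of the original
// file). It exercises the wire format implemented above: a one-byte type tag,
// then for commands a 4-byte command id, then the payload bytes.
object SimpleMessageRoundTrip extends App {
  val cmd = SimpleCommand(PING_PONG, "PING")
  // serialize tags the message with byte 1; deserialize dispatches on that byte.
  assert(deserialize(serialize(cmd)) == cmd)
  println(deserialize(serialize(cmd)))
}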
gideondk/sentinel
src/test/scala/nl/gideondk/sentinel/protocol/SimpleMessage.scala
Scala
apache-2.0
3,918
import play.api.libs.json._ import scala.collection.immutable.ListMap import scala.reflect.runtime.{ universe => ru } sealed trait Schema { def name: String lazy val descendents: Set[Schema] = this match { case dt: Atom => Set(this) case dt: Sum => dt.fields.foldLeft(Set(this))(_ union _._2.descendents) case dt: Product => dt.subtypes.foldLeft(Set(this))(_ union _.descendents) case dt: Sequence => Set(this) union dt.item.descendents } def ancestorOf(that: Schema): Boolean = this.descendents contains that } final case class Atom(name: String) extends Schema final case class Sum(name: String, fields: List[(String, Schema)] = Nil) extends Schema final case class Product(name: String, subtypes: List[Schema] = Nil) extends Schema final case class Sequence(name: String, item: Schema) extends Schema object Schema { implicit val writes: OWrites[Schema] = new OWrites[Schema] { def flatten(in: Schema): Set[Schema] = in match { case in @ Atom(name) => Set[Schema](in) case in @ Sum(name, children) => children.map(_._2).foldLeft(Set[Schema](in))((a, b) => a union flatten(b)) case in @ Product(name, children) => children.foldLeft(Set[Schema](in))((a, b) => a union flatten(b)) case in @ Sequence(name, child) => Set[Schema](in) union flatten(child) } def writes(in: Schema): JsObject = { flatten(in).toList.sortWith(_ ancestorOf _).foldLeft(JsObject(Nil)) { (obj, item) => item match { case item: Atom => obj + (item.name -> atomWrites.writes(item)) case item: Sum => obj + (item.name -> sumWrites.writes(item)) case item: Product => obj + (item.name -> productWrites.writes(item)) case item: Sequence => obj + (item.name -> sequenceWrites.writes(item)) } } } } val atomWrites: OWrites[Atom] = new OWrites[Atom] { def writes(in: Atom) = Json.obj( "type" -> "Atom" ) } val sumWrites: OWrites[Sum] = new OWrites[Sum] { def writes(in: Sum) = Json.obj( "type" -> "Sum", "name" -> in.name, "fields" -> JsObject(in.fields map { case (k, v) => k -> JsString(v.name) }) ) } val productWrites: OWrites[Product] = new OWrites[Product] { def writes(in: Product) = Json.obj( "type" -> "Product", "name" -> in.name, "subtypes" -> JsArray(in.subtypes map (v => JsString(v.name))) ) } val sequenceWrites: OWrites[Sequence] = new OWrites[Sequence] { def writes(in: Sequence) = Json.obj( "type" -> "Sequence", "name" -> in.name, "item" -> JsString(in.item.name) ) } }
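// A small usage sketch appended for illustration (the schema below is invented):
// descendents/ancestorOf drive the ancestors-first ordering used by the OWrites
// instance above.
object SchemaDemo extends App {
  val str: Schema  = Atom("String")
  val user: Schema = Sum("User", List("name" -> str))

  assert(user ancestorOf str) // a Sum is an ancestor of each of its field schemas
  println(Json.toJson(user))  // serializes both definitions, User before String
}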
davegurnell/shapeless-metadata
src/main/scala/Schema.scala
Scala
apache-2.0
2,705
package ella

import org.specs2.mutable.Specification
import spray.testkit.Specs2RouteTest
import spray.http._
import StatusCodes._

class EllaServiceSpec extends Specification with Specs2RouteTest with EllaService {
  def actorRefFactory = system

  "Ella" should {

    "return a greeting for GET requests to the root path" in {
      Get() ~> route ~> check {
        responseAs[String] must be equalTo "Hello world"
      }
    }

    "handle GET requests with integers in the URL path" in {
      Get("/42") ~> route ~> check {
        responseAs[String] must contain("42")
      }
    }

    "handle GET requests with a string in the URL path, but only one segment" in {
      Get("/asdf") ~> route ~> check {
        responseAs[String] must contain("asdf")
      }
      Get("/asdf/asdf") ~> route ~> check {
        handled must beFalse
      }
    }

    "leave POST requests to other paths unhandled" in {
      Post("/asdf") ~> route ~> check {
        handled must beFalse
      }
    }

    "return a MethodNotAllowed error for PUT requests to the root path" in {
      Put() ~> sealRoute(route) ~> check {
        status === MethodNotAllowed
        responseAs[String] === "HTTP method not allowed, supported methods: GET"
      }
    }
  }
}
amezhenin/ella
src/test/scala/ella/EllaServiceSpec.scala
Scala
mit
1,263
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.deploy private[spark] object ExecutorState extends Enumeration { val LAUNCHING, LOADING, RUNNING, KILLED, FAILED, LOST, EXITED = Value type ExecutorState = Value def isFinished(state: ExecutorState): Boolean = Seq(KILLED, FAILED, LOST, EXITED).contains(state) }
yelshater/hadoop-2.3.0
spark-core_2.10-1.0.0-cdh5.1.0/src/main/scala/org/apache/spark/deploy/ExecutorState.scala
Scala
apache-2.0
1,099
class C { type T; type U } trait Test { val x: (C { type U = T } { type T = String }) # U val y: String = x }
yusuke2255/dotty
tests/pos/lookuprefined.scala
Scala
bsd-3-clause
117
package reactivemongo.api import reactivemongo.api.{ BSONSerializationPack => SerPack } import reactivemongo.api.commands.{ CommandCodecs, WriteResult } @deprecated("Upgrade to 2.11+", "0.19.0") private[reactivemongo] object Serialization { type Pack = SerPack.type type DefaultCollection = collections.GenericCollection[this.Pack] with CollectionMetaCommands @deprecated("Upgrade to 2.11+", "0.19.0") @inline def defaultCollectionProducer: CollectionProducer[DefaultCollection] = reactivemongo.api.collections.bson.BSONCollectionProducer @inline def internalSerializationPack: this.Pack = SerPack lazy implicit val unitBoxReader = CommandCodecs.unitBoxReader(internalSerializationPack) lazy implicit val writeResultReader = CommandCodecs.writeResultReader[WriteResult, this.Pack]( internalSerializationPack) }
ornicar/ReactiveMongo
driver/src/main/scala-2.10/api/Serialization.scala
Scala
apache-2.0
851
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.examples

import java.util.Random

import scala.math.exp

import breeze.linalg.{Vector, DenseVector}

import org.apache.spark._

/**
 * Logistic regression based classification.
 * Usage: SparkLR [slices]
 *
 * This is an example implementation for learning how to use Spark. For more conventional use,
 * please refer to either org.apache.spark.mllib.classification.LogisticRegressionWithSGD or
 * org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS based on your needs.
 */
object SparkLR {
  val N = 10000  // Number of data points
  val D = 10   // Number of dimensions
  val R = 0.7  // Scaling factor
  val ITERATIONS = 5
  val rand = new Random(42)

  case class DataPoint(x: Vector[Double], y: Double)

  def generateData: Array[DataPoint] = {
    def generatePoint(i: Int): DataPoint = {
      val y = if (i % 2 == 0) -1 else 1
      val x = DenseVector.fill(D){rand.nextGaussian + y * R}
      DataPoint(x, y)
    }
    Array.tabulate(N)(generatePoint)
  }

  def showWarning() {
    System.err.println(
      """WARN: This is a naive implementation of Logistic Regression and is given as an example!
        |Please use either org.apache.spark.mllib.classification.LogisticRegressionWithSGD or
        |org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS
        |for more conventional use.
      """.stripMargin)
  }

  def main(args: Array[String]) {

    showWarning()

    val sparkConf = new SparkConf().setAppName("SparkLR")
    val sc = new SparkContext(sparkConf)

    val numSlices = if (args.length > 0) args(0).toInt else 2
    val points = sc.parallelize(generateData, numSlices).cache()

    // Initialize w to a random value
    var w = DenseVector.fill(D){2 * rand.nextDouble - 1}
    println("Initial w: " + w)

    for (i <- 1 to ITERATIONS) {
      println("On iteration " + i)
      // Gradient of the logistic loss summed over all points:
      // x * (1 / (1 + exp(-y * w.x)) - 1) * y
      val gradient = points.map { p =>
        p.x * (1 / (1 + exp(-p.y * (w.dot(p.x)))) - 1) * p.y
      }.reduce(_ + _)
      w -= gradient
    }

    println("Final w: " + w)

    sc.stop()
  }
}
shenbaise/mltoy
src/main/scala/org/apache/spark/examples/SparkLR.scala
Scala
apache-2.0
2,859
object Test { def foo(x: String, z: String, y: String = ""): Int = 1 def foo(x: String, y: String): Int = 2 /* line: 3 */foo("", "") }
ilinum/intellij-scala
testdata/resolve2/overloading/DefaultIgnored.scala
Scala
apache-2.0
141
/** * Copyright (C) 2014 Orbeon, Inc. * * This program is free software; you can redistribute it and/or modify it under the terms of the * GNU Lesser General Public License as published by the Free Software Foundation; either version * 2.1 of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * See the GNU Lesser General Public License for more details. * * The full text of the license is available at http://www.gnu.org/copyleft/lesser.html */ package org.orbeon.oxf.http import org.orbeon.oxf.properties.Properties object PropertiesApacheHttpClient extends ApacheHttpClient(PropertiesConnectionSettings.apply) object PropertiesConnectionSettings { def apply: HttpClientSettings = { val props = Properties.instance.getPropertySet import HttpClientSettings._ HttpClientSettings( staleCheckingEnabled = props.getBoolean(StaleCheckingEnabledProperty, StaleCheckingEnabledDefault), soTimeout = props.getInteger(SOTimeoutProperty, SOTimeoutPropertyDefault).toInt, chunkRequests = props.getBoolean(ChunkRequestsProperty, ChunkRequestsDefault), proxyHost = Option(props.getString(ProxyHostProperty)), proxyPort = Option(props.getInteger(ProxyPortProperty)) map (_.toInt), proxyExclude = Option(props.getString(ProxyExcludeProperty)), sslHostnameVerifier = props.getString(SSLHostnameVerifierProperty, SSLHostnameVerifierDefault), sslKeystoreURI = Option(props.getStringOrURIAsString(SSLKeystoreURIProperty, allowEmpty = false)), sslKeystorePassword = Option(props.getString(SSLKeystorePasswordProperty)), sslKeystoreType = Option(props.getString(SSLKeystoreTypeProperty)), proxySSL = props.getBoolean(ProxySSLProperty, ProxySSLPropertyDefault), proxyUsername = Option(props.getString(ProxyUsernameProperty)), proxyPassword = Option(props.getString(ProxyPasswordProperty)), proxyNTLMHost = Option(props.getString(ProxyNTLMHostProperty)), proxyNTLMDomain = Option(props.getString(ProxyNTLMDomainProperty)) ) } }
ajw625/orbeon-forms
src/main/scala/org/orbeon/oxf/http/PropertiesApacheHttpClient.scala
Scala
lgpl-2.1
2,434
package com.github.dnvriend.aws.lambda.handler import java.io.{ InputStream, OutputStream } import com.amazonaws.services.lambda.runtime.{ Context, RequestStreamHandler } import com.github.dnvriend.ops.{ AllOps, JsonOps } import com.github.dnvriend.ops.Functional.DisjunctionNel import play.api.libs.json._ import scala.language.implicitConversions import scalaz._ import scalaz.Scalaz._ object Response { implicit val format: Format[Response] = Json.format[Response] val Ok: Response = Response(200, JsNull, Map("Content-Type" -> "application/json")) val Created: Response = Ok.copy(statusCode = 201) val Accepted: Response = Ok.copy(statusCode = 202) val NoContent: Response = Ok.copy(statusCode = 204) val MovedPermanently: Response = Ok.copy(statusCode = 301) val SeeOther: Response = Ok.copy(statusCode = 303) val TemporaryRedirect: Response = Ok.copy(statusCode = 307) val BadRequest: Response = Ok.copy(statusCode = 400) val NotFound: Response = Ok.copy(statusCode = 404) val InternalServerError: Response = Ok.copy(statusCode = 500) val NotImplemented: Response = Ok.copy(statusCode = 501) val ServiceUnavailable: Response = Ok.copy(statusCode = 503) } case class Response(statusCode: Int, body: JsValue, headers: Map[String, String]) { def withBody(data: JsValue): Response = copy(body = data) def withHeader(header: (String, String)): Response = copy(headers = this.headers + header) } case class Request(request: JsValue) extends AllOps { def body: JsValue = request("body") def pathParameters: JsValue = request("pathParameters") def requestParameters: JsValue = request("queryStringParameters") def bodyOpt[A: Reads]: Option[A] = bodyAs[A].toOption def bodyAs[A: Reads](implicit validator: Validator[A] = null): DisjunctionNel[Throwable, A] = for { data <- Json.parse(body.as[String]).as[A].safeNel validated <- (validator.? | Validator.emptyValidator(data)).validate(data).disjunction } yield validated def bodyAsStringOpt: Option[String] = body.as[String].safe.toOption def bodyAsString: Disjunction[Throwable, String] = body.as[String].safe def pathParamOpt[A: Reads]: Option[A] = pathParamAs.toOption def pathParamAs[A: Reads]: Disjunction[Throwable, A] = pathParameters.as[A].safe def requestParamOpt[A: Reads]: Option[A] = requestParamAs.toOption def requestParamAs[A: Reads]: Disjunction[Throwable, A] = requestParameters.as[A].safe } trait LambdaRequestHandler extends RequestStreamHandler with ToResponseConverters with JsonOps { override def handleRequest(input: InputStream, output: OutputStream, context: Context): Unit = { val request: Request = Request(Json.parse(input)) val response: Response = handle(request, context) val jsBody: JsValue = response.body.escapedJson val jsResponse: JsValue = Json.toJson(response.copy(body = jsBody)) val bytes: Array[Byte] = Json.toBytes(jsResponse) output.write(bytes) output.close() } def handle(request: Request, ctx: Context): Response }
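// A minimal handler sketch appended for illustration (the class name and payload
// handling are hypothetical, not part of the original file): implement `handle`
// and answer with one of the prebuilt Response constants defined above.
final class EchoHandler extends LambdaRequestHandler {
  def handle(request: Request, ctx: Context): Response =
    request.bodyAsStringOpt match {
      case Some(body) => Response.Ok.withBody(JsString(body)) // echo the body back
      case None       => Response.BadRequest
    }
}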
dnvriend/serverless-test
aws-lambda-handler/src/main/scala/com/github/dnvriend/aws/lambda/handler/LambdaRequestHandler.scala
Scala
apache-2.0
3,012
package mesosphere.marathon package core.storage.repository.impl import java.time.OffsetDateTime import akka.http.scaladsl.marshalling.Marshaller import akka.http.scaladsl.unmarshalling.Unmarshaller import akka.stream.scaladsl.Source import akka.{ Done, NotUsed } import mesosphere.marathon.core.storage.repository.{ Repository, VersionedRepository } import mesosphere.marathon.core.storage.store.{ IdResolver, PersistenceStore } import scala.concurrent.Future /** * Default Repository of value types 'V' identified by their key 'Id' * that handles all default behavior for interacting with a given persistence store * for that value type. This allows the implicits to be hidden from the consumer of the API. */ class PersistenceStoreRepository[Id, V, K, C, S]( persistenceStore: PersistenceStore[K, C, S], extractId: V => Id)(implicit ir: IdResolver[Id, V, C, K], marshaller: Marshaller[V, S], unmarshaller: Unmarshaller[S, V]) extends Repository[Id, V] { override def ids(): Source[Id, NotUsed] = persistenceStore.ids() override def get(id: Id): Future[Option[V]] = persistenceStore.get(id) override def delete(id: Id): Future[Done] = persistenceStore.deleteAll(id) override def store(v: V): Future[Done] = persistenceStore.store(extractId(v), v) // Assume that the underlying store can limit its own concurrency. override def all(): Source[V, NotUsed] = ids().mapAsync(Int.MaxValue)(get).collect { case Some(x) => x } } /** * Default Repository of value types 'V' identified by their key 'Id' for Values that should be versioned. * that handles all default behavior for interacting with a given persistence store * for that value type. This allows the implicits to be hidden from the consumer of the API. */ class PersistenceStoreVersionedRepository[Id, V, K, C, S]( persistenceStore: PersistenceStore[K, C, S], extractId: V => Id, extractVersion: V => OffsetDateTime)(implicit ir: IdResolver[Id, V, C, K], marshaller: Marshaller[V, S], unmarshaller: Unmarshaller[S, V]) extends PersistenceStoreRepository[Id, V, K, C, S]( persistenceStore, extractId) with VersionedRepository[Id, V] { override def versions(id: Id): Source[OffsetDateTime, NotUsed] = persistenceStore.versions(id) override def getVersions(list: Seq[(Id, OffsetDateTime)]): Source[V, NotUsed] = persistenceStore.getVersions(list) override def getVersion(id: Id, version: OffsetDateTime): Future[Option[V]] = persistenceStore.get(id, version) override def storeVersion(v: V): Future[Done] = persistenceStore.store(extractId(v), v, extractVersion(v)) override def deleteCurrent(id: Id): Future[Done] = persistenceStore.deleteCurrent(id) }
guenter/marathon
src/main/scala/mesosphere/marathon/core/storage/repository/impl/PersistenceStoreRepository.scala
Scala
apache-2.0
2,723
import models.{AppDB, Bar} import org.scalatest.FlatSpec import org.scalatest.matchers.ShouldMatchers import org.squeryl.PrimitiveTypeMode.inTransaction import play.api.test._ import play.api.test.Helpers._ class BarSpec extends FlatSpec with ShouldMatchers { "A Bar" should "be creatable" in { running(FakeApplication(additionalConfiguration = inMemoryDatabase())) { inTransaction { val bar = AppDB.barTable insert Bar(Some("foo")) bar.id should not equal(0) } } } }
jschappet/play-example
test/BarSpec.scala
Scala
mit
513
package com.nulabinc.backlog.r2b.exporter.core import javax.inject.Inject import com.nulabinc.backlog.migration.common.conf.BacklogPaths import com.nulabinc.backlog.migration.common.domain.BacklogProjectKey import com.nulabinc.backlog.r2b.exporter.conf.ExportConfig import com.nulabinc.backlog.r2b.exporter.convert._ import com.nulabinc.backlog.r2b.mapping.core.MappingContainer import com.nulabinc.backlog.r2b.redmine.conf.RedmineApiConfiguration import com.nulabinc.backlog.r2b.redmine.domain.{PropertyValue, RedmineProjectId} import com.nulabinc.backlog.r2b.redmine.service.{IssueService, ProjectService, WikiService} class ExportContextProvider @Inject() ( apiConfig: RedmineApiConfiguration, backlogProjectKey: BacklogProjectKey, exportConfig: ExportConfig, projectId: RedmineProjectId, propertyValue: PropertyValue, backlogPaths: BacklogPaths, issueService: IssueService, projectService: ProjectService, mappingContainer: MappingContainer, wikiService: WikiService, issueWrites: IssueWrites, journalWrites: JournalWrites, userWrites: UserWrites, customFieldWrites: CustomFieldWrites, customFieldValueWrites: CustomFieldValueWrites, attachmentWrites: AttachmentWrites, wikiWrites: WikiWrites ) { def get(): ExportContext = { ExportContext( apiConfig, backlogProjectKey, exportConfig, projectId, backlogPaths, propertyValue, mappingContainer, projectService, issueService, wikiService, issueWrites, journalWrites, userWrites, customFieldWrites, customFieldValueWrites, attachmentWrites, wikiWrites ) } }
nulab/BacklogMigration-Redmine
src/main/scala/com/nulabinc/backlog/r2b/exporter/core/ExportContextProvider.scala
Scala
mit
1,695
/* __ *\\ ** ________ ___ / / ___ Scala API ** ** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** ** __\\ \\/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** ** /____/\\___/_/ |_/____/_/ | | ** ** |/ ** \\* */ package scala package collection package generic /** Some bit operations. * * See http://www.drmaciver.com/2008/08/unsigned-comparison-in-javascala/ for * an explanation of unsignedCompare. */ private[collection] object BitOperations { trait Int { type Int = scala.Int def zero(i: Int, mask: Int) = (i & mask) == 0 def mask(i: Int, mask: Int) = i & (complement(mask - 1) ^ mask) def hasMatch(key: Int, prefix: Int, m: Int) = mask(key, m) == prefix def unsignedCompare(i: Int, j: Int) = (i < j) ^ (i < 0) ^ (j < 0) def shorter(m1: Int, m2: Int) = unsignedCompare(m2, m1) def complement(i: Int) = (-1) ^ i def bits(num: Int) = 31 to 0 by -1 map (i => (num >>> i & 1) != 0) def bitString(num: Int, sep: String = "") = bits(num) map (b => if (b) "1" else "0") mkString sep def highestOneBit(j: Int) = java.lang.Integer.highestOneBit(j) } object Int extends Int trait Long { type Long = scala.Long def zero(i: Long, mask: Long) = (i & mask) == 0L def mask(i: Long, mask: Long) = i & (complement(mask - 1) ^ mask) def hasMatch(key: Long, prefix: Long, m: Long) = mask(key, m) == prefix def unsignedCompare(i: Long, j: Long) = (i < j) ^ (i < 0L) ^ (j < 0L) def shorter(m1: Long, m2: Long) = unsignedCompare(m2, m1) def complement(i: Long) = (-1L) ^ i def bits(num: Long) = 63L to 0L by -1L map (i => (num >>> i & 1L) != 0L) def bitString(num: Long, sep: String = "") = bits(num) map (b => if (b) "1" else "0") mkString sep def highestOneBit(j: Long) = java.lang.Long.highestOneBit(j) } object Long extends Long }
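// A standalone sketch appended for illustration (not part of the original file).
// BitOperations is private[collection], so the unsigned-comparison formula from
// the traits above is restated locally to show what it computes.
object UnsignedCompareDemo extends App {
  def unsignedCompare(i: Int, j: Int): Boolean = (i < j) ^ (i < 0) ^ (j < 0)

  assert(unsignedCompare(1, 2))  // agrees with signed comparison for same-sign values
  assert(unsignedCompare(1, -1)) // -1 is 0xFFFFFFFF, the largest unsigned value
  assert(!unsignedCompare(-1, 1))
}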
felixmulder/scala
src/library/scala/collection/generic/BitOperations.scala
Scala
bsd-3-clause
2,384
/* * Powell.scala * (Poirot) * * Copyright (c) 2013-2018 Hanns Holger Rutz. All rights reserved. * Code is often based on or identical to the original JaCoP Scala wrappers by * Krzysztof Kuchcinski and Radoslaw Szymanek. * * This software is published under the GNU Affero General Public License v3+ * * * For further information, please contact Hanns Holger Rutz at * [email protected] */ package de.sciss.poirot package examples import de.sciss.poirot.Implicits._ import org.jacop.floats.core.FloatDomain // Problem 1 from paper "Some tests of Generalized Bisection" by R. Baker Kearfott. object Powell extends App with Problem { FloatDomain.setPrecision(1e-20) val x = Vec.tabulate(4)(i => DoubleVar(s"x[$i]", -2, 2)) // constraint x(0) + 10.0*x(1) #= 0.0 //sum(x, Array[Double](1, 10, 0, 0)) #= 0 math.sqrt(5.0)*(x(2) - x(3)) #= 0.0 (x(1) - 2.0*x(2))*(x(1) - 2.0*x(2)) #= 0.0 math.sqrt(10.0)*(x(0) - x(3))*(x(0) - x(3)) #= 0.0 val (result, stats) = withStatistics(satisfyAll(searchDouble(x, inputOrder), () => println(x))) println(stats) }
Sciss/Poirot
src/test/scala/de/sciss/poirot/examples/Powell.scala
Scala
agpl-3.0
1,095
package enigma

/**
 * Here the signal is connected to the 'T' input on the plugboard. Some of
 * the letters on the plugboard will be wired up to other letters (the plugs),
 * causing the signal to be diverted. If the 'T' input is not plugged to another
 * letter, then our signal will pass straight to the 'T' output. In our case,
 * though, the 'T' is plugged to the 'K', so the signal is diverted to a new
 * path; the letter is now 'K'.
 */
case class Plugboard(shuffled: Seq[Char]){

  // take the shuffled input, split it in half, interleave the halves and then
  // make sure that A -> B and B -> A such that the operations on the plugboard
  // are deterministic back and forth.
  private val mapping: Map[Char,Char] =
    shuffled.splitAt(Alphabet.length / 2).zipped
      .flatMap((a, b) => Seq(a -> b, b -> a)).toMap

  def transform(c: Char): Char =
    mapping.getOrElse(c, c) // unplugged letters pass through unchanged
}
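// An illustrative sketch appended for demonstration (the shuffle below is
// arbitrary, and it assumes the package's Alphabet is the usual 26 letters):
// because the wiring is built from symmetric pairs (a -> b and b -> a),
// transform is its own inverse for every letter, plugged or not.
object PlugboardDemo extends App {
  val board = Plugboard("QWERTYUIOPASDFGHJKLZXCVBNM".toSeq)
  ('A' to 'Z').foreach(c => assert(board.transform(board.transform(c)) == c))
}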
timperrett/enigma
src/main/scala/enigma/Plugboard.scala
Scala
apache-2.0
986
/** MACHINE-GENERATED FROM AVRO SCHEMA. DO NOT EDIT DIRECTLY */ package example.idl import scala.annotation.switch import other.ns.{ExternalDependency, Suit} sealed trait ImportProtocol extends org.apache.avro.specific.SpecificRecordBase with Product with Serializable final case class DependentRecord(var dependency: ExternalDependency, var number: Int) extends org.apache.avro.specific.SpecificRecordBase with ImportProtocol { def this() = this(new ExternalDependency, 0) def get(field$: Int): AnyRef = { (field$: @switch) match { case 0 => { dependency }.asInstanceOf[AnyRef] case 1 => { number }.asInstanceOf[AnyRef] case _ => new org.apache.avro.AvroRuntimeException("Bad index") } } def put(field$: Int, value: Any): Unit = { (field$: @switch) match { case 0 => this.dependency = { value }.asInstanceOf[ExternalDependency] case 1 => this.number = { value }.asInstanceOf[Int] case _ => new org.apache.avro.AvroRuntimeException("Bad index") } () } def getSchema: org.apache.avro.Schema = DependentRecord.SCHEMA$ } final object DependentRecord { val SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\\"type\\":\\"record\\",\\"name\\":\\"DependentRecord\\",\\"namespace\\":\\"example.idl\\",\\"fields\\":[{\\"name\\":\\"dependency\\",\\"type\\":{\\"type\\":\\"record\\",\\"name\\":\\"ExternalDependency\\",\\"namespace\\":\\"other.ns\\",\\"fields\\":[{\\"name\\":\\"number\\",\\"type\\":\\"int\\"}]}},{\\"name\\":\\"number\\",\\"type\\":\\"int\\"}]}") } final case class DependentRecord2(var dependency: Suit, var name: String) extends org.apache.avro.specific.SpecificRecordBase with ImportProtocol { def this() = this(null, "") def get(field$: Int): AnyRef = { (field$: @switch) match { case 0 => { dependency }.asInstanceOf[AnyRef] case 1 => { name }.asInstanceOf[AnyRef] case _ => new org.apache.avro.AvroRuntimeException("Bad index") } } def put(field$: Int, value: Any): Unit = { (field$: @switch) match { case 0 => this.dependency = { value }.asInstanceOf[Suit] case 1 => this.name = { value.toString }.asInstanceOf[String] case _ => new org.apache.avro.AvroRuntimeException("Bad index") } () } def getSchema: org.apache.avro.Schema = DependentRecord2.SCHEMA$ } final object DependentRecord2 { val SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\\"type\\":\\"record\\",\\"name\\":\\"DependentRecord2\\",\\"namespace\\":\\"example.idl\\",\\"fields\\":[{\\"name\\":\\"dependency\\",\\"type\\":{\\"type\\":\\"enum\\",\\"name\\":\\"Suit\\",\\"namespace\\":\\"other.ns\\",\\"symbols\\":[\\"SPADES\\",\\"DIAMONDS\\",\\"CLUBS\\",\\"HEARTS\\"]}},{\\"name\\":\\"name\\",\\"type\\":\\"string\\"}]}") } final case class DependentRecord3(var dependency: Embedded, var value: Boolean) extends org.apache.avro.specific.SpecificRecordBase with ImportProtocol { def this() = this(new Embedded, false) def get(field$: Int): AnyRef = { (field$: @switch) match { case 0 => { dependency }.asInstanceOf[AnyRef] case 1 => { value }.asInstanceOf[AnyRef] case _ => new org.apache.avro.AvroRuntimeException("Bad index") } } def put(field$: Int, value: Any): Unit = { (field$: @switch) match { case 0 => this.dependency = { value }.asInstanceOf[Embedded] case 1 => this.value = { value }.asInstanceOf[Boolean] case _ => new org.apache.avro.AvroRuntimeException("Bad index") } () } def getSchema: org.apache.avro.Schema = DependentRecord3.SCHEMA$ } final object DependentRecord3 { val SCHEMA$ = new 
org.apache.avro.Schema.Parser().parse("{\\"type\\":\\"record\\",\\"name\\":\\"DependentRecord3\\",\\"namespace\\":\\"example.idl\\",\\"fields\\":[{\\"name\\":\\"dependency\\",\\"type\\":{\\"type\\":\\"record\\",\\"name\\":\\"Embedded\\",\\"fields\\":[{\\"name\\":\\"inner\\",\\"type\\":\\"int\\"}]}},{\\"name\\":\\"value\\",\\"type\\":\\"boolean\\"}]}") }
julianpeeters/avrohugger
avrohugger-core/src/test/expected/specific/example/idl/ImportProtocol.scala
Scala
apache-2.0
3,989
package at.forsyte.apalache.tla.bmcmt.rewriter /** * An object implementing Recoverable implements two methods: snapshot and recover. The method 'snapshot' takes * a snapshot of the object state. This snapshot can be restored with the method 'recover'. */ trait Recoverable[T] { /** * Take a snapshot and return it * @return the snapshot */ def snapshot(): T /** * Recover a previously saved snapshot (not necessarily saved by this object). * @param shot a snapshot */ def recover(shot: T): Unit }
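// A minimal implementing sketch appended for illustration (the class is
// hypothetical, not part of the original file): the type parameter T is
// whatever value object captures the state to be restored.
class CounterWithSnapshots extends Recoverable[Int] {
  private var count = 0

  def inc(): Unit = count += 1

  def snapshot(): Int = count                   // capture the current state
  def recover(shot: Int): Unit = count = shot   // restore a previously taken snapshot
}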
konnov/apalache
tla-bmcmt/src/main/scala/at/forsyte/apalache/tla/bmcmt/rewriter/Recoverable.scala
Scala
apache-2.0
538
package lara.epfl.scalasca.core abstract class LatticeElement[T](val index: Int, val set: Set[T]) case class Bottom[T](override val index: Int) extends LatticeElement[T](index, Set[T]()) case class Regular[T](override val index: Int, override val set: Set[T]) extends LatticeElement[T](index, set) case class Top[T](override val index: Int) extends LatticeElement[T](index, Set[T]())
jean-andre-gauthier/scalasca
src/main/scala/lara/epfl/scalasca/core/LatticeElement.scala
Scala
bsd-3-clause
387
package lila.tournament import chess.format.FEN import org.joda.time.DateTime import org.joda.time.format.ISODateTimeFormat import play.api.i18n.Lang import play.api.libs.json._ import scala.concurrent.duration._ import scala.concurrent.ExecutionContext import lila.common.Json._ import lila.common.{ GreatPlayer, LightUser, Preload, Uptime } import lila.game.{ Game, LightPov } import lila.hub.LightTeam.TeamID import lila.memo.CacheApi._ import lila.rating.PerfType import lila.socket.Socket.SocketVersion import lila.user.{ LightUserApi, User } final class JsonView( lightUserApi: LightUserApi, playerRepo: PlayerRepo, pairingRepo: PairingRepo, tournamentRepo: TournamentRepo, cached: Cached, statsApi: TournamentStatsApi, shieldApi: TournamentShieldApi, cacheApi: lila.memo.CacheApi, proxyRepo: lila.round.GameProxyRepo, verify: Condition.Verify, duelStore: DuelStore, standingApi: TournamentStandingApi, pause: Pause )(implicit ec: ExecutionContext) { import JsonView._ private case class CachableData( duels: JsArray, duelTeams: Option[JsObject], featured: Option[JsObject], podium: Option[JsArray] ) def apply( tour: Tournament, page: Option[Int], me: Option[User], getUserTeamIds: User => Fu[List[TeamID]], getTeamName: TeamID => Option[String], playerInfoExt: Option[PlayerInfoExt], socketVersion: Option[SocketVersion], partial: Boolean, myInfo: Preload[Option[MyInfo]] = Preload.none )(implicit lang: Lang): Fu[JsObject] = for { data <- cachableData get tour.id myInfo <- myInfo.orLoad(me ?? { fetchMyInfo(tour, _) }) pauseDelay = me flatMap { u => pause.remainingDelay(u.id, tour) } full = !partial stand <- (myInfo, page) match { case (_, Some(p)) => standingApi(tour, p) case (Some(i), _) => standingApi(tour, i.page) case _ => standingApi(tour, 1) } playerInfoJson <- playerInfoExt ?? { pie => playerInfoExtended(tour, pie).map(_.some) } verdicts <- full ?? { (me, myInfo) match { case (None, _) => fuccess(tour.conditions.accepted.some) case (Some(_), Some(myInfo)) if !myInfo.withdraw => fuccess(tour.conditions.accepted.some) case (Some(user), Some(_)) => verify.rejoin(tour.conditions, user, getUserTeamIds) map some case (Some(user), None) => verify(tour.conditions, user, getUserTeamIds) map some } } stats <- statsApi(tour) shieldOwner <- full.?? { shieldApi currentOwner tour } teamsToJoinWith <- full.??(~(for { u <- me; battle <- tour.teamBattle } yield getUserTeamIds(u) map { teams => battle.teams.intersect(teams.toSet).toList })) teamStanding <- getTeamStanding(tour) myTeam <- myInfo.flatMap(_.teamId) ?? { getMyRankedTeam(tour, _) } } yield Json .obj( "nbPlayers" -> tour.nbPlayers, "duels" -> data.duels, "standing" -> stand ) .add("isStarted" -> tour.isStarted) .add("isFinished" -> tour.isFinished) .add("isRecentlyFinished" -> tour.isRecentlyFinished) .add("secondsToFinish" -> tour.isStarted.option(tour.secondsToFinish)) .add("secondsToStart" -> tour.isCreated.option(tour.secondsToStart)) .add("me" -> myInfo.map(myInfoJson(me, pauseDelay))) .add("featured" -> data.featured) .add("podium" -> data.podium) .add("playerInfo" -> playerInfoJson) .add("pairingsClosed" -> tour.pairingsClosed) .add("stats" -> stats) .add("socketVersion" -> socketVersion.map(_.value)) .add("teamStanding" -> teamStanding) .add("myTeam" -> myTeam) .add("duelTeams" -> data.duelTeams) ++ full.?? 
{ Json .obj( "id" -> tour.id, "createdBy" -> tour.createdBy, "startsAt" -> formatDate(tour.startsAt), "system" -> "arena", // BC "fullName" -> tour.name(), "minutes" -> tour.minutes, "perf" -> full.option(tour.perfType), "clock" -> full.option(tour.clock), "variant" -> full.option(tour.variant.key) ) .add("spotlight" -> tour.spotlight) .add("berserkable" -> tour.berserkable) .add("position" -> tour.position.ifTrue(full).map(positionJson)) .add("verdicts" -> verdicts.map(Condition.JSONHandlers.verdictsFor(_, lang))) .add("schedule" -> tour.schedule.map(scheduleJson)) .add("private" -> tour.isPrivate) .add("quote" -> tour.isCreated.option(lila.quote.Quote.one(tour.id))) .add("defender" -> shieldOwner.map(_.value)) .add("greatPlayer" -> GreatPlayer.wikiUrl(tour.name).map { url => Json.obj("name" -> tour.name, "url" -> url) }) .add("teamBattle" -> tour.teamBattle.map { battle => Json .obj( "teams" -> JsObject(battle.sortedTeamIds.map { id => id -> JsString(getTeamName(id).getOrElse(id)) }) ) .add("joinWith" -> me.isDefined.option(teamsToJoinWith.sorted)) }) .add("description" -> tour.description) } def clearCache(tour: Tournament): Unit = { standingApi clearCache tour cachableData invalidate tour.id } def fetchMyInfo(tour: Tournament, me: User): Fu[Option[MyInfo]] = playerRepo.find(tour.id, me.id) flatMap { _ ?? { player => fetchCurrentGameId(tour, me) flatMap { gameId => getOrGuessRank(tour, player) dmap { rank => MyInfo(rank + 1, player.withdraw, gameId, player.team).some } } } } // if the user is not yet in the cached ranking, // guess its rank based on other players scores in the DB private def getOrGuessRank(tour: Tournament, player: Player): Fu[Int] = cached ranking tour flatMap { _ get player.userId match { case Some(rank) => fuccess(rank) case None => playerRepo.computeRankOf(player) } } def playerInfoExtended(tour: Tournament, info: PlayerInfoExt): Fu[JsObject] = for { ranking <- cached ranking tour sheet <- cached.sheet(tour, info.user.id) } yield info match { case PlayerInfoExt(user, player, povs) => val isPlaying = povs.headOption.??(_.game.playable) val povScores: List[(LightPov, Option[arena.Sheet.Score])] = povs zip { (isPlaying ?? List(none[arena.Sheet.Score])) ::: sheet.scores.map(some) } Json.obj( "player" -> Json .obj( "id" -> user.id, "name" -> user.username, "rating" -> player.rating, "score" -> player.score, "fire" -> player.fire, "nb" -> sheetNbs(sheet) ) .add("title" -> user.title) .add("performance" -> player.performanceOption) .add("rank" -> ranking.get(user.id).map(1 +)) .add("provisional" -> player.provisional) .add("withdraw" -> player.withdraw) .add("team" -> player.team), "pairings" -> povScores.map { case (pov, score) => Json .obj( "id" -> pov.gameId, "color" -> pov.color.name, "op" -> gameUserJson(pov.opponent.userId, pov.opponent.rating), "win" -> score.flatMap(_.isWin), "status" -> pov.game.status.id, "score" -> score.map(sheetScoreJson) ) .add("berserk" -> pov.player.berserk) } ) } private def fetchCurrentGameId(tour: Tournament, user: User): Fu[Option[Game.ID]] = if (Uptime.startedSinceSeconds(60)) fuccess(duelStore.find(tour, user)) else pairingRepo.playingByTourAndUserId(tour.id, user.id) private def fetchFeaturedGame(tour: Tournament): Fu[Option[FeaturedGame]] = tour.featuredId.ifTrue(tour.isStarted) ?? pairingRepo.byId flatMap { _ ?? { pairing => proxyRepo game pairing.gameId flatMap { _ ?? 
{ game => cached ranking tour flatMap { ranking => playerRepo.pairByTourAndUserIds(tour.id, pairing.user1, pairing.user2) map { pairOption => for { (p1, p2) <- pairOption rp1 <- RankedPlayer(ranking)(p1) rp2 <- RankedPlayer(ranking)(p2) } yield FeaturedGame(game, rp1, rp2) } } } } } } private def sheetNbs(s: arena.Sheet) = Json.obj( "game" -> s.scores.size, "berserk" -> s.scores.count(_.isBerserk), "win" -> s.scores.count(_.res == arena.Sheet.ResWin) ) private val cachableData = cacheApi[Tournament.ID, CachableData](64, "tournament.json.cachable") { _.expireAfterWrite(1 second) .buildAsyncFuture { id => for { tour <- tournamentRepo byId id duels = duelStore.bestRated(id, 6) jsonDuels <- duels.map(duelJson).sequenceFu duelTeams <- tour.exists(_.isTeamBattle) ?? { playerRepo.teamsOfPlayers(id, duels.flatMap(_.userIds)) map { teams => JsObject(teams map { case (userId, teamId) => (userId, JsString(teamId)) }).some } } featured <- tour ?? fetchFeaturedGame podium <- tour.exists(_.isFinished) ?? podiumJsonCache.get(id) } yield CachableData( duels = JsArray(jsonDuels), duelTeams = duelTeams, featured = featured map featuredJson, podium = podium ) } } private def featuredJson(featured: FeaturedGame) = { val game = featured.game def ofPlayer(rp: RankedPlayer, p: lila.game.Player) = { val light = lightUserApi sync rp.player.userId Json .obj( "rank" -> rp.rank, "name" -> light.fold(rp.player.userId)(_.name), "rating" -> rp.player.rating ) .add("title" -> light.flatMap(_.title)) .add("berserk" -> p.berserk) } Json .obj( "id" -> game.id, "fen" -> chess.format.Forsyth.boardAndColor(game.situation), "orientation" -> game.naturalOrientation.name, "color" -> game.naturalOrientation.name, // app BC https://github.com/ornicar/lila/issues/7195 "lastMove" -> ~game.lastMoveKeys, "white" -> ofPlayer(featured.white, game player chess.White), "black" -> ofPlayer(featured.black, game player chess.Black) ) .add( // not named `clock` to avoid conflict with lichobile "c" -> game.clock.ifTrue(game.isBeingPlayed).map { c => Json.obj( "white" -> c.remainingTime(chess.White).roundSeconds, "black" -> c.remainingTime(chess.Black).roundSeconds ) } ) .add("winner" -> game.winnerColor.map(_.name)) } private def myInfoJson(u: Option[User], delay: Option[Pause.Delay])(i: MyInfo) = Json .obj( "rank" -> i.rank, "withdraw" -> i.withdraw, "gameId" -> i.gameId, "username" -> u.map(_.titleUsername) ) .add("pauseDelay", delay.map(_.seconds)) private def gameUserJson(userId: Option[String], rating: Option[Int]): JsObject = { val light = userId flatMap lightUserApi.sync Json .obj("rating" -> rating) .add("name" -> light.map(_.name)) .add("title" -> light.flatMap(_.title)) } private val podiumJsonCache = cacheApi[Tournament.ID, Option[JsArray]](32, "tournament.podiumJson") { _.expireAfterAccess(15 seconds) .expireAfterWrite(1 minute) .maximumSize(256) .buildAsyncFuture { id => tournamentRepo finishedById id flatMap { _ ?? 
{ tour => playerRepo.bestByTourWithRank(id, 3).flatMap { top3 => // check that the winner is still correctly denormalized top3.headOption.map(_.player.userId).filter(w => tour.winnerId.fold(true)(w !=)) foreach { tournamentRepo.setWinnerId(tour.id, _) } top3.map { case rp @ RankedPlayer(_, player) => for { sheet <- cached.sheet(tour, player.userId) json <- playerJson(lightUserApi, sheet.some, rp, tour.streakable) } yield json ++ Json .obj( "nb" -> sheetNbs(sheet) ) .add("performance" -> player.performanceOption) }.sequenceFu } map { l => JsArray(l).some } } } } } private def duelPlayerJson(p: Duel.DuelPlayer): Fu[JsObject] = lightUserApi.async(p.name.id) map { u => Json .obj( "n" -> u.fold(p.name.value)(_.name), "r" -> p.rating.value, "k" -> p.rank.value ) .add("t" -> u.flatMap(_.title)) } private def duelJson(d: Duel): Fu[JsObject] = for { u1 <- duelPlayerJson(d.p1) u2 <- duelPlayerJson(d.p2) } yield Json.obj( "id" -> d.gameId, "p" -> Json.arr(u1, u2) ) def getTeamStanding(tour: Tournament): Fu[Option[JsArray]] = tour.isTeamBattle ?? { teamStandingJsonCache get tour.id dmap some } def apiTeamStanding(tour: Tournament): Fu[Option[JsArray]] = tour.teamBattle ?? { battle => if (battle.hasTooManyTeams) bigTeamStandingJsonCache get tour.id dmap some else teamStandingJsonCache get tour.id dmap some } private val teamStandingJsonCache = cacheApi[Tournament.ID, JsArray](4, "tournament.teamStanding") { _.expireAfterWrite(500 millis) .buildAsyncFuture(fetchAndRenderTeamStandingJson(TeamBattle.displayTeams)) } private val bigTeamStandingJsonCache = cacheApi[Tournament.ID, JsArray](4, "tournament.teamStanding.big") { _.expireAfterWrite(2 seconds) .buildAsyncFuture(fetchAndRenderTeamStandingJson(TeamBattle.maxTeams)) } private def fetchAndRenderTeamStandingJson(max: Int)(id: Tournament.ID) = cached.battle.teamStanding.get(id) map { ranked => JsArray(ranked take max map teamBattleRankedWrites.writes) } implicit private val teamBattleRankedWrites: Writes[TeamBattle.RankedTeam] = OWrites { rt => Json.obj( "rank" -> rt.rank, "id" -> rt.teamId, "score" -> rt.score, "players" -> rt.leaders.map { p => Json.obj( "user" -> lightUserApi.sync(p.userId), "score" -> p.score ) } ) } private def getMyRankedTeam(tour: Tournament, teamId: TeamID): Fu[Option[TeamBattle.RankedTeam]] = tour.teamBattle.exists(_.hasTooManyTeams) ?? cached.battle.teamStanding.get(tour.id) map { _.find(_.teamId == teamId) } private val teamInfoCache = cacheApi[(Tournament.ID, TeamID), Option[JsObject]](16, "tournament.teamInfo.json") { _.expireAfterWrite(5 seconds) .maximumSize(32) .buildAsyncFuture { case (tourId, teamId) => cached.teamInfo.get(tourId -> teamId) flatMap { _ ?? { info => lightUserApi.preloadMany(info.topPlayers.map(_.userId)) inject Json .obj( "id" -> teamId, "nbPlayers" -> info.nbPlayers, "rating" -> info.avgRating, "perf" -> info.avgPerf, "score" -> info.avgScore, "topPlayers" -> info.topPlayers.flatMap { p => lightUserApi.sync(p.userId) map { user => Json .obj( "name" -> user.name, "rating" -> p.rating, "score" -> p.score ) .add("fire" -> p.fire) .add("title" -> user.title) } } ) .some } } } } def teamInfo(tour: Tournament, teamId: TeamID): Fu[Option[JsObject]] = tour.isTeamBattle ?? 
{ teamInfoCache get (tour.id -> teamId) } } object JsonView { def top(t: TournamentTop, getLightUser: LightUser.GetterSync): JsArray = JsArray { t.value.map { p => val light = getLightUser(p.userId) Json .obj( "n" -> light.fold(p.userId)(_.name), "s" -> p.score ) .add("t" -> light.flatMap(_.title)) .add("f" -> p.fire) .add("w" -> p.withdraw) } } val playerResultWrites: OWrites[Player.Result] = OWrites[Player.Result] { case Player.Result(player, user, rank) => Json .obj( "rank" -> rank, "score" -> player.score, "rating" -> player.rating, "username" -> user.name ) .add("title" -> user.title) .add("performance" -> player.performanceOption) .add("team" -> player.team) } def playerJson( lightUserApi: LightUserApi, sheets: Map[String, arena.Sheet], streakable: Boolean )(rankedPlayer: RankedPlayer)(implicit ec: ExecutionContext): Fu[JsObject] = playerJson(lightUserApi, sheets get rankedPlayer.player.userId, rankedPlayer, streakable) private[tournament] def playerJson( lightUserApi: LightUserApi, sheet: Option[arena.Sheet], rankedPlayer: RankedPlayer, streakable: Boolean )(implicit ec: ExecutionContext): Fu[JsObject] = { val p = rankedPlayer.player lightUserApi async p.userId map { light => Json .obj( "name" -> light.fold(p.userId)(_.name), "rank" -> rankedPlayer.rank, "rating" -> p.rating, "score" -> p.score, "sheet" -> sheet.map(sheetJson(streakable)) ) .add("title" -> light.flatMap(_.title)) .add("provisional" -> p.provisional) .add("withdraw" -> p.withdraw) .add("team" -> p.team) } } private[tournament] def sheetJson(streakable: Boolean)(s: arena.Sheet) = Json .obj( "scores" -> s.scores.reverse.map(sheetScoreJson), "total" -> s.total ) .add("fire" -> (streakable && s.onFire)) private[tournament] def sheetScoreJson(score: arena.Sheet.Score) = if (score.flag == arena.Sheet.Normal) JsNumber(score.value) else Json.arr(score.value, score.flag.id) private def formatDate(date: DateTime) = ISODateTimeFormat.dateTime print date private[tournament] def scheduleJson(s: Schedule) = Json.obj( "freq" -> s.freq.name, "speed" -> s.speed.key ) implicit val clockWrites: OWrites[chess.Clock.Config] = OWrites { clock => Json.obj( "limit" -> clock.limitSeconds, "increment" -> clock.incrementSeconds ) } private[tournament] def positionJson(fen: FEN): JsObject = Thematic.byFen(fen) match { case Some(pos) => Json .obj( "eco" -> pos.eco, "name" -> pos.name, "wikiPath" -> pos.wikiPath, "fen" -> pos.fen ) case None => Json .obj( "name" -> "Custom position", "fen" -> fen ) } implicit private[tournament] val spotlightWrites: OWrites[Spotlight] = OWrites { s => Json .obj( "headline" -> s.headline, "description" -> s.description ) .add("iconImg" -> s.iconImg) .add("iconFont" -> s.iconFont) } implicit private[tournament] def perfTypeWrites(implicit lang: Lang): OWrites[PerfType] = OWrites { pt => Json .obj("key" -> pt.key, "name" -> pt.trans) .add("icon" -> mobileBcIcons.get(pt)) // mobile BC only } implicit private[tournament] val statsWrites: Writes[TournamentStats] = Json.writes[TournamentStats] private[tournament] val mobileBcIcons: Map[PerfType, String] = Map( PerfType.UltraBullet -> "{", PerfType.Bullet -> "T", PerfType.Blitz -> ")", PerfType.Rapid -> "#", PerfType.Classical -> "+" ) }
luanlv/lila
modules/tournament/src/main/JsonView.scala
Scala
mit
20,550
package controllers

import play.api._
import play.api.mvc._
import play.api.Play.current
import play.api.cache.Cache
import java.util._

object CacheController extends Controller {

  def setCache = Action { implicit request =>
    val now = new Date()
    val data = now.toString()
    val until = now.getTime() + 10000
    val until2 = new Date(until).toString()
    Cache.set("Date", data, 10)
    Cache.set("Until", until2, 15)
    Logger.info(data)
    Logger.info(until.toString())
    Logger.info(until2)
    Redirect(routes.CacheController.getCache)
  }

  def getCache = Action { implicit request =>
    val now = new Date()
    Cache.getAs[String]("Date").map{ date =>
      Ok(views.html.cache(date, Cache.getAs[String]("Until").getOrElse("None"), now.toString))
    }.getOrElse{
      Ok(views.html.cache("None","None",now.toString))
    }
  }
}
khonda/playframeworkPractice
app/controllers/CacheController.scala
Scala
mit
860
package mesosphere.marathon package api.v2.validation import com.wix.accord.Validator import com.wix.accord.dsl._ import mesosphere.marathon.api.v2.Validation import mesosphere.marathon.raml.{ EnvVarSecretRef, EnvVarValueOrSecret, SecretDef } trait SecretValidation { import Validation._ def stringify(ref: EnvVarValueOrSecret): String = ref match { case secretRef: EnvVarSecretRef => secretRef.secret case _ => ref.toString // this should never be called; if it is, validation output will not be friendly } def secretRefValidator(secrets: Map[String, SecretDef]) = validator[(String, EnvVarValueOrSecret)] { entry => entry._2 as stringify(entry._2) is isTrue("references an undefined secret"){ case ref: EnvVarSecretRef => secrets.contains(ref.secret) case _ => true } } val secretEntryValidator: Validator[(String, SecretDef)] = validator[(String, SecretDef)] { t => t._1 as "" is notEmpty t._2.source as "source" is notEmpty } val secretValidator = validator[Map[String, SecretDef]] { s => s.keys is every(notEmpty) s.values.map(_.source) as "source" is every(notEmpty) } } object SecretValidation extends SecretValidation
natemurthy/marathon
src/main/scala/mesosphere/marathon/api/v2/validation/SecretValidation.scala
Scala
apache-2.0
1,205
package org.jetbrains.plugins.scala package lang.psi.light.scala import com.intellij.psi.impl.light.LightElement import org.jetbrains.plugins.scala.lang.psi.api.statements.params.{ScParameter, ScParameterClause} import org.jetbrains.plugins.scala.lang.psi.types.ScType import org.jetbrains.plugins.scala.lang.psi.ScalaPsiElement /** * @author Alefas * @since 03/04/14. */ class ScLightParameterClause(types: List[ScType], clause: ScParameterClause) extends LightElement(clause.getManager, clause.getLanguage) with ScParameterClause { override def isImplicit: Boolean = clause.isImplicit override def parameters: Seq[ScParameter] = clause.parameters.zip(types).zipWithIndex.map { case ((param, tp), i) => new ScLightParameter(param, tp, i) } override def toString: String = "Light parameter clause" override def addParameter(param: ScParameter): ScParameterClause = throw new UnsupportedOperationException("Operation on light element") override protected def findChildrenByClassScala[T >: Null <: ScalaPsiElement](clazz: Class[T]): Array[T] = throw new UnsupportedOperationException("Operation on light element") override protected def findChildByClassScala[T >: Null <: ScalaPsiElement](clazz: Class[T]): T = throw new UnsupportedOperationException("Operation on light element") }
consulo/consulo-scala
src/org/jetbrains/plugins/scala/lang/psi/light/scala/ScLightParameterClause.scala
Scala
apache-2.0
1,323
package outwatch import cats.{Applicative, Monoid} import cats.implicits._ import org.scalajs.dom import scala.scalajs.js import colibri._ import colibri.effect.RunSyncEffect trait ManagedSubscriptions { @inline def managed[F[_] : RunSyncEffect, T : CanCancel](subscription: F[T]): VDomModifier = managedFunction(() => RunSyncEffect[F].unsafeRun(subscription)) def managed[F[_] : RunSyncEffect : Applicative, T : CanCancel : Monoid](sub1: F[T], sub2: F[T], subscriptions: F[T]*): VDomModifier = { val composite = (sub1 :: sub2 :: subscriptions.toList).sequence.map[T](subs => Monoid[T].combineAll(subs)) managed(composite) } @inline def managedFunction[T : CanCancel](subscription: () => T): VDomModifier = CancelableModifier(() => Cancelable.lift(subscription())) object managedElement { def apply[T : CanCancel](subscription: dom.Element => T): VDomModifier = VDomModifier.delay { var lastSub: js.UndefOr[T] = js.undefined VDomModifier( dsl.onDomMount foreach { elem => lastSub = subscription(elem) }, dsl.onDomUnmount foreach { lastSub.foreach(CanCancel[T].cancel) } ) } def asHtml[T : CanCancel](subscription: dom.html.Element => T): VDomModifier = apply(elem => subscription(elem.asInstanceOf[dom.html.Element])) def asSvg[T : CanCancel](subscription: dom.svg.Element => T): VDomModifier = apply(elem => subscription(elem.asInstanceOf[dom.svg.Element])) } } object ManagedSubscriptions extends ManagedSubscriptions
OutWatch/outwatch
outwatch/src/main/scala/outwatch/ManagedSubscriptions.scala
Scala
apache-2.0
1,499
/** * Copyright (C) 2009-2011 the original author or authors. * See the notice.md file distributed with this work for additional * information regarding copyright ownership. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.fusesource.scalate.sample.springmvc package config import controller.IndexController import org.springframework.context.annotation._ import org.fusesource.scalate.spring.view.ScalateViewResolver class AppConfig { @Bean def indexController = new IndexController @Bean def viewResolver = new ScalateViewResolver }
arashi01/scalate-samples
scalate-sample-spring-mvc/src/main/scala/org/fusesource/scalate/sample/springmvc/config/AppConfig.scala
Scala
apache-2.0
1,075
package com.sageserpent.americium import cats.free.Free.pure import com.sageserpent.americium.TrialsImplementation.{ Factory, GenerationOperation } import com.sageserpent.americium.java.CaseFactory trait CommonApi { def only[Case](onlyCase: Case): TrialsImplementation[Case] = TrialsImplementation(pure[GenerationOperation, Case](onlyCase)) def stream[Case]( caseFactory: CaseFactory[Case] ): TrialsImplementation[Case] = new TrialsImplementation( Factory(new CaseFactory[Case] { override def apply(input: Long): Case = { require(lowerBoundInput() <= input) require(upperBoundInput() >= input) caseFactory(input) } override def lowerBoundInput(): Long = caseFactory.lowerBoundInput() override def upperBoundInput(): Long = caseFactory.upperBoundInput() override def maximallyShrunkInput(): Long = caseFactory.maximallyShrunkInput() }) ) }
sageserpent-open/americium
src/main/scala/com/sageserpent/americium/CommonApi.scala
Scala
mit
934
package com.marmoush import akka.actor.ActorSystem import scala.concurrent.ExecutionContext /** * Created by spark on 11/17/15. */ package object scalasamples { object Contexts { private val akka: ActorSystem = ActorSystem("helloakka") implicit val simpleDbLookups: ExecutionContext = akka.dispatchers.lookup("main.contexts.simple-db-lookups") implicit val expensiveDbLookups: ExecutionContext = akka.dispatchers.lookup("main.contexts.expensive-db-lookups") implicit val dbWriteOperations: ExecutionContext = akka.dispatchers.lookup("main.contexts.db-write-operations") implicit val expensiveCpuOperations: ExecutionContext = akka.dispatchers.lookup("main.contexts.expensive-cpu-operations") } }
IsmailMarmoush/scala-samples
akka/src/main/scala/com/marmoush/scalasamples/package.scala
Scala
agpl-3.0
728
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.spark.scheduler.cluster.k8s

import io.fabric8.kubernetes.api.model.{DoneablePod, Pod}
import io.fabric8.kubernetes.client.KubernetesClient
import io.fabric8.kubernetes.client.dsl.PodResource
import org.mockito.{ArgumentCaptor, Mock, MockitoAnnotations}
import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito.{mock, never, times, verify, when}
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
import org.scalatest.BeforeAndAfter
import scala.collection.mutable

import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.deploy.k8s.Config
import org.apache.spark.deploy.k8s.Constants._
import org.apache.spark.deploy.k8s.Fabric8Aliases._
import org.apache.spark.deploy.k8s.KubernetesUtils._
import org.apache.spark.scheduler.ExecutorExited
import org.apache.spark.scheduler.cluster.k8s.ExecutorLifecycleTestUtils._

class ExecutorPodsLifecycleManagerSuite extends SparkFunSuite with BeforeAndAfter {

  private var namedExecutorPods: mutable.Map[String, PodResource[Pod, DoneablePod]] = _

  @Mock
  private var kubernetesClient: KubernetesClient = _

  @Mock
  private var podOperations: PODS = _

  @Mock
  private var schedulerBackend: KubernetesClusterSchedulerBackend = _

  private var snapshotsStore: DeterministicExecutorPodsSnapshotsStore = _
  private var eventHandlerUnderTest: ExecutorPodsLifecycleManager = _

  before {
    MockitoAnnotations.openMocks(this).close()
    snapshotsStore = new DeterministicExecutorPodsSnapshotsStore()
    namedExecutorPods = mutable.Map.empty[String, PodResource[Pod, DoneablePod]]
    when(schedulerBackend.getExecutorsWithRegistrationTs()).thenReturn(Map.empty[String, Long])
    when(kubernetesClient.pods()).thenReturn(podOperations)
    when(podOperations.withName(any(classOf[String]))).thenAnswer(namedPodsAnswer())
    eventHandlerUnderTest = new ExecutorPodsLifecycleManager(
      new SparkConf(),
      kubernetesClient,
      snapshotsStore)
    eventHandlerUnderTest.start(schedulerBackend)
  }

  test("When an executor reaches error states immediately, remove from the scheduler backend.") {
    val failedPod = failedExecutorWithoutDeletion(1)
    snapshotsStore.updatePod(failedPod)
    snapshotsStore.notifySubscribers()
    val msg = exitReasonMessage(1, failedPod)
    val expectedLossReason = ExecutorExited(1, exitCausedByApp = true, msg)
    verify(schedulerBackend).doRemoveExecutor("1", expectedLossReason)
    verify(namedExecutorPods(failedPod.getMetadata.getName)).delete()
  }

  test("Don't remove executors twice from Spark but remove from K8s repeatedly.") {
    val failedPod = failedExecutorWithoutDeletion(1)
    snapshotsStore.updatePod(failedPod)
    snapshotsStore.notifySubscribers()
    snapshotsStore.updatePod(failedPod)
    snapshotsStore.notifySubscribers()
    val msg = exitReasonMessage(1, failedPod)
    val expectedLossReason = ExecutorExited(1, exitCausedByApp = true, msg)
    verify(schedulerBackend, times(1)).doRemoveExecutor("1", expectedLossReason)
    verify(namedExecutorPods(failedPod.getMetadata.getName), times(2)).delete()
  }

  test("When the scheduler backend lists executor ids that aren't present in the cluster," +
    " remove those executors from Spark.") {
    when(schedulerBackend.getExecutorsWithRegistrationTs()).thenReturn(Map("1" -> 7L))
    val missingPodDelta =
      eventHandlerUnderTest.conf.get(Config.KUBERNETES_EXECUTOR_MISSING_POD_DETECT_DELTA)
    snapshotsStore.clock.advance(missingPodDelta + 7)
    snapshotsStore.replaceSnapshot(Seq.empty[Pod])
    snapshotsStore.notifySubscribers()
    verify(schedulerBackend, never()).doRemoveExecutor(any(), any())

    // 1 more millisecond and the accepted delta is over so the missing POD will be detected
    snapshotsStore.clock.advance(1)
    snapshotsStore.replaceSnapshot(Seq.empty[Pod])
    snapshotsStore.notifySubscribers()
    val msg = "The executor with ID 1 (registered at 7 ms) was not found in the cluster at " +
      "the polling time (30008 ms) which is after the accepted detect delta time (30000 ms) " +
      "configured by `spark.kubernetes.executor.missingPodDetectDelta`. The executor may have " +
      "been deleted but the driver missed the deletion event. Marking this executor as failed."
    val expectedLossReason = ExecutorExited(-1, exitCausedByApp = false, msg)
    verify(schedulerBackend).doRemoveExecutor("1", expectedLossReason)
  }

  test("Keep executor pods in k8s if configured.") {
    val failedPod = failedExecutorWithoutDeletion(1)
    eventHandlerUnderTest.conf.set(Config.KUBERNETES_DELETE_EXECUTORS, false)
    snapshotsStore.updatePod(failedPod)
    snapshotsStore.notifySubscribers()
    val msg = exitReasonMessage(1, failedPod)
    val expectedLossReason = ExecutorExited(1, exitCausedByApp = true, msg)
    verify(schedulerBackend).doRemoveExecutor("1", expectedLossReason)
    verify(namedExecutorPods(failedPod.getMetadata.getName), never()).delete()
    val podCaptor = ArgumentCaptor.forClass(classOf[Pod])
    verify(namedExecutorPods(failedPod.getMetadata.getName)).patch(podCaptor.capture())
    val pod = podCaptor.getValue()
    assert(pod.getMetadata().getLabels().get(SPARK_EXECUTOR_INACTIVE_LABEL) === "true")
  }

  private def exitReasonMessage(failedExecutorId: Int, failedPod: Pod): String = {
    val reason = Option(failedPod.getStatus.getReason)
    val message = Option(failedPod.getStatus.getMessage)
    s"""
       |The executor with id $failedExecutorId exited with exit code 1.
       |The API gave the following brief reason: ${reason.getOrElse("N/A")}
       |The API gave the following message: ${message.getOrElse("N/A")}
       |The API gave the following container statuses:
       |
       |${containersDescription(failedPod)}
     """.stripMargin
  }

  private def namedPodsAnswer(): Answer[PodResource[Pod, DoneablePod]] =
    (invocation: InvocationOnMock) => {
      val podName: String = invocation.getArgument(0)
      namedExecutorPods.getOrElseUpdate(
        podName, mock(classOf[PodResource[Pod, DoneablePod]]))
    }
}
witgo/spark
resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsLifecycleManagerSuite.scala
Scala
apache-2.0
6,882
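The suite above hinges on two testability tricks: a snapshots store that only delivers events when the test explicitly calls notifySubscribers(), and a manual clock that the test advances by hand, which is what makes the "one more millisecond" missing-pod boundary assertable. The following is a minimal, self-contained sketch of that pattern; ManualClock and SnapshotsStore here are local stand-ins, not Spark's actual classes.

// Sketch of deterministic time + explicit notification, assuming hypothetical
// stand-in types rather than Spark's DeterministicExecutorPodsSnapshotsStore.
object ManualClockSketch {
  final class ManualClock(private var now: Long = 0L) {
    def advance(ms: Long): Unit = now += ms
    def getTimeMillis: Long = now
  }

  final class SnapshotsStore(val clock: ManualClock) {
    private var subscribers: List[Long => Unit] = Nil
    def subscribe(f: Long => Unit): Unit = subscribers ::= f
    // Nothing fires until the test asks for it, so ordering is deterministic.
    def notifySubscribers(): Unit = subscribers.foreach(_(clock.getTimeMillis))
  }

  def main(args: Array[String]): Unit = {
    val store = new SnapshotsStore(new ManualClock)
    val detectDelta = 30000L // mirrors the 30000 ms delta in the test message
    val registeredAt = 7L
    store.subscribe { pollTime =>
      val missing = pollTime - registeredAt > detectDelta
      println(s"poll at $pollTime ms -> missing executor detected: $missing")
    }
    store.clock.advance(detectDelta + 7) // exactly at the boundary: not yet missing
    store.notifySubscribers()
    store.clock.advance(1)               // one ms past the delta: now detected
    store.notifySubscribers()
  }
}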
package prisoners_dilemma

import org.scalacheck._

object StrategyGen {
  import Gen._

  def streamOfN[T](n: Int, g: Gen[T]): Gen[Stream[T]] =
    Gen.containerOfN[Stream, T](n, g)

  // Map over a throwaway constant generator so that the infinite stream is
  // (re)built lazily each time the outer generator is sampled.
  def infiniteStream[T](g: Gen[T]) = const("poo") map { (_) =>
    {
      def streaminate(gen: Gen[T]): Stream[Option[T]] = gen.sample #:: streaminate(gen)
      streaminate(g).flatten
    }
  }

  val strategyGen: Gen[RoundStrategy] =
    infiniteStream(Package.move) map (RoundStrategy.fromStream(_)) map (RoundStrategy.recording(_))

  val strategizerGen: Gen[Strategizer] = strategyGen.map(Strategizer.thatDoes(_))

  implicit val arbStrategy = Arbitrary(strategizerGen)
}

object StrategyProperties extends Properties("Various known strategies") {
  import Prop._
  import RuleGenerators._
  import StrategyGen._

  property("Two random streams of moves are played out") =
    forAll(Gen.posNum[Int]) { (turns: Int) =>
      val moveStreamGenerator = streamOfN(turns, Package.move)
      forAll(moveStreamGenerator, moveStreamGenerator) {
        (stream1: Stream[Move], stream2: Stream[Move]) =>
          val s1 = RoundStrategy.fromStream(stream1)
          val s2 = RoundStrategy.fromStream(stream2)
          RoundStrategy.moves(s1, s2).take(turns).toSeq =? stream1.zip(stream2).take(turns)
      }
    }

  property("The sucker always cooperates") =
    forAll(strategyGen, Gen.posNum[Int]) { (opponent: RoundStrategy, turns: Int) =>
      val allMoves: Stream[MoveSet] =
        RoundStrategy.moves(birds.StandardBirds.SUCKER, opponent).take(turns)
      val myMoves = allMoves.map(_._1)
      myMoves.forall(_ == Cooperate) :|
        s"The sucker defected OMG!! Here's the story: ${allMoves.toList}"
    }

  // TODO: Move titForTat over to birds package, and move both these tests too
  def play(turns: Int, one: RoundStrategy, other: RoundStrategy): (Seq[Move], Seq[Move]) =
    RoundStrategy.moves(one, other).take(turns).unzip

  property("Tit for Tat copies the prior move, and starts with Cooperate") =
    forAll(strategyGen, Gen.posNum[Int]) { (opponent: RoundStrategy, turns: Int) =>
      val (myMoves, theirMoves) = play(turns, RoundStrategy.titForTat, opponent)
      val myFirstMove = myMoves.head
      val myOtherMoves = myMoves.tail
      val theirPriorMoves = theirMoves.take(turns - 1)
      (myFirstMove =? Cooperate) && (myOtherMoves =? theirPriorMoves)
    }
}
Mharlin/better-testing-workshop
exercise2-prisoners-dilemma-completed/src/test/scala/prisoners-dilemma/StrategyTest.scala
Scala
mit
2,467
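The last property pins down tit-for-tat's contract precisely: the first move is Cooperate, and move t equals the opponent's move t-1. A tiny self-contained sketch of that behaviour, using local stand-in types rather than the repo's Move/RoundStrategy:

// Minimal tit-for-tat over finished move sequences (stand-in types, not the
// repo's stream-based RoundStrategy).
object TitForTatSketch {
  sealed trait Move
  case object Cooperate extends Move
  case object Defect extends Move

  // First move is Cooperate; every later move copies the opponent's previous one.
  def titForTat(opponentMoves: Seq[Move]): Seq[Move] =
    Cooperate +: opponentMoves.dropRight(1)

  def main(args: Array[String]): Unit = {
    val opponent = Seq(Defect, Defect, Cooperate, Defect)
    println(titForTat(opponent)) // List(Cooperate, Defect, Defect, Cooperate)
  }
}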
package reswing

import rescala.default.{Event, Evt, implicitScheduler}
import rescala.operator.cutOutOfUserComputation

/** Represents `Swing` events that are fired by the library or passed to the
  * library.
  */
sealed abstract class ReSwingEvent[T] {
  private[reswing] def toEvent: Event[T]
}

final class ReSwingEventOut[T] private[reswing] (initLazily: ReSwingEventOut[T] => Unit)
    extends ReSwingEvent[T] {
  private val event: Lazy[Evt[T]] = Lazy { Evt[T]() }
  private[reswing] def toEvent = { if (!event.isDefined) initLazily(this); event() }
  private[reswing] def apply(value: T) = if (event.isDefined) event().fire(value)
}

final class ReSwingEventIn[T] private[reswing] (event: Lazy[Event[T]]) extends ReSwingEvent[T] {
  private[reswing] def toEvent = { event() }
}

final class ReSwingEventNone[T] private[reswing] extends ReSwingEvent[T] {
  private[reswing] def toEvent = null
}

object ReSwingEvent {

  /** Creates an empty event (that is never fired) to be used with the library. */
  implicit def apply[T](value: Unit): ReSwingEventNone[T] = new ReSwingEventNone[T]

  /** Wraps an Event to be used with the library. */
  implicit def apply[T](value: => Event[T]): ReSwingEventIn[T] = new ReSwingEventIn(Lazy { value })

  /** Returns the Event representing the event. */
  @cutOutOfUserComputation
  implicit def toEvent[T](value: ReSwingEvent[T]): Event[T] = value.toEvent
}
guidosalva/REScala
Code/Extensions/RESwing/src/main/scala/reswing/ReSwingEvent.scala
Scala
apache-2.0
1,413
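The file above leans on a `Lazy` wrapper that is defined elsewhere in the project: a cell that is forced on first `apply()` and exposes `isDefined` so `ReSwingEventOut` can avoid forcing it just to check whether anything was fired. A minimal sketch of that assumed shape (not the project's actual implementation):

// Hypothetical Lazy cell matching the usage in ReSwingEventOut above.
final class Lazy[T](init: => T) {
  private var value: Option[T] = None
  def isDefined: Boolean = value.isDefined
  def apply(): T = {
    if (value.isEmpty) value = Some(init) // force on first access only
    value.get
  }
}

object Lazy {
  def apply[T](init: => T): Lazy[T] = new Lazy(init)
}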
package com.twitter.penguin.korean.phrase_extractor

import java.util.logging.Logger

import com.twitter.penguin.korean.TestBase._
import com.twitter.penguin.korean.TwitterKoreanProcessor.tokenize
import com.twitter.penguin.korean.normalizer.KoreanNormalizer
import com.twitter.penguin.korean.tokenizer.KoreanTokenizer.KoreanToken
import com.twitter.penguin.korean.util.KoreanPos
import com.twitter.penguin.korean.{TestBase, TwitterKoreanProcessor}

class KoreanPhraseExtractorTest extends TestBase {
  val LOG = Logger.getLogger(getClass.getSimpleName)

  case class SampleTextPair(text: String, phrases: String)

  val sampleText = List[SampleTextPair](
    SampleTextPair(
      "블랙프라이데이: 이날 미국의 수백만 소비자들은 크리스마스 선물을 할인된 가격에 사는 것을 주 목적으로 블랙프라이데이 쇼핑을 한다.",
      "블랙프라이데이(Noun: 0, 7), 이날(Noun: 9, 2), 이날 미국(Noun: 9, 5), 이날 미국의 수백만(Noun: 9, 10), " +
        "미국의 수백만(Noun: 12, 7), 수백만(Noun: 16, 3), 이날 미국의 수백만 소비자들(Noun: 9, 15), " +
        "미국의 수백만 소비자들(Noun: 12, 12), 수백만 소비자들(Noun: 16, 8), 크리스마스(Noun: 26, 5), " +
        "크리스마스 선물(Noun: 26, 8), 할인(Noun: 36, 2), 할인된 가격(Noun: 36, 6), 가격(Noun: 40, 2), " +
        "주 목적(Noun: 50, 4), 블랙프라이데이 쇼핑(Noun: 57, 10), 미국(Noun: 12, 2), 소비자들(Noun: 20, 4), " +
        "선물(Noun: 32, 2), 목적(Noun: 52, 2), 쇼핑(Noun: 65, 2)"
    ),
    SampleTextPair(
      "결정했어. 마키 코레썸 사주시는 분께는 허니버터칩 한 봉지를 선물할 것이다.",
      "결정(Noun: 0, 2), 마키(Noun: 6, 2), 마키 코레썸(Noun: 6, 6), " +
        "마키 코레썸 사주시는 분께는 허니버터칩(Noun: 6, 21), 코레썸 사주시는 분께는 허니버터칩(Noun: 9, 18), " +
        "허니버터칩(Noun: 22, 5), 마키 코레썸 사주시는 분께는 허니버터칩 한 봉지(Noun: 6, 26), " +
        "코레썸 사주시는 분께는 허니버터칩 한 봉지(Noun: 9, 23), 허니버터칩 한 봉지(Noun: 22, 10), " +
        "봉지(Noun: 30, 2), 코레썸(Noun: 9, 3)"
    ),
    SampleTextPair(
      "[단독]정부, 새 고용 형태 '중규직' 만든다 http://url.com 이름도 바뀌겟군. 정규직은 상규직, " +
        "비정규직은 하규직. 중규직 참 창조적이다. 결국 기업은 비정규직으로 이용할게 뻔함.",
      "단독(Noun: 1, 2), 정부(Noun: 4, 2), 새 고용(Noun: 8, 4), 새 고용 형태(Noun: 8, 7), " +
        "고용 형태(Noun: 10, 5), 중규직(Noun: 17, 3), 이름(Noun: 41, 2), 정규직(Noun: 51, 3), " +
        "상규직(Noun: 56, 3), 비정규직(Noun: 61, 4), 하규직(Noun: 67, 3), 기업(Noun: 88, 2), " +
        "고용(Noun: 10, 2), 형태(Noun: 13, 2), 하규(Noun: 67, 2)"
    ),
    SampleTextPair(
      "키? ...난 절대 키가 작은 게 아냐. 이소자키나 츠루기가 비정상적으로 큰거야. 1학년이 그렇게 큰 게 말이 돼!? ",
      "난 절대(Noun: 6, 4), 난 절대 키(Noun: 6, 6), 절대 키(Noun: 8, 4), 작은 게(Noun: 14, 4), " +
        "이소자키(Noun: 23, 4), 츠루기(Noun: 29, 3), 1학년(Noun: 46, 3), 절대(Noun: 8, 2), " +
        "이소(Noun: 23, 2), 자키(Noun: 25, 2), 학년(Noun: 47, 2)"
    ),
    SampleTextPair(
      "Galaxy S5와 iPhone 6의 경쟁",
      "Galaxy(Noun: 0, 6), Galaxy S5(Noun: 0, 9), iPhone(Noun: 11, 6), " +
        "iPhone 6의(Noun: 11, 9), iPhone 6의 경쟁(Noun: 11, 12), 6의 경쟁(Noun: 18, 5), " +
        "S5(Noun: 7, 2), 경쟁(Noun: 21, 2)"
    ),
    SampleTextPair(
      "ABCㅋㅋLTE갤럭시S4ㅋㅋ꼬마가",
      "ABC(Noun: 0, 3), LTE갤럭시S4(Noun: 5, 8), 꼬마(Noun: 15, 2), LTE(Noun: 5, 3), " +
        "갤럭시(Noun: 8, 3), S4(Noun: 11, 2)"
    ),
    SampleTextPair(
      "아름다운 트위터 #해쉬태그 평화로운 트위터의 #hashtag @mention",
      "아름다운 트위터(Noun: 0, 8), 평화로운 트위터(Noun: 15, 8), 트위터(Noun: 5, 3), " +
        "#해쉬태그(Hashtag: 9, 5), #hashtag(Hashtag: 25, 8)"
    )
  )

  val spamText = "레알 시발 저거 카지노 포르노 야동 보다가 개빡쳤음"

  val superLongText: String = "허니버터칩정규직크리스마스" * 50

  def time[R](block: => R): Long = {
    val t0 = System.currentTimeMillis()
    block
    val t1 = System.currentTimeMillis()
    t1 - t0
  }

  test("collapsePos correctly collapses KoreanPos sequences") {
    assert(KoreanPhraseExtractor.collapsePos(
      Seq(
        KoreanToken("N", KoreanPos.Noun, 0, 1),
        KoreanToken("N", KoreanPos.Noun, 1, 1)
      )).mkString("/") === "N(Noun: 0, 1)/N(Noun: 1, 1)"
    )

    assert(KoreanPhraseExtractor.collapsePos(
      Seq(
        KoreanToken("X", KoreanPos.KoreanParticle, 0, 1),
        KoreanToken("p", KoreanPos.NounPrefix, 1, 1),
        KoreanToken("N", KoreanPos.Noun, 2, 1)
      )).mkString("/") === "X(KoreanParticle: 0, 1)/pN(Noun: 1, 2)"
    )

    assert(KoreanPhraseExtractor.collapsePos(
      Seq(
        KoreanToken("p", KoreanPos.NounPrefix, 0, 1),
        KoreanToken("X", KoreanPos.KoreanParticle, 1, 1),
        KoreanToken("N", KoreanPos.Noun, 2, 1)
      )).mkString("/") === "p(Noun: 0, 1)/X(KoreanParticle: 1, 1)/N(Noun: 2, 1)"
    )

    assert(KoreanPhraseExtractor.collapsePos(
      Seq(
        KoreanToken("p", KoreanPos.NounPrefix, 0, 1),
        KoreanToken("N", KoreanPos.Noun, 1, 1),
        KoreanToken("X", KoreanPos.KoreanParticle, 2, 1)
      )).mkString("/") === "pN(Noun: 0, 2)/X(KoreanParticle: 2, 1)"
    )

    assert(KoreanPhraseExtractor.collapsePos(tokenize(sampleText(0).text)).mkString("") ===
      "블랙프라이데이(Noun: 0, 7):(Punctuation: 7, 1) (Space: 8, 1)이날(Noun: 9, 2) (Space: 11, 1)" +
        "미국(ProperNoun: 12, 2)의(Josa: 14, 1) (Space: 15, 1)수백만(Noun: 16, 3) (Space: 19, 1)" +
        "소비자들(Noun: 20, 4)은(Josa: 24, 1) (Space: 25, 1)크리스마스(Noun: 26, 5) (Space: 31, 1)" +
        "선물(Noun: 32, 2)을(Josa: 34, 1) (Space: 35, 1)할인(Noun: 36, 2)" +
        "된(Verb: 38, 1) (Space: 39, 1)가격(Noun: 40, 2)에(Josa: 42, 1) (Space: 43, 1)" +
        "사는(Verb: 44, 2) (Space: 46, 1)것(Noun: 47, 1)을(Josa: 48, 1) (Space: 49, 1)" +
        "주(Noun: 50, 1) (Space: 51, 1)목적(Noun: 52, 2)으로(Josa: 54, 2) (Space: 56, 1)" +
        "블랙프라이데이(Noun: 57, 7) (Space: 64, 1)쇼핑(Noun: 65, 2)을(Josa: 67, 1) (Space: 68, 1)" +
        "한다(Verb: 69, 2).(Punctuation: 71, 1)")

    assert(KoreanPhraseExtractor.collapsePos(tokenize(sampleText(1).text)).mkString("") ===
      "결정(Noun: 0, 2)했어(Verb: 2, 2).(Punctuation: 4, 1) (Space: 5, 1)" +
        "마키(Noun: 6, 2) (Space: 8, 1)코레썸(ProperNoun: 9, 3) (Space: 12, 1)" +
        "사주시는(Verb: 13, 4) (Space: 17, 1)분께는(Verb: 18, 3) (Space: 21, 1)" +
        "허니버터칩(Noun: 22, 5) (Space: 27, 1)한(Verb: 28, 1) (Space: 29, 1)" +
        "봉지(Noun: 30, 2)를(Josa: 32, 1) (Space: 33, 1)선물할(Verb: 34, 3) (Space: 37, 1)" +
        "것(Noun: 38, 1)이다(Josa: 39, 2).(Punctuation: 41, 1)")
  }

  test("extractPhrases correctly extracts phrases") {
    assert(KoreanPhraseExtractor.extractPhrases(
      tokenize(sampleText(0).text), filterSpam = false
    ).mkString(", ") ===
      "블랙프라이데이(Noun: 0, 7), 이날(Noun: 9, 2), 이날 미국(Noun: 9, 5), 이날 미국의 수백만(Noun: 9, 10), " +
        "미국의 수백만(Noun: 12, 7), 수백만(Noun: 16, 3), 이날 미국의 수백만 소비자들(Noun: 9, 15), " +
        "미국의 수백만 소비자들(Noun: 12, 12), 수백만 소비자들(Noun: 16, 8), 크리스마스(Noun: 26, 5), " +
        "크리스마스 선물(Noun: 26, 8), 할인(Noun: 36, 2), 할인된 가격(Noun: 36, 6), 가격(Noun: 40, 2), " +
        "주 목적(Noun: 50, 4), 블랙프라이데이 쇼핑(Noun: 57, 10), " +
        "미국(Noun: 12, 2), 소비자들(Noun: 20, 4), 선물(Noun: 32, 2), 목적(Noun: 52, 2), " +
        "쇼핑(Noun: 65, 2)")
  }

  test("extractPhrases correctly extracts phrases from a string") {
    sampleText.foreach {
      case SampleTextPair(text: String, phrases: String) =>
        val tokens = tokenize(text)
        assert(KoreanPhraseExtractor.extractPhrases(tokens).mkString(", ") === phrases)
    }
  }

  test("extractPhrases should extract long noun-only phrases in reasonable time") {
    val tokens = tokenize(superLongText)
    assert(KoreanPhraseExtractor.extractPhrases(tokens).mkString(", ") ===
      "허니버터칩(Noun: 0, 5), 정규직(Noun: 5, 3), 크리스마스(Noun: 8, 5)")
    assert(time(KoreanPhraseExtractor.extractPhrases(tokens)) < 10000)
  }

  test("extractPhrases should correctly extract the example set") {
    def phraseExtractor(text: String) = {
      val normalized = KoreanNormalizer.normalize(text)
      val tokens = tokenize(normalized)
      KoreanPhraseExtractor.extractPhrases(tokens).mkString("/")
    }
    assertExamples(
      "current_phrases.txt", LOG,
      phraseExtractor
    )
  }

  test("extractPhrases should filter out spam and profane words") {
    val tokens = tokenize(spamText)
    assert(KoreanPhraseExtractor.extractPhrases(tokens).mkString(", ") ===
      "레알(Noun: 0, 2), 레알 시발(Noun: 0, 5), 레알 시발 저거(Noun: 0, 8), 시발 저거(Noun: 3, 5), " +
        "레알 시발 저거 카지노(Noun: 0, 12), 시발 저거 카지노(Noun: 3, 9), 저거 카지노(Noun: 6, 6), " +
        "레알 시발 저거 카지노 포르노(Noun: 0, 16), 시발 저거 카지노 포르노(Noun: 3, 13), " +
        "저거 카지노 포르노(Noun: 6, 10), 카지노 포르노(Noun: 9, 7), " +
        "레알 시발 저거 카지노 포르노 야동(Noun: 0, 19), 시발 저거 카지노 포르노 야동(Noun: 3, 16), " +
        "저거 카지노 포르노 야동(Noun: 6, 13), 카지노 포르노 야동(Noun: 9, 10), 포르노 야동(Noun: 13, 6), " +
        "시발(Noun: 3, 2), 저거(Noun: 6, 2), 카지노(Noun: 9, 3), 포르노(Noun: 13, 3), 야동(Noun: 17, 2)")
    assert(KoreanPhraseExtractor.extractPhrases(tokens, filterSpam = true).mkString(", ") ===
      "레알(Noun: 0, 2), 저거(Noun: 6, 2)")
  }
}
NamHosung/SE
src/test/scala/com/twitter/penguin/korean/phrase_extractor/KoreanPhraseExtractorTest.scala
Scala
apache-2.0
10,368
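The collapsePos assertions above all exercise one idea: adjacent noun-like tokens get merged into a single phrase token whose offset is the first token's and whose length is the sum. A self-contained sketch of that merging step, with Token/Pos as local stand-ins rather than the library's actual KoreanToken/KoreanPos:

// Minimal sketch of noun-collapsing, assuming hypothetical stand-in types.
object CollapseSketch {
  sealed trait Pos
  case object Noun extends Pos
  case object Josa extends Pos

  case class Token(text: String, pos: Pos, offset: Int, length: Int)

  def collapseNouns(tokens: List[Token]): List[Token] =
    tokens.foldLeft(List.empty[Token]) {
      // Previous token and current token are both nouns: merge them in place.
      case (prev :: rest, t) if prev.pos == Noun && t.pos == Noun =>
        Token(prev.text + t.text, Noun, prev.offset, prev.length + t.length) :: rest
      case (acc, t) => t :: acc
    }.reverse

  def main(args: Array[String]): Unit = {
    val tokens = List(
      Token("허니", Noun, 0, 2), Token("버터", Noun, 2, 2),
      Token("칩", Noun, 4, 1), Token("을", Josa, 5, 1))
    println(collapseNouns(tokens))
    // List(Token(허니버터칩,Noun,0,5), Token(을,Josa,5,1))
  }
}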
package stealthnet.scala.cryptography

import java.security.{Key, KeyFactory, PrivateKey, PublicKey}
import java.security.interfaces.RSAPublicKey
import java.security.spec.RSAPublicKeySpec
import javax.crypto.Cipher
import org.bouncycastle.crypto.{BlockCipher, BufferedBlockCipher}
import org.bouncycastle.crypto.engines.RijndaelEngine
import org.bouncycastle.crypto.params.{KeyParameter, ParametersWithIV}
import org.bouncycastle.crypto.paddings.{
  BlockCipherPadding,
  ISO10126d2Padding,
  PaddedBufferedBlockCipher,
  PKCS7Padding,
  X923Padding,
  ZeroBytePadding
}
import org.bouncycastle.crypto.modes.{
  CFBBlockCipher,
  CTSBlockCipher,
  CBCBlockCipher,
  OFBBlockCipher
}
import org.bouncycastle.jce.provider.BouncyCastleProvider
import scala.language.implicitConversions

/**
 * Cipher modes.
 *
 * The different modes and values are the ones available in ''C#'' (on which
 * rely original ''StealthNet'' application):
 * {{{
 * public enum CipherMode { CBC = 1, ECB = 2, OFB = 3, CFB = 4, CTS = 5 }
 * }}}
 */
object CipherMode extends Enumeration {

  /** Available mode: '''C'''ipher '''B'''lock '''C'''haining. */
  val CBC = Value
  /** Available mode: '''E'''lectronic '''C'''ode'''B'''ook. */
  val ECB = Value
  /** Available mode: '''O'''utput '''F'''eed'''B'''ack. */
  val OFB = Value
  /** Available mode: '''C'''ipher '''F'''eed'''B'''ack. */
  val CFB = Value
  /** Available mode: '''C'''ipher '''T'''ext '''S'''tealing. */
  val CTS = Value

  private val CBC_id: Byte = 0x01
  private val ECB_id: Byte = 0x02
  private val OFB_id: Byte = 0x03
  private val CFB_id: Byte = 0x04
  private val CTS_id: Byte = 0x05

  /**
   * Gets the id (protocol value) corresponding to a mode.
   *
   * @param v the mode for which to determine the id
   * @return the corresponding protocol value
   */
  def id(v: CipherMode.Value): Byte = v match {
    case CBC => CBC_id
    case ECB => ECB_id
    case OFB => OFB_id
    case CFB => CFB_id
    case CTS => CTS_id
  }

  /**
   * Gets the mode corresponding to an id.
   *
   * @param v the id for which to determine the mode
   * @return an option value containing the corresponding mode,
   *   or `None` if none exists
   */
  def value(v: Byte): Option[CipherMode.Value] = v match {
    case CBC_id => Some(CBC)
    case ECB_id => Some(ECB)
    case OFB_id => Some(OFB)
    case CFB_id => Some(CFB)
    case CTS_id => Some(CTS)
    case _ => None
  }
}

/**
 * Padding modes.
 *
 * The different modes and values are the ones available in ''C#'' (on which
 * rely original ''StealthNet'' application):
 * {{{
 * public enum PaddingMode { None = 1, PKCS7 = 2, Zeros = 3, ANSIX923 = 4, ISO10126 = 5 }
 * }}}
 */
object PaddingMode extends Enumeration {

  /** Available mode: no padding. */
  val None = Value
  /** Available mode: ''PKCS #7''. */
  val PKCS7 = Value
  /** Available mode: zero bytes. */
  val Zeros = Value
  /** Available mode: ''ANSI X.923''. */
  val ANSIX923 = Value
  /** Available mode: ''ISO 10126''. */
  val ISO10126 = Value

  private val None_id: Byte = 0x01
  private val PKCS7_id: Byte = 0x02
  private val Zeros_id: Byte = 0x03
  private val ANSIX923_id: Byte = 0x04
  private val ISO10126_id: Byte = 0x05

  /**
   * Gets the id (protocol value) corresponding to a mode.
   *
   * @param v the mode for which to determine the id
   * @return the corresponding protocol value
   */
  def id(v: PaddingMode.Value): Byte = v match {
    case None => None_id
    case PKCS7 => PKCS7_id
    case Zeros => Zeros_id
    case ANSIX923 => ANSIX923_id
    case ISO10126 => ISO10126_id
  }

  /**
   * Gets the mode corresponding to an id.
   *
   * @param v the id for which to determine the mode
   * @return an option value containing the corresponding mode,
   *   or `None` if none exists
   */
  def value(v: Byte): Option[PaddingMode.Value] = v match {
    case None_id => Some(None)
    case PKCS7_id => Some(PKCS7)
    case Zeros_id => Some(Zeros)
    case ANSIX923_id => Some(ANSIX923)
    case ISO10126_id => Some(ISO10126)
    case _ => scala.None
  }
}

/**
 * Cipher helper.
 *
 * Provides necessary ciphers (''RSA'' and ''Rijndael'') to encrypt/decrypt
 * data.
 *
 * @todo should we call BouncyCastle directly for RSA, instead of using JCE ?
 */
object Ciphers {

  /**
   * Implicit conversion from ''RSA'' public key specification (modulus and
   * exponent) to proper ''RSA'' public key.
   */
  implicit def keySpecToKey(keySpec: RSAPublicKeySpec): RSAPublicKey =
    KeyFactory.getInstance("RSA", BouncyCastleProvider.PROVIDER_NAME)
      .generatePublic(keySpec).asInstanceOf[RSAPublicKey]

  /**
   * Gets new ''RSA'' cipher.
   *
   * @param key ''RSA'' key: public one for encrypting,
   *   private one for decrypting
   * @param mode cipher mode: `Cipher.ENCRYPT_MODE` or `Cipher.DECRYPT_MODE`
   */
  private def rsaCipher(key: Key, mode: Int): Cipher = {
    val cipher = Cipher.getInstance("RSA")
    cipher.init(mode, key)
    cipher
  }

  /**
   * Gets new ''RSA'' cipher to encrypt.
   *
   * @param key ''RSA'' public key
   * @return ''RSA'' cipher using key to encrypt
   */
  def rsaEncrypter(key: PublicKey): Cipher = rsaCipher(key, Cipher.ENCRYPT_MODE)

  /**
   * Gets new ''RSA'' cipher to decrypt.
   *
   * @param key ''RSA'' private key
   * @return ''RSA'' cipher using key to decrypt
   */
  def rsaDecrypter(key: PrivateKey): Cipher = rsaCipher(key, Cipher.DECRYPT_MODE)

  /**
   * Gets new ''Rijndael'' cipher.
   *
   * @param rijndael ''Rijndael'' parameters
   * @param encryption whether the cipher is initialized for encryption (`true`)
   *   or decryption (`false`)
   */
  private def rijndaelCipher(rijndael: RijndaelParameters, encryption: Boolean): BufferedBlockCipher = {
    val engine = new RijndaelEngine(rijndael.blockSize)

    /* Note: ECB and CTS don't use IV, and only expect the key as parameter */
    val (blockCipher, useIV): (Either[BlockCipher, BufferedBlockCipher], Boolean) =
      rijndael.cipherMode match {
        case CipherMode.CBC => (Left(new CBCBlockCipher(engine)), true)
        case CipherMode.ECB => (Left(engine), false)
        case CipherMode.OFB => (Left(new OFBBlockCipher(engine, rijndael.feedbackSize)), true)
        case CipherMode.CFB => (Left(new CFBBlockCipher(engine, rijndael.feedbackSize)), true)
        case CipherMode.CTS => (Right(new CTSBlockCipher(engine)), false)
      }

    /* Note: CTS does not use padding at all */
    val cipher = blockCipher.fold({ blockCipher =>
      val padding: Option[BlockCipherPadding] = rijndael.paddingMode match {
        case PaddingMode.None => None
        case PaddingMode.PKCS7 => Some(new PKCS7Padding())
        case PaddingMode.Zeros => Some(new ZeroBytePadding())
        case PaddingMode.ANSIX923 => Some(new X923Padding())
        case PaddingMode.ISO10126 => Some(new ISO10126d2Padding())
      }
      padding map {
        new PaddedBufferedBlockCipher(blockCipher, _)
      } getOrElse {
        new BufferedBlockCipher(blockCipher)
      }
    }, { blockCipher =>
      blockCipher
    })

    cipher.init(encryption,
      if (!useIV) new KeyParameter(rijndael.key)
      else new ParametersWithIV(new KeyParameter(rijndael.key), rijndael.iv)
    )
    cipher
  }

  /**
   * Gets new ''Rijndael'' cipher to encrypt.
   *
   * @param rijndael ''Rijndael'' parameters
   * @return ''Rijndael'' cipher using parameters to encrypt
   */
  def rijndaelEncrypter(rijndael: RijndaelParameters): BufferedBlockCipher =
    rijndaelCipher(rijndael, encryption = true)

  /**
   * Gets new ''Rijndael'' cipher to decrypt.
   *
   * @param rijndael ''Rijndael'' parameters
   * @return ''Rijndael'' cipher using parameters to decrypt
   */
  def rijndaelDecrypter(rijndael: RijndaelParameters): BufferedBlockCipher =
    rijndaelCipher(rijndael, encryption = false)
}
suiryc/StealthNet
core/src/main/scala/stealthnet/scala/cryptography/Ciphers.scala
Scala
gpl-3.0
7,809
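The BufferedBlockCipher instances produced above are driven the same way regardless of mode: processBytes for the bulk of the input, then doFinal to flush the last (padded) block. A self-contained round-trip sketch using BouncyCastle's lightweight API directly; the key/IV sizes and CBC/PKCS7 choice are illustrative values, not the parameters StealthNet actually negotiates:

// Sketch of driving a PaddedBufferedBlockCipher end to end (assumed demo
// parameters, not StealthNet's real RijndaelParameters).
import org.bouncycastle.crypto.engines.RijndaelEngine
import org.bouncycastle.crypto.modes.CBCBlockCipher
import org.bouncycastle.crypto.paddings.{PKCS7Padding, PaddedBufferedBlockCipher}
import org.bouncycastle.crypto.params.{KeyParameter, ParametersWithIV}

object RijndaelRoundTrip {
  private def cipher(encrypt: Boolean, key: Array[Byte], iv: Array[Byte]) = {
    val c = new PaddedBufferedBlockCipher(
      new CBCBlockCipher(new RijndaelEngine(128)), new PKCS7Padding())
    c.init(encrypt, new ParametersWithIV(new KeyParameter(key), iv))
    c
  }

  private def run(c: PaddedBufferedBlockCipher, in: Array[Byte]): Array[Byte] = {
    val out = new Array[Byte](c.getOutputSize(in.length))
    val n = c.processBytes(in, 0, in.length, out, 0)
    out.take(n + c.doFinal(out, n)) // doFinal returns the count of trailing bytes written
  }

  def main(args: Array[String]): Unit = {
    val key = new Array[Byte](16) // all-zero demo key; never use a fixed key in real code
    val iv = new Array[Byte](16)
    val plain = "hello rijndael".getBytes("UTF-8")
    val enc = run(cipher(encrypt = true, key, iv), plain)
    val dec = run(cipher(encrypt = false, key, iv), enc)
    println(new String(dec, "UTF-8")) // hello rijndael
  }
}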
/***
 * Copyright 2014 Rackspace US, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.rackspace.com.papi.components.checker.wadl

import com.rackspace.com.papi.components.checker.TestConfig
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner

import scala.xml._

@RunWith(classOf[JUnitRunner])
class WADLCheckerRaxRolesSpec extends BaseCheckerSpec {

  //
  // Register some common prefixes, you'll need them for XPath
  // assertions.
  //
  register ("chk","http://www.rackspace.com/repose/wadl/checker")
  register ("xsd", "http://www.w3.org/2001/XMLSchema")

  feature ("The WADLCheckerBuilder can correctly transform a WADL into checker format") {

    info ("As a developer")
    info ("I want to be able to transform a WADL which references the rax:roles extension into")
    info ("a description of a machine that can handle the correct header validations in checker format")
    info ("so that an API validator can process the checker format to validate the roles")

    val raxRolesWADLNoRef =
      <application xmlns="http://wadl.dev.java.net/2009/02"
                   xmlns:rax="http://docs.rackspace.com/api"
                   xmlns:xsd="http://www.w3.org/2001/XMLSchema"
                   xmlns:tst="test://schema/a">
        <grammars>
          <schema elementFormDefault="qualified"
                  attributeFormDefault="unqualified"
                  xmlns="http://www.w3.org/2001/XMLSchema"
                  xmlns:xsd="http://www.w3.org/2001/XMLSchema"
                  targetNamespace="test://schema/a">
            <simpleType name="yesno">
              <restriction base="xsd:string">
                <enumeration value="yes"/>
                <enumeration value="no"/>
              </restriction>
            </simpleType>
          </schema>
        </grammars>
        <resources base="https://test.api.openstack.com">
          <resource path="/a" rax:roles="a:admin">
            <method id="putOnA" name="PUT" rax:roles="a:observer"/>
            <resource path="/b" rax:roles="b:creator">
              <method id="postOnB" name="POST"/>
              <method id="putOnB" name="PUT" rax:roles="b:observer"/>
              <method id="deleteOnB" name="DELETE" rax:roles="b:observer b:admin"/>
              <method id="getOnB" name="GET" rax:roles="#all"/>
            </resource>
            <resource path="{yn}">
              <param name="yn" style="template" type="tst:yesno"/>
              <method name="POST">
                <doc title="postOnB"/>
              </method>
              <method id="putOnYN" name="PUT" rax:roles="b:observer">
                <doc title="putOnB"/>
              </method>
            </resource>
          </resource>
          <resource path="/c">
            <param name="X-Auth-Token" style="header" required="true" repeating="true"/>
            <method id="getOnC" name="GET" rax:roles="a:admin"/>
            <method id="postOnC" name="POST" rax:roles="a:observer a:admin"/>
          </resource>
        </resources>
      </application>

    val raxRolesWADLRef =
      <application xmlns="http://wadl.dev.java.net/2009/02"
                   xmlns:rax="http://docs.rackspace.com/api"
                   xmlns:xsd="http://www.w3.org/2001/XMLSchema"
                   xmlns:tst="test://schema/a">
        <grammars>
          <schema elementFormDefault="qualified"
                  attributeFormDefault="unqualified"
                  xmlns="http://www.w3.org/2001/XMLSchema"
                  xmlns:xsd="http://www.w3.org/2001/XMLSchema"
                  targetNamespace="test://schema/a">
            <simpleType name="yesno">
              <restriction base="xsd:string">
                <enumeration value="yes"/>
                <enumeration value="no"/>
              </restriction>
            </simpleType>
          </schema>
        </grammars>
        <resources base="https://test.api.openstack.com">
          <resource path="/a" rax:roles="a:admin">
            <method href="#putOnA" rax:roles="a:observer"/>
            <resource path="/b" rax:roles="b:creator">
              <method href="#postOnB"/>
              <method href="#putOnB"/>
              <method href="#deleteOnB" rax:roles="b:observer b:admin"/>
              <method href="#getOnB"/>
            </resource>
            <resource path="{yn}">
              <param name="yn" style="template" type="tst:yesno"/>
              <method href="#postOnB"/>
              <method href="#putOnB"/>
            </resource>
          </resource>
          <resource path="/c">
            <param name="X-Auth-Token" style="header" required="true" repeating="true"/>
            <method href="#getOnC"/>
            <method href="#postOnC"/>
          </resource>
        </resources>
        <method id="putOnA" name="PUT"/>
        <method id="postOnB" name="POST"/>
        <method id="putOnB" name="PUT" rax:roles="b:observer"/>
        <method id="deleteOnB" name="DELETE" rax:roles="b:foo"/>
        <method id="getOnB" name="GET" rax:roles="#all"/>
        <method id="getOnC" name="GET" rax:roles="a:admin"/>
        <method id="postOnC" name="POST" rax:roles="a:observer a:admin"/>
      </application>

    val raxRolesDisabled = {
      val tf = TestConfig()
      tf.removeDups = false
      tf.checkHeaders = true
      tf
    }

    val raxRolesEnabled = {
      val tf = TestConfig()
      tf.removeDups = false
      tf.enableRaxRolesExtension = true
      tf
    }

    val raxRolesEnabledRemoveDups = {
      val tf = TestConfig()
      tf.removeDups = true
      tf.enableRaxRolesExtension = true
      tf
    }

    val raxRolesEnabledRemoveDupsPreserveLabels = {
      val tf = TestConfig()
      tf.removeDups = true
      tf.enableRaxRolesExtension = true
      tf.preserveMethodLabels = true
      tf
    }

    val raxRolesMaskEnabled = {
      val tf = TestConfig()
      tf.removeDups = false
      tf.enableRaxRolesExtension = true
      tf.maskRaxRoles403 = true
      tf
    }

    val raxRolesMaskEnabledRemoveDups = {
      val tf = TestConfig()
      tf.removeDups = true
      tf.enableRaxRolesExtension = true
      tf.maskRaxRoles403 = true
      tf
    }

    val raxRolesMaskEnabledRemoveDupsPreserveLabels = {
      val tf = TestConfig()
      tf.removeDups = true
      tf.enableRaxRolesExtension = true
      tf.maskRaxRoles403 = true
      tf.preserveMethodLabels = true
      tf
    }

    val wadls = Map[String, NodeSeq]("A WADL with rax:roles but no references" -> raxRolesWADLNoRef,
                                     "A WADL with rax:roles and method references" -> raxRolesWADLRef)

    for ((desc, inWADL) <- wadls) {

      scenario ("The WADL contains rax:roles, but rax:roles checks are disabled with "+desc) {
        Given (desc)
        When ("The wadl is translated with rax:roles disabled")
        val config = raxRolesDisabled
        val checker = builder.build (inWADL, config)
        Then("Header checks should not be set")
        assert (checker, "exactly-one(chk:checker/chk:grammar[@type='W3C_XML']/xsd:schema/xsd:simpleType[@name='yesno'])")
        assert (checker, "count(chk:checker/chk:step[@type='HEADER_ANY']) = 0")
        assert (checker, """every $s in chk:checker/chk:step[@type='URLXSD'] satisfies namespace-uri-from-QName(resolve-QName($s/@match, $s)) = 'test://schema/a' and local-name-from-QName(resolve-QName($s/@match, $s)) = 'yesno'""")
        assert (checker, """every $s in chk:checker/chk:step[@type='URL_FAIL' and @notTypes] satisfies namespace-uri-from-QName(resolve-QName($s/@notTypes, $s)) = 'test://schema/a' and local-name-from-QName(resolve-QName($s/@notTypes, $s)) = 'yesno'""")
        And("URLs and Methods should be validated as always")
        assert (checker, Start, URL("a"), Method("PUT"), Accept)
        assert (checker, Start, URL("a"), URL("b"), Method("POST"), Accept)
        assert (checker, Start, URL("a"), URL("b"), Method("PUT"), Accept)
        assert (checker, Start, URL("a"), URL("b"), Method("DELETE"), Accept)
        assert (checker, Start, URL("a"), URLXSD("tst:yesno"), Method("POST"), Accept)
        assert (checker, Start, URL("a"), URLXSD("tst:yesno"), Method("PUT"), Accept)
        assert (checker, Start, URL("c"), Header("X-Auth-Token", "(?s).*"), Method("GET"), Accept)
        assert (checker, Start, URL("a"), Label("putOnA"), Accept)
        assert (checker, Start, URL("a"), URL("b"), Label("postOnB"), Accept)
        assert (checker, Start, URL("a"), URL("b"), Label("putOnB"), Accept)
        assert (checker, Start, URL("a"), URL("b"), Label("deleteOnB"), Accept)
        assert (checker, Start, URL("a"), URLXSD("tst:yesno"), Label("postOnB"), Accept)
        assert (checker, Start, URL("a"), URLXSD("tst:yesno"), Label("putOnB"), Accept)
        assert (checker, Start, URL("c"), Header("X-Auth-Token", "(?s).*"), Label("getOnC"), Accept)
      }

      scenario ("The WADL contains rax:roles and rax:roles checks are enabled without RemoveDups with "+desc) {
        Given (desc)
        When ("The wadl is translated with rax:roles enabled")
        val config = raxRolesEnabled
        val checker = builder.build (inWADL, config)
        Then("Header checks should be set for each method")
        assert (checker, "exactly-one(chk:checker/chk:grammar[@type='W3C_XML']/xsd:schema/xsd:simpleType[@name='yesno'])")
        assert (checker, "count(chk:checker/chk:step[@type='HEADER_ANY' and @code='403']) = 17")
        assert (checker, "count(chk:checker/chk:step[@type='METHOD_FAIL']) = 5")
        assert (checker, "count(chk:checker/chk:step[@type='URL_FAIL']) = 3")
        assert (checker, """every $s in chk:checker/chk:step[@type='URLXSD'] satisfies namespace-uri-from-QName(resolve-QName($s/@match, $s)) = 'test://schema/a' and local-name-from-QName(resolve-QName($s/@match, $s)) = 'yesno'""")
        assert (checker, """every $s in chk:checker/chk:step[@type='URL_FAIL' and @notTypes] satisfies namespace-uri-from-QName(resolve-QName($s/@notTypes, $s)) = 'test://schema/a' and local-name-from-QName(resolve-QName($s/@notTypes, $s)) = 'yesno'""")
        And("URLs and Methods should be validated as well as headers")
        assert (checker, Start, URL("a"), Method("PUT","putOnA"), HeaderAny("X-ROLES","a:admin","You are forbidden to perform the operation", 403), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'a:observer')) then $h else ()"), Accept)
        assert (checker, Start, URL("a"), Method("PUT","putOnA"), HeaderAny("X-ROLES","a:observer","You are forbidden to perform the operation", 403), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'a:observer')) then $h else ()"), Accept)
        assert (checker, Start, URL("a"), URL("b"), Method("POST","postOnB"), HeaderAny("X-ROLES","a:admin","You are forbidden to perform the operation", 403), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:creator')) then $h else ()"), Accept)
        assert (checker, Start, URL("a"), URL("b"), Method("POST","postOnB"), HeaderAny("X-ROLES","b:creator","You are forbidden to perform the operation", 403), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:creator')) then $h else ()"), Accept)
        assert (checker, Start, URL("a"), URL("b"), Method("PUT","putOnB"), HeaderAny("X-ROLES","b:observer","You are forbidden to perform the operation", 403), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:creator', 'b:observer')) then $h else ()"), Accept)
        assert (checker, Start, URL("a"), URL("b"), Method("PUT","putOnB"), HeaderAny("X-ROLES","a:admin","You are forbidden to perform the operation", 403), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:creator', 'b:observer')) then $h else ()"), Accept)
        assert (checker, Start, URL("a"), URL("b"), Method("PUT","putOnB"), HeaderAny("X-ROLES","b:creator","You are forbidden to perform the operation", 403), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:creator', 'b:observer')) then $h else ()"), Accept)
        assert (checker, Start, URL("a"), URL("b"), Method("DELETE","deleteOnB"), HeaderAny("X-ROLES","b:admin","You are forbidden to perform the operation", 403), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:admin', 'b:creator', 'b:observer')) then $h else ()"), Accept)
        assert (checker, Start, URL("a"), URL("b"), Method("DELETE","deleteOnB"), HeaderAny("X-ROLES","b:observer","You are forbidden to perform the operation", 403), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:admin', 'b:creator', 'b:observer')) then $h else ()"), Accept)
        assert (checker, Start, URL("a"), URL("b"), Method("DELETE","deleteOnB"), HeaderAny("X-ROLES","a:admin","You are forbidden to perform the operation", 403), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:admin', 'b:creator', 'b:observer')) then $h else ()"), Accept)
        assert (checker, Start, URL("a"), URL("b"), Method("DELETE","deleteOnB"), HeaderAny("X-ROLES","b:creator","You are forbidden to perform the operation", 403), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:admin', 'b:creator', 'b:observer')) then $h else ()"), Accept)
        assert (checker, Start, URL("a"), URL("b"), Method("GET","getOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "req:headers('X-ROLES', true())"), Accept)
        assert (checker, Start, URL("a"), URLXSD("tst:yesno"), Method("POST","postOnB"), HeaderAny("X-ROLES","a:admin","You are forbidden to perform the operation", 403), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin')) then $h else ()"), Accept)
        assert (checker, Start, URL("a"), URLXSD("tst:yesno"), Method("PUT","putOnB"), HeaderAny("X-ROLES","a:admin","You are forbidden to perform the operation", 403), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:observer')) then $h else ()"), Accept)
        assert (checker, Start, URL("a"), URLXSD("tst:yesno"), Method("PUT","putOnB"), HeaderAny("X-ROLES","b:observer","You are forbidden to perform the operation", 403), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:observer')) then $h else ()"), Accept)
        assert (checker, Start, URL("a"), Method("PUT"), ContentFail)
        assert (checker, Start, URL("a"), URL("b"), Method("POST"), ContentFail)
        assert (checker, Start, URL("a"), URL("b"), Method("PUT"), ContentFail)
        assert (checker, Start, URL("a"), URL("b"), Method("DELETE"), ContentFail)
        assert (checker, Start, URL("a"), URLXSD("tst:yesno"), Method("POST"), ContentFail)
        assert (checker, Start, URL("a"), URLXSD("tst:yesno"), Method("PUT"), ContentFail)
        assert (checker, Start, MethodFail)
        assert (checker, Start, URL("c"), ContentFail)
        assert (checker, Start, URL("c"), Header("X-Auth-Token", "(?s).*"), URLFail)
        assert (checker, Start, URL("c"), Header("X-Auth-Token", "(?s).*"), MethodFail("GET|POST"))
        assert (checker, Start, URL("c"), Header("X-Auth-Token", "(?s).*"), Method("GET"), HeaderAny("X-ROLES","a:admin"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin')) then $h else ()"), Accept)
        assert (checker, Start, URL("c"), Header("X-Auth-Token", "(?s).*"), Method("GET"), ContentFail)
        assert (checker, Start, URL("c"), Header("X-Auth-Token", "(?s).*"), Method("POST"), HeaderAny("X-ROLES","a:admin"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'a:observer')) then $h else ()"), Accept)
        assert (checker, Start, URL("c"), Header("X-Auth-Token", "(?s).*"), Method("POST"), HeaderAny("X-ROLES","a:observer"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'a:observer')) then $h else ()"), Accept)
        assert (checker, Start, URL("c"), Header("X-Auth-Token", "(?s).*"), Method("POST"), ContentFail)
      }

      scenario ("The WADL contains rax:roles and rax:roles checks are enabled without RemoveDups with 403s masked and with "+desc) {
        Given (desc)
        When ("The wadl is translated with rax:roles enabled and 403s masked")
        val config = raxRolesMaskEnabled
        val checker = builder.build (inWADL, config)
        Then("Header checks should be set at the beginning of each path")
        assert (checker, "exactly-one(chk:checker/chk:grammar[@type='W3C_XML']/xsd:schema/xsd:simpleType[@name='yesno'])")
        assert (checker, "count(chk:checker/chk:step[@type='HEADER_ANY']) = 5")
        assert (checker, "count(chk:checker/chk:step[@type='METHOD_FAIL']) = 30")
        assert (checker, "count(chk:checker/chk:step[@type='URL_FAIL']) = 30")
        assert (checker, """every $s in chk:checker/chk:step[@type='URLXSD'] satisfies namespace-uri-from-QName(resolve-QName($s/@match, $s)) = 'test://schema/a' and local-name-from-QName(resolve-QName($s/@match, $s)) = 'yesno'""")
        assert (checker, """every $s in chk:checker/chk:step[@type='URL_FAIL' and @notTypes] satisfies namespace-uri-from-QName(resolve-QName($s/@notTypes, $s)) = 'test://schema/a' and local-name-from-QName(resolve-QName($s/@notTypes, $s)) = 'yesno'""")
        And("URLs and Methods should be validated as well as headers")
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), Method("PUT","putOnA"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'a:observer')) then $h else ()"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), MethodFail("PUT"))
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), URLFail("b"))
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), URLFailT("tst:yesno"))
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URLFail("a|c"))
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), URL("b"), Method("POST","postOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:creator')) then $h else ()"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), URL("b"), Method("PUT","putOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:creator', 'b:observer')) then $h else ()"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), URL("b"), Method("DELETE","deleteOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:admin', 'b:creator', 'b:observer')) then $h else ()"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), URL("b"), Method("GET","getOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "req:headers('X-ROLES', true())"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), URL("b"), MethodFail("DELETE|GET|POST|PUT"))
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), URL("b"), URLFail)
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), URLXSD("tst:yesno"), Method("POST","postOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin')) then $h else ()"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), URLXSD("tst:yesno"), Method("PUT","putOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:observer')) then $h else ()"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), URLXSD("tst:yesno"), MethodFail("POST|PUT"))
        assert (checker, Start, HeaderAny("X-ROLES","a:observer"), URL("a"), Method("PUT"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'a:observer')) then $h else ()"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","a:observer"), URL("a"), MethodFail("PUT"))
        assert (checker, Start, HeaderAny("X-ROLES","a:observer"), URL("a"), URLFail)
        assert (checker, Start, HeaderAny("X-ROLES","a:observer"), URLFail("a|c"))
        assert (checker, Start, HeaderAny("X-ROLES","b:creator"), URL("a"), URL("b"), Method("POST","postOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:creator')) then $h else ()"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","b:creator"), URL("a"), URL("b"), Method("PUT","putOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:creator', 'b:observer')) then $h else ()"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","b:creator"), URL("a"), URL("b"), Method("DELETE","deleteOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:admin', 'b:creator', 'b:observer')) then $h else ()"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","b:creator"), URL("a"), URL("b"), Method("GET","getOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "req:headers('X-ROLES', true())"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","b:creator"), URL("a"), URL("b"), MethodFail("DELETE|GET|POST|PUT"))
        assert (checker, Start, HeaderAny("X-ROLES","b:creator"), URL("a"), URL("b"), URLFail)
        assert (checker, Start, HeaderAny("X-ROLES","b:observer"), URL("a"), URLFail("b"))
        assert (checker, Start, HeaderAny("X-ROLES","b:observer"), URL("a"), URLFailT("tst:yesno"))
        assert (checker, Start, HeaderAny("X-ROLES","b:observer"), URL("a"), URL("b"), Method("PUT","putOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:creator', 'b:observer')) then $h else ()"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","b:observer"), URL("a"), URL("b"), Method("DELETE","deleteOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:admin', 'b:creator', 'b:observer')) then $h else ()"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","b:observer"), URL("a"), URL("b"), Method("GET","getOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "req:headers('X-ROLES', true())"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","b:observer"), URL("a"), URL("b"), MethodFail("DELETE|GET|PUT"))
        assert (checker, Start, HeaderAny("X-ROLES","b:observer"), URL("a"), URL("b"), URLFail)
        assert (checker, Start, HeaderAny("X-ROLES","b:observer"), URL("a"), URLXSD("tst:yesno"), Method("PUT","putOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:observer')) then $h else ()"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","b:observer"), URL("a"), URLXSD("tst:yesno"), MethodFail("PUT"))
        assert (checker, Start, HeaderAny("X-ROLES","b:admin"), URL("a"), URL("b"), Method("DELETE","deleteOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:admin', 'b:creator', 'b:observer')) then $h else ()"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","b:admin"), URL("a"), URL("b"), Method("GET","getOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "req:headers('X-ROLES', true())"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","b:admin"), URL("a"), URL("b"), MethodFail("DELETE|GET"))
        assert (checker, Start, HeaderAny("X-ROLES","b:admin"), URL("a"), URL("b"), URLFail)
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), MethodFail)
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("c"), ContentFail)
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("c"), Header("X-Auth-Token", "(?s).*"), Method("GET","getOnC"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin')) then $h else ()"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("c"), Header("X-Auth-Token", "(?s).*"), Method("POST","postOnC"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'a:observer')) then $h else ()"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("c"), Header("X-Auth-Token", "(?s).*"), MethodFail("GET|POST"))
        assert (checker, Start, HeaderAny("X-ROLES","a:observer"), URL("c"), Header("X-Auth-Token", "(?s).*"), Method("POST","postOnC"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'a:observer')) then $h else ()"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","a:observer"), URL("c"), Header("X-Auth-Token", "(?s).*"), MethodFail("POST"))
        assert (checker, Start, URLFail("a"))
        assert (checker, Start, URL("a"), URLFail("b"))
        assert (checker, Start, URL("a"), MethodFail)
        assert (checker, Start, URL("a"), URL("b"), Method("GET"), RaxCaptureHeader("X-RELEVANT-ROLES", "req:headers('X-ROLES', true())"), Accept)
        assert (checker, Start, URL("a"), URL("b"), MethodFail("GET"))
        assert (checker, Start, URL("a"), URL("b"), URLFail)
        assert (checker, Start, MethodFail)
      }

      scenario ("The WADL contains rax:roles and rax:roles checks are enabled with RemoveDups with "+desc) {
        Given (desc)
        When ("The wadl is translated with rax:roles enabled")
        val config = raxRolesEnabledRemoveDups
        val checker = builder.build (inWADL, config)
        Then("Header checks should be set for each method")
        assert (checker, "exactly-one(chk:checker/chk:grammar[@type='W3C_XML']/xsd:schema/xsd:simpleType[@name='yesno'])")
        assert (checker, "count(chk:checker/chk:step[@type='HEADER_ANY' and @code='403']) = 6")
        assert (checker, "count(chk:checker/chk:step[@type='METHOD_FAIL']) = 5")
        assert (checker, "count(chk:checker/chk:step[@type='URL_FAIL']) = 3")
        assert (checker, """every $s in chk:checker/chk:step[@type='URLXSD'] satisfies namespace-uri-from-QName(resolve-QName($s/@match, $s)) = 'test://schema/a' and local-name-from-QName(resolve-QName($s/@match, $s)) = 'yesno'""")
        assert (checker, """every $s in chk:checker/chk:step[@type='URL_FAIL' and @notTypes] satisfies namespace-uri-from-QName(resolve-QName($s/@notTypes, $s)) = 'test://schema/a' and local-name-from-QName(resolve-QName($s/@notTypes, $s)) = 'yesno'""")
        And("URLs and Methods should be validated as well as headers")
        assert (checker, Start, URL("a"), Method("PUT"), HeaderAny("X-ROLES","a:admin|a:observer","You are forbidden to perform the operation", 403), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'a:observer')) then $h else ()"), Accept)
        assert (checker, Start, URL("a"), URL("b"), Method("POST"), HeaderAny("X-ROLES","a:admin|b:creator","You are forbidden to perform the operation", 403), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:creator')) then $h else ()"), Accept)
        assert (checker, Start, URL("a"), URL("b"), Method("PUT"), HeaderAny("X-ROLES","a:admin|b:creator|b:observer","You are forbidden to perform the operation", 403), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:creator', 'b:observer')) then $h else ()"), Accept)
        assert (checker, Start, URL("a"), URL("b"), Method("DELETE"), HeaderAny("X-ROLES","a:admin|b:creator|b:observer|b:admin","You are forbidden to perform the operation", 403), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:admin', 'b:creator', 'b:observer')) then $h else ()"), Accept)
        assert (checker, Start, URL("a"), URL("b"), Method("GET"), RaxCaptureHeader("X-RELEVANT-ROLES", "req:headers('X-ROLES', true())"), Accept)
        assert (checker, Start, URL("a"), URLXSD("tst:yesno"), Method("POST"), HeaderAny("X-ROLES","a:admin","You are forbidden to perform the operation", 403), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin')) then $h else ()"), Accept)
        assert (checker, Start, URL("a"), URLXSD("tst:yesno"), Method("PUT"), HeaderAny("X-ROLES","a:admin|b:observer","You are forbidden to perform the operation", 403), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:observer')) then $h else ()"), Accept)
        assert (checker, Start, URL("a"), Method("PUT"), ContentFail)
        assert (checker, Start, URL("a"), URL("b"), Method("POST"), ContentFail)
        assert (checker, Start, URL("a"), URL("b"), Method("PUT"), ContentFail)
        assert (checker, Start, URL("a"), URL("b"), Method("DELETE"), ContentFail)
        assert (checker, Start, URL("a"), URLXSD("tst:yesno"), Method("POST"), ContentFail)
        assert (checker, Start, URL("a"), URLXSD("tst:yesno"), Method("PUT"), ContentFail)
        assert (checker, Start, MethodFail)
        assert (checker, Start, URL("c"), ContentFail)
        assert (checker, Start, URL("c"), Header("X-Auth-Token", "(?s).*"), URLFail)
        assert (checker, Start, URL("c"), Header("X-Auth-Token", "(?s).*"), MethodFail("GET|POST"))
        assert (checker, Start, URL("c"), Header("X-Auth-Token", "(?s).*"), Method("GET"), HeaderAny("X-ROLES","a:admin"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin')) then $h else ()"), Accept)
        assert (checker, Start, URL("c"), Header("X-Auth-Token", "(?s).*"), Method("GET"), ContentFail)
        assert (checker, Start, URL("c"), Header("X-Auth-Token", "(?s).*"), Method("POST"), HeaderAny("X-ROLES","a:admin|a:observer"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'a:observer')) then $h else ()"), Accept)
        assert (checker, Start, URL("c"), Header("X-Auth-Token", "(?s).*"), Method("POST"), ContentFail)
      }

      scenario ("The WADL contains rax:roles and rax:roles checks are enabled with RemoveDups and preserve method labels with "+desc) {
        Given (desc)
        When ("The wadl is translated with rax:roles enabled")
        val config = raxRolesEnabledRemoveDupsPreserveLabels
        val checker = builder.build (inWADL, config)
        Then("Header checks should be set for each method")
        assert (checker, "exactly-one(chk:checker/chk:grammar[@type='W3C_XML']/xsd:schema/xsd:simpleType[@name='yesno'])")
        assert (checker, "count(chk:checker/chk:step[@type='HEADER_ANY' and @code='403']) = 6")
        assert (checker, "count(chk:checker/chk:step[@type='METHOD_FAIL']) = 5")
        assert (checker, "count(chk:checker/chk:step[@type='URL_FAIL']) = 3")
        assert (checker, """every $s in chk:checker/chk:step[@type='URLXSD'] satisfies namespace-uri-from-QName(resolve-QName($s/@match, $s)) = 'test://schema/a' and local-name-from-QName(resolve-QName($s/@match, $s)) = 'yesno'""")
        assert (checker, """every $s in chk:checker/chk:step[@type='URL_FAIL' and @notTypes] satisfies namespace-uri-from-QName(resolve-QName($s/@notTypes, $s)) = 'test://schema/a' and local-name-from-QName(resolve-QName($s/@notTypes, $s)) = 'yesno'""")
        And("URLs and Methods should be validated as well as headers")
        assert (checker, Start, URL("a"), Method("PUT","putOnA"), HeaderAny("X-ROLES","a:admin|a:observer","You are forbidden to perform the operation", 403), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'a:observer')) then $h else ()"), Accept)
        assert (checker, Start, URL("a"), URL("b"), Method("POST","postOnB"), HeaderAny("X-ROLES","a:admin|b:creator","You are forbidden to perform the operation", 403), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:creator')) then $h else ()"), Accept)
        assert (checker, Start, URL("a"), URL("b"), Method("PUT","putOnB"), HeaderAny("X-ROLES","a:admin|b:creator|b:observer","You are forbidden to perform the operation", 403), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:creator', 'b:observer')) then $h else ()"), Accept)
        assert (checker, Start, URL("a"), URL("b"), Method("DELETE","deleteOnB"), HeaderAny("X-ROLES","a:admin|b:creator|b:observer|b:admin","You are forbidden to perform the operation", 403), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:admin', 'b:creator', 'b:observer')) then $h else ()"), Accept)
        assert (checker, Start, URL("a"), URL("b"), Method("GET","getOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "req:headers('X-ROLES', true())"), Accept)
        assert (checker, Start, URL("a"), URLXSD("tst:yesno"), Method("POST","postOnB"), HeaderAny("X-ROLES","a:admin","You are forbidden to perform the operation", 403), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin')) then $h else ()"), Accept)
        assert (checker, Start, URL("a"), URLXSD("tst:yesno"), Method("PUT","putOnB"), HeaderAny("X-ROLES","a:admin|b:observer","You are forbidden to perform the operation", 403), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:observer')) then $h else ()"), Accept)
        assert (checker, Start, URL("a"), Method("PUT"), ContentFail)
        assert (checker, Start, URL("a"), URL("b"), Method("POST"), ContentFail)
        assert (checker, Start, URL("a"), URL("b"), Method("PUT"), ContentFail)
        assert (checker, Start, URL("a"), URL("b"), Method("DELETE"), ContentFail)
        assert (checker, Start, URL("a"), URLXSD("tst:yesno"), Method("POST"), ContentFail)
        assert (checker, Start, URL("a"), URLXSD("tst:yesno"), Method("PUT"), ContentFail)
        assert (checker, Start, MethodFail)
        assert (checker, Start, URL("c"), ContentFail)
        assert (checker, Start, URL("c"), Header("X-Auth-Token", "(?s).*"), URLFail)
        assert (checker, Start, URL("c"), Header("X-Auth-Token", "(?s).*"), MethodFail("GET|POST"))
        assert (checker, Start, URL("c"), Header("X-Auth-Token", "(?s).*"), Method("GET"), HeaderAny("X-ROLES","a:admin"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin')) then $h else ()"), Accept)
        assert (checker, Start, URL("c"), Header("X-Auth-Token", "(?s).*"), Method("GET"), ContentFail)
        assert (checker, Start, URL("c"), Header("X-Auth-Token", "(?s).*"), Method("POST"), HeaderAny("X-ROLES","a:admin|a:observer"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'a:observer')) then $h else ()"), Accept)
        assert (checker, Start, URL("c"), Header("X-Auth-Token", "(?s).*"), Method("POST"), ContentFail)
      }

      scenario ("The WADL contains rax:roles and rax:roles checks are enabled with RemoveDups with 403s masked and with "+desc) {
        Given (desc)
        When ("The wadl is translated with rax:roles enabled and 403s masked")
        val config = raxRolesMaskEnabledRemoveDups
        val checker = builder.build (inWADL, config)
        Then("Header checks should be set for each method")
        assert (checker, "exactly-one(chk:checker/chk:grammar[@type='W3C_XML']/xsd:schema/xsd:simpleType[@name='yesno'])")
        assert (checker, "count(chk:checker/chk:step[@type='HEADER_ANY']) = 5")
        assert (checker, "count(chk:checker/chk:step[@type='METHOD_FAIL']) = 9")
        assert (checker, "count(chk:checker/chk:step[@type='URL_FAIL']) = 5")
        assert (checker, """every $s in chk:checker/chk:step[@type='URLXSD'] satisfies namespace-uri-from-QName(resolve-QName($s/@match, $s)) = 'test://schema/a' and local-name-from-QName(resolve-QName($s/@match, $s)) = 'yesno'""")
        assert (checker, """every $s in chk:checker/chk:step[@type='URL_FAIL' and @notTypes] satisfies namespace-uri-from-QName(resolve-QName($s/@notTypes, $s)) = 'test://schema/a' and local-name-from-QName(resolve-QName($s/@notTypes, $s)) = 'yesno'""")
        And("URLs and Methods should be validated as well as headers")
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), Method("PUT"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'a:observer')) then $h else ()"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), MethodFail("PUT"))
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), URLFail("b"))
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), URLFailT("tst:yesno"))
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URLFail("a|c"))
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), URL("b"), Method("POST"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:creator')) then $h else ()"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), URL("b"), Method("PUT"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:creator', 'b:observer')) then $h else ()"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), URL("b"), Method("DELETE"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:admin', 'b:creator', 'b:observer')) then $h else ()"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), URL("b"), Method("GET"), RaxCaptureHeader("X-RELEVANT-ROLES", "req:headers('X-ROLES', true())"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), URL("b"), MethodFail("DELETE|GET|POST|PUT"))
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), URL("b"), URLFail)
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), URLXSD("tst:yesno"), Method("POST"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin')) then $h else ()"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), URLXSD("tst:yesno"), Method("PUT"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:observer')) then $h else ()"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), URLXSD("tst:yesno"), MethodFail("POST|PUT"))
        assert (checker, Start, HeaderAny("X-ROLES","a:observer"), URL("a"), Method("PUT"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'a:observer')) then $h else ()"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","a:observer"), URL("a"), MethodFail("PUT"))
        assert (checker, Start, HeaderAny("X-ROLES","a:observer"), URL("a"), URLFail)
        assert (checker, Start, HeaderAny("X-ROLES","a:observer"), URLFail("a|c"))
        assert (checker, Start, HeaderAny("X-ROLES","b:creator"), URL("a"), URL("b"), Method("POST"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:creator')) then $h else ()"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","b:creator"), URL("a"), URL("b"), Method("PUT"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:creator', 'b:observer')) then $h else ()"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","b:creator"), URL("a"), URL("b"), Method("DELETE"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:admin', 'b:creator', 'b:observer')) then $h else ()"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","b:creator"), URL("a"), URL("b"), Method("GET"), RaxCaptureHeader("X-RELEVANT-ROLES", "req:headers('X-ROLES', true())"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","b:creator"), URL("a"), URL("b"), MethodFail("DELETE|GET|POST|PUT"))
        assert (checker, Start, HeaderAny("X-ROLES","b:creator"), URL("a"), URL("b"), URLFail)
        assert (checker, Start, HeaderAny("X-ROLES","b:observer"), URL("a"), URLFail("b"))
        assert (checker, Start, HeaderAny("X-ROLES","b:observer"), URL("a"), URLFailT("tst:yesno"))
        assert (checker, Start, HeaderAny("X-ROLES","b:observer"), URL("a"), URL("b"), Method("PUT"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:creator', 'b:observer')) then $h else ()"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","b:observer"), URL("a"), URL("b"), Method("DELETE"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:admin', 'b:creator', 'b:observer')) then $h else ()"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","b:observer"), URL("a"), URL("b"), Method("GET"), RaxCaptureHeader("X-RELEVANT-ROLES", "req:headers('X-ROLES', true())"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","b:observer"), URL("a"), URL("b"), MethodFail("DELETE|GET|PUT"))
        assert (checker, Start, HeaderAny("X-ROLES","b:observer"), URL("a"), URL("b"), URLFail)
        assert (checker, Start, HeaderAny("X-ROLES","b:observer"), URL("a"), URLXSD("tst:yesno"), Method("PUT"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:observer')) then $h else ()"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","b:observer"), URL("a"), URLXSD("tst:yesno"), MethodFail("PUT"))
        assert (checker, Start, HeaderAny("X-ROLES","b:admin"), URL("a"), URL("b"), Method("DELETE"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:admin', 'b:creator', 'b:observer')) then $h else ()"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","b:admin"), URL("a"), URL("b"), Method("GET"), RaxCaptureHeader("X-RELEVANT-ROLES", "req:headers('X-ROLES', true())"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","b:admin"), URL("a"), URL("b"), MethodFail("DELETE|GET"))
        assert (checker, Start, HeaderAny("X-ROLES","b:admin"), URL("a"), URL("b"), URLFail)
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), MethodFail)
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("c"), ContentFail)
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("c"), Header("X-Auth-Token", "(?s).*"), Method("GET"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin')) then $h else ()"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("c"), Header("X-Auth-Token", "(?s).*"), Method("POST"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'a:observer')) then $h else ()"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("c"), Header("X-Auth-Token", "(?s).*"), MethodFail("GET|POST"))
        assert (checker, Start, HeaderAny("X-ROLES","a:observer"), URL("c"), Header("X-Auth-Token", "(?s).*"), Method("POST"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'a:observer')) then $h else ()"), Accept)
        assert (checker, Start, HeaderAny("X-ROLES","a:observer"), URL("c"), Header("X-Auth-Token", "(?s).*"), MethodFail("POST"))
        assert (checker, Start, URLFail("a"))
        assert (checker, Start, URL("a"), URLFail("b"))
        assert (checker, Start, URL("a"), MethodFail)
        assert (checker, Start, URL("a"), URL("b"), Method("GET"), RaxCaptureHeader("X-RELEVANT-ROLES", "req:headers('X-ROLES', true())"), Accept)
        assert (checker, Start, URL("a"), URL("b"), MethodFail("GET"))
        assert (checker, Start, URL("a"), URL("b"), URLFail)
        assert (checker, Start, MethodFail)
      }

      scenario ("The WADL contains rax:roles and rax:roles checks are enabled with RemoveDups with 403s masked and preserve method labels with "+desc) {
        Given (desc)
        When ("The wadl is translated with rax:roles enabled and 403s masked")
        val config = raxRolesMaskEnabledRemoveDupsPreserveLabels
        val checker = builder.build (inWADL, config)
        Then("Header checks should be set for each method")
        assert (checker,
"exactly-one(chk:checker/chk:grammar[@type='W3C_XML']/xsd:schema/xsd:simpleType[@name='yesno'])") assert (checker, "count(chk:checker/chk:step[@type='HEADER_ANY']) = 5") assert (checker, "count(chk:checker/chk:step[@type='METHOD_FAIL']) = 9") assert (checker, "count(chk:checker/chk:step[@type='URL_FAIL']) = 5") assert (checker, """every $s in chk:checker/chk:step[@type='URLXSD'] satisfies namespace-uri-from-QName(resolve-QName($s/@match, $s)) = 'test://schema/a' and local-name-from-QName(resolve-QName($s/@match, $s)) = 'yesno'""") assert (checker, """every $s in chk:checker/chk:step[@type='URL_FAIL' and @notTypes] satisfies namespace-uri-from-QName(resolve-QName($s/@notTypes, $s)) = 'test://schema/a' and local-name-from-QName(resolve-QName($s/@notTypes, $s)) = 'yesno'""") And("URLs and Methods should be validated as well as headers") assert (checker, Start, HeaderAny("X-ROLES","a:admin"),URL("a"), Method("PUT","putOnA"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'a:observer')) then $h else ()"), Accept) assert (checker, Start, HeaderAny("X-ROLES","a:admin"),URL("a"), MethodFail("PUT")) assert (checker, Start, HeaderAny("X-ROLES","a:admin"),URL("a"), URLFail("b")) assert (checker, Start, HeaderAny("X-ROLES","a:admin"),URL("a"), URLFailT("tst:yesno")) assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URLFail("a|c")) assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), URL("b"), Method("POST","postOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:creator')) then $h else ()"), Accept) assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), URL("b"), Method("PUT","putOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:creator', 'b:observer')) then $h else ()"), Accept) assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), URL("b"), Method("DELETE","deleteOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:admin', 'b:creator', 'b:observer')) then $h else ()"), Accept) assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), URL("b"), Method("GET","getOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "req:headers('X-ROLES', true())"), Accept) assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), URL("b"), MethodFail("DELETE|GET|POST|PUT")) assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), URL("b"), URLFail) assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), URLXSD("tst:yesno"), Method("POST","postOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin')) then $h else ()"), Accept) assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), URLXSD("tst:yesno"), Method("PUT","putOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:observer')) then $h else ()"), Accept) assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("a"), URLXSD("tst:yesno"), MethodFail("POST|PUT")) assert (checker, Start, HeaderAny("X-ROLES","a:observer"),URL("a"), Method("PUT","putOnA"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'a:observer')) then $h else ()"), Accept) assert (checker, Start, HeaderAny("X-ROLES","a:observer"),URL("a"), MethodFail("PUT")) assert (checker, Start, 
HeaderAny("X-ROLES","a:observer"),URL("a"), URLFail) assert (checker, Start, HeaderAny("X-ROLES","a:observer"), URLFail("a|c")) assert (checker, Start, HeaderAny("X-ROLES","b:creator"), URL("a"), URL("b"), Method("POST","postOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:creator')) then $h else ()"), Accept) assert (checker, Start, HeaderAny("X-ROLES","b:creator"), URL("a"), URL("b"), Method("PUT","putOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:creator', 'b:observer')) then $h else ()"), Accept) assert (checker, Start, HeaderAny("X-ROLES","b:creator"), URL("a"), URL("b"), Method("DELETE","deleteOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:admin', 'b:creator', 'b:observer')) then $h else ()"), Accept) assert (checker, Start, HeaderAny("X-ROLES","b:creator"), URL("a"), URL("b"), Method("GET","getOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "req:headers('X-ROLES', true())"), Accept) assert (checker, Start, HeaderAny("X-ROLES","b:creator"), URL("a"), URL("b"), MethodFail("DELETE|GET|POST|PUT")) assert (checker, Start, HeaderAny("X-ROLES","b:creator"), URL("a"), URL("b"), URLFail) assert (checker, Start, HeaderAny("X-ROLES","b:observer"), URL("a"), URLFail("b")) assert (checker, Start, HeaderAny("X-ROLES","b:observer"), URL("a"), URLFailT("tst:yesno")) assert (checker, Start, HeaderAny("X-ROLES","b:observer"), URL("a"), URL("b"), Method("PUT","putOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:creator', 'b:observer')) then $h else ()"), Accept) assert (checker, Start, HeaderAny("X-ROLES","b:observer"), URL("a"), URL("b"), Method("DELETE","deleteOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:admin', 'b:creator', 'b:observer')) then $h else ()"), Accept) assert (checker, Start, HeaderAny("X-ROLES","b:observer"), URL("a"), URL("b"), Method("GET","getOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "req:headers('X-ROLES', true())"), Accept) assert (checker, Start, HeaderAny("X-ROLES","b:observer"), URL("a"), URL("b"), MethodFail("DELETE|GET|PUT")) assert (checker, Start, HeaderAny("X-ROLES","b:observer"), URL("a"), URL("b"), URLFail) assert (checker, Start, HeaderAny("X-ROLES","b:observer"), URL("a"), URLXSD("tst:yesno"), Method("PUT","putOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:observer')) then $h else ()"), Accept) assert (checker, Start, HeaderAny("X-ROLES","b:observer"), URL("a"), URLXSD("tst:yesno"), MethodFail("PUT")) assert (checker, Start, HeaderAny("X-ROLES","b:admin"), URL("a"), URL("b"), Method("DELETE","deleteOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'b:admin', 'b:creator', 'b:observer')) then $h else ()"), Accept) assert (checker, Start, HeaderAny("X-ROLES","b:admin"), URL("a"), URL("b"), Method("GET","getOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "req:headers('X-ROLES', true())"), Accept) assert (checker, Start, HeaderAny("X-ROLES","b:admin"), URL("a"), URL("b"), MethodFail("DELETE|GET")) assert (checker, Start, HeaderAny("X-ROLES","b:admin"), URL("a"), URL("b"), URLFail) assert (checker, Start, HeaderAny("X-ROLES","a:admin"), MethodFail) assert (checker, Start, HeaderAny("X-ROLES","a:admin"), 
URL("c"), ContentFail) assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("c"), Header("X-Auth-Token", "(?s).*"), Method("GET","getOnC"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin')) then $h else ()"), Accept) assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("c"), Header("X-Auth-Token", "(?s).*"), Method("POST","postOnC"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'a:observer')) then $h else ()"), Accept) assert (checker, Start, HeaderAny("X-ROLES","a:admin"), URL("c"), Header("X-Auth-Token", "(?s).*"), MethodFail("GET|POST")) assert (checker, Start, HeaderAny("X-ROLES","a:observer"), URL("c"), Header("X-Auth-Token", "(?s).*"), Method("POST","postOnC"), RaxCaptureHeader("X-RELEVANT-ROLES", "for $h in req:headers('X-ROLES', true()) return if ($h = ('a:admin', 'a:observer')) then $h else ()"), Accept) assert (checker, Start, HeaderAny("X-ROLES","a:observer"), URL("c"), Header("X-Auth-Token", "(?s).*"), MethodFail("POST")) assert (checker, Start, URLFail("a")) assert (checker, Start, URL("a"), URLFail("b")) assert (checker, Start, URL("a"), MethodFail) assert (checker, Start, URL("a"), URL("b"), Method("GET","getOnB"), RaxCaptureHeader("X-RELEVANT-ROLES", "req:headers('X-ROLES', true())"), Accept) assert (checker, Start, URL("a"), URL("b"), MethodFail("GET")) assert (checker, Start, URL("a"), URL("b"), URLFail) assert (checker, Start, MethodFail) } } } }
wdschei/api-checker
core/src/test/scala/com/rackspace/com/papi/components/checker/wadl/WADLCheckerRaxRolesSpec.scala
Scala
apache-2.0
59,723
/** * Licensed to Big Data Genomics (BDG) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The BDG licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.bdgenomics.adam.rdd.read import java.util.UUID import org.bdgenomics.adam.models.{ RecordGroup, RecordGroupDictionary } import org.bdgenomics.adam.rdd.ADAMContext._ import org.bdgenomics.adam.util.ADAMFunSuite import org.bdgenomics.formats.avro.{ AlignmentRecord, Contig } class MarkDuplicatesSuite extends ADAMFunSuite { val rgd = new RecordGroupDictionary(Seq( new RecordGroup("sammy sample", "machine foo", library = Some("library bar")))) def createUnmappedRead() = { AlignmentRecord.newBuilder() .setReadMapped(false) .build() } def createMappedRead(referenceName: String, start: Long, end: Long, readName: String = UUID.randomUUID().toString, avgPhredScore: Int = 20, numClippedBases: Int = 0, isPrimaryAlignment: Boolean = true, isNegativeStrand: Boolean = false) = { assert(avgPhredScore >= 10 && avgPhredScore <= 50) val qual = (for (i <- 0 until 100) yield (avgPhredScore + 33).toChar).toString() val cigar = if (numClippedBases > 0) "%dS%dM".format(numClippedBases, 100 - numClippedBases) else "100M" val contig = Contig.newBuilder .setContigName(referenceName) .build AlignmentRecord.newBuilder() .setContigName(contig.getContigName) .setStart(start) .setQual(qual) .setCigar(cigar) .setEnd(end) .setReadMapped(true) .setPrimaryAlignment(isPrimaryAlignment) .setReadName(readName) .setRecordGroupName("machine foo") .setDuplicateRead(false) .setReadNegativeStrand(isNegativeStrand) .build() } def createPair(firstReferenceName: String, firstStart: Long, firstEnd: Long, secondReferenceName: String, secondStart: Long, secondEnd: Long, readName: String = UUID.randomUUID().toString, avgPhredScore: Int = 20): Seq[AlignmentRecord] = { val firstContig = Contig.newBuilder .setContigName(firstReferenceName) .build val secondContig = Contig.newBuilder .setContigName(secondReferenceName) .build val firstOfPair = createMappedRead(firstReferenceName, firstStart, firstEnd, readName = readName, avgPhredScore = avgPhredScore) firstOfPair.setReadInFragment(0) firstOfPair.setMateMapped(true) firstOfPair.setMateContigName(secondContig.getContigName) firstOfPair.setMateAlignmentStart(secondStart) firstOfPair.setReadPaired(true) val secondOfPair = createMappedRead(secondReferenceName, secondStart, secondEnd, readName = readName, avgPhredScore = avgPhredScore, isNegativeStrand = true) secondOfPair.setReadInFragment(1) secondOfPair.setMateMapped(true) secondOfPair.setMateContigName(firstContig.getContigName) secondOfPair.setMateAlignmentStart(firstStart) secondOfPair.setReadPaired(true) Seq(firstOfPair, secondOfPair) } private def markDuplicates(reads: AlignmentRecord*) = { sc.parallelize(reads).markDuplicates(rgd).collect() } sparkTest("single read") { val read = createMappedRead("0", 100, 200) val marked = markDuplicates(read) // Can't have duplicates 
with a single read, should return the read unchanged. assert(marked(0) == read) } sparkTest("reads at different positions") { val read1 = createMappedRead("0", 42, 142) val read2 = createMappedRead("0", 43, 143) val marked = markDuplicates(read1, read2) // Reads shouldn't be modified assert(marked.contains(read1) && marked.contains(read2)) } sparkTest("reads at the same position") { val poorReads = for (i <- 0 until 10) yield { createMappedRead("1", 42, 142, avgPhredScore = 20, readName = "poor%d".format(i)) } val bestRead = createMappedRead("1", 42, 142, avgPhredScore = 30, readName = "best") val marked = markDuplicates(List(bestRead) ++ poorReads: _*) val (dups, nonDup) = marked.partition(p => p.getDuplicateRead) assert(nonDup.size == 1 && nonDup(0) == bestRead) assert(dups.forall(p => p.getReadName.startsWith("poor"))) } sparkTest("reads at the same position with clipping") { val poorClippedReads = for (i <- 0 until 5) yield { createMappedRead("1", 44, 142, numClippedBases = 2, avgPhredScore = 20, readName = "poorClipped%d".format(i)) } val poorUnclippedReads = for (i <- 0 until 5) yield { createMappedRead("1", 42, 142, avgPhredScore = 20, readName = "poorUnclipped%d".format(i)) } val bestRead = createMappedRead("1", 42, 142, avgPhredScore = 30, readName = "best") val marked = markDuplicates(List(bestRead) ++ poorClippedReads ++ poorUnclippedReads: _*) val (dups, nonDup) = marked.partition(p => p.getDuplicateRead) assert(nonDup.size == 1 && nonDup(0) == bestRead) assert(dups.forall(p => p.getReadName.startsWith("poor"))) } sparkTest("reads on reverse strand") { val poorReads = for (i <- 0 until 7) yield { createMappedRead("10", 42, 142, isNegativeStrand = true, avgPhredScore = 20, readName = "poor%d".format(i)) } val bestRead = createMappedRead("10", 42, 142, isNegativeStrand = true, avgPhredScore = 30, readName = "best") val marked = markDuplicates(List(bestRead) ++ poorReads: _*) val (dups, nonDup) = marked.partition(p => p.getDuplicateRead) assert(nonDup.size == 1 && nonDup(0) == bestRead) assert(dups.forall(p => p.getReadName.startsWith("poor"))) } sparkTest("unmapped reads") { val unmappedReads = for (i <- 0 until 10) yield createUnmappedRead() val marked = markDuplicates(unmappedReads: _*) assert(marked.size == unmappedReads.size) // Unmapped reads should never be marked duplicates assert(marked.forall(p => !p.getDuplicateRead)) } sparkTest("read pairs") { val poorPairs = for ( i <- 0 until 10; read <- createPair("0", 10, 110, "0", 110, 210, avgPhredScore = 20, readName = "poor%d".format(i)) ) yield read val bestPair = createPair("0", 10, 110, "0", 110, 210, avgPhredScore = 30, readName = "best") val marked = markDuplicates(bestPair ++ poorPairs: _*) val (dups, nonDups) = marked.partition(_.getDuplicateRead) assert(nonDups.size == 2 && nonDups.forall(p => p.getReadName.toString == "best")) assert(dups.forall(p => p.getReadName.startsWith("poor"))) } sparkTest("read pairs with fragments") { val fragments = for (i <- 0 until 10) yield { createMappedRead("2", 33, 133, avgPhredScore = 40, readName = "fragment%d".format(i)) } // Even though the phred score is lower, pairs always score higher than fragments val pairs = createPair("2", 33, 133, "2", 100, 200, avgPhredScore = 20, readName = "pair") val marked = markDuplicates(fragments ++ pairs: _*) val (dups, nonDups) = marked.partition(_.getDuplicateRead) assert(nonDups.size == 2 && nonDups.forall(p => p.getReadName.toString == "pair")) assert(dups.size == 10 && dups.forall(p => p.getReadName.startsWith("fragment"))) } test("quality 
scores") { // The ascii value 53 is equal to a phred score of 20 val qual = 53.toChar.toString * 100 val record = AlignmentRecord.newBuilder().setQual(qual).build() assert(MarkDuplicates.score(record) == 2000) } sparkTest("read pairs that cross chromosomes") { val poorPairs = for ( i <- 0 until 10; read <- createPair("ref0", 10, 110, "ref1", 110, 210, avgPhredScore = 20, readName = "poor%d".format(i)) ) yield read val bestPair = createPair("ref0", 10, 110, "ref1", 110, 210, avgPhredScore = 30, readName = "best") val marked = markDuplicates(bestPair ++ poorPairs: _*) val (dups, nonDups) = marked.partition(_.getDuplicateRead) assert(nonDups.size == 2 && nonDups.forall(p => p.getReadName.toString == "best")) assert(dups.forall(p => p.getReadName.startsWith("poor"))) } }
erictu/adam
adam-core/src/test/scala/org/bdgenomics/adam/rdd/read/MarkDuplicatesSuite.scala
Scala
apache-2.0
8,646
package chapter.four

import java.io.File
import java.util.{ Scanner, TreeMap }
import scala.collection.mutable.Map
import scala.collection.JavaConversions.mapAsScalaMap
import scala.collection.immutable.{ HashMap, SortedMap }

object ExerciseTwoThroughFive {

  val Word = """([-A-Za-z]+)""".r

  // todo: autocloseable?

  // 2
  def wordCount(file: String): Map[String, Int] = {
    val wordCount = Map[String, Int]().withDefault(_ => 0)
    val stream = ExerciseTwoThroughFive.getClass.getClassLoader.getResourceAsStream(file)
    val in = new Scanner(stream)
    while (in.hasNext) {
      in.next match {
        case Word(word) => wordCount(word.toLowerCase) += 1
        case _ =>
      }
    }
    wordCount
  }

  // todo: remove code duplication
  // todo: cleanup

  // 3
  def wordCount2(file: String): HashMap[String, Int] = {
    var wordCount = HashMap[String, Int]()
    val stream = ExerciseTwoThroughFive.getClass.getClassLoader.getResourceAsStream(file)
    val in = new Scanner(stream)
    while (in.hasNext) {
      in.next match {
        case Word(word) =>
          wordCount = wordCount + (word.toLowerCase -> (wordCount.getOrElse(word.toLowerCase, 0) + 1))
        case _ =>
      }
    }
    wordCount
  }

  // 4
  def wordCount3(file: String): SortedMap[String, Int] = {
    var wordCount = SortedMap[String, Int]()
    val stream = ExerciseTwoThroughFive.getClass.getClassLoader.getResourceAsStream(file)
    val in = new Scanner(stream)
    while (in.hasNext) {
      in.next match {
        case Word(word) =>
          wordCount = wordCount + (word.toLowerCase -> (wordCount.getOrElse(word.toLowerCase, 0) + 1))
        case _ =>
      }
    }
    wordCount
  }

  // 5
  def wordCount4(file: String): Map[String, Int] = {
    val wordCount: Map[String, Int] = new TreeMap[String, Int]().withDefault(_ => 0)
    val stream = ExerciseTwoThroughFive.getClass.getClassLoader.getResourceAsStream(file)
    val in = new Scanner(stream)
    while (in.hasNext) {
      in.next match {
        case Word(word) => wordCount(word.toLowerCase) += 1
        case _ =>
      }
    }
    wordCount
  }
}
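
// A minimal usage sketch, not part of the original exercise: "words.txt" is a
// hypothetical classpath resource. All four variants should agree on counts;
// they differ only in the backing map (mutable, immutable hash, sorted, TreeMap).
object WordCountDemo extends App {
  val counts = ExerciseTwoThroughFive.wordCount("words.txt")
  val sorted = ExerciseTwoThroughFive.wordCount3("words.txt")
  sorted.foreach { case (word, n) => println(s"$word -> $n") }
  assert(counts.toMap == sorted.toMap)
}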
deekim/impatient-scala
src/main/scala/chapter/four/ExerciseTwoThroughFive.scala
Scala
apache-2.0
2,110
/*
 * Copyright 2016 The BigDL Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intel.analytics.bigdl.nn

import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.T
import com.intel.analytics.bigdl.utils.serializer.ModuleSerializationTest

import scala.util.Random

class NarrowTableSerialTest extends ModuleSerializationTest {
  override def test(): Unit = {
    val narrowTable = NarrowTable[Float](1, 1)
    val input = T()
    input(1.0) = Tensor[Float](2, 2).apply1(e => Random.nextFloat())
    input(2.0) = Tensor[Float](2, 2).apply1(e => Random.nextFloat())
    input(3.0) = Tensor[Float](2, 2).apply1(e => Random.nextFloat())
    runSerializationTest(narrowTable, input)
  }
}
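
// Hedged sketch of the behaviour under test (inferred from the module's name
// and arguments, not asserted by the original test): NarrowTable(offset, length)
// forwards a window of its input table, so NarrowTable[Float](1, 1) should pass
// through only the first of the three tensors built above, e.g.:
//
//   val out = narrowTable.forward(input)   // a table holding input(1.0) only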
yiheng/BigDL
spark/dl/src/test/scala/com/intel/analytics/bigdl/nn/NarrowTableSpec.scala
Scala
apache-2.0
1,249
package plda.week7

/**
 *
 * @author Lorand Szakacs, [email protected], [email protected]
 * @since 08 Nov 2016
 *
 */
import TestingLanguage._

object Main extends App {
  1.test {
    val x1 = 42
    val x2 = 1
    (x1, x2)
  }

  2.test {
    val x1 = 42
    val x2 = 42
    (x1, x2)
  }
}
2016-Fall-UPT-PLDA/labs
week-07/scala-test-dsl/src/main/scala/plda/week7/Main.scala
Scala
gpl-3.0
322
package org.perftester import java.io.File import java.nio.file.{Files, Paths} import ammonite.ops.{Command, Path, Shellout, read} import org.perftester.git.GitUtils import org.perftester.process.{IO, Parent, ProcessConfiguration} import org.perftester.renderer.{HtmlRenderer, TextRenderer} import org.perftester.results.{PhaseResults, ResultReader, RunDetails, RunResult} import org.perftester.sbtbot.SBTBotTestRunner import org.slf4j.{Logger, LoggerFactory} import scala.collection.immutable.SortedSet import scala.concurrent.Await import scala.concurrent.duration.Duration object ProfileMain { val log: Logger = LoggerFactory.getLogger("ProfileMain") val isWindows: Boolean = System.getProperty("os.name").startsWith("Windows") //flag file to indicate that the mkPack completed successfully val flag = "mkPack.success" def main(args: Array[String]): Unit = { // parser.parse returns Option[C] PerfTesterOptionParser.parser.parse(args, EnvironmentConfig()) match { case Some(envConfig) => runBenchmark(envConfig) case None => // arguments are bad, error message will have been displayed } } def runBenchmark(envConfig: EnvironmentConfig): Unit = { //for some reason the jgit command doesnt work //so use the shell log.info(s"Running: git fetch (in ${envConfig.checkoutDir})") Command(Vector.empty, sys.env, Shellout.executeStream)("git", "fetch", "--all")( envConfig.checkoutDir) //jgit API that doesnt work please fix // val git = GitUtils(envConfig.checkoutDir) // try { // git.fetchAll() // } finally git.dispose() val commitsWithId = Configurations .configurationsFor(envConfig) .get(envConfig.config) .orElse(Option(envConfig.configString).map(configString => (() => parseConfigString(configString)))) .getOrElse { println(s"[ERROR] Config ${envConfig.config} not found") throw new IllegalArgumentException(s"Config ${envConfig.config} not found") }() val outputFolder = envConfig.outputDir / envConfig.username / envConfig.config Files.createDirectories(outputFolder.toNIO) println("Output logging to " + outputFolder) val results: SortedSet[RunDetails] = { var all = SortedSet.empty[RunDetails] for (vmId <- 1 to envConfig.processes; (testConfig, testId) <- commitsWithId.zipWithIndex) { val plan = planRun(envConfig, outputFolder, testConfig, vmId, envConfig.iterations) if (plan.runTest && all.nonEmpty) TextRenderer(envConfig, all).outputTextResults val results = executeRuns(plan) all += RunDetails(vmId, testId, results) } all } TextRenderer(envConfig, results).outputTextResults HtmlRenderer.outputHtmlResults(outputFolder, envConfig, results) } def parseConfigString(configString: String): List[TestConfig] = { configString.split("\\\\|").toList.map { configLine => println("configLine = " + configLine) val id :: gitHash :: extraOptionsString = configLine.split(";").toList val extraOptions = extraOptionsString.flatMap(_.split(",").toList) TestConfig(id, BuildFromGit(gitHash), extraArgs = extraOptions) } } // -XX:MaxInlineLevel=32 //-XX:MaxInlineSize=35 def planRun( envConfig: EnvironmentConfig, outputFolder: Path, testConfig: TestConfig, vm: Int, repeat: Int ): RunPlan = { val (sourceDir: Path, reuseScalac: Boolean, scalacPackDir: Path) = testConfig match { case TestConfig(_, gitBuild @ BuildFromGit(baseSha, cherryPicks, customDir), _, _, _) => val targetDir = customDir.getOrElse(envConfig.checkoutDir) val packDir = envConfig.scalacBuildCache / gitBuild.fullShaName val reused = Files.exists(packDir / flag toNIO) (targetDir, reused, packDir) case TestConfig(_, bfd @ BuildFromDir(_, _, rebuild), _, _, _) => val sourceDir = 
bfd.path val targetBuild = buildDir(sourceDir) val reuse = { if (!Files.exists(targetBuild.toNIO)) { println(s"dir NOT reused - no build dir") false } else if (rebuild) { println(s"dir NOT reused - as rebuild requested") false } else if (!Files.exists(targetBuild / flag toNIO)) { println(s"dir NOT reused - no flag file") false } else { val sourceDT = Utils.lastChangedDate(sourceDir / "src") val buildDT = Utils.lastChangedDate(targetBuild) println(s"latest file times \\nsource $sourceDT\\nbuild $buildDT") val reuse = sourceDT._1.isBefore(buildDT._1) println(s"dir reused = $reuse - based on file times") reuse } } (sourceDir, reuse, targetBuild) } val profileOutputFile = outputFolder / s"run_${vm}_${testConfig.id}.csv" val exists = Files.exists(profileOutputFile.toNIO) val runTest = !envConfig.analyseOnly && (!exists || envConfig.overwriteResults || testConfig.buildDefn.forceOverwriteResults) val runScalac = !envConfig.analyseOnly && runTest && !reuseScalac RunPlan(runTest, vm, reuseScalac, sourceDir, scalacPackDir, profileOutputFile, runScalac, testConfig, repeat, envConfig) } def buildDir(path: Path): Path = path / "build" / "pack" def executeRuns( runPlan: RunPlan ): RunResult = { val action = { if (runPlan.runTest && runPlan.runScalac) "compile and test" else if (runPlan.runTest) "test" else "skip" } println( "\\n\\n******************************************************************************************************") println( s"EXECUTING RUN #${runPlan.vm} ${runPlan.testConfig.id} - ${runPlan.testConfig.buildDefn} $action") println( "******************************************************************************************************\\n\\n") if (runPlan.runTest) { if (runPlan.runScalac) buildScalaC(runPlan.testConfig.buildDefn, runPlan.scalaSourceDir, runPlan.scalaPackDir) //flag the build was used Utils.touch(runPlan.scalaPackDir toNIO) executeTest(runPlan) } ResultReader.readResults(runPlan.testConfig, runPlan.profileOutputFile, runPlan.repeats) } def buildScalaC(buildDefn: BuildType, sourceDir: Path, scalaPackDir: Path): Unit = { buildDefn match { case BuildFromGit(baseSha, cherryPicks, _) => val git = GitUtils(sourceDir) try { //we always fetch once for the whole run log.info(s"Running: git reset --hard $baseSha (in $sourceDir)") git.resetToRevision(baseSha) cherryPicks foreach { sha => log.info(s"Running: git cherry-pick $sha (in $sourceDir)") git.cherryPick(sha) } } catch { case t: Exception => log.error(s"Failed to execute git fetch/reset to $baseSha ${if (cherryPicks isEmpty) "" else "or cherry-pick to " + cherryPicks.mkString}", t) throw t } finally { git.dispose() } case bfd: BuildFromDir => log.info("BuildFromDir selected - fetch skipped") } log.info(s"Building compiler in $sourceDir") runSbt(List("setupPublishCore", "clean", "dist/mkPack"), sourceDir, Nil) if (scalaPackDir != buildDir(sourceDir)) { val nioScalaPackDir = scalaPackDir.toNIO Files.createDirectories(nioScalaPackDir) Utils.deleteDir(nioScalaPackDir) Utils.copy(buildDir(sourceDir) toNIO, nioScalaPackDir) } Utils.touch(scalaPackDir / flag toNIO) } def runSbt(command: List[String], dir: Path, extraJVMArgs: List[String]): Unit = { import collection.JavaConverters._ val escaped = if (isWindows) command map { s => s.replace("\\\\", "\\\\\\\\").replace("\\"", "\\\\\\"") } else command val fullCommand = sbtCommandLine(extraJVMArgs) ::: escaped log.info(s"running sbt : ${fullCommand.mkString("'", "' '", "'")}") val proc = new ProcessBuilder(fullCommand.asJava) proc.directory(dir.toIO) proc.inheritIO() 
proc.start().waitFor() match { case 0 => case r => throw new IllegalStateException(s"bad result $r") } } def sbtCommandLine(extraJVMArgs: List[String]): List[String] = { val sbt = new File("sbtlib/sbt-launch.jar").getAbsoluteFile require(sbt.exists(), "sbt-launch.jar must exist in sbtlib directory") val sbtOpts = sys.env.get("SBT_OPTS").toList.flatMap(_.split(" ")).filterNot(_.trim.isEmpty) List("java", "-Dfile.encoding=UTF8", "-Xmx12G", "-XX:MaxPermSize=256m", "-XX:ReservedCodeCacheSize=128m", "-Dsbt.log.format=true", "-mx2G") ::: extraJVMArgs ::: sbtOpts ::: List("-cp", sbt.toString, "xsbt.boot.Boot") } def executeTest(runPlan: RunPlan): Unit = { val mkPackPath = runPlan.scalaPackDir val debugArgs = if (runPlan.envConfig.runWithDebug) "-agentlib:jdwp=transport=dt_shmem,server=y,suspend=y" :: Nil else Nil val profileParams = List("-Yprofile-destination", runPlan.profileOutputFile.toString()) log.info("Logging stats to " + runPlan.profileOutputFile) if (Files.exists(runPlan.profileOutputFile.toNIO)) Files.delete(runPlan.profileOutputFile.toNIO) val jvmArgs = debugArgs ::: runPlan.testConfig.extraJVMArgs if (runPlan.testConfig.useSbt) { val extraArgsStr = if (runPlan.testConfig.extraArgs.nonEmpty) runPlan.testConfig.extraArgs.mkString("\\"", "\\",\\"", "\\", ") else "" val programArgs = List( s"""set scalaHome := Some(file("$mkPackPath"))""", s"""set scalacOptions in ThisBuild ++= List($extraArgsStr${profileParams .mkString("\\"", "\\",\\"", "\\"")})""" ) val dotfile = runPlan.envConfig.testDir / ".perf_tester" val sbtCommands = if (dotfile.toIO.exists()) read.lines(dotfile).toList.filterNot(_.trim.isEmpty) else "clean" :: "compile" :: Nil // slightly bogus default SBTBotTestRunner.run(runPlan.envConfig.testDir, programArgs, jvmArgs, runPlan.repeats, sbtCommands, runPlan.envConfig.runWithDebug) } else { val mkPackLibPath = (mkPackPath / "lib").toString() val classPath = List( "jline.jar", "scala-compiler-doc.jar", "scala-compiler.jar", "scala-library.jar", "scala-reflect.jar", "scala-repl-jline-embedded.jar", "scala-repl-jline.jar", "scala-swing_2.12-2.0.0.jar", "scala-xml_2.12-1.0.6.jar", "scalap.jar" ).map(mkPackPath + File.separator + "lib" + File.separator + _) // s"${lib}jline.jar;${lib}scala-compiler-doc.jar;${lib}scala-compiler.jar;${lib}scala-library.jar;${lib}scala-reflect.jar;${lib}scala-repl-jline-embedded.jar;${lib}scala-repl-jline.jar;${lib}scala-swing_2.12-2.0.0.jar;${lib}scala-xml_2.12-1.0.6.jar;${lib}scalap.jar" val params = List( "-XX:MaxInlineLevel=32", //"-XX:MaxInlineSize=35", "-Xmx10G", "-Xms32M", s"""-Dscala.home="$mkPackPath${File.separator}.."""", """-Denv.emacs="" """, "-Dscala.usejavacp=true" ) ::: debugArgs ::: runPlan.testConfig.extraJVMArgs //TODO need to read this val files = IO.listSourcesIn(Paths.get("S:/scala/akka/akka-actor/src/main/scala")) map (_.toString) //TODO need to read this val compileClassPath = List( "C:\\\\Users\\\\dev\\\\.ivy2\\\\cache\\\\org.scala-lang.modules\\\\scala-java8-compat_2.12\\\\bundles\\\\scala-java8-compat_2.12-0.8.0.jar", "C:\\\\Users\\\\dev\\\\.ivy2\\\\cache\\\\com.typesafe\\\\config\\\\bundles\\\\config-1.3.1.jar", "C:\\\\Users\\\\dev\\\\.m2\\\\repository\\\\com\\\\typesafe\\\\akka\\\\akka-actor_2.12\\\\2.5.1\\\\akka-actor_2.12-2.5.1.jar" ) //TODO need to read this val otherParams = List( "-sourcepath", s"${runPlan.envConfig.testDir}${File.separator}src${File.separator}main${File.separator}java${File.pathSeparator}${runPlan.envConfig.testDir}${File.separator}src${File.separator}main${File.separator}scala" ) ++ profileParams ++ 
runPlan.testConfig.extraArgs val id = "x" val parent = new Parent(ProcessConfiguration(new File("."), None, classPath, params)) val outputPath = "z:\\\\output\\\\" val outputDir = Path(outputPath) Files.createDirectories(outputDir.toNIO) parent.createGlobal(id, outputPath, compileClassPath, otherParams, files) for (cycle <- 1 to runPlan.envConfig.iterations) { val result = parent.runGlobal(id) println(s" run ${runPlan.vm} # $cycle took ${result / 1000 / 1000.0} ms") val delete = startClean(outputDir) parent.doGc() Await.result(delete, Duration.Inf) } parent.destroyGlobal(id) parent.doExit() } } def startClean(file: Path) = { import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.Future Future { IO.deleteDir(file.toNIO, false) } } def printAggResults(cycleId: Int, testConfig: TestConfig, summary: Boolean, stats: Stats, baselineStats: Stats, baselineId: String) = { val wallMsStr = stats.wallClockTimeAvg.formatted(6, 2) val allWallMsStr = stats.allWallClockTimeAvg.formatted(6, 2) val allCpuMsStr = stats.allCpuTimeAvg.formatted(6, 2) val allAllocatedBytesStr = stats.allAllocatedBytes.formatted(6, 2) val allIdleMsStr = stats.allIdleAvg.formatted(6, 2) def diff(thisTest: Distribution, baseline: Distribution) = { if (baseline.mean == 0) "N/A" else if (thisTest.mean == baseline.mean) "---" else if (thisTest.mean > baseline.mean) f"+${(thisTest.mean - baseline.mean) / baseline.mean * 100}%6.2f%%" else f"${(thisTest.mean - baseline.mean) / baseline.mean * 100}%6.2f%%" } if (summary) { println( "%-65s\\t%4s\\t%25s\\t%25s\\t%25s\\t%25s\\t%25s" .format(testConfig.id, stats.size, wallMsStr, allWallMsStr, allCpuMsStr, allIdleMsStr, allAllocatedBytesStr)) if (testConfig.id != baselineId) println( "%-65s\\t%4s\\t%15s\\t%25s\\t%25s\\t%25s\\t%25s" .format( "... 
v baseline", "", diff(stats.wallClockTimeAvg, baselineStats.wallClockTimeAvg), diff(stats.allWallClockTimeAvg, baselineStats.allWallClockTimeAvg), diff(stats.allCpuTimeAvg, baselineStats.allCpuTimeAvg), diff(stats.allIdleAvg, baselineStats.allIdleAvg), diff(stats.allAllocatedBytes, baselineStats.allAllocatedBytes) )) } else println( "%-65s\\t%4s\\t%4s\\t%25s\\t%25s\\t%25s\\t%25s\\t%25s" .format(testConfig.id, cycleId, stats.size, wallMsStr, allWallMsStr, allCpuMsStr, allIdleMsStr, allAllocatedBytesStr)) } def calcStats(results: Seq[PhaseResults], limit: Double): Stats = { val size = (results.size * limit).toInt def distribution(fn: PhaseResults => Double): Distribution = { if (results.isEmpty) Distribution(-1, -1, -1) else { val raw = (results map fn sorted).take(size) val mean = raw.sum / size if (raw.isEmpty) Distribution(-1, -1, -1) else Distribution(raw.head, raw.last, mean) } } val wallClockTimeAvg = distribution(_.wallClockTimeMS) val allWallClockTimeAvg = distribution(_.allWallClockTimeMS) val allCpuTimeAvg = distribution(_.cpuTimeMS) val allAllocatedBytes = distribution(_.allocatedMB) val allIdleAvg = distribution(_.idleTimeMS) Stats(size, wallClockTimeAvg, allWallClockTimeAvg, allCpuTimeAvg, allAllocatedBytes, allIdleAvg) } case class Distribution(min: Double, max: Double, mean: Double) { def formatted(s: Int, p: Int): String = { s"${formatResult(s, p, mean)} [${formatPercent(4, 2, (min / mean) * 100 - 100)}% ${formatPercent(4, 2, (max / mean) * 100 - 100)}%]" } def formatPercent(sigDigits: Int, decimalDigits: Int, value: Double): String = { String.format(s"%+$sigDigits.${decimalDigits}f", new java.lang.Double(value)) } def formatResult(sigDigits: Int, decimalDigits: Int, value: Double): String = { String.format(s"%,$sigDigits.${decimalDigits}f", new java.lang.Double(value)) } } case class Stats(size: Int, wallClockTimeAvg: Distribution, allWallClockTimeAvg: Distribution, allCpuTimeAvg: Distribution, allAllocatedBytes: Distribution, allIdleAvg: Distribution) case class RunPlan(runTest: Boolean, vm: Int, canReuseScalac: Boolean, scalaSourceDir: Path, scalaPackDir: Path, profileOutputFile: Path, runScalac: Boolean, testConfig: TestConfig, repeats: Int, envConfig: EnvironmentConfig) }
rorygraves/perf_tester
src/main/scala/org/perftester/ProfileMain.scala
Scala
apache-2.0
17,769
package strata.data

import java.io.File

import strata.util.IO

/** An abstract program; a list of instructions. */
case class Program(instructions: Seq[Instruction], src: String) {
  override def toString = {
    src
  }
}

object Program {
  def fromFile(f: File): Program = {
    val Pattern = " (.*)#.*OPC=([^ ]*) *".r
    val Pattern2 = "callq .(.*)".r
    val instructions = (for (line <- IO.readFile(f).split("\n")) yield {
      line match {
        case Pattern(code, opcode) =>
          if (opcode != "retq" && opcode != "<label>") {
            if (opcode == "callq_label") {
              code match {
                case Pattern2(label) => Some(code, Instruction(opcode)(label))
                case _ =>
                  assert(false)
                  None
              }
            } else {
              Some((code, Instruction(opcode)))
            }
          } else {
            None
          }
        case _ => None
      }
    }).flatten
    Program(instructions.map(_._2), instructions.map(_._1.trim).mkString("\n"))
  }
}
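
// Hedged usage sketch (names are illustrative, not from the original repo):
// "prog.s" stands in for an assembly dump in the "  <code> # ... OPC=<op>"
// form matched by Pattern above; retq and <label> lines are dropped.
object ProgramDemo extends App {
  val program = Program.fromFile(new File("prog.s"))
  println(s"parsed ${program.instructions.size} instructions")
  println(program) // toString prints the trimmed source lines
}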
StanfordPL/strata
src/main/scala/strata/data/Program.scala
Scala
apache-2.0
1,073
package mimir.exec;

import mimir.algebra._;

abstract class ResultIterator {
  def apply(v: Int): PrimitiveValue;
  def deterministicRow(): Boolean;
  def deterministicCol(v: Int): Boolean;
  def missingRows(): Boolean;

  def open()
  def getNext(): Boolean;
  def close();

  def numCols: Int;
  def schema: List[(String,Type.T)];

  def map[X](fn: (PrimitiveValue) => X) =
    (0 until numCols).map( (i) => fn(this(i)) )
  def toList(): List[PrimitiveValue] = map( (x) => x ).toList

  def foreachRow(fn: ResultIterator => Unit): Unit = {
    open()
    while(getNext()){ fn(this) }
    close()
  }

  def allRows(): List[List[PrimitiveValue]] = {
    var ret = List[List[PrimitiveValue]]()
    foreachRow( (x) => { ret = ret ++ List(toList()) } )
    return ret;
  }

  def reason(ind: Int): List[(String, String)] = List()
}
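
// Hedged usage sketch: given any concrete ResultIterator (the class above is
// abstract), the traversal helpers compose like this tab-separated dump.
object ResultIteratorDemo {
  def printAll(it: ResultIterator): Unit = {
    println(it.schema.map(_._1).mkString("\t"))
    it.foreachRow { row =>
      // foreachRow hands back the iterator positioned on the current row
      println(row.toList().mkString("\t"))
    }
  }
}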
Legacy25/mimir
mimircore/src/main/scala/mimir/exec/ResultIterator.scala
Scala
apache-2.0
829
package org.scalajs.jsenv.test import org.scalajs.jsenv._ import org.scalajs.core.tools.io._ import org.junit.Test import org.junit.Assert._ import scala.concurrent.Await /** A couple of tests that test communication for mix-in into a test suite */ trait ComTests extends AsyncTests { protected def newJSEnv: ComJSEnv protected def comRunner(code: String): ComJSRunner = { val codeVF = new MemVirtualJSFile("testScript.js").withContent(code) newJSEnv.comRunner(codeVF) } private def assertThrowClosed(msg: String, body: => Unit): Unit = { val thrown = try { body false } catch { case _: ComJSEnv.ComClosedException => true } assertTrue(msg, thrown) } @Test def comCloseJVMTest: Unit = { val com = comRunner(s""" scalajsCom.init(function(msg) { scalajsCom.send("received: " + msg); }); scalajsCom.send("Hello World"); """) start(com) assertEquals("Hello World", com.receive()) for (i <- 0 to 10) { com.send(i.toString) assertEquals(s"received: $i", com.receive()) } com.close() com.await(DefaultTimeout) com.stop() // should do nothing, and not fail } def comCloseJSTestCommon(timeout: Long): Unit = { val com = comRunner(s""" scalajsCom.init(function(msg) {}); for (var i = 0; i < 10; ++i) scalajsCom.send("msg: " + i); scalajsCom.close(); """) start(com) Thread.sleep(timeout) for (i <- 0 until 10) assertEquals(s"msg: $i", com.receive()) assertThrowClosed("Expect receive to throw after closing of channel", com.receive()) com.close() com.await(DefaultTimeout) com.stop() // should do nothing, and not fail } @Test def comCloseJSTest: Unit = comCloseJSTestCommon(0) @Test def comCloseJSTestDelayed: Unit = comCloseJSTestCommon(1000) @Test def doubleCloseTest: Unit = { val n = 10 val com = pingPongRunner(n) start(com) for (i <- 0 until n) { com.send("ping") assertEquals("pong", com.receive()) } com.close() com.await(DefaultTimeout) } @Test def multiEnvTest: Unit = { val n = 10 val envs = List.fill(5)(pingPongRunner(10)) envs.foreach(start) val ops = List[ComJSRunner => Unit]( _.send("ping"), com => assertEquals("pong", com.receive()) ) for { i <- 0 until n env <- envs op <- ops } op(env) envs.foreach(_.close()) envs.foreach(_.await(DefaultTimeout)) } private def pingPongRunner(count: Int) = { comRunner(s""" var seen = 0; scalajsCom.init(function(msg) { scalajsCom.send("pong"); if (++seen >= $count) scalajsCom.close(); }); """) } @Test def largeMessageTest: Unit = { // 1KB data val baseMsg = new String(Array.tabulate(512)(_.toChar)) val baseLen = baseMsg.length // Max message size: 1KB * 2^(2*iters+1) = 1MB val iters = 4 val com = comRunner(""" scalajsCom.init(function(msg) { scalajsCom.send(msg + msg); }); """) start(com) com.send(baseMsg) def resultFactor(iters: Int) = Math.pow(2, 2 * iters + 1).toInt for (i <- 0 until iters) { val reply = com.receive() val factor = resultFactor(i) assertEquals(baseLen * factor, reply.length) for (j <- 0 until factor) assertEquals(baseMsg, reply.substring(j * baseLen, (j + 1) * baseLen)) com.send(reply + reply) } val lastLen = com.receive().length assertEquals(baseLen * resultFactor(iters), lastLen) com.close() com.await(DefaultTimeout) } @Test def highCharTest: Unit = { // #1536 val com = comRunner(""" scalajsCom.init(scalajsCom.send); """) start(com) val msg = "\\uC421\\u8F10\\u0112\\uFF32" com.send(msg) assertEquals(msg, com.receive()) com.close() com.await(DefaultTimeout) } @Test def noInitTest: Unit = { val com = comRunner("") start(com) com.send("Dummy") com.close() com.await(DefaultTimeout) } @Test def stopTestCom: Unit = { val com = 
comRunner(s"""scalajsCom.init(function(msg) {});""") start(com) // Make sure the VM doesn't terminate. Thread.sleep(1000) assertTrue("VM should still be running", com.isRunning) // Stop VM instead of closing channel com.stop() try { com.await(DefaultTimeout) fail("Stopped VM should be in failure state") } catch { case _: Throwable => } } @Test def futureStopTest: Unit = { val com = comRunner(s"""scalajsCom.init(function(msg) {});""") val fut = start(com) // Make sure the VM doesn't terminate. Thread.sleep(1000) assertTrue("VM should still be running", com.isRunning) // Stop VM instead of closing channel com.stop() try { Await.result(fut, DefaultTimeout) fail("Stopped VM should be in failure state") } catch { case _: Throwable => } } }
lrytz/scala-js
js-envs-test-kit/src/main/scala/org/scalajs/jsenv/test/ComTests.scala
Scala
bsd-3-clause
5,042
/* * Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com> */ package play.api.data.format import org.specs2.mutable.Specification import java.util.{ UUID, Date, TimeZone } import play.api.data._ import play.api.data.Forms._ class FormatSpec extends Specification { "dateFormat" should { "support custom time zones" in { val data = Map("date" -> "00:00") val format = Formats.dateFormat("HH:mm", TimeZone.getTimeZone("America/Los_Angeles")) format.bind("date", data).right.map(_.getTime) should beRight(28800000L) format.unbind("date", new Date(28800000L)) should equalTo(data) val format2 = Formats.dateFormat("HH:mm", TimeZone.getTimeZone("GMT+0000")) format2.bind("date", data).right.map(_.getTime) should beRight(0L) format2.unbind("date", new Date(0L)) should equalTo(data) } } "java.time Types" should { import java.time.LocalDateTime "support LocalDateTime formatting with a pattern" in { val pattern = "yyyy/MM/dd HH:mm:ss" val data = Map("localDateTime" -> "2016/06/06 00:30:30") val format = Formats.localDateTimeFormat(pattern) val bind: Either[Seq[FormError], LocalDateTime] = format.bind("localDateTime", data) bind.right.map(dt => { (dt.getYear, dt.getMonthValue, dt.getDayOfMonth, dt.getHour, dt.getMinute, dt.getSecond) }) should beRight((2016, 6, 6, 0, 30, 30)) } "support LocalDateTime formatting with default pattern" in { val data = Map("localDateTime" -> "2016-10-10 11:11:11") val format = Formats.localDateTimeFormat format.bind("localDateTime", data).right.map { dt => (dt.getYear, dt.getMonthValue, dt.getDayOfMonth, dt.getHour, dt.getMinute, dt.getSecond) } should beRight((2016, 10, 10, 11, 11, 11)) } } "A simple mapping of BigDecimalFormat" should { "return a BigDecimal" in { Form("value" -> bigDecimal).bind(Map("value" -> "10.23")).fold( formWithErrors => { "The mapping should not fail." must equalTo("Error") }, { number => number must equalTo(BigDecimal("10.23")) } ) } } "A complex mapping of BigDecimalFormat" should { "12.23 must be a valid bigDecimal(10,2)" in { Form("value" -> bigDecimal(10, 2)).bind(Map("value" -> "10.23")).fold( formWithErrors => { "The mapping should not fail." must equalTo("Error") }, { number => number must equalTo(BigDecimal("10.23")) } ) } "12.23 must not be a valid bigDecimal(10,1) : Too many decimals" in { Form("value" -> bigDecimal(10, 1)).bind(Map("value" -> "10.23")).fold( formWithErrors => { formWithErrors.errors.head.message must equalTo("error.real.precision") }, { number => "The mapping should fail." must equalTo("Error") } ) } "12111.23 must not be a valid bigDecimal(5,2) : Too many digits" in { Form("value" -> bigDecimal(5, 2)).bind(Map("value" -> "12111.23")).fold( formWithErrors => { formWithErrors.errors.head.message must equalTo("error.real.precision") }, { number => "The mapping should fail." must equalTo("Error") } ) } } "A UUID mapping" should { "return a proper UUID when given one" in { val testUUID = UUID.randomUUID() Form("value" -> uuid).bind(Map("value" -> testUUID.toString)).fold( formWithErrors => { "The mapping should not fail." 
must equalTo("Error") }, { uuid => uuid must equalTo(testUUID) } ) } "give an error when an invalid UUID is passed in" in { Form("value" -> uuid).bind(Map("value" -> "Joe")).fold( formWithErrors => { formWithErrors.errors.head.message must equalTo("error.uuid") }, { uuid => uuid must equalTo(UUID.randomUUID()) } ) } } "A char mapping" should { "return a proper Char when given one" in { val testChar = 'M' Form("value" -> char).bind(Map("value" -> testChar.toString)).fold( formWithErrors => { "The mapping should not fail." must equalTo("Error") }, { char => char must equalTo(testChar) } ) } "give an error when an empty string is passed in" in { Form("value" -> char).bind(Map("value" -> " ")).fold( formWithErrors => { formWithErrors.errors.head.message must equalTo("error.required") }, { char => char must equalTo('X') } ) } } "String parsing utility function" should { val errorMessage = "error.parsing" def parsingFunction[T](fu: String => T) = Formats.parsing(fu, errorMessage, Nil) _ val intParse: String => Int = Integer.parseInt val testField = "field" val testNumber = 1234 "parse an integer from a string" in { parsingFunction(intParse)(testField, Map(testField -> testNumber.toString)).fold( errors => "The parsing should not fail" must equalTo("Error"), parsedInt => parsedInt mustEqual testNumber ) } "register a field error if string not parseable into an Int" in { parsingFunction(intParse)(testField, Map(testField -> "notParseable")).fold( errors => errors should containTheSameElementsAs(Seq(FormError(testField, errorMessage))), parsedInt => "The parsing should fail" must equalTo("Error") ) } "register a field error if unexpected exception encountered during parsing" in { parsingFunction(_ => throw new AssertionError)(testField, Map(testField -> testNumber.toString)).fold( errors => errors should containTheSameElementsAs(Seq(FormError(testField, errorMessage))), parsedInt => "The parsing should fail" must equalTo("Error") ) } } }
ktoso/playframework
framework/src/play/src/test/scala/play/api/data/format/FormatSpec.scala
Scala
apache-2.0
5,653
/*                     __                                               *\
**     ________ ___   / /  ___     Scala API                            **
**    / __/ __// _ | / /  / _ |    (c) 2003-2010, LAMP/EPFL             **
**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
** /____/\___/_/ |_/____/_/ | |                                         **
**                          |/                                          **
\*                                                                      */

package scala.util.control

/** A class that can be instantiated for the break control abstraction.
 *  Example usage:<pre>
 *
 *  <b>val</b> mybreaks = <b>new</b> Breaks
 *  <b>import</b> mybreaks.{break, breakable}
 *
 *  breakable {
 *    <b>for</b> (...) {
 *      <b>if</b> (...) break
 *    }
 *  }</pre>
 *
 *  Calls to break from one instantiation of Breaks will never
 *  target breakable objects of some other instantiation.
 */
class Breaks {

  private val breakException = new BreakControl

  /** A block from which one can exit with a `break`. */
  def breakable(op: => Unit) {
    try {
      op
    } catch {
      case ex: BreakControl =>
        if (ex ne breakException) throw ex
    }
  }

  /** Break from the dynamically closest enclosing breakable block.
   *  @note this might be different from the statically closest enclosing block!
   */
  def break { throw breakException }
}

/** An object that can be used for the break control abstraction.
 *  Example usage:<pre>
 *
 *  <b>import</b> Breaks.{break, breakable}
 *
 *  breakable {
 *    <b>for</b> (...) {
 *      <b>if</b> (...) break
 *    }
 *  }</pre>
 */
object Breaks extends Breaks

private class BreakControl extends ControlThrowable
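
// A runnable version of the scaladoc example above (list contents are
// illustrative): scanning stops at the first negative element.
object BreaksDemo extends App {
  import Breaks.{break, breakable}

  breakable {
    for (x <- List(1, 2, -3, 4)) {
      if (x < 0) break
      println(x) // prints 1 and 2, then the loop is exited
    }
  }
}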
cran/rkafkajars
java/scala/util/control/Breaks.scala
Scala
apache-2.0
1,739
/* Copyright 2009-2021 EPFL, Lausanne */

package stainless
package extraction

package object oo {

  object trees extends oo.Trees with ClassSymbols {
    case class Symbols(
      functions: Map[Identifier, FunDef],
      sorts: Map[Identifier, ADTSort],
      classes: Map[Identifier, ClassDef],
      typeDefs: Map[Identifier, TypeDef],
    ) extends ClassSymbols with OOAbstractSymbols {
      override val symbols: this.type = this
    }

    override def mkSymbols(
      functions: Map[Identifier, FunDef],
      sorts: Map[Identifier, ADTSort],
      classes: Map[Identifier, ClassDef],
      typeDefs: Map[Identifier, TypeDef],
    ): Symbols = {
      Symbols(functions, sorts, classes, typeDefs)
    }

    object printer extends Printer { val trees: oo.trees.type = oo.trees }
  }

  def extractor(using inox.Context) = {
    class LoweringImpl(override val s: trees.type, override val t: innerfuns.trees.type)
      extends CheckingTransformer {
      override def transform(fd: s.FunDef): t.FunDef = {
        super.transform(fd.copy(flags = fd.flags.filterNot(_ == s.IsInvariant)))
      }
    }
    val lowering = ExtractionPipeline(new LoweringImpl(trees, innerfuns.trees))

    utils.DebugPipeline("AdtSpecialization", AdtSpecialization(trees, trees)) andThen
    utils.DebugPipeline("RefinementLifting", RefinementLifting(trees, trees)) andThen
    utils.DebugPipeline("TypeEncoding", TypeEncoding(trees, trees)) andThen
    utils.DebugPipeline("InvariantInitialization", InvariantInitialization(trees, trees)) andThen
    lowering
  }

  def fullExtractor(using inox.Context) = extractor andThen nextExtractor

  def nextExtractor(using inox.Context) = innerfuns.fullExtractor

  def phaseSemantics(using inox.Context): inox.SemanticsProvider { val trees: oo.trees.type } = {
    extraction.phaseSemantics(oo.trees)(fullExtractor)
  }

  def nextPhaseSemantics(using inox.Context): inox.SemanticsProvider { val trees: innerfuns.trees.type } = {
    innerfuns.phaseSemantics
  }
}
epfl-lara/stainless
core/src/main/scala/stainless/extraction/oo/package.scala
Scala
apache-2.0
2,005
/*
 * SPDX-License-Identifier: Apache-2.0
 *
 * Copyright 2015-2021 Andre White.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.truthencode.ddo.model

package object misc {
  /* Shared Pools */
  final val PoolCleave = "Cleave"
  final val PoolGreatCleave = "GreatCleave"

  /**
   * Pool Timer shared among manyshot, ten thousand stars etc
   */
  final val PoolManyShot = "ManyShot"
}
adarro/ddo-calc
subprojects/common/ddo-core/src/main/scala/io/truthencode/ddo/model/misc/package.scala
Scala
apache-2.0
943
package org.ai4fm.proofprocess.isabelle.core.patch

import scala.util.{Failure, Success, Try}

import org.eclipse.core.runtime.{CoreException, Platform}

import org.ai4fm.proofprocess.isabelle.core.IPatchActionHandler
import org.ai4fm.proofprocess.isabelle.core.IsabellePProcessCorePlugin.{error, log, plugin}
import org.ai4fm.proofprocess.isabelle.core.patch.IsabellePatches.PatchInfo

import isabelle.Isabelle_System

/**
 * Controls checks and calls to Isabelle patching process.
 *
 * @author Andrius Velykis
 */
class IsabellePatcher {

  private var continueAfterPatch: Option[Boolean] = None

  /**
   * Checks and performs Isabelle patching for ProofProcess capture.
   *
   * Returns false if PP capture should not be continued, true if everything is ok
   */
  def checkIsabellePatched(): Boolean = continueAfterPatch match {
    case Some(continue) => continue
    case None => {
      val continueCapture = Try(doCheckPatched()) match {
        case Failure(ex) => {
          log(error(Some(ex)))
          patchHandler foreach (_.reportPatchProblem(ex.getMessage))
          // do not continue PP capture
          false
        }
        case Success(continue) => continue
      }

      continueAfterPatch = Some(continueCapture)
      continueCapture
    }
  }

  private def doCheckPatched(): Boolean = {
    val isabelleHome = Isabelle_System.jvm_path(Isabelle_System.getenv_strict("ISABELLE_HOME"))

    val unpatchedTest = IsabellePatches.findUnpatched(isabelleHome)

    unpatchedTest.fold(
      err => {
        patchHandler foreach (_.reportPatchProblem(err))
        false
      },
      patches => askAndPatch(isabelleHome, patches))
  }

  private def askAndPatch(isabelleHome: String, patches: List[PatchInfo]): Boolean =
    if (!patches.isEmpty) {
      // only ask the user if there is something to patch
      patchHandler match {
        case None => true // ignore
        case Some(handler) => {
          val patchedFiles = patches map (_.targetPath)

          if (handler.performPatch(patchedFiles)) {
            // user agreed - do the patch
            IsabellePatches.applyPatches(isabelleHome, patches)
            handler.reportPatchCompleted()
            // do not allow continuing
            false
          } else {
            // cancelled - allow capturing with whatever we have
            true
          }
        }
      }
    } else {
      // no need to patch - allow to continue
      true
    }

  private def PATCH_HANDLER_ID = plugin.pluginId + ".patchHandler"

  private def patchHandler: Option[IPatchActionHandler] = {
    val extensionRegistry = Platform.getExtensionRegistry
    val extensions = extensionRegistry.getConfigurationElementsFor(PATCH_HANDLER_ID)

    try {
      val execExts = extensions.toStream map (_.createExecutableExtension("class"))

      execExts.headOption match {
        case Some(h: IPatchActionHandler) => Some(h)
        case _ => {
          log(error(msg = Some("Cannot instantiate patch handler extension!")))
          None
        }
      }
    } catch {
      case ex: CoreException => {
        log(error(Some(ex)))
        None
      }
    }
  }
}
andriusvelykis/proofprocess
org.ai4fm.proofprocess.isabelle.core/src/org/ai4fm/proofprocess/isabelle/core/patch/IsabellePatcher.scala
Scala
epl-1.0
3,169
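The checkIsabellePatched/continueAfterPatch pair above implements a memoized, failure-safe gate: the fallible check runs at most once, its outcome is cached, and any exception maps to "do not continue". A distilled sketch of that pattern with illustrative names (not from the source):

import scala.util.{Failure, Success, Try}

class OneShotGate(check: () => Boolean) {
  private var decision: Option[Boolean] = None

  def allowed: Boolean = decision getOrElse {
    val result = Try(check()) match {
      case Success(ok) => ok     // the check itself decides
      case Failure(_)  => false  // any error means: do not continue
    }
    decision = Some(result)      // remember the outcome for subsequent calls
    result
  }
}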
package org.scalatra import collection.generic.CanBuildFrom import annotation.implicitNotFound /* * All credit for the code in this file is a minimized version of scalaz' Zero * in scalaz 7 the Zero type class has been removed and zero's only exist on a monoid. * Because we were abusing the Zero typeclass as a way to provide default values this * retains that functionality without the abuse of the identity value */ /** * A DefaultValue in type Z provides a default value for a given type Z */ @implicitNotFound( "No default value found for type ${Z}. Try to implement an implicit org.scalatra.DefaultValue for this type." ) trait DefaultValue[Z] { val default: Z } trait DefaultValueMethods { def default[Z](z: Z): DefaultValue[Z] = new DefaultValue[Z] { val default = z } def mdefault[Z](implicit z: DefaultValue[Z]): Z = z.default } object DefaultValueMethods extends DefaultValueMethods trait DefaultValueImplicits { import DefaultValueMethods._ import xml.{ Elem, Node, NodeSeq } implicit def UnitDefaultValue: DefaultValue[Unit] = default(()) implicit def StringDefaultValue: DefaultValue[String] = default("") implicit def IntDefaultValue: DefaultValue[Int] = default(0) implicit def BooleanDefaultValue: DefaultValue[Boolean] = default(false) implicit def CharDefaultValue: DefaultValue[Char] = default(0.toChar) implicit def ByteDefaultValue: DefaultValue[Byte] = default(0.toByte) implicit def LongDefaultValue: DefaultValue[Long] = default(0L) implicit def ShortDefaultValue: DefaultValue[Short] = default(0.toShort) implicit def FloatDefaultValue: DefaultValue[Float] = default(0F) implicit def DoubleDefaultValue: DefaultValue[Double] = default(0D) implicit def BigIntegerDefaultValue = default(java.math.BigInteger.valueOf(0)) implicit def BigIntDefaultValue: DefaultValue[BigInt] = default(BigInt(0)) implicit def BigDecimalDefaultValue: DefaultValue[BigDecimal] = default(BigDecimal(0)) implicit def TraversableDefaultValue[CC <: Traversable[_]](implicit cbf: CanBuildFrom[Nothing, Nothing, CC]): DefaultValue[CC] = default(cbf.apply.result) // Not implicit to ensure implicitly[DefaultValue[NodeSeq]].default === NodeSeqDefaultValue.default def NodeDefaultValue: DefaultValue[Node] = new DefaultValue[Node] { val default = new Node { override def text = null override def label = null override def child = Nil } } // Not implicit to ensure implicitly[DefaultValue[NodeSeq]].default === NodeSeqDefaultValue.default def ElemDefaultValue: DefaultValue[Elem] = new DefaultValue[Elem] { val default = new Elem(null, null, scala.xml.Null, xml.TopScope, false) } implicit def OptionDefaultValue[A]: DefaultValue[Option[A]] = default(None) implicit def ArrayDefaultValue[A: Manifest]: DefaultValue[Array[A]] = default(new Array[A](0)) implicit def EitherLeftDefaultValue[A, B](implicit bz: DefaultValue[B]): DefaultValue[Either.LeftProjection[A, B]] = default(Right(mdefault[B]).left) implicit def EitherRightDefaultValue[A: DefaultValue, B]: DefaultValue[Either.RightProjection[A, B]] = default(Left(mdefault[A]).right) implicit def EitherDefaultValue[A: DefaultValue, B]: DefaultValue[Either[A, B]] = default(Left(mdefault[A])) implicit def MapDefaultValue[K, V: DefaultValue]: DefaultValue[Map[K, V]] = default(Map.empty[K, V]) implicit def MultiMapHeadViewDefaultValue: DefaultValue[util.MultiMapHeadView[String, String]] = default(util.MultiMapHeadView.empty[String, String]) implicit def IndifferentMultiMapHeadViewDefaultValue: DefaultValue[util.MultiMapHeadView[String, String] with util.MapWithIndifferentAccess[String]] = 
default(util.MultiMapHeadView.emptyIndifferent[String]) implicit def Tuple2DefaultValue[A, B](implicit az: DefaultValue[A], bz: DefaultValue[B]): DefaultValue[(A, B)] = default((az.default, bz.default)) implicit def Tuple3DefaultValue[A, B, C](implicit az: DefaultValue[A], bz: DefaultValue[B], cz: DefaultValue[C]): DefaultValue[(A, B, C)] = default((az.default, bz.default, cz.default)) implicit def Tuple4DefaultValue[A, B, C, D](implicit az: DefaultValue[A], bz: DefaultValue[B], cz: DefaultValue[C], dz: DefaultValue[D]): DefaultValue[(A, B, C, D)] = default((az.default, bz.default, cz.default, dz.default)) implicit def Function1ABDefaultValue[A, B: DefaultValue]: DefaultValue[A ⇒ B] = default((_: A) ⇒ mdefault[B]) import java.util._ import java.util.concurrent._ implicit def JavaArrayListDefaultValue[A]: DefaultValue[ArrayList[A]] = default(new ArrayList[A]) implicit def JavaHashMapDefaultValue[K, V]: DefaultValue[HashMap[K, V]] = default(new HashMap[K, V]) implicit def JavaHashSetDefaultValue[A]: DefaultValue[HashSet[A]] = default(new HashSet[A]) implicit def JavaHashtableDefaultValue[K, V]: DefaultValue[Hashtable[K, V]] = default(new Hashtable[K, V]) implicit def JavaIdentityHashMapDefaultValue[K, V] = default(new IdentityHashMap[K, V]) implicit def JavaLinkedHashMapDefaultValue[K, V]: DefaultValue[LinkedHashMap[K, V]] = default(new LinkedHashMap[K, V]) implicit def JavaLinkedHashSetDefaultValue[A]: DefaultValue[LinkedHashSet[A]] = default(new LinkedHashSet[A]) implicit def JavaLinkedListDefaultValue[A]: DefaultValue[LinkedList[A]] = default(new LinkedList[A]) implicit def JavaPriorityQueueDefaultValue[A]: DefaultValue[PriorityQueue[A]] = default(new PriorityQueue[A]) implicit def JavaStackDefaultValue[A]: DefaultValue[Stack[A]] = default(new Stack[A]) implicit def JavaTreeMapDefaultValue[K, V]: DefaultValue[TreeMap[K, V]] = default(new TreeMap[K, V]) implicit def JavaTreeSetDefaultValue[A]: DefaultValue[TreeSet[A]] = default(new TreeSet[A]) implicit def JavaVectorDefaultValue[A]: DefaultValue[Vector[A]] = default(new Vector[A]) implicit def JavaWeakHashMapDefaultValue[K, V]: DefaultValue[WeakHashMap[K, V]] = default(new WeakHashMap[K, V]) implicit def JavaArrayBlockingQueueDefaultValue[A]: DefaultValue[ArrayBlockingQueue[A]] = default(new ArrayBlockingQueue[A](0)) implicit def JavaConcurrentHashMapDefaultValue[K, V]: DefaultValue[ConcurrentHashMap[K, V]] = default(new ConcurrentHashMap[K, V]) implicit def JavaConcurrentLinkedQueueDefaultValue[A]: DefaultValue[ConcurrentLinkedQueue[A]] = default(new ConcurrentLinkedQueue[A]) implicit def JavaCopyOnWriteArrayListDefaultValue[A]: DefaultValue[CopyOnWriteArrayList[A]] = default(new CopyOnWriteArrayList[A]) implicit def JavaCopyOnWriteArraySetDefaultValue[A]: DefaultValue[CopyOnWriteArraySet[A]] = default(new CopyOnWriteArraySet[A]) implicit def JavaLinkedBlockingQueueDefaultValue[A]: DefaultValue[LinkedBlockingQueue[A]] = default(new LinkedBlockingQueue[A]) implicit def JavaSynchronousQueueDefaultValue[A]: DefaultValue[SynchronousQueue[A]] = default(new SynchronousQueue[A]) } trait DefaultValues extends DefaultValueMethods with DefaultValueImplicits object DefaultValues extends DefaultValues
etorreborre/scalatra
core/src/main/scala/org/scalatra/DefaultValue.scala
Scala
bsd-2-clause
7,002
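A minimal sketch of how the type class above resolves, assuming the file is on the classpath; each printed value follows directly from an instance defined there:

import org.scalatra.DefaultValues._

object DefaultsDemo extends App {
  println(mdefault[Int])              // 0
  println(mdefault[String])           // "" (empty string)
  println(mdefault[Option[String]])   // None
  println(mdefault[Map[String, Int]]) // Map()
  println(mdefault[(Int, Boolean)])   // (0,false), composed from the Int and Boolean instances
}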
package controllers import javax.inject.Inject import dao.TeamDAO import models.Team._ import play.api.libs.json._ import play.api.mvc._ import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.Future class TeamController @Inject()(teamDao: TeamDAO) extends Controller { def getTeams(clubId: Option[Int]) = Action.async { implicit request => val teams: Future[Seq[Team]] = clubId match { case Some(x) => teamDao.getTeamByClub(x) case None => teamDao.all() } teams map { p => Ok(Json.toJson(p)) } } def getTeam(teamId: Int) = Action.async { implicit request => val team: Future[Option[Team]] = teamDao.getTeam(teamId) team map { case Some(p) => Ok(Json.toJson(p)) case None => NotFound } } def getTeamByClub(clubId: Int) = Action.async { implicit request => val teams: Future[Seq[Team]] = teamDao.getTeamByClub(clubId) teams map { p => Ok(Json.toJson(p)) } } def updateTeam(teamId: Int) = Action.async(parse.json[Team]) { implicit request => val team: Team = request.body val affectedRowsCount: Future[Int] = teamDao.updateTeam(teamId, team) affectedRowsCount map { case 1 => Ok case 0 => NotFound case _ => InternalServerError } } def createTeam = Action.async(parse.json[Team]) { implicit request => val team: Team = request.body val teamId: Future[Int] = teamDao.createTeam(team) teamId map { case id => Created(Json.toJson(id)) } } def deleteTeam(teamId: Int) = Action.async { implicit request => val affectedRowsCount: Future[Int] = teamDao.deleteTeam(teamId) affectedRowsCount map { case 1 => Ok case 0 => NotFound case _ => InternalServerError } } }
magura42/KickAppServer
app/controllers/TeamController.scala
Scala
mit
1,780
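A minimal test-style sketch of invoking an action directly, assuming Play's test helpers; teamDao is a placeholder for a real or stubbed TeamDAO:

import play.api.test._
import play.api.test.Helpers._

object TeamControllerSketch {
  def checkGetTeam(teamDao: dao.TeamDAO): Int = {
    val controller = new TeamController(teamDao)
    // Actions are asynchronous; the status helper unwraps the Future[Result]
    val result = controller.getTeam(42).apply(FakeRequest())
    status(result) // OK with the team as JSON, or NOT_FOUND for an unknown id
  }
}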
package utils import org.slf4j.LoggerFactory /** * Created by anand on 17/8/15. */ trait LoggerHelper { private val logger = LoggerFactory.getLogger(this.getClass) protected def debug(message: String): Unit = logger.debug(message) protected def debug(message: String, exception: Throwable): Unit = logger.debug(message, exception) protected def info(message: String): Unit = logger.info(message) protected def info(message: String, exception: Throwable): Unit = logger.info(message, exception) protected def warn(message: String): Unit = logger.warn(message) protected def warn(message: String, exception: Throwable): Unit = logger.warn(message, exception) protected def error(message: String): Unit = logger.error(message) protected def error(message: String, exception: Throwable): Unit = logger.error(message, exception) }
anand-singh/csr-ticketing-system
app/utils/LoggerHelper.scala
Scala
apache-2.0
858
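Because the trait resolves its logger from this.getClass, every class mixing it in logs under its own name. A minimal sketch with an illustrative class:

import utils.LoggerHelper

class TeamService extends LoggerHelper { // logs under the name "TeamService"
  def create(name: String): Unit = {
    info(s"creating team $name")
    try {
      // ... persistence call would go here ...
    } catch {
      case e: Exception => error("team creation failed", e)
    }
  }
}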
import scala.language.experimental.macros object Macros { def impl(c: scala.reflect.macros.blackbox.Context) = { import c.universe._ q""" trait Foo { def x = 2 } new Foo {} """ } def foo: Any = macro impl }
yusuke2255/dotty
tests/untried/neg/macro-blackbox-structural/Impls_Macros_1.scala
Scala
bsd-3-clause
253
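Given the tests/untried/neg path, the fixture's point is that the refined structural type of a blackbox macro expansion is not visible to callers: foo is typed as its declared Any. A hypothetical second compilation unit showing the call that is expected to be rejected:

object Test extends App {
  Macros.foo.x // error: value x is not a member of Any
}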
package Tutorial import Chisel._ import Node._ import Literal._ import scala.collection.mutable.HashMap import scala.collection.mutable.ArrayBuffer class dram (extCompName:String) extends gComponentLeaf (() => new mem_req_t)(() => new mem_rep_t) (ArrayBuffer(("dramBank0", () => UFix(width = 32) , () => UFix(width = 32)), ("dramBank1", () => UFix(width = 32) , () => UFix(width = 32)), ("dramBank2", () => UFix(width = 32) , () => UFix(width = 32)), ("dramBank3", () => UFix(width = 32) , () => UFix(width = 32)), ("dramBank4", () => UFix(width = 32) , () => UFix(width = 32)), ("dramBank5", () => UFix(width = 32) , () => UFix(width = 32)), ("dramBank6", () => UFix(width = 32) , () => UFix(width = 32)), ("dramBank7", () => UFix(width = 32) , () => UFix(width = 32)))) (extCompName = extCompName + "__type__engine__MT__8__") with include { val numOfThreads = 8 val NONE_SELECTED = UFix(numOfThreads,log2Up(numOfThreads+1)) val WaitForInputValid = UFix(0, 8) val WaitForOutputReady = UFix(255, 8) val WaitForReady = UFix(0, 1) val WaitForValid = UFix(1, 1) val inputTag = Vec(numOfThreads) {Reg(UFix(width=TAGWIDTH*2))} val State = Vec(numOfThreads) {Reg(UFix(width=8), resetVal=WaitForInputValid)} val EmitReturnState = Vec(numOfThreads) {Reg(UFix(width=8), resetVal=WaitForInputValid)} //val outstandingOffs = Vec(numOfThreads) {Reg(resetVal=UFix(0, 5))} val AllOffloadsReady = Bool() val AllOffloadsValid = Vec(numOfThreads) {Bool()} /*******************Thread states*********************************/ val subStateTh = Vec(numOfThreads) {Reg(resetVal=WaitForReady)} def myOff = io.elements.find(_._1 == "off").getOrElse(elseV)._2 val RB_HIT_DELAY = UFix(3, width = 32)//constant macro definition val RB_MISS_DELAY = UFix(7, width = 32)//constant macro definition val CHIP_ADDR_WIDTH = UFix(3, width = 32)//constant macro definition val COLUMN_ADDR_WIDTH = UFix(12, width = 32)//constant macro definition val BANK_ADDR_WIDTH = UFix(3, width = 32)//constant macro definition val ROW_ADDR_WIDTH = UFix(12, width = 32)//constant macro definition val CHIP_SHIFT_R = UFix(0, width = 32)//constant macro definition val CHIP_SHIFT_L = (CHIP_SHIFT_R+CHIP_ADDR_WIDTH)//constant macro definition val COLUMN_SHIFT_R = CHIP_SHIFT_L//constant macro definition val COLUMN_SHIFT_L = (CHIP_SHIFT_R+COLUMN_ADDR_WIDTH)//constant macro definition val BANK_SHIFT_R = COLUMN_SHIFT_L//constant macro definition val BANK_SHIFT_L = (BANK_ADDR_WIDTH+BANK_SHIFT_R)//constant macro definition val ROW_SHIFT_R = BANK_SHIFT_L//constant macro definition val ROW_SHIFT_L = (ROW_SHIFT_R+ROW_ADDR_WIDTH)//constant macro definition def chip(addr:UFix) = ((addr>>CHIP_SHIFT_R)& ((UFix(1, width = 32)<<CHIP_SHIFT_L)-UFix(1, width = 32)))//constant macro definition def column(addr:UFix) = ((addr>>COLUMN_SHIFT_R)& ((UFix(1, width = 32)<<COLUMN_SHIFT_L)-UFix(1, width = 32)))//constant macro definition def bank(addr:UFix) = ((addr>>BANK_SHIFT_R)& ((UFix(1, width = 32)<<BANK_SHIFT_L)-UFix(1, width = 32)))//constant macro definition def row(addr:UFix) = ((addr>>ROW_SHIFT_R)& ((UFix(1, width = 32)<<ROW_SHIFT_L)-UFix(1, width = 32)))//constant macro definition val rb0RowAddr = Vec (numOfThreads) {Reg(UFix(width = 32), resetVal=UFix(1, width = 32))} //Global variable val rb1RowAddr = Vec (numOfThreads) {Reg(UFix(width = 32), resetVal=UFix(0, width = 32))} //Global variable val rb2RowAddr = Vec (numOfThreads) {Reg(UFix(width = 32), resetVal=UFix(0, width = 32))} //Global variable val rb3RowAddr = Vec (numOfThreads) {Reg(UFix(width = 32), resetVal=UFix(0, width = 32))} //Global 
variable val rb4RowAddr = Vec (numOfThreads) {Reg(UFix(width = 32), resetVal=UFix(0, width = 32))} //Global variable val rb5RowAddr = Vec (numOfThreads) {Reg(UFix(width = 32), resetVal=UFix(0, width = 32))} //Global variable val rb6RowAddr = Vec (numOfThreads) {Reg(UFix(width = 32), resetVal=UFix(0, width = 32))} //Global variable val rb7RowAddr = Vec (numOfThreads) {Reg(UFix(width = 32), resetVal=UFix(0, width = 32))} //Global variable val bankAddr = Vec (numOfThreads) {Reg(UFix(width = 32))} //Global variable val rowAddr = Vec (numOfThreads) {Reg(UFix(width = 32))} //Global variable val inputReg = Vec(numOfThreads) {Reg(new mem_req_t)} val outputReg = Vec(numOfThreads) {Reg(new mem_rep_t)} def mymyOffdramBank2 = myOff.asInstanceOf[Bundle].elements.find(_._1 == "dramBank2").getOrElse(elseV)._2 val dramBank2Port = new gOffBundleND(() => UFix(width = 32), () => UFix(width = 32)) dramBank2Port <> mymyOffdramBank2 val dramBank2PortReplyStorage = Vec(numOfThreads) {Reg(UFix(width = 32))} def mymyOffdramBank3 = myOff.asInstanceOf[Bundle].elements.find(_._1 == "dramBank3").getOrElse(elseV)._2 val dramBank3Port = new gOffBundleND(() => UFix(width = 32), () => UFix(width = 32)) dramBank3Port <> mymyOffdramBank3 val dramBank3PortReplyStorage = Vec(numOfThreads) {Reg(UFix(width = 32))} def mymyOffdramBank4 = myOff.asInstanceOf[Bundle].elements.find(_._1 == "dramBank4").getOrElse(elseV)._2 val dramBank4Port = new gOffBundleND(() => UFix(width = 32), () => UFix(width = 32)) dramBank4Port <> mymyOffdramBank4 val dramBank4PortReplyStorage = Vec(numOfThreads) {Reg(UFix(width = 32))} def mymyOffdramBank5 = myOff.asInstanceOf[Bundle].elements.find(_._1 == "dramBank5").getOrElse(elseV)._2 val dramBank5Port = new gOffBundleND(() => UFix(width = 32), () => UFix(width = 32)) dramBank5Port <> mymyOffdramBank5 val dramBank5PortReplyStorage = Vec(numOfThreads) {Reg(UFix(width = 32))} def mymyOffdramBank0 = myOff.asInstanceOf[Bundle].elements.find(_._1 == "dramBank0").getOrElse(elseV)._2 val dramBank0Port = new gOffBundleND(() => UFix(width = 32), () => UFix(width = 32)) dramBank0Port <> mymyOffdramBank0 val dramBank0PortReplyStorage = Vec(numOfThreads) {Reg(UFix(width = 32))} def mymyOffdramBank1 = myOff.asInstanceOf[Bundle].elements.find(_._1 == "dramBank1").getOrElse(elseV)._2 val dramBank1Port = new gOffBundleND(() => UFix(width = 32), () => UFix(width = 32)) dramBank1Port <> mymyOffdramBank1 val dramBank1PortReplyStorage = Vec(numOfThreads) {Reg(UFix(width = 32))} def mymyOffdramBank7 = myOff.asInstanceOf[Bundle].elements.find(_._1 == "dramBank7").getOrElse(elseV)._2 val dramBank7Port = new gOffBundleND(() => UFix(width = 32), () => UFix(width = 32)) dramBank7Port <> mymyOffdramBank7 val dramBank7PortReplyStorage = Vec(numOfThreads) {Reg(UFix(width = 32))} def mymyOffdramBank6 = myOff.asInstanceOf[Bundle].elements.find(_._1 == "dramBank6").getOrElse(elseV)._2 val dramBank6Port = new gOffBundleND(() => UFix(width = 32), () => UFix(width = 32)) dramBank6Port <> mymyOffdramBank6 val dramBank6PortReplyStorage = Vec(numOfThreads) {Reg(UFix(width = 32))} val GS_CHECK_RB_HIT = UFix(1) val GS_WAIT_BANK_0 = UFix(2) val GS_WAIT_BANK_1 = UFix(3) val GS_WAIT_BANK_2 = UFix(4) val GS_WAIT_BANK_3 = UFix(5) val GS_WAIT_BANK_4 = UFix(6) val GS_WAIT_BANK_5 = UFix(7) val GS_WAIT_BANK_6 = UFix(8) val GS_WAIT_BANK_7 = UFix(9) val GS_FINISH = UFix(10) //stateCounters val prevStateThread0 = Reg(UFix(width=8), resetVal=WaitForInputValid) prevStateThread0 := State(0) when ((State(0) != prevStateThread0) && (State(0) < UFix(16)) && 
!pcPaused) { stateCounters(State(0)) := stateCounters(State(0)) + UFix(1) } /******************Winner threads*********************************/ val rThreadEncoder = new RREncode (numOfThreads) val rThread = rThreadEncoder.io.chosen Range(0, numOfThreads, 1).map(i => rThreadEncoder.io.valid(i) := (subStateTh(i) === WaitForReady)) rThreadEncoder.io.ready := (rThread != NONE_SELECTED) val vThreadEncoder = new RREncode (numOfThreads) val vThread = vThreadEncoder.io.chosen Range(0, numOfThreads, 1).map(i => vThreadEncoder.io.valid(i) := (subStateTh(i) === WaitForValid) && AllOffloadsValid(i)) vThreadEncoder.io.ready := vThread != NONE_SELECTED val sThreadEncoder = new RREncode (numOfThreads) val sThread = sThreadEncoder.io.chosen Range(0, numOfThreads, 1).map(i => sThreadEncoder.io.valid(i) := (subStateTh(i) === WaitForReady) && (State(i) === WaitForInputValid)) sThreadEncoder.io.ready := sThread != NONE_SELECTED Range(0, numOfThreads, 1).foreach(i => subStateTh(i) := MuxCase(subStateTh(i), Seq((AllOffloadsReady && UFix(i) === rThread && State(i) != WaitForInputValid && State(i) != WaitForOutputReady , WaitForValid), (UFix(i) === vThread, WaitForReady)))) dramBank0Port.rep.ready := Bool(true) dramBank1Port.rep.ready := Bool(true) dramBank2Port.rep.ready := Bool(true) dramBank3Port.rep.ready := Bool(true) dramBank4Port.rep.ready := Bool(true) dramBank5Port.rep.ready := Bool(true) dramBank6Port.rep.ready := Bool(true) dramBank7Port.rep.ready := Bool(true) /******************Ready stage handler************************/ val dramBank0PortHadReadyRequest = Reg(resetVal=Bool(false)) val dramBank0_ready_received = Reg(resetVal=Bool(false)) val dramBank1PortHadReadyRequest = Reg(resetVal=Bool(false)) val dramBank1_ready_received = Reg(resetVal=Bool(false)) val dramBank2PortHadReadyRequest = Reg(resetVal=Bool(false)) val dramBank2_ready_received = Reg(resetVal=Bool(false)) val dramBank3PortHadReadyRequest = Reg(resetVal=Bool(false)) val dramBank3_ready_received = Reg(resetVal=Bool(false)) val dramBank4PortHadReadyRequest = Reg(resetVal=Bool(false)) val dramBank4_ready_received = Reg(resetVal=Bool(false)) val dramBank5PortHadReadyRequest = Reg(resetVal=Bool(false)) val dramBank5_ready_received = Reg(resetVal=Bool(false)) val dramBank6PortHadReadyRequest = Reg(resetVal=Bool(false)) val dramBank6_ready_received = Reg(resetVal=Bool(false)) val dramBank7PortHadReadyRequest = Reg(resetVal=Bool(false)) val dramBank7_ready_received = Reg(resetVal=Bool(false)) AllOffloadsReady := (dramBank0Port.req.ready || dramBank0_ready_received || (!dramBank0PortHadReadyRequest && !dramBank0Port.req.valid)) && (dramBank1Port.req.ready || dramBank1_ready_received || (!dramBank1PortHadReadyRequest && !dramBank1Port.req.valid)) && (dramBank2Port.req.ready || dramBank2_ready_received || (!dramBank2PortHadReadyRequest && !dramBank2Port.req.valid)) && (dramBank3Port.req.ready || dramBank3_ready_received || (!dramBank3PortHadReadyRequest && !dramBank3Port.req.valid)) && (dramBank4Port.req.ready || dramBank4_ready_received || (!dramBank4PortHadReadyRequest && !dramBank4Port.req.valid)) && (dramBank5Port.req.ready || dramBank5_ready_received || (!dramBank5PortHadReadyRequest && !dramBank5Port.req.valid)) && (dramBank6Port.req.ready || dramBank6_ready_received || (!dramBank6PortHadReadyRequest && !dramBank6Port.req.valid)) && (dramBank7Port.req.ready || dramBank7_ready_received || (!dramBank7PortHadReadyRequest && !dramBank7Port.req.valid)) && Bool(true) dramBank0_ready_received := !(AllOffloadsReady) && (dramBank0_ready_received || 
dramBank0Port.req.ready) dramBank0PortHadReadyRequest := !AllOffloadsReady && (dramBank0PortHadReadyRequest || dramBank0Port.req.valid) dramBank1_ready_received := !(AllOffloadsReady) && (dramBank1_ready_received || dramBank1Port.req.ready) dramBank1PortHadReadyRequest := !AllOffloadsReady && (dramBank1PortHadReadyRequest || dramBank1Port.req.valid) dramBank2_ready_received := !(AllOffloadsReady) && (dramBank2_ready_received || dramBank2Port.req.ready) dramBank2PortHadReadyRequest := !AllOffloadsReady && (dramBank2PortHadReadyRequest || dramBank2Port.req.valid) dramBank3_ready_received := !(AllOffloadsReady) && (dramBank3_ready_received || dramBank3Port.req.ready) dramBank3PortHadReadyRequest := !AllOffloadsReady && (dramBank3PortHadReadyRequest || dramBank3Port.req.valid) dramBank4_ready_received := !(AllOffloadsReady) && (dramBank4_ready_received || dramBank4Port.req.ready) dramBank4PortHadReadyRequest := !AllOffloadsReady && (dramBank4PortHadReadyRequest || dramBank4Port.req.valid) dramBank5_ready_received := !(AllOffloadsReady) && (dramBank5_ready_received || dramBank5Port.req.ready) dramBank5PortHadReadyRequest := !AllOffloadsReady && (dramBank5PortHadReadyRequest || dramBank5Port.req.valid) dramBank6_ready_received := !(AllOffloadsReady) && (dramBank6_ready_received || dramBank6Port.req.ready) dramBank6PortHadReadyRequest := !AllOffloadsReady && (dramBank6PortHadReadyRequest || dramBank6Port.req.valid) dramBank7_ready_received := !(AllOffloadsReady) && (dramBank7_ready_received || dramBank7Port.req.ready) dramBank7PortHadReadyRequest := !AllOffloadsReady && (dramBank7PortHadReadyRequest || dramBank7Port.req.valid) /******************Valid stage handler************************/ val dramBank0PortHadValidRequest = Vec(numOfThreads) {Reg(resetVal=Bool(false))} val dramBank0_valid_received = Vec(numOfThreads) {Reg(resetVal=Bool(false))} val dramBank1PortHadValidRequest = Vec(numOfThreads) {Reg(resetVal=Bool(false))} val dramBank1_valid_received = Vec(numOfThreads) {Reg(resetVal=Bool(false))} val dramBank2PortHadValidRequest = Vec(numOfThreads) {Reg(resetVal=Bool(false))} val dramBank2_valid_received = Vec(numOfThreads) {Reg(resetVal=Bool(false))} val dramBank3PortHadValidRequest = Vec(numOfThreads) {Reg(resetVal=Bool(false))} val dramBank3_valid_received = Vec(numOfThreads) {Reg(resetVal=Bool(false))} val dramBank4PortHadValidRequest = Vec(numOfThreads) {Reg(resetVal=Bool(false))} val dramBank4_valid_received = Vec(numOfThreads) {Reg(resetVal=Bool(false))} val dramBank5PortHadValidRequest = Vec(numOfThreads) {Reg(resetVal=Bool(false))} val dramBank5_valid_received = Vec(numOfThreads) {Reg(resetVal=Bool(false))} val dramBank6PortHadValidRequest = Vec(numOfThreads) {Reg(resetVal=Bool(false))} val dramBank6_valid_received = Vec(numOfThreads) {Reg(resetVal=Bool(false))} val dramBank7PortHadValidRequest = Vec(numOfThreads) {Reg(resetVal=Bool(false))} val dramBank7_valid_received = Vec(numOfThreads) {Reg(resetVal=Bool(false))} for (i <- 0 to numOfThreads-1) { AllOffloadsValid(i) := ((dramBank0Port.rep.valid && (dramBank0Port.rep.tag === UFix(i, 5)))|| dramBank0_valid_received(i) || !dramBank0PortHadValidRequest(i)) && ((dramBank1Port.rep.valid && (dramBank1Port.rep.tag === UFix(i, 5)))|| dramBank1_valid_received(i) || !dramBank1PortHadValidRequest(i)) && ((dramBank2Port.rep.valid && (dramBank2Port.rep.tag === UFix(i, 5)))|| dramBank2_valid_received(i) || !dramBank2PortHadValidRequest(i)) && ((dramBank3Port.rep.valid && (dramBank3Port.rep.tag === UFix(i, 5)))|| dramBank3_valid_received(i) || 
!dramBank3PortHadValidRequest(i)) && ((dramBank4Port.rep.valid && (dramBank4Port.rep.tag === UFix(i, 5)))|| dramBank4_valid_received(i) || !dramBank4PortHadValidRequest(i)) && ((dramBank5Port.rep.valid && (dramBank5Port.rep.tag === UFix(i, 5)))|| dramBank5_valid_received(i) || !dramBank5PortHadValidRequest(i)) && ((dramBank6Port.rep.valid && (dramBank6Port.rep.tag === UFix(i, 5)))|| dramBank6_valid_received(i) || !dramBank6PortHadValidRequest(i)) && ((dramBank7Port.rep.valid && (dramBank7Port.rep.tag === UFix(i, 5)))|| dramBank7_valid_received(i) || !dramBank7PortHadValidRequest(i)) && Bool(true) dramBank0_valid_received(i) := !(vThread === UFix(i, 5)) && ((dramBank0_valid_received(i)) || (dramBank0Port.rep.valid && dramBank0Port.rep.tag === UFix(i, 5))) dramBank0PortHadValidRequest(i) := !(vThread === UFix(i,5)) && (dramBank0PortHadValidRequest(i) || (UFix(i,5)===rThread && dramBank0Port.req.valid)/*(dramBank0PortHadReadyRequest && AllOffloadsReady && (UFix(i,5) === rThread))*/) dramBank1_valid_received(i) := !(vThread === UFix(i, 5)) && ((dramBank1_valid_received(i)) || (dramBank1Port.rep.valid && dramBank1Port.rep.tag === UFix(i, 5))) dramBank1PortHadValidRequest(i) := !(vThread === UFix(i,5)) && (dramBank1PortHadValidRequest(i) || (UFix(i,5)===rThread && dramBank1Port.req.valid)/*(dramBank1PortHadReadyRequest && AllOffloadsReady && (UFix(i,5) === rThread))*/) dramBank2_valid_received(i) := !(vThread === UFix(i, 5)) && ((dramBank2_valid_received(i)) || (dramBank2Port.rep.valid && dramBank2Port.rep.tag === UFix(i, 5))) dramBank2PortHadValidRequest(i) := !(vThread === UFix(i,5)) && (dramBank2PortHadValidRequest(i) || (UFix(i,5)===rThread && dramBank2Port.req.valid)/*(dramBank2PortHadReadyRequest && AllOffloadsReady && (UFix(i,5) === rThread))*/) dramBank3_valid_received(i) := !(vThread === UFix(i, 5)) && ((dramBank3_valid_received(i)) || (dramBank3Port.rep.valid && dramBank3Port.rep.tag === UFix(i, 5))) dramBank3PortHadValidRequest(i) := !(vThread === UFix(i,5)) && (dramBank3PortHadValidRequest(i) || (UFix(i,5)===rThread && dramBank3Port.req.valid)/*(dramBank3PortHadReadyRequest && AllOffloadsReady && (UFix(i,5) === rThread))*/) dramBank4_valid_received(i) := !(vThread === UFix(i, 5)) && ((dramBank4_valid_received(i)) || (dramBank4Port.rep.valid && dramBank4Port.rep.tag === UFix(i, 5))) dramBank4PortHadValidRequest(i) := !(vThread === UFix(i,5)) && (dramBank4PortHadValidRequest(i) || (UFix(i,5)===rThread && dramBank4Port.req.valid)/*(dramBank4PortHadReadyRequest && AllOffloadsReady && (UFix(i,5) === rThread))*/) dramBank5_valid_received(i) := !(vThread === UFix(i, 5)) && ((dramBank5_valid_received(i)) || (dramBank5Port.rep.valid && dramBank5Port.rep.tag === UFix(i, 5))) dramBank5PortHadValidRequest(i) := !(vThread === UFix(i,5)) && (dramBank5PortHadValidRequest(i) || (UFix(i,5)===rThread && dramBank5Port.req.valid)/*(dramBank5PortHadReadyRequest && AllOffloadsReady && (UFix(i,5) === rThread))*/) dramBank6_valid_received(i) := !(vThread === UFix(i, 5)) && ((dramBank6_valid_received(i)) || (dramBank6Port.rep.valid && dramBank6Port.rep.tag === UFix(i, 5))) dramBank6PortHadValidRequest(i) := !(vThread === UFix(i,5)) && (dramBank6PortHadValidRequest(i) || (UFix(i,5)===rThread && dramBank6Port.req.valid)/*(dramBank6PortHadReadyRequest && AllOffloadsReady && (UFix(i,5) === rThread))*/) dramBank7_valid_received(i) := !(vThread === UFix(i, 5)) && ((dramBank7_valid_received(i)) || (dramBank7Port.rep.valid && dramBank7Port.rep.tag === UFix(i, 5))) dramBank7PortHadValidRequest(i) := !(vThread === 
UFix(i,5)) && (dramBank7PortHadValidRequest(i) || (UFix(i,5)===rThread && dramBank7Port.req.valid)/*(dramBank7PortHadReadyRequest && AllOffloadsReady && (UFix(i,5) === rThread))*/) } val dramBank0PortReplyValue = Mux(dramBank0Port.rep.valid && (vThread === dramBank0Port.rep.tag), dramBank0Port.rep.bits, dramBank0PortReplyStorage(vThread)) when (dramBank0Port.rep.valid) { dramBank0PortReplyStorage(dramBank0Port.rep.tag) := dramBank0Port.rep.bits } val dramBank1PortReplyValue = Mux(dramBank1Port.rep.valid && (vThread === dramBank1Port.rep.tag), dramBank1Port.rep.bits, dramBank1PortReplyStorage(vThread)) when (dramBank1Port.rep.valid) { dramBank1PortReplyStorage(dramBank1Port.rep.tag) := dramBank1Port.rep.bits } val dramBank2PortReplyValue = Mux(dramBank2Port.rep.valid && (vThread === dramBank2Port.rep.tag), dramBank2Port.rep.bits, dramBank2PortReplyStorage(vThread)) when (dramBank2Port.rep.valid) { dramBank2PortReplyStorage(dramBank2Port.rep.tag) := dramBank2Port.rep.bits } val dramBank3PortReplyValue = Mux(dramBank3Port.rep.valid && (vThread === dramBank3Port.rep.tag), dramBank3Port.rep.bits, dramBank3PortReplyStorage(vThread)) when (dramBank3Port.rep.valid) { dramBank3PortReplyStorage(dramBank3Port.rep.tag) := dramBank3Port.rep.bits } val dramBank4PortReplyValue = Mux(dramBank4Port.rep.valid && (vThread === dramBank4Port.rep.tag), dramBank4Port.rep.bits, dramBank4PortReplyStorage(vThread)) when (dramBank4Port.rep.valid) { dramBank4PortReplyStorage(dramBank4Port.rep.tag) := dramBank4Port.rep.bits } val dramBank5PortReplyValue = Mux(dramBank5Port.rep.valid && (vThread === dramBank5Port.rep.tag), dramBank5Port.rep.bits, dramBank5PortReplyStorage(vThread)) when (dramBank5Port.rep.valid) { dramBank5PortReplyStorage(dramBank5Port.rep.tag) := dramBank5Port.rep.bits } val dramBank6PortReplyValue = Mux(dramBank6Port.rep.valid && (vThread === dramBank6Port.rep.tag), dramBank6Port.rep.bits, dramBank6PortReplyStorage(vThread)) when (dramBank6Port.rep.valid) { dramBank6PortReplyStorage(dramBank6Port.rep.tag) := dramBank6Port.rep.bits } val dramBank7PortReplyValue = Mux(dramBank7Port.rep.valid && (vThread === dramBank7Port.rep.tag), dramBank7Port.rep.bits, dramBank7PortReplyStorage(vThread)) when (dramBank7Port.rep.valid) { dramBank7PortReplyStorage(dramBank7Port.rep.tag) := dramBank7Port.rep.bits } /**************************************************************/ val b=bank(inputReg(vThread).addr) val r=row(inputReg(vThread).addr) val t0 = dramBank0PortReplyValue val t1 = dramBank1PortReplyValue val t2 = dramBank2PortReplyValue val t3 = dramBank3PortReplyValue val t4 = dramBank4PortReplyValue val t5 = dramBank5PortReplyValue val t6 = dramBank6PortReplyValue val t7 = dramBank7PortReplyValue dramBank2Port.req.tag := rThread dramBank2Port.req.valid := (rThread != NONE_SELECTED) && !dramBank2_valid_received(rThread) && ( (rThread != NONE_SELECTED && State(rThread) === GS_WAIT_BANK_2)) dramBank2Port.req.bits := MuxCase(UFix(0, 32),Seq( ((rThread != NONE_SELECTED && State(rThread) === GS_WAIT_BANK_2),UFix(0, width = 32)))) dramBank3Port.req.tag := rThread dramBank3Port.req.valid := (rThread != NONE_SELECTED) && !dramBank3_valid_received(rThread) && ( (rThread != NONE_SELECTED && State(rThread) === GS_WAIT_BANK_3)) dramBank3Port.req.bits := MuxCase(UFix(0, 32),Seq( ((rThread != NONE_SELECTED && State(rThread) === GS_WAIT_BANK_3),UFix(0, width = 32)))) dramBank4Port.req.tag := rThread dramBank4Port.req.valid := (rThread != NONE_SELECTED) && !dramBank4_valid_received(rThread) && ( (rThread != NONE_SELECTED && 
State(rThread) === GS_WAIT_BANK_4)) dramBank4Port.req.bits := MuxCase(UFix(0, 32),Seq( ((rThread != NONE_SELECTED && State(rThread) === GS_WAIT_BANK_4),UFix(0, width = 32)))) dramBank5Port.req.tag := rThread dramBank5Port.req.valid := (rThread != NONE_SELECTED) && !dramBank5_valid_received(rThread) && ( (rThread != NONE_SELECTED && State(rThread) === GS_WAIT_BANK_5)) dramBank5Port.req.bits := MuxCase(UFix(0, 32),Seq( ((rThread != NONE_SELECTED && State(rThread) === GS_WAIT_BANK_5),UFix(0, width = 32)))) dramBank0Port.req.tag := rThread dramBank0Port.req.valid := (rThread != NONE_SELECTED) && !dramBank0_valid_received(rThread) && ( (rThread != NONE_SELECTED && State(rThread) === GS_WAIT_BANK_0)) dramBank0Port.req.bits := MuxCase(UFix(0, 32),Seq( ((rThread != NONE_SELECTED && State(rThread) === GS_WAIT_BANK_0),UFix(0, width = 32)))) dramBank1Port.req.tag := rThread dramBank1Port.req.valid := (rThread != NONE_SELECTED) && !dramBank1_valid_received(rThread) && ( (rThread != NONE_SELECTED && State(rThread) === GS_WAIT_BANK_1)) dramBank1Port.req.bits := MuxCase(UFix(0, 32),Seq( ((rThread != NONE_SELECTED && State(rThread) === GS_WAIT_BANK_1),UFix(0, width = 32)))) dramBank7Port.req.tag := rThread dramBank7Port.req.valid := (rThread != NONE_SELECTED) && !dramBank7_valid_received(rThread) && ( (rThread != NONE_SELECTED && State(rThread) === GS_WAIT_BANK_7)) dramBank7Port.req.bits := MuxCase(UFix(0, 32),Seq( ((rThread != NONE_SELECTED && State(rThread) === GS_WAIT_BANK_7),UFix(0, width = 32)))) dramBank6Port.req.tag := rThread dramBank6Port.req.valid := (rThread != NONE_SELECTED) && !dramBank6_valid_received(rThread) && ( (rThread != NONE_SELECTED && State(rThread) === GS_WAIT_BANK_6)) dramBank6Port.req.bits := MuxCase(UFix(0, 32),Seq( ((rThread != NONE_SELECTED && State(rThread) === GS_WAIT_BANK_6),UFix(0, width = 32)))) when (sThread != NONE_SELECTED && io.in.valid) { inputReg(sThread) := io.in.bits inputTag(sThread) := io.in.tag rb0RowAddr(sThread) :=UFix(1, width = 32) rb1RowAddr(sThread) :=UFix(0, width = 32) rb2RowAddr(sThread) :=UFix(0, width = 32) rb3RowAddr(sThread) :=UFix(0, width = 32) rb4RowAddr(sThread) :=UFix(0, width = 32) rb5RowAddr(sThread) :=UFix(0, width = 32) rb6RowAddr(sThread) :=UFix(0, width = 32) rb7RowAddr(sThread) :=UFix(0, width = 32) State(sThread) := GS_CHECK_RB_HIT } when (rThread != NONE_SELECTED && State(rThread) === WaitForOutputReady && io.out.ready) { State(rThread) := EmitReturnState(rThread) } when (vThread != NONE_SELECTED &&State(vThread) === GS_CHECK_RB_HIT){ bankAddr(vThread):=b rowAddr(vThread):=r when (b=== UFix(0, width = 32)) { when (rb0RowAddr(vThread)=== r) { State(vThread):=GS_FINISH } .otherwise { rb0RowAddr(vThread):=r State(vThread):=GS_WAIT_BANK_0 } } .otherwise { when (b=== UFix(1, width = 32)) { when (rb1RowAddr(vThread)=== r) { State(vThread):=GS_FINISH } .otherwise { rb1RowAddr(vThread):=r State(vThread):=GS_WAIT_BANK_1 } } .otherwise { when (b=== UFix(2, width = 32)) { when (rb2RowAddr(vThread)=== r) { State(vThread):=GS_FINISH } .otherwise { rb2RowAddr(vThread):=r State(vThread):=GS_WAIT_BANK_2 } } .otherwise { when (b=== UFix(3, width = 32)) { when (rb3RowAddr(vThread)=== r) { State(vThread):=GS_FINISH } .otherwise { rb3RowAddr(vThread):=r State(vThread):=GS_WAIT_BANK_3 } } .otherwise { when (b=== UFix(4, width = 32)) { when (rb4RowAddr(vThread)=== r) { State(vThread):=GS_FINISH } .otherwise { rb4RowAddr(vThread):=r State(vThread):=GS_WAIT_BANK_4 } } .otherwise { when (b=== UFix(5, width = 32)) { when (rb5RowAddr(vThread)=== r) { 
State(vThread):=GS_FINISH } .otherwise { rb5RowAddr(vThread):=r State(vThread):=GS_WAIT_BANK_5 } } .otherwise { when (b=== UFix(6, width = 32)) { when (rb6RowAddr(vThread)=== r) { State(vThread):=GS_FINISH } .otherwise { rb6RowAddr(vThread):=r State(vThread):=GS_WAIT_BANK_6 } } .otherwise { when (b=== UFix(7, width = 32)) { when (rb7RowAddr(vThread)=== r) { State(vThread):=GS_FINISH } .otherwise { rb7RowAddr(vThread):=r State(vThread):=GS_WAIT_BANK_7 } } .otherwise { State(vThread):=GS_FINISH } } } } } } } } } when (vThread != NONE_SELECTED &&State(vThread) === GS_WAIT_BANK_0){ EmitReturnState(vThread) := WaitForInputValid State(vThread) := WaitForOutputReady } when (vThread != NONE_SELECTED &&State(vThread) === GS_WAIT_BANK_1){ EmitReturnState(vThread) := WaitForInputValid State(vThread) := WaitForOutputReady } when (vThread != NONE_SELECTED &&State(vThread) === GS_WAIT_BANK_2){ EmitReturnState(vThread) := WaitForInputValid State(vThread) := WaitForOutputReady } when (vThread != NONE_SELECTED &&State(vThread) === GS_WAIT_BANK_3){ EmitReturnState(vThread) := WaitForInputValid State(vThread) := WaitForOutputReady } when (vThread != NONE_SELECTED &&State(vThread) === GS_WAIT_BANK_4){ EmitReturnState(vThread) := WaitForInputValid State(vThread) := WaitForOutputReady } when (vThread != NONE_SELECTED &&State(vThread) === GS_WAIT_BANK_5){ EmitReturnState(vThread) := WaitForInputValid State(vThread) := WaitForOutputReady } when (vThread != NONE_SELECTED &&State(vThread) === GS_WAIT_BANK_6){ EmitReturnState(vThread) := WaitForInputValid State(vThread) := WaitForOutputReady } when (vThread != NONE_SELECTED &&State(vThread) === GS_WAIT_BANK_7){ EmitReturnState(vThread) := WaitForInputValid State(vThread) := WaitForOutputReady } when (vThread != NONE_SELECTED &&State(vThread) === GS_FINISH){ EmitReturnState(vThread) := WaitForInputValid State(vThread) := WaitForOutputReady } io.out.tag := inputTag(rThread) io.out.bits := outputReg(rThread) io.out.valid := rThread != NONE_SELECTED && State(rThread) === WaitForOutputReady io.in.ready := sThread != NONE_SELECTED /******************Engine specific performance counters************************/ val IsPcReset = io.pcIn.valid && io.pcIn.bits.request && io.pcIn.bits.pcType === Pcounters.pcReset var portId = 3 when (IsPcReset) { engineUtilization := UFix(0, Pcounters.PCWIDTH) } .otherwise { when (State(0) != WaitForInputValid && State(0) != WaitForOutputReady && !pcPaused) { engineUtilization := engineUtilization + UFix(1, Pcounters.PCWIDTH) } } for ((n, i) <- ioOff.elements) { if (n == "dramBank0") { when (IsPcReset) { offloadRateArray(portId-3) := UFix(0, Pcounters.PCWIDTH) } .elsewhen (/*i.asInstanceOf[gOffBundle[Bundle, Bundle]].req.ready &&*/ (dramBank0PortHadValidRequest(0) /*|| dramBank0Port.req.valid*/) && !pcPaused) { offloadRateArray(portId-3) := offloadRateArray(portId-3) + UFix(1, Pcounters.PCWIDTH) } } if (n == "dramBank1") { when (IsPcReset) { offloadRateArray(portId-3) := UFix(0, Pcounters.PCWIDTH) } .elsewhen (/*i.asInstanceOf[gOffBundle[Bundle, Bundle]].req.ready &&*/ (dramBank1PortHadValidRequest(0) /*|| dramBank1Port.req.valid*/) && !pcPaused) { offloadRateArray(portId-3) := offloadRateArray(portId-3) + UFix(1, Pcounters.PCWIDTH) } } if (n == "dramBank2") { when (IsPcReset) { offloadRateArray(portId-3) := UFix(0, Pcounters.PCWIDTH) } .elsewhen (/*i.asInstanceOf[gOffBundle[Bundle, Bundle]].req.ready &&*/ (dramBank2PortHadValidRequest(0) /*|| dramBank2Port.req.valid*/) && !pcPaused) { offloadRateArray(portId-3) := offloadRateArray(portId-3) + 
UFix(1, Pcounters.PCWIDTH) } } if (n == "dramBank3") { when (IsPcReset) { offloadRateArray(portId-3) := UFix(0, Pcounters.PCWIDTH) } .elsewhen (/*i.asInstanceOf[gOffBundle[Bundle, Bundle]].req.ready &&*/ (dramBank3PortHadValidRequest(0) /*|| dramBank3Port.req.valid*/) && !pcPaused) { offloadRateArray(portId-3) := offloadRateArray(portId-3) + UFix(1, Pcounters.PCWIDTH) } } if (n == "dramBank4") { when (IsPcReset) { offloadRateArray(portId-3) := UFix(0, Pcounters.PCWIDTH) } .elsewhen (/*i.asInstanceOf[gOffBundle[Bundle, Bundle]].req.ready &&*/ (dramBank4PortHadValidRequest(0) /*|| dramBank4Port.req.valid*/) && !pcPaused) { offloadRateArray(portId-3) := offloadRateArray(portId-3) + UFix(1, Pcounters.PCWIDTH) } } if (n == "dramBank5") { when (IsPcReset) { offloadRateArray(portId-3) := UFix(0, Pcounters.PCWIDTH) } .elsewhen (/*i.asInstanceOf[gOffBundle[Bundle, Bundle]].req.ready &&*/ (dramBank5PortHadValidRequest(0) /*|| dramBank5Port.req.valid*/) && !pcPaused) { offloadRateArray(portId-3) := offloadRateArray(portId-3) + UFix(1, Pcounters.PCWIDTH) } } if (n == "dramBank6") { when (IsPcReset) { offloadRateArray(portId-3) := UFix(0, Pcounters.PCWIDTH) } .elsewhen (/*i.asInstanceOf[gOffBundle[Bundle, Bundle]].req.ready &&*/ (dramBank6PortHadValidRequest(0) /*|| dramBank6Port.req.valid*/) && !pcPaused) { offloadRateArray(portId-3) := offloadRateArray(portId-3) + UFix(1, Pcounters.PCWIDTH) } } if (n == "dramBank7") { when (IsPcReset) { offloadRateArray(portId-3) := UFix(0, Pcounters.PCWIDTH) } .elsewhen (/*i.asInstanceOf[gOffBundle[Bundle, Bundle]].req.ready &&*/ (dramBank7PortHadValidRequest(0) /*|| dramBank7Port.req.valid*/) && !pcPaused) { offloadRateArray(portId-3) := offloadRateArray(portId-3) + UFix(1, Pcounters.PCWIDTH) } } portId = portId + 1 } when (IsPcReset) { offloadRateOverall := UFix(0, Pcounters.PCWIDTH) } .elsewhen (!AllOffloadsValid(0) && (subStateTh(0) === WaitForValid) && !pcPaused) { offloadRateOverall := offloadRateOverall + UFix(1, Pcounters.PCWIDTH) } }
seyedmaysamlavasani/GorillaPP
apps/pageRank/build/dram.scala
Scala
bsd-3-clause
32,018
package string_formats.yaml import de.zalando.play.controllers._ import org.scalacheck._ import org.scalacheck.Arbitrary._ import org.scalacheck.Prop._ import org.scalacheck.Test._ import org.specs2.mutable._ import play.api.test.Helpers._ import play.api.test._ import play.api.mvc.MultipartFormData.FilePart import play.api.mvc._ import org.junit.runner.RunWith import org.specs2.runner.JUnitRunner import java.net.URLEncoder import com.fasterxml.jackson.databind.ObjectMapper import play.api.http.Writeable import play.api.libs.Files.TemporaryFile import play.api.test.Helpers.{status => requestStatusCode_} import play.api.test.Helpers.{contentAsString => requestContentAsString_} import play.api.test.Helpers.{contentType => requestContentType_} import de.zalando.play.controllers.Base64String import Base64String._ import de.zalando.play.controllers.BinaryString import BinaryString._ import org.joda.time.DateTime import java.util.UUID import org.joda.time.LocalDate import Generators._ @RunWith(classOf[JUnitRunner]) class String_formats_yamlSpec extends Specification { def toPath[T](value: T)(implicit binder: PathBindable[T]): String = Option(binder.unbind("", value)).getOrElse("") def toQuery[T](key: String, value: T)(implicit binder: QueryStringBindable[T]): String = Option(binder.unbind(key, value)).getOrElse("") def toHeader[T](value: T)(implicit binder: PathBindable[T]): String = Option(binder.unbind("", value)).getOrElse("") def checkResult(props: Prop) = Test.check(Test.Parameters.default, props).status match { case Failed(args, labels) => val failureMsg = labels.mkString("\n") + " given args: " + args.map(_.arg).mkString("'", "', '","'") failure(failureMsg) case Proved(_) | Exhausted | Passed => success case PropException(_, e, labels) => val error = if (labels.isEmpty) e.getLocalizedMessage() else labels.mkString("\n") failure(error) } private def parserConstructor(mimeType: String) = PlayBodyParsing.jacksonMapper(mimeType) def parseResponseContent[T](mapper: ObjectMapper, content: String, mimeType: Option[String], expectedType: Class[T]) = mapper.readValue(content, expectedType) "GET /" should { def testInvalidInput(input: (GetDate_time, GetDate, GetBase64, GetUuid, BinaryString)) = { val (date_time, date, base64, uuid, petId) = input val url = s"""/?${toQuery("date_time", date_time)}&${toQuery("date", date)}&${toQuery("base64", base64)}&${toQuery("uuid", uuid)}""" val contentTypes: Seq[String] = Seq() val acceptHeaders: Seq[String] = Seq( "application/json", "application/yaml" ) val contentHeaders = for { ct <- contentTypes; ac <- acceptHeaders } yield (ac, ct) val propertyList = contentHeaders.map { case (acceptHeader, contentType) => val headers = Seq() :+ ("Accept" -> acceptHeader) :+ ("Content-Type" -> contentType) val parsed_petId = PlayBodyParsing.jacksonMapper("application/json").writeValueAsString(petId) val request = FakeRequest(GET, url).withHeaders(headers:_*).withBody(parsed_petId) val path = if (contentType == "multipart/form-data") { import de.zalando.play.controllers.WriteableWrapper.anyContentAsMultipartFormWritable val files: Seq[FilePart[TemporaryFile]] = Nil val data = Map.empty[String, Seq[String]] val form = new MultipartFormData(data, files, Nil, Nil) route(request.withMultipartFormDataBody(form)).get } else if (contentType == "application/x-www-form-urlencoded") { val form = Nil route(request.withFormUrlEncodedBody(form:_*)).get } else route(request).get val errors = new GetValidator(date_time, date, base64, uuid, petId).errors lazy val validations = errors
flatMap { _.messages } map { m => s"Contains error: $m in ${contentAsString(path)}" |:(contentAsString(path).contains(m) ?= true) } (s"given 'Content-Type' [$contentType], 'Accept' header [$acceptHeader] and URL: [$url]" + "and body [" + parsed_petId + "]") |: all( "StatusCode = BAD_REQUEST" |: (requestStatusCode_(path) ?= BAD_REQUEST), s"Content-Type = $acceptHeader" |: (requestContentType_(path) ?= Some(acceptHeader)), "non-empty errors" |: (errors.nonEmpty ?= true), "at least one validation failing" |: atLeastOne(validations:_*) ) } if (propertyList.isEmpty) throw new IllegalStateException(s"No 'produces' defined for the $url") propertyList.reduce(_ && _) } def testValidInput(input: (GetDate_time, GetDate, GetBase64, GetUuid, BinaryString)) = { val (date_time, date, base64, uuid, petId) = input val parsed_petId = parserConstructor("application/json").writeValueAsString(petId) val url = s"""/?${toQuery("date_time", date_time)}&${toQuery("date", date)}&${toQuery("base64", base64)}&${toQuery("uuid", uuid)}""" val contentTypes: Seq[String] = Seq() val acceptHeaders: Seq[String] = Seq( "application/json", "application/yaml" ) val contentHeaders = for { ct <- contentTypes; ac <- acceptHeaders } yield (ac, ct) val propertyList = contentHeaders.map { case (acceptHeader, contentType) => val headers = Seq() :+ ("Accept" -> acceptHeader) :+ ("Content-Type" -> contentType) val request = FakeRequest(GET, url).withHeaders(headers:_*).withBody(parsed_petId) val path = if (contentType == "multipart/form-data") { import de.zalando.play.controllers.WriteableWrapper.anyContentAsMultipartFormWritable val files: Seq[FilePart[TemporaryFile]] = Nil val data = Map.empty[String, Seq[String]] val form = new MultipartFormData(data, files, Nil, Nil) route(request.withMultipartFormDataBody(form)).get } else if (contentType == "application/x-www-form-urlencoded") { val form = Nil route(request.withFormUrlEncodedBody(form:_*)).get } else route(request).get val errors = new GetValidator(date_time, date, base64, uuid, petId).errors val possibleResponseTypes: Map[Int,Class[_ <: Any]] = Map( 200 -> classOf[Null] ) val expectedCode = requestStatusCode_(path) val mimeType = requestContentType_(path) val mapper = parserConstructor(mimeType.getOrElse("application/json")) val parsedApiResponse = scala.util.Try { parseResponseContent(mapper, requestContentAsString_(path), mimeType, possibleResponseTypes(expectedCode)) } (s"Given response code [$expectedCode], 'Content-Type' [$contentType], 'Accept' header [$acceptHeader] and URL: [$url]" + "and body [" + parsed_petId + "]") |: all( "Response Code is allowed" |: (possibleResponseTypes.contains(expectedCode) ?= true), "Successful" |: (parsedApiResponse.isSuccess ?= true), s"Content-Type = $acceptHeader" |: (requestContentType_(path) ?= Some(acceptHeader)), "No errors" |: (errors.isEmpty ?= true) ) } if (propertyList.isEmpty) throw new IllegalStateException(s"No 'produces' defined for the $url") propertyList.reduce(_ && _) } "discard invalid data" in new WithApplication { val genInputs = for { date_time <- GetDate_timeGenerator date <- GetDateGenerator base64 <- GetBase64Generator uuid <- GetUuidGenerator petId <- BinaryStringGenerator } yield (date_time, date, base64, uuid, petId) val inputs = genInputs suchThat { case (date_time, date, base64, uuid, petId) => new GetValidator(date_time, date, base64, uuid, petId).errors.nonEmpty } val props = forAll(inputs) { i => testInvalidInput(i) } checkResult(props) } "do something with valid data" in new WithApplication { val genInputs = 
for { date_time <- GetDate_timeGenerator date <- GetDateGenerator base64 <- GetBase64Generator uuid <- GetUuidGenerator petId <- BinaryStringGenerator } yield (date_time, date, base64, uuid, petId) val inputs = genInputs suchThat { case (date_time, date, base64, uuid, petId) => new GetValidator(date_time, date, base64, uuid, petId).errors.isEmpty } val props = forAll(inputs) { i => testValidInput(i) } checkResult(props) } } }
zalando/play-swagger
play-scala-generator/src/test/resources/expected_results/tests/string_formats_yaml.scala
Scala
mit
9,666
package org.sisioh.aws4s.dynamodb.extension

import com.amazonaws.services.{ dynamodbv2 => aws }
import org.sisioh.aws4s.dynamodb.Implicits._

case class LocalSecondaryIndex(underlying: aws.model.LocalSecondaryIndexDescription) {

  val indexNameOpt: Option[String] = underlying.indexNameOpt

  val keySchemaOpt: Option[Seq[KeySchemaElement]] =
    underlying.keySchemaOpt.map(_.map(KeySchemaElement(_)))

  val projectionOpt: Option[Projection] = underlying.projectionOpt.map(Projection)

  val indexSizeBytesOpt: Option[Long] = underlying.indexSizeBytesOpt

  val itemCountOpt: Option[Long] = underlying.itemCountOpt

  def toLocalSecondaryIndex: aws.model.LocalSecondaryIndex =
    new aws.model.LocalSecondaryIndex()
      .withIndexName(underlying.getIndexName)
      .withKeySchema(underlying.getKeySchema)
      .withProjection(underlying.getProjection)
}
sisioh/aws4s
aws4s-dynamodb-ext/src/main/scala/org/sisioh/aws4s/dynamodb/extension/LocalSecondaryIndex.scala
Scala
mit
868
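A minimal round-trip sketch; the description would come from elsewhere (for instance a DescribeTable result), so it is left as a parameter here:

import com.amazonaws.services.{dynamodbv2 => aws}
import org.sisioh.aws4s.dynamodb.extension.LocalSecondaryIndex

object LsiSketch {
  def summarize(description: aws.model.LocalSecondaryIndexDescription): aws.model.LocalSecondaryIndex = {
    val lsi = LocalSecondaryIndex(description)
    lsi.indexNameOpt.foreach(name => println(s"index: $name"))
    lsi.itemCountOpt.foreach(count => println(s"items: $count"))
    // back to the request-side model, e.g. when recreating a table
    lsi.toLocalSecondaryIndex
  }
}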
package com.chaos.pingplusplus.model

import com.google.gson.Gson

/**
 * Created by zcfrank1st on 11/17/14.
 */
object Notify {

  class InnerObj {
    var obj: String = _
  }

  /**
   * Parses a webhook notification into a [[Charge]] or [[Refund]] according to
   * its "obj" discriminator, or returns null for missing/unknown types.
   */
  def parseNotify(notifyJson: String): Any = {
    val innerObj = new Gson().fromJson(notifyJson, classOf[InnerObj])
    if (innerObj == null || innerObj.obj == null || innerObj.obj.isEmpty) null
    else innerObj.obj match {
      case "charge" => new Gson().fromJson(notifyJson, classOf[Charge])
      case "refund" => new Gson().fromJson(notifyJson, classOf[Refund])
      case _        => null
    }
  }
}
zcfrank1st/chaos-pingapp-scala
src/main/scala/com/chaos/pingplusplus/model/Notify.scala
Scala
mit
648
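Since parseNotify returns Any, callers dispatch on the runtime type. A minimal sketch, where json stands for the raw webhook body:

object NotifyHandler {
  def handle(json: String): Unit =
    Notify.parseNotify(json) match {
      case _: Charge => // a charge succeeded: acknowledge and fulfil the order
      case _: Refund => // a refund completed: update the order's state
      case _         => // unknown "obj" value or unparsable payload (null lands here)
    }
}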
package scaladex.infra.sql import doobie._ import scaladex.core.model.Project import scaladex.infra.sql.DoobieUtils.Mappings._ import scaladex.infra.sql.DoobieUtils._ object ProjectSettingsTable { val table: String = "project_settings" val referenceFields: Seq[String] = Seq("organization", "repository") val settingsFields: Seq[String] = Seq( "default_stable_version", "default_artifact", "strict_versions", "custom_scaladoc", "documentation_links", "deprecated", "contributors_wanted", "artifact_deprecations", "cli_artifacts", "category", "beginner_issues_label" ) val insertOrUpdate: Update[(Project.Reference, Project.Settings, Project.Settings)] = insertOrUpdateRequest(table, referenceFields ++ settingsFields, referenceFields, settingsFields) val count: Query0[Long] = selectRequest(table, Seq("COUNT(*)")) }
scalacenter/scaladex
modules/infra/src/main/scala/scaladex/infra/sql/ProjectSettingsTable.scala
Scala
bsd-3-clause
886
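A minimal sketch of running the upsert, assuming a doobie Transactor over cats-effect IO; the same settings value appears twice because the statement takes the insert values and the on-conflict update values separately:

import cats.effect.IO
import doobie.Transactor
import doobie.implicits._
import scaladex.core.model.Project
import scaladex.infra.sql.ProjectSettingsTable

object SettingsRepoSketch {
  def save(ref: Project.Reference, settings: Project.Settings, xa: Transactor[IO]): IO[Int] =
    ProjectSettingsTable.insertOrUpdate
      .run((ref, settings, settings)) // ConnectionIO[Int]: affected row count
      .transact(xa)
}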
package ch.ninecode.model

import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.Serializer
import com.esotericsoftware.kryo.io.Input
import com.esotericsoftware.kryo.io.Output
import org.apache.spark.sql.Row

import ch.ninecode.cim.CIMClassInfo
import ch.ninecode.cim.CIMContext
import ch.ninecode.cim.CIMParseable
import ch.ninecode.cim.CIMRelationship
import ch.ninecode.cim.CIMSerializer

/**
 * Examples would be "Boundary" or "Region" type of frame.
 *
 * @param IdentifiedObject [[ch.ninecode.model.IdentifiedObject IdentifiedObject]] Reference to the superclass object.
 * @param ModelFrame [[ch.ninecode.model.FrameworkPart FrameworkPart]] Model frames of the model frame type.
 * @group unused
 * @groupname unused Package unused
 */
final case class ModelFrameType
(
    IdentifiedObject: IdentifiedObject = null,
    ModelFrame: List[String] = null
)
extends Element
{
    /**
     * Return the superclass object.
     *
     * @return The typed superclass nested object.
     * @group Hierarchy
     * @groupname Hierarchy Class Hierarchy Related
     * @groupdesc Hierarchy Members related to the nested hierarchy of CIM classes.
     */
    override def sup: IdentifiedObject = IdentifiedObject

    //
    // Row overrides
    //

    /**
     * Return a copy of this object as a Row.
     *
     * Creates a clone of this object for use in Row manipulations.
     *
     * @return The copy of the object.
     * @group Row
     * @groupname Row SQL Row Implementation
     * @groupdesc Row Members related to implementing the SQL Row interface
     */
    override def copy (): Row =
    {
        clone().asInstanceOf[Row]
    }

    override def export_fields: String =
    {
        implicit val s: StringBuilder = new StringBuilder(sup.export_fields)
        implicit val clz: String = ModelFrameType.cls
        def emitattrs (position: Int, value: List[String]): Unit = if (mask(position) && (null != value)) value.foreach(x => emit_attribute(ModelFrameType.fields(position), x))
        emitattrs(0, ModelFrame)
        s.toString
    }

    override def export: String =
    {
        "\t<cim:ModelFrameType rdf:%s=\"%s\">\n%s\t</cim:ModelFrameType>".format(if (about) "about" else "ID", id, export_fields)
    }
}

object ModelFrameType
extends CIMParseable[ModelFrameType]
{
    override val fields: Array[String] = Array[String](
        "ModelFrame"
    )
    override val relations: List[CIMRelationship] = List(
        CIMRelationship("ModelFrame", "FrameworkPart", "0..*", "1")
    )
    val ModelFrame: FielderMultiple = parse_attributes(attribute(cls, fields(0)))

    def parse (context: CIMContext): ModelFrameType =
    {
        implicit val ctx: CIMContext = context
        implicit val bitfields: Array[Int] = Array(0)
        val ret = ModelFrameType(
            IdentifiedObject.parse(context),
            masks(ModelFrame(), 0)
        )
        ret.bitfields = bitfields
        ret
    }

    def serializer: Serializer[ModelFrameType] = ModelFrameTypeSerializer
}

object ModelFrameTypeSerializer extends CIMSerializer[ModelFrameType]
{
    def write (kryo: Kryo, output: Output, obj: ModelFrameType): Unit =
    {
        val toSerialize: Array[() => Unit] = Array(
            () => writeList(obj.ModelFrame, output)
        )
        IdentifiedObjectSerializer.write(kryo, output, obj.sup)
        implicit val bitfields: Array[Int] = obj.bitfields
        writeBitfields(output)
        writeFields(toSerialize)
    }

    def read (kryo: Kryo, input: Input, cls: Class[ModelFrameType]): ModelFrameType =
    {
        val parent = IdentifiedObjectSerializer.read(kryo, input, classOf[IdentifiedObject])
        implicit val bitfields: Array[Int] = readBitfields(input)
        val obj = ModelFrameType(
            parent,
            if (isSet(0)) readList(input) else null
        )
        obj.bitfields = bitfields
        obj
    }
}

private[ninecode] object _unused
{
    def register: List[CIMClassInfo] =
    {
        List(
            ModelFrameType.register
        )
    }
}
derrickoswald/CIMScala
CIMReader/src/main/scala/ch/ninecode/model/unused.scala
Scala
mit
4,091
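The ModelFrameType companion above leans on CIMParseable's bitfield masking: bit i of a bitfields array records whether field i was present when parsing, and export/serialization then only emit the fields whose bit is set. A minimal, self-contained sketch of that masking arithmetic follows; all names here are illustrative and not part of the CIMReader API.

object BitfieldSketch {
  // Bit `position` lives in Int `position / 32`, at offset `position % 32`.
  def set(bitfields: Array[Int], position: Int): Unit =
    bitfields(position / 32) |= (1 << (position % 32))

  def isSet(bitfields: Array[Int], position: Int): Boolean =
    (bitfields(position / 32) & (1 << (position % 32))) != 0

  def main(args: Array[String]): Unit = {
    val bitfields = Array(0)     // one Int covers field positions 0..31
    set(bitfields, 0)            // mark field 0 (e.g. ModelFrame) as parsed
    println(isSet(bitfields, 0)) // true  -> field would be exported/serialized
    println(isSet(bitfields, 1)) // false -> field would be skipped
  }
}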
/*********************************************************************** * Copyright (c) 2013-2016 Commonwealth Computer Research, Inc. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Apache License, Version 2.0 * which accompanies this distribution and is available at * http://www.opensource.org/licenses/apache2.0.php. *************************************************************************/ package org.locationtech.geomesa.accumulo.tools.data import com.beust.jcommander.{Parameter, Parameters} import org.locationtech.geomesa.accumulo.tools.data.AddAttributeIndexCommand.AddAttributeIndexParams import org.locationtech.geomesa.accumulo.tools.{AccumuloDataStoreCommand, AccumuloDataStoreParams} import org.locationtech.geomesa.jobs.accumulo.index.{AttributeIndexArgs, AttributeIndexJob} import org.locationtech.geomesa.tools.{Command, RequiredAttributesParam, RequiredTypeNameParam} class AddAttributeIndexCommand extends AccumuloDataStoreCommand { override val name = "add-attribute-index" override val params = new AddAttributeIndexParams override def execute(): Unit = { // We instantiate the class at runtime to avoid classpath dependencies from commands that are not being used. new AddAttributeIndexCommandExecutor(params).run() } } object AddAttributeIndexCommand { @Parameters(commandDescription = "Run a Hadoop map reduce job to add an index for attributes") class AddAttributeIndexParams extends AccumuloDataStoreParams with RequiredTypeNameParam with RequiredAttributesParam { @Parameter(names = Array("--coverage"), description = "Type of index (join or full)", required = true) var coverage: String = null } } class AddAttributeIndexCommandExecutor(params: AddAttributeIndexParams) extends Runnable { override def run(): Unit = { // Imported here to avoid classpath issues when generating the autocomplete script import org.apache.hadoop.util.ToolRunner try { val args = new AttributeIndexArgs(Array.empty) args.inZookeepers = params.zookeepers args.inInstanceId = params.instance args.inUser = params.user args.inPassword = params.password args.inTableName = params.catalog args.inFeature = params.featureName args.coverage = params.coverage args.attributes.addAll(params.attributes) Command.user.info(s"Running map reduce index job for attributes: ${params.attributes} with coverage: ${params.coverage}...") val result = ToolRunner.run(new AttributeIndexJob(), args.unparse()) if (result == 0) { Command.user.info("Add attribute index command finished successfully.") } else { Command.user.error("Error encountered running attribute index command. Check hadoop's job history logs for more information.") } } catch { case e: Exception => Command.user.error(s"Exception encountered running attribute index command. " + s"Check hadoop's job history logs for more information if necessary: " + e.getMessage, e) } } }
MutahirKazmi/geomesa
geomesa-accumulo/geomesa-accumulo-tools/src/main/scala/org/locationtech/geomesa/accumulo/tools/data/AddAttributeIndexCommand.scala
Scala
apache-2.0
3,110
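AddAttributeIndexCommand above deliberately defers all heavyweight work to a separately instantiated Runnable, so that merely loading the command class (for example while generating the autocomplete script) pulls in no Hadoop dependencies. A hedged sketch of that deferred-instantiation pattern, using purely hypothetical names:

// Sketch only: LightCommand and HeavyExecutor are illustrative, not GeoMesa API.
class LightCommand {
  val name = "example-command"
  // Nothing classpath-heavy is touched until execute() is actually called.
  def execute(): Unit = new HeavyExecutor(name).run()
}

class HeavyExecutor(jobName: String) extends Runnable {
  override def run(): Unit = {
    // In the real pattern, imports of classpath-heavy modules are scoped
    // inside this method so they resolve only when the job truly runs.
    println(s"running $jobName ...")
  }
}

object DeferredInstantiationSketch {
  def main(args: Array[String]): Unit = new LightCommand().execute()
}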
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.streaming import java.io.{InterruptedIOException, IOException, UncheckedIOException} import java.nio.channels.ClosedByInterruptException import java.util.UUID import java.util.concurrent.{CountDownLatch, ExecutionException, TimeUnit} import java.util.concurrent.atomic.AtomicReference import java.util.concurrent.locks.{Condition, ReentrantLock} import scala.collection.JavaConverters._ import scala.collection.mutable.{Map => MutableMap} import scala.util.control.NonFatal import com.google.common.util.concurrent.UncheckedExecutionException import org.apache.hadoop.fs.Path import org.apache.spark.{SparkContext, SparkException} import org.apache.spark.internal.Logging import org.apache.spark.sql._ import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan import org.apache.spark.sql.catalyst.streaming.InternalOutputModes._ import org.apache.spark.sql.execution.QueryExecution import org.apache.spark.sql.execution.command.StreamingExplainCommand import org.apache.spark.sql.execution.datasources.v2.StreamWriterCommitProgress import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.sources.v2.SupportsStreamingWrite import org.apache.spark.sql.sources.v2.writer.SupportsTruncate import org.apache.spark.sql.sources.v2.writer.streaming.StreamingWrite import org.apache.spark.sql.streaming._ import org.apache.spark.sql.util.CaseInsensitiveStringMap import org.apache.spark.util.{Clock, UninterruptibleThread, Utils} /** States for [[StreamExecution]]'s lifecycle. */ trait State case object INITIALIZING extends State case object ACTIVE extends State case object TERMINATED extends State case object RECONFIGURING extends State /** * Manages the execution of a streaming Spark SQL query that is occurring in a separate thread. * Unlike a standard query, a streaming query executes repeatedly each time new data arrives at any * [[Source]] present in the query plan. Whenever new data arrives, a [[QueryExecution]] is created * and the results are committed transactionally to the given [[Sink]]. * * @param deleteCheckpointOnStop whether to delete the checkpoint if the query is stopped without * errors. Checkpoint deletion can be forced with the appropriate * Spark configuration. 
 */
abstract class StreamExecution(
    override val sparkSession: SparkSession,
    override val name: String,
    private val checkpointRoot: String,
    analyzedPlan: LogicalPlan,
    val sink: BaseStreamingSink,
    val trigger: Trigger,
    val triggerClock: Clock,
    val outputMode: OutputMode,
    deleteCheckpointOnStop: Boolean)
  extends StreamingQuery with ProgressReporter with Logging {

  import org.apache.spark.sql.streaming.StreamingQueryListener._

  protected val pollingDelayMs: Long = sparkSession.sessionState.conf.streamingPollingDelay

  protected val minLogEntriesToMaintain: Int = sparkSession.sessionState.conf.minBatchesToRetain
  require(minLogEntriesToMaintain > 0, "minBatchesToRetain has to be positive")

  /**
   * A lock used to wait/notify when batches complete. Use a fair lock to avoid thread starvation.
   */
  protected val awaitProgressLock = new ReentrantLock(true)
  protected val awaitProgressLockCondition = awaitProgressLock.newCondition()

  private val initializationLatch = new CountDownLatch(1)
  private val startLatch = new CountDownLatch(1)
  private val terminationLatch = new CountDownLatch(1)

  val resolvedCheckpointRoot = {
    val checkpointPath = new Path(checkpointRoot)
    val fs = checkpointPath.getFileSystem(sparkSession.sessionState.newHadoopConf())
    if (sparkSession.conf.get(SQLConf.STREAMING_CHECKPOINT_ESCAPED_PATH_CHECK_ENABLED)
        && StreamExecution.containsSpecialCharsInPath(checkpointPath)) {
      // In Spark 2.4 and earlier, the checkpoint path is escaped 3 times (3 `Path.toUri.toString`
      // calls). If this legacy checkpoint path exists, we will throw an error to tell the user how
      // to migrate.
      val legacyCheckpointDir =
        new Path(new Path(checkpointPath.toUri.toString).toUri.toString).toUri.toString
      val legacyCheckpointDirExists =
        try {
          fs.exists(new Path(legacyCheckpointDir))
        } catch {
          case NonFatal(e) =>
            // We may not have access to this directory. Don't fail the query if that happens.
            logWarning(e.getMessage, e)
            false
        }
      if (legacyCheckpointDirExists) {
        throw new SparkException(
          s"""Error: we detected a possible problem with the location of your checkpoint and you
             |likely need to move it before restarting this query.
             |
             |Earlier versions of Spark incorrectly escaped paths when writing out checkpoints for
             |structured streaming. While this was corrected in Spark 3.0, it appears that your
             |query was started using an earlier version that incorrectly handled the checkpoint
             |path.
             |
             |Correct Checkpoint Directory: $checkpointPath
             |Incorrect Checkpoint Directory: $legacyCheckpointDir
             |
             |Please move the data from the incorrect directory to the correct one, delete the
             |incorrect directory, and then restart this query. If you believe you are receiving
             |this message in error, you can disable it with the SQL conf
             |${SQLConf.STREAMING_CHECKPOINT_ESCAPED_PATH_CHECK_ENABLED.key}."""
            .stripMargin)
      }
    }
    val checkpointDir = checkpointPath.makeQualified(fs.getUri, fs.getWorkingDirectory)
    fs.mkdirs(checkpointDir)
    checkpointDir.toString
  }
  logInfo(s"Checkpoint root $checkpointRoot resolved to $resolvedCheckpointRoot.")

  def logicalPlan: LogicalPlan

  /**
   * Tracks how much data we have processed and committed to the sink or state store from each
   * input source.
   * Only the scheduler thread should modify this field, and only in atomic steps.
   * Other threads should make a shallow copy if they are going to access this field more than
   * once, since the field's value may change at any time.
   */
  @volatile
  var committedOffsets = new StreamProgress

  /**
   * Tracks the offsets that are available to be processed, but have not yet been committed to
   * the sink.
   * Only the scheduler thread should modify this field, and only in atomic steps.
   * Other threads should make a shallow copy if they are going to access this field more than
   * once, since the field's value may change at any time.
   */
  @volatile
  var availableOffsets = new StreamProgress

  @volatile
  var sinkCommitProgress: Option[StreamWriterCommitProgress] = None

  /** The current batchId or -1 if execution has not yet been initialized. */
  protected var currentBatchId: Long = -1

  /** Metadata associated with the whole query */
  protected val streamMetadata: StreamMetadata = {
    val metadataPath = new Path(checkpointFile("metadata"))
    val hadoopConf = sparkSession.sessionState.newHadoopConf()
    StreamMetadata.read(metadataPath, hadoopConf).getOrElse {
      val newMetadata = new StreamMetadata(UUID.randomUUID.toString)
      StreamMetadata.write(newMetadata, metadataPath, hadoopConf)
      newMetadata
    }
  }

  /** Metadata associated with the offset seq of a batch in the query. */
  protected var offsetSeqMetadata = OffsetSeqMetadata(
    batchWatermarkMs = 0, batchTimestampMs = 0, sparkSession.conf)

  /**
   * A map of current watermarks, keyed by the position of the watermark operator in the
   * physical plan.
   *
   * This state is 'soft state', which does not affect the correctness and semantics of watermarks
   * and is not persisted across query restarts.
   * The fault-tolerant watermark state is in offsetSeqMetadata.
   */
  protected val watermarkMsMap: MutableMap[Int, Long] = MutableMap()

  override val id: UUID = UUID.fromString(streamMetadata.id)

  override val runId: UUID = UUID.randomUUID

  /**
   * Pretty identifier string for printing in logs. The format is
   * "queryName [id = xyz, runId = abc]" if the name is set, else "[id = xyz, runId = abc]".
   */
  protected val prettyIdString =
    Option(name).map(_ + " ").getOrElse("") + s"[id = $id, runId = $runId]"

  /**
   * A list of unique sources in the query plan. This will be set when generating logical plan.
   */
  @volatile protected var uniqueSources: Seq[BaseStreamingSource] = Seq.empty

  /** Defines the internal state of execution */
  protected val state = new AtomicReference[State](INITIALIZING)

  @volatile
  var lastExecution: IncrementalExecution = _

  /** Holds the most recent input data for each source. */
  protected var newData: Map[BaseStreamingSource, LogicalPlan] = _

  @volatile
  protected var streamDeathCause: StreamingQueryException = null

  /* Get the call site in the caller thread; will pass this into the micro batch thread */
  private val callSite = Utils.getCallSite()

  /** Used to report metrics to coda-hale. This uses id for easier tracking across restarts. */
  lazy val streamMetrics = new MetricsReporter(
    this, s"spark.streaming.${Option(name).getOrElse(id)}")

  /** Isolated spark session to run the batches with. */
  private val sparkSessionForStream = sparkSession.cloneSession()

  /**
   * The thread that runs the micro-batches of this stream. Note that this thread must be
   * [[org.apache.spark.util.UninterruptibleThread]] to work around KAFKA-1894: interrupting a
   * running `KafkaConsumer` may cause an endless loop.
   */
  val queryExecutionThread: QueryExecutionThread =
    new QueryExecutionThread(s"stream execution thread for $prettyIdString") {
      override def run(): Unit = {
        // To fix call site like "run at <unknown>:0", we bridge the call site from the caller
        // thread to this micro batch thread
        sparkSession.sparkContext.setCallSite(callSite)
        runStream()
      }
    }

  /**
   * A write-ahead-log that records the offsets that are present in each batch. In order to ensure
   * that a given batch will always consist of the same data, we write to this log *before* any
   * processing is done. Thus, the Nth record in this log indicates data that is currently being
   * processed and the N-1th entry indicates which offsets have been durably committed to the sink.
   */
  val offsetLog = new OffsetSeqLog(sparkSession, checkpointFile("offsets"))

  /**
   * A log that records the batch ids that have completed. This is used to check if a batch was
   * fully processed, and its output was committed to the sink, hence no need to process it again.
   * This is used (for instance) during restart, to help identify which batch to run next.
   */
  val commitLog = new CommitLog(sparkSession, checkpointFile("commits"))

  /** Whether all fields of the query have been initialized */
  private def isInitialized: Boolean = state.get != INITIALIZING

  /** Whether the query is currently active or not */
  override def isActive: Boolean = state.get != TERMINATED

  /** Returns the [[StreamingQueryException]] if the query was terminated by an exception. */
  override def exception: Option[StreamingQueryException] = Option(streamDeathCause)

  /** Returns the path of a file with `name` in the checkpoint directory. */
  protected def checkpointFile(name: String): String =
    new Path(new Path(resolvedCheckpointRoot), name).toString

  /**
   * Starts the execution. This returns only after the thread has started and [[QueryStartedEvent]]
   * has been posted to all the listeners.
   */
  def start(): Unit = {
    logInfo(s"Starting $prettyIdString. Use $resolvedCheckpointRoot to store the query checkpoint.")
    queryExecutionThread.setDaemon(true)
    queryExecutionThread.start()
    startLatch.await()  // Wait until thread started and QueryStart event has been posted
  }

  /**
   * Run the activated stream until stopped.
   */
  protected def runActivatedStream(sparkSessionForStream: SparkSession): Unit

  /**
   * Activate the stream and then wrap a callout to runActivatedStream, handling start and stop.
   *
   * Note that this method ensures that [[QueryStartedEvent]] and [[QueryTerminatedEvent]] are
   * posted such that listeners are guaranteed to get a start event before a termination.
   * Furthermore, this method also ensures that [[QueryStartedEvent]] event is posted before the
   * `start()` method returns.
   */
  private def runStream(): Unit = {
    try {
      sparkSession.sparkContext.setJobGroup(runId.toString, getBatchDescriptionString,
        interruptOnCancel = true)
      sparkSession.sparkContext.setLocalProperty(StreamExecution.QUERY_ID_KEY, id.toString)
      if (sparkSession.sessionState.conf.streamingMetricsEnabled) {
        sparkSession.sparkContext.env.metricsSystem.registerSource(streamMetrics)
      }

      // `postEvent` does not throw non-fatal exceptions.
      postEvent(new QueryStartedEvent(id, runId, name))

      // Unblock starting thread
      startLatch.countDown()

      // While active, repeatedly attempt to run batches.
SparkSession.setActiveSession(sparkSession) updateStatusMessage("Initializing sources") // force initialization of the logical plan so that the sources can be created logicalPlan // Adaptive execution can change num shuffle partitions, disallow sparkSessionForStream.conf.set(SQLConf.ADAPTIVE_EXECUTION_ENABLED.key, "false") // Disable cost-based join optimization as we do not want stateful operations to be rearranged sparkSessionForStream.conf.set(SQLConf.CBO_ENABLED.key, "false") offsetSeqMetadata = OffsetSeqMetadata( batchWatermarkMs = 0, batchTimestampMs = 0, sparkSessionForStream.conf) if (state.compareAndSet(INITIALIZING, ACTIVE)) { // Unblock `awaitInitialization` initializationLatch.countDown() runActivatedStream(sparkSessionForStream) updateStatusMessage("Stopped") } else { // `stop()` is already called. Let `finally` finish the cleanup. } } catch { case e if isInterruptedByStop(e, sparkSession.sparkContext) => // interrupted by stop() updateStatusMessage("Stopped") case e: IOException if e.getMessage != null && e.getMessage.startsWith(classOf[InterruptedException].getName) && state.get == TERMINATED => // This is a workaround for HADOOP-12074: `Shell.runCommand` converts `InterruptedException` // to `new IOException(ie.toString())` before Hadoop 2.8. updateStatusMessage("Stopped") case e: Throwable => streamDeathCause = new StreamingQueryException( toDebugString(includeLogicalPlan = isInitialized), s"Query $prettyIdString terminated with exception: ${e.getMessage}", e, committedOffsets.toOffsetSeq(sources, offsetSeqMetadata).toString, availableOffsets.toOffsetSeq(sources, offsetSeqMetadata).toString) logError(s"Query $prettyIdString terminated with error", e) updateStatusMessage(s"Terminated with exception: ${e.getMessage}") // Rethrow the fatal errors to allow the user using `Thread.UncaughtExceptionHandler` to // handle them if (!NonFatal(e)) { throw e } } finally queryExecutionThread.runUninterruptibly { // The whole `finally` block must run inside `runUninterruptibly` to avoid being interrupted // when a query is stopped by the user. We need to make sure the following codes finish // otherwise it may throw `InterruptedException` to `UncaughtExceptionHandler` (SPARK-21248). // Release latches to unblock the user codes since exception can happen in any place and we // may not get a chance to release them startLatch.countDown() initializationLatch.countDown() try { stopSources() state.set(TERMINATED) currentStatus = status.copy(isTriggerActive = false, isDataAvailable = false) // Update metrics and status sparkSession.sparkContext.env.metricsSystem.removeSource(streamMetrics) // Notify others sparkSession.streams.notifyQueryTermination(StreamExecution.this) postEvent( new QueryTerminatedEvent(id, runId, exception.map(_.cause).map(Utils.exceptionString))) // Delete the temp checkpoint when either force delete enabled or the query didn't fail if (deleteCheckpointOnStop && (sparkSession.sessionState.conf .getConf(SQLConf.FORCE_DELETE_TEMP_CHECKPOINT_LOCATION) || exception.isEmpty)) { val checkpointPath = new Path(resolvedCheckpointRoot) try { logInfo(s"Deleting checkpoint $checkpointPath.") val fs = checkpointPath.getFileSystem(sparkSession.sessionState.newHadoopConf()) fs.delete(checkpointPath, true) } catch { case NonFatal(e) => // Deleting temp checkpoint folder is best effort, don't throw non fatal exceptions // when we cannot delete them. 
logWarning(s"Cannot delete $checkpointPath", e) } } } finally { awaitProgressLock.lock() try { // Wake up any threads that are waiting for the stream to progress. awaitProgressLockCondition.signalAll() } finally { awaitProgressLock.unlock() } terminationLatch.countDown() } } } private def isInterruptedByStop(e: Throwable, sc: SparkContext): Boolean = { if (state.get == TERMINATED) { StreamExecution.isInterruptionException(e, sc) } else { false } } override protected def postEvent(event: StreamingQueryListener.Event): Unit = { sparkSession.streams.postListenerEvent(event) } /** Stops all streaming sources safely. */ protected def stopSources(): Unit = { uniqueSources.foreach { source => try { source.stop() } catch { case NonFatal(e) => logWarning(s"Failed to stop streaming source: $source. Resources may have leaked.", e) } } } /** * Blocks the current thread until processing for data from the given `source` has reached at * least the given `Offset`. This method is intended for use primarily when writing tests. */ private[sql] def awaitOffset(sourceIndex: Int, newOffset: Offset, timeoutMs: Long): Unit = { assertAwaitThread() def notDone = { val localCommittedOffsets = committedOffsets if (sources == null) { // sources might not be initialized yet false } else { val source = sources(sourceIndex) !localCommittedOffsets.contains(source) || localCommittedOffsets(source) != newOffset } } while (notDone) { awaitProgressLock.lock() try { awaitProgressLockCondition.await(timeoutMs, TimeUnit.MILLISECONDS) if (streamDeathCause != null) { throw streamDeathCause } } finally { awaitProgressLock.unlock() } } logDebug(s"Unblocked at $newOffset for ${sources(sourceIndex)}") } /** A flag to indicate that a batch has completed with no new data available. */ @volatile protected var noNewData = false /** * Assert that the await APIs should not be called in the stream thread. Otherwise, it may cause * dead-lock, e.g., calling any await APIs in `StreamingQueryListener.onQueryStarted` will block * the stream thread forever. */ private def assertAwaitThread(): Unit = { if (queryExecutionThread eq Thread.currentThread) { throw new IllegalStateException( "Cannot wait for a query state from the same thread that is running the query") } } /** * Await until all fields of the query have been initialized. 
   */
  def awaitInitialization(timeoutMs: Long): Unit = {
    assertAwaitThread()
    require(timeoutMs > 0, "Timeout has to be positive")
    if (streamDeathCause != null) {
      throw streamDeathCause
    }
    initializationLatch.await(timeoutMs, TimeUnit.MILLISECONDS)
    if (streamDeathCause != null) {
      throw streamDeathCause
    }
  }

  override def processAllAvailable(): Unit = {
    assertAwaitThread()
    if (streamDeathCause != null) {
      throw streamDeathCause
    }
    if (!isActive) return
    awaitProgressLock.lock()
    try {
      noNewData = false
      while (true) {
        awaitProgressLockCondition.await(10000, TimeUnit.MILLISECONDS)
        if (streamDeathCause != null) {
          throw streamDeathCause
        }
        if (noNewData || !isActive) {
          return
        }
      }
    } finally {
      awaitProgressLock.unlock()
    }
  }

  override def awaitTermination(): Unit = {
    assertAwaitThread()
    terminationLatch.await()
    if (streamDeathCause != null) {
      throw streamDeathCause
    }
  }

  override def awaitTermination(timeoutMs: Long): Boolean = {
    assertAwaitThread()
    require(timeoutMs > 0, "Timeout has to be positive")
    terminationLatch.await(timeoutMs, TimeUnit.MILLISECONDS)
    if (streamDeathCause != null) {
      throw streamDeathCause
    } else {
      !isActive
    }
  }

  /** Expose for tests */
  def explainInternal(extended: Boolean): String = {
    if (lastExecution == null) {
      "No physical plan. Waiting for data."
    } else {
      val explain = StreamingExplainCommand(lastExecution, extended = extended)
      sparkSession.sessionState.executePlan(explain).executedPlan.executeCollect()
        .map(_.getString(0)).mkString("\\n")
    }
  }

  override def explain(extended: Boolean): Unit = {
    // scalastyle:off println
    println(explainInternal(extended))
    // scalastyle:on println
  }

  override def explain(): Unit = explain(extended = false)

  override def toString: String = {
    s"Streaming Query $prettyIdString [state = $state]"
  }

  private def toDebugString(includeLogicalPlan: Boolean): String = {
    val debugString =
      s"""|=== Streaming Query ===
          |Identifier: $prettyIdString
          |Current Committed Offsets: $committedOffsets
          |Current Available Offsets: $availableOffsets
          |
          |Current State: $state
          |Thread State: ${queryExecutionThread.getState}""".stripMargin
    if (includeLogicalPlan) {
      debugString + s"\\n\\nLogical Plan:\\n$logicalPlan"
    } else {
      debugString
    }
  }

  protected def getBatchDescriptionString: String = {
    val batchDescription = if (currentBatchId < 0) "init" else currentBatchId.toString
    Option(name).map(_ + "<br/>").getOrElse("") +
      s"id = $id<br/>runId = $runId<br/>batch = $batchDescription"
  }

  protected def createStreamingWrite(
      table: SupportsStreamingWrite,
      options: Map[String, String],
      inputPlan: LogicalPlan): StreamingWrite = {
    val writeBuilder = table.newWriteBuilder(new CaseInsensitiveStringMap(options.asJava))
      .withQueryId(id.toString)
      .withInputDataSchema(inputPlan.schema)
    outputMode match {
      case Append =>
        writeBuilder.buildForStreaming()

      case Complete =>
        // TODO: we should do this check earlier when we have capability API.
        require(writeBuilder.isInstanceOf[SupportsTruncate],
          table.name + " does not support Complete mode.")
        writeBuilder.asInstanceOf[SupportsTruncate].truncate().buildForStreaming()

      case Update =>
        // Although no v2 sinks really support Update mode now, during tests we do want them
        // to pretend to support Update mode, and treat Update mode the same as Append mode.
        if (Utils.isTesting) {
          writeBuilder.buildForStreaming()
        } else {
          throw new IllegalArgumentException(
            "Data source v2 streaming sinks do not support Update mode.")
        }
    }
  }
}

object StreamExecution {
  val QUERY_ID_KEY = "sql.streaming.queryId"
  val IS_CONTINUOUS_PROCESSING = "__is_continuous_processing"

  def isInterruptionException(e: Throwable, sc: SparkContext): Boolean = e match {
    // InterruptedIOException - thrown when an I/O operation is interrupted
    // ClosedByInterruptException - thrown when an I/O operation upon a channel is interrupted
    case _: InterruptedException | _: InterruptedIOException | _: ClosedByInterruptException =>
      true
    // The cause of the following exceptions may be one of the above exceptions:
    //
    // UncheckedIOException - thrown by code that cannot throw a checked IOException, such as
    //                        BiFunction.apply
    // ExecutionException - thrown by code running in a thread pool when that code throws an
    //                      exception
    // UncheckedExecutionException - thrown by code that cannot throw a checked
    //                               ExecutionException, such as BiFunction.apply
    case e2 @ (_: UncheckedIOException | _: ExecutionException | _: UncheckedExecutionException)
        if e2.getCause != null =>
      isInterruptionException(e2.getCause, sc)
    case se: SparkException =>
      val jobGroup = sc.getLocalProperty("spark.jobGroup.id")
      if (jobGroup == null) return false
      val errorMsg = se.getMessage
      if (errorMsg.contains("cancelled") && errorMsg.contains(jobGroup) && se.getCause == null) {
        true
      } else if (se.getCause != null) {
        isInterruptionException(se.getCause, sc)
      } else {
        false
      }
    case _ =>
      false
  }

  /** Whether the path contains special chars that will be escaped when converting to a `URI`. */
  def containsSpecialCharsInPath(path: Path): Boolean = {
    path.toUri.getPath != new Path(path.toUri.toString).toUri.getPath
  }
}

/**
 * A special thread to run the stream query. Some code needs to run in the QueryExecutionThread
 * and will use `classOf[QueryExecutionThread]` to check.
 */
abstract class QueryExecutionThread(name: String) extends UninterruptibleThread(name)
Aegeaner/spark
sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/StreamExecution.scala
Scala
apache-2.0
26,805
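StreamExecution's awaitOffset and processAllAvailable both follow the same idiom: a fair ReentrantLock plus a Condition, where waiters loop on a predicate with timed awaits and the batch runner calls signalAll() whenever progress is made. A minimal, self-contained sketch of that idiom under assumed names (ProgressAwaitSketch and its members are illustrative, not Spark API):

import java.util.concurrent.TimeUnit
import java.util.concurrent.locks.ReentrantLock

object ProgressAwaitSketch {
  private val lock = new ReentrantLock(true) // fair lock, to avoid starvation
  private val progressed = lock.newCondition()
  @volatile private var batchId = -1L

  // Waiter: re-check the predicate after every timed await, so a missed
  // signal only costs one timeout rather than a permanent hang.
  def awaitBatch(target: Long, timeoutMs: Long): Unit = {
    while (batchId < target) {
      lock.lock()
      try progressed.await(timeoutMs, TimeUnit.MILLISECONDS)
      finally lock.unlock()
    }
  }

  // Runner: publish progress and wake every waiter.
  def completeBatch(): Unit = {
    lock.lock()
    try { batchId += 1; progressed.signalAll() }
    finally lock.unlock()
  }
}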
package exercises.ch04

object Ex03 {
  // Combine two Option values with a binary function: the result is Some only
  // when both inputs are Some; either None short-circuits to None.
  def map2[A, B, C](a: Option[A], b: Option[B])(f: (A, B) => C): Option[C] = (a, b) match {
    case (None, _)          => None
    case (_, None)          => None
    case (Some(x), Some(y)) => Some(f(x, y))
  }

  def main(args: Array[String]): Unit = {
    println(map2[Int, Int, Int](None, Some(5))((a, b) => a + b))    // None
    println(map2[Int, Int, Int](Some(5), None)((a, b) => a + b))    // None
    println(map2[Int, Int, Int](Some(5), Some(4))((a, b) => a + b)) // Some(9)
  }
}
VladMinzatu/fpinscala-exercises
src/main/scala/exercises/ch04/Ex03.scala
Scala
mit
460
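The same combinator can be written without pattern matching: since Option's flatMap/map already short-circuit on None, a for-comprehension gives an equivalent map2. Behaviour matches the version above.

object Map2ForComprehension {
  def map2[A, B, C](a: Option[A], b: Option[B])(f: (A, B) => C): Option[C] =
    for { x <- a; y <- b } yield f(x, y)

  def main(args: Array[String]): Unit = {
    println(map2[Int, Int, Int](Some(2), Some(3))(_ + _)) // Some(5)
    println(map2[Int, Int, Int](Some(2), None)(_ + _))    // None
  }
}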
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.deploy.master import java.text.SimpleDateFormat import java.util.{Date, Locale} import java.util.concurrent.{ScheduledFuture, TimeUnit} import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet} import scala.util.Random import org.apache.spark.{SecurityManager, SparkConf, SparkException} import org.apache.spark.deploy.{ApplicationDescription, DriverDescription, ExecutorState} import org.apache.spark.deploy.DeployMessages._ import org.apache.spark.deploy.master.DriverState.DriverState import org.apache.spark.deploy.master.MasterMessages._ import org.apache.spark.deploy.master.ui.MasterWebUI import org.apache.spark.deploy.rest.StandaloneRestServer import org.apache.spark.internal.Logging import org.apache.spark.internal.config._ import org.apache.spark.internal.config.Deploy._ import org.apache.spark.internal.config.UI._ import org.apache.spark.internal.config.Worker._ import org.apache.spark.metrics.{MetricsSystem, MetricsSystemInstances} import org.apache.spark.resource.{ResourceRequirement, ResourceUtils} import org.apache.spark.rpc._ import org.apache.spark.serializer.{JavaSerializer, Serializer} import org.apache.spark.util.{SparkUncaughtExceptionHandler, ThreadUtils, Utils} private[deploy] class Master( override val rpcEnv: RpcEnv, address: RpcAddress, webUiPort: Int, val securityMgr: SecurityManager, val conf: SparkConf) extends ThreadSafeRpcEndpoint with Logging with LeaderElectable { private val forwardMessageThread = ThreadUtils.newDaemonSingleThreadScheduledExecutor("master-forward-message-thread") // For application IDs private def createDateFormat = new SimpleDateFormat("yyyyMMddHHmmss", Locale.US) private val workerTimeoutMs = conf.get(WORKER_TIMEOUT) * 1000 private val retainedApplications = conf.get(RETAINED_APPLICATIONS) private val retainedDrivers = conf.get(RETAINED_DRIVERS) private val reaperIterations = conf.get(REAPER_ITERATIONS) private val recoveryMode = conf.get(RECOVERY_MODE) private val maxExecutorRetries = conf.get(MAX_EXECUTOR_RETRIES) val workers = new HashSet[WorkerInfo] val idToApp = new HashMap[String, ApplicationInfo] private val waitingApps = new ArrayBuffer[ApplicationInfo] val apps = new HashSet[ApplicationInfo] private val idToWorker = new HashMap[String, WorkerInfo] private val addressToWorker = new HashMap[RpcAddress, WorkerInfo] private val endpointToApp = new HashMap[RpcEndpointRef, ApplicationInfo] private val addressToApp = new HashMap[RpcAddress, ApplicationInfo] private val completedApps = new ArrayBuffer[ApplicationInfo] private var nextAppNumber = 0 private val drivers = new HashSet[DriverInfo] private val completedDrivers = new ArrayBuffer[DriverInfo] // Drivers currently spooled for scheduling private val waitingDrivers = new 
ArrayBuffer[DriverInfo] private var nextDriverNumber = 0 Utils.checkHost(address.host) private val masterMetricsSystem = MetricsSystem.createMetricsSystem(MetricsSystemInstances.MASTER, conf) private val applicationMetricsSystem = MetricsSystem.createMetricsSystem(MetricsSystemInstances.APPLICATIONS, conf) private val masterSource = new MasterSource(this) // After onStart, webUi will be set private var webUi: MasterWebUI = null private val masterUrl = address.toSparkURL private var masterWebUiUrl: String = _ private var state = RecoveryState.STANDBY private var persistenceEngine: PersistenceEngine = _ private var leaderElectionAgent: LeaderElectionAgent = _ private var recoveryCompletionTask: ScheduledFuture[_] = _ private var checkForWorkerTimeOutTask: ScheduledFuture[_] = _ // As a temporary workaround before better ways of configuring memory, we allow users to set // a flag that will perform round-robin scheduling across the nodes (spreading out each app // among all the nodes) instead of trying to consolidate each app onto a small # of nodes. private val spreadOutApps = conf.get(SPREAD_OUT_APPS) // Default maxCores for applications that don't specify it (i.e. pass Int.MaxValue) private val defaultCores = conf.get(DEFAULT_CORES) val reverseProxy = conf.get(UI_REVERSE_PROXY) if (defaultCores < 1) { throw new SparkException(s"${DEFAULT_CORES.key} must be positive") } // Alternative application submission gateway that is stable across Spark versions private val restServerEnabled = conf.get(MASTER_REST_SERVER_ENABLED) private var restServer: Option[StandaloneRestServer] = None private var restServerBoundPort: Option[Int] = None { val authKey = SecurityManager.SPARK_AUTH_SECRET_CONF require(conf.getOption(authKey).isEmpty || !restServerEnabled, s"The RestSubmissionServer does not support authentication via ${authKey}. Either turn " + "off the RestSubmissionServer with spark.master.rest.enabled=false, or do not use " + "authentication.") } override def onStart(): Unit = { logInfo("Starting Spark master at " + masterUrl) logInfo(s"Running Spark version ${org.apache.spark.SPARK_VERSION}") webUi = new MasterWebUI(this, webUiPort) webUi.bind() masterWebUiUrl = webUi.webUrl if (reverseProxy) { val uiReverseProxyUrl = conf.get(UI_REVERSE_PROXY_URL).map(_.stripSuffix("/")) if (uiReverseProxyUrl.nonEmpty) { System.setProperty("spark.ui.proxyBase", uiReverseProxyUrl.get) // If the master URL has a path component, it must end with a slash. // Otherwise the browser generates incorrect relative links masterWebUiUrl = uiReverseProxyUrl.get + "/" } webUi.addProxy() logInfo(s"Spark Master is acting as a reverse proxy. Master, Workers and " + s"Applications UIs are available at $masterWebUiUrl") } checkForWorkerTimeOutTask = forwardMessageThread.scheduleAtFixedRate( () => Utils.tryLogNonFatalError { self.send(CheckForWorkerTimeOut) }, 0, workerTimeoutMs, TimeUnit.MILLISECONDS) if (restServerEnabled) { val port = conf.get(MASTER_REST_SERVER_PORT) restServer = Some(new StandaloneRestServer(address.host, port, conf, self, masterUrl)) } restServerBoundPort = restServer.map(_.start()) masterMetricsSystem.registerSource(masterSource) masterMetricsSystem.start() applicationMetricsSystem.start() // Attach the master and app metrics servlet handler to the web ui after the metrics systems are // started. 
masterMetricsSystem.getServletHandlers.foreach(webUi.attachHandler) applicationMetricsSystem.getServletHandlers.foreach(webUi.attachHandler) val serializer = new JavaSerializer(conf) val (persistenceEngine_, leaderElectionAgent_) = recoveryMode match { case "ZOOKEEPER" => logInfo("Persisting recovery state to ZooKeeper") val zkFactory = new ZooKeeperRecoveryModeFactory(conf, serializer) (zkFactory.createPersistenceEngine(), zkFactory.createLeaderElectionAgent(this)) case "FILESYSTEM" => val fsFactory = new FileSystemRecoveryModeFactory(conf, serializer) (fsFactory.createPersistenceEngine(), fsFactory.createLeaderElectionAgent(this)) case "CUSTOM" => val clazz = Utils.classForName(conf.get(RECOVERY_MODE_FACTORY)) val factory = clazz.getConstructor(classOf[SparkConf], classOf[Serializer]) .newInstance(conf, serializer) .asInstanceOf[StandaloneRecoveryModeFactory] (factory.createPersistenceEngine(), factory.createLeaderElectionAgent(this)) case _ => (new BlackHolePersistenceEngine(), new MonarchyLeaderAgent(this)) } persistenceEngine = persistenceEngine_ leaderElectionAgent = leaderElectionAgent_ } override def onStop(): Unit = { masterMetricsSystem.report() applicationMetricsSystem.report() // prevent the CompleteRecovery message sending to restarted master if (recoveryCompletionTask != null) { recoveryCompletionTask.cancel(true) } if (checkForWorkerTimeOutTask != null) { checkForWorkerTimeOutTask.cancel(true) } forwardMessageThread.shutdownNow() webUi.stop() restServer.foreach(_.stop()) masterMetricsSystem.stop() applicationMetricsSystem.stop() persistenceEngine.close() leaderElectionAgent.stop() } override def electedLeader(): Unit = { self.send(ElectedLeader) } override def revokedLeadership(): Unit = { self.send(RevokedLeadership) } override def receive: PartialFunction[Any, Unit] = { case ElectedLeader => val (storedApps, storedDrivers, storedWorkers) = persistenceEngine.readPersistedData(rpcEnv) state = if (storedApps.isEmpty && storedDrivers.isEmpty && storedWorkers.isEmpty) { RecoveryState.ALIVE } else { RecoveryState.RECOVERING } logInfo("I have been elected leader! New state: " + state) if (state == RecoveryState.RECOVERING) { beginRecovery(storedApps, storedDrivers, storedWorkers) recoveryCompletionTask = forwardMessageThread.schedule(new Runnable { override def run(): Unit = Utils.tryLogNonFatalError { self.send(CompleteRecovery) } }, workerTimeoutMs, TimeUnit.MILLISECONDS) } case CompleteRecovery => completeRecovery() case RevokedLeadership => logError("Leadership has been revoked -- master shutting down.") System.exit(0) case WorkerDecommissioning(id, workerRef) => if (state == RecoveryState.STANDBY) { workerRef.send(MasterInStandby) } else { // We use foreach since get gives us an option and we can skip the failures. idToWorker.get(id).foreach(decommissionWorker) } case DecommissionWorkers(ids) => // The caller has already checked the state when handling DecommissionWorkersOnHosts, // so it should not be the STANDBY assert(state != RecoveryState.STANDBY) ids.foreach ( id => // We use foreach since get gives us an option and we can skip the failures. idToWorker.get(id).foreach { w => decommissionWorker(w) // Also send a message to the worker node to notify. 
w.endpoint.send(DecommissionWorker) } ) case RegisterWorker( id, workerHost, workerPort, workerRef, cores, memory, workerWebUiUrl, masterAddress, resources) => logInfo("Registering worker %s:%d with %d cores, %s RAM".format( workerHost, workerPort, cores, Utils.megabytesToString(memory))) if (state == RecoveryState.STANDBY) { workerRef.send(MasterInStandby) } else if (idToWorker.contains(id)) { workerRef.send(RegisteredWorker(self, masterWebUiUrl, masterAddress, true)) } else { val workerResources = resources.map(r => r._1 -> WorkerResourceInfo(r._1, r._2.addresses)) val worker = new WorkerInfo(id, workerHost, workerPort, cores, memory, workerRef, workerWebUiUrl, workerResources) if (registerWorker(worker)) { persistenceEngine.addWorker(worker) workerRef.send(RegisteredWorker(self, masterWebUiUrl, masterAddress, false)) schedule() } else { val workerAddress = worker.endpoint.address logWarning("Worker registration failed. Attempted to re-register worker at same " + "address: " + workerAddress) workerRef.send(RegisterWorkerFailed("Attempted to re-register worker at same address: " + workerAddress)) } } case RegisterApplication(description, driver) => // TODO Prevent repeated registrations from some driver if (state == RecoveryState.STANDBY) { // ignore, don't send response } else { logInfo("Registering app " + description.name) val app = createApplication(description, driver) registerApplication(app) logInfo("Registered app " + description.name + " with ID " + app.id) persistenceEngine.addApplication(app) driver.send(RegisteredApplication(app.id, self)) schedule() } case DriverStateChanged(driverId, state, exception) => state match { case DriverState.ERROR | DriverState.FINISHED | DriverState.KILLED | DriverState.FAILED => removeDriver(driverId, state, exception) case _ => throw new Exception(s"Received unexpected state update for driver $driverId: $state") } case Heartbeat(workerId, worker) => idToWorker.get(workerId) match { case Some(workerInfo) => workerInfo.lastHeartbeat = System.currentTimeMillis() case None => if (workers.map(_.id).contains(workerId)) { logWarning(s"Got heartbeat from unregistered worker $workerId." + " Asking it to re-register.") worker.send(ReconnectWorker(masterUrl)) } else { logWarning(s"Got heartbeat from unregistered worker $workerId." 
+ " This worker was never registered, so ignoring the heartbeat.") } } case MasterChangeAcknowledged(appId) => idToApp.get(appId) match { case Some(app) => logInfo("Application has been re-registered: " + appId) app.state = ApplicationState.WAITING case None => logWarning("Master change ack from unknown app: " + appId) } if (canCompleteRecovery) { completeRecovery() } case WorkerSchedulerStateResponse(workerId, execResponses, driverResponses) => idToWorker.get(workerId) match { case Some(worker) => logInfo("Worker has been re-registered: " + workerId) worker.state = WorkerState.ALIVE val validExecutors = execResponses.filter( exec => idToApp.get(exec.desc.appId).isDefined) for (exec <- validExecutors) { val (execDesc, execResources) = (exec.desc, exec.resources) val app = idToApp(execDesc.appId) val execInfo = app.addExecutor( worker, execDesc.cores, execResources, Some(execDesc.execId)) worker.addExecutor(execInfo) worker.recoverResources(execResources) execInfo.copyState(execDesc) } for (driver <- driverResponses) { val (driverId, driverResource) = (driver.driverId, driver.resources) drivers.find(_.id == driverId).foreach { driver => driver.worker = Some(worker) driver.state = DriverState.RUNNING driver.withResources(driverResource) worker.recoverResources(driverResource) worker.addDriver(driver) } } case None => logWarning("Scheduler state from unknown worker: " + workerId) } if (canCompleteRecovery) { completeRecovery() } case WorkerLatestState(workerId, executors, driverIds) => idToWorker.get(workerId) match { case Some(worker) => for (exec <- executors) { val executorMatches = worker.executors.exists { case (_, e) => e.application.id == exec.appId && e.id == exec.execId } if (!executorMatches) { // master doesn't recognize this executor. So just tell worker to kill it. worker.endpoint.send(KillExecutor(masterUrl, exec.appId, exec.execId)) } } for (driverId <- driverIds) { val driverMatches = worker.drivers.exists { case (id, _) => id == driverId } if (!driverMatches) { // master doesn't recognize this driver. So just tell worker to kill it. worker.endpoint.send(KillDriver(driverId)) } } case None => logWarning("Worker state from unknown worker: " + workerId) } case UnregisterApplication(applicationId) => logInfo(s"Received unregister request from application $applicationId") idToApp.get(applicationId).foreach(finishApplication) case CheckForWorkerTimeOut => timeOutDeadWorkers() } override def receiveAndReply(context: RpcCallContext): PartialFunction[Any, Unit] = { case RequestSubmitDriver(description) => if (state != RecoveryState.ALIVE) { val msg = s"${Utils.BACKUP_STANDALONE_MASTER_PREFIX}: $state. " + "Can only accept driver submissions in ALIVE state." context.reply(SubmitDriverResponse(self, false, None, msg)) } else { logInfo("Driver submitted " + description.command.mainClass) val driver = createDriver(description) persistenceEngine.addDriver(driver) waitingDrivers += driver drivers.add(driver) schedule() // TODO: It might be good to instead have the submission client poll the master to determine // the current status of the driver. For now it's simply "fire and forget". context.reply(SubmitDriverResponse(self, true, Some(driver.id), s"Driver successfully submitted as ${driver.id}")) } case RequestKillDriver(driverId) => if (state != RecoveryState.ALIVE) { val msg = s"${Utils.BACKUP_STANDALONE_MASTER_PREFIX}: $state. " + s"Can only kill drivers in ALIVE state." 
context.reply(KillDriverResponse(self, driverId, success = false, msg)) } else { logInfo("Asked to kill driver " + driverId) val driver = drivers.find(_.id == driverId) driver match { case Some(d) => if (waitingDrivers.contains(d)) { waitingDrivers -= d self.send(DriverStateChanged(driverId, DriverState.KILLED, None)) } else { // We just notify the worker to kill the driver here. The final bookkeeping occurs // on the return path when the worker submits a state change back to the master // to notify it that the driver was successfully killed. d.worker.foreach { w => w.endpoint.send(KillDriver(driverId)) } } // TODO: It would be nice for this to be a synchronous response val msg = s"Kill request for $driverId submitted" logInfo(msg) context.reply(KillDriverResponse(self, driverId, success = true, msg)) case None => val msg = s"Driver $driverId has already finished or does not exist" logWarning(msg) context.reply(KillDriverResponse(self, driverId, success = false, msg)) } } case RequestDriverStatus(driverId) => if (state != RecoveryState.ALIVE) { val msg = s"${Utils.BACKUP_STANDALONE_MASTER_PREFIX}: $state. " + "Can only request driver status in ALIVE state." context.reply( DriverStatusResponse(found = false, None, None, None, Some(new Exception(msg)))) } else { (drivers ++ completedDrivers).find(_.id == driverId) match { case Some(driver) => context.reply(DriverStatusResponse(found = true, Some(driver.state), driver.worker.map(_.id), driver.worker.map(_.hostPort), driver.exception)) case None => context.reply(DriverStatusResponse(found = false, None, None, None, None)) } } case RequestMasterState => context.reply(MasterStateResponse( address.host, address.port, restServerBoundPort, workers.toArray, apps.toArray, completedApps.toArray, drivers.toArray, completedDrivers.toArray, state)) case BoundPortsRequest => context.reply(BoundPortsResponse(address.port, webUi.boundPort, restServerBoundPort)) case RequestExecutors(appId, requestedTotal) => context.reply(handleRequestExecutors(appId, requestedTotal)) case KillExecutors(appId, executorIds) => val formattedExecutorIds = formatExecutorIds(executorIds) context.reply(handleKillExecutors(appId, formattedExecutorIds)) case DecommissionWorkersOnHosts(hostnames) => if (state != RecoveryState.STANDBY) { context.reply(decommissionWorkersOnHosts(hostnames)) } else { context.reply(0) } case ExecutorStateChanged(appId, execId, state, message, exitStatus) => val execOption = idToApp.get(appId).flatMap(app => app.executors.get(execId)) execOption match { case Some(exec) => val appInfo = idToApp(appId) val oldState = exec.state exec.state = state if (state == ExecutorState.RUNNING) { assert(oldState == ExecutorState.LAUNCHING, s"executor $execId state transfer from $oldState to RUNNING is illegal") appInfo.resetRetryCount() } exec.application.driver.send(ExecutorUpdated(execId, state, message, exitStatus, None)) if (ExecutorState.isFinished(state)) { // Remove this executor from the worker and app logInfo(s"Removing executor ${exec.fullId} because it is $state") // If an application has already finished, preserve its // state to display its information properly on the UI if (!appInfo.isFinished) { appInfo.removeExecutor(exec) } exec.worker.removeExecutor(exec) val normalExit = exitStatus == Some(0) // Only retry certain number of times so we don't go into an infinite loop. // Important note: this code path is not exercised by tests, so be very careful when // changing this `if` condition. 
// We also don't count failures from decommissioned workers since they are "expected." if (!normalExit && oldState != ExecutorState.DECOMMISSIONED && appInfo.incrementRetryCount() >= maxExecutorRetries && maxExecutorRetries >= 0) { // < 0 disables this application-killing path val execs = appInfo.executors.values if (!execs.exists(_.state == ExecutorState.RUNNING)) { logError(s"Application ${appInfo.desc.name} with ID ${appInfo.id} failed " + s"${appInfo.retryCount} times; removing it") removeApplication(appInfo, ApplicationState.FAILED) } } } schedule() case None => logWarning(s"Got status update for unknown executor $appId/$execId") } context.reply(true) } override def onDisconnected(address: RpcAddress): Unit = { // The disconnected client could've been either a worker or an app; remove whichever it was logInfo(s"$address got disassociated, removing it.") addressToWorker.get(address).foreach(removeWorker(_, s"${address} got disassociated")) addressToApp.get(address).foreach(finishApplication) if (state == RecoveryState.RECOVERING && canCompleteRecovery) { completeRecovery() } } private def canCompleteRecovery = workers.count(_.state == WorkerState.UNKNOWN) == 0 && apps.count(_.state == ApplicationState.UNKNOWN) == 0 private def beginRecovery(storedApps: Seq[ApplicationInfo], storedDrivers: Seq[DriverInfo], storedWorkers: Seq[WorkerInfo]): Unit = { for (app <- storedApps) { logInfo("Trying to recover app: " + app.id) try { registerApplication(app) app.state = ApplicationState.UNKNOWN app.driver.send(MasterChanged(self, masterWebUiUrl)) } catch { case e: Exception => logInfo("App " + app.id + " had exception on reconnect") } } for (driver <- storedDrivers) { // Here we just read in the list of drivers. Any drivers associated with now-lost workers // will be re-launched when we detect that the worker is missing. drivers += driver } for (worker <- storedWorkers) { logInfo("Trying to recover worker: " + worker.id) try { registerWorker(worker) worker.state = WorkerState.UNKNOWN worker.endpoint.send(MasterChanged(self, masterWebUiUrl)) } catch { case e: Exception => logInfo("Worker " + worker.id + " had exception on reconnect") } } } private def completeRecovery(): Unit = { // Ensure "only-once" recovery semantics using a short synchronization period. if (state != RecoveryState.RECOVERING) { return } state = RecoveryState.COMPLETING_RECOVERY // Kill off any workers and apps that didn't respond to us. workers.filter(_.state == WorkerState.UNKNOWN).foreach( removeWorker(_, "Not responding for recovery")) apps.filter(_.state == ApplicationState.UNKNOWN).foreach(finishApplication) // Update the state of recovered apps to RUNNING apps.filter(_.state == ApplicationState.WAITING).foreach(_.state = ApplicationState.RUNNING) // Reschedule drivers which were not claimed by any workers drivers.filter(_.worker.isEmpty).foreach { d => logWarning(s"Driver ${d.id} was not found after master recovery") if (d.desc.supervise) { logWarning(s"Re-launching ${d.id}") relaunchDriver(d) } else { removeDriver(d.id, DriverState.ERROR, None) logWarning(s"Did not re-launch ${d.id} because it was not supervised") } } state = RecoveryState.ALIVE schedule() logInfo("Recovery complete - resuming operations!") } /** * Schedule executors to be launched on the workers. * Returns an array containing number of cores assigned to each worker. * * There are two modes of launching executors. The first attempts to spread out an application's * executors on as many workers as possible, while the second does the opposite (i.e. 
launch them * on as few workers as possible). The former is usually better for data locality purposes and is * the default. * * The number of cores assigned to each executor is configurable. When this is explicitly set, * multiple executors from the same application may be launched on the same worker if the worker * has enough cores and memory. Otherwise, each executor grabs all the cores available on the * worker by default, in which case only one executor per application may be launched on each * worker during one single schedule iteration. * Note that when `spark.executor.cores` is not set, we may still launch multiple executors from * the same application on the same worker. Consider appA and appB both have one executor running * on worker1, and appA.coresLeft > 0, then appB is finished and release all its cores on worker1, * thus for the next schedule iteration, appA launches a new executor that grabs all the free * cores on worker1, therefore we get multiple executors from appA running on worker1. * * It is important to allocate coresPerExecutor on each worker at a time (instead of 1 core * at a time). Consider the following example: cluster has 4 workers with 16 cores each. * User requests 3 executors (spark.cores.max = 48, spark.executor.cores = 16). If 1 core is * allocated at a time, 12 cores from each worker would be assigned to each executor. * Since 12 < 16, no executors would launch [SPARK-8881]. */ private def scheduleExecutorsOnWorkers( app: ApplicationInfo, usableWorkers: Array[WorkerInfo], spreadOutApps: Boolean): Array[Int] = { val coresPerExecutor = app.desc.coresPerExecutor val minCoresPerExecutor = coresPerExecutor.getOrElse(1) val oneExecutorPerWorker = coresPerExecutor.isEmpty val memoryPerExecutor = app.desc.memoryPerExecutorMB val resourceReqsPerExecutor = app.desc.resourceReqsPerExecutor val numUsable = usableWorkers.length val assignedCores = new Array[Int](numUsable) // Number of cores to give to each worker val assignedExecutors = new Array[Int](numUsable) // Number of new executors on each worker var coresToAssign = math.min(app.coresLeft, usableWorkers.map(_.coresFree).sum) /** Return whether the specified worker can launch an executor for this app. */ def canLaunchExecutorForApp(pos: Int): Boolean = { val keepScheduling = coresToAssign >= minCoresPerExecutor val enoughCores = usableWorkers(pos).coresFree - assignedCores(pos) >= minCoresPerExecutor val assignedExecutorNum = assignedExecutors(pos) // If we allow multiple executors per worker, then we can always launch new executors. // Otherwise, if there is already an executor on this worker, just give it more cores. 
val launchingNewExecutor = !oneExecutorPerWorker || assignedExecutorNum == 0 if (launchingNewExecutor) { val assignedMemory = assignedExecutorNum * memoryPerExecutor val enoughMemory = usableWorkers(pos).memoryFree - assignedMemory >= memoryPerExecutor val assignedResources = resourceReqsPerExecutor.map { req => req.resourceName -> req.amount * assignedExecutorNum }.toMap val resourcesFree = usableWorkers(pos).resourcesAmountFree.map { case (rName, free) => rName -> (free - assignedResources.getOrElse(rName, 0)) } val enoughResources = ResourceUtils.resourcesMeetRequirements( resourcesFree, resourceReqsPerExecutor) val underLimit = assignedExecutors.sum + app.executors.size < app.executorLimit keepScheduling && enoughCores && enoughMemory && enoughResources && underLimit } else { // We're adding cores to an existing executor, so no need // to check memory and executor limits keepScheduling && enoughCores } } // Keep launching executors until no more workers can accommodate any // more executors, or if we have reached this application's limits var freeWorkers = (0 until numUsable).filter(canLaunchExecutorForApp) while (freeWorkers.nonEmpty) { freeWorkers.foreach { pos => var keepScheduling = true while (keepScheduling && canLaunchExecutorForApp(pos)) { coresToAssign -= minCoresPerExecutor assignedCores(pos) += minCoresPerExecutor // If we are launching one executor per worker, then every iteration assigns 1 core // to the executor. Otherwise, every iteration assigns cores to a new executor. if (oneExecutorPerWorker) { assignedExecutors(pos) = 1 } else { assignedExecutors(pos) += 1 } // Spreading out an application means spreading out its executors across as // many workers as possible. If we are not spreading out, then we should keep // scheduling executors on this worker until we use all of its resources. // Otherwise, just move on to the next worker. if (spreadOutApps) { keepScheduling = false } } } freeWorkers = freeWorkers.filter(canLaunchExecutorForApp) } assignedCores } /** * Schedule and launch executors on workers */ private def startExecutorsOnWorkers(): Unit = { // Right now this is a very simple FIFO scheduler. We keep trying to fit in the first app // in the queue, then the second app, etc. for (app <- waitingApps) { val coresPerExecutor = app.desc.coresPerExecutor.getOrElse(1) // If the cores left is less than the coresPerExecutor,the cores left will not be allocated if (app.coresLeft >= coresPerExecutor) { // Filter out workers that don't have enough resources to launch an executor val usableWorkers = workers.toArray.filter(_.state == WorkerState.ALIVE) .filter(canLaunchExecutor(_, app.desc)) .sortBy(_.coresFree).reverse val appMayHang = waitingApps.length == 1 && waitingApps.head.executors.isEmpty && usableWorkers.isEmpty if (appMayHang) { logWarning(s"App ${app.id} requires more resource than any of Workers could have.") } val assignedCores = scheduleExecutorsOnWorkers(app, usableWorkers, spreadOutApps) // Now that we've decided how many cores to allocate on each worker, let's allocate them for (pos <- 0 until usableWorkers.length if assignedCores(pos) > 0) { allocateWorkerResourceToExecutors( app, assignedCores(pos), app.desc.coresPerExecutor, usableWorkers(pos)) } } } } /** * Allocate a worker's resources to one or more executors. 
* @param app the info of the application which the executors belong to * @param assignedCores number of cores on this worker for this application * @param coresPerExecutor number of cores per executor * @param worker the worker info */ private def allocateWorkerResourceToExecutors( app: ApplicationInfo, assignedCores: Int, coresPerExecutor: Option[Int], worker: WorkerInfo): Unit = { // If the number of cores per executor is specified, we divide the cores assigned // to this worker evenly among the executors with no remainder. // Otherwise, we launch a single executor that grabs all the assignedCores on this worker. val numExecutors = coresPerExecutor.map { assignedCores / _ }.getOrElse(1) val coresToAssign = coresPerExecutor.getOrElse(assignedCores) for (i <- 1 to numExecutors) { val allocated = worker.acquireResources(app.desc.resourceReqsPerExecutor) val exec = app.addExecutor(worker, coresToAssign, allocated) launchExecutor(worker, exec) app.state = ApplicationState.RUNNING } } private def canLaunch( worker: WorkerInfo, memoryReq: Int, coresReq: Int, resourceRequirements: Seq[ResourceRequirement]) : Boolean = { val enoughMem = worker.memoryFree >= memoryReq val enoughCores = worker.coresFree >= coresReq val enoughResources = ResourceUtils.resourcesMeetRequirements( worker.resourcesAmountFree, resourceRequirements) enoughMem && enoughCores && enoughResources } /** * @return whether the worker could launch the driver represented by DriverDescription */ private def canLaunchDriver(worker: WorkerInfo, desc: DriverDescription): Boolean = { canLaunch(worker, desc.mem, desc.cores, desc.resourceReqs) } /** * @return whether the worker could launch the executor according to application's requirement */ private def canLaunchExecutor(worker: WorkerInfo, desc: ApplicationDescription): Boolean = { canLaunch( worker, desc.memoryPerExecutorMB, desc.coresPerExecutor.getOrElse(1), desc.resourceReqsPerExecutor) } /** * Schedule the currently available resources among waiting apps. This method will be called * every time a new app joins or resource availability changes. */ private def schedule(): Unit = { if (state != RecoveryState.ALIVE) { return } // Drivers take strict precedence over executors val shuffledAliveWorkers = Random.shuffle(workers.toSeq.filter(_.state == WorkerState.ALIVE)) val numWorkersAlive = shuffledAliveWorkers.size var curPos = 0 for (driver <- waitingDrivers.toList) { // iterate over a copy of waitingDrivers // We assign workers to each waiting driver in a round-robin fashion. For each driver, we // start from the last worker that was assigned a driver, and continue onwards until we have // explored all alive workers. 
var launched = false
      var isClusterIdle = true
      var numWorkersVisited = 0
      while (numWorkersVisited < numWorkersAlive && !launched) {
        val worker = shuffledAliveWorkers(curPos)
        isClusterIdle = worker.drivers.isEmpty && worker.executors.isEmpty
        numWorkersVisited += 1
        if (canLaunchDriver(worker, driver.desc)) {
          val allocated = worker.acquireResources(driver.desc.resourceReqs)
          driver.withResources(allocated)
          launchDriver(worker, driver)
          waitingDrivers -= driver
          launched = true
        }
        curPos = (curPos + 1) % numWorkersAlive
      }
      if (!launched && isClusterIdle) {
        logWarning(s"Driver ${driver.id} requires more resources than any of the workers can provide.")
      }
    }
    startExecutorsOnWorkers()
  }

  private def launchExecutor(worker: WorkerInfo, exec: ExecutorDesc): Unit = {
    logInfo("Launching executor " + exec.fullId + " on worker " + worker.id)
    worker.addExecutor(exec)
    worker.endpoint.send(LaunchExecutor(masterUrl, exec.application.id, exec.id,
      exec.application.desc, exec.cores, exec.memory, exec.resources))
    exec.application.driver.send(
      ExecutorAdded(exec.id, worker.id, worker.hostPort, exec.cores, exec.memory))
  }

  private def registerWorker(worker: WorkerInfo): Boolean = {
    // There may be one or more refs to dead workers on this same node (w/ different ID's),
    // remove them.
    workers.filter { w =>
      (w.host == worker.host && w.port == worker.port) && (w.state == WorkerState.DEAD)
    }.foreach { w =>
      workers -= w
    }

    val workerAddress = worker.endpoint.address
    if (addressToWorker.contains(workerAddress)) {
      val oldWorker = addressToWorker(workerAddress)
      if (oldWorker.state == WorkerState.UNKNOWN) {
        // A worker registering from UNKNOWN implies that the worker was restarted during recovery.
        // The old worker must thus be dead, so we will remove it and accept the new worker.
        removeWorker(oldWorker, "Worker replaced by a new worker with same address")
      } else {
        logInfo("Attempted to re-register worker at same address: " + workerAddress)
        return false
      }
    }

    workers += worker
    idToWorker(worker.id) = worker
    addressToWorker(workerAddress) = worker
    true
  }

  /**
   * Decommission all workers that are active on any of the given hostnames. The decommissioning is
   * asynchronously done by enqueueing WorkerDecommission messages to self. No checks are done about
   * the prior state of the worker. So an already decommissioned worker will match as well.
   *
   * @param hostnames a list of hostnames without ports, e.g. "localhost", "foo.bar.com"
   * @return the number of workers that matched the hostnames.
   */
  private def decommissionWorkersOnHosts(hostnames: Seq[String]): Integer = {
    val hostnamesSet = hostnames.map(_.toLowerCase(Locale.ROOT)).toSet
    val workersToRemove = addressToWorker
      .filterKeys(addr => hostnamesSet.contains(addr.host.toLowerCase(Locale.ROOT)))
      .values

    val workersToRemoveHostPorts = workersToRemove.map(_.hostPort)
    logInfo(s"Decommissioning the workers with host:ports ${workersToRemoveHostPorts}")

    // The workers are removed async to avoid blocking the receive loop for the entire batch
    self.send(DecommissionWorkers(workersToRemove.map(_.id).toSeq))

    // Return the count of workers actually removed
    workersToRemove.size
  }

  private def decommissionWorker(worker: WorkerInfo): Unit = {
    if (worker.state != WorkerState.DECOMMISSIONED) {
      logInfo("Decommissioning worker %s on %s:%d".format(worker.id, worker.host, worker.port))
      worker.setState(WorkerState.DECOMMISSIONED)
      for (exec <- worker.executors.values) {
        logInfo("Telling app of decommissioned executors")
        exec.application.driver.send(ExecutorUpdated(
          exec.id, ExecutorState.DECOMMISSIONED,
          Some("worker decommissioned"), None,
          // The worker host is set here to let the driver know that the host (aka. worker)
          // is also being decommissioned, so the driver can unregister all the shuffle map
          // statuses located at this host when it receives the executor lost event.
          Some(worker.host)))
        exec.state = ExecutorState.DECOMMISSIONED
        exec.application.removeExecutor(exec)
      }
      // On recovery do not re-add a decommissioned worker
      persistenceEngine.removeWorker(worker)
    } else {
      logWarning("Skipping decommissioning worker %s on %s:%d as worker is already decommissioned".
        format(worker.id, worker.host, worker.port))
    }
  }

  private def removeWorker(worker: WorkerInfo, msg: String): Unit = {
    logInfo("Removing worker " + worker.id + " on " + worker.host + ":" + worker.port)
    worker.setState(WorkerState.DEAD)
    idToWorker -= worker.id
    addressToWorker -= worker.endpoint.address

    for (exec <- worker.executors.values) {
      logInfo("Telling app of lost executor: " + exec.id)
      exec.application.driver.send(ExecutorUpdated(
        exec.id, ExecutorState.LOST, Some("worker lost"), None, Some(worker.host)))
      exec.state = ExecutorState.LOST
      exec.application.removeExecutor(exec)
    }
    for (driver <- worker.drivers.values) {
      if (driver.desc.supervise) {
        logInfo(s"Re-launching ${driver.id}")
        relaunchDriver(driver)
      } else {
        logInfo(s"Not re-launching ${driver.id} because it was not supervised")
        removeDriver(driver.id, DriverState.ERROR, None)
      }
    }
    logInfo(s"Telling app of lost worker: " + worker.id)
    apps.filterNot(completedApps.contains(_)).foreach { app =>
      app.driver.send(WorkerRemoved(worker.id, worker.host, msg))
    }
    persistenceEngine.removeWorker(worker)
    schedule()
  }

  private def relaunchDriver(driver: DriverInfo): Unit = {
    // We must set up a new driver with a new driver id here, because the original driver may
    // still be running. Consider this scenario: a worker is network partitioned from the master,
    // the master then relaunches driver driverID1 with a driver id driverID2, then the worker
    // reconnects to the master. From this point on, if driverID2 were equal to driverID1, the
    // master could not distinguish the statusUpdate of the original driver from that of the newly
    // relaunched one; for example, when DriverStateChanged(driverID1, KILLED) arrives at the
    // master, the master will remove driverID1, so the newly relaunched driver disappears too.
    // See SPARK-19900 for details.
removeDriver(driver.id, DriverState.RELAUNCHING, None) val newDriver = createDriver(driver.desc) persistenceEngine.addDriver(newDriver) drivers.add(newDriver) waitingDrivers += newDriver schedule() } private def createApplication(desc: ApplicationDescription, driver: RpcEndpointRef): ApplicationInfo = { val now = System.currentTimeMillis() val date = new Date(now) val appId = newApplicationId(date) new ApplicationInfo(now, appId, desc, date, driver, defaultCores) } private def registerApplication(app: ApplicationInfo): Unit = { val appAddress = app.driver.address if (addressToApp.contains(appAddress)) { logInfo("Attempted to re-register application at same address: " + appAddress) return } applicationMetricsSystem.registerSource(app.appSource) apps += app idToApp(app.id) = app endpointToApp(app.driver) = app addressToApp(appAddress) = app waitingApps += app } private def finishApplication(app: ApplicationInfo): Unit = { removeApplication(app, ApplicationState.FINISHED) } def removeApplication(app: ApplicationInfo, state: ApplicationState.Value): Unit = { if (apps.contains(app)) { logInfo("Removing app " + app.id) apps -= app idToApp -= app.id endpointToApp -= app.driver addressToApp -= app.driver.address if (completedApps.size >= retainedApplications) { val toRemove = math.max(retainedApplications / 10, 1) completedApps.take(toRemove).foreach { a => applicationMetricsSystem.removeSource(a.appSource) } completedApps.trimStart(toRemove) } completedApps += app // Remember it in our history waitingApps -= app for (exec <- app.executors.values) { killExecutor(exec) } app.markFinished(state) if (state != ApplicationState.FINISHED) { app.driver.send(ApplicationRemoved(state.toString)) } persistenceEngine.removeApplication(app) schedule() // Tell all workers that the application has finished, so they can clean up any app state. workers.foreach { w => w.endpoint.send(ApplicationFinished(app.id)) } } } /** * Handle a request to set the target number of executors for this application. * * If the executor limit is adjusted upwards, new executors will be launched provided * that there are workers with sufficient resources. If it is adjusted downwards, however, * we do not kill existing executors until we explicitly receive a kill request. * * @return whether the application has previously registered with this Master. */ private def handleRequestExecutors(appId: String, requestedTotal: Int): Boolean = { idToApp.get(appId) match { case Some(appInfo) => logInfo(s"Application $appId requested to set total executors to $requestedTotal.") appInfo.executorLimit = requestedTotal schedule() true case None => logWarning(s"Unknown application $appId requested $requestedTotal total executors.") false } } /** * Handle a kill request from the given application. * * This method assumes the executor limit has already been adjusted downwards through * a separate [[RequestExecutors]] message, such that we do not launch new executors * immediately after the old ones are removed. * * @return whether the application has previously registered with this Master. 
   */
  private def handleKillExecutors(appId: String, executorIds: Seq[Int]): Boolean = {
    idToApp.get(appId) match {
      case Some(appInfo) =>
        logInfo(s"Application $appId requests to kill executors: " + executorIds.mkString(", "))
        val (known, unknown) = executorIds.partition(appInfo.executors.contains)
        known.foreach { executorId =>
          val desc = appInfo.executors(executorId)
          appInfo.removeExecutor(desc)
          killExecutor(desc)
        }
        if (unknown.nonEmpty) {
          logWarning(s"Application $appId attempted to kill non-existent executors: "
            + unknown.mkString(", "))
        }
        schedule()
        true
      case None =>
        logWarning(s"Unregistered application $appId requested us to kill executors!")
        false
    }
  }

  /**
   * Cast the given executor IDs to integers and filter out the ones that fail.
   *
   * All executor IDs should be integers since we launched these executors. However,
   * the kill interface on the driver side accepts arbitrary strings, so we need to
   * handle non-integer executor IDs just to be safe.
   */
  private def formatExecutorIds(executorIds: Seq[String]): Seq[Int] = {
    executorIds.flatMap { executorId =>
      try {
        Some(executorId.toInt)
      } catch {
        case e: NumberFormatException =>
          logError(s"Encountered executor with a non-integer ID: $executorId. Ignoring")
          None
      }
    }
  }

  /**
   * Ask the worker on which the specified executor is launched to kill the executor.
   */
  private def killExecutor(exec: ExecutorDesc): Unit = {
    exec.worker.removeExecutor(exec)
    exec.worker.endpoint.send(KillExecutor(masterUrl, exec.application.id, exec.id))
    exec.state = ExecutorState.KILLED
  }

  /** Generate a new app ID given an app's submission date */
  private def newApplicationId(submitDate: Date): String = {
    val appId = "app-%s-%04d".format(createDateFormat.format(submitDate), nextAppNumber)
    nextAppNumber += 1
    appId
  }

  /** Check for, and remove, any timed-out workers */
  private def timeOutDeadWorkers(): Unit = {
    // Copy the workers into an array so we don't modify the hashset while iterating through it
    val currentTime = System.currentTimeMillis()
    val toRemove = workers.filter(_.lastHeartbeat < currentTime - workerTimeoutMs).toArray
    for (worker <- toRemove) {
      if (worker.state != WorkerState.DEAD) {
        val workerTimeoutSecs = TimeUnit.MILLISECONDS.toSeconds(workerTimeoutMs)
        logWarning("Removing %s because we got no heartbeat in %d seconds".format(
          worker.id, workerTimeoutSecs))
        removeWorker(worker, s"Not receiving heartbeat for $workerTimeoutSecs seconds")
      } else {
        if (worker.lastHeartbeat < currentTime - ((reaperIterations + 1) * workerTimeoutMs)) {
          // We've seen this DEAD worker in the UI, etc. for long enough; cull it.
          workers -= worker
        }
      }
    }
  }

  private def newDriverId(submitDate: Date): String = {
    val driverId = "driver-%s-%04d".format(createDateFormat.format(submitDate), nextDriverNumber)
    nextDriverNumber += 1
    driverId
  }

  private def createDriver(desc: DriverDescription): DriverInfo = {
    val now = System.currentTimeMillis()
    val date = new Date(now)
    new DriverInfo(now, newDriverId(date), desc, date)
  }

  private def launchDriver(worker: WorkerInfo, driver: DriverInfo): Unit = {
    logInfo("Launching driver " + driver.id + " on worker " + worker.id)
    worker.addDriver(driver)
    driver.worker = Some(worker)
    worker.endpoint.send(LaunchDriver(driver.id, driver.desc, driver.resources))
    driver.state = DriverState.RUNNING
  }

  private def removeDriver(
      driverId: String,
      finalState: DriverState,
      exception: Option[Exception]): Unit = {
    drivers.find(d => d.id == driverId) match {
      case Some(driver) =>
        logInfo(s"Removing driver: $driverId")
        drivers -= driver
        if (completedDrivers.size >= retainedDrivers) {
          val toRemove = math.max(retainedDrivers / 10, 1)
          completedDrivers.trimStart(toRemove)
        }
        completedDrivers += driver
        persistenceEngine.removeDriver(driver)
        driver.state = finalState
        driver.exception = exception
        driver.worker.foreach(w => w.removeDriver(driver))
        schedule()
      case None =>
        logWarning(s"Asked to remove unknown driver: $driverId")
    }
  }
}

private[deploy] object Master extends Logging {
  val SYSTEM_NAME = "sparkMaster"
  val ENDPOINT_NAME = "Master"

  def main(argStrings: Array[String]): Unit = {
    Thread.setDefaultUncaughtExceptionHandler(new SparkUncaughtExceptionHandler(
      exitOnUncaughtException = false))
    Utils.initDaemon(log)
    val conf = new SparkConf
    val args = new MasterArguments(argStrings, conf)
    val (rpcEnv, _, _) = startRpcEnvAndEndpoint(args.host, args.port, args.webUiPort, conf)
    rpcEnv.awaitTermination()
  }

  /**
   * Start the Master and return a three-tuple of:
   *   (1) The Master RpcEnv
   *   (2) The web UI bound port
   *   (3) The REST server bound port, if any
   */
  def startRpcEnvAndEndpoint(
      host: String,
      port: Int,
      webUiPort: Int,
      conf: SparkConf): (RpcEnv, Int, Option[Int]) = {
    val securityMgr = new SecurityManager(conf)
    val rpcEnv = RpcEnv.create(SYSTEM_NAME, host, port, conf, securityMgr)
    val masterEndpoint = rpcEnv.setupEndpoint(ENDPOINT_NAME,
      new Master(rpcEnv, rpcEnv.address, webUiPort, securityMgr, conf))
    val portsResponse = masterEndpoint.askSync[BoundPortsResponse](BoundPortsRequest)
    (rpcEnv, portsResponse.webUIPort, portsResponse.restPort)
  }
}
ueshin/apache-spark
core/src/main/scala/org/apache/spark/deploy/master/Master.scala
Scala
apache-2.0
52,051
package co.theasi.plotly.writer import org.json4s._ import org.json4s.native.JsonMethods._ import org.json4s.JsonDSL._ import scala.util.{Try, Success, Failure} import co.theasi.plotly._ object FigureWriter { def draw( figure: Figure, fileName: String, fileOptions: FileOptions = FileOptions() )(implicit server: Server): PlotFile = { if (fileOptions.overwrite) { deleteIfExists(fileName) } val drawnGrid = drawGrid(figure, fileName, fileOptions) val body = plotAsJson(figure, drawnGrid, fileName) val request = Api.post("plots", compact(render(body))) val responseAsJson = Api.despatchAndInterpret(request) PlotFile.fromResponse(responseAsJson \ "file") } def plotAsJson( figure: Figure, drawnGrid: GridFile, fileName: String ): JObject = { val writeInfos = extractSeriesWriteInfos(figure, drawnGrid) val seriesAsJson = writeInfos.map { SeriesWriter.toJson } val plotIndices = indicesFromPlots(figure.plots) val layoutFragments = for { (index, viewPort, plot) <- (plotIndices, figure.viewPorts, figure.plots).zipped fragment = plot match { case p: CartesianPlot => CartesianPlotLayoutWriter.toJson(index, viewPort, p) case p: ThreeDPlot => ThreeDPlotLayoutWriter.toJson(index, viewPort, p) } } yield fragment val fragmentsAsJson = layoutFragments.reduce { _ ~ _ } val optionsAsJson = FigureOptionsWriter.toJson(figure.options) val layout = fragmentsAsJson ~ optionsAsJson val body = ("figure" -> ("data" -> seriesAsJson) ~ ("layout" -> layout) ) ~ ("filename" -> fileName) ~ ("world_readable" -> true) body } private def drawGrid( figure: Figure, fileName: String, fileOptions: FileOptions) (implicit server: Server) : GridFile = { val allSeries = for { subplot <- figure.plots series <- subplot.series } yield series val columns = allSeries.zipWithIndex.flatMap { case (s, index) => seriesToColumns(s, index) }.toMap val grid = Grid(columns) GridWriter.draw(grid, fileName + "-grid", fileOptions) } // scalastyle:off cyclomatic.complexity private def seriesToColumns( series: Series, index: Int ): List[(String, Iterable[PType])] = { val dataColumns = series match { case s: CartesianSeries2D[_, _] => List(s"x-$index" -> s.xs, s"y-$index" -> s.ys) case s: CartesianSeries1D[_] => List(s"x-$index" -> s.xs) case s: SurfaceZ[_] => s.zs.transpose.zipWithIndex.map { case (row, rowIndex) => s"z-$index-$rowIndex" -> row }.toList case s: SurfaceXYZ[_, _, _] => val firstRow = List(PString("")) ++ s.xs.toList val otherRows = s.ys.zip(s.zs).map { case (y, zRow) => List(y) ++ zRow.toList } val rows = List(firstRow) ++ otherRows.toList rows.transpose.zipWithIndex.map { case (row, 0) => s"y-$index" -> row case (row, rowIndex) => s"z-$index-$rowIndex" -> row } case s: Scatter3D[_, _, _] => List(s"x-$index" -> s.xs, s"y-$index" -> s.ys, s"z-$index" -> s.zs) } val optionColumns = series match { case s: Scatter[_, _] => scatterOptionsToColumns(s.options, index) case _ => List.empty[(String, Iterable[PType])] } dataColumns ++ optionColumns } // scalastyle:on cyclomatic.complexity private def indicesFromPlots(plots: Vector[Plot]): Vector[Int] = { // Get the index of each plot in the output document. // This is tricky because plotly expects each type of plot // to be numbered independently. // We do this by iterating through the plots, keeping running // counters for each of the plot types. 
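    // Worked example: plots Vector(cartesian, threeD, cartesian) receive the
    // indices 1, 1 and 2, since each plot type keeps its own running counter.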
case class Counters(cartesian: Int, threeD: Int) val plotCounters = plots.scanLeft(Counters(1, 1)) { (curIndices, plot) => plot match { case p: CartesianPlot => curIndices.copy(cartesian = curIndices.cartesian + 1) case p: ThreeDPlot => curIndices.copy(threeD = curIndices.threeD + 1) } } val plotIndices = plots.zip(plotCounters).map { case (plot, counters) => plot match { case p: CartesianPlot => counters.cartesian case p: ThreeDPlot => counters.threeD } } plotIndices } def scatterOptionsToColumns(options: ScatterOptions, index: Int) : List[(String, Iterable[PType])] = options.text match { case Some(IterableText(values)) => List(s"text-$index" -> values) case _ => List.empty } private def srcsFromDrawnGrid( drawnGrid: GridFile, series: Series, index: Int ): List[String] = { val srcs = series match { case s: Scatter3D[_, _, _] => val xName = s"x-$index" val yName = s"y-$index" val zName = s"z-$index" val xuid = drawnGrid.columnUids(xName) val yuid = drawnGrid.columnUids(yName) val zuid = drawnGrid.columnUids(zName) val xsrc = s"${drawnGrid.fileId}:$xuid" val ysrc = s"${drawnGrid.fileId}:$yuid" val zsrc = s"${drawnGrid.fileId}:$zuid" List(xsrc, ysrc, zsrc) case s: CartesianSeries2D[_, _] => val xName = s"x-$index" val yName = s"y-$index" val xuid = drawnGrid.columnUids(xName) val yuid = drawnGrid.columnUids(yName) val xsrc = s"${drawnGrid.fileId}:$xuid" val ysrc = s"${drawnGrid.fileId}:$yuid" List(xsrc, ysrc) case s: CartesianSeries1D[_] => val xName = s"x-$index" val xuid = drawnGrid.columnUids(xName) val xsrc = s"${drawnGrid.fileId}:$xuid" List(xsrc) case s: SurfaceZ[_] => val zPrefix = s"z-$index" val columnNames = s.zs.transpose.zipWithIndex.map { case (row, rowIndex) => zPrefix + s"-$rowIndex" } val uids = columnNames.map { colName => drawnGrid.columnUids(colName) } val uidString = s"${drawnGrid.fileId}:${uids.mkString(",")}" List(uidString) case s: SurfaceXYZ[_, _, _] => val yColumnName = s"y-$index" val yUid = drawnGrid.columnUids(yColumnName) val zPrefix = s"z-$index" val zColumnNames = s.zs.transpose.zipWithIndex.map { case (row, rowIndex) => zPrefix + s"-${rowIndex + 1}" } val zUids = zColumnNames.map { colName => drawnGrid.columnUids(colName) } val fileId = drawnGrid.fileId val yUidString = s"$fileId:$yUid?rows=1-" val zUidString = s"$fileId:${zUids.mkString(",")}?rows=1-" val xUidString = s"$fileId:${zUids.mkString(",")}?row=0" List(xUidString, yUidString, zUidString) } srcs } private def updateSeriesFromDrawnGrid( drawnGrid: GridFile, series: Series, index: Int ): Series = series match { case s: Scatter[_, _] => val newOptions = updateScatterOptionsFromDrawnGrid(drawnGrid, s.options, index) s.copy(options = newOptions) case s: Bar[_, _] => val newOptions = updateBarOptionsFromDrawnGrid(drawnGrid, s.options, index) s.copy(options = newOptions) case s: Box[_] => val newOptions = updateBoxOptionsFromDrawnGrid(drawnGrid, s.options, index) s.copy(options = newOptions) case o => o } private def updateScatterOptionsFromDrawnGrid( drawnGrid: GridFile, options: ScatterOptions, index: Int ): ScatterOptions = { val newText = options.text.map { case IterableText(values) => val textName = s"text-$index" val textUid = drawnGrid.columnUids(textName) val textSrc = s"${drawnGrid.fileId}:$textUid" SrcText(textSrc) case t => t } options.copy(text = newText) } private def updateBarOptionsFromDrawnGrid( drawnGrid: GridFile, options: BarOptions, index: Int ): BarOptions = options private def updateBoxOptionsFromDrawnGrid( drawnGrid: GridFile, options: BoxOptions, index: Int ): BoxOptions = options 
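  // Note on the src strings built by srcsFromDrawnGrid above: each is of the
  // form "<fileId>:<uid[,uid...]>", optionally suffixed with a "?rows=..." or
  // "?row=..." selector. For a hypothetical grid file "ab12" with column uids
  // u1 and u2, a SurfaceZ series would yield the single source "ab12:u1,u2".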
private def extractSeriesWriteInfos( figure: Figure, drawnGrid: GridFile ): Vector[SeriesWriteInfo] = { val allSeries = for { subplot <- figure.plots series <- subplot.series } yield series val seriesSrcs = for { (series, index) <- allSeries.zipWithIndex srcs = srcsFromDrawnGrid(drawnGrid, series, index) } yield srcs val allUpdatedSeries = for { (series, index) <- allSeries.zipWithIndex updatedSeries = updateSeriesFromDrawnGrid( drawnGrid, series, index) } yield updatedSeries val plotIndices = indicesFromPlots(figure.plots) val seriesPlotIndex = for { (subplot, plotIndex) <- figure.plots.zip(plotIndices) series <- subplot.series } yield plotIndex val writeInfos = for { (series, srcs, plotIndex) <- (allUpdatedSeries, seriesSrcs, seriesPlotIndex).zipped // The casts are really ugly. There must be a better way writeInfo = series match { case s: Scatter[_, _] => ScatterWriteInfo(srcs, plotIndex, s.options) case s: Scatter3D[_, _, _] => Scatter3DWriteInfo(srcs, plotIndex, s.options) case s: Bar[_, _] => BarWriteInfo(srcs, plotIndex, s.options) case s: Box[_] => BoxWriteInfo(srcs, plotIndex, s.options) case s: SurfaceZ[_] => SurfaceZWriteInfo(srcs, plotIndex, s.options) case s: SurfaceXYZ[_, _, _] => SurfaceXYZWriteInfo(srcs, plotIndex, s.options) } } yield writeInfo writeInfos.toVector } private def deleteIfExists(fileName: String)(implicit server: Server) { Try { PlotFile.fromFileName(fileName) } match { case Success(plot) => // exists already -> delete Api.despatchAndInterpret(Api.delete(s"plots/${plot.fileId}")) case Failure(PlotlyException("Not found.")) => // good to go case Failure(e) => throw e // some other error -> re-throw } } }
ASIDataScience/scala-plotly-client
src/main/scala/co/theasi/plotly/writer/FigureWriter.scala
Scala
mit
10,045
/* * ____ ____ _____ ____ ___ ____ * | _ \\ | _ \\ | ____| / ___| / _/ / ___| Precog (R) * | |_) | | |_) | | _| | | | | /| | | _ Advanced Analytics Engine for NoSQL Data * | __/ | _ < | |___ | |___ |/ _| | | |_| | Copyright (C) 2010 - 2013 SlamData, Inc. * |_| |_| \\_\\ |_____| \\____| /__/ \\____| All Rights Reserved. * * This program is free software: you can redistribute it and/or modify it under the terms of the * GNU Affero General Public License as published by the Free Software Foundation, either version * 3 of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See * the GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License along with this * program. If not, see <http://www.gnu.org/licenses/>. * */ package com.precog.yggdrasil import blueeyes.json._ import com.precog.common._ import org.specs2.mutable.Specification import org.scalacheck.Gen import org.scalacheck.Gen._ import org.scalacheck.Arbitrary._ class SValueSpec extends Specification { "set" should { "set properties on an object" in { SObject(Map()).set(JPath(".foo.bar"), CString("baz")) must beSome(SObject(Map("foo" -> SObject(Map("bar" -> SString("baz")))))) } "set array indices" in { SObject(Map()).set(JPath(".foo[1].bar"), CString("baz")) must beSome(SObject(Map("foo" -> SArray(Vector(SNull, SObject(Map("bar" -> SString("baz")))))))) } "return None for a primitive" in { STrue.set(JPath(".foo.bar"), CString("hi")) must beNone } } "structure" should { "return correct sequence for an array" in { SArray(Vector(SBoolean(true))).structure must_== Seq((JPath("[0]"), CBoolean)) } } } // vim: set ts=4 sw=4 et:
precog/platform
yggdrasil/src/test/scala/com/precog/yggdrasil/SValueSpec.scala
Scala
agpl-3.0
2,025
package piecewise

/** Converts one piecewise function representation into another. */
abstract class SplineConvert[-S <: PieceFunction, +R <: PieceFunction]
  extends Function1[S, R]
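// A minimal sketch of a concrete converter; the PieceFunction subtypes named
// here are hypothetical and stand in for real types from this package:
//
//   class LineToSpline extends SplineConvert[Line, CubicSpline] {
//     override def apply(line: Line): CubicSpline = ??? // fit a spline to the line
//   }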
daniil-timofeev/gridsplines
piecewise/src/main/scala/piecewise/SplineConvert.scala
Scala
apache-2.0
119
package io.getquill.context.orientdb import com.orientechnologies.orient.core.db.ODatabasePool import com.orientechnologies.orient.core.db.ODatabaseType import com.orientechnologies.orient.core.db.OrientDB import com.orientechnologies.orient.core.db.OrientDBConfig import com.orientechnologies.orient.core.metadata.schema.OSchema import io.getquill.Literal import io.getquill.OrientDBContextConfig import io.getquill.OrientDBMirrorContext import io.getquill.OrientDBSyncContext import io.getquill.TestEntities import io.getquill.util.LoadConfig object orientdb { private val databaseName = "GratefulDeadConcerts" private var setupDone = false private val conf = OrientDBContextConfig(LoadConfig("ctx")) private def setup(): Unit = { val orientDB = new OrientDB(conf.dbUrl, "root", "root", OrientDBConfig.defaultConfig()); orientDB.createIfNotExists(databaseName, ODatabaseType.MEMORY); val pool = new ODatabasePool(conf.dbUrl, "root", "root") val schema = pool.acquire().getMetadata.getSchema getOrCreateClass(schema, "DecodeNullTestEntity") getOrCreateClass(schema, "EncodingTestEntity") getOrCreateClass(schema, "ListEntity") getOrCreateClass(schema, "ListsEntity") getOrCreateClass(schema, "ListFrozen") getOrCreateClass(schema, "MapEntity") getOrCreateClass(schema, "MapsEntity") getOrCreateClass(schema, "MapFrozen") getOrCreateClass(schema, "TestEntity") getOrCreateClass(schema, "TestEntity2") getOrCreateClass(schema, "TestEntity3") getOrCreateClass(schema, "Person") getOrCreateClass(schema, "OrderTestEntity") getOrCreateClass(schema, "SetsEntity") getOrCreateClass(schema, "Contact") getOrCreateClass(schema, "Address") } private def getOrCreateClass(iSchema: OSchema, iClassName: String): Unit = { if (!iSchema.existsClass(iClassName)) { iSchema.createClass(iClassName) () } } def mirrorContext = { if (!setupDone) { setup(); setupDone = true } new OrientDBMirrorContext(Literal) with TestEntities } def testSyncDB = { if (!setupDone) { setup(); setupDone = true } new OrientDBSyncContext(Literal, "ctx") } }
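// Note: setup() runs at most once per JVM (guarded by setupDone), so repeated
// calls to mirrorContext and testSyncDB share the same in-memory database and
// the schema classes created above.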
mentegy/quill
quill-orientdb/src/test/scala/io/getquill/context/orientdb/orientdb.scala
Scala
apache-2.0
2,168
package shared.forms import shared.messages.Language case class FormData[T](language: Language, data: T, errors: Map[String, List[String]] = Map(), generalErrors: List[String] = Nil) { def hasErrors = errors.exists(_._2.nonEmpty) || generalErrors.nonEmpty }
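// Example of the semantics above: FormData(lang, data, errors = Map("name" -> List("required")))
// and FormData(lang, data, generalErrors = List("oops")) both report hasErrors == true,
// while field entries with empty message lists alone do not.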
Igorocky/lesn
shared/src/main/scala/shared/forms/FormData.scala
Scala
mit
330
package io.github.yzernik.bitcoinscodec.messages import io.github.yzernik.bitcoinscodec.CodecSuite import io.github.yzernik.bitcoinscodec.structures._ import scodec.bits._ class GetHeadersSpec extends CodecSuite { val getheaders = GetHeaders( 70001L, List(Hash(hex"000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f")), Hash(hex"0000000000000000000000000000000000000000000000000000000000000000")) "GetHeaders codec" should { "roundtrip" in { roundtrip(GetHeaders.codec(1), getheaders) roundtrip(Message.codec(Network.TestnetParams, 1), getheaders) } "decode" in { val bytes = hex""" 71110100 01 6fe28c0ab6f1b372c1a6a246ae63f74f931e8365e15a089c68d6190000000000 0000000000000000000000000000000000000000000000000000000000000000 """.toBitVector shouldDecodeFullyTo(GetHeaders.codec(1), bytes, getheaders) } } }
yzernik/bitcoin-scodec
src/test/scala/io/github/yzernik/bitcoinscodec/messages/GetHeadersSpec.scala
Scala
mit
918
/* * Happy Melly Teller * Copyright (C) 2013 - 2016, Happy Melly http://www.happymelly.com * * This file is part of the Happy Melly Teller. * * Happy Melly Teller is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * Happy Melly Teller is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with Happy Melly Teller. If not, see <http://www.gnu.org/licenses/>. * * If you have questions concerning this license or the applicable additional * terms, you may contact by email Sergey Kotlov, [email protected] or * in writing Happy Melly One, Handelsplein 37, Rotterdam, The Netherlands, 3071 PR */ package models.repository import com.github.tototoshi.slick.MySQLJodaSupport._ import models.ExchangeRate import models.JodaMoney._ import models.database.ExchangeRateTable import org.joda.money.CurrencyUnit import org.joda.time.DateTimeZone._ import org.joda.time.LocalDate import play.api.Application import play.api.db.slick.{DatabaseConfigProvider, HasDatabaseConfig} import slick.driver.JdbcProfile import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.Future /** * Set of methods for managing exchange rate records */ class ExchangeRateRepository(app: Application) extends HasDatabaseConfig[JdbcProfile] with ExchangeRateTable { val dbConfig = DatabaseConfigProvider.get[JdbcProfile](app) import driver.api._ val rates = TableQuery[ExchangeRates] /** * Returns the exchange rate from the database, or inverts a stored rate, for the given base and counter. */ def fromDatabase(base: CurrencyUnit, counter: CurrencyUnit, date: LocalDate = LocalDate.now(UTC)): Future[Option[ExchangeRate]] = { val query = for { rate <- rates if (rate.base === base && rate.counter === counter) || (rate.base === counter && rate.counter === base) if rate.timestamp >= date.toDateTimeAtStartOfDay(UTC) if rate.timestamp < date.plusDays(1).toDateTimeAtStartOfDay(UTC) } yield rate // Invert any rates that were found with base and counter the wrong way around. val results = db.run(query.result).map(_.toList.map { case rate @ ExchangeRate(_, resultBase, resultCounter, _, _) if resultBase == counter && resultCounter == base ⇒ rate.inverse case rate ⇒ rate }) results.map(_.headOption) } /** * Adds new rate to database * * @param rate Rate */ def insert(rate: ExchangeRate): Future[ExchangeRate] = { val query = rates returning rates.map(_.id) into ((value, id) => value.copy(id = Some(id))) db.run(query += rate) } def ratesFromDatabase(base: CurrencyUnit, date: LocalDate = LocalDate.now(UTC)): Future[List[ExchangeRate]] = { val query = for { rate ← rates if rate.base === base || rate.counter === base if rate.timestamp >= date.toDateTimeAtStartOfDay(UTC) if rate.timestamp < date.plusDays(1).toDateTimeAtStartOfDay(UTC) } yield rate // Invert any rates that were found with base and counter the wrong way around. db.run(query.result).map(_.toList.map { case rate @ ExchangeRate(_, _, resultCounter, _, _) if resultCounter == base ⇒ rate.inverse case rate ⇒ rate }) } }
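// Note on the inversion above: each currency pair is stored once, so a stored
// EUR->USD rate of, say, 1.10 also answers a USD->EUR query as its inverse
// (1 / 1.10) via ExchangeRate.inverse.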
HappyMelly/teller
app/models/repository/ExchangeRateRepository.scala
Scala
gpl-3.0
3,684
package com.sksamuel.elastic4s.requests.searches.aggs.responses.bucket import com.sksamuel.elastic4s.requests.searches.aggs.responses.{AggBucket, BucketAggregation} case class IpRangeAggResult(name: String, buckets: Seq[IpRangeBucket]) extends BucketAggregation case class IpRangeBucket(key: Option[String], override val docCount: Long, from: Option[String], to: Option[String], private[elastic4s] val data: Map[String, Any]) extends AggBucket object IpRangeAggResult { def apply(name: String, data: Map[String, Any]): IpRangeAggResult = IpRangeAggResult( name, data("buckets") match { case buckets: Seq[_] => buckets.asInstanceOf[Seq[Map[String, Any]]].map { map => mkBucket(map.get("key").map(_.toString), map) } //keyed results case buckets: Map[_, _] => buckets .asInstanceOf[Map[String, Any]] .map { case (key, values) => mkBucket(Some(key), values.asInstanceOf[Map[String, Any]]) } .toSeq } ) private def mkBucket(key: Option[String], map: Map[String, Any]): IpRangeBucket = IpRangeBucket( key, map("doc_count").toString.toLong, map.get("from").map(_.toString), map.get("to").map(_.toString), map ) }
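// The two response shapes handled above, sketched with hypothetical values:
//   "buckets": [ {"key": "...", "from": "10.0.0.0", "to": "10.0.0.5", "doc_count": 2}, ... ]
//   "buckets": { "range-1": {"from": "10.0.0.0", "doc_count": 2}, ... }  // keyed results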
sksamuel/elastic4s
elastic4s-domain/src/main/scala/com/sksamuel/elastic4s/requests/searches/aggs/responses/bucket/iprange.scala
Scala
apache-2.0
1,387
/* * Part of GDL book_api. * Copyright (C) 2017 Global Digital Library * * See LICENSE */ package io.digitallibrary.bookapi.repository import io.digitallibrary.bookapi.model.domain.{Book, Publisher} import io.digitallibrary.bookapi.{IntegrationSuite, TestEnvironment} import io.digitallibrary.license.model.License class BookRepositoryTest extends IntegrationSuite with TestEnvironment { override val bookRepository = new BookRepository override val publisherRepository = new PublisherRepository test("that Book is added and retrieved") { withRollback { implicit session => val testName = "some-name" val publisher = publisherRepository.add(Publisher(None, None, "Publisher Name")) val license = License("cc-by-4.0") val book = bookRepository.add(Book(None, None, publisher.id.get, publisher, license, "storyweaver")) val withId = bookRepository.withId(book.id.get) withId.head.id should equal (book.id) withId.head.license.id should equal (license.id) withId.head.publisher.id should equal (publisher.id) } } test("that None is returned when id does not exist") { bookRepository.withId(100) should equal (None) } }
GlobalDigitalLibraryio/book-api
src/test/scala/io/digitallibrary/bookapi/repository/BookRepositoryTest.scala
Scala
apache-2.0
1,203
package mojave import shapeless.Lens object LensExamples extends App { import mojave._ trait Tag case class Html(content: Tag) case class Body(content: String) extends Tag case class Head(content: String) extends Tag private val wrapper = Html(Body("hello world")) private val compositeLens: Lens[Html, Option[String]] = (lens[Html]) .field[Tag]("content") .ifInstanceOf[Body] .optField[String]("content") println(compositeLens.set(wrapper)(Some("hallo welt"))) }
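// For reference, the read side of the same composite lens would presumably be
//   compositeLens.get(wrapper) == Some("hello world")
// since the ifInstanceOf step makes the focus optional (None for a non-Body Tag).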
raimohanska/mojave
src/test/scala/mojave/LensExamples.scala
Scala
mit
499
package sms.core.actor import akka.actor.{ActorLogging, PoisonPill, Props} import akka.contrib.pattern.{DistributedPubSubExtension, DistributedPubSubMediator, ClusterSingletonManager} import sms.core.Logging abstract class SMSSingletonActorCompanion extends SMSActorCompanion { def singletonProps: Props def definedActorName: String private final def props(role: Option[String]): Props = ClusterSingletonManager.props( singletonProps = singletonProps, singletonName = definedActorName, terminationMessage = PoisonPill, role = role ) final def props: Props = props(roleName) final protected def actorName: Option[String] = Some(definedActorName) } abstract class SMSSingletonActor extends SMSActor with ActorLogging { override def preStart(): Unit = { log.debug("Registering self {} in DistributedPubSubExtension", self.path) DistributedPubSubExtension(context.system).mediator ! DistributedPubSubMediator.Put(self) } }
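// Sketch of a concrete companion under the contract above (the actor and the
// name are hypothetical; roleName is inherited from SMSActorCompanion):
//
//   object MarketWatcher extends SMSSingletonActorCompanion {
//     def singletonProps: Props = Props[MarketWatcherActor]
//     def definedActorName: String = "market-watcher"
//   }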
kjanosz/stock-market-sherlock
core/src/main/scala/sms/core/actor/singleton.scala
Scala
apache-2.0
966
package org.kimbasoft.akka.fsm

import akka.actor.FSM.{Transition, CurrentState}
import akka.actor.{ActorLogging, Actor}
import org.kimbasoft.akka.fsm.StateActorFSM.Batch

/**
 * Listener that logs FSM notifications: the initial `CurrentState` delivered
 * on subscription, subsequent `Transition` events, and `Batch` messages
 * emitted by `StateActorFSM`.
 *
 * @author <a href="[email protected]">Steffen Krause</a>
 * @since 1.0
 */
class StateActorListener extends Actor with ActorLogging {

  def receive: Receive = {
    case Batch(queue) =>
      log.info(s"Received Batch message with payload $queue")
    case CurrentState(actor, state) =>
      log.info(s"Subscribed to $actor in state '$state'")
    case Transition(actor, oldState, newState) =>
      log.info(s"$actor changed states '$oldState' -> '$newState'")
    case event =>
      log.warning(s"Received unknown event $event")
  }
}
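// Wiring note: CurrentState and Transition arrive only after subscribing this
// listener to an FSM actor via the standard Akka API, e.g.
//   stateActor ! akka.actor.FSM.SubscribeTransitionCallBack(listenerRef)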
kimba74/sandbox-scala
src/main/scala/org/kimbasoft/akka/fsm/StateActorListener.scala
Scala
gpl-3.0
760
package calculator import org.scalatest.FunSuite import org.junit.runner.RunWith import org.scalatest.junit.JUnitRunner import org.scalatest._ import TweetLength.MaxTweetLength @RunWith(classOf[JUnitRunner]) class CalculatorSuite extends FunSuite with ShouldMatchers { /****************** ** TWEET LENGTH ** ******************/ def tweetLength(text: String): Int = text.codePointCount(0, text.length) test("tweetRemainingCharsCount with a constant signal") { val result = TweetLength.tweetRemainingCharsCount(Var("hello world")) assert(result() == MaxTweetLength - tweetLength("hello world")) val tooLong = "foo" * 200 val result2 = TweetLength.tweetRemainingCharsCount(Var(tooLong)) assert(result2() == MaxTweetLength - tweetLength(tooLong)) } test("tweetRemainingCharsCount with a supplementary char") { val result = TweetLength.tweetRemainingCharsCount(Var("foo blabla \\uD83D\\uDCA9 bar")) assert(result() == MaxTweetLength - tweetLength("foo blabla \\uD83D\\uDCA9 bar")) } test("colorForRemainingCharsCount with a constant signal") { val resultGreen1 = TweetLength.colorForRemainingCharsCount(Var(52)) assert(resultGreen1() == "green") val resultGreen2 = TweetLength.colorForRemainingCharsCount(Var(15)) assert(resultGreen2() == "green") val resultOrange1 = TweetLength.colorForRemainingCharsCount(Var(12)) assert(resultOrange1() == "orange") val resultOrange2 = TweetLength.colorForRemainingCharsCount(Var(0)) assert(resultOrange2() == "orange") val resultRed1 = TweetLength.colorForRemainingCharsCount(Var(-1)) assert(resultRed1() == "red") val resultRed2 = TweetLength.colorForRemainingCharsCount(Var(-5)) assert(resultRed2() == "red") } }
rranelli/rrreacprog
calculator/src/test/scala/calculator/CalculatorSuite.scala
Scala
unlicense
1,760
package com.twitter.finagle.zookeeper import com.twitter.conversions.DurationOps._ import com.twitter.io.TempDirectory.create import com.twitter.finagle.common.zookeeper.ZooKeeperClient import com.twitter.zk.ServerCnxnFactory import java.net.{InetAddress, InetSocketAddress} import org.apache.zookeeper.server.ZooKeeperServer class ZkInstance { var connectionFactory: ServerCnxnFactory = null var zookeeperServer: ZooKeeperServer = null var zookeeperClient: ZooKeeperClient = null var started = false lazy val zookeeperAddress = { if (!started) throw new IllegalStateException("can't get address until instance is started") new InetSocketAddress(zookeeperServer.getClientPort) } lazy val zookeeperConnectString = zookeeperAddress.getHostName() + ":" + zookeeperAddress.getPort() def start(): Unit = { started = true zookeeperServer = new ZooKeeperServer(create(), create(), ZooKeeperServer.DEFAULT_TICK_TIME) zookeeperServer.setMaxSessionTimeout(100) zookeeperServer.setMinSessionTimeout(100) connectionFactory = ServerCnxnFactory(InetAddress.getLoopbackAddress) connectionFactory.startup(zookeeperServer) zookeeperClient = new ZooKeeperClient(10.milliseconds, zookeeperAddress) // Disable noise from zookeeper logger // java.util.logging.LogManager.getLogManager().reset(); } def stop(): Unit = { connectionFactory.shutdown() zookeeperClient.close() } }
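// Typical fixture usage (sketch): call start() before the suite, point clients
// at zookeeperConnectString, then call stop() afterwards. Note that reading
// zookeeperAddress before start() throws an IllegalStateException.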
luciferous/finagle
finagle-serversets/src/test/scala/com/twitter/finagle/zookeeper/ZkInstance.scala
Scala
apache-2.0
1,440
package com.madsen.xcs.core.actuator import com.madsen.xsc.interop.actuator.{Actuator => InteropActuator, ActuatorStore} /** * Created by erikmadsen2 on 15/05/15. */ trait CompositeActuatorStore extends ActuatorStore { override final def lookup(s: String): InteropActuator = doLookup(s) protected def doLookup(s: String): CompositeActuator }
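// The final lookup plus protected doLookup pair above is a template-method
// hook that lets implementors narrow the result type, e.g. (hypothetical
// map-backed store):
//
//   trait MapBackedActuatorStore extends CompositeActuatorStore {
//     protected def actuators: Map[String, CompositeActuator]
//     protected def doLookup(s: String): CompositeActuator = actuators(s)
//   }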
beatmadsen/xcs-main
src/main/scala/com/madsen/xcs/core/actuator/CompositeActuatorStore.scala
Scala
mit
352
package endpoints package documented package algebra import scala.language.higherKinds /** * Algebra interface for describing responses. * * This interface is modeled after [[endpoints.algebra.Responses]] but some * methods take additional parameters carrying documentation. */ trait Responses { /** Information carried by a response */ type Response[A] /** * Empty response. */ def emptyResponse(documentation: String): Response[Unit] /** * Text response. */ def textResponse(documentation: String): Response[String] }
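/**
 * A minimal interpreter sketch for the algebra above (illustrative only, not
 * part of the endpoints library): it represents every response by nothing but
 * its documentation string.
 */
object DocumentationInterpreter extends Responses {
  type Response[A] = String
  def emptyResponse(documentation: String): Response[Unit] = documentation
  def textResponse(documentation: String): Response[String] = documentation
}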
Krever/endpoints
openapi/openapi/src/main/scala/endpoints/documented/algebra/Responses.scala
Scala
mit
564
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.ml.linalg import org.apache.spark.ml.SparkMLFunSuite import org.apache.spark.ml.linalg.BLAS._ import org.apache.spark.ml.util.TestingUtils._ class BLASSuite extends SparkMLFunSuite { test("nativeL1Threshold") { assert(getBLAS(128) == BLAS.javaBLAS) assert(getBLAS(256) == BLAS.nativeBLAS) assert(getBLAS(512) == BLAS.nativeBLAS) } test("copy") { val sx = Vectors.sparse(4, Array(0, 2), Array(1.0, -2.0)) val dx = Vectors.dense(1.0, 0.0, -2.0, 0.0) val sy = Vectors.sparse(4, Array(0, 1, 3), Array(2.0, 1.0, 1.0)) val dy = Array(2.0, 1.0, 0.0, 1.0) val dy1 = Vectors.dense(dy.clone()) copy(sx, dy1) assert(dy1 ~== dx absTol 1e-15) val dy2 = Vectors.dense(dy.clone()) copy(dx, dy2) assert(dy2 ~== dx absTol 1e-15) intercept[IllegalArgumentException] { copy(sx, sy) } intercept[IllegalArgumentException] { copy(dx, sy) } withClue("vector sizes must match") { intercept[Exception] { copy(sx, Vectors.dense(0.0, 1.0, 2.0)) } } } test("scal") { val a = 0.1 val sx = Vectors.sparse(3, Array(0, 2), Array(1.0, -2.0)) val dx = Vectors.dense(1.0, 0.0, -2.0) scal(a, sx) assert(sx ~== Vectors.sparse(3, Array(0, 2), Array(0.1, -0.2)) absTol 1e-15) scal(a, dx) assert(dx ~== Vectors.dense(0.1, 0.0, -0.2) absTol 1e-15) } test("axpy") { val alpha = 0.1 val sx = Vectors.sparse(3, Array(0, 2), Array(1.0, -2.0)) val dx = Vectors.dense(1.0, 0.0, -2.0) val dy = Array(2.0, 1.0, 0.0) val expected = Vectors.dense(2.1, 1.0, -0.2) val dy1 = Vectors.dense(dy.clone()) axpy(alpha, sx, dy1) assert(dy1 ~== expected absTol 1e-15) val dy2 = Vectors.dense(dy.clone()) axpy(alpha, dx, dy2) assert(dy2 ~== expected absTol 1e-15) val sy = Vectors.sparse(4, Array(0, 1), Array(2.0, 1.0)) intercept[IllegalArgumentException] { axpy(alpha, sx, sy) } intercept[IllegalArgumentException] { axpy(alpha, dx, sy) } withClue("vector sizes must match") { intercept[Exception] { axpy(alpha, sx, Vectors.dense(1.0, 2.0)) } } } test("dot") { val sx = Vectors.sparse(3, Array(0, 2), Array(1.0, -2.0)) val dx = Vectors.dense(1.0, 0.0, -2.0) val sy = Vectors.sparse(3, Array(0, 1), Array(2.0, 1.0)) val dy = Vectors.dense(2.0, 1.0, 0.0) assert(dot(sx, sy) ~== 2.0 absTol 1e-15) assert(dot(sy, sx) ~== 2.0 absTol 1e-15) assert(dot(sx, dy) ~== 2.0 absTol 1e-15) assert(dot(dy, sx) ~== 2.0 absTol 1e-15) assert(dot(dx, dy) ~== 2.0 absTol 1e-15) assert(dot(dy, dx) ~== 2.0 absTol 1e-15) assert(dot(sx, sx) ~== 5.0 absTol 1e-15) assert(dot(dx, dx) ~== 5.0 absTol 1e-15) assert(dot(sx, dx) ~== 5.0 absTol 1e-15) assert(dot(dx, sx) ~== 5.0 absTol 1e-15) val sx1 = Vectors.sparse(10, Array(0, 3, 5, 7, 8), Array(1.0, 2.0, 3.0, 4.0, 5.0)) val sx2 = Vectors.sparse(10, Array(1, 3, 6, 7, 9), Array(1.0, 2.0, 3.0, 4.0, 5.0)) assert(dot(sx1, sx2) ~== 20.0 absTol 1e-15) assert(dot(sx2, sx1) ~== 20.0 
absTol 1e-15) withClue("vector sizes must match") { intercept[Exception] { dot(sx, Vectors.dense(2.0, 1.0)) } } } test("spr") { // test dense vector val alpha = 0.1 val x = new DenseVector(Array(1.0, 2, 2.1, 4)) val U = new DenseVector(Array(1.0, 2, 2, 3, 3, 3, 4, 4, 4, 4)) val expected = new DenseVector(Array(1.1, 2.2, 2.4, 3.21, 3.42, 3.441, 4.4, 4.8, 4.84, 5.6)) spr(alpha, x, U) assert(U ~== expected absTol 1e-9) val matrix33 = new DenseVector(Array(1.0, 2, 3, 4, 5)) withClue("Size of vector must match the rank of matrix") { intercept[Exception] { spr(alpha, x, matrix33) } } // test sparse vector val sv = new SparseVector(4, Array(0, 3), Array(1.0, 2)) val U2 = new DenseVector(Array(1.0, 2, 2, 3, 3, 3, 4, 4, 4, 4)) spr(0.1, sv, U2) val expectedSparse = new DenseVector(Array(1.1, 2.0, 2.0, 3.0, 3.0, 3.0, 4.2, 4.0, 4.0, 4.4)) assert(U2 ~== expectedSparse absTol 1e-15) } test("syr") { val dA = new DenseMatrix(4, 4, Array(0.0, 1.2, 2.2, 3.1, 1.2, 3.2, 5.3, 4.6, 2.2, 5.3, 1.8, 3.0, 3.1, 4.6, 3.0, 0.8)) val x = new DenseVector(Array(0.0, 2.7, 3.5, 2.1)) val alpha = 0.15 val expected = new DenseMatrix(4, 4, Array(0.0, 1.2, 2.2, 3.1, 1.2, 4.2935, 6.7175, 5.4505, 2.2, 6.7175, 3.6375, 4.1025, 3.1, 5.4505, 4.1025, 1.4615)) syr(alpha, x, dA) assert(dA ~== expected absTol 1e-15) val dB = new DenseMatrix(3, 4, Array(0.0, 1.2, 2.2, 3.1, 1.2, 3.2, 5.3, 4.6, 2.2, 5.3, 1.8, 3.0)) withClue("Matrix A must be a symmetric Matrix") { intercept[Exception] { syr(alpha, x, dB) } } val dC = new DenseMatrix(3, 3, Array(0.0, 1.2, 2.2, 1.2, 3.2, 5.3, 2.2, 5.3, 1.8)) withClue("Size of vector must match the rank of matrix") { intercept[Exception] { syr(alpha, x, dC) } } val y = new DenseVector(Array(0.0, 2.7, 3.5, 2.1, 1.5)) withClue("Size of vector must match the rank of matrix") { intercept[Exception] { syr(alpha, y, dA) } } val xSparse = new SparseVector(4, Array(0, 2, 3), Array(1.0, 3.0, 4.0)) val dD = new DenseMatrix(4, 4, Array(0.0, 1.2, 2.2, 3.1, 1.2, 3.2, 5.3, 4.6, 2.2, 5.3, 1.8, 3.0, 3.1, 4.6, 3.0, 0.8)) syr(0.1, xSparse, dD) val expectedSparse = new DenseMatrix(4, 4, Array(0.1, 1.2, 2.5, 3.5, 1.2, 3.2, 5.3, 4.6, 2.5, 5.3, 2.7, 4.2, 3.5, 4.6, 4.2, 2.4)) assert(dD ~== expectedSparse absTol 1e-15) } test("gemm") { val dA = new DenseMatrix(4, 3, Array(0.0, 1.0, 0.0, 0.0, 2.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 3.0)) val sA = new SparseMatrix(4, 3, Array(0, 1, 3, 4), Array(1, 0, 2, 3), Array(1.0, 2.0, 1.0, 3.0)) val B = new DenseMatrix(3, 2, Array(1.0, 0.0, 0.0, 0.0, 2.0, 1.0)) val expected = new DenseMatrix(4, 2, Array(0.0, 1.0, 0.0, 0.0, 4.0, 0.0, 2.0, 3.0)) val BTman = new DenseMatrix(2, 3, Array(1.0, 0.0, 0.0, 2.0, 0.0, 1.0)) val BT = B.transpose assert(dA.multiply(B) ~== expected absTol 1e-15) assert(sA.multiply(B) ~== expected absTol 1e-15) val C1 = new DenseMatrix(4, 2, Array(1.0, 0.0, 2.0, 1.0, 0.0, 0.0, 1.0, 0.0)) val C2 = C1.copy val C3 = C1.copy val C4 = C1.copy val C5 = C1.copy val C6 = C1.copy val C7 = C1.copy val C8 = C1.copy val C9 = C1.copy val C10 = C1.copy val C11 = C1.copy val C12 = C1.copy val C13 = C1.copy val C14 = C1.copy val C15 = C1.copy val C16 = C1.copy val C17 = C1.copy val expected2 = new DenseMatrix(4, 2, Array(2.0, 1.0, 4.0, 2.0, 4.0, 0.0, 4.0, 3.0)) val expected3 = new DenseMatrix(4, 2, Array(2.0, 2.0, 4.0, 2.0, 8.0, 0.0, 6.0, 6.0)) val expected4 = new DenseMatrix(4, 2, Array(5.0, 0.0, 10.0, 5.0, 0.0, 0.0, 5.0, 0.0)) val expected5 = C1.copy gemm(1.0, dA, B, 2.0, C1) gemm(1.0, sA, B, 2.0, C2) gemm(2.0, dA, B, 2.0, C3) gemm(2.0, sA, B, 2.0, C4) assert(C1 ~== expected2 absTol 1e-15) 
assert(C2 ~== expected2 absTol 1e-15) assert(C3 ~== expected3 absTol 1e-15) assert(C4 ~== expected3 absTol 1e-15) gemm(1.0, dA, B, 0.0, C17) assert(C17 ~== expected absTol 1e-15) gemm(1.0, sA, B, 0.0, C17) assert(C17 ~== expected absTol 1e-15) withClue("columns of A don't match the rows of B") { intercept[Exception] { gemm(1.0, dA.transpose, B, 2.0, C1) } } val dATman = new DenseMatrix(3, 4, Array(0.0, 2.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 3.0)) val sATman = new SparseMatrix(3, 4, Array(0, 1, 2, 3, 4), Array(1, 0, 1, 2), Array(2.0, 1.0, 1.0, 3.0)) val dATT = dATman.transpose val sATT = sATman.transpose val BTT = BTman.transpose assert(dATT.multiply(B) ~== expected absTol 1e-15) assert(sATT.multiply(B) ~== expected absTol 1e-15) assert(dATT.multiply(BTT) ~== expected absTol 1e-15) assert(sATT.multiply(BTT) ~== expected absTol 1e-15) gemm(1.0, dATT, BTT, 2.0, C5) gemm(1.0, sATT, BTT, 2.0, C6) gemm(2.0, dATT, BTT, 2.0, C7) gemm(2.0, sATT, BTT, 2.0, C8) gemm(1.0, dA, BTT, 2.0, C9) gemm(1.0, sA, BTT, 2.0, C10) gemm(2.0, dA, BTT, 2.0, C11) gemm(2.0, sA, BTT, 2.0, C12) assert(C5 ~== expected2 absTol 1e-15) assert(C6 ~== expected2 absTol 1e-15) assert(C7 ~== expected3 absTol 1e-15) assert(C8 ~== expected3 absTol 1e-15) assert(C9 ~== expected2 absTol 1e-15) assert(C10 ~== expected2 absTol 1e-15) assert(C11 ~== expected3 absTol 1e-15) assert(C12 ~== expected3 absTol 1e-15) gemm(0, dA, B, 5, C13) gemm(0, sA, B, 5, C14) gemm(0, dA, B, 1, C15) gemm(0, sA, B, 1, C16) assert(C13 ~== expected4 absTol 1e-15) assert(C14 ~== expected4 absTol 1e-15) assert(C15 ~== expected5 absTol 1e-15) assert(C16 ~== expected5 absTol 1e-15) } test("gemv") { val dA = new DenseMatrix(4, 3, Array(0.0, 1.0, 0.0, 0.0, 2.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 3.0)) val sA = new SparseMatrix(4, 3, Array(0, 1, 3, 4), Array(1, 0, 2, 3), Array(1.0, 2.0, 1.0, 3.0)) val dA2 = new DenseMatrix(4, 3, Array(0.0, 2.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 3.0), true) val sA2 = new SparseMatrix(4, 3, Array(0, 1, 2, 3, 4), Array(1, 0, 1, 2), Array(2.0, 1.0, 1.0, 3.0), true) val dx = new DenseVector(Array(1.0, 2.0, 3.0)) val sx = dx.toSparse val expected = new DenseVector(Array(4.0, 1.0, 2.0, 9.0)) assert(dA.multiply(dx) ~== expected absTol 1e-15) assert(sA.multiply(dx) ~== expected absTol 1e-15) assert(dA.multiply(sx) ~== expected absTol 1e-15) assert(sA.multiply(sx) ~== expected absTol 1e-15) val y1 = new DenseVector(Array(1.0, 3.0, 1.0, 0.0)) val y2 = y1.copy val y3 = y1.copy val y4 = y1.copy val y5 = y1.copy val y6 = y1.copy val y7 = y1.copy val y8 = y1.copy val y9 = y1.copy val y10 = y1.copy val y11 = y1.copy val y12 = y1.copy val y13 = y1.copy val y14 = y1.copy val y15 = y1.copy val y16 = y1.copy val expected2 = new DenseVector(Array(6.0, 7.0, 4.0, 9.0)) val expected3 = new DenseVector(Array(10.0, 8.0, 6.0, 18.0)) gemv(1.0, dA, dx, 2.0, y1) gemv(1.0, sA, dx, 2.0, y2) gemv(1.0, dA, sx, 2.0, y3) gemv(1.0, sA, sx, 2.0, y4) gemv(1.0, dA2, dx, 2.0, y5) gemv(1.0, sA2, dx, 2.0, y6) gemv(1.0, dA2, sx, 2.0, y7) gemv(1.0, sA2, sx, 2.0, y8) gemv(2.0, dA, dx, 2.0, y9) gemv(2.0, sA, dx, 2.0, y10) gemv(2.0, dA, sx, 2.0, y11) gemv(2.0, sA, sx, 2.0, y12) gemv(2.0, dA2, dx, 2.0, y13) gemv(2.0, sA2, dx, 2.0, y14) gemv(2.0, dA2, sx, 2.0, y15) gemv(2.0, sA2, sx, 2.0, y16) assert(y1 ~== expected2 absTol 1e-15) assert(y2 ~== expected2 absTol 1e-15) assert(y3 ~== expected2 absTol 1e-15) assert(y4 ~== expected2 absTol 1e-15) assert(y5 ~== expected2 absTol 1e-15) assert(y6 ~== expected2 absTol 1e-15) assert(y7 ~== expected2 absTol 1e-15) assert(y8 
~== expected2 absTol 1e-15) assert(y9 ~== expected3 absTol 1e-15) assert(y10 ~== expected3 absTol 1e-15) assert(y11 ~== expected3 absTol 1e-15) assert(y12 ~== expected3 absTol 1e-15) assert(y13 ~== expected3 absTol 1e-15) assert(y14 ~== expected3 absTol 1e-15) assert(y15 ~== expected3 absTol 1e-15) assert(y16 ~== expected3 absTol 1e-15) withClue("columns of A don't match the rows of B") { intercept[Exception] { gemv(1.0, dA.transpose, dx, 2.0, y1) } intercept[Exception] { gemv(1.0, sA.transpose, dx, 2.0, y1) } intercept[Exception] { gemv(1.0, dA.transpose, sx, 2.0, y1) } intercept[Exception] { gemv(1.0, sA.transpose, sx, 2.0, y1) } } val y17 = new DenseVector(Array(0.0, 0.0)) val y18 = y17.copy val sA3 = new SparseMatrix(3, 2, Array(0, 2, 4), Array(1, 2, 0, 1), Array(2.0, 1.0, 1.0, 2.0)) .transpose val sA4 = new SparseMatrix(2, 3, Array(0, 1, 3, 4), Array(1, 0, 1, 0), Array(1.0, 2.0, 2.0, 1.0)) val sx3 = new SparseVector(3, Array(1, 2), Array(2.0, 1.0)) val expected4 = new DenseVector(Array(5.0, 4.0)) gemv(1.0, sA3, sx3, 0.0, y17) gemv(1.0, sA4, sx3, 0.0, y18) assert(y17 ~== expected4 absTol 1e-15) assert(y18 ~== expected4 absTol 1e-15) val dAT = new DenseMatrix(3, 4, Array(0.0, 2.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 3.0)) val sAT = new SparseMatrix(3, 4, Array(0, 1, 2, 3, 4), Array(1, 0, 1, 2), Array(2.0, 1.0, 1.0, 3.0)) val dATT = dAT.transpose val sATT = sAT.transpose assert(dATT.multiply(dx) ~== expected absTol 1e-15) assert(sATT.multiply(dx) ~== expected absTol 1e-15) assert(dATT.multiply(sx) ~== expected absTol 1e-15) assert(sATT.multiply(sx) ~== expected absTol 1e-15) } test("spmv") { /* A = [[3.0, -2.0, 2.0, -4.0], [-2.0, -8.0, 4.0, 7.0], [2.0, 4.0, -3.0, -3.0], [-4.0, 7.0, -3.0, 0.0]] x = [5.0, 2.0, -1.0, -9.0] Ax = [ 45., -93., 48., -3.] */ val A = new DenseVector(Array(3.0, -2.0, -8.0, 2.0, 4.0, -3.0, -4.0, 7.0, -3.0, 0.0)) val x = new DenseVector(Array(5.0, 2.0, -1.0, -9.0)) val n = 4 val y1 = new DenseVector(Array(-3.0, 6.0, -8.0, -3.0)) val y2 = y1.copy val y3 = y1.copy val y4 = y1.copy val y5 = y1.copy val y6 = y1.copy val y7 = y1.copy val expected1 = new DenseVector(Array(42.0, -87.0, 40.0, -6.0)) val expected2 = new DenseVector(Array(19.5, -40.5, 16.0, -4.5)) val expected3 = new DenseVector(Array(-25.5, 52.5, -32.0, -1.5)) val expected4 = new DenseVector(Array(-3.0, 6.0, -8.0, -3.0)) val expected5 = new DenseVector(Array(43.5, -90.0, 44.0, -4.5)) val expected6 = new DenseVector(Array(46.5, -96.0, 52.0, -1.5)) val expected7 = new DenseVector(Array(45.0, -93.0, 48.0, -3.0)) dspmv(n, 1.0, A, x, 1.0, y1) dspmv(n, 0.5, A, x, 1.0, y2) dspmv(n, -0.5, A, x, 1.0, y3) dspmv(n, 0.0, A, x, 1.0, y4) dspmv(n, 1.0, A, x, 0.5, y5) dspmv(n, 1.0, A, x, -0.5, y6) dspmv(n, 1.0, A, x, 0.0, y7) assert(y1 ~== expected1 absTol 1e-8) assert(y2 ~== expected2 absTol 1e-8) assert(y3 ~== expected3 absTol 1e-8) assert(y4 ~== expected4 absTol 1e-8) assert(y5 ~== expected5 absTol 1e-8) assert(y6 ~== expected6 absTol 1e-8) assert(y7 ~== expected7 absTol 1e-8) } }
ueshin/apache-spark
mllib-local/src/test/scala/org/apache/spark/ml/linalg/BLASSuite.scala
Scala
apache-2.0
15,394
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.descriptors import org.apache.flink.table.api.ValidationException import org.apache.flink.table.descriptors.FunctionDescriptorValidator.FROM import org.apache.flink.table.util.JavaScalaConversionUtil.toJava import scala.collection.JavaConverters._ /** * Validator for [[FunctionDescriptor]]. */ class FunctionDescriptorValidator extends DescriptorValidator { override def validate(properties: DescriptorProperties): Unit = { val classValidation = (_: String) => { new ClassInstanceValidator().validate(properties) } // check for 'from' if (properties.containsKey(FROM)) { properties.validateEnum( FROM, false, Map( FunctionDescriptorValidator.FROM_VALUE_CLASS -> toJava(classValidation) ).asJava ) } else { throw new ValidationException("Could not find 'from' property for function.") } } } object FunctionDescriptorValidator { val FROM = "from" val FROM_VALUE_CLASS = "class" }
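// Example: this validator requires the 'from' property and currently accepts
// only from = class, after which ClassInstanceValidator checks the
// class-instance properties (their exact keys are defined in that validator).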
ueshin/apache-flink
flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/descriptors/FunctionDescriptorValidator.scala
Scala
apache-2.0
1,827
// Partest does proper mixed compilation:
//    1. scalac *.scala *.java
//    2. javac *.java
//    3. scalac *.scala
//
// In the second scalac round, the classfile for A_1 is on the classpath.
// Therefore the inliner has access to the bytecode of `bar`, which means
// it can verify that the invocation to `bar` can be safely inlined.
//
// So both callsites of `flop` are inlined.
//
// In a single mixed compilation, `flop` cannot be inlined, see JUnit InlinerTest.scala,
// def mixedCompilationNoInline.
class B {
  @inline final def flop = A_1.bar
  def g = flop
}
class C {
  def h(b: B) = b.flop
}
felixmulder/scala
test/files/run/bcodeInlinerMixed/B_1.scala
Scala
bsd-3-clause
601
package scala.meta.tests package parsers import scala.meta._, Term.{Name => TermName, _}, Type.{Name => TypeName}, Name.{Anonymous, Indeterminate} import scala.meta.dialects.Scala211 class TermSuite extends ParseSuite { test("x") { val TermName("x") = term("x") } test("`x`") { val name @ TermName("x") = term("`x`") // TODO: revisit this once we have trivia in place // assert(name.isBackquoted === true) } test("a.b.c") { val outer @ Select(inner @ Select(TermName("a"), TermName("b")), TermName("c")) = term("a.b.c") // TODO: revisit this once we have trivia in place // assert(outer.isPostfix === false) // assert(inner.isPostfix === false) } test("a.b c") { val outer @ Select(inner @ Select(TermName("a"), TermName("b")), TermName("c")) = term("a.b c") // TODO: revisit this once we have trivia in place // assert(outer.isPostfix === true) // assert(inner.isPostfix === false) } test("foo.this") { val This(Indeterminate("foo")) = term("foo.this") } test("this") { val This(Anonymous()) = term("this") } test("a.super[b].c") { val Select(Super(Indeterminate("a"), Indeterminate("b")), TermName("c")) = term("a.super[b].c") } test("super[b].c") { val Select(Super(Anonymous(), Indeterminate("b")), TermName("c")) = term("super[b].c") } test("a.super.c") { val Select(Super(Indeterminate("a"), Anonymous()), TermName("c")) = term("a.super.c") } test("super.c") { val Select(Super(Anonymous(), Anonymous()), TermName("c")) = term("super.c") } test("s\\"a $b c\\"") { val Interpolate(TermName("s"), Lit("a ") :: Lit(" c") :: Nil, TermName("b") :: Nil) = term("s\\"a $b c\\"") } test("f(0)") { val Apply(TermName("f"), Lit(0) :: Nil) = term("f(0)") } test("f(x = 0)") { val Apply(TermName("f"), Arg.Named(TermName("x"), Lit(0)) :: Nil) = term("f(x = 0)") } test("f(x: _*)") { val Apply(TermName("f"), Arg.Repeated(TermName("x")) :: Nil) = term("f(x: _*)") } test("f(x = xs: _*)") { val Term.Apply(Term.Name("f"), Seq(Term.Arg.Named(Term.Name("x"), Term.Arg.Repeated(Term.Name("xs"))))) = term("f(x = xs: _*)") } test("a + ()") { val ApplyInfix(TermName("a"), TermName("+"), Nil, Nil) = term("a + ()") } test("a + b") { val ApplyInfix(TermName("a"), TermName("+"), Nil, TermName("b") :: Nil) = term("a + b") } test("a + b + c") { val ApplyInfix(ApplyInfix(TermName("a"), TermName("+"), Nil, TermName("b") :: Nil), TermName("+"), Nil, TermName("c") :: Nil) = term("a + b + c") } test("a :: b") { val ApplyInfix(TermName("a"), TermName("::"), Nil, TermName("b") :: Nil) = term("a :: b") } test("a :: b :: c") { val ApplyInfix(TermName("a"), TermName("::"), Nil, ApplyInfix(TermName("b"), TermName("::"), Nil, TermName("c") :: Nil) :: Nil) = term("a :: b :: c") } test("!a") { val ApplyUnary(TermName("!"), TermName("a")) = term("!a") } test("a = true") { val Assign(TermName("a"), Lit(true)) = term("a = true") } test("a(0) = true") { val Update(TermName("a"), (Lit(0) :: Nil) :: Nil, Lit(true)) = term("a(0) = true") } test("return") { val ret @ Return(Lit(())) = term("return") // TODO: revisit this once we have trivia in place // assert(ret.hasExpr === false) } test("return 1") { val ret @ Return(Lit(1)) = term("return 1") // TODO: revisit this once we have trivia in place // assert(ret.hasExpr === true) } test("throw 1") { val Throw(Lit(1)) = term("throw 1") } test("1: Int") { val Ascribe(Lit(1), TypeName("Int")) = term("1: Int") } test("1: @foo") { val Annotate(Lit(1), Mod.Annot(Ctor.Name("foo")) :: Nil) = term("1: @foo") } test("(true, false)") { val Tuple(Lit(true) :: Lit(false) :: Nil) = term("(true, false)") } test("{ true; false }") { 
val Block(Lit(true) :: Lit(false) :: Nil) = term("{ true; false }") } test("{ true }") { val Block(Lit(true) :: Nil) = term("{ true }") } test("if (true) true else false") { val iff @ If(Lit(true), Lit(true), Lit(false)) = term("if (true) true else false") // TODO: revisit this once we have trivia in place // assert(iff.hasElsep === true) } test("if (true) true; else false") { val iff @ If(Lit(true), Lit(true), Lit(false)) = term("if (true) true; else false") // TODO: revisit this once we have trivia in place // assert(iff.hasElsep === true) } test("if (true) true") { val iff @ If(Lit(true), Lit(true), Lit(())) = term("if (true) true") // TODO: revisit this once we have trivia in place // assert(iff.hasElsep === false) } test("() => x") { val Term.Function(Nil, Term.Name("x")) = term("() => x") val Term.Function(Nil, Term.Block(List(Term.Name("x")))) = blockStat("() => x") val Term.Function(Nil, Term.Name("x")) = templStat("() => x") } test("(()) => x") { val Term.Function(Nil, Term.Name("x")) = term("(()) => x") val Term.Function(Nil, Term.Block(List(Term.Name("x")))) = blockStat("(()) => x") val Term.Function(Nil, Term.Name("x")) = templStat("(()) => x") } test("x => x") { val Term.Function(List(Term.Param(Nil, Term.Name("x"), None, None)), Term.Name("x")) = term("x => x") val Term.Function(List(Term.Param(Nil, Term.Name("x"), None, None)), Term.Block(List(Term.Name("x")))) = blockStat("x => x") intercept[ParseException] { templStat("x => x") } } test("(x) => x") { val Term.Function(List(Term.Param(Nil, Term.Name("x"), None, None)), Term.Name("x")) = term("(x) => x") val Term.Function(List(Term.Param(Nil, Term.Name("x"), None, None)), Term.Block(List(Term.Name("x")))) = blockStat("(x) => x") intercept[ParseException] { templStat("(x) => x") } } test("_ => x") { val Term.Function(List(Term.Param(Nil, Name.Anonymous(), None, None)), Term.Name("x")) = term("_ => x") val Term.Function(List(Term.Param(Nil, Name.Anonymous(), None, None)), Term.Block(List(Term.Name("x")))) = blockStat("_ => x") intercept[ParseException] { templStat("_ => x") } } test("(_) => x") { val Term.Function(List(Term.Param(Nil, Name.Anonymous(), None, None)), Term.Name("x")) = term("(_) => x") val Term.Function(List(Term.Param(Nil, Name.Anonymous(), None, None)), Term.Block(List(Term.Name("x")))) = blockStat("(_) => x") intercept[ParseException] { templStat("(_) => x") } } test("x: Int => x") { // LAWL: this is how scalac's parser works val Term.Ascribe(Term.Name("x"), Type.Function(List(Type.Name("Int")), Type.Name("x"))) = term("x: Int => x") val Term.Function(List(Term.Param(Nil, Term.Name("x"), Some(Type.Name("Int")), None)), Term.Block(List(Term.Name("x")))) = blockStat("x: Int => x") intercept[ParseException] { templStat("x: Int => x") } } test("(x: Int) => x") { val Term.Function(List(Term.Param(Nil, Term.Name("x"), Some(Type.Name("Int")), None)), Term.Name("x")) = term("(x: Int) => x") val Term.Function(List(Term.Param(Nil, Term.Name("x"), Some(Type.Name("Int")), None)), Term.Block(List(Term.Name("x")))) = blockStat("(x: Int) => x") val Term.Function(List(Term.Param(Nil, Term.Name("x"), Some(Type.Name("Int")), None)), Term.Name("x")) = templStat("(x: Int) => x") } test("_: Int => x") { val Term.Ascribe(Term.Placeholder(), Type.Function(List(Type.Name("Int")), Type.Name("x"))) = term("_: Int => x") val Term.Function(List(Term.Param(Nil, Name.Anonymous(), Some(Type.Name("Int")), None)), Term.Block(List(Term.Name("x")))) = blockStat("_: Int => x") intercept[ParseException] { templStat("_: Int => x") } } test("(_: 
Int) => x") { val Term.Function(List(Term.Param(Nil, Name.Anonymous(), Some(Type.Name("Int")), None)), Term.Name("x")) = term("(_: Int) => x") val Term.Function(List(Term.Param(Nil, Name.Anonymous(), Some(Type.Name("Int")), None)), Term.Block(List(Term.Name("x")))) = blockStat("(_: Int) => x") val Term.Function(List(Term.Param(Nil, Name.Anonymous(), Some(Type.Name("Int")), None)), Term.Name("x")) = templStat("(_: Int) => x") } test("x: Int, y: Int => x") { intercept[ParseException] { term("x: Int, y: Int => x") } intercept[ParseException] { blockStat("x: Int, y: Int => x") } intercept[ParseException] { templStat("x: Int, y: Int => x") } } test("(x: Int, y: Int) => x") { val Term.Function(List(Term.Param(Nil, Term.Name("x"), Some(Type.Name("Int")), None), Term.Param(Nil, Term.Name("y"), Some(Type.Name("Int")), None)), Term.Name("x")) = term("(x: Int, y: Int) => x") val Term.Function(List(Term.Param(Nil, Term.Name("x"), Some(Type.Name("Int")), None), Term.Param(Nil, Term.Name("y"), Some(Type.Name("Int")), None)), Term.Block(List(Term.Name("x")))) = blockStat("(x: Int, y: Int) => x") val Term.Function(List(Term.Param(Nil, Term.Name("x"), Some(Type.Name("Int")), None), Term.Param(Nil, Term.Name("y"), Some(Type.Name("Int")), None)), Term.Name("x")) = templStat("(x: Int, y: Int) => x") } test("{ implicit x => () }") { val Block(Function(Term.Param(Mod.Implicit() :: Nil, TermName("x"), None, None) :: Nil, Block(Lit(()) :: Nil)) :: Nil) = term("{ implicit x => () }") } test("1 match { case 1 => true }") { val Match(Lit(1), Case(Lit(1), None, Lit(true)) :: Nil) = term("1 match { case 1 => true }") } test("1 match { case 1 => }") { val Match(Lit(1), Case(Lit(1), None, Term.Block(Nil)) :: Nil) = term("1 match { case 1 => }") } test("1 match { case 1 if true => }") { val Match(Lit(1), Case(Lit(1), Some(Lit(true)), Term.Block(Nil)) :: Nil) = term("1 match { case 1 if true => }") } test("try 1") { val TryWithCases(Lit(1), Nil, None) = term("try 1") } test("try 1 catch 1") { val TryWithTerm(Lit(1), Lit(1), None) = term("try 1 catch 1") } test("try 1 catch { case _ => }") { val TryWithCases(Lit(1), Case(Pat.Wildcard(), None, Term.Block(Nil)) :: Nil, None) = term("try 1 catch { case _ => }") } test("try 1 finally 1") { val TryWithCases(Lit(1), Nil, Some(Lit(1))) = term("try 1 finally 1") } test("{ case 1 => () }") { val PartialFunction(Case(Lit(1), None, Lit(())) :: Nil) = term("{ case 1 => () }") } test("while (true) false") { val While(Lit(true), Lit(false)) = term("while (true) false") } test("do false while(true)") { val Do(Lit(false), Lit(true)) = term("do false while(true)") } test("for (a <- b; if c; x = a) x") { val For(List(Enumerator.Generator(Pat.Var.Term(TermName("a")), TermName("b")), Enumerator.Guard(TermName("c")), Enumerator.Val(Pat.Var.Term(TermName("x")), TermName("a"))), TermName("x")) = term("for (a <- b; if c; x = a) x") } test("for (a <- b; if c; x = a) yield x") { val ForYield(List(Enumerator.Generator(Pat.Var.Term(TermName("a")), TermName("b")), Enumerator.Guard(TermName("c")), Enumerator.Val(Pat.Var.Term(TermName("x")), TermName("a"))), TermName("x")) = term("for (a <- b; if c; x = a) yield x") } test("f(_)") { val Apply(TermName("f"), List(Placeholder())) = term("f(_)") } test("_ + 1") { val ApplyInfix(Placeholder(), TermName("+"), Nil, Lit(1) :: Nil) = term("_ + 1") } test("1 + _") { val ApplyInfix(Lit(1), TermName("+"), Nil, Placeholder() :: Nil) = term("1 + _") } test("f _") { val Eta(TermName("f")) = term("f _") } test("new {}") { val New(Template(Nil, Nil, EmptySelf(), 
Some(Nil))) = term("new {}") } test("new { x }") { val New(Template(Nil, Nil, EmptySelf(), Some(Term.Name("x") :: Nil))) = term("new { x }") } test("new A") { val New(templ @ Template(Nil, Ctor.Name("A") :: Nil, EmptySelf(), None)) = term("new A") // TODO: revisit this once we have trivia in place // assert(templ.hasStats === false) } test("new A {}") { val New(templ @ Template(Nil, Ctor.Name("A") :: Nil, EmptySelf(), Some(Nil))) = term("new A {}") // TODO: revisit this once we have trivia in place // assert(templ.hasStats === true) } test("new A with B") { val New(Template(Nil, Ctor.Name("A") :: Ctor.Name("B") :: Nil, EmptySelf(), None)) = term("new A with B") } test("new { val x: Int = 1 } with A") { val New(Template(Defn.Val(Nil, List(Pat.Var.Term(TermName("x"))), Some(TypeName("Int")), Lit(1)) :: Nil, Ctor.Name("A") :: Nil, EmptySelf(), None)) = term("new { val x: Int = 1 } with A") } test("new { self: T => }") { val New(Template(Nil, Nil, Term.Param(Nil, TermName("self"), Some(TypeName("T")), None), Some(Nil))) = term("new { self: T => }") } test("a + (b = c)") { val ApplyInfix(TermName("a"), TermName("+"), Nil, Arg.Named(TermName("b"), TermName("c")) :: Nil) = term("a + (b = c)") } test("(a = b) + c") { val ApplyInfix(Assign(TermName("a"), TermName("b")), TermName("+"), Nil, TermName("c") :: Nil) = term("(a = b) + c") } test("a + (b = c).d") { val ApplyInfix(TermName("a"), TermName("+"), Nil, Select(Assign(TermName("b"), TermName("c")), TermName("d")) :: Nil) = term("a + (b = c).d") } test("a + (b: _*)") { val ApplyInfix(TermName("a"), TermName("+"), Nil, Arg.Repeated(TermName("b")) :: Nil) = term("a + (b: _*)") } test("local class") { val Term.Block(List( Defn.Class( List(Mod.Case()), Type.Name("C"), Nil, Ctor.Primary(Nil, Ctor.Name("this"), List(List(Term.Param(Nil, Term.Name("x"), Some(Type.Name("Int")), None)))), EmptyTemplate()))) = term("{ case class C(x: Int); }") } test("xml literal - 1") { val Term.Block(List( Defn.Val(Nil, List(Pat.Var.Term(Term.Name("x"))), None, Term.Xml(List(Lit("<p/>")), Nil)), Defn.Val(Nil, List(Pat.Var.Term(Term.Name("y"))), None, Term.Name("x")))) = term("""{ val x = <p/> val y = x }""") } test("implicit closure") { val Term.Apply(Term.Name("Action"), List( Term.Block(List( Term.Function( List(Term.Param(List(Mod.Implicit()), Term.Name("request"), Some(Type.Apply(Type.Name("Request"), List(Type.Name("AnyContent")))), None)), Term.Block(List(Term.Name("Ok")))))))) = term("Action { implicit request: Request[AnyContent] => Ok }") } test("#312") { val Term.Block(Seq( Defn.Val(Nil, Seq(Pat.Var.Term(Term.Name("x"))), None, Term.Ascribe(Term.Name("yz"), Type.Tuple(Seq(Type.Name("Y"), Type.Name("Z"))))), Term.Tuple(Seq(Term.Name("x"), Term.Name("x"))))) = term("""{ val x = yz: (Y, Z) (x, x) }""") } test("spawn { var v: Int = _; ??? }") { val Term.Apply( Term.Name("spawn"), Seq( Term.Block(Seq( Defn.Var(Nil, Seq(Pat.Var.Term(Term.Name("v"))), Some(Type.Name("Int")), None), Term.Name("???"))))) = term("spawn { var v: Int = _; ??? 
}") } test("#345") { val Term.Match(_, Seq(Case(_, _, rhs), _)) = term("""x match { case x => true // sobaka case y => y }""") assert(rhs.tokens.show[Structure] === "Tokens(true [26..30))") } test("a + (bs: _*) * c") { intercept[ParseException] { term("a + (bs: _*) * c") } } test("a + (c, d) * e") { val Term.ApplyInfix( Term.Name("a"), Term.Name("+"), Nil, Seq( Term.ApplyInfix( Term.Tuple(Seq(Term.Name("c"), Term.Name("d"))), Term.Name("*"), Nil, Seq(Term.Name("e"))))) = term("a + (c, d) * e") } test("a * (c, d) + e") { val Term.ApplyInfix( Term.ApplyInfix( Term.Name("a"), Term.Name("*"), Nil, Seq(Term.Name("c"), Term.Name("d"))), Term.Name("+"), Nil, Seq(Term.Name("e"))) = term("a * (c, d) + e") } test("(a + b) c") { val Term.Select(Term.ApplyInfix(Term.Name("a"), Term.Name("+"), Nil, Seq(Term.Name("b"))), Term.Name("c")) = term("(a + b) c") } test("a + b c") { val Term.Select(Term.ApplyInfix(Term.Name("a"), Term.Name("+"), Nil, Seq(Term.Name("b"))), Term.Name("c")) = term("a + b c") } test("disallow parse[Stat] on statseqs") { intercept[ParseException]{ stat("hello; world") } } test("\\"stat;\\".parse[Stat]") { val Term.Name("stat") = stat("stat;") } test("\\"stat;\\".parse[Term]") { intercept[ParseException]{ term("stat;") } } test("$_") { intercept[ParseException](term(""" q"x + $_" """)) } test("!x = y") { val Term.Assign(Term.ApplyUnary(Term.Name("!"), Term.Name("x")), Term.Name("y")) = term("!x = y") } test("!(arr.cast[Ptr[Byte]] + sizeof[Ptr[_]]).cast[Ptr[Int]] = length") { val Term.Assign( Term.ApplyUnary( Term.Name("!"), Term.ApplyType( Term.Select( Term.ApplyInfix( Term.ApplyType(Term.Select(Term.Name("arr"), Term.Name("cast")), Seq(Type.Apply(Type.Name("Ptr"), Seq(Type.Name("Byte"))))), Term.Name("+"), Nil, Seq(Term.ApplyType(Term.Name("sizeof"), Seq(Type.Apply(Type.Name("Ptr"), Seq(Type.Placeholder(Type.Bounds(None, None)))))))), Term.Name("cast")), Seq(Type.Apply(Type.Name("Ptr"), Seq(Type.Name("Int")))))), Term.Name("length")) = term("!(arr.cast[Ptr[Byte]] + sizeof[Ptr[_]]).cast[Ptr[Int]] = length") } }
Dveim/scalameta
scalameta/scalameta/src/test/scala/scala/meta/tests/parsers/TermSuite.scala
Scala
bsd-3-clause
17,515
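The suite's helpers (term, blockStat, templStat) come from ParseSuite, but the same destructure-the-tree style works against scalameta's public API. A minimal standalone sketch, assuming the scalameta 1.x API this suite targets, where a dialect in scope makes String#parse available:

import scala.meta._
import scala.meta.dialects.Scala211

// Parse a term and pattern-match on the resulting AST, as the tests above do.
"a + b".parse[Term].get match {
  case Term.ApplyInfix(Term.Name("a"), Term.Name("+"), Nil, Seq(Term.Name("b"))) =>
    println("parsed an infix application")
  case other =>
    println("unexpected tree: " + other.show[Structure])
}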
import com.typesafe.sbt.pgp.PgpKeys import sbt.Keys._ import sbtrelease.ReleasePlugin.ReleaseKeys._ import xerial.sbt.Sonatype.SonatypeKeys._ object Release { val settings = xerial.sbt.Sonatype.sonatypeSettings ++ com.typesafe.sbt.SbtPgp.settings ++ sbtrelease.ReleasePlugin.releaseSettings ++ Seq( crossBuild := true, profileName := "de.leanovate", publishMavenStyle := true, pomExtra := { <url>https://github.com/leanovate/doby</url> <licenses> <license> <name>MIT</name> <url>http://opensource.org/licenses/MIT</url> </license> </licenses> <scm> <connection>scm:git:github.com/leanovate/play-mockws</connection> <developerConnection>scm:git:[email protected]:/leanovate/play-mockws</developerConnection> <url>github.com/leanovate/play-mockws</url> </scm> <developers> <developer> <id>yanns</id> <name>Yann Simon</name> <url>http://yanns.github.io/</url> </developer> </developers> }, publishArtifactsAction := PgpKeys.publishSigned.value ) }
matterche/play-mockws
project/Release.scala
Scala
mit
1,228
package org.bitcoins.script.reserved import org.scalatest.{FlatSpec, MustMatchers} /** * Created by chris on 1/22/16. */ class ReservedOperationsFactoryTest extends FlatSpec with MustMatchers { "ReservedOperationsFactory" must "instantiate reserved operations" in { ReservedOperation("50") must be (Some(OP_RESERVED)) ReservedOperation("62") must be (Some(OP_VER)) } it must "find OP_NOP1 from its hex value" in { ReservedOperation("b0") must be (Some(OP_NOP1)) } it must "find an undefined operation from its hex value" in { ReservedOperation("ba").isDefined must be (true) } }
Christewart/scalacoin
src/test/scala/org/bitcoins/script/reserved/ReservedOperationsFactoryTest.scala
Scala
mit
614
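The factory under test resolves reserved script operations from their hex byte. As an illustrative sketch of that lookup pattern (a re-implementation for exposition, not the library's code; the opcode/hex pairs are exactly the ones the test asserts):

sealed trait ScriptOp { def hex: String }
case object OP_RESERVED extends ScriptOp { val hex = "50" }
case object OP_VER extends ScriptOp { val hex = "62" }
case object OP_NOP1 extends ScriptOp { val hex = "b0" }

object ScriptOpFactory {
  private val known: Map[String, ScriptOp] =
    List(OP_RESERVED, OP_VER, OP_NOP1).map(op => op.hex -> op).toMap
  // Resolve an operation from its hex byte, case-insensitively.
  def apply(hex: String): Option[ScriptOp] = known.get(hex.toLowerCase)
}

// ScriptOpFactory("50") == Some(OP_RESERVED); ScriptOpFactory("ff") == None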
package com.lightning.walletapp.lnutils import android.graphics.drawable.BitmapDrawable import com.lightning.walletapp.Utils.app import language.implicitConversions import scodec.bits.ByteVector import android.view.Gravity import android.text.Html object ImplicitConversions { implicit class StringOps(source: String) { def html = Html.fromHtml(source, IconGetter, null) def s2hex = ByteVector.view(source getBytes "UTF-8").toHex def noSpaces = source.replace(" ", "").replace("\\u00A0", "") } implicit def bitcoinLibScript2bitcoinjScript(pubKeyScript: ByteVector): org.bitcoinj.script.Script = new org.bitcoinj.script.Script(pubKeyScript.toArray, System.currentTimeMillis / 1000L - 3600 * 24) implicit def bitcoinjTx2bitcoinLibTx(bitcoinjTx: org.bitcoinj.core.Transaction): fr.acinq.bitcoin.Transaction = fr.acinq.bitcoin.Transaction.read(bitcoinjTx.unsafeBitcoinSerialize) implicit def bitcoinLibTx2bitcoinjTx(bitcoinLibTx: fr.acinq.bitcoin.Transaction): org.bitcoinj.core.Transaction = new org.bitcoinj.core.Transaction(app.params, bitcoinLibTx.bin.toArray) } object IconGetter extends Html.ImageGetter { private[this] val metrics = app.getResources.getDisplayMetrics val scrWidth = metrics.widthPixels.toDouble / metrics.densityDpi val scrHeight = metrics.heightPixels.toDouble / metrics.densityDpi val maxDialog = metrics.densityDpi * 2.1 val isTablet = scrWidth > 3.5 private[this] val btcDrawableTitle = app.getResources.getDrawable(com.lightning.walletapp.R.drawable.icon_btc_shape, null).asInstanceOf[BitmapDrawable] private[this] val lnDrawableTitle = app.getResources.getDrawable(com.lightning.walletapp.R.drawable.icon_bolt_shape, null).asInstanceOf[BitmapDrawable] private[this] val btcDrawable = app.getResources.getDrawable(com.lightning.walletapp.R.drawable.icon_btc_shape, null).asInstanceOf[BitmapDrawable] private[this] val lnDrawable = app.getResources.getDrawable(com.lightning.walletapp.R.drawable.icon_bolt_shape, null).asInstanceOf[BitmapDrawable] def getDrawable(s: String) = s match { case "btcbig" => btcDrawableTitle case "lnbig" => lnDrawableTitle case "btc" => btcDrawable case "ln" => lnDrawable } import android.provider.Settings.System.{getFloat, FONT_SCALE} val bigFont = getFloat(app.getContentResolver, FONT_SCALE, 1) > 1 private[this] val btcTitleCorrection = if (bigFont) metrics.scaledDensity * 4.8 else metrics.scaledDensity * 4.2 private[this] val lnTitleCorrection = if (bigFont) metrics.scaledDensity * 6.6 else metrics.scaledDensity * 5.6 private[this] val btcCorrection = if (bigFont) metrics.scaledDensity * 3.6 else metrics.scaledDensity * 2.6 private[this] val lnCorrection = if (bigFont) metrics.scaledDensity * 3.0 else metrics.scaledDensity * 2.3 btcDrawableTitle.setBounds(0, 0, btcDrawable.getIntrinsicWidth, btcTitleCorrection.toInt + btcDrawableTitle.getIntrinsicHeight) lnDrawableTitle.setBounds(0, 0, lnDrawable.getIntrinsicWidth, lnTitleCorrection.toInt + lnDrawableTitle.getIntrinsicHeight) btcDrawable.setBounds(0, 0, btcDrawable.getIntrinsicWidth, btcCorrection.toInt + btcDrawable.getIntrinsicHeight) lnDrawable.setBounds(0, 0, lnDrawable.getIntrinsicWidth, lnCorrection.toInt + lnDrawable.getIntrinsicHeight) btcDrawableTitle.setGravity(Gravity.TOP) lnDrawableTitle.setGravity(Gravity.TOP) btcDrawable.setGravity(Gravity.TOP) lnDrawable.setGravity(Gravity.TOP) }
btcontract/lnwallet
app/src/main/java/com/lightning/walletapp/lnutils/ImplicitConversions.scala
Scala
apache-2.0
3,437
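A short usage sketch of the StringOps enrichment above, assuming the object's members are imported into scope:

import com.lightning.walletapp.lnutils.ImplicitConversions._

"ab".s2hex       // "6162": the string's UTF-8 bytes rendered as lowercase hex
"a b c".noSpaces // "abc": regular and non-breaking spaces stripped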