column      type           values
code        stringlengths  5 - 1M
repo_name   stringlengths  5 - 109
path        stringlengths  6 - 208
language    stringclasses  1 value
license     stringclasses  15 values
size        int64          5 - 1M
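The records that follow pair one flattened Scala source file with its repository metadata (repo_name, path, language, license, size). As a minimal sketch of how rows with this schema could be read back — assuming they are exported as JSON Lines with those six field names; the records.jsonl file name and the use of json4s are illustrative assumptions, not part of the dataset — one option in Scala is:

import org.json4s._
import org.json4s.native.JsonMethods.parse

// One row of the dump: the six columns listed in the schema above.
case class CodeRecord(
  code: String,      // full source text (5 characters to 1 MB)
  repo_name: String, // e.g. an "owner/repository" slug
  path: String,      // file path inside the repository
  language: String,  // always "Scala" in this dump (1 class)
  license: String,   // one of 15 license identifiers
  size: Long         // file size in bytes
)

object ReadRecords {
  implicit val formats: Formats = DefaultFormats

  def main(args: Array[String]): Unit = {
    // Hypothetical export of the rows as one JSON object per line.
    val records = scala.io.Source.fromFile("records.jsonl")
      .getLines()
      .map(line => parse(line).extract[CodeRecord])
      .toList
    records.foreach(r => println(s"${r.repo_name}/${r.path} (${r.size} bytes)"))
  }
}

Any JSON library with case-class extraction would do; json4s is used here only because it already appears in one of the sample files below.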
/*
 * Copyright 2022 HM Revenue & Customs
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package utils

import akka.util.Timeout
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
import org.scalatestplus.play.guice.GuiceOneAppPerSuite
import play.api.inject.guice.GuiceApplicationBuilder
import play.api.test.DefaultAwaitTimeout
import play.api.{Application, Configuration}

import scala.concurrent.duration._
import org.scalatest.OptionValues
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike
import play.api.mvc.Result

import scala.language.implicitConversions

trait IntegrationBaseSpec extends AnyWordSpecLike with Matchers with OptionValues with GuiceOneAppPerSuite with WiremockHelper
  with BeforeAndAfterEach with BeforeAndAfterAll with DefaultAwaitTimeout {

  override implicit def defaultAwaitTimeout: Timeout = 5.seconds

  import scala.concurrent.duration._
  import scala.concurrent.{Await, Future}

  implicit val defaultTimeout: FiniteDuration = 5.seconds

  implicit def extractAwait[A](future: Future[A]): A = await[A](future)

  def await[A](future: Future[A])(implicit timeout: Duration): A = Await.result(future, timeout)

  // Convenience to avoid having to wrap andThen() parameters in Future.successful
  implicit def liftFuture[A](v: A): Future[A] = Future.successful(v)

  def status(of: Result): Int = of.header.status

  val localHost = "localhost"
  val localPort: Int = 19001
  val localUrl = s"http://$localHost:$localPort"

  val additionalConfiguration: Seq[(String, Any)] = Seq.empty

  override implicit lazy val app: Application = new GuiceApplicationBuilder()
    .configure(Configuration("testserver.port" -> s"$localPort"))
    .configure(Configuration("play.http.router" -> "testOnlyDoNotUseInAppConf.Routes"))
    .configure(Configuration("microservice.services.iht.port" -> s"${WiremockHelper.wiremockPort}"))
    .configure(Configuration("auditing.consumer.baseUri.port" -> s"${WiremockHelper.wiremockPort}"))
    .configure(Configuration("metrics.enabled" -> true))
    .configure(Configuration(additionalConfiguration: _*))
    .build()

  override def beforeEach() = {
    resetWiremock()
  }

  override def beforeAll() = {
    super.beforeAll()
    startWiremock()
  }

  override def afterAll() = {
    stopWiremock()
    super.afterAll()
  }
}
hmrc/iht-frontend
it/utils/IntegrationBaseSpec.scala
Scala
apache-2.0
2,867
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution import java.nio.charset.StandardCharsets import java.sql.Timestamp import org.apache.spark.rdd.RDD import org.apache.spark.sql.{AnalysisException, Row, SparkSession} import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.analysis.UnsupportedOperationChecker import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, ReturnAnswer} import org.apache.spark.sql.catalyst.rules.Rule import org.apache.spark.sql.catalyst.util.DateTimeUtils import org.apache.spark.sql.execution.command.{DescribeTableCommand, ExecutedCommandExec, ShowTablesCommand} import org.apache.spark.sql.execution.exchange.{EnsureRequirements, ReuseExchange} import org.apache.spark.sql.types.{BinaryType, DateType, DecimalType, TimestampType, _} import org.apache.spark.util.Utils /** * The primary workflow for executing relational queries using Spark. Designed to allow easy * access to the intermediate phases of query execution for developers. * * While this is not a public class, we should avoid changing the function names for the sake of * changing them, because a lot of developers use the feature for debugging. */ class QueryExecution(val sparkSession: SparkSession, val logical: LogicalPlan) { // TODO: Move the planner an optimizer into here from SessionState. protected def planner = sparkSession.sessionState.planner def assertAnalyzed(): Unit = { try sparkSession.sessionState.analyzer.checkAnalysis(analyzed) catch { case e: AnalysisException => val ae = new AnalysisException(e.message, e.line, e.startPosition, Some(analyzed)) ae.setStackTrace(e.getStackTrace) throw ae } } def assertSupported(): Unit = { if (sparkSession.sessionState.conf.isUnsupportedOperationCheckEnabled) { UnsupportedOperationChecker.checkForBatch(analyzed) } } lazy val analyzed: LogicalPlan = { SparkSession.setActiveSession(sparkSession) sparkSession.sessionState.analyzer.execute(logical) } lazy val withCachedData: LogicalPlan = { assertAnalyzed() assertSupported() sparkSession.sharedState.cacheManager.useCachedData(analyzed) } lazy val optimizedPlan: LogicalPlan = sparkSession.sessionState.optimizer.execute(withCachedData) lazy val sparkPlan: SparkPlan = { SparkSession.setActiveSession(sparkSession) // TODO: We use next(), i.e. take the first plan returned by the planner, here for now, // but we will implement to choose the best plan. planner.plan(ReturnAnswer(optimizedPlan)).next() } // executedPlan should not be used to initialize any SparkPlan. It should be // only used for execution. lazy val executedPlan: SparkPlan = prepareForExecution(sparkPlan) /** Internal version of the RDD. 
Avoids copies and has no schema */ lazy val toRdd: RDD[InternalRow] = executedPlan.execute() /** * Prepares a planned [[SparkPlan]] for execution by inserting shuffle operations and internal * row format conversions as needed. */ protected def prepareForExecution(plan: SparkPlan): SparkPlan = { preparations.foldLeft(plan) { case (sp, rule) => rule.apply(sp) } } /** A sequence of rules that will be applied in order to the physical plan before execution. */ protected def preparations: Seq[Rule[SparkPlan]] = Seq( python.ExtractPythonUDFs, PlanSubqueries(sparkSession), EnsureRequirements(sparkSession.sessionState.conf), CollapseCodegenStages(sparkSession.sessionState.conf), ReuseExchange(sparkSession.sessionState.conf), ReuseSubquery(sparkSession.sessionState.conf)) protected def stringOrError[A](f: => A): String = try f.toString catch { case e: AnalysisException => e.toString } /** * Returns the result as a hive compatible sequence of strings. For native commands, the * execution is simply passed back to Hive. */ def hiveResultString(): Seq[String] = executedPlan match { case ExecutedCommandExec(desc: DescribeTableCommand) => SQLExecution.withNewExecutionId(sparkSession, this) { // If it is a describe command for a Hive table, we want to have the output format // be similar with Hive. desc.run(sparkSession).map { case Row(name: String, dataType: String, comment) => Seq(name, dataType, Option(comment.asInstanceOf[String]).getOrElse("")) .map(s => String.format(s"%-20s", s)) .mkString("\\t") } } // SHOW TABLES in Hive only output table names, while ours outputs database, table name, isTemp. case command: ExecutedCommandExec if command.cmd.isInstanceOf[ShowTablesCommand] => command.executeCollect().map(_.getString(1)) case command: ExecutedCommandExec => command.executeCollect().map(_.getString(0)) case other => SQLExecution.withNewExecutionId(sparkSession, this) { val result: Seq[Seq[Any]] = other.executeCollectPublic().map(_.toSeq).toSeq // We need the types so we can output struct field names val types = analyzed.output.map(_.dataType) // Reformat to match hive tab delimited output. result.map(_.zip(types).map(toHiveString)).map(_.mkString("\\t")).toSeq } } /** Formats a datum (based on the given data type) and returns the string representation. */ private def toHiveString(a: (Any, DataType)): String = { val primitiveTypes = Seq(StringType, IntegerType, LongType, DoubleType, FloatType, BooleanType, ByteType, ShortType, DateType, TimestampType, BinaryType) /** Implementation following Hive's TimestampWritable.toString */ def formatTimestamp(timestamp: Timestamp): String = { val timestampString = timestamp.toString if (timestampString.length() > 19) { if (timestampString.length() == 21) { if (timestampString.substring(19).compareTo(".0") == 0) { return DateTimeUtils.threadLocalTimestampFormat.get().format(timestamp) } } return DateTimeUtils.threadLocalTimestampFormat.get().format(timestamp) + timestampString.substring(19) } return DateTimeUtils.threadLocalTimestampFormat.get().format(timestamp) } def formatDecimal(d: java.math.BigDecimal): String = { if (d.compareTo(java.math.BigDecimal.ZERO) == 0) { java.math.BigDecimal.ZERO.toPlainString } else { d.stripTrailingZeros().toPlainString } } /** Hive outputs fields of structs slightly differently than top level attributes. 
*/ def toHiveStructString(a: (Any, DataType)): String = a match { case (struct: Row, StructType(fields)) => struct.toSeq.zip(fields).map { case (v, t) => s""""${t.name}":${toHiveStructString(v, t.dataType)}""" }.mkString("{", ",", "}") case (seq: Seq[_], ArrayType(typ, _)) => seq.map(v => (v, typ)).map(toHiveStructString).mkString("[", ",", "]") case (map: Map[_, _], MapType(kType, vType, _)) => map.map { case (key, value) => toHiveStructString((key, kType)) + ":" + toHiveStructString((value, vType)) }.toSeq.sorted.mkString("{", ",", "}") case (null, _) => "null" case (s: String, StringType) => "\\"" + s + "\\"" case (decimal, DecimalType()) => decimal.toString case (other, tpe) if primitiveTypes contains tpe => other.toString } a match { case (struct: Row, StructType(fields)) => struct.toSeq.zip(fields).map { case (v, t) => s""""${t.name}":${toHiveStructString(v, t.dataType)}""" }.mkString("{", ",", "}") case (seq: Seq[_], ArrayType(typ, _)) => seq.map(v => (v, typ)).map(toHiveStructString).mkString("[", ",", "]") case (map: Map[_, _], MapType(kType, vType, _)) => map.map { case (key, value) => toHiveStructString((key, kType)) + ":" + toHiveStructString((value, vType)) }.toSeq.sorted.mkString("{", ",", "}") case (null, _) => "NULL" case (d: Int, DateType) => new java.util.Date(DateTimeUtils.daysToMillis(d)).toString case (t: Timestamp, TimestampType) => formatTimestamp(t) case (bin: Array[Byte], BinaryType) => new String(bin, StandardCharsets.UTF_8) case (decimal: java.math.BigDecimal, DecimalType()) => formatDecimal(decimal) case (other, tpe) if primitiveTypes.contains(tpe) => other.toString } } def simpleString: String = { s"""== Physical Plan == |${stringOrError(executedPlan.treeString(verbose = false))} """.stripMargin.trim } override def toString: String = { def output = Utils.truncatedString( analyzed.output.map(o => s"${o.name}: ${o.dataType.simpleString}"), ", ") val analyzedPlan = Seq( stringOrError(output), stringOrError(analyzed.treeString(verbose = true)) ).filter(_.nonEmpty).mkString("\\n") s"""== Parsed Logical Plan == |${stringOrError(logical.treeString(verbose = true))} |== Analyzed Logical Plan == |$analyzedPlan |== Optimized Logical Plan == |${stringOrError(optimizedPlan.treeString(verbose = true))} |== Physical Plan == |${stringOrError(executedPlan.treeString(verbose = true))} """.stripMargin.trim } /** A special namespace for commands that can be used to debug query execution. */ // scalastyle:off object debug { // scalastyle:on /** * Prints to stdout all the generated code found in this plan (i.e. the output of each * WholeStageCodegen subtree). */ def codegen(): Unit = { // scalastyle:off println println(org.apache.spark.sql.execution.debug.codegenString(executedPlan)) // scalastyle:on println } } }
Panos-Bletsos/spark-cost-model-optimizer
sql/core/src/main/scala/org/apache/spark/sql/execution/QueryExecution.scala
Scala
apache-2.0
10,549
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.deploy.worker import java.io._ import scala.collection.JavaConversions._ import com.google.common.base.Charsets.UTF_8 import com.google.common.io.Files import org.apache.spark.rpc.RpcEndpointRef import org.apache.spark.{SecurityManager, SparkConf, Logging} import org.apache.spark.deploy.{ApplicationDescription, ExecutorState} import org.apache.spark.deploy.DeployMessages.ExecutorStateChanged import org.apache.spark.util.{ShutdownHookManager, Utils} import org.apache.spark.util.logging.FileAppender /** * Worker进程中ExcetorRuner具体负责executor的启动和停止,每一个executor进程对应于一个ExecutorRunner * ExecutorRunner担任监工角色 * * Worker接到Master的LaunchExecutor消息后,ExecutorRunner启动一个新的进程 * * Manages the execution of one executor process. * This is currently only used in standalone mode. * 管理一个执行的executor进程,只使用standalone模式 * */ private[deploy] class ExecutorRunner( val appId: String, val execId: Int, val appDesc: ApplicationDescription, val cores: Int, val memory: Int, val worker: RpcEndpointRef,//Worker Actor的引用 val workerId: String, val host: String, val webUiPort: Int, val publicAddress: String, val sparkHome: File, val executorDir: File, val workerUrl: String, conf: SparkConf, val appLocalDirs: Seq[String], @volatile var state: ExecutorState.Value) extends Logging { private val fullId = appId + "/" + execId private var workerThread: Thread = null private var process: Process = null private var stdoutAppender: FileAppender = null private var stderrAppender: FileAppender = null // NOTE: This is now redundant with the automated shut-down enforced by the Executor. It might // make sense to remove this in the future. //注意:执行者执行的自动关闭现在已经是多余的,将来删除这个可能是有意义的 private var shutdownHook: AnyRef = null /** * 启动ExecutorRunner的时候实际创建了线程workerThread和shutdownHook */ private[worker] def start() { workerThread = new Thread("ExecutorRunner for " + fullId) { override def run() { fetchAndRunExecutor() } } //启动ExecutorRunner的时候实际创建了线程workerThread和shutdownHook workerThread.start() // Shutdown hook that kills actors on shutdown. //shutdownHook用于在Worker关闭时杀掉所有的Executor进程 shutdownHook = ShutdownHookManager.addShutdownHook { () => killProcess(Some("Worker shutting down")) } } /** * Kill executor process, wait for exit and notify worker to update resource status. 
* 停止executor进行,将停止的结果反馈给worker本身 * @param message the exception message which caused the executor's death * 导致executor死亡的异常消息 */ private def killProcess(message: Option[String]) { var exitCode: Option[Int] = None if (process != null) { logInfo("Killing process!") if (stdoutAppender != null) { stdoutAppender.stop() } if (stderrAppender != null) { stderrAppender.stop() } process.destroy() exitCode = Some(process.waitFor()) } //停止executor执行,将停止的结果反馈给worker本身 worker.send(ExecutorStateChanged(appId, execId, state, message, exitCode)) } /** * Stop this executor runner, including killing the process it launched * 停止运行executor,包括杀死启动进程 * */ private[worker] def kill() { if (workerThread != null) { // the workerThread will kill the child process when interrupted // 将workerThread杀死子的过程中断时 workerThread.interrupt() workerThread = null state = ExecutorState.KILLED try { ShutdownHookManager.removeShutdownHook(shutdownHook) } catch { case e: IllegalStateException => None } } } /** Replace variables such as {{EXECUTOR_ID}} and {{CORES}} in a command argument passed to us */ //通过Commmand启动时,需要将这些参数替换成真实分配的值 private[worker] def substituteVariables(argument: String): String = argument match { case "{{WORKER_URL}}" => workerUrl case "{{EXECUTOR_ID}}" => execId.toString case "{{HOSTNAME}}" => host case "{{CORES}}" => cores.toString case "{{APP_ID}}" => appId case other => other } /** * Download and run the executor described in our ApplicationDescription * 下载并运行应用程序描述 */ private def fetchAndRunExecutor() { try { // Launch the process //构造ProcessBuilder, val builder = CommandUtils.buildProcessBuilder(appDesc.command, new SecurityManager(conf), memory, sparkHome.getAbsolutePath, substituteVariables) val command = builder.command() logInfo("Launch command: " + command.mkString("\\"", "\\" \\"", "\\"")) //ProcessBuilder设置执行目录,环境变量 builder.directory(executorDir) builder.environment.put("SPARK_EXECUTOR_DIRS", appLocalDirs.mkString(File.pathSeparator)) // In case we are running this from within the Spark Shell, avoid creating a "scala" //如果我们正在运行从Spark Shell // parent process for the executor command //执行命令的父进程 builder.environment.put("SPARK_LAUNCH_WITH_SCALA", "0") // Add webUI log urls val baseUrl = s"http://$publicAddress:$webUiPort/logPage/?appId=$appId&executorId=$execId&logType=" builder.environment.put("SPARK_LOG_URL_STDERR", s"${baseUrl}stderr") builder.environment.put("SPARK_LOG_URL_STDOUT", s"${baseUrl}stdout") //启动ProessBuilder生成进程 process = builder.start() val header = "Spark Executor Command: %s\\n%s\\n\\n".format( command.mkString("\\"", "\\" \\"", "\\""), "=" * 40) // Redirect its stdout and stderr to files //重定向进程的文件输出流与错误流为executorDir目录下的文件stdout与stderr val stdout = new File(executorDir, "stdout") stdoutAppender = FileAppender(process.getInputStream, stdout, conf) val stderr = new File(executorDir, "stderr") Files.write(header, stderr, UTF_8) stderrAppender = FileAppender(process.getErrorStream, stderr, conf) // Wait for it to exit; executor may exit with code 0 (when driver instructs it to shutdown) // or with nonzero exit code //等待获取进程的退出状态,一旦收到退出状态,则向Worker发送ExecutorStatChange消息 val exitCode = process.waitFor() state = ExecutorState.EXITED val message = "Command exited with code " + exitCode //向Worker发送ExecutorStateChanged消息,Worker会将这个消息转发到Master,由于Executor是异常退出, //Master将会为该Apllcation分配新的Executor worker.send(ExecutorStateChanged(appId, execId, state, Some(message), Some(exitCode))) } catch { case interrupted: InterruptedException => { logInfo("Runner thread for executor " + fullId + " 
interrupted") state = ExecutorState.KILLED killProcess(None) } case e: Exception => { logError("Error running executor", e) state = ExecutorState.FAILED killProcess(Some(e.toString)) } } } }
tophua/spark1.52
core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
Scala
apache-2.0
8,213
/*
 * Copyright 2016 Coral realtime streaming analytics (http://coral-streaming.github.io)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.coral.actors

import org.json4s._

import scala.concurrent.Future
import org.json4s.JsonAST.{JObject, JValue}

trait Trigger {
  type TriggerType = JObject => Future[Option[JValue]]

  def trigger: TriggerType
}

trait NoEmitTrigger extends Trigger {
  override def trigger: TriggerType = json => {
    noEmitTrigger(json)
    Future.successful(Some(JNothing))
  }

  def noEmitTrigger(json: JObject): Unit
}

trait NoTrigger extends Trigger {
  override def trigger: TriggerType = json => Future.successful(Some(JNothing))
}

trait SimpleEmitTrigger extends Trigger {
  override def trigger: TriggerType = { json =>
    Future.successful(simpleEmitTrigger(json))
  }

  def simpleEmitTrigger(json: JObject): Option[JValue]
}
coral-streaming/coral
src/main/scala/io/coral/actors/Trigger.scala
Scala
apache-2.0
1,385
/** Copyright 2014 TappingStone, Inc.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
  *     http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */

package io.prediction.data.storage.hbase

import io.prediction.data.storage.BaseStorageClient
import io.prediction.data.storage.StorageClientConfig
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.HConnectionManager
import org.apache.hadoop.hbase.client.HConnection
import org.apache.hadoop.hbase.client.HBaseAdmin
//import org.apache.hadoop.hbase.NamespaceDescriptor
//import org.apache.hadoop.hbase.NamespaceExistException

case class HBClient(
  val conf: Configuration,
  val connection: HConnection,
  val admin: HBaseAdmin
)

class StorageClient(val config: StorageClientConfig) extends BaseStorageClient {
  val conf = HBaseConfiguration.create()

  if (config.test) {
    // use fewer retries and shorter timeout for test mode
    conf.set("hbase.client.retries.number", "1")
    conf.set("zookeeper.session.timeout", "30000");
    conf.set("zookeeper.recovery.retry", "1")
  }

  if (!config.parallel)
    HBaseAdmin.checkHBaseAvailable(conf)

  val connection = if (!config.parallel) HConnectionManager.createConnection(conf) else null

  val client = if (!config.parallel) HBClient(
    conf = conf,
    connection = connection,
    admin = new HBaseAdmin(connection)
  ) else null

  /*
  private val namespace = "predictionio_appdata"
  val nameDesc = NamespaceDescriptor.create(namespace).build()
  try {
    client.admin.createNamespace(nameDesc)
  } catch {
    case e: NamespaceExistException => Unit
    case e: Exception => throw new RuntimeException(e)
  }
  val eventClient = new HBEvents(client, namespace)
  */

  override val prefix = "HB"
}
TheDataShed/PredictionIO
data/src/main/scala/storage/hbase/StorageClient.scala
Scala
apache-2.0
2,323
/*
 *  ____    ____    _____    ____    ___     ____
 * |  _ \  |  _ \  | ____|  / ___|  / _/    / ___|   Precog (R)
 * | |_) | | |_) | |  _|   | |     | /| |  | |  _    Advanced Analytics Engine for NoSQL Data
 * |  __/  |  _ <  | |___  | |___  |/ _| | | |_| |   Copyright (C) 2010 - 2013 SlamData, Inc.
 * |_|     |_| \_\ |_____|  \____|  /__/   \____|    All Rights Reserved.
 *
 * This program is free software: you can redistribute it and/or modify it under the terms of the
 * GNU Affero General Public License as published by the Free Software Foundation, either version
 * 3 of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
 * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
 * the GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License along with this
 * program. If not, see <http://www.gnu.org/licenses/>.
 *
 */

package com.precog.util

import scala.collection.{GenTraversable, GenMap}
import scala.collection.generic.CanBuildFrom

import scalaz.{Either3, Left3, Right3, Middle3}

/**
 * Implicit container trait
 */
trait MapUtils {
  implicit def pimpMapUtils[A, B, CC[B] <: GenTraversable[B]](self: GenMap[A, CC[B]]): MapPimp[A, B, CC] =
    new MapPimp(self)
}

class MapPimp[A, B, CC[B] <: GenTraversable[B]](left: GenMap[A, CC[B]]) {
  def cogroup[C, CC2[C] <: GenTraversable[C], Result](right: GenMap[A, CC2[C]])(implicit
      cbf: CanBuildFrom[Nothing, (A, Either3[B, (CC[B], CC2[C]), C]), Result],
      cbfLeft: CanBuildFrom[CC[B], B, CC[B]],
      cbfRight: CanBuildFrom[CC2[C], C, CC2[C]]): Result = {
    val resultBuilder = cbf()

    left foreach {
      case (key, leftValues) => {
        right get key map { rightValues =>
          resultBuilder += (key -> Either3.middle3[B, (CC[B], CC2[C]), C]((leftValues, rightValues)))
        } getOrElse {
          leftValues foreach { b =>
            resultBuilder += (key -> Either3.left3[B, (CC[B], CC2[C]), C](b))
          }
        }
      }
    }

    right foreach {
      case (key, rightValues) => {
        if (!(left get key isDefined)) {
          rightValues foreach { c =>
            resultBuilder += (key -> Either3.right3[B, (CC[B], CC2[C]), C](c))
          }
        }
      }
    }

    resultBuilder.result()
  }
}
precog/platform
util/src/main/scala/com/precog/util/MapUtils.scala
Scala
agpl-3.0
2,411
package tests

import builder.{CarDirector, SedanCarBuilder, SportsCarBuilder}

/**
 * @author lmignot
 */
class BuilderTest extends BaseTest {

  describe("Sedan car builder") {
    it("should build a sedan car") {
      val carBuilder = SedanCarBuilder
      val director = CarDirector(carBuilder)
      val car = director.build.getCar

      car.getBodyStyle should not be null
      car.getEngine should not be null
      car.getPower should not be null
      car.getFuelType should not be null
      car.getBreaks should not be null
      car.getSeats should not be null
      car.getWindows should not be null
      car.carType should be ("SEDAN")
    }
  }

  describe("Sports car builder") {
    it("should build a sports car") {
      val carBuilder = SportsCarBuilder
      val director = CarDirector(carBuilder)
      val car = director.build.getCar

      car.getBodyStyle should not be null
      car.getEngine should not be null
      car.getPower should not be null
      car.getFuelType should not be null
      car.getBreaks should not be null
      car.getSeats should not be null
      car.getWindows should not be null
      car.carType should be ("SPORTS")
    }
  }
}
BBK-PiJ-2015-67/sdp-portfolio
exercises/week08/src/test/scala/tests/BuilderTest.scala
Scala
unlicense
1,193
package justin.db.actors.protocol

import justin.db.consistenthashing.NodeId

case class RegisterNode(nodeId: NodeId)
speedcom/JustinDB
justin-core/src/main/scala/justin/db/actors/protocol/ClusterSubscriberActorProtocol.scala
Scala
apache-2.0
118
package controllers

import play.api._
import play.api.Play.current
import play.api.mvc._
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import play.api.libs.functional.syntax._
import play.api.libs.json._

object Application extends Controller {

  def index = Action {
    Ok("Running :)")
  }

  def song(songId: String) = Action {
    Ok(s"Lalalala $songId")
  }
}
scalableminds/sbt-play-raml
sample/app/controllers/Application.scala
Scala
apache-2.0
387
/* NSC -- new Scala compiler * Copyright 2005-2013 LAMP/EPFL * @author Martin Odersky */ package scala package tools.nsc package typechecker import scala.collection.{ mutable, immutable } import mutable.ListBuffer import symtab.Flags._ /** This phase performs the following functions, each of which could be split out in a * mini-phase: * * (1) Adds super accessors for all super calls that either * appear in a trait or have as a target a member of some outer class. * * (2) Converts references to parameter fields that have the same name as a corresponding * public parameter field in a superclass to a reference to the superclass * field (corresponding = super class field is initialized with subclass field). * This info is pre-computed by the `alias` field in Typer. `dotc` follows a different * route; it computes everything in SuperAccessors and changes the subclass field * to a forwarder instead of manipulating references. This is more modular. * * (3) Adds protected accessors if the access to the protected member happens * in a class which is not a subclass of the member's owner. * * (4) Mangles the names of class-members which are * private up to an enclosing non-package class, in order to avoid overriding conflicts. * This is a dubious, and it would be better to deprecate class-qualified privates. * * (5) This phase also sets SPECIALIZED flag on type parameters with * `@specialized` annotation. We put this logic here because the * flag must be set before pickling. * * It also checks that: * * (1) Symbols accessed from super are not abstract, or are overridden by * an abstract override. * * (2) If a symbol accessed accessed from super is defined in a real class (not a trait), * there are no abstract members which override this member in Java's rules * (see SI-4989; such an access would lead to illegal bytecode) * * (3) Super calls do not go to some synthetic members of Any (see isDisallowed) * * (4) Super calls do not go to synthetic field accessors * * (5) A class and its companion object do not both define a class or module with the * same name. * * TODO: Rename phase to "Accessors" because it handles more than just super accessors */ abstract class SuperAccessors extends transform.Transform with transform.TypingTransformers { import global._ import definitions._ import analyzer.{ restrictionError } /** the following two members override abstract members in Transform */ val phaseName: String = "superaccessors" /** The following flags may be set by this phase: */ override def phaseNewFlags: Long = notPRIVATE protected def newTransformer(unit: CompilationUnit): Transformer = new SuperAccTransformer(unit) class SuperAccTransformer(unit: CompilationUnit) extends TypingTransformer(unit) { /** validCurrentOwner arrives undocumented, but I reverse engineer it to be * a flag for needsProtectedAccessor which is false while transforming either * a by-name argument block or a closure. This excludes them from being * considered able to access protected members via subclassing (why?) which in turn * increases the frequency with which needsProtectedAccessor will be true. 
*/ private var validCurrentOwner = true private val accDefs = mutable.Map[Symbol, ListBuffer[Tree]]() private def storeAccessorDefinition(clazz: Symbol, tree: Tree) = { val buf = accDefs.getOrElse(clazz, sys.error("no acc def buf for "+clazz)) buf += typers(clazz) typed tree } private def ensureAccessor(sel: Select, mixName: TermName = nme.EMPTY) = { val Select(qual, name) = sel val sym = sel.symbol val clazz = qual.symbol val supername = nme.superName(name, mixName) val superAcc = clazz.info.decl(supername).suchThat(_.alias == sym) orElse { debuglog(s"add super acc ${sym.fullLocationString} to $clazz") val acc = clazz.newMethod(supername, sel.pos, SUPERACCESSOR | PRIVATE | ARTIFACT) setAlias sym val tpe = clazz.thisType memberType sym match { case t if sym.isModuleNotMethod => NullaryMethodType(t) case t => t } acc setInfoAndEnter (tpe cloneInfo acc) // Diagnostic for SI-7091 if (!accDefs.contains(clazz)) reporter.error(sel.pos, s"Internal error: unable to store accessor definition in ${clazz}. clazz.hasPackageFlag=${clazz.hasPackageFlag}. Accessor required for ${sel} (${showRaw(sel)})") else storeAccessorDefinition(clazz, DefDef(acc, EmptyTree)) acc } atPos(sel.pos)(Select(gen.mkAttributedThis(clazz), superAcc) setType sel.tpe) } private def transformArgs(params: List[Symbol], args: List[Tree]) = { treeInfo.mapMethodParamsAndArgs(params, args) { (param, arg) => if (isByNameParamType(param.tpe)) withInvalidOwner(transform(arg)) else transform(arg) } } /** Check that a class and its companion object to not both define * a class or module with same name */ private def checkCompanionNameClashes(sym: Symbol) = if (!sym.owner.isModuleClass) { val linked = sym.owner.linkedClassOfClass if (linked != NoSymbol) { var other = linked.info.decl(sym.name.toTypeName).filter(_.isClass) if (other == NoSymbol) other = linked.info.decl(sym.name.toTermName).filter(_.isModule) if (other != NoSymbol) reporter.error(sym.pos, "name clash: "+sym.owner+" defines "+sym+ "\\nand its companion "+sym.owner.companionModule+" also defines "+ other) } } private def transformSuperSelect(sel: Select): Tree = { val Select(sup @ Super(_, mix), name) = sel val sym = sel.symbol val clazz = sup.symbol if (sym.isDeferred) { val member = sym.overridingSymbol(clazz) if (mix != tpnme.EMPTY || member == NoSymbol || !(member.isAbstractOverride && member.isIncompleteIn(clazz))) reporter.error(sel.pos, ""+sym.fullLocationString+" is accessed from super. It may not be abstract "+ "unless it is overridden by a member declared `abstract' and `override'") } else if (mix == tpnme.EMPTY && !sym.owner.isTrait){ // SI-4989 Check if an intermediate class between `clazz` and `sym.owner` redeclares the method as abstract. val intermediateClasses = clazz.info.baseClasses.tail.takeWhile(_ != sym.owner) intermediateClasses.map(sym.overridingSymbol).find(s => s.isDeferred && !s.isAbstractOverride && !s.owner.isTrait).foreach { absSym => reporter.error(sel.pos, s"${sym.fullLocationString} cannot be directly accessed from $clazz because ${absSym.owner} redeclares it as abstract") } } else { // SD-143: a call super[T].m that resolves to A.m cannot be translated to correct bytecode if // - A is a class (not a trait / interface), but not the direct superclass. Invokespecial // would select an overriding method in the direct superclass, rather than A.m. // We allow this if there are statically no intervening overrides. 
// https://docs.oracle.com/javase/specs/jvms/se8/html/jvms-6.html#jvms-6.5.invokespecial // - A is a java-defined interface and not listed as direct parent of the class. In this // case, `invokespecial A.m` would be invalid. def hasClassOverride(member: Symbol, subclass: Symbol): Boolean = { if (subclass == ObjectClass || subclass == member.owner) false else if (member.overridingSymbol(subclass) != NoSymbol) true else hasClassOverride(member, subclass.superClass) } val owner = sym.owner if (mix != tpnme.EMPTY && !owner.isTrait && owner != clazz.superClass && hasClassOverride(sym, clazz.superClass)) { reporter.error(sel.pos, s"cannot emit super call: the selected $sym is declared in $owner, which is not the direct superclass of $clazz.\\n" + s"An unqualified super call (super.${sym.name}) would be allowed.") } else if (owner.isInterface && owner.isJavaDefined && !clazz.parentSymbols.contains(owner)) { reporter.error(sel.pos, s"unable to emit super call unless interface ${owner.name} (which declares $sym) is directly extended by $clazz.") } } def mixIsTrait = sup.tpe match { case SuperType(thisTpe, superTpe) => superTpe.typeSymbol.isTrait } val needAccessor = name.isTermName && { mix.isEmpty && (clazz.isTrait || clazz != currentClass || !validCurrentOwner) || // SI-8803. If we access super[A] from an inner class (!= currentClass) or closure (validCurrentOwner), // where A is the superclass we need an accessor. If A is a parent trait we don't: in this case mixin // will re-route the super call directly to the impl class (it's statically known). !mix.isEmpty && (clazz != currentClass || !validCurrentOwner) && !mixIsTrait } if (needAccessor) ensureAccessor(sel, mix.toTermName) else sel } // Disallow some super.XX calls targeting Any methods which would // otherwise lead to either a compiler crash or runtime failure. private lazy val isDisallowed = { import definitions._ Set[Symbol](Any_isInstanceOf, Object_isInstanceOf, Any_asInstanceOf, Object_asInstanceOf, Object_==, Object_!=, Object_##) } override def transform(tree: Tree): Tree = { val sym = tree.symbol def mayNeedProtectedAccessor(sel: Select, args: List[Tree], goToSuper: Boolean) = if (needsProtectedAccessor(sym, tree.pos)) { debuglog("Adding protected accessor for " + tree) transform(makeAccessor(sel, args)) } else if (goToSuper) super.transform(tree) else tree try tree match { // Don't transform patterns or strange trees will reach the matcher (ticket #4062) case CaseDef(pat, guard, body) => treeCopy.CaseDef(tree, pat, transform(guard), transform(body)) case ClassDef(_, _, _, _) => def transformClassDef = { checkCompanionNameClashes(sym) val decls = sym.info.decls for (s <- decls) { if (s.privateWithin.isClass && !s.isProtected && !s.privateWithin.isModuleClass && !s.hasFlag(EXPANDEDNAME) && !s.isConstructor) { val savedName = s.name decls.unlink(s) s.expandName(s.privateWithin) decls.enter(s) log("Expanded '%s' to '%s' in %s".format(savedName, s.name, sym)) } } super.transform(tree) } transformClassDef case ModuleDef(_, _, _) => checkCompanionNameClashes(sym) super.transform(tree) case Template(_, _, body) => def transformTemplate = { val ownAccDefs = new ListBuffer[Tree] accDefs(currentOwner) = ownAccDefs // ugly hack... normally, the following line should not be // necessary, the 'super' method taking care of that. but because // that one is iterating through parents (and we dont want that here) // we need to inline it. 
curTree = tree val body1 = atOwner(currentOwner)(transformTrees(body)) accDefs -= currentOwner ownAccDefs ++= body1 deriveTemplate(tree)(_ => ownAccDefs.toList) } transformTemplate case TypeApply(sel @ Select(This(_), name), args) => mayNeedProtectedAccessor(sel, args, goToSuper = false) // set a flag for all type parameters with `@specialized` annotation so it can be pickled case typeDef: TypeDef if typeDef.symbol.deSkolemize.hasAnnotation(definitions.SpecializedClass) => debuglog("setting SPECIALIZED flag on typeDef.symbol.deSkolemize where typeDef = " + typeDef) // we need to deSkolemize symbol so we get the same symbol as others would get when // inspecting type parameter from "outside"; see the discussion of skolems here: // https://groups.google.com/d/topic/scala-internals/0j8laVNTQsI/discussion typeDef.symbol.deSkolemize.setFlag(SPECIALIZED) typeDef case sel @ Select(qual, name) => def transformSelect = { // FIXME Once Inliners is modified with the "'meta-knowledge' that all fields accessed by @inline will be made public" [1] // this can be removed; the correct place for this in in ExplicitOuter. // // [1] https://groups.google.com/forum/#!topic/scala-internals/iPkMCygzws4 // if (closestEnclMethod(currentOwner) hasAnnotation definitions.ScalaInlineClass) sym.makeNotPrivate(sym.owner) qual match { case This(_) => // warn if they are selecting a private[this] member which // also exists in a superclass, because they may be surprised // to find out that a constructor parameter will shadow a // field. See SI-4762. if (settings.warnPrivateShadow) { if (sym.isPrivateLocal && sym.paramss.isEmpty) { qual.symbol.ancestors foreach { parent => parent.info.decls filterNot (x => x.isPrivate || x.isLocalToThis) foreach { m2 => if (sym.name == m2.name && m2.isGetter && m2.accessed.isMutable) { reporter.warning(sel.pos, sym.accessString + " " + sym.fullLocationString + " shadows mutable " + m2.name + " inherited from " + m2.owner + ". Changes to " + m2.name + " will not be visible within " + sym.owner + " - you may want to give them distinct names.") } } } } } def isAccessibleFromSuper(sym: Symbol) = { val pre = SuperType(sym.owner.tpe, qual.tpe) localTyper.context.isAccessible(sym, pre, superAccess = true) } // Direct calls to aliases of param accessors to the superclass in order to avoid // duplicating fields. // ... but, only if accessible (SI-6793) if (sym.isParamAccessor && sym.alias != NoSymbol && isAccessibleFromSuper(sym.alias)) { val result = (localTyper.typedPos(tree.pos) { Select(Super(qual, tpnme.EMPTY) setPos qual.pos, sym.alias) }).asInstanceOf[Select] debuglog(s"alias replacement: $sym --> ${sym.alias} / $tree ==> $result"); //debug localTyper.typed(gen.maybeMkAsInstanceOf(transformSuperSelect(result), sym.tpe, sym.alias.tpe, beforeRefChecks = true)) } else { /* * A trait which extends a class and accesses a protected member * of that class cannot implement the necessary accessor method * because jvm access restrictions require the call site to be * in an actual subclass, and an interface cannot extenda class. * So, non-trait classes inspect their ancestors for any such situations * and generate the accessors. See SI-2296. * * TODO: anything we can improve here now that a trait compiles 1:1 to an interface? */ // FIXME - this should be unified with needsProtectedAccessor, but some // subtlety which presently eludes me is foiling my attempts. 
val shouldEnsureAccessor = ( currentClass.isTrait && sym.isProtected && sym.enclClass != currentClass && !sym.owner.isPackageClass // SI-7091 no accessor needed package owned (ie, top level) symbols && !sym.owner.isTrait && sym.owner.enclosingPackageClass != currentClass.enclosingPackageClass && qual.symbol.info.member(sym.name).exists && !needsProtectedAccessor(sym, tree.pos) ) if (shouldEnsureAccessor) { log("Ensuring accessor for call to protected " + sym.fullLocationString + " from " + currentClass) ensureAccessor(sel) } else mayNeedProtectedAccessor(sel, EmptyTree.asList, goToSuper = false) } case Super(_, mix) => if (sym.isValue && !sym.isMethod || sym.hasAccessorFlag) { if (!settings.overrideVars) reporter.error(tree.pos, "super may not be used on " + sym.accessedOrSelf) } else if (isDisallowed(sym)) { reporter.error(tree.pos, "super not allowed here: use this." + name.decode + " instead") } transformSuperSelect(sel) case _ => mayNeedProtectedAccessor(sel, EmptyTree.asList, goToSuper = true) } } transformSelect case DefDef(_, _, _, _, _, _) if tree.symbol.isMethodWithExtension => deriveDefDef(tree)(rhs => withInvalidOwner(transform(rhs))) case TypeApply(sel @ Select(qual, name), args) => mayNeedProtectedAccessor(sel, args, goToSuper = true) case Assign(lhs @ Select(qual, name), rhs) => def transformAssign = { if (lhs.symbol.isVariable && lhs.symbol.isJavaDefined && needsProtectedAccessor(lhs.symbol, tree.pos)) { debuglog("Adding protected setter for " + tree) val setter = makeSetter(lhs) debuglog("Replaced " + tree + " with " + setter) transform(localTyper.typed(Apply(setter, List(qual, rhs)))) } else super.transform(tree) } transformAssign case Apply(fn, args) => assert(fn.tpe != null, tree) treeCopy.Apply(tree, transform(fn), transformArgs(fn.tpe.params, args)) case Function(vparams, body) => withInvalidOwner { treeCopy.Function(tree, vparams, transform(body)) } case _ => super.transform(tree) } catch { case ex : AssertionError => if (sym != null && sym != NoSymbol) Console.println("TRANSFORM: " + tree.symbol.sourceFile) Console.println("TREE: " + tree) throw ex } } /** a typer for each enclosing class */ private var typers = immutable.Map[Symbol, analyzer.Typer]() /** Specialized here for performance; the previous blanked * introduction of typers in TypingTransformer caused a >5% * performance hit for the compiler as a whole. */ override def atOwner[A](tree: Tree, owner: Symbol)(trans: => A): A = { val savedValid = validCurrentOwner if (owner.isClass) validCurrentOwner = true val savedLocalTyper = localTyper localTyper = localTyper.atOwner(tree, if (owner.isModuleNotMethod) owner.moduleClass else owner) typers = typers updated (owner, localTyper) val result = super.atOwner(tree, owner)(trans) localTyper = savedLocalTyper validCurrentOwner = savedValid typers -= owner result } private def withInvalidOwner[A](trans: => A): A = { val saved = validCurrentOwner validCurrentOwner = false try trans finally validCurrentOwner = saved } /** Add a protected accessor, if needed, and return a tree that calls * the accessor and returns the same member. The result is already * typed. 
*/ private def makeAccessor(tree: Select, targs: List[Tree]): Tree = { val Select(qual, _) = tree val sym = tree.symbol val clazz = hostForAccessorOf(sym, currentClass) assert(clazz != NoSymbol, sym) debuglog("Decided for host class: " + clazz) val accName = nme.protName(sym.unexpandedName) val hasArgs = sym.tpe.paramSectionCount > 0 val memberType = refChecks.toScalaRepeatedParam(sym.tpe) // fix for #2413 // if the result type depends on the this type of an enclosing class, the accessor // has to take an object of exactly this type, otherwise it's more general val objType = if (isThisType(memberType.finalResultType)) clazz.thisType else clazz.typeOfThis val accType = (protAcc: Symbol) => memberType match { case PolyType(tparams, restpe) => // luc: question to author: should the tparams symbols not be cloned and get a new owner (protAcc)? PolyType(tparams, MethodType(List(protAcc.newSyntheticValueParam(objType)), restpe.cloneInfo(protAcc).asSeenFrom(qual.tpe, sym.owner))) case _ => MethodType(List(protAcc.newSyntheticValueParam(objType)), memberType.cloneInfo(protAcc).asSeenFrom(qual.tpe, sym.owner)) } val protAcc = clazz.info.decl(accName).suchThat(s => s == NoSymbol || s.tpe =:= accType(s)) orElse { val newAcc = clazz.newMethod(nme.protName(sym.unexpandedName), tree.pos, newFlags = ARTIFACT) newAcc setInfoAndEnter accType(newAcc) val code = DefDef(newAcc, { val (receiver :: _) :: tail = newAcc.paramss val base: Tree = Select(Ident(receiver), sym) val allParamTypes = mapParamss(sym)(_.tpe) val args = map2(tail, allParamTypes)((params, tpes) => map2(params, tpes)(makeArg(_, receiver, _))) args.foldLeft(base)(Apply(_, _)) }) debuglog("created protected accessor: " + code) storeAccessorDefinition(clazz, code) newAcc } val selection = Select(This(clazz), protAcc) def mkApply(fn: Tree) = Apply(fn, qual :: Nil) val res = atPos(tree.pos) { targs.head match { case EmptyTree => mkApply(selection) case _ => mkApply(TypeApply(selection, targs)) } } debuglog(s"Replaced $tree with $res") if (hasArgs) localTyper.typedOperator(res) else localTyper.typed(res) } /** Adapt the given argument in call to protected member. * Adaptation may add a cast to a path-dependent type, for instance * * def prot$m(obj: Outer)(x: Inner) = obj.m(x.asInstanceOf[obj.Inner]). * * such a cast might be necessary when m expects an Outer.this.Inner (the * outer of 'obj' and 'x' have to be the same). This restriction can't be * expressed in the type system (but is implicit when defining method m). * * Also, it calls using repeated parameters are ascribed with ': _*' */ private def makeArg(v: Symbol, obj: Symbol, pt: Type): Tree = { // owner class val clazz = pt match { case TypeRef(pre, _, _) => thisTypeOfPath(pre) case _ => NoSymbol } val result = gen.paramToArg(v) if (clazz != NoSymbol && (obj.tpe.typeSymbol isSubClass clazz)) // path-dependent type gen.mkAsInstanceOf(result, pt.asSeenFrom(singleType(NoPrefix, obj), clazz)) else result } /** Add an accessor for field, if needed, and return a selection tree for it . * The result is not typed. 
*/ private def makeSetter(tree: Select): Tree = { val field = tree.symbol val clazz = hostForAccessorOf(field, currentClass) assert(clazz != NoSymbol, field) debuglog("Decided for host class: " + clazz) val accName = nme.protSetterName(field.unexpandedName) val protectedAccessor = clazz.info decl accName orElse { val protAcc = clazz.newMethod(accName, field.pos, newFlags = ARTIFACT) val paramTypes = List(clazz.typeOfThis, field.tpe) val params = protAcc newSyntheticValueParams paramTypes val accessorType = MethodType(params, UnitTpe) protAcc setInfoAndEnter accessorType val obj :: value :: Nil = params storeAccessorDefinition(clazz, DefDef(protAcc, Assign(Select(Ident(obj), field.name), Ident(value)))) protAcc } atPos(tree.pos)(Select(This(clazz), protectedAccessor)) } /** Does `sym` need an accessor when accessed from `currentClass`? * A special case arises for classes with explicit self-types. If the * self type is a Java class, and a protected accessor is needed, we issue * an error. If the self type is a Scala class, we don't add an accessor. * An accessor is not needed if the access boundary is larger than the * enclosing package, since that translates to 'public' on the host sys. * (as Java has no real package nesting). * * If the access happens inside a 'trait', access is more problematic since * the implementation code is moved to an '$class' class which does not * inherit anything. Since we can't (yet) add accessors for 'required' * classes, this has to be signaled as error. */ private def needsProtectedAccessor(sym: Symbol, pos: Position): Boolean = { val clazz = currentClass def accessibleThroughSubclassing = validCurrentOwner && clazz.thisSym.isSubClass(sym.owner) && !clazz.isTrait val isCandidate = ( sym.isProtected && sym.isJavaDefined && !sym.isDefinedInPackage && !accessibleThroughSubclassing && (sym.enclosingPackageClass != currentClass.enclosingPackageClass) && (sym.enclosingPackageClass == sym.accessBoundary(sym.enclosingPackageClass)) ) val host = hostForAccessorOf(sym, clazz) def isSelfType = !(host.tpe <:< host.typeOfThis) && { if (host.typeOfThis.typeSymbol.isJavaDefined) restrictionError(pos, unit, "%s accesses protected %s from self type %s.".format(clazz, sym, host.typeOfThis) ) true } def isJavaProtected = host.isTrait && sym.isJavaDefined && { restrictionError(pos, unit, sm"""$clazz accesses protected $sym inside a concrete trait method. |Add an accessor in a class extending ${sym.enclClass} as a workaround.""" ) true } isCandidate && !host.isPackageClass && !isSelfType && !isJavaProtected } /** Return the innermost enclosing class C of referencingClass for which either * of the following holds: * - C is a subclass of sym.owner or * - C is declared in the same package as sym's owner */ private def hostForAccessorOf(sym: Symbol, referencingClass: Symbol): Symbol = { if (referencingClass.isSubClass(sym.owner.enclClass) || referencingClass.thisSym.isSubClass(sym.owner.enclClass) || referencingClass.enclosingPackageClass == sym.owner.enclosingPackageClass) { assert(referencingClass.isClass, referencingClass) referencingClass } else if(referencingClass.owner.enclClass != NoSymbol) hostForAccessorOf(sym, referencingClass.owner.enclClass) else referencingClass } /** For a path-dependent type, return the this type. */ private def thisTypeOfPath(path: Type): Symbol = path match { case ThisType(outerSym) => outerSym case SingleType(rest, _) => thisTypeOfPath(rest) case _ => NoSymbol } /** Is 'tpe' the type of a member of an enclosing class? 
*/ private def isThisType(tpe: Type): Boolean = tpe match { case ThisType(sym) => sym.isClass && !sym.isPackageClass case TypeRef(pre, _, _) => isThisType(pre) case SingleType(pre, _) => isThisType(pre) case RefinedType(parents, _) => parents exists isThisType case AnnotatedType(_, tp) => isThisType(tp) case _ => false } } }
felixmulder/scala
src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
Scala
bsd-3-clause
28,220
package com.github.bespalovdn.asteriskscala.agi.transport

import java.util.{List => JList}

import com.github.bespalovdn.asteriskscala.agi.request.AgiRequest
import io.netty.channel.ChannelHandlerContext
import io.netty.handler.codec.MessageToMessageDecoder

import scala.collection.mutable.ArrayBuffer

class AgiRequestDecoder extends MessageToMessageDecoder[String] {
  override def decode(ctx: ChannelHandlerContext, msg: String, out: JList[AnyRef]): Unit = {
    if (msg.isEmpty && buffer.nonEmpty) {
      out.add(AgiRequest(buffer))
      buffer.clear()
    } else {
      buffer += msg
    }
  }

  private val buffer = ArrayBuffer.empty[String]
}

object AgiRequestDecoder {
  def channelHandlerName = AgiRequestDecoder.getClass.getSimpleName
}
bespalovdn/asterisk-scala
agi/src/main/scala/com/github/bespalovdn/asteriskscala/agi/transport/AgiRequestDecoder.scala
Scala
mit
787
package pl.arapso.scaffoldings.scala.custom.matches

object UnapplayExample {

  case class SomeStuff(name: String)

  def main(args: Array[String]) {
    val test = new SomeStuff("kupon")
    val result = test match {
      case SomeStuff("kuponik") => "OK"
      case _ => "NOT OK"
    }
    println(result)
  }
}
arapso-scaffoldings/scala
scala-tutor/custom/src/main/scala/pl/arapso/scaffoldings/scala/custom/matches/UnapplayExample.scala
Scala
apache-2.0
320
/* * Copyright 2015 LG CNS. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package scouter.server.netio.service.net; import java.io.BufferedInputStream import java.io.BufferedOutputStream import java.io.EOFException import java.net.Socket import java.net.SocketTimeoutException import scouter.io.DataInputX import scouter.io.DataOutputX import scouter.net.NetCafe import scouter.net.RequestCmd import scouter.net.TcpFlag import scouter.server.LoginManager import scouter.server.logs.RequestLogger import scouter.server.netio.service.ServiceHandlingProxy import scouter.util.FileUtil import scouter.util.Hexa32 import scouter.server.Configure object ServiceWorker { var workers = 0; def inc() { this.synchronized { workers += 1; } } def desc() { this.synchronized { workers -= 1; } } def getActiveCount(): Int = { workers; } } class ServiceWorker(_socket: Socket) extends Runnable { var socket = _socket; val in = new DataInputX(new BufferedInputStream(socket.getInputStream())); val out = new DataOutputX(new BufferedOutputStream(socket.getOutputStream())); val conf = Configure.getInstance() override def run() { var remoteAddr = "" try { remoteAddr = "" + socket.getRemoteSocketAddress() val cafe = in.readInt(); cafe match { case NetCafe.TCP_AGENT => val objHash = in.readInt() val num= TcpAgentManager.add(objHash, new TcpAgentWorker(socket, in, out)) if(conf.debug_net){ println("Agent : " + remoteAddr + " open [" + Hexa32.toString32(objHash) + "] #"+num); } return case NetCafe.TCP_CLIENT => if(conf.debug_net){ println("Client : " + remoteAddr + " open #"+(ServiceWorker.getActiveCount()+1)); } case _ => if(conf.debug_net){ println("Unknown : " + remoteAddr + " drop"); } FileUtil.close(in); FileUtil.close(out); FileUtil.close(socket); return } } catch { case _: Throwable => FileUtil.close(in); FileUtil.close(out); FileUtil.close(socket); return } try { ServiceWorker.inc(); while (true) { val cmd = in.readText(); if (RequestCmd.CLOSE.equals(cmd)) { return } val session = in.readLong(); val login = LoginManager.okSession(session); RequestLogger.getInstance().add(cmd, session); ServiceHandlingProxy.process(cmd, in, out, login); out.writeByte(TcpFlag.NoNEXT); out.flush(); } } catch { case ne: NullPointerException => if(conf.debug_net){ println("Client : " + remoteAddr + " closed"); ne.printStackTrace(); } case e: EOFException => if(conf.debug_net){ println("Client : " + remoteAddr + " closed"); } case se: SocketTimeoutException => if(conf.debug_net){ println("Client : " + remoteAddr + " closed"); se.printStackTrace(); } case e: Exception => if(conf.debug_net){ println("Client : " + remoteAddr + " closed " + e + " workers=" + ServiceWorker.getActiveCount()); } case t: Throwable => t.printStackTrace(); } finally { FileUtil.close(in); FileUtil.close(out); FileUtil.close(socket); ServiceWorker.desc(); } } }
jhshin9/scouter
scouter.server/src/scouter/server/netio/service/net/TcpServiceWorker.scala
Scala
apache-2.0
4,618
package zeroadv.db

import akka.actor.ActorSystem

trait DbModule {
  lazy val mongoDb = MongoDb.connect(system)
  lazy val eventCollection = wire[EventCollection]

  def newWriteEventToMongoActor = wire[WriteEventToMongoActor]

  def system: ActorSystem
}
adamw/zeroadv
collector/src/main/scala/zeroadv/db/DbModule.scala
Scala
gpl-2.0
256
package com.twitter.finatra.multiserver.Add2HttpServer

import com.twitter.finatra.http.HttpServer
import com.twitter.finatra.http.filters.CommonFilters
import com.twitter.finatra.http.routing.HttpRouter

class Add2Server extends HttpServer {

  override val modules = Seq(Add1HttpClientModule)

  override def configureHttp(router: HttpRouter) {
    router
      .filter[CommonFilters]
      .add[Add2Controller]
  }
}
syamantm/finatra
inject-thrift-client-http-mapper/src/test/scala/com/twitter/finatra/multiserver/Add2HttpServer/Add2Server.scala
Scala
apache-2.0
419
package com.twitter.util import java.util.concurrent.atomic.AtomicReference import org.junit.runner.RunWith import org.scalacheck.Arbitrary.arbitrary import org.scalatest.FunSuite import org.scalatest.junit.JUnitRunner import org.scalatest.prop.GeneratorDrivenPropertyChecks import scala.collection.mutable @RunWith(classOf[JUnitRunner]) class VarTest extends FunSuite with GeneratorDrivenPropertyChecks { private case class U[T](init: T) extends UpdatableVar[T](init) { import Var.Observer var observerCount = 0 var accessCount = 0 override def observe(d: Int, obs: Observer[T]) = { accessCount += 1 observerCount += 1 Closable.all( super.observe(d, obs), Closable.make { deadline => observerCount -= 1 Future.Done }) } } test("Var.map") { val v = Var(123) val s = v map (_.toString) assert(Var.sample(s) === "123") v() = 8923 assert(Var.sample(s) === "8923") var buf = mutable.Buffer[String]() s observe { v => buf += v } assert(buf.toSeq === Seq("8923")) v() = 111 assert(buf.toSeq === Seq("8923", "111")) } test("depth ordering") { val v0 = U(3) val v1 = U(2) val v2 = v1 flatMap { i => v1 } val v3 = v2 flatMap { i => v1 } val v4 = v3 flatMap { i => v0 } var result = 1 v4.observe{ i => result = result+2 } // result = 3 v0.observe{ i => result = result*2 } // result = 6 assert(result === 6) result = 1 // reset the value, but this time the ordering will go v0, v4 because of depth v0() = 4 // trigger recomputation, supplied value is unused // v0 observation: result = result*2 = 2 // v4 observation: result = result+2 = 4 assert(result === 4) } test("version ordering") { val v1 = Var(2) var result = 0 val o1 = v1.observe { i => result = result + i } // result = 2 val o2 = v1.observe { i => result = result * i * i } // result = 2 * 2 * 2 = 8 val o3 = v1.observe { i => result = result + result + i } // result = 8 + 8 + 2 = 18 assert(result === 18) // ensure those three things happened in sequence result=1 // just reset for sanity v1() = 3 // this should invoke o1-o3 in order: // result = 1 + 3 = 4 // result = 4 * 3 * 3 = 36 // result = 36 + 36 + 3 = 75 assert(result === 75) } test("flatMap") { val us = Seq.fill(5) { U(0) } def short(us: Seq[Var[Int]]): Var[Int] = us match { case Seq(hd, tl@_*) => hd flatMap { case 0 => short(tl) case i => Var(i) } case Seq() => Var(-1) } val s = short(us) assert(Var.sample(s) === -1) assert(us forall (_.accessCount == 1), us map(_.accessCount) mkString ",") Var.sample(s); Var.sample(s) assert(us forall (_.accessCount == 3)) assert(us forall (_.observerCount == 0), us map(_.observerCount.toString) mkString(",")) // Now maintain a subscription. 
var cur = Var.sample(s) val sub = s.observe { cur = _ } assert(cur === -1) assert(us forall (_.observerCount == 1)) us(0).update(123) assert(cur === 123) assert(us(0).observerCount === 1) assert(us drop 1 forall (_.observerCount == 0)) us(1).update(333) assert(cur === 123) assert(us(0).observerCount === 1) assert(us drop 1 forall (_.observerCount == 0)) us(0).update(0) assert(cur === 333) assert(us(0).observerCount === 1) assert(us(1).observerCount === 1) assert(us drop 2 forall (_.observerCount == 0)) val f = sub.close() assert(f.isDefined) Await.result(f) assert(us forall (_.observerCount == 0)) } test("Var(init)") { val v = Var(123) var cur = Var.sample(v) val sub = v observe { cur = _ } v() = 333 assert(cur === 333) v() = 111 assert(cur === 111) val f = sub.close() assert(f.isDefined) Await.result(f) v() = 100 assert(cur === 111) } test("multiple observers at the same level") { val v = Var(2) val a = v map(_*2) val b = v map(_*3) var x, y = 0 a observe { x = _ } b observe { y = _ } assert(x === 4) assert(y === 6) v() = 1 assert(x === 2) assert(y === 3) } test("Var.async") { val x = Var[Int](333) val p = new Promise[Unit] var closed: Time = Time.Zero var called = 0 val c = Closable.make { t => closed = t p } val v = Var.async(123) { v => called += 1 x observe { v() = _ } c } assert(called === 0) var vv: Int = 0 val o = v observe { vv = _ } assert(called === 1) assert(vv === 333) assert(closed === Time.Zero) x() = 111 assert(vv === 111) assert(closed === Time.Zero) val o1 = v observe { v => () } val t = Time.now val f = o.close(t) assert(called === 1) assert(closed === Time.Zero) assert(f.isDone) // Closing the Var.async process is asynchronous with closing // the Var itself. val f1 = o1.close(t) assert(closed === t) assert(f1.isDone) } test("Var.collect[Seq]") { val vars = Seq( Var(1), Var(2), Var(3)) val coll = Var.collect(vars: Seq[Var[Int]]) val ref = new AtomicReference[Seq[Int]] coll.observeTo(ref) assert(ref.get === Seq(1,2,3)) vars(1).update(999) assert(ref.get === Seq(1,999,3)) } // This is either very neat or very horrendous, // depending on your point of view. test("Var.collect[Set]") { val vars = Seq( Var(1), Var(2), Var(3)) val coll = Var.collect(vars map (v => v: Var[Int]) toSet) val ref = new AtomicReference[Set[Int]] coll.observeTo(ref) assert(ref.get === Set(1,2,3)) vars(1).update(1) assert(ref.get === Set(1,3)) vars(1).update(999) assert(ref.get === Set(1,999,3)) } test("Var.observeUntil") { val v = Var[Int](123) // Now: Future.event(Event(v) filter(_%2 == 0)) // v.changes.filter(_%2==0).toFuture val f = v.observeUntil(_%2 == 0) assert(!f.isDefined) v() = 333 assert(!f.isDefined) v() = 332 assert(f.isDefined) assert(Await.result(f) === 332) } /** * ensure object consistency with Var.value */ test("Var.value") { val contents = List(1,2,3,4) val v1 = Var.value(contents) assert(Var.sample(v1) eq contents) v1.observe { l => assert(contents eq l) } } /** * Ensures that we halt observation after all observers are closed, and then * resume once observation returns. 
*/ test("Var observers coming and going") { val v = Var(11) val f = v.flatMap { i => assert(i != 10) Var.value(i*2) } val c1 = f.observe { i => assert(i === 22) } val c2 = f.observe { i => assert(i === 22) } c1.close() c2.close() v() = 10 // this should not assert because it's unobserved v() = 22 // now it's safe to re-observe var observed = 3 val c3 = f.observe { i => observed = i } assert(Var.sample(f) === 44) assert(Var.sample(v) === 22) assert(observed === 44) } /** * This test is inspired by a conversation with marius where he asked how * would you embody this test in Vars: * if (x == 0) 0 else 1/x * * The idea is that you compose Var x with maps and flatMaps that do not * execute until they are observed. * * It is this case that prevents caching the value of the Var before it's observed */ test("Var not executing until observed") { val x = Var(0) val invertX = x map { i => 1/i } val result = x flatMap { i => if (i == 0) Var(0) else invertX } x() = 42 x() = 0 // this should not throw an exception because there are no observers x() = 1 assert(Var.sample(result) === 1) // invertX is observed briefly x() = 0 assert(Var.sample(result) === 0) // but invertX is not being observed here so we're ok } test("Var.Sampled") { val v = Var(123) v match { case Var.Sampled(123) => case _ => fail() } v() = 333 v match { case Var.Sampled(333) => case _ => fail } } def testPropagation(typ: String, newVar: Int => Var[Int]) { test("Don't propagate up-to-date "+typ+"-valued Var observations") { val v = Var(123) val w = newVar(333) val x = v flatMap { _ => w } var buf = mutable.Buffer[Int]() x observe { v => buf += v } assert(buf === Seq(333)) v() = 333 assert(buf === Seq(333)) } test("Do propagate out-of-date "+typ+"-valued observations") { val v = Var(123) val w1 = newVar(333) val w2 = newVar(444) val x = v flatMap { case 123 => w1 case _ => w2 } var buf = mutable.Buffer[Int]() x observe { v => buf += v } assert(buf === Seq(333)) v() = 333 assert(buf === Seq(333, 444)) v() = 123 assert(buf === Seq(333, 444, 333)) v() = 333 assert(buf === Seq(333, 444, 333, 444)) v() = 334 assert(buf === Seq(333, 444, 333, 444)) } } testPropagation("constant", Var.value) testPropagation("variable", Var.apply(_)) test("Race-a-Var") { class Counter(n: Int, u: Updatable[Int]) extends Thread { override def run() { var i = 1 while (i < n) { u() = i i += 1 } } } val N = 10000 val a, b = Var(0) val c = a.flatMap(_ => b) @volatile var j = -1 @volatile var n = 0 c observe { i => assert(i === j+1) j = i } val ac = new Counter(N, a) val bc = new Counter(N, b) ac.start() bc.start() ac.join() bc.join() assert(j === N-1) } test("Don't allow stale updates") { val a = Var(0) val ref = new AtomicReference[Seq[(Int, Int)]] (a join a).changes.build.register(Witness(ref)) assert(ref.get === Seq((0, 0))) a() = 1 assert(ref.get === Seq((0, 0), (1, 1))) } test("Var: diff/patch") { forAll(arbitrary[Seq[Set[Int]]].suchThat(_.nonEmpty)) { sets => val v = Var(sets.head) val w = new AtomicReference[Set[Int]] Var.patch[Set, Int](v.diff).changes.register(Witness(w)) for (set <- sets) { v() = set assert(set === w.get) } } } }
luciferous/util
util-core/src/test/scala/com/twitter/util/VarTest.scala
Scala
apache-2.0
10,309
package bad.robot.radiate.monitor

import java.util.concurrent.ScheduledFuture

trait Monitor {
  def start(tasks: List[MonitoringTask]): List[ScheduledFuture[_]]
  def cancel(tasks: List[ScheduledFuture[_]])
  def stop: List[Runnable]
}
tobyweston/radiate
src/main/scala/bad/robot/radiate/monitor/Monitor.scala
Scala
apache-2.0
238
package io.atal.butterfly.action

import io.atal.butterfly.{Editor, Clipboard, Cursor}
import org.scalatest._

/** Erase action unit test */
class EraseTest extends FlatSpec {
  "The erase action" should "remove a character or the selection" in {
    val action = new Erase
    val editor = new Editor()
    val clipboard = new Clipboard()
    editor.buffer.content = "California here we come, right back where we started from"

    // With one selection
    editor.cursors = List(new Cursor((0, 0), Some(new Cursor((0, 11)))))

    action.execute(editor, clipboard)
    assert(editor.buffer.content == "here we come, right back where we started from")

    editor.buffer.content = "California here we come, right back where we started from"

    // With multiple selections
    editor.cursors = List(
      new Cursor((0, 0), Some(new Cursor((0, 11)))),
      new Cursor((0, 45), Some(new Cursor((0, 53))))
    )

    action.execute(editor, clipboard)
    assert(editor.buffer.content == "here we come, right back where we from")

    // With no selection, simple character by character eraser
    editor.buffer.content = "California here we come, right back where we started from"
    editor.cursors = List(
      new Cursor((0, 1)),
      new Cursor((0, 45))
    )

    action.execute(editor, clipboard)
    assert(editor.buffer.content == "alifornia here we come, right back where westarted from")
  }
}
Matthieu-Riou/Butterfly
src/test/scala/io/atal/butterfly/action/EraseTest.scala
Scala
mit
1,415
package jp.relx.awstools

object EIPAllocator {
  def main(args: Array[String]) {
    println("HelloWorld")
  }
}
urelx/aws-eip-allocator
src/main/scala/jp/relx/awstools/EIPAllocator.scala
Scala
apache-2.0
113
package com.github.al.roulette.player

import com.github.al.roulette.player.impl._
import com.lightbend.lagom.scaladsl.playjson.{JsonSerializer, JsonSerializerRegistry}

object PlayerSerializerRegistry extends JsonSerializerRegistry {
  override def serializers = List(
    JsonSerializer[PlayerState],
    JsonSerializer[CreatePlayer],
    JsonSerializer[IssueAccessToken.type],
    JsonSerializer[GetPlayer.type],
    JsonSerializer[PlayerCreated],
    JsonSerializer[AccessTokenIssued]
  )
}
andrei-l/reactive-roulette
player-impl/src/main/scala/com/github/al/roulette/player/PlayerSerializerRegistry.scala
Scala
mit
499
package com.trafficland.augmentsbt.releasemanagement

import sbt.{Setting, SettingKey}

object SnapshotReleaseTasks {

  lazy val releasePublishLibSnapshotSettingKey: SettingKey[Seq[String]] = SettingKey[Seq[String]](
    "release-publish-lib-snapshot-tasks",
    "a list of tasks to execute (in order) for publishing a library's snapshot release"
  )

  lazy val releasePublishLibSnapshotTasks: Setting[Seq[String]] = releasePublishLibSnapshotSettingKey := Seq(
    "release-ready",
    "+publish-local",
    "+publish",
    "versionWriteSnapshotRelease",
    "git-release-commit",
    "git-tag",
    "versionToSnapshot",
    "git-version-bump-commit",
    "git-push-origin"
  )

  lazy val releaseAppSnapshotSettingKey: SettingKey[Seq[String]] = SettingKey[Seq[String]](
    "release-app-snapshot-tasks",
    "a list of tasks to execute (in order) for releasing an app's snapshot release"
  )

  lazy val releaseAppSnapshotTasks: Setting[Seq[String]] = releaseAppSnapshotSettingKey := Seq(
    "release-ready",
    "versionWriteSnapshotRelease",
    "git-release-commit",
    "git-tag",
    "versionToSnapshot",
    "git-version-bump-commit",
    "git-push-origin"
  )
}
ereichert/augment-sbt
src/main/scala/com/trafficland/augmentsbt/releasemanagement/SnapshotReleaseTasks.scala
Scala
apache-2.0
1,174
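These task lists are ordinary sbt settings, so they can be dropped into a build definition. A minimal sketch, assuming a single-project build.sbt with a made-up project name; the release plugin that actually runs the listed task names is not part of the entry above.

import com.trafficland.augmentsbt.releasemanagement.SnapshotReleaseTasks

// Hypothetical build.sbt fragment: attach both snapshot-release task lists to the root project.
lazy val myApp = (project in file("."))
  .settings(
    SnapshotReleaseTasks.releasePublishLibSnapshotTasks,
    SnapshotReleaseTasks.releaseAppSnapshotTasks
  )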
package tholowka.diz.marshalling.terms

private[terms] object BooleanParser {}

private[terms] case class BooleanParser() {
  def consume(input: Boolean): String = {
    if (input) {
      "true"
    } else {
      "false"
    }
  }
}
tholowka/diz
src/main/scala/tholowka/diz/marshalling/terms/BooleanParser.scala
Scala
mit
263
package scutil.gui.extension

import java.awt._

import scutil.geom._
import scutil.gui.geomConversion

object IntRectImplicits extends IntRectImplicits

trait IntRectImplicits {
  implicit final class IntRectExt(peer: IntRect) {
    def toAwtRectangle: Rectangle = geomConversion IntRect_Rectangle peer
  }
}
ritschwumm/scutil
modules/gui/src/main/scala/scutil/gui/extension/IntRectImplicits.scala
Scala
bsd-2-clause
304
/*
 * SPDX-License-Identifier: Apache-2.0
 *
 * Copyright 2015-2021 Andre White.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.truthencode.ddo.support.requisite

/**
 * Created by adarr on 1/29/2017.
 */
trait Requisite
adarro/ddo-calc
subprojects/common/ddo-core/src/main/scala/io/truthencode/ddo/support/requisite/Requisite.scala
Scala
apache-2.0
771
package scaredy.http

object HttpResponseHeaders {

  sealed trait ResponseHeader {
    def asString: String
  }

  object RateLimitUsed extends ResponseHeader {
    val asString = "X-Ratelimit-Used"
  }

  object RateLimitRemaining extends ResponseHeader {
    val asString = "X-Ratelimit-Remaining"
  }

  object RateLimitReset extends ResponseHeader {
    val asString = "X-Ratelimit-Reset"
  }

  object Status extends ResponseHeader {
    val asString = "Status"
  }

  object Date extends ResponseHeader {
    val asString = "Date"
  }
}
jjpatel/scaredy
src/main/scala/scaredy/http/HttpResponseHeaders.scala
Scala
mit
515
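A small usage sketch for the header constants above. The Map-based response shape is an assumption made for illustration only; it is not part of the scaredy API shown in this entry.

import scaredy.http.HttpResponseHeaders.{RateLimitRemaining, RateLimitReset}

// Read the rate-limit counters out of a plain header map, tolerating missing or non-numeric values.
def rateLimitInfo(headers: Map[String, String]): (Option[Int], Option[Int]) = {
  def asInt(name: String): Option[Int] =
    headers.get(name).flatMap(v => scala.util.Try(v.toInt).toOption)
  (asInt(RateLimitRemaining.asString), asInt(RateLimitReset.asString))
}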
package controllers import com.bryzek.apidoc.api.v0.models.{ Membership, Organization, User } import models.MainTemplate import lib.Role import play.api.mvc._ import play.api.mvc.Results.Redirect import scala.concurrent.{ Await, Future } import scala.concurrent.duration._ import play.api.Play.current import java.util.UUID class AuthenticatedOrgRequest[A]( val org: Organization, val isMember: Boolean, val isAdmin: Boolean, user: User, request: Request[A] ) extends AuthenticatedRequest[A](user, request) { require( !isAdmin || (isAdmin && isMember), "A user that is an admin should always be considered a member" ) override def mainTemplate(title: Option[String] = None): MainTemplate = { MainTemplate( requestPath = request.path, title = title, user = Some(user), org = Some(org), isOrgMember = isMember, isOrgAdmin = isAdmin ) } def requireAdmin() { require(isAdmin, s"Action requires admin role. User[${user.guid}] is not an admin of Org[${org.key}]") } def requireMember() { require(isMember, s"Action requires member role. User[${user.guid}] is not a member of Org[${org.key}]") } } object AuthenticatedOrg extends ActionBuilder[AuthenticatedOrgRequest] { import scala.concurrent.ExecutionContext.Implicits.global def invokeBlock[A](request: Request[A], block: (AuthenticatedOrgRequest[A]) => Future[Result]) = { def returnUrl(orgKey: Option[String]): Option[String] = { if (request.method.toUpperCase == "GET") { Some(request.uri) } else { orgKey match { case None => Some(routes.ApplicationController.index().url) case Some(key) => Some(routes.Organizations.show(key).url) } } } val orgKeyOption = request.path.split("/").drop(1).headOption request.session.get("user_guid").map { userGuid => lib.ApiClient.awaitCallWith404(Authenticated.api().Users.getByGuid(UUID.fromString(userGuid))) match { case None => { // have a user guid, but user does not exist Future.successful(Redirect(routes.LoginController.index(return_url = returnUrl(orgKeyOption))).withNewSession) } case Some(u: User) => { val orgKey = orgKeyOption.getOrElse { sys.error(s"No org key for request path[${request.path}]") } val orgOption = lib.ApiClient.awaitCallWith404(Authenticated.api(Some(u)).Organizations.getByKey(orgKey)).headOption val memberships = Await.result(Authenticated.api(Some(u)).Memberships.get(orgKey = Some(orgKey), userGuid = Some(u.guid)), 1000.millis) orgOption match { case None => { Future.successful(Redirect("/").flashing("warning" -> s"Organization $orgKey not found")) } case Some(org: Organization) => { val isAdmin = !memberships.find(_.role == Role.Admin.key).isEmpty val isMember = isAdmin || !memberships.find(_.role == Role.Member.key).isEmpty val authRequest = new AuthenticatedOrgRequest(org, isMember, isAdmin, u, request) block(authRequest) } } } } } getOrElse { Future.successful(Redirect(routes.LoginController.index(return_url = returnUrl(orgKeyOption))).withNewSession) } } }
Seanstoppable/apidoc
www/app/controllers/AuthenticatedOrgRequest.scala
Scala
mit
3,356
import java.io.File import java.io.BufferedWriter import java.io.FileWriter import scala.io.Source object GenTheyWord { def generateFile(srcFileDir: String, srcClassName: String, targetFileDir: String, targetClassName: String) { val targetDir = new File(targetFileDir) targetDir.mkdirs() val writer = new BufferedWriter(new FileWriter(new File(targetFileDir, targetClassName + ".scala"))) try { val itLines = Source.fromFile(new File(srcFileDir, srcClassName + ".scala")).getLines().toList // for 2.8 for (itLine <- itLines) { //.replaceAll("\\"An it clause", "\\"A they clause") //.replaceAll("an it clause", "a they clause") val theyLine = itLine.replaceAll("\\\\sit\\\\(", " they\\\\(") .replaceAll("\\\\sit\\\\s", " they ") .replaceAll("\\"it\\\\s", "\\"they ") .replaceAll("they or they clause.\\"", "it or they clause.\\"") .replaceAll("\\"An they clause", "\\"A they clause") .replaceAll("an they or a they clause.\\"", "an it or a they clause.\\"") .replaceAll(srcClassName, targetClassName) writer.write(theyLine) writer.newLine() // add for 2.8 } } finally { writer.close() } } def main(args: Array[String]) { } def genTest(dir: File, version: String, scalaVersion: String) { generateFile("src/test/scala/org/scalatest", "FunSpecSuite", dir.getAbsolutePath, "FunSpecSuiteUsingThey") generateFile("src/test/scala/org/scalatest", "FunSpecSpec", dir.getAbsolutePath, "FunSpecSpecUsingThey") generateFile("src/test/scala/org/scalatest", "FlatSpecSpec", dir.getAbsolutePath, "FlatSpecSpecUsingThey") generateFile("src/test/scala/org/scalatest/path", "FunSpecSpec", dir.getAbsolutePath, "PathFunSpecSpecUsingThey") generateFile("src/test/scala/org/scalatest/fixture", "FunSpecSpec", dir.getAbsolutePath, "FixtureFunSpecSpecUsingThey") generateFile("src/test/scala/org/scalatest/fixture", "FlatSpecSpec", dir.getAbsolutePath, "FixtureFlatSpecSpecUsingThey") } }
travisbrown/scalatest
project/GenTheyWord.scala
Scala
apache-2.0
2,476
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // scalastyle:off println package org.apache.spark.examples.mllib import org.apache.spark.{SparkConf, SparkContext} import org.apache.spark.mllib.linalg.Vectors import org.apache.spark.mllib.linalg.distributed.RowMatrix /** * Compute the singular value decomposition (SVD) of a tall-and-skinny matrix. * * The input matrix must be stored in row-oriented dense format, one line per row with its entries * separated by space. For example, * {{{ * 0.5 1.0 * 2.0 3.0 * 4.0 5.0 * }}} * represents a 3-by-2 matrix, whose first row is (0.5, 1.0). */ object TallSkinnySVD { def main(args: Array[String]) { if (args.length != 1) { System.err.println("Usage: TallSkinnySVD <input>") System.exit(1) } val conf = new SparkConf().setAppName("TallSkinnySVD") val sc = new SparkContext(conf) // Load and parse the data file. val rows = sc.textFile(args(0)).map { line => val values = line.split(' ').map(_.toDouble) Vectors.dense(values) } val mat = new RowMatrix(rows) // Compute SVD. val svd = mat.computeSVD(mat.numCols().toInt) println("Singular values are " + svd.s) sc.stop() } } // scalastyle:on println
minixalpha/spark
examples/src/main/scala/org/apache/spark/examples/mllib/TallSkinnySVD.scala
Scala
apache-2.0
1,992
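The scaladoc above spells out the expected row-oriented input. As a quick way to try the same computation without an input file, here is a sketch that feeds the documented 3-by-2 example matrix to RowMatrix.computeSVD directly; the local master and the demo object name are assumptions, not part of the Spark example itself.

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.linalg.distributed.RowMatrix

object TallSkinnySVDLocalDemo {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(
      new SparkConf().setAppName("TallSkinnySVDLocalDemo").setMaster("local[*]"))

    // The 3-by-2 matrix from the scaladoc: first row is (0.5, 1.0).
    val rows = sc.parallelize(Seq(
      Vectors.dense(0.5, 1.0),
      Vectors.dense(2.0, 3.0),
      Vectors.dense(4.0, 5.0)))

    val svd = new RowMatrix(rows).computeSVD(2, computeU = true)
    println("Singular values are " + svd.s)

    sc.stop()
  }
}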
package uk.gov.gds.ier.validation

import uk.gov.gds.ier.model.DOB
import org.joda.time.{DateTime, DateMidnight}
import org.joda.time.Months
import uk.gov.gds.ier.model.DateLeft

object DateValidator {

  lazy val minimumAge = 16
  lazy val maximumAge = 115
  lazy val maximumCitizenshipDuration = 115

  def isExistingDate(dateOfBirth: DOB): Option[DateMidnight] = {
    try {
      Some(parseToDateMidnight(dateOfBirth))
    } catch {
      case ex: Exception => None
    }
  }

  def isExistingDateInThePast(dateOfBirth: DateMidnight) = {
    try {
      dateOfBirth.isBeforeNow
    } catch {
      case ex: Exception => false
    }
  }

  def isTooOldToBeAlive(dateOfBirth: DateMidnight) = {
    isDateBefore(dateOfBirth, maximumAge)
  }

  def isCitizenshipTooOld(dateOfCitizenship: DateMidnight) = {
    isDateBefore(dateOfCitizenship, maximumCitizenshipDuration)
  }

  def isTooYoungToRegister(dateOfBirth: DOB) = {
    try {
      parseToDateMidnight(dateOfBirth).plusYears(minimumAge).isAfter(DateTime.now.toDateMidnight)
    } catch {
      case ex: Exception => false
    }
  }

  def dateLeftUkOver15Years(dateLeftUk: DateLeft): Boolean = {
    val leftUk = new DateTime().withMonthOfYear(dateLeftUk.month).withYear(dateLeftUk.year)
    val monthDiff = Months.monthsBetween(leftUk, DateTime.now()).getMonths()
    if (monthDiff >= 15 * 12) true
    else false
  }

  def isLessEighteen(dateOfBirth: DOB) = {
    try {
      val eighteenYearsAgo = DateTime.now.minusYears(18).toDateMidnight
      val dob = parseToDateMidnight(dateOfBirth)
      dob.isAfter(eighteenYearsAgo) || dob.isEqual(eighteenYearsAgo)
    } catch {
      case ex: Exception => false
    }
  }

  private def isDateBefore(date: DateMidnight, yearsBack: Int) = {
    try {
      date.plusYears(yearsBack).isBefore(DateTime.now.toDateMidnight.plusDays(1))
    } catch {
      case ex: Exception => false
    }
  }

  private def parseToDateMidnight(dateOfBirth: DOB) = {
    new DateMidnight(
      dateOfBirth.year,
      dateOfBirth.month,
      dateOfBirth.day)
  }
}
michaeldfallen/ier-frontend
app/uk/gov/gds/ier/validation/DateValidator.scala
Scala
mit
2,059
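A usage sketch for the validator above. It assumes DOB is a simple case class with year, month and day fields, which is how parseToDateMidnight reads it; the sample date is made up.

import uk.gov.gds.ier.model.DOB
import uk.gov.gds.ier.validation.DateValidator

val applicant = DOB(year = 2012, month = 1, day = 31)

// Reject applicants below the minimum registration age.
if (DateValidator.isTooYoungToRegister(applicant)) {
  println(s"Applicants must be at least ${DateValidator.minimumAge} years old")
}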
package org.aja.tantra.examples.util.lambdacalculus

class PrettyPrinter {

  def apply(expr: Expr): String = expr match {
    case Lambda(arg, body) => p"λ$arg.$body"
    case CNumber(i) => i.toString
    case CBoolean(b) => b.toString
    case Apply(fun, arg) => p"$fun $arg"
    case Var(name, scope) => s"$name"
  }

  implicit class PrettyPrinting(val sc: StringContext) {
    def p(args: Expr*) = sc.s((args map parensIfNeeded): _*)
  }

  def parensIfNeeded(expr: Expr) = expr match {
    case v: Var => apply(v)
    case _ => "(" + apply(expr) + ")"
  }
}
Mageswaran1989/aja
src/examples/scala/org/aja/tantra/examples/util/lambdacalculus/PrettyPrinter.scala
Scala
apache-2.0
586
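A minimal sketch of driving the printer above, assuming access to the Expr constructors in the same lambdacalculus package. Only Apply and CNumber are constructed, because the full Expr hierarchy (in particular the type of Var's scope parameter) is not shown in this entry, and CNumber is assumed to wrap an Int.

val printer = new PrettyPrinter

// parensIfNeeded wraps every non-Var argument, so this renders as "(1) (2)".
val rendered = printer(Apply(CNumber(1), CNumber(2)))
println(rendered)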
package services.impl

import com.google.inject.{Inject, Singleton}
import dao.UserDAO
import model.User
import org.mindrot.jbcrypt.BCrypt
import services.UserService
import services.state.ActionState

import scala.concurrent.{ExecutionContext, Future}

/**
  * Created by camilosampedro on 16/10/16.
  */
@Singleton
class UserServiceImpl @Inject()(userDAO: UserDAO)(implicit executionContext: ExecutionContext) extends UserService {

  private val salt = BCrypt.gensalt()

  override def add(user: User): Future[ActionState] =
    userDAO.add(user.copy(password = BCrypt.hashpw(user.password, salt)))

  override def get(username: String): Future[Option[User]] = userDAO.get(username)

  override def checkAndGet(username: String, password: String): Future[Option[User]] =
    userDAO.get(username).map {
      case Some(user) if BCrypt.checkpw(password, user.password) => Some(user)
      case _ => None
    }

  override def listAll: Future[Seq[User]] = userDAO.listAll
}
ProjectAton/AtonLab
app/services/impl/UserServiceImpl.scala
Scala
gpl-3.0
958
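For reference, a standalone sketch of the jBCrypt calls the service relies on (org.mindrot.jbcrypt.BCrypt). Note that the service above reuses a single salt per instance; hashing each password with a fresh gensalt(), as below, is the more common pattern, since checkpw reads the salt back out of the stored hash.

import org.mindrot.jbcrypt.BCrypt

// Hash with a freshly generated salt, then verify against the stored hash.
val hashed = BCrypt.hashpw("s3cret", BCrypt.gensalt())

BCrypt.checkpw("s3cret", hashed) // true
BCrypt.checkpw("wrong", hashed)  // false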
/* * Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com> */ package com.lightbend.lagom.scaladsl.it import akka.pattern.CircuitBreakerOpenException import akka.stream.Materializer import akka.stream.scaladsl.Flow import akka.stream.scaladsl.Sink import akka.stream.scaladsl.Source import akka.Done import akka.NotUsed import akka.util.ByteString import com.lightbend.lagom.scaladsl.api.AdditionalConfiguration import com.lightbend.lagom.scaladsl.it.mocks.MockRequestEntity import com.lightbend.lagom.scaladsl.it.mocks.MockResponseEntity import com.lightbend.lagom.scaladsl.it.mocks.MockService import com.lightbend.lagom.scaladsl.it.mocks.MockServiceImpl import com.lightbend.lagom.scaladsl.server.LagomApplication import com.lightbend.lagom.scaladsl.server.LagomApplicationContext import com.lightbend.lagom.scaladsl.server.LagomServer import com.lightbend.lagom.scaladsl.server.LocalServiceLocator import com.lightbend.lagom.scaladsl.testkit.ServiceTest import com.typesafe.config.ConfigFactory import org.scalatest.Matchers import org.scalatest.WordSpec import play.api.Configuration import play.api.libs.streams.AkkaStreams import play.api.libs.ws.ahc.AhcWSComponents import scala.concurrent.Await import scala.concurrent.Promise import scala.concurrent.duration._ import scala.util.Failure import scala.util.Success import scala.util.Try class ScaladslMockServiceSpec extends WordSpec with Matchers { List(AkkaHttp, Netty).foreach { implicit backend => s"A mock service ($backend)" should { "be possible to invoke" in withServer { implicit mat => client => val id = 10L val request = MockRequestEntity("bar", 20) val response = Await.result(client.mockCall(id).invoke(request), 10.seconds) response.incomingId should ===(id) response.incomingRequest should ===(request) } "be possible to invoke for NotUsed parameters" in withServer { implicit mat => client => MockService.invoked.set(false) Await.result(client.doNothing.invoke(), 10.seconds) should ===(NotUsed) MockService.invoked.get() should ===(true) } "be possible to invoke for Done parameters and response" in withServer { implicit mat => client => val response = Await.result(client.doneCall.invoke(Done), 10.seconds) response should ===(Done) } "be possible to invoke for ByteString parameters and response" in withServer { implicit mat => client => val request = ByteString.fromString("raw ByteString") Await.result(client.echoByteString.invoke(request), 10.seconds) should ===(request) } "work with streamed responses" in withServer { implicit mat => client => val request = new MockRequestEntity("entity", 1) Try(Await.result(client.streamResponse.invoke(request), 10.seconds)) match { case Success(result) => consume(result) should ===((1 to 3).map(i => MockResponseEntity(i, request))) case Failure(_) => println( "SKIPPED - This may sometimes fail due to https://github.com/playframework/playframework/issues/5365" ) } } "work with streamed responses and unit requests" in withServer { implicit mat => client => val resultStream = Await.result(client.unitStreamResponse.invoke(), 10.seconds) consume(resultStream) should ===((1 to 3).map(i => MockResponseEntity(i, new MockRequestEntity("entity", i)))) } "work with streamed requests" in withServer { implicit mat => client => val requests = (1 to 3).map(i => new MockRequestEntity("request", i)) val gotResponse = Promise[None.type]() val closeWhenGotResponse = Source.maybe[MockRequestEntity].mapMaterializedValue(_.completeWith(gotResponse.future)) val result = 
Await.result(client.streamRequest.invoke(Source(requests).concat(closeWhenGotResponse)), 10.seconds) gotResponse.success(None) result should ===(MockResponseEntity(1, requests(0))) } "work with streamed requests and unit responses" when { "an empty message is sent for unit" in withServer { implicit mat => client => // In this case, we wait for a response from the server before closing the connection. The response will be an // empty web socket message which will be returned to us as null MockService.firstReceived.set(null) val requests = (1 to 3).map(i => new MockRequestEntity("request", i)) val gotResponse = Promise[None.type]() val closeWhenGotResponse = Source.maybe[MockRequestEntity].mapMaterializedValue(_.completeWith(gotResponse.future)) Await.result(client.streamRequestUnit.invoke(Source(requests).concat(closeWhenGotResponse)), 10.seconds) should ===( NotUsed ) gotResponse.success(None) MockService.firstReceived.get() should ===(requests(0)) } "no message is sent for unit" in withServer { implicit mat => client => // In this case, we send nothing to the server, which is waiting for a single message before it sends a response, // instead we just close the connection, we want to make sure that the client call still returns. MockService.firstReceived.set(null) Await.result(client.streamRequestUnit.invoke(Source.empty), 10.seconds) should ===(NotUsed) } } "work with bidi streams" when { "the client closes the connection" in withServer { implicit mat => client => val requests = (1 to 3).map(i => new MockRequestEntity("request", i)) // Use a source that never terminates so we don't close the upstream (which would close the downstream), and then // use takeUpTo so that we close downstream when we've got everything we want val resultStream = Await.result(client.bidiStream.invoke(Source(requests).concat(Source.maybe)), 10.seconds) consume(resultStream.take(3)) should ===(requests.map(r => MockResponseEntity(1, r))) } "the server closes the connection" in withServer { implicit mat => client => val requests = (1 to 3).map(i => new MockRequestEntity("request", i)) val gotResponse = Promise[None.type]() val closeWhenGotResponse = Source.maybe[MockRequestEntity].mapMaterializedValue(_.completeWith(gotResponse.future)) val serverClosed = Promise[Done]() val trackServerClosed = AkkaStreams.ignoreAfterCancellation[MockResponseEntity].mapMaterializedValue(serverClosed.completeWith) val resultStream = Await.result(client.bidiStream.invoke(Source(requests).concat(closeWhenGotResponse)), 10.seconds) consume(resultStream.via(trackServerClosed).take(3)) should ===(requests.map(r => MockResponseEntity(1, r))) gotResponse.success(None) Await.result(serverClosed.future, 10.seconds) should ===(Done) } } "work with custom headers" in withServer { implicit mat => client => val (responseHeader, result) = Await.result( client.customHeaders .handleRequestHeader(_.withHeader("Foo-Header", "Bar")) .withResponseHeader .invoke("Foo-Header"), 10.seconds ) result should ===("Bar") responseHeader.getHeader("Header-Name") should ===(Some("Foo-Header")) responseHeader.status should ===(201) } "work with custom headers on streams" in withServer { implicit mat => client => val result = Await.result( client.streamCustomHeaders .handleRequestHeader(_.withHeaders(List("Header-1" -> "value1", "Header-2" -> "value2"))) .invoke(Source(List("Header-1", "Header-2")).concat(Source.maybe)), 10.seconds ) val values = consume(result.via(Flow[String].take(2))) values should ===(Seq("value1", "value2")) } "send the service name" in 
withServer { implicit mat => client => Await.result(client.serviceName.invoke(), 10.seconds) should ===("mockservice") } "send the service name on streams" in withServer { implicit mat => client => Await.result( Await .result(client.streamServiceName.invoke(), 10.seconds) .runWith(Sink.head), 10.seconds ) should ===("mockservice") } "work with query params" in withServer { implicit mat => client => Await.result( client.queryParamId(Some("foo")).invoke(), 10.seconds ) should ===("foo") } "work with collections of entities" in withServer { implicit mat => client => val request = new MockRequestEntity("results", 10) val response = Await.result(client.listResults.invoke(request), 10.seconds) response.size should ===(request.field2) } "work with custom serializers" when { "the serializer protocol uses a custom contentType" in withServer { implicit mat => client => val id = 20 val request = new MockRequestEntity("bar", id) val response = Await.result(client.customContentType.invoke(request), 10.seconds) response.incomingId should ===(id) response.incomingRequest should ===(request) } "the serializer protocol does not specify a contentType" in withServer { implicit mat => client => val id = 20 val request = new MockRequestEntity("bar", id) val response = Await.result(client.noContentType.invoke(request), 10.seconds) response.incomingId should ===(id) response.incomingRequest should ===(request) } } "be invoked with circuit breaker" in withServer { implicit mat => client => MockService.invoked.set(false) (1 to 20).foreach { _ => intercept[Exception] { Await.result(client.alwaysFail.invoke(), 10.seconds) } } MockService.invoked.get() should ===(true) MockService.invoked.set(false) intercept[CircuitBreakerOpenException] { Await.result(client.alwaysFail.invoke(), 10.seconds) } MockService.invoked.get() should ===(false) } } } private def consume[A](source: Source[A, _])(implicit mat: Materializer): Seq[A] = { Await.result(source.runWith(Sink.seq), 10.seconds) } private def withServer(block: Materializer => MockService => Unit)(implicit httpBackend: HttpBackend): Unit = { ServiceTest.withServer(ServiceTest.defaultSetup) { ctx => new LagomApplication(LagomApplicationContext.Test) with AhcWSComponents with LocalServiceLocator { override lazy val lagomServer = serverFor[MockService](new MockServiceImpl) override def additionalConfiguration: AdditionalConfiguration = { import scala.collection.JavaConverters._ super.additionalConfiguration ++ ConfigFactory.parseMap( Map( "play.server.provider" -> httpBackend.provider ).asJava ) } } } { server => block(server.materializer)(server.serviceClient.implement[MockService]) } } }
ignasi35/lagom
service/scaladsl/integration-tests/src/test/scala/com/lightbend/lagom/scaladsl/it/ScaladslMockServiceSpec.scala
Scala
apache-2.0
11,317
package com.twitter.finagle.util

import com.twitter.conversions.time._
import com.twitter.util.TimerTask
import java.util.Collections
import java.util.concurrent.TimeUnit
import java.util.concurrent.atomic.AtomicInteger
import org.jboss.netty.{util => nu}
import org.mockito.ArgumentCaptor
import org.specs.SpecificationWithJUnit
import org.specs.mock.Mockito

class TimerSpec extends SpecificationWithJUnit with Mockito {
  "TimerFromNettyTimer" should {
    val timer = mock[nu.Timer]
    val nstop = new AtomicInteger(0)
    @volatile var running = true
    timer.stop() answers { args =>
      running = false
      nstop.incrementAndGet()
      Collections.emptySet()
    }

    "Support cancelling recurring tasks" in {
      val t = new TimerFromNettyTimer(timer)
      val taskCaptor = ArgumentCaptor.forClass(classOf[nu.TimerTask])
      val firstTimeout = mock[nu.Timeout]
      firstTimeout.isCancelled returns false
      timer.newTimeout(taskCaptor.capture(), any, any) returns firstTimeout

      var task: TimerTask = null
      task = t.schedule(1.second) { task.cancel() }

      taskCaptor.getValue.run(firstTimeout)

      there was atMostOne(timer).newTimeout(any, any, any)
    }
  }
}
joshbedo/finagle
finagle-core/src/test/scala/com/twitter/finagle/util/TimerSpec.scala
Scala
apache-2.0
1,209
package org.helgoboss.scala_osgi_metatype

/**
 * Contains Scala traits which accurately model the facilities of the OSGi Metatype API.
 */
package object interfaces {
}
helgoboss/scala-osgi-metatype
src/main/scala/org/helgoboss/scala_osgi_metatype/interfaces/package.scala
Scala
mit
171
package org.scaladebugger.api.lowlevel.events import com.sun.jdi.event.{Event, EventIterator, EventSet} import org.scaladebugger.api.lowlevel.events.data.JDIEventDataResult import EventType.EventType import org.scaladebugger.test.helpers.ParallelMockFunSpec class EventSetProcessorSpec extends ParallelMockFunSpec { // TODO: This is a workaround for a log statement that is causing a test to // fail if we don't mock the toString function private val mockEventType = EventType.AccessWatchpointEventType // mock[EventType] private val mockEvent = mock[Event] private val mockEventIterator = mock[EventIterator] private val mockEventSet = mock[EventSet] (mockEventSet.iterator _).expects().returning(mockEventIterator).once() private val mockEventFunctionRetrieval = mockFunction[EventType, Seq[(Event, Seq[JDIEventDataResult]) => Boolean]] private val mockEventFunctions = Seq( mockFunction[Event, Seq[JDIEventDataResult], Boolean], mockFunction[Event, Seq[JDIEventDataResult], Boolean] ) // Workaround - see https://github.com/paulbutcher/ScalaMock/issues/33 private class TestEventProcessor extends EventProcessor(mockEvent, mockEventFunctions, true) private val mockEventProcessor = mock[TestEventProcessor] private val mockNewEventProcessor = mockFunction[Event, Seq[EventManager#EventHandler], EventProcessor] private val mockTransformEventToEventType = mockFunction[Event, Option[EventType]] // Takes a single boolean used to set the onExceptionResume flag // NOTE: Not using partial function so we can use named parameters private def newEventSetProcessor(onExceptionResume: Boolean) = new EventSetProcessor( eventSet = mockEventSet, eventFunctionRetrieval = mockEventFunctionRetrieval, onExceptionResume = onExceptionResume ) { override protected def newEventProcessor( event: Event, eventFunctions: Seq[EventManager#EventHandler] ): EventProcessor = mockNewEventProcessor(event, eventFunctions) override protected def transformEventToEventType( event: Event ): Option[EventType] = mockTransformEventToEventType(event) } describe("EventSetProcessor") { describe("#process") { it("should return true and resume the event set if there are no events in the set") { val expected = true val eventSetProcessor = newEventSetProcessor(onExceptionResume = true) // If the event set had no events, its iterator would say so immediately (mockEventIterator.hasNext _).expects().returning(false).once() // The event set should be resumed (mockEventSet.resume _).expects().once() val actual = eventSetProcessor.process() actual should be (expected) } it("should return true and resume the event set if all of the events are unknown event types") { val expected = true val eventSetProcessor = newEventSetProcessor(onExceptionResume = true) inSequence { // Only retrieve a single event (which will be unknown) (mockEventIterator.hasNext _).expects().returning(true).once() (mockEventIterator.next _).expects().returning(mockEvent).once() mockTransformEventToEventType.expects(*).returning(None).once() // Should be false as we are out of events (mockEventIterator.hasNext _).expects().returning(false).once() // The event set should be resumed (mockEventSet.resume _).expects().once() } val actual = eventSetProcessor.process() actual should be (expected) } it("should return true and resume the event set if all event processors for events return true") { val expected = true val eventSetProcessor = newEventSetProcessor(onExceptionResume = true) inSequence { inAnyOrder { // Retrieve two events (which will be evaluated as true) (mockEventIterator.hasNext 
_).expects().returning(true).twice() (mockEventIterator.next _).expects().returning(mockEvent).twice() mockTransformEventToEventType.expects(*) .returning(Some(mockEventType)).twice() mockEventFunctionRetrieval.expects(*) .returning(mockEventFunctions).twice() mockNewEventProcessor.expects(*, *) .returning(mockEventProcessor).twice() // Evaluate both events as true (mockEventProcessor.process _).expects().returning(true).twice() } // Should be false as we are out of events (mockEventIterator.hasNext _).expects().returning(false).once() // The event set should be resumed (mockEventSet.resume _).expects().once() } val actual = eventSetProcessor.process() actual should be (expected) } it("should return false and not resume the event set if any event processor for an event returns false") { val expected = false val eventSetProcessor = newEventSetProcessor(onExceptionResume = true) inSequence { inAnyOrder { // Retrieve two events (which will be evaluated as true) (mockEventIterator.hasNext _).expects().returning(true).twice() (mockEventIterator.next _).expects().returning(mockEvent).twice() mockTransformEventToEventType.expects(*) .returning(Some(mockEventType)).twice() mockEventFunctionRetrieval.expects(*) .returning(mockEventFunctions).twice() mockNewEventProcessor.expects(*, *) .returning(mockEventProcessor).twice() // Evaluate one as true and the other as false (mockEventProcessor.process _).expects().returning(false).once() (mockEventProcessor.process _).expects().returning(true).once() } // Should be false as we are out of events (mockEventIterator.hasNext _).expects().returning(false).once() // The event set should not be resumed (mockEventSet.resume _).expects().never() } val actual = eventSetProcessor.process() actual should be (expected) } } describe("#resume") { it("should resume the wrapped event set") { val eventSetProcessor = newEventSetProcessor(onExceptionResume = true) // The resume method merely wraps the event set's resume (mockEventSet.resume _).expects().once() eventSetProcessor.resume() } } } }
rcsenkbeil/scala-debugger
scala-debugger-api/src/test/scala/org/scaladebugger/api/lowlevel/events/EventSetProcessorSpec.scala
Scala
apache-2.0
6,599
package TAPLcomp.tyarith

import scala.util.parsing.combinator.ImplicitConversions
import scala.util.parsing.combinator.syntactical.StandardTokenParsers

sealed trait Ty

case object TyBool extends Ty

case object TyNat extends Ty

sealed trait Term

case object TmTrue extends Term

case object TmFalse extends Term

case class TmIf(cond: Term, t1: Term, t2: Term) extends Term

case object TmZero extends Term

case class TmSucc(t: Term) extends Term

case class TmPred(t: Term) extends Term

case class TmIsZero(t: Term) extends Term

object TyArithParsers extends StandardTokenParsers with ImplicitConversions {
  lexical.reserved += ("true", "false", "if", "then", "else", "iszero", "succ", "pred")
  lexical.delimiters += ("(", ")", ";")

  private def term: Parser[Term] =
    appTerm |
      ("if" ~> term) ~ ("then" ~> term) ~ ("else" ~> term) ^^ TmIf

  private def appTerm: Parser[Term] =
    aTerm |
      "succ" ~> aTerm ^^ TmSucc |
      "pred" ~> aTerm ^^ TmPred |
      "iszero" ~> aTerm ^^ TmIsZero

  // Atomic terms are ones that never require extra parentheses
  private def aTerm: Parser[Term] =
    "(" ~> term <~ ")" |
      "true" ^^ { _ => TmTrue } |
      "false" ^^ { _ => TmFalse } |
      numericLit ^^ { x => num(x.toInt) }

  private def num(x: Int): Term = x match {
    case 0 => TmZero
    case _ => TmSucc(num(x - 1))
  }

  def input(s: String) = phrase(term)(new lexical.Scanner(s)) match {
    case t if t.successful => t.get
    case t => sys.error(t.toString)
  }
}
hy-zhang/parser
Scala/Parser/src/TAPLcomp/tyarith/parser.scala
Scala
bsd-3-clause
1,492
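A quick usage sketch for the combinator parser above, run through its public input helper; numerals are desugared into nested TmSucc applications over TmZero.

import TAPLcomp.tyarith._

val ast = TyArithParsers.input("if iszero 0 then true else succ 0")
// ast == TmIf(TmIsZero(TmZero), TmTrue, TmSucc(TmZero))
println(ast)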
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.catalyst.expressions import java.math.{BigDecimal => JavaBigDecimal} import java.time.ZoneId import java.util.concurrent.TimeUnit._ import org.apache.spark.SparkException import org.apache.spark.sql.catalyst.{InternalRow, WalkedTypePath} import org.apache.spark.sql.catalyst.analysis.{TypeCheckResult, TypeCoercion} import org.apache.spark.sql.catalyst.expressions.codegen._ import org.apache.spark.sql.catalyst.expressions.codegen.Block._ import org.apache.spark.sql.catalyst.util._ import org.apache.spark.sql.catalyst.util.DateTimeUtils._ import org.apache.spark.sql.types._ import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String} import org.apache.spark.unsafe.types.UTF8String.{IntWrapper, LongWrapper} object Cast { /** * Returns true iff we can cast `from` type to `to` type. */ def canCast(from: DataType, to: DataType): Boolean = (from, to) match { case (fromType, toType) if fromType == toType => true case (NullType, _) => true case (_, StringType) => true case (StringType, BinaryType) => true case (_: IntegralType, BinaryType) => true case (StringType, BooleanType) => true case (DateType, BooleanType) => true case (TimestampType, BooleanType) => true case (_: NumericType, BooleanType) => true case (StringType, TimestampType) => true case (BooleanType, TimestampType) => true case (DateType, TimestampType) => true case (_: NumericType, TimestampType) => true case (StringType, DateType) => true case (TimestampType, DateType) => true case (StringType, CalendarIntervalType) => true case (StringType, _: NumericType) => true case (BooleanType, _: NumericType) => true case (DateType, _: NumericType) => true case (TimestampType, _: NumericType) => true case (_: NumericType, _: NumericType) => true case (ArrayType(fromType, fn), ArrayType(toType, tn)) => canCast(fromType, toType) && resolvableNullability(fn || forceNullable(fromType, toType), tn) case (MapType(fromKey, fromValue, fn), MapType(toKey, toValue, tn)) => canCast(fromKey, toKey) && (!forceNullable(fromKey, toKey)) && canCast(fromValue, toValue) && resolvableNullability(fn || forceNullable(fromValue, toValue), tn) case (StructType(fromFields), StructType(toFields)) => fromFields.length == toFields.length && fromFields.zip(toFields).forall { case (fromField, toField) => canCast(fromField.dataType, toField.dataType) && resolvableNullability( fromField.nullable || forceNullable(fromField.dataType, toField.dataType), toField.nullable) } case (udt1: UserDefinedType[_], udt2: UserDefinedType[_]) if udt1.userClass == udt2.userClass => true case _ => false } /** * Return true if we need to use the `timeZone` information casting `from` type to `to` type. * The patterns matched reflect the current implementation in the Cast node. * c.f. 
usage of `timeZone` in: * * Cast.castToString * * Cast.castToDate * * Cast.castToTimestamp */ def needsTimeZone(from: DataType, to: DataType): Boolean = (from, to) match { case (StringType, TimestampType) => true case (DateType, TimestampType) => true case (TimestampType, StringType) => true case (TimestampType, DateType) => true case (ArrayType(fromType, _), ArrayType(toType, _)) => needsTimeZone(fromType, toType) case (MapType(fromKey, fromValue, _), MapType(toKey, toValue, _)) => needsTimeZone(fromKey, toKey) || needsTimeZone(fromValue, toValue) case (StructType(fromFields), StructType(toFields)) => fromFields.length == toFields.length && fromFields.zip(toFields).exists { case (fromField, toField) => needsTimeZone(fromField.dataType, toField.dataType) } case _ => false } /** * Returns true iff we can safely up-cast the `from` type to `to` type without any truncating or * precision lose or possible runtime failures. For example, long -> int, string -> int are not * up-cast. */ def canUpCast(from: DataType, to: DataType): Boolean = (from, to) match { case _ if from == to => true case (from: NumericType, to: DecimalType) if to.isWiderThan(from) => true case (from: DecimalType, to: NumericType) if from.isTighterThan(to) => true case (f, t) if legalNumericPrecedence(f, t) => true case (DateType, TimestampType) => true case (_, StringType) => true // Spark supports casting between long and timestamp, please see `longToTimestamp` and // `timestampToLong` for details. case (TimestampType, LongType) => true case (LongType, TimestampType) => true case (ArrayType(fromType, fn), ArrayType(toType, tn)) => resolvableNullability(fn, tn) && canUpCast(fromType, toType) case (MapType(fromKey, fromValue, fn), MapType(toKey, toValue, tn)) => resolvableNullability(fn, tn) && canUpCast(fromKey, toKey) && canUpCast(fromValue, toValue) case (StructType(fromFields), StructType(toFields)) => fromFields.length == toFields.length && fromFields.zip(toFields).forall { case (f1, f2) => resolvableNullability(f1.nullable, f2.nullable) && canUpCast(f1.dataType, f2.dataType) } case _ => false } private def legalNumericPrecedence(from: DataType, to: DataType): Boolean = { val fromPrecedence = TypeCoercion.numericPrecedence.indexOf(from) val toPrecedence = TypeCoercion.numericPrecedence.indexOf(to) fromPrecedence >= 0 && fromPrecedence < toPrecedence } def canNullSafeCastToDecimal(from: DataType, to: DecimalType): Boolean = from match { case from: BooleanType if to.isWiderThan(DecimalType.BooleanDecimal) => true case from: NumericType if to.isWiderThan(from) => true case from: DecimalType => // truncating or precision lose (to.precision - to.scale) > (from.precision - from.scale) case _ => false // overflow } def forceNullable(from: DataType, to: DataType): Boolean = (from, to) match { case (NullType, _) => true case (_, _) if from == to => false case (StringType, BinaryType) => false case (StringType, _) => true case (_, StringType) => false case (FloatType | DoubleType, TimestampType) => true case (TimestampType, DateType) => false case (_, DateType) => true case (DateType, TimestampType) => false case (DateType, _) => true case (_, CalendarIntervalType) => true case (_, to: DecimalType) if !canNullSafeCastToDecimal(from, to) => true case (_: FractionalType, _: IntegralType) => true // NaN, infinity case _ => false } def resolvableNullability(from: Boolean, to: Boolean): Boolean = !from || to } /** * Cast the child expression to the target data type. 
* * When cast from/to timezone related types, we need timeZoneId, which will be resolved with * session local timezone by an analyzer [[ResolveTimeZone]]. */ @ExpressionDescription( usage = "_FUNC_(expr AS type) - Casts the value `expr` to the target data type `type`.", examples = """ Examples: > SELECT _FUNC_('10' as int); 10 """) case class Cast(child: Expression, dataType: DataType, timeZoneId: Option[String] = None) extends UnaryExpression with TimeZoneAwareExpression with NullIntolerant { def this(child: Expression, dataType: DataType) = this(child, dataType, None) override def toString: String = s"cast($child as ${dataType.simpleString})" override def checkInputDataTypes(): TypeCheckResult = { if (Cast.canCast(child.dataType, dataType)) { TypeCheckResult.TypeCheckSuccess } else { TypeCheckResult.TypeCheckFailure( s"cannot cast ${child.dataType.catalogString} to ${dataType.catalogString}") } } override def nullable: Boolean = Cast.forceNullable(child.dataType, dataType) || child.nullable override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression = copy(timeZoneId = Option(timeZoneId)) // When this cast involves TimeZone, it's only resolved if the timeZoneId is set; // Otherwise behave like Expression.resolved. override lazy val resolved: Boolean = childrenResolved && checkInputDataTypes().isSuccess && (!needsTimeZone || timeZoneId.isDefined) private[this] def needsTimeZone: Boolean = Cast.needsTimeZone(child.dataType, dataType) // [[func]] assumes the input is no longer null because eval already does the null check. @inline private[this] def buildCast[T](a: Any, func: T => Any): Any = func(a.asInstanceOf[T]) private lazy val dateFormatter = DateFormatter() private lazy val timestampFormatter = TimestampFormatter.getFractionFormatter(zoneId) // UDFToString private[this] def castToString(from: DataType): Any => Any = from match { case BinaryType => buildCast[Array[Byte]](_, UTF8String.fromBytes) case DateType => buildCast[Int](_, d => UTF8String.fromString(dateFormatter.format(d))) case TimestampType => buildCast[Long](_, t => UTF8String.fromString(DateTimeUtils.timestampToString(timestampFormatter, t))) case ArrayType(et, _) => buildCast[ArrayData](_, array => { val builder = new UTF8StringBuilder builder.append("[") if (array.numElements > 0) { val toUTF8String = castToString(et) if (!array.isNullAt(0)) { builder.append(toUTF8String(array.get(0, et)).asInstanceOf[UTF8String]) } var i = 1 while (i < array.numElements) { builder.append(",") if (!array.isNullAt(i)) { builder.append(" ") builder.append(toUTF8String(array.get(i, et)).asInstanceOf[UTF8String]) } i += 1 } } builder.append("]") builder.build() }) case MapType(kt, vt, _) => buildCast[MapData](_, map => { val builder = new UTF8StringBuilder builder.append("[") if (map.numElements > 0) { val keyArray = map.keyArray() val valueArray = map.valueArray() val keyToUTF8String = castToString(kt) val valueToUTF8String = castToString(vt) builder.append(keyToUTF8String(keyArray.get(0, kt)).asInstanceOf[UTF8String]) builder.append(" ->") if (!valueArray.isNullAt(0)) { builder.append(" ") builder.append(valueToUTF8String(valueArray.get(0, vt)).asInstanceOf[UTF8String]) } var i = 1 while (i < map.numElements) { builder.append(", ") builder.append(keyToUTF8String(keyArray.get(i, kt)).asInstanceOf[UTF8String]) builder.append(" ->") if (!valueArray.isNullAt(i)) { builder.append(" ") builder.append(valueToUTF8String(valueArray.get(i, vt)) .asInstanceOf[UTF8String]) } i += 1 } } builder.append("]") builder.build() }) case 
StructType(fields) => buildCast[InternalRow](_, row => { val builder = new UTF8StringBuilder builder.append("[") if (row.numFields > 0) { val st = fields.map(_.dataType) val toUTF8StringFuncs = st.map(castToString) if (!row.isNullAt(0)) { builder.append(toUTF8StringFuncs(0)(row.get(0, st(0))).asInstanceOf[UTF8String]) } var i = 1 while (i < row.numFields) { builder.append(",") if (!row.isNullAt(i)) { builder.append(" ") builder.append(toUTF8StringFuncs(i)(row.get(i, st(i))).asInstanceOf[UTF8String]) } i += 1 } } builder.append("]") builder.build() }) case pudt: PythonUserDefinedType => castToString(pudt.sqlType) case udt: UserDefinedType[_] => buildCast[Any](_, o => UTF8String.fromString(udt.deserialize(o).toString)) case _ => buildCast[Any](_, o => UTF8String.fromString(o.toString)) } // BinaryConverter private[this] def castToBinary(from: DataType): Any => Any = from match { case StringType => buildCast[UTF8String](_, _.getBytes) case ByteType => buildCast[Byte](_, NumberConverter.toBinary) case ShortType => buildCast[Short](_, NumberConverter.toBinary) case IntegerType => buildCast[Int](_, NumberConverter.toBinary) case LongType => buildCast[Long](_, NumberConverter.toBinary) } // UDFToBoolean private[this] def castToBoolean(from: DataType): Any => Any = from match { case StringType => buildCast[UTF8String](_, s => { if (StringUtils.isTrueString(s)) { true } else if (StringUtils.isFalseString(s)) { false } else { null } }) case TimestampType => buildCast[Long](_, t => t != 0) case DateType => // Hive would return null when cast from date to boolean buildCast[Int](_, d => null) case LongType => buildCast[Long](_, _ != 0) case IntegerType => buildCast[Int](_, _ != 0) case ShortType => buildCast[Short](_, _ != 0) case ByteType => buildCast[Byte](_, _ != 0) case DecimalType() => buildCast[Decimal](_, !_.isZero) case DoubleType => buildCast[Double](_, _ != 0) case FloatType => buildCast[Float](_, _ != 0) } // TimestampConverter private[this] def castToTimestamp(from: DataType): Any => Any = from match { case StringType => buildCast[UTF8String](_, utfs => DateTimeUtils.stringToTimestamp(utfs, zoneId).orNull) case BooleanType => buildCast[Boolean](_, b => if (b) 1L else 0) case LongType => buildCast[Long](_, l => longToTimestamp(l)) case IntegerType => buildCast[Int](_, i => longToTimestamp(i.toLong)) case ShortType => buildCast[Short](_, s => longToTimestamp(s.toLong)) case ByteType => buildCast[Byte](_, b => longToTimestamp(b.toLong)) case DateType => buildCast[Int](_, d => epochDaysToMicros(d, zoneId)) // TimestampWritable.decimalToTimestamp case DecimalType() => buildCast[Decimal](_, d => decimalToTimestamp(d)) // TimestampWritable.doubleToTimestamp case DoubleType => buildCast[Double](_, d => doubleToTimestamp(d)) // TimestampWritable.floatToTimestamp case FloatType => buildCast[Float](_, f => doubleToTimestamp(f.toDouble)) } private[this] def decimalToTimestamp(d: Decimal): Long = { (d.toBigDecimal * MICROS_PER_SECOND).longValue() } private[this] def doubleToTimestamp(d: Double): Any = { if (d.isNaN || d.isInfinite) null else (d * MICROS_PER_SECOND).toLong } // converting seconds to us private[this] def longToTimestamp(t: Long): Long = SECONDS.toMicros(t) // converting us to seconds private[this] def timestampToLong(ts: Long): Long = { Math.floorDiv(ts, MICROS_PER_SECOND) } // converting us to seconds in double private[this] def timestampToDouble(ts: Long): Double = { ts / MICROS_PER_SECOND.toDouble } // DateConverter private[this] def castToDate(from: DataType): Any => Any = from match 
{ case StringType => buildCast[UTF8String](_, s => DateTimeUtils.stringToDate(s).orNull) case TimestampType => // throw valid precision more than seconds, according to Hive. // Timestamp.nanos is in 0 to 999,999,999, no more than a second. buildCast[Long](_, t => microsToEpochDays(t, zoneId)) } // IntervalConverter private[this] def castToInterval(from: DataType): Any => Any = from match { case StringType => buildCast[UTF8String](_, s => CalendarInterval.fromString(s.toString)) } // LongConverter private[this] def castToLong(from: DataType): Any => Any = from match { case StringType => val result = new LongWrapper() buildCast[UTF8String](_, s => if (s.toLong(result)) result.value else null) case BooleanType => buildCast[Boolean](_, b => if (b) 1L else 0L) case DateType => buildCast[Int](_, d => null) case TimestampType => buildCast[Long](_, t => timestampToLong(t)) case x: NumericType => b => x.numeric.asInstanceOf[Numeric[Any]].toLong(b) } // IntConverter private[this] def castToInt(from: DataType): Any => Any = from match { case StringType => val result = new IntWrapper() buildCast[UTF8String](_, s => if (s.toInt(result)) result.value else null) case BooleanType => buildCast[Boolean](_, b => if (b) 1 else 0) case DateType => buildCast[Int](_, d => null) case TimestampType => buildCast[Long](_, t => timestampToLong(t).toInt) case x: NumericType => b => x.numeric.asInstanceOf[Numeric[Any]].toInt(b) } // ShortConverter private[this] def castToShort(from: DataType): Any => Any = from match { case StringType => val result = new IntWrapper() buildCast[UTF8String](_, s => if (s.toShort(result)) { result.value.toShort } else { null }) case BooleanType => buildCast[Boolean](_, b => if (b) 1.toShort else 0.toShort) case DateType => buildCast[Int](_, d => null) case TimestampType => buildCast[Long](_, t => timestampToLong(t).toShort) case x: NumericType => b => x.numeric.asInstanceOf[Numeric[Any]].toInt(b).toShort } // ByteConverter private[this] def castToByte(from: DataType): Any => Any = from match { case StringType => val result = new IntWrapper() buildCast[UTF8String](_, s => if (s.toByte(result)) { result.value.toByte } else { null }) case BooleanType => buildCast[Boolean](_, b => if (b) 1.toByte else 0.toByte) case DateType => buildCast[Int](_, d => null) case TimestampType => buildCast[Long](_, t => timestampToLong(t).toByte) case x: NumericType => b => x.numeric.asInstanceOf[Numeric[Any]].toInt(b).toByte } /** * Change the precision / scale in a given decimal to those set in `decimalType` (if any), * returning null if it overflows or modifying `value` in-place and returning it if successful. * * NOTE: this modifies `value` in-place, so don't call it on external data. */ private[this] def changePrecision(value: Decimal, decimalType: DecimalType): Decimal = { if (value.changePrecision(decimalType.precision, decimalType.scale)) value else null } /** * Create new `Decimal` with precision and scale given in `decimalType` (if any), * returning null if it overflows or creating a new `value` and returning it if successful. 
*/ private[this] def toPrecision(value: Decimal, decimalType: DecimalType): Decimal = value.toPrecision(decimalType.precision, decimalType.scale) private[this] def castToDecimal(from: DataType, target: DecimalType): Any => Any = from match { case StringType => buildCast[UTF8String](_, s => try { changePrecision(Decimal(new JavaBigDecimal(s.toString)), target) } catch { case _: NumberFormatException => null }) case BooleanType => buildCast[Boolean](_, b => toPrecision(if (b) Decimal.ONE else Decimal.ZERO, target)) case DateType => buildCast[Int](_, d => null) // date can't cast to decimal in Hive case TimestampType => // Note that we lose precision here. buildCast[Long](_, t => changePrecision(Decimal(timestampToDouble(t)), target)) case dt: DecimalType => b => toPrecision(b.asInstanceOf[Decimal], target) case t: IntegralType => b => changePrecision(Decimal(t.integral.asInstanceOf[Integral[Any]].toLong(b)), target) case x: FractionalType => b => try { changePrecision(Decimal(x.fractional.asInstanceOf[Fractional[Any]].toDouble(b)), target) } catch { case _: NumberFormatException => null } } // DoubleConverter private[this] def castToDouble(from: DataType): Any => Any = from match { case StringType => buildCast[UTF8String](_, s => try s.toString.toDouble catch { case _: NumberFormatException => null }) case BooleanType => buildCast[Boolean](_, b => if (b) 1d else 0d) case DateType => buildCast[Int](_, d => null) case TimestampType => buildCast[Long](_, t => timestampToDouble(t)) case x: NumericType => b => x.numeric.asInstanceOf[Numeric[Any]].toDouble(b) } // FloatConverter private[this] def castToFloat(from: DataType): Any => Any = from match { case StringType => buildCast[UTF8String](_, s => try s.toString.toFloat catch { case _: NumberFormatException => null }) case BooleanType => buildCast[Boolean](_, b => if (b) 1f else 0f) case DateType => buildCast[Int](_, d => null) case TimestampType => buildCast[Long](_, t => timestampToDouble(t).toFloat) case x: NumericType => b => x.numeric.asInstanceOf[Numeric[Any]].toFloat(b) } private[this] def castArray(fromType: DataType, toType: DataType): Any => Any = { val elementCast = cast(fromType, toType) // TODO: Could be faster? buildCast[ArrayData](_, array => { val values = new Array[Any](array.numElements()) array.foreach(fromType, (i, e) => { if (e == null) { values(i) = null } else { values(i) = elementCast(e) } }) new GenericArrayData(values) }) } private[this] def castMap(from: MapType, to: MapType): Any => Any = { val keyCast = castArray(from.keyType, to.keyType) val valueCast = castArray(from.valueType, to.valueType) buildCast[MapData](_, map => { val keys = keyCast(map.keyArray()).asInstanceOf[ArrayData] val values = valueCast(map.valueArray()).asInstanceOf[ArrayData] new ArrayBasedMapData(keys, values) }) } private[this] def castStruct(from: StructType, to: StructType): Any => Any = { val castFuncs: Array[(Any) => Any] = from.fields.zip(to.fields).map { case (fromField, toField) => cast(fromField.dataType, toField.dataType) } // TODO: Could be faster? buildCast[InternalRow](_, row => { val newRow = new GenericInternalRow(from.fields.length) var i = 0 while (i < row.numFields) { newRow.update(i, if (row.isNullAt(i)) null else castFuncs(i)(row.get(i, from.apply(i).dataType))) i += 1 } newRow }) } private[this] def cast(from: DataType, to: DataType): Any => Any = { // If the cast does not change the structure, then we don't really need to cast anything. // We can return what the children return. Same thing should happen in the codegen path. 
if (DataType.equalsStructurally(from, to)) { identity } else if (from == NullType) { // According to `canCast`, NullType can be casted to any type. // For primitive types, we don't reach here because the guard of `nullSafeEval`. // But for nested types like struct, we might reach here for nested null type field. // We won't call the returned function actually, but returns a placeholder. _ => throw new SparkException(s"should not directly cast from NullType to $to.") } else { to match { case dt if dt == from => identity[Any] case StringType => castToString(from) case BinaryType => castToBinary(from) case DateType => castToDate(from) case decimal: DecimalType => castToDecimal(from, decimal) case TimestampType => castToTimestamp(from) case CalendarIntervalType => castToInterval(from) case BooleanType => castToBoolean(from) case ByteType => castToByte(from) case ShortType => castToShort(from) case IntegerType => castToInt(from) case FloatType => castToFloat(from) case LongType => castToLong(from) case DoubleType => castToDouble(from) case array: ArrayType => castArray(from.asInstanceOf[ArrayType].elementType, array.elementType) case map: MapType => castMap(from.asInstanceOf[MapType], map) case struct: StructType => castStruct(from.asInstanceOf[StructType], struct) case udt: UserDefinedType[_] if udt.userClass == from.asInstanceOf[UserDefinedType[_]].userClass => identity[Any] case _: UserDefinedType[_] => throw new SparkException(s"Cannot cast $from to $to.") } } } private[this] lazy val cast: Any => Any = cast(child.dataType, dataType) protected override def nullSafeEval(input: Any): Any = cast(input) override def genCode(ctx: CodegenContext): ExprCode = { // If the cast does not change the structure, then we don't really need to cast anything. // We can return what the children return. Same thing should happen in the interpreted path. if (DataType.equalsStructurally(child.dataType, dataType)) { child.genCode(ctx) } else { super.genCode(ctx) } } override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { val eval = child.genCode(ctx) val nullSafeCast = nullSafeCastFunction(child.dataType, dataType, ctx) ev.copy(code = eval.code + castCode(ctx, eval.value, eval.isNull, ev.value, ev.isNull, dataType, nullSafeCast)) } // The function arguments are: `input`, `result` and `resultIsNull`. We don't need `inputIsNull` // in parameter list, because the returned code will be put in null safe evaluation region. 
private[this] type CastFunction = (ExprValue, ExprValue, ExprValue) => Block private[this] def nullSafeCastFunction( from: DataType, to: DataType, ctx: CodegenContext): CastFunction = to match { case _ if from == NullType => (c, evPrim, evNull) => code"$evNull = true;" case _ if to == from => (c, evPrim, evNull) => code"$evPrim = $c;" case StringType => castToStringCode(from, ctx) case BinaryType => castToBinaryCode(from) case DateType => castToDateCode(from, ctx) case decimal: DecimalType => castToDecimalCode(from, decimal, ctx) case TimestampType => castToTimestampCode(from, ctx) case CalendarIntervalType => castToIntervalCode(from) case BooleanType => castToBooleanCode(from) case ByteType => castToByteCode(from, ctx) case ShortType => castToShortCode(from, ctx) case IntegerType => castToIntCode(from, ctx) case FloatType => castToFloatCode(from) case LongType => castToLongCode(from, ctx) case DoubleType => castToDoubleCode(from) case array: ArrayType => castArrayCode(from.asInstanceOf[ArrayType].elementType, array.elementType, ctx) case map: MapType => castMapCode(from.asInstanceOf[MapType], map, ctx) case struct: StructType => castStructCode(from.asInstanceOf[StructType], struct, ctx) case udt: UserDefinedType[_] if udt.userClass == from.asInstanceOf[UserDefinedType[_]].userClass => (c, evPrim, evNull) => code"$evPrim = $c;" case _: UserDefinedType[_] => throw new SparkException(s"Cannot cast $from to $to.") } // Since we need to cast input expressions recursively inside ComplexTypes, such as Map's // Key and Value, Struct's field, we need to name out all the variable names involved in a cast. private[this] def castCode(ctx: CodegenContext, input: ExprValue, inputIsNull: ExprValue, result: ExprValue, resultIsNull: ExprValue, resultType: DataType, cast: CastFunction): Block = { val javaType = JavaCode.javaType(resultType) code""" boolean $resultIsNull = $inputIsNull; $javaType $result = ${CodeGenerator.defaultValue(resultType)}; if (!$inputIsNull) { ${cast(input, result, resultIsNull)} } """ } private def writeArrayToStringBuilder( et: DataType, array: ExprValue, buffer: ExprValue, ctx: CodegenContext): Block = { val elementToStringCode = castToStringCode(et, ctx) val funcName = ctx.freshName("elementToString") val element = JavaCode.variable("element", et) val elementStr = JavaCode.variable("elementStr", StringType) val elementToStringFunc = inline"${ctx.addNewFunction(funcName, s""" |private UTF8String $funcName(${CodeGenerator.javaType(et)} $element) { | UTF8String $elementStr = null; | ${elementToStringCode(element, elementStr, null /* resultIsNull won't be used */)} | return elementStr; |} """.stripMargin)}" val loopIndex = ctx.freshVariable("loopIndex", IntegerType) code""" |$buffer.append("["); |if ($array.numElements() > 0) { | if (!$array.isNullAt(0)) { | $buffer.append($elementToStringFunc(${CodeGenerator.getValue(array, et, "0")})); | } | for (int $loopIndex = 1; $loopIndex < $array.numElements(); $loopIndex++) { | $buffer.append(","); | if (!$array.isNullAt($loopIndex)) { | $buffer.append(" "); | $buffer.append($elementToStringFunc(${CodeGenerator.getValue(array, et, loopIndex)})); | } | } |} |$buffer.append("]"); """.stripMargin } private def writeMapToStringBuilder( kt: DataType, vt: DataType, map: ExprValue, buffer: ExprValue, ctx: CodegenContext): Block = { def dataToStringFunc(func: String, dataType: DataType) = { val funcName = ctx.freshName(func) val dataToStringCode = castToStringCode(dataType, ctx) val data = JavaCode.variable("data", dataType) val dataStr = 
JavaCode.variable("dataStr", StringType) val functionCall = ctx.addNewFunction(funcName, s""" |private UTF8String $funcName(${CodeGenerator.javaType(dataType)} $data) { | UTF8String $dataStr = null; | ${dataToStringCode(data, dataStr, null /* resultIsNull won't be used */)} | return dataStr; |} """.stripMargin) inline"$functionCall" } val keyToStringFunc = dataToStringFunc("keyToString", kt) val valueToStringFunc = dataToStringFunc("valueToString", vt) val loopIndex = ctx.freshVariable("loopIndex", IntegerType) val mapKeyArray = JavaCode.expression(s"$map.keyArray()", classOf[ArrayData]) val mapValueArray = JavaCode.expression(s"$map.valueArray()", classOf[ArrayData]) val getMapFirstKey = CodeGenerator.getValue(mapKeyArray, kt, JavaCode.literal("0", IntegerType)) val getMapFirstValue = CodeGenerator.getValue(mapValueArray, vt, JavaCode.literal("0", IntegerType)) val getMapKeyArray = CodeGenerator.getValue(mapKeyArray, kt, loopIndex) val getMapValueArray = CodeGenerator.getValue(mapValueArray, vt, loopIndex) code""" |$buffer.append("["); |if ($map.numElements() > 0) { | $buffer.append($keyToStringFunc($getMapFirstKey)); | $buffer.append(" ->"); | if (!$map.valueArray().isNullAt(0)) { | $buffer.append(" "); | $buffer.append($valueToStringFunc($getMapFirstValue)); | } | for (int $loopIndex = 1; $loopIndex < $map.numElements(); $loopIndex++) { | $buffer.append(", "); | $buffer.append($keyToStringFunc($getMapKeyArray)); | $buffer.append(" ->"); | if (!$map.valueArray().isNullAt($loopIndex)) { | $buffer.append(" "); | $buffer.append($valueToStringFunc($getMapValueArray)); | } | } |} |$buffer.append("]"); """.stripMargin } private def writeStructToStringBuilder( st: Seq[DataType], row: ExprValue, buffer: ExprValue, ctx: CodegenContext): Block = { val structToStringCode = st.zipWithIndex.map { case (ft, i) => val fieldToStringCode = castToStringCode(ft, ctx) val field = ctx.freshVariable("field", ft) val fieldStr = ctx.freshVariable("fieldStr", StringType) val javaType = JavaCode.javaType(ft) code""" |${if (i != 0) code"""$buffer.append(",");""" else EmptyBlock} |if (!$row.isNullAt($i)) { | ${if (i != 0) code"""$buffer.append(" ");""" else EmptyBlock} | | // Append $i field into the string buffer | $javaType $field = ${CodeGenerator.getValue(row, ft, s"$i")}; | UTF8String $fieldStr = null; | ${fieldToStringCode(field, fieldStr, null /* resultIsNull won't be used */)} | $buffer.append($fieldStr); |} """.stripMargin } val writeStructCode = ctx.splitExpressions( expressions = structToStringCode.map(_.code), funcName = "fieldToString", arguments = ("InternalRow", row.code) :: (classOf[UTF8StringBuilder].getName, buffer.code) :: Nil) code""" |$buffer.append("["); |$writeStructCode |$buffer.append("]"); """.stripMargin } private[this] def castToStringCode(from: DataType, ctx: CodegenContext): CastFunction = { from match { case BinaryType => (c, evPrim, evNull) => code"$evPrim = UTF8String.fromBytes($c);" case DateType => val df = JavaCode.global( ctx.addReferenceObj("dateFormatter", dateFormatter), dateFormatter.getClass) (c, evPrim, evNull) => code"""$evPrim = UTF8String.fromString(${df}.format($c));""" case TimestampType => val tf = JavaCode.global( ctx.addReferenceObj("timestampFormatter", timestampFormatter), timestampFormatter.getClass) (c, evPrim, evNull) => code"""$evPrim = UTF8String.fromString( org.apache.spark.sql.catalyst.util.DateTimeUtils.timestampToString($tf, $c));""" case ArrayType(et, _) => (c, evPrim, evNull) => { val buffer = ctx.freshVariable("buffer", classOf[UTF8StringBuilder]) val 
bufferClass = JavaCode.javaType(classOf[UTF8StringBuilder]) val writeArrayElemCode = writeArrayToStringBuilder(et, c, buffer, ctx) code""" |$bufferClass $buffer = new $bufferClass(); |$writeArrayElemCode; |$evPrim = $buffer.build(); """.stripMargin } case MapType(kt, vt, _) => (c, evPrim, evNull) => { val buffer = ctx.freshVariable("buffer", classOf[UTF8StringBuilder]) val bufferClass = JavaCode.javaType(classOf[UTF8StringBuilder]) val writeMapElemCode = writeMapToStringBuilder(kt, vt, c, buffer, ctx) code""" |$bufferClass $buffer = new $bufferClass(); |$writeMapElemCode; |$evPrim = $buffer.build(); """.stripMargin } case StructType(fields) => (c, evPrim, evNull) => { val row = ctx.freshVariable("row", classOf[InternalRow]) val buffer = ctx.freshVariable("buffer", classOf[UTF8StringBuilder]) val bufferClass = JavaCode.javaType(classOf[UTF8StringBuilder]) val writeStructCode = writeStructToStringBuilder(fields.map(_.dataType), row, buffer, ctx) code""" |InternalRow $row = $c; |$bufferClass $buffer = new $bufferClass(); |$writeStructCode |$evPrim = $buffer.build(); """.stripMargin } case pudt: PythonUserDefinedType => castToStringCode(pudt.sqlType, ctx) case udt: UserDefinedType[_] => val udtRef = JavaCode.global(ctx.addReferenceObj("udt", udt), udt.sqlType) (c, evPrim, evNull) => { code"$evPrim = UTF8String.fromString($udtRef.deserialize($c).toString());" } case _ => (c, evPrim, evNull) => code"$evPrim = UTF8String.fromString(String.valueOf($c));" } } private[this] def castToBinaryCode(from: DataType): CastFunction = from match { case StringType => (c, evPrim, evNull) => code"$evPrim = $c.getBytes();" case _: IntegralType => (c, evPrim, evNull) => code"$evPrim = ${NumberConverter.getClass.getName.stripSuffix("$")}.toBinary($c);" } private[this] def castToDateCode( from: DataType, ctx: CodegenContext): CastFunction = from match { case StringType => val intOpt = ctx.freshVariable("intOpt", classOf[Option[Integer]]) (c, evPrim, evNull) => code""" scala.Option<Integer> $intOpt = org.apache.spark.sql.catalyst.util.DateTimeUtils.stringToDate($c); if ($intOpt.isDefined()) { $evPrim = ((Integer) $intOpt.get()).intValue(); } else { $evNull = true; } """ case TimestampType => val zoneIdClass = classOf[ZoneId] val zid = JavaCode.global( ctx.addReferenceObj("zoneId", zoneId, zoneIdClass.getName), zoneIdClass) (c, evPrim, evNull) => code"""$evPrim = org.apache.spark.sql.catalyst.util.DateTimeUtils.microsToEpochDays($c, $zid);""" case _ => (c, evPrim, evNull) => code"$evNull = true;" } private[this] def changePrecision(d: ExprValue, decimalType: DecimalType, evPrim: ExprValue, evNull: ExprValue, canNullSafeCast: Boolean): Block = { if (canNullSafeCast) { code""" |$d.changePrecision(${decimalType.precision}, ${decimalType.scale}); |$evPrim = $d; """.stripMargin } else { code""" |if ($d.changePrecision(${decimalType.precision}, ${decimalType.scale})) { | $evPrim = $d; |} else { | $evNull = true; |} """.stripMargin } } private[this] def castToDecimalCode( from: DataType, target: DecimalType, ctx: CodegenContext): CastFunction = { val tmp = ctx.freshVariable("tmpDecimal", classOf[Decimal]) val canNullSafeCast = Cast.canNullSafeCastToDecimal(from, target) from match { case StringType => (c, evPrim, evNull) => code""" try { Decimal $tmp = Decimal.apply(new java.math.BigDecimal($c.toString())); ${changePrecision(tmp, target, evPrim, evNull, canNullSafeCast)} } catch (java.lang.NumberFormatException e) { $evNull = true; } """ case BooleanType => (c, evPrim, evNull) => code""" Decimal $tmp = $c ? 
Decimal.apply(1) : Decimal.apply(0); ${changePrecision(tmp, target, evPrim, evNull, canNullSafeCast)} """ case DateType => // date can't cast to decimal in Hive (c, evPrim, evNull) => code"$evNull = true;" case TimestampType => // Note that we lose precision here. (c, evPrim, evNull) => code""" Decimal $tmp = Decimal.apply( scala.math.BigDecimal.valueOf(${timestampToDoubleCode(c)})); ${changePrecision(tmp, target, evPrim, evNull, canNullSafeCast)} """ case DecimalType() => (c, evPrim, evNull) => code""" Decimal $tmp = $c.clone(); ${changePrecision(tmp, target, evPrim, evNull, canNullSafeCast)} """ case x: IntegralType => (c, evPrim, evNull) => code""" Decimal $tmp = Decimal.apply((long) $c); ${changePrecision(tmp, target, evPrim, evNull, canNullSafeCast)} """ case x: FractionalType => // All other numeric types can be represented precisely as Doubles (c, evPrim, evNull) => code""" try { Decimal $tmp = Decimal.apply(scala.math.BigDecimal.valueOf((double) $c)); ${changePrecision(tmp, target, evPrim, evNull, canNullSafeCast)} } catch (java.lang.NumberFormatException e) { $evNull = true; } """ } } private[this] def castToTimestampCode( from: DataType, ctx: CodegenContext): CastFunction = from match { case StringType => val zoneIdClass = classOf[ZoneId] val zid = JavaCode.global( ctx.addReferenceObj("zoneId", zoneId, zoneIdClass.getName), zoneIdClass) val longOpt = ctx.freshVariable("longOpt", classOf[Option[Long]]) (c, evPrim, evNull) => code""" scala.Option<Long> $longOpt = org.apache.spark.sql.catalyst.util.DateTimeUtils.stringToTimestamp($c, $zid); if ($longOpt.isDefined()) { $evPrim = ((Long) $longOpt.get()).longValue(); } else { $evNull = true; } """ case BooleanType => (c, evPrim, evNull) => code"$evPrim = $c ? 1L : 0L;" case _: IntegralType => (c, evPrim, evNull) => code"$evPrim = ${longToTimeStampCode(c)};" case DateType => val zoneIdClass = classOf[ZoneId] val zid = JavaCode.global( ctx.addReferenceObj("zoneId", zoneId, zoneIdClass.getName), zoneIdClass) (c, evPrim, evNull) => code"""$evPrim = org.apache.spark.sql.catalyst.util.DateTimeUtils.epochDaysToMicros($c, $zid);""" case DecimalType() => (c, evPrim, evNull) => code"$evPrim = ${decimalToTimestampCode(c)};" case DoubleType => (c, evPrim, evNull) => code""" if (Double.isNaN($c) || Double.isInfinite($c)) { $evNull = true; } else { $evPrim = (long)($c * $MICROS_PER_SECOND); } """ case FloatType => (c, evPrim, evNull) => code""" if (Float.isNaN($c) || Float.isInfinite($c)) { $evNull = true; } else { $evPrim = (long)($c * $MICROS_PER_SECOND); } """ } private[this] def castToIntervalCode(from: DataType): CastFunction = from match { case StringType => (c, evPrim, evNull) => code"""$evPrim = CalendarInterval.fromString($c.toString()); if(${evPrim} == null) { ${evNull} = true; } """.stripMargin } private[this] def decimalToTimestampCode(d: ExprValue): Block = { val block = inline"new java.math.BigDecimal($MICROS_PER_SECOND)" code"($d.toBigDecimal().bigDecimal().multiply($block)).longValue()" } private[this] def longToTimeStampCode(l: ExprValue): Block = code"$l * (long)$MICROS_PER_SECOND" private[this] def timestampToIntegerCode(ts: ExprValue): Block = code"java.lang.Math.floorDiv($ts, $MICROS_PER_SECOND)" private[this] def timestampToDoubleCode(ts: ExprValue): Block = code"$ts / (double)$MICROS_PER_SECOND" private[this] def castToBooleanCode(from: DataType): CastFunction = from match { case StringType => val stringUtils = inline"${StringUtils.getClass.getName.stripSuffix("$")}" (c, evPrim, evNull) => code""" if 
($stringUtils.isTrueString($c)) { $evPrim = true; } else if ($stringUtils.isFalseString($c)) { $evPrim = false; } else { $evNull = true; } """ case TimestampType => (c, evPrim, evNull) => code"$evPrim = $c != 0;" case DateType => // Hive would return null when cast from date to boolean (c, evPrim, evNull) => code"$evNull = true;" case DecimalType() => (c, evPrim, evNull) => code"$evPrim = !$c.isZero();" case n: NumericType => (c, evPrim, evNull) => code"$evPrim = $c != 0;" } private[this] def castToByteCode(from: DataType, ctx: CodegenContext): CastFunction = from match { case StringType => val wrapper = ctx.freshVariable("intWrapper", classOf[UTF8String.IntWrapper]) (c, evPrim, evNull) => code""" UTF8String.IntWrapper $wrapper = new UTF8String.IntWrapper(); if ($c.toByte($wrapper)) { $evPrim = (byte) $wrapper.value; } else { $evNull = true; } $wrapper = null; """ case BooleanType => (c, evPrim, evNull) => code"$evPrim = $c ? (byte) 1 : (byte) 0;" case DateType => (c, evPrim, evNull) => code"$evNull = true;" case TimestampType => (c, evPrim, evNull) => code"$evPrim = (byte) ${timestampToIntegerCode(c)};" case DecimalType() => (c, evPrim, evNull) => code"$evPrim = $c.toByte();" case x: NumericType => (c, evPrim, evNull) => code"$evPrim = (byte) $c;" } private[this] def castToShortCode( from: DataType, ctx: CodegenContext): CastFunction = from match { case StringType => val wrapper = ctx.freshVariable("intWrapper", classOf[UTF8String.IntWrapper]) (c, evPrim, evNull) => code""" UTF8String.IntWrapper $wrapper = new UTF8String.IntWrapper(); if ($c.toShort($wrapper)) { $evPrim = (short) $wrapper.value; } else { $evNull = true; } $wrapper = null; """ case BooleanType => (c, evPrim, evNull) => code"$evPrim = $c ? (short) 1 : (short) 0;" case DateType => (c, evPrim, evNull) => code"$evNull = true;" case TimestampType => (c, evPrim, evNull) => code"$evPrim = (short) ${timestampToIntegerCode(c)};" case DecimalType() => (c, evPrim, evNull) => code"$evPrim = $c.toShort();" case x: NumericType => (c, evPrim, evNull) => code"$evPrim = (short) $c;" } private[this] def castToIntCode(from: DataType, ctx: CodegenContext): CastFunction = from match { case StringType => val wrapper = ctx.freshVariable("intWrapper", classOf[UTF8String.IntWrapper]) (c, evPrim, evNull) => code""" UTF8String.IntWrapper $wrapper = new UTF8String.IntWrapper(); if ($c.toInt($wrapper)) { $evPrim = $wrapper.value; } else { $evNull = true; } $wrapper = null; """ case BooleanType => (c, evPrim, evNull) => code"$evPrim = $c ? 1 : 0;" case DateType => (c, evPrim, evNull) => code"$evNull = true;" case TimestampType => (c, evPrim, evNull) => code"$evPrim = (int) ${timestampToIntegerCode(c)};" case DecimalType() => (c, evPrim, evNull) => code"$evPrim = $c.toInt();" case x: NumericType => (c, evPrim, evNull) => code"$evPrim = (int) $c;" } private[this] def castToLongCode(from: DataType, ctx: CodegenContext): CastFunction = from match { case StringType => val wrapper = ctx.freshVariable("longWrapper", classOf[UTF8String.LongWrapper]) (c, evPrim, evNull) => code""" UTF8String.LongWrapper $wrapper = new UTF8String.LongWrapper(); if ($c.toLong($wrapper)) { $evPrim = $wrapper.value; } else { $evNull = true; } $wrapper = null; """ case BooleanType => (c, evPrim, evNull) => code"$evPrim = $c ? 
1L : 0L;" case DateType => (c, evPrim, evNull) => code"$evNull = true;" case TimestampType => (c, evPrim, evNull) => code"$evPrim = (long) ${timestampToIntegerCode(c)};" case DecimalType() => (c, evPrim, evNull) => code"$evPrim = $c.toLong();" case x: NumericType => (c, evPrim, evNull) => code"$evPrim = (long) $c;" } private[this] def castToFloatCode(from: DataType): CastFunction = from match { case StringType => (c, evPrim, evNull) => code""" try { $evPrim = Float.valueOf($c.toString()); } catch (java.lang.NumberFormatException e) { $evNull = true; } """ case BooleanType => (c, evPrim, evNull) => code"$evPrim = $c ? 1.0f : 0.0f;" case DateType => (c, evPrim, evNull) => code"$evNull = true;" case TimestampType => (c, evPrim, evNull) => code"$evPrim = (float) (${timestampToDoubleCode(c)});" case DecimalType() => (c, evPrim, evNull) => code"$evPrim = $c.toFloat();" case x: NumericType => (c, evPrim, evNull) => code"$evPrim = (float) $c;" } private[this] def castToDoubleCode(from: DataType): CastFunction = from match { case StringType => (c, evPrim, evNull) => code""" try { $evPrim = Double.valueOf($c.toString()); } catch (java.lang.NumberFormatException e) { $evNull = true; } """ case BooleanType => (c, evPrim, evNull) => code"$evPrim = $c ? 1.0d : 0.0d;" case DateType => (c, evPrim, evNull) => code"$evNull = true;" case TimestampType => (c, evPrim, evNull) => code"$evPrim = ${timestampToDoubleCode(c)};" case DecimalType() => (c, evPrim, evNull) => code"$evPrim = $c.toDouble();" case x: NumericType => (c, evPrim, evNull) => code"$evPrim = (double) $c;" } private[this] def castArrayCode( fromType: DataType, toType: DataType, ctx: CodegenContext): CastFunction = { val elementCast = nullSafeCastFunction(fromType, toType, ctx) val arrayClass = JavaCode.javaType(classOf[GenericArrayData]) val fromElementNull = ctx.freshVariable("feNull", BooleanType) val fromElementPrim = ctx.freshVariable("fePrim", fromType) val toElementNull = ctx.freshVariable("teNull", BooleanType) val toElementPrim = ctx.freshVariable("tePrim", toType) val size = ctx.freshVariable("n", IntegerType) val j = ctx.freshVariable("j", IntegerType) val values = ctx.freshVariable("values", classOf[Array[Object]]) val javaType = JavaCode.javaType(fromType) (c, evPrim, evNull) => code""" final int $size = $c.numElements(); final Object[] $values = new Object[$size]; for (int $j = 0; $j < $size; $j ++) { if ($c.isNullAt($j)) { $values[$j] = null; } else { boolean $fromElementNull = false; $javaType $fromElementPrim = ${CodeGenerator.getValue(c, fromType, j)}; ${castCode(ctx, fromElementPrim, fromElementNull, toElementPrim, toElementNull, toType, elementCast)} if ($toElementNull) { $values[$j] = null; } else { $values[$j] = $toElementPrim; } } } $evPrim = new $arrayClass($values); """ } private[this] def castMapCode(from: MapType, to: MapType, ctx: CodegenContext): CastFunction = { val keysCast = castArrayCode(from.keyType, to.keyType, ctx) val valuesCast = castArrayCode(from.valueType, to.valueType, ctx) val mapClass = JavaCode.javaType(classOf[ArrayBasedMapData]) val keys = ctx.freshVariable("keys", ArrayType(from.keyType)) val convertedKeys = ctx.freshVariable("convertedKeys", ArrayType(to.keyType)) val convertedKeysNull = ctx.freshVariable("convertedKeysNull", BooleanType) val values = ctx.freshVariable("values", ArrayType(from.valueType)) val convertedValues = ctx.freshVariable("convertedValues", ArrayType(to.valueType)) val convertedValuesNull = ctx.freshVariable("convertedValuesNull", BooleanType) (c, evPrim, evNull) => code""" 
final ArrayData $keys = $c.keyArray(); final ArrayData $values = $c.valueArray(); ${castCode(ctx, keys, FalseLiteral, convertedKeys, convertedKeysNull, ArrayType(to.keyType), keysCast)} ${castCode(ctx, values, FalseLiteral, convertedValues, convertedValuesNull, ArrayType(to.valueType), valuesCast)} $evPrim = new $mapClass($convertedKeys, $convertedValues); """ } private[this] def castStructCode( from: StructType, to: StructType, ctx: CodegenContext): CastFunction = { val fieldsCasts = from.fields.zip(to.fields).map { case (fromField, toField) => nullSafeCastFunction(fromField.dataType, toField.dataType, ctx) } val tmpResult = ctx.freshVariable("tmpResult", classOf[GenericInternalRow]) val rowClass = JavaCode.javaType(classOf[GenericInternalRow]) val tmpInput = ctx.freshVariable("tmpInput", classOf[InternalRow]) val fieldsEvalCode = fieldsCasts.zipWithIndex.map { case (cast, i) => val fromFieldPrim = ctx.freshVariable("ffp", from.fields(i).dataType) val fromFieldNull = ctx.freshVariable("ffn", BooleanType) val toFieldPrim = ctx.freshVariable("tfp", to.fields(i).dataType) val toFieldNull = ctx.freshVariable("tfn", BooleanType) val fromType = JavaCode.javaType(from.fields(i).dataType) val setColumn = CodeGenerator.setColumn(tmpResult, to.fields(i).dataType, i, toFieldPrim) code""" boolean $fromFieldNull = $tmpInput.isNullAt($i); if ($fromFieldNull) { $tmpResult.setNullAt($i); } else { $fromType $fromFieldPrim = ${CodeGenerator.getValue(tmpInput, from.fields(i).dataType, i.toString)}; ${castCode(ctx, fromFieldPrim, fromFieldNull, toFieldPrim, toFieldNull, to.fields(i).dataType, cast)} if ($toFieldNull) { $tmpResult.setNullAt($i); } else { $setColumn; } } """ } val fieldsEvalCodes = ctx.splitExpressions( expressions = fieldsEvalCode.map(_.code), funcName = "castStruct", arguments = ("InternalRow", tmpInput.code) :: (rowClass.code, tmpResult.code) :: Nil) (input, result, resultIsNull) => code""" final $rowClass $tmpResult = new $rowClass(${fieldsCasts.length}); final InternalRow $tmpInput = $input; $fieldsEvalCodes $result = $tmpResult; """ } override def sql: String = dataType match { // HiveQL doesn't allow casting to complex types. For logical plans translated from HiveQL, this // type of casting can only be introduced by the analyzer, and can be omitted when converting // back to SQL query string. case _: ArrayType | _: MapType | _: StructType => child.sql case _ => s"CAST(${child.sql} AS ${dataType.sql})" } } /** * Cast the child expression to the target data type, but will throw error if the cast might * truncate, e.g. long -> int, timestamp -> data. */ case class UpCast(child: Expression, dataType: DataType, walkedTypePath: Seq[String] = Nil) extends UnaryExpression with Unevaluable { override lazy val resolved = false }
icexelloss/spark
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
Scala
apache-2.0
54,934
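The casts in the file above convert between SQL timestamps (microseconds since the epoch) and numeric seconds. Below is a minimal, self-contained sketch of that arithmetic written independently of Cast.scala; the constant and method names are illustrative stand-ins, not Spark's own definitions.

import java.util.concurrent.TimeUnit.SECONDS

object TimestampCastSketch extends App {
  // Assumed constant mirroring DateTimeUtils.MICROS_PER_SECOND.
  val MicrosPerSecond = 1000000L

  // long (seconds) -> timestamp (microseconds), as in longToTimestamp above.
  def longToTimestamp(seconds: Long): Long = SECONDS.toMicros(seconds)

  // timestamp (microseconds) -> whole seconds, rounding toward negative infinity,
  // as in timestampToLong above.
  def timestampToLong(micros: Long): Long = Math.floorDiv(micros, MicrosPerSecond)

  // timestamp (microseconds) -> fractional seconds, as in timestampToDouble above.
  def timestampToDouble(micros: Long): Double = micros / MicrosPerSecond.toDouble

  assert(longToTimestamp(2L) == 2000000L)
  assert(timestampToLong(-1L) == -1L) // floorDiv, not truncation toward zero
  assert(timestampToDouble(1500000L) == 1.5)
}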
package models.blog

import javax.inject.Inject

//import models.blog.Article
import models.blog.BlogTablesDef.{ articles, insertArticle }
import play.api.db.slick.DatabaseConfigProvider
import slick.backend.DatabaseConfig
import slick.driver.JdbcProfile
import slick.jdbc.JdbcBackend

import scala.concurrent.Future

/**
 * Created by snc on 2/25/17.
 */
class BlogDAO @Inject() (dbConfigProvider: DatabaseConfigProvider) {

  private val dbConfig: DatabaseConfig[JdbcProfile] = dbConfigProvider.get[JdbcProfile]
  private val db: JdbcBackend#DatabaseDef = dbConfig.db

  import dbConfig.driver.api._

  def getAllArticles: Future[Seq[Article]] = db.run(articles.sortBy(_.sort_order.desc).result)

  def getArticle(id: Long): Future[Option[Article]] = db.run(articles.filter(_.id === id).result.headOption)

  def addNewArticle(article: Article): Future[Article] = db.run(insertArticle += article)

  def upsertArticle(article: Article) = db.run(articles.insertOrUpdate(article))

  def updateArticle(article: Article) = db.run(articles.filter(_.id === article.id).update(article))

  def deleteArticle(id: Long): Future[Int] = db.run(articles.filter(_.id === id).delete)
}
stanikol/walnuts
server/app/models/blog/BlogDAO.scala
Scala
apache-2.0
1,175
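A hedged usage sketch for a DAO shaped like BlogDAO above. The real class needs Play's injected DatabaseConfigProvider and the Slick table definitions, so this stand-in replays only the relevant method signatures against an in-memory Vector to show how the returned Futures compose; the Article fields here are simplified placeholders.

import scala.concurrent.{Await, Future}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._

// Hypothetical, trimmed-down Article; the real one lives in models.blog.
case class Article(id: Option[Long], title: String, sortOrder: Int)

// In-memory stand-in exposing the same shape of API as BlogDAO.
class InMemoryBlogDAO {
  private var rows = Vector.empty[Article]

  def addNewArticle(a: Article): Future[Article] = Future {
    val saved = a.copy(id = Some(rows.size + 1L))
    rows :+= saved
    saved
  }

  def getAllArticles: Future[Seq[Article]] = Future(rows.sortBy(-_.sortOrder))

  def getArticle(id: Long): Future[Option[Article]] = Future(rows.find(_.id.contains(id)))
}

object BlogDaoUsageSketch extends App {
  val dao = new InMemoryBlogDAO
  val result = for {
    saved <- dao.addNewArticle(Article(None, "Hello", sortOrder = 1))
    found <- dao.getArticle(saved.id.get)
  } yield found
  println(Await.result(result, 5.seconds))
}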
package com.yetu.play.authenticator.utils.di

import com.typesafe.config.ConfigFactory
import play.api.Play
import play.api.Play.current
import com.yetu.typesafeconfigextentension.ConfigExtension

object ConfigLoader extends ConfigExtension {

  val config = ConfigFactory.load().substitutePropertyValues("application.environmentUrl")

  object AuthServer {
    val profileUrl = config.getString("silhouette.yetu.profileURL")
    val logoutURL = config.getString("silhouette.yetu.logoutURL")
  }

  val singleSignOut = Play.configuration.getBoolean("silhouette.yetu.singleSignOut")
  val onLogoutGoToIfNoSingleSignOut = Play.configuration.getString("silhouette.yetu.onLogoutGoToIfNoSingleSignOut").get
  val onLoginGoTo = Play.configuration.getString("silhouette.yetu.onLoginGoTo").get

  val onLogoutGoTo: String = singleSignOut.getOrElse(false) match {
    case true => AuthServer.logoutURL
    case false => onLogoutGoToIfNoSingleSignOut
  }
}
yetu/yetu-play-authenticator
app/com/yetu/play/authenticator/utils/di/ConfigLoader.scala
Scala
apache-2.0
947
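ConfigLoader above resolves its endpoints through Typesafe Config. A small standalone sketch of that lookup pattern; the HOCON snippet and URLs are placeholders, and the project's own substitutePropertyValues extension is deliberately not reproduced here.

import com.typesafe.config.ConfigFactory

object ConfigLoaderSketch extends App {
  // Hypothetical HOCON mirroring the keys ConfigLoader reads; real values live in application.conf.
  val config = ConfigFactory.parseString(
    """
      |silhouette.yetu.profileURL = "https://auth.example.org/profile"
      |silhouette.yetu.logoutURL  = "https://auth.example.org/logout"
    """.stripMargin)

  val profileUrl = config.getString("silhouette.yetu.profileURL")
  val logoutUrl  = config.getString("silhouette.yetu.logoutURL")
  println(s"$profileUrl / $logoutUrl")
}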
/* * Copyright (c) 2012, 2013, 2014, 2015, 2016 SURFnet BV * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, are permitted provided that the * following conditions are met: * * * Redistributions of source code must retain the above copyright notice, this list of conditions and the following * disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided with the distribution. * * Neither the name of the SURFnet BV nor the names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package nl.surfnet import java.net.URI import nl.surfnet.nsiv2.messages._ package object safnari { type RequesterNsa = String type ConnectionId = String type GlobalReservationId = URI type ComputedPathSegments = Seq[(CorrelationId, ComputedSegment)] private val UuidGenerator = Uuid.randomUuidGenerator def newConnectionId: ConnectionId = UuidGenerator().toString }
BandwidthOnDemand/nsi-safnari
app/nl/surfnet/safnari/package.scala
Scala
bsd-3-clause
1,958
package ch.epfl.scala.index
package server

/*
 * Copyright 2015 Heiko Seeberger
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.lang.reflect.InvocationTargetException

import akka.http.scaladsl.marshalling.{Marshaller, ToEntityMarshaller}
import akka.http.scaladsl.model.MediaTypes.`application/json`
import akka.http.scaladsl.unmarshalling.{FromEntityUnmarshaller, Unmarshaller}
import org.json4s._

/**
 * Automatic to and from JSON marshalling/unmarshalling using an in-scope *Json4s* protocol.
 */
trait Json4sSupport {

  implicit val formats = DefaultFormats
  implicit val serialization = native.Serialization

  /**
   * HTTP entity => `A`
   *
   * @tparam A type to decode
   * @return unmarshaller for `A`
   */
  implicit def json4sUnmarshaller[A: Manifest](implicit serialization: Serialization,
                                               formats: Formats): FromEntityUnmarshaller[A] =
    Unmarshaller.byteStringUnmarshaller.forContentTypes(`application/json`).mapWithCharset {
      (data, charset) =>
        try serialization.read(data.decodeString(charset.nioCharset.name))
        catch {
          case MappingException("unknown error", ite: InvocationTargetException) =>
            throw ite.getCause
        }
    }

  /**
   * `A` => HTTP entity
   *
   * @tparam A type to encode, must be upper bounded by `AnyRef`
   * @return marshaller for any `A` value
   */
  implicit def json4sMarshaller[A <: AnyRef](implicit serialization: Serialization,
                                             formats: Formats): ToEntityMarshaller[A] =
    Marshaller.StringMarshaller.wrap(`application/json`)(serialization.writePretty[A])
}
adamwy/scaladex
server/src/main/scala/ch.epfl.scala.index.server/Json4sSupport.scala
Scala
bsd-3-clause
2,192
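The marshallers in Json4sSupport above delegate to json4s's native Serialization. A standalone round-trip showing the underlying read/write calls; the Release case class is a made-up payload, not a scaladex type.

import org.json4s._
import org.json4s.native.Serialization.{read, write}

// Hypothetical payload type used only for this sketch.
case class Release(artifact: String, version: String)

object Json4sRoundTripSketch extends App {
  implicit val formats: Formats = DefaultFormats

  val json = write(Release("index-server", "1.0.0")) // serialize to a JSON string
  val back = read[Release](json)                     // deserialize back to the case class
  assert(back == Release("index-server", "1.0.0"))
  println(json)
}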
/* * This file is part of Evo2DSim. * * Evo2DSim is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * Evo2DSim is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with Evo2DSim. If not, see <http://www.gnu.org/licenses/>. */ package org.vastness.evo2dsim.core.neuro import org.vastness.evo2dsim.core.evolution.genomes.{Node, NodeTag} import org.vastness.evo2dsim.core.data._, Record._ case class SensorNeuron(id: Int, bias: NumberT, t_func: TransferFunction, data: String)(var s_func: () => NumberT = () => zero) extends Neuron { val tag = NodeTag.Sensor var memory: Boolean = false val decay: NumberT = 0.95 private var lastSensoryInput: NumberT = zero override def calcActivity: NumberT = { var sensorInput = s_func() if(memory && sensorInput == zero) { sensorInput = lastSensoryInput * decay } lastSensoryInput = sensorInput super.calcActivity + sensorInput } override def dataHeader = super.dataHeader ++ Seq(h("input")) override def dataRow = append(super.dataRow, Seq(lastSensoryInput.toString)) } object SensorNeuron { def apply(n: Node): (() => NumberT) => SensorNeuron = SensorNeuron(n.id, n.bias, n.transferFunction, n.data) def apply(n: Node, o: SensorNeuron): SensorNeuron = SensorNeuron(n)(o.s_func) }
vchuravy/Evo2DSim
core/src/main/scala/org/vastness/evo2dsim/core/neuro/SensorNeuron.scala
Scala
mit
1,744
import a._

class C extends B {
  protected override def m() = super.m() + 1
  def emm = m()
}

object Test extends App {
  assert(new C().emm == 2)
}
scala/scala
test/files/run/t7936/Test_1.scala
Scala
apache-2.0
152
package com.lookout.borderpatrol.session.secret import com.lookout.borderpatrol.session._ import com.twitter.util.Time import scala.concurrent.{Future => ScalaFuture, Promise => ScalaPromise} import scala.util.{Failure, Success, Try} /** * This prototypes out an API for the SecretStore, keeping secrets in memory * which obviously doesn't work in a multi-server environment. * * Further work should be done to coordinate secrets among processes. * * For example, a zookeeper watcher could update current and previous in memory * on change, while an external service handles writing new secrets. */ sealed trait SecretStoreApi { def current: Secret def previous: Secret def find(f: (Secret) => Boolean): Try[Secret] } case class ConsulSecretStore(watcher: SecretsWatcherApi) extends SecretStoreApi { //During initialization, we want this to be a hard failure that prevents server from starting private[this] var _secrets: Secrets = watcher.initialSecrets /** * Get Next secrets * @return */ private def nextSecrets: Secrets = { watcher.getNext match { case Success(newSecrets) => newSecrets case Failure(f) => { //Do something indicating we got an error println(s"Unable to get new secrets: Exception $f") _secrets } } } def current = { val c = _secrets.current if (c.expiry > Time.now && c.expiry <= SecretExpiry.currentExpiry) c else { println("Secrets have expired") _secrets = nextSecrets } _secrets.current } def previous = _secrets.previous def find(f: (Secret) => Boolean) = if (f(current)) Success(current) else if (f(previous)) Success(previous) else Failure(new Exception("No matching secrets found")) } case class InMemorySecretStore(secrets: Secrets) extends SecretStoreApi { import com.lookout.borderpatrol.session.SecretExpiry._ private[this] var _secrets: Secrets = secrets def current = { val c = _secrets.current if (c.expiry > Time.now && c.expiry <= currentExpiry) c else { val c2 = Secret(currentExpiry) _secrets = Secrets(c2, c) c2 } } def previous = _secrets.previous def find(f: (Secret) => Boolean) = if (f(current)) Success(current) else if (f(previous)) Success(previous) else Failure(new Exception("No matching secrets found")) } trait SecretStoreComponent { implicit val secretStore: SecretStoreApi }
rtyler/borderpatrol
borderpatrol-core/src/main/scala/com/lookout/borderpatrol/session/secret/SecretStore.scala
Scala
mit
2,437
import com.typesafe.sbt.SbtScalariform._

object Format {
  lazy val settings = projectSettings ++ Seq(
    autoImport.scalariformAutoformat := true,
    ScalariformKeys.preferences := formattingPreferences)

  lazy val formattingPreferences = {
    import scalariform.formatter.preferences._
    FormattingPreferences().
      setPreference(AlignParameters, false).
      setPreference(AlignSingleLineCaseStatements, true).
      setPreference(CompactControlReadability, false).
      setPreference(CompactStringConcatenation, false).
      setPreference(DoubleIndentConstructorArguments, false).
      setPreference(FormatXml, true).
      setPreference(IndentLocalDefs, false).
      setPreference(IndentPackageBlocks, true).
      setPreference(IndentSpaces, 2).
      setPreference(MultilineScaladocCommentsStartOnFirstLine, false).
      setPreference(PreserveSpaceBeforeArguments, false).
      setPreference(RewriteArrowSymbols, false).
      setPreference(SpaceBeforeColon, false).
      setPreference(SpaceInsideBrackets, false).
      setPreference(SpacesAroundMultiImports, true).
      setPreference(SpacesWithinPatternBinders, true)
  }
}
ReactiveMongo/ReactiveMongo
project/Format.scala
Scala
apache-2.0
1,153
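These Scalariform settings only take effect once a project pulls them in. A hypothetical build.sbt fragment wiring them up; the module name is a placeholder and the actual ReactiveMongo build may differ.

lazy val core = (project in file("core"))
  .settings(Format.settings: _*) // shared Scalariform formatting settings from project/Format.scala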
// // Clause.scala -- Scala class Clause // Project OrcScala // // Created by dkitchin on Jun 3, 2010. // // Copyright (c) 2017 The University of Texas at Austin. All rights reserved. // // Use and redistribution of this file is governed by the license terms in // the LICENSE file found in the project's top-level directory and also found at // URL: http://orc.csres.utexas.edu/license.shtml . // package orc.compile.translate import scala.collection.immutable.{ List, Map, Nil } import scala.language.reflectiveCalls import orc.ast.ext._ import orc.ast.oil.named import orc.ast.hasOptionalVariableName._ import orc.compile.translate.PrimitiveForms._ import orc.error.OrcExceptionExtension._ import orc.error.compiletime._ case class Clause(formals: List[Pattern], maybeGuard: Option[Expression], body: Expression) extends orc.ast.AST { val arity = formals.size /** Convert a clause into a cascading match; if the clause patterns match, * execute the body, otherwise execute the fallthrough expression. * * The supplied args are the formal parameters of the overall function. */ def convert(args: List[named.BoundVar], fallthrough: named.Expression)(implicit ctx: TranslatorContext, translator: Translator): named.Expression = { import translator._ import ctx._ var targetConversion: Conversion = id def extendConversion(f: Conversion) { targetConversion = targetConversion andThen f } val targetContext: scala.collection.mutable.Map[String, named.Argument] = new scala.collection.mutable.HashMap() def extendContext(dcontext: Map[String, named.Argument]) { for ((name, y) <- dcontext) { /* Ensure that patterns are linear even across multiple arguments of a clause */ if (targetContext contains name) { reportProblem(NonlinearPatternException(name) at this) } else { targetContext += { (name, y) } } } } /* Convert this expression with respect to the current targetContext, * using the current targetConversion. */ def convertInContext(e: Expression): named.Expression = { targetConversion(translator.convert(e)(ctx.copy(context = context ++ targetContext))) } val (strictPairs, nonstrictPairs) = { val zipped: List[(Pattern, named.BoundVar)] = formals zip args zipped partition { case (p, _) => p.isStrict } } for ((p, x) <- nonstrictPairs) { val (_, dcontext, target) = convertPattern(p, x) extendConversion(target) extendContext(dcontext) } strictPairs match { /* * There are no strict patterns. */ case Nil => { maybeGuard match { case Some(guard) => { // If there are no strict patterns, then we just branch on the guard. val newGuard = guard -> convertInContext extendConversion({ makeConditionalFalseOnHalt(newGuard, _, fallthrough) }) } case None => { /* * If there are no strict patterns and there is no guard, * then the clause is unconditional. If there are any * subsequent clauses, they are redundant. */ fallthrough match { case named.Stop() => {} case _ => { reportProblem(RedundantMatch() at fallthrough) } } } } } /* * There is at least one strict pattern. */ case _ => { val x = new named.BoundVar(None) val (newSource, dcontext, target) = strictPairs match { case (strictPattern, strictArg) :: Nil => { x.optionalVariableName = Some(id"tmp_$strictPattern") val (source, dcontext, target) = convertPattern(strictPattern, x) val newSource = source(strictArg) (newSource, dcontext, target) } /* If there is more than one strict pattern, * we treat it as a single tuple pattern containing those patterns. 
*/ case _ => { val (strictPatterns, strictArgs) = strictPairs.unzip val (source, dcontext, target) = convertPattern(TuplePattern(strictPatterns), x) val newSource = source(makeTuple(strictArgs)) (newSource, dcontext, target) } } extendContext(dcontext) extendConversion(target) val guardedSource = maybeGuard match { case Some(guard) => { val g = new named.BoundVar(None) val b = new named.BoundVar(Some(id"tmp$guard")) val newGuard = convertInContext(guard).subst(g, x) newSource > g > (named.Trim(newGuard) > b > callIft(b) >> g) } case None => newSource } extendConversion({ makeMatch(guardedSource, x, _, fallthrough) }) } } /* Finally, construct the new expression */ this ->> convertInContext(body) } } object Clause { /** If these clauses all have the same arity, return that arity. * Otherwise, throw an exception. * * The list of clauses is assumed to be nonempty. */ def commonArity(clauses: List[Clause]): Int = { val first :: rest = clauses rest find { _.arity != first.arity } match { case Some(clause) => throw (ClauseArityMismatch() at clause) case None => first.arity } } /** Convert a list of clauses to a single expression * which linearly matches those clauses. * * Also return the list of arguments against which the * converted body expression performs this match. * * The list of clauses is assumed to be nonempty. */ def convertClauses(clauses: List[Clause])(implicit ctx: TranslatorContext, translator: Translator): (List[named.BoundVar], named.Expression) = { val arity = commonArity(clauses) val args = (for (_ <- 0 until arity) yield new named.BoundVar(None)).toList val nil: named.Expression = named.Stop() def cons(clause: Clause, fail: named.Expression) = clause.convert(args, fail) val body = clauses.foldRight(nil)(cons) for ((a, i) <- args.zipWithIndex) { if (a.optionalVariableName.isEmpty) a.optionalVariableName = Some(id"arg$i") } (args, body) } }
orc-lang/orc
OrcScala/src/orc/compile/translate/Clause.scala
Scala
bsd-3-clause
6,346
/*
 * Copyright 2013 http4s.org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.http4s
package headers

import cats.parse._
import org.typelevel.ci._

object Authorization {
  // https://datatracker.ietf.org/doc/html/rfc7235#section-4.2
  private[http4s] val parser: Parser[Authorization] = {
    import org.http4s.internal.parsing.Rfc7235.credentials
    credentials.map(Authorization(_))
  }

  def parse(s: String): ParseResult[Authorization] =
    ParseResult.fromParser(parser, "Invalid Authorization header")(s)

  def apply(basic: BasicCredentials): Authorization =
    Authorization(Credentials.Token(AuthScheme.Basic, basic.token))

  val name: CIString = ci"Authorization"

  implicit val headerInstance: Header[Authorization, Header.Single] =
    Header.createRendered(
      name,
      _.credentials,
      parse,
    )
}

final case class Authorization(credentials: Credentials)
http4s/http4s
core/shared/src/main/scala/org/http4s/headers/Authorization.scala
Scala
apache-2.0
1,420
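A small usage sketch against the Authorization companion above, assuming an http4s version with this header encoding; the credential values are placeholders.

import org.http4s.BasicCredentials
import org.http4s.headers.Authorization

object AuthorizationHeaderSketch extends App {
  // Build the header from basic credentials via the apply(basic) overload above.
  val creds  = BasicCredentials("aladdin", "open sesame")
  val header = Authorization(creds)

  // Round-trip the scheme and token through the RFC 7235 parser.
  val parsed = Authorization.parse(s"Basic ${creds.token}")
  assert(parsed.isRight)
  println(header.credentials)
}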
import org.apache.spark.mllib.regression.LinearRegressionWithSGD
import org.apache.spark.mllib.regression.LabeledPoint
import org.apache.spark.mllib.linalg.Vectors

// Load and parse the data
val data = sc.textFile("../datasets/winequalityred_linearregression.csv")
val parsedData = data.map { line =>
  val parts = line.split(';')
  LabeledPoint(parts.last.toDouble, Vectors.dense(parts.take(11).map(_.toDouble)))
}

// Build linear regression model
var regression = new LinearRegressionWithSGD().setIntercept(true)
regression.optimizer.setStepSize(0.001)
val model = regression.run(parsedData)

// Export linear regression model to PMML
model.toPMML("../exported_pmml_models/linearregression.xml")

// Test model on training data
// First from winequalityred_linearregression.csv (quality: 5)
var predictedValue = model.predict(Vectors.dense(7.4,0.7,0,1.9,0.076,11,34,0.9978,3.51,0.56,9.4))

// Random from winequalityred_linearregression.csv (quality: 7)
predictedValue = model.predict(Vectors.dense(11.5,0.54,0.71,4.4,0.124,6,15,0.9984,3.01,0.83,11.8))
rahuldhote/spark-pmml-exporter-validator
src/main/resources/spark_shell_exporter/linearregression_winequalityred.scala
Scala
agpl-3.0
1,056
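The script above predicts two individual rows but never scores the whole training set. A follow-up fragment in the same spark-shell style, reusing its parsedData and model; the mean-squared-error computation is the usual pattern from the MLlib documentation.

// Evaluate the model on the full training set and compute the mean squared error
val valuesAndPreds = parsedData.map { point =>
  val prediction = model.predict(point.features)
  (point.label, prediction)
}
val MSE = valuesAndPreds.map { case (label, prediction) => math.pow(label - prediction, 2) }.mean()
println(s"training Mean Squared Error = $MSE")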
/*
 * Copyright 2007-2011 WorldWide Conferencing, LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package bootstrap.liftweb

import net.liftweb.util._
import net.liftweb.common._
import net.liftweb.http._
import net.liftweb.sitemap._
import net.liftweb.sitemap.Loc._
import Helpers._

/**
 * A class that's instantiated early and run. It allows the application
 * to modify lift's environment
 */
class Boot {
  def boot {
    // where to search snippet
    LiftRules.addToPackages("net.liftweb.webapptest")

    // Build SiteMap
    val entries = Menu("Home") / "index" ::
      Menu("htmlFragmentWithHead") / "htmlFragmentWithHead" ::
      Menu("htmlSnippetWithHead") / "htmlSnippetWithHead" :: Nil

    LiftRules.setSiteMap(SiteMap(entries:_*))
  }
}
sortable/framework
persistence/mapper/src/test/scala/bootstrap/liftweb/Boot.scala
Scala
apache-2.0
1,278
/** * Copyright (C) 2007 Orbeon, Inc. * * This program is free software; you can redistribute it and/or modify it under the terms of the * GNU Lesser General Public License as published by the Free Software Foundation; either version * 2.1 of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * See the GNU Lesser General Public License for more details. * * The full text of the license is available at http://www.gnu.org/copyleft/lesser.html */ package org.orbeon.oxf.xforms.library import org.orbeon.oxf.xml.OrbeonFunctionLibrary import org.orbeon.saxon.`type`.BuiltInAtomicType._ import org.orbeon.saxon.expr.StaticProperty._ import org.orbeon.oxf.xforms.function.{Power, CountNonEmpty, IsCardNumber, BooleanFromString} /** * XForms functions that are a bit funny. */ trait XFormsFunnyFunctions extends OrbeonFunctionLibrary { // Define in early definition of subclass val XFormsFunnyFunctionsNS: Seq[String] Namespace(XFormsFunnyFunctionsNS) { Fun("boolean-from-string", classOf[BooleanFromString], 0, 1, BOOLEAN, EXACTLY_ONE, Arg(STRING, EXACTLY_ONE) ) Fun("is-card-number", classOf[IsCardNumber], 0, 1, BOOLEAN, EXACTLY_ONE, Arg(STRING, EXACTLY_ONE) ) Fun("count-non-empty", classOf[CountNonEmpty], 0, 1, INTEGER, EXACTLY_ONE, Arg(ANY_ATOMIC, ALLOWS_ZERO_OR_MORE) ) Fun("power", classOf[Power], 0, 2, NUMERIC, EXACTLY_ONE, Arg(NUMERIC, EXACTLY_ONE), Arg(NUMERIC, EXACTLY_ONE) ) } }
martinluther/orbeon-forms
src/main/scala/org/orbeon/oxf/xforms/library/XFormsFunnyFunctions.scala
Scala
lgpl-2.1
1,745
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.cosmos.spark

import com.azure.cosmos.implementation.TestConfigurations
import com.azure.cosmos.{ConsistencyLevel, CosmosClientBuilder}
import org.apache.spark.sql.SparkSession

object SampleCosmosCatalogE2EMain {
  def main(args: Array[String]) {
    val cosmosEndpoint = TestConfigurations.HOST
    val cosmosMasterKey = TestConfigurations.MASTER_KEY
    val cosmosDatabase = "testDB"
    val cosmosContainer = "testContainer"

    val client = new CosmosClientBuilder()
      .endpoint(cosmosEndpoint)
      .key(cosmosMasterKey)
      .consistencyLevel(ConsistencyLevel.EVENTUAL)
      .buildAsyncClient()

    client.createDatabaseIfNotExists(cosmosDatabase).block()
    client.getDatabase(cosmosDatabase).createContainerIfNotExists(cosmosContainer, "/id").block()
    client.close()

    val cfg = Map("spark.cosmos.accountEndpoint" -> cosmosEndpoint,
      "spark.cosmos.accountKey" -> cosmosMasterKey,
      "spark.cosmos.database" -> cosmosDatabase,
      "spark.cosmos.container" -> cosmosContainer
    )

    val spark = SparkSession.builder()
      .appName("spark connector sample")
      .master("local")
      .getOrCreate()

    spark.conf.set(s"spark.sql.catalog.mycatalog", "com.azure.cosmos.spark.CosmosCatalog")
    spark.conf.set(s"spark.sql.catalog.mycatalog.spark.cosmos.accountEndpoint", cosmosEndpoint)
    spark.conf.set(s"spark.sql.catalog.mycatalog.spark.cosmos.accountKey", cosmosMasterKey)

    spark.sql(s"CREATE DATABASE IF NOT EXISTS mycatalog.myTestNamespace;")
    //options(
    //  spark.cosmos.accountEndpoint '${configuration.get("spark.cosmos.accountEndpoint").get}',
    //  spark.cosmos.accountKey '${configuration.get("spark.cosmos.accountKey").get}',

    spark.close()
  }
}
Azure/azure-sdk-for-java
sdk/cosmos/azure-cosmos-spark_3_2-12/src/test/scala/com/azure/cosmos/spark/SampleCosmosCatalogE2EMain.scala
Scala
mit
1,832
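The sample above builds a cfg map but never uses it. A hedged continuation showing how such a map could feed a DataFrame write and read; the "cosmos.oltp" data source name is an assumption about how the connector registers itself and may vary across connector versions.

// Assumes `spark` and `cfg` from SampleCosmosCatalogE2EMain are still in scope.
val df = spark.createDataFrame(Seq(("id-1", "item-1"), ("id-2", "item-2"))).toDF("id", "name")
df.write.format("cosmos.oltp").options(cfg).mode("Append").save() // format name assumed

val readBack = spark.read.format("cosmos.oltp").options(cfg).load()
readBack.show()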
package de.sciss.fscape
package tests

import de.sciss.fscape.Ops._
import de.sciss.fscape.gui.SimpleGUI

import scala.swing.Swing

object IfThenElseGESimpleTest extends App {
  // showStreamLog = true

  val g = Graph {
    import graph._
    // (0: GE).poll(0, "zero")
    val p1: GE = 0
    val dc1 = DC(1)
    val out = If (p1) Then {
      dc1 // DC(1)
    } Else {
      (1234: GE).poll(0, "test")
      2: GE // DC(2)
    }
    out.take(1000).poll(0, "out")
  }

  val config = stream.Control.Config()
  config.useAsync = false // for debugging
  val ctrl = stream.Control(config)
  ctrl.run(g)

  Swing.onEDT {
    SimpleGUI(ctrl)
  }

  println("Running.")
}
Sciss/FScape-next
core/jvm/src/test/scala/de/sciss/fscape/tests/IfThenElseGESimpleTest.scala
Scala
agpl-3.0
684
/* * * o o o o o * | o | |\\ /| | / * | o-o o--o o-o oo | | O | oo o-o OO o-o o o * | | | | | | | | | | | | | | | | \\ | | \\ / * O---oo-o o--O | o-o o-o-o o o o-o-o o o o-o o * | * o--o * o--o o o--o o o * | | | | o | | * O-Oo oo o-o o-O o-o o-O-o O-o o-o | o-O o-o * | \\ | | | | | | | | | | | | | |-' | | | \\ * o o o-o-o o o-o o-o o o o o | o-o o o-o o-o * * Logical Markov Random Fields (LoMRF). * * */ /** * LoMRF utilities. */ package object lomrf { final val NO_ENTRY_KEY = -1 final val DEFAULT_CAPACITY = 43 final val DEFAULT_LOAD_FACTOR = 0.75f // predicate prefix when functions are converted into auxiliary predicates final val AUX_PRED_PREFIX = "AUX" // function return value prefix final val FUNC_RET_VAR_PREFIX = "funcRetVar" final val ASCIILogo = """ |o o o o o || o | |\\ /| | / || o-o o--o o-o oo | | O | oo o-o OO o-o o o || | | | | | | | | | | | | | | | \\ | | \\ / |O---oo-o o--O | o-o o-o-o o o o-o-o o o o-o o | | | o--o |o--o o o--o o o || | | | o | | |O-Oo oo o-o o-O o-o o-O-o O-o o-o | o-O o-o || \\ | | | | | | | | | | | | | |-' | | | \\ |o o o-o-o o o-o o-o o o o o | o-o o o-o o-o | |Logical Markov Random Fields (LoMRF). """.stripMargin final val processors = sys.runtime.availableProcessors object BuildVersion { import java.net.URL final val version: String = "Version: " + { val clazz = lomrf.BuildVersion.getClass try { val classPath = clazz.getResource("package$" + clazz.getSimpleName + ".class").toString if (classPath.startsWith("jar")) { val manifestPath = classPath.substring(0, classPath.lastIndexOf("!") + 1) + "/META-INF/MANIFEST.MF" val manifest0 = new java.util.jar.Manifest(new URL(manifestPath).openStream) val attr = manifest0.getMainAttributes //val build = attr.getValue("Implementation-Build") val version = attr.getValue("Specification-Version") version } else "(undefined version)" } catch { case ex: NullPointerException => "(undefined version)" } } def apply(): String = version override def toString: String = version } }
anskarl/LoMRF
src/main/scala/lomrf/package.scala
Scala
apache-2.0
2,761
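The package object above only declares constants and the BuildVersion helper. A small, purely illustrative snippet showing how they might be consumed; the object and helper names here are hypothetical, not part of LoMRF.

// Illustrative usage of the package-level members defined above.
object LomrfBanner extends App {
  // Print the ASCII logo and the version read from the jar manifest.
  println(lomrf.ASCIILogo)
  println(lomrf.BuildVersion())

  // Hypothetical naming scheme built from the declared prefixes, e.g. when a
  // function symbol is rewritten into an auxiliary predicate.
  def auxPredicateName(functionSymbol: String): String = s"${lomrf.AUX_PRED_PREFIX}$functionSymbol"
  def returnVariableName(index: Int): String = s"${lomrf.FUNC_RET_VAR_PREFIX}$index"

  println(auxPredicateName("motherOf")) // AUXmotherOf
  println(returnVariableName(0))        // funcRetVar0
}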
/* * * ____ __ ____________ ______ * / __/______ _/ /__ /_ __/ _/ //_/_ / * _\\ \\/ __/ _ `/ / _ `// / _/ // ,< / /_ * /___/\\__/\\_,_/_/\\_,_//_/ /___/_/|_| /___/ * * A plot library for Scala. * */ import sbt._ object Dependencies { object v { final val Logback = "1.2.3" final val ScalaLogging = "3.9.3" final val ScalaTest = "3.2.7" final val ScalaCheck = "1.15.3" final val ScalaOpt = "3.7.1" final val Ghost4J = "1.0.1" final val CSVParser = "2.9.1" final val Enums = "1.6.1" } // Logging using slf4j and logback lazy val Logging: Seq[ModuleID] = Seq( "ch.qos.logback" % "logback-classic" % v.Logback, "com.typesafe.scala-logging" %% "scala-logging" % v.ScalaLogging ) // ScalaTest and ScalaMeter for UNIT testing lazy val ScalaTest: Seq[ModuleID] = Seq( "org.scalatest" %% "scalatest" % v.ScalaTest % "test", "org.scalacheck" %% "scalacheck" % v.ScalaCheck % "test" ) // Libraries for option parsing, csv parsing and ghost-script lazy val Commons: Seq[ModuleID] = Seq( "com.github.scopt" %% "scopt" % v.ScalaOpt, "org.ghost4j" % "ghost4j" % v.Ghost4J, "com.univocity" % "univocity-parsers" % v.CSVParser, "com.beachape" %% "enumeratum" % v.Enums ) }
vagmcs/ScalaTIKZ
project/Dependencies.scala
Scala
lgpl-3.0
1,276
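Files under project/ are compiled onto the sbt build classpath, so these dependency groups are meant to be referenced from the main build definition. A sketch of how build.sbt might consume them; the project name and settings shown are illustrative, not taken from the repository.

// build.sbt (illustrative): wire the groups defined in project/Dependencies.scala.
import Dependencies._

lazy val root = (project in file("."))
  .settings(
    name := "ScalaTIKZ",
    libraryDependencies ++= Logging ++ ScalaTest ++ Commons
  )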
/* * Copyright 2013 Akira Ueda * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.physalis.shirahae import scala.util.Try import org.scalatest.FunSuite import org.mockito.Mockito._ import Imports._ import com.github.nscala_time.time.Imports._ class LogSuite extends FunSuite { test("test types") { val now = new DateTime(2013, 8, 16, 23, 9, 0, 0) val sql = EmbeddedParameterStyleSqlLogger.createMessage( """|select * from test |where c_int = ? | and c_long = ? | and c_float = ? | and c_double = ? | and c_string = ? | and c_boolean = ? | and c_datetime = ? | and c_null = ? | and c_none = ? | and c_some = ?""".stripMargin, List(1, 2, 3.0, 4.0, "abc", true, now, null, None, Some(3)) ) assert(sql === """select * from test""" + """ where c_int = 1 and c_long = 2 and c_float = 3.0 and c_double = 4.0""" + """ and c_string = 'abc' and c_boolean = true and c_datetime = '2013-08-16 23:09:00'""" + """ and c_null = null and c_none = null and c_some = 3""".stripMargin) } test("generated sql should be valid") { val sql = EmbeddedParameterStyleSqlLogger.createMessage( "select * from message where id = ? and user_name = ? and created_at < ?", List(1, "abc", new DateTime(2013, 8, 16, 23, 9, 0, 0)) ) assert(sql === "select * from message where id = 1 and user_name = 'abc' and created_at < '2013-08-16 23:09:00'") } test("log works without error") { EmbeddedParameterStyleSqlLogger.log( "select * from message where id = ? and user_name = ? and created_at < ?", 1, "abc", new DateTime(2013, 8, 16, 23, 9, 0, 0) ) } test("log works with single argument without error") { EmbeddedParameterStyleSqlLogger.log("select * from message where id = ?", 1) } }
akr4/shirahae-sql
src/test/scala/log-test.scala
Scala
apache-2.0
2,404
/* * Copyright 2001-2013 Artima, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.scalatest import org.scalactic._ import java.util.NoSuchElementException import org.scalatest.exceptions.StackDepthException import org.scalatest.exceptions.TestFailedException /** * Trait that provides an implicit conversion that adds a <code>value</code> method * to <code>Option</code>, which will return the value of the option if it is defined, * or throw <code>TestFailedException</code> if not. * * <p> * This construct allows you to express in one statement that an option should be defined * and that its value should meet some expectation. Here's an example: * </p> * * <pre class="stHighlight"> * opt.value should be &gt; 9 * </pre> * * <p> * Or, using an assertion instead of a matcher expression: * </p> * * <pre class="stHighlight"> * assert(opt.value &gt; 9) * </pre> * * <p> * Were you to simply invoke <code>get</code> on the <code>Option</code>, * if the option wasn't defined, it would throw a <code>NoSuchElementException</code>: * </p> * * <pre class="stHighlight"> * val opt: Option[Int] = None * * opt.get should be &gt; 9 // opt.get throws NoSuchElementException * </pre> * * <p> * The <code>NoSuchElementException</code> would cause the test to fail, but without providing a <a href="exceptions/StackDepth.html">stack depth</a> pointing * to the failing line of test code. This stack depth, provided by <a href="exceptions/TestFailedException.html"><code>TestFailedException</code></a> (and a * few other ScalaTest exceptions), makes it quicker for * users to navigate to the cause of the failure. Without <code>OptionValues</code>, to get * a stack depth exception you would need to make two statements, like this: * </p> * * <pre class="stHighlight"> * val opt: Option[Int] = None * * opt should be ('defined) // throws TestFailedException * opt.get should be &gt; 9 * </pre> * * <p> * The <code>OptionValues</code> trait allows you to state that more concisely: * </p> * * <pre class="stHighlight"> * val opt: Option[Int] = None * * opt.value should be &gt; 9 // opt.value throws TestFailedException * </pre> */ trait OptionValues { import scala.language.implicitConversions /** * Implicit conversion that adds a <code>value</code> method to <code>Option</code>. * * @param opt the <code>Option</code> on which to add the <code>value</code> method */ implicit def convertOptionToValuable[T](opt: Option[T])(implicit pos: source.Position): Valuable[T] = new Valuable(opt, pos) /** * Wrapper class that adds a <code>value</code> method to <code>Option</code>, allowing * you to make statements like: * * <pre class="stHighlight"> * opt.value should be &gt; 9 * </pre> * * @param opt An option to convert to <code>Valuable</code>, which provides the <code>value</code> method. */ class Valuable[T](opt: Option[T], pos: source.Position) { /** * Returns the value contained in the wrapped <code>Option</code>, if defined, else throws <code>TestFailedException</code> with * a detail message indicating the option was not defined. 
*/ def value: T = { try { opt.get } catch { case cause: NoSuchElementException => throw new TestFailedException((_: StackDepthException) => Some(Resources.optionValueNotDefined), Some(cause), pos) } } } } /** * Companion object that facilitates the importing of <code>OptionValues</code> members as * an alternative to mixing it in. One use case is to import <code>OptionValues</code>'s members so you can use * <code>value</code> on option in the Scala interpreter: * * <pre class="stREPL"> * $ scala -cp scalatest-1.7.jar * Welcome to Scala version 2.9.1.final (Java HotSpot(TM) 64-Bit Server VM, Java 1.6.0_29). * Type in expressions to have them evaluated. * Type :help for more information. * * scala&gt; import org.scalatest._ * import org.scalatest._ * * scala&gt; import matchers.Matchers._ * import matchers.Matchers._ * * scala&gt; import OptionValues._ * import OptionValues._ * * scala&gt; val opt1: Option[Int] = Some(1) * opt1: Option[Int] = Some(1) * * scala&gt; val opt2: Option[Int] = None * opt2: Option[Int] = None * * scala&gt; opt1.value should be &lt; 10 * * scala&gt; opt2.value should be &lt; 10 * org.scalatest.TestFailedException: The Option on which value was invoked was not defined. * at org.scalatest.OptionValues$Valuable.value(OptionValues.scala:68) * at .&lt;init&gt;(&lt;console&gt;:18) * ... * </pre> * */ object OptionValues extends OptionValues
dotty-staging/scalatest
scalatest/src/main/scala/org/scalatest/OptionValues.scala
Scala
apache-2.0
5,188
package org.mdoc.common.model import cats.data.Xor import io.circe.generic.auto._ import io.circe.parse._ import io.circe.syntax._ import org.mdoc.common.model.Format.{ Html, Pdf } import org.mdoc.common.model.RenderingEngine.LibreOffice import org.mdoc.common.model.circe._ import org.scalacheck.Prop._ import org.scalacheck.Properties import scodec.bits.ByteVector object RenderingInputSpec extends Properties("RenderingInput") { { val json = """ {"id":{"self":"42"},"config":{"outputFormat":{"Pdf":{}},"engine":{"LibreOffice":{}}},"doc":{"format":{"Html":{}},"body":"SGVsbG8sIFdvcmxkIQ=="}} """.trim val config = RenderingConfig(Pdf, LibreOffice) val doc = Document(Html, ByteVector("Hello, World!".getBytes)) val input = RenderingInput(JobId("42"), config, doc) property("JSON decode") = secure { decode[RenderingInput](json) ?= Xor.right(input) } property("JSON encode") = secure { input.asJson.noSpaces ?= json } } }
m-doc/common-model
shared/src/test/scala/org/mdoc/common/model/RenderingInputSpec.scala
Scala
apache-2.0
987
/* * Copyright 2017 HM Revenue & Customs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package uk.gov.hmrc.ct.accounts import org.scalatest.mockito.MockitoSugar import org.scalatest.{Matchers, WordSpec} import uk.gov.hmrc.ct.box.retriever.BoxRetriever import uk.gov.hmrc.ct.box.{CtValidation, ValidatableBox} trait AccountStatementValidationFixture[T <: BoxRetriever] extends WordSpec with Matchers with MockitoSugar { def boxRetriever: T def setupMocks(): Unit = {} def doStatementValidationTests(boxId: String, builder: (Option[Boolean]) => ValidatableBox[T]): Unit = { setupMocks() s"$boxId" should { "validate successfully when true" in { builder(Some(true)).validate(boxRetriever) shouldBe Set.empty } "fail validation if not set" in { builder(None).validate(boxRetriever) shouldBe Set(CtValidation(Some(boxId), s"error.$boxId.required", None)) } "fail validation if false" in { builder(Some(false)).validate(boxRetriever) shouldBe Set(CtValidation(Some(boxId), s"error.$boxId.required", None)) } } } }
liquidarmour/ct-calculations
src/test/scala/uk/gov/hmrc/ct/accounts/AccountStatementValidationFixture.scala
Scala
apache-2.0
1,614
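A concrete spec mixes the fixture in, supplies a (typically mocked) box retriever, and hands doStatementValidationTests a constructor for the box under test. The box and retriever types below are hypothetical stand-ins, not names from the library.

// Illustrative spec; MyStatementBox and MyBoxRetriever are hypothetical stand-ins for a
// real ValidatableBox[T] case class (wrapping Option[Boolean]) and its BoxRetriever.
class MyStatementBoxSpec extends AccountStatementValidationFixture[MyBoxRetriever] {

  override val boxRetriever: MyBoxRetriever = mock[MyBoxRetriever]

  // Runs the three shared cases: Some(true) passes; None and Some(false) fail with
  // the "error.MyStatementBox.required" validation.
  doStatementValidationTests("MyStatementBox", value => MyStatementBox(value))
}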
package com.sksamuel.elastic4s import java.util.UUID import com.sksamuel.elastic4s.admin.{OpenIndexDefinition, TypesExistsDefinition, RefreshIndexDefinition, IndicesStatsDefinition, IndexExistsDefinition, GetSegmentsDefinition, GetTemplateDefinition, FlushIndexDefinition, DeleteIndexTemplateDefinition, FieldStatsDefinition, ClusterStatsDefinition, ClusterHealthDefinition, ClusterStateDefinition, ClusterSettingsDefinition, CloseIndexDefinition, ClearCacheDefinition, ClusterDsl, SnapshotDsl, IndexTemplateDsl, IndexAdminDsl, FieldStatsDsl} import com.sksamuel.elastic4s.anaylzers.{TokenFilterDsl, TokenizerDsl, AnalyzerDsl} import com.sksamuel.elastic4s.mappings.FieldType.{ObjectType, NestedType, TokenCountType, StringType, ShortType, LongType, IpType, IntegerType, GeoShapeType, DateType, DoubleType, GeoPointType, MultiFieldType, FloatType, CompletionType, BooleanType, ByteType, BinaryType, AttachmentType} import com.sksamuel.elastic4s.mappings._ import scala.concurrent.duration._ import scala.concurrent.{Await, Future} import scala.language.implicitConversions /** @author Stephen Samuel */ trait ElasticDsl extends IndexDsl with AliasesDsl with AnalyzerDsl with BulkDsl with ClusterDsl with CountDsl with CreateIndexDsl with DeleteIndexDsl with DeleteDsl with FieldStatsDsl with ExplainDsl with GetDsl with IndexAdminDsl with IndexRecoveryDsl with IndexTemplateDsl with MappingDsl with MultiGetDsl with OptimizeDsl with PercolateDsl with ReindexDsl with ScriptDsl with SearchDsl with SettingsDsl with ScoreDsl with ScrollDsl with SnapshotDsl with TermVectorDsl with TokenizerDsl with TokenFilterDsl with UpdateDsl with ValidateDsl with DeprecatedElasticDsl with ElasticImplicits { case object add { def alias(alias: String): AddAliasExpectsIndex = { require(alias.nonEmpty, "alias name must not be null or empty") new AddAliasExpectsIndex(alias) } } def addAlias(name: String): AddAliasExpectsIndex = add alias name def aliases(aliasMutations: MutateAliasDefinition*): IndicesAliasesRequestDefinition = aliases(aliasMutations) def aliases(aliasMutations: Iterable[MutateAliasDefinition]): IndicesAliasesRequestDefinition = { new IndicesAliasesRequestDefinition(aliasMutations.toSeq: _*) } def agg = aggregation case object aggregation { def avg(name: String) = new AvgAggregationDefinition(name) def children(name: String) = new ChildrenAggregationDefinition(name) def count(name: String) = new ValueCountAggregationDefinition(name) def cardinality(name: String) = new CardinalityAggregationDefinition(name) def datehistogram(name: String) = new DateHistogramAggregation(name) def daterange(name: String) = new DateRangeAggregation(name) def extendedstats(name: String) = new ExtendedStatsAggregationDefinition(name) def filter(name: String) = new FilterAggregationDefinition(name) def filters(name: String) = new FiltersAggregationDefinition(name) def geobounds(name: String) = new GeoBoundsAggregationDefinition(name) def geodistance(name: String) = new GeoDistanceAggregationDefinition(name) def geohash(name: String) = new GeoHashGridAggregationDefinition(name) def global(name: String) = new GlobalAggregationDefinition(name) def histogram(name: String) = new HistogramAggregation(name) def ipRange(name: String) = new IpRangeAggregationDefinition(name) def max(name: String) = new MaxAggregationDefinition(name) def min(name: String) = new MinAggregationDefinition(name) def missing(name: String) = new MissingAggregationDefinition(name) def nested(name: String) = new NestedAggregationDefinition(name) def reverseNested(name: String) 
= new ReverseNestedAggregationDefinition(name) def percentiles(name: String) = new PercentilesAggregationDefinition(name) def percentileranks(name: String) = new PercentileRanksAggregationDefinition(name) def range(name: String) = new RangeAggregationDefinition(name) def sigTerms(name: String) = new SigTermsAggregationDefinition(name) def stats(name: String) = new StatsAggregationDefinition(name) def sum(name: String) = new SumAggregationDefinition(name) def terms(name: String) = new TermAggregationDefinition(name) def topHits(name: String) = new TopHitsAggregationDefinition(name) } case object clear { def cache(indexes: Iterable[String]): ClearCacheDefinition = new ClearCacheDefinition(indexes.toSeq) def cache(indexes: String*): ClearCacheDefinition = new ClearCacheDefinition(indexes) def scroll(id: String, ids: String*): ClearScrollDefinition = clearScroll(id +: ids) def scroll(ids: Iterable[String]): ClearScrollDefinition = clearScroll(ids) } def clearCache(indexes: String*): ClearCacheDefinition = new ClearCacheDefinition(indexes) def clearCache(indexes: Iterable[String]): ClearCacheDefinition = new ClearCacheDefinition(indexes.toSeq) def clearIndex(indexes: String*): ClearCacheDefinition = new ClearCacheDefinition(indexes) def clearIndex(indexes: Iterable[String]): ClearCacheDefinition = new ClearCacheDefinition(indexes.toSeq) def clearScroll(id: String, ids: String*): ClearScrollDefinition = ClearScrollDefinition(id +: ids) def clearScroll(ids: Iterable[String]): ClearScrollDefinition = ClearScrollDefinition(ids.toSeq) case object close { def index(index: String): CloseIndexDefinition = new CloseIndexDefinition(index) } def closeIndex(index: String): CloseIndexDefinition = close index index case object cluster { def persistentSettings(settings: Map[String, String]) = ClusterSettingsDefinition(settings, Map.empty) def transientSettings(settings: Map[String, String]) = ClusterSettingsDefinition(Map.empty, settings) } def clusterPersistentSettings(settings: Map[String, String]) = cluster persistentSettings settings def clusterTransientSettings(settings: Map[String, String]) = cluster transientSettings settings def clusterState = new ClusterStateDefinition def clusterHealth = new ClusterHealthDefinition() def clusterStats = new ClusterStatsDefinition @deprecated("use clusterStats", "1.6.1") def clusterStatus = new ClusterStatsDefinition def clusterHealth(indices: String*) = new ClusterHealthDefinition(indices: _*) case object completion { def suggestion(name: String) = new CompletionSuggestionDefinition(name) } def completionSuggestion: CompletionSuggestionDefinition = completion suggestion UUID.randomUUID.toString def completionSuggestion(name: String): CompletionSuggestionDefinition = completion suggestion name case object count { def from(index: String): CountDefinition = CountDefinition(IndexesAndTypes(index)) def from(indexes: String*): CountDefinition = CountDefinition(IndexesAndTypes(indexes)) def from(indexesAndTypes: IndexesAndTypes): CountDefinition = CountDefinition(indexesAndTypes) } def countFrom(index: String): CountDefinition = CountDefinition(IndexesAndTypes(index)) def countFrom(indexes: String*): CountDefinition = CountDefinition(IndexesAndTypes(indexes)) def countFrom(indexesAndTypes: IndexesAndTypes): CountDefinition = CountDefinition(indexesAndTypes) case object create { def index(name: String) = { require(name.nonEmpty, "index name must not be null or empty") new CreateIndexDefinition(name) } def snapshot(name: String) = { require(name.nonEmpty, "snapshot name must 
not be null or empty") new CreateSnapshotExpectsIn(name) } def repository(name: String): CreateRepositoryExpectsType = { require(name.nonEmpty, "repository name must not be null or empty") new CreateRepositoryExpectsType(name) } def template(name: String): CreateIndexTemplateExpectsPattern = { require(name.nonEmpty, "template name must not be null or empty") new CreateIndexTemplateExpectsPattern(name) } } def createIndex(name: String) = create index name def createSnapshot(name: String) = create snapshot name def createRepository(name: String) = create repository name def createTemplate(name: String) = create template name case object delete { def id(id: Any): DeleteByIdExpectsFrom = new DeleteByIdExpectsFrom(id) def index(indexes: String*): DeleteIndexDefinition = index(indexes) def index(indexes: Iterable[String]): DeleteIndexDefinition = new DeleteIndexDefinition(indexes.toSeq) def snapshot(name: String): DeleteSnapshotExpectsIn = new DeleteSnapshotExpectsIn(name) def template(name: String) = new DeleteIndexTemplateDefinition(name) } def delete(id: Any): DeleteByIdExpectsFrom = new DeleteByIdExpectsFrom(id) def deleteIndex(indexes: String*): DeleteIndexDefinition = deleteIndex(indexes) def deleteIndex(indexes: Iterable[String]): DeleteIndexDefinition = new DeleteIndexDefinition(indexes.toSeq) def deleteSnapshot(name: String): DeleteSnapshotExpectsIn = delete snapshot name def deleteTemplate(name: String): DeleteIndexTemplateDefinition = delete template name @deprecated("use explain(index, type, id).query(query)...", "2.0.0") case object explain { def id(id: String): ExplainExpectsIndex = new ExplainExpectsIndex(id) } def explain(index: String, `type`: String, id: String) = ExplainDefinition(index, `type`, id) case object field extends TypeableFields { val name = "" @deprecated("use field(name, type)", "2.0.0") def name(name: String): FieldDefinition = new FieldDefinition(name) @deprecated("use fieldSort(field:String)", "2.0.0") def sort(field: String): FieldSortDefinition = FieldSortDefinition(field) def stats(fields: String*): FieldStatsDefinition = new FieldStatsDefinition(fields = fields) def stats(fields: Iterable[String]): FieldStatsDefinition = new FieldStatsDefinition(fields = fields.toSeq) } @deprecated("use specific methods for each type, eg longField, stringField", "2.0.0") def field(name: String): FieldDefinition = FieldDefinition(name) def field(name: String, ft: AttachmentType.type) = new AttachmentFieldDefinition(name) def field(name: String, ft: BinaryType.type) = new BinaryFieldDefinition(name) def field(name: String, ft: BooleanType.type) = new BooleanFieldDefinition(name) def field(name: String, ft: ByteType.type) = new ByteFieldDefinition(name) def field(name: String, ft: CompletionType.type) = new CompletionFieldDefinition(name) def field(name: String, ft: DateType.type) = new DateFieldDefinition(name) def field(name: String, ft: DoubleType.type) = new DoubleFieldDefinition(name) def field(name: String, ft: FloatType.type) = new FloatFieldDefinition(name) def field(name: String, ft: GeoPointType.type) = new GeoPointFieldDefinition(name) def field(name: String, ft: GeoShapeType.type) = new GeoShapeFieldDefinition(name) def field(name: String, ft: IntegerType.type) = new IntegerFieldDefinition(name) def field(name: String, ft: IpType.type) = new IpFieldDefinition(name) def field(name: String, ft: LongType.type) = new LongFieldDefinition(name) def field(name: String, ft: MultiFieldType.type) = new MultiFieldDefinition(name) def field(name: String, ft: NestedType.type): 
NestedFieldDefinition = new NestedFieldDefinition(name) def field(name: String, ft: ObjectType.type): ObjectFieldDefinition = new ObjectFieldDefinition(name) def field(name: String, ft: ShortType.type) = new ShortFieldDefinition(name) def field(name: String, ft: StringType.type) = new StringFieldDefinition(name) def field(name: String, ft: TokenCountType.type) = new TokenCountDefinition(name) def fieldStats(fields: String*): FieldStatsDefinition = new FieldStatsDefinition(fields = fields) def fieldStats(fields: Iterable[String]): FieldStatsDefinition = new FieldStatsDefinition(fields = fields.toSeq) def fieldSort(field: String) = FieldSortDefinition(field) case object flush { def index(indexes: Iterable[String]): FlushIndexDefinition = new FlushIndexDefinition(indexes.toSeq) def index(indexes: String*): FlushIndexDefinition = new FlushIndexDefinition(indexes) } def flushIndex(indexes: Iterable[String]): FlushIndexDefinition = flush index indexes def flushIndex(indexes: String*): FlushIndexDefinition = flush index indexes case object fuzzyCompletion { def suggestion(name: String) = new FuzzyCompletionSuggestionDefinition(name) } def fuzzyCompletionSuggestion: FuzzyCompletionSuggestionDefinition = { fuzzyCompletionSuggestion(UUID.randomUUID.toString) } def fuzzyCompletionSuggestion(name: String): FuzzyCompletionSuggestionDefinition = fuzzyCompletion suggestion name case object geo { def sort(field: String): GeoDistanceSortDefinition = new GeoDistanceSortDefinition(field) } def geoSort(name: String): GeoDistanceSortDefinition = geo sort name case object get { def id(id: Any) = { require(id.toString.nonEmpty, "id must not be null or empty") new GetWithIdExpectsFrom(id.toString) } def alias(aliases: String*): GetAliasDefinition = new GetAliasDefinition(aliases) def cluster(stats: StatsKeyword): ClusterStatsDefinition = new ClusterStatsDefinition def cluster(health: HealthKeyword): ClusterHealthDefinition = new ClusterHealthDefinition def mapping(it: IndexesAndTypes): GetMappingDefinition = GetMappingDefinition(it) def segments(indexes: Indexes): GetSegmentsDefinition = GetSegmentsDefinition(indexes) def settings(indexes: Indexes): GetSettingsDefinition = GetSettingsDefinition(indexes) def template(name: String): GetTemplateDefinition = GetTemplateDefinition(name) def snapshot(names: Iterable[String]): GetSnapshotsExpectsFrom = new GetSnapshotsExpectsFrom(names.toSeq) def snapshot(names: String*): GetSnapshotsExpectsFrom = snapshot(names) } def get(id: Any): GetWithIdExpectsFrom = new GetWithIdExpectsFrom(id.toString) def getAlias(aliases: String*): GetAliasDefinition = new GetAliasDefinition(aliases) def getMapping(ixTp: IndexAndTypes): GetMappingDefinition = GetMappingDefinition(IndexesAndTypes(ixTp)) def getSegments(indexes: Indexes): GetSegmentsDefinition = get segments indexes def getSettings(indexes: Indexes): GetSettingsDefinition = get settings indexes def getSnapshot(names: Iterable[String]): GetSnapshotsExpectsFrom = get snapshot names def getSnapshot(names: String*): GetSnapshotsExpectsFrom = get snapshot names def getTemplate(name: String): GetTemplateDefinition = get template name trait HealthKeyword case object health extends HealthKeyword @deprecated("use highlight(field)", "2.0.0") case object highlight { @deprecated("use highlight(field)", "2.0.0") def field(field: String): HighlightDefinition = HighlightDefinition(field) } def highlight(field: String): HighlightDefinition = HighlightDefinition(field) trait StatsKeyword case object stats extends StatsKeyword case object index { 
def exists(indexes: Iterable[String]): IndexExistsDefinition = new IndexExistsDefinition(indexes.toSeq) def exists(indexes: String*): IndexExistsDefinition = new IndexExistsDefinition(indexes) def into(indexType: IndexAndTypes): IndexDefinition = { require(indexType != null, "indexType must not be null or empty") new IndexDefinition(indexType.index, indexType.types.head) } def stats(indexes: Iterable[String]): IndicesStatsDefinition = new IndicesStatsDefinition(indexes.toSeq) def stats(indexes: String*): IndicesStatsDefinition = new IndicesStatsDefinition(indexes) } def indexExists(indexes: Iterable[String]): IndexExistsDefinition = new IndexExistsDefinition(indexes.toSeq) def indexExists(indexes: String*): IndexExistsDefinition = new IndexExistsDefinition(indexes) def indexInto(indexType: IndexAndTypes): IndexDefinition = { require(indexType != null, "indexType must not be null or empty") new IndexDefinition(indexType.index, indexType.types.head) } def indexInto(index: String, `type`: String): IndexDefinition = { require(index.nonEmpty, "index must not be null or empty") new IndexDefinition(index, `type`) } def indexStats(indexes: Iterable[String]): IndicesStatsDefinition = new IndicesStatsDefinition(indexes.toSeq) def indexStats(indexes: String*): IndicesStatsDefinition = indexStats(indexes) case object inner { def hits(name: String): QueryInnerHitsDefinition = new QueryInnerHitsDefinition(name) def hit(name: String): InnerHitDefinition = new InnerHitDefinition(name) } def innerHit(name: String): InnerHitDefinition = inner hit name def innerHits(name: String): QueryInnerHitsDefinition = inner hits name case object mapping { def name(name: String): MappingDefinition = { require(name.nonEmpty, "mapping name must not be null or empty") new MappingDefinition(name) } } def mapping(name: String): MappingDefinition = mapping name name def multiget(gets: Iterable[GetDefinition]): MultiGetDefinition = new MultiGetDefinition(gets) def multiget(gets: GetDefinition*): MultiGetDefinition = new MultiGetDefinition(gets) case object open { def index(index: String): OpenIndexDefinition = new OpenIndexDefinition(index) } def openIndex(index: String): OpenIndexDefinition = open index index @deprecated("use optimizeIndex", "2.0.0") case object optimize { @deprecated("use optimizeIndex", "2.0.0") def index(indexes: Iterable[String]): OptimizeDefinition = OptimizeDefinition(indexes.toSeq) @deprecated("use optimizeIndex", "2.0.0") def index(indexes: String*): OptimizeDefinition = OptimizeDefinition(indexes.toSeq) } def optimizeIndex(indexes: String*): OptimizeDefinition = OptimizeDefinition(indexes) def optimizeIndex(indexes: Iterable[String]): OptimizeDefinition = OptimizeDefinition(indexes.toSeq) @deprecated("use percolate", "2.0.0") case object percolate { @deprecated("use percolate", "2.0.0") def in(indexType: IndexAndTypes): PercolateDefinition = PercolateDefinition(IndexesAndTypes(indexType)) } def percolateIn(indexType: IndexAndTypes): PercolateDefinition = percolateIn(IndexesAndTypes(indexType)) def percolateIn(indexesAndTypes: IndexesAndTypes): PercolateDefinition = PercolateDefinition(indexesAndTypes) case object phrase { def suggestion(name: String): PhraseSuggestionDefinition = new PhraseSuggestionDefinition(name) } def phraseSuggestion: PhraseSuggestionDefinition = phrase suggestion UUID.randomUUID.toString def phraseSuggestion(name: String): PhraseSuggestionDefinition = phrase suggestion name case object put { def mapping(indexType: IndexAndTypes): PutMappingDefinition = new 
PutMappingDefinition(indexType) } def putMapping(indexType: IndexAndTypes): PutMappingDefinition = new PutMappingDefinition(indexType) case object recover { def index(indexes: Iterable[String]): IndexRecoveryDefinition = new IndexRecoveryDefinition(indexes.toSeq) def index(indexes: String*): IndexRecoveryDefinition = new IndexRecoveryDefinition(indexes) } def recoverIndex(indexes: String*): IndexRecoveryDefinition = recover index indexes def recoverIndex(indexes: Iterable[String]): IndexRecoveryDefinition = recover index indexes case object refresh { def index(indexes: Iterable[String]): RefreshIndexDefinition = new RefreshIndexDefinition(indexes.toSeq) def index(indexes: String*): RefreshIndexDefinition = new RefreshIndexDefinition(indexes) } def refreshIndex(indexes: Iterable[String]): RefreshIndexDefinition = refresh index indexes def refreshIndex(indexes: String*): RefreshIndexDefinition = refresh index indexes case object remove { def alias(alias: String): RemoveAliasExpectsIndex = { require(alias.nonEmpty, "alias must not be null or empty") new RemoveAliasExpectsIndex(alias) } } def removeAlias(alias: String): RemoveAliasExpectsIndex = remove alias alias case object register { def id(id: Any): RegisterExpectsIndex = { require(id.toString.nonEmpty, "id must not be null or empty") new RegisterExpectsIndex(id.toString) } } def register(id: Any): RegisterExpectsIndex = register id id case object restore { def snapshot(name: String): RestoreSnapshotExpectsFrom = { require(name.nonEmpty, "snapshot name must not be null or empty") new RestoreSnapshotExpectsFrom(name) } } def restoreSnapshot(name: String): RestoreSnapshotExpectsFrom = restore snapshot name @deprecated("use scoreSort()", "2.0.0") case object score { @deprecated("use scoreSort()", "2.0.0") def sort: ScoreSortDefinition = ScoreSortDefinition() } def scoreSort(): ScoreSortDefinition = ScoreSortDefinition() case object script { @deprecated("use scriptSort(script)", "2.0.0") def sort(script: String): ScriptSortDefinition = new ScriptSortDefinition(script) @deprecated("use scriptField(script)", "2.0.0") def field(n: String): ExpectsScript = ExpectsScript(field = n) } def scriptSort(scriptText: String): ScriptSortDefinition = ScriptSortDefinition(scriptText) case object search { def in(indexesTypes: IndexesAndTypes): SearchDefinition = new SearchDefinition(indexesTypes) def scroll(id: String): SearchScrollDefinition = new SearchScrollDefinition(id) } def search(indexType: IndexAndTypes): SearchDefinition = search in indexType def search(indexes: String*): SearchDefinition = new SearchDefinition(IndexesAndTypes(indexes)) def searchScroll(id: String): SearchScrollDefinition = new SearchScrollDefinition(id) // -- helper methods to create the field definitions -- def attachmentField(name: String) = field(name).typed(AttachmentType) def binaryField(name: String) = field(name).typed(BinaryType) def booleanField(name: String) = field(name).typed(BooleanType) def byteField(name: String) = field(name).typed(ByteType) def completionField(name: String) = field(name).typed(CompletionType) def dateField(name: String) = field(name).typed(DateType) def doubleField(name: String) = field(name, DoubleType) def floatField(name: String) = field(name, FloatType) def geopointField(name: String) = field(name, GeoPointType) def geoshapeField(name: String) = field(name, GeoShapeType) def multiField(name: String) = field(name, MultiFieldType) def nestedField(name: String): NestedFieldDefinition = field(name).typed(NestedType) def objectField(name: String): 
ObjectFieldDefinition = field(name).typed(ObjectType) def intField(name: String) = field(name, IntegerType) def ipField(name: String) = field(name, IpType) def longField(name: String) = field(name, LongType) def scriptField(n: String): ExpectsScript = ExpectsScript(field = n) def scriptField(n: String, script: String): ScriptFieldDefinition = ScriptFieldDefinition(field = n, script) def shortField(name: String) = field(name, ShortType) def stringField(name: String): StringFieldDefinition = field(name, StringType) def tokenCountField(name: String) = field(name).typed(TokenCountType) def suggestions(suggestions: SuggestionDefinition*): SuggestDefinition = SuggestDefinition(suggestions) def suggestions(suggestions: Iterable[SuggestionDefinition]): SuggestDefinition = SuggestDefinition(suggestions.toSeq) def dynamicTemplate(name: String): DynamicTemplateExpectsMapping = new DynamicTemplateExpectsMapping(name) def dynamicTemplate(name: String, mapping: TypedFieldDefinition): DynamicTemplateDefinition = { DynamicTemplateDefinition(name, mapping) } case object term { def suggestion(name: String): TermSuggestionDefinition = new TermSuggestionDefinition(name) } def termVectors(index: String, `type`: String, id: String): TermVectorsDefinition = { TermVectorsDefinition(index, `type`, id) } def termSuggestion: TermSuggestionDefinition = term suggestion UUID.randomUUID.toString def termSuggestion(name: String): TermSuggestionDefinition = term suggestion name case object timestamp { def enabled(en: Boolean): TimestampDefinition = TimestampDefinition(en) } def timestamp(en: Boolean): TimestampDefinition = TimestampDefinition(en) class TypesExistExpectsIn(types: Seq[String]) { def in(indexes: String*): TypesExistsDefinition = new TypesExistsDefinition(indexes, types) } case object types { def exist(types: String*): TypesExistExpectsIn = new TypesExistExpectsIn(types) } def typesExist(types: String*): TypesExistExpectsIn = new TypesExistExpectsIn(types) case object update { def id(id: Any): UpdateExpectsIndex = { require(id.toString.nonEmpty, "id must not be null or empty") new UpdateExpectsIndex(id.toString) } def settings(index: String): UpdateSettingsDefinition = new UpdateSettingsDefinition(index) } def update(id: Any): UpdateExpectsIndex = new UpdateExpectsIndex(id.toString) case object validate { def in(indexType: IndexAndTypes): ValidateDefinition = ValidateDefinition(indexType.index, indexType.types.head) def in(value: String): ValidateDefinition = { require(value.nonEmpty, "value must not be null or empty") in(IndexAndTypes(value)) } def in(index: String, `type`: String): ValidateDefinition = ValidateDefinition(index, `type`) def in(tuple: (String, String)): ValidateDefinition = ValidateDefinition(tuple._1, tuple._2) } def validateIn(indexType: IndexAndTypes): ValidateDefinition = validate in indexType def validateIn(value: String): ValidateDefinition = validate in value implicit class RichFuture[T](future: Future[T]) { def await(implicit duration: Duration = 10.seconds): T = Await.result(future, duration) } } object ElasticDsl extends ElasticDsl
nicoo/elastic4s
elastic4s-core/src/main/scala/com/sksamuel/elastic4s/ElasticDsl.scala
Scala
apache-2.0
25,784
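The trait is designed to be imported wholesale through its companion object and combined with a client. A short sketch of typical call sites against this 1.x/2.x-era DSL; the index and type names are made up, and the client value is assumed to exist.

import com.sksamuel.elastic4s.ElasticDsl._

// `client` is assumed to be an already-constructed ElasticClient.
val tagAgg = aggregation terms "top-tags" field "tags"

val searchReq = search in "bands" -> "artists" query "coldplay" aggregations tagAgg limit 10

// client.execute(searchReq).await  // RichFuture.await from the trait, default 10s timeout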
package mesosphere.marathon import mesosphere.marathon.Protos.MarathonTask import mesosphere.marathon.core.base.{ Clock, ShutdownHooks } import mesosphere.marathon.core.launchqueue.{ LaunchQueueConfig, LaunchQueueModule } import mesosphere.marathon.core.leadership.AlwaysElectedLeadershipModule import mesosphere.marathon.core.matcher.DummyOfferMatcherManager import mesosphere.marathon.core.task.bus.TaskBusModule import mesosphere.marathon.integration.setup.WaitTestSupport import mesosphere.marathon.state.{ AppRepository, PathId } import mesosphere.marathon.tasks.TaskFactory.CreatedTask import mesosphere.marathon.tasks.{ TaskFactory, TaskIdUtil, TaskTracker } import org.apache.mesos.Protos.TaskID import org.hamcrest.{ BaseMatcher, Description } import org.mockito.Matchers import org.mockito.Mockito.{ when => call, _ } import org.mockito.internal.matchers.Equality import org.scalatest.{ BeforeAndAfter, GivenWhenThen } import scala.concurrent.Await import scala.concurrent.duration._ class LaunchQueueModuleTest extends MarathonSpec with BeforeAndAfter with GivenWhenThen { test("empty queue returns no results") { When("querying queue") val apps = taskQueue.list Then("no apps are returned") assert(apps.isEmpty) } test("An added queue item is returned in list") { Given("a task queue with one item") call(taskTracker.getTasks(app.id)).thenReturn(Iterable.empty[MarathonTask]) taskQueue.add(app) When("querying its contents") val list = taskQueue.list Then("we get back the added app") assert(list.size == 1) assert(list.head.app == app) assert(list.head.tasksLeftToLaunch == 1) assert(list.head.tasksLaunchedOrRunning == 0) assert(list.head.taskLaunchesInFlight == 0) verify(taskTracker).getTasks(app.id) } test("An added queue item is reflected via count") { Given("a task queue with one item") call(taskTracker.getTasks(app.id)).thenReturn(Iterable.empty[MarathonTask]) taskQueue.add(app) When("querying its count") val count = taskQueue.count(app.id) Then("we get a count == 1") assert(count == 1) verify(taskTracker).getTasks(app.id) } test("A purged queue item has a count of 0") { Given("a task queue with one item which is purged") call(taskTracker.getTasks(app.id)).thenReturn(Iterable.empty[MarathonTask]) taskQueue.add(app) taskQueue.purge(app.id) When("querying its count") val count = taskQueue.count(app.id) Then("we get a count == 0") assert(count == 0) verify(taskTracker).getTasks(app.id) } test("A re-added queue item has a count of 1") { Given("a task queue with one item which is purged") call(taskTracker.getTasks(app.id)).thenReturn(Iterable.empty[MarathonTask]) taskQueue.add(app) taskQueue.purge(app.id) taskQueue.add(app) When("querying its count") val count = taskQueue.count(app.id) Then("we get a count == 1") assert(count == 1) verify(taskTracker, times(2)).getTasks(app.id) } test("adding a queue item registers new offer matcher") { Given("An empty task tracker") call(taskTracker.getTasks(app.id)).thenReturn(Iterable.empty[MarathonTask]) When("Adding an app to the taskQueue") taskQueue.add(app) Then("A new offer matcher gets registered") WaitTestSupport.waitUntil("registered as offer matcher", 1.second) { offerMatcherManager.offerMatchers.size == 1 } verify(taskTracker).getTasks(app.id) } test("purging a queue item UNregisters offer matcher") { Given("An app in the queue") call(taskTracker.getTasks(app.id)).thenReturn(Iterable.empty[MarathonTask]) taskQueue.add(app) When("The app is purged") taskQueue.purge(app.id) Then("No offer matchers remain registered") 
assert(offerMatcherManager.offerMatchers.isEmpty) verify(taskTracker).getTasks(app.id) } test("an offer gets unsuccessfully matched against an item in the queue") { val offer = MarathonTestHelper.makeBasicOffer().build() Given("An app in the queue") call(taskTracker.getTasks(app.id)).thenReturn(Map.empty[String, MarathonTask].values) taskQueue.add(app) WaitTestSupport.waitUntil("registered as offer matcher", 1.second) { offerMatcherManager.offerMatchers.size == 1 } When("we ask for matching an offer") call(taskFactory.newTask(Matchers.any(), Matchers.any(), Matchers.any())).thenReturn(None) val matchFuture = offerMatcherManager.offerMatchers.head.matchOffer(clock.now() + 3.seconds, offer) val matchedTasks = Await.result(matchFuture, 3.seconds) Then("the offer gets passed to the task factory and respects the answer") verify(taskFactory).newTask(Matchers.eq(app), Matchers.eq(offer), Matchers.argThat(SameAsSeq(Seq.empty))) assert(matchedTasks.offerId == offer.getId) assert(matchedTasks.tasks == Seq.empty) verify(taskTracker).getTasks(app.id) } test("an offer gets successfully matched against an item in the queue") { val offer = MarathonTestHelper.makeBasicOffer().build() val taskId: TaskID = TaskIdUtil.newTaskId(app.id) val mesosTask = MarathonTestHelper.makeOneCPUTask("").setTaskId(taskId).build() val marathonTask = MarathonTask.newBuilder().setId(taskId.getValue).build() val createdTask = CreatedTask(mesosTask, marathonTask) Given("An app in the queue") call(taskTracker.getTasks(app.id)).thenReturn(Iterable.empty[MarathonTask]) call(taskFactory.newTask(Matchers.any(), Matchers.any(), Matchers.any())).thenReturn(Some(createdTask)) taskQueue.add(app) WaitTestSupport.waitUntil("registered as offer matcher", 1.second) { offerMatcherManager.offerMatchers.size == 1 } When("we ask for matching an offer") val matchFuture = offerMatcherManager.offerMatchers.head.matchOffer(clock.now() + 3.seconds, offer) val matchedTasks = Await.result(matchFuture, 3.seconds) Then("the offer gets passed to the task factory and respects the answer") verify(taskFactory).newTask(Matchers.eq(app), Matchers.eq(offer), Matchers.argThat(SameAsSeq(Seq.empty))) assert(matchedTasks.offerId == offer.getId) assert(matchedTasks.tasks.map(_.taskInfo) == Seq(mesosTask)) verify(taskTracker).getTasks(app.id) } private[this] val app = MarathonTestHelper.makeBasicApp().copy(id = PathId("/app")) private[this] var shutdownHooks: ShutdownHooks = _ private[this] var clock: Clock = _ private[this] var taskBusModule: TaskBusModule = _ private[this] var offerMatcherManager: DummyOfferMatcherManager = _ private[this] var appRepository: AppRepository = _ private[this] var taskTracker: TaskTracker = _ private[this] var taskFactory: TaskFactory = _ private[this] var module: LaunchQueueModule = _ private[this] def taskQueue = module.taskQueue before { shutdownHooks = ShutdownHooks() clock = Clock() taskBusModule = new TaskBusModule() offerMatcherManager = new DummyOfferMatcherManager() taskTracker = mock[TaskTracker]("taskTracker") taskFactory = mock[TaskFactory]("taskFactory") appRepository = mock[AppRepository]("appRepository") val config: LaunchQueueConfig = new LaunchQueueConfig {} config.afterInit() module = new LaunchQueueModule( config, AlwaysElectedLeadershipModule(shutdownHooks), clock, subOfferMatcherManager = offerMatcherManager, maybeOfferReviver = None, appRepository, taskTracker, taskFactory ) } after { verifyNoMoreInteractions(appRepository) verifyNoMoreInteractions(taskTracker) verifyNoMoreInteractions(taskFactory) 
shutdownHooks.shutdown() } }
Kosta-Github/marathon
src/test/scala/mesosphere/marathon/LaunchQueueModuleTest.scala
Scala
apache-2.0
7,684
package chat.tox.antox.fragments import android.content.{Intent, SharedPreferences} import android.os.Bundle import android.preference.PreferenceManager import android.support.v4.app.Fragment import android.view.View.OnClickListener import android.view.{LayoutInflater, View, ViewGroup} import android.widget.Button import chat.tox.antox.R import chat.tox.antox.activities.SettingsActivity import chat.tox.antox.tox.ToxSingleton import chat.tox.antox.utils.{ConnectionManager, ConnectionTypeChangeListener} class WifiWarningFragment extends Fragment { private var wifiWarningBar: Button = _ private var preferencesListener: SharedPreferences.OnSharedPreferenceChangeListener = _ private var preferences: SharedPreferences = _ override def onCreateView(inflater: LayoutInflater, container: ViewGroup, savedInstanceState: Bundle): View = { val rootView = inflater.inflate(R.layout.fragment_wifi_warning, container, false) wifiWarningBar = rootView.findViewById(R.id.wifi_only_warning).asInstanceOf[Button] wifiWarningBar.setOnClickListener(new OnClickListener { override def onClick(v: View): Unit = { onClickWifiOnlyWarning(v) } }) rootView } override def onStart(): Unit = { super.onStart() preferences = PreferenceManager.getDefaultSharedPreferences(getActivity) updateWifiWarning() ConnectionManager.addConnectionTypeChangeListener(new ConnectionTypeChangeListener { override def connectionTypeChange(connectionType: Int): Unit = { updateWifiWarning() } }) preferencesListener = new SharedPreferences.OnSharedPreferenceChangeListener() { override def onSharedPreferenceChanged(prefs: SharedPreferences, key: String): Unit = { key match { case "wifi_only" => updateWifiWarning() case _ => } } } preferences.registerOnSharedPreferenceChangeListener(preferencesListener) } def updateWifiWarning(): Unit = { if (getActivity == null) return if (!ToxSingleton.isToxConnected(preferences, getActivity)) { showWifiWarning() } else { hideWifiWarning() } } def onClickWifiOnlyWarning(view: View): Unit = { val intent = new Intent(getActivity, classOf[SettingsActivity]) startActivity(intent) } def showWifiWarning(): Unit = { getView.setVisibility(View.VISIBLE) } def hideWifiWarning(): Unit = { getView.setVisibility(View.GONE) } }
wiiam/Antox
app/src/main/scala/chat/tox/antox/fragments/WifiWarningFragment.scala
Scala
gpl-3.0
2,466
/* * Copyright 2016 The BigDL Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intel.analytics.bigdl.utils.tf.loaders import java.nio.ByteOrder import com.intel.analytics.bigdl.Module import com.intel.analytics.bigdl.nn.tf.Abs import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric import com.intel.analytics.bigdl.utils.tf.Context import com.intel.analytics.bigdl.utils.tf.loaders.Utils.getType import org.tensorflow.framework.{DataType, NodeDef} import scala.reflect.ClassTag class Abs extends TensorflowOpsLoader { override def build[T: ClassTag](nodeDef: NodeDef, byteOrder: ByteOrder, context: Context[T])(implicit ev: TensorNumeric[T]): Module[T] = { val t = getType(nodeDef.getAttrMap, "T") if (t == DataType.DT_FLOAT) { Abs[T, Float]() } else if (t == DataType.DT_DOUBLE) { Abs[T, Double]() } else { throw new UnsupportedOperationException(s"Not support load Abs when type is ${t}") } } }
yiheng/BigDL
spark/dl/src/main/scala/com/intel/analytics/bigdl/utils/tf/loaders/Abs.scala
Scala
apache-2.0
1,529
package com.lynbrookrobotics.potassium.tasks /** * A continuous task where two subtasks are run sequentially * @param first the first task to run * @param second the task to run after the first task */ class SequentialContinuousTask private[tasks] (first: FiniteTask, second: ContinuousTask) extends ContinuousTask with FiniteTaskFinishedListener { private var currentPhase: SequentialPhase = Stopped override def onFinished(task: FiniteTask): Unit = { if (currentPhase == RunningFirst && task == first) { currentPhase = RunningSecond second.init() } } override def onStart(): Unit = { currentPhase = RunningFirst first.setFinishedListener(this) first.init() } override def onEnd(): Unit = { if (currentPhase == RunningFirst) { first.abort() } else if (currentPhase == RunningSecond) { second.abort() } currentPhase = Stopped } }
Team846/potassium
core/shared/src/main/scala/com/lynbrookrobotics/potassium/tasks/SequentialContinuousTask.scala
Scala
mit
921
package mesosphere import org.scalatest.concurrent.Eventually import org.scalatest.time.{Milliseconds, Span} import scala.concurrent.duration._ /** * Helpers which wait for conditions. */ object WaitTestSupport extends Eventually { def validFor(description: String, until: FiniteDuration)(valid: => Boolean): Boolean = { val deadLine = until.fromNow def checkValid(): Boolean = { if (!valid) throw new IllegalStateException(s"$description not valid for $until. Give up.") if (deadLine.isOverdue()) true else { Thread.sleep(100) checkValid() } } checkValid() } def waitUntil(description: String, maxWait: FiniteDuration)(fn: => Boolean): Unit = { eventually(timeout(Span(maxWait.toMillis, Milliseconds))) { if (!fn) throw new RuntimeException(s"$description not satisfied") } } def waitUntil(description: String)(fn: => Boolean)(implicit patienceConfig: PatienceConfig): Unit = { eventually { if (!fn) throw new RuntimeException(s"$description not satisfied") } } }
mesosphere/marathon
src/test/scala/mesosphere/WaitTestSupport.scala
Scala
apache-2.0
1,070
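Both helpers are used by polling a condition inside a test; LaunchQueueModuleTest above shows the waitUntil form. A short sketch, where offerMatcherManager and taskQueue are the values from that test rather than anything defined here.

import scala.concurrent.duration._

// Poll until the condition holds, failing after the timeout (as in LaunchQueueModuleTest above).
WaitTestSupport.waitUntil("registered as offer matcher", 1.second) {
  offerMatcherManager.offerMatchers.size == 1
}

// Assert that a condition keeps holding for the whole window; throws if it ever becomes false.
WaitTestSupport.validFor("queue stays empty", 2.seconds) {
  taskQueue.list.isEmpty
}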
/* * Copyright (C) 2009-2013 Typesafe Inc. <http://www.typesafe.com> */ package java8guide.ws import org.specs2.mutable._ import play.api.test._ import play.api.mvc._ import play.api.libs.json._ import play.test.Helpers._ import play.api.test.FakeApplication import play.api.libs.json.JsObject import javaguide.testhelpers.MockJavaActionHelper import play.api.http.Status object JavaWSSpec extends Specification with Results with Status { // It's much easier to test this in Scala because we need to set up a // fake application with routes. def fakeApplication = FakeApplication(withRoutes = { case ("GET", "/feed") => Action { val obj: JsObject = Json.obj( "title" -> "foo", "commentsUrl" -> "http://localhost:3333/comments" ) Ok(obj) } case ("GET", "/comments") => Action { val obj: JsObject = Json.obj( "count" -> "10" ) Ok(obj) } case (_, _) => Action { BadRequest("no binding found") } }) "The Java WS class" should { "call WS correctly" in new WithServer(app = fakeApplication, port = 3333) { val result = MockJavaActionHelper.call(app.injector.instanceOf[JavaWS.Controller1], fakeRequest()) status(result) must equalTo(OK) } "compose WS calls successfully" in new WithServer(app = fakeApplication, port = 3333) { val result = MockJavaActionHelper.call(app.injector.instanceOf[JavaWS.Controller2], fakeRequest()) status(result) must equalTo(OK) contentAsString(result) must beEqualTo("Number of comments: 10") } } }
jyotikamboj/container
pf-documentation/manual/working/javaGuide/main/ws/java8code/java8guide/ws/JavaWSSpec.scala
Scala
mit
1,646
package justin.db.kryo import java.io.{ByteArrayInputStream, ByteArrayOutputStream} import java.util.UUID import com.esotericsoftware.kryo.Kryo import com.esotericsoftware.kryo.io.{Input, Output} import justin.db.Data import justin.db.consistenthashing.NodeId import justin.db.vectorclocks.{Counter, VectorClock} import org.scalatest.{FlatSpec, Matchers} class DataSerializerTest extends FlatSpec with Matchers { behavior of "Data Serializer" it should "serialize/deserialize correctly" in { // kryo init val kryo = new Kryo() kryo.register(classOf[justin.db.Data], DataSerializer) // object val vClock = VectorClock[NodeId](Map(NodeId(1) -> Counter(3))) val timestamp = System.currentTimeMillis() val serializedData = Data(id = UUID.randomUUID(), value = "some value", vClock, timestamp) // serialization val bos = new ByteArrayOutputStream() val output = new Output(bos) val _ = kryo.writeObject(output, serializedData) output.flush() // deserialization val bis = new ByteArrayInputStream(bos.toByteArray) val input = new Input(bis) val deserializedData = kryo.readObject(input, classOf[Data]) serializedData shouldBe deserializedData } }
speedcom/JustinDB
justin-core/src/test/scala/justin/db/kryo/DataSerializerTest.scala
Scala
apache-2.0
1,269
/* * Copyright 2001-2013 Artima, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.scalatest.words import org.scalatest.matchers._ import scala.collection.GenTraversable import org.scalatest.FailureMessages import org.scalatest.UnquotedString import org.scalautils.Equality import scala.annotation.tailrec /** * This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="../Matchers.html"><code>Matchers</code></a> for an overview of * the matchers DSL. * * @author Bill Venners */ class TheSameIteratedElementsAsContainMatcher[T](right: GenTraversable[T], equality: Equality[T]) extends ContainMatcher[T] { @tailrec private def checkEqual(left: Iterator[T], right: Iterator[T]): Boolean = { if (left.hasNext && right.hasNext) { val nextLeft = left.next val nextRight = right.next if (!equality.areEqual(nextLeft, nextRight)) false else checkEqual(left, right) } else left.isEmpty && right.isEmpty } /** * This method contains the matching code for theSameIteratedElementsAs. */ def apply(left: GenTraversable[T]): MatchResult = MatchResult( checkEqual(left.toIterator, right.toIterator), FailureMessages("didNotContainSameIteratedElements", left, right), FailureMessages("containedSameIteratedElements", left, right) ) }
svn2github/scalatest
src/main/scala/org/scalatest/words/TheSameIteratedElementsAsContainMatcher.scala
Scala
apache-2.0
1,902
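In the milestone-era matchers DSL this class was written for, it is reached through the contain word. The exact import path and syntax varied across ScalaTest milestones, so treat the snippet below as an assumed usage rather than the canonical form.

// Assumed milestone-era syntax: iteration order matters, unlike plain element containment.
List(1, 2, 3) should contain theSameIteratedElementsAs List(1, 2, 3)   // passes

// Same elements in a different iteration order would fail:
// List(1, 2, 3) should contain theSameIteratedElementsAs List(3, 2, 1)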
/* __ *\\ ** ________ ___ / / ___ Scala API ** ** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** ** __\\ \\/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** ** /____/\\___/_/ |_/____/_/ | | ** ** |/ ** \\* */ package scala package collection import mutable.ArrayBuffer import scala.annotation.{tailrec, migration} import scala.annotation.unchecked.{uncheckedVariance => uV} import immutable.Stream /** The `Iterator` object provides various functions for creating specialized iterators. * * @author Martin Odersky * @author Matthias Zenger * @version 2.8 * @since 2.8 */ object Iterator { /** With the advent of `TraversableOnce` and `Iterator`, it can be useful to have a builder which * operates on `Iterator`s so they can be treated uniformly along with the collections. * See `scala.util.Random.shuffle` for an example. */ implicit def IteratorCanBuildFrom[A] = new TraversableOnce.BufferedCanBuildFrom[A, Iterator] { def bufferToColl[B](coll: ArrayBuffer[B]) = coll.iterator def traversableToColl[B](t: GenTraversable[B]) = t.toIterator } /** The iterator which produces no values. */ val empty: Iterator[Nothing] = new AbstractIterator[Nothing] { def hasNext: Boolean = false def next(): Nothing = throw new NoSuchElementException("next on empty iterator") } /** Creates an iterator which produces a single element. * '''Note:''' Equivalent, but more efficient than Iterator(elem) * * @param elem the element * @return An iterator which produces `elem` on the first call to `next`, * and which has no further elements. */ def single[A](elem: A): Iterator[A] = new AbstractIterator[A] { private var hasnext = true def hasNext: Boolean = hasnext def next(): A = if (hasnext) { hasnext = false; elem } else empty.next() } /** Creates an iterator with given elements. * * @param elems The elements returned one-by-one from the iterator * @return An iterator which produces the given elements on the * first calls to `next`, and which has no further elements. */ def apply[A](elems: A*): Iterator[A] = elems.iterator /** Creates iterator that produces the results of some element computation a number of times. * * @param len the number of elements returned by the iterator. * @param elem the element computation * @return An iterator that produces the results of `n` evaluations of `elem`. */ def fill[A](len: Int)(elem: => A): Iterator[A] = new AbstractIterator[A] { private var i = 0 def hasNext: Boolean = i < len def next(): A = if (hasNext) { i += 1; elem } else empty.next() } /** Creates an iterator producing the values of a given function over a range of integer values starting from 0. * * @param end The number of elements returned by the iterator * @param f The function computing element values * @return An iterator that produces the values `f(0), ..., f(n -1)`. */ def tabulate[A](end: Int)(f: Int => A): Iterator[A] = new AbstractIterator[A] { private var i = 0 def hasNext: Boolean = i < end def next(): A = if (hasNext) { val result = f(i); i += 1; result } else empty.next() } /** Creates nn iterator returning successive values in some integer interval. * * @param start the start value of the iterator * @param end the end value of the iterator (the first value NOT returned) * @return the iterator producing values `start, start + 1, ..., end - 1` */ def range(start: Int, end: Int): Iterator[Int] = range(start, end, 1) /** An iterator producing equally spaced values in some integer interval. 
* * @param start the start value of the iterator * @param end the end value of the iterator (the first value NOT returned) * @param step the increment value of the iterator (must be positive or negative) * @return the iterator producing values `start, start + step, ...` up to, but excluding `end` */ def range(start: Int, end: Int, step: Int): Iterator[Int] = new AbstractIterator[Int] { if (step == 0) throw new IllegalArgumentException("zero step") private var i = start def hasNext: Boolean = (step <= 0 || i < end) && (step >= 0 || i > end) def next(): Int = if (hasNext) { val result = i; i += step; result } else empty.next() } /** Creates an infinite iterator that repeatedly applies a given function to the previous result. * * @param start the start value of the iterator * @param f the function that's repeatedly applied * @return the iterator producing the infinite sequence of values `start, f(start), f(f(start)), ...` */ def iterate[T](start: T)(f: T => T): Iterator[T] = new AbstractIterator[T] { private[this] var first = true private[this] var acc = start def hasNext: Boolean = true def next(): T = { if (first) first = false else acc = f(acc) acc } } /** Creates an infinite-length iterator which returns successive values from some start value. * @param start the start value of the iterator * @return the iterator producing the infinite sequence of values `start, start + 1, start + 2, ...` */ def from(start: Int): Iterator[Int] = from(start, 1) /** Creates an infinite-length iterator returning values equally spaced apart. * * @param start the start value of the iterator * @param step the increment between successive values * @return the iterator producing the infinite sequence of values `start, start + 1 * step, start + 2 * step, ...` */ def from(start: Int, step: Int): Iterator[Int] = new AbstractIterator[Int] { private var i = start def hasNext: Boolean = true def next(): Int = { val result = i; i += step; result } } /** Creates an infinite-length iterator returning the results of evaluating an expression. * The expression is recomputed for every element. * * @param elem the element computation. * @return the iterator containing an infinite number of results of evaluating `elem`. */ def continually[A](elem: => A): Iterator[A] = new AbstractIterator[A] { def hasNext = true def next = elem } /** Creates an iterator to which other iterators can be appended efficiently. * Nested ConcatIterators are merged to avoid blowing the stack. 
*/ private final class ConcatIterator[+A](private var current: Iterator[A @uV]) extends Iterator[A] { private var tail: ConcatIteratorCell[A @uV] = null private var last: ConcatIteratorCell[A @uV] = null private var currentHasNextChecked = false // Advance current to the next non-empty iterator // current is set to null when all iterators are exhausted @tailrec private[this] def advance(): Boolean = { if (tail eq null) { current = null last = null false } else { current = tail.headIterator tail = tail.tail merge() if (currentHasNextChecked) true else if (current.hasNext) { currentHasNextChecked = true true } else advance() } } // If the current iterator is a ConcatIterator, merge it into this one @tailrec private[this] def merge(): Unit = if (current.isInstanceOf[ConcatIterator[_]]) { val c = current.asInstanceOf[ConcatIterator[A]] current = c.current currentHasNextChecked = c.currentHasNextChecked if (c.tail ne null) { c.last.tail = tail tail = c.tail } merge() } def hasNext = if (currentHasNextChecked) true else if (current eq null) false else if (current.hasNext) { currentHasNextChecked = true true } else advance() def next() = if (hasNext) { currentHasNextChecked = false current.next() } else Iterator.empty.next() override def ++[B >: A](that: => GenTraversableOnce[B]): Iterator[B] = { val c = new ConcatIteratorCell[B](that, null).asInstanceOf[ConcatIteratorCell[A]] if(tail eq null) { tail = c last = c } else { last.tail = c last = c } if(current eq null) current = Iterator.empty this } } private[this] final class ConcatIteratorCell[A](head: => GenTraversableOnce[A], var tail: ConcatIteratorCell[A]) { def headIterator: Iterator[A] = head.toIterator } /** Creates a delegating iterator capped by a limit count. Negative limit means unbounded. * Lazily skip to start on first evaluation. Avoids daisy-chained iterators due to slicing. */ private[scala] final class SliceIterator[A](val underlying: Iterator[A], start: Int, limit: Int) extends AbstractIterator[A] { private var remaining = limit private var dropping = start @inline private def unbounded = remaining < 0 private def skip(): Unit = while (dropping > 0) { if (underlying.hasNext) { underlying.next() dropping -= 1 } else dropping = 0 } def hasNext = { skip(); remaining != 0 && underlying.hasNext } def next() = { skip() if (remaining > 0) { remaining -= 1 underlying.next() } else if (unbounded) underlying.next() else empty.next() } override protected def sliceIterator(from: Int, until: Int): Iterator[A] = { val lo = from max 0 def adjustedBound = if (unbounded) -1 else 0 max (remaining - lo) val rest = if (until < 0) adjustedBound // respect current bound, if any else if (until <= lo) 0 // empty else if (unbounded) until - lo // now finite else adjustedBound min (until - lo) // keep lesser bound if (rest == 0) empty else { dropping += lo remaining = rest this } } } } import Iterator.empty /** Iterators are data structures that allow to iterate over a sequence * of elements. They have a `hasNext` method for checking * if there is a next element available, and a `next` method * which returns the next element and discards it from the iterator. * * An iterator is mutable: most operations on it change its state. While it is often used * to iterate through the elements of a collection, it can also be used without * being backed by any collection (see constructors on the companion object). * * It is of particular importance to note that, unless stated otherwise, ''one should never * use an iterator after calling a method on it''. 
The two most important exceptions * are also the sole abstract methods: `next` and `hasNext`. * * Both these methods can be called any number of times without having to discard the * iterator. Note that even `hasNext` may cause mutation -- such as when iterating * from an input stream, where it will block until the stream is closed or some * input becomes available. * * Consider this example for safe and unsafe use: * * {{{ * def f[A](it: Iterator[A]) = { * if (it.hasNext) { // Safe to reuse "it" after "hasNext" * it.next // Safe to reuse "it" after "next" * val remainder = it.drop(2) // it is *not* safe to use "it" again after this line! * remainder.take(2) // it is *not* safe to use "remainder" after this line! * } else it * } * }}} * * @author Martin Odersky, Matthias Zenger * @version 2.8 * @since 1 * @define willNotTerminateInf * Note: will not terminate for infinite iterators. * @define mayNotTerminateInf * Note: may not terminate for infinite iterators. * @define preservesIterator * The iterator remains valid for further use whatever result is returned. * @define consumesIterator * After calling this method, one should discard the iterator it was called * on. Using it is undefined and subject to change. * @define consumesAndProducesIterator * After calling this method, one should discard the iterator it was called * on, and use only the iterator that was returned. Using the old iterator * is undefined, subject to change, and may result in changes to the new * iterator as well. * @define consumesTwoAndProducesOneIterator * After calling this method, one should discard the iterator it was called * on, as well as the one passed as a parameter, and use only the iterator * that was returned. Using the old iterators is undefined, subject to change, * and may result in changes to the new iterator as well. * @define consumesOneAndProducesTwoIterators * After calling this method, one should discard the iterator it was called * on, and use only the iterators that were returned. Using the old iterator * is undefined, subject to change, and may result in changes to the new * iterators as well. * @define consumesTwoIterators * After calling this method, one should discard the iterator it was called * on, as well as the one passed as parameter. Using the old iterators is * undefined and subject to change. */ trait Iterator[+A] extends TraversableOnce[A] { self => def seq: Iterator[A] = this /** Tests whether this iterator can provide another element. * * @return `true` if a subsequent call to `next` will yield an element, * `false` otherwise. * @note Reuse: $preservesIterator */ def hasNext: Boolean /** Produces the next element of this iterator. * * @return the next element of this iterator, if `hasNext` is `true`, * undefined behavior otherwise. * @note Reuse: $preservesIterator */ def next(): A /** Tests whether this iterator is empty. * * @return `true` if hasNext is false, `false` otherwise. * @note Reuse: $preservesIterator */ def isEmpty: Boolean = !hasNext /** Tests whether this Iterator can be repeatedly traversed. * * @return `false` * @note Reuse: $preservesIterator */ def isTraversableAgain = false /** Tests whether this Iterator has a known size. * * @return `true` for empty Iterators, `false` otherwise. * @note Reuse: $preservesIterator */ def hasDefiniteSize = isEmpty /** Selects first ''n'' values of this iterator. 
* * @param n the number of values to take * @return an iterator producing only the first `n` values of this iterator, or else the * whole iterator, if it produces fewer than `n` values. * @note Reuse: $consumesAndProducesIterator */ def take(n: Int): Iterator[A] = sliceIterator(0, n max 0) /** Advances this iterator past the first ''n'' elements, or the length of the iterator, whichever is smaller. * * @param n the number of elements to drop * @return an iterator which produces all values of the current iterator, except * it omits the first `n` values. * @note Reuse: $consumesAndProducesIterator */ def drop(n: Int): Iterator[A] = { var j = 0 while (j < n && hasNext) { next() j += 1 } this } /** Creates an iterator returning an interval of the values produced by this iterator. * * @param from the index of the first element in this iterator which forms part of the slice. * If negative, the slice starts at zero. * @param until the index of the first element following the slice. If negative, the slice is empty. * @return an iterator which advances this iterator past the first `from` elements using `drop`, * and then takes `until - from` elements, using `take`. * @note Reuse: $consumesAndProducesIterator */ def slice(from: Int, until: Int): Iterator[A] = sliceIterator(from, until max 0) /** Creates an optionally bounded slice, unbounded if `until` is negative. */ protected def sliceIterator(from: Int, until: Int): Iterator[A] = { val lo = from max 0 val rest = if (until < 0) -1 // unbounded else if (until <= lo) 0 // empty else until - lo // finite if (rest == 0) empty else new Iterator.SliceIterator(this, lo, rest) } /** Creates a new iterator that maps all produced values of this iterator * to new values using a transformation function. * * @param f the transformation function * @return a new iterator which transforms every value produced by this * iterator by applying the function `f` to it. * @note Reuse: $consumesAndProducesIterator */ def map[B](f: A => B): Iterator[B] = new AbstractIterator[B] { def hasNext = self.hasNext def next() = f(self.next()) } /** Concatenates this iterator with another. * * @param that the other iterator * @return a new iterator that first yields the values produced by this * iterator followed by the values produced by iterator `that`. * @note Reuse: $consumesTwoAndProducesOneIterator * * @usecase def ++(that: => Iterator[A]): Iterator[A] * @inheritdoc */ def ++[B >: A](that: => GenTraversableOnce[B]): Iterator[B] = new Iterator.ConcatIterator(self) ++ that /** Creates a new iterator by applying a function to all values produced by this iterator * and concatenating the results. * * @param f the function to apply on each element. * @return the iterator resulting from applying the given iterator-valued function * `f` to each value produced by this iterator and concatenating the results. * @note Reuse: $consumesAndProducesIterator */ def flatMap[B](f: A => GenTraversableOnce[B]): Iterator[B] = new AbstractIterator[B] { private var cur: Iterator[B] = empty private def nextCur() { cur = f(self.next()).toIterator } def hasNext: Boolean = { // Equivalent to cur.hasNext || self.hasNext && { nextCur(); hasNext } // but slightly shorter bytecode (better JVM inlining!) while (!cur.hasNext) { if (!self.hasNext) return false nextCur() } true } def next(): B = (if (hasNext) cur else empty).next() } /** Returns an iterator over all the elements of this iterator that satisfy the predicate `p`. * The order of the elements is preserved. 
* * @param p the predicate used to test values. * @return an iterator which produces those values of this iterator which satisfy the predicate `p`. * @note Reuse: $consumesAndProducesIterator */ def filter(p: A => Boolean): Iterator[A] = new AbstractIterator[A] { // TODO 2.12 - Make a full-fledged FilterImpl that will reverse sense of p private var hd: A = _ private var hdDefined: Boolean = false def hasNext: Boolean = hdDefined || { do { if (!self.hasNext) return false hd = self.next() } while (!p(hd)) hdDefined = true true } def next() = if (hasNext) { hdDefined = false; hd } else empty.next() } /** Tests whether every element of this iterator relates to the * corresponding element of another collection by satisfying a test predicate. * * @param that the other collection * @param p the test predicate, which relates elements from both collections * @tparam B the type of the elements of `that` * @return `true` if both collections have the same length and * `p(x, y)` is `true` for all corresponding elements `x` of this iterator * and `y` of `that`, otherwise `false` */ def corresponds[B](that: GenTraversableOnce[B])(p: (A, B) => Boolean): Boolean = { val that0 = that.toIterator while (hasNext && that0.hasNext) if (!p(next(), that0.next())) return false hasNext == that0.hasNext } /** Creates an iterator over all the elements of this iterator that * satisfy the predicate `p`. The order of the elements * is preserved. * * '''Note:''' `withFilter` is the same as `filter` on iterators. It exists so that * for-expressions with filters work over iterators. * * @param p the predicate used to test values. * @return an iterator which produces those values of this iterator which satisfy the predicate `p`. * @note Reuse: $consumesAndProducesIterator */ def withFilter(p: A => Boolean): Iterator[A] = filter(p) /** Creates an iterator over all the elements of this iterator which * do not satisfy a predicate p. * * @param p the predicate used to test values. * @return an iterator which produces those values of this iterator which do not satisfy the predicate `p`. * @note Reuse: $consumesAndProducesIterator */ def filterNot(p: A => Boolean): Iterator[A] = filter(!p(_)) /** Creates an iterator by transforming values * produced by this iterator with a partial function, dropping those * values for which the partial function is not defined. * * @param pf the partial function which filters and maps the iterator. * @return a new iterator which yields each value `x` produced by this iterator for * which `pf` is defined the image `pf(x)`. * @note Reuse: $consumesAndProducesIterator */ @migration("`collect` has changed. The previous behavior can be reproduced with `toSeq`.", "2.8.0") def collect[B](pf: PartialFunction[A, B]): Iterator[B] = new AbstractIterator[B] { // Manually buffer to avoid extra layer of wrapping with buffered private[this] var hd: A = _ // Little state machine to keep track of where we are // Seek = 0; Found = 1; Empty = -1 // Not in vals because scalac won't make them static (@inline def only works with -optimize) // BE REALLY CAREFUL TO KEEP COMMENTS AND NUMBERS IN SYNC! private[this] var status = 0/*Seek*/ def hasNext = { while (status == 0/*Seek*/) { if (self.hasNext) { hd = self.next() if (pf.isDefinedAt(hd)) status = 1/*Found*/ } else status = -1/*Empty*/ } status == 1/*Found*/ } def next() = if (hasNext) { status = 0/*Seek*/; pf(hd) } else Iterator.empty.next() } /** Produces a collection containing cumulative results of applying the * operator going left to right. 
* * $willNotTerminateInf * $orderDependent * * @tparam B the type of the elements in the resulting collection * @param z the initial value * @param op the binary operator applied to the intermediate result and the element * @return iterator with intermediate results * @note Reuse: $consumesAndProducesIterator */ def scanLeft[B](z: B)(op: (B, A) => B): Iterator[B] = new AbstractIterator[B] { var hasNext = true var elem = z def next() = if (hasNext) { val res = elem if (self.hasNext) elem = op(elem, self.next()) else hasNext = false res } else Iterator.empty.next() } /** Produces a collection containing cumulative results of applying the operator going right to left. * The head of the collection is the last cumulative result. * * $willNotTerminateInf * $orderDependent * * @tparam B the type of the elements in the resulting collection * @param z the initial value * @param op the binary operator applied to the intermediate result and the element * @return iterator with intermediate results * @example {{{ * Iterator(1, 2, 3, 4).scanRight(0)(_ + _).toList == List(10, 9, 7, 4, 0) * }}} * @note Reuse: $consumesAndProducesIterator */ def scanRight[B](z: B)(op: (A, B) => B): Iterator[B] = toBuffer.scanRight(z)(op).iterator /** Takes longest prefix of values produced by this iterator that satisfy a predicate. * * @param p The predicate used to test elements. * @return An iterator returning the values produced by this iterator, until * this iterator produces a value that does not satisfy * the predicate `p`. * @note Reuse: $consumesAndProducesIterator */ def takeWhile(p: A => Boolean): Iterator[A] = new AbstractIterator[A] { private var hd: A = _ private var hdDefined: Boolean = false private var tail: Iterator[A] = self def hasNext = hdDefined || tail.hasNext && { hd = tail.next() if (p(hd)) hdDefined = true else tail = Iterator.empty hdDefined } def next() = if (hasNext) { hdDefined = false; hd } else empty.next() } /** Partitions this iterator in two iterators according to a predicate. * * @param p the predicate on which to partition * @return a pair of iterators: the iterator that satisfies the predicate * `p` and the iterator that does not. * The relative order of the elements in the resulting iterators * is the same as in the original iterator. * @note Reuse: $consumesOneAndProducesTwoIterators */ def partition(p: A => Boolean): (Iterator[A], Iterator[A]) = { val self = buffered class PartitionIterator(p: A => Boolean) extends AbstractIterator[A] { var other: PartitionIterator = _ val lookahead = new mutable.Queue[A] def skip() = while (self.hasNext && !p(self.head)) { other.lookahead += self.next } def hasNext = !lookahead.isEmpty || { skip(); self.hasNext } def next() = if (!lookahead.isEmpty) lookahead.dequeue() else { skip(); self.next() } } val l = new PartitionIterator(p) val r = new PartitionIterator(!p(_)) l.other = r r.other = l (l, r) } /** Splits this Iterator into a prefix/suffix pair according to a predicate. * * @param p the test predicate * @return a pair of Iterators consisting of the longest prefix of this * whose elements all satisfy `p`, and the rest of the Iterator. * @note Reuse: $consumesOneAndProducesTwoIterators */ def span(p: A => Boolean): (Iterator[A], Iterator[A]) = { /* * Giving a name to following iterator (as opposed to trailing) because * anonymous class is represented as a structural type that trailing * iterator is referring (the finish() method) and thus triggering * handling of structural calls. It's not what's intended here. 
*/ class Leading extends AbstractIterator[A] { private[this] var lookahead: mutable.Queue[A] = null private[this] var hd: A = _ /* Status is kept with magic numbers * 1 means next element is in hd and we're still reading into this iterator * 0 means we're still reading but haven't found a next element * -1 means we are done reading into the iterator, so we must rely on lookahead * -2 means we are done but have saved hd for the other iterator to use as its first element */ private[this] var status = 0 private def store(a: A) { if (lookahead == null) lookahead = new mutable.Queue[A] lookahead += a } def hasNext = { if (status < 0) (lookahead ne null) && lookahead.nonEmpty else if (status > 0) true else { if (self.hasNext) { hd = self.next() status = if (p(hd)) 1 else -2 } else status = -1 status > 0 } } def next() = { if (hasNext) { if (status == 1) { status = 0; hd } else lookahead.dequeue() } else empty.next() } def finish(): Boolean = status match { case -2 => status = -1 ; true case -1 => false case 1 => store(hd) ; status = 0 ; finish() case 0 => status = -1 while (self.hasNext) { val a = self.next() if (p(a)) store(a) else { hd = a return true } } false } def trailer: A = hd } val leading = new Leading val trailing = new AbstractIterator[A] { private[this] var myLeading = leading /* Status flag meanings: * -1 not yet accessed * 0 single element waiting in leading * 1 defer to self */ private[this] var status = -1 def hasNext = { if (status > 0) self.hasNext else { if (status == 0) true else if (myLeading.finish()) { status = 0 true } else { status = 1 myLeading = null self.hasNext } } } def next() = { if (hasNext) { if (status > 0) self.next() else { status = 1 val ans = myLeading.trailer myLeading = null ans } } else Iterator.empty.next() } override def toString = "unknown-if-empty iterator" } (leading, trailing) } /** Skips longest sequence of elements of this iterator which satisfy given * predicate `p`, and returns an iterator of the remaining elements. * * @param p the predicate used to skip elements. * @return an iterator consisting of the remaining elements * @note Reuse: $consumesAndProducesIterator */ def dropWhile(p: A => Boolean): Iterator[A] = new AbstractIterator[A] { // Magic value: -1 = hasn't dropped, 0 = found first, 1 = defer to parent iterator private[this] var status = -1 // Local buffering to avoid double-wrap with .buffered private[this] var fst: A = _ def hasNext: Boolean = if (status == 1) self.hasNext else if (status == 0) true else { while (self.hasNext) { val a = self.next() if (!p(a)) { fst = a status = 0 return true } } status = 1 false } def next() = if (hasNext) { if (status == 1) self.next() else { status = 1 fst } } else Iterator.empty.next() } /** Creates an iterator formed from this iterator and another iterator * by combining corresponding values in pairs. * If one of the two iterators is longer than the other, its remaining * elements are ignored. * * @param that The iterator providing the second half of each result pair * @return a new iterator containing pairs consisting of * corresponding elements of this iterator and `that`. The number * of elements returned by the new iterator is the * minimum of the number of elements returned by this * iterator and `that`. 
* @note Reuse: $consumesTwoAndProducesOneIterator */ def zip[B](that: Iterator[B]): Iterator[(A, B)] = new AbstractIterator[(A, B)] { def hasNext = self.hasNext && that.hasNext def next = (self.next(), that.next()) } /** Appends an element value to this iterator until a given target length is reached. * * @param len the target length * @param elem the padding value * @return a new iterator consisting of producing all values of this iterator, * followed by the minimal number of occurrences of `elem` so * that the number of produced values is at least `len`. * @note Reuse: $consumesAndProducesIterator * * @usecase def padTo(len: Int, elem: A): Iterator[A] * @inheritdoc */ def padTo[A1 >: A](len: Int, elem: A1): Iterator[A1] = new AbstractIterator[A1] { private var count = 0 def hasNext = self.hasNext || count < len def next = { count += 1 if (self.hasNext) self.next() else if (count <= len) elem else empty.next() } } /** Creates an iterator that pairs each element produced by this iterator * with its index, counting from 0. * * @return a new iterator containing pairs consisting of * corresponding elements of this iterator and their indices. * @note Reuse: $consumesAndProducesIterator */ def zipWithIndex: Iterator[(A, Int)] = new AbstractIterator[(A, Int)] { var idx = 0 def hasNext = self.hasNext def next = { val ret = (self.next(), idx) idx += 1 ret } } /** Creates an iterator formed from this iterator and another iterator * by combining corresponding elements in pairs. * If one of the two iterators is shorter than the other, * placeholder elements are used to extend the shorter iterator to the length of the longer. * * @param that iterator `that` may have a different length * as the self iterator. * @param thisElem element `thisElem` is used to fill up the * resulting iterator if the self iterator is shorter than * `that` * @param thatElem element `thatElem` is used to fill up the * resulting iterator if `that` is shorter than * the self iterator * @return a new iterator containing pairs consisting of * corresponding values of this iterator and `that`. The length * of the returned iterator is the maximum of the lengths of this iterator and `that`. * If this iterator is shorter than `that`, `thisElem` values are used to pad the result. * If `that` is shorter than this iterator, `thatElem` values are used to pad the result. * @note Reuse: $consumesTwoAndProducesOneIterator * * @usecase def zipAll[B](that: Iterator[B], thisElem: A, thatElem: B): Iterator[(A, B)] * @inheritdoc */ def zipAll[B, A1 >: A, B1 >: B](that: Iterator[B], thisElem: A1, thatElem: B1): Iterator[(A1, B1)] = new AbstractIterator[(A1, B1)] { def hasNext = self.hasNext || that.hasNext def next(): (A1, B1) = if (self.hasNext) { if (that.hasNext) (self.next(), that.next()) else (self.next(), thatElem) } else { if (that.hasNext) (thisElem, that.next()) else empty.next() } } /** Applies a function `f` to all values produced by this iterator. * * @param f the function that is applied for its side-effect to every element. * The result of function `f` is discarded. * * @tparam U the type parameter describing the result of function `f`. * This result will always be ignored. Typically `U` is `Unit`, * but this is not necessary. * * @note Reuse: $consumesIterator * * @usecase def foreach(f: A => Unit): Unit * @inheritdoc */ def foreach[U](f: A => U) { while (hasNext) f(next()) } /** Tests whether a predicate holds for all values produced by this iterator. * $mayNotTerminateInf * * @param p the predicate used to test elements. 
* @return `true` if the given predicate `p` holds for all values * produced by this iterator, otherwise `false`. * @note Reuse: $consumesIterator */ def forall(p: A => Boolean): Boolean = { var res = true while (res && hasNext) res = p(next()) res } /** Tests whether a predicate holds for some of the values produced by this iterator. * $mayNotTerminateInf * * @param p the predicate used to test elements. * @return `true` if the given predicate `p` holds for some of the values * produced by this iterator, otherwise `false`. * @note Reuse: $consumesIterator */ def exists(p: A => Boolean): Boolean = { var res = false while (!res && hasNext) res = p(next()) res } /** Tests whether this iterator contains a given value as an element. * $mayNotTerminateInf * * @param elem the element to test. * @return `true` if this iterator produces some value that is * is equal (as determined by `==`) to `elem`, `false` otherwise. * @note Reuse: $consumesIterator */ def contains(elem: Any): Boolean = exists(_ == elem) // Note--this seems faster than manual inlining! /** Finds the first value produced by the iterator satisfying a * predicate, if any. * $mayNotTerminateInf * * @param p the predicate used to test values. * @return an option value containing the first value produced by the iterator that satisfies * predicate `p`, or `None` if none exists. * @note Reuse: $consumesIterator */ def find(p: A => Boolean): Option[A] = { while (hasNext) { val a = next() if (p(a)) return Some(a) } None } /** Returns the index of the first produced value satisfying a predicate, or -1. * $mayNotTerminateInf * * @param p the predicate to test values * @return the index of the first produced value satisfying `p`, * or -1 if such an element does not exist until the end of the iterator is reached. * @note Reuse: $consumesIterator */ def indexWhere(p: A => Boolean): Int = indexWhere(p, 0) /** Returns the index of the first produced value satisfying a predicate, or -1, after or at * some start index. * $mayNotTerminateInf * * @param p the predicate to test values * @param from the start index * @return the index `>= from` of the first produced value satisfying `p`, * or -1 if such an element does not exist until the end of the iterator is reached. * @note Reuse: $consumesIterator */ def indexWhere(p: A => Boolean, from: Int): Int = { var i = 0 while (i < from && hasNext) { next() i += 1 } while (hasNext) { if (p(next())) return i i += 1 } -1 } /** Returns the index of the first occurrence of the specified * object in this iterable object. * $mayNotTerminateInf * * @param elem element to search for. * @return the index of the first occurrence of `elem` in the values produced by this iterator, * or -1 if such an element does not exist until the end of the iterator is reached. * @note Reuse: $consumesIterator */ def indexOf[B >: A](elem: B): Int = indexOf(elem, 0) /** Returns the index of the first occurrence of the specified object in this iterable object * after or at some start index. * $mayNotTerminateInf * * @param elem element to search for. * @param from the start index * @return the index `>= from` of the first occurrence of `elem` in the values produced by this * iterator, or -1 if such an element does not exist until the end of the iterator is * reached. * @note Reuse: $consumesIterator */ def indexOf[B >: A](elem: B, from: Int): Int = { var i = 0 while (i < from && hasNext) { next() i += 1 } while (hasNext) { if (next() == elem) return i i += 1 } -1 } /** Creates a buffered iterator from this iterator. 
* * @see [[scala.collection.BufferedIterator]] * @return a buffered iterator producing the same values as this iterator. * @note Reuse: $consumesAndProducesIterator */ def buffered: BufferedIterator[A] = new AbstractIterator[A] with BufferedIterator[A] { private var hd: A = _ private var hdDefined: Boolean = false def head: A = { if (!hdDefined) { hd = next() hdDefined = true } hd } def hasNext = hdDefined || self.hasNext def next() = if (hdDefined) { hdDefined = false hd } else self.next() } /** A flexible iterator for transforming an `Iterator[A]` into an * Iterator[Seq[A]], with configurable sequence size, step, and * strategy for dealing with elements which don't fit evenly. * * Typical uses can be achieved via methods `grouped` and `sliding`. */ class GroupedIterator[B >: A](self: Iterator[A], size: Int, step: Int) extends AbstractIterator[Seq[B]] with Iterator[Seq[B]] { require(size >= 1 && step >= 1, "size=%d and step=%d, but both must be positive".format(size, step)) private[this] var buffer: ArrayBuffer[B] = ArrayBuffer() // the buffer private[this] var filled = false // whether the buffer is "hot" private[this] var _partial = true // whether we deliver short sequences private[this] var pad: Option[() => B] = None // what to pad short sequences with /** Public functions which can be used to configure the iterator before use. * * Pads the last segment if necessary so that all segments will * have the same size. * * @param x The element that will be appended to the last segment, if necessary. * @return The same iterator, and ''not'' a new iterator. * @note This method mutates the iterator it is called on, which can be safely used afterwards. * @note This method is mutually exclusive with `withPartial(true)`. */ def withPadding(x: => B): this.type = { pad = Some(() => x) this } /** Public functions which can be used to configure the iterator before use. * * Select whether the last segment may be returned with less than `size` * elements. If not, some elements of the original iterator may not be * returned at all. * * @param x `true` if partial segments may be returned, `false` otherwise. * @return The same iterator, and ''not'' a new iterator. * @note This method mutates the iterator it is called on, which can be safely used afterwards. * @note This method is mutually exclusive with `withPadding`. */ def withPartial(x: Boolean): this.type = { _partial = x if (_partial == true) // reset pad since otherwise it will take precedence pad = None this } /** For reasons which remain to be determined, calling * self.take(n).toSeq cause an infinite loop, so we have * a slight variation on take for local usage. * NB: self.take.toSeq is slice.toStream, lazily built on self, * so a subsequent self.hasNext would not test self after the * group was consumed. 
*/ private def takeDestructively(size: Int): Seq[A] = { val buf = new ArrayBuffer[A] var i = 0 // The order of terms in the following condition is important // here as self.hasNext could be blocking while (i < size && self.hasNext) { buf += self.next i += 1 } buf } private def padding(x: Int) = List.fill(x)(pad.get()) private def gap = (step - size) max 0 private def go(count: Int) = { val prevSize = buffer.size def isFirst = prevSize == 0 // If there is padding defined we insert it immediately // so the rest of the code can be oblivious val xs: Seq[B] = { val res = takeDestructively(count) // was: extra checks so we don't calculate length unless there's reason // but since we took the group eagerly, just use the fast length val shortBy = count - res.length if (shortBy > 0 && pad.isDefined) res ++ padding(shortBy) else res } lazy val len = xs.length lazy val incomplete = len < count // if 0 elements are requested, or if the number of newly obtained // elements is less than the gap between sequences, we are done. def deliver(howMany: Int) = { (howMany > 0 && (isFirst || len > gap)) && { if (!isFirst) buffer trimStart (step min prevSize) val available = if (isFirst) len else howMany min (len - gap) buffer ++= (xs takeRight available) filled = true true } } if (xs.isEmpty) false // self ran out of elements else if (_partial) deliver(len min size) // if _partial is true, we deliver regardless else if (incomplete) false // !_partial && incomplete means no more seqs else if (isFirst) deliver(len) // first element else deliver(step min size) // the typical case } // fill() returns false if no more sequences can be produced private def fill(): Boolean = { if (!self.hasNext) false // the first time we grab size, but after that we grab step else if (buffer.isEmpty) go(size) else go(step) } def hasNext = filled || fill() def next = { if (!filled) fill() if (!filled) throw new NoSuchElementException("next on empty iterator") filled = false buffer.toList } } /** Returns an iterator which groups this iterator into fixed size * blocks. Example usages: * {{{ * // Returns List(List(1, 2, 3), List(4, 5, 6), List(7))) * (1 to 7).iterator grouped 3 toList * // Returns List(List(1, 2, 3), List(4, 5, 6)) * (1 to 7).iterator grouped 3 withPartial false toList * // Returns List(List(1, 2, 3), List(4, 5, 6), List(7, 20, 25) * // Illustrating that withPadding's argument is by-name. * val it2 = Iterator.iterate(20)(_ + 5) * (1 to 7).iterator grouped 3 withPadding it2.next toList * }}} * * @note Reuse: $consumesAndProducesIterator */ def grouped[B >: A](size: Int): GroupedIterator[B] = new GroupedIterator[B](self, size, size) /** Returns an iterator which presents a "sliding window" view of * another iterator. The first argument is the window size, and * the second is how far to advance the window on each iteration; * defaults to `1`. Example usages: * {{{ * // Returns List(List(1, 2, 3), List(2, 3, 4), List(3, 4, 5)) * (1 to 5).iterator.sliding(3).toList * // Returns List(List(1, 2, 3, 4), List(4, 5)) * (1 to 5).iterator.sliding(4, 3).toList * // Returns List(List(1, 2, 3, 4)) * (1 to 5).iterator.sliding(4, 3).withPartial(false).toList * // Returns List(List(1, 2, 3, 4), List(4, 5, 20, 25)) * // Illustrating that withPadding's argument is by-name. 
* val it2 = Iterator.iterate(20)(_ + 5) * (1 to 5).iterator.sliding(4, 3).withPadding(it2.next).toList * }}} * * @note Reuse: $consumesAndProducesIterator */ def sliding[B >: A](size: Int, step: Int = 1): GroupedIterator[B] = new GroupedIterator[B](self, size, step) /** Returns the number of elements in this iterator. * $willNotTerminateInf * * @note Reuse: $consumesIterator */ def length: Int = this.size /** Creates two new iterators that both iterate over the same elements * as this iterator (in the same order). The duplicate iterators are * considered equal if they are positioned at the same element. * * Given that most methods on iterators will make the original iterator * unfit for further use, this methods provides a reliable way of calling * multiple such methods on an iterator. * * @return a pair of iterators * @note The implementation may allocate temporary storage for elements * iterated by one iterator but not yet by the other. * @note Reuse: $consumesOneAndProducesTwoIterators */ def duplicate: (Iterator[A], Iterator[A]) = { val gap = new scala.collection.mutable.Queue[A] var ahead: Iterator[A] = null class Partner extends AbstractIterator[A] { def hasNext: Boolean = self.synchronized { (this ne ahead) && !gap.isEmpty || self.hasNext } def next(): A = self.synchronized { if (gap.isEmpty) ahead = this if (this eq ahead) { val e = self.next() gap enqueue e e } else gap.dequeue() } // to verify partnerhood we use reference equality on gap because // type testing does not discriminate based on origin. private def compareGap(queue: scala.collection.mutable.Queue[A]) = gap eq queue override def hashCode = gap.hashCode() override def equals(other: Any) = other match { case x: Partner => x.compareGap(gap) && gap.isEmpty case _ => super.equals(other) } } (new Partner, new Partner) } /** Returns this iterator with patched values. * Patching at negative indices is the same as patching starting at 0. * Patching at indices at or larger than the length of the original iterator appends the patch to the end. * If more values are replaced than actually exist, the excess is ignored. * * @param from The start index from which to patch * @param patchElems The iterator of patch values * @param replaced The number of values in the original iterator that are replaced by the patch. * @note Reuse: $consumesTwoAndProducesOneIterator */ def patch[B >: A](from: Int, patchElems: Iterator[B], replaced: Int): Iterator[B] = new AbstractIterator[B] { private var origElems = self private var i = (if (from > 0) from else 0) // Counts down, switch to patch on 0, -1 means use patch first def hasNext: Boolean = { if (i == 0) { origElems = origElems drop replaced i = -1 } origElems.hasNext || patchElems.hasNext } def next(): B = { if (i == 0) { origElems = origElems drop replaced i = -1 } if (i < 0) { if (patchElems.hasNext) patchElems.next() else origElems.next() } else { if (origElems.hasNext) { i -= 1 origElems.next() } else { i = -1 patchElems.next() } } } } /** Copies selected values produced by this iterator to an array. * Fills the given array `xs` starting at index `start` with at most * `len` values produced by this iterator. * Copying will stop once either the end of the current iterator is reached, * or the end of the array is reached, or `len` elements have been copied. * * @param xs the array to fill. * @param start the starting index. * @param len the maximal number of elements to copy. * @tparam B the type of the elements of the array. 
* * @note Reuse: $consumesIterator * * @usecase def copyToArray(xs: Array[A], start: Int, len: Int): Unit * @inheritdoc * * $willNotTerminateInf */ def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Unit = { var i = start val end = start + math.min(len, xs.length - start) while (i < end && hasNext) { xs(i) = next() i += 1 } // TODO: return i - start so the caller knows how many values read? } /** Tests if another iterator produces the same values as this one. * * $willNotTerminateInf * * @param that the other iterator * @return `true`, if both iterators produce the same elements in the same order, `false` otherwise. * * @note Reuse: $consumesTwoIterators */ def sameElements(that: Iterator[_]): Boolean = { while (hasNext && that.hasNext) if (next != that.next) return false !hasNext && !that.hasNext } def toTraversable: Traversable[A] = toStream def toIterator: Iterator[A] = self def toStream: Stream[A] = if (self.hasNext) Stream.cons(self.next(), self.toStream) else Stream.empty[A] /** Converts this iterator to a string. * * @return `"empty iterator"` or `"non-empty iterator"`, depending on * whether or not the iterator is empty. * @note Reuse: $preservesIterator */ override def toString = (if (hasNext) "non-empty" else "empty")+" iterator" } /** Explicit instantiation of the `Iterator` trait to reduce class file size in subclasses. */ abstract class AbstractIterator[+A] extends Iterator[A]
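/* A small usage sketch (not part of the library source above) exercising a few of the
 * operations documented in this file. The object name and sample values are illustrative;
 * only the standard Iterator API defined above is used. */
object IteratorUsageSketch {
  def main(args: Array[String]): Unit = {
    // Factory methods on the companion object.
    println(Iterator.range(1, 10, 3).toList)            // List(1, 4, 7)
    println(Iterator.iterate(1)(_ * 2).take(5).toList)  // List(1, 2, 4, 8, 16)

    // grouped/sliding are configured before the iterator is consumed.
    println((1 to 7).iterator.grouped(3).withPartial(false).toList) // List(List(1, 2, 3), List(4, 5, 6))
    println((1 to 5).iterator.sliding(3, 2).toList)                 // List(List(1, 2, 3), List(3, 4, 5))

    // span consumes the original iterator and returns two new ones; discard the original.
    val (evenPrefix, rest) = Iterator(2, 4, 5, 6).span(_ % 2 == 0)
    println(evenPrefix.toList) // List(2, 4)
    println(rest.toList)       // List(5, 6)

    // duplicate lets two consumers see the same elements without re-traversing the source.
    val (first, second) = Iterator(1, 2, 3).duplicate
    println(first.sum)     // 6
    println(second.toList) // List(1, 2, 3)
  }
}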
felixmulder/scala
src/library/scala/collection/Iterator.scala
Scala
bsd-3-clause
52,307
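/* A further illustrative snippet (not from the Iterator sources above): the argument of ++
 * is by-name, and the ConcatIterator documented above merges nested concatenations, so a
 * long chain of ++ calls can be built and then traversed without deep recursion. The object
 * name and the chain length are arbitrary. */
object ConcatChainSketch {
  def main(args: Array[String]): Unit = {
    // Chain 10,000 single-element iterators onto an initial Iterator(0).
    val chained = (1 to 10000).foldLeft(Iterator(0))((it, i) => it ++ Iterator(i))
    println(chained.sum) // 50005000 -- the whole chain is consumed iteratively
  }
}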
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql import java.sql.{Date, Timestamp} import java.time.{Instant, LocalDate} import java.util.Base64 import scala.collection.JavaConverters._ import scala.collection.mutable import scala.util.hashing.MurmurHash3 import org.json4s._ import org.json4s.JsonAST.JValue import org.json4s.jackson.JsonMethods._ import org.apache.spark.annotation.{Stable, Unstable} import org.apache.spark.sql.catalyst.CatalystTypeConverters import org.apache.spark.sql.catalyst.expressions.GenericRow import org.apache.spark.sql.catalyst.util.{DateFormatter, DateTimeUtils, TimestampFormatter} import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.types._ import org.apache.spark.unsafe.types.CalendarInterval /** * @since 1.3.0 */ @Stable object Row { /** * This method can be used to extract fields from a [[Row]] object in a pattern match. Example: * {{{ * import org.apache.spark.sql._ * * val pairs = sql("SELECT key, value FROM src").rdd.map { * case Row(key: Int, value: String) => * key -> value * } * }}} */ def unapplySeq(row: Row): Some[Seq[Any]] = Some(row.toSeq) /** * This method can be used to construct a [[Row]] with the given values. */ def apply(values: Any*): Row = new GenericRow(values.toArray) /** * This method can be used to construct a [[Row]] from a `Seq` of values. */ def fromSeq(values: Seq[Any]): Row = new GenericRow(values.toArray) def fromTuple(tuple: Product): Row = fromSeq(tuple.productIterator.toSeq) /** * Merge multiple rows into a single row, one after another. */ @deprecated("This method is deprecated and will be removed in future versions.", "3.0.0") def merge(rows: Row*): Row = { // TODO: Improve the performance of this if used in performance critical part. new GenericRow(rows.flatMap(_.toSeq).toArray) } /** Returns an empty row. */ val empty = apply() } /** * Represents one row of output from a relational operator. Allows both generic access by ordinal, * which will incur boxing overhead for primitives, as well as native primitive access. * * It is invalid to use the native primitive interface to retrieve a value that is null, instead a * user must check `isNullAt` before attempting to retrieve a value that might be null. * * To create a new Row, use `RowFactory.create()` in Java or `Row.apply()` in Scala. * * A [[Row]] object can be constructed by providing field values. Example: * {{{ * import org.apache.spark.sql._ * * // Create a Row from values. * Row(value1, value2, value3, ...) * // Create a Row from a Seq of values. * Row.fromSeq(Seq(value1, value2, ...)) * }}} * * A value of a row can be accessed through both generic access by ordinal, * which will incur boxing overhead for primitives, as well as native primitive access. 
* An example of generic access by ordinal: * {{{ * import org.apache.spark.sql._ * * val row = Row(1, true, "a string", null) * // row: Row = [1,true,a string,null] * val firstValue = row(0) * // firstValue: Any = 1 * val fourthValue = row(3) * // fourthValue: Any = null * }}} * * For native primitive access, it is invalid to use the native primitive interface to retrieve * a value that is null, instead a user must check `isNullAt` before attempting to retrieve a * value that might be null. * An example of native primitive access: * {{{ * // using the row from the previous example. * val firstValue = row.getInt(0) * // firstValue: Int = 1 * val isNull = row.isNullAt(3) * // isNull: Boolean = true * }}} * * In Scala, fields in a [[Row]] object can be extracted in a pattern match. Example: * {{{ * import org.apache.spark.sql._ * * val pairs = sql("SELECT key, value FROM src").rdd.map { * case Row(key: Int, value: String) => * key -> value * } * }}} * * @since 1.3.0 */ @Stable trait Row extends Serializable { /** Number of elements in the Row. */ def size: Int = length /** Number of elements in the Row. */ def length: Int /** * Schema for the row. */ def schema: StructType = null /** * Returns the value at position i. If the value is null, null is returned. The following * is a mapping between Spark SQL types and return types: * * {{{ * BooleanType -> java.lang.Boolean * ByteType -> java.lang.Byte * ShortType -> java.lang.Short * IntegerType -> java.lang.Integer * LongType -> java.lang.Long * FloatType -> java.lang.Float * DoubleType -> java.lang.Double * StringType -> String * DecimalType -> java.math.BigDecimal * * DateType -> java.sql.Date if spark.sql.datetime.java8API.enabled is false * DateType -> java.time.LocalDate if spark.sql.datetime.java8API.enabled is true * * TimestampType -> java.sql.Timestamp if spark.sql.datetime.java8API.enabled is false * TimestampType -> java.time.Instant if spark.sql.datetime.java8API.enabled is true * * BinaryType -> byte array * ArrayType -> scala.collection.Seq (use getList for java.util.List) * MapType -> scala.collection.Map (use getJavaMap for java.util.Map) * StructType -> org.apache.spark.sql.Row * }}} */ def apply(i: Int): Any = get(i) /** * Returns the value at position i. If the value is null, null is returned. The following * is a mapping between Spark SQL types and return types: * * {{{ * BooleanType -> java.lang.Boolean * ByteType -> java.lang.Byte * ShortType -> java.lang.Short * IntegerType -> java.lang.Integer * LongType -> java.lang.Long * FloatType -> java.lang.Float * DoubleType -> java.lang.Double * StringType -> String * DecimalType -> java.math.BigDecimal * * DateType -> java.sql.Date if spark.sql.datetime.java8API.enabled is false * DateType -> java.time.LocalDate if spark.sql.datetime.java8API.enabled is true * * TimestampType -> java.sql.Timestamp if spark.sql.datetime.java8API.enabled is false * TimestampType -> java.time.Instant if spark.sql.datetime.java8API.enabled is true * * BinaryType -> byte array * ArrayType -> scala.collection.Seq (use getList for java.util.List) * MapType -> scala.collection.Map (use getJavaMap for java.util.Map) * StructType -> org.apache.spark.sql.Row * }}} */ def get(i: Int): Any /** Checks whether the value at position i is null. */ def isNullAt(i: Int): Boolean = get(i) == null /** * Returns the value at position i as a primitive boolean. * * @throws ClassCastException when data type does not match. * @throws NullPointerException when value is null. 
*/ def getBoolean(i: Int): Boolean = getAnyValAs[Boolean](i) /** * Returns the value at position i as a primitive byte. * * @throws ClassCastException when data type does not match. * @throws NullPointerException when value is null. */ def getByte(i: Int): Byte = getAnyValAs[Byte](i) /** * Returns the value at position i as a primitive short. * * @throws ClassCastException when data type does not match. * @throws NullPointerException when value is null. */ def getShort(i: Int): Short = getAnyValAs[Short](i) /** * Returns the value at position i as a primitive int. * * @throws ClassCastException when data type does not match. * @throws NullPointerException when value is null. */ def getInt(i: Int): Int = getAnyValAs[Int](i) /** * Returns the value at position i as a primitive long. * * @throws ClassCastException when data type does not match. * @throws NullPointerException when value is null. */ def getLong(i: Int): Long = getAnyValAs[Long](i) /** * Returns the value at position i as a primitive float. * Throws an exception if the type mismatches or if the value is null. * * @throws ClassCastException when data type does not match. * @throws NullPointerException when value is null. */ def getFloat(i: Int): Float = getAnyValAs[Float](i) /** * Returns the value at position i as a primitive double. * * @throws ClassCastException when data type does not match. * @throws NullPointerException when value is null. */ def getDouble(i: Int): Double = getAnyValAs[Double](i) /** * Returns the value at position i as a String object. * * @throws ClassCastException when data type does not match. */ def getString(i: Int): String = getAs[String](i) /** * Returns the value at position i of decimal type as java.math.BigDecimal. * * @throws ClassCastException when data type does not match. */ def getDecimal(i: Int): java.math.BigDecimal = getAs[java.math.BigDecimal](i) /** * Returns the value at position i of date type as java.sql.Date. * * @throws ClassCastException when data type does not match. */ def getDate(i: Int): java.sql.Date = getAs[java.sql.Date](i) /** * Returns the value at position i of date type as java.time.LocalDate. * * @throws ClassCastException when data type does not match. */ def getLocalDate(i: Int): java.time.LocalDate = getAs[java.time.LocalDate](i) /** * Returns the value at position i of date type as java.sql.Timestamp. * * @throws ClassCastException when data type does not match. */ def getTimestamp(i: Int): java.sql.Timestamp = getAs[java.sql.Timestamp](i) /** * Returns the value at position i of date type as java.time.Instant. * * @throws ClassCastException when data type does not match. */ def getInstant(i: Int): java.time.Instant = getAs[java.time.Instant](i) /** * Returns the value at position i of array type as a Scala Seq. * * @throws ClassCastException when data type does not match. */ def getSeq[T](i: Int): Seq[T] = getAs[scala.collection.Seq[T]](i).toSeq /** * Returns the value at position i of array type as `java.util.List`. * * @throws ClassCastException when data type does not match. */ def getList[T](i: Int): java.util.List[T] = getSeq[T](i).asJava /** * Returns the value at position i of map type as a Scala Map. * * @throws ClassCastException when data type does not match. */ def getMap[K, V](i: Int): scala.collection.Map[K, V] = getAs[Map[K, V]](i) /** * Returns the value at position i of array type as a `java.util.Map`. * * @throws ClassCastException when data type does not match. 
*/ def getJavaMap[K, V](i: Int): java.util.Map[K, V] = getMap[K, V](i).asJava /** * Returns the value at position i of struct type as a [[Row]] object. * * @throws ClassCastException when data type does not match. */ def getStruct(i: Int): Row = getAs[Row](i) /** * Returns the value at position i. * For primitive types if value is null it returns 'zero value' specific for primitive * ie. 0 for Int - use isNullAt to ensure that value is not null * * @throws ClassCastException when data type does not match. */ def getAs[T](i: Int): T = get(i).asInstanceOf[T] /** * Returns the value of a given fieldName. * For primitive types if value is null it returns 'zero value' specific for primitive * ie. 0 for Int - use isNullAt to ensure that value is not null * * @throws UnsupportedOperationException when schema is not defined. * @throws IllegalArgumentException when fieldName do not exist. * @throws ClassCastException when data type does not match. */ def getAs[T](fieldName: String): T = getAs[T](fieldIndex(fieldName)) /** * Returns the index of a given field name. * * @throws UnsupportedOperationException when schema is not defined. * @throws IllegalArgumentException when a field `name` does not exist. */ def fieldIndex(name: String): Int = { throw new UnsupportedOperationException("fieldIndex on a Row without schema is undefined.") } /** * Returns a Map consisting of names and values for the requested fieldNames * For primitive types if value is null it returns 'zero value' specific for primitive * ie. 0 for Int - use isNullAt to ensure that value is not null * * @throws UnsupportedOperationException when schema is not defined. * @throws IllegalArgumentException when fieldName do not exist. * @throws ClassCastException when data type does not match. */ def getValuesMap[T](fieldNames: Seq[String]): Map[String, T] = { fieldNames.map { name => name -> getAs[T](name) }.toMap } override def toString: String = this.mkString("[", ",", "]") /** * Make a copy of the current [[Row]] object. */ def copy(): Row /** Returns true if there are any NULL values in this row. */ def anyNull: Boolean = { val len = length var i = 0 while (i < len) { if (isNullAt(i)) { return true } i += 1 } false } override def equals(o: Any): Boolean = { if (!o.isInstanceOf[Row]) return false val other = o.asInstanceOf[Row] if (other eq null) return false if (length != other.length) { return false } var i = 0 while (i < length) { if (isNullAt(i) != other.isNullAt(i)) { return false } if (!isNullAt(i)) { val o1 = get(i) val o2 = other.get(i) o1 match { case b1: Array[Byte] => if (!o2.isInstanceOf[Array[Byte]] || !java.util.Arrays.equals(b1, o2.asInstanceOf[Array[Byte]])) { return false } case f1: Float if java.lang.Float.isNaN(f1) => if (!o2.isInstanceOf[Float] || ! java.lang.Float.isNaN(o2.asInstanceOf[Float])) { return false } case d1: Double if java.lang.Double.isNaN(d1) => if (!o2.isInstanceOf[Double] || ! java.lang.Double.isNaN(o2.asInstanceOf[Double])) { return false } case d1: java.math.BigDecimal if o2.isInstanceOf[java.math.BigDecimal] => if (d1.compareTo(o2.asInstanceOf[java.math.BigDecimal]) != 0) { return false } case _ => if (o1 != o2) { return false } } } i += 1 } true } override def hashCode: Int = { // Using Scala's Seq hash code implementation. var n = 0 var h = MurmurHash3.seqSeed val len = length while (n < len) { h = MurmurHash3.mix(h, apply(n).##) n += 1 } MurmurHash3.finalizeHash(h, n) } /* ---------------------- utility methods for Scala ---------------------- */ /** * Return a Scala Seq representing the row. 
Elements are placed in the same order in the Seq. */ def toSeq: Seq[Any] = { val n = length val values = new Array[Any](n) var i = 0 while (i < n) { values.update(i, get(i)) i += 1 } values.toSeq } /** Displays all elements of this sequence in a string (without a separator). */ def mkString: String = mkString("") /** Displays all elements of this sequence in a string using a separator string. */ def mkString(sep: String): String = mkString("", sep, "") /** * Displays all elements of this traversable or iterator in a string using * start, end, and separator strings. */ def mkString(start: String, sep: String, end: String): String = { val n = length val builder = new StringBuilder builder.append(start) if (n > 0) { builder.append(get(0)) var i = 1 while (i < n) { builder.append(sep) builder.append(get(i)) i += 1 } } builder.append(end) builder.toString() } /** * Returns the value at position i. * * @throws UnsupportedOperationException when schema is not defined. * @throws ClassCastException when data type does not match. * @throws NullPointerException when value is null. */ private def getAnyValAs[T <: AnyVal](i: Int): T = if (isNullAt(i)) throw new NullPointerException(s"Value at index $i is null") else getAs[T](i) /** * The compact JSON representation of this row. * @since 3.0 */ @Unstable def json: String = compact(jsonValue) /** * The pretty (i.e. indented) JSON representation of this row. * @since 3.0 */ @Unstable def prettyJson: String = pretty(render(jsonValue)) /** * JSON representation of the row. * * Note that this only supports the data types that are also supported by * [[org.apache.spark.sql.catalyst.encoders.RowEncoder]]. * * @return the JSON representation of the row. */ private[sql] def jsonValue: JValue = { require(schema != null, "JSON serialization requires a non-null schema.") lazy val zoneId = DateTimeUtils.getZoneId(SQLConf.get.sessionLocalTimeZone) lazy val dateFormatter = DateFormatter.apply(zoneId) lazy val timestampFormatter = TimestampFormatter(zoneId) // Convert an iterator of values to a json array def iteratorToJsonArray(iterator: Iterator[_], elementType: DataType): JArray = { JArray(iterator.map(toJson(_, elementType)).toList) } // Convert a value to json. 
def toJson(value: Any, dataType: DataType): JValue = (value, dataType) match { case (null, _) => JNull case (b: Boolean, _) => JBool(b) case (b: Byte, _) => JLong(b) case (s: Short, _) => JLong(s) case (i: Int, _) => JLong(i) case (l: Long, _) => JLong(l) case (f: Float, _) => JDouble(f) case (d: Double, _) => JDouble(d) case (d: BigDecimal, _) => JDecimal(d) case (d: java.math.BigDecimal, _) => JDecimal(d) case (d: Decimal, _) => JDecimal(d.toBigDecimal) case (s: String, _) => JString(s) case (b: Array[Byte], BinaryType) => JString(Base64.getEncoder.encodeToString(b)) case (d: LocalDate, _) => JString(dateFormatter.format(d)) case (d: Date, _) => JString(dateFormatter.format(d)) case (i: Instant, _) => JString(timestampFormatter.format(i)) case (t: Timestamp, _) => JString(timestampFormatter.format(t)) case (i: CalendarInterval, _) => JString(i.toString) case (a: Array[_], ArrayType(elementType, _)) => iteratorToJsonArray(a.iterator, elementType) case (s: Seq[_], ArrayType(elementType, _)) => iteratorToJsonArray(s.iterator, elementType) case (m: Map[String @unchecked, _], MapType(StringType, valueType, _)) => new JObject(m.toList.sortBy(_._1).map { case (k, v) => k -> toJson(v, valueType) }) case (m: Map[_, _], MapType(keyType, valueType, _)) => new JArray(m.iterator.map { case (k, v) => new JObject("key" -> toJson(k, keyType) :: "value" -> toJson(v, valueType) :: Nil) }.toList) case (r: Row, _) => r.jsonValue case (v: Any, udt: UserDefinedType[Any @unchecked]) => val dataType = udt.sqlType toJson(CatalystTypeConverters.convertToScala(udt.serialize(v), dataType), dataType) case _ => throw new IllegalArgumentException(s"Failed to convert value $value " + s"(class of ${value.getClass}}) with the type of $dataType to JSON.") } // Convert the row fields to json var n = 0 var elements = new mutable.ListBuffer[JField] val len = length while (n < len) { val field = schema(n) elements += (field.name -> toJson(apply(n), field.dataType)) n += 1 } new JObject(elements.toList) } }
rednaxelafx/apache-spark
sql/catalyst/src/main/scala/org/apache/spark/sql/Row.scala
Scala
apache-2.0
20,235
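The Row excerpt above defines positional access (getAs), rendering (mkString) and a structural equals that compares byte arrays by content and treats NaN as equal to NaN. The sketch below illustrates that behaviour, assuming only the public Row(...) varargs factory from org.apache.spark.sql; the field values are invented for illustration.

import org.apache.spark.sql.Row

object RowSemanticsDemo {
  def main(args: Array[String]): Unit = {
    val r = Row("alice", 42, null)

    // Positional access: getAs only casts, so the caller names the expected type.
    println(r.getAs[String](0))            // alice
    println(r.getAs[Int](1))               // 42

    // anyNull scans the fields for at least one null.
    println(r.anyNull)                     // true

    // mkString renders the fields between the given start/separator/end strings.
    println(r.mkString("[", ",", "]"))     // [alice,42,null]

    // equals compares byte arrays by content and treats NaN as equal to NaN.
    println(Row(Array[Byte](1, 2)) == Row(Array[Byte](1, 2)))   // true
    println(Row(Double.NaN) == Row(Double.NaN))                 // true
  }
}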
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.carbondata.spark.tasks import java.io.IOException import scala.collection.mutable import org.apache.carbondata.common.factory.CarbonCommonFactory import org.apache.carbondata.core.cache.dictionary.Dictionary import org.apache.carbondata.core.constants.CarbonCommonConstants import org.apache.carbondata.core.util.DataTypeUtil import org.apache.carbondata.core.writer.CarbonDictionaryWriter import org.apache.carbondata.spark.rdd.DictionaryLoadModel /** * * @param valuesBuffer * @param dictionary * @param model * @param columnIndex * @param writer */ class DictionaryWriterTask(valuesBuffer: mutable.HashSet[String], dictionary: Dictionary, model: DictionaryLoadModel, columnIndex: Int, var writer: CarbonDictionaryWriter = null) { /** * execute the task * * @return distinctValueList and time taken to write */ def execute(): java.util.List[String] = { val values = valuesBuffer.toArray java.util.Arrays.sort(values, Ordering[String]) val dictService = CarbonCommonFactory.getDictionaryService writer = dictService.getDictionaryWriter( model.table, model.columnIdentifier(columnIndex), model.hdfsLocation) val distinctValues: java.util.List[String] = new java.util.ArrayList() try { if (!model.dictFileExists(columnIndex)) { writer.write(CarbonCommonConstants.MEMBER_DEFAULT_VAL) distinctValues.add(CarbonCommonConstants.MEMBER_DEFAULT_VAL) } if (values.length >= 1) { if (model.dictFileExists(columnIndex)) { for (value <- values) { val parsedValue = DataTypeUtil.normalizeColumnValueForItsDataType(value, model.primDimensions(columnIndex)) if (null != parsedValue && dictionary.getSurrogateKey(parsedValue) == CarbonCommonConstants.INVALID_SURROGATE_KEY) { writer.write(parsedValue) distinctValues.add(parsedValue) } } } else { for (value <- values) { val parsedValue = DataTypeUtil.normalizeColumnValueForItsDataType(value, model.primDimensions(columnIndex)) if (null != parsedValue) { writer.write(parsedValue) distinctValues.add(parsedValue) } } } } } catch { case ex: IOException => throw ex } finally { if (null != writer) { writer.close() } } distinctValues } /** * update dictionary metadata */ def updateMetaData() { if (null != writer) { writer.commit() } } }
ashokblend/incubator-carbondata
integration/spark-common/src/main/scala/org/apache/carbondata/spark/tasks/DictionaryWriterTask.scala
Scala
apache-2.0
3,435
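DictionaryWriterTask.execute sorts the buffered values and writes only those the existing dictionary does not already contain. The CarbonData Dictionary, DictionaryLoadModel and writer types are not reproduced in this row, so the sketch below only mirrors that filtering step with plain Scala collections; existingDictionary and normalize are stand-ins for the Dictionary lookup and the DataTypeUtil.normalizeColumnValueForItsDataType call.

import scala.collection.mutable

object DistinctValueFilterSketch {
  def main(args: Array[String]): Unit = {
    val valuesBuffer = mutable.HashSet("banana", "Apple ", "cherry")
    val existingDictionary = Set("cherry")        // stand-in for surrogate-key lookups
    def normalize(v: String): String = v.trim     // stand-in for data-type normalization

    // Mirror of execute(): sort the buffered values, normalize each one,
    // and keep only those the existing dictionary does not already know.
    val values = valuesBuffer.toArray
    java.util.Arrays.sort(values, Ordering[String])

    val distinctValues =
      values.iterator
        .map(normalize)
        .filter(v => v != null && !existingDictionary.contains(v))
        .toList

    println(distinctValues)   // List(Apple, banana)
  }
}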
package com.chriswk.bnet.wow.model case class Perks(perks: List[Perk]) case class Perk(guildLevel: Long, spell: Spell)
chriswk/sbnetapi
src/main/scala/com/chriswk/bnet/wow/model/Perk.scala
Scala
mit
120
import language.experimental.namedTypeArguments trait Tuple case class TCons[H, T <: Tuple](h: H, t: T) extends Tuple case object TNil extends Tuple // Type level natural numbers ------------------------------------------------- sealed trait Nat sealed trait Succ[P <: Nat] extends Nat sealed trait Zero extends Nat // Accessor type class to compute the N'th element of an Tuple L -------------- trait At[L <: Tuple, N <: Nat, Out] { def apply(l: L): Out } object At { implicit def caseZero[H, T <: Tuple]: At[H TCons T, Zero, H] = new At[H TCons T, Zero, H] { def apply(l: H TCons T): H = { val (h TCons _) = l h } } implicit def caseN[H, T <: Tuple, N <: Nat, O] (implicit a: At[T, N, O]): At[H TCons T, Succ[N], O] = new At[H TCons T, Succ[N], O] { def apply(l: H TCons T): O = { val (_ TCons t) = l a(t) } } } // An HMap is an Tuple with HEntry elements. We are reusing Tuple for it's nice syntax final case class HEntry[K, V](value: V) // Accessor type class to compute the element of type K in a HMap L ----------- trait PhantomGet[K, M <: Tuple, I <: Nat] // extends PhantomAny object PhantomGet { implicit def getHead[K, V, T <: Tuple] : PhantomGet[K, HEntry[K, V] TCons T, Zero] = null implicit def getTail[K, H, T <: Tuple, I <: Nat] (implicit t: PhantomGet[K, T, I]) : PhantomGet[K, H TCons T, Succ[I]] = null } // Syntax --------------------------------------------------------------------- object syntax { object hmap { implicit class HmapGet[M <: Tuple](m: M) { def get[K, V, I <: Nat](k: K) (implicit g: PhantomGet[k.type, M, I], a: At[M, I, HEntry[k.type, V]] ): V = a(m).value } def --[K, V](key: K, value: V) = HEntry[key.type, V](value) } } object Test { def main(args: Array[String]): Unit = { import syntax.hmap.* val map1 = TCons(HEntry[K = "name"]("foo"), TCons(HEntry[K = "genre"](true), TCons(HEntry[K = "moneyz"](123), TCons(HEntry[K = "cat"]("bar"), (TNil: TNil.type))))) assert(map1.get("name") == "foo") assert(map1.get("genre") == true) assert(map1.get("moneyz") == 123) assert(map1.get("cat") == "bar") val map2 = TCons(--("name" , "foo"), TCons(--("genre" , true), TCons(--("moneyz", 123), TCons(--("cat" , "bar"), TNil)))) assert(map2.get("name") == "foo") assert(map2.get("genre") == true) assert(map2.get("moneyz") == 123) assert(map2.get("cat") == "bar") } }
dotty-staging/dotty
tests/run/hmap.scala
Scala
apache-2.0
2,595
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.BigDaddyAG import org.apache.flink.api.scala._ import scala.io.Source // Important: include the TABLE API import import org.apache.flink.api.scala.table._ import java.io.File // Define a class describing the "items" (lines) in your CSV file //case class gccColumnItems(col1: String, col2: String, col3: String, col4: String, col5: String, col6: String, col7: String, col8: String , col9: String) //case class MyLineitem(col1: String, col2: String) //case class bcrColumnItems(id: String, consentStatus: String) object DataPreprocessor { def main(args: Array[String]){ // enable recursive enumeration of nested input files val env = ExecutionEnvironment.getExecutionEnvironment val gccFile = getGccFile(env, "/Users/stefan/Documents/Uni/SoSe 2015/Medical Bioinformatics/assignment11/BigDaddyAG/MedBioPro/data/GCC/All/allGccDataClean.csv") .as('f1col1, 'f1col2, 'f1col3, 'f1col4, 'f1col5, 'f1col6, 'f1col7, 'f1col8, 'f1col9, 'f1col9, 'f1col10, 'f1col11 , 'f1col12 , 'f1col13, 'f1col14, 'f1col15, 'f1col16, 'f1col17) /* val items = gccFile.join(gccFile) .where('geneExpression.standardDeviation > 3) .select() */ env.execute("Preprocess data") } // This method reads all rows but only selected columns from a file and returns a dataset private def getGccFile(env: ExecutionEnvironment, path:String): DataSet[gccColumnItems] = { env.readCsvFile[gccColumnItems]( path, fieldDelimiter = "\\t", includedFields = Array(0, 1, 2, 3 , 4, 5, 6, 7, 8, 9 , 10 , 11, 12, 13, 14, 15, 16)) } // This method reads all rows but only selected columns from a file and returns a dataset private def getBcrFile(env: ExecutionEnvironment, path:String): DataSet[bcrColumnItems] = { env.readCsvFile[bcrColumnItems]( path, fieldDelimiter = "\\t", includedFields = Array(1, 3)) } /* private def standardDeviation() = { } */ }
BigDaddyAG/MedBioPro
src/main/scala/de/BigDaddyAG/DataPreprocessor.scala
Scala
mit
2,766
/******************************************************************************* * Copyright 2017 Capital One Services, LLC and Bitwise, Inc. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *******************************************************************************/ package hydrograph.engine.spark.components.adapter import hydrograph.engine.core.component.generator.RunSqlGenerator import hydrograph.engine.jaxb.commontypes.TypeBaseComponent import hydrograph.engine.spark.components.RunSQLComponent import hydrograph.engine.spark.components.adapter.base.RunProgramAdapterBase import hydrograph.engine.spark.components.base.CommandComponentSparkFlow import hydrograph.engine.spark.components.platform.BaseComponentParams /** * The Class RunSqlAdapter. * * @author Bitwise * */ class RunSqlAdapter(typeBaseComponent: TypeBaseComponent) extends RunProgramAdapterBase { private var runSqlEntityGenerator: RunSqlGenerator = null private var runSQLComponenet: RunSQLComponent = null override def createGenerator(): Unit = { runSqlEntityGenerator = new RunSqlGenerator(typeBaseComponent) } override def createComponent(baseComponentParams: BaseComponentParams): Unit = { runSQLComponenet= new RunSQLComponent(runSqlEntityGenerator.getEntity) } override def getComponent(): CommandComponentSparkFlow = runSQLComponenet }
capitalone/Hydrograph
hydrograph.engine/hydrograph.engine.spark/src/main/scala/hydrograph/engine/spark/components/adapter/RunSqlAdapter.scala
Scala
apache-2.0
1,857
package monadasync package stream import java.util.concurrent.atomic.AtomicReference import scalaz.std.option.none import scalaz.stream.Process import scalaz.syntax.monad._ import scalaz.syntax.monoid._ import scalaz.syntax.std.option._ import scalaz.{ Catchable, Monad, Monoid } class ProcessStepper[F[_]: Monad: Catchable, A: Monoid](p: Process[F, A]) { import scalaz.stream.Cause._ import scalaz.stream.Process.{ Await, Emit, Halt, Step } private val cur = new AtomicReference[Process[F, A]](p) def read: F[Option[A]] = readFrom(finishing = true) val Done: F[Option[A]] = none[A].point[F] private def readFrom(finishing: Boolean): F[Option[A]] = { cur.get.step match { case s: Step[F, A] @unchecked => (s.head, s.next) match { case (Emit(os), cont) => os.foldLeft[A](∅)((a, o) => a |+| o).point[F] >>= { a => cur.set(cont.continue) a.some.point[F] } case (awt: Await[F, Any, A] @unchecked, cont) => awt.evaluate flatMap { q => cur.set(q +: cont) readFrom(finishing = false) } } case Halt(End) => Done case Halt(Kill) => Done case Halt(Error(rsn)) => Catchable[F].fail(rsn) } } }
lukiano/monadasync
stream/src/main/scala/monadasync/stream/ProcessStepper.scala
Scala
apache-2.0
1,319
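ProcessStepper pulls a scalaz-stream Process one step at a time and folds each emitted chunk with the element Monoid. A possible driver is sketched below, assuming Process.emitAll(...).toSource from scalaz-stream and the Task instances from scalaz.concurrent are available in the versions this library targets; the numbers are arbitrary.

import scalaz.concurrent.Task
import scalaz.std.anyVal._          // provides Monoid[Int]
import scalaz.stream.Process

object ProcessStepperDemo {
  def main(args: Array[String]): Unit = {
    // A source that emits 1, 2, 3 in a single chunk.
    val source: Process[Task, Int] = Process.emitAll(Seq(1, 2, 3)).toSource

    val stepper = new monadasync.stream.ProcessStepper[Task, Int](source)

    // The first read folds the emitted chunk with |+| (Int addition): Some(6).
    println(stepper.read.run)
    // Once the process halts, read yields None.
    println(stepper.read.run)
  }
}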
/* * La Trobe University - Distributed Deep Learning System * Copyright 2014 Matthias Langer ([email protected]) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.latrobe.blaze.objectives import edu.latrobe._ import edu.latrobe.blaze._ import edu.latrobe.time._ import java.io.OutputStream /** * A pseudo target that does nothing except invoking the garbage collector. * (Use this only for debugging purposes!) */ final class RunGarbageCollector(override val builder: RunGarbageCollectorBuilder, override val seed: InstanceSeed) extends IndependentObjective[RunGarbageCollectorBuilder] { override protected def doEvaluate(sink: Sink, optimizer: OptimizerLike, runBeginIterationNo: Long, runBeginTime: Timestamp, runNoSamples: Long, model: Module, batch: Batch, output: Tensor, value: Real) : Option[ObjectiveEvaluationResult] = { System.gc() System.runFinalization() None } } final class RunGarbageCollectorBuilder extends IndependentObjectiveBuilder[RunGarbageCollectorBuilder] { override def repr : RunGarbageCollectorBuilder = this override def canEqual(that: Any) : Boolean = that.isInstanceOf[RunGarbageCollectorBuilder] override protected def doCopy() : RunGarbageCollectorBuilder = RunGarbageCollectorBuilder() override def build(seed: InstanceSeed) : RunGarbageCollector = new RunGarbageCollector(this, seed) } object RunGarbageCollectorBuilder { final def apply() : RunGarbageCollectorBuilder = new RunGarbageCollectorBuilder }
bashimao/ltudl
blaze/src/main/scala/edu/latrobe/blaze/objectives/RunGarbageCollector.scala
Scala
apache-2.0
2,439
/*
 * Copyright 2016 rdbc contributors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.rdbc.pgsql.core.internal.typecodec.sco

import io.rdbc.pgsql.core.types.{PgFloat4, PgFloat4Type}
import scodec.codecs._

/** Binary codec for the Postgres `float4` type: a 4-byte IEEE 754 float
  * whose wire format does not depend on any session parameters. */
private[typecodec] object ScodecPgFloat4Codec
  extends ScodecPgValCodec[PgFloat4]
    with IgnoreSessionParams[PgFloat4] {

  val typ = PgFloat4Type
  val codec = float.as[PgFloat4]
}
rdbc-io/rdbc-pgsql
rdbc-pgsql-core/src/main/scala/io/rdbc/pgsql/core/internal/typecodec/sco/ScodecPgFloat4Codec.scala
Scala
apache-2.0
919
// A reader whose result type is left abstract: each concrete reader refines Contents.
trait Reader {
  type Contents
  def read(fileName: String): Contents
}

import scala.io._

// Refines Contents to String and reads the whole file as UTF-8 text.
class StringReader extends Reader {
  type Contents = String
  def read(fileName: String) = Source.fromFile(fileName, "UTF-8").mkString
}

import java.awt.image._
import java.io._
import javax.imageio._

// Refines Contents to BufferedImage and decodes the file with ImageIO.
class ImageReader extends Reader {
  type Contents = BufferedImage
  def read(fileName: String) = ImageIO.read(new File(fileName))
}
yeahnoob/scala-impatient-2e-code
src/ch19/sec12/Reader.scala
Scala
gpl-3.0
429
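Because Contents is an abstract type member, the result type of read follows the concrete subclass and callers need no casts. A short usage sketch; the file names are placeholders.

object ReaderDemo {
  // The result type is path-dependent: it follows the refinement in the concrete reader.
  def contentsOf(r: Reader)(fileName: String): r.Contents = r.read(fileName)

  def main(args: Array[String]): Unit = {
    val text = new StringReader
    // Contents = String for StringReader, so no cast is needed.
    val s: String = text.read("notes.txt")
    println(s.length)

    val image = new ImageReader
    // Contents = BufferedImage for ImageReader.
    val img: java.awt.image.BufferedImage = image.read("logo.png")
    println(img.getWidth)

    println(contentsOf(text)("notes.txt").length)
  }
}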
package slick.test.driver import org.junit.runner.RunWith import com.typesafe.slick.testkit.util.{StandardTestDBs, DriverTest, Testkit} @RunWith(classOf[Testkit]) class H2MemTest extends DriverTest(StandardTestDBs.H2Mem) @RunWith(classOf[Testkit]) class H2RownumTest extends DriverTest(StandardTestDBs.H2Rownum) @RunWith(classOf[Testkit]) class H2DiskTest extends DriverTest(StandardTestDBs.H2Disk) @RunWith(classOf[Testkit]) class HsqldbMemTest extends DriverTest(StandardTestDBs.HsqldbMem) @RunWith(classOf[Testkit]) class HsqldbDiskTest extends DriverTest(StandardTestDBs.HsqldbDisk) @RunWith(classOf[Testkit]) class SQLiteMemTest extends DriverTest(StandardTestDBs.SQLiteMem) @RunWith(classOf[Testkit]) class SQLiteDiskTest extends DriverTest(StandardTestDBs.SQLiteDisk) @RunWith(classOf[Testkit]) class DerbyMemTest extends DriverTest(StandardTestDBs.DerbyMem) @RunWith(classOf[Testkit]) class DerbyDiskTest extends DriverTest(StandardTestDBs.DerbyDisk) @RunWith(classOf[Testkit]) class PostgresTest extends DriverTest(StandardTestDBs.Postgres) @RunWith(classOf[Testkit]) class MySQLTest extends DriverTest(StandardTestDBs.MySQL) @RunWith(classOf[Testkit]) class HeapTest extends DriverTest(StandardTestDBs.Heap)
jkutner/slick
slick-testkit/src/test/scala/slick/test/driver/DriverTest.scala
Scala
bsd-2-clause
1,231
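Each class above simply binds the shared Testkit suite to one of the bundled StandardTestDBs. Covering a further configuration follows the same one-line pattern; the class below is purely illustrative and reuses a database that StandardTestDBs already defines.

import org.junit.runner.RunWith
import com.typesafe.slick.testkit.util.{StandardTestDBs, DriverTest, Testkit}

// Runs the full Testkit suite once more against the in-memory H2 profile.
@RunWith(classOf[Testkit]) class H2MemSmokeTest extends DriverTest(StandardTestDBs.H2Mem)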
/* Copyright 2013 Ilya Lakhin (Илья Александрович Лахин) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package name.lakhin.eliah.projects package papacarlo.lexis final class Contextualizer { final class TokenPairDefinition(val open: String, val close: String, val context: Int) { private[lexis] var top = false private[lexis] var priorityLevel: Int = 1 private[lexis] var skipLevel: SkipLevel = OriginalSkipping private[lexis] var caching = false def forceSkip = { skipLevel = ForceSkip this } def forceUse = { skipLevel = ForceUse this } def priority(priority: Int) = { this.priorityLevel = priority this } def topContext = { this.top = true this } def allowCaching = { this.caching = true this } } private var pairs = Vector.empty[TokenPairDefinition] private var stateMachineCache = Option.empty[Map[String, Map[Int, (SeamType, Int)]]] private[papacarlo] var lineCutTokens = Set.empty[String] def trackContext(open: String, close: String) = { val result = new TokenPairDefinition(open, close, pairs.length + 1) pairs :+= result stateMachineCache = None if (open == Token.LineBreakKind & close != Token.LineBreakKind) lineCutTokens += close if (open != Token.LineBreakKind & close == Token.LineBreakKind) lineCutTokens += open result } private def stateMachine = { stateMachineCache.getOrElse({ var stateMachine = List.empty[(String, Int, SeamType, Int)] for (first <- pairs) { stateMachine ::= Tuple4(first.open, 0, EnterContext, first.context) stateMachine ::= Tuple4(first.close, first.context, LeaveContext, -1) if (first.open != first.close) { if (!first.top) stateMachine ::= Tuple4(first.open, first.context, EnterContext, first.context) if (first.close != Token.LineBreakKind) stateMachine ::= Tuple4(first.close, 0, UnexpectedSeam, -1) } for (second <- pairs) if (!second.top && first.context != second.context && first.priorityLevel >= second.priorityLevel) { stateMachine ::= Tuple4(first.open, second.context, EnterContext, first.context) if (first.open != first.close && first.close != Token.LineBreakKind) { stateMachine ::= Tuple4(first.close, second.context, UnexpectedSeam, -1) } } } val result = stateMachine .groupBy(_._1) .mapValues( _.groupBy(_._2) .mapValues( list => (list.head._3, list.head._4) ) .toMap ) .toMap this.stateMachineCache = Some(result) result }) } def contextualize(entryContext: Context, tokens: Seq[Token]) = { val stateMachine = this.stateMachine var context = entryContext for (token <- tokens) { val next = stateMachine .get(token.kind) .flatMap(_.get(context.kind)) .getOrElse(Tuple2(RegularSeam, 0)) token.seam = next._1 next._1 match { case EnterContext => context = context.branch(next._2) token.context = context case LeaveContext => token.context = context context = context.parent.getOrElse(Context.Base) case _ => token.context = context } } context } private[lexis] def isCachableContext(context: Context) = pairs.lift(context.kind - 1).exists(_.caching) private[lexis] def getContextSkipLevel(context: Context) = pairs.lift(context.kind - 1).map(_.skipLevel) }
Eliah-Lakhin/papa-carlo
src/main/scala/name.lakhin.eliah.projects/papacarlo/lexis/Contextualizer.scala
Scala
apache-2.0
4,664
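trackContext registers an open/close token pair and returns a TokenPairDefinition whose fluent methods (priority, forceSkip, forceUse, topContext, allowCaching) tune how the pair is treated before contextualize walks a token stream. A configuration sketch using only the API shown above; the token kinds are chosen for illustration.

import name.lakhin.eliah.projects.papacarlo.lexis.Contextualizer

object ContextConfigSketch {
  def main(args: Array[String]): Unit = {
    val contextualizer = new Contextualizer

    // Block comments: skip their contents entirely and never nest other contexts in them.
    contextualizer.trackContext("/*", "*/").forceSkip.topContext

    // String literals: keep their tokens, but treat them as a top-level context.
    contextualizer.trackContext("\"", "\"").topContext

    // Braces delimit ordinary nested contexts whose fragments may be cached.
    contextualizer.trackContext("{", "}").allowCaching
  }
}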
package codechicken.multipart.scalatraits import codechicken.multipart.TileMultipart import codechicken.multipart.TMultiPart import codechicken.multipart.TSlottedPart /** * Mixin implementation for TSlottedPart. * Puts parts into a slot array for quick access at the cost of memory consumption */ trait TSlottedTile extends TileMultipart { var v_partMap = new Array[TMultiPart](27) override def copyFrom(that:TileMultipart) { super.copyFrom(that) if(that.isInstanceOf[TSlottedTile]) v_partMap = that.asInstanceOf[TSlottedTile].v_partMap } override def partMap(slot:Int) = v_partMap(slot) override def clearParts() { super.clearParts() for(i <- 0 until v_partMap.length) v_partMap(i) = null } override def partRemoved(part:TMultiPart, p:Int) { super.partRemoved(part, p) if(part.isInstanceOf[TSlottedPart]) for(i <- 0 until 27) if(partMap(i) == part) v_partMap(i) = null } override def canAddPart(part:TMultiPart):Boolean = { if(part.isInstanceOf[TSlottedPart]) { val slotMask = part.asInstanceOf[TSlottedPart].getSlotMask for(i <- 0 until v_partMap.length) if((slotMask&1<<i) != 0 && partMap(i) != null) return false } return super.canAddPart(part) } override def bindPart(part:TMultiPart) { super.bindPart(part) if(part.isInstanceOf[TSlottedPart]) { val mask = part.asInstanceOf[TSlottedPart].getSlotMask for(i <- 0 until 27) if ((mask&1<<i) > 0) v_partMap(i) = part } } }
Chicken-Bones/ForgeMultipart
src/codechicken/multipart/scalatraits/TSlottedTile.scala
Scala
lgpl-2.1
1,789
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** @author John Miller * @builder scalation.util.bld.BldMM_Array * @version 1.3 * @date Thu Sep 24 14:03:17 EDT 2015 * @see LICENSE (MIT style license file). * * @see www.programering.com/a/MDO2cjNwATI.html */ package scalation package util import java.io.{RandomAccessFile, Serializable} import java.lang.Cloneable import java.nio.{ByteBuffer, MappedByteBuffer} import java.nio.channels.FileChannel import scala.collection._ import scala.collection.mutable.{AbstractSeq, IndexedSeq} //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** The `MM_ArrayI` class provides support for large, persistent arrays via memory * mapped files. Currently, the size of a memory mapped array is limited to * 2GB (2^31), since indices are signed 32-bit integers. * FIX: use Long for indices and multiple files to remove 2GB limitation * @see https://github.com/xerial/larray/blob/develop/README.md * @param _length the number of elements in the `mem_mapped` array */ final class MM_ArrayI (_length: Int) extends AbstractSeq [Int] with IndexedSeq [Int] with Serializable with Cloneable { import MM_ArrayI.{_count, E_SIZE} /** The number of bytes in this memory mapped file */ val nBytes = _length * E_SIZE /** The file name for this memory mapped files */ val fname = { _count += 1; "mem_mapped_" + _count } /** The random/direct access file */ private val raf = new RandomAccessFile (MEM_MAPPED_DIR + fname, "rw"); /** The random access file mapped into memory */ private val mraf = raf.getChannel ().map (FileChannel.MapMode.READ_WRITE, 0, nBytes); /** The range of index positions for 'this' memory mapped array */ private val range = 0 until _length //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** Return the size of elements in the memory mapped file. */ def length: Int = _length //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** Get the bytes in the file starting at 'index'. * @param index the index position in the file */ def apply (index: Int): Int = { mraf.getInt (E_SIZE * index) } // apply //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** Put the bytes in the file starting at 'index'. * @param index the index position in the file * @param x the double value to put */ def update (index: Int, x: Int) { mraf.putInt (E_SIZE * index, x) } // update //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** Fold left through 'this' array. * @param s0 the initial value * @param f the function to apply */ def foldLeft (s0: Int)(f: (Int, Int) => Int): Int = { var s = s0 for (i <- range) s = f (s, apply(i)) s } // foldLeft //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** Map elements of 'this' array by applying the function 'f'. * @param f the function to be applied */ def map (f: Int => Int): MM_ArrayI = { val c = new MM_ArrayI (_length) for (i <- range) c(i) = f(apply(i)) c } // map //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** Slice 'this' starting at 'from' and continuing until 'till' * @param from the starting index for the slice (inclusive) * @param till the ending index for the slice (exclusive) */ override def slice (from: Int, till: Int): MM_ArrayI = { MM_ArrayI (super.slice (from, till)) } // slice //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** Determine whether element 'x' is contained in this array. 
* @param x the element sought */ def contains (x: Int): Boolean = { for (i <- range if x == apply(i)) return true false } // contains //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** Create a sequence for 'this' array. */ def deep: immutable.IndexedSeq [Int] = for (i <- range) yield apply(i) //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** Close the memory mapped file. */ def close () { raf.close () } } // MM_ArrayI class //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** The `MM_ArrayI` companion object provides factory methods for the `MM_ArrayI` * class. */ object MM_ArrayI { /** The number of bytes required to store a `Int` */ private val E_SIZE = 4 /** The counter for ensuring files names are unique */ var _count = 0 //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** Create a memory mapped array from one or more values (repeated values `Int*`). * @param x the first `Int` number * @param xs the rest of the `Int` numbers */ def apply (x: Int, xs: Int*): MM_ArrayI = { val c = new MM_ArrayI (1 + xs.length) c(0) = x for (i <- 0 until c.length) c(i+1) = xs(i) c } // apply //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** Create a memory mapped array with 'n' elements. * @param n the number of elements */ def apply (xs: Seq [Int]): MM_ArrayI = { _count += 1 val c = new MM_ArrayI (xs.length) for (i <- 0 until c.length) c(i) = xs(i) c } // apply //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** Create a memory mapped array with 'n' elements. * @param n the number of elements */ def ofDim (n: Int): MM_ArrayI = { _count += 1 new MM_ArrayI (n) } // ofDim //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** Concatenate memory mapped arrays 'a' and 'b'. */ def concat (a: MM_ArrayI, b: MM_ArrayI): MM_ArrayI = { val (na, nb) = (a.length, b.length) val c = new MM_ArrayI (na + nb) for (i <- 0 until na) c(i) = a(i) for (i <- 0 until nb) c(i + na) = b(i) c } // concat } // MM_ArrayI object //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: /** The `MM_ArrayITest` is used to test the `MM_ArrayI` class. * > run-main scalation.util.MM_ArrayITest */ object MM_ArrayITest extends App { val n = 100 // number of elements val mraf = new MM_ArrayI (n) // memory mapped array // Write into the Memory Mapped File for (i <- 0 until n) mraf(i) = 2 * i println ("\\nWRITE: memory mapped file '" + mraf.fname + "' now has " + mraf.nBytes + " bytes") // Read from the Memory Mapped File println () // for (i <- 0 until n) print (mraf(i) + " ") println (mraf.deep) println ("READ: memory mapped file '" + mraf.fname + "' completed.") mraf.close () } // MM_ArrayITest object
NBKlepp/fda
scalation_1.3/scalation_mathstat/src/main/scala/scalation/util/MM_ArrayI.scala
Scala
mit
7,248
/* Copyright 2009-2021 EPFL, Lausanne */ object Arithmetic { def test(a: BigInt, b: BigInt, c: BigInt): BigInt = { require(a > b && c > BigInt(0)) c + a } ensuring( _ > c + b ) }
epfl-lara/stainless
frontends/benchmarks/coq/Arith.scala
Scala
apache-2.0
195
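The benchmark states its contract with require/ensuring, which Stainless turns into a verification condition. Another tiny function in the same style, illustrative only and not part of the benchmark suite:

object ArithmeticExtra {
  def scale(a: BigInt, b: BigInt): BigInt = {
    require(a >= BigInt(0) && b >= BigInt(1))
    a * b
  } ensuring(_ >= a)   // holds because a >= 0 and b >= 1
}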
/* ,i::, :;;;;;;; ;:,,::;. 1ft1;::;1tL t1;::;1, :;::; _____ __ ___ __ fCLff ;:: tfLLC / ___/ / |/ /____ _ _____ / /_ CLft11 :,, i1tffLi \__ \ ____ / /|_/ // __ `// ___// __ \ 1t1i .;; .1tf ___/ //___// / / // /_/ // /__ / / / / CLt1i :,: .1tfL. /____/ /_/ /_/ \__,_/ \___//_/ /_/ Lft1,:;: , 1tfL: ;it1i ,,,:::;;;::1tti AeonDB .t1i .,::;;; ;1tt Copyright (c) 2014 S-Mach, Inc. Lft11ii;::;ii1tfL: Author: [email protected] .L1 1tt1ttt,,Li ...1LLLL... */ package s_mach.aeondb.impl import s_mach.aeondb._ class CommitBuilder[A,B,PB] { private[this] val _checkout = Map.newBuilder[A,Long] private[this] val _put = Map.newBuilder[A,B] private[this] val _replace = Map.newBuilder[A,PB] private[this] val _deactivate = Set.newBuilder[A] private[this] val _reactivate= Map.newBuilder[A,B] def checkout(key: A, version: Long) = { _checkout.+=((key,version)) this } def put( key: A, value: B ) = { _put.+=((key,value)) this } def replace( key: A, patch: PB, expectedVersion: Long ) = { _checkout.+=((key,expectedVersion)) _replace.+=((key,patch)) this } def deactivate(key:A, expectedVersion:Long) = { _checkout.+=((key,expectedVersion)) _deactivate += key this } def reactivate( key:A, value:B, expectedVersion:Long ) = { _checkout.+=((key,expectedVersion)) _reactivate += ((key,value)) this } def result() : (Checkout[A], Commit[A,B,PB]) = { val checkout = _checkout.result() val put = _put.result() val replace = _replace.result() val deactivate = _deactivate.result() val reactivate = _reactivate.result() require( replace.keySet.forall(checkout.contains), "All changed ids must be checked out" ) require( deactivate.forall(checkout.contains), "All deactivated ids must be checked out" ) require( reactivate.forall { case (k,_) => checkout.contains(k) }, "All reactivated ids must be checked out") ( checkout, Commit( put = put, replace = replace, deactivate = deactivate, reactivate = reactivate ) ) } } object CommitBuilder { def apply[A,B,PB]() : CommitBuilder[A,B,PB] = new CommitBuilder[A,B,PB] }
S-Mach/aeondb
src/main/scala/s_mach/aeondb/impl/CommitBuilder.scala
Scala
apache-2.0
2,554
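CommitBuilder accumulates puts, patches, deactivations and reactivations, records which keys and versions must be checked out, and asserts those invariants in result(). A small sketch with invented key, value and patch types (String keys, Int values and Int patches):

import s_mach.aeondb.impl.CommitBuilder

object CommitBuilderSketch {
  def main(args: Array[String]): Unit = {
    val (checkout, commit) =
      CommitBuilder[String, Int, Int]()
        .put("new-key", 1)                                 // plain insert, no checkout needed
        .replace("existing-key", 41, expectedVersion = 7)  // patch, checked out at version 7
        .deactivate("old-key", expectedVersion = 3)        // tombstone, checked out at version 3
        .result()

    // replace and deactivate both register their keys in the checkout map,
    // so checkout now records existing-key -> 7 and old-key -> 3.
    println(checkout)
    println(commit.put)    // Map(new-key -> 1)
  }
}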
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package kafka.server import java.util.Properties import scala.collection.Seq import kafka.zk.ZooKeeperTestHarness import kafka.utils.TestUtils import org.junit.{After, Before, Test} import org.junit.Assert._ import java.io.File import org.scalatest.Assertions.intercept import org.apache.zookeeper.KeeperException.NodeExistsException class ServerGenerateBrokerIdTest extends ZooKeeperTestHarness { var props1: Properties = null var config1: KafkaConfig = null var props2: Properties = null var config2: KafkaConfig = null val brokerMetaPropsFile = "meta.properties" var servers: Seq[KafkaServer] = Seq() @Before override def setUp(): Unit = { super.setUp() props1 = TestUtils.createBrokerConfig(-1, zkConnect) config1 = KafkaConfig.fromProps(props1) props2 = TestUtils.createBrokerConfig(0, zkConnect) config2 = KafkaConfig.fromProps(props2) } @After override def tearDown(): Unit = { TestUtils.shutdownServers(servers) super.tearDown() } @Test def testAutoGenerateBrokerId(): Unit = { var server1 = new KafkaServer(config1, threadNamePrefix = Option(this.getClass.getName)) server1.startup() server1.shutdown() assertTrue(verifyBrokerMetadata(config1.logDirs, 1001)) // restart the server check to see if it uses the brokerId generated previously server1 = TestUtils.createServer(config1, threadNamePrefix = Option(this.getClass.getName)) servers = Seq(server1) assertEquals(server1.config.brokerId, 1001) server1.shutdown() TestUtils.assertNoNonDaemonThreads(this.getClass.getName) } @Test def testUserConfigAndGeneratedBrokerId(): Unit = { // start the server with broker.id as part of config val server1 = new KafkaServer(config1, threadNamePrefix = Option(this.getClass.getName)) val server2 = new KafkaServer(config2, threadNamePrefix = Option(this.getClass.getName)) val props3 = TestUtils.createBrokerConfig(-1, zkConnect) val server3 = new KafkaServer(KafkaConfig.fromProps(props3), threadNamePrefix = Option(this.getClass.getName)) server1.startup() assertEquals(server1.config.brokerId, 1001) server2.startup() assertEquals(server2.config.brokerId, 0) server3.startup() assertEquals(server3.config.brokerId, 1002) servers = Seq(server1, server2, server3) servers.foreach(_.shutdown()) assertTrue(verifyBrokerMetadata(server1.config.logDirs, 1001)) assertTrue(verifyBrokerMetadata(server2.config.logDirs, 0)) assertTrue(verifyBrokerMetadata(server3.config.logDirs, 1002)) TestUtils.assertNoNonDaemonThreads(this.getClass.getName) } @Test def testDisableGeneratedBrokerId(): Unit = { val props3 = TestUtils.createBrokerConfig(3, zkConnect) props3.put(KafkaConfig.BrokerIdGenerationEnableProp, "false") // Set reserve broker ids to cause collision and ensure disabling broker id generation ignores the setting props3.put(KafkaConfig.MaxReservedBrokerIdProp, "0") 
val config3 = KafkaConfig.fromProps(props3) val server3 = TestUtils.createServer(config3, threadNamePrefix = Option(this.getClass.getName)) servers = Seq(server3) assertEquals(server3.config.brokerId, 3) server3.shutdown() assertTrue(verifyBrokerMetadata(server3.config.logDirs, 3)) TestUtils.assertNoNonDaemonThreads(this.getClass.getName) } @Test def testMultipleLogDirsMetaProps(): Unit = { // add multiple logDirs and check if the generate brokerId is stored in all of them val logDirs = props1.getProperty("log.dir")+ "," + TestUtils.tempDir().getAbsolutePath + "," + TestUtils.tempDir().getAbsolutePath props1.setProperty("log.dir", logDirs) config1 = KafkaConfig.fromProps(props1) var server1 = new KafkaServer(config1, threadNamePrefix = Option(this.getClass.getName)) server1.startup() servers = Seq(server1) server1.shutdown() assertTrue(verifyBrokerMetadata(config1.logDirs, 1001)) // addition to log.dirs after generation of a broker.id from zk should be copied over val newLogDirs = props1.getProperty("log.dir") + "," + TestUtils.tempDir().getAbsolutePath props1.setProperty("log.dir", newLogDirs) config1 = KafkaConfig.fromProps(props1) server1 = new KafkaServer(config1, threadNamePrefix = Option(this.getClass.getName)) server1.startup() servers = Seq(server1) server1.shutdown() assertTrue(verifyBrokerMetadata(config1.logDirs, 1001)) TestUtils.assertNoNonDaemonThreads(this.getClass.getName) } @Test def testConsistentBrokerIdFromUserConfigAndMetaProps(): Unit = { // check if configured brokerId and stored brokerId are equal or throw InconsistentBrokerException var server1 = new KafkaServer(config1, threadNamePrefix = Option(this.getClass.getName)) //auto generate broker Id server1.startup() servers = Seq(server1) server1.shutdown() server1 = new KafkaServer(config2, threadNamePrefix = Option(this.getClass.getName)) // user specified broker id try { server1.startup() } catch { case _: kafka.common.InconsistentBrokerIdException => //success } server1.shutdown() TestUtils.assertNoNonDaemonThreads(this.getClass.getName) } @Test def testBrokerMetadataOnIdCollision(): Unit = { // Start a good server val propsA = TestUtils.createBrokerConfig(1, zkConnect) val configA = KafkaConfig.fromProps(propsA) val serverA = TestUtils.createServer(configA, threadNamePrefix = Option(this.getClass.getName)) // Start a server that collides on the broker id val propsB = TestUtils.createBrokerConfig(1, zkConnect) val configB = KafkaConfig.fromProps(propsB) val serverB = new KafkaServer(configB, threadNamePrefix = Option(this.getClass.getName)) intercept[NodeExistsException] { serverB.startup() } servers = Seq(serverA) // verify no broker metadata was written serverB.config.logDirs.foreach { logDir => val brokerMetaFile = new File(logDir + File.separator + brokerMetaPropsFile) assertFalse(brokerMetaFile.exists()) } // adjust the broker config and start again propsB.setProperty(KafkaConfig.BrokerIdProp, "2") val newConfigB = KafkaConfig.fromProps(propsB) val newServerB = TestUtils.createServer(newConfigB, threadNamePrefix = Option(this.getClass.getName)) servers = Seq(serverA, newServerB) serverA.shutdown() newServerB.shutdown() // verify correct broker metadata was written assertTrue(verifyBrokerMetadata(serverA.config.logDirs, 1)) assertTrue(verifyBrokerMetadata(newServerB.config.logDirs, 2)) TestUtils.assertNoNonDaemonThreads(this.getClass.getName) } def verifyBrokerMetadata(logDirs: Seq[String], brokerId: Int): Boolean = { for (logDir <- logDirs) { val brokerMetadataOpt = new BrokerMetadataCheckpoint( new 
File(logDir + File.separator + brokerMetaPropsFile)).read() brokerMetadataOpt match { case Some(brokerMetadata) => if (brokerMetadata.brokerId != brokerId) return false case _ => return false } } true } }
sslavic/kafka
core/src/test/scala/unit/kafka/server/ServerGenerateBrokerIdTest.scala
Scala
apache-2.0
7,918
/* * Copyright 2016 The BigDL Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intel.analytics.bigdl.nn.mkldnn import com.intel.analytics.bigdl.mkl._ import com.intel.analytics.bigdl.nn.Utils import com.intel.analytics.bigdl.nn.abstractnn.Activity import com.intel.analytics.bigdl.tensor.Tensor class MaxPooling( kW: Int, kH: Int, dW: Int = 1, dH: Int = 1, padW: Int = 0, padH: Int = 0 ) extends MklDnnLayer { @transient private var workSpaceFormat: MemoryData = _ @transient private var workSpace: Tensor[Float] = _ @transient private var fwdMemPrims: Array[Long] = _ @transient private var bwdMemPrims: Array[Long] = _ @transient private var paddingTL: Array[Int] = _ @transient private var paddingBR: Array[Int] = _ @transient private var fwdPD: Long = _ override private[mkldnn] def initFwdPrimitives(inputs: Array[MemoryData], phase: Phase) = { _inputFormats = singleNativeData(inputs) val strides = Array(dW, dH) val kernel = Array(kH, kW) val n = _inputFormats(0).shape(0) val c = _inputFormats(0).shape(1) val h = _inputFormats(0).shape(2) val w = _inputFormats(0).shape(3) val (pt, pb, pl, pr, oh, ow) = Utils.getPaddingAndOutputSize(h, w, dH, dW, kH, kW, padH, padW) paddingTL = Array(pt, pl) paddingBR = Array(pb, pr) Utils.getSAMEOutSizeAndPadding(h, w, dH, dW, kH, kW) Utils.getOutSizeAndPaddingForDNN(h, w, dH, dW, kH, kW, padH, padW, true) val outputMD = MklDnn.MemoryDescInit(4, Array(n, c, oh, ow), DataType.F32, Memory.Format.any) val description = MklDnn.PoolingForwardDescInit( PropKind.Forward, AlgKind.PoolingMax, _inputFormats(0).getMemoryDescription(), outputMD, strides, kernel, paddingTL, paddingBR, MklDnn.PaddingKind.mkldnnPaddingZero) fwdPD = MklDnn.PrimitiveDescCreate(description, runtime.engine, 0L) _outputFormats = Array(MemoryData.primitiveOutput(fwdPD)) output = initTensor(_outputFormats(0)) workSpaceFormat = MemoryData.primitiveWorkSpace(fwdPD) workSpace = initTensor(workSpaceFormat) updateOutputPrimitives = Array(MklDnn.PrimitiveCreate2(fwdPD, _inputFormats.map(_.getPrimitive(runtime)), Array(0), 1, Array(_outputFormats(0), workSpaceFormat).map(_.getPrimitive(runtime)), 2)) fwdMemPrims = Array(_inputFormats(0), _outputFormats(0), workSpaceFormat) .map(_.getPrimitive(runtime)) (_inputFormats, _outputFormats) } override private[mkldnn] def initBwdPrimitives(grad: Array[MemoryData], phase: Phase) = { _gradOutputFormats = singleNativeData(grad) _gradOutputFormatsForWeight = _gradOutputFormats val strides = Array(dW, dH) val kernel = Array(kH, kW) val description = MklDnn.PoolingBackwardDescInit(AlgKind.PoolingMax, _inputFormats(0).getMemoryDescription(), _gradOutputFormats(0).getMemoryDescription(), strides, kernel, paddingTL, paddingBR, MklDnn.PaddingKind.mkldnnPaddingZero) val pd = MklDnn.PrimitiveDescCreate(description, runtime.engine, fwdPD) _gradInputFormats = Array(MemoryData.primitiveGradInput(pd)) updateGradInputPrimitives = Array(MklDnn.PrimitiveCreate2(pd, Array(_gradOutputFormats(0), workSpaceFormat).map(_.getPrimitive(runtime)), Array(0, 0), 2, 
_gradInputFormats.map(_.getPrimitive(runtime)), 1)) gradInput = initTensor(_gradInputFormats(0)) bwdMemPrims = Array(_inputFormats(0), _gradOutputFormats(0), workSpaceFormat, _gradInputFormats(0)).map(_.getPrimitive(runtime)) (_gradOutputFormats, _gradInputFormats) } override def updateOutput(input: Activity): Activity = { val buffer = Array(input.asInstanceOf[Tensor[Float]], output.asInstanceOf[Tensor[Float]], workSpace) MklDnnOps.streamSubmit(runtime.stream, 1, updateOutputPrimitives, 1, fwdMemPrims, buffer) output } override def updateGradInput(input: Activity, gradOutput: Activity): Activity = { val buffer = Array( input.asInstanceOf[Tensor[Float]], gradOutput.asInstanceOf[Tensor[Float]], workSpace, gradInput.asInstanceOf[Tensor[Float]]) MklDnnOps.streamSubmit(runtime.stream, 1, updateGradInputPrimitives, 1, bwdMemPrims, buffer) gradInput } } object MaxPooling { def apply( kW: Int, kH: Int, dW: Int = 1, dH: Int = 1, padW: Int = 0, padH: Int = 0 ): MaxPooling = new MaxPooling(kW, kH, dW, dH, padW, padH) }
yiheng/BigDL
spark/dl/src/main/scala/com/intel/analytics/bigdl/nn/mkldnn/MaxPooling.scala
Scala
apache-2.0
4,922
//package regolic //package dpllt //package qfa // //import regolic.asts.core.Trees._ //import regolic.asts.core.Manip._ //import regolic.asts.fol.Trees._ //import regolic.asts.fol.Manip._ //import regolic.asts.theories.array.Trees._ // //import scala.collection.mutable.HashMap //import scala.collection.mutable.ListBuffer // //class Component(implicit val context: Context) extends TheoryComponent { // // case class Literal(lhs: Term, rhs: Term, id: Int, polInt: Int) extends AbstractLiteral // // // class Solver extends AbstractSolver { // // override def setTrue(l: Literal): Either[Set[Literal], Set[Literal]] // // // def isSat(and: List[Formula]): Option[Map[FunctionSymbol, Term]] = { // // def isReadOverWrite(t: Term): Boolean = t match { // case Select(Store(a, i, v), j) => true // case _ => false // } // // var additionalCnstr: Formula = null // val (And(newLits), found) = findAndMap(And(and), (f: Formula) => false, isReadOverWrite _, (f: Formula) => f, (t: Term) => { // val Select(Store(a, i, v), j) = t // additionalCnstr = Equals(i, j) // v // }) // // if(!found) { // isSatNoStore(and) // } else { // isSat(additionalCnstr :: newLits) match { // case Some(m) => Some(m) // case None => { // val (And(newLits), _) = findAndMap(And(and), (f: Formula) => false, isReadOverWrite _, (f: Formula) => f, (t: Term) => { // val Select(Store(a, i, v), j) = t // additionalCnstr = Not(Equals(i, j)) // Select(a, j) // }) // isSat(additionalCnstr :: newLits) // } // } // } // } // // // TODO signature and FastCongruenceSolver // //the clause only contains select or top level store that can be safely eliminated // private def isSatNoStore(and: List[Formula]): Option[Map[FunctionSymbol, Term]] = { // var arrayVarToFun: Map[FunctionSymbol, FunctionSymbol] = Map() // def removeStore(t: Term): Term = t match { // case Store(a, _, _) => a // case sel@Select(FunctionApplication(a: FunctionSymbol, List()), i) => arrayVarToFun.get(a) match { // case Some(f) => FunctionApplication(f, List(i)) // case None => { // val f = freshFunctionSymbol("f", List(i.sort), sel.sort) // arrayVarToFun += (a -> f) // FunctionApplication(f, List(i)) // } // } // case t => t // } // val And(cleanTerms) = mapPreorder(And(and), (f: Formula) => f, removeStore _) // regolic.smt.qfeuf.CongruenceSolver.isSat(cleanTerms) // } // //}
regb/scabolic
src/main/scala/regolic/dpllt/qfa/Component.scala
Scala
mit
2,619
package io.clouderite.commons.scala.berries.string import io.clouderite.commons.scala.berries.other.Gzipper import io.clouderite.commons.scala.berries.string.StringOperations.toStringOperations import scala.util.matching.Regex import scala.util.{Failure, Success, Try} import scalaz.syntax.std.boolean._ import scala.language.implicitConversions import scala.language.postfixOps class StringOperations(value: String) { def sliceLines(from: Int, to: Int): String = { require(from >= 0, "from parameter must be greater than or equal 0") require(to >= 0, "to parameter must be greater than or equal 0") require(from <= to, "from parameter must be lower than or equal to parameter") value.dropLines(from).takeLines(to - from + 1) } def negSliceLines(from: Int, to: Int): List[String] = { require(from >= 0, "from parameter must be greater than or equal 0") require(to >= 0, "to parameter must be greater than or equal 0") require(from <= to, "from parameter must be lower than or equal to parameter") List(value.takeLines(from), value.dropLines(to + 1)) } def takeLines(num: Int): String = { require(num >= 0, "num parameter must be greater than or equal 0") value.linesWithSeparators.take(num).mkString } def dropLines(num: Int): String = { require(num >= 0, "num parameter must be greater than or equal 0") value.linesWithSeparators.drop(num).mkString } def matches(pattern: Regex): Boolean = pattern unapplySeq value isDefined def tryMatch(pattern: Regex): Try[String] = matches(pattern) .option(Success(value)) .getOrElse(Failure(new IllegalArgumentException(s"cannot match value against pattern '$pattern'"))) def gzip: Array[Byte] = Gzipper.gzip(value) } object StringOperations { implicit def toStringOperations(value: String): StringOperations = new StringOperations(value) }
clouderite/scala-berries
src/main/scala/io/clouderite/commons/scala/berries/string/StringOperations.scala
Scala
mit
1,890
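With the implicit conversion from the companion object in scope, the line-slicing helpers become methods on any String. A quick tour over a four-line sample value; the expected outputs in the comments follow directly from the definitions above.

import io.clouderite.commons.scala.berries.string.StringOperations._

object StringOperationsDemo {
  def main(args: Array[String]): Unit = {
    val text = "alpha\nbeta\ngamma\ndelta"

    // takeLines / dropLines operate on whole lines, separators included.
    println(text.takeLines(2))         // "alpha\nbeta\n"
    println(text.dropLines(3))         // "delta"

    // sliceLines(from, to) keeps the zero-based lines from..to inclusive.
    println(text.sliceLines(1, 2))     // "beta\ngamma\n"

    // negSliceLines returns the text before and after that slice.
    println(text.negSliceLines(1, 2))  // List("alpha\n", "delta")

    // tryMatch wraps whole-string regex matching in a Try.
    println("2024-01-31".tryMatch("""\d{4}-\d{2}-\d{2}""".r).isSuccess)  // true
  }
}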
package drt.server.feeds.stn import java.util.TimeZone import drt.server.feeds.common.XlsExtractorUtil._ import drt.shared.Terminals.Terminal import drt.shared.api.Arrival import drt.shared.{ArrivalStatus, ForecastFeedSource, PortCode, SDateLike} import org.apache.poi.ss.usermodel.{Cell, DateUtil} import org.slf4j.{Logger, LoggerFactory} import services.SDate import scala.util.{Failure, Success, Try} case class STNForecastFlightRow(scheduledDate: SDateLike, flightCode: String = "", origin: String = "", internationalDomestic: String = "", maxPax: Int = 0, totalPax: Int = 0 ) object STNForecastXLSExtractor { val log: Logger = LoggerFactory.getLogger(getClass) def apply(xlsFilePath: String): List[Arrival] = rows(xlsFilePath) .map(stnFieldsToArrival) .collect { case Success(arrival) => arrival } def rows(xlsFilePath: String): List[STNForecastFlightRow] = { log.info(s"Extracting STN forecast flights from XLS Workbook located at $xlsFilePath") val lgwWorkSheet = workbook(xlsFilePath) val sheet = sheetMapByName("Arrivals by flight", lgwWorkSheet) val headingIndexByNameMap: Map[String, Int] = headingIndexByName(sheet.getRow(1)) val arrivalRowsTry: Seq[Try[STNForecastFlightRow]] = for { rowNumber <- 2 to sheet.getLastRowNum row = sheet.getRow(rowNumber) if row.getCell(1) != null && row.getCell(1).getCellType != Cell.CELL_TYPE_BLANK } yield { Try { val scheduledCell = tryNumericThenStringCellDoubleOption(headingIndexByNameMap("SCHEDULED TIME& DATE"), row) val carrierCodeCell = stringCellOption(headingIndexByNameMap("AIRLINE"), row).getOrElse("") val flightNumberCell = tryNumericThenStringCellIntOption(headingIndexByNameMap("FLIGHT NUMBER"), row) val originCell = stringCellOption(headingIndexByNameMap("DESTINATION / ORIGIN"), row) val maxPaxCell = tryNumericThenStringCellDoubleOption(headingIndexByNameMap("FLIGHT CAPACITY"), row) val totalCell = tryNumericThenStringCellDoubleOption(headingIndexByNameMap("FLIGHT FORECAST"), row) val internationalDomesticCell = stringCellOption(headingIndexByNameMap("TYPE"), row) val scheduled = SDate(DateUtil.getJavaDate(scheduledCell, TimeZone.getTimeZone("UTC")).getTime) val flightNumber: String = if (flightNumberCell == 0) "" else flightNumberCell.toString STNForecastFlightRow(scheduledDate = scheduled, flightCode = s"$carrierCodeCell$flightNumber", origin = originCell.getOrElse(""), internationalDomestic = internationalDomesticCell.getOrElse(""), totalPax = totalCell.toInt, maxPax = maxPaxCell.toInt ) } } val arrivalRows = arrivalRowsTry.zipWithIndex.toList.flatMap { case (Success(a), _) => Some(a) case (Failure(e), i) => log.warn(s"Invalid data on row ${i + 2} ${e.getMessage}", e) None }.filter(_.internationalDomestic == "INTERNATIONAL") log.info(s"Extracted ${arrivalRows.size} arrival rows from STN XLS Workbook") arrivalRows } def stnFieldsToArrival(flightRow: STNForecastFlightRow): Try[Arrival] = { Try { Arrival( Operator = None, Status = ArrivalStatus("Port Forecast"), Estimated = None, Actual = None, EstimatedChox = None, ActualChox = None, Gate = None, Stand = None, MaxPax = Some(flightRow.maxPax), ActPax = if (flightRow.totalPax == 0) None else Option(flightRow.totalPax), TranPax = Some(0), RunwayID = None, BaggageReclaimId = None, AirportID = PortCode("STN"), Terminal = Terminal("T1"), rawICAO = flightRow.flightCode.replace(" ", ""), rawIATA = flightRow.flightCode.replace(" ", ""), Origin = PortCode(flightRow.origin), Scheduled = flightRow.scheduledDate.millisSinceEpoch, PcpTime = None, FeedSources = Set(ForecastFeedSource) ) } } }
UKHomeOffice/drt-scalajs-spa-exploration
server/src/main/scala/drt/server/feeds/stn/STNForecastXLSExtractor.scala
Scala
apache-2.0
4,190
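stnFieldsToArrival maps one parsed spreadsheet row onto the shared Arrival model: port STN, terminal T1, forecast feed source, and a zero pax forecast treated as unknown. The sketch below exercises that mapping in isolation, assuming the SDate(millis) factory used above is available; the flight details are invented.

import drt.server.feeds.stn.{STNForecastFlightRow, STNForecastXLSExtractor}
import services.SDate

object StnRowMappingSketch {
  def main(args: Array[String]): Unit = {
    val row = STNForecastFlightRow(
      scheduledDate = SDate(1660000000000L),   // arbitrary epoch millis
      flightCode = "FR 1234",
      origin = "DUB",
      internationalDomestic = "INTERNATIONAL",
      maxPax = 189,
      totalPax = 0                             // a zero forecast becomes ActPax = None
    )

    val arrival = STNForecastXLSExtractor.stnFieldsToArrival(row)
    // The flight code is compacted into the raw IATA/ICAO fields.
    arrival.foreach(a => println((a.rawIATA, a.ActPax, a.MaxPax)))
  }
}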
package com.twitter.finagle.loadbalancer.aperture import com.twitter.finagle.util.Rng import scala.collection.mutable.ListBuffer private object Ring { /** * Returns the length of the intersection between the two ranges. * * @note this implementations assumes that the min(e0, e1) is greater * than max(b0, b1). It's up to the caller to handle the case where * the line segments wrap around the ring. */ def intersect(b0: Double, e0: Double, b1: Double, e1: Double): Double = { val len = math.min(e0, e1) - math.max(b0, b1) math.max(0.0, len) } } /** * Ring maps the indices [0, `size`) uniformly around a coordinate space [0.0, 1.0). * * It then provides methods for querying the indices across ranges (in the same * coordinate space) which the [[Aperture]] load balancer uses to calculate which * servers a respective client will talk to. See [[ProcessCoordinate]] for more * details about how clients compute their ranges which map into an instance of * [[Ring]]. * * @param size the number of indices mapped on the ring. * * @param rng the random number generator used for `pick` and `pick2`. */ private class Ring(size: Int, rng: Rng) { import Ring._ require(size > 0, s"size must be > 0: $size") /** * Returns the uniform width of any given index on the ring. The returned * value is bounded between (0, 1]. */ val unitWidth: Double = 1.0 / size /** * Returns the (zero-based) index between [0, `size`) which the * position `offset` maps to. * * @param offset A value between [0, 1.0). */ def index(offset: Double): Int = { if (offset < 0 && offset >= 1.0) throw new IllegalArgumentException(s"offset must be between [0, 1.0): $offset") math.floor(offset * size).toInt % size } /** * Returns the total number of indices that [offset, offset + width) intersects with. * * @note This returns the number of indices over which `pick` and `pick2` select. * Thus, we interpret a width of 0 as picking one index. */ def range(offset: Double, width: Double): Int = { if (width < 0 || width > 1.0) throw new IllegalArgumentException(s"width must be between [0, 1.0]: $width") // We will wrap around the entire ring, so return the size. if (width == 1) size // We only have one index to select from. Arguably, returning // a diff of zero here is correct too. However, in order to // project what `pick2` will do we return a range of 1. else if (width == 0) 1 else { val ab = { val i = index(offset) val w = weight(i, offset, width) if (w > 0) i else i + 1 } val ae = { val i = index((offset + width) % 1.0) val w = weight(i, offset, width) if (w > 0) i + 1 else i } val diff = ae - ab if (diff < 0) diff + size else diff } } /** * Returns the ratio of the intersection between `index` and [offset, offset + width). */ def weight(index: Int, offset: Double, width: Double): Double = { if (index >= size) throw new IllegalArgumentException(s"index must be < size: $index") if (width < 0 || width > 1.0) throw new IllegalArgumentException(s"width must be between [0, 1.0]: $width") // In cases where `offset + width` wraps around the ring, we need // to scale the range by 1.0 where it overlaps. val ab: Double = { val ab0 = index * unitWidth if (ab0 + 1 < offset + width) ab0 + 1 else ab0 } val ae: Double = ab + unitWidth intersect(ab, ae, offset, offset + width) / unitWidth } /** * Returns the indices where [offset, offset + width) intersects. * * @note This returns the indices over which `pick` and `pick2` select. * Thus, we interpret a width of 0 as picking one index. 
*/ def indices(offset: Double, width: Double): Seq[Int] = { val seq = new ListBuffer[Int] var i = index(offset) var r = range(offset, width) while (r > 0) { val idx = i % size seq += idx i += 1 r -= 1 } seq } /** * Pick a random index between [0, `size`) where the range of the * index intersects with [offset, offset + width). * * @param width The width of the range. We interpret a width of 0 as the range * [offset, offset] and as such return a valid index. */ def pick(offset: Double, width: Double): Int = { if (width < 0 || width > 1.0) throw new IllegalArgumentException(s"width must be between [0, 1.0]: $width") index((offset + (rng.nextDouble() * width)) % 1.0) } /** * Picks a random index between [0, `size`) where the positions for the * respective index intersect with [offset, offset + width), so long as * the index is not `a` (if the range permits it). * * @note we expose this outside of `pick2` so that we can avoid a tuple * allocation on the hot path. */ def tryPickSecond(a: Int, offset: Double, width: Double): Int = { // Element `b` is picked from "piecewise" range we get by subtracting // the range of a, i.e.: [offset, ab), [ae, offset + width). // In cases where `offset + width` wraps around the ring, we need // to scale the range by 1.0 where it overlaps. val ab: Double = { val ab0 = (a * unitWidth) if (ab0 + 1 < offset + width) ab0 + 1 else ab0 } val ae: Double = ab + unitWidth val overlap = intersect(ab, ae, offset, offset + width) val rem = width - overlap if (rem > 0) { // Instead of actually splitting the range into two, we offset // any pick that takes place in the second range if there is a // possibility that our second choice falls within [ab, ae]. // // Note, special care must be taken to not bias towards ae + overlap, so // we treat the entire range greater than it uniformly. var pos = offset + (rng.nextDouble() * rem) if (pos >= ae - overlap) { pos += overlap } index(pos % 1.0) } else { // The range [offset, offset + width) is equivalent to [ab, ae). a } } /** * Picks two random indices between [0, `size`) where the positions for the * respective indices intersect with [offset, offset + width). The indices are * chosen uniformly and without replacement. * * @param width The width of the range. We interpret a width of 0 as the range * [offset, offset] and as such return a valid index. */ def pick2(offset: Double, width: Double): (Int, Int) = { val a = pick(offset, width) val b = tryPickSecond(a, offset, width) (a, b) } }
mkhq/finagle
finagle-core/src/main/scala/com/twitter/finagle/loadbalancer/aperture/Ring.scala
Scala
apache-2.0
6,586
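A hypothetical usage sketch for the Ring class above, not part of the finagle source. Ring is package-private to com.twitter.finagle.loadbalancer.aperture, so this only compiles from inside that package, and it assumes Rng.threadLocal from com.twitter.finagle.util is available as the random source. It illustrates how index, indices and pick2 relate on a ring of 10 indices:

import com.twitter.finagle.util.Rng

val ring = new Ring(10, Rng.threadLocal) // each index covers a uniform width of 1/10

// The position 0.25 lands on index 2 (floor(0.25 * 10)).
val i = ring.index(0.25)

// The range [0.25, 0.55) touches indices 2, 3, 4 and 5.
val covered = ring.indices(offset = 0.25, width = 0.3)

// pick2 selects two of those indices without replacement, which is how the aperture
// balancer picks two candidate servers for its power-of-two-choices decision.
val (a, b) = ring.pick2(offset = 0.25, width = 0.3)
assert(covered.contains(a) && covered.contains(b) && a != b)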
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package whisk.core.database import scala.concurrent.Future import akka.actor.ActorSystem import akka.http.scaladsl.model.HttpMethods import akka.http.scaladsl.model.StatusCode import spray.json._ import spray.json.DefaultJsonProtocol._ import whisk.common.Logging /** * This class only handles the basic communication to the proper endpoints * ("JSON in, JSON out"). It is up to its clients to interpret the results. */ class CloudantRestClient(host: String, port: Int, username: String, password: String, db: String)(implicit system: ActorSystem, logging: Logging) extends CouchDbRestClient("https", host, port, username, password, db) { // https://cloudant.com/blog/cloudant-query-grows-up-to-handle-ad-hoc-queries/#.VvllCD-0z2C def simpleQuery(doc: JsObject): Future[Either[StatusCode, JsObject]] = { requestJson[JsObject](mkJsonRequest(HttpMethods.POST, uri(db, "_find"), doc)) } }
prccaraujo/openwhisk
common/scala/src/main/scala/whisk/core/database/CloudantRestClient.scala
Scala
apache-2.0
1,721
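A hypothetical calling sketch for the CloudantRestClient above, not part of the OpenWhisk source. The host, credentials and database name are placeholders, and the ActorSystem, Logging and ExecutionContext are left to the caller. It shows a Cloudant Query selector being passed straight through simpleQuery, in keeping with the "JSON in, JSON out" contract described in the class comment:

import akka.actor.ActorSystem
import scala.concurrent.{ExecutionContext, Future}
import spray.json._
import whisk.common.Logging

def findByName(name: String)(
  implicit system: ActorSystem, logging: Logging, ec: ExecutionContext): Future[Option[JsObject]] = {
  val client = new CloudantRestClient("example.cloudant.com", 443, "user", "secret", "whisks")
  // A Cloudant Query selector, forwarded verbatim to the _find endpoint.
  val selector = JsObject(
    "selector" -> JsObject("name" -> JsString(name)),
    "limit"    -> JsNumber(1))
  client.simpleQuery(selector).map {
    case Right(result) => Some(result) // raw _find response; matching docs live under "docs"
    case Left(_)       => None         // non-2xx status; interpretation is left to the caller
  }
}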
/*  Title:      Pure/Admin/jenkins.scala
    Author:     Makarius

Support for Jenkins continuous integration service.
*/

package isabelle

import java.net.URL
import java.time.ZoneId

import scala.util.matching.Regex


object Jenkins
{
  /* server API */

  def root(): String =
    Isabelle_System.getenv_strict("ISABELLE_JENKINS_ROOT")

  def invoke(url: String, args: String*): Any =
  {
    val req = url + "/api/json?" + args.mkString("&")
    val result = Url.read(req)
    try { JSON.parse(result) }
    catch { case ERROR(_) => error("Malformed JSON from " + quote(req)) }
  }


  /* build jobs */

  def build_job_names(): List[String] =
    for {
      job <- JSON.array(invoke(root()), "jobs").getOrElse(Nil)
      _class <- JSON.string(job, "_class")
      if _class == "hudson.model.FreeStyleProject"
      name <- JSON.string(job, "name")
    } yield name

  def download_logs(job_names: List[String], dir: Path, progress: Progress = No_Progress)
  {
    val store = Sessions.store()
    val infos = job_names.flatMap(build_job_infos(_))
    Par_List.map((info: Job_Info) => info.download_log(store, dir, progress), infos)
  }


  /* build log status */

  val build_log_jobs = List("isabelle-nightly-benchmark", "isabelle-nightly-slow")

  val build_status_profiles: List[Build_Status.Profile] =
    build_log_jobs.map(job_name =>
      Build_Status.Profile("jenkins " + job_name, 0,
        Build_Log.Prop.build_engine + " = " + SQL.string(Build_Log.Jenkins.engine) + " AND " +
        Build_Log.Data.session_name + " <> " + SQL.string("Pure") + " AND " +
        Build_Log.Data.status + " = " + SQL.string(Build_Log.Session_Status.finished.toString) +
        " AND " + Build_Log.Data.log_name + " LIKE " + SQL.string("%" + job_name)))


  /* job info */

  sealed case class Job_Info(
    job_name: String,
    identify: Boolean,
    timestamp: Long,
    main_log: URL,
    session_logs: List[(String, String, URL)])
  {
    val date: Date = Date(Time.ms(timestamp), ZoneId.of("Europe/Berlin"))

    def log_filename: Path =
      Build_Log.log_filename(Build_Log.Jenkins.engine, date, List(job_name))

    def read_ml_statistics(store: Sessions.Store, session_name: String): List[Properties.T] =
    {
      def get_log(ext: String): Option[URL] =
        session_logs.collectFirst({ case (a, b, url) if a == session_name && b == ext => url })

      get_log("db") match {
        case Some(url) =>
          Isabelle_System.with_tmp_file(session_name, "db") { database =>
            Bytes.write(database, Bytes.read(url))
            using(SQLite.open_database(database))(db => store.read_ml_statistics(db, session_name))
          }
        case None =>
          get_log("gz") match {
            case Some(url) =>
              val log_file = Build_Log.Log_File(session_name, Url.read_gzip(url))
              log_file.parse_session_info(ml_statistics = true).ml_statistics
            case None => Nil
          }
      }
    }

    def download_log(store: Sessions.Store, dir: Path, progress: Progress = No_Progress)
    {
      val log_dir = dir + Build_Log.log_subdir(date)
      val log_path = log_dir + (if (identify) log_filename else log_filename.ext("xz"))

      if (!log_path.is_file) {
        progress.echo(log_path.expand.implode)
        Isabelle_System.mkdirs(log_dir)

        if (identify) {
          val log_file = Build_Log.Log_File(main_log.toString, Url.read(main_log))
          val isabelle_version = log_file.find_match(Build_Log.Jenkins.Isabelle_Version)
          val afp_version = log_file.find_match(Build_Log.Jenkins.AFP_Version)

          File.write(log_path,
            Build_Log.Identify.content(date, isabelle_version, afp_version) + "\n" +
              main_log.toString)
        }
        else {
          val ml_statistics =
            session_logs.map(_._1).toSet.toList.sorted.flatMap(session_name =>
              read_ml_statistics(store, session_name).
                map(props => (Build_Log.SESSION_NAME -> session_name) :: props))

          File.write_xz(log_path,
            terminate_lines(Url.read(main_log) ::
              ml_statistics.map(Build_Log.Log_File.print_props(Build_Log.ML_STATISTICS_MARKER, _))),
            XZ.options(6))
        }
      }
    }
  }

  def build_job_infos(job_name: String): List[Job_Info] =
  {
    val Session_Log = new Regex("""^.*/log/([^/]+)\.(db|gz)$""")

    val identify = job_name == "identify"
    val job = if (identify) "isabelle-nightly-slow" else job_name

    val infos =
      for {
        build <-
          JSON.array(
            invoke(root() + "/job/" + job, "tree=allBuilds[number,timestamp,artifacts[*]]"),
            "allBuilds").getOrElse(Nil)
        number <- JSON.int(build, "number")
        timestamp <- JSON.long(build, "timestamp")
      } yield {
        val job_prefix = root() + "/job/" + job + "/" + number
        val main_log = Url(job_prefix + "/consoleText")
        val session_logs =
          if (identify) Nil
          else {
            for {
              artifact <- JSON.array(build, "artifacts").getOrElse(Nil)
              log_path <- JSON.string(artifact, "relativePath")
              (name, ext) <-
                (log_path match {
                  case Session_Log(a, b) => Some((a, b))
                  case _ => None
                })
            } yield (name, ext, Url(job_prefix + "/artifact/" + log_path))
          }
        Job_Info(job_name, identify, timestamp, main_log, session_logs)
      }

    infos.sortBy(info => - info.timestamp)
  }
}
larsrh/libisabelle
modules/pide/2017/src/main/scala/Admin/jenkins.scala
Scala
apache-2.0
5,465
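A hypothetical Isabelle/Scala console sketch, not part of the source above: mirror the logs of the two build-log jobs into a local directory. The target directory is a placeholder, and the snippet assumes it runs in the usual Isabelle/Scala context where Path, Isabelle_System and the Jenkins object are in scope.

val target_dir = Path.explode("~/tmp/jenkins-logs")

// Only fetch the jobs that feed the build-log statistics, and only if the
// server actually knows them.
val known = Jenkins.build_job_names()
Jenkins.download_logs(Jenkins.build_log_jobs.filter(known.contains), target_dir)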
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.streaming.state import java.util.UUID import scala.reflect.ClassTag import org.apache.spark.{Partition, TaskContext} import org.apache.spark.rdd.RDD import org.apache.spark.sql.internal.SessionState import org.apache.spark.sql.types.StructType import org.apache.spark.util.SerializableConfiguration /** * An RDD that allows computations to be executed against [[StateStore]]s. It * uses the [[StateStoreCoordinator]] to get the locations of loaded state stores * and use that as the preferred locations. */ class StateStoreRDD[T: ClassTag, U: ClassTag]( dataRDD: RDD[T], storeUpdateFunction: (StateStore, Iterator[T]) => Iterator[U], checkpointLocation: String, queryRunId: UUID, operatorId: Long, storeVersion: Long, keySchema: StructType, valueSchema: StructType, indexOrdinal: Option[Int], sessionState: SessionState, @transient private val storeCoordinator: Option[StateStoreCoordinatorRef]) extends RDD[U](dataRDD) { private val storeConf = new StateStoreConf(sessionState.conf) // A Hadoop Configuration can be about 10 KB, which is pretty big, so broadcast it private val hadoopConfBroadcast = dataRDD.context.broadcast( new SerializableConfiguration(sessionState.newHadoopConf())) override protected def getPartitions: Array[Partition] = dataRDD.partitions /** * Set the preferred location of each partition using the executor that has the related * [[StateStoreProvider]] already loaded. */ override def getPreferredLocations(partition: Partition): Seq[String] = { val stateStoreProviderId = StateStoreProviderId( StateStoreId(checkpointLocation, operatorId, partition.index), queryRunId) storeCoordinator.flatMap(_.getLocation(stateStoreProviderId)).toSeq } override def compute(partition: Partition, ctxt: TaskContext): Iterator[U] = { var store: StateStore = null val storeProviderId = StateStoreProviderId( StateStoreId(checkpointLocation, operatorId, partition.index), queryRunId) store = StateStore.get( storeProviderId, keySchema, valueSchema, indexOrdinal, storeVersion, storeConf, hadoopConfBroadcast.value.value) val inputIter = dataRDD.iterator(partition, ctxt) storeUpdateFunction(store, inputIter) } }
aokolnychyi/spark
sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/state/StateStoreRDD.scala
Scala
apache-2.0
3,120
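A hypothetical sketch, not from the Spark source, of the shape of a storeUpdateFunction that StateStoreRDD.compute invokes with the partition's StateStore and input iterator. Building the key/value UnsafeRows is application-specific, so it is taken here as a supplied function; only the standard put/commit/iterator operations of StateStore are used.

import org.apache.spark.sql.catalyst.expressions.UnsafeRow
import org.apache.spark.sql.execution.streaming.state.StateStore

def updateAndEmit(toKeyValue: UnsafeRow => (UnsafeRow, UnsafeRow))
                 (store: StateStore, input: Iterator[UnsafeRow]): Iterator[UnsafeRow] = {
  input.foreach { row =>
    val (key, value) = toKeyValue(row)
    store.put(key, value)          // upsert the latest state for this key
  }
  store.commit()                   // finalize the writes as a new store version
  store.iterator().map(_.value)    // emit every current state value downstream
}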
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package kafka.tools import java.io._ import java.nio.ByteBuffer import kafka.coordinator.group.{GroupMetadataKey, GroupMetadataManager, OffsetKey} import kafka.coordinator.transaction.TransactionLog import kafka.log._ import kafka.serializer.Decoder import kafka.utils._ import org.apache.kafka.clients.consumer.internals.ConsumerProtocol import org.apache.kafka.common.KafkaException import org.apache.kafka.common.record._ import org.apache.kafka.common.utils.{Time, Utils} import scala.collection.{Map, mutable} import scala.collection.mutable.ArrayBuffer import scala.collection.JavaConverters._ object DumpLogSegments { // visible for testing private[tools] val RecordIndent = "|" def main(args: Array[String]) { val opts = new DumpLogSegmentsOptions(args) CommandLineUtils.printHelpAndExitIfNeeded(opts, "This tool helps to parse a log file and dump its contents to the console, useful for debugging a seemingly corrupt log segment.") opts.checkArgs() val misMatchesForIndexFilesMap = mutable.Map[String, List[(Long, Long)]]() val timeIndexDumpErrors = new TimeIndexDumpErrors val nonConsecutivePairsForLogFilesMap = mutable.Map[String, List[(Long, Long)]]() for (arg <- opts.files) { val file = new File(arg) println(s"Dumping $file") val filename = file.getName val suffix = filename.substring(filename.lastIndexOf(".")) suffix match { case Log.LogFileSuffix => dumpLog(file, opts.shouldPrintDataLog, nonConsecutivePairsForLogFilesMap, opts.isDeepIteration, opts.maxMessageSize, opts.messageParser) case Log.IndexFileSuffix => dumpIndex(file, opts.indexSanityOnly, opts.verifyOnly, misMatchesForIndexFilesMap, opts.maxMessageSize) case Log.TimeIndexFileSuffix => dumpTimeIndex(file, opts.indexSanityOnly, opts.verifyOnly, timeIndexDumpErrors, opts.maxMessageSize) case Log.ProducerSnapshotFileSuffix => dumpProducerIdSnapshot(file) case Log.TxnIndexFileSuffix => dumpTxnIndex(file) case _ => System.err.println(s"Ignoring unknown file $file") } } misMatchesForIndexFilesMap.foreach { case (fileName, listOfMismatches) => System.err.println(s"Mismatches in :$fileName") listOfMismatches.foreach { case (indexOffset, logOffset) => System.err.println(s" Index offset: $indexOffset, log offset: $logOffset") } } timeIndexDumpErrors.printErrors() nonConsecutivePairsForLogFilesMap.foreach { case (fileName, listOfNonConsecutivePairs) => System.err.println(s"Non-consecutive offsets in $fileName") listOfNonConsecutivePairs.foreach { case (first, second) => System.err.println(s" $first is followed by $second") } } } private def dumpTxnIndex(file: File): Unit = { val index = new TransactionIndex(Log.offsetFromFile(file), file) for (abortedTxn <- index.allAbortedTxns) { println(s"version: ${abortedTxn.version} producerId: ${abortedTxn.producerId} firstOffset: 
${abortedTxn.firstOffset} " + s"lastOffset: ${abortedTxn.lastOffset} lastStableOffset: ${abortedTxn.lastStableOffset}") } } private def dumpProducerIdSnapshot(file: File): Unit = { try { ProducerStateManager.readSnapshot(file).foreach { entry => print(s"producerId: ${entry.producerId} producerEpoch: ${entry.producerEpoch} " + s"coordinatorEpoch: ${entry.coordinatorEpoch} currentTxnFirstOffset: ${entry.currentTxnFirstOffset} ") entry.batchMetadata.headOption.foreach { metadata => print(s"firstSequence: ${metadata.firstSeq} lastSequence: ${metadata.lastSeq} " + s"lastOffset: ${metadata.lastOffset} offsetDelta: ${metadata.offsetDelta} timestamp: ${metadata.timestamp}") } println() } } catch { case e: CorruptSnapshotException => System.err.println(e.getMessage) } } /* print out the contents of the index */ // Visible for testing private[tools] def dumpIndex(file: File, indexSanityOnly: Boolean, verifyOnly: Boolean, misMatchesForIndexFilesMap: mutable.Map[String, List[(Long, Long)]], maxMessageSize: Int) { val startOffset = file.getName.split("\\.")(0).toLong val logFile = new File(file.getAbsoluteFile.getParent, file.getName.split("\\.")(0) + Log.LogFileSuffix) val fileRecords = FileRecords.open(logFile, false) val index = new OffsetIndex(file, baseOffset = startOffset, writable = false) //Check that index passes sanityCheck, this is the check that determines if indexes will be rebuilt on startup or not. if (indexSanityOnly) { index.sanityCheck() println(s"$file passed sanity check.") return } for (i <- 0 until index.entries) { val entry = index.entry(i) // since it is a sparse file, in the event of a crash there may be many zero entries, stop if we see one if (entry.offset == index.baseOffset && i > 0) return val slice = fileRecords.slice(entry.position, maxMessageSize) val firstBatchLastOffset = slice.batches.iterator.next().lastOffset if (firstBatchLastOffset != entry.offset) { var misMatchesSeq = misMatchesForIndexFilesMap.getOrElse(file.getAbsolutePath, List[(Long, Long)]()) misMatchesSeq ::= (entry.offset, firstBatchLastOffset) misMatchesForIndexFilesMap.put(file.getAbsolutePath, misMatchesSeq) } if (!verifyOnly) println(s"offset: ${entry.offset} position: ${entry.position}") } } // Visible for testing private[tools] def dumpTimeIndex(file: File, indexSanityOnly: Boolean, verifyOnly: Boolean, timeIndexDumpErrors: TimeIndexDumpErrors, maxMessageSize: Int) { val startOffset = file.getName.split("\\.")(0).toLong val logFile = new File(file.getAbsoluteFile.getParent, file.getName.split("\\.")(0) + Log.LogFileSuffix) val fileRecords = FileRecords.open(logFile, false) val indexFile = new File(file.getAbsoluteFile.getParent, file.getName.split("\\.")(0) + Log.IndexFileSuffix) val index = new OffsetIndex(indexFile, baseOffset = startOffset, writable = false) val timeIndex = new TimeIndex(file, baseOffset = startOffset, writable = false) try { //Check that index passes sanityCheck, this is the check that determines if indexes will be rebuilt on startup or not. 
if (indexSanityOnly) { timeIndex.sanityCheck() println(s"$file passed sanity check.") return } var prevTimestamp = RecordBatch.NO_TIMESTAMP for (i <- 0 until timeIndex.entries) { val entry = timeIndex.entry(i) // since it is a sparse file, in the event of a crash there may be many zero entries, stop if we see one if (entry.offset == timeIndex.baseOffset && i > 0) return val position = index.lookup(entry.offset).position val partialFileRecords = fileRecords.slice(position, Int.MaxValue) val batches = partialFileRecords.batches.asScala var maxTimestamp = RecordBatch.NO_TIMESTAMP // We first find the message by offset then check if the timestamp is correct. batches.find(_.lastOffset >= entry.offset) match { case None => timeIndexDumpErrors.recordShallowOffsetNotFound(file, entry.offset, -1.toLong) case Some(batch) if batch.lastOffset != entry.offset => timeIndexDumpErrors.recordShallowOffsetNotFound(file, entry.offset, batch.lastOffset) case Some(batch) => for (record <- batch.asScala) maxTimestamp = math.max(maxTimestamp, record.timestamp) if (maxTimestamp != entry.timestamp) timeIndexDumpErrors.recordMismatchTimeIndex(file, entry.timestamp, maxTimestamp) if (prevTimestamp >= entry.timestamp) timeIndexDumpErrors.recordOutOfOrderIndexTimestamp(file, entry.timestamp, prevTimestamp) } if (!verifyOnly) println(s"timestamp: ${entry.timestamp} offset: ${entry.offset}") prevTimestamp = entry.timestamp } } finally { fileRecords.closeHandlers() index.closeHandler() timeIndex.closeHandler() } } private trait MessageParser[K, V] { def parse(record: Record): (Option[K], Option[V]) } private class DecoderMessageParser[K, V](keyDecoder: Decoder[K], valueDecoder: Decoder[V]) extends MessageParser[K, V] { override def parse(record: Record): (Option[K], Option[V]) = { if (!record.hasValue) { (None, None) } else { val key = if (record.hasKey) Some(keyDecoder.fromBytes(Utils.readBytes(record.key))) else None val payload = Some(valueDecoder.fromBytes(Utils.readBytes(record.value))) (key, payload) } } } private class TransactionLogMessageParser extends MessageParser[String, String] { override def parse(record: Record): (Option[String], Option[String]) = { val txnKey = TransactionLog.readTxnRecordKey(record.key) val txnMetadata = TransactionLog.readTxnRecordValue(txnKey.transactionalId, record.value) val keyString = s"transactionalId=${txnKey.transactionalId}" val valueString = s"producerId:${txnMetadata.producerId}," + s"producerEpoch:${txnMetadata.producerEpoch}," + s"state=${txnMetadata.state}," + s"partitions=${txnMetadata.topicPartitions}," + s"txnLastUpdateTimestamp=${txnMetadata.txnLastUpdateTimestamp}," + s"txnTimeoutMs=${txnMetadata.txnTimeoutMs}" (Some(keyString), Some(valueString)) } } private class OffsetsMessageParser extends MessageParser[String, String] { private def hex(bytes: Array[Byte]): String = { if (bytes.isEmpty) "" else "%X".format(BigInt(1, bytes)) } private def parseOffsets(offsetKey: OffsetKey, payload: ByteBuffer) = { val group = offsetKey.key.group val topicPartition = offsetKey.key.topicPartition val offset = GroupMetadataManager.readOffsetMessageValue(payload) val keyString = s"offset::$group:${topicPartition.topic}:${topicPartition.partition}" val valueString = if (offset.metadata.isEmpty) String.valueOf(offset.offset) else s"${offset.offset}:${offset.metadata}" (Some(keyString), Some(valueString)) } private def parseGroupMetadata(groupMetadataKey: GroupMetadataKey, payload: ByteBuffer) = { val groupId = groupMetadataKey.key val group = 
GroupMetadataManager.readGroupMessageValue(groupId, payload, Time.SYSTEM) val protocolType = group.protocolType.getOrElse("") val assignment = group.allMemberMetadata.map { member => if (protocolType == ConsumerProtocol.PROTOCOL_TYPE) { val partitionAssignment = ConsumerProtocol.deserializeAssignment(ByteBuffer.wrap(member.assignment)) val userData = hex(Utils.toArray(partitionAssignment.userData())) if (userData.isEmpty) s"${member.memberId}=${partitionAssignment.partitions()}" else s"${member.memberId}=${partitionAssignment.partitions()}:$userData" } else { s"${member.memberId}=${hex(member.assignment)}" } }.mkString("{", ",", "}") val keyString = Json.encodeAsString(Map("metadata" -> groupId).asJava) val valueString = Json.encodeAsString(Map( "protocolType" -> protocolType, "protocol" -> group.protocolOrNull, "generationId" -> group.generationId, "assignment" -> assignment ).asJava) (Some(keyString), Some(valueString)) } override def parse(record: Record): (Option[String], Option[String]) = { if (!record.hasValue) (None, None) else if (!record.hasKey) { throw new KafkaException("Failed to decode message using offset topic decoder (message had a missing key)") } else { GroupMetadataManager.readMessageKey(record.key) match { case offsetKey: OffsetKey => parseOffsets(offsetKey, record.value) case groupMetadataKey: GroupMetadataKey => parseGroupMetadata(groupMetadataKey, record.value) case _ => throw new KafkaException("Failed to decode message using offset topic decoder (message had an invalid key)") } } } } /* print out the contents of the log */ private def dumpLog(file: File, printContents: Boolean, nonConsecutivePairsForLogFilesMap: mutable.Map[String, List[(Long, Long)]], isDeepIteration: Boolean, maxMessageSize: Int, parser: MessageParser[_, _]) { val startOffset = file.getName.split("\\.")(0).toLong println("Starting offset: " + startOffset) val fileRecords = FileRecords.open(file, false) try { var validBytes = 0L var lastOffset = -1L for (batch <- fileRecords.batches.asScala) { printBatchLevel(batch, validBytes) if (isDeepIteration) { for (record <- batch.asScala) { if (lastOffset == -1) lastOffset = record.offset else if (record.offset != lastOffset + 1) { var nonConsecutivePairsSeq = nonConsecutivePairsForLogFilesMap.getOrElse(file.getAbsolutePath, List[(Long, Long)]()) nonConsecutivePairsSeq ::= (lastOffset, record.offset) nonConsecutivePairsForLogFilesMap.put(file.getAbsolutePath, nonConsecutivePairsSeq) } lastOffset = record.offset print(s"$RecordIndent offset: ${record.offset} ${batch.timestampType}: ${record.timestamp} " + s"keysize: ${record.keySize} valuesize: ${record.valueSize}") if (batch.magic >= RecordBatch.MAGIC_VALUE_V2) { print(" sequence: " + record.sequence + " headerKeys: " + record.headers.map(_.key).mkString("[", ",", "]")) } else { print(s" crc: ${record.checksumOrNull} isvalid: ${record.isValid}") } if (batch.isControlBatch) { val controlTypeId = ControlRecordType.parseTypeId(record.key) ControlRecordType.fromTypeId(controlTypeId) match { case ControlRecordType.ABORT | ControlRecordType.COMMIT => val endTxnMarker = EndTransactionMarker.deserialize(record) print(s" endTxnMarker: ${endTxnMarker.controlType} coordinatorEpoch: ${endTxnMarker.coordinatorEpoch}") case controlType => print(s" controlType: $controlType($controlTypeId)") } } else if (printContents) { val (key, payload) = parser.parse(record) key.foreach(key => print(s" key: $key")) payload.foreach(payload => print(s" payload: $payload")) } println() } } validBytes += batch.sizeInBytes } val 
trailingBytes = fileRecords.sizeInBytes - validBytes if (trailingBytes > 0) println(s"Found $trailingBytes invalid bytes at the end of ${file.getName}") } finally fileRecords.closeHandlers() } private def printBatchLevel(batch: FileLogInputStream.FileChannelRecordBatch, accumulativeBytes: Long): Unit = { if (batch.magic >= RecordBatch.MAGIC_VALUE_V2) print("baseOffset: " + batch.baseOffset + " lastOffset: " + batch.lastOffset + " count: " + batch.countOrNull + " baseSequence: " + batch.baseSequence + " lastSequence: " + batch.lastSequence + " producerId: " + batch.producerId + " producerEpoch: " + batch.producerEpoch + " partitionLeaderEpoch: " + batch.partitionLeaderEpoch + " isTransactional: " + batch.isTransactional + " isControl: " + batch.isControlBatch) else print("offset: " + batch.lastOffset) println(" position: " + accumulativeBytes + " " + batch.timestampType + ": " + batch.maxTimestamp + " size: " + batch.sizeInBytes + " magic: " + batch.magic + " compresscodec: " + batch.compressionType + " crc: " + batch.checksum + " isvalid: " + batch.isValid) } class TimeIndexDumpErrors { val misMatchesForTimeIndexFilesMap = mutable.Map[String, ArrayBuffer[(Long, Long)]]() val outOfOrderTimestamp = mutable.Map[String, ArrayBuffer[(Long, Long)]]() val shallowOffsetNotFound = mutable.Map[String, ArrayBuffer[(Long, Long)]]() def recordMismatchTimeIndex(file: File, indexTimestamp: Long, logTimestamp: Long) { val misMatchesSeq = misMatchesForTimeIndexFilesMap.getOrElse(file.getAbsolutePath, new ArrayBuffer[(Long, Long)]()) if (misMatchesSeq.isEmpty) misMatchesForTimeIndexFilesMap.put(file.getAbsolutePath, misMatchesSeq) misMatchesSeq += ((indexTimestamp, logTimestamp)) } def recordOutOfOrderIndexTimestamp(file: File, indexTimestamp: Long, prevIndexTimestamp: Long) { val outOfOrderSeq = outOfOrderTimestamp.getOrElse(file.getAbsolutePath, new ArrayBuffer[(Long, Long)]()) if (outOfOrderSeq.isEmpty) outOfOrderTimestamp.put(file.getAbsolutePath, outOfOrderSeq) outOfOrderSeq += ((indexTimestamp, prevIndexTimestamp)) } def recordShallowOffsetNotFound(file: File, indexOffset: Long, logOffset: Long) { val shallowOffsetNotFoundSeq = shallowOffsetNotFound.getOrElse(file.getAbsolutePath, new ArrayBuffer[(Long, Long)]()) if (shallowOffsetNotFoundSeq.isEmpty) shallowOffsetNotFound.put(file.getAbsolutePath, shallowOffsetNotFoundSeq) shallowOffsetNotFoundSeq += ((indexOffset, logOffset)) } def printErrors() { misMatchesForTimeIndexFilesMap.foreach { case (fileName, listOfMismatches) => { System.err.println("Found timestamp mismatch in :" + fileName) listOfMismatches.foreach(m => { System.err.println(" Index timestamp: %d, log timestamp: %d".format(m._1, m._2)) }) } } outOfOrderTimestamp.foreach { case (fileName, outOfOrderTimestamps) => { System.err.println("Found out of order timestamp in :" + fileName) outOfOrderTimestamps.foreach(m => { System.err.println(" Index timestamp: %d, Previously indexed timestamp: %d".format(m._1, m._2)) }) } } shallowOffsetNotFound.values.foreach { listOfShallowOffsetNotFound => System.err.println("The following indexed offsets are not found in the log.") listOfShallowOffsetNotFound.foreach { case (indexedOffset, logOffset) => System.err.println(s"Indexed offset: $indexedOffset, found log offset: $logOffset") } } } } private class DumpLogSegmentsOptions(args: Array[String]) extends CommandDefaultOptions(args) { val printOpt = parser.accepts("print-data-log", "if set, printing the messages content when dumping data logs. 
Automatically set if any decoder option is specified.") val verifyOpt = parser.accepts("verify-index-only", "if set, just verify the index log without printing its content.") val indexSanityOpt = parser.accepts("index-sanity-check", "if set, just checks the index sanity without printing its content. " + "This is the same check that is executed on broker startup to determine if an index needs rebuilding or not.") val filesOpt = parser.accepts("files", "REQUIRED: The comma separated list of data and index log files to be dumped.") .withRequiredArg .describedAs("file1, file2, ...") .ofType(classOf[String]) val maxMessageSizeOpt = parser.accepts("max-message-size", "Size of largest message.") .withRequiredArg .describedAs("size") .ofType(classOf[java.lang.Integer]) .defaultsTo(5 * 1024 * 1024) val deepIterationOpt = parser.accepts("deep-iteration", "if set, uses deep instead of shallow iteration. Automatically set if print-data-log is enabled.") val valueDecoderOpt = parser.accepts("value-decoder-class", "if set, used to deserialize the messages. This class should implement kafka.serializer.Decoder trait. Custom jar should be available in kafka/libs directory.") .withOptionalArg() .ofType(classOf[java.lang.String]) .defaultsTo("kafka.serializer.StringDecoder") val keyDecoderOpt = parser.accepts("key-decoder-class", "if set, used to deserialize the keys. This class should implement kafka.serializer.Decoder trait. Custom jar should be available in kafka/libs directory.") .withOptionalArg() .ofType(classOf[java.lang.String]) .defaultsTo("kafka.serializer.StringDecoder") val offsetsOpt = parser.accepts("offsets-decoder", "if set, log data will be parsed as offset data from the " + "__consumer_offsets topic.") val transactionLogOpt = parser.accepts("transaction-log-decoder", "if set, log data will be parsed as " + "transaction metadata from the __transaction_state topic.") options = parser.parse(args : _*) def messageParser: MessageParser[_, _] = if (options.has(offsetsOpt)) { new OffsetsMessageParser } else if (options.has(transactionLogOpt)) { new TransactionLogMessageParser } else { val valueDecoder: Decoder[_] = CoreUtils.createObject[Decoder[_]](options.valueOf(valueDecoderOpt), new VerifiableProperties) val keyDecoder: Decoder[_] = CoreUtils.createObject[Decoder[_]](options.valueOf(keyDecoderOpt), new VerifiableProperties) new DecoderMessageParser(keyDecoder, valueDecoder) } lazy val shouldPrintDataLog: Boolean = options.has(printOpt) || options.has(offsetsOpt) || options.has(transactionLogOpt) || options.has(valueDecoderOpt) || options.has(keyDecoderOpt) lazy val isDeepIteration: Boolean = options.has(deepIterationOpt) || shouldPrintDataLog lazy val verifyOnly: Boolean = options.has(verifyOpt) lazy val indexSanityOnly: Boolean = options.has(indexSanityOpt) lazy val files = options.valueOf(filesOpt).split(",") lazy val maxMessageSize = options.valueOf(maxMessageSizeOpt).intValue() def checkArgs(): Unit = CommandLineUtils.checkRequiredArgs(parser, options, filesOpt) } }
gf53520/kafka
core/src/main/scala/kafka/tools/DumpLogSegments.scala
Scala
apache-2.0
23,278
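A hypothetical invocation sketch; the segment paths are placeholders. The flags come directly from DumpLogSegmentsOptions above, and the same arguments would normally be passed on the command line via bin/kafka-run-class.sh kafka.tools.DumpLogSegments.

// Dump a data segment, printing message contents ("--print-data-log" also forces deep iteration).
DumpLogSegments.main(Array(
  "--files", "/var/kafka-logs/my-topic-0/00000000000000000000.log",
  "--print-data-log"))

// For the internal __consumer_offsets topic, select the dedicated decoder instead.
DumpLogSegments.main(Array(
  "--files", "/var/kafka-logs/__consumer_offsets-0/00000000000000000000.log",
  "--offsets-decoder"))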
/** * Copyright (c) 2012 Petr Kozelek <[email protected]> * * The full copyright and license information is presented * in the file LICENSE that was distributed with this source code. */ package mql.model.semantic import collection.mutable.HashMap trait Metadata { private[this] lazy val _metadata: collection.mutable.Map[Any, Any] = new HashMap[Any, Any] def metadata: collection.mutable.Map[Any, Any] = _metadata }
footcha/MQL
src/main/scala/mql/model/semantic/Metadata.scala
Scala
bsd-3-clause
432
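A minimal hypothetical sketch; the Table class is invented for illustration. Mixing in Metadata gives any model element a lazily created, mutable key/value map.

import mql.model.semantic.Metadata

class Table(val name: String) extends Metadata

val customers = new Table("customers")
customers.metadata += ("sourceSystem" -> "crm")
println(customers.metadata.get("sourceSystem")) // Some(crm)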
/* Copyright 2014 Nest Labs Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package nest.sparkle.loader import org.scalatest.FunSuite import org.scalatest.Matchers import java.nio.file.Paths import spray.util._ import akka.util.Timeout import scala.concurrent.duration._ import org.scalatest.Finders import scala.concurrent.ExecutionContext import nest.sparkle.util.Managed.implicits._ import nest.sparkle.util.Resources import nest.sparkle.test.SparkleTestConfig class TestTextTableParser extends FunSuite with Matchers with SparkleTestConfig { import ExecutionContext.Implicits.global def loadRowInfo(resourcePath: String)(fn: CloseableRowInfo => Unit) { val filePath = Resources.filePathString(resourcePath) val closeableRowInfo = TabularFile.load(Paths.get(filePath)).await managed(closeableRowInfo) foreach fn } test("load sample.csv file") { loadRowInfo("sample.csv") { rowInfo => rowInfo.valueColumns.length shouldBe 25 rowInfo.rows.toSeq.size shouldBe 78 } } test("load csv file with numeric timestamps") { loadRowInfo("epochs.csv") { rowInfo => rowInfo.valueColumns.length shouldBe 3 rowInfo.rows.toSeq.size shouldBe 2751 } } test("load csv file with numeric timestamps and no header") { loadRowInfo("just-time.csv") { rowInfo => rowInfo.rows.toSeq.size shouldBe 4 rowInfo.valueColumns.size shouldBe 0 } } test("test time parser") { TextTableParser.parseTime("2013-02-15T01:32:48.955") should be (Some(1360891968955L)) TextTableParser.parseTime("1357710557000") should be (Some(1357710557000L)) TextTableParser.parseTime("1373681685.0") should be (Some(1373681685000L)) } }
mighdoll/sparkle
sparkle-tests/src/test/scala/nest/sparkle/loader/TestTextTableParser.scala
Scala
apache-2.0
2,210
import sbt._ import Keys._ import scala.util.{Failure, Success} object TypeProviders { /** Slick type provider code gen */ val typeProviders = taskKey[Seq[File]]("Type provider code generation") val TypeProvidersConfig = config("codegen").hide val CompileConfig = config("compile") def codegenSettings = { inConfig(TypeProvidersConfig)(Defaults.configSettings) ++ Seq( Test / sourceGenerators += typeProviders.taskValue, typeProviders := typeProvidersTask.value, ivyConfigurations += TypeProvidersConfig.extend(Compile), (Test / compile) := ((Test / compile) dependsOn (TypeProvidersConfig / compile)).value, TypeProvidersConfig / unmanagedClasspath ++= (CompileConfig / fullClasspath).value, TypeProvidersConfig / unmanagedClasspath ++= (LocalProject("codegen") / Test / fullClasspath).value, Test / unmanagedClasspath ++= (TypeProvidersConfig / fullClasspath).value, Test / packageSrc / mappings ++= { val src = (Test / sourceDirectory).value / "codegen" val inFiles = src ** "*.scala" ((Test / managedSources).value.pair(Path.relativeTo((Test / sourceManaged).value) | Path.flat)) ++ // Add generated sources to sources JAR (inFiles pair (Path.relativeTo(src) | Path.flat)) // Add *.fm files to sources JAR } ) } def typeProvidersTask = Def.task { val cp = (TypeProvidersConfig / fullClasspath).value val r = (typeProviders / runner).value val output = (Test / sourceManaged).value val s = streams.value val srcDir = (Compile / sourceDirectory).value val slickSrc = (LocalProject("slick") / sourceDirectory).value val src = srcDir / "codegen" val outDir = (output/"slick-codegen").getPath val inFiles = (src ** "*.scala").get.toSet ++ (slickSrc / "main/scala/slick/codegen" ** "*.scala").get.toSet ++ (slickSrc / "main/scala/slick/jdbc/meta" ** "*.scala").get.toSet val cachedFun = FileFunction.cached(s.cacheDirectory / "type-providers", inStyle = FilesInfo.lastModified, outStyle = FilesInfo.exists) { (in: Set[File]) => IO.delete((output ** "*.scala").get) val errorsMain = r.run("slick.test.codegen.GenerateMainSources", cp.files, Array(outDir), s.log) val errorsRoundtrip = r.run("slick.test.codegen.GenerateRoundtripSources", cp.files, Array(outDir), s.log) (errorsMain, errorsRoundtrip) match { case (Success(_), Success(_)) => (output ** "*.scala").get.toSet case (Failure(failedMain), Failure(failedRoundtrip)) => sys.error(failedMain.getMessage + System.lineSeparator() + failedRoundtrip) case (failedMain, failedRoundtrip) => failedMain.fold(e => sys.error(e.getMessage), _ => sys.error(failedRoundtrip.failed.get.getMessage)) } } cachedFun(inFiles).toSeq } }
slick/slick
project/TypeProviders.scala
Scala
bsd-2-clause
2,828
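A hypothetical wiring sketch in build.sbt style; the project and directory names are placeholders. codegenSettings already hooks the generator into Test / sourceGenerators and the codegen ivy configuration, so attaching it to a project is all that is needed.

lazy val testkit = (project in file("slick-testkit"))
  .settings(TypeProviders.codegenSettings)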
/*
 * Copyright 2018 Combined Conditional Access Development, LLC.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.ccadllc.cedi.dtrace
package interop
package xb3

import cats.data.OptionT
import cats.syntax.all._

import java.util.UUID

import scala.util.matching.Regex

import scodec.bits.ByteVector

import XB3HeaderCodec._

/**
 * Implements the `HeaderCodec` trait, providing for the encoding and decoding of
 * [[https://istio.io/docs/tasks/telemetry/distributed-tracing.html X-B3]]-style
 * tracing HTTP headers into and from a `SpanId` respectively.
 */
class XB3HeaderCodec extends HeaderCodec {

  /**
   * Encodes [[https://istio.io/docs/tasks/telemetry/distributed-tracing.html X-B3]]-compliant
   * HTTP headers from the passed-in `SpanId`.
   * Note: A single `b3` compressed header will be generated combining the `traceId`, `spanId`
   * and `parentSpanId` when this function is called using the form:
   *   `xb3HeaderCodec.encode(spanId, Map(XB3HeaderCodec.Compressed -> "true"))`
   * Note: The `X-B3-Sampled` header will be set to "0" if called with the below form (where it might be combined
   * with `Compressed` as shown above, in which case the sampled section of the compressed header will
   * be set to "0"). Otherwise the `X-B3-Sampled` header (or sampled section of the compressed header) will be
   * set to "1".
   *   `xb3HeaderCodec.encode(spanId, Map(XB3HeaderCodec.Sampled -> "false"))`
   */
  override def encode(spanId: SpanId, properties: Map[String, String]): List[Header] = {
    val sampled = sampleTrace(properties)
    if (compressHeaders(properties)) encodeCompressed(spanId, sampled) else encodeUncompressed(spanId, sampled)
  }

  /**
   * Decodes a `SpanId` from
   * [[https://istio.io/docs/tasks/telemetry/distributed-tracing.html XB3/Zipkin]]-compliant
   * HTTP headers (or a single compressed `b3` header if present).
   * The `properties` argument is not currently used with this function.
*/ override def decode(headers: List[Header], properties: Map[String, String]): Either[Header.DecodeFailure, Header.Decoded] = decodeCompressed(headers).flatMap(d => if (d.spanId.isEmpty) decodeUncompressed(headers) else Right(d)) private def compressHeaders(properties: Map[String, String]): Boolean = properties.get(Compressed).flatMap(s => Either.catchNonFatal(s.toBoolean).toOption).getOrElse(false) private def decodeCompressed(headers: List[Header]): Either[Header.DecodeFailure, Header.Decoded] = headers.collectFirst { case Header(CompressedHeaderName, value) => value }.traverse(decodeCompressedSpanId).map( _.fold(Header.Decoded(None, true)) { case (spanId, sampled) => Header.Decoded(Some(spanId), sampled) }) private def decodeCompressedSpanId(encoded: Header.Value): Either[Header.DecodeFailure, (SpanId, Boolean)] = encoded.value match { case CompressedHeaderRegex(traceId, spanId, sampled, parentId) => (for { traceId <- decodeTraceIdValue(traceId) spanId <- decodeSpanIdValue(spanId) parentId <- Option(parentId).fold(Either.right[Header.DecodeFailure, Long](spanId))(decodeSpanIdValue) } yield SpanId(traceId, parentId, spanId) -> Option(sampled).forall(_ =!= "0")).leftMap( e => Header.DecodeFailure(s"${e.message} within ${encoded.value}", e.cause)) case value => Left(Header.DecodeFailure(s"Could not parse $value into a SpanId", None)) } private def decodeSpanId(encoded: Header.Value): Either[Header.DecodeFailure, Long] = decodeSpanIdValue(encoded.value) private def decodeSpanIdValue(value: String): Either[Header.DecodeFailure, Long] = ByteVector.fromHexDescriptive(value).map(_.toLong()).leftMap(e => Header.DecodeFailure(s"$e for $value", None)) private def decodeTraceId(encoded: Header.Value): Either[Header.DecodeFailure, UUID] = decodeTraceIdValue(encoded.value) private def decodeTraceIdValue(value: String): Either[Header.DecodeFailure, UUID] = for { bv <- ByteVector.fromHexDescriptive(value).leftMap(e => Header.DecodeFailure(s"$e for $value", None)) uuid <- decodeUuid(bv) } yield uuid private def decodeUncompressed(headers: List[Header]): Either[Header.DecodeFailure, Header.Decoded] = { val spanIdAndSampled = for { traceId <- OptionT(headers.collectFirst { case Header(TraceIdHeaderName, value) => value }.traverse(decodeTraceId)) parentIdMaybe <- OptionT.liftF(headers.collectFirst { case Header(ParentIdHeaderName, value) => value }.traverse(decodeSpanId)) spanId <- OptionT(headers.collectFirst { case Header(SpanIdHeaderName, value) => value }.traverse(decodeSpanId)) sampled = headers.collectFirst { case Header(SampledHeaderName, value) => value.value =!= "0" }.getOrElse(true) } yield SpanId(traceId, parentIdMaybe getOrElse spanId, spanId) -> sampled spanIdAndSampled.value.map( _.fold(Header.Decoded(None, true)) { case (spanId, sampled) => Header.Decoded(Some(spanId), sampled) }) } private def decodeUuid(bv: ByteVector): Either[Header.DecodeFailure, UUID] = bv.size match { case TraceIdLongFormByteSize => Either.catchNonFatal(bv.toUUID).leftMap { t => Header.DecodeFailure(s"The $TraceIdHeaderName value ${bv.toHex} cannot be converted to UUID", Some(t)) } case TraceIdShortFormByteSize => Either.right(new UUID(bv.toLong(), 0L)) case other => Either.left(Header.DecodeFailure(s"The $TraceIdHeaderName must be either $TraceIdShortFormByteSize or $TraceIdLongFormByteSize but was ${bv.toHex}", None)) } private def encodeCompressed(spanId: SpanId, sampled: Boolean): List[Header] = { val hv = if (spanId.root) 
s"${encodeTraceIdValue(spanId.traceId)}-${encodeSpanIdValue(spanId.spanId)}-${encodeSampledValue(sampled)}" else s"${encodeTraceIdValue(spanId.traceId)}-${encodeSpanIdValue(spanId.spanId)}-${encodeSampledValue(sampled)}-${encodeSpanIdValue(spanId.parentSpanId)}" List(Header(CompressedHeaderName, Header.Value(hv))) } private def encodeUncompressed(spanId: SpanId, sampled: Boolean): List[Header] = { val traceIdH = List(Header(TraceIdHeaderName, encodeTraceId(spanId.traceId))) val parentIdH = if (spanId.root) List.empty[Header] else List(Header(ParentIdHeaderName, encodeSpanId(spanId.parentSpanId))) val spanIdH = List(Header(SpanIdHeaderName, encodeSpanId(spanId.spanId))) val sampledH = List(Header(SampledHeaderName, encodeSampled(sampled))) traceIdH ++ parentIdH ++ spanIdH ++ sampledH } private def encodeSampled(sampled: Boolean): Header.Value = Header.Value(encodeSampledValue(sampled)) private def encodeSampledValue(sampled: Boolean): String = if (sampled) "1" else "0" private def encodeSpanId(spanId: Long): Header.Value = Header.Value(encodeSpanIdValue(spanId)) private def encodeSpanIdValue(spanId: Long): String = ByteVector.fromLong(spanId).toHex private def encodeTraceId(traceId: UUID): Header.Value = Header.Value(encodeTraceIdValue(traceId)) private def encodeTraceIdValue(traceId: UUID): String = ByteVector.fromUUID(traceId).toHex private def sampleTrace(properties: Map[String, String]): Boolean = properties.get(Sampled).flatMap(s => Either.catchNonFatal(s.toBoolean).toOption).getOrElse(true) } object XB3HeaderCodec { /** * Property to pass to the `HeaderCodec.encode` method's `properties` that, when set to "true", * indicates a compressed `b3` HTTP header should be generated rather than three separate `X-B3-TraceId`, * `X-B3-ParentSpanId` and `X-B3-SpanId` headers. */ final val Compressed: String = "compressed-headers" /** * Property to pass to the `HeaderCodec.encode` method's `properties` that, when set to "true", * indicates the X-B3-Sampled header (or "sampled" section of compressed header) should be set to "1" * and when false that it should be set to "0". If not present, the flag will be set to "1" by * default in X-B3-Sampled Header/compressed sampled section as appropriate. */ final val Sampled: String = "sampled" /* Used to validate / parse B3 compressed HTTP header into a `SpanId` instance. */ final val CompressedHeaderRegex: Regex = s"([0-9a-fA-F]+)-([0-9a-fA-F]+)(?:-([0-1]))?(?:-([0-9a-fA-F]+))?".r /** The `X-B3` compliant compressed header format where TraceID-SpanId-ParentId are embedded in a single header */ final val CompressedHeaderName: Header.CaseInsensitiveName = Header.CaseInsensitiveName("b3") /** The `X-B3` compliant Trace ID HTTP header name. */ final val TraceIdHeaderName: Header.CaseInsensitiveName = Header.CaseInsensitiveName("X-B3-TraceId") /** The `X-B3` compliant Parent Span ID HTTP header name. */ final val ParentIdHeaderName: Header.CaseInsensitiveName = Header.CaseInsensitiveName("X-B3-ParentSpanId") /** The `X-B3` compliant Span ID HTTP header name. */ final val SpanIdHeaderName: Header.CaseInsensitiveName = Header.CaseInsensitiveName("X-B3-SpanId") /** The `X-B3` compliant Sampled HTTP header name. */ final val SampledHeaderName: Header.CaseInsensitiveName = Header.CaseInsensitiveName("X-B3-Sampled") /** The number of bytes (long form) of the Trace ID */ final val TraceIdLongFormByteSize: Long = 16L /** The number of bytes (short form) of the Trace ID. 
Note that the short form is not optimal if you want to ensure unique trace IDs */ final val TraceIdShortFormByteSize: Long = 8L }
ccadllc/cedi-dtrace
xb3/shared/src/main/scala/com/ccadllc/cedi/dtrace/interop/xb3/XB3HeaderCodec.scala
Scala
apache-2.0
9,859
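A hypothetical round-trip sketch for the codec above; the SpanId values are arbitrary, and the field names on the decoded result (spanId, sampled) are assumed from the Header.Decoded constructions visible in the decode path.

import java.util.UUID
import com.ccadllc.cedi.dtrace.SpanId
import com.ccadllc.cedi.dtrace.interop.xb3.XB3HeaderCodec

val codec = new XB3HeaderCodec
val span = SpanId(UUID.randomUUID(), 1L, 2L) // traceId, parentSpanId, spanId

// Encode as a single compressed `b3` header rather than three X-B3-* headers.
val headers = codec.encode(span, Map(XB3HeaderCodec.Compressed -> "true"))

// Decoding recovers the same SpanId; the sampled flag defaults to true here.
codec.decode(headers, Map.empty) match {
  case Right(decoded) => assert(decoded.spanId.contains(span) && decoded.sampled)
  case Left(failure)  => println(s"decode failed: ${failure.message}")
}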
package romanowski.slower object F { def fromF = E.fromE def toF = E.toE }
romanowski/presentations
code/src/main/scala/romanowski/slower/F.scala
Scala
unlicense
81
/* * Created on 2010/05/09 * Copyright (c) 2010-2011, Wei-ju Wu. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of Wei-ju Wu nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ package org.zmpp.glk.windows import org.scalatest.FlatSpec import org.scalatest.matchers.ShouldMatchers import org.junit.runner.RunWith import org.scalatest.junit.JUnitRunner /** * Note: We compare with xUnit matchers, there seems to be a Scala/Specs bug, which * tries to use String.isEmpty which only exists in Java SE 6 */ @RunWith(classOf[JUnitRunner]) class GlkWindowsSpec extends FlatSpec with ShouldMatchers { "GlkWindowSystem" should "be initialized" in { val windowSystem = new GlkWindowSystem windowSystem.iterate(0) should be (null) } }
logicmoo/zmpp2
zmpp-glk/src/test/scala/GlkWindowsTest.scala
Scala
bsd-3-clause
2,133