// Dataset schema: code (string, 5-1M chars) | repo_name (string, 5-109) | path (string, 6-208) | language (1 class) | license (15 classes) | size (int64, 5-1M)
/**
* Copyright (C) 2009-2015 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.routing
import scala.collection.immutable
import akka.ConfigurationException
import akka.actor.Actor
import akka.actor.ActorContext
import akka.actor.ActorPath
import akka.actor.AutoReceivedMessage
import akka.actor.OneForOneStrategy
import akka.actor.Props
import akka.actor.SupervisorStrategy
import akka.actor.Terminated
import akka.dispatch.Dispatchers
import akka.actor.ActorSystem
import akka.japi.Util.immutableSeq
/**
* This trait represents a router factory: it produces the actual router actor
* and creates the routing table (a function which determines the recipients
* for each message which is to be dispatched). The resulting RoutedActorRef
* optimizes the sending of the message so that it does NOT go through the
* router’s mailbox unless the route returns an empty recipient set.
*
* '''Caution:''' This means
* that the route function is evaluated concurrently without protection by
* the RoutedActorRef: either provide a reentrant (i.e. pure) implementation or
* do the locking yourself!
*
* '''Caution:''' Please note that the [[akka.routing.Router]] which needs to
* be returned by `createActor()` should not send a message to itself in its
* constructor or `preStart()` or publish its self reference from there: if
* someone tries sending a message to that reference before the constructor of
* RoutedActorRef has returned, there will be a `NullPointerException`!
*/
@SerialVersionUID(1L)
trait RouterConfig extends Serializable {
/**
* Create the actual router, responsible for routing messages to routees.
* @param system the ActorSystem this router belongs to
*/
def createRouter(system: ActorSystem): Router
/**
* Dispatcher ID to use for running the “head” actor, which handles
* supervision, death watch and router management messages
*/
def routerDispatcher: String
/**
* Possibility to define an actor for controlling the routing
* logic from external stimuli (e.g. monitoring metrics).
* This actor will be a child of the router "head" actor.
* Management messages not handled by the "head" actor are
* delegated to this controller actor.
*/
def routingLogicController(routingLogic: RoutingLogic): Option[Props] = None
/**
* Is the message handled by the router head actor or the
* [[#routingLogicController]] actor.
*/
def isManagementMessage(msg: Any): Boolean = msg match {
case _: AutoReceivedMessage | _: Terminated | _: RouterManagementMesssage ⇒ true
case _ ⇒ false
}
/**
* Specify that this router should stop itself when all routees have terminated (been removed).
* By default it is `true`, unless a `resizer` is used.
*/
def stopRouterWhenAllRouteesRemoved: Boolean = true
/**
* Overridable merge strategy, by default completely prefers `this` (i.e. no merge).
*/
def withFallback(other: RouterConfig): RouterConfig = this
/**
* Check that everything is there which is needed. Called in constructor of RoutedActorRef to fail early.
*/
def verifyConfig(path: ActorPath): Unit = ()
/**
* INTERNAL API
* The router "head" actor.
*/
private[akka] def createRouterActor(): RouterActor
}
/**
* INTERNAL API
*
* Used to override unset configuration in a router.
*/
private[akka] trait PoolOverrideUnsetConfig[T <: Pool] extends Pool {
final def overrideUnsetConfig(other: RouterConfig): RouterConfig =
if (other == NoRouter) this // NoRouter is the default, hence “neutral”
else {
other match {
case p: Pool ⇒
val wssConf: PoolOverrideUnsetConfig[T] =
if ((this.supervisorStrategy eq Pool.defaultSupervisorStrategy)
&& (p.supervisorStrategy ne Pool.defaultSupervisorStrategy))
this.withSupervisorStrategy(p.supervisorStrategy).asInstanceOf[PoolOverrideUnsetConfig[T]]
else this
if (wssConf.resizer.isEmpty && p.resizer.isDefined)
wssConf.withResizer(p.resizer.get)
else
wssConf
case _ ⇒ this
}
}
def withSupervisorStrategy(strategy: SupervisorStrategy): T
def withResizer(resizer: Resizer): T
}
/**
* Java API: Base class for custom router [[Group]]
*/
abstract class GroupBase extends Group {
def getPaths: java.lang.Iterable[String]
override final def paths: immutable.Iterable[String] = immutableSeq(getPaths)
}
/**
* `RouterConfig` for router actor with routee actors that are created external to the
* router and the router sends messages to the specified path using actor selection,
* without watching for termination.
*/
trait Group extends RouterConfig {
def paths: immutable.Iterable[String]
/**
* [[akka.actor.Props]] for a group router based on the settings defined by
* this instance.
*/
def props(): Props = Props.empty.withRouter(this)
/**
* INTERNAL API
*/
private[akka] def routeeFor(path: String, context: ActorContext): Routee =
ActorSelectionRoutee(context.actorSelection(path))
/**
* INTERNAL API
*/
private[akka] override def createRouterActor(): RouterActor = new RouterActor
}
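// Usage sketch (assumption: illustrative only, using akka.routing.RoundRobinGroup,
// a Group implementation provided elsewhere in akka.routing; the actor paths and
// `system` are hypothetical). The routees must already exist, since a Group only
// routes to them via actor selection and neither watches nor supervises them:
//
//   val paths = List("/user/workers/w1", "/user/workers/w2")
//   val router = system.actorOf(RoundRobinGroup(paths).props(), "groupRouter")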
object Pool {
val defaultSupervisorStrategy: SupervisorStrategy = OneForOneStrategy() {
case _ ⇒ SupervisorStrategy.Escalate
}
}
/**
* Java API: Base class for custom router [[Pool]]
*/
abstract class PoolBase extends Pool
/**
* `RouterConfig` for router actor that creates routees as child actors and removes
* them from the router if they terminate.
*/
trait Pool extends RouterConfig {
@deprecated("Implement nrOfInstances with ActorSystem parameter instead", "2.4")
def nrOfInstances: Int = -1
/**
* Initial number of routee instances
*/
def nrOfInstances(sys: ActorSystem): Int
/**
* Use a dedicated dispatcher for the routees of the pool.
* The dispatcher is defined in 'pool-dispatcher' configuration property in the
* deployment section of the router.
*/
def usePoolDispatcher: Boolean = false
/**
* INTERNAL API
*/
private[akka] def newRoutee(routeeProps: Props, context: ActorContext): Routee =
ActorRefRoutee(context.actorOf(enrichWithPoolDispatcher(routeeProps, context)))
/**
* INTERNAL API
*/
private[akka] def enrichWithPoolDispatcher(routeeProps: Props, context: ActorContext): Props =
if (usePoolDispatcher && routeeProps.dispatcher == Dispatchers.DefaultDispatcherId)
routeeProps.withDispatcher("akka.actor.deployment." + context.self.path.elements.drop(1).mkString("/", "/", "")
+ ".pool-dispatcher")
else
routeeProps
/**
* A pool with a dynamically resizable number of routees returns the [[akka.routing.Resizer]]
* to use. The resizer is invoked once when the router is created, before any messages can
* be sent to it. Resize is also triggered when messages are sent to the routees, and the
* resizer is invoked asynchronously, i.e. not necessarily before the message has been sent.
*/
def resizer: Option[Resizer]
/**
* SupervisorStrategy for the head actor, i.e. for supervising the routees of the pool.
*/
def supervisorStrategy: SupervisorStrategy
/**
* [[akka.actor.Props]] for a pool router based on the settings defined by
* this instance and the supplied [[akka.actor.Props]] for the routees created by the
* router.
*/
def props(routeeProps: Props): Props = routeeProps.withRouter(this)
/**
* Specify that this router should stop itself when all routees have terminated (been removed).
* By default it is `true`, unless a `resizer` is used.
*/
override def stopRouterWhenAllRouteesRemoved: Boolean = resizer.isEmpty
/**
* INTERNAL API
*/
private[akka] override def createRouterActor(): RouterActor =
resizer match {
case None ⇒ new RouterPoolActor(supervisorStrategy)
case Some(r) ⇒ new ResizablePoolActor(supervisorStrategy)
}
}
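// Usage sketch (assumption: illustrative only, using akka.routing.RoundRobinPool,
// a Pool implementation provided elsewhere in akka.routing; `Worker` and `system`
// are hypothetical). The pool creates the routees as its own children and
// supervises them with `supervisorStrategy`:
//
//   val router = system.actorOf(RoundRobinPool(5).props(Props[Worker]), "poolRouter")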
/**
* If a custom router implementation is not a [[Group]] nor
* a [[Pool]] it may extend this base class.
*/
abstract class CustomRouterConfig extends RouterConfig {
/**
* INTERNAL API
*/
private[akka] override def createRouterActor(): RouterActor = new RouterActor
override def routerDispatcher: String = Dispatchers.DefaultDispatcherId
}
/**
* Router configuration which has no default, i.e. external configuration is required.
*/
case object FromConfig extends FromConfig {
/**
* Java API: get the singleton instance
*/
def getInstance = this
@inline final def apply(
resizer: Option[Resizer] = None,
supervisorStrategy: SupervisorStrategy = Pool.defaultSupervisorStrategy,
routerDispatcher: String = Dispatchers.DefaultDispatcherId) =
new FromConfig(resizer, supervisorStrategy, routerDispatcher)
@inline final def unapply(fc: FromConfig): Option[String] = Some(fc.routerDispatcher)
}
/**
* Java API: Router configuration which has no default, i.e. external configuration is required.
*
* This can be used when the dispatcher to be used for the head Router needs to be configured
* (defaults to default-dispatcher).
*/
@SerialVersionUID(1L)
class FromConfig(override val resizer: Option[Resizer],
override val supervisorStrategy: SupervisorStrategy,
override val routerDispatcher: String) extends Pool {
def this() = this(None, Pool.defaultSupervisorStrategy, Dispatchers.DefaultDispatcherId)
override def createRouter(system: ActorSystem): Router =
throw new UnsupportedOperationException("FromConfig must not create Router")
/**
* INTERNAL API
*/
override private[akka] def createRouterActor(): RouterActor =
throw new UnsupportedOperationException("FromConfig must not create RouterActor")
override def verifyConfig(path: ActorPath): Unit =
throw new ConfigurationException(s"Configuration missing for router [$path] in 'akka.actor.deployment' section.")
/**
* Setting the supervisor strategy to be used for the “head” Router actor.
*/
def withSupervisorStrategy(strategy: SupervisorStrategy): FromConfig =
new FromConfig(resizer, strategy, routerDispatcher)
/**
* Setting the resizer to be used.
*/
def withResizer(resizer: Resizer): FromConfig =
new FromConfig(Some(resizer), supervisorStrategy, routerDispatcher)
/**
* Setting the dispatcher to be used for the router head actor, which handles
* supervision, death watch and router management messages.
*/
def withDispatcher(dispatcherId: String): FromConfig =
new FromConfig(resizer, supervisorStrategy, dispatcherId)
override def nrOfInstances(sys: ActorSystem): Int = 0
/**
* [[akka.actor.Props]] for a group router based on the settings defined by
* this instance.
*/
def props(): Props = Props.empty.withRouter(this)
}
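// Usage sketch (assumption: illustrative only; `Worker` and `system` are
// hypothetical). FromConfig defers everything to the 'akka.actor.deployment'
// configuration section, so the router type and routee count come from
// configuration rather than code:
//
//   val router = system.actorOf(FromConfig.props(Props[Worker]), "configuredRouter")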
/**
* Routing configuration that indicates no routing; this is also the default
* value which hence overrides the merge strategy in order to accept values
* from lower-precedence sources. The decision whether or not to create a
* router is taken in the LocalActorRefProvider based on Props.
*/
@SerialVersionUID(1L)
abstract class NoRouter extends RouterConfig
case object NoRouter extends NoRouter {
override def createRouter(system: ActorSystem): Router = throw new UnsupportedOperationException("NoRouter has no Router")
/**
* INTERNAL API
*/
override private[akka] def createRouterActor(): RouterActor =
throw new UnsupportedOperationException("NoRouter must not create RouterActor")
override def routerDispatcher: String = throw new UnsupportedOperationException("NoRouter has no dispatcher")
override def withFallback(other: akka.routing.RouterConfig): akka.routing.RouterConfig = other
/**
* Java API: get the singleton instance
*/
def getInstance = this
def props(routeeProps: Props): Props = routeeProps.withRouter(this)
}
/**
* INTERNAL API
*/
@SerialVersionUID(1L) private[akka] trait RouterManagementMesssage
/**
* Sending this message to a router will make it send back its currently used routees.
* A [[Routees]] message is sent asynchronously to the "requester" containing information
* about what routees the router is routing over.
*/
@SerialVersionUID(1L) abstract class GetRoutees extends RouterManagementMesssage
@SerialVersionUID(1L) case object GetRoutees extends GetRoutees {
/**
* Java API: get the singleton instance
*/
def getInstance = this
}
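// Usage sketch (assumption: `router`, an ask Timeout and an ExecutionContext are
// presumed to be in scope). The reply arrives asynchronously as a Routees message:
//
//   import akka.pattern.ask
//   (router ? GetRoutees).mapTo[Routees].foreach(r => println(r.routees.size))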
/**
* Message used to carry information about what routees the router is currently using.
*/
@SerialVersionUID(1L)
final case class Routees(routees: immutable.IndexedSeq[Routee]) {
/**
* Java API
*/
def getRoutees: java.util.List[Routee] = {
import scala.collection.JavaConverters._
routees.asJava
}
}
/**
* Add a routee by sending this message to the router.
* It may be handled after other messages.
*/
@SerialVersionUID(1L)
final case class AddRoutee(routee: Routee) extends RouterManagementMesssage
/**
* Remove a specific routee by sending this message to the router.
* It may be handled after other messages.
*
* For a pool, with child routees, the routee is stopped by sending a [[akka.actor.PoisonPill]]
to the routee. Precautions are taken to reduce the risk of dropping messages that are concurrently
* being routed to the removed routee, but there are no guarantees.
*
*/
@SerialVersionUID(1L)
final case class RemoveRoutee(routee: Routee) extends RouterManagementMesssage
/**
* Increase or decrease the number of routees in a [[Pool]].
* It may be handled after other messages.
*
* Positive `change` will add that number of routees to the [[Pool]].
* Negative `change` will remove that number of routees from the [[Pool]].
* Routees are stopped by sending a [[akka.actor.PoisonPill]] to the routee.
* Precautions are taken to reduce the risk of dropping messages that are concurrently
* being routed to the removed routee, but it is not guaranteed that messages are not
* lost.
*/
@SerialVersionUID(1L)
final case class AdjustPoolSize(change: Int) extends RouterManagementMesssage
// source: jmnarloch/akka.js | akka-js-actor/jvm/src/main/scala/akka/routing/RouterConfig.scala | Scala | bsd-3-clause | 14,111 bytes
package com.airbnb.common.ml.xgboost
import org.junit.Test
import com.airbnb.common.ml.util.ScalaLogging
class XGBoostPipelineTest extends ScalaLogging {
@Test
def testParams(): Unit = {
val params: Array[Double] = Array(0.1, 0.2, 10.0, 2.0, 0.3, 0.4, 5.0, 0.6, 0.7)
val map = XGBoostPipeline.getParamMap(params)
logger.info(map.toString())
}
}
// source: airbnb/aerosolve | airlearner/airlearner-xgboost/src/test/scala/com/airbnb/common/ml/xgboost/XGBoostPipelineTest.scala | Scala | apache-2.0 | 368 bytes
package org.scalaide.ui.internal.editor.outline
import org.eclipse.jface.text.reconciler.IReconcilingStrategy
import org.eclipse.jface.text.reconciler.IReconcilingStrategyExtension
import org.scalaide.logging.HasLogger
import org.eclipse.jface.text._
import org.eclipse.core.runtime.IProgressMonitor
import org.eclipse.jface.text.reconciler.DirtyRegion
import org.scalaide.util.ui.DisplayThread
class ScalaOutlineReconcilingStrategy(icuEditor: OutlinePageEditorExtension) extends IReconcilingStrategy with IReconcilingStrategyExtension with HasLogger {
private def icUnit = icuEditor.getInteractiveCompilationUnit()
override def setDocument(doc: IDocument): Unit = {}
override def setProgressMonitor(pMonitor: IProgressMonitor): Unit = {}
override def reconcile(dirtyRegion: DirtyRegion, subRegion: IRegion): Unit = {
logger.debug("Incremental reconciliation not implemented.")
}
override def reconcile(partition: IRegion): Unit = {
val sop = Option(icuEditor.getOutlinePage)
if (!sop.isEmpty) {
val oldRoot = sop.get.getInput
icUnit.scalaProject.presentationCompiler.apply(comp => {
val rootNode = ModelBuilder.buildTree(comp, icUnit.sourceMap(icuEditor.getViewer.getDocument.get.toCharArray()).sourceFile)
val delta = if (oldRoot != null) oldRoot.updateAll(rootNode) else null
DisplayThread.asyncExec(
if (delta eq null)
sop.get.setInput(rootNode)
else
sop.get.update(delta))
})
}
}
override def initialReconcile(): Unit = {
reconcile(null)
}
}
// source: dragos/scala-ide | org.scala-ide.sdt.core/src/org/scalaide/ui/internal/editor/outline/ScalaOutlineReconcilingStrategy.scala | Scala | bsd-3-clause | 1,578 bytes
package com.softdevgang.types.ikusalic
import scala.language.higherKinds
import scala.collection.generic.CanBuildFrom
import scala.concurrent.{Promise, ExecutionContext, Future}
import scala.util.{Failure, Success, Try}
object FutureUtils {
implicit class AugmentedFuture[+A](private val future: Future[A]) extends AnyVal {
final def mapTry[B](f: Try[A] => B)(implicit executor: ExecutionContext): Future[B] = {
val p = Promise[B]()
future.onComplete(result => p.complete(Try(f(result))))
p.future
}
final def flatMapTry[B](f: Try[A] => Future[B])(implicit executor: ExecutionContext): Future[B] = {
val p = Promise[B]()
future.onComplete(result => p.completeWith(f(result)))
p.future
}
}
def traverseByPartitioning[A, B, M[_] <: TraversableOnce[_]]
(in: M[A])
(f: A => Future[B])
(implicit
cbfFailure: CanBuildFrom[M[A], (A, Throwable), M[(A, Throwable)]],
cbfSuccess: CanBuildFrom[M[A], (A, B), M[(A, B)]],
executor: ExecutionContext
): Future[(M[(A, Throwable)], M[(A, B)])] = {
val computations: List[(A, Future[B])] =
in.foldLeft(List.empty[(A, Future[B])]) { case (acc, untypedA) =>
val a = untypedA.asInstanceOf[A]
(a -> f(a)) :: acc
}.reverse
val failures = cbfFailure(in)
val successes = cbfSuccess(in)
def accumulateResults(cs: List[(A, Future[B])]): Future[(M[(A, Throwable)], M[(A, B)])] = cs match {
case (a, futureB) :: tail => futureB.flatMapTry { result =>
result match {
case Success(b) => successes += a -> b
case Failure(e) => failures += a -> e
}
accumulateResults(tail)
}
case Nil => Future.successful(failures.result() -> successes.result())
}
accumulateResults(computations)
}
}
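// Usage sketch (assumption: a hypothetical `fetch: String => Future[Int]`).
// traverseByPartitioning runs `fetch` over all inputs and, instead of failing
// the whole traversal on the first error, splits the outcomes into failures
// and successes:
//
//   import scala.concurrent.ExecutionContext.Implicits.global
//   val result: Future[(List[(String, Throwable)], List[(String, Int)])] =
//     FutureUtils.traverseByPartitioning(List("a", "b", "c"))(fetch)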
// source: SoftDevGang/jvm-types | src/main/scala/com/softdevgang/types/ikusalic/FutureUtils.scala | Scala | mit | 2,058 bytes
package com.decaf.langz
sealed trait Expression
sealed trait SymbolExp extends Expression
case class NumberSym(value: Int) extends SymbolExp
case class StringSym(value: String) extends SymbolExp
sealed trait Operation extends Expression {
def left: Expression
def right: Expression
}
case class Add(left: Expression, right: Expression) extends Operation
case class Multiply(left: Expression, right: Expression) extends Operation
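// Example tree (sketch): the expression "1 + (2 * 3)" encoded with the AST above.
//
//   val expr: Expression = Add(NumberSym(1), Multiply(NumberSym(2), NumberSym(3)))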
// source: adamdecaf/some-lang | src/main/scala/ast/models.scala | Scala | mit | 436 bytes
package org.scaladebugger.api.lowlevel.classes
import org.scalamock.scalatest.MockFactory
import org.scalatest.{FunSpec, Matchers, ParallelTestExecution}
import org.scaladebugger.api.lowlevel.DummyOperationException
import org.scaladebugger.test.helpers.ParallelMockFunSpec
class DummyClassPrepareManagerSpec extends ParallelMockFunSpec
{
private val TestRequestId = java.util.UUID.randomUUID().toString
private val classPrepareManager = new DummyClassPrepareManager
describe("DummyClassPrepareManager") {
describe("#classPrepareRequestList") {
it("should return an empty list") {
classPrepareManager.classPrepareRequestList should be (empty)
}
}
describe("#createClassPrepareRequestWithId") {
it("should return a failure of dummy operation") {
val result = classPrepareManager.createClassPrepareRequestWithId(
TestRequestId
)
result.isFailure should be (true)
result.failed.get shouldBe a [DummyOperationException]
}
}
describe("#hasClassPrepareRequest") {
it("should return false") {
val expected = false
val actual = classPrepareManager.hasClassPrepareRequest(
TestRequestId
)
actual should be (expected)
}
}
describe("#getClassPrepareRequest") {
it("should return None") {
val expected = None
val actual = classPrepareManager.getClassPrepareRequest(
TestRequestId
)
actual should be (expected)
}
}
describe("#getClassPrepareRequestInfo") {
it("should return None") {
val expected = None
val actual = classPrepareManager.getClassPrepareRequestInfo(
TestRequestId
)
actual should be (expected)
}
}
describe("#removeClassPrepareRequest") {
it("should return false") {
val expected = false
val actual = classPrepareManager.removeClassPrepareRequest(
TestRequestId
)
actual should be (expected)
}
}
}
}
// source: chipsenkbeil/scala-debugger | scala-debugger-api/src/test/scala/org/scaladebugger/api/lowlevel/classes/DummyClassPrepareManagerSpec.scala | Scala | apache-2.0 | 2,065 bytes
package com.greencatsoft.d3.common
import scala.language.implicitConversions
import scala.math.{ max, min }
import scala.scalajs.js.annotation.JSExportAll
import org.scalajs.dom.ClientRect
import org.scalajs.dom.svg.{ Matrix, Rect, SVG }
@JSExportAll
case class Bounds(x: Double, y: Double, width: Double, height: Double) extends Transformable[Bounds] {
def intersects(bounds: Bounds): Boolean = {
require(bounds != null, "Missing argument 'bounds'.")
((x + width > bounds.x && x < bounds.x + bounds.width)
&& (y + height > bounds.y && y < bounds.y + bounds.height))
}
def contains(point: Point): Boolean = {
require(point != null, "Missing argument 'point'.")
point.x >= x && point.x <= x + width && point.y >= y && point.y <= y + height
}
def contains(bounds: Bounds): Boolean = {
require(bounds != null, "Missing argument 'bounds'.")
((x <= bounds.x && x + width >= bounds.x + bounds.width)
&& (y <= bounds.y && y + height >= bounds.y + bounds.height))
}
def +(size: Double): Bounds =
Bounds(x - size, y - size, width + (size * 2), height + (size * 2))
def +(bounds: Bounds): Bounds = {
val sx = min(x, bounds.x)
val sy = min(y, bounds.y)
val mx = max(x + width, bounds.x + bounds.width)
val my = max(y + height, bounds.y + bounds.height)
Bounds(sx, sy, mx - sx, my - sy)
}
def +(point: Point): Bounds = {
val sx = min(x, point.x)
val sy = min(y, point.y)
val mx = max(x + width, point.x)
val my = max(y + height, point.y)
Bounds(sx, sy, mx - sx, my - sy)
}
def location = Point(x, y)
def center = Point(x + width / 2, y + height / 2)
def size = Dimension(width, height)
override def matrixTransform(matrix: Matrix)(implicit ownerNode: SVG): Bounds =
Quad.fromBounds(this).matrixTransform(matrix).bounds
override def toString(): String = s"Bounds(x: $x, y: $y, width: $width, height: $height)"
}
object Bounds {
def empty: Bounds = new Bounds(0, 0, 0, 0)
def empty(location: Point): Bounds = new Bounds(location.x, location.y, 0, 0)
// varargs arrive as a generic Seq, so convert to List before matching on :: / Nil
def merge(bounds: Bounds*): Bounds = bounds.toList match {
case head :: tail => tail.fold(head)((b1, b2) => b1 + b2)
case Nil => Bounds.empty
}
implicit def svgRect2Bounds(rect: Rect) = {
require(rect != null, "Missing argument 'rect'.")
Bounds(rect.x, rect.y, rect.width, rect.height)
}
implicit def clientRect2Bounds(rect: ClientRect) = {
require(rect != null, "Missing argument 'rect'.")
Bounds(rect.left, rect.top, rect.width, rect.height)
}
implicit def bounds2Rect(bounds: Bounds)(implicit viewNode: SVG) = {
require(bounds != null, "Missing argument 'bounds'.")
val rect = viewNode.createSVGRect
rect.x = bounds.x
rect.y = bounds.y
rect.width = bounds.width
rect.height = bounds.height
rect
}
}
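// Quick sketch of the combinators above: `+` on two Bounds yields their union,
// and `+ size` grows a Bounds symmetrically on all sides.
//
//   Bounds(0, 0, 10, 10) + Bounds(5, 5, 10, 10)  // Bounds(0, 0, 15, 15)
//   Bounds(5, 5, 10, 10) + 2.0                   // Bounds(3, 3, 14, 14)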
// source: dsugden/scalajs-d3 | src/main/scala/com/greencatsoft/d3/common/Bounds.scala | Scala | apache-2.0 | 2,850 bytes
package com.bot4s.telegram.methods
import com.bot4s.telegram.models.InputFile
/**
* Base type for multipart API requests (for file uploads)
*
* @tparam R Expected result type.
*
* Request will be serialized as multipart/form-data
*/
trait MultipartRequest[R] extends Request[R] {
def getFiles: List[(String, InputFile)]
}
// source: mukel/telegrambot4s | core/src/com/bot4s/telegram/methods/MultipartRequest.scala | Scala | apache-2.0 | 332 bytes
package roc
package postgresql
package object server {
type Field = (Char, String)
type Fields = List[Field]
}
// source: penland365/roc | core/src/main/scala/roc/postgresql/server/package.scala | Scala | bsd-3-clause | 117 bytes
package scoobie.doobie
import scoobie.ast.ansi._
/**
* Created by jacob.barber on 5/25/16.
*/
trait DoobieSupport {
implicit def toQuerySelectExtensions(expr: QuerySelect[ScoobieFragmentProducer])(implicit sqlInterpreter: DoobieSqlInterpreter): QuerySelectExtensions = new QuerySelectExtensions(expr)
implicit def toQueryExpressionExtensions(expr: QueryExpression[ScoobieFragmentProducer])(implicit sqlInterpreter: DoobieSqlInterpreter): QueryExpressionExtensions = new QueryExpressionExtensions(expr)
implicit def toQueryModifyExtensions(expr: QueryModify[ScoobieFragmentProducer])(implicit sqlInterpreter: DoobieSqlInterpreter): QueryModifyExtensions = new QueryModifyExtensions(expr)
}
// source: Jacoby6000/Scala-SQL-AST | doobie/support/src/main/scala/scoobie/doobie/DoobieSupport.scala | Scala | mit | 701 bytes
package cbb.gmtrainner
case class MarkovMatCaculator(genes: List[Gene], korder: Int) {
val frameMatrices = FrameMatrices(korder)
val unframeMatrices = UnframeMatrices(korder)
def caculate() = {
unframeMatrices.process( genes.flatMap(_.introns) )
frameMatrices.process( genes.map((x) => {
val s = x.cds_seq
s.substring(0, s.length-3)
}) )
}
}
// source: XingjianXu/gmtrainer | src/main/scala/cbb/gmtrainner/MarkovMatCaculator.scala | Scala | lgpl-3.0 | 378 bytes
package io.ddf.flink.content
import io.ddf.DDF
import io.ddf.content.AMetaDataHandler
import io.ddf.flink.FlinkDDF
import org.apache.flink.api.scala.DataSet
class MetaDataHandler(ddf: DDF) extends AMetaDataHandler(ddf) {
override protected def getNumRowsImpl: Long = {
val dataset: DataSet[Array[Object]] = ddf.getRepresentationHandler.get(classOf[DataSet[_]],classOf[Array[Object]]).asInstanceOf[DataSet[Array[Object]]]
dataset.count()
}
}
// source: milliondreams/ddf-with-flink | flink/src/main/scala/io/ddf/flink/content/MetaDataHandler.scala | Scala | apache-2.0 | 455 bytes
package epic.features
import breeze.linalg._
import epic.features.SurfaceFeaturizer.{MarkedWordFeaturizer, MarkerPos}
import epic.framework.Feature
import scala.collection.mutable.ArrayBuffer
import scala.collection.immutable
/**
*
* @author dlwh
*/
trait WordFeaturizer[W] {
def anchor(words: IndexedSeq[W]):WordFeatureAnchoring[W]
def +(other: WordFeaturizer[W]) = (this,other) match {
case (x:ZeroFeaturizer[_],_) => other
case (_, x:ZeroFeaturizer[_]) => this
case (MultiWordFeaturizer(feats),MultiWordFeaturizer(feats2)) => new MultiWordFeaturizer(feats ++ feats2)
case (MultiWordFeaturizer(feats),_) => new MultiWordFeaturizer(feats :+ other)
case (_,MultiWordFeaturizer(feats2)) => new MultiWordFeaturizer(this +: feats2)
case _ => new MultiWordFeaturizer(this, other)
}
def *(other:WordFeaturizer[W]) = new ProductWordFeaturizer(this, other)
def offset(i: Int) = new OffsetWordFeaturizer(this, i)
}
object WordFeaturizer {
def goodPOSTagFeaturizer[L](counts: Counter2[L, String, Double]) = {
val dsl = new WordFeaturizer.DSL[L](counts)
import dsl._
(
unigrams(word + clss, 1)
+ bigrams(clss, 2)
+ bigrams(tagDict, 2)
+ suffixes()
+ prefixes()
+ props
)
}
def goodPOSTagTransitionFeaturizer[L](counts: Counter2[L, String, Double]) = {
val dsl = new WordFeaturizer.DSL[L](counts)
import dsl._
word + clss
}
def apply[W](f: W=>Array[Feature]) = new TabulatedWordFeaturizer(f)
case class DSL[L](counts: Counter2[L, String, Double],
commonWordThreshold: Int = 100,
unknownWordThreshold: Int = 2) {
val summedCounts = sum(counts, Axis._0)
val word = new IdentityWordFeaturizer[String](summedCounts, unknownWordThreshold)
val shape = new WordShapeFeaturizer(summedCounts, commonWordThreshold)
val clss = new WordClassFeaturizer(summedCounts, commonWordThreshold)
val tagDict = new TagDictionaryFeaturizer[L](counts, commonWordThreshold)
val props = new WordPropertyFeaturizer(summedCounts)
val lfsuf = LongestFrequentSuffixFeaturizer(summedCounts, commonWordThreshold)
def suffixes(order: Int = 5) = new WordSuffixFeaturizer(summedCounts, suffixOrder = order, commonWordThreshold = commonWordThreshold)
def prefixes(order: Int = 5) = new WordPrefixFeaturizer(summedCounts, prefixOrder = order, commonWordThreshold = commonWordThreshold)
def nextWordToRight(f: WordFeaturizer[String]): NextActualWordFeaturizer = new NextActualWordFeaturizer(f, lookRight = true)
def nextWordToLeft(f: WordFeaturizer[String]): NextActualWordFeaturizer = new NextActualWordFeaturizer(f, lookRight = false)
val zero = new ZeroFeaturizer[String]
def bigrams(f: WordFeaturizer[String], offsetOrder:Int = 1) = new MultiWordFeaturizer[String]({
for(i <- -offsetOrder until offsetOrder) yield {
f(i) * f(i+1)
}
})
def unigrams(f: WordFeaturizer[String], offsetOrder:Int = 1) = new MultiWordFeaturizer[String]({
for(i <- -offsetOrder to offsetOrder) yield {
if (i == 0) f else f(i)
}
})
def context(f: WordFeaturizer[String], order: Int = 4) = new ContextFeaturizer[String](f, order)
implicit class RichFeaturizer[String](f: WordFeaturizer[String]) {
def apply[T, R](i: T)(implicit wfChanger: WordFeaturizer.Modifier[String, T, R]):R = wfChanger(f, i)
def apply(mp: MarkerPos) = new MarkedWordFeaturizer(f, mp)
}
}
/** Used in the DSL for turning a WordFeaturizer into something else */
trait Modifier[W, T, R] {
def apply(f: WordFeaturizer[W], t: T):R
}
implicit def offsetModifier[W]: Modifier[W, Int, WordFeaturizer[W]] = new Modifier[W, Int, WordFeaturizer[W]] {
def apply(f: WordFeaturizer[W], t: Int): WordFeaturizer[W] = f offset t
}
implicit def rangeModifier[W]: Modifier[W, Range, WordFeaturizer[W]] = new Modifier[W, Range, WordFeaturizer[W]] {
def apply(f: WordFeaturizer[W], r: Range): WordFeaturizer[W] = r.map(i => f.offset(i):WordFeaturizer[W]).reduceLeft(_ * _)
}
class TabulatedWordFeaturizer[W](f: W=>Array[Feature]) extends WordFeaturizer[W] with Serializable {
override def anchor(w: IndexedSeq[W]): WordFeatureAnchoring[W] = new WordFeatureAnchoring[W] {
override def words: IndexedSeq[W] = w
val feats = words.map(f)
override def featuresForWord(pos: Int): Array[Feature] = if (pos < 0 || pos >= words.length) Array() else feats(pos)
}
}
}
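// Usage sketch (assumption: `counts: Counter2[String, String, Double]` is a
// tag-by-word count table gathered elsewhere). The DSL combinators above
// assemble a featurizer, and `anchor` specializes it to one sentence:
//
//   val featurizer = WordFeaturizer.goodPOSTagFeaturizer(counts)
//   val anchored = featurizer.anchor(IndexedSeq("the", "cat", "sat"))
//   val feats: Array[Feature] = anchored.featuresForWord(1)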
class ZeroFeaturizer[W] extends WordFeaturizer[W] with SurfaceFeaturizer[W] with Serializable {
private val emptyArray = Array[Feature]()
def anchor(words: IndexedSeq[W]): SurfaceFeatureAnchoring[W] with WordFeatureAnchoring[W] = {
val w = words
new SurfaceFeatureAnchoring[W] with WordFeatureAnchoring[W] {
def words: IndexedSeq[W] = w
def featuresForWord(pos: Int): Array[Feature] = emptyArray
def featuresForSpan(begin: Int, end: Int): Array[Feature] = emptyArray
}
}
}
class NextActualWordFeaturizer(f: WordFeaturizer[String], lookRight: Boolean, isPunct: (String=>Boolean) = _.forall(!_.isLetterOrDigit)) extends WordFeaturizer[String] with Serializable {
val dir = if (lookRight) 'Right else 'Left
def anchor(words: IndexedSeq[String]): WordFeatureAnchoring[String] = {
val w = words
new WordFeatureAnchoring[String] {
val base = f.anchor(w)
// one for each position
val features: immutable.IndexedSeq[Array[Feature]] = w.indices.map { _pos =>
var pos = _pos
val delta = if (lookRight) 1 else -1
val feats = new ArrayBuffer[Feature]()
var done = false
while (!done && pos >= 0 && pos < w.length) {
if (isPunct(w(pos))) {
feats ++= base.featuresForWord(pos).map(PunctuationFeature(_, dir))
} else {
feats ++= base.featuresForWord(pos).map(ActualWordFeature(_, dir))
done = true
}
pos += delta
}
if (pos < 0 || pos >= w.length) feats ++= base.featuresForWord(pos)
feats.toArray
}
def words: IndexedSeq[String] = w
def featuresForWord(pos: Int): Array[Feature] = {
if (pos < 0 || pos >= w.length) base.featuresForWord(pos)
else features(pos)
}
}
}
}
case class PunctuationFeature(f: Feature, dir: Symbol) extends Feature
case class ActualWordFeature(f: Feature, dir: Symbol) extends Feature
// source: jovilius/epic | src/main/scala/epic/features/WordFeaturizer.scala | Scala | apache-2.0 | 6,476 bytes
package controllers
import play.api._
import play.api.mvc._
import play.api.data._
import play.api.data.Forms._
import models._
object Application extends Controller {
val hostForm = Form(tuple(
"hostname" -> nonEmptyText,
"attribute_keys" -> list(text),
"attribute_values" -> list(text),
"monitored" -> boolean
))
val commandForm = Form(tuple(
"name" -> nonEmptyText,
"command" -> nonEmptyText
))
val serviceForm = Form(tuple(
"name" -> nonEmptyText,
"description" -> nonEmptyText,
"arguments" -> list(text),
"command_id" -> number,
"contact_id" -> number
))
def index = Action {
Ok(views.html.index("Hello world"))
}
def hosts = Action {
Ok(views.html.hosts(Host.all(),hostForm))
}
def newHost = Action { implicit request =>
hostForm.bindFromRequest.fold(
errors => BadRequest(views.html.hosts(Host.all(), errors)),
value => {
Host.create(value._1,(value._2 zip value._3).toMap, value._4)
Redirect(routes.Application.hosts)
}
)
}
def deleteHost(id: Long) = Action {
Host.delete(id)
Redirect(routes.Application.hosts)
}
def commands = Action {
Ok(views.html.commands(Command.all(),commandForm))
}
def newCommand = Action { implicit request =>
commandForm.bindFromRequest.fold(
errors => BadRequest(views.html.commands(Command.all(), errors)),
value => {
Command.create(value._1,value._2)
Redirect(routes.Application.commands)
}
)
}
def deleteCommand(id: Long) = Action {
Command.delete(id)
Redirect(routes.Application.commands)
}
def services = Action {
Ok(views.html.services(Service.all(),serviceForm))
}
def newService = Action { implicit request =>
serviceForm.bindFromRequest.fold(
errors => BadRequest(views.html.services(Service.all(), errors)),
value => {
Service.create(value._1,value._2,value._3,value._4,Some(value._5))
Redirect(routes.Application.services)
}
)
}
def deleteService(id: Long) = Action {
Service.delete(id)
Redirect(routes.Application.services)
}
}
// source: byxorna/frosting | app/controllers/Application.scala | Scala | apache-2.0 | 2,154 bytes
package fs2.async.immutable
import scala.concurrent.ExecutionContext
import cats.Functor
import cats.effect.Effect
import fs2.Stream
/** Data type of a single value of type `A` that can be read in the effect `F`. */
abstract class Signal[F[_], A] { self =>
/**
* Returns the discrete version of this signal, updated only when the value
* is changed.
*
* The value _may_ change several times between reads, but it is
* guaranteed the latest value will be emitted after a series of changes.
*
* If you want to be notified about every single change, use `async.queue` for signalling.
*/
def discrete: Stream[F, A]
/**
* Returns the continuous version of this signal, which emits the
* current `A` value on each request for an element from the stream.
*
* Note that this may not see all changes of `A` as it
* always gets the current `A` at each request for an element.
*/
def continuous: Stream[F, A]
/**
* Asynchronously gets the current value of this `Signal`.
*/
def get: F[A]
}
object Signal {
implicit class ImmutableSignalSyntax[F[_], A](val self: Signal[F, A]) {
/**
* Converts this signal to signal of `B` by applying `f`.
*/
def map[B](f: A => B)(implicit F: Functor[F]): Signal[F, B] =
new Signal[F, B] {
def continuous: Stream[F, B] = self.continuous.map(f)
def discrete: Stream[F, B] = self.discrete.map(f)
def get: F[B] = F.map(self.get)(f)
}
}
implicit class BooleanSignalSyntax[F[_]](val self: Signal[F, Boolean]) {
def interrupt[A](s: Stream[F, A])(implicit F: Effect[F], ec: ExecutionContext): Stream[F, A] =
s.interruptWhen(self)
}
}
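// Usage sketch (assumption: `sig: Signal[F, Int]`, `src: Stream[F, A]`, plus
// implicit Effect[F] and ExecutionContext are in scope). A derived Boolean
// signal can gate another stream via the syntax defined above:
//
//   val stop: Signal[F, Boolean] = sig.map(_ > 100)
//   val gated: Stream[F, A] = stop.interrupt(src)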
// source: zaneli/fs2 | core/shared/src/main/scala/fs2/async/immutable/Signal.scala | Scala | mit | 1,710 bytes
package im.actor.server.persist.voximplant
import slick.driver.PostgresDriver.api._
import im.actor.server.models
class VoxUserTable(tag: Tag) extends Table[models.voximplant.VoxUser](tag, "vox_users") {
def userId = column[Int]("user_id", O.PrimaryKey)
def voxUserId = column[Long]("vox_user_id")
def userName = column[String]("user_name")
def displayName = column[String]("display_name")
def salt = column[String]("salt")
def * = (userId, voxUserId, userName, displayName, salt) <> (models.voximplant.VoxUser.tupled, models.voximplant.VoxUser.unapply)
}
object VoxUser {
val users = TableQuery[VoxUserTable]
def create(user: models.voximplant.VoxUser) =
users += user
def createOrReplace(user: models.voximplant.VoxUser) =
users.insertOrUpdate(user)
def findByUserId(userId: Int) =
users.filter(_.userId === userId).result.headOption
}
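// Usage sketch (assumption: `db` is a configured
// slick.driver.PostgresDriver.api.Database). The methods above build plain
// Slick actions, so they compose and run like any other DBIO:
//
//   val user = db.run(VoxUser.findByUserId(42))   // Future[Option[VoxUser]]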
// source: boneyao/actor-platform | actor-server/actor-persist/src/main/scala/im/actor/server/persist/voximplant/VoxUser.scala | Scala | mit | 878 bytes
/*
* Copyright (c) 2018. Fengguo Wei and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License v2.0
* which accompanies this distribution, and is available at
* https://www.apache.org/licenses/LICENSE-2.0
*
* Detailed contributors are listed in the CONTRIBUTOR.md
*/
package org.argus.amandroid.serialization
import org.argus.amandroid.core.decompile.DecompileLayout
import org.argus.jawa.core.util.{FileResourceUri, ISet}
import org.json4s.{CustomSerializer, Extraction, Formats, JValue}
import org.json4s.JsonDSL._
/**
* Created by fgwei on 5/2/17.
*/
object DecompileLayoutSerializer extends CustomSerializer[DecompileLayout](format => (
{
case jv: JValue =>
implicit val formats: Formats = format
val outputUri = (jv \\ "outputUri").extract[FileResourceUri]
val createFolder = (jv \\ "createFolder").extract[Boolean]
val srcFolder = (jv \\ "srcFolder").extract[String]
val libFolder = (jv \\ "libFolder").extract[String]
val createSeparateFolderForDexes = (jv \\ "createSeparateFolderForDexes").extract[Boolean]
val pkg = (jv \\ "pkg").extract[String]
val outputSrcUri = (jv \\ "outputSrcUri").extract[FileResourceUri]
val sourceFolders = (jv \\ "sourceFolders").extract[ISet[String]]
val libFolders = (jv \\ "libFolders").extract[ISet[String]]
val dependencies = (jv \\ "dependencies").extract[ISet[String]]
val thirdPartyLibraries = (jv \\ "thirdPartyLibraries").extract[ISet[String]]
val layout = DecompileLayout(outputUri, createFolder, srcFolder, libFolder, createSeparateFolderForDexes)
layout.pkg = pkg
layout.outputSrcUri = outputSrcUri
layout.sourceFolders = sourceFolders
layout.libFolders = libFolders
layout.dependencies = dependencies
layout.thirdPartyLibraries = thirdPartyLibraries
layout
},
{
case layout: DecompileLayout =>
implicit val formats: Formats = format
val outputUri: FileResourceUri = layout.outputUri
val createFolder: Boolean = layout.createFolder
val srcFolder: String = layout.srcFolder
val libFolder: String = layout.libFolder
val createSeparateFolderForDexes: Boolean = layout.createSeparateFolderForDexes
val pkg: String = layout.pkg
val outputSrcUri: FileResourceUri = layout.outputSrcUri
val sourceFolders: ISet[String] = layout.sourceFolders
val libFolders: ISet[String] = layout.libFolders
val dependencies: ISet[String] = layout.dependencies
val thirdPartyLibraries: ISet[String] = layout.thirdPartyLibraries
("outputUri" -> outputUri) ~
("createFolder" -> createFolder) ~
("srcFolder" -> srcFolder) ~
("libFolder" -> libFolder) ~
("createSeparateFolderForDexes" -> createSeparateFolderForDexes) ~
("pkg" -> pkg) ~
("outputSrcUri" -> outputSrcUri) ~
("sourceFolders" -> Extraction.decompose(sourceFolders)) ~
("libFolders" -> Extraction.decompose(libFolders)) ~
("dependencies" -> Extraction.decompose(dependencies)) ~
("thirdPartyLibraries" -> Extraction.decompose(thirdPartyLibraries))
}
))
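// Usage sketch (assumption: json4s-jackson on the classpath and a `layout`
// instance in scope). The custom serializer must be registered in the implicit
// Formats before writing or reading:
//
//   implicit val formats: Formats = org.json4s.DefaultFormats + DecompileLayoutSerializer
//   val json = org.json4s.jackson.Serialization.write(layout)
//   val back = org.json4s.jackson.Serialization.read[DecompileLayout](json)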
// source: arguslab/Argus-SAF | amandroid/src/main/scala/org/argus/amandroid/serialization/DecompileLayoutSerializer.scala | Scala | apache-2.0 | 3,198 bytes
/*
* Copyright (C) 2014 - 2019 Dennis Vriend <https://github.com/dnvriend>
* Copyright (C) 2019 - 2021 Lightbend Inc. <https://www.lightbend.com>
*/
package akka.persistence.jdbc.query.javadsl
import akka.NotUsed
import akka.persistence.jdbc.query.scaladsl.{ JdbcReadJournal => ScalaJdbcReadJournal }
import akka.persistence.query.{ EventEnvelope, Offset }
import akka.persistence.query.javadsl._
import akka.stream.javadsl.Source
object JdbcReadJournal {
final val Identifier = ScalaJdbcReadJournal.Identifier
}
class JdbcReadJournal(journal: ScalaJdbcReadJournal)
extends ReadJournal
with CurrentPersistenceIdsQuery
with PersistenceIdsQuery
with CurrentEventsByPersistenceIdQuery
with EventsByPersistenceIdQuery
with CurrentEventsByTagQuery
with EventsByTagQuery {
/**
* Same type of query as `persistenceIds` but the event stream
* is completed immediately when it reaches the end of the "result set". Events that are
* stored after the query is completed are not included in the event stream.
*/
override def currentPersistenceIds(): Source[String, NotUsed] =
journal.currentPersistenceIds().asJava
/**
* `persistenceIds` is used to retrieve a stream of all `persistenceId`s as strings.
*
* The stream guarantees that a `persistenceId` is only emitted once and there are no duplicates.
* Order is not defined. Multiple executions of the same stream (even bounded) may emit different
* sequence of `persistenceId`s.
*
* The stream is not completed when it reaches the end of the currently known `persistenceId`s,
* but it continues to push new `persistenceId`s when new events are persisted.
* Corresponding query that is completed when it reaches the end of the currently
* known `persistenceId`s is provided by `currentPersistenceIds`.
*/
override def persistenceIds(): Source[String, NotUsed] =
journal.persistenceIds().asJava
/**
* Same type of query as `eventsByPersistenceId` but the event stream
* is completed immediately when it reaches the end of the "result set". Events that are
* stored after the query is completed are not included in the event stream.
*/
override def currentEventsByPersistenceId(
persistenceId: String,
fromSequenceNr: Long,
toSequenceNr: Long): Source[EventEnvelope, NotUsed] =
journal.currentEventsByPersistenceId(persistenceId, fromSequenceNr, toSequenceNr).asJava
/**
* `eventsByPersistenceId` is used to retrieve a stream of events for a particular persistenceId.
*
* The `EventEnvelope` contains the event and provides `persistenceId` and `sequenceNr`
* for each event. The `sequenceNr` is the sequence number for the persistent actor with the
* `persistenceId` that persisted the event. The `persistenceId` + `sequenceNr` is an unique
* identifier for the event.
*
* `fromSequenceNr` and `toSequenceNr` can be specified to limit the set of returned events.
* The `fromSequenceNr` and `toSequenceNr` are inclusive.
*
* The `EventEnvelope` also provides the `offset` that corresponds to the `ordering` column in
* the Journal table. The `ordering` is a sequential id number that uniquely identifies the
* position of each event, also across different `persistenceId`. The `Offset` type is
* `akka.persistence.query.Sequence` with the `ordering` as the offset value. This is the
* same `ordering` number as is used in the offset of the `eventsByTag` query.
*
* The returned event stream is ordered by `sequenceNr`.
*
* Causality is guaranteed (`sequenceNr`s of events for a particular `persistenceId` are always ordered
* in a sequence monotonically increasing by one). Multiple executions of the same bounded stream are
* guaranteed to emit exactly the same stream of events.
*
* The stream is not completed when it reaches the end of the currently stored events,
* but it continues to push new events when new events are persisted.
* Corresponding query that is completed when it reaches the end of the currently
* stored events is provided by `currentEventsByPersistenceId`.
*/
override def eventsByPersistenceId(
persistenceId: String,
fromSequenceNr: Long,
toSequenceNr: Long): Source[EventEnvelope, NotUsed] =
journal.eventsByPersistenceId(persistenceId, fromSequenceNr, toSequenceNr).asJava
/**
* Same type of query as `eventsByTag` but the event stream
* is completed immediately when it reaches the end of the "result set". Events that are
* stored after the query is completed are not included in the event stream.
*/
override def currentEventsByTag(tag: String, offset: Offset): Source[EventEnvelope, NotUsed] =
journal.currentEventsByTag(tag, offset).asJava
/**
* Query events that have a specific tag.
*
* The consumer can keep track of its current position in the event stream by storing the
* `offset` and restart the query from a given `offset` after a crash/restart.
* The offset is exclusive, i.e. the event corresponding to the given `offset` parameter is not
* included in the stream.
*
* For akka-persistence-jdbc the `offset` corresponds to the `ordering` column in the Journal table.
* The `ordering` is a sequential id number that uniquely identifies the position of each event within
* the event stream. The `Offset` type is `akka.persistence.query.Sequence` with the `ordering` as the
* offset value.
*
* The returned event stream is ordered by `offset`.
*
* The stream is not completed when it reaches the end of the currently stored events,
* but it continues to push new events when new events are persisted.
* Corresponding query that is completed when it reaches the end of the currently
* stored events is provided by [[CurrentEventsByTagQuery#currentEventsByTag]].
*/
override def eventsByTag(tag: String, offset: Offset): Source[EventEnvelope, NotUsed] =
journal.eventsByTag(tag, offset).asJava
}
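// Usage sketch (assumption: the journal plugin is configured under the
// identifier below, and `system` is an ActorSystem). The Java-API journal is
// obtained through akka.persistence.query.PersistenceQuery:
//
//   val readJournal = PersistenceQuery(system)
//     .getReadJournalFor(classOf[JdbcReadJournal], JdbcReadJournal.Identifier)
//   val events = readJournal.eventsByPersistenceId("some-id", 0L, Long.MaxValue)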
// source: dnvriend/akka-persistence-jdbc | core/src/main/scala/akka/persistence/jdbc/query/javadsl/JdbcReadJournal.scala | Scala | apache-2.0 | 6,005 bytes
// Copyright (c) 2013-2020 Rob Norris and Contributors
// This software is licensed under the MIT License (MIT).
// For more information see LICENSE or https://opensource.org/licenses/MIT
package doobie.free
import cats.~>
import cats.effect.kernel.{ CancelScope, Poll, Sync }
import cats.free.{ Free => FF } // alias because some algebras have an op called Free
import doobie.WeakAsync
import scala.concurrent.Future
import scala.concurrent.duration.FiniteDuration
import java.lang.String
import java.sql.SQLData
import java.sql.SQLInput
import java.sql.SQLOutput
object sqldata { module =>
// Algebra of operations for SQLData. Each accepts a visitor as an alternative to pattern-matching.
sealed trait SQLDataOp[A] {
def visit[F[_]](v: SQLDataOp.Visitor[F]): F[A]
}
// Free monad over SQLDataOp.
type SQLDataIO[A] = FF[SQLDataOp, A]
// Module of instances and constructors of SQLDataOp.
object SQLDataOp {
// Given a SQLData we can embed a SQLDataIO program in any algebra that understands embedding.
implicit val SQLDataOpEmbeddable: Embeddable[SQLDataOp, SQLData] =
new Embeddable[SQLDataOp, SQLData] {
def embed[A](j: SQLData, fa: FF[SQLDataOp, A]) = Embedded.SQLData(j, fa)
}
// Interface for a natural transformation SQLDataOp ~> F encoded via the visitor pattern.
// This approach is much more efficient than pattern-matching for large algebras.
trait Visitor[F[_]] extends (SQLDataOp ~> F) {
final def apply[A](fa: SQLDataOp[A]): F[A] = fa.visit(this)
// Common
def raw[A](f: SQLData => A): F[A]
def embed[A](e: Embedded[A]): F[A]
def raiseError[A](e: Throwable): F[A]
def handleErrorWith[A](fa: SQLDataIO[A])(f: Throwable => SQLDataIO[A]): F[A]
def monotonic: F[FiniteDuration]
def realTime: F[FiniteDuration]
def delay[A](thunk: => A): F[A]
def suspend[A](hint: Sync.Type)(thunk: => A): F[A]
def forceR[A, B](fa: SQLDataIO[A])(fb: SQLDataIO[B]): F[B]
def uncancelable[A](body: Poll[SQLDataIO] => SQLDataIO[A]): F[A]
def poll[A](poll: Any, fa: SQLDataIO[A]): F[A]
def canceled: F[Unit]
def onCancel[A](fa: SQLDataIO[A], fin: SQLDataIO[Unit]): F[A]
def fromFuture[A](fut: SQLDataIO[Future[A]]): F[A]
// SQLData
def getSQLTypeName: F[String]
def readSQL(a: SQLInput, b: String): F[Unit]
def writeSQL(a: SQLOutput): F[Unit]
}
// Common operations for all algebras.
final case class Raw[A](f: SQLData => A) extends SQLDataOp[A] {
def visit[F[_]](v: Visitor[F]) = v.raw(f)
}
final case class Embed[A](e: Embedded[A]) extends SQLDataOp[A] {
def visit[F[_]](v: Visitor[F]) = v.embed(e)
}
final case class RaiseError[A](e: Throwable) extends SQLDataOp[A] {
def visit[F[_]](v: Visitor[F]) = v.raiseError(e)
}
final case class HandleErrorWith[A](fa: SQLDataIO[A], f: Throwable => SQLDataIO[A]) extends SQLDataOp[A] {
def visit[F[_]](v: Visitor[F]) = v.handleErrorWith(fa)(f)
}
case object Monotonic extends SQLDataOp[FiniteDuration] {
def visit[F[_]](v: Visitor[F]) = v.monotonic
}
case object Realtime extends SQLDataOp[FiniteDuration] {
def visit[F[_]](v: Visitor[F]) = v.realTime
}
case class Suspend[A](hint: Sync.Type, thunk: () => A) extends SQLDataOp[A] {
def visit[F[_]](v: Visitor[F]) = v.suspend(hint)(thunk())
}
case class ForceR[A, B](fa: SQLDataIO[A], fb: SQLDataIO[B]) extends SQLDataOp[B] {
def visit[F[_]](v: Visitor[F]) = v.forceR(fa)(fb)
}
case class Uncancelable[A](body: Poll[SQLDataIO] => SQLDataIO[A]) extends SQLDataOp[A] {
def visit[F[_]](v: Visitor[F]) = v.uncancelable(body)
}
case class Poll1[A](poll: Any, fa: SQLDataIO[A]) extends SQLDataOp[A] {
def visit[F[_]](v: Visitor[F]) = v.poll(poll, fa)
}
case object Canceled extends SQLDataOp[Unit] {
def visit[F[_]](v: Visitor[F]) = v.canceled
}
case class OnCancel[A](fa: SQLDataIO[A], fin: SQLDataIO[Unit]) extends SQLDataOp[A] {
def visit[F[_]](v: Visitor[F]) = v.onCancel(fa, fin)
}
case class FromFuture[A](fut: SQLDataIO[Future[A]]) extends SQLDataOp[A] {
def visit[F[_]](v: Visitor[F]) = v.fromFuture(fut)
}
// SQLData-specific operations.
case object GetSQLTypeName extends SQLDataOp[String] {
def visit[F[_]](v: Visitor[F]) = v.getSQLTypeName
}
final case class ReadSQL(a: SQLInput, b: String) extends SQLDataOp[Unit] {
def visit[F[_]](v: Visitor[F]) = v.readSQL(a, b)
}
final case class WriteSQL(a: SQLOutput) extends SQLDataOp[Unit] {
def visit[F[_]](v: Visitor[F]) = v.writeSQL(a)
}
}
import SQLDataOp._
// Smart constructors for operations common to all algebras.
val unit: SQLDataIO[Unit] = FF.pure[SQLDataOp, Unit](())
def pure[A](a: A): SQLDataIO[A] = FF.pure[SQLDataOp, A](a)
def raw[A](f: SQLData => A): SQLDataIO[A] = FF.liftF(Raw(f))
def embed[F[_], J, A](j: J, fa: FF[F, A])(implicit ev: Embeddable[F, J]): FF[SQLDataOp, A] = FF.liftF(Embed(ev.embed(j, fa)))
def raiseError[A](err: Throwable): SQLDataIO[A] = FF.liftF[SQLDataOp, A](RaiseError(err))
def handleErrorWith[A](fa: SQLDataIO[A])(f: Throwable => SQLDataIO[A]): SQLDataIO[A] = FF.liftF[SQLDataOp, A](HandleErrorWith(fa, f))
val monotonic = FF.liftF[SQLDataOp, FiniteDuration](Monotonic)
val realtime = FF.liftF[SQLDataOp, FiniteDuration](Realtime)
def delay[A](thunk: => A) = FF.liftF[SQLDataOp, A](Suspend(Sync.Type.Delay, () => thunk))
def suspend[A](hint: Sync.Type)(thunk: => A) = FF.liftF[SQLDataOp, A](Suspend(hint, () => thunk))
def forceR[A, B](fa: SQLDataIO[A])(fb: SQLDataIO[B]) = FF.liftF[SQLDataOp, B](ForceR(fa, fb))
def uncancelable[A](body: Poll[SQLDataIO] => SQLDataIO[A]) = FF.liftF[SQLDataOp, A](Uncancelable(body))
def capturePoll[M[_]](mpoll: Poll[M]) = new Poll[SQLDataIO] {
def apply[A](fa: SQLDataIO[A]) = FF.liftF[SQLDataOp, A](Poll1(mpoll, fa))
}
val canceled = FF.liftF[SQLDataOp, Unit](Canceled)
def onCancel[A](fa: SQLDataIO[A], fin: SQLDataIO[Unit]) = FF.liftF[SQLDataOp, A](OnCancel(fa, fin))
def fromFuture[A](fut: SQLDataIO[Future[A]]) = FF.liftF[SQLDataOp, A](FromFuture(fut))
// Smart constructors for SQLData-specific operations.
val getSQLTypeName: SQLDataIO[String] = FF.liftF(GetSQLTypeName)
def readSQL(a: SQLInput, b: String): SQLDataIO[Unit] = FF.liftF(ReadSQL(a, b))
def writeSQL(a: SQLOutput): SQLDataIO[Unit] = FF.liftF(WriteSQL(a))
// Typeclass instances for SQLDataIO
implicit val WeakAsyncSQLDataIO: WeakAsync[SQLDataIO] =
new WeakAsync[SQLDataIO] {
val monad = FF.catsFreeMonadForFree[SQLDataOp]
override val applicative = monad
override val rootCancelScope = CancelScope.Cancelable
override def pure[A](x: A): SQLDataIO[A] = monad.pure(x)
override def flatMap[A, B](fa: SQLDataIO[A])(f: A => SQLDataIO[B]): SQLDataIO[B] = monad.flatMap(fa)(f)
override def tailRecM[A, B](a: A)(f: A => SQLDataIO[Either[A, B]]): SQLDataIO[B] = monad.tailRecM(a)(f)
override def raiseError[A](e: Throwable): SQLDataIO[A] = module.raiseError(e)
override def handleErrorWith[A](fa: SQLDataIO[A])(f: Throwable => SQLDataIO[A]): SQLDataIO[A] = module.handleErrorWith(fa)(f)
override def monotonic: SQLDataIO[FiniteDuration] = module.monotonic
override def realTime: SQLDataIO[FiniteDuration] = module.realtime
override def suspend[A](hint: Sync.Type)(thunk: => A): SQLDataIO[A] = module.suspend(hint)(thunk)
override def forceR[A, B](fa: SQLDataIO[A])(fb: SQLDataIO[B]): SQLDataIO[B] = module.forceR(fa)(fb)
override def uncancelable[A](body: Poll[SQLDataIO] => SQLDataIO[A]): SQLDataIO[A] = module.uncancelable(body)
override def canceled: SQLDataIO[Unit] = module.canceled
override def onCancel[A](fa: SQLDataIO[A], fin: SQLDataIO[Unit]): SQLDataIO[A] = module.onCancel(fa, fin)
override def fromFuture[A](fut: SQLDataIO[Future[A]]): SQLDataIO[A] = module.fromFuture(fut)
}
}
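// Usage sketch: SQLDataIO is a free monad, so small programs compose with
// for-comprehensions and are only run when interpreted by a Visitor (a natural
// transformation into some target effect):
//
//   val prog: SQLDataIO[String] = for {
//     name <- getSQLTypeName
//     _    <- delay(println(s"SQL type: $name"))
//   } yield name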
// source: tpolecat/doobie | modules/free/src/main/scala/doobie/free/sqldata.scala | Scala | mit | 8,052 bytes
package io.straight.ete.web.util
import java.io._
import java.util.zip.{ZipEntry, ZipOutputStream}
/**
* @author rbuckland
*/
object ZipFileUtil {
def zipDirectory(dir: File, zipFile: File) = {
val fout = new FileOutputStream(zipFile)
val zout = new ZipOutputStream(fout)
zipSubDirectory("", dir, zout)
zout.close()
}
/** Copies all bytes from the 'input' stream to the 'output' stream. */
private def transfer(input: InputStream, out: OutputStream): Unit = {
val buffer = new Array[Byte](8192)
// read up to one buffer's worth at a time and recurse until EOF (read < 0)
def loop(): Unit = {
val read = input.read(buffer)
if (read >= 0) {
out.write(buffer, 0, read)
loop()
}
}
loop()
}
private def zipSubDirectory(basePath: String, dir: File, zout: ZipOutputStream): Unit = {
val files = dir.listFiles()
files.foreach { file =>
if (file.isDirectory()) {
val path = basePath + file.getName + "/"
zout.putNextEntry(new ZipEntry(path))
zipSubDirectory(path, file, zout)
zout.closeEntry()
} else {
val fin = new FileInputStream(file)
zout.putNextEntry(new ZipEntry(basePath + file.getName))
transfer(fin,zout)
zout.closeEntry()
fin.close()
}
}
}
}
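// Usage sketch (hypothetical paths): recursively zip a directory tree into a
// single archive.
//
//   ZipFileUtil.zipDirectory(new File("/tmp/report"), new File("/tmp/report.zip"))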
// source: rbuckland/ete | ete-tools-backend/src/main/scala/io/straight/ete/web/util/CommonUtils.scala | Scala | apache-2.0 | 1,306 bytes
package notebook.front
import java.util.UUID
import notebook.util.ClassUtils
import scala.xml.{Node, NodeSeq}
import scalaz._
trait Widget extends Iterable[Node] {
def toHtml: NodeSeq
def iterator = toHtml.iterator
def ++(other: Widget): Widget = toHtml ++ other
override def toString() = "<" + ClassUtils.getSimpleName(getClass) + " widget>"
}
class SimpleWidget(html: NodeSeq) extends Widget {
def toHtml = html
override def toString() = "<widget>"
}
object Widget {
implicit def toHtml(widget: Widget): NodeSeq = widget.toHtml
def fromHtml(html: NodeSeq): Widget = new SimpleWidget(html)
implicit def fromRenderer[A](value: A)
(implicit renderer: Renderer[A]): Widget = fromHtml(renderer.render(value))
object Empty extends Widget {
def toHtml = NodeSeq.Empty
override def toString() = "<empty widget>"
}
implicit val widgetInstances = new Monoid[Widget] {
def zero = Empty
def append(s1: Widget, s2: ⇒ Widget) = s1 ++ s2
}
// We're stripping out dashes because we want these to be valid JS identifiers.
// Prepending with the "obs_" accomplishes that as well in that it forces it to
// start with a letter, but it also helps make the namespace a little more
// manageable.
@deprecated("Avoid using IDs in widgets, to support the same widget appearing in multiple places on a page.", "1.0")
def generateId = "widget_" + UUID.randomUUID().toString.replaceAll("-", "")
}
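// Usage sketch: widgets form a monoid under ++ with Empty as the identity, so
// they can be built from HTML fragments and concatenated freely:
//
//   val w = Widget.fromHtml(<b>total:</b>) ++ Widget.fromHtml(<span>42</span>)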
// source: fnouama/spark-notebook | modules/common/src/main/scala/notebook/front/Widget.scala | Scala | apache-2.0 | 1,451 bytes
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// scalastyle:off println
package org.apache.spark.examples.streaming
import org.apache.spark.SparkConf
import org.apache.spark.streaming._
import org.apache.spark.streaming.flume._
import org.apache.spark.util.IntParam
/**
* Produces a count of events received from Flume.
*
* This should be used in conjunction with the Spark Sink running in a Flume agent. See
* the Spark Streaming programming guide for more details.
*
* Usage: FlumePollingEventCount <host> <port>
* `host` is the host on which the Spark Sink is running.
* `port` is the port at which the Spark Sink is listening.
*
* To run this example:
* `$ bin/run-example org.apache.spark.examples.streaming.FlumePollingEventCount [host] [port] `
*/
object FlumePollingEventCount {
def main(args: Array[String]) {
if (args.length < 2) {
System.err.println(
"Usage: FlumePollingEventCount <host> <port>")
System.exit(1)
}
val Array(host, IntParam(port)) = args
val batchInterval = Milliseconds(2000)
// Create the context and set the batch size
val sparkConf = new SparkConf().setAppName("FlumePollingEventCount")
val ssc = new StreamingContext(sparkConf, batchInterval)
// Create a flume stream that polls the Spark Sink running in a Flume agent
val stream = FlumeUtils.createPollingStream(ssc, host, port)
// Print out the count of events received from this server in each batch
stream.count().map(cnt => "Received " + cnt + " flume events." ).print()
ssc.start()
ssc.awaitTermination()
}
}
// scalastyle:on println
// source: akopich/spark | external/flume/src/main/scala/org/apache/spark/examples/FlumePollingEventCount.scala | Scala | apache-2.0 | 2,388 bytes
package com.criteo.dev.cluster
import com.criteo.dev.cluster.NodeType.NodeType
import com.criteo.dev.cluster.docker.{DockerConstants, DockerMeta, DockerUtilities}
import com.criteo.dev.cluster.aws.{AwsConstants, AwsNodeMeta, AwsUtilities}
import com.criteo.dev.cluster.config.{AWSConfig, LocalConfig, SourceConfig, TargetConfig}
import com.criteo.dev.cluster.copy.CopyConstants
import org.jclouds.compute.domain.NodeMetadata
case class Node(ip: String, user: Option[String], key: Option[String], port: Option[String], nodeType: NodeType)
object NodeType extends Enumeration {
type NodeType = Value
val S3, AWS, Local, User = Value
}
object NodeFactory {
def getSourceFromConf(config: SourceConfig): Node = {
val ip = config.address
new Node(ip, None, None, None, NodeType.User)
}
//-- Begin Public API
/**
   * TODO: should not expose the jclouds NodeMetadata object in the public API.
*/
def getAwsNode(awsConfig: AWSConfig, nodeMeta: NodeMetadata) : Node = {
val ip = AwsUtilities.ipAddress(nodeMeta)
new Node(ip, Option(awsConfig.user), Option(awsConfig.keyFile), None, NodeType.AWS)
}
@Public def getAwsNode(awsConfig: AWSConfig, nodeMeta: AwsNodeMeta) : Node = {
val ip = nodeMeta.publicIp
new Node(ip, Option(awsConfig.user), Option(awsConfig.keyFile), None, NodeType.AWS)
}
@Public def getDockerNode(localConfig: LocalConfig, dockerMeta: DockerMeta) : Node = {
val dockerIp = DockerUtilities.getSshHost
val port = DockerUtilities.getSshPort(dockerMeta.id)
val user = localConfig.clusterUser
new Node(dockerIp, Option(user), Some(DockerConstants.dockerPrivateKey), Option(port), NodeType.Local)
}
@Public def getS3Node(bucketId: String) : Node = {
new Node(bucketId, None, None, None, NodeType.S3)
}
}
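// ---------------------------------------------------------------------------
// Editor's usage sketch (not part of the original file): folding a Node's
// optional fields into an ssh invocation. `sshCommand` is a hypothetical
// helper, not part of the berilia API.
// ---------------------------------------------------------------------------
object NodeUsageSketch {
  def sshCommand(node: Node): String = {
    val keyArg  = node.key.fold("")(k => s"-i $k ")
    val portArg = node.port.fold("")(p => s"-p $p ")
    val target  = node.user.fold(node.ip)(u => s"$u@${node.ip}")
    s"ssh $keyArg$portArg$target"
  }

  // A user-provided source node carries only an address.
  val source: Node = Node("10.0.0.1", None, None, None, NodeType.User)
  val cmd: String  = sshCommand(source) // "ssh 10.0.0.1"
}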
|
criteo/berilia
|
src/main/scala/com/criteo/dev/cluster/Node.scala
|
Scala
|
apache-2.0
| 1,780 |
/*
* The MIT License
*
* Copyright (c) 2022 Fulcrum Genomics LLC
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.fulcrumgenomics.coord
import com.fulcrumgenomics.coord.LocatableOrderingTest._
import com.fulcrumgenomics.fasta.{SequenceDictionary, SequenceMetadata}
import com.fulcrumgenomics.testing.UnitSpec
import htsjdk.samtools.util.{Interval, Locatable}
/** Companion object for [[LocatableOrderingTest]]. */
object LocatableOrderingTest {
/** The reference sequence name for chromosome 1. */
private val Chr1: String = "chr1"
/** The reference sequence name for chromosome 2. */
private val Chr2: String = "chr2"
/** The sequence dictionary for the ordering. */
private val Dict: SequenceDictionary = SequenceDictionary(SequenceMetadata(Chr1), SequenceMetadata(Chr2))
  /** The ordering built from the [[Dict]] above. */
private val Ordering: Ordering[Locatable] = LocatableOrdering(Dict)
}
/** Unit tests for [[LocatableOrdering]]. */
class LocatableOrderingTest extends UnitSpec {
"LocatableOrdering" should "know when two locatables are equivalent" in {
val interval1 = new Interval(Chr1, 1, 1)
val interval2 = new Interval(Chr1, 1, 1)
Ordering.compare(interval1, interval2) shouldBe 0
}
it should "know when one Locatable is more 'left' than another Locatable on the same contig" in {
val interval1 = new Interval(Chr1, 1, 1)
val interval2 = new Interval(Chr1, 2, 2)
Ordering.compare(interval1, interval2) should be < 0
}
it should "know when one Locatable is more 'right' than another Locatable on the same contig" in {
val interval1 = new Interval(Chr1, 2, 2)
val interval2 = new Interval(Chr1, 1, 2)
Ordering.compare(interval1, interval2) should be > 0
}
it should "know when one Locatable is more 'left' than another Locatable on a further 'right' contig" in {
val interval1 = new Interval(Chr1, 1, 1)
val interval2 = new Interval(Chr2, 1, 1)
Ordering.compare(interval1, interval2) should be < 0
}
it should "know when one Locatable is more 'right' than another Locatable on a further 'left' contig" in {
val interval1 = new Interval(Chr2, 1, 1)
val interval2 = new Interval(Chr1, 1, 1)
Ordering.compare(interval1, interval2) should be > 0
}
it should "raise an exception when any of the locatables are aligned to contigs that don't exist" in {
val interval1 = new Interval(Chr1, 1, 1)
val interval2 = new Interval("ChrDoesNotExist", 1, 1)
a[NoSuchElementException] shouldBe thrownBy { Ordering.compare(interval1, interval2) }
}
it should "order genomic locatables first by contig, then start, then end" in {
val intervals = Seq(
new Interval(Chr2, 2, 5),
new Interval(Chr2, 2, 9),
new Interval(Chr1, 2, 5),
new Interval(Chr1, 2, 5),
new Interval(Chr1, 4, 5)
)
val expected = Seq(
new Interval(Chr1, 2, 5),
new Interval(Chr1, 2, 5),
new Interval(Chr1, 4, 5),
new Interval(Chr2, 2, 5),
new Interval(Chr2, 2, 9)
)
intervals.min(Ordering) shouldBe new Interval(Chr1, 2, 5)
intervals.max(Ordering) shouldBe new Interval(Chr2, 2, 9)
intervals.sorted(Ordering) should contain theSameElementsInOrderAs expected
}
}
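// ---------------------------------------------------------------------------
// Editor's sketch (not part of the original test): the ordering these tests
// imply, written against a plain contig-index function instead of fgbio's
// SequenceDictionary (whose API is not shown here). It compares by contig
// index, then start, then end, and throws for unknown contigs, matching the
// expectations above.
// ---------------------------------------------------------------------------
object LocatableOrderingSketch {
  def ordering(contigIndex: String => Int): scala.math.Ordering[Locatable] =
    scala.math.Ordering.by((l: Locatable) => (contigIndex(l.getContig), l.getStart, l.getEnd))

  // A Map's apply throws NoSuchElementException for unknown contigs,
  // mirroring the exception the test above expects.
  val demo: scala.math.Ordering[Locatable] = ordering(Map("chr1" -> 0, "chr2" -> 1))
}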
|
fulcrumgenomics/fgbio
|
src/test/scala/com/fulcrumgenomics/coord/LocatableOrderingTest.scala
|
Scala
|
mit
| 4,275 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.impl.datasource
import slamdata.Predef._
import quasar.api.datasource.DatasourceType
import quasar.api.resource._
import quasar.connector.{DatasourceSpec, MonadResourceErr, ResourceError}
import quasar.contrib.scalaz.MonadError_
import cats.effect.IO
import eu.timepit.refined.auto._
import scalaz.IMap
import scalaz.std.list._
import shims._
object PureDatasourceSpec extends DatasourceSpec[IO, List] {
implicit val ioMonadResourceErr: MonadError_[IO, ResourceError] =
MonadError_.facet[IO](ResourceError.throwableP)
val datasource =
PureDatasource[IO, List](
DatasourceType("pure-test", 1L),
IMap(
ResourcePath.root() / ResourceName("a") / ResourceName("b") -> 1,
ResourcePath.root() / ResourceName("a") / ResourceName("c") -> 2,
ResourcePath.root() / ResourceName("d") -> 3))
def nonExistentPath: ResourcePath =
ResourcePath.root() / ResourceName("x") / ResourceName("y")
def gatherMultiple[A](fga: List[A]): IO[List[A]] =
IO.pure(fga)
"evaluation" >> {
"known resource returns result" >>* {
datasource
.evaluate(ResourcePath.root() / ResourceName("d"))
.map(_ must_=== 3)
}
"known prefix errors with 'not a resource'" >>* {
val pfx = ResourcePath.root() / ResourceName("a")
      MonadResourceErr[IO].attempt(datasource.evaluate(pfx)).map(_ must be_-\/.like {
case ResourceError.NotAResource(p) => p must equal(pfx)
})
}
"unknown path errors with 'not found'" >>* {
      MonadResourceErr[IO].attempt(datasource.evaluate(nonExistentPath)).map(_ must be_-\/.like {
case ResourceError.PathNotFound(p) => p must equal(nonExistentPath)
})
}
}
}
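// ---------------------------------------------------------------------------
// Editor's sketch (not part of the original spec): running one evaluation
// outside the specs2 harness. Assumes cats-effect IO's unsafeRunSync is
// acceptable here and that `evaluate` yields the mapped Int directly, as the
// assertions above suggest.
// ---------------------------------------------------------------------------
object PureDatasourceSketch {
  def main(args: Array[String]): Unit = {
    val n = PureDatasourceSpec.datasource
      .evaluate(ResourcePath.root() / ResourceName("d"))
      .unsafeRunSync()
    assert(n == 3)
  }
}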
|
slamdata/slamengine
|
impl/src/test/scala/quasar/impl/datasource/PureDatasourceSpec.scala
|
Scala
|
apache-2.0
| 2,324 |
package poly.collection.impl
import poly.collection._
import poly.collection.typeclass._
/**
* Adapted from the public-domain code from
* [[https://github.com/aappleby/smhasher/blob/master/src/MurmurHash3.cpp]].
* @author Austin Appleby
* @author Tongfei Chen
*/
object MurmurHash3 {
@inline final def mix(hash: Int, data: Int): Int = {
var h = mixLast(hash, data)
h = Integer.rotateLeft(h, 13)
h * 5 + 0xe6546b64
}
@inline final def mixLast(hash: Int, data: Int): Int = {
var k = data
k *= 0xcc9e2d51
k = Integer.rotateLeft(k, 15)
k *= 0x1b873593
hash ^ k
}
final def finalizeHash(hash: Int, length: Int): Int = fmix32(hash ^ length)
@inline final def fmix32(_h: Int): Int = {
var h = _h
h ^= h >>> 16
h *= 0x85ebca6b
h ^= h >>> 13
h *= 0xc2b2ae35
h ^= h >>> 16
h
}
final def symmetricHash[T](xs: Traversable[T], seed: Int = 0xb592f7ae)(implicit T: Hash[T]) = {
var n = 0
var sum = 0
var xor = 0
var prod = 1
for (x <- xs) {
val h = T hash x
sum += h
xor ^= h
if (h != 0) prod *= h
n += 1
}
var h = seed
h = mix(h, sum)
h = mix(h, xor)
h = mixLast(h, prod)
finalizeHash(h, n)
}
final def sequentialHash[T](xs: Traversable[T], seed: Int = 0xe73a8b15)(implicit T: Hash[T]) = {
var n = 0
var h = seed
for (x <- xs) {
h = mix(h, T hash x)
n += 1
}
finalizeHash(h, n)
}
}
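// ---------------------------------------------------------------------------
// Editor's sketch (not part of the original file): chaining mix and
// finalizeHash by hand over two ints, the same pattern sequentialHash uses
// internally. Only the methods defined above are used.
// ---------------------------------------------------------------------------
object MurmurHash3Sketch {
  val seed = 0xe73a8b15
  val h1 = MurmurHash3.mix(seed, 42)
  val h2 = MurmurHash3.mix(h1, 7)
  // Finalization folds in the element count so that collections of different
  // lengths hash differently.
  val result: Int = MurmurHash3.finalizeHash(h2, 2)
}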
|
ctongfei/poly-collection
|
core/src/main/scala/poly/collection/impl/MurmurHash3.scala
|
Scala
|
mit
| 1,470 |
package cromwell.engine.workflow
import java.nio.file.Path
import akka.actor.FSM.{CurrentState, Transition}
import akka.actor._
import better.files._
import cromwell.core.{CallOutput, WorkflowId}
import cromwell.engine
import cromwell.engine._
import cromwell.engine.workflow.SingleWorkflowRunnerActor._
import cromwell.engine.workflow.WorkflowManagerActor._
import cromwell.webservice.CromwellApiHandler._
import cromwell.webservice.{WdlValueJsonFormatter, WorkflowMetadataResponse}
import spray.json._
import scala.util._
object SingleWorkflowRunnerActor {
def props(source: WorkflowSourceFiles, metadataOutputFile: Option[Path], workflowManager: ActorRef): Props = {
Props(classOf[SingleWorkflowRunnerActor], source, metadataOutputFile, workflowManager)
}
sealed trait RunnerMessage
// The message to actually run the workflow is made explicit so the non-actor Main can `ask` this actor to do the
// running and collect a result.
case object RunWorkflow extends RunnerMessage
private case object IssueReply extends RunnerMessage
sealed trait RunnerState
case object NotStarted extends RunnerState
case object RunningWorkflow extends RunnerState
case object RequestingOutputs extends RunnerState
case object RequestingMetadata extends RunnerState
case object Done extends RunnerState
final case class RunnerData(replyTo: Option[ActorRef] = None,
id: Option[WorkflowId] = None,
terminalState: Option[WorkflowState] = None,
failures: Seq[Throwable] = Seq.empty) {
def addFailure(message: String): RunnerData = addFailure(new Throwable(message))
def addFailure(e: Throwable): RunnerData = this.copy(failures = e +: failures)
}
}
/**
* Designed explicitly for the use case of the 'run' functionality in Main. This Actor will start a workflow,
 * print out the outputs when complete and then shut down the actor system. Note that multiple aspects of this
 * are sub-optimal for future use cases where one might want more than a single workflow being run.
*/
case class SingleWorkflowRunnerActor(source: WorkflowSourceFiles,
metadataOutputPath: Option[Path],
workflowManager: ActorRef) extends LoggingFSM[RunnerState, RunnerData] with CromwellActor {
import SingleWorkflowRunnerActor._
val tag = "SingleWorkflowRunnerActor"
startWith(NotStarted, RunnerData())
private def requestMetadata: State = {
workflowManager ! WorkflowMetadata(stateData.id.get)
goto (RequestingMetadata)
}
private def issueReply: State = {
self ! IssueReply
goto (Done)
}
when (NotStarted) {
case Event(RunWorkflow, data) =>
log.info(s"$tag: launching workflow")
workflowManager ! SubmitWorkflow(source)
goto (RunningWorkflow) using data.copy(replyTo = Option(sender()))
}
when (RunningWorkflow) {
case Event(WorkflowManagerSubmitSuccess(id), data) =>
log.info(s"$tag: workflow ID UUID($id)")
workflowManager ! SubscribeToWorkflow(id)
stay() using data.copy(id = Option(id))
case Event(Transition(_, _, WorkflowSucceeded), data) =>
workflowManager ! WorkflowOutputs(data.id.get)
goto(RequestingOutputs) using data.copy(terminalState = Option(WorkflowSucceeded))
case Event(Transition(_, _, WorkflowFailed), data) =>
val updatedData = data.copy(terminalState = Option(WorkflowFailed)).addFailure(s"Workflow ${data.id.get} transitioned to state Failed")
// If there's an output path specified then request metadata, otherwise issue a reply to the original sender.
val nextState = if (metadataOutputPath.isDefined) requestMetadata else issueReply
nextState using updatedData
}
when (RequestingOutputs) {
case Event(WorkflowManagerWorkflowOutputsSuccess(id, outputs), data) =>
// Outputs go to stdout
outputOutputs(outputs)
if (metadataOutputPath.isDefined) requestMetadata else issueReply
}
when (RequestingMetadata) {
case Event(r: WorkflowManagerWorkflowMetadataSuccess, data) =>
val updatedData = outputMetadata(r.response) match {
case Success(_) => data
case Failure(e) => data.addFailure(e)
}
issueReply using updatedData
}
when (Done) {
case Event(IssueReply, data) =>
data.terminalState foreach { state => log.info(s"$tag workflow finished with status '$state'.") }
data.failures foreach { e => log.error(e, e.getMessage) }
val message = data.terminalState collect { case WorkflowSucceeded => () } getOrElse Status.Failure(data.failures.head)
data.replyTo foreach { _ ! message }
stay()
}
private def failAndFinish(e: Throwable): State = {
log.error(e, s"$tag received Failure message: ${e.getMessage}")
issueReply using stateData.addFailure(e)
}
whenUnhandled {
// Handle failures for all WorkflowManagerFailureResponses generically.
case Event(r: WorkflowManagerFailureResponse, data) => failAndFinish(r.failure)
case Event(Failure(e), data) => failAndFinish(e)
case Event(Status.Failure(e), data) => failAndFinish(e)
case Event((CurrentState(_, _) | Transition(_, _, _)), _) =>
// ignore uninteresting current state and transition messages
stay()
case Event(m, _) =>
log.warning(s"$tag: received unexpected message: $m")
stay()
}
/**
   * Prints the workflow outputs to stdout as JSON; any metadata request is driven separately by the FSM.
*/
private def outputOutputs(outputs: engine.WorkflowOutputs): Unit = {
import WdlValueJsonFormatter._
val outputValues = outputs mapValues { case CallOutput(wdlValue, _) => wdlValue }
println(outputValues.toJson.prettyPrint)
}
private def outputMetadata(metadata: WorkflowMetadataResponse): Try[Unit] = {
Try {
// This import is required despite what IntelliJ thinks.
import cromwell.webservice.WorkflowJsonSupport._
val path = metadataOutputPath.get
log.info(s"$tag writing metadata to $path")
path.createIfNotExists().write(metadata.toJson.prettyPrint)
}
}
}
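// ---------------------------------------------------------------------------
// Editor's sketch (not part of the original file): how a non-actor caller
// such as Main might drive this FSM, per the RunWorkflow comment above.
// `system`, `sources` and `manager` are placeholders; the real Main wiring
// is not shown here.
// ---------------------------------------------------------------------------
object SingleWorkflowRunnerSketch {
  import akka.pattern.ask
  import akka.util.Timeout
  import scala.concurrent.{Await, Future}
  import scala.concurrent.duration._

  def run(system: ActorSystem, sources: WorkflowSourceFiles, manager: ActorRef): Unit = {
    implicit val timeout: Timeout = Timeout(1.hour)
    val runner = system.actorOf(SingleWorkflowRunnerActor.props(sources, metadataOutputFile = None, manager))
    // The actor replies with () on success or Status.Failure on failure,
    // which `ask` surfaces as a failed Future.
    val done: Future[Any] = runner ? RunWorkflow
    Await.result(done, Duration.Inf)
  }
}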
|
cowmoo/cromwell
|
engine/src/main/scala/cromwell/engine/workflow/SingleWorkflowRunnerActor.scala
|
Scala
|
bsd-3-clause
| 6,129 |
package io.skysail.core.akka
import akka.actor.SupervisorStrategy.{Restart, Stop}
import akka.actor.{Actor, ActorInitializationException, ActorKilledException, ActorLogging, ActorRef, DeathPactException, OneForOneStrategy}
import akka.event.LoggingReceive
import akka.http.scaladsl.marshalling.Marshal
import akka.http.scaladsl.model._
import akka.http.scaladsl.server.MediaTypeNegotiator
import akka.util.Timeout
import de.heikoseeberger.akkahttpjson4s.Json4sSupport._
import io.skysail.core.app.resources.PostSupport
import io.skysail.core.model.{ApplicationModel, RepresentationModel}
import io.skysail.core.resources._
import io.skysail.core.server.actors.ApplicationActor.{ProcessCommand, SkysailContext}
import org.json4s.jackson.Serialization
import org.json4s.jackson.Serialization.write
import org.json4s.{DefaultFormats, Extraction, JObject, jackson}
import org.osgi.framework.BundleContext
import play.twirl.api.HtmlFormat
import scala.concurrent.Future
import scala.concurrent.duration.DurationInt
object ControllerActor {
case class GetRequest()
case class PostRequest()
case class PutRequest()
case class DeleteRequest()
  case class MyResponseEntity(entity: ResponseEntity)
}
class ControllerActor[T]() extends Actor with ActorLogging {
implicit val askTimeout: Timeout = 1.seconds
var applicationActor: ActorRef = null
var applicationModel: ApplicationModel = null
import context._
def receive = in
def in: Receive = LoggingReceive {
case SkysailContext(cmd: ProcessCommand, model: ApplicationModel, resource: AsyncResource[T], _: Option[BundleContext]) => {
applicationActor = sender
applicationModel = model
resource.setActorContext(context)
resource.setApplicationModel(model)
cmd.ctx.request.method match {
case HttpMethods.GET => resource.get(RequestEvent(cmd, self))
case HttpMethods.POST => resource.asInstanceOf[PostSupport].post(RequestEvent(cmd, self))
case e: Any => resource.get(RequestEvent(cmd, self))
}
become(out)
}
case msg: Any => log info s"<<< IN <<<: received unknown message '$msg' in ${this.getClass.getName}"
}
def out: Receive = LoggingReceive {
case response: ListResponseEvent[T] =>
val negotiator = new MediaTypeNegotiator(response.req.cmd.ctx.request.headers)
val acceptedMediaRanges = negotiator.acceptedMediaRanges
implicit val formats = DefaultFormats
implicit val serialization = jackson.Serialization
val m = Marshal(response.entity.asInstanceOf[List[_]]).to[RequestEntity]
if (negotiator.isAccepted(MediaTypes.`text/html`)) {
handleHtmlWithFallback(response, m)
} else if (negotiator.isAccepted(MediaTypes.`application/json`)) {
handleJson(m, response)
}
case response: ResponseEvent[T] =>
val negotiator = new MediaTypeNegotiator(response.req.cmd.ctx.request.headers)
val acceptedMediaRanges = negotiator.acceptedMediaRanges
implicit val formats: DefaultFormats.type = DefaultFormats
implicit val serialization: Serialization.type = jackson.Serialization
val e1 = Extraction.decompose(response.entity)
if (e1.isInstanceOf[JObject]) {
val e = e1.asInstanceOf[JObject]
val written = write(e)
if (negotiator.isAccepted(MediaTypes.`text/html`)) {
handleHtmlWithFallback(response, e)
} else if (negotiator.isAccepted(MediaTypes.`application/json`)) {
handleJson(response, e)
}
} else {
applicationActor ! response
}
case msg: List[T] => {
log warning s">>> OUT(${this.hashCode()}) @deprecated >>>: List[T]"
implicit val formats = DefaultFormats
implicit val serialization = jackson.Serialization
val m = Marshal(msg).to[RequestEntity]
m.onSuccess {
case value =>
val reqEvent = RequestEvent(null, null)
val resEvent = ListResponseEvent(reqEvent, null)
log info s">>> OUT(${this.hashCode()} >>>: sending back to ${applicationActor}"
applicationActor ! resEvent.copy(entity = msg, httpResponse = resEvent.httpResponse.copy(entity = value))
}
}
case msg: ControllerActor.MyResponseEntity => {
log warning s">>> OUT(${this.hashCode()}) @deprecated >>>: ControllerActor.MyResponseEntity"
val reqEvent = RequestEvent(null, null)
val resEvent = ListResponseEvent(reqEvent, null)
applicationActor ! resEvent.copy(httpResponse = resEvent.httpResponse.copy(entity = msg.entity))
}
case msg: T => {
log warning s">>> OUT(${this.hashCode()}) @deprecated >>>: T"
val reqEvent = RequestEvent(null, null)
val resEvent = ListResponseEvent(reqEvent, null)
implicit val formats = DefaultFormats
val e = Extraction.decompose(msg).asInstanceOf[JObject]
val written = write(e)
val r = HttpEntity(ContentTypes.`application/json`, written)
applicationActor ! resEvent.copy(entity = msg, httpResponse = resEvent.httpResponse.copy(entity = r))
}
case msg: Any => log info s">>> OUT >>>: received unknown message '$msg' in ${this.getClass.getName}"
}
private def handleHtmlWithFallback(response: ListResponseEvent[T], m: Future[MessageEntity]) = {
try {
val loader = response.req.cmd.cls.getClassLoader
val resourceHtmlClass = loader.loadClass(getHtmlTemplate(response.req))
val applyMethod = resourceHtmlClass.getMethod("apply", classOf[RepresentationModel])
m.onSuccess {
case value =>
val rep = new RepresentationModel(response, applicationModel)
val r2 = applyMethod.invoke(resourceHtmlClass, rep).asInstanceOf[HtmlFormat.Appendable]
val answer = HttpEntity(ContentTypes.`text/html(UTF-8)`, r2.body)
applicationActor ! response.copy(entity = response.entity, httpResponse = response.httpResponse.copy(entity = answer))
}
} catch {
case e: Exception => log info s"rendering fallback to json, could not load '${getHtmlTemplate(response.req)}', reason: $e"; handleJson(m, response)
}
}
private def handleHtmlWithFallback(response: ResponseEvent[T], e: JObject): Unit = {
val resourceClassAsString = getHtmlTemplate(response.req)
try {
val loader = response.req.cmd.cls.getClassLoader
val resourceHtmlClass = loader.loadClass(resourceClassAsString)
val applyMethod = resourceHtmlClass.getMethod("apply", classOf[RepresentationModel])
val rep = new RepresentationModel(response, applicationModel)
val r2 = applyMethod.invoke(resourceHtmlClass, rep).asInstanceOf[HtmlFormat.Appendable]
val answer = HttpEntity(ContentTypes.`text/html(UTF-8)`, r2.body)
applicationActor ! response.copy(entity = response.entity, httpResponse = response.httpResponse.copy(entity = answer))
} catch {
case ex: Exception =>
log info s"rendering fallback to json, could not load '$resourceClassAsString', reason: $ex"
handleJson(response, e)
}
}
private def handleJson(m: Future[MessageEntity], response: ListResponseEvent[T]) = {
m.onSuccess {
case value =>
applicationActor ! response.copy(entity = response.entity, httpResponse = response.httpResponse.copy(entity = value))
}
}
private def handleJson(response: ResponseEvent[T], e: JObject) = {
import org.json4s.jackson.JsonMethods._
applicationActor ! response.copy(entity = response.entity,
httpResponse = response.httpResponse.copy(entity = compact(render(e))))
}
override def preRestart(reason: Throwable, message: Option[Any]) {
log.error(reason, "Restarting due to [{}] when processing [{}]", reason.getMessage, message.getOrElse(""))
}
private def getHtmlTemplate(req: RequestEvent) = {
s"${req.cmd.cls.getPackage.getName}.html.${req.cmd.cls.getSimpleName}_Get"
}
}
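// ---------------------------------------------------------------------------
// Editor's sketch (not part of the original file): the content-negotiation
// decision used twice in `out`, isolated. Given the request headers, prefer
// HTML and fall back to JSON, mirroring the branches above. Relies on the
// akka-http imports already at the top of this file.
// ---------------------------------------------------------------------------
object NegotiationSketch {
  sealed trait Rendering
  case object AsHtml extends Rendering
  case object AsJson extends Rendering

  def choose(headers: List[HttpHeader]): Rendering = {
    val negotiator = new MediaTypeNegotiator(headers)
    if (negotiator.isAccepted(MediaTypes.`text/html`)) AsHtml else AsJson
  }
}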
|
evandor/skysail-core
|
skysail.core/src/io/skysail/core/server/actors/ControllerActor.scala
|
Scala
|
apache-2.0
| 7,884 |
/**
* Copyright (C) 2009-2013 Typesafe Inc. <http://www.typesafe.com>
*/
package actorbintree
import akka.actor._
import scala.collection.immutable.Queue
object BinaryTreeSet {
trait Operation {
def requester: ActorRef
def id: Int
def elem: Int
}
trait OperationReply {
def id: Int
}
val dummyId = -1
/**
* Request with identifier `id` to insert an element `elem` into the tree.
* The actor at reference `requester` should be notified when this operation
* is completed.
*/
case class Insert(requester: ActorRef, id: Int, elem: Int) extends Operation
/**
* Request with identifier `id` to check whether an element `elem` is present
* in the tree. The actor at reference `requester` should be notified when
* this operation is completed.
*/
case class Contains(requester: ActorRef, id: Int, elem: Int) extends Operation
/**
* Request with identifier `id` to remove the element `elem` from the tree.
* The actor at reference `requester` should be notified when this operation
* is completed.
*/
case class Remove(requester: ActorRef, id: Int, elem: Int) extends Operation
  /** Request to perform garbage collection. */
case object GC
/**
* Holds the answer to the Contains request with identifier `id`.
* `result` is true if and only if the element is present in the tree.
*/
case class ContainsResult(id: Int, result: Boolean) extends OperationReply
/** Message to signal successful completion of an insert or remove operation. */
case class OperationFinished(id: Int) extends OperationReply
}
class BinaryTreeSet extends Actor {
import BinaryTreeSet._
import BinaryTreeNode._
def createRoot: ActorRef =
context.actorOf(BinaryTreeNode.props(0, initiallyRemoved = true))
var root = createRoot
// optional
var pendingQueue = Queue.empty[Operation]
// optional
def receive = normal
// optional
/** Accepts `Operation` and `GC` messages. */
val normal: Receive = {
case op: Operation => root ! op
case GC =>
      val gcRoot = createRoot
      root ! CopyTo(gcRoot)
      context.become(garbageCollecting(gcRoot))
}
// optional
/**
* Handles messages while garbage collection is performed.
* `newRoot` is the root of the new binary tree where we want to copy
* all non-removed elements into.
*/
def garbageCollecting(newRoot: ActorRef): Receive = {
case op: Operation =>
pendingQueue :+= op
case CopyFinished =>
root ! PoisonPill
root = newRoot
pendingQueue.foreach(newRoot ! _)
pendingQueue = Queue()
context.become(normal)
case GC => ()
}
}
object BinaryTreeNode {
trait Position
case object Left extends Position
case object Right extends Position
case class CopyTo(treeNode: ActorRef)
case object CopyFinished
def props(elem: Int, initiallyRemoved: Boolean) =
Props(classOf[BinaryTreeNode], elem, initiallyRemoved)
}
class BinaryTreeNode(val elem: Int, initiallyRemoved: Boolean) extends Actor with ActorLogging { thisNode =>
import BinaryTreeNode._
import BinaryTreeSet._
var subtrees = Map[Position, ActorRef]()
var removed = initiallyRemoved
// optional
def receive = normal
// optional
/** Handles `Operation` messages and `CopyTo` requests. */
val normal: Receive = {
case insert @ Insert(requester, id, elem) =>
if (elem == thisNode.elem) {
removed = false
requester ! OperationFinished(id)
} else {
val position = nextPosition(elem)
subtrees.get(position) match {
case Some(node) => node ! insert
case None =>
subtrees += position -> context.actorOf(BinaryTreeNode.props(elem, false))
requester ! OperationFinished(id)
}
}
case contains @ Contains(requester, id, elem) =>
if (elem == thisNode.elem)
requester ! ContainsResult(id, !removed)
else {
val position = nextPosition(elem)
subtrees.get(position) match {
case Some(node) => node ! contains
case None => requester ! ContainsResult(id, false)
}
}
case remove @ Remove(requester, id, elem) =>
if (elem == thisNode.elem) {
removed = true
requester ! OperationFinished(id)
} else {
val position = nextPosition(elem)
subtrees.get(position) match {
case Some(node) => node ! remove
case None => requester ! OperationFinished(id)
}
}
case CopyTo(newRoot) => {
if (removed && subtrees.isEmpty) {
sender ! CopyFinished
} else {
if (!removed) {
newRoot ! Insert(self, dummyId, elem)
}
val children = subtrees.values.toSet
children.foreach(_ ! CopyTo(newRoot))
context.become(copying(sender, children, removed))
}
}
}
// optional
/**
* `expected` is the set of ActorRefs whose replies we are waiting for,
* `insertConfirmed` tracks whether the copy of this node to the new tree has been confirmed.
*/
def copying(requester: ActorRef, expected: Set[ActorRef], insertConfirmed: Boolean): Receive = {
case CopyFinished =>
val newExpected = expected - sender
if (newExpected.isEmpty && insertConfirmed)
requester ! CopyFinished
else
context.become(copying(requester, newExpected, insertConfirmed))
case OperationFinished(id) =>
if (expected.isEmpty)
requester ! CopyFinished
else
context.become(copying(requester, expected, true))
}
def nextPosition(elem: Int): Position =
if (elem < thisNode.elem) Left else Right
}
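// ---------------------------------------------------------------------------
// Editor's usage sketch (not part of the original assignment code): driving
// the tree from a throwaway client actor. Replies arrive as the
// OperationFinished and ContainsResult messages defined above. Assumes
// Akka 2.4+ for system.terminate().
// ---------------------------------------------------------------------------
object BinaryTreeSketch extends App {
  import BinaryTreeSet._

  val system = ActorSystem("treeSketch")
  val tree = system.actorOf(Props[BinaryTreeSet], "tree")

  system.actorOf(Props(new Actor {
    tree ! Insert(self, id = 1, elem = 42)
    tree ! Contains(self, id = 2, elem = 42)
    def receive = {
      case OperationFinished(1) => // insert acknowledged
      case ContainsResult(2, result) =>
        println(s"contains 42: $result")
        context.system.terminate()
    }
  }))
}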
|
alex-learn/principles-reactive-programming
|
actorbintree/src/main/scala/actorbintree/BinaryTreeSet.scala
|
Scala
|
apache-2.0
| 5,708 |
import definiti.native._
import java.time.LocalDateTime
import play.api.libs.json._
import definiti.native.JsonPlaySupport._
package object my {
case class MyFirstType(myAttribute: String)
object MyFirstType {
val verification: Verification[MyFirstType] = Verification.none[MyFirstType]
val rawFormat: OFormat[MyFirstType] = Json.format[MyFirstType]
implicit val format: OFormat[MyFirstType] = formatWithValidation(rawFormat, verification)
}
case class MySecondType(myFirstAttribute: BigDecimal, mySecondAttribute: MyFirstType, myThirdAttribute: Seq[MyFirstType], myFourthAttribute: Option[MyFirstType], myFifthAttribute: Seq[MyThirdType], mySixthAttribute: Seq[MyThirdType])
object MySecondType {
val verification: Verification[MySecondType] = Verification.all(Verification.all(MyFirstType.verification).from[MySecondType](_.mySecondAttribute, "mySecondAttribute"), Verification.all(new ListVerification(MyFirstType.verification)).from[MySecondType](_.myThirdAttribute, "myThirdAttribute"), Verification.all(new OptionVerification(MyFirstType.verification)).from[MySecondType](_.myFourthAttribute, "myFourthAttribute"), Verification.all(AliasList.verification[MyThirdType], new ListVerification(MyThirdType.verification)).from[MySecondType](_.myFifthAttribute, "myFifthAttribute"), Verification.all(ListOfThird.verification).from[MySecondType](_.mySixthAttribute, "mySixthAttribute"))
val rawFormat: OFormat[MySecondType] = {
implicit val MyFirstTypeFormat = MyFirstType.rawFormat
implicit val MyThirdTypeFormat = MyThirdType.rawFormat
Json.format[MySecondType]
}
implicit val format: OFormat[MySecondType] = formatWithValidation(rawFormat, verification)
}
case class MyThirdType(myAttribute: String)
object MyThirdType {
val verification: Verification[MyThirdType] = Verification.none[MyThirdType]
val rawFormat: OFormat[MyThirdType] = Json.format[MyThirdType]
implicit val format: OFormat[MyThirdType] = formatWithValidation(rawFormat, verification)
}
object AliasList {
def verification[A](): Verification[Seq[A]] = Verification.none[Seq[A]]
}
object ListOfThird {
val verification: Verification[Seq[MyThirdType]] = Verification.none[Seq[MyThirdType]]
}
}
|
definiti/definiti-scala-model
|
src/test/resources/samples/json/play/withValidation/output.scala
|
Scala
|
mit
| 2,253 |
/*
* Copyright University of Basel, Graphics and Vision Research Group
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package scalismo.faces.gui
import javax.swing.event.{ChangeEvent, ChangeListener}
import javax.swing.{JSlider, SwingConstants}
/** slider which works on a continuous range, uses 100 ticks */
class MappedSlider(orientation: Int,
min: Double,
max: Double,
value: Double,
changeListener: Double => Unit) extends JSlider {
  // Apply the remaining constructor parameters; the no-argument JSlider
  // superclass constructor would otherwise silently ignore them. The value is
  // set before the listener is attached so construction fires no callback.
  setOrientation(orientation)
  updateValue(value)
  addChangeListener(new ChangeListener {
    override def stateChanged(e: ChangeEvent): Unit = changeListener(sliderToRange(getValue))
  })
/** map the continuous value to the slider range 0,1,...,100 */
private def rangeToSlider(value: Double): Int = ((value - min)/(max - min) * 100).toInt
/** map the slider value to the continuous range [min, max] */
private def sliderToRange(value: Int): Double = value.toDouble/100.0 * (max - min) + min
/** update the slider value */
def updateValue(value: Double): Unit = setValue(rangeToSlider(value))
}
object MappedSlider {
/** create a MappedSlider */
def apply(orientation: Int, min: Double, max: Double, value: Double, changeListener: Double => Unit) = new MappedSlider(orientation, min, max, value, changeListener)
/** create a vertical MappedSlider */
def apply(min: Double, max: Double, value: Double, changeListener: Double => Unit) = new MappedSlider(SwingConstants.VERTICAL, min, max, value, changeListener)
/** create a MappedSlider */
def apply(min: Double, max: Double, value: Double, changeListener: Double => Unit, orientation: Int) = new MappedSlider(orientation, min, max, value, changeListener)
}
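// ---------------------------------------------------------------------------
// Editor's usage sketch (not part of the original file): a vertical slider
// over [0.0, 1.0] that prints the continuous value on every change.
// ---------------------------------------------------------------------------
object MappedSliderSketch {
  def demo(): MappedSlider =
    MappedSlider(min = 0.0, max = 1.0, value = 0.5,
      changeListener = v => println(f"slider value: $v%.2f"))
}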
|
unibas-gravis/scalismo-faces
|
src/main/scala/scalismo/faces/gui/MappedSlider.scala
|
Scala
|
apache-2.0
| 2,224 |
/*
* Copyright 2014 Commonwealth Computer Research, Inc.
*
* Licensed under the Apache License, Version 2.0 (the License);
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an AS IS BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.locationtech.geomesa.core.util
import org.apache.accumulo.core.client.{BatchScanner, Scanner}
import org.locationtech.geomesa.core.data.{AccumuloConnectorCreator, INTERNAL_GEOMESA_VERSION}
import org.locationtech.geomesa.core.index.ExplainerOutputType
import org.opengis.feature.simple.SimpleFeatureType
class ExplainingConnectorCreator(output: ExplainerOutputType) extends AccumuloConnectorCreator {
/**
* Create a BatchScanner for the SpatioTemporal Index Table
*
* @param numThreads number of threads for the BatchScanner
*/
override def createSpatioTemporalIdxScanner(sft: SimpleFeatureType, numThreads: Int): BatchScanner = new ExplainingBatchScanner(output)
/**
* Create a BatchScanner for the SpatioTemporal Index Table
*/
override def createSTIdxScanner(sft: SimpleFeatureType): BatchScanner = new ExplainingBatchScanner(output)
/**
* Create a Scanner for the Attribute Table (Inverted Index Table)
*/
override def createAttrIdxScanner(sft: SimpleFeatureType): Scanner = new ExplainingScanner(output)
/**
* Create a BatchScanner to retrieve only Records (SimpleFeatures)
*/
override def createRecordScanner(sft: SimpleFeatureType, numThreads: Int): BatchScanner = new ExplainingBatchScanner(output)
override def getGeomesaVersion(sft: SimpleFeatureType): Int = INTERNAL_GEOMESA_VERSION
}
|
kevinwheeler/geomesa
|
geomesa-core/src/main/scala/org/locationtech/geomesa/core/util/ExplainingConnectorCreator.scala
|
Scala
|
apache-2.0
| 1,976 |
/*************************************************************************
* *
* This file is part of the 20n/act project. *
* 20n/act enables DNA prediction for synthetic biology/bioengineering. *
* Copyright (C) 2017 20n Labs, Inc. *
* *
* Please direct all queries to [email protected]. *
* *
* This program is free software: you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation, either version 3 of the License, or *
* (at your option) any later version. *
* *
* This program is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
* GNU General Public License for more details. *
* *
* You should have received a copy of the GNU General Public License *
* along with this program. If not, see <http://www.gnu.org/licenses/>. *
* *
*************************************************************************/
package com.act.workflow.tool_manager.workflow.workflow_mixins.mongo
import act.server.MongoDB
import act.shared.Chemical
import com.mongodb.DBObject
import com.mongodb.casbah.Imports.{BasicDBList, BasicDBObject}
import org.apache.logging.log4j.LogManager
import scala.collection.JavaConversions._
import scala.collection.mutable
trait MongoWorkflowUtilities {
private val logger = LogManager.getLogger(getClass.getName)
private val mongoConnections: mutable.HashMap[(String, String, Int), MongoDB] = new mutable.HashMap[(String, String, Int), MongoDB]()
/*
Related to instantiating Mongo
*/
/**
* Instantiates a connection with the MongoDB in act.server
*
* @param db The name of the database to connect to. Default marvin
* @param host The host to connect to. Default localhost
* @param port The port to listen at. Default 27017 (Mongo default)
*
* @return Created Mongo database connection.
*/
def connectToMongoDatabase(db: String = "marvin", host: String = "localhost", port: Int = 27017): MongoDB = {
val key = (db, host, port)
val cachedConnection = mongoConnections.get(key)
if (cachedConnection.isDefined){
return mongoConnections(key)
}
// Instantiate Mongo host.
logger.info("Setting up new Mongo database connection")
val connection = new MongoDB(host, port, db)
mongoConnections.put(key, connection)
connection
}
def createDbObject(values: Map[Keyword, Any]): BasicDBObject = {
new BasicDBObject(values map { case (key, value) => key.value -> value })
}
/**
* Unwinding a list creates a value that can be found by the name <PreviousListName>.<ValueName>.
* This function standardizes that naming procedure for use in querying unwound variables within lists.
*
* A document containing a field "lists" that looks like this:
* "lists" : [{val1 : 1, val2: 2}, {val1: 3, val2: 3}] is unwound to form:
*
* lists.val1 : [1, 3]
* lists.val2 : [2, 4]
*
* Thus, this naming pattern modification allows to easily access these newly created values.
*
* @param listName The name of the DBListObject that was unwound
* @param valueName The name of a value found within that list
*
* @return String containing the formatted names.
*/
def formatUnwoundName(listName: String, valueName: String): String = {
s"$listName.$valueName"
}
def formatUnwoundName(listName: String, valueName: Keyword): String = {
formatUnwoundName(listName, valueName.toString)
}
def formatUnwoundName(listName: Keyword, valueName: Keyword): String = {
formatUnwoundName(listName.toString, valueName.toString)
}
def formatUnwoundName(listName: Keyword, valueName: String): String = {
formatUnwoundName(listName.toString, valueName)
}
/**
* Creates a new query that checks if something exists
* True: Exists
* False: Doesn't exist
*
* Reference: https://docs.mongodb.com/manual/reference/operator/query/exists/
*
* @return DBObject that matches the above conditions
*/
def getMongoExists: BasicDBObject = {
createDbObject(MongoKeywords.EXISTS, true)
}
def createDbObject(key: Keyword, value: Any): BasicDBObject = {
new BasicDBObject(key.toString, value)
}
/**
* Creates a new query that checks if something doesn't exist
* True: Doesn't exist
* False: Exists
*
* Reference: https://docs.mongodb.com/manual/reference/operator/query/exists/
*
* @return DB Object that matches the above conditions
*/
def getMongoDoesntExist: BasicDBObject = {
createDbObject(MongoKeywords.EXISTS, false)
}
/**
* Truth value that returns true if any members of the truthValueList evaluate to true
*
* Reference: https://docs.mongodb.com/manual/reference/operator/query/or/
*
* @param truthValueList A list of DBObjects to check truth conditions against
*
* @return DBObject containing this query
*/
def defineMongoOr(truthValueList: BasicDBList): BasicDBObject = {
createDbObject(MongoKeywords.OR, truthValueList)
}
/*
General Mongo functionality
*/
def defineMongoNot(truthValue: String): BasicDBObject = {
createDbObject(MongoKeywords.NOT, truthValue)
}
/**
* Truth value that returns true if all members of the truthValueList evaluate to true
*
* Reference: https://docs.mongodb.com/manual/reference/operator/query/and/
*
* @param truthValueList A list of DBObjects to check truth conditions against
*
* @return DBObject containing this query
*/
def defineMongoAnd(truthValueList: BasicDBList): BasicDBObject = {
createDbObject(MongoKeywords.AND, truthValueList)
}
/**
* Query that returns true if any of the values in the queryList are equal to the field it is assigned to.
*
* Reference: https://docs.mongodb.com/manual/reference/operator/query/in/
*
* @param queryList A list of values that the field could equal
*
* @return DBObject containing this query
*/
def defineMongoIn(queryList: BasicDBList): BasicDBObject = {
createDbObject(MongoKeywords.IN, queryList)
}
/**
* Allows the use of REGEX to match field values.
*
* Reference: https://docs.mongodb.com/manual/reference/operator/query/regex/
*
* @param regex A regex string that will be matched against
*
* @return DBObject containing this query
*/
def defineMongoRegex(regex: String): BasicDBObject = {
createDbObject(MongoKeywords.REGEX, regex)
}
/**
* A normal query against the reactions database.
*
* Reference: https://docs.mongodb.com/manual/reference/method/db.collection.find/
*
* @param mongo Connection to a MongoDB
* @param key The key to match documents against
* @param filter A filter of the returned components of the document
*
* @return An iterator over the returned documents
*/
def mongoQueryReactions(mongo: MongoDB)(key: BasicDBObject, filter: BasicDBObject, notimeout: Boolean = true): Iterator[DBObject] = {
logger.debug(s"Querying reaction database with the query $key. Filtering values to obtain $filter")
mongo.getIteratorOverReactions(key, filter).toIterator
}
def mongoQueryChemicals(mongo: MongoDB)(key: BasicDBObject, filter: BasicDBObject, notimeout: Boolean = true): Iterator[DBObject] = {
logger.debug(s"Querying reaction database with the query $key. Filtering values to obtain $filter")
mongo.getIteratorOverChemicals(key, filter).toIterator
}
def mongoChemicalIterator(mongo: MongoDB)(key: BasicDBObject): Option[Iterator[Chemical]] = {
logger.debug(s"Querying chemical database with the query $key.")
Option(mongo.getJavaIteratorOverChemicals(key))
}
/**
* A normal query against the sequences database.
*
* Reference: https://docs.mongodb.com/manual/reference/method/db.collection.find/
*
* @param mongo Connection to a MongoDB
* @param key The key to match documents against
* @param filter A filter of the returned components of the document
*
* @return An iterator over the returned documents
*/
def mongoQuerySequences(mongo: MongoDB)(key: BasicDBObject, filter: BasicDBObject, notimeout: Boolean = true): Iterator[DBObject] = {
logger.debug(s"Querying sequence database with the query $key. Filtering values to obtain $filter")
mongo.getDbIteratorOverSeq(key, filter).toIterator
}
/**
* Filters all documents that cause thingsToMatch to be true.
*
* Operation:
* Checks thingsToMatch against each document and collects documents that evaluate to true
*
* Reference: https://docs.mongodb.com/manual/reference/operator/aggregation/match/
*
* @param thingsToMatch Conditional to evaluate true/false against
*
* @return DBObject constructing this request.
*/
def defineMongoMatch(thingsToMatch: BasicDBObject): BasicDBObject = {
createDbObject(MongoKeywords.MATCH, thingsToMatch)
}
/**
* Takes a a list within the Mongo document and unwinds it. Unwinding a list creates the pattern shown below:
*
* A document containing a field "lists" that looks like this:
*
* Operation:
* "lists" : [{val1 : 1, val2: 2}, {val1: 3, val2: 3}] -> {lists.val1 : [1, 3], lists.val2 : [2, 4]}
*
* Reference: https://docs.mongodb.com/manual/reference/operator/aggregation/unwind/
*
* @param listName The name of the list
*
* @return A formatted query that will do the above operation
*/
def defineMongoUnwind(listName: Keyword): BasicDBObject = {
createDbObject(MongoKeywords.UNWIND, dollarString(listName))
}
/*
Mongo aggregation handling.
*/
private def dollarString(inputKeyword: Keyword): String = {
dollarString(inputKeyword.value)
}
/**
* Many Mongo queries require a dollar sign in front of the keyword. Example: $exists
*
* The dollar sign is also used during aggregation to reference intermediate documents. Example: $_id
*
* Thus, this function changes f("String") -> "$String"
*
* @param inputString The string to be converted into dollar format
*
* @return Modified string
*/
private def dollarString(inputString: String): String = {
    // "$$" escapes to a single literal dollar sign, followed by the interpolated input
s"$$$inputString"
}
def defineMongoGroup(nameOfGroupingValue: Keyword, outputListName: Keyword): BasicDBObject = {
defineMongoGroup(nameOfGroupingValue.toString, outputListName.toString)
}
def defineMongoGroup(nameOfGroupingValue: String, outputListName: Keyword): BasicDBObject = {
defineMongoGroup(nameOfGroupingValue, outputListName.toString)
}
/**
* Groups documents together by some given value.
* Requires an ID field and then accumulates any other fields indicating in the accumulator.
* In our example, we create an array via PUSH and use that to name a new field outputListName.
*
* Operation:
* Converts previous document -> ID, List of values in previous field.
*
* References:
* $push -> https://docs.mongodb.com/manual/reference/operator/aggregation/push/
* $group -> https://docs.mongodb.com/manual/reference/operator/aggregation/group/
*
* @param nameOfGroupingValue The name of the field which should be pushed into an array
* @param outputListName The name of the list that refers to the array created around nameOfGroupingValue
*
* @return DBObject to perform the group query
*/
def defineMongoGroup(nameOfGroupingValue: String, outputListName: String): BasicDBObject = {
// Create an array for the expression
val pushing = createDbObject(MongoKeywords.PUSH, dollarString(nameOfGroupingValue))
// Name the output array
val groupMap = new BasicDBObject(outputListName, pushing)
// The new document always requires an ID, so we just use the prior ID.
appendKeyToDbObject(groupMap, MongoKeywords.ID, dollarString(MongoKeywords.ID))
// Finally, we group everything together
createDbObject(MongoKeywords.GROUP, groupMap)
}
def appendKeyToDbObject(currentObject: BasicDBObject, key: Keyword, value: Any): BasicDBObject = {
currentObject.append(key.value, value)
}
def defineMongoGroup(nameOfGroupingValue: Keyword, outputListName: String): BasicDBObject = {
defineMongoGroup(nameOfGroupingValue.toString, outputListName)
}
def getWithDefault(document: DBObject, key: Keyword, default: String): String = {
getWithDefault(document, key.toString, default)
}
def getWithDefault(document: DBObject, key: String, default: String): String = {
val documentVal: AnyRef = document.get(key)
if (documentVal == null) {
default
} else {
documentVal.toString
}
}
/**
* Aggregate and process documents over the reactions DB with a given pipeline
*
* Reference: https://docs.mongodb.com/manual/aggregation/
*
* @param mongo Connection to a MongoDB
* @param pipeline A list of objects to apply sequentially to process the data.
*
* @return An iterator over all the returned documents
*/
def mongoApplyPipelineReactions(mongo: MongoDB, pipeline: List[DBObject]): Iterator[DBObject] = {
mongo.applyPipelineOverReactions(pipeline)
}
/**
* Aggregate and process documents over the sequences DB with a given pipeline
*
* Reference: https://docs.mongodb.com/manual/aggregation/
*
* @param mongo Connection to a MongoDB
* @param pipeline A list of objects to apply sequentially to process the data.
*
* @return An iterator over all the returned documents
*/
def mongoApplyPipelineSequences(mongo: MongoDB, pipeline: List[DBObject]): Iterator[DBObject] = {
mongo.applyPipelineOverSequences(pipeline)
}
/*
Mongo object utility functions
*/
/**
* Takes in a List of DBObjects and converts it to a BasicDBList
*
* @param scalaList The initial list
*
* @return A BasicDBList representation of the scalaList
*/
def convertListToMongoDbList(scalaList: List[BasicDBObject]): BasicDBList = {
val copyList = new BasicDBList
copyList.addAll(scalaList)
copyList
}
/**
* Takes in an iterator over DBObjects and creates a set out of them.
*
* @param iterator DBObject iterator
*
* @return Set of DBObjects
*/
def mongoDbIteratorToSet(iterator: Iterator[DBObject]): Set[DBObject] = {
val buffer = mutable.Set[DBObject]()
for (value <- iterator) {
buffer add value
}
buffer.toSet
}
/**
* Overload of the Iterable version, but converts iterator to a stream for processing
*
* @param iterator Iterator DBObject
* @param fields List of fields in the document
*
* @return The map of map of documents.
* The first map is keyed by the ID of the document,
* while maps contained within are keyed by the fields of that document.
*/
def mongoReturnQueryToMap(iterator: Iterator[DBObject], fields: List[String]): Map[Long, Map[String, AnyRef]] = {
mongoReturnQueryToMap(iterator.toStream, fields)
}
/**
* Converts an iterable into a Map of Maps.
* The first Map is keyed on the document ID and the second map on the document fields.
*
* @param iterator Iterable of DBObjects
* @param fields List of fields in the document
*
* @return The map of map of documents.
* The first map is keyed by the ID of the document,
* while maps contained within are keyed by the fields of that document.
*/
def mongoReturnQueryToMap(iterator: Iterable[DBObject], fields: List[String]): Map[Long, Map[String, AnyRef]] = {
    // Drop the ID field from the requested fields; the ID becomes the key of the outer map below.
val filteredFields = fields.filter(!_.equals(MongoKeywords.ID.toString))
// Map each field as the key and the information in the document to what it goes to.
def defineFields(document: DBObject): Map[String, AnyRef] = {
filteredFields map (field => field -> document.get(field)) toMap
}
    // Map each document's ID to a map of its requested fields.
val mapOfMaps = iterator map (document =>
document.get(MongoKeywords.ID.toString).asInstanceOf[Int].toLong -> defineFields(document)) toMap
    // Error check as we build the map: fail if no documents matched.
mapOfMaps.size match {
case n if n <= 0 =>
throw new Exception(s"No values found matching any of the key supplied.")
case default =>
logger.info(s"Successfully found $default documents matching your query.")
}
mapOfMaps
}
}
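// ---------------------------------------------------------------------------
// Editor's usage sketch (not part of the original file): composing the
// helpers above into a single query object. The field names "ecnum" and
// "organism" are illustrative only, not a schema this trait guarantees.
// ---------------------------------------------------------------------------
object MongoQuerySketch extends MongoWorkflowUtilities {
  // Matches documents where `ecnum` exists AND `organism` matches the regex.
  def exampleQuery(): BasicDBObject =
    defineMongoAnd(convertListToMongoDbList(List(
      new BasicDBObject("ecnum", getMongoExists),
      new BasicDBObject("organism", defineMongoRegex("^Homo sapiens")))))
}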
|
20n/act
|
reachables/src/main/scala/com/act/workflow/tool_manager/workflow/workflow_mixins/mongo/MongoWorkflowUtilities.scala
|
Scala
|
gpl-3.0
| 17,394 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.s2graph.core.mysqls
import java.util.UUID
import org.apache.s2graph.core.utils.logger
import play.api.libs.json.Json
import scalikejdbc._
object Service extends Model[Service] {
def apply(rs: WrappedResultSet): Service = {
Service(rs.intOpt("id"), rs.string("service_name"), rs.string("access_token"),
rs.string("cluster"), rs.string("hbase_table_name"), rs.int("pre_split_size"), rs.intOpt("hbase_table_ttl"))
}
def findByAccessToken(accessToken: String)(implicit session: DBSession = AutoSession): Option[Service] = {
val cacheKey = s"accessToken=$accessToken"
withCache(cacheKey)( sql"""select * from services where access_token = ${accessToken}""".map { rs => Service(rs) }.single.apply)
}
def findById(id: Int)(implicit session: DBSession = AutoSession): Service = {
val cacheKey = "id=" + id
withCache(cacheKey)( sql"""select * from services where id = ${id}""".map { rs => Service(rs) }.single.apply).get
}
def findByName(serviceName: String, useCache: Boolean = true)(implicit session: DBSession = AutoSession): Option[Service] = {
val cacheKey = "serviceName=" + serviceName
lazy val serviceOpt = sql"""
select * from services where service_name = ${serviceName}
""".map { rs => Service(rs) }.single.apply()
if (useCache) withCache(cacheKey)(serviceOpt)
else serviceOpt
}
def insert(serviceName: String, cluster: String,
hTableName: String, preSplitSize: Int, hTableTTL: Option[Int],
compressionAlgorithm: String)(implicit session: DBSession = AutoSession): Unit = {
logger.info(s"$serviceName, $cluster, $hTableName, $preSplitSize, $hTableTTL, $compressionAlgorithm")
val accessToken = UUID.randomUUID().toString()
sql"""insert into services(service_name, access_token, cluster, hbase_table_name, pre_split_size, hbase_table_ttl)
values(${serviceName}, ${accessToken}, ${cluster}, ${hTableName}, ${preSplitSize}, ${hTableTTL})""".execute.apply()
}
def delete(id: Int)(implicit session: DBSession = AutoSession) = {
val service = findById(id)
val serviceName = service.serviceName
sql"""delete from service_columns where id = ${id}""".execute.apply()
val cacheKeys = List(s"id=$id", s"serviceName=$serviceName")
cacheKeys.foreach { key =>
expireCache(key)
expireCaches(key)
}
}
def findOrInsert(serviceName: String, cluster: String, hTableName: String,
preSplitSize: Int, hTableTTL: Option[Int], compressionAlgorithm: String)(implicit session: DBSession = AutoSession): Service = {
findByName(serviceName) match {
case Some(s) => s
case None =>
insert(serviceName, cluster, hTableName, preSplitSize, hTableTTL, compressionAlgorithm)
val cacheKey = "serviceName=" + serviceName
expireCache(cacheKey)
findByName(serviceName).get
}
}
def findAll()(implicit session: DBSession = AutoSession) = {
val ls = sql"""select * from services""".map { rs => Service(rs) }.list.apply
putsToCache(ls.map { x =>
val cacheKey = s"id=${x.id.get}"
(cacheKey -> x)
})
putsToCache(ls.map { x =>
val cacheKey = s"serviceName=${x.serviceName}"
(cacheKey -> x)
})
}
def findAllConn()(implicit session: DBSession = AutoSession): List[String] = {
sql"""select distinct(cluster) from services""".map { rs => rs.string("cluster") }.list.apply
}
}
case class Service(id: Option[Int], serviceName: String, accessToken: String, cluster: String, hTableName: String, preSplitSize: Int, hTableTTL: Option[Int]) {
lazy val toJson =
id match {
case Some(_id) =>
Json.obj("id" -> _id, "name" -> serviceName, "accessToken" -> accessToken, "cluster" -> cluster,
"hTableName" -> hTableName, "preSplitSize" -> preSplitSize, "hTableTTL" -> hTableTTL)
case None =>
Json.parse("{}")
}
}
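// ---------------------------------------------------------------------------
// Editor's usage sketch (not part of the original file): idempotently
// registering a service. Uses the default AutoSession and therefore assumes
// a reachable metadata store; all argument values are placeholders.
// ---------------------------------------------------------------------------
object ServiceSketch {
  def register(): Service =
    Service.findOrInsert(
      serviceName = "s2graph-demo",
      cluster = "localhost",
      hTableName = "s2graph-demo-htable",
      preSplitSize = 1,
      hTableTTL = None,
      compressionAlgorithm = "gz")
}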
|
jongwook/incubator-s2graph
|
s2core/src/main/scala/org/apache/s2graph/core/mysqls/Service.scala
|
Scala
|
apache-2.0
| 4,733 |
/*
* Copyright (C) 2013 The Mango Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* The code of this project is a port of (or wrapper around) the Guava-libraries.
* See http://code.google.com/p/guava-libraries/
*
* @author Markus Schneider
*/
package org.feijoas.mango.common.cache
import java.util.concurrent.TimeUnit
import scala.collection.Traversable
import scala.concurrent.Future
import org.feijoas.mango.common.cache.CacheLoader._
import org.scalatest._
import com.google.common.{ cache => cgcc }
import com.google.common.cache.{ CacheLoader => GuavaCacheLoader }
import com.google.common.collect.{ ImmutableMap, Lists }
import org.junit.Assert._
import com.google.common.util.concurrent.ListenableFuture
import org.feijoas.mango.common.util.concurrent.Futures._
/**
* Tests for [[CacheLoaderWrapper]]
*
* @author Markus Schneider
* @since 0.7
*/
class CacheLoaderWrapperTest extends FlatSpec with Matchers {
behavior of "CacheLoaderWrapper"
def fixture = new {
val cacheLoader = new CountingCacheLoader
val wrapper: GuavaCacheLoader[Int, Int] = cacheLoader.asJava
}
it should "forward calls to load" in {
val f = fixture
import f._
wrapper.load(5) should be(25)
cacheLoader should be(CountingCacheLoader(1, 0, 0))
}
it should "forward calls to reload" in {
val f = fixture
import f._
wrapper.reload(5, 25).get(100, TimeUnit.MILLISECONDS) should be(25)
cacheLoader should be(CountingCacheLoader(0, 1, 0))
}
it should "forward calls to loadAll" in {
val f = fixture
import f._
wrapper.loadAll(Lists.newArrayList(1, 2, 3)) should be(ImmutableMap.of(1, 1, 2, 4, 3, 9))
cacheLoader should be(CountingCacheLoader(0, 0, 1))
}
it should "forward failed futures" in {
val one = new Object
val e = new Exception
val loader = new CacheLoader[AnyRef, AnyRef]() {
override def load(any: AnyRef) = one
override def reload(key: AnyRef, oldValue: AnyRef) = Future.failed[AnyRef](e)
}
val wrapper: GuavaCacheLoader[AnyRef, AnyRef] = asGuavaCacheLoaderConverter(loader).asJava
val cache: cgcc.LoadingCache[AnyRef, AnyRef] = cgcc.CacheBuilder.newBuilder().recordStats().build(wrapper);
var stats: cgcc.CacheStats = cache.stats()
assertEquals(0, stats.missCount());
assertEquals(0, stats.loadSuccessCount());
assertEquals(0, stats.loadExceptionCount());
assertEquals(0, stats.hitCount());
val key = new Object
assertSame(one, cache.getUnchecked(key))
stats = cache.stats();
assertEquals(1, stats.missCount());
assertEquals(1, stats.loadSuccessCount());
assertEquals(0, stats.loadExceptionCount());
assertEquals(0, stats.hitCount());
cache.refresh(key);
stats = cache.stats();
assertEquals(1, stats.missCount());
assertEquals(1, stats.loadSuccessCount());
assertEquals(1, stats.loadExceptionCount());
assertEquals(0, stats.hitCount());
assertSame(one, cache.getUnchecked(key));
stats = cache.stats();
assertEquals(1, stats.missCount());
assertEquals(1, stats.loadSuccessCount());
assertEquals(1, stats.loadExceptionCount());
assertEquals(1, stats.hitCount());
}
it should "wrap futures" in {
val one = new Object
val e = new Exception
val loader = new cgcc.CacheLoader[AnyRef, AnyRef]() {
override def load(any: AnyRef) = one
override def reload(key: AnyRef, oldValue: AnyRef) = Future.failed(e).asJava
}
val cache: cgcc.LoadingCache[AnyRef, AnyRef] = cgcc.CacheBuilder.newBuilder().recordStats().build(loader);
var stats: cgcc.CacheStats = cache.stats()
assertEquals(0, stats.missCount());
assertEquals(0, stats.loadSuccessCount());
assertEquals(0, stats.loadExceptionCount());
assertEquals(0, stats.hitCount());
val key = new Object
assertSame(one, cache.getUnchecked(key))
stats = cache.stats();
assertEquals(1, stats.missCount());
assertEquals(1, stats.loadSuccessCount());
assertEquals(0, stats.loadExceptionCount());
assertEquals(0, stats.hitCount());
cache.refresh(key);
stats = cache.stats();
assertEquals(1, stats.missCount());
assertEquals(1, stats.loadSuccessCount());
assertEquals(1, stats.loadExceptionCount());
assertEquals(0, stats.hitCount());
assertSame(one, cache.getUnchecked(key));
stats = cache.stats();
assertEquals(1, stats.missCount());
assertEquals(1, stats.loadSuccessCount());
assertEquals(1, stats.loadExceptionCount());
assertEquals(1, stats.hitCount());
}
}
/**
* We need this helper until there is mocking support for Scala
*/
private[mango] case class CountingCacheLoader(var loadCnt: Int = 0, var reloadCnt: Int = 0, var loadAllCnt: Int = 0)
extends CacheLoader[Int, Int] {
import scala.concurrent.ExecutionContext.Implicits.global
override def load(key: Int) = synchronized {
loadCnt = loadCnt + 1
key * key
}
override def reload(key: Int, oldValue: Int) = {
reloadCnt = reloadCnt + 1
Future { oldValue }
}
override def loadAll(keys: Traversable[Int]) = {
loadAllCnt = loadAllCnt + 1
keys.map { (key: Int) => (key, key * key) }.toMap
}
}
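// A minimal usage sketch: `asJava` (brought in by the CacheLoader._ import above)
// adapts a mango CacheLoader to Guava's interface, delegating each call.
private[mango] object CacheLoaderWrapperExample extends App {
  val wrapper: GuavaCacheLoader[Int, Int] = CountingCacheLoader().asJava
  println(wrapper.load(4))                           // 16, via load()
  println(wrapper.loadAll(Lists.newArrayList(1, 2))) // {1=1, 2=4}, via loadAll()
}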
|
feijoas/mango
|
src/test/scala/org/feijoas/mango/common/cache/CacheLoaderWrapperTest.scala
|
Scala
|
apache-2.0
| 5,710 |
package ru.pavkin.todoist.api.suite
import ru.pavkin.todoist.api.core.tags
trait CommandAPISuite extends tags.Syntax {
type CommandResult
type TempIdCommandResult
}
|
vpavkin/scalist
|
core/src/main/scala/ru/pavkin/todoist/api/suite/CommandAPISuite.scala
|
Scala
|
mit
| 171 |
package play.api.data.validation
/**
* A validation error.
*
* @param message the error message
* @param args the error message arguments
*/
case class ValidationError(message: String, args: Any*)
|
michaelahlers/team-awesome-wedding
|
vendor/play-2.2.1/framework/src/play-datacommons/src/main/scala/play/api/data/validation/ValidationError.scala
|
Scala
|
mit
| 204 |
/*
* Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package play.filters.https
import javax.inject.{ Inject, Provider, Singleton }
import play.api.http.HeaderNames._
import play.api.http.Status._
import play.api.inject.{ SimpleModule, bind }
import play.api.mvc._
import play.api.{ Configuration, Environment, Mode }
import play.api.Logger
/**
 * A filter that redirects HTTP requests to HTTPS requests.
 *
 * To enable this filter, please add it to your application.conf file using
* "play.filters.enabled+=play.filters.https.RedirectHttpsFilter"
*
* For documentation on configuring this filter, please see the Play documentation at
* https://www.playframework.com/documentation/latest/RedirectHttpsFilter
*/
@Singleton
class RedirectHttpsFilter @Inject() (config: RedirectHttpsConfiguration) extends EssentialFilter {
import RedirectHttpsKeys._
import config._
private val logger = Logger(getClass)
private[this] lazy val stsHeaders = {
if (!redirectEnabled) Seq.empty
else strictTransportSecurity.toSeq.map(STRICT_TRANSPORT_SECURITY -> _)
}
override def apply(next: EssentialAction): EssentialAction = EssentialAction { req =>
import play.api.libs.streams.Accumulator
import play.core.Execution.Implicits.trampoline
if (req.secure) {
next(req).map(_.withHeaders(stsHeaders: _*))
} else if (redirectEnabled) {
Accumulator.done(Results.Redirect(createHttpsRedirectUrl(req), redirectStatusCode))
} else {
logger.info(s"Not redirecting to HTTPS because $redirectEnabledPath flag is not set.")
next(req)
}
}
protected def createHttpsRedirectUrl(req: RequestHeader): String = {
import req.{ domain, uri }
sslPort match {
case None | Some(443) =>
s"https://$domain$uri"
case Some(port) =>
s"https://$domain:$port$uri"
}
}
}
case class RedirectHttpsConfiguration(
strictTransportSecurity: Option[String] = Some("max-age=31536000; includeSubDomains"),
redirectStatusCode: Int = PERMANENT_REDIRECT,
sslPort: Option[Int] = None, // should match up to ServerConfig.sslPort
redirectEnabled: Boolean = true
) {
@deprecated("Use redirectEnabled && strictTransportSecurity.isDefined", "2.7.0")
def hstsEnabled: Boolean = redirectEnabled && strictTransportSecurity.isDefined
}
private object RedirectHttpsKeys {
val stsPath = "play.filters.https.strictTransportSecurity"
val statusCodePath = "play.filters.https.redirectStatusCode"
val portPath = "play.filters.https.port"
val redirectEnabledPath = "play.filters.https.redirectEnabled"
}
@Singleton
class RedirectHttpsConfigurationProvider @Inject() (c: Configuration, e: Environment)
extends Provider[RedirectHttpsConfiguration] {
import RedirectHttpsKeys._
private val logger = Logger(getClass)
lazy val get: RedirectHttpsConfiguration = {
val strictTransportSecurity = c.get[Option[String]](stsPath)
val redirectStatusCode = c.get[Int](statusCodePath)
if (!isRedirect(redirectStatusCode)) {
throw c.reportError(statusCodePath, s"Status Code $redirectStatusCode is not a Redirect status code!")
}
val port = c.get[Option[Int]](portPath)
val redirectEnabled = c.get[Option[Boolean]](redirectEnabledPath).getOrElse {
if (e.mode != Mode.Prod) {
logger.info(
s"RedirectHttpsFilter is disabled by default except in Prod mode.\\n" +
s"See https://www.playframework.com/documentation/2.6.x/RedirectHttpsFilter"
)
}
e.mode == Mode.Prod
}
RedirectHttpsConfiguration(strictTransportSecurity, redirectStatusCode, port, redirectEnabled)
}
}
class RedirectHttpsModule extends SimpleModule(
bind[RedirectHttpsConfiguration].toProvider[RedirectHttpsConfigurationProvider],
bind[RedirectHttpsFilter].toSelf
)
/**
* The Redirect to HTTPS filter components for compile time dependency injection.
*/
trait RedirectHttpsComponents {
def configuration: Configuration
def environment: Environment
lazy val redirectHttpsConfiguration: RedirectHttpsConfiguration =
new RedirectHttpsConfigurationProvider(configuration, environment).get
lazy val redirectHttpsFilter: RedirectHttpsFilter =
new RedirectHttpsFilter(redirectHttpsConfiguration)
}
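// A standalone sketch of the redirect-URL rule implemented by
// `createHttpsRedirectUrl` above: the port is appended only when it is
// configured and is not the HTTPS default 443.
object RedirectUrlExample extends App {
  def url(domain: String, uri: String, sslPort: Option[Int]): String = sslPort match {
    case None | Some(443) => s"https://$domain$uri"
    case Some(port)       => s"https://$domain:$port$uri"
  }
  println(url("example.com", "/a?b=1", None))       // https://example.com/a?b=1
  println(url("example.com", "/a?b=1", Some(9443))) // https://example.com:9443/a?b=1
}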
|
Shruti9520/playframework
|
framework/src/play-filters-helpers/src/main/scala/play/filters/https/RedirectHttpsFilter.scala
|
Scala
|
apache-2.0
| 4,276 |
/*
* Copyright 2010 Michael Fortin <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this
* file except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed
* under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.brzy.webapp
import org.springframework.mock.web.{MockServletConfig, MockServletContext, MockHttpServletResponse, MockHttpServletRequest}
import org.brzy.webapp.application.{WebAppConfig, WebApp}
import org.scalatest.matchers.ShouldMatchers
import org.scalatest.WordSpec
class BrzyServletSpec extends WordSpec with ShouldMatchers {
"Brzy Servlet" should {
"find action by path" in {
val webapp = WebApp(WebAppConfig.runtime(env="test",defaultConfig="/brzy-webapp.test.b.yml"))
assert(webapp != null)
assert(2 == webapp.controllers.size)
assert(20 == webapp.actions.size)
val context = new MockServletContext
context.setAttribute("application",webapp)
val request = new MockHttpServletRequest(context, "GET", "//users.brzy")
val response = new MockHttpServletResponse()
val servlet = new BrzyServlet()
servlet.init(new MockServletConfig(context))
servlet.service(request,response)
assert(200 == response.getStatus)
}
"find path with params" in {
val webapp = WebApp(WebAppConfig.runtime(env="test",defaultConfig="/brzy-webapp.test.b.yml"))
assert(webapp != null)
assert(2 == webapp.controllers.size)
assert(20 == webapp.actions.size)
val context = new MockServletContext
context.setAttribute("application",webapp)
val request = new MockHttpServletRequest(context, "GET", "//users/10.brzy")
val response = new MockHttpServletResponse()
val servlet = new BrzyServlet()
servlet.init(new MockServletConfig(context))
servlet.service(request,response)
assert(200 == response.getStatus)
}
}
}
|
m410/brzy
|
src/test/scala/org/brzy/webapp/BrzyServletSpec.scala
|
Scala
|
apache-2.0
| 2,276 |
package sangria.macros
import sangria.ast.Document
import sangria.parser.{SyntaxError, QueryParser}
import scala.language.experimental.macros
import scala.reflect.macros.blackbox
object literal {
implicit class LiteralGraphQLStringContext(val sc: StringContext) extends AnyVal {
def graphql(): Document = macro Macro.impl
}
class Macro(context: blackbox.Context) extends {
val c = context
} with MacroAstLiftable {
import c.universe._
def impl() = {
c.prefix.tree match {
// Expects a string interpolation that doesn't contain any
// expressions, thus containing only a single tree
case Apply(_, List(Apply(_, t :: Nil))) =>
val q"${gql: String}" = t
try {
q"${QueryParser.parse(gql.stripMargin).get}"
} catch {
case syntaxError: SyntaxError =>
c.abort(c.enclosingPosition, syntaxError.getMessage)
}
case _ =>
c.abort(c.enclosingPosition, "Invalid `graphql` invocation syntax.")
}
}
}
}
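// A usage sketch, shown as a comment because the macro cannot be invoked from the
// compilation unit that defines it:
//
//   import sangria.macros.literal._
//   val doc: Document = graphql"""{ hero { name } }"""
//
// A malformed literal fails the build via c.abort with the parser's error message.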
|
narahari92/sangria
|
src/main/scala/sangria/macros/literal.scala
|
Scala
|
apache-2.0
| 1,058 |
package ucesoft.cbm.peripheral.rs232
import ucesoft.cbm.peripheral.cia.CIA
import RS232._
import ucesoft.cbm.{Clock, ClockEvent, Log}
abstract class AbstractRS232 extends RS232 with ModemCommandListener {
protected var cia1,cia2 : CIA = _
private[this] var txd,others = 0
private[this] var stop,parity,bits,length = 0
protected var dsr = DSR
protected var cts,dcd,ri = 0
protected var rts,dtr = false
private[this] var rxd = RXD
private[this] var outbuffer = 0
private[this] var bitreceived,bitsent,tmpParity = 0
protected var byteToSend = -1
private[this] var totalByteSent,totalByteReceived = 0
private[this] var configurationString = ""
private[this] var enabled = false
private[this] var statusListener : RS232StatusListener = _
private[this] var baudCycles = 0
private[this] val clk = Clock.systemClock
private[this] var sendState = 0
protected val modem = new Modem(this)
def hangUp : Unit = {}
def commandMode(on:Boolean) = {}
def connectTo(address:String) = {
Log.info(s"RS232 - Connecting to $address")
setConfiguration(address + "," + configurationString)
setEnabled(true)
connect(address)
}
def ring(ringing:Boolean): Unit = {
if (ringing) ri = RI else ri = 0
println("Ringing..")
}
def isEnabled = enabled
def setEnabled(enabled:Boolean) = {
this.enabled = enabled
if (enabled) {
clk.schedule(new ClockEvent("RS232-readCycle",clk.currentCycles + baudCycles,_ => readCycle))
dcd = RS232.DCD
}
else {
clk.cancel("RS232-readCycle")
dcd = 0
disconnect
}
}
def setRS232Listener(l:RS232StatusListener) = statusListener = l
def init : Unit = {}
def reset : Unit = {
disconnect
bitreceived = 0
outbuffer = 0
byteToSend = -1
dcd = 0
sendState = 0
modem.reset
}
override def getProperties = {
properties.setProperty("Total bytes received",totalByteReceived.toString)
properties.setProperty("Total bytes sent",totalByteSent.toString)
properties
}
def setCIA12(cia1:CIA,cia2:CIA) : Unit = {
this.cia2 = cia2
this.cia1 = cia1
}
def setTXD(high:Int) : Unit = {
txd = high
//println(s"TXD: $txd rts=$rts n=$bitreceived buffer=$outbuffer bits=$bits length=$length")
if ((!flowControlEnabled || rts) && bitreceived == 0) {
if (high == 0) { // consumes start bit
//println("TXD: consumed start bit")
bitreceived = 1
if (statusListener != null) statusListener.update(TXD, 1)
}
// else ignore stop bit
}
else
if ((!flowControlEnabled || rts) && bitreceived > 0) {
if (bitreceived < bits + 1) { // ignore parity & stops
outbuffer |= high << (bitreceived - 1)
bitreceived += 1
}
else
if (bitreceived == length - 1) {
bitreceived = 0
totalByteSent += 1
sendOutByte(outbuffer)
//println("OUTBUFFER => " + outbuffer)
if (statusListener != null) statusListener.update(TXD,0)
outbuffer = 0
}
else bitreceived += 1
}
}
protected def sendOutByte(byte:Int) : Unit = {
try {
if (modem.outputStream != null) {
modem.outputStream.write(byte)
//print(byte.toChar)
modem.outputStream.flush
}
}
catch {
case t:Throwable =>
Log.info(s"I/O error while writing from rs-232 ($componentID): " + t)
t.printStackTrace
disconnect
}
}
def getTXD : Int = txd
protected def checkCTS : Unit = {
// auto set cs
cts = if (!flowControlEnabled || rts) CTS else 0
}
def setOthers(value:Int) : Unit = {
others = value
rts = (others & RTS) > 0
dtr = (others & DTR) > 0
checkCTS
//println(s"RTS=$rts DTR=$dtr DCD=$dcd CTS=$cts")
if (statusListener != null) {
statusListener.update(RTS,others & RTS)
statusListener.update(DTR,others & DTR)
}
}
def getOthers : Int = {
rxd | others & RTS | others & DTR | ri | dcd | cts | dsr
}
def getConfiguration = configurationString
/**
* Syntax: <baud>,<bits>,<parity>,<stops>
*
* parity can be:
* n no parity
* e even parity
* o odd parity
* m mark parity
* s space parity
*/
def setConfiguration(conf:String) : Unit = {
configurationString = conf
val parts = conf.split(",")
if (parts.length != 4) throw new IllegalArgumentException("Bad configuration string")
//println(s"BAUD => ${parts(0)} $baudCycles")
bits = parts(1).toInt
stop = parts(3).toInt
modem.setBaud(parts(0).toInt)
baudCycles = math.round(clk.getClockHz / parts(0).toDouble).toInt
if (stop != 0 && stop != 1 && stop != 2) throw new IllegalArgumentException("Stop bits must be 0 or 1 or 2")
parity = parts(2).toUpperCase match {
case "N" => NO_PARITY
case "E" => EVEN_PARITY
case "O" => ODD_PARITY
case "M" => MARK_PARITY
case "S" => SPACE_PARITY
case _ => NO_PARITY
}
length = 1 + bits + stop + (if (parity != NO_PARITY) 1 else 0)
//println(s"RS-232 configured with bits=$bits stop=$stop parity=$parity")
}
protected def connect(address:String) : Unit = {
if (statusListener != null) {
statusListener.setRS232Enabled(true)
statusListener.connectedTo(address)
}
}
protected def disconnect : Unit = {
if (statusListener != null) {
statusListener.setRS232Enabled(false)
statusListener.disconnected
}
modem.commandModeMessage(HayesResultCode.NO_CARRIER)
}
protected def isByteAvailable : Boolean = modem.inputStream.available() > 0
protected def getByte : Int = modem.inputStream.read()
protected def canSend = !flowControlEnabled || (rts && dtr)
protected def sendRXD(rxdHigh:Boolean) : Unit = {}
protected def readCycle: Unit = {
try {
if (isByteAvailable && canSend) {
sendInByte(getByte)
}
else clk.schedule(new ClockEvent("RS232-readCycle",clk.currentCycles + baudCycles,_ => readCycle))
}
catch {
case e:Exception =>
Log.info(s"RS232 - Error while reading from stream: $e")
e.printStackTrace()
disconnect
clk.schedule(new ClockEvent("RS232-readCycle",clk.currentCycles + baudCycles,_ => readCycle))
}
}
protected def sendInByte(byte:Int) : Unit = {
totalByteReceived += 1
byteToSend = byte
sendState = 0
// send start bit
rxd = 0
cia2.setFlagLow
//sendRXD(false)
bitsent = 1
tmpParity = 0
//println(s"Sent start bit ($byteToSend) ${byteToSend.toChar}")
clk.schedule(new ClockEvent("RS232-in",clk.currentCycles + baudCycles,_ => sendBit))
if (statusListener != null) statusListener.update(RXD,1)
}
protected def stopin = stop
protected def sendBit: Unit = {
val scheduleNextBit = sendState match {
case 0 => // DATA BITS
if ((byteToSend & 1) > 0) rxd = RXD else rxd = 0
//println(s"Sent ($bitsent/$byteToSend)" + (if (rxd > 0) "1" else "0"))
byteToSend >>= 1
if (rxd > 0) tmpParity ^= 1
bitsent += 1
if (bitsent == bits + 1) { // DATA BITS FINISHED
sendState += 1
if (parity == NO_PARITY) {
if (stopin == 0) sendState = 4
else sendState += 1
} // skip parity
}
if (rxd == 0) cia2.setFlagLow
sendRXD(rxd > 0)
true
case 1 => // PARITY
rxd = parity match {
case ODD_PARITY => if (tmpParity == 0) RXD else 0
case EVEN_PARITY => if (tmpParity == 1) RXD else 0
case MARK_PARITY => RXD
case SPACE_PARITY => 0
}
if (stopin == 0) sendState = 4
else sendState += 1
if (rxd == 0) cia2.setFlagLow
sendRXD(rxd > 0)
true
case 2 => // STOP #1
rxd = RXD
sendState += 1
if (stop == 1) sendState += 1
sendRXD(rxd > 0)
//println("Sent stop #1")
true
case 3 => // STOP #2
rxd = RXD
sendState += 1
sendRXD(rxd > 0)
//println("Sent stop #2")
true
case 4 => // END
if (statusListener != null) statusListener.update(RXD,0)
false
}
if (scheduleNextBit) clk.schedule(new ClockEvent("RS232-in",clk.currentCycles + baudCycles,_ => sendBit))
else clk.schedule(new ClockEvent("RS232-readCycle",clk.currentCycles + baudCycles,_ => readCycle))
}
def connectionInfo = getConfiguration
}
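// A standalone sketch of the configuration-string format documented on
// `setConfiguration` above: "<baud>,<bits>,<parity>,<stops>".
object RS232ConfigExample extends App {
  val Array(baud, bits, parity, stops) = "2400,8,N,1".split(",")
  println(s"baud=$baud bits=$bits parity=$parity stops=$stops") // 2400 baud, 8N1
}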
|
abbruzze/kernal64
|
Kernal64/src/ucesoft/cbm/peripheral/rs232/AbstractRS232.scala
|
Scala
|
mit
| 8,497 |
package org.raisercostin.jedi
import java.io.InputStream
import java.time.Clock
import java.time.LocalDate
abstract case class CacheEntry(cache: NavigableFileInOutLocation) {
def cacheIt: Unit
}
trait CacheConfig {
def cacheFor(src: InputLocation): CacheEntry
}
object DefaultCacheConfig extends TimeSensitiveEtagCachedEntry(Locations.temp.child("default-cache"))
case class EtagCacheConfig(cacheFolder: NavigableFileInOutLocation) extends CacheConfig {
def cacheFor(origin: InputLocation): CacheEntry = new CacheEntry(cacheFolder.mkdirIfNecessary.child(origin.slug).withBaseName(x => x + "--etag-" + origin.etag)) {
def cacheIt: Unit = {
      //since the name is computed based on the etag, checking that the cache file exists is enough
//TODO maybe we should start to delete equivalent files that are older
if (!cache.exists)
cache.copyFrom(origin)
}
}
}
case class TimeSensitiveCachedEntry(cacheFolder: NavigableFileInOutLocation) extends CacheConfig {
def cacheFor(origin: InputLocation): CacheEntry = new CacheEntry(cacheFolder.mkdirIfNecessary.child(origin.slug).withBaseName(x => x + "--date-" + LocalDate.now())) {
def cacheIt: Unit = {
if (!cache.exists)
cache.copyFrom(origin)
}
}
}
/** Use the etag if UrlLocation returns a non-empty one, otherwise use the date. */
case class TimeSensitiveEtagCachedEntry(cacheFolder: NavigableFileInOutLocation) extends CacheConfig {
def cacheFor(origin: InputLocation): CacheEntry = new CacheEntry(cacheFolder.mkdirIfNecessary.child(origin.slug).withBaseName { x =>
x +
(if (origin.etag.isEmpty())
"--date-" + LocalDate.now()
else
"--etag-" + origin.etag)
}) {
def cacheIt: Unit = {
if (!cache.exists)
cache.copyFrom(origin)
}
}
}
//TODO CachedLocation when printed should show the temporary file
case class CachedLocation[O <: InputLocation](cacheConfig: CacheConfig, origin: O) extends FileLocation { self =>
private lazy val cacheEntry: CacheEntry = cacheConfig.cacheFor(origin)
def cache = cacheEntry.cache
//def cache: InOutLocation = cacheConfig.cacheFor(origin)
override def build(path: String): self.type = origin match {
case n: NavigableLocation =>
CachedLocation(cacheConfig, n.build(path))
case _ =>
//TODO bug since origin is not used?
FileLocation(path)
}
override def childName(child: String): String = toPath.resolve(checkedChild(child)).toFile.getAbsolutePath
//override def withAppend: Repr = self.copy(append = true)
override def unsafeToInputStream: InputStream = {
flush
super.unsafeToInputStream
}
/**Force caching.*/
//TODO as async
def flush: this.type = {
cacheEntry.cacheIt
this
}
override def append: Boolean = cache.append
def fileFullPath: String = cache.nameAndBefore
def withAppend = ??? //this.copy(cache = cache.withAppend)
}
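// A standalone sketch of the cache naming used by TimeSensitiveEtagCachedEntry
// above: prefer the etag when present, otherwise fall back to today's date.
object CacheNameExample extends App {
  import java.time.LocalDate
  def baseName(slug: String, etag: String): String =
    slug + (if (etag.isEmpty) "--date-" + LocalDate.now() else "--etag-" + etag)
  println(baseName("report.csv", ""))       // report.csv--date-<today>
  println(baseName("report.csv", "abc123")) // report.csv--etag-abc123
}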
|
raisercostin/jedi-io
|
src/main/scala/org/raisercostin/jedi/CachedLocation.scala
|
Scala
|
apache-2.0
| 2,887 |
package io.github.mandar2812.dynaml.graph.utils
import com.tinkerpop.frames.{EdgeFrame, InVertex, OutVertex, Property}
/**
* Defines the [[EdgeFrame]] for the
* edges going out from the input data
* node.
*/
trait CausalEdge extends EdgeFrame {
@OutVertex
def getPoint(): Point
@InVertex
def getLabel(): Label
@Property("relation")
def getRelation(): String
@Property("relation")
def setRelation(value: String): Unit
}
|
transcendent-ai-labs/DynaML
|
dynaml-core/src/main/scala/io/github/mandar2812/dynaml/graph/utils/CausalEdge.scala
|
Scala
|
apache-2.0
| 444 |
// Project: sdocx
// Module:
// Description:
// Copyright (c) 2015 Johannes Kastner <[email protected]>
// Distributed under the MIT license.
package biz.enef.sdocx.opc
import java.io.Writer
case class Relationship(id: String, tpe: String, target: String) extends XMLSerializable {
final def write(w: Writer): Unit =
w.write(s"""<Relationship Id="$id" Type="$tpe" Target="$target"/>""")
}
case class RelationshipsProducer(name: String, relationships: Iterable[Relationship]) extends XmlPartProducer
with XMLSerializable {
def root = this
final def write(w: Writer): Unit = {
w.write("""<Relationships xmlns="http://schemas.openxmlformats.org/package/2006/relationships">""")
relationships.foreach(_.write(w))
w.write("</Relationships>")
}
}
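// A minimal sketch rendering one relationship; the Type URI shown is the standard
// OPC relationship type for the main document part, used here only as sample data.
object RelationshipsExample extends App {
  val w = new java.io.StringWriter
  val rel = Relationship("rId1",
    "http://schemas.openxmlformats.org/officeDocument/2006/relationships/officeDocument",
    "word/document.xml")
  RelationshipsProducer("rels", Seq(rel)).write(w)
  println(w) // <Relationships ...><Relationship Id="rId1" .../></Relationships>
}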
|
jokade/sdocx
|
src/main/scala/biz/enef/sdocx/opc/relationships.scala
|
Scala
|
mit
| 803 |
package scala
package collection.immutable
import org.scalacheck._
class RangeProperties extends Properties("immutable.Range") {
import Prop._
import RangeProperties._
property("indexOf") = forAll { (r: Range, i: Int) =>
r.indexOf(i) == r.toVector.indexOf(i)
}
property("indexOf with start") = forAll { (r: Range, start: Int, i: Int) =>
r.indexOf(i, start) == r.toVector.indexOf(i, start)
}
property("lastIndexOf") = forAll { (r: Range, i: Int) =>
r.lastIndexOf(i) == r.toVector.lastIndexOf(i)
}
property("lastIndexOf with end") = forAll { (r: Range, end: Int, i: Int) =>
r.lastIndexOf(i, end) == r.toVector.lastIndexOf(i, end)
}
property("sorted") = forAll { (r: Range) =>
r.sorted.toVector == r.toVector.sorted
}
property("sorted backwards") = forAll { (r: Range) =>
r.sorted(Ordering.Int.reverse).toVector == r.toVector.sorted(Ordering.Int.reverse)
}
property("sameElements") = forAll { (r: Range, s: Range) =>
r.sameElements(s) == r.toVector.sameElements(s.toVector)
}
property("sameElements reflexive") = forAll { (r: Range) =>
r sameElements r
}
property("sameElements neg init") = forAll { (r: Range) =>
!r.isEmpty ==> !r.sameElements(r.init)
}
property("sameElements neg tail") = forAll { (r: Range) =>
!r.isEmpty ==> !r.sameElements(r.tail)
}
property("sameElements neg empty/non-empty") = forAll { (r1: Range, r2: Range) =>
(r1.isEmpty != r2.isEmpty) ==> !r1.sameElements(r2)
}
property("sameElements with different clusivity") = forAll { (r: Range) =>
!r.isEmpty ==> {
val oneFurther = if (r.step > 0) 1 else -1
if (r.isInclusive) r.sameElements(r.start until (r.end + oneFurther) by r.step)
else r.sameElements(r.start to (r.end - oneFurther) by r.step)
}
}
}
object RangeProperties {
final val MinInt = -128
final val MaxInt = 127
implicit val arbitraryRange: Arbitrary[Range] = Arbitrary(for {
start <- Gen.choose(MinInt, MaxInt)
step <- Gen.choose(MinInt, MaxInt) filter (_ != 0)
end <- Gen.choose(MinInt, MaxInt)
incl <- Gen.oneOf(true, false)
r = (if (incl) start to end else start until end) by step
} yield r)
}
|
lrytz/scala
|
test/scalacheck/scala/collection/immutable/RangeProperties.scala
|
Scala
|
apache-2.0
| 2,208 |
/**
* Copyright 2009 Jorge Ortiz
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**/
package com.github.nscala_time.time
import org.joda.time._
import com.github.nscala_time.PimpedType
class RichReadablePeriod(val underlying: ReadablePeriod) extends Super with PimpedType[ReadablePeriod] {
def periodType: PeriodType = underlying.getPeriodType
}
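// A minimal usage sketch: nscala-time normally applies this wrapper through an
// implicit conversion defined elsewhere; constructing it directly shows the result.
object RichReadablePeriodExample extends App {
  import org.joda.time.Period
  println(new RichReadablePeriod(Period.days(2)).periodType) // e.g. PeriodType[Days]
}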
|
tkawachi/nscala-time
|
src/main/scala/com/github/nscala_time/time/RichReadablePeriod.scala
|
Scala
|
apache-2.0
| 865 |
/*
* SufficientStatisticsSemiring.scala
* Sum and product operations defined for sufficient statistics according to a semiring algebraic structure.
*
* Created By: Michael Howard ([email protected])
* Creation Date: Jun 6, 2013
*
* Copyright 2013 Avrom J. Pfeffer and Charles River Analytics, Inc.
* See http://www.cra.com or email [email protected] for information.
*
* See http://www.github.com/p2t2/figaro for a copy of the software license.
*/
package com.cra.figaro.algorithm.factored
import com.cra.figaro.language._
import scala.collection._
import scala.collection.mutable.{ Set, Map }
/**
* Sum and product operations defined for sufficient statistics.
* Statistics consist of a probability and counts of the number of times various values have been seen.
*
* @param parameterMap Map of parameters to their sufficient statistics. Expectation
* Maximization determines the parameterMap automatically from the parameters.
*/
class SufficientStatisticsSemiring(parameterMap: immutable.Map[Parameter[_], Seq[Double]])
extends Semiring[(Double, mutable.Map[Parameter[_], Seq[Double]])] {
/**
* 0 probability and a vector of zeros for all parameters. The vector for a parameter
* must be of length equal to number of possible observations of the parameter
*/
val zero = (0.0, mutable.Map(parameterMap.toSeq: _*))
/**
* 1 probability and a vector of zeros for all parameters. The vector for a parameter
* must be of length equal to number of possible observations of the parameter
*/
val one = (1.0, mutable.Map(parameterMap.toSeq: _*))
/**
* Probabilities are multiplied using standard multiplication.
* Sufficient statistics for each parameter are summed together.
*/
def product(xVector: (Double, Map[Parameter[_], Seq[Double]]), yVector: (Double, Map[Parameter[_], Seq[Double]])): (Double, Map[Parameter[_], Seq[Double]]) = {
(simpleProduct(xVector._1, yVector._1), mapProduct(xVector, yVector))
}
private def componentProduct(xVector: Seq[Double], yVector: Seq[Double]): Seq[Double] = {
require(xVector.size == yVector.size)
(for ((x, y) <- xVector zip yVector) yield x * y)
}
/**
* Probabilities are added using standard addition.
* Sufficient statistics for each parameter are weighted by their respective probabilities and summed together,
* then divided by the sum of both probabilities.
*/
def sum(xVector: (Double, Map[Parameter[_], Seq[Double]]), yVector: (Double, Map[Parameter[_], Seq[Double]])): (Double, Map[Parameter[_], Seq[Double]]) = {
(simpleSum(xVector._1, yVector._1), mapSum(xVector, yVector))
}
private def mapSum(xVector: (Double, Map[Parameter[_], Seq[Double]]), yVector: (Double, Map[Parameter[_], Seq[Double]])): Map[Parameter[_], Seq[Double]] = {
require(xVector._2.size == yVector._2.size)
val result: Map[Parameter[_], Seq[Double]] = Map()
for (x <- xVector._2.keys) {
result += x -> weightedComponentSum(xVector._2(x), yVector._2(x), xVector._1, yVector._1)
}
result
}
private def mapProduct(xVector: (Double, Map[Parameter[_], Seq[Double]]), yVector: (Double, Map[Parameter[_], Seq[Double]])): Map[Parameter[_], Seq[Double]] = {
val result: Map[Parameter[_], Seq[Double]] = Map()
require(xVector._2.size == yVector._2.size)
for (x <- xVector._2.keys) {
result += x -> simpleComponentSum(xVector._2(x), yVector._2(x))
}
result
}
private def simpleComponentSum(xVector: Seq[Double], yVector: Seq[Double]): Seq[Double] = {
require(xVector.size == yVector.size)
(for ((x, y) <- xVector zip yVector) yield x + y)
}
private def weightedComponentSum(xVector: Seq[Double], yVector: Seq[Double], xProb: Double, yProb: Double): Seq[Double] = {
require(xVector.size == yVector.size)
val divisor = xProb + yProb
if (divisor > 0) {
(for ((x, y) <- xVector zip yVector) yield (xProb * x + yProb * y) / divisor)
} else {
(for ((x, y) <- xVector zip yVector) yield 0.0)
}
}
/*
* Usual multiplication.
*/
private def simpleProduct(x: Double, y: Double): Double = {
x * y
}
/*
* Usual addition.
*/
private def simpleSum(x: Double, y: Double): Double = {
x + y
}
}
object SufficientStatisticsSemiring
{
def apply(parameterMap : immutable.Map[Parameter[_], Seq[Double]]) = new SufficientStatisticsSemiring(parameterMap)
}
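// A standalone sketch of the arithmetic in `weightedComponentSum` above: statistics
// are weighted by their probabilities and normalized by the total probability.
object WeightedSumExample extends App {
  val (xProb, yProb) = (0.25, 0.75)
  val (x, y) = (Seq(1.0, 0.0), Seq(0.0, 1.0))
  val combined = (x zip y).map { case (a, b) => (xProb * a + yProb * b) / (xProb + yProb) }
  println(combined) // List(0.25, 0.75)
}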
|
bruttenberg/figaro
|
Figaro/src/main/scala/com/cra/figaro/algorithm/factored/SufficientStatisticsSemiring.scala
|
Scala
|
bsd-3-clause
| 4,415 |
/**
* Copyright 2015 Peter Nerg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.dmonix.akka.persistence
import akka.testkit.TestKit
import akka.actor.ActorSystem
import org.scalatest.Matchers
import akka.testkit.ImplicitSender
import org.scalatest.BeforeAndAfterAll
import org.scalatest.WordSpecLike
import scala.concurrent.duration._
import akka.testkit.TestProbe
import akka.actor.PoisonPill
/**
* @author Peter Nerg
*/
class ExampleSuite extends TestKit(ActorSystem("ExampleSuite", PersistenceSuiteTrait.config))
with ImplicitSender
with WordSpecLike
with Matchers
with BeforeAndAfterAll {
// Makes sure that the actor system is shut down.
override def afterAll() { system.terminate }
def createAccount(name: String) = system.actorOf(AccountActor.props(name), name+"-"+System.currentTimeMillis())
"ExampleSuite" should {
"Create and use account" in new ExampleSuite {
val probe = TestProbe()
val account = createAccount("Peter")
//simulate a few transaction
account ! Deposit(100)
expectMsg(Balance(100))
account ! Deposit(50)
expectMsg(Balance(150))
//make a snapshot with the above transactions
account ! Snap
//do some more transactions
account ! Deposit(200)
expectMsg(Balance(350))
account ! Withdraw(100)
expectMsg(Balance(250))
//kill the actor/account
probe watch account
account ! PoisonPill
probe.expectTerminated(account)
//resurrect the actor/account and verify its balance
val resurrected = createAccount("Peter")
resurrected ! Balance
expectMsg(Balance(250))
}
}
}
|
pnerg/akka-persistence-mock
|
src/test/scala/org/dmonix/akka/persistence/ExampleSuite.scala
|
Scala
|
apache-2.0
| 2,212 |
package es.uvigo.ei.sing.sds
package searcher
import scala.concurrent.Future
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import entity._
import service.ABNERService
final class ABNERSearcher extends SearcherAdapter {
lazy val abner = new ABNERService
override def search(query: String): Future[Set[Keyword.ID]] =
for {
entities <- abner.getEntities(query)
normalized <- Future.successful { entities map (_.txt.toLowerCase) }
keywordIds <- searchNormalized(normalized)
} yield keywordIds
}
|
agjacome/smart-drug-search
|
src/main/scala/searcher/ABNERSearcher.scala
|
Scala
|
mit
| 551 |
/*
* The MIT License (MIT)
*
* Copyright (c) 2015 Enterprise Data Management Council
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
///*
// * The MIT License (MIT)
// *
// * Copyright (c) 2015 Enterprise Data Management Council
// *
// * Permission is hereby granted, free of charge, to any person obtaining a copy
// * of this software and associated documentation files (the "Software"), to deal
// * in the Software without restriction, including without limitation the rights
// * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// * copies of the Software, and to permit persons to whom the Software is
// * furnished to do so, subject to the following conditions:
// *
// * The above copyright notice and this permission notice shall be included in all
// * copies or substantial portions of the Software.
// *
// * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// * SOFTWARE.
// */
//package org.edmcouncil.serializer
//
//import org.edmcouncil.util.PotentialFile
//
///**
// * Test the XmlSorter
// *
// * TODO: This test is NOT complete!!
// */
//class XmlSorterSpec extends UnitSpec {
//
// "An XmlSorter" must {
//
// "sort an XML file (this test is not finished)" in {
//
// suppressOutput {
//
// val file = PotentialFile("src/test/resources/wine.rdf")
// val sorter = RdfXmlSorter(file.path.get)
//
// sorter.printIt()
// }
// }
// }
//}
|
edmcouncil/rdf-serializer
|
src/test/scala/org/edmcouncil/rdf_toolkit/owlapi_serializer/XmlSorterSpec.scala
|
Scala
|
mit
| 2,883 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.fp
import slamdata.Predef._
import quasar.contrib.matryoshka.UnionWidth
import matryoshka.Delay
import org.scalacheck._
import iotaz.{TListK, CopK, TNilK}
import iotaz.TListK.:::
import quasar.contrib.iota.mkInject
sealed trait ArbitraryKMaterializer[LL <: TListK] {
def materialize(offset: Int): Delay[Arbitrary, CopK[LL, ?]]
}
object ArbitraryKMaterializer {
@SuppressWarnings(Array("org.wartremover.warts.AsInstanceOf"))
implicit def base[F[_]](
implicit
FA: Delay[Arbitrary, F]
): ArbitraryKMaterializer[F ::: TNilK] = new ArbitraryKMaterializer[F ::: TNilK] {
override def materialize(offset: Int): Delay[Arbitrary, CopK[F ::: TNilK, ?]] = {
val I = mkInject[F, F ::: TNilK](offset)
new Delay[Arbitrary, CopK[F ::: TNilK, ?]] {
override def apply[A](arb: Arbitrary[A]): Arbitrary[CopK[F ::: TNilK, A]] = {
Arbitrary(Gen.frequency(
(1, FA(arb).arbitrary.map(I(_)))
))
}
}
}
}
@SuppressWarnings(Array("org.wartremover.warts.AsInstanceOf"))
implicit def induct[F[_], LL <: TListK](
implicit
FA: Delay[Arbitrary, F],
LW: UnionWidth[CopK[LL, ?]],
LL: ArbitraryKMaterializer[LL]
): ArbitraryKMaterializer[F ::: LL] = new ArbitraryKMaterializer[F ::: LL] {
override def materialize(offset: Int): Delay[Arbitrary, CopK[F ::: LL, ?]] = {
val I = mkInject[F, F ::: LL](offset)
new Delay[Arbitrary, CopK[F ::: LL, ?]] {
override def apply[A](arb: Arbitrary[A]): Arbitrary[CopK[F ::: LL, A]] = {
Arbitrary(Gen.frequency(
(1, FA(arb).arbitrary.map(I(_))),
(LW.width, LL.materialize(offset + 1)(arb).arbitrary.asInstanceOf[Gen[CopK[F ::: LL, A]]])
))
}
}
}
}
}
|
slamdata/slamengine
|
foundation/src/test/scala/quasar/fp/ArbitraryKMaterializer.scala
|
Scala
|
apache-2.0
| 2,380 |
package play
import sbt.{ Project => SbtProject, Settings => SbtSettings, _ }
import sbt.Keys._
import play.console.Colors
import Keys._
import java.lang.{ ProcessBuilder => JProcessBuilder }
import sbt.complete.Parsers._
trait PlayCommands extends PlayAssetsCompiler with PlayEclipse with PlayInternalKeys {
this: PlayReloader =>
//- mainly scala, mainly java or none
val JAVA = "java"
val SCALA = "scala"
val NONE = "none"
val playCopyAssets = TaskKey[Seq[(File, File)]]("play-copy-assets")
val playCopyAssetsTask = (baseDirectory, managedResources in Compile, resourceManaged in Compile, playAssetsDirectories, playExternalAssets, classDirectory in Compile, cacheDirectory, streams, state) map { (b, resources, resourcesDirectories, r, externals, t, c, s, state) =>
val cacheFile = c / "copy-assets"
val mappings = (r.map(d => (d ***) --- (d ** HiddenFileFilter ***)).foldLeft(PathFinder.empty)(_ +++ _).filter(_.isFile) x relativeTo(b +: r.filterNot(_.getAbsolutePath.startsWith(b.getAbsolutePath))) map {
case (origin, name) => (origin, new java.io.File(t, name))
}) ++ (resources x rebase(resourcesDirectories, t))
val externalMappings = externals.map {
case (root, paths, common) => {
paths(root) x relativeTo(root :: Nil) map {
case (origin, name) => (origin, new java.io.File(t, common + "/" + name))
}
}
}.foldLeft(Seq.empty[(java.io.File, java.io.File)])(_ ++ _)
val assetsMapping = mappings ++ externalMappings
s.log.debug("Copy play resource mappings: " + assetsMapping.mkString("\\n\\t", "\\n\\t", ""))
Sync(cacheFile)(assetsMapping)
assetsMapping
}
//- test reporter
protected lazy val testListener = new PlayTestListener
val testResultReporter = TaskKey[List[String]]("test-result-reporter")
val testResultReporterTask = (state, thisProjectRef) map { (s, r) =>
testListener.result.toList
}
val testResultReporterReset = TaskKey[Unit]("test-result-reporter-reset")
val testResultReporterResetTask = (state, thisProjectRef) map { (s, r) =>
testListener.result.clear
}
val playReloadTask = (playCopyAssets, playCompileEverything) map { (_, analysises) =>
analysises.reduceLeft(_ ++ _)
}
def intellijCommandSettings = {
import org.sbtidea.SbtIdeaPlugin
// This stuff is all private in the IDEA plugin, so let's copy it here
val WithSources = "with-sources=yes"
val NoSources = "no-sources"
val NoClassifiers = "no-classifiers"
val SbtClassifiers = "sbt-classifiers"
val NoFsc = "no-fsc"
val NoTypeHighlighting = "no-type-highlighting"
val NoSbtBuildModule = "no-sbt-build-module"
val args = (Space ~> NoClassifiers | Space ~> SbtClassifiers | Space ~> NoFsc | Space ~> NoTypeHighlighting | Space ~> NoSbtBuildModule | Space ~> WithSources | Space ~> NoSources).*
SbtIdeaPlugin.settings ++ Seq(
commands += Command("idea")(_ => args) { (state, args) =>
// Firstly, attempt to compile the project, but ignore the result
SbtProject.runTask(compile in Compile, state)
SbtIdeaPlugin.doCommand(state, if (!args.contains(WithSources) && !(args.contains(NoSources) || args.contains(NoClassifiers))) {
args :+ NoClassifiers
} else {
args
})
}
)
}
// ----- Post compile (need to be refactored and fully configurable)
def PostCompile(scope: Configuration) = (sourceDirectory in scope, dependencyClasspath in scope, compile in scope, javaSource in scope, sourceManaged in scope, classDirectory in scope, cacheDirectory in scope) map { (src, deps, analysis, javaSrc, srcManaged, classes, cacheDir) =>
val classpath = (deps.map(_.data.getAbsolutePath).toArray :+ classes.getAbsolutePath).mkString(java.io.File.pathSeparator)
val timestampFile = cacheDir / "play_instrumentation"
val lastEnhanced = if (timestampFile.exists) IO.read(timestampFile).toLong else Long.MinValue
val javaClasses = (javaSrc ** "*.java").get flatMap { sourceFile =>
// PropertiesEnhancer is class-local, so no need to check outside the class.
if (analysis.apis.internal(sourceFile).compilation.startTime > lastEnhanced)
analysis.relations.products(sourceFile)
else
Nil
}
val templateClasses = (srcManaged ** "*.template.scala").get flatMap { sourceFile =>
if (analysis.apis.internal(sourceFile).compilation.startTime > lastEnhanced)
analysis.relations.products(sourceFile)
else
Nil
}
javaClasses.foreach(play.core.enhancers.PropertiesEnhancer.generateAccessors(classpath, _))
javaClasses.foreach(play.core.enhancers.PropertiesEnhancer.rewriteAccess(classpath, _))
templateClasses.foreach(play.core.enhancers.PropertiesEnhancer.rewriteAccess(classpath, _))
IO.write(timestampFile, System.currentTimeMillis.toString)
// EBean
if (classpath.contains("play-java-ebean")) {
val originalContextClassLoader = Thread.currentThread.getContextClassLoader
try {
val cp = deps.map(_.data.toURI.toURL).toArray :+ classes.toURI.toURL
Thread.currentThread.setContextClassLoader(new java.net.URLClassLoader(cp, ClassLoader.getSystemClassLoader))
import com.avaje.ebean.enhance.agent._
import com.avaje.ebean.enhance.ant._
import collection.JavaConverters._
import com.typesafe.config._
val cl = ClassLoader.getSystemClassLoader
val t = new Transformer(cp, "debug=-1")
val ft = new OfflineFileTransform(t, cl, classes.getAbsolutePath, classes.getAbsolutePath)
lazy val file = {
Option(System.getProperty("config.file")).map(f => new File(f)).getOrElse(new File("conf/application.conf"))
}
val config = Option(System.getProperty("config.resource"))
.map(ConfigFactory.parseResources(_)).getOrElse(ConfigFactory.parseFileAnySyntax(file))
val models = try {
config.getConfig("ebean").entrySet.asScala.map(_.getValue.unwrapped).toSet.mkString(",")
} catch { case e: ConfigException.Missing => "models.*" }
try {
ft.process(models)
} catch {
case _: Throwable =>
}
} finally {
Thread.currentThread.setContextClassLoader(originalContextClassLoader)
}
}
// Copy managed classes - only needed in Compile scope
if (scope.name.toLowerCase == "compile") {
val managedClassesDirectory = classes.getParentFile / (classes.getName + "_managed")
val managedClasses = ((srcManaged ** "*.scala").get ++ (srcManaged ** "*.java").get).map { managedSourceFile =>
analysis.relations.products(managedSourceFile)
}.flatten x rebase(classes, managedClassesDirectory)
// Copy modified class files
val managedSet = IO.copy(managedClasses)
// Remove deleted class files
(managedClassesDirectory ** "*.class").get.filterNot(managedSet.contains(_)).foreach(_.delete())
}
analysis
}
// ----- Play prompt
val playPrompt = { state: State =>
val extracted = SbtProject.extract(state)
import extracted._
(name in currentRef get structure.data).map { name =>
"[" + Colors.cyan(name) + "] $ "
}.getOrElse("> ")
}
// ----- Play commands
private def fork(args: Seq[String]) = {
val builder = new JProcessBuilder(args: _*)
Process(builder).run(JvmIO(new JvmLogger(), false))
}
val shCommand = Command.args("sh", "<shell command>") { (state: State, args: Seq[String]) =>
if (args.isEmpty)
println("sh <command to run>")
else
fork(args)
state
}
// -- Utility methods for 0.10-> 0.11 migration
def inAllDeps[T](base: ProjectRef, deps: ProjectRef => Seq[ProjectRef], key: SettingKey[T], data: SbtSettings[Scope]): Seq[T] =
inAllProjects(Dag.topologicalSort(base)(deps), key, data)
def inAllProjects[T](allProjects: Seq[Reference], key: SettingKey[T], data: SbtSettings[Scope]): Seq[T] =
allProjects.flatMap { p => key in p get data }
def inAllDependencies[T](base: ProjectRef, key: SettingKey[T], structure: Load.BuildStructure): Seq[T] = {
def deps(ref: ProjectRef): Seq[ProjectRef] =
SbtProject.getProject(ref, structure).toList.flatMap { p =>
p.dependencies.map(_.project) ++ p.aggregate
}
inAllDeps(base, deps, key, structure.data)
}
private[this] var commonClassLoader: ClassLoader = _
val playCommonClassloaderTask = (dependencyClasspath in Compile) map { classpath =>
lazy val commonJars: PartialFunction[java.io.File, java.net.URL] = {
case jar if jar.getName.startsWith("h2-") || jar.getName == "h2.jar" => jar.toURI.toURL
}
if (commonClassLoader == null) {
commonClassLoader = new java.net.URLClassLoader(classpath.map(_.data).collect(commonJars).toArray, null /* important here, don't depend of the sbt classLoader! */ ) {
override def toString = "Common ClassLoader: " + getURLs.map(_.toString).mkString(",")
}
}
commonClassLoader
}
val playCompileEverythingTask = (state, thisProjectRef) flatMap { (s, r) =>
inAllDependencies(r, (compile in Compile).task, SbtProject structure s).join
}
val buildRequireTask = (copyResources in Compile, crossTarget, requireJs, requireJsFolder, requireJsShim, requireNativePath, streams) map { (cr, crossTarget, requireJs, requireJsFolder, requireJsShim, requireNativePath, s) =>
val buildDescName = "app.build.js"
val jsFolder = if (!requireJsFolder.isEmpty) { requireJsFolder } else "javascripts"
val rjoldDir = crossTarget / "classes" / "public" / jsFolder
val buildDesc = crossTarget / "classes" / "public" / buildDescName
    if (!requireJs.isEmpty) {
val rjnewDir = new File(rjoldDir.getAbsolutePath + "-min")
//cleanup previous version
IO.delete(rjnewDir)
val relativeModulePath = (str: String) => str.replace(".js", "")
val shim = if (!requireJsShim.isEmpty) { """mainConfigFile: """" + jsFolder + """/""" + requireJsShim + """", """ } else { "" };
val content = """({appDir: """" + jsFolder + """",
baseUrl: ".",
dir:"""" + rjnewDir.getName + """", """ +
shim +
"""modules: [""" + requireJs.map(f => "{name: \\"" + relativeModulePath(f) + "\\"}").mkString(",") + """]})""".stripMargin
IO.write(buildDesc, content)
//run requireJS
s.log.info("RequireJS optimization has begun...")
s.log.info(buildDescName + ":")
s.log.info(content)
try {
requireNativePath.map(nativePath =>
println(play.core.jscompile.JavascriptCompiler.executeNativeCompiler(nativePath + " -o " + buildDesc.getAbsolutePath, buildDesc))
).getOrElse {
play.core.jscompile.JavascriptCompiler.require(buildDesc)
}
s.log.info("RequireJS optimization finished.")
} catch {
case ex: Exception =>
s.log.error("RequireJS optimization has failed...")
throw ex
}
//clean-up
IO.delete(buildDesc)
}
cr
}
val playCommand = Command.command("play", Help("play", ("play", "Enter the play console"), "Welcome to Play " + play.core.PlayVersion.current + """!
|
|These commands are available:
|-----------------------------
|classpath Display the project classpath.
|clean Clean all generated files.
|compile Compile the current application.
|console Launch the interactive Scala console (use :quit to exit).
|dependencies Display the dependencies summary.
|dist Construct standalone application package.
|exit Exit the console.
|h2-browser Launch the H2 Web browser.
    |license Display licensing information.
|package Package your application as a JAR.
|play-version Display the Play version.
|publish Publish your application in a remote repository.
|publish-local Publish your application in the local repository.
|reload Reload the current application build file.
|run <port> Run the current application in DEV mode.
    |test Run JUnit tests and/or Specs from the command line
|eclipse generate eclipse project file
|idea generate Intellij IDEA project file
|sh <command to run> execute a shell command
|start <port> Start the current application in another JVM in PROD mode.
|update Update application dependencies.
|
|Type `help` to get the standard sbt help.
|""".stripMargin)) { state: State =>
val extracted = SbtProject.extract(state)
import extracted._
// Display logo
println(play.console.Console.logo)
println("""
|> Type "help play" or "license" for more information.
|> Type "exit" or use Ctrl+D to leave this console.
|""".stripMargin)
state.copy(
remainingCommands = state.remainingCommands :+ "shell")
}
val h2Command = Command.command("h2-browser") { state: State =>
try {
val commonLoader = SbtProject.runTask(playCommonClassloader, state).get._2.toEither.right.get
val h2ServerClass = commonLoader.loadClass(classOf[org.h2.tools.Server].getName)
h2ServerClass.getMethod("main", classOf[Array[String]]).invoke(null, Array.empty[String])
} catch {
case e: Exception => e.printStackTrace
}
state
}
val licenseCommand = Command.command("license") { state: State =>
println(
"""
|This software is licensed under the Apache 2 license, quoted below.
|
|Copyright 2013 Typesafe <http://www.typesafe.com>
|
|Licensed under the Apache License, Version 2.0 (the "License"); you may not
|use this file except in compliance with the License. You may obtain a copy of
|the License at
|
| http://www.apache.org/licenses/LICENSE-2.0
|
|Unless required by applicable law or agreed to in writing, software
|distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|License for the specific language governing permissions and limitations under
|the License.
""".stripMargin)
state
}
val classpathCommand = Command.command("classpath") { state: State =>
val extracted = SbtProject.extract(state)
SbtProject.runTask(dependencyClasspath in Runtime, state).get._2.toEither match {
case Left(_) => {
println()
println("Cannot compute the classpath")
println()
state.fail
}
case Right(classpath) => {
println()
println("Here is the computed classpath of your application:")
println()
classpath.foreach { item =>
println("\\t- " + item.data.getAbsolutePath)
}
println()
state
}
}
}
val playMonitoredFiles = TaskKey[Seq[String]]("play-monitored-files")
val playMonitoredFilesTask = (thisProjectRef, state) map { (ref, state) =>
val src = inAllDependencies(ref, sourceDirectories in Compile, SbtProject structure state).foldLeft(Seq.empty[File])(_ ++ _)
val resources = inAllDependencies(ref, resourceDirectories in Compile, SbtProject structure state).foldLeft(Seq.empty[File])(_ ++ _)
val assets = inAllDependencies(ref, playAssetsDirectories, SbtProject structure state).foldLeft(Seq.empty[File])(_ ++ _)
(src ++ resources ++ assets).map { f =>
if (!f.exists) f.mkdirs(); f
}.map(_.getCanonicalPath).distinct
}
val computeDependencies = TaskKey[Seq[Map[Symbol, Any]]]("ivy-dependencies")
val computeDependenciesTask = (deliverLocal, ivySbt, streams, organizationName, moduleName, version, scalaBinaryVersion) map { (_, ivySbt, s, org, id, version, scalaVersion) =>
import scala.xml._
ivySbt.withIvy(s.log) { ivy =>
val report = XML.loadFile(
ivy.getResolutionCacheManager.getConfigurationResolveReportInCache(org + "-" + id + "_" + scalaVersion, "runtime"))
      val deps: Seq[Map[Symbol, Any]] = (report \ "dependencies" \ "module").flatMap { module =>
        (module \ "revision").map { rev =>
          Map(
            'module -> (module \ "@organisation" text, module \ "@name" text, rev \ "@name"),
            'evictedBy -> (rev \ "evicted-by").headOption.map(_ \ "@rev" text),
            'requiredBy -> (rev \ "caller").map { caller =>
              (caller \ "@organisation" text, caller \ "@name" text, caller \ "@callerrev" text)
            },
            'artifacts -> (rev \ "artifacts" \ "artifact").flatMap { artifact =>
              (artifact \ "@location").headOption.map(node => new java.io.File(node.text).getName)
})
}
}
deps.filterNot(_('artifacts).asInstanceOf[Seq[_]].isEmpty)
}
}
val computeDependenciesCommand = Command.command("dependencies") { state: State =>
val extracted = SbtProject.extract(state)
SbtProject.runTask(computeDependencies, state).get._2.toEither match {
case Left(_) => {
println()
println("Cannot compute dependencies")
println()
state.fail
}
case Right(dependencies) => {
println()
println("Here are the resolved dependencies of your application:")
println()
def asTableRow(module: Map[Symbol, Any]): Seq[(String, String, String, Boolean)] = {
val formatted = (Seq(module.get('module).map {
case (org, name, rev) => org + ":" + name + ":" + rev
}).flatten,
module.get('requiredBy).collect {
case callers: Seq[_] => callers.collect {
case (org, name, rev) => org.toString + ":" + name.toString + ":" + rev.toString
}
}.toSeq.flatten,
module.get('evictedBy).map {
case Some(rev) => Seq("Evicted by " + rev)
case None => module.get('artifacts).collect {
case artifacts: Seq[_] => artifacts.map("As " + _.toString)
}.toSeq.flatten
}.toSeq.flatten)
val maxLines = Seq(formatted._1.size, formatted._2.size, formatted._3.size).max
formatted._1.padTo(maxLines, "").zip(
formatted._2.padTo(maxLines, "")).zip(
formatted._3.padTo(maxLines, "")).map {
case ((name, callers), notes) => (name, callers, notes, module.get('evictedBy).map { case Some(_) => true; case _ => false }.get)
}
}
def display(modules: Seq[Seq[(String, String, String, Boolean)]]) {
val c1Size = modules.flatten.map(_._1.size).max
val c2Size = modules.flatten.map(_._2.size).max
val c3Size = modules.flatten.map(_._3.size).max
def bar(length: Int) = (1 to length).map(_ => "-").mkString
val indent = if (Colors.isANSISupported) 9 else 0
val lineFormat = "| %-" + (c1Size + indent) + "s | %-" + (c2Size + indent) + "s | %-" + (c3Size + indent) + "s |"
val separator = "+-%s-+-%s-+-%s-+".format(
bar(c1Size), bar(c2Size), bar(c3Size))
println(separator)
println(lineFormat.format(Colors.cyan("Module"), Colors.cyan("Required by"), Colors.cyan("Note")))
println(separator)
modules.foreach { lines =>
lines.foreach {
case (module, caller, note, evicted) => {
println(lineFormat.format(
if (evicted) Colors.red(module) else Colors.green(module),
Colors.white(caller),
if (evicted) Colors.red(note) else Colors.white(note)))
}
}
println(separator)
}
}
display(dependencies.map(asTableRow))
println()
state
}
}
}
}
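// A standalone sketch of the column-alignment trick used by `asTableRow` above:
// pad every column to the same number of lines, then zip them into rows.
object TableRowExample extends App {
  val names   = Seq("org:name:1.0")
  val callers = Seq("a:b:1.0", "c:d:2.0")
  val notes   = Seq.empty[String]
  val maxLines = Seq(names.size, callers.size, notes.size).max
  val rows = names.padTo(maxLines, "")
    .zip(callers.padTo(maxLines, ""))
    .zip(notes.padTo(maxLines, ""))
    .map { case ((n, c), note) => (n, c, note) }
  rows.foreach(println) // (org:name:1.0,a:b:1.0,) then (,c:d:2.0,)
}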
|
michaelahlers/team-awesome-wedding
|
vendor/play-2.2.1/framework/src/sbt-plugin/src/main/scala/PlayCommands.scala
|
Scala
|
mit
| 20,226 |
package scorex.unit
import java.nio.ByteBuffer
import org.scalatest.FunSuite
import scorex.network.message._
class MessageSpecification extends FunSuite {
test("PingMessage roundtrip") {
val msg = PingMessage
val parsedTry = Message.parse(ByteBuffer.wrap(msg.bytes))
assert(parsedTry.isSuccess)
}
test("ScoreMessage roundtrip 1") {
val h1 = 1
val s1 = BigInt(2)
val msg = ScoreMessage(h1, s1)
val parsed = Message.parse(ByteBuffer.wrap(msg.bytes)).get
assert(parsed.isInstanceOf[ScoreMessage])
assert(parsed.asInstanceOf[ScoreMessage].height == h1)
assert(parsed.asInstanceOf[ScoreMessage].score == s1)
}
test("ScoreMessage roundtrip 2") {
val h1 = Int.MaxValue - 1
val s1 = BigInt(Long.MaxValue) + 100
val msg = ScoreMessage(h1, s1)
val parsed = Message.parse(ByteBuffer.wrap(msg.bytes)).get
assert(parsed.isInstanceOf[ScoreMessage])
assert(parsed.asInstanceOf[ScoreMessage].height == h1)
assert(parsed.asInstanceOf[ScoreMessage].score == s1)
}
test("GetSignaturesMessage roundtrip 1") {
val e1 = 33: Byte
val e2 = 34: Byte
val s1 = e2 +: Array.fill(scorex.crypto.SigningFunctionsImpl.SignatureLength - 1)(e1)
val msg = GetSignaturesMessage(Seq(s1))
val parsed = Message.parse(ByteBuffer.wrap(msg.bytes)).get
assert(parsed.isInstanceOf[GetSignaturesMessage])
assert(parsed.asInstanceOf[GetSignaturesMessage].signatures.head.sameElements(s1))
}
test("SignaturesMessage roundtrip 1") {
val e1 = 33: Byte
val e2 = 34: Byte
val s1 = e2 +: Array.fill(scorex.crypto.SigningFunctionsImpl.SignatureLength - 1)(e1)
val s2 = e1 +: Array.fill(scorex.crypto.SigningFunctionsImpl.SignatureLength - 1)(e2)
val msg = SignaturesMessage(Seq(s1, s2))
val parsed = Message.parse(ByteBuffer.wrap(msg.bytes)).get
assert(parsed.isInstanceOf[SignaturesMessage])
assert(parsed.asInstanceOf[SignaturesMessage].signatures.head.sameElements(s1))
assert(parsed.asInstanceOf[SignaturesMessage].signatures.tail.head.sameElements(s2))
}
}
|
pozharko/Scorex-Lagonaki
|
src/test/scala/scorex/unit/MessageSpecification.scala
|
Scala
|
cc0-1.0
| 2,076 |
package chapter.seven
object ExerciseEight extends App {
}
|
deekim/impatient-scala
|
src/main/scala/chapter/seven/ExerciseEight.scala
|
Scala
|
apache-2.0
| 62 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn.tf
import java.io.{File => JFile}
import com.google.protobuf.ByteString
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.serializer.ModuleSerializationTest
import com.intel.analytics.bigdl.utils.tf.TFRecordIterator
import org.tensorflow.example.Example
class DecodePngSerialTest extends ModuleSerializationTest {
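  // Loads the bundled TFRecord test file and returns the encoded image bytes
  // for the requested format ("png", "jpeg", "gif" or "raw") as a scalar
  // ByteString tensor.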
private def getInputs(name: String): Tensor[ByteString] = {
import com.intel.analytics.bigdl.utils.tf.TFTensorNumeric.NumericByteString
val index = name match {
case "png" => 0
case "jpeg" => 1
case "gif" => 2
case "raw" => 3
}
val resource = getClass.getClassLoader.getResource("tf")
val path = resource.getPath + JFile.separator + "decode_image_test_case.tfrecord"
val file = new JFile(path)
val bytesVector = TFRecordIterator(file).toVector
val pngBytes = bytesVector(index)
val example = Example.parseFrom(pngBytes)
val imageByteString = example.getFeatures.getFeatureMap.get("image/encoded")
.getBytesList.getValueList.get(0)
Tensor[ByteString](Array(imageByteString), Array[Int]())
}
override def test(): Unit = {
val decodePng = new DecodePng[Float](1).setName("decodePng")
val input = getInputs("png")
runSerializationTest(decodePng, input)
}
}
|
wzhongyuan/BigDL
|
spark/dl/src/test/scala/com/intel/analytics/bigdl/nn/tf/DecodePngSpec.scala
|
Scala
|
apache-2.0
| 1,945 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.consumer
import java.util.concurrent._
import java.util.concurrent.atomic._
import scala.collection._
import junit.framework.Assert._
import kafka.cluster._
import kafka.message._
import kafka.server._
import org.scalatest.junit.JUnit3Suite
import kafka.integration.KafkaServerTestHarness
import kafka.utils.TestUtils
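// Spins up two in-process brokers (ZooKeeper disabled), sends messages to
// each, and verifies that the Fetcher delivers every message into the
// shared queue.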
class FetcherTest extends JUnit3Suite with KafkaServerTestHarness {
val numNodes = 2
val configs =
for(props <- TestUtils.createBrokerConfigs(numNodes))
yield new KafkaConfig(props) {
override val enableZookeeper = false
}
val messages = new mutable.HashMap[Int, ByteBufferMessageSet]
val topic = "topic"
val cluster = new Cluster(configs.map(c => new Broker(c.brokerId, c.brokerId.toString, "localhost", c.port)))
val shutdown = ZookeeperConsumerConnector.shutdownCommand
val queue = new LinkedBlockingQueue[FetchedDataChunk]
val topicInfos = configs.map(c => new PartitionTopicInfo(topic,
c.brokerId,
new Partition(c.brokerId, 0),
queue,
new AtomicLong(0),
new AtomicLong(0),
new AtomicInteger(0)))
var fetcher: Fetcher = null
override def setUp() {
super.setUp
fetcher = new Fetcher(new ConsumerConfig(TestUtils.createConsumerProperties("", "", "")), null)
fetcher.stopConnectionsToAllBrokers
fetcher.startConnections(topicInfos, cluster, null)
}
override def tearDown() {
fetcher.stopConnectionsToAllBrokers
super.tearDown
}
def testFetcher() {
val perNode = 2
var count = sendMessages(perNode)
fetch(count)
Thread.sleep(100)
assertQueueEmpty()
count = sendMessages(perNode)
fetch(count)
Thread.sleep(100)
assertQueueEmpty()
}
def assertQueueEmpty(): Unit = assertEquals(0, queue.size)
def sendMessages(messagesPerNode: Int): Int = {
var count = 0
for(conf <- configs) {
val producer = TestUtils.createProducer("localhost", conf.port)
val ms = 0.until(messagesPerNode).map(x => new Message((conf.brokerId * 5 + x).toString.getBytes)).toArray
val mSet = new ByteBufferMessageSet(compressionCodec = NoCompressionCodec, messages = ms: _*)
messages += conf.brokerId -> mSet
producer.send(topic, mSet)
producer.close()
count += ms.size
}
count
}
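  // Polls the shared queue until `expected` messages have been received;
  // fails fast if no chunk arrives within the 2-second poll timeout.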
def fetch(expected: Int) {
var count = 0
while(true) {
val chunk = queue.poll(2L, TimeUnit.SECONDS)
assertNotNull("Timed out waiting for data chunk " + (count + 1), chunk)
for(message <- chunk.messages)
count += 1
if(count == expected)
return
}
}
}
|
tnachen/kafka
|
core/src/test/scala/unit/kafka/integration/FetcherTest.scala
|
Scala
|
apache-2.0
| 3,719 |
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\
* @ @ *
* # # # # (c) 2016 CAB *
* # # # # # # *
* # # # # # # # # # # # # *
* # # # # # # # # # *
* # # # # # # # # # # # # # # # # # # *
* # # # # # # # # # # # # # # # # # *
* # # # # # # # # # # # # # *
* # # # # # # # # # # # # # # *
* # # # # # # # # # # # # # # # # # # *
* @ @ *
\* * http://github.com/alexcab * * * * * * * * * * * * * * * * * * * * * * * * * */
package mathact.parts.plumbing.fitting
/** Source of events, must be implemented by Outlet
* Created by CAB on 17.05.2016.
*/
trait Plug[H] extends Flange[H] { _: OutPipe[H] ⇒
// //Get Outlet
// private val outlet = this match{
// case out: Outlet[T] ⇒ out
// case _ ⇒ throw new Exception(
// s"[Plug] This trait must be implemented only with mathact.parts.plumbing.fitting.Outlet, " +
// s"found implementation: ${this.getClass.getName}")}
//Methods
/** Connecting of this Plug to given Socket
* @param socket - Socket[T] */
def attach(socket: ⇒Socket[H]): Unit = pump.connect(()⇒this, ()⇒socket)}
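// Hedged usage sketch: an Outlet implementation mixing in Plug can be wired to
// a Socket through attach (the `outlet` and `socket` values below are
// illustrative assumptions, not part of this file):
//   val outlet: Plug[Int] = ... // some Outlet[Int] / OutPipe[Int]
//   outlet.attach(socket)       // delegates to pump.connect(() => this, () => socket)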
|
AlexCAB/ProbabilisticPlaying
|
mathact/src/main/scala/mathact/parts/plumbing/fitting/Plug.scala
|
Scala
|
mit
| 1,754 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.parquet
import java.math.{BigDecimal => JBigDecimal}
import java.nio.charset.StandardCharsets
import java.sql.{Date, Timestamp}
import org.apache.parquet.filter2.predicate.{FilterApi, FilterPredicate, Operators}
import org.apache.parquet.filter2.predicate.FilterApi._
import org.apache.parquet.filter2.predicate.Operators.{Column => _, _}
import org.apache.parquet.schema.MessageType
import org.apache.spark.{SparkConf, SparkException}
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.optimizer.InferFiltersFromConstraints
import org.apache.spark.sql.catalyst.planning.PhysicalOperation
import org.apache.spark.sql.execution.datasources.{DataSourceStrategy, HadoopFsRelation, LogicalRelation}
import org.apache.spark.sql.execution.datasources.orc.OrcFilters
import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Relation
import org.apache.spark.sql.execution.datasources.v2.parquet.ParquetTable
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.SQLConf.ParquetOutputTimestampType
import org.apache.spark.sql.test.SharedSQLContext
import org.apache.spark.sql.types._
import org.apache.spark.util.{AccumulatorContext, AccumulatorV2}
/**
* A test suite that tests Parquet filter2 API based filter pushdown optimization.
*
* NOTE:
*
 * 1. `!(a cmp b)` is transformed to its negated form `a cmp' b` by the
 * `BooleanSimplification` optimization rule whenever possible. As a result, predicate `!(a < 1)`
 * results in a `GtEq` filter predicate rather than a `Not`.
*
* 2. `Tuple1(Option(x))` is used together with `AnyVal` types like `Int` to ensure the inferred
* data type is nullable.
*
* NOTE:
*
 * This suite intentionally enables record-level filtering. If new test cases depend on
 * this configuration, remember to set it explicitly within each such test.
*/
abstract class ParquetFilterSuite extends QueryTest with ParquetTest with SharedSQLContext {
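  // Illustrative note on NOTE 1 above: a filter such as !('_1 < 1) reaches the
  // Parquet layer already rewritten as '_1 >= 1, so the pushed predicate class
  // checked by these tests is GtEq, not Not.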
protected def createParquetFilters(
schema: MessageType,
caseSensitive: Option[Boolean] = None): ParquetFilters =
new ParquetFilters(schema, conf.parquetFilterPushDownDate, conf.parquetFilterPushDownTimestamp,
conf.parquetFilterPushDownDecimal, conf.parquetFilterPushDownStringStartWith,
conf.parquetFilterPushDownInFilterThreshold,
caseSensitive.getOrElse(conf.caseSensitiveAnalysis))
override def beforeEach(): Unit = {
super.beforeEach()
    // Note that many tests here require record-level filtering to be set to true.
spark.conf.set(SQLConf.PARQUET_RECORD_FILTER_ENABLED.key, "true")
}
override def afterEach(): Unit = {
try {
spark.conf.unset(SQLConf.PARQUET_RECORD_FILTER_ENABLED.key)
} finally {
super.afterEach()
}
}
def checkFilterPredicate(
df: DataFrame,
predicate: Predicate,
filterClass: Class[_ <: FilterPredicate],
checker: (DataFrame, Seq[Row]) => Unit,
expected: Seq[Row]): Unit
private def checkFilterPredicate
(predicate: Predicate, filterClass: Class[_ <: FilterPredicate], expected: Seq[Row])
(implicit df: DataFrame): Unit = {
checkFilterPredicate(df, predicate, filterClass, checkAnswer(_, _: Seq[Row]), expected)
}
private def checkFilterPredicate[T]
(predicate: Predicate, filterClass: Class[_ <: FilterPredicate], expected: T)
(implicit df: DataFrame): Unit = {
checkFilterPredicate(predicate, filterClass, Seq(Row(expected)))(df)
}
private def checkBinaryFilterPredicate
(predicate: Predicate, filterClass: Class[_ <: FilterPredicate], expected: Seq[Row])
(implicit df: DataFrame): Unit = {
def checkBinaryAnswer(df: DataFrame, expected: Seq[Row]) = {
assertResult(expected.map(_.getAs[Array[Byte]](0).mkString(",")).sorted) {
df.rdd.map(_.getAs[Array[Byte]](0).mkString(",")).collect().toSeq.sorted
}
}
checkFilterPredicate(df, predicate, filterClass, checkBinaryAnswer _, expected)
}
private def checkBinaryFilterPredicate
(predicate: Predicate, filterClass: Class[_ <: FilterPredicate], expected: Array[Byte])
(implicit df: DataFrame): Unit = {
checkBinaryFilterPredicate(predicate, filterClass, Seq(Row(expected)))(df)
}
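  // Runs the full battery of comparison pushdown checks against four ordered
  // timestamps; reused below for both TIMESTAMP_MILLIS and TIMESTAMP_MICROS.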
private def testTimestampPushdown(data: Seq[Timestamp]): Unit = {
assert(data.size === 4)
val ts1 = data.head
val ts2 = data(1)
val ts3 = data(2)
val ts4 = data(3)
withParquetDataFrame(data.map(i => Tuple1(i))) { implicit df =>
checkFilterPredicate('_1.isNull, classOf[Eq[_]], Seq.empty[Row])
checkFilterPredicate('_1.isNotNull, classOf[NotEq[_]], data.map(i => Row.apply(i)))
checkFilterPredicate('_1 === ts1, classOf[Eq[_]], ts1)
checkFilterPredicate('_1 <=> ts1, classOf[Eq[_]], ts1)
checkFilterPredicate('_1 =!= ts1, classOf[NotEq[_]],
Seq(ts2, ts3, ts4).map(i => Row.apply(i)))
checkFilterPredicate('_1 < ts2, classOf[Lt[_]], ts1)
checkFilterPredicate('_1 > ts1, classOf[Gt[_]], Seq(ts2, ts3, ts4).map(i => Row.apply(i)))
checkFilterPredicate('_1 <= ts1, classOf[LtEq[_]], ts1)
checkFilterPredicate('_1 >= ts4, classOf[GtEq[_]], ts4)
checkFilterPredicate(Literal(ts1) === '_1, classOf[Eq[_]], ts1)
checkFilterPredicate(Literal(ts1) <=> '_1, classOf[Eq[_]], ts1)
checkFilterPredicate(Literal(ts2) > '_1, classOf[Lt[_]], ts1)
checkFilterPredicate(Literal(ts3) < '_1, classOf[Gt[_]], ts4)
checkFilterPredicate(Literal(ts1) >= '_1, classOf[LtEq[_]], ts1)
checkFilterPredicate(Literal(ts4) <= '_1, classOf[GtEq[_]], ts4)
checkFilterPredicate(!('_1 < ts4), classOf[GtEq[_]], ts4)
checkFilterPredicate('_1 < ts2 || '_1 > ts3, classOf[Operators.Or], Seq(Row(ts1), Row(ts4)))
}
}
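  // Writes the given DataFrame to a temporary Parquet file and reads it back,
  // so the decimal pushdown checks below run against the on-disk schema.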
private def testDecimalPushDown(data: DataFrame)(f: DataFrame => Unit): Unit = {
withTempPath { file =>
data.write.parquet(file.getCanonicalPath)
readParquetFile(file.toString)(f)
}
}
  // This function tests the code paths that go through `canDrop` and `inverseCanDrop`.
private def testStringStartsWith(dataFrame: DataFrame, filter: String): Unit = {
withTempPath { dir =>
val path = dir.getCanonicalPath
dataFrame.write.option("parquet.block.size", 512).parquet(path)
Seq(true, false).foreach { pushDown =>
withSQLConf(
SQLConf.PARQUET_FILTER_PUSHDOWN_STRING_STARTSWITH_ENABLED.key -> pushDown.toString) {
val accu = new NumRowGroupsAcc
sparkContext.register(accu)
val df = spark.read.parquet(path).filter(filter)
df.foreachPartition((it: Iterator[Row]) => it.foreach(v => accu.add(0)))
if (pushDown) {
assert(accu.value == 0)
} else {
assert(accu.value > 0)
}
AccumulatorContext.remove(accu.id)
}
}
}
}
test("filter pushdown - boolean") {
withParquetDataFrame((true :: false :: Nil).map(b => Tuple1.apply(Option(b)))) { implicit df =>
checkFilterPredicate('_1.isNull, classOf[Eq[_]], Seq.empty[Row])
checkFilterPredicate('_1.isNotNull, classOf[NotEq[_]], Seq(Row(true), Row(false)))
checkFilterPredicate('_1 === true, classOf[Eq[_]], true)
checkFilterPredicate('_1 <=> true, classOf[Eq[_]], true)
checkFilterPredicate('_1 =!= true, classOf[NotEq[_]], false)
}
}
test("filter pushdown - tinyint") {
withParquetDataFrame((1 to 4).map(i => Tuple1(Option(i.toByte)))) { implicit df =>
assert(df.schema.head.dataType === ByteType)
checkFilterPredicate('_1.isNull, classOf[Eq[_]], Seq.empty[Row])
checkFilterPredicate('_1.isNotNull, classOf[NotEq[_]], (1 to 4).map(Row.apply(_)))
checkFilterPredicate('_1 === 1.toByte, classOf[Eq[_]], 1)
checkFilterPredicate('_1 <=> 1.toByte, classOf[Eq[_]], 1)
checkFilterPredicate('_1 =!= 1.toByte, classOf[NotEq[_]], (2 to 4).map(Row.apply(_)))
checkFilterPredicate('_1 < 2.toByte, classOf[Lt[_]], 1)
checkFilterPredicate('_1 > 3.toByte, classOf[Gt[_]], 4)
checkFilterPredicate('_1 <= 1.toByte, classOf[LtEq[_]], 1)
checkFilterPredicate('_1 >= 4.toByte, classOf[GtEq[_]], 4)
checkFilterPredicate(Literal(1.toByte) === '_1, classOf[Eq[_]], 1)
checkFilterPredicate(Literal(1.toByte) <=> '_1, classOf[Eq[_]], 1)
checkFilterPredicate(Literal(2.toByte) > '_1, classOf[Lt[_]], 1)
checkFilterPredicate(Literal(3.toByte) < '_1, classOf[Gt[_]], 4)
checkFilterPredicate(Literal(1.toByte) >= '_1, classOf[LtEq[_]], 1)
checkFilterPredicate(Literal(4.toByte) <= '_1, classOf[GtEq[_]], 4)
checkFilterPredicate(!('_1 < 4.toByte), classOf[GtEq[_]], 4)
checkFilterPredicate('_1 < 2.toByte || '_1 > 3.toByte,
classOf[Operators.Or], Seq(Row(1), Row(4)))
}
}
test("filter pushdown - smallint") {
withParquetDataFrame((1 to 4).map(i => Tuple1(Option(i.toShort)))) { implicit df =>
assert(df.schema.head.dataType === ShortType)
checkFilterPredicate('_1.isNull, classOf[Eq[_]], Seq.empty[Row])
checkFilterPredicate('_1.isNotNull, classOf[NotEq[_]], (1 to 4).map(Row.apply(_)))
checkFilterPredicate('_1 === 1.toShort, classOf[Eq[_]], 1)
checkFilterPredicate('_1 <=> 1.toShort, classOf[Eq[_]], 1)
checkFilterPredicate('_1 =!= 1.toShort, classOf[NotEq[_]], (2 to 4).map(Row.apply(_)))
checkFilterPredicate('_1 < 2.toShort, classOf[Lt[_]], 1)
checkFilterPredicate('_1 > 3.toShort, classOf[Gt[_]], 4)
checkFilterPredicate('_1 <= 1.toShort, classOf[LtEq[_]], 1)
checkFilterPredicate('_1 >= 4.toShort, classOf[GtEq[_]], 4)
checkFilterPredicate(Literal(1.toShort) === '_1, classOf[Eq[_]], 1)
checkFilterPredicate(Literal(1.toShort) <=> '_1, classOf[Eq[_]], 1)
checkFilterPredicate(Literal(2.toShort) > '_1, classOf[Lt[_]], 1)
checkFilterPredicate(Literal(3.toShort) < '_1, classOf[Gt[_]], 4)
checkFilterPredicate(Literal(1.toShort) >= '_1, classOf[LtEq[_]], 1)
checkFilterPredicate(Literal(4.toShort) <= '_1, classOf[GtEq[_]], 4)
checkFilterPredicate(!('_1 < 4.toShort), classOf[GtEq[_]], 4)
checkFilterPredicate('_1 < 2.toShort || '_1 > 3.toShort,
classOf[Operators.Or], Seq(Row(1), Row(4)))
}
}
test("filter pushdown - integer") {
withParquetDataFrame((1 to 4).map(i => Tuple1(Option(i)))) { implicit df =>
checkFilterPredicate('_1.isNull, classOf[Eq[_]], Seq.empty[Row])
checkFilterPredicate('_1.isNotNull, classOf[NotEq[_]], (1 to 4).map(Row.apply(_)))
checkFilterPredicate('_1 === 1, classOf[Eq[_]], 1)
checkFilterPredicate('_1 <=> 1, classOf[Eq[_]], 1)
checkFilterPredicate('_1 =!= 1, classOf[NotEq[_]], (2 to 4).map(Row.apply(_)))
checkFilterPredicate('_1 < 2, classOf[Lt[_]], 1)
checkFilterPredicate('_1 > 3, classOf[Gt[_]], 4)
checkFilterPredicate('_1 <= 1, classOf[LtEq[_]], 1)
checkFilterPredicate('_1 >= 4, classOf[GtEq[_]], 4)
checkFilterPredicate(Literal(1) === '_1, classOf[Eq[_]], 1)
checkFilterPredicate(Literal(1) <=> '_1, classOf[Eq[_]], 1)
checkFilterPredicate(Literal(2) > '_1, classOf[Lt[_]], 1)
checkFilterPredicate(Literal(3) < '_1, classOf[Gt[_]], 4)
checkFilterPredicate(Literal(1) >= '_1, classOf[LtEq[_]], 1)
checkFilterPredicate(Literal(4) <= '_1, classOf[GtEq[_]], 4)
checkFilterPredicate(!('_1 < 4), classOf[GtEq[_]], 4)
checkFilterPredicate('_1 < 2 || '_1 > 3, classOf[Operators.Or], Seq(Row(1), Row(4)))
}
}
test("filter pushdown - long") {
withParquetDataFrame((1 to 4).map(i => Tuple1(Option(i.toLong)))) { implicit df =>
checkFilterPredicate('_1.isNull, classOf[Eq[_]], Seq.empty[Row])
checkFilterPredicate('_1.isNotNull, classOf[NotEq[_]], (1 to 4).map(Row.apply(_)))
checkFilterPredicate('_1 === 1, classOf[Eq[_]], 1)
checkFilterPredicate('_1 <=> 1, classOf[Eq[_]], 1)
checkFilterPredicate('_1 =!= 1, classOf[NotEq[_]], (2 to 4).map(Row.apply(_)))
checkFilterPredicate('_1 < 2, classOf[Lt[_]], 1)
checkFilterPredicate('_1 > 3, classOf[Gt[_]], 4)
checkFilterPredicate('_1 <= 1, classOf[LtEq[_]], 1)
checkFilterPredicate('_1 >= 4, classOf[GtEq[_]], 4)
checkFilterPredicate(Literal(1) === '_1, classOf[Eq[_]], 1)
checkFilterPredicate(Literal(1) <=> '_1, classOf[Eq[_]], 1)
checkFilterPredicate(Literal(2) > '_1, classOf[Lt[_]], 1)
checkFilterPredicate(Literal(3) < '_1, classOf[Gt[_]], 4)
checkFilterPredicate(Literal(1) >= '_1, classOf[LtEq[_]], 1)
checkFilterPredicate(Literal(4) <= '_1, classOf[GtEq[_]], 4)
checkFilterPredicate(!('_1 < 4), classOf[GtEq[_]], 4)
checkFilterPredicate('_1 < 2 || '_1 > 3, classOf[Operators.Or], Seq(Row(1), Row(4)))
}
}
test("filter pushdown - float") {
withParquetDataFrame((1 to 4).map(i => Tuple1(Option(i.toFloat)))) { implicit df =>
checkFilterPredicate('_1.isNull, classOf[Eq[_]], Seq.empty[Row])
checkFilterPredicate('_1.isNotNull, classOf[NotEq[_]], (1 to 4).map(Row.apply(_)))
checkFilterPredicate('_1 === 1, classOf[Eq[_]], 1)
checkFilterPredicate('_1 <=> 1, classOf[Eq[_]], 1)
checkFilterPredicate('_1 =!= 1, classOf[NotEq[_]], (2 to 4).map(Row.apply(_)))
checkFilterPredicate('_1 < 2, classOf[Lt[_]], 1)
checkFilterPredicate('_1 > 3, classOf[Gt[_]], 4)
checkFilterPredicate('_1 <= 1, classOf[LtEq[_]], 1)
checkFilterPredicate('_1 >= 4, classOf[GtEq[_]], 4)
checkFilterPredicate(Literal(1) === '_1, classOf[Eq[_]], 1)
checkFilterPredicate(Literal(1) <=> '_1, classOf[Eq[_]], 1)
checkFilterPredicate(Literal(2) > '_1, classOf[Lt[_]], 1)
checkFilterPredicate(Literal(3) < '_1, classOf[Gt[_]], 4)
checkFilterPredicate(Literal(1) >= '_1, classOf[LtEq[_]], 1)
checkFilterPredicate(Literal(4) <= '_1, classOf[GtEq[_]], 4)
checkFilterPredicate(!('_1 < 4), classOf[GtEq[_]], 4)
checkFilterPredicate('_1 < 2 || '_1 > 3, classOf[Operators.Or], Seq(Row(1), Row(4)))
}
}
test("filter pushdown - double") {
withParquetDataFrame((1 to 4).map(i => Tuple1(Option(i.toDouble)))) { implicit df =>
checkFilterPredicate('_1.isNull, classOf[Eq[_]], Seq.empty[Row])
checkFilterPredicate('_1.isNotNull, classOf[NotEq[_]], (1 to 4).map(Row.apply(_)))
checkFilterPredicate('_1 === 1, classOf[Eq[_]], 1)
checkFilterPredicate('_1 <=> 1, classOf[Eq[_]], 1)
checkFilterPredicate('_1 =!= 1, classOf[NotEq[_]], (2 to 4).map(Row.apply(_)))
checkFilterPredicate('_1 < 2, classOf[Lt[_]], 1)
checkFilterPredicate('_1 > 3, classOf[Gt[_]], 4)
checkFilterPredicate('_1 <= 1, classOf[LtEq[_]], 1)
checkFilterPredicate('_1 >= 4, classOf[GtEq[_]], 4)
checkFilterPredicate(Literal(1) === '_1, classOf[Eq[_]], 1)
checkFilterPredicate(Literal(1) <=> '_1, classOf[Eq[_]], 1)
checkFilterPredicate(Literal(2) > '_1, classOf[Lt[_]], 1)
checkFilterPredicate(Literal(3) < '_1, classOf[Gt[_]], 4)
checkFilterPredicate(Literal(1) >= '_1, classOf[LtEq[_]], 1)
checkFilterPredicate(Literal(4) <= '_1, classOf[GtEq[_]], 4)
checkFilterPredicate(!('_1 < 4), classOf[GtEq[_]], 4)
checkFilterPredicate('_1 < 2 || '_1 > 3, classOf[Operators.Or], Seq(Row(1), Row(4)))
}
}
test("filter pushdown - string") {
withParquetDataFrame((1 to 4).map(i => Tuple1(i.toString))) { implicit df =>
checkFilterPredicate('_1.isNull, classOf[Eq[_]], Seq.empty[Row])
checkFilterPredicate(
'_1.isNotNull, classOf[NotEq[_]], (1 to 4).map(i => Row.apply(i.toString)))
checkFilterPredicate('_1 === "1", classOf[Eq[_]], "1")
checkFilterPredicate('_1 <=> "1", classOf[Eq[_]], "1")
checkFilterPredicate(
'_1 =!= "1", classOf[NotEq[_]], (2 to 4).map(i => Row.apply(i.toString)))
checkFilterPredicate('_1 < "2", classOf[Lt[_]], "1")
checkFilterPredicate('_1 > "3", classOf[Gt[_]], "4")
checkFilterPredicate('_1 <= "1", classOf[LtEq[_]], "1")
checkFilterPredicate('_1 >= "4", classOf[GtEq[_]], "4")
checkFilterPredicate(Literal("1") === '_1, classOf[Eq[_]], "1")
checkFilterPredicate(Literal("1") <=> '_1, classOf[Eq[_]], "1")
checkFilterPredicate(Literal("2") > '_1, classOf[Lt[_]], "1")
checkFilterPredicate(Literal("3") < '_1, classOf[Gt[_]], "4")
checkFilterPredicate(Literal("1") >= '_1, classOf[LtEq[_]], "1")
checkFilterPredicate(Literal("4") <= '_1, classOf[GtEq[_]], "4")
checkFilterPredicate(!('_1 < "4"), classOf[GtEq[_]], "4")
checkFilterPredicate('_1 < "2" || '_1 > "3", classOf[Operators.Or], Seq(Row("1"), Row("4")))
}
}
test("filter pushdown - binary") {
implicit class IntToBinary(int: Int) {
def b: Array[Byte] = int.toString.getBytes(StandardCharsets.UTF_8)
}
withParquetDataFrame((1 to 4).map(i => Tuple1(i.b))) { implicit df =>
checkBinaryFilterPredicate('_1 === 1.b, classOf[Eq[_]], 1.b)
checkBinaryFilterPredicate('_1 <=> 1.b, classOf[Eq[_]], 1.b)
checkBinaryFilterPredicate('_1.isNull, classOf[Eq[_]], Seq.empty[Row])
checkBinaryFilterPredicate(
'_1.isNotNull, classOf[NotEq[_]], (1 to 4).map(i => Row.apply(i.b)).toSeq)
checkBinaryFilterPredicate(
'_1 =!= 1.b, classOf[NotEq[_]], (2 to 4).map(i => Row.apply(i.b)).toSeq)
checkBinaryFilterPredicate('_1 < 2.b, classOf[Lt[_]], 1.b)
checkBinaryFilterPredicate('_1 > 3.b, classOf[Gt[_]], 4.b)
checkBinaryFilterPredicate('_1 <= 1.b, classOf[LtEq[_]], 1.b)
checkBinaryFilterPredicate('_1 >= 4.b, classOf[GtEq[_]], 4.b)
checkBinaryFilterPredicate(Literal(1.b) === '_1, classOf[Eq[_]], 1.b)
checkBinaryFilterPredicate(Literal(1.b) <=> '_1, classOf[Eq[_]], 1.b)
checkBinaryFilterPredicate(Literal(2.b) > '_1, classOf[Lt[_]], 1.b)
checkBinaryFilterPredicate(Literal(3.b) < '_1, classOf[Gt[_]], 4.b)
checkBinaryFilterPredicate(Literal(1.b) >= '_1, classOf[LtEq[_]], 1.b)
checkBinaryFilterPredicate(Literal(4.b) <= '_1, classOf[GtEq[_]], 4.b)
checkBinaryFilterPredicate(!('_1 < 4.b), classOf[GtEq[_]], 4.b)
checkBinaryFilterPredicate(
'_1 < 2.b || '_1 > 3.b, classOf[Operators.Or], Seq(Row(1.b), Row(4.b)))
}
}
test("filter pushdown - date") {
implicit class StringToDate(s: String) {
def date: Date = Date.valueOf(s)
}
val data = Seq("2018-03-18", "2018-03-19", "2018-03-20", "2018-03-21")
withParquetDataFrame(data.map(i => Tuple1(i.date))) { implicit df =>
checkFilterPredicate('_1.isNull, classOf[Eq[_]], Seq.empty[Row])
checkFilterPredicate('_1.isNotNull, classOf[NotEq[_]], data.map(i => Row.apply(i.date)))
checkFilterPredicate('_1 === "2018-03-18".date, classOf[Eq[_]], "2018-03-18".date)
checkFilterPredicate('_1 <=> "2018-03-18".date, classOf[Eq[_]], "2018-03-18".date)
checkFilterPredicate('_1 =!= "2018-03-18".date, classOf[NotEq[_]],
Seq("2018-03-19", "2018-03-20", "2018-03-21").map(i => Row.apply(i.date)))
checkFilterPredicate('_1 < "2018-03-19".date, classOf[Lt[_]], "2018-03-18".date)
checkFilterPredicate('_1 > "2018-03-20".date, classOf[Gt[_]], "2018-03-21".date)
checkFilterPredicate('_1 <= "2018-03-18".date, classOf[LtEq[_]], "2018-03-18".date)
checkFilterPredicate('_1 >= "2018-03-21".date, classOf[GtEq[_]], "2018-03-21".date)
checkFilterPredicate(
Literal("2018-03-18".date) === '_1, classOf[Eq[_]], "2018-03-18".date)
checkFilterPredicate(
Literal("2018-03-18".date) <=> '_1, classOf[Eq[_]], "2018-03-18".date)
checkFilterPredicate(
Literal("2018-03-19".date) > '_1, classOf[Lt[_]], "2018-03-18".date)
checkFilterPredicate(
Literal("2018-03-20".date) < '_1, classOf[Gt[_]], "2018-03-21".date)
checkFilterPredicate(
Literal("2018-03-18".date) >= '_1, classOf[LtEq[_]], "2018-03-18".date)
checkFilterPredicate(
Literal("2018-03-21".date) <= '_1, classOf[GtEq[_]], "2018-03-21".date)
checkFilterPredicate(!('_1 < "2018-03-21".date), classOf[GtEq[_]], "2018-03-21".date)
checkFilterPredicate(
'_1 < "2018-03-19".date || '_1 > "2018-03-20".date,
classOf[Operators.Or],
Seq(Row("2018-03-18".date), Row("2018-03-21".date)))
}
}
test("filter pushdown - timestamp") {
// spark.sql.parquet.outputTimestampType = TIMESTAMP_MILLIS
val millisData = Seq(Timestamp.valueOf("2018-06-14 08:28:53.123"),
Timestamp.valueOf("2018-06-15 08:28:53.123"),
Timestamp.valueOf("2018-06-16 08:28:53.123"),
Timestamp.valueOf("2018-06-17 08:28:53.123"))
withSQLConf(SQLConf.PARQUET_OUTPUT_TIMESTAMP_TYPE.key ->
ParquetOutputTimestampType.TIMESTAMP_MILLIS.toString) {
testTimestampPushdown(millisData)
}
// spark.sql.parquet.outputTimestampType = TIMESTAMP_MICROS
val microsData = Seq(Timestamp.valueOf("2018-06-14 08:28:53.123456"),
Timestamp.valueOf("2018-06-15 08:28:53.123456"),
Timestamp.valueOf("2018-06-16 08:28:53.123456"),
Timestamp.valueOf("2018-06-17 08:28:53.123456"))
withSQLConf(SQLConf.PARQUET_OUTPUT_TIMESTAMP_TYPE.key ->
ParquetOutputTimestampType.TIMESTAMP_MICROS.toString) {
testTimestampPushdown(microsData)
}
// spark.sql.parquet.outputTimestampType = INT96 doesn't support pushdown
withSQLConf(SQLConf.PARQUET_OUTPUT_TIMESTAMP_TYPE.key ->
ParquetOutputTimestampType.INT96.toString) {
withParquetDataFrame(millisData.map(i => Tuple1(i))) { implicit df =>
val schema = new SparkToParquetSchemaConverter(conf).convert(df.schema)
assertResult(None) {
createParquetFilters(schema).createFilter(sources.IsNull("_1"))
}
}
}
}
test("filter pushdown - decimal") {
Seq(
(false, Decimal.MAX_INT_DIGITS), // int32Writer
(false, Decimal.MAX_LONG_DIGITS), // int64Writer
(true, Decimal.MAX_LONG_DIGITS), // binaryWriterUsingUnscaledLong
(false, DecimalType.MAX_PRECISION) // binaryWriterUsingUnscaledBytes
).foreach { case (legacyFormat, precision) =>
withSQLConf(SQLConf.PARQUET_WRITE_LEGACY_FORMAT.key -> legacyFormat.toString) {
val schema = StructType.fromDDL(s"a decimal($precision, 2)")
val rdd =
spark.sparkContext.parallelize((1 to 4).map(i => Row(new java.math.BigDecimal(i))))
val dataFrame = spark.createDataFrame(rdd, schema)
testDecimalPushDown(dataFrame) { implicit df =>
assert(df.schema === schema)
checkFilterPredicate('a.isNull, classOf[Eq[_]], Seq.empty[Row])
checkFilterPredicate('a.isNotNull, classOf[NotEq[_]], (1 to 4).map(Row.apply(_)))
checkFilterPredicate('a === 1, classOf[Eq[_]], 1)
checkFilterPredicate('a <=> 1, classOf[Eq[_]], 1)
checkFilterPredicate('a =!= 1, classOf[NotEq[_]], (2 to 4).map(Row.apply(_)))
checkFilterPredicate('a < 2, classOf[Lt[_]], 1)
checkFilterPredicate('a > 3, classOf[Gt[_]], 4)
checkFilterPredicate('a <= 1, classOf[LtEq[_]], 1)
checkFilterPredicate('a >= 4, classOf[GtEq[_]], 4)
checkFilterPredicate(Literal(1) === 'a, classOf[Eq[_]], 1)
checkFilterPredicate(Literal(1) <=> 'a, classOf[Eq[_]], 1)
checkFilterPredicate(Literal(2) > 'a, classOf[Lt[_]], 1)
checkFilterPredicate(Literal(3) < 'a, classOf[Gt[_]], 4)
checkFilterPredicate(Literal(1) >= 'a, classOf[LtEq[_]], 1)
checkFilterPredicate(Literal(4) <= 'a, classOf[GtEq[_]], 4)
checkFilterPredicate(!('a < 4), classOf[GtEq[_]], 4)
checkFilterPredicate('a < 2 || 'a > 3, classOf[Operators.Or], Seq(Row(1), Row(4)))
}
}
}
}
test("Ensure that filter value matched the parquet file schema") {
val scale = 2
val schema = StructType(Seq(
StructField("cint", IntegerType),
StructField("cdecimal1", DecimalType(Decimal.MAX_INT_DIGITS, scale)),
StructField("cdecimal2", DecimalType(Decimal.MAX_LONG_DIGITS, scale)),
StructField("cdecimal3", DecimalType(DecimalType.MAX_PRECISION, scale))
))
val parquetSchema = new SparkToParquetSchemaConverter(conf).convert(schema)
val decimal = new JBigDecimal(10).setScale(scale)
val decimal1 = new JBigDecimal(10).setScale(scale + 1)
assert(decimal.scale() === scale)
assert(decimal1.scale() === scale + 1)
val parquetFilters = createParquetFilters(parquetSchema)
assertResult(Some(lt(intColumn("cdecimal1"), 1000: Integer))) {
parquetFilters.createFilter(sources.LessThan("cdecimal1", decimal))
}
assertResult(None) {
parquetFilters.createFilter(sources.LessThan("cdecimal1", decimal1))
}
assertResult(Some(lt(longColumn("cdecimal2"), 1000L: java.lang.Long))) {
parquetFilters.createFilter(sources.LessThan("cdecimal2", decimal))
}
assertResult(None) {
parquetFilters.createFilter(sources.LessThan("cdecimal2", decimal1))
}
assert(parquetFilters.createFilter(sources.LessThan("cdecimal3", decimal)).isDefined)
assertResult(None) {
parquetFilters.createFilter(sources.LessThan("cdecimal3", decimal1))
}
}
test("SPARK-6554: don't push down predicates which reference partition columns") {
import testImplicits._
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true") {
withTempPath { dir =>
val path = s"${dir.getCanonicalPath}/part=1"
(1 to 3).map(i => (i, i.toString)).toDF("a", "b").write.parquet(path)
// If the "part = 1" filter gets pushed down, this query will throw an exception since
// "part" is not a valid column in the actual Parquet file
checkAnswer(
spark.read.parquet(dir.getCanonicalPath).filter("part = 1"),
(1 to 3).map(i => Row(i, i.toString, 1)))
}
}
}
test("SPARK-10829: Filter combine partition key and attribute doesn't work in DataSource scan") {
import testImplicits._
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true") {
withTempPath { dir =>
val path = s"${dir.getCanonicalPath}/part=1"
(1 to 3).map(i => (i, i.toString)).toDF("a", "b").write.parquet(path)
// If the "part = 1" filter gets pushed down, this query will throw an exception since
// "part" is not a valid column in the actual Parquet file
checkAnswer(
spark.read.parquet(dir.getCanonicalPath).filter("a > 0 and (part = 0 or a > 1)"),
(2 to 3).map(i => Row(i, i.toString, 1)))
}
}
}
test("SPARK-12231: test the filter and empty project in partitioned DataSource scan") {
import testImplicits._
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true") {
withTempPath { dir =>
val path = s"${dir.getCanonicalPath}"
(1 to 3).map(i => (i, i + 1, i + 2, i + 3)).toDF("a", "b", "c", "d").
write.partitionBy("a").parquet(path)
// The filter "a > 1 or b < 2" will not get pushed down, and the projection is empty,
// this query will throw an exception since the project from combinedFilter expect
// two projection while the
val df1 = spark.read.parquet(dir.getCanonicalPath)
assert(df1.filter("a > 1 or b < 2").count() == 2)
}
}
}
test("SPARK-12231: test the new projection in partitioned DataSource scan") {
import testImplicits._
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true") {
withTempPath { dir =>
val path = s"${dir.getCanonicalPath}"
(1 to 3).map(i => (i, i + 1, i + 2, i + 3)).toDF("a", "b", "c", "d").
write.partitionBy("a").parquet(path)
        // Test the case where a new projection is generated,
        // i.e. when projects != partitionAndNormalColumnProjs.
val df1 = spark.read.parquet(dir.getCanonicalPath)
checkAnswer(
df1.filter("a > 1 or b > 2").orderBy("a").selectExpr("a", "b", "c", "d"),
(2 to 3).map(i => Row(i, i + 1, i + 2, i + 3)))
}
}
}
test("Filter applied on merged Parquet schema with new column should work") {
import testImplicits._
Seq("true", "false").foreach { vectorized =>
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true",
SQLConf.PARQUET_SCHEMA_MERGING_ENABLED.key -> "true",
SQLConf.PARQUET_VECTORIZED_READER_ENABLED.key -> vectorized) {
withTempPath { dir =>
val path1 = s"${dir.getCanonicalPath}/table1"
(1 to 3).map(i => (i, i.toString)).toDF("a", "b").write.parquet(path1)
val path2 = s"${dir.getCanonicalPath}/table2"
(1 to 3).map(i => (i, i.toString)).toDF("c", "b").write.parquet(path2)
// No matter "c = 1" gets pushed down or not, this query should work without exception.
val df = spark.read.parquet(path1, path2).filter("c = 1").selectExpr("c", "b", "a")
checkAnswer(
df,
Row(1, "1", null))
val path3 = s"${dir.getCanonicalPath}/table3"
val dfStruct = sparkContext.parallelize(Seq((1, 1))).toDF("a", "b")
dfStruct.select(struct("a").as("s")).write.parquet(path3)
val path4 = s"${dir.getCanonicalPath}/table4"
val dfStruct2 = sparkContext.parallelize(Seq((1, 1))).toDF("c", "b")
dfStruct2.select(struct("c").as("s")).write.parquet(path4)
// No matter "s.c = 1" gets pushed down or not, this query should work without exception.
val dfStruct3 = spark.read.parquet(path3, path4).filter("s.c = 1")
.selectExpr("s")
checkAnswer(dfStruct3, Row(Row(null, 1)))
}
}
}
}
  // The unsafe row RecordReader does not support row-by-row filtering, so run this test with it disabled.
test("SPARK-11661 Still pushdown filters returned by unhandledFilters") {
import testImplicits._
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true") {
withSQLConf(SQLConf.PARQUET_VECTORIZED_READER_ENABLED.key -> "false") {
withTempPath { dir =>
val path = s"${dir.getCanonicalPath}/part=1"
(1 to 3).map(i => (i, i.toString)).toDF("a", "b").write.parquet(path)
val df = spark.read.parquet(path).filter("a = 2")
          // The result should be a single row.
          // When a filter is pushed to Parquet, Parquet can apply it to every row,
          // so we can check the number of rows returned from Parquet
          // to make sure our filter pushdown works.
assert(stripSparkFilter(df).count == 1)
}
}
}
}
test("SPARK-12218: 'Not' is included in Parquet filter pushdown") {
import testImplicits._
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true") {
withTempPath { dir =>
val path = s"${dir.getCanonicalPath}/table1"
(1 to 5).map(i => (i, (i % 2).toString)).toDF("a", "b").write.parquet(path)
checkAnswer(
spark.read.parquet(path).where("not (a = 2) or not(b in ('1'))"),
(1 to 5).map(i => Row(i, (i % 2).toString)))
checkAnswer(
spark.read.parquet(path).where("not (a = 2 and b in ('1'))"),
(1 to 5).map(i => Row(i, (i % 2).toString)))
}
}
}
test("SPARK-12218 and SPARK-25559 Converting conjunctions into Parquet filter predicates") {
val schema = StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", StringType, nullable = true),
StructField("c", DoubleType, nullable = true)
))
val parquetSchema = new SparkToParquetSchemaConverter(conf).convert(schema)
val parquetFilters = createParquetFilters(parquetSchema)
assertResult(Some(and(
lt(intColumn("a"), 10: Integer),
gt(doubleColumn("c"), 1.5: java.lang.Double)))
) {
parquetFilters.createFilter(
sources.And(
sources.LessThan("a", 10),
sources.GreaterThan("c", 1.5D)))
}
// Testing when `canRemoveOneSideInAnd == true`
// case sources.And(lhs, rhs) =>
// ...
// case (Some(lhsFilter), None) if canRemoveOneSideInAnd => Some(lhsFilter)
assertResult(Some(lt(intColumn("a"), 10: Integer))) {
parquetFilters.createFilter(
sources.And(
sources.LessThan("a", 10),
sources.StringContains("b", "prefix")))
}
// Testing when `canRemoveOneSideInAnd == true`
// case sources.And(lhs, rhs) =>
// ...
// case (None, Some(rhsFilter)) if canRemoveOneSideInAnd => Some(rhsFilter)
assertResult(Some(lt(intColumn("a"), 10: Integer))) {
parquetFilters.createFilter(
sources.And(
sources.StringContains("b", "prefix"),
sources.LessThan("a", 10)))
}
// Testing complex And conditions
assertResult(Some(
FilterApi.and(lt(intColumn("a"), 10: Integer), gt(intColumn("a"), 5: Integer)))) {
parquetFilters.createFilter(
sources.And(
sources.And(
sources.LessThan("a", 10),
sources.StringContains("b", "prefix")
),
sources.GreaterThan("a", 5)))
}
// Testing complex And conditions
assertResult(Some(
FilterApi.and(gt(intColumn("a"), 5: Integer), lt(intColumn("a"), 10: Integer)))) {
parquetFilters.createFilter(
sources.And(
sources.GreaterThan("a", 5),
sources.And(
sources.StringContains("b", "prefix"),
sources.LessThan("a", 10)
)))
}
// Testing
// case sources.Not(pred) =>
// createFilterHelper(nameToParquetField, pred, canRemoveOneSideInAnd = false)
// .map(FilterApi.not)
//
// and
//
// Testing when `canRemoveOneSideInAnd == false`
// case sources.And(lhs, rhs) =>
// ...
// case (Some(lhsFilter), None) if canRemoveOneSideInAnd => Some(lhsFilter)
assertResult(None) {
parquetFilters.createFilter(
sources.Not(
sources.And(
sources.GreaterThan("a", 1),
sources.StringContains("b", "prefix"))))
}
// Testing
// case sources.Not(pred) =>
// createFilterHelper(nameToParquetField, pred, canRemoveOneSideInAnd = false)
// .map(FilterApi.not)
//
// and
//
// Testing when `canRemoveOneSideInAnd == false`
// case sources.And(lhs, rhs) =>
// ...
// case (None, Some(rhsFilter)) if canRemoveOneSideInAnd => Some(rhsFilter)
assertResult(None) {
parquetFilters.createFilter(
sources.Not(
sources.And(
sources.StringContains("b", "prefix"),
sources.GreaterThan("a", 1))))
}
// Testing
// case sources.Not(pred) =>
// createFilterHelper(nameToParquetField, pred, canRemoveOneSideInAnd = false)
// .map(FilterApi.not)
//
// and
//
// Testing passing `canRemoveOneSideInAnd = false` into
// case sources.And(lhs, rhs) =>
// val lhsFilterOption = createFilterHelper(nameToParquetField, lhs, canRemoveOneSideInAnd)
assertResult(None) {
parquetFilters.createFilter(
sources.Not(
sources.And(
sources.And(
sources.GreaterThan("a", 1),
sources.StringContains("b", "prefix")),
sources.GreaterThan("a", 2))))
}
// Testing
// case sources.Not(pred) =>
// createFilterHelper(nameToParquetField, pred, canRemoveOneSideInAnd = false)
// .map(FilterApi.not)
//
// and
//
// Testing passing `canRemoveOneSideInAnd = false` into
// case sources.And(lhs, rhs) =>
// val rhsFilterOption = createFilterHelper(nameToParquetField, rhs, canRemoveOneSideInAnd)
assertResult(None) {
parquetFilters.createFilter(
sources.Not(
sources.And(
sources.GreaterThan("a", 2),
sources.And(
sources.GreaterThan("a", 1),
sources.StringContains("b", "prefix")))))
}
}
test("SPARK-27699 Converting disjunctions into Parquet filter predicates") {
val schema = StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", StringType, nullable = true),
StructField("c", DoubleType, nullable = true)
))
val parquetSchema = new SparkToParquetSchemaConverter(conf).convert(schema)
val parquetFilters = createParquetFilters(parquetSchema)
// Testing
// case sources.Or(lhs, rhs) =>
// ...
// lhsFilter <- createFilterHelper(nameToParquetField, lhs, canRemoveOneSideInAnd = true)
assertResult(Some(
FilterApi.or(gt(intColumn("a"), 1: Integer), gt(intColumn("a"), 2: Integer)))) {
parquetFilters.createFilter(
sources.Or(
sources.And(
sources.GreaterThan("a", 1),
sources.StringContains("b", "prefix")),
sources.GreaterThan("a", 2)))
}
// Testing
// case sources.Or(lhs, rhs) =>
// ...
// rhsFilter <- createFilterHelper(nameToParquetField, rhs, canRemoveOneSideInAnd = true)
assertResult(Some(
FilterApi.or(gt(intColumn("a"), 2: Integer), gt(intColumn("a"), 1: Integer)))) {
parquetFilters.createFilter(
sources.Or(
sources.GreaterThan("a", 2),
sources.And(
sources.GreaterThan("a", 1),
sources.StringContains("b", "prefix"))))
}
// Testing
// case sources.Or(lhs, rhs) =>
// ...
// lhsFilter <- createFilterHelper(nameToParquetField, lhs, canRemoveOneSideInAnd = true)
// rhsFilter <- createFilterHelper(nameToParquetField, rhs, canRemoveOneSideInAnd = true)
assertResult(Some(
FilterApi.or(gt(intColumn("a"), 1: Integer), lt(intColumn("a"), 0: Integer)))) {
parquetFilters.createFilter(
sources.Or(
sources.And(
sources.GreaterThan("a", 1),
sources.StringContains("b", "prefix")),
sources.And(
sources.LessThan("a", 0),
sources.StringContains("b", "foobar"))))
}
}
test("SPARK-27698 Convertible Parquet filter predicates") {
val schema = StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", StringType, nullable = true),
StructField("c", DoubleType, nullable = true)
))
val parquetSchema = new SparkToParquetSchemaConverter(conf).convert(schema)
val parquetFilters = createParquetFilters(parquetSchema)
assertResult(Seq(sources.And(sources.LessThan("a", 10), sources.GreaterThan("c", 1.5D)))) {
parquetFilters.convertibleFilters(
Seq(sources.And(
sources.LessThan("a", 10),
sources.GreaterThan("c", 1.5D))))
}
assertResult(Seq(sources.LessThan("a", 10))) {
parquetFilters.convertibleFilters(
Seq(sources.And(
sources.LessThan("a", 10),
sources.StringContains("b", "prefix"))))
}
assertResult(Seq(sources.LessThan("a", 10))) {
parquetFilters.convertibleFilters(
Seq(sources.And(
sources.StringContains("b", "prefix"),
sources.LessThan("a", 10))))
}
// Testing complex And conditions
assertResult(Seq(sources.And(sources.LessThan("a", 10), sources.GreaterThan("a", 5)))) {
parquetFilters.convertibleFilters(
Seq(sources.And(
sources.And(
sources.LessThan("a", 10),
sources.StringContains("b", "prefix")
),
sources.GreaterThan("a", 5))))
}
// Testing complex And conditions
assertResult(Seq(sources.And(sources.GreaterThan("a", 5), sources.LessThan("a", 10)))) {
parquetFilters.convertibleFilters(
Seq(sources.And(
sources.GreaterThan("a", 5),
sources.And(
sources.StringContains("b", "prefix"),
sources.LessThan("a", 10)
))))
}
// Testing complex And conditions
assertResult(Seq(sources.Or(sources.GreaterThan("a", 1), sources.GreaterThan("a", 2)))) {
parquetFilters.convertibleFilters(
Seq(sources.Or(
sources.And(
sources.GreaterThan("a", 1),
sources.StringContains("b", "prefix")),
sources.GreaterThan("a", 2))))
}
// Testing complex And/Or conditions, the And condition under Or condition can't be pushed down.
assertResult(Seq(sources.And(sources.LessThan("a", 10),
sources.Or(sources.GreaterThan("a", 1), sources.GreaterThan("a", 2))))) {
parquetFilters.convertibleFilters(
Seq(sources.And(
sources.LessThan("a", 10),
sources.Or(
sources.And(
sources.GreaterThan("a", 1),
sources.StringContains("b", "prefix")),
sources.GreaterThan("a", 2)))))
}
assertResult(Seq(sources.Or(sources.GreaterThan("a", 2), sources.GreaterThan("c", 1.1)))) {
parquetFilters.convertibleFilters(
Seq(sources.Or(
sources.GreaterThan("a", 2),
sources.And(
sources.GreaterThan("c", 1.1),
sources.StringContains("b", "prefix")))))
}
// Testing complex Not conditions.
assertResult(Seq.empty) {
parquetFilters.convertibleFilters(
Seq(sources.Not(
sources.And(
sources.GreaterThan("a", 1),
sources.StringContains("b", "prefix")))))
}
assertResult(Seq.empty) {
parquetFilters.convertibleFilters(
Seq(sources.Not(
sources.And(
sources.StringContains("b", "prefix"),
sources.GreaterThan("a", 1)))))
}
assertResult(Seq.empty) {
parquetFilters.convertibleFilters(
Seq(sources.Not(
sources.And(
sources.And(
sources.GreaterThan("a", 1),
sources.StringContains("b", "prefix")),
sources.GreaterThan("a", 2)))))
}
assertResult(Seq.empty) {
parquetFilters.convertibleFilters(
Seq(sources.Not(
sources.And(
sources.GreaterThan("a", 2),
sources.And(
sources.GreaterThan("a", 1),
sources.StringContains("b", "prefix"))))))
}
}
test("SPARK-16371 Do not push down filters when inner name and outer name are the same") {
withParquetDataFrame((1 to 4).map(i => Tuple1(Tuple1(i)))) { implicit df =>
// Here the schema becomes as below:
//
// root
// |-- _1: struct (nullable = true)
// | |-- _1: integer (nullable = true)
//
      // The inner column name `_1` and the outer column name `_1` are the same.
      // Filters must not be pushed down here because the outer column is a struct.
assert(df.filter("_1 IS NOT NULL").count() === 4)
}
}
test("Filters should be pushed down for vectorized Parquet reader at row group level") {
import testImplicits._
withSQLConf(SQLConf.PARQUET_VECTORIZED_READER_ENABLED.key -> "true",
SQLConf.WHOLESTAGE_CODEGEN_ENABLED.key -> "false") {
withTempPath { dir =>
val path = s"${dir.getCanonicalPath}/table"
(1 to 1024).map(i => (101, i)).toDF("a", "b").write.parquet(path)
Seq(true, false).foreach { enablePushDown =>
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> enablePushDown.toString) {
val accu = new NumRowGroupsAcc
sparkContext.register(accu)
val df = spark.read.parquet(path).filter("a < 100")
df.foreachPartition((it: Iterator[Row]) => it.foreach(v => accu.add(0)))
if (enablePushDown) {
assert(accu.value == 0)
} else {
assert(accu.value > 0)
}
AccumulatorContext.remove(accu.id)
}
}
}
}
}
test("SPARK-17213: Broken Parquet filter push-down for string columns") {
Seq(true, false).foreach { vectorizedEnabled =>
withSQLConf(SQLConf.PARQUET_VECTORIZED_READER_ENABLED.key -> vectorizedEnabled.toString) {
withTempPath { dir =>
import testImplicits._
val path = dir.getCanonicalPath
// scalastyle:off nonascii
Seq("a", "é").toDF("name").write.parquet(path)
// scalastyle:on nonascii
assert(spark.read.parquet(path).where("name > 'a'").count() == 1)
assert(spark.read.parquet(path).where("name >= 'a'").count() == 2)
// scalastyle:off nonascii
assert(spark.read.parquet(path).where("name < 'é'").count() == 1)
assert(spark.read.parquet(path).where("name <= 'é'").count() == 2)
// scalastyle:on nonascii
}
}
}
}
test("SPARK-20364: Disable Parquet predicate pushdown for fields having dots in the names") {
import testImplicits._
Seq(true, false).foreach { vectorized =>
withSQLConf(SQLConf.PARQUET_VECTORIZED_READER_ENABLED.key -> vectorized.toString,
SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> true.toString,
SQLConf.SUPPORT_QUOTED_REGEX_COLUMN_NAME.key -> "false") {
withTempPath { path =>
Seq(Some(1), None).toDF("col.dots").write.parquet(path.getAbsolutePath)
val readBack = spark.read.parquet(path.getAbsolutePath).where("`col.dots` IS NOT NULL")
assert(readBack.count() == 1)
}
}
}
}
test("Filters should be pushed down for Parquet readers at row group level") {
import testImplicits._
withSQLConf(
// Makes sure disabling 'spark.sql.parquet.recordFilter' still enables
// row group level filtering.
SQLConf.PARQUET_RECORD_FILTER_ENABLED.key -> "false",
SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true",
SQLConf.PARQUET_VECTORIZED_READER_ENABLED.key -> "false") {
withTempPath { path =>
val data = (1 to 1024)
data.toDF("a").coalesce(1)
.write.option("parquet.block.size", 512)
.parquet(path.getAbsolutePath)
val df = spark.read.parquet(path.getAbsolutePath).filter("a == 500")
// Here, we strip the Spark side filter and check the actual results from Parquet.
val actual = stripSparkFilter(df).collect().length
// Since those are filtered at row group level, the result count should be less
// than the total length but should not be a single record.
// Note that, if record level filtering is enabled, it should be a single record.
// If no filter is pushed down to Parquet, it should be the total length of data.
assert(actual > 1 && actual < data.length)
}
}
}
test("SPARK-23852: Broken Parquet push-down for partially-written stats") {
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true") {
// parquet-1217.parquet contains a single column with values -1, 0, 1, 2 and null.
// The row-group statistics include null counts, but not min and max values, which
// triggers PARQUET-1217.
val df = readResourceParquetFile("test-data/parquet-1217.parquet")
// Will return 0 rows if PARQUET-1217 is not fixed.
assert(df.where("col > 0").count() === 2)
}
}
test("filter pushdown - StringStartsWith") {
withParquetDataFrame((1 to 4).map(i => Tuple1(i + "str" + i))) { implicit df =>
checkFilterPredicate(
'_1.startsWith("").asInstanceOf[Predicate],
classOf[UserDefinedByInstance[_, _]],
Seq("1str1", "2str2", "3str3", "4str4").map(Row(_)))
Seq("2", "2s", "2st", "2str", "2str2").foreach { prefix =>
checkFilterPredicate(
'_1.startsWith(prefix).asInstanceOf[Predicate],
classOf[UserDefinedByInstance[_, _]],
"2str2")
}
Seq("2S", "null", "2str22").foreach { prefix =>
checkFilterPredicate(
'_1.startsWith(prefix).asInstanceOf[Predicate],
classOf[UserDefinedByInstance[_, _]],
Seq.empty[Row])
}
checkFilterPredicate(
!'_1.startsWith("").asInstanceOf[Predicate],
classOf[Operators.Not],
Seq().map(Row(_)))
Seq("2", "2s", "2st", "2str", "2str2").foreach { prefix =>
checkFilterPredicate(
!'_1.startsWith(prefix).asInstanceOf[Predicate],
classOf[Operators.Not],
Seq("1str1", "3str3", "4str4").map(Row(_)))
}
Seq("2S", "null", "2str22").foreach { prefix =>
checkFilterPredicate(
!'_1.startsWith(prefix).asInstanceOf[Predicate],
classOf[Operators.Not],
Seq("1str1", "2str2", "3str3", "4str4").map(Row(_)))
}
val schema = new SparkToParquetSchemaConverter(conf).convert(df.schema)
assertResult(None) {
createParquetFilters(schema).createFilter(sources.StringStartsWith("_1", null))
}
}
import testImplicits._
    // Test that canDrop() has taken effect
testStringStartsWith(spark.range(1024).map(_.toString).toDF(), "value like 'a%'")
    // Test that inverseCanDrop() has taken effect
    testStringStartsWith(spark.range(1024).map(_ => "100").toDF(), "value not like '10%'")
}
test("SPARK-17091: Convert IN predicate to Parquet filter push-down") {
val schema = StructType(Seq(
StructField("a", IntegerType, nullable = false)
))
val parquetSchema = new SparkToParquetSchemaConverter(conf).convert(schema)
val parquetFilters = createParquetFilters(parquetSchema)
assertResult(Some(FilterApi.eq(intColumn("a"), null: Integer))) {
parquetFilters.createFilter(sources.In("a", Array(null)))
}
assertResult(Some(FilterApi.eq(intColumn("a"), 10: Integer))) {
parquetFilters.createFilter(sources.In("a", Array(10)))
}
// Remove duplicates
assertResult(Some(FilterApi.eq(intColumn("a"), 10: Integer))) {
parquetFilters.createFilter(sources.In("a", Array(10, 10)))
}
assertResult(Some(or(or(
FilterApi.eq(intColumn("a"), 10: Integer),
FilterApi.eq(intColumn("a"), 20: Integer)),
FilterApi.eq(intColumn("a"), 30: Integer)))
) {
parquetFilters.createFilter(sources.In("a", Array(10, 20, 30)))
}
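    // In predicates are expanded into a chain of ORs only while the number of
    // values stays within conf.parquetFilterPushDownInFilterThreshold; above
    // that limit no Parquet filter is created, as the next two asserts show.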
assert(parquetFilters.createFilter(sources.In("a",
Range(0, conf.parquetFilterPushDownInFilterThreshold).toArray)).isDefined)
assert(parquetFilters.createFilter(sources.In("a",
Range(0, conf.parquetFilterPushDownInFilterThreshold + 1).toArray)).isEmpty)
import testImplicits._
withTempPath { path =>
val data = 0 to 1024
data.toDF("a").selectExpr("if (a = 1024, null, a) AS a") // convert 1024 to null
.coalesce(1).write.option("parquet.block.size", 512)
.parquet(path.getAbsolutePath)
val df = spark.read.parquet(path.getAbsolutePath)
Seq(true, false).foreach { pushEnabled =>
withSQLConf(
SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> pushEnabled.toString) {
Seq(1, 5, 10, 11).foreach { count =>
val filter = s"a in(${Range(0, count).mkString(",")})"
assert(df.where(filter).count() === count)
val actual = stripSparkFilter(df.where(filter)).collect().length
if (pushEnabled && count <= conf.parquetFilterPushDownInFilterThreshold) {
assert(actual > 1 && actual < data.length)
} else {
assert(actual === data.length)
}
}
assert(df.where("a in(null)").count() === 0)
assert(df.where("a = null").count() === 0)
assert(df.where("a is null").count() === 1)
}
}
}
}
test("SPARK-25207: Case-insensitive field resolution for pushdown when reading parquet") {
def testCaseInsensitiveResolution(
schema: StructType,
expected: FilterPredicate,
filter: sources.Filter): Unit = {
val parquetSchema = new SparkToParquetSchemaConverter(conf).convert(schema)
val caseSensitiveParquetFilters =
createParquetFilters(parquetSchema, caseSensitive = Some(true))
val caseInsensitiveParquetFilters =
createParquetFilters(parquetSchema, caseSensitive = Some(false))
assertResult(Some(expected)) {
caseInsensitiveParquetFilters.createFilter(filter)
}
assertResult(None) {
caseSensitiveParquetFilters.createFilter(filter)
}
}
val schema = StructType(Seq(StructField("cint", IntegerType)))
testCaseInsensitiveResolution(
schema, FilterApi.eq(intColumn("cint"), null.asInstanceOf[Integer]), sources.IsNull("CINT"))
testCaseInsensitiveResolution(
schema,
FilterApi.notEq(intColumn("cint"), null.asInstanceOf[Integer]),
sources.IsNotNull("CINT"))
testCaseInsensitiveResolution(
schema, FilterApi.eq(intColumn("cint"), 1000: Integer), sources.EqualTo("CINT", 1000))
testCaseInsensitiveResolution(
schema,
FilterApi.notEq(intColumn("cint"), 1000: Integer),
sources.Not(sources.EqualTo("CINT", 1000)))
testCaseInsensitiveResolution(
schema, FilterApi.eq(intColumn("cint"), 1000: Integer), sources.EqualNullSafe("CINT", 1000))
testCaseInsensitiveResolution(
schema,
FilterApi.notEq(intColumn("cint"), 1000: Integer),
sources.Not(sources.EqualNullSafe("CINT", 1000)))
testCaseInsensitiveResolution(
schema,
FilterApi.lt(intColumn("cint"), 1000: Integer), sources.LessThan("CINT", 1000))
testCaseInsensitiveResolution(
schema,
FilterApi.ltEq(intColumn("cint"), 1000: Integer),
sources.LessThanOrEqual("CINT", 1000))
testCaseInsensitiveResolution(
schema, FilterApi.gt(intColumn("cint"), 1000: Integer), sources.GreaterThan("CINT", 1000))
testCaseInsensitiveResolution(
schema,
FilterApi.gtEq(intColumn("cint"), 1000: Integer),
sources.GreaterThanOrEqual("CINT", 1000))
testCaseInsensitiveResolution(
schema,
FilterApi.or(
FilterApi.eq(intColumn("cint"), 10: Integer),
FilterApi.eq(intColumn("cint"), 20: Integer)),
sources.In("CINT", Array(10, 20)))
val dupFieldSchema = StructType(
Seq(StructField("cint", IntegerType), StructField("cINT", IntegerType)))
val dupParquetSchema = new SparkToParquetSchemaConverter(conf).convert(dupFieldSchema)
val dupCaseInsensitiveParquetFilters =
createParquetFilters(dupParquetSchema, caseSensitive = Some(false))
assertResult(None) {
dupCaseInsensitiveParquetFilters.createFilter(sources.EqualTo("CINT", 1000))
}
}
test("SPARK-25207: exception when duplicate fields in case-insensitive mode") {
withTempPath { dir =>
val count = 10
val tableName = "spark_25207"
val tableDir = dir.getAbsoluteFile + "/table"
withTable(tableName) {
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
spark.range(count).selectExpr("id as A", "id as B", "id as b")
.write.mode("overwrite").parquet(tableDir)
}
sql(
s"""
|CREATE TABLE $tableName (A LONG, B LONG) USING PARQUET LOCATION '$tableDir'
""".stripMargin)
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") {
val e = intercept[SparkException] {
sql(s"select a from $tableName where b > 0").collect()
}
assert(e.getCause.isInstanceOf[RuntimeException] && e.getCause.getMessage.contains(
"""Found duplicate field(s) "B": [B, b] in case-insensitive mode"""))
}
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
checkAnswer(sql(s"select A from $tableName where B > 0"), (1 until count).map(Row(_)))
}
}
}
}
}
class ParquetV1FilterSuite extends ParquetFilterSuite {
override protected def sparkConf: SparkConf =
super
.sparkConf
.set(SQLConf.USE_V1_SOURCE_READER_LIST, "parquet")
.set(SQLConf.USE_V1_SOURCE_WRITER_LIST, "parquet")
override def checkFilterPredicate(
df: DataFrame,
predicate: Predicate,
filterClass: Class[_ <: FilterPredicate],
checker: (DataFrame, Seq[Row]) => Unit,
expected: Seq[Row]): Unit = {
val output = predicate.collect { case a: Attribute => a }.distinct
withSQLConf(
SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true",
SQLConf.PARQUET_FILTER_PUSHDOWN_DATE_ENABLED.key -> "true",
SQLConf.PARQUET_FILTER_PUSHDOWN_TIMESTAMP_ENABLED.key -> "true",
SQLConf.PARQUET_FILTER_PUSHDOWN_DECIMAL_ENABLED.key -> "true",
SQLConf.PARQUET_FILTER_PUSHDOWN_STRING_STARTSWITH_ENABLED.key -> "true",
// Disable adding filters from constraints because it adds, for instance,
// is-not-null to pushed filters, which makes it hard to test if the pushed
// filter is expected or not (this had to be fixed with SPARK-13495).
SQLConf.OPTIMIZER_EXCLUDED_RULES.key -> InferFiltersFromConstraints.ruleName,
SQLConf.PARQUET_VECTORIZED_READER_ENABLED.key -> "false") {
val query = df
.select(output.map(e => Column(e)): _*)
.where(Column(predicate))
var maybeRelation: Option[HadoopFsRelation] = None
val maybeAnalyzedPredicate = query.queryExecution.optimizedPlan.collect {
case PhysicalOperation(_, filters,
LogicalRelation(relation: HadoopFsRelation, _, _, _)) =>
maybeRelation = Some(relation)
filters
}.flatten.reduceLeftOption(_ && _)
assert(maybeAnalyzedPredicate.isDefined, "No filter is analyzed from the given query")
val (_, selectedFilters, _) =
DataSourceStrategy.selectFilters(maybeRelation.get, maybeAnalyzedPredicate.toSeq)
assert(selectedFilters.nonEmpty, "No filter is pushed down")
val schema = new SparkToParquetSchemaConverter(conf).convert(df.schema)
val parquetFilters = createParquetFilters(schema)
// In this test suite, all the simple predicates are convertible here.
assert(parquetFilters.convertibleFilters(selectedFilters) === selectedFilters)
val pushedParquetFilters = selectedFilters.map { pred =>
val maybeFilter = parquetFilters.createFilter(pred)
assert(maybeFilter.isDefined, s"Couldn't generate filter predicate for $pred")
maybeFilter.get
}
// Doesn't bother checking type parameters here (e.g. `Eq[Integer]`)
assert(pushedParquetFilters.exists(_.getClass === filterClass),
s"${pushedParquetFilters.map(_.getClass).toList} did not contain ${filterClass}.")
checker(stripSparkFilter(query), expected)
}
}
}
class ParquetV2FilterSuite extends ParquetFilterSuite {
// TODO: enable Parquet V2 write path after file source V2 writers are workable.
override protected def sparkConf: SparkConf =
super
.sparkConf
.set(SQLConf.USE_V1_SOURCE_READER_LIST, "")
override def checkFilterPredicate(
df: DataFrame,
predicate: Predicate,
filterClass: Class[_ <: FilterPredicate],
checker: (DataFrame, Seq[Row]) => Unit,
expected: Seq[Row]): Unit = {
val output = predicate.collect { case a: Attribute => a }.distinct
withSQLConf(
SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true",
SQLConf.PARQUET_FILTER_PUSHDOWN_DATE_ENABLED.key -> "true",
SQLConf.PARQUET_FILTER_PUSHDOWN_TIMESTAMP_ENABLED.key -> "true",
SQLConf.PARQUET_FILTER_PUSHDOWN_DECIMAL_ENABLED.key -> "true",
SQLConf.PARQUET_FILTER_PUSHDOWN_STRING_STARTSWITH_ENABLED.key -> "true",
// Disable adding filters from constraints because it adds, for instance,
// is-not-null to pushed filters, which makes it hard to test if the pushed
// filter is expected or not (this had to be fixed with SPARK-13495).
SQLConf.OPTIMIZER_EXCLUDED_RULES.key -> InferFiltersFromConstraints.ruleName,
SQLConf.PARQUET_VECTORIZED_READER_ENABLED.key -> "false") {
val query = df
.select(output.map(e => Column(e)): _*)
.where(Column(predicate))
query.queryExecution.optimizedPlan.collectFirst {
case PhysicalOperation(_, filters,
DataSourceV2Relation(parquetTable: ParquetTable, _, options)) =>
assert(filters.nonEmpty, "No filter is analyzed from the given query")
val scanBuilder = parquetTable.newScanBuilder(options)
val sourceFilters = filters.flatMap(DataSourceStrategy.translateFilter).toArray
scanBuilder.pushFilters(sourceFilters)
val pushedFilters = scanBuilder.pushedFilters()
assert(pushedFilters.nonEmpty, "No filter is pushed down")
val schema = new SparkToParquetSchemaConverter(conf).convert(df.schema)
val parquetFilters = createParquetFilters(schema)
// In this test suite, all the simple predicates are convertible here.
assert(parquetFilters.convertibleFilters(sourceFilters) === pushedFilters)
val pushedParquetFilters = pushedFilters.map { pred =>
val maybeFilter = parquetFilters.createFilter(pred)
assert(maybeFilter.isDefined, s"Couldn't generate filter predicate for $pred")
maybeFilter.get
}
// Doesn't bother checking type parameters here (e.g. `Eq[Integer]`)
assert(pushedParquetFilters.exists(_.getClass === filterClass),
s"${pushedParquetFilters.map(_.getClass).toList} did not contain ${filterClass}.")
checker(stripSparkFilter(query), expected)
case _ =>
throw new AnalysisException("Can not match ParquetTable in the query.")
}
}
}
}
class NumRowGroupsAcc extends AccumulatorV2[Integer, Integer] {
private var _sum = 0
override def isZero: Boolean = _sum == 0
override def copy(): AccumulatorV2[Integer, Integer] = {
val acc = new NumRowGroupsAcc()
acc._sum = _sum
acc
}
override def reset(): Unit = _sum = 0
override def add(v: Integer): Unit = _sum += v
override def merge(other: AccumulatorV2[Integer, Integer]): Unit = other match {
case a: NumRowGroupsAcc => _sum += a._sum
case _ => throw new UnsupportedOperationException(
s"Cannot merge ${this.getClass.getName} with ${other.getClass.getName}")
}
override def value: Integer = _sum
}
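// Usage sketch (not part of the original suite; names are illustrative): an
// AccumulatorV2 such as NumRowGroupsAcc is registered with the SparkContext,
// incremented by the reader, and inspected on the driver.
//
//   val acc = new NumRowGroupsAcc
//   spark.sparkContext.register(acc, "numRowGroups")
//   // ... run a Parquet scan that bumps the accumulator once per row group ...
//   println(s"row groups read: ${acc.value}")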
|
aosagie/spark
|
sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilterSuite.scala
|
Scala
|
apache-2.0
| 63,146 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs102.retriever
import uk.gov.hmrc.ct.accounts.frs10x.boxes.AC13
import uk.gov.hmrc.ct.accounts.frs102.boxes._
import uk.gov.hmrc.ct.accounts.frs102.boxes.loansToDirectors.LoansToDirectors
import uk.gov.hmrc.ct.accounts.frs102.boxes.relatedPartyTransactions.RelatedPartyTransactions
import uk.gov.hmrc.ct.accounts.frs10x.boxes._
import uk.gov.hmrc.ct.accounts.frs10x.retriever.Frs10xAccountsBoxRetriever
import uk.gov.hmrc.ct.box.retriever.FilingAttributesBoxValueRetriever
trait Frs102AccountsBoxRetriever extends Frs10xAccountsBoxRetriever {
self: FilingAttributesBoxValueRetriever =>
def ac13(): AC13
def ac15(): AC15
def ac16(): AC16
def ac17(): AC17
def ac18(): AC18
def ac19(): AC19
def ac20(): AC20
def ac21(): AC21
def ac24(): AC24
def ac25(): AC25
def ac26(): AC26 = AC26.calculate(this)
def ac27(): AC27 = AC27.calculate(this)
def ac28(): AC28
def ac29(): AC29
def ac30(): AC30
def ac31(): AC31
def ac32(): AC32 = AC32.calculate(this)
def ac33(): AC33 = AC33.calculate(this)
def ac34(): AC34
def ac35(): AC35
def ac36(): AC36 = AC36.calculate(this)
def ac37(): AC37 = AC37.calculate(this)
def ac42(): AC42
def ac43(): AC43
def ac44(): AC44
def ac45(): AC45
def ac48(): AC48 = AC48.calculate(this)
def ac49(): AC49 = AC49.calculate(this)
def ac50(): AC50
def ac51(): AC51
def ac52(): AC52
def ac53(): AC53
def ac54(): AC54
def ac55(): AC55
def ac56(): AC56 = AC56.calculate(this)
def ac57(): AC57 = AC57.calculate(this)
def ac58(): AC58
def ac59(): AC59
def ac60(): AC60 = AC60.calculate(this)
def ac61(): AC61 = AC61.calculate(this)
def ac62(): AC62 = AC62.calculate(this)
def ac63(): AC63 = AC63.calculate(this)
def ac64(): AC64
def ac65(): AC65
def ac66(): AC66
def ac67(): AC67
def ac68(): AC68 = AC68.calculate(this)
def ac69(): AC69 = AC69.calculate(this)
def ac70(): AC70
def ac71(): AC71
def ac74(): AC74
def ac75(): AC75
def ac76(): AC76
def ac77(): AC77
def ac80(): AC80 = AC80.calculate(this)
def ac81(): AC81 = AC81.calculate(this)
def ac106(): AC106
def ac107(): AC107
def ac106A(): AC106A
def ac125(): AC125
def ac126(): AC126
def ac130(): AC130
def ac131(): AC131 = AC131.calculate(this)
def ac132(): AC132 = AC132.calculate(this)
def ac200a(): AC200A
def ac200(): AC200
def ac212(): AC212
def ac213(): AC213
def ac214(): AC214
def ac217(): AC217 = AC217.calculate(this)
def ac219(): AC219
def ac115(): AC115
def ac116(): AC116
def ac117(): AC117 = AC117.calculate(this)
def ac119(): AC119
def ac120(): AC120
def ac121(): AC121 = AC121.calculate(this)
def ac122(): AC122 = AC122.calculate(this)
def ac209(): AC209
def ac210(): AC210
def ac211(): AC211
def ac320(): AC320
def ac320A(): AC320A
def ac321(): AC321
def ac322(): AC322
def ac323(): AC323
def ac324(): AC324
def ac7110A(): AC7110A
def ac138B(): AC138B
def ac139B(): AC139B
def ac150B(): AC150B
def ac151B(): AC151B
def ac5032(): AC5032
def ac5052A(): AC5052A
def ac5052B(): AC5052B
def ac5052C(): AC5052C
def ac5058A(): AC5058A
def ac5064A(): AC5064A
def ac124(): AC124
def ac128(): AC128
def ac133(): AC133 = AC133.calculate(this)
def ac5133(): AC5133
def ac187(): AC187
def ac188(): AC188 = AC188(ac77())
def ac189(): AC189
def ac190(): AC190 = AC190.calculate(this)
def ac5076C(): AC5076C
def ac114(): AC114
def ac118(): AC118
def ac123(): AC123 = AC123.calculate(this)
def ac5123(): AC5123
def ac7100(): AC7100
def ac7200(): AC7200
def ac7210A(): AC7210A
def ac7210B(): AC7210B
def ac7400(): AC7400
def ac7401(): AC7401
def ac7500(): AC7500
def ac7600(): AC7600
def ac7601(): AC7601
def ac7800(): AC7800
def loansToDirectors(): LoansToDirectors
def ac7900(): AC7900
def ac8084(): AC8084
def ac8085(): AC8085
def ac7901(): AC7901
def relatedPartyTransactions(): RelatedPartyTransactions
}
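// Implementation sketch (illustrative, not part of the codebase): a concrete
// retriever supplies the declared boxes, while calculated boxes such as ac26()
// and ac27() above keep their default derivations. The AC13/AC15 constructor
// shapes below are assumptions.
//
//   class InMemoryFrs102Retriever extends Frs102AccountsBoxRetriever
//       with FilingAttributesBoxValueRetriever {
//     override def ac13(): AC13 = AC13(Some(100))
//     override def ac15(): AC15 = AC15(Some(90))
//     // ... remaining abstract boxes elided ...
//   }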
|
hmrc/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/accounts/frs102/retriever/Frs102AccountsBoxRetriever.scala
|
Scala
|
apache-2.0
| 4,700 |
/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.adam.rdd
import org.bdgenomics.adam.converters.DefaultHeaderLines
import org.bdgenomics.adam.models.{ SequenceRecord, SequenceDictionary, ReferenceRegion }
import org.bdgenomics.adam.rdd.ADAMContext._
import org.bdgenomics.adam.rdd.read.AlignmentRecordRDD
import org.bdgenomics.adam.rdd.feature.FeatureRDD
import org.bdgenomics.adam.rdd.variant.GenotypeRDD
import org.bdgenomics.formats.avro._
import org.bdgenomics.utils.misc.SparkFunSuite
import scala.collection.mutable.ListBuffer
class SortedGenomicRDDSuite extends SparkFunSuite {
/**
* Determines if a given partition map has been correctly sorted
*
* @param list The partition map
* @return a boolean where true is sorted and false is unsorted
*/
def isSorted(list: Seq[Option[(ReferenceRegion, ReferenceRegion)]]): Boolean = {
val test = list.drop(1).map(_.get._1)
val test2 = list.dropRight(1).map(_.get._2)
!test2.zip(test).exists(f => f._1.start > f._2.start && f._1.end > f._2.end && f._1.referenceName > f._2.referenceName)
}
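// Worked example (illustrative values): for partition bounds
//   Seq(Some((chr1:0-10, chr1:40-50)), Some((chr1:45-60, chr1:90-100)))
// the check pairs chr1:40-50 (last region of partition 0) with chr1:45-60
// (first region of partition 1); since the earlier bound does not strictly
// exceed the later one in start, end and reference name, the map is sorted.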
val chromosomeLengths = Map(1 -> 248956422, 2 -> 242193529, 3 -> 198295559, 4 -> 190214555, 5 -> 181538259, 6 -> 170805979, 7 -> 159345973, 8 -> 145138636, 9 -> 138394717, 10 -> 133797422, 11 -> 135086622, 12 -> 133275309, 13 -> 114364328, 14 -> 107043718, 15 -> 101991189, 16 -> 90338345, 17 -> 83257441, 18 -> 80373285, 19 -> 58617616, 20 -> 64444167, 21 -> 46709983, 22 -> 50818468)
val sd = new SequenceDictionary(Vector(
SequenceRecord("chr20", 63025520),
SequenceRecord("chr7", 159138663),
SequenceRecord("chr18", 78077248),
SequenceRecord("chr13", 115169878),
SequenceRecord("chr3", 198022430),
SequenceRecord("chr6", 171115067),
SequenceRecord("chr9", 141213431),
SequenceRecord("chr16", 90354753),
SequenceRecord("chr10", 135534747),
SequenceRecord("chr12", 133851895),
SequenceRecord("chr8", 146364022),
SequenceRecord("chr19", 59128983),
SequenceRecord("chr2", 243199373),
SequenceRecord("chr15", 102531392),
SequenceRecord("chr14", 107349540),
SequenceRecord("chr17", 81195210),
SequenceRecord("chr5", 180915260),
SequenceRecord("chr4", 191154276),
SequenceRecord("chr1", 249250621),
SequenceRecord("chr21", 48129895),
SequenceRecord("chr11,", 135006516)
))
sparkTest("testing that partition and sort provide correct outputs") {
// load in a generic bam
val x = sc.loadBam(resourceUrl("reads12.sam").getFile)
// sort and make into 16 partitions
val y = x.sortLexicographically(storePartitionMap = true, partitions = 16)
// sort and make into 32 partitions
val z = x.sortLexicographically(storePartitionMap = true, partitions = 32)
val arrayRepresentationOfZ = z.rdd.collect
//verify sort worked on actual values
for (currentArray <- List(y.rdd.collect, z.rdd.collect)) {
for (i <- currentArray.indices) {
if (i != 0) assert(
ReferenceRegion(arrayRepresentationOfZ(i).getContigName,
arrayRepresentationOfZ(i).getStart,
arrayRepresentationOfZ(i).getEnd).compareTo(
ReferenceRegion(arrayRepresentationOfZ(i - 1).getContigName,
arrayRepresentationOfZ(i - 1).getStart,
arrayRepresentationOfZ(i - 1).getEnd)) >= 0)
}
}
val partitionTupleCounts: Array[Int] = z.rdd.mapPartitions(f => Iterator(f.size)).collect
val partitionTupleCounts2: Array[Int] = y.rdd.mapPartitions(f => Iterator(f.size)).collect
// make sure that we didn't lose any data
assert(partitionTupleCounts.sum == partitionTupleCounts2.sum)
}
sparkTest("testing copartition maintains or adds sort") {
val x = sc.loadBam(resourceUrl("reads12.sam").getFile)
val z = x.sortLexicographically(storePartitionMap = true, partitions = 16)
val y = x.sortLexicographically(storePartitionMap = true, partitions = 32)
val a = x.copartitionByReferenceRegion(y)
val b = z.copartitionByReferenceRegion(y)
assert(!a.rdd.zip(b.rdd).collect.exists(f => f._1 != f._2))
}
sparkTest("testing that we don't drop any data on the right side even though it doesn't map to a partition on the left") {
// testing the left side with an extremely large region that is
// not the last record on a partition
// this test also tests the case that our
val genotypeRddBuilder = new ListBuffer[Genotype]()
genotypeRddBuilder += {
Genotype.newBuilder()
.setContigName("chr1")
.setStart(2L)
.setEnd(100L)
.setVariant(
Variant.newBuilder()
.setStart(2L)
.setEnd(100L)
.setAlternateAllele("A")
.setReferenceAllele("T")
.build()
)
.setSampleId("1")
.build()
}
genotypeRddBuilder += {
Genotype.newBuilder()
.setContigName("chr1")
.setStart(3L)
.setEnd(5L)
.setVariant(
Variant.newBuilder()
.setStart(3L)
.setEnd(5L)
.setAlternateAllele("A")
.setReferenceAllele("T")
.build()
)
.setSampleId("2")
.build()
}
genotypeRddBuilder += {
Genotype.newBuilder()
.setContigName("chr1")
.setStart(6L)
.setEnd(7L)
.setVariant(
Variant.newBuilder()
.setStart(6L)
.setEnd(7L)
.setAlternateAllele("A")
.setReferenceAllele("T")
.build()
)
.setSampleId("3")
.build()
}
genotypeRddBuilder += {
Genotype.newBuilder()
.setContigName("chr1")
.setStart(8L)
.setEnd(12L)
.setVariant(
Variant.newBuilder()
.setStart(8L)
.setEnd(12L)
.setAlternateAllele("A")
.setReferenceAllele("T")
.build()
)
.setSampleId("3")
.build()
}
val featureRddBuilder = new ListBuffer[Feature]()
featureRddBuilder += {
Feature.newBuilder()
.setContigName("chr1")
.setStart(61L)
.setEnd(62L)
.build()
}
featureRddBuilder += {
Feature.newBuilder()
.setContigName("chr1")
.setStart(11L)
.setEnd(15L)
.build()
}
featureRddBuilder += {
Feature.newBuilder()
.setContigName("chr1")
.setStart(3L)
.setEnd(6L)
.build()
}
featureRddBuilder += {
Feature.newBuilder()
.setContigName("chr1")
.setStart(6L)
.setEnd(8L)
.build()
}
featureRddBuilder += {
Feature.newBuilder()
.setContigName("chr1")
.setStart(50L)
.setEnd(52L)
.build()
}
featureRddBuilder += {
Feature.newBuilder()
.setContigName("chr1")
.setStart(1L)
.setEnd(2L)
.build()
}
val genotypes =
GenotypeRDD(sc.parallelize(genotypeRddBuilder),
sd, Seq(), DefaultHeaderLines.allHeaderLines)
.sortLexicographically(storePartitionMap = true, partitions = 2)
genotypes.rdd.mapPartitionsWithIndex((idx, iter) => {
iter.map(f => (idx, f))
}).collect
val features = FeatureRDD(sc.parallelize(featureRddBuilder), sd)
val x = features.copartitionByReferenceRegion(genotypes)
val z = x.rdd.mapPartitionsWithIndex((idx, iter) => {
if (idx == 0 && iter.size != 6) {
Iterator(true)
} else if (idx == 1 && iter.size != 2) {
Iterator(true)
} else {
Iterator()
}
})
x.rdd.mapPartitionsWithIndex((idx, iter) => {
iter.map(f => (idx, f))
}).collect
assert(z.collect.length == 0)
}
sparkTest("testing that sorted shuffleRegionJoin matches unsorted") {
val x = sc.loadBam(resourceUrl("reads12.sam").getFile)
// sort and make into 16 partitions
val z =
x.sortLexicographically(storePartitionMap = true, partitions = 1600)
// perform join using 1600 partitions
// 1600 is much more than the amount of data in the GenomicRDD
// so we also test our ability to handle this extreme request
val b = z.shuffleRegionJoin(x, Some(1600))
val c = x.shuffleRegionJoin(z, Some(1600))
val d = c.rdd.map(f => (f._1.getStart, f._2.getEnd)).collect.toSet
val e = b.rdd.map(f => (f._1.getStart, f._2.getEnd)).collect.toSet
val setDiff = d -- e
assert(setDiff.isEmpty)
assert(b.rdd.count == c.rdd.count)
}
sparkTest("testing that sorted fullOuterShuffleRegionJoin matches unsorted") {
val x = sc.loadBam(resourceUrl("reads12.sam").getFile)
val z = x.sortLexicographically(storePartitionMap = true, partitions = 16)
val d = x.fullOuterShuffleRegionJoin(z, Some(1))
val e = z.fullOuterShuffleRegionJoin(x, Some(1))
val setDiff = d.rdd.collect.toSet -- e.rdd.collect.toSet
assert(setDiff.isEmpty)
assert(d.rdd.count == e.rdd.count)
}
sparkTest("testing that sorted rightOuterShuffleRegionJoin matches unsorted") {
val x = sc.loadBam(resourceUrl("reads12.sam").getFile)
val z = x.sortLexicographically(storePartitionMap = true, partitions = 1)
val f = z.rightOuterShuffleRegionJoin(x, Some(1)).rdd.collect
val g = x.rightOuterShuffleRegionJoin(x).rdd.collect
val setDiff = f.toSet -- g.toSet
assert(setDiff.isEmpty)
assert(f.length == g.length)
}
sparkTest("testing that sorted leftOuterShuffleRegionJoin matches unsorted") {
val x = sc.loadBam(resourceUrl("reads12.sam").getFile)
val z = x.sortLexicographically(storePartitionMap = true, partitions = 1)
val h = z.leftOuterShuffleRegionJoin(x, Some(1)).rdd
val i = z.leftOuterShuffleRegionJoin(x).rdd
val setDiff = h.collect.toSet -- i.collect.toSet
assert(setDiff.isEmpty)
assert(h.count == i.count)
}
sparkTest("testing that we can persist the sorted knowledge") {
val x = sc.loadBam(resourceUrl("reads12.sam").getFile)
val z = x.sortLexicographically(storePartitionMap = true, partitions = 4)
val fileLocation = tmpLocation()
val saveArgs = new JavaSaveArgs(fileLocation, asSingleFile = false)
z.save(saveArgs, isSorted = true)
val t = sc.loadParquetAlignments(fileLocation)
assert(t.isSorted)
assert(t.rdd.partitions.length == z.rdd.partitions.length)
// determine that our data still fits within the partition map
assert(!t.rdd.mapPartitionsWithIndex((idx, iter) => {
iter.map(f => (idx, f))
}).zip(z.rdd.mapPartitionsWithIndex((idx, iter) => {
iter.map(f => (idx, f))
})).collect.exists(f => f._1 != f._2))
val test = t.rdd.collect.drop(1)
val test2 = t.rdd.collect.dropRight(1)
assert(!test2.zip(test).exists(f => {
ReferenceRegion(f._1.getContigName, f._1.getStart, f._1.getEnd)
.compareTo(ReferenceRegion(f._2.getContigName, f._2.getStart, f._2.getEnd)) >= 0
}))
}
}
|
laserson/adam
|
adam-core/src/test/scala/org/bdgenomics/adam/rdd/SortedGenomicRDDSuite.scala
|
Scala
|
apache-2.0
| 11,699 |
package com.nhlreplay.converter.xhtml
import xml.{Node, NodeSeq, XML}
import org.ccil.cowan.tagsoup.jaxp.SAXFactoryImpl
import xml.transform.{RuleTransformer, RewriteRule}
import com.typesafe.scalalogging.slf4j.Logging
import scala.io.Source
object XhtmlConverter extends Logging
{
def convertHtml(htmlContent: String, fileName: Option[String] = None): Source = {
fileName.foreach(file => logger.info(s"Converting file '$file'"))
filterXhtml(convertToXhtml(htmlContent))
}
private def convertToXhtml(htmlContent: String) = {
val parser = XML.withSAXParser(new SAXFactoryImpl().newSAXParser())
parser.loadString(htmlContent)
}
private def filterXhtml(xhtmlContent: NodeSeq) = {
val filter = new RewriteRule {
override def transform(node: Node): Seq[Node] = node match {
// Filter out all script tags
case n if n.label == "script" => NodeSeq.Empty
case x => x
}
}
Source.fromIterable(new RuleTransformer(filter).transform(xhtmlContent).mkString.toIterable)
}
}
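// Usage sketch (assumed input): script elements are stripped by filterXhtml,
// so the converted output no longer contains them.
//
//   val html = "<html><body><script>alert(1)</script><p>hi</p></body></html>"
//   val xhtml = XhtmlConverter.convertHtml(html).mkString
//   assert(!xhtml.contains("<script>"))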
|
peruukki/NHLReplay
|
app/com/nhlreplay/converter/xhtml/XhtmlConverter.scala
|
Scala
|
mit
| 1,036 |
package com.mthaler.keywords
import java.awt.{Dimension, BorderLayout}
import javax.swing.JFrame
class MainWindow extends JFrame {
setLayout(new BorderLayout())
setTitle("Keywords")
setPreferredSize(new Dimension(640, 480))
}
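// Launch sketch (assumption: an entry point like this lives elsewhere); Swing
// components must be created on the event dispatch thread.
//
//   object Main {
//     def main(args: Array[String]): Unit =
//       javax.swing.SwingUtilities.invokeLater(new Runnable {
//         def run(): Unit = {
//           val w = new MainWindow
//           w.pack() // applies the 640x480 preferred size
//           w.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE)
//           w.setVisible(true)
//         }
//       })
//   }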
|
mthaler/keywords
|
src/main/scala/com/mthaler/keywords/MainWindow.scala
|
Scala
|
apache-2.0
| 235 |
// Copyright 2010, Brian T. Howard ([email protected])
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package edu.depauw.escalator
import java.io._
object Downalate {
var id = 0
var documentPath: File = _
val ESCESC = "//"
val ExampleStart = (ESCESC + """\s*example\s*""").r
val ExampleEnd = (ESCESC + """\s*end\s*example\s*""").r
val TestStart = (ESCESC + """\s*test\s*""").r
val TestEnd = (ESCESC + """\s*end\s*test\s*""").r
val ConsoleExampleStart = (ESCESC + """\s*console\s*example\s*""").r
val ConsoleTestStart = (ESCESC + """\s*console\s*test\s*""").r
val SourceCmd = (ESCESC + """\s*source\s*(.*)""").r
val FilePart = """('(?:\\'|[^'])*'|\S*)\s*(.*?)\s*""".r
class DefaultState(val processed: String) {
def add(line: String): DefaultState = line match {
case ExampleStart() =>
new ExampleState(processed, "")
case TestStart() =>
new TestState(processed, "")
case ConsoleExampleStart() =>
if (Escalator.config.port != 0) new ConsoleExampleState(processed, "")
else new ExampleState(processed, "")
case ConsoleTestStart() =>
if (Escalator.config.port != 0) new ConsoleTestState(processed, "")
else new TestState(processed, "")
case SourceCmd(rest) =>
new DefaultState(join(processed, wrap(getSource(rest))))
case _ =>
new DefaultState(join(processed, line))
}
def wrap(p: (String, Seq[(String, Int)])) = {
val (path, chunks) = p
val (_, ext) = Util.baseExtension(path)
"\\n" +
<div><small><a href={ path + ".html" }>{ path }</a></small>{
for ((code, line) <- chunks) yield
<pre class={"brush: " + ext + "; first-line: " + line + ";"}>{ code }</pre>
}</div> + "\\n"
}
def wrapResults(code: String, spec: Boolean) = {
id += 1
val rawId = "raw" + id
val runId = "run" + id
val showRaw = "document.getElementById('" + rawId +
"').style.display='block';document.getElementById('" + runId +
"').style.display='none';return false;"
val showRun = "document.getElementById('" + rawId +
"').style.display='none';document.getElementById('" + runId +
"').style.display='block';return false;"
<div id={rawId} style="display: none;">
<small><a href="#" onclick={ showRun }>Show result</a></small>
<pre class="brush: scala;">{ code }</pre>
</div> + "\n" +
<div id={runId} style="display: block;">
<small><a href="#" onclick={ showRaw }>Show source</a></small>
{
if (spec)
<pre class="brush: specs; light: true;">{ Escalator.runTest(code) }</pre>
else
<pre class="brush: plain; light: true;">{ Escalator.runExample(code) }</pre>
}
</div> + "\n"
}
def wrapConsoleResults(code: String, spec: Boolean) = {
id += 1
val srcId = "src" + id
val resId = "res" + id
val rows = math.max(6, code.split("\n").size)
<div>
<textarea id={srcId} cols="80" rows={rows.toString}
onkeydown={"return Escalator.handleKey(event, " + id + ", " + spec + ")"}>{ code }</textarea>
<small><a href="#" onclick={"Escalator.getResults(" + id + ", " + spec + "); return false"}>Run</a></small>
</div> + "\n" +
<div id={resId} style="background-color: #e0e0e0;">Click 'Run' or press Ctrl-Enter for results</div>
}
def join(a: String, b: String) = a + "\n" + b
/**
* Joins just the strings returned from a call to getSource.
*/
def extract(p: (String, Seq[(String, Int)])) = {
(for ((s, _) <- p._2) yield s).mkString("\n")
}
}
class TestState(processed: String, buffer: String) extends DefaultState(processed) {
override def add(line: String) = line match {
case TestEnd() =>
new DefaultState(join(processed, wrapResults(buffer, true)))
case SourceCmd(rest) =>
new TestState(processed, join(buffer, extract(getSource(rest))))
case _ =>
new TestState(processed, join(buffer, line))
}
}
class ExampleState(processed: String, buffer: String) extends DefaultState(processed) {
override def add(line: String) = line match {
case ExampleEnd() =>
new DefaultState(join(processed, wrapResults(buffer, false)))
case SourceCmd(rest) =>
new ExampleState(processed, join(buffer, extract(getSource(rest))))
case _ =>
new ExampleState(processed, join(buffer, line))
}
}
class ConsoleTestState(processed: String, buffer: String) extends DefaultState(processed) {
override def add(line: String) = line match {
case TestEnd() =>
new DefaultState(join(processed, wrapConsoleResults(buffer, true)))
case SourceCmd(rest) =>
new ConsoleTestState(processed, join(buffer, extract(getSource(rest))))
case _ =>
new ConsoleTestState(processed, join(buffer, line))
}
}
class ConsoleExampleState(processed: String, buffer: String) extends DefaultState(processed) {
override def add(line: String) = line match {
case ExampleEnd() =>
new DefaultState(join(processed, wrapConsoleResults(buffer, false)))
case SourceCmd(rest) =>
new ConsoleExampleState(processed, join(buffer, extract(getSource(rest))))
case _ =>
new ConsoleExampleState(processed, join(buffer, line))
}
}
/**
* Expand all of the escalator commands in the source.
*
* @param source Incoming escalator document, as a String
* @return the expanded markdown version of the document
*/
def apply(source: String, path: File): String = {
Escalator.interpreter.reset()
documentPath = path
source.lines.foldLeft(new DefaultState(""))(_ add _).processed
}
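// Worked example (illustrative): a document containing
//   // example
//   1 + 1
//   // end example
// is folded line-by-line DefaultState -> ExampleState -> DefaultState, and the
// buffered code is rendered through wrapResults(buffer, spec = false).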
/**
* Retrieve a source fragment from a file. The command consists of a filename
* (in single quotes in case it contains spaces), optionally followed by either
* a label in angle brackets or a sequence of pattern selectors.
*
* @param command The tail part of the //source ... command.
* @return a pair of the filename and a sequence of pairs of source fragments and
* their starting line numbers from the file
*/
def getSource(command: String): (String, Seq[(String, Int)]) = {
command match {
case FilePart(path, rest) => {
val path2 = Util.unquote(path)
val file = if (path2 startsWith "/") {
new File(Escalator.config.source, path2) // TODO should this be root instead?
} else {
new File(documentPath.getParentFile, path2)
}
if (rest == "") {
// Slurp in the whole file
(path2, List((Util.readFile(file), 1)))
} else if ((rest startsWith "<") && (rest endsWith ">")) {
// Extract a labeled fragment
val label = rest.substring(1, rest.length - 1)
(path2, List(getLabeledSource(file, label)))
} else {
// Find a fragment given a selector path
(path2, Find(file, rest))
}
}
case _ => ("Error in source command", Nil)
}
}
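// Example commands (paths and labels are illustrative; the selector syntax is
// whatever Find accepts):
//   //source 'My File.scala'             -- slurp the whole file
//   //source 'My File.scala' <setup>     -- the fragment labeled <setup>
//   //source Util.scala someSelector     -- fragment(s) found via a selector path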
/**
* This works to grab source code out of another file, which should be
* denoted in the escalator file being read.
*
* @return Returns the source code if found, otherwise returns a message
* saying it couldn't find the named segment requested at the path provided.
*/
def getLabeledSource(file: File, label: String): (String, Int) = {
val source = Util.readFile(file)
val matchOpt = ("(?s)(.*?)([ \\\\t]*" + ESCESC + "\\\\s*<"+label+">\\\\s*)" + "(.*?)" +
"\\\\s*?" + ESCESC + "\\\\s*</"+label+">").r.findFirstMatchIn( source )
matchOpt match {
case Some(m) => (m.group(3), m.group(1).lines.length + 2)
case None => ("[!<"+label+"> not found in "+file+"!]", 0)
}
}
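// Marker sketch (illustrative): for label "demo", the referenced file contains
//   // <demo>
//   val x = 42
//   // </demo>
// and getLabeledSource returns the fragment "val x = 42" paired with its
// starting line number in the file.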
}
|
bhoward/Escalator
|
src/main/scala/edu/depauw/escalator/Downalate.scala
|
Scala
|
apache-2.0
| 8,494 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala
package tools
package util
import java.net.URL
import scala.tools.reflect.WrappedProperties.AccessControl
import scala.tools.nsc.{CloseableRegistry, Settings}
import scala.tools.nsc.util.ClassPath
import scala.reflect.io.{Directory, File, Path}
import PartialFunction.condOpt
import scala.tools.nsc.classpath._
// Loosely based on the draft specification at:
// https://wiki.scala-lang.org/display/SIW/Classpath
object PathResolver {
// Imports property/environment functions which suppress security exceptions.
import AccessControl._
import java.lang.System.{lineSeparator => EOL}
implicit class MkLines(val t: IterableOnce[_]) extends AnyVal {
def mkLines: String = t.iterator.mkString("", EOL, EOL)
def mkLines(header: String, indented: Boolean = false, embraced: Boolean = false): String = {
val space = "\u0020"
val sep = if (indented) EOL + space * 2 else EOL
val (lbrace, rbrace) = if (embraced) (space + "{", EOL + "}") else ("", "")
t.iterator.mkString(header + lbrace + sep, sep, rbrace + EOL)
}
}
implicit class AsLines(val s: String) extends AnyVal {
// sm"""...""" could do this in one pass
def asLines = s.trim.stripMargin.linesIterator.mkLines
}
/** pretty print class path */
def ppcp(s: String) = ClassPath.split(s) match {
case Nil => ""
case Seq(x) => x
case xs => xs.mkString(EOL, EOL, "")
}
/** Values found solely by inspecting environment or property variables.
*/
object Environment {
import scala.collection.JavaConverters._
private def searchForBootClasspath: String = {
val props = System.getProperties
// This formulation should be immune to ConcurrentModificationExceptions when system properties
// are mutated concurrently and we're unlucky enough to witness a partially published result of
// System.setProperty or direct mutation of the System property map. stringPropertyNames internally
// uses the Enumeration interface, rather than Iterator, and this disables the fail-fast
// ConcurrentModificationException.
val propNames = props.stringPropertyNames()
propNames.asScala collectFirst { case k if k endsWith ".boot.class.path" => props.getProperty(k) } getOrElse ""
}
/** Environment variables which java pays attention to, so it
* seems we do as well.
*/
def sourcePathEnv = envOrElse("SOURCEPATH", "")
def javaBootClassPath = propOrElse("sun.boot.class.path", searchForBootClasspath)
def javaExtDirs = propOrEmpty("java.ext.dirs")
def scalaHome = propOrEmpty("scala.home")
def scalaExtDirs = propOrEmpty("scala.ext.dirs")
/** The java classpath and whether to use it. */
def javaUserClassPath = propOrElse("java.class.path", "")
def useJavaClassPath = propOrFalse("scala.usejavacp")
override def toString = s"""
|object Environment {
| scalaHome = $scalaHome (useJavaClassPath = $useJavaClassPath)
| javaBootClassPath = <${javaBootClassPath.length} chars>
| javaExtDirs = ${ppcp(javaExtDirs)}
| javaUserClassPath = ${ppcp(javaUserClassPath)}
| scalaExtDirs = ${ppcp(scalaExtDirs)}
|}""".asLines
}
/** Default values based on those in Environment as interpreted according
* to the path resolution specification.
*/
object Defaults {
def scalaSourcePath = Environment.sourcePathEnv
def javaBootClassPath = Environment.javaBootClassPath
def javaUserClassPath = Environment.javaUserClassPath
def javaExtDirs = Environment.javaExtDirs
def useJavaClassPath = Environment.useJavaClassPath
def scalaHome = Environment.scalaHome
def scalaHomeDir = Directory(scalaHome)
def scalaLibDir = Directory(scalaHomeDir / "lib")
def scalaClassesDir = Directory(scalaHomeDir / "classes")
def scalaLibAsJar = File(scalaLibDir / "scala-library.jar")
def scalaLibAsDir = Directory(scalaClassesDir / "library")
def scalaLibDirFound: Option[Directory] =
if (scalaLibAsJar.isFile) Some(scalaLibDir)
else if (scalaLibAsDir.isDirectory) Some(scalaClassesDir)
else None
def scalaLibFound =
if (scalaLibAsJar.isFile) scalaLibAsJar.path
else if (scalaLibAsDir.isDirectory) scalaLibAsDir.path
else ""
// TODO It must be time for someone to figure out what all these things
// are intended to do. This is disabled here because it was causing all
// the scala jars to end up on the classpath twice: one on the boot
// classpath as set up by the runner (or regular classpath under -nobootcp)
// and then again here.
def scalaBootClassPath = ""
def scalaExtDirs = Environment.scalaExtDirs
def scalaPluginPath = (scalaHomeDir / "misc" / "scala-devel" / "plugins").path
override def toString = s"""
|object Defaults {
| scalaHome = $scalaHome
| javaBootClassPath = ${ppcp(javaBootClassPath)}
| scalaLibDirFound = $scalaLibDirFound
| scalaLibFound = $scalaLibFound
| scalaBootClassPath = ${ppcp(scalaBootClassPath)}
| scalaPluginPath = ${ppcp(scalaPluginPath)}
|}""".asLines
}
/** Locations discovered by supplemental heuristics.
*/
object SupplementalLocations {
/** The platform-specific support jar.
*
* Usually this is `tools.jar` in the jdk/lib directory of the platform distribution.
*
* The file location is determined by probing the lib directory under JDK_HOME or JAVA_HOME,
* if one of those environment variables is set, then the lib directory under java.home,
* and finally the lib directory under the parent of java.home. Or, as a last resort,
* search deeply under those locations (except for the parent of java.home, on the notion
* that if this is not a canonical installation, then that search would have little
* chance of succeeding).
*/
def platformTools: Option[File] = {
val jarName = "tools.jar"
def jarPath(path: Path) = (path / "lib" / jarName).toFile
def jarAt(path: Path) = {
val f = jarPath(path)
if (f.isFile) Some(f) else None
}
val jdkDir = {
val d = Directory(jdkHome)
if (d.isDirectory) Some(d) else None
}
def deeply(dir: Directory) = dir.deepFiles find (_.name == jarName)
val home = envOrSome("JDK_HOME", envOrNone("JAVA_HOME")) map (p => Path(p))
val install = Some(Path(javaHome))
(home flatMap jarAt) orElse (install flatMap jarAt) orElse (install map (_.parent) flatMap jarAt) orElse
(jdkDir flatMap deeply)
}
override def toString = s"""
|object SupplementalLocations {
| platformTools = $platformTools
|}""".asLines
}
/** With no arguments, show the interesting values in Environment and Defaults.
* If there are arguments, show those in Calculated as if those options had been
* given to a scala runner.
*/
def main(args: Array[String]): Unit =
if (args.isEmpty) {
println(Environment)
println(Defaults)
} else {
val settings = new Settings()
val rest = settings.processArguments(args.toList, processAll = false)._2
val registry = new CloseableRegistry
try {
val pr = new PathResolver(settings, registry)
println("COMMAND: 'scala %s'".format(args.mkString(" ")))
println("RESIDUAL: 'scala %s'\\n".format(rest.mkString(" ")))
pr.result match {
case cp: AggregateClassPath =>
println(s"ClassPath has ${cp.aggregates.size} entries and results in:\\n${cp.asClassPathStrings}")
}
} finally {
registry.close()
}
}
}
final class PathResolver(settings: Settings, closeableRegistry: CloseableRegistry = new CloseableRegistry) {
private val classPathFactory = new ClassPathFactory(settings, closeableRegistry)
import PathResolver.{ AsLines, Defaults, ppcp }
private def cmdLineOrElse(name: String, alt: String) = {
(commandLineFor(name) match {
case Some("") => None
case x => x
}) getOrElse alt
}
private def commandLineFor(s: String): Option[String] = condOpt(s) {
case "javabootclasspath" => settings.javabootclasspath.value
case "javaextdirs" => settings.javaextdirs.value
case "bootclasspath" => settings.bootclasspath.value
case "extdirs" => settings.extdirs.value
case "classpath" | "cp" => settings.classpath.value
case "sourcepath" => settings.sourcepath.value
}
/** Calculated values based on any given command line options, falling back on
* those in Defaults.
*/
object Calculated {
def scalaHome = Defaults.scalaHome
def useJavaClassPath = settings.usejavacp.value || Defaults.useJavaClassPath
def useManifestClassPath= settings.usemanifestcp.value
def javaBootClassPath = cmdLineOrElse("javabootclasspath", Defaults.javaBootClassPath)
def javaExtDirs = cmdLineOrElse("javaextdirs", Defaults.javaExtDirs)
def javaUserClassPath = if (useJavaClassPath) Defaults.javaUserClassPath else ""
def scalaBootClassPath = cmdLineOrElse("bootclasspath", Defaults.scalaBootClassPath)
def scalaExtDirs = cmdLineOrElse("extdirs", Defaults.scalaExtDirs)
/** Scaladoc doesn't need any bootstrapping, otherwise will create errors such as:
* [scaladoc] ../scala-trunk/src/reflect/scala/reflect/macros/Reifiers.scala:89: error: object api is not a member of package reflect
* [scaladoc] case class ReificationException(val pos: reflect.api.PositionApi, val msg: String) extends Throwable(msg)
* [scaladoc] ^
* because the bootstrapping will look at the sourcepath and create package "reflect" in "<root>"
* and then when typing relative names, instead of picking <root>.scala.reflect, typedIdentifier will pick up the
* <root>.reflect package created by the bootstrapping. Thus, no bootstrapping for scaladoc!
* TODO: we should refactor this as a separate -bootstrap option to have a clean implementation, no? */
def sourcePath = if (!settings.isScaladoc) cmdLineOrElse("sourcepath", Defaults.scalaSourcePath) else ""
def userClassPath = settings.classpath.value // default is specified by settings and can be overridden there
import classPathFactory._
// Assemble the elements!
def basis = List[Iterable[ClassPath]](
jrt, // 0. The Java 9+ classpath (backed by the ct.sym or jrt:/ virtual system, if available)
classesInPath(javaBootClassPath), // 1. The Java bootstrap class path.
contentsOfDirsInPath(javaExtDirs), // 2. The Java extension class path.
classesInExpandedPath(javaUserClassPath), // 3. The Java application class path.
classesInPath(scalaBootClassPath), // 4. The Scala boot class path.
contentsOfDirsInPath(scalaExtDirs), // 5. The Scala extension class path.
classesInExpandedPath(userClassPath), // 6. The Scala application class path.
classesInManifest(useManifestClassPath), // 7. The Manifest class path.
sourcesInPath(sourcePath) // 8. The Scala source path.
)
private def jrt: Option[ClassPath] = JrtClassPath.apply(settings.releaseValue, closeableRegistry)
lazy val containers = basis.flatten.distinct
override def toString = s"""
|object Calculated {
| scalaHome = $scalaHome
| javaBootClassPath = ${ppcp(javaBootClassPath)}
| javaExtDirs = ${ppcp(javaExtDirs)}
| javaUserClassPath = ${ppcp(javaUserClassPath)}
| useJavaClassPath = $useJavaClassPath
| scalaBootClassPath = ${ppcp(scalaBootClassPath)}
| scalaExtDirs = ${ppcp(scalaExtDirs)}
| userClassPath = ${ppcp(userClassPath)}
| sourcePath = ${ppcp(sourcePath)}
|}""".asLines
}
def containers = Calculated.containers
import PathResolver.MkLines
def result: ClassPath = {
val cp = computeResult()
if (settings.Ylogcp) {
Console print f"Classpath built from ${settings.toConciseString} %n"
Console print s"Defaults: ${PathResolver.Defaults}"
Console print s"Calculated: $Calculated"
val xs = (Calculated.basis drop 2).flatten.distinct
Console print (xs mkLines (s"After java boot/extdirs classpath has ${xs.size} entries:", indented = true))
}
cp
}
def resultAsURLs: Seq[URL] = result.asURLs
@deprecated("Use resultAsURLs instead of this one", "2.11.5")
def asURLs: List[URL] = resultAsURLs.toList
private def computeResult(): ClassPath = AggregateClassPath(containers.toIndexedSeq)
}
|
martijnhoekstra/scala
|
src/compiler/scala/tools/util/PathResolver.scala
|
Scala
|
apache-2.0
| 13,210 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.fixture
import org.scalatest._
import SharedHelpers._
import events.TestFailed
import org.scalatest.exceptions.DuplicateTestNameException
import org.scalatest.exceptions.NotAllowedException
import org.scalatest.exceptions.TestFailedException
import org.scalatest.exceptions.TestRegistrationClosedException
import org.scalatest.events.InfoProvided
class FeatureSpecSpec extends org.scalatest.FunSpec {
describe("A fixture.FeatureSpec") {
it("should return the test names in order of registration from testNames") {
val a = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
scenario("should do that") { fixture =>
}
scenario("should do this") { fixture =>
}
}
assertResult(List("Scenario: should do that", "Scenario: should do this")) {
a.testNames.iterator.toList
}
val b = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
}
assertResult(List[String]()) {
b.testNames.iterator.toList
}
val c = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
scenario("should do this") { fixture =>
}
scenario("should do that") { fixture =>
}
}
assertResult(List("Scenario: should do this", "Scenario: should do that")) {
c.testNames.iterator.toList
}
}
it("should throw NotAllowedException if a duplicate scenario name registration is attempted") {
intercept[DuplicateTestNameException] {
new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
scenario("test this") { fixture =>
}
scenario("test this") { fixture =>
}
}
}
intercept[DuplicateTestNameException] {
new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
scenario("test this") { fixture =>
}
ignore("test this") { fixture =>
}
}
}
intercept[DuplicateTestNameException] {
new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
ignore("test this") { fixture =>
}
ignore("test this") { fixture =>
}
}
}
intercept[DuplicateTestNameException] {
new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
ignore("test this") { fixture =>
}
scenario("test this") { fixture =>
}
}
}
}
it("should pass in the fixture to every test method") {
val a = new FeatureSpec {
type FixtureParam = String
val hello = "Hello, world!"
def withFixture(test: OneArgTest): Outcome = {
test(hello)
}
scenario("should do this") { fixture =>
assert(fixture === hello)
}
scenario("should do that") { fixture =>
assert(fixture === hello)
}
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
assert(!rep.eventsReceived.exists(_.isInstanceOf[TestFailed]))
}
it("should throw NullPointerException if a null test tag is provided") {
// scenario
intercept[NullPointerException] {
new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
scenario("hi", null) { fixture => }
}
}
val caught = intercept[NullPointerException] {
new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
scenario("hi", mytags.SlowAsMolasses, null) { fixture => }
}
}
assert(caught.getMessage === "a test tag was null")
intercept[NullPointerException] {
new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
scenario("hi", mytags.SlowAsMolasses, null, mytags.WeakAsAKitten) { fixture => }
}
}
// ignore
intercept[NullPointerException] {
new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
ignore("hi", null) { fixture => }
}
}
val caught2 = intercept[NullPointerException] {
new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
ignore("hi", mytags.SlowAsMolasses, null) { fixture => }
}
}
assert(caught2.getMessage === "a test tag was null")
intercept[NullPointerException] {
new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
ignore("hi", mytags.SlowAsMolasses, null, mytags.WeakAsAKitten) { fixture => }
}
}
// registerTest
intercept[NullPointerException] {
new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
registerTest("hi", null) { fixture => }
}
}
val caught3 = intercept[NullPointerException] {
new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
registerTest("hi", mytags.SlowAsMolasses, null) { fixture => }
}
}
assert(caught3.getMessage === "a test tag was null")
intercept[NullPointerException] {
new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
registerTest("hi", mytags.SlowAsMolasses, null, mytags.WeakAsAKitten) { fixture => }
}
}
// registerIgnoredTest
intercept[NullPointerException] {
new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
registerIgnoredTest("hi", null) { fixture => }
}
}
val caught4 = intercept[NullPointerException] {
new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
registerIgnoredTest("hi", mytags.SlowAsMolasses, null) { fixture => }
}
}
assert(caught4.getMessage === "a test tag was null")
intercept[NullPointerException] {
new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
registerIgnoredTest("hi", mytags.SlowAsMolasses, null, mytags.WeakAsAKitten) { fixture => }
}
}
}
class TestWasCalledSuite extends FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
scenario("this") { fixture => theTestThisCalled = true }
scenario("that") { fixture => theTestThatCalled = true }
}
it("should execute all tests when run is called with testName None") {
val b = new TestWasCalledSuite
b.run(None, Args(SilentReporter))
assert(b.theTestThisCalled)
assert(b.theTestThatCalled)
}
it("should execute one test when run is called with a defined testName") {
val a = new TestWasCalledSuite
a.run(Some("Scenario: this"), Args(SilentReporter))
assert(a.theTestThisCalled)
assert(!a.theTestThatCalled)
}
it("should report as ignored, and not run, tests marked ignored") {
val a = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
scenario("test this") { fixture => theTestThisCalled = true }
scenario("test that") { fixture => theTestThatCalled = true }
}
import scala.language.reflectiveCalls
val repA = new TestIgnoredTrackingReporter
a.run(None, Args(repA))
assert(!repA.testIgnoredReceived)
assert(a.theTestThisCalled)
assert(a.theTestThatCalled)
val b = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
ignore("test this") { fixture => theTestThisCalled = true }
scenario("test that") { fixture => theTestThatCalled = true }
}
val repB = new TestIgnoredTrackingReporter
b.run(None, Args(repB))
assert(repB.testIgnoredReceived)
assert(repB.lastEvent.isDefined)
assert(repB.lastEvent.get.testName endsWith "test this")
assert(!b.theTestThisCalled)
assert(b.theTestThatCalled)
val c = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
scenario("test this") { fixture => theTestThisCalled = true }
ignore("test that") { fixture => theTestThatCalled = true }
}
val repC = new TestIgnoredTrackingReporter
c.run(None, Args(repC))
assert(repC.testIgnoredReceived)
assert(repC.lastEvent.isDefined)
assert(repC.lastEvent.get.testName endsWith "test that", repC.lastEvent.get.testName)
assert(c.theTestThisCalled)
assert(!c.theTestThatCalled)
// The order I want is order of appearance in the file.
// Will try and implement that tomorrow. Subtypes will be able to change the order.
val d = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
ignore("test this") { fixture => theTestThisCalled = true }
ignore("test that") { fixture => theTestThatCalled = true }
}
val repD = new TestIgnoredTrackingReporter
d.run(None, Args(repD))
assert(repD.testIgnoredReceived)
assert(repD.lastEvent.isDefined)
assert(repD.lastEvent.get.testName endsWith "test that") // last because should be in order of appearance
assert(!d.theTestThisCalled)
assert(!d.theTestThatCalled)
}
it("should ignore a test marked as ignored if run is invoked with that testName") {
// If I provide a specific testName to run, then it should ignore an Ignore on that test
// method and actually invoke it.
val e = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
ignore("test this") { fixture => theTestThisCalled = true }
scenario("test that") { fixture => theTestThatCalled = true }
}
import scala.language.reflectiveCalls
val repE = new TestIgnoredTrackingReporter
e.run(Some("Scenario: test this"), Args(repE))
assert(repE.testIgnoredReceived)
assert(!e.theTestThisCalled)
assert(!e.theTestThatCalled)
}
it("should run only those tests selected by the tags to include and exclude sets") {
// Nothing is excluded
val a = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
scenario("test this", mytags.SlowAsMolasses) { fixture => theTestThisCalled = true }
scenario("test that") { fixture => theTestThatCalled = true }
}
import scala.language.reflectiveCalls
val repA = new TestIgnoredTrackingReporter
a.run(None, Args(repA))
assert(!repA.testIgnoredReceived)
assert(a.theTestThisCalled)
assert(a.theTestThatCalled)
// SlowAsMolasses is included, one test should be excluded
val b = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
scenario("test this", mytags.SlowAsMolasses) { fixture => theTestThisCalled = true }
scenario("test that") { fixture => theTestThatCalled = true }
}
val repB = new TestIgnoredTrackingReporter
b.run(None, Args(repB, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set()), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repB.testIgnoredReceived)
assert(b.theTestThisCalled)
assert(!b.theTestThatCalled)
// SlowAsMolasses is included, and both tests should be included
val c = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
scenario("test this", mytags.SlowAsMolasses) { fixture => theTestThisCalled = true }
scenario("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true }
}
val repC = new TestIgnoredTrackingReporter
c.run(None, Args(repC, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set()), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repC.testIgnoredReceived)
assert(c.theTestThisCalled)
assert(c.theTestThatCalled)
// SlowAsMolasses is included. both tests should be included but one ignored
val d = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
ignore("test this", mytags.SlowAsMolasses) { fixture => theTestThisCalled = true }
scenario("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true }
}
val repD = new TestIgnoredTrackingReporter
d.run(None, Args(repD, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.Ignore")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(repD.testIgnoredReceived)
assert(!d.theTestThisCalled)
assert(d.theTestThatCalled)
// SlowAsMolasses included, FastAsLight excluded
val e = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
scenario("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true }
scenario("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true }
scenario("test the other") { fixture => theTestTheOtherCalled = true }
}
val repE = new TestIgnoredTrackingReporter
e.run(None, Args(repE, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repE.testIgnoredReceived)
assert(!e.theTestThisCalled)
assert(e.theTestThatCalled)
assert(!e.theTestTheOtherCalled)
// An Ignored test that was both included and excluded should not generate a TestIgnored event
val f = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
ignore("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true }
scenario("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true }
scenario("test the other") { fixture => theTestTheOtherCalled = true }
}
val repF = new TestIgnoredTrackingReporter
f.run(None, Args(repF, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repF.testIgnoredReceived)
assert(!f.theTestThisCalled)
assert(f.theTestThatCalled)
assert(!f.theTestTheOtherCalled)
// An Ignored test that was not included should not generate a TestIgnored event
val g = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
scenario("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true }
scenario("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true }
ignore("test the other") { fixture => theTestTheOtherCalled = true }
}
val repG = new TestIgnoredTrackingReporter
g.run(None, Args(repG, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repG.testIgnoredReceived)
assert(!g.theTestThisCalled)
assert(g.theTestThatCalled)
assert(!g.theTestTheOtherCalled)
// No tagsToInclude set, FastAsLight excluded
val h = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
scenario("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true }
scenario("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true }
scenario("test the other") { fixture => theTestTheOtherCalled = true }
}
val repH = new TestIgnoredTrackingReporter
h.run(None, Args(repH, Stopper.default, Filter(None, Set("org.scalatest.FastAsLight")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repH.testIgnoredReceived)
assert(!h.theTestThisCalled)
assert(h.theTestThatCalled)
assert(h.theTestTheOtherCalled)
// No tagsToInclude set, SlowAsMolasses excluded
val i = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
scenario("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true }
scenario("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true }
scenario("test the other") { fixture => theTestTheOtherCalled = true }
}
val repI = new TestIgnoredTrackingReporter
i.run(None, Args(repI, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repI.testIgnoredReceived)
assert(!i.theTestThisCalled)
assert(!i.theTestThatCalled)
assert(i.theTestTheOtherCalled)
// No tagsToInclude set, SlowAsMolasses excluded, TestIgnored should not be received on excluded ones
val j = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
ignore("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true }
ignore("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true }
scenario("test the other") { fixture => theTestTheOtherCalled = true }
}
val repJ = new TestIgnoredTrackingReporter
j.run(None, Args(repJ, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repJ.testIgnoredReceived)
assert(!j.theTestThisCalled)
assert(!j.theTestThatCalled)
assert(j.theTestTheOtherCalled)
// Same as previous, except Ignore specifically mentioned in excludes set
val k = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
ignore("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true }
ignore("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true }
ignore("test the other") { fixture => theTestTheOtherCalled = true }
}
val repK = new TestIgnoredTrackingReporter
k.run(None, Args(repK, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses", "org.scalatest.Ignore")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(repK.testIgnoredReceived)
assert(!k.theTestThisCalled)
assert(!k.theTestThatCalled)
assert(!k.theTestTheOtherCalled)
}
it("should run only those registered tests selected by the tags to include and exclude sets") {
// Nothing is excluded
val a = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
registerTest("test this", mytags.SlowAsMolasses) { fixture => theTestThisCalled = true }
registerTest("test that") { fixture => theTestThatCalled = true }
}
import scala.language.reflectiveCalls
val repA = new TestIgnoredTrackingReporter
a.run(None, Args(repA))
assert(!repA.testIgnoredReceived)
assert(a.theTestThisCalled)
assert(a.theTestThatCalled)
// SlowAsMolasses is included, one test should be excluded
val b = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
registerTest("test this", mytags.SlowAsMolasses) { fixture => theTestThisCalled = true }
registerTest("test that") { fixture => theTestThatCalled = true }
}
val repB = new TestIgnoredTrackingReporter
b.run(None, Args(repB, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set()), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repB.testIgnoredReceived)
assert(b.theTestThisCalled)
assert(!b.theTestThatCalled)
// SlowAsMolasses is included, and both tests should be included
val c = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
registerTest("test this", mytags.SlowAsMolasses) { fixture => theTestThisCalled = true }
registerTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true }
}
val repC = new TestIgnoredTrackingReporter
c.run(None, Args(repC, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set()), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repC.testIgnoredReceived)
assert(c.theTestThisCalled)
assert(c.theTestThatCalled)
// SlowAsMolasses is included. both tests should be included but one ignored
val d = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
registerIgnoredTest("test this", mytags.SlowAsMolasses) { fixture => theTestThisCalled = true }
registerTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true }
}
val repD = new TestIgnoredTrackingReporter
d.run(None, Args(repD, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.Ignore")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(repD.testIgnoredReceived)
assert(!d.theTestThisCalled)
assert(d.theTestThatCalled)
// SlowAsMolasses included, FastAsLight excluded
val e = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true }
registerTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true }
registerTest("test the other") { fixture => theTestTheOtherCalled = true }
}
val repE = new TestIgnoredTrackingReporter
e.run(None, Args(repE, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repE.testIgnoredReceived)
assert(!e.theTestThisCalled)
assert(e.theTestThatCalled)
assert(!e.theTestTheOtherCalled)
// An Ignored test that was both included and excluded should not generate a TestIgnored event
val f = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerIgnoredTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true }
registerTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true }
registerTest("test the other") { fixture => theTestTheOtherCalled = true }
}
val repF = new TestIgnoredTrackingReporter
f.run(None, Args(repF, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repF.testIgnoredReceived)
assert(!f.theTestThisCalled)
assert(f.theTestThatCalled)
assert(!f.theTestTheOtherCalled)
// An Ignored test that was not included should not generate a TestIgnored event
val g = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true }
registerTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true }
registerIgnoredTest("test the other") { fixture => theTestTheOtherCalled = true }
}
val repG = new TestIgnoredTrackingReporter
g.run(None, Args(repG, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repG.testIgnoredReceived)
assert(!g.theTestThisCalled)
assert(g.theTestThatCalled)
assert(!g.theTestTheOtherCalled)
// No tagsToInclude set, FastAsLight excluded
val h = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true }
registerTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true }
registerTest("test the other") { fixture => theTestTheOtherCalled = true }
}
val repH = new TestIgnoredTrackingReporter
h.run(None, Args(repH, Stopper.default, Filter(None, Set("org.scalatest.FastAsLight")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repH.testIgnoredReceived)
assert(!h.theTestThisCalled)
assert(h.theTestThatCalled)
assert(h.theTestTheOtherCalled)
// No tagsToInclude set, SlowAsMolasses excluded
val i = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true }
registerTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true }
registerTest("test the other") { fixture => theTestTheOtherCalled = true }
}
val repI = new TestIgnoredTrackingReporter
i.run(None, Args(repI, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repI.testIgnoredReceived)
assert(!i.theTestThisCalled)
assert(!i.theTestThatCalled)
assert(i.theTestTheOtherCalled)
// No tagsToInclude set, SlowAsMolasses excluded, TestIgnored should not be received on excluded ones
val j = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerIgnoredTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true }
registerIgnoredTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true }
registerTest("test the other") { fixture => theTestTheOtherCalled = true }
}
val repJ = new TestIgnoredTrackingReporter
j.run(None, Args(repJ, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repJ.testIgnoredReceived)
assert(!j.theTestThisCalled)
assert(!j.theTestThatCalled)
assert(j.theTestTheOtherCalled)
// Same as previous, except Ignore specifically mentioned in excludes set
val k = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerIgnoredTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true }
registerIgnoredTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true }
registerIgnoredTest("test the other") { fixture => theTestTheOtherCalled = true }
}
val repK = new TestIgnoredTrackingReporter
k.run(None, Args(repK, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses", "org.scalatest.Ignore")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(repK.testIgnoredReceived)
assert(!k.theTestThisCalled)
assert(!k.theTestThatCalled)
assert(!k.theTestTheOtherCalled)
}
it("should return the correct test count from its expectedTestCount method") {
val a = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
scenario("test this") { fixture => }
scenario("test that") { fixture => }
}
assert(a.expectedTestCount(Filter()) === 2)
val b = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
ignore("test this") { fixture => }
scenario("test that") { fixture => }
}
assert(b.expectedTestCount(Filter()) === 1)
val c = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
scenario("test this", mytags.FastAsLight) { fixture => }
scenario("test that") { fixture => }
}
assert(c.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) === 1)
assert(c.expectedTestCount(Filter(None, Set("org.scalatest.FastAsLight"))) === 1)
val d = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
scenario("test this", mytags.FastAsLight, mytags.SlowAsMolasses) { fixture => }
scenario("test that", mytags.SlowAsMolasses) { fixture => }
scenario("test the other thing") { fixture => }
}
assert(d.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) === 1)
assert(d.expectedTestCount(Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight"))) === 1)
assert(d.expectedTestCount(Filter(None, Set("org.scalatest.SlowAsMolasses"))) === 1)
assert(d.expectedTestCount(Filter()) === 3)
val e = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
scenario("test this", mytags.FastAsLight, mytags.SlowAsMolasses) { fixture => }
scenario("test that", mytags.SlowAsMolasses) { fixture => }
ignore("test the other thing") { fixture => }
}
assert(e.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) === 1)
assert(e.expectedTestCount(Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight"))) === 1)
assert(e.expectedTestCount(Filter(None, Set("org.scalatest.SlowAsMolasses"))) === 0)
assert(e.expectedTestCount(Filter()) === 2)
val f = new Suites(a, b, c, d, e)
assert(f.expectedTestCount(Filter()) === 10)
}
it("should return the correct test count from its expectedTestCount method when uses registerTest and registerIgnoredTest to register tests") {
val a = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
registerTest("test this") { fixture => }
registerTest("test that") { fixture => }
}
assert(a.expectedTestCount(Filter()) === 2)
val b = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
registerIgnoredTest("test this") { fixture => }
registerTest("test that") { fixture => }
}
assert(b.expectedTestCount(Filter()) === 1)
val c = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
registerTest("test this", mytags.FastAsLight) { fixture => }
registerTest("test that") { fixture => }
}
assert(c.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) === 1)
assert(c.expectedTestCount(Filter(None, Set("org.scalatest.FastAsLight"))) === 1)
val d = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
registerTest("test this", mytags.FastAsLight, mytags.SlowAsMolasses) { fixture => }
registerTest("test that", mytags.SlowAsMolasses) { fixture => }
registerTest("test the other thing") { fixture => }
}
assert(d.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) === 1)
assert(d.expectedTestCount(Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight"))) === 1)
assert(d.expectedTestCount(Filter(None, Set("org.scalatest.SlowAsMolasses"))) === 1)
assert(d.expectedTestCount(Filter()) === 3)
val e = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
registerTest("test this", mytags.FastAsLight, mytags.SlowAsMolasses) { fixture => }
registerTest("test that", mytags.SlowAsMolasses) { fixture => }
registerIgnoredTest("test the other thing") { fixture => }
}
assert(e.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) === 1)
assert(e.expectedTestCount(Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight"))) === 1)
assert(e.expectedTestCount(Filter(None, Set("org.scalatest.SlowAsMolasses"))) === 0)
assert(e.expectedTestCount(Filter()) === 2)
val f = new Suites(a, b, c, d, e)
assert(f.expectedTestCount(Filter()) === 10)
}
it("should generate a TestPending message when the test body is (pending)") {
val a = new FeatureSpec {
type FixtureParam = String
val hello = "Hello, world!"
def withFixture(test: OneArgTest): Outcome = {
test(hello)
}
scenario("should do this") (pending)
scenario("should do that") { fixture =>
assert(fixture === hello)
}
scenario("should do something else") { fixture =>
assert(fixture === hello)
pending
}
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val tp = rep.testPendingEventsReceived
assert(tp.size === 2)
}
it("should generate a test failure if a Throwable, or an Error other than direct Error subtypes " +
"known in JDK 1.5, excluding AssertionError") {
val a = new FeatureSpec {
type FixtureParam = String
val hello = "Hello, world!"
def withFixture(test: OneArgTest): Outcome = {
test(hello)
}
scenario("throws AssertionError") { s => throw new AssertionError }
scenario("throws plain old Error") { s => throw new Error }
scenario("throws Throwable") { s => throw new Throwable }
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val tf = rep.testFailedEventsReceived
assert(tf.size === 3)
}
it("should propagate out Errors that are direct subtypes of Error in JDK 1.5, other than " +
"AssertionError, causing Suites and Runs to abort.") {
val a = new FeatureSpec {
type FixtureParam = String
val hello = "Hello, world!"
def withFixture(test: OneArgTest): Outcome = {
test(hello)
}
scenario("throws AssertionError") { s => throw new OutOfMemoryError }
}
intercept[OutOfMemoryError] {
a.run(None, Args(SilentReporter))
}
}
/*
it("should send InfoProvided events with aboutAPendingTest set to true for info " +
"calls made from a test that is pending") {
val a = new FeatureSpec with GivenWhenThen {
type FixtureParam = String
val hello = "Hello, world!"
def withFixture(test: OneArgTest): Outcome = {
test(hello)
}
scenario("should do something else") { s =>
given("two integers")
when("one is subracted from the other")
then("the result is the difference between the two numbers")
pending
}
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val testPending = rep.testPendingEventsReceived
assert(testPending.size === 1)
val recordedEvents = testPending(0).recordedEvents
assert(recordedEvents.size === 3)
for (event <- recordedEvents) {
val ip = event.asInstanceOf[InfoProvided]
assert(ip.aboutAPendingTest.isDefined && ip.aboutAPendingTest.get)
}
}
it("should send InfoProvided events with aboutAPendingTest set to false for info " +
"calls made from a test that is not pending") {
val a = new FeatureSpec with GivenWhenThen {
type FixtureParam = String
val hello = "Hello, world!"
def withFixture(test: OneArgTest): Outcome = {
test(hello)
}
scenario("should do something else") { s =>
given("two integers")
when("one is subracted from the other")
then("the result is the difference between the two numbers")
assert(1 + 1 === 2)
}
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val testSucceeded = rep.testSucceededEventsReceived
assert(testSucceeded.size === 1)
val recordedEvents = testSucceeded(0).recordedEvents
assert(recordedEvents.size === 3)
for (event <- recordedEvents) {
val ip = event.asInstanceOf[InfoProvided]
assert(ip.aboutAPendingTest.isDefined && !ip.aboutAPendingTest.get)
}
}
*/
it("should allow both tests that take fixtures and tests that don't") {
val a = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = {
test("Hello, world!")
}
var takesNoArgsInvoked = false
scenario("take no args") { () =>
takesNoArgsInvoked = true
}
var takesAFixtureInvoked = false
scenario("takes a fixture") { s => takesAFixtureInvoked = true }
}
import scala.language.reflectiveCalls
a.run(None, Args(SilentReporter))
assert(a.testNames.size === 2, a.testNames)
assert(a.takesNoArgsInvoked)
assert(a.takesAFixtureInvoked)
}
it("should work with test functions whose inferred result type is not Unit") {
val a = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = {
test("Hello, world!")
}
var takesNoArgsInvoked = false
scenario("should take no args") { () =>
takesNoArgsInvoked = true; true
}
var takesAFixtureInvoked = false
scenario("should take a fixture") { s => takesAFixtureInvoked = true; true }
}
import scala.language.reflectiveCalls
assert(!a.takesNoArgsInvoked)
assert(!a.takesAFixtureInvoked)
a.run(None, Args(SilentReporter))
assert(a.testNames.size === 2, a.testNames)
assert(a.takesNoArgsInvoked)
assert(a.takesAFixtureInvoked)
}
it("should work with ignored tests whose inferred result type is not Unit") {
val a = new FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
ignore("should test this") { () =>
theTestThisCalled = true; "hi"
}
ignore("should test that") { fixture => theTestThatCalled = true; 42 }
}
import scala.language.reflectiveCalls
assert(!a.theTestThisCalled)
assert(!a.theTestThatCalled)
val reporter = new EventRecordingReporter
a.run(None, Args(reporter))
assert(reporter.testIgnoredEventsReceived.size === 2)
assert(!a.theTestThisCalled)
assert(!a.theTestThatCalled)
}
it("should pass a NoArgTest to withFixture for tests that take no fixture") {
class MySpec extends FeatureSpec {
type FixtureParam = String
var aNoArgTestWasPassed = false
var aOneArgTestWasPassed = false
override def withFixture(test: NoArgTest): Outcome = {
aNoArgTestWasPassed = true
Succeeded
}
def withFixture(test: OneArgTest): Outcome = {
aOneArgTestWasPassed = true
Succeeded
}
scenario("something") { () =>
assert(1 + 1 === 2)
}
}
val s = new MySpec
s.run(None, Args(SilentReporter))
assert(s.aNoArgTestWasPassed)
assert(!s.aOneArgTestWasPassed)
}
it("should not pass a NoArgTest to withFixture for tests that take a Fixture") {
class MySpec extends FeatureSpec {
type FixtureParam = String
var aNoArgTestWasPassed = false
var aOneArgTestWasPassed = false
override def withFixture(test: NoArgTest): Outcome = {
aNoArgTestWasPassed = true
Succeeded
}
def withFixture(test: OneArgTest): Outcome = {
aOneArgTestWasPassed = true
Succeeded
}
scenario("something") { fixture =>
assert(1 + 1 === 2)
}
}
val s = new MySpec
s.run(None, Args(SilentReporter))
assert(!s.aNoArgTestWasPassed)
assert(s.aOneArgTestWasPassed)
}
it("should pass a NoArgTest that invokes the no-arg test when the " +
"NoArgTest's no-arg apply method is invoked") {
class MySuite extends FeatureSpec {
type FixtureParam = String
var theNoArgTestWasInvoked = false
def withFixture(test: OneArgTest): Outcome = {
// Shouldn't be called, but just in case don't invoke a OneArgTest
Succeeded
}
scenario("something") { () =>
theNoArgTestWasInvoked = true
}
}
val s = new MySuite
s.run(None, Args(SilentReporter))
assert(s.theNoArgTestWasInvoked)
}
describe("(when a nesting rule has been violated)") {
it("should, if they call a feature from within an scenario clause, result in a TestFailedException when running the test") {
class MySpec extends FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
scenario("should blow up") { fixture =>
feature("in the wrong place, at the wrong time") {
}
}
}
val spec = new MySpec
ensureTestFailedEventReceived(spec, "Scenario: should blow up")
}
it("should, if they call a feature with a nested it from within an it clause, result in a TestFailedException when running the test") {
class MySpec extends FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
scenario("should blow up") { fixture =>
feature("in the wrong place, at the wrong time") {
scenario("should never run") { fixture =>
assert(1 === 1)
}
}
}
}
val spec = new MySpec
ensureTestFailedEventReceived(spec, "Scenario: should blow up")
}
it("should, if they call a nested it from within an it clause, result in a TestFailedException when running the test") {
class MySpec extends FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
scenario("should blow up") { fixture =>
scenario("should never run") { fixture =>
assert(1 === 1)
}
}
}
val spec = new MySpec
ensureTestFailedEventReceived(spec, "Scenario: should blow up")
}
it("should, if they call a nested it with tags from within an it clause, result in a TestFailedException when running the test") {
class MySpec extends FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
scenario("should blow up") { fixture =>
scenario("should never run", mytags.SlowAsMolasses) { fixture =>
assert(1 === 1)
}
}
}
val spec = new MySpec
ensureTestFailedEventReceived(spec, "Scenario: should blow up")
}
it("should, if they call a nested registerTest with tags from within an registerTest clause, result in a TestFailedException when running the test") {
class MySpec extends FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
registerTest("should blow up") { fixture =>
registerTest("should never run", mytags.SlowAsMolasses) { fixture =>
assert(1 === 1)
}
}
}
val spec = new MySpec
ensureTestFailedEventReceived(spec, "Scenario: should blow up")
}
it("should, if they call a feature with a nested ignore from within an it clause, result in a TestFailedException when running the test") {
class MySpec extends FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
scenario("should blow up") { fixture =>
feature("in the wrong place, at the wrong time") {
ignore("should never run") { fixture =>
assert(1 === 1)
}
}
}
}
val spec = new MySpec
ensureTestFailedEventReceived(spec, "Scenario: should blow up")
}
it("should, if they call a nested ignore from within an it clause, result in a TestFailedException when running the test") {
class MySpec extends FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
scenario("should blow up") { fixture =>
ignore("should never run") { fixture =>
assert(1 === 1)
}
}
}
val spec = new MySpec
ensureTestFailedEventReceived(spec, "Scenario: should blow up")
}
it("should, if they call a nested ignore with tags from within an it clause, result in a TestFailedException when running the test") {
class MySpec extends FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
scenario("should blow up") { fixture =>
ignore("should never run", mytags.SlowAsMolasses) { fixture =>
assert(1 === 1)
}
}
}
val spec = new MySpec
ensureTestFailedEventReceived(spec, "Scenario: should blow up")
}
it("should, if they call a nested registerIgnoredTest with tags from within a registerTest clause, result in a TestFailedException when running the test") {
class MySpec extends FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
registerTest("should blow up") { fixture =>
registerIgnoredTest("should never run", mytags.SlowAsMolasses) { fixture =>
assert(1 === 1)
}
}
}
val spec = new MySpec
ensureTestFailedEventReceived(spec, "Scenario: should blow up")
}
it("should, if they call a nested feature from within a feature clause, result in a SuiteAborted event when constructing the FeatureSpec") {
class MySpec extends FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
feature("should blow up") {
feature("should never run") {
}
}
}
val caught =
intercept[NotAllowedException] {
new MySpec
}
assert(caught.getMessage === "Feature clauses cannot be nested.")
}
}
}
it("should pass the correct test name in the OneArgTest passed to withFixture") {
val a = new FeatureSpec {
type FixtureParam = String
var correctTestNameWasPassed = false
def withFixture(test: OneArgTest): Outcome = {
correctTestNameWasPassed = test.name == "Scenario: should do something"
test("hi")
}
scenario("should do something") { fixture => }
}
import scala.language.reflectiveCalls
a.run(None, Args(SilentReporter))
assert(a.correctTestNameWasPassed)
}
it("should pass the correct config map in the OneArgTest passed to withFixture") {
val a = new FeatureSpec {
type FixtureParam = String
var correctConfigMapWasPassed = false
def withFixture(test: OneArgTest): Outcome = {
correctConfigMapWasPassed = (test.configMap == ConfigMap("hi" -> 7))
test("hi")
}
scenario("should do something") { fixture => }
}
import scala.language.reflectiveCalls
a.run(None, Args(SilentReporter, Stopper.default, Filter(), ConfigMap("hi" -> 7), None, new Tracker(), Set.empty))
assert(a.correctConfigMapWasPassed)
}
class ExamplePrefixSpec extends FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = {
test("hi")
}
feature("A Feature") {
scenario("A Scenario") { fixture =>
}
}
}
it("should prefix feature text with 'Feature: '") {
val rep = new EventRecordingReporter
(new ExamplePrefixSpec).run(None, Args(rep))
val scopeOpened = rep.scopeOpenedEventsReceived
assert(scopeOpened.size === 1)
assert(scopeOpened(0).message === "Feature: A Feature")
val scopeClosed = rep.scopeClosedEventsReceived
assert(scopeClosed.size === 1)
assert(scopeClosed(0).message === "Feature: A Feature")
}
it("should prefix scenario text with 'Scenario: '") {
val rep = new EventRecordingReporter
(new ExamplePrefixSpec).run(None, Args(rep))
val testStarting = rep.testStartingEventsReceived
assert(testStarting.size === 1)
assert(testStarting(0).testText === "Scenario: A Scenario")
val testSucceeded = rep.testSucceededEventsReceived
assert(testSucceeded.size === 1)
assert(testSucceeded(0).testText === "Scenario: A Scenario")
}
it("should allow test registration with registerTest and registerIgnoredTest") {
class TestSpec extends FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("a string") }
val a = 1
registerTest("test 1") { fixture =>
val e = intercept[TestFailedException] {
assert(a == 2)
}
assert(e.message == Some("1 did not equal 2"))
assert(e.failedCodeFileName == Some("FeatureSpecSpec.scala"))
assert(e.failedCodeLineNumber == Some(thisLineNumber - 4))
}
registerTest("test 2") { fixture =>
assert(a == 2)
}
registerTest("test 3") { fixture =>
pending
}
registerTest("test 4") { fixture =>
cancel
}
registerIgnoredTest("test 5") { fixture =>
assert(a == 2)
}
}
val rep = new EventRecordingReporter
val s = new TestSpec
s.run(None, Args(rep))
assert(rep.testStartingEventsReceived.length == 4)
assert(rep.testSucceededEventsReceived.length == 1)
assert(rep.testSucceededEventsReceived(0).testName == "Scenario: test 1")
assert(rep.testFailedEventsReceived.length == 1)
assert(rep.testFailedEventsReceived(0).testName == "Scenario: test 2")
assert(rep.testPendingEventsReceived.length == 1)
assert(rep.testPendingEventsReceived(0).testName == "Scenario: test 3")
assert(rep.testCanceledEventsReceived.length == 1)
assert(rep.testCanceledEventsReceived(0).testName == "Scenario: test 4")
assert(rep.testIgnoredEventsReceived.length == 1)
assert(rep.testIgnoredEventsReceived(0).testName == "Scenario: test 5")
}
describe("when failure happens") {
it("should fire TestFailed event with correct stack depth info when test failed") {
class TestSpec extends FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("a string") }
scenario("fail scenario") { fixture =>
assert(1 === 2)
}
feature("a feature") {
scenario("nested fail scenario") { fixture =>
assert(1 === 2)
}
}
}
val rep = new EventRecordingReporter
val s1 = new TestSpec
s1.run(None, Args(rep))
assert(rep.testFailedEventsReceived.size === 2)
assert(rep.testFailedEventsReceived(0).throwable.get.asInstanceOf[TestFailedException].failedCodeFileName.get === "FeatureSpecSpec.scala")
assert(rep.testFailedEventsReceived(0).throwable.get.asInstanceOf[TestFailedException].failedCodeLineNumber.get === thisLineNumber - 13)
assert(rep.testFailedEventsReceived(1).throwable.get.asInstanceOf[TestFailedException].failedCodeFileName.get === "FeatureSpecSpec.scala")
assert(rep.testFailedEventsReceived(1).throwable.get.asInstanceOf[TestFailedException].failedCodeLineNumber.get === thisLineNumber - 11)
}
it("should generate NotAllowedException with correct stack depth info when has a feature nested inside a feature") {
class TestSpec extends FeatureSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("a string") }
feature("a feature") {
feature("inner feature") {
ignore("nested fail scenario") { fixture =>
assert(1 === 1)
}
}
}
}
val rep = new EventRecordingReporter
val caught = intercept[NotAllowedException] {
new TestSpec
}
assert(caught.failedCodeFileName.get === "FeatureSpecSpec.scala")
assert(caught.failedCodeLineNumber.get === thisLineNumber - 12)
}
it("should generate TestRegistrationClosedException with correct stack depth info when has a scenario nested inside a scenario") {
class TestSpec extends FeatureSpec {
type FixtureParam = String
var registrationClosedThrown = false
feature("a feature") {
scenario("a scenario") { fixture =>
scenario("nested scenario") { fixture =>
assert(1 == 2)
}
}
}
def withFixture(test: OneArgTest): Outcome = {
val outcome = test.apply("a string")
outcome match {
case Exceptional(ex: TestRegistrationClosedException) =>
registrationClosedThrown = true
case _ =>
}
outcome
}
}
val rep = new EventRecordingReporter
val s = new TestSpec
s.run(None, Args(rep))
assert(s.registrationClosedThrown == true)
val testFailedEvents = rep.testFailedEventsReceived
assert(testFailedEvents.size === 1)
assert(testFailedEvents(0).throwable.get.getClass() === classOf[TestRegistrationClosedException])
val trce = testFailedEvents(0).throwable.get.asInstanceOf[TestRegistrationClosedException]
assert("FeatureSpecSpec.scala" === trce.failedCodeFileName.get)
assert(trce.failedCodeLineNumber.get === thisLineNumber - 24)
assert(trce.message == Some("A scenario clause may not appear inside another scenario clause."))
}
it("should generate TestRegistrationClosedException with correct stack depth info when has an ignore nested inside a scenario") {
class TestSpec extends FeatureSpec {
type FixtureParam = String
var registrationClosedThrown = false
feature("a feature") {
scenario("a scenario") { fixture =>
ignore("ignore scenario") { fixture =>
assert(1 == 2)
}
}
}
def withFixture(test: OneArgTest): Outcome = {
val outcome = test.apply("a string")
outcome match {
case Exceptional(ex: TestRegistrationClosedException) =>
registrationClosedThrown = true
case _ =>
}
outcome
}
}
val rep = new EventRecordingReporter
val s = new TestSpec
s.run(None, Args(rep))
assert(s.registrationClosedThrown == true)
val testFailedEvents = rep.testFailedEventsReceived
assert(testFailedEvents.size === 1)
assert(testFailedEvents(0).throwable.get.getClass() === classOf[TestRegistrationClosedException])
val trce = testFailedEvents(0).throwable.get.asInstanceOf[TestRegistrationClosedException]
assert("FeatureSpecSpec.scala" === trce.failedCodeFileName.get)
assert(trce.failedCodeLineNumber.get === thisLineNumber - 24)
assert(trce.message == Some("An ignore clause may not appear inside a scenario clause."))
}
it("should generate TestRegistrationClosedException with correct stack depth info when has a registerTest nested inside a registerTest") {
class TestSpec extends FeatureSpec {
type FixtureParam = String
var registrationClosedThrown = false
feature("a feature") {
registerTest("a scenario") { fixture =>
registerTest("nested scenario") { fixture =>
assert(1 == 2)
}
}
}
def withFixture(test: OneArgTest): Outcome = {
val outcome = test.apply("a string")
outcome match {
case Exceptional(ex: TestRegistrationClosedException) =>
registrationClosedThrown = true
case _ =>
}
outcome
}
}
val rep = new EventRecordingReporter
val s = new TestSpec
s.run(None, Args(rep))
assert(s.registrationClosedThrown == true)
val testFailedEvents = rep.testFailedEventsReceived
assert(testFailedEvents.size === 1)
assert(testFailedEvents(0).throwable.get.getClass() === classOf[TestRegistrationClosedException])
val trce = testFailedEvents(0).throwable.get.asInstanceOf[TestRegistrationClosedException]
assert("FeatureSpecSpec.scala" === trce.failedCodeFileName.get)
assert(trce.failedCodeLineNumber.get === thisLineNumber - 24)
assert(trce.message == Some("Test cannot be nested inside another test."))
}
it("should generate TestRegistrationClosedException with correct stack depth info when has a registerIgnoredTest nested inside a registerTest") {
class TestSpec extends FeatureSpec {
type FixtureParam = String
var registrationClosedThrown = false
feature("a feature") {
registerTest("a scenario") { fixture =>
registerIgnoredTest("ignore scenario") { fixture =>
assert(1 == 2)
}
}
}
def withFixture(test: OneArgTest): Outcome = {
val outcome = test.apply("a string")
outcome match {
case Exceptional(ex: TestRegistrationClosedException) =>
registrationClosedThrown = true
case _ =>
}
outcome
}
}
val rep = new EventRecordingReporter
val s = new TestSpec
s.run(None, Args(rep))
assert(s.registrationClosedThrown == true)
val testFailedEvents = rep.testFailedEventsReceived
assert(testFailedEvents.size === 1)
assert(testFailedEvents(0).throwable.get.getClass() === classOf[TestRegistrationClosedException])
val trce = testFailedEvents(0).throwable.get.asInstanceOf[TestRegistrationClosedException]
assert("FeatureSpecSpec.scala" === trce.failedCodeFileName.get)
assert(trce.failedCodeLineNumber.get === thisLineNumber - 24)
assert(trce.message == Some("Test cannot be nested inside another test."))
}
}
}
|
travisbrown/scalatest
|
src/test/scala/org/scalatest/fixture/FeatureSpecSpec.scala
|
Scala
|
apache-2.0
| 64,090 |
package java.lang
class StringBuilder(private var content: String) extends CharSequence
with Appendable
with java.io.Serializable {
def this() = this("")
def this(initialCapacity: Int) = this("") // the capacity hint is ignored: this implementation is backed by a plain String
def this(csq: CharSequence) = this(csq.toString)
def append(s: String): StringBuilder = {
content += { if (s == null) "null" else s }
this
}
def append(b: scala.Boolean): StringBuilder = append(b.toString())
def append(c: scala.Char): StringBuilder = append(c.toString())
def append(str: Array[scala.Char]): StringBuilder =
append(str, 0, str.length)
def append(str: Array[scala.Char], offset: Int, len: Int): StringBuilder = {
var i = 0
while (i < len) {
content += str(i + offset)
i += 1
}
this
}
def append(b: scala.Byte): StringBuilder = append(b.toString())
def append(s: scala.Short): StringBuilder = append(s.toString())
def append(i: scala.Int): StringBuilder = append(i.toString())
def append(lng: scala.Long): StringBuilder = append(lng.toString())
def append(f: scala.Float): StringBuilder = append(f.toString())
def append(d: scala.Double): StringBuilder = append(d.toString())
def append(obj: AnyRef): StringBuilder = {
if (obj == null) append(null: String)
else append(obj.toString())
}
def append(csq: CharSequence): StringBuilder = append(csq: AnyRef)
def append(csq: CharSequence, start: Int, end: Int): StringBuilder = {
if (csq == null) append("null", start, end)
else append(csq.subSequence(start, end).toString())
}
def appendCodePoint(codePoint: Int): StringBuilder =
append(Character.toChars(codePoint))
override def toString(): String = content
def length(): Int = content.length()
def charAt(index: Int): Char = content.charAt(index)
def codePointAt(index: Int): Int = content.codePointAt(index)
def indexOf(str: String): Int = content.indexOf(str)
def indexOf(str: String, fromIndex: Int): Int =
content.indexOf(str, fromIndex)
def lastIndexOf(str: String): Int = content.lastIndexOf(str)
def lastIndexOf(str: String, fromIndex: Int): Int =
content.lastIndexOf(str, fromIndex)
def subSequence(start: Int, end: Int): CharSequence = substring(start, end)
def substring(start: Int): String = content.substring(start)
def substring(start: Int, end: Int): String = content.substring(start, end)
def reverse(): StringBuilder = {
val original = content
var result = ""
var i = 0
while (i < original.length) {
val c = original.charAt(i)
if (Character.isHighSurrogate(c) && (i+1 < original.length)) {
val c2 = original.charAt(i+1)
if (Character.isLowSurrogate(c2)) {
result = c.toString + c2.toString + result
i += 2
} else {
result = c.toString + result
i += 1
}
} else {
result = c.toString + result
i += 1
}
}
content = result
this
}
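// Illustrative note (not in the original source): reverse() keeps surrogate pairs
// together, so for a builder holding "a" + <high surrogate, low surrogate> + "b",
// reverse() yields "b" + <the intact pair> + "a" instead of splitting the pair.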
def deleteCharAt(index: Int): StringBuilder = {
if (index < 0 || index >= content.length)
throw new StringIndexOutOfBoundsException("String index out of range: " + index)
content = content.substring(0, index) + content.substring(index+1)
this
}
def ensureCapacity(minimumCapacity: Int): Unit = {
// Do nothing
}
/**
* @param start The beginning index, inclusive.
* @param end The ending index, exclusive.
* @param str String that will replace previous contents.
* @return This StringBuilder.
*/
def replace(start: Int, end: Int, str: String): StringBuilder = {
val length = content.length
if (start < 0 || start > end || start > length) {
throw new StringIndexOutOfBoundsException(
s"Illegal to replace substring at [$start - $end] in string of length $length")
}
val realEnd = if (end > length) length else end // java api convention
content = content.substring(0, start) + str + content.substring(realEnd)
this
}
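// Illustrative usage (not in the original source):
//   new StringBuilder("Hello world").replace(6, 11, "Scala").toString == "Hello Scala"
// An end index beyond the current length is clamped to the length, per the JDK convention noted above.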
def setCharAt(index: Int, ch: scala.Char): Unit = {
if (index < 0 || index >= content.length)
throw new IndexOutOfBoundsException("String index out of range: " + index)
content = content.substring(0, index) + ch + content.substring(index + 1)
}
def setLength(newLength: Int): Unit = {
if (newLength < 0)
throw new IndexOutOfBoundsException("String index out of range: " + newLength)
val len = length()
if (len == newLength) {
} else if (len < newLength) {
var index = len
while (index < newLength) {
append("\\u0000")
index += 1
}
} else {
content = substring(0, newLength)
}
}
def insert(index: Int, b: scala.Boolean): StringBuilder = insert(index, b.toString)
def insert(index: Int, b: scala.Byte): StringBuilder = insert(index, b.toString)
def insert(index: Int, s: scala.Short): StringBuilder = insert(index, s.toString)
def insert(index: Int, i: scala.Int): StringBuilder = insert(index, i.toString)
def insert(index: Int, l: scala.Long): StringBuilder = insert(index, l.toString)
def insert(index: Int, f: scala.Float): StringBuilder = insert(index, f.toString)
def insert(index: Int, d: scala.Double): StringBuilder = insert(index, d.toString)
def insert(index: Int, c: scala.Char): StringBuilder = insert(index, c.toString)
def insert(index: Int, csq: CharSequence): StringBuilder = insert(index: Int, csq: AnyRef)
def insert(index: Int, arr: Array[scala.Char]): StringBuilder = insert(index, arr, 0, arr.length)
def insert(index: Int, ref: AnyRef): StringBuilder =
if (ref == null)
insert(index, null: String)
else
insert(index, ref.toString)
def insert(index: Int, csq: CharSequence, start: Int, end: Int): StringBuilder =
if (csq == null)
insert(index, "null", start, end)
else
insert(index, csq.subSequence(start, end).toString)
def insert(index: Int, arr: Array[scala.Char], offset: Int, len: Int): StringBuilder = {
var str = ""
var i = 0
while (i < len) {
str += arr(i + offset)
i += 1
}
insert(index, str)
}
def insert(index: Int, str: String): StringBuilder = {
val thisLength = length()
if (index < 0 || index > thisLength)
throw new StringIndexOutOfBoundsException(index)
else if (index == thisLength)
append(str)
else
content = content.substring(0, index) + Option(str).getOrElse("null") + content.substring(index)
this
}
}
|
lrytz/scala-js
|
javalanglib/src/main/scala/java/lang/StringBuilder.scala
|
Scala
|
bsd-3-clause
| 6,629 |
package org.jetbrains.plugins.scala.util.teamcity
import org.jetbrains.annotations.ApiStatus
@ApiStatus.Experimental
object TeamcityUtils {
sealed trait Status {
import Status._
def value: String = this match {
case Normal => "NORMAL"
case Warning => "WARNING"
case Failure => "FAILURE"
case Error => "ERROR"
}
}
object Status {
case object Normal extends Status
case object Warning extends Status
case object Failure extends Status
case object Error extends Status
}
// https://www.jetbrains.com/help/teamcity/service-messages.html#Reporting+Messages+for+Build+Log
def logUnderTeamcity(message: String, status: Status = Status.Normal): Unit = {
val isUnderTeamcity = com.intellij.internal.statistic.utils.StatisticsUploadAssistant.isTeamcityDetected
if (isUnderTeamcity) {
val messageText = escapeTeamcityValue(message)
val result = s"##teamcity[message text='$messageText' status='${status.value}']"
println(result)
}
}
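// Illustrative output (not in the original source), assuming TeamCity is detected:
//   logUnderTeamcity("build [step] done", Status.Warning)
// prints: ##teamcity[message text='build |[step|] done' status='WARNING']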
// https://www.jetbrains.com/help/teamcity/service-messages.html#Reporting+Messages+for+Build+Log
private def escapeTeamcityValue(value: String): String =
value
.replace("|", "||")
.replace("'", "|'")
.replace("\\n", "|n")
.replace("\\r", " |r")
.replace("[", "|[")
.replace("]", "|]")
//replace("\\\\uNNNN", "|0xNNNN") // todo
}
|
JetBrains/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/scala/util/teamcity/TeamcityUtils.scala
|
Scala
|
apache-2.0
| 1,401 |
/*
* Copyright 2015 LG CNS.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package scouter.server.db;
import java.util.List
import scouter.server.Logger
import scouter.server.ShutdownManager
import scouter.server.db.xlog.XLogProfileDataReader
import scouter.server.db.xlog.XLogProfileDataWriter
import scouter.server.db.xlog.XLogProfileIndex
import scouter.util.DateUtil
import scouter.util.FileUtil
import scouter.util.RequestQueue
import scouter.util.IClose
import scouter.util.IShutdown
import scouter.util.ThreadUtil
import java.io.File
import scouter.server.util.ThreadScala
import scouter.server.util.OftenAction
object XLogProfileWR extends IClose {
val queue = new RequestQueue[Data](DBCtr.MAX_QUE_SIZE);
class ResultSet(keys: List[Long], var reader: XLogProfileDataReader) {
var max: Int = if (keys == null) 0 else keys.size()
var x: Int = 0;
def hasNext() = x < max
def readNext() = {
if (x >= max || reader == null) null
else {
val bytes = reader.read(keys.get(x));
x = x + 1
bytes
}
}
def close() =
if (this.reader != null) {
this.reader.close();
this.reader = null
}
}
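// Illustrative usage of ResultSet (not in the original source), iterating the
// profile records for a set of index keys and then releasing the reader:
//   val rs = new ResultSet(keys, reader)
//   while (rs.hasNext()) { val bytes = rs.readNext(); /* process profile bytes */ }
//   rs.close()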
val prefix = "xlog";
class Data(_time: Long, _txid: Long, _data: Array[Byte]) {
val time = _time
val txid = _txid
val data = _data
}
var currentDateUnit: Long = 0
var index: XLogProfileIndex = null
var writer: XLogProfileDataWriter = null
ThreadScala.start("scouter.server.db.XLogProfileWR") {
while (DBCtr.running) {
val m = queue.get();
try {
if (currentDateUnit != DateUtil.getDateUnit(m.time)) {
currentDateUnit = DateUtil.getDateUnit(m.time);
close();
open(DateUtil.yyyymmdd(m.time));
}
if (index == null) {
OftenAction.act("XLoWR", 10) {
queue.clear();
currentDateUnit = 0;
}
Logger.println("S141", 10, "can't open ");
} else {
val offset = writer.write(m.data)
index.addByTxid(m.txid, offset);
}
} catch {
case e: Throwable => e.printStackTrace()
}
}
close();
}
def add(time: Long, txid: Long, data: Array[Byte]) {
val ok = queue.put(new Data(time, txid, data));
if (ok == false) {
Logger.println("S142", 10, "queue exceeded!!");
}
}
def close() {
FileUtil.close(index);
FileUtil.close(writer);
writer = null;
index = null;
}
def open(date: String) {
try {
val path = getDBPath(date);
val f = new File(path)
if (f.exists() == false)
f.mkdirs();
var file = path + "/" + prefix;
index = XLogProfileIndex.open(file);
writer = XLogProfileDataWriter.open(date, file);
return ;
} catch {
case e: Throwable => {
close()
e.printStackTrace()
}
}
return ;
}
def getDBPath(date: String): String = {
val sb = new StringBuffer();
sb.append(DBCtr.getRootPath());
sb.append("/").append(date).append(XLogWR.dir);
return sb.toString();
}
}
|
jahnaviancha/scouter
|
scouter.server/src/scouter/server/db/XLogProfileWR.scala
|
Scala
|
apache-2.0
| 4,013 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark.testsuite.allqueries
import org.apache.spark.sql.Row
import org.apache.spark.sql.test.util.QueryTest
import org.scalatest.BeforeAndAfterAll
class InsertIntoCarbonTableSpark2TestCase extends QueryTest with BeforeAndAfterAll {
override def beforeAll: Unit = {
sql("drop table if exists OneRowTable")
}
test("insert select one row") {
sql("create table OneRowTable(" +
"col1 string, col2 string, col3 int, col4 double) STORED AS carbondata")
sql("insert into OneRowTable select '0.1', 'a.b', 1, 1.2")
checkAnswer(sql("select * from OneRowTable"), Seq(Row("0.1", "a.b", 1, 1.2)))
}
test("test insert into with database name having underscore") {
sql("drop table if exists OneRowTable")
sql("create table OneRowTable(" +
"col1 string, col2 string, col3 int, col4 double) STORED AS carbondata")
sql("insert into OneRowTable select '0.1', 'a.b', 1, 1.2")
checkAnswer(sql("select * from OneRowTable"), Seq(Row("0.1", "a.b", 1, 1.2)))
sql("drop database if exists _default cascade")
sql("create database _default")
sql("create table _default._OneRowTable(" +
"col1 string, col2 string, col3 int, col4 double) STORED AS carbondata")
sql("insert into _default._OneRowTable select * from OneRowTable")
checkAnswer(sql("select * from _default._OneRowTable"), Seq(Row("0.1", "a.b", 1, 1.2)))
sql("drop database if exists _default cascade")
}
override def afterAll {
sql("drop table if exists OneRowTable")
sql("drop database if exists _default cascade")
}
}
|
zzcclp/carbondata
|
integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableSpark2TestCase.scala
|
Scala
|
apache-2.0
| 2,390 |
/*
* Copyright 2017 Datamountaineer.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datamountaineer.streamreactor.connect.converters.sink
import org.apache.kafka.connect.sink.SinkRecord
/**
* Provides the interface for converting a Connect sink payload (JMS, MQTT, etc) to a SinkRecord
*/
trait Converter {
def initialize(map: Map[String, String]): Unit = {}
def convert(sinkTopic: String, data: SinkRecord): SinkRecord
}
object Converter {
val TopicKey = "topic"
}
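// A minimal sketch (not part of the original file): an identity Converter that
// forwards the record unchanged; the name PassThroughConverter is hypothetical.
class PassThroughConverter extends Converter {
override def convert(sinkTopic: String, data: SinkRecord): SinkRecord = data
}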
|
datamountaineer/kafka-connect-common
|
src/main/scala/com/datamountaineer/streamreactor/connect/converters/sink/Converter.scala
|
Scala
|
apache-2.0
| 1,015 |
package de.choffmeister.microserviceutils.http
import akka.actor.ActorSystem
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.{Directive1, Route}
class HttpServer()(implicit val system: ActorSystem) extends HttpServerBase {
override def routes: Route = reject
override val rateLimiterBackend: RateLimiterBackend = new MemoryRateLimiterBackend(60)
override def rateLimiterExtractor: Directive1[Option[(String, Long)]] = provide(None)
}
|
choffmeister/microservice-utils
|
microservice-utils/src/main/scala/de/choffmeister/microserviceutils/http/HttpServer.scala
|
Scala
|
mit
| 474 |
package com.arcusys.valamis.lesson.scorm.model.manifest
/** A navigation control which may be included in the to-hide list */
object NavigationControlType extends Enumeration {
type NavigationControlType = Value
val Previous = Value("previous")
val Continue = Value("continue")
val Exit = Value("exit")
val ExitAll = Value("exitAll")
val Abandon = Value("abandon")
val AbandonAll = Value("abandonAll")
val SuspendAll = Value("suspendAll")
}
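// Illustrative usage (not in the original source): Enumeration values are keyed by
// the manifest token passed to Value, so parsing a manifest attribute is simply
//   NavigationControlType.withName("continue") == NavigationControlType.Continue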
|
ViLPy/Valamis
|
valamis-scorm-lesson/src/main/scala/com/arcusys/valamis/lesson/scorm/model/manifest/NavigationControlType.scala
|
Scala
|
lgpl-3.0
| 456 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.features.avro
import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets
import java.util.{Date, Locale, UUID}
import com.vividsolutions.jts.geom.Geometry
import com.vividsolutions.jts.io.WKBWriter
import org.apache.avro.{Schema, SchemaBuilder}
import org.geotools.util.Converters
import org.opengis.feature.simple.SimpleFeatureType
import scala.collection.JavaConversions._
object AvroSimpleFeatureUtils {
val FEATURE_ID_AVRO_FIELD_NAME: String = "__fid__"
val AVRO_SIMPLE_FEATURE_VERSION: String = "__version__"
val AVRO_SIMPLE_FEATURE_USERDATA: String = "__userdata__"
// Increment whenever encoding changes and handle in reader and writer
// Version 2 changed the WKT geom to a binary geom
// Version 3 adds byte array types to the schema...and is backwards compatible with V2
// Version 4 adds a custom name encoder function for the avro schema
// v4 can read version 2 and 3 files but version 3 cannot read version 4
val VERSION: Int = 4
val AVRO_NAMESPACE: String = "org.geomesa"
def generateSchema(sft: SimpleFeatureType,
withUserData: Boolean,
namespace: String = AVRO_NAMESPACE): Schema = {
val nameEncoder = new FieldNameEncoder(VERSION)
val initialAssembler: SchemaBuilder.FieldAssembler[Schema] =
SchemaBuilder.record(nameEncoder.encode(sft.getTypeName))
.namespace(namespace)
.fields
.name(AVRO_SIMPLE_FEATURE_VERSION).`type`.intType.noDefault
.name(FEATURE_ID_AVRO_FIELD_NAME).`type`.stringType.noDefault
val withFields =
sft.getAttributeDescriptors.foldLeft(initialAssembler) { case (assembler, ad) =>
addField(assembler, nameEncoder.encode(ad.getLocalName), ad.getType.getBinding, ad.isNillable)
}
val fullSchema = if (withUserData) {
withFields.name(AVRO_SIMPLE_FEATURE_USERDATA).`type`.array().items().record("userDataItem").fields()
.name("keyClass").`type`.stringType().noDefault()
.name("key").`type`.stringType().noDefault()
.name("valueClass").`type`.stringType().noDefault()
.name("value").`type`.stringType().noDefault().endRecord().noDefault()
} else {
withFields
}
fullSchema.endRecord
}
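// Sketch of the resulting layout (not in the original source): for an SFT with
// attributes name:String and dtg:Date, the generated record starts with
// __version__ (int) and __fid__ (string), then one field per attribute (dtg
// encoded as a long), plus an optional __userdata__ array when withUserData is true.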
def addField(assembler: SchemaBuilder.FieldAssembler[Schema],
name: String,
ct: Class[_],
nillable: Boolean): SchemaBuilder.FieldAssembler[Schema] = {
val baseType = if (nillable) assembler.name(name).`type`.nullable() else assembler.name(name).`type`
ct match {
case c if classOf[String].isAssignableFrom(c) => baseType.stringType.noDefault
case c if classOf[java.lang.Integer].isAssignableFrom(c) => baseType.intType.noDefault
case c if classOf[java.lang.Long].isAssignableFrom(c) => baseType.longType.noDefault
case c if classOf[java.lang.Double].isAssignableFrom(c) => baseType.doubleType.noDefault
case c if classOf[java.lang.Float].isAssignableFrom(c) => baseType.floatType.noDefault
case c if classOf[java.lang.Boolean].isAssignableFrom(c) => baseType.booleanType.noDefault
case c if classOf[UUID].isAssignableFrom(c) => baseType.bytesType.noDefault
case c if classOf[Date].isAssignableFrom(c) => baseType.longType.noDefault
case c if classOf[Geometry].isAssignableFrom(c) => baseType.bytesType.noDefault
case c if classOf[java.util.List[_]].isAssignableFrom(c) => baseType.bytesType.noDefault
case c if classOf[java.util.Map[_, _]].isAssignableFrom(c) => baseType.bytesType.noDefault
case c if classOf[Array[Byte]].isAssignableFrom(c) => baseType.bytesType.noDefault
}
}
val primitiveTypes =
List(
classOf[String],
classOf[java.lang.Integer],
classOf[Int],
classOf[java.lang.Long],
classOf[Long],
classOf[java.lang.Double],
classOf[Double],
classOf[java.lang.Float],
classOf[Float],
classOf[java.lang.Boolean],
classOf[Boolean]
)
case class Binding(clazz: Class[_], conv: AnyRef => Any)
// Resulting functions in map are not thread-safe...use only as
// member variable, not in a static context
def createTypeMap(sft: SimpleFeatureType, wkbWriter: WKBWriter, nameEncoder: FieldNameEncoder): Map[String, Binding] = {
import org.locationtech.geomesa.utils.geotools.RichAttributeDescriptors.RichAttributeDescriptor
sft.getAttributeDescriptors.map { ad =>
val binding = ad.getType.getBinding
val converter = if (primitiveTypes.contains(binding)) {
(value: AnyRef) => value
} else if (classOf[UUID].isAssignableFrom(binding)) {
(value: AnyRef) => encodeUUID(value.asInstanceOf[UUID])
} else if (classOf[Date].isAssignableFrom(binding)) {
(value: AnyRef) => value.asInstanceOf[Date].getTime
} else if (classOf[Geometry].isAssignableFrom(binding) ) {
(value: AnyRef) => ByteBuffer.wrap(wkbWriter.write(value.asInstanceOf[Geometry]))
} else if (ad.isList) {
(value: AnyRef) => encodeList(value.asInstanceOf[java.util.List[_]], ad.getListType())
} else if (ad.isMap) {
(value: AnyRef) => {
val (keyclass, valueclass) = ad.getMapTypes()
encodeMap(value.asInstanceOf[java.util.Map[_, _]], keyclass, valueclass)
}
} else if (classOf[Array[Byte]].isAssignableFrom(binding)) {
(value: AnyRef) => ByteBuffer.wrap(value.asInstanceOf[Array[Byte]])
} else {
(value: AnyRef) => Option(Converters.convert(value, classOf[String])).getOrElse(value.toString)
}
(nameEncoder.encode(ad.getLocalName), Binding(ad.getType.getBinding, converter))
}.toMap
}
def encodeUUID(uuid: UUID) =
ByteBuffer.allocate(16)
.putLong(uuid.getMostSignificantBits)
.putLong(uuid.getLeastSignificantBits)
.flip.asInstanceOf[ByteBuffer]
def decodeUUID(bb: ByteBuffer): UUID = new UUID(bb.getLong, bb.getLong)
/**
 * Encodes a list of primitives or Dates into a byte buffer. The list items must all be of the
 * same class.
 *
 * @param list the list to encode; may be null
 * @param binding the class of the list items
 * @return a byte buffer containing the encoded list, ready for reading
 */
def encodeList(list: java.util.List[_], binding: Class[_]): ByteBuffer = {
val size = Option(list).map(_.size)
size match {
case Some(s) if s == 0 => encodeEmptyCollection
case Some(s) => encodeNonEmptyList(list, s, binding)
case None => encodeNullCollection
}
}
/**
 * Decodes a byte buffer created with @see encodeList back into a list.
 *
 * @param bb the byte buffer to decode, positioned at the start of the encoded list
 * @return the decoded list, or null if a null list was encoded
 */
def decodeList(bb: ByteBuffer): java.util.List[_] = {
val size = bb.getInt
if (size < 0) {
null
} else if (size == 0) {
java.util.Collections.emptyList()
} else {
val list = new java.util.ArrayList[Object](size)
val label = AvroSimpleFeatureUtils.getString(bb)
val readMethod = getReadMethod(label, bb)
(0 to size - 1).foreach(_ => list.add(readMethod()))
list
}
}
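  /**
   * Round-trip sketch (illustrative only; not part of the original source):
   * encodes a list of boxed Integers and decodes it back. The helper name
   * `listRoundTripExample` is hypothetical.
   */
  private def listRoundTripExample(): java.util.List[_] = {
    val values = java.util.Arrays.asList(Int.box(1), Int.box(2), Int.box(3))
    // encodeList returns a flipped buffer, so it can be decoded immediately
    decodeList(encodeList(values, classOf[java.lang.Integer]))
  }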
/**
 * Encodes a map of primitives or Dates into a byte buffer. The map keys must all be of the same
 * class, and the map values must all be of the same class.
 *
 * @param map the map to encode; may be null
 * @param keyBinding the class of the map keys
 * @param valueBinding the class of the map values
 * @return a byte buffer containing the encoded map, ready for reading
 */
def encodeMap(map: java.util.Map[_, _], keyBinding: Class[_], valueBinding: Class[_]): ByteBuffer = {
val size = Option(map).map(_.size)
size match {
case Some(s) if s == 0 => encodeEmptyCollection
case Some(s) => encodeNonEmptyMap(map, s, keyBinding, valueBinding)
case None => encodeNullCollection
}
}
/**
 * Decodes a byte buffer created with @see encodeMap back into a map.
 *
 * @param bb the byte buffer to decode, positioned at the start of the encoded map
 * @return the decoded map, or null if a null map was encoded
 */
def decodeMap(bb: ByteBuffer): java.util.Map[_, _] = {
val size = bb.getInt
if (size < 0) {
null
} else if (size == 0) {
java.util.Collections.emptyMap()
} else {
val map = new java.util.HashMap[Object, Object](size)
val keyType = AvroSimpleFeatureUtils.getString(bb)
val valueType = AvroSimpleFeatureUtils.getString(bb)
val keyReadMethod = getReadMethod(keyType, bb)
val valueReadMethod = getReadMethod(valueType, bb)
(0 to size - 1).foreach { _ =>
val key = keyReadMethod()
val value = valueReadMethod()
map.put(key, value)
}
map
}
}
private def encodeNullCollection: ByteBuffer =
ByteBuffer.allocate(4).putInt(-1).flip.asInstanceOf[ByteBuffer]
private def encodeEmptyCollection: ByteBuffer =
ByteBuffer.allocate(4).putInt(0).flip.asInstanceOf[ByteBuffer]
/**
 * Encodes a list that has entries.
 *
 * @param list the non-empty list to encode
 * @param size the number of items in the list
 * @param binding the class of the list items
 * @return a byte buffer containing the encoded list
 */
private def encodeNonEmptyList(list: java.util.List[_], size: Int, binding: Class[_]): ByteBuffer = {
// get the class label for the list items
val label = binding.getSimpleName
// get the appropriate write method for the list type
val (bytesPerItem, putMethod): (Int, (ByteBuffer, Any) => Unit) = getWriteMethod(label)
// calculate the total size needed to encode the list
val totalBytes = getTotalBytes(bytesPerItem, size, list.iterator(), binding.getSimpleName)
val labelBytes = label.getBytes(StandardCharsets.UTF_8)
// 4 bytes for list size + 4 bytes for label bytes size + label bytes + item bytes
val bb = ByteBuffer.allocate(4 + 4 + labelBytes.size + totalBytes)
// first put the size of the list
bb.putInt(size)
// put the type of the list
AvroSimpleFeatureUtils.putString(bb, label)
// put each item
list.foreach(v => putMethod(bb, v))
// flip (reset) the buffer so that it's ready for reading
bb.flip
bb
}
/**
 * Encodes a map that has entries.
 *
 * @param map the non-empty map to encode
 * @param size the number of entries in the map
 * @param keyBinding the class of the map keys
 * @param valueBinding the class of the map values
 * @return a byte buffer containing the encoded map
 */
private def encodeNonEmptyMap(map: java.util.Map[_, _],
size: Int,
keyBinding: Class[_],
valueBinding: Class[_]): ByteBuffer = {
// pull out the class labels for the map keys/values
val keyLabel = keyBinding.getSimpleName
val valueLabel = valueBinding.getSimpleName
// get the appropriate write methods and approximate sizes for keys and values
val (bytesPerKeyItem, keyPutMethod) = getWriteMethod(keyLabel)
val (bytesPerValueItem, valuePutMethod) = getWriteMethod(valueLabel)
// get the exact size in bytes for keys and values
val totalKeyBytes = getTotalBytes(bytesPerKeyItem, size, map.keysIterator, keyLabel)
val totalValueBytes = getTotalBytes(bytesPerValueItem, size, map.valuesIterator, valueLabel)
val keyLabelBytes = keyLabel.getBytes(StandardCharsets.UTF_8)
val valueLabelBytes = valueLabel.getBytes(StandardCharsets.UTF_8)
// 4 bytes for map size + 8 bytes for label bytes size + label bytes + key bytes + value bytes
val totalBytes = 4 + 8 + keyLabelBytes.size + valueLabelBytes.size + totalKeyBytes + totalValueBytes
val bb = ByteBuffer.allocate(totalBytes)
// first put the size of the map
bb.putInt(size)
// put the types of the keys and values
AvroSimpleFeatureUtils.putString(bb, keyLabel)
AvroSimpleFeatureUtils.putString(bb, valueLabel)
// put each key value pair
map.foreach { case (k, v) =>
keyPutMethod(bb, k)
valuePutMethod(bb, v)
}
// flip (reset) the buffer so that it's ready for reading
bb.flip
bb
}
/**
 * Gets the appropriate byte buffer write method for the given object type.
 *
 * @param label the simple class name of the object type
 * @return size per item (if known, otherwise -1) + write method
 */
private def getWriteMethod(label: String): (Int, (ByteBuffer, Any) => Unit) =
label.toLowerCase(Locale.US) match {
case "string" => (-1, (bb, v) => putString(bb, v.asInstanceOf[String]))
case "int" |
"integer" => (4, (bb, v) => bb.putInt(v.asInstanceOf[Int]))
case "double" => (8, (bb, v) => bb.putDouble(v.asInstanceOf[Double]))
case "long" => (8, (bb, v) => bb.putLong(v.asInstanceOf[Long]))
case "float" => (4, (bb, v) => bb.putFloat(v.asInstanceOf[Float]))
case "date" => (8, (bb, v) => bb.putLong(v.asInstanceOf[Date].getTime))
case "boolean" => (1, (bb, v) => if (v.asInstanceOf[Boolean]) bb.put(1.toByte) else bb.put(0.toByte))
case "uuid" => (16, (bb, v) => putUUID(bb, v.asInstanceOf[UUID]))
case "byte[]" => (-1, (bb, v) => putBytes(bb, v.asInstanceOf[Array[Byte]]))
case _ =>
val msg = s"Invalid collection type: '$label'. Only primitives and Dates are supported."
throw new IllegalArgumentException(msg)
}
/**
 * Gets the appropriate byte buffer read method for the given object type.
 *
 * @param label the simple class name of the object type
 * @param bb the byte buffer to read from
 * @return a function that reads the next value from the buffer
 */
private def getReadMethod(label: String, bb: ByteBuffer): () => Object =
label.toLowerCase(Locale.US) match {
case "string" => () => AvroSimpleFeatureUtils.getString(bb)
case "int" |
"integer" => () => bb.getInt.asInstanceOf[Object]
case "double" => () => bb.getDouble.asInstanceOf[Object]
case "long" => () => bb.getLong.asInstanceOf[Object]
case "float" => () => bb.getFloat.asInstanceOf[Object]
case "boolean" => () => java.lang.Boolean.valueOf(bb.get > 0)
case "date" => () => new Date(bb.getLong())
case "uuid" => () => getUUID(bb)
case "byte[]" => () => getBytes(bb)
case _ =>
val msg = s"Invalid collection type: '$label'. Only primitives and Dates are supported."
throw new IllegalArgumentException(msg)
}
/**
 * Gets the total bytes needed to encode the given values. For most types, the size is fixed, but
 * Strings and byte arrays are encoded with a dynamic length.
 *
 * @param bytesPerItem the fixed size per item, or -1 if the size is variable
 * @param size the number of values
 * @param values the values to measure
 * @param label the simple class name of the value type
 * @return the total number of bytes needed to encode the values
 */
private def getTotalBytes(bytesPerItem: Int, size: Int, values: Iterator[_], label: String): Int =
if (bytesPerItem == -1) {
// bytes are variable, we need to calculate them based on content
      // this only happens with strings and byte arrays
// add 4 to each to use for length encoding
label.toLowerCase match {
case "string" => values.map(_.asInstanceOf[String].getBytes(StandardCharsets.UTF_8).length + 4).sum
case "byte[]" => values.map(_.asInstanceOf[Array[Byte]].length + 4).sum
case _ => throw new IllegalArgumentException("invalid type")
}
} else {
bytesPerItem * size
}
/**
 * Reads a string from a byte buffer that has been written using @see putString.
 *
 * @param bb the byte buffer to read from
 * @return the decoded string
 */
private def getString(bb: ByteBuffer): String = {
val size = bb.getInt
val buf = new Array[Byte](size)
bb.get(buf)
new String(buf, StandardCharsets.UTF_8)
}
/**
 * Writes a string to a byte buffer by encoding the length first, then the bytes of the string.
 *
 * @param bb the byte buffer to write to
 * @param s the string to write
 * @return the byte buffer, for chaining
 */
private def putString(bb: ByteBuffer, s: String): ByteBuffer = putBytes(bb, s.getBytes(StandardCharsets.UTF_8))
/**
 * Writes a byte array to a byte buffer by encoding the length first, then the bytes.
 *
 * @param bb the byte buffer to write to
 * @param arr the bytes to write
 * @return the byte buffer, for chaining
 */
private def putBytes(bb: ByteBuffer, arr: Array[Byte]): ByteBuffer = bb.putInt(arr.length).put(arr)
/**
 * Reads a byte array from a byte buffer that has been written using @see putBytes.
 *
 * @param bb the byte buffer to read from
 * @return the decoded bytes
 */
private def getBytes(bb: ByteBuffer): Array[Byte] = {
val sz = bb.getInt
val bytes = new Array[Byte](sz)
bb.get(bytes, 0, sz)
bytes
}
private def putUUID(bb: ByteBuffer, uuid: UUID): ByteBuffer =
bb.putLong(uuid.getMostSignificantBits).putLong(uuid.getLeastSignificantBits)
private def getUUID(bb: ByteBuffer): UUID = new UUID(bb.getLong, bb.getLong)
}
|
ddseapy/geomesa
|
geomesa-features/geomesa-feature-avro/src/main/scala/org/locationtech/geomesa/features/avro/AvroSimpleFeatureUtils.scala
|
Scala
|
apache-2.0
| 16,382 |
package com.julianpeeters.avro.runtime.provider
import com.julianpeeters.caseclass.generator._
import org.apache.avro.Schema
import scala.collection.JavaConversions._
import scala.reflect.runtime.universe._
object AvroTypeMatcher {
def parseField(store: SchemaToClassStore, namespace: ClassNamespace, field: Schema.Field): FieldData = {
def avroToScalaType(schema: org.apache.avro.Schema): Type = {
schema.getType match {
case Schema.Type.ARRAY => {
typeOf[List[Any]] match {
case x @ TypeRef(pre, sym, args) => {
TypeRef(pre, sym, List(avroToScalaType(schema.getElementType)))
}
}
}
case Schema.Type.BOOLEAN => typeOf[Boolean]
//case Schema.Type.BYTES => //TODO
case Schema.Type.DOUBLE => typeOf[Double]
//case Schema.Type.FIXED => //TODO
case Schema.Type.FLOAT => typeOf[Float]
case Schema.Type.LONG => typeOf[Long]
case Schema.Type.INT => typeOf[Int]
//case Schema.Type.MAP => //TODO
case Schema.Type.NULL => typeOf[Null]
case Schema.Type.STRING => typeOf[String]
case Schema.Type.RECORD => {
field.schema.getType match {
// cases where a record is found as a field vs found as a member of a union vs
// found as an element of an array
case Schema.Type.ARRAY | Schema.Type.UNION => store.generatedClasses(schema).tpe
case _ => store.generatedClasses(field.schema).tpe
}
}
case Schema.Type.UNION => {
val unionSchemas = schema.getTypes.toList
if (unionSchemas.length == 2 && unionSchemas.exists(schema => schema.getType == Schema.Type.NULL)) {
val maybeSchema = unionSchemas.find(schema => schema.getType != Schema.Type.NULL)
if (maybeSchema.isDefined ) {
typeOf[Option[Any]] match {
case x @ TypeRef(pre, sym, args) => {
TypeRef(pre, sym, List(avroToScalaType(maybeSchema.get)))
}
}
}
else error("no avro type found in this union")
}
else error("not a union field")
}
case x => error( x + " is not a valid Avro type")
}
}
val fieldType = avroToScalaType(field.schema)
FieldData(field.name, fieldType)
}
}
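// Illustrative note (added for exposition; not in the original source): given an
// Avro union schema of ["null", "string"], the UNION case above yields
// typeOf[Option[String]], so parseField would produce
// FieldData(field.name, Option[String]) for that field.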
|
julianpeeters/avro-scala-runtime-type-provider
|
src/main/scala/AvroTypeMatcher.scala
|
Scala
|
apache-2.0
| 2,436 |
package im.mange.flakeless
import im.mange.flakeless.innards.{Command, WaitForElements}
import org.openqa.selenium.{By, SearchContext}
object AssertElementListTextContains {
def apply(flakeless: Flakeless, by: By, expected: String): Unit = {
apply(flakeless.rawWebDriver, by, expected, Some(flakeless))
}
  // TODO: convert this to a Description; not yet possible.
def apply(in: SearchContext, by: By, expected: String, flakeless: Option[Flakeless] = None): Unit = {
WaitForElements(flakeless,
Command("AssertElementListTextContains", Some(in), Some(by), expected = Some(expected)),
description = es => s"${es.map(t => s"""${t.getText}""").mkString(", ")}",
condition = es => es.map(_.getText).contains(expected))
}
}
|
alltonp/flakeless
|
src/main/scala/im/mange/flakeless/AssertElementListTextContains.scala
|
Scala
|
mit
| 770 |
//
// Scaled - a scalable editor extensible via JVM languages
// http://github.com/scaled/scaled/blob/master/LICENSE
package scaled.impl
import reactual.{Future, Signal, Value}
import scala.collection.mutable.ArrayBuffer
import scaled._
// TODO: should the point be automatically adjusted when text is inserted into the buffer before
// the point?
/** Implements [[BufferView]] and [[RBufferView]]. This class mainly defines the model, and
* [[BufferArea]] etc. actually visualize the model and handle UX.
*/
class BufferViewImpl (editor :Editor, _buffer :BufferImpl, initWid :Int, initHei :Int)
extends RBufferView(initWid, initHei) {
private val _lines = ArrayBuffer[LineViewImpl]() ++ _buffer.lines.map(new LineViewImpl(_))
private val _changed = Signal[BufferView.Change]()
override def changed = _changed
def clearEphemeralPopup () {
if (popup.isDefined && popup().isEphemeral) popup.clear()
}
// narrow the return types of these guys for our internal friends
override def buffer :BufferImpl = _buffer
override def lines :Seq[LineViewImpl] = _lines
// when the buffer is edited: add, remove and update lines
_buffer.edited.onValue { _ match {
case Buffer.Insert(start, end) =>
// the first line changed, the rest are new
_lines(start.row).invalidate()
if (end.row > start.row) {
val row = start.row+1
val added = _buffer.lines.slice(row, end.row+1)
val newlns = added map(new LineViewImpl(_))
_lines.insert(row, newlns :_*)
_changed.emit(BufferView.Change(row, added.length, this))
}
// now update the point based on the insert
point() = Loc.adjustForInsert(point(), start, end)
case Buffer.Delete(start, end, deleted) =>
// update the point based on the delete before deleting the lines
point() = Loc.adjustForDelete(point(), start, end)
// the first line changed, the rest are gone
_lines(start.row).invalidate()
if (end.row > start.row) {
val row = start.row+1 ; val deleted = end.row-row+1
_lines.remove(row, deleted)
_changed.emit(BufferView.Change(row, -deleted, this))
}
case Buffer.Transform(start, end, _) =>
start.row to end.row foreach { row => _lines(row).invalidate() }
}}
// pass style changes onto the line views
_buffer.lineStyled.onValue { loc => _lines(loc.row).onStyle(loc) }
}
|
swhgoon/scaled
|
editor/src/main/scala/scaled/impl/BufferViewImpl.scala
|
Scala
|
bsd-3-clause
| 2,404 |
package blended.testsupport.pojosr
import java.io.File
import java.util.Properties
import blended.container.context.api.ContainerContext
import com.typesafe.config.{Config, ConfigFactory, ConfigParseOptions}
import com.typesafe.config.ConfigObject
import com.typesafe.config.impl.Parseable
class MockContainerContext(baseDir: String) extends ContainerContext {
override def getContainerDirectory(): String = baseDir
override def getContainerConfigDirectory(): String = getContainerDirectory() + "/etc"
override def getContainerLogDirectory(): String = baseDir
override def getProfileDirectory(): String = getContainerDirectory()
override def getProfileConfigDirectory(): String = getContainerConfigDirectory()
override def getContainerHostname(): String = "localhost"
private def getSystemProperties(): Properties = {
// Avoid ConcurrentModificationException due to parallel setting of system properties by copying properties
val systemProperties = System.getProperties()
val systemPropertiesCopy = new Properties()
systemPropertiesCopy.putAll(systemProperties)
systemPropertiesCopy
}
private def loadSystemProperties(): ConfigObject = {
Parseable
.newProperties(
getSystemProperties(),
ConfigParseOptions.defaults().setOriginDescription("system properties")
)
.parse()
}
override def getContainerConfig(): Config = {
val sysProps = loadSystemProperties()
val envProps = ConfigFactory.systemEnvironment()
ConfigFactory
.parseFile(
new File(getProfileConfigDirectory(), "application.conf"),
ConfigParseOptions.defaults().setAllowMissing(false)
)
.withFallback(sysProps)
.withFallback(envProps)
.resolve()
}
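  // Resolution order (noted for exposition): values from application.conf take
  // precedence, then system properties, then environment variables, per the
  // withFallback chain above.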
}
|
lefou/blended
|
blended.testsupport.pojosr/src/main/scala/blended/testsupport/pojosr/MockContainerContext.scala
|
Scala
|
apache-2.0
| 1,765 |
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.algebird
import algebra.BoundedSemilattice
import com.googlecode.javaewah.IntIterator
import com.googlecode.javaewah.{EWAHCompressedBitmap => CBitSet}
import scala.collection.immutable.BitSet
import scala.collection.compat._
object RichCBitSet {
def apply(xs: Int*): CBitSet = fromArray(xs.toArray)
// this sorts the array in-place
def fromArray(x: Array[Int]): CBitSet = {
val bs = new CBitSet
bs += x
}
def fromBitSet(bs: BitSet): CBitSet = {
val nbs = new CBitSet
val it = bs.iterator
while (it.hasNext) { nbs.set(it.next) }
nbs
}
implicit def cb2rcb(cb: CBitSet): RichCBitSet = new RichCBitSet(cb)
}
// An enrichment to give some scala-like operators to the compressed
// bit set.
class RichCBitSet(val cb: CBitSet) extends AnyVal {
def ++(b: CBitSet): CBitSet = cb.or(b)
def ==(b: CBitSet): Boolean = cb.equals(b)
def +=(xs: Array[Int]): cb.type = {
var idx = 0
java.util.Arrays.sort(xs)
while (idx < xs.length) {
cb.set(xs(idx))
idx += 1
}
cb
}
def toBitSet(width: Int): BitSet = {
val a = LongBitSet.empty(width)
val iter = cb.intIterator
while (iter.hasNext) {
val i = iter.next
a.set(i)
}
a.toBitSetNoCopy
}
}
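// Usage sketch (illustrative only; not part of the original source): with the
// implicit cb2rcb conversion in scope, two compressed bitmaps can be combined as
// `left ++ right` rather than `left.or(right)`, and compared with `==` instead
// of `equals`.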
private[algebird] case class LongBitSet(toArray: Array[Long]) extends AnyVal {
def toBitSetNoCopy: BitSet =
BitSet.fromBitMaskNoCopy(toArray)
def set(i: Int): Unit =
toArray(i / 64) |= 1L << (i % 64)
def +=(xs: Array[Int]): Unit = {
var idx = 0
while (idx < xs.length) {
set(xs(idx))
idx += 1
}
}
def +=(it: IntIterator): Unit =
while (it.hasNext) { set(it.next) }
}
private[algebird] object LongBitSet {
def empty(size: Int): LongBitSet =
LongBitSet(new Array[Long]((size + 63) / 64))
def fromCBitSet(cb: CBitSet, width: Int): LongBitSet = {
val lbs = empty(width)
lbs += cb.intIterator
lbs
}
}
object BloomFilter {
def apply[A](numEntries: Int, fpProb: Double)(implicit hash: Hash128[A]): BloomFilterMonoid[A] =
BloomFilter.optimalWidth(numEntries, fpProb) match {
case None =>
throw new java.lang.IllegalArgumentException(
s"BloomFilter cannot guarantee the specified false positive probability for the number of entries! (numEntries: $numEntries, fpProb: $fpProb)"
)
case Some(width) =>
val numHashes = BloomFilter.optimalNumHashes(numEntries, width)
BloomFilterMonoid[A](numHashes, width)(hash)
}
// Compute optimal number of hashes: k = m/n ln(2)
def optimalNumHashes(numEntries: Int, width: Int): Int =
    math.ceil(width.toDouble / numEntries * math.log(2)).toInt
// Compute optimal width: m = - n ln(p) / (ln(2))^2
// return None if we can't meet this false positive probability
def optimalWidth(numEntries: Int, fpProb: Double): Option[Int] = {
val widthEstimate = math
.ceil(-1 * numEntries * math.log(fpProb) / math.log(2) / math.log(2))
.toInt
if (widthEstimate == Int.MaxValue) None
else Some(widthEstimate)
}
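  /**
   * Sizing sketch (added for exposition; not part of the original API, and the
   * helper name `exampleSizing` is hypothetical): for 1,000,000 entries at a 1%
   * false-positive rate this yields a width of roughly 9.6 million bits and
   * about 7 hash functions.
   */
  private[algebird] def exampleSizing(numEntries: Int = 1000000, fpProb: Double = 0.01): Option[(Int, Int)] =
    optimalWidth(numEntries, fpProb).map(width => (width, optimalNumHashes(numEntries, width)))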
/**
* Cardinality estimates are taken from Theorem 1 on page 15 of "Cardinality estimation and dynamic length
* adaptation for Bloom filters" by Papapetrou, Siberski, and Nejdl:
* http://www.softnet.tuc.gr/~papapetrou/publications/Bloomfilters-DAPD.pdf
*
* Roughly, by using bounds on the expected number of true bits after n elements have been inserted into the
* Bloom filter, we can go from the actual number of true bits (which is known) to an estimate of the
* cardinality.
*
* approximationWidth defines an interval around the maximum-likelihood cardinality estimate. Namely, the
* approximation returned is of the form (min, estimate, max) = ((1 - approxWidth) * estimate, estimate, (1
* + approxWidth) * estimate)
*/
def sizeEstimate(
numBits: Int,
numHashes: Int,
width: Int,
approximationWidth: Double = 0.05
): Approximate[Long] = {
assert(0 <= approximationWidth && approximationWidth < 1, "approximationWidth must lie in [0, 1)")
/*
* s(n) is the expected number of bits that have been set to true after
* n elements have been inserted into the Bloom filter.
* This is \\hat{S}(n) in the cardinality estimation paper used above.
*/
def s(n: Int): Double =
width * (1 - scala.math.pow(1 - 1.0 / width, numHashes * n))
/*
* sInverse(t) is the maximum likelihood value for the number of elements
* that have been inserted into the Bloom filter when it has t bits set to true.
* This is \\hat{S}^{-1}(t) in the cardinality estimation paper used above.
*/
def sInverse(t: Int): Double =
scala.math.log1p(-t.toDouble / width) / (numHashes * scala.math.log1p(-1.0 / width))
// Variable names correspond to those used in the paper.
val t = numBits
val n = sInverse(t).round.toInt
// Take the min and max because the probability formula assumes
// nl <= sInverse(t - 1) and sInverse(t + 1) <= nr
val nl =
scala.math.min(sInverse(t - 1).floor, (1 - approximationWidth) * n).toInt
val nr =
scala.math.max(sInverse(t + 1).ceil, (1 + approximationWidth) * n).toInt
val prob =
1 -
scala.math.exp(t - 1 - s(nl)) *
scala.math.pow(s(nl) / (t - 1), t - 1) -
scala.math.exp(-scala.math.pow(t + 1 - s(nr), 2) / (2 * s(nr)))
Approximate[Long](nl, n, nr, scala.math.max(0, prob))
}
}
/**
 * Bloom Filter - a probabilistic data structure to test the presence of an element.
 *
 * Operations:
 *   1) insert: hash the value k times, setting the bit at each of the k resulting indices
 *   2) query: hash the value k times; if all k bits are set, return true, otherwise false
 *
 * http://en.wikipedia.org/wiki/Bloom_filter
 */
case class BloomFilterMonoid[A](numHashes: Int, width: Int)(implicit hash: Hash128[A])
extends Monoid[BF[A]]
with BoundedSemilattice[BF[A]] {
val hashes: BFHash[A] = BFHash[A](numHashes, width)(hash)
override val zero: BF[A] = BFZero[A](hashes, width)
/**
* Assume the bloom filters are compatible (same width and same hashing functions). This is the union of the
* 2 bloom filters.
*/
override def plus(left: BF[A], right: BF[A]): BF[A] = left ++ right
override def sumOption(as: TraversableOnce[BF[A]]): Option[BF[A]] =
if (as.iterator.isEmpty) None
else {
// share a single mutable bitset
val longBitSet = LongBitSet.empty(width)
var sets = 0
@inline def set(i: Int): Unit = {
longBitSet.set(i)
sets += 1
}
var oneItem: BFItem[A] = null
@inline def add(it: BFItem[A]): Unit = {
oneItem = it
val hs = hashes(it.item)
var pos = 0
while (pos < hs.length) {
set(hs(pos))
pos += 1
}
}
as.iterator.foreach {
case BFZero(_, _) => ()
case bf @ BFItem(_, _, _) => add(bf)
case BFSparse(_, cbitset, _) =>
val iter = cbitset.intIterator
while (iter.hasNext) { set(iter.next) }
case BFInstance(_, bitset, _) =>
          // these Ints are boxed, so that's a minor bummer
val iter = bitset.iterator
while (iter.hasNext) { set(iter.next) }
}
if (sets == 0) Some(zero)
else if (sets == numHashes && (oneItem != null)) Some(oneItem)
else if (sets < (width / 10)) {
val sbs = RichCBitSet.fromBitSet(longBitSet.toBitSetNoCopy)
Some(BFSparse(hashes, sbs, width))
} else Some(BFInstance(hashes, longBitSet.toBitSetNoCopy, width))
}
/**
* Create a bloom filter with one item.
*/
def create(item: A): BF[A] = BFItem(item, hashes, width)
/**
* Create a bloom filter with multiple items.
*/
def create(data: A*): BF[A] = create(data.iterator)
/**
* Create a bloom filter with multiple items from an iterator
*/
def create(data: Iterator[A]): BF[A] = sum(data.map(BFItem(_, hashes, width)))
}
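/**
 * Usage sketch (illustrative only; not part of the original source, and the
 * object name is hypothetical). Assumes an implicit Hash128[String] is in
 * scope, as BloomFilterMonoid requires.
 */
private[algebird] object BloomFilterUsageSketch {
  def demo(implicit hash: Hash128[String]): ApproximateBoolean = {
    // parameters chosen arbitrarily for illustration
    val monoid = BloomFilterMonoid[String](numHashes = 6, width = 32 * 1024)
    val bf = monoid.create("alice", "bob", "carol")
    // false negatives are impossible, so this is true with quantified confidence
    bf.contains("alice")
  }
}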
object BF {
implicit def equiv[A]: Equiv[BF[A]] =
new Equiv[BF[A]] {
override def equiv(a: BF[A], b: BF[A]): Boolean = {
def toIntIt(b: BF[A]): IntIterator =
b match {
case BFItem(it, hashes, _) =>
new IntIterator {
// the hashes can have collisions so we need
// to remove duplicates
val hashvalues: Array[Int] = hashes(it)
java.util.Arrays.sort(hashvalues)
@annotation.tailrec
def uniq(src: Array[Int], dst: Array[Int], prev: Int, spos: Int, dpos: Int): Int =
if (spos >= src.length) dpos
else if (spos == 0) {
// first
val first = src(0)
dst(0) = first
uniq(src, dst, first, spos + 1, dpos + 1)
} else {
val cur = src(spos)
if (cur == prev) uniq(src, dst, prev, spos + 1, dpos)
else {
dst(dpos) = cur
uniq(src, dst, cur, spos + 1, dpos + 1)
}
}
val uniqVs = new Array[Int](hashvalues.length)
val len: Int = uniq(hashvalues, uniqVs, -1, 0, 0)
var pos = 0
override def hasNext: Boolean = pos < len
override def next: Int = {
val n = uniqVs(pos)
pos += 1
n
}
}
case BFSparse(_, cbitset, _) => cbitset.intIterator
case BFInstance(_, bitset, _) =>
new IntIterator {
val boxedIter: Iterator[Int] = bitset.iterator
override def hasNext: Boolean = boxedIter.hasNext
override def next: Int = boxedIter.next
}
case BFZero(_, _) =>
new IntIterator {
override def hasNext = false
override def next: Nothing = sys.error("BFZero has no hashes set")
}
}
def eqIntIter(a: IntIterator, b: IntIterator): Boolean = {
while (a.hasNext && b.hasNext) {
if (!(a.next == b.next)) return false
}
a.hasNext == b.hasNext
}
(a eq b) || ((a.numHashes == b.numHashes) &&
(a.width == b.width) &&
eqIntIter(toIntIt(a), toIntIt(b)))
}
}
}
/**
* Bloom Filter data structure
*/
sealed abstract class BF[A] extends java.io.Serializable {
def numHashes: Int
def width: Int
/**
* The number of bits set to true in the bloom filter
*/
def numBits: Int
/**
* Proportion of bits that are set to true.
*/
def density: Double = numBits.toDouble / width
def ++(other: BF[A]): BF[A]
def +(other: A): BF[A]
def checkAndAdd(item: A): (BF[A], ApproximateBoolean)
def contains(item: A): ApproximateBoolean =
if (maybeContains(item)) {
// The false positive probability (the probability that the Bloom filter erroneously
// claims that an element x is in the set when x is not) is roughly
// p = (1 - e^(-numHashes * setCardinality / width))^numHashes
// See: http://en.wikipedia.org/wiki/Bloom_filter#Probability_of_false_positives
//
// However, the true set cardinality may not be known. From empirical evidence, though,
// it is upper bounded with high probability by 1.1 * estimatedCardinality (as long as the
// Bloom filter is not too full), so we plug this into the formula instead.
// TODO: investigate this upper bound and density more closely (or derive a better formula).
val fpProb =
if (density > 0.95)
1.0 // No confidence in the upper bound on cardinality.
else
scala.math.pow(1 - scala.math.exp(-numHashes * size.estimate * 1.1 / width), numHashes)
ApproximateBoolean(true, 1 - fpProb)
} else {
// False negatives are not possible.
ApproximateBoolean.exactFalse
}
/**
* This may be faster if you don't care about evaluating the false positive probability
*/
def maybeContains(item: A): Boolean
// Estimates the cardinality of the set of elements that have been
// inserted into the Bloom Filter.
def size: Approximate[Long]
def toBitSet: BitSet
/**
 * Compute the Hamming distance between the two Bloom filters `a` and `b`. The distance is defined as the
 * number of bits that need to change in order to transform one filter into the other.
*/
def hammingDistance(that: BF[A]): Int =
(this, that) match {
// Comparing with empty filter should give number
// of bits in other set
case (_: BFZero[A], _: BFZero[A]) => 0
case (_: BFZero[A], y: BF[A]) => y.numBits
case (x: BF[A], _: BFZero[A]) => x.numBits
// Special case for Sparse vs. Sparse
case (x: BFSparse[A], y: BFSparse[A]) => x.bits.xorCardinality(y.bits)
// Otherwise compare as bit sets
case (_, _) => (this.toBitSet ^ that.toBitSet).size
}
}
/**
* Empty bloom filter.
*/
case class BFZero[A](hashes: BFHash[A], override val width: Int) extends BF[A] {
override def toBitSet: BitSet = BitSet()
override def numHashes: Int = hashes.size
override def numBits: Int = 0
override def ++(other: BF[A]): BF[A] = other
override def +(other: A): BFItem[A] = BFItem[A](other, hashes, width)
override def checkAndAdd(other: A): (BF[A], ApproximateBoolean) =
(this + other, ApproximateBoolean.exactFalse)
override def contains(item: A): ApproximateBoolean = ApproximateBoolean.exactFalse
override def maybeContains(item: A): Boolean = false
override def size: Approximate[Long] = Approximate.exact[Long](0)
}
/**
* Bloom Filter with 1 value.
*/
case class BFItem[A](item: A, hashes: BFHash[A], override val width: Int) extends BF[A] {
override def numHashes: Int = hashes.size
override def numBits: Int = numHashes
override def toBitSet: BitSet = {
val hashvalues = hashes(item)
BitSet.fromSpecific(hashvalues)
}
private[algebird] def toSparse: BFSparse[A] =
BFSparse[A](hashes, RichCBitSet.fromArray(hashes(item)), width)
override def ++(other: BF[A]): BF[A] =
other match {
case BFZero(_, _) => this
case BFItem(otherItem, _, _) => toSparse + otherItem
case _ => other + item
}
override def +(other: A): BF[A] = this ++ BFItem(other, hashes, width)
override def checkAndAdd(other: A): (BF[A], ApproximateBoolean) =
if (other == item) {
(this, ApproximateBoolean.exactTrue)
} else {
(this + other, ApproximateBoolean.exactFalse)
}
override def contains(x: A): ApproximateBoolean = ApproximateBoolean.exact(item == x)
override def maybeContains(x: A): Boolean =
item == x
override def size: Approximate[Long] = Approximate.exact[Long](1)
}
case class BFSparse[A](hashes: BFHash[A], bits: CBitSet, override val width: Int) extends BF[A] {
import RichCBitSet._
override def numHashes: Int = hashes.size
override def toBitSet: BitSet = bits.toBitSet(width)
override def numBits: Int = {
val it = bits.intIterator
var count = 0
while (it.hasNext) {
count += 1
it.next
}
count
}
/**
* Convert to a dense representation
*/
def dense: BFInstance[A] = BFInstance[A](hashes, bits.toBitSet(width), width)
override def ++(other: BF[A]): BF[A] = {
require(this.width == other.width)
require(this.numHashes == other.numHashes)
other match {
case BFZero(_, _) => this
case BFItem(item, _, _) => this + item
case bf @ BFSparse(_, otherBits, _) => {
// assume same hashes used
// This is expensive in general.
        // We check to see if we are filling < 10%
        // of the bits; if so, stay sparse, if not, go dense
val newMaxSize = numBits + bf.numBits
if (newMaxSize < (width / 10)) {
BFSparse(hashes, bits ++ otherBits, width)
} else {
// Make a dense bitset
val lbs = LongBitSet.empty(width)
lbs += bits.intIterator
lbs += otherBits.intIterator
BFInstance(hashes, lbs.toBitSetNoCopy, width)
}
}
case _ => other ++ this
}
}
override def +(item: A): BF[A] = {
val bitsToActivate = bits.clone
bitsToActivate += hashes(item)
BFSparse(hashes, bitsToActivate, width)
}
override def checkAndAdd(other: A): (BF[A], ApproximateBoolean) =
(this + other, contains(other))
override def maybeContains(item: A): Boolean = {
val il = hashes(item)
var idx = 0
while (idx < il.length) {
val i = il(idx)
if (!bits.get(i)) return false
idx += 1
}
true
}
override def size: Approximate[Long] =
BloomFilter.sizeEstimate(numBits, numHashes, width, 0.05)
}
/*
* Bloom filter with multiple values
*/
case class BFInstance[A](hashes: BFHash[A], bits: BitSet, override val width: Int) extends BF[A] {
override def numHashes: Int = hashes.size
/**
* The number of bits set to true
*/
override def numBits: Int = bits.size
override def toBitSet: BitSet = bits
override def ++(other: BF[A]): BF[A] = {
require(this.width == other.width)
require(this.numHashes == other.numHashes)
other match {
case BFZero(_, _) => this
case BFItem(item, _, _) => this + item
case BFSparse(_, otherBits, _) =>
// assume same hashes used
BFInstance(hashes, bits | (new RichCBitSet(otherBits)).toBitSet(width), width)
case BFInstance(_, otherBits, _) => {
// assume same hashes used
BFInstance(hashes, bits ++ otherBits, width)
}
}
}
override def +(item: A): BFInstance[A] = {
val itemHashes = hashes(item)
val thisBS = LongBitSet.empty(width)
thisBS += itemHashes
BFInstance[A](hashes, bits | (thisBS.toBitSetNoCopy), width)
}
override def checkAndAdd(other: A): (BF[A], ApproximateBoolean) =
(this + other, contains(other))
override def maybeContains(item: A): Boolean = {
val il = hashes(item)
var idx = 0
while (idx < il.length) {
val i = il(idx)
if (!bits.contains(i)) return false
idx += 1
}
true
}
// use an approximation width of 0.05
override def size: Approximate[Long] =
BloomFilter.sizeEstimate(numBits, numHashes, width, 0.05)
}
object BFInstance {
def apply[A](hashes: BFHash[A], width: Int): BFInstance[A] =
empty(hashes, width)
def empty[A](hashes: BFHash[A], width: Int): BFInstance[A] =
BFInstance(hashes, BitSet.empty, width)
}
case class BFHash[A](numHashes: Int, width: Int)(implicit hash: Hash128[A]) {
def size: Int = numHashes
def apply(s: A): Array[Int] =
nextHash(s, 0, new Array[Int](4), 4, new Array[Int](numHashes))
private def splitLong(x: Long, buffer: Array[Int], idx: Int): Unit = {
// unfortunately, this is the function we committed to some time ago, and we have tests
// locking it down. x.toInt & 0x7fffffff should work, but this gives a few different values
def toNonNegativeInt(x: Long): Int =
(math
.abs(x)
.toInt) & 0x7fffffff // no change for positive numbers, converts Integer.MIN_VALUE to positive number
val upper = toNonNegativeInt(x >> 32)
val lower = toNonNegativeInt((x << 32) >> 32)
buffer(idx) = upper
buffer(idx + 1) = lower
}
@annotation.tailrec
private def nextHash(
valueToHash: A,
hashIndex: Int,
buffer: Array[Int],
bidx: Int,
target: Array[Int]
): Array[Int] =
if (hashIndex == numHashes) target
else {
val thisBidx = if (bidx > 3) {
val (a, b) =
hash.hashWithSeed((numHashes - hashIndex).toLong, valueToHash)
splitLong(a, buffer, 0)
splitLong(b, buffer, 2)
0
} else bidx
target(hashIndex) = buffer(thisBidx) % width
nextHash(valueToHash, hashIndex + 1, buffer, thisBidx + 1, target)
}
}
case class BloomFilterAggregator[A](bfMonoid: BloomFilterMonoid[A])
extends MonoidAggregator[A, BF[A], BF[A]] {
override val monoid: BloomFilterMonoid[A] = bfMonoid
override def prepare(value: A): BF[A] = monoid.create(value)
override def present(bf: BF[A]): BF[A] = bf
}
object BloomFilterAggregator {
def apply[A](numHashes: Int, width: Int)(implicit hash: Hash128[A]): BloomFilterAggregator[A] =
BloomFilterAggregator[A](BloomFilterMonoid[A](numHashes, width))
}
|
twitter/algebird
|
algebird-core/src/main/scala/com/twitter/algebird/BloomFilter.scala
|
Scala
|
apache-2.0
| 21,178 |
import leon.annotation._
import leon.lang._
import leon.lang.synthesis._
object BinaryTree {
sealed abstract class Tree
case class Node(left : Tree, value : Int, right : Tree) extends Tree
case object Leaf extends Tree
def content(t : Tree): Set[Int] = t match {
case Leaf => Set()
case Node(l, v, r) => content(l) ++ Set(v) ++ content(r)
}
def size(t: Tree): Int = {
t match {
case Leaf => 0
case Node(l, v, r) => size(l) + size(r) + 1
}
} ensuring { _ >= 0 }
def insert(in: Tree, v: Int): Tree = {
Node(in, v, Leaf)
} ensuring { res => content(res) == content(in) ++ Set(v) }
def delete(in: Tree, v: Int): Tree = {
in match {
case Node(l, vv, r) =>
if (vv == v) {
delete(l, v) match {
case Node(ll, lv, lr) =>
Node(Node(ll, lv, lr), lv, delete(r, v))
case Leaf =>
delete(r, v)
}
} else {
Node(delete(l, v), vv, delete(r, v))
}
case Leaf =>
Leaf
}
} ensuring { res => content(res) == content(in) -- Set(v) }
// def union(in1: Tree, in2: Tree): Tree = {
// in1 match {
// case Node(l1, v1, r1) =>
// insert(union(r1, union(l1, in2)), v1)
// case Leaf =>
// in2
// }
// } ensuring { res => content(res) == content(in1) ++ content(in2) }
def union(in1: Tree, in2: Tree): Tree = choose {
(res: Tree) => content(res) == content(in1) ++ content(in2)
}
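  // Note (added for exposition): `choose` delegates the implementation to the
  // Leon synthesizer, which must derive a body satisfying the postcondition;
  // the commented-out recursive union above is one hand-written solution.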
}
|
ericpony/scala-examples
|
testcases/synthesis/oopsla2013/BinaryTree/Union.scala
|
Scala
|
mit
| 1,492 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package wvlet.airframe.codec
import wvlet.airframe.surface.Surface
import wvlet.airframe.surface.reflect.ReflectSurfaceFactory
import scala.reflect.runtime.universe._
/**
 * Scala 2 compatibility layer providing reflection-based codec and surface factories.
 */
trait CompatBase {
def codecOf[A: TypeTag]: MessageCodec[A] = MessageCodecFactory.defaultFactory.of[A]
def surfaceOfClass(cl: Class[_]): Surface = ReflectSurfaceFactory.ofClass(cl)
}
|
wvlet/airframe
|
airframe-codec/.jvm/src/main/scala-2/wvlet/airframe/codec/CompatBase.scala
|
Scala
|
apache-2.0
| 928 |
import com.edgesysdesign.droidrtty
import org.scalatest.matchers.ShouldMatchers
import org.scalatest.FunSpec
class Specs extends FunSpec with ShouldMatchers {
describe("a spec") {
it("should do something") {
}
}
}
|
edge-sys-design/DroidRTTY
|
src/test/scala/Specs.scala
|
Scala
|
mpl-2.0
| 227 |
package nlp_serde.apps.d2d
import java.io.File
import java.text.SimpleDateFormat
import java.util.Date
import nlp_serde.annotators.StanfordAnnotator
import nlp_serde.writers.{PerLineJsonWriter, JsonWriter}
import nlp_serde.{FileFilters, Document}
import nlp_serde.readers.{PerLineJsonReader, DocPerFile}
import java.text.ParseException
/**
* Created by sameer on 11/3/14.
*/
class D2DReader extends DocPerFile {
private var _id = 0
def newDocId = {
_id += 1
"DOC%05d".format(_id - 1)
}
override def readDoc(name: String): Option[Document] = {
val f = new File(name)
if ((Set("NigeriaTwitterKeyWords",
"China'sEconomyModerateGrowthTransformationandtheBrightFuture",
"ConsulGeneralLIUKanInterviewedbyNewsAgencyofNigeria",
"SpeechfromChineseConsulGeneralLiuKan", "BlogbyyusufJune2014", "README") contains f.getName.dropRight(4)) ||
f.getCanonicalPath.contains("BokoHaramAbducts60WomenInAdamawa")) return None
val lines = io.Source.fromFile(f).getLines().map(_.trim).filterNot(_.isEmpty).toSeq.view.force
val doc = new Document()
doc.path = Some(f.getCanonicalPath.replaceAll(".*/2015/demo-JIFX_15-2/", ""))
doc.id = newDocId //doc.path.get
val docType = D2DReader.extractType(f)
doc.attrs("type") = docType.toString
println(f.getName)
try {
val title = D2DReader.extractTitle(lines, docType)
title.foreach(t => if (t.contains("Page has moved")) return None)
title.foreach(t => doc.attrs("title") = t)
val subtitle = D2DReader.extractSubTitle(lines, docType)
subtitle.foreach(t => doc.attrs("subtitle") = t)
val text = D2DReader.extractText(lines, docType)
text.foreach(t => doc.text = t)
val date = D2DReader.extractDate(lines, docType, f.getName)
date.foreach(d => {
val f = new SimpleDateFormat("yyyy-MM-dd")
doc.attrs("date") = f.format(d)
})
if (!doc.attrs.contains("date")) {
println(doc.toString)
System.exit(1)
}
} catch {
case e: Exception => {
println("Error in reading: " + f.getCanonicalPath)
e.printStackTrace()
System.exit(1)
}
}
Some(doc)
}
def readAll(baseDir: String) = {
readDir(baseDir + "/ali-baba/", FileFilters.byExtension("txt")) ++
readDir(baseDir + "/from-randy/", FileFilters.byExtension("txt")) ++
readDir(baseDir + "/from-randy/blogs/", FileFilters.byExtension("txt"))
}
}
object D2DReader {
object DocType extends Enumeration {
val fromRandy, blog, aliBaba, news = Value
}
def extractType(f: File): DocType.Value = {
val name = f.getName
val parent = f.getParent
if (name.startsWith("Blog")) return DocType.blog
if (parent.endsWith("ali-baba")) return DocType.aliBaba
if (parent.contains("from-randy")) return DocType.fromRandy
else return DocType.news
//println(s"name: $name, parent: $parent")
assert(false, s"Everything should be covered by the above cases, but $name in $parent is not.")
DocType.fromRandy
}
def extractTitle(lines: Seq[String], docType: DocType.Value): Option[String] = {
if (docType == DocType.aliBaba)
return Some(lines(0))
if (docType == DocType.blog)
return Some(lines(0))
if (docType == DocType.fromRandy)
return Some(lines(0))
if (docType == DocType.news) {
val idx = lines.indexWhere(_.contains("TITLE:"))
assert(idx >= 0, "Could not find title in news story!")
val titleString = lines(idx + 1)
return Some(titleString)
}
None
}
def extractSubTitle(lines: Seq[String], docType: DocType.Value): Option[String] = {
if (docType == DocType.aliBaba) {
val numHeader = 5 +
(if (lines(2).startsWith("IIR") || lines(2).startsWith("TAC") || lines(2).startsWith("Gaza")) 0 else 1) +
(if (lines(2).startsWith("Voice")) 1 else 0)
val pruned = lines(2).split(" ").take(numHeader).mkString(" ").trim
return Some(pruned)
}
None
}
def extractText(lines: Seq[String], docType: DocType.Value): Option[String] = {
if (docType == DocType.aliBaba) {
val text = lines.drop(5).mkString("\\n")
val numHeader = 5 +
(if (lines(2).startsWith("IIR") || lines(2).startsWith("TAC") || lines(2).startsWith("Gaza")) 0 else 1) +
(if (lines(2).startsWith("Voice")) 1 else 0)
val pruned = lines(2).split(" ").drop(numHeader).mkString(" ").trim
return Some(pruned + " " + text.trim)
}
if (docType == DocType.blog) {
if (lines(1).isEmpty || lines(1).charAt(0).isDigit) {
return Some(lines.drop(2).mkString(" ").trim)
}
return Some(lines.drop(1).mkString(" ").trim)
}
if (docType == DocType.fromRandy) {
if (lines(1).isEmpty || lines(1).charAt(0).isDigit) {
return Some(lines(0) + ". " + lines.drop(2).mkString(" ").trim)
}
return Some(lines(0) + ". " + lines.drop(1).mkString(" ").trim)
}
if (docType == DocType.news) {
val idx = lines.indexWhere(_.contains("TEXT:"))
assert(idx >= 0, "Could not find text in news story!")
return Some(lines.drop(idx + 1).mkString(" ").trim)
}
None
}
def extractDate(lines: Seq[String], docType: DocType.Value, fname: String): Option[Date] = {
if (docType == DocType.aliBaba) {
val formatter = new SimpleDateFormat("dd-MMM-yy")
return Some(formatter.parse(lines(1)))
}
if (docType == DocType.blog) {
if (!lines(1).isEmpty && lines(1).charAt(0).isDigit) {
val formatter = new SimpleDateFormat("dd MMMMM yyyy")
return Some(formatter.parse(lines(1)))
}
if (lines(0).startsWith("Blog by yusuf ") && lines(0).replaceAll("Blog by yusuf ", "").charAt(0).isDigit) {
val formatter = new SimpleDateFormat("dd MMMMM yyyy")
return Some(formatter.parse(lines(0).replaceAll("Blog by yusuf ", "")))
}
if (lines(0).startsWith("Blog 2 by yusuf ") && lines(0).replaceAll("Blog 2 by yusuf ", "").charAt(0).isDigit) {
val formatter = new SimpleDateFormat("dd MMMMM yyyy")
return Some(formatter.parse(lines(0).replaceAll("Blog 2 by yusuf ", "")))
}
if (lines(0).startsWith("Blog 3 by yusuf ") && lines(0).replaceAll("Blog 3 by yusuf ", "").charAt(0).isDigit) {
val formatter = new SimpleDateFormat("dd MMMMM yyyy")
return Some(formatter.parse(lines(0).replaceAll("Blog 3 by yusuf ", "")))
}
return None
}
if (docType == DocType.fromRandy) {
if (lines(1).contains("2014")) {
if (lines(1).charAt(0).isDigit) {
val formatter = new SimpleDateFormat("dd MMMMM yyyy")
return Some(formatter.parse(lines(1)))
}
// maybe MMMMM dd, yyyy or MMMMM dd yyyy
try {
val formatter = new SimpleDateFormat("MMMMM dd yyyy")
return Some(formatter.parse(lines(1)))
} catch {
case e: ParseException => {}
}
try {
val formatter = new SimpleDateFormat("MMMMM dd, yyyy")
return Some(formatter.parse(lines(1)))
} catch {
case e: ParseException => {}
}
if (lines(1).startsWith("BEIJING, ")) {
val str = lines(1).replaceAll("BEIJING, ", "")
val beijingDate = str.split(" ").take(3).mkString(" ")
try {
val formatter = new SimpleDateFormat("MMM. dd, yyyy")
return Some(formatter.parse(beijingDate))
} catch {
case e: ParseException => {}
}
try {
val formatter = new SimpleDateFormat("MMMMM dd, yyyy")
return Some(formatter.parse(beijingDate))
} catch {
case e: ParseException => {}
}
}
}
if (lines(2).contains("2014")) {
val formatter = new SimpleDateFormat("dd MMMMM yyyy")
return Some(formatter.parse(lines(2)))
}
}
if (docType == DocType.news) {
val idx = lines.indexWhere(_.contains("CITE:"))
assert(idx >= 0, "Could not find date in news story!")
try {
val dateString = lines(idx + 1).split(" ").takeRight(3).mkString(" ")
val formatter = new SimpleDateFormat("dd MMMMM yyyy")
return Some(formatter.parse(dateString))
} catch {
case e: ParseException => {}
}
try {
val dateString = lines(idx + 1).split(" ").drop(2).take(3).mkString(" ")
val formatter = new SimpleDateFormat("dd MMMMM, yyyy")
return Some(formatter.parse(dateString))
} catch {
case e: ParseException => {}
}
try {
val dateString = lines(idx + 1).split(" ").drop(2).take(3).mkString(" ")
val formatter = new SimpleDateFormat("MMMMM dd, yyyy")
return Some(formatter.parse(dateString))
} catch {
case e: ParseException => {}
}
try {
val dateString = lines(idx + 2).trim
val formatter = new SimpleDateFormat("dd.MMM.yyyy")
return Some(formatter.parse(dateString))
} catch {
case e: ParseException => {}
}
try {
val dateString = lines(idx + 1).trim
val formatter = new SimpleDateFormat("dd.MMM.yyyy")
return Some(formatter.parse(dateString))
} catch {
case e: ParseException => {}
}
try {
val dateString = lines(idx + 2).split(" ").takeRight(4).dropRight(1).mkString(" ")
val formatter = new SimpleDateFormat("dd MMMMM yyyy")
return Some(formatter.parse(dateString))
} catch {
case e: ParseException => {}
}
try {
val dateString = fname.take(8)
assert(dateString.forall(_.isDigit), "Could not find date in: %s (%s)".format(fname, lines.drop(idx).take(3).mkString("\\n")))
val formatter = new SimpleDateFormat("yyyyMMdd")
return Some(formatter.parse(dateString))
} catch {
case e: ParseException => {
println("Could not find date in: %s (%s)".format(fname, lines.drop(idx).take(3).mkString("\\n")))
System.exit(1)
}
}
}
None
}
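  /**
   * Factoring sketch (illustrative; not in the original source, and the helper
   * name `tryParse` is hypothetical): the repeated try/parse blocks above could
   * share a helper like this.
   */
  private def tryParse(pattern: String, s: String): Option[Date] =
    try { Some(new SimpleDateFormat(pattern).parse(s)) } catch { case _: ParseException => None }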
def main(args: Array[String]): Unit = {
val outputFile = "data/d2d/docs.txt.json.gz"
val reader = new D2DReader
val docs = reader.readFilelist("data/2015/demo-JIFX_15-2/filelist.names", "data/2015/demo-JIFX_15-2/" + _)
//val annotator = new StanfordAnnotator()
//val nlpDocs = annotator.process(docs)
val writer = new PerLineJsonWriter(true)
writer.write(outputFile, docs)
}
}
object SplitOnKeyword {
def main(args: Array[String]): Unit = {
val input = args(0)
val keyword = args(1).toLowerCase
val file = new File(input)
val dir = file.getParent
val name = file.getName
val output = dir + "/" + keyword + "." + name
println(s"Reading from $input into $output")
val writer = new PerLineJsonWriter(true)
val reader = new PerLineJsonReader(true)
writer.write(output, reader.read(input).filter(_.text.toLowerCase.matches(s".*$keyword.*")))
}
}
|
sameersingh/nlp_serde
|
src/main/scala/nlp_serde/apps/d2d/D2DReader.scala
|
Scala
|
bsd-2-clause
| 11,023 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.monitoring.metrics
import java.time.Duration
import com.typesafe.config.{Config, ConfigFactory}
import kamon.metric.PeriodSnapshot
import kamon.module.MetricReporter
import kamon.Kamon
import kamon.tag.Lookups
import org.apache.openwhisk.core.connector.{Activation, EventMessage}
import org.apache.openwhisk.core.entity.{ActivationResponse, Subject, UUID}
import org.junit.runner.RunWith
import org.scalatest.BeforeAndAfterEach
import org.scalatest.junit.JUnitRunner
import scala.concurrent.duration._
@RunWith(classOf[JUnitRunner])
class KamonRecorderTests extends KafkaSpecBase with BeforeAndAfterEach with KamonMetricNames {
var reporter: MetricReporter = _
override protected def beforeEach(): Unit = {
super.beforeEach()
TestReporter.reset()
val newConfig = ConfigFactory.parseString("""kamon {
| metric {
| tick-interval = 50 ms
| optimistic-tick-alignment = no
| }
|}""".stripMargin).withFallback(ConfigFactory.load())
Kamon.registerModule("test", TestReporter)
Kamon.reconfigure(newConfig)
reporter = TestReporter
}
override protected def afterEach(): Unit = {
reporter.stop()
Kamon.reconfigure(ConfigFactory.load())
super.afterEach()
}
behavior of "KamonConsumer"
val initiator = "initiatorTest"
val namespaceDemo = "demo"
val namespaceGuest = "guest"
val actionWithCustomPackage = "apimgmt/createApi"
val actionWithDefaultPackage = "createApi"
val kind = "nodejs:10"
val memory = 256
it should "push user events to kamon" in {
createCustomTopic(EventConsumer.userEventTopic)
val consumer = createConsumer(kafkaPort, system.settings.config, KamonRecorder)
publishStringMessageToKafka(
EventConsumer.userEventTopic,
newActivationEvent(s"$namespaceDemo/$actionWithCustomPackage").serialize)
publishStringMessageToKafka(
EventConsumer.userEventTopic,
newActivationEvent(s"$namespaceDemo/$actionWithDefaultPackage").serialize)
publishStringMessageToKafka(
EventConsumer.userEventTopic,
newActivationEvent(s"$namespaceGuest/$actionWithDefaultPackage").serialize)
sleep(sleepAfterProduce, "sleeping post produce")
consumer.shutdown().futureValue
sleep(4.second, "sleeping for Kamon reporters to get invoked")
// Custom package
TestReporter.counter(activationMetric, namespaceDemo, actionWithCustomPackage)(0).value shouldBe 1
TestReporter
.counter(activationMetric, namespaceDemo, actionWithCustomPackage)
.filter((t) => t.tags.get(Lookups.plain(actionMemory)) == memory.toString)(0)
.value shouldBe 1
TestReporter
.counter(activationMetric, namespaceDemo, actionWithCustomPackage)
.filter((t) => t.tags.get(Lookups.plain(actionKind)) == kind)(0)
.value shouldBe 1
TestReporter
.counter(statusMetric, namespaceDemo, actionWithCustomPackage)
.filter((t) => t.tags.get(Lookups.plain(actionStatus)) == ActivationResponse.statusDeveloperError)(0)
.value shouldBe 1
TestReporter.counter(coldStartMetric, namespaceDemo, actionWithCustomPackage)(0).value shouldBe 1
TestReporter.histogram(waitTimeMetric, namespaceDemo, actionWithCustomPackage).size shouldBe 1
TestReporter.histogram(initTimeMetric, namespaceDemo, actionWithCustomPackage).size shouldBe 1
TestReporter.histogram(durationMetric, namespaceDemo, actionWithCustomPackage).size shouldBe 1
// Default package
TestReporter.histogram(durationMetric, namespaceDemo, actionWithDefaultPackage).size shouldBe 1
// Blacklisted namespace should not be tracked
TestReporter.counter(activationMetric, namespaceGuest, actionWithDefaultPackage)(0).value shouldBe 0
    // Blacklisted namespaces should still be counted in the "openwhisk.namespace.activations" metric
TestReporter.namespaceCounter(namespaceActivationMetric, namespaceGuest)(0).value shouldBe 1
}
private def newActivationEvent(actionPath: String) =
EventMessage(
"test",
Activation(actionPath, 2, 3.millis, 5.millis, 11.millis, kind, false, memory, None),
Subject("testuser"),
initiator,
UUID("test"),
Activation.typeName)
private object TestReporter extends MetricReporter {
var snapshotAccumulator = PeriodSnapshot.accumulator(Duration.ofDays(1), Duration.ZERO)
override def reportPeriodSnapshot(snapshot: PeriodSnapshot): Unit = {
snapshotAccumulator.add(snapshot)
}
override def stop(): Unit = {}
override def reconfigure(config: Config): Unit = {}
def reset(): Unit = {
snapshotAccumulator = PeriodSnapshot.accumulator(Duration.ofDays(1), Duration.ZERO)
}
def counter(metricName: String, namespace: String, action: String) = {
snapshotAccumulator
.peek()
.counters
.filter(_.name == metricName)
.flatMap(_.instruments)
.filter(_.tags.get(Lookups.plain(actionNamespace)) == namespace)
.filter(_.tags.get(Lookups.plain(initiatorNamespace)) == initiator)
.filter(_.tags.get(Lookups.plain(actionName)) == action)
}
def namespaceCounter(metricName: String, namespace: String) = {
snapshotAccumulator
.peek()
.counters
.filter(_.name == metricName)
.flatMap(_.instruments)
.filter(_.tags.get(Lookups.plain(actionNamespace)) == namespace)
.filter(_.tags.get(Lookups.plain(initiatorNamespace)) == initiator)
}
def histogram(metricName: String, namespace: String, action: String) = {
snapshotAccumulator
.peek()
.histograms
.filter(_.name == metricName)
.flatMap(_.instruments)
.filter(_.tags.get(Lookups.plain(actionNamespace)) == namespace)
.filter(_.tags.get(Lookups.plain(initiatorNamespace)) == initiator)
.filter(_.tags.get(Lookups.plain(actionName)) == action)
}
}
}
|
RSulzmann/openwhisk
|
core/monitoring/user-events/src/test/scala/org/apache/openwhisk/core/monitoring/metrics/KamonRecorderTests.scala
|
Scala
|
apache-2.0
| 6,701 |
package japgolly.microlibs.recursion
import cats.~>
object ScalaVerSpecific {
private[recursion] type Coseq[F[_], G[_]] = Lambda[A => F[G[A]]] ~> Lambda[A => G[F[A]]]
}
|
japgolly/microlibs-scala
|
recursion/shared/src/main/scala-2/japgolly/microlibs/recursion/ScalaVerSpecific.scala
|
Scala
|
apache-2.0
| 172 |
package optics
package excerices.monocle
import monocle.Iso
import monocle.macros.GenIso
import org.specs2.Specification
import org.specs2.cats.CatsEqMatcher
class IsoSpec extends Specification with CatsEqMatcher {
def is =
s2"""
Entering the Monocle world.
  For some simple cases an Iso can be generated automatically using a built-in macro.
  Some of the available operations are:
- get $test01
- reverseGet $test02
- modify $test03
- reverse $test04
  Of course it's possible to create Isos manually and compose them as was done earlier.
"""
def test01 = iso.get(MS(27.78)) must beEqualTo(27.78)
def test02 = iso.reverseGet(27.78) must beEqvTo(MS(27.78))
def test03 = iso.modify(_ + 10)(MS(27.78)) must beEqvTo(MS(37.78))
def test04 = iso.reverse.modify((ms: MS) => MS(ms.v * 2))(100.0) must beEqualTo(200.0)
lazy val iso: Iso[MS, Double] = GenIso[MS, Double]
}
|
theiterators/scalar-2017-optics-workshop
|
src/test/scala/optics/excerices/monocle/Ex01_iso.scala
|
Scala
|
mit
| 958 |
/*
* Copyright 2016 Dennis Vriend
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package akka.persistence.jdbc.query
import akka.persistence.jdbc.util.Schema.{ Oracle, MySQL, Postgres }
import akka.persistence.query.EventEnvelope
abstract class CurrentEventsByTagTest(config: String) extends QueryTestSpec(config) {
it should "not find an event by tag for unknown tag" in {
withTestActors { (actor1, actor2, actor3) ⇒
actor1 ! withTags(1, "one")
actor1 ! withTags(2, "two")
actor1 ! withTags(3, "three")
eventually {
journalDao.countJournal.futureValue shouldBe 3
}
withCurrentEventsByTag()("unknown", 0) { tp ⇒
tp.request(Int.MaxValue)
tp.expectComplete()
}
}
}
it should "persist and find a tagged event with one tag" in
withTestActors { (actor1, actor2, actor3) ⇒
withClue("Persisting a tagged event") {
actor1 ! withTags(1, "one")
eventually {
withCurrentEventsByPersistenceid()("my-1") { tp ⇒
tp.request(Long.MaxValue)
tp.expectNext(EventEnvelope(1, "my-1", 1, 1))
tp.expectComplete()
}
}
}
withClue("query should find the event by tag") {
withCurrentEventsByTag()("one", 0) { tp ⇒
tp.request(Int.MaxValue)
tp.expectNext(EventEnvelope(1, "my-1", 1, 1))
tp.expectComplete()
}
}
withClue("query should find the event by persistenceId") {
withCurrentEventsByPersistenceid()("my-1", 1, 1) { tp ⇒
tp.request(Int.MaxValue)
tp.expectNext(EventEnvelope(1, "my-1", 1, 1))
tp.expectComplete()
}
}
}
it should "persist and find a tagged event with multiple tags" in
withTestActors { (actor1, actor2, actor3) ⇒
withClue("Persisting multiple tagged events") {
actor1 ! withTags(1, "one", "1", "prime")
actor1 ! withTags(2, "two", "2", "prime")
actor1 ! withTags(3, "three", "3", "prime")
actor1 ! withTags(4, "four", "4")
actor1 ! withTags(5, "five", "5", "prime")
actor2 ! withTags(3, "three", "3", "prime")
actor3 ! withTags(3, "three", "3", "prime")
actor1 ! 1
actor1 ! 1
eventually {
journalDao.countJournal.futureValue shouldBe 9
}
}
withClue("query should find events for tag 'one'") {
withCurrentEventsByTag()("one", 0) { tp ⇒
tp.request(Int.MaxValue)
tp.expectNext(EventEnvelope(1, "my-1", 1, 1))
tp.expectComplete()
}
}
withClue("query should find events for tag 'prime'") {
withCurrentEventsByTag()("prime", 0) { tp ⇒
tp.request(Int.MaxValue)
tp.expectNextUnordered(
EventEnvelope(1, "my-1", 1, 1),
EventEnvelope(2, "my-1", 2, 2),
EventEnvelope(3, "my-1", 3, 3),
EventEnvelope(5, "my-1", 5, 5),
EventEnvelope(1, "my-2", 1, 3),
EventEnvelope(1, "my-3", 1, 3)
)
tp.expectComplete()
}
}
withClue("query should find events for tag '3'") {
withCurrentEventsByTag()("3", 0) { tp ⇒
tp.request(Int.MaxValue)
tp.expectNextUnordered(
EventEnvelope(3, "my-1", 3, 3),
EventEnvelope(1, "my-2", 1, 3),
EventEnvelope(1, "my-3", 1, 3)
)
tp.expectComplete()
}
}
withClue("query should find events for tag '3'") {
withCurrentEventsByTag()("4", 0) { tp ⇒
tp.request(Int.MaxValue)
tp.expectNext(EventEnvelope(4, "my-1", 4, 4))
tp.expectComplete()
}
}
}
}
class PostgresCurrentEventsByTagTest extends CurrentEventsByTagTest("postgres-application.conf") {
dropCreate(Postgres())
}
class MySQLCurrentEventsByTagTest extends CurrentEventsByTagTest("mysql-application.conf") {
dropCreate(MySQL())
}
class OracleCurrentEventsByTagTest extends CurrentEventsByTagTest("oracle-application.conf") {
dropCreate(Oracle())
protected override def beforeEach(): Unit =
clearOracle()
override protected def afterAll(): Unit =
clearOracle()
}
|
prettynatty/akka-persistence-jdbc
|
src/test/scala/akka/persistence/jdbc/query/CurrentEventsByTagTest.scala
|
Scala
|
apache-2.0
| 4,761 |
package org.jetbrains.plugins.scala
package testingSupport.scalatest
import org.jetbrains.plugins.scala.lang.structureView.element.Test._
import org.jetbrains.plugins.scala.testingSupport.ScalaTestingTestCase
import org.jetbrains.plugins.scala.testingSupport.test.{AbstractTestConfigurationProducer, TestConfigurationUtil}
import org.jetbrains.plugins.scala.testingSupport.test.structureView.TestNodeProvider
/**
* @author Roman.Shein
* @since 09.10.2014.
*/
abstract class ScalaTestTestCase extends ScalaTestingTestCase {
override protected val configurationProducer: AbstractTestConfigurationProducer =
TestConfigurationUtil.scalaTestConfigurationProducer
override protected def runFileStructureViewTest(testClassName: String, status: Int, tests: String*): Unit = {
super.runFileStructureViewTest(testClassName, status, (if (status == IgnoredStatusId) {
tests.map(_ + TestNodeProvider.ignoredSuffix)
} else if (status == PendingStatusId) {
tests.map(_ + TestNodeProvider.pendingSuffix)
} else tests): _*)
}
}
|
jastice/intellij-scala
|
scala/scala-impl/test/org/jetbrains/plugins/scala/testingSupport/scalatest/ScalaTestTestCase.scala
|
Scala
|
apache-2.0
| 1,055 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package wvlet.airframe.metrics
import wvlet.airspec.AirSpec
/**
*/
class TimeVectorTest extends AirSpec {
test("succinct time window unit") {
// 2020 has 366 days, so we need to fix the offset
val t = TimeWindow.withUTC.withOffset("2019-01-02 01:23:45")
def check(x: String, expected: String): Unit = {
val w = t.parse(x)
val s = TimeVector.succinctTimeVector(w.startUnixTime, w.endUnixTime)
s.toDurationString shouldBe expected
}
val lst = Seq(
"-1d" -> "1d",
"-7d/0w" -> "1w",
"-1w" -> "1w",
"-20d" -> "20d",
"30d/2019-01-01" -> "30d",
"31d/2019-01-01" -> "1M",
"+2M/2019-01-01" -> "2M",
"-40d" -> "40d",
"-1M" -> "1M",
"-1M/0y" -> "1M",
"-2M" -> "2M",
"-356d" -> "356d",
"-366d" -> "366d",
"-365d" -> "1y",
"-1q" -> "1q",
"+5q" -> "5q",
"-1y" -> "1y"
)
lst.foreach(x => check(x._1, x._2))
}
}
|
wvlet/airframe
|
airframe-metrics/.jvm/src/test/scala/wvlet/airframe/metrics/TimeVectorTest.scala
|
Scala
|
apache-2.0
| 1,657 |
/*
* Copyright 2011-2019 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.spark.compiler
package graph
import scala.collection.JavaConversions._
import org.objectweb.asm.Type
import com.asakusafw.lang.compiler.model.graph.UserOperator
import com.asakusafw.lang.compiler.planning.SubPlan
import com.asakusafw.spark.compiler.planning.SubPlanInfo
import com.asakusafw.spark.compiler.spi.NodeCompiler
class CoGroupCompiler extends NodeCompiler {
override def support(
subplan: SubPlan)(
implicit context: NodeCompiler.Context): Boolean = {
subplan.getAttribute(classOf[SubPlanInfo]).getDriverType == SubPlanInfo.DriverType.COGROUP
}
override def instantiator: Instantiator = CoGroupInstantiator
override def compile(
subplan: SubPlan)(
implicit context: NodeCompiler.Context): Type = {
assert(support(subplan), s"The subplan is not supported: ${subplan}")
val subPlanInfo = subplan.getAttribute(classOf[SubPlanInfo])
val primaryOperator = subPlanInfo.getPrimaryOperator
assert(primaryOperator.isInstanceOf[UserOperator],
s"The primary operator should be user operator: ${primaryOperator} [${subplan}]")
val operator = primaryOperator.asInstanceOf[UserOperator]
val builder =
new CoGroupClassBuilder(
operator)(
subplan.label,
subplan.getOutputs.toSeq) with CacheOnce
context.addClass(builder)
}
}
|
ueshin/asakusafw-spark
|
compiler/src/main/scala/com/asakusafw/spark/compiler/graph/CoGroupCompiler.scala
|
Scala
|
apache-2.0
| 1,961 |
package com.twitter.finagle.kestrelx
import com.twitter.concurrent.{Broker, Offer}
import com.twitter.conversions.time._
import com.twitter.finagle.{Addr, Group, Name, ServiceFactory}
import com.twitter.finagle.builder._
import com.twitter.finagle.kestrelx.protocol.{Response, Command, Kestrel}
import com.twitter.finagle.thrift.{ThriftClientFramedCodec, ClientId, ThriftClientRequest}
import com.twitter.util.{Closable, Duration, Future, Return, Throw, Try, Timer, Var, Witness}
import _root_.java.{util => ju}
import _root_.java.lang.UnsupportedOperationException
import _root_.java.util.logging.Level
import _root_.java.net.SocketAddress
import scala.collection.mutable
import scala.collection.JavaConversions._
/**
* Indicates that all [[com.twitter.finagle.kestrelx.ReadHandle ReadHandles]]
* that are backing a given [[com.twitter.finagle.kestrelx.MultiReader]] have
* died.
*/
object AllHandlesDiedException extends Exception
private[finagle] object MultiReaderHelper {
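/**
* Descriptive note (added for clarity; not in the original source): merges a
* dynamic set of ReadHandles into a single ReadHandle. Messages from all
* handles are multiplexed onto one broker; a handle that reports an error is
* closed and dropped; handles that leave the cluster are closed; and once the
* set becomes empty the merged handle fails with AllHandlesDiedException.
*/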
private[finagle] def merge(readHandles: Var[Try[Set[ReadHandle]]]): ReadHandle = {
val error = new Broker[Throwable]
val messages = new Broker[ReadMessage]
val close = new Broker[Unit]
val clusterUpdate = new Broker[Set[ReadHandle]]
def onClose(handles: Set[ReadHandle]) {
handles foreach { _.close }
error ! ReadClosedException
}
def loop(handles: Set[ReadHandle]) {
if (handles.isEmpty) {
error ! AllHandlesDiedException
return
}
val queues = handles.map { _.messages }.toSeq
val errors = handles.map { h => h.error map { _ => h } }.toSeq
val closeOf = close.recv { _ => onClose(handles) }
// We sequence here to ensure that `close` gets priority over reads.
val of = closeOf orElse {
Offer.choose(
closeOf,
Offer.choose(queues:_*) { m =>
messages ! m
loop(handles)
},
Offer.choose(errors:_*) { h =>
h.close()
loop(handles - h)
},
clusterUpdate.recv { newHandles =>
// Close any handles that exist in old set but not the new one.
(handles &~ newHandles) foreach { _.close() }
loop(newHandles)
}
)
}
of.sync()
}
// Wait until the ReadHandles set is populated before initializing.
val readHandlesPopulatedFuture = readHandles.changes.collect[Try[Set[ReadHandle]]] {
case r@Return(x) if x.nonEmpty => r
}.toFuture()
val closeWitness: Future[Closable] = readHandlesPopulatedFuture flatMap {
// Flatten the Future[Try[T]] to Future[T].
Future.const
} map { handles =>
// Once the cluster is non-empty, start looping and observing updates.
loop(handles)
// Send cluster updates on the appropriate broker.
val witness = Witness { tsr: Try[Set[ReadHandle]] =>
synchronized {
tsr match {
case Return(newHandles) => clusterUpdate !! newHandles
case Throw(t) => error !! t
}
}
}
readHandles.changes.register(witness)
}
val closeHandleOf: Offer[Unit] = close.send(()) map { _ =>
closeWitness onSuccess { _.close() }
}
ReadHandle(messages.recv, error.recv, closeHandleOf)
}
}
/**
* Read from multiple clients in round-robin fashion, "grabby hands"
* style using Kestrel's memcache protocol. The load balancing is simple,
* and falls out naturally from the use of the {{Offer}} mechanism: When
* there are multiple available messages, round-robin across them. Otherwise,
* wait for the first message to arrive.
*
* Var[Addr] example:
* {{{
* val name: com.twitter.finagle.Name = Resolver.eval(...)
* val va: Var[Addr] = name.bind()
* val readHandle =
* MultiReaderMemcache(va, "the-queue")
* .clientBuilder(
* ClientBuilder()
* .codec(MultiReaderMemcache.codec)
* .requestTimeout(1.minute)
* .connectTimeout(1.minute)
* .hostConnectionLimit(1) /* etc... but do not set hosts or build */)
* .retryBackoffs(/* Stream[Duration], Timer; optional */)
* .build()
* }}}
*/
object MultiReaderMemcache {
def apply(dest: Name, queueName: String): MultiReaderBuilderMemcache = {
dest match {
case Name.Bound(va) => apply(va, queueName)
case Name.Path(_) => throw new UnsupportedOperationException(
"Failed to bind Name.Path in `MultiReaderMemcache.apply`"
)
}
}
def apply(va: Var[Addr], queueName: String): MultiReaderBuilderMemcache = {
val config = MultiReaderConfig[Command, Response](va, queueName)
new MultiReaderBuilderMemcache(config)
}
/**
* Helper for getting the right codec for the memcache protocol
* @return the Kestrel codec
*/
def codec = Kestrel()
}
/**
* Read from multiple clients in round-robin fashion, "grabby hands"
* style using Kestrel's memcache protocol. The load balancing is simple,
* and falls out naturally from the use of the {{Offer}} mechanism: When
* there are multiple available messages, round-robin across them. Otherwise,
* wait for the first message to arrive.
*
* Example with a custom client builder:
* {{{
* val name: com.twitter.finagle.Name = Resolver.eval(...)
* val va: Var[Addr] = name.bind()
* val readHandle =
* MultiReaderThrift(va, "the-queue")
* .clientBuilder(
* ClientBuilder()
* .codec(MultiReaderThrift.codec(ClientId("myClientName"))
* .requestTimeout(1.minute)
* .connectTimeout(1.minute)
* .hostConnectionLimit(1) /* etc... but do not set hosts or build */)
* .retryBackoffs(/* Stream[Duration], Timer; optional */)
* .build()
* }}}
*
* Example without a custom client builder, so the clientId is passed to apply
* {{{
* val name: com.twitter.finagle.Name = Resolver.eval(...)
* val va: Var[Addr] = name.bind()
* val readHandle =
* MultiReaderThrift(va, "the-queue", ClientId("myClientName"))
* .retryBackoffs(/* Stream[Duration], Timer; optional */)
* .build()
* }}}
*/
object MultiReaderThrift {
/**
* Used to create a thrift based MultiReader with a ClientId when a custom
* client builder will not be used. If a custom client builder will be
* used then it is more reasonable to use the version of apply that does
* not take a ClientId or else the client id will need to be passed to
* both apply and the codec in clientBuilder.
* @param dest a [[com.twitter.finagle.Name]] representing the Kestrel
* endpoints to connect to
* @param queueName the name of the queue to read from
* @param clientId the clientid to be used
* @return A MultiReaderBuilderThrift
*/
def apply(dest: Name, queueName: String, clientId: Option[ClientId]): MultiReaderBuilderThrift = {
dest match {
case Name.Bound(va) => apply(va, queueName, clientId)
case Name.Path(_) => throw new UnsupportedOperationException(
"Failed to bind Name.Path in `MultiReaderThrift.apply`"
)
}
}
/**
* Used to create a thrift based MultiReader with a ClientId when a custom
* client builder will not be used. If a custom client builder will be
* used then it is more reasonable to use the version of apply that does
* not take a ClientId or else the client id will need to be passed to
* both apply and the codec in clientBuilder.
* @param va endpoints for Kestrel
* @param queueName the name of the queue to read from
* @param clientId the clientid to be used
* @return A MultiReaderBuilderThrift
*/
def apply(
va: Var[Addr],
queueName: String,
clientId: Option[ClientId]
): MultiReaderBuilderThrift = {
val config = MultiReaderConfig[ThriftClientRequest, Array[Byte]](va, queueName, clientId)
new MultiReaderBuilderThrift(config)
}
/**
* Used to create a thrift-based MultiReader when a ClientId will either not
* be provided, or will be provided to the codec as part of creating
* a custom client builder.
* This is provided as a separate method for Java compatibility.
* @param va endpoints for Kestrel
* @param queueName the name of the queue to read from
* @return A MultiReaderBuilderThrift
*/
def apply(va: Var[Addr], queueName: String): MultiReaderBuilderThrift = {
this(va, queueName, None)
}
/**
* Helper for getting the right codec for the thrift protocol
* @return the ThriftClientFramedCodec codec
*/
def codec(clientId: ClientId) = ThriftClientFramedCodec(Some(clientId))
}
/**
* Read from multiple clients in round-robin fashion, "grabby hands"
* style. The load balancing is simple, and falls out naturally from
* the use of the {{Offer}} mechanism: When there are multiple
* available messages, round-robin across them. Otherwise, wait for
* the first message to arrive.
*
* Var[Addr] example:
* {{{
* val name: com.twitter.finagle.Name = Resolver.eval(...)
* val va: Var[Addr] = name.bind()
* val readHandle =
* MultiReader(va, "the-queue")
* .clientBuilder(
* ClientBuilder()
* .codec(Kestrel())
* .requestTimeout(1.minute)
* .connectTimeout(1.minute)
* .hostConnectionLimit(1) /* etc... but do not set hosts or build */)
* .retryBackoffs(/* Stream[Duration], Timer; optional */)
* .build()
* }}}
*/
@deprecated("Use MultiReaderMemcache or MultiReaderThrift instead", "6.15.1")
object MultiReader {
/**
* Create a Kestrel memcache protocol based builder
*/
@deprecated("Use MultiReaderMemcache.apply instead", "6.15.1")
def apply(va: Var[Addr], queueName: String): ClusterMultiReaderBuilder = {
val config = ClusterMultiReaderConfig(va, queueName)
new ClusterMultiReaderBuilder(config)
}
@deprecated("Use Var[Addr]-based `apply` method", "6.8.2")
def apply(cluster: Cluster[SocketAddress], queueName: String): ClusterMultiReaderBuilder = {
val Name.Bound(va) = Name.fromGroup(Group.fromCluster(cluster))
apply(va, queueName)
}
@deprecated("Use Var[Addr]-based `apply` method", "6.8.2")
def apply(clients: Seq[Client], queueName: String): ReadHandle =
apply(clients map { _.readReliably(queueName) })
/**
* A java friendly interface: we use scala's implicit conversions to
* feed in a {{java.util.Iterator<ReadHandle>}}
*/
@deprecated("Use Var[Addr]-based `apply` method", "6.8.2")
def apply(handles: ju.Iterator[ReadHandle]): ReadHandle =
MultiReaderHelper.merge(Var.value(Return(handles.toSet)))
@deprecated("Use Var[Addr]-based `apply` method", "6.8.2")
def apply(handles: Seq[ReadHandle]): ReadHandle =
MultiReaderHelper.merge(Var.value(Return(handles.toSet)))
@deprecated("Use Var[Addr]-based `apply` method", "6.8.2")
def newBuilder(cluster: Cluster[SocketAddress], queueName: String) = apply(cluster, queueName)
@deprecated("Use Var[Addr]-based `apply` method", "6.8.2")
def merge(readHandleCluster: Cluster[ReadHandle]): ReadHandle = {
val varTrySet = Group.fromCluster(readHandleCluster).set map { Try(_) }
MultiReaderHelper.merge(varTrySet)
}
}
/**
* Multi reader configuration settings
*/
final case class MultiReaderConfig[Req, Rep] private[kestrelx](
private val _va: Var[Addr],
private val _queueName: String,
private val _clientId: Option[ClientId] = None,
private val _txnAbortTimeout: Duration = Duration.Top,
private val _clientBuilder:
Option[ClientBuilder[Req, Rep, Nothing, ClientConfig.Yes, ClientConfig.Yes]] = None,
private val _timer: Option[Timer] = None,
private val _retryBackoffs: Option[() => Stream[Duration]] = None) {
// Delegators to make a friendly public API
val va = _va
val queueName = _queueName
val clientBuilder = _clientBuilder
val timer = _timer
val retryBackoffs = _retryBackoffs
val clientId = _clientId
val txnAbortTimeout = _txnAbortTimeout
}
@deprecated("Use MultiReaderConfig[Req, Rep] instead", "6.15.1")
final case class ClusterMultiReaderConfig private[kestrelx](
private val _va: Var[Addr],
private val _queueName: String,
private val _clientBuilder:
Option[ClientBuilder[Command, Response, Nothing, ClientConfig.Yes, ClientConfig.Yes]] = None,
private val _timer: Option[Timer] = None,
private val _retryBackoffs: Option[() => Stream[Duration]] = None) {
// Delegators to make a friendly public API
val va = _va
val queueName = _queueName
val clientBuilder = _clientBuilder
val timer = _timer
val retryBackoffs = _retryBackoffs
/**
* Convert to MultiReaderConfig[Command, Response] during deprecation
*/
def toMultiReaderConfig: MultiReaderConfig[Command, Response] = {
MultiReaderConfig[Command, Response](
this.va,
this.queueName,
None,
Duration.Top,
this.clientBuilder,
this.timer,
this.retryBackoffs)
}
}
/**
* Factory for [[com.twitter.finagle.kestrelx.ReadHandle]] instances.
*/
abstract class MultiReaderBuilder[Req, Rep, Builder] private[kestrelx](
config: MultiReaderConfig[Req, Rep]) {
type ClientBuilderBase = ClientBuilder[Req, Rep, Nothing, ClientConfig.Yes, ClientConfig.Yes]
private[this] val ReturnEmptySet = Return(Set.empty[ReadHandle])
protected[kestrelx] def copy(config: MultiReaderConfig[Req, Rep]): Builder
protected[kestrelx] def withConfig(
f: MultiReaderConfig[Req, Rep] => MultiReaderConfig[Req, Rep]): Builder = {
copy(f(config))
}
protected[kestrelx] def defaultClientBuilder: ClientBuilderBase
protected[kestrelx] def createClient(factory: ServiceFactory[Req, Rep]): Client
/**
* Specify the ClientBuilder used to generate client objects. <b>Do not</b> specify the
* hosts or cluster on the given ClientBuilder, and <b>do not</b> invoke <code>build()</code>
* on it. You must specify a codec and host
* connection limit, however.
*/
def clientBuilder(clientBuilder: ClientBuilderBase): Builder =
withConfig(_.copy(_clientBuilder = Some(clientBuilder)))
/**
* Specify the stream of Durations and Timer used for retry backoffs.
*/
def retryBackoffs(backoffs: () => Stream[Duration], timer: Timer): Builder =
withConfig(_.copy(_retryBackoffs = Some(backoffs), _timer = Some(timer)))
/**
* Specify the clientId to use, if applicable, for the default builder.
* If the default client builder is overridden using {{clientBuilder}} then
* this clientId has no effect.
*/
def clientId(clientId: ClientId): Builder =
withConfig(_.copy(_clientId = Some(clientId)))
private[this] def buildReadHandleVar(): Var[Try[Set[ReadHandle]]] = {
val baseClientBuilder = config.clientBuilder match {
case Some(clientBuilder) => clientBuilder
case None => defaultClientBuilder
}
// Use a mutable Map so that we can modify it in-place on cluster change.
val currentHandles = mutable.Map.empty[SocketAddress, ReadHandle]
val event = config.va.changes map {
case Addr.Bound(socketAddrs) => {
val newHandles = (socketAddrs &~ currentHandles.keySet) map { socketAddr =>
val factory = baseClientBuilder
.hosts(socketAddr)
.buildFactory()
val client = createClient(factory)
val handle = (config.retryBackoffs, config.timer) match {
case (Some(backoffs), Some(timer)) =>
client.readReliably(config.queueName, timer, backoffs())
case _ => client.readReliably(config.queueName)
}
(socketAddr, handle)
}
synchronized {
currentHandles.retain { case (addr, _) => socketAddrs.contains(addr) }
currentHandles ++= newHandles
}
Return(currentHandles.values.toSet)
}
case Addr.Failed(t) => Throw(t)
case _ => ReturnEmptySet
}
Var(Return(Set.empty), event)
}
/**
* Constructs a merged ReadHandle over the members of the configured cluster.
* The handle is updated as members are added or removed.
*/
def build(): ReadHandle = MultiReaderHelper.merge(buildReadHandleVar())
}
abstract class MultiReaderBuilderMemcacheBase[Builder] private[kestrelx](
config: MultiReaderConfig[Command, Response])
extends MultiReaderBuilder[Command, Response, Builder](config) {
type MemcacheClientBuilder =
ClientBuilder[Command, Response, Nothing, ClientConfig.Yes, ClientConfig.Yes]
protected[kestrelx] def defaultClientBuilder: MemcacheClientBuilder =
ClientBuilder()
.codec(Kestrel())
.connectTimeout(1.minute)
.requestTimeout(1.minute)
.hostConnectionLimit(1)
.daemon(true)
protected[kestrelx] def createClient(factory: ServiceFactory[Command, Response]): Client =
Client(factory)
}
@deprecated("Use MultiReaderBuilderMemcache instead", "6.15.1")
class ClusterMultiReaderBuilder private[kestrelx](config: ClusterMultiReaderConfig)
extends MultiReaderBuilderMemcacheBase[ClusterMultiReaderBuilder](config.toMultiReaderConfig) {
private def this(config: MultiReaderConfig[Command, Response]) = this(
ClusterMultiReaderConfig(config.va, config.queueName, config.clientBuilder, config.timer))
protected[kestrelx] def copy(
config: MultiReaderConfig[Command, Response]): ClusterMultiReaderBuilder =
new ClusterMultiReaderBuilder(config)
protected[kestrelx] def copy(config: ClusterMultiReaderConfig): ClusterMultiReaderBuilder =
new ClusterMultiReaderBuilder(config)
protected[kestrelx] def withConfig(
f: ClusterMultiReaderConfig => ClusterMultiReaderConfig): ClusterMultiReaderBuilder = {
copy(f(config))
}
}
/**
* Factory for [[com.twitter.finagle.kestrelx.ReadHandle]] instances using
* Kestrel's memcache protocol.
*/
class MultiReaderBuilderMemcache private[kestrelx](config: MultiReaderConfig[Command, Response])
extends MultiReaderBuilderMemcacheBase[MultiReaderBuilderMemcache](config) {
protected[kestrelx] def copy(
config: MultiReaderConfig[Command, Response]): MultiReaderBuilderMemcache =
new MultiReaderBuilderMemcache(config)
}
/**
* Factory for [[com.twitter.finagle.kestrelx.ReadHandle]] instances using
* Kestrel's thrift protocol.
*/
class MultiReaderBuilderThrift private[kestrelx](
config: MultiReaderConfig[ThriftClientRequest, Array[Byte]])
extends MultiReaderBuilder[ThriftClientRequest, Array[Byte], MultiReaderBuilderThrift](config) {
type ThriftClientBuilder =
ClientBuilder[ThriftClientRequest, Array[Byte], Nothing, ClientConfig.Yes, ClientConfig.Yes]
protected[kestrelx] def copy(
config: MultiReaderConfig[ThriftClientRequest, Array[Byte]]): MultiReaderBuilderThrift =
new MultiReaderBuilderThrift(config)
protected[kestrelx] def defaultClientBuilder: ThriftClientBuilder =
ClientBuilder()
.codec(ThriftClientFramedCodec(config.clientId))
.connectTimeout(1.minute)
.requestTimeout(1.minute)
.hostConnectionLimit(1)
.daemon(true)
protected[kestrelx] def createClient(
factory: ServiceFactory[ThriftClientRequest, Array[Byte]]): Client =
Client.makeThrift(factory, config.txnAbortTimeout)
/**
* While reading items, an open transaction will be auto-aborted if it is not confirmed by the client within the specified
* timeout.
*/
def txnAbortTimeout(txnAbortTimeout: Duration) =
withConfig(_.copy(_txnAbortTimeout = txnAbortTimeout))
}
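// Hedged usage sketch (added for illustration; not in the original source):
// building a thrift-based multi-reader that aborts unconfirmed transactions
// after 30 seconds. The queue name and client id here are assumptions.
private object MultiReaderBuilderThriftUsageSketch {
def readHandle(va: Var[Addr]): ReadHandle =
MultiReaderThrift(va, "the-queue", Some(ClientId("example-reader")))
.txnAbortTimeout(30.seconds)
.build()
}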
|
jamescway/finagle
|
finagle-kestrelx/src/main/scala/com/twitter/finagle/kestrelx/MultiReader.scala
|
Scala
|
apache-2.0
| 19,409 |
package actor
import scala.actors.Actor
/**
* Created by fqc on 2016/7/16.
*/
class ActorTask1 extends Actor {
override def act(): Unit = {
while (true) {
receive {
// a partial function (PartialFunction)
case "start" => {
println("starting.......")
println("当前线程:" + Thread.currentThread().getName)
// Thread.sleep(1000)
println("started")
}
case "stop" => {
println("stopping......")
println("当前线程:" + Thread.currentThread().getName)
//Thread.sleep(1000)
println("stopped")
}
}
}
}
}
class ActorTask2 extends Actor {
override def act(): Unit = {
loop(
react {
case "start" => {
println("starting.......")
println("当前线程:" + Thread.currentThread().getName)
// Thread.sleep(1000)
println("started")
}
case "stop" => {
println("stopping......")
println("当前线程:" + Thread.currentThread().getName)
//Thread.sleep(1000)
println("stopped")
}
case "exit" => {
exit()
}
}
)
}
}
object Boot {
def main(args: Array[String]) {
// val t1: ActorTask1 = new ActorTask1
// t1.start()
// t1 ! "start"
// t1 ! "stop"
// println("发送消息完毕")
//
val t2: ActorTask2 = new ActorTask2 // the react style reuses threads, so it is more efficient
t2.start()
t2 ! "start" //! 无返回值的异步消息,执行完该语句会立即执行下一句。 actor发送消息是异步的,但是接收到消息执行的过程是同步的
//t2.!("start")// 这种写法不如上面的DSL风格
t2 !? "start" //!? 所以想要start确认start执行完毕之后在发送消息 !? 该方法会阻塞
t2 ! "stop"
println("发送消息完毕")
t2 ! "start" //可以不断发送消息
t2 ! "exit"
// Summary: the examples above send plain strings, but objects work better,
// and case classes are best of all: they both encapsulate data and support pattern matching
}
}
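// Hedged sketch (added for illustration; not in the original file): as the
// summary above suggests, a case class message both carries data and supports
// pattern matching in the actor's handler.
case class StartTask(name: String)
class ActorTask3 extends Actor {
override def act(): Unit = {
react {
case StartTask(name) => println("starting task: " + name)
}
}
}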
|
fqc/Scala_sidepro
|
src/actor/ActorMessage.scala
|
Scala
|
mit
| 2,129 |
/*
* Copyright 2014 OSSCube UK.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.osscube.spark.aerospike.rdd
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.sources.RelationProvider
class DefaultSource extends RelationProvider {
/**
* Creates a new relation for Aerospike select statement.
*/
def createRelation(sqlContext: SQLContext, parameters: Map[String, String]) = {
val partitionsPerServerPresent: Boolean = parameters.contains("partitionsPerServer") && !parameters("partitionsPerServer").isEmpty
val useUdfWithoutIndexQueryPresent: Boolean = parameters.contains("useUdfWithoutIndexQuery") && !parameters("useUdfWithoutIndexQuery").isEmpty
val useUdfWithoutIndexQuery = if (useUdfWithoutIndexQueryPresent) parameters("useUdfWithoutIndexQuery") == "true" else false
if(partitionsPerServerPresent)
AeroRelation(parameters("initialHost"), parameters("select"), parameters("partitionsPerServer").toInt, useUdfWithoutIndexQuery)(sqlContext)
else
AeroRelation(parameters("initialHost"), parameters("select"), 1, useUdfWithoutIndexQuery)(sqlContext)
}
}
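// Hedged usage sketch (added for illustration; not in the original file):
// loading Aerospike data through this relation provider via Spark 1.4+'s
// DataFrameReader. The host and the select statement are assumptions.
private object DefaultSourceUsageSketch {
import org.apache.spark.sql.DataFrame
def load(sqlContext: SQLContext): DataFrame =
sqlContext.read
.format("com.osscube.spark.aerospike.rdd")
.option("initialHost", "localhost:3000")
.option("select", "select * from test.demo")
.option("partitionsPerServer", "4")
.load()
}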
|
viirya/aerospark
|
src/main/scala/com/osscube/spark/aerospike/rdd/DefaultSource.scala
|
Scala
|
apache-2.0
| 1,636 |
/*
* Copyright 2001-2012 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.selenium
import org.openqa.selenium.WebDriver
import org.openqa.selenium.firefox.FirefoxDriver
import org.openqa.selenium.firefox.FirefoxProfile
import org.openqa.selenium.safari.SafariDriver
import org.openqa.selenium.chrome.ChromeDriver
import org.openqa.selenium.ie.InternetExplorerDriver
import org.openqa.selenium.htmlunit.HtmlUnitDriver
import org.openqa.selenium.By
import org.openqa.selenium.WebElement
import java.util.concurrent.TimeUnit
import org.openqa.selenium.support.ui.WebDriverWait
import org.openqa.selenium.support.ui.Clock
import org.openqa.selenium.support.ui.Sleeper
import org.openqa.selenium.support.ui.ExpectedCondition
import scala.collection.mutable.Buffer
import scala.collection.JavaConverters._
import org.openqa.selenium.Cookie
import java.util.Date
import org.scalatest.time.Span
import org.scalatest.time.Milliseconds
import org.openqa.selenium.TakesScreenshot
import org.openqa.selenium.OutputType
import java.io.File
import java.io.FileOutputStream
import java.io.FileInputStream
import org.openqa.selenium.Alert
import org.openqa.selenium.support.ui.Select
import org.scalatest.exceptions.TestFailedException
import org.scalatest.exceptions.StackDepthException
import org.openqa.selenium.JavascriptExecutor
import org.scalatest.ScreenshotCapturer
import org.scalatest.time.Nanosecond
import org.scalatest.Resources
/**
* Trait that provides a domain specific language (DSL) for writing browser-based tests using <a href="http://seleniumhq.org">Selenium</a>.
*
* To use ScalaTest's Selenium DSL, mix trait <code>WebBrowser</code> into your test class. This trait provides the DSL in its
* entirety except for one missing piece: an implicit <code>org.openqa.selenium.WebDriver</code>. One way to provide the missing
* implicit driver is to declare one as a member of your test class, like this:
*
* <pre class="stHighlight">
* import org.scalatest._
* import selenium._
*
* class BlogSpec extends FlatSpec with ShouldMatchers with WebBrowser {
*
* implicit val webDriver: WebDriver = new HtmlUnitDriver
*
* "The blog app home page" should "have the correct title" in {
* go to (host + "index.html")
* pageTitle should be ("Awesome Blog")
* }
* }
* </pre>
*
* <p>
* For convenience, however, ScalaTest provides a <code>WebBrowser</code> subtrait containing an implicit <code>WebDriver</code> for each
* driver provided by Selenium.
* Thus a simpler way to use the <code>HtmlUnit</code> driver, for example, is to extend
* ScalaTest's <a href="HtmlUnit.html"><code>HtmlUnit</code></a> trait, like this:
* </p>
*
* <pre class="stHighlight">
* import org.scalatest._
* import selenium._
*
* class BlogSpec extends FlatSpec with ShouldMatchers with HtmlUnit {
*
* "The blog app home page" should "have the correct title" in {
* go to (host + "index.html")
* pageTitle should be ("Awesome Blog")
* }
* }
* </pre>
*
* <p>
* The web driver traits provided by ScalaTest are:
* </p>
*
* <table style="border-collapse: collapse; border: 1px solid black">
* <tr><th style="background-color: #CCCCCC; border-width: 1px; padding: 3px; text-align: center; border: 1px solid black"><strong>Driver</strong></th><th style="background-color: #CCCCCC; border-width: 1px; padding: 3px; text-align: center; border: 1px solid black"><strong><code>WebBrowser</code> subtrait</strong></th></tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: center">
* Google Chrome
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: center">
* <a href="Chrome.html"><code>Chrome</code></a>
* </td>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: center">
* Mozilla Firefox
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: center">
* <a href="Firefox.html"><code>Firefox</code></a>
* </td>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: center">
* HtmlUnit
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: center">
* <a href="HtmlUnit.html"><code>HtmlUnit</code></a>
* </td>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: center">
* Microsoft Internet Explorer
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: center">
* <a href="InternetExplorer.html"><code>InternetExplorer</code></a>
* </td>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: center">
* Apple Safari
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: center">
* <a href="Safari.html"><code>Safari</code></a>
* </td>
* </tr>
* </table>
*
* <h2>Navigation</h2>
*
* <p>
* You can ask the browser to retrieve a page (go to a URL) like this:
* </p>
*
* <pre class="stHighlight">
* go to "http://www.artima.com"
* </pre>
*
* <p>
* Note: If you are using the <em>page object pattern</em>, you can also go to a page using the <code>Page</code> instance, as
* illustrated in the section on <a href="#pageObjects">page objects</a> below.
* </p>
*
* <p>
* Once you have retrieved a page, you can fill in and submit forms, query for the values of page elements, and make assertions.
* In the following example, selenium will go to <code>http://www.google.com</code>, fill in the text box with
* <code>Cheese!</code>, press the submit button, and wait for the result returned from an AJAX call:
* </p>
*
* <pre class="stHighlight">
* go to "http://www.google.com"
* click on "q"
* enter("Cheese!")
* submit()
* // Google's search is rendered dynamically with JavaScript.
* eventually { pageTitle should be ("Cheese! - Google Search") }
* </pre>
*
* <p>
* In the above example, the <code>"q"</code> used in “<code>click on "q"</code>”
* can be either the id or name of an element. ScalaTest's Selenium DSL will try to look it up by id first. If it cannot find
* any element with an id equal to <code>"q"</code>, it will then try to look it up by name <code>"q"</code>.
* </p>
*
* <p>
* Alternatively, you can be more specific:
* </p>
*
* <pre class="stHighlight">
* click on id("q") // to lookup by id "q"
* click on name("q") // to lookup by name "q"
* </pre>
*
* <p>
* In addition to <code>id</code> and <code>name</code>, you can use the following approaches to lookup elements, just as you can do with
* Selenium's <code>org.openqa.selenium.By</code> class:
* </p>
*
* <ul>
* <li><code>xpath</code></li>
* <li><code>className</code></li>
* <li><code>cssSelector</code></li>
* <li><code>linkText</code></li>
* <li><code>partialLinkText</code></li>
* <li><code>tagName</code></li>
* </ul>
*
* <p>
* For example, you can select by link text with:
* </p>
*
* <pre class="stHighlight">
* click on linkText("click here!")
* </pre>
*
* <p>
* If an element is not found via any form of lookup, evaluation will complete abruptly with a <code>TestFailedException</code>.
* </p>
*
* <h2>Getting and setting input element values</h2>
*
* <p>
* ScalaTest's Selenium DSL provides a clear, simple syntax for accessing and updating the values of input elements such as
* text fields, radio buttons, checkboxes, and selection lists. If a requested element is not found, or if it is found but is
* not of the requested type, an exception will immediately result causing the test to fail.
* </p>
*
* <h3>Text fields and text areas</h3>
*
* <p>
* You can change a text field's value by assigning it via the <code>=</code> operator, like this:
* </p>
*
* <pre class="stHighlight">
* textField("q").value = "Cheese!"
* </pre>
*
* <p>
* And you can access a text field's value by simply invoking <code>value</code> on it:
* </p>
*
* <pre class="stHighlight">
* textField("q").value should be ("Cheese!")
* </pre>
*
* <p>
* If the text field is empty, <code>value</code> will return an empty string (<code>""</code>).
* </p>
*
* <p>
* You can use the same syntax with text areas by replacing <code>textField</code> with <code>textArea</code>, as in:
* </p>
*
* <pre class="stHighlight">
* textArea("body").value = "I saw something cool today!"
* textArea("body").value should be ("I saw something cool today!")
* </pre>
*
* <p>
* An alternate way to enter data into a text field or text area is to use <code>enter</code> or <code>pressKeys</code>.
* Although both of these send characters to the active element, <code>pressKeys</code> can be used on any kind of
* element, whereas <code>enter</code> can only be used on text fields and text areas. Another difference is that <code>enter</code>
* will clear the text field or area before sending the characters, effectively replacing any currently existing text with the
* new text passed to <code>enter</code>. By contrast, <code>pressKeys</code> does not do any clearing—it just appends
* more characters to any existing text. You can back up with <code>pressKeys</code>, however, by sending explicit backspace
* characters, <code>"\u0008"</code>.
* </p>
*
* <p>
* To use these commands, you must first click on the text field or area you are interested in
* to give it the focus. Here's an example:
* </p>
*
* <pre class="stHighlight">
* click on "q"
* enter("Cheese!")
* </pre>
*
* <p>
* Here's a (contrived) example of using <code>pressKeys</code> with backspace to fix a typo:
* </p>
*
* <pre class="stHighlight">
* click on "q" // q is the name or id of a text field or text area
* enter("Cheesey!") // Oops, meant to say Cheese!
* pressKeys("\u0008\u0008") // Send two backspaces; now the value is Cheese
* pressKeys("!") // Send the missing exclamation point; now the value is Cheese!
* </pre>
*
* <h3>Radio buttons</h3>
*
* <p>
* Radio buttons work together in groups. For example, you could have a group of radio buttons, like this:
* </p>
*
* <pre>
* <input type="radio" id="opt1" name="group1" value="Option 1"> Option 1</input>
* <input type="radio" id="opt2" name="group1" value="Option 2"> Option 2</input>
* <input type="radio" id="opt3" name="group1" value="Option 3"> Option 3</input>
* </pre>
*
* <p>
* You can select an option in either of two ways:
* </p>
*
* <pre class="stHighlight">
* radioButtonGroup("group1").value = "Option 2"
* radioButtonGroup("group1").selection = Some("Option 2")
* </pre>
*
* <p>
* Likewise, you can read the currently selected value of a group of radio buttons in two ways:
* </p>
*
* <pre class="stHighlight">
* radioButtonGroup("group1").value should be ("Option 2")
* radioButtonGroup("group1").selection should be (Some("Option 2"))
* </pre>
*
* <p>
* If the radio button has no selection at all, <code>selection</code> will return <code>None</code> whereas <code>value</code>
* will throw a <code>TestFailedException</code>. By using <code>value</code>, you are indicating you expect a selection, and if there
* isn't a selection that should result in a failed test.
* </p>
*
* <p>
* If you would like to work with <code>RadioButton</code> element directly, you can select it by calling <code>radioButton</code>:
* </p>
*
* <pre class="stHighlight">
* click on radioButton("opt1")
* </pre>
*
* <p>
* you can check if an option is selected by calling <code>isSelected</code>:
* </p>
*
* <pre class="stHighlight">
* radioButton("opt1").isSelected should be (true)
* </pre>
*
* <p>
* to get the value of radio button, you can call <code>value</code>:
* </p>
*
* <pre class="stHighlight">
* radioButton("opt1").value should be ("Option 1")
* </pre>
*
* <h3>Checkboxes</h3>
*
* <p>
* A checkbox is in one of two states: selected or cleared. Here's how you select a checkbox:
* </p>
*
* <pre class="stHighlight">
* checkbox("cbx1").select()
* </pre>
*
* <p>
* And here's how you'd clear one:
* </p>
*
* <pre class="stHighlight">
* checkbox("cbx1").clear()
* </pre>
*
* <p>
* You can access the current state of a checkbox with <code>isSelected</code>:
* </p>
*
* <pre class="stHighlight">
* checkbox("cbx1").isSelected should be (true)
* </pre>
*
* <h3>Single-selection dropdown lists</h3>
*
* <p>
* Given the following single-selection dropdown list:
* </p>
*
* <pre>
* <select id="select1">
* <option value="option1">Option 1</option>
* <option value="option2">Option 2</option>
* <option value="option3">Option 3</option>
* </select>
* </pre>
*
* <p>
* You could select <code>Option 2</code> in either of two ways:
* </p>
*
* <pre class="stHighlight">
* singleSel("select1").value = "option2"
* singleSel("select1").selection = Some("option2")
* </pre>
*
* <p>
* To clear the selection, either invoke <code>clear</code> or set <code>selection</code> to <code>None</code>:
* </p>
*
* <pre class="stHighlight">
* singleSel.clear()
* singleSel("select1").selection = None
* </pre>
*
* <p>
* You can read the currently selected value of a single-selection list in the same manner as radio buttons:
* </p>
*
* <pre class="stHighlight">
* singleSel("select1").value should be ("option2")
* singleSel("select1").selection should be (Some("option2"))
* </pre>
*
* <p>
* If the single-selection list has no selection at all, <code>selection</code> will return <code>None</code> whereas <code>value</code>
* will throw a <code>TestFailedException</code>. By using <code>value</code>, you are indicating you expect a selection, and if there
* isn't a selection that should result in a failed test.
* </p>
*
* <h3>Multiple-selection lists</h3>
*
* <p>
* Given the following multiple-selection list:
* </p>
*
* <pre>
* <select name="select2" multiple="multiple">
* <option value="option4">Option 4</option>
* <option value="option5">Option 5</option>
* <option value="option6">Option 6</option>
* </select>
* </pre>
*
* <p>
* You could select <code>Option 5</code> and <code>Option 6</code> like this:
* </p>
*
* <pre class="stHighlight">
* multiSel("select2").values = Seq("option5", "option6")
* </pre>
*
* <p>
* The previous command would essentially clear all selections first, then select <code>Option 5</code> and <code>Option 6</code>.
* If instead you want to <em>not</em> clear any existing selection, just additionally select <code>Option 5</code> and <code>Option 6</code>,
* you can use the <code>+=</code> operator, like this.
* </p>
*
* <pre class="stHighlight">
* multiSel("select2").values += "option5"
* multiSel("select2").values += "option6"
* </pre>
*
* <p>
* To clear a specific option, pass its name to <code>clear</code>:
* </p>
*
* <pre class="stHighlight">
* multiSel("select2").clear("option5")
* </pre>
*
* <p>
* To clear all selections, call <code>clearAll</code>:
* </p>
*
* <pre class="stHighlight">
* multiSel("select2").clearAll()
* </pre>
*
* <p>
* You can access the current selections with <code>values</code>, which returns an immutable <code>IndexedSeq[String]</code>:
* </p>
*
* <pre class="stHighlight">
* multiSel("select2").values should have size 2
* multiSel("select2").values(0) should be ("option5")
* multiSel("select2").values(1) should be ("option6")
* </pre>
*
* <h3>Clicking and submitting</h3>
*
* <p>
* You can click on any element with “<code>click on</code>” as shown previously:
* </p>
*
* <pre class="stHighlight">
* click on "aButton"
* click on name("aTextField")
* </pre>
*
* <p>
* If the requested element is not found, <code>click on</code> will throw an exception, failing the test.
* </p>
*
* <p>
* Clicking on an input element will give it the focus. If the current focus is on an input element within a form, you can submit the form by
* calling <code>submit</code>:
* </p>
*
* <pre class="stHighlight">
* submit()
* </pre>
*
* <h2>Switching</h2>
*
* <p>
* You can switch to a popup alert using the following code:
* </p>
*
* <pre class="stHighlight">
* switch to alert
* </pre>
*
* <p>
* to switch to a frame, you could:
* </p>
*
* <pre class="stHighlight">
* switch to frame(0) // switch by index
* switch to frame("name") // switch by name
* </pre>
*
* <p>
* If you have a reference to a window handle (which can be obtained by calling windowHandle/windowHandles), you can switch to a particular
* window by:
* </p>
*
* <pre class="stHighlight">
* switch to window(windowHandle)
* </pre>
*
* <p>
* You can also switch to active element and default content:
* </p>
*
* <pre class="stHighlight">
* switch to activeElement
* switch to defaultContent
* </pre>
*
* <h2>Navigation history</h2>
*
* <p>
* In a real web browser, you can press the 'Back' button to go back to the previous page. To emulate that action in your test, you can call <code>goBack</code>:
* </p>
*
* <pre class="stHighlight">
* goBack()
* </pre>
*
* <p>
* To emulate the 'Forward' button, you can call:
* </p>
*
* <pre class="stHighlight">
* goForward()
* </pre>
*
* And to refresh or reload the current page, you can call:
*
* <pre class="stHighlight">
* reloadPage()
* </pre>
*
* <h2>Cookies!</h2>
*
* <p>To create a new cookie, you'll say:</p>
*
* <pre class="stHighlight">
* add cookie ("cookie_name", "cookie_value")
* </pre>
*
* <p>
* to read a cookie value, you do:
* </p>
*
* <pre class="stHighlight">
* cookie("cookie_name").value should be ("cookie_value") // If value is undefined, throws TFE right then and there. Never returns null.
* </pre>
*
* <p>
* In addition to the common name-value cookie, you can pass these extra fields when creating a cookie; the available forms are:
* </p>
*
* <pre class="stHighlight">
* cookie(name: String, value: String)
* cookie(name: String, value: String, path: String)
* cookie(name: String, value: String, path: String, expiry: Date)
* cookie(name: String, value: String, path: String, expiry: Date, domain: String)
* cookie(name: String, value: String, path: String, expiry: Date, domain: String, secure: Boolean)
* </pre>
*
* and to read those extra fields:
*
* <pre class="stHighlight">
* cookie("cookie_name").value // Read cookie's value
* cookie("cookie_name").path // Read cookie's path
* cookie("cookie_name").expiry // Read cookie's expiry
* cookie("cookie_name").domain // Read cookie's domain
* cookie("cookie_name").isSecure // Read cookie's isSecure flag
* </pre>
*
* <p>
* In order to delete a cookie, you could use the following code:
* </p>
*
* <pre class="stHighlight">
* delete cookie "cookie_name"
* </pre>
*
* <p>
* or to delete all cookies in the same domain:
* </p>
*
* <pre class="stHighlight">
* delete all cookies
* </pre>
*
* To get the underlying Selenium cookie, you can use <code>underlying</code>:
*
* <pre class="stHighlight">
* cookie("cookie_name").underlying.validate() // call the validate() method on underlying Selenium cookie
* </pre>
*
* <h2>Other useful element properties</h2>
*
* <p>
* All element types (<code>textField</code>, <code>textArea</code>, <code>radioButton</code>, <code>checkbox</code>, <code>singleSel</code>, <code>multiSel</code>)
* support the following useful properties:
* </p>
*
* <table style="border-collapse: collapse; border: 1px solid black">
* <tr><th style="background-color: #CCCCCC; border-width: 1px; padding: 3px; text-align: center; border: 1px solid black"><strong>Method</strong></th><th style="background-color: #CCCCCC; border-width: 1px; padding: 3px; text-align: center; border: 1px solid black"><strong>Description</strong></th></tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* <code>location</code>
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* The XY location of the top-left corner of this <code>Element</code>.
* </td>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* <code>size</code>
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* The width/height size of this <code>Element</code>.
* </td>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* <code>isDisplayed</code>
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* Indicates whether this <code>Element</code> is displayed.
* </td>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* <code>isEnabled</code>
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* Indicates whether this <code>Element</code> is enabled.
* </td>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* <code>isSelected</code>
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* Indicates whether this <code>Element</code> is selected.
* </td>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* <code>tagName</code>
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* The tag name of this element.
* </td>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* <code>underlying</code>
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* The underlying <code>WebElement</code> wrapped by this <code>Element</code>.
* </td>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* <code>attribute(name: String)</code>
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* The attribute value of the given attribute name of this element, wrapped in a <code>Some</code>, or <code>None</code> if no
* such attribute exists on this <code>Element</code>.
* </td>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* <code>text</code>
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* Returns the visible (<em>i.e.</em>, not hidden by CSS) text of this element, including sub-elements, without any leading or trailing whitespace.
* </td>
* </tr>
* </table>
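*
* <p>
* For example (an illustrative snippet, assuming a text field with id <code>"q"</code> exists on the page):
* </p>
*
* <pre class="stHighlight">
* textField("q").isEnabled should be (true)
* textField("q").attribute("maxlength") // returns None if the attribute is absent
* </pre>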
*
* <h2>Implicit wait</h2>
*
* <p>
* To set Selenium's implicit wait timeout, you can call the <code>implicitlyWait</code> method:
* </p>
*
* <pre class="stHighlight">
* implicitlyWait(Span(10, Seconds))
* </pre>
*
* <p>
* Invoking this method sets the amount of time the driver will wait when searching for an element that is not immediately present. For
* more information, see the documentation for method <code>implicitlyWait</code>.
* </p>
*
* <h2>Page source and current URL</h2>
*
* <p>
* It is possible to get the HTML source of the currently loaded page using:
* </p>
*
* <pre class="stHighlight">
* pageSource
* </pre>
*
* <p>
* and, if needed, get the current URL of the currently loaded page:
* </p>
*
* <pre class="stHighlight">
* currentUrl
* </pre>
*
* <h2>Screen capture</h2>
*
* <p>
* You can capture the screen using the following code:
* </p>
*
* <pre class="stHighlight">
* val file = capture
* </pre>
*
* <p>
* By default, the captured image file will be saved in the temporary folder (returned by the <code>java.io.tmpdir</code> property), with a random file name
* that ends with the .png extension. You can specify a fixed file name instead:
* </p>
*
* <pre class="stHighlight">
* capture to "MyScreenShot.png"
* </pre>
*
* <p>
* or
* </p>
*
* <pre class="stHighlight">
* capture to "MyScreenShot"
* </pre>
*
* <p>
* Both will result in the same file name, <code>MyScreenShot.png</code>.
* </p>
*
* <p>
* You can also change the target folder the screenshot file is written to, by saying:
* </p>
*
* <pre class="stHighlight">
* setCaptureDir("/home/your_name/screenshots")
* </pre>
*
* <p>
* If you want to capture a screenshot when something goes wrong (e.g. test failed), you can use <code>withScreenshot</code>:
* </p>
*
* <pre class="stHighlight">
* withScreenshot {
* assert("Gold" == "Silver", "Expected gold, but got silver")
* }
* </pre>
*
* <p>
* In case the test code fails, you'll see the screenshot location appended to the error message, for example:
* </p>
*
* <pre class="stHighlight">
* Expected gold but got silver; screenshot capture in /tmp/AbCdEfGhIj.png
* </pre>
*
* <a name="pageObjects"></a>
* <h2>Using the page object pattern</h2>
*
* <p>
* If you use the page object pattern, mixing trait <code>Page</code> into your page classes will allow you to use the <code>go to</code>
* syntax with your page objects. Here's an example:
* </p>
*
* <pre class="stHighlight">
* class HomePage extends Page {
* val url = "http://localhost:9000/index.html"
* }
*
* val homePage = new HomePage
* go to homePage
* </pre>
*
* <h2>Executing JavaScript</h2>
*
* <p>
* To execute arbitrary JavaScript, for example, to test some JavaScript functions on your page, pass it to <code>executeScript</code>:
* </p>
*
* <pre class="stHighlight">
* go to (host + "index.html")
* val result1 = executeScript("return document.title;")
* result1 should be ("Test Title")
* val result2 = executeScript("return 'Hello ' + arguments[0]", "ScalaTest")
* result2 should be ("Hello ScalaTest")
* </pre>
*
* <p>
* To execute an asynchronous bit of JavaScript, pass it to <code>executeAsyncScript</code>. You can set the script timeout with <code>setScriptTimeout</code>:
* </p>
*
* <pre class="stHighlight">
* val script = """
* var callback = arguments[arguments.length - 1];
* window.setTimeout(function() {callback('Hello ScalaTest')}, 500);
* """
* setScriptTimeout(1 second)
* val result = executeAsyncScript(script)
* result should be ("Hello ScalaTest")
* </pre>
*
* <h2>Querying for elements</h2>
*
* <p>
* You can query for arbitrary elements via <code>find</code> and <code>findAll</code>. The <code>find</code> method returns the first matching element, wrapped in a <code>Some</code>,
* or <code>None</code> if no element is found. The <code>findAll</code> method returns an immutable <code>IndexedSeq</code> of all matching elements. If no elements match the query, <code>findAll</code>
* returns an empty <code>IndexedSeq</code>. These methods allow you to perform rich queries using <code>for</code> expressions. Here are some examples:
* </p>
*
* <pre class="stHighlight">
* val ele: Option[Element] = find("q")
*
* val eles: collection.immutable.IndexedSeq[Element] = findAll(className("small"))
* for (e <- eles; if e.tagName != "input")
* e should be ('displayed)
* val textFields = eles filter { _.isInstanceOf[TextField] }
* </pre>
*
* <h2>Cleaning up</h2>
*
* <p>
* To close the current browser window, and exit the driver if the current window was the only one remaining, use <code>close</code>:
* </p>
*
* <pre class="stHighlight">
* close()
* </pre>
*
* <p>
* To close all windows, and exit the driver, use <code>quit</code>:
* </p>
*
* <pre class="stHighlight">
* quit()
* </pre>
*
* <a name="alternateForms"/>
* <h2>Alternate forms</h2>
*
* <p>
* Although statements like “<code>delete all cookies</code>” fit well with matcher statements
* like “<code>title should be ("Cheese!")</code>”, they do not fit as well
* with the simple method call form of assertions. If you prefer, you can avoid operator notation
* and instead use alternatives that take the form of plain-old method calls. Here's an example:
* </p>
*
* <pre class="stHighlight">
* goTo("http://www.google.com")
* clickOn("q")
* textField("q").value = "Cheese!"
* submit()
* // Google's search is rendered dynamically with JavaScript.
* eventually(assert(pageTitle === "Cheese! - Google Search"))
* </pre>
*
* <p>
* Here's a table showing the complete list of alternatives:
* </p>
*
* <table style="border-collapse: collapse; border: 1px solid black">
* <tr><th style="background-color: #CCCCCC; border-width: 1px; padding: 3px; text-align: center; border: 1px solid black"><strong>operator notation</strong></th><th style="background-color: #CCCCCC; border-width: 1px; padding: 3px; text-align: center; border: 1px solid black"><strong>method call</strong></th></tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* <code>go to (host + "index.html")</code>
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* <code>goTo(host + "index.html")</code>
* </td>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* <code>click on "aButton"</code>
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* <code>clickOn("aButton")</code>
* </td>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* <code>switch to activeElement</code>
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* <code>switchTo(activeElement)</code>
* </td>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* <code>add cookie ("cookie_name", "cookie_value")</code>
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* <code>addCookie("cookie_name", "cookie_value")</code>
* </td>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* <code>delete cookie "cookie_name"</code>
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* <code>deleteCookie("cookie_name")</code>
* </td>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* <code>delete all cookies</code>
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* <code>deleteAllCookies()</code>
* </td>
* </tr>
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* <code>capture to "MyScreenShot"</code>
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* <code>captureTo("MyScreenShot")</code>
* </td>
* </tr>
* </table>
*
* @author Chua Chee Seng
* @author Bill Venners
*/
trait WebBrowser {
/**
* A point containing an XY screen location.
*/
case class Point(x: Int, y: Int)
/**
* A dimension containing the width and height of a screen element.
*/
case class Dimension(width: Int, height: Int)
/**
   * Wrapper trait for a Selenium <code>WebElement</code>.
*
* <p>
   * This trait provides idiomatic Scala access to the services of an underlying <code>WebElement</code>.
* You can access the wrapped <code>WebElement</code> via the <code>underlying</code> method.
* </p>
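   *
   * <p>
   * For example (a sketch; assumes an implicit <code>WebDriver</code> is in scope and the page
   * contains an element with ID <code>"q"</code>):
   * </p>
   *
   * <pre class="stHighlight">
   * val elem: Element = find("q").get
   * elem.location    // the Point at the element's top-left corner
   * elem.size        // the element's Dimension (width and height)
   * elem.underlying  // the wrapped Selenium WebElement
   * </pre>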
*/
sealed trait Element {
/**
* The XY location of the top-left corner of this <code>Element</code>.
*
* <p>
* This invokes <code>getLocation</code> on the underlying <code>WebElement</code>.
* </p>
*
* @return the location of the top-left corner of this element on the page
*/
def location: Point = Point(underlying.getLocation.getX, underlying.getLocation.getY)
/**
* The width/height size of this <code>Element</code>.
*
* <p>
* This invokes <code>getSize</code> on the underlying <code>WebElement</code>.
* </p>
*
* @return the size of the element on the page
*/
def size: Dimension = Dimension(underlying.getSize.getWidth, underlying.getSize.getHeight)
/**
* Indicates whether this <code>Element</code> is displayed.
*
* <p>
* This invokes <code>isDisplayed</code> on the underlying <code>WebElement</code>.
* </p>
*
* @return <code>true</code> if the element is currently displayed
*/
def isDisplayed: Boolean = underlying.isDisplayed
/**
* Indicates whether this <code>Element</code> is enabled.
*
* <p>
* This invokes <code>isEnabled</code> on the underlying <code>WebElement</code>, which
* will generally return <code>true</code> for everything but disabled input elements.
* </p>
*
* @return <code>true</code> if the element is currently enabled
*/
def isEnabled: Boolean = underlying.isEnabled
/**
* Indicates whether this <code>Element</code> is selected.
*
* <p>
* This method, which invokes <code>isSelected</code> on the underlying <code>WebElement</code>,
* is relevant only for input elements such as checkboxes, options in a single- or multiple-selection
* list box, and radio buttons. For any other element it will simply return <code>false</code>.
* </p>
*
* @return <code>true</code> if the element is currently selected or checked
*/
def isSelected: Boolean = underlying.isSelected
/**
* The tag name of this element.
*
* <p>
* This method invokes <code>getTagName</code> on the underlying <code>WebElement</code>.
     * Note it returns the name of the tag, not the value of the <code>name</code> attribute.
     * For example, it will return <code>"input"</code> for the element
     * <code>&lt;input name="city" /&gt;</code>, not <code>"city"</code>.
* </p>
*
* @return the tag name of this element
*/
def tagName: String = underlying.getTagName
/**
* The underlying <code>WebElement</code> wrapped by this <code>Element</code>
*/
val underlying: WebElement
/**
* The attribute value of the given attribute name of this element, wrapped in a <code>Some</code>, or <code>None</code> if no
* such attribute exists on this <code>Element</code>.
*
* <p>
* This method invokes <code>getAttribute</code> on the underlying <code>WebElement</code>, passing in the
* specified <code>name</code>.
* </p>
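     *
     * <p>
     * For example (a sketch; assumes a text field with ID <code>"q"</code> whose markup sets a <code>tabindex</code>):
     * </p>
     *
     * <pre class="stHighlight">
     * textField("q").attribute("tabindex")  // Some("1") if set in the markup, otherwise None
     * </pre>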
*
* @return the attribute with the given name, wrapped in a <code>Some</code>, else <code>None</code>
*/
def attribute(name: String): Option[String] = Option(underlying.getAttribute(name))
/**
* Returns the visible (<em>i.e.</em>, not hidden by CSS) text of this element, including sub-elements, without any leading or trailing whitespace.
*
* @return the visible text enclosed by this element, or an empty string, if the element encloses no visible text
*/
def text: String = {
val txt = underlying.getText
if (txt != null) txt else "" // Just in case, I'm not sure if Selenium would ever return null here
}
/**
     * Returns the result of invoking <code>equals</code> on the underlying <code>WebElement</code>, passing
* in the specified <code>other</code> object.
*
* @param other the object with which to compare for equality
*
* @return true if the passed object is equal to this one
*/
override def equals(other: Any): Boolean = underlying.equals(other)
/**
     * Returns the result of invoking <code>hashCode</code> on the underlying <code>WebElement</code>.
*
* @return a hash code for this object
*/
override def hashCode: Int = underlying.hashCode
/**
     * Returns the result of invoking <code>toString</code> on the underlying <code>WebElement</code>.
*
* @return a string representation of this object
*/
override def toString: String = underlying.toString
}
/**
* Trait that facilitates using the <em>page object pattern</em> with the ScalaTest Selenium DSL.
*
* <p>
* If you use the page object pattern, mixing trait <code>Page</code> into your page classes will allow you to use the <code>go to</code>
* syntax with your page objects. Here's an example:
* </p>
*
* <pre class="stHighlight">
* class HomePage extends Page {
* val url = "localhost:9000/index.html"
* }
*
* val homePage = new HomePage
* go to homePage
* </pre>
*/
trait Page {
/**
* The URL of the page represented by this page object.
*/
val url: String
}
  // FluentLenium has a doubleClick. Wonder how they are doing that?
/**
* Wrapper class for a Selenium <code>Cookie</code>.
*
* <p>
* This class provides idiomatic Scala access to the services of an underlying <code>Cookie</code>.
* You can access the wrapped <code>Cookie</code> via the <code>underlying</code> method.
* </p>
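   *
   * <p>
   * For example (a sketch; assumes the <code>add cookie</code> and <code>cookie</code> lookup syntax of this DSL,
   * with an implicit <code>WebDriver</code> in scope):
   * </p>
   *
   * <pre class="stHighlight">
   * add cookie ("cookie_name", "cookie_value")
   * cookie("cookie_name").value should be ("cookie_value")
   * </pre>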
*/
final class WrappedCookie(val underlying: Cookie) {
/**
* The domain to which this cookie is visible.
*
* <p>
* This invokes <code>getDomain</code> on the underlying <code>Cookie</code>.
* </p>
*
* @return the domain of this cookie
*/
def domain: String = underlying.getDomain
/**
     * The expire date of this cookie, wrapped in a <code>Some</code>, or <code>None</code> if this cookie has no expire date.
*
* <p>
* This invokes <code>getExpiry</code> on the underlying <code>Cookie</code>.
* </p>
*
     * @return the expire date of this cookie, wrapped in a <code>Some</code>, or <code>None</code> if this cookie has no expire date
*/
def expiry: Option[Date] = Option(underlying.getExpiry)
/**
* The name of this cookie.
*
* <p>
* This invokes <code>getName</code> on the underlying <code>Cookie</code>.
* </p>
*
* @return the name of this cookie
*/
def name: String = underlying.getName
/**
* The path of this cookie.
*
* <p>
* This invokes <code>getPath</code> on the underlying <code>Cookie</code>.
* </p>
*
* @return the path of this cookie
*/
def path: String = underlying.getPath
/**
* The value of this cookie.
*
* <p>
* This invokes <code>getValue</code> on the underlying <code>Cookie</code>.
* </p>
*
* @return the value of this cookie
*/
def value: String = underlying.getValue
/**
* Indicates whether the cookie requires a secure connection.
*
* <p>
* This invokes <code>isSecure</code> on the underlying <code>Cookie</code>.
* </p>
*
* @return true if this cookie requires a secure connection.
*/
def secure: Boolean = underlying.isSecure
/**
* Returns the result of invoking <code>equals</code> on the underlying <code>Cookie</code>, passing
* in the specified <code>other</code> object.
*
* <p>
     * Two Selenium <code>Cookie</code>s are considered equal if their names and values are equal.
* </p>
*
* @param other the object with which to compare for equality
*
* @return true if the passed object is equal to this one
*/
override def equals(other: Any): Boolean = underlying.equals(other)
/**
* Returns the result of invoking <code>hashCode</code> on the underlying <code>Cookie</code>.
*
* @return a hash code for this object
*/
override def hashCode: Int = underlying.hashCode
/**
* Returns the result of invoking <code>toString</code> on the underlying <code>Cookie</code>.
*
* @return a string representation of this object
*/
override def toString: String = underlying.toString
}
/**
   * This class is part of ScalaTest's Selenium DSL. Please see the documentation for <a href="WebBrowser.html"><code>WebBrowser</code></a>
* for an overview of the Selenium DSL.
*/
class CookiesNoun
/**
* This field supports cookie deletion in ScalaTest's Selenium DSL. Please see the documentation for
* <a href="WebBrowser.html"><code>WebBrowser</code></a> for an overview of the Selenium DSL.
*
* <p>
* This field enables the following syntax:
* </p>
*
* <pre class="stHighlight">
* delete all cookies
* ^
* </pre>
*/
val cookies = new CookiesNoun
/**
* This sealed abstract class supports switching in ScalaTest's Selenium DSL.
* Please see the documentation for <a href="WebBrowser.html"><code>WebBrowser</code></a> for an overview of the Selenium DSL.
*
* <p>
* One subclass of <code>SwitchTarget</code> exists for each kind of target that
   * can be switched to: active element, alert box, default content, frame (identified by index,
* name or id, or enclosed element), and window.
* </p>
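   *
   * <p>
   * For example, because each target fixes the type parameter, the result type of a switch
   * follows from the target (a sketch; assumes an implicit <code>WebDriver</code> is in scope):
   * </p>
   *
   * <pre class="stHighlight">
   * val elem: Element = switch to activeElement  // a SwitchTarget[Element]
   * switch to defaultContent                     // a SwitchTarget[WebDriver]
   * </pre>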
*/
sealed abstract class SwitchTarget[T] {
/**
* Abstract method implemented by subclasses that represent "targets" to which the user can switch.
*
* @param driver the <code>WebDriver</code> with which to perform the switch
*/
def switch(driver: WebDriver): T
}
/**
* This class supports switching to the currently active element in ScalaTest's Selenium DSL.
* Please see the documentation for <a href="WebBrowser.html"><code>WebBrowser</code></a> for an overview of the Selenium DSL.
*
* <p>
   * This class enables the following syntax:
* </p>
*
* <pre>
* switch to activeElement
* ^
* </pre>
*/
final class ActiveElementTarget extends SwitchTarget[Element] {
/**
* Switches the driver to the currently active element.
*
* @param driver the <code>WebDriver</code> with which to perform the switch
*/
def switch(driver: WebDriver): Element = {
createTypedElement(driver.switchTo.activeElement)
}
}
/**
* This class supports switching to the alert box in ScalaTest's Selenium DSL.
* Please see the documentation for <a href="WebBrowser.html"><code>WebBrowser</code></a> for an overview of the Selenium DSL.
*
* <p>
   * This class enables the following syntax:
* </p>
*
* <pre>
* switch to alert
* ^
* </pre>
*/
final class AlertTarget extends SwitchTarget[Alert] {
/**
* Switches the driver to the currently active alert box.
*
* @param driver the <code>WebDriver</code> with which to perform the switch
*/
def switch(driver: WebDriver): Alert = {
driver.switchTo.alert
}
}
/**
* This class supports switching to the default content in ScalaTest's Selenium DSL.
* Please see the documentation for <a href="WebBrowser.html"><code>WebBrowser</code></a> for an overview of the Selenium DSL.
*
* <p>
   * This class enables the following syntax:
* </p>
*
* <pre>
* switch to defaultContent
* ^
* </pre>
*/
final class DefaultContentTarget extends SwitchTarget[WebDriver] {
/**
* Switches the driver to the default content
*
* @param driver the <code>WebDriver</code> with which to perform the switch
*/
def switch(driver: WebDriver): WebDriver = {
driver.switchTo.defaultContent
}
}
/**
* This class supports switching to a frame by index in ScalaTest's Selenium DSL.
* Please see the documentation for <a href="WebBrowser.html"><code>WebBrowser</code></a> for an overview of the Selenium DSL.
*
* <p>
   * This class enables the following syntax:
* </p>
*
* <pre>
* switch to frame(0)
* ^
* </pre>
*/
final class FrameIndexTarget(index: Int) extends SwitchTarget[WebDriver] {
/**
* Switches the driver to the frame at the index that was passed to the constructor.
*
* @param driver the <code>WebDriver</code> with which to perform the switch
*/
def switch(driver: WebDriver): WebDriver =
try {
driver.switchTo.frame(index)
}
catch {
case e: org.openqa.selenium.NoSuchFrameException =>
throw new TestFailedException(
sde => Some("Frame at index '" + index + "' not found."),
None,
getStackDepthFun("WebBrowser.scala", "switch", 1)
)
}
}
/**
* This class supports switching to a frame by name or ID in ScalaTest's Selenium DSL.
* Please see the documentation for <a href="WebBrowser.html"><code>WebBrowser</code></a> for an overview of the Selenium DSL.
*
* <p>
   * This class enables the following syntax:
* </p>
*
* <pre>
* switch to frame("name")
* ^
* </pre>
*/
final class FrameNameOrIdTarget(nameOrId: String) extends SwitchTarget[WebDriver] {
/**
* Switches the driver to the frame with the name or ID that was passed to the constructor.
*
* @param driver the <code>WebDriver</code> with which to perform the switch
*/
def switch(driver: WebDriver): WebDriver =
try {
driver.switchTo.frame(nameOrId)
}
catch {
case e: org.openqa.selenium.NoSuchFrameException =>
throw new TestFailedException(
sde => Some("Frame with name or ID '" + nameOrId + "' not found."),
None,
getStackDepthFun("WebBrowser.scala", "switch", 1)
)
}
}
/**
* This class supports switching to a frame by web element in ScalaTest's Selenium DSL.
* Please see the documentation for <a href="WebBrowser.html"><code>WebBrowser</code></a> for an overview of the Selenium DSL.
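   *
   * <p>
   * This class enables syntax such as the following (a sketch; assumes a <code>frame</code> factory
   * method accepting a <code>WebElement</code>, analogous to the index and name forms shown above):
   * </p>
   *
   * <pre>
   * switch to frame(webElement)
   * ^
   * </pre>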
*/
final class FrameWebElementTarget(webElement: WebElement) extends SwitchTarget[WebDriver] {
/**
* Switches the driver to the frame containing the <code>WebElement</code> that was passed to the constructor.
*
* @param driver the <code>WebDriver</code> with which to perform the switch
*/
def switch(driver: WebDriver): WebDriver =
try {
driver.switchTo.frame(webElement)
}
catch {
case e: org.openqa.selenium.NoSuchFrameException =>
throw new TestFailedException(
sde => Some("Frame element '" + webElement + "' not found."),
None,
getStackDepthFun("WebBrowser.scala", "switch", 1)
)
}
}
/**
* This class supports switching to a frame by element in ScalaTest's Selenium DSL.
* Please see the documentation for <a href="WebBrowser.html"><code>WebBrowser</code></a> for an overview of the Selenium DSL.
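   *
   * <p>
   * This class enables syntax such as the following (a sketch; assumes a <code>frame</code> factory
   * method accepting an <code>Element</code>, analogous to the index and name forms shown above):
   * </p>
   *
   * <pre>
   * switch to frame(element)
   * ^
   * </pre>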
*/
final class FrameElementTarget(element: Element) extends SwitchTarget[WebDriver] {
/**
* Switches the driver to the frame containing the <code>Element</code> that was passed to the constructor.
*
* @param driver the <code>WebDriver</code> with which to perform the switch
*/
def switch(driver: WebDriver): WebDriver =
try {
driver.switchTo.frame(element.underlying)
}
catch {
case e: org.openqa.selenium.NoSuchFrameException =>
throw new TestFailedException(
sde => Some("Frame element '" + element + "' not found."),
None,
getStackDepthFun("WebBrowser.scala", "switch", 1)
)
}
}
/**
* This class supports switching to a window by name or handle in ScalaTest's Selenium DSL.
* Please see the documentation for <a href="WebBrowser.html"><code>WebBrowser</code></a> for an overview of the Selenium DSL.
*
* <p>
   * This class enables the following syntax:
* </p>
*
* <pre>
* switch to window(windowHandle)
* ^
* </pre>
*/
final class WindowTarget(nameOrHandle: String) extends SwitchTarget[WebDriver] {
/**
     * Switches the driver to the window with the name or handle that was passed to the constructor.
*
* @param driver the <code>WebDriver</code> with which to perform the switch
*/
def switch(driver: WebDriver): WebDriver =
try {
driver.switchTo.window(nameOrHandle)
}
catch {
case e: org.openqa.selenium.NoSuchWindowException =>
throw new TestFailedException(
sde => Some("Window with nameOrHandle '" + nameOrHandle + "' not found."),
None,
getStackDepthFun("WebBrowser.scala", "switch", 1)
)
}
}
private def isTextField(webElement: WebElement): Boolean =
webElement.getTagName.toLowerCase == "input" && webElement.getAttribute("type").toLowerCase == "text"
private def isTextArea(webElement: WebElement): Boolean =
webElement.getTagName.toLowerCase == "textarea"
private def isCheckBox(webElement: WebElement): Boolean =
webElement.getTagName.toLowerCase == "input" && webElement.getAttribute("type").toLowerCase == "checkbox"
  private def isRadioButton(webElement: WebElement): Boolean =
    webElement.getTagName.toLowerCase == "input" && webElement.getAttribute("type").toLowerCase == "radio"
/**
* This class is part of ScalaTest's Selenium DSL. Please see the documentation for
* <a href="WebBrowser.html"><code>WebBrowser</code></a> for an overview of the Selenium DSL.
*
* <p>
* This class enables syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* textField("q").value should be ("Cheese!")
* </pre>
*
* @param underlying the <code>WebElement</code> representing a text field
   * @throws TestFailedException if the passed <code>WebElement</code> does not represent a text field
*/
final class TextField(val underlying: WebElement) extends Element {
if(!isTextField(underlying))
throw new TestFailedException(
sde => Some("Element " + underlying + " is not text field."),
None,
getStackDepthFun("WebBrowser.scala", "this", 1)
)
/**
* Gets this text field's value.
*
* <p>
* This method invokes <code>getAttribute("value")</code> on the underlying <code>WebElement</code>.
* </p>
*
* @return the text field's value
*/
def value: String = underlying.getAttribute("value")
/**
* Sets this text field's value.
*
* @param value the new value
*/
def value_=(value: String) {
underlying.clear()
underlying.sendKeys(value)
}
/**
* Clears this text field.
*/
def clear() { underlying.clear() }
}
/**
* This class is part of ScalaTest's Selenium DSL. Please see the documentation for
* <a href="WebBrowser.html"><code>WebBrowser</code></a> for an overview of the Selenium DSL.
*
* <p>
* This class enables syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* textArea("q").value should be ("Cheese!")
* </pre>
*
* @param underlying the <code>WebElement</code> representing a text area
   * @throws TestFailedException if the passed <code>WebElement</code> does not represent a text area
*/
final class TextArea(val underlying: WebElement) extends Element {
if(!isTextArea(underlying))
throw new TestFailedException(
sde => Some("Element " + underlying + " is not text area."),
None,
getStackDepthFun("WebBrowser.scala", "this", 1)
)
/**
* Gets this text area's value.
*
* <p>
* This method invokes <code>getAttribute("value")</code> on the underlying <code>WebElement</code>.
* </p>
*
* @return the text area's value
*/
def value: String = underlying.getAttribute("value")
/**
* Sets this text area's value.
*
* @param value the new value
*/
def value_=(value: String) {
underlying.clear()
underlying.sendKeys(value)
}
/**
* Clears this text area.
*/
def clear() { underlying.clear() }
}
/**
* This class is part of ScalaTest's Selenium DSL. Please see the documentation for
* <a href="WebBrowser.html"><code>WebBrowser</code></a> for an overview of the Selenium DSL.
*
* <p>
* This class enables syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* radioButton(id("opt1")).value should be ("Option 1!")
* </pre>
*
   * @param underlying the <code>WebElement</code> representing a radio button
   * @throws TestFailedException if the passed <code>WebElement</code> does not represent a radio button
*/
final class RadioButton(val underlying: WebElement) extends Element {
if(!isRadioButton(underlying))
throw new TestFailedException(
sde => Some("Element " + underlying + " is not radio button."),
None,
getStackDepthFun("WebBrowser.scala", "this", 1)
)
/**
* Gets this radio button's value.
*
* <p>
* Invokes <code>getAttribute("value")</code> on the underlying <code>WebElement</code>.
* </p>
*
* @return the radio button's value
*/
def value: String = underlying.getAttribute("value")
}
/**
* This class is part of ScalaTest's Selenium DSL. Please see the documentation for
* <a href="WebBrowser.html"><code>WebBrowser</code></a> for an overview of the Selenium DSL.
*
* <p>
* This class enables syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* radioButtonGroup("group1").value should be ("Option 2")
* </pre>
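   *
   * <p>
   * For example (a sketch; assumes radio buttons sharing the group name <code>"group1"</code>,
   * one of which has the value <code>"Option 2"</code>):
   * </p>
   *
   * <pre class="stHighlight">
   * radioButtonGroup("group1").value = "Option 2"  // select by value
   * radioButtonGroup("group1").selection should be (Some("Option 2"))
   * </pre>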
*
   * @throws TestFailedException if no radio buttons with the passed <code>groupName</code> are found
*/
final class RadioButtonGroup(groupName: String, driver: WebDriver) {
private def groupElements = driver.findElements(By.name(groupName)).asScala.toList.filter(isRadioButton(_))
if (groupElements.length == 0)
throw new TestFailedException(
sde => Some("No radio buttons with group name '" + groupName + "' was found."),
None,
getStackDepthFun("WebBrowser.scala", "this", 1)
)
/**
* Returns the value of this group's selected radio button, or throws <code>TestFailedException</code> if no
* radio button in this group is selected.
*
* @return the value of this group's selected radio button
     * @throws TestFailedException if no radio button in this group is selected
*/
def value: String = selection match {
case Some(v) => v
case None =>
throw new TestFailedException(
sde => Some("The radio button group on which value was invoked contained no selected radio button."),
None,
getStackDepthFun("WebBrowser.scala", "value", 1)
)
}
/**
* Returns the value of this group's selected radio button, wrapped in a <code>Some</code>, or <code>None</code>, if no
* radio button in this group is selected.
*
* @return the value of this group's selected radio button, wrapped in a <code>Some</code>, else <code>None</code>
*/
def selection: Option[String] = {
groupElements.find(_.isSelected) match {
case Some(radio) =>
Some(radio.getAttribute("value"))
case None =>
None
}
}
/**
* Selects the radio button with the passed value.
*
     * @param value the value of the radio button to select
     * @throws TestFailedException if the passed string is not the value of any radio button in this group
*/
def value_=(value: String) {
groupElements.find(_.getAttribute("value") == value) match {
case Some(radio) =>
radio.click()
case None =>
throw new TestFailedException(
sde => Some("Radio button value '" + value + "' not found for group '" + groupName + "'."),
None,
getStackDepthFun("WebBrowser.scala", "value_=", 1)
)
}
}
}
/**
* This class is part of ScalaTest's Selenium DSL. Please see the documentation for
* <a href="WebBrowser.html"><code>WebBrowser</code></a> for an overview of the Selenium DSL.
*
* <p>
* This class enables syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* checkbox("cbx1").select()
* </pre>
*
* @param underlying the <code>WebElement</code> representing a checkbox
   * @throws TestFailedException if the passed <code>WebElement</code> does not represent a checkbox
*/
final class Checkbox(val underlying: WebElement) extends Element {
if(!isCheckBox(underlying))
throw new TestFailedException(
sde => Some("Element " + underlying + " is not check box."),
None,
getStackDepthFun("WebBrowser.scala", "this", 1)
)
/**
* Selects this checkbox.
*/
def select() {
if (!underlying.isSelected)
underlying.click()
}
/**
* Clears this checkbox
*/
def clear() {
if (underlying.isSelected())
underlying.click()
}
/**
* Gets this checkbox's value.
*
* <p>
* This method invokes <code>getAttribute("value")</code> on the underlying <code>WebElement</code>.
* </p>
*
* @return the checkbox's value
*/
def value: String = underlying.getAttribute("value")
}
/**
* This class is part of ScalaTest's Selenium DSL. Please see the documentation for
* <a href="WebBrowser.html"><code>WebBrowser</code></a> for an overview of the Selenium DSL.
*
* <p>
* This class enables syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* multiSel("select2").values += "option5"
* ^
* </pre>
*
* <p>
* Instances of this class are returned from the <code>values</code> method of <code>MultiSel</code>.
* <code>MultiSelOptionSeq</code> is an immutable <code>IndexedSeq[String]</code> that wraps an underlying immutable <code>IndexedSeq[String]</code> and adds two
* methods, <code>+</code> and <code>-</code>, to facilitate the <code>+=</code> syntax for setting additional options
* of the <code>MultiSel</code>. The Scala compiler will rewrite:
* </p>
*
* <pre class="stHighlight">
* multiSel("select2").values += "option5"
* </pre>
*
* <p>
* To:
* </p>
*
* <pre class="stHighlight">
* multiSel("select2").values = multiSel("select2").values + "option5"
* </pre>
*
* <p>
* Thus, first a new <code>MultiSelOptionSeq</code> is created by invoking the <code>+</code> method on the <code>MultiSelOptionSeq</code>
* returned by <code>values</code>, and that result is passed to the <code>values_=</code> method.
* </p>
*
* <p>
* For symmetry, this class also offers a <code>-</code> method, which can be used to deselect an option, like this:
* </p>
*
* <pre class="stHighlight">
* multiSel("select2").values -= "option5"
* ^
* </pre>
*
*/
class MultiSelOptionSeq(underlying: collection.immutable.IndexedSeq[String]) extends collection.immutable.IndexedSeq[String] {
/**
* Selects an element by its index in the sequence.
*
* <p>
* This method invokes <code>apply</code> on the underlying immutable <code>IndexedSeq[String]</code>, passing in <code>idx</code>, and returns the result.
* </p>
*
* @param idx the index to select
* @return the element of this sequence at index <code>idx</code>, where 0 indicates the first element
*/
def apply(idx: Int): String = underlying.apply(idx)
/**
* The length of this sequence.
*
* <p>
* This method invokes <code>length</code> on the underlying immutable <code>IndexedSeq[String]</code> and returns the result.
* </p>
*
* @return the number of elements in this sequence
*/
def length: Int = underlying.length
/**
* Appends a string element to this sequence, if it doesn't already exist in the sequence.
*
* <p>
* If the string element already exists in this sequence, this method returns itself. If not,
* this method returns a new <code>MultiSelOptionSeq</code> with the passed value appended to the
* end of the original <code>MultiSelOptionSeq</code>.
* </p>
*
     * @param value the string element to append to this sequence
* @return a <code>MultiSelOptionSeq</code> that contains the passed string value
*/
def +(value: String): MultiSelOptionSeq = {
if (!underlying.contains(value))
new MultiSelOptionSeq(underlying :+ value)
else
this
}
/**
     * Removes a string element from this sequence, if it exists in the sequence.
*
* <p>
     * If the string element does not exist in this sequence, this method returns itself. If the element
     * is contained in this sequence, this method returns a new <code>MultiSelOptionSeq</code> with the passed value
     * removed from the original <code>MultiSelOptionSeq</code>, leaving the other elements in the same order.
* </p>
*
     * @param value the string element to remove from this sequence
     * @return a <code>MultiSelOptionSeq</code> that does not contain the passed string value
*/
def -(value: String): MultiSelOptionSeq = {
if (underlying.contains(value))
new MultiSelOptionSeq(underlying.filter(_ != value))
else
this
}
}
/**
* This class is part of ScalaTest's Selenium DSL. Please see the documentation for
* <a href="WebBrowser.html"><code>WebBrowser</code></a> for an overview of the Selenium DSL.
*
* <p>
* This class enables syntax such as the following:
* </p>
*
* <pre class="stHighlight">
   * singleSel("select1").value = "option2"
* </pre>
*
* @param underlying a <code>WebElement</code> representing a single selection list
   * @throws TestFailedException if the passed <code>WebElement</code> does not represent a single selection list
*/
class SingleSel(val underlying: WebElement) extends Element {
if(underlying.getTagName.toLowerCase != "select")
throw new TestFailedException(
sde => Some("Element " + underlying + " is not select."),
None,
getStackDepthFun("WebBrowser.scala", "this", 1)
)
private val select = new Select(underlying)
if (select.isMultiple)
throw new TestFailedException(
sde => Some("Element " + underlying + " is not a single-selection list."),
None,
getStackDepthFun("WebBrowser.scala", "this", 1)
)
/**
* Returns the value of this single selection list, wrapped in a <code>Some</code>, or <code>None</code>, if this single
* selection list has no currently selected value.
*
* @return the value of this single selection list, wrapped in a <code>Some</code>, else <code>None</code>
*/
    def selection: Option[String] = {
val first = select.getFirstSelectedOption
if (first == null)
None
else
Some(first.getAttribute("value"))
}
/**
* Gets this single selection list's selected value, or throws <code>TestFailedException</code> if no value is currently selected.
*
* @return the single selection list's value
* @throws TestFailedException if the single selection list has no selected value
*/
def value: String = selection match {
case Some(v) => v
case None =>
throw new TestFailedException(
sde => Some("The single selection list on which value was invoked had no selection."),
None,
getStackDepthFun("WebBrowser.scala", "value", 1)
)
}
/**
* Sets this single selection list's value to the passed value.
*
* @param value the new value
     * @throws TestFailedException if the passed value does not match one of the single selection list's values
*/
def value_=(value : String) {
try {
select.selectByValue(value)
}
catch {
case e: org.openqa.selenium.NoSuchElementException =>
throw new TestFailedException(
sde => Some(e.getMessage),
Some(e),
getStackDepthFun("WebBrowser.scala", "value_=", 1)
)
}
}
}
/**
* This class is part of ScalaTest's Selenium DSL. Please see the documentation for
* <a href="WebBrowser.html"><code>WebBrowser</code></a> for an overview of the Selenium DSL.
*
* <p>
* This class enables syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* multiSel("select2").clear("option5")
* </pre>
*
* @param underlying a <code>WebElement</code> representing a multiple selection list
   * @throws TestFailedException if the passed <code>WebElement</code> does not represent a multiple selection list
*/
class MultiSel(val underlying: WebElement) extends Element {
if(underlying.getTagName.toLowerCase != "select")
throw new TestFailedException(
sde => Some("Element " + underlying + " is not select."),
None,
getStackDepthFun("WebBrowser.scala", "this", 1)
)
private val select = new Select(underlying)
if (!select.isMultiple)
throw new TestFailedException(
sde => Some("Element " + underlying + " is not a multi-selection list."),
None,
getStackDepthFun("WebBrowser.scala", "this", 1)
)
/**
* Clears the passed value in this multiple selection list.
*
* @param value the value to clear
*/
def clear(value: String) {
select.deselectByValue(value)
}
/**
* Gets all selected values of this multiple selection list.
*
* <p>
     * If the multiple selection list has no selections, this method will
* return an empty <code>IndexedSeq</code>.
* </p>
*
* @return An <code>IndexedSeq</code> containing the currently selected values
*/
def values: MultiSelOptionSeq = {
val elementSeq = Vector.empty ++ select.getAllSelectedOptions.asScala
new MultiSelOptionSeq(elementSeq.map(_.getAttribute("value")))
}
/**
* Clears any existing selections then sets all values contained in the passed <code>collection.Seq[String]</code>.
*
* <p>
* In other words, the <code>values_=</code> method <em>replaces</em> the current selections, if any, with
* new selections defined by the passed <code>Seq[String]</code>.
* </p>
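     *
     * <p>
     * For example, the following replaces whatever is currently selected with options 5 and 6
     * (assuming both values exist in the list):
     * </p>
     *
     * <pre class="stHighlight">
     * multiSel("select2").values = Seq("option5", "option6")
     * </pre>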
*
* @param values a <code>Seq</code> of string values to select
* @throws TestFailedException if a value contained in the passed <code>Seq[String]</code> is not
* among this multiple selection list's values.
*/
def values_=(values: collection.Seq[String]) {
try {
clearAll()
values.foreach(select.selectByValue(_))
}
catch {
case e: org.openqa.selenium.NoSuchElementException =>
throw new TestFailedException(
sde => Some(e.getMessage),
Some(e),
getStackDepthFun("WebBrowser.scala", "value_=", 1)
)
}
}
    /**
     * Clears all selected values in this multiple selection list.
     */
def clearAll() {
select.deselectAll()
}
}
/**
* This object is part of ScalaTest's Selenium DSL. Please see the documentation for
* <a href="WebBrowser.html"><code>WebBrowser</code></a> for an overview of the Selenium DSL.
*
* <p>
* This object enables syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* go to "http://www.artima.com"
* ^
* </pre>
*/
object go {
/**
* Sends the browser to the passed URL.
*
* <p>
* This method enables syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* go to "http://www.artima.com"
* ^
* </pre>
*
* @param url the URL to which to send the browser
* @param driver the <code>WebDriver</code> with which to drive the browser
*/
def to(url: String)(implicit driver: WebDriver) {
driver.get(url)
}
/**
* Sends the browser to the URL contained in the passed <code>Page</code> object.
*
* <p>
* This method enables syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* go to homePage
* ^
* </pre>
*
* @param page the <code>Page</code> object containing the URL to which to send the browser
* @param driver the <code>WebDriver</code> with which to drive the browser
*/
def to(page: Page)(implicit driver: WebDriver) {
driver.get(page.url)
}
}
/**
* Sends the browser to the passed URL.
*
* <p>
* Here's an example:
* </p>
*
* <pre class="stHighlight">
* goTo("http://www.artima.com")
* </pre>
*
* @param url the URL to which to send the browser
* @param driver the <code>WebDriver</code> with which to drive the browser
*/
def goTo(url: String)(implicit driver: WebDriver) {
go to url
}
/**
* Sends the browser to the URL contained in the passed <code>Page</code> object.
*
* <p>
* Here's an example:
* </p>
*
* <pre class="stHighlight">
* goTo(homePage)
* </pre>
*
* @param page the <code>Page</code> object containing the URL to which to send the browser
* @param driver the <code>WebDriver</code> with which to drive the browser
*/
def goTo(page: Page)(implicit driver: WebDriver) {
go to page
}
/**
* Closes the current browser window, and exits the driver if the current window was the only one remaining.
*
* @param driver the <code>WebDriver</code> with which to drive the browser
*/
def close()(implicit driver: WebDriver) {
driver.close()
}
@deprecated("The title method will be removed in the next 2.0 milestone release. Please use pageTitle instead.")
def title(implicit driver: WebDriver): String = {
val t = driver.getTitle
if (t != null) t else ""
}
/**
* Returns the title of the current page, or the empty string if the current page has no title.
*
* @param driver the <code>WebDriver</code> with which to drive the browser
* @return the current page's title, or the empty string if the current page has no title
*/
def pageTitle(implicit driver: WebDriver): String = {
val t = driver.getTitle
if (t != null) t else ""
}
/**
* Returns the source of the current page.
*
* <p>
* This method invokes <code>getPageSource</code> on the passed <code>WebDriver</code> and returns the result.
* </p>
*
* @param driver the <code>WebDriver</code> with which to drive the browser
* @return the source of the current page
*/
def pageSource(implicit driver: WebDriver): String = driver.getPageSource
/**
* Returns the URL of the current page.
*
* <p>
* This method invokes <code>getCurrentUrl</code> on the passed <code>WebDriver</code> and returns the result.
* </p>
*
* @param driver the <code>WebDriver</code> with which to drive the browser
* @return the URL of the current page
*/
def currentUrl(implicit driver: WebDriver): String = driver.getCurrentUrl
/**
* This trait is part of ScalaTest's Selenium DSL. Please see the documentation for
* <a href="WebBrowser.html"><code>WebBrowser</code></a> for an overview of the Selenium DSL.
*
* <p>
* Subclasses of this trait define different ways of querying for elements, enabling
* syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* click on id("q")
* ^
* </pre>
*/
sealed trait Query {
/**
* The Selenium <code>By</code> for this query.
*/
val by: By
/**
* The query string for this query.
*
* <p>
* For example, the query string for <code>id("q")</code> is <code>"q"</code>.
* </p>
*/
val queryString: String
/**
* Returns the first <code>Element</code> selected by this query, or throws <code>TestFailedException</code>
* if no <code>Element</code> is selected.
*
* <p>
* The class of the <code>Element</code> returned will be a subtype of <code>Element</code> if appropriate.
* For example, if this query selects a text field, the class of the returned <code>Element</code> will
* be <code>TextField</code>.
* </p>
*
* @param driver the <code>WebDriver</code> with which to drive the browser
* @return the <code>Element</code> selected by this query
* @throws TestFailedException if nothing is selected by this query
*/
def element(implicit driver: WebDriver): Element = {
try {
createTypedElement(driver.findElement(by))
}
catch {
case e: org.openqa.selenium.NoSuchElementException =>
throw new TestFailedException(
sde => Some("Element '" + queryString + "' not found."),
Some(e),
getStackDepthFun("WebBrowser.scala", "name", 1)
)
}
}
/**
* Returns the first <code>Element</code> selected by this query, wrapped in a <code>Some</code>, or <code>None</code>
* if no <code>Element</code> is selected.
*
* <p>
* The class of the <code>Element</code> returned will be a subtype of <code>Element</code> if appropriate.
* For example, if this query selects a text field, the class of the returned <code>Element</code> will
* be <code>TextField</code>.
* </p>
*
* @param driver the <code>WebDriver</code> with which to drive the browser
* @return the <code>Element</code> selected by this query, wrapped in a <code>Some</code>, or <code>None</code> if
* no <code>Element</code> is selected
*/
def findElement(implicit driver: WebDriver): Option[Element] =
try {
Some(createTypedElement(driver.findElement(by)))
}
catch {
case e: org.openqa.selenium.NoSuchElementException => None
}
/**
* Returns an <code>Iterator</code> over all <code>Element</code>s selected by this query.
*
* <p>
     * The classes of the <code>Element</code>s produced by the returned <code>Iterator</code> will be
     * subtypes of <code>Element</code> if appropriate. For example, if an <code>Element</code> representing
     * a text field is returned by the <code>Iterator</code>, the class of that <code>Element</code> will
     * be <code>TextField</code>.
     * </p>
     *
     * <p>
     * If no <code>Element</code>s are selected by this query, this method will return an empty <code>Iterator</code>.
     * </p>
*
* @param driver the <code>WebDriver</code> with which to drive the browser
* @return the <code>Iterator</code> over all <code>Element</code>s selected by this query
*/
def findAllElements(implicit driver: WebDriver): Iterator[Element] = driver.findElements(by).asScala.toIterator.map { e => createTypedElement(e) }
/**
* Returns the first <code>WebElement</code> selected by this query, or throws <code>TestFailedException</code>
* if no <code>WebElement</code> is selected.
*
* @param driver the <code>WebDriver</code> with which to drive the browser
* @return the <code>WebElement</code> selected by this query
* @throws TestFailedException if nothing is selected by this query
*/
def webElement(implicit driver: WebDriver): WebElement = {
try {
driver.findElement(by)
}
catch {
case e: org.openqa.selenium.NoSuchElementException =>
throw new TestFailedException(
sde => Some("WebElement '" + queryString + "' not found."),
Some(e),
getStackDepthFun("WebBrowser.scala", "name", 1)
)
}
}
}
/**
* An ID query.
*
* <p>
* This class enables syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* click on id("q")
* ^
* </pre>
*
* @param queryString the query string for this query.
*/
  case class IdQuery(queryString: String) extends Query { val by = By.id(queryString) }
/**
* A name query.
*
* <p>
* This class enables syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* click on name("q")
* ^
* </pre>
*
* @param queryString the query string for this query.
*/
case class NameQuery(queryString: String) extends Query { val by = By.name(queryString) }
/**
* An XPath query.
*
* <p>
* This class enables syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* click on xpath("???")
* ^
* </pre>
*
* @param queryString the query string for this query.
*/
case class XPathQuery(queryString: String) extends Query { val by = By.xpath(queryString) }
// TODO: Are these case classes just to get at the val?
/**
* A class name query.
*
* <p>
* This class enables syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* click on className("???")
* ^
* </pre>
*
* @param queryString the query string for this query.
*/
case class ClassNameQuery(queryString: String) extends Query { val by = By.className(queryString) }
/**
* A CSS selector query.
*
* <p>
* This class enables syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* click on cssSelector("???")
* ^
* </pre>
*
* @param queryString the query string for this query.
*/
case class CssSelectorQuery(queryString: String) extends Query { val by = By.cssSelector(queryString) }
/**
* A link text query.
*
* <p>
* This class enables syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* click on linkText("???")
* ^
* </pre>
*
* @param queryString the query string for this query.
*/
case class LinkTextQuery(queryString: String) extends Query { val by = By.linkText(queryString) }
/**
* A partial link text query.
*
* <p>
* This class enables syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* click on partialLinkText("???")
* ^
* </pre>
*
* @param queryString the query string for this query.
*/
case class PartialLinkTextQuery(queryString: String) extends Query { val by = By.partialLinkText(queryString) }
/**
* A tag name query.
*
* <p>
* This class enables syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* click on tagName("???")
* ^
* </pre>
*
* @param queryString the query string for this query.
*/
case class TagNameQuery(queryString: String) extends Query { val by = By.tagName(queryString) }
/**
* Returns an ID query.
*
* <p>
* This method enables syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* click on id("q")
* ^
* </pre>
*
   * @param elementId the element ID with which to search
*/
def id(elementId: String): IdQuery = new IdQuery(elementId)
/**
* Returns a name query.
*
* <p>
* This method enables syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* click on name("q")
* ^
* </pre>
*
   * @param elementName the element name with which to search
*/
def name(elementName: String): NameQuery = new NameQuery(elementName)
/**
* Returns an XPath query.
*
* <p>
* This method enables syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* click on xpath("???")
* ^
* </pre>
*
   * @param xpath the XPath expression with which to search
*/
def xpath(xpath: String): XPathQuery = new XPathQuery(xpath)
/**
* Returns a class name query.
*
* <p>
* This method enables syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* click on className("???")
* ^
* </pre>
*
   * @param className the class name with which to search
*/
def className(className: String): ClassNameQuery = new ClassNameQuery(className)
/**
* Returns a CSS selector query.
*
* <p>
* This method enables syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* click on cssSelector("???")
* ^
* </pre>
*
   * @param cssSelector the CSS selector with which to search
*/
def cssSelector(cssSelector: String): CssSelectorQuery = new CssSelectorQuery(cssSelector)
/**
* Returns a link text query.
*
* <p>
* This method enables syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* click on linkText("???")
* ^
* </pre>
*
   * @param linkText the link text with which to search
*/
def linkText(linkText: String): LinkTextQuery = new LinkTextQuery(linkText)
/**
* Returns a partial link text query.
*
* <p>
* This method enables syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* click on partialLinkText("???")
* ^
* </pre>
*
   * @param partialLinkText the partial link text with which to search
*/
def partialLinkText(partialLinkText: String): PartialLinkTextQuery = new PartialLinkTextQuery(partialLinkText)
/**
* Returns a tag name query.
*
* <p>
* This method enables syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* click on tagName("???")
* ^
* </pre>
*
   * @param tagName the tag name with which to search
*/
def tagName(tagName: String): TagNameQuery = new TagNameQuery(tagName)
private def createTypedElement(element: WebElement): Element = {
if (isTextField(element))
new TextField(element)
else if (isTextArea(element))
new TextArea(element)
else if (isCheckBox(element))
new Checkbox(element)
else if (isRadioButton(element))
new RadioButton(element)
else if (element.getTagName.toLowerCase == "select") {
val select = new Select(element)
if (select.isMultiple)
new MultiSel(element)
else
new SingleSel(element)
}
else
new Element() { val underlying = element }
}
/**
* Finds and returns the first element selected by the specified <code>Query</code>, wrapped
* in a <code>Some</code>, or <code>None</code> if no element is selected.
*
* <p>
* The class of the <code>Element</code> returned will be a subtype of <code>Element</code> if appropriate.
* For example, if the query selects a text field, the class of the returned <code>Element</code> will
* be <code>TextField</code>.
* </p>
*
* @param query the <code>Query</code> with which to search
* @param driver the <code>WebDriver</code> with which to drive the browser
* @return the <code>Element</code> selected by this query, wrapped in a <code>Some</code>, or <code>None</code> if
* no <code>Element</code> is selected
*/
def find(query: Query)(implicit driver: WebDriver): Option[Element] = query.findElement
/**
* Finds and returns the first element selected by the specified string ID or name, wrapped
   * in a <code>Some</code>, or <code>None</code> if no element is selected.
*
* <p>
* This method will try to lookup by id first. If it cannot find
* any element with an id equal to the specified <code>queryString</code>, it will then try lookup by name.
* </p>
*
* <p>
* The class of the <code>Element</code> returned will be a subtype of <code>Element</code> if appropriate.
* For example, if the query selects a text field, the class of the returned <code>Element</code> will
* be <code>TextField</code>.
* </p>
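   *
   * <p>
   * For example (a sketch; assumes an implicit <code>WebDriver</code> is in scope and the page
   * contains a text field with ID or name <code>"q"</code>):
   * </p>
   *
   * <pre class="stHighlight">
   * find("q") match {
   *   case Some(tf: TextField) => tf.value = "Cheese!"
   *   case _ => () // no such element, or not a text field
   * }
   * </pre>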
*
* @param queryString the string with which to search, first by ID then by name
* @param driver the <code>WebDriver</code> with which to drive the browser
* @return the <code>Element</code> selected by this query, wrapped in a <code>Some</code>, or <code>None</code> if
* no <code>Element</code> is selected
*/
def find(queryString: String)(implicit driver: WebDriver): Option[Element] =
new IdQuery(queryString).findElement match {
case Some(element) => Some(element)
case None => new NameQuery(queryString).findElement match {
case Some(element) => Some(element)
case None => None
}
}
/**
* Returns an <code>Iterator</code> over all <code>Element</code>s selected by this query.
*
* <p>
   * The classes of the <code>Element</code>s produced by the returned <code>Iterator</code> will be
   * subtypes of <code>Element</code> if appropriate. For example, if an <code>Element</code> representing
   * a text field is returned by the <code>Iterator</code>, the class of that <code>Element</code> will
   * be <code>TextField</code>.
   * </p>
   *
   * <p>
   * If no <code>Element</code>s are selected by this query, this method will return an empty <code>Iterator</code>.
   * </p>
*
* @param query the <code>Query</code> with which to search
* @param driver the <code>WebDriver</code> with which to drive the browser
* @return the <code>Iterator</code> over all <code>Element</code>s selected by this query
*/
def findAll(query: Query)(implicit driver: WebDriver): Iterator[Element] = query.findAllElements
/**
   * Returns an <code>Iterator</code> over all <code>Element</code>s selected by the specified string ID or name.
*
* <p>
* This method will try to lookup by id first. If it cannot find
* any element with an id equal to the specified <code>queryString</code>, it will then try lookup by name.
* </p>
*
* <p>
   * The classes of the <code>Element</code>s returned will be subtypes of <code>Element</code> if appropriate.
   * For example, if one of the selected elements is a text field, the class of its <code>Element</code> will
   * be <code>TextField</code>.
* </p>
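   *
   * <p>
   * For example (a sketch; assumes elements with ID or name <code>"comment"</code> exist on the page):
   * </p>
   *
   * <pre class="stHighlight">
   * for (e <- findAll("comment"))
   *   e should be ('displayed)
   * </pre>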
*
* @param queryString the string with which to search, first by ID then by name
* @param driver the <code>WebDriver</code> with which to drive the browser
* @return the <code>Iterator</code> over all <code>Element</code>s selected by this query
*/
def findAll(queryString: String)(implicit driver: WebDriver): Iterator[Element] = {
val byIdItr = new IdQuery(queryString).findAllElements
if (byIdItr.hasNext)
byIdItr
else
new NameQuery(queryString).findAllElements
}
private def tryQueries[T](queryString: String)(f: Query => T)(implicit driver: WebDriver): T = {
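    // Try the lookup by ID first; if it throws (typically because no element has that ID),
    // fall back to looking the element up by name.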
try {
f(IdQuery(queryString))
}
catch {
case _: Throwable => f(NameQuery(queryString))
}
}
/**
* Finds and returns the first <code>TextField</code> selected by the specified <code>Query</code>, throws <code>TestFailedException</code>
* if element not found or the found element is not a <code>TextField</code>.
*
* @param query the <code>Query</code> with which to search
* @param driver the <code>WebDriver</code> with which to drive the browser
* @throws TestFailedException if element not found or found element is not a <code>TextField</code>
* @return the <code>TextField</code> selected by this query
*/
def textField(query: Query)(implicit driver: WebDriver): TextField = new TextField(query.webElement)
/**
* Finds and returns the first <code>TextField</code> selected by the specified string ID or name, throws <code>TestFailedException</code>
* if element not found or the found element is not a <code>TextField</code>.
*
* @param queryString the string with which to search, first by ID then by name
* @param driver the <code>WebDriver</code> with which to drive the browser
* @throws TestFailedException if element not found or found element is not a <code>TextField</code>
* @return the <code>TextField</code> selected by this query
*/
def textField(queryString: String)(implicit driver: WebDriver): TextField =
tryQueries(queryString)(q => new TextField(q.webElement))
/**
* Finds and returns the first <code>TextArea</code> selected by the specified <code>Query</code>, throws <code>TestFailedException</code>
* if element not found or the found element is not a <code>TextArea</code>.
*
* @param query the <code>Query</code> with which to search
* @param driver the <code>WebDriver</code> with which to drive the browser
* @throws TestFailedException if element not found or found element is not a <code>TextArea</code>
* @return the <code>TextArea</code> selected by this query
*/
  def textArea(query: Query)(implicit driver: WebDriver): TextArea = new TextArea(query.webElement)
/**
* Finds and returns the first <code>TextArea</code> selected by the specified string ID or name, throws <code>TestFailedException</code>
* if element not found or the found element is not a <code>TextArea</code>.
*
* @param queryString the string with which to search, first by ID then by name
* @param driver the <code>WebDriver</code> with which to drive the browser
* @throws TestFailedException if element not found or found element is not a <code>TextArea</code>
* @return the <code>TextArea</code> selected by this query
*/
def textArea(queryString: String)(implicit driver: WebDriver): TextArea =
tryQueries(queryString)(q => new TextArea(q.webElement))
/**
   * Finds and returns a <code>RadioButtonGroup</code> for the specified group name, throwing <code>TestFailedException</code>
   * if no <code>RadioButton</code> with the specified group name is found.
   *
   * @param groupName the group name with which to search
   * @param driver the <code>WebDriver</code> with which to drive the browser
   * @throws TestFailedException if no <code>RadioButton</code> with the specified group name is found
* @return the <code>RadioButtonGroup</code> selected by this query
*/
  def radioButtonGroup(groupName: String)(implicit driver: WebDriver): RadioButtonGroup = new RadioButtonGroup(groupName, driver)
/**
* Finds and returns the first <code>RadioButton</code> selected by the specified <code>Query</code>, throws <code>TestFailedException</code>
* if element not found or the found element is not a <code>RadioButton</code>.
*
* @param query the <code>Query</code> with which to search
* @param driver the <code>WebDriver</code> with which to drive the browser
* @throws TestFailedException if element not found or found element is not a <code>RadioButton</code>
* @return the <code>RadioButton</code> selected by this query
*/
  def radioButton(query: Query)(implicit driver: WebDriver): RadioButton = new RadioButton(query.webElement)
/**
* Finds and returns the first <code>RadioButton</code> selected by the specified string ID or name, throws <code>TestFailedException</code>
* if element not found or the found element is not a <code>RadioButton</code>.
*
* @param queryString the string with which to search, first by ID then by name
* @param driver the <code>WebDriver</code> with which to drive the browser
* @throws TestFailedException if element not found or found element is not a <code>RadioButton</code>
* @return the <code>RadioButton</code> selected by this query
*/
def radioButton(queryString: String)(implicit driver: WebDriver): RadioButton =
tryQueries(queryString)(q => new RadioButton(q.webElement))
/**
* Finds and returns the first <code>Checkbox</code> selected by the specified <code>Query</code>, throws <code>TestFailedException</code>
* if element not found or the found element is not a <code>Checkbox</code>.
*
* @param query the <code>Query</code> with which to search
* @param driver the <code>WebDriver</code> with which to drive the browser
* @throws TestFailedException if element not found or found element is not a <code>Checkbox</code>
* @return the <code>Checkbox</code> selected by this query
*/
  def checkbox(query: Query)(implicit driver: WebDriver): Checkbox = new Checkbox(query.webElement)
/**
* Finds and returns the first <code>Checkbox</code> selected by the specified string ID or name, throws <code>TestFailedException</code>
* if element not found or the found element is not a <code>Checkbox</code>.
*
* @param queryString the string with which to search, first by ID then by name
* @param driver the <code>WebDriver</code> with which to drive the browser
* @throws TestFailedException if element not found or found element is not a <code>Checkbox</code>
* @return the <code>Checkbox</code> selected by this query
*/
def checkbox(queryString: String)(implicit driver: WebDriver): Checkbox =
tryQueries(queryString)(q => new Checkbox(q.webElement))
/**
* Finds and returns the first <code>SingleSel</code> selected by the specified <code>Query</code>, throws <code>TestFailedException</code>
* if element not found or the found element is not a <code>SingleSel</code>.
*
* @param query the <code>Query</code> with which to search
* @param driver the <code>WebDriver</code> with which to drive the browser
* @throws TestFailedException if element not found or found element is not a <code>SingleSel</code>
* @return the <code>SingleSel</code> selected by this query
*/
  def singleSel(query: Query)(implicit driver: WebDriver): SingleSel = new SingleSel(query.webElement)
/**
* Finds and returns the first <code>SingleSel</code> selected by the specified string ID or name, throws <code>TestFailedException</code>
* if element not found or the found element is not a <code>SingleSel</code>.
*
* @param queryString the string with which to search, first by ID then by name
* @param driver the <code>WebDriver</code> with which to drive the browser
* @throws TestFailedException if element not found or found element is not a <code>SingleSel</code>
* @return the <code>SingleSel</code> selected by this query
*/
def singleSel(queryString: String)(implicit driver: WebDriver): SingleSel =
tryQueries(queryString)(q => new SingleSel(q.webElement))
/**
* Finds and returns the first <code>MultiSel</code> selected by the specified <code>Query</code>, throws <code>TestFailedException</code>
* if element not found or the found element is not a <code>MultiSel</code>.
*
* @param query the <code>Query</code> with which to search
* @param driver the <code>WebDriver</code> with which to drive the browser
* @throws TestFailedException if element not found or found element is not a <code>MultiSel</code>
* @return the <code>MultiSel</code> selected by this query
*/
  def multiSel(query: Query)(implicit driver: WebDriver): MultiSel = new MultiSel(query.webElement)
/**
* Finds and returns the first <code>MultiSel</code> selected by the specified string ID or name, throws <code>TestFailedException</code>
* if element not found or the found element is not a <code>MultiSel</code>.
*
* @param queryString the string with which to search, first by ID then by name
* @param driver the <code>WebDriver</code> with which to drive the browser
* @throws TestFailedException if element not found or found element is not a <code>MultiSel</code>
* @return the <code>MultiSel</code> selected by this query
*/
def multiSel(queryString: String)(implicit driver: WebDriver): MultiSel =
tryQueries(queryString)(q => new MultiSel(q.webElement))
/**
* This object is part of ScalaTest's Selenium DSL. Please see the documentation for
* <a href="WebBrowser.html"><code>WebBrowser</code></a> for an overview of the Selenium DSL.
*
* <p>
* This object enables syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* click on "aButton"
* ^
* </pre>
*/
object click {
/**
* Click on the specified <code>WebElement</code>
*
* @param element the <code>WebElement</code> to click on
*/
def on(element: WebElement) {
element.click()
}
/**
* Click on the first <code>Element</code> selected by the specified <code>Query</code>
*
* @param query the <code>Query</code> with which to search
* @param driver the <code>WebDriver</code> with which to drive the browser
*/
def on(query: Query)(implicit driver: WebDriver) {
query.webElement.click()
}
/**
* Click on the first <code>Element</code> selected by the specified string ID or name
*
* @param queryString the string with which to search, first by ID then by name
* @param driver the <code>WebDriver</code> with which to drive the browser
*/
def on(queryString: String)(implicit driver: WebDriver) {
// Resolve the element here first: the stack depth reported on failure would not be correct if we just called the query overload directly.
val target = tryQueries(queryString)(q => q.webElement)
on(target)
}
/**
* Click on the specified <code>Element</code>
*
* @param element the <code>Element</code> to click on
*/
def on(element: Element) {
element.underlying.click()
}
}
/**
* Click on the specified <code>WebElement</code>
*
* @param element the <code>WebElement</code> to click on
*/
def clickOn(element: WebElement) {
click on element
}
/**
* Click on the first <code>Element</code> selected by the specified <code>Query</code>
*
* @param query the <code>Query</code> with which to search
* @param driver the <code>WebDriver</code> with which to drive the browser
*/
def clickOn(query: Query)(implicit driver: WebDriver) {
click on query
}
/**
* Click on the first <code>Element</code> selected by the specified string ID or name
*
* @param queryString the string with which to search, first by ID then by name
* @param driver the <code>WebDriver</code> with which to drive the browser
*/
def clickOn(queryString: String)(implicit driver: WebDriver) {
click on queryString
}
/**
* Click on the specified <code>Element</code>
*
* @param element the <code>Element</code> to click on
*/
def clickOn(element: Element) {
click on element
}
/**
* Submits the form containing the current active element, throwing <code>TestFailedException</code> if the current active element is not
* in a form or the underlying <code>WebDriver</code> encounters a problem when submitting the form. If this causes the current page to change,
* this call will block until the new page is loaded.
*
* @param driver the <code>WebDriver</code> with which to drive the browser
* @throws TestFailedException if the current active element is not in a form or the underlying <code>WebDriver</code> encounters a problem when submitting the form.
*/
def submit()(implicit driver: WebDriver) {
try {
(switch to activeElement).underlying.submit()
}
catch {
case e: org.openqa.selenium.NoSuchElementException =>
throw new TestFailedException(
sde => Some("Current element is not a form element."),
Some(e),
getStackDepthFun("WebBrowser.scala", "name", 1)
)
case e: Throwable =>
// This can happen due to a bug in a particular WebDriver implementation, like a NullPointerException in HtmlUnitDriver when the element is not a form element.
// Either way, we wrap it in a TestFailedException.
throw new TestFailedException(
sde => Some("WebDriver encountered problem to submit(): " + e.getMessage),
Some(e),
getStackDepthFun("WebBrowser.scala", "submit", 0)
)
}
}
/**
* Sets the amount of time the driver should wait when searching for an element that is not immediately present.
*
* <p>
* When searching for requested elements, Selenium will poll the page until the requested element (or at least one of multiple requested
* elements) is found or this "implicit wait" timeout has expired.
* If the timeout expires, Selenium will throw <code>NoSuchElementException</code>, which ScalaTest's Selenium DSL will wrap in a <code>TestFailedException</code>.
* </p>
*
* <p>
* You can alternatively set this timeout to zero and use ScalaTest's <code>eventually</code> construct.
* </p>
*
* <p>
* This method invokes <code>manage.timeouts.implicitlyWait</code> on the passed <code>WebDriver</code>. See the documentation of Selenium's
* <code>WebDriver#Timeouts</code> interface for more information.
* </p>
*
* @param timeout the time span to implicitly wait
* @param driver the <code>WebDriver</code> on which to set the implicit wait
*/
def implicitlyWait(timeout: Span)(implicit driver: WebDriver) {
driver.manage.timeouts.implicitlyWait(timeout.totalNanos, TimeUnit.NANOSECONDS)
}
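// A minimal usage sketch (hypothetical URL and element ID; assumes an implicit
// WebDriver and the org.scalatest.time types used elsewhere in this trait are in scope):
//
//   implicitlyWait(Span(10, Seconds)) // poll up to 10 seconds when locating elements
//   go to "http://localhost:9000/login"
//   click on "submitButton"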
@deprecated("The wait method will be removed in the next 2.0 milestone release. Please use eventually instead.")
def wait[T](timeout: Span, interval: Span = Span(500L, Milliseconds))(f: => T)(implicit driver: WebDriver): T =
new WebDriverWait(driver, timeout.totalNanos / 1000000000L, interval.totalNanos / 1000000)
.until(new ExpectedCondition[T]() {
override def apply(driver: WebDriver) = {
f
}
})
/**
* Close all windows, and exit the driver.
*
* @param driver the <code>WebDriver</code> on which to quit.
*/
def quit()(implicit driver: WebDriver) {
driver.quit()
}
/**
* Get an opaque handle to the current active window, which uniquely identifies it within the implicit driver instance.
*
* @param driver the <code>WebDriver</code> with which to drive the browser
*/
def windowHandle(implicit driver: WebDriver): String = driver.getWindowHandle
/**
* Get a set of window handles which can be used to iterate over all open windows
*
* @param driver the <code>WebDriver</code> with which to drive the browser
*/
def windowHandles(implicit driver: WebDriver): Set[String] = driver.getWindowHandles.asScala.toSet
/**
* This object is part of ScalaTest's Selenium DSL. Please see the documentation for
* <a href="WebBrowser.html"><code>WebBrowser</code></a> for an overview of the Selenium DSL.
*
* <p>
* This object enables syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* switch to alert
* ^
* </pre>
*/
object switch {
/**
* Switch to the specified <code>SwitchTarget</code>
*
* @param target the <code>SwitchTarget</code> to switch to
* @param driver the <code>WebDriver</code> with which to drive the browser
* @return instance of specified <code>SwitchTarget</code>'s type parameter
*/
def to[T](target: SwitchTarget[T])(implicit driver: WebDriver): T = {
target.switch(driver)
}
}
/**
* This value supports switching to the currently active element in ScalaTest's Selenium DSL.
* Please see the documentation for <a href="WebBrowser.html"><code>WebBrowser</code></a> for an overview of the Selenium DSL.
*
* <p>
* This value enables the following syntax:
* </p>
*
* <pre>
* switch to activeElement
* ^
* </pre>
*/
val activeElement = new ActiveElementTarget()
/**
* This value supports switching to the alert box in ScalaTest's Selenium DSL.
* Please see the documentation for <a href="WebBrowser.html"><code>WebBrowser</code></a> for an overview of the Selenium DSL.
*
* <p>
* This value enables the following syntax:
* </p>
*
* <pre>
* switch to alert
* ^
* </pre>
*/
val alert = new AlertTarget()
/**
* This value supports switching to the default content in ScalaTest's Selenium DSL.
* Please see the documentation for <a href="WebBrowser.html"><code>WebBrowser</code></a> for an overview of the Selenium DSL.
*
* <p>
* This value enables the following syntax:
* </p>
*
* <pre>
* switch to defaultContent
* ^
* </pre>
*/
val defaultContent = new DefaultContentTarget()
/**
* This method supports switching to a frame by index in ScalaTest's Selenium DSL.
* Please see the documentation for <a href="WebBrowser.html"><code>WebBrowser</code></a> for an overview of the Selenium DSL.
*
* <p>
* This method enables the following syntax:
* </p>
*
* <pre>
* switch to frame(0)
* ^
* </pre>
*
* @param index the index of the frame to switch to
* @return a FrameIndexTarget instance
*/
def frame(index: Int) = new FrameIndexTarget(index)
/**
* This method supports switching to a frame by name or ID in ScalaTest's Selenium DSL.
* Please see the documentation for <a href="WebBrowser.html"><code>WebBrowser</code></a> for an overview of the Selenium DSL.
*
* <p>
* This method enables the following syntax:
* </p>
*
* <pre>
* switch to frame("name")
* ^
* </pre>
*
* @param nameOrId name or ID of the frame to switch to
* @return a FrameNameOrIdTarget instance
*/
def frame(nameOrId: String) = new FrameNameOrIdTarget(nameOrId)
/**
* This method supports switching to a frame by web element in ScalaTest's Selenium DSL.
* Please see the documentation for <a href="WebBrowser.html"><code>WebBrowser</code></a> for an overview of the Selenium DSL.
*
* @param element <code>WebElement</code> which is contained in the frame to switch to
* @return a FrameWebElementTarget instance
*/
def frame(element: WebElement) = new FrameWebElementTarget(element)
/**
* This method supports switching to a frame by element in ScalaTest's Selenium DSL.
* Please see the documentation for <a href="WebBrowser.html"><code>WebBrowser</code></a> for an overview of the Selenium DSL.
*
* @param element <code>Element</code> which is contained in the frame to switch to
* @return a FrameElementTarget instance
*/
def frame(element: Element) = new FrameElementTarget(element)
/**
* This method supports switching to a frame by <code>Query</code> in ScalaTest's Selenium DSL.
* Please see the documentation for <a href="WebBrowser.html"><code>WebBrowser</code></a> for an overview of the Selenium DSL.
*
* @param query <code>Query</code> used to select <code>WebElement</code> which is contained in the frame to switch to
* @return a FrameWebElementTarget instance
*/
def frame(query: Query)(implicit driver: WebDriver) = new FrameWebElementTarget(query.webElement)
/**
* This method supports switching to a window by name or handle in ScalaTest's Selenium DSL.
* Please see the documentation for <a href="WebBrowser.html"><code>WebBrowser</code></a> for an overview of the Selenium DSL.
*
* <p>
* This method enables the following syntax:
* </p>
*
* <pre>
* switch to window(windowHandle)
* ^
* </pre>
*
* @param nameOrHandle name or window handle of the window to switch to
* @return a WindowTarget instance
*/
def window(nameOrHandle: String) = new WindowTarget(nameOrHandle)
/**
* Switch to the specified <code>SwitchTarget</code>
*
* @param target the <code>SwitchTarget</code> to switch to
* @param driver the <code>WebDriver</code> with which to drive the browser
* @return instance of specified <code>SwitchTarget</code>'s type parameter
*/
def switchTo[T](target: SwitchTarget[T])(implicit driver: WebDriver): T = switch to target
/**
* Go back to previous page.
*
* @param driver the <code>WebDriver</code> with which to drive the browser
*/
def goBack()(implicit driver: WebDriver) {
driver.navigate.back()
}
/**
* Go forward to next page.
*
* @param driver the <code>WebDriver</code> with which to drive the browser
*/
def goForward()(implicit driver: WebDriver) {
driver.navigate.forward()
}
/**
* Reload the current page.
*
* @param driver the <code>WebDriver</code> with which to drive the browser
*/
def reloadPage()(implicit driver: WebDriver) {
driver.navigate.refresh()
}
/**
* This object is part of ScalaTest's Selenium DSL. Please see the documentation for
* <a href="WebBrowser.html"><code>WebBrowser</code></a> for an overview of the Selenium DSL.
*
* <p>
* This object enables syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* add cookie("aName", "aValue")
* ^
* </pre>
*/
object add {
private def addCookie(cookie: Cookie)(implicit driver: WebDriver) {
driver.manage.addCookie(cookie)
}
// Default values determined from http://code.google.com/p/selenium/source/browse/trunk/java/client/src/org/openqa/selenium/Cookie.java
/**
* Add a cookie to the web browser. If the cookie's domain name is left blank (default), it is assumed that the cookie is meant for the domain of the current document.
*
* @param name cookie's name
* @param value cookie's value
* @param path cookie's path
* @param expiry cookie's expiry date
* @param domain cookie's domain name
* @param secure whether this cookie is secure
* @param driver the <code>WebDriver</code> with which to drive the browser
*/
def cookie(name: String, value: String, path: String = "/", expiry: Date = null, domain: String = null, secure: Boolean = false)(implicit driver: WebDriver) {
addCookie(new Cookie(name, value, domain, path, expiry, secure))
}
}
/**
* Get a saved cookie from the web browser, throwing <code>TestFailedException</code> if the cookie does not exist.
*
* @param name cookie's name
* @return a WrappedCookie instance
*/
def cookie(name: String)(implicit driver: WebDriver): WrappedCookie = {
getCookie(name)
}
private def getCookie(name: String)(implicit driver: WebDriver): WrappedCookie = {
driver.manage.getCookies.asScala.toList.find(_.getName == name) match {
case Some(cookie) =>
new WrappedCookie(cookie)
case None =>
throw new TestFailedException(
sde => Some("Cookie '" + name + "' not found."),
None,
getStackDepthFun("WebBrowser.scala", "getCookie", 1)
)
}
}
/**
* This object is part of ScalaTest's Selenium DSL. Please see the documentation for
* <a href="WebBrowser.html"><code>WebBrowser</code></a> for an overview of the Selenium DSL.
*
* <p>
* This object enables syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* delete cookie "aName"
* ^
*
* delete all cookies
* ^
* </pre>
*/
object delete {
private def deleteCookie(name: String)(implicit driver: WebDriver) {
// getCookie throws TestFailedException if the cookie is not found, so no null check is needed here
val cookie = getCookie(name)
driver.manage.deleteCookie(cookie.underlying)
}
/**
* Delete the cookie with the specified name from the web browser, throwing <code>TestFailedException</code> if the specified cookie does not exist.
*
* @param name cookie's name
* @param driver the <code>WebDriver</code> with which to drive the browser
*/
def cookie(name: String)(implicit driver: WebDriver) {
deleteCookie(name)
}
/**
* Delete all cookies in the current domain from web browser.
*
* @param cookies the <code>cookies</code> noun required by the DSL syntax <code>delete all cookies</code>
* @param driver the <code>WebDriver</code> with which to drive the browser
*/
def all(cookies: CookiesNoun)(implicit driver: WebDriver) {
driver.manage.deleteAllCookies()
}
}
/**
* Add a cookie to the web browser. If the cookie's domain name is left blank (default), it is assumed that the cookie is meant for the domain of the current document.
*
* @param name cookie's name
* @param value cookie's value
* @param path cookie's path
* @param expiry cookie's expiry date
* @param domain cookie's domain name
* @param secure whether this cookie is secure
* @param driver the <code>WebDriver</code> with which to drive the browser
*/
def addCookie(name: String, value: String, path: String = "/", expiry: Date = null, domain: String = null, secure: Boolean = false)(implicit driver: WebDriver) {
add cookie (name, value, path, expiry, domain, secure)
}
/**
* Delete the cookie with the specified name from the web browser, throwing <code>TestFailedException</code> if the specified cookie does not exist.
*
* @param name cookie's name
* @param driver the <code>WebDriver</code> with which to drive the browser
*/
def deleteCookie(name: String)(implicit driver: WebDriver) {
delete cookie name
}
/**
* Delete all cookies in the current domain from web browser.
*
* @param driver the <code>WebDriver</code> with which to drive the browser
*/
def deleteAllCookies()(implicit driver: WebDriver) {
delete all cookies
}
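// A minimal usage sketch of the cookie DSL above (hypothetical names and values;
// assumes an implicit WebDriver is in scope and a page has been loaded):
//
//   add cookie ("theme", "dark")
//   cookie("theme").value // returns "dark"
//   delete cookie "theme" // throws TestFailedException if it does not exist
//   delete all cookies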
/**
* Check whether screenshot capture is supported by the implicit driver
*
* @param driver the <code>WebDriver</code> with which to drive the browser
* @return true if screenshot capture is supported, false otherwise
*/
def isScreenshotSupported(implicit driver: WebDriver): Boolean = driver.isInstanceOf[TakesScreenshot]
/**
* This object is part of ScalaTest's Selenium DSL. Please see the documentation for
* <a href="WebBrowser.html"><code>WebBrowser</code></a> for an overview of the Selenium DSL.
*
* <p>
* This object enables syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* capture
* ^
*
* capture to "MyScreenshot.png"
* ^
* </pre>
*/
object capture {
/**
* Capture a screenshot and save it under the specified name (if the file name does not end with .png, the extension is appended automatically) in the capture directory,
* which by default is the directory named by the <code>java.io.tmpdir</code> system property. You can change the capture directory by calling <code>setCaptureDir</code>.
*
* @param fileName the screenshot file name; if it does not end with .png, the extension is appended automatically
*/
def to(fileName: String)(implicit driver: WebDriver) {
driver match {
case takesScreenshot: TakesScreenshot =>
val tmpFile = takesScreenshot.getScreenshotAs(OutputType.FILE)
val outFile = new File(targetDir, if (fileName.toLowerCase.endsWith(".png")) fileName else fileName + ".png")
new FileOutputStream(outFile).getChannel.transferFrom(
new FileInputStream(tmpFile).getChannel, 0, Long.MaxValue)
case _ =>
throw new UnsupportedOperationException("Screen capture is not support by " + driver.getClass.getName)
}
}
/**
* Capture a screenshot and save it in the capture directory, which by default is the directory named by the <code>java.io.tmpdir</code> system property.
* You can change the capture directory by calling <code>setCaptureDir</code>.
*/
def apply()(implicit driver: WebDriver): File = {
driver match {
case takesScreenshot: TakesScreenshot =>
val tmpFile = takesScreenshot.getScreenshotAs(OutputType.FILE)
val fileName = tmpFile.getName
val outFile = new File(targetDir, if (fileName.toLowerCase.endsWith(".png")) fileName else fileName + ".png")
new FileOutputStream(outFile).getChannel.transferFrom(
new FileInputStream(tmpFile).getChannel, 0, Long.MaxValue)
outFile
case _ =>
throw new UnsupportedOperationException("Screen capture is not support by " + driver.getClass.getName)
}
}
}
/**
* Capture a screenshot and save it under the specified name (if the file name does not end with .png, the extension is appended automatically) in the capture directory,
* which by default is the directory named by the <code>java.io.tmpdir</code> system property. You can change the capture directory by calling <code>setCaptureDir</code>.
*
* @param fileName the screenshot file name; if it does not end with .png, the extension is appended automatically
*/
def captureTo(fileName: String)(implicit driver: WebDriver) {
capture to fileName
}
// Can get by with volatile, because the setting doesn't depend on the getting
@volatile private var targetDir = new File(System.getProperty("java.io.tmpdir"))
/**
* Set capture directory.
*
* @param targetDirPath the path of capture directory
*/
def setCaptureDir(targetDirPath: String) {
targetDir =
if (targetDirPath.endsWith(File.separator))
new File(targetDirPath)
else
new File(targetDirPath + File.separator)
if (!targetDir.exists)
targetDir.mkdirs()
}
/**
* Execute the given function; if an exception mixing in <code>ModifiableMessage</code> is thrown from it,
* a screenshot is captured automatically into the capture directory, which by default is the directory named by the
* <code>java.io.tmpdir</code> system property. You can change the capture directory by calling <code>setCaptureDir</code>.
*
* @param fun function to execute
*/
def withScreenshot(fun: => Unit)(implicit driver: WebDriver) {
try {
fun
}
catch {
case e: org.scalatest.exceptions.ModifiableMessage[_] =>
throw e.modifyMessage{ (currentMessage: Option[String]) =>
val captureFile: File = capture.apply()
currentMessage match {
case Some(currentMsg) =>
Some(currentMsg + "; screenshot captured in " + captureFile.getAbsolutePath)
case None =>
Some("screenshot captured in " + captureFile.getAbsolutePath)
}
}
}
}
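// A minimal usage sketch (hypothetical URL and assertion; assumes an implicit
// WebDriver and ScalaTest matchers are in scope):
//
//   withScreenshot {
//     go to "http://localhost:9000"
//     // A failed assertion throws a ModifiableMessage exception, which triggers a
//     // screenshot; the file path is appended to the failure message.
//     pageTitle should be ("Expected Title")
//   }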
/**
* Executes JavaScript in the context of the currently selected frame or window. The script fragment provided will be executed as the body of an anonymous function.
*
* <p>
* Within the script, you can use <code>document</code> to refer to the current document. Local variables will not be available once the script has finished executing, but global variables will.
* </p>
*
* <p>
* If the script returns a value (i.e., contains a <code>return</code> statement), the value will be converted as follows:
* </p>
*
* <ol>
* <li>For an HTML element, a WebElement is returned</li>
* <li>For a decimal number, a Double is returned</li>
* <li>For a non-decimal number, a Long is returned</li>
* <li>For a boolean, a Boolean is returned</li>
* <li>For an array, a List&lt;Object&gt; is returned, with each element converted following the rules above; nested lists are supported</li>
* <li>If the value is null or the script has no return value, null is returned</li>
* <li>For all other cases, a String is returned</li>
* </ol>
*
* @param script the JavaScript to execute
* @param args the arguments to the script, may be empty
* @return one of Boolean, Long, Double, String, List, WebElement, or null
*/
def executeScript[T](script: String, args: AnyRef*)(implicit driver: WebDriver): AnyRef =
driver match {
case executor: JavascriptExecutor => executor.executeScript(script, args.toArray : _*)
case _ => throw new UnsupportedOperationException("Web driver " + driver.getClass.getName + " does not support javascript execution.")
}
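// A minimal usage sketch (hypothetical scripts; assumes the implicit driver
// implements JavascriptExecutor, e.g. ChromeDriver):
//
//   val title = executeScript("return document.title;") // returns a String
//   val sum = executeScript("return arguments[0] + arguments[1];",
//     Int.box(1), Int.box(2)) // script arguments are exposed via the `arguments` array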
/**
* Executes an asynchronous piece of JavaScript in the context of the currently selected frame or window. Unlike executing synchronous JavaScript,
* scripts executed with this method must explicitly signal they are finished by invoking the provided callback. This callback is always injected into
* the executed function as the last argument.
*
* <p>
* The first argument passed to the callback function will be used as the script's result. This value will be handled as follows:
* </p>
*
* <ol>
* <li>For an HTML element, a WebElement is returned</li>
* <li>For a number, a Long is returned</li>
* <li>For a boolean, a Boolean is returned</li>
* <li>For an array, a List&lt;Object&gt; is returned, with each element converted following the rules above; nested lists are supported</li>
* <li>If the value is null or there is no return value, null is returned</li>
* <li>For all other cases, a String is returned</li>
* </ol>
*
* <p>
* Script arguments must be a number, a boolean, a String, a WebElement, or a List of any combination of the above. An exception will
* be thrown if the arguments do not meet these criteria. The arguments will be made available to the JavaScript via the <code>arguments</code> variable.
* </p>
*
* @param script the JavaScript to execute
* @param args the arguments to the script, may be empty
* @return one of Boolean, Long, String, List, WebElement, or null
*/
def executeAsyncScript(script: String, args: AnyRef*)(implicit driver: WebDriver): AnyRef =
driver match {
case executor: JavascriptExecutor => executor.executeAsyncScript(script, args.toArray : _*)
case _ => throw new UnsupportedOperationException("Web driver " + driver.getClass.getName + " does not support javascript execution.")
}
/**
* Sets the amount of time to wait for an asynchronous script to finish execution before throwing an exception.
*
* @param timeout the amount of time to wait for an asynchronous script to finish execution before throwing exception
*/
def setScriptTimeout(timeout: Span)(implicit driver: WebDriver) {
driver.manage().timeouts().setScriptTimeout(timeout.totalNanos, TimeUnit.NANOSECONDS)
}
private def getStackDepthFun(fileName: String, methodName: String, adjustment: Int = 0): (StackDepthException => Int) = { sde =>
getStackDepth(sde.getStackTrace, fileName, methodName, adjustment)
}
private def getStackDepth(stackTrace: Array[StackTraceElement], fileName: String, methodName: String, adjustment: Int = 0) = {
val stackTraceList = stackTrace.toList
val fileNameIsDesiredList: List[Boolean] =
for (element <- stackTraceList) yield
element.getFileName == fileName // such as "Checkers.scala"
val methodNameIsDesiredList: List[Boolean] =
for (element <- stackTraceList) yield
element.getMethodName == methodName // such as "check"
// For element 0, the previous file name was not desired, because there is no previous
// one, so you start with false. For element 1, it depends on whether element 0 of the stack trace
// had the desired file name, and so forth.
val previousFileNameIsDesiredList: List[Boolean] = false :: (fileNameIsDesiredList.dropRight(1))
// Zip these two related lists together. They now have two boolean values together, when both
// are true, that's a stack trace element that should be included in the stack depth.
val zipped1 = methodNameIsDesiredList zip previousFileNameIsDesiredList
val methodNameAndPreviousFileNameAreDesiredList: List[Boolean] =
for ((methodNameIsDesired, previousFileNameIsDesired) <- zipped1) yield
methodNameIsDesired && previousFileNameIsDesired
// Zip the two lists together, that when one or the other is true is an include.
val zipped2 = fileNameIsDesiredList zip methodNameAndPreviousFileNameAreDesiredList
val includeInStackDepthList: List[Boolean] =
for ((fileNameIsDesired, methodNameAndPreviousFileNameAreDesired) <- zipped2) yield
fileNameIsDesired || methodNameAndPreviousFileNameAreDesired
val includeDepth = includeInStackDepthList.takeWhile(include => include).length
val depth = if (includeDepth == 0 && stackTrace(0).getFileName != fileName && stackTrace(0).getMethodName != methodName)
stackTraceList.takeWhile(st => st.getFileName != fileName || st.getMethodName != methodName).length
else
includeDepth
depth + adjustment
}
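// A worked example of the include computation above, with a hypothetical
// three-frame stack trace and fileName = "WebBrowser.scala", methodName = "submit":
//
//   frame:                 0                  1                  2
//   file name:      WebBrowser.scala   WebBrowser.scala   MySpec.scala
//   method name:        submit             helper             test
//
//   fileNameIsDesiredList                       = List(true,  true,  false)
//   methodNameIsDesiredList                     = List(true,  false, false)
//   previousFileNameIsDesiredList               = List(false, true,  true)
//   methodNameAndPreviousFileNameAreDesiredList = List(false, false, false)
//   includeInStackDepthList                     = List(true,  true,  false)
//
// takeWhile yields an includeDepth of 2, which (plus any adjustment) points at
// frame 2, the first frame belonging to the caller's own source file.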
// Clears the text field or area, then presses the passed keys
/**
* Clears the current active <code>TextField</code> or <code>TextArea</code>, and presses the passed keys.
* Throws <code>TestFailedException</code> if the current active element is not a <code>TextField</code> or <code>TextArea</code>.
*
* @param value keys to press in the current active <code>TextField</code> or <code>TextArea</code>
*/
def enter(value: String)(implicit driver: WebDriver) {
val ae = switch to activeElement
ae match {
case tf: TextField => tf.value = value
case ta: TextArea => ta.value = value
case _ =>
throw new TestFailedException(
sde => Some("Currently selected element is neither a text field nor a text area"),
None,
getStackDepthFun("WebBrowser.scala", "switch", 1)
)
}
}
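// A minimal usage sketch (hypothetical element ID; assumes an implicit WebDriver
// is in scope and the page contains a text field with that ID):
//
//   click on "q" // give the text field focus so it becomes the active element
//   enter("ScalaTest") // clears the field and types the given keys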
/**
* Presses the passed keys in the current active element.
*
* @param value keys to press in the current active element
*/
def pressKeys(value: String)(implicit driver: WebDriver) {
val ae: WebElement = driver.switchTo.activeElement
ae.sendKeys(value)
}
}
/**
* Companion object that facilitates the importing of <code>WebBrowser</code> members as
* an alternative to mixing it in. One use case is to import <code>WebBrowser</code> members so you can use
* them in the Scala interpreter.
*/
object WebBrowser extends WebBrowser
/**
* <code>WebBrowser</code> subtrait that defines an implicit <code>WebDriver</code> for HTMLUnit (an <code>org.openqa.selenium.htmlunit.HtmlUnitDriver</code>), with JavaScript
* enabled by default.
*
* <p>
* Note: You can disable JavaScript with:
* </p>
*
* <pre>
* webDriver.setJavascriptEnabled(false)
* </pre>
*/
trait HtmlUnit extends WebBrowser with ScreenshotCapturer {
/**
* The implicit <code>WebDriver</code> for HTMLUnit (an <code>org.openqa.selenium.htmlunit.HtmlUnitDriver</code>), with JavaScript
* enabled by default.
*
* <p>
* Note: You can disable JavaScript with:
* </p>
*
* <pre>
* webDriver.setJavascriptEnabled(false)
* </pre>
*/
implicit val webDriver = new HtmlUnitDriver()
webDriver.setJavascriptEnabled(true)
/**
* Captures a screenshot and saves it as a file in the specified directory.
*/
def captureScreenshot(directory: String) {
capture to directory
}
}
/**
* Companion object that facilitates the importing of <code>HtmlUnit</code> members as
* an alternative to mixing it in. One use case is to import <code>HtmlUnit</code> members so you can use
* them in the Scala interpreter.
*/
object HtmlUnit extends HtmlUnit
/**
* <code>WebBrowser</code> subtrait that defines an implicit <code>WebDriver</code> for Firefox (an <code>org.openqa.selenium.firefox.FirefoxDriver</code>).
*
* <p>
* The <code>FirefoxDriver</code> uses the <code>FirefoxProfile</code> defined as <code>firefoxProfile</code>. By default this is just a <code>new FirefoxProfile</code>.
* You can mutate this object to modify the profile, or override <code>firefoxProfile</code>.
* </p>
*/
trait Firefox extends WebBrowser with ScreenshotCapturer {
/**
* The <code>FirefoxProfile</code> passed to the constructor of the <code>FirefoxDriver</code> returned by <code>webDriver</code>.
*
* <p>
* The <code>FirefoxDriver</code> uses the <code>FirefoxProfile</code> defined as <code>firefoxProfile</code>. By default this is just a <code>new FirefoxProfile</code>.
* You can mutate this object to modify the profile, or override <code>firefoxProfile</code>.
* </p>
*/
val firefoxProfile = new FirefoxProfile()
/**
* The implicit <code>WebDriver</code> for Firefox (an <code>org.openqa.selenium.firefox.FirefoxDriver</code>), constructed with the
* <code>firefoxProfile</code> defined above.
*
* <p>
* The <code>FirefoxDriver</code> uses the <code>FirefoxProfile</code> defined as <code>firefoxProfile</code>. By default this is just a <code>new FirefoxProfile</code>.
* You can mutate this object to modify the profile, or override <code>firefoxProfile</code>.
* </p>
*/
implicit val webDriver = new FirefoxDriver(firefoxProfile)
/**
* Captures a screenshot and saves it as a file in the specified directory.
*/
def captureScreenshot(directory: String) {
capture to directory
}
}
/**
* Companion object that facilitates the importing of <code>Firefox</code> members as
* an alternative to mixing it in. One use case is to import <code>Firefox</code> members so you can use
* them in the Scala interpreter.
*/
object Firefox extends Firefox
/**
* <code>WebBrowser</code> subtrait that defines an implicit <code>WebDriver</code> for Safari (an <code>org.openqa.selenium.safari.SafariDriver</code>).
*/
trait Safari extends WebBrowser with ScreenshotCapturer {
/**
* The implicit <code>WebDriver</code> for Safari (an <code>org.openqa.selenium.safari.SafariDriver</code>).
*/
implicit val webDriver = new SafariDriver()
/**
* Captures a screenshot and saves it as a file in the specified directory.
*/
def captureScreenshot(directory: String) {
capture to directory
}
}
/**
* Companion object that facilitates the importing of <code>Safari</code> members as
* an alternative to mixing it in. One use case is to import <code>Safari</code> members so you can use
* them in the Scala interpreter.
*/
object Safari extends Safari
/**
* <code>WebBrowser</code> subtrait that defines an implicit <code>WebDriver</code> for Chrome (an <code>org.openqa.selenium.chrome.ChromeDriver</code>).
*/
trait Chrome extends WebBrowser with ScreenshotCapturer {
/**
* The implicit <code>WebDriver</code> for Chrome (an <code>org.openqa.selenium.chrome.ChromeDriver</code>).
*/
implicit val webDriver = new ChromeDriver()
/**
* Captures a screenshot and saves it as a file in the specified directory.
*/
def captureScreenshot(directory: String) {
capture to directory
}
}
/**
* Companion object that facilitates the importing of <code>Chrome</code> members as
* an alternative to mixing it in. One use case is to import <code>Chrome</code> members so you can use
* them in the Scala interpreter.
*/
object Chrome extends Chrome
/**
* <code>WebBrowser</code> subtrait that defines an implicit <code>WebDriver</code> for Internet Explorer (an <code>org.openqa.selenium.ie.InternetExplorerDriver</code>).
*/
trait InternetExplorer extends WebBrowser with ScreenshotCapturer {
/**
* The implicit <code>WebDriver</code> for Internet Explorer (an <code>org.openqa.selenium.ie.InternetExplorerDriver</code>).
*/
implicit val webDriver = new InternetExplorerDriver()
/**
* Captures a screenshot and saves it as a file in the specified directory.
*/
def captureScreenshot(directory: String) {
capture to directory
}
}
/**
* Companion object that facilitates the importing of <code>InternetExplorer</code> members as
* an alternative to mixing it in. One use case is to import <code>InternetExplorer</code> members so you can use
* them in the Scala interpreter.
*/
object InternetExplorer extends InternetExplorer
/*
* <p>
* If you mix in <a href="../ScreenshotOnFailure.html"><code>ScreenshotOnFailure</code></a>, ScalaTest will capture a screenshot and store it to either the system temp directory
* or a directory you choose, and send the filename to the report, associated with the failed test. The <code>ScreenshotOnFailure</code> trait requires that it be
* mixed into a <a href="../ScreenshotCapturer.html"><code>ScreenshotCapturer</code></a>, which trait <code>WebBrowser</code> does not extend. To satisfy this
* requirement, you can extend one of <code>WebBrowser</code>'s subtraits, such as:
* </p>
*
* <pre class="stHighlight">
* class WebAppSpec extends Firefox with ScreenshotOnFailure {
* // ...
* }
* </pre>
*
*/
| vivosys/scalatest | src/main/scala/org/scalatest/selenium/WebBrowser.scala | Scala | apache-2.0 | 133,935 |
/*
* Copyright (c) <2013>, Amanj Sherwany <http://www.amanj.me>
* All rights reserved.
* */
package ch.usi.inf.l3
import scala.reflect.api.Trees
import scala.reflect.api.Types
import scala.tools.nsc.Global
package object mina {
/**
* Two identity functions that tell the plugin to treat the passed
* expression as a compile-time (CT) or runtime (RT) value.
*/
def CT[T](expr: => T) = expr
def RT[T](expr: => T) = expr
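// A minimal usage sketch (hypothetical expressions): without the plugin these are
// plain identity calls; with the mina plugin they mark staging intent.
//
//   val staged = CT(1 + 2) // treat 1 + 2 as a compile-time (CT) value
//   val dynamic = RT(System.nanoTime()) // treat this as a runtime (RT) value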
}
| amanjpro/mina | src/main/scala/ch/usi/inf/l3/mina/package.scala | Scala | bsd-3-clause | 420 |
package example
import scala.annotation._
object Recitation2 {
def flip(f: (Int, Double) => Int): (Double, Int) => Int = (x, y) => f(y, x)
val id: Int => Int = x => x
def compose(f: Int => Int, g: Int => Int): Int => Int = x => f(g(x))
def repeated(f: Int => Int, n: Int): Int => Int = {
if (n == 0) x => x
else x => f(repeated(f, n - 1)(x))
}
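// Example evaluations of the combinators above (hypothetical names, for illustration):
val composeExample: Int = compose(x => x * 2, x => x + 1)(3) // == 8, since f(g(3)) = (3 + 1) * 2
val repeatedExample: Int = repeated(x => x + 1, 3)(0) // == 3, f applied three times to 0
val flipExample: Int = flip((i, d) => i + d.toInt)(2.0, 1) // == 3, arguments swapped before calling f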
}
| rusucosmin/courses | fp/recitation/src/main/scala/example/recitation2.scala | Scala | mit | 365 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.benchmark
import java.io.File
import scala.util.Random
import org.apache.spark.SparkConf
import org.apache.spark.benchmark.Benchmark
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.sql.functions.{monotonically_increasing_id, timestamp_seconds}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.SQLConf.ParquetOutputTimestampType
import org.apache.spark.sql.types.{ByteType, Decimal, DecimalType, TimestampType}
/**
* Benchmark to measure read performance with Filter pushdown.
* To run this benchmark:
* {{{
* 1. without sbt: bin/spark-submit --class <this class> <spark sql test jar>
* 2. build/sbt "sql/test:runMain <this class>"
* 3. generate result: SPARK_GENERATE_BENCHMARK_FILES=1 build/sbt "sql/test:runMain <this class>"
* Results will be written to "benchmarks/FilterPushdownBenchmark-results.txt".
* }}}
*/
object FilterPushdownBenchmark extends SqlBasedBenchmark {
override def getSparkSession: SparkSession = {
val conf = new SparkConf()
.setAppName(this.getClass.getSimpleName)
// Since `spark.master` always exists, override this value
.set("spark.master", "local[1]")
.setIfMissing("spark.driver.memory", "3g")
.setIfMissing("spark.executor.memory", "3g")
.setIfMissing("orc.compression", "snappy")
.setIfMissing("spark.sql.parquet.compression.codec", "snappy")
SparkSession.builder().config(conf).getOrCreate()
}
private val numRows = 1024 * 1024 * 15
private val width = 5
private val mid = numRows / 2
// For Parquet/ORC, we will use the same value for block size and compression size
private val blockSize = org.apache.parquet.hadoop.ParquetWriter.DEFAULT_PAGE_SIZE
def withTempTable(tableNames: String*)(f: => Unit): Unit = {
try f finally tableNames.foreach(spark.catalog.dropTempView)
}
private def prepareTable(
dir: File, numRows: Int, width: Int, useStringForValue: Boolean): Unit = {
import spark.implicits._
val selectExpr = (1 to width).map(i => s"CAST(value AS STRING) c$i")
val valueCol = if (useStringForValue) {
monotonically_increasing_id().cast("string")
} else {
monotonically_increasing_id()
}
val df = spark.range(numRows).map(_ => Random.nextLong).selectExpr(selectExpr: _*)
.withColumn("value", valueCol)
.sort("value")
saveAsTable(df, dir)
}
private def prepareStringDictTable(
dir: File, numRows: Int, numDistinctValues: Int, width: Int): Unit = {
val selectExpr = (0 to width).map {
case 0 => s"CAST(id % $numDistinctValues AS STRING) AS value"
case i => s"CAST(rand() AS STRING) c$i"
}
val df = spark.range(numRows).selectExpr(selectExpr: _*).sort("value")
saveAsTable(df, dir, true)
}
private def saveAsTable(df: DataFrame, dir: File, useDictionary: Boolean = false): Unit = {
val orcPath = dir.getCanonicalPath + "/orc"
val parquetPath = dir.getCanonicalPath + "/parquet"
df.write.mode("overwrite")
.option("orc.dictionary.key.threshold", if (useDictionary) 1.0 else 0.8)
.option("orc.compress.size", blockSize)
.option("orc.stripe.size", blockSize).orc(orcPath)
spark.read.orc(orcPath).createOrReplaceTempView("orcTable")
df.write.mode("overwrite")
.option("parquet.block.size", blockSize).parquet(parquetPath)
spark.read.parquet(parquetPath).createOrReplaceTempView("parquetTable")
}
def filterPushDownBenchmark(
values: Int,
title: String,
whereExpr: String,
selectExpr: String = "*"): Unit = {
val benchmark = new Benchmark(title, values, minNumIters = 5, output = output)
Seq(false, true).foreach { pushDownEnabled =>
val name = s"Parquet Vectorized ${if (pushDownEnabled) s"(Pushdown)" else ""}"
benchmark.addCase(name) { _ =>
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> s"$pushDownEnabled") {
spark.sql(s"SELECT $selectExpr FROM parquetTable WHERE $whereExpr").noop()
}
}
}
Seq(false, true).foreach { pushDownEnabled =>
val name = s"Native ORC Vectorized ${if (pushDownEnabled) s"(Pushdown)" else ""}"
benchmark.addCase(name) { _ =>
withSQLConf(SQLConf.ORC_FILTER_PUSHDOWN_ENABLED.key -> s"$pushDownEnabled") {
spark.sql(s"SELECT $selectExpr FROM orcTable WHERE $whereExpr").noop()
}
}
}
benchmark.run()
}
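// A hypothetical standalone invocation of the helper above (assumes prepareTable
// has already registered the "orcTable" and "parquetTable" temp views):
//
//   filterPushDownBenchmark(numRows, "Select 1 int row (value = 42)", "value = 42")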
private def runIntBenchmark(numRows: Int, width: Int, mid: Int): Unit = {
Seq("value IS NULL", s"$mid < value AND value < $mid").foreach { whereExpr =>
val title = s"Select 0 int row ($whereExpr)".replace("value AND value", "value")
filterPushDownBenchmark(numRows, title, whereExpr)
}
Seq(
s"value = $mid",
s"value <=> $mid",
s"$mid <= value AND value <= $mid",
s"${mid - 1} < value AND value < ${mid + 1}"
).foreach { whereExpr =>
val title = s"Select 1 int row ($whereExpr)".replace("value AND value", "value")
filterPushDownBenchmark(numRows, title, whereExpr)
}
val selectExpr = (1 to width).map(i => s"MAX(c$i)").mkString("", ",", ", MAX(value)")
Seq(10, 50, 90).foreach { percent =>
filterPushDownBenchmark(
numRows,
s"Select $percent% int rows (value < ${numRows * percent / 100})",
s"value < ${numRows * percent / 100}",
selectExpr
)
}
Seq("value IS NOT NULL", "value > -1", "value != -1").foreach { whereExpr =>
filterPushDownBenchmark(
numRows,
s"Select all int rows ($whereExpr)",
whereExpr,
selectExpr)
}
}
private def runStringBenchmark(
numRows: Int, width: Int, searchValue: Int, colType: String): Unit = {
Seq("value IS NULL", s"'$searchValue' < value AND value < '$searchValue'")
.foreach { whereExpr =>
val title = s"Select 0 $colType row ($whereExpr)".replace("value AND value", "value")
filterPushDownBenchmark(numRows, title, whereExpr)
}
Seq(
s"value = '$searchValue'",
s"value <=> '$searchValue'",
s"'$searchValue' <= value AND value <= '$searchValue'"
).foreach { whereExpr =>
val title = s"Select 1 $colType row ($whereExpr)".replace("value AND value", "value")
filterPushDownBenchmark(numRows, title, whereExpr)
}
val selectExpr = (1 to width).map(i => s"MAX(c$i)").mkString("", ",", ", MAX(value)")
Seq("value IS NOT NULL").foreach { whereExpr =>
filterPushDownBenchmark(
numRows,
s"Select all $colType rows ($whereExpr)",
whereExpr,
selectExpr)
}
}
override def runBenchmarkSuite(mainArgs: Array[String]): Unit = {
runBenchmark("Pushdown for many distinct value case") {
withTempPath { dir =>
withTempTable("orcTable", "parquetTable") {
Seq(true, false).foreach { useStringForValue =>
prepareTable(dir, numRows, width, useStringForValue)
if (useStringForValue) {
runStringBenchmark(numRows, width, mid, "string")
} else {
runIntBenchmark(numRows, width, mid)
}
}
}
}
}
runBenchmark("Pushdown for few distinct value case (use dictionary encoding)") {
withTempPath { dir =>
val numDistinctValues = 200
withTempTable("orcTable", "parquetTable") {
prepareStringDictTable(dir, numRows, numDistinctValues, width)
runStringBenchmark(numRows, width, numDistinctValues / 2, "distinct string")
}
}
}
runBenchmark("Pushdown benchmark for StringStartsWith") {
withTempPath { dir =>
withTempTable("orcTable", "parquetTable") {
prepareTable(dir, numRows, width, true)
Seq(
"value like '10%'",
"value like '1000%'",
s"value like '${mid.toString.substring(0, mid.toString.length - 1)}%'"
).foreach { whereExpr =>
val title = s"StringStartsWith filter: ($whereExpr)"
filterPushDownBenchmark(numRows, title, whereExpr)
}
}
}
}
runBenchmark(s"Pushdown benchmark for ${DecimalType.simpleString}") {
withTempPath { dir =>
Seq(
s"decimal(${Decimal.MAX_INT_DIGITS}, 2)",
s"decimal(${Decimal.MAX_LONG_DIGITS}, 2)",
s"decimal(${DecimalType.MAX_PRECISION}, 2)"
).foreach { dt =>
val columns = (1 to width).map(i => s"CAST(id AS string) c$i")
val valueCol = if (dt.equalsIgnoreCase(s"decimal(${Decimal.MAX_INT_DIGITS}, 2)")) {
monotonically_increasing_id() % 9999999
} else {
monotonically_increasing_id()
}
val df = spark.range(numRows)
.selectExpr(columns: _*).withColumn("value", valueCol.cast(dt))
withTempTable("orcTable", "parquetTable") {
saveAsTable(df, dir)
Seq(s"value = $mid").foreach { whereExpr =>
val title = s"Select 1 $dt row ($whereExpr)".replace("value AND value", "value")
filterPushDownBenchmark(numRows, title, whereExpr)
}
val selectExpr = (1 to width).map(i => s"MAX(c$i)").mkString("", ",", ", MAX(value)")
Seq(10, 50, 90).foreach { percent =>
filterPushDownBenchmark(
numRows,
s"Select $percent% $dt rows (value < ${numRows * percent / 100})",
s"value < ${numRows * percent / 100}",
selectExpr
)
}
}
}
}
}
runBenchmark("Pushdown benchmark for InSet -> InFilters") {
withTempPath { dir =>
withTempTable("orcTable", "parquetTable") {
prepareTable(dir, numRows, width, false)
Seq(5, 10, 50, 100).foreach { count =>
Seq(10, 50, 90).foreach { distribution =>
val filter =
Range(0, count).map(r => scala.util.Random.nextInt(numRows * distribution / 100))
val whereExpr = s"value in(${filter.mkString(",")})"
val title = s"InSet -> InFilters (values count: $count, distribution: $distribution)"
filterPushDownBenchmark(numRows, title, whereExpr)
}
}
}
}
}
runBenchmark(s"Pushdown benchmark for ${ByteType.simpleString}") {
withTempPath { dir =>
val columns = (1 to width).map(i => s"CAST(id AS string) c$i")
val df = spark.range(numRows).selectExpr(columns: _*)
.withColumn("value", (monotonically_increasing_id() % Byte.MaxValue).cast(ByteType))
.orderBy("value")
withTempTable("orcTable", "parquetTable") {
saveAsTable(df, dir)
Seq(s"value = CAST(${Byte.MaxValue / 2} AS ${ByteType.simpleString})")
.foreach { whereExpr =>
val title = s"Select 1 ${ByteType.simpleString} row ($whereExpr)"
.replace("value AND value", "value")
filterPushDownBenchmark(numRows, title, whereExpr)
}
val selectExpr = (1 to width).map(i => s"MAX(c$i)").mkString("", ",", ", MAX(value)")
Seq(10, 50, 90).foreach { percent =>
filterPushDownBenchmark(
numRows,
s"Select $percent% ${ByteType.simpleString} rows " +
s"(value < CAST(${Byte.MaxValue * percent / 100} AS ${ByteType.simpleString}))",
s"value < CAST(${Byte.MaxValue * percent / 100} AS ${ByteType.simpleString})",
selectExpr
)
}
}
}
}
runBenchmark(s"Pushdown benchmark for Timestamp") {
withTempPath { dir =>
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_TIMESTAMP_ENABLED.key -> true.toString) {
ParquetOutputTimestampType.values.toSeq.map(_.toString).foreach { fileType =>
withSQLConf(SQLConf.PARQUET_OUTPUT_TIMESTAMP_TYPE.key -> fileType) {
val columns = (1 to width).map(i => s"CAST(id AS string) c$i")
val df = spark.range(numRows).selectExpr(columns: _*)
.withColumn("value", timestamp_seconds(monotonically_increasing_id()))
withTempTable("orcTable", "parquetTable") {
saveAsTable(df, dir)
Seq(s"value = timestamp_seconds($mid)").foreach { whereExpr =>
val title = s"Select 1 timestamp stored as $fileType row ($whereExpr)"
.replace("value AND value", "value")
filterPushDownBenchmark(numRows, title, whereExpr)
}
val selectExpr = (1 to width)
.map(i => s"MAX(c$i)").mkString("", ",", ", MAX(value)")
Seq(10, 50, 90).foreach { percent =>
filterPushDownBenchmark(
numRows,
s"Select $percent% timestamp stored as $fileType rows " +
s"(value < timestamp_seconds(${numRows * percent / 100}))",
s"value < timestamp_seconds(${numRows * percent / 100})",
selectExpr
)
}
}
}
}
}
}
}
runBenchmark(s"Pushdown benchmark with many filters") {
val numRows = 1
val width = 500
withTempPath { dir =>
val columns = (1 to width).map(i => s"id c$i")
val df = spark.range(1).selectExpr(columns: _*)
withTempTable("orcTable", "parquetTable") {
saveAsTable(df, dir)
Seq(1, 250, 500).foreach { numFilter =>
val whereExpr = (1 to numFilter).map(i => s"c$i = 0").mkString(" and ")
// Note: InferFiltersFromConstraints will add more filters to the given filters
filterPushDownBenchmark(numRows, s"Select 1 row with $numFilter filters", whereExpr)
}
}
}
}
}
}
| dbtsai/spark | sql/core/src/test/scala/org/apache/spark/sql/execution/benchmark/FilterPushdownBenchmark.scala | Scala | apache-2.0 | 14,752 |
package com.whitepages.cloudmanager.action
import com.whitepages.cloudmanager.client.{SolrCloudVersion, SolrRequestHelpers}
import com.whitepages.cloudmanager.state.{SolrReplica, SolrState, ClusterManager}
import org.apache.solr.common.params.ModifiableSolrParams
import org.apache.solr.common.params.CollectionParams.CollectionAction
case class DeleteReplica(collection: String, slice: String, node: String, safetyFactor: Int = 1) extends Action {
private val deleteReplicaCleanupFix = SolrCloudVersion(4,10)
override val preConditions: List[StateCondition] = List(
StateCondition("replica exists on node", Conditions.sliceIncludesNode(collection, slice, node)),
StateCondition(s"enough replicas to keep $safetyFactor alive", (state) => {
val safetyTest = if (Conditions.activeSliceOnNode(collection, slice, node)(state)) {
// we're deleting an active node, so make sure we have active spares
(activeNodeCount: Int) => activeNodeCount > safetyFactor
}
else {
// we're deleting an inactive node, so this operation doesn't change the active node count,
// but we still don't want to risk deleting the last replica
(activeNodeCount: Int) => activeNodeCount >= 1
}
Conditions.liveReplicaCount(collection, slice).andThen(safetyTest)(state)
})
)
override def execute(clusterManager: ClusterManager): Boolean = {
val replica = clusterManager.currentState.replicasFor(collection, slice).find(_.node == node)
val replicaName = replica.map(_.replicaName)
if (replicaName.isDefined) {
if (clusterManager.clusterVersion < deleteReplicaCleanupFix)
comment.warn("WARNING: DeleteReplica does NOT remove the files from disk until 4.10. See SOLR-6072.")
val params = new ModifiableSolrParams
params.set("action", CollectionAction.DELETEREPLICA.toString)
params.set("collection", collection)
params.set("shard", slice)
params.set("replica", replicaName.get)
val submitSuccess = SolrRequestHelpers.submitRequest(clusterManager.client, params)
// If the replica we're deleting is down right now, and the node is too, the request for deletion may
// return an error trying to forward the delete to the node hosting that replica.
// In that case, we simply have to rely on the postCondition check below to validate the delete actually worked.
submitSuccess || !replica.exists(_.active)
}
else {
comment.warn("Couldn't figure out the replica name from the node")
false
}
}
override val postConditions: List[StateCondition] = List(
StateCondition("replica no longer exists", Conditions.sliceIncludesNode(collection, slice, node).andThen(!_))
)
override def toString = s"DeleteReplica: collection: $collection, slice: $slice, node: ${SolrReplica.hostName(node)}, safetyFactor: $safetyFactor"
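// A minimal usage sketch (hypothetical collection, slice, and node names):
//
//   DeleteReplica("collection1", "shard1", "host1:8983_solr", safetyFactor = 2)
//     .execute(clusterManager) // preConditions guard against removing the last live replica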
}
| randomstatistic/solrcloud_manager | src/main/scala/com/whitepages/cloudmanager/action/DeleteReplica.scala | Scala | apache-2.0 | 2,891 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import org.apache.spark.TestUtils.assertSpilled
import org.apache.spark.sql.{AnalysisException, QueryTest, Row}
import org.apache.spark.sql.internal.SQLConf.{WINDOW_EXEC_BUFFER_IN_MEMORY_THRESHOLD, WINDOW_EXEC_BUFFER_SPILL_THRESHOLD}
import org.apache.spark.sql.test.SharedSparkSession
case class WindowData(month: Int, area: String, product: Int)
/**
* Test suite for SQL window functions.
*/
class SQLWindowFunctionSuite extends QueryTest with SharedSparkSession {
import testImplicits._
test("window function: udaf with aggregate expression") {
val data = Seq(
WindowData(1, "a", 5),
WindowData(2, "a", 6),
WindowData(3, "b", 7),
WindowData(4, "b", 8),
WindowData(5, "c", 9),
WindowData(6, "c", 10)
)
withTempView("windowData") {
sparkContext.parallelize(data).toDF().createOrReplaceTempView("windowData")
checkAnswer(
sql(
"""
|select area, sum(product), sum(sum(product)) over (partition by area)
|from windowData group by month, area
""".stripMargin),
Seq(
("a", 5, 11),
("a", 6, 11),
("b", 7, 15),
("b", 8, 15),
("c", 9, 19),
("c", 10, 19)
).map(i => Row(i._1, i._2, i._3)))
checkAnswer(
sql(
"""
|select area, sum(product) - 1, sum(sum(product)) over (partition by area)
|from windowData group by month, area
""".stripMargin),
Seq(
("a", 4, 11),
("a", 5, 11),
("b", 6, 15),
("b", 7, 15),
("c", 8, 19),
("c", 9, 19)
).map(i => Row(i._1, i._2, i._3)))
checkAnswer(
sql(
"""
|select area, sum(product), sum(product) / sum(sum(product)) over (partition by area)
|from windowData group by month, area
""".stripMargin),
Seq(
("a", 5, 5d/11),
("a", 6, 6d/11),
("b", 7, 7d/15),
("b", 8, 8d/15),
("c", 10, 10d/19),
("c", 9, 9d/19)
).map(i => Row(i._1, i._2, i._3)))
checkAnswer(
sql(
"""
|select area, sum(product), sum(product) / sum(sum(product) - 1) over
|(partition by area)
|from windowData group by month, area
""".stripMargin),
Seq(
("a", 5, 5d/9),
("a", 6, 6d/9),
("b", 7, 7d/13),
("b", 8, 8d/13),
("c", 10, 10d/17),
("c", 9, 9d/17)
).map(i => Row(i._1, i._2, i._3)))
}
}
test("window function: refer column in inner select block") {
val data = Seq(
WindowData(1, "a", 5),
WindowData(2, "a", 6),
WindowData(3, "b", 7),
WindowData(4, "b", 8),
WindowData(5, "c", 9),
WindowData(6, "c", 10)
)
withTempView("windowData") {
sparkContext.parallelize(data).toDF().createOrReplaceTempView("windowData")
checkAnswer(
sql(
"""
|select area, rank() over (partition by area order by tmp.month) + tmp.tmp1 as c1
|from (select month, area, product, 1 as tmp1 from windowData) tmp
""".stripMargin),
Seq(
("a", 2),
("a", 3),
("b", 2),
("b", 3),
("c", 2),
("c", 3)
).map(i => Row(i._1, i._2)))
}
}
test("window function: partition and order expressions") {
val data = Seq(
WindowData(1, "a", 5),
WindowData(2, "a", 6),
WindowData(3, "b", 7),
WindowData(4, "b", 8),
WindowData(5, "c", 9),
WindowData(6, "c", 10)
)
withTempView("windowData") {
sparkContext.parallelize(data).toDF().createOrReplaceTempView("windowData")
checkAnswer(
sql(
"""
|select month, area, product, sum(product + 1) over (partition by 1 order by 2)
|from windowData
""".stripMargin),
Seq(
(1, "a", 5, 51),
(2, "a", 6, 51),
(3, "b", 7, 51),
(4, "b", 8, 51),
(5, "c", 9, 51),
(6, "c", 10, 51)
).map(i => Row(i._1, i._2, i._3, i._4)))
checkAnswer(
sql(
"""
|select month, area, product, sum(product)
|over (partition by month % 2 order by 10 - product)
|from windowData
""".stripMargin),
Seq(
(1, "a", 5, 21),
(2, "a", 6, 24),
(3, "b", 7, 16),
(4, "b", 8, 18),
(5, "c", 9, 9),
(6, "c", 10, 10)
).map(i => Row(i._1, i._2, i._3, i._4)))
}
}
test("window function: distinct should not be silently ignored") {
val data = Seq(
WindowData(1, "a", 5),
WindowData(2, "a", 6),
WindowData(3, "b", 7),
WindowData(4, "b", 8),
WindowData(5, "c", 9),
WindowData(6, "c", 10)
)
withTempView("windowData") {
sparkContext.parallelize(data).toDF().createOrReplaceTempView("windowData")
val e = intercept[AnalysisException] {
sql(
"""
|select month, area, product, sum(distinct product + 1) over (partition by 1 order by 2)
|from windowData
""".stripMargin)
}
assert(e.getMessage.contains("Distinct window functions are not supported"))
}
}
test("window function: expressions in arguments of a window functions") {
val data = Seq(
WindowData(1, "a", 5),
WindowData(2, "a", 6),
WindowData(3, "b", 7),
WindowData(4, "b", 8),
WindowData(5, "c", 9),
WindowData(6, "c", 10)
)
withTempView("windowData") {
sparkContext.parallelize(data).toDF().createOrReplaceTempView("windowData")
checkAnswer(
sql(
"""
|select month, area, month % 2,
|lag(product, 1 + 1, product) over (partition by month % 2 order by area)
|from windowData
""".stripMargin),
Seq(
(1, "a", 1, 5),
(2, "a", 0, 6),
(3, "b", 1, 7),
(4, "b", 0, 8),
(5, "c", 1, 5),
(6, "c", 0, 6)
).map(i => Row(i._1, i._2, i._3, i._4)))
}
}
test("window function: Sorting columns are not in Project") {
val data = Seq(
WindowData(1, "d", 10),
WindowData(2, "a", 6),
WindowData(3, "b", 7),
WindowData(4, "b", 8),
WindowData(5, "c", 9),
WindowData(6, "c", 11)
)
withTempView("windowData") {
sparkContext.parallelize(data).toDF().createOrReplaceTempView("windowData")
checkAnswer(
sql("select month, product, sum(product + 1) over() from windowData order by area"),
Seq(
(2, 6, 57),
(3, 7, 57),
(4, 8, 57),
(5, 9, 57),
(6, 11, 57),
(1, 10, 57)
).map(i => Row(i._1, i._2, i._3)))
checkAnswer(
sql(
"""
|select area, rank() over (partition by area order by tmp.month) + tmp.tmp1 as c1
|from (select month, area, product as p, 1 as tmp1 from windowData) tmp order by p
""".stripMargin),
Seq(
("a", 2),
("b", 2),
("b", 3),
("c", 2),
("d", 2),
("c", 3)
).map(i => Row(i._1, i._2)))
checkAnswer(
sql(
"""
|select area, rank() over (partition by area order by month) as c1
|from windowData group by product, area, month order by product, area
""".stripMargin),
Seq(
("a", 1),
("b", 1),
("b", 2),
("c", 1),
("d", 1),
("c", 2)
).map(i => Row(i._1, i._2)))
checkAnswer(
sql(
"""
|select area, sum(product) / sum(sum(product)) over (partition by area) as c1
|from windowData group by area, month order by month, c1
""".stripMargin),
Seq(
("d", 1.0),
("a", 1.0),
("b", 0.4666666666666667),
("b", 0.5333333333333333),
("c", 0.45),
("c", 0.55)
).map(i => Row(i._1, i._2)))
}
}
// todo: fix this test case by reimplementing the function ResolveAggregateFunctions
ignore("window function: Pushing aggregate Expressions in Sort to Aggregate") {
val data = Seq(
WindowData(1, "d", 10),
WindowData(2, "a", 6),
WindowData(3, "b", 7),
WindowData(4, "b", 8),
WindowData(5, "c", 9),
WindowData(6, "c", 11)
)
withTempView("windowData") {
sparkContext.parallelize(data).toDF().createOrReplaceTempView("windowData")
checkAnswer(
sql(
"""
|select area, sum(product) over () as c from windowData
|where product > 3 group by area, product
|having avg(month) > 0 order by avg(month), product
""".stripMargin),
Seq(
("a", 51),
("b", 51),
("b", 51),
("c", 51),
("c", 51),
("d", 51)
).map(i => Row(i._1, i._2)))
}
}
test("window function: multiple window expressions in a single expression") {
val nums = sparkContext.parallelize(1 to 10).map(x => (x, x % 2)).toDF("x", "y")
nums.createOrReplaceTempView("nums")
val expected =
Row(1, 1, 1, 55, 1, 57) ::
Row(0, 2, 3, 55, 2, 60) ::
Row(1, 3, 6, 55, 4, 65) ::
Row(0, 4, 10, 55, 6, 71) ::
Row(1, 5, 15, 55, 9, 79) ::
Row(0, 6, 21, 55, 12, 88) ::
Row(1, 7, 28, 55, 16, 99) ::
Row(0, 8, 36, 55, 20, 111) ::
Row(1, 9, 45, 55, 25, 125) ::
Row(0, 10, 55, 55, 30, 140) :: Nil
val actual = sql(
"""
|SELECT
| y,
| x,
| sum(x) OVER w1 AS running_sum,
| sum(x) OVER w2 AS total_sum,
| sum(x) OVER w3 AS running_sum_per_y,
| ((sum(x) OVER w1) + (sum(x) OVER w2) + (sum(x) OVER w3)) as combined2
|FROM nums
|WINDOW w1 AS (ORDER BY x ROWS BETWEEN UnBOUNDED PRECEDiNG AND CuRRENT RoW),
| w2 AS (ORDER BY x ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOuNDED FoLLOWING),
| w3 AS (PARTITION BY y ORDER BY x ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)
""".stripMargin)
checkAnswer(actual, expected)
spark.catalog.dropTempView("nums")
}
test("window function: mutiple window expressions specified by range in a single expression") {
val nums = sparkContext.parallelize(1 to 10).map(x => (x, x % 2)).toDF("x", "y")
nums.createOrReplaceTempView("nums")
withTempView("nums") {
val expected =
Row(1, 1, 1, 4, null, 8, 25) ::
Row(1, 3, 4, 9, 1, 12, 24) ::
Row(1, 5, 9, 15, 4, 16, 21) ::
Row(1, 7, 16, 21, 8, 9, 16) ::
Row(1, 9, 25, 16, 12, null, 9) ::
Row(0, 2, 2, 6, null, 10, 30) ::
Row(0, 4, 6, 12, 2, 14, 28) ::
Row(0, 6, 12, 18, 6, 18, 24) ::
Row(0, 8, 20, 24, 10, 10, 18) ::
Row(0, 10, 30, 18, 14, null, 10) ::
Nil
val actual = sql(
"""
|SELECT
| y,
| x,
| sum(x) over w1 as history_sum,
| sum(x) over w2 as period_sum1,
| sum(x) over w3 as period_sum2,
| sum(x) over w4 as period_sum3,
| sum(x) over w5 as future_sum
|FROM nums
|WINDOW
| w1 AS (PARTITION BY y ORDER BY x RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW),
| w2 AS (PARTITION BY y ORDER BY x RANGE BETWEEN 2 PRECEDING AND 2 FOLLOWING),
| w3 AS (PARTITION BY y ORDER BY x RANGE BETWEEN 4 PRECEDING AND 2 PRECEDING ),
| w4 AS (PARTITION BY y ORDER BY x RANGE BETWEEN 2 FOLLOWING AND 4 FOLLOWING),
| w5 AS (PARTITION BY y ORDER BY x RANGE BETWEEN CURRENT ROW AND UNBOUNDED FOLLOWING)
""".stripMargin
)
checkAnswer(actual, expected)
}
}
test("SPARK-7595: Window will cause resolve failed with self join") {
checkAnswer(sql(
"""
|with
| v0 as (select 0 as key, 1 as value),
| v1 as (select key, count(value) over (partition by key) cnt_val from v0),
| v2 as (select v1.key, v1_lag.cnt_val from v1 cross join v1 v1_lag
| where v1.key = v1_lag.key)
| select key, cnt_val from v2 order by key limit 1
""".stripMargin), Row(0, 1))
}
test("SPARK-16633: lead/lag should return the default value if the offset row does not exist") {
checkAnswer(sql(
"""
|SELECT
| lag(123, 100, 321) OVER (ORDER BY id) as lag,
| lead(123, 100, 321) OVER (ORDER BY id) as lead
|FROM (SELECT 1 as id) tmp
""".stripMargin),
Row(321, 321))
checkAnswer(sql(
"""
|SELECT
| lag(123, 100, a) OVER (ORDER BY id) as lag,
| lead(123, 100, a) OVER (ORDER BY id) as lead
|FROM (SELECT 1 as id, 2 as a) tmp
""".stripMargin),
Row(2, 2))
}
test("lead/lag should respect null values") {
checkAnswer(sql(
"""
|SELECT
| b,
| lag(a, 1, 321) OVER (ORDER BY b) as lag,
| lead(a, 1, 321) OVER (ORDER BY b) as lead
|FROM (SELECT cast(null as int) as a, 1 as b
| UNION ALL
| select cast(null as int) as id, 2 as b) tmp
""".stripMargin),
Row(1, 321, null) :: Row(2, null, 321) :: Nil)
checkAnswer(sql(
"""
|SELECT
| b,
| lag(a, 1, c) OVER (ORDER BY b) as lag,
| lead(a, 1, c) OVER (ORDER BY b) as lead
|FROM (SELECT cast(null as int) as a, 1 as b, 3 as c
| UNION ALL
| select cast(null as int) as id, 2 as b, 4 as c) tmp
""".stripMargin),
Row(1, 3, null) :: Row(2, null, 4) :: Nil)
}
test("test with low buffer spill threshold") {
val nums = sparkContext.parallelize(1 to 10).map(x => (x, x % 2)).toDF("x", "y")
nums.createOrReplaceTempView("nums")
val expected =
Row(1, 1, 1) ::
Row(0, 2, 3) ::
Row(1, 3, 6) ::
Row(0, 4, 10) ::
Row(1, 5, 15) ::
Row(0, 6, 21) ::
Row(1, 7, 28) ::
Row(0, 8, 36) ::
Row(1, 9, 45) ::
Row(0, 10, 55) :: Nil
val actual = sql(
"""
|SELECT y, x, sum(x) OVER w1 AS running_sum
|FROM nums
|WINDOW w1 AS (ORDER BY x ROWS BETWEEN UNBOUNDED PRECEDiNG AND CURRENT RoW)
""".stripMargin)
withSQLConf(WINDOW_EXEC_BUFFER_IN_MEMORY_THRESHOLD.key -> "1",
WINDOW_EXEC_BUFFER_SPILL_THRESHOLD.key -> "2") {
assertSpilled(sparkContext, "test with low buffer spill threshold") {
checkAnswer(actual, expected)
}
}
spark.catalog.dropTempView("nums")
}
}
|
dbtsai/spark
|
sql/core/src/test/scala/org/apache/spark/sql/execution/SQLWindowFunctionSuite.scala
|
Scala
|
apache-2.0
| 15,694 |
/*
* Copyright 2012 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.zipkin.storage.cassandra
import java.nio.ByteBuffer
import com.twitter.conversions.time._
import com.twitter.finagle.stats.{DefaultStatsReceiver, StatsReceiver}
import com.twitter.util.{Duration, Future}
import com.twitter.zipkin.adjuster.{ApplyTimestampAndDuration, CorrectForClockSkew, MergeById}
import com.twitter.zipkin.common.Span
import com.twitter.zipkin.conversions.thrift._
import com.twitter.zipkin.storage.{CollectAnnotationQueries, IndexedTraceId, SpanStore}
import com.twitter.zipkin.thriftscala.{Span => ThriftSpan}
import com.twitter.zipkin.util.{FutureUtil, Util}
import org.twitter.zipkin.storage.cassandra.Repository
import scala.collection.JavaConverters._
object CassandraSpanStoreDefaults {
val KeyspaceName = Repository.KEYSPACE
val SpanTtl = 7.days
val IndexTtl = 3.days
val MaxTraceCols = 100000
val MaxConnections = 8
val SpanCodec = new ScroogeThriftCodec[ThriftSpan](ThriftSpan)
}
abstract class CassandraSpanStore(
stats: StatsReceiver = DefaultStatsReceiver.scope("CassandraSpanStore"),
spanTtl: Duration = CassandraSpanStoreDefaults.SpanTtl,
indexTtl: Duration = CassandraSpanStoreDefaults.IndexTtl,
maxTraceCols: Int = CassandraSpanStoreDefaults.MaxTraceCols
) extends SpanStore with CollectAnnotationQueries {
/** Deferred as repository creates network connections */
protected def repository: Repository
private[this] val IndexDelimiter = ":"
private[this] val IndexDelimiterBytes = IndexDelimiter.getBytes
private[this] val spanCodec = CassandraSpanStoreDefaults.SpanCodec
/**
* Internal helper methods
*/
private[this] def createSpanColumnName(span: Span): String =
"%d_%d_%d".format(span.id, span.annotations.hashCode, span.binaryAnnotations.hashCode)
private[this] def annotationKey(serviceName: String, annotation: String, value: Option[ByteBuffer]): ByteBuffer = {
ByteBuffer.wrap(
serviceName.getBytes ++ IndexDelimiterBytes ++ annotation.getBytes ++
value.map { v => IndexDelimiterBytes ++ Util.getArrayFromBuffer(v) }.getOrElse(Array()))
}
/**
* Stats
*/
private[this] val SpansStats = stats.scope("spans")
private[this] val SpansStoredCounter = SpansStats.counter("stored")
private[this] val SpansIndexedCounter = SpansStats.counter("indexed")
private[this] val IndexStats = stats.scope("index")
private[this] val IndexServiceNameCounter = IndexStats.counter("serviceName")
private[this] val IndexServiceNameNoNameCounter = IndexStats.scope("serviceName").counter("noName")
private[this] val IndexSpanNameCounter = IndexStats.scope("serviceName").counter("spanName")
private[this] val IndexSpanNameNoNameCounter = IndexStats.scope("serviceName").scope("spanName").counter("noName")
private[this] val IndexTraceStats = IndexStats.scope("trace")
private[this] val IndexTraceNoTimestampCounter = IndexTraceStats.counter("noTimestamp")
private[this] val IndexTraceByServiceNameCounter = IndexTraceStats.counter("serviceName")
private[this] val IndexTraceBySpanNameCounter = IndexTraceStats.counter("spanName")
private[this] val IndexTraceByDurationCounter = IndexTraceStats.counter("duration")
private[this] val IndexAnnotationCounter = IndexStats.scope("annotation").counter("standard")
private[this] val IndexBinaryAnnotationCounter = IndexStats.scope("annotation").counter("binary")
private[this] val IndexSpanNoTimestampCounter = IndexStats.scope("span").counter("noTimestamp")
private[this] val IndexSpanNoDurationCounter = IndexStats.scope("span").counter("noDuration")
private[this] val QueryStats = stats.scope("query")
private[this] val QueryGetSpansByTraceIdsStat = QueryStats.stat("getSpansByTraceIds")
private[this] val QueryGetServiceNamesCounter = QueryStats.counter("getServiceNames")
private[this] val QueryGetSpanNamesCounter = QueryStats.counter("getSpanNames")
private[this] val QueryGetTraceIdsByNameCounter = QueryStats.counter("getTraceIdsByName")
private[this] val QueryGetTraceIdsByAnnotationCounter = QueryStats.counter("getTraceIdsByAnnotation")
private[this] val QueryGetTraceIdsByDurationCounter = QueryStats.counter("getTraceIdsByDuration")
/**
* Internal indexing helpers
*/
private[this] def indexServiceName(span: Span): Future[Unit] = {
IndexServiceNameCounter.incr()
Future.join(span.serviceNames.toList map {
case "" =>
IndexServiceNameNoNameCounter.incr()
Future.value(())
case s =>
FutureUtil.toFuture(repository.storeServiceName(s, indexTtl.inSeconds))
})
}
private[this] def indexSpanNameByService(span: Span): Future[Unit] = {
if (span.name == "") {
IndexSpanNameNoNameCounter.incr()
Future.value(())
} else {
IndexSpanNameCounter.incr()
Future.join(
span.serviceNames.toSeq map { serviceName =>
FutureUtil.toFuture(repository.storeSpanName(serviceName, span.name, indexTtl.inSeconds))
})
}
}
private[this] def indexTraceIdByName(span: Span): Future[Unit] = {
if (span.timestamp.isEmpty)
IndexTraceNoTimestampCounter.incr()
span.timestamp map { timestamp =>
val serviceNames = span.serviceNames
Future.join(
serviceNames.toList map { serviceName =>
IndexTraceByServiceNameCounter.incr()
val storeFuture =
FutureUtil.toFuture(repository.storeTraceIdByServiceName(serviceName, timestamp, span.traceId, indexTtl.inSeconds))
if (span.name != "") {
IndexTraceBySpanNameCounter.incr()
Future.join(
storeFuture,
FutureUtil.toFuture(repository.storeTraceIdBySpanName(serviceName, span.name, timestamp, span.traceId, indexTtl.inSeconds)))
} else storeFuture
})
} getOrElse Future.value(())
}
private[this] def indexByAnnotations(span: Span): Future[Unit] = {
if (span.timestamp.isEmpty)
IndexSpanNoTimestampCounter.incr()
span.timestamp map { timestamp =>
val annotationsFuture = Future.join(
span.annotations
.groupBy(_.value)
.flatMap { case (_, as) =>
val a = as.min
a.host map { endpoint =>
IndexAnnotationCounter.incr()
FutureUtil.toFuture(
repository.storeTraceIdByAnnotation(
annotationKey(endpoint.serviceName, a.value, None), timestamp, span.traceId, indexTtl.inSeconds))
}
}.toList)
val binaryFuture = Future.join(span.binaryAnnotations flatMap { ba =>
ba.host map { endpoint =>
IndexBinaryAnnotationCounter.incr()
Future.join(
FutureUtil.toFuture(
repository.storeTraceIdByAnnotation(
annotationKey(endpoint.serviceName, ba.key, Option(ba.value)), timestamp, span.traceId, indexTtl.inSeconds)),
FutureUtil.toFuture(
repository.storeTraceIdByAnnotation(
annotationKey(endpoint.serviceName, ba.key, None), timestamp, span.traceId, indexTtl.inSeconds)))
}
})
Future.join(annotationsFuture, binaryFuture).map(_ => ())
} getOrElse Future.value(())
}
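// Each duration entry is written twice: under the span name and under the
// empty name, so duration queries work with or without a span-name filter.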
private[this] def indexByDuration(span: Span): Future[Unit] = {
(span.timestamp, span.duration) match {
case (Some(timestamp), Some(duration)) =>
Future.join(
span.serviceNames.toSeq.flatMap { serviceName =>
IndexTraceByDurationCounter.incr()
Seq(
repository.storeTraceIdByDuration(
serviceName, span.name, timestamp, duration, span.traceId, indexTtl.inSeconds),
repository.storeTraceIdByDuration(
serviceName, "", timestamp, duration, span.traceId, indexTtl.inSeconds)
)
}.map(FutureUtil.toFuture)
)
case (_, None) =>
IndexSpanNoDurationCounter.incr()
Future.value((): Unit)
case _ => Future.value((): Unit)
}
}
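// Decoded spans are merged by id, clock-skew corrected, given derived
// timestamps/durations, and traces are returned newest-first by head span.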
private[this] def getSpansByTraceIds(traceIds: Seq[Long], count: Int): Future[Seq[List[Span]]] = {
FutureUtil.toFuture(repository.getSpansByTraceIds(traceIds.toArray.map(Long.box), count))
.map { spansByTraceId =>
val spans =
spansByTraceId.asScala.mapValues { spans => spans.asScala.map(spanCodec.decode(_).toSpan) }
traceIds.flatMap(traceId => spans.get(traceId))
.map(MergeById)
.map(CorrectForClockSkew)
.map(ApplyTimestampAndDuration)
.sortBy(_.head)(Ordering[Span].reverse) // sort descending by the first span
}
}
/**
* API Implementation
*/
override def close() = repository.close()
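// Spans are normalized (annotations sorted, timestamp/duration derived), then
// stored and indexed in parallel; the result completes when all writes do.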
override def apply(spans: Seq[Span]): Future[Unit] = {
SpansStoredCounter.incr(spans.size)
Future.join(
spans.map(s => s.copy(annotations = s.annotations.sorted))
.map(ApplyTimestampAndDuration.apply).map { span =>
SpansIndexedCounter.incr()
Future.join(
FutureUtil.toFuture(
repository.storeSpan(
span.traceId,
span.timestamp.getOrElse(0L),
createSpanColumnName(span),
spanCodec.encode(span.toThrift),
spanTtl.inSeconds)),
indexServiceName(span),
indexSpanNameByService(span),
indexTraceIdByName(span),
indexByAnnotations(span),
indexByDuration(span))
})
}
override def getTracesByIds(traceIds: Seq[Long]): Future[Seq[List[Span]]] = {
QueryGetSpansByTraceIdsStat.add(traceIds.size)
getSpansByTraceIds(traceIds, maxTraceCols)
}
override def getAllServiceNames(): Future[Seq[String]] = {
QueryGetServiceNamesCounter.incr()
FutureUtil.toFuture(repository.getServiceNames).map(_.asScala.toList.sorted)
}
override def getSpanNames(service: String): Future[Seq[String]] = {
QueryGetSpanNamesCounter.incr()
FutureUtil.toFuture(repository.getSpanNames(service)).map(_.asScala.toList.sorted)
}
override def getTraceIdsByName(
serviceName: String,
spanName: Option[String],
endTs: Long,
lookback: Long,
limit: Int
): Future[Seq[IndexedTraceId]] = {
QueryGetTraceIdsByNameCounter.incr()
val traceIdsFuture = FutureUtil.toFuture(spanName match {
// if we have a span name, look up in the service + span name index
// if not, look up by service name only
case Some(x: String) => repository.getTraceIdsBySpanName(serviceName, x, endTs * 1000, lookback * 1000, limit)
case None => repository.getTraceIdsByServiceName(serviceName, endTs * 1000, lookback * 1000, limit)
})
traceIdsFuture.map { traceIds =>
traceIds.asScala
.map { case (traceId, ts) => IndexedTraceId(traceId, timestamp = ts) }
.toSeq
}
}
override def getTraceIdsByAnnotation(
serviceName: String,
annotation: String,
value: Option[ByteBuffer],
endTs: Long,
lookback: Long,
limit: Int
): Future[Seq[IndexedTraceId]] = {
QueryGetTraceIdsByAnnotationCounter.incr()
FutureUtil.toFuture(
repository
.getTraceIdsByAnnotation(annotationKey(serviceName, annotation, value), endTs * 1000, lookback * 1000, limit))
.map { traceIds =>
traceIds.asScala
.map { case (traceId, ts) => IndexedTraceId(traceId, timestamp = ts) }
.toSeq
}
}
override protected def getTraceIdsByDuration(
serviceName: String,
spanName: Option[String],
minDuration: Long,
maxDuration: Option[Long],
endTs: Long,
lookback: Long,
limit: Int
): Future[Seq[IndexedTraceId]] = {
QueryGetTraceIdsByDurationCounter.incr()
FutureUtil.toFuture(
repository
.getTraceIdsByDuration(serviceName, spanName getOrElse "", minDuration, maxDuration getOrElse Long.MaxValue,
endTs * 1000, (endTs - lookback) * 1000, limit, indexTtl.inSeconds))
.map { traceIds =>
traceIds.asScala
.map { case (traceId, ts) => IndexedTraceId(traceId, timestamp = ts) }
.toSeq
}
}
}
|
prat0318/zipkin
|
zipkin-cassandra/src/main/scala/com/twitter/zipkin/storage/cassandra/CassandraSpanStore.scala
|
Scala
|
apache-2.0
| 12,693 |
/*
* Copyright 2017-2020 Daniel Urban and contributors listed in AUTHORS
* Copyright 2020 Nokia
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dev.tauri.seals
package core
import java.util.UUID
import scala.util.{ Try, Success, Failure }
import io.circe._
import _root_.scodec.bits._
import UUIDUtils.UUIDBuilder
class NsUUIDSpec extends tests.BaseSpec {
/** Version 1 UUID */
val ns1 = uuid"8f07c16c-f14d-11e6-81af-1d121b157edb"
assert(ns1.variant() === 2)
assert(ns1.version() === 1)
/** Version 3 (name-based MD5) UUID for "example.com" (DNS) */
val ns3 = uuid"9073926b-929f-31c2-abc9-fad77ae3e8eb"
assert(ns3.variant() === 2)
assert(ns3.version() === 3)
/** Version 4 (random) UUID as test namespace */
val ns4 = uuid"3b4ff1b0-5235-47b9-bdff-8f7df19bf8a4"
assert(ns4.variant() === 2)
assert(ns4.version() === 4)
/** Version 5 (name-based SHA1) UUID for "example.com" (DNS) */
val ns5 = uuid"cfbff0d1-9375-5685-968c-48ce8b15ae17"
assert(ns5.variant() === 2)
assert(ns5.version() === 5)
/** DNS namespace from RFC 4122 */
val nsDns = uuid"6ba7b810-9dad-11d1-80b4-00c04fd430c8"
assert(nsDns.variant() === 2)
/** URL namespace from RFC 4122 */
val nsUrl = uuid"6ba7b811-9dad-11d1-80b4-00c04fd430c8"
assert(nsUrl.variant() === 2)
"Basic functionality" - {
"empty name" in {
NsUUID.uuid5(ns4, "") should === (uuid"56756e5d-8a7e-570f-a419-82ea6d431713")
}
"short name" in {
NsUUID.uuid5(ns4, "alpha") should === (uuid"21bbb574-bba8-51e4-8b71-2ab43a593184")
}
"long name" in {
NsUUID.uuid5(ns4, "the quick brown fox jumps over the lazy dog") should === (
uuid"e9e3506b-5eca-5b3b-916f-c9d8fdce37c8"
)
}
}
"Various namespaces" - {
"DNS" in {
NsUUID.uuid5(nsDns, "example.com") should === (ns5)
}
"URL" in {
NsUUID.uuid5(nsUrl, "http://www.example.com/a/b/c") should === (
uuid"c3d9ade2-286d-5034-ab44-93d660958179"
)
}
"v1" in {
NsUUID.uuid5(ns1, "foobar") should === (uuid"d247cb15-9aff-5df1-beff-fdbc144f042a")
}
"v3" in {
NsUUID.uuid5(ns3, "foobar") should === (uuid"ae857671-99d7-5c5c-b458-c95c071bc730")
}
"v5" in {
NsUUID.uuid5(ns5, "foobar") should === (uuid"f1030914-4615-533a-ba0f-ce2603a31662")
}
}
"Nested namespaces" - {
val n1 = uuid"d71eb6ce-094e-47d1-8a87-0fe592905d05"
val n2 = uuid"75f91432-77d8-4ab3-a9c4-2a2652878029"
val n3 = uuid"e3c836b9-ac3c-4cc9-8ff6-b208515deda8"
val n4 = uuid"4458e30e-8120-47fc-a325-39053796fd83"
"UUIDs and a name" in {
val name = "foobar"
NsUUID.uuid5nestedNsNm(name, ns1) should === (NsUUID.uuid5(ns1, name))
NsUUID.uuid5nestedNsNm(name, ns1, n1) should === (uuid"8fc121a2-bdb6-57fd-9f4b-8c57d9860d7d")
NsUUID.uuid5nestedNsNm(name, ns1, n1, n2, n3, n4) should === (uuid"7f8c26c6-d014-58cc-a205-25c13c2b98c0")
}
"names" in {
NsUUID.uuid5nested(ns1) should === (ns1)
NsUUID.uuid5nested(ns1, "foo") should === (uuid"37af6235-cf58-51f3-8a67-3e6a0eedff96")
NsUUID.uuid5nested(ns1, "foo", "bar", "baz") should === (uuid"fd8f5430-b2d5-5d2b-8524-57da37991e36")
}
"UUIDs" in {
NsUUID.uuid5nestedNs(ns1) should === (ns1)
NsUUID.uuid5nestedNs(ns1, n1) should === (uuid"da3145fc-debf-5024-be13-051b8a1217d2")
NsUUID.uuid5nestedNs(ns1, n1, n2, n3, n4) should === (uuid"cd7b7bd8-3810-5be5-9c6f-05c8dc1bb8c6")
}
"generated test data" in {
val str = stringFromResource("/test_data.json")
val json = io.circe.parser.parse(str).fold(err => fail(err.toString), x => x)
checkFromJsonData(json)
}
}
"UUIDBuilder" - {
val root = uuid"2fffa6dc-d430-4c8a-9d90-a53764159e89"
val u1 = uuid"ec11c15f-7137-4531-a862-8c72b71fd8d4"
val u2 = uuid"676f5566-97a3-47b4-8ab5-3fd6a3f00b61"
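// UUIDBuilder (root / segment / ...) appends each segment's bytes (UUIDs,
// ByteVectors, or UTF-8 encoded Strings) and hashes the concatenation with
// uuid5bv under the root namespace.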
"empty" in {
UUIDBuilder(root).uuid should === (
NsUUID.uuid5bv(root, ByteVector.empty)
)
}
"with UUIDs" in {
(root / u1 / u2).uuid should === (
NsUUID.uuid5bv(root, ByteVector.fromUUID(u1) ++ ByteVector.fromUUID(u2))
)
}
"with ByteVectors" in {
(root / hex"deadbeef" / hex"abcdef").uuid should === (
NsUUID.uuid5bv(root, hex"deadbeef abcdef")
)
}
"with Strings" in {
(root / "xyz" / "1256hgds").uuid should === (
NsUUID.uuid5bv(root, ByteVector.encodeUtf8("xyz1256hgds").fold(
err => fail(err.toString),
bv => bv
))
)
}
"with mixed" in {
(root / u1 / hex"abef" / "éáű" / u2).uuid should === (
NsUUID.uuid5bv(
root,
ByteVector.fromUUID(u1) ++
hex"abef" ++
ByteVector.encodeUtf8("éáű").fold(
err => fail(err.toString),
bv => bv
) ++
ByteVector.fromUUID(u2)
)
)
}
}
def checkFromJsonData(j: Json): Unit = {
def go(j: Json, nss: Vector[Either[UUID, String]]): Unit = {
j.as[Map[String, Json]] match {
case Left(_) =>
// reached a leaf:
j.as[UUID].fold(err => fail(s"not an UUID: ${j} (${err})"), _ => ())
case Right(map) =>
for ((k, v) <- map) {
Try(UUID.fromString(k)) match {
case Success(uuid) =>
go(v, nss :+ Left(uuid))
case Failure(_) =>
if (k.isEmpty) {
// must've reached a UUID:
v.as[UUID].fold(
err => fail(s"expected UUID at empty key, got '${v}' (${err})"),
uuid => composite(nss) should === (uuid)
)
} else {
go(v, nss :+ Right(k))
}
}
}
}
}
def composite(nss: Vector[Either[UUID, String]]): UUID = nss match {
case Left(uuid) +: t => comp(uuid, t)
case _ => fail("root must be a UUID")
}
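// A composite chain must start with a UUID; once a name segment appears, a
// later UUID segment is invalid (the "UUID after name" failure in comp).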
def comp(root: UUID, nss: Vector[Either[UUID, String]]): UUID = {
val (u, _) = nss.foldLeft((root, true)) { (st, us) =>
(st, us) match {
case ((s, true), Left(uuid)) => (NsUUID.uuid5nestedNs(s, uuid), true)
case ((s, _), Right(name)) => (NsUUID.uuid5(s, name), false)
case ((_, false), Left(_)) => fail("UUID after name")
}
}
u
}
go(j, Vector.empty)
}
def stringFromResource(res: String): String = {
val stream = this.getClass.getResourceAsStream(res)
try {
new java.util.Scanner(stream, "UTF-8").useDelimiter("\\\\A").next()
} finally {
stream.close()
}
}
}
|
durban/seals
|
tests/src/test/scala/dev/tauri/seals/core/NsUUIDSpec.scala
|
Scala
|
apache-2.0
| 7,210 |