code (stringlengths 5–1M) | repo_name (stringlengths 5–109) | path (stringlengths 6–208) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64, 5–1M)
---|---|---|---|---|---
// ticket #3496
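// Stream.from(1) is infinite; the calls below appear to exercise laziness:
// take/drop/splitAt must return without forcing the whole stream, and their
// results are deliberately discarded.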
object Test {
def main(args: Array[String]): Unit = {
val s = Stream.from(1)
s.take(5)
s.drop(5)
s.splitAt(5)
}
}
| yusuke2255/dotty | tests/run/t3496.scala | Scala | bsd-3-clause | 156 |
package org.jetbrains.plugins.scala.annotator
import org.jetbrains.plugins.scala.DependencyManagerBase._
import org.jetbrains.plugins.scala.base.libraryLoaders.{IvyManagedLoader, LibraryLoader}
import org.jetbrains.plugins.scala.debugger.{ScalaVersion, Scala_2_12}
/**
* Nikolay.Tropin
* 23-May-18
*/
class AkkaHttpHighlightingTest extends ScalaHighlightingTestBase {
override implicit val version: ScalaVersion = Scala_2_12
private val akkaHttpVersion = "10.0.11"
private val akkaVersion = "2.5.8"
override def librariesLoaders: Seq[LibraryLoader] =
super.librariesLoaders :+
IvyManagedLoader(
"com.typesafe.akka" %% "akka-http" % akkaHttpVersion,
"com.typesafe.akka" %% "akka-http-core" % akkaHttpVersion,
"com.typesafe.akka" %% "akka-actor" % akkaVersion
)
def testSCL11470(): Unit = {
assertNothing(errorsFromScalaCode(
"""
|import akka.http.scaladsl.server.Route
|import akka.http.scaladsl.server.Directives._
|import akka.http.scaladsl.server.directives.Credentials
|import akka.http.scaladsl.settings.RoutingSettings
|
|class Server {
| implicit val routingSettings: RoutingSettings = RoutingSettings("")
|
| val routes =
| Route.seal {
| path("secured") {
| authenticateBasic[String]("s", authenticator) { k =>
| complete(s"$k")
| }
| }
| }
|
| def authenticator(credentials: Credentials): Option[String] = None
|}
""".stripMargin))
}
}
| jastice/intellij-scala | scala/scala-impl/test/org/jetbrains/plugins/scala/annotator/AkkaHttpHighlightingTest.scala | Scala | apache-2.0 | 1,631 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.utils.geotools
import java.util.{Date, UUID}
import org.junit.runner.RunWith
import org.locationtech.geomesa.utils.geotools.SftBuilder.Opts
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes.AttributeOptions._
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
import scala.collection.JavaConversions._
@RunWith(classOf[JUnitRunner])
class SftBuilderTest extends Specification {
import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
sequential
"SpecBuilder" >> {
"build simple types" >> {
val spec = new SftBuilder().intType("i").longType("l").floatType("f").doubleType("d").stringType("s").getSpec
spec mustEqual "i:Integer,l:Long,f:Float,d:Double,s:String"
}
"handle date and uuid types" >> {
val spec = new SftBuilder().date("d").uuid("u").getSpec
spec mustEqual "d:Date,u:UUID"
}
"provide index when set to true" >> {
val spec = new SftBuilder()
.intType("i", index = true)
.longType("l", index = true)
.floatType("f", index = true)
.doubleType("d", index = true)
.stringType("s", index = true)
.date("dt", Opts(index = true))
.uuid("u", index = true)
.getSpec
val expected = "i:Integer,l:Long,f:Float,d:Double,s:String,dt:Date,u:UUID".split(",").map(_+":index=true").mkString(",")
spec mustEqual expected
}
// Example of a fold; more complex things, like zipping, can also be used to automatically build SFTs
"work with foldLeft" >> {
val spec = ('a' to 'z').foldLeft(new SftBuilder()) { case (builder, name) =>
builder.stringType(name.toString)
}
val expected = ('a' to 'z').map{ c => c.toString + ":" + "String" }.mkString(",")
spec.getSpec mustEqual expected
val sft = spec.build("foobar")
sft.getAttributeCount mustEqual 26
sft.getAttributeDescriptors.map(_.getLocalName).toList mustEqual ('a' to 'z').map(_.toString).toList
}
"set default dtg correctly" >> {
new SftBuilder()
.date("foobar", default = true)
.build("foobar").getDtgField must beSome("foobar")
new SftBuilder()
.date("foobar")
.withDefaultDtg("foobar")
.build("foobar").getDtgField must beSome("foobar")
new SftBuilder()
.date("foobar")
.date("dtg")
.withDefaultDtg("foobar")
.build("foobar").getDtgField must beSome("foobar")
new SftBuilder()
.date("dtg")
.date("foobar")
.withDefaultDtg("foobar")
.build("foobar").getDtgField must beSome("foobar")
new SftBuilder()
.date("dtg")
.date("foobar", default = true)
.build("foobar").getDtgField must beSome("foobar")
}
"build lists" >> {
val builder = new SftBuilder()
.listType[Int]("i")
.listType[Long]("l")
.listType[Float]("f")
.listType[Double]("d")
.listType[String]("s")
.listType[Date]("dt")
.listType[UUID]("u")
val expected =
List(
"i" -> "Int",
"l" -> "Long",
"f" -> "Float",
"d" -> "Double",
"s" -> "String",
"dt" -> "Date",
"u" -> "UUID"
).map { case (k,v) => s"$k:List[$v]" }.mkString(",")
builder.getSpec mustEqual expected
val sft = builder.build("foobar")
sft.getAttributeCount mustEqual 7
sft.getAttributeDescriptors.map(_.getType.getBinding).forall (_ must beAssignableFrom[java.util.List[_]])
}
"build lists with Java Types" >> {
val builder = new SftBuilder()
.listType[java.lang.Integer]("i")
.listType[java.lang.Long]("l")
.listType[java.lang.Float]("f")
.listType[java.lang.Double]("d")
.listType[java.lang.String]("s")
.listType[java.util.Date]("dt")
.listType[java.util.UUID]("u")
val expected =
List(
"i" -> "Integer", //for java use Integer instead of Int
"l" -> "Long",
"f" -> "Float",
"d" -> "Double",
"s" -> "String",
"dt" -> "Date",
"u" -> "UUID"
).map { case (k,v) => s"$k:List[$v]" }.mkString(",")
builder.getSpec mustEqual expected
val sft = builder.build("foobar")
sft.getAttributeCount mustEqual 7
sft.getAttributeDescriptors.map(_.getType.getBinding).forall (_ must beAssignableFrom[java.util.List[_]])
}
"build maps" >> {
val builder = new SftBuilder()
.mapType[Int,Int]("i")
.mapType[Long,Long]("l")
.mapType[Float,Float]("f")
.mapType[Double,Double]("d")
.mapType[String,String]("s")
.mapType[Date,Date]("dt")
.mapType[UUID,UUID]("u")
val expected =
List(
"i" -> "Int",
"l" -> "Long",
"f" -> "Float",
"d" -> "Double",
"s" -> "String",
"dt" -> "Date",
"u" -> "UUID"
).map { case (k,v) => s"$k:Map[$v,$v]" }.mkString(",")
builder.getSpec mustEqual expected
val sft = builder.build("foobar")
sft.getAttributeCount mustEqual 7
sft.getAttributeDescriptors.map(_.getType.getBinding).forall (_ must beAssignableFrom[java.util.Map[_,_]])
}
"build maps of diff types" >> {
val builder = new SftBuilder()
.mapType[Int,String]("a")
.mapType[Long,UUID]("b")
.mapType[Date,Float]("c")
builder.getSpec mustEqual "a:Map[Int,String],b:Map[Long,UUID],c:Map[Date,Float]"
val sft = builder.build("foobar")
sft.getAttributeCount mustEqual 3
sft.getAttributeDescriptors.map(_.getType.getBinding).forall (_ must beAssignableFrom[java.util.Map[_,_]])
}
"handle multiple geoms" >> {
val builder = new SftBuilder()
.geometry("geom")
.point("foobar", default = true)
.multiLineString("mls")
builder.getSpec mustEqual s"geom:Geometry:srid=4326,*foobar:Point:srid=4326:index=true:$OPT_INDEX_VALUE=true,mls:MultiLineString:srid=4326"
val sft = builder.build("foobar")
sft.getAttributeCount mustEqual 3
sft.getGeometryDescriptor.getLocalName mustEqual "foobar"
}
"handle Bytes type" >> {
val spec = new SftBuilder().stringType("a").bytes("b").getSpec
spec mustEqual "a:String,b:Bytes"
val lSpec = new SftBuilder().listType[Array[Byte]]("lst").getSpec
lSpec mustEqual "lst:List[Bytes]"
val mSpec = new SftBuilder().mapType[String,Array[Byte]]("m").getSpec
mSpec mustEqual "m:Map[String,Bytes]"
val m2Spec = new SftBuilder().mapType[Array[Byte],Array[Byte]]("m2").getSpec
m2Spec mustEqual "m2:Map[Bytes,Bytes]"
}
}
}
| jahhulbert-ccri/geomesa | geomesa-utils/src/test/scala/org/locationtech/geomesa/utils/geotools/SftBuilderTest.scala | Scala | apache-2.0 | 7,276 |
package ee.cone.c4actor.hashsearch.index.dynamic
import java.time.Instant
import ee.cone.c4actor.QProtocol.S_Firstborn
import ee.cone.c4actor.Types.SrcId
import ee.cone.c4actor._
import ee.cone.c4actor.dep.request.CurrentTimeProtocol.S_CurrentTimeNode
import ee.cone.c4actor.dep.request.{CurrentTimeConfig, CurrentTimeConfigApp}
import ee.cone.c4actor.hashsearch.base.InnerLeaf
import ee.cone.c4actor.hashsearch.index.dynamic.IndexNodeProtocol.{S_IndexNodeSettings, _}
import ee.cone.c4actor.hashsearch.rangers.{HashSearchRangerRegistryApi, HashSearchRangerRegistryApp, IndexType, RangerWithCl}
import ee.cone.c4assemble.Types.{Each, Values}
import ee.cone.c4assemble._
import scala.collection.immutable
import scala.collection.immutable.Seq
case class ProductWithId[Model <: Product](modelCl: Class[Model], modelId: Int)
trait DynamicIndexModelsApp {
def dynIndexModels: List[ProductWithId[_ <: Product]] = Nil
}
trait DynamicIndexAssemble
extends AssemblesApp
with WithIndexNodeProtocol
with DynamicIndexModelsApp
with SerializationUtilsApp
with CurrentTimeConfigApp
with HashSearchDynamicIndexApp
with HashSearchRangerRegistryApp
with DefaultModelRegistryApp {
def dynamicIndexRefreshRateSeconds: Long
def dynamicIndexNodeDefaultSetting: S_IndexNodeSettings = S_IndexNodeSettings("", false, None)
override def currentTimeConfig: List[CurrentTimeConfig] =
CurrentTimeConfig("DynamicIndexAssembleRefresh", dynamicIndexRefreshRateSeconds) ::
super.currentTimeConfig
override def assembles: List[Assemble] = {
modelListIntegrityCheck(dynIndexModels.distinct)
new ThanosTimeFilters(hashSearchVersion, maxTransforms = dynamicIndexMaxEvents) ::
dynIndexModels.distinct.map(p ⇒
new IndexNodeThanos(
p.modelCl, p.modelId,
dynamicIndexAssembleDebugMode,
dynamicIndexAutoStaticNodeCount,
dynamicIndexAutoStaticLiveSeconds,
dynamicIndexNodeDefaultSetting,
dynamicIndexDeleteAnywaySeconds
)(defaultModelRegistry, qAdapterRegistry, hashSearchRangerRegistry, idGenUtil)
) :::
super.assembles
}
def dynamicIndexMaxEvents: Int = 100000
def dynamicIndexAssembleDebugMode: Boolean = false
def dynamicIndexAutoStaticNodeCount: Int = 1000
def dynamicIndexAutoStaticLiveSeconds: Long = 60L * 60L
def dynamicIndexDeleteAnywaySeconds: Long = 60L * 60L * 24L * 1L
private def modelListIntegrityCheck: List[ProductWithId[_ <: Product]] ⇒ Unit = list ⇒ {
val map = list.distinct.groupBy(_.modelId)
if (!map.values.forall(_.size == 1))
FailWith.apply(s"Dyn model List contains models with same Id: ${map.filter(_._2.size > 1)}")
}
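// Example (hypothetical models): registering two ProductWithId entries that share
// a modelId, e.g. ProductWithId(classOf[Foo], 1) and ProductWithId(classOf[Bar], 1),
// would make modelListIntegrityCheck fail with the duplicate-id message above.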
lazy val hashSearchVersion: String = "MC5FLkU=" // "0.E.E" in base64; decode e.g. at http://base64decode.toolur.com/
}
case class IndexNodeRich[Model <: Product](
srcId: SrcId,
isStatic: Boolean,
indexNode: S_IndexNode,
indexByNodes: List[IndexByNodeRich[Model]],
directive: Option[Any]
)
case class IndexByNodeRichCount[Model <: Product](
srcId: SrcId,
indexByNodeCount: Int
)
case class IndexByNodeRich[Model <: Product](
srcId: SrcId,
isAlive: Boolean,
indexByNode: S_IndexByNode
) {
lazy val heapIdsSet: Set[String] = indexByNode.heapIds.toSet
}
case class IndexByNodeStats(
srcId: SrcId,
lastPongSeconds: Long,
parentId: SrcId
)
sealed trait ThanosTimeTypes {
type PowerIndexNodeThanosAll = AbstractAll
type ThanosLEventsTransformsAll = AbstractAll
}
@assemble class ThanosTimeFiltersBase(version: String, maxTransforms: Int) extends ThanosTimeTypes {
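// SnapTransformWatcher compares the stored S_IndexNodesVersion against the
// compiled-in version string; on mismatch it schedules a SnapTransform, which
// (see SnapTransform near the bottom of this file) deletes the persisted
// dynamic-index records and writes the new version.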
def SnapTransformWatcher(
verId: SrcId,
firstBorn: Each[S_Firstborn],
versions: Values[S_IndexNodesVersion]
): Values[(SrcId, TxTransform)] =
if (versions.headOption.map(_.version).getOrElse("") == version) {
Nil
} else {
WithPK(SnapTransform(firstBorn.srcId + "Snap", firstBorn.srcId, version)) :: Nil
}
def PowerFilterCurrentTimeNode(
timeNode: SrcId,
firstborn: Values[S_Firstborn],
currentTimeNode: Each[S_CurrentTimeNode]
): Values[(PowerIndexNodeThanosAll, S_CurrentTimeNode)] =
if (currentTimeNode.srcId == "DynamicIndexAssembleRefresh")
WithAll(currentTimeNode) :: Nil
else
Nil
def ApplyThanosTransforms(
firsBornId: SrcId,
firstborn: Each[S_Firstborn],
@byEq[ThanosLEventsTransformsAll](All) @distinct events: Values[LEventTransform]
): Values[(SrcId, TxTransform)] =
if (events.nonEmpty)
WithPK(CollectiveTransform("ThanosTX", events.take(maxTransforms))) :: Nil
else
Nil
}
import ee.cone.c4actor.hashsearch.rangers.IndexType._
case class RangerDirective[Model <: Product](
nodeId: SrcId,
directive: Any
)
case class PreProcessedLeaf[Model <: Product](
leafId: SrcId,
originalLeafIds: List[SrcId],
indexNodeId: SrcId,
commonPrefix: String,
by: Product,
byId: Long,
ranger: RangerWithCl[_ <: Product, _]
)
case class ProcessedLeaf[Model <: Product](
leafId: SrcId,
originalLeafIds: List[SrcId],
preProcessed: PreProcessedLeaf[Model],
heapIds: List[SrcId]
) {
lazy val heapIdsSet: Set[SrcId] = heapIds.toSet
}
case class IndexNodeTyped[Model <: Product](
indexNodeId: SrcId,
indexNode: S_IndexNode
)
case class IndexByNodeTyped[Model <: Product](
leafId: SrcId,
indexByNode: S_IndexByNode
)
trait IndexNodeThanosUtils[Model <: Product] extends HashSearchIdGeneration {
def qAdapterRegistry: QAdapterRegistry
def rangerRegistryApi: HashSearchRangerRegistryApi
def idGenUtil: IdGenUtil
def defaultModelRegistry: DefaultModelRegistry
lazy val nameToIdMap: Map[String, Long] = qAdapterRegistry.byName.transform((_, v) ⇒ if (v.hasId) v.id else -1)
lazy val longToName: Map[Long, String] = qAdapterRegistry.byId.transform((_, v) ⇒ v.className)
def modelId: Int
def preProcessLeaf(
leaf: InnerLeaf[Model]
): List[PreProcessedLeaf[Model]] =
leaf.prodCondition match {
case Some(prod) ⇒
nameToIdMap.get(leaf.byClName) match {
case Some(byId) ⇒
rangerRegistryApi.getByByIdUntyped(byId) match {
case Some(ranger) ⇒
val preparedBy = innerPrepareLeaf(ranger, prod.by)
val commonPrefixEv = commonPrefix(modelId, leaf.lensNameList)
val leafIdEv = leafId(commonPrefixEv, preparedBy)
val indexNodeIdEv = indexNodeId(commonPrefixEv, byId)
PreProcessedLeaf[Model](leafIdEv, leaf.srcId :: Nil, indexNodeIdEv, commonPrefixEv, preparedBy, byId, ranger) :: Nil
case None ⇒ Nil
}
case None ⇒ Nil
}
case None ⇒ Nil
}
def processLeafWDefault(
leaf: PreProcessedLeaf[Model]
): ProcessedLeaf[Model] = {
val directive = defaultModelRegistry.get[Product](leaf.by.getClass.getName).create("")
processLeaf(leaf, directive)
}
def processLeaf[By <: Product](
leaf: PreProcessedLeaf[Model],
directive: By
): ProcessedLeaf[Model] = {
val ids = applyRangerInner(leaf.ranger, leaf.by, directive, leaf.commonPrefix)
ProcessedLeaf(leaf.leafId, leaf.originalLeafIds, leaf, ids)
}
lazy val indexTypeMap: Map[Long, IndexType] = rangerRegistryApi.getAll.map(r ⇒ nameToIdMap(r.byCl.getName) → r.indexType).toMap
def getIndexType(byId: Long): IndexType = indexTypeMap.getOrElse(byId, IndexType.Default)
def applyRangerInner[By <: Product](
ranger: RangerWithCl[By, _],
by: Product,
directive: Product,
commonPrefix: String
): List[SrcId] = {
ranger.ranges(directive.asInstanceOf[By])._2.apply(by).map(heapId(commonPrefix, _))
}
def innerPrepareLeaf[By <: Product](
ranger: RangerWithCl[By, _],
by: Product
): Product = ranger.prepareRequest(by.asInstanceOf[By])
def prepareDirective(directives: Values[RangerDirective[Model]]): Option[Product] =
Single.option(directives)
}
@assemble class IndexNodeThanosBase[Model <: Product](
modelCl: Class[Model], val modelId: Int,
debugMode: Boolean,
autoCount: Int,
autoLive: Long,
dynamicIndexNodeDefaultSetting: S_IndexNodeSettings,
deleteAnyway: Long
)(
val defaultModelRegistry: DefaultModelRegistry,
val qAdapterRegistry: QAdapterRegistry,
val rangerRegistryApi: HashSearchRangerRegistryApi,
val idGenUtil: IdGenUtil
)
extends AssembleName("IndexNodeThanos", modelCl) with ThanosTimeTypes
with IndexNodeThanosUtils[Model] {
type IndexNodeId = SrcId
type FilterPreProcessedLeafs = SrcId
// Mock join
def MockJoin(
srcId: SrcId,
firstborn: Each[S_Firstborn]
): Values[(SrcId, RangerDirective[Model])] =
Nil
// PreProcessLeaf ignores leafsWithAll
def PreProcessLeaf(
leafId: SrcId,
leaf: Each[InnerLeaf[Model]]
): Values[(FilterPreProcessedLeafs, PreProcessedLeaf[Model])] =
preProcessLeaf(leaf).map(WithPK.apply)
def FilterPreProcessedLeaf(
leafId: SrcId,
@by[FilterPreProcessedLeafs] @distinct leafs: Values[PreProcessedLeaf[Model]]
): Values[(SrcId, PreProcessedLeaf[Model])] =
if (leafs.nonEmpty) {
WithPK(leafs.head.copy[Model](originalLeafIds = leafs.flatMap(_.originalLeafIds).distinct.toList)) :: Nil
} else {
Nil
}
def PreProcessedLeafToNode(
leafId: SrcId,
leaf: Each[PreProcessedLeaf[Model]]
): Values[(IndexNodeId, PreProcessedLeaf[Model])] =
(leaf.indexNodeId → leaf) :: Nil
def ProcessLeaf(
nodeId: SrcId,
directives: Values[RangerDirective[Model]],
@by[IndexNodeId] preLeafs: Values[PreProcessedLeaf[Model]]
): Values[(SrcId, ProcessedLeaf[Model])] =
Single.option(directives) match {
case Some(dir) ⇒ preLeafs.map(leaf ⇒ processLeaf(leaf, dir)).map(WithPK.apply)
case None ⇒ preLeafs.map(leaf ⇒ processLeafWDefault(leaf)).map(WithPK.apply)
}
// D_Node creation
def IndexNodeFilter(
indexNodeId: SrcId,
indexNode: Each[S_IndexNode]
): Values[(SrcId, IndexNodeTyped[Model])] =
if (indexNode.modelId == modelId)
WithPK(IndexNodeTyped[Model](indexNode.indexNodeId, indexNode)) :: Nil
else
Nil
def SoulIndexNodeCreation(
indexNodeId: SrcId,
indexNodes: Values[IndexNodeTyped[Model]],
@by[IndexNodeId] leafs: Values[PreProcessedLeaf[Model]]
): Values[(ThanosLEventsTransformsAll, LEventTransform)] =
(indexNodes.toList, leafs.toList) match {
case (Nil, leaf :: _) ⇒
if (debugMode)
PrintColored("y")(s"[Thanos.Soul, $modelId] Created S_IndexNode for ${(leaf.by.getClass.getName, leaf.commonPrefix)},${(modelCl.getName, modelId)}")
val indexType: IndexType = getIndexType(leaf.byId)
WithAll(SoulTransform(leaf.indexNodeId, modelId, leaf.byId, leaf.commonPrefix, dynamicIndexNodeDefaultSetting, indexType)) :: Nil
case (indexNode :: Nil, leaf :: _) ⇒
if (debugMode) {
val x = indexNode
val y = leaf
PrintColored("y")(s"[Thanos.Soul, $modelId] Both alive $x ${y.by}")
}
Nil
case (_ :: Nil, Nil) ⇒
Nil
case (Nil, Nil) ⇒
Nil
case _ ⇒
println(s"Multiple indexNodes in [Thanos.Soul, ${modelCl.toString}] - SoulIndexNodeCreation ${indexNodes.toList},${leafs.toList}")
Nil // WithAll(SoulCorrectionTransform(indexNodeId, indexNodes.map(_.indexNode).toList)) :: Nil
}
// ByNode creation
def IndexByNodeFilter(
indexNodeId: SrcId,
indexByNode: Each[S_IndexByNode]
): Values[(SrcId, IndexByNodeTyped[Model])] =
if (indexByNode.modelId == modelId)
WithPK(IndexByNodeTyped[Model](indexByNode.leafId, indexByNode)) :: Nil
else
Nil
def RealityInnerLeafIndexByNode(
innerLeafId: SrcId,
innerLeafs: Values[ProcessedLeaf[Model]],
indexByNodes: Values[IndexByNodeTyped[Model]],
indexByNodesLastSeen: Values[S_IndexByNodeLastSeen]
): Values[(ThanosLEventsTransformsAll, LEventTransform)] = {
(innerLeafs.toList, indexByNodes.toList) match {
case (leaf :: Nil, Nil) ⇒
if (debugMode)
PrintColored("r")(s"[Thanos.Reality, $modelId] Created ByNode for ${leaf.preProcessed.by}")
WithAll(RealityTransform(leaf.preProcessed.leafId, leaf.preProcessed.indexNodeId, leaf.heapIds, leaf.preProcessed.by.toString, modelId, autoLive)) :: Nil
case (Nil, node :: Nil) ⇒
if (indexByNodesLastSeen.isEmpty)
WithAll(MindTransform(node.leafId)) :: Nil
else
Nil
case (leaf :: Nil, node :: Nil) ⇒
if (debugMode)
PrintColored("r")(s"[Thanos.Reality, $modelId] Both alive ${leaf.preProcessed.by}")
if (indexByNodesLastSeen.nonEmpty)
WithAll(RevertedMindTransform(leaf.leafId)) :: Nil
else
Nil
case (Nil, Nil) ⇒ Nil
case (a, b) ⇒ FailWith.apply(s"Multiple inputs in [Thanos.Reality, $modelId] - RealityGiveLifeToIndexByNode: $a\n${b.mkString("\n")}")
}
}
// ByNode rich
def SpaceIndexByNodeRich(
indexByNodeId: SrcId,
nodes: Values[IndexByNodeTyped[Model]],
innerLeafs: Values[ProcessedLeaf[Model]],
indexByNodesLastSeen: Values[S_IndexByNodeLastSeen],
indexByNodeSettings: Values[S_IndexByNodeSettings],
@byEq[PowerIndexNodeThanosAll](All) currentTimes: Each[S_CurrentTimeNode]
): Values[(IndexNodeId, IndexByNodeRich[Model])] =
if (nodes.size == 1) {
val node = nodes.head
val currentTime = currentTimes.currentTimeSeconds
val leafIsPresent = innerLeafs.nonEmpty
val lastPong = indexByNodesLastSeen.headOption.map(_.lastSeenAtSeconds).getOrElse(0L)
val setting = indexByNodeSettings.headOption
val isAlive =
leafIsPresent || indexByNodesLastSeen.isEmpty ||
(setting.isDefined && (setting.get.alwaysAlive || currentTime - setting.get.keepAliveSeconds.getOrElse(0L) - lastPong <= 0))
val rich = IndexByNodeRich[Model](node.leafId, isAlive, node.indexByNode)
if (debugMode)
PrintColored("b", "w")(s"[Thanos.Space, $modelId] Updated IndexByNodeRich ${(isAlive, currentTime, node.leafId, innerLeafs.headOption.map(_.preProcessed.by))}")
(rich.indexByNode.indexNodeId → rich) :: Nil
} else if (innerLeafs.size == 1) {
val leaf = innerLeafs.head
val stubIndexByNode = S_IndexByNode(leaf.leafId, leaf.preProcessed.indexNodeId, modelId, leaf.heapIds, leaf.preProcessed.by.toString)
val rich = IndexByNodeRich[Model](leaf.leafId, isAlive = true, stubIndexByNode)
if (debugMode)
PrintColored("b", "w")(s"[Thanos.Space, $modelId] Created from leaf IndexByNodeRich ${(leaf.leafId, innerLeafs.headOption.map(_.preProcessed.by))}")
(rich.indexByNode.indexNodeId → rich) :: Nil
} else Nil
// NodeRich - dynamic
def SpaceIndexNodeRichNoneAlive(
indexNodeId: SrcId,
indexNode: Each[IndexNodeTyped[Model]],
indexNodeSettings: Values[S_IndexNodeSettings],
@by[IndexNodeId] indexByNodeRiches: Values[IndexByNodeRich[Model]],
directives: Values[RangerDirective[Model]]
): Values[(SrcId, IndexNodeRich[Model])] = {
val settings = indexNodeSettings.headOption
val isStatic = (settings.isDefined && settings.get.allAlwaysAlive) || (settings.isDefined && settings.get.keepAliveSeconds.isEmpty && indexByNodeRiches.size > autoCount)
if (!isStatic) {
val directive = prepareDirective(directives)
val rich = IndexNodeRich[Model](indexNode.indexNodeId, isStatic, indexNode.indexNode, indexByNodeRiches.toList, directive)
if (debugMode)
PrintColored("b", "w")(s"[Thanos.Space, $modelId] Updated IndexNodeRich Dynamic${(isStatic, indexNode.indexNodeId, indexByNodeRiches.size)}")
WithPK(rich) :: Nil
} else {
Nil
}
}
// Count children
def PowerIndexByNodeCounter(
indexNodeId: SrcId,
@by[IndexNodeId] indexByNodeRiches: Values[IndexByNodeRich[Model]]
): Values[(SrcId, IndexByNodeRichCount[Model])] =
WithPK(IndexByNodeRichCount[Model](indexNodeId, indexByNodeRiches.size)) :: Nil
// NodeRich - static
def SpaceIndexNodeRichAllAlive(
indexNodeId: SrcId,
indexNode: Each[IndexNodeTyped[Model]],
indexNodeSettings: Values[S_IndexNodeSettings],
childCounts: Values[IndexByNodeRichCount[Model]],
directives: Values[RangerDirective[Model]]
): Values[(SrcId, IndexNodeRich[Model])] = {
val settings = indexNodeSettings.headOption
val childCount = childCounts.headOption.map(_.indexByNodeCount).getOrElse(0)
val isAlive = (settings.isDefined && settings.get.allAlwaysAlive) || (settings.isDefined && settings.get.keepAliveSeconds.isEmpty && childCount > autoCount)
if (isAlive) {
val directive = prepareDirective(directives)
val rich = IndexNodeRich[Model](indexNode.indexNodeId, isAlive, indexNode.indexNode, Nil, directive)
if (debugMode)
PrintColored("b", "w")(s"[Thanos.Space, $modelId] Updated IndexNodeRich Static ${(isAlive, indexNode.indexNodeId)}")
WithPK(rich) :: Nil
} else {
Nil
}
}
// GC Nodes
def PowerGCIndexByNodes(
indexNodeRichId: SrcId,
parent: Each[IndexNodeRich[Model]]
): Values[(ThanosLEventsTransformsAll, LEventTransform)] =
if (!parent.isStatic)
for {
child ← parent.indexByNodes
if !child.isAlive
} yield {
if (debugMode)
PrintColored("m")(s"[Thanos.Power, $modelId] Deleted ${(child.indexByNode.leafId, child.indexByNode.byStr)}")
WithAll(PowerTransform(child.srcId, s"Power-${child.srcId}"))
}
else Nil
def PowerGCIndexForStatic(
indexByNodeId: SrcId,
indexByNodes: Each[IndexByNodeTyped[Model]],
indexByNodesLastSeen: Values[S_IndexByNodeLastSeen],
@byEq[PowerIndexNodeThanosAll](All) currentTimes: Each[S_CurrentTimeNode]
): Values[(ThanosLEventsTransformsAll, LEventTransform)] =
if (indexByNodesLastSeen.nonEmpty && currentTimes.currentTimeSeconds - indexByNodesLastSeen.head.lastSeenAtSeconds > deleteAnyway) {
WithAll(PowerTransform(indexByNodes.leafId, s"Anyway-${indexByNodes.leafId}")) :: Nil
} else {
Nil
}
}
case class RealityTransform[Model <: Product, By <: Product](srcId: SrcId, parentNodeId: String, heapIds: List[String], byStr: String, modelId: Int, defaultLive: Long) extends LEventTransform {
def lEvents(local: Context): Seq[LEvent[Product]] = {
val parentOpt: Option[S_IndexNodeSettings] = ByPK(classOf[S_IndexNodeSettings]).of(local).get(parentNodeId)
val settings: immutable.Seq[LEvent[Product]] = if (parentOpt.isDefined) {
val S_IndexNodeSettings(_, keepAlive, aliveSeconds) = parentOpt.get
val liveFor = aliveSeconds.getOrElse(defaultLive)
LEvent.update(S_IndexByNodeSettings(srcId, keepAlive, Some(liveFor)))
} else Nil
val now = Instant.now
val nowSeconds = now.getEpochSecond
val firstTime = System.currentTimeMillis()
val timedLocal: Seq[LEvent[Product]] =
LEvent.update(S_IndexByNode(srcId, parentNodeId, modelId, heapIds, byStr)) ++ settings ++ LEvent.delete(S_IndexByNodeLastSeen(srcId, nowSeconds))
val secondTime = System.currentTimeMillis()
LEvent.update(S_TimeMeasurement(srcId, Option(secondTime - firstTime))) ++ timedLocal
}
}
case class SoulTransform(srcId: SrcId, modelId: Int, byAdapterId: Long, commonPrefix: String, default: S_IndexNodeSettings, indexType: IndexType) extends LEventTransform {
def lEvents(local: Context): Seq[LEvent[Product]] = {
val firstTime = System.currentTimeMillis()
val S_IndexNodeSettings(_, alive, time) = default
val aliveWithType = if (indexType == Static) true else alive
val timedLocal: Seq[LEvent[Product]] =
LEvent.update(S_IndexNode(srcId, modelId, byAdapterId, commonPrefix)) ++
LEvent.update(S_IndexNodeSettings(srcId, allAlwaysAlive = aliveWithType, keepAliveSeconds = time))
val secondTime = System.currentTimeMillis()
LEvent.update(S_TimeMeasurement(srcId, Option(secondTime - firstTime))) ++ timedLocal
}
}
case class PowerTransform(srcId: SrcId, extraKey: String) extends LEventTransform {
def lEvents(local: Context): Seq[LEvent[Product]] =
LEvent.delete(S_IndexByNodeLastSeen(srcId, 0L)) ++
LEvent.delete(S_IndexByNode(srcId, "", 0, Nil, "")) ++
LEvent.delete(S_IndexByNodeSettings(srcId, false, None)) ++
LEvent.delete(S_TimeMeasurement(srcId, None))
}
case class MindTransform(srcId: SrcId) extends LEventTransform {
def lEvents(local: Context): Seq[LEvent[Product]] = {
val now = Instant.now
val nowSeconds = now.getEpochSecond
LEvent.update(S_IndexByNodeLastSeen(srcId, nowSeconds))
}
}
case class RevertedMindTransform(srcId: SrcId) extends LEventTransform {
def lEvents(local: Context): Seq[LEvent[Product]] = {
LEvent.delete(S_IndexByNodeLastSeen(srcId, 0L))
}
}
case class SoulCorrectionTransform(srcId: SrcId, indexNodeList: List[S_IndexNode]) extends LEventTransform {
def lEvents(local: Context): Seq[LEvent[Product]] =
indexNodeList.flatMap(node ⇒ node :: S_IndexNodeSettings(node.indexNodeId, false, None) :: S_TimeMeasurement(node.indexNodeId, None) :: Nil)
.flatMap(LEvent.delete)
}
case class SnapTransform(srcId: String, fbId: String, version: String) extends TxTransform {
def transform(local: Context): Context = {
val versionW = ByPK(classOf[S_IndexNodesVersion]).of(local).values.headOption.map(_.version).getOrElse("")
if (version != versionW) {
val delete =
(ByPK(classOf[S_IndexNodesVersion]).of(local).values ++
ByPK(classOf[S_IndexNode]).of(local).values ++
ByPK(classOf[S_IndexNodeSettings]).of(local).values ++
ByPK(classOf[S_IndexByNodesStats]).of(local).values ++
ByPK(classOf[S_IndexByNode]).of(local).values ++
ByPK(classOf[S_IndexByNodeLastSeen]).of(local).values ++
ByPK(classOf[S_IndexByNodeSettings]).of(local).values ++
ByPK(classOf[S_TimeMeasurement]).of(local).values).flatMap(LEvent.delete).toList
val add = LEvent.update(S_IndexNodesVersion(fbId, version))
TxAdd(delete ++ add)(local)
}
else
local
}
}
| wregs/c4proto | c4actor-extra/src/main/scala/ee/cone/c4actor/hashsearch/index/dynamic/IndexNodeThanos.scala | Scala | apache-2.0 | 22,097 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.storage
import java.nio.{ByteBuffer, MappedByteBuffer}
import java.util.{Arrays, Random}
import com.google.common.io.{ByteStreams, Files}
import io.netty.channel.FileRegion
import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
import org.apache.spark.internal.config
import org.apache.spark.network.util.{ByteArrayWritableChannel, JavaUtils}
import org.apache.spark.security.CryptoStreamUtils
import org.apache.spark.util.Utils
import org.apache.spark.util.io.ChunkedByteBuffer
class DiskStoreSuite extends SparkFunSuite {
test("reads of memory-mapped and non memory-mapped files are equivalent") {
val conf = new SparkConf()
val securityManager = new SecurityManager(conf)
// On Windows this test errors out when re-opening the file store while the
// memory-mapped byte buffer to the file has not yet been garbage-collected.
assume(!Utils.isWindows)
val confKey = config.STORAGE_MEMORY_MAP_THRESHOLD.key
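// Blocks at or above the threshold are memory-mapped on read; smaller ones are
// read into heap buffers. Setting "0" below forces mapping of the 1000-byte
// block, while "1m" forces a plain heap read.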
// Create a non-trivial (not all zeros) byte array
val bytes = Array.tabulate[Byte](1000)(_.toByte)
val byteBuffer = new ChunkedByteBuffer(ByteBuffer.wrap(bytes))
val blockId = BlockId("rdd_1_2")
val diskBlockManager = new DiskBlockManager(conf, deleteFilesOnStop = true)
val diskStoreMapped = new DiskStore(conf.clone().set(confKey, "0"), diskBlockManager,
securityManager)
diskStoreMapped.putBytes(blockId, byteBuffer)
val mapped = diskStoreMapped.getBytes(blockId).toByteBuffer()
assert(diskStoreMapped.remove(blockId))
val diskStoreNotMapped = new DiskStore(conf.clone().set(confKey, "1m"), diskBlockManager,
securityManager)
diskStoreNotMapped.putBytes(blockId, byteBuffer)
val notMapped = diskStoreNotMapped.getBytes(blockId).toByteBuffer()
// Not possible to do isInstanceOf due to visibility of HeapByteBuffer
assert(notMapped.getClass.getName.endsWith("HeapByteBuffer"),
"Expected HeapByteBuffer for un-mapped read")
assert(mapped.isInstanceOf[MappedByteBuffer],
"Expected MappedByteBuffer for mapped read")
def arrayFromByteBuffer(in: ByteBuffer): Array[Byte] = {
val array = new Array[Byte](in.remaining())
in.get(array)
array
}
assert(Arrays.equals(new ChunkedByteBuffer(mapped).toArray, bytes))
assert(Arrays.equals(new ChunkedByteBuffer(notMapped).toArray, bytes))
}
test("block size tracking") {
val conf = new SparkConf()
val diskBlockManager = new DiskBlockManager(conf, deleteFilesOnStop = true)
val diskStore = new DiskStore(conf, diskBlockManager, new SecurityManager(conf))
val blockId = BlockId("rdd_1_2")
diskStore.put(blockId) { chan =>
val buf = ByteBuffer.wrap(new Array[Byte](32))
while (buf.hasRemaining()) {
chan.write(buf)
}
}
assert(diskStore.getSize(blockId) === 32L)
diskStore.remove(blockId)
assert(diskStore.getSize(blockId) === 0L)
}
test("blocks larger than 2gb") {
val conf = new SparkConf()
.set(config.MEMORY_MAP_LIMIT_FOR_TESTS.key, "10k")
val diskBlockManager = new DiskBlockManager(conf, deleteFilesOnStop = true)
val diskStore = new DiskStore(conf, diskBlockManager, new SecurityManager(conf))
val blockId = BlockId("rdd_1_2")
diskStore.put(blockId) { chan =>
val arr = new Array[Byte](1024)
for {
_ <- 0 until 20
} {
val buf = ByteBuffer.wrap(arr)
while (buf.hasRemaining()) {
chan.write(buf)
}
}
}
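// 20 writes of a 1 KiB buffer produce a 20 KiB block; with the 10 KiB map
// limit set above it is exposed as two 10 KiB chunks, and toByteBuffer() must fail.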
val blockData = diskStore.getBytes(blockId)
assert(blockData.size == 20 * 1024)
val chunkedByteBuffer = blockData.toChunkedByteBuffer(ByteBuffer.allocate)
val chunks = chunkedByteBuffer.chunks
assert(chunks.size === 2)
for (chunk <- chunks) {
assert(chunk.limit() === 10 * 1024)
}
val e = intercept[IllegalArgumentException]{
blockData.toByteBuffer()
}
assert(e.getMessage ===
s"requirement failed: can't create a byte buffer of size ${blockData.size}" +
" since it exceeds 10.0 KiB.")
}
test("block data encryption") {
val testData = new Array[Byte](128 * 1024)
new Random().nextBytes(testData)
val conf = new SparkConf()
val securityManager = new SecurityManager(conf, Some(CryptoStreamUtils.createKey(conf)))
val diskBlockManager = new DiskBlockManager(conf, deleteFilesOnStop = true)
val diskStore = new DiskStore(conf, diskBlockManager, securityManager)
val blockId = BlockId("rdd_1_2")
diskStore.put(blockId) { chan =>
val buf = ByteBuffer.wrap(testData)
while (buf.hasRemaining()) {
chan.write(buf)
}
}
assert(diskStore.getSize(blockId) === testData.length)
val diskData = Files.toByteArray(diskBlockManager.getFile(blockId.name))
assert(!Arrays.equals(testData, diskData))
val blockData = diskStore.getBytes(blockId)
assert(blockData.isInstanceOf[EncryptedBlockData])
assert(blockData.size === testData.length)
Map(
"input stream" -> readViaInputStream _,
"chunked byte buffer" -> readViaChunkedByteBuffer _,
"nio byte buffer" -> readViaNioBuffer _,
"managed buffer" -> readViaManagedBuffer _
).foreach { case (name, fn) =>
val readData = fn(blockData)
assert(readData.length === blockData.size, s"Size of data read via $name did not match.")
assert(Arrays.equals(testData, readData), s"Data read via $name did not match.")
}
}
private def readViaInputStream(data: BlockData): Array[Byte] = {
val is = data.toInputStream()
try {
ByteStreams.toByteArray(is)
} finally {
is.close()
}
}
private def readViaChunkedByteBuffer(data: BlockData): Array[Byte] = {
val buf = data.toChunkedByteBuffer(ByteBuffer.allocate _)
try {
buf.toArray
} finally {
buf.dispose()
}
}
private def readViaNioBuffer(data: BlockData): Array[Byte] = {
JavaUtils.bufferToArray(data.toByteBuffer())
}
private def readViaManagedBuffer(data: BlockData): Array[Byte] = {
val region = data.toNetty().asInstanceOf[FileRegion]
val byteChannel = new ByteArrayWritableChannel(data.size.toInt)
while (region.transferred() < region.count()) {
region.transferTo(byteChannel, region.transferred())
}
byteChannel.close()
byteChannel.getData
}
}
| wangmiao1981/spark | core/src/test/scala/org/apache/spark/storage/DiskStoreSuite.scala | Scala | apache-2.0 | 7,148 |
package ch.ninecode.model
import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.Serializer
import com.esotericsoftware.kryo.io.Input
import com.esotericsoftware.kryo.io.Output
import org.apache.spark.sql.Row
import ch.ninecode.cim.CIMClassInfo
import ch.ninecode.cim.CIMContext
import ch.ninecode.cim.CIMParseable
import ch.ninecode.cim.CIMSerializer
/**
* Vertical displacement relative to either sealevel, ground or the center of the earth.
*
* @param Element Reference to the superclass object.
* @param displacement <em>undocumented</em>
* @param kind <em>undocumented</em>
* @group EnvDomain
* @groupname EnvDomain Package EnvDomain
*/
final case class RelativeDisplacement
(
Element: BasicElement = null,
displacement: Double = 0.0,
kind: String = null
)
extends
Element
{
/**
* Return the superclass object.
*
* @return The typed superclass nested object.
* @group Hierarchy
* @groupname Hierarchy Class Hierarchy Related
* @groupdesc Hierarchy Members related to the nested hierarchy of CIM classes.
*/
override def sup: Element = Element
//
// Row overrides
//
/**
* Return a copy of this object as a Row.
*
* Creates a clone of this object for use in Row manipulations.
*
* @return The copy of the object.
* @group Row
* @groupname Row SQL Row Implementation
* @groupdesc Row Members related to implementing the SQL Row interface
*/
override def copy (): Row =
{
clone().asInstanceOf[Row]
}
override def export_fields: String =
{
implicit val s: StringBuilder = new StringBuilder(sup.export_fields)
implicit val clz: String = RelativeDisplacement.cls
def emitelem (position: Int, value: Any): Unit = if (mask(position)) emit_element(RelativeDisplacement.fields(position), value)
def emitattr (position: Int, value: Any): Unit = if (mask(position)) emit_attribute(RelativeDisplacement.fields(position), value)
emitelem(0, displacement)
emitattr(1, kind)
s.toString
}
override def export: String =
{
"\\t<cim:RelativeDisplacement rdf:%s=\\"%s\\">\\n%s\\t</cim:RelativeDisplacement>".format(if (about) "about" else "ID", id, export_fields)
}
}
object RelativeDisplacement
extends
CIMParseable[RelativeDisplacement]
{
override val fields: Array[String] = Array[String](
"displacement",
"kind"
)
val displacement: Fielder = parse_element(element(cls, fields(0)))
val kind: Fielder = parse_attribute(attribute(cls, fields(1)))
def parse (context: CIMContext): RelativeDisplacement =
{
implicit val ctx: CIMContext = context
implicit val bitfields: Array[Int] = Array(0)
val ret = RelativeDisplacement(
BasicElement.parse(context),
toDouble(mask(displacement(), 0)),
mask(kind(), 1)
)
ret.bitfields = bitfields
ret
}
def serializer: Serializer[RelativeDisplacement] = RelativeDisplacementSerializer
}
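// The bitfields array records which of the two fields were present in the
// parsed CIM text (bit 0: displacement, bit 1: kind); the serializer below
// writes the same bits and reads back only the fields whose bits are set.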
object RelativeDisplacementSerializer extends CIMSerializer[RelativeDisplacement]
{
def write (kryo: Kryo, output: Output, obj: RelativeDisplacement): Unit =
{
val toSerialize: Array[() => Unit] = Array(
() => output.writeDouble(obj.displacement),
() => output.writeString(obj.kind)
)
BasicElementSerializer.write(kryo, output, obj.sup.asInstanceOf[BasicElement])
implicit val bitfields: Array[Int] = obj.bitfields
writeBitfields(output)
writeFields(toSerialize)
}
def read (kryo: Kryo, input: Input, cls: Class[RelativeDisplacement]): RelativeDisplacement =
{
val parent = BasicElementSerializer.read(kryo, input, classOf[BasicElement])
implicit val bitfields: Array[Int] = readBitfields(input)
val obj = RelativeDisplacement(
parent,
if (isSet(0)) input.readDouble else 0.0,
if (isSet(1)) input.readString else null
)
obj.bitfields = bitfields
obj
}
}
private[ninecode] object _EnvDomain
{
def register: List[CIMClassInfo] =
{
List(
RelativeDisplacement.register
)
}
}
| derrickoswald/CIMScala | CIMReader/src/main/scala/ch/ninecode/model/EnvDomain.scala | Scala | mit | 4,324 |
/*
* Copyright 2015 eleflow.com.br.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eleflow.sparknotebook.enums
/**
* Created by dirceu on 20/02/15.
*/
object PeriodOfDay extends Enumeration {
type PeriodOfDay = Value
val Morning, Afternoon, Evening, Dawn = Value
}
| eleflow/sparknotebook | src/main/scala/eleflow/sparknotebook/enums/PeriodOfDay.scala | Scala | apache-2.0 | 777 |
package com.optrak.testakka.modelutils
import ModelActor.{GreetingResponse, Hello, OnlyMessagesBeginningWithHException}
import akka.Done
import akka.actor.Actor
import com.optrak.testakka.api.{GreetingMessage, PersistentService}
import scala.concurrent.Future
object ModelActor {
class OnlyMessagesBeginningWithHException extends Exception("you can only have greetings beginning with H")
case object Hello
case class GreetingResponse(message: String)
}
abstract class ModelActor extends Actor {
def persistentService: PersistentService
implicit def executionContext = context.dispatcher
def id: String
var cached: Option[GreetingMessage] = None
// todo - some dodgy use of futures going on here perhaps. Need to check
override def receive: Receive = {
case gm: GreetingMessage =>
val senderz = sender
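// sender() is only stable while the actor processes the current message, so it
// must be captured into a val before being used inside the Future callback below.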
println(s"got greeting message $gm")
if (gm.message.startsWith("H"))
for {
using <- persistentService.useGreeting(id).invoke(gm)
returned <- persistentService.getGreeting(id).invoke
} yield {
assert(returned == gm, "not stored")
cached = Some(returned)
using
senderz ! Done
}
else
throw new OnlyMessagesBeginningWithHException
case Hello =>
println(s"got a hello")
val senderz = sender
println(s"sender was $senderz")
for {
cachedMessage <- cached.map {
Future.successful(_)
}
.getOrElse(
persistentService.getGreeting(id).invoke.map { greeting =>
println(s"got a greeting from service $persistentService")
cached = Some(greeting)
greeting
})
} yield {
println(s"gt cached message $cachedMessage")
senderz ! GreetingResponse(s"${cachedMessage.message}, $id!") }
}
}
| Optrak/lagom-testbed | test-akka-integration/akka-model/utils/src/main/scala/com/optrak/testakka/modelutils/ModelActor.scala | Scala | apache-2.0 | 1,877 |
/*
* This file is part of the ToolXiT project.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package toolxit.bibtex
package bst
/**
* This trait offers a collection of built-in bst functions translated into Scala.
*
* @author Lucas Satabin
*
*/
trait BuiltIn[Rendered] {
import macros.OctalLiterals._
private val formatters = scala.collection.mutable.Map.empty[String, NameFormatter]
// character width
private lazy val widths = {
val res = Array.fill[Int](o"200")(0)
res(o"40") = 278
res(o"41") = 278
res(o"42") = 500
res(o"43") = 833
res(o"44") = 500
res(o"45") = 833
res(o"46") = 778
res(o"47") = 278
res(o"50") = 389
res(o"51") = 389
res(o"52") = 500
res(o"53") = 778
res(o"54") = 278
res(o"55") = 333
res(o"56") = 278
res(o"57") = 500
res(o"60") = 500
res(o"61") = 500
res(o"62") = 500
res(o"63") = 500
res(o"64") = 500
res(o"65") = 500
res(o"66") = 500
res(o"67") = 500
res(o"70") = 500
res(o"71") = 500
res(o"72") = 278
res(o"73") = 278
res(o"74") = 278
res(o"75") = 778
res(o"76") = 472
res(o"77") = 472
res(o"100") = 778
res(o"101") = 750
res(o"102") = 708
res(o"103") = 722
res(o"104") = 764
res(o"105") = 681
res(o"106") = 653
res(o"107") = 785
res(o"110") = 750
res(o"111") = 361
res(o"112") = 514
res(o"113") = 778
res(o"114") = 625
res(o"115") = 917
res(o"116") = 750
res(o"117") = 778
res(o"120") = 681
res(o"121") = 778
res(o"122") = 736
res(o"123") = 556
res(o"124") = 722
res(o"125") = 750
res(o"126") = 750
res(o"127") = 1028
res(o"130") = 750
res(o"131") = 750
res(o"132") = 611
res(o"133") = 278
res(o"134") = 500
res(o"135") = 278
res(o"136") = 500
res(o"137") = 278
res(o"140") = 278
res(o"141") = 500
res(o"142") = 556
res(o"143") = 444
res(o"144") = 556
res(o"145") = 444
res(o"146") = 306
res(o"147") = 500
res(o"150") = 556
res(o"151") = 278
res(o"152") = 306
res(o"153") = 528
res(o"154") = 278
res(o"155") = 833
res(o"156") = 556
res(o"157") = 500
res(o"160") = 556
res(o"161") = 528
res(o"162") = 392
res(o"163") = 394
res(o"164") = 389
res(o"165") = 556
res(o"166") = 528
res(o"167") = 722
res(o"170") = 528
res(o"171") = 528
res(o"172") = 444
res(o"173") = 500
res(o"174") = 1000
res(o"175") = 500
res(o"176") = 500
res
}
/**
* a function that, given an entry name, returns the rendering function
* if any. Returns `None' if none found.
*/
val renderingFunction: String ⇒ TOption[BibEntry ⇒ Rendered]
/**
* Adds a ‘.’ to it if the last non‘}’ character isn’t a ‘.’, ‘?’, or ‘!’,
* and returns this resulting string.
*/
def addPeriod$(string: String) = {
val periods = List('.', '!', '?')
val lastNonBraceIndex = string.lastIndexWhere(_ != '}')
if (lastNonBraceIndex >= 0 && periods.contains(string(lastNonBraceIndex)))
string
else
string + "."
}
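// For instance (per the rule above): addPeriod$("Hello") == "Hello.",
// addPeriod$("Hello!}") == "Hello!}" (last non-'}' char is '!'),
// and addPeriod$("{Hello}") == "{Hello}." (last non-'}' char is 'o').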
/**
* Executes the function whose name is the entry type of an entry.
* For example, if an entry is of type book, this function executes the
* book function. When given as an argument to the ITERATE command,
* call.type$ actually produces the output for the entries. For an entry
* with an unknown type, it executes the function default.type. Thus you
* should define (before the READ command) one function for each standard
* entry type as well as a default.type function.
*
* In this case, it calls the `render*' method
*/
def callType$(implicit entry: Option[BibEntry]): TOption[BibEntry ⇒ Rendered] =
entry match {
case Some(e) ⇒
renderingFunction(e.name)
case None ⇒
TError("There is no current entry, unable to execute the `call.type$' function")
}
/**
* Turns a string to lower case, except for the first letter and for
* letters that appear at brace-depth strictly positive. Remember
* that special characters are at depth 0
*/
def toLowerButFirst(s: String) =
StringFormatters.toLowerButFirst(s)
/** Turns S to lower case, except parts that are at strictly positive brace-depth */
def toLower(s: String) =
StringFormatters.toLower(s)
/** Turns S to upper case, except parts that are at strictly positive brace-depth */
def toUpper(s: String) =
StringFormatters.toUpper(s)
/**
* Returns the internal key of the current entry.
*/
def cite$(implicit entry: Option[BibEntry]): TOption[String] =
entry match {
case Some(e) ⇒
TSome(e.key)
case None ⇒
TError("There is no current entry, unable to execute the `cite$' function")
}
/**
* extract the `authorNb'-th name of string `authorList' (in which names are
* separated by and), and formats it according to specification
* given by string `pattern'.
*/
def formatName$(pattern: String, authorNb: Int, authorList: String) = {
// extract author names
val list = AuthorNamesExtractor.toList(authorList)
if (list.size > authorNb) {
// get the formatter associated to the pattern
try {
val formatter = formatters.getOrElseUpdate(pattern, new NameFormatter(pattern))
// returns the formatted name
TSome(formatter(list(authorNb)))
}
catch {
case e: Exception ⇒
TError("Unable to call `format,name$' function:\n", e)
}
}
else {
// author does not exist
TError(authorNb + "-th author does not exist in {" + authorList + "}")
}
}
def numNames$(authorList: String) =
AuthorNamesExtractor.authorNb(authorList)
def purify$(s: String) = {
def purifyWord(word: Word): String =
word.letters.foldLeft("") { (result, current) ⇒
val purified = current match {
case CharacterLetter(c) if c.isLetterOrDigit ⇒ c
case CharacterLetter('-') ⇒ " "
case CharacterLetter('~') ⇒ " "
case SpecialLetter(_, Some(arg), false) ⇒ arg
case BlockLetter(parts) ⇒ purifyWord(SimpleWord(parts))
case _ ⇒ ""
}
result + purified
}
import StringUtils.StringParser
StringParser.parseAll(StringParser.string, s) match {
case StringParser.Success(res, _) ⇒
TSome(res.map(purifyWord _).mkString(" "))
case fail ⇒
TError(fail.toString)
}
}
def width$(s: String) = {
def charWidth(c: Char) =
if (c >= 0 && c < o"200")
widths(c)
else 0
def letterWidth(l: PseudoLetter): Int = l match {
case CharacterLetter(c) ⇒ charWidth(c)
case BlockLetter(parts) ⇒
parts.map(letterWidth _).sum // does not take braces into account
case SpecialLetter("oe", _, _) ⇒ 778
case SpecialLetter("OE", _, _) ⇒ 1014
case SpecialLetter("ae", _, _) ⇒ 722
case SpecialLetter("AE", _, _) ⇒ 903
case SpecialLetter("ss", _, _) ⇒ 500
case SpecialLetter(command, arg, _) if command(0).isLetter ⇒
charWidth(command(0)) + arg.map(_.map(charWidth _).sum).getOrElse(0)
case SpecialLetter(_, arg, _) ⇒
arg.map(_.map(charWidth _).sum).getOrElse(0)
}
def wordWidth(w: Word): Int =
w.letters.map(letterWidth _).sum
import StringUtils.StringParser
StringParser.parseAll(StringParser.string, s) match {
case StringParser.Success(res, _) ⇒
TSome(res.foldLeft(0) { (result, current) ⇒
result + wordWidth(current)
})
case fail ⇒
TError(fail.toString)
}
}
}
| ISCPIF/PSEExperiments | openmole-src/openmole/third-parties/toolxit.bibtex/core/src/main/scala/toolxit/bibtex/bst/BuiltIn.scala | Scala | agpl-3.0 | 8,367 |
package mouse
import cats.~>
trait AnyFSyntax {
implicit final def anyfSyntaxMouse[F[_], A](fa: F[A]): AnyFOps[F, A] = new AnyFOps(fa)
}
final class AnyFOps[F[_], A](private val fa: F[A]) extends AnyVal {
@inline def ||>[G[_]](f: F ~> G): G[A] = f(fa)
@inline def thrushK[G[_]](f: F ~> G): G[A] = f(fa)
}
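// Usage sketch (hypothetical values, assuming cats.~> and this syntax are in scope):
//   val headOption = new (List ~> Option) { def apply[A](fa: List[A]): Option[A] = fa.headOption }
//   List(1, 2, 3) ||> headOption   // Some(1)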
| benhutchison/mouse | shared/src/main/scala/mouse/anyf.scala | Scala | mit | 313 |
package com.atomist.param
import com.fasterxml.jackson.annotation.{JsonCreator, JsonProperty, JsonSetter}
import scala.collection.mutable.ListBuffer
class Parameter @JsonCreator()(@JsonProperty("name") val name: String) {
@JsonProperty
var description: String = ""
/**
* Default value for this parameter. Empty if there is no default.
*/
@JsonProperty("default_value")
private var defaultValue: String = ""
/**
* Reference to another property name.
*/
@JsonProperty("default_ref")
private var defaultRef: String = _
/**
* Regular expression used to validate this parameter.
*/
@JsonProperty
private var pattern: String = ParameterValidationPatterns.MatchAny
/**
* Description of what valid input looks like. This can be
* displayed to the user if validation using the pattern property fails.
*/
@JsonProperty("valid_input_description")
private var validInputDescription: String = "String value"
@JsonProperty
private var required: Boolean = true
/**
* Should we display this to users or is it purely for machines?
*/
@JsonProperty
private var displayable: Boolean = true
/**
* Returned to identify parameter.
*/
@JsonProperty
private val tags = new ListBuffer[Tag]
@JsonProperty("max_length")
private var maxLength: Int = -1
@JsonProperty("min_length")
private var minLength: Int = -1
@JsonProperty("display_name")
private var displayName: String = _
@JsonProperty("allowed_values")
private val allowedValues = new ListBuffer[AllowedValue]
def getName: String = name
def getDescription: String = description
@JsonSetter
def describedAs(description: String): this.type = {
this.description = description
this
}
def getDefaultValue: String = defaultValue
def hasDefaultValue: Boolean = defaultValue != null && !"".equals(defaultValue)
def setDefaultValue(defaultValue: String): this.type = {
this.defaultValue = defaultValue
this
}
def getDefaultRef: String = defaultRef
def hasDefaultRef: Boolean = defaultRef != null && !"".equals(defaultRef)
def setDefaultRef(defaultRef: String): this.type = {
this.defaultRef = defaultRef
this
}
def getPattern: String = pattern
def setPattern(pattern: String): this.type = {
this.pattern = pattern
this
}
def getValidInputDescription: String = validInputDescription
def setValidInputDescription(validInputDescription: String): this.type = {
this.validInputDescription = validInputDescription
this
}
def isRequired: Boolean = required
def setRequired(required: Boolean): this.type = {
this.required = required
this
}
def isDisplayable: Boolean = displayable
def setDisplayable(displayable: Boolean): this.type = {
this.displayable = displayable
this
}
def getTags: ListBuffer[Tag] = tags.filterNot(_ == null)
def tagWith(tag: Tag): this.type = {
tags += tag
this
}
def addTags(tags: Seq[Tag]): this.type = {
this.tags ++= tags
this
}
def getMaxLength: Int = maxLength
def setMaxLength(maxLength: Int): this.type = {
this.maxLength = maxLength
this
}
def getMinLength: Int = minLength
def setMinLength(minLength: Int): this.type = {
this.minLength = minLength
this
}
def getDisplayName: String = displayName
def setDisplayName(displayName: String): this.type = {
this.displayName = displayName
this
}
def getAllowedValues: ListBuffer[AllowedValue] = allowedValues.filterNot(_ == null)
def setAllowedValues(allowedValues: Seq[AllowedValue]): this.type = {
this.allowedValues.clear()
this.allowedValues ++= allowedValues
this
}
def withAllowedValue(allowedValue: AllowedValue): this.type = {
allowedValues += allowedValue
this
}
def withAllowedValue(name: String, displayName: String): this.type = {
withAllowedValue(AllowedValue(name, displayName))
this
}
/** If obj is a String:
* - If there are allowedValues, return true if value is in the list of allowedValues
* - If allowedValues is empty, return true if value meets the minimum and maximum
* length requirements and it satisfies the parameter validation regular
* expression.
* Otherwise return false.
* If obj is not a String, return false.
*
* @param obj value to be checked
* @return true is the obj is a valid value, false otherwise.
*/
def isValidValue(obj: Any): Boolean = obj match {
case s: String =>
if (allowedValues.nonEmpty) allowedValues.exists(_.value == s)
else (minLength < 0 || s.length >= minLength) &&
(maxLength < 0 || s.length <= maxLength) &&
pattern.r.findAllMatchIn(s).nonEmpty
case _ => false
}
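// A sketch with hypothetical values: for Parameter("port", "\\d+").setMinLength(2).setMaxLength(5),
// isValidValue("8080") is true, isValidValue("1") is false (below minLength),
// and isValidValue(8080) is false because the input is not a String.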
override def toString: String = s"Parameter{name='$name', description='$description', " +
s"defaultValue='$defaultValue', defaultRef='$defaultRef', pattern='$pattern', " +
s"validInputDescription='$validInputDescription', required=$required, " +
s"displayable=$displayable, tags=$tags, maxLength=$maxLength, minLength=$minLength, " +
s"displayName='$displayName', allowedValues=$allowedValues}"
}
object Parameter {
def apply(name: String): Parameter = new Parameter(name)
def apply(name: String, pattern: String): Parameter = new Parameter(name).setPattern(pattern)
def apply(name: String, pattern: String, defaultValue: String): Parameter =
new Parameter(name).setPattern(pattern).setDefaultValue(defaultValue)
}
| atomist/rug | src/main/scala/com/atomist/param/Parameter.scala | Scala | gpl-3.0 | 5,523 |
/*
* Copyright 2001-2008 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.matchers
import org.scalatest._
import org.scalatest.exceptions.TestFailedException
class ShouldPlusOrMinusSpec extends Spec with ShouldMatchers {
object `The be (X plusOrMinus Y) syntax` {
val sevenDotOh = 7.0
val minusSevenDotOh = -7.0
val sevenDotOhFloat = 7.0f
val minusSevenDotOhFloat = -7.0f
val sevenLong = 7L
val minusSevenLong = -7L
val sevenInt = 7
val minusSevenInt = -7
val sevenShort: Short = 7
val minusSevenShort: Short = -7
val sevenByte: Byte = 7
val minusSevenByte: Byte = -7
/*
I decided that for X plusOrMinus Y, Y can be any numeric type that's implicitly
convertible to X. So if X is Double, Y could be Double, Float, Long, Int, Short, Byte.
If X is Long, Y could be Long, Int, Short, Byte. If X is Short, Y could be Short or Byte.
And if X is Byte, Y must be Byte.
minusSevenDotOhFloat should be (-6.8f plusOrMinus 0.2d)
*/
def `should do nothing if the number is within the specified range` {
// Double plusOrMinus Double
sevenDotOh should be (7.1 plusOrMinus 0.2)
sevenDotOh should be (6.9 plusOrMinus 0.2)
sevenDotOh should be (7.0 plusOrMinus 0.2)
sevenDotOh should be (7.2 plusOrMinus 0.2)
sevenDotOh should be (6.8 plusOrMinus 0.2)
minusSevenDotOh should be (-7.1 plusOrMinus 0.2)
minusSevenDotOh should be (-6.9 plusOrMinus 0.2)
minusSevenDotOh should be (-7.0 plusOrMinus 0.2)
minusSevenDotOh should be (-7.2 plusOrMinus 0.2)
minusSevenDotOh should be (-6.8 plusOrMinus 0.2)
// Double plusOrMinus Float
sevenDotOh should be (7.1 plusOrMinus 0.2f)
sevenDotOh should be (6.9 plusOrMinus 0.2f)
sevenDotOh should be (7.0 plusOrMinus 0.2f)
sevenDotOh should be (7.2 plusOrMinus 0.2f)
sevenDotOh should be (6.8 plusOrMinus 0.2f)
minusSevenDotOh should be (-7.1 plusOrMinus 0.2f)
minusSevenDotOh should be (-6.9 plusOrMinus 0.2f)
minusSevenDotOh should be (-7.0 plusOrMinus 0.2f)
minusSevenDotOh should be (-7.2 plusOrMinus 0.2f)
minusSevenDotOh should be (-6.8 plusOrMinus 0.2f)
// Double plusOrMinus Long
sevenDotOh should be (7.1 plusOrMinus 2L)
sevenDotOh should be (6.9 plusOrMinus 2L)
sevenDotOh should be (7.0 plusOrMinus 2L)
sevenDotOh should be (7.2 plusOrMinus 2L)
sevenDotOh should be (6.8 plusOrMinus 2L)
minusSevenDotOh should be (-7.1 plusOrMinus 2L)
minusSevenDotOh should be (-6.9 plusOrMinus 2L)
minusSevenDotOh should be (-7.0 plusOrMinus 2L)
minusSevenDotOh should be (-7.2 plusOrMinus 2L)
minusSevenDotOh should be (-6.8 plusOrMinus 2L)
// Double plusOrMinus Int
sevenDotOh should be (7.1 plusOrMinus 2)
sevenDotOh should be (6.9 plusOrMinus 2)
sevenDotOh should be (7.0 plusOrMinus 2)
sevenDotOh should be (7.2 plusOrMinus 2)
sevenDotOh should be (6.8 plusOrMinus 2)
minusSevenDotOh should be (-7.1 plusOrMinus 2)
minusSevenDotOh should be (-6.9 plusOrMinus 2)
minusSevenDotOh should be (-7.0 plusOrMinus 2)
minusSevenDotOh should be (-7.2 plusOrMinus 2)
minusSevenDotOh should be (-6.8 plusOrMinus 2)
// Double plusOrMinus Short
sevenDotOh should be (7.1 plusOrMinus 2.toShort)
sevenDotOh should be (6.9 plusOrMinus 2.toShort)
sevenDotOh should be (7.0 plusOrMinus 2.toShort)
sevenDotOh should be (7.2 plusOrMinus 2.toShort)
sevenDotOh should be (6.8 plusOrMinus 2.toShort)
minusSevenDotOh should be (-7.1 plusOrMinus 2.toShort)
minusSevenDotOh should be (-6.9 plusOrMinus 2.toShort)
minusSevenDotOh should be (-7.0 plusOrMinus 2.toShort)
minusSevenDotOh should be (-7.2 plusOrMinus 2.toShort)
minusSevenDotOh should be (-6.8 plusOrMinus 2.toShort)
// Double plusOrMinus Byte
sevenDotOh should be (7.1 plusOrMinus 2.toByte)
sevenDotOh should be (6.9 plusOrMinus 2.toByte)
sevenDotOh should be (7.0 plusOrMinus 2.toByte)
sevenDotOh should be (7.2 plusOrMinus 2.toByte)
sevenDotOh should be (6.8 plusOrMinus 2.toByte)
minusSevenDotOh should be (-7.1 plusOrMinus 2.toByte)
minusSevenDotOh should be (-6.9 plusOrMinus 2.toByte)
minusSevenDotOh should be (-7.0 plusOrMinus 2.toByte)
minusSevenDotOh should be (-7.2 plusOrMinus 2.toByte)
minusSevenDotOh should be (-6.8 plusOrMinus 2.toByte)
// Float plusOrMinus Float
sevenDotOhFloat should be (7.1f plusOrMinus 0.2f)
sevenDotOhFloat should be (6.9f plusOrMinus 0.2f)
sevenDotOhFloat should be (7.0f plusOrMinus 0.2f)
sevenDotOhFloat should be (7.2f plusOrMinus 0.2f)
sevenDotOhFloat should be (6.8f plusOrMinus 0.2f)
minusSevenDotOhFloat should be (-7.1f plusOrMinus 0.2f)
minusSevenDotOhFloat should be (-6.9f plusOrMinus 0.2f)
minusSevenDotOhFloat should be (-7.0f plusOrMinus 0.2f)
minusSevenDotOhFloat should be (-7.2f plusOrMinus 0.2f)
minusSevenDotOhFloat should be (-6.8f plusOrMinus 0.2f)
// Float plusOrMinus Long
sevenDotOhFloat should be (7.1f plusOrMinus 2L)
sevenDotOhFloat should be (6.9f plusOrMinus 2L)
sevenDotOhFloat should be (7.0f plusOrMinus 2L)
sevenDotOhFloat should be (7.2f plusOrMinus 2L)
sevenDotOhFloat should be (6.8f plusOrMinus 2L)
minusSevenDotOhFloat should be (-7.1f plusOrMinus 2L)
minusSevenDotOhFloat should be (-6.9f plusOrMinus 2L)
minusSevenDotOhFloat should be (-7.0f plusOrMinus 2L)
minusSevenDotOhFloat should be (-7.2f plusOrMinus 2L)
minusSevenDotOhFloat should be (-6.8f plusOrMinus 2L)
// Float plusOrMinus Int
sevenDotOhFloat should be (7.1f plusOrMinus 2)
sevenDotOhFloat should be (6.9f plusOrMinus 2)
sevenDotOhFloat should be (7.0f plusOrMinus 2)
sevenDotOhFloat should be (7.2f plusOrMinus 2)
sevenDotOhFloat should be (6.8f plusOrMinus 2)
minusSevenDotOhFloat should be (-7.1f plusOrMinus 2)
minusSevenDotOhFloat should be (-6.9f plusOrMinus 2)
minusSevenDotOhFloat should be (-7.0f plusOrMinus 2)
minusSevenDotOhFloat should be (-7.2f plusOrMinus 2)
minusSevenDotOhFloat should be (-6.8f plusOrMinus 2)
// Float plusOrMinus Short
sevenDotOhFloat should be (7.1f plusOrMinus 2.toShort)
sevenDotOhFloat should be (6.9f plusOrMinus 2.toShort)
sevenDotOhFloat should be (7.0f plusOrMinus 2.toShort)
sevenDotOhFloat should be (7.2f plusOrMinus 2.toShort)
sevenDotOhFloat should be (6.8f plusOrMinus 2.toShort)
minusSevenDotOhFloat should be (-7.1f plusOrMinus 2.toShort)
minusSevenDotOhFloat should be (-6.9f plusOrMinus 2.toShort)
minusSevenDotOhFloat should be (-7.0f plusOrMinus 2.toShort)
minusSevenDotOhFloat should be (-7.2f plusOrMinus 2.toShort)
minusSevenDotOhFloat should be (-6.8f plusOrMinus 2.toShort)
// Float plusOrMinus Byte
sevenDotOhFloat should be (7.1f plusOrMinus 2.toByte)
sevenDotOhFloat should be (6.9f plusOrMinus 2.toByte)
sevenDotOhFloat should be (7.0f plusOrMinus 2.toByte)
sevenDotOhFloat should be (7.2f plusOrMinus 2.toByte)
sevenDotOhFloat should be (6.8f plusOrMinus 2.toByte)
minusSevenDotOhFloat should be (-7.1f plusOrMinus 2.toByte)
minusSevenDotOhFloat should be (-6.9f plusOrMinus 2.toByte)
minusSevenDotOhFloat should be (-7.0f plusOrMinus 2.toByte)
minusSevenDotOhFloat should be (-7.2f plusOrMinus 2.toByte)
minusSevenDotOhFloat should be (-6.8f plusOrMinus 2.toByte)
// Long plusOrMinus Long
sevenLong should be (9L plusOrMinus 2L)
sevenLong should be (8L plusOrMinus 2L)
sevenLong should be (7L plusOrMinus 2L)
sevenLong should be (6L plusOrMinus 2L)
sevenLong should be (5L plusOrMinus 2L)
minusSevenLong should be (-9L plusOrMinus 2L)
minusSevenLong should be (-8L plusOrMinus 2L)
minusSevenLong should be (-7L plusOrMinus 2L)
minusSevenLong should be (-6L plusOrMinus 2L)
minusSevenLong should be (-5L plusOrMinus 2L)
// Long plusOrMinus Int
sevenLong should be (9L plusOrMinus 2)
sevenLong should be (8L plusOrMinus 2)
sevenLong should be (7L plusOrMinus 2)
sevenLong should be (6L plusOrMinus 2)
sevenLong should be (5L plusOrMinus 2)
minusSevenLong should be (-9L plusOrMinus 2)
minusSevenLong should be (-8L plusOrMinus 2)
minusSevenLong should be (-7L plusOrMinus 2)
minusSevenLong should be (-6L plusOrMinus 2)
minusSevenLong should be (-5L plusOrMinus 2)
// Long plusOrMinus Short
sevenLong should be (9L plusOrMinus 2.toShort)
sevenLong should be (8L plusOrMinus 2.toShort)
sevenLong should be (7L plusOrMinus 2.toShort)
sevenLong should be (6L plusOrMinus 2.toShort)
sevenLong should be (5L plusOrMinus 2.toShort)
minusSevenLong should be (-9L plusOrMinus 2.toShort)
minusSevenLong should be (-8L plusOrMinus 2.toShort)
minusSevenLong should be (-7L plusOrMinus 2.toShort)
minusSevenLong should be (-6L plusOrMinus 2.toShort)
minusSevenLong should be (-5L plusOrMinus 2.toShort)
// Long plusOrMinus Byte
sevenLong should be (9L plusOrMinus 2.toByte)
sevenLong should be (8L plusOrMinus 2.toByte)
sevenLong should be (7L plusOrMinus 2.toByte)
sevenLong should be (6L plusOrMinus 2.toByte)
sevenLong should be (5L plusOrMinus 2.toByte)
minusSevenLong should be (-9L plusOrMinus 2.toByte)
minusSevenLong should be (-8L plusOrMinus 2.toByte)
minusSevenLong should be (-7L plusOrMinus 2.toByte)
minusSevenLong should be (-6L plusOrMinus 2.toByte)
minusSevenLong should be (-5L plusOrMinus 2.toByte)
// Int plusOrMinus Int
sevenInt should be (9 plusOrMinus 2)
sevenInt should be (8 plusOrMinus 2)
sevenInt should be (7 plusOrMinus 2)
sevenInt should be (6 plusOrMinus 2)
sevenInt should be (5 plusOrMinus 2)
minusSevenInt should be (-9 plusOrMinus 2)
minusSevenInt should be (-8 plusOrMinus 2)
minusSevenInt should be (-7 plusOrMinus 2)
minusSevenInt should be (-6 plusOrMinus 2)
minusSevenInt should be (-5 plusOrMinus 2)
// Int plusOrMinus Short
sevenInt should be (9 plusOrMinus 2.toShort)
sevenInt should be (8 plusOrMinus 2.toShort)
sevenInt should be (7 plusOrMinus 2.toShort)
sevenInt should be (6 plusOrMinus 2.toShort)
sevenInt should be (5 plusOrMinus 2.toShort)
minusSevenInt should be (-9 plusOrMinus 2.toShort)
minusSevenInt should be (-8 plusOrMinus 2.toShort)
minusSevenInt should be (-7 plusOrMinus 2.toShort)
minusSevenInt should be (-6 plusOrMinus 2.toShort)
minusSevenInt should be (-5 plusOrMinus 2.toShort)
// Int plusOrMinus Byte
sevenInt should be (9 plusOrMinus 2.toByte)
sevenInt should be (8 plusOrMinus 2.toByte)
sevenInt should be (7 plusOrMinus 2.toByte)
sevenInt should be (6 plusOrMinus 2.toByte)
sevenInt should be (5 plusOrMinus 2.toByte)
minusSevenInt should be (-9 plusOrMinus 2.toByte)
minusSevenInt should be (-8 plusOrMinus 2.toByte)
minusSevenInt should be (-7 plusOrMinus 2.toByte)
minusSevenInt should be (-6 plusOrMinus 2.toByte)
minusSevenInt should be (-5 plusOrMinus 2.toByte)
// Short plusOrMinus Short
sevenShort should be (9.toShort plusOrMinus 2.toShort)
sevenShort should be (8.toShort plusOrMinus 2.toShort)
sevenShort should be (7.toShort plusOrMinus 2.toShort)
sevenShort should be (6.toShort plusOrMinus 2.toShort)
sevenShort should be (5.toShort plusOrMinus 2.toShort)
minusSevenShort should be ((-9).toShort plusOrMinus 2.toShort)
minusSevenShort should be ((-8).toShort plusOrMinus 2.toShort)
minusSevenShort should be ((-7).toShort plusOrMinus 2.toShort)
minusSevenShort should be ((-6).toShort plusOrMinus 2.toShort)
minusSevenShort should be ((-5).toShort plusOrMinus 2.toShort)
// Short plusOrMinus Byte
sevenShort should be (9.toShort plusOrMinus 2.toByte)
sevenShort should be (8.toShort plusOrMinus 2.toByte)
sevenShort should be (7.toShort plusOrMinus 2.toByte)
sevenShort should be (6.toShort plusOrMinus 2.toByte)
sevenShort should be (5.toShort plusOrMinus 2.toByte)
minusSevenShort should be ((-9).toShort plusOrMinus 2.toByte)
minusSevenShort should be ((-8).toShort plusOrMinus 2.toByte)
minusSevenShort should be ((-7).toShort plusOrMinus 2.toByte)
minusSevenShort should be ((-6).toShort plusOrMinus 2.toByte)
minusSevenShort should be ((-5).toShort plusOrMinus 2.toByte)
// Byte plusOrMinus Byte
sevenByte should be (9.toByte plusOrMinus 2.toByte)
sevenByte should be (8.toByte plusOrMinus 2.toByte)
sevenByte should be (7.toByte plusOrMinus 2.toByte)
sevenByte should be (6.toByte plusOrMinus 2.toByte)
sevenByte should be (5.toByte plusOrMinus 2.toByte)
minusSevenByte should be ((-9).toByte plusOrMinus 2.toByte)
minusSevenByte should be ((-8).toByte plusOrMinus 2.toByte)
minusSevenByte should be ((-7).toByte plusOrMinus 2.toByte)
minusSevenByte should be ((-6).toByte plusOrMinus 2.toByte)
minusSevenByte should be ((-5).toByte plusOrMinus 2.toByte)
}
def `should do nothing if the number is within the specified range, when used with not` {
// Double plusOrMinus Double
sevenDotOh should not { be (7.5 plusOrMinus 0.2) }
sevenDotOh should not be (7.5 plusOrMinus 0.2)
sevenDotOh should not be (6.5 plusOrMinus 0.2)
minusSevenDotOh should not { be (-7.5 plusOrMinus 0.2) }
minusSevenDotOh should not be (-7.5 plusOrMinus 0.2)
minusSevenDotOh should not be (-6.5 plusOrMinus 0.2)
// Double plusOrMinus Float
sevenDotOh should not { be (7.5 plusOrMinus 0.2f) }
sevenDotOh should not be (7.5 plusOrMinus 0.2f)
sevenDotOh should not be (6.5 plusOrMinus 0.2f)
minusSevenDotOh should not { be (-7.5 plusOrMinus 0.2f) }
minusSevenDotOh should not be (-7.5 plusOrMinus 0.2f)
minusSevenDotOh should not be (-6.5 plusOrMinus 0.2f)
// Double plusOrMinus Long
sevenDotOh should not { be (10.0 plusOrMinus 2L) }
sevenDotOh should not be (4.0 plusOrMinus 2L)
sevenDotOh should not be (9.1 plusOrMinus 2L)
minusSevenDotOh should not { be (-10.0 plusOrMinus 2L) }
minusSevenDotOh should not be (-4.0 plusOrMinus 2L)
minusSevenDotOh should not be (-9.1 plusOrMinus 2L)
// Double plusOrMinus Int
sevenDotOh should not { be (10.0 plusOrMinus 2) }
sevenDotOh should not be (4.0 plusOrMinus 2)
sevenDotOh should not be (9.1 plusOrMinus 2)
minusSevenDotOh should not { be (-10.0 plusOrMinus 2) }
minusSevenDotOh should not be (-4.0 plusOrMinus 2)
minusSevenDotOh should not be (-9.1 plusOrMinus 2)
// Double plusOrMinus Short
sevenDotOh should not { be (10.0 plusOrMinus 2.toShort) }
sevenDotOh should not be (4.0 plusOrMinus 2.toShort)
sevenDotOh should not be (9.1 plusOrMinus 2.toShort)
minusSevenDotOh should not { be (-10.0 plusOrMinus 2.toShort) }
minusSevenDotOh should not be (-4.0 plusOrMinus 2.toShort)
minusSevenDotOh should not be (-9.1 plusOrMinus 2.toShort)
// Double plusOrMinus Byte
sevenDotOh should not { be (10.0 plusOrMinus 2.toByte) }
sevenDotOh should not be (4.0 plusOrMinus 2.toByte)
sevenDotOh should not be (9.1 plusOrMinus 2.toByte)
minusSevenDotOh should not { be (-10.0 plusOrMinus 2.toByte) }
minusSevenDotOh should not be (-4.0 plusOrMinus 2.toByte)
minusSevenDotOh should not be (-9.1 plusOrMinus 2.toByte)
// Float plusOrMinus Float
sevenDotOhFloat should not { be (7.5f plusOrMinus 0.2f) }
sevenDotOhFloat should not be (7.5f plusOrMinus 0.2f)
sevenDotOhFloat should not be (6.5f plusOrMinus 0.2f)
minusSevenDotOhFloat should not { be (-7.5f plusOrMinus 0.2f) }
minusSevenDotOhFloat should not be (-7.5f plusOrMinus 0.2f)
minusSevenDotOhFloat should not be (-6.5f plusOrMinus 0.2f)
// Float plusOrMinus Long
sevenDotOhFloat should not { be (10.0f plusOrMinus 2L) }
sevenDotOhFloat should not be (4.0f plusOrMinus 2L)
sevenDotOhFloat should not be (9.1f plusOrMinus 2L)
minusSevenDotOhFloat should not { be (-10.0f plusOrMinus 2L) }
minusSevenDotOhFloat should not be (-4.0f plusOrMinus 2L)
minusSevenDotOhFloat should not be (-9.1f plusOrMinus 2L)
// Float plusOrMinus Int
sevenDotOhFloat should not { be (10.0f plusOrMinus 2) }
sevenDotOhFloat should not be (4.0f plusOrMinus 2)
sevenDotOhFloat should not be (9.1f plusOrMinus 2)
minusSevenDotOhFloat should not { be (-10.0f plusOrMinus 2) }
minusSevenDotOhFloat should not be (-4.0f plusOrMinus 2)
minusSevenDotOhFloat should not be (-9.1f plusOrMinus 2)
// Float plusOrMinus Short
sevenDotOhFloat should not { be (10.0f plusOrMinus 2.toShort) }
sevenDotOhFloat should not be (4.0f plusOrMinus 2.toShort)
sevenDotOhFloat should not be (9.1f plusOrMinus 2.toShort)
minusSevenDotOhFloat should not { be (-10.0f plusOrMinus 2.toShort) }
minusSevenDotOhFloat should not be (-4.0f plusOrMinus 2.toShort)
minusSevenDotOhFloat should not be (-9.1f plusOrMinus 2.toShort)
// Float plusOrMinus Byte
sevenDotOhFloat should not { be (10.0f plusOrMinus 2.toByte) }
sevenDotOhFloat should not be (4.0f plusOrMinus 2.toByte)
sevenDotOhFloat should not be (9.1f plusOrMinus 2.toByte)
minusSevenDotOhFloat should not { be (-10.0f plusOrMinus 2.toByte) }
minusSevenDotOhFloat should not be (-4.0f plusOrMinus 2.toByte)
minusSevenDotOhFloat should not be (-9.1f plusOrMinus 2.toByte)
// Long plusOrMinus Long
sevenLong should not { be (10L plusOrMinus 2L) }
sevenLong should not be (4L plusOrMinus 2L)
sevenLong should not be (10L plusOrMinus 2L)
minusSevenLong should not { be (-10L plusOrMinus 2L) }
minusSevenLong should not be (-4L plusOrMinus 2L)
minusSevenLong should not be (-10L plusOrMinus 2L)
// Long plusOrMinus Int
sevenLong should not { be (10L plusOrMinus 2) }
sevenLong should not be (4L plusOrMinus 2)
sevenLong should not be (10L plusOrMinus 2)
minusSevenLong should not { be (-10L plusOrMinus 2) }
minusSevenLong should not be (-4L plusOrMinus 2)
minusSevenLong should not be (-10L plusOrMinus 2)
// Long plusOrMinus Short
sevenLong should not { be (10L plusOrMinus 2.toShort) }
sevenLong should not be (4L plusOrMinus 2.toShort)
sevenLong should not be (10L plusOrMinus 2.toShort)
minusSevenLong should not { be (-10L plusOrMinus 2.toShort) }
minusSevenLong should not be (-4L plusOrMinus 2.toShort)
minusSevenLong should not be (-10L plusOrMinus 2.toShort)
// Long plusOrMinus Byte
sevenLong should not { be (10L plusOrMinus 2.toByte) }
sevenLong should not be (4L plusOrMinus 2.toByte)
sevenLong should not be (10L plusOrMinus 2.toByte)
minusSevenLong should not { be (-10L plusOrMinus 2.toByte) }
minusSevenLong should not be (-4L plusOrMinus 2.toByte)
minusSevenLong should not be (-10L plusOrMinus 2.toByte)
// Int plusOrMinus Int
sevenInt should not { be (10 plusOrMinus 2) }
sevenInt should not be (4 plusOrMinus 2)
sevenInt should not be (10 plusOrMinus 2)
minusSevenInt should not { be (-10 plusOrMinus 2) }
minusSevenInt should not be (-4 plusOrMinus 2)
minusSevenInt should not be (-10 plusOrMinus 2)
// Int plusOrMinus Short
sevenInt should not { be (10 plusOrMinus 2.toShort) }
sevenInt should not be (4 plusOrMinus 2.toShort)
sevenInt should not be (10 plusOrMinus 2.toShort)
minusSevenInt should not { be (-10 plusOrMinus 2.toShort) }
minusSevenInt should not be (-4 plusOrMinus 2.toShort)
minusSevenInt should not be (-10 plusOrMinus 2.toShort)
// Int plusOrMinus Byte
sevenInt should not { be (10 plusOrMinus 2.toByte) }
sevenInt should not be (4 plusOrMinus 2.toByte)
sevenInt should not be (10 plusOrMinus 2.toByte)
minusSevenInt should not { be (-10 plusOrMinus 2.toByte) }
minusSevenInt should not be (-4 plusOrMinus 2.toByte)
minusSevenInt should not be (-10 plusOrMinus 2.toByte)
// Short plusOrMinus Short
sevenShort should not { be (10.toShort plusOrMinus 2.toShort) }
sevenShort should not be (4.toShort plusOrMinus 2.toShort)
sevenShort should not be (10.toShort plusOrMinus 2.toShort)
minusSevenShort should not { be ((-10).toShort plusOrMinus 2.toShort) }
minusSevenShort should not be ((-4).toShort plusOrMinus 2.toShort)
minusSevenShort should not be ((-10).toShort plusOrMinus 2.toShort)
// Short plusOrMinus Byte
sevenShort should not { be (10.toShort plusOrMinus 2.toByte) }
sevenShort should not be (4.toShort plusOrMinus 2.toByte)
sevenShort should not be (10.toShort plusOrMinus 2.toByte)
minusSevenShort should not { be ((-10).toShort plusOrMinus 2.toByte) }
minusSevenShort should not be ((-4).toShort plusOrMinus 2.toByte)
minusSevenShort should not be ((-10).toShort plusOrMinus 2.toByte)
// Byte plusOrMinus Byte
sevenByte should not { be (10.toByte plusOrMinus 2.toByte) }
sevenByte should not be (4.toByte plusOrMinus 2.toByte)
sevenByte should not be (10.toByte plusOrMinus 2.toByte)
minusSevenByte should not { be ((-10).toByte plusOrMinus 2.toByte) }
minusSevenByte should not be ((-4).toByte plusOrMinus 2.toByte)
minusSevenByte should not be ((-10).toByte plusOrMinus 2.toByte)
}
def `should do nothing if the number is within the specified range, when used in a logical-and expression` {
// Double plusOrMinus Double
sevenDotOh should ((be (7.1 plusOrMinus 0.2)) and (be (7.1 plusOrMinus 0.2)))
sevenDotOh should (be (6.9 plusOrMinus 0.2) and (be (7.1 plusOrMinus 0.2)))
sevenDotOh should (be (7.0 plusOrMinus 0.2) and be (7.0 plusOrMinus 0.2))
// Double plusOrMinus Float
sevenDotOh should ((be (7.1 plusOrMinus 0.2f)) and (be (7.1 plusOrMinus 0.2f)))
sevenDotOh should (be (6.9 plusOrMinus 0.2f) and (be (7.1 plusOrMinus 0.2f)))
sevenDotOh should (be (7.0 plusOrMinus 0.2f) and be (7.0 plusOrMinus 0.2f))
// Double plusOrMinus Long
sevenDotOh should ((be (7.1 plusOrMinus 2L)) and (be (7.1 plusOrMinus 2L)))
sevenDotOh should (be (6.9 plusOrMinus 2L) and (be (7.1 plusOrMinus 2L)))
sevenDotOh should (be (7.0 plusOrMinus 2L) and be (7.0 plusOrMinus 2L))
// Double plusOrMinus Int
sevenDotOh should ((be (7.1 plusOrMinus 2)) and (be (7.1 plusOrMinus 2)))
sevenDotOh should (be (6.9 plusOrMinus 2) and (be (7.1 plusOrMinus 2)))
sevenDotOh should (be (7.0 plusOrMinus 2) and be (7.0 plusOrMinus 2))
// Double plusOrMinus Short
sevenDotOh should ((be (7.1 plusOrMinus 2.toShort)) and (be (7.1 plusOrMinus 2.toShort)))
sevenDotOh should (be (6.9 plusOrMinus 2.toShort) and (be (7.1 plusOrMinus 2.toShort)))
sevenDotOh should (be (7.0 plusOrMinus 2.toShort) and be (7.0 plusOrMinus 2.toShort))
// Double plusOrMinus Byte
sevenDotOh should ((be (7.1 plusOrMinus 2.toByte)) and (be (7.1 plusOrMinus 2.toByte)))
sevenDotOh should (be (6.9 plusOrMinus 2.toByte) and (be (7.1 plusOrMinus 2.toByte)))
sevenDotOh should (be (7.0 plusOrMinus 2.toByte) and be (7.0 plusOrMinus 2.toByte))
// Float plusOrMinus Float
sevenDotOhFloat should ((be (7.1f plusOrMinus 0.2f)) and (be (7.1f plusOrMinus 0.2f)))
sevenDotOhFloat should (be (6.9f plusOrMinus 0.2f) and (be (7.1f plusOrMinus 0.2f)))
sevenDotOhFloat should (be (7.0f plusOrMinus 0.2f) and be (7.0f plusOrMinus 0.2f))
// Float plusOrMinus Long
sevenDotOhFloat should ((be (7.1f plusOrMinus 2L)) and (be (7.1f plusOrMinus 2L)))
sevenDotOhFloat should (be (6.9f plusOrMinus 2L) and (be (7.1f plusOrMinus 2L)))
sevenDotOhFloat should (be (7.0f plusOrMinus 2L) and be (7.0f plusOrMinus 2L))
// Float plusOrMinus Int
sevenDotOhFloat should ((be (7.1f plusOrMinus 2)) and (be (7.1f plusOrMinus 2)))
sevenDotOhFloat should (be (6.9f plusOrMinus 2) and (be (7.1f plusOrMinus 2)))
sevenDotOhFloat should (be (7.0f plusOrMinus 2) and be (7.0f plusOrMinus 2))
// Float plusOrMinus Short
sevenDotOhFloat should ((be (7.1f plusOrMinus 2.toShort)) and (be (7.1f plusOrMinus 2.toShort)))
sevenDotOhFloat should (be (6.9f plusOrMinus 2.toShort) and (be (7.1f plusOrMinus 2.toShort)))
sevenDotOhFloat should (be (7.0f plusOrMinus 2.toShort) and be (7.0f plusOrMinus 2.toShort))
// Float plusOrMinus Byte
sevenDotOhFloat should ((be (7.1f plusOrMinus 2.toByte)) and (be (7.1f plusOrMinus 2.toByte)))
sevenDotOhFloat should (be (6.9f plusOrMinus 2.toByte) and (be (7.1f plusOrMinus 2.toByte)))
sevenDotOhFloat should (be (7.0f plusOrMinus 2.toByte) and be (7.0f plusOrMinus 2.toByte))
// Long plusOrMinus Long
sevenLong should ((be (9L plusOrMinus 2L)) and (be (9L plusOrMinus 2L)))
sevenLong should (be (8L plusOrMinus 2L) and (be (9L plusOrMinus 2L)))
sevenLong should (be (7L plusOrMinus 2L) and be (7L plusOrMinus 2L))
// Long plusOrMinus Int
sevenLong should ((be (9L plusOrMinus 2)) and (be (9L plusOrMinus 2)))
sevenLong should (be (8L plusOrMinus 2) and (be (9L plusOrMinus 2)))
sevenLong should (be (7L plusOrMinus 2) and be (7L plusOrMinus 2))
// Long plusOrMinus Short
sevenLong should ((be (9L plusOrMinus 2.toShort)) and (be (9L plusOrMinus 2.toShort)))
sevenLong should (be (8L plusOrMinus 2.toShort) and (be (9L plusOrMinus 2.toShort)))
sevenLong should (be (7L plusOrMinus 2.toShort) and be (7L plusOrMinus 2.toShort))
// Long plusOrMinus Byte
sevenLong should ((be (9L plusOrMinus 2.toByte)) and (be (9L plusOrMinus 2.toByte)))
sevenLong should (be (8L plusOrMinus 2.toByte) and (be (9L plusOrMinus 2.toByte)))
sevenLong should (be (7L plusOrMinus 2.toByte) and be (7L plusOrMinus 2.toByte))
// Int plusOrMinus Int
sevenInt should ((be (9 plusOrMinus 2)) and (be (9 plusOrMinus 2)))
sevenInt should (be (8 plusOrMinus 2) and (be (9 plusOrMinus 2)))
sevenInt should (be (7 plusOrMinus 2) and be (7 plusOrMinus 2))
// Int plusOrMinus Short
sevenInt should ((be (9 plusOrMinus 2.toShort)) and (be (9 plusOrMinus 2.toShort)))
sevenInt should (be (8 plusOrMinus 2.toShort) and (be (9 plusOrMinus 2.toShort)))
sevenInt should (be (7 plusOrMinus 2.toShort) and be (7 plusOrMinus 2.toShort))
// Int plusOrMinus Byte
sevenInt should ((be (9 plusOrMinus 2.toByte)) and (be (9 plusOrMinus 2.toByte)))
sevenInt should (be (8 plusOrMinus 2.toByte) and (be (9 plusOrMinus 2.toByte)))
sevenInt should (be (7 plusOrMinus 2.toByte) and be (7 plusOrMinus 2.toByte))
// Short plusOrMinus Short
sevenShort should ((be (9.toShort plusOrMinus 2.toShort)) and (be (9.toShort plusOrMinus 2.toShort)))
sevenShort should (be (8.toShort plusOrMinus 2.toShort) and (be (9.toShort plusOrMinus 2.toShort)))
sevenShort should (be (7.toShort plusOrMinus 2.toShort) and be (7.toShort plusOrMinus 2.toShort))
// Short plusOrMinus Byte
sevenShort should ((be (9.toShort plusOrMinus 2.toByte)) and (be (9.toShort plusOrMinus 2.toByte)))
sevenShort should (be (8.toShort plusOrMinus 2.toByte) and (be (9.toShort plusOrMinus 2.toByte)))
sevenShort should (be (7.toShort plusOrMinus 2.toByte) and be (7.toShort plusOrMinus 2.toByte))
// Byte plusOrMinus Byte
sevenByte should ((be (9.toByte plusOrMinus 2.toByte)) and (be (9.toByte plusOrMinus 2.toByte)))
sevenByte should (be (8.toByte plusOrMinus 2.toByte) and (be (9.toByte plusOrMinus 2.toByte)))
sevenByte should (be (7.toByte plusOrMinus 2.toByte) and be (7.toByte plusOrMinus 2.toByte))
}
def `should do nothing if the number is within the specified range, when used in a logical-or expression` {
// Double plusOrMinus Double
sevenDotOh should ((be (7.1 plusOrMinus 0.2)) or (be (7.1 plusOrMinus 0.2)))
sevenDotOh should (be (6.9 plusOrMinus 0.2) or (be (7.1 plusOrMinus 0.2)))
sevenDotOh should (be (7.0 plusOrMinus 0.2) or be (7.0 plusOrMinus 0.2))
// Double plusOrMinus Float
sevenDotOh should ((be (7.1 plusOrMinus 0.2f)) or (be (7.1 plusOrMinus 0.2f)))
sevenDotOh should (be (6.9 plusOrMinus 0.2f) or (be (7.1 plusOrMinus 0.2f)))
sevenDotOh should (be (7.0 plusOrMinus 0.2f) or be (7.0 plusOrMinus 0.2f))
// Double plusOrMinus Long
sevenDotOh should ((be (7.1 plusOrMinus 2L)) or (be (7.1 plusOrMinus 2L)))
sevenDotOh should (be (6.9 plusOrMinus 2L) or (be (7.1 plusOrMinus 2L)))
sevenDotOh should (be (7.0 plusOrMinus 2L) or be (7.0 plusOrMinus 2L))
// Double plusOrMinus Int
sevenDotOh should ((be (7.1 plusOrMinus 2)) or (be (7.1 plusOrMinus 2)))
sevenDotOh should (be (6.9 plusOrMinus 2) or (be (7.1 plusOrMinus 2)))
sevenDotOh should (be (7.0 plusOrMinus 2) or be (7.0 plusOrMinus 2))
// Double plusOrMinus Short
sevenDotOh should ((be (7.1 plusOrMinus 2.toShort)) or (be (7.1 plusOrMinus 2.toShort)))
sevenDotOh should (be (6.9 plusOrMinus 2.toShort) or (be (7.1 plusOrMinus 2.toShort)))
sevenDotOh should (be (7.0 plusOrMinus 2.toShort) or be (7.0 plusOrMinus 2.toShort))
// Double plusOrMinus Byte
sevenDotOh should ((be (7.1 plusOrMinus 2.toByte)) or (be (7.1 plusOrMinus 2.toByte)))
sevenDotOh should (be (6.9 plusOrMinus 2.toByte) or (be (7.1 plusOrMinus 2.toByte)))
sevenDotOh should (be (7.0 plusOrMinus 2.toByte) or be (7.0 plusOrMinus 2.toByte))
// Float plusOrMinus Float
sevenDotOhFloat should ((be (7.1f plusOrMinus 0.2f)) or (be (7.1f plusOrMinus 0.2f)))
sevenDotOhFloat should (be (6.9f plusOrMinus 0.2f) or (be (7.1f plusOrMinus 0.2f)))
sevenDotOhFloat should (be (7.0f plusOrMinus 0.2f) or be (7.0f plusOrMinus 0.2f))
// Float plusOrMinus Long
sevenDotOhFloat should ((be (7.1f plusOrMinus 2L)) or (be (7.1f plusOrMinus 2L)))
sevenDotOhFloat should (be (6.9f plusOrMinus 2L) or (be (7.1f plusOrMinus 2L)))
sevenDotOhFloat should (be (7.0f plusOrMinus 2L) or be (7.0f plusOrMinus 2L))
// Float plusOrMinus Int
sevenDotOhFloat should ((be (7.1f plusOrMinus 2)) or (be (7.1f plusOrMinus 2)))
sevenDotOhFloat should (be (6.9f plusOrMinus 2) or (be (7.1f plusOrMinus 2)))
sevenDotOhFloat should (be (7.0f plusOrMinus 2) or be (7.0f plusOrMinus 2))
// Float plusOrMinus Short
sevenDotOhFloat should ((be (7.1f plusOrMinus 2.toShort)) or (be (7.1f plusOrMinus 2.toShort)))
sevenDotOhFloat should (be (6.9f plusOrMinus 2.toShort) or (be (7.1f plusOrMinus 2.toShort)))
sevenDotOhFloat should (be (7.0f plusOrMinus 2.toShort) or be (7.0f plusOrMinus 2.toShort))
// Float plusOrMinus Byte
sevenDotOhFloat should ((be (7.1f plusOrMinus 2.toByte)) or (be (7.1f plusOrMinus 2.toByte)))
sevenDotOhFloat should (be (6.9f plusOrMinus 2.toByte) or (be (7.1f plusOrMinus 2.toByte)))
sevenDotOhFloat should (be (7.0f plusOrMinus 2.toByte) or be (7.0f plusOrMinus 2.toByte))
// Long plusOrMinus Long
sevenLong should ((be (9L plusOrMinus 2L)) or (be (9L plusOrMinus 2L)))
sevenLong should (be (8L plusOrMinus 2L) or (be (9L plusOrMinus 2L)))
sevenLong should (be (7L plusOrMinus 2L) or be (7L plusOrMinus 2L))
// Long plusOrMinus Int
sevenLong should ((be (9L plusOrMinus 2)) or (be (9L plusOrMinus 2)))
sevenLong should (be (8L plusOrMinus 2) or (be (9L plusOrMinus 2)))
sevenLong should (be (7L plusOrMinus 2) or be (7L plusOrMinus 2))
// Long plusOrMinus Short
sevenLong should ((be (9L plusOrMinus 2.toShort)) or (be (9L plusOrMinus 2.toShort)))
sevenLong should (be (8L plusOrMinus 2.toShort) or (be (9L plusOrMinus 2.toShort)))
sevenLong should (be (7L plusOrMinus 2.toShort) or be (7L plusOrMinus 2.toShort))
// Long plusOrMinus Byte
sevenLong should ((be (9L plusOrMinus 2.toByte)) or (be (9L plusOrMinus 2.toByte)))
sevenLong should (be (8L plusOrMinus 2.toByte) or (be (9L plusOrMinus 2.toByte)))
sevenLong should (be (7L plusOrMinus 2.toByte) or be (7L plusOrMinus 2.toByte))
// Int plusOrMinus Int
sevenInt should ((be (9 plusOrMinus 2)) or (be (9 plusOrMinus 2)))
sevenInt should (be (8 plusOrMinus 2) or (be (9 plusOrMinus 2)))
sevenInt should (be (7 plusOrMinus 2) or be (7 plusOrMinus 2))
// Int plusOrMinus Short
sevenInt should ((be (9 plusOrMinus 2.toShort)) or (be (9 plusOrMinus 2.toShort)))
sevenInt should (be (8 plusOrMinus 2.toShort) or (be (9 plusOrMinus 2.toShort)))
sevenInt should (be (7 plusOrMinus 2.toShort) or be (7 plusOrMinus 2.toShort))
// Int plusOrMinus Byte
sevenInt should ((be (9 plusOrMinus 2.toByte)) or (be (9 plusOrMinus 2.toByte)))
sevenInt should (be (8 plusOrMinus 2.toByte) or (be (9 plusOrMinus 2.toByte)))
sevenInt should (be (7 plusOrMinus 2.toByte) or be (7 plusOrMinus 2.toByte))
// Short plusOrMinus Short
sevenShort should ((be (9.toShort plusOrMinus 2.toShort)) or (be (9.toShort plusOrMinus 2.toShort)))
sevenShort should (be (8.toShort plusOrMinus 2.toShort) or (be (9.toShort plusOrMinus 2.toShort)))
sevenShort should (be (7.toShort plusOrMinus 2.toShort) or be (7.toShort plusOrMinus 2.toShort))
// Short plusOrMinus Byte
sevenShort should ((be (9.toShort plusOrMinus 2.toByte)) or (be (9.toShort plusOrMinus 2.toByte)))
sevenShort should (be (8.toShort plusOrMinus 2.toByte) or (be (9.toShort plusOrMinus 2.toByte)))
sevenShort should (be (7.toShort plusOrMinus 2.toByte) or be (7.toShort plusOrMinus 2.toByte))
// Byte plusOrMinus Byte
sevenByte should ((be (9.toByte plusOrMinus 2.toByte)) or (be (9.toByte plusOrMinus 2.toByte)))
sevenByte should (be (8.toByte plusOrMinus 2.toByte) or (be (9.toByte plusOrMinus 2.toByte)))
sevenByte should (be (7.toByte plusOrMinus 2.toByte) or be (7.toByte plusOrMinus 2.toByte))
}
def `should do nothing if the number is not within the specified range, when used in a logical-and expression with not` {
// Double plusOrMinus Double
sevenDotOh should ((not be (17.1 plusOrMinus 0.2)) and (not be (17.1 plusOrMinus 0.2)))
sevenDotOh should (not (be (16.9 plusOrMinus 0.2)) and not (be (17.1 plusOrMinus 0.2)))
sevenDotOh should (not be (17.0 plusOrMinus 0.2) and not be (17.0 plusOrMinus 0.2))
// Double plusOrMinus Float
sevenDotOh should ((not be (17.1 plusOrMinus 0.2f)) and (not be (17.1 plusOrMinus 0.2f)))
sevenDotOh should (not (be (16.9 plusOrMinus 0.2f)) and not (be (17.1 plusOrMinus 0.2f)))
sevenDotOh should (not be (17.0 plusOrMinus 0.2f) and not be (17.0 plusOrMinus 0.2f))
// Double plusOrMinus Long
sevenDotOh should ((not be (17.1 plusOrMinus 2L)) and (not be (17.1 plusOrMinus 2L)))
sevenDotOh should (not (be (16.9 plusOrMinus 2L)) and not (be (17.1 plusOrMinus 2L)))
sevenDotOh should (not be (17.0 plusOrMinus 2L) and not be (17.0 plusOrMinus 2L))
// Double plusOrMinus Int
sevenDotOh should ((not be (17.1 plusOrMinus 2)) and (not be (17.1 plusOrMinus 2)))
sevenDotOh should (not (be (16.9 plusOrMinus 2)) and not (be (17.1 plusOrMinus 2)))
sevenDotOh should (not be (17.0 plusOrMinus 2) and not be (17.0 plusOrMinus 2))
// Double plusOrMinus Short
sevenDotOh should ((not be (17.1 plusOrMinus 2.toShort)) and (not be (17.1 plusOrMinus 2.toShort)))
sevenDotOh should (not (be (16.9 plusOrMinus 2.toShort)) and not (be (17.1 plusOrMinus 2.toShort)))
sevenDotOh should (not be (17.0 plusOrMinus 2.toShort) and not be (17.0 plusOrMinus 2.toShort))
// Double plusOrMinus Byte
sevenDotOh should ((not be (17.1 plusOrMinus 2.toByte)) and (not be (17.1 plusOrMinus 2.toByte)))
sevenDotOh should (not (be (16.9 plusOrMinus 2.toByte)) and not (be (17.1 plusOrMinus 2.toByte)))
sevenDotOh should (not be (17.0 plusOrMinus 2.toByte) and not be (17.0 plusOrMinus 2.toByte))
// Float plusOrMinus Float
sevenDotOhFloat should ((not be (17.1f plusOrMinus 0.2f)) and (not be (17.1f plusOrMinus 0.2f)))
sevenDotOhFloat should (not (be (16.9f plusOrMinus 0.2f)) and not (be (17.1f plusOrMinus 0.2f)))
sevenDotOhFloat should (not be (17.0f plusOrMinus 0.2f) and not be (17.0f plusOrMinus 0.2f))
// Float plusOrMinus Long
sevenDotOhFloat should ((not be (17.1f plusOrMinus 2L)) and (not be (17.1f plusOrMinus 2L)))
sevenDotOhFloat should (not (be (16.9f plusOrMinus 2L)) and not (be (17.1f plusOrMinus 2L)))
sevenDotOhFloat should (not be (17.0f plusOrMinus 2L) and not be (17.0f plusOrMinus 2L))
// Float plusOrMinus Int
sevenDotOhFloat should ((not be (17.1f plusOrMinus 2)) and (not be (17.1f plusOrMinus 2)))
sevenDotOhFloat should (not (be (16.9f plusOrMinus 2)) and not (be (17.1f plusOrMinus 2)))
sevenDotOhFloat should (not be (17.0f plusOrMinus 2) and not be (17.0f plusOrMinus 2))
// Float plusOrMinus Short
sevenDotOhFloat should ((not be (17.1f plusOrMinus 2.toShort)) and (not be (17.1f plusOrMinus 2.toShort)))
sevenDotOhFloat should (not (be (16.9f plusOrMinus 2.toShort)) and not (be (17.1f plusOrMinus 2.toShort)))
sevenDotOhFloat should (not be (17.0f plusOrMinus 2.toShort) and not be (17.0f plusOrMinus 2.toShort))
// Float plusOrMinus Byte
sevenDotOhFloat should ((not be (17.1f plusOrMinus 2.toByte)) and (not be (17.1f plusOrMinus 2.toByte)))
sevenDotOhFloat should (not (be (16.9f plusOrMinus 2.toByte)) and not (be (17.1f plusOrMinus 2.toByte)))
sevenDotOhFloat should (not be (17.0f plusOrMinus 2.toByte) and not be (17.0f plusOrMinus 2.toByte))
// Long plusOrMinus Long
sevenLong should ((not be (19L plusOrMinus 2L)) and (not be (19L plusOrMinus 2L)))
sevenLong should (not (be (18L plusOrMinus 2L)) and not (be (19L plusOrMinus 2L)))
sevenLong should (not be (17L plusOrMinus 2L) and not be (17L plusOrMinus 2L))
// Long plusOrMinus Int
sevenLong should ((not be (19L plusOrMinus 2)) and (not be (19L plusOrMinus 2)))
sevenLong should (not (be (18L plusOrMinus 2)) and not (be (19L plusOrMinus 2)))
sevenLong should (not be (17L plusOrMinus 2) and not be (17L plusOrMinus 2))
// Long plusOrMinus Short
sevenLong should ((not be (19L plusOrMinus 2.toShort)) and (not be (19L plusOrMinus 2.toShort)))
sevenLong should (not (be (18L plusOrMinus 2.toShort)) and not (be (19L plusOrMinus 2.toShort)))
sevenLong should (not be (17L plusOrMinus 2.toShort) and not be (17L plusOrMinus 2.toShort))
// Long plusOrMinus Byte
sevenLong should ((not be (19L plusOrMinus 2.toByte)) and (not be (19L plusOrMinus 2.toByte)))
sevenLong should (not (be (18L plusOrMinus 2.toByte)) and not (be (19L plusOrMinus 2.toByte)))
sevenLong should (not be (17L plusOrMinus 2.toByte) and not be (17L plusOrMinus 2.toByte))
// Int plusOrMinus Int
sevenInt should ((not be (19 plusOrMinus 2)) and (not be (19 plusOrMinus 2)))
sevenInt should (not (be (18 plusOrMinus 2)) and not (be (19 plusOrMinus 2)))
sevenInt should (not be (17 plusOrMinus 2) and not be (17 plusOrMinus 2))
// Int plusOrMinus Short
sevenInt should ((not be (19 plusOrMinus 2.toShort)) and (not be (19 plusOrMinus 2.toShort)))
sevenInt should (not (be (18 plusOrMinus 2.toShort)) and not (be (19 plusOrMinus 2.toShort)))
sevenInt should (not be (17 plusOrMinus 2.toShort) and not be (17 plusOrMinus 2.toShort))
// Int plusOrMinus Byte
sevenInt should ((not be (19 plusOrMinus 2.toByte)) and (not be (19 plusOrMinus 2.toByte)))
sevenInt should (not (be (18 plusOrMinus 2.toByte)) and not (be (19 plusOrMinus 2.toByte)))
sevenInt should (not be (17 plusOrMinus 2.toByte) and not be (17 plusOrMinus 2.toByte))
// Short plusOrMinus Short
sevenShort should ((not be (19.toShort plusOrMinus 2.toShort)) and (not be (19.toShort plusOrMinus 2.toShort)))
sevenShort should (not (be (18.toShort plusOrMinus 2.toShort)) and not (be (19.toShort plusOrMinus 2.toShort)))
sevenShort should (not be (17.toShort plusOrMinus 2.toShort) and not be (17.toShort plusOrMinus 2.toShort))
// Short plusOrMinus Byte
sevenShort should ((not be (19.toShort plusOrMinus 2.toByte)) and (not be (19.toShort plusOrMinus 2.toByte)))
sevenShort should (not (be (18.toShort plusOrMinus 2.toByte)) and not (be (19.toShort plusOrMinus 2.toByte)))
sevenShort should (not be (17.toShort plusOrMinus 2.toByte) and not be (17.toShort plusOrMinus 2.toByte))
// Byte plusOrMinus Byte
sevenByte should ((not be (19.toByte plusOrMinus 2.toByte)) and (not be (19.toByte plusOrMinus 2.toByte)))
sevenByte should (not (be (18.toByte plusOrMinus 2.toByte)) and not (be (19.toByte plusOrMinus 2.toByte)))
sevenByte should (not be (17.toByte plusOrMinus 2.toByte) and not be (17.toByte plusOrMinus 2.toByte))
}
def `should do nothing if the number is not within the specified range, when used in a logical-or expression with not` {
// Double plusOrMinus Double
sevenDotOh should ((not be (17.1 plusOrMinus 0.2)) or (not be (17.1 plusOrMinus 0.2)))
sevenDotOh should (not (be (16.9 plusOrMinus 0.2)) or not (be (17.1 plusOrMinus 0.2)))
sevenDotOh should (not be (17.0 plusOrMinus 0.2) or not be (17.0 plusOrMinus 0.2))
// Double plusOrMinus Float
sevenDotOh should ((not be (17.1 plusOrMinus 0.2f)) or (not be (17.1 plusOrMinus 0.2f)))
sevenDotOh should (not (be (16.9 plusOrMinus 0.2f)) or not (be (17.1 plusOrMinus 0.2f)))
sevenDotOh should (not be (17.0 plusOrMinus 0.2f) or not be (17.0 plusOrMinus 0.2f))
// Double plusOrMinus Long
sevenDotOh should ((not be (17.1 plusOrMinus 2L)) or (not be (17.1 plusOrMinus 2L)))
sevenDotOh should (not (be (16.9 plusOrMinus 2L)) or not (be (17.1 plusOrMinus 2L)))
sevenDotOh should (not be (17.0 plusOrMinus 2L) or not be (17.0 plusOrMinus 2L))
// Double plusOrMinus Int
sevenDotOh should ((not be (17.1 plusOrMinus 2)) or (not be (17.1 plusOrMinus 2)))
sevenDotOh should (not (be (16.9 plusOrMinus 2)) or not (be (17.1 plusOrMinus 2)))
sevenDotOh should (not be (17.0 plusOrMinus 2) or not be (17.0 plusOrMinus 2))
// Double plusOrMinus Short
sevenDotOh should ((not be (17.1 plusOrMinus 2.toShort)) or (not be (17.1 plusOrMinus 2.toShort)))
sevenDotOh should (not (be (16.9 plusOrMinus 2.toShort)) or not (be (17.1 plusOrMinus 2.toShort)))
sevenDotOh should (not be (17.0 plusOrMinus 2.toShort) or not be (17.0 plusOrMinus 2.toShort))
// Double plusOrMinus Byte
sevenDotOh should ((not be (17.1 plusOrMinus 2.toByte)) or (not be (17.1 plusOrMinus 2.toByte)))
sevenDotOh should (not (be (16.9 plusOrMinus 2.toByte)) or not (be (17.1 plusOrMinus 2.toByte)))
sevenDotOh should (not be (17.0 plusOrMinus 2.toByte) or not be (17.0 plusOrMinus 2.toByte))
// Float plusOrMinus Float
sevenDotOhFloat should ((not be (17.1f plusOrMinus 0.2f)) or (not be (17.1f plusOrMinus 0.2f)))
sevenDotOhFloat should (not (be (16.9f plusOrMinus 0.2f)) or not (be (17.1f plusOrMinus 0.2f)))
sevenDotOhFloat should (not be (17.0f plusOrMinus 0.2f) or not be (17.0f plusOrMinus 0.2f))
// Float plusOrMinus Long
sevenDotOhFloat should ((not be (17.1f plusOrMinus 2L)) or (not be (17.1f plusOrMinus 2L)))
sevenDotOhFloat should (not (be (16.9f plusOrMinus 2L)) or not (be (17.1f plusOrMinus 2L)))
sevenDotOhFloat should (not be (17.0f plusOrMinus 2L) or not be (17.0f plusOrMinus 2L))
// Float plusOrMinus Int
sevenDotOhFloat should ((not be (17.1f plusOrMinus 2)) or (not be (17.1f plusOrMinus 2)))
sevenDotOhFloat should (not (be (16.9f plusOrMinus 2)) or not (be (17.1f plusOrMinus 2)))
sevenDotOhFloat should (not be (17.0f plusOrMinus 2) or not be (17.0f plusOrMinus 2))
// Float plusOrMinus Short
sevenDotOhFloat should ((not be (17.1f plusOrMinus 2.toShort)) or (not be (17.1f plusOrMinus 2.toShort)))
sevenDotOhFloat should (not (be (16.9f plusOrMinus 2.toShort)) or not (be (17.1f plusOrMinus 2.toShort)))
sevenDotOhFloat should (not be (17.0f plusOrMinus 2.toShort) or not be (17.0f plusOrMinus 2.toShort))
// Float plusOrMinus Byte
sevenDotOhFloat should ((not be (17.1f plusOrMinus 2.toByte)) or (not be (17.1f plusOrMinus 2.toByte)))
sevenDotOhFloat should (not (be (16.9f plusOrMinus 2.toByte)) or not (be (17.1f plusOrMinus 2.toByte)))
sevenDotOhFloat should (not be (17.0f plusOrMinus 2.toByte) or not be (17.0f plusOrMinus 2.toByte))
// Long plusOrMinus Long
sevenLong should ((not be (19L plusOrMinus 2L)) or (not be (19L plusOrMinus 2L)))
sevenLong should (not (be (18L plusOrMinus 2L)) or not (be (19L plusOrMinus 2L)))
sevenLong should (not be (17L plusOrMinus 2L) or not be (17L plusOrMinus 2L))
// Long plusOrMinus Int
sevenLong should ((not be (19L plusOrMinus 2)) or (not be (19L plusOrMinus 2)))
sevenLong should (not (be (18L plusOrMinus 2)) or not (be (19L plusOrMinus 2)))
sevenLong should (not be (17L plusOrMinus 2) or not be (17L plusOrMinus 2))
// Long plusOrMinus Short
sevenLong should ((not be (19L plusOrMinus 2.toShort)) or (not be (19L plusOrMinus 2.toShort)))
sevenLong should (not (be (18L plusOrMinus 2.toShort)) or not (be (19L plusOrMinus 2.toShort)))
sevenLong should (not be (17L plusOrMinus 2.toShort) or not be (17L plusOrMinus 2.toShort))
// Long plusOrMinus Byte
sevenLong should ((not be (19L plusOrMinus 2.toByte)) or (not be (19L plusOrMinus 2.toByte)))
sevenLong should (not (be (18L plusOrMinus 2.toByte)) or not (be (19L plusOrMinus 2.toByte)))
sevenLong should (not be (17L plusOrMinus 2.toByte) or not be (17L plusOrMinus 2.toByte))
// Int plusOrMinus Int
sevenInt should ((not be (19 plusOrMinus 2)) or (not be (19 plusOrMinus 2)))
sevenInt should (not (be (18 plusOrMinus 2)) or not (be (19 plusOrMinus 2)))
sevenInt should (not be (17 plusOrMinus 2) or not be (17 plusOrMinus 2))
// Int plusOrMinus Short
sevenInt should ((not be (19 plusOrMinus 2.toShort)) or (not be (19 plusOrMinus 2.toShort)))
sevenInt should (not (be (18 plusOrMinus 2.toShort)) or not (be (19 plusOrMinus 2.toShort)))
sevenInt should (not be (17 plusOrMinus 2.toShort) or not be (17 plusOrMinus 2.toShort))
// Int plusOrMinus Byte
sevenInt should ((not be (19 plusOrMinus 2.toByte)) or (not be (19 plusOrMinus 2.toByte)))
sevenInt should (not (be (18 plusOrMinus 2.toByte)) or not (be (19 plusOrMinus 2.toByte)))
sevenInt should (not be (17 plusOrMinus 2.toByte) or not be (17 plusOrMinus 2.toByte))
// Short plusOrMinus Short
sevenShort should ((not be (19.toShort plusOrMinus 2.toShort)) or (not be (19.toShort plusOrMinus 2.toShort)))
sevenShort should (not (be (18.toShort plusOrMinus 2.toShort)) or not (be (19.toShort plusOrMinus 2.toShort)))
sevenShort should (not be (17.toShort plusOrMinus 2.toShort) or not be (17.toShort plusOrMinus 2.toShort))
// Short plusOrMinus Byte
sevenShort should ((not be (19.toShort plusOrMinus 2.toByte)) or (not be (19.toShort plusOrMinus 2.toByte)))
sevenShort should (not (be (18.toShort plusOrMinus 2.toByte)) or not (be (19.toShort plusOrMinus 2.toByte)))
sevenShort should (not be (17.toShort plusOrMinus 2.toByte) or not be (17.toShort plusOrMinus 2.toByte))
// Byte plusOrMinus Byte
sevenByte should ((not be (19.toByte plusOrMinus 2.toByte)) or (not be (19.toByte plusOrMinus 2.toByte)))
sevenByte should (not (be (18.toByte plusOrMinus 2.toByte)) or not (be (19.toByte plusOrMinus 2.toByte)))
sevenByte should (not be (17.toByte plusOrMinus 2.toByte) or not be (17.toByte plusOrMinus 2.toByte))
}
def `should throw TestFailedException if the number is not within the specified range` {
// Double plusOrMinus Double
val caught1 = intercept[TestFailedException] {
sevenDotOh should be (17.1 plusOrMinus 0.2)
}
assert(caught1.getMessage === "7.0 was not 17.1 plus or minus 0.2")
// Double plusOrMinus Float
val caught2 = intercept[TestFailedException] {
sevenDotOh should be (17.1 plusOrMinus 0.2f)
}
assert(caught2.getMessage === "7.0 was not 17.1 plus or minus 0.20000000298023224")
// Double plusOrMinus Long
val caught3 = intercept[TestFailedException] {
sevenDotOh should be (17.1 plusOrMinus 2L)
}
assert(caught3.getMessage === "7.0 was not 17.1 plus or minus 2.0")
// Double plusOrMinus Int
val caught4 = intercept[TestFailedException] {
sevenDotOh should be (17.1 plusOrMinus 2)
}
assert(caught4.getMessage === "7.0 was not 17.1 plus or minus 2.0")
// Double plusOrMinus Short
val caught5 = intercept[TestFailedException] {
sevenDotOh should be (17.1 plusOrMinus 2.toShort)
}
assert(caught5.getMessage === "7.0 was not 17.1 plus or minus 2.0")
// Double plusOrMinus Byte
val caught6 = intercept[TestFailedException] {
sevenDotOh should be (17.1 plusOrMinus 2.toByte)
}
assert(caught6.getMessage === "7.0 was not 17.1 plus or minus 2.0")
// Float plusOrMinus Float
val caught7 = intercept[TestFailedException] {
sevenDotOhFloat should be (17.1f plusOrMinus 0.2f)
}
assert(caught7.getMessage === "7.0 was not 17.1 plus or minus 0.2")
// Float plusOrMinus Long
val caught8 = intercept[TestFailedException] {
sevenDotOhFloat should be (17.1f plusOrMinus 2L)
}
assert(caught8.getMessage === "7.0 was not 17.1 plus or minus 2.0")
// Float plusOrMinus Int
val caught9 = intercept[TestFailedException] {
sevenDotOhFloat should be (17.1f plusOrMinus 2)
}
assert(caught9.getMessage === "7.0 was not 17.1 plus or minus 2.0")
// Float plusOrMinus Short
val caught10 = intercept[TestFailedException] {
sevenDotOhFloat should be (17.1f plusOrMinus 2.toShort)
}
assert(caught10.getMessage === "7.0 was not 17.1 plus or minus 2.0")
// Float plusOrMinus Byte
val caught11 = intercept[TestFailedException] {
sevenDotOhFloat should be (17.1f plusOrMinus 2.toByte)
}
assert(caught11.getMessage === "7.0 was not 17.1 plus or minus 2.0")
// Long plusOrMinus Long
val caught12 = intercept[TestFailedException] {
sevenLong should be (19L plusOrMinus 2L)
}
assert(caught12.getMessage === "7 was not 19 plus or minus 2")
// Long plusOrMinus Int
val caught13 = intercept[TestFailedException] {
sevenLong should be (19L plusOrMinus 2)
}
assert(caught13.getMessage === "7 was not 19 plus or minus 2")
// Long plusOrMinus Short
val caught14 = intercept[TestFailedException] {
sevenLong should be (19L plusOrMinus 2.toShort)
}
assert(caught14.getMessage === "7 was not 19 plus or minus 2")
// Long plusOrMinus Byte
val caught15 = intercept[TestFailedException] {
sevenLong should be (19L plusOrMinus 2.toByte)
}
assert(caught15.getMessage === "7 was not 19 plus or minus 2")
// Int plusOrMinus Int
val caught16 = intercept[TestFailedException] {
sevenInt should be (19 plusOrMinus 2)
}
assert(caught16.getMessage === "7 was not 19 plus or minus 2")
// Int plusOrMinus Short
val caught17 = intercept[TestFailedException] {
sevenInt should be (19 plusOrMinus 2.toShort)
}
assert(caught17.getMessage === "7 was not 19 plus or minus 2")
// Int plusOrMinus Byte
val caught18 = intercept[TestFailedException] {
sevenInt should be (19 plusOrMinus 2.toByte)
}
assert(caught18.getMessage === "7 was not 19 plus or minus 2")
// Short plusOrMinus Short
val caught19 = intercept[TestFailedException] {
sevenShort should be (19.toShort plusOrMinus 2.toShort)
}
assert(caught19.getMessage === "7 was not 19 plus or minus 2")
// Short plusOrMinus Byte
val caught20 = intercept[TestFailedException] {
sevenShort should be (19.toShort plusOrMinus 2.toByte)
}
assert(caught20.getMessage === "7 was not 19 plus or minus 2")
// Byte plusOrMinus Byte
val caught21 = intercept[TestFailedException] {
sevenByte should be (19.toByte plusOrMinus 2.toByte)
}
assert(caught21.getMessage === "7 was not 19 plus or minus 2")
}
def `should throw TestFailedException if the number is within the specified range, when used with not` {
// Double plusOrMinus Double
val caught1 = intercept[TestFailedException] {
sevenDotOh should not be (7.1 plusOrMinus 0.2)
}
assert(caught1.getMessage === "7.0 was 7.1 plus or minus 0.2")
// Double plusOrMinus Float
val caught2 = intercept[TestFailedException] {
sevenDotOh should not be (7.1 plusOrMinus 0.2f)
}
assert(caught2.getMessage === "7.0 was 7.1 plus or minus 0.20000000298023224")
// Double plusOrMinus Long
val caught3 = intercept[TestFailedException] {
sevenDotOh should not be (7.1 plusOrMinus 2L)
}
assert(caught3.getMessage === "7.0 was 7.1 plus or minus 2.0")
// Double plusOrMinus Int
val caught4 = intercept[TestFailedException] {
sevenDotOh should not be (7.1 plusOrMinus 2)
}
assert(caught4.getMessage === "7.0 was 7.1 plus or minus 2.0")
// Double plusOrMinus Short
val caught5 = intercept[TestFailedException] {
sevenDotOh should not be (7.1 plusOrMinus 2.toShort)
}
assert(caught5.getMessage === "7.0 was 7.1 plus or minus 2.0")
// Double plusOrMinus Byte
val caught6 = intercept[TestFailedException] {
sevenDotOh should not be (7.1 plusOrMinus 2.toByte)
}
assert(caught6.getMessage === "7.0 was 7.1 plus or minus 2.0")
// Float plusOrMinus Float
val caught7 = intercept[TestFailedException] {
sevenDotOhFloat should not be (7.1f plusOrMinus 0.2f)
}
assert(caught7.getMessage === "7.0 was 7.1 plus or minus 0.2")
// Float plusOrMinus Long
val caught8 = intercept[TestFailedException] {
sevenDotOhFloat should not be (7.1f plusOrMinus 2L)
}
assert(caught8.getMessage === "7.0 was 7.1 plus or minus 2.0")
// Float plusOrMinus Int
val caught9 = intercept[TestFailedException] {
sevenDotOhFloat should not be (7.1f plusOrMinus 2)
}
assert(caught9.getMessage === "7.0 was 7.1 plus or minus 2.0")
// Float plusOrMinus Short
val caught10 = intercept[TestFailedException] {
sevenDotOhFloat should not be (7.1f plusOrMinus 2.toShort)
}
assert(caught10.getMessage === "7.0 was 7.1 plus or minus 2.0")
// Float plusOrMinus Byte
val caught11 = intercept[TestFailedException] {
sevenDotOhFloat should not be (7.1f plusOrMinus 2.toByte)
}
assert(caught11.getMessage === "7.0 was 7.1 plus or minus 2.0")
// Long plusOrMinus Long
val caught12 = intercept[TestFailedException] {
sevenLong should not be (9L plusOrMinus 2L)
}
assert(caught12.getMessage === "7 was 9 plus or minus 2")
// Long plusOrMinus Int
val caught13 = intercept[TestFailedException] {
sevenLong should not be (9L plusOrMinus 2)
}
assert(caught13.getMessage === "7 was 9 plus or minus 2")
// Long plusOrMinus Short
val caught14 = intercept[TestFailedException] {
sevenLong should not be (9L plusOrMinus 2.toShort)
}
assert(caught14.getMessage === "7 was 9 plus or minus 2")
// Long plusOrMinus Byte
val caught15 = intercept[TestFailedException] {
sevenLong should not be (9L plusOrMinus 2.toByte)
}
assert(caught15.getMessage === "7 was 9 plus or minus 2")
// Int plusOrMinus Int
val caught16 = intercept[TestFailedException] {
sevenInt should not be (9 plusOrMinus 2)
}
assert(caught16.getMessage === "7 was 9 plus or minus 2")
// Int plusOrMinus Short
val caught17 = intercept[TestFailedException] {
sevenInt should not be (9 plusOrMinus 2.toShort)
}
assert(caught17.getMessage === "7 was 9 plus or minus 2")
// Int plusOrMinus Byte
val caught18 = intercept[TestFailedException] {
sevenInt should not be (9 plusOrMinus 2.toByte)
}
assert(caught18.getMessage === "7 was 9 plus or minus 2")
// Short plusOrMinus Short
val caught19 = intercept[TestFailedException] {
sevenShort should not be (9.toShort plusOrMinus 2.toShort)
}
assert(caught19.getMessage === "7 was 9 plus or minus 2")
// Short plusOrMinus Byte
val caught20 = intercept[TestFailedException] {
sevenShort should not be (9.toShort plusOrMinus 2.toByte)
}
assert(caught20.getMessage === "7 was 9 plus or minus 2")
// Byte plusOrMinus Byte
val caught21 = intercept[TestFailedException] {
sevenByte should not be (9.toByte plusOrMinus 2.toByte)
}
assert(caught21.getMessage === "7 was 9 plus or minus 2")
}
def `should throw TestFailedException if the number is not within the specified range, when used in a logical-and expression` {
// Double plusOrMinus Double
val caught1 = intercept[TestFailedException] {
sevenDotOh should ((be (17.1 plusOrMinus 0.2)) and (be (17.1 plusOrMinus 0.2)))
}
assert(caught1.getMessage === "7.0 was not 17.1 plus or minus 0.2")
val caught2 = intercept[TestFailedException] {
sevenDotOh should (be (6.9 plusOrMinus 0.2) and (be (17.1 plusOrMinus 0.2)))
}
assert(caught2.getMessage === "7.0 was 6.9 plus or minus 0.2, but 7.0 was not 17.1 plus or minus 0.2")
val caught3 = intercept[TestFailedException] {
sevenDotOh should (be (17.0 plusOrMinus 0.2) and be (7.0 plusOrMinus 0.2))
}
assert(caught3.getMessage === "7.0 was not 17.0 plus or minus 0.2")
// Double plusOrMinus Float
val caught4 = intercept[TestFailedException] {
sevenDotOh should ((be (17.1 plusOrMinus 0.2f)) and (be (17.1 plusOrMinus 0.2f)))
}
assert(caught4.getMessage === "7.0 was not 17.1 plus or minus 0.20000000298023224")
val caught5 = intercept[TestFailedException] {
sevenDotOh should (be (6.9 plusOrMinus 0.2f) and (be (17.1 plusOrMinus 0.2f)))
}
assert(caught5.getMessage === "7.0 was 6.9 plus or minus 0.20000000298023224, but 7.0 was not 17.1 plus or minus 0.20000000298023224")
val caught6 = intercept[TestFailedException] {
sevenDotOh should (be (17.0 plusOrMinus 0.2f) and be (7.0 plusOrMinus 0.2f))
}
assert(caught6.getMessage === "7.0 was not 17.0 plus or minus 0.20000000298023224")
// Double plusOrMinus Long
val caught7 = intercept[TestFailedException] {
sevenDotOh should ((be (17.1 plusOrMinus 2L)) and (be (17.1 plusOrMinus 2L)))
}
assert(caught7.getMessage === "7.0 was not 17.1 plus or minus 2.0")
val caught8 = intercept[TestFailedException] {
sevenDotOh should (be (6.9 plusOrMinus 2L) and (be (17.1 plusOrMinus 2L)))
}
assert(caught8.getMessage === "7.0 was 6.9 plus or minus 2.0, but 7.0 was not 17.1 plus or minus 2.0")
val caught9 = intercept[TestFailedException] {
sevenDotOh should (be (17.0 plusOrMinus 2L) and be (7.0 plusOrMinus 2L))
}
assert(caught9.getMessage === "7.0 was not 17.0 plus or minus 2.0")
// Double plusOrMinus Int
val caught10 = intercept[TestFailedException] {
sevenDotOh should ((be (17.1 plusOrMinus 2)) and (be (17.1 plusOrMinus 2)))
}
assert(caught10.getMessage === "7.0 was not 17.1 plus or minus 2.0")
val caught11 = intercept[TestFailedException] {
sevenDotOh should (be (6.9 plusOrMinus 2) and (be (17.1 plusOrMinus 2)))
}
assert(caught2.getMessage === "7.0 was 6.9 plus or minus 0.2, but 7.0 was not 17.1 plus or minus 0.2")
val caught12 = intercept[TestFailedException] {
sevenDotOh should (be (7.0 plusOrMinus 2) and be (17.0 plusOrMinus 2))
}
assert(caught12.getMessage === "7.0 was 7.0 plus or minus 2.0, but 7.0 was not 17.0 plus or minus 2.0")
// Double plusOrMinus Short
val caught13 = intercept[TestFailedException] {
sevenDotOh should ((be (17.1 plusOrMinus 2.toShort)) and (be (17.1 plusOrMinus 2.toShort)))
}
assert(caught13.getMessage === "7.0 was not 17.1 plus or minus 2.0")
val caught14 = intercept[TestFailedException] {
sevenDotOh should (be (6.9 plusOrMinus 2.toShort) and (be (17.1 plusOrMinus 2.toShort)))
}
assert(caught14.getMessage === "7.0 was 6.9 plus or minus 2.0, but 7.0 was not 17.1 plus or minus 2.0")
val caught15 = intercept[TestFailedException] {
sevenDotOh should (be (17.0 plusOrMinus 2.toShort) and be (7.0 plusOrMinus 2.toShort))
}
assert(caught15.getMessage === "7.0 was not 17.0 plus or minus 2.0")
// Double plusOrMinus Byte
val caught16 = intercept[TestFailedException] {
sevenDotOh should ((be (17.1 plusOrMinus 2.toByte)) and (be (17.1 plusOrMinus 2.toByte)))
}
assert(caught16.getMessage === "7.0 was not 17.1 plus or minus 2.0")
val caught17 = intercept[TestFailedException] {
sevenDotOh should (be (6.9 plusOrMinus 2.toByte) and (be (17.1 plusOrMinus 2.toByte)))
}
assert(caught17.getMessage === "7.0 was 6.9 plus or minus 2.0, but 7.0 was not 17.1 plus or minus 2.0")
val caught18 = intercept[TestFailedException] {
sevenDotOh should (be (17.0 plusOrMinus 2.toByte) and be (7.0 plusOrMinus 2.toByte))
}
assert(caught18.getMessage === "7.0 was not 17.0 plus or minus 2.0")
// Float plusOrMinus Float
val caught19 = intercept[TestFailedException] {
sevenDotOhFloat should ((be (17.1f plusOrMinus 0.2f)) and (be (17.1f plusOrMinus 0.2f)))
}
assert(caught19.getMessage === "7.0 was not 17.1 plus or minus 0.2")
val caught20 = intercept[TestFailedException] {
sevenDotOhFloat should (be (6.9f plusOrMinus 0.2f) and (be (17.1f plusOrMinus 0.2f)))
}
assert(caught20.getMessage === "7.0 was 6.9 plus or minus 0.2, but 7.0 was not 17.1 plus or minus 0.2")
val caught21 = intercept[TestFailedException] {
sevenDotOhFloat should (be (17.0f plusOrMinus 0.2f) and be (7.0f plusOrMinus 0.2f))
}
assert(caught21.getMessage === "7.0 was not 17.0 plus or minus 0.2")
// Float plusOrMinus Long
val caught22 = intercept[TestFailedException] {
sevenDotOhFloat should ((be (17.1f plusOrMinus 2L)) and (be (17.1f plusOrMinus 2L)))
}
assert(caught22.getMessage === "7.0 was not 17.1 plus or minus 2.0")
val caught23 = intercept[TestFailedException] {
sevenDotOhFloat should (be (6.9f plusOrMinus 2L) and (be (17.1f plusOrMinus 2L)))
}
assert(caught23.getMessage === "7.0 was 6.9 plus or minus 2.0, but 7.0 was not 17.1 plus or minus 2.0")
val caught24 = intercept[TestFailedException] {
sevenDotOhFloat should (be (17.0f plusOrMinus 2L) and be (7.0f plusOrMinus 2L))
}
assert(caught24.getMessage === "7.0 was not 17.0 plus or minus 2.0")
// Float plusOrMinus Int
val caught25 = intercept[TestFailedException] {
sevenDotOhFloat should ((be (17.1f plusOrMinus 2)) and (be (17.1f plusOrMinus 2)))
}
assert(caught25.getMessage === "7.0 was not 17.1 plus or minus 2.0")
val caught26 = intercept[TestFailedException] {
sevenDotOhFloat should (be (6.9f plusOrMinus 2) and (be (17.1f plusOrMinus 2)))
}
assert(caught26.getMessage === "7.0 was 6.9 plus or minus 2.0, but 7.0 was not 17.1 plus or minus 2.0")
val caught27 = intercept[TestFailedException] {
sevenDotOhFloat should (be (17.0f plusOrMinus 2) and be (7.0f plusOrMinus 2))
}
assert(caught27.getMessage === "7.0 was not 17.0 plus or minus 2.0")
// Float plusOrMinus Short
val caught28 = intercept[TestFailedException] {
sevenDotOhFloat should ((be (17.1f plusOrMinus 2.toShort)) and (be (17.1f plusOrMinus 2.toShort)))
}
assert(caught28.getMessage === "7.0 was not 17.1 plus or minus 2.0")
val caught29 = intercept[TestFailedException] {
sevenDotOhFloat should (be (6.9f plusOrMinus 2.toShort) and (be (17.1f plusOrMinus 2.toShort)))
}
assert(caught29.getMessage === "7.0 was 6.9 plus or minus 2.0, but 7.0 was not 17.1 plus or minus 2.0")
val caught30 = intercept[TestFailedException] {
sevenDotOhFloat should (be (17.0f plusOrMinus 2.toShort) and be (7.0f plusOrMinus 2.toShort))
}
assert(caught30.getMessage === "7.0 was not 17.0 plus or minus 2.0")
// Float plusOrMinus Byte
val caught31 = intercept[TestFailedException] {
sevenDotOhFloat should ((be (17.1f plusOrMinus 2.toByte)) and (be (17.1f plusOrMinus 2.toByte)))
}
assert(caught31.getMessage === "7.0 was not 17.1 plus or minus 2.0")
val caught32 = intercept[TestFailedException] {
sevenDotOhFloat should (be (6.9f plusOrMinus 2.toByte) and (be (17.1f plusOrMinus 2.toByte)))
}
assert(caught32.getMessage === "7.0 was 6.9 plus or minus 2.0, but 7.0 was not 17.1 plus or minus 2.0")
val caught33 = intercept[TestFailedException] {
sevenDotOhFloat should (be (17.0f plusOrMinus 2.toByte) and be (7.0f plusOrMinus 2.toByte))
}
assert(caught33.getMessage === "7.0 was not 17.0 plus or minus 2.0")
// Long plusOrMinus Long
val caught34 = intercept[TestFailedException] {
sevenLong should ((be (19L plusOrMinus 2L)) and (be (9L plusOrMinus 2L)))
}
assert(caught34.getMessage === "7 was not 19 plus or minus 2")
val caught35 = intercept[TestFailedException] {
sevenLong should (be (18L plusOrMinus 2L) and (be (19L plusOrMinus 2L)))
}
assert(caught35.getMessage === "7 was not 18 plus or minus 2")
val caught36 = intercept[TestFailedException] {
sevenLong should (be (17L plusOrMinus 2L) and be (7L plusOrMinus 2L))
}
assert(caught36.getMessage === "7 was not 17 plus or minus 2")
// Long plusOrMinus Int
val caught37 = intercept[TestFailedException] {
sevenLong should ((be (19L plusOrMinus 2)) and (be (9L plusOrMinus 2)))
}
assert(caught37.getMessage === "7 was not 19 plus or minus 2")
val caught38 = intercept[TestFailedException] {
sevenLong should (be (8L plusOrMinus 2) and (be (19L plusOrMinus 2)))
}
assert(caught38.getMessage === "7 was 8 plus or minus 2, but 7 was not 19 plus or minus 2")
val caught39 = intercept[TestFailedException] {
sevenLong should (be (17L plusOrMinus 2) and be (7L plusOrMinus 2))
}
assert(caught39.getMessage === "7 was not 17 plus or minus 2")
// Long plusOrMinus Short
val caught40 = intercept[TestFailedException] {
sevenLong should ((be (19L plusOrMinus 2.toShort)) and (be (9L plusOrMinus 2.toShort)))
}
assert(caught40.getMessage === "7 was not 19 plus or minus 2")
val caught41 = intercept[TestFailedException] {
sevenLong should (be (8L plusOrMinus 2.toShort) and (be (19L plusOrMinus 2.toShort)))
}
assert(caught41.getMessage === "7 was 8 plus or minus 2, but 7 was not 19 plus or minus 2")
val caught42 = intercept[TestFailedException] {
sevenLong should (be (17L plusOrMinus 2.toShort) and be (7L plusOrMinus 2.toShort))
}
assert(caught42.getMessage === "7 was not 17 plus or minus 2")
// Long plusOrMinus Byte
val caught43 = intercept[TestFailedException] {
sevenLong should ((be (19L plusOrMinus 2.toByte)) and (be (9L plusOrMinus 2.toByte)))
}
assert(caught43.getMessage === "7 was not 19 plus or minus 2")
val caught44 = intercept[TestFailedException] {
sevenLong should (be (8L plusOrMinus 2.toByte) and (be (19L plusOrMinus 2.toByte)))
}
assert(caught44.getMessage === "7 was 8 plus or minus 2, but 7 was not 19 plus or minus 2")
val caught45 = intercept[TestFailedException] {
sevenLong should (be (17L plusOrMinus 2.toByte) and be (7L plusOrMinus 2.toByte))
}
assert(caught45.getMessage === "7 was not 17 plus or minus 2")
// Int plusOrMinus Int
val caught46 = intercept[TestFailedException] {
sevenInt should ((be (19 plusOrMinus 2)) and (be (9 plusOrMinus 2)))
}
assert(caught46.getMessage === "7 was not 19 plus or minus 2")
val caught47 = intercept[TestFailedException] {
sevenInt should (be (8 plusOrMinus 2) and (be (19 plusOrMinus 2)))
}
assert(caught47.getMessage === "7 was 8 plus or minus 2, but 7 was not 19 plus or minus 2")
val caught48 = intercept[TestFailedException] {
sevenInt should (be (17 plusOrMinus 2) and be (7 plusOrMinus 2))
}
assert(caught48.getMessage === "7 was not 17 plus or minus 2")
// Int plusOrMinus Short
val caught49 = intercept[TestFailedException] {
sevenInt should ((be (9 plusOrMinus 2.toShort)) and (be (19 plusOrMinus 2.toShort)))
}
assert(caught49.getMessage === "7 was 9 plus or minus 2, but 7 was not 19 plus or minus 2")
val caught50 = intercept[TestFailedException] {
sevenInt should (be (8 plusOrMinus 2.toShort) and (be (19 plusOrMinus 2.toShort)))
}
assert(caught50.getMessage === "7 was 8 plus or minus 2, but 7 was not 19 plus or minus 2")
val caught51 = intercept[TestFailedException] {
sevenInt should (be (17 plusOrMinus 2.toShort) and be (7 plusOrMinus 2.toShort))
}
assert(caught51.getMessage === "7 was not 17 plus or minus 2")
// Int plusOrMinus Byte
val caught52 = intercept[TestFailedException] {
sevenInt should ((be (19 plusOrMinus 2.toByte)) and (be (9 plusOrMinus 2.toByte)))
}
assert(caught52.getMessage === "7 was not 19 plus or minus 2")
val caught53 = intercept[TestFailedException] {
sevenInt should (be (8 plusOrMinus 2.toByte) and (be (19 plusOrMinus 2.toByte)))
}
assert(caught53.getMessage === "7 was 8 plus or minus 2, but 7 was not 19 plus or minus 2")
val caught54 = intercept[TestFailedException] {
sevenInt should (be (17 plusOrMinus 2.toByte) and be (7 plusOrMinus 2.toByte))
}
assert(caught54.getMessage === "7 was not 17 plus or minus 2")
// Short plusOrMinus Short
val caught55 = intercept[TestFailedException] {
sevenShort should ((be (19.toShort plusOrMinus 2.toShort)) and (be (9.toShort plusOrMinus 2.toShort)))
}
assert(caught55.getMessage === "7 was not 19 plus or minus 2")
val caught56 = intercept[TestFailedException] {
sevenShort should (be (8.toShort plusOrMinus 2.toShort) and (be (19.toShort plusOrMinus 2.toShort)))
}
assert(caught56.getMessage === "7 was 8 plus or minus 2, but 7 was not 19 plus or minus 2")
val caught57 = intercept[TestFailedException] {
sevenShort should (be (17.toShort plusOrMinus 2.toShort) and be (7.toShort plusOrMinus 2.toShort))
}
assert(caught57.getMessage === "7 was not 17 plus or minus 2")
// Short plusOrMinus Byte
val caught58 = intercept[TestFailedException] {
sevenShort should ((be (19.toShort plusOrMinus 2.toByte)) and (be (9.toShort plusOrMinus 2.toByte)))
}
assert(caught58.getMessage === "7 was not 19 plus or minus 2")
val caught59 = intercept[TestFailedException] {
sevenShort should (be (8.toShort plusOrMinus 2.toByte) and (be (19.toShort plusOrMinus 2.toByte)))
}
assert(caught59.getMessage === "7 was 8 plus or minus 2, but 7 was not 19 plus or minus 2")
val caught60 = intercept[TestFailedException] {
sevenShort should (be (17.toShort plusOrMinus 2.toByte) and be (7.toShort plusOrMinus 2.toByte))
}
assert(caught60.getMessage === "7 was not 17 plus or minus 2")
// Byte plusOrMinus Byte
val caught61 = intercept[TestFailedException] {
sevenByte should ((be (19.toByte plusOrMinus 2.toByte)) and (be (9.toByte plusOrMinus 2.toByte)))
}
assert(caught61.getMessage === "7 was not 19 plus or minus 2")
val caught62 = intercept[TestFailedException] {
sevenByte should (be (8.toByte plusOrMinus 2.toByte) and (be (19.toByte plusOrMinus 2.toByte)))
}
assert(caught62.getMessage === "7 was 8 plus or minus 2, but 7 was not 19 plus or minus 2")
val caught63 = intercept[TestFailedException] {
sevenByte should (be (17.toByte plusOrMinus 2.toByte) and be (7.toByte plusOrMinus 2.toByte))
}
assert(caught63.getMessage === "7 was not 17 plus or minus 2")
}
def `should throw TestFailedException if the number is not within the specified range, when used in a logical-or expression` {
// Double plusOrMinus Double
val caught1 = intercept[TestFailedException] {
sevenDotOh should ((be (17.1 plusOrMinus 0.2)) or (be (17.1 plusOrMinus 0.2)))
}
assert(caught1.getMessage === "7.0 was not 17.1 plus or minus 0.2, and 7.0 was not 17.1 plus or minus 0.2")
val caught2 = intercept[TestFailedException] {
sevenDotOh should (be (16.9 plusOrMinus 0.2) or (be (17.1 plusOrMinus 0.2)))
}
assert(caught2.getMessage === "7.0 was not 16.9 plus or minus 0.2, and 7.0 was not 17.1 plus or minus 0.2")
val caught3 = intercept[TestFailedException] {
sevenDotOh should (be (17.0 plusOrMinus 0.2) or be (97.0 plusOrMinus 0.2))
}
assert(caught3.getMessage === "7.0 was not 17.0 plus or minus 0.2, and 7.0 was not 97.0 plus or minus 0.2")
}
def `should throw TestFailedException if the number is within the specified range, when used in a logical-and expression with not` {
// Double plusOrMinus Double
val caught1 = intercept[TestFailedException] {
sevenDotOh should (not (be (17.1 plusOrMinus 0.2)) and not (be (7.1 plusOrMinus 0.2)))
}
assert(caught1.getMessage === "7.0 was not 17.1 plus or minus 0.2, but 7.0 was 7.1 plus or minus 0.2")
val caught2 = intercept[TestFailedException] {
sevenDotOh should (not be (16.9 plusOrMinus 0.2) and (not be (7.1 plusOrMinus 0.2)))
}
assert(caught2.getMessage === "7.0 was not 16.9 plus or minus 0.2, but 7.0 was 7.1 plus or minus 0.2")
val caught3 = intercept[TestFailedException] {
sevenDotOh should (not be (17.0 plusOrMinus 0.2) and not be (7.0 plusOrMinus 0.2))
}
assert(caught3.getMessage === "7.0 was not 17.0 plus or minus 0.2, but 7.0 was 7.0 plus or minus 0.2")
// Check that the error message "short circuits"
val caught4 = intercept[TestFailedException] {
sevenDotOh should (not (be (7.1 plusOrMinus 0.2)) and not (be (7.1 plusOrMinus 0.2)))
}
assert(caught4.getMessage === "7.0 was 7.1 plus or minus 0.2")
}
def `should throw TestFailedException if the number is within the specified range, when used in a logical-or expression with not` {
// Double plusOrMinus Double
val caught1 = intercept[TestFailedException] {
sevenDotOh should (not (be (7.1 plusOrMinus 0.2)) or not (be (7.1 plusOrMinus 0.2)))
}
assert(caught1.getMessage === "7.0 was 7.1 plus or minus 0.2, and 7.0 was 7.1 plus or minus 0.2")
val caught2 = intercept[TestFailedException] {
sevenDotOh should ((not be (6.9 plusOrMinus 0.2)) or (not be (7.1 plusOrMinus 0.2)))
}
assert(caught2.getMessage === "7.0 was 6.9 plus or minus 0.2, and 7.0 was 7.1 plus or minus 0.2")
val caught3 = intercept[TestFailedException] {
sevenDotOh should (not be (7.0 plusOrMinus 0.2) or not be (7.0 plusOrMinus 0.2))
}
assert(caught3.getMessage === "7.0 was 7.0 plus or minus 0.2, and 7.0 was 7.0 plus or minus 0.2")
}
def `should throw IllegalArgumentException if the number passed as the range is 0 or negative` {
// Double plusOrMinus Double
val caught1 = intercept[IllegalArgumentException] {
sevenDotOh should be (7.1 plusOrMinus -0.2)
}
assert(caught1.getMessage === "-0.2 passed to +- was zero or negative. Must be a positive non-zero number.")
// Double plusOrMinus Float
val caught2 = intercept[IllegalArgumentException] {
sevenDotOh should be (7.1 plusOrMinus -0.2f)
}
assert(caught2.getMessage === "-0.20000000298023224 passed to +- was zero or negative. Must be a positive non-zero number.")
// Double plusOrMinus Long
val caught3 = intercept[IllegalArgumentException] {
sevenDotOh should be (7.1 plusOrMinus -2L)
}
assert(caught3.getMessage === "-2.0 passed to +- was zero or negative. Must be a positive non-zero number.")
// Double plusOrMinus Int
val caught4 = intercept[IllegalArgumentException] {
sevenDotOh should be (7.1 plusOrMinus -2)
}
assert(caught4.getMessage === "-2.0 passed to +- was zero or negative. Must be a positive non-zero number.")
// Double plusOrMinus Short
val caught5 = intercept[IllegalArgumentException] {
sevenDotOh should be (7.1 plusOrMinus (-2).toShort)
}
assert(caught5.getMessage === "-2.0 passed to +- was zero or negative. Must be a positive non-zero number.")
// Double plusOrMinus Byte
val caught6 = intercept[IllegalArgumentException] {
sevenDotOh should be (7.1 plusOrMinus (-2).toByte)
}
assert(caught6.getMessage === "-2.0 passed to +- was zero or negative. Must be a positive non-zero number.")
// Float plusOrMinus Float
val caught7 = intercept[IllegalArgumentException] {
sevenDotOhFloat should be (7.1f plusOrMinus -0.2f)
}
assert(caught7.getMessage === "-0.2 passed to +- was zero or negative. Must be a positive non-zero number.")
// Float plusOrMinus Long
val caught8 = intercept[IllegalArgumentException] {
sevenDotOhFloat should be (7.1f plusOrMinus -2L)
}
assert(caught8.getMessage === "-2.0 passed to +- was zero or negative. Must be a positive non-zero number.")
// Float plusOrMinus Int
val caught9 = intercept[IllegalArgumentException] {
sevenDotOhFloat should be (7.1f plusOrMinus -2)
}
assert(caught9.getMessage === "-2.0 passed to +- was zero or negative. Must be a positive non-zero number.")
// Float plusOrMinus Short
val caught10 = intercept[IllegalArgumentException] {
sevenDotOhFloat should be (7.1f plusOrMinus (-2).toShort)
}
assert(caught10.getMessage === "-2.0 passed to +- was zero or negative. Must be a positive non-zero number.")
// Float plusOrMinus Byte
val caught11 = intercept[IllegalArgumentException] {
sevenDotOhFloat should be (7.1f plusOrMinus (-2).toByte)
}
assert(caught11.getMessage === "-2.0 passed to +- was zero or negative. Must be a positive non-zero number.")
// Long plusOrMinus Long
val caught12 = intercept[IllegalArgumentException] {
sevenLong should be (9L plusOrMinus -2L)
}
assert(caught12.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
// Long plusOrMinus Int
val caught13 = intercept[IllegalArgumentException] {
sevenLong should be (9L plusOrMinus -2)
}
assert(caught13.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
// Long plusOrMinus Short
val caught14 = intercept[IllegalArgumentException] {
sevenLong should be (9L plusOrMinus (-2).toShort)
}
assert(caught14.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
// Long plusOrMinus Byte
val caught15 = intercept[IllegalArgumentException] {
sevenLong should be (9L plusOrMinus (-2).toByte)
}
assert(caught15.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
// Int plusOrMinus Int
val caught16 = intercept[IllegalArgumentException] {
sevenInt should be (9 plusOrMinus -2)
}
assert(caught16.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
// Int plusOrMinus Short
val caught17 = intercept[IllegalArgumentException] {
sevenInt should be (9 plusOrMinus (-2).toShort)
}
assert(caught17.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
// Int plusOrMinus Byte
val caught18 = intercept[IllegalArgumentException] {
sevenInt should be (9 plusOrMinus (-2).toByte)
}
assert(caught18.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
// Short plusOrMinus Short
val caught19 = intercept[IllegalArgumentException] {
sevenShort should be (9.toShort plusOrMinus (-2).toShort)
}
assert(caught19.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
// Short plusOrMinus Byte
val caught20 = intercept[IllegalArgumentException] {
sevenShort should be (9.toShort plusOrMinus (-2).toByte)
}
assert(caught20.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
// Byte plusOrMinus Byte
val caught21 = intercept[IllegalArgumentException] {
sevenByte should be (9.toByte plusOrMinus (-2).toByte)
}
assert(caught21.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
}
}
}
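// --- Added usage sketch (not part of the original spec) ---
// A minimal, dependency-free illustration of the tolerance semantics the
// assertions above exercise: `pivot plusOrMinus tolerance` accepts any value
// v with pivot - tolerance <= v <= pivot + tolerance, and a zero or negative
// tolerance is rejected up front.
object PlusOrMinusSketch {
  def within(value: Double, pivot: Double, tolerance: Double): Boolean = {
    require(tolerance > 0,
      s"$tolerance passed to +- was zero or negative. Must be a positive non-zero number.")
    pivot - tolerance <= value && value <= pivot + tolerance
  }
  def main(args: Array[String]): Unit = {
    assert(within(7.0, 6.9, 0.2))   // "7.0 was 6.9 plus or minus 0.2"
    assert(!within(7.0, 17.1, 0.2)) // "7.0 was not 17.1 plus or minus 0.2"
  }
}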
|
svn2github/scalatest
|
src/test/scala/org/scalatest/matchers/ShouldPlusOrMinusSpec.scala
|
Scala
|
apache-2.0
| 83,772 |
/*
* The MIT License (MIT)
*
* Copyright (c) 2017 Ángel Cervera Claudio
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*/
package com.acervera.osm4scala.examples.primitivesextraction
import java.io.{File, FilenameFilter}
import org.apache.commons.io.FileUtils
import org.scalatest.{BeforeAndAfter, FunSuite}
import com.acervera.osm4scala.examples.primitivesextraction.PrimitivesExtraction._
/**
* Created by angelcervera on 23/06/16.
*/
class PrimitivesExtractionSuite extends FunSuite with BeforeAndAfter {
val extractRootFolder = "target/testing/PrimitivesExtractionSuite/"
before {
FileUtils.deleteQuietly(new File(extractRootFolder))
}
test("Extracting dense primitives from pbf") {
val pbfFile = "examples/primitivesextraction/src/test/resources/com/acervera/osm4scala/examples/primitivesextraction/dense_blocks.osm.pbf"
fromPbf(pbfFile, extractRootFolder)
assert( new File(extractRootFolder).list(new FilenameFilter {
override def accept(dir: File, name: String): Boolean = new File(dir, name).isDirectory
}).length == 2, "Must extract two blocks.")
assert( new File(s"$extractRootFolder/0/").list(new FilenameFilter {
override def accept(dir: File, name: String): Boolean = name.endsWith(".dense")
}).length == 1, "First block, one dense.")
assert( new File(s"$extractRootFolder/1/").list(new FilenameFilter {
override def accept(dir: File, name: String): Boolean = name.endsWith(".dense")
}).length == 1, "Must extract two dense primitives.")
}
test("Extracting relations primitives from pbf") {
val pbfFile = "examples/primitivesextraction/src/test/resources/com/acervera/osm4scala/examples/primitivesextraction/relations_blocks.osm.pbf"
fromPbf(pbfFile, extractRootFolder)
assert( new File(extractRootFolder).list(new FilenameFilter {
override def accept(dir: File, name: String): Boolean = new File(dir, name).isDirectory
}).length == 1, "Must extract one blocks.")
assert( new File(s"$extractRootFolder/0/").list(new FilenameFilter {
override def accept(dir: File, name: String): Boolean = name.endsWith(".relation")
}).length == 8000, "8000 relations per block.")
}
test("Extracting ways primitives from pbf") {
val pbfFile = "examples/primitivesextraction/src/test/resources/com/acervera/osm4scala/examples/primitivesextraction/ways_blocks.osm.pbf"
fromPbf(pbfFile, extractRootFolder)
assert( new File(extractRootFolder).list(new FilenameFilter {
override def accept(dir: File, name: String): Boolean = new File(dir, name).isDirectory
}).length == 1, "Must extract one blocks.")
assert( new File(s"$extractRootFolder/0/").list(new FilenameFilter {
override def accept(dir: File, name: String): Boolean = name.endsWith(".way")
}).length == 8000, "8000 ways per block.")
}
}
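// --- Added sketch (not part of the original suite) ---
// The assertions above count extracted files by suffix via FilenameFilter.
// The same counting logic as a small JDK-only helper:
object CountBySuffixSketch {
  import java.io.{File, FilenameFilter}
  def countWithSuffix(dir: File, suffix: String): Int = {
    val names = dir.list(new FilenameFilter {
      override def accept(dir: File, name: String): Boolean = name.endsWith(suffix)
    })
    if (names == null) 0 else names.length // File.list returns null if dir is not a directory
  }
  def main(args: Array[String]): Unit =
    println(countWithSuffix(new File("."), ".scala"))
}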
|
angelcervera/pbf4scala
|
examples/primitivesextraction/src/test/scala/com/acervera/osm4scala/examples/primitivesextraction/PrimitivesExtractionSuite.scala
|
Scala
|
mit
| 3,890 |
package com.twitter.finatra.multiserver.test
import com.twitter.finagle.http.{Request, Status}
import com.twitter.finatra.http.{EmbeddedHttpServer, HttpMockResponses}
import com.twitter.finatra.httpclient.HttpClient
import com.twitter.finatra.multiserver.Add2HttpServer.Add2Server
import com.twitter.inject.Mockito
import com.twitter.inject.server.FeatureTest
import com.twitter.util.Future
class Add2ServerFeatureTest extends FeatureTest with Mockito with HttpMockResponses {
val mockHttpClient = smartMock[HttpClient]
override val server =
new EmbeddedHttpServer(new Add2Server)
.bind[HttpClient](mockHttpClient)
"add2" in {
mockHttpClient.execute(any[Request]) returns(
Future(ok("6")),
Future(ok("7")))
server.httpGet(
"/add2?num=5",
andExpect = Status.Ok,
withBody = "7")
}
}
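// --- Added sketch (not part of the original test) ---
// `returns(Future(ok("6")), Future(ok("7")))` above stubs *consecutive*
// calls: the first execute() yields "6", the next "7". A dependency-free
// sketch of consecutive stubbing; the downstream "+1 per call" behaviour is
// an assumption about Add2Server, not taken from its source.
object ConsecutiveStubSketch {
  def main(args: Array[String]): Unit = {
    val canned = Iterator("6", "7")              // one canned reply per call
    def execute(request: String): String = canned.next()
    val first = execute("/add1?num=5")           // hypothetical downstream call
    val second = execute(s"/add1?num=$first")
    assert(second == "7")                        // matches withBody = "7" above
  }
}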
|
syamantm/finatra
|
inject-thrift-client-http-mapper/src/test/scala/com/twitter/finatra/multiserver/test/Add2ServerFeatureTest.scala
|
Scala
|
apache-2.0
| 845 |
package com.twitter.finagle.context
import com.twitter.io.{Buf, BufByteWriter, ByteReader}
import com.twitter.util.{Return, Throw, Try}
/**
* Retries contains the number of times a request has been retried.
*
* @param retries the number of retries
*/
private[finagle] case class Retries(retries: Int)
private[finagle] object Retries
extends Contexts.broadcast.Key[Retries]("com.twitter.finagle.Retries")
{
def current: Option[Retries] =
Contexts.broadcast.get(Retries)
override def marshal(retries: Retries): Buf = {
val bw = BufByteWriter.fixed(4)
bw.writeIntBE(retries.retries)
bw.owned()
}
override def tryUnmarshal(buf: Buf): Try[Retries] = {
if (buf.length != 4) {
Throw(new IllegalArgumentException(
s"Could not extract Retries from Buf. Length ${buf.length} but required 4"))
} else {
val retries: Int = ByteReader(buf).readIntBE()
Return(Retries(retries))
}
}
}
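// --- Added sketch (not part of the original file) ---
// Standalone illustration of the 4-byte big-endian wire format used by
// marshal/tryUnmarshal above, written against java.nio only so it runs
// without finagle on the classpath.
object RetriesWireFormatSketch {
  import java.nio.ByteBuffer
  def marshal(retries: Int): Array[Byte] =
    ByteBuffer.allocate(4).putInt(retries).array() // ByteBuffer is big-endian by default
  def unmarshal(bytes: Array[Byte]): Either[String, Int] =
    if (bytes.length != 4)
      Left(s"Could not extract Retries. Length ${bytes.length} but required 4")
    else
      Right(ByteBuffer.wrap(bytes).getInt())
  def main(args: Array[String]): Unit =
    assert(unmarshal(marshal(3)) == Right(3)) // round trip
}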
|
koshelev/finagle
|
finagle-core/src/main/scala/com/twitter/finagle/context/Retries.scala
|
Scala
|
apache-2.0
| 950 |
package sbt
package compiler
import scala.reflect.Manifest
import scala.tools.nsc.{ast, interpreter, io, reporters, util, CompilerCommand, Global, Phase, Settings}
import interpreter.AbstractFileClassLoader
import io.{AbstractFile, PlainFile, VirtualDirectory}
import ast.parser.Tokens
import reporters.{ConsoleReporter, Reporter}
import util.BatchSourceFile
import Tokens.{EOF, NEWLINE, NEWLINES, SEMI}
import java.io.File
import java.nio.ByteBuffer
import java.net.URLClassLoader
// TODO: provide a way to cleanup backing directory
final class EvalImports(val strings: Seq[(String,Int)], val srcName: String)
final class EvalResult(val tpe: String, val getValue: ClassLoader => Any, val generated: Seq[File], val enclosingModule: String)
final class EvalException(msg: String) extends RuntimeException(msg)
// not thread safe, since it reuses a Global instance
final class Eval(optionsNoncp: Seq[String], classpath: Seq[File], mkReporter: Settings => Reporter, backing: Option[File])
{
def this(mkReporter: Settings => Reporter, backing: Option[File]) = this(Nil, IO.classLocationFile[ScalaObject] :: Nil, mkReporter, backing)
def this() = this(s => new ConsoleReporter(s), None)
backing.foreach(IO.createDirectory)
val classpathString = Path.makeString(classpath)
val options = "-cp" +: classpathString +: optionsNoncp
lazy val settings =
{
val s = new Settings(println)
val command = new CompilerCommand(options.toList, s)
s
}
lazy val reporter = mkReporter(settings)
lazy val global: Global = new Global(settings, reporter)
import global._
import definitions._
def eval(expression: String, imports: EvalImports = noImports, tpeName: Option[String] = None, srcName: String = "<setting>", line: Int = DefaultStartLine): EvalResult =
{
import Eval._
val hash = Hash.toHex(Hash(bytes( bytes(expression) :: optBytes(backing)(fileExistsBytes) :: seqBytes(options)(bytes) ::
seqBytes(classpath)(fileModifiedBytes) :: seqBytes(imports.strings.map(_._1))(bytes) :: optBytes(tpeName)(bytes) :: Nil)))
val moduleName = makeModuleName(hash)
lazy val unit = {
reporter.reset
mkUnit(srcName, line, expression)
}
lazy val run = new Run {
override def units = (unit :: Nil).iterator
}
def unlinkAll(): Unit = for( (sym, _) <- run.symSource ) unlink(sym)
def unlink(sym: Symbol) = sym.owner.info.decls.unlink(sym)
val (tpe, value) =
(tpeName, backing) match {
case (Some(tpe), Some(back)) if classExists(back, moduleName) => (tpe, loadPlain(back, moduleName))
case _ => try { eval0(expression, imports, tpeName, run, unit, backing, moduleName) } finally { unlinkAll() }
}
val classFiles = getClassFiles(backing, moduleName)
new EvalResult(tpe, value, classFiles, moduleName)
}
def eval0(expression: String, imports: EvalImports, tpeName: Option[String], run: Run, unit: CompilationUnit, backing: Option[File], moduleName: String): (String, ClassLoader => Any) =
{
val dir = backing match { case None => new VirtualDirectory("<virtual>", None); case Some(dir) => new PlainFile(dir) }
settings.outputDirs setSingleOutput dir
val importTrees = parseImports(imports)
val (parser, tree) = parseExpr(unit)
val tpt: Tree = tpeName match {
case Some(tpe) => parseType(tpe)
case None => TypeTree(NoType)
}
unit.body = augment(parser, importTrees, tree, tpt, moduleName)
def compile(phase: Phase): Unit =
{
globalPhase = phase
if(phase == null || phase == phase.next || reporter.hasErrors)
()
else
{
atPhase(phase) { phase.run }
compile(phase.next)
}
}
compile(run.namerPhase)
checkError("Type error in expression")
val tpe = atPhase(run.typerPhase.next) { (new TypeExtractor).getType(unit.body) }
(tpe, load(dir, moduleName))
}
def load(dir: AbstractFile, moduleName: String): ClassLoader => Any = parent => getValue[Any](moduleName, new AbstractFileClassLoader(dir, parent))
def loadPlain(dir: File, moduleName: String): ClassLoader => Any = parent => getValue[Any](moduleName, new URLClassLoader(Array(dir.toURI.toURL), parent))
val WrapValName = "$sbtdef"
//wrap tree in object objectName { def WrapValName = <tree> }
def augment(parser: global.syntaxAnalyzer.UnitParser, imports: Seq[Tree], tree: Tree, tpt: Tree, objectName: String): Tree =
{
val emptyTypeName = nme.EMPTY.toTypeName
def emptyPkg = parser.atPos(0, 0, 0) { Ident(nme.EMPTY_PACKAGE_NAME) }
def emptyInit = DefDef(
NoMods,
nme.CONSTRUCTOR,
Nil,
List(Nil),
TypeTree(),
Block(List(Apply(Select(Super(This(emptyTypeName), emptyTypeName), nme.CONSTRUCTOR), Nil)), Literal(Constant(())))
)
def method = DefDef(NoMods, WrapValName, Nil, Nil, tpt, tree)
def moduleBody = Template(List(gen.scalaScalaObjectConstr), emptyValDef, List(emptyInit, method))
def moduleDef = ModuleDef(NoMods, objectName, moduleBody)
parser.makePackaging(0, emptyPkg, (imports :+ moduleDef).toList)
}
def getValue[T](objectName: String, loader: ClassLoader): T =
{
val clazz = Class.forName(objectName + "$", true, loader)
val module = clazz.getField("MODULE$").get(null)
val accessor = module.getClass.getMethod(WrapValName)
val value = accessor.invoke(module)
value.asInstanceOf[T]
}
final class TypeExtractor extends Traverser {
private[this] var result = ""
def getType(t: Tree) = { result = ""; traverse(t); result }
override def traverse(tree: Tree): Unit = tree match {
case d: DefDef if d.symbol.nameString == WrapValName => result = d.symbol.tpe.finalResultType.toString
case _ => super.traverse(tree)
}
}
private[this] def classExists(dir: File, name: String) = (new File(dir, name + ".class")).exists
// TODO: use the code from Analyzer
private[this] def getClassFiles(backing: Option[File], moduleName: String): Seq[File] =
backing match {
case None => Nil
case Some(dir) => dir listFiles moduleClassFilter(moduleName)
}
private[this] def moduleClassFilter(moduleName: String) = new java.io.FilenameFilter { def accept(dir: File, s: String) =
(s contains moduleName) && (s endsWith ".class")
}
private[this] def parseExpr(unit: CompilationUnit) =
{
val parser = new syntaxAnalyzer.UnitParser(unit)
val tree: Tree = parser.expr()
val extra = parser.in.token match {
case EOF => " Ensure that there are no blank lines within a setting."
case _ => ""
}
checkError("Error parsing expression." + extra)
parser.accept(EOF)
val extra2 = parser.in.token match {
case SEMI => " Note that settings are expressions and do not end with semicolons. (Semicolons are fine within {} blocks, however.)"
case NEWLINE | NEWLINES => " Ensure that settings are separated by blank lines."
case _ => ""
}
checkError("Error parsing expression." + extra2)
(parser, tree)
}
private[this] def parseType(tpe: String): Tree =
{
val tpeParser = new syntaxAnalyzer.UnitParser(mkUnit("<expected-type>", DefaultStartLine, tpe))
val tpt0: Tree = tpeParser.typ()
tpeParser.accept(EOF)
checkError("Error parsing expression type.")
tpt0
}
private[this] def parseImports(imports: EvalImports): Seq[Tree] =
imports.strings flatMap { case (s, line) => parseImport(mkUnit(imports.srcName, line, s)) }
private[this] def parseImport(importUnit: CompilationUnit): Seq[Tree] =
{
val parser = new syntaxAnalyzer.UnitParser(importUnit)
val trees: Seq[Tree] = parser.importClause()
parser.accept(EOF)
checkError("Error parsing imports for expression.")
trees
}
val DefaultStartLine = 0
private[this] def makeModuleName(hash: String): String = "$" + Hash.halve(hash)
private[this] def noImports = new EvalImports(Nil, "")
private[this] def mkUnit(srcName: String, firstLine: Int, s: String) = new CompilationUnit(new EvalSourceFile(srcName, firstLine, s))
private[this] def checkError(label: String) = if(reporter.hasErrors) throw new EvalException(label)
private[this] final class EvalSourceFile(name: String, startLine: Int, contents: String) extends BatchSourceFile(name, contents)
{
override def lineToOffset(line: Int): Int = super.lineToOffset((line - startLine) max 0)
override def offsetToLine(offset: Int): Int = super.offsetToLine(offset) + startLine
}
}
private object Eval
{
def optBytes[T](o: Option[T])(f: T => Array[Byte]): Array[Byte] = seqBytes(o.toSeq)(f)
def seqBytes[T](s: Seq[T])(f: T => Array[Byte]): Array[Byte] = bytes(s map f)
def bytes(b: Seq[Array[Byte]]): Array[Byte] = bytes(b.length) ++ b.flatten.toArray[Byte]
def bytes(b: Boolean): Array[Byte] = Array[Byte](if(b) 1 else 0)
def filesModifiedBytes(fs: Array[File]): Array[Byte] = if(fs eq null) filesModifiedBytes(Array[File]()) else seqBytes(fs)(fileModifiedBytes)
def fileModifiedBytes(f: File): Array[Byte] =
(if(f.isDirectory) filesModifiedBytes(f listFiles classDirFilter) else bytes(f.lastModified)) ++
bytes(f.getAbsolutePath)
def fileExistsBytes(f: File): Array[Byte] =
bytes(f.exists) ++
bytes(f.getAbsolutePath)
def bytes(s: String): Array[Byte] = s getBytes "UTF-8"
def bytes(l: Long): Array[Byte] =
{
val buffer = ByteBuffer.allocate(8)
buffer.putLong(l)
buffer.array
}
def bytes(i: Int): Array[Byte] =
{
val buffer = ByteBuffer.allocate(4)
buffer.putInt(i)
buffer.array
}
private val classDirFilter: FileFilter = DirectoryFilter || GlobFilter("*.class")
}
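// --- Added sketch (not part of the original file) ---
// The cache key above hashes the expression text together with options,
// classpath timestamps and imports, and derives the enclosing module name
// from the hash. A JDK-only sketch of that byte-level composition; SHA-1 is
// an assumption here, standing in for sbt's Hash utility.
object EvalHashSketch {
  import java.nio.ByteBuffer
  import java.security.MessageDigest
  def bytes(s: String): Array[Byte] = s.getBytes("UTF-8")
  def bytes(i: Int): Array[Byte] = ByteBuffer.allocate(4).putInt(i).array()
  def seqBytes(bs: Seq[Array[Byte]]): Array[Byte] =
    bytes(bs.length) ++ bs.flatten.toArray[Byte] // length prefix guards against ambiguity
  def main(args: Array[String]): Unit = {
    val digest = MessageDigest.getInstance("SHA-1")
      .digest(seqBytes(Seq(bytes("1 + 1"), bytes("-cp"), bytes("."))))
    val hex = digest.map("%02x".format(_)).mkString
    println("$" + hex.take(20)) // module names are "$" plus (part of) the hash
  }
}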
|
kuochaoyi/xsbt
|
compile/Eval.scala
|
Scala
|
bsd-3-clause
| 9,308 |
// Copyright (c) 2018 Ben Zimmer. All rights reserved.
// Generate EPUB files.
// Based on examples from: https://en.wikipedia.org/wiki/EPUB
package bdzimmer.secondary.export.controller
import scala.collection.immutable.Seq
import java.io.{BufferedOutputStream, File, FileOutputStream}
import java.util.zip.{CRC32, ZipEntry, ZipOutputStream}
import javax.imageio.{ImageIO, IIOImage, ImageWriteParam}
import javax.imageio.stream.FileImageOutputStream
import org.apache.commons.io.{FileUtils, FilenameUtils, IOUtils}
import bdzimmer.util.StringUtils._
import bdzimmer.secondary.export.model.WorldItems
import bdzimmer.secondary.export.model.Tags.ParsedTag
import bdzimmer.secondary.export.model.WorldItems.BookItem
import bdzimmer.secondary.export.view.Styles
import bdzimmer.secondary.export.controller.Book.SectionInfo
object Epub {
def page(title: String, content: String, chapter: Boolean): String = {
val chapterStyle = if (chapter) {
s"""<style type="text/css">${Styles.BookStyle}</style>"""
} else {
""
}
s"""<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en">
<head>
<meta http-equiv="Content-Type" content="application/xhtml+xml; charset=utf-8" />
<title>$title</title>
$chapterStyle
</head>
<body>
$content
</body>
</html>
"""
}
def authorNameParts(authorname: String): (String, String) = {
val (name, parts) = WorldItems.cleanName(authorname)
parts match {
case None => ("", name)
case Some(ps) => ps match {
case fst :: rest => (fst, rest.mkString(" "))
case _ => ("", name)
}
}
}
val Mimetype = "application/epub+zip"
val ContainerXml =
"""<?xml version="1.0" encoding="UTF-8" ?>
<container version="1.0" xmlns="urn:oasis:names:tc:opendocument:xmlns:container">
<rootfiles>
<rootfile full-path="OEBPS/content.opf" media-type="application/oebps-package+xml"/>
</rootfiles>
</container>
"""
def formatContentOpf(
uniqueIdentifier: String,
title: String,
firstname: String,
lastname: String,
sections: Seq[SectionInfo],
coverImageFilename: Option[String]
): String = {
// format contents of content.opf file
val cover = if (coverImageFilename.isDefined) {
"<meta name=\\"cover\\" content=\\"cover-image\\" />"
} else {
""
}
val manifestItems = sections.map(section => {
s""" <item id="${section.id}" href="${section.id}.xhtml" media-type="application/xhtml+xml" />"""
}) ++ coverImageFilename.map(x => {
val imageType = getImageType(x)
println("cover image type: " + imageType)
" <item id=\\"cover-image\\" href=\\"" + x + "\\" media-type=\\"image/" + imageType + "\\" />"
}).toList
val manifest =
"<manifest>\\n" +
manifestItems.mkString("\\n") + "\\n" +
""" <item id="ncx" href="toc.ncx" media-type="application/x-dtbncx+xml" />""" + "\\n" +
" </manifest>"
val spineItems = sections.map(section => {
s""" <itemref idref="${section.id}" />"""
})
val spine =
"""<spine toc="ncx">""" + "\\n" +
spineItems.mkString("\\n") + "\\n" +
" </spine>"
s"""<?xml version="1.0"?>
<package version="2.0" xmlns="http://www.idpf.org/2007/opf" unique-identifier="$uniqueIdentifier">
<metadata xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:opf="http://www.idpf.org/2007/opf">
<dc:title>$title</dc:title>
<dc:language>en</dc:language>
<dc:identifier id="$uniqueIdentifier" opf:scheme="NotISBN">$uniqueIdentifier</dc:identifier>
<dc:creator opf:file-as="$lastname, $firstname" opf:role="aut">$firstname $lastname</dc:creator>
$cover
</metadata>
$manifest
$spine
</package>"""
}
def formatTocNcx(
uniqueIdentifier: String,
title: String,
firstname: String,
lastname: String,
sections: Seq[SectionInfo]): String = {
// format contents of toc.ncx file
val navmapItems = sections.zipWithIndex.map(x => {
s""" <navPoint class="chapter" id="${x._1.id}" playOrder="${x._2}">""" + "\\n" +
s""" <navLabel><text>${x._1.name}</text></navLabel>""" + "\\n" +
s""" <content src="${x._1.id}.xhtml"/>""" + "\\n" +
s""" </navPoint>"""
})
val navmap =
"<navMap>\\n" +
navmapItems.mkString("\\n") + "\\n" +
" </navMap>"
s"""<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE ncx PUBLIC "-//NISO//DTD ncx 2005-1//EN"
"http://www.daisy.org/z3986/2005/ncx-2005-1.dtd">
<ncx version="2005-1" xml:lang="en" xmlns="http://www.daisy.org/z3986/2005/ncx/">
<head>
<!-- The following four metadata items are required for all NCX documents,
including those that conform to the relaxed constraints of OPS 2.0 -->
<meta name="dtb:uid" content="$uniqueIdentifier"/> <!-- same as in .opf -->
<meta name="dtb:depth" content="1"/> <!-- 1 or higher -->
<meta name="dtb:totalPageCount" content="0"/> <!-- must be 0 -->
<meta name="dtb:maxPageNumber" content="0"/> <!-- must be 0 -->
</head>
<docTitle>
<text>$title</text>
</docTitle>
<docAuthor>
<text>$lastname, $firstname</text>
</docAuthor>
$navmap
</ncx>
"""
}
def export(
filename: String,
book: BookItem,
tags: Map[Int, ParsedTag],
renderTags: RenderTags,
unstyledSections: Set[String],
imCompQuality: Option[Float],
localExportPath: String): Unit = {
val (sections, coverImageTag) = Book.sections(book.notes, tags, Some(renderTags))
// title is name of first section
val title = sections.headOption.map(_.name).getOrElse("empty")
val titlePage = sections.headOption.map(_.copy(name="Title Page"))
// replace empty section names with "Content"
val contentSections = sections.tail.map(x => if (x.name.equals("---")) x.copy(name="Content") else x)
val (firstname, lastname) = Epub.authorNameParts(book.authorname)
// cover page becomes new first section if cover image exists
// WIP: experimenting with disabling cover page; I think it causes a duplicate cover on KDP
// But it might be necessary to enable for Kobo.
// val cover = coverImageTag.map(
// x => Epub.SectionInfo("cover", "Cover", Epub.coverPage(RenderImages.itemImagePath(x.item))))
Epub.export(
filename,
book.uniqueIdentifier,
title,
firstname,
lastname,
// cover.toList ++ titlePage.toList ++ contentSections,
titlePage.toList ++ contentSections,
coverImageTag.map(x => RenderImages.itemImagePath(x.item)),
localExportPath,
unstyledSections,
imCompQuality)
}
def export(
outputFilename: String,
uniqueIdentifier: String,
title: String,
firstname: String,
lastname: String,
sections: Seq[SectionInfo],
coverImageFilename: Option[String],
imageDirname: String,
unstyledSections: Set[String],
imCompQuality: Option[Float]): Unit = {
// prep image filename and convert file
val coverImageFilenameConverted: Option[String] = coverImageFilename.map(origFilename => {
val imageType = getImageType(origFilename)
if (!imageType.equals("jpeg") && imCompQuality.isDefined) {
// if it isn't already jpeg and we are compressing images, we need to compress it.
// if we were also creating a downsized version or something, that would be done
// around here.
// TODO: add a compression tag / quality suffix
val jpgFilename = FilenameUtils.removeExtension(origFilename) + ".jpg"
val jpgFilenameFull = imageDirname / jpgFilename
val jpgFile = new File(jpgFilenameFull)
// if (jpgFile.exists()) {
// println(f"'${jpgFilenameFull}' already exists.")
// } else {
// convert the file
println(f"creating '${jpgFilenameFull}")
val image = ImageIO.read(new File(imageDirname / origFilename))
val quality = imCompQuality.getOrElse(1.0f)
println("image compression quality: " + quality)
val jpgWriter = ImageIO.getImageWritersByFormatName("jpg").next()
val jpgWriteParam = jpgWriter.getDefaultWriteParam
jpgWriteParam.setCompressionMode(ImageWriteParam.MODE_EXPLICIT)
jpgWriteParam.setCompressionQuality(quality)
jpgWriter.setOutput(new FileImageOutputStream(jpgFile))
val outputImage = new IIOImage(image, null, null)
jpgWriter.write(null, outputImage, jpgWriteParam)
// }
jpgFilename
} else {
origFilename
}
})
// expects the first page to be the title page
// and remaining pages prose
val contentOpf = formatContentOpf(
uniqueIdentifier,
title,
firstname,
lastname,
sections,
coverImageFilenameConverted)
val tocNcx = formatTocNcx(
uniqueIdentifier,
title,
firstname,
lastname,
sections)
val fout = new FileOutputStream(outputFilename)
val bout = new BufferedOutputStream(fout)
val zout = new ZipOutputStream(bout)
val mimetypeEntry = new ZipEntry("mimetype")
mimetypeEntry.setMethod(ZipEntry.STORED)
mimetypeEntry.setSize(Mimetype.length)
mimetypeEntry.setCompressedSize(Mimetype.length)
val crc = new CRC32()
crc.update(Mimetype.getBytes)
mimetypeEntry.setCrc(crc.getValue)
zout.putNextEntry(mimetypeEntry)
IOUtils.write(Mimetype, zout, "UTF-8")
zout.closeEntry()
zout.putNextEntry(new ZipEntry("META-INF/container.xml"))
IOUtils.write(ContainerXml, zout, "UTF-8")
zout.closeEntry()
zout.putNextEntry(new ZipEntry("OEBPS/content.opf"))
IOUtils.write(contentOpf, zout, "UTF-8")
zout.closeEntry()
zout.putNextEntry(new ZipEntry("OEBPS/toc.ncx"))
IOUtils.write(tocNcx, zout, "UTF-8")
zout.closeEntry()
sections.zipWithIndex.foreach({case (section, idx) => {
zout.putNextEntry(new ZipEntry("OEBPS/" + section.id + ".xhtml"))
val useBookStyle = idx > 0 && !unstyledSections.contains(section.name)
if (!useBookStyle) {
println("not using book style for section '" + section.name + "'")
}
IOUtils.write(
page(section.name, section.content, useBookStyle),
zout,
"UTF-8")
zout.closeEntry()
}})
// cover image
coverImageFilenameConverted.foreach(x => {
println("adding cover image '" + imageDirname / x + "'")
zout.putNextEntry(new ZipEntry("OEBPS/" + x))
FileUtils.copyFile(new File(imageDirname / x), zout)
zout.closeEntry()
})
zout.close()
}
def getImageType(filename: String): String = {
// jpeg media type files commonly have .jpg extension
// all the other filetypes (png, bmp, etc.) have identical extension to media type
FilenameUtils.getExtension(filename) match {
case "jpg" => "jpeg"
case x: String => x
}
}
}
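// --- Added sketch (not part of the original file) ---
// EPUB requires "mimetype" to be the first zip entry and to be STORED
// (uncompressed), which is why the code above sets the size, compressed
// size and CRC by hand before writing it. Minimal JDK-only demonstration:
object MimetypeEntrySketch {
  import java.io.ByteArrayOutputStream
  import java.util.zip.{CRC32, ZipEntry, ZipOutputStream}
  def main(args: Array[String]): Unit = {
    val payload = "application/epub+zip".getBytes("UTF-8")
    val out = new ByteArrayOutputStream()
    val zout = new ZipOutputStream(out)
    val entry = new ZipEntry("mimetype")
    entry.setMethod(ZipEntry.STORED)        // must not be DEFLATED
    entry.setSize(payload.length)
    entry.setCompressedSize(payload.length) // stored => same as size
    val crc = new CRC32()
    crc.update(payload)
    entry.setCrc(crc.getValue)              // STORED entries need an explicit CRC
    zout.putNextEntry(entry)
    zout.write(payload)
    zout.closeEntry()
    zout.close()
    println(s"wrote ${out.size} bytes")
  }
}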
|
bdzimmer/secondary
|
src/main/scala/bdzimmer/secondary/export/controller/Epub.scala
|
Scala
|
bsd-3-clause
| 11,070 |
package com.rikmuld.camping.features.blocks.lantern
import com.rikmuld.camping.CampingMod
import com.rikmuld.camping.Definitions.Lantern
import com.rikmuld.camping.Definitions.Lantern._
import com.rikmuld.corerm.tileentity.TileEntitySimple
import net.minecraft.entity.player.EntityPlayer
import net.minecraft.item.ItemStack
import net.minecraft.nbt.NBTTagCompound
import net.minecraft.util.ITickable
import net.minecraft.util.math.BlockPos
import net.minecraft.world.World
object TileEntityLantern {
def timeFromStack(stack: ItemStack): Int =
Option(stack.getTagCompound).fold(
if (stack.getItemDamage == Lantern.ON) 750
else 0
)(_.getInteger("time"))
def stackFromTime(time: Int, count: Int): ItemStack = {
val stack = new ItemStack(
CampingMod.OBJ.lanternItem,
count,
if (time > 0) Lantern.ON else Lantern.OFF
)
if(time > 0) {
val compound = new NBTTagCompound()
compound.setInteger("time", time)
stack.setTagCompound(compound)
}
stack
}
}
class TileEntityLantern extends TileEntitySimple with ITickable {
  private var burnTime: Int = _
  private var tick: Boolean = false
override def readFromNBT(tag: NBTTagCompound): Unit = {
burnTime = tag.getInteger("burnTime")
super.readFromNBT(tag)
}
  override def writeToNBT(tag: NBTTagCompound): NBTTagCompound = {
tag.setInteger("burnTime", burnTime)
super.writeToNBT(tag)
}
def setBurnTime(time: Int): Unit = {
burnTime = time * 20
tick = true
}
def getBurnTime: Int =
burnTime
def getLantern: BlockLantern =
world.getBlockState(pos).getBlock.asInstanceOf[BlockLantern]
override def update(): Unit =
if (tick && !world.isRemote)
if (burnTime > 0)
burnTime -= 1
else {
getLantern.setState(world, pos, STATE_LIT, false)
tick = false
}
override def init(stack: ItemStack, player: EntityPlayer, world: World, pos: BlockPos): Unit =
setBurnTime(TileEntityLantern.timeFromStack(stack))
}
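// --- Added sketch (not part of the original file) ---
// timeFromStack/stackFromTime above round-trip the remaining burn time
// through an NBT "time" tag, defaulting a tagless ON lantern to 750.
// A Minecraft-free sketch of the same Option logic:
object LanternTimeSketch {
  val ON = 1; val OFF = 0
  final case class Stack(damage: Int, time: Option[Int]) // time = stored NBT "time"
  def timeFromStack(s: Stack): Int =
    s.time.getOrElse(if (s.damage == ON) 750 else 0)
  def stackFromTime(time: Int): Stack =
    if (time > 0) Stack(ON, Some(time)) else Stack(OFF, None)
  def main(args: Array[String]): Unit = {
    assert(timeFromStack(stackFromTime(120)) == 120) // round trip
    assert(timeFromStack(Stack(ON, None)) == 750)    // fresh lantern default
  }
}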
|
Rikmuld/MC-Camping
|
scala/com/rikmuld/camping/features/blocks/lantern/TileEntityLantern.scala
|
Scala
|
gpl-3.0
| 2,034 |
package com.twitter.finagle.memcached.integration
import com.twitter.conversions.time._
import com.twitter.finagle._
import com.twitter.finagle.memcached.Client
import com.twitter.finagle.memcached.protocol.{Command, Response}
import com.twitter.io.Buf
import com.twitter.util.{Await, Awaitable}
import java.net.{InetAddress, InetSocketAddress}
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{BeforeAndAfter, FunSuite}
@RunWith(classOf[JUnitRunner])
class ProxyTest extends FunSuite with BeforeAndAfter {
val TimeOut = 15.seconds
private def awaitResult[T](awaitable: Awaitable[T]): T = Await.result(awaitable, TimeOut)
type MemcacheService = Service[Command, Response]
/**
* Note: This integration test requires a real Memcached server to run.
*/
var externalClient: Client = null
var server: ListeningServer = null
var serverAddress: InetSocketAddress = null
var proxyService: MemcacheService = null
var proxyClient: MemcacheService = null
var testServer: Option[TestMemcachedServer] = None
before {
testServer = TestMemcachedServer.start()
if (testServer.isDefined) {
Thread.sleep(150) // On my box the 100ms sleep wasn't long enough
proxyClient = Memcached.client
.connectionsPerEndpoint(1)
.newService(
Name.bound(Address(testServer.get.address.asInstanceOf[InetSocketAddress])), "memcached")
proxyService = new MemcacheService {
def apply(request: Command) = proxyClient(request)
}
server = Memcached.server
.withLabel("memcached")
.serve(new InetSocketAddress(InetAddress.getLoopbackAddress, 0), proxyService)
serverAddress = server.boundAddress.asInstanceOf[InetSocketAddress]
externalClient = Client(Memcached.client.newService(
"%s:%d".format(serverAddress.getHostName, serverAddress.getPort)))
}
}
after {
// externalClient.close() needs to be called explicitly by each test. Otherwise
// 'quit' test would call it twice.
if (testServer.isDefined) {
server.close(0.seconds)
proxyService.close()
proxyClient.close()
testServer.map(_.stop())
}
}
override def withFixture(test: NoArgTest) = {
    if (testServer.isEmpty) {
info("Cannot start memcached. skipping test...")
cancel()
}
else test()
}
test("Proxied Memcached Servers should handle a basic get/set operation") {
awaitResult(externalClient.delete("foo"))
assert(awaitResult(externalClient.get("foo")) == None)
awaitResult(externalClient.set("foo", Buf.Utf8("bar")))
val foo = awaitResult(externalClient.get("foo"))
assert(foo.isDefined)
val Buf.Utf8(res) = foo.get
assert(res == "bar")
externalClient.release()
}
if (Option(System.getProperty("USE_EXTERNAL_MEMCACHED")).isDefined) {
test("stats is supported") {
awaitResult(externalClient.delete("foo"))
assert(awaitResult(externalClient.get("foo")) == None)
awaitResult(externalClient.set("foo", Buf.Utf8("bar")))
Seq(None, Some("slabs")).foreach { arg =>
val stats = awaitResult(externalClient.stats(arg))
assert(stats != null)
assert(!stats.isEmpty)
stats.foreach { line =>
assert(line.startsWith("STAT"))
}
}
externalClient.release()
}
}
if (Option(System.getProperty("USE_EXTERNAL_MEMCACHED")).isDefined) {
test("stats (cachedump) is supported") {
awaitResult(externalClient.delete("foo"))
assert(awaitResult(externalClient.get("foo")) == None)
awaitResult(externalClient.set("foo", Buf.Utf8("bar")))
val slabs = awaitResult(externalClient.stats(Some("slabs")))
assert(slabs != null)
assert(!slabs.isEmpty)
val n = slabs.head.split(" ")(1).split(":")(0).toInt
val stats = awaitResult(externalClient.stats(Some("cachedump " + n + " 100")))
assert(stats != null)
assert(!stats.isEmpty)
stats.foreach { stat =>
assert(stat.startsWith("ITEM"))
}
assert(stats.find { stat => stat.contains("foo") }.isDefined)
externalClient.release()
}
}
test("quit is supported") {
awaitResult(externalClient.get("foo")) // do nothing
awaitResult(externalClient.quit())
intercept[ServiceClosedException] {
awaitResult(externalClient.get("foo"))
}
}
}
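// --- Added sketch (not part of the original test) ---
// The proxy above is nothing more than a Service that forwards each request
// to another client. Finagle-free sketch of that forwarding shape:
object ProxySketch {
  trait Service[Req, Rep] { def apply(req: Req): Rep }
  def proxy[Req, Rep](backend: Service[Req, Rep]): Service[Req, Rep] =
    new Service[Req, Rep] { def apply(req: Req): Rep = backend(req) }
  def main(args: Array[String]): Unit = {
    val echo = new Service[String, String] { def apply(r: String): String = s"echo:$r" }
    assert(proxy(echo)("get foo") == "echo:get foo") // proxied call is transparent
  }
}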
|
koshelev/finagle
|
finagle-memcached/src/test/scala/com/twitter/finagle/memcached/integration/ProxyTest.scala
|
Scala
|
apache-2.0
| 4,388 |
package Sample
/**
* Created by foostan on 8/19/14.
*/
trait SuperCar extends Car {
}
|
foostan/2puml
|
sample/SuperCar.scala
|
Scala
|
mit
| 89 |
/*
* Copyright (C) 2005, The OpenURP Software.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openurp.base.edu.model
import scala.collection.mutable.Buffer
import org.beangle.commons.lang.time.HourMinute
import org.beangle.data.model.IntId
import org.beangle.data.model.pojo.Named
import org.beangle.data.model.pojo.TemporalOn
import org.openurp.base.model.Campus
import org.openurp.code.edu.model.DayPart
/**
 * Time settings
 */
class TimeSetting extends IntId with Named with TemporalOn {
var project: Project = _
var campus: Option[Campus] = None
var minutesPerUnit: Short = _
var units: Buffer[CourseUnit] = new collection.mutable.ListBuffer[CourseUnit]
}
/**
 * Course unit (class period)
 */
class CourseUnit extends IntId with Named {
var indexno: Int = _
var beginAt: HourMinute = _
var endAt: HourMinute = _
var setting: TimeSetting = _
var part: DayPart = _
var enName: String = _
}
/** Week cycle: continuous weeks, odd weeks, even weeks, arbitrary */
enum CircleWeekTypes {
case Continuely, Odd, Even, Random
}
|
openurp/api
|
base/src/main/scala/org/openurp/base/edu/model/TimeSetting.scala
|
Scala
|
lgpl-3.0
| 1,648 |
package controllers
import play.api.libs.json._
import play.api.mvc._
import models.Book._
import java.math.BigDecimal
import java.util.Date
import com.amazonaws.regions.Region
import com.amazonaws.regions.Regions
import com.amazonaws.services.cloudwatch.AmazonCloudWatchClient
import com.amazonaws.services.cloudwatch.model._
import scala.collection.JavaConverters._
object Application extends Controller {
val cloudWatch = new AmazonCloudWatchClient()
cloudWatch.setRegion(
Region.getRegion(Regions.US_WEST_2));
implicit val dimensionWrites = new Writes[Dimension] {
def writes(d: Dimension) = Json.obj(
"name" -> d.getName(),
"value" -> d.getValue()
)
}
  // Datapoint getters return boxed java.lang.Double values that may be null;
  // a primitive scala.Double can never equal null, so take the boxed type to
  // make the null check meaningful.
  def x(d: java.lang.Double): JsNumber = {
    if (d == null) null else JsNumber(new BigDecimal(d.doubleValue))
  }
implicit val datapointWrites = new Writes[Datapoint] {
def writes(d: Datapoint) = if (d == null) null else Json.obj(
"sum" -> x(d.getSum()),
"timestamp" -> d.getTimestamp(),
"unit" -> d.getUnit()
)
}
implicit val metricWrites = new Writes[Metric] {
def writes(m: Metric) = Json.obj(
"metricName" -> m.getMetricName(),
"namespace" -> m.getNamespace(),
"dimensions" -> m.getDimensions().asScala
)
}
implicit val listMetricsResultWrites = new Writes[ListMetricsResult] {
def writes(result: ListMetricsResult) = Json.obj(
"metrics" -> result.getMetrics().asScala,
"nextToken" -> result.getNextToken()
)
}
implicit val getMetricsResultWrites = new Writes[GetMetricStatisticsResult] {
def writes(result: GetMetricStatisticsResult) = Json.obj(
"label" -> result.getLabel(),
"datapoints" -> result.getDatapoints().asScala
)
}
def listMetrics = Action {
val metrics = cloudWatch.listMetrics()
Ok(Json.toJson(metrics))
}
def getTransactions(start: Long, minutes: Int) = Action {
val days = 1
val endTime = new Date(System.currentTimeMillis())
val startTime = new Date(start)
val request = new GetMetricStatisticsRequest()
.withEndTime(endTime)
.withMetricName("Transactions")
.withNamespace("com.strongfellow.transactions")
.withPeriod(minutes * 60)
.withStartTime(startTime)
.withStatistics(Statistic.Sum)
val response = cloudWatch.getMetricStatistics(request)
Ok(Json.toJson(response))
}
def listBooks = Action {
Ok(Json.toJson(books))
}
def saveBook = Action(BodyParsers.parse.json) { request =>
val b = request.body.validate[Book]
b.fold(
errors => {
BadRequest(Json.obj("status" -> "OK", "message" -> JsError.toFlatJson(errors)))
},
book => {
addBook(book)
Ok(Json.obj("status" -> "OK"))
}
)
}
}
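// --- Added sketch (not part of the original controller) ---
// The Writes instances above serialize AWS SDK beans field by field. The
// same pattern over a plain case class, runnable with only play-json:
object WritesSketch {
  import play.api.libs.json._
  final case class Point(label: String, sum: Double)
  implicit val pointWrites: Writes[Point] = new Writes[Point] {
    def writes(p: Point): JsValue = Json.obj("label" -> p.label, "sum" -> p.sum)
  }
  def main(args: Array[String]): Unit =
    println(Json.toJson(Point("Transactions", 42.0))) // {"label":"Transactions","sum":42}
}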
|
strongfellow/simple-rest-scala-txgraphs
|
app/controllers/Application.scala
|
Scala
|
mit
| 2,779 |
// scalac: -Yrangepos:false -language:dynamics
//
object Test extends App {
object stubUpdate {
def update(as: Any*) = println(".update"+as.toList.mkString("(",", ", ")"))
}
object stub {
def apply = {println(".apply"); stubUpdate}
def apply(as: Any*) = println(".apply"+as.toList.mkString("(",", ", ")"))
def update(as: Any*) = println(".update"+as.toList.mkString("(",", ", ")"))
}
class MyDynamic extends Dynamic {
def applyDynamic[T](n: String)(as: Any*) = {println("qual.applyDynamic("+ n +")"+ as.toList.mkString("(",", ", ")")); stub}
def applyDynamicNamed[T](n: String)(as: (String, Any)*) = {println("qual.applyDynamicNamed("+ n +")"+ as.toList.mkString("(",", ", ")")); stub}
def selectDynamic[T](n: String) = {println("qual.selectDynamic("+ n +")"); stub}
def updateDynamic(n: String)(x: Any): Unit = {println("qual.updateDynamic("+ n +")("+ x +")")}
}
val qual = new MyDynamic
val expr = "expr"
val a = "a"
val a2 = "a2"
type T = String
// If qual.sel is followed by a potential type argument list [Ts] and an argument list (arg1, …, argn) where none of the arguments argi are named:
// qual.applyDynamic(“sel”)(arg1, …, argn)
qual.sel()
qual.sel(a)
// qual.sel(a, a2: _*) -- should not accept varargs?
qual.sel(a)(a2)
qual.sel[T](a)
qual.sel[T](a)(a2)
// If qual.sel is followed by a potential type argument list [Ts]
// and a non-empty named argument list (x1 = arg1, …, xn = argn) where some name prefixes xi = might be missing:
// qual.applyDynamicNamed(“sel”)(xs1 -> arg1, …, xsn -> argn)
qual.sel(arg = a)
qual.sel[T](arg = a)
qual.sel(a, arg2 = "a2")
// qual.sel(a)(a2, arg2 = "a2")
// qual.sel[T](a)(a2, arg2 = "a2")
// qual.sel(arg = a, a2: _*)
// qual.sel(arg, arg2 = "a2", a2: _*)
// If qual.sel appears immediately on the left-hand side of an assignment
// qual.updateDynamic(“sel”)(expr)
qual.sel = expr
  // If qual.sel, possibly applied to type arguments, is
  // not applied to explicit value arguments,
  // nor immediately followed by an assignment operator:
  // qual.selectDynamic[Ts](“sel”)
qual.sel
qual.sel[T]
qual.sel(1) = expr // parser turns this into qual.sel.update(1, expr)
qual.sel() = expr // parser turns this into qual.sel.update(expr)
qual.sel.apply(1)
qual.sel.apply(1) = 1
qual.apply(a)
qual.apply[String](a)
qual(a)
qual[String](a)
qual[T](arg = a)
qual(a, arg2 = "a2")
qual(a) = a2
}
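// --- Added sketch (not part of the original test) ---
// A concrete Dynamic the rewrites above would resolve against: an in-memory
// record backing selectDynamic/updateDynamic.
object DynamicRecordSketch {
  import scala.language.dynamics
  class Record extends Dynamic {
    private val fields = scala.collection.mutable.Map.empty[String, Any]
    def selectDynamic(name: String): Any = fields(name)   // rec.foo
    def updateDynamic(name: String)(value: Any): Unit =   // rec.foo = v
      fields(name) = value
  }
  def main(args: Array[String]): Unit = {
    val rec = new Record
    rec.name = "scala"          // rewritten to rec.updateDynamic("name")("scala")
    assert(rec.name == "scala") // rewritten to rec.selectDynamic("name")
  }
}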
|
martijnhoekstra/scala
|
test/files/run/applydynamic_sip.scala
|
Scala
|
apache-2.0
| 2,494 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.predictionio.examples.recommendation
import org.apache.predictionio.controller.PAlgorithm
import org.apache.predictionio.controller.Params
import org.apache.predictionio.data.storage.BiMap
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.rdd.RDD
import org.apache.spark.mllib.recommendation.ALS
import org.apache.spark.mllib.recommendation.{Rating => MLlibRating}
import org.apache.spark.mllib.recommendation.ALSModel
import grizzled.slf4j.Logger
case class ALSAlgorithmParams(
rank: Int,
numIterations: Int,
lambda: Double,
seed: Option[Long]) extends Params
class ALSAlgorithm(val ap: ALSAlgorithmParams)
extends PAlgorithm[PreparedData, ALSModel, Query, PredictedResult] {
@transient lazy val logger = Logger[this.type]
if (ap.numIterations > 30) {
logger.warn(
s"ALSAlgorithmParams.numIterations > 30, current: ${ap.numIterations}. " +
s"There is a chance of running to StackOverflowException." +
s"To remedy it, set lower numIterations or checkpoint parameters.")
}
override
def train(sc: SparkContext, data: PreparedData): ALSModel = {
// MLLib ALS cannot handle empty training data.
    require(data.ratings.take(1).nonEmpty,
s"RDD[Rating] in PreparedData cannot be empty." +
" Please check if DataSource generates TrainingData" +
" and Preparator generates PreparedData correctly.")
// Convert user and item String IDs to Int index for MLlib
val userStringIntMap = BiMap.stringInt(data.ratings.map(_.user))
val itemStringIntMap = BiMap.stringInt(data.ratings.map(_.item))
val mllibRatings = data.ratings.map( r =>
// MLlibRating requires integer index for user and item
MLlibRating(userStringIntMap(r.user), itemStringIntMap(r.item), r.rating)
)
// seed for MLlib ALS
val seed = ap.seed.getOrElse(System.nanoTime)
// Set checkpoint directory
// sc.setCheckpointDir("checkpoint")
// If you only have one type of implicit event (Eg. "view" event only),
// set implicitPrefs to true
val implicitPrefs = false
val als = new ALS()
als.setUserBlocks(-1)
als.setProductBlocks(-1)
als.setRank(ap.rank)
als.setIterations(ap.numIterations)
als.setLambda(ap.lambda)
als.setImplicitPrefs(implicitPrefs)
als.setAlpha(1.0)
als.setSeed(seed)
als.setCheckpointInterval(10)
val m = als.run(mllibRatings)
new ALSModel(
rank = m.rank,
userFeatures = m.userFeatures,
productFeatures = m.productFeatures,
userStringIntMap = userStringIntMap,
itemStringIntMap = itemStringIntMap)
}
override
def predict(model: ALSModel, query: Query): PredictedResult = {
// Convert String ID to Int index for Mllib
model.userStringIntMap.get(query.user).map { userInt =>
// create inverse view of itemStringIntMap
val itemIntStringMap = model.itemStringIntMap.inverse
// recommendProductsWithFilter() returns Array[MLlibRating], which uses item Int
// index. Convert it to String ID for returning PredictedResult
val blackList = query.blackList.flatMap(model.itemStringIntMap.get) // ADDED
val itemScores = model
.recommendProductsWithFilter(userInt, query.num, blackList) // MODIFIED
.map (r => ItemScore(itemIntStringMap(r.product), r.rating))
PredictedResult(itemScores)
}.getOrElse{
logger.info(s"No prediction for unknown user ${query.user}.")
PredictedResult(Array.empty)
}
}
// This function is used by the evaluation module, where a batch of queries is sent to this engine
// for evaluation purpose.
override def batchPredict(model: ALSModel, queries: RDD[(Long, Query)]): RDD[(Long, PredictedResult)] = {
val userIxQueries: RDD[(Int, (Long, Query))] = queries
.map { case (ix, query) => {
      // If the user is not found, the index is -1
val userIx = model.userStringIntMap.get(query.user).getOrElse(-1)
(userIx, (ix, query))
}}
// Cross product of all valid users from the queries and products in the model.
val usersProducts: RDD[(Int, Int)] = userIxQueries
.keys
.filter(_ != -1)
.cartesian(model.productFeatures.map(_._1))
    // Call MLlib ALS's predict function.
val ratings: RDD[MLlibRating] = model.predict(usersProducts)
    // The following code constructs the predicted results from MLlib's ratings.
    // This is not an optimal implementation: instead of groupBy, combineByKey
    // with a bounded PriorityQueue would avoid materializing every rating per
    // user (a commented sketch follows below).
val userRatings: RDD[(Int, Iterable[MLlibRating])] = ratings.groupBy(_.user)
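    // Hedged, commented-out sketch of that combineByKey alternative (hypothetical;
    // `topN` and its internals are not part of the original template). It keeps at
    // most `n` ratings per user while aggregating, instead of grouping everything:
    //
    //   import scala.collection.mutable
    //   def topN(n: Int): RDD[(Int, mutable.PriorityQueue[MLlibRating])] = ratings
    //     .map(r => (r.user, r))
    //     .combineByKey(
    //       // min-heap on rating (via negation), so the smallest is evicted first
    //       (r: MLlibRating) =>
    //         mutable.PriorityQueue(r)(Ordering.by((x: MLlibRating) => -x.rating)),
    //       (q: mutable.PriorityQueue[MLlibRating], r: MLlibRating) => {
    //         q += r; if (q.size > n) q.dequeue(); q
    //       },
    //       (q1: mutable.PriorityQueue[MLlibRating], q2: mutable.PriorityQueue[MLlibRating]) => {
    //         q1 ++= q2; while (q1.size > n) q1.dequeue(); q1
    //       })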
userIxQueries.leftOuterJoin(userRatings)
.map {
// When there are ratings
case (userIx, ((ix, query), Some(ratings))) => {
val topItemScores: Array[ItemScore] = ratings
.toArray
.sortBy(_.rating)(Ordering.Double.reverse) // note: from large to small ordering
.take(query.num)
.map { rating => ItemScore(
model.itemStringIntMap.inverse(rating.product),
rating.rating) }
(ix, PredictedResult(itemScores = topItemScores))
}
// When user doesn't exist in training data
case (userIx, ((ix, query), None)) => {
require(userIx == -1)
(ix, PredictedResult(itemScores = Array.empty))
}
}
}
}
|
PredictionIO/PredictionIO
|
examples/scala-parallel-recommendation/blacklist-items/src/main/scala/ALSAlgorithm.scala
|
Scala
|
apache-2.0
| 6,131 |
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package org.scalajs.nscplugin.test
import org.scalajs.nscplugin.test.util._
import org.junit.Test
import org.junit.Ignore
// scalastyle:off line.size.limit
class JSInteropTest extends DirectTest with TestHelpers {
override def preamble: String =
"""
import scala.scalajs.js
import scala.scalajs.js.annotation._
"""
private val JSNativeLoadSpecAnnots = Seq(
"JSGlobal" -> "@JSGlobal",
"JSGlobal" -> "@JSGlobal(\"foo\")",
"JSImport" -> "@JSImport(\"foo\", \"bar\")",
"JSImport" -> "@JSImport(\"foo\", \"bar\", globalFallback = \"baz\")",
"JSGlobalScope" -> "@JSGlobalScope"
)
private def version = scala.util.Properties.versionNumberString
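  // Returns `msg` only on compiler versions whose newer ref-checks actually emit
  // these diagnostics (2.13+); on 2.11/2.12 no message is expected.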
private def ifHasNewRefChecks(msg: String): String = {
if (version.startsWith("2.11.") ||
version.startsWith("2.12.")) {
""
} else {
msg.stripMargin.trim()
}
}
@Test def warnJSPackageObjectDeprecated: Unit = {
s"""
package object jspackage extends js.Object
""" hasErrors
s"""
|newSource1.scala:5: error: Package objects may not extend js.Any.
| package object jspackage extends js.Object
| ^
"""
}
@Test def noJSNameAnnotOnNonJSNative: Unit = {
for {
obj <- Seq("class", "trait", "object")
} yield {
s"""
@JSName("foo")
$obj A extends js.Object
object Sym {
val sym = js.Symbol()
}
@JSName(Sym.sym)
$obj B extends js.Object
""" hasErrors
"""
|newSource1.scala:5: error: @JSName can only be used on members of JS types.
| @JSName("foo")
| ^
|newSource1.scala:12: error: @JSName can only be used on members of JS types.
| @JSName(Sym.sym)
| ^
"""
}
for {
obj <- Seq("class", "trait", "object")
} yield {
s"""
@JSName("foo")
$obj A
object Sym {
val sym = js.Symbol()
}
@JSName(Sym.sym)
$obj B
""" hasErrors
"""
|newSource1.scala:5: error: @JSName can only be used on members of JS types.
| @JSName("foo")
| ^
|newSource1.scala:12: error: @JSName can only be used on members of JS types.
| @JSName(Sym.sym)
| ^
"""
}
"""
object Container {
@JSName("foo")
val a: Int = 1
@JSName("foo")
var b: Int = 2
@JSName("foo")
def c: Int = 3
@JSName("foo")
def d_=(v: Int): Unit = ()
@JSName("foo")
def e(x: Int): Int = x + 1
}
""" hasErrors
"""
|newSource1.scala:6: error: @JSName can only be used on members of JS types.
| @JSName("foo")
| ^
|newSource1.scala:9: error: @JSName can only be used on members of JS types.
| @JSName("foo")
| ^
|newSource1.scala:12: error: @JSName can only be used on members of JS types.
| @JSName("foo")
| ^
|newSource1.scala:15: error: @JSName can only be used on members of JS types.
| @JSName("foo")
| ^
|newSource1.scala:18: error: @JSName can only be used on members of JS types.
| @JSName("foo")
| ^
"""
}
@Test def okJSNameOnNestedObjects: Unit = {
"""
class A extends js.Object {
@JSName("foo")
object toto
@JSName("bar")
object tata extends js.Object
}
""".hasNoWarns()
"""
class A extends js.Object {
@JSName("foo")
private object toto
@JSName("bar")
private object tata extends js.Object
}
""" hasErrors
"""
|newSource1.scala:6: error: @JSName cannot be used on private members.
| @JSName("foo")
| ^
|newSource1.scala:9: error: @JSName cannot be used on private members.
| @JSName("bar")
| ^
"""
}
@Test def noJSGlobalAnnotOnNonJSNative: Unit = {
for {
obj <- Seq("class", "trait", "object")
} yield {
s"""
@JSGlobal
$obj A extends js.Object
@JSGlobal("Foo")
$obj B extends js.Object
""" hasErrors
"""
|newSource1.scala:5: error: @JSGlobal can only be used on native JS definitions (with @js.native).
| @JSGlobal
| ^
|newSource1.scala:8: error: @JSGlobal can only be used on native JS definitions (with @js.native).
| @JSGlobal("Foo")
| ^
"""
}
for {
obj <- Seq("class", "trait", "object")
} yield {
s"""
@JSGlobal
$obj A
@JSGlobal("Foo")
$obj B
""" hasErrors
"""
|newSource1.scala:5: error: @JSGlobal can only be used on native JS definitions (with @js.native).
| @JSGlobal
| ^
|newSource1.scala:8: error: @JSGlobal can only be used on native JS definitions (with @js.native).
| @JSGlobal("Foo")
| ^
"""
}
"""
object Container {
@JSGlobal
val a: Int = 1
@JSGlobal
var b: Int = 2
@JSGlobal
def c: Int = 3
@JSGlobal
def d_=(v: Int): Unit = ()
@JSGlobal
def e(x: Int): Int = x + 1
}
""" hasErrors
"""
|newSource1.scala:6: error: @JSGlobal can only be used on native JS definitions (with @js.native).
| @JSGlobal
| ^
|newSource1.scala:9: error: @JSGlobal can only be used on native JS definitions (with @js.native).
| @JSGlobal
| ^
|newSource1.scala:12: error: @JSGlobal can only be used on native JS definitions (with @js.native).
| @JSGlobal
| ^
|newSource1.scala:15: error: @JSGlobal can only be used on native JS definitions (with @js.native).
| @JSGlobal
| ^
|newSource1.scala:18: error: @JSGlobal can only be used on native JS definitions (with @js.native).
| @JSGlobal
| ^
"""
}
@Test def noJSImportAnnotOnNonJSNative: Unit = {
for {
obj <- Seq("class", "trait", "object")
} yield {
s"""
@JSImport("foo", JSImport.Namespace)
$obj A extends js.Object
""" hasErrors
"""
|newSource1.scala:5: error: @JSImport can only be used on native JS definitions (with @js.native).
| @JSImport("foo", JSImport.Namespace)
| ^
"""
}
for {
obj <- Seq("class", "trait", "object")
} yield {
s"""
@JSImport("foo", JSImport.Namespace)
$obj A
""" hasErrors
"""
|newSource1.scala:5: error: @JSImport can only be used on native JS definitions (with @js.native).
| @JSImport("foo", JSImport.Namespace)
| ^
"""
}
"""
object Container {
@JSImport("foo", "bar")
val a: Int = 1
@JSImport("foo", "bar")
var b: Int = 2
@JSImport("foo", "bar")
def c: Int = 3
@JSImport("foo", "bar")
def d_=(v: Int): Unit = ()
@JSImport("foo", "bar")
def e(x: Int): Int = x + 1
}
""" hasErrors
"""
|newSource1.scala:6: error: @JSImport can only be used on native JS definitions (with @js.native).
| @JSImport("foo", "bar")
| ^
|newSource1.scala:9: error: @JSImport can only be used on native JS definitions (with @js.native).
| @JSImport("foo", "bar")
| ^
|newSource1.scala:12: error: @JSImport can only be used on native JS definitions (with @js.native).
| @JSImport("foo", "bar")
| ^
|newSource1.scala:15: error: @JSImport can only be used on native JS definitions (with @js.native).
| @JSImport("foo", "bar")
| ^
|newSource1.scala:18: error: @JSImport can only be used on native JS definitions (with @js.native).
| @JSImport("foo", "bar")
| ^
"""
for {
obj <- Seq("class", "trait", "object")
} yield {
s"""
@JSImport("foo", JSImport.Namespace, globalFallback = "Foo")
$obj A extends js.Object
""" hasErrors
"""
|newSource1.scala:5: error: @JSImport can only be used on native JS definitions (with @js.native).
| @JSImport("foo", JSImport.Namespace, globalFallback = "Foo")
| ^
"""
}
for {
obj <- Seq("class", "trait", "object")
} yield {
s"""
@JSImport("foo", JSImport.Namespace, globalFallback = "Foo")
$obj A
""" hasErrors
"""
|newSource1.scala:5: error: @JSImport can only be used on native JS definitions (with @js.native).
| @JSImport("foo", JSImport.Namespace, globalFallback = "Foo")
| ^
"""
}
"""
object Container {
@JSImport("foo", "bar", globalFallback = "Foo")
val a: Int = 1
@JSImport("foo", "bar", globalFallback = "Foo")
var b: Int = 2
@JSImport("foo", "bar", globalFallback = "Foo")
def c: Int = 3
@JSImport("foo", "bar", globalFallback = "Foo")
def d_=(v: Int): Unit = ()
@JSImport("foo", "bar", globalFallback = "Foo")
def e(x: Int): Int = x + 1
}
""" hasErrors
"""
|newSource1.scala:6: error: @JSImport can only be used on native JS definitions (with @js.native).
| @JSImport("foo", "bar", globalFallback = "Foo")
| ^
|newSource1.scala:9: error: @JSImport can only be used on native JS definitions (with @js.native).
| @JSImport("foo", "bar", globalFallback = "Foo")
| ^
|newSource1.scala:12: error: @JSImport can only be used on native JS definitions (with @js.native).
| @JSImport("foo", "bar", globalFallback = "Foo")
| ^
|newSource1.scala:15: error: @JSImport can only be used on native JS definitions (with @js.native).
| @JSImport("foo", "bar", globalFallback = "Foo")
| ^
|newSource1.scala:18: error: @JSImport can only be used on native JS definitions (with @js.native).
| @JSImport("foo", "bar", globalFallback = "Foo")
| ^
"""
}
@Test def noJSGlobalScopeAnnotOnNonJSNative: Unit = {
"""
@JSGlobalScope
object A extends js.Object
""" hasErrors
"""
|newSource1.scala:5: error: @JSGlobalScope can only be used on native JS objects (with @js.native).
| @JSGlobalScope
| ^
"""
"""
@JSGlobalScope
object A
""" hasErrors
"""
|newSource1.scala:5: error: @JSGlobalScope can only be used on native JS objects (with @js.native).
| @JSGlobalScope
| ^
"""
}
@Test def noJSNameAnnotOnClass: Unit = {
"""
@js.native
@JSName("Foo")
class A extends js.Object
@js.native
@JSName("Foo")
abstract class B extends js.Object
""" hasErrors
"""
|newSource1.scala:6: error: @JSName can only be used on members of JS types.
| @JSName("Foo")
| ^
|newSource1.scala:7: error: Native JS classes, vals and defs must have exactly one annotation among @JSGlobal and @JSImport.
| class A extends js.Object
| ^
|newSource1.scala:10: error: @JSName can only be used on members of JS types.
| @JSName("Foo")
| ^
|newSource1.scala:11: error: Native JS classes, vals and defs must have exactly one annotation among @JSGlobal and @JSImport.
| abstract class B extends js.Object
| ^
"""
}
@Test def noJSNameAnnotOnObject: Unit = {
"""
@js.native
@JSName("Foo")
object A extends js.Object
""" hasErrors
"""
|newSource1.scala:6: error: @JSName can only be used on members of JS types.
| @JSName("Foo")
| ^
|newSource1.scala:7: error: Native JS objects must have exactly one annotation among @JSGlobal, @JSImport and @JSGlobalScope.
| object A extends js.Object
| ^
"""
}
@Test def noJSNameAnnotOnTrait: Unit = {
s"""
object Sym {
val sym = js.Symbol()
}
@js.native @JSGlobal
object Container extends js.Object {
@js.native
@JSName("foo")
trait A extends js.Object
@js.native
@JSName(Sym.sym)
trait B extends js.Object
}
""" hasErrors
"""
|newSource1.scala:12: error: @JSName cannot be used on traits.
| @JSName("foo")
| ^
|newSource1.scala:16: error: @JSName cannot be used on traits.
| @JSName(Sym.sym)
| ^
"""
}
@Test def noJSNameAnnotOnNativeValDef: Unit = {
s"""
object Sym {
val sym = js.Symbol()
}
object Container {
@js.native
@JSName("foo")
val a: Int = js.native
@js.native
@JSName("foo")
def b: Int = js.native
@js.native
@JSName("foo")
def c(x: Int): Int = js.native
@js.native
@JSName(Sym.sym)
val d: Int = js.native
@js.native
@JSName(Sym.sym)
def e: Int = js.native
@js.native
@JSName(Sym.sym)
def f(x: Int): Int = js.native
}
""" hasErrors
"""
|newSource1.scala:11: error: @JSName can only be used on members of JS types.
| @JSName("foo")
| ^
|newSource1.scala:12: error: Native JS classes, vals and defs must have exactly one annotation among @JSGlobal and @JSImport.
| val a: Int = js.native
| ^
|newSource1.scala:15: error: @JSName can only be used on members of JS types.
| @JSName("foo")
| ^
|newSource1.scala:16: error: Native JS classes, vals and defs must have exactly one annotation among @JSGlobal and @JSImport.
| def b: Int = js.native
| ^
|newSource1.scala:19: error: @JSName can only be used on members of JS types.
| @JSName("foo")
| ^
|newSource1.scala:20: error: Native JS classes, vals and defs must have exactly one annotation among @JSGlobal and @JSImport.
| def c(x: Int): Int = js.native
| ^
|newSource1.scala:23: error: @JSName can only be used on members of JS types.
| @JSName(Sym.sym)
| ^
|newSource1.scala:24: error: Native JS classes, vals and defs must have exactly one annotation among @JSGlobal and @JSImport.
| val d: Int = js.native
| ^
|newSource1.scala:27: error: @JSName can only be used on members of JS types.
| @JSName(Sym.sym)
| ^
|newSource1.scala:28: error: Native JS classes, vals and defs must have exactly one annotation among @JSGlobal and @JSImport.
| def e: Int = js.native
| ^
|newSource1.scala:31: error: @JSName can only be used on members of JS types.
| @JSName(Sym.sym)
| ^
|newSource1.scala:32: error: Native JS classes, vals and defs must have exactly one annotation among @JSGlobal and @JSImport.
| def f(x: Int): Int = js.native
| ^
"""
}
@Test def noJSGlobalAnnotOnTrait: Unit = {
s"""
@js.native
@JSGlobal
trait A extends js.Object
""" hasErrors
s"""
|newSource1.scala:6: error: Traits may not have an @JSGlobal annotation.
| @JSGlobal
| ^
"""
s"""
@js.native
@JSGlobal("Foo")
trait A extends js.Object
""" hasErrors
s"""
|newSource1.scala:6: error: Traits may not have an @JSGlobal annotation.
| @JSGlobal("Foo")
| ^
"""
}
@Test def noJSImportAnnotOnTrait: Unit = {
s"""
@js.native
@JSImport("foo", JSImport.Namespace)
trait A extends js.Object
""" hasErrors
s"""
|newSource1.scala:6: error: Traits may not have an @JSImport annotation.
| @JSImport("foo", JSImport.Namespace)
| ^
"""
s"""
@js.native
@JSImport("foo", JSImport.Namespace, globalFallback = "Foo")
trait A extends js.Object
""" hasErrors
s"""
|newSource1.scala:6: error: Traits may not have an @JSImport annotation.
| @JSImport("foo", JSImport.Namespace, globalFallback = "Foo")
| ^
"""
}
@Test def noJSGlobalScopeExceptOnObjects: Unit = {
"""
@js.native @JSGlobalScope
class A extends js.Any
@js.native @JSGlobalScope
trait B extends js.Any
object Container {
@js.native @JSGlobalScope
class C extends js.Any
@js.native @JSGlobalScope
trait D extends js.Any
@js.native @JSGlobalScope
val a: Int = js.native
@js.native @JSGlobalScope
def b: Int = js.native
@js.native @JSGlobalScope
def c(x: Int): Int = js.native
}
""" hasErrors
"""
|newSource1.scala:5: error: @JSGlobalScope can only be used on native JS objects (with @js.native).
| @js.native @JSGlobalScope
| ^
|newSource1.scala:8: error: Traits may not have an @JSGlobalScope annotation.
| @js.native @JSGlobalScope
| ^
|newSource1.scala:12: error: @JSGlobalScope can only be used on native JS objects (with @js.native).
| @js.native @JSGlobalScope
| ^
|newSource1.scala:15: error: Traits may not have an @JSGlobalScope annotation.
| @js.native @JSGlobalScope
| ^
|newSource1.scala:18: error: @JSGlobalScope can only be used on native JS objects (with @js.native).
| @js.native @JSGlobalScope
| ^
|newSource1.scala:19: error: Native JS classes, vals and defs must have exactly one annotation among @JSGlobal and @JSImport.
| val a: Int = js.native
| ^
|newSource1.scala:21: error: @JSGlobalScope can only be used on native JS objects (with @js.native).
| @js.native @JSGlobalScope
| ^
|newSource1.scala:24: error: @JSGlobalScope can only be used on native JS objects (with @js.native).
| @js.native @JSGlobalScope
| ^
"""
}
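  // Every ordered pair of load-spec annotations (including an annotation paired
  // with itself) must be rejected; the compiler reports the error on the second one.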
@Test def noTwoJSNativeLoadSpecAnnots: Unit = {
for {
(firstAnnotName, firstAnnot) <- JSNativeLoadSpecAnnots
(secondAnnotName, secondAnnot) <- JSNativeLoadSpecAnnots
} {
if (firstAnnotName == "JSGlobalScope" || secondAnnotName == "JSGlobalScope") {
s"""
|@js.native
|$firstAnnot
|$secondAnnot
|object A extends js.Object
""".stripMargin hasErrors s"""
|newSource1.scala:7: error: Native JS objects must have exactly one annotation among @JSGlobal, @JSImport and @JSGlobalScope.
|$secondAnnot
| ^
"""
} else {
s"""
|@js.native
|$firstAnnot
|$secondAnnot
|object A extends js.Object
|
|@js.native
|$firstAnnot
|$secondAnnot
|class A extends js.Object
""".stripMargin hasErrors s"""
|newSource1.scala:7: error: Native JS objects must have exactly one annotation among @JSGlobal, @JSImport and @JSGlobalScope.
|$secondAnnot
| ^
|newSource1.scala:12: error: Native JS classes, vals and defs must have exactly one annotation among @JSGlobal and @JSImport.
|$secondAnnot
| ^
"""
if (firstAnnot != "@JSGlobal" && secondAnnot != "@JSGlobal") {
s"""
|object Container {
| @js.native
| $firstAnnot
| $secondAnnot
| val a: Int = js.native
|
| @js.native
| $firstAnnot
| $secondAnnot
| def b: Int = js.native
|
| @js.native
| $firstAnnot
| $secondAnnot
| def c(x: Int): Int = js.native
|}
""".stripMargin hasErrors s"""
|newSource1.scala:8: error: Native JS classes, vals and defs must have exactly one annotation among @JSGlobal and @JSImport.
| $secondAnnot
| ^
|newSource1.scala:13: error: Native JS classes, vals and defs must have exactly one annotation among @JSGlobal and @JSImport.
| $secondAnnot
| ^
|newSource1.scala:18: error: Native JS classes, vals and defs must have exactly one annotation among @JSGlobal and @JSImport.
| $secondAnnot
| ^
"""
}
}
}
}
@Test def noJSNativeAnnotWithoutJSAny: Unit = {
// With the correct amount of native load spec annotations
"""
@js.native @JSGlobal
class A
@js.native
trait B
@js.native @JSGlobal
object C
@js.native @JSGlobal
class D extends Enumeration
@js.native @JSGlobal
object E extends Enumeration
""" hasErrors
"""
|newSource1.scala:6: error: Classes, traits and objects not extending js.Any may not have an @js.native annotation
| class A
| ^
|newSource1.scala:9: error: Classes, traits and objects not extending js.Any may not have an @js.native annotation
| trait B
| ^
|newSource1.scala:12: error: Classes, traits and objects not extending js.Any may not have an @js.native annotation
| object C
| ^
|newSource1.scala:15: error: Classes, traits and objects not extending js.Any may not have an @js.native annotation
| class D extends Enumeration
| ^
|newSource1.scala:18: error: Classes, traits and objects not extending js.Any may not have an @js.native annotation
| object E extends Enumeration
| ^
"""
// With an incorrect amount of native load spec annotations
"""
@js.native
class A
@js.native @JSGlobal
trait B
@js.native
object C
@js.native
class D extends Enumeration
@js.native
object E extends Enumeration
""" hasErrors
"""
|newSource1.scala:6: error: Classes, traits and objects not extending js.Any may not have an @js.native annotation
| class A
| ^
|newSource1.scala:9: error: Classes, traits and objects not extending js.Any may not have an @js.native annotation
| trait B
| ^
|newSource1.scala:12: error: Classes, traits and objects not extending js.Any may not have an @js.native annotation
| object C
| ^
|newSource1.scala:15: error: Classes, traits and objects not extending js.Any may not have an @js.native annotation
| class D extends Enumeration
| ^
|newSource1.scala:18: error: Classes, traits and objects not extending js.Any may not have an @js.native annotation
| object E extends Enumeration
| ^
"""
}
@Test def noInnerScalaClassTraitObjectInJSNative: Unit = {
for {
outer <- Seq("class", "trait")
inner <- Seq("class", "trait", "object")
} yield {
val jsGlobalAnnot =
if (outer == "trait") ""
else "@JSGlobal"
s"""
@js.native $jsGlobalAnnot
$outer A extends js.Object {
$inner A
}
""" hasErrors
s"""
|newSource1.scala:7: error: Native JS traits, classes and objects cannot contain inner Scala traits, classes or objects (i.e., not extending js.Any)
| $inner A
| ${" " * inner.length} ^
"""
}
}
@Test def noInnerNonNativeJSClassTraitObjectInJSNative: Unit = {
for {
outer <- Seq("class", "trait")
inner <- Seq("class", "trait", "object")
} yield {
val jsGlobalAnnot =
if (outer == "trait") ""
else "@JSGlobal"
s"""
@js.native $jsGlobalAnnot
$outer A extends js.Object {
$inner A extends js.Object
}
""" hasErrors
s"""
|newSource1.scala:7: error: Native JS classes and traits cannot contain non-native JS classes, traits or objects
| $inner A extends js.Object
| ${" " * inner.length} ^
"""
}
}
@Test def noScalaStuffInsideNativeJSObject: Unit = {
for {
inner <- Seq("class", "trait", "object")
} yield {
s"""
@js.native
@JSGlobal
object A extends js.Object {
$inner A
}
""" hasErrors
s"""
|newSource1.scala:8: error: Native JS traits, classes and objects cannot contain inner Scala traits, classes or objects (i.e., not extending js.Any)
| $inner A
| ${" " * inner.length} ^
"""
}
}
@Test def noNonSyntheticCompanionInsideNativeJSObject: Unit = {
// See #1891: The default parameter generates a synthetic companion object
// The synthetic companion should be allowed, but it may not be explicit
"""
@js.native @JSGlobal object A extends js.Object {
@js.native class B(x: Int = ???) extends js.Object
object B
}
""" hasErrors
"""
|newSource1.scala:7: error: Native JS traits, classes and objects cannot contain inner Scala traits, classes or objects (i.e., not extending js.Any)
| object B
| ^
"""
"""
@js.native @JSGlobal object A extends js.Object {
@js.native class B(x: Int = ???) extends js.Object
}
""".succeeds()
}
@Test def noNonNativeJSTypesInsideNativeJSObject: Unit = {
for {
inner <- Seq("class", "object")
} yield {
s"""
@js.native
@JSGlobal
object A extends js.Object {
$inner A extends js.Object
}
""" hasErrors
s"""
|newSource1.scala:8: error: Native JS objects cannot contain inner non-native JS classes or objects
| $inner A extends js.Object
| ${" " * inner.length} ^
"""
}
}
@Test def jsNativeValDefsHaveJSNativeRHS: Unit = {
"""
object Container {
@js.native @JSGlobal("a")
val a: Int = 1
@js.native @JSGlobal("b")
def b: Int = 3
@js.native @JSGlobal("c")
def c(x: Int): Int = x + 1
}
""" hasErrors
"""
|newSource1.scala:7: error: @js.native members may only call js.native.
| val a: Int = 1
| ^
|newSource1.scala:10: error: @js.native members may only call js.native.
| def b: Int = 3
| ^
|newSource1.scala:13: error: @js.native members may only call js.native.
| def c(x: Int): Int = x + 1
| ^
"""
}
@Test def noJSBracketAccessOnJSNativeValDefs: Unit = {
"""
object Container {
@js.native @JSGlobal("a")
@JSBracketAccess
val a: Int = js.native
@js.native @JSGlobal("b")
@JSBracketAccess
def b: Int = js.native
@js.native @JSGlobal("c")
@JSBracketAccess
def c(x: Int): Int = js.native
}
""" hasErrors
"""
|newSource1.scala:8: error: @JSBracketAccess is not allowed on @js.native vals and defs
| val a: Int = js.native
| ^
|newSource1.scala:12: error: @JSBracketAccess is not allowed on @js.native vals and defs
| def b: Int = js.native
| ^
|newSource1.scala:16: error: @JSBracketAccess is not allowed on @js.native vals and defs
| def c(x: Int): Int = js.native
| ^
"""
}
@Test def noJSBracketCallOnJSNativeValDefs: Unit = {
"""
object Container {
@js.native @JSGlobal("a")
@JSBracketCall
val a: Int = js.native
@js.native @JSGlobal("b")
@JSBracketCall
def b: Int = js.native
@js.native @JSGlobal("c")
@JSBracketCall
def c(x: Int): Int = js.native
}
""" hasErrors
"""
|newSource1.scala:8: error: @JSBracketCall is not allowed on @js.native vals and defs
| val a: Int = js.native
| ^
|newSource1.scala:12: error: @JSBracketCall is not allowed on @js.native vals and defs
| def b: Int = js.native
| ^
|newSource1.scala:16: error: @JSBracketCall is not allowed on @js.native vals and defs
| def c(x: Int): Int = js.native
| ^
"""
}
@Test def noJSNativeValDefsInJSObjects: Unit = {
"""
object A {
val sym = js.Symbol("foo")
}
object NonNativeContainer extends js.Object {
@js.native @JSGlobal("a")
val a: Int = js.native
@js.native @JSGlobal("b")
def b: Int = js.native
@js.native @JSGlobal("c")
def c(x: Int): Int = js.native
@js.native @JSName("foo")
val d: Int = js.native
@js.native @JSName("bar")
def e(x: Int): Int = js.native
@js.native @JSName(A.sym)
val f: Int = js.native
@js.native @JSName(A.sym)
def g(x: Int): Int = js.native
}
@js.native @JSGlobal
object NativeContainer extends js.Object {
@js.native @JSGlobal("a")
val a: Int = js.native
@js.native @JSGlobal("b")
def b: Int = js.native
@js.native @JSGlobal("c")
def c(x: Int): Int = js.native
@js.native @JSName("foo")
val d: Int = js.native
@js.native @JSName("bar")
def e(x: Int): Int = js.native
@js.native @JSName(A.sym)
val f: Int = js.native
@js.native @JSName(A.sym)
def g(x: Int): Int = js.native
}
@js.native @JSGlobal
object NativeContainer2 extends js.Object {
@js.native
val a: Int = js.native
@js.native
def b: Int = js.native
@js.native
def c(x: Int): Int = js.native
@js.native
val d: Int = js.native
@js.native
def e(x: Int): Int = js.native
@js.native @JSName(A.sym)
val f: Int = js.native
@js.native @JSName(A.sym)
def g(x: Int): Int = js.native
}
""" hasErrors
"""
|newSource1.scala:11: error: @js.native vals and defs can only appear in static Scala objects
| val a: Int = js.native
| ^
|newSource1.scala:14: error: @js.native vals and defs can only appear in static Scala objects
| def b: Int = js.native
| ^
|newSource1.scala:17: error: @js.native vals and defs can only appear in static Scala objects
| def c(x: Int): Int = js.native
| ^
|newSource1.scala:20: error: @js.native vals and defs can only appear in static Scala objects
| val d: Int = js.native
| ^
|newSource1.scala:23: error: @js.native vals and defs can only appear in static Scala objects
| def e(x: Int): Int = js.native
| ^
|newSource1.scala:26: error: @js.native vals and defs can only appear in static Scala objects
| val f: Int = js.native
| ^
|newSource1.scala:29: error: @js.native vals and defs can only appear in static Scala objects
| def g(x: Int): Int = js.native
| ^
|newSource1.scala:35: error: @js.native vals and defs can only appear in static Scala objects
| val a: Int = js.native
| ^
|newSource1.scala:38: error: @js.native vals and defs can only appear in static Scala objects
| def b: Int = js.native
| ^
|newSource1.scala:41: error: @js.native vals and defs can only appear in static Scala objects
| def c(x: Int): Int = js.native
| ^
|newSource1.scala:44: error: @js.native vals and defs can only appear in static Scala objects
| val d: Int = js.native
| ^
|newSource1.scala:47: error: @js.native vals and defs can only appear in static Scala objects
| def e(x: Int): Int = js.native
| ^
|newSource1.scala:50: error: @js.native vals and defs can only appear in static Scala objects
| val f: Int = js.native
| ^
|newSource1.scala:53: error: @js.native vals and defs can only appear in static Scala objects
| def g(x: Int): Int = js.native
| ^
|newSource1.scala:59: error: @js.native vals and defs can only appear in static Scala objects
| val a: Int = js.native
| ^
|newSource1.scala:62: error: @js.native vals and defs can only appear in static Scala objects
| def b: Int = js.native
| ^
|newSource1.scala:65: error: @js.native vals and defs can only appear in static Scala objects
| def c(x: Int): Int = js.native
| ^
|newSource1.scala:68: error: @js.native vals and defs can only appear in static Scala objects
| val d: Int = js.native
| ^
|newSource1.scala:71: error: @js.native vals and defs can only appear in static Scala objects
| def e(x: Int): Int = js.native
| ^
|newSource1.scala:74: error: @js.native vals and defs can only appear in static Scala objects
| val f: Int = js.native
| ^
|newSource1.scala:77: error: @js.native vals and defs can only appear in static Scala objects
| def g(x: Int): Int = js.native
| ^
"""
}
@Test def noJSNativeSetters: Unit = {
"""
object Container {
@js.native @JSGlobal("foo")
def foo_=(x: Int): Int = js.native
@js.native @JSGlobal("bar")
def bar_=(x: Int, y: Int): Unit = js.native
@js.native @JSGlobal("goo")
def goo_=(x: Int*): Unit = js.native
@js.native @JSGlobal("hoo")
def hoo_=(x: Int = 1): Unit = js.native
@js.native @JSImport("module.js", "foo")
def foo2_=(x: Int): Int = js.native
@js.native @JSImport("module.js", "bar")
def bar2_=(x: Int, y: Int): Unit = js.native
@js.native @JSImport("module.js", "goo")
def goo2_=(x: Int*): Unit = js.native
@js.native @JSImport("module.js", "hoo")
def hoo2_=(x: Int = 1): Unit = js.native
}
""" hasErrors
"""
|newSource1.scala:7: error: @js.native is not allowed on vars, lazy vals and setter defs
| def foo_=(x: Int): Int = js.native
| ^
|newSource1.scala:9: error: @js.native is not allowed on vars, lazy vals and setter defs
| def bar_=(x: Int, y: Int): Unit = js.native
| ^
|newSource1.scala:11: error: @js.native is not allowed on vars, lazy vals and setter defs
| def goo_=(x: Int*): Unit = js.native
| ^
|newSource1.scala:13: error: @js.native is not allowed on vars, lazy vals and setter defs
| def hoo_=(x: Int = 1): Unit = js.native
| ^
|newSource1.scala:16: error: @js.native is not allowed on vars, lazy vals and setter defs
| def foo2_=(x: Int): Int = js.native
| ^
|newSource1.scala:18: error: @js.native is not allowed on vars, lazy vals and setter defs
| def bar2_=(x: Int, y: Int): Unit = js.native
| ^
|newSource1.scala:20: error: @js.native is not allowed on vars, lazy vals and setter defs
| def goo2_=(x: Int*): Unit = js.native
| ^
|newSource1.scala:22: error: @js.native is not allowed on vars, lazy vals and setter defs
| def hoo2_=(x: Int = 1): Unit = js.native
| ^
"""
// containsErrors because some versions of the compiler use `_=` and some use `_=' (notice the quotes)
"""
object Container {
@js.native @JSGlobal("foo")
val foo_= : Int = js.native
}
""" containsErrors
"""
|newSource1.scala:7: error: Names of vals or vars may not end in `_=
"""
// containsErrors because some versions of the compiler use `_=` and some use `_=' (notice the quotes)
"""
object Container {
@js.native @JSImport("module.js")
val foo_= : Int = js.native
}
""" containsErrors
"""
|newSource1.scala:7: error: Names of vals or vars may not end in `_=
"""
}
@Test def noJSNativeVars: Unit = {
"""
object Container {
@js.native @JSGlobal("foo")
var foo: Int = js.native
}
""" hasErrors
"""
|newSource1.scala:7: error: @js.native is not allowed on vars, lazy vals and setter defs
| var foo: Int = js.native
| ^
"""
}
@Test def noJSNativeLazyVals: Unit = {
"""
object Container {
@js.native @JSGlobal("foo")
lazy val foo: Int = js.native
}
""" hasErrors
"""
|newSource1.scala:7: error: @js.native is not allowed on vars, lazy vals and setter defs
| lazy val foo: Int = js.native
| ^
"""
}
@Test def jsNativeValDefsCannotImplementAbstractMethod: Unit = {
"""
abstract class Parent {
val a: Int
def b: Int
def c(x: Int): Int
}
object Container extends Parent {
@js.native @JSGlobal("a")
val a: Int = js.native
@js.native @JSGlobal("b")
def b: Int = js.native
@js.native @JSGlobal("c")
def c(x: Int): Int = js.native
}
""" hasErrors
"""
|newSource1.scala:13: error: An @js.native member cannot implement the inherited member Parent.a
| val a: Int = js.native
| ^
|newSource1.scala:16: error: An @js.native member cannot implement the inherited member Parent.b
| def b: Int = js.native
| ^
|newSource1.scala:19: error: An @js.native member cannot implement the inherited member Parent.c
| def c(x: Int): Int = js.native
| ^
"""
}
@Test def jsNativeValDefsCannotOverrideConcreteMethod: Unit = {
"""
class Parent {
val a: Int = 1
def b: Int = 2
def c(x: Int): Int = x + 1
}
object Container extends Parent {
@js.native @JSGlobal("a")
override val a: Int = js.native
@js.native @JSGlobal("b")
override def b: Int = js.native
@js.native @JSGlobal("c")
override def c(x: Int): Int = js.native
}
""" hasErrors
"""
|newSource1.scala:13: error: An @js.native member cannot override the inherited member Parent.a
| override val a: Int = js.native
| ^
|newSource1.scala:16: error: An @js.native member cannot override the inherited member Parent.b
| override def b: Int = js.native
| ^
|newSource1.scala:19: error: An @js.native member cannot override the inherited member Parent.c
| override def c(x: Int): Int = js.native
| ^
"""
}
@Test def noBadSetters: Unit = {
"""
@js.native
@JSGlobal
class A extends js.Object {
def foo_=(x: Int): Int = js.native
def bar_=(x: Int, y: Int): Unit = js.native
def goo_=(x: Int*): Unit = js.native
def hoo_=(x: Int = 1): Unit = js.native
}
""" hasErrors
"""
|newSource1.scala:8: error: JS setters must return Unit
| def foo_=(x: Int): Int = js.native
| ^
|newSource1.scala:9: error: JS setters must have exactly one argument
| def bar_=(x: Int, y: Int): Unit = js.native
| ^
|newSource1.scala:10: error: JS setters may not have repeated params
| def goo_=(x: Int*): Unit = js.native
| ^
|newSource1.scala:11: error: JS setters may not have default params
| def hoo_=(x: Int = 1): Unit = js.native
| ^
"""
}
@Test def noBadBracketAccess: Unit = {
"""
@js.native
@JSGlobal
class A extends js.Object {
@js.annotation.JSBracketAccess
def foo(): Int = js.native
@js.annotation.JSBracketAccess
def bar(x: Int, y: Int, z: Int): Int = js.native
}
""" hasErrors
"""
|newSource1.scala:9: error: @JSBracketAccess methods must have one or two parameters
| def foo(): Int = js.native
| ^
|newSource1.scala:12: error: @JSBracketAccess methods must have one or two parameters
| def bar(x: Int, y: Int, z: Int): Int = js.native
| ^
"""
"""
@js.native
@JSGlobal
class A extends js.Object {
@js.annotation.JSBracketAccess
def foo(x: Int, y: Int): Int = js.native
}
""" hasErrors
"""
|newSource1.scala:9: error: @JSBracketAccess methods with two parameters must return Unit
| def foo(x: Int, y: Int): Int = js.native
| ^
"""
"""
@js.native
@JSGlobal
class A extends js.Object {
@js.annotation.JSBracketAccess
def bar(x: Int*): Int = js.native
}
""" hasErrors
"""
|newSource1.scala:9: error: @JSBracketAccess methods may not have repeated parameters
| def bar(x: Int*): Int = js.native
| ^
"""
"""
@js.native
@JSGlobal
class A extends js.Object {
@js.annotation.JSBracketAccess
def bar(x: Int = 1): Int = js.native
}
""" hasErrors
"""
|newSource1.scala:9: error: @JSBracketAccess methods may not have default parameters
| def bar(x: Int = 1): Int = js.native
| ^
"""
}
@Test def noBadBracketCall: Unit = {
"""
@js.native
@JSGlobal
class A extends js.Object {
@js.annotation.JSBracketCall
def foo(): Int = js.native
}
""" hasErrors
"""
|newSource1.scala:9: error: @JSBracketCall methods must have at least one non-repeated parameter
| def foo(): Int = js.native
| ^
"""
}
  // #4284
  @Test def noBracketAccessAndJSName: Unit = {
"""
@js.native
@JSGlobal
class A extends js.Object {
@JSBracketAccess
@JSName("bar")
def bar(x: Int): Int = js.native
}
""" hasErrors
"""
|newSource1.scala:9: error: A member can have at most one annotation among @JSName, @JSBracketAccess and @JSBracketCall.
| @JSName("bar")
| ^
"""
}
// #4284
@Test def noBracketCallAndJSName: Unit = {
"""
@js.native
@JSGlobal
class A extends js.Object {
@JSBracketCall
@JSName("bar")
def bar(x: Int): Int = js.native
}
""" hasErrors
"""
|newSource1.scala:9: error: A member can have at most one annotation among @JSName, @JSBracketAccess and @JSBracketCall.
| @JSName("bar")
| ^
"""
}
// #4284
@Test def noBracketAccessAndBracketCall: Unit = {
"""
@js.native
@JSGlobal
class A extends js.Object {
@JSBracketAccess
@JSBracketCall
def bar(x: Int): Int = js.native
}
""" hasErrors
"""
|newSource1.scala:9: error: A member can have at most one annotation among @JSName, @JSBracketAccess and @JSBracketCall.
| @JSBracketCall
| ^
"""
}
@Test def noBadBinaryOp: Unit = {
"""
@js.native
@JSGlobal
class A extends js.Object {
def +(x: Int*): Int = js.native
}
""" hasErrors
"""
|newSource1.scala:8: error: methods representing binary operations may not have repeated parameters
| def +(x: Int*): Int = js.native
| ^
"""
}
@Test def onlyJSTraits: Unit = {
"""
trait A
@js.native
@JSGlobal
class B extends js.Object with A
""" hasErrors
"""
|newSource1.scala:9: error: B extends A which does not extend js.Any.
| class B extends js.Object with A
| ^
"""
"""
@js.native
@JSGlobal
class B extends js.Object with java.io.Serializable
""" hasErrors
"""
|newSource1.scala:7: error: B extends java.io.Serializable which does not extend js.Any.
| class B extends js.Object with java.io.Serializable
| ^
"""
}
@Test def noCaseClassObject: Unit = {
"""
@js.native
@JSGlobal
case class A(x: Int) extends js.Object
""" hasErrors
"""
|newSource1.scala:7: error: Classes and objects extending js.Any may not have a case modifier
| case class A(x: Int) extends js.Object
| ^
"""
"""
@js.native
@JSGlobal
case object B extends js.Object
""" hasErrors
"""
|newSource1.scala:7: error: Classes and objects extending js.Any may not have a case modifier
| case object B extends js.Object
| ^
"""
"""
case class A(x: Int) extends js.Object
""" hasErrors
"""
|newSource1.scala:5: error: Classes and objects extending js.Any may not have a case modifier
| case class A(x: Int) extends js.Object
| ^
"""
"""
case object B extends js.Object
""" hasErrors
"""
|newSource1.scala:5: error: Classes and objects extending js.Any may not have a case modifier
| case object B extends js.Object
| ^
"""
}
@Test def noNativeJSNestedInScalaClassTrait: Unit = {
val outers = List("class", "trait")
val inners = List("trait", "class", "object")
for {
outer <- outers
inner <- inners
} yield {
val jsGlobalAnnot =
if (inner == "trait") ""
else "@JSGlobal"
val errTrg = if (inner == "object") "objects" else "classes"
s"""
$outer A {
@js.native $jsGlobalAnnot
$inner Inner extends js.Object
}
""" hasErrors
s"""
|newSource1.scala:7: error: Scala traits and classes may not have native JS members
| $inner Inner extends js.Object
| ${" " * inner.length}^
"""
}
}
@Test def noNativeJSNestedInNonNativeJS: Unit = {
val outers = List("class", "trait", "object")
val inners = List("class", "trait", "object")
for {
outer <- outers
inner <- inners
} yield {
val jsGlobalAnnot =
if (inner == "trait") ""
else "@JSGlobal"
val errTrg = if (inner == "object") "objects" else "classes"
s"""
$outer A extends js.Object {
@js.native $jsGlobalAnnot
$inner Inner extends js.Object
}
""" hasErrors
s"""
|newSource1.scala:7: error: non-native JS classes, traits and objects may not have native JS members
| $inner Inner extends js.Object
| ${" " * inner.length}^
"""
}
}
@Test def noLocalJSNative: Unit = {
"""
object A {
def a = {
@js.native @JSGlobal
class B extends js.Object
@js.native @JSGlobal
object C extends js.Object
@js.native @JSGlobal
val d: Int = js.native
@js.native @JSGlobal
var e: Int = js.native
@js.native @JSGlobal
def f: Int = js.native
@js.native @JSGlobal
def f_=(v: Int): Unit = js.native
@js.native @JSGlobal
def g(x: Int): Int = js.native
@js.native @JSGlobal
lazy val h: Int = js.native
}
}
""" hasErrors
"""
|newSource1.scala:8: error: @js.native is not allowed on local definitions
| class B extends js.Object
| ^
|newSource1.scala:11: error: @js.native is not allowed on local definitions
| object C extends js.Object
| ^
|newSource1.scala:14: error: @js.native is not allowed on local definitions
| val d: Int = js.native
| ^
|newSource1.scala:17: error: @js.native is not allowed on local definitions
| var e: Int = js.native
| ^
|newSource1.scala:20: error: @js.native is not allowed on local definitions
| def f: Int = js.native
| ^
|newSource1.scala:23: error: @js.native is not allowed on local definitions
| def f_=(v: Int): Unit = js.native
| ^
|newSource1.scala:26: error: @js.native is not allowed on local definitions
| def g(x: Int): Int = js.native
| ^
|newSource1.scala:29: error: @js.native is not allowed on local definitions
| lazy val h: Int = js.native
| ^
"""
}
@Test def noNativeInJSAny: Unit = {
"""
@js.native
@JSGlobal
class A extends js.Object {
@native
def value: Int = js.native
}
""" hasErrors
"""
|newSource1.scala:9: error: Methods in a js.Any may not be @native
| def value: Int = js.native
| ^
"""
}
@Test def checkJSAnyBody: Unit = {
"""
@js.native
@JSGlobal
class A extends js.Object {
def value: Int = ???
val x: Int = ???
}
""" hasErrors
"""
|newSource1.scala:8: error: Concrete members of JS native types may only call js.native.
| def value: Int = ???
| ^
|newSource1.scala:9: error: Concrete members of JS native types may only call js.native.
| val x: Int = ???
| ^
"""
}
@Test def noWarnJSAnyDeferred: Unit = {
"""
@js.native
@JSGlobal
abstract class A extends js.Object {
def value: Int
val x: Int
}
""".hasNoWarns()
"""
@js.native
trait A extends js.Object {
def value: Int
val x: Int
}
""".hasNoWarns()
}
@Test def noCallSecondaryCtor: Unit = {
"""
@js.native
@JSGlobal
class A(x: Int, y: Int) extends js.Object {
def this(x: Int) = this(x, 5)
def this() = this(7)
}
""" hasErrors
"""
|newSource1.scala:9: error: A secondary constructor of a class extending js.Any may only call the primary constructor
| def this() = this(7)
| ^
"""
}
@Test def noPrivateMemberInNative: Unit = {
"""
@js.native
@JSGlobal
class A extends js.Object {
private[this] val a: Int = js.native
private val b: Int = js.native
private[A] val c: Int = js.native
private[this] var d: Int = js.native
private var e: Int = js.native
private[A] var f: Int = js.native
private[this] def g(): Int = js.native
private def h(): Int = js.native
private[A] def i(): Int = js.native
}
""" hasErrors
"""
|newSource1.scala:8: error: Native JS classes may not have private members. Use a public member in a private facade instead.
| private[this] val a: Int = js.native
| ^
|newSource1.scala:9: error: Native JS classes may not have private members. Use a public member in a private facade instead.
| private val b: Int = js.native
| ^
|newSource1.scala:10: error: Native JS classes may not have private members. Use a public member in a private facade instead.
| private[A] val c: Int = js.native
| ^
|newSource1.scala:12: error: Native JS classes may not have private members. Use a public member in a private facade instead.
| private[this] var d: Int = js.native
| ^
|newSource1.scala:13: error: Native JS classes may not have private members. Use a public member in a private facade instead.
| private var e: Int = js.native
| ^
|newSource1.scala:14: error: Native JS classes may not have private members. Use a public member in a private facade instead.
| private[A] var f: Int = js.native
| ^
|newSource1.scala:16: error: Native JS classes may not have private members. Use a public member in a private facade instead.
| private[this] def g(): Int = js.native
| ^
|newSource1.scala:17: error: Native JS classes may not have private members. Use a public member in a private facade instead.
| private def h(): Int = js.native
| ^
|newSource1.scala:18: error: Native JS classes may not have private members. Use a public member in a private facade instead.
| private[A] def i(): Int = js.native
| ^
"""
}
@Test def noPrivateConstructorInNative: Unit = {
"""
@js.native
@JSGlobal
class A private () extends js.Object
""" hasErrors
"""
|newSource1.scala:7: error: Native JS classes may not have private constructors. Use `private[this]` to declare an internal constructor.
| class A private () extends js.Object
| ^
"""
"""
@js.native
@JSGlobal
class A private[A] () extends js.Object
""" hasErrors
"""
|newSource1.scala:7: error: Native JS classes may not have private constructors. Use `private[this]` to declare an internal constructor.
| class A private[A] () extends js.Object
| ^
"""
"""
@js.native
@JSGlobal
class A private[this] () extends js.Object
""".hasNoWarns()
}
@Test def noUseJsNative: Unit = {
"""
class A {
def foo = js.native
}
""" hasErrors
"""
|newSource1.scala:6: error: js.native may only be used as stub implementation in facade types
| def foo = js.native
| ^
"""
}
@Test def warnNothingInNativeJS: Unit = {
"""
@js.native
@JSGlobal
class A extends js.Object {
def foo = js.native
val bar = js.native
}
""" hasWarns
"""
|newSource1.scala:8: warning: The type of foo got inferred as Nothing. To suppress this warning, explicitly ascribe the type.
| def foo = js.native
| ^
|newSource1.scala:9: warning: The type of bar got inferred as Nothing. To suppress this warning, explicitly ascribe the type.
| val bar = js.native
| ^
"""
}
@Test def nativeClassHasLoadingSpec: Unit = {
"""
@js.native
class A extends js.Object
@js.native
abstract class B extends js.Object
object Container {
@js.native
class C extends js.Object
}
""" hasErrors
"""
|newSource1.scala:6: error: Native JS classes, vals and defs must have exactly one annotation among @JSGlobal and @JSImport.
| class A extends js.Object
| ^
|newSource1.scala:9: error: Native JS classes, vals and defs must have exactly one annotation among @JSGlobal and @JSImport.
| abstract class B extends js.Object
| ^
|newSource1.scala:13: error: Native JS classes, vals and defs must have exactly one annotation among @JSGlobal and @JSImport.
| class C extends js.Object
| ^
"""
}
@Test def nativeObjectHasLoadingSpec: Unit = {
"""
@js.native
object A extends js.Object
object Container {
@js.native
object B extends js.Object
}
""" hasErrors
"""
|newSource1.scala:6: error: Native JS objects must have exactly one annotation among @JSGlobal, @JSImport and @JSGlobalScope.
| object A extends js.Object
| ^
|newSource1.scala:10: error: Native JS objects must have exactly one annotation among @JSGlobal, @JSImport and @JSGlobalScope.
| object B extends js.Object
| ^
"""
}
@Test def noNativeDefinitionNamedApplyWithoutExplicitName: Unit = {
"""
@js.native
@JSGlobal
class apply extends js.Object
@js.native
@JSGlobal
object apply extends js.Object
""" hasErrors
"""
|newSource1.scala:6: error: Native JS definitions named 'apply' must have an explicit name in @JSGlobal
| @JSGlobal
| ^
|newSource1.scala:10: error: Native JS definitions named 'apply' must have an explicit name in @JSGlobal
| @JSGlobal
| ^
"""
"""
@js.native
@JSImport("foo.js")
class apply extends js.Object
@js.native
@JSImport("foo.js")
object apply extends js.Object
""" hasErrors
"""
|newSource1.scala:6: error: Native JS definitions named 'apply' must have an explicit name in @JSImport
| @JSImport("foo.js")
| ^
|newSource1.scala:10: error: Native JS definitions named 'apply' must have an explicit name in @JSImport
| @JSImport("foo.js")
| ^
"""
"""
object A {
@js.native
@JSGlobal
class apply extends js.Object
@js.native
@JSGlobal
object apply extends js.Object
}
""" hasErrors
"""
|newSource1.scala:7: error: Native JS definitions named 'apply' must have an explicit name in @JSGlobal
| @JSGlobal
| ^
|newSource1.scala:11: error: Native JS definitions named 'apply' must have an explicit name in @JSGlobal
| @JSGlobal
| ^
"""
"""
object A {
@js.native
@JSImport("foo.js")
class apply extends js.Object
@js.native
@JSImport("foo.js")
object apply extends js.Object
}
""" hasErrors
"""
|newSource1.scala:7: error: Native JS definitions named 'apply' must have an explicit name in @JSImport
| @JSImport("foo.js")
| ^
|newSource1.scala:11: error: Native JS definitions named 'apply' must have an explicit name in @JSImport
| @JSImport("foo.js")
| ^
"""
"""
package object A {
@js.native
@JSGlobal
class apply extends js.Object
@js.native
@JSGlobal
object apply extends js.Object
}
""" hasErrors
"""
|newSource1.scala:7: error: Native JS definitions named 'apply' must have an explicit name in @JSGlobal
| @JSGlobal
| ^
|newSource1.scala:11: error: Native JS definitions named 'apply' must have an explicit name in @JSGlobal
| @JSGlobal
| ^
"""
"""
package object A {
@js.native
@JSImport("foo.js")
class apply extends js.Object
@js.native
@JSImport("foo.js")
object apply extends js.Object
}
""" hasErrors
"""
|newSource1.scala:7: error: Native JS definitions named 'apply' must have an explicit name in @JSImport
| @JSImport("foo.js")
| ^
|newSource1.scala:11: error: Native JS definitions named 'apply' must have an explicit name in @JSImport
| @JSImport("foo.js")
| ^
"""
"""
object A {
@js.native
@JSGlobal
val apply: Int = js.native
}
""" hasErrors
"""
|newSource1.scala:7: error: Native JS definitions named 'apply' must have an explicit name in @JSGlobal
| @JSGlobal
| ^
"""
"""
object A {
@js.native
@JSImport("foo.js")
val apply: Int = js.native
}
""" hasErrors
"""
|newSource1.scala:7: error: Native JS definitions named 'apply' must have an explicit name in @JSImport
| @JSImport("foo.js")
| ^
"""
"""
object A {
@js.native
@JSGlobal
def apply: Int = js.native
}
""" hasErrors
"""
|newSource1.scala:7: error: Native JS definitions named 'apply' must have an explicit name in @JSGlobal
| @JSGlobal
| ^
"""
"""
object A {
@js.native
@JSImport("foo.js")
def apply: Int = js.native
}
""" hasErrors
"""
|newSource1.scala:7: error: Native JS definitions named 'apply' must have an explicit name in @JSImport
| @JSImport("foo.js")
| ^
"""
"""
object A {
@js.native
@JSGlobal
def apply(x: Int): Int = js.native
}
""" hasErrors
"""
|newSource1.scala:7: error: Native JS definitions named 'apply' must have an explicit name in @JSGlobal
| @JSGlobal
| ^
"""
"""
object A {
@js.native
@JSImport("foo.js")
def apply(x: Int): Int = js.native
}
""" hasErrors
"""
|newSource1.scala:7: error: Native JS definitions named 'apply' must have an explicit name in @JSImport
| @JSImport("foo.js")
| ^
"""
"""
@JSGlobal("apply")
@js.native
class apply extends js.Object
@JSGlobal("apply")
@js.native
object apply extends js.Object
object A {
@JSGlobal("apply")
@js.native
class apply extends js.Object
@JSGlobal("apply")
@js.native
object apply extends js.Object
}
object B {
@JSGlobal("apply")
@js.native
val apply: Int = js.native
}
object C {
@JSGlobal("apply")
@js.native
def apply: Int = js.native
}
object D {
@JSGlobal("apply")
@js.native
def apply(x: Int): Int = js.native
}
""".hasNoWarns()
"""
@JSImport("foo.js", "apply")
@js.native
class apply extends js.Object
@JSImport("foo.js", "apply")
@js.native
object apply extends js.Object
object A {
@JSImport("foo.js", "apply")
@js.native
class apply extends js.Object
@JSImport("foo.js", "apply")
@js.native
object apply extends js.Object
}
object B {
@JSImport("foo.js", "apply")
@js.native
val apply: Int = js.native
}
object C {
@JSImport("foo.js", "apply")
@js.native
def apply: Int = js.native
}
object D {
@JSImport("foo.js", "apply")
@js.native
def apply(x: Int): Int = js.native
}
""".hasNoWarns()
"""
@JSImport("foo.js", "apply", globalFallback = "apply")
@js.native
class apply extends js.Object
@JSImport("foo.js", "apply", globalFallback = "apply")
@js.native
object apply extends js.Object
object A {
@JSImport("foo.js", "apply", globalFallback = "apply")
@js.native
class apply extends js.Object
@JSImport("foo.js", "apply", globalFallback = "apply")
@js.native
object apply extends js.Object
}
""".hasNoWarns()
}
@Test def noNativeDefinitionWithSetterNameWithoutExplicitName: Unit = {
"""
@js.native
@JSGlobal
class foo_= extends js.Object
@js.native
@JSGlobal
object foo_= extends js.Object
""" hasErrors
"""
|newSource1.scala:6: error: Native JS definitions with a name ending in '_=' must have an explicit name in @JSGlobal
| @JSGlobal
| ^
|newSource1.scala:10: error: Native JS definitions with a name ending in '_=' must have an explicit name in @JSGlobal
| @JSGlobal
| ^
"""
"""
@js.native
@JSImport("foo.js")
class foo_= extends js.Object
@js.native
@JSImport("foo.js")
object foo_= extends js.Object
""" hasErrors
"""
|newSource1.scala:6: error: Native JS definitions with a name ending in '_=' must have an explicit name in @JSImport
| @JSImport("foo.js")
| ^
|newSource1.scala:10: error: Native JS definitions with a name ending in '_=' must have an explicit name in @JSImport
| @JSImport("foo.js")
| ^
"""
"""
object A {
@js.native
@JSGlobal
class foo_= extends js.Object
@js.native
@JSGlobal
object foo_= extends js.Object
}
""" hasErrors
"""
|newSource1.scala:7: error: Native JS definitions with a name ending in '_=' must have an explicit name in @JSGlobal
| @JSGlobal
| ^
|newSource1.scala:11: error: Native JS definitions with a name ending in '_=' must have an explicit name in @JSGlobal
| @JSGlobal
| ^
"""
"""
object A {
@js.native
@JSImport("foo.js")
class foo_= extends js.Object
@js.native
@JSImport("foo.js")
object foo_= extends js.Object
}
""" hasErrors
"""
|newSource1.scala:7: error: Native JS definitions with a name ending in '_=' must have an explicit name in @JSImport
| @JSImport("foo.js")
| ^
|newSource1.scala:11: error: Native JS definitions with a name ending in '_=' must have an explicit name in @JSImport
| @JSImport("foo.js")
| ^
"""
"""
package object A {
@js.native
@JSGlobal
class foo_= extends js.Object
@js.native
@JSGlobal
object foo_= extends js.Object
}
""" hasErrors
"""
|newSource1.scala:7: error: Native JS definitions with a name ending in '_=' must have an explicit name in @JSGlobal
| @JSGlobal
| ^
|newSource1.scala:11: error: Native JS definitions with a name ending in '_=' must have an explicit name in @JSGlobal
| @JSGlobal
| ^
"""
"""
package object A {
@js.native
@JSImport("foo.js")
class foo_= extends js.Object
@js.native
@JSImport("foo.js")
object foo_= extends js.Object
}
""" hasErrors
"""
|newSource1.scala:7: error: Native JS definitions with a name ending in '_=' must have an explicit name in @JSImport
| @JSImport("foo.js")
| ^
|newSource1.scala:11: error: Native JS definitions with a name ending in '_=' must have an explicit name in @JSImport
| @JSImport("foo.js")
| ^
"""
// containsErrors because some versions of the compiler use `_=` and some use `_=' (notice the quotes)
"""
object A {
@js.native
@JSGlobal
val foo_= : Int = js.native
}
""" containsErrors
"""
|newSource1.scala:8: error: Names of vals or vars may not end in `_=
"""
// containsErrors because some versions of the compiler use `_=` and some use `_=' (notice the quotes)
"""
object A {
@js.native
@JSImport("foo.js")
val foo_= : Int = js.native
}
""" containsErrors
"""
|newSource1.scala:8: error: Names of vals or vars may not end in `_=
"""
// containsErrors because some versions of the compiler use `_=` and some use `_=' (notice the quotes)
"""
object A {
@js.native
@JSGlobal
var foo_= : Int = js.native
}
""" containsErrors
"""
|newSource1.scala:8: error: Names of vals or vars may not end in `_=
"""
"""
object A {
@js.native
@JSGlobal
def foo_= : Int = js.native
}
""" hasErrors
"""
|newSource1.scala:7: error: Native JS definitions with a name ending in '_=' must have an explicit name in @JSGlobal
| @JSGlobal
| ^
|newSource1.scala:8: error: @js.native is not allowed on vars, lazy vals and setter defs
| def foo_= : Int = js.native
| ^
"""
"""
object A {
@js.native
@JSGlobal("foo")
def foo_= : Int = js.native
}
""" hasErrors
"""
|newSource1.scala:8: error: @js.native is not allowed on vars, lazy vals and setter defs
| def foo_= : Int = js.native
| ^
"""
"""
object A {
@js.native
@JSImport("foo.js")
def foo_= : Int = js.native
}
""" hasErrors
"""
|newSource1.scala:7: error: Native JS definitions with a name ending in '_=' must have an explicit name in @JSImport
| @JSImport("foo.js")
| ^
|newSource1.scala:8: error: @js.native is not allowed on vars, lazy vals and setter defs
| def foo_= : Int = js.native
| ^
"""
"""
object A {
@js.native
@JSImport("foo.js", "foo")
def foo_= : Int = js.native
}
""" hasErrors
"""
|newSource1.scala:8: error: @js.native is not allowed on vars, lazy vals and setter defs
| def foo_= : Int = js.native
| ^
"""
"""
object A {
@js.native
@JSGlobal
def foo_=(x: Int): Int = js.native
}
""" hasErrors
"""
|newSource1.scala:7: error: Native JS definitions with a name ending in '_=' must have an explicit name in @JSGlobal
| @JSGlobal
| ^
|newSource1.scala:8: error: @js.native is not allowed on vars, lazy vals and setter defs
| def foo_=(x: Int): Int = js.native
| ^
"""
"""
object A {
@js.native
@JSGlobal("foo")
def foo_=(x: Int): Int = js.native
}
""" hasErrors
"""
|newSource1.scala:8: error: @js.native is not allowed on vars, lazy vals and setter defs
| def foo_=(x: Int): Int = js.native
| ^
"""
"""
object A {
@js.native
@JSImport("foo.js")
def foo_=(x: Int): Int = js.native
}
""" hasErrors
"""
|newSource1.scala:7: error: Native JS definitions with a name ending in '_=' must have an explicit name in @JSImport
| @JSImport("foo.js")
| ^
|newSource1.scala:8: error: @js.native is not allowed on vars, lazy vals and setter defs
| def foo_=(x: Int): Int = js.native
| ^
"""
"""
object A {
@js.native
@JSImport("foo.js", "foo")
def foo_=(x: Int): Int = js.native
}
""" hasErrors
"""
|newSource1.scala:8: error: @js.native is not allowed on vars, lazy vals and setter defs
| def foo_=(x: Int): Int = js.native
| ^
"""
"""
@JSGlobal("foo")
@js.native
class foo_= extends js.Object
@JSGlobal("foo")
@js.native
object foo_= extends js.Object
object A {
@JSGlobal("foo")
@js.native
class foo_= extends js.Object
@JSGlobal("foo")
@js.native
object foo_= extends js.Object
}
""".hasNoWarns()
"""
@JSImport("foo.js", "foo_=")
@js.native
class foo_= extends js.Object
@JSImport("foo.js", "foo_=")
@js.native
object foo_= extends js.Object
object A {
@JSImport("foo.js", "foo_=")
@js.native
class foo_= extends js.Object
@JSImport("foo.js", "foo_=")
@js.native
object foo_= extends js.Object
}
""".hasNoWarns()
"""
@JSImport("foo.js", "foo_=", globalFallback = "foo")
@js.native
class foo_= extends js.Object
@JSImport("foo.js", "foo_=", globalFallback = "foo")
@js.native
object foo_= extends js.Object
object A {
@JSImport("foo.js", "foo_=", globalFallback = "foo")
@js.native
class foo_= extends js.Object
@JSImport("foo.js", "foo_=", globalFallback = "foo")
@js.native
object foo_= extends js.Object
}
""".hasNoWarns()
}
@Test def noNonLiteralJSName: Unit = {
"""
import js.annotation.JSName
object A {
val a = "Hello"
final val b = "World"
}
@js.native
@JSGlobal
class B extends js.Object {
@JSName(A.a)
def foo: Int = js.native
@JSName(A.b)
def bar: Int = js.native
}
""" hasErrors
"""
|newSource1.scala:15: error: A string argument to JSName must be a literal string
| @JSName(A.a)
| ^
"""
}
@Test def noNonStaticStableJSNameSymbol: Unit = {
"""
import js.annotation.JSName
class A {
val a = js.Symbol("foo")
}
@js.native
@JSGlobal
class B extends js.Object {
@JSName(js.Symbol())
def foo: Int = js.native
@JSName(new A().a)
def bar: Int = js.native
}
class C extends js.Object {
@JSName(js.Symbol())
def foo: Int = js.native
@JSName(new A().a)
def bar: Int = js.native
}
""" hasErrors
"""
|newSource1.scala:14: error: A js.Symbol argument to JSName must be a static, stable identifier
| @JSName(js.Symbol())
| ^
|newSource1.scala:16: error: A js.Symbol argument to JSName must be a static, stable identifier
| @JSName(new A().a)
| ^
|newSource1.scala:21: error: A js.Symbol argument to JSName must be a static, stable identifier
| @JSName(js.Symbol())
| ^
|newSource1.scala:23: error: A js.Symbol argument to JSName must be a static, stable identifier
| @JSName(new A().a)
| ^
"""
}
@Test def noSelfReferenceJSNameSymbol: Unit = {
"""
object A extends js.Object {
val a = js.Symbol("foo")
@JSName(a)
def foo: Int = 1
}
""" hasWarns
"""
|newSource1.scala:8: warning: This symbol is defined in the same object as the annotation's target. This will cause a stackoverflow at runtime
| @JSName(a)
| ^
"""
// Native objects are OK, since we do not control definition order.
"""
@JSGlobal
@js.native
object A extends js.Object {
val a: js.Symbol = js.native
@JSName(a)
def foo: Int = js.native
}
""".succeeds()
}
@Test def noJSGlobalOnMembersOfClassesAndTraits: Unit = {
for (outer <- Seq("class", "trait")) {
s"""
@js.native ${if (outer == "trait") "" else "@JSGlobal"}
$outer Foo extends js.Object {
@JSGlobal("bar1")
val bar1: Int = js.native
@JSGlobal("bar2")
var bar2: Int = js.native
@JSGlobal("bar3")
def bar3: Int = js.native
@js.native
@JSGlobal("Inner")
class Inner extends js.Object
@js.native
@JSGlobal("Inner")
object Inner extends js.Object
@js.native
@JSGlobal
class InnerImplied extends js.Object
@js.native
@JSGlobal
object InnerImplied extends js.Object
}
""" hasErrors
"""
|newSource1.scala:7: error: @JSGlobal can only be used on native JS definitions (with @js.native).
| @JSGlobal("bar1")
| ^
|newSource1.scala:9: error: @JSGlobal can only be used on native JS definitions (with @js.native).
| @JSGlobal("bar2")
| ^
|newSource1.scala:11: error: @JSGlobal can only be used on native JS definitions (with @js.native).
| @JSGlobal("bar3")
| ^
|newSource1.scala:15: error: Nested JS classes and objects cannot have an @JSGlobal annotation.
| @JSGlobal("Inner")
| ^
|newSource1.scala:19: error: Nested JS classes and objects cannot have an @JSGlobal annotation.
| @JSGlobal("Inner")
| ^
|newSource1.scala:23: error: Nested JS classes and objects cannot have an @JSGlobal annotation.
| @JSGlobal
| ^
|newSource1.scala:27: error: Nested JS classes and objects cannot have an @JSGlobal annotation.
| @JSGlobal
| ^
"""
}
}
@Test def noJSGlobalOnMembersOfObjects: Unit = {
s"""
@js.native @JSGlobal
object Foo extends js.Object {
@JSGlobal("bar1")
val bar1: Int = js.native
@JSGlobal("bar2")
var bar2: Int = js.native
@JSGlobal("bar3")
def bar3: Int = js.native
@js.native
@JSGlobal("Inner")
class Inner extends js.Object
@js.native
@JSGlobal("Inner")
object Inner extends js.Object
@js.native
@JSGlobal
class InnerImplied extends js.Object
@js.native
@JSGlobal
object InnerImplied extends js.Object
}
""" hasErrors
"""
|newSource1.scala:7: error: @JSGlobal can only be used on native JS definitions (with @js.native).
| @JSGlobal("bar1")
| ^
|newSource1.scala:9: error: @JSGlobal can only be used on native JS definitions (with @js.native).
| @JSGlobal("bar2")
| ^
|newSource1.scala:11: error: @JSGlobal can only be used on native JS definitions (with @js.native).
| @JSGlobal("bar3")
| ^
|newSource1.scala:15: error: Nested JS classes and objects cannot have an @JSGlobal annotation.
| @JSGlobal("Inner")
| ^
|newSource1.scala:19: error: Nested JS classes and objects cannot have an @JSGlobal annotation.
| @JSGlobal("Inner")
| ^
|newSource1.scala:23: error: Nested JS classes and objects cannot have an @JSGlobal annotation.
| @JSGlobal
| ^
|newSource1.scala:27: error: Nested JS classes and objects cannot have an @JSGlobal annotation.
| @JSGlobal
| ^
"""
}
@Test def noJSImportOnMembersOfClassesAndTraits: Unit = {
for {
outer <- Seq("class", "trait")
fallbackStr <- Seq("", ", globalFallback = \"Foo\"")
} {
s"""
@js.native ${if (outer == "trait") "" else "@JSGlobal"}
$outer Foo extends js.Object {
@JSImport("bar1", JSImport.Namespace$fallbackStr)
val bar1: Int = js.native
@JSImport("bar2", JSImport.Namespace$fallbackStr)
var bar2: Int = js.native
@JSImport("bar3", JSImport.Namespace$fallbackStr)
def bar3: Int = js.native
@js.native
@JSImport("Inner", JSImport.Namespace$fallbackStr)
class Inner extends js.Object
@js.native
@JSImport("Inner", JSImport.Namespace$fallbackStr)
object Inner extends js.Object
}
""" hasErrors
s"""
|newSource1.scala:7: error: @JSImport can only be used on native JS definitions (with @js.native).
| @JSImport("bar1", JSImport.Namespace$fallbackStr)
| ^
|newSource1.scala:9: error: @JSImport can only be used on native JS definitions (with @js.native).
| @JSImport("bar2", JSImport.Namespace$fallbackStr)
| ^
|newSource1.scala:11: error: @JSImport can only be used on native JS definitions (with @js.native).
| @JSImport("bar3", JSImport.Namespace$fallbackStr)
| ^
|newSource1.scala:15: error: Nested JS classes and objects cannot have an @JSImport annotation.
| @JSImport("Inner", JSImport.Namespace$fallbackStr)
| ^
|newSource1.scala:19: error: Nested JS classes and objects cannot have an @JSImport annotation.
| @JSImport("Inner", JSImport.Namespace$fallbackStr)
| ^
"""
}
}
@Test def noJSImportOnMembersOfObjects: Unit = {
for {
fallbackStr <- Seq("", ", globalFallback = \"Foo\"")
} {
s"""
@js.native @JSGlobal
object Foo extends js.Object {
@JSImport("bar1", JSImport.Namespace$fallbackStr)
val bar1: Int = js.native
@JSImport("bar2", JSImport.Namespace$fallbackStr)
var bar2: Int = js.native
@JSImport("bar3", JSImport.Namespace$fallbackStr)
def bar3: Int = js.native
@js.native
@JSImport("Inner", JSImport.Namespace$fallbackStr)
class Inner extends js.Object
@js.native
@JSImport("Inner", JSImport.Namespace$fallbackStr)
object Inner extends js.Object
}
""" hasErrors
s"""
|newSource1.scala:7: error: @JSImport can only be used on native JS definitions (with @js.native).
| @JSImport("bar1", JSImport.Namespace$fallbackStr)
| ^
|newSource1.scala:9: error: @JSImport can only be used on native JS definitions (with @js.native).
| @JSImport("bar2", JSImport.Namespace$fallbackStr)
| ^
|newSource1.scala:11: error: @JSImport can only be used on native JS definitions (with @js.native).
| @JSImport("bar3", JSImport.Namespace$fallbackStr)
| ^
|newSource1.scala:15: error: Nested JS classes and objects cannot have an @JSImport annotation.
| @JSImport("Inner", JSImport.Namespace$fallbackStr)
| ^
|newSource1.scala:19: error: Nested JS classes and objects cannot have an @JSImport annotation.
| @JSImport("Inner", JSImport.Namespace$fallbackStr)
| ^
"""
}
}
@Test def noNonLiteralJSGlobal: Unit = {
"""
object A {
val a = "Hello"
}
@JSGlobal(A.a)
@js.native
object B extends js.Object
@JSGlobal(A.a)
@js.native
class C extends js.Object
""" hasErrors
"""
|newSource1.scala:9: error: The argument to @JSGlobal must be a literal string.
| @JSGlobal(A.a)
| ^
|newSource1.scala:13: error: The argument to @JSGlobal must be a literal string.
| @JSGlobal(A.a)
| ^
"""
}
@Test def noNonJSIdentifierJSGlobal: Unit = {
"""
@js.native
@JSGlobal
class `not-a-valid-JS-identifier` extends js.Object
@js.native
@JSGlobal("not-a-valid-JS-identifier")
object A extends js.Object
@js.native
@JSGlobal("not-a-valid-JS-identifier.further")
object B extends js.Object
@js.native
@JSGlobal("TopLevel.not-a-valid-JS-identifier") // valid
object C extends js.Object
@js.native
@JSGlobal("")
object D extends js.Object
@js.native
@JSGlobal(".tricky")
object E extends js.Object
""" hasErrors
"""
|newSource1.scala:7: error: The name of a JS global variable must be a valid JS identifier (got 'not-a-valid-JS-identifier')
| class `not-a-valid-JS-identifier` extends js.Object
| ^
|newSource1.scala:11: error: The name of a JS global variable must be a valid JS identifier (got 'not-a-valid-JS-identifier')
| object A extends js.Object
| ^
|newSource1.scala:15: error: The name of a JS global variable must be a valid JS identifier (got 'not-a-valid-JS-identifier')
| object B extends js.Object
| ^
|newSource1.scala:23: error: The name of a JS global variable must be a valid JS identifier (got '')
| object D extends js.Object
| ^
|newSource1.scala:27: error: The name of a JS global variable must be a valid JS identifier (got '')
| object E extends js.Object
| ^
"""
"""
@js.native
@JSImport("foo.js", "foo", globalFallback = "not-a-valid-JS-identifier")
object A extends js.Object
@js.native
@JSImport("foo.js", "foo", globalFallback = "not-a-valid-JS-identifier.further")
object B extends js.Object
@js.native
@JSImport("foo.js", "foo", globalFallback = "TopLevel.not-a-valid-JS-identifier") // valid
object C extends js.Object
@js.native
@JSImport("foo.js", "foo", globalFallback = "")
object D extends js.Object
@js.native
@JSImport("foo.js", "foo", globalFallback = ".tricky")
object E extends js.Object
""" hasErrors
"""
|newSource1.scala:7: error: The name of a JS global variable must be a valid JS identifier (got 'not-a-valid-JS-identifier')
| object A extends js.Object
| ^
|newSource1.scala:11: error: The name of a JS global variable must be a valid JS identifier (got 'not-a-valid-JS-identifier')
| object B extends js.Object
| ^
|newSource1.scala:19: error: The name of a JS global variable must be a valid JS identifier (got '')
| object D extends js.Object
| ^
|newSource1.scala:23: error: The name of a JS global variable must be a valid JS identifier (got '')
| object E extends js.Object
| ^
"""
}
@Test def noNonLiteralJSImport: Unit = {
// Without global fallback
"""
object A {
val a = "Hello"
}
@JSImport(A.a, JSImport.Namespace)
@js.native
object B1 extends js.Object
@JSImport(A.a, "B2")
@js.native
object B2 extends js.Object
@JSImport("B3", A.a)
@js.native
object B3 extends js.Object
@JSImport(A.a, JSImport.Namespace)
@js.native
object C1 extends js.Object
@JSImport(A.a, "C2")
@js.native
object C2 extends js.Object
@JSImport("C3", A.a)
@js.native
object C3 extends js.Object
@JSImport(A.a, A.a)
@js.native
object D extends js.Object
""" hasErrors
"""
|newSource1.scala:9: error: The first argument to @JSImport must be a literal string.
| @JSImport(A.a, JSImport.Namespace)
| ^
|newSource1.scala:13: error: The first argument to @JSImport must be a literal string.
| @JSImport(A.a, "B2")
| ^
|newSource1.scala:17: error: The second argument to @JSImport must be literal string or the JSImport.Namespace object.
| @JSImport("B3", A.a)
| ^
|newSource1.scala:21: error: The first argument to @JSImport must be a literal string.
| @JSImport(A.a, JSImport.Namespace)
| ^
|newSource1.scala:25: error: The first argument to @JSImport must be a literal string.
| @JSImport(A.a, "C2")
| ^
|newSource1.scala:29: error: The second argument to @JSImport must be literal string or the JSImport.Namespace object.
| @JSImport("C3", A.a)
| ^
|newSource1.scala:33: error: The first argument to @JSImport must be a literal string.
| @JSImport(A.a, A.a)
| ^
|newSource1.scala:33: error: The second argument to @JSImport must be literal string or the JSImport.Namespace object.
| @JSImport(A.a, A.a)
| ^
"""
// With constant (valid) global fallback
"""
object A {
val a = "Hello"
}
@JSImport(A.a, JSImport.Namespace, globalFallback = "GlobB1")
@js.native
object B1 extends js.Object
@JSImport(A.a, "B2", globalFallback = "GlobB2")
@js.native
object B2 extends js.Object
@JSImport("B3", A.a, globalFallback = "GlobB3")
@js.native
object B3 extends js.Object
@JSImport(A.a, JSImport.Namespace, globalFallback = "GlobC1")
@js.native
object C1 extends js.Object
@JSImport(A.a, "C2", globalFallback = "GlobC2")
@js.native
object C2 extends js.Object
@JSImport("C3", A.a, globalFallback = "GlobC3")
@js.native
object C3 extends js.Object
@JSImport(A.a, A.a, globalFallback = "GlobD")
@js.native
object D extends js.Object
""" hasErrors
"""
|newSource1.scala:9: error: The first argument to @JSImport must be a literal string.
| @JSImport(A.a, JSImport.Namespace, globalFallback = "GlobB1")
| ^
|newSource1.scala:13: error: The first argument to @JSImport must be a literal string.
| @JSImport(A.a, "B2", globalFallback = "GlobB2")
| ^
|newSource1.scala:17: error: The second argument to @JSImport must be literal string or the JSImport.Namespace object.
| @JSImport("B3", A.a, globalFallback = "GlobB3")
| ^
|newSource1.scala:21: error: The first argument to @JSImport must be a literal string.
| @JSImport(A.a, JSImport.Namespace, globalFallback = "GlobC1")
| ^
|newSource1.scala:25: error: The first argument to @JSImport must be a literal string.
| @JSImport(A.a, "C2", globalFallback = "GlobC2")
| ^
|newSource1.scala:29: error: The second argument to @JSImport must be literal string or the JSImport.Namespace object.
| @JSImport("C3", A.a, globalFallback = "GlobC3")
| ^
|newSource1.scala:33: error: The first argument to @JSImport must be a literal string.
| @JSImport(A.a, A.a, globalFallback = "GlobD")
| ^
|newSource1.scala:33: error: The second argument to @JSImport must be literal string or the JSImport.Namespace object.
| @JSImport(A.a, A.a, globalFallback = "GlobD")
| ^
"""
// With variable (invalid) global fallback
"""
object A {
val a = "Hello"
}
@JSImport(A.a, JSImport.Namespace, globalFallback = A.a)
@js.native
object B1 extends js.Object
@JSImport(A.a, "B2", globalFallback = A.a)
@js.native
object B2 extends js.Object
@JSImport("B3", A.a, globalFallback = A.a)
@js.native
object B3 extends js.Object
@JSImport(A.a, JSImport.Namespace, globalFallback = A.a)
@js.native
object C1 extends js.Object
@JSImport(A.a, "C2", globalFallback = A.a)
@js.native
object C2 extends js.Object
@JSImport("C3", A.a, globalFallback = A.a)
@js.native
object C3 extends js.Object
@JSImport(A.a, A.a, globalFallback = A.a)
@js.native
object D extends js.Object
""" hasErrors
"""
|newSource1.scala:9: error: The first argument to @JSImport must be a literal string.
| @JSImport(A.a, JSImport.Namespace, globalFallback = A.a)
| ^
|newSource1.scala:9: error: The third argument to @JSImport, when present, must be a literal string.
| @JSImport(A.a, JSImport.Namespace, globalFallback = A.a)
| ^
|newSource1.scala:13: error: The first argument to @JSImport must be a literal string.
| @JSImport(A.a, "B2", globalFallback = A.a)
| ^
|newSource1.scala:13: error: The third argument to @JSImport, when present, must be a literal string.
| @JSImport(A.a, "B2", globalFallback = A.a)
| ^
|newSource1.scala:17: error: The second argument to @JSImport must be literal string or the JSImport.Namespace object.
| @JSImport("B3", A.a, globalFallback = A.a)
| ^
|newSource1.scala:17: error: The third argument to @JSImport, when present, must be a literal string.
| @JSImport("B3", A.a, globalFallback = A.a)
| ^
|newSource1.scala:21: error: The first argument to @JSImport must be a literal string.
| @JSImport(A.a, JSImport.Namespace, globalFallback = A.a)
| ^
|newSource1.scala:21: error: The third argument to @JSImport, when present, must be a literal string.
| @JSImport(A.a, JSImport.Namespace, globalFallback = A.a)
| ^
|newSource1.scala:25: error: The first argument to @JSImport must be a literal string.
| @JSImport(A.a, "C2", globalFallback = A.a)
| ^
|newSource1.scala:25: error: The third argument to @JSImport, when present, must be a literal string.
| @JSImport(A.a, "C2", globalFallback = A.a)
| ^
|newSource1.scala:29: error: The second argument to @JSImport must be literal string or the JSImport.Namespace object.
| @JSImport("C3", A.a, globalFallback = A.a)
| ^
|newSource1.scala:29: error: The third argument to @JSImport, when present, must be a literal string.
| @JSImport("C3", A.a, globalFallback = A.a)
| ^
|newSource1.scala:33: error: The first argument to @JSImport must be a literal string.
| @JSImport(A.a, A.a, globalFallback = A.a)
| ^
|newSource1.scala:33: error: The second argument to @JSImport must be literal string or the JSImport.Namespace object.
| @JSImport(A.a, A.a, globalFallback = A.a)
| ^
|newSource1.scala:33: error: The third argument to @JSImport, when present, must be a literal string.
| @JSImport(A.a, A.a, globalFallback = A.a)
| ^
"""
}
@Test def noApplyProperty: Unit = {
// def apply
"""
@js.native
trait A extends js.Object {
def apply: Int = js.native
}
""" hasErrors
"""
|newSource1.scala:7: error: A member named apply represents function application in JavaScript. A parameterless member should be exported as a property. You must add @JSName("apply")
| def apply: Int = js.native
| ^
"""
"""
import js.annotation.JSName
@js.native
trait A extends js.Object {
@JSName("apply")
def apply: Int = js.native
}
""".succeeds()
// val apply
"""
@js.native
trait A extends js.Object {
val apply: Int = js.native
}
""" hasErrors
"""
|newSource1.scala:7: error: A member named apply represents function application in JavaScript. A parameterless member should be exported as a property. You must add @JSName("apply")
| val apply: Int = js.native
| ^
"""
"""
import js.annotation.JSName
@js.native
trait A extends js.Object {
@JSName("apply")
val apply: Int = js.native
}
""".succeeds()
// var apply
"""
@js.native
trait A extends js.Object {
var apply: Int = js.native
}
""" hasErrors
"""
|newSource1.scala:7: error: A member named apply represents function application in JavaScript. A parameterless member should be exported as a property. You must add @JSName("apply")
| var apply: Int = js.native
| ^
"""
"""
import js.annotation.JSName
@js.native
trait A extends js.Object {
@JSName("apply")
var apply: Int = js.native
}
""".succeeds()
}
@Test def noAbstractLocalJSClass: Unit = {
"""
object Enclosing {
def method(): Unit = {
abstract class AbstractLocalJSClass extends js.Object
}
}
""" hasErrors
"""
|newSource1.scala:7: error: Implementation restriction: local JS classes cannot be abstract
| abstract class AbstractLocalJSClass extends js.Object
| ^
"""
}
@Test def noLoadJSConstructorOfUnstableRef: Unit = {
"""
class Enclosing {
class InnerJSClass extends js.Object
}
object A {
def method(): Any =
js.constructorOf[Enclosing#InnerJSClass]
}
""" hasErrors
"""
|newSource1.scala:11: error: stable reference to a JS class required but Enclosing#InnerJSClass found
| js.constructorOf[Enclosing#InnerJSClass]
| ^
"""
// version-dependent error message due to https://github.com/scala/bug/issues/10619
"""
class Enclosing {
class InnerJSClass extends js.Object
}
object A {
def newEnclosing: Enclosing = new Enclosing
def method(): Any =
js.constructorOf[newEnclosing.InnerJSClass]
}
""".fails()
"""
class Enclosing {
class InnerJSClass extends js.Object
}
object A {
def method(a: Any): Boolean =
a.isInstanceOf[Enclosing#InnerJSClass]
}
""" hasErrors
"""
|newSource1.scala:11: error: stable reference to a JS class required but Enclosing#InnerJSClass found
| a.isInstanceOf[Enclosing#InnerJSClass]
| ^
"""
// version-dependent error message due to https://github.com/scala/bug/issues/10619
"""
class Enclosing {
class InnerJSClass extends js.Object
}
object A {
def newEnclosing: Enclosing = new Enclosing
def method(a: Any): Boolean =
a.isInstanceOf[newEnclosing.InnerJSClass]
}
""".fails()
}
@Test def noJSSymbolNameOnNestedNativeClassesAndObjects: Unit = {
for {
kind <- Seq("class", "object")
} {
s"""
object Sym {
val sym = js.Symbol()
}
@js.native
@JSGlobal
object Enclosing extends js.Object {
@JSName(Sym.sym)
@js.native
$kind A extends js.Object
}
""" hasErrors
s"""
|newSource1.scala:12: error: Implementation restriction: @JSName with a js.Symbol is not supported on nested native classes and objects
| @JSName(Sym.sym)
| ^
"""
}
}
@Test def noBracketCallOrBracketAccessOnJSClasses: Unit = {
    // Native
"""
@js.native
@JSGlobal
@JSBracketCall
class A extends js.Object
@js.native
@JSGlobal
@JSBracketAccess
object B extends js.Object
""" hasErrors
"""
|newSource1.scala:8: error: @JSBracketCall is not allowed on JS classes and objects
| class A extends js.Object
| ^
|newSource1.scala:13: error: @JSBracketAccess is not allowed on JS classes and objects
| object B extends js.Object
| ^
"""
// Non-native
"""
@JSBracketCall
class A extends js.Object
@JSBracketAccess
object B extends js.Object
""" hasErrors
"""
|newSource1.scala:6: error: @JSBracketCall is not allowed on JS classes and objects
| class A extends js.Object
| ^
|newSource1.scala:9: error: @JSBracketAccess is not allowed on JS classes and objects
| object B extends js.Object
| ^
"""
// Nested native
"""
@js.native
@JSGlobal
object Enclosing extends js.Object {
@JSBracketCall
@js.native
class A extends js.Object
@JSBracketAccess
@js.native
object B extends js.Object
}
""" hasErrors
"""
|newSource1.scala:10: error: @JSBracketCall is not allowed on JS classes and objects
| class A extends js.Object
| ^
|newSource1.scala:14: error: @JSBracketAccess is not allowed on JS classes and objects
| object B extends js.Object
| ^
"""
// Nested non-native
"""
object Enclosing extends js.Object {
@JSBracketCall
object A extends js.Object
@JSBracketAccess
class B extends js.Object
}
""" hasErrors
"""
|newSource1.scala:7: error: @JSBracketCall is not allowed on JS classes and objects
| object A extends js.Object
| ^
|newSource1.scala:10: error: @JSBracketAccess is not allowed on JS classes and objects
| class B extends js.Object
| ^
"""
}
@Test def noDuplicateJSNameAnnotOnMember: Unit = {
for {
kind <- Seq("class", "object", "trait")
} {
"""
object A {
val a = js.Symbol()
}
@js.native
@JSGlobal
class A extends js.Object {
@JSName(A.a)
@JSName("foo")
def a: Int = js.native
}
""" hasErrors
"""
|newSource1.scala:13: error: A member can have at most one annotation among @JSName, @JSBracketAccess and @JSBracketCall.
| @JSName("foo")
| ^
"""
}
}
@Test def nonNativeJSTypesNameOverrideErrors: Unit = {
"""
abstract class A extends js.Object {
def bar(): Int
}
class B extends A {
override def bar() = 1
}
""".hasNoWarns()
"""
trait A extends js.Object {
@JSName("foo")
def bar(): Int
}
class B extends A {
@JSName("foo")
override def bar() = 1
}
""".hasNoWarns()
"""
abstract class A extends js.Object {
@JSName("foo")
def bar(): Int
}
class B extends A {
@JSName("foo")
override def bar() = 1
}
""".hasNoWarns()
// #4375
"""
abstract class Parent extends js.Object {
type TypeMember <: CharSequence
type JSTypeMember <: js.Object
type Foo = Int
@JSName("Babar") def Bar: Int = 5
}
class Child extends Parent {
type TypeMember = String
override type JSTypeMember = js.Date // the override keyword makes no difference
@JSName("Foobar") def Foo: Int = 5
type Bar = Int
}
""".hasNoWarns()
"""
abstract class A extends js.Object {
@JSName("foo")
def bar(): Int
}
class B extends A {
@JSName("baz")
override def bar() = 1
}
""" hasErrors
"""
|newSource1.scala:11: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|override def bar(): Int in class B called from JS as method 'baz'
| is conflicting with
|def bar(): Int in class A called from JS as method 'foo'
|
| override def bar() = 1
| ^
"""
"""
abstract class A extends js.Object {
@JSName("foo")
def bar(): Int
}
class B extends A {
override def bar() = 1
}
""" hasErrors
"""
|newSource1.scala:10: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|override def bar(): Int in class B called from JS as method 'bar'
| is conflicting with
|def bar(): Int in class A called from JS as method 'foo'
|
| override def bar() = 1
| ^
"""
"""
abstract class A extends js.Object {
@JSName("foo")
def bar(): Object
}
abstract class B extends A {
override def bar(): String
}
class C extends B {
override def bar() = "1"
}
""" hasErrors
"""
|newSource1.scala:10: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|override def bar(): String in class B called from JS as method 'bar'
| is conflicting with
|def bar(): Object in class A called from JS as method 'foo'
|
| override def bar(): String
| ^
|newSource1.scala:13: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|override def bar(): String in class C called from JS as method 'bar'
| is conflicting with
|def bar(): Object in class A called from JS as method 'foo'
|
| override def bar() = "1"
| ^
"""
"""
abstract class A extends js.Object {
def bar(): Object
}
abstract class B extends A {
@JSName("foo")
override def bar(): String
}
class C extends B {
override def bar() = "1"
}
""" hasErrors
"""
|newSource1.scala:10: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|override def bar(): String in class B called from JS as method 'foo'
| is conflicting with
|def bar(): Object in class A called from JS as method 'bar'
|
| override def bar(): String
| ^
|newSource1.scala:13: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|override def bar(): String in class C called from JS as method 'bar'
| is conflicting with
|override def bar(): String in class B called from JS as method 'foo'
|
| override def bar() = "1"
| ^
"""
"""
class A extends js.Object {
def foo: Int = 5
}
trait B extends A {
@JSName("bar")
def foo: Int
}
class C extends B
""" hasErrors
s"""
|newSource1.scala:10: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|def foo: Int in class A called from JS as property 'foo'
| is conflicting with
|def foo: Int in trait B called from JS as property 'bar'
|
| def foo: Int
| ^
|${ifHasNewRefChecks("""
|newSource1.scala:12: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|def foo: Int in class A called from JS as property 'foo'
| is conflicting with
|def foo: Int in trait B called from JS as property 'bar'
|
| class C extends B
| ^
""")}
"""
"""
class A extends js.Object {
@JSName("bar")
def foo: Int = 5
}
trait B extends A {
def foo: Int
}
class C extends B
""" hasErrors
s"""
|newSource1.scala:10: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|def foo: Int in class A called from JS as property 'bar'
| is conflicting with
|def foo: Int in trait B called from JS as property 'foo'
|
| def foo: Int
| ^
|${ifHasNewRefChecks("""
|newSource1.scala:12: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|def foo: Int in class A called from JS as property 'bar'
| is conflicting with
|def foo: Int in trait B called from JS as property 'foo'
|
| class C extends B
| ^
""")}
"""
"""
class A[T] extends js.Object {
@JSName("bar")
def foo(x: T): T = x
}
class B extends A[Int] {
override def foo(x: Int): Int = x
}
""" hasErrors
"""
|newSource1.scala:10: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|override def foo(x: Int): Int in class B called from JS as method 'foo'
| is conflicting with
|def foo(x: Int): Int in class A called from JS as method 'bar'
|
| override def foo(x: Int): Int = x
| ^
"""
"""
trait A[T] extends js.Object {
@JSName("bar")
def foo(x: T): T
}
class B extends A[Int] {
override def foo(x: Int): Int = x
}
""" hasErrors
"""
|newSource1.scala:10: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|override def foo(x: Int): Int in class B called from JS as method 'foo'
| is conflicting with
|def foo(x: Int): Int in trait A called from JS as method 'bar'
|
| override def foo(x: Int): Int = x
| ^
"""
"""
class A[T] extends js.Object {
@JSName("bar")
def foo(x: T): T = x
}
trait B extends A[Int] {
def foo(x: Int): Int
}
class C extends B {
override def foo(x: Int): Int = x
}
""" hasErrors
"""
|newSource1.scala:10: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|def foo(x: Int): Int in class A called from JS as method 'bar'
| is conflicting with
|def foo(x: Int): Int in trait B called from JS as method 'foo'
|
| def foo(x: Int): Int
| ^
|newSource1.scala:13: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|override def foo(x: Int): Int in class C called from JS as method 'foo'
| is conflicting with
|def foo(x: Int): Int in class A called from JS as method 'bar'
|
| override def foo(x: Int): Int = x
| ^
"""
"""
class A[T] extends js.Object {
def foo(x: T): T = x
}
trait B extends A[Int] {
@JSName("bar")
def foo(x: Int): Int
}
class C extends B {
override def foo(x: Int): Int = x
}
""" hasErrors
"""
|newSource1.scala:10: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|def foo(x: Int): Int in class A called from JS as method 'foo'
| is conflicting with
|def foo(x: Int): Int in trait B called from JS as method 'bar'
|
| def foo(x: Int): Int
| ^
|newSource1.scala:13: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|override def foo(x: Int): Int in class C called from JS as method 'foo'
| is conflicting with
|def foo(x: Int): Int in trait B called from JS as method 'bar'
|
| override def foo(x: Int): Int = x
| ^
"""
"""
trait A extends js.Object {
def foo: Int
}
trait B extends js.Object {
@JSName("bar")
def foo: Int
}
trait C extends A with B
""" hasErrors
"""
|newSource1.scala:12: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|def foo: Int in trait B called from JS as property 'bar'
| is conflicting with
|def foo: Int in trait A called from JS as property 'foo'
|
| trait C extends A with B
| ^
"""
"""
trait A extends js.Object {
def foo: Int
}
trait B extends js.Object {
@JSName("bar")
def foo: Int
}
abstract class C extends A with B
""" hasErrors
"""
|newSource1.scala:12: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|def foo: Int in trait B called from JS as property 'bar'
| is conflicting with
|def foo: Int in trait A called from JS as property 'foo'
|
| abstract class C extends A with B
| ^
"""
}
@Test def nonNativeJSTypesJSNameWithSymbolOverrideErrors: Unit = {
"""
object Syms {
val sym1 = js.Symbol()
}
trait A extends js.Object {
@JSName(Syms.sym1)
def bar(): Int
}
class B extends A {
@JSName(Syms.sym1)
override def bar() = 1
}
""".hasNoWarns()
"""
object Syms {
val sym1 = js.Symbol()
}
abstract class A extends js.Object {
@JSName(Syms.sym1)
def bar(): Int
}
class B extends A {
@JSName(Syms.sym1)
override def bar() = 1
}
""".hasNoWarns()
"""
object Syms {
val sym1 = js.Symbol()
val sym2 = js.Symbol()
}
abstract class A extends js.Object {
@JSName(Syms.sym1)
def bar(): Int
}
class B extends A {
@JSName(Syms.sym2)
override def bar() = 1
}
""" hasErrors
"""
|newSource1.scala:16: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|override def bar(): Int in class B called from JS as method 'Syms.sym2'
| is conflicting with
|def bar(): Int in class A called from JS as method 'Syms.sym1'
|
| override def bar() = 1
| ^
"""
"""
object Syms {
val sym1 = js.Symbol()
}
abstract class A extends js.Object {
@JSName(Syms.sym1)
def bar(): Int
}
class B extends A {
@JSName("baz")
override def bar() = 1
}
""" hasErrors
"""
|newSource1.scala:15: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|override def bar(): Int in class B called from JS as method 'baz'
| is conflicting with
|def bar(): Int in class A called from JS as method 'Syms.sym1'
|
| override def bar() = 1
| ^
"""
"""
object Syms {
val sym1 = js.Symbol()
}
abstract class A extends js.Object {
@JSName("foo")
def bar(): Int
}
class B extends A {
@JSName(Syms.sym1)
override def bar() = 1
}
""" hasErrors
"""
|newSource1.scala:15: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|override def bar(): Int in class B called from JS as method 'Syms.sym1'
| is conflicting with
|def bar(): Int in class A called from JS as method 'foo'
|
| override def bar() = 1
| ^
"""
"""
object Syms {
val sym1 = js.Symbol()
}
abstract class A extends js.Object {
@JSName(Syms.sym1)
def bar(): Int
}
class B extends A {
override def bar() = 1
}
""" hasErrors
"""
|newSource1.scala:14: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|override def bar(): Int in class B called from JS as method 'bar'
| is conflicting with
|def bar(): Int in class A called from JS as method 'Syms.sym1'
|
| override def bar() = 1
| ^
"""
"""
object Syms {
val sym1 = js.Symbol()
}
abstract class A extends js.Object {
@JSName(Syms.sym1)
def bar(): Object
}
abstract class B extends A {
override def bar(): String
}
class C extends B {
override def bar() = "1"
}
""" hasErrors
"""
|newSource1.scala:14: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|override def bar(): String in class B called from JS as method 'bar'
| is conflicting with
|def bar(): Object in class A called from JS as method 'Syms.sym1'
|
| override def bar(): String
| ^
|newSource1.scala:17: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|override def bar(): String in class C called from JS as method 'bar'
| is conflicting with
|def bar(): Object in class A called from JS as method 'Syms.sym1'
|
| override def bar() = "1"
| ^
"""
"""
object Syms {
val sym1 = js.Symbol()
}
abstract class A extends js.Object {
def bar(): Object
}
abstract class B extends A {
@JSName(Syms.sym1)
override def bar(): String
}
class C extends B {
override def bar() = "1"
}
""" hasErrors
"""
|newSource1.scala:14: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|override def bar(): String in class B called from JS as method 'Syms.sym1'
| is conflicting with
|def bar(): Object in class A called from JS as method 'bar'
|
| override def bar(): String
| ^
|newSource1.scala:17: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|override def bar(): String in class C called from JS as method 'bar'
| is conflicting with
|override def bar(): String in class B called from JS as method 'Syms.sym1'
|
| override def bar() = "1"
| ^
"""
"""
object Syms {
val sym1 = js.Symbol()
}
class A extends js.Object {
def foo: Int = 5
}
trait B extends A {
@JSName(Syms.sym1)
def foo: Int
}
class C extends B
""" hasErrors
s"""
|newSource1.scala:14: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|def foo: Int in class A called from JS as property 'foo'
| is conflicting with
|def foo: Int in trait B called from JS as property 'Syms.sym1'
|
| def foo: Int
| ^
|${ifHasNewRefChecks("""
|newSource1.scala:16: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|def foo: Int in class A called from JS as property 'foo'
| is conflicting with
|def foo: Int in trait B called from JS as property 'Syms.sym1'
|
| class C extends B
| ^
""")}
"""
"""
object Syms {
val sym1 = js.Symbol()
}
class A extends js.Object {
@JSName(Syms.sym1)
def foo: Int = 5
}
trait B extends A {
def foo: Int
}
class C extends B
""" hasErrors
s"""
|newSource1.scala:14: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|def foo: Int in class A called from JS as property 'Syms.sym1'
| is conflicting with
|def foo: Int in trait B called from JS as property 'foo'
|
| def foo: Int
| ^
|${ifHasNewRefChecks("""
|newSource1.scala:16: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|def foo: Int in class A called from JS as property 'Syms.sym1'
| is conflicting with
|def foo: Int in trait B called from JS as property 'foo'
|
| class C extends B
| ^
""")}
"""
"""
object Syms {
val sym1 = js.Symbol()
}
class A[T] extends js.Object {
@JSName(Syms.sym1)
def foo(x: T): T = x
}
class B extends A[Int] {
override def foo(x: Int): Int = x
}
""" hasErrors
"""
|newSource1.scala:14: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|override def foo(x: Int): Int in class B called from JS as method 'foo'
| is conflicting with
|def foo(x: Int): Int in class A called from JS as method 'Syms.sym1'
|
| override def foo(x: Int): Int = x
| ^
"""
"""
object Syms {
val sym1 = js.Symbol()
}
trait A[T] extends js.Object {
@JSName(Syms.sym1)
def foo(x: T): T
}
class B extends A[Int] {
override def foo(x: Int): Int = x
}
""" hasErrors
"""
|newSource1.scala:14: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|override def foo(x: Int): Int in class B called from JS as method 'foo'
| is conflicting with
|def foo(x: Int): Int in trait A called from JS as method 'Syms.sym1'
|
| override def foo(x: Int): Int = x
| ^
"""
"""
object Syms {
val sym1 = js.Symbol()
}
class A[T] extends js.Object {
@JSName(Syms.sym1)
def foo(x: T): T = x
}
trait B extends A[Int] {
def foo(x: Int): Int
}
class C extends B {
override def foo(x: Int): Int = x
}
""" hasErrors
"""
|newSource1.scala:14: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|def foo(x: Int): Int in class A called from JS as method 'Syms.sym1'
| is conflicting with
|def foo(x: Int): Int in trait B called from JS as method 'foo'
|
| def foo(x: Int): Int
| ^
|newSource1.scala:17: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|override def foo(x: Int): Int in class C called from JS as method 'foo'
| is conflicting with
|def foo(x: Int): Int in class A called from JS as method 'Syms.sym1'
|
| override def foo(x: Int): Int = x
| ^
"""
"""
object Syms {
val sym1 = js.Symbol()
}
class A[T] extends js.Object {
def foo(x: T): T = x
}
trait B extends A[Int] {
@JSName(Syms.sym1)
def foo(x: Int): Int
}
class C extends B {
override def foo(x: Int): Int = x
}
""" hasErrors
"""
|newSource1.scala:14: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|def foo(x: Int): Int in class A called from JS as method 'foo'
| is conflicting with
|def foo(x: Int): Int in trait B called from JS as method 'Syms.sym1'
|
| def foo(x: Int): Int
| ^
|newSource1.scala:17: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|override def foo(x: Int): Int in class C called from JS as method 'foo'
| is conflicting with
|def foo(x: Int): Int in trait B called from JS as method 'Syms.sym1'
|
| override def foo(x: Int): Int = x
| ^
"""
"""
object Syms {
val sym1 = js.Symbol()
}
trait A extends js.Object {
def foo: Int
}
trait B extends js.Object {
@JSName(Syms.sym1)
def foo: Int
}
trait C extends A with B
""" hasErrors
"""
|newSource1.scala:16: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|def foo: Int in trait B called from JS as property 'Syms.sym1'
| is conflicting with
|def foo: Int in trait A called from JS as property 'foo'
|
| trait C extends A with B
| ^
"""
"""
object Syms {
val sym1 = js.Symbol()
}
trait A extends js.Object {
def foo: Int
}
trait B extends js.Object {
@JSName(Syms.sym1)
def foo: Int
}
abstract class C extends A with B
""" hasErrors
"""
|newSource1.scala:16: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|def foo: Int in trait B called from JS as property 'Syms.sym1'
| is conflicting with
|def foo: Int in trait A called from JS as property 'foo'
|
| abstract class C extends A with B
| ^
"""
}
// #4282
@Test def jsTypesSpecialCallingConventionOverrideErrors: Unit = {
// name "apply" vs function application
"""
@js.native
@JSGlobal
class A extends js.Object {
def apply(): Int
}
class B extends A {
@JSName("apply")
def apply(): Int
}
""" hasErrors
"""
|newSource1.scala:13: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|def apply(): Int in class B called from JS as method 'apply'
| is conflicting with
|def apply(): Int in class A called from JS as function application
|
| def apply(): Int
| ^
"""
// property vs method
"""
class A extends js.Object {
def a: Int
}
class B extends A {
def a(): Int
}
""" hasErrors
"""
|newSource1.scala:10: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|def a(): Int in class B called from JS as method 'a'
| is conflicting with
|def a: Int in class A called from JS as property 'a'
|
| def a(): Int
| ^
"""
    val postUnarySpace = {
      val versionsWithNoSpace = Set(
        "2.12.1", "2.12.2", "2.12.3", "2.12.4", "2.12.5", "2.12.6",
        "2.12.7", "2.12.8", "2.12.9", "2.12.10", "2.13.0", "2.13.1"
      )
      val hasNoSpace =
        version.startsWith("2.11.") || versionsWithNoSpace.contains(version)
      if (hasNoSpace) ""
      else " "
    }
// unary op vs thing named like it
"""
@js.native
@JSGlobal
class A extends js.Object {
def unary_+ : Int
}
class B extends A {
@JSName("unary_+")
def unary_+ : Int
}
""" hasErrors
s"""
|newSource1.scala:13: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|def unary_+$postUnarySpace: Int in class B called from JS as property 'unary_+'
| is conflicting with
|def unary_+$postUnarySpace: Int in class A called from JS as unary operator
|
| def unary_+ : Int
| ^
"""
// non-zero arg is OK
"""
class A extends js.Object {
def unary_+(x: String): Int = 1
}
class B extends A {
@JSName("unary_+")
override def unary_+(x: String): Int = 2
}
""".succeeds()
// binary op vs thing named like it
"""
@js.native
@JSGlobal
class A extends js.Object {
def ||(x: Int): Int
}
class B extends A {
@JSName("||")
def ||(x: Int): Int
}
""" hasErrors
"""
|newSource1.scala:13: error: A member of a JS class is overriding another member with a different JS calling convention.
|
|def ||(x: Int): Int in class B called from JS as method '||'
| is conflicting with
|def ||(x: Int): Int in class A called from JS as binary operator
|
| def ||(x: Int): Int
| ^
"""
// non-single arg is OK
"""
class A extends js.Object {
def ||(): Int = 1
}
class B extends A {
@JSName("||")
override def ||(): Int = 2
}
""".succeeds()
}
@Test def noDefaultConstructorArgsIfModuleIsJSNative: Unit = {
"""
class A(x: Int = 1) extends js.Object
@js.native
@JSGlobal
object A extends js.Object
""" hasErrors
"""
|newSource1.scala:5: error: Implementation restriction: constructors of non-native JS classes cannot have default parameters if their companion module is JS native.
| class A(x: Int = 1) extends js.Object
| ^
"""
"""
class A(x: Int = 1)
@js.native
@JSGlobal
object A extends js.Object
""" hasErrors
"""
|newSource1.scala:5: error: Implementation restriction: constructors of Scala classes cannot have default parameters if their companion module is JS native.
| class A(x: Int = 1)
| ^
"""
}
// #2547
@Test def noDefaultOverrideCrash: Unit = {
"""
@js.native
@JSGlobal
class NativeBase extends js.Object {
def add(option: js.Any = js.native): js.Any = js.native
}
class Derived extends NativeBase {
override def add(option: js.Any): js.Any = super.add(option)
}
""" hasErrors
"""
|newSource1.scala:11: error: When overriding a native method with default arguments, the overriding method must explicitly repeat the default arguments.
| override def add(option: js.Any): js.Any = super.add(option)
| ^
"""
"""
@js.native
trait NativeTrait extends js.Object {
def add(option: js.Any = js.native): js.Any = js.native
}
@js.native
@JSGlobal
class NativeBase extends NativeTrait
class Derived extends NativeBase {
override def add(option: js.Any): js.Any = super.add(option)
}
""" hasErrors
"""
|newSource1.scala:15: error: When overriding a native method with default arguments, the overriding method must explicitly repeat the default arguments.
| override def add(option: js.Any): js.Any = super.add(option)
| ^
"""
}
  // #3969
@Test def overrideEqualsHashCode: Unit = {
for {
obj <- List("class", "object")
} {
s"""
$obj A extends js.Object {
override def hashCode(): Int = 1
override def equals(obj: Any): Boolean = false
// this one works as expected (so allowed)
override def toString(): String = "frobber"
/* these are allowed, since they are protected in jl.Object.
* as a result, only the overrides can be called. So the fact that they
* do not truly override the methods in jl.Object is not observable.
*/
override def clone(): Object = null
override def finalize(): Unit = ()
// other methods in jl.Object are final.
}
""" hasWarns
"""
|newSource1.scala:6: warning: Overriding hashCode in a JS class does not change its hash code. To silence this warning, change the name of the method and optionally add @JSName("hashCode").
| override def hashCode(): Int = 1
| ^
|newSource1.scala:7: warning: Overriding equals in a JS class does not change how it is compared. To silence this warning, change the name of the method and optionally add @JSName("equals").
| override def equals(obj: Any): Boolean = false
| ^
"""
}
for {
obj <- List("class", "object")
} {
s"""
@js.native
@JSGlobal
$obj A extends js.Object {
override def hashCode(): Int = js.native
override def equals(obj: Any): Boolean = js.native
}
""" hasWarns
"""
|newSource1.scala:8: warning: Overriding hashCode in a JS class does not change its hash code. To silence this warning, change the name of the method and optionally add @JSName("hashCode").
| override def hashCode(): Int = js.native
| ^
|newSource1.scala:9: warning: Overriding equals in a JS class does not change how it is compared. To silence this warning, change the name of the method and optionally add @JSName("equals").
| override def equals(obj: Any): Boolean = js.native
| ^
"""
}
"""
@js.native
trait A extends js.Any {
override def hashCode(): Int = js.native
override def equals(obj: Any): Boolean = js.native
}
""" hasWarns
"""
|newSource1.scala:7: warning: Overriding hashCode in a JS class does not change its hash code. To silence this warning, change the name of the method and optionally add @JSName("hashCode").
| override def hashCode(): Int = js.native
| ^
|newSource1.scala:8: warning: Overriding equals in a JS class does not change how it is compared. To silence this warning, change the name of the method and optionally add @JSName("equals").
| override def equals(obj: Any): Boolean = js.native
| ^
"""
"""
trait A extends js.Any {
override def hashCode(): Int
override def equals(obj: Any): Boolean
}
""" hasWarns
"""
|newSource1.scala:6: warning: Overriding hashCode in a JS class does not change its hash code. To silence this warning, change the name of the method and optionally add @JSName("hashCode").
| override def hashCode(): Int
| ^
|newSource1.scala:7: warning: Overriding equals in a JS class does not change how it is compared. To silence this warning, change the name of the method and optionally add @JSName("equals").
| override def equals(obj: Any): Boolean
| ^
"""
}
}
|
scala-js/scala-js
|
compiler/src/test/scala/org/scalajs/nscplugin/test/JSInteropTest.scala
|
Scala
|
apache-2.0
| 131,720 |
import scalariform.formatter.preferences._
import com.typesafe.sbt.SbtScalariform
import com.typesafe.sbt.SbtScalariform.ScalariformKeys
object Scalariform {
lazy val settings = {
ScalariformKeys.preferences := ScalariformKeys.preferences.value
.setPreference(AlignParameters, true)
.setPreference(DoubleIndentClassDeclaration, false)
.setPreference(PlaceScaladocAsterisksBeneathSecondAsterisk, true)
.setPreference(SpacesAroundMultiImports, false)
}
}
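// Illustrative usage sketch (not part of the original build): a hypothetical
// sbt module picking up these Scalariform formatting preferences.
//
//   lazy val myProject = (project in file("my-project"))
//     .settings(Scalariform.settings)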
|
xheres/api-gbfraidfinder
|
project/Scalariform.scala
|
Scala
|
mit
| 487 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark
import java.util.concurrent.TimeUnit
import org.apache.spark.executor.ExecutorMetrics
import org.apache.spark.internal.Logging
import org.apache.spark.memory.MemoryManager
import org.apache.spark.metrics.ExecutorMetricType
import org.apache.spark.util.{ThreadUtils, Utils}
/**
* Creates a heartbeat thread which will call the specified reportHeartbeat function at
* intervals of intervalMs.
*
* @param memoryManager the memory manager for execution and storage memory.
* @param reportHeartbeat the heartbeat reporting function to call.
* @param name the thread name for the heartbeater.
* @param intervalMs the interval between heartbeats.
*/
private[spark] class Heartbeater(
memoryManager: MemoryManager,
reportHeartbeat: () => Unit,
name: String,
intervalMs: Long) extends Logging {
// Executor for the heartbeat task
private val heartbeater = ThreadUtils.newDaemonSingleThreadScheduledExecutor(name)
/** Schedules a task to report a heartbeat. */
def start(): Unit = {
// Wait a random interval so the heartbeats don't end up in sync
val initialDelay = intervalMs + (math.random * intervalMs).asInstanceOf[Int]
val heartbeatTask = new Runnable() {
override def run(): Unit = Utils.logUncaughtExceptions(reportHeartbeat())
}
heartbeater.scheduleAtFixedRate(heartbeatTask, initialDelay, intervalMs, TimeUnit.MILLISECONDS)
}
/** Stops the heartbeat thread. */
def stop(): Unit = {
heartbeater.shutdown()
heartbeater.awaitTermination(10, TimeUnit.SECONDS)
}
/**
* Get the current executor level metrics. These are returned as an array, with the index
* determined by MetricGetter.values
*/
def getCurrentMetrics(): ExecutorMetrics = {
val metrics = ExecutorMetricType.values.map(_.getMetricValue(memoryManager)).toArray
new ExecutorMetrics(metrics)
}
}
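// Minimal usage sketch (illustrative, not part of Spark): wiring a Heartbeater
// to a reporting function. `memoryManager` is assumed to be an existing
// MemoryManager instance; the 10-second interval is arbitrary.
//
//   val heartbeater = new Heartbeater(
//     memoryManager,
//     () => println("heartbeat"),        // reportHeartbeat
//     "driver-heartbeater",              // thread name
//     TimeUnit.SECONDS.toMillis(10))     // intervalMs
//   heartbeater.start()
//   // ... on shutdown:
//   heartbeater.stop()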
|
michalsenkyr/spark
|
core/src/main/scala/org/apache/spark/Heartbeater.scala
|
Scala
|
apache-2.0
| 2,685 |
package akka.osgi.ds.impl
import java.util.Properties
import java.util.concurrent.CyclicBarrier
import scala.collection.JavaConversions.asScalaSet
import scala.language.implicitConversions
import org.osgi.framework.Bundle
import org.osgi.framework.BundleContext
import org.osgi.framework.ServiceFactory
import org.osgi.framework.ServiceRegistration
import org.osgi.service.component.annotations.Activate
import org.osgi.service.component.annotations.Component
import org.osgi.service.component.annotations.ConfigurationPolicy
import org.osgi.service.component.annotations.Deactivate
import org.osgi.service.component.annotations.Reference
import org.osgi.service.component.annotations.ReferenceCardinality
import org.osgi.service.component.annotations.ReferencePolicy
import org.osgi.service.log.LogService
import com.typesafe.config.Config
import com.typesafe.config.ConfigFactory
import com.typesafe.config.ConfigValueFactory
import com.typesafe.config.osgi.ConfigRecovery
import akka.actor.ActorSystem
import akka.osgi.BundleDelegatingClassLoader
/**
* A Declarative Services component that provides [[ActorSystemServiceFactory]] with
* configuration and registers it with the OSGi framework.
*
* The component uses `REQUIRE` configuration policy and PID `com.typesafe.akka`. It does not
* have a method annotated with `@Modified`, which means modification of the configuration
* causes deactivation of the component and subsequent activation of another instance that will
* be provided with new configuration.
*/
@Component(
configurationPid = "com.typesafe.akka",
configurationPolicy = ConfigurationPolicy.REQUIRE)
class ActorSystemComponent {
/** Service factory instance. */
var serviceFactory: Option[ActorSystemServiceFactory] = None
  /** Registration object for the service factory. */
var registration: Option[ServiceRegistration[_]] = None
/** OSGi LogService instance */
var logService: Option[LogService] = None
/**
* Invoked by DS runtime when LogService becomes available
*/
@Reference(cardinality = ReferenceCardinality.OPTIONAL, policy = ReferencePolicy.DYNAMIC)
def bindLogService(service: LogService) = {
logService = Some(service)
serviceFactory.foreach(_.setLogSevice(logService))
}
/**
* Invoked by DS runtime when LogService disappears
*/
def unbindLogService(service: LogService) = {
logService = None
serviceFactory.foreach(_.setLogSevice(logService))
}
/**
* Starts up the component.
*
* At activation, an [[ActorSystemServiceFactory]] will be created and registered
* with OSGi framework. It will provide `ActorSystem` service objects customized for
* all requesting bundles.
*
* @param ctx `BundleContext` of the `com.typesafe.akka.osgi.ds.impl` bundle
* @param properties component properties fetched by Declarative Services runtime from
* `ConfigurationAdmin`.
*/
@Activate
def activate(ctx: BundleContext, properties: java.util.Map[String, _]): Unit = {
serviceFactory = Some(new ActorSystemServiceFactory(ConfigRecovery.fromProperties(properties)))
serviceFactory.foreach(_.setLogSevice(logService))
registration = serviceFactory.map(ctx.registerService(classOf[ActorSystem].getName(), _, null))
}
/**
* Shuts down the component.
*
* At deactivation, all provided service instances will be unregistered, and the `ActorSystem`
* underlying [[ActorSystemServiceFactory]] will be also shut down.
*/
@Deactivate
def deactivate: Unit = {
registration.foreach(_.unregister())
serviceFactory.foreach(_.shutdown())
}
}
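// Illustrative consumer sketch (assumption, not part of this bundle): a DS
// component receiving the ActorSystem service registered above. The framework
// routes the lookup through ActorSystemServiceFactory, so each consuming
// bundle gets its own customized view of the shared ActorSystem.
//
//   @Component
//   class MyConsumer {
//     @Reference
//     def bindActorSystem(system: ActorSystem): Unit =
//       system.log.info("received bundle-scoped ActorSystem")
//   }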
|
rkrzewski/spray-osgi
|
com.typesafe.akka.osgi.ds/src/akka/osgi/ds/impl/ActorSystemComponent.scala
|
Scala
|
apache-2.0
| 3,599 |
package net.xylophones.planetoid.game.model
import net.xylophones.planetoid.game.maths.Vector2D
trait Circular {
def radius: Int
def position: Vector2D
}
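// Example implementation sketch (hypothetical, not part of the game model):
//
//   case class Planet(position: Vector2D, radius: Int) extends Circular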
|
wjsrobertson/planetoid
|
game/src/main/scala/net/xylophones/planetoid/game/model/Circular.scala
|
Scala
|
apache-2.0
| 163 |
package org.concurrency.ch6
import rx.lang.scala.Observable
import scala.concurrent.duration._
object ObservableTimerApp extends App {
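  // Merges a 5-second and a 12-second timer into one Observable, filtering out
  // ticks whose index times the period is a multiple of 30 seconds.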
def observableTimer(): Observable[Long] = {
Observable(
subscriptor => {
Observable.interval(5.seconds).filter(n => (n * 5) % 30 != 0).subscribe {
n => subscriptor.onNext(n)
}
Observable.interval(12.seconds).filter(n => (n * 12) % 30 != 0).subscribe {
n => subscriptor.onNext(n)
}
}
)
}
observableTimer().subscribe(println(_))
Thread.sleep(400000)
}
|
marcos-sb/concurrent-programming-scala
|
src/main/scala-2.11/org/concurrency/ch6/ObservableTimerApp.scala
|
Scala
|
apache-2.0
| 569 |
package scoverage.report
import java.io.File
import scala.xml.Node
import scala.xml.PrettyPrinter
import scoverage.DoubleFormat.twoFractionDigits
import scoverage._
/** @author Stephen Samuel */
class CoberturaXmlWriter(sourceDirectories: Seq[File], outputDir: File)
extends BaseReportWriter(sourceDirectories, outputDir) {
def this(baseDir: File, outputDir: File) = {
this(Seq(baseDir), outputDir)
}
def write(coverage: Coverage): Unit = {
val file = new File(outputDir, "cobertura.xml")
IOUtils.writeToFile(
file,
"<?xml version=\\"1.0\\"?>\\n<!DOCTYPE coverage SYSTEM \\"http://cobertura.sourceforge.net/xml/coverage-04.dtd\\">\\n" +
new PrettyPrinter(120, 4).format(xml(coverage))
)
}
def method(method: MeasuredMethod): Node = {
<method name={method.name}
signature="()V"
line-rate={twoFractionDigits(method.statementCoverage)}
branch-rate={twoFractionDigits(method.branchCoverage)}
complexity="0">
<lines>
{
method.statements.map(stmt => <line
number={stmt.line.toString}
hits={stmt.count.toString}
branch={stmt.branch.toString}/>)
}
</lines>
</method>
}
def klass(klass: MeasuredClass): Node = {
<class name={klass.fullClassName}
filename={relativeSource(klass.source).replace(File.separator, "/")}
line-rate={twoFractionDigits(klass.statementCoverage)}
branch-rate={twoFractionDigits(klass.branchCoverage)}
complexity="0">
<methods>
{klass.methods.map(method)}
</methods>
<lines>
{
klass.statements.map(stmt => <line
number={stmt.line.toString}
hits={stmt.count.toString}
branch={stmt.branch.toString}/>)
}
</lines>
</class>
}
def pack(pack: MeasuredPackage): Node = {
<package name={pack.name}
line-rate={twoFractionDigits(pack.statementCoverage)}
branch-rate={twoFractionDigits(pack.branchCoverage)}
complexity="0">
<classes>
{pack.classes.map(klass)}
</classes>
</package>
}
def source(src: File): Node = {
<source>{src.getCanonicalPath.replace(File.separator, "/")}</source>
}
def xml(coverage: Coverage): Node = {
<coverage line-rate={twoFractionDigits(coverage.statementCoverage)}
lines-valid={coverage.statementCount.toString}
lines-covered={coverage.invokedStatementCount.toString}
branches-valid={coverage.branchCount.toString}
branches-covered={coverage.invokedBranchesCount.toString}
branch-rate={twoFractionDigits(coverage.branchCoverage)}
complexity="0"
version="1.0"
timestamp={System.currentTimeMillis.toString}>
<sources>
<source>--source</source>
{sourceDirectories.filter(_.isDirectory).map(source)}
</sources>
<packages>
{coverage.packages.map(pack)}
</packages>
</coverage>
}
}
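// Illustrative sketch: typical invocation of the writer. The directory paths
// and the `coverage` value are assumptions for the example.
object CoberturaXmlWriterExample {
  def writeReport(coverage: Coverage): Unit = {
    val writer = new CoberturaXmlWriter(
      sourceDirectories = Seq(new File("src/main/scala")),
      outputDir = new File("target/coverage-report"))
    // Produces target/coverage-report/cobertura.xml with the XML built above
    writer.write(coverage)
  }
}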
|
scoverage/scalac-scoverage-plugin
|
scalac-scoverage-plugin/src/main/scala/scoverage/report/CoberturaXmlWriter.scala
|
Scala
|
apache-2.0
| 3,083 |
package io.resourcepool.computerdatabase.gatling.process
import io.gatling.core.Predef._
import io.gatling.http.Predef._
import com.typesafe.config.ConfigFactory
/**
* Created by Cédric Cousseran on 29/03/16.
* Browse a random number of pages with random parameters.
*/
object Browse {
val config = ConfigFactory.load
val random = new util.Random
val numberPage = random.nextInt(10) + 9
val feederColumn = csv("data/searchColumn.csv").random
val feederOrder = csv("data/searchOrder.csv").random
val feederPageSize = csv("data/searchPageSize.csv").random
val feederPage = Iterator.continually(Map("page" -> (random.nextInt(19) + 1)))
val browse = feed(feederColumn)
.feed(feederOrder)
.feed(feederPageSize)
.feed(feederPage)
.repeat(numberPage) {
exec {
http("Browse: Browse page")
.get(config.getString("application.urls.dashboardPage"))
.queryParam(config.getString("application.urls.param.page").toString(), "${page}")
.queryParam(config.getString("application.urls.param.pageSize").toString(), "${pageSize}")
.queryParam(config.getString("application.urls.param.column").toString(), "${column}")
.queryParam(config.getString("application.urls.param.order").toString(), "${order}")
.check(status.is(200))
}.pause(3, 10)
}
}
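// Illustrative sketch: wiring the chain above into a Gatling scenario. The
// scenario name and injection profile are invented for the example.
object BrowseSimulationExample {
  // `scenario` comes from the io.gatling.core.Predef._ import above
  val scn = scenario("Browse computers").exec(Browse.browse)
  // In a Simulation subclass one would then run: setUp(scn.inject(atOnceUsers(10)))
}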
|
serrearthur/training-java
|
training-java/gatling-test/src/test/scala/io/resourcepool/computerdatabase/gatling/process/Browse.scala
|
Scala
|
apache-2.0
| 1,355 |
package io.surfkit.driver
import io.surfkit.am.IntTypeMapping
import io.surfkit.data.Data
import scala.Predef._
/**
*
* Created by Corey Auger
*/
object OpenToMetrics extends App with SparkSetup{
override def main(args: Array[String]) {
import sqlContext.implicits._
val p = new java.io.PrintWriter("./output/opento.json")
val women = sqlContext.sql(
"""
|SELECT pref_opento, city, state, country, gender, dob, profile_ethnicity, profile_bodytype
|FROM members
|WHERE gender = 1
""".stripMargin
).cache()
val men = sqlContext.sql(
"""
|SELECT pref_opento, city, state, country, gender, dob, profile_ethnicity, profile_bodytype
|FROM members
|WHERE gender = 2
""".stripMargin
).cache()
// TODO: group by city / region
// TODO: group by country
// TODO: group by age
// TODO: group by body type
//load the city and population data
val worldCities = sc.textFile("../data/cities/worldcitiespop.txt")
.map(_.split(","))
.filter(_(0) != "Country")
.filter(s => s(4) != "" && s(5) != "" && s(6) != "")
.map(s => Data.City(s(0), s(1), s(2), s(3), s(4).toInt, s(5).toDouble, s(6).toDouble))
.toDF()
//worldCities.show(100)
worldCities.registerTempTable("Cities")
val menN = men.count()
val womenN = women.count()
println(s"Num Women ${womenN}")
println(s"Num Men ${menN}")
p.write(s"Num Women ${womenN}\\n")
p.write(s"Num Men ${menN} \\n")
p.write("\\n\\n")
men.registerTempTable("Men")
women.registerTempTable("Women")
// open to totals...
    val menOpenTo = men.map(r => if (r.isNullAt(0)) "" else r.getString(0)).map { r =>
(r.split("\\\\|").filter(_ != "").map(s => IntTypeMapping.prefOpenTo.get(s.toInt)).filter(_ != None).map(_.get).toSet)
}
    val womenOpenTo = women.map(r => if (r.isNullAt(0)) "" else r.getString(0)).map { r =>
(r.split("\\\\|").filter(_ != "").map(s => IntTypeMapping.prefOpenTo.get(s.toInt)).filter(_ != None).map(_.get).toSet)
}
IntTypeMapping.prefOpenTo.values.map { opento =>
p.write(s"Men ${opento} totals\\n")
val menx = menOpenTo.filter(_.contains(opento)).count
p.write(s"${menx} / ${menN} ${(menx.toDouble/menN.toDouble)}\\n\\n")
p.write(s"Women ${opento} totals\\n")
val womenx = womenOpenTo.filter(_.contains(opento)).count
p.write(s"${womenx} / ${womenN} ${(womenx.toDouble/womenN.toDouble)}\\n\\n\\n")
}
    // Discovered that Lat,Lng is messed up in a LOT of cases (the sign is inverted).
// eg: "Vancouver"
// (distance, (city, population, openTo, userLat, userLng, cityLat, cityLng)
// List((245.80350473473368,(Vancouver,157517,|7|,49.25,123.1__,45.6388889,-122.6602778)), (246.25003299999997,(Vancouver,1837970,|7|,49.25,123.1___,49.25,-123.133333)))
/*
val menCityOpenTo = sqlContext.sql(
"""
|SELECT a.id, a.city, a.pref_opento, a.latitude, a.longitude, b.Population, b.Latitude, b.Longitude
|FROM Men a JOIN Cities b
|ON lower(a.city) = lower(b.City)
|WHERE a.latitude > 0
""".stripMargin
)
menCityOpenTo.show(40)
menCityOpenTo.map{ r =>
(s"${r.getString(1)}-${r.getInt(0)}", (r.getString(1), r.getInt(5), r.getString(2), r.getDouble(3), r.getDouble(4), r.getDouble(6), r.getDouble(7)))
}.groupByKey().map{ r =>
r._2.map{
case (city, population, openTo, userLat, userLng, cityLat, cityLng) =>
val dist = Math.sqrt( Math.pow(userLat-cityLat,2)+Math.pow(userLng-cityLng,2) )
(dist,(city,population,openTo, userLat, userLng, cityLat, cityLng))
}.toList.sortBy(_._1)
}.take(50).foreach(println)
*/
val menCityOpenTo = sqlContext.sql(
"""
|SELECT a.city, a.pref_opento, b.Population
|FROM Men a JOIN Cities b
|ON lower(a.city) = lower(b.City)
|ORDER BY b.Population
""".stripMargin
)
val womenCityOpenTo = sqlContext.sql(
"""
|SELECT a.city, a.pref_opento, b.Population
|FROM Women a JOIN Cities b
|ON lower(a.city) = lower(b.City)
|ORDER BY b.Population
""".stripMargin
)
val menCityOpenTo2 = menCityOpenTo.map { r =>
(r.getString(0), r.getString(1).split("\\\\|").filter(_ != "").map(s => IntTypeMapping.prefOpenTo.get(s.toInt)).filter(_ != None).map(_.get).toSet, r.getInt(2))
}
val womenCityOpenTo2 = womenCityOpenTo.map { r =>
(r.getString(0), r.getString(1).split("\\\\|").filter(_ != "").map(s => IntTypeMapping.prefOpenTo.get(s.toInt)).filter(_ != None).map(_.get).toSet, r.getInt(2))
}
IntTypeMapping.prefOpenTo.values.take(5).map { opento =>
p.write(s"Men Open to ${opento}\\n")
menCityOpenTo2.filter(_._2.contains(opento)).map(r => ((r._1,r._3), 1) ).reduceByKey((a,b) => a+b).map(s => (s._1._1,s._1._2.toDouble, s._2 )).sortBy( _._3, false).take(20).foreach(s => p.write(s.toString+ "\\n"))
//menCityOpenTo2.filter(_._2.contains(opento)).map(r => (r._1, 1) ).reduceByKey((a,b) => a+b).map(s => (s._1, s._2 )).sortBy( _._2, false).take(20).foreach(s => p.write(s.toString+ "\\n"))
p.write("\\n")
p.write(s"Women Open to ${opento}\\n")
womenCityOpenTo2.filter(_._2.contains(opento)).map(r => ((r._1,r._3), 1) ).reduceByKey((a,b) => a+b).map(s => (s._1._1,s._1._2.toDouble, s._2 )).sortBy( _._3, false).take(20).foreach(s => p.write(s.toString+ "\\n"))
p.write("\\n\\n")
}
p.close()
sc.stop()
}
}
|
coreyauger/ashley-madison-spark
|
src/main/scala/io/surfkit/driver/OpenToMetrics.scala
|
Scala
|
mit
| 5,554 |
import scala.collection._
import scala.language.postfixOps
object Test extends App {
val printTime = false
def sum[A](xs: Iterable[Int]) = xs.foldLeft(0)((x, y) => x + y)
def time(op: => Unit): Unit = {
val start = System.currentTimeMillis()
op
if (printTime) println(" time = "+(System.currentTimeMillis() - start)+"ms")
}
def test(msg: String, s0: collection.immutable.Set[Int], iters: Int) = {
println("***** "+msg+":")
var s = s0
s = s + 2
s = s + 3 + 4000 + 10000
println("test1: "+sum(s))
time {
s = s ++ (List.range(0, iters) map (2*))
println("test2: "+sum(s)+", iters = "+iters)
}
time {
var x = 0
for (i <- 0 to 10000)
if (s contains i) x += i
println("test3: "+x)
}
}
def test(msg: String, s0: collection.mutable.Set[Int], iters: Int) = {
println("***** "+msg+":")
var s = s0
s = s.clone() += 2
s = s.clone.addAll(List(3, 4000, 10000))
println("test1: "+sum(s))
time {
s = s ++ (List.range(0, iters) map (2*))
println("test2: "+sum(s)+", iters = "+iters)
}
time {
var x = 0
for (i <- 0 to 10000)
if (s contains i) x += i
println("test3: "+x)
}
}
def test(msg: String, s0: collection.immutable.Map[Int, Int], iters: Int) = {
println("***** "+msg+":")
var s = s0
s = s + (2 -> 2)
s = s + (3 -> 3) + (4000 -> 4000) + (10000 -> 10000)
println("test1: "+sum(s map (_._2)))
time {
s = s ++ (List.range(0, iters) map (x => x * 2 -> x * 2))
println("test2: "+sum(s map (_._2))+", iters = "+iters)
}
time {
var x = 0
for (i <- 0 to 10000)
s get i match {
case Some(i) => x += i
case None =>
}
println("test3: "+x)
}
if (iters == 5000) {
time {
var s1 = s
var x = 0
for (i <- 0 to 10000) {
s get i match {
case Some(i) => x += i
case None =>
}
s1 = s1 + ((i + 10000) -> i)
}
println("test4: "+x)
}
}
}
def test(msg: String, s0: collection.mutable.Map[Int, Int], iters: Int) = {
println("***** "+msg+":")
var s = s0
s = s.clone() += (2 -> 2)
s = s.clone().addAll(List(3 -> 3, 4000 -> 4000, 10000 -> 10000))
println("test1: "+sum(s map (_._2)))
time {
s = s ++ (List.range(0, iters) map (x => x * 2 -> x * 2))
println("test2: "+sum(s map (_._2))+", iters = "+iters)
}
time {
var x = 0
for (i <- 0 to 10000)
s get i match {
case Some(i) => x += i
case None =>
}
println("test3: "+x)
}
}
test("mutable.HashSet", new mutable.HashSet[Int], 5000)
test("mutable.LinkedHashSet", new mutable.LinkedHashSet[Int], 5000)
test("immutable.Set", immutable.Set[Int](), 5000)
test("immutable.ListSet", new immutable.ListSet[Int], 5000)
test("immutable.TreeSet", new immutable.TreeSet[Int], 5000)
test("mutable.HashMap", new mutable.HashMap[Int, Int], 5000)
test("mutable.LinkedHashMap", new mutable.LinkedHashMap[Int, Int], 5000)
test("immutable.Map", immutable.Map[Int, Int](), 5000)
test("immutable.TreeMap", new immutable.TreeMap[Int, Int], 5000)
test("immutable.ListMap", new immutable.ListMap[Int, Int], 3000)
}
|
scala/scala
|
test/files/run/collections.scala
|
Scala
|
apache-2.0
| 3,332 |
package com.arcusys.valamis.lesson.scorm.model.manifest
/**
* A rule executed each time after an attempt on a descendant activity terminates to see if this activity needs to exit
* @param conditions Set of conditions that define whether the rule will be applied or not
*/
class ExitConditionRule(conditions: RuleConditionSet) extends ConditionRule(conditions)
|
igor-borisov/valamis
|
valamis-scorm-lesson/src/main/scala/com/arcusys/valamis/lesson/scorm/model/manifest/ExitConditionRule.scala
|
Scala
|
gpl-3.0
| 364 |
/*
* Copyright 2014-16 Intelix Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package au.com.intelix.config
import com.typesafe.config.Config
case class RootConfig(override val config: Config) extends WithConfig
|
intelix/reactiveservices
|
tools/config/src/main/scala/au/com/intelix/config/RootConfig.scala
|
Scala
|
apache-2.0
| 738 |
package com.mehmetakiftutuncu.eshotroidplus.models
import com.github.mehmetakiftutuncu.errors.{CommonError, Errors}
import com.mehmetakiftutuncu.eshotroidplus.models.base.{Jsonable, ModelBase}
import com.mehmetakiftutuncu.eshotroidplus.utilities.Log
import play.api.libs.json.{JsObject, JsValue, Json}
case class Location(latitude: Double, longitude: Double) extends ModelBase {
override def toJson: JsObject = Location.toJson(this)
}
object Location extends LocationBase
trait LocationBase extends Jsonable[Location] {
override def toJson(location: Location): JsObject = {
Json.obj(
"lat" -> location.latitude,
"lon" -> location.longitude
)
}
override def fromJson(json: JsValue): Either[Errors, Location] = {
try {
val latitudeAsOpt = (json \\ "lat").asOpt[Double]
val longitudeAsOpt = (json \\ "lon").asOpt[Double]
val latitudeErrors = if (latitudeAsOpt.isEmpty) {
Errors(CommonError.invalidData.reason("Latitude is missing!"))
} else if (latitudeAsOpt.get < -90.0 || latitudeAsOpt.get > 90.0) {
Errors(CommonError.invalidData.reason("Latitude must be in [-90.0, 90.0]!").data(latitudeAsOpt.get.toString))
} else {
Errors.empty
}
val longitudeErrors = if (longitudeAsOpt.isEmpty) {
Errors(CommonError.invalidData.reason("Longitude is missing!"))
} else if (longitudeAsOpt.get < -180.0 || longitudeAsOpt.get > 180.0) {
Errors(CommonError.invalidData.reason("Longitude must be in [-180.0, 180.0]!").data(longitudeAsOpt.get.toString))
} else {
Errors.empty
}
val errors = latitudeErrors ++ longitudeErrors
if (errors.nonEmpty) {
Log.error("Location.fromJson", s"""Failed to create location from "$json"!""", errors)
Left(errors)
} else {
val location = Location(latitudeAsOpt.get, longitudeAsOpt.get)
Right(location)
}
} catch {
case t: Throwable =>
val errors = Errors(CommonError.invalidData)
Log.error(t, "Location.fromJson", s"""Failed to create location from "$json"!""")
Left(errors)
}
}
}
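// Illustrative sketch: round-tripping a Location through its JSON form. The
// coordinates are arbitrary sample values.
object LocationJsonExample {
  // Right(Location(38.42, 27.14))
  val parsed: Either[Errors, Location] = Location.fromJson(Json.obj("lat" -> 38.42, "lon" -> 27.14))
  // Left(errors) because latitude must be in [-90.0, 90.0]
  val outOfRange: Either[Errors, Location] = Location.fromJson(Json.obj("lat" -> 123.0, "lon" -> 0.0))
}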
|
mehmetakiftutuncu/EshotroidPlusServer
|
app/com/mehmetakiftutuncu/eshotroidplus/models/Location.scala
|
Scala
|
gpl-3.0
| 2,144 |
package net.selenate.server
package actions
import net.selenate.common.comms.{ SeElementSelectMethod, SeElementSelector, SeOptionSelectMethod, SeOptionSelector }
import org.openqa.selenium.{ By, StaleElementReferenceException }
import org.openqa.selenium.remote.RemoteWebElement
import org.openqa.selenium.support.ui.Select
import scala.collection.JavaConversions._
import scala.util.{ Failure, Success, Try }
import net.selenate.common.comms.SeElementVisibility
trait ActionCommons
extends ActionCommonsBase
with ActionCommonsFrames
with ActionCommonsParsers
with ActionCommonsCerts { self: Loggable =>
protected def findElementList(selector: SeElementSelector): IndexedSeq[RemoteWebElement] = {
val by = byFactory(selector)
d.findElements(by)
.map(_.asInstanceOf[RemoteWebElement])
.filter(visibilityFilter(selector.getVisibility))
.toIndexedSeq
}
protected def byFactory(selector: SeElementSelector): By = {
import SeElementSelectMethod._
selector.getMethod match {
case CLASS_NAME => By.className(selector.getQuery)
case CSS_SELECTOR => By.cssSelector(selector.getQuery)
case ID => By.id(selector.getQuery)
case LINK_TEXT => By.linkText(selector.getQuery)
case NAME => By.name(selector.getQuery)
case PARTIAL_LINK_TEXT => By.partialLinkText(selector.getQuery)
case TAG_NAME => By.tagName(selector.getQuery)
case XPATH => By.xpath(selector.getQuery)
}
}
protected def visibilityFilter(visibility: SeElementVisibility)(e: RemoteWebElement) = {
import SeElementVisibility._
if (visibility != ANY) {
try {
(visibility, e.isDisplayed) match {
case (VISIBLE, false) => false
case (HIDDEN, true) => false
case _ => true
}
} catch {
case _: StaleElementReferenceException => false
}
} else {
true
}
}
protected def selectOption(s: Select, selector: SeOptionSelector) = {
import SeOptionSelectMethod._
def selectorFactory = selector.getMethod match {
case INDEX => (str: String) => s.selectByIndex(str.toInt)
case VALUE => s.selectByValue _
case VISIBLE_TEXT => s.selectByVisibleText _
}
selectorFactory(selector.getQuery)
}
protected def elementInAllWindows[T](selector: SeElementSelector)(f: (Address, RemoteWebElement) => T): Option[Try[T]] = {
val resultIterator = inAllWindows { address =>
findElementList(selector).toList match {
case Nil =>
None
case e :: _ =>
val r = Try(f(address, e))
Some(r)
}
}.flatten
if (resultIterator.hasNext) {
Some(resultIterator.next)
} else {
None
}
}
protected def elementInCache[T](selector: SeElementSelector)(f: (Address, RemoteWebElement) => T): Option[Try[T]] = {
getFromCache(selector) flatMap { cachedElement =>
val address = Address(cachedElement.windowHandle, cachedElement.framePath)
try {
if (context.useFrames) {
fullSwitch(cachedElement.windowHandle, cachedElement.framePath)
}
Some(Success(f(address, cachedElement.elem)))
} catch {
case e: StaleElementReferenceException =>
val foundElem = findElementList(selector).headOption
foundElem.map(elem => Try(f(address, elem)))
case e: Exception =>
None
}
}
}
}
|
tferega/selenate
|
code/Server/src/main/scala/net/selenate/server/actions/ActionCommons.scala
|
Scala
|
bsd-3-clause
| 3,520 |
package com.github.skozlov.ai
import java.io.File
import com.github.skozlov.ai.World.Temperature
import com.github.skozlov.commons.scala.collections.{MatrixBuilder, Matrix}
import com.github.skozlov.commons.scala.collections.Matrix.Coordinates
import com.github.skozlov.commons.scala.random.Random
import com.github.skozlov.commons.scala.reactivex.Property
import com.github.tototoshi.csv.{DefaultCSVFormat, CSVWriter}
import rx.lang.scala.subjects.PublishSubject
import rx.lang.scala.{Observable, Subject}
import scala.collection.mutable.ListBuffer
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.{Await, Future}
import scala.concurrent.duration.FiniteDuration
import resource._
class Model(minSize: Int, maxSize: Int, tactMinDuration: FiniteDuration, tactMaxDuration: FiniteDuration){
require(minSize >= 1)
require(minSize <= maxSize)
private val size = Random.elementFrom(minSize to maxSize)
private val fields: Matrix[Temperature] = {
val peakCoordinates: Coordinates = Coordinates.random(rowsCount = size, columnsCount = size)
val peakTemperature: Temperature = {
val maxHorizontalDistance: Int = Math.max(peakCoordinates.column, size - 1 - peakCoordinates.column)
val maxVerticalDistance: Int = Math.max(peakCoordinates.row, size - 1 - peakCoordinates.row)
maxHorizontalDistance + maxVerticalDistance
}
val fieldsBuilder = new MatrixBuilder[Int](size, size)
for(
row <- 0 to size-1;
column <- 0 to size-1;
coordinate = Coordinates(row, column)){
fieldsBuilder(coordinate) = peakTemperature - coordinate.distanceTo(peakCoordinates)
}
fieldsBuilder.toMatrix
}
private val agentInitCoordinates = Coordinates.random(rowsCount = size, columnsCount = size)
private val _worlds = new ListBuffer[World]
def worlds(): List[World] = _worlds.toList
def addAgent(agent: Agent): Unit = {
val world = new World(fields, agent, agentInitCoordinates)
_worlds += world
new WorldUI(world).visible = true
}
private var run = false
def start(): Unit ={
run = true
    startSubject.onNext(())
Future{
while (run){
val deadline = tactMinDuration.fromNow
Await.ready(Future.traverse(_worlds){world => Future{world.tact()}}.map{_ =>
val rest = deadline.timeLeft.toMillis
if(rest > 0) Thread.sleep(rest)
_tactNumber.value = _tactNumber.value + 1
}, tactMaxDuration)
}
}
}
def stop(): Unit ={
run = false
    stopSubject.onNext(())
}
def exportAsCsv(file: File): Unit ={
for(writer <- managed(CSVWriter.open(file)(new DefaultCSVFormat {
override val delimiter: Char = ';'
}))){
writer.writeAll(worlds().map{w =>
val agent = w.agent
agent.getClass.getName :: agent.totalPleasureHistory()})
}
}
private val startSubject: Subject[Unit] = PublishSubject()
val startStream: Observable[Unit] = startSubject
private val stopSubject: Subject[Unit] = PublishSubject()
val stopStream: Observable[Unit] = stopSubject
private val _tactNumber = Property(0)
val tactNumber: Observable[Int] = _tactNumber
}
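// Illustrative sketch: driving the model with any Agent implementation. The
// sizes and tact durations are arbitrary sample values.
object ModelExample {
  import scala.concurrent.duration._
  def run(agent: Agent): Unit = {
    val model = new Model(minSize = 5, maxSize = 10, tactMinDuration = 100.millis, tactMaxDuration = 1.second)
    model.addAgent(agent) // creates a World and opens a WorldUI for it
    model.start()         // begins the asynchronous tact loop
  }
}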
|
skozlov/ai
|
src/main/scala/com/github/skozlov/ai/Model.scala
|
Scala
|
apache-2.0
| 3,036 |
/*
* Copyright 2014-2022 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.atlas.core.validation
import com.netflix.spectator.api.Id
import com.typesafe.config.ConfigFactory
import munit.FunSuite
class NameValueLengthRuleSuite extends FunSuite {
private val config = ConfigFactory.parseString("""
|name {
| min-length = 3
| max-length = 5
|}
|others {
| min-length = 2
| max-length = 4
|}
""".stripMargin)
private val rule = NameValueLengthRule(config)
private def validate(k: String, v: String): ValidationResult = {
rule.validate(Map(k -> v))
rule.validate(if (k == "name") Id.create(v) else Id.create("foo").withTag(k, v))
}
test("name valid") {
assertEquals(validate("name", "abc"), ValidationResult.Pass)
assertEquals(validate("name", "abcd"), ValidationResult.Pass)
assertEquals(validate("name", "abcde"), ValidationResult.Pass)
}
test("name too short") {
val res = validate("name", "ab")
assert(res.isFailure)
}
test("name too long") {
val res = validate("name", "abcdef")
assert(res.isFailure)
}
test("others valid") {
assertEquals(validate("def", "ab"), ValidationResult.Pass)
assertEquals(validate("def", "abc"), ValidationResult.Pass)
assertEquals(validate("def", "abcd"), ValidationResult.Pass)
}
test("others too short") {
val res = validate("def", "a")
assert(res.isFailure)
}
test("others too long") {
val res = validate("def", "abcde")
assert(res.isFailure)
}
}
|
Netflix/atlas
|
atlas-core/src/test/scala/com/netflix/atlas/core/validation/NameValueLengthRuleSuite.scala
|
Scala
|
apache-2.0
| 2,088 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import java.io.{File, FileNotFoundException}
import java.util.Locale
import org.apache.hadoop.fs.Path
import org.scalatest.BeforeAndAfterAll
import org.apache.spark.SparkException
import org.apache.spark.sql.TestingUDT.{IntervalData, IntervalUDT, NullData, NullUDT}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSQLContext
import org.apache.spark.sql.types._
class FileBasedDataSourceSuite extends QueryTest with SharedSQLContext with BeforeAndAfterAll {
import testImplicits._
override def beforeAll(): Unit = {
super.beforeAll()
spark.sessionState.conf.setConf(SQLConf.ORC_IMPLEMENTATION, "native")
}
override def afterAll(): Unit = {
try {
spark.sessionState.conf.unsetConf(SQLConf.ORC_IMPLEMENTATION)
} finally {
super.afterAll()
}
}
private val allFileBasedDataSources = Seq("orc", "parquet", "csv", "json", "text")
private val nameWithSpecialChars = "sp&cial%c hars"
allFileBasedDataSources.foreach { format =>
test(s"Writing empty datasets should not fail - $format") {
withTempPath { dir =>
Seq("str").toDS().limit(0).write.format(format).save(dir.getCanonicalPath)
}
}
}
// `TEXT` data source always has a single column whose name is `value`.
allFileBasedDataSources.filterNot(_ == "text").foreach { format =>
test(s"SPARK-23072 Write and read back unicode column names - $format") {
withTempPath { path =>
val dir = path.getCanonicalPath
// scalastyle:off nonascii
val df = Seq("a").toDF("한글")
// scalastyle:on nonascii
df.write.format(format).option("header", "true").save(dir)
val answerDf = spark.read.format(format).option("header", "true").load(dir)
assert(df.schema.sameType(answerDf.schema))
checkAnswer(df, answerDf)
}
}
}
// Only ORC/Parquet support this. `CSV` and `JSON` returns an empty schema.
// `TEXT` data source always has a single column whose name is `value`.
Seq("orc", "parquet").foreach { format =>
test(s"SPARK-15474 Write and read back non-empty schema with empty dataframe - $format") {
withTempPath { file =>
val path = file.getCanonicalPath
val emptyDf = Seq((true, 1, "str")).toDF().limit(0)
emptyDf.write.format(format).save(path)
val df = spark.read.format(format).load(path)
assert(df.schema.sameType(emptyDf.schema))
checkAnswer(df, emptyDf)
}
}
}
Seq("orc", "parquet").foreach { format =>
test(s"SPARK-23271 empty RDD when saved should write a metadata only file - $format") {
withTempPath { outputPath =>
val df = spark.emptyDataFrame.select(lit(1).as("i"))
df.write.format(format).save(outputPath.toString)
val partFiles = outputPath.listFiles()
.filter(f => f.isFile && !f.getName.startsWith(".") && !f.getName.startsWith("_"))
assert(partFiles.length === 1)
// Now read the file.
val df1 = spark.read.format(format).load(outputPath.toString)
checkAnswer(df1, Seq.empty[Row])
assert(df1.schema.equals(df.schema.asNullable))
}
}
}
allFileBasedDataSources.foreach { format =>
test(s"SPARK-23372 error while writing empty schema files using $format") {
withTempPath { outputPath =>
val errMsg = intercept[AnalysisException] {
spark.emptyDataFrame.write.format(format).save(outputPath.toString)
}
assert(errMsg.getMessage.contains(
"Datasource does not support writing empty or nested empty schemas"))
}
// Nested empty schema
withTempPath { outputPath =>
val schema = StructType(Seq(
StructField("a", IntegerType),
StructField("b", StructType(Nil)),
StructField("c", IntegerType)
))
val df = spark.createDataFrame(sparkContext.emptyRDD[Row], schema)
val errMsg = intercept[AnalysisException] {
df.write.format(format).save(outputPath.toString)
}
assert(errMsg.getMessage.contains(
"Datasource does not support writing empty or nested empty schemas"))
}
}
}
allFileBasedDataSources.foreach { format =>
test(s"SPARK-22146 read files containing special characters using $format") {
withTempDir { dir =>
val tmpFile = s"$dir/$nameWithSpecialChars"
spark.createDataset(Seq("a", "b")).write.format(format).save(tmpFile)
val fileContent = spark.read.format(format).load(tmpFile)
checkAnswer(fileContent, Seq(Row("a"), Row("b")))
}
}
}
// Separate test case for formats that support multiLine as an option.
Seq("json", "csv").foreach { format =>
test("SPARK-23148 read files containing special characters " +
s"using $format with multiline enabled") {
withTempDir { dir =>
val tmpFile = s"$dir/$nameWithSpecialChars"
spark.createDataset(Seq("a", "b")).write.format(format).save(tmpFile)
val reader = spark.read.format(format).option("multiLine", true)
val fileContent = reader.load(tmpFile)
checkAnswer(fileContent, Seq(Row("a"), Row("b")))
}
}
}
allFileBasedDataSources.foreach { format =>
testQuietly(s"Enabling/disabling ignoreMissingFiles using $format") {
def testIgnoreMissingFiles(): Unit = {
withTempDir { dir =>
val basePath = dir.getCanonicalPath
Seq("0").toDF("a").write.format(format).save(new Path(basePath, "first").toString)
Seq("1").toDF("a").write.format(format).save(new Path(basePath, "second").toString)
val thirdPath = new Path(basePath, "third")
val fs = thirdPath.getFileSystem(spark.sessionState.newHadoopConf())
Seq("2").toDF("a").write.format(format).save(thirdPath.toString)
val files = fs.listStatus(thirdPath).filter(_.isFile).map(_.getPath)
val df = spark.read.format(format).load(
new Path(basePath, "first").toString,
new Path(basePath, "second").toString,
new Path(basePath, "third").toString)
// Make sure all data files are deleted and can't be opened.
files.foreach(f => fs.delete(f, false))
assert(fs.delete(thirdPath, true))
for (f <- files) {
intercept[FileNotFoundException](fs.open(f))
}
checkAnswer(df, Seq(Row("0"), Row("1")))
}
}
withSQLConf(SQLConf.IGNORE_MISSING_FILES.key -> "true") {
testIgnoreMissingFiles()
}
withSQLConf(SQLConf.IGNORE_MISSING_FILES.key -> "false") {
val exception = intercept[SparkException] {
testIgnoreMissingFiles()
}
assert(exception.getMessage().contains("does not exist"))
}
}
}
// Text file format only supports string type
test("SPARK-24691 error handling for unsupported types - text") {
withTempDir { dir =>
// write path
val textDir = new File(dir, "text").getCanonicalPath
var msg = intercept[AnalysisException] {
Seq(1).toDF.write.text(textDir)
}.getMessage
assert(msg.contains("Text data source does not support int data type"))
msg = intercept[AnalysisException] {
Seq(1.2).toDF.write.text(textDir)
}.getMessage
assert(msg.contains("Text data source does not support double data type"))
msg = intercept[AnalysisException] {
Seq(true).toDF.write.text(textDir)
}.getMessage
assert(msg.contains("Text data source does not support boolean data type"))
msg = intercept[AnalysisException] {
Seq(1).toDF("a").selectExpr("struct(a)").write.text(textDir)
}.getMessage
assert(msg.contains("Text data source does not support struct<a:int> data type"))
msg = intercept[AnalysisException] {
Seq((Map("Tesla" -> 3))).toDF("cars").write.mode("overwrite").text(textDir)
}.getMessage
assert(msg.contains("Text data source does not support map<string,int> data type"))
msg = intercept[AnalysisException] {
Seq((Array("Tesla", "Chevy", "Ford"))).toDF("brands")
.write.mode("overwrite").text(textDir)
}.getMessage
assert(msg.contains("Text data source does not support array<string> data type"))
// read path
Seq("aaa").toDF.write.mode("overwrite").text(textDir)
msg = intercept[AnalysisException] {
val schema = StructType(StructField("a", IntegerType, true) :: Nil)
spark.read.schema(schema).text(textDir).collect()
}.getMessage
assert(msg.contains("Text data source does not support int data type"))
msg = intercept[AnalysisException] {
val schema = StructType(StructField("a", DoubleType, true) :: Nil)
spark.read.schema(schema).text(textDir).collect()
}.getMessage
assert(msg.contains("Text data source does not support double data type"))
msg = intercept[AnalysisException] {
val schema = StructType(StructField("a", BooleanType, true) :: Nil)
spark.read.schema(schema).text(textDir).collect()
}.getMessage
assert(msg.contains("Text data source does not support boolean data type"))
}
}
  // Unsupported data types of csv, json, orc, and parquet are as follows:
// csv -> R/W: Null, Array, Map, Struct
// json -> R/W: Interval
// orc -> R/W: Interval, W: Null
// parquet -> R/W: Interval, Null
test("SPARK-24204 error handling for unsupported Array/Map/Struct types - csv") {
withTempDir { dir =>
val csvDir = new File(dir, "csv").getCanonicalPath
var msg = intercept[AnalysisException] {
Seq((1, "Tesla")).toDF("a", "b").selectExpr("struct(a, b)").write.csv(csvDir)
}.getMessage
assert(msg.contains("CSV data source does not support struct<a:int,b:string> data type"))
msg = intercept[AnalysisException] {
val schema = StructType.fromDDL("a struct<b: Int>")
spark.range(1).write.mode("overwrite").csv(csvDir)
spark.read.schema(schema).csv(csvDir).collect()
}.getMessage
assert(msg.contains("CSV data source does not support struct<b:int> data type"))
msg = intercept[AnalysisException] {
Seq((1, Map("Tesla" -> 3))).toDF("id", "cars").write.mode("overwrite").csv(csvDir)
}.getMessage
assert(msg.contains("CSV data source does not support map<string,int> data type"))
msg = intercept[AnalysisException] {
val schema = StructType.fromDDL("a map<int, int>")
spark.range(1).write.mode("overwrite").csv(csvDir)
spark.read.schema(schema).csv(csvDir).collect()
}.getMessage
assert(msg.contains("CSV data source does not support map<int,int> data type"))
msg = intercept[AnalysisException] {
Seq((1, Array("Tesla", "Chevy", "Ford"))).toDF("id", "brands")
.write.mode("overwrite").csv(csvDir)
}.getMessage
assert(msg.contains("CSV data source does not support array<string> data type"))
msg = intercept[AnalysisException] {
val schema = StructType.fromDDL("a array<int>")
spark.range(1).write.mode("overwrite").csv(csvDir)
spark.read.schema(schema).csv(csvDir).collect()
}.getMessage
assert(msg.contains("CSV data source does not support array<int> data type"))
msg = intercept[AnalysisException] {
Seq((1, new UDT.MyDenseVector(Array(0.25, 2.25, 4.25)))).toDF("id", "vectors")
.write.mode("overwrite").csv(csvDir)
}.getMessage
assert(msg.contains("CSV data source does not support array<double> data type"))
msg = intercept[AnalysisException] {
val schema = StructType(StructField("a", new UDT.MyDenseVectorUDT(), true) :: Nil)
spark.range(1).write.mode("overwrite").csv(csvDir)
spark.read.schema(schema).csv(csvDir).collect()
}.getMessage
assert(msg.contains("CSV data source does not support array<double> data type."))
}
}
test("SPARK-24204 error handling for unsupported Interval data types - csv, json, parquet, orc") {
withTempDir { dir =>
val tempDir = new File(dir, "files").getCanonicalPath
// write path
Seq("csv", "json", "parquet", "orc").foreach { format =>
var msg = intercept[AnalysisException] {
sql("select interval 1 days").write.format(format).mode("overwrite").save(tempDir)
}.getMessage
assert(msg.contains("Cannot save interval data type into external storage."))
msg = intercept[AnalysisException] {
spark.udf.register("testType", () => new IntervalData())
sql("select testType()").write.format(format).mode("overwrite").save(tempDir)
}.getMessage
assert(msg.toLowerCase(Locale.ROOT)
.contains(s"$format data source does not support calendarinterval data type."))
}
// read path
Seq("parquet", "csv").foreach { format =>
var msg = intercept[AnalysisException] {
val schema = StructType(StructField("a", CalendarIntervalType, true) :: Nil)
spark.range(1).write.format(format).mode("overwrite").save(tempDir)
spark.read.schema(schema).format(format).load(tempDir).collect()
}.getMessage
assert(msg.toLowerCase(Locale.ROOT)
.contains(s"$format data source does not support calendarinterval data type."))
msg = intercept[AnalysisException] {
val schema = StructType(StructField("a", new IntervalUDT(), true) :: Nil)
spark.range(1).write.format(format).mode("overwrite").save(tempDir)
spark.read.schema(schema).format(format).load(tempDir).collect()
}.getMessage
assert(msg.toLowerCase(Locale.ROOT)
.contains(s"$format data source does not support calendarinterval data type."))
}
}
}
test("SPARK-24204 error handling for unsupported Null data types - csv, parquet, orc") {
withTempDir { dir =>
val tempDir = new File(dir, "files").getCanonicalPath
Seq("orc").foreach { format =>
// write path
var msg = intercept[AnalysisException] {
sql("select null").write.format(format).mode("overwrite").save(tempDir)
}.getMessage
assert(msg.toLowerCase(Locale.ROOT)
.contains(s"$format data source does not support null data type."))
msg = intercept[AnalysisException] {
spark.udf.register("testType", () => new NullData())
sql("select testType()").write.format(format).mode("overwrite").save(tempDir)
}.getMessage
assert(msg.toLowerCase(Locale.ROOT)
.contains(s"$format data source does not support null data type."))
// read path
        // We expect the types below to pass for backward-compatibility
// Null type
var schema = StructType(StructField("a", NullType, true) :: Nil)
spark.range(1).write.format(format).mode("overwrite").save(tempDir)
spark.read.schema(schema).format(format).load(tempDir).collect()
// UDT having null data
schema = StructType(StructField("a", new NullUDT(), true) :: Nil)
spark.range(1).write.format(format).mode("overwrite").save(tempDir)
spark.read.schema(schema).format(format).load(tempDir).collect()
}
Seq("parquet", "csv").foreach { format =>
// write path
var msg = intercept[AnalysisException] {
sql("select null").write.format(format).mode("overwrite").save(tempDir)
}.getMessage
assert(msg.toLowerCase(Locale.ROOT)
.contains(s"$format data source does not support null data type."))
msg = intercept[AnalysisException] {
spark.udf.register("testType", () => new NullData())
sql("select testType()").write.format(format).mode("overwrite").save(tempDir)
}.getMessage
assert(msg.toLowerCase(Locale.ROOT)
.contains(s"$format data source does not support null data type."))
// read path
msg = intercept[AnalysisException] {
val schema = StructType(StructField("a", NullType, true) :: Nil)
spark.range(1).write.format(format).mode("overwrite").save(tempDir)
spark.read.schema(schema).format(format).load(tempDir).collect()
}.getMessage
assert(msg.toLowerCase(Locale.ROOT)
.contains(s"$format data source does not support null data type."))
msg = intercept[AnalysisException] {
val schema = StructType(StructField("a", new NullUDT(), true) :: Nil)
spark.range(1).write.format(format).mode("overwrite").save(tempDir)
spark.read.schema(schema).format(format).load(tempDir).collect()
}.getMessage
assert(msg.toLowerCase(Locale.ROOT)
.contains(s"$format data source does not support null data type."))
}
}
}
test(s"SPARK-25132: case-insensitive field resolution when reading from Parquet") {
withTempDir { dir =>
val format = "parquet"
val tableDir = dir.getCanonicalPath + s"/$format"
val tableName = s"spark_25132_${format}"
withTable(tableName) {
val end = 5
val data = spark.range(end).selectExpr("id as A", "id * 2 as b", "id * 3 as B")
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
data.write.format(format).mode("overwrite").save(tableDir)
}
sql(s"CREATE TABLE $tableName (a LONG, b LONG) USING $format LOCATION '$tableDir'")
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") {
checkAnswer(sql(s"select a from $tableName"), data.select("A"))
checkAnswer(sql(s"select A from $tableName"), data.select("A"))
// RuntimeException is triggered at executor side, which is then wrapped as
// SparkException at driver side
val e1 = intercept[SparkException] {
sql(s"select b from $tableName").collect()
}
assert(
e1.getCause.isInstanceOf[RuntimeException] &&
e1.getCause.getMessage.contains(
"""Found duplicate field(s) "b": [b, B] in case-insensitive mode"""))
val e2 = intercept[SparkException] {
sql(s"select B from $tableName").collect()
}
assert(
e2.getCause.isInstanceOf[RuntimeException] &&
e2.getCause.getMessage.contains(
"""Found duplicate field(s) "b": [b, B] in case-insensitive mode"""))
}
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
checkAnswer(sql(s"select a from $tableName"), (0 until end).map(_ => Row(null)))
checkAnswer(sql(s"select b from $tableName"), data.select("b"))
}
}
}
}
}
object TestingUDT {
@SQLUserDefinedType(udt = classOf[IntervalUDT])
class IntervalData extends Serializable
class IntervalUDT extends UserDefinedType[IntervalData] {
override def sqlType: DataType = CalendarIntervalType
override def serialize(obj: IntervalData): Any =
throw new NotImplementedError("Not implemented")
override def deserialize(datum: Any): IntervalData =
throw new NotImplementedError("Not implemented")
override def userClass: Class[IntervalData] = classOf[IntervalData]
}
@SQLUserDefinedType(udt = classOf[NullUDT])
private[sql] class NullData extends Serializable
private[sql] class NullUDT extends UserDefinedType[NullData] {
override def sqlType: DataType = NullType
override def serialize(obj: NullData): Any = throw new NotImplementedError("Not implemented")
override def deserialize(datum: Any): NullData =
throw new NotImplementedError("Not implemented")
override def userClass: Class[NullData] = classOf[NullData]
}
}
|
sahilTakiar/spark
|
sql/core/src/test/scala/org/apache/spark/sql/FileBasedDataSourceSuite.scala
|
Scala
|
apache-2.0
| 20,620 |
package org.jetbrains.plugins.scala.lang.typeInference
import org.jetbrains.plugins.scala.base.ScalaLightCodeInsightFixtureTestAdapter
/**
* @author Roman.Shein
* @since 01.04.2016.
*/
class CurriedTypeInferenceTest extends ScalaLightCodeInsightFixtureTestAdapter {
def testSCL7332() = checkTextHasNoErrors(
"""
|class Foo[A, B](a: A, b: B)(f: B => A)
|
|val foo1 = new Foo(1, identity[Int] _)({ case f => f(2) })
|val foo2 = new Foo(1, identity[Int] _)(f => f(2))
""".stripMargin)
}
|
loskutov/intellij-scala
|
test/org/jetbrains/plugins/scala/lang/typeInference/CurriedTypeInferenceTest.scala
|
Scala
|
apache-2.0
| 526 |
package mage
import com.typesafe.scalalogging.LazyLogging
/**
* Models the roll of a die using rollToHit() or rollDamage() and returns an algebraic type denoting the outcome
* of the attempt. Each result includes a value (for accumulating results), as well as the actual 'Die' rolled.
*/
sealed trait DieResult extends LazyLogging with Product with Serializable {
def die: Die
def value: Int
def roll: Int = die.value
def difficulty: Int = die.difficulty
def isSuccess: Boolean = die.isSuccess
def name: String = this.getClass.getSimpleName
def isBotch: Boolean = false
def isSpecial: Boolean = false
override def toString = s"$name($die, $value)"
}
final case class DieHit(die: Die) extends DieResult {
def value: Int = 1
require(!isBotch, s"$this should return false from isBotch")
require(isSuccess, s"$this should return true from isSuccess")
require(roll >= die.difficulty, s"$this should have a roll >= ${die.difficulty}")
}
final case class DieMiss(die: Die) extends DieResult {
def value: Int = 0
require(!isBotch, s"$this should return false from isBotch")
require(!isSuccess, s"$this should return false from isSuccess")
require(!isSpecial, s"$this should return false from isSpecial")
}
final case class DieBotch(die: Die) extends DieResult {
def value: Int = -1
override def isBotch = true
require(isBotch, s"$this should return true from isBotch")
require(!isSuccess, s"$this should return false from isSuccess")
require(!isSpecial, s"$this should return false from isSpecial")
}
final case class DieSpecialHit(die: Die) extends DieResult {
/** returns total number of hits from bonus rolls */
private def rollBonusHits(bonusHits: Int) : Int = {
val d = Die.roll(die.difficulty)
logger.trace(s" SpecialBonusRoll(${d.value})")
if (d.isSpecial) rollBonusHits(bonusHits + 1)
else if (d.isSuccess) bonusHits + 1
else if (d.isBotch) bonusHits - 1 else bonusHits
}
override def isSpecial = true
logger.trace(" Roll for Special Bonus Hits")
private val hits = rollBonusHits(1)
def value: Int = hits
require(!isBotch, s"$this should return false from isBotch")
require(isSuccess, s"$this should return true from isSuccess")
require(isSpecial, s"$this should return true from isSpecial")
require(value >= 0, s"$this should have a value >= 0")
require(roll == Die.dieBase, s"$this should have a roll = ${Die.dieBase}")
}
object DieResult extends LazyLogging {
/**
* Rolls one Die to hit and returns the result as a DieHit, DieMiss, DieBotch, or DieSpecialHit.
*
* @return a DieResult, one of the following: DieHit, DieMiss, DieBotch, or DieSpecialHit
* @see Core rules (WW4600), page xxx, ""
*/
def rollToHit(difficulty: Int = 6, specialty: Boolean = false): DieResult = {
val d = Die.roll(difficulty)
val r = if (d.isSpecial && specialty) DieSpecialHit(d)
else if (d.isSuccess) DieHit(d)
else if (d.isBotch) DieBotch(d)
else DieMiss(d)
logger.trace(" {}", r)
r
}
/**
   * Rolls one Die of damage and returns the result as a DieHit or DieMiss.
*
* @return a DieHit or DieMiss containing the rolled die
   * @note There is no concept of botching when rolling damage; a roll of 1 is returned as DieMiss(Die(1))
* @see Core rules (WW4600), page 237, "Stage Three: Resolution"
*/
def rollDamage(difficulty: Int = 6): DieResult = {
val d = Die.roll(difficulty)
val r = if (d.isSuccess) DieHit(d) else DieMiss(d)
logger.trace(" {}", r)
r
}
}
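// Illustrative sketch: accumulating hits from a dice pool with rollToHit. The
// pool size and difficulty are arbitrary sample values.
object DieResultExample {
  // Sums the result values: +1 per hit, -1 per botch, bonus hits on specials
  def hits(pool: Int, difficulty: Int = 6, specialty: Boolean = false): Int =
    (1 to pool).map(_ => DieResult.rollToHit(difficulty, specialty).value).sum
}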
|
locke8/weaponmark
|
src/main/scala/mage/DieResult.scala
|
Scala
|
mit
| 3,865 |
package org.openmole.site
/**
* Created by mathieu on 11/05/17.
*/
package object shared {
lazy val sitexMain = "sitex-main"
lazy val sitexDoc = "sitex-doc"
lazy val sitexIntro = "sitex-intro"
lazy val sitexIntroMore = "sitex-intro-more"
lazy val sitexDocDetails = "sitex-doc-details"
}
|
openmole/openmole-site
|
site/shared/src/main/scala/org/openmole/site/package.scala
|
Scala
|
agpl-3.0
| 306 |
package com.sfxcode.nosql.mongo.relation
object RelationCache extends RelationCaching {
var relationCaching: RelationCaching = new DefaultRelationCache
override def addCachedValue(key: String, value: AnyRef): Unit =
relationCaching.addCachedValue(key, value)
override def getCachedValue[B <: AnyRef](key: String): B =
relationCaching.getCachedValue[B](key)
override def hasCachedValue(key: String): Boolean =
relationCaching.hasCachedValue(key)
override def removeCachedValue(key: String): Unit =
relationCaching.removeCachedValue(key)
}
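// Illustrative sketch: swapping in a custom backend and using the facade. Any
// RelationCaching implementation can replace the default; the key and value
// below are sample data.
object RelationCacheExample {
  def install(custom: RelationCaching): Unit = {
    RelationCache.relationCaching = custom
    RelationCache.addCachedValue("user:42", "cached-user")
    assert(RelationCache.hasCachedValue("user:42"))
  }
}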
|
sfxcode/simple-mongo
|
src/main/scala/com/sfxcode/nosql/mongo/relation/RelationCache.scala
|
Scala
|
apache-2.0
| 569 |
/**
* This file is part of the TA Buddy project.
* Copyright (c) 2013-2014 Alexey Aksenov [email protected]
*
* This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License version 3
* as published by the Free Software Foundation with the addition of the
* following permission added to Section 15 as permitted in Section 7(a):
* FOR ANY PART OF THE COVERED WORK IN WHICH THE COPYRIGHT IS OWNED
* BY Limited Liability Company «MEZHGALAKTICHESKIJ TORGOVYJ ALIANS»,
* Limited Liability Company «MEZHGALAKTICHESKIJ TORGOVYJ ALIANS» DISCLAIMS
* THE WARRANTY OF NON INFRINGEMENT OF THIRD PARTY RIGHTS.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE.
 * See the GNU Affero General Public License for more details.
 * You should have received a copy of the GNU Affero General Public License
* along with this program; if not, see http://www.gnu.org/licenses or write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA, 02110-1301 USA, or download the license from the following URL:
* http://www.gnu.org/licenses/agpl.html
*
* The interactive user interfaces in modified source and object code versions
* of this program must display Appropriate Legal Notices, as required under
 * Section 5 of the GNU Affero General Public License.
*
 * In accordance with Section 7(b) of the GNU Affero General Public License,
* you must retain the producer line in every report, form or document
* that is created or manipulated using TA Buddy.
*
* You can be released from the requirements of the license by purchasing
* a commercial license. Buying such a license is mandatory as soon as you
* develop commercial activities involving the TA Buddy software without
* disclosing the source code of your own applications.
* These activities include: offering paid services to customers,
* serving files in a web or/and network application,
* shipping TA Buddy with a closed source product.
*
* For more information, please contact Digimead Team at this
* address: [email protected]
*/
package org.digimead.tabuddy.desktop.view.modification.ui.dialog.filtered
import org.digimead.digi.lib.log.api.XLoggable
import org.digimead.tabuddy.desktop.view.modification.Default
import org.digimead.tabuddy.model.Model
import org.digimead.tabuddy.model.graph.Graph
import org.eclipse.jface.viewers.{ CellLabelProvider, ViewerCell }
import org.eclipse.swt.graphics.Point
object ColumnTypeFrom extends XLoggable {
class TLabelProvider(graph: Graph[_ <: Model.Like]) extends CellLabelProvider {
/** Update the label for cell. */
override def update(cell: ViewerCell) = cell.getElement() match {
case item: FilterEditor.PropertyItem[_] ⇒
cell.setText(item.ptype.name(graph))
case unknown ⇒
log.fatal("Unknown item " + unknown.getClass())
}
/** Get the text displayed in the tool tip for object. */
override def getToolTipText(element: Object): String = element match {
case item: FilterEditor.PropertyItem[_] ⇒
item.ptype.typeSymbol.name
case unknown ⇒
log.fatal("Unknown item " + unknown.getClass())
null
}
/**
* Return the amount of pixels in x and y direction that the tool tip to
* pop up from the mouse pointer.
*/
override def getToolTipShift(obj: Object): Point = Default.toolTipShift
/** The time in milliseconds until the tool tip is displayed. */
override def getToolTipDisplayDelayTime(obj: Object): Int = Default.toolTipDisplayDelayTime
/** The time in milliseconds the tool tip is shown for. */
override def getToolTipTimeDisplayed(obj: Object): Int = Default.toolTipTimeDisplayed
}
}
|
digimead/digi-TABuddy-desktop
|
part-view-modification/src/main/scala/org/digimead/tabuddy/desktop/view/modification/ui/dialog/filtered/ColumnTypeFrom.scala
|
Scala
|
agpl-3.0
| 3,885 |
package com.peim.utils
import com.typesafe.config.ConfigFactory
trait Config {
private val config = ConfigFactory.load()
val httpHost: String = config.getString("http.host")
val httpPort: Int = config.getInt("http.port")
}
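// Illustrative sketch: mixing the trait into an entry point. The object name
// and log line are invented for the example.
object ServerMainExample extends App with Config {
  println(s"Binding HTTP endpoint to $httpHost:$httpPort")
}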
|
peim/money-transfer-service
|
src/main/scala/com/peim/utils/Config.scala
|
Scala
|
mit
| 232 |
package com.typesafe.slick.testkit.util
import com.typesafe.config.Config
import java.io._
import java.net.{URL, URLClassLoader}
import java.sql.{Connection, Driver}
import java.util.Properties
import java.util.concurrent.ExecutionException
import java.util.zip.GZIPInputStream
import scala.collection.mutable
import scala.concurrent.{Await, Future, ExecutionContext}
import slick.basic.{BasicProfile, Capability}
import slick.dbio.{NoStream, DBIOAction, DBIO}
import slick.jdbc.{JdbcProfile, ResultSetAction, JdbcDataSource}
import slick.jdbc.GetResult._
import slick.relational.RelationalProfile
import slick.sql.SqlProfile
import slick.util.AsyncExecutor
object TestDB {
object capabilities {
/** Marks a driver which is specially supported by the test kit for plain SQL queries. */
val plainSql = new Capability("test.plainSql")
/** Supports JDBC metadata in general */
val jdbcMeta = new Capability("test.jdbcMeta")
/** Supports JDBC metadata getClientInfoProperties method */
val jdbcMetaGetClientInfoProperties = new Capability("test.jdbcMetaGetClientInfoProperties")
/** Supports JDBC metadata getFunctions method */
val jdbcMetaGetFunctions = new Capability("test.jdbcMetaGetFunctions")
/** Supports JDBC metadata getIndexInfo method */
val jdbcMetaGetIndexInfo = new Capability("test.jdbcMetaGetIndexInfo")
/** Supports all tested transaction isolation levels */
val transactionIsolation = new Capability("test.transactionIsolation")
/** Supports select for update row locking */
val selectForUpdateRowLocking = new Capability("test.selectForUpdateRowLocking")
val all = Set(plainSql, jdbcMeta, jdbcMetaGetClientInfoProperties, jdbcMetaGetFunctions, jdbcMetaGetIndexInfo,
transactionIsolation, selectForUpdateRowLocking)
}
/** Copy a file, expanding it if the source name ends with .gz */
def copy(src: File, dest: File): Unit = {
dest.createNewFile()
val out = new FileOutputStream(dest)
try {
var in: InputStream = new FileInputStream(src)
try {
if(src.getName.endsWith(".gz")) in = new GZIPInputStream(in)
val buf = new Array[Byte](4096)
var cont = true
while(cont) {
val len = in.read(buf)
if(len < 0) cont = false
else out.write(buf, 0, len)
}
} finally in.close()
} finally out.close()
}
/** Delete files in the testDB directory */
def deleteDBFiles(prefix: String): Unit = {
assert(!prefix.isEmpty, "prefix must not be empty")
def deleteRec(f: File): Boolean = {
if(f.isDirectory()) f.listFiles.forall(deleteRec _) && f.delete()
else f.delete()
}
val dir = new File(TestkitConfig.testDir)
if(!dir.isDirectory) throw new IOException("Directory "+TestkitConfig.testDir+" not found")
for(f <- dir.listFiles if f.getName startsWith prefix) {
val p = TestkitConfig.testDir+"/"+f.getName
if(deleteRec(f)) println("[Deleted database file "+p+"]")
else throw new IOException("Couldn't delete database file "+p)
}
}
def mapToProps(m: Map[String, String]) = {
val p = new Properties
if(m ne null)
for((k,v) <- m) if(k.ne(null) && v.ne(null)) p.setProperty(k, v)
p
}
}
/**
* Describes a database against which you can run TestKit tests. It includes
* features such as reading the configuration file, setting up a DB connection,
* removing DB files left over by a test run, etc.
*/
trait TestDB {
type Profile <: BasicProfile
/** The test database name */
val confName: String
/** The test configuration */
lazy val config: Config = TestkitConfig.testConfig(confName)
/** Check if this test database is enabled */
def isEnabled = TestkitConfig.testDBs.map(_.contains(confName)).getOrElse(true)
/** This method is called to clean up before running all tests. */
def cleanUpBefore(): Unit = {}
/** This method is called to clean up after running all tests. It
* defaults to cleanUpBefore(). */
def cleanUpAfter() = cleanUpBefore()
/** The profile for the database */
val profile: Profile
/** Indicates whether the database persists after closing the last connection */
def isPersistent = true
/** This method is called between individual test methods to remove all
* database artifacts that were created by the test. */
def dropUserArtifacts(implicit session: profile.Backend#Session): Unit
/** Create the Database object for this test database configuration */
def createDB(): profile.Backend#Database
/** Indicates whether the database's sessions have shared state. When a
* database is shared but not persistent, Testkit keeps a session open
* to make it persistent. */
def isShared = true
/** The capabilities of the Slick profile, possibly modified for this
* test configuration. */
def capabilities: Set[Capability] = profile.capabilities ++ TestDB.capabilities.all
def confOptionalString(path: String) = if(config.hasPath(path)) Some(config.getString(path)) else None
def confString(path: String) = confOptionalString(path).getOrElse(null)
def confStrings(path: String) = TestkitConfig.getStrings(config, path).getOrElse(Nil)
/** The tests to run for this configuration. */
def testClasses: Seq[Class[_ <: GenericTest[_ >: Null <: TestDB]]] = TestkitConfig.testClasses
}
trait RelationalTestDB extends TestDB {
type Profile <: RelationalProfile
def assertTablesExist(tables: String*): DBIO[Unit]
def assertNotTablesExist(tables: String*): DBIO[Unit]
}
trait SqlTestDB extends RelationalTestDB { type Profile <: SqlProfile }
abstract class JdbcTestDB(val confName: String) extends SqlTestDB {
import profile.api.actionBasedSQLInterpolation
type Profile = JdbcProfile
lazy val database = profile.backend.Database
val jdbcDriver: String
final def getLocalTables(implicit session: profile.Backend#Session) = blockingRunOnSession(ec => localTables(ec))
final def getLocalSequences(implicit session: profile.Backend#Session) = blockingRunOnSession(ec => localSequences(ec))
def canGetLocalTables = true
def localTables(implicit ec: ExecutionContext): DBIO[Vector[String]] =
ResultSetAction[(String,String,String, String)](_.conn.getMetaData().getTables("", "", null, null)).map { ts =>
ts.filter(_._4.toUpperCase == "TABLE").map(_._3).sorted
}
def localSequences(implicit ec: ExecutionContext): DBIO[Vector[String]] =
ResultSetAction[(String,String,String, String)](_.conn.getMetaData().getTables("", "", null, null)).map { ts =>
ts.filter(_._4.toUpperCase == "SEQUENCE").map(_._3).sorted
}
def dropUserArtifacts(implicit session: profile.Backend#Session) = blockingRunOnSession { implicit ec =>
for {
tables <- localTables
sequences <- localSequences
_ <- DBIO.seq((tables.map(t => sqlu"""drop table if exists #${profile.quoteIdentifier(t)} cascade""") ++
sequences.map(t => sqlu"""drop sequence if exists #${profile.quoteIdentifier(t)} cascade""")): _*)
} yield ()
}
def assertTablesExist(tables: String*) =
DBIO.seq(tables.map(t => sql"""select 1 from #${profile.quoteIdentifier(t)} where 1 < 0""".as[Int]): _*)
def assertNotTablesExist(tables: String*) =
DBIO.seq(tables.map(t => sql"""select 1 from #${profile.quoteIdentifier(t)} where 1 < 0""".as[Int].failed): _*)
def createSingleSessionDatabase(implicit session: profile.Backend#Session, executor: AsyncExecutor = AsyncExecutor.default()): profile.Backend#Database = {
val wrappedConn = new DelegateConnection(session.conn) {
override def close(): Unit = ()
}
profile.backend.Database.forSource(new JdbcDataSource {
def createConnection(): Connection = wrappedConn
def close(): Unit = ()
val maxConnections: Option[Int] = Some(1)
}, executor)
}
final def blockingRunOnSession[R](f: ExecutionContext => DBIOAction[R, NoStream, Nothing])(implicit session: profile.Backend#Session): R = {
val ec = new ExecutionContext {
def execute(runnable: Runnable): Unit = runnable.run()
def reportFailure(t: Throwable): Unit = throw t
}
val db = createSingleSessionDatabase(session, new AsyncExecutor {
def executionContext: ExecutionContext = ec
def close(): Unit = ()
})
db.run(f(ec)).value.get.get
}
protected[this] def await[T](f: Future[T]): T =
try Await.result(f, TestkitConfig.asyncTimeout)
catch { case ex: ExecutionException => throw ex.getCause }
}
abstract class InternalJdbcTestDB(confName: String) extends JdbcTestDB(confName) { self =>
val url: String
def createDB(): profile.Backend#Database = database.forURL(url, driver = jdbcDriver)
override def toString = url
}
abstract class ExternalJdbcTestDB(confName: String) extends JdbcTestDB(confName) {
import profile.api.actionBasedSQLInterpolation
val jdbcDriver = confString("driver")
val testDB = confString("testDB")
val create = confStrings("create")
val postCreate = confStrings("postCreate")
val drop = confStrings("drop")
override def toString = confString("testConn.url")
override def isEnabled = super.isEnabled && config.getBoolean("enabled")
override lazy val testClasses: Seq[Class[_ <: GenericTest[_ >: Null <: TestDB]]] =
TestkitConfig.getStrings(config, "testClasses")
.map(_.map(n => Class.forName(n).asInstanceOf[Class[_ <: GenericTest[_ >: Null <: TestDB]]]))
.getOrElse(super.testClasses)
def databaseFor(path: String) = database.forConfig(path, config)
override def createDB() = databaseFor("testConn")
override def cleanUpBefore(): Unit = {
    if(drop.nonEmpty || create.nonEmpty) {
println("[Creating test database "+this+"]")
await(databaseFor("adminConn").run(
DBIO.seq((drop ++ create).map(s => sqlu"#$s"): _*).withPinnedSession
))
}
    if(postCreate.nonEmpty) {
await(createDB().run(
DBIO.seq(postCreate.map(s => sqlu"#$s"): _*).withPinnedSession
))
}
}
override def cleanUpAfter(): Unit = {
    if(drop.nonEmpty) {
println("[Dropping test database "+this+"]")
await(databaseFor("adminConn").run(
DBIO.seq(drop.map(s => sqlu"#$s"): _*).withPinnedSession
))
}
}
}
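// A minimal sketch of a concrete configuration built on the classes above; the
// "h2mem" confName, the H2 profile/driver, and the in-memory URL are assumptions
// made for illustration, not names guaranteed by Testkit's shipped configurations.
object SampleH2MemTestDB extends InternalJdbcTestDB("h2mem") {
  val profile: Profile = slick.jdbc.H2Profile
  val url = "jdbc:h2:mem:sample_test;DB_CLOSE_DELAY=-1"
  val jdbcDriver = "org.h2.Driver"
  // An in-memory database disappears once its last connection closes
  override def isPersistent = false
}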
|
slick/slick
|
slick-testkit/src/main/scala/com/typesafe/slick/testkit/util/TestDB.scala
|
Scala
|
bsd-2-clause
| 10,230 |
package scala.pickling.non.primitive.field
import org.scalatest.FunSuite
import scala.pickling._, scala.pickling.Defaults._, json._
// Note: previously, pickling would not consider `final` when eliding types. Now we always do.
final case class Person(name: String, age: Int)
case class Philipp(mother: Person)
class NonPrimitiveFieldTest extends FunSuite {
test("main") {
val gudrun = Person("Gudrun", 62)
val pckl = Philipp(gudrun).pickle
assert(pckl.value === """
|{
| "$type": "scala.pickling.non.primitive.field.Philipp",
| "mother": {
| "name": "Gudrun",
| "age": 62
| }
|}
""".stripMargin.trim)
assert(pckl.unpickle[Philipp] === Philipp(gudrun))
}
}
|
scala/pickling
|
core/src/test/scala/scala/pickling/generation/NonPrimitiveFieldTest.scala
|
Scala
|
bsd-3-clause
| 734 |
/*
* product-collections is distributed under the 2-Clause BSD license. See the
* LICENSE file in the root of the repository.
*
* Copyright (c) 2013 - 2014 Mark Lister
*/
package com.github.marklister.collections
/**
 * ==Immutable product-collection data structures==
*
* At the moment the only structure available is a CollSeq
*
* ===Creating===
* Generally you use the companion object to construct the appropriate
* CollSeq:
* {{{
* scala> CollSeq(("Jan",100,200),("Feb",120,230),("Mar",300,330))
* res0: com.github.marklister.collections.immutable.CollSeq3[String,Int,Int] =
* CollSeq((Jan,100,200),
* (Feb,120,230),
* (Mar,300,330))
* }}}
* ===Extract a column===
* {{{scala> res0._2
* res1: Seq[Int] = List(100, 120, 300)
* }}}
* ===Assemble columns into a CollSeq===
* {{{scala> res0._3.flatZip(res0._1).flatZip(res0._2)
* res3: com.github.marklister.collections.immutable.CollSeq3[Int,String,Int] =
* CollSeq((200,Jan,100),
* (230,Feb,120),
* (330,Mar,300))}}}
*/
package object immutable {}
|
marklister/product-collections
|
shared/src/main/scala/collections/immutable/package.scala
|
Scala
|
bsd-2-clause
| 1,062 |
/*
* (c) Copyright 2016 Hewlett Packard Enterprise Development LP
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cogx.compiler.codegenerator.opencl.hyperkernels.fastfouriertransform
/** The direction of the current 1D transform in a 1D, 2D, or 3D FFT.
*
* @author Greg Snider
*/
private[cogx]
abstract sealed class ClFFTKernelDir
private[cogx]
case object X extends ClFFTKernelDir
private[cogx]
case object Y extends ClFFTKernelDir
private[cogx]
case object Z extends ClFFTKernelDir
|
hpe-cct/cct-core
|
src/main/scala/cogx/compiler/codegenerator/opencl/hyperkernels/fastfouriertransform/ClFFTKernelDir.scala
|
Scala
|
apache-2.0
| 1,014 |
package org.bfn.ninetynineprobs
object P86 {
// TODO
}
|
bfontaine/99Scala
|
src/main/scala/P86.scala
|
Scala
|
mit
| 60 |
/* Copyright (C) 2008-2014 University of Massachusetts Amherst.
This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
http://factorie.cs.umass.edu, http://github.com/factorie
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package cc.factorie.util
import java.io._
import scala.reflect.ClassTag
// TODO Why insist on AnyRef? Why not just Any? This would make app.nlp.DocumentProcessor a little cleaner. -akm
/** Provides member "attr" which is a map from a class to an attribute value (instance of that class).
This is used to attach arbitrary "attributes" to objects that inherit from this trait.
Conveniently these attributes do not need to be pre-compiled into the object as class members,
and yet when fetched they are returned with the correct Scala type known.
For example, attributes are used to attach a part-of-speech label to a cc.factorie.app.nlp.Token,
to attach a ParseTree to a Sentence, and coreference information to a Document.
Basic example usage: object foo extends Attr; foo.attr += "bar"; require(foo.attr[String] == "bar"); foo.attr.remove[String].
@author Andrew McCallum */
trait Attr extends Serializable {
/** A collection of attributes, keyed by the attribute class. */
def getAttr = attr
object attr extends Serializable {
private var _attr: Array[AnyRef] = new Array[AnyRef](2)
/** The number of attributes present. */
def length: Int = { var i = 0; while ((i < _attr.length) && (_attr(i) ne null)) i += 1; i }
/** The capacity of the array holding the attributes. May be more than "length", the number of attributes present. */
def capacity: Int = _attr.length
    private def setCapacity(cap:Int): Unit = { val ta = new Array[AnyRef](cap); System.arraycopy(_attr, 0, ta, 0, math.min(cap, _attr.length)); _attr = ta }
    /** Make sure there is capacity of at least "cap" */
    def ensureCapacity(cap:Int): Unit = if (cap > _attr.length) { val ta = new Array[AnyRef](cap); System.arraycopy(_attr, 0, ta, 0, _attr.length); _attr = ta }
/** Increase capacity by "incr". */
def increaseCapacity(incr:Int): Unit = { val ta = new Array[AnyRef](_attr.length+incr); System.arraycopy(_attr, 0, ta, 0, _attr.length); _attr = ta }
/** Remove the attribute at index i. */
def removeIndex(i:Int): Unit = {
val len = length
if (i == len - 1) _attr(i) = null
else {
System.arraycopy(_attr, i+1, _attr, i, len-i-1)
_attr(len-1) = null
}
}
/** Re-allocate the attribute array to remove any excess capacity */
def trimCapacity(): Unit = { val l = length; if (l < _attr.length) setCapacity(l) }
/** Add the given attribute, with key equal to its class. */
def +=[C<:AnyRef](value:C): C = {
var i = 0
val key = value.getClass
while (i < _attr.length && (_attr(i) ne null) && _attr(i).getClass != key)
i += 1
if (i == _attr.length)
increaseCapacity(1)
_attr(i) = value
value
}
/** Returns the index of the last attribute whose class is assignable from the argument.
Attributes occur in the order in which they were inserted.
Note this means you can add a:MyClass, then add b:SubclassOfMyClass, then index[MyClass] will return the index of "b". */
@inline final def index(key:Class[_]): Int = {
var i = _attr.length - 1
while (i >= 0) {
if ((_attr(i) ne null) && key.isAssignableFrom(_attr(i).getClass))
return i
i -= 1
}
-1
}
/** Returns the index of the last attribute whose class is assignable from the argument.
Attributes occur in the order in which they were inserted.
Note this means you can add a:MyClass, then add b:SubclassOfMyClass, then index[MyClass] will return the index of "b". */
@inline final def index[C<:AnyRef]()(implicit m: ClassTag[C]): Int = index(m.runtimeClass)
/** Return the index of the last attribute whose class is exactly the argument.
Attributes occur in the order in which they were inserted. */
@inline final def indexExactly(key:Class[_]): Int = {
var i = _attr.length - 1
while (i >= 0) {
if (key eq _attr(i).getClass) return i
i -= 1
}
-1
}
/** Return true if there is an attribute of class equal to or subclass of the argument. */
def contains[C<:AnyRef]()(implicit m: ClassTag[C]): Boolean = index(m.runtimeClass) >= 0
/** Return true if there is an attribute of class equal to or subclass of the argument. */
def contains(key:Class[_]): Boolean = index(key) >= 0
/** Return true if there is an attribute of class exactly equal to the argument. */
def containsExactly[C<:AnyRef]()(implicit m: ClassTag[C]): Boolean = indexExactly(m.runtimeClass) >= 0
/** Return true if there is an attribute of class exactly equal to the argument. */
def containsExactly(key: Class[_]): Boolean = indexExactly(key) >= 0
/** Returns a sequence of all attributes with classes assignable to C (i.e. that are either C or a subclass of C). */
def all[C<:AnyRef]()(implicit m: ClassTag[C]): Seq[C] = {
val key = m.runtimeClass
val result = new scala.collection.mutable.ArrayBuffer[C]
var i = 0
while (i < _attr.length) {
if ((_attr(i) ne null) && key.isAssignableFrom(_attr(i).getClass)) result += _attr(i).asInstanceOf[C]
i += 1
}
result
}
/** Remove all attributes with class matching or subclass of C.
For example, to remove all attributes call remove[AnyRef].
If call results in no removals, will not throw an Error. */
def remove[C<:AnyRef](implicit m: ClassTag[C]): Unit = {
val key = m.runtimeClass
var i = 0
while (i < _attr.length) {
if ((_attr(i) ne null) && key.isAssignableFrom(_attr(i).getClass)) removeIndex(i)
else i += 1
}
}
/** Return a sequence of all attributes */
def values: Seq[AnyRef] = {
val result = new scala.collection.mutable.ArrayBuffer[AnyRef]
var i = 0
while (i < _attr.length) {
if (_attr(i) ne null) result += _attr(i)
i += 1
}
result
}
/** Fetch the first value associated with the given class. If none present, return null. */
def apply[C<:AnyRef]()(implicit m: ClassTag[C]): C = {
var i = index(m.runtimeClass)
if (i >= 0) _attr(i).asInstanceOf[C] else null.asInstanceOf[C]
}
/** Fetch the first value associated with the given class. If none present, return null. */
def apply[C<:AnyRef](key:Class[C]): C ={
var i = index(key)
if (i >= 0) _attr(i).asInstanceOf[C] else null.asInstanceOf[C]
}
/** Fetch the first attribute who class is exactly equal to the given class. If none present, return null. */
def exactly[C<:AnyRef]()(implicit m: ClassTag[C]): C = {
var i = indexExactly(m.runtimeClass)
if (i >= 0) _attr(i).asInstanceOf[C] else null.asInstanceOf[C]
}
def get[C<:AnyRef](implicit m: ClassTag[C]): Option[C] = {
val result = this.apply[C]
if (result ne null) Option(result) else None
}
def getOrElse[C<:AnyRef](defaultValue:C)(implicit m: ClassTag[C]): C = {
val result = this.apply[C]
if (result ne null) result else defaultValue
}
def getOrElseUpdate[C<:AnyRef](defaultValue: =>C)(implicit m: ClassTag[C]): C = {
val result = this.apply[C]
if (result ne null) result else {
val value = defaultValue
this += value
value
}
}
override def toString = values.mkString(" ")
@throws(classOf[IOException])
@throws(classOf[ClassNotFoundException])
private def writeObject(stream: ObjectOutputStream): Unit = {
stream.defaultWriteObject()
stream.writeObject(values)
}
@throws(classOf[IOException])
@throws(classOf[ClassNotFoundException])
private def readObject(stream: ObjectInputStream): Unit = {
stream.defaultReadObject()
stream.readObject().asInstanceOf[Seq[AnyRef]].foreach { value => attr += value }
}
}
}
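// A small usage sketch of the trait above, mirroring the scaladoc example; the
// Token and PosTag names are hypothetical and exist only for illustration.
object AttrUsageSketch {
  final case class PosTag(tag: String)
  class Token extends Attr
  def demo(): Unit = {
    val token = new Token
    token.attr += PosTag("NN")              // attach an attribute, keyed by its runtime class
    require(token.attr[PosTag].tag == "NN") // fetch it back with the static type known
    token.attr.remove[PosTag]               // removes PosTag and any subclasses
    require(!token.attr.contains[PosTag]())
  }
}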
|
patverga/factorie
|
src/main/scala/cc/factorie/util/Attr.scala
|
Scala
|
apache-2.0
| 8,628 |
package com.plasmaconduit.json
import com.plasmaconduit.validation.Validation
import scala.language.implicitConversions
import scala.util.Try
sealed trait JsValue {
def as[A](implicit reader: JsReader[A]): Validation[reader.JsReaderFailure, A] = {
reader.read(this)
}
}
object JsValue {
implicit def from[A](a: A)(implicit c: JsWriter[A]): JsValue = c.write(a)
implicit def boolToJsValue(b: Boolean): JsValue = JsBoolean(b)
implicit def stringToJsValue(s: String): JsValue = JsString(s)
implicit def longToJsValue(n: Long): JsValue = JsLong(n)
implicit def floatToJsValue(f: Float): JsValue = JsFloat(f)
implicit def listToJsValue(s: List[JsValue]): JsValue = JsArray(s)
implicit def mapToJsValue(m: Map[String, JsValue]): JsValue = JsObject(m)
}
final case class JsBoolean(value: Boolean) extends JsValue {
override def toString = if (value) "true" else "false"
}
final case class JsString(value: String) extends JsValue {
override def toString = '"' + value.replace("\\n", "\\\\n") + '"'
}
final case class JsLong(value: Long) extends JsValue {
override def toString = value.toString
}
final case class JsFloat(value: Float) extends JsValue {
override def toString = value.toString
}
final case class JsOption[A](value: Option[A]) extends JsValue {
override def toString = value.map(_.toString).getOrElse(JsNull.toString)
}
final case class JsEither[L, R](value: Either[L, R]) extends JsValue {
override def toString = value.fold(_.toString, _.toString)
}
case object JsNull extends JsValue {
override def toString = "null"
}
final case class JsArray(value: List[JsValue]) extends JsValue {
override def toString = "[" + value.map(_.toString).mkString(", ") + "]"
def at(index: Int): Option[JsValue] = {
Try(value(index)).toOption
}
}
object JsArray {
def apply(values: JsValue*): JsArray = {
JsArray(values.toList)
}
}
final case class JsObject(map: Map[String, JsValue]) extends JsValue {
override def toString = "{" + map.toSeq.map({ n => "\\"" + n._1 + "\\":" + n._2.toString}).mkString(",") + "}"
def get(key: String): Option[JsValue] = map.get(key)
def set(key: String, value: JsValue): JsObject = JsObject(map + (key -> value))
def remove(key: String): JsObject = JsObject(map - key)
def ++(other: Map[String, JsValue]) = JsObject(map ++ other)
}
object JsObject {
def apply(pairs: (String, JsValue)*): JsObject = {
JsObject(pairs.toMap)
}
def apply(pair: (String, JsValue)): JsObject = {
JsObject(Map(pair))
}
def apply[A](pair: (String, A))(implicit w: JsWriter[A]): JsObject = {
JsObject(Map(pair._1 -> w.write(pair._2)))
}
}
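// An illustrative sketch of composing the constructors above; the field names
// and values are arbitrary.
object JsValueSketch {
  val doc: JsObject = JsObject(
    "name" -> JsString("Ada"),
    "tags" -> JsArray(JsString("a"), JsString("b"))
  )
  // doc.toString == """{"name":"Ada","tags":["a", "b"]}"""
  // doc.get("name") == Some(JsString("Ada"))
}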
|
plasmaconduit/json
|
src/main/scala/com/plasmaconduit/json/JsValue.scala
|
Scala
|
mit
| 2,660 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package whisk.http
import scala.collection.immutable.Seq
import scala.concurrent.Await
import scala.concurrent.duration.DurationInt
import akka.actor.ActorSystem
import akka.event.Logging
import akka.http.scaladsl.Http
import akka.http.scaladsl.model._
import akka.http.scaladsl.model.HttpRequest
import akka.http.scaladsl.server._
import akka.http.scaladsl.server.RouteResult.Rejected
import akka.http.scaladsl.server.directives.DebuggingDirectives
import akka.http.scaladsl.server.directives.LogEntry
import akka.stream.ActorMaterializer
import spray.json._
import whisk.common.LogMarker
import whisk.common.LogMarkerToken
import whisk.common.LoggingMarkers
import whisk.common.TransactionCounter
import whisk.common.TransactionId
import whisk.common.MetricEmitter
/**
* This trait extends the Akka Directives and Actor with logging and transaction counting
* facilities common to all OpenWhisk REST services.
*/
trait BasicHttpService extends Directives with TransactionCounter {
/** Rejection handler to terminate connection on a bad request. Delegates to Akka handler. */
implicit def customRejectionHandler(implicit transid: TransactionId) = {
RejectionHandler.default.mapRejectionResponse {
case res @ HttpResponse(_, _, ent: HttpEntity.Strict, _) =>
val error = ErrorResponse(ent.data.utf8String, transid).toJson
res.copy(entity = HttpEntity(ContentTypes.`application/json`, error.compactPrint))
case x => x
}
}
/**
* Gets the routes implemented by the HTTP service.
*
* @param transid the id for the transaction (every request is assigned an id)
*/
def routes(implicit transid: TransactionId): Route
/**
* Gets the log level for a given route. The default is
* InfoLevel so override as needed.
*
* @param route the route to determine the loglevel for
* @return a log level for the route
*/
def loglevelForRoute(route: String): Logging.LogLevel = Logging.InfoLevel
  /** Prefers the most informative rejection: if an unaccepted response content type rejection is present, report it instead of the other rejections. */
val prioritizeRejections = recoverRejections { rejections =>
val priorityRejection = rejections.find {
case rejection: UnacceptedResponseContentTypeRejection => true
case _ => false
}
priorityRejection.map(rejection => Rejected(Seq(rejection))).getOrElse(Rejected(rejections))
}
/**
   * Assembles the top-level route: transaction id assignment, request/response logging,
   * and rejection handling wrapped around the service routes.
*/
def route: Route = {
assignId { implicit transid =>
DebuggingDirectives.logRequest(logRequestInfo _) {
DebuggingDirectives.logRequestResult(logResponseInfo _) {
handleRejections(customRejectionHandler) {
prioritizeRejections {
toStrictEntity(30.seconds) {
routes
}
}
}
}
}
}
}
/** Assigns transaction id to every request. */
protected val assignId = extract(_ => transid())
/** Generates log entry for every request. */
protected def logRequestInfo(req: HttpRequest)(implicit tid: TransactionId): LogEntry = {
val m = req.method.name
val p = req.uri.path.toString
val q = req.uri.query().toString
val l = loglevelForRoute(p)
LogEntry(s"[$tid] $m $p $q", l)
}
protected def logResponseInfo(req: HttpRequest)(implicit tid: TransactionId): RouteResult => Option[LogEntry] = {
case RouteResult.Complete(res: HttpResponse) =>
val m = req.method.name
val p = req.uri.path.toString
val l = loglevelForRoute(p)
val name = "BasicHttpService"
val token = LogMarkerToken("http", s"${m.toLowerCase}.${res.status.intValue}", LoggingMarkers.count)
val marker = LogMarker(token, tid.deltaToStart, Some(tid.deltaToStart))
if (TransactionId.metricsKamon) {
MetricEmitter.emitHistogramMetric(token, tid.deltaToStart)
MetricEmitter.emitCounterMetric(token)
}
if (TransactionId.metricsLog) {
Some(LogEntry(s"[$tid] [$name] $marker", l))
} else {
None
}
    case _ => None // other kinds of responses
}
}
object BasicHttpService {
/**
* Starts an HTTP route handler on given port and registers a shutdown hook.
*/
def startService(route: Route, port: Int)(implicit actorSystem: ActorSystem,
materializer: ActorMaterializer): Unit = {
implicit val executionContext = actorSystem.dispatcher
val httpBinding = Http().bindAndHandle(route, "0.0.0.0", port)
sys.addShutdownHook {
Await.result(httpBinding.map(_.unbind()), 30.seconds)
actorSystem.terminate()
Await.result(actorSystem.whenTerminated, 30.seconds)
}
}
}
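// A hedged sketch of standing up a route with the helper above; the actor-system
// name, route, and port are assumptions made only for this example.
object BasicHttpServiceSketch {
  import akka.http.scaladsl.server.Directives._

  def main(args: Array[String]): Unit = {
    implicit val system: ActorSystem = ActorSystem("sketch")
    implicit val materializer: ActorMaterializer = ActorMaterializer()
    BasicHttpService.startService(path("ping") { complete("pong") }, port = 8080)
  }
}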
|
paulcastro/openwhisk
|
common/scala/src/main/scala/whisk/http/BasicHttpService.scala
|
Scala
|
apache-2.0
| 5,527 |
package io.github.shogowada.scalajs.reactjs.example
import java.net.ServerSocket
import org.eclipse.jetty.server.Server
import org.eclipse.jetty.server.handler.ResourceHandler
class TestTargetServer(project: String) {
private var server: Server = _
def start(): Unit = {
val maybeTarget = sys.props.get(s"target.path.$project")
assert(maybeTarget.isDefined)
val target = maybeTarget.get
println(s"Target path for $project: $target")
val port = freePort
println(s"Target port for $project: $port")
server = new Server(port)
val handler = new ResourceHandler()
    handler.setResourceBase(target)
server.setHandler(handler)
server.start()
println(s"Target host for $project: $host")
}
  def freePort: Int = {
    var socket: ServerSocket = null
    try {
      socket = new ServerSocket(0)
      socket.setReuseAddress(true)
      socket.getLocalPort
    } finally {
      if (socket != null) {
        socket.close()
      }
    }
  }
def host: String = s"http://localhost:${server.getURI.getPort}/classes"
def stop(): Unit = {
server.stop()
}
}
object TestTargetServers {
val customVirtualDOM = new TestTargetServer("custom-virtual-dom")
val helloWorld = new TestTargetServer("helloworld")
val helloWorldFunction = new TestTargetServer("helloworld-function")
val interactiveHelloWorld = new TestTargetServer("interactive-helloworld")
val lifecycle = new TestTargetServer("lifecycle")
val reduxDevTools = new TestTargetServer("redux-devtools")
val reduxMiddleware = new TestTargetServer("redux-middleware")
val router = new TestTargetServer("router")
val routerRedux = new TestTargetServer("router-redux")
val style = new TestTargetServer("style")
val todoApp = new TestTargetServer("todo-app")
val todoAppRedux = new TestTargetServer("todo-app-redux")
customVirtualDOM.start()
helloWorld.start()
helloWorldFunction.start()
interactiveHelloWorld.start()
lifecycle.start()
reduxDevTools.start()
reduxMiddleware.start()
routerRedux.start()
router.start()
style.start()
todoApp.start()
todoAppRedux.start()
  // Note: addShutdownHook takes a by-name block; registering a `() => ...` lambda
  // here would only construct the function value without ever running the stop()
  // calls, so the block form is used instead.
  sys.addShutdownHook {
    customVirtualDOM.stop()
    helloWorld.stop()
    helloWorldFunction.stop()
    interactiveHelloWorld.stop()
    lifecycle.stop()
    reduxDevTools.stop()
    reduxMiddleware.stop()
    routerRedux.stop()
    router.stop()
    style.stop()
    todoApp.stop()
    todoAppRedux.stop()
  }
}
|
shogowada/scalajs-reactjs
|
example/test/src/it/scala/io/github/shogowada/scalajs/reactjs/example/TestTargetServer.scala
|
Scala
|
mit
| 2,452 |
package com.rockymadden.stringmetric.filter
import com.rockymadden.stringmetric.StringFilter
/** Strips ASCII letters so that they do not affect metric comparisons. */
trait AsciiLetterFilter extends StringFilter {
abstract override def filter(charArray: Array[Char]): Array[Char] =
super.filter(charArray.filter(c => !((c >= 65 && c <= 90 ) || (c >= 97 && c <= 122))))
abstract override def filter(string: String): String = filter(string.toCharArray).mkString
}
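// A hedged sketch of stacking the filter; the pass-through base class is a
// hypothetical stand-in for whatever concrete StringFilter the metrics use.
object AsciiLetterFilterSketch {
  class PassThrough extends StringFilter {
    override def filter(charArray: Array[Char]): Array[Char] = charArray
    override def filter(string: String): String = string
  }
  val f = new PassThrough with AsciiLetterFilter
  // f.filter("abc123") == "123" -- the ASCII letters are stripped out
}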
|
cocoxu/multip
|
src/main/scala/com/rockymadden/stringmetric/filter/AsciiLetterFilter.scala
|
Scala
|
gpl-3.0
| 439 |
package controllers
import play.api.mvc._
import play.api.data._
import play.api.data.Forms._
import play.api.routing._
import models._
import views._
class Application extends Controller {
// -- Authentication
val loginForm = Form(
tuple(
"email" -> text,
"password" -> text
) verifying ("Invalid email or password", result => result match {
case (email, password) => User.authenticate(email, password).isDefined
})
)
/**
* Login page.
*/
def login = Action { implicit request =>
Ok(html.login(loginForm))
}
/**
* Handle login form submission.
*/
def authenticate = Action { implicit request =>
loginForm.bindFromRequest.fold(
formWithErrors => BadRequest(html.login(formWithErrors)),
user => Redirect(routes.Projects.index).withSession("email" -> user._1)
)
}
/**
* Logout and clean the session.
*/
def logout = Action {
Redirect(routes.Application.login).withNewSession.flashing(
"success" -> "You've been logged out"
)
}
// -- Javascript routing
def javascriptRoutes = Action { implicit request =>
import routes.javascript._
Ok(
JavaScriptReverseRouter("jsRoutes")(
Projects.add, Projects.delete, Projects.rename,
Projects.addGroup, Projects.deleteGroup, Projects.renameGroup,
Projects.addUser, Projects.removeUser, Tasks.addFolder,
Tasks.renameFolder, Tasks.deleteFolder, Tasks.index,
Tasks.add, Tasks.update, Tasks.delete
)
)
}
}
/**
* Provide security features
*/
trait Secured {
/**
* Retrieve the connected user email.
*/
private def username(request: RequestHeader) = request.session.get("email")
/**
* Redirect to login if the user in not authorized.
*/
private def onUnauthorized(request: RequestHeader) = Results.Redirect(routes.Application.login)
// --
/**
* Action for authenticated users.
*/
def IsAuthenticated(f: => String => Request[AnyContent] => Result) = Security.Authenticated(username, onUnauthorized) { user =>
Action(request => f(user)(request))
}
/**
* Check if the connected user is a member of this project.
*/
def IsMemberOf(project: Long)(f: => String => Request[AnyContent] => Result) = IsAuthenticated { user => request =>
if(Project.isMember(project, user)) {
f(user)(request)
} else {
Results.Forbidden
}
}
/**
* Check if the connected user is a owner of this task.
*/
def IsOwnerOf(task: Long)(f: => String => Request[AnyContent] => Result) = IsAuthenticated { user => request =>
if(Task.isOwner(task, user)) {
f(user)(request)
} else {
Results.Forbidden
}
}
}
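/**
 * A hypothetical controller sketch showing how the helpers above compose;
 * the action name and response body are illustrative only.
 */
class SecuredSketchController extends Controller with Secured {
  def dashboard = IsAuthenticated { user => implicit request =>
    Ok("Logged in as " + user)
  }
}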
|
scoverage/scoverage-maven-samples
|
playframework/singlemodule/zentasks/zentasks-scala-2.10/app/controllers/Application.scala
|
Scala
|
apache-2.0
| 2,717 |
package is.hail.utils
class TruncatedArrayIndexedSeq[T](a: Array[T], newLength: Int) extends IndexedSeq[T] with Serializable {
def length: Int = newLength
def apply(idx: Int): T = {
if (idx < 0 || idx >= newLength)
throw new IndexOutOfBoundsException(idx.toString)
a(idx)
}
}
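// Illustrative usage: expose only a prefix of an array without copying it.
//   val seq = new TruncatedArrayIndexedSeq(Array(1, 2, 3, 4, 5), newLength = 3)
//   seq.toList == List(1, 2, 3)
//   seq(3)  // throws IndexOutOfBoundsException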
|
hail-is/hail
|
hail/src/main/scala/is/hail/utils/TruncatedArrayIndexedSeq.scala
|
Scala
|
mit
| 299 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources
import java.util.{Date, UUID}
import scala.collection.mutable
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.hadoop.mapreduce._
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl
import org.apache.spark._
import org.apache.spark.internal.Logging
import org.apache.spark.internal.io.{FileCommitProtocol, SparkHadoopWriterUtils}
import org.apache.spark.internal.io.FileCommitProtocol.TaskCommitMessage
import org.apache.spark.shuffle.FetchFailedException
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.catalog.{BucketSpec, ExternalCatalogUtils}
import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
import org.apache.spark.sql.catalyst.expressions.{UnsafeProjection, _}
import org.apache.spark.sql.catalyst.plans.physical.HashPartitioning
import org.apache.spark.sql.catalyst.util.{CaseInsensitiveMap, DateTimeUtils}
import org.apache.spark.sql.execution.{SortExec, SparkPlan, SQLExecution}
import org.apache.spark.sql.types.StringType
import org.apache.spark.util.{SerializableConfiguration, Utils}
/** A helper object for writing FileFormat data out to a location. */
object FileFormatWriter extends Logging {
/**
* Max number of files a single task writes out due to file size. In most cases the number of
* files written should be very small. This is just a safe guard to protect some really bad
* settings, e.g. maxRecordsPerFile = 1.
*/
private val MAX_FILE_COUNTER = 1000 * 1000
/** Describes how output files should be placed in the filesystem. */
case class OutputSpec(
outputPath: String,
customPartitionLocations: Map[TablePartitionSpec, String],
outputColumns: Seq[Attribute])
/** A shared job description for all the write tasks. */
private class WriteJobDescription(
val uuid: String, // prevent collision between different (appending) write jobs
val serializableHadoopConf: SerializableConfiguration,
val outputWriterFactory: OutputWriterFactory,
val allColumns: Seq[Attribute],
val dataColumns: Seq[Attribute],
val partitionColumns: Seq[Attribute],
val bucketIdExpression: Option[Expression],
val path: String,
val customPartitionLocations: Map[TablePartitionSpec, String],
val maxRecordsPerFile: Long,
val timeZoneId: String,
val statsTrackers: Seq[WriteJobStatsTracker])
extends Serializable {
assert(AttributeSet(allColumns) == AttributeSet(partitionColumns ++ dataColumns),
s"""
|All columns: ${allColumns.mkString(", ")}
|Partition columns: ${partitionColumns.mkString(", ")}
|Data columns: ${dataColumns.mkString(", ")}
""".stripMargin)
}
/** The result of a successful write task. */
private case class WriteTaskResult(commitMsg: TaskCommitMessage, summary: ExecutedWriteSummary)
/**
   * The basic workflow of this command is:
   * 1. Driver-side setup, including output committer initialization and data source specific
   *    preparation work for the write job to be issued.
   * 2. Issue a write job consisting of one or more executor-side tasks, each of which writes all
   *    rows within an RDD partition.
   * 3. If no exception is thrown in a task, commit that task; otherwise abort it. If any
   *    exception is thrown during task commitment, also abort that task.
   * 4. If all tasks are committed, commit the job; otherwise abort the job. If any exception is
   *    thrown during job commitment, also abort the job.
   * 5. If the job is successfully committed, perform post-commit operations such as
   *    processing statistics.
* @return The set of all partition paths that were updated during this write job.
*/
def write(
sparkSession: SparkSession,
plan: SparkPlan,
fileFormat: FileFormat,
committer: FileCommitProtocol,
outputSpec: OutputSpec,
hadoopConf: Configuration,
partitionColumns: Seq[Attribute],
bucketSpec: Option[BucketSpec],
statsTrackers: Seq[WriteJobStatsTracker],
options: Map[String, String])
: Set[String] = {
val job = Job.getInstance(hadoopConf)
job.setOutputKeyClass(classOf[Void])
job.setOutputValueClass(classOf[InternalRow])
FileOutputFormat.setOutputPath(job, new Path(outputSpec.outputPath))
val partitionSet = AttributeSet(partitionColumns)
val dataColumns = outputSpec.outputColumns.filterNot(partitionSet.contains)
val bucketIdExpression = bucketSpec.map { spec =>
val bucketColumns = spec.bucketColumnNames.map(c => dataColumns.find(_.name == c).get)
// Use `HashPartitioning.partitionIdExpression` as our bucket id expression, so that we can
// guarantee the data distribution is same between shuffle and bucketed data source, which
// enables us to only shuffle one side when join a bucketed table and a normal one.
HashPartitioning(bucketColumns, spec.numBuckets).partitionIdExpression
}
val sortColumns = bucketSpec.toSeq.flatMap {
spec => spec.sortColumnNames.map(c => dataColumns.find(_.name == c).get)
}
val caseInsensitiveOptions = CaseInsensitiveMap(options)
// Note: prepareWrite has side effect. It sets "job".
val outputWriterFactory =
fileFormat.prepareWrite(sparkSession, job, caseInsensitiveOptions, dataColumns.toStructType)
val description = new WriteJobDescription(
uuid = UUID.randomUUID().toString,
serializableHadoopConf = new SerializableConfiguration(job.getConfiguration),
outputWriterFactory = outputWriterFactory,
allColumns = outputSpec.outputColumns,
dataColumns = dataColumns,
partitionColumns = partitionColumns,
bucketIdExpression = bucketIdExpression,
path = outputSpec.outputPath,
customPartitionLocations = outputSpec.customPartitionLocations,
maxRecordsPerFile = caseInsensitiveOptions.get("maxRecordsPerFile").map(_.toLong)
.getOrElse(sparkSession.sessionState.conf.maxRecordsPerFile),
timeZoneId = caseInsensitiveOptions.get(DateTimeUtils.TIMEZONE_OPTION)
.getOrElse(sparkSession.sessionState.conf.sessionLocalTimeZone),
statsTrackers = statsTrackers
)
// We should first sort by partition columns, then bucket id, and finally sorting columns.
val requiredOrdering = partitionColumns ++ bucketIdExpression ++ sortColumns
    // The sort direction doesn't matter here; we only compare the ordering expressions.
val actualOrdering = plan.outputOrdering.map(_.child)
val orderingMatched = if (requiredOrdering.length > actualOrdering.length) {
false
} else {
requiredOrdering.zip(actualOrdering).forall {
case (requiredOrder, childOutputOrder) =>
requiredOrder.semanticEquals(childOutputOrder)
}
}
SQLExecution.checkSQLExecutionId(sparkSession)
// This call shouldn't be put into the `try` block below because it only initializes and
// prepares the job, any exception thrown from here shouldn't cause abortJob() to be called.
committer.setupJob(job)
try {
val rdd = if (orderingMatched) {
plan.execute()
} else {
// SPARK-21165: the `requiredOrdering` is based on the attributes from analyzed plan, and
// the physical plan may have different attribute ids due to optimizer removing some
// aliases. Here we bind the expression ahead to avoid potential attribute ids mismatch.
val orderingExpr = requiredOrdering
.map(SortOrder(_, Ascending))
.map(BindReferences.bindReference(_, outputSpec.outputColumns))
SortExec(
orderingExpr,
global = false,
child = plan).execute()
}
val ret = new Array[WriteTaskResult](rdd.partitions.length)
sparkSession.sparkContext.runJob(
rdd,
(taskContext: TaskContext, iter: Iterator[InternalRow]) => {
executeTask(
description = description,
sparkStageId = taskContext.stageId(),
sparkPartitionId = taskContext.partitionId(),
sparkAttemptNumber = taskContext.attemptNumber(),
committer,
iterator = iter)
},
0 until rdd.partitions.length,
(index, res: WriteTaskResult) => {
committer.onTaskCommit(res.commitMsg)
ret(index) = res
})
val commitMsgs = ret.map(_.commitMsg)
committer.commitJob(job, commitMsgs)
logInfo(s"Job ${job.getJobID} committed.")
processStats(description.statsTrackers, ret.map(_.summary.stats))
logInfo(s"Finished processing stats for job ${job.getJobID}.")
// return a set of all the partition paths that were updated during this job
ret.map(_.summary.updatedPartitions).reduceOption(_ ++ _).getOrElse(Set.empty)
} catch { case cause: Throwable =>
logError(s"Aborting job ${job.getJobID}.", cause)
committer.abortJob(job)
throw new SparkException("Job aborted.", cause)
}
}
/** Writes data out in a single Spark task. */
private def executeTask(
description: WriteJobDescription,
sparkStageId: Int,
sparkPartitionId: Int,
sparkAttemptNumber: Int,
committer: FileCommitProtocol,
iterator: Iterator[InternalRow]): WriteTaskResult = {
val jobId = SparkHadoopWriterUtils.createJobID(new Date, sparkStageId)
val taskId = new TaskID(jobId, TaskType.MAP, sparkPartitionId)
val taskAttemptId = new TaskAttemptID(taskId, sparkAttemptNumber)
// Set up the attempt context required to use in the output committer.
val taskAttemptContext: TaskAttemptContext = {
// Set up the configuration object
val hadoopConf = description.serializableHadoopConf.value
hadoopConf.set("mapreduce.job.id", jobId.toString)
hadoopConf.set("mapreduce.task.id", taskAttemptId.getTaskID.toString)
hadoopConf.set("mapreduce.task.attempt.id", taskAttemptId.toString)
hadoopConf.setBoolean("mapreduce.task.ismap", true)
hadoopConf.setInt("mapreduce.task.partition", 0)
new TaskAttemptContextImpl(hadoopConf, taskAttemptId)
}
committer.setupTask(taskAttemptContext)
val writeTask =
if (sparkPartitionId != 0 && !iterator.hasNext) {
        // For an empty job, keep the first partition so that file formats like Parquet can still write out metadata.
new EmptyDirectoryWriteTask(description)
} else if (description.partitionColumns.isEmpty && description.bucketIdExpression.isEmpty) {
new SingleDirectoryWriteTask(description, taskAttemptContext, committer)
} else {
new DynamicPartitionWriteTask(description, taskAttemptContext, committer)
}
try {
Utils.tryWithSafeFinallyAndFailureCallbacks(block = {
// Execute the task to write rows out and commit the task.
val summary = writeTask.execute(iterator)
writeTask.releaseResources()
WriteTaskResult(committer.commitTask(taskAttemptContext), summary)
})(catchBlock = {
// If there is an error, release resource and then abort the task
try {
writeTask.releaseResources()
} finally {
committer.abortTask(taskAttemptContext)
logError(s"Job $jobId aborted.")
}
})
} catch {
case e: FetchFailedException =>
throw e
case t: Throwable =>
throw new SparkException("Task failed while writing rows.", t)
}
}
/**
* For every registered [[WriteJobStatsTracker]], call `processStats()` on it, passing it
* the corresponding [[WriteTaskStats]] from all executors.
*/
private def processStats(
statsTrackers: Seq[WriteJobStatsTracker],
statsPerTask: Seq[Seq[WriteTaskStats]])
: Unit = {
val numStatsTrackers = statsTrackers.length
assert(statsPerTask.forall(_.length == numStatsTrackers),
s"""Every WriteTask should have produced one `WriteTaskStats` object for every tracker.
|There are $numStatsTrackers statsTrackers, but some task returned
|${statsPerTask.find(_.length != numStatsTrackers).get.length} results instead.
""".stripMargin)
val statsPerTracker = if (statsPerTask.nonEmpty) {
statsPerTask.transpose
} else {
statsTrackers.map(_ => Seq.empty)
}
statsTrackers.zip(statsPerTracker).foreach {
case (statsTracker, stats) => statsTracker.processStats(stats)
}
}
/**
* A simple trait for writing out data in a single Spark task, without any concerns about how
* to commit or abort tasks. Exceptions thrown by the implementation of this trait will
* automatically trigger task aborts.
*/
private trait ExecuteWriteTask {
/**
* Writes data out to files, and then returns the summary of relative information which
* includes the list of partition strings written out. The list of partitions is sent back
* to the driver and used to update the catalog. Other information will be sent back to the
* driver too and used to e.g. update the metrics in UI.
*/
def execute(iterator: Iterator[InternalRow]): ExecutedWriteSummary
def releaseResources(): Unit
}
/** ExecuteWriteTask for empty partitions */
private class EmptyDirectoryWriteTask(description: WriteJobDescription)
extends ExecuteWriteTask {
val statsTrackers: Seq[WriteTaskStatsTracker] =
description.statsTrackers.map(_.newTaskInstance())
override def execute(iter: Iterator[InternalRow]): ExecutedWriteSummary = {
ExecutedWriteSummary(
updatedPartitions = Set.empty,
stats = statsTrackers.map(_.getFinalStats()))
}
override def releaseResources(): Unit = {}
}
/** Writes data to a single directory (used for non-dynamic-partition writes). */
private class SingleDirectoryWriteTask(
description: WriteJobDescription,
taskAttemptContext: TaskAttemptContext,
committer: FileCommitProtocol) extends ExecuteWriteTask {
private[this] var currentWriter: OutputWriter = _
val statsTrackers: Seq[WriteTaskStatsTracker] =
description.statsTrackers.map(_.newTaskInstance())
private def newOutputWriter(fileCounter: Int): Unit = {
val ext = description.outputWriterFactory.getFileExtension(taskAttemptContext)
val currentPath = committer.newTaskTempFile(
taskAttemptContext,
None,
f"-c$fileCounter%03d" + ext)
currentWriter = description.outputWriterFactory.newInstance(
path = currentPath,
dataSchema = description.dataColumns.toStructType,
context = taskAttemptContext)
      statsTrackers.foreach(_.newFile(currentPath))
}
override def execute(iter: Iterator[InternalRow]): ExecutedWriteSummary = {
var fileCounter = 0
var recordsInFile: Long = 0L
newOutputWriter(fileCounter)
while (iter.hasNext) {
if (description.maxRecordsPerFile > 0 && recordsInFile >= description.maxRecordsPerFile) {
fileCounter += 1
assert(fileCounter < MAX_FILE_COUNTER,
s"File counter $fileCounter is beyond max value $MAX_FILE_COUNTER")
recordsInFile = 0
releaseResources()
newOutputWriter(fileCounter)
}
val internalRow = iter.next()
currentWriter.write(internalRow)
statsTrackers.foreach(_.newRow(internalRow))
recordsInFile += 1
}
releaseResources()
ExecutedWriteSummary(
updatedPartitions = Set.empty,
stats = statsTrackers.map(_.getFinalStats()))
}
override def releaseResources(): Unit = {
if (currentWriter != null) {
try {
currentWriter.close()
} finally {
currentWriter = null
}
}
}
}
/**
   * Writes data out using dynamic partition writes, meaning this single task can write to
   * multiple directories (partitions) and/or files (bucketing).
*/
private class DynamicPartitionWriteTask(
desc: WriteJobDescription,
taskAttemptContext: TaskAttemptContext,
committer: FileCommitProtocol) extends ExecuteWriteTask {
/** Flag saying whether or not the data to be written out is partitioned. */
val isPartitioned = desc.partitionColumns.nonEmpty
/** Flag saying whether or not the data to be written out is bucketed. */
val isBucketed = desc.bucketIdExpression.isDefined
assert(isPartitioned || isBucketed,
s"""DynamicPartitionWriteTask should be used for writing out data that's either
|partitioned or bucketed. In this case neither is true.
|WriteJobDescription: ${desc}
""".stripMargin)
// currentWriter is initialized whenever we see a new key (partitionValues + BucketId)
private var currentWriter: OutputWriter = _
/** Trackers for computing various statistics on the data as it's being written out. */
private val statsTrackers: Seq[WriteTaskStatsTracker] =
desc.statsTrackers.map(_.newTaskInstance())
/** Extracts the partition values out of an input row. */
private lazy val getPartitionValues: InternalRow => UnsafeRow = {
val proj = UnsafeProjection.create(desc.partitionColumns, desc.allColumns)
row => proj(row)
}
/** Expression that given partition columns builds a path string like: col1=val/col2=val/... */
private lazy val partitionPathExpression: Expression = Concat(
desc.partitionColumns.zipWithIndex.flatMap { case (c, i) =>
val partitionName = ScalaUDF(
ExternalCatalogUtils.getPartitionPathString _,
StringType,
Seq(Literal(c.name), Cast(c, StringType, Option(desc.timeZoneId))))
if (i == 0) Seq(partitionName) else Seq(Literal(Path.SEPARATOR), partitionName)
})
/** Evaluates the `partitionPathExpression` above on a row of `partitionValues` and returns
* the partition string. */
private lazy val getPartitionPath: InternalRow => String = {
val proj = UnsafeProjection.create(Seq(partitionPathExpression), desc.partitionColumns)
row => proj(row).getString(0)
}
/** Given an input row, returns the corresponding `bucketId` */
private lazy val getBucketId: InternalRow => Int = {
val proj = UnsafeProjection.create(desc.bucketIdExpression.toSeq, desc.allColumns)
row => proj(row).getInt(0)
}
/** Returns the data columns to be written given an input row */
private val getOutputRow = UnsafeProjection.create(desc.dataColumns, desc.allColumns)
/**
* Opens a new OutputWriter given a partition key and/or a bucket id.
* If bucket id is specified, we will append it to the end of the file name, but before the
* file extension, e.g. part-r-00009-ea518ad4-455a-4431-b471-d24e03814677-00002.gz.parquet
*
* @param partitionValues the partition which all tuples being written by this `OutputWriter`
* belong to
* @param bucketId the bucket which all tuples being written by this `OutputWriter` belong to
* @param fileCounter the number of files that have been written in the past for this specific
* partition. This is used to limit the max number of records written for a
* single file. The value should start from 0.
* @param updatedPartitions the set of updated partition paths, we should add the new partition
* path of this writer to it.
*/
private def newOutputWriter(
partitionValues: Option[InternalRow],
bucketId: Option[Int],
fileCounter: Int,
updatedPartitions: mutable.Set[String]): Unit = {
val partDir = partitionValues.map(getPartitionPath(_))
partDir.foreach(updatedPartitions.add)
val bucketIdStr = bucketId.map(BucketingUtils.bucketIdToString).getOrElse("")
// This must be in a form that matches our bucketing format. See BucketingUtils.
val ext = f"$bucketIdStr.c$fileCounter%03d" +
desc.outputWriterFactory.getFileExtension(taskAttemptContext)
val customPath = partDir.flatMap { dir =>
desc.customPartitionLocations.get(PartitioningUtils.parsePathFragment(dir))
}
val currentPath = if (customPath.isDefined) {
committer.newTaskTempFileAbsPath(taskAttemptContext, customPath.get, ext)
} else {
committer.newTaskTempFile(taskAttemptContext, partDir, ext)
}
currentWriter = desc.outputWriterFactory.newInstance(
path = currentPath,
dataSchema = desc.dataColumns.toStructType,
context = taskAttemptContext)
statsTrackers.foreach(_.newFile(currentPath))
}
override def execute(iter: Iterator[InternalRow]): ExecutedWriteSummary = {
// If anything below fails, we should abort the task.
var recordsInFile: Long = 0L
var fileCounter = 0
val updatedPartitions = mutable.Set[String]()
      var currentPartitionValues: Option[UnsafeRow] = None
var currentBucketId: Option[Int] = None
for (row <- iter) {
val nextPartitionValues = if (isPartitioned) Some(getPartitionValues(row)) else None
val nextBucketId = if (isBucketed) Some(getBucketId(row)) else None
        if (currentPartitionValues != nextPartitionValues || currentBucketId != nextBucketId) {
          // See a new partition or bucket - write to a new partition dir (or a new bucket file).
          if (isPartitioned && currentPartitionValues != nextPartitionValues) {
            currentPartitionValues = Some(nextPartitionValues.get.copy())
            statsTrackers.foreach(_.newPartition(currentPartitionValues.get))
}
if (isBucketed) {
currentBucketId = nextBucketId
statsTrackers.foreach(_.newBucket(currentBucketId.get))
}
recordsInFile = 0
fileCounter = 0
releaseResources()
          newOutputWriter(currentPartitionValues, currentBucketId, fileCounter, updatedPartitions)
} else if (desc.maxRecordsPerFile > 0 &&
recordsInFile >= desc.maxRecordsPerFile) {
// Exceeded the threshold in terms of the number of records per file.
// Create a new file by increasing the file counter.
recordsInFile = 0
fileCounter += 1
assert(fileCounter < MAX_FILE_COUNTER,
s"File counter $fileCounter is beyond max value $MAX_FILE_COUNTER")
releaseResources()
          newOutputWriter(currentPartitionValues, currentBucketId, fileCounter, updatedPartitions)
}
val outputRow = getOutputRow(row)
currentWriter.write(outputRow)
statsTrackers.foreach(_.newRow(outputRow))
recordsInFile += 1
}
releaseResources()
ExecutedWriteSummary(
updatedPartitions = updatedPartitions.toSet,
stats = statsTrackers.map(_.getFinalStats()))
}
override def releaseResources(): Unit = {
if (currentWriter != null) {
try {
currentWriter.close()
} finally {
currentWriter = null
}
}
}
}
}
/**
* Wrapper class for the metrics of writing data out.
*
* @param updatedPartitions the partitions updated during writing data out. Only valid
* for dynamic partition.
* @param stats one `WriteTaskStats` object for every `WriteJobStatsTracker` that the job had.
*/
case class ExecutedWriteSummary(
updatedPartitions: Set[String],
stats: Seq[WriteTaskStats])
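// Illustrative only: the per-file record limit enforced above is typically set
// from user code through the standard writer option; the path is an assumption.
//   spark.range(100000L).write
//     .option("maxRecordsPerFile", 10000)
//     .parquet("/tmp/sketch-output")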
|
esi-mineset/spark
|
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/FileFormatWriter.scala
|
Scala
|
apache-2.0
| 24,412 |
/*
* Copyright 2019 Spotify AB.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.scio.jdbc
import java.sql.ResultSet
import com.spotify.scio._
import org.apache.beam.sdk.io.{jdbc => beam}
import com.spotify.scio.testing._
object JdbcJob {
def main(cmdlineArgs: Array[String]): Unit = {
val (opts, _) = ScioContext.parseArguments[CloudSqlOptions](cmdlineArgs)
val sc = ScioContext(opts)
sc.jdbcSelect(getReadOptions(opts))
.map(_ + "J")
.saveAsJdbc(getWriteOptions(opts))
sc.run()
()
}
def getReadOptions(opts: CloudSqlOptions): JdbcReadOptions[String] =
JdbcReadOptions(
connectionOptions = getConnectionOptions(opts),
query = "SELECT <this> FROM <this>",
rowMapper = (rs: ResultSet) => rs.getString(1)
)
def getWriteOptions(opts: CloudSqlOptions): JdbcWriteOptions[String] =
JdbcWriteOptions[String](
connectionOptions = getConnectionOptions(opts),
statement = "INSERT INTO <this> VALUES( ?, ? ..?)"
)
def connectionUrl(opts: CloudSqlOptions): String =
s"jdbc:mysql://google/${opts.getCloudSqlDb}?" +
s"cloudSqlInstance=${opts.getCloudSqlInstanceConnectionName}&" +
s"socketFactory=com.google.cloud.sql.mysql.SocketFactory"
def getConnectionOptions(opts: CloudSqlOptions): JdbcConnectionOptions =
JdbcConnectionOptions(
username = opts.getCloudSqlUsername,
password = Some(opts.getCloudSqlPassword),
connectionUrl = connectionUrl(opts),
      driverClass = classOf[java.sql.Driver]
)
}
class JdbcTest extends PipelineSpec {
def testJdbc(xs: String*): Unit = {
val args = Array(
"--cloudSqlUsername=john",
"--cloudSqlPassword=secret",
"--cloudSqlDb=mydb",
"--cloudSqlInstanceConnectionName=project-id:zone:db-instance-name"
)
val (opts, _) = ScioContext.parseArguments[CloudSqlOptions](args)
val readOpts = JdbcJob.getReadOptions(opts)
val writeOpts = JdbcJob.getWriteOptions(opts)
JobTest[JdbcJob.type]
.args(args: _*)
.input(JdbcIO[String](readOpts), Seq("a", "b", "c"))
.output(JdbcIO[String](writeOpts))(coll => coll should containInAnyOrder(xs))
.run()
}
it should "pass correct JDBC" in {
testJdbc("aJ", "bJ", "cJ")
}
it should "fail incorrect JDBC" in {
an[AssertionError] should be thrownBy { testJdbc("aJ", "bJ") }
an[AssertionError] should be thrownBy { testJdbc("aJ", "bJ", "cJ", "dJ") }
}
it should "connnect via JDBC without a password" in {
val args =
Array(
"--cloudSqlUsername=john",
"--cloudSqlDb=mydb",
"--cloudSqlInstanceConnectionName=project-id:zone:db-instance-name"
)
val (opts, _) = ScioContext.parseArguments[CloudSqlOptions](args)
val readOpts = JdbcJob.getReadOptions(opts)
val writeOpts = JdbcJob.getWriteOptions(opts)
val expected = Seq("aJ", "bJ", "cJ")
JobTest[JdbcJob.type]
.args(args: _*)
.input(JdbcIO[String](readOpts), Seq("a", "b", "c"))
.output(JdbcIO[String](writeOpts))(coll => coll should containInAnyOrder(expected))
.run()
}
it should "generate connection string with password" in {
val password = JdbcConnectionOptions(
username = "user",
password = Some("pass"),
connectionUrl = "foo",
driverClass = classOf[java.sql.Driver]
)
JdbcIO.jdbcIoId(password, "query") shouldEqual "user:pass@foo:query"
}
it should "generate connection string without password" in {
val noPassword = JdbcConnectionOptions(
username = "user",
password = None,
connectionUrl = "foo",
driverClass = classOf[java.sql.Driver]
)
JdbcIO.jdbcIoId(noPassword, "query") shouldEqual "user@foo:query"
}
it should "generate datasource config with password" in {
val opts = JdbcConnectionOptions(
username = "user",
password = Some("pass"),
connectionUrl = "foo",
driverClass = classOf[java.sql.Driver]
)
val expected = beam.JdbcIO.DataSourceConfiguration
.create(classOf[java.sql.Driver].getCanonicalName, "foo")
.withUsername("user")
.withPassword("pass")
JdbcIO.dataSourceConfiguration(opts).toString shouldBe expected.toString
}
it should "generate datasource config without password" in {
val opts = JdbcConnectionOptions(
username = "user",
password = None,
connectionUrl = "foo",
driverClass = classOf[java.sql.Driver]
)
val expected = beam.JdbcIO.DataSourceConfiguration
.create(classOf[java.sql.Driver].getCanonicalName, "foo")
.withUsername("user")
JdbcIO.dataSourceConfiguration(opts).toString shouldBe expected.toString
}
}
|
spotify/scio
|
scio-jdbc/src/test/scala/com/spotify/scio/jdbc/JdbcTest.scala
|
Scala
|
apache-2.0
| 5,202 |
package core.application.ui
import scala.language.postfixOps
import game.states.GameState
import javax.swing.JPanel
import java.awt.Dimension
import java.awt.Image
import java.awt.Graphics
import java.awt.Graphics2D
import graphics.sprites.spritesheet.SpriteSheetLoader
final class GameSurface private[ui] (width: Int, height: Int) {
//We can't create the back buffer image directly
private var backBuffer : Image = null
val panel : JPanel = new JPanel() {
override def paintComponent(g: Graphics) = {
super.paintComponent(g)
//We'll draw the back buffer image if it exists
if(backBuffer != null) {
g drawImage(backBuffer, 0, 0, null)
}
}
}
panel setPreferredSize(new Dimension(width, height))
private[ui] def draw(state : GameState) : Unit = {
val panelWidth = panel getWidth
val panelHeight = panel getHeight
    /* First we'll check whether the back buffer exists yet and whether its
     * dimensions still match the panel's (i.e. the panel has been resized) */
    if(backBuffer == null || backBuffer.getWidth(null) != panelWidth
        || backBuffer.getHeight(null) != panelHeight) {
      backBuffer = panel createImage(panelWidth, panelHeight)
    }
val gBuf = backBuffer.getGraphics().asInstanceOf[Graphics2D]
state render(gBuf)
gBuf dispose
panel repaint()
}
}
|
NesKaphe/2DGameEngine
|
src/main/scala/core/application/ui/GamePanel.scala
|
Scala
|
lgpl-3.0
| 1,375 |
package core
import akka.actor._
import akka.pattern.{ask, pipe}
import akka.util.Timeout
import core.ResultReceivingActor._
import file.CsvFileActorMessages.ReadIndexMessage
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global
object ResultReceivingActor {
case class RequestResultMessage(v1: Int)
}
class ResultReceivingActor(interimResultsFileActor: ActorRef) extends Actor {
implicit val timeout = Timeout(2.seconds)
def receive: Receive = {
case RequestResultMessage(v1) => {
(interimResultsFileActor ? ReadIndexMessage(v1)).mapTo[Option[Int]].map {
case Some(fv1) if fv1 > 10 => Some(fv1 - 10)
case Some(fv1) => Some(fv1)
case _ => None
} pipeTo sender
}
}
}
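// A hedged sketch of wiring and querying the actor; the interim-results actor
// and the requested index are assumed to be provided elsewhere in the project.
object ResultReceivingSketch {
  def query(system: ActorSystem, interimResultsFileActor: ActorRef): Unit = {
    implicit val timeout: Timeout = Timeout(2.seconds)
    val receiver = system.actorOf(Props(new ResultReceivingActor(interimResultsFileActor)))
    (receiver ? RequestResultMessage(42)).mapTo[Option[Int]].foreach(println)
  }
}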
|
andriichuk/FractionOdds
|
src/main/scala/core/ResultReceivingActor.scala
|
Scala
|
apache-2.0
| 762 |
package com.olvind
package requiresjs
import ammonite.ops._
import jdk.nashorn.internal.ir.{FunctionNode, Node, ObjectNode}
import scala.language.postfixOps
object Require {
def apply(p: Path, indexNames: Set[String]): Required = {
val ctx = new ScanCtx
def recurse(requiredPath: Path, ctx: ScanCtx): Lazy[Required] =
ctx.required(requiredPath, doRecurse(requiredPath))
def doRecurse(requiredPath: Path)(ctx: ScanCtx): Lazy[Required] = {
val ResolvedPath(filePath: Path, folderPath: Path) =
ResolvePath(requiredPath, indexNames)
ctx.parsedFile(filePath) match {
case ParsedFile(_, fileStr: String, fileParsed: FunctionNode) =>
val imports: Seq[Import] = VisitorImports(fileParsed, folderPath).value
val components: Map[CompName, ObjectNode] = VisitorComponents(fileParsed).value
val memberMethods
: Map[CompName, Set[MemberMethod]] = VisitorComponentMembers(fileParsed).value
val exports: Seq[Node] = VisitorExports(fileParsed).value
//todo: split require/react parsing!
def component(compName: CompName, o: ObjectNode) =
Single(
compName,
FoundComponent(
name = compName,
file = filePath,
jsContent = fileStr.substring(o.getStart, o.getFinish),
props = VisitorPropType(compName, o, fileStr, imports).value,
methods = memberMethods.get(compName)
)
)
components.toList.distinct match {
case Nil =>
/* todo: Parse exports! */
val modules: Seq[Lazy[Required]] =
imports.collect {
case Import(varName, Left(innerPath: Path)) =>
recurse(innerPath, ctx)
}.distinct
Required(requiredPath, modules)
case (compName, o) :: Nil =>
Lazy(component(compName, o))
case many =>
Required(filePath, many map {
case (name, obj) => Lazy(component(name, obj))
})
}
case other =>
println(other)
Required(requiredPath, Nil)
}
}
recurse(p, ctx).run
}
}
|
chandu0101/scalajs-react-components
|
gen/src/main/scala/com/olvind/requiresjs/Require.scala
|
Scala
|
apache-2.0
| 2,312 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalactic.Equality
import org.scalactic.Uniformity
import org.scalactic.Prettifier
import org.scalactic.StringNormalizations._
import SharedHelpers._
import FailureMessages.decorateToStringValue
import exceptions.TestFailedException
import org.scalatest.freespec.AnyFreeSpec
import org.scalatest.matchers.should.Matchers._
class ListShouldContainAtMostOneOfLogicalOrSpec extends AnyFreeSpec {
private val prettifier = Prettifier.default
val upperCaseStringEquality =
new Equality[String] {
def areEqual(a: String, b: Any): Boolean = upperCase(a) == upperCase(b)
}
val invertedListOfStringEquality =
new Equality[List[String]] {
def areEqual(a: List[String], b: Any): Boolean = a != b
}
private def upperCase(value: Any): Any =
value match {
case l: List[_] => l.map(upperCase(_))
case s: String => s.toUpperCase
case c: Char => c.toString.toUpperCase.charAt(0)
case (s1: String, s2: String) => (s1.toUpperCase, s2.toUpperCase)
case e: java.util.Map.Entry[_, _] =>
(e.getKey, e.getValue) match {
case (k: String, v: String) => Entry(k.toUpperCase, v.toUpperCase)
case _ => value
}
case _ => value
}
//ADDITIONAL//
val fileName: String = "ListShouldContainAtMostOneOfLogicalOrSpec.scala"
"a List" - {
val fumList: List[String] = List("fum", "foe")
val toList: List[String] = List("to", "you")
"when used with (contain atMostOneOf (...) or contain atMostOneOf (...)) syntax" - {
"should do nothing if valid, else throw a TFE with an appropriate error message" in {
fumList should (contain atMostOneOf ("fee", "fie", "foe", "fam") or contain atMostOneOf("fie", "fee", "fam", "foe"))
fumList should (contain atMostOneOf ("fee", "fie", "foe", "fam") or contain atMostOneOf("fie", "fee", "fum", "foe"))
fumList should (contain atMostOneOf ("fee", "fie", "foe", "fum") or contain atMostOneOf("fie", "fee", "fam", "foe"))
val e1 = intercept[TestFailedException] {
fumList should (contain atMostOneOf ("fee", "fie", "foe", "fum") or contain atMostOneOf ("fie", "fee", "fum", "foe"))
}
checkMessageStackDepth(e1, Resources.didNotContainAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"fee\\", \\"fie\\", \\"foe\\", \\"fum\\"") + ", and " + Resources.didNotContainAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"fie\\", \\"fee\\", \\"fum\\", \\"foe\\""), fileName, thisLineNumber - 2)
}
"should use the implicit Equality in scope" in {
implicit val ise = upperCaseStringEquality
fumList should (contain atMostOneOf ("FEE", "FIE", "FOE", "FAM") or contain atMostOneOf ("FIE", "FEE", "FAM", "FOE"))
fumList should (contain atMostOneOf ("FEE", "FIE", "FOE", "FAM") or contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))
fumList should (contain atMostOneOf ("FEE", "FIE", "FOE", "FUM") or contain atMostOneOf ("FIE", "FEE", "FAM", "FOE"))
val e1 = intercept[TestFailedException] {
fumList should (contain atMostOneOf ("FEE", "FIE", "FOE", "FUM") or (contain atMostOneOf ("FIE", "FEE", "FUM", "FOE")))
}
checkMessageStackDepth(e1, Resources.didNotContainAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUM\\"") + ", and " + Resources.didNotContainAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FIE\\", \\"FEE\\", \\"FUM\\", \\"FOE\\""), fileName, thisLineNumber - 2)
}
"should use an explicitly provided Equality" in {
(fumList should (contain atMostOneOf ("FEE", "FIE", "FOE", "FAM") or contain atMostOneOf ("FIE", "FEE", "FAM", "FOE"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
(fumList should (contain atMostOneOf ("FEE", "FIE", "FOE", "FAM") or contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
(fumList should (contain atMostOneOf ("FEE", "FIE", "FOE", "FUM") or contain atMostOneOf ("FIE", "FEE", "FAM", "FOE"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(fumList should (contain atMostOneOf ("FEE", "FIE", "FOE", "FUM") or contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, Resources.didNotContainAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUM\\"") + ", and " + Resources.didNotContainAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FIE\\", \\"FEE\\", \\"FUM\\", \\"FOE\\""), fileName, thisLineNumber - 2)
(fumList should (contain atMostOneOf (" FEE ", " FIE ", " FOE ", " FAM ") or contain atMostOneOf (" FEE ", " FIE ", " FOE ", " FAM "))) (after being lowerCased and trimmed, after being lowerCased and trimmed)
}
"should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value" in {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (contain atMostOneOf ("fee", "fie", "foe", "fie", "fum") or contain atMostOneOf("fie", "fee", "fam", "foe"))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.atMostOneOfDuplicate))
val e2 = intercept[exceptions.NotAllowedException] {
fumList should (contain atMostOneOf ("fie", "fee", "fam", "foe") or contain atMostOneOf("fee", "fie", "foe", "fie", "fum"))
}
e2.failedCodeFileName.get should be (fileName)
e2.failedCodeLineNumber.get should be (thisLineNumber - 3)
e2.message should be (Some(Resources.atMostOneOfDuplicate))
}
}
"when used with (equal (...) and contain oneOf (...)) syntax" - {
"should do nothing if valid, else throw a TFE with an appropriate error message" in {
fumList should (equal (fumList) or contain atMostOneOf("fie", "fee", "fam", "foe"))
fumList should (equal (toList) or contain atMostOneOf("fie", "fee", "fam", "foe"))
fumList should (equal (fumList) or contain atMostOneOf("fie", "fee", "fum", "foe"))
val e1 = intercept[TestFailedException] {
fumList should (equal (toList) or contain atMostOneOf ("fie", "fee", "fum", "foe"))
}
checkMessageStackDepth(e1, Resources.didNotEqual(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)) + ", and " + Resources.didNotContainAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"fie\\", \\"fee\\", \\"fum\\", \\"foe\\""), fileName, thisLineNumber - 2)
}
"should use the implicit Equality in scope" in {
implicit val ise = upperCaseStringEquality
fumList should (equal (fumList) or contain atMostOneOf ("FIE", "FEE", "FAM", "FOE"))
fumList should (equal (toList) or contain atMostOneOf ("FIE", "FEE", "FAM", "FOE"))
fumList should (equal (fumList) or contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))
val e1 = intercept[TestFailedException] {
fumList should (equal (toList) or (contain atMostOneOf ("FIE", "FEE", "FUM", "FOE")))
}
checkMessageStackDepth(e1, Resources.didNotEqual(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)) + ", and " + Resources.didNotContainAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FIE\\", \\"FEE\\", \\"FUM\\", \\"FOE\\""), fileName, thisLineNumber - 2)
}
"should use an explicitly provided Equality" in {
(fumList should (equal (toList) or contain atMostOneOf ("FIE", "FEE", "FAM", "FOE"))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
(fumList should (equal (fumList) or contain atMostOneOf ("FIE", "FEE", "FAM", "FOE"))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
(fumList should (equal (toList) or contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(fumList should (equal (fumList) or contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, Resources.didNotEqual(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)) + ", and " + Resources.didNotContainAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FIE\\", \\"FEE\\", \\"FUM\\", \\"FOE\\""), fileName, thisLineNumber - 2)
(fumList should (equal (toList) or contain atMostOneOf (" FEE ", " FIE ", " FOE ", " FAM "))) (decided by invertedListOfStringEquality, after being lowerCased and trimmed)
}
"should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value" in {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (equal (fumList) or contain atMostOneOf("fee", "fie", "foe", "fie", "fum"))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.atMostOneOfDuplicate))
}
}
"when used with (be (...) and contain theMostOneOf (...)) syntax" - {
"should do nothing if valid, else throw a TFE with an appropriate error message" in {
fumList should (be (fumList) or contain atMostOneOf("fie", "fee", "fam", "foe"))
fumList should (be (toList) or contain atMostOneOf("fie", "fee", "fam", "foe"))
fumList should (be (fumList) or contain atMostOneOf("fie", "fee", "fum", "foe"))
val e1 = intercept[TestFailedException] {
fumList should (be (toList) or contain atMostOneOf ("fie", "fee", "fum", "foe"))
}
checkMessageStackDepth(e1, Resources.wasNotEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)) + ", and " + Resources.didNotContainAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"fie\\", \\"fee\\", \\"fum\\", \\"foe\\""), fileName, thisLineNumber - 2)
}
"should use the implicit Equality in scope" in {
implicit val ise = upperCaseStringEquality
fumList should (be (fumList) or contain atMostOneOf ("FIE", "FEE", "FAM", "FOE"))
fumList should (be (toList) or contain atMostOneOf ("FIE", "FEE", "FAM", "FOE"))
fumList should (be (fumList) or contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))
val e1 = intercept[TestFailedException] {
fumList should (be (toList) or (contain atMostOneOf ("FIE", "FEE", "FUM", "FOE")))
}
checkMessageStackDepth(e1, Resources.wasNotEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)) + ", and " + Resources.didNotContainAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FIE\\", \\"FEE\\", \\"FUM\\", \\"FOE\\""), fileName, thisLineNumber - 2)
}
"should use an explicitly provided Equality" in {
(fumList should (be (fumList) or contain atMostOneOf ("FIE", "FEE", "FAM", "FOE"))) (decided by upperCaseStringEquality)
(fumList should (be (toList) or contain atMostOneOf ("FIE", "FEE", "FAM", "FOE"))) (decided by upperCaseStringEquality)
(fumList should (be (fumList) or contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))) (decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(fumList should (be (toList) or contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, Resources.wasNotEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)) + ", and " + Resources.didNotContainAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FIE\\", \\"FEE\\", \\"FUM\\", \\"FOE\\""), fileName, thisLineNumber - 2)
(fumList should (be (fumList) or contain atMostOneOf (" FEE ", " FIE ", " FOE ", " FAM "))) (after being lowerCased and trimmed)
}
"should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value" in {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (be (fumList) or contain atMostOneOf("fee", "fie", "foe", "fie", "fum"))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.atMostOneOfDuplicate))
}
}
"when used with (contain oneOf (...) and be (...)) syntax" - {
"should do nothing if valid, else throw a TFE with an appropriate error message" in {
fumList should (contain atMostOneOf("fie", "fee", "fam", "foe") or be (fumList))
fumList should (contain atMostOneOf("fie", "fee", "fum", "foe") or be (fumList))
fumList should (contain atMostOneOf("fie", "fee", "fam", "foe") or be (toList))
val e1 = intercept[TestFailedException] {
fumList should (contain atMostOneOf ("fee", "fie", "foe", "fum") or be (toList))
}
checkMessageStackDepth(e1, Resources.didNotContainAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"fee\\", \\"fie\\", \\"foe\\", \\"fum\\"") + ", and " + Resources.wasNotEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)), fileName, thisLineNumber - 2)
}
"should use the implicit Equality in scope" in {
implicit val ise = upperCaseStringEquality
fumList should (contain atMostOneOf ("FIE", "FEE", "FAM", "FOE") or be (fumList))
fumList should (contain atMostOneOf ("FIE", "FEE", "FUM", "FOE") or be (fumList))
fumList should (contain atMostOneOf ("FIE", "FEE", "FAM", "FOE") or be (toList))
val e1 = intercept[TestFailedException] {
fumList should (contain atMostOneOf ("FEE", "FIE", "FOE", "FUM") or be (toList))
}
checkMessageStackDepth(e1, Resources.didNotContainAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUM\\"") + ", and " + Resources.wasNotEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)), fileName, thisLineNumber - 2)
}
"should use an explicitly provided Equality" in {
(fumList should (contain atMostOneOf ("FIE", "FEE", "FAM", "FOE") or be (fumList))) (decided by upperCaseStringEquality)
(fumList should (contain atMostOneOf ("FIE", "FEE", "FUM", "FOE") or be (fumList))) (decided by upperCaseStringEquality)
(fumList should (contain atMostOneOf ("FIE", "FEE", "FAM", "FOE") or be (toList))) (decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(fumList should (contain atMostOneOf ("FEE", "FIE", "FOE", "FUM") or be (toList))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, Resources.didNotContainAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUM\\"") + ", and " + Resources.wasNotEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)), fileName, thisLineNumber - 2)
(fumList should (contain atMostOneOf (" FEE ", " FIE ", " FOE ", " FaM ") or be (fumList))) (after being lowerCased and trimmed)
}
"should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value" in {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (contain atMostOneOf("fee", "fie", "foe", "fie", "fum") or be (fumList))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.atMostOneOfDuplicate))
}
}
"when used with (not contain atMostOneOf (...) and not contain atMostOneOf (...)) syntax" - {
"should do nothing if valid, else throw a TFE with an appropriate error message" in {
fumList should (not contain atMostOneOf ("fee", "fie", "foe", "fum") or not contain atMostOneOf("fie", "fee", "fum", "foe"))
fumList should (not contain atMostOneOf ("fee", "fie", "foe", "fuu") or not contain atMostOneOf("fie", "fee", "fum", "foe"))
fumList should (not contain atMostOneOf ("fee", "fie", "foe", "fum") or not contain atMostOneOf("fie", "fee", "fuu", "foe"))
val e1 = intercept[TestFailedException] {
fumList should (not contain atMostOneOf ("fee", "fie", "foe", "fuu") or not contain atMostOneOf ("fee", "fie", "foe", "fuu"))
}
checkMessageStackDepth(e1, Resources.containedAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"fee\\", \\"fie\\", \\"foe\\", \\"fuu\\"") + ", and " + Resources.containedAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"fee\\", \\"fie\\", \\"foe\\", \\"fuu\\""), fileName, thisLineNumber - 2)
}
"should use the implicit Equality in scope" in {
implicit val ise = upperCaseStringEquality
fumList should (not contain atMostOneOf ("FEE", "FIE", "FOE", "FUM") or not contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))
fumList should (not contain atMostOneOf ("FEE", "FIE", "FOE", "FUU") or not contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))
fumList should (not contain atMostOneOf ("FEE", "FIE", "FOE", "FUM") or not contain atMostOneOf ("FIE", "FEE", "FUU", "FOE"))
val e1 = intercept[TestFailedException] {
fumList should (not contain atMostOneOf ("FEE", "FIE", "FOE", "FUU") or not contain atMostOneOf ("FEE", "FIE", "FOE", "FUU"))
}
checkMessageStackDepth(e1, Resources.containedAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUU\\"") + ", and " + Resources.containedAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUU\\""), fileName, thisLineNumber - 2)
}
"should use an explicitly provided Equality" in {
(fumList should (not contain atMostOneOf ("FEE", "FIE", "FOE", "FUM") or not contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
(fumList should (not contain atMostOneOf ("FEE", "FIE", "FOE", "FUU") or not contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
(fumList should (not contain atMostOneOf ("FEE", "FIE", "FOE", "FUM") or not contain atMostOneOf ("FIE", "FEE", "FUU", "FOE"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(fumList should (not contain atMostOneOf ("FEE", "FIE", "FOE", "FUU") or not contain atMostOneOf ("FEE", "FIE", "FOE", "FUU"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, Resources.containedAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUU\\"") + ", and " + Resources.containedAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUU\\""), fileName, thisLineNumber - 2)
(fumList should (contain atMostOneOf (" FEE ", " FIE ", " FOE ", " FAM ") or contain atMostOneOf (" FEE ", " FIE ", " FOE ", " FAM "))) (after being lowerCased and trimmed, after being lowerCased and trimmed)
}
"should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value" in {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (not contain atMostOneOf ("fee", "fie", "foe", "fie", "fum") or not contain atMostOneOf("fie", "fee", "fum", "foe"))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.atMostOneOfDuplicate))
val e2 = intercept[exceptions.NotAllowedException] {
fumList should (not contain atMostOneOf ("fie", "fee", "fum", "foe") or not contain atMostOneOf("fee", "fie", "foe", "fie", "fum"))
}
e2.failedCodeFileName.get should be (fileName)
e2.failedCodeLineNumber.get should be (thisLineNumber - 3)
e2.message should be (Some(Resources.atMostOneOfDuplicate))
}
}
"when used with (not equal (...) and not contain oneOf (...)) syntax" - {
"should do nothing if valid, else throw a TFE with an appropriate error message" in {
fumList should (not equal (toList) or not contain atMostOneOf("fie", "fee", "fum", "foe"))
fumList should (not equal (fumList) or not contain atMostOneOf("fie", "fee", "fum", "foe"))
fumList should (not equal (toList) or not contain atMostOneOf("fie", "fee", "fuu", "foe"))
val e1 = intercept[TestFailedException] {
fumList should (not equal (fumList) or not contain atMostOneOf ("fee", "fie", "foe", "fuu"))
}
checkMessageStackDepth(e1, Resources.equaled(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)) + ", and " + Resources.containedAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"fee\\", \\"fie\\", \\"foe\\", \\"fuu\\""), fileName, thisLineNumber - 2)
}
"should use the implicit Equality in scope" in {
implicit val ise = upperCaseStringEquality
fumList should (not equal (toList) or not contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))
fumList should (not equal (fumList) or not contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))
fumList should (not equal (toList) or not contain atMostOneOf ("FIE", "FEE", "FUU", "FOE"))
val e2 = intercept[TestFailedException] {
fumList should (not equal (fumList) or (not contain atMostOneOf ("FEE", "FIE", "FOE", "FUU")))
}
checkMessageStackDepth(e2, Resources.equaled(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)) + ", and " + Resources.containedAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUU\\""), fileName, thisLineNumber - 2)
}
"should use an explicitly provided Equality" in {
(fumList should (not equal (fumList) or not contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
(fumList should (not equal (toList) or not contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
(fumList should (not equal (fumList) or not contain atMostOneOf ("FIE", "FEE", "FUU", "FOE"))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(fumList should (not equal (toList) or not contain atMostOneOf ("FEE", "FIE", "FOE", "FUU"))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, Resources.equaled(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)) + ", and " + Resources.containedAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUU\\""), fileName, thisLineNumber - 2)
(fumList should (not contain atMostOneOf (" FEE ", " FIE ", " FOE ", " FUM ") or not contain atMostOneOf (" FEE ", " FIE ", " FOE ", " FUM "))) (after being lowerCased and trimmed, after being lowerCased and trimmed)
}
"should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value" in {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (not equal (toList) or not contain atMostOneOf("fee", "fie", "foe", "fie", "fum"))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.atMostOneOfDuplicate))
}
}
"when used with (not be (...) and not contain oneOf (...)) syntax" - {
"should do nothing if valid, else throw a TFE with an appropriate error message" in {
fumList should (not be (toList) or not contain atMostOneOf("fie", "fee", "fum", "foe"))
fumList should (not be (fumList) or not contain atMostOneOf("fie", "fee", "fum", "foe"))
fumList should (not be (toList) or not contain atMostOneOf("fee", "fie", "foe", "fuu"))
val e1 = intercept[TestFailedException] {
fumList should (not be (fumList) or not contain atMostOneOf ("fee", "fie", "foe", "fuu"))
}
checkMessageStackDepth(e1, Resources.wasEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)) + ", and " + Resources.containedAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"fee\\", \\"fie\\", \\"foe\\", \\"fuu\\""), fileName, thisLineNumber - 2)
}
"should use the implicit Equality in scope" in {
implicit val ise = upperCaseStringEquality
fumList should (not be (toList) or not contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))
fumList should (not be (fumList) or not contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))
fumList should (not be (toList) or not contain atMostOneOf ("FIE", "FEE", "FUU", "FOE"))
val e1 = intercept[TestFailedException] {
fumList should (not be (fumList) or (not contain atMostOneOf ("FEE", "FIE", "FOE", "FUU")))
}
checkMessageStackDepth(e1, Resources.wasEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)) + ", and " + Resources.containedAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUU\\""), fileName, thisLineNumber - 2)
}
"should use an explicitly provided Equality" in {
(fumList should (not be (toList) or not contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))) (decided by upperCaseStringEquality)
(fumList should (not be (fumList) or not contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))) (decided by upperCaseStringEquality)
(fumList should (not be (toList) or not contain atMostOneOf ("FIE", "FEE", "FUU", "FOE"))) (decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(fumList should (not be (fumList) or not contain atMostOneOf ("FEE", "FIE", "FOE", "FUU"))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, Resources.wasEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)) + ", and " + Resources.containedAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUU\\""), fileName, thisLineNumber - 2)
(fumList should (not contain atMostOneOf (" FEE ", " FIE ", " FOE ", " FUM ") or not contain atMostOneOf (" FEE ", " FIE ", " FOE ", " FUM "))) (after being lowerCased and trimmed, after being lowerCased and trimmed)
}
"should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value" in {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (not be (toList) or not contain atMostOneOf("fee", "fie", "foe", "fie", "fum"))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.atMostOneOfDuplicate))
}
}
}
"collection of Lists" - {
val list1s: Vector[List[Int]] = Vector(List(1, 2), List(1, 2), List(1, 2))
val lists: Vector[List[Int]] = Vector(List(1, 2), List(1, 2), List(2, 3))
val nils: Vector[List[Int]] = Vector(Nil, Nil, Nil)
val listsNil: Vector[List[Int]] = Vector(List(1), List(1), Nil)
val hiLists: Vector[List[String]] = Vector(List("hi", "he"), List("hi", "he"), List("hi", "he"))
val toLists: Vector[List[String]] = Vector(List("to", "you"), List("to", "you"), List("to", "you"))
def allErrMsg(index: Int, message: String, lineNumber: Int, left: Any): String =
"'all' inspection failed, because: \\n" +
" at index " + index + ", " + message + " (" + fileName + ":" + (lineNumber) + ") \\n" +
"in " + decorateToStringValue(prettifier, left)
"when used with (contain oneOf (..) and contain oneOf (..)) syntax" - {
"should do nothing if valid, else throw a TFE with an appropriate error message" in {
all (list1s) should (contain atMostOneOf (1, 6, 8) or contain atMostOneOf (1, 3, 4))
all (list1s) should (contain atMostOneOf (1, 2, 3) or contain atMostOneOf (1, 3, 4))
all (list1s) should (contain atMostOneOf (1, 6, 8) or contain atMostOneOf (1, 2, 3))
atLeast (2, lists) should (contain atMostOneOf (3, 2, 5) or contain atMostOneOf (2, 3, 4))
atLeast (2, lists) should (contain atMostOneOf (1, 2, 3) or contain atMostOneOf (2, 3, 4))
atLeast (2, lists) should (contain atMostOneOf (3, 2, 5) or contain atMostOneOf (1, 2, 3))
val e1 = intercept[TestFailedException] {
all (lists) should (contain atMostOneOf (2, 3, 4) or contain atMostOneOf (4, 3, 2))
}
checkMessageStackDepth(e1, allErrMsg(2, decorateToStringValue(prettifier, lists(2)) + " did not contain at most one of (2, 3, 4), and " + decorateToStringValue(prettifier, lists(2)) + " did not contain at most one of (4, 3, 2)", thisLineNumber - 2, lists), fileName, thisLineNumber - 2)
}
"should use the implicit Equality in scope" in {
implicit val ise = upperCaseStringEquality
all (hiLists) should (contain atMostOneOf ("HI", "HO") or contain atMostOneOf ("HO", "HI"))
all (hiLists) should (contain atMostOneOf ("HI", "HE") or contain atMostOneOf ("HO", "HI"))
all (hiLists) should (contain atMostOneOf ("HI", "HO") or contain atMostOneOf ("HI", "HE"))
val e1 = intercept[TestFailedException] {
all (hiLists) should (contain atMostOneOf ("HI", "HE") or contain atMostOneOf ("HE", "HI"))
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, hiLists(0)) + " did not contain at most one of (\\"HI\\", \\"HE\\"), and " + decorateToStringValue(prettifier, hiLists(0)) + " did not contain at most one of (\\"HE\\", \\"HI\\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
"should use an explicitly provided Equality" in {
(all (hiLists) should (contain atMostOneOf ("HI", "HO") or contain atMostOneOf ("HO", "HI"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
(all (hiLists) should (contain atMostOneOf ("HI", "HE") or contain atMostOneOf ("HO", "HI"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
(all (hiLists) should (contain atMostOneOf ("HI", "HO") or contain atMostOneOf ("HI", "HE"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(all (hiLists) should (contain atMostOneOf ("HI", "HE") or contain atMostOneOf ("HE", "HI"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, hiLists(0)) + " did not contain at most one of (\\"HI\\", \\"HE\\"), and " + decorateToStringValue(prettifier, hiLists(0)) + " did not contain at most one of (\\"HE\\", \\"HI\\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
"should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value" in {
val e1 = intercept[exceptions.NotAllowedException] {
all (list1s) should (contain atMostOneOf (1, 2, 2, 3) or contain atMostOneOf (1, 3, 4))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.atMostOneOfDuplicate))
val e2 = intercept[exceptions.NotAllowedException] {
all (list1s) should (contain atMostOneOf (1, 3, 4) or contain atMostOneOf (1, 2, 2, 3))
}
e2.failedCodeFileName.get should be (fileName)
e2.failedCodeLineNumber.get should be (thisLineNumber - 3)
e2.message should be (Some(Resources.atMostOneOfDuplicate))
}
}
"when used with (be (...) and contain oneOf (...)) syntax" - {
"should do nothing if valid, else throw a TFE with an appropriate error message" in {
all (list1s) should (be (List(1, 2)) or contain atMostOneOf (1, 6, 8))
all (list1s) should (be (List(2, 3)) or contain atMostOneOf (1, 6, 8))
all (list1s) should (be (List(1, 2)) or contain atMostOneOf (1, 2, 3))
val e1 = intercept[TestFailedException] {
all (list1s) should (be (List(2, 3)) or contain atMostOneOf (1, 2, 3))
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, list1s(0)) + " was not equal to " + decorateToStringValue(prettifier, List(2, 3)) + ", and " + decorateToStringValue(prettifier, list1s(0)) + " did not contain at most one of (1, 2, 3)", thisLineNumber - 2, list1s), fileName, thisLineNumber - 2)
}
"should use the implicit Equality in scope" in {
implicit val ise = upperCaseStringEquality
all (hiLists) should (be (List("hi", "he")) or contain atMostOneOf ("HI", "HO"))
all (hiLists) should (be (List("HO")) or contain atMostOneOf ("HI", "HO"))
all (hiLists) should (be (List("hi", "he")) or contain atMostOneOf ("HI", "HE"))
val e1 = intercept[TestFailedException] {
all (hiLists) should (be (List("HO")) or contain atMostOneOf ("HI", "HE"))
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, hiLists(0)) + " was not equal to " + decorateToStringValue(prettifier, List("HO")) + ", and " + decorateToStringValue(prettifier, hiLists(0)) + " did not contain at most one of (\\"HI\\", \\"HE\\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
"should use an explicitly provided Equality" in {
(all (hiLists) should (be (List("hi", "he")) or contain atMostOneOf ("HI", "HO"))) (decided by upperCaseStringEquality)
(all (hiLists) should (be (List("HO")) or contain atMostOneOf ("HI", "HO"))) (decided by upperCaseStringEquality)
(all (hiLists) should (be (List("hi", "he")) or contain atMostOneOf ("HI", "HE"))) (decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(all (hiLists) should (be (List("HO")) or contain atMostOneOf ("HI", "HE"))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, hiLists(0)) + " was not equal to " + decorateToStringValue(prettifier, List("HO")) + ", and " + decorateToStringValue(prettifier, hiLists(0)) + " did not contain at most one of (\\"HI\\", \\"HE\\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
"should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value" in {
val e1 = intercept[exceptions.NotAllowedException] {
all (list1s) should (be (List(1, 2)) or contain atMostOneOf (1, 2, 2, 3))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.atMostOneOfDuplicate))
}
}
"when used with (not contain oneOf (..) and not contain oneOf (..))" - {
"should do nothing if valid, else throw a TFE with an appropriate error message" in {
all (list1s) should (not contain atMostOneOf (3, 2, 1) or not contain atMostOneOf (1, 2, 3))
all (list1s) should (not contain atMostOneOf (1, 2, 8) or not contain atMostOneOf (1, 2, 3))
all (list1s) should (not contain atMostOneOf (3, 2, 1) or not contain atMostOneOf (1, 2, 8))
val e1 = intercept[TestFailedException] {
all (lists) should (not contain atMostOneOf (1, 2, 8) or not contain atMostOneOf (8, 2, 1))
}
checkMessageStackDepth(e1, allErrMsg(2, decorateToStringValue(prettifier, lists(2)) + " contained at most one of (1, 2, 8), and " + decorateToStringValue(prettifier, lists(2)) + " contained at most one of (8, 2, 1)", thisLineNumber - 2, lists), fileName, thisLineNumber - 2)
}
"should use the implicit Equality in scope" in {
implicit val ise = upperCaseStringEquality
all (hiLists) should (not contain atMostOneOf ("HI", "HE") or not contain atMostOneOf ("HE", "HI"))
all (hiLists) should (not contain atMostOneOf ("hi", "he") or not contain atMostOneOf ("HE", "HI"))
all (hiLists) should (not contain atMostOneOf ("HI", "HE") or not contain atMostOneOf ("hi", "he"))
val e1 = intercept[TestFailedException] {
all (hiLists) should (not contain atMostOneOf ("HE", "HEY", "HOWDY") or not contain atMostOneOf ("HE", "HEY", "HOWDY"))
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, hiLists(0)) + " contained at most one of (\\"HE\\", \\"HEY\\", \\"HOWDY\\"), and " + decorateToStringValue(prettifier, hiLists(0)) + " contained at most one of (\\"HE\\", \\"HEY\\", \\"HOWDY\\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
"should use an explicitly provided Equality" in {
(all (hiLists) should (not contain atMostOneOf ("HI", "HE") or not contain atMostOneOf ("HE", "HI"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
(all (hiLists) should (not contain atMostOneOf ("hi", "he") or not contain atMostOneOf ("HE", "HI"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
(all (hiLists) should (not contain atMostOneOf ("HI", "HE") or not contain atMostOneOf ("hi", "he"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(all (hiLists) should (not contain atMostOneOf ("HE", "HEY", "HOWDY") or not contain atMostOneOf ("HE", "HEY", "HOWDY"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, hiLists(0)) + " contained at most one of (\\"HE\\", \\"HEY\\", \\"HOWDY\\"), and " + decorateToStringValue(prettifier, hiLists(0)) + " contained at most one of (\\"HE\\", \\"HEY\\", \\"HOWDY\\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
"should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value" in {
val e1 = intercept[exceptions.NotAllowedException] {
all (list1s) should (not contain atMostOneOf (1, 2, 2, 3) or not contain atMostOneOf (1, 2, 3))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.atMostOneOfDuplicate))
val e2 = intercept[exceptions.NotAllowedException] {
all (list1s) should (not contain atMostOneOf (1, 2, 3) or not contain atMostOneOf (1, 2, 2, 3))
}
e2.failedCodeFileName.get should be (fileName)
e2.failedCodeLineNumber.get should be (thisLineNumber - 3)
e2.message should be (Some(Resources.atMostOneOfDuplicate))
}
}
"when used with (not be (...) and not contain atMostOneOf (...))" - {
"should do nothing if valid, else throw a TFE with an appropriate error message" in {
all (list1s) should (not be (List(2, 3)) or not contain atMostOneOf (1, 2, 3))
all (list1s) should (not be (List(1, 2)) or not contain atMostOneOf (1, 2, 3))
all (list1s) should (not be (List(2, 3)) or not contain atMostOneOf (2, 3, 4))
val e1 = intercept[TestFailedException] {
all (list1s) should (not be (List(1, 2)) or not contain atMostOneOf (2, 3, 4))
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, list1s(0)) + " was equal to " + decorateToStringValue(prettifier, List(1, 2)) + ", and " + decorateToStringValue(prettifier, list1s(0)) + " contained at most one of (2, 3, 4)", thisLineNumber - 2, list1s), fileName, thisLineNumber - 2)
}
"should use the implicit Equality in scope" in {
implicit val ise = upperCaseStringEquality
all (hiLists) should (not be (List("ho")) or not contain atMostOneOf ("HI", "HE"))
all (hiLists) should (not be (List("hi", "he")) or not contain atMostOneOf ("HE", "HI"))
all (hiLists) should (not be (List("ho")) or not contain atMostOneOf ("HI", "HE"))
val e1 = intercept[TestFailedException] {
all (hiLists) should (not be (List("hi", "he")) or not contain atMostOneOf ("HI", "HO"))
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, hiLists(0)) + " was equal to " + decorateToStringValue(prettifier, List("hi", "he")) + ", and " + decorateToStringValue(prettifier, hiLists(0)) + " contained at most one of (\\"HI\\", \\"HO\\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
"should use an explicitly provided Equality" in {
(all (hiLists) should (not be (List("ho")) or not contain atMostOneOf ("HI", "HE"))) (decided by upperCaseStringEquality)
(all (hiLists) should (not be (List("hi", "he")) or not contain atMostOneOf ("HE", "HI"))) (decided by upperCaseStringEquality)
(all (hiLists) should (not be (List("ho")) or not contain atMostOneOf ("HI", "HE"))) (decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(all (hiLists) should (not be (List("hi", "he")) or not contain atMostOneOf ("HI", "HO"))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, hiLists(0)) + " was equal to " + decorateToStringValue(prettifier, List("hi", "he")) + ", and " + decorateToStringValue(prettifier, hiLists(0)) + " contained at most one of (\\"HI\\", \\"HO\\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
"should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value" in {
val e1 = intercept[exceptions.NotAllowedException] {
all (list1s) should (not be (List(2, 3)) or not contain atMostOneOf (1, 2, 2, 3))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.atMostOneOfDuplicate))
}
}
}
}
|
scalatest/scalatest
|
jvm/scalatest-test/src/test/scala/org/scalatest/ListShouldContainAtMostOneOfLogicalOrSpec.scala
|
Scala
|
apache-2.0
| 44,227 |
package nexus.diff.syntax
import nexus._
/**
* @author Tongfei Chen
*/
trait BoolMixin {
implicit class BoolOps[B](val a: B)(implicit B: IsBool[B]) {
def &&(b: B) = B.and(a, b)
def ||(b: B) = B.or(a, b)
def unary_! = B.not(a)
def ^(b: B) = B.xor(a, b)
}
implicit class CondOps[B, E[_]](val c: B)(implicit B: Cond[B, E]) {
def cond[A](t: E[A], f: E[A]): E[A] = B.cond(c, t, f)
}
}
|
ctongfei/nexus
|
diff/src/main/scala/nexus/diff/syntax/GenBoolOps.scala
|
Scala
|
mit
| 420 |
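One way to see the mixin in action is a hand-rolled instance; the member names below are inferred from the calls BoolOps makes and are an assumption about the IsBool typeclass, which is defined elsewhere in nexus:

import nexus._

object BoolDemo extends BoolMixin {
  // Hypothetical IsBool[Boolean] instance; the real nexus typeclass may
  // declare more members than the four methods BoolOps actually calls.
  implicit object BooleanIsBool extends IsBool[Boolean] {
    def and(a: Boolean, b: Boolean): Boolean = a && b
    def or(a: Boolean, b: Boolean): Boolean  = a || b
    def not(a: Boolean): Boolean             = !a
    def xor(a: Boolean, b: Boolean): Boolean = a ^ b
  }
  // BoolOps mainly pays off for types *without* built-in boolean operators
  // (e.g. symbolic expression types); on Boolean itself the native && wins.
  def demo[B: IsBool](a: B, b: B): B = (a && b) ^ !(a || b)
}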
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package scala.pickling.externalizable.mapstatus
import org.scalatest.FunSuite
import scala.pickling._, scala.pickling.Defaults._, json._
import scala.reflect.{ClassTag, classTag}
import java.io.{Externalizable, IOException, ObjectInput, ObjectOutput}
import java.util.concurrent.ConcurrentHashMap
/**
 * This class represents a unique identifier for a BlockManager.
 * The first two constructors of this class are made private to ensure that
 * BlockManagerId objects can be created only through the apply methods in
 * the companion object. This allows de-duplication of ID objects.
 * Also, the constructor parameters are private so that they cannot
 * be modified from outside this class.
*/
class BlockManagerId private (
private var executorId_ : String,
private var host_ : String,
private var port_ : Int,
private var nettyPort_ : Int
) extends Externalizable {
private def this() = this(null, null, 0, 0) // For deserialization only
def executorId: String = executorId_
if (null != host_){
// Utils.checkHost(host_, "Expected hostname")
assert (port_ > 0)
}
def hostPort: String = {
// DEBUG code
// Utils.checkHost(host)
assert (port > 0)
host + ":" + port
}
def host: String = host_
def port: Int = port_
def nettyPort: Int = nettyPort_
override def writeExternal(out: ObjectOutput) {
out.writeUTF(executorId_)
out.writeUTF(host_)
out.writeInt(port_)
out.writeInt(nettyPort_)
}
override def readExternal(in: ObjectInput) {
executorId_ = in.readUTF()
host_ = in.readUTF()
port_ = in.readInt()
nettyPort_ = in.readInt()
}
@throws(classOf[IOException])
private def readResolve(): Object = BlockManagerId.getCachedBlockManagerId(this)
override def toString = "BlockManagerId(%s, %s, %d, %d)".format(executorId, host, port, nettyPort)
override def hashCode: Int = (executorId.hashCode * 41 + host.hashCode) * 41 + port + nettyPort
override def equals(that: Any) = that match {
case id: BlockManagerId =>
executorId == id.executorId && port == id.port && host == id.host && nettyPort == id.nettyPort
case _ =>
false
}
}
object BlockManagerId {
/**
   * Returns a [[org.apache.spark.storage.BlockManagerId]] for the given configuration.
*
* @param execId ID of the executor.
* @param host Host name of the block manager.
* @param port Port of the block manager.
* @param nettyPort Optional port for the Netty-based shuffle sender.
* @return A new [[org.apache.spark.storage.BlockManagerId]].
*/
def apply(execId: String, host: String, port: Int, nettyPort: Int) =
getCachedBlockManagerId(new BlockManagerId(execId, host, port, nettyPort))
def apply(in: ObjectInput) = {
val obj = new BlockManagerId()
obj.readExternal(in)
getCachedBlockManagerId(obj)
}
val blockManagerIdCache = new ConcurrentHashMap[BlockManagerId, BlockManagerId]()
def getCachedBlockManagerId(id: BlockManagerId): BlockManagerId = {
blockManagerIdCache.putIfAbsent(id, id)
blockManagerIdCache.get(id)
}
}
/**
* Result returned by a ShuffleMapTask to a scheduler. Includes the block manager address that the
* task ran on as well as the sizes of outputs for each reducer, for passing on to the reduce tasks.
* The map output sizes are compressed using MapOutputTracker.compressSize.
*/
class MapStatus(var location: BlockManagerId, var compressedSizes: Array[Byte])
extends Externalizable {
def this() = this(null, null) // For deserialization only
def writeExternal(out: ObjectOutput) {
location.writeExternal(out)
val len = compressedSizes.length
out.writeInt(len)
out.write(compressedSizes)
}
def readExternal(in: ObjectInput) {
location = BlockManagerId(in)
val len = in.readInt()
compressedSizes = new Array[Byte](len)
in.readFully(compressedSizes)
}
}
class MapStatusTest extends FunSuite {
def register[T: ClassTag: Pickler: Unpickler : FastTypeTag](): Unit = {
val clazz = classTag[T].runtimeClass
val p = implicitly[Pickler[T]]
val up = implicitly[Unpickler[T]]
val tagKey = implicitly[FastTypeTag[T]].key
internal.currentRuntime.picklers.registerPickler(tagKey, p)
internal.currentRuntime.picklers.registerUnpickler(tagKey, up)
}
register[MapStatus]
test("main") {
val bid = BlockManagerId("0", "localhost", 8080, 8090)
val ms = new MapStatus(bid, Array[Byte](1, 2, 3, 4))
val sizes: String = ms.compressedSizes.mkString(",")
val p = (ms: Any).pickle
val up = p.unpickle[Any]
val ms2 = up.asInstanceOf[MapStatus]
val sizes2: String = ms2.compressedSizes.mkString(",")
assert(sizes == sizes2, "same array expected")
}
}
|
scala/pickling
|
core/src/test/scala/scala/pickling/generation/MapStatusTest.scala
|
Scala
|
bsd-3-clause
| 5,549 |
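Because MapStatus is Externalizable, a plain Java-serialization round-trip is an independent sanity check alongside the pickling test; a sketch assuming the classes above are on the classpath:

import java.io._

object ExternalizableRoundTrip {
  def roundTrip(ms: MapStatus): MapStatus = {
    val buffer = new ByteArrayOutputStream()
    val out = new ObjectOutputStream(buffer)
    out.writeObject(ms)      // drives MapStatus.writeExternal
    out.close()
    val in = new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray))
    try in.readObject().asInstanceOf[MapStatus]   // drives readExternal
    finally in.close()
  }
}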
package at.linuxhacker.procmetrics.bin
import at.linuxhacker.procmetrics.lib._
import at.linuxhacker.procmetrics.global._
import at.linuxhacker.procmetrics.pidstats._
import at.linuxhacker.procmetrics.converts.ProcConverters
import at.linuxhacker.procmetrics.monitor._
import at.linuxhacker.procmetrics.values._
import scopt._
import play.api.libs.json._
import play.api.libs.functional.syntax._
import at.linuxhacker.procmetrics.couchdb.CouchDb
import at.linuxhacker.procmetrics.lib._
object Statistics {
case class Result( filteredPids: List[Pid], globals: List[ProcGlobal], stats: List[ProcCategory],
multiGlobalsStat: List[MultiGlobalStatsResult], sysStat: List[ProcGlobal] ) {
def toJson( ): JsObject = {
ProcConverters.toJson( globals, stats, multiGlobalsStat, sysStat )
}
}
def get( filter: ( String, List[Pid] ) => List[Pid], filterPattern: String ): Result = {
val procInfo = new ProcInfo( "/" )
val dirList = procInfo.getPidDirList()
val pids = procInfo.getCommandList( dirList )
val filteredPids = procInfo.filterPids( filter )( filterPattern, pids )
val stats = procInfo.getStat( List( Schedstat, Io, Statm, Status, PStat ), filteredPids )
val globals = procInfo.getGlobals( List( GlobalUptime, Cpuinfo, Loadavg ) )
val multiGlobalsStat = procInfo.getMultiGlobals( List( MultiGlobalStatsSpecifier( "NetDev", NetDev ) ) )
val netDeviceNames = multiGlobalsStat
.filter( x => x.name == "NetDev" )
.map( x => x.result.map( y => y.category ) )
.flatten
val sysStat = procInfo.getSysfsNetMac( netDeviceNames )
Result( filteredPids, globals, stats, multiGlobalsStat, sysStat )
}
}
object ProcMetricsMain {
def main( args: Array[String] ): Unit = {
val filterPattern = {
if ( args.length > 0 )
args(0)
else
""
}
println( Statistics.get( ProcFilter.patternFilter, filterPattern ).toJson.toString )
}
}
object ProcMetricsQemu {
def main( args: Array[String] ): Unit = {
val result = Statistics.get( ProcFilter.patternFilter, "qemu" )
val data1 = result.toJson
val qemuDescriptions = result.filteredPids.map( x => ProcGlobal( x.pid, extractValues( x.cmdline ) ) )
val data2 = ProcConverters.globalsToJson( qemuDescriptions )
val jsonTransformer = (__).json.update(
__.read[JsObject].map { o =>
o ++ Json.obj( "qemu_parameter" -> JsObject( data2 ) ) } )
data1.transform( jsonTransformer ) match {
case JsSuccess( result, x ) => println ( result )
case _ => throw new Exception ( "unbelievable..." )
}
}
private case class ExtractInfo( name: String,
regex: scala.util.matching.Regex, f:( String ) => ProcGenValue )
private val infoList = List(
ExtractInfo( "memory", """.*-m (\d+).*""".r,
( x: String ) => ValueFactory.create( x.toInt ) ),
ExtractInfo( "name", """.*-name ([a-zA-Z1-9\-_]+).*""".r,
( x: String ) => ValueFactory.create( x ) ),
ExtractInfo( "mac", """mac=([^, ]+)""".r,
( x: String ) => ValueFactory.create( x ) ) )
private def extractValues( cmdline: String ): List[ProcValue] = {
infoList.map( item => {
val result = ( item.regex findAllMatchIn cmdline ).map( x => x.group(1) ).toList
val x = result.length match {
case 1 =>
ProcValueFactory.create( item.name, item.f( result(0) ) )
case x if x > 1 =>
ProcValueFactory.create( item.name, { result.map( i => item.f( i ) ) } )
case _ =>
throw new Exception( "Cannot find regex: " + item.regex + " in command line: " + cmdline )
}
x
} )
}
}
object ProcMetricsCouchDb {
case class Config(
couchdbUrl: String = "http://localhost:5984",
database: String = "mydatabase",
docType: String = "metrics1",
docNamePrefix: String = "m1",
modules: Seq[String] = Seq( ),
help: Boolean = false,
filter: String = ""
//regexFilter: Boolean = false
)
def main( args: Array[String] ): Unit = {
val parser = new OptionParser[Config]( "procmetrics-couchdb" ) {
head( "ProcMetricsCouchDb", "0.1" )
opt[String]( 'c', "couchdb-url" ) action { ( x, c )
=> c.copy( couchdbUrl = x ) } text( "CouchDb Url: http://localhost:5984/" )
opt[String]( 'd', "database" ) action{ ( x, c )
=> c.copy( database = x) } text( "Database name" )
opt[String]( 't', "doc-type" ) action{ ( x, c )
=> c.copy( docType = x ) } text( "Document type name, for example: metrics1" )
opt[String]( 'p', "doc-name-prefix" ) action{ ( x, c )
=> c.copy( docNamePrefix = x ) } text( "Document name prefix, for example: m1" )
opt[Seq[String]]( 'm', "modules" ) action { ( x, c )
=> c.copy( modules = x ) } text( "modulename, modulename... , list all modules with -l" )
opt[Unit]( 'h', "help" ) action{ ( x, c )
=> c.copy( help = true ) } text( "Show help" )
opt[String]( 'f', "filter" ) action { ( x, c )
=> c.copy( filter = x ) } text( "process filter" )
/*
opt[Unit]( 'e', "regex-filter" ) action{ ( x, c )
=> c.copy( regexFilter = true ) } text( "It is a regex filter type" ) */
}
val result = parser.parse( args.toSeq, Config( ) )
if ( result.get.help )
println( parser.usage )
else
sendMetrics( result )
}
private def sendMetrics( result: Option[ProcMetricsCouchDb.Config] ): Unit = {
val timestamp = ( System.currentTimeMillis / 1000 ).toInt
val docId = result.get.docNamePrefix + "_" + timestamp
val docType = result.get.docType
val url = result.get.couchdbUrl + "/" + result.get.database + "/" + docId
val filter: ( String, List[Pid] ) => List[Pid] = {
if ( result.get.filter != "" )
ProcFilter.patternFilter
else
ProcFilter.nullFilter
}
val data = Statistics.get(filter, result.get.filter ).toJson
val jsonTransformer = ( __ ).json.update(
__.read[JsObject].map { o => {
var x = o ++ Json.obj( "_id" -> docId )
x = x ++ Json.obj( "docType" -> docType )
x ++ Json.obj( "runtime" -> timestamp ) } } )
val transformed = data.transform( jsonTransformer )
transformed.asOpt match {
case Some( s ) => {
val response = CouchDb.put( url, s.toString )
if ( !response.success ) {
println( "Error storing Document, code. " + response.code )
if ( response.code < 0 )
println( "Exception: " + response.message )
else
println( "CouchDb Message: " + response.body )
} else {
println( "Document successfully stored in CouchDb" )
}
}
case _ => println( "Unknown error." )
}
}
}
object ProcMetricsMonitor {
def main( args: Array[String] ): Unit = {
val columns = List(
Column( "netstat", "in_octets" ),
Column( "netstat", "out_octets" ),
Column( "status", "VmSize" ),
Column( "io", "read_bytes" ),
Column( "io", "write_bytes" ),
Column( "stat", "cpu_sum_sec" ) )
while ( true ) {
val procInfo = new ProcInfo( "/" )
val dirList = procInfo.getPidDirList()
val pids = procInfo.getCommandList( dirList )
val filteredPids = {
if ( args.length > 0 )
procInfo.filterPids( ProcFilter.patternFilter )( args(0), pids )
else
procInfo.filterPids( ProcFilter.nullFilter )( "", pids )
}
val filteredPidsToCmdlineMap = filteredPids.map( x => x.pid -> x.cmdline ).toMap
val t1 = MonitorFunctions.transformToColumns(
procInfo.getStat( List( Schedstat, Io, Statm, Status, PStat ),
filteredPids ),
columns )
Thread.sleep( 1000 )
val t2 = MonitorFunctions.transformToColumns(
procInfo.getStat( List( Schedstat, Io, Statm, Status, PStat ),
filteredPids ),
columns )
val r = MonitorFunctions.diffTables( t1, t2 )
val x = r.filter( _._2.filter( _.values(0).asInstanceOf[ProcFloatValue].value != 0f ).length > 0 )
x.foreach( pid => {
val withoutNull = pid._2.filter( _.values(0).asInstanceOf[ProcFloatValue].value != 0f )
if ( withoutNull.length > 0 ) {
val t = withoutNull.map( x => x.name + ": " + x.values(0).asInstanceOf[ProcFloatValue].value )
val cmdline = filteredPidsToCmdlineMap( pid._1 )
println( "PID: %8s c: %-20s v: %s"
.format( pid._1,
{ if ( cmdline.length() <= 20 ) cmdline else cmdline.substring( 0, 19 ) },
t.foldLeft( "" )( ( t, c ) => t + " " + c ) ) )
}
} )
println ( )
}
}
}
|
hstraub/ProcMetrics
|
src/main/scala/at/linuxhacker/procmetrics/bin/ProcMetricsMain.scala
|
Scala
|
gpl-3.0
| 8,855 |
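For reference, the scopt flags defined above can also be driven programmatically; the values here are placeholders:

// Equivalent to: procmetrics-couchdb --couchdb-url http://localhost:5984 \
//                --database metrics --filter qemu
ProcMetricsCouchDb.main(Array(
  "--couchdb-url", "http://localhost:5984",
  "--database",    "metrics",
  "--filter",      "qemu"))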
package com.instantor.plugin
package utils
import sbt._
import library.BranchTools
import com.instantor.commons.Memoize1
import com.instantor.props.PropsResolver
class BranchToolsWrapper(logger: Logger) {
private val bt = new BranchTools(logger)
val topProjectName = bt.topProjectName _
val propsResolver = Memoize1(bt.propsResolver)
val credentials = Memoize1(bt.credentials)
}
|
instantor/SbtInstantorPlugin
|
code/src/main/scala/com/instantor/plugin/utils/BranchToolsWrapper.scala
|
Scala
|
bsd-3-clause
| 396 |
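Memoize1 comes from com.instantor.commons and is not shown here; as a sketch of the contract this wrapper relies on (an assumption, not the library's code), a single-argument memoizer can be as small as:

import scala.collection.concurrent.TrieMap

// Hypothetical stand-in: caches the result of f per distinct argument,
// so propsResolver and credentials are resolved at most once per input.
class Memoize1[A, B](f: A => B) extends (A => B) {
  private val cache = TrieMap.empty[A, B]
  def apply(a: A): B = cache.getOrElseUpdate(a, f(a))
}
object Memoize1 {
  def apply[A, B](f: A => B): Memoize1[A, B] = new Memoize1(f)
}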
/*
* Can be used like this:
* ```
* import my.package.TryExtended
*
 * assert(Try(1).zip(Try("2")) == Try((1, "2")))
* ```
*/
import scala.collection.mutable.ArrayBuffer
import scala.util.{Failure, Success, Try}

implicit class TryExtended[+T](val wrapped: Try[T]) extends AnyVal {
  def zip[That](that: => Try[That]): Try[(T, That)] =
    for (a <- wrapped; b <- that) yield (a, b)
}

object TryExtended {
  // Similar to Future.sequence, but for Try: fails fast on the first Failure
  def sequence[T](tries: Iterable[Try[T]]): Try[Seq[T]] = {
    val acc = new ArrayBuffer[T]()
    tries.foreach {
      case Success(value) => acc += value
      case Failure(error) => return Failure(error)
    }
    Success(acc.toSeq)
  }
}
|
broartem/snippets
|
scala/TryExtended.scala
|
Scala
|
mit
| 598 |
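Usage sketch for sequence (it fails fast on the first Failure):

import scala.util.Try

TryExtended.sequence(Seq(Try(1), Try(2)))      // Success of Seq(1, 2)
TryExtended.sequence(Seq(Try(1), Try(2 / 0)))  // Failure(ArithmeticException)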
package com.karasiq.shadowcloud.model.keys
import com.karasiq.shadowcloud.model.RegionId
import com.karasiq.shadowcloud.model.keys.KeyProps.RegionSet
@SerialVersionUID(0L)
final case class KeyProps(key: KeySet, regionSet: RegionSet, forEncryption: Boolean, forDecryption: Boolean)
object KeyProps {
type RegionSet = Set[RegionId]
object RegionSet {
val all: RegionSet = Set.empty
def enabledOn(regionSet: RegionSet, regionId: RegionId) = regionSet.isEmpty || regionSet.contains(regionId)
}
}
|
Karasiq/shadowcloud
|
model/src/main/scala/com/karasiq/shadowcloud/model/keys/KeyProps.scala
|
Scala
|
apache-2.0
| 509 |
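The empty set doubles as the "all regions" wildcard, which enabledOn encodes; an illustration, assuming RegionId is a string-like identifier:

import com.karasiq.shadowcloud.model.keys.KeyProps.RegionSet

RegionSet.enabledOn(RegionSet.all, "region-1")    // true: empty set matches every region
RegionSet.enabledOn(Set("region-1"), "region-1")  // true: explicit membership
RegionSet.enabledOn(Set("region-2"), "region-1")  // false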
package akka.dispatch.verification
import akka.actor.Cell
import akka.actor.ActorRef
import akka.actor.Props;
import akka.actor.AutoReceivedMessage;
import akka.actor.ActorIdentity;
import akka.actor.ReceiveTimeout;
import akka.dispatch.Envelope
// The interface for schedulers
trait Scheduler {
def isSystemCommunication(sender: ActorRef, receiver: ActorRef): Boolean
def isSystemCommunication(sender: ActorRef, receiver: ActorRef, msg: Any): Boolean = {
if (msg.isInstanceOf[AutoReceivedMessage] ||
msg.isInstanceOf[ActorIdentity] ||
msg.isInstanceOf[ReceiveTimeout]) {
return true
}
return isSystemCommunication(sender, receiver)
}
// Is this message a system message
def isSystemMessage(src: String, dst: String): Boolean
def isSystemMessage(src: String, dst: String, msg: Any): Boolean = {
if (msg.isInstanceOf[AutoReceivedMessage] ||
msg.isInstanceOf[ActorIdentity] ||
msg.isInstanceOf[ReceiveTimeout]) {
return true
}
return isSystemMessage(src, dst)
}
// Notification that the system has been reset
def start_trace() : Unit
// Get the next message to schedule. Make sure not to return a message that
// is destined for a blocked actor! Otherwise an exception will be thrown.
def schedule_new_message(blockedActors: Set[String]) : Option[(Cell, Envelope)]
// Get next event to schedule (used while restarting the system)
def next_event() : Event
// Notify that there are no more events to run
def notify_quiescence () : Unit
// Called before we start processing a newly received event
def before_receive(cell: Cell) : Unit
// Called after receive is done being processed
def after_receive(cell: Cell) : Unit
def before_receive(cell: Cell, msg: Any) : Unit =
before_receive(cell)
def after_receive(cell: Cell, msg: Any) : Unit =
after_receive(cell)
// Record that an event was produced
def event_produced(event: Event) : Unit
def event_produced(cell: Cell, envelope: Envelope) : Unit
// Record that an event was consumed
def event_consumed(event: Event) : Unit
def event_consumed(cell: Cell, envelope: Envelope)
  // Called when a timer is cancelled
  def notify_timer_cancel(receiver: String, msg: Any)
  // Tell the scheduler that it should eventually schedule the given message.
  // Used to feed messages from the external world into actor systems.
  // Interface for (safely) sending external messages
  def enqueue_message(sender: Option[ActorRef], receiver: String, msg: Any)
// Interface for (safely) sending timers (akka.scheduler messages)
def enqueue_timer(receiver: String, msg: Any) = enqueue_message(None, receiver, msg)
// Interface for notifying the scheduler about a code block that has just
// been scheduled by the application, through akka.scheduler.schedule().
// cell and envelope are fake, used as placeholders.
def enqueue_code_block(cell: Cell, envelope: Envelope) {
event_produced(cell, envelope)
}
// Shut down the actor system.
def shutdown()
// Invoked whenever the application logs to the console. Used mostly for
// Synoptic integration.
// [http://www.cs.ubc.ca/~bestchai/papers/esecfse2011-final.pdf]
def notify_log_message(msg: String) = {}
// When an actor has been terminated, the ActorCell references associated
// with it are no longer valid. Remove all of them, and return all
// (sender, message) pairs that used to be pending for this actor. These
// may later be resent by Instrumenter.
def actorTerminated(actor: String): Seq[(String, Any)] = {
throw new RuntimeException("NYI")
}
// Invoked by Instrumenter after dispatchAfterMailboxIdle(name) has been
// called and name's mailbox has been set to idle state
def handleMailboxIdle() {
Instrumenter().start_dispatch
}
}
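// Hedged usage sketch (not part of the original trait): an external test driver
// would typically feed messages through the safe enqueue interface, e.g.
//   scheduler.enqueue_message(None, "node-1", Ping)  // external message
//   scheduler.enqueue_timer("node-1", Tick)          // delegates to enqueue_message
// where `Ping`/`Tick` and the actor name are illustrative assumptions.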
|
NetSys/demi
|
src/main/scala/verification/schedulers/Scheduler.scala
|
Scala
|
bsd-2-clause
| 3,827 |
/*
* Copyright 2009-2010 LinkedIn, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.linkedin.norbert.cluster
import org.specs.SpecificationWithJUnit
import org.specs.mock.Mockito
import org.specs.util.WaitFor
import actors.Actor._
class ClusterNotificationManagerComponentSpec extends SpecificationWithJUnit with Mockito with WaitFor with ClusterNotificationManagerComponent {
val clusterNotificationManager = new ClusterNotificationManager
clusterNotificationManager.start
val shortNodes = Set(Node(1, "localhost:31313", Set(1, 2), false))
val nodes = shortNodes ++ List(Node(2, "localhost:31314", Set(3, 4), true),
Node(3, "localhost:31315", Set(5, 6), false))
"ClusterNotificationManager" should {
import ClusterNotificationMessages._
"when handling an AddListener message" in {
"send a Connected event to the listener if the cluster is connected" in {
clusterNotificationManager ! Connected(nodes)
var callCount = 0
var currentNodes: Set[Node] = Set()
val listener = actor {
react {
case ClusterEvents.Connected(n) => callCount += 1; currentNodes = n
}
}
clusterNotificationManager ! AddListener(listener)
callCount must eventually(be_==(1))
currentNodes.size must be_==(1)
currentNodes.foreach { node =>
node.id must be_==(2)
}
}
"not send a Connected event to the listener if the cluster is not connected" in {
var callCount = 0
val listener = actor {
react {
case ClusterEvents.Connected(_) => callCount += 1
}
}
clusterNotificationManager ! AddListener(listener)
waitFor(20.ms)
callCount must be_==(0)
}
}
"when handling a RemoveListener message remove the listener" in {
var callCount = 0
val listener = actor {
loop {
react {
case ClusterEvents.Connected(_) => callCount += 1
case ClusterEvents.NodesChanged(_) => callCount += 1
}
}
}
val key = clusterNotificationManager !? AddListener(listener) match {
case AddedListener(key) => key
}
clusterNotificationManager ! Connected(nodes)
clusterNotificationManager ! RemoveListener(key)
clusterNotificationManager ! NodesChanged(nodes)
callCount must eventually(be_==(1))
}
"when handling a Connected message" in {
"notify listeners" in {
var callCount = 0
var currentNodes: Set[Node] = Set()
val listener = actor {
loop {
react {
case ClusterEvents.Connected(n) => callCount += 1; currentNodes = n
}
}
}
clusterNotificationManager ! AddListener(listener)
clusterNotificationManager ! Connected(nodes)
callCount must eventually(be_==(1))
currentNodes.size must be_==(1)
currentNodes.foreach { node =>
node.id must be_==(2)
}
}
"do nothing if already connected" in {
var callCount = 0
val listener = actor {
loop {
react {
case ClusterEvents.Connected(_) => callCount += 1
case _ =>
}
}
}
clusterNotificationManager ! AddListener(listener)
clusterNotificationManager ! Connected(nodes)
clusterNotificationManager ! Connected(nodes)
callCount must eventually(be_==(1))
}
}
"when handling a NodesChanged message" in {
"notify listeners" in {
var callCount = 0
var currentNodes: Set[Node] = Set()
val listener = actor {
loop {
react {
case ClusterEvents.NodesChanged(n) =>
callCount += 1
currentNodes = n
}
}
}
clusterNotificationManager ! Connected(shortNodes)
clusterNotificationManager ! AddListener(listener)
clusterNotificationManager ! NodesChanged(nodes)
callCount must eventually(be_==(1))
currentNodes.size must be_==(1)
currentNodes.foreach { node =>
node.id must be_==(2)
}
}
}
"do nothing is not connected" in {
var callCount = 0
val listener = actor {
loop {
react {
case ClusterEvents.NodesChanged(n) => callCount += 1
case _ =>
}
}
}
clusterNotificationManager ! Connected(shortNodes)
clusterNotificationManager ! AddListener(listener)
clusterNotificationManager ! NodesChanged(nodes)
callCount must eventually(be_==(1))
}
"when handling a Disconnected message" in {
"disconnects the cluster" in {
clusterNotificationManager ! Connected(nodes)
clusterNotificationManager ! Disconnected
clusterNotificationManager !? GetCurrentNodes match {
case CurrentNodes(nodes) => nodes.size must be_==(0)
}
}
"notify listeners" in {
var callCount = 0
val listener = actor {
loop {
react {
case ClusterEvents.Disconnected => callCount += 1
case _ =>
}
}
}
clusterNotificationManager ! AddListener(listener)
clusterNotificationManager ! Connected(nodes)
clusterNotificationManager ! Disconnected
callCount must eventually(be_==(1))
}
"do nothing if not connected" in {
var callCount = 0
val listener = actor {
loop {
react {
case ClusterEvents.Disconnected => callCount += 1
case _ =>
}
}
}
clusterNotificationManager ! AddListener(listener)
clusterNotificationManager ! Disconnected
callCount must eventually(be_==(0))
}
}
"when handling a Shutdown message stop handling events after shutdown" in {
var connectedCallCount = 0
var shutdownCallCount = 0
val listener = actor {
loop {
react {
case ClusterEvents.Connected(_) => connectedCallCount += 1
case ClusterEvents.Shutdown => shutdownCallCount += 1
case _ =>
}
}
}
clusterNotificationManager ! AddListener(listener)
clusterNotificationManager ! Connected(nodes)
clusterNotificationManager ! Shutdown
clusterNotificationManager ! Connected(nodes)
connectedCallCount must eventually(be_==(1))
shutdownCallCount must eventually(be_==(1))
}
}
}
|
rhavyn/norbert
|
cluster/src/test/scala/com/linkedin/norbert/cluster/ClusterNotificationManagerComponentSpec.scala
|
Scala
|
apache-2.0
| 7,196 |
package com.sksamuel.elastic4s.testkit
import com.sksamuel.elastic4s.http.JavaClient
import com.sksamuel.elastic4s.{ElasticClient, ElasticDsl, ElasticProperties}
import scala.util.Try
trait DockerTests extends ElasticDsl with ClientProvider {
val elasticHost = sys.env.getOrElse("ES_HOST", "127.0.0.1")
val elasticPort = sys.env.getOrElse("ES_PORT",
// use obscure ports for the tests to reduce the risk of interfering with existing elastic installations/containers
"39227"
)
val client = ElasticClient(JavaClient(ElasticProperties(s"http://$elasticHost:$elasticPort")))
protected def deleteIdx(indexName: String): Unit = {
Try {
client.execute {
ElasticDsl.deleteIndex(indexName)
}.await
}
}
protected def createIdx(name: String) = Try {
client.execute {
createIndex(name)
}.await
}
protected def cleanIndex(indexName: String): Unit = {
deleteIdx(indexName)
createIdx(indexName)
}
}
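// Hedged usage sketch (illustrative, not part of the original file): a concrete
// suite would mix in DockerTests and use the client plus the index helpers, e.g.
//   class PeopleIndexTest extends AnyFunSuite with DockerTests {
//     cleanIndex("people")  // drop + recreate
//     client.execute(indexInto("people").fields("name" -> "sam")).await
//   }
// `AnyFunSuite`, the index name, and the document fields are assumptions.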
|
stringbean/elastic4s
|
elastic4s-testkit/src/main/scala/com/sksamuel/elastic4s/testkit/DockerTests.scala
|
Scala
|
apache-2.0
| 975 |
package dispatch.json
import dispatch._
trait ImplicitJsHandlers {
/** Add JSON-processing method ># to dispatch.Request */
implicit def requestToJsHandlers(r: Request) = new JsHandlers(r)
implicit def stringToJsHandlers(r: String) = new JsHandlers(new Request(r))
}
trait JsHttp extends ImplicitJsHandlers with Js
object JsHttp extends JsHttp
class JsHandlers(subject: Request) {
/** Process response as JsValue in block */
def ># [T](block: json.Js.JsF[T]) = subject >> { (stm, charset) =>
block(json.Js(stm, charset))
}
}
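// Hedged usage sketch (assumed host and path): with the implicit conversions in
// scope, a request can parse its response body as a JsValue, e.g.
//   import dispatch._
//   import dispatch.json.JsHttp._
//   val req = :/("api.example.com") / "items"
//   // Http(req ># { js => js })  // `>#` applies the block to the parsed JsValue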
|
cmc333333/Databinder-Dispatch
|
http+json/src/main/scala/JsHttp.scala
|
Scala
|
lgpl-2.1
| 545 |
{{#public}}
package {{package}}
import com.twitter.scrooge.{
LazyTProtocol,
TFieldBlob, ThriftException, ThriftStruct, ThriftStructCodec3, ThriftStructFieldInfo,
ThriftStructMetaData, ThriftUtil}
import org.apache.thrift.protocol._
import org.apache.thrift.transport.{TMemoryBuffer, TTransport}
import java.nio.ByteBuffer
import java.util.Arrays
import scala.collection.immutable.{Map => immutable$Map}
import scala.collection.mutable.Builder
import scala.collection.mutable.{
ArrayBuffer => mutable$ArrayBuffer, Buffer => mutable$Buffer,
HashMap => mutable$HashMap, HashSet => mutable$HashSet}
import scala.collection.{Map, Set}
{{/public}}
{{docstring}}
object {{StructName}} extends ThriftStructCodec3[{{StructName}}] {
private val NoPassthroughFields = immutable$Map.empty[Short, TFieldBlob]
val Struct = new TStruct("{{StructNameForWire}}")
{{#fields}}
val {{fieldConst}} = new TField("{{fieldNameForWire}}", TType.{{constType}}, {{id}})
{{#isEnum}}
val {{fieldConst}}I32 = new TField("{{fieldNameForWire}}", TType.I32, {{id}})
{{/isEnum}}
val {{fieldConst}}Manifest = implicitly[Manifest[{{fieldType}}]]
{{/fields}}
/**
* Field information in declaration order.
*/
lazy val fieldInfos: scala.List[ThriftStructFieldInfo] = scala.List[ThriftStructFieldInfo](
{{#fields}}
new ThriftStructFieldInfo(
{{fieldConst}},
{{optional}},
{{required}},
{{fieldConst}}Manifest,
{{#fieldKeyType}}
_root_.scala.Some(implicitly[Manifest[{{fieldKeyType}}]]),
{{/fieldKeyType}}
{{^fieldKeyType}}
_root_.scala.None,
{{/fieldKeyType}}
{{#fieldValueType}}
_root_.scala.Some(implicitly[Manifest[{{fieldValueType}}]]),
{{/fieldValueType}}
{{^fieldValueType}}
_root_.scala.None,
{{/fieldValueType}}
{{#fieldTypeAnnotations}}
immutable$Map(
{{#pairs}}
"{{key}}" -> "{{value}}"
{{/pairs|,}}
),
{{/fieldTypeAnnotations}}
{{^fieldTypeAnnotations}}
immutable$Map.empty[String, String],
{{/fieldTypeAnnotations}}
{{#fieldFieldAnnotations}}
immutable$Map(
{{#pairs}}
"{{key}}" -> "{{value}}"
{{/pairs|,}}
)
{{/fieldFieldAnnotations}}
{{^fieldFieldAnnotations}}
immutable$Map.empty[String, String]
{{/fieldFieldAnnotations}}
)
{{/fields|,}}
)
lazy val structAnnotations: immutable$Map[String, String] =
{{#structAnnotations}}
immutable$Map[String, String](
{{#pairs}}
"{{key}}" -> "{{value}}"
{{/pairs|,}}
)
{{/structAnnotations}}
{{^structAnnotations}}
immutable$Map.empty[String, String]
{{/structAnnotations}}
/**
* Checks that all required fields are non-null.
*/
def validate(_item: {{StructName}}): Unit = {
{{#fields}}
{{#required}}
{{#nullable}}
if (_item.{{fieldName}} == null) throw new TProtocolException("Required field {{fieldName}} cannot be null")
{{/nullable}}
{{/required}}
{{/fields}}
}
def withoutPassthroughFields(original: {{StructName}}): {{StructName}} =
new {{InstanceClassName}}(
{{#fields}}
{{fieldName}} =
{
val field = original.{{fieldName}}
{{#passthroughFields}}{{>withoutPassthrough}}{{/passthroughFields}}
}
{{/fields|,}}
)
override def encode(_item: {{StructName}}, _oproto: TProtocol): Unit = {
_item.write(_oproto)
}
{{#withTrait}}
private[this] def lazyDecode(_iprot: LazyTProtocol): {{StructName}} = {
{{#fields}}
{{#isLazyReadEnabled}}
var {{fieldNameForWire}}Offset: Int = -1
{{/isLazyReadEnabled}}
{{^isLazyReadEnabled}}
{{#optional}}
var {{fieldName}}: Option[{{fieldType}}] = None
{{/optional}}
{{^optional}}
var {{fieldName}}: {{fieldType}} = {{defaultReadValue}}
{{/optional}}
{{/isLazyReadEnabled}}
{{#required}}
var {{gotName}} = false
{{/required}}
{{/fields}}
var _passthroughFields: Builder[(Short, TFieldBlob), immutable$Map[Short, TFieldBlob]] = null
var _done = false
val _start_offset = _iprot.offset
_iprot.readStructBegin()
while (!_done) {
val _field = _iprot.readFieldBegin()
if (_field.`type` == TType.STOP) {
_done = true
} else {
_field.id match {
{{#fields}}
case {{id}} =>
{{>readLazyField}}
{{/fields}}
case _ =>
if (_passthroughFields == null)
_passthroughFields = immutable$Map.newBuilder[Short, TFieldBlob]
_passthroughFields += (_field.id -> TFieldBlob.read(_field, _iprot))
}
_iprot.readFieldEnd()
}
}
_iprot.readStructEnd()
{{#fields}}
{{#required}}
if (!{{gotName}}) throw new TProtocolException("Required field '{{fieldName}}' was not found in serialized data for struct {{StructName}}")
{{/required}}
{{/fields}}
new Lazy{{InstanceClassName}}(
_iprot,
_iprot.buffer,
_start_offset,
_iprot.offset,
{{#fields}}
{{#isLazyReadEnabled}}{{fieldNameForWire}}Offset{{/isLazyReadEnabled}}{{^isLazyReadEnabled}}{{fieldName}}{{/isLazyReadEnabled}},
{{/fields}}
if (_passthroughFields == null)
NoPassthroughFields
else
_passthroughFields.result()
)
}
override def decode(_iprot: TProtocol): {{StructName}} =
_iprot match {
case i: LazyTProtocol => lazyDecode(i)
case i => eagerDecode(i)
}
private[this] def eagerDecode(_iprot: TProtocol): {{StructName}} = {
{{/withTrait}}
{{^withTrait}}
override def decode(_iprot: TProtocol): {{StructName}} = {
{{/withTrait}}
{{#fields}}
{{#optional}}
var {{fieldName}}: _root_.scala.Option[{{fieldType}}] = _root_.scala.None
{{/optional}}
{{^optional}}
var {{fieldName}}: {{fieldType}} = {{defaultReadValue}}
{{#required}}
var {{gotName}} = false
{{/required}}
{{/optional}}
{{/fields}}
var _passthroughFields: Builder[(Short, TFieldBlob), immutable$Map[Short, TFieldBlob]] = null
var _done = false
_iprot.readStructBegin()
while (!_done) {
val _field = _iprot.readFieldBegin()
if (_field.`type` == TType.STOP) {
_done = true
} else {
_field.id match {
{{#fields}}
case {{id}} =>
{{>readField}}
{{/fields}}
case _ =>
if (_passthroughFields == null)
_passthroughFields = immutable$Map.newBuilder[Short, TFieldBlob]
_passthroughFields += (_field.id -> TFieldBlob.read(_field, _iprot))
}
_iprot.readFieldEnd()
}
}
_iprot.readStructEnd()
{{#fields}}
{{#required}}
if (!{{gotName}}) throw new TProtocolException("Required field '{{fieldName}}' was not found in serialized data for struct {{StructName}}")
{{/required}}
{{/fields}}
new {{InstanceClassName}}(
{{#fields}}
{{fieldName}},
{{/fields}}
if (_passthroughFields == null)
NoPassthroughFields
else
_passthroughFields.result()
)
}
def apply(
{{#fields}}
{{fieldName}}: {{>optionalType}}{{#hasDefaultValue}} = {{defaultFieldValue}}{{/hasDefaultValue}}{{#optional}} = _root_.scala.None{{/optional}}
{{/fields|,}}
): {{StructName}} =
new {{InstanceClassName}}(
{{#fields}}
{{fieldName}}
{{/fields|,}}
)
{{#arity0}}
def unapply(_item: {{StructName}}): Boolean = true
{{/arity0}}
{{#arity1}}
def unapply(_item: {{StructName}}): _root_.scala.Option[{{>optionalType}}] = _root_.scala.Some(_item.{{fieldName}})
{{/arity1}}
{{#arityN}}
def unapply(_item: {{StructName}}): _root_.scala.Option[{{product}}] = _root_.scala.Some(_item)
{{/arityN}}
{{#fields}}
@inline private def {{readFieldValueName}}(_iprot: TProtocol): {{fieldType}} = {
{{#readWriteInfo}}
{{>readValue}}
{{/readWriteInfo}}
}
@inline private def {{writeFieldName}}({{valueVariableName}}: {{fieldType}}, _oprot: TProtocol): Unit = {
{{#readWriteInfo}}
_oprot.writeFieldBegin({{fieldConst}}{{#isEnum}}I32{{/isEnum}})
{{writeFieldValueName}}({{valueVariableName}}, _oprot)
_oprot.writeFieldEnd()
{{/readWriteInfo}}
}
@inline private def {{writeFieldValueName}}({{valueVariableName}}: {{fieldType}}, _oprot: TProtocol): Unit = {
{{#readWriteInfo}}
{{>writeValue}}
{{/readWriteInfo}}
}
{{/fields}}
{{#withTrait}}
object Immutable extends ThriftStructCodec3[{{StructName}}] {
override def encode(_item: {{StructName}}, _oproto: TProtocol): Unit = { _item.write(_oproto) }
override def decode(_iprot: TProtocol): {{StructName}} = {{StructName}}.decode(_iprot)
override lazy val metaData: ThriftStructMetaData[{{StructName}}] = {{StructName}}.metaData
}
/**
* The default read-only implementation of {{StructName}}. You typically should not need to
* directly reference this class; instead, use the {{StructName}}.apply method to construct
* new instances.
*/
class Immutable(
{{#fields}}
val {{fieldName}}: {{>optionalType}},
{{/fields}}
override val _passthroughFields: immutable$Map[Short, TFieldBlob])
extends {{StructName}} {
def this(
{{#fields}}
{{fieldName}}: {{>optionalType}}{{#hasDefaultValue}} = {{defaultFieldValue}}{{/hasDefaultValue}}{{#optional}} = _root_.scala.None{{/optional}}
{{/fields|,}}
) = this(
{{#fields}}
{{fieldName}},
{{/fields}}
Map.empty
)
}
  /**
   * This is another Immutable; however, it keeps strings as lazy values that are
   * lazily decoded from the backing byte array on read.
   */
private[this] class Lazy{{InstanceClassName}}(
_proto: LazyTProtocol,
_buf: Array[Byte],
_start_offset: Int,
_end_offset: Int,
{{#fields}}
{{#isLazyReadEnabled}}{{fieldNameForWire}}Offset: Int,{{/isLazyReadEnabled}}{{^isLazyReadEnabled}}val {{fieldName}}: {{>optionalType}},{{/isLazyReadEnabled}}
{{/fields}}
override val _passthroughFields: immutable$Map[Short, TFieldBlob])
extends {{StructName}} {
override def write(_oprot: TProtocol): Unit = {
_oprot match {
case i: LazyTProtocol => i.writeRaw(_buf, _start_offset, _end_offset - _start_offset)
case _ => super.write(_oprot)
}
}
{{#fields}}
{{#isLazyReadEnabled}}
lazy val {{fieldName}}: {{>optionalType}} =
{{#optional}}
if ({{fieldNameForWire}}Offset == -1)
None
else {
Some(_proto.{{decodeProtocol}}(_buf, {{fieldNameForWire}}Offset))
}
{{/optional}}
{{^optional}}
if ({{fieldNameForWire}}Offset == -1)
{{defaultReadValue}}
else {
_proto.{{decodeProtocol}}(_buf, {{fieldNameForWire}}Offset)
}
{{/optional}}
{{/isLazyReadEnabled}}
{{/fields}}
/**
* Override the super hash code to make it a lazy val rather than def.
*
   * Calculating the hash code can be expensive; caching it where possible
   * can provide significant performance wins (e.g. when used as a key in a hash map).
   * This is usually not safe, since the normal constructor will accept a mutable map
   * or set as an arg. Here, however, we control how the class is generated from
   * serialized data. With the class private, and the contract that we throw away our
   * mutable references, making the hash code lazy here is safe.
*/
override lazy val hashCode = super.hashCode
}
/**
* This Proxy trait allows you to extend the {{StructName}} trait with additional state or
* behavior and implement the read-only methods from {{StructName}} using an underlying
* instance.
*/
trait Proxy extends {{StructName}} {
protected def {{underlyingStructName}}: {{StructName}}
{{#fields}}
override def {{fieldName}}: {{>optionalType}} = {{underlyingStructName}}.{{fieldName}}
{{/fields}}
override def _passthroughFields = {{underlyingStructName}}._passthroughFields
}
{{/withTrait}}
}
{{#withTrait}}
trait {{StructName}}
{{/withTrait}}
{{^withTrait}}
class {{StructName}}(
{{#fields}}
val {{fieldName}}: {{>optionalType}},
{{/fields}}
val _passthroughFields: immutable$Map[Short, TFieldBlob])
{{/withTrait}}
extends {{parentType}}
with {{product}}
with java.io.Serializable
{
import {{StructName}}._
{{^withTrait}}
def this(
{{#fields}}
{{fieldName}}: {{>optionalType}}{{#hasDefaultValue}} = {{defaultFieldValue}}{{/hasDefaultValue}}{{#optional}} = _root_.scala.None{{/optional}}
{{/fields|,}}
) = this(
{{#fields}}
{{fieldName}},
{{/fields}}
Map.empty
)
{{/withTrait}}
{{#withTrait}}
{{#fields}}
def {{fieldName}}: {{>optionalType}}
{{/fields}}
def _passthroughFields: immutable$Map[Short, TFieldBlob] = immutable$Map.empty
{{/withTrait}}
{{#fields}}
def _{{indexP1}} = {{fieldName}}
{{/fields}}
{{#isResponse}}
def successField: Option[{{successFieldType}}] = {{successFieldValue}}
def exceptionFields: Iterable[Option[com.twitter.scrooge.ThriftException]] = {{exceptionValues}}
{{/isResponse}}
{{#withFieldGettersAndSetters}}
  /**
   * Gets a field value encoded as a binary blob using TCompactProtocol. If the specified field
   * is present in the passthrough map, that value is returned. Otherwise, if the specified
   * field is known and currently set (a defined optional field, or a non-null required
   * field), it is serialized and returned; unknown or unset fields yield None.
   */
def getFieldBlob(_fieldId: Short): _root_.scala.Option[TFieldBlob] = {
lazy val _buff = new TMemoryBuffer(32)
lazy val _oprot = new TCompactProtocol(_buff)
_passthroughFields.get(_fieldId) match {
case blob: _root_.scala.Some[TFieldBlob] => blob
case _root_.scala.None => {
val _fieldOpt: _root_.scala.Option[TField] =
_fieldId match {
{{#fields}}
case {{id}} =>
{{#readWriteInfo}}
{{#optional}}
if ({{fieldName}}.isDefined) {
{{/optional}}
{{^optional}}
{{#nullable}}
if ({{fieldName}} ne null) {
{{/nullable}}
{{^nullable}}
if (true) {
{{/nullable}}
{{/optional}}
{{writeFieldValueName}}({{fieldName}}{{#optional}}.get{{/optional}}, _oprot)
_root_.scala.Some({{StructName}}.{{fieldConst}})
} else {
_root_.scala.None
}
{{/readWriteInfo}}
{{/fields}}
case _ => _root_.scala.None
}
_fieldOpt match {
case _root_.scala.Some(_field) =>
val _data = Arrays.copyOfRange(_buff.getArray, 0, _buff.length)
_root_.scala.Some(TFieldBlob(_field, _data))
case _root_.scala.None =>
_root_.scala.None
}
}
}
}
/**
* Collects TCompactProtocol-encoded field values according to `getFieldBlob` into a map.
*/
def getFieldBlobs(ids: TraversableOnce[Short]): immutable$Map[Short, TFieldBlob] =
(ids flatMap { id => getFieldBlob(id) map { id -> _ } }).toMap
/**
* Sets a field using a TCompactProtocol-encoded binary blob. If the field is a known
* field, the blob is decoded and the field is set to the decoded value. If the field
* is unknown and passthrough fields are enabled, then the blob will be stored in
* _passthroughFields.
*/
def setField(_blob: TFieldBlob): {{StructName}} = {
{{#fields}}
var {{fieldName}}: {{>optionalType}} = this.{{fieldName}}
{{/fields}}
var _passthroughFields = this._passthroughFields
_blob.id match {
{{#fields}}
{{#readWriteInfo}}
case {{id}} =>
{{#optional}}
{{fieldName}} = _root_.scala.Some({{readFieldValueName}}(_blob.read))
{{/optional}}
{{^optional}}
{{fieldName}} = {{readFieldValueName}}(_blob.read)
{{/optional}}
{{/readWriteInfo}}
{{/fields}}
case _ => _passthroughFields += (_blob.id -> _blob)
}
new {{InstanceClassName}}(
{{#fields}}
{{fieldName}},
{{/fields}}
_passthroughFields
)
}
/**
* If the specified field is optional, it is set to None. Otherwise, if the field is
* known, it is reverted to its default value; if the field is unknown, it is removed
* from the passthroughFields map, if present.
*/
def unsetField(_fieldId: Short): {{StructName}} = {
{{#fields}}
var {{fieldName}}: {{>optionalType}} = this.{{fieldName}}
{{/fields}}
_fieldId match {
{{#fields}}
case {{id}} =>
{{#optional}}
{{fieldName}} = _root_.scala.None
{{/optional}}
{{^optional}}
{{fieldName}} = {{defaultReadValue}}
{{/optional}}
{{/fields}}
case _ =>
}
new {{InstanceClassName}}(
{{#fields}}
{{fieldName}},
{{/fields}}
_passthroughFields - _fieldId
)
}
  /**
   * Per-field convenience methods; each delegates to `unsetField` with the
   * corresponding field id.
   */
{{#fields}}
def {{unsetName}}: {{StructName}} = unsetField({{id}})
{{/fields}}
{{/withFieldGettersAndSetters}}
override def write(_oprot: TProtocol): Unit = {
{{StructName}}.validate(this)
_oprot.writeStructBegin(Struct)
{{#fields}}
{{#readWriteInfo}}
{{#optional}}
if ({{fieldName}}.isDefined) {{writeFieldName}}({{fieldName}}.get, _oprot)
{{/optional}}
{{^optional}}
{{#nullable}}
if ({{fieldName}} ne null) {{writeFieldName}}({{fieldName}}, _oprot)
{{/nullable}}
{{^nullable}}
{{writeFieldName}}({{fieldName}}, _oprot)
{{/nullable}}
{{/optional}}
{{/readWriteInfo}}
{{/fields}}
if (_passthroughFields.nonEmpty) {
_passthroughFields.values.foreach { _.write(_oprot) }
}
_oprot.writeFieldStop()
_oprot.writeStructEnd()
}
def copy(
{{#fields}}
{{fieldName}}: {{>optionalType}} = this.{{fieldName}},
{{/fields}}
_passthroughFields: immutable$Map[Short, TFieldBlob] = this._passthroughFields
): {{StructName}} =
new {{InstanceClassName}}(
{{#fields}}
{{fieldName}},
{{/fields}}
_passthroughFields
)
override def canEqual(other: Any): Boolean = other.isInstanceOf[{{StructName}}]
override def equals(other: Any): Boolean =
canEqual(other) &&
_root_.scala.runtime.ScalaRunTime._equals(this, other) &&
_passthroughFields == other.asInstanceOf[{{StructName}}]._passthroughFields
override def hashCode: Int = _root_.scala.runtime.ScalaRunTime._hashCode(this)
override def toString: String = _root_.scala.runtime.ScalaRunTime._toString(this)
{{#hasExceptionMessage}}
override def getMessage: String = String.valueOf({{exceptionMessageField}})
{{/hasExceptionMessage}}
override def productArity: Int = {{arity}}
override def productElement(n: Int): Any = n match {
{{#fields}}
case {{index}} => this.{{fieldName}}
{{/fields}}
case _ => throw new IndexOutOfBoundsException(n.toString)
}
override def productPrefix: String = "{{StructName}}"
}
|
thirstycrow/scrooge
|
scrooge-generator/src/main/resources/scalagen/struct.scala
|
Scala
|
apache-2.0
| 18,413 |
package com.ing.baker.runtime.akka.implementations
class InteractionOne() {
def apply(recipeInstanceId: String, initialIngredient: String): String = ""
}
|
ing-bank/baker
|
core/akka-runtime/src/test/scala/com/ing/baker/runtime/akka/implementations/InteractionOne.scala
|
Scala
|
mit
| 157 |
package db.impl
import java.nio.charset.StandardCharsets
import java.nio.file.Files
import scala.jdk.CollectionConverters._
import play.api.Environment
import play.api.db.evolutions.{Evolution, Evolutions, EvolutionsReader}
import play.api.libs.Collections
//Some stuff taken from ResourceEvolutionsReader
class OreEvolutionsReader(environment: Environment) extends EvolutionsReader {
  private val namePattern = """(\d+)(?:_.+)?\.sql""".r
override def evolutions(db: String): collection.Seq[Evolution] = {
val upsMarker = """^(#|--).*!Ups.*$""".r
val downsMarker = """^(#|--).*!Downs.*$""".r
val UPS = "UPS"
val DOWNS = "DOWNS"
val UNKNOWN = "UNKNOWN"
val mapUpsAndDowns: PartialFunction[String, String] = {
case upsMarker(_) => UPS
case downsMarker(_) => DOWNS
case _ => UNKNOWN
}
val isMarker: PartialFunction[String, Boolean] = {
case upsMarker(_) => true
case downsMarker(_) => true
case _ => false
}
val folder = environment.getFile(Evolutions.directoryName(db))
val files = folder
.listFiles(_.getName.endsWith(".sql"))
.toSeq
.flatMap { file =>
file.getName match {
case namePattern(revision) => Some((revision.toInt, file))
case _ => None
}
}
.sortBy(_._1)
require(files.toMap.sizeIs == files.length, "Found more than one evolution with the same revision")
files.map {
case (revision, file) =>
val script = Files.readAllLines(file.toPath, StandardCharsets.UTF_8).asScala
val parsed = Collections
.unfoldLeft(("", script.toList.map(_.trim))) {
case (_, Nil) => None
case (context, lines) =>
val (some, next) = lines.span(l => !isMarker(l))
Some(
(
next.headOption.map(c => (mapUpsAndDowns(c), next.tail)).getOrElse("" -> Nil),
                  context -> some.mkString("\n")
)
)
}
.reverse
.drop(1)
.groupBy(i => i._1)
.view
        .mapValues(_.map(_._2).mkString("\n").trim)
.toMap
Evolution(revision, parsed.getOrElse(UPS, ""), parsed.getOrElse(DOWNS, ""))
}
}
}
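// Illustrative input (assumed example): the reader picks up files named like
// `1.sql` or `2_add_users.sql` and splits each on the marker comments, e.g.
//   -- !Ups
//   CREATE TABLE users (id BIGINT);
//   -- !Downs
//   DROP TABLE users;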
|
SpongePowered/Ore
|
ore/app/db/impl/OreEvolutionsReader.scala
|
Scala
|
mit
| 2,327 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.nodes.common
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.dag.Transformation
import org.apache.flink.api.java.typeutils.{GenericTypeInfo, RowTypeInfo, TypeExtractor}
import org.apache.flink.streaming.api.datastream.AsyncDataStream.OutputMode
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment
import org.apache.flink.streaming.api.operators.async.AsyncWaitOperatorFactory
import org.apache.flink.streaming.api.operators.{ProcessOperator, SimpleOperatorFactory}
import org.apache.flink.table.api.config.ExecutionConfigOptions
import org.apache.flink.table.api.{TableConfig, TableException, TableSchema}
import org.apache.flink.table.catalog.ObjectIdentifier
import org.apache.flink.table.connector.source.{AsyncTableFunctionProvider, LookupTableSource, TableFunctionProvider}
import org.apache.flink.table.data.RowData
import org.apache.flink.table.functions.{AsyncTableFunction, TableFunction, UserDefinedFunction}
import org.apache.flink.table.planner.calcite.FlinkTypeFactory
import org.apache.flink.table.planner.codegen.LookupJoinCodeGenerator._
import org.apache.flink.table.planner.codegen.{CodeGeneratorContext, LookupJoinCodeGenerator}
import org.apache.flink.table.planner.functions.utils.UserDefinedFunctionUtils.{getParamClassesConsiderVarArgs, getUserDefinedMethod, signatureToString, signaturesToString}
import org.apache.flink.table.planner.plan.nodes.FlinkRelNode
import org.apache.flink.table.planner.plan.nodes.exec.ExecNode
import org.apache.flink.table.planner.plan.schema.{LegacyTableSourceTable, TableSourceTable}
import org.apache.flink.table.planner.plan.utils.LookupJoinUtil._
import org.apache.flink.table.planner.plan.utils.PythonUtil.containsPythonCall
import org.apache.flink.table.planner.plan.utils.RelExplainUtil.preferExpressionFormat
import org.apache.flink.table.planner.plan.utils.{JoinTypeUtil, RelExplainUtil}
import org.apache.flink.table.planner.utils.TableConfigUtils.getMillisecondFromConfigDuration
import org.apache.flink.table.runtime.connector.source.LookupRuntimeProviderContext
import org.apache.flink.table.runtime.operators.join.lookup.{AsyncLookupJoinRunner, AsyncLookupJoinWithCalcRunner, LookupJoinRunner, LookupJoinWithCalcRunner}
import org.apache.flink.table.runtime.types.ClassLogicalTypeConverter
import org.apache.flink.table.runtime.types.LogicalTypeDataTypeConverter.{fromDataTypeToLogicalType, fromLogicalTypeToDataType}
import org.apache.flink.table.runtime.types.PlannerTypeUtils.isInteroperable
import org.apache.flink.table.runtime.types.TypeInfoDataTypeConverter.fromDataTypeToTypeInfo
import org.apache.flink.table.runtime.typeutils.InternalTypeInfo
import org.apache.flink.table.sources.LookupableTableSource
import org.apache.flink.table.types.DataType
import org.apache.flink.table.types.logical.utils.LogicalTypeUtils.toInternalConversionClass
import org.apache.flink.table.types.logical.{LogicalType, RowType, TypeInformationRawType}
import org.apache.flink.types.Row
import com.google.common.primitives.Primitives
import org.apache.calcite.plan.{RelOptCluster, RelOptTable, RelTraitSet}
import org.apache.calcite.rel.`type`.{RelDataType, RelDataTypeField}
import org.apache.calcite.rel.core.{JoinInfo, JoinRelType}
import org.apache.calcite.rel.{RelNode, RelWriter, SingleRel}
import org.apache.calcite.rex._
import org.apache.calcite.sql.SqlKind
import org.apache.calcite.sql.fun.SqlStdOperatorTable
import org.apache.calcite.sql.validate.SqlValidatorUtil
import org.apache.calcite.tools.RelBuilder
import org.apache.calcite.util.mapping.IntPair
import java.util.Collections
import java.util.concurrent.CompletableFuture
import scala.collection.JavaConverters._
import scala.collection.mutable
/**
* Common abstract RelNode for temporal table join which shares most methods.
*
 * For a lookup join query:
*
* <pre>
* SELECT T.id, T.content, D.age
* FROM T JOIN userTable FOR SYSTEM_TIME AS OF T.proctime AS D
* ON T.content = concat(D.name, '!') AND D.age = 11 AND T.id = D.id
* WHERE D.name LIKE 'Jack%'
* </pre>
*
 * The LookupJoin physical node encapsulates the following RelNode tree:
*
* <pre>
* Join (l.name = r.name)
 *      /           \
* RelNode Calc (concat(name, "!") as name, name LIKE 'Jack%')
* |
* DimTable (lookup-keys: age=11, id=l.id)
* (age, id, name)
* </pre>
*
* The important member fields in LookupJoin:
* <ul>
* <li>joinPairs: "0=0" (equal condition of Join)</li>
* <li>joinKeyPairs: empty (left input field index to dim table field index)</li>
 *   <li>allLookupKeys: [$0=11, $1=l.id] ($0 and $1 are the indexes of age and id in the dim table)</li>
* <li>remainingCondition: l.name=r.name</li>
 * </ul>
*
* The workflow of lookup join:
*
 * 1) look up records in the dimension table using the lookup keys <br>
 * 2) project & filter the looked-up records <br>
 * 3) join the left input record with the looked-up records <br>
 * 4) only output the rows which match the remainingCondition <br>
*
* @param input input rel node
* @param calcOnTemporalTable the calc (projection&filter) after table scan before joining
*/
abstract class CommonLookupJoin(
cluster: RelOptCluster,
traitSet: RelTraitSet,
input: RelNode,
// TODO: refactor this into TableSourceTable, once legacy TableSource is removed
temporalTable: RelOptTable,
val calcOnTemporalTable: Option[RexProgram],
val joinInfo: JoinInfo,
val joinType: JoinRelType)
extends SingleRel(cluster, traitSet, input)
with FlinkRelNode {
val temporalTableSchema: TableSchema = FlinkTypeFactory.toTableSchema(temporalTable.getRowType)
// join key pairs from left input field index to temporal table field index
val joinKeyPairs: Array[IntPair] = getTemporalTableJoinKeyPairs(joinInfo, calcOnTemporalTable)
// all potential index keys, mapping from field index in table source to LookupKey
val allLookupKeys: Map[Int, LookupKey] = analyzeLookupKeys(
cluster.getRexBuilder,
joinKeyPairs,
temporalTableSchema,
calcOnTemporalTable)
val lookupKeyIndicesInOrder: Array[Int] = allLookupKeys.keys.toList.sorted.toArray
  // remaining condition used to filter the joined records (left input record X lookup-ed records)
val remainingCondition: Option[RexNode] = getRemainingJoinCondition(
cluster.getRexBuilder,
input.getRowType,
calcOnTemporalTable,
allLookupKeys.keys.toList.sorted.toArray,
joinKeyPairs,
joinInfo,
allLookupKeys)
// ----------------------------------------------------------------------------------------
// Member fields initialized based on TableSource type
// ----------------------------------------------------------------------------------------
lazy val lookupFunction: UserDefinedFunction = {
temporalTable match {
case t: TableSourceTable =>
// TODO: support nested lookup keys in the future,
// currently we only support top-level lookup keys
val indices = lookupKeyIndicesInOrder.map(Array(_))
val tableSource = t.tableSource.asInstanceOf[LookupTableSource]
val providerContext = new LookupRuntimeProviderContext(indices)
val provider = tableSource.getLookupRuntimeProvider(providerContext)
provider match {
case tf: TableFunctionProvider[_] => tf.createTableFunction()
case atf: AsyncTableFunctionProvider[_] => atf.createAsyncTableFunction()
}
case t: LegacyTableSourceTable[_] =>
val lookupFieldNamesInOrder = lookupKeyIndicesInOrder
.map(temporalTableSchema.getFieldNames()(_))
val tableSource = t.tableSource.asInstanceOf[LookupableTableSource[_]]
if (tableSource.isAsyncEnabled) {
tableSource.getAsyncLookupFunction(lookupFieldNamesInOrder)
} else {
tableSource.getLookupFunction(lookupFieldNamesInOrder)
}
}
}
lazy val isAsyncEnabled: Boolean = lookupFunction match {
case _: TableFunction[_] => false
case _: AsyncTableFunction[_] => true
}
lazy val tableSourceDescription: String = temporalTable match {
case t: TableSourceTable =>
s"DynamicTableSource [${t.tableSource.asSummaryString()}]"
case t: LegacyTableSourceTable[_] =>
s"TableSource [${t.tableSource.explainSource()}]"
}
lazy val tableIdentifier: ObjectIdentifier = temporalTable match {
case t: TableSourceTable => t.tableIdentifier
case t: LegacyTableSourceTable[_] => t.tableIdentifier
}
if (containsPythonCall(joinInfo.getRemaining(cluster.getRexBuilder))) {
throw new TableException("Only inner join condition with equality predicates supports the " +
"Python UDF taking the inputs from the left table and the right table at the same time, " +
"e.g., ON T1.id = T2.id && pythonUdf(T1.a, T2.b)")
}
override def deriveRowType(): RelDataType = {
val flinkTypeFactory = cluster.getTypeFactory.asInstanceOf[FlinkTypeFactory]
val rightType = if (calcOnTemporalTable.isDefined) {
calcOnTemporalTable.get.getOutputRowType
} else {
temporalTable.getRowType
}
SqlValidatorUtil.deriveJoinRowType(
input.getRowType,
rightType,
joinType,
flinkTypeFactory,
null,
Collections.emptyList[RelDataTypeField])
}
override def explainTerms(pw: RelWriter): RelWriter = {
val inputFieldNames = input.getRowType.getFieldNames.asScala.toArray
val tableFieldNames = temporalTableSchema.getFieldNames
val resultFieldNames = getRowType.getFieldNames.asScala.toArray
val whereString = calcOnTemporalTable match {
case Some(calc) =>
RelExplainUtil.conditionToString(calc, getExpressionString, preferExpressionFormat(pw))
case None => ""
}
val lookupKeys = allLookupKeys.map {
case (tableField, FieldRefLookupKey(inputField)) =>
s"${tableFieldNames(tableField)}=${inputFieldNames(inputField)}"
case (tableField, ConstantLookupKey(_, literal)) =>
s"${tableFieldNames(tableField)}=${RelExplainUtil.literalToString(literal)}"
}.mkString(", ")
val selection = calcOnTemporalTable match {
case Some(calc) =>
val rightSelect = RelExplainUtil.selectionToString(
calc,
getExpressionString,
preferExpressionFormat(pw))
inputFieldNames.mkString(", ") + ", " + rightSelect
case None =>
resultFieldNames.mkString(", ")
}
super.explainTerms(pw)
.item("table", tableIdentifier.asSummaryString())
.item("joinType", JoinTypeUtil.getFlinkJoinType(joinType))
.item("async", isAsyncEnabled)
.item("lookup", lookupKeys)
.itemIf("where", whereString, whereString.nonEmpty)
.itemIf("joinCondition",
joinConditionToString(resultFieldNames, remainingCondition),
remainingCondition.isDefined)
.item("select", selection)
}
// ----------------------------------------------------------------------------------------
// Physical Translation
// ----------------------------------------------------------------------------------------
def translateToPlanInternal(
inputTransformation: Transformation[RowData],
env: StreamExecutionEnvironment,
config: TableConfig,
relBuilder: RelBuilder): Transformation[RowData] = {
val inputRowType = FlinkTypeFactory.toLogicalRowType(input.getRowType)
val tableSourceRowType = FlinkTypeFactory.toLogicalRowType(temporalTable.getRowType)
val resultRowType = FlinkTypeFactory.toLogicalRowType(getRowType)
val producedTypeInfo = fromDataTypeToTypeInfo(getLookupFunctionProducedType)
// validate whether the node is valid and supported.
validate(
inputRowType,
tableSourceRowType,
allLookupKeys,
joinType)
val lookupFieldTypesInOrder = lookupKeyIndicesInOrder
.map(temporalTableSchema.getFieldDataTypes()(_)).map(fromDataTypeToLogicalType)
val leftOuterJoin = joinType == JoinRelType.LEFT
val operatorFactory = if (isAsyncEnabled) {
      val asyncBufferCapacity = config.getConfiguration
.getInteger(ExecutionConfigOptions.TABLE_EXEC_ASYNC_LOOKUP_BUFFER_CAPACITY)
val asyncTimeout = getMillisecondFromConfigDuration(config,
ExecutionConfigOptions.TABLE_EXEC_ASYNC_LOOKUP_TIMEOUT)
val asyncLookupFunction = lookupFunction.asInstanceOf[AsyncTableFunction[_]]
// return type valid check
val udtfResultType = asyncLookupFunction.getResultType
val extractedResultTypeInfo = TypeExtractor.createTypeInfo(
asyncLookupFunction,
classOf[AsyncTableFunction[_]],
asyncLookupFunction.getClass,
0)
checkUdtfReturnType(
udtfResultType,
extractedResultTypeInfo)
val futureType = new TypeInformationRawType(
new GenericTypeInfo(classOf[CompletableFuture[_]]))
val parameters = Array(futureType) ++ lookupFieldTypesInOrder
checkEvalMethodSignature(
asyncLookupFunction,
parameters,
extractedResultTypeInfo)
val generatedFetcher = LookupJoinCodeGenerator.generateAsyncLookupFunction(
config,
relBuilder.getTypeFactory.asInstanceOf[FlinkTypeFactory],
inputRowType,
resultRowType,
producedTypeInfo,
lookupKeyIndicesInOrder,
allLookupKeys,
asyncLookupFunction)
val asyncFunc = if (calcOnTemporalTable.isDefined) {
// a projection or filter after table source scan
val rightRowType = FlinkTypeFactory
.toLogicalRowType(calcOnTemporalTable.get.getOutputRowType)
val generatedResultFuture = LookupJoinCodeGenerator.generateTableAsyncCollector(
config,
"TableFunctionResultFuture",
inputRowType,
rightRowType,
remainingCondition)
val generatedCalc = generateCalcMapFunction(
config,
calcOnTemporalTable,
tableSourceRowType)
new AsyncLookupJoinWithCalcRunner(
generatedFetcher,
generatedCalc,
generatedResultFuture,
producedTypeInfo,
InternalTypeInfo.of(rightRowType),
leftOuterJoin,
asyncBufferCapacity)
} else {
// right type is the same as table source row type, because no calc after temporal table
val rightRowType = tableSourceRowType
val generatedResultFuture = LookupJoinCodeGenerator.generateTableAsyncCollector(
config,
"TableFunctionResultFuture",
inputRowType,
rightRowType,
remainingCondition)
new AsyncLookupJoinRunner(
generatedFetcher,
generatedResultFuture,
producedTypeInfo,
InternalTypeInfo.of(rightRowType),
leftOuterJoin,
asyncBufferCapacity)
}
      // force ORDERED output mode for now; this can be optimized to UNORDERED
      // when the downstream does not need ordering
new AsyncWaitOperatorFactory(asyncFunc, asyncTimeout, asyncBufferCapacity, OutputMode.ORDERED)
} else {
// sync join
val syncLookupFunction = lookupFunction.asInstanceOf[TableFunction[_]]
// return type valid check
val udtfResultType = syncLookupFunction.getResultType
val extractedResultTypeInfo = TypeExtractor.createTypeInfo(
syncLookupFunction,
classOf[TableFunction[_]],
syncLookupFunction.getClass,
0)
checkUdtfReturnType(
udtfResultType,
extractedResultTypeInfo)
checkEvalMethodSignature(
syncLookupFunction,
lookupFieldTypesInOrder,
extractedResultTypeInfo)
val generatedFetcher = LookupJoinCodeGenerator.generateLookupFunction(
config,
relBuilder.getTypeFactory.asInstanceOf[FlinkTypeFactory],
inputRowType,
resultRowType,
producedTypeInfo,
lookupKeyIndicesInOrder,
allLookupKeys,
syncLookupFunction,
env.getConfig.isObjectReuseEnabled)
val ctx = CodeGeneratorContext(config)
val processFunc = if (calcOnTemporalTable.isDefined) {
// a projection or filter after table source scan
val rightRowType = FlinkTypeFactory
.toLogicalRowType(calcOnTemporalTable.get.getOutputRowType)
val generatedCollector = generateCollector(
ctx,
inputRowType,
rightRowType,
resultRowType,
remainingCondition,
None)
val generatedCalc = generateCalcMapFunction(
config,
calcOnTemporalTable,
tableSourceRowType)
new LookupJoinWithCalcRunner(
generatedFetcher,
generatedCalc,
generatedCollector,
leftOuterJoin,
rightRowType.getFieldCount)
} else {
// right type is the same as table source row type, because no calc after temporal table
val rightRowType = tableSourceRowType
val generatedCollector = generateCollector(
ctx,
inputRowType,
rightRowType,
resultRowType,
remainingCondition,
None)
new LookupJoinRunner(
generatedFetcher,
generatedCollector,
leftOuterJoin,
rightRowType.getFieldCount)
}
SimpleOperatorFactory.of(new ProcessOperator(processFunc))
}
ExecNode.createOneInputTransformation(
inputTransformation,
getRelDetailedDescription,
operatorFactory,
InternalTypeInfo.of(resultRowType),
inputTransformation.getParallelism)
}
private def rowTypeEquals(expected: TypeInformation[_], actual: TypeInformation[_]): Boolean = {
    // check internal and external type, because we will auto-convert the external
    // class to the internal class (e.g. Row => RowData).
(expected.getTypeClass == classOf[RowData] || expected.getTypeClass == classOf[Row]) &&
(actual.getTypeClass == classOf[RowData] || actual.getTypeClass == classOf[Row])
}
private def checkEvalMethodSignature(
func: UserDefinedFunction,
expectedTypes: Array[LogicalType],
udtfReturnType: TypeInformation[_])
: Array[Class[_]] = {
val expectedTypeClasses = if (udtfReturnType.getTypeClass == classOf[Row]) {
expectedTypes.map(ClassLogicalTypeConverter.getDefaultExternalClassForType)
} else {
expectedTypes.map {
// special case for generic type
case gt: TypeInformationRawType[_] => gt.getTypeInformation.getTypeClass
case t@_ => toInternalConversionClass(t)
}
}
val method = getUserDefinedMethod(
func,
"eval",
expectedTypeClasses,
expectedTypes,
_ => expectedTypes.indices.map(_ => null).toArray,
parameterTypeEquals,
(_, _) => false).getOrElse {
val msg = s"Given parameter types of the lookup TableFunction of $tableSourceDescription " +
s"do not match the expected signature.\\n" +
s"Expected: eval${signatureToString(expectedTypeClasses)} \\n" +
s"Actual: eval${signaturesToString(func, "eval")}"
throw new TableException(msg)
}
getParamClassesConsiderVarArgs(method.isVarArgs,
method.getParameterTypes, expectedTypes.length)
}
private def parameterTypeEquals(candidate: Class[_], expected: Class[_]): Boolean = {
candidate == null ||
candidate == expected ||
expected == classOf[Object] ||
candidate == classOf[Object] || // Special case when we don't know the type
expected.isPrimitive && Primitives.wrap(expected) == candidate ||
(candidate.isArray &&
expected.isArray &&
candidate.getComponentType.isInstanceOf[Object] &&
expected.getComponentType == classOf[Object])
}
  /**
   * Gets the remaining join condition, which is used to filter the joined records
   * (left input record X lookup-ed records) after the lookup.
   */
private def getRemainingJoinCondition(
rexBuilder: RexBuilder,
leftRelDataType: RelDataType,
calcOnTemporalTable: Option[RexProgram],
checkedLookupFields: Array[Int],
joinKeyPairs: Array[IntPair],
joinInfo: JoinInfo,
allLookupKeys: Map[Int, LookupKey]): Option[RexNode] = {
// indexes of right key field
val rightKeyIndexes = calcOnTemporalTable match {
case Some(program) =>
checkedLookupFields.map { lookupFieldIndex => // lookupFieldIndex is field index on table
program
.getOutputRowType.getFieldNames
.indexOf(program.getInputRowType.getFieldNames.get(lookupFieldIndex))
}
case None =>
checkedLookupFields
}
val joinPairs = joinInfo.pairs().asScala.toArray
val remainingPairs = joinPairs.filter(p => !rightKeyIndexes.contains(p.target))
val joinRowType = getRowType
// convert remaining pairs to RexInputRef tuple for building SqlStdOperatorTable.EQUALS calls
val remainingEquals = remainingPairs.map { p =>
val leftFieldType = leftRelDataType.getFieldList.get(p.source).getType
val leftInputRef = new RexInputRef(p.source, leftFieldType)
val rightIndex = leftRelDataType.getFieldCount + p.target
val rightFieldType = joinRowType.getFieldList.get(rightIndex).getType
val rightInputRef = new RexInputRef(rightIndex, rightFieldType)
rexBuilder.makeCall(SqlStdOperatorTable.EQUALS, leftInputRef, rightInputRef)
}
val remainingAnds = remainingEquals ++ joinInfo.nonEquiConditions.asScala
// build a new condition
val condition = RexUtil.composeConjunction(
rexBuilder,
remainingAnds.toList.asJava)
if (condition.isAlwaysTrue) {
None
} else {
Some(condition)
}
}
/**
* Gets the join key pairs from left input field index to temporal table field index
* @param joinInfo the join information of temporal table join
* @param calcOnTemporalTable the calc programs on temporal table
*/
private def getTemporalTableJoinKeyPairs(
joinInfo: JoinInfo,
calcOnTemporalTable: Option[RexProgram]): Array[IntPair] = {
val joinPairs = joinInfo.pairs().asScala.toArray
calcOnTemporalTable match {
case Some(program) =>
        // the target key of joinInfo refers to the calc output fields, so we have to remap it to the table fields here
val keyPairs = new mutable.ArrayBuffer[IntPair]()
joinPairs.map {
p =>
val calcSrcIdx = getIdenticalSourceField(program, p.target)
if (calcSrcIdx != -1) {
keyPairs += new IntPair(p.source, calcSrcIdx)
}
}
keyPairs.toArray
case None => joinPairs
}
}
/**
* Analyze potential lookup keys (including [[ConstantLookupKey]] and [[FieldRefLookupKey]])
* of the temporal table from the join condition and calc program on the temporal table.
*
* @param rexBuilder the RexBuilder
* @param joinKeyPairs join key pairs from left input field index to temporal table field index
* @param calcOnTemporalTable the calc program on temporal table
* @return all the potential lookup keys
*/
private def analyzeLookupKeys(
rexBuilder: RexBuilder,
joinKeyPairs: Array[IntPair],
temporalTableSchema: TableSchema,
calcOnTemporalTable: Option[RexProgram]): Map[Int, LookupKey] = {
// field_index_in_table_source => constant_lookup_key
val constantLookupKeys = new mutable.HashMap[Int, ConstantLookupKey]
// analyze constant lookup keys
if (calcOnTemporalTable.isDefined && null != calcOnTemporalTable.get.getCondition) {
val program = calcOnTemporalTable.get
val condition = RexUtil.toCnf(
cluster.getRexBuilder,
program.expandLocalRef(program.getCondition))
// presume 'A = 1 AND A = 2' will be reduced to ALWAYS_FALSE
extractConstantFieldsFromEquiCondition(condition, constantLookupKeys)
}
val fieldRefLookupKeys = joinKeyPairs.map(p => (p.target, FieldRefLookupKey(p.source)))
(constantLookupKeys ++ fieldRefLookupKeys).toMap
}
private def getLookupFunctionProducedType: DataType = temporalTable match {
case t: LegacyTableSourceTable[_] =>
t.tableSource.getProducedDataType
case _: TableSourceTable =>
val rowType = FlinkTypeFactory.toLogicalRowType(temporalTable.getRowType)
val dataRowType = fromLogicalTypeToDataType(rowType)
val isRow = lookupFunction match {
case tf: TableFunction[_] =>
val extractedResultTypeInfo = TypeExtractor.createTypeInfo(
tf,
classOf[TableFunction[_]],
tf.getClass,
0)
extractedResultTypeInfo.getTypeClass == classOf[Row]
case atf: AsyncTableFunction[_] =>
val extractedResultTypeInfo = TypeExtractor.createTypeInfo(
atf,
classOf[AsyncTableFunction[_]],
atf.getClass,
0)
extractedResultTypeInfo.getTypeClass == classOf[Row]
}
if (isRow) {
// we limit to use default conversion class if using Row
dataRowType
} else {
// bridge to RowData if is not external Row
dataRowType.bridgedTo(classOf[RowData])
}
}
// ----------------------------------------------------------------------------------------
// Physical Optimization Utilities
// ----------------------------------------------------------------------------------------
// this is highly inspired by Calcite's RexProgram#getSourceField(int)
private def getIdenticalSourceField(rexProgram: RexProgram, outputOrdinal: Int): Int = {
assert((outputOrdinal >= 0) && (outputOrdinal < rexProgram.getProjectList.size()))
val project = rexProgram.getProjectList.get(outputOrdinal)
var index = project.getIndex
while (true) {
var expr = rexProgram.getExprList.get(index)
expr match {
case call: RexCall if call.getOperator == SqlStdOperatorTable.IN_FENNEL =>
// drill through identity function
expr = call.getOperands.get(0)
case call: RexCall if call.getOperator == SqlStdOperatorTable.CAST =>
// drill through identity function
expr = call.getOperands.get(0)
case _ =>
}
expr match {
case ref: RexLocalRef => index = ref.getIndex
case ref: RexInputRef => return ref.getIndex
case _ => return -1
}
}
-1
}
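  // Illustrative trace (assumed calc program): for a projection `CAST(name AS ...)`
  // the chain is output ordinal -> local ref -> CAST -> RexInputRef(name), so the
  // index of `name` is returned; a computed expression such as `UPPER(name)`
  // bottoms out in an unrecognized RexCall and yields -1 instead.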
private def extractConstantFieldsFromEquiCondition(
condition: RexNode,
constantFieldMap: mutable.HashMap[Int, ConstantLookupKey]): Unit = condition match {
case c: RexCall if c.getKind == SqlKind.AND =>
c.getOperands.asScala.foreach(r => extractConstantField(r, constantFieldMap))
case rex: RexNode => extractConstantField(rex, constantFieldMap)
case _ =>
}
private def extractConstantField(
pred: RexNode,
constantFieldMap: mutable.HashMap[Int, ConstantLookupKey]): Unit = pred match {
case c: RexCall if c.getKind == SqlKind.EQUALS =>
val left = c.getOperands.get(0)
val right = c.getOperands.get(1)
val (inputRef, literal) = (left, right) match {
case (literal: RexLiteral, ref: RexInputRef) => (ref, literal)
case (ref: RexInputRef, literal: RexLiteral) => (ref, literal)
case _ => return // non-constant condition
}
val dataType = FlinkTypeFactory.toLogicalType(inputRef.getType)
constantFieldMap.put(inputRef.getIndex, ConstantLookupKey(dataType, literal))
case _ => // ignore
}
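  // Illustrative example (assumed condition): for a pushed-down filter
  // `age = 11 AND name LIKE 'Jack%'`, only `age = 11` contributes a
  // ConstantLookupKey (field index of `age` -> literal 11); the LIKE predicate
  // is not a lookup key and is applied by the calc after the lookup.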
// ----------------------------------------------------------------------------------------
// Validation
// ----------------------------------------------------------------------------------------
private def validate(
inputRowType: RowType,
tableSourceRowType: RowType,
allLookupKeys: Map[Int, LookupKey],
joinType: JoinRelType): Unit = {
// validate table source implementation first
validateTableSource()
// check join on all fields of PRIMARY KEY or (UNIQUE) INDEX
if (allLookupKeys.isEmpty) {
throw new TableException(
"Temporal table join requires an equality condition on fields of " +
s"table [${tableIdentifier.asSummaryString()}].")
}
val lookupKeyPairs = joinKeyPairs.filter(p => allLookupKeys.contains(p.target))
val leftKeys = lookupKeyPairs.map(_.source)
val rightKeys = lookupKeyPairs.map(_.target)
val leftKeyTypes = leftKeys.map(inputRowType.getTypeAt)
    // use the original key pairs to validate key types (rightKeys may include constant keys)
val rightKeyTypes = rightKeys.map(tableSourceRowType.getTypeAt)
// check type
val incompatibleConditions = new mutable.ArrayBuffer[String]()
for (i <- lookupKeyPairs.indices) {
val leftType = leftKeyTypes(i)
val rightType = rightKeyTypes(i)
if (!isInteroperable(leftType, rightType)) {
        val leftName = inputRowType.getFieldNames.get(leftKeys(i))
        val rightName = tableSourceRowType.getFieldNames.get(rightKeys(i))
val condition = s"$leftName[$leftType]=$rightName[$rightType]"
incompatibleConditions += condition
}
}
if (incompatibleConditions.nonEmpty) {
throw new TableException(s"Temporal table join requires equivalent condition " +
s"of the same type, but the condition is ${incompatibleConditions.mkString(", ")}")
}
if (joinType != JoinRelType.LEFT && joinType != JoinRelType.INNER) {
throw new TableException(
"Temporal table join currently only support INNER JOIN and LEFT JOIN, " +
"but was " + joinType.toString + " JOIN")
}
// success
}
private def validateTableSource(): Unit = temporalTable match {
case t: TableSourceTable =>
if (!t.tableSource.isInstanceOf[LookupTableSource]) {
throw new TableException(s"$tableSourceDescription must " +
s"implement LookupTableSource interface if it is used in temporal table join.")
}
case t: LegacyTableSourceTable[_] =>
val tableSource = t.tableSource
if (!tableSource.isInstanceOf[LookupableTableSource[_]]) {
throw new TableException(s"$tableSourceDescription must " +
s"implement LookupableTableSource interface if it is used in temporal table join.")
}
val tableSourceProducedType = fromDataTypeToTypeInfo(tableSource.getProducedDataType)
if (!tableSourceProducedType.isInstanceOf[InternalTypeInfo[RowData]] &&
!tableSourceProducedType.isInstanceOf[RowTypeInfo]) {
throw new TableException(
"Temporal table join only support Row or RowData type as return type of temporal table." +
" But was " + tableSourceProducedType)
}
}
private def checkUdtfReturnType(
udtfReturnTypeInfo: TypeInformation[_],
extractedUdtfReturnTypeInfo: TypeInformation[_]): Unit = {
if (udtfReturnTypeInfo != null) {
if (!udtfReturnTypeInfo.isInstanceOf[InternalTypeInfo[RowData]] &&
!udtfReturnTypeInfo.isInstanceOf[RowTypeInfo]) {
throw new TableException(
s"Result type of the async lookup TableFunction of $tableSourceDescription " +
s"is $udtfReturnTypeInfo type, currently only Row and RowData are supported.")
}
} else {
if (extractedUdtfReturnTypeInfo.getTypeClass != classOf[RowData] &&
extractedUdtfReturnTypeInfo.getTypeClass != classOf[Row]) {
throw new TableException(
s"Result type of the lookup TableFunction of $tableSourceDescription is " +
s"$extractedUdtfReturnTypeInfo type, " +
s"but currently only Row and RowData are supported.")
}
}
temporalTable match {
case t: LegacyTableSourceTable[_] =>
// Legacy TableSource should check the consistency between UDTF return type
// and source produced type
val tableSource = t.tableSource
val tableSourceProducedType = fromDataTypeToTypeInfo(tableSource.getProducedDataType)
if (udtfReturnTypeInfo != null) {
if (!rowTypeEquals(tableSourceProducedType, udtfReturnTypeInfo)) {
throw new TableException(
s"The $tableSourceDescription return type $tableSourceProducedType " +
s"does not match its lookup function return type $udtfReturnTypeInfo")
}
} else {
if (!rowTypeEquals(tableSourceProducedType, extractedUdtfReturnTypeInfo)) {
throw new TableException(
s"The $tableSourceDescription return type $tableSourceProducedType does not match " +
s"its lookup function extracted return type $extractedUdtfReturnTypeInfo")
}
}
case _ =>
        // pass, DynamicTableSource doesn't have a produced type
}
}
// ----------------------------------------------------------------------------------------
// toString Utilities
// ----------------------------------------------------------------------------------------
private def joinConditionToString(
resultFieldNames: Array[String],
joinCondition: Option[RexNode]): String = joinCondition match {
case Some(condition) =>
getExpressionString(condition, resultFieldNames.toList, None)
case None => "N/A"
}
}
|
tzulitai/flink
|
flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/nodes/common/CommonLookupJoin.scala
|
Scala
|
apache-2.0
| 34,083 |
package model.services.rewardRules
import java.util.UUID
import model.services.GamificationEngineTrait
class RewardRule_CommentWithAnnTags() extends RewardRuleTrait{
override val action_id: Int = GamificationEngineTrait.COMMENT_WITH_ANN_TAGS
override def getPoints(user_id: UUID): Int = {
// reward only if less than 10 comments have been liked today by the user
1
}
}
|
scify/DemocracIT-Web
|
app/model/services/rewardRules/RewardRule_CommentWithAnnTags.scala
|
Scala
|
apache-2.0
| 388 |
/*
* Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package common.assets
package object controllers {
type Assets = _root_.controllers.Assets
}
|
wsargent/playframework
|
documentation/manual/working/commonGuide/assets/code/CommonAssets.scala
|
Scala
|
apache-2.0
| 173 |
package org.tensorflow.contrib.scala
import com.twitter.bijection.Bijection
import org.tensorflow.contrib.scala.Rank._
import org.tensorflow.{DataType, Tensor}
/**
* Support for TensorFlow byte strings.
*
* Byte strings are containers for variable-length data, typically protobuf messages
* to be processed by a TF graph. Tensors containing byte strings may be of any rank.
*
 * It is useful to tag byte strings with information about the type of message
 * they contain; the phantom type parameter `T` carries that tag.
 */
object ByteStrings {
  import scala.language.implicitConversions
/**
* Convert a [[ByteString]] to a 0-D [[TypedTensor]].
*/
implicit def byteString2Tensor[T]: Bijection[ByteString[T], TypedTensor[`0D`, ByteString[T]]] =
Bijection.build[ByteString[T], TypedTensor[`0D`, ByteString[T]]] { str =>
Tensor.create(str: Array[Byte]).taggedWith[`0D`, ByteString[T]]
} { t =>
assert(t.dataType() == DataType.STRING)
t.bytesValue().asInstanceOf[ByteString[T]]
}
/**
* Implicit class providing convenience methods for byte arrays.
*/
implicit class RichByteArray(array: Array[Byte]) {
/**
* View this byte array as a byte string representing an instance of [[T]].
*/
def asByteString[T] = array.asInstanceOf[ByteString[T]]
}
}
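// Editor's usage sketch (hypothetical; `MyProto` is a made-up message type):
//   val bytes: Array[Byte] = ...                  // e.g. a serialized protobuf
//   val str = bytes.asByteString[MyProto]         // tag the raw bytes
//   val tensor = byteString2Tensor[MyProto](str)  // lift into a 0-D STRING tensor
//   val back = byteString2Tensor[MyProto].invert(tensor)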
|
cookieai/flink-tensorflow
|
flink-tensorflow/src/main/scala/org/tensorflow/contrib/scala/ByteStrings.scala
|
Scala
|
apache-2.0
| 1,262 |
package bugs.bug2
//works
trait AB4 {
}
|
raisercostin/jedi-io
|
src/main/scala/bugs/bug2/AB5.scala
|
Scala
|
apache-2.0
| 46 |
/*
* Copyright 2010 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package io.fabric8.webui
import com.sun.jersey.spi.container.{ContainerResponse, ContainerRequest, ContainerResponseFilter}
/**
 * Jersey response filter that disables client-side caching by adding
 * `Cache-Control: no-cache, no-store` and `Expires: -1` headers to every response.
 */
class CacheControlResponseFilter extends ContainerResponseFilter {
def filter(p1: ContainerRequest, p2: ContainerResponse): ContainerResponse = {
import scala.collection.JavaConversions._
p2.getHttpHeaders.putSingle("Cache-Control", "no-cache, no-store")
p2.getHttpHeaders.putSingle("Expires", "-1")
p2
}
}
|
alexeev/jboss-fuse-mirror
|
sandbox/fmc/fmc-rest/src/main/scala/org/fusesource/fabric/webui/CacheControlResponseFilter.scala
|
Scala
|
apache-2.0
| 1,103 |
package spire
package random
import org.scalatest._
import prop._
import org.scalacheck._
class ShufflingTest extends PropSpec with Matchers with GeneratorDrivenPropertyChecks {
val rng = spire.random.rng.Lcg64.fromTime()
val range = Gen.chooseNum(1, 1000)
property("shuffling doesn't change members") {
forAll(range) { (n: Int) =>
val ns1 = rng.generateInts(n)
val ns2 = ns1.clone
rng.shuffle(ns1)
ns1.sorted shouldBe ns2.sorted
}
}
}
|
tixxit/spire
|
tests/src/test/scala/spire/random/ShufflingTest.scala
|
Scala
|
mit
| 511 |
class DependentImplicitTezt {
trait Bridge
class Outer {
class Inner extends Bridge
object Inner {
implicit def fromOther(b: Bridge): Inner = throw new Error("todo")
}
def run(x: Inner) = throw new Error("todo")
}
val o1 = new Outer
val o2 = new Outer
val i1 = new o1.Inner
val i2 = new o2.Inner
def doesntCompile: Unit = {
o1.run(i2) // should compile
}
def workaround1: Unit = {
o1.run(i2: Bridge) // ok
}
def workaround2: Unit = {
import o1.Inner.fromOther
o1.run(i2) // ok
}
}
|
lrytz/scala
|
test/files/pos/t4947.scala
|
Scala
|
apache-2.0
| 552 |
/*
*************************************************************************************
* Copyright 2013 Normation SAS
*************************************************************************************
*
* This file is part of Rudder.
*
* Rudder is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU General Public License version 3, the copyright holders add
* the following Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU General
* Public License version 3, when you create a Related Module, this
* Related Module is not considered as a part of the work and may be
* distributed under the license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* Rudder is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Rudder. If not, see <http://www.gnu.org/licenses/>.
*
*************************************************************************************
*/
package com.normation.rudder.repository
import net.liftweb.common._
import com.normation.rudder.domain.parameters._
import com.normation.eventlog.ModificationId
import com.normation.eventlog.EventActor
import com.normation.rudder.domain.archives.ParameterArchiveId
/**
* The Parameter Repository (Read Only) to read parameters from LDAP
*/
trait RoParameterRepository {
def getGlobalParameter(parameterName : ParameterName) : Box[GlobalParameter]
def getAllGlobalParameters() : Box[Seq[GlobalParameter]]
def getAllOverridable() : Box[Seq[GlobalParameter]]
}
trait WoParameterRepository {
def saveParameter(parameter : GlobalParameter, modId: ModificationId, actor:EventActor, reason:Option[String]) : Box[AddGlobalParameterDiff]
def updateParameter(parameter : GlobalParameter, modId: ModificationId, actor:EventActor, reason:Option[String]) : Box[Option[ModifyGlobalParameterDiff]]
def delete(parameterName:ParameterName, modId: ModificationId, actor:EventActor, reason:Option[String]) : Box[DeleteGlobalParameterDiff]
/**
   * A (dangerous) method that replaces all existing parameters
   * with the given list.
   * On success, returns an identifier of the place where
   * the old parameters are stored - it is the user's
   * responsibility to delete them.
*/
def swapParameters(newParameters:Seq[GlobalParameter]) : Box[ParameterArchiveId]
/**
   * Delete a set of archived parameters.
*/
def deleteSavedParametersArchiveId(saveId:ParameterArchiveId) : Box[Unit]
}
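// Editor's usage sketch (hypothetical caller, not part of Rudder): a full
// replacement is a swap followed by explicit cleanup of the archived
// previous parameters, as the swapParameters contract requires.
object ParameterSwapExample {
  def replaceAll(repo: WoParameterRepository, params: Seq[GlobalParameter]): Box[Unit] =
    for {
      archiveId <- repo.swapParameters(params)
      _         <- repo.deleteSavedParametersArchiveId(archiveId)
    } yield ()
}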
|
armeniaca/rudder
|
rudder-core/src/main/scala/com/normation/rudder/repository/ParameterRepository.scala
|
Scala
|
gpl-3.0
| 3,204 |
// Equites, a Scala chess playground
// Copyright © 2014 Frank S. Thomas <[email protected]>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
package eu.timepit.equites
import scalaz._, Scalaz._
case class File(value: Int) extends FileAndRankOps[File] {
def companion: File.type = File
}
case class Rank(value: Int) extends FileAndRankOps[Rank] {
def companion: Rank.type = Rank
}
object File extends {
val min: File = new File(0)
val max: File = new File(7)
} with FileAndRankCompanion[File]
object Rank extends {
val min: Rank = new Rank(0)
val max: Rank = new Rank(7)
} with FileAndRankCompanion[Rank]
trait FileAndRankOps[T <: FileAndRankOps[T]] {
self: T =>
def value: Int
def companion: FileAndRankCompanion[T]
def isValid: Boolean =
companion.min.value <= value && value <= companion.max.value
def minDistToBounds: Int =
math.min(value - companion.min.value, companion.max.value - value)
def map(f: Int => Int): T = companion(f(value))
def +(i: Int): T = map(_ + i)
def -(i: Int): T = map(_ - i)
def unary_- : T = map(-_)
def +(that: T): T = this + that.value
def -(that: T): T = this - that.value
}
trait FileAndRankCompanion[T <: FileAndRankOps[T]] {
def apply(i: Int): T
def min: T
def max: T
val range: Range = min.value to max.value
val all: Seq[T] = range.map(apply)
implicit val equalInst = Equal.equalA[T]
implicit val orderInst = Order.orderBy((t: T) => t.value)
}
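// Editor's usage sketch (not part of the original file): arithmetic on files and
// ranks is unchecked, so callers use isValid to stay on the 0..7 board range.
object FileAndRankExample {
  val moved: File = File(2) + 3 // File(5)
  val onBoard: Boolean = (File(7) + 1).isValid // false: value 8 exceeds max
  val nearEdge: Int = Rank(6).minDistToBounds // min(6 - 0, 7 - 6) = 1
}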
|
equites-chess/equites-core
|
src/main/scala/eu/timepit/equites/FileAndRank.scala
|
Scala
|
gpl-3.0
| 2,055 |
package com.stephentu.sql
import org.specs2.mutable._
class ResolverSpec extends Specification {
object resolver extends Resolver
private def doTest(q: String) = {
val parser = new SQLParser
val r = parser.parse(q)
resolver.resolve(r.get, TestSchema.definition)
}
"Resolver" should {
"resolve query1" in {
val s0 = doTest(Queries.q1)
s0.ctx.projections.size must_== 10
}
"resolve query2" in {
val s0 = doTest(Queries.q2)
s0.ctx.projections.size must_== 8
}
"resolve query3" in {
val s0 = doTest(Queries.q3)
s0.ctx.projections.size must_== 4
}
"resolve query4" in {
val s0 = doTest(Queries.q4)
s0.ctx.projections.size must_== 2
}
"resolve query5" in {
val s0 = doTest(Queries.q5)
s0.ctx.projections.size must_== 2
}
"resolve query6" in {
val s0 = doTest(Queries.q6)
s0.ctx.projections.size must_== 1
}
"resolve query7" in {
val s0 = doTest(Queries.q7)
s0.ctx.projections.size must_== 4
}
"resolve query8" in {
val s0 = doTest(Queries.q8)
s0.ctx.projections.size must_== 2
}
"resolve query9" in {
val s0 = doTest(Queries.q9)
s0.ctx.projections.size must_== 3
}
"resolve query10" in {
val s0 = doTest(Queries.q10)
s0.ctx.projections.size must_== 8
}
"resolve query11" in {
val s0 = doTest(Queries.q11)
s0.ctx.projections.size must_== 2
}
"resolve query12" in {
val s0 = doTest(Queries.q12)
s0.ctx.projections.size must_== 3
}
"resolve query13" in {
val s0 = doTest(Queries.q13)
s0.ctx.projections.size must_== 2
}
"resolve query14" in {
val s0 = doTest(Queries.q14)
s0.ctx.projections.size must_== 1
}
"resolve query16" in {
val s0 = doTest(Queries.q16)
s0.ctx.projections.size must_== 4
}
"resolve query17" in {
val s0 = doTest(Queries.q17)
s0.ctx.projections.size must_== 1
}
"resolve query18" in {
val s0 = doTest(Queries.q18)
s0.ctx.projections.size must_== 6
}
"resolve query19" in {
val s0 = doTest(Queries.q19)
s0.ctx.projections.size must_== 1
}
"resolve query20" in {
val s0 = doTest(Queries.q20)
s0.ctx.projections.size must_== 2
}
"resolve query21" in {
val s0 = doTest(Queries.q21)
s0.ctx.projections.size must_== 2
}
"resolve query22" in {
val s0 = doTest(Queries.q22)
s0.ctx.projections.size must_== 3
}
}
}
|
debugger87/scala-sql-parser
|
src/test/scala/resolver.scala
|
Scala
|
mit
| 2,586 |
package models.v2
import java.util.UUID
import scala.collection.JavaConversions._
import com.datastax.driver.core._
import com.datastax.driver.core.utils.UUIDs
import com.datastax.driver.mapping.annotations._
import com.datastax.driver.mapping.Result
import constants.Db
import database.Cassandra.manager
@Table(keyspace = "balloontest", name = "active_token")
class Token {
@PartitionKey
var key: String = _
@ClusteringColumn
@Column(name = "token_id")
var token: UUID = _
@Column(name = "target_id")
var targetId: UUID = _
def setKey(key: String) = this.key = key
def getKey = key
def setToken(token: UUID) = this.token = token
def getToken = token
def getTargetId = targetId
def setTargetId(targetId: UUID) = this.targetId = targetId
}
@Accessor
trait TokenAccessor {
// @Query("SELECT * FROM token WHERE key=? AND token=?")
// def getOne(key: String, token: UUID): Token
}
object Token {
val accessor = manager.createAccessor(classOf[TokenAccessor])
val mapper = manager.mapper(classOf[Token])
def apply(key: String, token: UUID, targetId: UUID) = {
val tk = new Token
tk.setKey(key)
tk.setToken(token)
tk.setTargetId(targetId)
tk
}
def get(key: String, token_id: UUID) = mapper.get(key, token_id)
def create(tk: Token) = {
mapper.save(tk)
}
}
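// Editor's usage sketch (hypothetical; the "session" key is made up):
// persist a token and read it back through the mapper.
object TokenExample {
  def demo(): Unit = {
    val tokenId = UUIDs.timeBased()
    Token.create(Token("session", tokenId, UUID.randomUUID()))
    val fetched = Token.get("session", tokenId)
    println(fetched.getTargetId)
  }
}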
|
lequangdzung/quora-clone
|
api-app/app/models/v2/Token.scala
|
Scala
|
gpl-2.0
| 1,419 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util.collection
import scala.reflect.ClassTag
/**
* An append-only, non-threadsafe, array-backed vector that is optimized for primitive types.
*/
private[spark]
class PrimitiveVector[@specialized(Long, Int, Double) V: ClassTag](initialSize: Int = 64) {
private var _numElements = 0
private var _array: Array[V] = _
// NB: This must be separate from the declaration, otherwise the specialized parent class
// will get its own array with the same initial size.
_array = new Array[V](initialSize)
def apply(index: Int): V = {
require(index < _numElements)
_array(index)
}
def +=(value: V): Unit = {
if (_numElements == _array.length) {
resize(_array.length * 2)
}
_array(_numElements) = value
_numElements += 1
}
def capacity: Int = _array.length
def length: Int = _numElements
def size: Int = _numElements
def iterator: Iterator[V] = new Iterator[V] {
var index = 0
override def hasNext: Boolean = index < _numElements
override def next(): V = {
if (!hasNext) {
throw new NoSuchElementException
}
val value = _array(index)
index += 1
value
}
}
  /** Gets the underlying array backing this vector. */
def array: Array[V] = _array
  /** Trims this vector so that the capacity is equal to the size. */
def trim(): PrimitiveVector[V] = resize(size)
  /**
   * Resizes the array, dropping elements if the total length decreases.
   */
def resize(newLength: Int): PrimitiveVector[V] = {
_array = copyArrayWithLength(newLength)
if (newLength < _numElements) {
_numElements = newLength
}
this
}
  /**
   * Return a trimmed copy of the underlying array.
   */
def toArray: Array[V] = {
copyArrayWithLength(size)
}
private def copyArrayWithLength(length: Int): Array[V] = {
val copy = new Array[V](length)
_array.copyToArray(copy)
copy
}
}
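// Editor's usage sketch (not part of the original file): the backing array
// doubles when full; trim() shrinks capacity back to the logical size.
private[spark] object PrimitiveVectorExample {
  def demo(): Unit = {
    val v = new PrimitiveVector[Int](initialSize = 2)
    (1 to 5).foreach(v += _)
    assert(v.size == 5 && v.capacity == 8) // capacity doubled: 2 -> 4 -> 8
    v.trim()
    assert(v.capacity == 5)
  }
}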
|
tophua/spark1.52
|
core/src/main/scala/org/apache/spark/util/collection/PrimitiveVector.scala
|
Scala
|
apache-2.0
| 3,156 |
package com.actionml.utilities
/*
* Copyright ActionML, LLC under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* ActionML licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/** immutable Fifo of fixed size so oldest is dropped when the limit is reached */
class FixedSizeFifo[T](val limit: Int)( private val out: List[T], private val in: List[T] )
extends Traversable[T] with Serializable {
override def size = in.size + out.size
def :+( t: T ) = {
val (nextOut,nextIn) = if (size == limit) {
if( out.nonEmpty) {
( out.tail, t::in )
} else {
( in.reverse.tail, List(t) )
}
} else ( out, t::in )
new FixedSizeFifo( limit )( nextOut, nextIn )
}
private lazy val deq = {
if( out.isEmpty ) {
val revIn = in.reverse
( revIn.head, new FixedSizeFifo( limit )( revIn.tail, List() ) )
} else {
( out.head, new FixedSizeFifo( limit )( out.tail, in ) )
}
}
override lazy val head = deq._1
override lazy val tail = deq._2
def foreach[U]( f: T => U ) = ( out ::: in.reverse ) foreach f
}
object FixedSizeFifo {
def apply[T]( limit: Int ) = new FixedSizeFifo[T]( limit )(List(),List())
}
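// Editor's usage sketch (not part of the original file): once the limit is
// reached, each enqueue drops the oldest element.
object FixedSizeFifoExample extends App {
  val fifo = FixedSizeFifo[Int](3) :+ 1 :+ 2 :+ 3 :+ 4
  println(fifo.toList) // List(2, 3, 4) -- the oldest element, 1, was dropped
}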
|
actionml/harness
|
rest-server/common/src/main/scala/com/actionml/utilities/FixedSizeFifo.scala
|
Scala
|
apache-2.0
| 1,814 |
package org.biancama.algorithms.sort
import scala.annotation.tailrec
/**
* Created by massimo on 30/04/16.
*/
object MergeSort {
/**
* mergesort :: Ord a => [a] -> [a]
* mergesort [] = []
* mergesort [x] = [x]
* mergesort xs = merge (mergesort (xs take n)) (mergesort (xs drop n))
*
* @return ordered list
*/
def sort[A](unsortedList: List[A])(implicit ord: Ordering[A]): List[A] = {
/**
* merge :: Ord a => [a] -> [a] -> [a]
* merge xs [] = xs
* merge [] ys = ys
* merge (x:xs) (y:ys)
* | (x <= y) = x:(merge xs (y:ys))
* | otherwise = y:(merge (x:xs) ys)
*
* @return ordered list
*/
@tailrec
    def merge(ll: List[A], lr: List[A], acc: List[A]): List[A] = (ll, lr) match {
      case (Nil, Nil) => acc.reverse
      case (_, Nil)   => acc.reverse ++ ll
      case (Nil, _)   => acc.reverse ++ lr
      case (xl :: xxl, xr :: xxr) =>
        // acc collects merged elements in reverse order; reverse once on exit
        if (ord.lt(xl, xr)) merge(xxl, lr, xl :: acc) else merge(ll, xxr, xr :: acc)
    }
if (unsortedList.length < 2) unsortedList
else {
val center = unsortedList.length / 2
val (left, right) = unsortedList splitAt(center)
merge(sort(left), sort(right), Nil)
}
}
}
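// Editor's usage sketch (not part of the original file): sort relies on the
// implicit Ordering of the element type.
object MergeSortExample extends App {
  println(MergeSort.sort(List(5, 1, 4, 2, 3)))          // List(1, 2, 3, 4, 5)
  println(MergeSort.sort(List("pear", "apple", "fig"))) // List(apple, fig, pear)
}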
|
biancama/data-structures-scala
|
src/main/scala/org/biancama/algorithms/sort/MergeSort.scala
|
Scala
|
gpl-3.0
| 1,212 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600e.v2
import uk.gov.hmrc.ct.box._
import uk.gov.hmrc.ct.ct600e.v2.retriever.CT600EBoxRetriever
case class E2(value: Option[Int]) extends CtBoxIdentifier("Total repayment") with CtOptionalInteger with Input with ValidatableBox[CT600EBoxRetriever] {
override def validate(boxRetriever: CT600EBoxRetriever): Set[CtValidation] = validateZeroOrPositiveInteger(this)
}
|
pncampbell/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/ct600e/v2/E2.scala
|
Scala
|
apache-2.0
| 1,001 |
package com.chobostudy.datastructure
import scala.annotation.tailrec
/**
* @author loustler
* @since 08/11/2017 21:16
*/
trait ListHelper {
def size[A](x: List[A]): Int = {
@tailrec
def loop(i: Int, y: List[A]): Int = y match {
case Nil => i
case Cons(h, Nil) => i + 1
case Cons(h, t) => loop(i + 1, t)
}
loop(0, x)
}
def getHead[A](x: List[A]): A = x match {
case Nil =>
sys.error("Can not find head element, cause the given list is Nil.")
case Cons(h, _) => h
}
def isEmpty[A](x: List[A]): Boolean = x match {
case Nil => true
case _ => false
}
def has[A](x: List[A], e: A): Boolean = x match {
case Nil => sys.error("The given list is Nil")
case Cons(h, Nil) => if (h == e) true else false
case Cons(h, t) => if (h == e) true else has(t, e)
}
def append[A](x: List[A], y: List[A]): List[A] = x match {
case Nil => y
case Cons(h, t) => Cons(h, append(t, y))
}
}
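// Editor's sketch (hypothetical; assumes the FPIS-style ADT defined alongside
// this trait: sealed trait List[+A], case object Nil, case class Cons[+A](head, tail)).
object ListHelperExample extends ListHelper {
  def demo(): Unit = {
    val xs: List[Int] = Cons(1, Cons(2, Cons(3, Nil)))
    println(size(xs))   // 3
    println(has(xs, 2)) // true
    println(getHead(append(xs, Cons(4, Nil)))) // 1
  }
}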
|
codechobostudy/FPIS
|
src/main/scala/com/chobostudy/datastructure/ListHelper.scala
|
Scala
|
apache-2.0
| 1,004 |
package vlad187.math
import org.joda.time._
/**
* Created by Vlad187 on 2/28/2014.
*/
case class Metric(name: String)
case class Measurement[T <: Numeric[T]](metric: Metric, value: T, time: DateTime)
case class Serial[T <: Numeric[T]](measurement: Measurement[T], measurements: Measurement[T]*) {
val allMeasurements: Seq[Measurement[T]] = measurement +: measurements
lazy val interval: Interval = new Interval(allMeasurements.head.time, allMeasurements.last.time)
lazy val duration: Duration = interval.toDuration
lazy val isEmpty: Boolean = allMeasurements.isEmpty
lazy val nonEmpty: Boolean = allMeasurements.nonEmpty
}
class ComparableSeries[T <: Numeric[T]](series: Array[Serial[T]]) {
lazy val comparables: Array[Serial[T]] = series.filter(_.nonEmpty)
lazy val latestSerial: Serial[T] = comparables.maxBy(_.interval.getStartMillis)
lazy val longestSerial: Serial[T] = comparables.maxBy(_.duration.getMillis)
lazy val timeShifts: Array[Duration] =
comparables.map(current =>
new Duration(latestSerial.interval.getStart, current.interval.getStart)
)
}
|
Vlad187/slash-tools
|
src/main/scala/vlad187/math/Series.scala
|
Scala
|
apache-2.0
| 1,095 |
package se.lu.nateko.cp.meta.test.reasoner
import org.scalatest.funspec.AnyFunSpec
import se.lu.nateko.cp.meta.onto.reasoner.HermitBasedReasoner
import se.lu.nateko.cp.meta.test.TestConfig
import se.lu.nateko.cp.meta.utils.owlapi._
import java.net.URI
class HermitBasedReasonerTests extends AnyFunSpec{
val owlOnto = TestConfig.owlOnto
val reasoner = new HermitBasedReasoner(owlOnto)
describe("getPropertiesWhoseDomainIncludes(owlClass)"){
it("should return expected props"){
val owlClass = TestConfig.getOWLClass("Organization")
val props = reasoner.getPropertiesWhoseDomainIncludes(owlClass)
.map(oc => getLastFragment(oc.getIRI))
assert(props.toSet === Set("hasName", "locatedAt", "hasTcId", "hasEtcId", "hasAtcId", "hasOtcId", "hasEmail", "hasDepiction"))
}
}
describe("getSubClasses"){
it("should return fully-defined classes that have Equivalent-To block"){
val spatCovClass = TestConfig.getOWLClass("SpatialCoverage")
val latLonClass = new URI(TestConfig.ontUri + "LatLonBox")
val subClasses = reasoner.getSubClasses(spatCovClass, false)
assert(subClasses.map(_.getIRI.toURI).contains(latLonClass))
}
}
}
|
ICOS-Carbon-Portal/meta
|
src/test/scala/se/lu/nateko/cp/meta/test/reasoner/HermitBasedReasonerTests.scala
|
Scala
|
gpl-3.0
| 1,156 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util.collection
import scala.reflect.ClassTag
/**
* Abstraction for sorting an arbitrary input buffer of data. This interface requires determining
* the sort key for a given element index, as well as swapping elements and moving data from one
* buffer to another.
*
* Example format: an array of numbers, where each element is also the key.
* See [[KVArraySortDataFormat]] for a more exciting format.
*
 * Note: Declaring and instantiating multiple subclasses of this class would prevent the JIT
 * from inlining overridden methods and hence decrease shuffle performance.
*
* @tparam K Type of the sort key of each element
* @tparam Buffer Internal data structure used by a particular format (e.g., Array[Int]).
*/
// TODO: Making Buffer a real trait would be a better abstraction, but adds some complexity.
private[spark]
abstract class SortDataFormat[K, Buffer] {
/**
* Creates a new mutable key for reuse. This should be implemented if you want to override
* [[getKey(Buffer, Int, K)]].
*/
def newKey(): K = null.asInstanceOf[K]
/** Return the sort key for the element at the given index. */
protected def getKey(data: Buffer, pos: Int): K
/**
   * Returns the sort key for the element at the given index, reusing the input key if possible.
   * The default implementation ignores the reuse parameter and invokes [[getKey(Buffer, Int)]].
* If you want to override this method, you must implement [[newKey()]].
*/
def getKey(data: Buffer, pos: Int, reuse: K): K = {
getKey(data, pos)
}
/** Swap two elements. */
def swap(data: Buffer, pos0: Int, pos1: Int): Unit
/** Copy a single element from src(srcPos) to dst(dstPos). */
def copyElement(src: Buffer, srcPos: Int, dst: Buffer, dstPos: Int): Unit
/**
* Copy a range of elements starting at src(srcPos) to dst, starting at dstPos.
* Overlapping ranges are allowed.
*/
def copyRange(src: Buffer, srcPos: Int, dst: Buffer, dstPos: Int, length: Int): Unit
/**
* Allocates a Buffer that can hold up to 'length' elements.
* All elements of the buffer should be considered invalid until data is explicitly copied in.
*/
def allocate(length: Int): Buffer
}
/**
* Supports sorting an array of key-value pairs where the elements of the array alternate between
* keys and values, as used in [[AppendOnlyMap]].
*
* @tparam K Type of the sort key of each element
* @tparam T Type of the Array we're sorting. Typically this must extend AnyRef, to support cases
* when the keys and values are not the same type.
*/
private[spark]
class KVArraySortDataFormat[K, T <: AnyRef : ClassTag] extends SortDataFormat[K, Array[T]] {
override def getKey(data: Array[T], pos: Int): K = data(2 * pos).asInstanceOf[K]
override def swap(data: Array[T], pos0: Int, pos1: Int) {
val tmpKey = data(2 * pos0)
val tmpVal = data(2 * pos0 + 1)
data(2 * pos0) = data(2 * pos1)
data(2 * pos0 + 1) = data(2 * pos1 + 1)
data(2 * pos1) = tmpKey
data(2 * pos1 + 1) = tmpVal
}
override def copyElement(src: Array[T], srcPos: Int, dst: Array[T], dstPos: Int) {
dst(2 * dstPos) = src(2 * srcPos)
dst(2 * dstPos + 1) = src(2 * srcPos + 1)
}
override def copyRange(src: Array[T], srcPos: Int, dst: Array[T], dstPos: Int, length: Int) {
System.arraycopy(src, 2 * srcPos, dst, 2 * dstPos, 2 * length)
}
override def allocate(length: Int): Array[T] = {
new Array[T](2 * length)
}
}
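// Editor's usage sketch (not part of the original file; assumes Spark's Sorter
// class in this package, which consumes a SortDataFormat). Keys and values
// alternate in the flat array, matching AppendOnlyMap's layout.
private[spark] object KVArraySortExample {
  def demo(): Unit = {
    val kv: Array[AnyRef] = Array(Int.box(3), "c", Int.box(1), "a", Int.box(2), "b")
    new Sorter(new KVArraySortDataFormat[Int, AnyRef])
      .sort(kv, 0, kv.length / 2, Ordering.Int)
    // kv is now 1, "a", 2, "b", 3, "c" in alternating key/value slots
  }
}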
|
sh-cho/cshSpark
|
util/collection/SortDataFormat.scala
|
Scala
|
apache-2.0
| 4,275 |
def foo(x: String) = 1
def foo(x: Byte) = 2
/* line: 2 */foo(' ')
|
katejim/intellij-scala
|
testdata/resolve2/bug2/SCL2722.scala
|
Scala
|
apache-2.0
| 66 |
package io.udash.web.guide.views.ext.demo
import io.udash.properties.single.Property
import io.udash.web.guide.demos.AutoDemo
import io.udash.web.guide.styles.partials.GuideStyles
import scalatags.JsDom.all._
object DynamicRemoteTranslationsDemo extends AutoDemo {
private val (rendered, source) = {
import io.udash.bootstrap.utils.BootstrapStyles._
import io.udash.css.CssView._
import io.udash.i18n._
import io.udash.web.guide.Context.serverRpc
import io.udash.web.guide.demos.i18n.Translations
import org.scalajs.dom
import scalatags.JsDom.all._
import scala.concurrent.duration.DurationInt
implicit val translationProvider: RemoteTranslationProvider =
new RemoteTranslationProvider(
serverRpc.demos.translations,
Some(dom.window.localStorage),
6.hours
)
implicit val lang: Property[Lang] = Property(Lang("en"))
div(
button(
Button.btn,
Button.color(Color.Primary)
)(id := "enButton", onclick := ((_: dom.Event) => lang.set(Lang("en"))))("EN"), " ",
button(
Button.btn,
Button.color(Color.Primary)
)(id := "plButton", onclick := ((_: dom.Event) => lang.set(Lang("pl"))))("PL"),
div(Card.card, Card.body, Background.color(Color.Light), Spacing.margin(
side = Side.Top,
size = SpacingSize.Normal
))(ul(
li(
"auth.loginLabel: ",
Translations.auth.loginLabel.translatedDynamic()
),
li(
"auth.passwordLabel: ",
Translations.auth.passwordLabel.translatedDynamic()
),
li(
"auth.login.buttonLabel: ",
Translations.auth.login.buttonLabel.translatedDynamic()
),
li(
"auth.login.retriesLeft: ",
Translations.auth.login.retriesLeft(3).translatedDynamic()
),
li(
"auth.login.retriesLeftOne: ",
Translations.auth.login.retriesLeftOne.translatedDynamic()
),
li(
"auth.register.buttonLabel: ",
Translations.auth.register.buttonLabel.translatedDynamic()
)
))
)
}.withSourceCode
override protected def demoWithSource(): (Modifier, Iterator[String]) = {
import io.udash.css.CssView._
(
div(
id := "dynamic-rpc-translations-demo",
GuideStyles.frame,
GuideStyles.useBootstrap
)(rendered),
source.linesIterator
)
}
}
|
UdashFramework/udash-core
|
guide/guide/.js/src/main/scala/io/udash/web/guide/views/ext/demo/DynamicRemoteTranslationsDemo.scala
|
Scala
|
apache-2.0
| 2,457 |