code (stringlengths 5-1M) | repo_name (stringlengths 5-109) | path (stringlengths 6-208) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64, 5-1M)
---|---|---|---|---|---
package com.sksamuel.elastic4s.bulk
import com.sksamuel.elastic4s.testkit.ElasticSugar
import org.scalatest.{FlatSpec, Matchers}
import scala.concurrent.Await
import scala.concurrent.duration._
class BulkProcessorTest extends FlatSpec with Matchers with ElasticSugar {
client.execute {
createIndex("books").mappings(
mapping("plays")
)
}.await
"bulk processor" should "insert all data" in {
val processor = bulkProcessor().actionCount(3).concurrentRequests(2).build(client)
processor.add(indexInto("books" / "plays").fields("name" -> "Midsummer Nights Dream"))
processor.add(indexInto("books" / "plays").fields("name" -> "Cymbeline"))
processor.add(indexInto("books" / "plays").fields("name" -> "Winters Tale"))
processor.add(indexInto("books" / "plays").fields("name" -> "King John"))
processor.close(10.seconds)
blockUntilCount(4, "books")
}
it should "honour action count" in {
val processor = bulkProcessor().actionCount(2).concurrentRequests(1).build(client)
processor.add(indexInto("books" / "novels").fields("name" -> "Moby Dick"))
processor.add(indexInto("books" / "novels").fields("name" -> "Uncle Toms Cabin"))
blockUntilCount(2, "books" / "novels")
processor.add(indexInto("books" / "novels").fields("name" -> "Life of Pi"))
processor.add(indexInto("books" / "novels").fields("name" -> "Catcher in the Rye"))
blockUntilCount(4, "books" / "novels")
Await.ready(processor.close(), 10.seconds)
}
}
| ulric260/elastic4s | elastic4s-core-tests/src/test/scala/com/sksamuel/elastic4s/bulk/BulkProcessorTest.scala | Scala | apache-2.0 | 1,507 |
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.spark.sql.jython
import org.apache.spark.internal.Logging
import org.apache.spark.sql.UDFRegistration
import org.apache.spark.sql.catalyst.analysis.FunctionRegistry
import java.util.{List => JList, Map => JMap}
import javax.script._
import scala.collection.JavaConverters._
import org.python.core._
import org.apache.spark.SparkContext
import org.apache.spark.sql.catalyst.expressions.{Expression, ScalaUDF}
import org.apache.spark.sql.types._
import org.apache.spark.sql.Row
/**
* A serialized version of a Python lambda function to be executed in Jython.
*
* TODO: Consider if extending ScalaUDF is the right path forward
* TODO: Consider if pipelining multiple JythonUDFs is important
*
* @param name Name of UDF
* @param func Jython function
* @param sfunc Scala function (normally supplied only at copy time)
* @param dataType Return data type.
* @param children Sequence of child expressions.
*/
private[sql] class JythonUDF(
name: String,
func: JythonFunction,
sfunc: AnyRef,
dataType: DataType,
children: Seq[Expression])
extends ScalaUDF(sfunc, dataType, children) {
// For the copy constructor keep the same ScalaFunc.
def this(sfunc: AnyRef, dataType: DataType, children: Seq[Expression],
inputTypes: Seq[DataType], name: String, func: JythonFunction) = {
this(name, func, sfunc, dataType, children)
}
// This is the constructor we expect to be called from Python, converts the Python code to a
// wrapped Scala function.
def this(name: String, func: JythonFunction, dataType: DataType, children: Seq[Expression]) {
this(name, func, func.toScalaFunc(JythonConverter.build(dataType), children.size), dataType,
children)
}
override def toString: String = s"$name(${children.mkString(", ")})"
override def nullable: Boolean = true
// Pass the name and function when copying
override protected def otherCopyArgs: Seq[AnyRef] = {
List(name, func)
}
}
/**
* A wrapper for a Jython function, contains all necessary context to run the function in Jython.
*
*
* @param src Python lambda expression as a string
* @param pythonVars Variables to be set before the function, as a base 64 encoded pickle map of
* name and value.
* @param imports Python imports as a base 64 encoded pickle set of module, name, target.
* @param setupCode String of setup code (helper functions, etc.)
* @param sparkContext SparkContext used to broadcast the function.
*/
private[sql] case class JythonFunction(src: String, pythonVars: String, imports: String,
setupCode: String, @transient val sparkContext: SparkContext) {
val className = s"__reservedPandaClass"
// Skip importing pickle and base64 if not needed
val preImports = if (imports.isEmpty && pythonVars.isEmpty) {
""
} else {
s"""
|from base64 import b64decode
|import pickle
""".stripMargin('|')
}
// Only decode/load imports if non empty
val importCode = if (imports.isEmpty) {
""
} else {
s"""
|imports = pickle.loads(b64decode('${imports}'))
|for module, name, target in imports:
| exec "from %s import %s as %s" % (module, name, target)
""".stripMargin('|')
}
// Only decode/load vars if non empty
val varsCode = if (pythonVars.isEmpty) {
""
} else {
s"""
|pythonVars = pickle.loads(b64decode('${pythonVars}'))
|for k, v in pythonVars.iteritems():
| exec "%s = v" % k
""".stripMargin('|')
}
val code = s"""
|import os
|import sys
|if "PYTHONPATH" in os.environ:
| sys.path.extend(os.environ["PYTHONPATH"].split(":"))
|if "SPARK_HOME" in os.environ:
| sys.path.extend([os.environ["SPARK_HOME"] + "/python/",
| os.environ["SPARK_HOME"] + "/python/lib/py4j-0.10.1-src.zip"])
|
|${preImports}
|
|${importCode}
|
|${varsCode}
|
|${setupCode}
|
|class ${className}(object):
| def __init__(self):
| self.call = ${src}
|${className}_instance = ${className}()""".stripMargin('|')
val lazyFunc = sparkContext.broadcast(new LazyJythonFunc(code, className))
/**
* Compile this function to a Scala function.
*/
def toScalaFunc(converter: Any => Any, children: Int): AnyRef = {
children match {
case 0 => () => converter(lazyFunc.value.scalaFunc())
/**
* Generated by:
* (1 to 22).map { x =>
* val inputs = 1.to(x).map(y => s"ar${y}: AnyRef").reduce(_ + ", "+ _)
* val calls = 1.to(x).map(y=> s"ar${y}").reduce(_ + ", " + _)
* s"case ${x} => ($inputs) => converter(lazyFunc.value.scalaFunc(${calls}))"
* }.mkString("\n")
*/
// scalastyle:off line.size.limit
case 1 => (ar1: AnyRef) => converter(lazyFunc.value.scalaFunc(ar1))
case 2 => (ar1: AnyRef, ar2: AnyRef) => converter(lazyFunc.value.scalaFunc(ar1, ar2))
case 3 => (ar1: AnyRef, ar2: AnyRef, ar3: AnyRef) => converter(lazyFunc.value.scalaFunc(ar1, ar2, ar3))
case 4 => (ar1: AnyRef, ar2: AnyRef, ar3: AnyRef, ar4: AnyRef) => converter(lazyFunc.value.scalaFunc(ar1, ar2, ar3, ar4))
case 5 => (ar1: AnyRef, ar2: AnyRef, ar3: AnyRef, ar4: AnyRef, ar5: AnyRef) => converter(lazyFunc.value.scalaFunc(ar1, ar2, ar3, ar4, ar5))
case 6 => (ar1: AnyRef, ar2: AnyRef, ar3: AnyRef, ar4: AnyRef, ar5: AnyRef, ar6: AnyRef) => converter(lazyFunc.value.scalaFunc(ar1, ar2, ar3, ar4, ar5, ar6))
case 7 => (ar1: AnyRef, ar2: AnyRef, ar3: AnyRef, ar4: AnyRef, ar5: AnyRef, ar6: AnyRef, ar7: AnyRef) => converter(lazyFunc.value.scalaFunc(ar1, ar2, ar3, ar4, ar5, ar6, ar7))
case 8 => (ar1: AnyRef, ar2: AnyRef, ar3: AnyRef, ar4: AnyRef, ar5: AnyRef, ar6: AnyRef, ar7: AnyRef, ar8: AnyRef) => converter(lazyFunc.value.scalaFunc(ar1, ar2, ar3, ar4, ar5, ar6, ar7, ar8))
case 9 => (ar1: AnyRef, ar2: AnyRef, ar3: AnyRef, ar4: AnyRef, ar5: AnyRef, ar6: AnyRef, ar7: AnyRef, ar8: AnyRef, ar9: AnyRef) => converter(lazyFunc.value.scalaFunc(ar1, ar2, ar3, ar4, ar5, ar6, ar7, ar8, ar9))
case 10 => (ar1: AnyRef, ar2: AnyRef, ar3: AnyRef, ar4: AnyRef, ar5: AnyRef, ar6: AnyRef, ar7: AnyRef, ar8: AnyRef, ar9: AnyRef, ar10: AnyRef) => converter(lazyFunc.value.scalaFunc(ar1, ar2, ar3, ar4, ar5, ar6, ar7, ar8, ar9, ar10))
case 11 => (ar1: AnyRef, ar2: AnyRef, ar3: AnyRef, ar4: AnyRef, ar5: AnyRef, ar6: AnyRef, ar7: AnyRef, ar8: AnyRef, ar9: AnyRef, ar10: AnyRef, ar11: AnyRef) => converter(lazyFunc.value.scalaFunc(ar1, ar2, ar3, ar4, ar5, ar6, ar7, ar8, ar9, ar10, ar11))
case 12 => (ar1: AnyRef, ar2: AnyRef, ar3: AnyRef, ar4: AnyRef, ar5: AnyRef, ar6: AnyRef, ar7: AnyRef, ar8: AnyRef, ar9: AnyRef, ar10: AnyRef, ar11: AnyRef, ar12: AnyRef) => converter(lazyFunc.value.scalaFunc(ar1, ar2, ar3, ar4, ar5, ar6, ar7, ar8, ar9, ar10, ar11, ar12))
case 13 => (ar1: AnyRef, ar2: AnyRef, ar3: AnyRef, ar4: AnyRef, ar5: AnyRef, ar6: AnyRef, ar7: AnyRef, ar8: AnyRef, ar9: AnyRef, ar10: AnyRef, ar11: AnyRef, ar12: AnyRef, ar13: AnyRef) => converter(lazyFunc.value.scalaFunc(ar1, ar2, ar3, ar4, ar5, ar6, ar7, ar8, ar9, ar10, ar11, ar12, ar13))
case 14 => (ar1: AnyRef, ar2: AnyRef, ar3: AnyRef, ar4: AnyRef, ar5: AnyRef, ar6: AnyRef, ar7: AnyRef, ar8: AnyRef, ar9: AnyRef, ar10: AnyRef, ar11: AnyRef, ar12: AnyRef, ar13: AnyRef, ar14: AnyRef) => converter(lazyFunc.value.scalaFunc(ar1, ar2, ar3, ar4, ar5, ar6, ar7, ar8, ar9, ar10, ar11, ar12, ar13, ar14))
case 15 => (ar1: AnyRef, ar2: AnyRef, ar3: AnyRef, ar4: AnyRef, ar5: AnyRef, ar6: AnyRef, ar7: AnyRef, ar8: AnyRef, ar9: AnyRef, ar10: AnyRef, ar11: AnyRef, ar12: AnyRef, ar13: AnyRef, ar14: AnyRef, ar15: AnyRef) => converter(lazyFunc.value.scalaFunc(ar1, ar2, ar3, ar4, ar5, ar6, ar7, ar8, ar9, ar10, ar11, ar12, ar13, ar14, ar15))
case 16 => (ar1: AnyRef, ar2: AnyRef, ar3: AnyRef, ar4: AnyRef, ar5: AnyRef, ar6: AnyRef, ar7: AnyRef, ar8: AnyRef, ar9: AnyRef, ar10: AnyRef, ar11: AnyRef, ar12: AnyRef, ar13: AnyRef, ar14: AnyRef, ar15: AnyRef, ar16: AnyRef) => converter(lazyFunc.value.scalaFunc(ar1, ar2, ar3, ar4, ar5, ar6, ar7, ar8, ar9, ar10, ar11, ar12, ar13, ar14, ar15, ar16))
case 17 => (ar1: AnyRef, ar2: AnyRef, ar3: AnyRef, ar4: AnyRef, ar5: AnyRef, ar6: AnyRef, ar7: AnyRef, ar8: AnyRef, ar9: AnyRef, ar10: AnyRef, ar11: AnyRef, ar12: AnyRef, ar13: AnyRef, ar14: AnyRef, ar15: AnyRef, ar16: AnyRef, ar17: AnyRef) => converter(lazyFunc.value.scalaFunc(ar1, ar2, ar3, ar4, ar5, ar6, ar7, ar8, ar9, ar10, ar11, ar12, ar13, ar14, ar15, ar16, ar17))
case 18 => (ar1: AnyRef, ar2: AnyRef, ar3: AnyRef, ar4: AnyRef, ar5: AnyRef, ar6: AnyRef, ar7: AnyRef, ar8: AnyRef, ar9: AnyRef, ar10: AnyRef, ar11: AnyRef, ar12: AnyRef, ar13: AnyRef, ar14: AnyRef, ar15: AnyRef, ar16: AnyRef, ar17: AnyRef, ar18: AnyRef) => converter(lazyFunc.value.scalaFunc(ar1, ar2, ar3, ar4, ar5, ar6, ar7, ar8, ar9, ar10, ar11, ar12, ar13, ar14, ar15, ar16, ar17, ar18))
case 19 => (ar1: AnyRef, ar2: AnyRef, ar3: AnyRef, ar4: AnyRef, ar5: AnyRef, ar6: AnyRef, ar7: AnyRef, ar8: AnyRef, ar9: AnyRef, ar10: AnyRef, ar11: AnyRef, ar12: AnyRef, ar13: AnyRef, ar14: AnyRef, ar15: AnyRef, ar16: AnyRef, ar17: AnyRef, ar18: AnyRef, ar19: AnyRef) => converter(lazyFunc.value.scalaFunc(ar1, ar2, ar3, ar4, ar5, ar6, ar7, ar8, ar9, ar10, ar11, ar12, ar13, ar14, ar15, ar16, ar17, ar18, ar19))
case 20 => (ar1: AnyRef, ar2: AnyRef, ar3: AnyRef, ar4: AnyRef, ar5: AnyRef, ar6: AnyRef, ar7: AnyRef, ar8: AnyRef, ar9: AnyRef, ar10: AnyRef, ar11: AnyRef, ar12: AnyRef, ar13: AnyRef, ar14: AnyRef, ar15: AnyRef, ar16: AnyRef, ar17: AnyRef, ar18: AnyRef, ar19: AnyRef, ar20: AnyRef) => converter(lazyFunc.value.scalaFunc(ar1, ar2, ar3, ar4, ar5, ar6, ar7, ar8, ar9, ar10, ar11, ar12, ar13, ar14, ar15, ar16, ar17, ar18, ar19, ar20))
case 21 => (ar1: AnyRef, ar2: AnyRef, ar3: AnyRef, ar4: AnyRef, ar5: AnyRef, ar6: AnyRef, ar7: AnyRef, ar8: AnyRef, ar9: AnyRef, ar10: AnyRef, ar11: AnyRef, ar12: AnyRef, ar13: AnyRef, ar14: AnyRef, ar15: AnyRef, ar16: AnyRef, ar17: AnyRef, ar18: AnyRef, ar19: AnyRef, ar20: AnyRef, ar21: AnyRef) => converter(lazyFunc.value.scalaFunc(ar1, ar2, ar3, ar4, ar5, ar6, ar7, ar8, ar9, ar10, ar11, ar12, ar13, ar14, ar15, ar16, ar17, ar18, ar19, ar20, ar21))
case 22 => (ar1: AnyRef, ar2: AnyRef, ar3: AnyRef, ar4: AnyRef, ar5: AnyRef, ar6: AnyRef, ar7: AnyRef, ar8: AnyRef, ar9: AnyRef, ar10: AnyRef, ar11: AnyRef, ar12: AnyRef, ar13: AnyRef, ar14: AnyRef, ar15: AnyRef, ar16: AnyRef, ar17: AnyRef, ar18: AnyRef, ar19: AnyRef, ar20: AnyRef, ar21: AnyRef, ar22: AnyRef) => converter(lazyFunc.value.scalaFunc(ar1, ar2, ar3, ar4, ar5, ar6, ar7, ar8, ar9, ar10, ar11, ar12, ar13, ar14, ar15, ar16, ar17, ar18, ar19, ar20, ar21, ar22))
// scalastyle:on line.size.limit
case _ => throw new Exception("Unsupported number of children " + children)
}
}
}
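// Illustrative sketch (not part of the original source; `sc` is an assumed active
// SparkContext): constructing a JythonFunction for a simple lambda with no pickled
// vars or imports, then turning it into a one-argument Scala function.
//
//   val inc = JythonFunction("lambda x: x + 1", pythonVars = "", imports = "",
//     setupCode = "", sparkContext = sc)
//   val f = inc.toScalaFunc(JythonConverter.build(LongType), children = 1)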
/**
 * Since the compiled code functions aren't serializable, delay compilation until we
 * get to the worker, while also minimizing the number of compiles we do on the workers.
 *
 * @param code The code representing the python class to be evaluated.
 * @param className The name of the primary class to be called.
 */
private[sql] class LazyJythonFunc(code: String, className: String) extends Serializable {
@transient lazy val jython = JythonFunc.jython
@transient lazy val ctx = {
val sctx = new SimpleScriptContext()
sctx.setBindings(jython.createBindings(), ScriptContext.ENGINE_SCOPE)
sctx
}
@transient lazy val scope = ctx.getBindings(ScriptContext.ENGINE_SCOPE)
@transient lazy val func = {
jython.eval(code, ctx)
scope.get(s"${className}_instance")
}
def scalaFunc(ar: AnyRef*): Any = {
val pythonRet = jython.asInstanceOf[Invocable].invokeMethod(func, "call", ar : _*)
pythonRet
}
}
/**
* Constructs converters for Jython return types to Scala types based on the specified data type.
*/
private[sql] object JythonConverter {
// Needs to be on the worker - not properly serializable.
@transient lazy val fieldsPyStr = new PyString("__fields__")
def build(dt: DataType): Any => Any = {
dt match {
case LongType => x => x.asInstanceOf[java.math.BigInteger].longValue()
case IntegerType => x => x.asInstanceOf[java.math.BigInteger].intValue()
case arrayType: ArrayType =>
val innerConv = build(arrayType.elementType)
x => {
val arr = x.asInstanceOf[JList[_]].asScala
arr.map(innerConv)
}
case mapType: MapType =>
val keyConverter = build(mapType.keyType)
val valueConverter = build(mapType.valueType)
x => {
val dict = x.asInstanceOf[JMap[_, _]].asScala
dict.map{case (k, v) => (keyConverter(k), valueConverter(v))}
}
case structType: StructType =>
val converters = structType.fields.map(f => build(f.dataType))
x => {
val rez = x.asInstanceOf[PyTupleDerived]
// Determine if the Row is named, or not.
val dict = rez.getDict().asInstanceOf[PyStringMap]
if (dict.has_key(fieldsPyStr)) {
val pyFields = dict.get(fieldsPyStr).asInstanceOf[JList[String]].asScala
val pyFieldsArray = pyFields.toArray
val structFields = structType.fields.map(_.name)
val rezArray = rez.toArray()
val elements = structFields.zip(converters).map{case (name, conv) =>
val idx = pyFieldsArray.indexOf(name)
conv(rezArray(idx))
}
Row(elements : _*)
} else {
val itr = rez.asScala
Row(converters.zip(itr).map{case (conv, v) => conv(v)} : _*)
}
}
case _ => x => x
}
}
}
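// Illustrative sketch (assumption, not in the original source): Jython hands back
// java.math.BigInteger for Python ints and java.util.List for Python lists, so a
// converter built for ArrayType(LongType) unwraps both layers.
//
//   val conv = JythonConverter.build(ArrayType(LongType))
//   val fromJython: java.util.List[AnyRef] = ...  // value returned by the engine
//   conv(fromJython)                              // => Seq of Long values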
/**
* Companion object for Jython script engine requirements.
*
* Starting the Jython script engine is slow, so try and do it infrequently.
*/
private[sql] object JythonFunc {
@transient lazy val mgr = new ScriptEngineManager()
@transient lazy val jython = mgr.getEngineByName("python")
}
| mariusvniekerk/spark-jython-udf | src/main/scala/org/apache/spark/sql/jython/JythonUDF.scala | Scala | apache-2.0 | 15,540 |
package org.cddb.lsmt
trait Serializer[T] {
def serialize(obj: T): Array[Byte]
def deserialize(data: Array[Byte]): Option[T]
}
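// Illustrative sketch (not part of the original source): a minimal Serializer
// instance for String, wrapping decode failures in Option via Try.
//
//   import scala.util.Try
//   object StringSerializer extends Serializer[String] {
//     def serialize(obj: String): Array[Byte] = obj.getBytes("UTF-8")
//     def deserialize(data: Array[Byte]): Option[String] =
//       Try(new String(data, "UTF-8")).toOption
//   }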
| tierex/cddb | core/src/main/scala/org/cddb/lsmt/Serializer.scala | Scala | apache-2.0 | 136 |
package org.danielnixon.extrawarts
import org.scalatest.FunSuite
import org.wartremover.test.WartTestTraverser
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
class FutureObjectTest extends FunSuite {
val futs: List[Future[String]] = Nil
test("can't use scala.concurrent.Future#reduce") {
val result = WartTestTraverser(FutureObject) {
val foo = Future.reduce(futs)((r, t) => r)
}
assertResult(List("[wartremover:FutureObject] Future#reduce is disabled - use Future#fold instead"), "result.errors")(result.errors)
assertResult(List.empty, "result.warnings")(result.warnings)
}
test("FutureObject wart obeys SuppressWarnings") {
val result = WartTestTraverser(FutureObject) {
@SuppressWarnings(Array("org.danielnixon.extrawarts.FutureObject"))
val foo = Future.reduce(futs)((r, t) => r)
}
assertResult(List.empty, "result.errors")(result.errors)
assertResult(List.empty, "result.warnings")(result.warnings)
}
}
| danielnixon/extrawarts | core/src/test/scala/org/danielnixon/extrawarts/FutureObjectTest.scala | Scala | apache-2.0 | 1,017 |
package org.workcraft.gui.modeleditor.tools
import java.awt.Color
import java.awt.Graphics2D
import java.awt.geom.Point2D
import org.workcraft.gui.modeleditor.MouseButton
import org.workcraft.gui.modeleditor.LeftButton
import org.workcraft.gui.modeleditor.Modifier
import org.workcraft.gui.modeleditor.ToolMouseListener
import org.workcraft.scala.Expressions._
import org.workcraft.graphics.GraphicalContent
import org.workcraft.gui.GUI
import javax.swing.JPanel
import org.workcraft.scala.effects.IO
import org.workcraft.scala.effects.IO._
import scalaz.Scalaz._
import org.workcraft.graphics.Colorisation
object NodeGeneratorTool {
def apply(look: Button, painter: Expression[GraphicalContent], action: Point2D.Double => IO[Unit]): ModelEditorTool =
new ModelEditorTool {
def button = look
def createInstance(env: ToolEnvironment) = ioPure.pure {
new ModelEditorToolInstance {
def keyBindings = Nil
def mouseListener: Option[ToolMouseListener] = Some(new DummyMouseListener {
override def buttonPressed(btn: MouseButton, modifiers: Set[Modifier], position: Point2D.Double): IO[Unit] = {
if (btn == LeftButton) action(position) else {}.pure[IO]
}
})
def userSpaceContent: Expression[GraphicalContent] = painter
def screenSpaceContent: Expression[GraphicalContent] =
GUI.editorMessage(env.viewport, Color.BLACK, "Click to create a " + look.label)
def interfacePanel: Option[JPanel] = None
}
}
}
}
| tuura/workcraft-2.2 | ScalaGraphEditorUtil/src/main/scala/org/workcraft/gui/modeleditor/tools/NodeGeneratorTool.scala | Scala | gpl-3.0 | 1,554 |
package com.twitter.scalding.examples
import com.twitter.scalding._
import com.twitter.scalding.typed.ComputedValue
object KMeans {
/**
* This is the Euclidean distance between two vectors
*/
private def distance(v1: Vector[Double], v2: Vector[Double]): Double =
math.sqrt(v1.iterator
.zip(v2.iterator)
.map { case (l, r) => (l - r) * (l - r) }
.sum)
// Just normal vector addition
private def add(v1: Vector[Double], v2: Vector[Double]): Vector[Double] =
v1.zip(v2).map { case (l, r) => l + r }
// normal scalar multiplication
private def scale(s: Double, v: Vector[Double]): Vector[Double] =
v.map { x => s * x }
// Here we return the centroid of some vectors
private def centroidOf(vecs: TraversableOnce[Vector[Double]]): Vector[Double] = {
val (vec, count) = vecs
// add a 1 to each value to count the number of vectors in one pass:
.map { v => (v, 1) }
// Here we add both the count and the vectors:
.reduce { (ll, rr) =>
val (l, lc) = ll
val (r, rc) = rr
(add(l, r), lc + rc)
}
// Now scale to get the pointwise average
scale(1.0 / count, vec)
}
private def closest[Id](from: Vector[Double],
centroids: TraversableOnce[(Id, Vector[Double])]): (Id, Vector[Double]) =
centroids
// compute the distance to each center
.map { case (id, cent) => (distance(from, cent), (id, cent)) }
// take the minimum by the distance, ignoring the id and the centroid
.minBy { case (dist, _) => dist }
// Just keep the id and the centroid
._2
type LabeledVector = (Int, Vector[Double])
/**
* This runs one step in a kmeans algorithm
* It returns the number of vectors that changed clusters,
* the new clusters
* and the new list of labeled vectors
*/
def kmeansStep(k: Int,
s: Stat,
clusters: ValuePipe[List[LabeledVector]],
points: TypedPipe[LabeledVector]): Execution[(ValuePipe[List[LabeledVector]], TypedPipe[LabeledVector])] = {
// Do a cross product to produce all point, cluster pairs
// in scalding, the smaller pipe should go on the right.
val next = points.leftCross(clusters)
// now compute the closest cluster for each vector
.map {
case ((oldId, vector), Some(centroids)) =>
val (id, newcentroid) = closest(vector, centroids)
if (id != oldId) s.inc()
(id, vector)
case (_, None) => sys.error("Missing clusters, this should never happen")
}
.forceToDiskExecution
// Now update the clusters:
next.map { pipe =>
(ComputedValue(pipe
.group
// There is no need to use more than k reducers
.withReducers(k)
.mapValueStream { vectors => Iterator(centroidOf(vectors)) }
// Now collect them all into one big list
.groupAll
.toList
// discard the "all" key used to group them together
.values), pipe)
}
}
def initializeClusters(k: Int, points: TypedPipe[Vector[Double]]): (ValuePipe[List[LabeledVector]], TypedPipe[LabeledVector]) = {
val rng = new java.util.Random(123)
// take a random k vectors:
val clusters = points.map { v => (rng.nextDouble, v) }
.groupAll
.sortedTake(k)(Ordering.by(_._1))
.mapValues { randk =>
randk.iterator
.zipWithIndex
.map { case ((_, v), id) => (id, v) }
.toList
}
.values
// attach a random cluster to each vector
val labeled = points.map { v => (rng.nextInt(k), v) }
(ComputedValue(clusters), labeled)
}
/*
* Run the full k-means algorithm by flatMapping the above function into itself
* while the number of vectors that changed is not zero
*/
def kmeans(k: Int,
clusters: ValuePipe[List[LabeledVector]],
points: TypedPipe[LabeledVector]): Execution[(Int, ValuePipe[List[LabeledVector]], TypedPipe[LabeledVector])] = {
val key = StatKey("changed", "scalding.kmeans")
def go(s: Stat,
c: ValuePipe[List[LabeledVector]],
p: TypedPipe[LabeledVector],
step: Int): Execution[(Int, ValuePipe[List[LabeledVector]], TypedPipe[LabeledVector])] =
kmeansStep(k, s, c, p)
.getAndResetCounters
.flatMap {
case ((nextC, nextP), counters) =>
val changed = counters(key)
if (changed == 0L) Execution.from((step, nextC, nextP))
else go(s, nextC, nextP, step + 1)
}
Execution.withId { implicit uid =>
go(Stat(key), clusters, points, 0)
}
}
def apply(k: Int, points: TypedPipe[Vector[Double]]): Execution[(Int, ValuePipe[List[LabeledVector]], TypedPipe[LabeledVector])] = {
val (clusters, labeled) = initializeClusters(k, points)
kmeans(k, clusters, labeled)
}
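// Illustrative usage sketch (assumed pipeline; `someVectorSource` is hypothetical,
// not part of the original source): cluster points into k = 5 groups and obtain the
// Execution describing the full iterative job.
//
//   val points: TypedPipe[Vector[Double]] = TypedPipe.from(someVectorSource)
//   val result: Execution[(Int, ValuePipe[List[LabeledVector]], TypedPipe[LabeledVector])] =
//     KMeans(5, points)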
}
| tdyas/scalding | scalding-commons/src/main/scala/com/twitter/scalding/examples/KMeans.scala | Scala | apache-2.0 | 4,797 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package akka.actor
import java.lang.Thread.UncaughtExceptionHandler
import akka.actor.ActorSystem.findClassLoader
import akka.actor.setup.ActorSystemSetup
import com.typesafe.config.{Config, ConfigFactory}
import org.apache.flink.runtime.util.FatalExitExceptionHandler
import scala.concurrent.ExecutionContext
/**
* [[ActorSystemImpl]] which has a configurable [[java.lang.Thread.UncaughtExceptionHandler]].
*/
class RobustActorSystem(
name: String,
applicationConfig: Config,
classLoader: ClassLoader,
defaultExecutionContext: Option[ExecutionContext],
guardianProps: Option[Props],
setup: ActorSystemSetup,
val optionalUncaughtExceptionHandler: Option[UncaughtExceptionHandler])
extends ActorSystemImpl(
name,
applicationConfig,
classLoader,
defaultExecutionContext,
guardianProps,
setup) {
override protected def uncaughtExceptionHandler: Thread.UncaughtExceptionHandler =
optionalUncaughtExceptionHandler.getOrElse(super.uncaughtExceptionHandler)
}
object RobustActorSystem {
def create(name: String, applicationConfig: Config): RobustActorSystem = {
apply(name, ActorSystemSetup.create(BootstrapSetup(None, Option(applicationConfig), None)))
}
def create(
name: String,
applicationConfig: Config,
uncaughtExceptionHandler: UncaughtExceptionHandler): RobustActorSystem = {
apply(
name,
ActorSystemSetup.create(BootstrapSetup(None, Option(applicationConfig), None)),
uncaughtExceptionHandler
)
}
def apply(name: String, setup: ActorSystemSetup): RobustActorSystem = {
internalApply(name, setup, Some(FatalExitExceptionHandler.INSTANCE))
}
def apply(
name: String,
setup: ActorSystemSetup,
uncaughtExceptionHandler: UncaughtExceptionHandler): RobustActorSystem = {
internalApply(name, setup, Some(uncaughtExceptionHandler))
}
def internalApply(
name: String,
setup: ActorSystemSetup,
uncaughtExceptionHandler: Option[UncaughtExceptionHandler]): RobustActorSystem = {
val bootstrapSettings = setup.get[BootstrapSetup]
val cl = bootstrapSettings.flatMap(_.classLoader).getOrElse(findClassLoader())
val appConfig = bootstrapSettings.flatMap(_.config).getOrElse(ConfigFactory.load(cl))
val defaultEC = bootstrapSettings.flatMap(_.defaultExecutionContext)
new RobustActorSystem(
name,
appConfig,
cl,
defaultEC,
None,
setup,
uncaughtExceptionHandler).start()
}
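// Illustrative usage sketch (assumed config, not part of the original source):
// build an actor system whose threads exit the JVM on uncaught fatal errors.
//
//   val system = RobustActorSystem.create(
//     "flink", ConfigFactory.load(), FatalExitExceptionHandler.INSTANCE)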
}
| hequn8128/flink | flink-runtime/src/main/scala/akka/actor/RobustActorSystem.scala | Scala | apache-2.0 | 3,329 |
package scala.c.engine
package ast
import org.eclipse.cdt.core.dom.ast._
import org.eclipse.cdt.internal.core.dom.parser.c._
import scala.annotation.switch
object Expressions {
def evaluate(expr: IASTInitializerClause)(implicit state: State): Option[ValueType] = expr match {
case exprList: IASTExpressionList =>
exprList.getExpressions.map{x => evaluate(x)}.last
case ternary: IASTConditionalExpression =>
val result = TypeHelper.resolveBoolean (evaluate(ternary.getLogicalConditionExpression).get)
val expr = if (result) {
ternary.getPositiveResultExpression
} else {
ternary.getNegativeResultExpression
}
evaluate(expr)
case cast: IASTCastExpression =>
val theType = TypeHelper.getType(cast.getTypeId).theType
val operand = evaluate(cast.getOperand).get
Some(operand match {
case str @ StringLiteral(_) => str
case LValue(addr, aType) =>
theType match {
case ptr: IPointerType if aType.isInstanceOf[IArrayType] =>
val newAddr = state.allocateSpace(4)
state.Stack.writeToMemory(addr, newAddr, theType)
LValue(state, newAddr, theType)
case _ => LValue(state, addr, theType)
}
case RValue(value, _) =>
val newAddr = state.allocateSpace(TypeHelper.sizeof(theType))
state.Stack.writeToMemory(TypeHelper.cast(theType, value).value, newAddr, theType)
LValue(state, newAddr, theType)
})
case fieldRef: IASTFieldReference =>
val struct = evaluate(fieldRef.getFieldOwner).get.asInstanceOf[LValue]
val structType = TypeHelper.resolveStruct(struct.theType)
val baseAddr = if (fieldRef.isPointerDereference) {
state.readPtrVal(struct.address)
} else {
struct.address
}
val field = TypeHelper.offsetof(structType, baseAddr, fieldRef.getFieldName.toString, state: State)
Some(field)
case subscript: IASTArraySubscriptExpression =>
val arrayVarPtr = evaluate(subscript.getArrayExpression).head
val baseValue = TypeHelper.resolve(evaluate(subscript.getArgument).get).value
val index = TypeHelper.cast(TypeHelper.intType, baseValue).value.asInstanceOf[Int]
arrayVarPtr match {
case Address(addr, theType) =>
val offset = addr + index * TypeHelper.sizeof(theType)
Some(LValue(state, offset, theType))
case lValue @ LValue(_, theType) =>
val aType = TypeHelper.getPointerType(theType)
val base = arrayVarPtr.theType match {
case _: IPointerType =>
state.readPtrVal(lValue.address)
case _: IArrayType =>
lValue.address
}
val offset = base + index * TypeHelper.sizeof(aType)
Some(LValue(state, offset, aType))
}
case unary: IASTUnaryExpression =>
Some(UnaryExpression.execute(unary))
case lit: IASTLiteralExpression =>
Some(Literal.cast(lit.toString))
case id: IASTIdExpression =>
Some(state.context.resolveId(id.getName).get)
case typeExpr: IASTTypeIdExpression =>
// used for sizeof calls on a type
val theType = TypeHelper.getType(typeExpr.getTypeId).theType
Some(RValue(TypeHelper.sizeof(theType), TypeHelper.intType))
case call: IASTFunctionCallExpression =>
val pop = evaluate(call.getFunctionNameExpression).head
val name = if (state.hasFunction(call.getFunctionNameExpression.getRawSignature)) {
call.getFunctionNameExpression.getRawSignature
} else {
val info = pop.asInstanceOf[LValue]
val resolved = TypeHelper.stripSyntheticTypeInfo(info.theType)
resolved match {
case _: IPointerType => state.getFunctionByIndex(info.rValue.value.asInstanceOf[Int]).name
}
}
state.callTheFunction(name, call, None)
case bin: IASTBinaryExpression =>
(bin.getOperator, evaluate(bin.getOperand1).head) match {
case (IASTBinaryExpression.op_logicalOr, op1 @ RValue(x: Boolean, _)) if x => Some(op1)
case (IASTBinaryExpression.op_logicalAnd, op1 @ RValue(x: Boolean, _)) if !x => Some(op1)
case (_, op1) =>
val op2 = evaluate(bin.getOperand2).head
val result = if (Utils.isAssignment(bin.getOperator)) {
Declarator.assign(op1.asInstanceOf[LValue], List(op2), bin.getOperand2, bin.getOperator)
} else {
BinaryExpr.evaluate(op1, op2, bin.getOperator)
}
Some(result)
}
}
}
| bdwashbu/cEngine | src/scala/c/engine/ast/Expressions.scala | Scala | apache-2.0 | 4,659 |
package keehive
case class Secret (
data: Map[String, String] = Map(),
timestamp: Long = System.currentTimeMillis):
def get(key: String): String = data.getOrElse(key, "")
def updated(kv: (String, String)): Secret = new Secret(data.updated(kv._1, kv._2))
lazy val maxKeyLength: Int = data.keys.map(_.length).maxOption.getOrElse(0) // safe when data is empty
lazy val pad: Map[String, String] =
data.map{ case (k,_) => (k, " " * (maxKeyLength - k.length)) }
def select(firstKeys: Seq[String], excludeKeys: Seq[String]): Seq[(String, String)] =
val otherKeys = (data.keySet diff firstKeys.toSet).toSeq.sorted
val selectedKeys = (firstKeys ++ otherKeys) diff excludeKeys
val kvs = selectedKeys.map(k => (k, get(k)))
kvs.filterNot(_._2.isEmpty)
def showLines(firstKeys: Seq[String], dontShow: Seq[String]): String =
val kvs = select(firstKeys, dontShow)
kvs.map{ case (k,v) => s"$k:${pad(k)} $v"}.mkString("\n")
def show(firstKeys: Seq[String], excludeKeys: Seq[String], padFirstTo: Int = 15 ): String =
val kvs = select(firstKeys, excludeKeys)
val xs = kvs.map{ case (k,v) => s"$k:$v" }
val padded = if xs.nonEmpty then xs.updated(0, xs.head.padTo(padFirstTo, ' ')) else xs
padded.mkString(" ")
def ageMillis: Long = System.currentTimeMillis - timestamp
def ageDays: Int = (ageMillis / (1000 * 60 * 60 * 24)).toInt
def resetTimestamp: Secret = new Secret(data, System.currentTimeMillis)
object Secret:
def empty: Secret = new Secret()
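// Illustrative usage sketch (assumed values, not part of the original source):
//
//   val s = Secret(Map("site" -> "example.com", "user" -> "alice"))
//   s.get("user")                                      // "alice"
//   s.showLines(firstKeys = Seq("site"), dontShow = Seq())
//   // site: example.com
//   // user: alice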
| bjornregnell/keehive | src/main/scala/keehive/Secret.scala | Scala | gpl-3.0 | 1,473 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.types
import org.json4s.JsonAST.JValue
import org.json4s.JsonDSL._
import org.apache.spark.annotation.Stable
import org.apache.spark.sql.catalyst.util.{escapeSingleQuotedString, quoteIdentifier}
/**
* A field inside a StructType.
* @param name The name of this field.
* @param dataType The data type of this field.
* @param nullable Indicates if values of this field can be `null` values.
* @param metadata The metadata of this field. The metadata should be preserved during
* transformation if the content of the column is not modified, e.g, in selection.
*
* @since 1.3.0
*/
@Stable
case class StructField(
name: String,
dataType: DataType,
nullable: Boolean = true,
metadata: Metadata = Metadata.empty) {
/** No-arg constructor for kryo. */
protected def this() = this(null, null)
private[sql] def buildFormattedString(prefix: String, builder: StringBuilder): Unit = {
builder.append(s"$prefix-- $name: ${dataType.typeName} (nullable = $nullable)\n")
DataType.buildFormattedString(dataType, s"$prefix |", builder)
}
// override the default toString to be compatible with legacy parquet files.
override def toString: String = s"StructField($name,$dataType,$nullable)"
private[sql] def jsonValue: JValue = {
("name" -> name) ~
("type" -> dataType.jsonValue) ~
("nullable" -> nullable) ~
("metadata" -> metadata.jsonValue)
}
/**
* Updates the StructField with a new comment value.
*/
def withComment(comment: String): StructField = {
val newMetadata = new MetadataBuilder()
.withMetadata(metadata)
.putString("comment", comment)
.build()
copy(metadata = newMetadata)
}
/**
* Return the comment of this StructField.
*/
def getComment(): Option[String] = {
if (metadata.contains("comment")) Option(metadata.getString("comment")) else None
}
/**
* Returns a string containing a schema in DDL format. For example, the following value:
* `StructField("eventId", IntegerType)` will be converted to `eventId` INT.
*
* @since 2.4.0
*/
def toDDL: String = {
val comment = getComment()
.map(escapeSingleQuotedString)
.map(" COMMENT '" + _ + "'")
s"${quoteIdentifier(name)} ${dataType.sql}${comment.getOrElse("")}"
}
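// Illustrative sketch (not part of the original source): toDDL for a commented field,
// following the Metadata API shown above.
//
//   StructField("eventId", IntegerType).withComment("event identifier").toDDL
//   // => `eventId` INT COMMENT 'event identifier'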
}
| pgandhi999/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructField.scala | Scala | apache-2.0 | 3,135 |
package net.scalytica.symbiotic.postgres
import net.scalytica.symbiotic.api.repository.RepositoryProvider
import net.scalytica.symbiotic.config.ConfigReader
import net.scalytica.symbiotic.fs.FileSystemIO
import net.scalytica.symbiotic.postgres.docmanagement.{
PostgresFSTreeRepository,
PostgresFileRepository,
PostgresFolderRepository,
PostgresIndexDataRepository
}
object PostgresRepositories extends RepositoryProvider {
lazy val config = ConfigReader.load()
lazy val fileSystemIO = new FileSystemIO(config)
override lazy val fileRepository =
new PostgresFileRepository(config, fileSystemIO)
override lazy val folderRepository = new PostgresFolderRepository(config)
override lazy val fsTreeRepository = new PostgresFSTreeRepository(config)
override lazy val indexDataRepository =
new PostgresIndexDataRepository(config, fileSystemIO)
}
| kpmeen/symbiotic | symbiotic-postgres/src/main/scala/net/scalytica/symbiotic/postgres/PostgresRepositories.scala | Scala | apache-2.0 | 874 |
package au.com.onegeek.respite.models
import au.com.onegeek.respite.config.TestConfigurationModule
import org.scalatest.concurrent.ScalaFutures
import au.com.onegeek.respite.models.User
import reactivemongo.bson.BSONObjectID
import play.api.libs.json._
import au.com.onegeek.respite.UnitSpec
class ModelTests extends UnitSpec with ScalaFutures {
implicit val bindingModule = TestConfigurationModule
"An Model Object" should {
"Serialise to a sane JSON format" in {
val user = new User(id = BSONObjectID("53af77a90100000100a16ffb"), username = "mfellows", firstName = "Matt")
println(Json.toJson(user))
Json.toJson(user).toString should equal("{\"id\":{\"$oid\":\"53af77a90100000100a16ffb\"},\"username\":\"mfellows\",\"firstName\":\"Matt\"}")
Json.toJson(user).toString should include ("53af77a90100000100a16ffb")
val user2 = new User(username = "Hillary", firstName = "Hillman")
println(Json.toJson(user2))
}
"Serialise from a sane JSON format" in {
val user = new User(id = BSONObjectID("53af77a90100000100a16ffb"), username = "mfellows", firstName = "Matt")
val json = "{\"id\":{\"$oid\":\"53af77a90100000100a16ffb\"},\"username\":\"mfellows\",\"firstName\":\"Matt\"}"
val user2: User = Json.parse(json).validate[User].get
println(user2)
user.id.stringify should equal(user2.id.stringify)
user should equal (user2)
}
"Validate JSON objects without an id (for creating one)" in {
val user = new User(username = "mfellows", firstName = "Matt")
val json = Json.toJson(user).toString()
val jsonToUser = "{\"username\":\"mfellows\",\"firstName\":\"Matt\"}"
val parsedUser = Json.parse(jsonToUser).validate[User].getOrElse( {println("Fail!"); fail("Validation failed for model without an ID")})
parsedUser.firstName should equal ("Matt")
println(parsedUser.id)
Json.parse(jsonToUser).validate[User] match {
case e: JsError => println(s"${e}"); fail("Validation Error")
case e: JsSuccess[User] => e.get.firstName should equal("Matt")
}
}
"Provide sensible validation error messages" in {
// val json = "{\"firstName\":\"Matt\"}"
// val json = "{\"_id\":{\"$oid\":\"53af77a90100000100a16ffb\"},\"firstName\":\"Matt\", \"username\":\"foo\"}"
val json = "{\"firstName\":\"Matt\", \"nousername\":\"foo\"}"
Json.parse(json).validate[User] match {
case e: JsError => println(s"${JsError.toFlatJson(e)}"); JsError.toFlatJson(e).toString should equal ("{\"obj.username\":[{\"msg\":\"error.path.missing\",\"args\":[]}]}")
case e: JsSuccess[User] => e.get.firstName should equal("Matt")
}
}
"Provide sensible validation error messages - ID sent but other fields not" in {
// val json = "{\"nofirstName\":\"Matt\", \"nousername\":\"foo\",\"id\":{\"$oid\":\"53af77a90100000100a16ffb\"}}"
val json = "{\"firstName\":\"Matt\", \"nousername\":\"foo\"}"
Json.parse(json).validate[User] match {
case e: JsError =>
println(s"${JsError.toFlatJson(e)}")
// TODO: It should probably equal this:
println(e)
JsError.toFlatJson(e).toString should equal ("{\"obj.username\":[{\"msg\":\"error.path.missing\",\"args\":[]}]}")
// But it actually equals this {"obj.id.username":[{"msg":"error.path.missing","args":[]}]}
// Note the id field sneaking its way in there.
}
}
}
}
| mefellows/respite | respite-core/src/test/scala/au/com/onegeek/respite/models/ModelTests.scala | Scala | mit | 3,476 |
package demo
package components
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.html_<^._
import org.scalajs.dom
import org.scalajs.dom.ext.PimpedNodeList
object CodeHighlight {
val component = ScalaComponent
.builder[String]("CodeHighLighter")
.render_P(P => <.code(^.`class` := "scala", ^.padding := "20px", P))
.configure(installSyntaxHighlighting)
.build
def installSyntaxHighlighting[P, C <: Children, S, B]: ScalaComponent.Config[P, C, S, B] =
_.componentDidMount(_ => applySyntaxHighlight)
.componentDidUpdate(_ => applySyntaxHighlight)
def applySyntaxHighlight = Callback {
import scala.scalajs.js.Dynamic.{global => g}
val nodeList = dom.document.querySelectorAll("code").toArray
nodeList.foreach(n => g.hljs.highlightBlock(n))
}
def apply(code: String) = component(code)
}
| rleibman/scalajs-react-components | demo/src/main/scala/demo/components/CodeHighlight.scala | Scala | apache-2.0 | 856 |
/**
* Copyright (C) 2013 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.fr.persistence.rest
import java.io.ByteArrayOutputStream
import java.net.URI
import org.dom4j.Document
import org.orbeon.oxf.fr.FormRunnerAuth._
import org.orbeon.oxf.fr.relational._
import org.orbeon.oxf.http.StreamedContent
import org.orbeon.oxf.util.ScalaUtils._
import org.orbeon.oxf.util._
import org.orbeon.oxf.xml.dom4j.Dom4jUtils
import scala.util.Try
private object HttpRequest {
private val PersistenceBase = "http://localhost:8080/orbeon/fr/service/persistence/"
case class Credentials(username: String, roles: Set[String], group: String)
sealed trait Body
case class XML (doc : Document ) extends Body
case class Binary(file: Array[Byte]) extends Body
private def request(
path : String,
method : String,
version : Version,
body : Option[Body],
credentials : Option[Credentials])(implicit
logger : IndentedLogger
): ConnectionResult = {
val documentURL = new URI(PersistenceBase + path)
val headers = {
import Version._
val versionHeader = version match {
case Unspecified ⇒ Nil
case Next ⇒ List(OrbeonFormDefinitionVersion → List("next"))
case Specific(version) ⇒ List(OrbeonFormDefinitionVersion → List(version.toString))
case ForDocument(documentId) ⇒ List(OrbeonForDocumentId → List(documentId))
}
val credentialHeaders = credentials.map(c ⇒ List(
OrbeonUsernameHeaderName → List(c.username),
OrbeonGroupHeaderName → List(c.group),
OrbeonRolesHeaderName → c.roles.to[List]
)).to[List].flatten
Connection.buildConnectionHeadersLowerIfNeeded(
scheme = documentURL.getScheme,
hasCredentials = false,
customHeaders = List(versionHeader, credentialHeaders).flatten.toMap,
headersToForward = Connection.headersToForwardFromProperty,
cookiesToForward = Connection.cookiesToForwardFromProperty
)
}
val contentType = body.map {
case XML (_) ⇒ "application/xml"
case Binary(_) ⇒ "application/octet-stream"
}
val messageBody = body map {
case XML (doc ) ⇒ Dom4jUtils.domToString(doc).getBytes
case Binary(file) ⇒ file
}
val content = messageBody map
(StreamedContent.fromBytes(_, contentType))
Connection(
httpMethodUpper = method,
url = documentURL,
credentials = None,
content = content,
headers = headers,
loadState = true,
logBody = false
).connect(
saveState = true
)
}
def put(url: String, version: Version, body: Body, credentials: Option[Credentials] = None)(implicit logger: IndentedLogger): Int =
useAndClose(request(url, "PUT", version, Some(body), credentials))(_.statusCode)
def del(url: String, version: Version, credentials: Option[Credentials] = None)(implicit logger: IndentedLogger): Int =
useAndClose(request(url, "DELETE", version, None, credentials))(_.statusCode)
def get(url: String, version: Version, credentials: Option[Credentials] = None)(implicit logger: IndentedLogger): (Int, Map[String, Seq[String]], Try[Array[Byte]]) =
useAndClose(request(url, "GET", version, None, credentials)) { cxr ⇒
val statusCode = cxr.statusCode
val headers = cxr.headers
val body =
useAndClose(cxr.content.inputStream) { inputStream ⇒
Try {
val outputStream = new ByteArrayOutputStream
NetUtils.copyStream(inputStream, outputStream)
outputStream.toByteArray
}
}
(statusCode, headers, body)
}
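// Illustrative usage sketch (assumed path, version, and logger, not part of the
// original source): PUT an XML document against the persistence layer and read
// the status code.
//
//   implicit val logger: IndentedLogger = ...
//   val status = put("crud/acme/orders/form/form.xhtml", Unspecified, XML(doc))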
}
| wesley1001/orbeon-forms | src/test/scala/org/orbeon/oxf/fr/persistence/rest/HttpRequest.scala | Scala | lgpl-2.1 | 4,404 |
package dotty.tools.languageserver
import org.junit.Test
import dotty.tools.dotc.reporting.ErrorMessageID._
import dotty.tools.languageserver.util.Code._
import org.eclipse.lsp4j.DiagnosticSeverity._
class DiagnosticsTest {
@Test def diagnosticWrongType: Unit =
code"""object Test {
| val x: Int = $m1"foo"$m2
|}""".withSource
.diagnostics(m1,
(m1 to m2, """Found: ("foo" : String)
|Required: Int""".stripMargin, Error, Some(TypeMismatchID))
)
@Test def diagnosticMissingLambdaBody: Unit =
code"""object Test {
| Nil.map(x => x).filter(x$m1 =>$m2)$m3
|}""".withSource
.diagnostics(m1,
(m2 to m2, "expression expected but ')' found", Error, Some(IllegalStartSimpleExprID))
)
@Test def diagnosticPureExpression: Unit =
code"""object Test {
| ${m1}1$m2
|}""".withSource
.diagnostics(m1,
(m1 to m2,
"A pure expression does nothing in statement position; you may be omitting necessary parentheses",
Warning, Some(PureExpressionInStatementPositionID)))
@Test def diagnosticWorksheetPureExpression: Unit =
ws"""${m1}1""".withSource
.diagnostics(m1 /* no "pure expression" warning because this is a worksheet */)
}
| dotty-staging/dotty | language-server/test/dotty/tools/languageserver/DiagnosticsTest.scala | Scala | apache-2.0 | 1,297 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy
import java.io.File
import java.net.URI
import org.apache.commons.lang3.StringUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.{SecurityManager, SparkConf, SparkException}
import org.apache.spark.internal.Logging
import org.apache.spark.util.{MutableURLClassLoader, Utils}
private[deploy] object DependencyUtils extends Logging {
def resolveMavenDependencies(
packagesExclusions: String,
packages: String,
repositories: String,
ivyRepoPath: String,
ivySettingsPath: Option[String]): String = {
val exclusions: Seq[String] =
if (!StringUtils.isBlank(packagesExclusions)) {
packagesExclusions.split(",")
} else {
Nil
}
// Create the IvySettings, either load from file or build defaults
val ivySettings = ivySettingsPath match {
case Some(path) =>
SparkSubmitUtils.loadIvySettings(path, Option(repositories), Option(ivyRepoPath))
case None =>
SparkSubmitUtils.buildIvySettings(Option(repositories), Option(ivyRepoPath))
}
SparkSubmitUtils.resolveMavenCoordinates(packages, ivySettings, exclusions = exclusions)
}
def resolveAndDownloadJars(
jars: String,
userJar: String,
sparkConf: SparkConf,
hadoopConf: Configuration,
secMgr: SecurityManager): String = {
val targetDir = Utils.createTempDir()
Option(jars)
.map {
resolveGlobPaths(_, hadoopConf)
.split(",")
.filterNot(_.contains(userJar.split("/").last))
.mkString(",")
}
.filterNot(_ == "")
.map(downloadFileList(_, targetDir, sparkConf, hadoopConf, secMgr))
.orNull
}
def addJarsToClassPath(jars: String, loader: MutableURLClassLoader): Unit = {
if (jars != null) {
for (jar <- jars.split(",")) {
addJarToClasspath(jar, loader)
}
}
}
/**
* Download a list of remote files to temp local files. If the file is local, the original file
* will be returned.
*
* @param fileList A comma separated file list.
* @param targetDir A temporary directory to which the files are downloaded.
* @param sparkConf Spark configuration.
* @param hadoopConf Hadoop configuration.
* @param secMgr Spark security manager.
* @return A comma separated local files list.
*/
def downloadFileList(
fileList: String,
targetDir: File,
sparkConf: SparkConf,
hadoopConf: Configuration,
secMgr: SecurityManager): String = {
require(fileList != null, "fileList cannot be null.")
Utils.stringToSeq(fileList)
.map(downloadFile(_, targetDir, sparkConf, hadoopConf, secMgr))
.mkString(",")
}
/**
* Download a file from the remote to a local temporary directory. If the input path points to
* a local path, returns it with no operation.
*
* @param path A file path from where the files will be downloaded.
* @param targetDir A temporary directory to which the files are downloaded.
* @param sparkConf Spark configuration.
* @param hadoopConf Hadoop configuration.
* @param secMgr Spark security manager.
* @return Path to the local file.
*/
def downloadFile(
path: String,
targetDir: File,
sparkConf: SparkConf,
hadoopConf: Configuration,
secMgr: SecurityManager): String = {
require(path != null, "path cannot be null.")
val uri = Utils.resolveURI(path)
uri.getScheme match {
case "file" | "local" => path
case "http" | "https" | "ftp" if Utils.isTesting =>
// This is only used for SparkSubmitSuite unit test. Instead of downloading file remotely,
// return a dummy local path instead.
val file = new File(uri.getPath)
new File(targetDir, file.getName).toURI.toString
case _ =>
val fname = new Path(uri).getName()
val localFile = Utils.doFetchFile(uri.toString(), targetDir, fname, sparkConf, secMgr,
hadoopConf)
localFile.toURI().toString()
}
}
def resolveGlobPaths(paths: String, hadoopConf: Configuration): String = {
require(paths != null, "paths cannot be null.")
Utils.stringToSeq(paths).flatMap { path =>
val (base, fragment) = splitOnFragment(path)
(resolveGlobPath(base, hadoopConf), fragment) match {
case (resolved, Some(_)) if resolved.length > 1 => throw new SparkException(
s"${base.toString} resolves ambiguously to multiple files: ${resolved.mkString(",")}")
case (resolved, Some(namedAs)) => resolved.map(_ + "#" + namedAs)
case (resolved, _) => resolved
}
}.mkString(",")
}
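// Illustrative sketch (assumed paths, not part of the original source): globs are
// expanded per scheme, and a '#renamed' fragment is re-attached to a unique match.
//
//   resolveGlobPaths("hdfs:/libs/dep-*.jar#dep.jar,/tmp/local.jar", hadoopConf)
//   // => "hdfs:/libs/dep-1.0.jar#dep.jar,/tmp/local.jar" when exactly one file matches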
def addJarToClasspath(localJar: String, loader: MutableURLClassLoader): Unit = {
val uri = Utils.resolveURI(localJar)
uri.getScheme match {
case "file" | "local" =>
val file = new File(uri.getPath)
if (file.exists()) {
loader.addURL(file.toURI.toURL)
} else {
logWarning(s"Local jar $file does not exist, skipping.")
}
case _ =>
logWarning(s"Skip remote jar $uri.")
}
}
/**
* Merge a sequence of comma-separated file lists, some of which may be null to indicate
* no files, into a single comma-separated string.
*/
def mergeFileLists(lists: String*): String = {
val merged = lists.filterNot(StringUtils.isBlank)
.flatMap(Utils.stringToSeq)
if (merged.nonEmpty) merged.mkString(",") else null
}
private def splitOnFragment(path: String): (URI, Option[String]) = {
val uri = Utils.resolveURI(path)
val withoutFragment = new URI(uri.getScheme, uri.getSchemeSpecificPart, null)
(withoutFragment, Option(uri.getFragment))
}
private def resolveGlobPath(uri: URI, hadoopConf: Configuration): Array[String] = {
uri.getScheme match {
case "local" | "http" | "https" | "ftp" => Array(uri.toString)
case _ =>
val fs = FileSystem.get(uri, hadoopConf)
Option(fs.globStatus(new Path(uri))).map { status =>
status.filter(_.isFile).map(_.getPath.toUri.toString)
}.getOrElse(Array(uri.toString))
}
}
}
| tejasapatil/spark | core/src/main/scala/org/apache/spark/deploy/DependencyUtils.scala | Scala | apache-2.0 | 6,974 |
package app.circumstances
import utils.WithJsBrowser
import app.FunctionalTestCommon
import utils.pageobjects.circumstances.start_of_process.GReportChangesPage
import utils.pageobjects.xml_validation.{XMLBusinessValidation, XMLCircumstancesBusinessValidation}
import utils.pageobjects.{Page, PageObjects, TestData, XmlPage}
class FunctionalTestCase2Spec extends FunctionalTestCommon {
isolated
section("functional")
"The application Circumstances" should {
"Successfully run absolute Circumstances Test Case 2" in new WithJsBrowser with PageObjects {
val page = GReportChangesPage(context)
val circs = TestData.readTestDataFromFile("/functional_scenarios/circumstances/TestCase2.csv")
page goToThePage()
val lastPage = page runClaimWith(circs)
lastPage match {
case p: XmlPage => {
val validator: XMLBusinessValidation = new XMLCircumstancesBusinessValidation
validateAndPrintErrors(p, circs, validator) should beTrue
}
case p: Page => println(p.source)
}
}
}
section("functional")
}
| Department-for-Work-and-Pensions/ClaimCapture | c3/test/app/circumstances/FunctionalTestCase2Spec.scala | Scala | mit | 1,088 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import scala.collection.immutable.ListSet
import Suite.autoTagClassAnnotations
/**
* A suite of property-based tests.
*
* <table><tr><td class="usage">
* <strong>Recommended Usage</strong>:
* Class <code>PropSpec</code> is a good fit for teams that want to write tests exclusively in terms of property checks, and is also a good choice
* for writing the occasional <a href="#testMatrix">test matrix</a> when a different style trait is chosen as the main unit testing style.
* </td></tr></table>
*
* Here's an example <code>PropSpec</code>:
*
* <pre class="stHighlight">
* package org.scalatest.examples.propspec
*
* import org.scalatest._
* import prop._
* import scala.collection.immutable._
*
* class SetSpec extends PropSpec with TableDrivenPropertyChecks with Matchers {
*
* val examples =
* Table(
* "set",
* BitSet.empty,
* HashSet.empty[Int],
* TreeSet.empty[Int]
* )
*
* property("an empty Set should have size 0") {
* forAll(examples) { set =>
* set.size should be (0)
* }
* }
*
* property("invoking head on an empty set should produce NoSuchElementException") {
* forAll(examples) { set =>
* evaluating { set.head } should produce [NoSuchElementException]
* }
* }
* }
* </pre>
*
* <p>
* You can run a <code>PropSpec</code> by invoking <code>execute</code> on it.
* This method, which prints test results to the standard output, is intended to serve as a
* convenient way to run tests from within the Scala interpreter. For example,
* to run <code>SetSpec</code> from within the Scala interpreter, you could write:
* </p>
*
* <pre class="stREPL">
* scala> new SetSpec execute
* </pre>
*
* <p>
* And you would see:
* </p>
*
* <pre class="stREPL">
* <span class="stGreen">SetSpec:
* - an empty Set should have size 0
* - invoking head on an empty Set should produce NoSuchElementException</span>
* </pre>
*
* <p>
* Or, to run just the “<code>an empty Set should have size 0</code>” method, you could pass that test's name, or any unique substring of the
* name, such as <code>"size 0"</code> or even just <code>"0"</code>. Here's an example:
* </p>
*
* <pre class="stREPL">
* scala> new SetSpec execute "size 0"
* <span class="stGreen">SetSpec:
* - an empty Set should have size 0</span>
* </pre>
*
* <p>
* You can also pass to <code>execute</code> a <a href="ConfigMap.html"><em>config map</em></a> of key-value
* pairs, which will be passed down into suites and tests, as well as other parameters that configure the run itself.
* For more information on running in the Scala interpreter, see the documentation for <code>execute</code> (below) and the
* <a href="Shell.html">ScalaTest shell</a>.
* </p>
*
* <p>
* The <code>execute</code> method invokes a <code>run</code> method that takes two
* parameters. This <code>run</code> method, which actually executes the suite, will usually be invoked by a test runner, such
* as <a href="run$.html"><code>run</code></a>, <a href="tools/Runner$.html"><code>tools.Runner</code></a>, a build tool, or an IDE.
* </p>
*
* <p>
* “<code>property</code>” is a method, defined in <code>PropSpec</code>, which will be invoked
* by the primary constructor of <code>SetSpec</code>. You specify the name of the test as
* a string between the parentheses, and the test code itself between curly braces.
* The test code is a function passed as a by-name parameter to <code>property</code>, which registers
* it for later execution.
* </p>
*
* <p>
* A <code>PropSpec</code>'s lifecycle has two phases: the <em>registration</em> phase and the
* <em>ready</em> phase. It starts in registration phase and enters ready phase the first time
* <code>run</code> is called on it. It then remains in ready phase for the remainder of its lifetime.
* </p>
*
* <p>
* Tests can only be registered with the <code>property</code> method while the <code>PropSpec</code> is
* in its registration phase. Any attempt to register a test after the <code>PropSpec</code> has
* entered its ready phase, <em>i.e.</em>, after <code>run</code> has been invoked on the <code>PropSpec</code>,
* will be met with a thrown <a href="exceptions/TestRegistrationClosedException.html"><code>TestRegistrationClosedException</code></a>. The recommended style
* of using <code>PropSpec</code> is to register tests during object construction as is done in all
* the examples shown here. If you keep to the recommended style, you should never see a
* <code>TestRegistrationClosedException</code>.
* </p>
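 *
 * <p>
 * For example, the following sketch shows the kind of code that would trigger the exception: the nested
 * <code>property</code> call executes only after <code>run</code> has been invoked, when registration is closed:
 * </p>
 *
 * <pre class="stHighlight">
 * class BadSpec extends PropSpec {
 *   property("outer") {
 *     property("inner") { } // throws TestRegistrationClosedException at runtime
 *   }
 * }
 * </pre>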
*
* <h2>Ignored tests</h2>
*
* <p>
* To support the common use case of temporarily disabling a test, with the
* good intention of resurrecting the test at a later time, <code>PropSpec</code> provides registration
* methods that start with <code>ignore</code> instead of <code>property</code>. Here's an example:
* </p>
*
* <pre class="stHighlight">
 * package org.scalatest.examples.propspec.ignore
*
* import org.scalatest._
* import prop._
* import scala.collection.immutable._
*
* class SetSpec extends PropSpec with TableDrivenPropertyChecks with Matchers {
*
* val examples =
* Table(
* "set",
* BitSet.empty,
* HashSet.empty[Int],
* TreeSet.empty[Int]
* )
*
* ignore("an empty Set should have size 0") {
* forAll(examples) { set =>
* set.size should be (0)
* }
* }
*
* property("invoking head on an empty set should produce NoSuchElementException") {
* forAll(examples) { set =>
* evaluating { set.head } should produce [NoSuchElementException]
* }
* }
* }
* </pre>
*
* <p>
 * If you run this version of <code>SetSpec</code> with:
* </p>
*
* <pre class="stREPL">
* scala> new SetSpec execute
* </pre>
*
* <p>
* It will run only the second test and report that the first test was ignored:
* </p>
*
* <pre class="stREPL">
 * <span class="stGreen">SetSpec:</span>
* <span class="stYellow">- an empty Set should have size 0 !!! IGNORED !!!</span>
 * <span class="stGreen">- invoking head on an empty set should produce NoSuchElementException</span>
* </pre>
*
 * <a name="informers"></a><h2>Informers</h2>
*
* <p>
* One of the parameters to <code>PropSpec</code>'s <code>run</code> method is a <a href="Reporter.html"><code>Reporter</code></a>, which
* will collect and report information about the running suite of tests.
* Information about suites and tests that were run, whether tests succeeded or failed,
* and tests that were ignored will be passed to the <code>Reporter</code> as the suite runs.
* Most often the reporting done by default by <code>PropSpec</code>'s methods will be sufficient, but
* occasionally you may wish to provide custom information to the <code>Reporter</code> from a test.
* For this purpose, an <a href="Informer.html"><code>Informer</code></a> that will forward information
* to the current <code>Reporter</code> is provided via the <code>info</code> parameterless method.
* You can pass the extra information to the <code>Informer</code> via its <code>apply</code> method.
* The <code>Informer</code> will then pass the information to the <code>Reporter</code> via an <a href="events/InfoProvided.html"><code>InfoProvided</code></a> event.
* Here's an example that shows both a direct use as well as an indirect use through the methods
* of <a href="GivenWhenThen.html"><code>GivenWhenThen</code></a>:
* </p>
*
* <pre class="stHighlight">
* package org.scalatest.examples.propspec.info
*
* import org.scalatest._
* import prop._
* import collection.mutable
*
* class SetSpec extends PropSpec with TableDrivenPropertyChecks with GivenWhenThen {
*
* val examples =
* Table(
* "set",
* mutable.BitSet.empty,
* mutable.HashSet.empty[Int],
* mutable.LinkedHashSet.empty[Int]
* )
*
* property("an element can be added to an empty mutable Set") {
*
* forAll(examples) { set =>
*
* info("----------------")
*
* Given("an empty mutable " + set.getClass.getSimpleName)
* assert(set.isEmpty)
*
* When("an element is added")
* set += 99
*
* Then("the Set should have size 1")
* assert(set.size === 1)
*
* And("the Set should contain the added element")
* assert(set.contains(99))
* }
* }
* }
* </pre>
*
 *
 * <p>
 * If you run this <code>PropSpec</code> from the interpreter, you will see the following output:
 * </p>
*
* <pre class="stREPL">
* scala> new SetSpec execute
* <span class="stGreen">SetSpec:
* - an element can be added to an empty mutable Set
* + ----------------
* + Given an empty mutable BitSet
* + When an element is added
* + Then the Set should have size 1
* + And the Set should contain the added element
* + ----------------
* + Given an empty mutable HashSet
* + When an element is added
* + Then the Set should have size 1
* + And the Set should contain the added element
* + ----------------
* + Given an empty mutable LinkedHashSet
* + When an element is added
* + Then the Set should have size 1
* + And the Set should contain the added element</span>
* </pre>
*
 * <a name="documenters"></a><h2>Documenters</h2>
*
* <p>
* <code>PropSpec</code> also provides a <code>markup</code> method that returns a <a href="Documenter.html"><code>Documenter</code></a>, which allows you to send
* to the <code>Reporter</code> text formatted in <a href="http://daringfireball.net/projects/markdown/" target="_blank">Markdown syntax</a>.
* You can pass the extra information to the <code>Documenter</code> via its <code>apply</code> method.
* The <code>Documenter</code> will then pass the information to the <code>Reporter</code> via an <a href="events/MarkupProvided.html"><code>MarkupProvided</code></a> event.
* </p>
*
* <p>
* Here's an example <code>PropSpec</code> that uses <code>markup</code>:
* </p>
*
* <pre class="stHighlight">
* package org.scalatest.examples.propspec.markup
*
* import org.scalatest._
* import prop._
* import collection.mutable
*
* class SetSpec extends PropSpec with TableDrivenPropertyChecks with GivenWhenThen {
*
* markup { """
*
* Mutable Set
* -----------
*
* A set is a collection that contains no duplicate elements.
*
* To implement a concrete mutable set, you need to provide implementations
* of the following methods:
*
* def contains(elem: A): Boolean
* def iterator: Iterator[A]
* def += (elem: A): this.type
* def -= (elem: A): this.type
*
* If you wish that methods like `take`,
* `drop`, `filter` return the same kind of set,
* you should also override:
*
* def empty: This
*
* It is also good idea to override methods `foreach` and
* `size` for efficiency.
*
* """ }
*
* val examples =
* Table(
* "set",
* mutable.BitSet.empty,
* mutable.HashSet.empty[Int],
* mutable.LinkedHashSet.empty[Int]
* )
*
* property("an element can be added to an empty mutable Set") {
*
* forAll(examples) { set =>
*
* info("----------------")
*
* Given("an empty mutable " + set.getClass.getSimpleName)
* assert(set.isEmpty)
*
* When("an element is added")
* set += 99
*
* Then("the Set should have size 1")
* assert(set.size === 1)
*
* And("the Set should contain the added element")
* assert(set.contains(99))
* }
*
* markup("This test finished with a **bold** statement!")
* }
* }
* </pre>
*
* <p>
* Although all of ScalaTest's built-in reporters will display the markup text in some form,
* the HTML reporter will format the markup information into HTML. Thus, the main purpose of <code>markup</code> is to
* add nicely formatted text to HTML reports. Here's what the above <code>SetSpec</code> would look like in the HTML reporter:
* </p>
*
* <img class="stScreenShot" src="../../lib/propSpec.gif">
*
 * <a name="notifiersAlerters"></a><h2>Notifiers and alerters</h2>
*
* <p>
* ScalaTest records text passed to <code>info</code> and <code>markup</code> during tests, and sends the recorded text in the <code>recordedEvents</code> field of
* test completion events like <code>TestSucceeded</code> and <code>TestFailed</code>. This allows string reporters (like the standard out reporter) to show
* <code>info</code> and <code>markup</code> text <em>after</em> the test name in a color determined by the outcome of the test. For example, if the test fails, string
* reporters will show the <code>info</code> and <code>markup</code> text in red. If a test succeeds, string reporters will show the <code>info</code>
* and <code>markup</code> text in green. While this approach helps the readability of reports, it means that you can't use <code>info</code> to get status
* updates from long running tests.
* </p>
*
* <p>
* To get immediate (<em>i.e.</em>, non-recorded) notifications from tests, you can use <code>note</code> (a <a href="Notifier.html"><code>Notifier</code></a>) and <code>alert</code>
* (an <a href="Alerter.html"><code>Alerter</code></a>). Here's an example showing the differences:
* </p>
*
* <pre class="stHighlight">
* package org.scalatest.examples.propspec.note
*
* import org.scalatest._
* import prop._
* import collection.mutable
*
* class SetSpec extends PropSpec with TableDrivenPropertyChecks {
*
* val examples =
* Table(
* "set",
* mutable.BitSet.empty,
* mutable.HashSet.empty[Int],
* mutable.LinkedHashSet.empty[Int]
* )
*
* property("an element can be added to an empty mutable Set") {
*
* info("info is recorded")
* markup("markup is *also* recorded")
* note("notes are sent immediately")
* alert("alerts are also sent immediately")
*
* forAll(examples) { set =>
*
* assert(set.isEmpty)
* set += 99
* assert(set.size === 1)
* assert(set.contains(99))
* }
* }
* }
* </pre>
*
* <p>
* Because <code>note</code> and <code>alert</code> information is sent immediately, it will appear <em>before</em> the test name in string reporters, and its color will
* be unrelated to the ultimate outcome of the test: <code>note</code> text will always appear in green, <code>alert</code> text will always appear in yellow.
* Here's an example:
* </p>
*
* <pre class="stREPL">
* scala> new SetSpec execute
* <span class="stGreen">SetSpec:
* + notes are sent immediately</span>
* <span class="stYellow">+ alerts are also sent immediately</span>
* <span class="stGreen">- an element can be added to an empty mutable Set
* + info is recorded
* + markup is *also* recorded</span>
* </pre>
*
* <p>
* In summary, use <code>info</code> and <code>markup</code> for text that should form part of the specification output. Use
* <code>note</code> and <code>alert</code> to send status notifications. (Because the HTML reporter is intended to produce a
* readable, printable specification, <code>info</code> and <code>markup</code> text will appear in the HTML report, but
* <code>note</code> and <code>alert</code> text will not.)
* </p>
*
 * <a name="pendingTests"></a><h2>Pending tests</h2>
*
* <p>
* A <em>pending test</em> is one that has been given a name but is not yet implemented. The purpose of
* pending tests is to facilitate a style of testing in which documentation of behavior is sketched
* out before tests are written to verify that behavior (and often, before the behavior of
* the system being tested is itself implemented). Such sketches form a kind of specification of
* what tests and functionality to implement later.
* </p>
*
* <p>
* To support this style of testing, a test can be given a name that specifies one
* bit of behavior required by the system being tested. The test can also include some code that
* sends more information about the behavior to the reporter when the tests run. At the end of the test,
* it can call method <code>pending</code>, which will cause it to complete abruptly with <a href="exceptions/TestPendingException.html"><code>TestPendingException</code></a>.
* </p>
*
* <p>
* Because tests in ScalaTest can be designated as pending with <code>TestPendingException</code>, both the test name and any information
* sent to the reporter when running the test can appear in the report of a test run.
* (The code of a pending test is executed just like any other test.) However, because the test completes abruptly
* with <code>TestPendingException</code>, the test will be reported as pending, to indicate
* the actual test, and possibly the functionality, has not yet been implemented.
* </p>
*
* <p>
* You can mark tests pending in <code>PropSpec</code> like this:
* </p>
*
* <pre class="stHighlight">
* import org.scalatest._
* import prop._
* import scala.collection.immutable._
*
* class SetSpec extends PropSpec with TableDrivenPropertyChecks with Matchers {
*
* val examples =
* Table(
* "set",
* BitSet.empty,
* HashSet.empty[Int],
* TreeSet.empty[Int]
* )
*
* property("an empty Set should have size 0") (pending)
*
* property("invoking head on an empty set should produce NoSuchElementException") {
* forAll(examples) { set =>
* evaluating { set.head } should produce [NoSuchElementException]
* }
* }
* }
* </pre>
*
* <p>
* (Note: "<code>(pending)</code>" is the body of the test. Thus the test contains just one statement, an invocation
* of the <code>pending</code> method, which throws <code>TestPendingException</code>.)
 * If you run this version of <code>SetSpec</code> with:
* </p>
*
* <pre class="stREPL">
 * scala> new SetSpec execute
* </pre>
*
* <p>
 * It will run both tests, but report that the first test is pending. You'll see:
* </p>
*
* <pre class="stREPL">
 * <span class="stGreen">SetSpec:</span>
 * <span class="stYellow">- an empty Set should have size 0 (pending)</span>
 * <span class="stGreen">- invoking head on an empty set should produce NoSuchElementException</span>
* </pre>
*
* <p>
 * One difference between an ignored test and a pending one is that an ignored test is intended to be used during
* significant refactorings of the code under test, when tests break and you don't want to spend the time to fix
* all of them immediately. You can mark some of those broken tests as ignored temporarily, so that you can focus the red
* bar on just failing tests you actually want to fix immediately. Later you can go back and fix the ignored tests.
* In other words, by ignoring some failing tests temporarily, you can more easily notice failed tests that you actually
* want to fix. By contrast, a pending test is intended to be used before a test and/or the code under test is written.
* Pending indicates you've decided to write a test for a bit of behavior, but either you haven't written the test yet, or
* have only written part of it, or perhaps you've written the test but don't want to implement the behavior it tests
* until after you've implemented a different bit of behavior you realized you need first. Thus ignored tests are designed
* to facilitate refactoring of existing code whereas pending tests are designed to facilitate the creation of new code.
* </p>
*
* <p>
* One other difference between ignored and pending tests is that ignored tests are implemented as a test tag that is
* excluded by default. Thus an ignored test is never executed. By contrast, a pending test is implemented as a
* test that throws <code>TestPendingException</code> (which is what calling the <code>pending</code> method does). Thus
 * the bodies of pending tests are executed up until they throw <code>TestPendingException</code>. The reason for this difference
* is that it enables your unfinished test to send <code>InfoProvided</code> messages to the reporter before it completes
* abruptly with <code>TestPendingException</code>, as shown in the previous example on <code>Informer</code>s
* that used the <code>GivenWhenThen</code> trait.
* </p>
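 *
 * <p>
 * For example (a minimal sketch), a pending test can still send information to the reporter before it
 * completes abruptly:
 * </p>
 *
 * <pre class="stHighlight">
 * property("an empty Set should have size 0") {
 *   info("sketching out the behavior; assertions to be written later")
 *   pending
 * }
 * </pre>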
*
* <a name="taggingTests"></a><h2>Tagging tests</h2>
*
* <p>
* A <code>PropSpec</code>'s tests may be classified into groups by <em>tagging</em> them with string names.
* As with any suite, when executing a <code>PropSpec</code>, groups of tests can
* optionally be included and/or excluded. To tag a <code>PropSpec</code>'s tests,
* you pass objects that extend class <code>org.scalatest.Tag</code> to methods
* that register tests. Class <code>Tag</code> takes one parameter, a string name. If you have
* created tag annotation interfaces as described in the <a href="Tag.html"><code>Tag</code> documentation</a>, then you
* will probably want to use tag names on your test functions that match. To do so, simply
* pass the fully qualified names of the tag interfaces to the <code>Tag</code> constructor. For example, if you've
* defined tag annotation interfaces with fully qualified names, <code>com.mycompany.tags.SlowTest</code> and
* <code>com.mycompany.tags.DbTest</code>, then you could
* create matching tags for <code>PropSpec</code>s like this:
* </p>
*
* <pre class="stHighlight">
* package org.scalatest.examples.propspec.tagging
*
* import org.scalatest.Tag
*
* object SlowTest extends Tag("com.mycompany.tags.SlowTest")
* object DbTest extends Tag("com.mycompany.tags.DbTest")
* </pre>
*
* <p>
* Given these definitions, you could place <code>PropSpec</code> tests into groups like this:
* </p>
*
* <pre class="stHighlight">
* import org.scalatest._
* import prop._
* import scala.collection.immutable._
*
* class SetSpec extends PropSpec with TableDrivenPropertyChecks with Matchers {
*
* val examples =
* Table(
* "set",
* BitSet.empty,
* HashSet.empty[Int],
* TreeSet.empty[Int]
* )
*
* property("an empty Set should have size 0", SlowTest) {
* forAll(examples) { set =>
* set.size should be (0)
* }
* }
*
* property("invoking head on an empty set should produce NoSuchElementException",
* SlowTest, DbTest) {
*
* forAll(examples) { set =>
* evaluating { set.head } should produce [NoSuchElementException]
* }
* }
* }
* </pre>
*
* <p>
* This code marks both tests with the <code>com.mycompany.tags.SlowTest</code> tag,
* and the second test with the <code>com.mycompany.tags.DbTest</code> tag.
* </p>
*
* <p>
* The <code>run</code> method takes a <code>Filter</code>, whose constructor takes an optional
* <code>Set[String]</code> called <code>tagsToInclude</code> and a <code>Set[String]</code> called
* <code>tagsToExclude</code>. If <code>tagsToInclude</code> is <code>None</code>, all tests will be run
 * except those belonging to tags listed in the
* <code>tagsToExclude</code> <code>Set</code>. If <code>tagsToInclude</code> is defined, only tests
* belonging to tags mentioned in the <code>tagsToInclude</code> set, and not mentioned in <code>tagsToExclude</code>,
* will be run.
* </p>
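 *
 * <p>
 * For example, using <a href="tools/Runner$.html"><code>tools.Runner</code></a> you could exclude the slow
 * tests with <code>-l</code>, or run only the database tests with <code>-n</code> (the classpath and runpath
 * below are placeholders):
 * </p>
 *
 * <pre class="stREPL">
 * $ scala -cp scalatest.jar org.scalatest.tools.Runner -R target/classes -o -l com.mycompany.tags.SlowTest
 * $ scala -cp scalatest.jar org.scalatest.tools.Runner -R target/classes -o -n com.mycompany.tags.DbTest
 * </pre>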
*
* <a name="sharedFixtures"></a>
* <h2>Shared fixtures</h2>
*
* <p>
* A test <em>fixture</em> is composed of the objects and other artifacts (files, sockets, database
* connections, <em>etc.</em>) tests use to do their work.
 * When multiple tests need to work with the same fixtures, it is important to try to avoid
* duplicating the fixture code across those tests. The more code duplication you have in your
* tests, the greater drag the tests will have on refactoring the actual production code.
* </p>
*
* <p>
* ScalaTest recommends three techniques to eliminate such code duplication:
* </p>
*
* <ul>
* <li>Refactor using Scala</li>
* <li>Override <code>withFixture</code></li>
* <li>Mix in a <em>before-and-after</em> trait</li>
* </ul>
*
* <p>Each technique is geared towards helping you reduce code duplication without introducing
* instance <code>var</code>s, shared mutable objects, or other dependencies between tests. Eliminating shared
* mutable state across tests will make your test code easier to reason about and more amenable for parallel
* test execution.</p>
*
* <p>
* The techniques in <code>PropSpec</code> are identical to those in <code>FunSuite</code>, but with “<code>test</code>”
* replaced by “<code>property</code>”. The following table summarizes the options with a link to the relevant
* documentation for trait <code>FunSuite</code>:
* </p>
*
* <table style="border-collapse: collapse; border: 1px solid black">
*
* <tr>
* <td colspan="2" style="background-color: #CCCCCC; border-width: 1px; padding: 3px; padding-top: 7px; border: 1px solid black; text-align: left">
* <strong>Refactor using Scala when different tests need different fixtures.</strong>
* </td>
* </tr>
*
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* <a href="FunSuite.html#getFixtureMethods">get-fixture methods</a>
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
 * The <em>extract method</em> refactor helps you create fresh instances of mutable fixture objects in each test
* that needs them, but doesn't help you clean them up when you're done.
* </td>
* </tr>
*
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* <a href="FunSuite.html#fixtureContextObjects">fixture-context objects</a>
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* By placing fixture methods and fields into traits, you can easily give each test just the newly created
* fixtures it needs by mixing together traits. Use this technique when you need <em>different combinations
* of mutable fixture objects in different tests</em>, and don't need to clean up after.
* </td>
* </tr>
*
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* <a href="FunSuite.html#loanFixtureMethods">loan-fixture methods</a>
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
 * Factor out duplicate code with the <em>loan pattern</em> when different tests need different fixtures <em>that must be cleaned up afterwards</em>.
* </td>
* </tr>
*
* <tr>
* <td colspan="2" style="background-color: #CCCCCC; border-width: 1px; padding: 3px; padding-top: 7px; border: 1px solid black; text-align: left">
* <strong>Override <code>withFixture</code> when most or all tests need the same fixture.</strong>
* </td>
* </tr>
*
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* <a href="FunSuite.html#withFixtureNoArgTest">
* <code>withFixture(NoArgTest)</code></a>
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* <p>
* The recommended default approach when most or all tests need the same fixture treatment. This general technique
* allows you, for example, to perform side effects at the beginning and end of all or most tests,
 * transform the outcome of tests, retry tests, and make decisions based on test names, tags, or other test data.
* Use this technique unless:
* </p>
* <ul>
* <li>Different tests need different fixtures (refactor using Scala instead)</li>
* <li>An exception in fixture code should abort the suite, not fail the test (use a <em>before-and-after</em> trait instead)</li>
* <li>You have objects to pass into tests (override <code>withFixture(<em>One</em>ArgTest)</code> instead)</li>
* </ul>
* </td>
* </tr>
*
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* <a href="FunSuite.html#withFixtureOneArgTest">
* <code>withFixture(OneArgTest)</code>
* </a>
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* Use when you want to pass the same fixture object or objects as a parameter into all or most tests.
* </td>
* </tr>
*
* <tr>
* <td colspan="2" style="background-color: #CCCCCC; border-width: 1px; padding: 3px; padding-top: 7px; border: 1px solid black; text-align: left">
* <strong>Mix in a before-and-after trait when you want an aborted suite, not a failed test, if the fixture code fails.</strong>
* </td>
* </tr>
*
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* <a href="FunSuite.html#beforeAndAfter"><code>BeforeAndAfter</code></a>
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* Use this boilerplate-buster when you need to perform the same side-effects before and/or after tests, rather than at the beginning or end of tests.
* </td>
* </tr>
*
* <tr>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">
* <a href="FunSuite.html#composingFixtures"><code>BeforeAndAfterEach</code></a>
* </td>
* <td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: left">
* Use when you want to <em>stack traits</em> that perform the same side-effects before and/or after tests, rather than at the beginning or end of tests.
* </td>
* </tr>
*
* </table>
*
* <a name="testMatrix"></a>
* <h4>Using <code>PropSpec</code> to implement a test matrix</h4>
*
* <p>
* Using fixture-context objects in a <code>PropSpec</code> is a good way to implement a test matrix.
 * What is a test matrix? It is a series of tests that you need to run on a series of subjects. For example, the Scala API contains
* many implementations of trait <code>Set</code>. Every implementation must obey the contract of <code>Set</code>.
* One property of any <code>Set</code> is that an empty <code>Set</code> should have size 0, another is that
* invoking head on an empty <code>Set</code> should give you a <code>NoSuchElementException</code>, and so on. Already you have a matrix,
* where rows are the properties and the columns are the set implementations:
* </p>
*
* <table style="border-collapse: collapse; border: 1px solid black">
* <tr><th style="background-color: #CCCCCC; border-width: 1px; padding: 3px; text-align: center; border: 1px solid black"> </th><th style="background-color: #CCCCCC; border-width: 1px; padding: 3px; text-align: center; border: 1px solid black"><code>BitSet</code></th><th style="background-color: #CCCCCC; border-width: 1px; padding: 3px; text-align: center; border: 1px solid black"><code>HashSet</code></th><th style="background-color: #CCCCCC; border-width: 1px; padding: 3px; text-align: center; border: 1px solid black"><code>TreeSet</code></th></tr>
 * <tr><td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">An empty Set should have size 0</td><td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: center"><span class="stGreen">pass</span></td><td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: center"><span class="stGreen">pass</span></td><td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: center"><span class="stGreen">pass</span></td></tr>
 * <tr><td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: right">Invoking head on an empty set should produce NoSuchElementException</td><td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: center"><span class="stGreen">pass</span></td><td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: center"><span class="stGreen">pass</span></td><td style="border-width: 1px; padding: 3px; border: 1px solid black; text-align: center"><span class="stGreen">pass</span></td></tr>
* </table>
*
* <p>
* One way to implement this test matrix is to define a trait to represent the columns (in this case, <code>BitSet</code>, <code>HashSet</code>,
* and <code>TreeSet</code>) as elements in a single-dimensional <code>Table</code>. Each element in the <code>Table</code> represents
* one <code>Set</code> implementation. Because different properties may require different fixture instances for those implementations, you
 * can define a trait to hold the examples, like this:
 * </p>
 *
* <pre class="stHighlight">
* trait SetExamples extends Tables {
*
* def examples = Table("set", bitSet, hashSet, treeSet)
*
* def bitSet: BitSet
* def hashSet: HashSet[Int]
* def treeSet: TreeSet[Int]
* }
* </pre>
*
* <p>
* Given this trait, you could provide empty sets in one implementation of <code>SetExamples</code>, and non-empty sets in another.
* Here's how you might provide empty set examples:
* </p>
*
* <pre class="stHighlight">
* class EmptySetExamples extends SetExamples {
* def bitSet = BitSet.empty
* def hashSet = HashSet.empty[Int]
* def treeSet = TreeSet.empty[Int]
* }
* </pre>
*
* <p>
* And here's how you might provide set examples with one item each:
* </p>
*
* <pre class="stHighlight">
* class SetWithOneItemExamples extends SetExamples {
* def bitSet = BitSet(1)
* def hashSet = HashSet(1)
* def treeSet = TreeSet(1)
* }
* </pre>
*
* <p>
* Armed with these example classes, you can define checks of properties that require
* empty or non-empty set fixtures by using instances of these classes as fixture-context
* objects. In other words, the columns of the test matrix are implemented as elements of
 * a one-dimensional table of fixtures, and the rows are implemented as <code>property</code>
* clauses of a <code>PropSpec</code>.
* </p>
*
* <p>
* Here's a complete example that checks the two properties mentioned previously:
* </p>
*
* <pre class="stHighlight">
* package org.scalatest.examples.propspec.matrix
*
* import org.scalatest._
* import org.scalatest.prop._
* import scala.collection.immutable._
*
* trait SetExamples extends Tables {
*
* def examples = Table("set", bitSet, hashSet, treeSet)
*
* def bitSet: BitSet
* def hashSet: HashSet[Int]
* def treeSet: TreeSet[Int]
* }
*
* class EmptySetExamples extends SetExamples {
* def bitSet = BitSet.empty
* def hashSet = HashSet.empty[Int]
* def treeSet = TreeSet.empty[Int]
* }
*
* class SetSpec extends PropSpec with TableDrivenPropertyChecks with Matchers {
*
* property("an empty Set should have size 0") {
* new EmptySetExamples {
* forAll(examples) { set =>
* set.size should be (0)
* }
* }
* }
*
* property("invoking head on an empty set should produce NoSuchElementException") {
* new EmptySetExamples {
* forAll(examples) { set =>
* evaluating { set.head } should produce [NoSuchElementException]
* }
* }
* }
* }
* </pre>
*
* <p>
* One benefit of this approach is that the compiler will help you when you need to add either a new row
* or column to the matrix. In either case, you'll need to ensure all cells are checked to get your code to compile.
* </p>
*
* <a name="sharedTests"></a><h2>Shared tests</h2>
*
* <p>
* Sometimes you may want to run the same test code on different fixture objects. That is to say, you may want to write tests that are "shared"
* by different fixture objects.
* You accomplish this in a <code>PropSpec</code> in the same way you would do it in a <code>FunSuite</code>, except instead of <code>test</code>
* you say <code>property</code>, and instead of <code>testsFor</code> you say <code>propertiesFor</code>.
* For more information, see the <a href="FunSuite.html#sharedTests">Shared tests</a> section of <code>FunSuite</code>'s
* documentation.
* </p>
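 *
 * <p>
 * Here's a brief sketch (not drawn from the <code>FunSuite</code> documentation) of how shared properties
 * might look in a <code>PropSpec</code>, assuming <code>scala.collection.immutable._</code> is imported:
 * </p>
 *
 * <pre class="stHighlight">
 * trait NonEmptySetProperties { this: PropSpec =>
 *   def nonEmptySet(description: String, createSet: => Set[Int]): Unit = {
 *     property(description + " should have size greater than 0") {
 *       assert(createSet.size > 0)
 *     }
 *   }
 * }
 *
 * class SharedSetSpec extends PropSpec with NonEmptySetProperties {
 *   propertiesFor(nonEmptySet("a non-empty HashSet", HashSet(1, 2, 3)))
 *   propertiesFor(nonEmptySet("a non-empty TreeSet", TreeSet(1, 2, 3)))
 * }
 * </pre>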
*
* @author Bill Venners
*/
@Finders(Array("org.scalatest.finders.PropSpecFinder"))
class PropSpec extends PropSpecLike {
/**
* Returns a user friendly string for this suite, composed of the
 * simple name of the class (possibly simplified further by removing dollar signs if added by the Scala interpreter) and, if this suite
* contains nested suites, the result of invoking <code>toString</code> on each
* of the nested suites, separated by commas and surrounded by parentheses.
*
* @return a user-friendly string for this suite
*/
override def toString: String = Suite.suiteToString(None, this)
}
|
SRGOM/scalatest
|
scalatest/src/main/scala/org/scalatest/PropSpec.scala
|
Scala
|
apache-2.0
| 37,327 |
package com.wavesplatform.state.diffs.ci
import com.wavesplatform.db.WithDomain
import com.wavesplatform.db.WithState.AddrWithBalance
import com.wavesplatform.features.BlockchainFeatures._
import com.wavesplatform.lang.directives.values.V5
import com.wavesplatform.lang.script.Script
import com.wavesplatform.lang.v1.compiler.TestCompiler
import com.wavesplatform.settings.TestFunctionalitySettings
import com.wavesplatform.test._
import com.wavesplatform.transaction.TxHelpers
class OverheadCallableCallTest extends PropSpec with WithDomain {
private val body = {
val n = 65
s"""
| func f0() = true
      | ${(0 until n).map(i => s"func f${i + 1}() = if (f$i()) then f$i() else f$i()").mkString("\n")}
| f$n()
""".stripMargin
}
private val dAppScript: Script =
TestCompiler(V5).compileContract(
s"""
| @Callable(i)
| func default() = {
| strict r = $body
| []
| }
""".stripMargin
)
private val settings =
TestFunctionalitySettings
.withFeatures(BlockV5, SynchronousCalls)
.copy(estimationOverflowFixHeight = 999, estimatorSumOverflowFixHeight = 4)
property("overhead callable call should be safe both before and after fix") {
val invoker = TxHelpers.signer(0)
val dApp = TxHelpers.signer(1)
val balances = AddrWithBalance.enoughBalances(invoker, dApp)
val setScript = TxHelpers.setScript(dApp, dAppScript)
val invoke1 = TxHelpers.invoke(dApp.toAddress, func = None, invoker = invoker)
val invoke2 = TxHelpers.invoke(dApp.toAddress, func = None, invoker = invoker)
withDomain(domainSettingsWithFS(settings), balances) { d =>
d.appendBlock(setScript)
d.appendBlockE(invoke1) should produce("Evaluation was uncompleted with unused complexity = 0")
d.appendBlock()
d.appendBlockE(invoke2) should produce("Evaluation was uncompleted with unused complexity = 0")
}
}
}
|
wavesplatform/Waves
|
node/src/test/scala/com/wavesplatform/state/diffs/ci/OverheadCallableCallTest.scala
|
Scala
|
mit
| 1,957 |
import scala.tools.partest.DirectTest
import scala.reflect.internal.util.BatchSourceFile
object Test extends DirectTest {
// Java code
override def code = """
|public @interface MyAnnotation { String value(); }
""".stripMargin
override def extraSettings: String = "-usejavacp -Ystop-after:typer -Xprint:parser"
override def show(): Unit = {
// redirect err to out, for logging
val prevErr = System.err
System.setErr(System.out)
compile()
System.setErr(prevErr)
}
override def newSources(sourceCodes: String*) = {
assert(sourceCodes.size == 1)
List(new BatchSourceFile("annodef.java", sourceCodes(0)))
}
}
|
densh/dotty
|
tests/pending/run/t5699.scala
|
Scala
|
bsd-3-clause
| 657 |
package org.littlewings.infinispan.task
import org.infinispan.Cache
import org.infinispan.tasks.{ServerTask, TaskContext}
class CompatibilityCacheSimpleTask extends ServerTask[String] {
private[task] var taskContext: TaskContext = _
override def getName: String = "cacheSimpleTask"
override def setTaskContext(taskContext: TaskContext): Unit =
this.taskContext = taskContext
override def call: String = {
val parameters = taskContext.getParameters.get
val marshaller = taskContext.getMarshaller.get
val cache = taskContext.getCache.get.asInstanceOf[Cache[String, Array[Byte]]]
val key = parameters.get("key")
val value = marshaller.objectFromByteBuffer(cache.get(key))
s"key = ${key}, value = ${value}"
}
}
|
kazuhira-r/infinispan-getting-started
|
remote-task/src/main/scala/org/littlewings/infinispan/task/CompatibilityCacheSimpleTask.scala
|
Scala
|
mit
| 752 |
def contramap[A, B](f: B => A): Op[Boolean, A] => Op[Boolean, B] = {
case Op(g) => Op(g compose f)
}
|
hmemcpy/milewski-ctfp-pdf
|
src/content/1.10/code/scala/snippet25.scala
|
Scala
|
gpl-3.0
| 102 |
/*
* Copyright (c) 2014-2022 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.iglu.schemaddl.jsonschema
import cats.data._
import cats.implicits._
/**
* Contains Schema validation logic for JSON AST to find nonsense (impossible)
* JSON Schemas, ie. Schemas which cannot validate ANY value, yet
* syntactically correct.
* This doesn't have logic to validate accordance to JSON Schema specs such as
* non-empty `required` or numeric `maximum`. Separate validator should be
* used for that.
*
* @see https://github.com/snowplow/iglu/issues/164
*/
object SanityLinter {
type Report = Map[Pointer.SchemaPointer, NonEmptyList[Linter.Issue]]
/**
*
* Main working function, traversing JSON Schema
* It lints all properties on current level, then tries to extract all
* subschemas from properties like `items`, `additionalItems` etc and
* recursively lint them as well
*
* @param schema parsed JSON AST
* @param linters of linters to be used
* @return non-empty list of summed failures (all, including nested) or
* unit in case of success
*/
def lint(schema: Schema, linters: List[Linter]): Report =
Schema.traverse(schema, validate(linters))
.runS(ValidationState.empty)
.value.toMap
/** Get list of linters from their names or list of unknown names */
def getLinters(names: List[String]): Either[NonEmptyList[String], List[Linter]] =
names
.map(name => (name, Linter.allLintersMap.get(name)))
.traverse[ValidatedNel[String, *], Linter] {
case (_, Some(linter)) => linter.validNel
case (name, None) => name.invalidNel
}.toEither
private def validate(linters: List[Linter])(jsonPointer: Pointer.SchemaPointer, schema: Schema): State[ValidationState, Unit] = {
val results = linters
.traverse[ValidatedNel[Linter.Issue, *], Unit](linter => linter(jsonPointer, schema).toValidatedNel)
results match {
case Validated.Invalid(errors) =>
State.modify[ValidationState] { state =>
state.add((jsonPointer, errors))
}
case Validated.Valid(_) =>
State.pure(())
}
}
private case class ValidationState(issues: List[(Pointer.SchemaPointer, NonEmptyList[Linter.Issue])]) {
def add(recent: (Pointer.SchemaPointer, NonEmptyList[Linter.Issue])): ValidationState =
ValidationState(recent :: issues)
def toMap: Map[Pointer.SchemaPointer, NonEmptyList[Linter.Issue]] = issues.toMap
}
private object ValidationState {
val empty = ValidationState(List.empty)
}
}
|
snowplow/schema-ddl
|
modules/core/src/main/scala/com.snowplowanalytics/iglu.schemaddl/jsonschema/SanityLinter.scala
|
Scala
|
apache-2.0
| 3,211 |
package dibl.proto
import scala.scalajs.js.annotation.{ JSExport, JSExportTopLevel }
@JSExportTopLevel("PrototypeDiagram") object PrototypeDiagram {
/** Completes a document supposed to have groups of SVG elements as in
* GroundForge-help/docs/images/matrix-template.png
*
* The groups are positioned outside the visible area of the document
   * with their circles at the same position.
   * The id of a group is the character in the circle prefixed with "vc".
   * An element (with id "oops") on the same pile indicates a stitch that has
   * a number of outgoing pairs other than 2. The transparency when referencing
   * this element indicates the number of outgoing pairs.
   * Very bright: just one; darker: more than two.
*
* @param config values of form fields on tiles.html plus values collected by setStitch calls
* @return SVG elements at some grid position referencing something in the pile.
* Some elements reference in an opaque way and call setStitch on click events.
* Other elements reference semi transparent and repeat the opaque elements.
*/
@JSExport
def create(config: TilesConfig): String = {
val itemMatrix = config.getItemMatrix
val clones = (for {
r <- itemMatrix.indices
c <- itemMatrix(r).indices.reverse
} yield {
val item = itemMatrix(r)(c)
val stitch = item.stitch
val vectorCode = item.vectorCode.toString.toUpperCase
val translate = s"transform='translate(${ c * 10 + 38 },${ r * 10 + 1 })'"
val nrOfPairsOut = config.nrOfPairsOut(r)(c)
      val opacity = (vectorCode, item.isOpaque) match {
case ("-", _) => "0.05"
case (_, true) => "1"
case _ => "0.3"
}
val hasHiddenInputField = item.isOpaque && !"-VWXYZ".contains(vectorCode)
s"""${ warning(vectorCode, translate, nrOfPairsOut, item.noStitch) }
|<use
| xlink:href='#vc$vectorCode'
| $translate
| style='stroke:#000;opacity:$opacity;'
|/>
|${ textInput(hasHiddenInputField, r, c, config) }""".stripMargin
    }).mkString("\n")
embed(clones)
}
private def textInput(isActive: Boolean, r: Int, c: Int, config: TilesConfig) = {
val item = config.getItemMatrix(r)(c)
if (isActive)
s"""<foreignObject x='${ 19 + c * 10 }' y='${ 970 + r * 10 }' width='4em' height='8'>
| <input name='${ item.id }'
| id='${ item.id }'
| type='text'
| value='${ item.stitch }'
| onchange='showProto()'
| ></input>
|</foreignObject>
|""".stripMargin
else ""
}
private def warning(vectorCode: String, translate: String, nrOfPairsOut: Int, noStitch: Boolean) = {
(nrOfPairsOut, vectorCode, noStitch) match {
case (_, _, true) | (2, _, _) | (_, "-", _) => "" // a two-in/two-out stitch or no stitch
case _ => s"""<use xlink:href='#oops' $translate style='opacity:0.${ 1 + nrOfPairsOut };'></use>"""
}
}
private lazy val symbols = {
val arrowStyle = """fill:none;stroke-width:1.1;stroke:#000;marker-end:url(#Arrow1Mend)"""
def arrow(path: String) = s"""<path d="m $path" style="$arrowStyle"></path>"""
val shortE = arrow("-12,978 -6,0")
val shortNE = arrow("-12,969 -7,7")
val shortN = arrow("-20,970 0,6")
val shortNW = arrow("-29,969 7,7")
val shortW = arrow("-28,978 6,0")
val doubleW = arrow("-28,978 c 3,-2 4,-1 6,0") + arrow("-28,978 c 2,2 4,1 6,0")
val doubleNW = arrow("-29,969 c 1,4 4,6 7,7") + arrow("-29,969 c 4,1 6,4 7,7")
val doubleN = arrow("-20,970 c 2,3 1,4 0,6") + arrow("-20,970 c -2,3 -1,4 0,6")
val doubleNE = arrow("-12,969 c -4,1 -6,4 -7,7") + arrow("-12,969 c -1,4 -4,6 -7,7")
val doubleE = arrow("-12,978 c -3,-2 -4,-1 -6,0") + arrow("-12,977 c -3,2 -4,1 -6,0")
    val doubleS = arrow("-20,986 c -2,-3 -1,-4 0,-6") + arrow("-20,986 c 2,-3 1,-4 0,-6")
val doubleSE = arrow("-11,986 c -4,-1 -6,-4 -7,-7") + arrow("-11,986 c -1,-4 -4,-6 -7,-7")
    val doubleSW = arrow("-28,986 c 1,-4 4,-6 7,-7") + arrow("-28,986 c 4,-1 6,-4 7,-7")
val shortSW = arrow("-12,986 -7,-7")
val shortS = arrow("-20,986 0,-6")
val shortSE = arrow("-28,986 7,-7")
val longN = arrow("-20,960 0,16")
val longE = arrow("-2,978 -16,0")
val longW = arrow("-38,978 16,0")
val textProps = """y="979.27722" x="-21.02791" style="font-size:3.3px;font-family:Arial;fill:#000000;stroke:none""""
val circlePath = "m -18.064645,978.05982 c 0,0.55229 -0.223858,1.05229 -0.585787,1.41422 -0.361929,0.36192 -0.861929,0.58578 -1.414213,0.58578 -0.552284,0 -1.052284,-0.22386 -1.414213,-0.58578 -0.361929,-0.36193 -0.585787,-0.86193 -0.585787,-1.41422 0,-0.55228 0.223858,-1.05228 0.585787,-1.41421 0.361929,-0.36193 0.861929,-0.58579 1.414213,-0.58579 0.552284,0 1.052284,0.22386 1.414213,0.58579 0.361929,0.36193 0.585787,0.86193 0.585787,1.41421 z"
val circle = s"""<path style="fill:white;stroke-width:1" d="$circlePath"></path>"""
def symbol(tag: Char, arrows: String) = {
s"""<g id="vc$tag">$circle<text $textProps>$tag</text>$arrows</g>"""
}
Seq(
symbol('0', shortE + shortNE) ,
symbol('1', shortE + shortN) ,
symbol('2', shortE + shortNW) ,
symbol('3', shortW + shortE) ,
symbol('4', shortNE + shortN) ,
symbol('5', shortNE + shortNW) ,
symbol('6', shortNE + shortW) ,
symbol('7', shortN + shortNW) ,
symbol('8', shortN + shortW) ,
symbol('9', shortNW + shortW) ,
symbol('A', shortE + longN) ,
symbol('B', shortNE + longN) ,
symbol('C', longN + shortNW) ,
symbol('D', longN + shortW) ,
symbol('E', longE + shortNE) ,
symbol('F', longE + shortN) ,
symbol('G', longE + longN) ,
symbol('H', longE + shortNW) ,
symbol('I', longE + shortW) ,
symbol('J', longW + shortE) ,
symbol('K', longE + longW) ,
symbol('L', shortNE + longW) ,
symbol('M', shortN + longW) ,
symbol('N', longN + longW) ,
symbol('O', shortNW + longW) ,
symbol('P', doubleW) ,
symbol('Q', doubleNW) ,
symbol('R', doubleN) ,
symbol('S', doubleNE) ,
symbol('T', doubleE) ,
symbol('V', shortW) ,
symbol('W', shortNW) ,
symbol('X', shortN) ,
symbol('Y', shortNE) ,
symbol('Z', shortE) ,
symbol('-', ""),
    ).mkString("\n ")
}
def embed(clones: String): String = {
s"""<svg
| xmlns="http://www.w3.org/2000/svg"
| xmlns:xlink='http://www.w3.org/1999/xlink'
| width="297mm"
| height="210mm"
| id="svg2"
| version="1.1">
| <defs id="defs4">
| <marker orient="auto" refY="0" refX="0" id="Arrow1Mend" style="overflow:visible">
| <path d="M 0,0 2,-5 -12.5,0 5,5 0,0 Z" style="fill-rule:evenodd;stroke-width:1pt" transform="matrix(-0.22,0,0,-0.22,-2,0)"></path>
| </marker>
| </defs>
| <g id="layer1" transform="matrix(2.7,0,0,2.7,-10,-2600)">
| <g>
| <path id="oops"
| d="m -15.737308,978.07528 a 4.4367617,4.4367617 0 0 1 -2.222417,3.84823 4.4428755,4.4428755 0 0 1 -4.443852,-0.002 4.4428755,4.4428755 0 0 1 -2.219481,-3.84991 l 4.442876,0.002"
| style="fill:#000000;stroke:none;"
| ></path>
| $symbols
| </g>
| $clones
| </g>
|</svg>
|""".stripMargin
}
}
|
jo-pol/GroundForge
|
src/main/scala/dibl/proto/PrototypeDiagram.scala
|
Scala
|
gpl-3.0
| 7,492 |
/*
* scala-swing (https://www.scala-lang.org)
*
* Copyright EPFL, Lightbend, Inc., contributors
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.swing
package event
case class ColorChanged(source: Component, c: Color) extends ComponentEvent with SelectionEvent
|
scala/scala-swing
|
src/main/scala/scala/swing/event/ColorChanged.scala
|
Scala
|
apache-2.0
| 438 |
package tests
class Baz {
def baz/*caret*/() = 0
}
object Baz2 extends Baz {
override val baz/*caret*/ = 3
}
object Baz3 extends {override var /*caret*/baz = 1} with Baz {}
class Baz4 extends Baz {
override def baz/*caret*/() = 1
}
object Test {
Baz2.baz/*caret*/
Baz3.baz/*caret*/
Baz3.baz/*caret*/_=(3)
}
|
LPTK/intellij-scala
|
testdata/rename3/overrideDef/before/tests/Baz.scala
|
Scala
|
apache-2.0
| 323 |
package com.sothr.imagetools.engine
import java.io.File
import akka.actor.{ActorRef, Props}
import com.sothr.imagetools.engine.image.{Image, ImageService, SimilarImages}
import grizzled.slf4j.Logging
import scala.collection.mutable
/**
* Engine that works sequentially
* Very Slow, but consistent. Excellent for testing
*
* Created by drew on 1/26/14.
*/
class SequentialEngine extends Engine with Logging {
var processedListener = system.actorOf(Props[DefaultLoggingEngineListener],
name = "ProcessedEngineListener")
var similarityListener = system.actorOf(Props[DefaultLoggingEngineListener],
name = "SimilarityEngineListener")
override def setProcessedListener(listenerRef: ActorRef) = {
this.processedListener = listenerRef
}
override def setSimilarityListener(listenerRef: ActorRef) = {
this.similarityListener = listenerRef
}
def getSimilarImagesForDirectory(directoryPath: String, recursive: Boolean = false, recursiveDepth: Int = 500): List[SimilarImages] = {
debug(s"Looking for similar images in directory: $directoryPath")
val images = getImagesForDirectory(directoryPath, recursive, recursiveDepth)
info(s"Searching ${images.length} images for similarities")
val ignoreSet = new mutable.HashSet[Image]()
val allSimilarImages = new mutable.MutableList[SimilarImages]()
var processedCount = 0
var similarCount = 0
for (rootImage <- images) {
if (!ignoreSet.contains(rootImage)) {
if (processedCount % 25 == 0) {
//info(s"Processed ${processedCount}/${images.length - ignoreSet.size} About ${images.length -
// processedCount} images to go")
similarityListener ! ScannedFileCount(processedCount, images.length - ignoreSet.size)
}
debug(s"Looking for images similar to: ${rootImage.imagePath}")
ignoreSet += rootImage
val similarImages = new mutable.MutableList[Image]()
for (image <- images) {
if (!ignoreSet.contains(image)) {
if (rootImage.isSimilarTo(image)) {
debug(s"Image: ${image.imagePath} is similar")
similarImages += image
ignoreSet += image
similarCount += 1
}
}
}
if (similarImages.length > 1) {
similarImages += rootImage
val similar = new SimilarImages(similarImages.toSet)
debug(s"Found similar images: ${similar.toString}")
allSimilarImages += similar
}
processedCount += 1
}
}
info(s"Finished processing ${images.size} images. Found $similarCount similar images")
this.processSimilarities(allSimilarImages.toList)
}
def getImagesForDirectory(directoryPath: String, recursive: Boolean = false, recursiveDepth: Int = 500): List[Image] = {
debug(s"Looking for images in directory: $directoryPath")
val images: mutable.MutableList[Image] = new mutable.MutableList[Image]()
val imageFiles = getAllImageFiles(directoryPath, recursive, recursiveDepth)
val directory: File = new File(directoryPath)
var count = 0
for (file <- imageFiles) {
count += 1
if (count % 25 == 0) {
//info(s"Processed ${count}/${imageFiles.size}")
processedListener ! ScannedFileCount(count, imageFiles.size)
}
val image = ImageService.getImage(file)
if (image != null) {
images += image
}
}
images.toList
}
}
|
warricksothr/ImageTools
|
engine/src/main/scala/com/sothr/imagetools/engine/SequentialEngine.scala
|
Scala
|
mit
| 3,455 |
/*
* Copyright (c) 2015,
* Ilya Sergey, Christopher Earl, Matthew Might and David Van Horn
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of the project "Reachability" nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.ucombinator.scheme.transform
import org.ucombinator.scheme.syntax._
/**
  A <code>Monomorphizer</code> creates a separate definition of
every function for each point of use.
*/
class Monomorphizer extends ProgramTransformer {
def apply(prog: Program): Program = {
    // Monomorphization is not yet implemented
    throw new Exception()
}
}
|
ilyasergey/reachability
|
src/org/ucombinator/scheme/transform/Monomorphizer.scala
|
Scala
|
bsd-3-clause
| 1,945 |
/* NSC -- new Scala compiler
* Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
package scala.tools
package cmd
import nsc.io._
import java.util.Properties
import java.io.FileInputStream
import scala.sys.SystemProperties
/** Contains logic for translating a property key/value pair into
* equivalent command line arguments. The default settings will
* translate, given programInfo.runner == "foo" :
*
* foo.bar=true to --bar // if --bar is unary
* foo.bar=quux to --bar quux // if --bar is binary
*/
class PropertyMapper(reference: Reference) extends (((String, String)) => List[String]) {
import reference._
lazy val RunnerName = programInfo.runner
// e.g. "partest.shootout" -> "--shootout"
def propNameToOptionName(key: String): Option[String] = (key split '.').toList match {
case List(RunnerName, name) => Some(name)
case _ => None
}
def isPassThrough(key: String): Boolean = false // e.g. "partest.options"
def onError(key: String, value: String): Unit = () // called when translate fails
def translate(key: String, value: String): List[String] = {
val opt = toOpt(key)
if (isUnaryOption(key) && isTrue(value)) List(opt)
else if (isBinaryOption(key)) List(opt, value)
else returning(Nil)(_ => onError(key, value))
}
def isTrue(value: String) = List("yes", "on", "true") contains value.toLowerCase
def apply(kv: (String, String)): List[String] = {
val (k, v) = kv
if (isPassThrough(k)) toArgs(v)
else propNameToOptionName(k) match {
case Some(optName) => translate(optName, v)
case _ => Nil
}
}
}
trait Property extends Reference {
def propMapper: PropertyMapper
override def propertyArgs: List[String] = systemPropertiesToOptions
def loadProperties(file: File): Properties =
returning(new Properties)(_ load new FileInputStream(file.path))
def systemPropertiesToOptions: List[String] =
propertiesToOptions(new SystemProperties().toList)
def propertiesToOptions(file: File): List[String] =
propertiesToOptions(loadProperties(file))
def propertiesToOptions(props: java.util.Properties): List[String] = {
import scala.collection.JavaConverters._
propertiesToOptions(props.asScala.toList)
}
def propertiesToOptions(props: List[(String, String)]) = props flatMap propMapper
}
|
felixmulder/scala
|
src/compiler/scala/tools/cmd/Property.scala
|
Scala
|
bsd-3-clause
| 2,406 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.fixture
import org.scalatest._
import SharedHelpers.EventRecordingReporter
import scala.concurrent.{Future, ExecutionContext}
class AsyncPropSpecLikeSpec extends org.scalatest.FunSpec {
describe("AsyncPropSpecLike") {
it("can be used for tests that return Future") {
class ExampleSpec extends AsyncPropSpecLike {
implicit val executionContext: ExecutionContext = ExecutionContext.Implicits.global
type FixtureParam = String
def withAsyncFixture(test: OneArgAsyncTest): Future[Outcome] =
test("testing")
val a = 1
property("test 1") { fixture =>
Future {
assert(a == 1)
}
}
property("test 2") { fixture =>
Future {
assert(a == 2)
}
}
property("test 3") { fixture =>
Future {
pending
}
}
property("test 4") { fixture =>
Future {
cancel
}
}
ignore("test 5") { fixture =>
Future {
cancel
}
}
override def newInstance = new ExampleSpec
}
val rep = new EventRecordingReporter
val spec = new ExampleSpec
val status = spec.run(None, Args(reporter = rep))
status.waitUntilCompleted()
assert(rep.testStartingEventsReceived.length == 4)
assert(rep.testSucceededEventsReceived.length == 1)
assert(rep.testSucceededEventsReceived(0).testName == "test 1")
assert(rep.testFailedEventsReceived.length == 1)
assert(rep.testFailedEventsReceived(0).testName == "test 2")
assert(rep.testPendingEventsReceived.length == 1)
assert(rep.testPendingEventsReceived(0).testName == "test 3")
assert(rep.testCanceledEventsReceived.length == 1)
assert(rep.testCanceledEventsReceived(0).testName == "test 4")
assert(rep.testIgnoredEventsReceived.length == 1)
assert(rep.testIgnoredEventsReceived(0).testName == "test 5")
}
it("can be used for tests that did not return Future") {
class ExampleSpec extends AsyncPropSpecLike {
implicit val executionContext: ExecutionContext = ExecutionContext.Implicits.global
type FixtureParam = String
def withAsyncFixture(test: OneArgAsyncTest): Future[Outcome] =
test("testing")
val a = 1
property("test 1") { fixture =>
assert(a == 1)
}
property("test 2") { fixture =>
assert(a == 2)
}
property("test 3") { fixture =>
pending
}
property("test 4") { fixture =>
cancel
}
ignore("test 5") { fixture =>
cancel
}
override def newInstance = new ExampleSpec
}
val rep = new EventRecordingReporter
val spec = new ExampleSpec
val status = spec.run(None, Args(reporter = rep))
status.waitUntilCompleted()
assert(rep.testStartingEventsReceived.length == 4)
assert(rep.testSucceededEventsReceived.length == 1)
assert(rep.testSucceededEventsReceived(0).testName == "test 1")
assert(rep.testFailedEventsReceived.length == 1)
assert(rep.testFailedEventsReceived(0).testName == "test 2")
assert(rep.testPendingEventsReceived.length == 1)
assert(rep.testPendingEventsReceived(0).testName == "test 3")
assert(rep.testCanceledEventsReceived.length == 1)
assert(rep.testCanceledEventsReceived(0).testName == "test 4")
assert(rep.testIgnoredEventsReceived.length == 1)
assert(rep.testIgnoredEventsReceived(0).testName == "test 5")
}
}
}
|
SRGOM/scalatest
|
scalatest-test/src/test/scala/org/scalatest/fixture/AsyncPropSpecLikeSpec.scala
|
Scala
|
apache-2.0
| 4,266 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.producer.async
import kafka.common._
import kafka.message.{NoCompressionCodec, Message, ByteBufferMessageSet}
import kafka.producer._
import kafka.serializer.Encoder
import kafka.utils.{CoreUtils, Logging, SystemTime}
import scala.util.Random
import scala.collection.{Seq, Map}
import scala.collection.mutable.{ArrayBuffer, HashMap, Set}
import java.util.concurrent.atomic._
import kafka.api.{TopicMetadata, ProducerRequest}
import org.apache.kafka.common.utils.Utils
class DefaultEventHandler[K,V](config: ProducerConfig,
private val partitioner: Partitioner,
private val encoder: Encoder[V],
private val keyEncoder: Encoder[K],
private val producerPool: ProducerPool,
private val topicPartitionInfos: HashMap[String, TopicMetadata] = new HashMap[String, TopicMetadata])
extends EventHandler[K,V] with Logging {
val isSync = ("sync" == config.producerType)
val correlationId = new AtomicInteger(0)
val brokerPartitionInfo = new BrokerPartitionInfo(config, producerPool, topicPartitionInfos)
private val topicMetadataRefreshInterval = config.topicMetadataRefreshIntervalMs
private var lastTopicMetadataRefreshTime = 0L
private val topicMetadataToRefresh = Set.empty[String]
private val sendPartitionPerTopicCache = HashMap.empty[String, Int]
private val producerStats = ProducerStatsRegistry.getProducerStats(config.clientId)
private val producerTopicStats = ProducerTopicStatsRegistry.getProducerTopicStats(config.clientId)
def handle(events: Seq[KeyedMessage[K,V]]) {
val serializedData = serialize(events)
serializedData.foreach {
keyed =>
val dataSize = keyed.message.payloadSize
producerTopicStats.getProducerTopicStats(keyed.topic).byteRate.mark(dataSize)
producerTopicStats.getProducerAllTopicsStats.byteRate.mark(dataSize)
}
var outstandingProduceRequests = serializedData
var remainingRetries = config.messageSendMaxRetries + 1
val correlationIdStart = correlationId.get()
debug("Handling %d events".format(events.size))
while (remainingRetries > 0 && outstandingProduceRequests.size > 0) {
topicMetadataToRefresh ++= outstandingProduceRequests.map(_.topic)
if (topicMetadataRefreshInterval >= 0 &&
SystemTime.milliseconds - lastTopicMetadataRefreshTime > topicMetadataRefreshInterval) {
CoreUtils.swallowError(brokerPartitionInfo.updateInfo(topicMetadataToRefresh.toSet, correlationId.getAndIncrement))
sendPartitionPerTopicCache.clear()
        topicMetadataToRefresh.clear()
lastTopicMetadataRefreshTime = SystemTime.milliseconds
}
outstandingProduceRequests = dispatchSerializedData(outstandingProduceRequests)
if (outstandingProduceRequests.size > 0) {
info("Back off for %d ms before retrying send. Remaining retries = %d".format(config.retryBackoffMs, remainingRetries-1))
// back off and update the topic metadata cache before attempting another send operation
Thread.sleep(config.retryBackoffMs)
// get topics of the outstanding produce requests and refresh metadata for those
CoreUtils.swallowError(brokerPartitionInfo.updateInfo(outstandingProduceRequests.map(_.topic).toSet, correlationId.getAndIncrement))
sendPartitionPerTopicCache.clear()
remainingRetries -= 1
producerStats.resendRate.mark()
}
}
if(outstandingProduceRequests.size > 0) {
producerStats.failedSendRate.mark()
val correlationIdEnd = correlationId.get()
error("Failed to send requests for topics %s with correlation ids in [%d,%d]"
.format(outstandingProduceRequests.map(_.topic).toSet.mkString(","),
correlationIdStart, correlationIdEnd-1))
throw new FailedToSendMessageException("Failed to send messages after " + config.messageSendMaxRetries + " tries.", null)
}
}
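  // Illustrative sketch (not part of the original class): the retry loop above is driven by
  // producer configuration; the property names below are the old Scala-producer config keys
  // and the values are hypothetical, shown for orientation only.
  //   message.send.max.retries=3                 -> up to 4 attempts per batch (retries + 1)
  //   retry.backoff.ms=100                       -> sleep between attempts; metadata is refreshed each retry
  //   topic.metadata.refresh.interval.ms=600000  -> periodic refresh inside the send loop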
private def dispatchSerializedData(messages: Seq[KeyedMessage[K,Message]]): Seq[KeyedMessage[K, Message]] = {
val partitionedDataOpt = partitionAndCollate(messages)
partitionedDataOpt match {
case Some(partitionedData) =>
val failedProduceRequests = new ArrayBuffer[KeyedMessage[K, Message]]
for ((brokerid, messagesPerBrokerMap) <- partitionedData) {
if (logger.isTraceEnabled) {
messagesPerBrokerMap.foreach(partitionAndEvent =>
trace("Handling event for Topic: %s, Broker: %d, Partitions: %s".format(partitionAndEvent._1, brokerid, partitionAndEvent._2)))
}
val messageSetPerBrokerOpt = groupMessagesToSet(messagesPerBrokerMap)
messageSetPerBrokerOpt match {
case Some(messageSetPerBroker) =>
val failedTopicPartitions = send(brokerid, messageSetPerBroker)
failedTopicPartitions.foreach(topicPartition => {
messagesPerBrokerMap.get(topicPartition) match {
case Some(data) => failedProduceRequests.appendAll(data)
case None => // nothing
}
})
case None => // failed to group messages
messagesPerBrokerMap.values.foreach(m => failedProduceRequests.appendAll(m))
}
}
failedProduceRequests
case None => // failed to collate messages
messages
}
}
def serialize(events: Seq[KeyedMessage[K,V]]): Seq[KeyedMessage[K,Message]] = {
val serializedMessages = new ArrayBuffer[KeyedMessage[K,Message]](events.size)
events.foreach{e =>
try {
if(e.hasKey)
serializedMessages += new KeyedMessage[K,Message](topic = e.topic, key = e.key, partKey = e.partKey, message = new Message(key = keyEncoder.toBytes(e.key), bytes = encoder.toBytes(e.message)))
else
serializedMessages += new KeyedMessage[K,Message](topic = e.topic, key = e.key, partKey = e.partKey, message = new Message(bytes = encoder.toBytes(e.message)))
} catch {
case t: Throwable =>
producerStats.serializationErrorRate.mark()
if (isSync) {
throw t
} else {
// currently, if in async mode, we just log the serialization error. We need to revisit
// this when doing kafka-496
error("Error serializing message for topic %s".format(e.topic), t)
}
}
}
serializedMessages
}
def partitionAndCollate(messages: Seq[KeyedMessage[K,Message]]): Option[Map[Int, collection.mutable.Map[TopicAndPartition, Seq[KeyedMessage[K,Message]]]]] = {
val ret = new HashMap[Int, collection.mutable.Map[TopicAndPartition, Seq[KeyedMessage[K,Message]]]]
try {
for (message <- messages) {
val topicPartitionsList = getPartitionListForTopic(message)
val partitionIndex = getPartition(message.topic, message.partitionKey, topicPartitionsList)
val brokerPartition = topicPartitionsList(partitionIndex)
// postpone the failure until the send operation, so that requests for other brokers are handled correctly
val leaderBrokerId = brokerPartition.leaderBrokerIdOpt.getOrElse(-1)
var dataPerBroker: HashMap[TopicAndPartition, Seq[KeyedMessage[K,Message]]] = null
ret.get(leaderBrokerId) match {
case Some(element) =>
dataPerBroker = element.asInstanceOf[HashMap[TopicAndPartition, Seq[KeyedMessage[K,Message]]]]
case None =>
dataPerBroker = new HashMap[TopicAndPartition, Seq[KeyedMessage[K,Message]]]
ret.put(leaderBrokerId, dataPerBroker)
}
val topicAndPartition = TopicAndPartition(message.topic, brokerPartition.partitionId)
var dataPerTopicPartition: ArrayBuffer[KeyedMessage[K,Message]] = null
dataPerBroker.get(topicAndPartition) match {
case Some(element) =>
dataPerTopicPartition = element.asInstanceOf[ArrayBuffer[KeyedMessage[K,Message]]]
case None =>
dataPerTopicPartition = new ArrayBuffer[KeyedMessage[K,Message]]
dataPerBroker.put(topicAndPartition, dataPerTopicPartition)
}
dataPerTopicPartition.append(message)
}
Some(ret)
    } catch { // Swallow recoverable exceptions and return None so that they can be retried.
case ute: UnknownTopicOrPartitionException => warn("Failed to collate messages by topic,partition due to: " + ute.getMessage); None
case lnae: LeaderNotAvailableException => warn("Failed to collate messages by topic,partition due to: " + lnae.getMessage); None
case oe: Throwable => error("Failed to collate messages by topic, partition due to: " + oe.getMessage); None
}
}
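  // Shape of a successful collation result, as a rough sketch (broker ids, topic names and
  // messages are illustrative):
  //   Some(Map(
  //     0 -> Map(TopicAndPartition("t1", 0) -> ArrayBuffer(msgA, msgB)),
  //     1 -> Map(TopicAndPartition("t1", 1) -> ArrayBuffer(msgC))
  //   ))
  // i.e. messages grouped first by leader broker id, then by (topic, partition).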
private def getPartitionListForTopic(m: KeyedMessage[K,Message]): Seq[PartitionAndLeader] = {
val topicPartitionsList = brokerPartitionInfo.getBrokerPartitionInfo(m.topic, correlationId.getAndIncrement)
debug("Broker partitions registered for topic: %s are %s"
.format(m.topic, topicPartitionsList.map(p => p.partitionId).mkString(",")))
val totalNumPartitions = topicPartitionsList.length
if(totalNumPartitions == 0)
throw new NoBrokersForPartitionException("Partition key = " + m.key)
topicPartitionsList
}
/**
* Retrieves the partition id and throws an UnknownTopicOrPartitionException if
* the value of partition is not between 0 and numPartitions-1
* @param topic The topic
* @param key the partition key
* @param topicPartitionList the list of available partitions
* @return the partition id
*/
private def getPartition(topic: String, key: Any, topicPartitionList: Seq[PartitionAndLeader]): Int = {
val numPartitions = topicPartitionList.size
if(numPartitions <= 0)
throw new UnknownTopicOrPartitionException("Topic " + topic + " doesn't exist")
val partition =
if(key == null) {
// If the key is null, we don't really need a partitioner
// So we look up in the send partition cache for the topic to decide the target partition
val id = sendPartitionPerTopicCache.get(topic)
id match {
case Some(partitionId) =>
// directly return the partitionId without checking availability of the leader,
// since we want to postpone the failure until the send operation anyways
partitionId
case None =>
val availablePartitions = topicPartitionList.filter(_.leaderBrokerIdOpt.isDefined)
if (availablePartitions.isEmpty)
throw new LeaderNotAvailableException("No leader for any partition in topic " + topic)
val index = Utils.abs(Random.nextInt) % availablePartitions.size
val partitionId = availablePartitions(index).partitionId
sendPartitionPerTopicCache.put(topic, partitionId)
partitionId
}
} else
partitioner.partition(key, numPartitions)
if(partition < 0 || partition >= numPartitions)
throw new UnknownTopicOrPartitionException("Invalid partition id: " + partition + " for topic " + topic +
"; Valid values are in the inclusive range of [0, " + (numPartitions-1) + "]")
trace("Assigning message of topic %s and key %s to a selected partition %d".format(topic, if (key == null) "[none]" else key.toString, partition))
partition
}
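  // Keyless-send sketch: with key == null the partition is drawn once at random from the
  // partitions that currently have a leader, then cached per topic until the cache is cleared
  // on the next metadata refresh. E.g. (illustrative):
  //   getPartition("t1", null, partitions)  // picks, say, partition 2 and caches it
  //   getPartition("t1", null, partitions)  // returns 2 again from sendPartitionPerTopicCache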
/**
* Constructs and sends the produce request based on a map from (topic, partition) -> messages
*
* @param brokerId the broker that will receive the request
* @param messagesPerTopic the messages as a map from (topic, partition) -> messages
* @return the set (topic, partitions) messages which incurred an error sending or processing
*/
private def send(brokerId: Int, messagesPerTopic: collection.mutable.Map[TopicAndPartition, ByteBufferMessageSet]) = {
if(brokerId < 0) {
warn("Failed to send data since partitions %s don't have a leader".format(messagesPerTopic.map(_._1).mkString(",")))
messagesPerTopic.keys.toSeq
} else if(messagesPerTopic.size > 0) {
val currentCorrelationId = correlationId.getAndIncrement
val producerRequest = new ProducerRequest(currentCorrelationId, config.clientId, config.requestRequiredAcks,
config.requestTimeoutMs, messagesPerTopic)
var failedTopicPartitions = Seq.empty[TopicAndPartition]
try {
val syncProducer = producerPool.getProducer(brokerId)
debug("Producer sending messages with correlation id %d for topics %s to broker %d on %s:%d"
.format(currentCorrelationId, messagesPerTopic.keySet.mkString(","), brokerId, syncProducer.config.host, syncProducer.config.port))
val response = syncProducer.send(producerRequest)
debug("Producer sent messages with correlation id %d for topics %s to broker %d on %s:%d"
.format(currentCorrelationId, messagesPerTopic.keySet.mkString(","), brokerId, syncProducer.config.host, syncProducer.config.port))
if(response != null) {
if (response.status.size != producerRequest.data.size)
throw new KafkaException("Incomplete response (%s) for producer request (%s)".format(response, producerRequest))
if (logger.isTraceEnabled) {
val successfullySentData = response.status.filter(_._2.error == ErrorMapping.NoError)
successfullySentData.foreach(m => messagesPerTopic(m._1).foreach(message =>
trace("Successfully sent message: %s".format(if(message.message.isNull) null else message.message.toString()))))
}
val failedPartitionsAndStatus = response.status.filter(_._2.error != ErrorMapping.NoError).toSeq
failedTopicPartitions = failedPartitionsAndStatus.map(partitionStatus => partitionStatus._1)
if(failedTopicPartitions.size > 0) {
val errorString = failedPartitionsAndStatus
.sortWith((p1, p2) => p1._1.topic.compareTo(p2._1.topic) < 0 ||
(p1._1.topic.compareTo(p2._1.topic) == 0 && p1._1.partition < p2._1.partition))
.map{
case(topicAndPartition, status) =>
topicAndPartition.toString + ": " + ErrorMapping.exceptionFor(status.error).getClass.getName
}.mkString(",")
warn("Produce request with correlation id %d failed due to %s".format(currentCorrelationId, errorString))
}
failedTopicPartitions
} else {
Seq.empty[TopicAndPartition]
}
} catch {
case t: Throwable =>
warn("Failed to send producer request with correlation id %d to broker %d with data for partitions %s"
.format(currentCorrelationId, brokerId, messagesPerTopic.map(_._1).mkString(",")), t)
messagesPerTopic.keys.toSeq
}
} else {
List.empty
}
}
private def groupMessagesToSet(messagesPerTopicAndPartition: collection.mutable.Map[TopicAndPartition, Seq[KeyedMessage[K, Message]]]) = {
/** enforce the compressed.topics config here.
* If the compression codec is anything other than NoCompressionCodec,
* Enable compression only for specified topics if any
* If the list of compressed topics is empty, then enable the specified compression codec for all topics
* If the compression codec is NoCompressionCodec, compression is disabled for all topics
*/
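    // Illustrative config combinations (hypothetical values for the old producer properties):
    //   compression.codec=none                            -> no topic is compressed
    //   compression.codec=gzip, compressed.topics unset   -> every topic is gzip-compressed
    //   compression.codec=gzip, compressed.topics=t1,t2   -> only t1 and t2 are compressed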
try {
val messagesPerTopicPartition = messagesPerTopicAndPartition.map { case (topicAndPartition, messages) =>
val rawMessages = messages.map(_.message)
(topicAndPartition,
config.compressionCodec match {
case NoCompressionCodec =>
debug("Sending %d messages with no compression to %s".format(messages.size, topicAndPartition))
new ByteBufferMessageSet(NoCompressionCodec, rawMessages: _*)
case _ =>
config.compressedTopics.size match {
case 0 =>
debug("Sending %d messages with compression codec %d to %s"
.format(messages.size, config.compressionCodec.codec, topicAndPartition))
new ByteBufferMessageSet(config.compressionCodec, rawMessages: _*)
case _ =>
if (config.compressedTopics.contains(topicAndPartition.topic)) {
debug("Sending %d messages with compression codec %d to %s"
.format(messages.size, config.compressionCodec.codec, topicAndPartition))
new ByteBufferMessageSet(config.compressionCodec, rawMessages: _*)
}
else {
debug("Sending %d messages to %s with no compression as it is not in compressed.topics - %s"
.format(messages.size, topicAndPartition, config.compressedTopics.toString))
new ByteBufferMessageSet(NoCompressionCodec, rawMessages: _*)
}
}
}
)
}
Some(messagesPerTopicPartition)
} catch {
case t: Throwable => error("Failed to group messages", t); None
}
}
def close() {
if (producerPool != null)
      producerPool.close()
}
}
|
junrao/kafka
|
core/src/main/scala/kafka/producer/async/DefaultEventHandler.scala
|
Scala
|
apache-2.0
| 18,044 |
package org.reactivecouchbase.experimental
object CouchbaseFS {
}
|
en-japan/ReactiveCouchbase-core
|
driver/src/main/scala/org/reactivecouchbase/experimental/couchfs.scala
|
Scala
|
apache-2.0
| 67 |
package v1.wikiq
import javax.inject.Inject
import epam.idobrovolskiy.wikipedia.trending.cli.WikiDateRangeParser
import play.api.routing.Router.Routes
import play.api.routing.SimpleRouter
import play.api.routing.sird._
/**
 * Routes and URLs for the WikiqController.
*/
class WikiqRouter @Inject()(controller: WikiqController) extends SimpleRouter {
val prefix = "/v1/wikiq"
override def routes: Routes = {
case GET(p"/tokens/$date_range") =>
controller.tokens(date_range)
case GET(p"/articles/$date_range") => {
WikiDateRangeParser.parse(date_range) match {
case Some(range) => println(range)
case _ => throw new IllegalArgumentException(date_range)
}
???
}
case GET(p"/distribution/$tokens") => {
println(tokens)
???
}
}
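  // Illustrative requests this router would match (the exact date-range syntax is whatever
  // WikiDateRangeParser accepts; the placeholders below are not literal values):
  //   GET /v1/wikiq/tokens/<date_range>
  //   GET /v1/wikiq/articles/<date_range>
  //   GET /v1/wikiq/distribution/<tokens>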
}
|
igor-dobrovolskiy-epam/wikipedia-analysis-scala-core
|
web/app/v1/wikiq/WikiqRouter.scala
|
Scala
|
apache-2.0
| 822 |
package net.mentalarray.doozie.PigSupport
/**
* Created by kdivincenzo on 2/18/15.
*/
sealed trait PigClientState { }
object PigClientState {
case object Connected extends PigClientState
case object WorkRequested extends PigClientState
case object Finished extends PigClientState
case object Disconnected extends PigClientState
case object Error extends PigClientState
}
|
antagonist112358/tomahawk
|
workflow-engine/src/net/mentalarray/doozie/PigSupport/PigClientState.scala
|
Scala
|
apache-2.0
| 386 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler.cluster.k8s
import java.util.concurrent.TimeUnit
import java.util.concurrent.atomic.AtomicReference
import io.fabric8.kubernetes.api.model.{Pod, PodBuilder}
import org.jmock.lib.concurrent.DeterministicScheduler
import org.scalatest.BeforeAndAfter
import scala.collection.mutable
import org.apache.spark.SparkFunSuite
import org.apache.spark.deploy.k8s.Constants._
import org.apache.spark.util.ManualClock
class ExecutorPodsSnapshotsStoreSuite extends SparkFunSuite with BeforeAndAfter {
private var eventBufferScheduler: DeterministicScheduler = _
private var eventQueueUnderTest: ExecutorPodsSnapshotsStoreImpl = _
private var clock: ManualClock = _
before {
eventBufferScheduler = new DeterministicScheduler()
clock = new ManualClock()
eventQueueUnderTest = new ExecutorPodsSnapshotsStoreImpl(eventBufferScheduler, clock)
ExecutorPodsSnapshot.setShouldCheckAllContainers(false)
}
test("Subscribers get notified of events periodically.") {
val receivedSnapshots1 = mutable.Buffer.empty[ExecutorPodsSnapshot]
val receivedSnapshots2 = mutable.Buffer.empty[ExecutorPodsSnapshot]
eventQueueUnderTest.addSubscriber(1000) {
receivedSnapshots1 ++= _
}
eventQueueUnderTest.addSubscriber(2000) {
receivedSnapshots2 ++= _
}
eventBufferScheduler.runUntilIdle()
assert(receivedSnapshots1 === Seq(ExecutorPodsSnapshot()))
assert(receivedSnapshots2 === Seq(ExecutorPodsSnapshot()))
clock.advance(100)
pushPodWithIndex(1)
    // Force time to move forward so that the buffer is emitted and the
    // subscribers are actually executed.
    eventBufferScheduler.tick(1000, TimeUnit.MILLISECONDS)
assert(receivedSnapshots1 === Seq(
ExecutorPodsSnapshot(),
ExecutorPodsSnapshot(Seq(podWithIndex(1)), 0)))
assert(receivedSnapshots2 === Seq(ExecutorPodsSnapshot()))
eventBufferScheduler.tick(1000, TimeUnit.MILLISECONDS)
// Don't repeat snapshots
assert(receivedSnapshots1 === Seq(
ExecutorPodsSnapshot(),
ExecutorPodsSnapshot(Seq(podWithIndex(1)), 0)))
assert(receivedSnapshots2 === Seq(
ExecutorPodsSnapshot(),
ExecutorPodsSnapshot(Seq(podWithIndex(1)), 0)))
pushPodWithIndex(2)
pushPodWithIndex(3)
eventBufferScheduler.tick(1000, TimeUnit.MILLISECONDS)
assert(receivedSnapshots1 === Seq(
ExecutorPodsSnapshot(),
ExecutorPodsSnapshot(Seq(podWithIndex(1)), 0),
ExecutorPodsSnapshot(Seq(podWithIndex(1), podWithIndex(2)), 0),
ExecutorPodsSnapshot(Seq(podWithIndex(1), podWithIndex(2), podWithIndex(3)), 0)))
assert(receivedSnapshots2 === Seq(
ExecutorPodsSnapshot(),
ExecutorPodsSnapshot(Seq(podWithIndex(1)), 0)))
eventBufferScheduler.tick(1000, TimeUnit.MILLISECONDS)
assert(receivedSnapshots1 === Seq(
ExecutorPodsSnapshot(),
ExecutorPodsSnapshot(Seq(podWithIndex(1)), 0),
ExecutorPodsSnapshot(Seq(podWithIndex(1), podWithIndex(2)), 0),
ExecutorPodsSnapshot(Seq(podWithIndex(1), podWithIndex(2), podWithIndex(3)), 0)))
assert(receivedSnapshots1 === receivedSnapshots2)
}
test("Even without sending events, initially receive an empty buffer.") {
val receivedInitialSnapshot = new AtomicReference[Seq[ExecutorPodsSnapshot]](null)
eventQueueUnderTest.addSubscriber(1000) {
receivedInitialSnapshot.set
}
assert(receivedInitialSnapshot.get == null)
eventBufferScheduler.runUntilIdle()
assert(receivedInitialSnapshot.get === Seq(ExecutorPodsSnapshot()))
}
test("Replacing the snapshot passes the new snapshot to subscribers.") {
val receivedSnapshots = mutable.Buffer.empty[ExecutorPodsSnapshot]
eventQueueUnderTest.addSubscriber(1000) {
receivedSnapshots ++= _
}
eventQueueUnderTest.updatePod(podWithIndex(1))
eventBufferScheduler.tick(1000, TimeUnit.MILLISECONDS)
assert(receivedSnapshots === Seq(
ExecutorPodsSnapshot(),
ExecutorPodsSnapshot(Seq(podWithIndex(1)), 0)))
clock.advance(100)
eventQueueUnderTest.replaceSnapshot(Seq(podWithIndex(2)))
eventBufferScheduler.tick(1000, TimeUnit.MILLISECONDS)
assert(receivedSnapshots === Seq(
ExecutorPodsSnapshot(),
ExecutorPodsSnapshot(Seq(podWithIndex(1)), 0),
ExecutorPodsSnapshot(Seq(podWithIndex(2)), 100)))
}
private def pushPodWithIndex(index: Int): Unit =
eventQueueUnderTest.updatePod(podWithIndex(index))
private def podWithIndex(index: Int): Pod =
new PodBuilder()
.editOrNewMetadata()
.withName(s"pod-$index")
.addToLabels(SPARK_EXECUTOR_ID_LABEL, index.toString)
.endMetadata()
.editOrNewStatus()
.withPhase("running")
.endStatus()
.build()
}
|
maropu/spark
|
resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsSnapshotsStoreSuite.scala
|
Scala
|
apache-2.0
| 5,628 |
package pl.project13.scala.akka.raft.cluster
import protocol._
import akka.cluster.{Member, Cluster}
import akka.cluster.ClusterEvent._
import concurrent.duration._
import pl.project13.scala.akka.raft.config.{RaftConfig, RaftConfiguration}
import pl.project13.scala.akka.raft.protocol._
import akka.actor._
/**
* Akka cluster ready [[pl.project13.scala.akka.raft.RaftActor]].
*
* '''Requires cluster node role: "raft"'''
*
* In order to guarantee that raft is running on exactly the nodes in the cluster you want it to,
* a Node on which a ClusterRaftActor can start MUST have the role `"raft"`, otherwise it will fail to initialize.
* The role validation can be turned off, in case you want to start raft on all available nodes (without looking at the
* presence of the "raft" role), but it is discouraged to do so.
*
* @param keepInitUntilFound keeps underlying `raftActor` in `Init` state, until this number of other raft actors has been auto-discovered
*/
// todo change the impl to make this REALLY transparent
class ClusterRaftActor(raftActor: ActorRef, keepInitUntilFound: Int) extends Actor with ActorLogging
with ClusterRaftGrouping {
val cluster = Cluster(context.system)
val raftConfig = RaftConfiguration(context.system)
checkIfRunningOnRaftNodeOrStop(raftConfig, cluster)
import context.dispatcher
val identifyTimeout = raftConfig.clusterAutoDiscoveryIdentifyTimeout
/**
   * Used to keep track of whether we still need to retry sending Identify to an address.
   * Once an address is removed from this set, it has responded with at least one ActorIdentity.
*/
private var awaitingIdentifyFrom = Set.empty[Address]
override def preStart(): Unit = {
super.preStart()
log.info("Joining new raft actor to cluster, will watch {}", raftActor.path)
// ClusterRaftActor will react with termination, if the raftActor terminates
context watch raftActor
    // tell the raft actor that this one is its "outside world representative"
raftActor ! AssignClusterSelf(self)
cluster.subscribe(self, initialStateMode = InitialStateAsEvents, classOf[MemberEvent], classOf[UnreachableMember])
}
override def postStop(): Unit = {
context unwatch raftActor
cluster unsubscribe self
super.postStop()
}
def receive = {
// members joining
case MemberUp(member) if isRaftNode(member) =>
log.info("Node is Up: {}, selecting and adding actors to Raft cluster..", member.address)
tryIdentifyRaftMembers(member.address, RaftMembersIdentifyTimedOut(member.address, raftConfig.clusterAutoDiscoveryRetryCount))
case MemberUp(member) =>
log.debug("Another node joined, but it's does not have a [{}] role, ignoring it.", raftGroupRole)
// identifying new members ------------------
case ActorIdentity(address: Address, Some(raftActorRef)) =>
log.info("Adding actor {} to Raft cluster, from address: {}", raftActorRef, address)
// we got at-least-one response, if we get more that's good, but no need to retry
awaitingIdentifyFrom -= address
raftActor ! RaftMemberAdded(raftActorRef, keepInitUntilFound)
case ActorIdentity(address: Address, None) =>
log.warning("Unable to find any raft-actors on node: {}", address)
awaitingIdentifyFrom -= address // == got at-least-one response
case timedOut: RaftMembersIdentifyTimedOut
if timedOut.shouldRetry && awaitingIdentifyFrom.contains(timedOut.address) =>
log.warning("Did not hear back for Identify call to {}, will try again {} more times...", timedOut.address, timedOut.retryMoreTimes)
tryIdentifyRaftMembers(timedOut.address, timedOut.forRetry) // todo enable handling of these messages in any state, extend clustermanagementBehavior!
case timedOut: RaftMembersIdentifyTimedOut =>
log.debug("Did hear back from {}, stopping retry", timedOut.address)
// end of identifying new members -----------
// members going away
case UnreachableMember(member) =>
log.info("Node detected as unreachable: {}", member)
// todo remove from raft ???
case MemberRemoved(member, previousStatus) if member == self =>
log.info("This member was removed from cluster, stopping self (prev status: {})", previousStatus)
context stop self
case MemberRemoved(member, previousStatus) =>
log.info("Member is Removed: {} after {}", member.address, previousStatus)
// todo remove from raft ???
case _: MemberEvent =>
// ignore
case Terminated(watchedActor) if watchedActor == raftActor /* sanity check, really needed? */ =>
context stop self
case msg =>
// all other messages, we proxy through to the RaftActor, it will handle the rest
raftActor.tell(msg, sender())
}
private def tryIdentifyRaftMembers(address: Address, onIdentityTimeout: RaftMembersIdentifyTimedOut) {
val memberSelection = context.actorSelection(raftMembersPath(address))
// we need a response from this node
awaitingIdentifyFrom += address
context.system.scheduler.scheduleOnce(identifyTimeout, self, onIdentityTimeout)
memberSelection ! Identify(address)
}
private def isRaftNode(member: Member) =
member.roles contains raftGroupRole
/**
* If `check-for-raft-cluster-node-role` is enabled, will check if running on a node with the `"raft"` role.
   * If not running on a `"raft"` node, it logs a warning and stops this actor.
*/
protected def checkIfRunningOnRaftNodeOrStop(config: RaftConfig, cluster: Cluster) {
if (!cluster.selfRoles.contains(raftGroupRole)) {
log.warning(
s"""Tried to initialize ${getClass.getSimpleName} on cluster node (${cluster.selfAddress}), but it's roles: """ +
s"""${cluster.selfRoles} did not include the required ${raftGroupRole}role, so stopping this actor. """ +
"""Please verify your actor spawning logic, or update your configuration with akka.cluster.roles = [ "raft" ] for this node."""
)
context.system.stop(self)
}
}
}
object ClusterRaftActor {
def props(raftActor: ActorRef, keepInitUntilMembers: Int) = {
Props(classOf[ClusterRaftActor], raftActor, keepInitUntilMembers)
}
}
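// Spawn sketch (assumes an ActorSystem whose cluster node carries the "raft" role and an
// already-created raftActor; the actor name is illustrative):
//   val clusterRaft = system.actorOf(
//     ClusterRaftActor.props(raftActor, keepInitUntilMembers = 3), "cluster-raft")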
|
ktoso/akka-raft
|
src/main/scala/pl/project13/scala/akka/raft/cluster/ClusterRaftActor.scala
|
Scala
|
apache-2.0
| 6,164 |
package edu.neu.coe.scala.parse
import org.scalatest.{ FlatSpec, Matchers }
import edu.neu.coe.scala.numerics.Rational
import scala.util._
import edu.neu.coe.scala.numerics.Rational._
/**
* @author scalaprof
*/
class ExpressionParserSpec extends FlatSpec with Matchers {
"DoubleExpressionParser(1)" should "be 1.0" in {
val parser = DoubleExpressionParser
val r = parser.parseAll(parser.expr, "1")
r should matchPattern { case parser.Success(_, _) => }
r.get.value should matchPattern { case Success(1.0) => }
}
"DoubleExpressionParser(1+1)" should "be 2.0" in {
val parser = DoubleExpressionParser
val r = parser.parseAll(parser.expr, "1+1")
r should matchPattern { case parser.Success(_, _) => }
r.get.value should matchPattern { case Success(2.0) => }
}
"DoubleExpressionParser(1*2+1)" should "be 3.0" in {
val parser = DoubleExpressionParser
val r = parser.parseAll(parser.expr, "1*2+1")
r should matchPattern { case parser.Success(_, _) => }
r.get.value should matchPattern { case Success(3.0) => }
}
"DoubleExpressionParser(1*2+1-1.5)" should "be 1.5" in {
val parser = DoubleExpressionParser
val r = parser.parseAll(parser.expr, "1*2+1-1.5")
r should matchPattern { case parser.Success(_, _) => }
r.get.value should matchPattern { case Success(1.5) => }
}
"DoubleExpressionParser(1/0)" should "be infinite" in {
val parser = DoubleExpressionParser
val r = parser.parseAll(parser.expr, "1/0")
r.get.value should matchPattern { case Success(Double.PositiveInfinity) => }
}
"DoubleExpressionParser(1*2+1-3/2)" should "be 1.5" in {
val parser = DoubleExpressionParser
val r = parser.parseAll(parser.expr, "1*2+1-3/2")
r should matchPattern { case parser.Success(_, _) => }
r.get.value should matchPattern { case Success(1.5) => }
}
"DoubleExpressionParser(1*2+1-pi/2)" should "fail" in {
val parser = DoubleExpressionParser
val r = parser.parseAll(parser.expr, "1*2+1-pi/2")
r should matchPattern { case parser.Failure("factor", _) => }
}
"DoubleExpressionParser(1?2)" should "fail" in {
val parser = DoubleExpressionParser
val r = parser.parseAll(parser.expr, "(1?2)")
r should matchPattern { case parser.Failure("`)' expected but `?' found", _) => }
}
"RationalExpressionParser(1)" should "be 1" in {
val parser = RationalExpressionParser
val r = parser.parseAll(parser.expr, "1")
r should matchPattern { case parser.Success(_, _) => }
r.get.value should matchPattern { case Success(Rational.one) => }
}
"RationalExpressionParser(1+1)" should "be 2/1" in {
val parser = RationalExpressionParser
val r = parser.parseAll(parser.expr, "1+1")
r should matchPattern { case parser.Success(_, _) => }
r.get.value should matchPattern { case Success(Rational(2,1)) => }
}
"RationalExpressionParser(1*2+1)" should "be 3/1" in {
val parser = RationalExpressionParser
val r = parser.parseAll(parser.expr, "1*2+1")
r should matchPattern { case parser.Success(_, _) => }
r.get.value should matchPattern { case Success(Rational(3,1)) => }
}
"RationalExpressionParser(1*2+1-3/2)" should "be 3/2" in {
val parser = RationalExpressionParser
val r = parser.parseAll(parser.expr, "1*2+1-3/2")
r should matchPattern { case parser.Success(_, _) => }
r.get.value should matchPattern { case Success(Rational(3,2)) => }
}
"RationalExpressionParser(1/0)" should "be infinite" in {
val parser = RationalExpressionParser
val r = parser.parseAll(parser.expr, "1/0")
r.get.value should matchPattern { case Success(Rational.infinity) => }
}
"(" should "fail" in {
val parser = DoubleExpressionParser
val r = parser.parseAll(parser.expr, "(")
r should matchPattern { case parser.Failure("factor", _) => }
}
"1+2=2" should "fail" in {
val parser = DoubleExpressionParser
val r = parser.parseAll(parser.expr, "1+2=2")
r should matchPattern { case parser.Failure("expr", _) => }
}
"IntExpressionParser(3/2)" should "fail" in {
val parser = IntExpressionParser
val r = parser.parseAll(parser.expr, "3/2")
an [IllegalArgumentException] should be thrownBy r.get.value
}
}
|
rchillyard/Scalaprof
|
Numerics/src/test/scala/edu/neu/coe/scala/parse/ExpressionParserSpec.scala
|
Scala
|
gpl-2.0
| 4,236 |
package filodb.spark
import akka.actor.ActorSystem
import com.typesafe.config.{Config, ConfigFactory}
import org.apache.spark.SparkContext
import filodb.cassandra.columnstore.CassandraColumnStore
import filodb.cassandra.metastore.CassandraMetaStore
import filodb.coordinator.DefaultCoordinatorSetup
object FiloSetup extends DefaultCoordinatorSetup {
import collection.JavaConverters._
// The global config of filodb with cassandra, columnstore, etc. sections
var config: Config = _
lazy val system = ActorSystem("filo-spark")
lazy val columnStore = new CassandraColumnStore(config)
lazy val metaStore = new CassandraMetaStore(config.getConfig("cassandra"))
def init(filoConfig: Config): Unit = {
config = filoConfig
coordinatorActor // Force coordinatorActor to start
}
def init(context: SparkContext): Unit = init(configFromSpark(context))
def configFromSpark(context: SparkContext): Config = {
val conf = context.getConf
val filoOverrides = conf.getAll.collect { case (k, v) if k.startsWith("filodb") =>
k.replace("filodb.", "") -> v
}
ConfigFactory.parseMap(filoOverrides.toMap.asJava)
.withFallback(ConfigFactory.load)
}
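  // For example, a SparkConf entry "filodb.cassandra.hosts" -> "host1" surfaces here as the
  // config key "cassandra.hosts" (illustrative key/value; any "filodb."-prefixed entry works).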
}
|
YanjieGao/FiloDB
|
spark/src/main/scala/filodb.spark/FiloSetup.scala
|
Scala
|
apache-2.0
| 1,287 |
package extracells.integration.opencomputers
import scala.collection.JavaConversions._
import appeng.api.AEApi
import appeng.api.config.Actionable
import appeng.api.implementations.tiles.IWirelessAccessPoint
import appeng.api.networking.security.MachineSource
import appeng.api.networking.storage.IStorageGrid
import appeng.api.networking.{IGrid, IGridHost, IGridNode}
import appeng.api.storage.IMEMonitor
import appeng.api.storage.data.{IAEFluidStack, IAEItemStack}
import appeng.api.util.WorldCoord
import appeng.tile.misc.TileSecurity
import li.cil.oc.api.Network
import li.cil.oc.api.driver.EnvironmentHost
import li.cil.oc.api.internal.{Agent, Database, Drone, Robot}
import li.cil.oc.api.machine.{Arguments, Callback, Context}
import li.cil.oc.api.network._
import li.cil.oc.api.prefab.ManagedEnvironment
import li.cil.oc.integration.{appeng, ec}
import li.cil.oc.server.network.Component
import net.minecraft.item.ItemStack
import net.minecraftforge.common.util.ForgeDirection
import net.minecraftforge.fluids.FluidContainerRegistry
class UpgradeAE(host: EnvironmentHost) extends ManagedEnvironment with appeng.NetworkControl[TileSecurity] with ec.NetworkControl[TileSecurity]{
val robot: Robot =
if (host.isInstanceOf[Robot])
host.asInstanceOf[Robot]
else
null
val drone: Drone =
if (host.isInstanceOf[Drone])
host.asInstanceOf[Drone]
else
null
var isActive = false
val agent: Agent = host.asInstanceOf[Agent]
  setNode(Network.newNode(this, Visibility.Network).withConnector().withComponent("upgrade_me", Visibility.Neighbors).create())
def getComponent: ItemStack = {
if (robot != null)
return robot.getStackInSlot(robot.componentSlot(node.address))
else if(drone != null){
val i = drone.internalComponents.iterator
while (i.hasNext){
val item = i.next
if(item != null && item.getItem == ItemUpgradeAE)
return item
}
}
null
}
def getSecurity: IGridHost = {
if (host.world.isRemote) return null
val component = getComponent
val sec = AEApi.instance.registries.locatable.getLocatableBy(getAEKey(component)).asInstanceOf[IGridHost]
if(checkRange(component, sec))
sec
else
null
}
def checkRange(stack: ItemStack, sec: IGridHost): Boolean = {
if (sec == null) return false
val gridNode: IGridNode = sec.getGridNode(ForgeDirection.UNKNOWN)
if (gridNode == null) return false
val grid = gridNode.getGrid
if(grid == null) return false
stack.getItemDamage match{
case 0 =>
grid.getMachines(AEApi.instance.definitions.blocks.wireless.maybeEntity.get.asInstanceOf[Class[_ <: IGridHost]]).iterator.hasNext
case 1 =>
val gridBlock = gridNode.getGridBlock
if (gridBlock == null) return false
val loc = gridBlock.getLocation
if (loc == null) return false
for (node <- grid.getMachines(AEApi.instance.definitions.blocks.wireless.maybeEntity.get.asInstanceOf[Class[_ <: IGridHost]])) {
val accessPoint: IWirelessAccessPoint = node.getMachine.asInstanceOf[IWirelessAccessPoint]
val distance: WorldCoord = accessPoint.getLocation.subtract(agent.xPosition.toInt, agent.yPosition.toInt, agent.zPosition.toInt)
val squaredDistance: Int = distance.x * distance.x + distance.y * distance.y + distance.z * distance.z
val range = accessPoint.getRange
if (squaredDistance <= range * range) return true
}
false
case _ =>
val gridBlock = gridNode.getGridBlock
if (gridBlock == null) return false
val loc = gridBlock.getLocation
if (loc == null) return false
for (node <- grid.getMachines(AEApi.instance.definitions.blocks.wireless.maybeEntity.get.asInstanceOf[Class[_ <: IGridHost]])) {
val accessPoint: IWirelessAccessPoint = node.getMachine.asInstanceOf[IWirelessAccessPoint]
val distance: WorldCoord = accessPoint.getLocation.subtract(agent.xPosition.toInt, agent.yPosition.toInt, agent.zPosition.toInt)
val squaredDistance: Int = distance.x * distance.x + distance.y * distance.y + distance.z * distance.z
val range = accessPoint.getRange / 2
if (squaredDistance <= range * range) return true
}
false
}
}
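  // Range-check sketch: distances are compared squared to avoid a sqrt, e.g. for an access
  // point with range 16 a position is in range when dx*dx + dy*dy + dz*dz <= 16*16. The item
  // damage selects the policy: 0 only requires some access point on the grid, 1 uses the full
  // range, anything else halves the range first.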
def getGrid: IGrid = {
if (host.world.isRemote) return null
val securityTerminal = getSecurity
if (securityTerminal == null) return null
val gridNode: IGridNode = securityTerminal.getGridNode(ForgeDirection.UNKNOWN)
if (gridNode == null) return null
gridNode.getGrid
}
def getAEKey(stack: ItemStack): Long = {
try {
return WirelessHandlerUpgradeAE.getEncryptionKey(stack).toLong
}
    catch {
      case _: Throwable => // ignore malformed or missing keys and fall through to the default
    }
0L
}
override def tile: TileSecurity = {
val sec = getSecurity
if (sec == null)
throw new SecurityException("No Security Station")
val node = sec.getGridNode(ForgeDirection.UNKNOWN)
if (node == null) throw new SecurityException("No Security Station")
val gridBlock = node.getGridBlock
if (gridBlock == null) throw new SecurityException("No Security Station")
val coord = gridBlock.getLocation
if (coord == null) throw new SecurityException("No Security Station")
val tileSecurity = coord.getWorld.getTileEntity(coord.x, coord.y, coord.z).asInstanceOf[TileSecurity]
if (tileSecurity == null) throw new SecurityException("No Security Station")
tileSecurity
}
def getFluidInventory: IMEMonitor[IAEFluidStack] = {
val grid = getGrid
if (grid == null) return null
val storage: IStorageGrid = grid.getCache(classOf[IStorageGrid])
if (storage == null) return null
storage.getFluidInventory
}
def getItemInventory: IMEMonitor[IAEItemStack] = {
val grid = getGrid
if (grid == null) return null
val storage: IStorageGrid = grid.getCache(classOf[IStorageGrid])
if (storage == null) return null
storage.getItemInventory
}
@Callback(doc = "function([number:amount]):number -- Transfer selected items to your ae system.")
def sendItems(context: Context, args: Arguments): Array[AnyRef] = {
val selected = agent.selectedSlot
val invRobot = agent.mainInventory
if (invRobot.getSizeInventory <= 0) return Array(0.underlying.asInstanceOf[AnyRef])
val stack = invRobot.getStackInSlot(selected)
val inv = getItemInventory
if (stack == null || inv == null) return Array(0.underlying.asInstanceOf[AnyRef])
val amount = Math.min(args.optInteger(0, 64), stack.stackSize)
val stack2 = stack.copy
stack2.stackSize = amount
val notInjectet = inv.injectItems(AEApi.instance.storage.createItemStack(stack2), Actionable.MODULATE, new MachineSource(tile))
if (notInjectet == null){
stack.stackSize -= amount
if (stack.stackSize <= 0)
invRobot.setInventorySlotContents(selected, null)
else
invRobot.setInventorySlotContents(selected, stack)
return Array(amount.underlying.asInstanceOf[AnyRef])
}else{
stack.stackSize = stack.stackSize - amount + notInjectet.getStackSize.toInt
if (stack.stackSize <= 0)
invRobot.setInventorySlotContents(selected, null)
else
invRobot.setInventorySlotContents(selected, stack)
return Array((stack2.stackSize - notInjectet.getStackSize).underlying.asInstanceOf[AnyRef])
}
}
@Callback(doc = "function(database:address, entry:number[, number:amount]):number -- Get items from your ae system.")
def requestItems(context: Context, args: Arguments): Array[AnyRef] = {
val address = args.checkString(0)
val entry = args.checkInteger(1)
val amount = args.optInteger(2, 64)
val selected = agent.selectedSlot
val invRobot = agent.mainInventory
if (invRobot.getSizeInventory <= 0) return Array(0.underlying.asInstanceOf[AnyRef])
val inv = getItemInventory
println(inv)
if (inv == null) return Array(0.underlying.asInstanceOf[AnyRef])
val n: Node = node.network.node(address)
if (n == null) throw new IllegalArgumentException("no such component")
if (!(n.isInstanceOf[Component])) throw new IllegalArgumentException("no such component")
val component: Component = n.asInstanceOf[Component]
val env: Environment = n.host
if (!(env.isInstanceOf[Database])) throw new IllegalArgumentException("not a database")
val database: Database = env.asInstanceOf[Database]
val sel = invRobot.getStackInSlot(selected)
val inSlot =
if (sel == null)
0
else
sel.stackSize
val maxSize =
if (sel == null)
64
else
sel.getMaxStackSize
val stack = database.getStackInSlot(entry - 1)
if(stack == null) return Array(0.underlying.asInstanceOf[AnyRef])
stack.stackSize = Math.min(amount, maxSize - inSlot)
val stack2 = stack.copy
stack2.stackSize = 1
val sel2 =
if (sel != null) {
val sel3 = sel.copy
sel3.stackSize = 1
sel3
}else
null
if(sel != null && !ItemStack.areItemStacksEqual(sel2, stack2)) return Array(0.underlying.asInstanceOf[AnyRef])
val extracted = inv.extractItems(AEApi.instance.storage.createItemStack(stack), Actionable.MODULATE, new MachineSource(tile))
if(extracted == null) return Array(0.underlying.asInstanceOf[AnyRef])
val ext = extracted.getStackSize.toInt
stack.stackSize = inSlot + ext
invRobot.setInventorySlotContents(selected, stack)
return Array(ext.underlying.asInstanceOf[AnyRef])
}
@Callback(doc = "function([number:amount]):number -- Transfer selecte fluid to your ae system.")
def sendFluids(context: Context, args: Arguments): Array[AnyRef] = {
val selected = agent.selectedTank
val tanks = agent.tank
if (tanks.tankCount <= 0) return Array(0.underlying.asInstanceOf[AnyRef])
val tank = tanks.getFluidTank(selected)
val inv = getFluidInventory
if (tank == null || inv == null || tank.getFluid == null) return Array(0.underlying.asInstanceOf[AnyRef])
val amount = Math.min(args.optInteger(0, tank.getCapacity), tank.getFluidAmount)
val fluid = tank.getFluid
val fluid2 = fluid.copy
fluid2.amount = amount
val notInjectet = inv.injectItems(AEApi.instance.storage.createFluidStack(fluid2), Actionable.MODULATE, new MachineSource(tile))
if (notInjectet == null){
tank.drain(amount, true)
return Array(amount.underlying.asInstanceOf[AnyRef])
}else{
tank.drain(amount - notInjectet.getStackSize.toInt, true)
return Array((amount - notInjectet.getStackSize).underlying.asInstanceOf[AnyRef])
}
}
@Callback(doc = "function(database:address, entry:number[, number:amount]):number -- Get fluid from your ae system.")
def requestFluids(context: Context, args: Arguments): Array[AnyRef] = {
val address = args.checkString(0)
val entry = args.checkInteger(1)
val amount = args.optInteger(2, FluidContainerRegistry.BUCKET_VOLUME)
val tanks = agent.tank
val selected = agent.selectedTank
if (tanks.tankCount <= 0) return Array(0.underlying.asInstanceOf[AnyRef])
val tank = tanks.getFluidTank(selected)
val inv = getFluidInventory
if (tank == null || inv == null) return Array(0.underlying.asInstanceOf[AnyRef])
val n: Node = node.network.node(address)
if (n == null) throw new IllegalArgumentException("no such component")
if (!(n.isInstanceOf[Component])) throw new IllegalArgumentException("no such component")
val component: Component = n.asInstanceOf[Component]
val env: Environment = n.host
if (!(env.isInstanceOf[Database])) throw new IllegalArgumentException("not a database")
val database: Database = env.asInstanceOf[Database]
    val fluid = FluidContainerRegistry.getFluidForFilledItem(database.getStackInSlot(entry - 1))
    if (fluid == null) return Array(0.underlying.asInstanceOf[AnyRef])
    fluid.amount = amount
val fluid2 = fluid.copy()
fluid2.amount = tank.fill(fluid, false)
if (fluid2.amount == 0) return Array(0.underlying.asInstanceOf[AnyRef])
val extracted = inv.extractItems(AEApi.instance.storage.createFluidStack(fluid2), Actionable.MODULATE, new MachineSource(tile))
    if (extracted == null) return Array(0.underlying.asInstanceOf[AnyRef])
return Array(tank.fill(extracted.getFluidStack, true).underlying.asInstanceOf[AnyRef])
}
@Callback(doc = "function():boolean -- Return true if the card is linket to your ae network.")
def isLinked(context: Context, args: Arguments): Array[AnyRef] = {
val isLinked = getGrid != null
Array(boolean2Boolean(isLinked))
}
override def update() {
super.update()
if (host.world.getTotalWorldTime % 10 == 0 && isActive) {
if (!node.asInstanceOf[Connector].tryChangeBuffer(-getEnergy)) {
isActive = false
}
}
}
def getEnergy = {
val c = getComponent
if (c == null)
      0.0
    else
      c.getItemDamage match {
        case 0 => 0.6
        case 1 => 0.3
        case _ => 0.05
      }
}
override def onMessage(message: Message) {
super.onMessage(message)
if (message.name == "computer.stopped") {
isActive = false
}
else if (message.name == "computer.started") {
isActive = true
}
}
}
|
AmethystAir/ExtraCells2
|
src/main/scala/extracells/integration/opencomputers/UpgradeAE.scala
|
Scala
|
mit
| 13,219 |
package com.shocktrade.common.models.contest
import scala.scalajs.js
/**
* Represents a Portfolio-like model
* @author Lawrence Daniels <[email protected]>
*/
trait PortfolioLike extends js.Object {
def playerID: js.UndefOr[String]
def active: js.UndefOr[Boolean]
def perks: js.UndefOr[js.Array[String]]
def cashAccount: js.UndefOr[CashAccount]
def marginAccount: js.UndefOr[MarginAccount]
def orders: js.UndefOr[js.Array[_ <: OrderLike]]
def closedOrders: js.UndefOr[js.Array[_ <: OrderLike]]
def performance: js.UndefOr[js.Array[_ <: PerformanceLike]]
def positions: js.UndefOr[js.Array[_ <: PositionLike]]
}
|
ldaniels528/shocktrade.js
|
app/shared/common/src/main/scala/com/shocktrade/common/models/contest/PortfolioLike.scala
|
Scala
|
apache-2.0
| 656 |
package com.github.mrpowers.spark.daria.utils
import org.apache.spark.sql.SparkSession
object DirHelpers {
lazy val spark: SparkSession = {
SparkSession
.builder()
.master("local")
.appName("spark session")
.getOrCreate()
}
def numBytes(dirname: String): Long = {
val filePath = new org.apache.hadoop.fs.Path(dirname)
val fileSystem = filePath.getFileSystem(spark.sparkContext.hadoopConfiguration)
fileSystem.getContentSummary(filePath).getLength
}
def bytesToGb(bytes: Long): Long = {
bytes / 1073741824L
}
def num1GBPartitions(gigabytes: Long): Int = {
if (gigabytes == 0L) 1 else gigabytes.toInt
}
}
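// Usage sketch (hypothetical path and DataFrame; aims at roughly 1 GB per output partition):
//   val gb = DirHelpers.bytesToGb(DirHelpers.numBytes("/data/events"))
//   df.repartition(DirHelpers.num1GBPartitions(gb)).write.parquet("/data/events_compacted")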
|
MrPowers/spark-daria
|
src/main/scala/com/github/mrpowers/spark/daria/utils/DirHelpers.scala
|
Scala
|
mit
| 676 |
package com.krrrr38.mackerel4s
import org.scalatest.{ FunSpec, Matchers }
class MackerelClientSettingSpec extends FunSpec with Matchers {
val setting = MackerelClientSetting
describe("client setting") {
it("contain valid version") {
setting.API_VERSION shouldBe "v0"
}
it("contain valid url") {
setting.BASE_URL.startsWith("https://mackerel.io/api/") shouldBe true
}
it("contain valid header api key") {
setting.AUTH_HEADER_KEY shouldBe "X-Api-Key"
}
it("contain valid header User-Agent key") {
setting.USER_AGENT_KEY shouldBe "User-Agent"
}
}
}
|
krrrr38/mackerel-client-scala
|
src/test/scala/com/krrrr38/mackerel4s/MackerelClientSettingSpec.scala
|
Scala
|
mit
| 612 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.utils
import java.io._
import java.nio.channels._
import java.nio.file.StandardOpenOption
/**
* A file lock a la flock/funlock
*
* The given path will be created and opened if it doesn't exist.
*/
class FileLock(val file: File) extends Logging {
private val channel = FileChannel.open(file.toPath, StandardOpenOption.CREATE, StandardOpenOption.READ,
StandardOpenOption.WRITE)
private var flock: java.nio.channels.FileLock = null
/**
* Lock the file or throw an exception if the lock is already held
*/
def lock(): Unit = {
this synchronized {
trace(s"Acquiring lock on ${file.getAbsolutePath}")
flock = channel.lock()
}
}
/**
* Try to lock the file and return true if the locking succeeds
*/
def tryLock(): Boolean = {
this synchronized {
trace(s"Acquiring lock on ${file.getAbsolutePath}")
try {
// weirdly this method will return null if the lock is held by another
// process, but will throw an exception if the lock is held by this process
// so we have to handle both cases
flock = channel.tryLock()
flock != null
} catch {
case _: OverlappingFileLockException => false
}
}
}
/**
* Unlock the lock if it is held
*/
def unlock(): Unit = {
this synchronized {
trace(s"Releasing lock on ${file.getAbsolutePath}")
if(flock != null)
flock.release()
}
}
/**
* Destroy this lock, closing the associated FileChannel
*/
def destroy() = {
this synchronized {
unlock()
channel.close()
}
}
}
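// Usage sketch (hypothetical path): take the lock without blocking, and always release it.
//   val lock = new FileLock(new File("/tmp/app.lock"))
//   if (lock.tryLock()) {
//     try { /* exclusive work */ } finally { lock.destroy() }
//   } else {
//     // another process holds the lock
//   }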
|
guozhangwang/kafka
|
core/src/main/scala/kafka/utils/FileLock.scala
|
Scala
|
apache-2.0
| 2,419 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.api
import org.apache.kafka.common.config.SaslConfigs
import org.apache.kafka.common.security.auth.SecurityProtocol
import org.apache.kafka.common.errors.{GroupAuthorizationException, TopicAuthorizationException}
import org.junit.jupiter.api.{BeforeEach, Test, TestInfo, Timeout}
import org.junit.jupiter.api.Assertions.{assertEquals, assertTrue, fail}
import scala.collection.immutable.List
import scala.jdk.CollectionConverters._
abstract class SaslEndToEndAuthorizationTest extends EndToEndAuthorizationTest {
override protected def securityProtocol = SecurityProtocol.SASL_SSL
override protected val serverSaslProperties = Some(kafkaServerSaslProperties(kafkaServerSaslMechanisms, kafkaClientSaslMechanism))
override protected val clientSaslProperties = Some(kafkaClientSaslProperties(kafkaClientSaslMechanism))
protected def kafkaClientSaslMechanism: String
protected def kafkaServerSaslMechanisms: List[String]
@BeforeEach
override def setUp(testInfo: TestInfo): Unit = {
// create static config including client login context with credentials for JaasTestUtils 'client2'
startSasl(jaasSections(kafkaServerSaslMechanisms, Option(kafkaClientSaslMechanism), Both))
// set dynamic properties with credentials for JaasTestUtils 'client1' so that dynamic JAAS configuration is also
// tested by this set of tests
val clientLoginContext = jaasClientLoginModule(kafkaClientSaslMechanism)
producerConfig.put(SaslConfigs.SASL_JAAS_CONFIG, clientLoginContext)
consumerConfig.put(SaslConfigs.SASL_JAAS_CONFIG, clientLoginContext)
adminClientConfig.put(SaslConfigs.SASL_JAAS_CONFIG, clientLoginContext)
super.setUp(testInfo)
}
/**
* Test with two consumers, each with different valid SASL credentials.
* The first consumer succeeds because it is allowed by the ACL,
* the second one connects ok, but fails to consume messages due to the ACL.
*/
@Timeout(15)
@Test
def testTwoConsumersWithDifferentSaslCredentials(): Unit = {
setAclsAndProduce(tp)
val consumer1 = createConsumer()
// consumer2 retrieves its credentials from the static JAAS configuration, so we test also this path
consumerConfig.remove(SaslConfigs.SASL_JAAS_CONFIG)
consumerConfig.remove(SaslConfigs.SASL_CLIENT_CALLBACK_HANDLER_CLASS)
val consumer2 = createConsumer()
consumer1.assign(List(tp).asJava)
consumer2.assign(List(tp).asJava)
consumeRecords(consumer1, numRecords)
try {
consumeRecords(consumer2)
fail("Expected exception as consumer2 has no access to topic or group")
} catch {
// Either exception is possible depending on the order that the first Metadata
// and FindCoordinator requests are received
case e: TopicAuthorizationException => assertTrue(e.unauthorizedTopics.contains(topic))
case e: GroupAuthorizationException => assertEquals(group, e.groupId)
}
confirmReauthenticationMetrics()
}
}
|
TiVo/kafka
|
core/src/test/scala/integration/kafka/api/SaslEndToEndAuthorizationTest.scala
|
Scala
|
apache-2.0
| 3,780 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalatest.exceptions.TestFailedException
import matchers.BeMatcher
import matchers.MatchResult
import Matchers._
class ShouldBeMatcherSpec extends FunSpec with ReturnsNormallyThrowsAssertion {
class OddMatcher extends BeMatcher[Int] {
def apply(left: Int): MatchResult = {
MatchResult(
left % 2 == 1,
left.toString + " was even",
left.toString + " was odd"
)
}
}
val odd = new OddMatcher
val even = not (odd)
describe("The BeMatcher syntax") {
it("should do nothing if a BeMatcher matches") {
1 should be (odd)
2 should be (even)
// 1 shouldBe odd
// 2 shouldBe even
}
it("should throw TestFailedException if a BeMatcher does not match") {
val caught1 = intercept[TestFailedException] {
4 should be (odd)
// 4 shouldBe odd
}
assert(caught1.getMessage === "4 was even")
val caught2 = intercept[TestFailedException] {
5 should be (even)
// 5 shouldBe even
}
assert(caught2.getMessage === "5 was odd")
}
it("should do nothing if a BeMatcher does not match, when used with not") {
2 should not be (odd)
1 should not be (even)
22 should not (not (be (even)))
1 should not (not (be (odd)))
}
it("should throw TestFailedException if a BeMatcher matches, when used with not") {
val caught1 = intercept[TestFailedException] {
3 should not be (odd)
}
assert(caught1.getMessage === "3 was odd")
val caught2 = intercept[TestFailedException] {
6 should not be (even)
}
assert(caught2.getMessage === "6 was even")
val caught3 = intercept[TestFailedException] {
6 should not (not (be (odd)))
}
assert(caught3.getMessage === "6 was even")
}
it("should do nothing if a BeMatcher matches, when used in a logical-and expression") {
1 should (be (odd) and be (odd))
1 should (be (odd) and (be (odd)))
2 should (be (even) and be (even))
2 should (be (even) and (be (even)))
}
it("should throw TestFailedException if at least one BeMatcher does not match, when used in a logical-or expression") {
// both false
val caught1 = intercept[TestFailedException] {
2 should (be (odd) and be (odd))
}
assert(caught1.getMessage === "2 was even")
val caught2 = intercept[TestFailedException] {
2 should (be (odd) and (be (odd)))
}
assert(caught2.getMessage === "2 was even")
val caught3 = intercept[TestFailedException] {
1 should (be (even) and be (even))
}
assert(caught3.getMessage === "1 was odd")
val caught4 = intercept[TestFailedException] {
1 should (be (even) and (be (even)))
}
assert(caught4.getMessage === "1 was odd")
// first false
val caught5 = intercept[TestFailedException] {
1 should (be (even) and be (odd))
}
assert(caught5.getMessage === "1 was odd")
val caught6 = intercept[TestFailedException] {
1 should (be (even) and (be (odd)))
}
assert(caught6.getMessage === "1 was odd")
val caught7 = intercept[TestFailedException] {
2 should (be (odd) and be (even))
}
assert(caught7.getMessage === "2 was even")
val caught8 = intercept[TestFailedException] {
2 should (be (odd) and (be (even)))
}
assert(caught8.getMessage === "2 was even")
// TODO: Remember to try a BeMatcher[Any] one, to make sure it works on an Int
// second false
val caught9 = intercept[TestFailedException] {
1 should (be (odd) and be (even))
}
assert(caught9.getMessage === "1 was odd, but 1 was odd")
val caught10 = intercept[TestFailedException] {
1 should (be (odd) and (be (even)))
}
assert(caught10.getMessage === "1 was odd, but 1 was odd")
val caught11 = intercept[TestFailedException] {
2 should (be (even) and be (odd))
}
assert(caught11.getMessage === "2 was even, but 2 was even")
val caught12 = intercept[TestFailedException] {
2 should (be (even) and (be (odd)))
}
assert(caught12.getMessage === "2 was even, but 2 was even")
}
it("should do nothing if at least one BeMatcher matches, when used in a logical-or expression") {
// both true
1 should (be (odd) or be (odd))
1 should (be (odd) or (be (odd)))
2 should (be (even) or be (even))
2 should (be (even) or (be (even)))
// first false
1 should (be (even) or be (odd))
1 should (be (even) or (be (odd)))
2 should (be (odd) or be (even))
2 should (be (odd) or (be (even)))
// second false
1 should (be (odd) or be (even))
1 should (be (odd) or (be (even)))
2 should (be (even) or be (odd))
2 should (be (even) or (be (odd)))
}
it("should throw TestFailedException if a BeMatcher does not match, when used in a logical-or expression") {
val caught1 = intercept[TestFailedException] {
2 should (be (odd) or be (odd))
}
assert(caught1.getMessage === "2 was even, and 2 was even")
val caught2 = intercept[TestFailedException] {
2 should (be (odd) or (be (odd)))
}
assert(caught2.getMessage === "2 was even, and 2 was even")
val caught3 = intercept[TestFailedException] {
1 should (be (even) or be (even))
}
assert(caught3.getMessage === "1 was odd, and 1 was odd")
val caught4 = intercept[TestFailedException] {
1 should (be (even) or (be (even)))
}
assert(caught4.getMessage === "1 was odd, and 1 was odd")
}
it("should do nothing if a BeMatcher does not match, when used in a logical-and expression with not") {
2 should (not be (odd) and not be (odd))
2 should (not be (odd) and not (be (odd)))
2 should (not be (odd) and (not (be (odd))))
1 should (not be (even) and not be (even))
1 should (not be (even) and not (be (even)))
1 should (not be (even) and (not (be (even))))
}
it("should throw TestFailedException if at least one BeMatcher matches, when used in a logical-and expression with not") {
// both true
val caught1 = intercept[TestFailedException] {
1 should (not be (odd) and not be (odd))
}
assert(caught1.getMessage === "1 was odd")
val caught2 = intercept[TestFailedException] {
1 should (not be (odd) and not (be (odd)))
}
assert(caught2.getMessage === "1 was odd")
val caught3 = intercept[TestFailedException] {
1 should (not be (odd) and (not (be (odd))))
}
assert(caught3.getMessage === "1 was odd")
val caught4 = intercept[TestFailedException] {
2 should (not be (even) and not be (even))
}
assert(caught4.getMessage === "2 was even")
val caught5 = intercept[TestFailedException] {
2 should (not be (even) and not (be (even)))
}
assert(caught5.getMessage === "2 was even")
val caught6 = intercept[TestFailedException] {
2 should (not be (even) and (not (be (even))))
}
assert(caught6.getMessage === "2 was even")
// first false
val caught7 = intercept[TestFailedException] {
1 should (not be (even) and not be (odd))
}
assert(caught7.getMessage === "1 was odd, but 1 was odd")
val caught8 = intercept[TestFailedException] {
1 should (not be (even) and not (be (odd)))
}
assert(caught8.getMessage === "1 was odd, but 1 was odd")
val caught9 = intercept[TestFailedException] {
1 should (not be (even) and (not (be (odd))))
}
assert(caught9.getMessage === "1 was odd, but 1 was odd")
val caught10 = intercept[TestFailedException] {
2 should (not be (odd) and not be (even))
}
assert(caught10.getMessage === "2 was even, but 2 was even")
val caught11 = intercept[TestFailedException] {
2 should (not be (odd) and not (be (even)))
}
assert(caught11.getMessage === "2 was even, but 2 was even")
val caught12 = intercept[TestFailedException] {
2 should (not be (odd) and (not (be (even))))
}
assert(caught12.getMessage === "2 was even, but 2 was even")
// second false
val caught13 = intercept[TestFailedException] {
1 should (not be (odd) and not be (even))
}
assert(caught13.getMessage === "1 was odd")
val caught14 = intercept[TestFailedException] {
1 should (not be (odd) and not (be (even)))
}
assert(caught14.getMessage === "1 was odd")
val caught15 = intercept[TestFailedException] {
1 should (not be (odd) and (not (be (even))))
}
assert(caught15.getMessage === "1 was odd")
val caught16 = intercept[TestFailedException] {
2 should (not be (even) and not be (odd))
}
assert(caught16.getMessage === "2 was even")
val caught17 = intercept[TestFailedException] {
2 should (not be (even) and not (be (odd)))
}
assert(caught17.getMessage === "2 was even")
val caught18 = intercept[TestFailedException] {
2 should (not be (even) and (not (be (odd))))
}
assert(caught18.getMessage === "2 was even")
}
it("should do nothing if at least one BeMatcher doesn't match, when used in a logical-or expression when used with not") {
// both false
2 should (not be (odd) or not be (odd))
2 should (not be (odd) or not (be (odd)))
2 should (not be (odd) or (not (be (odd))))
1 should (not be (even) or not be (even))
1 should (not be (even) or not (be (even)))
1 should (not be (even) or (not (be (even))))
// first false
1 should (not be (even) or not be (odd))
1 should (not be (even) or not (be (odd)))
1 should (not be (even) or (not (be (odd))))
2 should (not be (odd) or not be (even))
2 should (not be (odd) or not (be (even)))
2 should (not be (odd) or (not (be (even))))
// second false
1 should (not be (odd) or not be (even))
1 should (not be (odd) or not (be (even)))
1 should (not be (odd) or (not (be (even))))
2 should (not be (even) or not be (odd))
2 should (not be (even) or not (be (odd)))
2 should (not be (even) or (not (be (odd))))
}
it("should throw TestFailedException if both BeMatcher match, when used in a logical-or expression with not") {
val caught1 = intercept[TestFailedException] {
1 should (not be (odd) or not be (odd))
}
assert(caught1.getMessage === "1 was odd, and 1 was odd")
val caught2 = intercept[TestFailedException] {
1 should (not be (odd) or not (be (odd)))
}
assert(caught2.getMessage === "1 was odd, and 1 was odd")
val caught3 = intercept[TestFailedException] {
1 should (not be (odd) or (not (be (odd))))
}
assert(caught3.getMessage === "1 was odd, and 1 was odd")
val caught4 = intercept[TestFailedException] {
2 should (not be (even) or not be (even))
}
assert(caught4.getMessage === "2 was even, and 2 was even")
val caught5 = intercept[TestFailedException] {
2 should (not be (even) or not (be (even)))
}
assert(caught5.getMessage === "2 was even, and 2 was even")
val caught6 = intercept[TestFailedException] {
2 should (not be (even) or (not (be (even))))
}
assert(caught6.getMessage === "2 was even, and 2 was even")
}
it("should work when the types aren't exactly the same") {
class UnlikableMatcher extends BeMatcher[Any] {
def apply(left: Any): MatchResult = {
MatchResult(
false,
left.toString + " was not to my liking",
left.toString + " was to my liking"
)
}
}
val unlikable = new UnlikableMatcher
val likable = not (unlikable)
1 should be (likable)
2 should not be (unlikable)
// 1 shouldBe likable
val caught1 = intercept[TestFailedException] {
1 should be (unlikable)
}
assert(caught1.getMessage === "1 was not to my liking")
val caught2 = intercept[TestFailedException] {
"The dish" should not be (likable)
}
assert(caught2.getMessage === "The dish was not to my liking")
}
}
describe("the compose method on BeMatcher") {
it("should return another BeMatcher") {
val oddAsInt = odd compose { (s: String) => s.toInt }
"3" should be (oddAsInt)
"4" should not be (oddAsInt)
// "3" shouldBe oddAsInt
}
}
describe("A factory method on BeMatcher's companion object") {
it("should produce a be-matcher that executes the passed function when its apply is called") {
val f = { (s: String) => MatchResult(s.length < 3, "s was not less than 3", "s was less than 3") }
val lessThanThreeInLength = BeMatcher(f)
"" should be (lessThanThreeInLength)
"x" should be (lessThanThreeInLength)
"xx" should be (lessThanThreeInLength)
"xxx" should not be (lessThanThreeInLength)
"xxxx" should not be (lessThanThreeInLength)
// "" shouldBe lessThanThreeInLength
// "x" shouldBe lessThanThreeInLength
// "xx" shouldBe lessThanThreeInLength
}
}
}
|
dotty-staging/scalatest
|
scalatest-test/src/test/scala/org/scalatest/ShouldBeMatcherSpec.scala
|
Scala
|
apache-2.0
| 14,132 |
import collection.immutable.HashMap
object Test {
def main(args: Array[String]): Unit = {
resolveDefault()
resolveFirst()
resolveSecond()
resolveMany()
}
def resolveDefault(): Unit = {
val a = HashMap(1 -> "1")
val b = HashMap(1 -> "2")
val r = a.merged(b)(null)
println(r)
println(r(1))
}
def resolveFirst(): Unit = {
val a = HashMap(1 -> "1")
val b = HashMap(1 -> "2")
def collision(a: (Int, String), b: (Int, String)) = {
println(a)
a
}
val r = a.merged(b) { collision }
println(r)
println(r(1))
}
def resolveSecond(): Unit = {
val a = HashMap(1 -> "1")
val b = HashMap(1 -> "2")
def collision(a: (Int, String), b: (Int, String)) = {
println(b)
b
}
val r = a.merged(b) { collision }
println(r)
println(r(1))
}
def resolveMany(): Unit = {
val a = HashMap((0 until 100) zip (0 until 100): _*)
val b = HashMap((0 until 100) zip (100 until 200): _*)
def collision(a: (Int, Int), b: (Int, Int)) = {
(a._1, a._2 + b._2)
}
val r = a.merged(b) { collision }
for ((k, v) <- r) assert(v == 100 + 2 * k, (k, v))
}
}
|
yusuke2255/dotty
|
tests/run/t5879.scala
|
Scala
|
bsd-3-clause
| 1,186 |
//
// Copyright (c) 2015 IronCore Labs
//
package com.ironcorelabs.davenport
import scalaz._, Scalaz._, scalaz.concurrent.Task, scalaz.stream.Process
import db._
import tags.RequiresCouch
import syntax.dbprog._
import scala.concurrent.duration._
@RequiresCouch
class CouchConnectionSpec extends TestBase with KnobsConfiguration {
var connection: CouchConnection = null
val davenportConfig = knobsConfiguration.run
override def beforeAll() = {
connection = CouchConnection(davenportConfig)
()
}
override def afterAll() = {
connection.disconnect.attemptRun.value
()
}
"CouchConnection" should {
"handle a failed connection" in {
//Disconnect our connection.
connection.disconnect.attemptRun.value
// Prove that the connection fails
val connectionfail = db.getDoc(Key("a")).execute(connection.openDatastore(BucketNameAndPassword("default", None)))
connectionfail.attemptRun.leftValue shouldBe a[DisconnectedException]
//Reconnect so the next test has a connection.
connection = CouchConnection(davenportConfig)
}
"be able to open and close bucket" in {
val b = BucketNameAndPassword("default", None)
val openedBucket = connection.openBucket(b).attemptRun.value
openedBucket.name shouldBe b.name
connection.openBuckets.get(b).value shouldBe openedBucket
val closeTask = connection.closeBucket(b)
//Close should succeed
closeTask.attemptRun should beRight(true)
//The bucket shouldn't be there anymore
connection.openBuckets.get(b) shouldBe None
}
"return false for closeBucket which isn't open" in {
val b = BucketNameAndPassword("myuknownbucket", None)
connection.closeBucket(b).attemptRun.value shouldBe false
}
}
}
|
BobWall23/davenport
|
src/test/scala/com/ironcorelabs/davenport/CouchConnectionSpec.scala
|
Scala
|
mit
| 1,784 |
package org.kuleuven.mai.vision.image
import org.apache.spark.rdd.RDD
/**
* @author mandar
*/
class SparkHistogram(vals: RDD[Double]) {
private val _values = vals
private val hist: (Int) => (Array[Double], Array[Long]) = _values.histogram
def values = _values
def +(other: SparkHistogram): SparkHistogram =
SparkHistogram(this.values union other.values)
}
object SparkHistogram {
def apply(values: RDD[Double]) = new SparkHistogram(values)
}
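// Added usage sketch (not part of the original file): combining two histograms
// by unioning their underlying RDDs via the `+` operator above. Assumes a live
// SparkContext named `sc`; the bucket count of 10 is arbitrary.
object SparkHistogramDemo {
  import org.apache.spark.SparkContext
  def demo(sc: SparkContext): Unit = {
    val h1 = SparkHistogram(sc.parallelize(Seq(1.0, 2.0, 3.0)))
    val h2 = SparkHistogram(sc.parallelize(Seq(4.0, 5.0)))
    val combined = h1 + h2 // unions the underlying RDDs
    // Spark's histogram(10) splits the value range into 10 even buckets and
    // returns (bucket boundaries, counts per bucket).
    val (buckets, counts) = combined.values.histogram(10)
    println(buckets.mkString(", "))
    println(counts.mkString(", "))
  }
}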
|
mandar2812/ScalaImageToolbox
|
ImageDenoising/src/main/scala-2.11/org/kuleuven/mai/vision/image/SparkHistogram.scala
|
Scala
|
apache-2.0
| 460 |
package com.gilt.pickling.avro
import org.scalatest.{Assertions, FunSuite}
import org.apache.avro.Schema
import com.gilt.pickling.TestUtils
import TestUtils._
import org.apache.avro.generic.GenericData
import scala.pickling._
import scala.collection.JavaConversions._
import java.util.{Set => JSet}
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import com.gilt.pickling.TestObjs._
object SetOfPrimitivesTest {
import org.scalacheck.{Gen, Arbitrary}
implicit val arbSetOfInts = Arbitrary(for (set <- Gen.containerOf[Set, Int](Gen.choose(Int.MinValue, Int.MaxValue))) yield SetOfInts(set))
implicit val arbSetOfLongs = Arbitrary(for (set <- Gen.containerOf[Set, Long](Gen.choose(Long.MinValue, Long.MaxValue))) yield SetOfLongs(set))
implicit val arbSetOfDoubles = Arbitrary(for (set <- Gen.containerOf[Set, Double](Gen.chooseNum(Double.MinValue / 2, Double.MaxValue / 2))) yield SetOfDoubles(set))
implicit val arbSetOfFloats = Arbitrary(for (set <- Gen.containerOf[Set, Float](Gen.chooseNum(Float.MinValue, Float.MaxValue))) yield SetOfFloats(set))
implicit val arbSetOfBooleans = Arbitrary(for (set <- Gen.containerOf[Set, Boolean](Gen.oneOf(true, false))) yield SetOfBooleans(set))
implicit val arbSetOfStrings = Arbitrary(for (set <- Gen.containerOf[Set, String](Gen.alphaStr)) yield SetOfStrings(set))
implicit val arbSetOfBytes = Arbitrary(for (set <- Gen.containerOf[Set, Byte](Gen.choose(Byte.MinValue, Byte.MaxValue))) yield SetOfBytes(set))
implicit val arbSetOfShorts = Arbitrary(for (set <- Gen.containerOf[Set, Short](Gen.choose(Short.MinValue, Short.MaxValue))) yield SetOfShorts(set))
implicit val arbSetOfChars = Arbitrary(for (set <- Gen.containerOf[Set, Char](Gen.choose(Char.MinValue, Char.MaxValue))) yield SetOfChars(set))
}
class SetOfPrimitivesTest extends FunSuite with Assertions with GeneratorDrivenPropertyChecks {
import SetOfPrimitivesTest._
  // Set of Ints
  test("Pickle a case class with a set of ints") {
forAll {
(obj: SetOfInts) =>
val pckl = obj.pickle
assert(generateBytesFromAvro(obj.list, "/avro/sets/SetOfInts.avsc") === pckl.value)
}
}
test("Unpickle a case class with an set of ints") {
forAll {
(obj: SetOfInts) =>
val bytes = generateBytesFromAvro(obj.list, "/avro/sets/SetOfInts.avsc")
val hydratedObj: SetOfInts = bytes.unpickle[SetOfInts]
assert(obj.list === hydratedObj.list)
}
}
test("Round trip a case class with an set of ints") {
forAll {
(obj: SetOfInts) =>
val pckl = obj.pickle
val hydratedObj: SetOfInts = pckl.unpickle[SetOfInts]
assert(obj.list === hydratedObj.list)
}
}
  // Set of Longs
  test("Pickle a case class with a set of longs") {
forAll {
(obj: SetOfLongs) =>
val pckl = obj.pickle
assert(generateBytesFromAvro(obj.list, "/avro/sets/SetOfLongs.avsc") === pckl.value)
}
}
test("Unpickle a case class with an set of longs") {
forAll {
(obj: SetOfLongs) =>
val bytes = generateBytesFromAvro(obj.list, "/avro/sets/SetOfLongs.avsc")
val hydratedObj: SetOfLongs = bytes.unpickle[SetOfLongs]
assert(obj.list === hydratedObj.list)
}
}
test("Round trip a case class with an set of longs") {
forAll {
(obj: SetOfLongs) =>
val pckl = obj.pickle
val hydratedObj: SetOfLongs = pckl.unpickle[SetOfLongs]
assert(obj.list === hydratedObj.list)
}
}
  // Set of Doubles
  test("Pickle a case class with a set of doubles") {
forAll {
(obj: SetOfDoubles) =>
val pckl = obj.pickle
assert(generateBytesFromAvro(obj.list, "/avro/sets/SetOfDoubles.avsc") === pckl.value)
}
}
test("Unpickle a case class with an set of doubles") {
forAll {
(obj: SetOfDoubles) =>
val bytes = generateBytesFromAvro(obj.list, "/avro/sets/SetOfDoubles.avsc")
val hydratedObj: SetOfDoubles = bytes.unpickle[SetOfDoubles]
assert(obj.list === hydratedObj.list)
}
}
test("Round trip a case class with an set of doubles") {
forAll {
(obj: SetOfDoubles) =>
val pckl = obj.pickle
val hydratedObj: SetOfDoubles = pckl.unpickle[SetOfDoubles]
assert(obj.list === hydratedObj.list)
}
}
  // Set of Floats
  test("Pickle a case class with a set of floats") {
val obj = SetOfFloats(Set(1.1F, 2.2F, 3.3F, 4.4F))
val pckl = obj.pickle
assert(generateBytesFromAvro(obj.list, "/avro/sets/SetOfFloats.avsc") === pckl.value)
}
test("Unpickle a case class with an set of floats") {
val obj = SetOfFloats(Set(1.1F, 2.2F, 3.3F, 4.4F))
val bytes = generateBytesFromAvro(obj.list, "/avro/sets/SetOfFloats.avsc")
val hydratedObj: SetOfFloats = bytes.unpickle[SetOfFloats]
assert(obj.list === hydratedObj.list)
}
test("Round trip a case class with an set of floats") {
val obj = SetOfFloats(Set(1.1F, 2.2F, 3.3F, 4.4F))
val pckl = obj.pickle
val hydratedObj: SetOfFloats = pckl.unpickle[SetOfFloats]
assert(obj.list === hydratedObj.list)
}
  // Set of Booleans
  test("Pickle a case class with a set of booleans") {
val obj = SetOfBooleans(Set(true, false, true, true))
val pckl = obj.pickle
assert(generateBytesFromAvro(obj.list, "/avro/sets/SetOfBooleans.avsc") === pckl.value)
}
test("Unpickle a case class with an set of boolean") {
val obj = SetOfBooleans(Set(true, false, true, true))
val bytes = generateBytesFromAvro(obj.list, "/avro/sets/SetOfBooleans.avsc")
val hydratedObj: SetOfBooleans = bytes.unpickle[SetOfBooleans]
assert(obj.list === hydratedObj.list)
}
test("Round trip a case class with an set of boolean") {
val obj = SetOfBooleans(Set(true, false, true, true))
val pckl = obj.pickle
val hydratedObj: SetOfBooleans = pckl.unpickle[SetOfBooleans]
assert(obj.list === hydratedObj.list)
}
  // Set of Strings
  test("Pickle a case class with a set of strings") {
val obj = SetOfStrings(Set[String]("a", "b", "c", "d"))
val pckl = obj.pickle
assert(generateBytesFromAvro(obj.list, "/avro/sets/SetOfStrings.avsc") === pckl.value)
}
test("Unpickle a case class with an set of string") {
val obj = SetOfStrings(Set[String]("a", "b", "c", "d"))
val bytes = generateBytesFromAvro(obj.list, "/avro/sets/SetOfStrings.avsc")
val hydratedObj: SetOfStrings = bytes.unpickle[SetOfStrings]
assert(obj.list === hydratedObj.list)
}
test("Round trip a case class with an set of string") {
val obj = SetOfStrings(Set[String]("a", "b", "c", "d"))
val pckl = obj.pickle
val hydratedObj: SetOfStrings = pckl.unpickle[SetOfStrings]
assert(obj.list === hydratedObj.list)
}
  // Set of Bytes
  // TODO: A better solution would be to write this as a ByteBuffer
  test("Pickle a case class with a set of bytes") {
val obj = SetOfBytes(Set(1.toByte, 2.toByte, 3.toByte, 4.toByte))
val pckl = obj.pickle
assert(generateBytesFromAvro(obj.list.map(_.toInt), "/avro/sets/SetOfBytes.avsc") === pckl.value)
}
test("Unpickle a case class with an set of bytes") {
val obj = SetOfBytes(Set(1.toByte, 2.toByte, 3.toByte, 4.toByte))
val bytes = generateBytesFromAvro(obj.list.map(_.toInt), "/avro/sets/SetOfBytes.avsc")
val hydratedObj: SetOfBytes = bytes.unpickle[SetOfBytes]
assert(obj.list === hydratedObj.list)
}
test("Round trip a case class with an set of bytes") {
val obj = SetOfBytes(Set(1.toByte, 2.toByte, 3.toByte, 4.toByte))
val pckl = obj.pickle
val hydratedObj: SetOfBytes = pckl.unpickle[SetOfBytes]
assert(obj.list === hydratedObj.list)
}
  // Set of Shorts
  test("Pickle a case class with a set of shorts") {
val obj = SetOfShorts(Set(1.toShort, 2.toShort, 3.toShort, 4.toShort))
val pckl = obj.pickle
assert(generateBytesFromAvro(obj.list.map(_.toInt), "/avro/sets/SetOfShorts.avsc") === pckl.value)
}
test("Unpickle a case class with an set of shorts") {
val obj = SetOfShorts(Set(1.toShort, 2.toShort, 3.toShort, 4.toShort))
val bytes = generateBytesFromAvro(obj.list.map(_.toInt), "/avro/sets/SetOfShorts.avsc")
val hydratedObj: SetOfShorts = bytes.unpickle[SetOfShorts]
assert(obj.list === hydratedObj.list)
}
test("Round trip a case class with an set of shorts") {
val obj = SetOfShorts(Set(1.toShort, 2.toShort, 3.toShort, 4.toShort))
val pckl = obj.pickle
val hydratedObj: SetOfShorts = pckl.unpickle[SetOfShorts]
assert(obj.list === hydratedObj.list)
}
  // Set of Chars
  test("Pickle a case class with a set of chars") {
val obj = SetOfChars(Set(1.toChar, 2.toChar, 3.toChar, 4.toChar))
val pckl = obj.pickle
assert(generateBytesFromAvro(obj.list.map(_.toInt), "/avro/sets/SetOfChars.avsc") === pckl.value)
}
test("Unpickle a case class with an set of chars") {
val obj = SetOfChars(Set(1.toChar, 2.toChar, 3.toChar, 4.toChar))
val bytes = generateBytesFromAvro(obj.list.map(_.toInt), "/avro/sets/SetOfChars.avsc")
val hydratedObj: SetOfChars = bytes.unpickle[SetOfChars]
assert(obj.list === hydratedObj.list)
}
test("Round trip a case class with an set of chars") {
val obj = SetOfChars(Set(1.toChar, 2.toChar, 3.toChar, 4.toChar))
val pckl = obj.pickle
val hydratedObj: SetOfChars = pckl.unpickle[SetOfChars]
assert(obj.list === hydratedObj.list)
}
private def generateBytesFromAvro(value: JSet[_], schemaFileLocation: String): Array[Byte] = {
val schema: Schema = retrieveAvroSchemaFromFile(schemaFileLocation)
val record = new GenericData.Record(schema)
record.put("list", value) // need java collection at this point
convertToBytes(schema, record)
}
}
|
gilt/gfc-avro
|
src/test/scala/com/gilt/pickling/avro/SetOfPrimitivesTest.scala
|
Scala
|
apache-2.0
| 9,740 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.analysis
import java.util.Locale
import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Range}
import org.apache.spark.sql.catalyst.rules._
import org.apache.spark.sql.types.{DataType, IntegerType, LongType}
/**
* Rule that resolves table-valued function references.
*/
object ResolveTableValuedFunctions extends Rule[LogicalPlan] {
/**
* List of argument names and their types, used to declare a function.
*/
private case class ArgumentList(args: (String, DataType)*) {
/**
 * Try to cast the expressions to satisfy the expected types of this argument list. If there
 * are any types that cannot be cast, then None is returned.
*/
def implicitCast(values: Seq[Expression]): Option[Seq[Expression]] = {
if (args.length == values.length) {
val casted = values.zip(args).map { case (value, (_, expectedType)) =>
TypeCoercion.ImplicitTypeCasts.implicitCast(value, expectedType)
}
if (casted.forall(_.isDefined)) {
return Some(casted.map(_.get))
}
}
None
}
override def toString: String = {
args.map { a =>
s"${a._1}: ${a._2.typeName}"
}.mkString(", ")
}
}
/**
* A TVF maps argument lists to resolver functions that accept those arguments. Using a map
* here allows for function overloading.
*/
private type TVF = Map[ArgumentList, Seq[Any] => LogicalPlan]
/**
* TVF builder.
*/
private def tvf(args: (String, DataType)*)(pf: PartialFunction[Seq[Any], LogicalPlan])
: (ArgumentList, Seq[Any] => LogicalPlan) = {
(ArgumentList(args: _*),
pf orElse {
case args =>
throw new IllegalArgumentException(
"Invalid arguments for resolved function: " + args.mkString(", "))
})
}
/**
* Internal registry of table-valued functions.
*/
private val builtinFunctions: Map[String, TVF] = Map(
"range" -> Map(
/* range(end) */
tvf("end" -> LongType) { case Seq(end: Long) =>
Range(0, end, 1, None)
},
/* range(start, end) */
tvf("start" -> LongType, "end" -> LongType) { case Seq(start: Long, end: Long) =>
Range(start, end, 1, None)
},
/* range(start, end, step) */
tvf("start" -> LongType, "end" -> LongType, "step" -> LongType) {
case Seq(start: Long, end: Long, step: Long) =>
Range(start, end, step, None)
},
/* range(start, end, step, numPartitions) */
tvf("start" -> LongType, "end" -> LongType, "step" -> LongType,
"numPartitions" -> IntegerType) {
case Seq(start: Long, end: Long, step: Long, numPartitions: Int) =>
Range(start, end, step, Some(numPartitions))
})
)
override def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperators {
case u: UnresolvedTableValuedFunction if u.functionArgs.forall(_.resolved) =>
builtinFunctions.get(u.functionName.toLowerCase(Locale.ROOT)) match {
case Some(tvf) =>
val resolved = tvf.flatMap { case (argList, resolver) =>
argList.implicitCast(u.functionArgs) match {
case Some(casted) =>
Some(resolver(casted.map(_.eval())))
case _ =>
None
}
}
resolved.headOption.getOrElse {
val argTypes = u.functionArgs.map(_.dataType.typeName).mkString(", ")
u.failAnalysis(
s"""error: table-valued function ${u.functionName} with alternatives:
|${tvf.keys.map(_.toString).toSeq.sorted.map(x => s" ($x)").mkString("\n")}
|cannot be applied to: (${argTypes})""".stripMargin)
}
case _ =>
u.failAnalysis(s"could not resolve `${u.functionName}` to a table-valued function")
}
}
}
|
MLnick/spark
|
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveTableValuedFunctions.scala
|
Scala
|
apache-2.0
| 4,718 |
package rescala
import rescala.levelbased.LevelBasedPropagationEngines
object Engines extends LevelBasedPropagationEngines {
implicit val default: SimpleEngine = synchron
val all: List[SimpleEngine] = List(synchron, unmanaged)
val locksweep: TEngine = null
val parallellocksweep: TEngine = null
}
|
volkc/REScala
|
Main/native/src/main/scala/rescala/Engines.scala
|
Scala
|
apache-2.0
| 310 |
package scapi.ot
import akka.actor.{ActorSystem, Props}
import scapi.ot.ObliviousTransferProtocolMessages.Start
object ObliviousTransferExample extends App {
val sigma = 0: Byte
val system = ActorSystem("OT")
val otSender = system.actorOf(Props[ObliviousTransferGroupElementSender])
val otReceiver = system.actorOf(Props(classOf[ObliviousTransferReceiver], otSender))
otReceiver ! Start(sigma)
}
|
kushti/scala-scapi
|
src/main/scala/scapi/ot/ObliviousTransferExample.scala
|
Scala
|
cc0-1.0
| 412 |
package synahive.restapi.http
import akka.http.scaladsl.server.Directives._
import synahive.restapi.http.routes._
import synahive.restapi.utils.CorsSupport
trait HttpService extends UsersServiceRoute with AuthServiceRoute with CorsSupport {
val routes =
pathPrefix("v1") {
corsHandler {
usersRoute ~
authRoute
}
}
}
|
synahive/synahive-server
|
src/main/scala/synahive/restapi/http/HttpService.scala
|
Scala
|
mit
| 360 |
class C1 {
type T
def this(x: T) = { this() }
}
class C1a[T] {
def this(x: T) = { this() } // works, no error here
}
class C2(x: Int) {
def this(a: Int, b: Int = x) = {
this(b)
}
}
class C3 {
val x = 0
def this(a: Int = x) = { this() }
}
|
folone/dotty
|
tests/untried/neg/t5543.scala
|
Scala
|
bsd-3-clause
| 261 |
package bot.line.client
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.stream.Materializer
import scala.concurrent.{ExecutionContext, Future}
class ReplyMessageClient(accessToken: String)
(implicit val system: ActorSystem,
implicit val materializer: Materializer,
implicit val ec: ExecutionContext) extends MessageReplier {
override def replyMessage(replyToken: String, message: String): Future[Unit] = {
val request = ReplyMessageRequest(accessToken, replyToken, message).httpRequest
val responseFuture = Http().singleRequest(request)
responseFuture.collect {
case response if response.status.isSuccess() => println(s"message sent!")
case error => println(s"request failed: $error")
}
}
}
|
xoyo24/akka-http-line-bot
|
src/main/scala/bot/line/client/ReplyMessageClient.scala
|
Scala
|
mit
| 828 |
/*
* Copyright (C) 2019 Lightbend Inc. <https://www.lightbend.com>
*/
package akka.actor.testkit.typed.internal
import akka.actor.testkit.typed.LoggingEvent
import akka.annotation.InternalApi
// import ch.qos.logback.classic.spi.ILoggingEvent
// import ch.qos.logback.classic.spi.ThrowableProxy
// import ch.qos.logback.core.AppenderBase
/**
* INTERNAL API
*
 * The `TestAppender` emits the logging events to the registered [[LoggingTestKitImpl]]s, which
 * are added to and removed from the appender dynamically by tests.
 *
 * `TestAppender` currently requires Logback as the SLF4J implementation.
 * Something similar could probably be implemented with other backends, such as Log4j2.
*/
@InternalApi private[akka] object TestAppender {
import LogbackUtil._
private val TestAppenderName = "AkkaTestAppender"
def setupTestAppender(loggerName: String): Unit = {
// val logbackLogger = getLogbackLogger(loggerName)
// logbackLogger.getAppender(TestAppenderName) match {
// case null =>
// val testAppender = new TestAppender
// testAppender.setName(TestAppenderName)
// testAppender.setContext(logbackLogger.getLoggerContext)
// testAppender.start()
// logbackLogger.addAppender(testAppender)
// case _: TestAppender =>
// // ok, already setup
// case other =>
// throw new IllegalStateException(s"Unexpected $TestAppenderName already added: $other")
// }
}
def addFilter(loggerName: String, filter: LoggingTestKitImpl): Unit =
getTestAppender(loggerName).addTestFilter(filter)
def removeFilter(loggerName: String, filter: LoggingTestKitImpl): Unit =
getTestAppender(loggerName).removeTestFilter(filter)
private def getTestAppender(loggerName: String): TestAppender = {
val logger = getLogbackLogger(loggerName)
// logger.getAppender(TestAppenderName) match {
// case testAppender: TestAppender => testAppender
// case null =>
// throw new IllegalStateException(s"No $TestAppenderName was setup for logger [${logger.getName}]")
// case other =>
// throw new IllegalStateException(
// s"Unexpected $TestAppenderName already added for logger [${logger.getName}]: $other")
// }
new TestAppender()
}
}
/**
* INTERNAL API
*/
@InternalApi private[akka] class TestAppender {
// extends AppenderBase[ILoggingEvent] {
import LogbackUtil._
private var filters: List[LoggingTestKitImpl] = Nil
// invocations are synchronized via doAppend in AppenderBase
// override
def append(event: Any): Unit = {
// import akka.util.ccompat.JavaConverters._
// val throwable = event.getThrowableProxy match {
// case p: ThrowableProxy =>
// Option(p.getThrowable)
// case _ => None
// }
// val loggingEvent = LoggingEvent(
// level = convertLevel(event.getLevel),
// message = event.getFormattedMessage,
// loggerName = event.getLoggerName,
// threadName = event.getThreadName,
// timeStamp = event.getTimeStamp,
// marker = Option(event.getMarker),
// throwable = throwable,
// mdc = event.getMDCPropertyMap.asScala.toMap)
// filter(loggingEvent)
}
private def filter(event: LoggingEvent): Boolean = {
filters.exists(f =>
try {
f.apply(event)
} catch {
case _: Exception => false
})
}
def addTestFilter(filter: LoggingTestKitImpl): Unit = synchronized {
filters ::= filter
}
def removeTestFilter(filter: LoggingTestKitImpl): Unit = synchronized {
@scala.annotation.tailrec
def removeFirst(list: List[LoggingTestKitImpl], zipped: List[LoggingTestKitImpl] = Nil): List[LoggingTestKitImpl] =
list match {
case head :: tail if head == filter => tail.reverse_:::(zipped)
case head :: tail => removeFirst(tail, head :: zipped)
case Nil => filters // filter not found, just return original list
}
filters = removeFirst(filters)
}
}
|
unicredit/akka.js
|
akka-js-typed-testkit/js/src/main/scala/akka/actor/testkit/typed/internal/TestAppender.scala
|
Scala
|
bsd-3-clause
| 4,027 |
package org.jetbrains.plugins.scala
package codeInspection.collections
import org.jetbrains.plugins.scala.codeInspection.InspectionBundle
/**
* Nikolay.Tropin
* 5/30/13
*/
class FindNotEqualsNoneTest extends OperationsOnCollectionInspectionTest {
val hint = InspectionBundle.message("find.notEquals.none.hint")
def test_1() {
val selected = s"(Nil ${START}find (_ => true)) != None$END"
check(selected)
val text = "(Nil find (_ => true)) != None"
val result = "Nil exists (_ => true)"
testFix(text, result, hint)
}
def test_2() {
val selected = s"Nil.${START}find(_ => true) != None$END"
check(selected)
val text = "Nil.find(_ => true) != None"
val result = "Nil.exists(_ => true)"
testFix(text, result, hint)
}
override val inspectionClass = classOf[FindNotEqualsNoneInspection]
}
|
consulo/consulo-scala
|
test/org/jetbrains/plugins/scala/codeInspection/collections/FindNotEqualsNoneTest.scala
|
Scala
|
apache-2.0
| 841 |
package au.com.intelix.config
import com.typesafe.config.ConfigFactory
trait WithBlankConfig extends WithConfig {
lazy implicit override val config = ConfigFactory.empty()
}
|
intelix/reactiveservices
|
tools/config/src/main/scala/au/com/intelix/config/WithBlankConfig.scala
|
Scala
|
apache-2.0
| 180 |
package org.http4s
package play.test // Get out of play package so we can import custom instances
import _root_.play.api.libs.json._
import cats.effect.IO
import cats.effect.laws.util.TestContext
import org.http4s.headers.`Content-Type`
import org.http4s.jawn.JawnDecodeSupportSpec
import org.http4s.play._
// Originally based on CirceSpec
class PlaySpec extends JawnDecodeSupportSpec[JsValue] {
implicit val testContext = TestContext()
testJsonDecoder(jsonDecoder)
sealed case class Foo(bar: Int)
val foo = Foo(42)
implicit val format: OFormat[Foo] = Json.format[Foo]
"json encoder" should {
val json: JsValue = Json.obj("test" -> JsString("PlaySupport"))
"have json content type" in {
jsonEncoder.headers.get(`Content-Type`) must_== Some(
`Content-Type`(MediaType.application.json))
}
"write JSON" in {
writeToString(json) must_== ("""{"test":"PlaySupport"}""")
}
}
"jsonEncoderOf" should {
"have json content type" in {
jsonEncoderOf[IO, Foo].headers.get(`Content-Type`) must_== Some(
`Content-Type`(MediaType.application.json))
}
"write compact JSON" in {
writeToString(foo)(jsonEncoderOf[IO, Foo]) must_== ("""{"bar":42}""")
}
}
"jsonOf" should {
"decode JSON from a Play decoder" in {
val result = jsonOf[IO, Foo]
.decode(Request[IO]().withEntity(Json.obj("bar" -> JsNumber(42)): JsValue), strict = true)
result.value.unsafeRunSync must_== Right(Foo(42))
}
}
"Uri codec" should {
"round trip" in {
// TODO would benefit from Arbitrary[Uri]
val uri = Uri.uri("http://www.example.com/")
Json.fromJson[Uri](Json.toJson(uri)).asOpt must_== (Some(uri))
}
}
"Message[F].decodeJson[A]" should {
"decode json from a message" in {
val req = Request[IO]().withEntity(Json.toJson(foo))
req.decodeJson[Foo] must returnValue(foo)
}
"fail on invalid json" in {
val req = Request[IO]().withEntity(Json.toJson(List(13, 14)))
req.decodeJson[Foo].attempt.unsafeRunSync must beLeft
}
}
"PlayEntityCodec" should {
"decode json without defining EntityDecoder" in {
import org.http4s.play.PlayEntityDecoder._
val request = Request[IO]().withEntity(Json.obj("bar" -> JsNumber(42)): JsValue)
val result = request.as[Foo]
result.unsafeRunSync must_== Foo(42)
}
"encode without defining EntityEncoder using default printer" in {
import org.http4s.play.PlayEntityEncoder._
writeToString(foo) must_== """{"bar":42}"""
}
}
}
|
aeons/http4s
|
play-json/src/test/scala/org/http4s/play/PlaySpec.scala
|
Scala
|
apache-2.0
| 2,576 |
// Solution-2.scala
// Solution to Exercise 2 in "Creating Classes"
class Lion
class Giraffe
val lion2 = new Lion
val giraffe2 = new Giraffe
val giraffe3 = new Giraffe
println(lion2)
println(giraffe2)
println(giraffe3)
/* OUTPUT_SHOULD_CONTAIN
Main$$anon$1$Lion@
Main$$anon$1$Giraffe@
Main$$anon$1$Giraffe@
*/
|
P7h/ScalaPlayground
|
Atomic Scala/atomic-scala-solutions/11_CreatingClasses/Solution-2.scala
|
Scala
|
apache-2.0
| 314 |
package im.actor.server.push.apple
import akka.actor.ActorSystem
import com.google.protobuf.wrappers.{ Int32Value, StringValue }
import com.relayrides.pushy.apns.PushNotificationResponse
import com.relayrides.pushy.apns.util.{ SimpleApnsPushNotification, TokenUtil }
import im.actor.server.model.push.ApplePushCredentials
import io.netty.util.concurrent.{ Future ⇒ NFuture }
import scodec.bits.BitVector
import scala.collection.concurrent.TrieMap
trait APNSSend {
private val listeners = TrieMap.empty[String, PushFutureListener]
protected def sendNotification(payload: String, creds: ApplePushCredentials, userId: Int)(implicit client: ApplePushExtension#Client, system: ActorSystem): NFuture[PushNotificationResponse[SimpleApnsPushNotification]] = {
// when topic is null, it will be taken from APNs certificate
// http://relayrides.github.io/pushy/apidocs/0.6/com/relayrides/pushy/apns/ApnsPushNotification.html#getTopic--
val token = BitVector(creds.token.toByteArray).toHex
val topic: String = (creds.apnsKey, creds.bundleId) match {
case (_, Some(bundleId)) ⇒ bundleId.value
case (Some(key), _) ⇒ ApplePushExtension(system).apnsBundleId.get(key.value).orNull
case _ ⇒
system.log.warning("Wrong creds format on sending notification. Creds: {}", creds)
null
}
system.log.debug(s"Sending APNS, token: {}, key: {}, isVoip: {}, topic: {}, payload: $payload", token, creds.apnsKey, creds.isVoip, topic)
val notification = new SimpleApnsPushNotification(TokenUtil.sanitizeTokenString(token), topic, payload)
val listener = listeners.getOrElseUpdate(token, new PushFutureListener(userId, creds, extractCredsId(creds))(system))
client.sendNotification(notification).addListener(listener)
}
protected def extractCredsId(creds: ApplePushCredentials): String = (creds.apnsKey, creds.bundleId) match {
case (Some(Int32Value(key)), _) ⇒ key.toString
case (_, Some(StringValue(bundleId))) ⇒ bundleId
case _ ⇒ throw new RuntimeException("Wrong credentials format")
}
}
|
EaglesoftZJ/actor-platform
|
actor-server/actor-core/src/main/scala/im/actor/server/push/apple/APNSSend.scala
|
Scala
|
agpl-3.0
| 2,114 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.orc.cloud
import com.cloudera.spark.cloud.s3.S3ATestSetup
import org.apache.spark.sql.sources.{CloudPartitionTest, ParquetRelationTrait}
class S3AParquetPartitionSuite extends CloudPartitionTest with S3ATestSetup
with ParquetRelationTrait {
init()
def init(): Unit = {
// propagate S3 credentials
if (enabled) {
initFS()
}
}
}
|
hortonworks-spark/cloud-integration
|
cloud-examples/src/test/scala/org/apache/spark/sql/hive/orc/cloud/S3AParquetPartitionSuite.scala
|
Scala
|
apache-2.0
| 1,195 |
package org.jetbrains.plugins.hocon.highlight
import java.util
import com.intellij.icons.AllIcons
import com.intellij.openapi.editor.colors.TextAttributesKey
import com.intellij.openapi.fileTypes.{SyntaxHighlighter, SyntaxHighlighterFactory}
import com.intellij.openapi.options.colors.{AttributesDescriptor, ColorDescriptor, ColorSettingsPage}
import org.jetbrains.plugins.hocon.highlight.{HoconHighlighterColors => HHC}
import org.jetbrains.plugins.hocon.lang.HoconLanguage
import scala.collection.JavaConverters._
class HoconColorSettingsPage extends ColorSettingsPage {
def getIcon =
AllIcons.FileTypes.Config
def getDemoText: String =
s"""<hashcomment># hash comment</hashcomment>
      |<doubleslashcomment>// double slash comment</doubleslashcomment>
|
|<include>include</include> <inclmod>classpath</inclmod><imparens>(</imparens><quotedstring>"included.conf"</quotedstring><imparens>)</imparens>
|
|<key>object</key><dot>.</dot><key>subobject</key> <braces>{</braces>
| <key>someList</key> <pathvalueseparator>=</pathvalueseparator> <brackets>[</brackets>
| <null>null</null><comma>,</comma>
| <boolean>true</boolean><comma>,</comma>
| <number>123.4e5</number><comma>,</comma>
| <unquotedstring>unquoted string </unquotedstring><badchar>*</badchar><comma>,</comma>
| <quotedstring>"quo</quotedstring><validstringescape>\\\\n</validstringescape><quotedstring>ted</quotedstring><invalidstringescape>\\\\d</invalidstringescape><quotedstring> string"</quotedstring><comma>,</comma>
| <substsign>$$</substsign><substbraces>{</substbraces><optsubstsign>?</optsubstsign><substkey>substitution</substkey><dot>.</dot><substkey>inner</substkey><substbraces>}</substbraces><comma>,</comma>
| <multilinestring>${"\\"\\"\\""}multiline\\n multiline${"\\"\\"\\""}</multilinestring>
| <brackets>]</brackets>
|<braces>}</braces>
|""".stripMargin.trim
def getAdditionalHighlightingTagToDescriptorMap: util.Map[String, TextAttributesKey] = Map(
"badchar" -> HHC.BadCharacter,
"hashcomment" -> HHC.HashComment,
"doubleslashcomment" -> HHC.DoubleSlashComment,
"null" -> HHC.Null,
"boolean" -> HHC.Boolean,
"number" -> HHC.Number,
"quotedstring" -> HHC.QuotedString,
"multilinestring" -> HHC.MultilineString,
"validstringescape" -> HHC.ValidStringEscape,
"invalidstringescape" -> HHC.InvalidStringEscape,
"brackets" -> HHC.Brackets,
"braces" -> HHC.Braces,
"imparens" -> HHC.IncludeModifierParens,
"substbraces" -> HHC.SubBraces,
"pathvalueseparator" -> HHC.KeyValueSeparator,
"comma" -> HHC.Comma,
"include" -> HHC.Include,
"inclmod" -> HHC.IncludeModifier,
"substsign" -> HHC.SubstitutionSign,
"optsubstsign" -> HHC.OptionalSubstitutionSign,
"unquotedstring" -> HHC.UnquotedString,
"dot" -> HHC.PathSeparator,
"key" -> HHC.EntryKey,
"substkey" -> HHC.SubstitutionKey
).asJava
def getHighlighter: SyntaxHighlighter =
SyntaxHighlighterFactory.getSyntaxHighlighter(HoconLanguage, null, null)
def getDisplayName =
"HOCON"
def getColorDescriptors: Array[ColorDescriptor] =
Array.empty
def getAttributeDescriptors =
HoconColorSettingsPage.Attrs
}
object HoconColorSettingsPage {
final val Attrs = Array(
"Bad character" -> HHC.BadCharacter,
"Hash comment" -> HHC.HashComment,
"Double slash comment" -> HHC.DoubleSlashComment,
"Null" -> HHC.Null,
"Boolean" -> HHC.Boolean,
"Number" -> HHC.Number,
"Quoted string" -> HHC.QuotedString,
"Multiline string" -> HHC.MultilineString,
"Valid string escape" -> HHC.ValidStringEscape,
"Invalid string escape" -> HHC.InvalidStringEscape,
"Brackets" -> HHC.Brackets,
"Braces" -> HHC.Braces,
"Include modifier parens" -> HHC.IncludeModifierParens,
"Substitution braces" -> HHC.SubBraces,
"Path-value separator ('=', ':', '+=')" -> HHC.KeyValueSeparator,
"Comma" -> HHC.Comma,
"Include keyword" -> HHC.Include,
"Include modifier" -> HHC.IncludeModifier,
"Substitution sign" -> HHC.SubstitutionSign,
"Optional substitution sign" -> HHC.OptionalSubstitutionSign,
"Unquoted string" -> HHC.UnquotedString,
"Path separator" -> HHC.PathSeparator,
"Key" -> HHC.EntryKey,
"Key in substitution" -> HHC.SubstitutionKey
).map({
case (displayName, key) => new AttributesDescriptor(displayName, key)
})
}
|
ilinum/intellij-scala
|
src/org/jetbrains/plugins/hocon/highlight/HoconColorSettingsPage.scala
|
Scala
|
apache-2.0
| 4,482 |
package jp.co.bizreach.play2stub
import play.api.http.HeaderNames
import play.api.mvc.{ResponseHeader, AnyContent, Request, Result}
import play.mvc.Http.Status
/**
* Do something before the request is processed.
*/
trait BeforeFilter {
def process(request:Request[AnyContent]):Request[AnyContent]
}
/**
* Do something after result is generated.
*/
trait AfterFilter {
def process(request:Request[AnyContent], result:Result):Result
}
/**
 * Sets a redirect status when a redirect URL is specified.
 * When a result with a success status (< 300) or NOT FOUND (404) is returned and a "redirect"
 * path is specified for the route, responds with SEE OTHER (303) to that URL, preserving the headers.
*/
class RedirectFilter extends AfterFilter {
def process(request: Request[AnyContent], result: Result): Result = {
if (result.header.status < 300 || result.header.status == 404) {
Stub.route(request).flatMap { r => r.redirectUrl.map { url =>
result.copy(
header = ResponseHeader(
status = Status.SEE_OTHER,
headers = result.header.headers
)
).withHeaders(HeaderNames.LOCATION -> url)
}
}.getOrElse(
result
)
} else
result
}
}
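// Added sketch (not part of the original file): a minimal AfterFilter showing
// the hook contract defined by the traits above. The header value is
// illustrative only.
class NoCacheFilter extends AfterFilter {
  def process(request: Request[AnyContent], result: Result): Result =
    result.withHeaders(HeaderNames.CACHE_CONTROL -> "no-store")
}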
|
bizreach/play2-stub
|
src/main/scala/jp/co/bizreach/play2stub/Filters.scala
|
Scala
|
apache-2.0
| 1,224 |
// Copyright (C) 2019 MapRoulette contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
package org.maproulette.models.utils
import java.sql.{PreparedStatement, SQLException}
import anorm._
import org.apache.commons.lang3.StringUtils
import org.joda.time.DateTime
import org.joda.time.format.DateTimeFormat
import org.maproulette.session.SearchParameters
import scala.collection.mutable.ListBuffer
sealed trait SQLKey {
def getSQLKey(): String
}
case class AND() extends SQLKey {
override def getSQLKey(): String = "AND"
}
case class OR() extends SQLKey {
override def getSQLKey(): String = "OR"
}
case class WHERE() extends SQLKey {
override def getSQLKey(): String = "WHERE"
}
/**
* Helper functions for any Data Access Layer classes
*
* @author cuthbertm
*/
trait DALHelper {
private val dateFormat = DateTimeFormat.forPattern("yyyy-MM-dd")
  // The set of characters that are allowed for column names, so that we can sanitize unknown
  // input for protection against SQL injection
private val ordinary =
(('a' to 'z') ++ ('A' to 'Z') ++ ('0' to '9') ++ Seq('_') ++ Seq('.')).toSet
/**
* Function will return "ALL" if value is 0 otherwise the value itself. Postgres does not allow
* using 0 for ALL
*
* @param value The limit used in the query
* @return ALL if 0 otherwise the value
*/
def sqlLimit(value: Int): String = if (value <= 0) "ALL" else s"$value"
/**
   * Wraps the search string in % wildcards so that matching doesn't rely on an exact match.
   * If no value is supplied, simply returns %
*
* @param value The search string that you are using to match with
* @return
*/
def search(value: String): String = if (value.nonEmpty) s"%$value%" else "%"
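  // Illustration (added): expected renderings of the two helpers above.
  //   sqlLimit(0)     // "ALL"  (emits LIMIT ALL rather than LIMIT 0)
  //   sqlLimit(25)    // "25"
  //   search("road")  // "%road%"
  //   search("")      // "%"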
/**
* Creates the ORDER functionality, with the column and direction
*
* @param orderColumn The column that you are ordering with (or multiple comma separated columns)
* @param orderDirection Direction of ordering ASC or DESC
* @param tablePrefix table alias if required
* @param nameFix The namefix really is just a way to force certain queries specific to MapRoulette
* to use a much more efficient query plan. The difference in performance can be quite
* large. We don't do it by default because it relies on the "name" column which is
* not guaranteed.
   * @param ignoreCase Whether to wrap the column in LOWER() for case-insensitive ordering
   * @return
*/
def order(
orderColumn: Option[String] = None,
orderDirection: String = "ASC",
tablePrefix: String = "",
nameFix: Boolean = false,
ignoreCase: Boolean = false
): String = orderColumn match {
case Some(column) =>
this.testColumnName(column)
val direction = orderDirection match {
case "DESC" => "DESC"
case _ => "ASC"
}
      // sanitize the column name to prevent sql injection. Only allow A-Za-z, 0-9, underscores and dots
if (column.forall(this.ordinary.contains)) {
val casedColumn = new StringBuilder()
if (ignoreCase) {
casedColumn ++= "LOWER("
}
casedColumn ++= this.getPrefix(tablePrefix) + column
if (ignoreCase) {
casedColumn ++= ")"
}
s"ORDER BY $casedColumn $direction ${if (nameFix) {
"," + this.getPrefix(tablePrefix) + "name";
} else {
"";
}}"
} else {
""
}
case None => ""
}
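  // Illustration (added): typical renderings of order(...), modulo spacing.
  //   order(Some("name"))                                 // "ORDER BY name ASC"
  //   order(Some("created"), "DESC", "c")                 // "ORDER BY c.created DESC"
  //   order(Some("name"), "ASC", "c", ignoreCase = true)  // "ORDER BY LOWER(c.name) ASC"
  //   order(Some("name; DROP TABLE x"))                   // throws SQLException (sanitizer)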
def sqlWithParameters(query: String, parameters: ListBuffer[NamedParameter]): SimpleSql[Row] = {
if (parameters.nonEmpty) {
SQL(query).on(parameters.toSeq: _*)
} else {
SQL(query).asSimple[Row]()
}
}
def parentFilter(parentId: Long)(implicit conjunction: Option[SQLKey] = Some(AND())): String =
if (parentId != -1) {
s"${this.getSqlKey} parent_id = $parentId"
} else {
""
}
def getLongListFilter(list: Option[List[Long]], columnName: String)(
implicit conjunction: Option[SQLKey] = Some(AND())
): String = {
this.testColumnName(columnName)
list match {
case Some(idList) if idList.nonEmpty =>
s"${this.getSqlKey} $columnName IN (${idList.mkString(",")})"
case _ => ""
}
}
def getOptionalFilter(filterValue: Option[Any], columnName: String, key: String) = {
filterValue match {
case Some(value) => s"$columnName = {$key}"
case None => ""
}
}
def getOptionalMatchFilter(filterValue: Option[Any], columnName: String, key: String) = {
filterValue match {
case Some(value) => s"LOWER($columnName) LIKE LOWER({$key})"
case None => ""
}
}
def getIntListFilter(list: Option[List[Int]], columnName: String)(
implicit conjunction: Option[SQLKey] = Some(AND())
): String = {
this.testColumnName(columnName)
list match {
case Some(idList) if idList.nonEmpty =>
s"${this.getSqlKey} $columnName IN (${idList.mkString(",")})"
case _ => ""
}
}
private def testColumnName(columnName: String): Unit = {
if (!columnName.forall(this.ordinary.contains)) {
throw new SQLException(s"Invalid column name provided `$columnName`")
}
}
private def getSqlKey(implicit conjunction: Option[SQLKey]): String = {
conjunction match {
case Some(c) => c.getSQLKey()
case None => ""
}
}
def getDateClause(column: String, start: Option[DateTime] = None, end: Option[DateTime] = None)(
implicit sqlKey: Option[SQLKey] = None
): String = {
this.testColumnName(column)
val dates = getDates(start, end)
s"${this.getSqlKey} $column::date BETWEEN '${dates._1}' AND '${dates._2}'"
}
def getDates(start: Option[DateTime] = None, end: Option[DateTime] = None): (String, String) = {
val startDate = start match {
case Some(s) => dateFormat.print(s)
case None => dateFormat.print(DateTime.now().minusWeeks(1))
}
val endDate = end match {
case Some(e) => dateFormat.print(e)
case None => dateFormat.print(DateTime.now())
}
(startDate, endDate)
}
def addSearchToQuery(
params: SearchParameters,
whereClause: StringBuilder,
projectPrefix: String = "p",
challengePrefix: String = "c"
)(implicit projectSearch: Boolean = true): ListBuffer[NamedParameter] = {
val parameters = new ListBuffer[NamedParameter]()
if (!projectSearch) {
params.getProjectIds match {
case Some(p) if p.nonEmpty =>
whereClause ++= s"$challengePrefix.parent_id IN (${p.mkString(",")})"
case _ =>
params.projectSearch match {
case Some(ps) if ps.nonEmpty =>
params.fuzzySearch match {
case Some(x) =>
whereClause ++= this.fuzzySearch(s"$projectPrefix.display_name", "ps", x)(
if (whereClause.isEmpty) None else Some(AND())
)
parameters += (Symbol("ps") -> ps)
case None =>
whereClause ++= (if (whereClause.isEmpty) "" else " AND ")
whereClause ++= " (" + this.searchField(s"$projectPrefix.display_name", "ps")(
None
)
whereClause ++= s" OR $challengePrefix.id IN (SELECT vp2.challenge_id FROM virtual_project_challenges vp2 INNER JOIN projects p2 ON p2.id = vp2.project_id WHERE " +
this.searchField(s"p2.display_name", "ps")(None) + " AND p2.enabled=true)) "
parameters += (Symbol("ps") -> s"%$ps%")
}
case _ => // we can ignore this
}
this.appendInWhereClause(
whereClause,
this.enabled(params.projectEnabled.getOrElse(false), projectPrefix)(None)
)
}
}
params.getChallengeIds match {
case Some(c) if c.nonEmpty =>
this.appendInWhereClause(whereClause, s"$challengePrefix.id IN (${c.mkString(",")})")
case _ =>
params.challengeParams.challengeSearch match {
case Some(cs) if cs.nonEmpty =>
params.fuzzySearch match {
case Some(x) =>
this.appendInWhereClause(
whereClause,
this.fuzzySearch(s"$challengePrefix.name", "cs", x)(None)
)
parameters += (Symbol("cs") -> cs)
case None =>
this.appendInWhereClause(
whereClause,
this.searchField(s"$challengePrefix.name", "cs")(None)
)
parameters += (Symbol("cs") -> s"%$cs%")
}
case _ => // ignore
}
this.appendInWhereClause(
whereClause,
this.enabled(params.challengeParams.challengeEnabled.getOrElse(false), challengePrefix)(
None
)
)
}
parameters
}
/**
   * All MapRoulette objects contain the enabled column that defines whether the object is enabled
   * in the system or not. This creates the WHERE part of the clause checking for enabled values in
   * the query
*
* @param value If looking only for enabled elements this needs to be set to true
* @param tablePrefix If used as part of a join or simply the table alias if required
* @param key Defaulted to "AND"
* @return
*/
def enabled(value: Boolean, tablePrefix: String = "")(
implicit key: Option[SQLKey] = Some(AND())
): String = {
if (value) {
s"${this.getSqlKey} ${this.getPrefix(tablePrefix)}enabled = TRUE"
} else {
""
}
}
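  // Illustration (added): enabled(...) only emits SQL when filtering is requested.
  //   enabled(true, "c")  // "AND c.enabled = TRUE" (conjunction defaults to AND)
  //   enabled(false)      // ""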
/**
* Just appends the period at the end of the table prefix if the provided string is not empty
*
* @param prefix The table prefix that is being used in the query
* @return
*/
private def getPrefix(prefix: String): String =
if (StringUtils.isEmpty(prefix) || !prefix.forall(this.ordinary.contains)) "" else s"$prefix."
/**
   * This function handles the conjunction in a where clause. So if you are creating
   * a dynamic where clause, this will handle adding the conjunction clause if required
*
* @param whereClause The StringBuilder where clause
* @param value The value that is being appended
* @param conjunction The conjunction, by default AND
*/
def appendInWhereClause(whereClause: StringBuilder, value: String)(
implicit conjunction: Option[SQLKey] = Some(AND())
): Unit = {
if (whereClause.nonEmpty && value.nonEmpty) {
whereClause ++= s" ${this.getSqlKey} $value"
} else {
whereClause ++= value
}
}
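  // Illustration (added): the conjunction is only inserted once the clause has content.
  //   val wc = new StringBuilder()
  //   appendInWhereClause(wc, "c.enabled = TRUE")  // wc: "c.enabled = TRUE"
  //   appendInWhereClause(wc, "c.parent_id = 12")  // wc: "c.enabled = TRUE AND c.parent_id = 12"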
/**
   * Sets the search field in the where clause correctly; it also surrounds the values
   * with LOWER to make sure the match is case insensitive
*
* @param column The column that you are searching against
* @param conjunction Default is AND, but can use AND or OR
* @param key The search string that you are testing against
* @return
*/
def searchField(column: String, key: String = "ss")(
implicit conjunction: Option[SQLKey] = Some(AND())
): String =
s" ${this.getSqlKey} LOWER($column) LIKE LOWER({$key})"
/**
* Adds fuzzy search to any query. This will include the Levenshtein, Metaphone and Soundex functions
* that will search the string. On large datasets this could potentially decrease performance
*
* @param column The column that we are comparing
* @param key The key used in anorm for the value to compare with
   * @param levenshsteinScore The Levenshtein distance threshold between the strings
* @param metaphoneSize The maximum size of the metaphone code
* @param conjunction Default AND
* @return A string with all the fuzzy search functions
*/
def fuzzySearch(
column: String,
key: String = "ss",
levenshsteinScore: Int = DALHelper.DEFAULT_LEVENSHSTEIN_SCORE,
metaphoneSize: Int = DALHelper.DEFAULT_METAPHONE_SIZE
)(implicit conjunction: Option[SQLKey] = Some(AND())): String = {
val score = if (levenshsteinScore > 0) {
levenshsteinScore
} else {
3
}
s""" ${this.getSqlKey} ($column <> '' AND
(LEVENSHTEIN(LOWER($column), LOWER({$key})) < $score OR
            METAPHONE(LOWER($column), $metaphoneSize) = METAPHONE(LOWER({$key}), $metaphoneSize) OR
SOUNDEX(LOWER($column)) = SOUNDEX(LOWER({$key})))
)"""
}
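  // Illustration (added): with the defaults, fuzzySearch("c.name", "cs") renders roughly
  //   AND (c.name <> '' AND
  //     (LEVENSHTEIN(LOWER(c.name), LOWER({cs})) < 3 OR
  //      METAPHONE(LOWER(c.name), 4) = METAPHONE(LOWER({cs}), 4) OR
  //      SOUNDEX(LOWER(c.name)) = SOUNDEX(LOWER({cs}))))
  // LEVENSHTEIN/METAPHONE/SOUNDEX come from PostgreSQL's fuzzystrmatch extension.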
def addChallengeTagMatchingToQuery(
params: SearchParameters,
whereClause: StringBuilder,
joinClause: StringBuilder,
challengePrefix: String = "c"
): ListBuffer[NamedParameter] = {
val parameters = new ListBuffer[NamedParameter]()
params.challengeParams.challengeTags match {
case Some(ct) if ct.nonEmpty =>
joinClause ++=
s"""
INNER JOIN tags_on_challenges toc ON toc.challenge_id = $challengePrefix.id
INNER JOIN tags tgs ON tgs.id = toc.tag_id
"""
val tags = ListBuffer[String]()
ct.zipWithIndex.foreach(tagWithIndex => {
parameters += new NamedParameter(s"tag${tagWithIndex._2}", tagWithIndex._1)
tags += s"{tag${tagWithIndex._2}}"
})
this.appendInWhereClause(whereClause, s"tgs.name IN (${tags.mkString(",")})")
case _ => // ignore
}
parameters
}
/**
   * Our keys for our objects are currently Long, but String is supported if need be. This function
   * handles transforming Java objects to SQL for a specific set related to the object key
*
* @tparam Key The type of Key, this is currently always Long, but could be changed easily enough in the future
* @return
*/
def keyToStatement[Key]: ToStatement[Key] = {
new ToStatement[Key] {
def set(s: PreparedStatement, i: Int, identifier: Key) =
identifier match {
case id: String => ToStatement.stringToStatement.set(s, i, id)
case Some(id: String) => ToStatement.stringToStatement.set(s, i, id)
case id: Long => ToStatement.longToStatement.set(s, i, id)
case Some(id: Long) => ToStatement.longToStatement.set(s, i, id)
case intValue: Integer => ToStatement.integerToStatement.set(s, i, intValue)
case list: List[Long @unchecked] => ToStatement.listToStatement[Long].set(s, i, list)
}
}
}
}
object DALHelper {
private val DEFAULT_LEVENSHSTEIN_SCORE = 3
private val DEFAULT_METAPHONE_SIZE = 4
}
|
Crashfreak/maproulette2
|
app/org/maproulette/models/utils/DALHelper.scala
|
Scala
|
apache-2.0
| 14,660 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers.registration.returns
import config.{BaseControllerComponents, FrontendAppConfig}
import controllers.BaseController
import forms.SendGoodsOverseasForm
import models.api.returns.OverseasCompliance
import play.api.mvc.{Action, AnyContent}
import services.{ReturnsService, SessionProfile, SessionService}
import uk.gov.hmrc.auth.core.AuthConnector
import views.html.returns.SendGoodsOverseasView
import javax.inject.{Inject, Singleton}
import scala.concurrent.{ExecutionContext, Future}
@Singleton
class SendGoodsOverseasController @Inject()(val authConnector: AuthConnector,
val sessionService: SessionService,
val returnsService: ReturnsService,
val view: SendGoodsOverseasView)
(implicit appConfig: FrontendAppConfig,
val executionContext: ExecutionContext,
baseControllerComponents: BaseControllerComponents)
extends BaseController with SessionProfile {
val show: Action[AnyContent] = isAuthenticatedWithProfile() {
implicit request =>
implicit profile =>
returnsService.getReturns.map { returns =>
returns.overseasCompliance match {
case Some(OverseasCompliance(Some(goodsToOverseas), _, _, _, _, _)) =>
Ok(view(SendGoodsOverseasForm.form.fill(goodsToOverseas)))
case _ =>
Ok(view(SendGoodsOverseasForm.form))
}
}
}
val submit: Action[AnyContent] = isAuthenticatedWithProfile() {
implicit request =>
implicit profile =>
SendGoodsOverseasForm.form.bindFromRequest.fold(
errors => Future.successful(BadRequest(view(errors))),
success => {
for {
returns <- returnsService.getReturns
updatedReturns = returns.copy(
                overseasCompliance = returns.overseasCompliance match {
                  // no answers stored yet: start a new compliance section with this answer
                  case None =>
                    Some(OverseasCompliance(goodsToOverseas = Some(success)))
                  // answered yes: record the answer, the EU goods answer stays relevant
                  case Some(_) if success =>
                    returns.overseasCompliance.map(_.copy(
                      goodsToOverseas = Some(success)
                    ))
                  // answered no: record the answer and clear the now-irrelevant EU goods answer
                  case Some(_) =>
                    returns.overseasCompliance.map(_.copy(
                      goodsToOverseas = Some(success),
                      goodsToEu = None
                    ))
                }
)
_ <- returnsService.submitReturns(updatedReturns)
} yield {
if (success) {
Redirect(routes.SendEUGoodsController.show)
} else {
Redirect(routes.StoringGoodsController.show)
}
}
}
)
}
}
|
hmrc/vat-registration-frontend
|
app/controllers/registration/returns/SendGoodsOverseasController.scala
|
Scala
|
apache-2.0
| 3,512 |
package commands.makemkv
import commands.FilenameSanitiser
import org.specs2.mutable.Specification
/**
* Created by alex on 07/05/15.
*/
class FilenameSanitiserSpec extends Specification with FilenameSanitiser {
"Upper case characters" should {
"be replaced by lower case characters" in {
sanitise("AB") must be equalTo("ab")
}
}
"Lower case characters" should {
"stay the same" in {
sanitise("AB") must be equalTo("ab")
}
}
"Digits" should {
"stay the same" in {
sanitise("543") must be equalTo("543")
}
}
"Punctuation" should {
"be removed" in {
sanitise("!,()") must be equalTo("")
}
}
"Whitespace" should {
"be normalised to a single underscore" in {
sanitise("ab \tcd") must beEqualTo("ab_cd")
}
}
}
|
unclealex72/ripper
|
src/test/scala/commands/makemkv/FilenameSanitiserSpec.scala
|
Scala
|
mit
| 804 |
package io.flow.dependency.api.lib
import io.flow.util.Version
case class ArtifactVersion(
tag: Version,
crossBuildVersion: Option[Version]
)
|
flowcommerce/dependency
|
api/app/lib/ArtifactVersion.scala
|
Scala
|
mit
| 149 |
package dbtarzan.db
import dbtarzan.db.foreignkeys.ForeignKeyTextBuilder
object SqlBuilder {
val selectClause = "SELECT * FROM "
  /* Builds the SQL to query the table, combining the (potential) originating foreign key (which determines the rows to show), the (potential) generic where filter, the order-by fields and the delimited table name */
def buildSql(structure: DBTableStructure) : QuerySql = {
val foreignClosure = structure.foreignFilter.map(ForeignKeyTextBuilder.buildClause(_, structure.attributes))
val filters = List(foreignClosure, structure.genericFilter.map(_.text)).flatten
val delimitedTableNameWithSchema = QueryAttributesApplier.from(structure.attributes).applySchemaAndDelimiters(structure.description.name)
val orderBy: String = structure.orderByFields.map(SqlPartsBuilder.buildOrderBy).getOrElse("")
QuerySql(selectClause + delimitedTableNameWithSchema + SqlPartsBuilder.buildFilters(filters) + orderBy)
}
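  // Illustrative sketch (hypothetical names): for a table "customers" with no filters and an
  // order-by on "name", this would produce something like
  //   SELECT * FROM "public"."customers" ORDER BY name
  // with the exact delimiters and schema prefix supplied by the QueryAttributesApplier.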
def buildSql(structure: DBRowStructure) : QuerySql = {
val delimitedTableNameWithSchema = QueryAttributesApplier.from(structure.attributes).applySchemaAndDelimiters(structure.tableName)
val sqlFieldBuilder = new SqlFieldBuilder(structure.columns.fields, structure.attributes)
QuerySql(selectClause + delimitedTableNameWithSchema + SqlPartsBuilder.buildFilters(structure.filter.map(sqlFieldBuilder.buildFieldText)))
}
}
|
aferrandi/dbtarzan
|
src/main/scala/dbtarzan/db/SqlBuilder.scala
|
Scala
|
apache-2.0
| 1,344 |
package nasa.nccs.caching
import java.io._
import java.nio.file.{Files, Paths}
import nasa.nccs.utilities.Logger
import org.apache.commons.io.FileUtils
import collection.mutable
import com.googlecode.concurrentlinkedhashmap.{ConcurrentLinkedHashMap, EntryWeigher, EvictionListener}
import nasa.nccs.utilities.Loggable
import nasa.nccs.cdapi.cdm.DiskCacheFileMgr
import nasa.nccs.edas.utilities.appParameters
import scala.concurrent.ExecutionContext.Implicits.global
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
import scala.concurrent.{ExecutionContext, Future, Promise}
import scala.io.Source
import scala.util.control.NonFatal
import scala.util.{Failure, Success, Try}
trait Cache[K,V] { cache ⇒
type KeyEventNotifier[K] = (String,K) => Unit
/**
* Selects the (potentially non-existing) cache entry with the given key.
*/
def apply(key: K) = new Keyed(key)
def put( key: K, value: V )
def putF( key: K, value: Future[V] )
def putIfAbsent( key: K, value: V )
def putIfAbsentF( key: K, fvalue: Future[V] )
def getEntries: Seq[(K,V)]
class Keyed(key: K) {
/**
* Returns either the cached Future for the key or evaluates the given call-by-name argument
* which produces either a value instance of type `V` or a `Future[V]`.
*/
def apply(magnet: ⇒ ValueMagnet[V])(implicit ec: ExecutionContext): Future[V] =
cache.apply(key, () ⇒ try magnet.future catch { case NonFatal(e) ⇒ Future.failed(e) })
/**
* Returns either the cached Future for the key or evaluates the given function which
* should lead to eventual completion of the promise.
*/
def apply[U](f: Promise[V] ⇒ U)(implicit ec: ExecutionContext): Future[V] =
cache.apply(key, () ⇒ { val p = Promise[V](); f(p); p.future })
}
/**
* Returns either the cached Future for the given key or evaluates the given value generating
* function producing a `Future[V]`.
*/
def apply(key: K, genValue: () ⇒ Future[V])(implicit ec: ExecutionContext): Future[V]
/**
* Retrieves the future instance that is currently in the cache for the given key.
* Returns None if the key has no corresponding cache entry.
*/
def get(key: K): Option[Future[V]]
/**
* Removes the cache item for the given key. Returns the removed item if it was found (and removed).
*/
def remove(key: K): Option[Future[V]]
/**
* Clears the cache by removing all entries.
*/
def clear(): Set[K]
def persist()
/**
   * Returns the set of keys in the cache, in no particular order.
* Should return in roughly constant time.
* Note that this number might not reflect the exact keys of active, unexpired
* cache entries, since expired entries are only evicted upon next access
* (or by being thrown out by a capacity constraint).
*/
def keys: Set[K]
def values: Iterable[Future[V]]
/**
* Returns a snapshot view of the keys as an iterator, traversing the keys from the least likely
* to be retained to the most likely. Note that this is not constant time.
*
* @param limit No more than limit keys will be returned
*/
def ascendingKeys(limit: Option[Int] = None): Iterator[K]
/**
* Returns the upper bound for the number of currently cached entries.
* Note that this number might not reflect the exact number of active, unexpired
* cache entries, since expired entries are only evicted upon next access
* (or by being thrown out by a capacity constraint).
*/
def size: Long
}
class ValueMagnet[V](val future: Future[V])
object ValueMagnet {
implicit def fromAny[V](block: V): ValueMagnet[V] = fromFuture(Future.successful(block))
implicit def fromFuture[V](future: Future[V]): ValueMagnet[V] = new ValueMagnet(future)
}
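// A hedged usage sketch of the magnet-based API above (hypothetical key and computation,
// with an implicit ExecutionContext in scope):
//   val cache: Cache[String, Int] = ...
//   val result: Future[Int] = cache("answer") { expensiveComputation() }
// The block is only evaluated on a cache miss; concurrent callers share the same Future.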
/**
* The cache has a defined maximum number of entries it can store. After the maximum capacity is reached new
 * entries cause old ones to be evicted in a least-recently-used manner, i.e. the entries that haven't been accessed for
* the longest time are evicted first.
*/
//class DeletionListener[K,Future[V]]( val cache: Int ) extends EvictionListener[K,Future[V]] {
// override def onEviction(key: K, value: V ) {;}
//}
class PersistentCache( cname: String, ctype: String ) extends FutureCache[String,String](cname,ctype) {
override def persist(): Unit = {
Files.createDirectories( Paths.get(cacheFile).getParent )
val ostr = new PrintWriter( cacheFile )
val entries = getEntries.toList
logger.info( " ***Persisting cache %s to file '%s', entries: [ %s ]".format( cname, cacheFile, entries.mkString(",") ) )
entries.foreach( entry => ostr.write( entry._1 + ";" + entry._2 + "\\n" ) )
ostr.close()
}
override def clear(): Set[String] = {
val keys = super.clear()
logger.info( " ** Deleting cache directory: " + cacheFile )
FileUtils.deleteDirectory( Paths.get(cacheFile).getParent.toFile )
keys
}
override def restore: Option[ Array[(String ,String)] ] = {
try {
val istr = Source.fromFile(cacheFile)
logger.info(s"Restoring $cname cache map from: " + cacheFile);
val entries: Iterator[(String ,String)] = for( line <- istr.getLines; tup = line.split(";") ) yield { tup(0) -> tup(1) }
Some( entries.toArray )
} catch {
case err: Throwable =>
logger.warn("Can't load persisted cache file '" + cacheFile + "' due to error: " + err.toString );
None
}
}
}
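// Persisted file format sketch: one "key;value" pair per line, e.g. (hypothetical entries)
//   fragSpecA;/cache/path/a
//   fragSpecB;/cache/path/b
// Note that keys or values containing ';' or newlines would not survive a round trip,
// since restore splits each line on ';' and keeps only the first two fields.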
class FutureCache[K,V](val cname: String, val ctype: String ) extends Cache[K,V] with Loggable {
val KpG = 1000000L
val maxCapacity: Long = appParameters(Array(ctype.toLowerCase,cname.toLowerCase,"capacity").mkString("."),"30").toLong * KpG
val initialCapacity: Int=64
val cacheFile = DiskCacheFileMgr.getDiskCacheFilePath( ctype, cname + ".ssv" )
require(maxCapacity >= 0, "maxCapacity must not be negative")
require(initialCapacity <= maxCapacity, "initialCapacity must be <= maxCapacity")
private[caching] val store = getStore()
def evictionNotice( key: K, value: Future[V] ) = { logger.info( "Evicting Key %s".format( key.toString ) ) }
def entrySize( key: K, value: Future[V] ): Int = { 1 }
def weightedSize: Long = store.weightedSize()
def capacity_log( key: K, msg: String ) = synchronized {
logger.info( s"CACHE LOG: %s [$cname-$ctype](%s): size = %d".format( msg, key.toString, weightedSize ) );
}
def getStore(): ConcurrentLinkedHashMap[K, Future[V]] = {
val evictionListener = new EvictionListener[K,Future[V]]{ def onEviction(key: K, value: Future[V] ): Unit = {
capacity_log( key, "--" )
evictionNotice(key,value)
} }
val sizeWeighter = new EntryWeigher[K,Future[V]]{ def weightOf(key: K, value: Future[V] ): Int = { entrySize(key,value) } }
val hmap = new ConcurrentLinkedHashMap.Builder[K, Future[V]].initialCapacity(initialCapacity).maximumWeightedCapacity(maxCapacity).listener( evictionListener ).weigher( sizeWeighter ).build()
restore match {
case Some( entryArray ) => entryArray.foreach { case (key,value) => hmap.put(key,Future(value)) }
      case None => // nothing persisted to restore
}
hmap
}
def get(key: K) = Option(store.get(key))
def getEntries: Seq[(K,V)] = {
val entrySet = store.entrySet.toSet
val entries = for (entry: java.util.Map.Entry[K, Future[V]] <- entrySet ) yield entry.getValue.value match {
case Some(value) ⇒ Some( (entry.getKey, value.get) )
case None => None
}
entries.flatten.toSeq
}
  def persist(): Unit = {} // no-op by default; overridden by PersistentCache
protected def restore: Option[ Array[(K,V)] ] = { None }
def clear(): Set[K] = {
val keys: Set[K] = Set[K](store.keys.toSeq: _*)
store.clear()
keys
}
def put( key: K, value: V ) = if( store.put(key, Future(value) ) == null ) { capacity_log( key, "++" ) }
def putF( key: K, fvalue: Future[V] ) = if( store.put(key, fvalue ) == null ) { capacity_log( key, "++" ) }
def putIfAbsent( key: K, value: V ) = if( store.putIfAbsent(key, Future(value) ) == null ) { capacity_log( key, "++" ) }
def putIfAbsentF( key: K, fvalue: Future[V] ) = if( store.putIfAbsent(key, fvalue ) == null ) { capacity_log( key, "++" ) }
def apply(key: K, genValue: () ⇒ Future[V])(implicit ec: ExecutionContext): Future[V] = {
val promise = Promise[V]()
store.putIfAbsent(key, promise.future) match {
case null ⇒
genValue() andThen {
case Success(value) =>
capacity_log( key, "++" )
promise.complete( Success(value) )
case Failure(e) =>
val err = if( e.getCause == null ) e else e.getCause
logger.warn(s"Failed to add element %s to cache $cname:$ctype due to error %s".format( key.toString, err.toString ) )
logger.error( "Error Stack Trace:\\n" + err.getStackTrace.mkString("\\n"))
store.remove(key, promise.future)
}
case existingFuture ⇒ existingFuture
}
}
def keys: Set[K] = store.keySet().asScala.toSet
def values: Iterable[Future[V]] = store.values().asScala
  def remove( key: K ) = Option( store.remove( key ) ) // None if the key was absent
def ascendingKeys(limit: Option[Int] = None) =
limit.map { lim ⇒ store.ascendingKeySetWithLimit(lim) }
.getOrElse(store.ascendingKeySet())
.iterator().asScala
def size = store.size
}
private[caching] class Entry[T](val promise: Promise[T]) {
@volatile var created = Timestamp.now
@volatile var lastAccessed = Timestamp.now
def future = promise.future
def refresh(): Unit = {
    // we don't care whether we overwrite a potentially newer value
lastAccessed = Timestamp.now
}
override def toString = future.value match {
case Some(Success(value)) ⇒ value.toString
case Some(Failure(exception)) ⇒ exception.toString
case None ⇒ "pending"
}
}
|
nasa-nccs-cds/EDAS
|
src/main/scala/nasa/nccs/caching/memoryCache.scala
|
Scala
|
gpl-2.0
| 9,828 |
package mimir.parser
import fastparse._, NoWhitespace._
import fastparse.Parsed
import sparsity.parser.StreamParser
import java.io.Reader
sealed abstract class MimirCommand
case class SlashCommand(
body: String
) extends MimirCommand
case class SQLCommand(
body: MimirStatement
) extends MimirCommand
object MimirCommand
{
def apply(input: Reader): StreamParser[MimirCommand] =
new StreamParser[MimirCommand](
parse(_:Iterator[String], command(_), verboseFailures = true),
input
)
def apply(input: String): Parsed[MimirCommand] =
parse(input, command(_))
def command[_:P]: P[MimirCommand] = P(
slashCommand
| ( MimirSQL.statement.map { SQLCommand(_) } ~ ";" )
)
def slashCommand[_:P] = P(
"/" ~/
CharsWhile(
c => (c != '\\n') && (c != '\\r')
).!.map { SlashCommand(_) } ~/
CharsWhile(
c => (c == '\\n') || (c == '\\r')
).?
)
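  // Parse sketch (hypothetical inputs): MimirCommand("/load test.csv") should yield
  // Parsed.Success(SlashCommand("load test.csv"), ...), while MimirCommand("SELECT * FROM R;")
  // should yield a Parsed.Success wrapping SQLCommand(<parsed statement>).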
}
|
UBOdin/mimir
|
src/main/scala/mimir/parser/MimirCommand.scala
|
Scala
|
apache-2.0
| 922 |
package authentication.controllers
import articles.services.ArticleService
import authentication.models.SecurityUser
import authentication.{ExceptionCode, HttpExceptionResponse, MissingOrInvalidCredentials}
import commons.repositories.ActionRunner
import julienrf.json.derived
import org.pac4j.core.credentials.UsernamePasswordCredentials
import org.pac4j.core.credentials.authenticator.Authenticator
import org.pac4j.core.profile.CommonProfile
import org.pac4j.http.client.direct.{DirectBasicAuthClient, HeaderClient}
import org.pac4j.jwt.credentials.authenticator.JwtAuthenticator
import org.pac4j.jwt.profile.{JwtGenerator, JwtProfile}
import org.pac4j.play.PlayWebContext
import org.pac4j.play.store.PlayCacheSessionStore
import play.api.libs.json._
import play.api.mvc.Security.AuthenticatedRequest
import play.api.mvc._
import play.cache.SyncCacheApi
import play.mvc.Http
import scala.concurrent.ExecutionContext
case class BearerTokenResponse(token: String) {
val aType: String = "Bearer"
}
object BearerTokenResponse {
implicit val jsonReads: Reads[BearerTokenResponse] = Json.reads[BearerTokenResponse]
implicit val jsonWrites: Writes[BearerTokenResponse] = (tokenResponse: BearerTokenResponse) => {
JsObject(List(
"token" -> JsString(tokenResponse.token),
"type" -> JsString(tokenResponse.aType)
))
}
}
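// Serialized shape sketch: Json.toJson(BearerTokenResponse("abc")) produces
//   {"token":"abc","type":"Bearer"}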
class AuthenticationController(actionRunner: ActionRunner,
cacheApi: SyncCacheApi,
httpBasicAuthenticator: Authenticator[UsernamePasswordCredentials],
components: ControllerComponents,
jwtGenerator: JwtGenerator[CommonProfile],
implicit private val ec: ExecutionContext) extends AbstractController(components) {
private val playCacheSessionStore = new PlayCacheSessionStore(cacheApi)
private val client = new DirectBasicAuthClient(httpBasicAuthenticator)
def authenticate: Action[AnyContent] = Action { request =>
val webContext = new PlayWebContext(request, playCacheSessionStore)
    // getCredentials returns null when no valid Basic credentials are supplied
    Option(client.getCredentials(webContext))
      .map(credentials => {
        // issue a JWT whose subject (id) is the authenticated username
        val profile = new JwtProfile()
        profile.setId(credentials.getUsername)
        val jwtToken = jwtGenerator.generate(profile)
val json = Json.toJson(BearerTokenResponse(jwtToken))
Ok(json)
})
.getOrElse(Forbidden(Json.toJson(HttpExceptionResponse(MissingOrInvalidCredentials))))
}
}
|
Dasiu/play-framework-test-project
|
app/authentication/controllers/AuthenticationController.scala
|
Scala
|
mit
| 2,498 |
package mesosphere.marathon
package api
import akka.Done
import akka.actor.ActorSystem
import akka.stream.scaladsl.Source
import akka.stream.{ActorMaterializer, ActorMaterializerSettings}
import mesosphere.marathon.core.deployment.DeploymentPlan
import mesosphere.marathon.core.group.GroupManager
import mesosphere.marathon.core.instance.update.{InstanceUpdateOperation, InstancesSnapshot}
import mesosphere.marathon.core.instance.{Instance, TestInstanceBuilder}
import mesosphere.marathon.core.task.termination.{KillReason, KillService}
import mesosphere.marathon.core.task.tracker.InstanceTracker.InstancesBySpec
import mesosphere.marathon.core.task.tracker.InstanceTracker
import mesosphere.marathon.plugin.auth.Identity
import mesosphere.marathon.state._
import mesosphere.AkkaUnitTest
import org.mockito.ArgumentCaptor
import org.mockito.Mockito._
import scala.concurrent.Future
class TaskKillerTest extends AkkaUnitTest {
"TaskKiller" should {
//regression for #3251
"No tasks to kill should return with an empty array" in {
val f = new Fixture
import f.auth.identity
val appId = AbsolutePathId("/invalid")
when(f.tracker.specInstances(appId)).thenReturn(Future.successful(Seq.empty))
when(f.groupManager.runSpec(appId)).thenReturn(Some(AppDefinition(appId, role = "*")))
val result = f.taskKiller.kill(appId, (tasks) => Seq.empty[Instance]).futureValue
result.isEmpty shouldEqual true
}
"AppNotFound" in {
val f = new Fixture
import f.auth.identity
val appId = AbsolutePathId("/invalid")
when(f.tracker.specInstances(appId)).thenReturn(Future.successful(Seq.empty))
when(f.groupManager.runSpec(appId)).thenReturn(None)
val result = f.taskKiller.kill(appId, (tasks) => Seq.empty[Instance])
result.failed.futureValue shouldEqual PathNotFoundException(appId)
}
"AppNotFound with scaling" in {
val f = new Fixture
import f.auth.identity
val appId = AbsolutePathId("/invalid")
when(f.tracker.instancesBySpec()).thenReturn(Future.successful(InstancesBySpec.empty))
when(f.tracker.specInstances(appId)).thenReturn(Future.successful(Seq.empty))
val result = f.taskKiller.killAndScale(appId, (tasks) => Seq.empty[Instance], force = true)
result.failed.futureValue shouldEqual PathNotFoundException(appId)
}
"KillRequested with scaling" in {
val f = new Fixture
import f.auth.identity
val appId = AbsolutePathId("/app")
val instance1 = TestInstanceBuilder.newBuilder(appId).addTaskRunning().getInstance()
val instance2 = TestInstanceBuilder.newBuilder(appId).addTaskRunning().getInstance()
val tasksToKill = Seq(instance1, instance2)
when(f.tracker.instancesBySpec()).thenReturn(Future.successful(InstancesBySpec.forInstances(tasksToKill)))
when(f.tracker.specInstances(appId)).thenReturn(Future.successful(tasksToKill))
when(f.groupManager.group(appId.parent)).thenReturn(Some(Group.empty(appId.parent)))
val groupUpdateCaptor = ArgumentCaptor.forClass(classOf[(RootGroup) => RootGroup])
val forceCaptor = ArgumentCaptor.forClass(classOf[Boolean])
val toKillCaptor = ArgumentCaptor.forClass(classOf[Map[AbsolutePathId, Seq[Instance]]])
val expectedDeploymentPlan = DeploymentPlan.empty
when(
f.groupManager
.updateRoot(any[AbsolutePathId], groupUpdateCaptor.capture(), any[Timestamp], forceCaptor.capture(), toKillCaptor.capture())
).thenReturn(Future.successful(expectedDeploymentPlan))
val result = f.taskKiller.killAndScale(appId, (tasks) => tasksToKill, force = true)
result.futureValue shouldEqual expectedDeploymentPlan
forceCaptor.getValue shouldEqual true
toKillCaptor.getValue shouldEqual Map(appId -> tasksToKill)
}
"KillRequested without scaling" in {
val f = new Fixture
import f.auth.identity
val appId = AbsolutePathId("/my/app")
val instance = TestInstanceBuilder.newBuilder(appId).addTaskRunning().getInstance()
val tasksToKill = Seq(instance)
when(f.groupManager.runSpec(appId)).thenReturn(Some(AppDefinition(appId, role = "*")))
when(f.tracker.specInstances(appId)).thenReturn(Future.successful(tasksToKill))
val result = f.taskKiller.kill(
appId,
{ tasks =>
tasks should equal(tasksToKill)
tasksToKill
}
)
result.futureValue shouldEqual tasksToKill
verify(f.killService, times(1)).killInstancesAndForget(tasksToKill, KillReason.KillingTasksViaApi)
}
"Kill and scale w/o force should fail if there is a deployment" in {
val f = new Fixture
import f.auth.identity
val appId = AbsolutePathId("/my/app")
val instance1 = TestInstanceBuilder.newBuilder(appId).addTaskRunning().getInstance()
val instance2 = TestInstanceBuilder.newBuilder(appId).addTaskRunning().getInstance()
val tasksToKill = Seq(instance1, instance2)
when(f.tracker.specInstances(appId)).thenReturn(Future.successful(tasksToKill))
when(f.tracker.instancesBySpec()).thenReturn(Future.successful(InstancesBySpec.forInstances(tasksToKill)))
when(f.groupManager.group(appId.parent)).thenReturn(Some(Group.empty(appId.parent)))
val groupUpdateCaptor = ArgumentCaptor.forClass(classOf[(RootGroup) => RootGroup])
val forceCaptor = ArgumentCaptor.forClass(classOf[Boolean])
when(
f.groupManager.updateRoot(
any[AbsolutePathId],
groupUpdateCaptor.capture(),
any[Timestamp],
forceCaptor.capture(),
any[Map[AbsolutePathId, Seq[Instance]]]
)
).thenReturn(Future.failed(AppLockedException()))
val result = f.taskKiller.killAndScale(appId, (tasks) => tasksToKill, force = false)
result.failed.futureValue shouldEqual AppLockedException()
forceCaptor.getValue shouldEqual false
}
"kill with wipe will kill running and expunge all" in {
val f = new Fixture
import f.auth.identity
val appId = AbsolutePathId("/my/app")
val app = AppDefinition(appId, role = "*")
val runningInstance: Instance = TestInstanceBuilder.newBuilder(appId).addTaskRunning().getInstance()
val reservedInstance: Instance = TestInstanceBuilder.scheduledWithReservation(app)
val instancesToKill = Seq(runningInstance, reservedInstance)
when(f.groupManager.runSpec(appId)).thenReturn(Some(AppDefinition(appId, role = "*")))
when(f.tracker.specInstances(appId)).thenReturn(Future.successful(instancesToKill))
when(f.tracker.forceExpunge(runningInstance.instanceId)).thenReturn(Future.successful(Done))
when(f.tracker.forceExpunge(reservedInstance.instanceId)).thenReturn(Future.successful(Done))
val result = f.taskKiller.kill(
appId,
{ instances =>
instances should equal(instancesToKill)
instancesToKill
},
wipe = true
)
result.futureValue shouldEqual instancesToKill
// all found instances are expunged and the launched instance is eventually expunged again
verify(f.tracker, atLeastOnce).forceExpunge(runningInstance.instanceId)
verify(f.tracker).forceExpunge(reservedInstance.instanceId)
}
"allows kill and scale for an app for which a user specifically has access" in {
// Regression test for MARATHON-8731
val business = Builders.newAppDefinition.command(id = AbsolutePathId("/business"))
val devBackend = Builders.newAppDefinition.command(id = AbsolutePathId("/dev/backend"))
val initialRoot = Builders.newRootGroup(apps = Seq(business, devBackend))
val businessInstance = TestInstanceBuilder.newBuilderForRunSpec(business).addTaskRunning().instance
val devBackendInstance = TestInstanceBuilder.newBuilderForRunSpec(devBackend).addTaskRunning().instance
val authFn: Any => Boolean = {
case app: AppDefinition =>
(app.id == devBackend.id)
case _ =>
???
}
new FixtureWithRealInstanceTracker(initialRoot, authFn) {
instanceTracker.process(InstanceUpdateOperation.Schedule(businessInstance)).futureValue
instanceTracker.process(InstanceUpdateOperation.Schedule(devBackendInstance)).futureValue
val deployment = taskKiller.killAndScale(Map(devBackendInstance.runSpecId -> Seq(devBackendInstance)), force = false).futureValue
deployment.affectedRunSpecIds shouldBe Set(devBackend.id)
}
}
}
class FixtureWithRealInstanceTracker(initialRoot: RootGroup = RootGroup.empty(), authFn: Any => Boolean = _ => true) {
val testInstanceTrackerFixture = new TestInstanceTrackerFixture(initialRoot, authFn = authFn)
val instanceTracker = testInstanceTrackerFixture.instanceTracker
val killService: KillService = mock[KillService]
val auth = testInstanceTrackerFixture.authFixture.auth
implicit val identity: Identity = testInstanceTrackerFixture.authFixture.identity
testInstanceTrackerFixture.service.deploy(any, any).returns(Future(Done))
val taskKiller: TaskKiller = new TaskKiller(
instanceTracker,
testInstanceTrackerFixture.groupManager,
testInstanceTrackerFixture.authFixture.auth,
testInstanceTrackerFixture.authFixture.auth,
killService
)
}
class Fixture {
val auth: TestAuthFixture = new TestAuthFixture
val tracker: InstanceTracker = mock[InstanceTracker]
tracker.setGoal(any, any, any).returns(Future.successful(Done))
tracker.instanceUpdates.returns(Source.single(InstancesSnapshot(Nil) -> Source.empty))
val killService: KillService = mock[KillService]
val groupManager: GroupManager = mock[GroupManager]
implicit val system = ActorSystem("test")
def materializerSettings = ActorMaterializerSettings(system)
implicit val mat = ActorMaterializer(materializerSettings)
val taskKiller: TaskKiller = new TaskKiller(tracker, groupManager, auth.auth, auth.auth, killService)
}
}
|
mesosphere/marathon
|
src/test/scala/mesosphere/marathon/api/TaskKillerTest.scala
|
Scala
|
apache-2.0
| 10,031 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.mxnet.module
import org.apache.mxnet.DType.DType
import org.apache.mxnet._
import org.apache.mxnet.module.DataParallelExecutorGroup.Builder
import org.slf4j.{Logger, LoggerFactory}
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
private object DataParallelExecutorGroup {
private val logger: Logger = LoggerFactory.getLogger(classOf[DataParallelExecutorGroup])
// Load a list of arrays into a list of arrays specified by slices
private def loadGeneralMulti(data: Seq[NDArray],
targets: Seq[Array[((Int, Int), NDArray)]],
majorAxis: Seq[Int]): Unit = {
for (((dSrc, dTargets), axis) <- data zip targets zip majorAxis) {
for (((sliceIdxStart, sliceIdxStop), dDst) <- dTargets) {
if (axis >= 0) {
// copy slice
val shape = dSrc.shape
val begin = Array.fill(shape.length)(0)
val end = shape.toArray
begin(axis) = sliceIdxStart
end(axis) = sliceIdxStop
if (dSrc.context == dDst.context) {
NDArray.crop(Map(
"begin" -> new Shape(begin),
"end" -> new Shape(end),
"out" -> dDst))(dSrc)
} else {
// on different device, crop and then do cross device copy
val dDstCopy: NDArray = NDArray.crop(Map(
"begin" -> new Shape(begin),
"end" -> new Shape(end)))(dSrc)
dDstCopy.copyTo(dDst)
}
} else {
dSrc.copyTo(dDst)
}
}
}
}
private def loadGeneral(data: Seq[NDArray], targets: Seq[NDArray]): Unit = {
for ((dSrc, dTarget) <- data zip targets) {
dSrc.copyTo(dTarget)
}
}
// Load data into sliced arrays
private def loadData(batch: DataBatch,
targets: Seq[Array[((Int, Int), NDArray)]],
majorAxis: Seq[Int]): Unit = {
loadGeneralMulti(batch.data, targets, majorAxis)
}
// Load label into sliced arrays
private def loadLabel(batch: DataBatch,
targets: Seq[Array[((Int, Int), NDArray)]],
majorAxis: Seq[Int]): Unit = {
loadGeneralMulti(batch.label, targets, majorAxis)
}
  // Merge outputs that live on multiple contexts into one,
  // so that they look like living on a single context.
private def mergeMultiContext(outputs: IndexedSeq[IndexedSeq[NDArray]], majorAxis: Seq[Int])
: IndexedSeq[NDArray] = {
(outputs zip majorAxis).map { case (tensors, axis) =>
if (axis >= 0) {
NDArray.concatenate(tensors, axis = axis, alwaysCopy = false)
} else {
        // a negative axis means there is no batch_size axis, and all the
        // results should be the same on each device. We simply take the first one,
        // without checking that they are actually the same
tensors(0)
}
}
}
private object Builder {
private[module] def convertGradReq(
gradReq: String, argNames: IndexedSeq[String], paramNames: IndexedSeq[String],
fixedParamNames: Set[String], dataNames: Seq[String], inputsNeedGrad: Boolean)
: Map[String, String] = {
require(argNames != null, "Invalid argNames")
require(paramNames != null, "Invalid paramNames")
require(fixedParamNames != null, "Invalid fixedParamNames")
require(dataNames != null, "Invalid dataNames")
argNames.map(k => {
if (paramNames.contains(k)) {
(k, if (fixedParamNames.contains(k)) "null" else gradReq)
} else if (dataNames.contains(k)) {
(k, if (inputsNeedGrad) gradReq else "null")
} else {
(k, "null")
}
}).toMap
}
}
class Builder private[module](private val symbol: Symbol,
private val contexts: Array[Context],
private val paramNames: IndexedSeq[String]) {
private var workLoadList: IndexedSeq[Float] = null
private var dataShapes: IndexedSeq[DataDesc] = null
private var labelShapes: Option[IndexedSeq[DataDesc]] = None
private var forTraining: Boolean = true
private var inputsNeedGrad: Boolean = false
private var sharedGroup: Option[DataParallelExecutorGroup] = None
private var inputTypes: Option[Map[String, DType]] = None
private var fixedParamNames: Set[String] = Set.empty[String]
private var gradReqs: Map[String, String] = null
val argNames = symbol.listArguments()
def setWorkLoadList(workLoad: IndexedSeq[Float]): Builder = {
this.workLoadList = workLoad
this
}
def setDataShapes(shapes: IndexedSeq[DataDesc]): Builder = {
require(shapes != null, "Invalid shapes")
this.dataShapes = shapes
this
}
def setDataShapesByName(shapes: IndexedSeq[(String, Shape)]): Builder = {
require(shapes != null, "Invalid shapes")
this.dataShapes = shapes.map { case (k, s) => new DataDesc(k, s) }
this
}
def setLabelShapes(shapes: IndexedSeq[DataDesc]): Builder = {
this.labelShapes = Option(shapes)
this
}
def setLabelShapesByName(shapes: IndexedSeq[(String, Shape)]): Builder = {
this.labelShapes = Option(shapes).map(shapesInst =>
shapesInst.map { case (k, s) => new DataDesc(k, s) }
)
this
}
def setForTraining(forTraining: Boolean): Builder = {
this.forTraining = forTraining
this
}
def setInputsNeedGrad(needGrad: Boolean): Builder = {
this.inputsNeedGrad = needGrad
this
}
def setSharedGroup(sharedGroup: DataParallelExecutorGroup): Builder = {
this.sharedGroup = Option(sharedGroup)
this
}
def setInputTypes(inputTypes: Map[String, DType]): Builder = {
this.inputTypes = Option(inputTypes)
this
}
def setFixedParamNames(fixedParamNames: Set[String]): Builder = {
this.fixedParamNames = Option(fixedParamNames).getOrElse(Set.empty[String])
this
}
def setGradReq(gradReq: Map[String, String]): Builder = {
require(dataShapes != null, "dataShapes must be set first")
val gradReqTmp = mutable.HashMap.empty[String, String]
val dataNames = dataShapes.map(_.name)
for (k <- argNames) {
if (paramNames.contains(k)) {
gradReqTmp.put(k, if (fixedParamNames.contains(k)) "null" else "write")
} else if (dataNames.contains(k)) {
gradReqTmp.put(k, if (inputsNeedGrad) "write" else "null")
} else {
gradReqTmp.put(k, "null")
gradReqTmp ++= gradReq
}
}
this.gradReqs = gradReqTmp.toMap
this
}
def setGradReq(gradReq: String): Builder = {
require(dataShapes != null, "dataShapes must be set first")
val dataNames = dataShapes.map(_.name)
this.gradReqs = Builder.convertGradReq(
gradReq, argNames, paramNames, fixedParamNames, dataNames, inputsNeedGrad)
this
}
def setGradReq(gradReq: Seq[(String, String)]): Builder = {
require(gradReq.size == argNames.size,
s"provided number of gradReq (${gradReq.size}) do not match number of args " +
s"(${argNames.size})")
this.gradReqs = gradReq.toMap
this
}
def build(): DataParallelExecutorGroup = {
new DataParallelExecutorGroup(
symbol, contexts, workLoadList, dataShapes, labelShapes, paramNames, forTraining,
inputsNeedGrad, sharedGroup, inputTypes, fixedParamNames, this.gradReqs)
}
}
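    // Hedged construction sketch (hypothetical symbol/shapes; only usable from within the
    // module package, since both the enclosing object and the Builder constructor are private):
    //   val group = new Builder(sym, Array(Context.cpu(0), Context.cpu(1)), paramNames)
    //     .setWorkLoadList(IndexedSeq(1f, 1f))
    //     .setDataShapes(IndexedSeq(new DataDesc("data", Shape(32, 784))))
    //     .setForTraining(true)
    //     .setGradReq("write")
    //     .build()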
}
/**
* DataParallelExecutorGroup is a group of executors that lives on a group of devices.
* This is a helper class used to implement data parallelism. Each mini-batch will
* be split and run on the devices.
* @param symbol The common symbolic computation graph for all executors.
* @param contexts A list of contexts.
* @param workLoadList If not `None`, could be a list of numbers that
* specify the workload to be assigned to different context.
* Larger number indicate heavier workload.
* @param dataShapes Should be a list of (name, shape) tuples, for the shapes of data.
* Note the order is important and should be the same as the order that
* the `DataIter` provide the data.
* @param labelShapes Should be a list of (name, shape) tuples, for the shapes of label.
* Note the order is important and should be the same as the order that
* the `DataIter` provide the label.
* @param paramNames A list of strings, indicating the names of parameters
* (e.g. weights, filters, etc.) in the computation graph.
* @param forTraining Indicate whether the executors should be bind for training.
* When not doing training, the memory for gradients will not be allocated.
* @param inputsNeedGrad Indicate whether the gradients for the input data should be computed.
* This is currently not used.
* It will be useful for implementing composition of modules.
* @param sharedGroup Default is `None`. This is used in bucketing. When not `None`,
* it should be a executor group corresponding to a different bucket.
* In other words, it will correspond to a different symbol but
* with the same set of parameters (e.g. unrolled RNNs with different lengths).
 *                    In this case, much of the memory will be shared.
* @param inputTypes Default is `None`. When not `None`,
* can be used to specify the data type for each of the data/label inputs.
* @param fixedParamNames Indicate parameters to be fixed during training.
* Parameters in this list will not allocate space for gradient,
* nor do gradient calculation.
 * @param gradReq Requirement for gradient accumulation. Can be 'write', 'add', or 'null', and
 *                can be specified separately for each argument.
*/
class DataParallelExecutorGroup private[module](
symbol: Symbol,
contexts: Array[Context],
workLoadList: IndexedSeq[Float],
var dataShapes: IndexedSeq[DataDesc],
var labelShapes: Option[IndexedSeq[DataDesc]] = None,
private[module] val paramNames: IndexedSeq[String],
forTraining: Boolean,
inputsNeedGrad: Boolean,
sharedGroup: Option[DataParallelExecutorGroup] = None,
inputTypes: Option[Map[String, DType]] = None,
fixedParamNames: Set[String] = Set.empty[String],
gradReq: Map[String, String] = null) {
require(symbol != null, "Undefined symbol")
require(contexts != null, "Undefined context")
private val argNames = symbol.listArguments()
private val auxNames = symbol.listAuxiliaryStates()
private val gradReqRun =
if (!forTraining) {
val dataNames = dataShapes.map(_.name)
Builder.convertGradReq("null",
argNames, paramNames, fixedParamNames, dataNames, inputsNeedGrad)
} else {
gradReq
}
private val sharedDataArrays: Array[mutable.Map[String, NDArray]] =
sharedGroup.map(_.sharedDataArrays).getOrElse(
Array.fill(contexts.length)(mutable.Map.empty[String, NDArray]))
private var batchSize: Int = -1
private var slices: Array[(Int, Int)] = null
private var execs: Array[Executor] = null
private var dataArrays: Seq[Array[((Int, Int), NDArray)]] = null
private var labelArrays: Option[Seq[Array[((Int, Int), NDArray)]]] = None
private[module] var paramArrays: IndexedSeq[Array[NDArray]] = null
private[module] var gradArrays: IndexedSeq[Array[NDArray]] = null
private[module] var auxArrays: IndexedSeq[Array[NDArray]] = null
private var inputGradArrays: IndexedSeq[Array[NDArray]] = null
private var dataLayouts = decideSlices(dataShapes)
private var labelLayouts =
// call it to make sure labels has the same batch size as data
if (labelShapes != None) decideSlices(labelShapes.get)
else null
private val outputLayouts = symbol.listOutputs().map(name => {
val sym = symbol.get(name)
val layout = sym.attr("__layout__")
sym.dispose()
DataDesc.getBatchAxis(layout)
}
)
bindExec(dataShapes, labelShapes, sharedGroup)
def getBatchSize: Int = batchSize
/**
* Decide the slices for each context according to the workload.
* @param dataShapes list of DataDesc(name, shape) specifying
* the shapes for the input data or label.
*/
private def decideSlices(dataShapes: Seq[DataDesc]): Seq[Int] = {
require(dataShapes.size > 0, "dataShapes must be non empty")
val majorAxis = dataShapes.map(data => DataDesc.getBatchAxis(Option(data.layout)))
for ((dataDesc, axis) <- dataShapes.zip(majorAxis)) {
if (axis != -1) {
val batchSize = dataDesc.shape(axis)
if (this.batchSize != -1) {
require(batchSize == this.batchSize,
s"all data must have the same batch size: $batchSize," +
s"but ${dataDesc.name} has shape ${dataDesc.shape}")
} else {
this.batchSize = batchSize
require(this.workLoadList != null, "Undefined workLoadList")
this.slices = ExecutorManager.splitInputSlice(this.batchSize, this.workLoadList)
}
}
}
majorAxis
}
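  // Slicing sketch (illustrative numbers only): with batchSize = 32, two contexts and equal
  // workloads, the computed slices would be (0, 16) and (16, 32), so each device receives
  // half of the mini-batch along the batch axis.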
/**
* Bind executors on their respective devices.
* @param dataShapes DataDesc for input data.
* @param labelShapes DataDesc for input labels.
* @param sharedGroup
* @param reshape
*/
def bindExec(dataShapes: IndexedSeq[DataDesc], labelShapes: Option[IndexedSeq[DataDesc]],
sharedGroup: Option[DataParallelExecutorGroup], reshape: Boolean = false): Unit = {
this.batchSize = -1
dataLayouts = decideSlices(dataShapes)
labelLayouts = {
// call it to make sure labels has the same batch size as data
if (labelShapes != None) decideSlices(labelShapes.get)
else null
}
if (reshape) {
(0 until contexts.length).foreach { i =>
val dataShapesSliced = slicedShape(dataShapes, i, dataLayouts)
val labelShapesSliced = labelShapes.map(slicedShape(_, i, labelLayouts))
val inputShapes
= dataShapesSliced.toMap ++ labelShapesSliced.getOrElse(Map.empty[String, Shape])
ResourceScope.usingIfScopeExists(execs(i).scope) {
val tmpExec = execs(i).reshape(allowUpSizing = true, kwargs = inputShapes)
execs(i).dispose()
execs(i) = tmpExec
}
}
} else {
execs = (0 until contexts.length).map(i =>
bindIthExec(i, dataShapes, labelShapes, sharedGroup)
).toArray
}
this.dataShapes = dataShapes
this.labelShapes = labelShapes
// convenient data structures
dataArrays = dataShapes.map(dataDesc =>
this.execs.zipWithIndex.map { case (e, i) => (this.slices(i), e.argDict(dataDesc.name)) }
)
labelArrays = labelShapes.map(shapes =>
shapes.map(labelDesc =>
this.execs.zipWithIndex.map { case (e, i) => (this.slices(i), e.argDict(labelDesc.name)) }
)
)
paramArrays = argNames.zipWithIndex.withFilter {
case (name, i) => paramNames.contains(name)
}.map { case (name, i) =>
execs.map(_.argArrays(i))
}
gradArrays =
if (forTraining) {
argNames.zipWithIndex.withFilter {
case (name, i) => paramNames.contains(name)
}.map { case (name, i) =>
execs.map(_.gradArrays(i))
}
} else {
null
}
val dataNames = dataShapes.map(_.name)
inputGradArrays =
if (inputsNeedGrad) {
argNames.zipWithIndex.withFilter {
case (name, i) => dataNames.contains(name)
}.map { case (name, i) =>
execs.map(_.gradArrays(i))
}
} else {
null
}
auxArrays = (0 until auxNames.length).map(i => execs.map(_.auxArrays(i)))
}
/**
* Reshape executors.
* @param dataShapes
* @param labelShapes
*/
def reshape(dataShapes: IndexedSeq[DataDesc], labelShapes: Option[IndexedSeq[DataDesc]]): Unit = {
if (!(dataShapes == this.dataShapes && labelShapes == this.labelShapes)) {
this.bindExec(dataShapes, labelShapes, None, reshape = true)
}
}
/**
* Assign, i.e. copy parameters to all the executors.
* @param argParams A dictionary of name to `NDArray` parameter mapping.
* @param auxParams A dictionary of name to `NDArray` auxiliary variable mapping.
   * @param allowExtra Whether to allow extra parameters that are not needed by the symbol.
   *                   If this is true, no error will be thrown when argParams or auxParams
   *                   contain extra parameters that are not needed by the executor.
*/
def setParams(argParams: Map[String, NDArray], auxParams: Map[String, NDArray],
allowExtra: Boolean = false): Unit = {
execs.foreach(_.copyParamsFrom(argParams, auxParams, allowExtraParams = allowExtra))
}
/**
* Copy data from each executor to `arg_params` and `aux_params`.
* @param argParams target parameter arrays
* @param auxParams target aux arrays
   * Note this function will update the NDArrays in arg_params and aux_params in place.
*/
def getParams(argParams: Map[String, NDArray], auxParams: Map[String, NDArray]): Unit = {
for ((name, block) <- paramNames.zip(paramArrays)) {
val weight = (block.map(_.copyTo(Context.cpu())).reduce((a: NDArray, b: NDArray) =>
(a + b).disposeDeps()
) / block.length).disposeDeps()
val weightNewType = weight.asType(argParams(name).dtype)
weightNewType.copyTo(argParams(name))
weight.dispose()
weightNewType.dispose()
}
for ((name, block) <- auxNames.zip(auxArrays)) {
val weight = (block.map(_.copyTo(Context.cpu())).reduce((a: NDArray, b: NDArray) =>
(a + b).disposeDeps()
) / block.length).disposeDeps()
val weightNewType = weight.asType(auxParams(name).dtype)
weightNewType.copyTo(auxParams(name))
weight.dispose()
weightNewType.dispose()
}
}
/**
* Split `dataBatch` according to workload and run forward on each devices.
* @param dataBatch
* @param isTrain The hint for the backend, indicating whether we are during training phase.
* Default is `None`, then the value `self.for_training` will be used.
*/
def forward(dataBatch: DataBatch, isTrain: Option[Boolean] = None): Unit = {
DataParallelExecutorGroup.loadData(dataBatch, dataArrays, dataLayouts)
val isTrainOpt = isTrain.getOrElse(this.forTraining)
labelArrays.foreach(labels => {
require(!isTrainOpt || dataBatch.label != null, "label must be defined if in training phase")
if (dataBatch.label != null) {
require(labelLayouts != null, "label layouts are undefined")
DataParallelExecutorGroup.loadLabel(dataBatch, labels, labelLayouts)
}
})
execs.foreach(_.forward(isTrainOpt))
}
// Get the shapes of the outputs.
def getOutputShapes: IndexedSeq[(String, Shape)] = {
val outputs = execs(0).outputs
val shapes = outputs.map(_.shape)
(symbol.listOutputs() zip shapes zip outputLayouts) map { case ((key, theShape), axis) =>
val shape = theShape.toArray
if (axis >= 0) {
shape(axis) = batchSize
}
(key, Shape(shape))
}
}
/**
* Get outputs of the previous forward computation.
* @return In the case when data-parallelism is used,
* the outputs will be collected from multiple devices.
* The results will look like `[ [out1_dev1, out1_dev2], [out2_dev1, out2_dev2] ]`,
* those `NDArray` might live on different devices.
*/
def getOutputs(): IndexedSeq[IndexedSeq[NDArray]] = {
(0 until execs(0).outputs.length).map(i => execs.map(_.outputs(i)).toIndexedSeq)
}
/**
* Get outputs of the previous forward computation.
* @return In the case when data-parallelism is used,
* the outputs will be merged from multiple devices,
* as they look like from a single executor.
* The results will look like `[out1, out2]`
*/
def getOutputsMerged(): IndexedSeq[NDArray] = {
DataParallelExecutorGroup.mergeMultiContext(getOutputs(), outputLayouts)
}
/**
* Get the gradients to the inputs, computed in the previous backward computation.
* @return In the case when data-parallelism is used,
* the grads will be collected from multiple devices.
* The results will look like `[ [grad1_dev1, grad1_dev2], [grad2_dev1, grad2_dev2] ]`,
* those `NDArray` might live on different devices.
*/
def getInputGrads(): IndexedSeq[IndexedSeq[NDArray]] = {
require(inputsNeedGrad, "Cannot get InputGrads when inputNeedGrad is set to false")
inputGradArrays.map(_.toIndexedSeq)
}
/**
* Get the gradients to the inputs, computed in the previous backward computation.
* @return In the case when data-parallelism is used,
* the grads will be merged from multiple devices,
* as they look like from a single executor.
* The results will look like `[grad1, grad2]`
*/
def getInputGradsMerged(): IndexedSeq[NDArray] = {
DataParallelExecutorGroup.mergeMultiContext(getInputGrads(), dataLayouts)
}
/**
* Run backward on all devices. A backward should be called after
* a call to the forward function. Backward cannot be called unless
* `this.for_training` is `True`.
* @param outGrads Gradient on the outputs to be propagated back.
* This parameter is only needed when bind is called
* on outputs that are not a loss function.
*/
def backward(outGrads: Array[NDArray] = null): Unit = {
require(forTraining, "re-bind with forTraining = true to run backward")
for (((exec, islice), i) <- (execs zip slices).zipWithIndex) {
val outGradsSlice =
if (outGrads != null) {
(outGrads zip outputLayouts).map { case (grad, axis) =>
if (axis >= 0) {
val ogMySlice: NDArray = NDArray.slice_axis(
Map("axis" -> axis, "begin" -> islice._1, "end" -> islice._2))(grad)
ogMySlice.asInContext(contexts(i))
} else {
grad.copyTo(contexts(i))
}
}
} else {
Array.empty[NDArray]
}
exec.backward(outGrads = outGradsSlice)
}
}
/**
* Accumulate the performance according to `eval_metric` on all devices.
* @param evalMetric The metric used for evaluation.
* @param labels Typically comes from `label` of a `DataBatch`.
*/
def updateMetric(evalMetric: EvalMetric, labels: IndexedSeq[NDArray]): Unit = {
for ((texec, islice) <- this.execs zip this.slices) {
val labelsSlice =
(labels zip this.labelLayouts) map { case (label, axis) =>
if (axis == 0) {
label.slice(islice)
} else if (axis > 0) {
val labelMySlice: NDArray = NDArray.slice_axis(Map(
"axis" -> axis, "begin" -> islice._1, "end" -> islice._2))(label)
.asInContext(label.context)
labelMySlice
} else {
label
}
}
evalMetric.update(labelsSlice, texec.outputs)
// Clear up any slices we created (sometimes we don't slice so check for this)
(labels zip labelsSlice).foreach { case (label, labelSlice) =>
if (label ne labelSlice) {
labelSlice.dispose()
}
}
}
}
// Internal utility function to bind the i-th executor.
private def bindIthExec(i: Int, dataShapes: Seq[DataDesc],
labelShapes: Option[Seq[DataDesc]],
sharedGroup: Option[DataParallelExecutorGroup]): Executor = {
val dataShapesSliced = slicedShape(dataShapes, i, dataLayouts)
val labelShapesSliced = labelShapes.map(slicedShape(_, i, labelLayouts))
val sharedExec = sharedGroup.map(_.execs(i))
val context = contexts(i)
val sharedDataArrays = this.sharedDataArrays(i)
val inputShapes
= dataShapesSliced.toMap ++ labelShapesSliced.getOrElse(Map.empty[String, Shape])
val (argShapes, _, auxShapes) = symbol.inferShape(inputShapes)
require(argShapes != null, "Shape inference failed." +
s"Known shapes are $inputShapes for symbol arguments ${symbol.listArguments()} " +
s"and aux states ${symbol.listAuxiliaryStates()}")
val inputTypesGot = inputTypes.getOrElse(inputShapes.map { case (k, v) =>
(k, Base.MX_REAL_TYPE)
})
val (argTypes, _, auxTypes) = symbol.inferType(inputTypesGot)
require(argTypes != null, "Type inference failed." +
s"Known types as $inputTypes for symbol arguments ${symbol.listArguments()} " +
s"and aux states ${symbol.listAuxiliaryStates()}")
val argArrays = ArrayBuffer.empty[NDArray]
val gradArrayMap = mutable.HashMap.empty[String, NDArray]
// create or borrow arguments and gradients
for (j <- 0 until argNames.length) {
val name = argNames(j)
val argArr =
if (paramNames.contains(name)) {
// model parameter
sharedExec match {
case None =>
val argArr = NDArray.zeros(argShapes(j), context, dtype = argTypes(j))
if (gradReqRun(name) != "null") {
val gradArr = NDArray.zeros(argShapes(j), context, dtype = argTypes(j))
gradArrayMap.put(name, gradArr)
}
argArr
case Some(sharedExecInst) =>
val argArr = sharedExecInst.argDict(name)
require(argArr.shape == argShapes(j),
s"Shape ${argArr.shape} of argument $name does not match " +
s"inferred shape ${argShapes(j)}")
require(argArr.dtype == argTypes(j),
s"Type ${argArr.dtype} of argument $name does not match " +
s"inferred type ${argTypes(j)}")
if (gradReqRun(name) != "null") {
gradArrayMap.put(name, sharedExecInst.gradDict(name))
}
argArr
}
} else {
// data or label
val argArr = getOrReshape(name, sharedDataArrays, argShapes(j), argTypes(j), context)
// data might also need grad if inputs_need_grad is True
if (gradReqRun(name) != "null") {
gradArrayMap.put(name,
getOrReshape(s"grad of $name", sharedDataArrays, argShapes(j), argTypes(j), context))
}
argArr
}
argArrays.append(argArr)
}
// create or borrow aux variables
val auxArrays =
sharedExec match {
case None => (auxShapes zip auxTypes).map { case (s, t) =>
NDArray.zeros(s, context, dtype = t)
}.toArray
case Some(sharedExecInst) =>
for ((arr, j) <- sharedExecInst.auxArrays.zipWithIndex) {
require(auxShapes(j) == arr.shape,
s"Shape ${arr.shape} of aux variable ${auxNames(j)} does not match " +
s"inferred shape ${auxShapes(j)}")
require(auxTypes(j) == arr.dtype,
s"Type ${arr.dtype} of aux variable ${auxNames(j)} does not match " +
s"inferred type ${auxTypes(j)}")
}
sharedExecInst.auxArrays.map(identity)
}
symbol.bind(ctx = context, args = argArrays.toSeq, argsGrad = gradArrayMap.toMap,
gradsReq = gradReqRun, auxStates = auxArrays.toSeq, group2ctx = null,
sharedExec = sharedExec.orNull)
}
/**
* Get the sliced shapes for the i-th executor.
* @param shapes : The original (name, shape) pairs.
* @param i Which executor we are dealing with.
* @param majorAxis
*/
private def slicedShape(shapes: Seq[DataDesc], i: Int, majorAxis: Seq[Int])
: Seq[(String, Shape)] = {
(shapes zip majorAxis).map { case (DataDesc(k, shape, _ , _), axis) =>
val shapeArr = shape.toArray
if (axis >= 0) {
shapeArr(axis) = slices(i)._2 - slices(i)._1
}
(k, Shape(shapeArr))
}
}
// Install monitor on all executors
def installMonitor(monitor: Monitor): Unit = {
execs.foreach(monitor.install)
}
// Internal helper to get a memory block or re-use by re-shaping
private def getOrReshape(name: String,
sharedDataArrays: mutable.Map[String, NDArray],
argShape: Shape,
argType: DType,
context: Context): NDArray = {
if (sharedDataArrays.contains(name)) {
val argArr = sharedDataArrays(name)
if (argArr.shape.product >= argShape.product) {
// nice, we can directly re-use this data blob
require(argArr.dtype == argType,
s"Type ${argArr.dtype} of argument $name does not match infered type ${argType}")
argArr.reshape(argShape)
} else {
        DataParallelExecutorGroup.logger.warn(s"bucketing: data $name has a shape $argShape, " +
          s"which is larger than already allocated shape ${argArr.shape}. " +
          "Need to re-allocate. Consider putting default_bucket_key to be the bucket " +
          "taking the largest input for better memory sharing.")
val argArrNew = NDArray.zeros(argShape, context, dtype = argType)
// replace existing shared array because the new one is bigger
sharedDataArrays.put(name, argArrNew)
argArrNew
}
} else {
val argArrNew = NDArray.zeros(argShape, context, dtype = argType)
sharedDataArrays.put(name, argArrNew)
argArrNew
}
}
}
|
reminisce/mxnet
|
scala-package/core/src/main/scala/org/apache/mxnet/module/DataParallelExecutorGroup.scala
|
Scala
|
apache-2.0
| 30,372 |
/*
* Copyright 2019 Qameta Software OÜ
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.qameta.allure.scalatest.testdata
import org.scalatest.flatspec.AnyFlatSpec
/**
* @author charlie (Dmitry Baev).
*/
class SimpleSpec extends AnyFlatSpec {
"test" should "be passed" in {
}
}
|
allure-framework/allure-java
|
allure-scalatest/src/test/scala/io/qameta/allure/scalatest/testdata/SimpleSpec.scala
|
Scala
|
apache-2.0
| 825 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp.
package scala
object Product1 {
def unapply[T1](x: Product1[T1]): Option[Product1[T1]] =
Some(x)
}
/** Product1 is a Cartesian product of 1 component.
*/
trait Product1[@specialized(Int, Long, Double) +T1] extends Any with Product {
/** The arity of this product.
* @return 1
*/
override def productArity: Int = 1
/** Returns the n-th projection of this product if 0 <= n < productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
* @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException if the `n` is out of range(n < 0 || n >= 1).
*/
@throws(classOf[IndexOutOfBoundsException])
override def productElement(n: Int): Any = n match {
case 0 => _1
case _ => throw new IndexOutOfBoundsException(s"$n is out of bounds (min 0, max 0)")
}
/** A projection of element 1 of this Product.
* @return A projection of element 1.
*/
def _1: T1
}
|
lrytz/scala
|
src/library/scala/Product1.scala
|
Scala
|
apache-2.0
| 1,395 |
package com.ing.bakery.baker
import akka.actor.ActorSystem
import cats.effect.{ContextShift, IO, Resource, Timer}
import com.ing.baker.runtime.akka.internal.CachingInteractionManager
import com.ing.baker.runtime.akka.{AkkaBaker, AkkaBakerConfig}
import com.ing.bakery.mocks.EventListener
import com.ing.bakery.testing.BakeryFunSpec
import com.typesafe.config.ConfigFactory
import org.http4s.Status.Ok
import org.http4s.client.blaze.BlazeClientBuilder
import org.scalatest.concurrent.Eventually
import org.scalatest.freespec.AnyFreeSpec
import org.scalatest.funspec.FixtureAsyncFunSpecLike
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.must.Matchers
import java.io.File
import java.net.InetSocketAddress
import java.util.UUID
import scala.concurrent.ExecutionContext
class WatcherSpec extends AnyFunSuite with Matchers with Eventually {
test("Watcher starts") {
val config = ConfigFactory.load("application-watcher.conf")
implicit val executionContext: ExecutionContext = ExecutionContext.global
implicit val timer: Timer[IO] = IO.timer(executionContext)
implicit val contextShift: ContextShift[IO] = IO.contextShift(executionContext)
val s = (for {
system <- Resource.make(IO {
ActorSystem(UUID.randomUUID().toString, ConfigFactory.parseString(
"""
|akka {
| stdout-loglevel = "OFF"
| loglevel = "OFF"
|}
|""".stripMargin)) })((system: ActorSystem) => IO.fromFuture(IO {
system.terminate().flatMap(_ => system.whenTerminated) })(contextShift).void)
_ <- Watcher.resource(config, system, None)
} yield ())
assert(!WatcherReadinessCheck.ready)
s.use(_ => IO.unit).unsafeRunAsyncAndForget()
assert(TestWatcher.started)
eventually {
assert(WatcherReadinessCheck.ready)
}
assert(!TestWatcher.triggered)
eventually {
assert(TestWatcher.triggered)
}
}
}
|
ing-bank/baker
|
bakery/state/src/test/scala/com/ing/bakery/baker/WatcherSpec.scala
|
Scala
|
mit
| 1,956 |
package almond.api.helpers
import java.io.{BufferedInputStream, IOException}
import java.net.{URL, URLConnection}
import java.nio.file.{Files, Paths}
import java.util.Base64
import almond.display.UpdatableDisplay
import almond.interpreter.api.DisplayData.ContentType
import almond.interpreter.api.{DisplayData, OutputHandler}
@deprecated("Use almond.display.Data instead", "0.4.1")
final class Display(id: String, contentType: String) {
def update(content: String)(implicit outputHandler: OutputHandler): Unit =
outputHandler.updateDisplay(
DisplayData(Map(contentType -> content))
.withId(id)
)
override def toString =
s"$contentType #$id"
}
object Display {
@deprecated("Use almond.display.UpdatableDisplay.useRandomIds instead", "0.4.1")
def useRandomIds(): Boolean =
UpdatableDisplay.useRandomIds()
@deprecated("Use almond.display.UpdatableDisplay.generateId instead", "0.4.1")
def newId(): String =
UpdatableDisplay.generateId()
@deprecated("Use almond.display.UpdatableDisplay.generateDiv instead", "0.4.1")
def newDiv(prefix: String = "data-"): String =
UpdatableDisplay.generateDiv(prefix)
@deprecated("Use almond.display.Markdown instead", "0.4.1")
def markdown(content: String)(implicit outputHandler: OutputHandler): Display = {
val id = UpdatableDisplay.generateId()
outputHandler.display(
DisplayData.markdown(content)
.withId(id)
)
new Display(id, DisplayData.ContentType.markdown)
}
@deprecated("Use almond.display.Html instead", "0.4.1")
def html(content: String)(implicit outputHandler: OutputHandler): Display = {
val id = UpdatableDisplay.generateId()
outputHandler.display(
DisplayData.html(content)
.withId(id)
)
new Display(id, DisplayData.ContentType.html)
}
@deprecated("Use almond.display.Latex instead", "0.4.1")
def latex(content: String)(implicit outputHandler: OutputHandler): Display = {
val id = UpdatableDisplay.generateId()
outputHandler.display(
DisplayData.latex(content)
.withId(id)
)
new Display(id, DisplayData.ContentType.latex)
}
@deprecated("Use almond.display.Text instead", "0.4.1")
def text(content: String)(implicit outputHandler: OutputHandler): Display = {
val id = UpdatableDisplay.generateId()
outputHandler.display(
DisplayData.text(content)
.withId(id)
)
new Display(id, DisplayData.ContentType.text)
}
@deprecated("Use almond.display.Javascript instead", "0.4.1")
def js(content: String)(implicit outputHandler: OutputHandler): Unit =
outputHandler.display(
DisplayData.js(content)
)
@deprecated("Use almond.display.Svg instead", "0.4.1")
def svg(content: String)(implicit outputHandler: OutputHandler): Display = {
val id = UpdatableDisplay.generateId()
outputHandler.display(
DisplayData.svg(content)
.withId(id)
)
new Display(id, DisplayData.ContentType.svg)
}
@deprecated("Use almond.display.Image instead", "0.4.1")
object Image {
sealed abstract class Format(val contentType: String) extends Product with Serializable
case object JPG extends Format(ContentType.jpg)
case object PNG extends Format(ContentType.png)
case object GIF extends Format(ContentType.gif)
private val imageTypes = Set(JPG, PNG, GIF).map(_.contentType)
private def dimensionMetadata(width: Option[String], height: Option[String]): Map[String, String] =
Map() ++
width.map("width" -> _) ++
height.map("height" -> _)
def fromArray(
content: Array[Byte],
format: Format,
width: Option[String] = None,
height: Option[String] = None,
id: String = UpdatableDisplay.generateId()
)(implicit outputHandler: OutputHandler): Display = {
DisplayData(
data = Map(format.contentType -> Base64.getEncoder.encodeToString(content)),
metadata = dimensionMetadata(width, height),
idOpt = Some(id)
).show()
new Display(id, format.contentType)
}
def fromUrl(
url: String,
embed: Boolean = false,
format: Option[Format] = None,
width: Option[String] = None,
height: Option[String] = None,
id: String = UpdatableDisplay.generateId()
)(implicit outputHandler: OutputHandler): Display = {
val connection = new URL(url).openConnection()
connection.setConnectTimeout(5000)
connection.connect()
val contentType = format.map(_.contentType).getOrElse(connection.getContentType)
val data = if (embed) {
if(!imageTypes.contains(contentType))
throw new IOException("Unknown or unsupported content type: " + contentType)
val input = new BufferedInputStream(connection.getInputStream)
val rawImage = Iterator.continually(input.read).takeWhile(_ != -1).map(_.toByte).toArray
contentType -> Base64.getEncoder.encodeToString(rawImage)
} else {
val dimensionAttrs = dimensionMetadata(width, height).map{case (k,v) => s"$k=$v"}.mkString(" ")
ContentType.html -> s"<img src='$url' $dimensionAttrs/>"
}
DisplayData(
data = Map(data),
metadata = dimensionMetadata(width, height),
idOpt = Some(id)
).show()
new Display(id, contentType)
}
def fromFile(
path: String,
format: Option[Format] = None,
width: Option[String] = None,
height: Option[String] = None,
id: String = UpdatableDisplay.generateId()
)(implicit outputHandler: OutputHandler): Display = {
val contentType = format.map(_.contentType).getOrElse(URLConnection.guessContentTypeFromName(path))
if(!imageTypes.contains(contentType))
throw new IOException("Unknown or unsupported content type: " + contentType)
val imgPath = Paths.get(path)
val content = Files.readAllBytes(imgPath)
DisplayData(
data = Map(contentType -> Base64.getEncoder.encodeToString(content)),
metadata = dimensionMetadata(width, height),
idOpt = Some(id)
).show()
new Display(id, contentType)
}
}
}
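// Hypothetical usage sketch (not part of this file): inside an almond notebook
// an implicit OutputHandler is provided by the kernel, so the deprecated
// helpers above can be driven like this. `outputHandler` is an assumption here.
//
//   implicit val outputHandler: OutputHandler = ...  // supplied by the kernel
//   val d = Display.html("<b>hello</b>")             // renders and returns a handle
//   d.update("<b>hello, again</b>")                  // replaces the same output in place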
|
alexarchambault/jupyter-scala
|
modules/scala/jupyter-api/src/main/scala/almond/api/helpers/Display.scala
|
Scala
|
apache-2.0
| 6,133 |
/**
* Copyright (C) 2013 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.fr
import org.orbeon.saxon.om.NodeInfo
import org.orbeon.oxf.xml.XMLUtils
import org.orbeon.scaxon.XML._
import org.orbeon.oxf.fr.FormRunner._
trait FormRunnerSummary {
// Get a field's label in HTML for the Summary page
//@XPathFunction
def htmlFieldLabel(name: String, htmlLabel: Boolean, resources: NodeInfo): String = {
def resourceLabelOpt = (resources \ name \ "label" map (v ⇒ if (htmlLabel) v.stringValue else XMLUtils.escapeXMLMinimal(v.stringValue))).headOption
resourceLabelOpt getOrElse s"[$name]"
}
//@XPathFunction
def duplicate(data: NodeInfo, app: String, form: String, fromDocument: String, toDocument: String, formVersion: String): Unit = {
val someFormVersion = Some(formVersion) // use the same form version as the data to clone
putWithAttachments(
data = data.root,
toBaseURI = "", // local save
fromBasePath = createFormDataBasePath(app, form, isDraft = false, fromDocument),
toBasePath = createFormDataBasePath(app, form, isDraft = false, toDocument),
filename = "data.xml",
commonQueryString = "",
forceAttachments = true,
formVersion = someFormVersion
)
}
}
|
wesley1001/orbeon-forms
|
src/main/scala/org/orbeon/oxf/fr/FormRunnerSummary.scala
|
Scala
|
lgpl-2.1
| 1,906 |
package vexriscv.plugin
import vexriscv._
import vexriscv.VexRiscv
import spinal.core._
import spinal.lib.KeepAttribute
//The input buffer generally prevents FPGA synthesis from duplicating registers inside the DSP cell, which could otherwise stress timing quite a lot.
class MulPlugin(var inputBuffer : Boolean = false,
var outputBuffer : Boolean = false) extends Plugin[VexRiscv] with VexRiscvRegressionArg {
object MUL_LL extends Stageable(UInt(32 bits))
object MUL_LH extends Stageable(SInt(34 bits))
object MUL_HL extends Stageable(SInt(34 bits))
object MUL_HH extends Stageable(SInt(34 bits))
object MUL_LOW extends Stageable(SInt(34+16+2 bits))
object IS_MUL extends Stageable(Bool)
override def getVexRiscvRegressionArgs(): Seq[String] = {
List("MUL=yes")
}
override def setup(pipeline: VexRiscv): Unit = {
import Riscv._
import pipeline.config._
val actions = List[(Stageable[_ <: BaseType],Any)](
// SRC1_CTRL -> Src1CtrlEnum.RS,
// SRC2_CTRL -> Src2CtrlEnum.RS,
REGFILE_WRITE_VALID -> True,
BYPASSABLE_EXECUTE_STAGE -> False,
BYPASSABLE_MEMORY_STAGE -> False,
RS1_USE -> True,
RS2_USE -> True,
IS_MUL -> True
)
val decoderService = pipeline.service(classOf[DecoderService])
decoderService.addDefault(IS_MUL, False)
decoderService.add(List(
MULX -> actions
))
}
override def build(pipeline: VexRiscv): Unit = {
import pipeline._
import pipeline.config._
//Do partial multiplication, four times 16 bits * 16 bits
execute plug new Area {
import execute._
val aSigned,bSigned = Bool
val a,b = Bits(32 bit)
// a := input(SRC1)
// b := input(SRC2)
val delay = (if(inputBuffer) 1 else 0) + (if(outputBuffer) 1 else 0)
val delayLogic = (delay != 0) generate new Area{
val counter = Reg(UInt(log2Up(delay+1) bits))
when(arbitration.isValid && input(IS_MUL) && counter =/= delay){
arbitration.haltItself := True
}
counter := counter + 1
when(!arbitration.isStuck || arbitration.isStuckByOthers){
counter := 0
}
}
val withInputBuffer = inputBuffer generate new Area{
val rs1 = RegNext(input(RS1))
val rs2 = RegNext(input(RS2))
a := rs1
b := rs2
}
val noInputBuffer = (!inputBuffer) generate new Area{
a := input(RS1)
b := input(RS2)
}
switch(input(INSTRUCTION)(13 downto 12)) {
is(B"01") {
aSigned := True
bSigned := True
}
is(B"10") {
aSigned := True
bSigned := False
}
default {
aSigned := False
bSigned := False
}
}
val aULow = a(15 downto 0).asUInt
val bULow = b(15 downto 0).asUInt
val aSLow = (False ## a(15 downto 0)).asSInt
val bSLow = (False ## b(15 downto 0)).asSInt
val aHigh = (((aSigned && a.msb) ## a(31 downto 16))).asSInt
val bHigh = (((bSigned && b.msb) ## b(31 downto 16))).asSInt
val withOuputBuffer = outputBuffer generate new Area{
val mul_ll = RegNext(aULow * bULow)
val mul_lh = RegNext(aSLow * bHigh)
val mul_hl = RegNext(aHigh * bSLow)
val mul_hh = RegNext(aHigh * bHigh)
insert(MUL_LL) := mul_ll
insert(MUL_LH) := mul_lh
insert(MUL_HL) := mul_hl
insert(MUL_HH) := mul_hh
}
val noOutputBuffer = (!outputBuffer) generate new Area{
insert(MUL_LL) := aULow * bULow
insert(MUL_LH) := aSLow * bHigh
insert(MUL_HL) := aHigh * bSLow
insert(MUL_HH) := aHigh * bHigh
}
Component.current.afterElaboration{
//Prevent synthesis tools from retiming RS1/RS2 from the execute stage to the decode stage, which leads to bad timing (e.g. Vivado, even with retiming disabled)
KeepAttribute(input(RS1))
KeepAttribute(input(RS2))
}
}
//First aggregation of the partial multiplications
memory plug new Area {
import memory._
insert(MUL_LOW) := S(0, MUL_HL.dataType.getWidth + 16 + 2 bit) + (False ## input(MUL_LL)).asSInt + (input(MUL_LH) << 16) + (input(MUL_HL) << 16)
}
//Final aggregation of the partial multiplications, overriding REGFILE_WRITE_DATA
writeBack plug new Area {
import writeBack._
val result = input(MUL_LOW) + (input(MUL_HH) << 32)
when(arbitration.isValid && input(IS_MUL)){
switch(input(INSTRUCTION)(13 downto 12)){
is(B"00"){
output(REGFILE_WRITE_DATA) := input(MUL_LOW)(31 downto 0).asBits
}
is(B"01",B"10",B"11"){
output(REGFILE_WRITE_DATA) := result(63 downto 32).asBits
}
}
}
}
}
}
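// Software model (an aside, not used by the plugin): the 32x32 multiplication
// above is split into four 16x16 partial products (MUL_LL/LH/HL/HH) and
// re-aggregated across the memory and write-back stages. A minimal sketch of
// the same arithmetic for the signed*signed case (MUL/MULH):
object MulDecompositionModel extends App {
  def mul32(a: Int, b: Int): Long = {
    val aLow  = (a & 0xFFFF).toLong     // unsigned low half
    val bLow  = (b & 0xFFFF).toLong
    val aHigh = (a >> 16).toLong        // sign-extended high half
    val bHigh = (b >> 16).toLong
    val ll = aLow  * bLow               // MUL_LL
    val lh = aLow  * bHigh              // MUL_LH
    val hl = aHigh * bLow               // MUL_HL
    val hh = aHigh * bHigh              // MUL_HH
    ll + ((lh + hl) << 16) + (hh << 32) // MUL_LOW, then the final aggregation
  }
  assert(mul32(123456, -789) == 123456L * -789L)
  assert(mul32(-1, -1) == 1L)
}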
|
SpinalHDL/VexRiscv
|
src/main/scala/vexriscv/plugin/MulPlugin.scala
|
Scala
|
mit
| 4,862 |
package org.cddcore.engine
class DecisionTreeTracerSpec extends CddEngineSpec {
"The DecisionTreeTracer" should "return a list of decision trees, each with one more scenario added" in {
val e = new Engine[Int, String] {
1 produces "one" when (_ == 1)
2 produces "two" when (_ == 2)
3 produces "three" when (_ == 3)
}
val List(s1, s2, s3) = e.allScenarios
s1.situation shouldBe 1
val List(dt1, dt2, dt3) = DecisionTreeBeingBuiltTracer(e)
dt1.allScenarios shouldBe List(s1)
}
}
|
phil-rice/CddCore2
|
module/engine/src/test/scala/org/cddcore/engine/DecisionTreeTracerSpec.scala
|
Scala
|
bsd-2-clause
| 543 |
/*
* Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
*/
package docs.scaladsl.gettingstarted
package helloservice {
//#helloservice
import akka.Done
import akka.NotUsed
import com.lightbend.lagom.scaladsl.api._
import play.api.libs.json._
trait HelloService extends Service {
def hello(id: String): ServiceCall[NotUsed, String]
def useGreeting(id: String): ServiceCall[GreetingMessage, Done]
final override def descriptor = {
import Service._
named("hello")
.withCalls(
pathCall("/api/hello/:id", hello _),
pathCall("/api/hello/:id", useGreeting _)
)
.withAutoAcl(true)
}
}
case class GreetingMessage(message: String)
object GreetingMessage {
implicit val format: Format[GreetingMessage] = Json.format[GreetingMessage]
}
//#helloservice
//#helloserviceimpl
import com.lightbend.lagom.scaladsl.api.ServiceCall
import com.lightbend.lagom.scaladsl.persistence.PersistentEntityRegistry
class HelloServiceImpl(persistentEntityRegistry: PersistentEntityRegistry) extends HelloService {
override def hello(id: String) = ServiceCall { _ =>
val ref = persistentEntityRegistry.refFor[HelloEntity](id)
ref.ask(Hello(id, None))
}
override def useGreeting(id: String) = ServiceCall { request =>
val ref = persistentEntityRegistry.refFor[HelloEntity](id)
ref.ask(UseGreetingMessage(request.message))
}
}
//#helloserviceimpl
import com.lightbend.lagom.scaladsl.persistence.PersistentEntity.ReplyType
import com.lightbend.lagom.scaladsl.persistence.PersistentEntity
sealed trait HelloCommand
case class Hello(id: String, timestamp: Option[String]) extends ReplyType[String] with HelloCommand
case class UseGreetingMessage(msg: String) extends ReplyType[Done] with HelloCommand
sealed trait HelloEvent
case class HelloState()
class HelloEntity extends PersistentEntity {
override type Command = HelloCommand
override type Event = HelloEvent
override type State = HelloState
override def initialState = HelloState()
override def behavior = PartialFunction.empty
}
}
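// Hypothetical client-side sketch (not part of the documentation snippets
// above): given an injected HelloService instance, calls go through
// ServiceCall.invoke(). `helloService` is an assumption for illustration.
//
//   import scala.concurrent.Future
//   val greeting: Future[String] = helloService.hello("Alice").invoke()
//   val ack: Future[Done] = helloService.useGreeting("Alice").invoke(GreetingMessage("Hi Alice"))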
|
rcavalcanti/lagom
|
docs/manual/scala/gettingstarted/code/GettingStarted.scala
|
Scala
|
apache-2.0
| 2,194 |
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller
* @version 1.2
* @date Mon May 25 17:57:02 EDT 2015
* @see LICENSE (MIT style license file).
*/
package scalation.linalgebra.bld
import java.io.{File, PrintWriter}
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `BldBidMatrix` object is used to build square (upper) bidiagonal matrix
* classes for various base types.
* > run-main scalation.linalgebra.bld.BldBidMatrix
*/
object BldBidMatrix extends App with BldParams
{
println ("BldBidMatrix: generate code for Bidiagonal Matrix classes")
for (i <- 0 until kind.length-1) { // do not generate `BidMatrixS`
val VECTO = kind(i)._1
val VECTOR = kind(i)._1.replace ("o", "or")
val BASE = kind(i)._2
val MATRI = kind(i)._6
val ZERO = kind(i)._8
val ONE = kind(i)._9
val BASE_LC = BASE.toLowerCase
val MATRIX = { val m = MATRI.splitAt (MATRI.size-1); m._1 + "x" + m._2 }
val IMPORT = if (CUSTOM contains BASE) s"scalation.math.$BASE.{abs => ABS, _}"
else "scala.math.{abs => ABS}"
val IMPORT2 = if (CUSTOM contains BASE) s"scalation.math.{$BASE, oneIf}"
else s"scalation.math.{${BASE_LC}_exp, oneIf}"
// Beginning of string holding code template -----------------------------------
val code = raw"""
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller
* @builder scalation.linalgebra.bld.BldBidMatrix
* @version 1.2
* @date Mon May 19 15:52:24 EDT 2014
* @see LICENSE (MIT style license file).
*/
package scalation.linalgebra
import scala.io.Source.fromFile
import $IMPORT
import $IMPORT2
import scalation.util.Error
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `Bid$MATRIX` class stores and operates on square (upper) bidiagonal matrices.
* The elements are of type of `$BASE`. A matrix is stored as two vectors:
* the diagonal vector and the sup-diagonal vector.
* @param d1 the first/row dimension (square => d2 = d1)
*/
class Bid$MATRIX (val d1: Int)
extends $MATRI with Error with Serializable
{
/** Dimension 1
*/
lazy val dim1 = d1
/** Dimension 2
*/
lazy val dim2 = d1
/** Size of the sup-diagonal
*/
private val n = d1 - 1
/** Range for the diagonal
*/
private val range_d = 0 until d1
/** Range for the sup-diagonal
*/
private val range_s = 0 until n
/** Diagonal of the matrix
*/
private var _dg: $VECTOR = new $VECTOR (d1)
/** Sup-diagonal of the matrix
*/
private var _sd: $VECTOR = new $VECTOR (n)
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Construct a bidiagonal matrix with the given diagonal and sup-diagonal.
* @param v1 the diagonal vector
* @param v2 the sup-diagonal vector
*/
def this (v1: $VECTO, v2: $VECTO)
{
this (v1.dim)
for (i <- range_d) _dg(i) = v1(i)
for (i <- range_s) _sd(i) = v2(i)
} // constructor
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Construct a bidiagonal matrix from the given matrix.
* @param b the matrix of values to assign
*/
def this (b: $MATRI)
{
this (b.dim1)
for (i <- range_d) _dg(i) = b(i, i)
for (i <- range_s) _sd(i) = b(i, i+1)
} // constructor
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Create a clone of 'this' m-by-n matrix.
*/
def copy (): Bid$MATRIX = new Bid$MATRIX (_dg, _sd)
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Create an m-by-n matrix with all elements initialized to zero.
* @param m the number of rows
* @param n the number of columns
*/
def zero (m: Int = dim1, n: Int = dim2): Bid$MATRIX = new Bid$MATRIX (m)
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Get the diagonal of 'this' bidiagonal matrix.
*/
def dg: $VECTOR = _dg
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Set the diagonal of 'this' bidiagonal matrix.
* @param v the vector to assign to the diagonal
*/
def dg_ (v: $VECTOR) { _dg = v }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Get the sup-diagonal of this bidiagonal matrix.
*/
def sd: $VECTOR = _sd
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Set the sup-diagonal of 'this' bidiagonal matrix.
* @param v the vector to assign to the sup-diagonal
*/
def sd_ (v: $VECTOR) { _sd = v }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Get 'this' bidiagonal matrix's element at the 'i,j'-th index position.
* @param i the row index
* @param j the column index
*/
def apply (i: Int, j: Int): $BASE =
{
if (i == j) _dg(i) // on diagonal
else if (i + 1 == j) _sd(i) // on sup-diagonal (above diagonal)
else throw new Exception ("Bid$MATRIX.apply: element not on diagonals")
} // apply
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Get 'this' bidiagonal matrix's element at the 'i,j'-th index position,
* returning 0, if off bidiagonal.
* @param i the row index
* @param j the column index
*/
def at (i: Int, j: Int): $BASE =
{
if (i < 0 || j < 0 || i >= d1 || j >= d1) $ZERO
else if (i == j) _dg(i) // on diagonal
else if (i + 1 == j) _sd(i) // on sup-diagonal (above diagonal)
else $ZERO
} // at
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Get 'this' bidiagonal matrix's vector at the 'i'-th index position ('i'-th row).
* @param i the row index
*/
def apply (i: Int): $VECTOR =
{
    val u = new $VECTOR (d1)
    u(i) = _dg(i)                                // diagonal element (i, i)
    if (i < n) u(i+1) = _sd(i)                   // sup-diagonal element (i, i+1)
    u
} // apply
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Get a slice 'this' bidiagonal matrix row-wise on range 'ir' and column-wise
* on range 'jr'.
* Ex: b = a(2..4, 3..5)
* @param ir the row range
* @param jr the column range
*/
def apply (ir: Range, jr: Range): Bid$MATRIX =
{
if (ir != jr) flaw ("apply", "requires same ranges to maintain squareness")
slice (ir.start, ir.end)
} // apply
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Set 'this' bidiagonal matrix's element at the 'i,j'-th index position to
* the scalar 'x'.
* @param i the row index
* @param j the column index
* @param x the scalar value to assign
*/
def update (i: Int, j: Int, x: $BASE)
{
if (i == j) _dg(i) = x
else if (i == j + 1) _sd(j) = x
else if (i + 1 == j) _sd(i) = x
else flaw ("update", "element not on bidiagonal")
} // update
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Set 'this' bidiagonal matrix's row at the 'i'-th index position to the
* vector 'u'.
* @param i the row index
* @param u the vector value to assign
*/
def update (i: Int, u: $VECTO)
{
    _dg(i) = u(i)                                // diagonal element (i, i)
    if (i < n) _sd(i) = u(i+1)                   // sup-diagonal element (i, i+1)
} // update
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Set a slice 'this' bidiagonal matrix row-wise on range 'ir' and column-wise
* on range 'jr'.
* Ex: a(2..4, 3..5) = b
* @param ir the row range
* @param jr the column range
* @param b the matrix to assign
*/
def update (ir: Range, jr: Range, b: $MATRI)
{
if (ir != jr) flaw ("update", "requires same ranges to maintain squareness")
if (b.isInstanceOf [Bid$MATRIX]) {
val bb = b.asInstanceOf [Bid$MATRIX]
for (i <- ir) {
_dg(i) = bb.dg(i - ir.start)
if (i > ir.start) _sd(i-1) = bb.sd(i - ir.start - 1)
} // for
} else {
flaw ("update", "must convert b to a Bid$MATRIX first")
} // if
} // update
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Set all the elements in 'this' bidiagonal matrix to the scalar 'x'.
* @param x the scalar value to assign
*/
def set (x: $BASE)
{
    for (i <- range1) {
        _dg(i) = x
        if (i < n) _sd(i) = x                    // _sd has only n = dim1-1 slots
    } // for
} // set
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Set all the values in 'this' bidiagonal matrix as copies of the values in 2D array u.
* @param u the 2D array of values to assign
*/
def set (u: Array [Array [$BASE]])
{
throw new NoSuchMethodException ("values for Bid$MATRIX should be diagonal")
} // set
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Set 'this' bidiagonal matrix's 'i'th row starting at column 'j' to the
* vector 'u'.
* @param i the row index
* @param u the vector value to assign
* @param j the starting column index
*/
def set (i: Int, u: $VECTO, j: Int = 0)
{
    if (i >= j) _dg(i) = u(i)
    if (i+1 >= j && i < n) _sd(i) = u(i+1)       // sup-diagonal element (i, i+1)
} // set
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Convert 'this' `Bid$MATRIX` into a `BidMatrixI`.
*/
def toInt: BidMatrixI = new BidMatrixI (_dg.toInt, _sd.toInt)
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Convert 'this' bidiagonal matrix to a dense matrix.
*/
def toDense: $MATRIX =
{
    val c = new $MATRIX (dim1, dim1)
    for (i <- range1) {
        c(i, i) = _dg(i)
        if (i > 0) c(i-1, i) = _sd(i-1)          // sup-diagonal (i-1, i), above the diagonal
    } // for
    c
} // toDense
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Slice 'this' bidiagonal matrix row-wise 'from' to 'end'.
* @param from the start row of the slice (inclusive)
* @param end the end row of the slice (exclusive)
*/
def slice (from: Int, end: Int): Bid$MATRIX =
{
val c = new Bid$MATRIX (end - from)
for (i <- c.range1) {
c._dg(i) = _dg(i + from)
if (i > 0) c._sd(i - 1) = _sd(i + from - 1)
} // for
c
} // slice
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Slice 'this' bidiagonal matrix column-wise 'from' to 'end'.
* @param from the start column of the slice (inclusive)
* @param end the end column of the slice (exclusive)
*/
def sliceCol (from: Int, end: Int): Bid$MATRIX =
{
val c = new Bid$MATRIX (end - from)
for (j <- c.range2) {
c._dg(j) = _dg(j + from)
if (j > 0) c._sd(j - 1) = _sd(j + from - 1)
} // for
c
} // sliceCol
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Slice 'this' bidiagonal matrix row-wise 'r_from' to 'r_end' and column-wise
* 'c_from' to 'c_end'.
* @param r_from the start of the row slice
* @param r_end the end of the row slice
* @param c_from the start of the column slice
* @param c_end the end of the column slice
*/
def slice (r_from: Int, r_end: Int, c_from: Int, c_end: Int): Bid$MATRIX =
{
throw new NoSuchMethodException ("Bid$MATRIX must be square")
} // slice
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Slice 'this' bidiagonal matrix excluding the given 'row' and 'col'umn.
* @param row the row to exclude
* @param col the column to exclude
*/
def sliceExclude (row: Int, col: Int): Bid$MATRIX =
{
throw new NoSuchMethodException ("Bid$MATRIX does not support sliceExclude")
} // sliceExclude
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Select rows from 'this' bidiagonal matrix according to the given index/basis.
* @param rowIndex the row index positions (e.g., (0, 2, 5))
*/
def selectRows (rowIndex: Array [Int]): Bid$MATRIX =
{
throw new NoSuchMethodException ("Bid$MATRIX does not support selectRows")
} // selectRows
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Get column 'col' from 'this' bidiagonal matrix, returning it as a vector.
* @param col the column to extract from the matrix
* @param from the position to start extracting from
*/
def col (col: Int, from: Int = 0): $VECTOR =
{
val u = new $VECTOR (d1 - from)
for (i <- (from max col-1) until (d1 min col+2)) u(i-from) = at(i, col)   // 'at' is zero off the two diagonals
u
} // col
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Set column 'col' of 'this' bidiagonal matrix to a vector.
* @param col the column to set
* @param u the vector to assign to the column
*/
def setCol (col: Int, u: $VECTO)
{
_dg(col) = u(col)
if (col > 0) _sd(col-1) = u(col-1)
} // setCol
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Select columns from 'this' bidiagonal matrix according to the given index/basis.
* Ex: Can be used to divide a matrix into a basis and a non-basis.
* @param colIndex the column index positions (e.g., (0, 2, 5))
*/
def selectCols (colIndex: Array [Int]): Bid$MATRIX =
{
throw new NoSuchMethodException ("Bid$MATRIX does not support selectCols")
} // selectCols
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Transpose 'this' bidiagonal matrix (rows => columns).
*/
def t: Bid$MATRIX =
{
throw new NoSuchMethodException ("Bid$MATRIX does not support transpose")
} // t
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Concatenate (row) vector 'u' and 'this' matrix, i.e., prepend 'u' to 'this'.
* @param u the vector to be prepended as the new first row in new matrix
*/
def +: (u: $VECTO): Bid$MATRIX =
{
throw new NoSuchMethodException ("Bid$MATRIX does not support +:")
} // +:
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Concatenate (column) vector 'u' and 'this' matrix, i.e., prepend 'u' to 'this'.
* @param u the vector to be prepended as the new first column in new matrix
*/
def +^: (u: $VECTO): Bid$MATRIX =
{
throw new NoSuchMethodException ("Bid$MATRIX does not support +^:")
} // +^:
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Concatenate 'this' matrix and (row) vector 'u', i.e., append 'u' to 'this'.
* @param u the vector to be appended as the new last row in new matrix
*/
def :+ (u: $VECTO): Bid$MATRIX =
{
throw new NoSuchMethodException ("Bid$MATRIX does not support :+")
} // :+
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Concatenate 'this' matrix and (column) vector 'u', i.e., append 'u' to 'this'.
* @param u the vector to be appended as the new last column in new matrix
*/
def :^+ (u: $VECTO): Bid$MATRIX =
{
throw new NoSuchMethodException ("Bid$MATRIX does not support :^+")
} // :^+
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Concatenate (row-wise) 'this' matrix and matrix 'b'.
* @param b the matrix to be concatenated as the new last rows in new matrix
*/
def ++ (b: $MATRI): Bid$MATRIX =
{
throw new NoSuchMethodException ("Bid$MATRIX does not support ++")
} // ++
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Concatenate (column-wise) 'this' matrix and matrix 'b'.
* @param b the matrix to be concatenated as the new last columns in new matrix
*/
def ++^ (b: $MATRI): Bid$MATRIX =
{
throw new NoSuchMethodException ("Bid$MATRIX does not support ++^")
} // ++^
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Add 'this' bidiagonal matrix and matrix 'b'.
* @param b the matrix to add (requires 'leDimensions')
*/
def + (b: $MATRI): Bid$MATRIX =
{
val bid = b.asInstanceOf [Bid$MATRIX]
if (d1 == bid.d1) {
new Bid$MATRIX (_dg + bid.dg, _sd + bid.sd)
} else {
flaw ("+", "matrix b has the wrong dimensions")
null
} // if
} // +
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Add 'this' bidiagonal matrix and (row) vector u.
* @param u the vector to add
*/
def + (u: $VECTO): Bid$MATRIX =
{
throw new NoSuchMethodException ("Bid$MATRIX does not support + with $VECTO")
} // +
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Add 'this' bidiagonal matrix and scalar 'x'.
* @param x the scalar to add
*/
def + (x: $BASE): Bid$MATRIX =
{
new Bid$MATRIX (_dg + x, _sd + x)
} // +
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Add in-place 'this' bidiagonal matrix and matrix 'b'.
* @param b the matrix to add (requires 'leDimensions')
*/
def += (b: $MATRI): Bid$MATRIX =
{
val bid = b.asInstanceOf [Bid$MATRIX]
if (d1 == bid.d1) {
_dg += bid.dg
_sd += bid.sd
} else {
flaw ("+=", "matrix b has the wrong dimensions")
} // if
this
} // +=
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Add in-place 'this' bidiagonal matrix and (row) vector 'u'.
* @param u the vector to add
*/
def += (u: $VECTO): $MATRIX =
{
throw new NoSuchMethodException ("Bid$MATRIX does not support += with $VECTO")
} // +=
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Add in-place 'this' bidiagonal matrix and scalar 'x'.
* @param x the scalar to add
*/
def += (x: $BASE): Bid$MATRIX =
{
_dg += x; _sd += x; this
} // +=
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** From 'this' bidiagonal matrix subtract matrix 'b'.
* @param b the matrix to subtract (requires 'leDimensions')
*/
def - (b: $MATRI): Bid$MATRIX =
{
val bid = b.asInstanceOf [Bid$MATRIX]
if (d1 == bid.d1) {
new Bid$MATRIX (_dg - bid.dg, _sd - bid.sd)
} else {
flaw ("-", "matrix b has the wrong dimensions")
null
} // if
} // -
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** From 'this' bidiagonal matrix subtract (row) vector 'u'.
* @param u the vector to subtract
*/
def - (u: $VECTO): Bid$MATRIX =
{
throw new NoSuchMethodException ("Bid$MATRIX does not support - with $VECTO")
} // -
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** From 'this' bidiagonal matrix subtract scalar 'x'.
* @param x the scalar to subtract
*/
def - (x: $BASE): Bid$MATRIX =
{
new Bid$MATRIX (_dg - x, _sd - x)
} // -
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** From 'this' bidiagonal matrix subtract in-place matrix 'b'.
* @param b the matrix to subtract (requires 'leDimensions')
*/
def -= (b: $MATRI): Bid$MATRIX =
{
val bid = b.asInstanceOf [Bid$MATRIX]
if (d1 == bid.d1) {
_dg -= bid.dg
_sd -= bid.sd
} else {
flaw ("-=", "matrix b has the wrong dimensions")
} // if
this
} // -=
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** From 'this' bidiagonal matrix subtract in-place (row) vector 'u'.
* @param u the vector to subtract
*/
def -= (u: $VECTO): Bid$MATRIX =
{
throw new NoSuchMethodException ("Bid$MATRIX does not support -= with $VECTO")
} // -=
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** From 'this' bidiagonal matrix subtract in-place scalar 'x'.
* @param x the scalar to subtract
*/
def -= (x: $BASE): Bid$MATRIX =
{
_dg -= x; _sd -= x; this
} // -=
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Multiply 'this' bidiagonal matrix by matrix 'b'.
* @param b the matrix to multiply by
*/
def * (b: $MATRI): Bid$MATRIX =
{
throw new NoSuchMethodException ("Bid$MATRIX does not support * with general matrices")
} // *
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Multiply 'this' bidiagonal matrix by matrix 'b'. Requires 'b' to have
* type `Bid$MATRIX`, but returns a more general type of matrix.
* @param b the matrix to multiply by
*/
def * (b: Bid$MATRIX): $MATRIX =
{
val c = new $MATRIX (d1)
for (i <- 0 until d1; j <- (i-2 max 0) to (i+2 min n)) {
var sum = $ZERO
val k1 = ((i min j) - 1) max 0
val k2 = ((i max j) + 1) min n
for (k <- k1 to k2) sum += at(i, k) * b.at(k, j)
c(i, j) = sum
} // for
c
} // *
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Multiply 'this' bidiagonal matrix by vector 'u'.
* @param u the vector to multiply by
*/
def * (u: $VECTO): $VECTOR =
{
val c = new $VECTOR (d1)
for (i <- 0 until n) c(i) = _dg(i) * u(i) + _sd(i) * u(i+1)
c(n) = _dg(d1-1) * u(d1-1)
c
} // *
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Multiply 'this' bidiagonal matrix by scalar 'x'.
* @param x the scalar to multiply by
*/
def * (x: $BASE): Bid$MATRIX =
{
new Bid$MATRIX (_dg * x, _sd * x)
} // *
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Multiply in-place 'this' bidiagonal matrix by matrix 'b'.
* @param b the matrix to multiply by
*/
def *= (b: $MATRI): Bid$MATRIX =
{
throw new NoSuchMethodException ("inplace matrix multiplication not implemented")
} // *=
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Multiply in-place 'this' bidiagonal matrix by scalar 'x'.
* @param x the scalar to multiply by
*/
def *= (x: $BASE): Bid$MATRIX =
{
_dg *= x; _sd *= x; this
} // *=
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the dot product of 'this' matrix and vector 'u', by conceptually
* transposing 'this' matrix and then multiplying by 'u' (i.e., 'a dot u = a.t * u').
* @param u the vector to multiply by (requires same first dimensions)
*/
def dot (u: $VECTO): $VECTOR =
{
if (dim1 != u.dim) flaw ("dot", "matrix dot vector - incompatible first dimensions")
val c = new $VECTOR (d1)
c(0) = _dg(0) * u(0)
for (i <- 1 until d1) c(i) = _sd(i-1) * u(i-1) + _dg(i) * u(i)
c
} // dot
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the dot product of 'this' matrix with matrix 'b' to produce a vector.
* @param b the second matrix of the dot product
*/
def dot (b: $MATRI): $VECTOR =
{
if (dim1 != b.dim1) flaw ("dot", "matrix dot matrix - incompatible first dimensions")
val c = new $VECTOR (d1)
c(0) = _dg(0) * b(0, 0)
for (i <- 1 until d1) c(i) = _sd(i-1) * b(i-1, i) + _dg(i) * b(i, i)
c
} // dot
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the matrix dot product of 'this' matrix with matrix 'b' to produce a matrix.
* @param b the second matrix of the dot product
*/
def mdot (b: Bid$MATRIX): $MATRIX =
{
if (dim1 != b.dim1) flaw ("mdot", "matrix mdot matrix - incompatible first dimensions")
val c = new $MATRIX (dim2, b.dim2)
c(0, 0) = _dg(0) * b._dg(0)
for (i <- 1 until dim1) {
c(i, i) = _dg(i) * b._dg(i) + _sd(i-1) * b._sd(i-1)
c(i-1, i) = _dg(i-1) * b._sd(i-1)
c(i, i-1) = _sd(i-1) * b._dg(i-1)
} // for
c
} // mdot
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the matrix dot product of 'this' matrix with matrix 'b' to produce a matrix.
* @param b the second matrix of the dot product
*/
def mdot (b: $MATRI): $MATRIX =
{
if (dim1 != b.dim1) flaw ("mdot", "matrix mdot matrix - incompatible first dimensions")
val c = new $MATRIX (dim2, b.dim2)
for (j <- 0 until b.dim2) {
c(0, j) = _dg(0) * b(0, j)
for (i <- 1 until dim1) {
c(i, j) = _sd(i-1) * b(i-1, j) + _dg(i) * b(i, j)
} // for
} // for
c
} // mdot
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Multiply 'this' bidiagonal matrix by vector 'u' to produce another matrix
* 'a_ij * u_j'.
* @param u the vector to multiply by
*/
def ** (u: $VECTO): Bid$MATRIX =
{
throw new NoSuchMethodException ("matrix * vector -> matrix not implemented")
} // **
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Multiply in-place 'this' bidiagonal matrix by vector 'u' to produce another
* matrix 'a_ij * u_j'.
* @param u the vector to multiply by
*/
def **= (u: $VECTO): Bid$MATRIX =
{
throw new NoSuchMethodException ("inplace matrix * vector -> matrix not implemented")
} // **=
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Divide 'this' bidiagonal matrix by scalar 'x'.
* @param x the scalar to divide by
*/
def / (x: $BASE): Bid$MATRIX =
{
new Bid$MATRIX (_dg / x, _sd / x)
} // /
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Divide in-place 'this' bidiagonal matrix by scalar 'x'.
* @param x the scalar to divide by
*/
def /= (x: $BASE): Bid$MATRIX =
{
_dg /= x; _sd /= x; this
} // /=
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Raise 'this' bidiagonal matrix to the 'p'th power (for some integer 'p' >= 2).
* @param p the power to raise 'this' matrix to
*/
def ~^ (p: Int): Bid$MATRIX =
{
throw new NoSuchMethodException ("matrix power function (~^) not implemented")
} // ~^
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Find the maximum element in 'this' bidiagonal matrix.
* @param e the ending row index (exclusive) for the search
*/
def max (e: Int = dim1): $BASE = _dg(0 until e).max() max _sd(0 until (e min n)).max()
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Find the minimum element in 'this' bidiagonal matrix.
* @param e the ending row index (exclusive) for the search
*/
def min (e: Int = dim1): $BASE = _dg(0 until e).min() min _sd(0 until (e min n)).min()
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Solve for 'x' using back substitution in the equation 'u*x = y' where
* 'this' matrix ('u') is upper triangular (see 'lud' above).
* @param y the constant vector
*/
def bsolve (y: $VECTO): $VECTOR = solve (y)
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Solve for 'x' in the equation 'a*x = b' where 'a' is 'this' bidiagonal matrix.
* @param b the constant vector
*/
def solve (b: $VECTO): $VECTOR =
{
val d = _dg // diagonal
val e = _sd // super-diagonal
val x = new $VECTOR (d1)
x(n) = b(n) / d(n)
for (i <- n-1 to 0 by -1) x(i) = (b(i) - e(i) * x(i+1)) / d(i)
x
} // solve
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Solve for 'x' in the equation 'l*u*x = b' (see 'lud' above).
* @param lu the lower and upper triangular matrices
* @param b the constant vector
*/
def solve (lu: Tuple2 [$MATRI, $MATRI], b: $VECTO): $VECTOR = solve (lu._1, lu._2, b)
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Combine 'this' bidiagonal matrix with matrix 'b', placing them along the
* diagonal and filling in the bottom left and top right regions with zeros:
* '[this, b]'.
* @param b the matrix to combine with 'this' bidiagonal matrix
*/
def diag (b: $MATRI): $MATRI =
{
val m = d1 + b.dim1
val n = d1 + b.dim2
val c = new $MATRIX (m, n)
c(0, 0) = _dg(0)
c(0, 1) = _sd(0)
for (i <- 1 until m) {
if (i < d1) {
c(i, i) = _dg(i)
if (i < d1-1) c(i, i+1) = _sd(i)     // note: the local 'n' above shadows the field; stay within the sup-diagonal
} else {
for (j <- d1 until n) c(i, j) = b(i-d1, j-d1)
} // if
} // for
c
} // diag
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Form a matrix '[Ip, this, Iq]' where Ir is a 'r-by-r' identity matrix, by
* positioning the three matrices 'Ip', 'this' and 'Iq' along the diagonal.
* Fill the rest of matrix with zeros.
* @param p the size of identity matrix Ip
* @param q the size of identity matrix Iq
*/
def diag (p: Int, q: Int): SymTri$MATRIX =
{
val nn = d1 + p + q
val dd = new $VECTOR (nn)
val ss = new $VECTOR (nn-1)
for (i <- 0 until p) dd(i) = $ONE // Ip
for (i <- 0 until d1) dd(i+p) = _dg(i) // this
for (i <- 0 until n) ss(i+p) = _sd(i) // this
for (i <- p + d1 until nn) dd(i) = $ONE // Iq
new SymTri$MATRIX (dd, ss)
} // diag
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Get the 'k'th diagonal of 'this' bidiagonal matrix. Assumes 'dim2 >= dim1'.
* @param k how far above the main diagonal, e.g., (0, 1) for (main, super)
*/
def getDiag (k: Int = 0): $VECTOR =
{
if (k == 0) _dg.toDense
else if (k == 1) _sd.toDense
else { flaw ("getDiag", "nothing stored for diagonal " + k); null }
} // getDiag
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Set the 'k'th diagonal of 'this' bidiagonal matrix to the vector 'u'.
* Assumes 'dim2 >= dim1'.
* @param u the vector to set the diagonal to
* @param k how far above the main diagonal, e.g., (-1, 0, 1) for (sub, main, super)
*/
def setDiag (u: $VECTO, k: Int = 0)
{
if (k == 0) _dg = u.toDense
else if (k == 1) _sd = u.toDense
else flaw ("setDiag", "nothing stored for diagonal " + k)
} // setDiag
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Set the main diagonal of 'this' bidiagonal matrix to the scalar 'x'.
* Assumes 'dim2 >= dim1'.
* @param x the scalar to set the diagonal to
*/
def setDiag (x: $BASE) { _dg.set (x) }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Invert 'this' bidiagonal matrix.
*/
def inverse: $MATRI =
{
val d = _dg // diagonal
val e = _sd // augmented super-diagonal
val b = new $MATRIX (d1, d1)
for (i <- 0 until d1) b(i, i) = $ONE / d(i)
for (i <- n to 0 by -1; j <- i+1 until d1) {     // include row 0 as well
b(i, j) = -(e(j-1) / d(j)) * b(i, j-1)
} // for
b
} // inverse
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Clean values in 'this' bidiagonal matrix at or below the threshold by setting
* them to zero. Iterative algorithms give approximate values and if very close
* to zero, may throw off other calculations, e.g., in computing eigenvectors.
* @param thres the cutoff threshold (a small value)
* @param relative whether to use relative or absolute cutoff
*/
def clean (thres: Double, relative: Boolean = true): Bid$MATRIX =
{
val s = if (relative) mag else $ONE // use matrix magnitude or 1
for (i <- range_d) if (ABS (_dg(i)) <= thres * s) _dg(i) = $ZERO
for (i <- range_s) if (ABS (_sd(i)) <= thres * s) _sd(i) = $ZERO
this
} // clean
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the (right) nullspace of 'this' 'm-by-n' matrix (requires 'n = m+1')
* by performing Gauss-Jordan reduction and extracting the negation of the
* last column augmented by 1.
* <p>
* nullspace (a) = set of orthogonal vectors v s.t. a * v = 0
* <p>
* The left nullspace of matrix 'a' is the same as the right nullspace of 'a.t'.
* FIX: need a more robust algorithm for computing nullspace (@see Fac_QR.scala).
* FIX: remove the 'n = m+1' restriction.
* @see http://ocw.mit.edu/courses/mathematics/18-06sc-linear-algebra-fall-2011/ax-b-and-the-four-subspaces
* @see /solving-ax-0-pivot-variables-special-solutions/MIT18_06SCF11_Ses1.7sum.pdf
*/
def nullspace: $VECTOR =
{
if (dim2 != dim1 + 1) flaw ("nullspace", "requires n (columns) = m (rows) + 1")
reduce.col(dim2 - 1) * -$ONE ++ $ONE
} // nullspace
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute in-place the (right) nullspace of 'this' 'm-by-n' matrix (requires 'n = m+1')
* by performing Gauss-Jordan reduction and extracting the negation of the
* last column augmented by 1.
* <p>
* nullspace (a) = set of orthogonal vectors v s.t. a * v = 0
* <p>
* The left nullspace of matrix 'a' is the same as the right nullspace of 'a.t'.
* FIX: need a more robust algorithm for computing nullspace (@see Fac_QR.scala).
* FIX: remove the 'n = m+1' restriction.
* @see http://ocw.mit.edu/courses/mathematics/18-06sc-linear-algebra-fall-2011/ax-b-and-the-four-subspaces
* @see /solving-ax-0-pivot-variables-special-solutions/MIT18_06SCF11_Ses1.7sum.pdf
*/
def nullspace_ip (): $VECTOR =
{
if (dim2 != dim1 + 1) flaw ("nullspace", "requires n (columns) = m (rows) + 1")
reduce_ip ()
col(dim2 - 1) * -$ONE ++ $ONE
} // nullspace_ip
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the trace of 'this' bidiagonal matrix, i.e., the sum of the elements
* on the main diagonal. Should also equal the sum of the eigenvalues.
* @see Eigen.scala
*/
def trace: $BASE = _dg.sum
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the sum of 'this' bidiagonal matrix, i.e., the sum of its elements.
*/
def sum: $BASE = _dg.sum + _sd.sum
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the 'abs' sum of 'this' bidiagonal matrix, i.e., the sum of the absolute
* value of its elements. This is useful for comparing matrices '(a - b).sumAbs'.
*/
def sumAbs: $BASE = _dg.sumAbs + _sd.sumAbs
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the sum of the lower triangular region of 'this' bidiagonal matrix.
*/
def sumLower: $BASE = $ZERO
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the determinant of 'this' bidiagonal matrix. Since an upper
 *  bidiagonal matrix is upper triangular, the determinant is the product
 *  of its diagonal elements.
 */
def det: $BASE =
{
    var prod = $ONE
    for (i <- range_d) prod = prod * _dg(i)
    prod
} // det
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Return the lower triangular of 'this' matrix (rest are zero).
*/
def lowerT: $MATRIX = { val c = new $MATRIX (dim1, dim1); c.setDiag (_dg); c }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Return the upper triangular of 'this' matrix (rest are zero).
*/
def upperT: $MATRIX = { val c = new $MATRIX (dim1, dim1); c.setDiag (_dg); c.setDiag (_sd, 1); c }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Check whether 'this' matrix is bidiagonal (has non-zero elements only in
* main diagonal and super-diagonal).
*/
override def isBidiagonal: Boolean = true
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Check whether 'this' bidiagonal matrix is nonnegative (has no negative elements).
*/
override def isNonnegative: Boolean = _dg.isNonnegative && _sd.isNonnegative
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Check whether 'this' bidiagonal matrix is rectangular (all rows have the same
* number of columns).
*/
def isRectangular: Boolean = true
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Check whether 'this' matrix is bidiagonal (has non-zero elements only in
* main diagonal and super-diagonal).
*/
override def isTridiagonal: Boolean = false
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Convert 'this' bidiagonal matrix to a string showing the diagonal
* vector followed by the sup-diagonal vector.
*/
override def toString: String = "\\nBid$MATRIX(\\t" + _dg + ", \\n\\t\\t" + _sd + ")"
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Write 'this' matrix to a CSV-formatted text file with name 'fileName'.
* @param fileName the name of file to hold the data
*/
def write (fileName: String)
{
// FIX - implement write method
} // write
//--------------------------------------------------------------------------
// The following methods are currently not implemented for Bidiagonal matrices:
//--------------------------------------------------------------------------
def lud: Tuple2 [$MATRI, $MATRI] =
{
throw new NoSuchMethodException ("lud not implemented since it's already an upper matrix")
} // lud
def lud_ip (): Tuple2 [$MATRI, $MATRI] =
{
throw new NoSuchMethodException ("lud_ip not implemented since it's already an upper matrix")
} // lud_ip
def solve (l: $MATRI, u: $MATRI, b: $VECTO): $VECTOR =
{
throw new NoSuchMethodException ("solve lu not implemented, since lud not needed")
} // solve
def inverse_ip (): Bid$MATRIX =
{
throw new NoSuchMethodException ("inverse_ip not implemented since result may not be BidMatrix")
} // inverse_ip
def reduce: Bid$MATRIX =
{
throw new NoSuchMethodException ("reduce not yet implemented")
} // reduce
def reduce_ip ()
{
throw new NoSuchMethodException ("reduce_ip not implemented since result may not be BidMatrix")
} // reduce_ip
} // Bid$MATRIX class
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `Bid$MATRIX` object is the companion object for the `Bid$MATRIX` class.
*/
object Bid$MATRIX extends Error
{
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Create a matrix and assign values from the array of vectors 'u'.
* @param u the array of vectors to assign
* @param columnwise whether the vectors are treated as column or row vectors
*/
def apply (u: Array [$VECTO], columnwise: Boolean = true): Bid$MATRIX =
{
var x: Bid$MATRIX = null
val u_dim = u(0).dim
if (u_dim != u.length) flaw ("apply", "bidiagonal matrices must be square")
if (columnwise) {
x = new Bid$MATRIX (u_dim)
for (j <- 0 until u_dim) x.setCol (j, u(j)) // assign column vectors
} else {
x = new Bid$MATRIX (u_dim)
for (i <- 0 until u_dim) x(i) = u(i) // assign row vectors
} // if
x
} // apply
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Create a matrix and assign values from the Scala `Vector` of vectors 'u'.
* Assumes vectors are column-wise.
* @param u the Vector of vectors to assign
*/
def apply (u: Vector [$VECTO]): Bid$MATRIX =
{
val u_dim = u(0).dim
val x = new Bid$MATRIX (u_dim)
for (j <- 0 until u.length) x.setCol (j, u(j)) // assign column vectors
x
} // apply
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Create a matrix by reading from a text file, e.g., a CSV file.
* @param fileName the name of file holding the data
*/
def apply (fileName: String): Bid$MATRIX =
{
val sp = ',' // character separating the values
val lines = fromFile (fileName).getLines.toArray // get the lines from file
val (m, n) = (lines.length, lines(0).split (sp).length)
if (m != n) flaw ("apply", "bidiagonal matrices must be square")
val x = new Bid$MATRIX (m)
for (i <- 0 until m) x(i) = $VECTOR (lines(i).split (sp))
x
} // apply
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Create an 'm-by-m' identity matrix I (ones on main diagonal, zeros elsewhere).
* @param m the row and column dimensions of the matrix
*/
def eye (m: Int): Bid$MATRIX =
{
val c = new Bid$MATRIX (m)
for (i <- 0 until m) c(i, i) = $ONE
c
} // eye
} // Bid$MATRIX object
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `Bid${MATRIX}Test` object is used to test the `Bid$MATRIX` class.
* > run-main scalation.linalgebra.Bid${MATRIX}Test
*/
object Bid${MATRIX}Test extends App
{
val a = new Bid$MATRIX ($VECTOR (3, 4, 5),
$VECTOR (2, 1))
val b = new Bid$MATRIX ($VECTOR (2, 3, 4),
$VECTOR (5, 6))
val v = $VECTOR (5, 3, 6)
val c = new $MATRIX ((3, 3), 3, 1, 0,
0, 4, 2,
0, 0, 5)
val d = new $MATRIX ((3, 3), 2, 5, 0,
5, 3, 6,
0, 6, 4)
println ("a = " + a)
println ("b = " + b)
println ("a + b = " + (a + b))
println ("a - b = " + (a - b))
println ("a * b = " + (a * b))
println ("a * v = " + (a * v))
println ("c * d = " + (c * d))
println ("a.det = " + a.det)
val x2 = a.solve (v)
println ("a.solve (v) = " + x2)
println ("a * x2 = " + a * x2)
println ("a.inverse = " + a.inverse)
println ("a.inverse * a = " + a.inverse * a)
} // Bid${MATRIX}Test object
"""
// Ending of string holding code template --------------------------------------
// println (code)
val writer = new PrintWriter (new File (DIR + _l + "Bid" + MATRIX + ".scalaa"))
writer.write (code)
writer.close ()
} // for
} // BldBidMatrix object
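//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `BidSolveSketch` object is a standalone sketch (not part of the builder):
 *  it performs the same back substitution as the generated 'solve' method on a
 *  small upper bidiagonal system, using plain arrays so it does not depend on
 *  the generated classes.
 */
object BidSolveSketch extends App
{
    val d = Array (3.0, 4.0, 5.0)                          // diagonal
    val e = Array (2.0, 1.0)                               // sup-diagonal
    val b = Array (5.0, 3.0, 6.0)                          // constant vector
    val n = d.length - 1
    val x = new Array [Double] (d.length)
    x(n) = b(n) / d(n)                                     // back substitution
    for (i <- n-1 to 0 by -1) x(i) = (b(i) - e(i) * x(i+1)) / d(i)
    for (i <- 0 until n) assert (math.abs (d(i)*x(i) + e(i)*x(i+1) - b(i)) < 1e-12)
    assert (math.abs (d(n)*x(n) - b(n)) < 1e-12)           // verify a*x = b
    println ("x = " + x.mkString (", "))
} // BidSolveSketch object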
|
NBKlepp/fda
|
scalation_1.2/src/main/scala/scalation/linalgebra/bld/BldBidMatrix.scala
|
Scala
|
mit
| 45,403 |
package org.jetbrains.plugins.scala
package annotator
import org.intellij.lang.annotations.Language
import org.jetbrains.plugins.scala.base.SimpleTestCase
import org.jetbrains.plugins.scala.lang.psi.api.ScalaFile
import org.jetbrains.plugins.scala.lang.psi.api.base.ScLiteral
import scala.util.Random
/**
* @author Ye Xianjin
* @since 11/27/14
*/
class IntegerLiteralCheckTest extends SimpleTestCase {
final val Header = ""
def randomIntValues(num: Int): List[Int] = {
List.fill(num)(Random.nextInt)
}
def randomLongValues(num: Int): List[Long] = {
Stream.continually(Random.nextLong).filter(_.toHexString.length > 8).take(num).toList
}
// how should I bound T to Int and Long only? (one option is sketched after this method)
def expandIntegerLiteral[T](x: T): List[String] = {
val (octalString, hexString) = x match {
case t: Int => (java.lang.Integer.toOctalString(t), java.lang.Integer.toHexString(t))
case t: Long => (java.lang.Long.toOctalString(t), java.lang.Long.toHexString(t))
}
List(x.toString, "0" + octalString, "0x" + hexString, "0X" + hexString)
}
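  // One possible answer to the question above (an aside, not used by the test):
  // bound T with a small sealed type class so instances exist only for Int and Long.
  sealed trait RadixStrings[T] {
    def octal(t: T): String
    def hex(t: T): String
  }
  implicit object IntRadixStrings extends RadixStrings[Int] {
    def octal(t: Int): String = java.lang.Integer.toOctalString(t)
    def hex(t: Int): String = java.lang.Integer.toHexString(t)
  }
  implicit object LongRadixStrings extends RadixStrings[Long] {
    def octal(t: Long): String = java.lang.Long.toOctalString(t)
    def hex(t: Long): String = java.lang.Long.toHexString(t)
  }
  def expandIntegerLiteralTyped[T](x: T)(implicit rs: RadixStrings[T]): List[String] =
    List(x.toString, "0" + rs.octal(x), "0x" + rs.hex(x), "0X" + rs.hex(x))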
def prependSign(s: String): List[String] = if (s.startsWith("-")) List(s) else List(s, "-" + s)
def appendL(s: String): List[String] = List(s + "l", s + "L")
val intValues = List(0, -0, 1, -1, 1234, -1234, Int.MinValue, Int.MaxValue)
val longValues = List(1l + Int.MaxValue, 12345l + Int.MaxValue, -1l + Int.MinValue, -1234l + Int.MinValue, Long.MinValue, Long.MaxValue)
val numOfGenInteger = 10
def testFine() {
val intStrings = (intValues ++ randomIntValues(numOfGenInteger)).flatMap(expandIntegerLiteral).flatMap(prependSign).distinct
for (s <- intStrings) {
assertNothing(messages(s"val a = $s"))
}
val longStrings = (intStrings flatMap appendL) ++
(longValues ++ randomLongValues(numOfGenInteger)).flatMap(expandIntegerLiteral).flatMap(prependSign).flatMap(appendL).distinct
for (s <- longStrings) {
assertNothing(messages(s"val a = $s"))
}
}
def testLiteralOverflowInt() {
val longStrings = longValues.map(_.toString) ++ randomLongValues(numOfGenInteger).flatMap(expandIntegerLiteral).distinct
for (s <- longStrings ++ Seq("2147483648", "-2147483649")) {
assertMatches(messages(s"val a = $s")) {
case Error(s, OverflowIntPattern()) :: Nil =>
}
}
}
def testLiteralOverflowLong() {
val overflowLongStrings = (longValues ++ randomLongValues(numOfGenInteger)).
flatMap(x => List(x.toString.padTo(21, '1'), "0x" + x.toHexString.padTo(17, '1'), "0" + x.toOctalString.padTo(23, '1')))
val overflowLongStringsWithL = overflowLongStrings.flatMap(appendL)
for (s <- overflowLongStrings ++ overflowLongStringsWithL ++ Seq("9223372036854775808l", "-9223372036854775809l")) {
assertMatches(messages(s"val a = $s")) {
case Error(s, OverflowLongPattern()) :: Nil =>
}
}
}
def messages(@Language(value = "Scala", prefix = Header) code: String): List[Message] = {
val annotator = new ScalaAnnotator() {}
val mock = new AnnotatorHolderMock
val parse: ScalaFile = (Header + code).parse
parse.depthFirst.foreach {
case literal: ScLiteral => annotator.annotate(literal, mock)
case _ =>
}
mock.annotations.filter((p: Message) => !p.isInstanceOf[Info])
}
val OverflowIntPattern = ContainsPattern("out of range for type Int")
val OverflowLongPattern = ContainsPattern("out of range even for type Long")
}
|
double-y/translation-idea-plugin
|
test/org/jetbrains/plugins/scala/annotator/IntegerLiteralCheckTest.scala
|
Scala
|
apache-2.0
| 3,515 |
package org.stanoq.crawler
import java.util.concurrent.TimeUnit
import com.typesafe.config.ConfigFactory
import org.mongodb.scala.{MongoClient, MongoCollection}
import org.stanoq.crawler.model._
import org.mongodb.scala.bson.codecs.Macros._
import org.mongodb.scala.bson.codecs.DEFAULT_CODEC_REGISTRY
import org.bson.codecs.configuration.CodecRegistries.{fromProviders, fromRegistries}
import org.mongodb.scala.bson.conversions.Bson
import org.mongodb.scala.model.Filters._
import scala.concurrent.Await
import scala.concurrent.duration.Duration
object MongoHelper {
val config = ConfigFactory.load()
val rawUrl = config.getString("mongo.url")
val url = rawUrl.substring(0,rawUrl.lastIndexOf("/")+1)
val databaseName = rawUrl.substring(rawUrl.lastIndexOf("/")+1)
val mongoClient: MongoClient = MongoClient(rawUrl)
val responseRegistry = fromRegistries(fromProviders(classOf[CrawlerResponse],classOf[ConfigProperties],classOf[Node],classOf[EchartResponse],classOf[EchartNode],classOf[EchartLink]), DEFAULT_CODEC_REGISTRY)
val database = mongoClient.getDatabase(databaseName).withCodecRegistry(responseRegistry)
val collection: MongoCollection[CrawlerResponse] = database.getCollection("crawler")
def filterResponse(config:ConfigProperties):Bson = and(equal("config.url", config.url),equal("config.depthLimit", config.depthLimit))
def size = Await.result(collection.count().head(), Duration(10, TimeUnit.SECONDS)).toInt
def getLatest:List[CrawlerResponse] = Await.result(collection.find().skip(size-1).toFuture(), Duration(10, TimeUnit.SECONDS)).toList
def getAll(limit: Int) = Await.result(collection.find().limit(limit).toFuture(), Duration(10, TimeUnit.SECONDS)).toList
def getResponse(url: String) = Await.result(collection.find(equal("config.url",url)).toFuture(),Duration(10, TimeUnit.SECONDS))
def persist(response: CrawlerResponse) = Await.result(collection.insertOne(response).head(), Duration(10, TimeUnit.SECONDS))
def deleteSite(config: ConfigProperties) = Await.result(collection.deleteOne(filterResponse(config)).head(),Duration(10, TimeUnit.SECONDS))
def deleteAll() = Await.result(collection.drop().head(),Duration(10, TimeUnit.SECONDS))
}
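// Usage sketch (illustrative only; assumes a reachable MongoDB at mongo.url and
// a CrawlerResponse built elsewhere -- its constructor lives in the model package):
//   MongoHelper.persist(response)         // blocking insert, 10s timeout
//   val latest = MongoHelper.getLatest    // most recently stored response
//   val first  = MongoHelper.getAll(100)  // up to 100 stored responses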
|
olka/stanoq
|
src/main/scala/org/stanoq/crawler/MongoHelper.scala
|
Scala
|
mit
| 2,191 |
/**
* Copyright 2015, 2016, 2017 Gianluca Amato <[email protected]>
*
* This file is part of ScalaFix.
* ScalaFix is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ScalaFix is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with ScalaFix. If not, see <http://www.gnu.org/licenses/>.
*/
package it.unich
/**
* The fixpoint package contains everything which is related to defining and solving systems
* of equations. This package object defines some type aliases which are used in the API.
*/
package object scalafix {
/**
* An assignment for an equation system is a map from unknowns to values.
*/
type Assignment[U, V] = U => V
/**
* The body of an equation system, i.e., a map from assignments to assignments.
*/
type Body[U, V] = Assignment[U, V] => Assignment[U, V]
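  // A minimal sketch of these aliases in use (names are illustrative only): a
  // one-unknown body whose least fixpoint assigns 10 to every unknown.
  //   val body: Body[String, Int] =
  //     (rho: Assignment[String, Int]) => (u: String) => math.min(rho(u) + 1, 10)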
/**
* A body which also calculates dependencies among unknowns.
*/
type BodyWithDependencies[U, V] = Assignment[U, V] => Assignment[U, (V, Iterable[U])]
/**
* The effect of an edge in a graph equation system.
*/
type EdgeAction[U, V, E] = Assignment[U, V] => E => V
/**
* The constant to use in -Xelide-below in order to remove tracing code. Note that
* tracing might be required to make a program work, so only elide it if you
 * know what you are doing.
*/
final val TRACING: Int = 5000
}
|
jandom-devel/ScalaFix
|
core/src/main/scala/it/unich/scalafix/package.scala
|
Scala
|
gpl-3.0
| 1,823 |
/*
* Copyright (c) 2015 Robert Conrad - All Rights Reserved.
* Unauthorized copying of this file, via any medium is strictly prohibited.
* This file is proprietary and confidential.
* Last modified by rconrad, 1/3/15 7:09 PM
*/
package base.socket.message.user
import base.socket.message.{ CommandObject, UserServerCommand }
object UserServerCommands extends CommandObject {
val cmds = init(this, Login, BadApiVersion, Busy, BadInput)
case object Login extends UserServerCommand[LoginUserServerMessage]("loginOK")
case object BadApiVersion extends UserServerCommand[BadApiUserServerMessage]("loginOK")
case object Busy extends UserServerCommand[BusyUserServerMessage]("serverBusy")
case object BadInput extends UserServerCommand[BadInputUserServerMessage]("badInput")
}
|
robconrad/base-api
|
project-socket/src/main/scala/base/socket/message/user/UserServerCommands.scala
|
Scala
|
mit
| 791 |
/**
* Created by Variant on 16/3/18.
*/
object PatternMatchFunction {
def main(args: Array[String]) {
def match_type(t : Any) = t match {
      case p: Int => println("It is Integer")
      case p: String => println("It is String")
      case m: Map[_, _] => m.foreach(println)
case _ => println("Unknown type !!!")
}
match_type(2)
match_type(Map("Scala" -> "Spark"))
def match_array(arr : Any) = arr match{
case Array(0) => println("Array" + "0")
case Array(x,y) => println("Array" + x +" " +y)
case Array(0, _*) =>println("Array" + "0 ...")
case _ => println("Something else")
}
match_array(Array(0))
match_array(Array(0,1))
match_array(Array(0,12,3,5,5,"strn"))
def match_list(lst :Any) = lst match{
case 0 :: Nil => println("List:" + "0")
case x :: y :: Nil => println("List:" + x + " " + y)
case 0 :: tail =>println("List" + "0 ...")
case _ => println("something else")
}
match_list(List(0))
match_list(List(0,1))
match_list(List(0,1,2,3,45,6))
    // Scala extractors: unapply-driven matching (a custom extractor example follows the tuple matches below)
def match_tuple(tuple : Any) =tuple match{
case(0, _) => println("Tuple:" + "0" )
case(x,0) => println("Tuple:" + x)
case _ => println("Something else")
}
    match_tuple((0, "scala"))
    match_tuple(("dasdas", 0))
    match_tuple((0, 1, 2, 3, 4, 5))
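    // A custom extractor (added sketch): an object with unapply drives pattern
    // matching for arbitrary types, here splitting "user@domain" strings.
    object Email {
      def unapply(s: String): Option[(String, String)] = s.split("@") match {
        case Array(user, domain) => Some((user, domain))
        case _ => None
      }
    }
    "spark@apache.org" match {
      case Email(user, domain) => println("user: " + user + ", domain: " + domain)
      case _ => println("Not an email")
    }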
}
}
|
sparkLiwei/ProgrammingNote
|
scalaLearning/scalaFunction/scalaFunction/PatternMatchFunction.scala
|
Scala
|
cc0-1.0
| 1,378 |
package jp.co.bizreach.robot
import java.io.InputStream
import java.util.Locale
import org.apache.commons.io.input.BOMInputStream
import scala.annotation.tailrec
import scala.io.Source
import scala.util.Try
import scala.util.matching.Regex
import com.softwaremill.quicklens._
/**
* The object of parsing the robots.txt.
*/
object RobotsTxtParser {
/**
* Parse the specified robots.txt.
*
* @param stream input stream of robots.txt
* @param charsetName encoding (optional: defaults to UTF-8)
* @return the contents of the parsed robots.txt
*/
def parse(stream: InputStream, charsetName: String = "UTF-8"): RobotsTxt = {
    // Tail-recursive helper that parses the robots.txt content line by line
@tailrec
def readLine0(lines: Iterator[String], robots: RobotsTxt = RobotsTxt(),
currentPath: Seq[String] = Nil, isGroupRecord: Boolean = false): RobotsTxt = {
import Record._
if(!lines.hasNext) robots
else read(lines.next()) match {
// applicable
case Some((r, v)) => r match {
case UserAgent =>
val path = v.toLowerCase(Locale.ENGLISH)
readLine0(
lines = lines,
robots = robots += path,
currentPath = path +: ( if(isGroupRecord) Nil else currentPath )
)
case Disallow =>
readLine0(
lines = lines,
robots = robots.update(currentPath: _*)(_.modify(_.disallow).using(v +: _)),
currentPath = currentPath,
isGroupRecord = true
)
case Allow =>
readLine0(
lines = lines,
robots = robots.update(currentPath: _*)(_.modify(_.allow).using(v +: _)),
currentPath = currentPath,
isGroupRecord = true
)
case CrawlDelay =>
val delay = Seq(Try(v.toInt).getOrElse(0), 0).max
readLine0(
lines = lines,
robots = robots.update(currentPath: _*)(_.modify(_.crawlDelay).setTo(delay)),
currentPath = currentPath,
isGroupRecord = true
)
case Sitemap =>
readLine0(
lines = lines,
robots = robots.modify(_.sitemap).using(v +: _),
currentPath = currentPath,
isGroupRecord = true
)
}
// not applicable
case None => readLine0(lines, robots, currentPath, true)
}
}
readLine0(
Source.fromInputStream(new BOMInputStream(stream), charsetName)
.getLines() flatMap stripComment
)
}
private def stripComment(line: String): Option[String] = {
(line.splitAt(line indexOf '#') match {
// comments do not exist
case (take, drop) if take.isEmpty => drop
// comments exist
case (take, _) => take
}).trim match {
case "" => None
case x => Some(x)
}
}
private sealed abstract class Record(regex: Regex) {
def value(line: String): Option[String] =
regex.findFirstMatchIn(line).map(_.group(1)).filterNot("" == _)
}
private object Record {
    case object UserAgent extends Record("(?i)^user-agent:\\s*([^\\t\\n\\x0B\\f\\r]+)\\s*$".r)
    case object Disallow extends Record("(?i)^disallow:\\s*([^\\s]*)\\s*$".r)
    case object Allow extends Record("(?i)^allow:\\s*([^\\s]*)\\s*$".r)
    case object CrawlDelay extends Record("(?i)^crawl-delay:\\s*([^\\s]+)\\s*$".r)
    case object Sitemap extends Record("(?i)^sitemap:\\s*([^\\s]+)\\s*$".r)
def read(line: String): Option[(Record, String)] = Seq(UserAgent, Disallow, Allow, CrawlDelay, Sitemap)
.flatMap { x => x.value(line).map(x -> _) }
.headOption
}
}
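// Usage sketch (illustrative; exercises only `parse` -- the API of the returned
// RobotsTxt is defined elsewhere in this project):
object RobotsTxtParserExample {
  def main(args: Array[String]): Unit = {
    val txt =
      """User-agent: *
        |Disallow: /private/   # trailing comments are stripped
        |Crawl-delay: 2
        |Sitemap: http://example.com/sitemap.xml
        |""".stripMargin
    val robots = RobotsTxtParser.parse(new java.io.ByteArrayInputStream(txt.getBytes("UTF-8")))
    println(robots)
  }
}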
|
bizreach/robotparser-scala
|
src/main/scala/jp/co/bizreach/robot/RobotsTxtParser.scala
|
Scala
|
apache-2.0
| 3,807 |
package scroll.internal.support.impl
import scroll.internal.support.RelationshipsApi
import scroll.internal.support.RoleQueriesApi
import scala.reflect.ClassTag
class Relationships(private[this] val roleQueries: RoleQueriesApi) extends RelationshipsApi {
import scroll.internal.support.impl.Multiplicities._
class ToBuilder[L <: AnyRef: ClassTag](name: String, leftMul: Multiplicity)
extends ToBuilderApi[L] {
override def to[R <: AnyRef: ClassTag](rightMul: Multiplicity): RelationshipApi[L, R] =
new Relationship[L, R](name, leftMul, rightMul)
}
class FromBuilder(name: String) extends FromBuilderApi {
override def from[L <: AnyRef: ClassTag](leftMul: Multiplicity): ToBuilder[L] =
new ToBuilder[L](name, leftMul)
}
/** Creates a [[Relationships.Relationship]] with the given name with a fluent relationship
* creation API.
*
* @param name
* the name of the created Relationship
* @return
* an instance of the Relationship builder
*/
override def create(name: String): FromBuilder = new FromBuilder(name)
/** Class representation of a relationship between two (role) types.
*
* @param name
* name of the relationship
* @param leftMul
* multiplicity of the left side of the relationship
* @param rightMul
* multiplicity of the right side of the relationship
* @tparam L
* type of the role of the left side of the relationship
* @tparam R
* type of the role of the right side of the relationship
*/
class Relationship[L <: AnyRef: ClassTag, R <: AnyRef: ClassTag](
name: String,
leftMul: Multiplicity,
rightMul: Multiplicity
) extends RelationshipApi[L, R] {
protected val MULT_NOT_ALLOWED: String = "This multiplicity is not allowed!"
private[this] def checkMul[T](m: Multiplicity, on: Seq[T]): Seq[T] = {
m match {
case MMany() =>
assert(
on.nonEmpty,
s"With left multiplicity for '$name' of '*', the resulting role set should not be empty!"
)
case ConcreteValue(v) =>
assert(
v.compare(on.size) == 0,
s"With a concrete multiplicity for '$name' of '$v' the resulting role set should have the same size!"
)
case RangeMultiplicity(f, t) =>
(f, t) match {
case (ConcreteValue(v1), ConcreteValue(v2)) =>
assert(
v1 <= on.size && v2 >= on.size,
s"With a multiplicity for '$name' from '$v1' to '$v2', the resulting role set size should be in between!"
)
case (ConcreteValue(v), MMany()) =>
assert(
v <= on.size,
s"With a multiplicity for '$name' from '$v' to '*', the resulting role set size should be in between!"
)
case _ =>
throw new RuntimeException(MULT_NOT_ALLOWED) // default case
}
}
on
}
override def left(matcher: L => Boolean = _ => true): Seq[L] =
checkMul(leftMul, roleQueries.all[L](matcher))
override def right(matcher: R => Boolean = _ => true): Seq[R] =
checkMul(rightMul, roleQueries.all[R](matcher))
}
}
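// Usage sketch (illustrative; `queries` stands for an assumed RoleQueriesApi
// instance, and Employee/Company are hypothetical role types):
//   val rel = new Relationships(queries)
//     .create("works_for")
//     .from[Employee](MMany())
//     .to[Company](ConcreteValue(1))
//   rel.left()   // all Employee roles, checked against the '*' multiplicity
//   rel.right()  // exactly one Company role, otherwise the assertion fails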
|
max-leuthaeuser/SCROLL
|
core/src/main/scala/scroll/internal/support/impl/Relationships.scala
|
Scala
|
lgpl-3.0
| 3,240 |
/*
* Copyright 2017 Spotify AB.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.featran.jmh
import java.util.concurrent.TimeUnit
import com.spotify.featran.transformers._
import com.spotify.featran._
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole
@BenchmarkMode(Array(Mode.AverageTime))
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@State(Scope.Thread)
class TransformerBenchmark {
import Fixtures._
def benchmark[A](transformer: Transformer[A, _, _], bh: Blackhole)(implicit
fixture: Seq[A]
): Seq[Unit] = {
implicit val fb: FeatureBuilder[Unit] = new NoOpFeatureBuilder(bh)
val fe = FeatureSpec.of[A].required(identity)(transformer).extract(fixture)
fe.featureValues[Unit]
}
// TODO: figure out how to verify that all transformers are covered
@Benchmark def binarizer(bh: Blackhole): Seq[Unit] = benchmark(Binarizer("t"), bh)
@Benchmark def bucketizer(bh: Blackhole): Seq[Unit] =
benchmark(Bucketizer("t", Array(0.0, 250.0, 500.0, 750.0, 1000.0)), bh)
@Benchmark def hashNHotEncoder(bh: Blackhole): Seq[Unit] = benchmark(HashNHotEncoder("t"), bh)
@Benchmark def hashNHotWeightedEncoder(bh: Blackhole): Seq[Unit] =
benchmark(HashNHotWeightedEncoder("t"), bh)
@Benchmark def hashOneHotEncoder(bh: Blackhole): Seq[Unit] = benchmark(HashOneHotEncoder("t"), bh)
@Benchmark def heavyHitters(bh: Blackhole): Seq[Unit] = benchmark(HeavyHitters("t", 100), bh)
@Benchmark def identityB(bh: Blackhole): Seq[Unit] = benchmark(Identity("t"), bh)
@Benchmark def maxAbsScaler(bh: Blackhole): Seq[Unit] = benchmark(MaxAbsScaler("t"), bh)
@Benchmark def mdl(bh: Blackhole): Seq[Unit] = benchmark(MDL[String]("t"), bh)
@Benchmark def minMaxScaler(bh: Blackhole): Seq[Unit] = benchmark(MinMaxScaler("t"), bh)
@Benchmark def nGrams(bh: Blackhole): Seq[Unit] = benchmark(NGrams("t"), bh)
@Benchmark def nHotEncoder(bh: Blackhole): Seq[Unit] = benchmark(NHotEncoder("t"), bh)
@Benchmark def nHotWeightedEncoder(bh: Blackhole): Seq[Unit] =
benchmark(NHotWeightedEncoder("t"), bh)
@Benchmark def normalizer(bh: Blackhole): Seq[Unit] = benchmark(Normalizer("t"), bh)
@Benchmark def oneHotEncoder(bh: Blackhole): Seq[Unit] = benchmark(OneHotEncoder("t"), bh)
@Benchmark def polynomialExpansion(bh: Blackhole): Seq[Unit] =
benchmark(PolynomialExpansion("t"), bh)
@Benchmark def quantileDiscretizer(bh: Blackhole): Seq[Unit] =
benchmark(QuantileDiscretizer("t"), bh)
@Benchmark def standardScaler(bh: Blackhole): Seq[Unit] = benchmark(StandardScaler("t"), bh)
@Benchmark def topNOneHotEncoder(bh: Blackhole): Seq[Unit] =
benchmark(TopNOneHotEncoder("t", 100), bh)
@Benchmark def vectorIdentity(bh: Blackhole): Seq[Unit] =
benchmark(VectorIdentity[Array]("t"), bh)
@Benchmark def vonMisesEvaluator(bh: Blackhole): Seq[Unit] =
benchmark(VonMisesEvaluator("t", 100.0, 0.001, Array(1.0, 2.0, 3.0, 4.0, 5.0)), bh)
}
private object Fixtures {
implicit val doubles: Seq[Double] = (0 until 1000).map(_.toDouble)
implicit val labels: Seq[String] = (0 until 1000).map(x => "l" + (x % 50))
implicit val mdlRecords: Seq[MDLRecord[String]] =
(0 until 1000).map(x => MDLRecord((x % 3).toString, x.toDouble))
implicit val nLabels: Seq[Seq[String]] =
(0 until 1000).map(x => (0 until (x % 50 + 1)).map("l" + _))
implicit val nWeightedLabels: Seq[Seq[WeightedLabel]] = nLabels.map(_.map(WeightedLabel(_, 1.0)))
implicit val vectors: Seq[Array[Double]] = (0 until 1000).map(x => Array.fill(10)(x / 1000.0))
}
private class NoOpFeatureBuilder(val bh: Blackhole) extends FeatureBuilder[Unit] {
override def init(dimension: Int): Unit = bh.consume(dimension)
override def result: Unit = bh.consume(Unit)
override def add(name: String, value: Double): Unit = {
bh.consume(name)
bh.consume(value)
}
override def skip(): Unit = bh.consume(Unit)
override def newBuilder: FeatureBuilder[Unit] = new NoOpFeatureBuilder(bh)
}
|
spotify/featran
|
jmh/src/test/scala/com/spotify/featran/jmh/TransformerBenchmark.scala
|
Scala
|
apache-2.0
| 4,484 |
package sangria.validation.rules
import sangria.ast
import sangria.ast.AstVisitorCommand
import sangria.validation._
import scala.collection.mutable.{Set => MutableSet}
/** Unique argument names
*
* A GraphQL field or directive is only valid if all supplied arguments are uniquely named.
*/
class UniqueArgumentNames extends ValidationRule {
override def visitor(ctx: ValidationContext) = new AstValidatingVisitor {
val knownArgNames = MutableSet[String]()
override val onEnter: ValidationVisit = {
case _: ast.Field =>
knownArgNames.clear()
AstVisitorCommand.RightContinue
case _: ast.Directive =>
knownArgNames.clear()
AstVisitorCommand.RightContinue
case ast.Argument(name, _, _, pos) =>
if (knownArgNames contains name)
Left(Vector(DuplicateArgNameViolation(name, ctx.sourceMapper, pos.toList)))
else {
knownArgNames += name
AstVisitorCommand.RightContinue
}
}
}
}
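// A sketch of the kind of document this rule rejects (GraphQL source shown as a
// comment): the argument name "id" appears twice on a single field.
//
//   {
//     user(id: 1, id: 2) { name }   // -> DuplicateArgNameViolation("id")
//   }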
|
sangria-graphql/sangria
|
modules/core/src/main/scala/sangria/validation/rules/UniqueArgumentNames.scala
|
Scala
|
apache-2.0
| 999 |
package nodes.stats
import breeze.linalg._
import breeze.stats.distributions._
import workflow.Transformer
/**
* A node that takes in DenseVector[Double] and randomly flips
* the sign of some of the elements
*/
case class RandomSignNode(signs: DenseVector[Double])
extends Transformer[DenseVector[Double], DenseVector[Double]] {
def apply(in: DenseVector[Double]): DenseVector[Double] = in :* signs
}
object RandomSignNode {
/* Create a random sign node */
def apply(size: Int, rand: RandBasis = Rand): RandomSignNode = {
val signs = 2.0*convert(DenseVector.rand(size, Binomial(1, 0.5)(rand)), Double) - 1.0
new RandomSignNode(signs)
}
}
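// Usage sketch (illustrative): a seeded RandBasis makes the sign pattern
// reproducible; each input element is multiplied by +1 or -1.
object RandomSignNodeExample {
  import org.apache.commons.math3.random.MersenneTwister

  def main(args: Array[String]): Unit = {
    val node = RandomSignNode(3, new RandBasis(new MersenneTwister(42L)))
    println(node(DenseVector(1.0, 2.0, 3.0)))
  }
}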
|
tomerk/keystone
|
src/main/scala/nodes/stats/RandomSignNode.scala
|
Scala
|
apache-2.0
| 668 |
package ch.ethz.dalab.dissolve.classification
import ch.ethz.dalab.dissolve.optimization.SolverOptions
import org.apache.spark.rdd.RDD
import java.io.FileWriter
import ch.ethz.dalab.dissolve.regression.LabeledObject
import org.apache.spark.mllib.regression.LabeledPoint
import breeze.linalg._
import ch.ethz.dalab.dissolve.optimization.SolverUtils
import ch.ethz.dalab.dissolve.optimization.DissolveFunctions
import ch.ethz.dalab.dissolve.optimization.DBCFWSolverTuned
import scala.collection.mutable.HashMap
import org.apache.spark.rdd.PairRDDFunctions
import ch.ethz.dalab.dissolve.optimization.SSGSolver
import ch.ethz.dalab.dissolve.optimization.UseDBCFWSolver
import ch.ethz.dalab.dissolve.optimization.UseSSGSolver
case class MultiClassLabel(label: Double, numClasses: Int)
object MultiClassSVMWithDBCFW extends DissolveFunctions[Vector[Double], MultiClassLabel] {
var labelToWeight = HashMap[MultiClassLabel, Double]()
override def classWeights(label: MultiClassLabel): Double = {
labelToWeight.get(label).getOrElse(1.0)
}
/**
* Feature function
*
* Analogous to phi(y) in (2)
* Returns y_i * x_i
*
*/
def featureFn(x: Vector[Double], y: MultiClassLabel): Vector[Double] = {
assert(y.label.toInt < y.numClasses,
"numClasses = %d. Found y_i.label = %d"
.format(y.numClasses, y.label.toInt))
val featureVector = Vector.zeros[Double](x.size * y.numClasses)
val numDims = x.size
// Populate the featureVector in blocks [<class-0 features> <class-1 features> ...].
val startIdx = y.label.toInt * numDims
val endIdx = startIdx + numDims
featureVector(startIdx until endIdx) := x
featureVector
}
/**
   * Loss function (0-1 loss)
   *
   * Returns 0 if yTruth == yPredict, 1 otherwise.
*/
def lossFn(yTruth: MultiClassLabel, yPredict: MultiClassLabel): Double =
if (yTruth.label == yPredict.label)
0.0
else
1.0
/**
* Maximization Oracle
*
* Want: argmax L(y_i, y) - <w, psi_i(y)>
* This returns the most violating (Loss-augmented) label.
*/
override def oracleFn(model: StructSVMModel[Vector[Double], MultiClassLabel], xi: Vector[Double], yi: MultiClassLabel): MultiClassLabel = {
val weights = model.getWeights()
val numClasses = yi.numClasses
// Obtain a list of scores for each class
    val mostViolatedConstraint: (Double, Double) =
(0 until numClasses).map {
case cl =>
(cl, weights dot featureFn(xi, MultiClassLabel(cl, numClasses)))
}.map {
case (cl, score) =>
(cl.toDouble, score + 1.0)
}.map { // Loss-augment the scores
case (cl, score) =>
if (yi.label == cl)
(cl, score - 1.0)
else
(cl, score)
}.maxBy { // Obtain the class with the maximum value
case (cl, score) => score
}
    MultiClassLabel(mostViolatedConstraint._1, numClasses)
}
/**
* Prediction function
*/
def predictFn(model: StructSVMModel[Vector[Double], MultiClassLabel], xi: Vector[Double]): MultiClassLabel = {
val weights = model.getWeights()
val numClasses = model.numClasses
assert(numClasses > 1)
val prediction =
(0 until numClasses).map {
case cl =>
(cl.toDouble, weights dot featureFn(xi, MultiClassLabel(cl, numClasses)))
}.maxBy { // Obtain the class with the maximum value
case (cl, score) => score
}
MultiClassLabel(prediction._1, numClasses)
}
/**
* Classifying with in-built functions
*
* data needs to be 0-indexed
*/
def train(
data: RDD[LabeledPoint],
numClasses: Int,
solverOptions: SolverOptions[Vector[Double], MultiClassLabel],
customWeights:Option[HashMap[MultiClassLabel,Double]]=None): StructSVMModel[Vector[Double], MultiClassLabel] = {
solverOptions.numClasses = numClasses
// Convert the RDD[LabeledPoint] to RDD[LabeledObject]
val objectifiedData: RDD[LabeledObject[Vector[Double], MultiClassLabel]] =
data.map {
case x: LabeledPoint =>
val features: Vector[Double] = x.features match {
case features: org.apache.spark.mllib.linalg.SparseVector =>
val builder: VectorBuilder[Double] = new VectorBuilder(features.indices, features.values, features.indices.length, x.features.size)
builder.toSparseVector
case _ => SparseVector(x.features.toArray)
}
new LabeledObject[Vector[Double], MultiClassLabel](MultiClassLabel(x.label, numClasses), features)
}
labelToWeight = ClassificationUtils.generateClassWeights(objectifiedData,solverOptions.classWeights,customWeights)
val repartData =
if (solverOptions.enableManualPartitionSize)
objectifiedData.repartition(solverOptions.NUM_PART)
else
objectifiedData
println(solverOptions)
val (trainedModel,debugInfo) = solverOptions.solver match {
case UseDBCFWSolver => new DBCFWSolverTuned[Vector[Double], MultiClassLabel](
repartData,
this,
solverOptions,
miniBatchEnabled = false).optimize()
case UseSSGSolver => (new SSGSolver[Vector[Double], MultiClassLabel](
repartData.collect(),
this,
solverOptions
).optimize(),"")
}
println(debugInfo)
// Dump debug information into a file
val fw = new FileWriter(solverOptions.debugInfoPath)
// Write the current parameters being used
fw.write(solverOptions.toString())
    fw.write("\n")
    // Write spark-specific parameters
    fw.write(SolverUtils.getSparkConfString(data.context.getConf))
    fw.write("\n")
// Write values noted from the run
fw.write(debugInfo)
fw.close()
trainedModel
}
/**
* Classifying with user-submitted functions
*/
def train(
data: RDD[LabeledPoint],
dissolveFunctions: DissolveFunctions[Vector[Double], MultiClassLabel],
solverOptions: SolverOptions[Vector[Double], MultiClassLabel]): StructSVMModel[Vector[Double], MultiClassLabel] = {
val numClasses = solverOptions.numClasses
assert(numClasses > 1)
val minlabel = data.map(_.label).min()
val maxlabel = data.map(_.label).max()
assert(minlabel == 0, "Label classes need to be 0-indexed")
assert(maxlabel - minlabel + 1 == numClasses,
"Number of classes in data do not tally with passed argument")
// Convert the RDD[LabeledPoint] to RDD[LabeledObject]
val objectifiedData: RDD[LabeledObject[Vector[Double], MultiClassLabel]] =
data.map {
case x: LabeledPoint =>
new LabeledObject[Vector[Double], MultiClassLabel](MultiClassLabel(x.label, numClasses),
if (solverOptions.sparse) {
val features: Vector[Double] = x.features match {
case features: org.apache.spark.mllib.linalg.SparseVector =>
val builder: VectorBuilder[Double] = new VectorBuilder(features.indices, features.values, features.indices.length, x.features.size)
builder.toSparseVector
case _ => SparseVector(x.features.toArray)
}
features
} else
Vector(x.features.toArray))
}
val repartData =
if (solverOptions.enableManualPartitionSize)
objectifiedData.repartition(solverOptions.NUM_PART)
else
objectifiedData
println(solverOptions)
    // Choose optimizer
val (trainedModel,debugInfo) = solverOptions.solver match {
case UseDBCFWSolver => new DBCFWSolverTuned[Vector[Double], MultiClassLabel](
repartData,
dissolveFunctions,
solverOptions,
miniBatchEnabled = false).optimize()
case UseSSGSolver => (new SSGSolver[Vector[Double], MultiClassLabel](
repartData.collect(),
dissolveFunctions,
solverOptions
).optimize(),"")
}
// Dump debug information into a file
val fw = new FileWriter(solverOptions.debugInfoPath)
// Write the current parameters being used
fw.write(solverOptions.toString())
    fw.write("\n")
    // Write spark-specific parameters
    fw.write(SolverUtils.getSparkConfString(data.context.getConf))
    fw.write("\n")
// Write values noted from the run
fw.write(debugInfo)
fw.close()
println(debugInfo)
trainedModel
}
}
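// Usage sketch (illustrative; assumes an RDD[LabeledPoint] with 0-indexed labels
// and a configured SolverOptions, both supplied by the caller):
//   val options = new SolverOptions[Vector[Double], MultiClassLabel]()
//   val model = MultiClassSVMWithDBCFW.train(trainData, numClasses = 10, options)
//   val yHat = MultiClassSVMWithDBCFW.predictFn(model, testFeatureVector)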
|
dalab/dissolve-struct
|
dissolve-struct-lib/src/main/scala/ch/ethz/dalab/dissolve/classification/MultiClassSVMWithDBCFW.scala
|
Scala
|
apache-2.0
| 8,464 |
package hud
import javafx.scene.layout.Pane
import javafx.scene.paint.Color
import javafx.scene.canvas.GraphicsContext
import javafx.scene.image.Image
import graphics.GraphicsObject
import graphics.Sprite
/** * * * * * * * * * * * * * * * * * * * * * * * * * * *
* MiniMap class
*
* Renders a hud element in the top right hand corner
* of the screen. Intended to hold the map of the world
* but currently just outputs player debug info.
*
* @author Ryan Needham
*
* * * * * * * * * * * * * * * * * * * * * * * * * * * */
class MiniMap (player: Sprite, screenWidth: Int, screenHeight: Int) extends GraphicsObject {
width = 260
height = 180
x = screenWidth - width - 20
y = 20
val color = Color.web("rgba(0,0,0,0.9)")
/**
* render
*
* @param GraphicsContext
*
     * Fills a semi-transparent rectangle in the top right with
     * a thin white border. Inside the rectangle, player
     * debug info is drawn.
*/
def render (context: GraphicsContext) = {
context.setFill (color)
context.fillRect (getX, getY, getWidth, getHeight)
context.setStroke (Color.LIGHTGREY)
context.setFill (Color.LIGHTGREY)
context.fillText ("posX: " + player.getX, getX + 8, getY + 18)
context.fillText ("posY: " + player.getY, getX + 8, getY + 30)
context.fillText ("targetX: "+ player.getTargetX, getX + 8, getY + 50)
context.fillText ("targetY: "+ player.getTargetY, getX + 8, getY + 62)
context.fillText ("health: " + player.health, getX + 8, getY + 80)
context.fillText ("energy: " + player.energy, getX + 8, getY + 92)
context.fillText ("frame: " + player.activeFrame, getX + 8, getY + 104)
context.fillText ("facing: " + player.getActiveSkin, getX + 8, getY + 116)
context.strokeRect (getX + 4, getY + 4, getWidth - 8, getHeight - 8)
}
}
|
MyForteIsTimeTravel/PanamaEngine
|
src/hud/MiniMap.scala
|
Scala
|
mit
| 2,019 |
/**
* Swaggy Jenkins
* Jenkins API clients generated from Swagger / Open API specification
*
* The version of the OpenAPI document: 1.1.2-pre.0
* Contact: [email protected]
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
package org.openapitools.client.model
case class Pipeline(
`class`: Option[String] = None,
organization: Option[String] = None,
name: Option[String] = None,
displayName: Option[String] = None,
fullName: Option[String] = None,
weatherScore: Option[Int] = None,
estimatedDurationInMillis: Option[Int] = None,
latestRun: Option[PipelinelatestRun] = None
)
|
cliffano/swaggy-jenkins
|
clients/scala-sttp/generated/src/main/scala/org/openapitools/client/model/Pipeline.scala
|
Scala
|
mit
| 719 |
package com.twitter.finagle
package object service {
/**
* A response classifier allows developers to give Finagle the additional
* application specific knowledge necessary in order to properly classify them.
* Without this, Finagle can only safely make judgements about transport
* level failures.
*
* As an example take an HTTP client that receives a response with a 500 status
* code back from a server. To Finagle this is a successful request/response
* based solely on the transport level. The application developer may want to
* treat all 500 status codes as failures and can do so via a
* [[com.twitter.finagle.service.ResponseClassifier]].
*
* It is a [[PartialFunction]] from a request/response pair to a
* [[ResponseClass]] and as such multiple classifiers can be composed
* together via [[PartialFunction.orElse]].
*
* @see `com.twitter.finagle.http.service.HttpResponseClassifier` for some
* HTTP classification tools.
*
* @note Java does not understand the type alias and must be used as
* `PartialFunction` in Java.
*
* @note Finagle's default classifier is
* [[com.twitter.finagle.service.ResponseClassifier.Default]]
* which is a total function fully covering the input domain.
*
* @note it is a good practice for users of `ResponseClassifier.apply` to
* instead use `theClassifier.applyOrElse(input, ResponseClassifier.Default)`
* in order to ensure that the PartialFunction will be fully covering.
*/
type ResponseClassifier = PartialFunction[ReqRep, ResponseClass]
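  // A sketch of a custom classifier composed with the default (illustrative;
  // `isServerError` is an assumed application-level predicate):
  //   val appClassifier: ResponseClassifier = {
  //     case ReqRep(_, com.twitter.util.Return(rep)) if isServerError(rep) =>
  //       ResponseClass.RetryableFailure
  //   }
  //   val total = appClassifier.orElse(ResponseClassifier.Default)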
//
// An alternate approach would've been to allow application developers
// to convert responses into a `Throw` instead of a `ResponseClass`.
//
// Conceptually, this fits more cleanly into the existing Finagle codebase
// and how it handles failures. There were a couple of drawbacks with this
// approach:
//
// 1. It is a strong opinion on the issue of "are errors exceptions?".
// This does not seem like something Finagle should be strongly
// opinionated about.
//
// 2. It makes users do "unnecessary" object modeling in that every
// failure needs to have some corresponding exception. For example,
// `HttpServerErrorException` for HTTP 500s. This is particularly
// uncompelling for developers that use status codes as part of their
// Thrift response.
// In the case where an Exception is returned by a Thrift service,
// but this is not a failure (for example, it is bad user input),
// this would also lead a different kind of unnecessary data modeling
// for creating types that are not Throws.
//
// 3. Converting the real response to a Throw is not an easy migration
// for existing users. An option to allow users to convert back to the
// original response outside of the Finagle stack would likely lead to
// confusion and the abstraction would leak out in places like
// `StatsFilter`.
}
|
koshelev/finagle
|
finagle-core/src/main/scala/com/twitter/finagle/service/package.scala
|
Scala
|
apache-2.0
| 2,984 |
/*
* Copyright (c) 2014-2021 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.catnap
import cats.effect.{ContextShift, IO, Timer}
import cats.implicits._
import minitest.TestSuite
import monix.execution.BufferCapacity.{Bounded, Unbounded}
import monix.execution.ChannelType.{MPMC, MPSC, SPMC, SPSC}
import monix.execution.exceptions.APIContractViolationException
import monix.execution.internal.Platform
import monix.execution.schedulers.TestScheduler
import monix.execution.{BufferCapacity, Scheduler, TestUtils}
import scala.concurrent.TimeoutException
import scala.concurrent.duration._
object ConcurrentChannelFakeSuite extends BaseConcurrentChannelSuite[TestScheduler] {
def setup() = TestScheduler()
def tearDown(env: TestScheduler): Unit =
assert(env.state.tasks.isEmpty, "There should be no tasks left!")
def testIO(name: String, times: Int)(f: Scheduler => IO[Unit]): Unit = {
def repeatTest(test: IO[Unit], n: Int): IO[Unit] =
if (n > 0) test.flatMap(_ => repeatTest(test, n - 1))
else IO.unit
test(name) { ec =>
val result = repeatTest(f(ec), times).unsafeToFuture()
ec.tick(1.day)
result.value match {
case None => throw new TimeoutException("1 day")
case Some(value) => value.get
}
}
}
val boundedConfigForConcurrentSum: Bounded =
Bounded(256)
}
abstract class BaseConcurrentChannelSuite[S <: Scheduler] extends TestSuite[S] with TestUtils {
val boundedConfigForConcurrentSum: Bounded
val iterationsCount = {
if (Platform.isJVM) {
      // Use fewer iterations when running on CI
if (isCI)
1000
else
10000
} else {
100 // JavaScript
}
}
val repeatForFastTests = {
if (Platform.isJVM) 1000 else 100
}
val repeatForSlowTests = {
if (Platform.isJVM) 50 else 1
}
val boundedConfig = ConsumerF.Config(capacity = Some(Bounded(10)))
val unboundedConfig = ConsumerF.Config(capacity = Some(Unbounded()))
implicit def contextShift(implicit s: Scheduler): ContextShift[IO] =
SchedulerEffect.contextShift[IO](s)(IO.ioEffect)
implicit def timer(implicit s: Scheduler): Timer[IO] =
SchedulerEffect.timerLiftIO[IO](s)(IO.ioEffect)
  /** To be implemented by subclasses: runs the given IO-based test, repeated `times` times. */
def testIO(name: String, times: Int = 1)(f: Scheduler => IO[Unit]): Unit
testIO("simple push and pull", times = repeatForFastTests) { implicit ec =>
for {
chan <- ConcurrentChannel[IO].withConfig[Int, Int](boundedConfig)
consume = chan.consume.use { consumer =>
for {
r1 <- consumer.pull
r2 <- consumer.pull
r3 <- consumer.pull
r4 <- consumer.pull
} yield {
assertEquals(r1, Right(1))
assertEquals(r2, Right(2))
assertEquals(r3, Right(3))
assertEquals(r4, Left(0))
}
}
fiber <- consume.start
_ <- chan.awaitConsumers(1)
_ <- chan.push(1)
_ <- chan.push(2)
_ <- chan.push(3)
_ <- chan.halt(0)
r <- fiber.join
} yield r
}
testIO("consumers can receive push", times = repeatForFastTests) { implicit ec =>
for {
chan <- ConcurrentChannel[IO].withConfig[Int, Int](boundedConfig)
fiber <- chan.consume.use(_.pull).start
_ <- chan.awaitConsumers(1)
_ <- chan.push(1)
r <- fiber.join
} yield {
assertEquals(r, Right(1))
}
}
testIO("consumers can wait for push", times = repeatForSlowTests) { implicit ec =>
def consume(c: ConsumerF[IO, Int, Int], acc: Int = 0): IO[Int] =
c.pull.flatMap {
case Left(l) => IO.pure(acc + l)
case Right(r) => consume(c, acc + r)
}
for {
chan <- ConcurrentChannel[IO].withConfig[Int, Int](boundedConfig)
fiber <- chan.consume.use(consume(_)).start
_ <- chan.awaitConsumers(1)
_ <- IO.sleep(3.millis)
_ <- chan.push(1)
_ <- IO.shift *> IO.shift *> chan.push(2)
_ <- IO.sleep(3.millis)
_ <- chan.push(3)
_ <- chan.halt(4)
r <- fiber.join
} yield {
assertEquals(r, 1 + 2 + 3 + 4)
}
}
testIO("consumers can receive pushMany", times = repeatForFastTests) { implicit ec =>
for {
chan <- ConcurrentChannel[IO].withConfig[Int, Int](boundedConfig)
fiber <- chan.consume.use(_.pullMany(10, 10)).start
_ <- chan.awaitConsumers(1)
_ <- chan.pushMany(1 to 10)
r <- fiber.join.map(_.map(_.sum))
} yield {
assertEquals(r, Right(55))
}
}
testIO("consumers can wait for pushMany", times = repeatForSlowTests) { implicit ec =>
def consume(c: ConsumerF[IO, Int, Int], acc: Int = 0): IO[Int] =
c.pull.flatMap {
case Left(l) => IO.pure(acc + l)
case Right(r) => consume(c, acc + r)
}
for {
chan <- ConcurrentChannel[IO].withConfig[Int, Int](boundedConfig)
fiber <- chan.consume.use(consume(_)).start
_ <- chan.awaitConsumers(1)
_ <- IO.sleep(3.millis)
_ <- chan.pushMany(1 to 20)
_ <- IO.shift *> IO.shift *> chan.pushMany(21 to 40)
_ <- IO.sleep(3.millis)
_ <- chan.pushMany(41 to 60)
_ <- chan.halt(100)
r <- fiber.join
} yield {
assertEquals(r, 100 + 30 * 61)
}
}
testIO("pullMany back-pressuring for minLength, with maxLength", times = repeatForFastTests) { implicit ec =>
val channel = ConcurrentChannel[IO].unsafe[Int, Int]()
val batch = channel.consume
.use(_.pullMany(10, 10))
.map {
case l @ Left(_) => l
case Right(seq) =>
assertEquals(seq.length, 10)
Right(seq.sum)
}
.start
def loop(n: Int): IO[Unit] =
channel.push(n).flatMap { _ =>
if (n - 1 > 0) loop(n - 1)
else IO.unit
}
for {
f <- batch
_ <- channel.awaitConsumers(1)
_ <- loop(9)
_ <- loop(10)
r <- f.join
} yield {
assertEquals(r, Right(5 * 11))
}
}
testIO("subscribe after channel was closed") { implicit ec =>
for {
channel <- ConcurrentChannel[IO].of[Int, Int]
_ <- channel.push(1)
_ <- channel.halt(0)
r <- channel.consume.use(_.pull)
} yield {
assertEquals(r, Left(0))
}
}
testIO("push after channel was closed") { implicit ec =>
def consume(c: ConsumerF[IO, Int, Int]): IO[Unit] =
c.pull.flatMap {
case Right(_) =>
IO.raiseError(new APIContractViolationException("push after halt"))
case Left(value) =>
assertEquals(value, 0)
IO.sleep(1.milli) *> consume(c)
}
for {
channel <- ConcurrentChannel[IO].of[Int, Int]
_ <- channel.halt(0)
fiber <- channel.consume.use(consume).start
b1 <- channel.push(1)
b2 <- channel.push(2)
_ <- channel.halt(10)
_ <- fiber.join.timeoutTo(10.millis, IO.unit).guarantee(fiber.cancel)
} yield {
assertEquals(b1, false)
assertEquals(b2, false)
}
}
testIO("pushMany after channel was closed") { implicit ec =>
def consume(c: ConsumerF[IO, Int, Int]): IO[Unit] =
c.pull.flatMap {
case Right(_) =>
IO.raiseError(new APIContractViolationException("push after halt"))
case Left(value) =>
assertEquals(value, 0)
IO.sleep(1.milli) *> consume(c)
}
for {
channel <- ConcurrentChannel[IO].of[Int, Int]
_ <- channel.halt(0)
fiber <- channel.consume.use(consume).start
b1 <- channel.pushMany(Seq(1, 2, 3))
_ <- channel.halt(10)
_ <- fiber.join.timeoutTo(10.millis, IO.unit).guarantee(fiber.cancel)
} yield {
assertEquals(b1, false)
}
}
testIO("push/pushMany with no consumers") { implicit ec =>
def consume(c: ConsumerF[IO, Int, Int], acc: Int = 0): IO[Int] =
c.pull.flatMap {
case Left(l) => IO.pure(acc + l)
case Right(r) => consume(c, acc + r)
}
for {
channel <- ConcurrentChannel[IO].of[Int, Int]
_ <- channel.push(1)
_ <- channel.push(2)
_ <- channel.push(3)
_ <- channel.pushMany(Seq(4, 5, 6))
fiber <- channel.consume.use(consume(_)).start
_ <- channel.awaitConsumers(1)
_ <- channel.push(100)
_ <- channel.pushMany(Seq(100, 100))
_ <- channel.halt(100)
r <- fiber.join
} yield {
assertEquals(r, 400)
}
}
testIO("pushMany with multiple consumers") { implicit ec =>
def consume(c: ConsumerF[IO, Int, Int], acc: Int = 0): IO[Int] =
c.pull.flatMap {
case Left(l) => IO.pure(acc + l)
case Right(r) => consume(c, acc + r)
}
for {
channel <- ConcurrentChannel[IO].of[Int, Int]
fiber1 <- channel.consume.use(consume(_)).start
fiber2 <- channel.consume.use(consume(_)).start
fiber3 <- channel.consume.use(consume(_)).start
_ <- channel.awaitConsumers(3)
_ <- channel.push(100)
_ <- channel.pushMany(Seq(100, 100))
_ <- channel.halt(100)
r1 <- fiber1.join
r2 <- fiber2.join
r3 <- fiber3.join
} yield {
assertEquals(r1, 400)
assertEquals(r2, 400)
assertEquals(r3, 400)
}
}
testIO("halt with awaitConsumers active") { implicit ec =>
for {
channel <- ConcurrentChannel[IO].of[Int, Int]
await <- channel.awaitConsumers(3).start
_ <- await.join.timeoutTo(1.millis, IO.unit)
_ <- channel.halt(0)
r <- await.join
} yield {
assertEquals(r, false)
}
}
testIO("awaitConsumers after halt") { implicit ec =>
for {
channel <- ConcurrentChannel[IO].of[Int, Int]
_ <- channel.halt(0)
r <- channel.awaitConsumers(3)
} yield {
assertEquals(r, false)
}
}
testIO("awaitConsumers after consume, consume/release, consume, consume") { implicit ec =>
for {
channel <- ConcurrentChannel[IO].of[Int, Int]
c1 <- channel.consume.use(c => c.pull *> c.pull).start
await <- channel.awaitConsumers(3).start
c2 <- channel.consume.use(c => c.pull).start
_ <- await.join.timeoutTo(3.millis, IO.unit)
_ <- channel.push(1)
r2 <- c2.join
c3 <- channel.consume.use(c => c.pull).start
c4 <- channel.consume.use(c => c.pull).start
_ <- await.join
_ <- channel.halt(0)
r1 <- c1.join
r3 <- c3.join
r4 <- c4.join
} yield {
assertEquals(r1, Left(0))
assertEquals(r2, Right(1))
assertEquals(r3, Left(0))
assertEquals(r4, Left(0))
}
}
testIO("pushMany with empty sequence") { implicit ec =>
def consume(c: ConsumerF[IO, Int, Int], acc: Int = 0): IO[Int] =
c.pull.flatMap {
case Left(l) => IO.pure(acc + l)
case Right(r) => consume(c, acc + r)
}
for {
channel <- ConcurrentChannel[IO].of[Int, Int]
fiber <- channel.consume.use(consume(_)).start
_ <- channel.awaitConsumers(1)
_ <- channel.pushMany(Seq.empty)
_ <- channel.halt(100)
r <- fiber.join
} yield {
assertEquals(r, 100)
}
}
testIO("cancellation of paused pull") { implicit ec =>
def consume(c: ConsumerF[IO, Int, Int], acc: Int = 0): IO[Int] =
c.pull.flatMap {
case Left(l) => IO.pure(acc + l)
case Right(r) => consume(c, acc + r)
}
for {
channel <- ConcurrentChannel[IO].of[Int, Int]
fiber <- channel.consume.use(consume(_)).start
_ <- channel.awaitConsumers(1)
_ <- fiber.cancel
} yield ()
}
testIO(
s"concurrent sum via consumer.pull; MPMC; producers=4, consumers=4, workers=4, capacity=$boundedConfigForConcurrentSum") {
implicit ec =>
testConcurrentSum(
producers = 4,
consumers = 4,
workersPerConsumer = 4,
boundedConfigForConcurrentSum,
count = iterationsCount,
pullMany = false
)
}
testIO("concurrent sum via consumer.pull; MPMC; producers=4, consumers=4, workers=4, capacity=Unbounded") {
implicit ec =>
testConcurrentSum(
producers = 4,
consumers = 4,
workersPerConsumer = 4,
capacity = Unbounded(),
count = iterationsCount,
pullMany = false
)
}
testIO(
s"concurrent sum via consumer.pull; SPMC; producers=1, consumers=4, workers=4, capacity=$boundedConfigForConcurrentSum") {
implicit ec =>
testConcurrentSum(
producers = 1,
consumers = 4,
workersPerConsumer = 4,
boundedConfigForConcurrentSum,
count = iterationsCount,
pullMany = false
)
}
testIO("concurrent sum via consumer.pull; SPMC; producers=1, consumers=4, workers=4, capacity=Unbounded") {
implicit ec =>
testConcurrentSum(
producers = 1,
consumers = 4,
workersPerConsumer = 4,
capacity = Unbounded(),
count = iterationsCount,
pullMany = false
)
}
testIO(
s"concurrent sum via consumer.pull; MPMC; producers=4, consumers=1, workers=4, capacity=$boundedConfigForConcurrentSum") {
implicit ec =>
testConcurrentSum(
producers = 4,
consumers = 1,
workersPerConsumer = 4,
boundedConfigForConcurrentSum,
count = iterationsCount,
pullMany = false
)
}
testIO("concurrent sum via consumer.pull; MPMC; producers=4, consumers=1, workers=4, capacity=Unbounded") {
implicit ec =>
testConcurrentSum(
producers = 4,
consumers = 1,
workersPerConsumer = 4,
capacity = Unbounded(),
count = iterationsCount,
pullMany = false
)
}
testIO(
s"concurrent sum via consumer.pull; MPSC; producers=4, consumers=4, workers=1, capacity=$boundedConfigForConcurrentSum") {
implicit ec =>
testConcurrentSum(
producers = 4,
consumers = 4,
workersPerConsumer = 1,
boundedConfigForConcurrentSum,
count = iterationsCount,
pullMany = false
)
}
testIO("concurrent sum via consumer.pull; MPSC; producers=4, consumers=4, workers=1, capacity=Unbounded") {
implicit ec =>
testConcurrentSum(
producers = 4,
consumers = 4,
workersPerConsumer = 1,
capacity = Unbounded(),
count = iterationsCount,
pullMany = false
)
}
testIO(
s"concurrent sum via consumer.pull; SPSC; producers=1, consumers=1, workers=1, capacity=$boundedConfigForConcurrentSum") {
implicit ec =>
testConcurrentSum(
producers = 1,
consumers = 1,
workersPerConsumer = 1,
boundedConfigForConcurrentSum,
count = iterationsCount,
pullMany = false
)
}
testIO("concurrent sum via consumer.pull; SPSC; producers=1, consumers=1, workers=1, capacity=Unbounded") {
implicit ec =>
testConcurrentSum(
producers = 1,
consumers = 1,
workersPerConsumer = 1,
capacity = Unbounded(),
count = iterationsCount,
pullMany = false
)
}
testIO(
s"concurrent sum via consumer.pullMany; MPMC; producers=4, consumers=4, workers=4, capacity=$boundedConfigForConcurrentSum") {
implicit ec =>
testConcurrentSum(
producers = 4,
consumers = 4,
workersPerConsumer = 4,
boundedConfigForConcurrentSum,
count = iterationsCount,
pullMany = true
)
}
testIO("concurrent sum via consumer.pullMany; MPMC; producers=4, consumers=4, workers=4, capacity=Unbounded") {
implicit ec =>
testConcurrentSum(
producers = 4,
consumers = 4,
workersPerConsumer = 4,
capacity = Unbounded(),
count = iterationsCount,
pullMany = true
)
}
testIO(
s"concurrent sum via consumer.pullMany; SPMC; producers=1, consumers=4, workers=4, capacity=$boundedConfigForConcurrentSum") {
implicit ec =>
testConcurrentSum(
producers = 1,
consumers = 4,
workersPerConsumer = 4,
boundedConfigForConcurrentSum,
count = iterationsCount,
pullMany = true
)
}
testIO("concurrent sum via consumer.pullMany; SPMC; producers=1, consumers=4, workers=4, capacity=Unbounded") {
implicit ec =>
testConcurrentSum(
producers = 1,
consumers = 4,
workersPerConsumer = 4,
capacity = Unbounded(),
count = iterationsCount,
pullMany = true
)
}
testIO(
s"concurrent sum via consumer.pullMany; MPMC; producers=4, consumers=1, workers=4, capacity=$boundedConfigForConcurrentSum") {
implicit ec =>
testConcurrentSum(
producers = 4,
consumers = 1,
workersPerConsumer = 4,
boundedConfigForConcurrentSum,
count = iterationsCount,
pullMany = true
)
}
testIO("concurrent sum via consumer.pullMany; MPMC; producers=4, consumers=1, workers=4, capacity=Unbounded") {
implicit ec =>
testConcurrentSum(
producers = 4,
consumers = 1,
workersPerConsumer = 4,
capacity = Unbounded(),
count = iterationsCount,
pullMany = true
)
}
testIO(
s"concurrent sum via consumer.pullMany; MPSC; producers=4, consumers=4, workers=1, capacity=$boundedConfigForConcurrentSum") {
implicit ec =>
testConcurrentSum(
producers = 4,
consumers = 4,
workersPerConsumer = 1,
boundedConfigForConcurrentSum,
count = iterationsCount,
pullMany = true
)
}
testIO("concurrent sum via consumer.pullMany; MPSC; producers=4, consumers=4, workers=1, capacity=Unbounded") {
implicit ec =>
testConcurrentSum(
producers = 4,
consumers = 4,
workersPerConsumer = 1,
capacity = Unbounded(),
count = iterationsCount,
pullMany = true
)
}
testIO(
s"concurrent sum via consumer.pullMany; SPSC; producers=1, consumers=1, workers=1, capacity=$boundedConfigForConcurrentSum") {
implicit ec =>
testConcurrentSum(
producers = 1,
consumers = 1,
workersPerConsumer = 1,
boundedConfigForConcurrentSum,
count = iterationsCount,
pullMany = true
)
}
testIO("concurrent sum via consumer.pullMany; SPSC; producers=1, consumers=1, workers=1, capacity=Unbounded") {
implicit ec =>
testConcurrentSum(
producers = 1,
consumers = 1,
workersPerConsumer = 1,
capacity = Unbounded(),
count = iterationsCount,
pullMany = true
)
}
def testConcurrentSum(
producers: Int,
consumers: Int,
workersPerConsumer: Int,
capacity: BufferCapacity,
count: Int,
pullMany: Boolean)(implicit ec: Scheduler): IO[Unit] = {
val channelType =
if (producers > 1) {
if (workersPerConsumer > 1) MPMC
else MPSC
} else {
if (workersPerConsumer > 1) SPMC
else SPSC
}
def consume(consumer: ConsumerF[IO, Int, Int]): IO[Long] = {
def worker(acc: Long): IO[Long] = {
if (pullMany)
consumer.pullMany(1, 16).flatMap {
case Left(i) => IO.pure(acc + i)
case Right(seq) =>
assert(seq.length <= 16, s"seq.length (${seq.length}) <= 16")
worker(acc + seq.sum)
}
else
consumer.pull.flatMap {
case Left(i) => IO.pure(acc + i)
case Right(i) => worker(acc + i)
}
}
if (workersPerConsumer > 1) {
val list = (0 until workersPerConsumer).map(_ => worker(0)).toList
list.parSequence.map(_.sum)
} else {
worker(0)
}
}
def consumeMany(channel: ConcurrentChannel[IO, Int, Int]): IO[Long] = {
val task = channel
.consumeWithConfig(ConsumerF.Config(Some(capacity), Some(channelType.consumerType), None))
.use(ref => consume(ref))
if (consumers < 2) {
task
} else {
val list = (0 until consumers).map(_ => task).toList
list.parSequence.map(_.sum)
}
}
def produce(channel: ConcurrentChannel[IO, Int, Int]): IO[Unit] = {
def loop(channel: ConcurrentChannel[IO, Int, Int], n: Int): IO[Unit] =
if (n > 0) channel.push(n).flatMap(_ => loop(channel, n - 1))
else IO.unit
val task = loop(channel, count)
if (producers < 2)
task
else
(0 until producers).map(_ => task).toList.parSequence_
}
for {
channel <- ConcurrentChannel[IO].withConfig[Int, Int](producerType = channelType.producerType)
fiber <- consumeMany(channel).start
_ <- channel.awaitConsumers(consumers)
_ <- produce(channel)
_ <- channel.halt(0)
sum <- fiber.join
} yield {
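      // Each producer pushes count..1, a Gauss sum of count * (count + 1) / 2;
      // consume is broadcast, so every consumer sees every producer's stream,
      // hence the product below. The halt value 0 contributes nothing.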
val perProducer = count.toLong * (count + 1) / 2
assertEquals(sum, perProducer * producers * consumers)
}
}
}
|
monifu/monix
|
monix-catnap/shared/src/test/scala/monix/catnap/ConcurrentChannelSuite.scala
|
Scala
|
apache-2.0
| 21,886 |
/*
* Licensed to Intel Corporation under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* Intel Corporation licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn
import com.intel.analytics.bigdl.tensor.Tensor
import org.scalatest.{FlatSpec, Matchers}
@com.intel.analytics.bigdl.tags.Parallel
class BatchNormalizationSpec extends FlatSpec with Matchers {
"A BatchNormalization" should "generate correct output" in {
val bn = new BatchNormalization[Double](3)
bn.weight(1) = 0.1
bn.weight(2) = 0.2
bn.weight(3) = 0.3
bn.bias(1) = 0.1
bn.bias(2) = 0.2
bn.bias(3) = 0.3
val input = Tensor[Double](3, 3)
var i = 0
input.apply1(e => {
i += 1; i
})
val output = bn.forward(input)
output.nDimension() should be(2)
output.size(1) should be(3)
output.size(2) should be(3)
output(Array(1, 1)) should be(-0.0225 +- 0.0001)
output(Array(1, 2)) should be(-0.0449 +- 0.0001)
output(Array(1, 3)) should be(-0.0674 +- 0.0001)
output(Array(2, 1)) should be(0.1 +- 0.0001)
output(Array(2, 2)) should be(0.2 +- 0.0001)
output(Array(2, 3)) should be(0.3 +- 0.0001)
output(Array(3, 1)) should be(0.2225 +- 0.0001)
output(Array(3, 2)) should be(0.4449 +- 0.0001)
output(Array(3, 3)) should be(0.6674 +- 0.0001)
}
"A BatchNormalization" should "generate correct gradient" in {
val bn = new BatchNormalization[Double](3)
bn.weight(1) = 0.1
bn.weight(2) = 0.2
bn.weight(3) = 0.3
bn.bias(1) = 0.1
bn.bias(2) = 0.2
bn.bias(3) = 0.3
val input = Tensor[Double](3, 3)
var i = 0
input.apply1(e => {
i += 1; i
})
val output = bn.forward(input)
val gradOutput = Tensor[Double](3, 3)
var j = 0.0
gradOutput.apply1(e => {
j += 0.1; j
})
val gradInput = bn.backward(input, gradOutput)
gradInput.nDimension() should be(2)
gradInput.size(1) should be(3)
gradInput.size(2) should be(3)
gradInput(Array(1, 1)) should be(-2.0412e-8 +- 1e-12)
gradInput(Array(1, 2)) should be(-4.0825e-8 +- 1e-12)
gradInput(Array(1, 3)) should be(-6.1237e-8 +- 1e-12)
gradInput(Array(2, 1)) should be(-0.0 +- 0.0001)
gradInput(Array(2, 2)) should be(-0.0 +- 0.0001)
gradInput(Array(2, 3)) should be(-0.0 +- 0.0001)
gradInput(Array(3, 1)) should be(2.0412e-8 +- 1e-12)
gradInput(Array(3, 2)) should be(4.0825e-8 +- 1e-12)
gradInput(Array(3, 3)) should be(6.1237e-8 +- 1e-12)
bn.gradWeight.nDimension() should be(1)
bn.gradWeight.size(1) should be(3)
bn.gradWeight(Array(1)) should be(0.7348 +- 0.0001)
bn.gradWeight(Array(2)) should be(0.7348 +- 0.0001)
bn.gradWeight(Array(3)) should be(0.7348 +- 0.0001)
bn.gradBias.nDimension() should be(1)
bn.gradBias.size(1) should be(3)
bn.gradBias(Array(1)) should be(1.2 +- 0.0001)
bn.gradBias(Array(2)) should be(1.5 +- 0.0001)
bn.gradBias(Array(3)) should be(1.8 +- 0.0001)
}
"A BatchNormalization evaluating" should "generate correct output" in {
val bn = new BatchNormalization[Double](3)
bn.weight(1) = 0.1
bn.weight(2) = 0.2
bn.weight(3) = 0.3
bn.bias(1) = 0.1
bn.bias(2) = 0.2
bn.bias(3) = 0.3
val input = Tensor[Double](3, 3)
var i = 0
input.apply1(e => {
i += 1; i
})
var output = bn.forward(input)
val gradOutput = Tensor[Double](3, 3)
var j = 0.0
gradOutput.apply1(e => {
j += 0.1; j
})
val gradInput = bn.backward(input, gradOutput)
bn.evaluate()
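    // In evaluate mode the layer should use its accumulated running statistics,
    // so the repeated forward passes below are expected to print identical output.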
output = bn.forward(input)
println(output)
output = bn.forward(input)
println(output)
output = bn.forward(input)
println(output)
output = bn.forward(input)
println(output)
}
it should "generate correct output for no batch" in {
val bn = new BatchNormalization[Double](3)
bn.weight(1) = 0.1
bn.weight(2) = 0.2
bn.weight(3) = 0.3
bn.bias(1) = 0.1
bn.bias(2) = 0.2
bn.bias(3) = 0.3
bn.evaluate()
val input = Tensor[Double](3)
var i = 0
input.apply1(e => {
i += 1; i
})
val output = bn.forward(input)
output.valueAt(1) should be(0.2 +- 0.00001)
output.valueAt(2) should be(0.6 +- 0.00001)
output.valueAt(3) should be(1.2 +- 0.00001)
}
}
|
zhichao-li/BigDL
|
dl/src/test/scala/com/intel/analytics/bigdl/nn/BatchNormalizationSpec.scala
|
Scala
|
apache-2.0
| 4,931 |
package katas.scala.bsearchtree
import org.junit.Test
import org.scalatest.Matchers
/**
* User: dima
* Date: 04/11/2011
*/
class BST3 extends Matchers {
@Test def shouldFindIfBinarySearchTreeContainsElement() {
var bst = BST()
bst.contains(1) should equal(false)
bst = bst.add(1)
bst.contains(0) should equal(false)
bst.contains(1) should equal(true)
bst.contains(2) should equal(false)
bst = bst.add(2)
bst.contains(0) should equal(false)
bst.contains(1) should equal(true)
bst.contains(2) should equal(true)
bst.contains(3) should equal(false)
bst = bst.add(3)
bst.contains(0) should equal(false)
bst.contains(1) should equal(true)
bst.contains(2) should equal(true)
bst.contains(3) should equal(true)
bst.contains(4) should equal(false)
}
@Test def shouldAddElementsToBottomOfBST() {
BST() should equal(BST())
BST(1) should equal(BST(1))
BST().add(1) should equal(BST(1))
BST(1).add(2) should equal(BST(1, null, BST(2)))
BST(2).add(1) should equal(BST(2, BST(1)))
BST(1).add(2).add(3) should equal(BST(1, null, BST(2, null, BST(3))))
BST(3).add(2).add(1) should equal(BST(3, BST(2, BST(1))))
BST(2).add(3).add(1) should equal(BST(2, BST(1), BST(3)))
BST(2).add(1).add(3) should equal(BST(2, BST(1), BST(3)))
BST(3).add(2).add(1).add(4) should equal(BST(3, BST(2, BST(1)), BST(4)))
}
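  // Int.MinValue serves as the "empty tree" sentinel below, which is why add()
  // treats it the same as null.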
case class BST(value: Int = Int.MinValue, left: BST = null, right: BST = null) {
def add(n: Int): BST = {
add(this, n)
}
private def add(bst: BST, n: Int): BST = {
if (bst == null || bst.value == Int.MinValue) BST(n)
else if (n <= bst.value) BST(bst.value, add(bst.left, n), bst.right)
else if (n > bst.value) BST(bst.value, bst.left, add(bst.right, n))
else throw new IllegalStateException()
}
def contains(n: Int): Boolean = {
contains(this, n)
}
private def contains(bst: BST, n: Int): Boolean = {
if (bst == null) false
else if (n == bst.value) true
else if (n < bst.value) contains(bst.left, n)
else if (n > bst.value) contains(bst.right, n)
else throw new IllegalStateException()
}
}
}
|
dkandalov/katas
|
scala/src/katas/scala/bsearchtree/BST3.scala
|
Scala
|
unlicense
| 2,212 |