code
stringlengths 5
1M
| repo_name
stringlengths 5
109
| path
stringlengths 6
208
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 5
1M
|
---|---|---|---|---|---|
/* __ *\\
** ________ ___ / / ___ __ ____ Scala.js Benchmarks **
** / __/ __// _ | / / / _ | __ / // __/ Adam Burmister **
** __\\ \\/ /__/ __ |/ /__/ __ |/_// /_\\ \\ 2012, Google, Inc **
** /____/\\___/_/ |_/____/_/ | |__/ /____/ 2013, Jonas Fonseca **
** |/____/ **
\\* */
// The ray tracer code in this file is written by Adam Burmister. It
// is available in its original form from:
//
// http://labs.flog.co.nz/raytracer/
//
// Ported from the v8 benchmark suite by Google 2012.
// Ported from the Dart benchmark_harness to Scala.js by Jonas Fonseca 2013
package org.scalajs.benchmark.tracer
/** Result of testing a ray against a shape.
 *
 *  Defaults describe a miss: `isHit = false`, no shape/position/normal
 *  (note these default to `null`, not `Option` — preserved from the
 *  original v8/Dart benchmark port).
 *
 *  @param shape    the shape that was hit, or null on a miss
 *  @param position world-space point of intersection, or null on a miss
 *  @param normal   surface normal at the intersection, or null on a miss
 *  @param color    surface color sampled at the intersection (black by default)
 *  @param distance parametric distance along the ray to the hit point
 *  @param isHit    whether the ray actually intersected the shape
 *  @param hitCount auxiliary counter (unused by the shapes in this file)
 */
case class IntersectionInfo(
    shape: Shape = null,
    position: Vector = null,
    normal: Vector = null,
    color: Color = new Color(0.0, 0.0, 0.0),
    distance: Double = 0.0,
    isHit: Boolean = false,
    hitCount: Int = 0)
/** Base class for ray-traceable geometry.
 *
 *  @param position shape location (for a plane this holds its normal — see Plane)
 *  @param material surface material used for shading
 */
abstract class Shape(val position: Vector, val material: Material) {
  /** Tests `ray` against this shape; a miss is an IntersectionInfo with isHit = false. */
  def intersect(ray: Ray): IntersectionInfo
  override def toString = "Shape"
}
/** An infinite plane. `position` is (re)used as the plane normal and `d` is the
 *  plane offset, i.e. points satisfy normal·p + d = 0.
 */
class Plane(position: Vector, val d: Double, material: Material) extends Shape(position, material) {

  /** Ray/plane intersection.
   *
   *  Returns a miss when the ray is parallel to the plane (Vd == 0) or when the
   *  intersection lies behind the ray origin (t <= 0). Otherwise samples the
   *  material — with planar (u, v) texture coordinates when it has a texture.
   *
   *  Rewritten as a single expression: the original used early `return`s and
   *  stray semicolons, which are non-idiomatic Scala.
   */
  def intersect(ray: Ray): IntersectionInfo = {
    val vd = this.position.dot(ray.direction)
    if (vd == 0) {
      new IntersectionInfo() // ray is parallel to the plane: no intersection
    } else {
      val t = -(this.position.dot(ray.position) + this.d) / vd
      if (t <= 0) {
        new IntersectionInfo() // intersection is behind the ray origin: no hit
      } else {
        val intersection = ray.position + ray.direction.multiplyScalar(t)
        val color =
          if (this.material.hasTexture) {
            // Build an (arbitrary) tangent basis from the normal for u/v lookup.
            val vU = new Vector(this.position.y, this.position.z, -this.position.x)
            val vV = vU.cross(this.position)
            val u: Double = intersection.dot(vU)
            val v: Double = intersection.dot(vV)
            this.material.getColor(u, v)
          } else {
            this.material.getColor(0, 0)
          }
        new IntersectionInfo(
          shape = this,
          isHit = true,
          position = intersection,
          normal = this.position, // the plane normal is constant everywhere
          distance = t,
          color = color)
      }
    }
  }

  override def toString = s"Plane [$position, d=$d]"
}
/** A sphere centred at `position` with the given `radius`. */
class Sphere(position: Vector, radius: Double, material: Material) extends Shape(position, material) {

  /** Ray/sphere intersection via the reduced quadratic: with dst the vector from
   *  centre to ray origin, b = dst·dir and c = dst·dst - r², the discriminant is
   *  b² - c; a non-positive discriminant is treated as a miss.
   *
   *  Rewritten as a single expression: the original used an early `return` and
   *  passed `null` positionally (`new IntersectionInfo(null)`) where the default
   *  already is null.
   */
  def intersect(ray: Ray): IntersectionInfo = {
    val dst = ray.position - this.position
    val b = dst.dot(ray.direction)
    val c = dst.dot(dst) - (radius * radius)
    val discriminant = (b * b) - c
    if (discriminant <= 0) {
      new IntersectionInfo() // ray misses (or merely grazes) the sphere
    } else {
      // Nearer of the two roots. NOTE(review): this can be negative when the ray
      // origin is inside the sphere — preserved from the original implementation.
      val distance = (-b) - math.sqrt(discriminant)
      val pos = ray.position + ray.direction.multiplyScalar(distance)
      new IntersectionInfo(
        shape = this,
        isHit = true,
        position = pos,
        normal = (pos - this.position).normalize,
        distance = distance,
        color = this.material.getColor(0, 0))
    }
  }

  override def toString = s"Sphere [position=$position, radius=$radius]"
}
|
jonas/scala-js-benchmarks
|
tracer/src/main/scala/org/scalajs/benchmark/tracer/Shapes.scala
|
Scala
|
bsd-3-clause
| 3,060 |
package blended.updater.remote
import java.io.File
import scala.util.Try
import blended.updater.config.{ ConfigWriter, OverlayConfig, OverlayConfigCompanion }
import blended.util.logging.Logger
import com.typesafe.config.ConfigFactory
/** Persists [[OverlayConfig]]s as HOCON files below `storageDir`.
 *
 *  The directory is scanned lazily on first use; afterwards an in-memory map
 *  (file -> config) mirrors the on-disk state.
 */
class FileSystemOverlayConfigPersistor(storageDir: File) extends OverlayConfigPersistor {

  private[this] val log = Logger[FileSystemOverlayConfigPersistor]

  // Cache of configs already on disk, keyed by their canonical file location.
  private[this] var overlayConfigs: Map[File, OverlayConfig] = Map()
  // Fixed spelling of the (private) flag name; behavior unchanged.
  private[this] var initialized: Boolean = false

  /** Canonical file name for an overlay config: `<name>-<version>.conf`. */
  def overlayConfigFileName(oc: OverlayConfig): String = s"${oc.name}-${oc.version}.conf"

  /** Scans `storageDir` and populates the cache. Files that fail to parse, or
   *  whose name does not match the canonical name derived from their content,
   *  are skipped.
   */
  def initialize(): Unit = {
    log.debug(s"About to initialize overlay config persistor for storageDir: ${storageDir}")
    overlayConfigs = if (!storageDir.exists()) {
      Map()
    } else {
      val files = Option(storageDir.listFiles()).getOrElse(Array())
      val ocs = files.flatMap { file =>
        val oc = Try {
          ConfigFactory.parseFile(file).resolve()
        }.flatMap { rawConfig =>
          OverlayConfigCompanion.read(rawConfig)
        }
        // Bug fix: "{oc}" was missing the interpolator '$', so the literal text
        // "{oc}" was logged instead of the parse result.
        log.debug(s"Found file: ${file} with: ${oc}")
        oc.toOption.map(oc => file -> oc)
      }
      ocs.filter { case (file, oc) => file.getName() == overlayConfigFileName(oc) }.toMap
    }
    initialized = true
  }

  /** Writes `overlayConfig` to its canonical file.
   *
   *  If the file already exists with the same config the call is a no-op;
   *  if it exists with a *different* config, fails via `sys.error`.
   */
  override def persistOverlayConfig(overlayConfig: OverlayConfig): Unit = {
    if (!initialized) initialize()
    val configFile = new File(storageDir, overlayConfigFileName(overlayConfig))
    if (configFile.exists()) {
      // Collision: the target file already exists.
      if (overlayConfigs.get(configFile) == Some(overlayConfig)) {
        // Known and identical, so silently ignore.
        log.debug("OverlayConfig already persistent")
      } else {
        val msg = "Cannot persist overlay config. Storage location already taken for a different configuration."
        log.error(s"${msg} Found file ${configFile} with config: ${overlayConfigs.get(configFile)}")
        sys.error(msg)
      }
    }
    ConfigWriter.write(OverlayConfigCompanion.toConfig(overlayConfig), configFile, None)
    overlayConfigs += configFile -> overlayConfig
  }

  /** All currently known overlay configs (initializing the cache on first use). */
  override def findOverlayConfigs(): List[OverlayConfig] = {
    if (!initialized) initialize()
    overlayConfigs.values.toList
  }
}
|
lefou/blended
|
blended.updater.remote/src/main/scala/blended/updater/remote/FileSystemOverlayConfigPersistor.scala
|
Scala
|
apache-2.0
| 2,274 |
package vu.elements.of.style
/**
* @author v.uspenskiy
* @since 25/06/15
*/
object b$expression {
  // Worked examples on expression style ("The Elements of Programming Style"):
  // each deliberately clumsy snippet is followed by commentary and a better version.
  // The "bad" code below is intentional teaching material — do not "fix" it.

  // It is easy to mistake a sequence of overly-simple expressions for profundity:
  def aSophisticatedLargest(a: Int, b: Int, c: Int) = {
    var large: Int = 0
    if(a > b) {
      large = a
    } else {
      large = b
    }
    if(large > c) {
    } else {
      large = c
    }
    large
  }

  // In the end `large` contains the largest of `a`, `b` and `c`
  // 10 lines to find a biggest number
  // There is a number of ways to do this computation, e.g.
  def anIllustrativeLargest(a: Int, b: Int, c: Int) = {
    var large: Int = a
    if(b > large) large = b
    if(c > large) large = c
    large
  }

  // Say what you mean, simply and directly!
  def aBetterLargest(a: Int, b: Int, c: Int) = {
    math.max(a, math.max(b, c))
  }

  // Library functions let you build on the work of others,
  // instead of starting from scratch each time
  // Use library functions!

  // Peculiar modes of expression often arise out of attempts to write "efficient code"
  // NOTE(review): procedure syntax (no "=") makes this return Unit, so the final
  // `fx` is computed and then discarded — kept as-is for the illustration.
  def expression(x1: Double, x2: Double) {
    val f1 = x1 - x2 * x2
    val f2 = 1.00 - x2
    val fx = f1 * f1 + f2 * f2
    // Note that it is more efficient to compute
    // f1*f1 than to compute f1^2
    fx
  }

  // Whether "efficient" means "takes less time" or "takes less machine code",
  // the comment is not always true. Many compilers would in fact generate
  // shorter and faster code for:

  // Needed to make a point
  implicit class PowDouble(d: Double) {
    def ^(d2: Double) = math.pow(d, d2)
  }

  // NOTE(review): `^` has low operator precedence in Scala, so `x1 - x2^2`
  // parses as `(x1 - x2)^2` — confirm this matches the intended formula.
  def inlineExpression(x1: Double, x2: Double): Unit = {
    (x1 - x2^2)^2 + (1.00 - x2)^2
  }

  // The fewer temporary variables in a program, the less chance
  // there is that one will not be properly initialized, or that one
  // will be altered unexpectedly before it is used,
  // and the easier the program is to understand
  // Avoid temporary variables!

  // A program usually has to be read several times in the process of getting it debugged.
  // The harder it is for people to grasp the intent of any given section,
  // the longer it will be before the program becomes operational.
  //
  // Trying to outsmart a compiler defeats much of the purpose of using one.
  // Write clearly — don't sacrifice clarity for "efficiency"!
  def aVariation() = {
    var no: Int = 42
    def putPage() = { /* Doing something */ }
    // Note that `110010` in binary is `50` in decimal
    // This will be used for line counting.
    if(no > 110010) {
      putPage()
      no = 0
    }
  }

  // The programmer evidently hopes to avoid a run-time type-conversion by using
  // binary literal. One of the first services to be automated in early computer
  // languages was the conversion of decimal to binary in compile-time.
  // Let the machine do the dirty work!

  // Repeated patterns of code catch the eye when scanning listings:
  def manipulateTriangle(x1: Double, x2: Double, x3: Double,
                         y1: Double, y2: Double, y3: Double) {
    // Compute lengths of sides
    val ab = math.sqrt(math.pow(x2 - x1, 2) + math.pow(y2 - y1, 2))
    val ac = math.sqrt(math.pow(x3 - x1, 2) + math.pow(y3 - y1, 2))
    val bc = math.sqrt(math.pow(x3 - x2, 2) + math.pow(y3 - y2, 2))
    // Compute area
    val s = (ab + bc + ac) / 2.00
    val area = math.sqrt(s * (s - bc) * (s - ac) * (s - ab))
    // Compute angles
    val alpha = math.atan((4.0 * area) / (math.pow(ac, 2) + math.pow(ab, 2) - math.pow(bc, 2)))
    val betta = math.atan((4.0 * area) / (math.pow(ab, 2) + math.pow(bc, 2) - math.pow(ac, 2)))
    val gamma = math.atan((4.0 * area) / (math.pow(ac, 2) + math.pow(bc, 2) - math.pow(ab, 2)))
  }

  // We can see immediately the advantage of defining two functions:
  def side(xa: Double, xb: Double, ya: Double, yb: Double) = {
    math.sqrt(math.pow(xa - xb, 2) + math.pow(ya - yb, 2))
  }

  def angle(sarea: Double, sa: Double, sb: Double, sc: Double) = {
    math.atan2(4.0 * sarea, math.pow(sa, 2) + math.pow(sb, 2) - math.pow(sc, 2))
  }

  // So we can write a
  def betterManipulateTriangle(x1: Double, x2: Double, x3: Double,
                               y1: Double, y2: Double, y3: Double) {
    // Compute lengths of sides
    val ab = side(x2, x1, y2, y1)
    val ac = side(x3, x1, y3, y1)
    val bc = side(x3, x2, y3, y2)
    // Compute area
    val s = (ab + bc + ac) / 2.00
    val area = math.sqrt(s * (s - bc) * (s - ac) * (s - ab))
    // Compute angles
    val alpha = angle(area, ac, ab, bc)
    val betta = angle(area, ab, bc, ac)
    val gamma = angle(area, ac, bc, ab)
  }

  // This is not only easier to write but also easier to modify.
  // For instance changing `math.atan` to `math.atan2` to avoid error when divisor is zero.

  // Another eye-catching repeat appears in,
  def fragmentWithRepeat() {
    val r = 12
    val l = 24
    var time = 0.00
    var theta = 0.00
    val delth = 2.00 * math.Pi / 100.00
    for (i <- 1 to 100) {
      val x = r * (1.00 - math.cos(theta)) + l - l * math.sqrt(1.00 - math.pow(r * math.sin(theta) / l, 2.00))
      theta = theta + delth
      val xnew = r * (1.00 - math.cos(theta)) + l - l * math.sqrt(1.00 - math.pow(r * math.sin(theta) / l, 2.00))
      val vel = (xnew - x) / 0.01
      time = time + 0.01
      Console.println(s"$time, $theta, $xnew, $vel")
    }
  }

  // Our first impulse is to define another function for the gangling expression that appears twice,
  // but closer inspection shows a more fundamental oversight.
  // Two adjacent values of x are computed twice as often as necessary, as previous value is always known.
  // The practice of incrementing a floating point variable many times might be troublesome,
  // to keep arithmetic errors from piling up, we are better off computing `time` and `theta` from i.
  def puttingEverythingTogetherGives() {
    val r = 12
    val l = 24
    var x = 0.00
    for (i <- 1 to 100) {
      val time = i.toDouble / 100.00
      val theta = 2.00 * math.Pi * time
      val xnew = r * (1.00 - math.cos(theta)) + l - l * math.sqrt(1.00 - math.pow(r * math.sin(theta) / l, 2.00))
      val vel = (xnew - x) / 0.01
      Console.println(s"$time, $theta, $xnew, $vel")
      x = xnew
    }
  }

  // Replace repetitive expressions by calls to a common function!

  // Arithmetic expressions may differ from the way we intuitively tend to write them.
  // We are accustomed, in writing algebra, to bind multiplication tighter than division.
  // That
  def x(a: Double, b: Double, c: Double) = a * b / 2.0 * c
  // means
  def wrongX(a: Double, b: Double, c: Double) = (a * b) / (2.0 * c) // Wrong
  // when interpretation is
  def rightX(a: Double, b: Double, c: Double) = ((a * b) / 2.0) * c
  // Parenthesize to avoid ambiguity!

  // Variable names can also be either safe or dangerous:
  val N05S = 123
  // Now, was that "N, letter O, five, S" or "N, zero, five, S" or even "NOSS"?
  // Mixtures of similar characters (letter O and digit 0, letter l and digit 1, etc) are unsafe,
  // as are long identifiers that differ only at the end. Use xPos, not positionX.
  // Similar identifiers are dangerous in general,
  val n = 23
  val nn = n * n
  val nnn = n * n * n
  // It is only when, much further down, we read
  Console.println(s"$n $nn $nnn")
  // the typographical error in the second line becomes clear.
  // Choose variable names that won't be confused!

  // A conditional expression can also disguised by using,
  // NOTE(review): both branches test `!print`, so the second is unreachable and
  // "6,105" is never printed — likely a transcription slip for `else if(print)`;
  // compare with `oneWayPrint` below. Kept verbatim as teaching material.
  def unnecessaryBranches(print: Boolean): Unit = {
    if(!print) {
      // Do nothing
    } else if(!print) {
      Console.println("6,105")
    }
  }

  // This code is certainly better written as,
  def oneWayPrint(print: Boolean): Unit = {
    if(print) {
      Console.println("6,105")
    }
  }

  // Now we can tell at a glance that there is only way to reach the `println` call.
  // The influence of Fortran arithmetic IF (when there was three branches to go
  // whether condition returned -1, 0 or 1, like in Java `Comparator`)
  // often extends into misuse of the logical IF (when condition is `Boolean` type) now:
  def arithmeticalIfInfluenced(x1: Int, x2: Int): Unit = {
    if(x1 - x2 < 0) {
      Console.println("6,105")
    }
  }

  // Should be written
  def normalLogicalIf(x1: Int, x2: Int): Unit = {
    if(x1 < x2) {
      Console.println("6,105")
    }
  }

  // Avoid unnecessary branches!

  // A failure to state the underlying logic can lead to tangled control flow,
  def rudimentaryDatingService() = {
    val female = (0 until 8).map(_ => Console.readBoolean())
    while(true) {
      val male = (0 until 8).map(_ => Console.readBoolean())
      var dontPrint = false
      for (i <- 0 until 8) {
        if (female(i))
          if (!male(i))
            dontPrint = true
          else {
          }
        else
          if (!male(i)) {
          } else
            dontPrint = true
      }
      if(!dontPrint) {
        Console.println("Boy")
      }
    }
  }

  // The program is supposed to write "Boy" only if each of the `male`
  // has the same truth value as the corresponding `female`.
  // We can improve readability,
  def improvedReadabilityDatingService() = {
    val female = (0 until 8).map(_ => Console.readBoolean())
    while(true) {
      val male = (0 until 8).map(_ => Console.readBoolean())
      var dontPrint = false
      for (i <- 0 until 8) {
        if((female(i) && !male(i)) || (!female(i) && male(i))) {
          dontPrint = true
        }
      }
      if(!dontPrint) {
        Console.println("Boy")
      }
    }
  }

  // Or even
  // NOTE(review): `dontPrint` is overwritten on every iteration here, so only the
  // last pair decides — unlike the two versions above, which latch any mismatch.
  // Confirm whether this behavioral difference is part of the lesson.
  def directEqualityCheckDatingService() = {
    val female = (0 until 8).map(_ => Console.readBoolean())
    while(true) {
      val male = (0 until 8).map(_ => Console.readBoolean())
      var dontPrint = false
      for (i <- 0 until 8) {
        dontPrint = female(i) != male(i)
      }
      if(!dontPrint) {
        Console.println("Boy")
      }
    }
  }

  // This tells us directly that the program will go on to read the next input line,
  // without printing "Boy", if any `female(i)` differ from the corresponding `male(i)`.
  // Don't use conditional branches as a substitute for a logical expression!

  // Scala gives even much more elegant way of writing it
  def scalaVersionDatingService() = {
    val female = (0 until 8).map(_ => Console.readBoolean())
    while(true) {
      val male = (0 until 8).map(_ => Console.readBoolean())
      if((male zip female) forall { case (m, f) => m == f }) {
        Console.println("Boy")
      }
    }
  }

  // Most of the time we just using a relational operator (< or ==) in IF.
  // But we can, if we wish, use Boolean operators: &&, ||, ! to make arbitrarily complex
  // logical expressions. Boolean algebra is not used nearly as widely as ordinary arithmetic,
  // so we must write logical expressions more carefully lest we confuse the reader:
  def negatedConditions(x1: Int, x2: Int, array: Array[Int], i: Int) = {
    var icount = 0
    if(x1 >= array(i)) {
    } else {
      if(array(i) < x2) icount = icount + 1
    }
    icount
  }

  // It takes a while to realize that icount is incremented only if array(i) lies between x1 and x2.
  // Inversions slow down reading comprehension and should be avoided. Rewriting gives,
  def rewrittenConditions(x1: Int, x2: Int, array: Array[Int], i: Int) = {
    var icount = 0
    if(x1 < array(i) && array(i) < x2) icount = icount + 1
    icount
  }

  // It is much easier to tell at a glance what the logic implies.
  def anotherCondition(k: Int, print: String) = {
    if(k == 0 || (!(print == "yes" || print == "no"))) {
      Console.println("do")
    }
  }

  // The inversion and double parentheses slow down comprehension. It seems better to
  // distribute the "not" operation through the parenthesized expression with De Morgan`s laws.
  def simplifiedCondition(k: Int, print: String) = {
    if(k == 0 || (print != "yes" && print != "no")) {
      Console.println("do")
    }
  }

  // The expression is still not simple, but it is now in a form that more closely resembles how we speak
  // (it is important, though, to track that both negation and or/and operation change applied).
  // If a logical expression is hard to understand, try transforming it!

  // Let's conclude with one larger example, to show how quickly program can get out of hand,
  // when you fail to look after the little things. The program finds the area under the
  // parabola y = x*x between x=0 and x=1, using a trapezoidal rule and several step sizes:
  def trapz() = {
    val mssg1 = "Area under the curve"
    val mssg2 = "by the trapezoidal rule"
    val mssg3 = "For delta x = 1/"
    var i: Double = 0.00
    var l: Double = 0.00
    var m: Double = 0.00
    var n: Int = 0
    var area1: Double = 0.00
    var area: Double = 0.00
    var lmts: Double = 0.00
    Console.println(mssg1)
    Console.println(mssg2)
    area = 0.00
    for(k <- 4 to 10) {
      m = 1.00 / k
      n = k - 1
      lmts = 0.50 * m
      i = 1.00
      for(j <- 1 to n) {
        l = math.pow(i / k, 2.00)
        area1 = 0.50 * m * (2.00 * l)
        area = area + area1
        // Forward reference to the nested `out` below — legal for defs in a block.
        if(i == n) out(k)
        else i = i + 1
      }
    }
    def out(k: Int) = {
      area = area + lmts
      Console.println(mssg3 + k + " " + area)
      area = 0.00
    }
  }

  // Held at arm's length, this program looks pretty impressive.
  // The is a large assortment of data declarations, followed by a computation
  // that is evidently complex enough to warrant a sub-procedure `out`.
  // Declarations are neatly aligned, and the executable statements are staggered
  // so as to indicate several levels of control nesting. There are text strings
  // to suggest the intent of the program, and mnemonic identifiers
  // to give hints about how the results are obtained. The general impression
  // conveyed is that this is a moderately complicated problem
  // that has been carefully coded and is now well under control.
  // Closer inspection, however, shows quite the opposite.

  // Each output message is used only once, and would be better placed in the
  // `Console.println` call itself. The first to messages can be even combined.
  // The purpose of the assignment `m = 1.00 / k` is unclear. Does it defend
  // against some mysterious conversion? Is it to convey geometrical insight?
  // Or does the programmer worry that computers divide more slowly than they multiply?
  // Efficiency cannot be of grave importance anyway, not when the code contains
  //   area1 = 0.50 * m * (2.00 * l)

  // We can now remove all variable declarations, so it is easier to see the underlying structure.
  // Variable `i`, it is labouriously kept equal to `j` so that `out` could be called
  // at the end of the last iteration. Clearly, `out` could be used just after inner loop is terminated,
  // it is better to inline it though, as it is used just once. The structure simplifies remarkably.
  // Now we can see that the summing variable `area` is supposed to be initialized
  // at the beginning of each loop on `k`, it is much better practice than clearing it
  // at the end of each iteration, especially from a remote procedure.
  // This procedure `out` is changing `area` and reading 'lmts', destroying modularity referring to
  // seemingly local variables in unexpected places, is an invitation to future bugs.
  // Putting all our improvements together gives,
  def shortTrapz() {
    Console.println("Area under the curve\\nby the trapezoidal rule ")
    var area: Double = 0.00
    for(k <- 4 to 10) {
      area = 0.5 / k
      for(j <- 1 to (k-1)) {
        area = area + math.pow(j.toDouble / k, 2.00) / k
      }
      Console.println(s"For delta x = 1/$k $area")
    }
  }

  // The program now reflects how straightforward the calculation really is.
  // The changes we made were not designed to decrease execution time or to decrease storage utilization.
  // What then did we improve? Readability, principally, but also locality and simplicity of structure.
  // This will be the goal of all our revisions.
}
|
vuspenskiy/programming-style
|
src/vu/elements/of/style/b$expression.scala
|
Scala
|
mit
| 16,270 |
package es.codemotion.akkaships.client
import java.io.File
import javax.sound.midi.{Sequencer, MidiSystem}
import akka.actor._
import akka.cluster.Cluster
import akka.cluster.ClusterEvent._
import es.codemotion.akkaships.client.SceneRenderer.{Fire, HideCursor, MoveCursor}
import es.codemotion.akkaships.common.domain._
import scala.concurrent.duration._
object PlayerActor {

  /** Props for a [[PlayerActor]] wired to `gameSrv`, rendering a 20x25 scene. */
  def props(gameSrv: ActorSelection): Props = Props(new PlayerActor(gameSrv, new SceneRenderer(Size(20, 25))))

  // Messages owned by the player actor itself (client-side protocol).
  // (Translated from the original Spanish comment.)
  case object SyncInput                   // periodic tick: poll the scene for keyboard commands
  case class ShowTextMessage(msg: String) // show `msg` in the text area
  case object ClearTextArea               // blank the text area

  /** Local UI state: optional cursor position plus the board dimensions. */
  case class State(cursor: Option[Position], boardSize: Size) {
    // Valid when the cursor (if any) lies inside the board.
    def isValid: Boolean = cursor forall (boardSize contains _)
  }

  /** Starting state: no cursor shown. */
  def initialState(boardSize: Size) = State(None, boardSize)
}
/** Client-side actor: polls the scene renderer for keyboard commands on a timer,
 *  keeps the cursor state via `context.become`, and repaints on board updates.
 */
class PlayerActor(gameServer: ActorSelection, scene: SceneRenderer) extends Actor with ActorLogging {

  // How often keyboard input is polled from the scene.
  val inputPollPeriod = 150 milliseconds
  // Handle of the periodic SyncInput schedule; cancelled in postStop.
  var syncSched: Option[Cancellable] = None
  val cluster = Cluster(context.system)

  import PlayerActor._

  // Behaviour parameterised by the current UI state (cursor + board size).
  def behaviour(st: State = initialState(scene.size)): Receive = {
    case BoardUpdate(els) => {
      scene.paintBoard(els, st.cursor)
    }
    case SyncInput =>
      val commands = scene.getCommands
      if (commands contains HideCursor) {
        // Hiding the cursor resets the state to "no cursor".
        scene.hideCursor
        context.become(behaviour(State(None, scene.size)))
      } else commands collect {
        case MoveCursor(pos) =>
          // Move relative to the current cursor (origin when none); keep the
          // previous state when the new position falls off the board.
          val newPos = st.cursor.getOrElse(Position(0, 0)) + pos
          Some(State(Some(newPos), scene.size)) collect {
            case st if (st.isValid) =>
              scene.moveCursor(newPos)
              st
          } getOrElse st
        case Fire =>
          // NOTE(review): firing is not implemented — this throws NotImplementedError.
          ???
      } foreach { ns => context.become(behaviour(ns)) }
    case ClearTextArea => scene.clearMessage
    case ShowTextMessage(msg) => scene.showMessage(msg)
  }

  override def receive: Receive = behaviour(initialState(scene.size))

  override def postStop(): Unit = {
    scene.clearBoard(true)
    cluster.unsubscribe(self)
    syncSched.foreach(_.cancel())
    super.postStop()
  }

  override def preStart(): Unit = {
    super.preStart()
    cluster.subscribe(self, initialStateMode = InitialStateAsEvents, classOf[MemberEvent], classOf[UnreachableMember])
    // Periodic tick so the state is refreshed with whatever was typed on the
    // keyboard every poll period. (Translated from the original Spanish comment.)
    syncSched = Some(context.system.scheduler.schedule(inputPollPeriod, inputPollPeriod, self, SyncInput)(
      context.system.dispatcher
    ))
  }

  // Loads a MIDI file from the classpath and starts playing it once; returns the
  // open sequencer. NOTE(review): the sequencer is never closed here — confirm
  // that callers manage its lifecycle.
  def music(midi : String): Sequencer = {
    val url = getClass.getResource(s"/$midi")
    val midiFile = new File(url.getPath)
    val song = MidiSystem.getSequence(midiFile)
    val midiPlayer = MidiSystem.getSequencer()
    midiPlayer.open()
    midiPlayer.setSequence(song)
    midiPlayer.setLoopCount(0) // repeat 0 times (play once)
    midiPlayer.start()
    midiPlayer
  }
}
|
jjlopezm/Akkaships-Exercise
|
Client/src/main/scala/es/codemotion/akkaships/client/PlayerActor.scala
|
Scala
|
apache-2.0
| 2,953 |
/*
* Copyright 2012 Sanjin Sehic
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package at.ac.tuwien.infosys
package amber
package util
import org.slf4j.{LoggerFactory => SLF4JLoggerFactory}
/** Backs the amber `Logging` abstraction with SLF4J loggers. */
trait SLF4JLogging extends Logging {

  override protected lazy val logger = new LoggerFactory {
    override def create(name: Logger.Name) = new Logger {

      import Logging.Level._

      // Resolved lazily and excluded from serialization.
      @transient private[this] lazy val log = SLF4JLoggerFactory.getLogger(name)

      override final def isEnabled(level: Logging.Level) =
        level match {
          case Debug => log.isDebugEnabled
          case Info  => log.isInfoEnabled
          case Warn  => log.isWarnEnabled
          case Error => log.isErrorEnabled
        }

      // Dispatches on level and optional cause in a single match; the message is
      // only rendered when the level is enabled.
      override final def logAt(level: Logging.Level)
                              (message: => String, cause: Option[Throwable] = None) {
        if (isEnabled(level))
          (level, cause) match {
            case (Debug, Some(e)) => log.debug(message, e)
            case (Info,  Some(e)) => log.info(message, e)
            case (Warn,  Some(e)) => log.warn(message, e)
            case (Error, Some(e)) => log.error(message, e)
            case (Debug, None)    => log.debug(message)
            case (Info,  None)    => log.info(message)
            case (Warn,  None)    => log.warn(message)
            case (Error, None)    => log.error(message)
          }
      }
    }
  }
}
|
tuwiendsg/CAPA
|
core/src/scala/util/SLF4JLogging.scala
|
Scala
|
apache-2.0
| 2,036 |
package org.akoshterek.backgammon.agent
import org.akoshterek.backgammon.agent.gnubg.GnubgAgent
import org.akoshterek.backgammon.agent.pubeval.PubEvalAgent
import org.akoshterek.backgammon.agent.raw.{RawBatch, RawRl40, RawTd40}
import org.akoshterek.backgammon.eval.Evaluator
object AgentFactory {

  /** Instantiates an agent from its name, case-insensitively. Composite names
   *  such as "raw-..." are dispatched on the part before the first dash, with
   *  the full lower-cased name forwarded to the "raw" batch constructor.
   *
   *  @throws IllegalArgumentException for an unrecognised agent name
   */
  def createAgent(fullName: String): Agent = {
    val normalizedName = fullName.toLowerCase
    val kind = normalizedName.split("-")(0)
    kind match {
      case "random" =>
        new RandomAgent(Evaluator.basePath)
      case "heuristic" =>
        new HeuristicAgent(Evaluator.basePath)
      case "pubeval" =>
        PubEvalAgent(Evaluator.basePath)
      case "gnubg" =>
        new GnubgAgent(Evaluator.basePath)
      case "raw" =>
        RawBatch(Evaluator.basePath, normalizedName)
      case "rawrl40" =>
        // NOTE(review): only this variant is explicitly loaded after
        // construction — confirm the asymmetry with "rawtd40" is intended.
        val loadedAgent = new RawRl40(Evaluator.basePath)
        loadedAgent.load()
        loadedAgent
      case "rawtd40" =>
        new RawTd40(Evaluator.basePath)
      case _ =>
        throw new IllegalArgumentException("Unknown agent name " + fullName)
    }
  }
}
|
akoshterek/MultiGammonJava
|
multi-gammon-core/src/main/java/org/akoshterek/backgammon/agent/AgentFactory.scala
|
Scala
|
gpl-3.0
| 1,086 |
/*
* Licensed to Intel Corporation under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* Intel Corporation licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.optim
import java.nio.file.{Files, Paths}
import com.intel.analytics.bigdl._
import com.intel.analytics.bigdl.dataset.{DataSet, _}
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils.{Engine, File, T, Table}
import org.apache.spark.rdd.RDD
import scala.reflect.ClassTag
// TODO: remove D to be MiniBatch[T]
/** Base class for BigDL optimizers: trains `model` on `dataset` by minimising
 *  `criterion`. Configuration is supplied through the fluent `setXxx` methods,
 *  each of which returns `this` for chaining; `optimize()` runs the training
 *  loop. The protected vars below form the contract consumed by subclasses
 *  (e.g. DistriOptimizer / LocalOptimizer).
 *
 *  @tparam T numeric element type (backed by `ev`)
 *  @tparam D dataset element type (TODO above: intended to become MiniBatch[T])
 */
abstract class Optimizer[T: ClassTag, D](
  protected val model: Module[T],
  protected val dataset: DataSet[D],
  protected val criterion: Criterion[T])(implicit ev : TensorNumeric[T])
{
  // Optimization state table (counters, method-specific bookkeeping).
  protected var state: Table = T()
  // Optimization algorithm; plain SGD unless overridden via setOptimMethod.
  protected var optimMethod: OptimMethod[T] = new SGD[T]()
  // Stop condition; defaults to 100 iterations.
  protected var endWhen: Trigger = Trigger.maxIteration(100)

  // Checkpointing configuration (disabled until setCheckpoint is called).
  protected var checkpointTrigger: Option[Trigger] = None
  protected var checkpointPath: Option[String] = None
  protected var isOverWrite: Boolean = false

  // Validation configuration (disabled until setValidation is called).
  protected var validationTrigger: Option[Trigger] = None
  protected var validationMethods: Option[Array[ValidationMethod[T]]] = None
  protected var validationDataSet: Option[DataSet[MiniBatch[T]]] = None

  // To achieve better performance, please set dropPercentage as 0.04
  protected var dropPercentage: Double = 0.0
  protected var maxDropPercentage: Double = 0.0
  protected var computeThresholdbatchSize: Int = 100
  protected var warmupIterationNum: Int = 200

  /** Runs the training loop and returns the trained module. */
  def optimize(): Module[T]

  @deprecated("Use bigdl.check.singleton instead", "0.1.0")
  def disableCheckSingleton(): this.type = {
    this.checkSingleton = false
    println("disableCheckSingleton is deprecated. Please use bigdl.check.singleton instead")
    this
  }

  // TODO: Remove below code to DistriOptimizer after disableCheckSingleton is not supported
  protected var checkSingleton = System.getProperty("bigdl.check.singleton",
    true.toString).toBoolean

  /** Enables periodic validation on a prepared MiniBatch dataset.
   *
   *  @param trigger  when to run validation
   *  @param dataset  validation data, already batched
   *  @param vMethods metrics to evaluate
   */
  def setValidation(trigger: Trigger, dataset: DataSet[MiniBatch[T]],
    vMethods : Array[ValidationMethod[T]])
  : this.type = {
    this.validationTrigger = Some(trigger)
    this.validationDataSet = Some(dataset)
    this.validationMethods = Some(vMethods)
    this
  }

  /** Enables periodic validation on a raw sample RDD, batching it by `batchSize`. */
  def setValidation(trigger: Trigger, sampleRDD: RDD[Sample[T]],
    vMethods : Array[ValidationMethod[T]], batchSize: Int)
  : this.type = {
    this.validationTrigger = Some(trigger)
    val dataSet =
      (DataSet.rdd(sampleRDD) -> SampleToBatch(batchSize))
        .asInstanceOf[DistributedDataSet[MiniBatch[T]]]
    this.validationDataSet = Some(dataSet)
    this.validationMethods = Some(vMethods)
    this
  }

  /** Enables checkpointing of model/state to `path` on `trigger`.
   *  Local (non-HDFS) paths must already exist as directories.
   */
  def setCheckpoint(path: String, trigger: Trigger): this.type = {
    if (!path.startsWith(File.hdfsPrefix)) {
      require(Files.isDirectory(Paths.get(path)), s"Optimizer.setCheckpoint: $path is not a folder")
    }
    this.checkpointPath = Some(path)
    this.checkpointTrigger = Some(trigger)
    this
  }

  /** Allows checkpoints to overwrite previously saved files. */
  def overWriteCheckpoint() : this.type = {
    isOverWrite = true
    this
  }

  /** Replaces the optimization state table (e.g. to resume training). */
  def setState(state: Table): this.type = {
    this.state = state
    this
  }

  /** Replaces the optimization algorithm (default: SGD). */
  def setOptimMethod(method : OptimMethod[T]): this.type = {
    this.optimMethod = method
    this
  }

  /** Replaces the stop condition (default: 100 iterations). */
  def setEndWhen(endWhen: Trigger): this.type = {
    this.endWhen = endWhen
    this
  }

  /** Configures gradient-drop behavior.
   *  NOTE(review): method name "Moudle" is a typo for "Module", but it is part
   *  of the public API and renaming would break callers.
   */
  def setDropMoudleProperty(dropPercentage: Double, maxDropPercentage: Double,
    batchsize: Int = 100, warmupIteration: Int = 200): this.type = {
    this.dropPercentage = dropPercentage
    this.maxDropPercentage = maxDropPercentage
    require(dropPercentage >= 0 && dropPercentage <= maxDropPercentage)
    this.computeThresholdbatchSize = batchsize
    this.warmupIterationNum = warmupIteration
    this
  }

  /** Fails fast when the Engine has not been initialized. */
  def assertEngineInited(): Unit = {
    require(Engine.isInitialized, s"you may forget to initialize Engine object.")
  }
}
object Optimizer {

  /** Formats the standard training-log prefix: epoch progress, iteration count
   *  and wall-clock time in seconds.
   */
  private[bigdl] def header(epoch: Int, count: Int, total: Long, iter: Int, wallClockTime: Long)
  : String = {
    val seconds = wallClockTime / 1e9
    s"[Epoch $epoch $count/$total][Iteration $iter][Wall Clock ${seconds}s]"
  }

  /** Saves `model` under `<checkpointPath>/model<postfix>` when a path is configured. */
  private[bigdl] def saveModel[T](model: Module[T], checkpointPath : Option[String],
    overWrite : Boolean, postfix: String = ""): Unit = {
    checkpointPath.foreach { path =>
      model.save(s"$path/model$postfix", overWrite)
    }
  }

  /** Saves the optimizer `state` under `<checkpointPath>/state<postfix>` when a path is configured. */
  private[bigdl] def saveState(state: Table, checkpointPath : Option[String], overWrite : Boolean,
    postfix: String = ""): Unit = {
    checkpointPath.foreach { path =>
      state.save(s"$path/state$postfix", overWrite)
    }
  }

  /** Builds a distributed optimizer over a sample RDD, batching by `batchSize`. */
  def apply[T: ClassTag](
    model: Module[T],
    sampleRDD: RDD[Sample[T]],
    criterion: Criterion[T],
    batchSize: Int
  )(implicit ev: TensorNumeric[T]): Optimizer[T, MiniBatch[T]] = {
    val batchedData = (DataSet.rdd(sampleRDD) -> SampleToBatch(batchSize))
      .asInstanceOf[DistributedDataSet[MiniBatch[T]]]
    new DistriOptimizer[T](
      model = model,
      dataset = batchedData,
      criterion = criterion
    ).asInstanceOf[Optimizer[T, MiniBatch[T]]]
  }

  /** Picks the local or distributed optimizer implementation from the dataset type. */
  def apply[T: ClassTag, D](
    model: Module[T],
    dataset: DataSet[D],
    criterion: Criterion[T]
  )(implicit ev: TensorNumeric[T]): Optimizer[T, D] = {
    dataset match {
      case distributed: DistributedDataSet[_] =>
        new DistriOptimizer[T](
          model = model,
          dataset = distributed.asInstanceOf[DistributedDataSet[MiniBatch[T]]],
          criterion = criterion
        ).asInstanceOf[Optimizer[T, D]]
      case local: LocalDataSet[_] =>
        new LocalOptimizer[T](
          model = model,
          dataset = local.asInstanceOf[LocalDataSet[MiniBatch[T]]],
          criterion = criterion
        ).asInstanceOf[Optimizer[T, D]]
      case _ =>
        throw new UnsupportedOperationException
    }
  }
}
|
SeaOfOcean/BigDL
|
dl/src/main/scala/com/intel/analytics/bigdl/optim/Optimizer.scala
|
Scala
|
apache-2.0
| 6,466 |
package models
import play.api.db.slick.Config.driver.simple._
import scala.slick.lifted.Tag
// One assessment row: `id` is None until persisted (auto-increment), `endDate`
// is None while the assessment is still in progress. Dates are epoch-millis-style
// Longs (exact unit not visible here — confirm against writers).
// NOTE(review): "Assesment" misspells "Assessment", but renaming would break callers.
case class Assesment(id: Option[Long], studentId: Long, startDate: Long, endDate: Option[Long])
// Assessment projection without the student id — presumably paired with
// question/score data elsewhere (not used in this file; confirm against callers).
case class AssesmentWithQuestionsAndScores(id: Long, startDate: Long, endDate: Option[Long])
/** Slick table mapping for the "assesments" table. */
class Assesments(tag: Tag) extends Table[Assesment](tag, "assesments") {
  def id = column[Long]("id", O.PrimaryKey, O.AutoInc)
  def studentId = column[Long]("studentId")
  def startDate = column[Long]("startDate")
  def endDate = column[Option[Long]]("endDate")
  // Default projection; column order must match the Assesment constructor.
  def * = (id.?, studentId, startDate, endDate) <> (Assesment.tupled, Assesment.unapply _)
  // Foreign key to the people table via studentId.
  def student = foreignKey("assessment_student_fk", studentId, People.people)(_.id)
}
// CRUD helpers for the assesments table.
object Assesments {
  lazy val assesments = TableQuery[Assesments]
  // Insert `qs` and return a copy carrying the database-generated id.
  def create(qs: Assesment)(implicit s: Session): Assesment =
    (assesments returning assesments.map(_.id) into ((assesment, id) => assesment.copy(Some(id)))) += qs
  // Delete by id; returns the number of rows removed (0 if not found).
  def delete(id: Long)(implicit s: Session): Int =
    assesments.filter(_.id === id).delete
  // Look up a single assessment by id.
  def find(id: Long)(implicit s: Session): Option[Assesment] =
    assesments.filter(_.id === id).firstOption
}
|
silbermm/proximal
|
app/models/Assessment.scala
|
Scala
|
apache-2.0
| 1,173 |
/*
* Copyright 2014 Treode, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.treode
import java.net.SocketAddress
import java.nio.channels.{AsynchronousCloseException, ClosedChannelException}
import java.util.logging.{Level, Logger}
import Level.{INFO, WARNING}
/** Shared exception types and logging helpers for the cluster messaging layer. */
package object cluster {
  // Thrown by a handler to deliberately drop an incoming request.
  class IgnoreRequestException extends Exception
  // Signals a failure reported by the remote peer.
  class RemoteException extends Exception
  // True for the throwables that indicate a socket was closed (locally or by
  // the peer) rather than a genuine error; used to silence expected shutdowns.
  private [cluster] def isClosedException (t: Throwable): Boolean =
    t match {
      case t: AsynchronousCloseException => true
      case t: ClosedChannelException => true
      // Matched by message text — presumably raised by the async I/O layer on EOF; confirm.
      case t: Exception if t.getMessage == "End of file reached." => true
      case _ => false
    }
  // Centralized, named log events for connection lifecycle and wire errors.
  private [cluster] object log {
    val logger = Logger.getLogger ("com.treode.cluster")
    def acceptingConnections (cellid: CellId, localId: HostId, localAddr: SocketAddress): Unit =
      logger.log (INFO, s"Accepting peer connections to $localId for $cellid on $localAddr")
    def connected (remoteId: HostId, localAddr: SocketAddress, remoteAddr: SocketAddress): Unit =
      logger.log (INFO, s"Connected to $remoteId at $localAddr : $remoteAddr")
    def disconnected (remoteId: HostId, localAddr: SocketAddress, remoteAddr: SocketAddress): Unit =
      logger.log (INFO, s"Disconnected from $remoteId on $localAddr : $remoteAddr")
    def errorWhileGreeting (expected: HostId, found: HostId): Unit =
      logger.log (WARNING, s"Error while greeting: expected remote host $expected but found $found")
    def exceptionWhileGreeting (e: Throwable): Unit =
      logger.log (WARNING, s"Error while greeting", e)
    def exceptionReadingMessage (e: Throwable): Unit =
      logger.log (WARNING, s"Exception reading message", e)
    def exceptionWritingMessage (e: Throwable): Unit =
      logger.log (WARNING, s"Exception writing message", e)
    def recyclingMessengerSocket (e: Throwable): Unit =
      logger.log (WARNING, s"Recycling messenger socket", e)
    def rejectedForeignCell (remoteId: HostId, remoteCellId: CellId): Unit =
      logger.log (INFO, s"Rejected foreign cell $remoteId : $remoteCellId")
  }}
|
Treode/store
|
cluster/src/com/treode/cluster/package.scala
|
Scala
|
apache-2.0
| 2,631 |
// Copyright (c) 2013-2020 Rob Norris and Contributors
// This software is licensed under the MIT License (MIT).
// For more information see LICENSE or https://opensource.org/licenses/MIT
package doobie.util.meta
import doobie.enumerated.JdbcType._
import Predef._
/** Entry points for java.time.* Meta instances backed by driver-native JDBC support. */
trait TimeMeta {
  @deprecated("Use doobie.implicits.javatimedrivernative instead. If you are using a database which doobie directly integrates with, " +
    "You won't need this import anymore as datetime instances are provided in the DB-specific implicit import. " +
    "e.g. for PostgreSQL: `import doobie.postgres.implicits._`. ",
    since = "0.11.0"
  )
  object javatime extends MetaConstructors with TimeMetaInstances
  /**
   * Use this import if you want to use the driver-native support for java.time.* types.
   * This means that the java.time value is passed straight to the JDBC driver you're using
   * without being converted to java.sql.* types (Unlike doobie.implicits.legacy.instant/localdate)
   */
  object javatimedrivernative extends MetaConstructors with TimeMetaInstances
}
/**
 * Meta instances for java.time.* types that delegate to the JDBC driver's
 * native object mapping (getObject/setObject/updateObject with a target
 * class). Each instance declares a primary JDBC type plus the secondary
 * JDBC types it will also accept when reading.
 */
trait TimeMetaInstances { this: MetaConstructors =>
  /** @group Instances */
  implicit val JavaTimeInstantMeta: Meta[java.time.Instant] =
    Basic.one[java.time.Instant](
      Timestamp,
      List(Char, VarChar, LongVarChar, Date, Time),
      _.getObject(_, classOf[java.time.Instant]), _.setObject(_, _), _.updateObject(_, _))
  /** @group Instances */
  implicit val JavaTimeLocalDateMeta: Meta[java.time.LocalDate] =
    Basic.one[java.time.LocalDate](
      Date,
      List(Char, VarChar, LongVarChar, Timestamp),
      _.getObject(_, classOf[java.time.LocalDate]), _.setObject(_, _), _.updateObject(_, _))
  /** @group Instances */
  implicit val JavaLocalTimeMeta: Meta[java.time.LocalTime] =
    Basic.one[java.time.LocalTime](
      Time,
      List(Char, VarChar, LongVarChar, Timestamp),
      _.getObject(_, classOf[java.time.LocalTime]), _.setObject(_, _), _.updateObject(_, _))
  /** @group Instances */
  implicit val JavaTimeLocalDateTimeMeta: Meta[java.time.LocalDateTime] =
    Basic.one[java.time.LocalDateTime](
      Timestamp,
      List(Char, VarChar, LongVarChar, Date, Time),
      _.getObject(_, classOf[java.time.LocalDateTime]), _.setObject(_, _), _.updateObject(_, _))
  /** @group Instances */
  implicit val JavaOffsetTimeMeta: Meta[java.time.OffsetTime] =
    Basic.one[java.time.OffsetTime](
      TimeWithTimezone,
      List(Char, VarChar, LongVarChar, Timestamp, Time),
      _.getObject(_, classOf[java.time.OffsetTime]), _.setObject(_, _), _.updateObject(_, _))
  /** @group Instances */
  implicit val JavaOffsetDateTimeMeta: Meta[java.time.OffsetDateTime] =
    Basic.one[java.time.OffsetDateTime](
      TimestampWithTimezone,
      List(Char, VarChar, LongVarChar, Date, Time, Timestamp),
      _.getObject(_, classOf[java.time.OffsetDateTime]), _.setObject(_, _), _.updateObject(_, _))
  /** @group Instances */
  implicit val JavaZonedDateTimeMeta: Meta[java.time.ZonedDateTime] =
    Basic.one[java.time.ZonedDateTime](
      TimestampWithTimezone,
      List(Char, VarChar, LongVarChar, Date, Time, Timestamp),
      _.getObject(_, classOf[java.time.ZonedDateTime]), _.setObject(_, _), _.updateObject(_, _))
}
|
tpolecat/doobie
|
modules/core/src/main/scala/doobie/util/meta/timemeta.scala
|
Scala
|
mit
| 3,242 |
package com.wuyuntao.aeneas
import java.util.UUID
/**
 * A point-in-time view of an entity's state.
 *
 * Note: the redundant `abstract` modifier was removed — traits are abstract
 * by definition in Scala.
 */
trait Snapshot {
  /** Identifier of the entity this snapshot belongs to. */
  val owner: UUID
  /** Revision number of the entity captured by this snapshot. */
  val revision: Int
}
|
wuyuntao/Aeneas
|
aeneas-core/src/main/scala/com/wuyuntao/aeneas/Snapshot.scala
|
Scala
|
apache-2.0
| 117 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import java.security.{MessageDigest, NoSuchAlgorithmException}
import java.util.zip.CRC32
import scala.annotation.tailrec
import org.apache.commons.codec.digest.DigestUtils
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.util.{ArrayData, MapData}
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.hash.Murmur3_x86_32
import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String}
import org.apache.spark.unsafe.Platform
/**
* A function that calculates an MD5 128-bit checksum and returns it as a hex string
* For input of type [[BinaryType]]
*/
@ExpressionDescription(
  usage = "_FUNC_(input) - Returns an MD5 128-bit checksum as a hex string of the input",
  extended = "> SELECT _FUNC_('Spark');\n '8cde774d6f7333752ed72cacddb05126'")
case class Md5(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {
  override def dataType: DataType = StringType
  override def inputTypes: Seq[DataType] = Seq(BinaryType)
  // Interpreted path: hex-encode the MD5 digest of the input bytes.
  protected override def nullSafeEval(input: Any): Any =
    UTF8String.fromString(DigestUtils.md5Hex(input.asInstanceOf[Array[Byte]]))
  // Codegen path: emit the same DigestUtils call inline.
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    defineCodeGen(ctx, ev, c =>
      s"UTF8String.fromString(org.apache.commons.codec.digest.DigestUtils.md5Hex($c))")
  }
}
/**
* A function that calculates the SHA-2 family of functions (SHA-224, SHA-256, SHA-384, and SHA-512)
* and returns it as a hex string. The first argument is the string or binary to be hashed. The
* second argument indicates the desired bit length of the result, which must have a value of 224,
* 256, 384, 512, or 0 (which is equivalent to 256). SHA-224 is supported starting from Java 8. If
* asking for an unsupported SHA function, the return value is NULL. If either argument is NULL or
* the hash length is not one of the permitted values, the return value is NULL.
*/
// scalastyle:off line.size.limit
@ExpressionDescription(
  usage = """_FUNC_(input, bitLength) - Returns a checksum of SHA-2 family as a hex string of the input.
      SHA-224, SHA-256, SHA-384, and SHA-512 are supported. Bit length of 0 is equivalent to 256.""",
  extended = """> SELECT _FUNC_('Spark', 0);
       '529bc3b07127ecb7e53a4dcf1991d9152c24537d919178022b2c42657f79a26b'""")
// scalastyle:on line.size.limit
case class Sha2(left: Expression, right: Expression)
  extends BinaryExpression with Serializable with ImplicitCastInputTypes {
  override def dataType: DataType = StringType
  // Nullable because unsupported bit lengths yield null.
  override def nullable: Boolean = true
  override def inputTypes: Seq[DataType] = Seq(BinaryType, IntegerType)
  // Interpreted path: dispatch on the requested bit length.
  protected override def nullSafeEval(input1: Any, input2: Any): Any = {
    val bitLength = input2.asInstanceOf[Int]
    val input = input1.asInstanceOf[Array[Byte]]
    bitLength match {
      case 224 =>
        // DigestUtils doesn't support SHA-224 now
        try {
          val md = MessageDigest.getInstance("SHA-224")
          md.update(input)
          // NOTE(review): this returns the raw digest bytes, unlike the hex
          // strings produced by the other branches — looks inconsistent with
          // the documented "hex string" contract; confirm whether intended.
          UTF8String.fromBytes(md.digest())
        } catch {
          // SHA-224 is not supported on the system, return null
          case noa: NoSuchAlgorithmException => null
        }
      case 256 | 0 =>
        UTF8String.fromString(DigestUtils.sha256Hex(input))
      case 384 =>
        UTF8String.fromString(DigestUtils.sha384Hex(input))
      case 512 =>
        UTF8String.fromString(DigestUtils.sha512Hex(input))
      // Any other bit length is invalid; return null per the expression docs.
      case _ => null
    }
  }
  // Codegen path: mirrors nullSafeEval's dispatch in generated Java.
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    val digestUtils = "org.apache.commons.codec.digest.DigestUtils"
    nullSafeCodeGen(ctx, ev, (eval1, eval2) => {
      s"""
        if ($eval2 == 224) {
          try {
            java.security.MessageDigest md = java.security.MessageDigest.getInstance("SHA-224");
            md.update($eval1);
            ${ev.value} = UTF8String.fromBytes(md.digest());
          } catch (java.security.NoSuchAlgorithmException e) {
            ${ev.isNull} = true;
          }
        } else if ($eval2 == 256 || $eval2 == 0) {
          ${ev.value} =
            UTF8String.fromString($digestUtils.sha256Hex($eval1));
        } else if ($eval2 == 384) {
          ${ev.value} =
            UTF8String.fromString($digestUtils.sha384Hex($eval1));
        } else if ($eval2 == 512) {
          ${ev.value} =
            UTF8String.fromString($digestUtils.sha512Hex($eval1));
        } else {
          ${ev.isNull} = true;
        }
      """
    })
  }
}
/**
* A function that calculates a sha1 hash value and returns it as a hex string
* For input of type [[BinaryType]] or [[StringType]]
*/
@ExpressionDescription(
  usage = "_FUNC_(input) - Returns a sha1 hash value as a hex string of the input",
  extended = "> SELECT _FUNC_('Spark');\n '85f5955f4b27a9a4c2aab6ffe5d7189fc298b92c'")
case class Sha1(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {
  override def dataType: DataType = StringType
  override def inputTypes: Seq[DataType] = Seq(BinaryType)
  // Interpreted path: hex-encode the SHA-1 digest of the input bytes.
  protected override def nullSafeEval(input: Any): Any =
    UTF8String.fromString(DigestUtils.sha1Hex(input.asInstanceOf[Array[Byte]]))
  // Codegen path: emit the same DigestUtils call inline.
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    defineCodeGen(ctx, ev, c =>
      s"UTF8String.fromString(org.apache.commons.codec.digest.DigestUtils.sha1Hex($c))"
    )
  }
}
/**
* A function that computes a cyclic redundancy check value and returns it as a bigint
* For input of type [[BinaryType]]
*/
@ExpressionDescription(
  usage = "_FUNC_(input) - Returns a cyclic redundancy check value as a bigint of the input",
  extended = "> SELECT _FUNC_('Spark');\n '1557323817'")
case class Crc32(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {
  override def dataType: DataType = LongType
  override def inputTypes: Seq[DataType] = Seq(BinaryType)
  // Interpreted path: feed the whole byte array through java.util.zip.CRC32.
  protected override def nullSafeEval(input: Any): Any = {
    val checksum = new CRC32
    checksum.update(input.asInstanceOf[Array[Byte]], 0, input.asInstanceOf[Array[Byte]].length)
    checksum.getValue
  }
  // Codegen path: allocate a fresh CRC32 per evaluation in generated Java.
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    val CRC32 = "java.util.zip.CRC32"
    val checksum = ctx.freshName("checksum")
    nullSafeCodeGen(ctx, ev, value => {
      s"""
        $CRC32 $checksum = new $CRC32();
        $checksum.update($value, 0, $value.length);
        ${ev.value} = $checksum.getValue();
      """
    })
  }
}
/**
* A function that calculates hash value for a group of expressions. Note that the `seed` argument
* is not exposed to users and should only be set inside spark SQL.
*
* The hash value for an expression depends on its type and seed:
* - null: seed
* - boolean: turn boolean into int, 1 for true, 0 for false, and then use murmur3 to
* hash this int with seed.
* - byte, short, int: use murmur3 to hash the input as int with seed.
* - long: use murmur3 to hash the long input with seed.
* - float: turn it into int: java.lang.Float.floatToIntBits(input), and hash it.
* - double: turn it into long: java.lang.Double.doubleToLongBits(input), and hash it.
* - decimal: if it's a small decimal, i.e. precision <= 18, turn it into long and hash
* it. Else, turn it into bytes and hash it.
* - calendar interval: hash `microseconds` first, and use the result as seed to hash `months`.
* - binary: use murmur3 to hash the bytes with seed.
* - string: get the bytes of string and hash it.
* - array: The `result` starts with seed, then use `result` as seed, recursively
* calculate hash value for each element, and assign the element hash value
* to `result`.
* - map: The `result` starts with seed, then use `result` as seed, recursively
* calculate hash value for each key-value, and assign the key-value hash
* value to `result`.
* - struct: The `result` starts with seed, then use `result` as seed, recursively
* calculate hash value for each field, and assign the field hash value to
* `result`.
*
* Finally we aggregate the hash values for each expression by the same way of struct.
*/
// Base class for hash expressions; subclasses supply the hasher class used in
// generated code and the per-value hash in the interpreted path. E is the
// hash/seed type (Int for Murmur3, Long for xxHash64).
abstract class HashExpression[E] extends Expression {
  /** Seed of the HashExpression. */
  val seed: E
  override def foldable: Boolean = children.forall(_.foldable)
  // Never null: a null child contributes the running seed unchanged.
  override def nullable: Boolean = false
  override def checkInputDataTypes(): TypeCheckResult = {
    if (children.isEmpty) {
      TypeCheckResult.TypeCheckFailure("function hash requires at least one argument")
    } else {
      TypeCheckResult.TypeCheckSuccess
    }
  }
  // Interpreted path: fold each child's hash into the running value,
  // threading the previous result as the next seed.
  override def eval(input: InternalRow): Any = {
    var hash = seed
    var i = 0
    val len = children.length
    while (i < len) {
      hash = computeHash(children(i).eval(input), children(i).dataType, hash)
      i += 1
    }
    hash
  }
  // Interpreted per-value hash, provided by the concrete subclass.
  protected def computeHash(value: Any, dataType: DataType, seed: E): E
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    // Result is never null, so hard-code isNull and skip the null check.
    ev.isNull = "false"
    val childrenHash = children.map { child =>
      val childGen = child.genCode(ctx)
      // nullSafeExec skips the hash update when the child evaluates to null.
      childGen.code + ctx.nullSafeExec(child.nullable, childGen.isNull) {
        computeHash(childGen.value, child.dataType, ev.value, ctx)
      }
    }.mkString("\n")
    ev.copy(code = s"""
      ${ctx.javaType(dataType)} ${ev.value} = $seed;
      $childrenHash""")
  }
  // Emits code that hashes one element of an array/map/struct, skipping nulls.
  private def nullSafeElementHash(
      input: String,
      index: String,
      nullable: Boolean,
      elementType: DataType,
      result: String,
      ctx: CodegenContext): String = {
    val element = ctx.freshName("element")
    ctx.nullSafeExec(nullable, s"$input.isNullAt($index)") {
      s"""
        final ${ctx.javaType(elementType)} $element = ${ctx.getValue(input, elementType, index)};
        ${computeHash(element, elementType, result, ctx)}
      """
    }
  }
  // Codegen per-value hash: emits Java that updates `result` in place.
  // NOTE(review): the match has no default case, so an unsupported data type
  // would raise a MatchError at codegen time.
  @tailrec
  private def computeHash(
      input: String,
      dataType: DataType,
      result: String,
      ctx: CodegenContext): String = {
    val hasher = hasherClassName
    def hashInt(i: String): String = s"$result = $hasher.hashInt($i, $result);"
    def hashLong(l: String): String = s"$result = $hasher.hashLong($l, $result);"
    def hashBytes(b: String): String =
      s"$result = $hasher.hashUnsafeBytes($b, Platform.BYTE_ARRAY_OFFSET, $b.length, $result);"
    dataType match {
      case NullType => ""
      case BooleanType => hashInt(s"$input ? 1 : 0")
      case ByteType | ShortType | IntegerType | DateType => hashInt(input)
      case LongType | TimestampType => hashLong(input)
      case FloatType => hashInt(s"Float.floatToIntBits($input)")
      case DoubleType => hashLong(s"Double.doubleToLongBits($input)")
      case d: DecimalType =>
        // Small decimals are hashed via their unscaled long; large ones via
        // the unscaled BigInteger's bytes.
        if (d.precision <= Decimal.MAX_LONG_DIGITS) {
          hashLong(s"$input.toUnscaledLong()")
        } else {
          val bytes = ctx.freshName("bytes")
          s"""
            final byte[] $bytes = $input.toJavaBigDecimal().unscaledValue().toByteArray();
            ${hashBytes(bytes)}
          """
        }
      case CalendarIntervalType =>
        // Hash microseconds first, then use that as the seed for months.
        val microsecondsHash = s"$hasher.hashLong($input.microseconds, $result)"
        s"$result = $hasher.hashInt($input.months, $microsecondsHash);"
      case BinaryType => hashBytes(input)
      case StringType =>
        val baseObject = s"$input.getBaseObject()"
        val baseOffset = s"$input.getBaseOffset()"
        val numBytes = s"$input.numBytes()"
        s"$result = $hasher.hashUnsafeBytes($baseObject, $baseOffset, $numBytes, $result);"
      case ArrayType(et, containsNull) =>
        val index = ctx.freshName("index")
        s"""
          for (int $index = 0; $index < $input.numElements(); $index++) {
            ${nullSafeElementHash(input, index, containsNull, et, result, ctx)}
          }
        """
      case MapType(kt, vt, valueContainsNull) =>
        // Keys cannot be null, values may be.
        val index = ctx.freshName("index")
        val keys = ctx.freshName("keys")
        val values = ctx.freshName("values")
        s"""
          final ArrayData $keys = $input.keyArray();
          final ArrayData $values = $input.valueArray();
          for (int $index = 0; $index < $input.numElements(); $index++) {
            ${nullSafeElementHash(keys, index, false, kt, result, ctx)}
            ${nullSafeElementHash(values, index, valueContainsNull, vt, result, ctx)}
          }
        """
      case StructType(fields) =>
        fields.zipWithIndex.map { case (field, index) =>
          nullSafeElementHash(input, index.toString, field.nullable, field.dataType, result, ctx)
        }.mkString("\n")
      // UDTs are hashed via their underlying SQL type (tail-recursive step).
      case udt: UserDefinedType[_] => computeHash(input, udt.sqlType, result, ctx)
    }
  }
  // Fully-qualified name of the hasher class used by the generated code.
  protected def hasherClassName: String
}
/**
* Base class for interpreted hash functions.
*/
abstract class InterpretedHashFunction {
  // Primitive hash steps, supplied by the concrete hash implementation.
  protected def hashInt(i: Int, seed: Long): Long
  protected def hashLong(l: Long, seed: Long): Long
  protected def hashUnsafeBytes(base: AnyRef, offset: Long, length: Int, seed: Long): Long
  // Hashes one value of the given data type, threading `seed` through.
  // Must mirror HashExpression's codegen dispatch exactly so interpreted and
  // generated paths produce identical hashes.
  def hash(value: Any, dataType: DataType, seed: Long): Long = {
    value match {
      // Nulls contribute nothing: the seed passes through unchanged.
      case null => seed
      case b: Boolean => hashInt(if (b) 1 else 0, seed)
      case b: Byte => hashInt(b, seed)
      case s: Short => hashInt(s, seed)
      case i: Int => hashInt(i, seed)
      case l: Long => hashLong(l, seed)
      case f: Float => hashInt(java.lang.Float.floatToIntBits(f), seed)
      case d: Double => hashLong(java.lang.Double.doubleToLongBits(d), seed)
      case d: Decimal =>
        // Small decimals hash via the unscaled long; larger ones via the
        // unscaled BigInteger's byte representation.
        val precision = dataType.asInstanceOf[DecimalType].precision
        if (precision <= Decimal.MAX_LONG_DIGITS) {
          hashLong(d.toUnscaledLong, seed)
        } else {
          val bytes = d.toJavaBigDecimal.unscaledValue().toByteArray
          hashUnsafeBytes(bytes, Platform.BYTE_ARRAY_OFFSET, bytes.length, seed)
        }
      // Microseconds hashed first; its result seeds the months hash.
      case c: CalendarInterval => hashInt(c.months, hashLong(c.microseconds, seed))
      case a: Array[Byte] =>
        hashUnsafeBytes(a, Platform.BYTE_ARRAY_OFFSET, a.length, seed)
      case s: UTF8String =>
        hashUnsafeBytes(s.getBaseObject, s.getBaseOffset, s.numBytes(), seed)
      case array: ArrayData =>
        // Unwrap UDTs to reach the element type.
        val elementType = dataType match {
          case udt: UserDefinedType[_] => udt.sqlType.asInstanceOf[ArrayType].elementType
          case ArrayType(et, _) => et
        }
        var result = seed
        var i = 0
        while (i < array.numElements()) {
          result = hash(array.get(i, elementType), elementType, result)
          i += 1
        }
        result
      case map: MapData =>
        val (kt, vt) = dataType match {
          case udt: UserDefinedType[_] =>
            val mapType = udt.sqlType.asInstanceOf[MapType]
            mapType.keyType -> mapType.valueType
          case MapType(kt, vt, _) => kt -> vt
        }
        val keys = map.keyArray()
        val values = map.valueArray()
        var result = seed
        var i = 0
        // Hash key then value for each entry, in map order.
        while (i < map.numElements()) {
          result = hash(keys.get(i, kt), kt, result)
          result = hash(values.get(i, vt), vt, result)
          i += 1
        }
        result
      case struct: InternalRow =>
        val types: Array[DataType] = dataType match {
          case udt: UserDefinedType[_] =>
            udt.sqlType.asInstanceOf[StructType].map(_.dataType).toArray
          case StructType(fields) => fields.map(_.dataType)
        }
        var result = seed
        var i = 0
        val len = struct.numFields
        while (i < len) {
          result = hash(struct.get(i, types(i)), types(i), result)
          i += 1
        }
        result
    }
  }
}
/**
* A MurMur3 Hash expression.
*
* We should use this hash function for both shuffle and bucket, so that we can guarantee shuffle
* and bucketing have same data distribution.
*/
@ExpressionDescription(
  usage = "_FUNC_(a1, a2, ...) - Returns a hash value of the arguments.")
case class Murmur3Hash(children: Seq[Expression], seed: Int) extends HashExpression[Int] {
  // Default seed 42, matching the generated-code default.
  def this(arguments: Seq[Expression]) = this(arguments, 42)
  override def dataType: DataType = IntegerType
  override def prettyName: String = "hash"
  override protected def hasherClassName: String = classOf[Murmur3_x86_32].getName
  // Delegate to the interpreted Murmur3 function; narrow the Long back to Int.
  override protected def computeHash(value: Any, dataType: DataType, seed: Int): Int = {
    Murmur3HashFunction.hash(value, dataType, seed).toInt
  }
}
// Interpreted Murmur3 implementation; seeds are narrowed to Int because
// Murmur3_x86_32 operates on 32-bit seeds.
object Murmur3HashFunction extends InterpretedHashFunction {
  override protected def hashInt(i: Int, seed: Long): Long = {
    Murmur3_x86_32.hashInt(i, seed.toInt)
  }
  override protected def hashLong(l: Long, seed: Long): Long = {
    Murmur3_x86_32.hashLong(l, seed.toInt)
  }
  override protected def hashUnsafeBytes(base: AnyRef, offset: Long, len: Int, seed: Long): Long = {
    Murmur3_x86_32.hashUnsafeBytes(base, offset, len, seed.toInt)
  }
}
/**
* Print the result of an expression to stderr (used for debugging codegen).
*/
case class PrintToStderr(child: Expression) extends UnaryExpression {
  // Pass-through expression: same type and value as the child.
  override def dataType: DataType = child.dataType
  protected override def nullSafeEval(input: Any): Any = input
  private val outputPrefix = s"Result of ${child.simpleString} is "
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    // The prefix is passed as a referenced object so codegen avoids embedding
    // an arbitrary string literal in the generated Java source.
    val outputPrefixField = ctx.addReferenceObj("outputPrefix", outputPrefix)
    nullSafeCodeGen(ctx, ev, c =>
      s"""
       | System.err.println($outputPrefixField + $c);
       | ${ev.value} = $c;
     """.stripMargin)
  }
}
/**
* A function throws an exception if 'condition' is not true.
*/
@ExpressionDescription(
  usage = "_FUNC_(condition) - Throw an exception if 'condition' is not true.")
case class AssertTrue(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {
  override def nullable: Boolean = true
  override def inputTypes: Seq[DataType] = Seq(BooleanType)
  // Always returns null when the assertion passes.
  override def dataType: DataType = NullType
  override def prettyName: String = "assert_true"
  private val errMsg = s"'${child.simpleString}' is not true!"
  // Interpreted path: a null or false condition is an assertion failure.
  override def eval(input: InternalRow) : Any = {
    val v = child.eval(input)
    if (v == null || java.lang.Boolean.FALSE.equals(v)) {
      throw new RuntimeException(errMsg)
    } else {
      null
    }
  }
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    val eval = child.genCode(ctx)
    // Use unnamed reference that doesn't create a local field here to reduce the number of fields
    // because errMsgField is used only when the value is null or false.
    val errMsgField = ctx.addReferenceObj(errMsg)
    // isNull/value are constants since the expression either throws or yields null.
    ExprCode(code = s"""${eval.code}
       |if (${eval.isNull} || !${eval.value}) {
       |  throw new RuntimeException($errMsgField);
       |}""".stripMargin, isNull = "true", value = "null")
  }
  override def sql: String = s"assert_true(${child.sql})"
}
/**
* A xxHash64 64-bit hash expression.
*/
case class XxHash64(children: Seq[Expression], seed: Long) extends HashExpression[Long] {
  // Default seed 42, mirroring Murmur3Hash's default.
  def this(arguments: Seq[Expression]) = this(arguments, 42L)
  override def dataType: DataType = LongType
  override def prettyName: String = "xxHash"
  override protected def hasherClassName: String = classOf[XXH64].getName
  // Delegate the interpreted path to the shared xxHash64 function.
  override protected def computeHash(value: Any, dataType: DataType, seed: Long): Long = {
    XxHash64Function.hash(value, dataType, seed)
  }
}
// Interpreted xxHash64 implementation backed by the XXH64 utility class.
object XxHash64Function extends InterpretedHashFunction {
  override protected def hashInt(i: Int, seed: Long): Long = XXH64.hashInt(i, seed)
  override protected def hashLong(l: Long, seed: Long): Long = XXH64.hashLong(l, seed)
  override protected def hashUnsafeBytes(base: AnyRef, offset: Long, len: Int, seed: Long): Long = {
    XXH64.hashUnsafeBytes(base, offset, len, seed)
  }
}
/**
* Returns the current database of the SessionCatalog.
*/
@ExpressionDescription(
  usage = "_FUNC_() - Returns the current database.",
  extended = "> SELECT _FUNC_()")
case class CurrentDatabase() extends LeafExpression with Unevaluable {
  // Unevaluable: the analyzer replaces this placeholder before execution.
  override def dataType: DataType = StringType
  override def foldable: Boolean = true
  override def nullable: Boolean = false
  override def prettyName: String = "current_database"
}
|
gioenn/xSpark
|
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/misc.scala
|
Scala
|
apache-2.0
| 21,433 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.system
import java.util
import java.util.Collections
import org.junit.Assert._
import org.junit.Test
import org.apache.samza.Partition
import org.apache.samza.config.TaskConfig
import org.apache.samza.serializers._
import org.apache.samza.system.chooser.MessageChooser
import org.apache.samza.system.chooser.DefaultChooser
import org.apache.samza.system.chooser.MockMessageChooser
import org.apache.samza.util.BlockingEnvelopeMap
import org.mockito.Mockito
import org.mockito.Mockito.{spy, when}
import scala.collection.JavaConverters._
class TestSystemConsumers {
def testPollIntervalMs {
val numEnvelopes = 1000
val system = "test-system"
val systemStreamPartition0 = new SystemStreamPartition(system, "some-stream", new Partition(0))
val systemStreamPartition1 = new SystemStreamPartition(system, "some-stream", new Partition(1))
val envelope = new IncomingMessageEnvelope(systemStreamPartition0, "1", "k", "v")
val consumer = new CustomPollResponseSystemConsumer(envelope)
var now = 0L
val systemAdmins = Mockito.mock(classOf[SystemAdmins])
Mockito.doReturn(Mockito.mock(classOf[SystemAdmin])).when(systemAdmins.getSystemAdmin(system))
val consumers = new SystemConsumers(new MockMessageChooser, Map(system -> consumer), systemAdmins,
new SerdeManager, new SystemConsumersMetrics,
SystemConsumers.DEFAULT_NO_NEW_MESSAGES_TIMEOUT,
SystemConsumers.DEFAULT_DROP_SERIALIZATION_ERROR,
TaskConfig.DEFAULT_POLL_INTERVAL_MS, clock = () => now)
consumers.register(systemStreamPartition0, "0")
consumers.register(systemStreamPartition1, "1234")
consumers.start
// Tell the consumer to respond with 1000 messages for SSP0, and no
// messages for SSP1.
consumer.setResponseSizes(numEnvelopes)
// Choose to trigger a refresh with data.
assertNull(consumers.choose())
// 2: First on start, second on choose.
assertEquals(2, consumer.polls)
assertEquals(2, consumer.lastPoll.size)
assertTrue(consumer.lastPoll.contains(systemStreamPartition0))
assertTrue(consumer.lastPoll.contains(systemStreamPartition1))
assertEquals(envelope, consumers.choose())
assertEquals(envelope, consumers.choose())
// We aren't polling because we're getting non-null envelopes.
assertEquals(2, consumer.polls)
// Advance the clock to trigger a new poll even though there are still
// messages.
now = TaskConfig.DEFAULT_POLL_INTERVAL_MS
assertEquals(envelope, consumers.choose())
// We polled even though there are still 997 messages in the unprocessed
// message buffer.
assertEquals(3, consumer.polls)
assertEquals(1, consumer.lastPoll.size)
// Only SSP1 was polled because we still have messages for SSP2.
assertTrue(consumer.lastPoll.contains(systemStreamPartition1))
// Now drain all messages for SSP0. There should be exactly 997 messages,
// since we have chosen 3 already, and we started with 1000.
(0 until (numEnvelopes - 3)).foreach { i =>
assertEquals(envelope, consumers.choose())
}
// Nothing left. Should trigger a poll here.
assertNull(consumers.choose())
assertEquals(4, consumer.polls)
assertEquals(2, consumer.lastPoll.size)
// Now we ask for messages from both again.
assertTrue(consumer.lastPoll.contains(systemStreamPartition0))
assertTrue(consumer.lastPoll.contains(systemStreamPartition1))
}
def testBasicSystemConsumersFunctionality {
val system = "test-system"
val systemStreamPartition = new SystemStreamPartition(system, "some-stream", new Partition(1))
val envelope = new IncomingMessageEnvelope(systemStreamPartition, "1", "k", "v")
val consumer = new CustomPollResponseSystemConsumer(envelope)
var now = 0
val systemAdmins = Mockito.mock(classOf[SystemAdmins])
Mockito.doReturn(Mockito.mock(classOf[SystemAdmin])).when(systemAdmins.getSystemAdmin(system))
val consumers = new SystemConsumers(new MockMessageChooser, Map(system -> consumer), systemAdmins,
new SerdeManager, new SystemConsumersMetrics,
SystemConsumers.DEFAULT_NO_NEW_MESSAGES_TIMEOUT,
SystemConsumers.DEFAULT_DROP_SERIALIZATION_ERROR,
TaskConfig.DEFAULT_POLL_INTERVAL_MS, clock = () => now)
consumers.register(systemStreamPartition, "0")
consumers.start
// Start should trigger a poll to the consumer.
assertEquals(1, consumer.polls)
// Tell the consumer to start returning messages when polled.
consumer.setResponseSizes(1)
// Choose to trigger a refresh with data.
assertNull(consumers.choose())
// Choose should have triggered a second poll, since no messages are available.
assertEquals(2, consumer.polls)
// Choose a few times. This time there is no data.
assertEquals(envelope, consumers.choose())
assertNull(consumers.choose())
assertNull(consumers.choose())
// Return more than one message this time.
consumer.setResponseSizes(2)
// Choose to trigger a refresh with data.
assertNull(consumers.choose())
// Increase clock interval.
now = TaskConfig.DEFAULT_POLL_INTERVAL_MS
// We get two messages now.
assertEquals(envelope, consumers.choose())
// Should not poll even though clock interval increases past interval threshold.
assertEquals(2, consumer.polls)
assertEquals(envelope, consumers.choose())
assertNull(consumers.choose())
}
  // Verifies that SystemConsumers propagates register/start/stop to both the
  // chooser and the underlying consumer exactly once each.
  @Test
  def testSystemConumersShouldRegisterStartAndStopChooser {
    val system = "test-system"
    val systemStreamPartition = new SystemStreamPartition(system, "some-stream", new Partition(1))
    // Counters/maps mutated by the stub consumer and chooser below.
    var consumerStarted = 0
    var consumerStopped = 0
    var consumerRegistered = Map[SystemStreamPartition, String]()
    var chooserStarted = 0
    var chooserStopped = 0
    var chooserRegistered = Map[SystemStreamPartition, String]()
    val consumer = Map(system -> new SystemConsumer {
      def start = consumerStarted += 1
      def stop = consumerStopped += 1
      def register(systemStreamPartition: SystemStreamPartition, offset: String) = consumerRegistered += systemStreamPartition -> offset
      def poll(systemStreamPartitions: java.util.Set[SystemStreamPartition], timeout: Long) = Map[SystemStreamPartition, java.util.List[IncomingMessageEnvelope]]().asJava
    })
    val systemAdmins = Mockito.mock(classOf[SystemAdmins])
    Mockito.when(systemAdmins.getSystemAdmin(system)).thenReturn(Mockito.mock(classOf[SystemAdmin]))
    val consumers = new SystemConsumers(new MessageChooser {
      def update(envelope: IncomingMessageEnvelope) = Unit
      def choose = null
      def start = chooserStarted += 1
      def stop = chooserStopped += 1
      def register(systemStreamPartition: SystemStreamPartition, offset: String) = chooserRegistered += systemStreamPartition -> offset
    }, consumer, systemAdmins)
    consumers.register(systemStreamPartition, "0")
    consumers.start
    consumers.stop
    // Each lifecycle hook fired exactly once, and the offset was forwarded.
    assertEquals(1, chooserStarted)
    assertEquals(1, chooserStopped)
    assertEquals(1, chooserRegistered.size)
    assertEquals("0", chooserRegistered(systemStreamPartition))
    assertEquals(1, consumerStarted)
    assertEquals(1, consumerStopped)
    assertEquals(1, consumerRegistered.size)
    assertEquals("0", consumerRegistered(systemStreamPartition))
  }
@Test
def testThrowSystemConsumersExceptionWhenTheSystemDoesNotHaveConsumer() {
  // Registering an SSP for a system that has no SystemConsumer must fail with
  // SystemConsumersException specifically — any other Throwable fails the test.
  val system = "test-system"
  val system2 = "test-system2"
  // Only `system` gets a consumer; `system2` is deliberately left without one.
  // (An unused SSP for `system` was removed from the original.)
  val systemStreamPartition2 = new SystemStreamPartition(system2, "some-stream", new Partition(1))
  var started = 0
  var stopped = 0
  var registered = Map[SystemStreamPartition, String]()
  val consumer = Map(system -> new SystemConsumer {
    def start {}
    def stop {}
    def register(systemStreamPartition: SystemStreamPartition, offset: String) {}
    def poll(systemStreamPartitions: java.util.Set[SystemStreamPartition], timeout: Long) = Map[SystemStreamPartition, java.util.List[IncomingMessageEnvelope]]().asJava
  })
  val consumers = new SystemConsumers(new MessageChooser {
    def update(envelope: IncomingMessageEnvelope) = () // idiomatic no-op (was `= Unit`)
    def choose = null
    def start = started += 1
    def stop = stopped += 1
    def register(systemStreamPartition: SystemStreamPartition, offset: String) = registered += systemStreamPartition -> offset
  }, consumer, null)
  // it should throw a SystemConsumersException because system2 does not have a consumer
  var caughtRightException = false
  try {
    consumers.register(systemStreamPartition2, "0")
  } catch {
    case e: SystemConsumersException => caughtRightException = true
    case _: Throwable => caughtRightException = false
  }
  assertTrue("suppose to throw SystemConsumersException, but apparently it did not", caughtRightException)
}
// Exercises SystemConsumers' `dropDeserializationError` flag: when false a
// Serde failure must surface as SystemConsumersException; when true the bad
// message is skipped and consumption continues with the next good message.
@Test
def testDroppingMsgOrThrowExceptionWhenSerdeFails() {
val system = "test-system"
val systemStreamPartition = new SystemStreamPartition(system, "some-stream", new Partition(1))
val msgChooser = new DefaultChooser
val consumer = Map(system -> new SerializingConsumer)
// NOTE(review): SerializingConsumer enqueues either a byte[] or a String
// payload; one of the two forms makes this StringSerde fail in the
// SerdeManager (presumably the String payload, since fromBytes expects
// byte[]) — confirm against SerdeManager.
val systemMessageSerdes = Map(system -> (new StringSerde("UTF-8")).asInstanceOf[Serde[Object]])
val serdeManager = new SerdeManager(systemMessageSerdes = systemMessageSerdes)
val systemAdmins = Mockito.mock(classOf[SystemAdmins])
Mockito.when(systemAdmins.getSystemAdmin(system)).thenReturn(Mockito.mock(classOf[SystemAdmin]))
// throw exceptions when the deserialization has error
val consumers = new SystemConsumers(msgChooser, consumer, systemAdmins, serdeManager, dropDeserializationError = false)
consumers.register(systemStreamPartition, "0")
consumers.start
consumer(system).putStringMessage
consumer(system).putBytesMessage
var caughtRightException = false
try {
consumers.choose()
} catch {
case e: SystemConsumersException => caughtRightException = true
case _: Throwable => caughtRightException = false
}
assertTrue("suppose to throw SystemConsumersException", caughtRightException)
consumers.stop
// it should not throw exceptions when deserialization fails if dropDeserializationError is set to true
val consumers2 = new SystemConsumers(msgChooser, consumer, systemAdmins, serdeManager, dropDeserializationError = true)
consumers2.register(systemStreamPartition, "0")
consumers2.start
consumer(system).putBytesMessage
consumer(system).putStringMessage
consumer(system).putBytesMessage
var notThrowException = true;
try {
consumers2.choose()
} catch {
case e: Throwable => notThrowException = false
}
assertTrue("it should not throw any exception", notThrowException)
// The undeserializable message was dropped; the following good message must
// still come through.
var msgEnvelope = Some(consumers2.choose())
assertTrue("Consumer did not succeed in receiving the second message after Serde exception in choose", msgEnvelope.get != null)
consumers2.stop
// ensure that the system consumer will continue after poll() method ignored a Serde exception
consumer(system).putStringMessage
consumer(system).putBytesMessage
notThrowException = true;
try {
consumers2.start
} catch {
case e: Throwable => notThrowException = false
}
assertTrue("SystemConsumer start should not throw any Serde exception", notThrowException)
msgEnvelope = null
msgEnvelope = Some(consumers2.choose())
assertTrue("Consumer did not succeed in receiving the second message after Serde exception in poll", msgEnvelope.get != null)
consumers2.stop
}
// Verifies that once an SSP's end-of-stream envelope has been handed to the
// chooser, SystemConsumers stops including that SSP in subsequent poll() calls.
@Test
def testSystemConsumersShouldNotPollEndOfStreamSSPs {
val system = "test-system"
val stream = "some-stream"
val systemStreamPartition1 = new SystemStreamPartition(system, stream, new Partition(1))
val systemStreamPartition2 = new SystemStreamPartition(system, stream, new Partition(2))
val normalEnvelope = new IncomingMessageEnvelope(systemStreamPartition1, "1", "k", "v")
val endOfStreamEnvelope = IncomingMessageEnvelope.buildEndOfStreamEnvelope(systemStreamPartition2)
val consumer = new CustomPollResponseSystemConsumer(normalEnvelope)
val systemAdmins = Mockito.mock(classOf[SystemAdmins])
Mockito.when(systemAdmins.getSystemAdmin(system)).thenReturn(Mockito.mock(classOf[SystemAdmin]))
val consumers = new SystemConsumers(new MockMessageChooser, Map(system -> consumer),
systemAdmins, new SerdeManager, new SystemConsumersMetrics,
SystemConsumers.DEFAULT_NO_NEW_MESSAGES_TIMEOUT,
SystemConsumers.DEFAULT_DROP_SERIALIZATION_ERROR,
TaskConfig.DEFAULT_POLL_INTERVAL_MS, clock = () => 0)
consumers.register(systemStreamPartition1, "0")
consumers.register(systemStreamPartition2, "0")
consumers.start
// Start should trigger a poll to the consumer.
assertEquals(1, consumer.polls)
// Both registered SSPs are in the initial poll set.
assertEquals(2, consumer.lastPoll.size())
// Tell the consumer to start returning messages when polled:
// a normal message for partition 1, end-of-stream for partition 2.
val nextResponse = Map[SystemStreamPartition, java.util.List[IncomingMessageEnvelope]](
systemStreamPartition1 -> Collections.singletonList(normalEnvelope),
systemStreamPartition2 -> Collections.singletonList(endOfStreamEnvelope)
)
consumer.setNextResponse(nextResponse)
// Choose to trigger a refresh with data.
assertNull(consumers.choose())
// Choose should have triggered a second poll, since no messages are available.
assertEquals(2, consumer.polls)
assertEquals(2, consumer.lastPoll.size())
// Choose a few times and let chooser handle the end of stream message
assertNotNull(consumers.choose())
assertNotNull(consumers.choose())
consumers.tryUpdate(systemStreamPartition1)
// Now assuming that chooser has processed end of stream message,
// tryUpdate shouldn't add ssp back to emptySystemStreamPartitionsBySystem
consumers.tryUpdate(systemStreamPartition2)
assertNull(consumers.choose())
assertEquals(3, consumer.polls)
// SystemConsumers should poll only one partition: ssp1
assertEquals(1, consumer.lastPoll.size())
assertTrue(consumer.lastPoll.contains(systemStreamPartition1))
}
@Test
def testSystemConsumersRegistration {
  // Smoke test: registering an SSP against a fully mocked consumer stack
  // must complete without throwing.
  val systemName = "test-system"
  val streamName = "some-stream"
  val ssp1 = new SystemStreamPartition(systemName, streamName, new Partition(1))
  val ssp2 = new SystemStreamPartition(systemName, streamName, new Partition(2))
  val mockConsumer = Mockito.mock(classOf[SystemConsumer])
  val admins = Mockito.mock(classOf[SystemAdmins])
  Mockito.when(admins.getSystemAdmin(systemName)).thenReturn(Mockito.mock(classOf[SystemAdmin]))
  val underTest = new SystemConsumers(
    new MockMessageChooser,
    Map(systemName -> mockConsumer),
    admins,
    new SerdeManager,
    new SystemConsumersMetrics,
    SystemConsumers.DEFAULT_NO_NEW_MESSAGES_TIMEOUT,
    SystemConsumers.DEFAULT_DROP_SERIALIZATION_ERROR,
    TaskConfig.DEFAULT_POLL_INTERVAL_MS,
    clock = () => 0)
  underTest.register(ssp1, "0")
}
// Verifies key-bucket routing when elasticity is enabled (elasticity factor 2):
// only envelopes whose SSP-with-keyBucket was registered with SystemConsumers
// should count toward the choseObject metric.
@Test
def testSystemConsumersElasticityEnabled: Unit = {
val system = "test-system"
// create two key buckets "ssp0" and "ssp1" within one SSP "ssp"
// and two IME such that one of their ssp keybucket maps to ssp0 and the other one maps to ssp1
// register only "ssp0" with the SystemConsumers
val ssp = new SystemStreamPartition(system, "some-stream", new Partition(0))
val ssp0 = new SystemStreamPartition(system, "some-stream", new Partition(0), 0)
val ssp1 = new SystemStreamPartition(system, "some-stream", new Partition(0), 1)
val envelope = spy(new IncomingMessageEnvelope(ssp, "100", "key", "value"))
val envelope00 = spy(new IncomingMessageEnvelope(ssp, "0", "key0", "value0"))
val envelope01 = spy(new IncomingMessageEnvelope(ssp, "1", "key1", "value0"))
// Stub the elasticity-aware overload (argument = elasticity factor) so each
// envelope resolves to a fixed key bucket.
when(envelope00.getSystemStreamPartition(2)).thenReturn(ssp0)
when(envelope01.getSystemStreamPartition(2)).thenReturn(ssp1)
val consumer = new CustomPollResponseSystemConsumer(envelope)
var now = 0
val systemAdmins = Mockito.mock(classOf[SystemAdmins])
when(systemAdmins.getSystemAdmin(system)).thenReturn(Mockito.mock(classOf[SystemAdmin]))
val systemConsumersMetrics = new SystemConsumersMetrics
// The trailing constructor argument (2) is the elasticity factor.
val systemConsumers = new SystemConsumers(new MockMessageChooser, Map(system -> consumer), systemAdmins,
new SerdeManager, systemConsumersMetrics,
SystemConsumers.DEFAULT_NO_NEW_MESSAGES_TIMEOUT,
SystemConsumers.DEFAULT_DROP_SERIALIZATION_ERROR,
TaskConfig.DEFAULT_POLL_INTERVAL_MS, clock = () => now, 2)
systemConsumers.register(ssp0, "0")
systemConsumers.start
// Tell the consumer to return envelope00
val nextResponse00 = Map[SystemStreamPartition, java.util.List[IncomingMessageEnvelope]](
ssp -> Collections.singletonList(envelope00)
)
consumer.setNextResponse(nextResponse00)
// Choose to trigger a refresh with data.
assertNull(systemConsumers.choose())
assertEquals(envelope00, systemConsumers.choose())
assertEquals(1, systemConsumersMetrics.choseObject.getCount)
// Tell the consumer to return envelope01
val nextResponse01 = Map[SystemStreamPartition, java.util.List[IncomingMessageEnvelope]](
ssp -> Collections.singletonList(envelope01)
)
consumer.setNextResponse(nextResponse01)
// envelope01 does not belong to ssp_keybucket processed by the container
// and hence the metric for choseObject should not be updated
assertNull(systemConsumers.choose()) //refresh
assertEquals(envelope01, systemConsumers.choose())
assertEquals(1, systemConsumersMetrics.choseObject.getCount)
}
/**
 * A simple MockSystemConsumer that keeps track of what was polled, and lets
 * you define how many envelopes to return in the poll response. You can
 * supply the envelope to use for poll responses through the constructor.
 * You can also directly set the next response by calling setNextResponse.
 */
private class CustomPollResponseSystemConsumer(envelope: IncomingMessageEnvelope) extends SystemConsumer {
  // Number of poll() invocations observed so far.
  var polls = 0
  // Response handed back by subsequent poll() calls.
  var pollResponse = Map[SystemStreamPartition, java.util.List[IncomingMessageEnvelope]]()
  // Defensive copy of the SSP set passed to the most recent poll().
  var lastPoll: java.util.Set[SystemStreamPartition] = null
  def start {}
  def stop {}
  def register(systemStreamPartition: SystemStreamPartition, offset: String) {}
  def poll(systemStreamPartitions: java.util.Set[SystemStreamPartition], timeout: Long) = {
    polls += 1
    // Copy the set: callers may reuse/mutate theirs after poll() returns.
    lastPoll = new util.HashSet[SystemStreamPartition](systemStreamPartitions)
    pollResponse.asJava
  }
  /** Make subsequent poll() calls return `numEnvelopes` copies of the constructor envelope. */
  def setResponseSizes(numEnvelopes: Int) {
    val q = new java.util.ArrayList[IncomingMessageEnvelope]()
    (0 until numEnvelopes).foreach { i => q.add(envelope) }
    // Bug fix: pollResponse was previously overwritten with an empty map
    // immediately after being populated, turning this method into a no-op.
    pollResponse = Map(envelope.getSystemStreamPartition -> q)
  }
  /** Directly set the response returned by subsequent poll() calls. */
  def setNextResponse(nextResponse: Map[SystemStreamPartition, java.util.List[IncomingMessageEnvelope]]) {
    pollResponse = nextResponse
  }
}
/**
 * A simple consumer that provides two extra methods: one is to put a bytes
 * format message and the other to put a string format message, so tests can
 * feed both deserializable and non-deserializable payloads through a Serde.
 */
private class SerializingConsumer extends BlockingEnvelopeMap {
// Fixed SSP used for every message this test consumer enqueues.
val systemStreamPartition = new SystemStreamPartition("test-system", "some-stream", new Partition(1))
// Enqueue a message whose payload is a byte array ("test".getBytes).
def putBytesMessage() {
put(systemStreamPartition, new IncomingMessageEnvelope(systemStreamPartition, "0", "0", "test".getBytes()))
}
// Enqueue a message whose payload is a java.lang.String ("test").
def putStringMessage() {
put(systemStreamPartition, new IncomingMessageEnvelope(systemStreamPartition, "0", "1", "test"))
}
def start() {}
def stop() {}
// Expose BlockingEnvelopeMap's register so tests can register SSPs directly.
override def register(systemStreamPartition: SystemStreamPartition, offset: String): Unit = {
super[BlockingEnvelopeMap].register(systemStreamPartition, offset)
}
}
}
// Helper for Java-side tests: builds a SystemConsumers backed by a
// DefaultChooser from a Java map of system name -> SystemConsumer.
object TestSystemConsumers {
def getSystemConsumers(consumers: java.util.Map[String, SystemConsumer], systemAdmins: SystemAdmins = SystemAdmins.empty()) : SystemConsumers = {
new SystemConsumers(new DefaultChooser, consumers.asScala.toMap, systemAdmins)
}
}
|
apache/samza
|
samza-core/src/test/scala/org/apache/samza/system/TestSystemConsumers.scala
|
Scala
|
apache-2.0
| 21,444 |
/*
package dao
import java.util.UUID
import database._
import models.{ReportCardRescheduledAtom, ReportCardRescheduledLike}
import org.joda.time.{LocalDate, LocalTime}
import play.api.inject.guice.GuiceableModule
import slick.dbio.DBIO
import slick.jdbc.PostgresProfile.api._
import slick.lifted.TableQuery
import utils.date.DateTimeOps._
// TODO Rewrite Test
// TODO Add Annotation Test
final class ReportCardRescheduledDaoSpec extends AbstractDaoSpec[ReportCardRescheduledTable, ReportCardRescheduledDb, ReportCardRescheduledLike] {
import AbstractDaoSpec._
private val amount = 50
private lazy val privateLabworks = labworks.take(4)
private lazy val reportCardEntries = populateReportCardEntries(amount, 8, withReschedule = true)(privateLabworks, students)
def rescheduled(reportCardEntryId: UUID, i: Int): ReportCardRescheduledDb = {
val rDate = LocalDate.now.plusDays(i)
val rStart = LocalTime.now.plusHours(i)
val rEnd = rStart.plusHours(1)
ReportCardRescheduledDb(reportCardEntryId, rDate.sqlDate, rStart.sqlTime, rEnd.sqlTime, randomRoom.id)
}
override protected def name = "reportCardRescheduleSpec"
override protected val dbEntity: ReportCardRescheduledDb =
rescheduled(reportCardEntries.head.id, 0)
override protected val invalidDuplicateOfDbEntity: ReportCardRescheduledDb =
dbEntity.copy(id = UUID.randomUUID)
override protected val invalidUpdateOfDbEntity: ReportCardRescheduledDb =
dbEntity.copy(reportCardEntry = UUID.randomUUID)
override protected val validUpdateOnDbEntity: ReportCardRescheduledDb =
dbEntity.copy(room = randomRoom.id, reason = Some("reason"), date = dbEntity.date.localDate.plusWeeks(1).sqlDate)
override protected val dbEntities: List[ReportCardRescheduledDb] = {
val entries = reportCardEntries.drop(1)
(0 until amount).map(i => rescheduled(entries(i).id, i)).toList
}
override protected val lwmAtom: ReportCardRescheduledAtom = ReportCardRescheduledAtom(
dbEntity.date.localDate,
dbEntity.start.localTime,
dbEntity.end.localTime,
rooms.find(_.id == dbEntity.room).get.toUniqueEntity,
dbEntity.reason,
dbEntity.id
)
override protected val dependencies = DBIO.seq(
TableQuery[DegreeTable].forceInsertAll(degrees),
TableQuery[UserTable].forceInsertAll(employees ++ students),
TableQuery[SemesterTable].forceInsertAll(semesters),
TableQuery[CourseTable].forceInsertAll(courses),
TableQuery[LabworkTable].forceInsertAll(labworks),
TableQuery[RoomTable].forceInsertAll(rooms),
TableQuery[ReportCardEntryTable].forceInsertAll(reportCardEntries)
)
override protected val dao: ReportCardRescheduledDao = app.injector.instanceOf(classOf[ReportCardRescheduledDao])
override protected def bindings: Seq[GuiceableModule] = Seq.empty
}
*/
|
THK-ADV/lwm-reloaded
|
test/dao/ReportCardRescheduledDaoSpec.scala
|
Scala
|
mit
| 2,811 |
import sbt._
import sbt.{Project, Build}
import sbt.Keys._
// sbt 0.13-style build definition for the "stensions" project.
// NOTE(review): `sbt.Build` and `Project.defaultSettings` are deprecated since
// sbt 0.13.13 and removed in sbt 1.x; migrating to a build.sbt is recommended.
object OurBuild extends Build {
// Settings shared across all projects in this build.
override lazy val settings = super.settings ++ Seq(
scalaVersion := "2.10.4",
organization := "org.wennergr",
version := "1-SNAPSHOT"
)
lazy val stensionLibrary: Project = Project(
id = "stensions",
base = file("stensions"),
settings = Project.defaultSettings
)
// Adds the sbt-dependency-graph plugin's tasks to the project.
.settings(net.virtualvoid.sbt.graph.Plugin.graphSettings: _*)
}
|
wennergr/stensions
|
project/OurBuild.scala
|
Scala
|
apache-2.0
| 464 |
/**
* The MIT License (MIT)
* <p/>
* Copyright (c) 2016 ScalateKids
* <p/>
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* <p/>
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
* <p/>
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
* <p/>
* @author Scalatekids
* @version 1.0
* @since 1.0
*/
package com.actorbase.driver.exceptions
// Driver-level exceptions for the Actorbase client. Each carries a
// human-readable message forwarded to java.lang.Exception.

// Authentication / credential errors.
case class WrongCredentialsExc(msg: String) extends Exception(msg)
case class WrongPasswordExc(msg: String) extends Exception(msg)
case class WrongNewPasswordExc(msg: String) extends Exception(msg)
// Collection lifecycle errors.
case class CollectionAlreadyExistsExc(msg: String) extends Exception(msg)
case class UndefinedCollectionExc(msg: String) extends Exception(msg)
// Missing file for an import/export operation.
case class UndefinedFileExc(msg: String) extends Exception(msg)
// Key-level errors within a collection.
case class DuplicateKeyExc(msg: String) extends Exception(msg)
case class UndefinedKeyExc(msg: String) extends Exception(msg)
// File content that could not be parsed.
case class MalformedFileExc(msg: String) extends Exception(msg)
// User-management errors.
case class UsernameAlreadyExistsExc(msg: String) extends Exception(msg)
case class UndefinedUsernameExc(msg: String) extends Exception(msg)
// Unspecified server-side failure.
case class InternalErrorExc(msg: String) extends Exception(msg)
|
ScalateKids/Actorbase-Client
|
src/main/scala/com/actorbase/driver/exceptions/Exceptions.scala
|
Scala
|
mit
| 2,106 |
package de.tudarmstadt.lt.flinkdt.pipes
import de.tudarmstadt.lt.flinkdt.tasks.DSTaskConfig
import java.text.SimpleDateFormat
import java.util.Date
import org.apache.flink.api.scala._
import java.time.Duration
import de.tudarmstadt.lt.flinkdt.types.CT2
import scala.reflect.ClassTag
import org.apache.flink.api.common.typeinfo.TypeInformation
import de.tudarmstadt.lt.flinkdt.Implicits._
import org.apache.flink.util.Collector
// Skeleton of a Markov-Clustering (MCL) job on Flink. The core iteration in
// apply() is still commented out, so apply() is currently a no-op.
object MCL {
def apply[CT <: CT2 : ClassTag : TypeInformation](ct:DataSet[CT]): Unit = {
// NOTE(review): unused until the commented-out iteration below is enabled.
val maxIterations = 10
// interpret CT as edge (from vertex ct.a to vertex ct.b)
// start iteration
// val finalM = ct.iterateWithTermination(maxIterations) {
// currentM =>
// val newM = currentM
// // expand (M x M)
// .map( a => a) // symmetry
// .groupBy("a")
// .reduceGroup((iter, out: Collector[CT]) => {
// iter.foreach { e =>
// out.collect(e)
// }
// })
// }
// //.withForwardedFields("pageId")
//
// // terminate if no rank update was significant
// val termination = currentRanks.join(newRanks).where("pageId").equalTo("pageId") {
// (current, next, out: Collector[Int]) =>
// // check for significant update
// if (math.abs(current.rank - next.rank) > EPSILON) out.collect(1)
// }
// (newRanks, termination)
// }
}
// Entry point: loads the task config, records a provisional start timestamp
// into the job config, then overwrites it with final start/end/duration info.
def main(args: Array[String]): Unit = {
DSTaskConfig.load(DSTaskConfig.resolveConfig(args))
val tf = new SimpleDateFormat("yyyy-MM-dd\'T\'HH:mm:ssz")
val start = System.currentTimeMillis()
// Provisional record: end and duration are unknown until the job finishes.
var info = s"main: ${getClass.getName}\nstart: ${tf.format(new Date(start))} \nend: -- \nduration: -- "
DSTaskConfig.writeConfig(additional_comments = info)
val env: ExecutionEnvironment = ExecutionEnvironment.getExecutionEnvironment
// MCL(env.readCT2r(""))
val end = System.currentTimeMillis()
val dur = Duration.ofMillis(end-start)
// Final record; the "ms" component is the remainder after whole minutes
// (it therefore includes the seconds).
info = s"main: ${getClass.getName}\nstart: ${tf.format(new Date(start))} \nend: ${tf.format(new Date(end))} \nduration: ${dur.toHours} h ${dur.minusHours(dur.toHours).toMinutes} m ${dur.minusMinutes(dur.toMinutes).toMillis} ms"
DSTaskConfig.writeConfig(additional_comments = info, overwrite = true)
}
}
//import java.lang.Iterable
//
//import org.apache.flink.api.common.functions.GroupReduceFunction
//import org.apache.flink.api.java.utils.ParameterTool
//import org.apache.flink.api.scala._
//import org.apache.flink.examples.java.graph.util.PageRankData
//import org.apache.flink.api.java.aggregation.Aggregations.SUM
//
//import org.apache.flink.util.Collector
//
//import scala.collection.JavaConverters._
//
///**
// * A basic implementation of the Page Rank algorithm using a bulk iteration.
// *
// * This implementation requires a set of pages and a set of directed links as input and works as
// * follows.
// *
// * In each iteration, the rank of every page is evenly distributed to all pages it points to. Each
// * page collects the partial ranks of all pages that point to it, sums them up, and applies a
// * dampening factor to the sum. The result is the new rank of the page. A new iteration is started
// * with the new ranks of all pages. This implementation terminates after a fixed number of
// * iterations. This is the Wikipedia entry for the
// * [[http://en.wikipedia.org/wiki/Page_rank Page Rank algorithm]]
// *
// * Input files are plain text files and must be formatted as follows:
// *
// * - Pages represented as an (long) ID separated by new-line characters.
// * For example `"1\n2\n12\n42\n63"` gives five pages with IDs 1, 2, 12, 42, and 63.
// * - Links are represented as pairs of page IDs which are separated by space characters. Links
// * are separated by new-line characters.
// * For example `"1 2\n2 12\n1 12\n42 63"` gives four (directed) links (1)->(2), (2)->(12),
// * (1)->(12), and (42)->(63). For this simple implementation it is required that each page has
// * at least one incoming and one outgoing link (a page can point to itself).
// *
// * Usage:
// * {{{
// * PageRankBasic --pages <path> --links <path> --output <path> --numPages <n> --iterations <n>
// * }}}
// *
// * If no parameters are provided, the program is run with default data from
// * [[org.apache.flink.examples.java.graph.util.PageRankData]] and 10 iterations.
// *
// * This example shows how to use:
// *
// * - Bulk Iterations
// * - Default Join
// * - Configure user-defined functions using constructor parameters.
// *
// */
//object PageRankBasic {
//
// private final val DAMPENING_FACTOR: Double = 0.85
// private final val EPSILON: Double = 0.0001
//
// def main(args: Array[String]) {
//
// val params: ParameterTool = ParameterTool.fromArgs(args)
// println("Usage: PageRankBasic " +
// "--pages <path> --links <path> --output <path> --numPages <n> --iterations <n>")
//
// // set up execution environment
// val env = ExecutionEnvironment.getExecutionEnvironment
//
// // make parameters available in the web interface
// env.getConfig.setGlobalJobParameters(params)
//
// // read input data
// val (pages, numPages) = getPagesDataSet(env, params)
// val links = getLinksDataSet(env, params)
// val maxIterations = params.getInt("iterations", 10)
//
// // assign initial ranks to pages
// val pagesWithRanks = pages.map(p => Page(p, 1.0 / numPages)).withForwardedFields("*->pageId")
//
// // build adjacency list from link input
// val adjacencyLists = links
// .groupBy("sourceId").reduceGroup( new GroupReduceFunction[Link, AdjacencyList] {
// override def reduce(values: Iterable[Link], out: Collector[AdjacencyList]): Unit = {
// var outputId = -1L
// val outputList = values.asScala map { t => outputId = t.sourceId; t.targetId }
// out.collect(new AdjacencyList(outputId, outputList.toArray))
// }
// })
//
// // start iteration
// val finalRanks = pagesWithRanks.iterateWithTermination(maxIterations) {
// currentRanks =>
// val newRanks = currentRanks
// // distribute ranks to target pages
// .join(adjacencyLists).where("pageId").equalTo("sourceId") {
// (page, adjacent, out: Collector[Page]) =>
// val targets = adjacent.targetIds
// val len = targets.length
// adjacent.targetIds foreach { t => out.collect(Page(t, page.rank /len )) }
// }
// // collect ranks and sum them up
// .groupBy("pageId").aggregate(SUM, "rank")
// // apply dampening factor
// .map { p =>
// Page(p.pageId, (p.rank * DAMPENING_FACTOR) + ((1 - DAMPENING_FACTOR) / numPages))
// }.withForwardedFields("pageId")
//
// // terminate if no rank update was significant
// val termination = currentRanks.join(newRanks).where("pageId").equalTo("pageId") {
// (current, next, out: Collector[Int]) =>
// // check for significant update
// if (math.abs(current.rank - next.rank) > EPSILON) out.collect(1)
// }
// (newRanks, termination)
// }
//
// val result = finalRanks
//
// // emit result
// if (params.has("output")) {
// result.writeAsCsv(params.get("output"), "\n", " ")
// // execute program
// env.execute("Basic PageRank Example")
// } else {
// println("Printing result to stdout. Use --output to specify output path.")
// result.print()
// }
// }
//
// // *************************************************************************
// // USER TYPES
// // *************************************************************************
//
// case class Link(sourceId: Long, targetId: Long)
//
// case class Page(pageId: Long, rank: Double)
//
// case class AdjacencyList(sourceId: Long, targetIds: Array[Long])
//
// // *************************************************************************
// // UTIL METHODS
// // *************************************************************************
//
// private def getPagesDataSet(env: ExecutionEnvironment, params: ParameterTool):
// (DataSet[Long], Long) = {
// if (params.has("pages") && params.has("numPages")) {
// val pages = env
// .readCsvFile[Tuple1[Long]](params.get("pages"), fieldDelimiter = " ", lineDelimiter = "\n")
// .map(x => x._1)
// (pages, params.getLong("numPages"))
// } else {
// println("Executing PageRank example with default pages data set.")
// println("Use --pages and --numPages to specify file input.")
// (env.generateSequence(1, 15), PageRankData.getNumberOfPages)
// }
// }
//
// private def getLinksDataSet(env: ExecutionEnvironment, params: ParameterTool):
// DataSet[Link] = {
// if (params.has("links")) {
// env.readCsvFile[Link](params.get("links"), fieldDelimiter = " ",
// includedFields = Array(0, 1))
// } else {
// println("Executing PageRank example with default links data set.")
// println("Use --links to specify file input.")
// val edges = PageRankData.EDGES.map { case Array(v1, v2) => Link(v1.asInstanceOf[Long],
// v2.asInstanceOf[Long])}
// env.fromCollection(edges)
// }
// }
//
//}
|
remstef/flinkfun
|
src/main/scala/de/tudarmstadt/lt/flinkdt/pipes/MCL.scala
|
Scala
|
apache-2.0
| 9,399 |
/* __ *\\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
** /____/\\___/_/ |_/____/_/ | | **
** |/ **
\\* */
package scala
package collection
package mutable
import java.lang.Integer.rotateRight
import scala.util.hashing.byteswap32
/** This class can be used to construct data structures that are based
* on hashtables. Class `HashTable[A]` implements a hashtable
* that maps keys of type `A` to values of the fully abstract
* member type `Entry`. Classes that make use of `HashTable`
* have to provide an implementation for `Entry`.
*
* There are mainly two parameters that affect the performance of a hashtable:
* the <i>initial size</i> and the <i>load factor</i>. The <i>size</i>
* refers to the number of <i>buckets</i> in the hashtable, and the <i>load
* factor</i> is a measure of how full the hashtable is allowed to get before
* its size is automatically doubled. Both parameters may be changed by
* overriding the corresponding values in class `HashTable`.
*
* @author Matthias Zenger
* @author Martin Odersky
* @version 2.0, 31/12/2006
* @since 1
*
* @tparam A type of the elements contained in this hash table.
*/
trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashUtils[A] {
// Replacing Entry type parameter by abstract type member here allows to not expose to public
// implementation-specific entry classes such as `DefaultEntry` or `LinkedEntry`.
// However, I'm afraid it's too late now for such breaking change.
import HashTable._
@transient protected var _loadFactor = defaultLoadFactor
/** The actual hash table.
*/
@transient protected var table: Array[HashEntry[A, Entry]] = new Array(initialCapacity)
/** The number of mappings contained in this hash table.
*/
@transient protected var tableSize: Int = 0
/** The next size value at which to resize (capacity * load factor).
*/
@transient protected var threshold: Int = initialThreshold(_loadFactor)
/** The array keeping track of the number of elements in 32 element blocks.
*/
@transient protected var sizemap: Array[Int] = null
@transient protected var seedvalue: Int = tableSizeSeed
protected def tableSizeSeed = Integer.bitCount(table.length - 1)
/** The initial size of the hash table.
*/
protected def initialSize: Int = 16
/** The initial threshold.
*/
private def initialThreshold(_loadFactor: Int): Int = newThreshold(_loadFactor, initialCapacity)
private def initialCapacity = capacity(initialSize)
// Index of the highest-numbered bucket holding at least one entry
// (0 when the table is entirely empty); used as the starting point for
// entry iteration, which walks buckets downward.
private def lastPopulatedIndex = {
var idx = table.length - 1
while (table(idx) == null && idx > 0)
idx -= 1
idx
}
/**
 * Initializes the collection from the input stream. `readEntry` will be called for each
 * entry to be read from the input stream.
 *
 * The read order must mirror the write order in `serializeTo`:
 * load factor, entry count, seed value, size-map flag, then the entries.
 */
private[collection] def init(in: java.io.ObjectInputStream, readEntry: => Entry) {
in.defaultReadObject
_loadFactor = in.readInt()
assert(_loadFactor > 0)
val size = in.readInt()
tableSize = 0
assert(size >= 0)
seedvalue = in.readInt()
val smDefined = in.readBoolean()
// Pre-size the table so no resize is triggered while re-adding `size` entries.
table = new Array(capacity(sizeForThreshold(_loadFactor, size)))
threshold = newThreshold(_loadFactor, table.length)
if (smDefined) sizeMapInit(table.length) else sizemap = null
var index = 0
while (index < size) {
// `readEntry` is by-name: re-evaluated each iteration, reading one entry.
addEntry(readEntry)
index += 1
}
}
/**
 * Serializes the collection to the output stream by saving the load factor, collection
 * size and collection entries. `writeEntry` is responsible for writing an entry to the stream.
 *
 * `foreachEntry` determines the order in which the key/value pairs are saved to the stream. To
 * deserialize, `init` should be used.
 *
 * The write order here must stay in sync with the read order in `init`.
 */
private[collection] def serializeTo(out: java.io.ObjectOutputStream, writeEntry: Entry => Unit) {
out.defaultWriteObject
out.writeInt(_loadFactor)
out.writeInt(tableSize)
out.writeInt(seedvalue)
out.writeBoolean(isSizeMapDefined)
foreachEntry(writeEntry)
}
/** Find entry with given key in table, null if not found.
*/
@deprecatedOverriding("No sensible way to override findEntry as private findEntry0 is used in multiple places internally.", "2.11.0")
protected def findEntry(key: A): Entry =
findEntry0(key, index(elemHashCode(key)))
private[this] def findEntry0(key: A, h: Int): Entry = {
var e = table(h).asInstanceOf[Entry]
while (e != null && !elemEquals(e.key, key)) e = e.next
e
}
/** Add entry to table
 * pre: no entry with same key exists
 */
@deprecatedOverriding("No sensible way to override addEntry as private addEntry0 is used in multiple places internally.", "2.11.0")
protected def addEntry(e: Entry) {
addEntry0(e, index(elemHashCode(e.key)))
}
// Prepends `e` to bucket `h`'s collision chain, updates the size bookkeeping,
// and doubles the table once the entry count exceeds the load-factor threshold.
private[this] def addEntry0(e: Entry, h: Int) {
e.next = table(h).asInstanceOf[Entry]
table(h) = e
tableSize = tableSize + 1
nnSizeMapAdd(h)
if (tableSize > threshold)
resize(2 * table.length)
}
/** Find entry with given key in table, or add new one if not found.
 * May be somewhat faster then `findEntry`/`addEntry` pair as it
 * computes entry's hash index only once.
 * Returns entry found in table or null.
 * New entries are created by calling `createNewEntry` method.
 */
protected def findOrAddEntry[B](key: A, value: B): Entry = {
// Hash index is computed a single time and shared by lookup and insert.
val h = index(elemHashCode(key))
val e = findEntry0(key, h)
// Null return signals to the caller that a new entry was inserted.
if (e ne null) e else { addEntry0(createNewEntry(key, value), h); null }
}
/** Creates a new entry to be immediately inserted into the hashtable.
 * This method is guaranteed to be called only once, and only in case the entry
 * will actually be added (see `findOrAddEntry`). In other words, an implementation
 * may be side-effecting.
 */
protected def createNewEntry[B](key: A, value: B): Entry
/** Remove entry from table if present.
 * Returns the removed entry (with its `next` pointer cleared so it cannot leak
 * the rest of its chain), or null if no entry with the given key exists.
 */
@deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0")
protected def removeEntry(key: A) : Entry = {
  val h = index(elemHashCode(key))
  var e = table(h).asInstanceOf[Entry]
  if (e != null) {
    // Case 1: the match is the head of the chain — unlink by repointing the bucket.
    if (elemEquals(e.key, key)) {
      table(h) = e.next
      tableSize = tableSize - 1
      nnSizeMapRemove(h)
      e.next = null // detach so the caller cannot traverse into the table
      return e
    } else {
      // Case 2: scan the chain keeping `e` one step behind `e1`,
      // so `e` can bypass `e1` once the match is found.
      var e1 = e.next
      while (e1 != null && !elemEquals(e1.key, key)) {
        e = e1
        e1 = e1.next
      }
      if (e1 != null) {
        e.next = e1.next
        tableSize = tableSize - 1
        nnSizeMapRemove(h)
        e1.next = null
        return e1
      }
    }
  }
  null // key not present (empty bucket or exhausted chain)
}
/** An iterator returning all entries.
 * Iterates buckets from `lastPopulatedIndex` downward, walking each collision
 * chain before advancing. NOTE: calling `next()` when `hasNext` is false throws
 * a NullPointerException (at `es.next`); callers must check `hasNext` first.
 */
protected def entriesIterator: Iterator[Entry] = new AbstractIterator[Entry] {
  val iterTable = table          // snapshot of the table reference (not of its contents)
  var idx = lastPopulatedIndex   // highest bucket that holds at least one entry
  var es = iterTable(idx)        // next entry to be returned
  def hasNext = es != null
  def next() = {
    val res = es
    es = es.next
    // chain exhausted: skip down over empty buckets to the next populated one
    while (es == null && idx > 0) {
      idx = idx - 1
      es = iterTable(idx)
    }
    res.asInstanceOf[Entry]
  }
}
/** Avoid iterator for a 2x faster traversal.
 * Applies `f` to every entry; visits buckets from `lastPopulatedIndex` downward
 * (same order as `entriesIterator`). Safe against `f` removing the entry it is
 * given, because the successor is cached before the call.
 */
protected def foreachEntry[U](f: Entry => U) {
  val iterTable = table
  var idx = lastPopulatedIndex
  var es = iterTable(idx)
  while (es != null) {
    val next = es.next // Cache next in case f removes es.
    f(es.asInstanceOf[Entry])
    es = next
    // advance to the next populated bucket once this chain is finished
    while (es == null && idx > 0) {
      idx -= 1
      es = iterTable(idx)
    }
  }
}
/** Remove all entries from table
 */
protected def clearTable() {
  // Null out every bucket with a forward sweep, then reset the bookkeeping.
  var slot = 0
  val len = table.length
  while (slot < len) {
    table(slot) = null
    slot += 1
  }
  tableSize = 0
  nnSizeMapReset(0)
}
// Rehashes every entry into a fresh table of `newSize` slots (expected to be a
// power of two, so `index` masking stays valid) and recomputes the growth threshold.
// Entries are re-linked in place — no new Entry objects are allocated — which
// reverses the order of each chain relative to the old table.
private def resize(newSize: Int) {
  val oldTable = table
  table = new Array(newSize)
  nnSizeMapReset(table.length) // size map must be rebuilt for the new bucket layout
  var i = oldTable.length - 1
  while (i >= 0) {
    var e = oldTable(i)
    while (e != null) {
      val h = index(elemHashCode(e.key)) // slot in the *new* table
      val e1 = e.next                    // remember successor before re-linking
      e.next = table(h).asInstanceOf[Entry]
      table(h) = e
      e = e1
      nnSizeMapAdd(h)
    }
    i = i - 1
  }
  threshold = newThreshold(_loadFactor, newSize)
}
/* Size map handling code */
/*
* The following three sizeMap* functions (Add, Remove, Reset)
* are used to update the size map of the hash table.
*
* The size map logically divides the hash table into `sizeMapBucketSize` element buckets
* by keeping an integer entry for each such bucket. Each integer entry simply denotes
* the number of elements in the corresponding bucket.
* Best understood through an example, see:
* table = [/, 1, /, 6, 90, /, -3, 5] (8 entries)
* sizemap = [ 2 | 3 ] (2 entries)
* where sizeMapBucketSize == 4.
*
* By default the size map is not initialized, so these methods don't do anything, thus,
* their impact on hash table performance is negligible. However, if the hash table
* is converted into a parallel hash table, the size map is initialized, as it will be needed
* there.
*/
@deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0")
// Records one insertion into table slot `h`; no-op unless the size map is initialized.
protected def nnSizeMapAdd(h: Int) = if (sizemap ne null) {
  sizemap(h >> sizeMapBucketBitSize) += 1 // map slot -> size-map bucket by dropping the low bits
}
@deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0")
// Records one removal from table slot `h`; no-op unless the size map is initialized.
protected def nnSizeMapRemove(h: Int) = if (sizemap ne null) {
  sizemap(h >> sizeMapBucketBitSize) -= 1
}
@deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0")
// Zeroes the size map for a table of `tableLength` slots; no-op unless the size map
// is initialized. Reuses the existing array when it already has the right length.
protected def nnSizeMapReset(tableLength: Int) = if (sizemap ne null) {
  val nsize = calcSizeMapSize(tableLength)
  if (sizemap.length != nsize) sizemap = new Array[Int](nsize)
  else java.util.Arrays.fill(sizemap, 0)
}
// Total number of logical size-map buckets covering the current table.
// FIX: the original comparison was inverted (`sizeMapBucketSize < table.length`),
// which returned 1 for any table *larger* than one bucket and 0 (integer division)
// for tables *smaller* than one bucket — so `sizeMapInitAndRebuild` counted at most
// the first `sizeMapBucketSize` slots of a large table. A table no longer than one
// bucket maps to a single bucket; otherwise one bucket per `sizeMapBucketSize` slots.
private[collection] final def totalSizeMapBuckets = if (table.length < sizeMapBucketSize) 1 else table.length / sizeMapBucketSize
@deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0")
// Length of the size-map array for a table of `tableLength` slots: one counter per
// 2^sizeMapBucketBitSize slots, plus one so a partial trailing bucket is covered.
protected def calcSizeMapSize(tableLength: Int) = (tableLength >> sizeMapBucketBitSize) + 1
// discards the previous sizemap and only allocates a new one (all counters zero);
// callers that need the counters populated should use sizeMapInitAndRebuild
protected def sizeMapInit(tableLength: Int) {
  sizemap = new Array[Int](calcSizeMapSize(tableLength))
}
// discards the previous sizemap and populates the new one by counting the chain
// lengths of every table slot, bucket by bucket
@deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0")
protected def sizeMapInitAndRebuild() {
  sizeMapInit(table.length)
  // go through the buckets, count elements
  var tableidx = 0                 // current table slot (monotonically increasing)
  var bucketidx = 0                // current size-map bucket
  val tbl = table
  var tableuntil = 0               // exclusive upper slot bound of the current bucket
  if (tbl.length < sizeMapBucketSize) tableuntil = tbl.length else tableuntil = sizeMapBucketSize
  val totalbuckets = totalSizeMapBuckets
  while (bucketidx < totalbuckets) {
    var currbucketsize = 0
    // sum the chain lengths of every slot belonging to this bucket
    while (tableidx < tableuntil) {
      var e = tbl(tableidx)
      while (e ne null) {
        currbucketsize += 1
        e = e.next
      }
      tableidx += 1
    }
    sizemap(bucketidx) = currbucketsize
    tableuntil += sizeMapBucketSize
    bucketidx += 1
  }
}
// Debug helper: prints the size-map counters to stdout.
// NOTE(review): throws NullPointerException when the size map is not initialized.
private[collection] def printSizeMap() {
  println(sizemap.toList)
}
@deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0")
// Drops the size map entirely; all nnSizeMap* operations become no-ops afterwards.
protected def sizeMapDisable() = sizemap = null
@deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0")
// True when the size map is currently allocated and being maintained.
protected def isSizeMapDefined = sizemap ne null
// override to automatically initialize the size map (honored by initWithContents)
protected def alwaysInitSizeMap = false
/* End of size map handling code */
// Key equality used throughout the table; defaults to universal equality (`==`).
protected def elemEquals(key1: A, key2: A): Boolean = (key1 == key2)
/**
 * Note: we take the most significant bits of the hashcode, not the lower ones
 * this is of crucial importance when populating the table in parallel
 */
protected final def index(hcode: Int): Int = {
  // Assumes table.length is a power of two, so `ones` is an all-ones bit mask.
  val ones = table.length - 1
  val exponent = Integer.numberOfLeadingZeros(ones)
  // Improve the hash, shift its high bits down into range, then mask into [0, length).
  (improve(hcode, seedvalue) >>> exponent) & ones
}
// Adopts the internal state captured in `c` (as produced by `hashTableContents`);
// a null `c` leaves the current state untouched. Either way, the size map is
// rebuilt afterwards when `alwaysInitSizeMap` demands one and none is present.
protected def initWithContents(c: HashTable.Contents[A, Entry]) = {
  if (c != null) {
    _loadFactor = c.loadFactor
    table = c.table
    tableSize = c.tableSize
    threshold = c.threshold
    seedvalue = c.seedvalue
    sizemap = c.sizemap
  }
  if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild()
}
// Captures the table's complete internal state for hand-off (e.g. to a parallel
// collection); the arrays are shared, not copied — see `initWithContents`.
private[collection] def hashTableContents = new HashTable.Contents(
  _loadFactor,
  table,
  tableSize,
  threshold,
  seedvalue,
  sizemap
)
}
private[collection] object HashTable {
  /** The load factor for the hash table (in 0.001 step).
   */
  private[collection] final def defaultLoadFactor: Int = 750 // corresponds to 75%
  private[collection] final def loadFactorDenum = 1000 // should be loadFactorDenom, but changing that isn't binary compatible

  // Element count at which a table of `size` slots should grow: size * loadFactor/1000.
  private[collection] final def newThreshold(_loadFactor: Int, size: Int) = ((size.toLong * _loadFactor) / loadFactorDenum).toInt

  // Inverse of `newThreshold`: smallest table size whose threshold is at least `thr`.
  private[collection] final def sizeForThreshold(_loadFactor: Int, thr: Int) = ((thr.toLong * loadFactorDenum) / _loadFactor).toInt

  // Table capacity for an expected size; always a power of two, and at least 1.
  private[collection] final def capacity(expectedSize: Int) = if (expectedSize == 0) 1 else powerOfTwo(expectedSize)

  trait HashUtils[KeyType] {
    protected final def sizeMapBucketBitSize = 5
    // so that:
    protected final def sizeMapBucketSize = 1 << sizeMapBucketBitSize

    // Scala's universal hash (`##`), consistent with `==` for boxed numerics.
    protected def elemHashCode(key: KeyType) = key.##

    /**
     * Defer to a high-quality hash in [[scala.util.hashing]].
     * The goal is to distribute across bins as well as possible even if a hash code has low entropy at some bits.
     * <p/>
     * OLD VERSION - quick, but bad for sequence 0-10000 - little entropy in higher bits - since 2003
     * {{{
     * var h: Int = hcode + ~(hcode << 9)
     * h = h ^ (h >>> 14)
     * h = h + (h << 4)
     * h ^ (h >>> 10)
     * }}}
     * the rest of the computation is due to SI-5293
     */
    protected final def improve(hcode: Int, seed: Int): Int = rotateRight(byteswap32(hcode), seed)
  }

  /**
   * Returns a power of two >= `target`.
   * NOTE(review): assumes 0 < target <= 2^30; outside that range the bit trick
   * wraps (callers here only pass positive expected sizes).
   */
  private[collection] def powerOfTwo(target: Int): Int = {
    /* See http://bits.stephan-brumme.com/roundUpToNextPowerOfTwo.html */
    var c = target - 1
    c |= c >>> 1
    c |= c >>> 2
    c |= c >>> 4
    c |= c >>> 8
    c |= c >>> 16
    c + 1
  }

  /** Immutable carrier of a hash table's full internal state; used to move state
   * between table implementations without rehashing (arrays are shared).
   */
  class Contents[A, Entry >: Null <: HashEntry[A, Entry]](
    val loadFactor: Int,
    val table: Array[HashEntry[A, Entry]],
    val tableSize: Int,
    val threshold: Int,
    val seedvalue: Int,
    val sizemap: Array[Int]
  ) {
    import scala.collection.DebugUtils._
    // Human-readable dump of the captured state, for debugging only.
    private[collection] def debugInformation = buildString {
      append =>
      append("Hash table contents")
      append("-------------------")
      append("Table: [" + arrayString(table, 0, table.length) + "]")
      append("Table size: " + tableSize)
      append("Load factor: " + loadFactor)
      append("Seedvalue: " + seedvalue)
      append("Threshold: " + threshold)
      append("Sizemap: [" + arrayString(sizemap, 0, sizemap.length) + "]")
    }
  }
}
|
felixmulder/scala
|
src/library/scala/collection/mutable/HashTable.scala
|
Scala
|
bsd-3-clause
| 15,799 |
package src.main.scala.db
/**
 * The record schema of a single bus "stop" in the "stops.txt" CSV file of
 * the General Transit Feed Specification (GTFS).
 *
 * @param stopId        unique identifier of the stop
 * @param stopName      human-readable stop name
 * @param stopLatitude  WGS-84 latitude of the stop
 * @param stopLongitude WGS-84 longitude of the stop
 */
case class DbSchemaStop(stopId: String, stopName: String,
stopLatitude: Double, stopLongitude: Double)
/*
* Notes:
* About the "wheelchair_boarding" field:
* some transit agencies, like Vancouver Translink, don't provide this field
* (we could assume that it is "1" given the high-quality of living of
* Vancouver)
*/
|
je-nunez/urban_planning_on_gtfs_traffic_congestion
|
src/main/scala/db/DbSchemaStop.scala
|
Scala
|
gpl-2.0
| 536 |
/**
* Copyright 2015 Thomson Reuters
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmwell.tools.data.downloader.streams
import java.io.InputStream
import akka.actor.ActorSystem
import akka.http.scaladsl.model._
import akka.stream._
import akka.stream.scaladsl._
import akka.util.ByteString
import cmwell.tools.data.downloader.DataPostProcessor
import cmwell.tools.data.utils.ArgsManipulations._
import cmwell.tools.data.utils.akka.{Retry, lineSeparatorFrame, _}
import cmwell.tools.data.utils.logging.DataToolsLogging
import cmwell.tools.data.utils.ops.VersionChecker
import org.slf4j.LoggerFactory
import scala.concurrent.duration._
import scala.util._
/**
 * Downloads infotons from CM-Well
 */
object Downloader extends DataToolsLogging with DataToolsConfig {
  // NOTE(review): `defaultSortAsc` is never read anywhere in this file, and the
  // original comment above it ("separator of response uuids") did not describe
  // it — confirm whether it can be removed.
  private val defaultSortAsc = false
  // default batching factor: number of infotons requested per HTTP request
  private val numInfotonsPerRequest = 25

  /**
   * Creates a [[akka.stream.scaladsl.Source]] which downloads data from target CM-Well
   *
   * @param baseUrl address of target cm-well
   * @param path path in cm-well
   * @param params cm-well url params
   * @param qp cm-well query params
   * @param format desired cm-well data format
   * @param op operation type (default = stream)
   * @param length max number of records to receive (e.g., 1, 10, all if set to `None`)
   * @param recursive true if need to get records in a recursive way
   * @param numInfotonsPerRequest how many infotons will be requested in each HTTP request (batching factor)
   * @param system actor system
   * @param mat akka stream materializer
   * @return [[akka.stream.scaladsl.Source Source]] which returns data chunks from cm-well
   * @see [[Downloader#createSourceFromQuery()]]
   */
  def createSourceFromQuery(baseUrl: String,
                            path: String,
                            params: String = "",
                            qp: String = "",
                            format: String = "trig",
                            op: String = "stream",
                            length: Option[Int] = Some(50),
                            recursive: Boolean = false,
                            numInfotonsPerRequest: Int = numInfotonsPerRequest)
                           (implicit system: ActorSystem, mat: Materializer) = {
    val downloader = new Downloader(
      baseUrl = baseUrl,
      path = path,
      params = params,
      qp = qp,
      format = format,
      op = op,
      length = length,
      recursive = recursive,
      numInfotonsPerRequest) // positional arg: lands on the constructor's `numInfotonsPerRequest`
    downloader.createSourceFromQuery()
  }

  /**
   * Downloads data from target CM-Well and apply the given outputHandler on each data chunk
   *
   * @param baseUrl address of target cm-well
   * @param path path in cm-well
   * @param params cm-well url params
   * @param qp cm-well query params
   * @param format desired cm-well data format
   * @param op operation type (default = stream)
   * @param length max number of records to receive (e.g., 1, 10, all if set to `None`)
   * @param recursive true if need to get records in a recursive way
   * @param numInfotonsPerRequest how many infotons will be requested in each HTTP request (batching factor)
   * @param outputHandler function which handles given data (e.g., write data to file)
   * @param system actor system
   * @param mat akka stream materializer
   * @return [[scala.concurrent.Future Future]] of download process
   * @see [[cmwell.tools.data.downloader.streams.Downloader#createSourceFromQuery]]
   */
  def downloadFromQuery(baseUrl: String,
                        path: String,
                        params: String = "",
                        qp: String = "",
                        format: String = "trig",
                        op: String = "stream",
                        length: Option[Int] = Some(50),
                        recursive: Boolean = false,
                        numInfotonsPerRequest: Int = numInfotonsPerRequest,
                        outputHandler: (String) => Unit = (s: String) => ())
                       (implicit system: ActorSystem, mat: Materializer) = {
    createSourceFromQuery(
      baseUrl = baseUrl,
      path = path,
      params = params,
      qp = qp,
      format = format,
      op = op,
      length = length,
      recursive = recursive,
      numInfotonsPerRequest = numInfotonsPerRequest)
      .runForeach(data => outputHandler(data.utf8String))
  }

  /**
   * Downloads data from input stream containing uuids
   *
   * @param baseUrl address of target cm-well
   * @param format desired cm-well data format
   * @param numInfotonsPerRequest how many infotons will be requested in each HTTP request (batching factor)
   * @param outputHandler function which handles given data (e.g., write data to file)
   * @param in input stream containing uuids
   * @param system actor system
   * @param mat akka stream materializer
   * @return [[scala.concurrent.Future Future]] of download process
   * @see [[cmwell.tools.data.downloader.streams.Downloader#createSourceFromUuidInputStream]]
   */
  def downloadFromUuidInputStream(baseUrl: String,
                                  format: String = "trig",
                                  numInfotonsPerRequest: Int = numInfotonsPerRequest,
                                  outputHandler: (String) => Unit = (s: String) => (),
                                  in: InputStream)
                                 (implicit system: ActorSystem, mat: Materializer) = {
    createSourceFromUuidInputStream(
      baseUrl = baseUrl,
      format = format,
      numInfotonsPerRequest = numInfotonsPerRequest,
      in = in)
      .runForeach(data => outputHandler(data.utf8String))
  }

  /**
   * Creates a [[akka.stream.scaladsl.Source]] from uuid [[java.io.InputStream InputStream]] which downloads data from target CM-Well.
   *
   * @param baseUrl address of target cm-well
   * @param format desired cm-well data format
   * @param numInfotonsPerRequest how many infotons will be requested in each HTTP request (batching factor)
   * @param in input stream containing uuids
   * @param system actor system
   * @param mat akka stream materializer
   * @return [[akka.stream.scaladsl.Source Source]] which returns data chunks from cm-well
   */
  def createSourceFromUuidInputStream(baseUrl: String,
                                      format: String = "trig",
                                      numInfotonsPerRequest: Int = numInfotonsPerRequest,
                                      in: InputStream)
                                     (implicit system: ActorSystem, mat: Materializer) = {
    // one stream element per line of the input stream
    val source = StreamConverters.fromInputStream(() => in)
      .via(lineSeparatorFrame)
    createSourceFromUuids(
      baseUrl = baseUrl,
      format = format,
      numInfotonsPerRequest = numInfotonsPerRequest,
      source = source
    )
  }

  /**
   * Creates a [[akka.stream.scaladsl.Source Source]] from uuid [[akka.stream.scaladsl.Source Source]]
   * which downloads data from target CM-Well.
   *
   * @param baseUrl address of target cm-well
   * @param format desired cm-well data format
   * @param numInfotonsPerRequest how many infotons will be requested in each HTTP request (batching factor)
   * @param source [[akka.stream.scaladsl.Source]] which emits uuids elements
   * @param system actor system
   * @param mat akka stream materializer
   * @return [[akka.stream.scaladsl.Source Source]] which returns data chunks from cm-well
   */
  def createSourceFromUuids(baseUrl: String,
                            format: String = "trig",
                            numInfotonsPerRequest: Int = numInfotonsPerRequest,
                            source: Source[ByteString, _])
                           (implicit system: ActorSystem, mat: Materializer) = {
    val downloader = new Downloader(
      baseUrl = baseUrl,
      path = "/",
      format = format,
      numInfotonsPerRequest = numInfotonsPerRequest)
    source
      .via(downloader.downloadDataFromUuids)
      // on failure: log to stderr and emit the error text as the final stream element
      .recover{case t => System.err.println(t); ByteString(t.toString) }
  }

  /**
   * Downloads data from input stream containing infoton paths
   *
   * @param baseUrl address of target cm-well
   * @param format desired cm-well data format
   * @param numInfotonsPerRequest how many infotons will be requested in each HTTP request (batching factor)
   * @param outputHandler function which handles given data (e.g., write data to file)
   * @param in input stream containing infoton paths
   * @param system actor system
   * @param mat akka stream materializer
   * @return [[scala.concurrent.Future Future]] of download process
   */
  def downloadFromPathsInputStream(baseUrl: String,
                                   format: String = "trig",
                                   numInfotonsPerRequest: Int = numInfotonsPerRequest,
                                   outputHandler: (String) => Unit = (s: String) => (),
                                   in: InputStream)
                                  (implicit system: ActorSystem, mat: Materializer) = {
    createSourceFromPathsInputStream(
      baseUrl = baseUrl,
      format = format,
      numInfotonsPerRequest = numInfotonsPerRequest,
      in = in)
      .runForeach(data => outputHandler(data.utf8String))
  }

  /**
   * Creates a [[akka.stream.scaladsl.Source Source]] from paths [[java.io.InputStream]]
   * which downloads data from target CM-Well.
   *
   * @param baseUrl address of target cm-well
   * @param format desired cm-well data format
   * @param numInfotonsPerRequest how many infotons will be requested in each HTTP request (batching factor)
   * @param in input stream containing infoton paths
   * @param system actor system
   * @param mat akka stream materializer
   * @return [[akka.stream.scaladsl.Source Source]] which returns data chunks from cm-well
   */
  def createSourceFromPathsInputStream(baseUrl: String,
                                       format: String = "trig",
                                       numInfotonsPerRequest: Int = numInfotonsPerRequest,
                                       in: InputStream)
                                      (implicit system: ActorSystem, mat: Materializer) = {
    // one stream element per line of the input stream
    val source = StreamConverters.fromInputStream(() => in)
      .via(lineSeparatorFrame)
    createSourceFromPaths(
      baseUrl = baseUrl,
      format = format,
      numInfotonsPerRequest = numInfotonsPerRequest,
      source = source
    )
  }

  /**
   * Creates a [[akka.stream.scaladsl.Source Source]] from uuid [[akka.stream.scaladsl.Source Source]]
   * which downloads data from target CM-Well.
   *
   * @param baseUrl address of target cm-well
   * @param format desired cm-well data format
   * @param params params in cm-well URI
   * @param numInfotonsPerRequest how many infotons will be requested in each HTTP request (batching factor)
   * @param source [[akka.stream.scaladsl.Source]] which emits infoton paths elements
   * @param system actor system
   * @param mat akka stream materializer
   * @return [[akka.stream.scaladsl.Source Source]] which returns data chunks from cm-well
   */
  def createSourceFromPaths(baseUrl: String,
                            format: String = "trig",
                            params: String = "",
                            numInfotonsPerRequest: Int = numInfotonsPerRequest,
                            source: Source[ByteString, _])
                           (implicit system: ActorSystem, mat: Materializer) = {
    val downloader = new Downloader(
      baseUrl = baseUrl,
      path = "/",
      format = format,
      params = params,
      numInfotonsPerRequest = numInfotonsPerRequest)
    source
      .via(downloader.downloadDataFromPaths)
      // on failure: log to stderr and emit the error text as the final stream element
      .recover{case t => System.err.println(t); ByteString(t.toString) }
  }
}
/**
 * Stateful downloader bound to a single CM-Well endpoint and query configuration.
 * Instances are normally created through the companion object's factory methods.
 */
class Downloader(baseUrl: String,
                 path: String,
                 params: String = "",
                 qp: String = "",
                 format: String = "trig",
                 op: String = "stream",
                 length: Option[Int] = Some(50),
                 recursive: Boolean = false,
                 numInfotonsPerRequest: Int = Downloader.numInfotonsPerRequest,
                 outputHandler: (String) => Unit = (s: String) => ())
                (implicit system: ActorSystem, mat: Materializer) extends DataToolsLogging {
  type Data = Seq[ByteString]
  import Downloader._

  // HTTP retry timeout, read from configuration; `var` so tests can shorten it.
  private [streams] var retryTimeout = {
    val timeoutDuration = Duration(config.getString("cmwell.downloader.streams.http-retry-timeout")).toCoarsest
    FiniteDuration( timeoutDuration.length, timeoutDuration.unit )
  }
  // dedicated logger for uuids/paths whose download failed
  private val badUuidsLogger = LoggerFactory.getLogger("bad-uuids")
  // NOTE(review): uses the global execution context rather than system.dispatcher —
  // fine for a CLI tool, but confirm this is intended for library use.
  implicit val ec = scala.concurrent.ExecutionContext.Implicits.global //system.dispatcher
  // sized to the connection pool so buffering matches available connections
  private val bufferSize = config.getInt("akka.http.host-connection-pool.max-connections")
  private val HttpAddress(protocol, host, port, uriPrefix) = extractBaseUrl(baseUrl)

  /**
   * Creates a flow for downloading infoton data from uuid strings
   *
   * @return flow that gets uuids and download their data
   */
  def downloadDataFromUuids() = {
    // POST the batch of uuids to _out as "/ii/<uuid>" lines
    def createDataRequest(uuids: Seq[ByteString]) = {
      HttpRequest(
        uri = s"${formatHost(baseUrl)}/_out?format=$format",
        method = HttpMethods.POST,
        entity = uuids.map(_.utf8String).mkString("/ii/", "\\n/ii/", "")
      )
    }

    Flow[ByteString]
      .grouped(numInfotonsPerRequest)
      .buffer(bufferSize, OverflowStrategy.backpressure)
      .map (uuids => uuids -> None)
      .via(Retry.retryHttp(retryTimeout, bufferSize, baseUrl)(createDataRequest))
      .flatMapConcat {
        case (Success(HttpResponse(s,h,e,p)), _, _) if s.isSuccess() =>
          DataPostProcessor.postProcessByFormat(format, e.withoutSizeLimit().dataBytes)
        case (Success(res@HttpResponse(s,h,e,p)), uuids, _) =>
          // entity databytes were discarded in job flow
          // NOTE(review): unlike downloadDataFromPaths, this branch does not call
          // res.discardEntityBytes() itself — confirm the comment above still holds.
          logger.error(s"error: status=$s")
          badUuidsLogger.error(s"uuids: ${uuids.map(_.utf8String).mkString("\\n")}")
          Source.empty
        case (Failure(err), uuids, _) =>
          logger.error(s"cannot download data from uuids", err)
          Source.empty
      }
  }

  /**
   * Creates [[akka.stream.scaladsl.Source Source]] which downloads data from CM-Well:
   * - query cm-well and receive uuids
   * - download infotons' data of previous uuids in requested format
   * - apply given output handler on received infoton data
   *
   * @return [[akka.stream.scaladsl.Source Source]] of download process which emits data chunks
   */
  def createSourceFromQuery() = {
    /**
     * Creates an http query request for getting uuids according to given parameters
     *
     * @return HttpRequest for cm-well querying for infotons
     */
    def createQueryRequest(): HttpRequest = {
      val qpValue = if (qp.isEmpty) "" else s"&qp=$qp"
      val paramsValue = if (params.isEmpty) "" else s"&$params"
      val recursiveValue = if (recursive) "&recursive" else ""
      val lengthValue = if (length.isDefined) s"&length=${length.get}" else ""
      val uri = s"${formatHost(baseUrl)}$path?op=$op&format=tsv$qpValue$paramsValue$recursiveValue$lengthValue"
      logger.debug(s"query request: GET $uri")
      HttpRequest(uri = uri)
    }

    // extracts the uuid column (third tab-separated field) from a tsv line
    def tsvToUuid(tsv: ByteString) = {
      tsv.dropWhile(_ != '\\t').drop(1) // path
        .dropWhile(_ != '\\t').drop(1) // lastModified
        .takeWhile(_ != '\\t') // uuid
    }

    val conn = HttpConnections.outgoingConnection(host, port, protocol)
    Source.single(createQueryRequest())
      .via(conn)
      .flatMapConcat {
        case res@HttpResponse(s, h, e, p) if s.isSuccess() =>
          e.withoutSizeLimit().dataBytes
            .via(lineSeparatorFrame)
            .buffer(1000, OverflowStrategy.backpressure)
            .map(tsvToUuid)
            .filter(_.nonEmpty)
        case res@HttpResponse(s,_,e,_) =>
          res.discardEntityBytes()
          logger.error(s"error in getting uuids: status=$s")
          Source.empty[ByteString]
      }
      .buffer(1000, OverflowStrategy.backpressure)
      // .recover{case t => System.err.println(t); t.toString }
      .via(downloadDataFromUuids)
      // .recover{case t => System.err.println(t); ByteString(t.toString) }
  }

  /**
   * Creates a flow for downloading infoton data from infoton path strings
   *
   * @return flow that gets paths and download their data
   */
  def downloadDataFromPaths() = {
    // POST the batch of paths to _out, one path per line
    def createDataRequest(paths: Seq[ByteString]) = {
      val paramsValue = if (params.isEmpty) "" else s"&$params"
      HttpRequest(
        uri = s"${formatHost(baseUrl)}/_out?format=$format$paramsValue",
        method = HttpMethods.POST,
        entity = concatByteStrings(paths, endl).utf8String
      )
    }

    Flow[ByteString]
      .groupedWithin(numInfotonsPerRequest, 3.seconds)
      .map (paths => paths -> None)
      .via(Retry.retryHttp(retryTimeout, bufferSize, baseUrl)(createDataRequest))
      .flatMapConcat {
        case (Success(HttpResponse(s,h,e,p)), _, _) if s.isSuccess() =>
          DataPostProcessor.postProcessByFormat(format, e.withoutSizeLimit().dataBytes)
        case (Success(res@HttpResponse(s,h,e,p)), paths, _) =>
          res.discardEntityBytes() // must be drained, or the connection pool slot leaks
          logger.error(s"error: status=$s")
          badUuidsLogger.error(s"paths: ${paths.map(_.utf8String).mkString("\\n")}")
          Source.empty
        case (Failure(err), paths, _) =>
          logger.error("error: cannot process data response", err)
          badUuidsLogger.error(s"paths: ${paths.map(_.utf8String).mkString("\\n")}")
          Source.empty
      }
  }
}
|
nruppin/CM-Well
|
server/cmwell-data-tools/src/main/scala/cmwell/tools/data/downloader/streams/Downloader.scala
|
Scala
|
apache-2.0
| 18,375 |
package incognito.anonymization.buckets
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.rdd.RDD
import org.apache.spark.Accumulable
import incognito.rdd.SATuple
import scala.collection.Map
import incognito.rdd.Data
/**
 * @author Antorweep Chakravorty
 * @constructor a constructor to create buckets
 * @param _taxonomy contains a map of child -> parent relationship of sensitive attribute values
 * @param rddCount total number of records in the dataset
 * @param beta maximum allowed probability change for sensitive attribute values appearing in
 *             any equivalence class (the original scaladoc called this `threshold`, but the
 *             parameter is actually named `beta`)
 */
abstract class Buckets(_taxonomy: Broadcast[Map[String, String]] = null, rddCount: Double, beta: Double = 0.0) extends Serializable {
  /**
   * A method to create the buckets from the sensitive attribute RDD
   * @param data the data set to be anonymized
   * @param height the current height of the taxonomy tree at which the partition phase runs
   * @return an RDD of sensitive attribute values with a bucket code representing the bucket they belong to
   */
  def getBuckets(data: RDD[Data], height: Int = -1): RDD[SATuple]
}
|
achak1987/SparkAnonymizationToolkit
|
src/main/scala/incognito/anonymization/buckets/Buckets.scala
|
Scala
|
apache-2.0
| 1,138 |
package com.shorrockin.cascal.serialization
import scala.language.existentials
import java.nio.ByteBuffer
import reflect.Manifest
import java.lang.annotation.Annotation
import java.lang.reflect.{Field, Method}
import java.util.{Date, UUID}
import annotations.{Columns, Optional}
import annotations.{Key => AKey, SuperColumn => ASuperColumn, Value => AValue}
import annotations.{Keyspace => AKeySpace, Super => ASuper, Family => AFamily}
import com.shorrockin.cascal.model._
import com.shorrockin.cascal.utils.{Logging, Conversions}
/**
 * Holds a reference to the default converter, built from the default serializer
 * registry (`Serializer.Default`).
 */
object Converter extends Converter(Serializer.Default) with Logging {
}
/**
* main class used to convert objects to and from their cascal
* equivalents.
*
* @author Chris Shorrock
*/
class Converter(serializers:Map[Class[_], Serializer[_]]) {
  // Cache of per-class reflection metadata; replaced wholesale under `synchronized` in `info`.
  private var reflectionCache = Map[Class[_], ReflectionInformation]()
/**
 * Converts each (super column, columns) pair in the provided sequence (as returned
 * from a list call) into an object of the requested type; the super column key
 * itself is ignored — only its columns are mapped.
 */
def apply[T](seq:Seq[(SuperColumn, Seq[Column[SuperColumn]])])(implicit manifest:Manifest[T]):Seq[T] =
  seq.map { case (_, columns) => apply[T](columns) }
/**
 * given a list of columns, assumed to all belong to the same columns, creates
 * the object of type T using the annotations present an that class. Uses
 * the serializers to convert values in columns to their appropriate.
 *
 * Builds one constructor argument per annotated parameter, in declaration order,
 * then invokes T's constructor reflectively.
 * NOTE(review): `manifest.erasure` is deprecated in favor of `runtimeClass`
 * (Scala 2.10+) — consider migrating if the build's Scala version permits.
 */
def apply[T](columns:Seq[Column[_]])(implicit manifest:Manifest[T]):T = {
  val info = Converter.this.info(manifest.erasure)

  // iterate over all the types and map them into the values needed to call the
  // constructor to create the object.
  val values:Seq[Any] = info.parameters.map { (paramType) =>
    val cls = paramType._1        // declared parameter type
    val annotation = paramType._2 // cascal annotation attached to that parameter

    annotation match {
      // if there's a columnsToKey annotation get the first columnsToKey in the columns and return it
      case k:AKey => stringToObject(cls, columnsToKey(columns).value)

      // if there's a super column annotation get the super column then use the serializers
      // to convert the byte array to the appropriate value.
      case sc:ASuperColumn => info.isSuper match {
        case true => bytesToObject(cls, columnsToSuperColumn(columns).value)
        case false => throw new IllegalArgumentException("@SuperColumn may only exist within class annotated with @Super")
      }

      // if there's a columns annotation that is mapped to a Seq[(Tuple, Tuple)] then iterate
      // over all the columns returned and create the appropriate type using values provided.
      case a:Columns => cls.equals(classOf[Seq[_]]) match {
        case false => throw new IllegalArgumentException("@Columns annotation must be attached to Seq[Tuple2] values - was: " + cls)
        case true => columns.map { (column) => (bytesToObject(a.name, column.name) -> bytesToObject(a.value, column.value)) }
      }

      // if there's a value annotation look up the column with the matching name and then
      // retrieve the value, and convert it as needed.
      case a:AValue => find(a.value, columns) match {
        case None => throw new IllegalArgumentException("Unable to find column with name: " + a.value)
        case Some(c) => bytesToObject(cls, c.value)
      }

      // optional types are like values except they map to option/some/none so they may or
      // may not exist. additionally - we get the parameter type from the annotation not
      // the actual parameter
      case a:Optional => cls.equals(classOf[Option[_]]) match {
        case true => find(a.column, columns) match {
          case None => None
          case Some(c) => Some(bytesToObject(a.as, c.value))
        }
        case false => throw new IllegalArgumentException("@Optional may only be used on a Option[_] parameter")
      }

      // anything else throw an exception
      case _ => throw new IllegalStateException("annonation of: " + annotation + " was not placed in such a manner that it could be used on type: " + cls)
    }
  }

  info.constructor.newInstance(values.toArray.asInstanceOf[Array[Object]]:_*).asInstanceOf[T]
}
/**
 * Given a class type, a Method that returns that type, and a source object (Cascal ORM object),
 * return the appropriate serialized byte array. Does not support Option (see
 * `getOptionFieldSerialized` for that case).
 *
 * @throws IllegalStateException when the field's type has no known conversion
 */
private def getFieldSerialized[T](fieldType:Class[_], fieldGetter:Method, obj:T):ByteBuffer = {
  // Couldn't figure out how to case match classes on a class obj with type erasure
  if (fieldType == classOf[String]) Conversions.byteBuffer(fieldGetter.invoke(obj).asInstanceOf[String])
  else if (fieldType == classOf[UUID]) Conversions.byteBuffer(fieldGetter.invoke(obj).asInstanceOf[UUID])
  else if (fieldType == classOf[Int]) Conversions.byteBuffer(fieldGetter.invoke(obj).asInstanceOf[Int])
  else if (fieldType == classOf[Long]) Conversions.byteBuffer(fieldGetter.invoke(obj).asInstanceOf[Long])
  else if (fieldType == classOf[Boolean]) Conversions.byteBuffer(fieldGetter.invoke(obj).asInstanceOf[Boolean])
  else if (fieldType == classOf[Float]) Conversions.byteBuffer(fieldGetter.invoke(obj).asInstanceOf[Float])
  else if (fieldType == classOf[Double]) Conversions.byteBuffer(fieldGetter.invoke(obj).asInstanceOf[Double])
  else if (fieldType == classOf[Date]) Conversions.byteBuffer(fieldGetter.invoke(obj).asInstanceOf[Date])
  else throw new IllegalStateException("Type %s of getter %s is unknown".format(fieldGetter.getName, fieldType.toString))
}
/**
 * Given a Method that returns an Option, and a source object (Cascal ORM object),
 * return null if calling the method returns None, or otherwise the appropriate
 * serialized byte array. The null return is consumed by `unapply`, which skips
 * columns whose value is null.
 *
 * @throws IllegalStateException when the wrapped value's type has no known conversion
 */
private def getOptionFieldSerialized[T](fieldGetter:Method, obj:T):ByteBuffer = {
  val opt = fieldGetter.invoke(obj).asInstanceOf[Option[_]]
  opt match {
    case None => null
    case Some(x:String) => Conversions.byteBuffer(x)
    case Some(x:UUID) => Conversions.byteBuffer(x)
    case Some(x:Int) => Conversions.byteBuffer(x)
    case Some(x:Long) => Conversions.byteBuffer(x)
    case Some(x:Boolean) => Conversions.byteBuffer(x)
    case Some(x:Float) => Conversions.byteBuffer(x)
    case Some(x:Double) => Conversions.byteBuffer(x)
    case Some(x:Date) => Conversions.byteBuffer(x)
    case _ => throw new IllegalStateException(
      "Type of Option %s for getter %s is unknown".format(opt.toString, fieldGetter.getName))
  }
}
/**
 * Given an object of type T using the Cascal Annotations returns a list of columns
 * complete with name/value. Uses the serializers to convert values in columns to their
 * appropriate byte array.
 *
 * Inverse of `apply`: reads annotated getters off `obj` and emits one Column per
 * @Value/@Optional field (None optionals are skipped).
 * NOTE(review): `manifest.erasure` is deprecated in favor of `runtimeClass` — see `apply`.
 */
def unapply[T](obj:T)(implicit manifest:Manifest[T]):List[Column[_]] = {
  val info = Converter.this.info(manifest.erasure)

  // the row key comes from the (single) @Key-annotated getter
  val key:String = info.fieldGettersAndColumnNames.filter(tup => tup._2._2 match {
    case a:AKey => true
    case _ => false
  }).head._1.invoke(obj).asInstanceOf[String]

  // for @Super families, serialize the @SuperColumn-annotated getter as well
  var superCol:ByteBuffer = null
  if (info.isSuper) {
    val superTup = info.fieldGettersAndColumnNames.filter(tup => tup._2._2 match {
      case a:ASuperColumn => true
      case _ => false
    }).head
    val superGetter = superTup._1
    val superType = superTup._2._1
    superCol = getFieldSerialized(superType, superGetter, obj)
  }

  info.fieldGettersAndColumnNames.foldLeft(List[Column[_]]()) { (acc, tup) =>
    val fieldGetter = tup._1
    var optField = false
    // @Optional fields report their wrapped type via the annotation, not the getter
    val fieldType = tup._2._2 match {
      case a:Optional =>
        optField = true
        a.as
      case _ => tup._2._1
    }
    val columnName:String = tup._2._2 match {
      case a:Optional => a.column
      case a:AValue => a.value
      case _ => null // @Key/@SuperColumn getters do not produce a column of their own
    }
    val value:ByteBuffer = optField match {
      case false => getFieldSerialized(fieldType, fieldGetter, obj)
      case true => getOptionFieldSerialized(fieldGetter, obj) // null when the Option is None
    }
    // skip non-column getters and None optionals; otherwise build the column path
    if (columnName == null || value == null) acc
    else info.isSuper match {
      case true => (info.family.asInstanceOf[SuperColumnFamily] \\ key \\ superCol \\ (Conversions.byteBuffer(columnName), value)) :: acc
      case false => (info.family.asInstanceOf[StandardColumnFamily] \\ key \\ (Conversions.byteBuffer(columnName), value)) :: acc
    }
  }
}
  /**
   * returns the reflection information from the reflection cache, using
   * DCL (double-checked locking) to manage access to the cache: an
   * unsynchronized read first, then a synchronized re-check before
   * computing and storing the (immutable-map) cache entry.
   *
   * NOTE(review): the safety of the unsynchronized first read depends on the
   * reflectionCache var being @volatile — confirm its declaration (outside
   * this view). ReflectionInformation construction is idempotent, so a
   * stale read at worst recomputes.
   */
  def info(cls:Class[_]):ReflectionInformation = {
    if (reflectionCache.contains(cls)) reflectionCache(cls)
    else this.synchronized {
      // re-check under the lock in case another thread populated it
      if (reflectionCache.contains(cls)) reflectionCache(cls)
      else {
        val out = ReflectionInformation(cls)
        reflectionCache = reflectionCache + (out.cls.asInstanceOf[Class[_]] -> out)
        out
      }
    }
  }
/**
* returns the column with the specified name, or
*/
private def find(name:String, columns:Seq[Column[_]]):Option[Column[_]] = {
val nameBytes = Conversions.byteBuffer(name)
columns.find { (c) => nameBytes.equals(c.name) }
}
/**
* converts the specified byte array to the specified type using the installed
* serializers.
*/
private def bytesToObject[A](ofType:Class[A], bytes:ByteBuffer):A = {
serializers.get(ofType) match {
case None => throw new IllegalArgumentException("unable to find serializer for type: " + ofType)
case Some(s) =>
// TODO sure there's a better way - without this you end up with:
// "value asInstanceOf is not a member of ?"
val castedSerial = s.asInstanceOf[Serializer[Any]]
(castedSerial.fromByteBuffer(bytes)).asInstanceOf[A]
}
}
/**
* converts the specified string to the specified type using the installed
* serializers.
*/
private def stringToObject[A](ofType:Class[A], string:String):A = {
serializers.get(ofType) match {
case None => throw new IllegalArgumentException("unable to find serializer for type: " + ofType)
case Some(s) =>
// TODO sure there's a better way - without this you end up with:
// "value asInstanceOf is not a member of ?"
val castedSerial = s.asInstanceOf[Serializer[Any]]
(castedSerial.fromString(string)).asInstanceOf[A]
}
}
/**
* returns the common super column that is shared amonst all the columns
*/
private def columnsToSuperColumn(columns:Seq[Column[_]]):SuperColumn = {
if (columns.length == 0) throw new IllegalArgumentException("unable to retrieve super column when Seq[Column] is empty")
columns(0).owner match {
case sc:SuperColumn => sc
case _ => throw new IllegalArgumentException("unable to retrieve super column for a standard column")
}
}
/**
* returns the columnsToKey value for the specified sequence of columns, assumes all columns
* contain the same columnsToKey.
*/
private def columnsToKey(columns:Seq[Column[_]]):Key[_, _] = {
if (columns.length == 0) throw new IllegalArgumentException("unable to retrieve key value when empty list of columns are provided")
columns(0).key
}
/**
* holds reflection information about a given class
*/
case class ReflectionInformation(val cls:Class[_]) {
val keyspace = {
extract(cls, classOf[AKeySpace]) match {
case None => throw new IllegalArgumentException("all mapped classes must contain @Keyspace annotation; not found in " + cls)
case Some(v) => Keyspace(v.value())
}
}
val isSuper = {
extract(cls, classOf[ASuper]) match {
case None => false
case Some(v) => true
}
}
val family = {
extract(cls, classOf[AFamily]) match {
case None => throw new IllegalArgumentException("all mapped classes must contain @Family annotation")
case Some(f) => isSuper match {
case true => SuperColumnFamily(f.value(), keyspace)
case false => StandardColumnFamily(f.value(), keyspace)
}
}
}
// TODO examine all - use one with annotations present
val constructor = cls.getDeclaredConstructors()(0)
val parameters = {
val params = constructor.getParameterTypes
val annotations = constructor.getParameterAnnotations
var out = List[(Class[_], Annotation)]()
(0 until params.length).foreach { (index) =>
val annotation = annotations(index)
if (null == annotation || 0 == annotation.length) {
throw new IllegalArgumentException("unable to create object when not all parameters have annotations, parameter type: " + params(index) + ", index: " + index)
}
if (1 != annotation.length) {
throw new IllegalArgumentException("in a cascal mapped object each argument must have ONLY one annotation")
}
out = (params(index) -> annotation(0)) :: out
}
out.reverse
}
// map of annotation classes to the field
val fields = {
var out = List[(Field, Annotation)]()
cls.getDeclaredFields.foreach { field =>
val annotations = field.getDeclaredAnnotations
if (annotations.length > 0) annotations(0) match {
case a:AKey => out = (field -> a) :: out
case a:Optional => out = (field -> a) :: out
case a:ASuperColumn => out = (field -> a) :: out
case a:AValue => out = (field -> a) :: out
case _ => /* ignore */
}
}
out
}
val fieldNames = cls.getDeclaredFields.map(_.getName)
val fieldGetters = cls.getDeclaredMethods.filter(m=>fieldNames.contains(m.getName))
// Returns Seq[(getters for private field, (col type, col annotation))]
val fieldGettersAndColumnNames = fieldGetters.sortWith(
(f1, f2) => fieldNames.indexOf(f1.getName) < fieldNames.indexOf(f2.getName)).zip(parameters)
/**
* returns the field for the specified annotation class
*/
def field[A <: Annotation](cls:Class[A]):Option[(Field, Annotation)] = fields.find { (tup) => cls.equals(tup._2.getClass)}
/**
* returns all the fields matching the specified annotation
*/
def fields[A <: Annotation](cls:Class[A]):Seq[(Field, Annotation)] = fields.filter { (tup) => cls.equals(tup._2.getClass) }
private def extract[A <: Annotation](cls:Class[_], annot:Class[A]):Option[A] = {
val value = cls.getAnnotation(annot).asInstanceOf[A]
if (null == value) None
else Some(value)
}
}
}
|
Shimi/cascal
|
src/main/scala/com/shorrockin/cascal/serialization/Converter.scala
|
Scala
|
apache-2.0
| 15,049 |
package vggames.scala.specs.valvar
import vggames.scala.specs.GameSpecification
import vggames.scala.code.RestrictedFunction0
import vggames.scala.specs.TestRun
/**
 * Exercise: the player must define a variable named `valor` holding the
 * string "var". The submitted code is evaluated and its result checked.
 */
class DefineVarString extends GameSpecification[RestrictedFunction0[String]] {

  // re-assign and read back so the submission must really be a var
  override def afterCode = "valor = valor\\n valor"

  def runSignature = ":String"

  def extendsType = "RestrictedFunction0[String]"

  def challenge = "Defina a variavel chamada <code>valor</code> com o valor <code>\"var\"</code> "

  def run(code : Code, submittedCode : String)(implicit cases : TestRun) =
    "O seu código" should {
      " definir a variável \"var\" chamada valor " in {
        code() must_== "var"
      }
    }
}
|
vidageek/games
|
games/scala/src/main/scala/vggames/scala/specs/valvar/DefineVarString.scala
|
Scala
|
gpl-3.0
| 688 |
package net.sansa_stack.query.spark.graph.jena.serialization
/**
 * Serializers for the sansa query layer.
 *
 * Intentionally empty for now: no query-layer-specific Kryo serializers are
 * required yet. Register new serializers inside this object when the need
 * arises.
 */
object JenaKryoSerializers {
  // No common query layer specific serializers so far
  // This is the place to add them in when the need arises
}
|
SANSA-Stack/SANSA-RDF
|
sansa-query/sansa-query-spark/src/main/scala/net/sansa_stack/query/spark/graph/jena/serialization/JenaKryoSerializers.scala
|
Scala
|
apache-2.0
| 261 |
/*
* This file is part of Kiama.
*
* Copyright (C) 2008-2013 Anthony M Sloane, Macquarie University.
*
* Kiama is free software: you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* Kiama is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
* more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Kiama. (See files COPYING and COPYING.LESSER.) If not, see
* <http://www.gnu.org/licenses/>.
*/
package org.kiama
package rewriting
/**
* Strategy-based term rewriting in the style of Stratego (http://strategoxt.org/).
* The implementation here is partially based on the semantics given in "Program
* Transformation with Scoped Dynamic Rewrite Rules", by Bravenboer, van Dam, Olmos
* and Visser, Fundamenta Informaticae, 69, 2005. The library strategies are mostly
* based on the Stratego library, but also on combinators found in the Scrap Your
* Boilerplate and Uniplate libraries for Haskell.
*/
trait Rewriter {
import org.kiama.util.Emitter
import scala.collection.generic.CanBuildFrom
import scala.collection.mutable.Builder
import scala.collection.mutable.WeakHashMap
    /**
     * The type of terms that can be rewritten. Any type of value is acceptable
     * but generic traversals will only work on some specific types. See the
     * documentation of the specific generic traversals (e.g., `all` or `some`)
     * for a detailed description.
     */
    type Term = Any
/**
* Term-rewriting strategies. A strategy is a function that takes a term
* as input and either succeeds producing a new term (`Some`), or fails
* (`None`).
*/
abstract class Strategy extends (Term => Option[Term]) {
/**
* Alias this strategy as `p` to make it easier to refer to in the
* combinator definitions.
*/
p =>
/**
* Apply this strategy to a term, producing either a transformed term
* wrapped in `Some`, or `None`, representing a rewriting failure.
*/
def apply (r : Term) : Option[Term]
/**
* Sequential composition. Construct a strategy that first applies
* this strategy. If it succeeds, then apply `q` to the new subject
* term. Otherwise fail.
*/
def <* (q : => Strategy) : Strategy =
new Strategy {
def apply (t1 : Term) : Option[Term] =
p (t1) match {
case Some (t2) => q (t2)
case None => None
}
}
/**
* Deterministic choice. Construct a strategy that first applies
* this strategy. If it succeeds, succeed with the resulting term.
* Otherwise, apply `q` to the original subject term.
*/
def <+ (q : => Strategy) : Strategy =
new Strategy {
def apply (t1 : Term) : Option[Term] =
p (t1) match {
case Some (t2) => Some (t2)
case None => q (t1)
}
}
/**
* Non-deterministic choice. Normally, construct a strategy that
* first applies either this strategy or the given strategy. If it
* succeeds, succeed with the resulting term. Otherwise, apply `q`.
* Currently implemented as deterministic choice, but this behaviour
* should not be relied upon.
* When used as the argument to the `<` conditional choice
* combinator, `+` just serves to hold the two strategies that are
* chosen between by the conditional choice.
*/
def + (q : => Strategy) : PlusStrategy =
new PlusStrategy (p, q)
/**
* Conditional choice: `c < l + r`. Construct a strategy that first
* applies this strategy (`c`). If `c` succeeds, the strategy applies
* `l` to the resulting term, otherwise it applies `r` to the original
* subject term.
*/
def < (lr : => PlusStrategy) : Strategy =
new Strategy {
def apply (t1 : Term) : Option[Term] =
p (t1) match {
case Some (t2) => lr.lhs (t2)
case None => lr.rhs (t1)
}
}
}
/**
* Helper class to contain commonality of choice in non-deterministic
* choice operator and then-else part of a conditional choice. Only
* returned by the non-deterministic choice operator.
*/
class PlusStrategy (p : => Strategy, q : => Strategy) extends Strategy {
val lhs = p
val rhs = q
def apply (t : Term) : Option[Term] =
(p <+ q) (t)
}
/**
* Make a strategy from a function `f`. The function return value
* determines whether the strategy succeeds or fails.
*/
def strategyf (f : Term => Option[Term]) : Strategy =
new Strategy {
def apply (t : Term) : Option[Term] =
f (t)
}
/**
* Make a strategy from a partial function `f`. If the function is
* defined at the current term, then the function return value
* when applied to the current term determines whether the strategy
* succeeds or fails. If the function is not defined at the current
* term, the strategy fails.
*/
def strategy (f : Term ==> Option[Term]) : Strategy =
new Strategy {
def apply (t : Term) : Option[Term] = {
if (f isDefinedAt t)
f (t)
else
None
}
}
/**
* Define a rewrite rule using a function `f` that returns a term.
* The rule always succeeds with the return value of the function.
*/
def rulef (f : Term => Term) : Strategy =
strategyf (t => Some (f (t)))
/**
* Define a rewrite rule using a partial function `f`. If the function is
* defined at the current term, then the strategy succeeds with the return
* value of the function applied to the current term. Otherwise the
* strategy fails.
*/
def rule (f : Term ==> Term) : Strategy =
new Strategy {
def apply (t : Term) : Option[Term] = {
if (f isDefinedAt t)
Some (f (t))
else
None
}
}
/**
* Define a rewrite rule using a function `f` that returns a strategy. The
* rule applies the function to the subject term to get a strategy which
* is then applied again to the subject term. In other words, the function
* is only used for side-effects such as pattern matching. The whole thing
* also fails if `f` is not defined at the term in the first place.
*/
def rulefs (f : Term ==> Strategy) : Strategy =
new Strategy {
def apply (t : Term) : Option[Term] = {
if (f isDefinedAt t)
(f (t)) (t)
else
None
}
}
/**
* Construct a strategy that always succeeds, changing the subject term to
* the given term `t`.
*/
def build (t : => Term) : Strategy =
rulef (_ => t)
/**
* Construct a strategy from an option value `o`. The strategy succeeds
* or fails depending on whether `o` is a Some or None, respectively.
* If `o` is a `Some`, then the subject term is changed to the term that
* is wrapped by the `Some`.
*/
def option (o : => Option[Term]) : Strategy =
strategyf (_ => o)
/**
* Define a term query by a function `f`. The query always succeeds with
* no effect on the subject term but applies the given (possibly partial)
* function `f` to the subject term. In other words, the strategy runs
* `f` for its side-effects.
*/
def queryf[T] (f : Term => T) : Strategy =
new Strategy {
def apply (t : Term) : Option[Term] = {
f (t)
Some (t)
}
}
/**
* Define a term query by a partial function `f`. The query always succeeds
* with no effect on the subject term but applies the given partial function
* `f` to the subject term. In other words, the strategy runs `f` for its
* side-effects.
*/
def query[T] (f : Term ==> T) : Strategy =
new Strategy {
def apply (t : Term) : Option[Term] = {
if (f isDefinedAt t)
f (t)
Some (t)
}
}
/**
* A strategy that always fails.
*/
val fail : Strategy =
option (None)
/**
* A strategy that always succeeds with the subject term unchanged (i.e.,
* this is the identity strategy).
*/
val id : Strategy =
strategyf (t => Some (t))
/**
* A strategy that always succeeds with the subject term unchanged (i.e.,
* this is the identity strategy) with the side-effect that the subject
* term is printed to the given emitter, prefixed by the string `s`. The
* emitter defaults to one that writes to standard output.
*/
def debug (msg : String, emitter : Emitter = new Emitter) : Strategy =
strategyf (t => { emitter.emitln (msg + t); Some (t) })
/**
* Create a logging strategy based on a strategy `s`. The returned strategy
* succeeds or fails exactly as `s` does, but also prints the provided message,
* the subject term, the success or failure status, and on success, the result
* term, to the provided emitter (default: standard output).
*/
def log[T] (s : => Strategy, msg : String, emitter : Emitter = new Emitter) : Strategy =
new Strategy {
def apply (t1 : Term) : Option[Term] = {
emitter.emit (msg + t1)
val r = s (t1)
r match {
case Some (t2) =>
emitter.emitln (" succeeded with " + t2)
case None =>
emitter.emitln (" failed")
}
r
}
}
/**
* Create a logging strategy based on a strategy `s`. The returned strategy
* succeeds or fails exactly as `s` does, but if `s` fails, also prints the
* provided message and the subject term to the provided emitter (default:
* standard output).
*/
def logfail[T] (s : => Strategy, msg : String, emitter : Emitter = new Emitter) : Strategy =
new Strategy {
def apply (t1 : Term) : Option[Term] = {
val r = s (t1)
r match {
case Some (t2) =>
// Do nothing
case None =>
emitter.emitln (msg + t1 + " failed")
}
r
}
}
/**
* Construct a strategy that succeeds only if the subject term matches
* the given term `t`.
*/
def term (t : Term) : Strategy =
rule {
case `t` => t
}
/**
* Generic term deconstruction.
*/
object Term {
/**
* Generic term deconstruction. An extractor that decomposes `Product`
* or `Rewritable` values into the value itself and a sequence of its
* children. Terms that are not `Product` or `Rewritable` are not
* decomposable (i.e., the list of children will be empty).
*/
def unapply (t : Any) : Option[(Any,Seq[Any])] = {
t match {
case r : Rewritable =>
Some ((r, r.deconstruct))
case p : Product =>
val cs = for (i <- 0 until p.productArity) yield p.productElement (i)
Some ((p, cs))
case _ =>
Some ((t, Nil))
}
}
}
/**
* Perform a paramorphism over a value. This is a fold in which the
* recursive step may refer to the recursive component of the value
* and the results of folding over the children. When the function `f`
* is called, the first parameter is the value and the second is a
* sequence of the values that `f` has returned for the children. his
* will work on any value, but will only decompose values that are
* supported by the `Term` generic term deconstruction. This operation
* is similar to that used in the Uniplate library.
*/
def para[T] (f : (Any, Seq[T]) => T) : Any => T = {
case Term (t, ts) => f (t, ts.map (para (f)))
}
    /**
     * Cache of constructors for product duplication, keyed weakly by class
     * so cached entries do not pin classes in memory.
     */
    protected val constrcache =
        new WeakHashMap[java.lang.Class[_], java.lang.reflect.Constructor[_]]
    /**
     * General product duplication function. Returns a product that applies
     * the same constructor as the product `t`, but with the given children
     * instead of `t`'s children. Fails if a constructor cannot be found,
     * there are the wrong number of new children, or if one of the new
     * children is not of the appropriate type.
     */
    protected def dup[T <: Product] (t : T, children : Array[AnyRef]) : T = {
        val clazz = t.getClass
        // NOTE: uses the FIRST declared constructor; assumes it is the primary one
        val ctor = constrcache.getOrElseUpdate (clazz, (clazz.getConstructors())(0))
        try {
            ctor.newInstance (children : _*).asInstanceOf[T]
        } catch {
            // arity or argument-type mismatch from reflection
            case e : IllegalArgumentException =>
                sys.error ("dup illegal arguments: " + ctor + " (" +
                           children.deep.mkString (",") + "), expects " +
                           ctor.getParameterTypes.length)
        }
    }
    /**
     * Make an arbitrary value `c` into a term child, checking that it worked
     * properly. Object references will be returned unchanged; other values
     * will be boxed (by the AnyRef cast).
     */
    protected def makechild (c : Any) : AnyRef =
        c.asInstanceOf[AnyRef]
    /**
     * Traversal to a single child. Construct a strategy that applies `s` to
     * the ''ith'' child of the subject term (counting from one). If `s` succeeds on
     * the ''ith'' child producing `t`, then succeed, forming a new term that is the
     * same as the original term except that the ''ith'' child is now `t`. If `s` fails
     * on the ''ith'' child or the subject term does not have an ''ith'' child, then fail.
     * `child(i, s)` is equivalent to Stratego's `i(s)` operator. If `s` succeeds on
     * the ''ith'' child producing the same term (by `eq` for references and by `==` for
     * other values), then the overall strategy returns the subject term.
     * This operation works for instances of `Product` or finite `Seq` values.
     */
    def child (i : Int, s : Strategy) : Strategy =
        new Strategy {
            def apply (t : Term) : Option[Term] =
                t match {
                    case p : Product => childProduct (p)
                    case t : Seq[_] => childSeq (t.asInstanceOf[Seq[Term]])
                    case _ => None
                }
            // Product case: rebuild via the constructor with child i-1 replaced
            private def childProduct (p : Product) : Option[Term] = {
                val numchildren = p.productArity
                if ((i < 1) || (i > numchildren)) {
                    None
                } else {
                    val ct = p.productElement (i-1)
                    s (ct) match {
                        // unchanged child: preserve sharing, return original term
                        case Some (ti) if (same (ct, ti)) =>
                            Some (p)
                        case Some (ti) =>
                            // copy all children, then overwrite the ith one
                            val newchildren = new Array[AnyRef](numchildren)
                            var j = 0
                            while (j < numchildren) {
                                newchildren (j) = makechild (p.productElement (j))
                                j = j + 1
                            }
                            newchildren (i-1) = makechild (ti)
                            val ret = dup (p, newchildren)
                            Some (ret)
                        case None =>
                            None
                    }
                }
            }
            // Seq case: rebuild the same collection type via its builder
            private def childSeq[CC[U] <: Seq[U]] (t : CC[Term])
                    (implicit cbf : CanBuildFrom[CC[Term], Term, CC[Term]])
                        : Option[CC[Term]] = {
                val numchildren = t.size
                if ((i < 1) || (i > numchildren)) {
                    None
                } else {
                    val ct = t (i - 1)
                    s (ct) match {
                        // unchanged child: preserve sharing, return original collection
                        case Some (ti) if (same (ct, ti)) =>
                            Some (t)
                        case Some (ti) =>
                            // copy prefix, insert new child, copy suffix
                            val b = cbf (t)
                            b.sizeHint (t.size)
                            var j = 0
                            while (j < i - 1) {
                                b += t (j)
                                j = j + 1
                            }
                            b += ti
                            j = j + 1
                            while (j < numchildren) {
                                b += t (j)
                                j = j + 1
                            }
                            Some (b.result)
                        case None =>
                            None
                    }
                }
            }
        }
/**
* Compare two arbitrary values. If they are both references, use
* reference equality, otherwise throw an error since we should be
* able to cast anything to reference.
*/
protected def same (v1 : Any, v2 : Any) : Boolean =
if (v1 == null)
v2 == null
else if (v2 == null)
false
else
(v1, v2) match {
case (r1 : AnyRef, r2: AnyRef) =>
r1 eq r2
case _ =>
sys.error ("Rewriter.same: comparison of non-AnyRefs " + v1 + " and " +
v2 + ", should not be reached")
}
    /**
     * Traversal to all children. Construct a strategy that applies `s` to all
     * term children of the subject term. If `s` succeeds on all of the children,
     * then succeed, forming a new term from the constructor
     * of the original term and the result of `s` for each child. If `s` fails on any
     * child, fail. If there are no children, succeed. If `s` succeeds on all
     * children producing the same terms (by `eq` for references and by `==` for
     * other values), then the overall strategy returns the subject term.
     * This operation works on finite `Rewritable`, `Product`, `Map` and `Traversable`
     * values, checked for in that order.
     * Children of a `Rewritable` (resp. Product, collection) value are processed
     * in the order returned by the value's deconstruct (resp. `productElement`,
     * `foreach`) method.
     */
    def all (s : => Strategy) : Strategy =
        new Strategy {
            def apply (t : Term) : Option[Term] =
                t match {
                    case r : Rewritable => allRewritable (r)
                    case p : Product => allProduct (p)
                    case m : Map[_,_] => allMap (m.asInstanceOf[Map[Term,Term]])
                    case t : Traversable[_] => allTraversable (t.asInstanceOf[Traversable[Term]])
                    // childless value: vacuous success
                    case _ => Some (t)
                }
            private def allProduct (p : Product) : Option[Term] = {
                val numchildren = p.productArity
                if (numchildren == 0)
                    Some (p)
                else {
                    val newchildren = new Array[AnyRef](numchildren)
                    // track whether any child actually changed so sharing is preserved
                    var changed = false
                    var i = 0
                    while (i < numchildren) {
                        val ct = p.productElement (i)
                        s (ct) match {
                            case Some (ti) =>
                                newchildren (i) = makechild (ti)
                                if (!same (ct, ti))
                                    changed = true
                            case None =>
                                // any failing child fails the whole traversal
                                return None
                        }
                        i = i + 1
                    }
                    if (changed) {
                        val ret = dup (p, newchildren)
                        Some (ret)
                    } else
                        Some (p)
                }
            }
            private def allRewritable (r : Rewritable) : Option[Term] = {
                val numchildren = r.arity
                if (numchildren == 0)
                    Some (r)
                else {
                    val children = r.deconstruct
                    val newchildren = new Array[Any](numchildren)
                    var changed = false
                    var i = 0
                    while (i < numchildren) {
                        val ct = children (i)
                        s (ct) match {
                            case Some (ti) =>
                                newchildren (i) = makechild (ti)
                                if (!same (ct, ti))
                                    changed = true
                            case None =>
                                return None
                        }
                        i = i + 1
                    }
                    if (changed) {
                        // rebuild via the value's own reconstruct method
                        val ret = r.reconstruct (newchildren)
                        Some (ret)
                    } else
                        Some (r)
                }
            }
            private def allTraversable[CC[_] <: Traversable[Term]] (t : CC[Term])
                    (implicit cbf : CanBuildFrom[CC[Term], Term, CC[Term]])
                        : Option[CC[Term]] =
                if (t.size == 0)
                    Some (t)
                else {
                    val b = cbf (t)
                    b.sizeHint (t.size)
                    var changed = false
                    for (ct <- t)
                        s (ct) match {
                            case Some (ti) =>
                                b += ti
                                if (!same (ct, ti))
                                    changed = true
                            case None =>
                                return None
                        }
                    if (changed)
                        Some (b.result)
                    else
                        Some (t)
                }
            private def allMap[CC[V,W] <: Map[V,W]] (t : CC[Term,Term])
                    (implicit cbf : CanBuildFrom[CC[Term,Term], (Term, Term), CC[Term,Term]])
                        : Option[CC[Term,Term]] =
                if (t.size == 0)
                    Some (t)
                else {
                    val b = cbf (t)
                    b.sizeHint (t.size)
                    var changed = false
                    for (ct <- t)
                        s (ct) match {
                            // each map entry is rewritten as a (key, value) pair;
                            // a success that is not a pair also fails the traversal
                            case Some (ti @ (tix,tiy)) =>
                                b += ti
                                if (!same (ct, ti))
                                    changed = true
                            case _ =>
                                return None
                        }
                    if (changed)
                        Some (b.result)
                    else
                        Some (t)
                }
        }
    /**
     * Traversal to one child. Construct a strategy that applies `s` to the term
     * children of the subject term. Assume that `c` is the
     * first child on which s succeeds. Then stop applying `s` to the children and
     * succeed, forming a new term from the constructor of the original term and
     * the original children, except that `c` is replaced by the result of applying
     * `s` to `c`. In the event that the strategy fails on all children, then fail.
     * If there are no children, fail. If `s` succeeds on the one child producing
     * the same term (by `eq` for references and by `==` for other values), then
     * the overall strategy returns the subject term.
     * This operation works on instances of finite `Rewritable`, `Product`, `Map`
     * and `Traversable` values, checked for in that order.
     * Children of a `Rewritable` (resp. `Product`, collection) value are processed
     * in the order returned by the value's `deconstruct` (resp. `productElement`,
     * `foreach`) method.
     */
    def one (s : => Strategy) : Strategy =
        new Strategy {
            def apply (t : Term) : Option[Term] =
                t match {
                    case r : Rewritable => oneRewritable (r)
                    case p : Product => oneProduct (p)
                    case m : Map[_,_] => oneMap (m.asInstanceOf[Map[Term,Term]])
                    case t : Traversable[_] => oneTraversable (t.asInstanceOf[Traversable[Term]])
                    case _ => None
                }
            private def oneProduct (p : Product) : Option[Term] = {
                val numchildren = p.productArity
                var i = 0
                while (i < numchildren) {
                    val ct = p.productElement (i)
                    s (ct) match {
                        // first success with an unchanged child: preserve sharing
                        case Some (ti) if (same (ct, ti)) =>
                            return Some (p)
                        case Some (ti) =>
                            // rebuild with only child i replaced; later children untouched
                            val newchildren = new Array[AnyRef] (numchildren)
                            var j = 0
                            while (j < i) {
                                newchildren (j) = makechild (p.productElement (j))
                                j = j + 1
                            }
                            newchildren (i) = makechild (ti)
                            j = j + 1
                            while (j < numchildren) {
                                newchildren (j) = makechild (p.productElement (j))
                                j = j + 1
                            }
                            val ret = dup (p, newchildren)
                            return Some (ret)
                        case None =>
                            // Do nothing
                    }
                    i = i + 1
                }
                None
            }
            private def oneRewritable (r : Rewritable) : Option[Term] = {
                val numchildren = r.arity
                val children = r.deconstruct
                var i = 0
                while (i < numchildren) {
                    val ct = children (i)
                    s (ct) match {
                        case Some (ti) if (same (ct, ti)) =>
                            return Some (r)
                        case Some (ti) =>
                            val newchildren = new Array[Any] (numchildren)
                            var j = 0
                            while (j < i) {
                                newchildren (j) = makechild (children (j))
                                j = j + 1
                            }
                            newchildren (i) = makechild (ti)
                            j = j + 1
                            while (j < numchildren) {
                                newchildren (j) = makechild (children (j))
                                j = j + 1
                            }
                            val ret = r.reconstruct (newchildren)
                            return Some (ret)
                        case None =>
                            // Do nothing
                    }
                    i = i + 1
                }
                None
            }
            private def oneTraversable[CC[U] <: Traversable[U]] (t : CC[Term])
                    (implicit cbf : CanBuildFrom[CC[Term], Term, CC[Term]])
                        : Option[CC[Term]] = {
                val b = cbf (t)
                b.sizeHint (t.size)
                // add: still looking for the first success; once found,
                // remaining children are copied through unchanged
                var add = true
                for (ct <- t)
                    if (add)
                        s (ct) match {
                            case Some (ti) if same (ct, ti) =>
                                return Some (t)
                            case Some (ti) =>
                                b += ti
                                add = false
                            case None =>
                                b += ct
                        }
                    else
                        b += ct
                if (add)
                    None
                else
                    Some (b.result)
            }
            private def oneMap[CC[V,W] <: Map[V,W]] (t : CC[Term,Term])
                    (implicit cbf : CanBuildFrom[CC[Term,Term], (Term, Term), CC[Term,Term]])
                        : Option[CC[Term,Term]] = {
                val b = cbf (t)
                b.sizeHint (t.size)
                var add = true
                for (ct <- t)
                    if (add)
                        s (ct) match {
                            // rewritten entry must still be a (key, value) pair
                            case Some (ti @ (tix,tiy)) if (same (ct, ti)) =>
                                return Some (t)
                            case Some (ti @ (tix, tiy)) =>
                                b += ti
                                add = false
                            case None =>
                                b += ct
                        }
                    else
                        b += ct
                if (add)
                    None
                else
                    Some (b.result)
            }
        }
    /**
     * Traversal to as many children as possible, but at least one. Construct a
     * strategy that applies `s` to the term children of the subject term.
     * If `s` succeeds on any of the children, then succeed,
     * forming a new term from the constructor of the original term and the result
     * of `s` for each succeeding child, with other children unchanged. In the event
     * that the strategy fails on all children, then fail. If there are no
     * children, fail. If `s` succeeds on children producing the same terms (by `eq`
     * for references and by `==` for other values), then the overall strategy
     * returns the subject term.
     * This operation works on instances of finite `Rewritable`, `Product`, `Map` and
     * `Traversable` values, checked for in that order.
     * Children of a `Rewritable` (resp. `Product`, collection) value are processed
     * in the order returned by the value's `deconstruct` (resp. `productElement`,
     * `foreach`) method.
     */
    def some (s : => Strategy) : Strategy =
        new Strategy {
            def apply (t : Term) : Option[Term] =
                t match {
                    case r : Rewritable => someRewritable (r)
                    case p : Product => someProduct (p)
                    case m : Map[_,_] => someMap (m.asInstanceOf[Map[Term,Term]])
                    case t : Traversable[_] => someTraversable (t.asInstanceOf[Traversable[Term]])
                    case _ => None
                }
            private def someProduct (p : Product) : Option[Term] = {
                val numchildren = p.productArity
                if (numchildren == 0)
                    None
                else {
                    val newchildren = new Array[AnyRef](numchildren)
                    // success: s succeeded on at least one child;
                    // changed: at least one child is a different term (sharing)
                    var success = false
                    var changed = false
                    var i = 0
                    while (i < numchildren) {
                        val ct = p.productElement (i)
                        s (ct) match {
                            case Some (ti) =>
                                newchildren (i) = makechild (ti)
                                if (!same (ct, ti))
                                    changed = true
                                success = true
                            case None =>
                                // failing children are kept unchanged
                                newchildren (i) = makechild (ct)
                        }
                        i = i + 1
                    }
                    if (success)
                        if (changed) {
                            val ret = dup (p, newchildren)
                            Some (ret)
                        } else
                            Some (p)
                    else
                        None
                }
            }
            private def someRewritable (r : Rewritable) : Option[Term] = {
                val numchildren = r.arity
                if (numchildren == 0)
                    None
                else {
                    val children = r.deconstruct
                    val newchildren = new Array[Any](numchildren)
                    var success = false
                    var changed = false
                    var i = 0
                    while (i < numchildren) {
                        val ct = children (i)
                        s (ct) match {
                            case Some (ti) =>
                                newchildren (i) = makechild (ti)
                                if (!same (ct, ti))
                                    changed = true
                                success = true
                            case None =>
                                newchildren (i) = makechild (ct)
                        }
                        i = i + 1
                    }
                    if (success)
                        if (changed) {
                            val ret = r.reconstruct (newchildren)
                            Some (ret)
                        } else
                            Some (r)
                    else
                        None
                }
            }
            private def someTraversable[CC[U] <: Traversable[U]] (t : CC[Term])
                    (implicit cbf : CanBuildFrom[CC[Term], Term, CC[Term]])
                        : Option[CC[Term]] =
                if (t.size == 0)
                    None
                else {
                    val b = cbf (t)
                    b.sizeHint (t.size)
                    var success = false
                    var changed = false
                    for (ct <- t)
                        s (ct) match {
                            case Some (ti) =>
                                b += ti
                                if (!same (ct, ti))
                                    changed = true
                                success = true
                            case None =>
                                b += ct
                        }
                    if (success)
                        if (changed)
                            Some (b.result)
                        else
                            Some (t)
                    else
                        None
                }
            private def someMap[CC[V,W] <: Map[V,W]] (t : CC[Term,Term])
                    (implicit cbf : CanBuildFrom[CC[Term,Term], (Term, Term), CC[Term,Term]])
                        : Option[CC[Term,Term]] =
                if (t.size == 0)
                    None
                else {
                    val b = cbf (t)
                    b.sizeHint (t.size)
                    var success = false
                    var changed = false
                    for (ct <- t)
                        s (ct) match {
                            // a rewritten entry counts only if it is still a pair;
                            // anything else (failure or non-pair) keeps the original
                            case Some (ti @ (tix, tiy)) =>
                                b += ti
                                if (!same (ct, ti))
                                    changed = true
                                success = true
                            case _ =>
                                b += ct
                        }
                    if (success)
                        if (changed)
                            Some (b.result)
                        else
                            Some (t)
                    else
                        None
                }
        }
    /**
     * Make a strategy that applies the elements of ss pairwise to the
     * children of the subject term, returning a new term if all of the
     * strategies succeed, otherwise failing. The constructor of the new
     * term is the same as that of the original term and the children
     * are the results of the strategies. If the length of `ss` is not
     * the same as the number of children, then `congruence(ss)` fails.
     * If the argument strategies succeed on children producing the same
     * terms (by `eq` for references and by `==` for other values), then the
     * overall strategy returns the subject term.
     * This operation works on instances of `Product` values.
     */
    def congruence (ss : Strategy*) : Strategy =
        new Strategy {
            def apply (t : Term) : Option[Term] =
                t match {
                    case p : Product => congruenceProduct (p, ss : _*)
                    // non-products succeed unchanged
                    case _ => Some (t)
                }
            private def congruenceProduct (p : Product, ss : Strategy*) : Option[Term] = {
                val numchildren = p.productArity
                // arity must match the number of supplied strategies exactly
                if (numchildren == ss.length) {
                    val newchildren = new Array[AnyRef](numchildren)
                    var changed = false
                    var i = 0
                    while (i < numchildren) {
                        val ct = p.productElement (i)
                        (ss (i)) (ct) match {
                            case Some (ti) =>
                                newchildren (i) = makechild (ti)
                                if (!same (ct, ti))
                                    changed = true
                            case None =>
                                return None
                        }
                        i = i + 1
                    }
                    if (changed) {
                        val ret = dup (p, newchildren)
                        Some (ret)
                    } else
                        // all children unchanged: preserve sharing
                        Some (p)
                } else
                    None
            }
        }
/**
 * Rewrite a term. Apply the strategy `s` to a term returning the result term
 * if `s` succeeds, otherwise return the original term.
 */
def rewrite[T] (s : => Strategy) (t : T) : T =
    // A failed strategy (None) leaves the subject term unchanged.
    s (t).map (_.asInstanceOf[T]).getOrElse (t)
/**
 * Return a strategy that behaves as `s` does, but memoises its arguments and
 * results. In other words, if `memo(s)` is called on a term `t` twice, the
 * second time will return the same result as the first, without having to
 * invoke `s`. For best results, it is important that `s` should have no side
 * effects.
 */
def memo (s : => Strategy) : Strategy =
    new Strategy {
        // NOTE(review): cache is an unsynchronised mutable map, so the
        // memoised strategy is not safe for concurrent use — confirm callers.
        private val seen =
            scala.collection.mutable.HashMap.empty[Term, Option[Term]]
        def apply (t : Term) : Option[Term] =
            seen.getOrElseUpdate (t, s (t))
    }
/**
 * Collect query results in a traversable collection. Run the function
 * `f` as a top-down left-to-right query on the subject term. Accumulate
 * the values produced by the function in the collection and return the
 * final value of the list.
 */
def collect[CC[U] <: Traversable[U],T] (f : Term ==> T)
        (implicit cbf : CanBuildFrom[CC[T],T,CC[T]]) : Term => CC[T] =
    (subject : Term) => {
        val builder = cbf ()
        // Every match of f appends its result to the builder as a side effect.
        val accumulate = query (f andThen (v => builder += v))
        everywhere (accumulate) (subject)
        builder.result ()
    }
/**
 * Collect query results in a list. Run the function `f` as a top-down
 * left-to-right query on the subject term. Accumulate the values
 * produced by the function in a list and return the final value of
 * the list.
 */
def collectl[T] (f : Term ==> T) : Term => List[T] =
    collect[List,T] (f)
/**
 * Collect query results in a set. Run the function `f` as a top-down
 * left-to-right query on the subject term. Accumulate the values
 * produced by the function in a set and return the final value of
 * the set. Note that duplicate query results collapse to a single
 * set element.
 */
def collects[T] (f : Term ==> T) : Term => Set[T] =
    collect[Set,T] (f)
/**
 * Count function results. Run the function `f` as a top-down query on
 * the subject term. Sum the integer values returned by `f` from all
 * applications.
 */
def count (f : Term ==> Int) : Term => Int =
    everything (0) (_ + _) (f)
/**
 * Construct a strategy that applies `s` to each element of a list,
 * returning a new list of the results if all of the applications
 * succeed, otherwise fail. If all of the applications succeed
 * without change, return the input list.
 */
def map (s : => Strategy) : Strategy =
    rulefs {
        case Nil => id
        case l @ (x :: xs) =>
            // Apply s to the head, then recursively map the tail; rebuild
            // the cons cell only if head or tail actually changed.
            option (s (x)) <*
                rulefs {
                    case y =>
                        option (map (s) (xs)) <*
                            rule {
                                // `List[_]` is matched after type erasure, so only
                                // the List constructor is checked here; the element
                                // type is guaranteed by construction above.
                                case ys : List[_] =>
                                    if (same (x, y) && same (xs, ys))
                                        l
                                    else
                                        y :: ys
                            }
                }
    }
/**
 * Construct a strategy that applies `s`, yielding the result of `s` if it
 * succeeds, otherwise leave the original subject term unchanged. In
 * Stratego library this strategy is called `try`. Never fails.
 */
def attempt (s : => Strategy) : Strategy =
    s <+ id
/**
 * Construct a strategy that applies `s` repeatedly until it fails.
 * Always succeeds: the final, failing application is absorbed by
 * `attempt`.
 */
def repeat (s : => Strategy) : Strategy =
    attempt (s <* repeat (s))
/**
 * Construct a strategy that repeatedly applies `s` until it fails and
 * then terminates with application of `c`.
 */
def repeat (s : => Strategy, c : => Strategy) : Strategy =
    (s <* repeat (s, c)) <+ c
/**
 * Construct a strategy that applies `s` repeatedly exactly `n` times. If
 * `s` fails at some point during the n applications, the entire strategy
 * fails. The result of the strategy is that of the ''nth'' application of
 * `s`. For `n == 0` this is the identity strategy.
 */
def repeat (s : => Strategy, n : Int) : Strategy =
    if (n == 0) id else s <* repeat (s, n - 1)
/**
 * Construct a strategy that repeatedly applies `s` (at least once) and
 * terminates with application of `c`.
 */
def repeat1 (s : => Strategy, c : => Strategy) : Strategy =
    s <* (repeat1 (s, c) <+ c)
/**
 * Construct a strategy that repeatedly applies `s` (at least once).
 */
def repeat1 (s : => Strategy) : Strategy =
    repeat1 (s, id)
/**
 * Construct a strategy that repeatedly applies `s` until `c` succeeds.
 */
def repeatuntil (s : => Strategy, c : => Strategy) : Strategy =
    s <* (c <+ repeatuntil (s, c))
/**
 * Construct a strategy that while c succeeds applies `s`. This operator
 * is called `while` in the Stratego library. Never fails.
 */
def loop (c : => Strategy, s : => Strategy) : Strategy =
    attempt (c <* s <* loop (c, s))
/**
 * Construct a strategy that while `c` does not succeed applies `s`. This
 * operator is called `while-not` in the Stratego library.
 */
def loopnot (c : => Strategy, s : => Strategy) : Strategy =
    c <+ (s <* loopnot (c, s))
/**
 * Construct a strategy that applies `s` at least once and then repeats `s`
 * while `c` succeeds. This operator is called `do-while` in the Stratego
 * library.
 */
def doloop (s : => Strategy, c : => Strategy) : Strategy =
    s <* loop (c, s)
/**
 * Construct a strategy that repeats application of `s` while `c` fails, after
 * initialization with `i`. This operator is called `for` in the Stratego
 * library.
 */
def loopiter (i : => Strategy, c : => Strategy, s : => Strategy) : Strategy =
    i <* loopnot (c, s)
/**
 * Construct a strategy that applies `s(i)` for each integer `i` from `low` to
 * `high` (inclusive). This operator is called `for` in the Stratego library.
 * An empty range (`low > high`) yields the identity strategy.
 */
def loopiter (s : Int => Strategy, low : Int, high : Int) : Strategy =
    if (low <= high)
        s (low) <* loopiter (s, low + 1, high)
    else
        id
/**
 * Construct a strategy that applies `s`, then fails if `s` succeeded or, if `s`
 * failed, succeeds with the subject term unchanged, I.e., it tests if
 * `s` applies, but has no effect on the subject term.
 */
def not (s : => Strategy) : Strategy =
    s < fail + id
/**
 * Construct a strategy that tests whether strategy `s` succeeds,
 * restoring the original term on success. This is similar
 * to Stratego's `where`, except that in this version any effects on
 * bindings are not visible outside `s`.
 */
def where (s : => Strategy) : Strategy =
    strategyf (t => (s <* build (t)) (t))
/**
 * Construct a strategy that tests whether strategy `s` succeeds,
 * restoring the original term on success. A synonym for `where`.
 */
def test (s : => Strategy) : Strategy =
    where (s)
/**
 * Construct a strategy that applies `s` in breadth first order.
 */
def breadthfirst (s : => Strategy) : Strategy =
    all (s) <* all (breadthfirst (s))
/**
 * Construct a strategy that applies `s` in a top-down, prefix fashion
 * to the subject term.
 */
def topdown (s : => Strategy) : Strategy =
    s <* all (topdown (s))
/**
 * Construct a strategy that applies `s` in a top-down, prefix fashion
 * to the subject term but stops when the strategy produced by `stop`
 * succeeds. `stop` is given the whole strategy itself as its argument.
 */
def topdownS (s : => Strategy, stop : (=> Strategy) => Strategy) : Strategy =
    s <* (stop (topdownS (s, stop)) <+ all (topdownS (s, stop)))
/**
 * Construct a strategy that applies `s` in a bottom-up, postfix fashion
 * to the subject term.
 */
def bottomup (s : => Strategy) : Strategy =
    all (bottomup (s)) <* s
/**
 * Construct a strategy that applies `s` in a bottom-up, postfix fashion
 * to the subject term but stops when the strategy produced by `stop`
 * succeeds. `stop` is given the whole strategy itself as its argument.
 * Note: `<+` and `<*` share precedence and associate left, so the body
 * parses as `(stop (...) <+ all (...)) <* s`.
 */
def bottomupS (s : => Strategy, stop : (=> Strategy) => Strategy) : Strategy =
    (stop (bottomupS (s, stop)) <+ (all (bottomupS (s, stop))) <* s)
/**
 * Construct a strategy that applies `s` in a combined top-down and
 * bottom-up fashion (i.e., both prefix and postfix) to the subject
 * term.
 */
def downup (s : => Strategy) : Strategy =
    s <* all (downup (s)) <* s
/**
 * Construct a strategy that applies `s1` in a top-down, prefix fashion
 * and `s2` in a bottom-up, postfix fashion to the subject term.
 */
def downup (s1 : => Strategy, s2 : => Strategy) : Strategy =
    s1 <* all (downup (s1, s2)) <* s2
/**
 * Construct a strategy that applies `s` in a combined top-down and
 * bottom-up fashion (i.e., both prefix and postfix) to the subject
 * but stops when the strategy produced by `stop` succeeds. `stop` is
 * given the whole strategy itself as its argument.
 */
def downupS (s : => Strategy, stop : (=> Strategy) => Strategy) : Strategy =
    s <* (stop (downupS (s, stop)) <+ all (downupS (s, stop))) <* s
/**
 * Construct a strategy that applies `s1` in a top-down, prefix fashion
 * and `s2` in a bottom-up, postfix fashion to the subject term but stops
 * when the strategy produced by `stop` succeeds. `stop` is given the whole
 * strategy itself as its argument.
 */
def downupS (s1 : => Strategy, s2 : => Strategy, stop : (=> Strategy) => Strategy) : Strategy =
    s1 <* (stop (downupS (s1, s2, stop)) <+ all (downupS (s1, s2, stop))) <* s2
/**
 * A unit for `topdownS`, `bottomupS` and `downupS`. For example, `topdown(s)`
 * is equivalent to `topdownS(s, dontstop)`. Always fails, so the traversal
 * never stops early.
 */
def dontstop (s : => Strategy) : Strategy =
    fail
/**
 * Construct a strategy that applies `s` in a top-down fashion to one
 * subterm at each level, stopping as soon as it succeeds once (at
 * any level).
 */
def oncetd (s : => Strategy) : Strategy =
    s <+ one (oncetd (s))
/**
 * Construct a strategy that applies `s` in a bottom-up fashion to one
 * subterm at each level, stopping as soon as it succeeds once (at
 * any level).
 */
def oncebu (s : => Strategy) : Strategy =
    one (oncebu (s)) <+ s
/**
 * Construct a strategy that applies `s` in a top-down fashion to some
 * subterms at each level, stopping as soon as it succeeds once (at
 * any level).
 */
def sometd (s : => Strategy) : Strategy =
    s <+ some (sometd (s))
/**
 * Construct a strategy that applies `s` in a bottom-up fashion to some
 * subterms at each level, stopping as soon as it succeeds once (at
 * any level).
 */
def somebu (s : => Strategy) : Strategy =
    some (somebu (s)) <+ s
/**
 * Construct a strategy that applies `s` repeatedly in a top-down fashion
 * stopping each time as soon as it succeeds once (at any level). The
 * outermost fails when `s` fails to apply to any (sub-)term.
 */
def outermost (s : => Strategy) : Strategy =
    repeat (oncetd (s))
/**
 * Construct a strategy that applies `s` repeatedly to the innermost
 * (i.e., lowest and left-most) (sub-)term to which it applies.
 * Stop with the current term if `s` doesn't apply anywhere.
 */
def innermost (s : => Strategy) : Strategy =
    bottomup (attempt (s <* innermost (s)))
/**
 * An alternative version of `innermost`.
 */
def innermost2 (s : => Strategy) : Strategy =
    repeat (oncebu (s))
/**
 * Construct a strategy that applies `s` repeatedly to subterms
 * until it fails on all of them.
 */
def reduce (s : => Strategy) : Strategy = {
    // x first tries to reduce inside some subterm, then the term itself.
    def x : Strategy = some (x) + s
    repeat (x)
}
/**
 * Construct a strategy that applies `s` in a top-down fashion, stopping
 * at a frontier where s succeeds.
 */
def alltd (s : => Strategy) : Strategy =
    s <+ all (alltd (s))
/**
 * Construct a strategy that applies `s` in a bottom-up fashion to all
 * subterms at each level, stopping at a frontier where s succeeds.
 */
def allbu (s : => Strategy) : Strategy =
    all (allbu (s)) <+ s
/**
 * Construct a strategy that applies `s1` in a top-down, prefix fashion
 * stopping at a frontier where `s1` succeeds. `s2` is applied in a bottom-up,
 * postfix fashion to the result.
 */
def alldownup2 (s1 : => Strategy, s2 : => Strategy) : Strategy =
    (s1 <+ all (alldownup2 (s1, s2))) <* s2
/**
 * Construct a strategy that applies `s1` in a top-down, prefix fashion
 * stopping at a frontier where `s1` succeeds. `s2` is applied in a bottom-up,
 * postfix fashion to the results of the recursive calls.
 */
def alltdfold (s1 : => Strategy, s2 : => Strategy) : Strategy =
    s1 <+ (all (alltdfold (s1, s2)) <* s2)
/**
 * Construct a strategy that applies `s` in a top-down, prefix fashion
 * stopping at a frontier where `s` succeeds on some children. `s` is then
 * applied in a bottom-up, postfix fashion to the result.
 */
def somedownup (s : => Strategy) : Strategy =
    (s <* all (somedownup (s)) <* (attempt (s))) <+ (some (somedownup (s)) <+ attempt (s))
/**
 * Construct a strategy that applies `s` as many times as possible, but
 * at least once, in bottom up order.
 */
def manybu (s : Strategy) : Strategy =
    some (manybu (s)) <* attempt (s) <+ s
/**
 * Construct a strategy that applies `s` as many times as possible, but
 * at least once, in top down order.
 */
def manytd (s : Strategy) : Strategy =
    s <* all (attempt (manytd (s))) <+ some (manytd (s))
/**
 * Construct a strategy that tests whether the two sub-terms of a
 * pair of terms are equal (by `==`). Succeeds with the pair itself.
 */
val eq : Strategy =
    rule {
        case t @ (x, y) if x == y => t
    }
/**
 * Construct a strategy that tests whether the two sub-terms of a
 * pair of terms are equal. Synonym for `eq`.
 */
val equal : Strategy =
    eq
/**
 * Construct a strategy that succeeds when applied to a pair `(x,y)`
 * if `x` is a sub-term of `y`.
 */
val issubterm : Strategy =
    strategy {
        case (x : Term, y : Term) => where (oncetd (term (x))) (y)
    }
/**
 * Construct a strategy that succeeds when applied to a pair `(x,y)`
 * if `x` is a sub-term of `y` but is not equal to `y`.
 */
val ispropersubterm : Strategy =
    not (eq) <* issubterm
/**
 * Construct a strategy that succeeds when applied to a pair `(x,y)`
 * if `x` is a superterm of `y`. Implemented by flipping the pair and
 * delegating to `issubterm`.
 */
val issuperterm : Strategy =
    strategy {
        case (x, y) => issubterm (y, x)
    }
/**
 * Construct a strategy that succeeds when applied to a pair `(x,y)`
 * if `x` is a super-term of `y` but is not equal to `y`.
 */
val ispropersuperterm : Strategy =
    not (eq) <* issuperterm
/**
 * Construct a strategy that succeeds if the current term has no
 * direct subterms (i.e., `all` over the children vacuously succeeds
 * only when there are none to fail on).
 */
val isleaf : Strategy =
    all (fail)
/**
 * Construct a strategy that applies to all of the leaves of the
 * current term, using `isleaf` as the leaf predicate.
 */
def leaves (s : => Strategy, isleaf : => Strategy) : Strategy =
    (isleaf <* s) <+ all (leaves (s, isleaf))
/**
 * Construct a strategy that applies to all of the leaves of the
 * current term, using `isleaf` as the leaf predicate, skipping
 * subterms for which `skip` when applied to the result succeeds.
 */
def leaves (s : => Strategy, isleaf : => Strategy, skip : Strategy => Strategy) : Strategy =
    (isleaf <* s) <+ skip (leaves (s, isleaf, skip)) <+ all (leaves (s, isleaf, skip))
/**
 * Construct a strategy that succeeds if the current term has at
 * least one direct subterm.
 */
val isinnernode : Strategy =
    one (id)
/**
 * Construct a strategy that applies `s` at all terms in a bottom-up fashion
 * regardless of failure. Terms for which the strategy fails are left
 * unchanged.
 */
def everywherebu (s : => Strategy) : Strategy =
    bottomup (attempt (s))
/**
 * Construct a strategy that applies `s` at all terms in a top-down fashion
 * regardless of failure. Terms for which the strategy fails are left
 * unchanged.
 */
def everywheretd (s : => Strategy) : Strategy =
    topdown (attempt (s))
/**
 * Same as `everywheretd`.
 */
def everywhere (s : => Strategy) : Strategy =
    everywheretd (s)
/**
 * Apply the function at every term in `t` in a top-down, left-to-right order.
 * Collect the resulting `T` values by accumulating them using `f` with initial
 * left value `v`. Return the final value of the accumulation.
 */
def everything[T] (v : T) (f : (T, T) => T) (g : Term ==> T) (t : Term) : T =
    (collectl (g) (t)).foldLeft (v) (f)
/**
 * Construct a strategy that applies `s`, then applies the restoring action
 * `rest` if `s` fails (and then fail). Otherwise, let the result of `s` stand.
 * Typically useful if `s` performs side effects that should be restored or
 * undone when `s` fails.
 */
def restore (s : => Strategy, rest : => Strategy) : Strategy =
    s <+ (rest <* fail)
/**
 * Construct a strategy that applies `s`, then applies the restoring action
 * `rest` regardless of the success or failure of `s`. The whole strategy
 * preserves the success or failure of `s`. Typically useful if `s` performs
 * side effects that should be restored always, e.g., when maintaining scope
 * information.
 */
def restorealways (s : => Strategy, rest : => Strategy) : Strategy =
    s < rest + (rest <* fail)
/**
 * Applies `s` followed by `f` whether `s` failed or not.
 * This operator is called `finally` in the Stratego library.
 */
def lastly (s : => Strategy, f : => Strategy) : Strategy =
    s < where (f) + (where (f) <* fail)
/**
 * `ior(s1, s2)` implements inclusive OR, that is, the
 * inclusive choice of `s1` and `s2`. It first tries `s1`. If
 * that fails it applies `s2` (just like `s1 <+ s2`). However,
 * when `s1` succeeds it also tries to apply `s2`.
 */
def ior (s1 : => Strategy, s2 : => Strategy) : Strategy =
    (s1 <* attempt (s2)) <+ s2
/**
 * `or(s1, s2)` is similar to `ior(s1, s2)`, but the application
 * of the strategies is only tested: the subject term is left
 * unchanged by the `where`/`test` wrappers.
 */
def or (s1 : => Strategy, s2 : => Strategy) : Strategy =
    where (s1) < attempt (test (s2)) + test (s2)
/**
 * `and(s1, s2)` applies `s1` and `s2` to the current
 * term and succeeds if both succeed. `s2` will always
 * be applied, i.e., and is ''not'' a short-circuit
 * operator
 */
def and (s1 : => Strategy, s2 : => Strategy) : Strategy =
    where (s1) < test (s2) + (test (s2) <* fail)
}
/**
 * Strategy-based term rewriting for arbitrary terms. Default singleton
 * instance of the `Rewriter` trait; import its members for direct use.
 */
object Rewriter extends Rewriter
|
matachi/rangeFix
|
lib/kiama_2.9.2-1.4.0-sources/org/kiama/rewriting/Rewriter.scala
|
Scala
|
mit
| 58,825 |
/*
* ReverseWindow.scala
* (FScape)
*
* Copyright (c) 2001-2022 Hanns Holger Rutz. All rights reserved.
*
* This software is published under the GNU Affero General Public License v3+
*
*
* For further information, please contact Hanns Holger Rutz at
* [email protected]
*/
package de.sciss.fscape
package stream
import akka.stream.{Attributes, FanInShape3, Inlet, Outlet}
import de.sciss.fscape.stream.impl.Handlers.InIAux
import de.sciss.fscape.stream.impl.logic.FilterWindowedInAOutA
import de.sciss.fscape.stream.impl.{NodeImpl, StageImpl}
/** Reverses contents of windowed input. */
object ReverseWindow {
  /**
   * @param in    the signal to window
   * @param size  the window size. This is clipped to be `>= 0`
   *              (`hSize` in `Logic` applies `math.max(0, _)`); the
   *              previous comment's "clipped to be `<= 1`" did not
   *              match the code.
   * @param clump clump size within each window, clipped to be `>= 1`
   *              (`math.max(1, _)`). With a clump size of one,
   *              each window is reversed sample by sample, if the clump size
   *              is two, the first two samples are flipped with the last
   *              two samples, then the third and forth are flipped with the
   *              third and forth before last, etc. Like `size`, `clump` is
   *              sampled at each beginning of a new window and held constant
   *              during the window.
   */
  def apply[A, E <: BufElem[A]](in: Outlet[E], size: OutI, clump: OutI)
                               (implicit b: Builder, tpe: StreamType[A, E]): Outlet[E] = {
    val stage0 = new Stage[A, E](b.layer)
    val stage = b.add(stage0)
    b.connect(in , stage.in0)
    b.connect(size , stage.in1)
    b.connect(clump , stage.in2)
    stage.out
  }
  private final val name = "ReverseWindow"
  // Shape: signal in, size in, clump in -> signal out.
  private type Shp[E] = FanInShape3[E, BufI, BufI, E]
  private final class Stage[A, E <: BufElem[A]](layer: Layer)
                                               (implicit a: Allocator, tpe: StreamType[A, E])
    extends StageImpl[Shp[E]](name) { stage =>
    val shape: Shape = new FanInShape3(
      in0 = Inlet[E] (s"${stage.name}.in" ),
      in1 = InI (s"${stage.name}.size" ),
      in2 = InI (s"${stage.name}.clump"),
      out = Outlet[E] (s"${stage.name}.out" )
    )
    // Pick a Logic instantiation matching the element type so that the
    // @specialized variants for Int/Long/Double are used where possible.
    def createLogic(attr: Attributes): NodeImpl[Shape] = {
      val res: Logic[_, _] = if (tpe.isInt)
        new Logic[Int , BufI](shape.asInstanceOf[Shp[BufI]], layer)
      else if (tpe.isLong)
        new Logic[Long , BufL](shape.asInstanceOf[Shp[BufL]], layer)
      else if (tpe.isDouble)
        new Logic[Double, BufD](shape.asInstanceOf[Shp[BufD]], layer)
      else
        new Logic[A, E](shape, layer)
      res.asInstanceOf[Logic[A, E]]
    }
  }
  private final class Logic[@specialized(Args) A, E <: BufElem[A]](shape: Shp[E], layer: Layer)
                                                                  (implicit a: Allocator, tpe: StreamType[A, E])
    extends FilterWindowedInAOutA[A, E, Shp[E]](name, layer, shape)(shape.in0, shape.out) {
    // Auxiliary parameter inputs: window size clamped to >= 0,
    // clump size clamped to >= 1.
    private[this] val hSize : InIAux = InIAux(this, shape.in1)(math.max(0, _))
    private[this] val hClump: InIAux = InIAux(this, shape.in2)(math.max(1, _))
    protected def winBufSize: Int = hSize.value
    // Advance both parameter inputs together; only proceed when both have data.
    protected def tryObtainWinParams(): Boolean = {
      val ok = hSize.hasNext && hClump.hasNext
      if (ok) {
        hSize .next()
        hClump.next()
      }
      ok
    }
    // In-place reversal of the window in clump-sized groups: the first
    // `cl` samples swap with the last `cl`, the next group with the
    // next-to-last, and so on until the cursors meet. Inside the inner
    // loop both i and j advance by cl; j is then pulled back by 2*cl,
    // for a net step of -cl per group.
    protected def processWindow(): Unit = {
      val win = winBuf
      val wsz = winBufSize
      var i = 0
      val cl = hClump.value
      val cl2 = cl + cl
      var j = wsz - cl
      while (i < j) {
        val k = i + cl
        while (i < k) {
          val tmp = win(i)
          win(i) = win(j)
          win(j) = tmp
          i += 1
          j += 1
        }
        j -= cl2
      }
    }
  }
}
|
Sciss/FScape-next
|
core/shared/src/main/scala/de/sciss/fscape/stream/ReverseWindow.scala
|
Scala
|
agpl-3.0
| 3,732 |
package org.jetbrains.plugins.scala.testingSupport.scalatest.scala2_11.scalatest3_0_1
import org.jetbrains.plugins.scala.SlowTests
import org.jetbrains.plugins.scala.testingSupport.scalatest.ScalaTestWholeSuiteTest
import org.junit.experimental.categories.Category
/**
 * Whole-suite run configuration test, binding the shared
 * `ScalaTestWholeSuiteTest` behaviour to the ScalaTest 3.0.1 / Scala 2.11 base.
 *
 * @author Roman.Shein
 * @since 10.03.2017
 */
@Category(Array(classOf[SlowTests]))
class Scalatest2_11_3_0_1_WholeSuiteTest extends Scalatest2_11_3_0_1_Base with ScalaTestWholeSuiteTest
|
triplequote/intellij-scala
|
scala/scala-impl/test/org/jetbrains/plugins/scala/testingSupport/scalatest/scala2_11/scalatest3_0_1/Scalatest2_11_3_0_1_WholeSuiteTest.scala
|
Scala
|
apache-2.0
| 459 |
package com.twitter.finagle.http.filter
import com.twitter.finagle.Service
import com.twitter.finagle.http.{Request, Response, Status}
import com.twitter.util.{Await, Future, Duration}
import org.jboss.netty.handler.codec.http.HttpMethod
import org.junit.runner.RunWith
import org.scalatest.FlatSpec
import org.scalatest.junit.{JUnitRunner, MustMatchersForJUnit => MustMatchers}
@RunWith(classOf[JUnitRunner])
class CorsTest extends FlatSpec with MustMatchers {
  // Non-standard HTTP method used to exercise CORS with a method outside
  // the usual set.
  val TRAP = new HttpMethod("TRAP")
  // Backing service: TRAP requests get a body, all other methods get 405.
  val underlying = Service.mk[Request, Response] { request =>
    val response = request.response
    if (request.method == TRAP) {
      response.contentString = "#guwop"
    } else {
      response.status = Status.MethodNotAllowed
    }
    Future value response
  }
  // Policy under test: origins starting with "juug" or ending with "street"
  // are echoed back; TRAP is appended to allowed methods; all requested
  // headers allowed; credentials supported; unbounded preflight max age.
  val policy = Cors.Policy(
    allowsOrigin = {
      case origin if origin.startsWith("juug") => Some(origin)
      case origin if origin.endsWith("street") => Some(origin)
      case _ => None
    },
    allowsMethods = { method => Some(method :: "TRAP" :: Nil) },
    allowsHeaders = { headers => Some(headers) },
    exposedHeaders = "Icey" :: Nil,
    supportsCredentials = true,
    maxAge = Some(Duration.Top)
  )
  val corsFilter = new Cors.HttpFilter(policy)
  val service = corsFilter andThen underlying
  "Cors.HttpFilter" should "handle preflight requests" in {
    // OPTIONS + Origin + Access-Control-Request-Method = CORS preflight.
    val request = Request()
    request.method = HttpMethod.OPTIONS
    request.headers.set("Origin", "thestreet")
    request.headers.set("Access-Control-Request-Method", "BRR")
    val response = Await result service(request)
    response.headerMap.get("Access-Control-Allow-Origin") must be(Some("thestreet"))
    response.headerMap.get("Access-Control-Allow-Credentials") must be(Some("true"))
    response.headerMap.get("Access-Control-Allow-Methods") must be(Some("BRR, TRAP"))
    response.headerMap.get("Vary") must be(Some("Origin"))
    response.headerMap.get("Access-Control-Max-Age") must be(
      Some(Duration.Top.inSeconds.toString))
    response.contentString must be("")
  }
  it should "respond to invalid preflight requests without CORS headers" in {
    // No Origin header: the filter should not emit any CORS headers.
    val request = Request()
    request.method = HttpMethod.OPTIONS
    val response = Await result service(request)
    response.status must be(Status.Ok)
    response.headerMap.get("Access-Control-Allow-Origin") must be(None)
    response.headerMap.get("Access-Control-Allow-Credentials") must be(None)
    response.headerMap.get("Access-Control-Allow-Methods") must be(None)
    response.headerMap.get("Vary") must be(Some("Origin"))
    response.contentString must be("")
  }
  it should "respond to unacceptable cross-origin requests without CORS headers" in {
    // "theclub" matches neither origin predicate, so the policy rejects it.
    val request = Request()
    request.method = HttpMethod.OPTIONS
    request.headers.set("Origin", "theclub")
    val response = Await result service(request)
    response.status must be(Status.Ok)
    response.headerMap.get("Access-Control-Allow-Origin") must be(None)
    response.headerMap.get("Access-Control-Allow-Credentials") must be(None)
    response.headerMap.get("Access-Control-Allow-Methods") must be(None)
    response.headerMap.get("Vary") must be(Some("Origin"))
    response.contentString must be("")
  }
  it should "handle simple requests" in {
    // Non-OPTIONS request with an accepted origin: response carries
    // allow-origin, credentials and exposed headers.
    val request = Request()
    request.method = TRAP
    request.headers.set("Origin", "juughaus")
    val response = Await result service(request)
    response.headerMap.get("Access-Control-Allow-Origin") must be(Some("juughaus"))
    response.headerMap.get("Access-Control-Allow-Credentials") must be(Some("true"))
    response.headerMap.get("Access-Control-Expose-Headers") must be(Some("Icey"))
    response.headerMap.get("Vary") must be(Some("Origin"))
    response.contentString must be("#guwop")
  }
  it should "not add response headers to simple requests if request headers aren't present" in {
    // Without an Origin header the request is not cross-origin at all.
    val request = Request()
    request.method = TRAP
    val response = Await result service(request)
    response.headerMap.get("Access-Control-Allow-Origin") must be(None)
    response.headerMap.get("Access-Control-Allow-Credentials") must be(None)
    response.headerMap.get("Access-Control-Expose-Headers") must be(None)
    response.headerMap.get("Vary") must be(Some("Origin"))
    response.contentString must be("#guwop")
  }
}
|
travisbrown/finagle
|
finagle-http/src/test/scala/com/twitter/finagle/http/filter/CorsTest.scala
|
Scala
|
apache-2.0
| 4,302 |
package com.dretta
import com.datastax.spark.connector._
import _root_.kafka.serializer.StringDecoder
import java.util.Properties
import com.dretta.kafka.StockProducer
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.kafka._
import org.apache.spark.streaming.{Seconds, StreamingContext}
import play.api.libs.json._
import com.datastax.spark.connector.writer.WriteConf
import com.dretta.json.{GetJsValue, JsonDecoder}
import org.apache.spark.{SparkConf, SparkContext}
// NOTE(review): `extends App` delays field initialisation to the
// DelayedInit mechanism; a plain `main` method would be more robust.
object StockStats extends App with GetJsValue{
  // Command line: args(0) = Kafka topic, args(1) = broker list.
  val topic = args(0)
  val brokers = args(1)
  // Producer configuration; values are serialised as JSON.
  val props = new Properties()
  props.put("bootstrap.servers", brokers)
  props.put("client.id", "ScalaProducerExample")
  props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
  props.put("value.serializer", "com.dretta.json.JsonSerializer")
  val producer = new StockProducer(topic, brokers, props)
  // Local Spark context with a 2-second streaming batch interval.
  val conf = new SparkConf().setMaster("local[*]").setAppName("StockStats")
  val sc = new SparkContext(conf)
  val ssc = new StreamingContext(sc, Seconds(2))
  val topicSet = topic.split(",").toSet
  val kafkaParams = Map[String, String]("metadata.broker.list" -> brokers)
  // Direct (receiver-less) Kafka stream of (key, JSON value) pairs.
  val directKafkaStream = KafkaUtils.createDirectStream[String, JsValue, StringDecoder, JsonDecoder](ssc,kafkaParams,topicSet)
  val parsers : DStream[JsValue] = directKafkaStream.map(v => v._2)
  parsers.print()
  // Project each JSON message into the column tuple expected by the
  // ks.stocks Cassandra table; ifNotExists avoids overwriting rows.
  parsers.foreachRDD(_.map{jsValue => (getString(jsValue("symbol")),
    getDate(jsValue("last_trade_date_time")),
    getDouble(jsValue("change_percent")),
    getDouble(jsValue("change_price")),
    getDouble(jsValue("last_close_price")),
    getDouble(jsValue("last_trade_price")),
    getInteger(jsValue("last_trade_size")),
    getString(jsValue("stock_index"))
    )}.saveToCassandra("ks", "stocks",
    writeConf = new WriteConf(ifNotExists = true)))
  ssc.start()
  // Generate one message every 2 seconds for roughly 10 seconds.
  val startTime = System.currentTimeMillis()
  val endTime = startTime + (10 * 1000)
  println("Start message generator")
  while(System.currentTimeMillis() < endTime){
    producer.generateMessage()
    Thread.sleep(2000)
  }
  println("Start table connection")
  // Read back what was written, then shut down streaming (keeping the
  // SparkContext alive for the batch reads below).
  val rdd = sc.cassandraTable("ks", "stocks")
  ssc.stop(stopSparkContext = false)
  producer.close()
  println("Database has " + rdd.count().toString + " entries")
  println("Symbols: " + rdd.map(_.getString("symbol")).take(3).mkString(", "))
  sc.stop()
}
|
dretta/StockStats
|
src/main/scala/com/dretta/StockStats.scala
|
Scala
|
agpl-3.0
| 2,510 |
package com.github.tanacasino.nnp
import java.util
import collection.JavaConverters._
trait NNP10 {
// P01 (*) Find the last element of a list.
def last(list: List[Int]): Int = {
list.last
}
@scala.annotation.tailrec
final def last1(list: List[Int]): Int = {
list match {
case Nil => throw new NoSuchElementException
case x :: Nil => x
case x :: xs => last1(xs)
}
}
def last2(list: List[Int]): Int = {
list.length match {
case 0 => throw new NoSuchElementException
case 1 => list.head
case n => last2(list.tail)
}
}
// P02 (*) Find the last but one element of a list.
def penultimate(list: List[Int]): Int = {
list.takeRight(2).head // 最後の2個をとって頭の要素
list.init.last // head : tail == init last
list(list.length - 2) //
}
@scala.annotation.tailrec
final def penultimate2(list: List[Int]): Int = {
list match {
case Nil => throw new NoSuchElementException
case x :: Nil => throw new NoSuchElementException
case x1 :: x2 :: Nil => x1
case x :: xs => penultimate2(xs)
}
}
def nth(n: Int, list: List[Int]): Int = {
list(n)
}
@scala.annotation.tailrec
final def nth2(n: Int, list: List[Int]): Int = {
require(0 <= n)
require(n <= list.length)
if(n == 0) {
list.head
} else {
nth2(n - 1, list.tail)
}
}
@scala.annotation.tailrec
final def nth3(n: Int, list: List[Int]): Int = {
n match {
case 0 => list.head
case i => nth3(i - 1, list.tail)
}
}
def length(list: List[Int]): Int = {
list.length
}
def length2(list: List[Int]): Int = {
def length0(size: Int, ls: List[Int]): Int = {
ls match {
case Nil => size
case x :: xs => length0(size + 1, xs)
}
}
length0(0, list)
}
def reverse(list: List[Int]): List[Int] = {
list.reverse
}
def reverse2(list: List[Int]): List[Int] = {
@scala.annotation.tailrec
def reverse0(ls: List[Int], acc: List[Int]): List[Int] = {
ls match {
case Nil => acc
case x :: xs => reverse0(xs, x :: acc)
}
}
reverse0(list, Nil)
}
def reverse3(list: List[Int]): List[Int] = {
list match {
case head :: Nil => head :: Nil
case head :: tail => reverse3(tail) ::: head :: Nil
case Nil => throw new NoSuchElementException
}
}
def isPalindrome(list: List[Int]): Boolean = {
list == list.reverse
}
def compress(list: List[Symbol]): List[Symbol] = {
@scala.annotation.tailrec
def compress0(ls: List[Symbol], acc: List[Symbol]): List[Symbol] = {
ls match {
case Nil => acc.reverse
case x :: xs => compress0(xs.dropWhile(_ == x), x :: acc)
}
}
compress0(list, Nil)
}
def pack(list: List[Symbol]): List[List[Symbol]] = {
@scala.annotation.tailrec
def pack0(acc: List[List[Symbol]], ls: List[Symbol]): List[List[Symbol]] = {
ls match {
case Nil => acc
case l => l.span(_ == l.head) match {
case (x, Nil) => x :: acc
case (x, y) => pack0(x :: acc, y)
}
}
}
pack0(Nil, list).reverse
}
def pack2(list: List[Symbol]): List[List[Symbol]] = {
def pack0(acc: List[List[Symbol]], ls: List[Symbol]): List[List[Symbol]] = {
ls match {
case Nil => acc
case l @ head :: tail =>
val (x, y) = l.span(_ == head)
pack0(x :: acc, y)
}
}
pack0(Nil, list).reverse
}
def encode2(list: List[Symbol]): List[(Int, Symbol)] = {
pack2(list).map(l => l.size -> l.head)
}
def encode3(list: List[Symbol]): List[(Int, Symbol)] = {
def encode0(acc: List[(Int, Symbol)], ls: List[Symbol]): List[(Int, Symbol)] = ls match {
case Nil => acc.reverse
case l @ head :: _ =>
val (x, y) = l.span(_ == head)
encode0((x.size, x.head) :: acc, y)
}
encode0(Nil, list)
}
def flatten2(nested: List[Any]): List[Any] = {
@scala.annotation.tailrec
def flatten0(acc: List[Any], ls: List[Any]): List[Any] = {
ls match {
case Nil => acc
case x :: xs => x match {
case l: List[_] => flatten0(acc, l ::: xs)
case v => flatten0(v :: acc, xs)
}
}
}
flatten0(Nil, nested).reverse
}
def flatten3(nested: List[Any]): List[Any] = {
@scala.annotation.tailrec
def flatten0(acc: List[Any], ls: List[Any]): List[Any] = {
ls match {
case Nil => acc
case (x: List[_]) :: xs => flatten0(acc, x ::: xs)
case (x: Any) :: xs => flatten0(x :: acc, xs)
}
}
flatten0(List.empty, nested).reverse
}
def flattenByCasino(nested: List[Any]): List[Any] = {
@scala.annotation.tailrec
def flatten0(acc: List[Any], ls: List[Any]): List[Any] = {
ls match {
case Nil => acc
case (x: List[_]) :: xs => flatten0(acc, x ::: xs)
case x :: xs => flatten0(x :: acc, xs)
}
}
flatten0(List.empty, nested).reverse
}
def flattenByK(nested: List[Any]): List[Any] = {
def innerFlatten(acc: List[Any], rest: List[Any]): List[Any] = {
rest match {
case Nil => acc
case head :: tail => head match {
case list: List[_] =>
innerFlatten(acc, list ::: tail)
case value: Any =>
innerFlatten(value :: acc, tail)
}
}
}
innerFlatten(List(), nested).sortBy {
case i: Int => i
case any => 0
}
}
def compress2(list: List[Symbol]): List[Symbol] = {
@scala.annotation.tailrec
def compress0(acc: List[Symbol], ls: List[Symbol]): List[Symbol] = {
ls match {
case Nil => acc
case x :: Nil => if (acc.nonEmpty && acc.head == x) acc else x :: acc
case x :: xs => if (acc.nonEmpty && acc.head == x) compress0(acc, xs) else compress0(x :: acc, xs)
}
}
compress0(List.empty, list).reverse
}
def compress3(list: List[Symbol]): List[Symbol] = {
@scala.annotation.tailrec
def compress0(acc: List[Symbol], ls: List[Symbol]): List[Symbol] = {
ls match {
case Nil => acc
case x :: xs => compress0(x :: acc, xs.dropWhile(_ == x))
}
}
compress0(List.empty, list).reverse
}
  /** Scratch method demonstrating Java/Scala collection interop in both directions.
   *
   *  NOTE(review): relies on `util` (java.util) and the asScala/asJava converters
   *  being imported at file level (not visible in this chunk) — presumably
   *  scala.collection.JavaConverters; confirm against the file header.
   *  Both locals are created only for demonstration; nothing is returned.
   */
  def hoge = {
    val javalist = new util.ArrayList[String]()
    javalist.asScala.foreach(println)
    val jlist = List(1,2).asJava
  }
}
|
tanacasino/learning-scala
|
src/main/scala/com/github/tanacasino/nnp/NNP10.scala
|
Scala
|
mit
| 6,474 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.examples.scala.relational
import org.apache.flink.api.java.aggregation.Aggregations
import org.apache.flink.api.java.utils.ParameterTool
import org.apache.flink.api.scala._
/**
* This program implements a modified version of the TPC-H query 10.
*
* The original query can be found at
* [http://www.tpc.org/tpch/spec/tpch2.16.0.pdf](http://www.tpc.org/tpch/spec/tpch2.16.0.pdf)
* (page 45).
*
* This program implements the following SQL equivalent:
*
* {{{
* SELECT
* c_custkey,
* c_name,
* c_address,
* n_name,
 *        c_acctbal,
 *        SUM(l_extendedprice * (1 - l_discount)) AS revenue
* FROM
* customer,
* orders,
* lineitem,
* nation
* WHERE
* c_custkey = o_custkey
* AND l_orderkey = o_orderkey
* AND YEAR(o_orderdate) > '1990'
* AND l_returnflag = 'R'
* AND c_nationkey = n_nationkey
* GROUP BY
* c_custkey,
* c_name,
* c_acctbal,
* n_name,
* c_address
* }}}
*
* Compared to the original TPC-H query this version does not print
* c_phone and c_comment, only filters by years greater than 1990 instead of
 * a period of 3 months, and does not sort the result by revenue.
*
* Input files are plain text CSV files using the pipe character ('|') as field separator
* as generated by the TPC-H data generator which is available at
 * [http://www.tpc.org/tpch/](http://www.tpc.org/tpch/).
*
* Usage:
* {{{
*TPCHQuery10 --customer <path> --orders <path> --lineitem <path> --nation <path> --output <path>
* }}}
*
* This example shows how to use:
* - tuple data types
* - build-in aggregation functions
* - join with size hints
*
*/
object TPCHQuery10 {

  def main(args: Array[String]) {

    val params: ParameterTool = ParameterTool.fromArgs(args)

    // NOTE(review): with `&&` the usage hint is only printed when *all four*
    // inputs are missing; supplying a subset passes this guard and fails later
    // when the absent paths are read. Presumably intentional (help text for a
    // bare invocation) — confirm before changing.
    if (!params.has("lineitem") && !params.has("customer") &&
        !params.has("orders") && !params.has("nation")) {
      println("  This program expects data from the TPC-H benchmark as input data.")
      println("  Due to legal restrictions, we can not ship generated data.")
      println("  You can find the TPC-H data generator at http://www.tpc.org/tpch/.")
      println("  Usage: TPCHQuery10" +
        "--customer <path> --orders <path> --lineitem <path> --nation <path> --output <path>")
      return
    }

    // set up execution environment
    val env = ExecutionEnvironment.getExecutionEnvironment

    // make parameters available in the web interface
    env.getConfig.setGlobalJobParameters(params)

    // get customer data set: (custkey, name, address, nationkey, acctbal)
    val customers = getCustomerDataSet(env, params.get("customer"))

    // get orders data set: (orderkey, custkey, orderdate)
    val orders = getOrdersDataSet(env, params.get("orders"))

    // get lineitem data set: (orderkey, extendedprice, discount, returnflag)
    val lineitems = getLineitemDataSet(env, params.get("lineitem"))

    // get nation data set: (nationkey, name)
    val nations = getNationDataSet(env, params.get("nation"))

    // filter orders by years (keep orders strictly after 1990, by the leading
    // 4 characters of the orderdate string) and project to (orderkey, custkey)
    val orders1990 = orders.filter( o => o._3.substring(0,4).toInt > 1990)
                           .map( o => (o._1, o._2))

    // filter lineitems by return status ('R' = returned) and compute the
    // per-line revenue: extendedprice * (1 - discount)
    val lineitemsReturn = lineitems.filter( l => l._4.equals("R"))
                                   .map( l => (l._1, l._2 * (1 - l._3)) )

    // compute revenue by customer: join on orderkey (lineitems hinted as the
    // huge side), re-key by custkey, then sum revenue per customer
    val revenueByCustomer = orders1990.joinWithHuge(lineitemsReturn).where(0).equalTo(0)
                                      .apply( (o,l) => (o._2, l._2) )
                                      .groupBy(0)
                                      .aggregate(Aggregations.SUM, 1)

    // compute final result by joining customer and nation information with revenue:
    // nations is hinted as the tiny side; the second join brings in the summed revenue
    val result = customers.joinWithTiny(nations).where(3).equalTo(0)
                          .apply( (c, n) => (c._1, c._2, c._3, n._2, c._5) )
                          .join(revenueByCustomer).where(0).equalTo(0)
                          .apply( (c, r) => (c._1, c._2, c._3, c._4, c._5, r._2) )

    if (params.has("output")) {
      // emit result as pipe-separated CSV
      result.writeAsCsv(params.get("output"), "\\n", "|")
      // execute program
      env.execute("Scala TPCH Query 10 Example")
    } else {
      println("Printing result to stdout. Use --output to specify output path.")
      result.print()
    }

  }

  // *************************************************************************
  //     UTIL METHODS
  // *************************************************************************

  // Reads customer rows as (custkey, name, address, nationkey, acctbal);
  // column 4 (phone) is skipped via includedFields.
  private def getCustomerDataSet(env: ExecutionEnvironment, customerPath: String):
                         DataSet[(Int, String, String, Int, Double)] = {
    env.readCsvFile[(Int, String, String, Int, Double)](
        customerPath,
        fieldDelimiter = "|",
        includedFields = Array(0,1,2,3,5) )
  }

  // Reads order rows as (orderkey, custkey, orderdate).
  private def getOrdersDataSet(env: ExecutionEnvironment, ordersPath: String):
                       DataSet[(Int, Int, String)] = {
    env.readCsvFile[(Int, Int, String)](
        ordersPath,
        fieldDelimiter = "|",
        includedFields = Array(0, 1, 4) )
  }

  // Reads lineitem rows as (orderkey, extendedprice, discount, returnflag).
  private def getLineitemDataSet(env: ExecutionEnvironment, lineitemPath: String):
                         DataSet[(Int, Double, Double, String)] = {
    env.readCsvFile[(Int, Double, Double, String)](
        lineitemPath,
        fieldDelimiter = "|",
        includedFields = Array(0, 5, 6, 8) )
  }

  // Reads nation rows as (nationkey, name).
  private def getNationDataSet(env: ExecutionEnvironment, nationPath: String):
                       DataSet[(Int, String)] = {
    env.readCsvFile[(Int, String)](
        nationPath,
        fieldDelimiter = "|",
        includedFields = Array(0, 1) )
  }

}
|
GJL/flink
|
flink-examples/flink-examples-batch/src/main/scala/org/apache/flink/examples/scala/relational/TPCHQuery10.scala
|
Scala
|
apache-2.0
| 6,654 |
package com.typesafe.sbt.packager.windows
// TODO find a better name and add documentation
object NameHelper {

  /** Turns an arbitrary name into an environment-variable-friendly identifier:
   *  upper-cases it, then replaces every non-word character with an underscore.
   */
  def makeEnvFriendlyName(name: String): String = {
    val upper = name.toUpperCase
    upper.replaceAll("\\\\W", "_")
  }
}
|
fsat/sbt-native-packager
|
src/main/scala/com/typesafe/sbt/packager/windows/NameHelper.scala
|
Scala
|
bsd-2-clause
| 205 |
import org.apache.spark.sql._
import org.apache.spark.sql.types._
import org.apache.spark.sql.functions.udf
import org.apache.spark.sql.functions.{array, lit, map, struct}
import org.apache.spark.sql.expressions.Window
import org.apache.spark.storage.StorageLevel._
// Constants
val dataFilePath = "/home/vagrant/data/lse/lse_all.txt"
val responseColumnName = "price_change_percent"
val numberOfDaysPerWeek = 5 // LSE is weekdays only

// Parameters
val maxPastOffset = 30   // how many past trading days feed each feature row
val predictionOffset = 5 // how many trading days ahead the response looks
val dateIndexSkipStep = numberOfDaysPerWeek - 1 // Note: should be considered relative to the week length, to avoid favouring particular days
// NOTE(review): the computed skip is commented out, leaving the skip fixed at 1
// (i.e. no thinning of rows); the filter `index % dateIndexSkip == 0` below is
// then a no-op. Confirm whether that is intentional.
val dateIndexSkip = 1//((maxPastOffset + predictionOffset) / dateIndexSkipStep) * dateIndexSkipStep // Note: integer division, not floating point
val maxConsideredPercentageChange = 300 // rows with larger % change are treated as outliers
val dataSplitFractions = Array(0.6, 0.2, 0.2) // train / test / cross-validation
val learningOffsets = (1 to maxPastOffset)//.map(i => i * -1)
/** Loads a Metastock-7 style daily-prices CSV into a DataFrame.
 *
 *  Expected row layout: ticker,date,open,high,low,close,volume (comma-separated,
 *  no header). Runs in a spark-shell context: `spark` is the ambient SparkSession.
 */
def loadMetastock(dataFilePath: String): Dataset[Row] = {
  val metaStockSevenDailySchema =
    StructType(
      StructField("ticker", StringType, false) ::
      StructField("date", StringType, false) ::
      StructField("open", FloatType, false) ::
      StructField("high", FloatType, false) ::
      StructField("low", FloatType, false) ::
      StructField("close", FloatType, false) ::
      StructField("volume", IntegerType, false) :: Nil)

  // Parse each CSV line positionally; `trim` guards the trailing volume field
  // against stray whitespace/newlines. Malformed rows will throw here.
  val rowRDD = spark.sparkContext.textFile(dataFilePath).map(_.split(",")).map(i => Row(i(0), i(1), i(2).toFloat, i(3).toFloat, i(4).toFloat, i(5).toFloat, i(6).trim.toInt))
  spark.createDataFrame(rowRDD, metaStockSevenDailySchema)
}
/** Builds a (date, index) lookup table: each distinct date string is assigned a
 *  dense, ascending Long index (0-based) so day arithmetic can use integers.
 */
def buildDateIndex(data: Dataset[Row], dateColumnName: String): Dataset[Row] = {
  // NOTE(review): the data is ordered twice — orderBy on the DataFrame and
  // sortBy on the RDD; the second sort is what guarantees the zipWithIndex order.
  val dateRDD = data.select(dateColumnName).distinct().orderBy(col(dateColumnName)).rdd.map {
    case Row(date: String) => (date)
  }.sortBy(i => i).zipWithIndex.map(r => Row(r._1, r._2))

  val dateSchema =
    StructType(
      StructField(dateColumnName, StringType, false) ::
      StructField("index", LongType, false) :: Nil)

  spark.createDataFrame(dateRDD, dateSchema)
}
/** Builds a (ticker, ticker_index) lookup table: each distinct ticker symbol is
 *  assigned a dense Long index in lexicographic order.
 */
def buildTickerIndex(data: Dataset[Row], tickerColumnName: String): Dataset[Row] = {
  val tickerRDD = data.select(tickerColumnName).distinct().rdd.map {
    case Row(ticker: String) => (ticker)
  }.sortBy(i => i).zipWithIndex.map(r => Row(r._1, r._2))

  val tickerSchema =
    StructType(
      StructField("ticker", StringType, false) ::
      StructField("ticker_index", LongType, false) :: Nil)

  spark.createDataFrame(tickerRDD, tickerSchema)
}
/** Signed difference `b - a`: positive when the value increased from a to b. */
def difference(a: Float, b: Float): Float = b - a
/** Percentage change from originalValue to newValue, e.g. 100 -> 150 gives 50.0.
 *  No guard against originalValue == 0 (yields Infinity/NaN), matching caller expectations.
 */
def percentChange(originalValue: Float, newValue: Float): Float = {
  val delta = newValue - originalValue
  (delta / originalValue) * 100
}
// UDF wrappers so the pure helpers can be used in DataFrame column expressions.
val differenceUdf = udf(difference(_:Float,_:Float):Float)
// NOTE(review): percentChangeUdf appears unused below — the % change is computed
// inline with column arithmetic instead; confirm before removing.
val percentChangeUdf = udf(percentChange(_:Float, _:Float):Float)
// Per-ticker window ordered by the dense date index; basis for all lag() calls.
val window = Window.partitionBy("ticker").orderBy("index")
/** Adds placeholder close/volume columns for a given offset.
 *
 *  NOTE(review): `originalData` and the value of `offset` (beyond naming the new
 *  columns) are unused, and the columns are constant zeros — this looks like an
 *  abandoned precursor to `combineWithOffsets`, which is what the pipeline
 *  actually uses. Confirm whether it can be deleted.
 */
def addOffset(dataToAddTo: Dataset[Row], originalData: Dataset[Row], offset: Int): Dataset[Row] = {
  dataToAddTo.withColumn("close" + offset.toString, lit(0f)).withColumn("volume" + offset.toString, lit(0))
}
/** For each offset o, appends lagged feature columns "close-o" and "volume-o"
 *  holding the close/volume from o trading days earlier within the same ticker
 *  (per the shared `window`). Rows near a ticker's start get nulls, dropped later.
 */
def combineWithOffsets(data: Dataset[Row], offsets: Seq[Int]): Dataset[Row] = {
  offsets.foldLeft(data)((d, o) => (
    d
    withColumn("close-" + o.toString, lag(col("close"), o).over(window))
    withColumn("volume-" + o.toString, lag(col("volume"), o).over(window))
  ))
}
// ---- Pipeline: load raw prices, index dates/tickers, build features + response ----
val stockDF = loadMetastock(dataFilePath)
stockDF.createOrReplaceTempView("stocks")

val dateDF = buildDateIndex(stockDF, "date")
dateDF.createOrReplaceTempView("dates")
val tickerDF = buildTickerIndex(stockDF, "ticker")
tickerDF.createOrReplaceTempView("tickers")

// NOTE(review): `allTickers` is a var and never used below — candidate for removal.
var allTickers = spark.sql("SELECT DISTINCT ticker FROM stocks")

// Replace the string date/ticker with their dense integer indices.
val stockWithIndexDF = spark.sql("SELECT index, ticker_index as ticker, close, volume FROM stocks JOIN dates ON stocks.date = dates.date JOIN tickers ON stocks.ticker = tickers.ticker")

// Currently a pass-through; the commented filters were used to restrict tickers/rows.
val filteredDF = stockWithIndexDF//.where("ticker = 'GSK' OR ticker = 'HIK' OR ticker = 'OML'")//.join(filteredTickers, List("ticker"), "inner")//.where("index > 100").where("index < 500")
filteredDF.persist(MEMORY_AND_DISK)

// Feature rows: lagged close/volume for each learning offset; drop rows with
// incomplete history; thin by dateIndexSkip (a no-op while the skip is 1).
val joinedData = combineWithOffsets(filteredDF, learningOffsets).na.drop().filter($"index" % dateIndexSkip === 0)
joinedData.show()

// Response computed standalone (for inspection); allData below recomputes it inline.
val dataForCalculationResponses = filteredDF.select("index", "ticker", "close")
val possibleResponses = (
  dataForCalculationResponses
  withColumn("future_price", lag(col("close"), -predictionOffset).over(window))
  withColumn("price_difference", differenceUdf(col("close"), col("future_price")))
  withColumn(responseColumnName, ($"price_difference" / $"close") * 100)
  drop("future_price")
  drop("price_difference")
)
joinedData.show()
possibleResponses.show()

// Align training data (X) and expected responses (Y) ready for passing to model
// Because of offsets for past and future data, they cover different time-periods: find the intersection
val allData = (
  joinedData
  withColumn(responseColumnName, (differenceUdf(col("close"), lag(col("close"), -predictionOffset).over(window)) / $"close") * 100)
  drop("index")
  where(col(responseColumnName) < maxConsideredPercentageChange)
  // drop("ticker")
).na.drop() // Drop nulls due to windowing/sliding
allData.cache()//.persist(MEMORY_AND_DISK)
//allData.show()
//allData.count()
//allData.printSchema()

// Random split into train / test / cross-validation per dataSplitFractions.
val splits = allData.randomSplit(dataSplitFractions)
val (trainingData, testData, crossValidationData) = (splits(0), splits(1), splits(2))
|
decates/spark-sandbox
|
scripts/loadData.scala
|
Scala
|
apache-2.0
| 5,514 |
//
// Copyright (c) 2014 Mirko Nasato
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package io.encoded.jersik.java.codegen
import io.encoded.jersik.schema._
import io.encoded.jersik.codegen.CodeGeneration._
/** Generates the Java source of a service interface for the given schema
 *  [[Service]]: one abstract method per RPC operation, placed in `moduleName`
 *  as the Java package.
 */
class JavaService(moduleName: String, service: Service) {

  /** Renders the complete interface as a Java source string. */
  def toCode: String = {
    val serviceName = service.name
    // One method signature per operation: `ResponseType operationName(RequestType request);`
    // joined with blank lines; mapJoin comes from CodeGeneration.
    val operations = mapJoin(service.operations, "\n\n") { operation =>
      val operationName = operation.name
      val requestType = operation.requestType.name
      val responseType = operation.responseType.name
      s"$responseType $operationName($requestType request);"
    }
    // `ai` is the project's alignment-aware interpolator (re-indents $operations).
    ai"""
      package $moduleName;

      public interface $serviceName {
          $operations
      }
      """
  }

}
|
mirkonasato/jersik
|
java/jersik-java-codegen/src/main/scala/io/encoded/jersik/java/codegen/JavaService.scala
|
Scala
|
apache-2.0
| 1,230 |
package net.revenj
import java.time.OffsetDateTime
import net.revenj.patterns.DomainEvent
/** Minimal [[DomainEvent]] stub for tests; both members are deliberately
 *  unimplemented (`???` throws NotImplementedError if called).
 */
class TestMe extends DomainEvent {
  override def queuedAt: OffsetDateTime = ???

  override def URI: String = ???
}
|
ngs-doo/revenj
|
scala/revenj-core/src/test/scala/net/revenj/TestMe.scala
|
Scala
|
bsd-3-clause
| 210 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.descriptors
import java.lang.{Boolean => JBoolean, Double => JDouble, Integer => JInt}
import org.apache.flink.api.common.typeinfo.Types
import org.apache.flink.table.api.{TableException, ValidationException}
/**
* Validator for [[LiteralValue]].
*/
class LiteralValueValidator(keyPrefix: String) extends HierarchyDescriptorValidator(keyPrefix) {

  /*
   * TODO The following types need to be supported next.
   * Types.SQL_DATE
   * Types.SQL_TIME
   * Types.SQL_TIMESTAMP
   * Types.PRIMITIVE_ARRAY
   * Types.OBJECT_ARRAY
   * Types.MAP
   * Types.MULTISET
   * null
   */

  /** Validates a literal under `keyPrefix`: either an explicit `type` + `value`
   *  pair, or (inside a hierarchy) a bare string whose type is derived later.
   */
  override protected def validateWithPrefix(
      keyPrefix: String,
      properties: DescriptorProperties)
    : Unit = {
    val typeKey = s"$keyPrefix${LiteralValueValidator.TYPE}"

    // type key itself is optional here; when present it must be a valid type string
    properties.validateType(typeKey, true, false)

    // explicit type: the value key is required and must parse as that type
    if (properties.containsKey(typeKey)) {
      val valueKey = s"$keyPrefix${LiteralValueValidator.VALUE}"
      val typeInfo = properties.getType(typeKey)
      typeInfo match {
        case Types.BIG_DEC => properties.validateBigDecimal(valueKey, false)
        case Types.BOOLEAN => properties.validateBoolean(valueKey, false)
        case Types.BYTE => properties.validateByte(valueKey, false)
        case Types.DOUBLE => properties.validateDouble(valueKey, false)
        case Types.FLOAT => properties.validateFloat(valueKey, false)
        case Types.INT => properties.validateInt(valueKey, false)
        case Types.LONG => properties.validateLong(valueKey, false)
        case Types.SHORT => properties.validateShort(valueKey, false)
        case Types.STRING => properties.validateString(valueKey, false)
        case _ => throw new TableException(s"Unsupported type '$typeInfo'.")
      }
    }
    // implicit type: the literal is stored directly under the (trimmed) prefix key
    else {
      // do not allow values in top-level
      if (keyPrefix == HierarchyDescriptorValidator.EMPTY_PREFIX) {
        throw new ValidationException(
          "Literal values with implicit type must not exist in the top level of a hierarchy.")
      }
      // keyPrefix ends with the separator; strip it to get the literal's own key
      properties.validateString(keyPrefix.substring(0, keyPrefix.length - 1), false)
    }
  }
}
object LiteralValueValidator {
  val TYPE = "type"
  val VALUE = "value"
  private val LITERAL_FALSE = "false"
  private val LITERAL_TRUE = "true"

  /**
    * Gets the value according to the type and value strings.
    *
    * @param keyPrefix the prefix of the literal type key
    * @param properties the descriptor properties
    * @return the derived value
    */
  def getValue(keyPrefix: String, properties: DescriptorProperties): Any = {
    val typeKey = s"$keyPrefix$TYPE"
    // explicit type: read the value with the declared type's accessor
    if (properties.containsKey(typeKey)) {
      val valueKey = s"$keyPrefix$VALUE"
      val typeInfo = properties.getType(typeKey)
      typeInfo match {
        case Types.BIG_DEC => properties.getBigDecimal(valueKey)
        case Types.BOOLEAN => properties.getBoolean(valueKey)
        case Types.BYTE => properties.getByte(valueKey)
        case Types.DOUBLE => properties.getDouble(valueKey)
        case Types.FLOAT => properties.getFloat(valueKey)
        case Types.INT => properties.getInt(valueKey)
        case Types.LONG => properties.getLong(valueKey)
        case Types.SHORT => properties.getShort(valueKey)
        case Types.STRING => properties.getString(valueKey)
        case _ => throw new TableException(s"Unsupported type '${typeInfo.getTypeClass}'.")
      }
    }
    // implicit type: parse the bare string stored under the trimmed prefix key
    else {
      deriveTypeStringFromValueString(
        properties.getString(keyPrefix.substring(0, keyPrefix.length - 1)))
    }
  }

  /**
    * Tries to derive a literal value from the given string value.
    * The derivation priority for the types are BOOLEAN, INT, DOUBLE, and VARCHAR.
    *
    * NOTE(review): despite the name, this returns the parsed *value*
    * (java.lang Boolean/Integer/Double or the original String), not a type string.
    *
    * @param valueString the string formatted value
    * @return parsed value
    */
  def deriveTypeStringFromValueString(valueString: String): AnyRef = {
    if (valueString.equals(LITERAL_TRUE) || valueString.equals(LITERAL_FALSE)) {
      JBoolean.valueOf(valueString)
    } else {
      try {
        JInt.valueOf(valueString)
      } catch {
        case _: NumberFormatException =>
          try {
            JDouble.valueOf(valueString)
          } catch {
            case _: NumberFormatException =>
              valueString
          }
      }
    }
  }
}
|
shaoxuan-wang/flink
|
flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/descriptors/LiteralValueValidator.scala
|
Scala
|
apache-2.0
| 5,159 |
/**
* ____ __ ____ ____ ____,,___ ____ __ __ ____
* ( _ \\ /__\\ (_ )(_ _)( ___)/ __) ( _ \\( )( )( _ \\ Read
* ) / /(__)\\ / /_ _)(_ )__) \\__ \\ )___/ )(__)( ) _ < README.txt
* (_)\\_)(__)(__)(____)(____)(____)(___/ (__) (______)(____/ LICENSE.txt
**/
package razie.wiki.parser
import razie.tconf.parser.StrAstNode
/** parse dsl, fiddles and code specific fragments */
/** parse dsl, fiddles and code specific fragments */
trait WikiInlineScriptParser extends ParserBase {

  // Entry point: all script-related wiki properties; alternatives tried in order.
  def codeWikiProps = wikiPropScript | wikiPropCall | wikiPropExpr

  // {{def name signature}}
  // Parses `{{def|lambda|inline[:| ]name[:| ]signature}} ... {{/...}}` blocks.
  // An optional leading '.' (captured as `hidden`) suppresses the emitted marker.
  def wikiPropScript: PS = "{{" ~> "\\\\.?".r ~ """def|lambda|inline""".r ~ "[: ]".r ~ """[^:} ]*""".r ~ "[: ]".r ~ """[^}]*""".r ~ "}}" ~
    lines <~ ("{{/def}}" | "{{/lambda}}" |"{{/inline}}" | "{{/}}") ^^ {
    case hidden ~ stype ~ _ ~ name ~ _ ~ sign ~ _ ~ lines => {
      // inlines still need to be called with a call - but will be expanded right there
      if ("lambda" == stype)
        StrAstNode(s"`{{call:#$name}}`") // lambdas are executed right there...
      else if ("inline" == stype && hidden.length <= 0)
        StrAstNode(s"`{{call:#$name}}`") // lambdas are executed right there...
      else if(hidden.length <= 0)
        StrAstNode(s"`{{$stype:$name}}`") // defs are expanded in pre-processing and executed in display
      else StrAstNode.EMPTY // hidden defs emit nothing
    }
  }

  // Parses `{{call[:| ][page]#name}}`; the page part before '#' is optional.
  def wikiPropCall: PS = "{{" ~> """call""".r ~ "[: ]".r ~ opt("""[^#}]*""".r) ~ "#" ~ """[^}]*""".r <~ "}}" ^^ {
    case stype ~ _ ~ page ~ _ ~ name => {
      StrAstNode("`{{" + stype + ":" + (page getOrElse "") + "#" + name + "}}`")
      // calls are executed in display
    }
  }

  // Parses `{{e[:| ]expr}}` / `{{e.js:...}}` / `{{e.scala:...}}` expression fragments.
  def wikiPropExpr: PS = "{{" ~> ( "e" | "e.js" | "e.scala") ~ "[: ]".r ~ """[^}]*""".r <~ "}}" ^^ {
    case stype ~ _ ~ body => {
      StrAstNode("`{{" + stype + ":" + body + "}}`") // are evaluated on display - just check syntax here
      // calls are executed in display
    }
  }
}
|
razie/diesel-rx
|
diesel/src/main/scala/razie/wiki/parser/WikiInlineScriptParser.scala
|
Scala
|
apache-2.0
| 1,939 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package wvlet.airframe.http.grpc
import io.grpc.MethodDescriptor.MethodType
import io.grpc._
import io.grpc.stub.{AbstractBlockingStub, ClientCalls, ServerCalls, StreamObserver}
import wvlet.airframe.Design
import wvlet.airspec.AirSpec
import wvlet.log.LogSupport
import wvlet.log.io.IOUtil
/** Hand-written gRPC service definition (no protoc): a single unary `hello`
 *  method on service "my-service", plus a blocking client stub for it.
 */
object MyService extends LogSupport {

  // Descriptor for the unary String->String method "my-service/hello".
  def helloMethod: MethodDescriptor[String, String] =
    MethodDescriptor
      .newBuilder[String, String](StringMarshaller, StringMarshaller)
      .setFullMethodName(MethodDescriptor.generateFullMethodName("my-service", "hello"))
      .setType(MethodType.UNARY)
      .build()

  // Server-side binding of helloMethod to the async unary handler below.
  def helloMethodDef: ServerMethodDefinition[String, String] = {
    ServerMethodDefinition.create[String, String](
      helloMethod,
      ServerCalls.asyncUnaryCall(
        new MethodHandlers()
      )
    )
  }

  // Bridges gRPC's UnaryMethod callback to helloImpl.
  class MethodHandlers extends ServerCalls.UnaryMethod[String, String] {
    override def invoke(request: String, responseObserver: StreamObserver[String]): Unit = {
      helloImpl(request, responseObserver)
    }
  }

  // Server logic: respond with "Hello <request>" and complete the stream.
  def helloImpl(request: String, responseObserver: StreamObserver[String]): Unit = {
    responseObserver.onNext(s"Hello ${request}")
    responseObserver.onCompleted()
  }

  // Blocking client stub; `build` is required by AbstractBlockingStub for
  // re-creating the stub with new call options.
  class MyServiceBlockingStub(channel: Channel, callOptions: CallOptions)
      extends AbstractBlockingStub[MyServiceBlockingStub](channel, callOptions) {
    override def build(channel: Channel, callOptions: CallOptions): MyServiceBlockingStub = {
      new MyServiceBlockingStub(channel, callOptions)
    }

    // Synchronous unary call to the hello method.
    def hello(message: String): String = {
      ClientCalls.blockingUnaryCall(getChannel, MyService.helloMethod, getCallOptions, message)
    }
  }

  def newBlockingStub(channel: Channel): MyServiceBlockingStub = {
    new MyServiceBlockingStub(channel, CallOptions.DEFAULT)
  }
}
/**
* Test for running gRPC server and client for checking the basic data flow of grpc-java
*/
/**
  * Test for running gRPC server and client for checking the basic data flow of grpc-java
  */
object GrpcTest extends AirSpec {
  // Service registration: binds MyService's hello method under "my-service".
  private val service: ServerServiceDefinition =
    ServerServiceDefinition
      .builder("my-service")
      .addMethod[String, String](MyService.helloMethodDef)
      .build()

  private val port = IOUtil.randomPort

  // DI design: server and channel lifecycles are tied to the test session;
  // both are torn down with shutdownNow on session shutdown.
  override protected def design =
    Design.newDesign
      .bind[Server].toInstance(
        ServerBuilder.forPort(port).addService(service).build()
      ).onStart { server =>
        server.start()
        info(s"Starting gRPC server localhost:${port}")
      }
      .onShutdown { server =>
        info(s"Shutting down gRPC server localhost:${port}")
        server.shutdownNow()
      }
      .bind[ManagedChannel].toProvider { server: Server =>
        // Plaintext channel to the just-started server (no TLS in tests).
        ManagedChannelBuilder.forTarget(s"localhost:${server.getPort}").usePlaintext().build()
      }
      .onShutdown { channel =>
        channel.shutdownNow()
      }

  // Round-trips 11 unary calls and checks each echoed greeting.
  test("run server") { (server: Server, channel: ManagedChannel) =>
    val client = MyService.newBlockingStub(channel)
    for (i <- 0 to 10) {
      val ret = client.hello(s"airframe-grpc ${i}")
      debug(ret)
      ret shouldBe s"Hello airframe-grpc ${i}"
    }
  }
}
|
wvlet/airframe
|
airframe-http-grpc/src/test/scala/wvlet/airframe/http/grpc/GrpcTest.scala
|
Scala
|
apache-2.0
| 3,634 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalactic.{FailureMessages => _, _}
import SharedHelpers.thisLineNumber
import org.scalatest.exceptions.TestFailedException
// TODO: Shouldn't this actually use TypeCheckedTripleEquals?
// Spec for the assert(boolean) macro's failure messages and stack depths.
// NOTE(review): per the TODO above, this spec might be expected to mix in
// TypeCheckedTripleEquals; it currently does not.
// The `thisLineNumber - 4` assertions are offset-sensitive: do NOT add or remove
// lines inside the intercept blocks.
class TypeCheckedAssertionsSpec extends FunSpec {

  private val prettifier = Prettifier.default

  val fileName: String = "TypeCheckedAssertionsSpec.scala"

  describe("The assert(boolean) method") {
    val a = 3
    val b = 5

    val bob = "bob"
    val alice = "alice"

    // Expected "X did not equal Y" message, with both sides prettified the same
    // way the macro does it.
    def didNotEqual(left: Any, right: Any): String = {
      val (leftee, rightee) = Suite.getObjectsForFailureMessage(left, right)
      FailureMessages.didNotEqual(prettifier, leftee, rightee)
    }

    // Expected "X equaled Y" message.
    def equaled(left: Any, right: Any): String =
      FailureMessages.equaled(prettifier, left, right)

    // NOTE(review): unused within this spec class — confirm before removing.
    def thrice(i: Int) = i * 3

    it("should do nothing when is used to check a == 3") {
      assert(a == 3)
    }

    it("should throw TestFailedException with correct message and stack depth when is used to check a == 5") {
      val e = intercept[TestFailedException] {
        assert(a == 5)
      }
      assert(e.message === Some(didNotEqual(3, 5)))
      assert(e.failedCodeFileName === (Some(fileName)))
      assert(e.failedCodeLineNumber === (Some(thisLineNumber - 4)))
    }

    it("should do nothing when is used to check 5 == b") {
      assert(5 == b)
    }

    it("should throw TestFailedException with correct message and stack depth when is used to check 3 == b") {
      val e = intercept[TestFailedException] {
        assert(3 == b)
      }
      assert(e.message === Some(didNotEqual(3, 5)))
      assert(e.failedCodeFileName === (Some(fileName)))
      assert(e.failedCodeLineNumber === (Some(thisLineNumber - 4)))
    }

    it("should do nothing when is used to check a != 5") {
      assert(a != 5)
    }

    it("should throw TestFailedException with correct message and stack depth when is used to check a != 3") {
      val e = intercept[TestFailedException] {
        assert(a != 3)
      }
      assert(e.message === Some(equaled(3, 3)))
      assert(e.failedCodeFileName === (Some(fileName)))
      assert(e.failedCodeLineNumber === (Some(thisLineNumber - 4)))
    }

    it("should do nothing when is used to check 3 != b") {
      assert(3 != b)
    }

    it("should throw TestFailedException with correct message and stack depth when is used to check 5 != b") {
      val e = intercept[TestFailedException] {
        assert(5 != b)
      }
      assert(e.message === Some(equaled(5, 5)))
      assert(e.failedCodeFileName === (Some(fileName)))
      assert(e.failedCodeLineNumber === (Some(thisLineNumber - 4)))
    }

    it("should do nothing when is used to check 3 == 3") {
      assert(3 == 3)
    }

    it("should throw TestFailedException with message that contains the original code and correct stack depth when is used to check 3 == 5") {
      // This is because the compiler simply pass the false boolean literal
      // to the macro, can't find a way to get the 3 == 5 literal.
      val e1 = intercept[TestFailedException] {
        assert(3 == 5)
      }
      assert(e1.message === None)
      assert(e1.failedCodeFileName === (Some(fileName)))
      assert(e1.failedCodeLineNumber === (Some(thisLineNumber - 4)))

      val e2 = intercept[TestFailedException] {
        assert(3 == 5, "3 did not equal 5")
      }
      assert(e2.message === Some("3 did not equal 5"))
      assert(e2.failedCodeFileName === (Some(fileName)))
      assert(e2.failedCodeLineNumber === (Some(thisLineNumber - 4)))
    }

    it("should throw TestFailedException with correct message and stack depth when is used to check a == b") {
      val e = intercept[TestFailedException] {
        assert(a == b)
      }
      assert(e.message === Some(didNotEqual(3, 5)))
      assert(e.failedCodeFileName === (Some(fileName)))
      assert(e.failedCodeLineNumber === (Some(thisLineNumber - 4)))
    }

    it("should throw TestFailedException with correct message and stack depth when is used to check a == null") {
      val e = intercept[TestFailedException] {
        assert(a == null)
      }
      assert(e.message === Some(didNotEqual(3, null)))
      assert(e.failedCodeFileName === (Some(fileName)))
      assert(e.failedCodeLineNumber === (Some(thisLineNumber - 4)))
    }

    it("should throw TestFailedException with correct message and stack depth when is used to check null == a") {
      val e = intercept[TestFailedException] {
        assert(null == a)
      }
      assert(e.message === Some(didNotEqual(null, 3)))
      assert(e.failedCodeFileName === (Some(fileName)))
      assert(e.failedCodeLineNumber === (Some(thisLineNumber - 4)))
    }

    it("should throw TestFailedException with correct message and stack depth when is used to check 3 != a") {
      val e = intercept[TestFailedException] {
        assert(3 != a)
      }
      assert(e.message === Some(equaled(3, 3)))
      assert(e.failedCodeFileName === (Some(fileName)))
      assert(e.failedCodeLineNumber === (Some(thisLineNumber - 4)))
    }

    it("should do nothing when is used to check 5 != a") {
      assert(5 != a)
    }

    it("should do nothing when is used to check bob == \\"bob\\"") {
      assert(bob == "bob")
    }

    it("should do nothing when is used to check bob != \\"alice\\"") {
      assert(bob != "alice")
    }

    it("should do nothing when is used to check alice == \\"alice\\"") {
      assert(alice == "alice")
    }

    it("should do nothing when is used to check alice != \\"bob\\"") {
      assert(alice != "bob")
    }

    it("should throw TestFailedException with correct message and stack depth when is used to check bob == \\"alice\\"") {
      val e = intercept[TestFailedException] {
        assert(bob == "alice")
      }
      assert(e.message === Some(didNotEqual(bob, "alice")))
      assert(e.failedCodeFileName === (Some(fileName)))
      assert(e.failedCodeLineNumber === (Some(thisLineNumber - 4)))
    }

    it("should throw TestFailedException with correct message and stack depth when is used to check bob != \\"bob\\"") {
      val e = intercept[TestFailedException] {
        assert(bob != "bob")
      }
      assert(e.message === Some(equaled(bob, "bob")))
      assert(e.failedCodeFileName === (Some(fileName)))
      assert(e.failedCodeLineNumber === (Some(thisLineNumber - 4)))
    }

    it("should throw TestFailedException with correct message and stack depth when is used to check alice == \\"bob\\"") {
      val e = intercept[TestFailedException] {
        assert(alice == "bob")
      }
      assert(e.message === Some(didNotEqual(alice, "bob")))
      assert(e.failedCodeFileName === (Some(fileName)))
      assert(e.failedCodeLineNumber === (Some(thisLineNumber - 4)))
    }

    it("should throw TestFailedException with correct message and stack depth when is used to check alice != \\"alice\\"") {
      val e = intercept[TestFailedException] {
        assert(alice != "alice")
      }
      assert(e.message === Some(equaled(alice, "alice")))
      assert(e.failedCodeFileName === (Some(fileName)))
      assert(e.failedCodeLineNumber === (Some(thisLineNumber - 4)))
    }

    // === and !== below go through scalactic's TripleEquals.
    it("should do nothing when is used to check a === 3") {
      assert(a === 3)
    }

    it("should throw TestFailedException with correct message and stack depth when is used to check a === 5") {
      val e = intercept[TestFailedException] {
        assert(a === 5)
      }
      assert(e.message === Some(didNotEqual(3, 5)))
      assert(e.failedCodeFileName === (Some(fileName)))
      assert(e.failedCodeLineNumber === (Some(thisLineNumber - 4)))
    }

    it("should do nothing when is used to check 3 === a") {
      assert(3 === a)
    }

    it("should throw TestFailedException with correct message and stack depth when is used to check 5 === a") {
      val e = intercept[TestFailedException] {
        assert(5 === a)
      }
      assert(e.message === Some(didNotEqual(5, 3)))
      assert(e.failedCodeFileName === (Some(fileName)))
      assert(e.failedCodeLineNumber === (Some(thisLineNumber - 4)))
    }

    it("should do nothing when is used to check a !== 5") {
      assert(a !== 5)
    }

    it("should throw TestFailedException with correct message and stack depth when is used to check a !== 3") {
      val e = intercept[TestFailedException] {
        assert(a !== 3)
      }
      assert(e.message === Some(equaled(3, 3)))
      assert(e.failedCodeFileName === (Some(fileName)))
      assert(e.failedCodeLineNumber === (Some(thisLineNumber - 4)))
    }

    it("should do nothing when is used to check 5 !== a") {
      assert(5 !== a)
    }

    it("should throw TestFailedException with correct message and stack depth when is used to check 3 !== a") {
      val e = intercept[TestFailedException] {
        assert(3 !== a)
      }
      assert(e.message === Some(equaled(3, 3)))
      assert(e.failedCodeFileName === (Some(fileName)))
      assert(e.failedCodeLineNumber === (Some(thisLineNumber - 4)))
    }
  }
}
|
dotty-staging/scalatest
|
scalatest-test/src/test/scala/org/scalatest/TypeCheckedAssertionsSpec.scala
|
Scala
|
apache-2.0
| 9,609 |
/** Monty Hall simulation over the project's probability monad: compares the
 *  winning distribution of the "stay" and "switch" strategies.
 */
object MontyHall {
  import probability.Probability._

  // The three doors; one hides the prize.
  sealed abstract class Door
  object A extends Door { override def toString = "A" }
  object B extends Door { override def toString = "B" }
  object C extends Door { override def toString = "C" }

  // Outcome of one game.
  sealed abstract class Winning
  object Looser extends Winning { override def toString = "Looser"}
  object Winner extends Winning { override def toString = "Winner"}

  // Player strategy after the host opens a door.
  sealed abstract class Strategie
  object Stay extends Strategie { override def toString = "stay" }
  object Switch extends Strategie { override def toString = "switch" }

  private val doors = List(A,B,C)

  // Stay keeps the chosen door; Switch picks uniformly among the doors that are
  // neither chosen nor opened (exactly one remains with three doors).
  private def selectDoor(s:Strategie, chosen:Door, open:Door) =
    s match {
      case Stay => single(chosen)
      case Switch => uniform(doors.filter({x=> x!=chosen && x!=open}))
    }

  // Full game as a monadic computation over the probability distribution.
  def experiment(s:Strategie) =
    for ( hidden <- uniform(doors);  // 1. hide price
          chosen <- uniform(doors);  // 2. let player choose door
                                     // 3. open a door (never the prize, never the chosen one)
          open   <- uniform(doors.filter{x=> x!=hidden && x!=chosen});

          // allow player to switch door:
          chosen <- selectDoor(s, chosen, open))
    yield(if (chosen == hidden) {
            Winner
          } else {
            Looser
          })

  // Prints the outcome distribution for both strategies.
  def main(arg:Array[String]) =
    for (strategie <- List(Stay, Switch)) {
      println(strategie.toString ++ ":\\n" ++ experiment(strategie).toString ++ "\\n")
    }
}
|
urso/scala_mprob
|
examples/MontyHall.scala
|
Scala
|
bsd-3-clause
| 1,551 |
package latis.util
import org.junit.After
import org.junit.Assert._
import org.junit.Test
import latis.dm.Dataset
import latis.dm.Function
import latis.dm.Index
import latis.dm.Sample
import latis.dm.Text
import latis.dm.TupleMatch
// JUnit test verifying the structure and content of the LaTiS capabilities
// dataset against the project's test latis.properties file.
class TestCapabilities {
/**
* Cause the reload of properties before each test.
*/
@After
def resetProperties = LatisProperties.reset
/**
* Assert that the Capabilities Dataset is formatted correctly
* and contains correct entries from the latis project's
* test latis.properties file.
*
* NOTE(review): the inner matches below each have a single case; if the
* first sample has an unexpected shape the test fails with a MatchError
* rather than via `fail` — presumably acceptable for a unit test, but
* confirm this is intentional.
*/
@Test
def capabilities_dataset = {
val ds = LatisCapabilities().getDataset
//latis.writer.AsciiWriter.write(ds)
// Presumably the tuple holds (datasets, output formats, operations) —
// inferred from the iterator names itDs/itOut/itOp; verify against
// LatisCapabilities.
ds match {
case Dataset(TupleMatch(Function(itDs), Function(itOut), Function(itOp))) => {
// First dataset entry: index 0, name "agg".
itDs.next match {
case Sample(Index(i), Text(dsName)) => {
assertEquals(0, i)
assertEquals("agg", dsName)
}
}
// First output format entry: "asc" with its description.
itOut.next match {
case Sample(Index(i), TupleMatch(Text(out), Text(desc))) => {
assertEquals(0, i)
assertEquals("asc", out)
assertEquals("ASCII representation reflecting how the dataset is modeled.", desc)
}
}
// First operation entry: "binave" with description and usage string.
itOp.next match {
case Sample(Index(i), TupleMatch(Text(op), Text(desc), Text(usage))) => {
assertEquals(0, i)
assertEquals("binave", op)
assertEquals("Consolidate the data by binning (by a given width) and averaging the values in each bin.", desc)
assertEquals("binave(width)", usage)
}
}
}
case _ => fail
}
}
}
|
dlindhol/LaTiS
|
src/test/scala/latis/util/TestCapabilities.scala
|
Scala
|
epl-1.0
| 1,682 |
package com.softwaremill.play24
import _root_.controllers.Assets
import akka.actor.ActorSystem
import com.softwaremill.play24.dao.{SupplierDao, CoffeeDao}
import com.softwaremill.play24.modules.{DatabaseModule, ControllerModule, DaoModule}
import play.api.ApplicationLoader.Context
import play.api._
import play.api.libs.ws.ning.NingWSComponents
import play.api.routing.Router
import router.Routes
import com.softwaremill.macwire._
import scala.concurrent.ExecutionContext
/**
 * Play ApplicationLoader that builds the application with compile-time
 * dependency injection via the [[AppComponents]] cake.
 */
class AppApplicationLoader extends ApplicationLoader {
  def load(context: Context) = {
    // Configure logging before any other component touches the environment.
    Logger.configure(context.environment)
    val components = new BuiltInComponentsFromContext(context) with AppComponents
    components.application
  }
}
// Assembles the application via MacWire compile-time DI. The `wire` macro
// resolves constructor parameters from the values visible in this scope at
// compile time, so the members declared here double as the wiring graph.
trait AppComponents
extends BuiltInComponents
with NingWSComponents // for wsClient
with DatabaseModule // Database injection
with DaoModule
with ControllerModule // Application controllers
{
implicit val ec: ExecutionContext = scala.concurrent.ExecutionContext.Implicits.global
lazy val assets: Assets = wire[Assets]
lazy val router: Router = {
lazy val prefix = "/"
wire[Routes]
}
// NOTE(review): the value produced by this wire[Z] is discarded, so Z is
// instantiated purely for effect (or as a wiring demonstration) — confirm
// this is intentional.
wire[Z]
// The seed method is here just for demonstration purposes. Ideally this will be run in a task.
def coffeeDoa: CoffeeDao
def supplierDoa: SupplierDao
val seed = wire[Seed]
seed.run()
}
// Minimal wiring target used by `wire[Z]`; its ActorSystem parameter is
// satisfied by the actor system provided through BuiltInComponents.
class Z(as: ActorSystem)
|
numesmat/macwire
|
examples/play24/app/com/softwaremill/play24/AppApplicationLoader.scala
|
Scala
|
apache-2.0
| 1,367 |
/*
* Copyright 2008-present MongoDB, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mongodb.scala.bson.collection.immutable
import scala.collection.JavaConverters._
import scala.collection.generic.CanBuildFrom
import scala.collection.mutable.ListBuffer
import scala.collection.{ mutable, Traversable, TraversableLike }
import org.mongodb.scala.bson._
import org.mongodb.scala.bson.collection.BaseDocument
/**
* The immutable [[Document]] companion object for easy creation.
*/
/**
 * Companion for the immutable [[Document]]: factory methods plus the builder
 * machinery required by the 2.12-era collections framework.
 */
object Document {
  import BsonMagnets._

  /**
   * An empty Document.
   * @return a new Document
   */
  def empty: Document = apply()

  /**
   * Creates a Document with no elements.
   * @return a new Document
   */
  def apply(): Document = new Document(new BsonDocument())

  /**
   * Parses a string in MongoDB Extended JSON format into a `Document`.
   *
   * @param json the JSON string
   * @return a corresponding `Document` object
   * @see org.bson.json.JsonReader
   * @see [[http://docs.mongodb.com/manual/reference/mongodb-extended-json/ MongoDB Extended JSON]]
   */
  def apply(json: String): Document = new Document(BsonDocument(json))

  /**
   * Creates a Document from individual key/value elements.
   *
   * @param elems any `(String, BsonValue)`-like pairs convertible to
   *              [[BsonElement]] via [[BsonMagnets.CanBeBsonElement]] and
   *              in-scope [[BsonTransformer]]s
   * @return a Document containing the given key/value pairs
   */
  def apply(elems: CanBeBsonElement*): Document = {
    val doc = new BsonDocument()
    for (elem <- elems) doc.put(elem.key, elem.value)
    new Document(doc)
  }

  /**
   * Creates a Document from a collection of key/value elements.
   *
   * @param elems a sequence convertible to [[BsonElement]]s via
   *              [[BsonMagnets.CanBeBsonElements]] and in-scope [[BsonTransformer]]s
   * @return a Document containing the given key/value pairs
   */
  def apply(elems: CanBeBsonElements): Document = {
    val doc = new BsonDocument()
    for (el <- elems.values) doc.put(el.key, el.value)
    new Document(doc)
  }

  /**
   * The implicit builder factory used by collection transformations
   * (map/filter/etc.) to rebuild a Document.
   *
   * @return a builder factory.
   */
  implicit def canBuildFrom: CanBuildFrom[Traversable[(String, BsonValue)], (String, BsonValue), Document] =
    new CanBuildFrom[Traversable[(String, BsonValue)], (String, BsonValue), Document] {
      def apply(): mutable.Builder[(String, BsonValue), Document] = builder
      def apply(from: Traversable[(String, BsonValue)]): mutable.Builder[(String, BsonValue), Document] = builder
    }

  /** A fresh builder that accumulates pairs and finishes via [[fromSeq]]. */
  def builder: mutable.Builder[(String, BsonValue), Document] =
    ListBuffer.empty[(String, BsonValue)] mapResult fromSeq

  /** Creates a Document from a sequence of key/value pairs, in order. */
  def fromSeq(ts: Seq[(String, BsonValue)]): Document = {
    val doc = new BsonDocument()
    for ((key, value) <- ts) doc.put(key, value)
    apply(doc)
  }
}
/**
* An immutable Document implementation.
*
* A strictly typed `Map[String, BsonValue]` like structure that traverses the elements in insertion order. Unlike native scala maps there
* is no variance in the value type and it always has to be a `BsonValue`.
*
* @param underlying the underlying BsonDocument which stores the data.
*
*/
/**
 * An immutable, strictly-typed `Map[String, BsonValue]`-like structure that
 * traverses its elements in insertion order. Unlike native Scala maps there is
 * no variance in the value type: values are always `BsonValue`s.
 *
 * @param underlying the underlying BsonDocument which stores the data.
 */
case class Document(protected[scala] val underlying: BsonDocument)
    extends BaseDocument[Document]
    with TraversableLike[(String, BsonValue), Document] {

  /**
   * Wraps a BsonDocument in a new immutable Document.
   *
   * @param underlying the underlying BsonDocument
   * @return a new document
   */
  protected[scala] def apply(underlying: BsonDocument) = new Document(underlying)

  /**
   * Applies `f` to every key/value pair for its side effect only; results of
   * `f` are discarded.
   *
   * @param f the function applied to each element
   * @tparam U the (ignored) result type of `f`, typically `Unit`
   */
  override def foreach[U](f: ((String, BsonValue)) => U): Unit = {
    val entries = underlying.asScala
    entries.foreach(f)
  }

  /**
   * A new builder for this collection type.
   */
  override def newBuilder: mutable.Builder[(String, BsonValue), Document] = Document.builder
}
|
rozza/mongo-java-driver
|
bson-scala/src/main/scala-2.13-/org/mongodb/scala/bson/collection/immutable/Document.scala
|
Scala
|
apache-2.0
| 5,133 |
package is.hail.types.physical
import is.hail.annotations._
import is.hail.asm4s.{Code, _}
import is.hail.expr.ir.EmitCodeBuilder
import is.hail.types.physical.stypes.primitives.{SFloat64, SFloat64Value}
import is.hail.types.physical.stypes.{SType, SValue}
import is.hail.types.virtual.TFloat64
case object PFloat64Optional extends PFloat64(false)
case object PFloat64Required extends PFloat64(true)
/**
 * Physical type for 64-bit floating point values; `required` controls
 * whether missing values are permitted.
 */
class PFloat64(override val required: Boolean) extends PNumeric with PPrimitive {
  lazy val virtualType: TFloat64.type = TFloat64

  override type NType = PFloat64

  def _asIdent = "float64"

  override def _pretty(sb: StringBuilder, indent: Int, compact: Boolean): Unit =
    sb.append("PFloat64")

  // Orders values by the doubles stored at the two region offsets.
  override def unsafeOrdering(): UnsafeOrdering = new UnsafeOrdering {
    def compare(o1: Long, o2: Long): Int = {
      val left = Region.loadDouble(o1)
      val right = Region.loadDouble(o2)
      java.lang.Double.compare(left, right)
    }
  }

  // A double occupies 8 bytes.
  override def byteSize: Long = 8

  override def zero = coerce[PFloat64](const(0.0))

  override def add(a: Code[_], b: Code[_]): Code[PFloat64] = {
    val sum = coerce[Double](a) + coerce[Double](b)
    coerce[PFloat64](sum)
  }

  override def multiply(a: Code[_], b: Code[_]): Code[PFloat64] = {
    val product = coerce[Double](a) * coerce[Double](b)
    coerce[PFloat64](product)
  }

  override def sType: SType = SFloat64

  // Emits a store of the double value at the given address.
  def storePrimitiveAtAddress(cb: EmitCodeBuilder, addr: Code[Long], value: SValue): Unit =
    cb.append(Region.storeDouble(addr, value.asDouble.value))

  // Emits a load of the double value at the given address.
  override def loadCheapSCode(cb: EmitCodeBuilder, addr: Code[Long]): SFloat64Value =
    new SFloat64Value(cb.memoize(Region.loadDouble(addr)))

  override def unstagedStoreJavaObjectAtAddress(addr: Long, annotation: Annotation, region: Region): Unit =
    Region.storeDouble(addr, annotation.asInstanceOf[Double])
}
/** Factory and extractor for [[PFloat64]]; reuses the two cached singletons. */
object PFloat64 {
  def apply(required: Boolean = false): PFloat64 =
    if (required) PFloat64Required else PFloat64Optional

  // `required` is a primitive Boolean, so this always yields Some(...).
  def unapply(t: PFloat64): Option[Boolean] = Some(t.required)
}
|
hail-is/hail
|
hail/src/main/scala/is/hail/types/physical/PFloat64.scala
|
Scala
|
mit
| 1,964 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.examples.sql
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.functions._
// One method for defining the schema of an RDD is to make a case class with the desired column
// names and types. Spark SQL infers the table schema from these fields via
// Scala reflection when the RDD is converted to a DataFrame.
case class Record(key: Int, value: String)
/**
 * Spark SQL example: builds a DataFrame from an RDD of case classes, queries
 * it with SQL and with the DataFrame DSL, and round-trips it through parquet.
 */
object RDDRelation {
  def main(args: Array[String]) {
    // Referencing HadoopHome forces its initialization (Windows hadoop.home.dir
    // setup) before Spark starts.
    // System.setProperty("hadoop.home.dir", "D:\\\\hadoop-2.6.0")
    import org.apache.spark.examples.HadoopHome
    val hh = HadoopHome

    val conf = new SparkConf().setAppName("RDDRelation").setMaster("local[2]")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)

    // Brings in implicit conversions such as rdd.toDF() and the $"col" syntax.
    import sqlContext.implicits._

    val df = sc.parallelize((1 to 100).map(i => Record(i, s"val_$i"))).toDF()
    // Register the DataFrame as a table; the schema is inferred from the
    // Record case class via reflection.
    df.registerTempTable("records")

    // Plain SQL over the registered table.
    println("Result of SELECT *:")
    sqlContext.sql("SELECT * FROM records").collect().foreach(println)

    // Aggregation query.
    val count = sqlContext.sql("SELECT COUNT(*) FROM records").collect().head.getLong(0)
    println(s"COUNT(*): $count")

    // SQL results are RDDs of Row; columns are accessed by ordinal.
    val rddFromSql = sqlContext.sql("SELECT key, value FROM records WHERE key < 10")
    println("Result of RDD.map:")
    rddFromSql.map(row => s"Key: ${row(0)}, Value: ${row(1)}").collect().foreach(println)

    // The same kind of query written with the LINQ-like DataFrame DSL.
    df.where($"key" === 1).orderBy($"value".asc).select($"key").collect().foreach(println)

    // Round-trip through a parquet file; parquet is self-describing so the
    // schema is preserved on read.
    df.write.parquet("pair.parquet")
    val parquetFile = sqlContext.read.parquet("pair.parquet")

    // The DSL works on parquet-backed DataFrames just like the original RDD.
    parquetFile.where($"key" === 1).select($"value".as("a")).collect().foreach(println)

    // Parquet files can also be registered as tables and queried with SQL.
    parquetFile.registerTempTable("parquetFile")
    sqlContext.sql("SELECT * FROM parquetFile").collect().foreach(println)

    sc.stop()
  }
}
|
shenbaise/mltoy
|
src/main/scala/org/apache/spark/examples/sql/RDDRelation.scala
|
Scala
|
apache-2.0
| 3,410 |
/*
* Copyright 2009-2010 LinkedIn, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.linkedin.norbert
package network
package common
import org.specs2.mutable.SpecificationWithJUnit
import org.specs2.mock.Mockito
import client.NetworkClient
import client.loadbalancer.{LoadBalancer, LoadBalancerFactory, LoadBalancerFactoryComponent}
import server._
import cluster.{ClusterClient, ClusterClientComponent, Node}
import com.google.protobuf.Message
import org.specs2.specification.Scope
import scala.collection.mutable.MutableList
// Verifies that LocalMessageExecution short-circuits network calls: when the
// load balancer selects the local node, the request is handed to the local
// MessageExecutor instead of being sent over the wire.
class LocalMessageExecutionSpec extends SpecificationWithJUnit with Mockito with SampleMessage {
trait LocalMessageExecutionSetup extends Scope {
val clusterClient = mock[ClusterClient]
// Hand-rolled stub (not a mock) so the test can record whether it was
// called and with which request, and immediately complete the response.
val messageExecutor = new MessageExecutor {
var called = false
var request: Any = _
val filters = new MutableList[Filter]
def shutdown = {}
def executeMessage[RequestMsg, ResponseMsg](request: RequestMsg, messageName: String,
responseHandler: Option[(Either[Exception, ResponseMsg]) => Unit],
context: Option[RequestContext] = None) = {
called = true
this.request = request
// Respond synchronously with a null-valued success so sendRequest completes.
val response = null.asInstanceOf[ResponseMsg]
responseHandler.get(Right(response))
}
}
// Cake-pattern assembly of the client under test; myNode is node 1 so a
// load balancer returning node 1 triggers the local-execution path.
val networkClient = new NetworkClient with ClusterClientComponent with ClusterIoClientComponent with LoadBalancerFactoryComponent
with MessageExecutorComponent with LocalMessageExecution {
val lb = mock[LoadBalancer]
val loadBalancerFactory = mock[LoadBalancerFactory]
val clusterIoClient = mock[ClusterIoClient]
// val messageRegistry = mock[MessageRegistry]
val clusterClient = LocalMessageExecutionSetup.this.clusterClient
val messageExecutor = LocalMessageExecutionSetup.this.messageExecutor
val myNode = Node(1, "localhost:31313", true)
}
val nodes = Set(Node(1, "", true), Node(2, "", true), Node(3, "", true))
val endpoints = nodes.map { n =>
new Endpoint {
def node = n
def canServeRequests = true
}
}
val message = mock[Message]
// Stub the cluster plumbing so networkClient.start succeeds and returns
// the mocked load balancer.
// networkClient.messageRegistry.contains(any[Message]) returns true
clusterClient.nodes returns nodes
clusterClient.isConnected returns true
networkClient.clusterIoClient.nodesChanged(nodes) returns endpoints
networkClient.loadBalancerFactory.newLoadBalancer(endpoints) returns networkClient.lb
}
"LocalMessageExecution" should {
"call the MessageExecutor if myNode is equal to the node the request is to be sent to" in new LocalMessageExecutionSetup {
networkClient.lb.nextNode(None, None) returns Some(networkClient.myNode)
networkClient.start
networkClient.sendRequest(request) must not beNull
messageExecutor.called must beTrue
messageExecutor.request must be_==(request)
}
"not call the MessageExecutor if myNode is not equal to the node the request is to be sent to" in new LocalMessageExecutionSetup {
networkClient.lb.nextNode(None, None) returns Some(Node(2, "", true))
networkClient.start
networkClient.sendRequest(request) must not beNull
messageExecutor.called must beFalse
}
}
}
|
linkedin/norbert
|
network/src/test/scala/com/linkedin/norbert/network/common/LocalMessageExecutionSpec.scala
|
Scala
|
apache-2.0
| 3,749 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.rdd
import java.util.concurrent.Semaphore
import scala.concurrent._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import org.scalatest.BeforeAndAfterAll
import org.scalatest.concurrent.{Signaler, ThreadSignaler, TimeLimits}
import org.scalatest.time.SpanSugar._
import org.apache.spark._
import org.apache.spark.util.ThreadUtils
// Tests for the asynchronous RDD actions (countAsync, collectAsync, etc.):
// result correctness, success/failure callback dispatch, await semantics,
// and that registering callbacks does not consume a thread while waiting.
class AsyncRDDActionsSuite extends SparkFunSuite with BeforeAndAfterAll with TimeLimits {
@transient private var sc: SparkContext = _
// Necessary to make ScalaTest 3.x interrupt a thread on the JVM like ScalaTest 2.2.x
implicit val defaultSignaler: Signaler = ThreadSignaler
override def beforeAll(): Unit = {
super.beforeAll()
sc = new SparkContext("local[2]", "test")
}
override def afterAll(): Unit = {
try {
LocalSparkContext.stop(sc)
sc = null
} finally {
super.afterAll()
}
}
// Zero-partition RDD used to exercise the empty-input edge case.
lazy val zeroPartRdd = new EmptyRDD[Int](sc)
test("countAsync") {
assert(zeroPartRdd.countAsync().get() === 0)
assert(sc.parallelize(1 to 10000, 5).countAsync().get() === 10000)
}
test("collectAsync") {
assert(zeroPartRdd.collectAsync().get() === Seq.empty)
val collected = sc.parallelize(1 to 1000, 3).collectAsync().get()
assert(collected === (1 to 1000))
}
test("foreachAsync") {
// NOTE(review): `i => Unit` returns the Unit companion object, not ();
// harmless here since the result is discarded, but `i => ()` was likely meant.
zeroPartRdd.foreachAsync(i => Unit).get()
val accum = sc.longAccumulator
sc.parallelize(1 to 1000, 3).foreachAsync { i =>
accum.add(1)
}.get()
assert(accum.value === 1000)
}
test("foreachPartitionAsync") {
zeroPartRdd.foreachPartitionAsync(iter => Unit).get()
val accum = sc.longAccumulator
// One accumulator increment per partition, so the total equals the
// partition count (9).
sc.parallelize(1 to 1000, 9).foreachPartitionAsync { iter =>
accum.add(1)
}.get()
assert(accum.value === 9)
}
test("takeAsync") {
// Checks takeAsync against Seq.take across partition counts and take sizes,
// including 0, 1, and values around partition boundaries.
def testTake(rdd: RDD[Int], input: Seq[Int], num: Int): Unit = {
val expected = input.take(num)
val saw = rdd.takeAsync(num).get()
assert(saw == expected, "incorrect result for rdd with %d partitions (expected %s, saw %s)"
.format(rdd.partitions.size, expected, saw))
}
val input = Range(1, 1000)
var rdd = sc.parallelize(input, 1)
for (num <- Seq(0, 1, 999, 1000)) {
testTake(rdd, input, num)
}
rdd = sc.parallelize(input, 2)
for (num <- Seq(0, 1, 3, 500, 501, 999, 1000)) {
testTake(rdd, input, num)
}
rdd = sc.parallelize(input, 100)
for (num <- Seq(0, 1, 500, 501, 999, 1000)) {
testTake(rdd, input, num)
}
rdd = sc.parallelize(input, 1000)
for (num <- Seq(0, 1, 3, 999, 1000)) {
testTake(rdd, input, num)
}
}
/**
* Make sure onComplete, onSuccess, and onFailure are invoked correctly in the case
* of a successful job execution.
*/
test("async success handling") {
val f = sc.parallelize(1 to 10, 2).countAsync()
// Use a semaphore to make sure onSuccess and onComplete's success path will be called.
// If not, the test will hang.
val sem = new Semaphore(0)
f.onComplete {
case scala.util.Success(res) =>
sem.release()
case scala.util.Failure(e) =>
info("Should not have reached this code path (onComplete matching Failure)")
throw new Exception("Task should succeed")
}
f.foreach { a =>
sem.release()
}
f.failed.foreach { t =>
info("Should not have reached this code path (onFailure)")
throw new Exception("Task should succeed")
}
assert(f.get() === 10)
// Two permits: one from onComplete's success case, one from foreach.
failAfter(10.seconds) {
sem.acquire(2)
}
}
/**
* Make sure onComplete, onSuccess, and onFailure are invoked correctly in the case
* of a failed job execution.
*/
test("async failure handling") {
val f = sc.parallelize(1 to 10, 2).map { i =>
throw new Exception("intentional"); i
}.countAsync()
// Use a semaphore to make sure onFailure and onComplete's failure path will be called.
// If not, the test will hang.
val sem = new Semaphore(0)
f.onComplete {
case scala.util.Success(res) =>
info("Should not have reached this code path (onComplete matching Success)")
throw new Exception("Task should fail")
case scala.util.Failure(e) =>
sem.release()
}
f.foreach { a =>
info("Should not have reached this code path (onSuccess)")
throw new Exception("Task should fail")
}
f.failed.foreach { t =>
sem.release()
}
intercept[SparkException] {
f.get()
}
// Two permits: one from onComplete's failure case, one from f.failed.foreach.
failAfter(10.seconds) {
sem.acquire(2)
}
}
/**
* Awaiting FutureAction results
*/
test("FutureAction result, infinite wait") {
val f = sc.parallelize(1 to 100, 4)
.countAsync()
assert(ThreadUtils.awaitResult(f, Duration.Inf) === 100)
}
test("FutureAction result, finite wait") {
val f = sc.parallelize(1 to 100, 4)
.countAsync()
assert(ThreadUtils.awaitResult(f, Duration(30, "seconds")) === 100)
}
test("FutureAction result, timeout") {
// Each partition sleeps 20ms, so a 20ms await deadline must expire.
val f = sc.parallelize(1 to 100, 4)
.mapPartitions(itr => { Thread.sleep(20); itr })
.countAsync()
intercept[TimeoutException] {
ThreadUtils.awaitResult(f, Duration(20, "milliseconds"))
}
}
// Shared driver asserting that registering an onComplete callback on a
// still-running FutureAction does not immediately consume an executor of the
// supplied ExecutionContext; the callback runs only once tasks finish.
private def testAsyncAction[R](action: RDD[Int] => FutureAction[R]): Unit = {
val executionContextInvoked = Promise[Unit]
val fakeExecutionContext = new ExecutionContext {
override def execute(runnable: Runnable): Unit = {
executionContextInvoked.success(())
}
override def reportFailure(t: Throwable): Unit = ()
}
// Smuggle makes the semaphore usable inside serialized task closures;
// tasks block on it until the test releases permits below.
val starter = Smuggle(new Semaphore(0))
starter.drainPermits()
val rdd = sc.parallelize(1 to 100, 4).mapPartitions {itr => starter.acquire(1); itr}
val f = action(rdd)
f.onComplete(_ => ())(fakeExecutionContext)
// Here we verify that registering the callback didn't cause a thread to be consumed.
assert(!executionContextInvoked.isCompleted)
// Now allow the executors to proceed with task processing.
starter.release(rdd.partitions.length)
// Waiting for the result verifies that the tasks were successfully processed.
ThreadUtils.awaitResult(executionContextInvoked.future, atMost = 15.seconds)
}
test("SimpleFutureAction callback must not consume a thread while waiting") {
testAsyncAction(_.countAsync())
}
test("ComplexFutureAction callback must not consume a thread while waiting") {
testAsyncAction((_.takeAsync(100)))
}
}
|
rezasafi/spark
|
core/src/test/scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala
|
Scala
|
apache-2.0
| 7,356 |
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller
* @version 1.2
* @date Mon May 25 17:57:02 EDT 2015
* @see LICENSE (MIT style license file).
*/
package scalation.linalgebra.bld
import java.io.{File, PrintWriter}
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `BldMatrix` object is used to build matrix classes for various base types.
* > run-main scalation.linalgebra.bld.BldMatrix
*/
object BldMatrix extends App with BldParams
{
println ("BldMatrix: generate code for Matrix classes")
for (i <- 0 until kind.length-1) { // do not generate `MatrixS`
val VECTOR = kind(i)._1
val BASE = kind(i)._2
val VECTOR2 = kind(i)._3
val BASE2 = kind(i)._4
val FORMAT = kind(i)._5
val MATRI = kind(i)._6
val ZERO = kind(i)._8
val ONE = kind(i)._9
val BASE_LC = BASE.toLowerCase
val MATRIX = { val m = MATRI.splitAt (MATRI.size-1); m._1 + "x" + m._2 }
val IMPORT = if (CUSTOM contains BASE) s"scalation.math.$BASE.{abs => ABS, _}"
else "math.{abs => ABS}"
val IMPORT2 = if (BASE == "StrNum") "scalation.math.{StrO, oneIf}"
else if (CUSTOM contains BASE) s"scalation.math.{$BASE, oneIf}"
else s"scalation.math.{${BASE_LC}_exp, oneIf}"
// Beginning of string holding code template -----------------------------------
val code = raw"""
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller
* @builder scalation.linalgebra.bld.BldMatrix
* @version 1.2
* @date Sun Sep 16 14:09:25 EDT 2012
* @see LICENSE (MIT style license file).
*/
package scalation.linalgebra
import java.io.PrintWriter
import io.Source.fromFile
import $IMPORT
import $IMPORT2
import scalation.util.{Error, PackageInfo}
import $MATRIX.eye
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `$MATRIX` class stores and operates on Numeric Matrices of type `$BASE`.
* This class follows the `gen.MatrixN` framework and is provided for efficiency.
* @param d1 the first/row dimension
* @param d2 the second/column dimension
* @param v the 2D array used to store matrix elements
*/
class $MATRIX (val d1: Int,
val d2: Int,
private var v: Array [Array [$BASE]] = null)
extends $MATRI with Error with Serializable
{
/** Dimension 1
*/
lazy val dim1 = d1
/** Dimension 2
*/
lazy val dim2 = d2
if (v == null) {
v = Array.ofDim [$BASE] (dim1, dim2)
} else if (dim1 != v.length || dim2 != v(0).length) {
flaw ("constructor", "dimensions are wrong")
} // if
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Construct a 'dim1' by 'dim1' square matrix.
* @param dim1 the row and column dimension
*/
def this (dim1: Int) { this (dim1, dim1) }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Construct a 'dim1' by 'dim2' matrix and assign each element the value 'x'.
* @param dim1 the row dimension
* @param dim2 the column dimesion
* @param x the scalar value to assign
*/
def this (dim1: Int, dim2: Int, x: $BASE)
{
this (dim1, dim2)
for (i <- range1; j <- range2) v(i)(j) = x
} // constructor
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Construct a matrix and assign values from array of arrays 'u'.
* @param u the 2D array of values to assign
*/
def this (u: Array [Array [$BASE]]) { this (u.length, u(0).length, u) }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Construct a matrix from repeated values.
* @param dim the (row, column) dimensions
* @param u the repeated values
*/
def this (dim: Tuple2 [Int, Int], u: $BASE*)
{
this (dim._1, dim._2)
for (i <- range1; j <- range2) v(i)(j) = u(i * dim2 + j)
} // constructor
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Construct a matrix and assign values from matrix 'b'.
* @param b the matrix of values to assign
*/
def this (b: $MATRIX)
{
this (b.d1, b.d2)
for (i <- range1; j <- range2) v(i)(j) = b.v(i)(j)
} // constructor
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Get 'this' matrix's element at the 'i,j'-th index position.
* @param i the row index
* @param j the column index
*/
def apply (i: Int, j: Int): $BASE = v(i)(j)
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Get 'this' matrix's vector at the 'i'-th index position ('i'-th row).
* @param i the row index
*/
def apply (i: Int): $VECTOR = new $VECTOR (v(i))
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Get a slice 'this' matrix row-wise on range 'ir' and column-wise on range 'jr'.
* Ex: b = a(2..4, 3..5)
* @param ir the row range
* @param jr the column range
*/
def apply (ir: Range, jr: Range): $MATRIX = slice (ir.start, ir.end, jr.start, jr.end)
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Set 'this' matrix's element at the 'i,j'-th index position to the scalar 'x'.
* @param i the row index
* @param j the column index
* @param x the scalar value to assign
*/
def update (i: Int, j: Int, x: $BASE) { v(i)(j) = x }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Set 'this' matrix's row at the 'i'-th index position to the vector 'u'.
* @param i the row index
* @param u the vector value to assign
*/
def update (i: Int, u: $VECTOR) { v(i) = u() }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Set a slice 'this' matrix row-wise on range ir and column-wise on range 'jr'.
* Ex: a(2..4, 3..5) = b
* @param ir the row range
* @param jr the column range
* @param b the matrix to assign
*/
def update (ir: Range, jr: Range, b: $MATRI)
{
if (b.isInstanceOf [$MATRIX]) {
val bb = b.asInstanceOf [$MATRIX]
for (i <- ir; j <- jr) v(i)(j) = bb.v(i - ir.start)(j - jr.start)
} else {
flaw ("update", "must convert b to a $MATRIX first")
} // if
} // update
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Set all the elements in 'this' matrix to the scalar 'x'.
* @param x the scalar value to assign
*/
def set (x: $BASE) { for (i <- range1; j <- range2) v(i)(j) = x }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Set all the values in 'this' matrix as copies of the values in 2D array 'u'.
* @param u the 2D array of values to assign
*/
def set (u: Array [Array [$BASE]]) { for (i <- range1; j <- range2) v(i)(j) = u(i)(j) }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Set 'this' matrix's 'i'-th row starting at column 'j' to the vector 'u'.
* @param i the row index
* @param u the vector value to assign
* @param j the starting column index
*/
def set (i: Int, u: $VECTOR, j: Int = 0) { for (k <- 0 until u.dim) v(i)(k+j) = u(k) }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Slice 'this' matrix row-wise 'from' to 'end'.
* @param from the start row of the slice (inclusive)
* @param end the end row of the slice (exclusive)
*/
def slice (from: Int, end: Int): $MATRIX =
{
new $MATRIX (end - from, dim2, v.slice (from, end))
} // slice
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Slice 'this' matrix column-wise 'from' to 'end'.
* @param from the start column of the slice (inclusive)
* @param end the end column of the slice (exclusive)
*/
def sliceCol (from: Int, end: Int): $MATRIX =
{
val c = new $MATRIX (dim1, end - from)
for (i <- c.range1; j <- c.range2) c.v(i)(j) = v(i)(j + from)
c
} // sliceCol
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Slice 'this' matrix row-wise 'r_from' to 'r_end' and column-wise 'c_from' to 'c_end'.
* @param r_from the start of the row slice
* @param r_end the end of the row slice
* @param c_from the start of the column slice
* @param c_end the end of the column slice
*/
def slice (r_from: Int, r_end: Int, c_from: Int, c_end: Int): $MATRIX =
{
val c = new $MATRIX (r_end - r_from, c_end - c_from)
for (i <- c.range1; j <- c.range2) c.v(i)(j) = v(i + r_from)(j + c_from)
c
} // slice
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Slice 'this' matrix excluding the given row and/or column.
     *  Passing 'row = dim1' keeps all rows; 'col = dim2' keeps all columns.
     *  @param row  the row to exclude (0 until dim1, set to dim1 to keep all rows)
     *  @param col  the column to exclude (0 until dim2, set to dim2 to keep all columns)
     */
    def sliceExclude (row: Int, col: Int): $MATRIX =
    {
        val c = new $MATRIX (dim1 - oneIf (row < dim1), dim2 - oneIf (col < dim2))
        for (i <- range1 if i != row) for (j <- range2 if j != col) {
            c.v(i - oneIf (i > row))(j - oneIf (j > col)) = v(i)(j)   // shift indices past the excluded row/column down by one
        } // for
        c
    } // sliceExclude
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Select rows from 'this' matrix according to the given index/basis.
     *  Note: the selected rows are shared (aliased) with 'this' matrix, not copied,
     *  so mutating the result mutates 'this'.
     *  @param rowIndex  the row index positions (e.g., (0, 2, 5))
     */
    def selectRows (rowIndex: Array [Int]): $MATRIX =
    {
        val c = new $MATRIX (rowIndex.length, dim2)
        for (i <- c.range1) c.v(i) = v(rowIndex(i))     // assigns the row array reference
        c
    } // selectRows
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Get column 'col' from the matrix, returning it as a vector.
* @param col the column to extract from the matrix
* @param from the position to start extracting from
*/
def col (col: Int, from: Int = 0): $VECTOR =
{
val u = new $VECTOR (dim1 - from)
for (i <- from until dim1) u(i-from) = v(i)(col)
u
} // col
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Set column 'col' of the matrix to a vector.
     *  Requires u.dim >= dim1 (reads u(i) for every row index i).
     *  @param col  the column to set
     *  @param u    the vector to assign to the column
     */
    def setCol (col: Int, u: $VECTOR) { for (i <- range1) v(i)(col) = u(i) }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Select columns from 'this' matrix according to the given index/basis.
     *  Unlike 'selectRows', the selected columns are copied, not aliased.
     *  Ex: Can be used to divide a matrix into a basis and a non-basis.
     *  @param colIndex  the column index positions (e.g., (0, 2, 5))
     */
    def selectCols (colIndex: Array [Int]): $MATRIX =
    {
        val c = new $MATRIX (dim1, colIndex.length)
        for (j <- c.range2) c.setCol (j, col(colIndex(j)))
        c
    } // selectCols
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Transpose 'this' matrix (rows => columns).
*/
def t: $MATRIX =
{
val b = new $MATRIX (dim2, dim1)
for (i <- b.range1; j <- b.range2) b.v(i)(j) = v(j)(i)
b
} // t
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Concatenate 'this' matrix and (row) vector 'u', i.e. append 'u' to 'this'.
     *  NOTE(review): the name ends in ':' so it is right-associative and invoked as
     *  'u +: a'; despite the usual Scala convention that '+:' prepends, here 'u'
     *  becomes the LAST row (placed at index dim1).
     *  @param u  the vector to be concatenated as the new last row in matrix
     */
    def +: (u: $VECTOR): $MATRIX =
    {
        if (u.dim != dim2) flaw ("+:", "vector does not match row dimension")
        val c = new $MATRIX (dim1 + 1, dim2)
        for (i <- c.range1) c(i) = if (i < dim1) this(i) else u
        c
    } // +:
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Concatenate 'this' matrix and (column) vector 'u', i.e. append 'u' to 'this'.
     *  NOTE(review): the name ends in ':' so it is invoked as 'u +:^ a'; 'u'
     *  becomes the LAST column (placed at index dim2).
     *  @param u  the vector to be concatenated as the new last column in matrix
     */
    def +:^ (u: $VECTOR): $MATRIX =
    {
        if (u.dim != dim1) flaw ("+:^", "vector does not match column dimension")
        val c = new $MATRIX (dim1, dim2 + 1)
        for (j <- c.range2) c.setCol (j, if (j < dim2) col (j) else u)
        c
    } // +:^
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Concatenate (row-wise) 'this' matrix and matrix 'b': 'b's rows follow 'this's.
     *  @param b  the matrix to be concatenated as the new last rows in matrix
     */
    def ++ (b: $MATRI): $MATRIX =
    {
        if (b.dim2 != dim2) flaw ("++", "matrix b does not match row dimension")
        val c = new $MATRIX (dim1 + b.dim1, dim2)
        for (i <- c.range1) c(i) = if (i < dim1) this(i) else b(i - dim1)
        c
    } // ++
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Concatenate (column-wise) 'this' matrix and matrix 'b': 'b's columns follow 'this's.
     *  @param b  the matrix to be concatenated as the new last columns in matrix
     */
    def ++^ (b: $MATRI): $MATRIX =
    {
        if (b.dim1 != dim1) flaw ("++^", "matrix b does not match column dimension")
        val c = new $MATRIX (dim1, dim2 + b.dim2)
        for (j <- c.range2) c.setCol (j, if (j < dim2) col (j) else b.col (j - dim2))
        c
    } // ++^
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Add 'this' matrix and matrix 'b'.
* @param b the matrix to add (requires leDimensions)
*/
def + (b: $MATRIX): $MATRIX =
{
val c = new $MATRIX (dim1, dim2)
for (i <- range1; j <- range2) c.v(i)(j) = v(i)(j) + b.v(i)(j)
c
} // +
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Add 'this' matrix and matrix 'b' for any type extending $MATRI
     *  (uses element access b(i, j) rather than the backing array).
     *  @param b  the matrix to add (requires leDimensions)
     */
    def + (b: $MATRI): $MATRIX =
    {
        val c = new $MATRIX (dim1, dim2)
        for (i <- range1; j <- range2) c.v(i)(j) = v(i)(j) + b(i, j)
        c
    } // +
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Add (row) vector 'u' to every row of 'this' matrix (u(j) is added to column j).
     *  @param u  the vector to add
     */
    def + (u: $VECTOR): $MATRIX =
    {
        val c = new $MATRIX (dim1, dim2)
        for (i <- range1; j <- range2) c.v(i)(j) = v(i)(j) + u(j)
        c
    } // +
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Add 'this' matrix and scalar 'x'.
* @param x the scalar to add
*/
def + (x: $BASE): $MATRIX =
{
val c = new $MATRIX (dim1, dim2)
for (i <- range1; j <- range2) c.v(i)(j) = v(i)(j) + x
c
} // +
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Add in-place matrix 'b' to 'this' matrix (mutates and returns 'this').
     *  @param b  the matrix to add (requires leDimensions)
     */
    def += (b: $MATRIX): $MATRIX =
    {
        for (i <- range1; j <- range2) v(i)(j) += b.v(i)(j)
        this
    } // +=
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Add in-place matrix 'b' (any $MATRI subtype) to 'this' matrix
     *  (mutates and returns 'this').
     *  @param b  the matrix to add (requires leDimensions)
     */
    def += (b: $MATRI): $MATRIX =
    {
        for (i <- range1; j <- range2) v(i)(j) += b(i, j)
        this
    } // +=
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Add in-place (row) vector 'u' to every row of 'this' matrix
     *  (mutates and returns 'this').
     *  @param u  the vector to add
     */
    def += (u: $VECTOR): $MATRIX =
    {
        for (i <- range1; j <- range2) v(i)(j) += u(j)
        this
    } // +=
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Add in-place scalar 'x' to every element of 'this' matrix
     *  (mutates and returns 'this').
     *  @param x  the scalar to add
     */
    def += (x: $BASE): $MATRIX =
    {
        for (i <- range1; j <- range2) v(i)(j) += x
        this
    } // +=
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** From 'this' matrix subtract matrix 'b'.
* @param b the matrix to subtract (requires leDimensions)
*/
def - (b: $MATRIX): $MATRIX =
{
val c = new $MATRIX (dim1, dim2)
for (i <- range1; j <- range2) c.v(i)(j) = v(i)(j) - b.v(i)(j)
c
} // -
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Subtract matrix 'b' (any $MATRI subtype) from 'this' matrix.
     *  @param b  the matrix to subtract (requires leDimensions)
     */
    def - (b: $MATRI): $MATRIX =
    {
        val c = new $MATRIX (dim1, dim2)
        for (i <- range1; j <- range2) c.v(i)(j) = v(i)(j) - b(i, j)
        c
    } // -
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Subtract (row) vector 'u' from every row of 'this' matrix.
     *  @param u  the vector to subtract
     */
    def - (u: $VECTOR): $MATRIX =
    {
        val c = new $MATRIX (dim1, dim2)
        for (i <- range1; j <- range2) c.v(i)(j) = v(i)(j) - u(j)
        c
    } // -
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Subtract scalar 'x' from every element of 'this' matrix.
     *  @param x  the scalar to subtract
     */
    def - (x: $BASE): $MATRIX =
    {
        val c = new $MATRIX (dim1, dim2)
        for (i <- c.range1; j <- c.range2) c.v(i)(j) = v(i)(j) - x
        c
    } // -
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Subtract in-place matrix 'b' from 'this' matrix (mutates and returns 'this').
     *  @param b  the matrix to subtract (requires leDimensions)
     */
    def -= (b: $MATRIX): $MATRIX =
    {
        for (i <- range1; j <- range2) v(i)(j) -= b.v(i)(j)
        this
    } // -=
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Subtract in-place matrix 'b' (any $MATRI subtype) from 'this' matrix
     *  (mutates and returns 'this').
     *  @param b  the matrix to subtract (requires leDimensions)
     */
    def -= (b: $MATRI): $MATRIX =
    {
        for (i <- range1; j <- range2) v(i)(j) -= b(i, j)
        this
    } // -=
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Subtract in-place (row) vector 'u' from every row of 'this' matrix
     *  (mutates and returns 'this').
     *  @param u  the vector to subtract
     */
    def -= (u: $VECTOR): $MATRIX =
    {
        for (i <- range1; j <- range2) v(i)(j) -= u(j)
        this
    } // -=
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Subtract in-place scalar 'x' from every element of 'this' matrix
     *  (mutates and returns 'this').
     *  @param x  the scalar to subtract
     */
    def -= (x: $BASE): $MATRIX =
    {
        for (i <- range1; j <- range2) v(i)(j) -= x
        this
    } // -=
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Multiply 'this' matrix by matrix 'b', transposing 'b' first so the inner
     *  loop walks both operands row-wise (better cache locality).
     *  Use 'times' method to skip the transpose step.
     *  @param b  the matrix to multiply by (requires sameCrossDimensions)
     */
    def * (b: $MATRIX): $MATRIX =
    {
        if (dim2 != b.dim1) flaw ("*", "matrix * matrix - incompatible cross dimensions")
        val c = new $MATRIX (dim1, b.dim2)
        val bt = b.t                                     // transpose the b matrix
        for (i <- range1; j <- c.range2) {
            val va = v(i); val vb = bt.v(j)              // row i of 'this', column j of 'b' (as a row of bt)
            var sum = $ZERO
            for (k <- range2) sum += va(k) * vb(k)
            c.v(i)(j) = sum
        } // for
        c
    } // *
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Multiply 'this' matrix by matrix 'b' (any $MATRI subtype), transposing 'b'
     *  to improve efficiency. Use 'times' method to skip the transpose step.
     *  @param b  the matrix to multiply by (requires sameCrossDimensions)
     */
    def * (b: $MATRI): $MATRIX =
    {
        if (dim2 != b.dim1) flaw ("*", "matrix * matrix - incompatible cross dimensions")
        val c = new $MATRIX (dim1, b.dim2)
        val bt = b.t                                     // transpose the b matrix
        for (i <- range1; j <- c.range2) {
            val va = v(i); val vb = bt(j)                // bt(j) yields row j of the transpose
            var sum = $ZERO
            for (k <- range2) sum += va(k) * vb(k)
            c.v(i)(j) = sum
        } // for
        c
    } // *
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Multiply 'this' matrix by vector 'u' (vector elements beyond 'dim2' ignored).
     *  @param u  the vector to multiply by (u.dim must be >= dim2)
     */
    def * (u: $VECTOR): $VECTOR =
    {
        if (dim2 > u.dim) flaw ("*", "matrix * vector - vector dimension too small")
        val c = new $VECTOR (dim1)
        for (i <- range1) {
            var sum = $ZERO
            for (k <- range2) sum += v(i)(k) * u(k)
            c(i) = sum
        } // for
        c
    } // *
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Multiply 'this' matrix by scalar 'x'.
* @param x the scalar to multiply by
*/
def * (x: $BASE): $MATRIX =
{
val c = new $MATRIX (dim1, dim2)
for (i <- range1; j <- range2) c.v(i)(j) = v(i)(j) * x
c
} // *
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Multiply in-place 'this' matrix by matrix 'b', transposing 'b' to improve
     *  efficiency. Use 'times_ip' method to skip the transpose step.
     *  @param b  the matrix to multiply by (requires square and sameCrossDimensions)
     */
    def *= (b: $MATRIX): $MATRIX =
    {
        if (! b.isSquare) flaw ("*=", "matrix 'b' must be square")
        if (dim2 != b.dim1) flaw ("*=", "matrix *= matrix - incompatible cross dimensions")
        val bt = b.t                                     // use the transpose of b
        for (i <- range1) {
            val row_i = new $VECTOR (dim2)               // save ith row so not overwritten
            for (j <- range2) row_i(j) = v(i)(j)         // copy values from ith row of 'this' matrix
            for (j <- range2) {
                val vb = bt.v(j)
                var sum = $ZERO
                for (k <- range2) sum += row_i(k) * vb(k)
                v(i)(j) = sum
            } // for
        } // for
        this
    } // *=
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Multiply in-place 'this' matrix by matrix 'b' (any $MATRI subtype),
     *  transposing 'b' to improve efficiency. Use 'times_ip' to skip the transpose.
     *  @param b  the matrix to multiply by (requires square and sameCrossDimensions)
     */
    def *= (b: $MATRI): $MATRIX =
    {
        if (! b.isSquare) flaw ("*=", "matrix 'b' must be square")
        if (dim2 != b.dim1) flaw ("*=", "matrix *= matrix - incompatible cross dimensions")
        val bt = b.t                                     // use the transpose of b
        for (i <- range1) {
            val row_i = new $VECTOR (dim2)               // save ith row so not overwritten
            for (j <- range2) row_i(j) = v(i)(j)         // copy values from ith row of 'this' matrix
            for (j <- range2) {
                val vb = bt(j)
                var sum = $ZERO
                for (k <- range2) sum += row_i(k) * vb(k)
                v(i)(j) = sum
            } // for
        } // for
        this
    } // *=
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Multiply in-place every element of 'this' matrix by scalar 'x'
     *  (mutates and returns 'this').
     *  @param x  the scalar to multiply by
     */
    def *= (x: $BASE): $MATRIX =
    {
        for (i <- range1; j <- range2) v(i)(j) *= x
        this
    } // *=
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Compute the dot product of 'this' matrix and vector 'u', by first transposing
     *  'this' matrix and then multiplying by 'u' (i.e., 'a dot u = a.t * u').
     *  @param u  the vector to multiply by (requires same first dimensions)
     */
    def dot (u: $VECTOR): $VECTOR =
    {
        if (dim1 != u.dim) flaw ("dot", "matrix dot vector - incompatible first dimensions")
        val c = new $VECTOR (dim2)
        val at = this.t                                  // transpose the 'this' matrix
        for (i <- range2) {
            var sum = $ZERO
            for (k <- range1) sum += at.v(i)(k) * u(k)
            c(i) = sum
        } // for
        c
    } // dot
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Multiply 'this' matrix by matrix 'b' without first transposing 'b'
     *  (column access of 'b' in the inner loop; no transpose allocation).
     *  @param b  the matrix to multiply by (requires sameCrossDimensions)
     */
    def times (b: $MATRIX): $MATRIX =
    {
        if (dim2 != b.dim1) flaw ("times", "matrix * matrix - incompatible cross dimensions")
        val c = new $MATRIX (dim1, b.dim2)
        for (i <- range1; j <- c.range2) {
            var sum = $ZERO
            for (k <- range2) sum += v(i)(k) * b.v(k)(j)
            c.v(i)(j) = sum
        } // for
        c
    } // times
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Multiply in-place 'this' matrix by matrix 'b' without first transposing 'b'.
     *  If 'b' and 'this' reference the same matrix (b == this), a copy of the
     *  'this' matrix is made first so reads are not corrupted by in-place writes.
     *  @param b  the matrix to multiply by (requires square and sameCrossDimensions)
     */
    def times_ip (b: $MATRIX)
    {
        if (! b.isSquare) flaw ("times_ip", "matrix 'b' must be square")
        if (dim2 != b.dim1) flaw ("times_ip", "matrix * matrix - incompatible cross dimensions")
        val bb = if (b == this) new $MATRIX (this) else b
        for (i <- range1) {
            val row_i = new $VECTOR (dim2)               // save ith row so not overwritten
            for (j <- range2) row_i(j) = v(i)(j)         // copy values from ith row of 'this' matrix
            for (j <- range2) {
                var sum = $ZERO
                for (k <- range2) sum += row_i(k) * bb.v(k)(j)
                v(i)(j) = sum
            } // for
        } // for
    } // times_ip
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Multiply 'this' matrix by matrix 'b' using the vector 'dot' product
     *  (concise but allocates a column vector per entry column).
     *  @param b  the matrix to multiply by (requires sameCrossDimensions)
     */
    def times_d (b: $MATRI): $MATRIX =
    {
        if (dim2 != b.dim1) flaw ("*", "matrix * matrix - incompatible cross dimensions")
        val c = new $MATRIX (dim1, b.dim2)
        for (i <- range1; j <- c.range2) c.v(i)(j) = this(i) dot b.col(j)
        c
    } // times_d
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Multiply 'this' matrix by matrix 'b' using the Strassen matrix multiplication
     *  algorithm. Both matrices ('this' and 'b') must be square. Although the
     *  algorithm is faster than the traditional cubic algorithm, it requires
     *  more memory and is often less stable (due to round-off errors).
     *  NOTE(review): for odd 'dim1', 'evenDim = dim1 + 1' and the lower/right block
     *  slices read row/column index 'dim1' -- looks out of bounds; confirm callers
     *  only pass even-dimensioned matrices.
     *  FIX: could be made more efficient using a virtual slice (vslice) method.
     *  @see http://en.wikipedia.org/wiki/Strassen_algorithm
     *  @param b  the matrix to multiply by (it has to be a square matrix)
     */
    def times_s (b: $MATRIX): $MATRIX =
    {
        if (dim2 != b.dim1) flaw ("*", "matrix * matrix - incompatible cross dimensions")
        val c = new $MATRIX (dim1, dim1)                 // allocate result matrix
        var d = dim1 / 2                                 // half dim1
        if (d + d < dim1) d += 1                         // if not even, increment by 1
        val evenDim = d + d                              // equals dim1 if even, else dim1 + 1
        // decompose to blocks (use vslice method if available)
        val a11 = slice (0, d, 0, d)
        val a12 = slice (0, d, d, evenDim)
        val a21 = slice (d, evenDim, 0, d)
        val a22 = slice (d, evenDim, d, evenDim)
        val b11 = b.slice (0, d, 0, d)
        val b12 = b.slice (0, d, d, evenDim)
        val b21 = b.slice (d, evenDim, 0, d)
        val b22 = b.slice (d, evenDim, d, evenDim)
        // compute intermediate sub-matrices (the 7 Strassen products)
        val p1 = (a11 + a22) * (b11 + b22)
        val p2 = (a21 + a22) * b11
        val p3 = a11 * (b12 - b22)
        val p4 = a22 * (b21 - b11)
        val p5 = (a11 + a12) * b22
        val p6 = (a21 - a11) * (b11 + b12)
        val p7 = (a12 - a22) * (b21 + b22)
        for (i <- c.range1; j <- c.range2) {
            c.v(i)(j) = if (i < d && j < d) p1.v(i)(j) + p4.v(i)(j)- p5.v(i)(j) + p7.v(i)(j)
                else if (i < d) p3.v(i)(j-d) + p5.v(i)(j-d)
                else if (i >= d && j < d) p2.v(i-d)(j) + p4.v(i-d)(j)
                else p1.v(i-d)(j-d) - p2.v(i-d)(j-d) + p3.v(i-d)(j-d) + p6.v(i-d)(j-d)
        } // for
        c                                                // return result matrix
    } // times_s
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Multiply 'this' matrix by vector 'u' to produce another matrix '(a_ij * u_j)'.
* E.g., multiply a matrix by a diagonal matrix represented as a vector.
* @param u the vector to multiply by
*/
def ** (u: $VECTOR): $MATRIX =
{
val c = new $MATRIX (dim1, dim2)
for (i <- range1; j <- range2) c.v(i)(j) = v(i)(j) * u(j)
c
} // **
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Multiply in-place 'this' matrix by vector 'u' column-wise to produce
     *  '(a_ij * u_j)' (mutates and returns 'this').
     *  @param u  the vector to multiply by
     */
    def **= (u: $VECTOR): $MATRIX =
    {
        for (i <- range1; j <- range2) v(i)(j) = v(i)(j) * u(j)
        this
    } // **=
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Divide 'this' matrix by scalar 'x'.
* @param x the scalar to divide by
*/
def / (x: $BASE): $MATRIX =
{
val c = new $MATRIX (dim1, dim2)
for (i <- range1; j <- range2) c.v(i)(j) = v(i)(j) / x
c
} // /
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Divide in-place every element of 'this' matrix by scalar 'x'
     *  (mutates and returns 'this').
     *  @param x  the scalar to divide by
     */
    def /= (x: $BASE): $MATRIX =
    {
        for (i <- range1; j <- range2) v(i)(j) = v(i)(j) / x
        this
    } // /=
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Raise 'this' matrix to the 'p'th power (for some integer 'p' >= 2).
* Caveat: should be replace by a divide and conquer algorithm.
* @param p the power to raise 'this' matrix to
*/
def ~^ (p: Int): $MATRIX =
{
if (p < 2) flaw ("~^", "p must be an integer >= 2")
if (! isSquare) flaw ("~^", "only defined on square matrices")
val c = new $MATRIX (dim1, dim1)
for (i <- range1; j <- range1) {
var sum = $ZERO
for (k <- range1) sum += v(i)(k) * v(k)(j)
c.v(i)(j) = sum
} // for
if (p > 2) c ~^ (p-1) else c
} // ~^
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Find the maximum element in 'this' matrix.
* @param e the ending row index (exclusive) for the search
*/
def max (e: Int = dim1): $BASE =
{
var x = v(0)(0)
for (i <- 1 until e; j <- range2 if v(i)(j) > x) x = v(i)(j)
x
} // max
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Find the minimum element in 'this' matrix.
* @param e the ending row index (exclusive) for the search
*/
def min (e: Int = dim1): $BASE =
{
var x = v(0)(0)
for (i <- 1 until e; j <- range2 if v(i)(j) < x) x = v(i)(j)
x
} // min
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Factor 'this' matrix into the product of lower and upper triangular
     *  matrices '(l, u)' using the LU Factorization algorithm. This version uses
     *  no partial pivoting and flags any (near) zero pivot it encounters.
     */
    def lud_npp: Tuple2 [$MATRIX, $MATRIX] =
    {
        val l = new $MATRIX (dim1, dim2)             // lower triangular matrix
        val u = new $MATRIX (this)                   // upper triangular matrix (a copy of this)
        for (i <- u.range1) {
            val pivot = u.v(i)(i)
            if (pivot =~ $ZERO) flaw ("lud_npp", "use lud since you have a zero pivot")
            l.v(i)(i) = $ONE
            for (j <- i + 1 until u.dim2) l.v(i)(j) = $ZERO
            for (k <- i + 1 until u.dim1) {
                val mul = u.v(k)(i) / pivot          // row multiplier for elimination
                l.v(k)(i) = mul
                for (j <- u.range2) u.v(k)(j) = u.v(k)(j) - mul * u.v(i)(j)
            } // for
        } // for
        Tuple2 (l, u)
    } // lud_npp
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Factor 'this' matrix into the product of lower and upper triangular
     *  matrices '(l, u)' using the LU Factorization algorithm. This version uses
     *  partial pivoting (row swap) only when a (near) zero pivot is encountered.
     */
    def lud: Tuple2 [$MATRIX, $MATRIX] =
    {
        val l = new $MATRIX (dim1, dim2)             // lower triangular matrix
        val u = new $MATRIX (this)                   // upper triangular matrix (a copy of this)
        for (i <- u.range1) {
            var pivot = u.v(i)(i)
            if (pivot =~ $ZERO) {
                val k = partialPivoting (u, i)       // find the maximum element below pivot
                u.swap (i, k, i)                     // swap rows i and k from column k
                pivot = u.v(i)(i)                    // reset the pivot
            } // if
            l.v(i)(i) = $ONE
            for (j <- i + 1 until u.dim2) l.v(i)(j) = $ZERO
            for (k <- i + 1 until u.dim1) {
                val mul = u.v(k)(i) / pivot          // row multiplier for elimination
                l.v(k)(i) = mul
                for (j <- u.range2) u.v(k)(j) = u.v(k)(j) - mul * u.v(i)(j)
            } // for
        } // for
        Tuple2 (l, u)
    } // lud
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Factor in-place 'this' matrix into the product of lower and upper triangular
     *  matrices '(l, u)' using the LU Factorization algorithm. This version uses
     *  partial pivoting. Note: 'this' matrix is overwritten with 'u'.
     */
    def lud_ip: Tuple2 [$MATRIX, $MATRIX] =
    {
        val l = new $MATRIX (dim1, dim2)             // lower triangular matrix
        val u = this                                 // upper triangular matrix (this, mutated in place)
        for (i <- u.range1) {
            var pivot = u.v(i)(i)
            if (pivot =~ $ZERO) {
                val k = partialPivoting (u, i)       // find the maximum element below pivot
                u.swap (i, k, i)                     // swap rows i and k from column k
                pivot = u.v(i)(i)                    // reset the pivot
            } // if
            l.v(i)(i) = $ONE
            for (j <- i + 1 until u.dim2) l.v(i)(j) = $ZERO
            for (k <- i + 1 until u.dim1) {
                val mul = u.v(k)(i) / pivot          // row multiplier for elimination
                l.v(k)(i) = mul
                for (j <- u.range2) u.v(k)(j) = u.v(k)(j) - mul * u.v(i)(j)
            } // for
        } // for
        Tuple2 (l, u)
    } // lud_ip
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Use partial pivoting to find a maximal non-zero pivot and return its row
* index, i.e., find the maximum element '(k, i)' below the pivot '(i, i)'.
* @param a the matrix to perform partial pivoting on
* @param i the row and column index for the current pivot
*/
private def partialPivoting (a: $MATRIX, i: Int): Int =
{
var max = a.v(i)(i) // initially set to the pivot
var kMax = i // initially the pivot row
for (k <- i + 1 until a.dim1 if ABS (a.v(k)(i)) > max) {
max = ABS (a.v(k)(i))
kMax = k
} // for
if (kMax == i) {
flaw ("partialPivoting", "unable to find a non-zero pivot for row " + i)
} // if
kMax
} // partialPivoting
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Solve for 'x' in the equation 'l*u*x = b' (see lud above): forward
     *  substitution on 'l', then backward substitution on 'u'.
     *  @param l  the lower triangular matrix
     *  @param u  the upper triangular matrix
     *  @param b  the constant vector
     */
    def solve (l: $MATRI, u: $MATRI, b: $VECTOR): $VECTOR =
    {
        val y = new $VECTOR (l.dim2)
        for (k <- 0 until y.dim) {                   // solve for y in l*y = b (forward substitution;
            y(k) = b(k) - (l(k) dot y)               //   relies on l having unit diagonal)
        } // for
        val x = new $VECTOR (u.dim2)
        for (k <- x.dim - 1 to 0 by -1) {            // solve for x in u*x = y (backward substitution)
            x(k) = (y(k) - (u(k) dot x)) / u(k, k)
        } // for
        x
    } // solve
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Solve for 'x' in the equation 'l*u*x = b' where 'l = this'. Requires
     *  'this' to be lower triangular (delegates to the three-argument solve).
     *  @param u  the upper triangular matrix
     *  @param b  the constant vector
     */
    def solve (u: $MATRI, b: $VECTOR): $VECTOR = solve (this, u, b)
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Solve for 'x' in the equation 'a*x = b' where 'a' is 'this' matrix,
     *  factoring first via 'lud'.
     *  NOTE(review): presumably resolves to an overload taking the '(l, u)'
     *  Tuple2 from 'lud' (not visible in this chunk) -- confirm.
     *  @param b  the constant vector.
     */
    def solve (b: $VECTOR): $VECTOR = solve (lud, b)
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Combine 'this' matrix with matrix 'b', placing them along the diagonal and
     *  filling in the bottom left and top right regions with zeros; '[this, b]'.
     *  @param b  the matrix to combine with 'this' matrix
     */
    def diag (b: $MATRI): $MATRIX =
    {
        val m = dim1 + b.dim1                        // rows of the combined matrix
        val n = dim2 + b.dim2                        // columns of the combined matrix
        val c = new $MATRIX (m, n)
        for (i <- 0 until m; j <- 0 until n) {
            c.v(i)(j) = if (i < dim1 && j < dim2) v(i)(j)            // top-left block: this
                else if (i >= dim1 && j >= dim2) b(i-dim1, j-dim2)   // bottom-right block: b
                else $ZERO                                           // off-diagonal blocks
        } // for
        c
    } // diag
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Form a matrix '[Ip, this, Iq]' where 'Ir' is an r-by-r identity matrix, by
     *  positioning the three matrices 'Ip', 'this' and 'Iq' along the diagonal.
     *  Fill the rest of the matrix with zeros.
     *  @param p  the size of identity matrix Ip
     *  @param q  the size of identity matrix Iq
     */
    def diag (p: Int, q: Int = 0): $MATRIX =
    {
        if (! isSquare) flaw ("diag", "'this' matrix must be square")
        val n = dim1 + p + q
        val c = new $MATRIX (n, n)
        for (i <- 0 until p) c.v(i)(i) = $ONE                              // Ip
        for (i <- 0 until dim1; j <- 0 until dim1) c.v(i+p)(j+p) = v(i)(j) // this
        for (i <- p + dim1 until n) c.v(i)(i) = $ONE                       // Iq
        c
    } // diag
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Get the kth diagonal of 'this' matrix. Assumes 'dim2 >= dim1'.
     *  @param k  how far above the main diagonal, e.g., (-1, 0, 1) for (sub, main, super)
     */
    def getDiag (k: Int = 0): $VECTOR =
    {
        val c = new $VECTOR (dim1 - math.abs (k))                 // diagonal length shrinks with |k|
        val (j, l) = (math.max (-k, 0), math.min (dim1-k, dim1))  // first and last+1 row on the diagonal
        for (i <- j until l) c(i-j) = v(i)(i+k)
        c
    } // getDiag
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Set the kth diagonal of 'this' matrix to the vector 'u'. Assumes 'dim2 >= dim1'.
     *  @param u  the vector to set the diagonal to
     *  @param k  how far above the main diagonal, e.g., (-1, 0, 1) for (sub, main, super)
     */
    def setDiag (u: $VECTOR, k: Int = 0)
    {
        val (j, l) = (math.max (-k, 0), math.min (dim1-k, dim1))  // first and last+1 row on the diagonal
        for (i <- j until l) v(i)(i+k) = u(i-j)
    } // setDiag
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Set the main diagonal of 'this' matrix to the scalar 'x'. Assumes 'dim2 >= dim1'.
     *  @param x  the scalar to set the diagonal to
     */
    def setDiag (x: $BASE) { for (i <- range1) v(i)(i) = x }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Invert 'this' matrix via Gauss-Jordan elimination on an identity augmentation
     *  (requires a square matrix); does not use partial pivoting, so flags zero pivots.
     */
    def inverse_npp: $MATRIX =
    {
        val b = new $MATRIX (this)                   // copy 'this' matrix into b
        val c = eye (dim1)                           // let c represent the augmentation
        for (i <- b.range1) {
            val pivot = b.v(i)(i)
            if (pivot =~ $ZERO) flaw ("inverse_npp", "use inverse since you have a zero pivot")
            for (j <- b.range2) {                    // normalize row i so the pivot becomes 1
                b.v(i)(j) /= pivot
                c.v(i)(j) /= pivot
            } // for
            for (k <- 0 until b.dim1 if k != i) {    // eliminate column i from all other rows
                val mul = b.v(k)(i)
                for (j <- b.range2) {
                    b.v(k)(j) -= mul * b.v(i)(j)
                    c.v(k)(j) -= mul * c.v(i)(j)
                } // for
            } // for
        } // for
        c
    } // inverse_npp
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Invert 'this' matrix via Gauss-Jordan elimination on an identity augmentation
     *  (requires a square matrix); uses partial pivoting when a zero pivot appears.
     */
    def inverse: $MATRIX =
    {
        val b = new $MATRIX (this)                   // copy 'this' matrix into b
        val c = eye (dim1)                           // let c represent the augmentation
        for (i <- b.range1) {
            var pivot = b.v(i)(i)
            if (pivot =~ $ZERO) {
                val k = partialPivoting (b, i)       // find the maximum element below pivot
                b.swap (i, k, i)                     // in b, swap rows i and k from column i
                c.swap (i, k, 0)                     // in c, swap rows i and k from column 0
                pivot = b.v(i)(i)                    // reset the pivot
            } // if
            for (j <- b.range2) {                    // normalize row i so the pivot becomes 1
                b.v(i)(j) /= pivot
                c.v(i)(j) /= pivot
            } // for
            for (k <- 0 until dim1 if k != i) {      // eliminate column i from all other rows
                val mul = b.v(k)(i)
                for (j <- b.range2) {
                    b.v(k)(j) -= mul * b.v(i)(j)
                    c.v(k)(j) -= mul * c.v(i)(j)
                } // for
            } // for
        } // for
        c
    } // inverse
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Invert in-place 'this' matrix (requires a square matrix) and uses partial pivoting.
* Note: this method turns the orginal matrix into the identity matrix.
* The inverse is returned and is captured by assignment.
*/
def inverse_ip: $MATRIX =
{
var b = this // use 'this' matrix for b
val c = eye (dim1) // let c represent the augmentation
for (i <- b.range1) {
var pivot = b.v(i)(i)
if (pivot =~ $ZERO) {
val k = partialPivoting (b, i) // find the maxiumum element below pivot
b.swap (i, k, i) // in b, swap rows i and k from column i
c.swap (i, k, 0) // in c, swap rows i and k from column 0
pivot = b.v(i)(i) // reset the pivot
} // if
for (j <- b.range2) {
b.v(i)(j) /= pivot
c.v(i)(j) /= pivot
} // for
for (k <- 0 until dim1 if k != i) {
val mul = b.v(k)(i)
for (j <- b.range2) {
b.v(k)(j) -= mul * b.v(i)(j)
c.v(k)(j) -= mul * c.v(i)(j)
} // for
} // for
} // for
c // return the solution
} // inverse_ip
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Use Gauss-Jordan reduction on 'this' matrix to make the left part embed an
     *  identity matrix. A constraint on this 'm-by-n' matrix is that 'n >= m'.
     *  It can be used to solve 'a * x = b': augment 'a' with 'b' and call reduce.
     *  Takes '[a | b]' to '[I | x]'. Returns a new matrix; 'this' is unchanged.
     */
    def reduce: $MATRIX =
    {
        if (dim2 < dim1) flaw ("reduce", "requires n (columns) >= m (rows)")
        val b = new $MATRIX (this)                   // copy 'this' matrix into b
        for (i <- b.range1) {
            var pivot = b.v(i)(i)
            if (pivot =~ $ZERO) {
                val k = partialPivoting (b, i)       // find the maximum element below pivot
                b.swap (i, k, i)                     // in b, swap rows i and k from column i
                pivot = b.v(i)(i)                    // reset the pivot
            } // if
            for (j <- b.range2) b.v(i)(j) /= pivot   // normalize row i so the pivot becomes 1
            for (k <- 0 until dim1 if k != i) {      // eliminate column i from all other rows
                val mul = b.v(k)(i)
                for (j <- b.range2) b.v(k)(j) -= mul * b.v(i)(j)
            } // for
        } // for
        b
    } // reduce
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Use Gauss-Jordan reduction in-place on 'this' matrix to make the left part
* embed an identity matrix. A constraint on this 'm-by-n' matrix is that 'n >= m'.
* It can be used to solve 'a * x = b': augment 'a' with 'b' and call reduce.
* Takes '[a | b]' to '[I | x]'.
*/
def reduce_ip
{
if (dim2 < dim1) flaw ("reduce", "requires n (columns) >= m (rows)")
val b = this // use 'this' matrix for b
for (i <- b.range1) {
var pivot = b.v(i)(i)
if (pivot =~ $ZERO) {
val k = partialPivoting (b, i) // find the maxiumum element below pivot
b.swap (i, k, i) // in b, swap rows i and k from column i
pivot = b.v(i)(i) // reset the pivot
} // if
for (j <- b.range2) b.v(i)(j) /= pivot
for (k <- 0 until dim1 if k != i) {
val mul = b.v(k)(i)
for (j <- b.range2) b.v(k)(j) -= mul * b.v(i)(j)
} // for
} // for
} // reduce_ip
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Clean values in 'this' matrix at or below the threshold 'thres' by setting
     *  them to zero. Iterative algorithms give approximate values and if very close
     *  to zero, may throw off other calculations, e.g., in computing eigenvectors.
     *  Note: mutates 'this' matrix in place (despite no '_ip' suffix) and returns it.
     *  @param thres     the cutoff threshold (a small value)
     *  @param relative  whether to use relative or absolute cutoff
     */
    def clean (thres: Double, relative: Boolean = true): $MATRIX =
    {
        val s = if (relative) mag else $ONE          // use matrix magnitude or 1
        for (i <- range1; j <- range2) if (ABS (v(i)(j)) <= thres * s) v(i)(j) = $ZERO
        this
    } // clean
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the (right) nullspace of 'this' 'm-by-n' matrix (requires 'n = m+1')
* by performing Gauss-Jordan reduction and extracting the negation of the
* last column augmented by 1.
* <p>
* nullspace (a) = set of orthogonal vectors v s.t. a * v = 0
* <p>
* The left nullspace of matrix 'a' is the same as the right nullspace of 'a.t'.
* FIX: need a more robust algorithm for computing nullspace (@see Fac_QR.scala).
* FIX: remove the 'n = m+1' restriction.
* @see http://ocw.mit.edu/courses/mathematics/18-06sc-linear-algebra-fall-2011/ax-b-and-the-four-subspaces
* /solving-ax-0-pivot-variables-special-solutions/MIT18_06SCF11_Ses1.7sum.pdf
*/
def nullspace: $VECTOR =
{
    if (dim2 != dim1 + 1) flaw ("nullspace", "requires n (columns) = m (rows) + 1")
    // after Gauss-Jordan reduction the last column holds -x; negate it and append 1,
    // giving a vector v with a * v = 0 (operates on a reduced copy, 'this' unchanged)
    reduce.col(dim2 - 1) * -$ONE ++ $ONE
} // nullspace
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute in-place the (right) nullspace of 'this' 'm-by-n' matrix (requires 'n = m+1')
* by performing Gauss-Jordan reduction and extracting the negation of the
* last column augmented by 1.
* <p>
* nullspace (a) = set of orthogonal vectors v s.t. a * v = 0
* <p>
* The left nullspace of matrix 'a' is the same as the right nullspace of 'a.t'.
* FIX: need a more robust algorithm for computing nullspace (@see Fac_QR.scala).
* FIX: remove the 'n = m+1' restriction.
* @see http://ocw.mit.edu/courses/mathematics/18-06sc-linear-algebra-fall-2011/ax-b-and-the-four-subspaces
* /solving-ax-0-pivot-variables-special-solutions/MIT18_06SCF11_Ses1.7sum.pdf
*/
def nullspace_ip: $VECTOR =
{
    // FIX: flaw tag said "nullspace"; report the actual method name
    if (dim2 != dim1 + 1) flaw ("nullspace_ip", "requires n (columns) = m (rows) + 1")
    reduce_ip                                 // Gauss-Jordan reduce 'this' in-place
    col(dim2 - 1) * -$ONE ++ $ONE             // negate last column and augment with 1
} // nullspace_ip
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the trace of 'this' matrix, i.e., the sum of the elements on the
* main diagonal. Should also equal the sum of the eigenvalues.
* @see Eigen.scala
*/
def trace: $BASE =
{
    if ( ! isSquare) flaw ("trace", "trace only works on square matrices")
    // accumulate the main-diagonal elements
    range1.foldLeft ($ZERO)((acc, i) => acc + v(i)(i))
} // trace
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the sum of 'this' matrix, i.e., the sum of its elements.
*/
def sum: $BASE =
{
    var total = $ZERO
    for (i <- range1) {
        for (j <- range2) total += v(i)(j)   // add every element
    } // for
    total
} // sum
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the abs sum of 'this' matrix, i.e., the sum of the absolute value
* of its elements. This is useful for comparing matrices '(a - b).sumAbs'.
*/
def sumAbs: $BASE =
{
    var total = $ZERO
    for (i <- range1) {
        for (j <- range2) total += ABS (v(i)(j))   // add |element|
    } // for
    total
} // sumAbs
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the sum of the lower triangular region of 'this' matrix.
*/
def sumLower: $BASE =
{
    var total = $ZERO
    for (i <- range1) {
        for (j <- 0 until i) total += v(i)(j)   // strictly below the main diagonal
    } // for
    total
} // sumLower
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the determinant of 'this' matrix. The value of the determinant
* indicates, among other things, whether there is a unique solution to a
* system of linear equations (a nonzero determinant).
*/
def det: $BASE =
{
    if ( ! isSquare) flaw ("det", "determinant only works on square matrices")
    var sum = $ZERO
    var b: $MATRIX = null
    // Laplace (cofactor) expansion along row 0, recursing on each minor until 1x1.
    // NOTE(review): this is O(n!) - fine for small matrices, very slow for large n.
    for (j <- range2) {
        b = sliceExclude (0, j)       // the submatrix that excludes row 0 and column j
        // cofactor sign alternates with the column index (+ for even j, - for odd j)
        sum += (if (j % 2 == 0) v(0)(j) * (if (b.dim1 == 1) b.v(0)(0) else b.det)
        else -v(0)(j) * (if (b.dim1 == 1) b.v(0)(0) else b.det))
    } // for
    sum
} // det
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Check whether 'this' matrix is rectangular (all rows have the same number
* of columns).
*/
def isRectangular: Boolean =
{
    // every row vector must have exactly dim2 elements (forall short-circuits
    // on the first mismatch, just like the original early return)
    range1.forall (i => v(i).length == dim2)
} // isRectangular
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Convert 'this' real (double precision) matrix to a string.
*/
override def toString: String =
{
    var sb = new StringBuilder ("\\n$MATRIX(")
    if (dim1 == 0) return sb.append (")").mkString    // empty matrix prints as "$MATRIX()"
    for (i <- range1) {
        for (j <- range2) {
            // fString presumably ends each value with a separator character -
            // at end of row it is overwritten with a newline+tab (TODO confirm fString format)
            sb.append (fString.format (v(i)(j)))
            if (j == dim2-1) sb.replace (sb.length-1, sb.length, "\\n\\t")
        } // for
    } // for
    // drop the trailing "\\n\\t" (plus separator) and close with ")"
    sb.replace (sb.length-3, sb.length, ")").mkString
} // toString
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Write 'this' matrix to a CSV-formatted text file with name 'fileName'.
* @param fileName the name of file to hold the data
*/
def write (fileName: String)
{
    val out = new PrintWriter (fileName)
    try {
        for (i <- range1) {
            // comma-separate the values within a row, one row per line
            for (j <- range2) { out.print (v(i)(j)); if (j < dim2-1) out.print (",") }
            out.println ()
        } // for
    } finally {
        out.close ()               // FIX: close even if a write fails (writer was leaked on error)
    }
} // write
} // $MATRIX class
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `$MATRIX` companion object provides operations for `$MATRIX` that don't require
* 'this' (like static methods in Java). It provides factory methods for building
* matrices from files or vectors.
*/
object $MATRIX extends Error
{
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Create a matrix and assign values from the array of vectors 'u'.
* @param u the array of vectors to assign
* @param columnwise whether the vectors are treated as column or row vectors
*/
def apply (u: Array [$VECTOR], columnwise: Boolean = true): $MATRIX =
{
    val u_dim = u(0).dim
    if (columnwise) {
        val x = new $MATRIX (u_dim, u.length)
        for (j <- u.indices) x.setCol (j, u(j))      // assign column vectors
        x
    } else {
        val x = new $MATRIX (u.length, u_dim)
        // FIX: row loop iterated '0 until u_dim' - wrong bound whenever
        // u.length != u_dim (missed rows or IndexOutOfBounds); iterate over the
        // u.length row vectors instead
        for (i <- u.indices) x(i) = u(i)             // assign row vectors
        x
    } // if
} // apply
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Create a matrix and assign values from the Scala `Vector` of vectors 'u'.
* Assumes vectors are columwise.
* @param u the Vector of vectors to assign
*/
def apply (u: Vector [$VECTOR]): $MATRIX =
{
    // rows = dimension of the first vector, one column per vector in 'u'
    val x = new $MATRIX (u(0).dim, u.length)
    for (j <- u.indices) x.setCol (j, u(j))
    x
} // apply
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Create a matrix by reading from a text file, e.g., a CSV file.
* @param fileName the name of file holding the data
*/
def apply (fileName: String): $MATRIX =
{
    val sp = ','                                       // character separating the values
    val src = fromFile (fileName)
    try {
        val lines = src.getLines.toArray               // get the lines from file
        val (m, n) = (lines.length, lines(0).split (sp).length)
        val x = new $MATRIX (m, n)
        for (i <- 0 until m) x(i) = $VECTOR (lines(i).split (sp))
        x
    } finally {
        src.close ()                                   // FIX: source was never closed (leak)
    }
} // apply
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Create an 'm-by-n' identity matrix I (ones on main diagonal, zeros elsewhere).
* If 'n' is <= 0, set it to 'm' for a square identity matrix.
* @param m the row dimension of the matrix
* @param n the column dimension of the matrix (defaults to 0 => square matrix)
*/
def eye (m: Int, n: Int = 0): $MATRIX =
{
    val nn = if (n <= 0) m else n                      // square matrix, if n <= 0
    val c = new $MATRIX (m, nn)
    // main diagonal length is the smaller of the two dimensions
    for (i <- 0 until math.min (m, nn)) c.v(i)(i) = $ONE
    c
} // eye
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Concatenate (row) vector 'u' and matrix 'a', i.e., prepend 'u'.
* @param u the vector to be concatenated as the new first row in matrix
*/
def :+ (u: $VECTOR, a: $MATRIX): $MATRIX =
{
    if (u.dim != a.dim2) flaw (":+", "vector does not match row dimension")
    val c = new $MATRIX (a.dim1 + 1, a.dim2)
    c(0) = u                                           // u becomes the new first row
    for (i <- 1 until c.dim1) c(i) = a(i - 1)          // old rows shift down by one
    c
} // :+
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Concatenate (column) vector 'u' and matrix 'a', i.e., prepend 'u'.
* @param u the vector to be concatenated as the new first column in matrix
*/
def :+^ (u: $VECTOR, a: $MATRIX): $MATRIX =
{
    if (u.dim != a.dim1) flaw (":+^", "vector does not match column dimension")
    val c = new $MATRIX (a.dim1, a.dim2 + 1)
    c.setCol (0, u)                                    // u becomes the new first column
    for (j <- 1 until c.dim2) c.setCol (j, a.col (j - 1))   // old columns shift right
    c
} // :+^
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Concatenate (row) vectors 'u' and 'w' to form a matrix with 2 rows.
* @param u the vector to be concatenated as the new first row in matrix
* @param w the vector to be concatenated as the new second row in matrix
*/
def ++ (u: $VECTOR, w: $VECTOR): $MATRIX =
{
    if (u.dim != w.dim) flaw ("++", "vector dimensions do not match")
    val c = new $MATRIX (2, u.dim)
    c(0) = u; c(1) = w                                 // u -> row 0, w -> row 1
    c
} // ++
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Concatenate (column) vectors 'u' and 'w' to form a matrix with 2 columns.
* @param u the vector to be concatenated as the new first column in matrix
* @param w the vector to be concatenated as the new second column in matrix
*/
def ++^ (u: $VECTOR, w: $VECTOR): $MATRIX =
{
    if (u.dim != w.dim) flaw ("++^", "vector dimensions do not match")
    val c = new $MATRIX (u.dim, 2)
    c.setCol (0, u); c.setCol (1, w)                   // u -> column 0, w -> column 1
    c
} // ++^
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Multiply vector 'u' by matrix 'a'. Treat 'u' as a row vector.
* @param u the vector to multiply by
* @param a the matrix to multiply by (requires sameCrossDimensions)
*/
def times (u: $VECTOR, a: $MATRIX): $VECTOR =
{
    if (u.dim != a.dim1) flaw ("times", "vector * matrix - incompatible cross dimensions")
    val c = new $VECTOR (a.dim2)
    for (j <- a.range2) {
        var acc = $ZERO
        for (k <- a.range1) acc += u(k) * a.v(k)(j)    // dot product of u with column j
        c(j) = acc
    } // for
    c
} // times
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the outer product of vector 'x' and vector 'y'. The result of the
* outer product is a matrix where 'c(i, j)' is the product of 'i'-th element
* of 'x' with the 'j'-th element of 'y'.
* @param x the first vector
* @param y the second vector
*/
def outer (x: $VECTOR, y: $VECTOR): $MATRIX =
{
    val c = new $MATRIX (x.dim, y.dim)
    for (i <- 0 until x.dim) {
        for (j <- 0 until y.dim) c(i, j) = x(i) * y(j)   // c(i,j) = x_i * y_j
    } // for
    c
} // outer
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Form a matrix from two vectors 'x' and 'y', row-wise.
* @param x the first vector -> row 0
* @param y the second vector -> row 1
*/
def form_rw (x: $VECTOR, y: $VECTOR): $MATRIX =
{
    if (x.dim != y.dim) flaw ("form_rw", "dimensions of x and y must be the same")
    val c = new $MATRIX (2, x.dim)                     // two rows, x.dim columns
    c(0) = x                                           // x -> row 0
    c(1) = y                                           // y -> row 1
    c
} // form_rw
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Form a matrix from scalar 'x' and a vector 'y', row-wise.
* @param x the first scalar -> row 0 (repeat scalar)
* @param y the second vector -> row 1
*/
def form_rw (x: $BASE, y: $VECTOR): $MATRIX =
{
    val c = new $MATRIX (2, y.dim)
    for (j <- 0 until y.dim) c(0, j) = x               // repeat scalar x across row 0
    c(1) = y                                           // y -> row 1
    c
} // form_rw
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Form a matrix from a vector 'x' and a scalar 'y', row-wise.
* @param x the first vector -> row 0
* @param y the second scalar -> row 1 (repeat scalar)
*/
def form_rw (x: $VECTOR, y: $BASE): $MATRIX =
{
    val c = new $MATRIX (2, x.dim)
    c(0) = x                                           // x -> row 0
    for (j <- 0 until x.dim) c(1, j) = y               // repeat scalar y across row 1
    c
} // form_rw
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Form a matrix from two vectors 'x' and 'y', column-wise.
* @param x the first vector -> column 0
* @param y the second vector -> column 1
*/
def form_cw (x: $VECTOR, y: $VECTOR): $MATRIX =
{
    if (x.dim != y.dim) flaw ("form_cw", "dimensions of x and y must be the same")
    val c = new $MATRIX (x.dim, 2)                     // x.dim rows, two columns
    c.setCol (0, x)                                    // x -> column 0
    c.setCol (1, y)                                    // y -> column 1
    c
} // form_cw
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Form a matrix from a scalar 'x' and a vector 'y', column-wise.
* @param x the first scalar -> column 0 (repeat scalar)
* @param y the second vector -> column 1
*/
def form_cw (x: $BASE, y: $VECTOR): $MATRIX =
{
    val c = new $MATRIX (y.dim, 2)
    for (i <- 0 until y.dim) c(i, 0) = x               // repeat scalar x down column 0
    c.setCol (1, y)                                    // y -> column 1
    c
} // form_cw
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Form a matrix from a vector 'x' and a scalar 'y', column-wise.
* @param x the first vector -> column 0
* @param y the second scalar -> column 1 (repeat scalar)
*/
def form_cw (x: $VECTOR, y: $BASE): $MATRIX =
{
    val c = new $MATRIX (x.dim, 2)
    c.setCol (0, x)                                    // x -> column 0
    for (i <- 0 until x.dim) c(i, 1) = y               // repeat scalar y down column 1
    c
} // form_cw
} // $MATRIX companion object
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `${MATRIX}Test` object tests the operations provided by `$MATRIX` class.
* > run-main scalation.linalgebra.${MATRIX}Test
*/
object ${MATRIX}Test extends App with PackageInfo
{
    // element-wise arithmetic on small square matrices of increasing size
    for (l <- 1 to 4) {
        println ("\\n\\tTest $MATRIX on real matrices of dim " + l)
        val x = new $MATRIX (l, l)
        val y = new $MATRIX (l, l)
        x.set (2)
        y.set (3)
        println ("x + y = " + (x + y))
        println ("x - y = " + (x - y))
        println ("x * y = " + (x * y))
        println ("x * 4 = " + (x * 4))
    } // for

    println ("\\n\\tTest $MATRIX on additional operations")

    val z = new $MATRIX ((2, 2), 1, 2,
                                 3, 2)
    val t = new $MATRIX ((3, 3), 1, 2, 3,
                                 4, 3, 2,
                                 1, 3, 1)
    val zz = new $MATRIX ((3, 3), 3, 1, 0,
                                  1, 4, 2,
                                  0, 2, 5)
    val bz = $VECTOR (5, 3, 6)
    val b = $VECTOR (8, 7)
    val lu = z.lud                       // LU factorization (with partial pivoting)
    val lu2 = z.lud_npp                  // LU factorization (no partial pivoting)

    println ("z = " + z)
    println ("z.t = " + z.t)
    println ("z.lud = " + lu)
    println ("z.lud_npp = " + lu2)
    println ("z.solve = " + z.solve (lu._1, lu._2, b))
    println ("zz.solve = " + zz.solve (zz.lud, bz))
    println ("z.inverse = " + z.inverse)
    println ("z.inverse_ip = " + z.inverse_ip)
    println ("t.inverse = " + t.inverse)
    println ("t.inverse_ip = " + t.inverse_ip)
    println ("z.inv * b = " + z.inverse * b)
    println ("z.det = " + z.det)
    println ("z = " + z)
    z *= z                               // in-place matrix multiplication
    println ("z squared = " + z)

    // nullspace tests need an m-by-(m+1) matrix (see nullspace precondition)
    val w = new $MATRIX ((2, 3), 2, 3, 5,
                                 -4, 2, 3)
    val v = new $MATRIX ((3, 2), 2, -4,
                                 3, 2,
                                 5, 3)
    println ("w = " + w)
    println ("v = " + v)
    println ("w.reduce = " + w.reduce)
    println ("right: w.nullspace = " + w.nullspace)
    println ("check right nullspace = " + w * w.nullspace)       // should be ~0
    println ("left: v.t.nullspace = " + v.t.nullspace)
    println ("check left nullspace = " + $MATRIX.times (v.t.nullspace, v))   // should be ~0
    for (row <- z) println ("row = " + row.deep)

    val aa = new $MATRIX ((3, 2), 1, 2,
                                  3, 4,
                                  5, 6)
    val bb = new $MATRIX ((2, 2), 1, 2,
                                  3, 4)
    println ("aa = " + aa)
    println ("bb = " + bb)
    println ("aa * bb = " + aa * bb)
    aa *= bb                             // in-place multiply (aa keeps its 3x2 shape)
    println ("aa *= bb = " + aa)
    println ("aa dot bz = " + (aa dot bz))
    println ("aa.t * bz = " + aa.t * bz)

    // round-trip: write to CSV, read back and print
    val filename = getDataPath + "bb_matrix.csv"
    bb.write (filename)
    println ("bb_csv = " + $MATRIX (filename))
} // ${MATRIX}Test object
"""
// Ending of string holding code template --------------------------------------
// println (code)
val writer = new PrintWriter (new File (DIR + _l + MATRIX + ".scalaa"))
writer.write (code)
writer.close ()
} // for
} // BldMatrix object
|
mvnural/scalation
|
src/main/scala/scalation/linalgebra/bld/BldMatrix.scala
|
Scala
|
mit
| 66,249 |
/*
* Copyright 2015 Stephan Rehfeld
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package test.scaladelray.light
import org.scalatest.FunSpec
import scaladelray.{Color, World}
import scaladelray.geometry.{Sphere, Hit, Geometry}
import scaladelray.light.DirectionalLight
import scaladelray.math.{Ray, Vector3, Point3}
class DirectionalLightSpec extends FunSpec {

    describe( "A DirectionalLight" ) {
        // A directional light has direction but no position: with no occluding
        // geometry in the world, every point must be illuminated.
        it( "should radiate all points" ) {
            val w = new World( Color( 0, 0, 0 ), Set[Geometry]() )
            val l = new DirectionalLight( Color( 1, 1, 1 ), Vector3( 0, -1, 0 ) )
            val points = Point3( 1, 0, 0 ) :: Point3( 0, 1, 0 ) :: Point3( 0, 0, 1 ) :: Point3( -1, 0, 0 ) :: Point3( 0, -1, 0 ) :: Point3( 0, 0, -1 ) :: Nil
            for( p <- points )
                for( b <- l.illuminates( p, w ) )
                    assert( b )
        }
        it( "should return itself when createLight is called." ) {
            val l = new DirectionalLight( Color( 1, 1, 1 ), Vector3( 0, -1, 0 ) )
            assert( l == l.createLight )
        }
        // NOTE(review): test name says "point light" - looks copy-pasted from the
        // point-light spec; the subject here is a directional light.
        it( "should check the world if an object is between the point and the point light" ) {
            var called = false
            // anonymous World subclass that records whether the shadow ray was traced
            val w = new World( Color( 0, 0, 0 ), Set[Geometry]() ) {
                override def <--( r : Ray ) : Set[Hit] = {
                    called = true
                    Set[Hit]()
                }
            }
            val l = new DirectionalLight( Color( 1, 1, 1 ), Vector3( 0, -1, 0 ) )
            l.illuminates( Point3( 3, 3, 3 ), w )
            assert( called )
        }
        it( "should return false if an object is between the point and the light" ) {
            val directions = Vector3( 1, 0, 0 ) :: Vector3( 0, 1, 0 ) :: Vector3( 0, 0, 1 ) :: Vector3( -1, 0, 0 ) :: Vector3( 0, -1, 0 ) :: Vector3( 0, 0, -1 ) :: Nil
            // unit sphere at the origin occludes each test point from the light
            val s = new Sphere( null )
            val w = new World( Color( 0, 0, 0 ), Set() + s )
            for( d <- directions ) {
                val l = new DirectionalLight( Color( 1, 1, 1 ), d )
                val p = (d * 2).asPoint            // point 2 units out, behind the sphere
                for( b <- l.illuminates( p, w ) )
                    assert( !b )
            }
        }
        it( "should only have one sampling point") {
            val l = new DirectionalLight( Color( 1, 1, 1 ), Vector3( 0, -1, 0 ) )
            assert( l.samplingPoints == 1 )
        }
        // directionFrom must be the constant -d regardless of the queried point
        it( "should always return the same direction") {
            val directions = Vector3( 1, 0, 0 ) :: Vector3( 0, 1, 0 ) :: Vector3( 0, 0, 1 ) :: Vector3( -1, 0, 0 ) :: Vector3( 0, -1, 0 ) :: Vector3( 0, 0, -1 ) :: Nil
            val points = for( d <- directions ) yield d.asPoint
            for( d <- directions )
                for( p <- points ) {
                    val l = new DirectionalLight( Color( 1, 1, 1 ), d )
                    for( dd <- l.directionFrom( p ) ) assert( dd == -d )
                }
        }
    }
}
|
stephan-rehfeld/scaladelray
|
src/test/scala/test/scaladelray/light/DirectionalLightSpec.scala
|
Scala
|
apache-2.0
| 3,159 |
package nest.sparkle.store.cassandra
import scala.collection.JavaConverters.asScalaBufferConverter
import scala.reflect.runtime.universe._
import scala.util.Try
import java.nio.ByteBuffer
import com.typesafe.config.Config
import spray.json.JsValue
import nest.sparkle.store.cassandra.serializers._
import nest.sparkle.util.{Instance, GenericFlags}
import nest.sparkle.util.OptionConversion._
case class CanSerializeNotFound(msg: String) extends RuntimeException(msg)
/** Dynamically get a CanSerialize instance from a TypeTag. */
class RecoverCanSerialize(sparkleConfig: Config) {
  // built-in serializer instances for the standard supported types
  private val basicCanSerializers: Map[TypeTag[_], CanSerialize[_]] = Map(
    typeToCanSerialize[Boolean],
    typeToCanSerialize[Short],
    typeToCanSerialize[Int],
    typeToCanSerialize[Long],
    typeToCanSerialize[Double],
    typeToCanSerialize[Char],
    typeToCanSerialize[String],
    typeToCanSerialize[JsValue],
    typeToCanSerialize[GenericFlags],
    typeToCanSerialize[ByteBuffer]
  )

  /** mapping from typeTag to CanSerialize for standard types plus any configured extras */
  val canSerializers = makeCanSerializers

  /** return a CanSerialize instance at runtime based a typeTag, or None if unregistered. */
  def optCanSerialize[T](targetTag: TypeTag[_]) // format: OFF
      : Option[CanSerialize[T]] = { // format: ON
    val untypedCanSerialize = canSerializers.get(targetTag)
    // cast is unchecked: caller is responsible for T matching targetTag
    untypedCanSerialize.asInstanceOf[Option[CanSerialize[T]]]
  }

  /** return a CanSerialize instance at runtime based a typeTag,
    * failing with CanSerializeNotFound if unregistered. */
  def tryCanSerialize[T](implicit targetTag: TypeTag[_]): Try[CanSerialize[T]] = {
    val untyped: Try[CanSerialize[_]] = canSerializers.get(targetTag).toTryOr(
      CanSerializeNotFound(targetTag.tpe.toString))
    untyped.asInstanceOf[Try[CanSerialize[T]]]
  }

  /** return a mapping from a typetag to a can Serialize */
  private def typeToCanSerialize[T: TypeTag: CanSerialize]: (TypeTag[T], CanSerialize[T]) = {
    typeTag[T] -> implicitly[CanSerialize[T]]
  }

  /** build the full map: config-declared serializers (loaded reflectively by
    * class name) take precedence positions ahead of the built-in ones */
  private def makeCanSerializers: Map[TypeTag[_], CanSerialize[_]] = {
    CanSerializeConfigUtil.makeWithConfig(sparkleConfig, basicCanSerializers) { (typeTagClassName: String, canSerializerClassName: String) =>
      // local type parameter U pins the tag and serializer to the same (unknown) type
      def withFixedType[U]() = {
        val typeTag = Instance.typeTagByClassName[U](typeTagClassName)
        val canSerializer = Instance.objectByClassName[CanSerialize[U]](canSerializerClassName)
        typeToCanSerialize(typeTag, canSerializer)   // explicit implicit arguments
      }
      withFixedType()
    }
  }
}
/** Helpers that merge built-in serializer entries with entries declared in the
  * `sparkle-store-cassandra.serializers` section of the config (if present). */
object CanSerializeConfigUtil {

  /** Map variant: delegates to the Seq variant and rebuilds the Map. */
  def makeWithConfig[X, Y](sparkleConfig: Config, basic: Map[X, Y])
      (fn: (String, String) => (X, Y)): Map[X, Y] = {
    makeWithConfig(sparkleConfig, basic.toSeq)(fn).toMap
  }

  /** For each configured serializer entry, call fn with the entry's "type" and
    * "type-serializer" class names; configured entries are prepended to 'basic'. */
  def makeWithConfig[X](sparkleConfig: Config, basic: Seq[X])
      (fn: (String, String) => X): Seq[X] = {
    if (sparkleConfig.hasPath("sparkle-store-cassandra.serializers")) {
      sparkleConfig.getConfigList("sparkle-store-cassandra.serializers").asScala.toSeq.map { serializerConfig =>
        fn(serializerConfig.getString("type"), serializerConfig.getString("type-serializer"))
      } ++ basic
    } else {
      basic    // no serializers section configured: just the built-ins
    }
  }
}
|
mighdoll/sparkle
|
store/src/main/scala/nest/sparkle/store/cassandra/RecoverCanSerialize.scala
|
Scala
|
apache-2.0
| 3,112 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.models.resnet
import java.nio.ByteBuffer
import java.nio.file.{Files, Path, Paths}
import com.intel.analytics.bigdl.dllib.feature.dataset.ByteRecord
import com.intel.analytics.bigdl.dllib.utils.File
import scopt.OptionParser
import scala.collection.mutable.ArrayBuffer
object Utils {
/** Command-line options for training ResNet (defaults match the CLI help text below). */
case class TrainParams(
  folder: String = "./",                    // location of the training files
  checkpoint: Option[String] = None,        // where to cache the model during training
  modelSnapshot: Option[String] = None,     // model snapshot to resume from
  stateSnapshot: Option[String] = None,     // optimizer-state snapshot to resume from
  optnet: Boolean = false,                  // share gradients/caches to reduce memory
  depth: Int = 20,                          // ResNet depth
  classes: Int = 10,                        // number of output classes
  shortcutType: String = "A",               // ResNet shortcut type: A | B | C
  batchSize: Int = 128,
  nepochs: Int = 165,                       // number of training epochs
  learningRate: Double = 0.1,
  weightDecay: Double = 1e-4,
  momentum: Double = 0.9,
  dampening: Double = 0.0,
  nesterov: Boolean = true,
  graphModel: Boolean = false,              // use graph model instead of sequential
  warmupEpoch: Int = 0,                     // epochs of learning-rate warmup
  maxLr: Double = 0.0,                      // peak learning rate after warmup
  optimizerVersion: Option[String] = None)  // state optimizer version
/** scopt parser that maps CLI flags onto TrainParams (see the case class for defaults). */
val trainParser = new OptionParser[TrainParams]("BigDL ResNet Example") {
  head("Train ResNet model on single node")
  opt[String]('f', "folder")
    .text("where you put your training files")
    .action((x, c) => c.copy(folder = x))
  opt[String]("model")
    .text("model snapshot location")
    .action((x, c) => c.copy(modelSnapshot = Some(x)))
  opt[String]("state")
    .text("state snapshot location")
    .action((x, c) => c.copy(stateSnapshot = Some(x)))
  opt[String]("cache")
    .text("where to cache the model")
    .action((x, c) => c.copy(checkpoint = Some(x)))
  opt[Boolean]("optnet")
    .text("shared gradients and caches to reduce memory usage")
    .action((x, c) => c.copy(optnet = x))
  opt[Int]("depth")
    .text("depth of ResNet, 18 | 20 | 34 | 50 | 101 | 152 | 200")
    .action((x, c) => c.copy(depth = x))
  opt[Int]("classes")
    .text("classes of ResNet")
    .action((x, c) => c.copy(classes = x))
  opt[String]("shortcutType")
    .text("shortcutType of ResNet, A | B | C")
    .action((x, c) => c.copy(shortcutType = x))
  opt[Int]("batchSize")
    .text("batchSize of ResNet, 64 | 128 | 256 | ..")
    .action((x, c) => c.copy(batchSize = x))
  opt[Int]("nEpochs")
    .text("number of epochs of ResNet; default is 165")
    .action((x, c) => c.copy(nepochs = x))
  opt[Double]("learningRate")
    .text("initial learning rate of ResNet; default is 0.1")
    .action((x, c) => c.copy(learningRate = x))
  opt[Double]("momentum")
    .text("momentum of ResNet; default is 0.9")
    .action((x, c) => c.copy(momentum = x))
  opt[Double]("weightDecay")
    .text("weightDecay of ResNet; default is 1e-4")
    .action((x, c) => c.copy(weightDecay = x))
  opt[Double]("dampening")
    .text("dampening of ResNet; default is 0.0")
    .action((x, c) => c.copy(dampening = x))
  opt[Boolean]("nesterov")
    .text("nesterov of ResNet; default is true")  // FIX: help-text typo "trye"
    .action((x, c) => c.copy(nesterov = x))
  opt[Unit]('g', "graphModel")
    .text("use graph model")
    .action((_, c) => c.copy(graphModel = true))  // flag option: parsed value unused
  opt[Int]("warmupEpoch")
    .text("warmup epoch")
    .action((x, c) => c.copy(warmupEpoch = x))
  opt[Double]("maxLr")
    .text("maxLr")
    .action((x, c) => c.copy(maxLr = x))
  opt[String]("optimizerVersion")
    .text("state optimizer version")
    .action((x, c) => c.copy(optimizerVersion = Some(x)))
}
/** Command-line options for evaluating a trained ResNet model. */
case class TestParams(
  folder: String = "./",     // location of the Cifar10 dataset
  model: String = "",        // location of the model snapshot (required)
  batchSize: Int = 128
)
/** scopt parser that maps CLI flags onto TestParams. */
val testParser = new OptionParser[TestParams]("BigDL ResNet on Cifar10 Test Example") {
  opt[String]('f', "folder")
    .text("the location of Cifar10 dataset")
    .action((x, c) => c.copy(folder = x))
  opt[String]('m', "model")
    .text("the location of model snapshot")
    .action((x, c) => c.copy(model = x))
    .required()                              // FIX: .required() was duplicated
  opt[Int]('b', "batchSize")
    .text("batch size")
    .action((x, c) => c.copy(batchSize = x))
}
/** Load all five CIFAR-10 training batch files from 'dataFile' into ByteRecords. */
private[bigdl] def loadTrain(dataFile: String): Array[ByteRecord] = {
  val result = new ArrayBuffer[ByteRecord]()
  for (batch <- 1 to 5) {
    load(s"$dataFile/data_batch_$batch.bin", result)
  }
  result.toArray
}
/** Load the CIFAR-10 test batch file from 'dataFile' into ByteRecords. */
private[bigdl] def loadTest(dataFile: String): Array[ByteRecord] = {
  val result = new ArrayBuffer[ByteRecord]()
  load(dataFile + "/test_batch.bin", result)
  result.toArray
}
/**
* load cifar data.
* read cifar from hdfs if data folder starts with "hdfs:", otherwise form local file.
* @param featureFile
* @param result
*/
private[bigdl] def load(featureFile: String, result : ArrayBuffer[ByteRecord]): Unit = {
  val rowNum = 32                                 // CIFAR-10 images are 32x32
  val colNum = 32
  val imageOffset = rowNum * colNum * 3 + 1       // record size: 1 label byte + 3 channel planes
  val channelOffset = rowNum * colNum             // size of one channel plane
  val bufferOffset = 8                            // output header: two 4-byte ints (rows, cols)
  val featureBuffer = if (featureFile.startsWith(File.hdfsPrefix)) {
    ByteBuffer.wrap(File.readHdfsByte(featureFile))
  } else {
    ByteBuffer.wrap(Files.readAllBytes(Paths.get(featureFile)))
  }
  val featureArray = featureBuffer.array()
  // number of complete records in the file (same expression as imageOffset)
  val featureCount = featureArray.length / (rowNum * colNum * 3 + 1)
  var i = 0
  while (i < featureCount) {
    // output record: [rows:int][cols:int][interleaved pixel bytes]
    val img = new Array[Byte]((rowNum * colNum * 3 + bufferOffset))
    val byteBuffer = ByteBuffer.wrap(img)
    byteBuffer.putInt(rowNum)
    byteBuffer.putInt(colNum)
    val label = featureArray(i * imageOffset).toFloat   // first byte of the record is the label
    var y = 0
    val start = i * imageOffset + 1                      // pixel data starts after the label byte
    while (y < rowNum) {
      var x = 0
      while (x < colNum) {
        // CIFAR stores planar channels; interleave them per pixel, with the
        // first plane written at offset +2 and the third at +0 (reversed order)
        img((x + y * colNum) * 3 + 2 + bufferOffset) =
          featureArray(start + x + y * colNum)
        img((x + y * colNum) * 3 + 1 + bufferOffset) =
          featureArray(start + x + y * colNum + channelOffset)
        img((x + y * colNum) * 3 + bufferOffset) =
          featureArray(start + x + y * colNum + 2 * channelOffset)
        x += 1
      }
      y += 1
    }
    // labels shifted from 0-based to 1-based for BigDL
    result.append(ByteRecord(img, label + 1.0f))
    i += 1
  }
}
}
|
intel-analytics/BigDL
|
scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/models/resnet/Utils.scala
|
Scala
|
apache-2.0
| 6,839 |
package vn.myfeed.parser.h2
import scala.slick.driver.H2Driver.simple._
/**
* The Class Db.
*
* @author Nguyen Duc Dung
* @since 7/1/13 7:15 AM
*
*/
object Db {
  // H2 database backed by the file parser/test/resources/article
  // NOTE(review): the path is relative, so this depends on the process working
  // directory - confirm it matches how the tests are launched.
  def database = Database.forURL("jdbc:h2:parser/test/resources/article", driver = "org.h2.Driver")
}
|
SunriseSoftVN/hayhayblog
|
parser/app/vn/myfeed/parser/h2/Db.scala
|
Scala
|
gpl-2.0
| 272 |
/*
* A real-time collaborative tool to develop files over the network.
* Copyright (C) 2010 Mauro Ciancio and Leandro Gilioli
* {maurociancio,legilioli} at gmail dot com
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package ar.noxit.paralleleditor.gui
import javax.swing.event.{DocumentListener, DocumentEvent}
import javax.swing.text.PlainDocument
import swing.TextArea
/** TextArea that republishes document edits as InsertionEvent/DeletionEvent,
  * with the ability to suppress event firing (e.g. while applying remote edits). */
class NotificationEditPane extends TextArea {
  // when false, the underlying document swallows insert/remove notifications
  // NOTE(review): read/written from listener callbacks; assumes all access is
  // on the Swing EDT - confirm (field is not volatile).
  private var fireEvents = true

  private val doc = new PlainDocument {
    override def fireRemoveUpdate(e: DocumentEvent) = if (fireEvents) super.fireRemoveUpdate(e)

    override def fireInsertUpdate(e: DocumentEvent) = if (fireEvents) super.fireInsertUpdate(e)
  }

  doc.addDocumentListener(new DocumentListener {
    def changedUpdate(e: DocumentEvent) {
      // attribute changes are ignored
    }

    def insertUpdate(e: DocumentEvent) {
      // 'text' already contains the inserted span at [offset, offset + length)
      val newText = text.substring(e.getOffset, e.getOffset + e.getLength)
      val ie = InsertionEvent(e.getOffset, newText)
      publicar(ie)
    }

    def removeUpdate(e: DocumentEvent) {
      val de = DeletionEvent(e.getOffset, e.getLength)
      publicar(de)
    }
  })

  // install the custom document AFTER the listener is wired up
  peer.setDocument(doc)

  // republish the edition event through the scala-swing publisher
  protected def publicar(e: EditionEvent): Unit = {
    publish(WrappedEvent(e))
  }

  def disableFiringEvents {
    this.fireEvents = false;
  }

  def enableFiringEvents {
    this.fireEvents = true;
  }
}
|
maurociancio/parallel-editor
|
src/parallel-editor-gui/src/main/scala/ar/noxit/paralleleditor/gui/NotificationEditPane.scala
|
Scala
|
gpl-3.0
| 2,085 |
package com.alexitc.coinalerts.services.external
import javax.inject.Inject
import com.alexitc.coinalerts.models.{Book, Currency, CurrencyName, Market}
import com.alexitc.coinalerts.tasks.models.Ticker
import org.slf4j.LoggerFactory
import play.api.libs.json.JsValue
import play.api.libs.ws.{WSClient, WSResponse}
import scala.concurrent.{ExecutionContext, Future}
/** ExchangeService backed by the public coinmarketcap.com v1 ticker API. */
class CoinmarketcapService @Inject()(ws: WSClient)(implicit ec: ExecutionContext) extends ExchangeService {

  private val logger = LoggerFactory.getLogger(this.getClass)
  private val BTCMarket = Market.BTC
  private val USDMarket = Market.USD
  private val BaseURL = "https://api.coinmarketcap.com"

  /** The books are derived from the ticker list (one request, up to 2000 entries). */
  override def availableBooks(): Future[List[Book]] = {
    getTickerList().map { ticketList =>
      ticketList.map(_.book)
    }
  }

  /** Fetch all tickers; on a non-200 response or unparsable body, log and return empty. */
  override def getTickerList(): Future[List[Ticker]] = {
    val url = s"$BaseURL/v1/ticker/?limit=2000"
    ws.url(url)
        .get()
        .map { response =>
          Option(response)
              .flatMap(toJson)
              .map { jsonList =>
                jsonList.flatMap { json =>
                  toTickerList(json)
                }
              }
              .getOrElse {
                logger.warn(s"Unexpected response from COINMARKETCAP, status = [${response.status}]")
                List.empty
              }
        }
  }

  /** Parse the body as a JSON array; None unless status is 200 and the body is a list. */
  private def toJson(response: WSResponse) = {
    if (response.status != 200) {
      None
    } else {
      response.json.asOpt[List[JsValue]]
    }
  }

  // coinmarketcap give us prices in BTC and USD
  /** Map one ticker JSON object to 0-2 Tickers (USD always; BTC unless the
    * currency itself is BTC). Returns empty on any missing/dirty field. */
  private def toTickerList(json: JsValue): List[Ticker] = {
    val result = for {
      // while symbol field would be preferred, there are collisions
      currency <- (json \\ "symbol").asOpt[String].flatMap(Currency.from)
      priceUSD <- (json \\ "price_usd").asOpt[BigDecimal]
      priceBTC <- (json \\ "price_btc").asOpt[BigDecimal]
      currencyName <- (json \\ "name")
          .asOpt[String]
          .map(_.trim)
          .filter(_.nonEmpty)
          .map(CurrencyName.apply)
    } yield {
      val tickerUSD = Ticker(Book(USDMarket, currency, Some(currencyName)), priceUSD)
      if (Market.BTC.string equalsIgnoreCase currency.string) {
        // there is no need to match BTC price against BTC
        List(tickerUSD)
      } else {
        val tickerBTC = Ticker(Book(BTCMarket, currency, Some(currencyName)), priceBTC)
        List(tickerBTC, tickerUSD)
      }
    }
    result.getOrElse {
      // NOTE: there are plenty of dirty values on CMC, avoid polluting logs
      // logger.warn(s"There was an error while mapping a value to a ticker, json = [$json]")
      List.empty
    }
  }
}
|
AlexITC/crypto-coin-alerts
|
alerts-server/app/com/alexitc/coinalerts/services/external/CoinmarketcapService.scala
|
Scala
|
gpl-3.0
| 2,631 |
package models
/**
 * Created by ismet on 06/12/15.
 *
 * An authenticated session record for a user.
 *
 * @param _id          session identifier (document key)
 * @param userId       identifier of the user owning the session
 * @param ip           client IP captured when the session was created
 * @param userAgent    client user-agent string captured at creation
 * @param timestamp    session creation time (epoch-based Long; unit presumably millis — TODO confirm against the writer)
 * @param lastActivity time of the most recent activity (same unit as `timestamp`)
 */
case class Session(
  _id: String,
  userId: String,
  ip: String,
  userAgent: String,
  timestamp: Long,
  lastActivity: Long
)

object Session {
  import play.api.libs.json.Json

  // Macro-derived JSON reads/writes for Session (field names become JSON keys).
  implicit val userFormat = Json.format[Session]
}
|
TVilaboa/Egresados
|
app/models/Session.scala
|
Scala
|
gpl-3.0
| 418 |
/*
* Copyright (c) 2016, Team Mion
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
/*
* Copyright (c) 2016, Team Mion
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
package io.teammion.morefood.proxy
import java.util
import io.teammion.morefood.recipes.{ShapedRecipes, ShapelessRecipes, SmeltingRecipes}
import io.teammion.morefood.util.FunctionHelper
import io.teammion.morefood.{Config, EventHandler, Items}
import net.minecraft.item.crafting.{CraftingManager, IRecipe}
import net.minecraftforge.common.MinecraftForge
import net.minecraftforge.fml.common.event.{FMLInitializationEvent, FMLPostInitializationEvent, FMLPreInitializationEvent}
/**
* Common proxy (will be used on server and client side)
*
* @author Stefan Wimmer <[email protected]>
*/
class CommonProxy extends IProxy {

  /** Loads the mod configuration and registers the mod's items. */
  override def preInit(e: FMLPreInitializationEvent): Unit = {
    Config.load(e)
    Items.register()
  }

  /** Registers all recipe groups and the global Forge event handler. */
  override def init(e: FMLInitializationEvent): Unit = {
    ShapedRecipes.register()
    ShapelessRecipes.register()
    SmeltingRecipes.register()
    MinecraftForge.EVENT_BUS.register(EventHandler.instance)
  }

  /**
   * Optionally removes the vanilla bread recipe.
   *
   * Bug fix: the original called `recipes.remove(...)` from inside
   * `recipes.stream().forEach(...)`. Mutating a collection while one of its
   * streams iterates it violates the stream non-interference contract and
   * typically throws ConcurrentModificationException. `removeIf` performs the
   * same conditional removal safely.
   */
  override def postInit(e: FMLPostInitializationEvent): Unit = {
    if (Config.OVERRIDE_BREAD_RECIPE) {
      val recipes: util.List[IRecipe] = CraftingManager.getInstance().getRecipeList
      // Explicit anonymous Predicate: safe for Scala versions without SAM conversion.
      recipes.removeIf(new java.util.function.Predicate[IRecipe] {
        override def test(recipe: IRecipe): Boolean =
          recipe != null && recipe.getRecipeOutput != null && recipe.getRecipeOutput.getItem == Items.BREAD
      })
    }
  }
}
|
teammion/tm-morefood
|
src/main/scala/io/teammion/morefood/proxy/CommonProxy.scala
|
Scala
|
isc
| 3,144 |
package controllers
import play.api.mvc.Controller
import play.api.libs.json.Json
import play.api.mvc.Action
class Aggregate extends Controller {

  /** Placeholder endpoint: always answers 501 Not Implemented with an empty JSON object. */
  def index = Action { _ =>
    NotImplemented(Json.obj())
  }
}
|
DFID/aid-platform-beta
|
src/platform/app/controllers/Aggregate.scala
|
Scala
|
mit
| 208 |
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.fp
import slamdata.Predef._
import eu.timepit.refined.numeric.{NonNegative, Positive => RPositive, Greater}
import eu.timepit.refined.refineV
import eu.timepit.refined.api.{RefType, Refined}
import scalaz.{Equal, Show, Monoid}
import scalaz.syntax.show._
package object numeric {

  implicit class SafeBigInt(val a: BigInt) extends scala.AnyVal {
    /**
     * Converts to `Int` only when the value is exactly representable.
     *
     * Bug fix: the original checked only `a <= Int.MaxValue`, so any value
     * below `Int.MinValue` was silently truncated by `toInt`. `isValidInt`
     * checks both bounds.
     */
    def safeToInt: Option[Int] =
      if (a.isValidInt) Some(a.toInt) else None
  }

  /** A non-negative `Long`. */
  type Natural = Long Refined NonNegative
  /** A strictly positive `Long`. */
  type Positive = Long Refined RPositive

  // Validating constructors: None when the refinement predicate fails.
  def Positive(a: Long): Option[Positive] = refineV[RPositive](a).right.toOption
  def Natural(a: Long): Option[Natural] = refineV[NonNegative](a).right.toOption

  // Int => Long widening preserves Greater/NonNegative refinements, so the
  // unchecked wrap is sound.
  implicit def widenPositive[F[_,_],N](a: F[Int,Greater[N]])(implicit rt: RefType[F]): F[Long,Greater[N]] =
    rt.unsafeWrap(rt.unwrap(a).toLong)

  implicit def widenNatural[F[_,_]](a: F[Int, NonNegative])(implicit rt: RefType[F]): F[Long,NonNegative] =
    rt.unsafeWrap(rt.unwrap(a).toLong)

  // Positive implies NonNegative; the wrapped value can be re-tagged as-is.
  implicit def positiveToNatural[F[_,_], A](a: F[A,RPositive])(implicit rt: RefType[F]): F[A, NonNegative] =
    rt.unsafeWrap(rt.unwrap(a))

  // Non-negative values are closed under addition, so the sum needs no re-validation.
  implicit def refinedMonoid[F[_,_],T](implicit rt: RefType[F], num: scala.Numeric[T]): Monoid[F[T,NonNegative]] =
    Monoid.instance(
      (a, b) => rt.unsafeWrap(num.plus(rt.unwrap(a), rt.unwrap(b))),
      rt.unsafeWrap(num.zero))

  // Equality / Show on refined values delegate to the underlying value.
  implicit def refinedEqual[F[_,_],T:Equal,M](implicit rt: RefType[F]): Equal[F[T,M]] = Equal.equalBy(rt.unwrap)

  implicit def refinedShow[F[_,_],T:Show,M](implicit rt: RefType[F]): Show[F[T,M]] = Show.shows(f => rt.unwrap(f).shows)
}
|
drostron/quasar
|
foundation/src/main/scala/quasar/fp/numeric/package.scala
|
Scala
|
apache-2.0
| 2,219 |
/*
* Copyright (c) 2013 Typelevel
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.typelevel.discipline
import org.scalacheck._
class DisciplineSpec extends Properties("A RuleSet should compute the properties correctly") {

  // Each property verifies that flattening a RuleSet produces exactly the
  // expected fully-qualified property names, in order.
  property("for rings") = Prop {
    expect(RingLaws.ring, ringExpected)
  }

  property("for additive groups") = Prop {
    expect(RingLaws.additiveGroup, additiveGroupExpected)
  }

  property("for multiplicative groups") = Prop {
    expect(RingLaws.multiplicativeGroup, multiplicativeGroupExpected)
  }

  // True when the rule set's flattened property names match `props` element-wise.
  def expect(ruleSet: Laws#RuleSet, props: List[String]) = {
    val actualNames = ruleSet.all.properties.map { case (name, _) => name }
    actualNames.sameElements(props)
  }

  val ringExpected =
    "ring.additive:group.base:group.associative" ::
    "ring.additive:group.base:group.identity" ::
    "ring.additive:group.base:group.inverse" ::
    "ring.multiplicative:monoid.base:monoid.associative" ::
    "ring.multiplicative:monoid.base:monoid.identity" ::
    "ring.distributive" :: Nil

  val additiveGroupExpected =
    "group.base:group.associative" ::
    "group.base:group.identity" ::
    "group.base:group.inverse" :: Nil

  val multiplicativeGroupExpected =
    "group.base:group.associative" ::
    "group.base:group.identity" ::
    "group.base:group.inverse" ::
    "group.reciprocal consistent" :: Nil
}
// vim: expandtab:ts=2:sw=2
|
typelevel/discipline
|
core/src/test/scala/org/typelevel/discipline/LawSpec.scala
|
Scala
|
mit
| 2,356 |
def f(a: Int, b: Int) = {}
println(/* offset: 4 */ f _)
// NOTE(review): IntelliJ resolve-test fixture — the /* offset */ marker above is
// presumably consumed by the test harness; do not reformat or move the annotated
// expression (TODO confirm the harness tolerates this trailing comment).
|
ilinum/intellij-scala
|
testdata/resolve2/function/partial/AllToTwo.scala
|
Scala
|
apache-2.0
| 56 |
package admin.controllers
import play.api.mvc._
import jp.t2v.lab.play2.auth.AuthElement
import auth.AuthConfigImpl
import model.Administrator
/**
* The Class Home.
*
* @author Nguyen Duc Dung
* @since 1/7/14 12:34 PM
*
*/
object HomeCtr extends Controller with AuthElement with AuthConfigImpl with AdminTemplate {

  /** Page title consumed by the admin template. */
  val pageName = "Home"

  /** Renders the admin landing page; only Administrator-authorized sessions may access it. */
  def index = StackAction(AuthorityKey -> Administrator) { implicit request =>
    renderOk(admin.views.html.index())
  }
}
|
dungvn3000/playstrap
|
admin/app/admin/controllers/HomeCtr.scala
|
Scala
|
apache-2.0
| 475 |
/* Copyright 2009-2021 EPFL, Lausanne */
package stainless
package genc
package ir
import PrimitiveTypes.{ PrimitiveType => PT, _ } // For desambiguation
import Literals._
import Operators._
import IRs._
/**
 * IR pass (RIR -> SIR) that eliminates single-field wrapper classes: a class
 * with exactly one field and no parent is replaced everywhere by that field's
 * value and type, and its definition is dropped from the program.
 */
final class StructInliner(val ctx: inox.Context) extends Transformer(RIR, SIR) with NoEnv {
  import from._

  private given givenDebugSection: DebugSectionGenC.type = DebugSectionGenC

  // A class is "simplifiable" when it wraps exactly one field and has no parent.
  object SimplifiableClassDef {
    def unapply(cd: ClassDef): Option[ClassDef] = {
      if (cd.fields.length == 1 && cd.parent.isEmpty) Some(cd)
      else None
    }
  }

  // Matches expressions whose static type is a simplifiable class.
  object SimplifiableExpr {
    def unapply(e: Expr): Option[Expr] = e.getType match {
      case ClassType(SimplifiableClassDef(cd)) => Some(e)
      case _ => None
    }
  }

  // The type of a simplifiable class collapses to the type of its single field.
  override def rec(typ: Type)(using Env): to.Type = typ match {
    case ClassType(SimplifiableClassDef(cd)) => rec(cd.fields.map(_._1).head.typ)
    case _ => super.rec(typ)
  }

  // Field access on a wrapper unwraps to the receiver; constructing a wrapper
  // reduces to its single constructor argument.
  override def recImpl(e: Expr)(using Env): (to.Expr, Env) = e match {
    case FieldAccess(SimplifiableExpr(obj), _) => recImpl(obj)
    case Construct(SimplifiableClassDef(cd), Seq(arg)) => recImpl(arg)
    case _ => super.recImpl(e)
  }

  // Remove the now-inlined class definitions before transforming the program.
  override def rec(prog: Prog)(using Env): to.Prog =
    super.rec(
      prog.copy(classes =
        prog.classes.filter {
          case SimplifiableClassDef(cd) => false
          case _ => true
        }
      )
    )
}
|
epfl-lara/stainless
|
core/src/main/scala/stainless/genc/ir/StructInliner.scala
|
Scala
|
apache-2.0
| 1,390 |
/*******************************************************************************
Copyright (c) 2012-2014, S-Core, KAIST.
All rights reserved.
Use is subject to license terms.
This distribution may include materials developed by third parties.
******************************************************************************/
package kr.ac.kaist.jsaf.analysis.typing.domain
object ObjectValue {
  /* convenience constructors */
  // Each overload lifts one specific abstract domain (or a concrete Loc) into
  // a Value before delegating to the primary case-class constructor.
  def apply(v: AbsNumber, writable: AbsBool, enumerable: AbsBool, configurable: AbsBool): ObjectValue
  = ObjectValue(Value(v), writable, enumerable, configurable)
  def apply(v: AbsUndef, writable: AbsBool, enumerable: AbsBool, configurable: AbsBool): ObjectValue
  = ObjectValue(Value(v), writable, enumerable, configurable)
  def apply(v: AbsString, writable: AbsBool, enumerable: AbsBool, configurable: AbsBool): ObjectValue
  = ObjectValue(Value(v), writable, enumerable, configurable)
  def apply(v: AbsBool, writable: AbsBool, enumerable: AbsBool, configurable: AbsBool): ObjectValue
  = ObjectValue(Value(v), writable, enumerable, configurable)
  def apply(v: PValue, writable: AbsBool, enumerable: AbsBool, configurable: AbsBool): ObjectValue
  = ObjectValue(Value(v), writable, enumerable, configurable)
  def apply(v: AbsNull, writable: AbsBool, enumerable: AbsBool, configurable: AbsBool): ObjectValue
  = ObjectValue(Value(v), writable, enumerable, configurable)
  def apply(v: Loc, writable: AbsBool, enumerable: AbsBool, configurable: AbsBool): ObjectValue
  = ObjectValue(Value(v), writable, enumerable, configurable)
}
/**
 * Abstract JavaScript property descriptor: an abstract value plus the three
 * abstract attribute flags. All four components are combined point-wise by
 * the lattice operations below.
 */
case class ObjectValue(value: Value,
                       writable: AbsBool,
                       enumerable: AbsBool,
                       configurable: AbsBool) {
  /* tuple-like accessor */
  val _1 = value
  val _2 = writable
  val _3 = enumerable
  val _4 = configurable

  /* partial order */
  def <= (that: ObjectValue): Boolean =
    (value <= that.value) &&
    (writable <= that.writable) &&
    (enumerable <= that.enumerable) &&
    (configurable <= that.configurable)

  /* not a partial order */
  def </ (that: ObjectValue): Boolean =
    (value </ that.value) ||
    (writable </ that.writable) ||
    (enumerable </ that.enumerable) ||
    (configurable </ that.configurable)

  /* join */
  def + (that: ObjectValue): ObjectValue =
    copy(
      value = value + that.value,
      writable = writable + that.writable,
      enumerable = enumerable + that.enumerable,
      configurable = configurable + that.configurable)

  /* meet */
  def <> (that: ObjectValue): ObjectValue =
    copy(
      value = value <> that.value,
      writable = writable <> that.writable,
      enumerable = enumerable <> that.enumerable,
      configurable = configurable <> that.configurable)
}
|
darkrsw/safe
|
src/main/scala/kr/ac/kaist/jsaf/analysis/typing/domain/ObjectValue.scala
|
Scala
|
bsd-3-clause
| 2,860 |
package org.jetbrains.plugins.scala.annotator.createFromUsage
import com.intellij.codeInsight.template.{TemplateBuilderImpl, TemplateManager}
import com.intellij.codeInsight.{CodeInsightUtilCore, FileModificationService}
import com.intellij.openapi.editor.Editor
import com.intellij.openapi.project.Project
import com.intellij.psi._
import com.intellij.psi.util.PsiTreeUtil
import com.intellij.refactoring.util.CommonRefactoringUtil
import org.jetbrains.plugins.scala.annotator.createFromUsage.CreateEntityQuickFix._
import org.jetbrains.plugins.scala.annotator.createFromUsage.CreateFromUsageUtil._
import org.jetbrains.plugins.scala.codeInspection.collections.MethodRepr
import org.jetbrains.plugins.scala.console.ScalaLanguageConsoleView
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.lang.psi.api.ScalaFile
import org.jetbrains.plugins.scala.lang.psi.api.base.types.{ScSelfTypeElement, ScSimpleTypeElement}
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.templates.{ScExtendsBlock, ScTemplateBody}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef._
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory._
import org.jetbrains.plugins.scala.lang.psi.types.ScTypeExt
import org.jetbrains.plugins.scala.lang.psi.types.api.{ExtractClass, TypeSystem}
import org.jetbrains.plugins.scala.project.ScalaLanguageLevel.Scala_2_10
import org.jetbrains.plugins.scala.project._
import scala.util.{Failure, Success, Try}
/**
* Pavel Fatin
*/
/**
 * Quick fix that creates a missing entity (method, value, etc. — `keyword`
 * decides which) at the location an unresolved reference points to.
 */
abstract class CreateEntityQuickFix(ref: ScReferenceExpression, entity: String, keyword: String)
        extends CreateFromUsageQuickFixBase(ref, entity) {
  // TODO add private modifiers for unqualified entities ?
  // TODO use Java CFU when needed
  // TODO find better place for fields, create methods after

  // Available only when a target extends-block can be resolved and lives in a
  // writable (non-compiled) file.
  override def isAvailable(project: Project, editor: Editor, file: PsiFile): Boolean = {
    if (!super.isAvailable(project, editor, file)) return false
    implicit val typeSystem = project.typeSystem

    def checkBlock(expr: ScExpression) = blockFor(expr) match {
      case Success(bl) => !bl.isInCompiledFile
      case _ => false
    }

    ref match {
      // Named-argument position (`x = ...` inside a call): not a new entity.
      case Both(Parent(_: ScAssignStmt), Parent(Parent(_: ScArgumentExprList))) =>
        false
      case exp@Parent(infix: ScInfixExpr) if infix.operation == exp => checkBlock(infix.getBaseExpr)
      case it =>
        it.qualifier match {
          case Some(sup: ScSuperReference) => unambiguousSuper(sup).exists(!_.isInCompiledFile)
          case Some(qual) => checkBlock(qual)
          case None => !it.isInCompiledFile
        }
    }
  }

  // Builds the entity source text, inserts it into the resolved extends block
  // (or next to the reference when unqualified) and launches a live template
  // so the user can fill in types/parameters.
  def invokeInner(project: Project, editor: Editor, file: PsiFile) {
    implicit val typeSystem = project.typeSystem

    def tryToFindBlock(expr: ScExpression): Option[ScExtendsBlock] = {
      blockFor(expr) match {
        case Success(bl) => Some(bl)
        case Failure(e) =>
          CommonRefactoringUtil.showErrorHint(project, editor, e.getMessage, "Create entity quickfix", null)
          None
      }
    }

    if (!ref.isValid) return
    val entityType = typeFor(ref)
    val genericParams = genericParametersFor(ref)
    val parameters = parametersFor(ref)
    // ": Int" is a placeholder return type, replaced through the template below.
    val placeholder = if (entityType.isDefined) "%s %s%s: Int" else "%s %s%s"
    // "???" only exists since Scala 2.10; older levels get an abstract-looking body.
    val unimplementedBody = if (file.scalaLanguageLevel.exists(_ >= Scala_2_10)) " = ???" else ""
    val params = (genericParams ++: parameters).mkString
    val text = placeholder.format(keyword, ref.nameId.getText, params) + unimplementedBody

    val block = ref match {
      case it if it.isQualified => ref.qualifier.flatMap(tryToFindBlock)
      case Parent(infix: ScInfixExpr) => tryToFindBlock(infix.getBaseExpr)
      case _ => None
    }

    if (!FileModificationService.getInstance.prepareFileForWrite(block.map(_.getContainingFile).getOrElse(file))) return

    inWriteAction {
      val entity = block match {
        // Synthetic companion objects must be materialized before we can add members.
        case Some(_ childOf (obj: ScObject)) if obj.isSyntheticObject =>
          val bl = materializeSytheticObject(obj).extendsBlock
          createEntity(bl, ref, text)
        case Some(it) => createEntity(it, ref, text)
        case None => createEntity(ref, text)
      }

      ScalaPsiUtil.adjustTypes(entity)

      val builder = new TemplateBuilderImpl(entity)

      // Replace the placeholder return type with the expected type, if known.
      for (aType <- entityType;
           typeElement <- entity.children.findByType(classOf[ScSimpleTypeElement])) {
        builder.replaceElement(typeElement, aType)
      }

      addTypeParametersToTemplate(entity, builder)
      addParametersToTemplate(entity, builder)
      addQmarksToTemplate(entity, builder)

      CodeInsightUtilCore.forcePsiPostprocessAndRestoreElement(entity)

      val template = builder.buildTemplate()

      // The Scala console has no regular editor/document to run a template in.
      val isScalaConsole = file.getName == ScalaLanguageConsoleView.SCALA_CONSOLE
      if (!isScalaConsole) {
        val newEditor = positionCursor(entity.getLastChild)
        val range = entity.getTextRange
        newEditor.getDocument.deleteString(range.getStartOffset, range.getEndOffset)
        TemplateManager.getInstance(project).startTemplate(newEditor, template)
      }
    }
  }
}
object CreateEntityQuickFix {
  // Replaces a synthetic companion object with a real, editable `object X {}`
  // placed right after its class.
  private def materializeSytheticObject(obj: ScObject): ScObject = {
    val clazz = obj.fakeCompanionClassOrCompanionClass
    val objText = s"object ${clazz.name} {}"
    val fromText = ScalaPsiElementFactory.createTemplateDefinitionFromText(objText, clazz.getParent, clazz)
    clazz.getParent.addAfter(fromText, clazz).asInstanceOf[ScObject]
  }

  // Resolves the extends-block a new member should be inserted into, for the
  // expression qualifying the unresolved reference.
  private def blockFor(exp: ScExpression)
                      (implicit typeSystem: TypeSystem): Try[ScExtendsBlock] = {
    object ParentExtendsBlock {
      // Bug fix: the original ignored the parameter `e` and closed over the
      // enclosing `exp`. Both call sites happened to match on `exp` itself, so
      // behavior is unchanged, but the extractor is now correct in general.
      def unapply(e: PsiElement): Option[ScExtendsBlock] = Option(PsiTreeUtil.getParentOfType(e, classOf[ScExtendsBlock]))
    }

    exp match {
      case InstanceOfClass(td: ScTemplateDefinition) => Success(td.extendsBlock)
      case th: ScThisReference if PsiTreeUtil.getParentOfType(th, classOf[ScExtendsBlock], true) != null =>
        th.refTemplate match {
          case Some(ScTemplateDefinition.ExtendsBlock(block)) => Success(block)
          case None =>
            val parentBl = PsiTreeUtil.getParentOfType(th, classOf[ScExtendsBlock], /*strict = */true, /*stopAt = */classOf[ScTemplateDefinition])
            if (parentBl != null) Success(parentBl)
            else Failure(new IllegalStateException("Cannot find template definition for `this` reference"))
        }
      case sup: ScSuperReference =>
        unambiguousSuper(sup) match {
          case Some(ScTemplateDefinition.ExtendsBlock(block)) => Success(block)
          case None => Failure(new IllegalStateException("Cannot find template definition for not-static super reference"))
        }
      case Both(th: ScThisReference, ParentExtendsBlock(block)) => Success(block)
      case Both(ReferenceTarget((_: ScSelfTypeElement)), ParentExtendsBlock(block)) => Success(block)
      case _ => Failure(new IllegalStateException("Cannot find a place to create definition"))
    }
  }

  // Inserts the entity text into a known extends block, creating a template
  // body if necessary; the entity goes right after the self-type (if any).
  def createEntity(block: ScExtendsBlock, ref: ScReferenceExpression, text: String): PsiElement = {
    if (block.templateBody.isEmpty)
      block.add(createTemplateBody(block.getManager))

    val children = block.templateBody.get.children.toSeq
    val anchor = children.find(_.isInstanceOf[ScSelfTypeElement]).getOrElse(children.head)
    val holder = anchor.getParent
    val hasMembers = holder.children.findByType(classOf[ScMember]).isDefined

    val entity = holder.addAfter(parseElement(text, ref.getManager), anchor)
    if (hasMembers) holder.addAfter(createNewLine(ref.getManager), entity)

    entity
  }

  // Inserts the entity before the statement containing the unqualified
  // reference, padded with blank lines.
  def createEntity(ref: ScReferenceExpression, text: String): PsiElement = {
    val anchor = anchorForUnqualified(ref).get
    val holder = anchor.getParent

    val entity = holder.addBefore(parseElement(text, ref.getManager), anchor)

    holder.addBefore(createNewLine(ref.getManager, "\\n\\n"), entity)
    holder.addAfter(createNewLine(ref.getManager, "\\n\\n"), entity)

    entity
  }

  // Expected type at the reference site, rendered canonically (if inferable).
  private def typeFor(ref: ScReferenceExpression): Option[String] = ref.getParent match {
    case call: ScMethodCall => call.expectedType().map(_.canonicalText)
    case _ => ref.expectedType().map(_.canonicalText)
  }

  // Parameter-list text derived from the call arguments (if the reference is called).
  private def parametersFor(ref: ScReferenceExpression): Option[String] = {
    ref.parent.collect {
      case MethodRepr(_, _, Some(`ref`), args) => paramsText(args)
      case (_: ScGenericCall) childOf (MethodRepr(_, _, Some(`ref`), args)) => paramsText(args)
    }
  }

  // Type-parameter clause derived from explicit type arguments (if any).
  private def genericParametersFor(ref: ScReferenceExpression): Option[String] = ref.parent.collect {
    case genCall: ScGenericCall =>
      genCall.arguments match {
        case args if args.size == 1 => "[T]"
        case args => args.indices.map(i => s"T$i").mkString("[", ", ", "]")
      }
  }

  // Finds the top-level statement (direct child of a template body or file)
  // that contains the reference; the new entity is inserted next to it.
  private def anchorForUnqualified(ref: ScReferenceExpression): Option[PsiElement] = {
    val parents = ref.parents.toList
    val anchors = ref :: parents

    val place = parents.zip(anchors).find {
      case (_ : ScTemplateBody, _) => true
      case (_ : ScalaFile, _) => true
      case _ => false
    }

    place.map(_._2)
  }

  // Resolves `super.x` to a unique parent type definition, when unambiguous.
  private def unambiguousSuper(supRef: ScSuperReference)
                              (implicit typeSystem: TypeSystem): Option[ScTypeDefinition] = {
    supRef.staticSuper match {
      case Some(ExtractClass(clazz: ScTypeDefinition)) => Some(clazz)
      case None =>
        supRef.parents.toSeq.collect { case td: ScTemplateDefinition => td } match {
          case Seq(td) =>
            td.supers match {
              case Seq(t: ScTypeDefinition) => Some(t)
              case _ => None
            }
          case _ => None
        }
    }
  }
}
|
katejim/intellij-scala
|
src/org/jetbrains/plugins/scala/annotator/createFromUsage/CreateEntityQuickFix.scala
|
Scala
|
apache-2.0
| 9,924 |
package org.vlinderlang.vlinderc
/** Severity of a diagnostic: either a warning or a hard error. */
sealed abstract class DiagnosticKind
case object Warning extends DiagnosticKind
case object Error extends DiagnosticKind

/** A compiler diagnostic: a severity plus a human-readable message. */
trait Diagnostic {
  def kind: DiagnosticKind
  def message: String

  /** Renders the diagnostic as "warning: msg" or "error: msg". */
  final def format: String = {
    // Plain literals: the original used the `s` interpolator on strings with
    // nothing to interpolate.
    val kindString = kind match {
      case Warning => "warning"
      case Error => "error"
    }
    s"$kindString: $message"
  }
}
|
mill-lang/millc
|
src/main/scala/org/vlinderlang/vlinderc/diagnostic.scala
|
Scala
|
bsd-3-clause
| 411 |
package com.socrata.http.server.util.filters
import javax.servlet.http.{HttpServletRequestWrapper, HttpServletRequest}
import InputByteCountingFilter._
import com.socrata.http.server.HttpRequest.AugmentedHttpServletRequest
import io.Codec
import javax.servlet.ServletInputStream
import java.io._
import com.socrata.http.server._
import com.socrata.http.server.implicits._
/**
 * Filter that wraps the request so that every byte the downstream service
 * consumes from the request body is counted, then reports the total to
 * [[read]] when the response function runs.
 */
trait InputByteCountingFilter extends SimpleFilter[HttpRequest, HttpResponse] {
  def apply(req: HttpRequest, service: HttpService): HttpResponse = {
    val servletRequestWrapper = new CountingHttpServletRequest(req.servletRequest)
    val wrapper = new WrapperHttpRequest(req) {
      override def servletRequest = new AugmentedHttpServletRequest(servletRequestWrapper)
    }
    // bytesRead is sampled inside the composed response function, i.e. after
    // the service has produced its response.
    service(wrapper) ~> (_ => read(servletRequestWrapper.bytesRead))
  }

  /**
   * Callback receiving the number of request-body bytes consumed.
   * (Explicit `: Unit` added — Scala's procedure syntax is deprecated.)
   */
  def read(bytes: Long): Unit
}
object InputByteCountingFilter {
  /**
   * Servlet-request wrapper that counts the body bytes consumed through either
   * `getInputStream` or `getReader`. Per the servlet contract the two
   * accessors are mutually exclusive, which the small state machine enforces.
   */
  class CountingHttpServletRequest(underlying: HttpServletRequest) extends HttpServletRequestWrapper(underlying) {
    // Accessor state: nothing opened yet / reader taken / stream taken.
    private val NONE = 0
    private val READER = 1
    private val STREAM = 2
    private var state = NONE
    // Running total of bytes consumed (reads and skips alike).
    private var count = 0L

    private def getCountingInputStream = new ByteCountingInputStream(super.getInputStream)

    // lazy vals: each accessor wraps the underlying source at most once and
    // returns the same instance on repeated calls.
    override lazy val getInputStream: ServletInputStream = {
      if(state == READER) throw new IllegalStateException("READER")
      state = STREAM
      getCountingInputStream
    }

    override lazy val getReader: BufferedReader = {
      if(state == STREAM) throw new IllegalStateException("STREAM")
      state = READER
      // Falls back to ISO-8859-1 when the request declares no charset.
      new BufferedReader(new InputStreamReader(getCountingInputStream, Option(getCharacterEncoding).getOrElse(Codec.ISO8859.name)))
    }

    // Delegating stream that adds every byte read or skipped to `count`.
    class ByteCountingInputStream(underlying: ServletInputStream) extends ServletInputStream {
      override def read(): Int = underlying.read() match {
        case -1 => -1
        case b => count += 1; b
      }
      override def read(buf: Array[Byte]): Int = underlying.read(buf) match {
        case -1 => -1
        case n => count += n; n
      }
      override def read(buf: Array[Byte], off: Int, len: Int): Int = underlying.read(buf, off, len) match {
        case -1 => -1
        case n => count += n; n
      }
      // Skipped bytes count toward the total as well.
      override def skip(n: Long) = {
        val skipped = underlying.skip(n)
        count += skipped
        skipped
      }
      override def markSupported = underlying.markSupported()
      override def mark(readLimit: Int) = underlying.mark(readLimit)
      override def reset() = underlying.reset()
      override def close() = underlying.close()
      override def available() = underlying.available()
      // Servlet 3.1 async-read API: pure delegation, no counting involved.
      def isFinished: Boolean = underlying.isFinished
      def isReady(): Boolean = underlying.isReady
      def setReadListener(x: javax.servlet.ReadListener) = underlying.setReadListener(x)
    }

    /** Total request-body bytes consumed so far. */
    def bytesRead = count
  }
}
|
socrata-platform/socrata-http
|
socrata-http-server/src/main/scala/com/socrata/http/server/util/filters/InputByteCountingFilter.scala
|
Scala
|
apache-2.0
| 2,871 |
package domain.user
import domain.ValueObject
/**
 * User identifier value object.
 *
 * @param id the raw numeric identifier
 */
case class UserId(
  id: Long
) extends ValueObject[UserId] {
  // Two UserIds carry the same value exactly when their raw IDs are equal.
  override def sameValueAs(other: UserId): Boolean = id == other.id
}
|
nemuzuka/vss-kanban
|
src/main/scala/domain/user/UserId.scala
|
Scala
|
mit
| 230 |
/* __ *\\
** ________ ___ / / ___ __ ____ Scala.js Test Suite **
** / __/ __// _ | / / / _ | __ / // __/ (c) 2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ |/_// /_\\ \\ http://scala-js.org/ **
** /____/\\___/_/ |_/____/_/ | |__/ /____/ **
** |/____/ **
\\* */
package org.scalajs.testsuite.javalib.util
import language.implicitConversions
import org.junit.Assert._
import org.junit.Assume._
import org.junit.Test
import org.scalajs.testsuite.utils.Platform._
import java.util.{ Arrays, Comparator }
import scala.reflect.ClassTag
// Singleton instance of the suite, so other suites can reuse/extend it.
object ArraysTest extends ArraysTest
/** This is also used in the typedarray package to test scala.Arrays backed
* by TypedArrays
*/
class ArraysTest {
  // To invoke org.junit.Assert.assertArrayEquals on Array[T]
  implicit def array2erasedArray[T](arr: Array[T]): Array[AnyRef] =
    arr.map(_.asInstanceOf[AnyRef])

  /** Overridden by typedarray tests */
  def Array[T: ClassTag](v: T*): scala.Array[T] = scala.Array(v: _*)

  // Natural (lexicographic) String order, used by the comparator-based sort tests.
  val stringComparator = new Comparator[String]() {
    def compare(s1: String, s2: String): Int = s1.compareTo(s2)
  }
  // One test per primitive specialization of java.util.Arrays.sort, all
  // driven through the shared testSort helper below.
  @Test def sort_Int(): Unit =
    testSort[Int](_.toInt, new Array(_), Arrays.sort(_), Arrays.sort(_, _, _))

  @Test def sort_Long(): Unit =
    testSort[Long](_.toLong, new Array(_), Arrays.sort(_), Arrays.sort(_, _, _))

  @Test def sort_Short(): Unit =
    testSort[Short](_.toShort, new Array(_), Arrays.sort(_), Arrays.sort(_, _, _))

  @Test def sort_Byte(): Unit =
    testSort[Byte](_.toByte, new Array(_), Arrays.sort(_), Arrays.sort(_, _, _))

  @Test def sort_Char(): Unit =
    testSort[Char](_.toChar, new Array(_), Arrays.sort(_), Arrays.sort(_, _, _))

  @Test def sort_Float(): Unit =
    testSort[Float](_.toFloat, new Array(_), Arrays.sort(_), Arrays.sort(_, _, _))

  @Test def sort_Double(): Unit =
    testSort[Double](_.toDouble, new Array(_), Arrays.sort(_), Arrays.sort(_, _, _))

  @Test def sort_String(): Unit =
    testSort[AnyRef](_.toString, new Array(_), Arrays.sort(_), Arrays.sort(_, _, _))

  /**
   * Shared sort driver.
   *
   * @param elem     converts an Int seed into an element of type T
   * @param newArray allocates an array of the given length
   * @param sort     whole-array sort under test
   * @param sort2    ranged [fromIndex, toIndex) sort under test
   */
  private def testSort[T: ClassTag](elem: Int => T, newArray: Int => Array[T],
      sort: Array[T] => Unit, sort2: (Array[T], Int, Int) => Unit): Unit = {
    val values = Array(5, 3, 6, 1, 2, 4).map(elem)
    val arr = newArray(values.length)

    // Whole-array sort.
    for (i <- 0 until values.length)
      arr(i) = values(i)
    sort(arr)
    assertArrayEquals(arr, Array(1, 2, 3, 4, 5, 6).map(elem))

    // Ranged sorts: only [from, until) may be reordered each time.
    for (i <- 0 until values.length)
      arr(i) = values(i)

    sort2(arr, 0, 3)
    assertArrayEquals(arr, Array(3, 5, 6, 1, 2, 4).map(elem))

    sort2(arr, 2, 5)
    assertArrayEquals(arr, Array(3, 5, 1, 2, 6, 4).map(elem))

    sort2(arr, 0, 6)
    assertArrayEquals(arr, Array(1, 2, 3, 4, 5, 6).map(elem))
  }
  // Arrays.sort(array, comparator) must honor the supplied ordering.
  @Test def sortWithComparator(): Unit = {
    val scalajs: Array[String] = Array("S", "c", "a", "l", "a", ".", "j", "s")
    val sorted = Array[String](".", "S", "a", "a", "c", "j", "l", "s")

    Arrays.sort(scalajs, stringComparator)
    assertArrayEquals(sorted, scalajs)
  }
@Test def sortIsStable(): Unit = {
case class A(n: Int)
val cmp = new Comparator[A]() {
def compare(a1: A, a2: A): Int = a1.n.compareTo(a2.n)
}
val scalajs: Array[A] = Array(A(1), A(2), A(2), A(3), A(1), A(2), A(3))
val sorted = Array[A](scalajs(0), scalajs(4), scalajs(1), scalajs(2),
scalajs(5), scalajs(3), scalajs(6))
Arrays.sort(scalajs, cmp)
assertArrayEquals(sorted, scalajs)
scalajs.zip(sorted).forall(pair => pair ._1 eq pair._2)
}
  // The fill_* tests each exercise one primitive overload of
  // java.util.Arrays.fill: the plain variant overwrites the whole array; the
  // *_with_start_and_end_index twin fills only [from, until) and the
  // assertions also verify that untouched slots keep their previous value.
  @Test def fill_Boolean(): Unit = {
    val booleans = new Array[Boolean](6)
    Arrays.fill(booleans, false)
    assertArrayEquals(Array(false, false, false, false, false, false), booleans)

    Arrays.fill(booleans, true)
    assertArrayEquals(Array(true, true, true, true, true, true), booleans)
  }

  @Test def fill_Boolean_with_start_and_end_index(): Unit = {
    val booleans = new Array[Boolean](6)
    Arrays.fill(booleans, 1, 4, true)
    assertArrayEquals(Array(false, true, true, true, false, false), booleans)
  }

  @Test def fill_Byte(): Unit = {
    val bytes = new Array[Byte](6)
    Arrays.fill(bytes, 42.toByte)
    assertArrayEquals(Array[Byte](42, 42, 42, 42, 42, 42), bytes)

    Arrays.fill(bytes, -1.toByte)
    assertArrayEquals(Array[Byte](-1, -1, -1, -1, -1, -1), bytes)
  }

  @Test def fill_Byte_with_start_and_end_index(): Unit = {
    val bytes = new Array[Byte](6)
    Arrays.fill(bytes, 1, 4, 42.toByte)
    assertArrayEquals(Array[Byte](0, 42, 42, 42, 0, 0), bytes)

    Arrays.fill(bytes, 2, 5, -1.toByte)
    assertArrayEquals(Array[Byte](0, 42, -1, -1, -1, 0), bytes)
  }

  @Test def fill_Short(): Unit = {
    val shorts = new Array[Short](6)
    Arrays.fill(shorts, 42.toShort)
    assertArrayEquals(Array[Short](42, 42, 42, 42, 42, 42), shorts)

    Arrays.fill(shorts, -1.toShort)
    assertArrayEquals(Array[Short](-1, -1, -1, -1, -1, -1), shorts)
  }

  @Test def fill_Short_with_start_and_end_index(): Unit = {
    val shorts = new Array[Short](6)
    Arrays.fill(shorts, 1, 4, 42.toShort)
    assertArrayEquals(Array[Short](0, 42, 42, 42, 0, 0), shorts)

    Arrays.fill(shorts, 2, 5, -1.toShort)
    assertArrayEquals(Array[Short](0, 42, -1, -1, -1, 0), shorts)
  }

  @Test def fill_Int(): Unit = {
    val ints = new Array[Int](6)
    Arrays.fill(ints, 42)
    assertArrayEquals(Array(42, 42, 42, 42, 42, 42), ints)

    Arrays.fill(ints, -1)
    assertArrayEquals(Array(-1, -1, -1, -1, -1, -1), ints)
  }

  @Test def fill_Int_with_start_and_end_index(): Unit = {
    val ints = new Array[Int](6)
    Arrays.fill(ints, 1, 4, 42)
    assertArrayEquals(Array(0, 42, 42, 42, 0, 0), ints)

    Arrays.fill(ints, 2, 5, -1)
    assertArrayEquals(Array(0, 42, -1, -1, -1, 0), ints)
  }

  @Test def fill_Long(): Unit = {
    val longs = new Array[Long](6)
    Arrays.fill(longs, 42L)
    assertArrayEquals(Array(42L, 42L, 42L, 42L, 42L, 42L), longs)

    Arrays.fill(longs, -1L)
    assertArrayEquals(Array(-1L, -1L, -1L, -1L, -1L, -1L), longs)
  }

  @Test def fill_Long_with_start_and_end_index(): Unit = {
    val longs = new Array[Long](6)
    Arrays.fill(longs, 1, 4, 42L)
    assertArrayEquals(Array(0L, 42L, 42L, 42L, 0L, 0L), longs)

    Arrays.fill(longs, 2, 5, -1L)
    assertArrayEquals(Array(0L, 42L, -1L, -1L, -1L, 0L), longs)
  }

  @Test def fill_Float(): Unit = {
    val floats = new Array[Float](6)
    Arrays.fill(floats, 42.0f)
    assertArrayEquals(Array(42.0f, 42.0f, 42.0f, 42.0f, 42.0f, 42.0f), floats)

    Arrays.fill(floats, -1.0f)
    assertArrayEquals(Array(-1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f), floats)
  }

  @Test def fill_Float_with_start_and_end_index(): Unit = {
    val floats = new Array[Float](6)
    Arrays.fill(floats, 1, 4, 42.0f)
    assertArrayEquals(Array(0.0f, 42.0f, 42.0f, 42.0f, 0.0f, 0.0f), floats)

    Arrays.fill(floats, 2, 5, -1.0f)
    assertArrayEquals(Array(0.0f, 42.0f, -1.0f, -1.0f, -1.0f, 0.0f), floats)
  }
@Test def fill_Double(): Unit = {
val doubles = new Array[Double](6)
Arrays.fill(doubles, 42.0)
assertArrayEquals(Array(42.0, 42.0, 42.0, 42.0, 42.0, 42.0), doubles)
// Was `-1.0f`: use a Double literal so this exercises fill(Array[Double], Double)
// directly instead of relying on implicit Float-to-Double widening, consistent
// with every other fill_* test in this suite.
Arrays.fill(doubles, -1.0)
assertArrayEquals(Array(-1.0, -1.0, -1.0, -1.0, -1.0, -1.0), doubles)
}
@Test def fill_Double_with_start_and_end_index(): Unit = {
val doubles = new Array[Double](6)
Arrays.fill(doubles, 1, 4, 42.0)
assertArrayEquals(Array(0.0, 42.0, 42.0, 42.0, 0.0, 0.0), doubles)
Arrays.fill(doubles, 2, 5, -1.0)
assertArrayEquals(Array(0.0, 42.0, -1.0, -1.0, -1.0, 0.0), doubles)
}
@Test def fill_AnyRef(): Unit = {
val array = new Array[AnyRef](6)
Arrays.fill(array, "a")
assertArrayEquals(Array[AnyRef]("a", "a", "a", "a", "a", "a"), array)
Arrays.fill(array, "b")
assertArrayEquals(Array[AnyRef]("b", "b", "b", "b", "b", "b"), array)
}
@Test def fill_AnyRef_with_start_and_end_index(): Unit = {
// Renamed local from `bytes`: this is an Array[AnyRef], not a byte array.
val anyrefs = new Array[AnyRef](6)
Arrays.fill(anyrefs, 1, 4, "a")
assertArrayEquals(Array[AnyRef](null, "a", "a", "a", null, null), anyrefs)
Arrays.fill(anyrefs, 2, 5, "b")
assertArrayEquals(Array[AnyRef](null, "a", "b", "b", "b", null), anyrefs)
}
@Test def binarySearch_with_start_and_end_index_on_Long(): Unit = {
val longs: Array[Long] = Array(1, 2, 3, 5, 6, 7)
var ret = Arrays.binarySearch(longs, 0, 6, 5)
assertEquals(3, ret)
ret = Arrays.binarySearch(longs, 0, 6, 0)
assertEquals(-1, ret)
ret = Arrays.binarySearch(longs, 0, 6, 4)
assertEquals(-4, ret)
ret = Arrays.binarySearch(longs, 0, 6, 8)
assertEquals(-7, ret)
}
@Test def binarySearch_on_Long(): Unit = {
val longs: Array[Long] = Array(1, 2, 3, 5, 6, 7)
var ret = Arrays.binarySearch(longs, 5)
assertEquals(3, ret)
ret = Arrays.binarySearch(longs, 0)
assertEquals(-1, ret)
ret = Arrays.binarySearch(longs, 4)
assertEquals(-4, ret)
ret = Arrays.binarySearch(longs, 8)
assertEquals(-7, ret)
}
@Test def binarySearch_with_start_and_end_index_on_Int(): Unit = {
val ints: Array[Int] = Array(1, 2, 3, 5, 6, 7)
var ret = Arrays.binarySearch(ints, 0, 6, 5)
assertEquals(3, ret)
ret = Arrays.binarySearch(ints, 0, 6, 0)
assertEquals(-1, ret)
ret = Arrays.binarySearch(ints, 0, 6, 4)
assertEquals(-4, ret)
ret = Arrays.binarySearch(ints, 0, 6, 8)
assertEquals(-7, ret)
}
@Test def binarySearch_on_Int(): Unit = {
// Found elements yield their index; missing ones yield -(insertionPoint) - 1.
val ints: Array[Int] = Array(1, 2, 3, 5, 6, 7)
assertEquals(3, Arrays.binarySearch(ints, 5))
assertEquals(-1, Arrays.binarySearch(ints, 0))
assertEquals(-4, Arrays.binarySearch(ints, 4))
assertEquals(-7, Arrays.binarySearch(ints, 8))
}
@Test def binarySearch_with_start_and_end_index_on_Short(): Unit = {
val shorts: Array[Short] = Array(1, 2, 3, 5, 6, 7)
var ret = Arrays.binarySearch(shorts, 0, 6, 5.toShort)
assertEquals(3, ret)
ret = Arrays.binarySearch(shorts, 0, 6, 0.toShort)
assertEquals(-1, ret)
ret = Arrays.binarySearch(shorts, 0, 6, 4.toShort)
assertEquals(-4, ret)
ret = Arrays.binarySearch(shorts, 0, 6, 8.toShort)
assertEquals(-7, ret)
}
@Test def binarySearch_on_Short(): Unit = {
val shorts: Array[Short] = Array(1, 2, 3, 5, 6, 7)
var ret = Arrays.binarySearch(shorts, 5.toShort)
assertEquals(3, ret)
ret = Arrays.binarySearch(shorts, 0.toShort)
assertEquals(-1, ret)
ret = Arrays.binarySearch(shorts, 4.toShort)
assertEquals(-4, ret)
ret = Arrays.binarySearch(shorts, 8.toShort)
assertEquals(-7, ret)
}
@Test def binarySearch_with_start_and_end_index_on_Char(): Unit = {
val chars: Array[Char] = Array('b', 'c', 'd', 'f', 'g', 'h')
var ret = Arrays.binarySearch(chars, 0, 6, 'f')
assertEquals(3, ret)
ret = Arrays.binarySearch(chars, 0, 6, 'a')
assertEquals(-1, ret)
ret = Arrays.binarySearch(chars, 0, 6, 'e')
assertEquals(-4, ret)
ret = Arrays.binarySearch(chars, 0, 6, 'i')
assertEquals(-7, ret)
}
@Test def binarySearch_on_Char(): Unit = {
val chars: Array[Char] = Array('b', 'c', 'd', 'f', 'g', 'h')
var ret = Arrays.binarySearch(chars, 'f')
assertEquals(3, ret)
ret = Arrays.binarySearch(chars, 'a')
assertEquals(-1, ret)
ret = Arrays.binarySearch(chars, 'e')
assertEquals(-4, ret)
ret = Arrays.binarySearch(chars, 'i')
assertEquals(-7, ret)
}
@Test def binarySearch_with_start_and_end_index_on_Double(): Unit = {
val doubles: Array[Double] = Array(0.1, 0.2, 0.3, 0.5, 0.6, 0.7)
var ret = Arrays.binarySearch(doubles, 0, 6, 0.5)
assertEquals(3, ret)
ret = Arrays.binarySearch(doubles, 0, 6, 0.0)
assertEquals(-1, ret)
ret = Arrays.binarySearch(doubles, 0, 6, 0.4)
assertEquals(-4, ret)
ret = Arrays.binarySearch(doubles, 0, 6, 0.8)
assertEquals(-7, ret)
}
@Test def binarySearch_on_Double(): Unit = {
val doubles: Array[Double] = Array(0.1, 0.2, 0.3, 0.5, 0.6, 0.7)
var ret = Arrays.binarySearch(doubles, 0.5)
assertEquals(3, ret)
ret = Arrays.binarySearch(doubles, 0.0)
assertEquals(-1, ret)
ret = Arrays.binarySearch(doubles, 0.4)
assertEquals(-4, ret)
ret = Arrays.binarySearch(doubles, 0.8)
assertEquals(-7, ret)
}
@Test def binarySearch_with_start_and_end_index_on_Float(): Unit = {
val floats: Array[Float] = Array(0.1f, 0.2f, 0.3f, 0.5f, 0.6f, 0.7f)
var ret = Arrays.binarySearch(floats, 0, 6, 0.5f)
assertEquals(3, ret)
ret = Arrays.binarySearch(floats, 0, 6, 0.0f)
assertEquals(-1, ret)
ret = Arrays.binarySearch(floats, 0, 6, 0.4f)
assertEquals(-4, ret)
ret = Arrays.binarySearch(floats, 0, 6, 0.8f)
assertEquals(-7, ret)
}
@Test def binarySearch_on_Float(): Unit = {
val floats: Array[Float] = Array(0.1f, 0.2f, 0.3f, 0.5f, 0.6f, 0.7f)
var ret = Arrays.binarySearch(floats, 0.5f)
assertEquals(3, ret)
ret = Arrays.binarySearch(floats, 0.0f)
assertEquals(-1, ret)
ret = Arrays.binarySearch(floats, 0.4f)
assertEquals(-4, ret)
ret = Arrays.binarySearch(floats, 0.8f)
assertEquals(-7, ret)
}
@Test def binarySearch_with_start_and_end_index_on_AnyRef(): Unit = {
val strings: Array[AnyRef] = Array("aa", "abc", "cc", "zz", "zzzs", "zzzt")
var ret = Arrays.binarySearch(strings, 0, 6, "zz")
assertEquals(3, ret)
ret = Arrays.binarySearch(strings, 0, 6, "a")
assertEquals(-1, ret)
ret = Arrays.binarySearch(strings, 0, 6, "cd")
assertEquals(-4, ret)
ret = Arrays.binarySearch(strings, 0, 6, "zzzz")
assertEquals(-7, ret)
}
@Test def binarySearch_on_AnyRef(): Unit = {
val strings: Array[AnyRef] = Array("aa", "abc", "cc", "zz", "zzzs", "zzzt")
var ret = Arrays.binarySearch(strings, "zz")
assertEquals(3, ret)
ret = Arrays.binarySearch(strings, "a")
assertEquals(-1, ret)
ret = Arrays.binarySearch(strings, "cd")
assertEquals(-4, ret)
ret = Arrays.binarySearch(strings, "zzzz")
assertEquals(-7, ret)
}
@Test def should_check_ranges_of_input_to_binarySearch(): Unit = {
// Runs `block` and requires that it throws an exception matched by `expected`.
// If no exception is thrown, the assertEquals("thrown", "exception") below
// fails; if the wrong exception is thrown, `catchAll` fails with its class name.
def expectException(block: => Unit)(expected: PartialFunction[Throwable, Unit]): Unit = {
val catchAll: PartialFunction[Throwable, Unit] = {
case e: Throwable => assertEquals("not thrown", e.getClass.getName)
}
try {
block
// Reached only when `block` did NOT throw — deliberately failing assertion.
assertEquals("thrown", "exception")
} catch expected orElse catchAll
}
val array = Array(0, 1, 3, 4)
expectException({ Arrays.binarySearch(array, 3, 2, 2) }) {
case exception: IllegalArgumentException =>
assertEquals("fromIndex(3) > toIndex(2)", exception.getMessage)
}
// start/end comparison is made before index ranges checks
expectException({ Arrays.binarySearch(array, 7, 5, 2) }) {
case exception: IllegalArgumentException =>
assertEquals("fromIndex(7) > toIndex(5)", exception.getMessage)
}
expectException({ Arrays.binarySearch(array, -1, 4, 2) }) {
case exception: ArrayIndexOutOfBoundsException =>
assertEquals("Array index out of range: -1", exception.getMessage)
}
expectException({ Arrays.binarySearch(array, 0, 5, 2) }) {
case exception: ArrayIndexOutOfBoundsException =>
assertEquals("Array index out of range: 5", exception.getMessage)
}
}
@Test def copyOf_Int(): Unit = {
val ints: Array[Int] = Array(1, 2, 3)
val intscopy = Arrays.copyOf(ints, 5)
assertArrayEquals(Array(1, 2, 3, 0, 0), intscopy)
}
@Test def copyOf_Long(): Unit = {
val longs: Array[Long] = Array(1, 2, 3)
val longscopy = Arrays.copyOf(longs, 5)
assertArrayEquals(Array[Long](1, 2, 3, 0, 0), longscopy)
}
@Test def copyOf_Short(): Unit = {
val shorts: Array[Short] = Array(1, 2, 3)
val shortscopy = Arrays.copyOf(shorts, 5)
assertArrayEquals(Array[Short](1, 2, 3, 0, 0), shortscopy)
}
@Test def copyOf_Byte(): Unit = {
val bytes: Array[Byte] = Array(42, 43, 44)
// Renamed from `floatscopy`: this is a copy of a byte array.
val bytescopy = Arrays.copyOf(bytes, 5)
assertArrayEquals(Array[Byte](42, 43, 44, 0, 0), bytescopy)
}
@Test def copyOf_Char(): Unit = {
val chars: Array[Char] = Array('a', 'b', '0')
val charscopy = Arrays.copyOf(chars, 5)
// Check every slot, not just one padding slot: the original elements must be
// preserved and all extra slots filled with the Char zero value '\u0000'.
assertEquals('a', charscopy(0))
assertEquals('b', charscopy(1))
assertEquals('0', charscopy(2))
assertEquals(0.toChar, charscopy(3))
assertEquals(0.toChar, charscopy(4))
}
@Test def copyOf_Double(): Unit = {
val doubles: Array[Double] = Array(0.1, 0.2, 0.3)
val doublescopy = Arrays.copyOf(doubles, 5)
assertArrayEquals(Array[Double](0.1, 0.2, 0.3, 0, 0), doublescopy)
}
@Test def copyOf_Float(): Unit = {
val floats: Array[Float] = Array(0.1f, 0.2f, 0.3f)
val floatscopy = Arrays.copyOf(floats, 5)
assertArrayEquals(Array[Float](0.1f, 0.2f, 0.3f, 0f, 0f), floatscopy)
}
@Test def copyOf_Boolean(): Unit = {
val bools: Array[Boolean] = Array(false, true, false)
val boolscopy = Arrays.copyOf(bools, 5)
assertArrayEquals(Array[Boolean](false, true, false, false, false), boolscopy)
}
@Test def copyOf_AnyRef(): Unit = {
val anyrefs: Array[AnyRef] = Array("a", "b", "c")
val anyrefscopy = Arrays.copyOf(anyrefs, 5)
assertEquals(classOf[Array[AnyRef]], anyrefscopy.getClass())
assertArrayEquals(Array[AnyRef]("a", "b", "c", null, null), anyrefscopy)
val sequences: Array[CharSequence] = Array("a", "b", "c")
val sequencescopy = Arrays.copyOf(sequences, 2)
assertEquals(classOf[Array[CharSequence]], sequencescopy.getClass())
assertArrayEquals(Array[CharSequence]("a", "b"), sequencescopy)
}
@Test def copyOf_AnyRef_with_change_of_type(): Unit = {
class A
case class B(x: Int) extends A
val bs: Array[AnyRef] = Array(B(1), B(2), B(3))
val bscopyAsA = Arrays.copyOf(bs, 5, classOf[Array[A]])
assertEquals(classOf[Array[A]], bscopyAsA.getClass())
assertArrayEquals(Array[A](B(1), B(2), B(3), null, null), bscopyAsA)
}
@Test def copyOfRange_AnyRef(): Unit = {
val anyrefs: Array[AnyRef] = Array("a", "b", "c", "d", "e")
val anyrefscopy = Arrays.copyOfRange(anyrefs, 2, 4)
assertEquals(classOf[Array[AnyRef]], anyrefscopy.getClass())
assertArrayEquals(Array[AnyRef]("c", "d"), anyrefscopy)
val sequences: Array[CharSequence] = Array("a", "b", "c", "d", "e")
val sequencescopy = Arrays.copyOfRange(sequences, 1, 5)
assertEquals(classOf[Array[CharSequence]], sequencescopy.getClass())
assertArrayEquals(Array[CharSequence]("b", "c", "d", "e"), sequencescopy)
}
@Test def copyOfRange_AnyRef_with_change_of_type(): Unit = {
class A
case class B(x: Int) extends A
val bs: Array[B] = Array(B(1), B(2), B(3), B(4), B(5))
val bscopyAsA = Arrays.copyOfRange(bs, 2, 4, classOf[Array[A]])
assertEquals(classOf[Array[A]], bscopyAsA.getClass())
assertArrayEquals(Array[A](B(3), B(4)), bscopyAsA)
}
@Test def hashCode_Boolean(): Unit = {
assertEquals(0, Arrays.hashCode(null: Array[Boolean]))
assertEquals(1, Arrays.hashCode(Array[Boolean]()))
assertEquals(1268, Arrays.hashCode(Array[Boolean](false)))
assertEquals(40359, Arrays.hashCode(Array[Boolean](true, false)))
}
@Test def hashCode_Chars(): Unit = {
assertEquals(0, Arrays.hashCode(null: Array[Char]))
assertEquals(1, Arrays.hashCode(Array[Char]()))
assertEquals(128, Arrays.hashCode(Array[Char]('a')))
assertEquals(4068, Arrays.hashCode(Array[Char]('c', '&')))
assertEquals(74792, Arrays.hashCode(Array[Char]('-', '5', 'q')))
assertEquals(88584920, Arrays.hashCode(Array[Char]('.', ' ', '\\u4323', 'v', '~')))
}
@Test def hashCode_Bytes(): Unit = {
assertEquals(0, Arrays.hashCode(null: Array[Byte]))
assertEquals(1, Arrays.hashCode(Array[Byte]()))
assertEquals(32, Arrays.hashCode(Array[Byte](1)))
assertEquals(1053, Arrays.hashCode(Array[Byte](7, -125)))
assertEquals(32719, Arrays.hashCode(Array[Byte](3, 0, 45)))
assertEquals(30065878, Arrays.hashCode(Array[Byte](0, 45, 100, 1, 1)))
}
@Test def hashCode_Shorts(): Unit = {
assertEquals(0, Arrays.hashCode(null: Array[Short]))
assertEquals(1, Arrays.hashCode(Array[Short]()))
assertEquals(32, Arrays.hashCode(Array[Short](1)))
assertEquals(1053, Arrays.hashCode(Array[Short](7, -125)))
assertEquals(37208, Arrays.hashCode(Array[Short](3, 0, 4534)))
assertEquals(30065878, Arrays.hashCode(Array[Short](0, 45, 100, 1, 1)))
}
@Test def hashCode_Ints(): Unit = {
assertEquals(0, Arrays.hashCode(null: Array[Int]))
assertEquals(1, Arrays.hashCode(Array[Int]()))
assertEquals(32, Arrays.hashCode(Array[Int](1)))
assertEquals(1053, Arrays.hashCode(Array[Int](7, -125)))
assertEquals(37208, Arrays.hashCode(Array[Int](3, 0, 4534)))
assertEquals(-1215441431, Arrays.hashCode(Array[Int](0, 45, 100, 1, 1, Int.MaxValue)))
}
@Test def hashCode_Longs(): Unit = {
assertEquals(0, Arrays.hashCode(null: Array[Long]))
assertEquals(1, Arrays.hashCode(Array[Long]()))
assertEquals(32, Arrays.hashCode(Array[Long](1L)))
assertEquals(1302, Arrays.hashCode(Array[Long](7L, -125L)))
assertEquals(37208, Arrays.hashCode(Array[Long](3L, 0L, 4534L)))
assertEquals(-1215441431, Arrays.hashCode(Array[Long](0L, 45L, 100L, 1L, 1L, Int.MaxValue)))
assertEquals(-1952288964, Arrays.hashCode(Array[Long](0L, 34573566354545L, 100L, 1L, 1L, Int.MaxValue)))
}
@Test def hashCode_Floats(): Unit = {
assertEquals(0, Arrays.hashCode(null: Array[Float]))
assertEquals(1, Arrays.hashCode(Array[Float]()))
if (!executingInJVM) {
assertEquals(32, Arrays.hashCode(Array[Float](1f)))
assertEquals(-2082726591, Arrays.hashCode(Array[Float](7.2f, -125.2f)))
assertEquals(-1891539602, Arrays.hashCode(Array[Float](302.1f, 0.0f, 4534f)))
assertEquals(-1591440133, Arrays.hashCode(Array[Float](0.0f, 45f, -100f, 1.1f, -1f, 3567f)))
}
}
@Test def hashCode_Doubles(): Unit = {
assertEquals(0, Arrays.hashCode(null: Array[Double]))
assertEquals(1, Arrays.hashCode(Array[Double]()))
if (!executingInJVM) {
assertEquals(-1503133662, Arrays.hashCode(Array[Double](1.1)))
assertEquals(-2075734168, Arrays.hashCode(Array[Double](7.3, -125.23)))
assertEquals(-557562564, Arrays.hashCode(Array[Double](3.9, 0.2, 4534.9)))
assertEquals(-1750344582, Arrays.hashCode(Array[Double](0.1, 45.1, -100.0, 1.1, 1.7)))
assertEquals(-1764602991, Arrays.hashCode(Array[Double](0.0, 34573566354545.9, 100.2, 1.1, 1.2, Int.MaxValue)))
}
}
@Test def hashCode_AnyRef(): Unit = {
assertEquals(0, Arrays.hashCode(null: Array[AnyRef]))
assertEquals(1, Arrays.hashCode(Array[AnyRef]()))
assertEquals(961, Arrays.hashCode(Array[AnyRef](null, null)))
assertEquals(126046, Arrays.hashCode(Array[AnyRef]("a", "b", null)))
assertEquals(-1237252983, Arrays.hashCode(Array[AnyRef](null, "a", "b", null, "fooooo")))
}
@Test def deepHashCode(): Unit = {
assertEquals(0, Arrays.deepHashCode(null: Array[AnyRef]))
assertEquals(1, Arrays.deepHashCode(Array[AnyRef]()))
assertEquals(961, Arrays.deepHashCode(Array[AnyRef](null, null)))
assertEquals(126046, Arrays.deepHashCode(Array[AnyRef]("a", "b", null)))
assertEquals(-1237252983, Arrays.deepHashCode(Array[AnyRef](null, "a", "b", null, "fooooo")))
assertEquals(962, Arrays.deepHashCode(Array[AnyRef](null, Array[AnyRef]())))
assertEquals(993, Arrays.deepHashCode(Array[AnyRef](Array[AnyRef](), Array[AnyRef]())))
assertEquals(63, Arrays.deepHashCode(Array[AnyRef](Array[AnyRef](Array[AnyRef]()))))
assertEquals(63, Arrays.deepHashCode(Array[AnyRef](Array[AnyRef](Array[Int]()))))
assertEquals(63, Arrays.deepHashCode(Array[AnyRef](Array[AnyRef](Array[Double]()))))
assertEquals(94, Arrays.deepHashCode(Array[AnyRef](Array[AnyRef](Array[Int](1)))))
assertEquals(94, Arrays.deepHashCode(Array[AnyRef](Array[AnyRef](Array[AnyRef](1.asInstanceOf[AnyRef])))))
}
@Test def equals_Booleans(): Unit = {
val a1 = Array(true, false)
assertTrue(Arrays.equals(a1, a1))
assertTrue(Arrays.equals(a1, Array(true, false)))
assertFalse(Arrays.equals(a1, Array(true)))
assertFalse(Arrays.equals(a1, Array(false)))
assertFalse(Arrays.equals(a1, Array[Boolean]()))
assertFalse(Arrays.equals(a1, Array(false, true)))
assertFalse(Arrays.equals(a1, Array(false, true, false)))
}
@Test def equals_Bytes(): Unit = {
val a1 = Array[Byte](1, -7, 10)
assertTrue(Arrays.equals(null: Array[Byte], null: Array[Byte]))
assertTrue(Arrays.equals(a1, a1))
assertTrue(Arrays.equals(a1, Array[Byte](1, -7, 10)))
assertFalse(Arrays.equals(a1, null))
assertFalse(Arrays.equals(a1, Array[Byte](3)))
assertFalse(Arrays.equals(a1, Array[Byte](1)))
assertFalse(Arrays.equals(a1, Array[Byte]()))
assertFalse(Arrays.equals(a1, Array[Byte](1, -7, 11)))
assertFalse(Arrays.equals(a1, Array[Byte](1, -7, 11, 20)))
}
@Test def equals_Chars(): Unit = {
val a1 = Array[Char]('a', '0', '-')
assertTrue(Arrays.equals(null: Array[Char], null: Array[Char]))
assertTrue(Arrays.equals(a1, a1))
assertTrue(Arrays.equals(a1, Array[Char]('a', '0', '-')))
assertFalse(Arrays.equals(a1, null))
assertFalse(Arrays.equals(a1, Array[Char]('z')))
assertFalse(Arrays.equals(a1, Array[Char]('a')))
assertFalse(Arrays.equals(a1, Array[Char]()))
assertFalse(Arrays.equals(a1, Array[Char]('a', '0', '+')))
assertFalse(Arrays.equals(a1, Array[Char]('a', '0', '-', 'z')))
}
@Test def equals_Shorts(): Unit = {
val a1 = Array[Short](1, -7, 10)
assertTrue(Arrays.equals(null: Array[Short], null: Array[Short]))
assertTrue(Arrays.equals(a1, a1))
assertTrue(Arrays.equals(a1, Array[Short](1, -7, 10)))
assertFalse(Arrays.equals(a1, null))
assertFalse(Arrays.equals(a1, Array[Short](3)))
assertFalse(Arrays.equals(a1, Array[Short](1)))
assertFalse(Arrays.equals(a1, Array[Short]()))
assertFalse(Arrays.equals(a1, Array[Short](1, -7, 11)))
assertFalse(Arrays.equals(a1, Array[Short](1, -7, 11, 20)))
}
@Test def equals_Ints(): Unit = {
val a1 = Array[Int](1, -7, 10)
assertTrue(Arrays.equals(null: Array[Int], null: Array[Int]))
assertTrue(Arrays.equals(a1, a1))
assertTrue(Arrays.equals(a1, Array[Int](1, -7, 10)))
assertFalse(Arrays.equals(a1, null))
assertFalse(Arrays.equals(a1, Array[Int](3)))
assertFalse(Arrays.equals(a1, Array[Int](1)))
assertFalse(Arrays.equals(a1, Array[Int]()))
assertFalse(Arrays.equals(a1, Array[Int](1, -7, 11)))
assertFalse(Arrays.equals(a1, Array[Int](1, -7, 11, 20)))
}
@Test def equals_Longs(): Unit = {
val a1 = Array[Long](1L, -7L, 10L)
assertTrue(Arrays.equals(null: Array[Long], null: Array[Long]))
assertTrue(Arrays.equals(a1, a1))
assertTrue(Arrays.equals(a1, Array[Long](1L, -7L, 10L)))
assertFalse(Arrays.equals(a1, null))
assertFalse(Arrays.equals(a1, Array[Long](3L)))
assertFalse(Arrays.equals(a1, Array[Long](1L)))
assertFalse(Arrays.equals(a1, Array[Long]()))
assertFalse(Arrays.equals(a1, Array[Long](1L, -7L, 11L)))
assertFalse(Arrays.equals(a1, Array[Long](1L, -7L, 11L, 20L)))
}
@Test def equals_Floats(): Unit = {
val a1 = Array[Float](1.1f, -7.4f, 10.0f)
assertTrue(Arrays.equals(null: Array[Float], null: Array[Float]))
assertTrue(Arrays.equals(a1, a1))
assertTrue(Arrays.equals(a1, Array[Float](1.1f, -7.4f, 10.0f)))
assertFalse(Arrays.equals(a1, null))
assertFalse(Arrays.equals(a1, Array[Float](3.0f)))
assertFalse(Arrays.equals(a1, Array[Float](1.1f)))
assertFalse(Arrays.equals(a1, Array[Float]()))
assertFalse(Arrays.equals(a1, Array[Float](1.1f, -7.4f, 11.0f)))
assertFalse(Arrays.equals(a1, Array[Float](1.1f, -7.4f, 10.0f, 20.0f)))
}
@Test def equals_Doubles(): Unit = {
val a1 = Array[Double](1.1, -7.4, 10.0)
assertTrue(Arrays.equals(null: Array[Double], null: Array[Double]))
assertTrue(Arrays.equals(a1, a1))
assertTrue(Arrays.equals(a1, Array[Double](1.1, -7.4, 10.0)))
assertFalse(Arrays.equals(a1, null))
assertFalse(Arrays.equals(a1, Array[Double](3.0)))
assertFalse(Arrays.equals(a1, Array[Double](1.1)))
assertFalse(Arrays.equals(a1, Array[Double]()))
assertFalse(Arrays.equals(a1, Array[Double](1.1, -7.4, 11.0)))
assertFalse(Arrays.equals(a1, Array[Double](1.1, -7.4, 10.0, 20.0)))
}
@Test def equals_AnyRefs(): Unit = {
// scalastyle:off equals.hash.code
class A(private val x: Int) {
override def equals(that: Any): Boolean = that match {
case that: A => this.x == that.x
case _ => false
}
}
// scalastyle:on equals.hash.code
def A(x: Int): A = new A(x)
val a1 = Array[AnyRef](A(1), A(-7), A(10))
assertTrue(Arrays.equals(null: Array[AnyRef], null: Array[AnyRef]))
assertTrue(Arrays.equals(a1, a1))
assertTrue(Arrays.equals(a1, Array[AnyRef](A(1), A(-7), A(10))))
assertFalse(Arrays.equals(a1, null))
assertFalse(Arrays.equals(a1, Array[AnyRef](A(3))))
assertFalse(Arrays.equals(a1, Array[AnyRef](A(1))))
assertFalse(Arrays.equals(a1, Array[AnyRef]()))
assertFalse(Arrays.equals(a1, Array[AnyRef](A(1), null, A(11))))
assertFalse(Arrays.equals(a1, Array[AnyRef](A(1), A(-7), A(11), A(20))))
}
@Test def deepEquals(): Unit = {
assertTrue(Arrays.deepEquals(
null: Array[AnyRef],
null: Array[AnyRef]))
assertTrue(Arrays.deepEquals(
Array[AnyRef](),
Array[AnyRef]()))
assertTrue(Arrays.deepEquals(
Array[AnyRef](null, null),
Array[AnyRef](null, null)))
assertTrue(Arrays.deepEquals(
Array[AnyRef]("a", "b", null),
Array[AnyRef]("a", "b", null)))
assertTrue(Arrays.deepEquals(
Array[AnyRef](null, "a", "b", null, "fooooo"),
Array[AnyRef](null, "a", "b", null, "fooooo")))
assertTrue(Arrays.deepEquals(
Array[AnyRef](null, Array[AnyRef]()),
Array[AnyRef](null, Array[AnyRef]())))
assertTrue(Arrays.deepEquals(
Array[AnyRef](Array[AnyRef](), Array[AnyRef]()),
Array[AnyRef](Array[AnyRef](), Array[AnyRef]())))
assertTrue(Arrays.deepEquals(
Array[AnyRef](Array[AnyRef](Array[AnyRef]())),
Array[AnyRef](Array[AnyRef](Array[AnyRef]()))))
assertTrue(Arrays.deepEquals(
Array[AnyRef](Array[AnyRef](Array[Int]())),
Array[AnyRef](Array[AnyRef](Array[Int]()))))
assertTrue(Arrays.deepEquals(
Array[AnyRef](Array[AnyRef](Array[Double]())),
Array[AnyRef](Array[AnyRef](Array[Double]()))))
assertTrue(Arrays.deepEquals(
Array[AnyRef](Array[AnyRef](Array[Int](1))),
Array[AnyRef](Array[AnyRef](Array[Int](1)))))
assertTrue(Arrays.deepEquals(
Array[AnyRef](Array[AnyRef](Array[AnyRef](1.asInstanceOf[AnyRef]))),
Array[AnyRef](Array[AnyRef](Array[AnyRef](1.asInstanceOf[AnyRef])))))
assertFalse(Arrays.deepEquals(
null: Array[AnyRef],
Array[AnyRef]()))
assertFalse(Arrays.deepEquals(
Array[AnyRef](),
null: Array[AnyRef]))
assertFalse(Arrays.deepEquals(
Array[AnyRef](Array[AnyRef](), null),
Array[AnyRef](null, null)))
assertFalse(Arrays.deepEquals(
Array[AnyRef](null, Array[AnyRef]()),
Array[AnyRef](null, null)))
assertFalse(Arrays.deepEquals(
Array[AnyRef]("a", "b", null),
Array[AnyRef]("a", "c", null)))
assertFalse(Arrays.deepEquals(
Array[AnyRef](null, "a", "b", null, "fooooo"),
Array[AnyRef](null, "a", "b", "c", "fooooo")))
assertFalse(Arrays.deepEquals(
Array[AnyRef](null, Array[AnyRef]()),
Array[AnyRef](null, Array[AnyRef](null))))
assertFalse(Arrays.deepEquals(
Array[AnyRef](Array[AnyRef](), Array[AnyRef]()),
Array[AnyRef](Array[AnyRef](), Array[AnyRef](null))))
assertFalse(Arrays.deepEquals(
Array[AnyRef](Array[AnyRef](Array[AnyRef]())),
Array[AnyRef](Array[AnyRef](Array[AnyRef](null)))))
assertFalse(Arrays.deepEquals(
Array[AnyRef](Array[AnyRef](Array[Int]())),
Array[AnyRef](Array[AnyRef](Array[Int](1)))))
assertFalse(Arrays.deepEquals(
Array[AnyRef](Array[AnyRef](Array[Double]())),
Array[AnyRef](Array[AnyRef](Array[Double](1.0)))))
assertFalse(Arrays.deepEquals(
Array[AnyRef](Array[AnyRef](Array[Int](1))),
Array[AnyRef](Array[AnyRef](Array[Int](2)))))
assertFalse(Arrays.deepEquals(
Array[AnyRef](Array[AnyRef](Array[AnyRef](1.asInstanceOf[AnyRef]))),
Array[AnyRef](Array[AnyRef](Array[AnyRef](2.asInstanceOf[AnyRef])))))
}
@Test def toString_Long(): Unit = {
assertEquals("null", Arrays.toString(null: Array[Long]))
assertEquals("[]", Arrays.toString(Array[Long]()))
assertEquals("[0]", Arrays.toString(Array[Long](0L)))
assertEquals("[1]", Arrays.toString(Array[Long](1L)))
assertEquals("[2, 3]", Arrays.toString(Array[Long](2L, 3)))
assertEquals("[1, 2, 3, 4, 5]", Arrays.toString(Array[Long](1L, 2L, 3L, 4L, 5L)))
assertEquals("[1, -2, 3, 9223372036854775807]", Arrays.toString(Array[Long](1L, -2L, 3L, Long.MaxValue)))
}
@Test def toString_Int(): Unit = {
assertEquals("null", Arrays.toString(null: Array[Int]))
assertEquals("[]", Arrays.toString(Array[Int]()))
assertEquals("[0]", Arrays.toString(Array[Int](0)))
assertEquals("[1]", Arrays.toString(Array[Int](1)))
assertEquals("[2, 3]", Arrays.toString(Array[Int](2, 3)))
assertEquals("[1, 2, 3, 4, 5]", Arrays.toString(Array[Int](1, 2, 3, 4, 5)))
assertEquals("[1, -2, 3, 2147483647]", Arrays.toString(Array[Int](1, -2, 3, Int.MaxValue)))
}
@Test def toString_Short(): Unit = {
assertEquals("null", Arrays.toString(null: Array[Short]))
assertEquals("[]", Arrays.toString(Array[Short]()))
assertEquals("[0]", Arrays.toString(Array[Short](0)))
assertEquals("[1]", Arrays.toString(Array[Short](1)))
assertEquals("[2, 3]", Arrays.toString(Array[Short](2, 3)))
assertEquals("[1, 2, 3, 4, 5]", Arrays.toString(Array[Short](1, 2, 3, 4, 5)))
assertEquals("[1, -2, 3, 32767]", Arrays.toString(Array[Short](1, -2, 3, Short.MaxValue)))
}
@Test def toString_Byte(): Unit = {
assertEquals("null", Arrays.toString(null: Array[Byte]))
assertEquals("[]", Arrays.toString(Array[Byte]()))
assertEquals("[0]", Arrays.toString(Array[Byte](0)))
assertEquals("[1]", Arrays.toString(Array[Byte](1)))
assertEquals("[2, 3]", Arrays.toString(Array[Byte](2, 3)))
assertEquals("[1, 2, 3, 4, 5]", Arrays.toString(Array[Byte](1, 2, 3, 4, 5)))
assertEquals("[1, -2, 3, 127]", Arrays.toString(Array[Byte](1, -2, 3, Byte.MaxValue)))
}
@Test def toString_Boolean(): Unit = {
assertEquals("null", Arrays.toString(null: Array[Boolean]))
assertEquals("[]", Arrays.toString(Array[Boolean]()))
assertEquals("[true]", Arrays.toString(Array[Boolean](true)))
assertEquals("[false]", Arrays.toString(Array[Boolean](false)))
assertEquals("[true, false]", Arrays.toString(Array[Boolean](true, false)))
assertEquals("[true, true, false, false]", Arrays.toString(Array[Boolean](true, true, false, false)))
}
@Test def toString_Float(): Unit = {
assumeFalse("Assumes Float.toString JS semantics.", executingInJVM)
assertEquals("null", Arrays.toString(null: Array[Float]))
assertEquals("[]", Arrays.toString(Array[Float]()))
assertEquals("[0]", Arrays.toString(Array[Float](0.0f)))
assertEquals("[1.100000023841858]", Arrays.toString(Array[Float](1.1f)))
assertEquals("[2.200000047683716, 3]", Arrays.toString(Array[Float](2.2f, 3f)))
assertEquals("[1, 2, 3, 4, 5]", Arrays.toString(Array[Float](1f, 2f, 3f, 4f, 5f)))
assertEquals("[1, -2, 3, 3.4028234663852886e+38]", Arrays.toString(Array[Float](1f, -2f, 3f, Float.MaxValue)))
}
@Test def toString_Double(): Unit = {
assumeFalse("Assumes Double.toString JS semantics.", executingInJVM)
assertEquals("null", Arrays.toString(null: Array[Double]))
assertEquals("[]", Arrays.toString(Array[Double]()))
assertEquals("[0]", Arrays.toString(Array[Double](0.0d)))
assertEquals("[1.1]", Arrays.toString(Array[Double](1.1d)))
assertEquals("[2.2, 3]", Arrays.toString(Array[Double](2.2d, 3d)))
assertEquals("[1, 2, 3, 4, 5]", Arrays.toString(Array[Double](1d, 2d, 3d, 4d, 5d)))
assertEquals("[1, -2, 3, 1.7976931348623157e+308]",
Arrays.toString(Array[Double](1d, -2d, 3d, Double.MaxValue)))
}
@Test def toString_AnyRef(): Unit = {
class C(num: Int) {
override def toString: String = s"C($num)"
}
assertEquals("null", Arrays.toString(null: Array[AnyRef]))
assertEquals("[]", Arrays.toString(Array[AnyRef]()))
assertEquals("[abc]", Arrays.toString(Array[AnyRef]("abc")))
assertEquals("[a, b, c]", Arrays.toString(Array[AnyRef]("a", "b", "c")))
assertEquals("[C(1)]", Arrays.toString(Array[AnyRef](new C(1))))
assertEquals("[C(1), abc, 1, null]", Arrays.toString(Array[AnyRef](new C(1), "abc", Int.box(1), null)))
}
@Test def deepToString(): Unit = {
assertEquals("null", Arrays.deepToString(null: Array[AnyRef]))
assertEquals("[abc]", Arrays.deepToString(Array[AnyRef]("abc")))
assertEquals("[a, b, c]", Arrays.deepToString(Array[AnyRef]("a", "b", "c")))
assertEquals("[[1, 2, 3]]", Arrays.deepToString(Array[AnyRef](Array[Int](1, 2, 3))))
assertEquals("[[1, 2, 3], [4, 5, 6]]",
Arrays.deepToString(Array[AnyRef](Array[Int](1, 2, 3), Array[Int](4, 5, 6))))
assertEquals("[[]]", Arrays.deepToString(Array[AnyRef](Array[AnyRef]())))
assertEquals("[[[]]]", Arrays.deepToString(Array[AnyRef](Array[AnyRef](Array[AnyRef]()))))
assertEquals("[[[[1, 2, 3]]], [4, 5, 6]]", Arrays.deepToString(
Array[AnyRef](Array[AnyRef](Array[AnyRef](Array[Int](1, 2, 3))), Array[Int](4, 5, 6))))
val recArr = Array[AnyRef](null, null)
recArr(0) = recArr
assertEquals("[[...], null]", Arrays.deepToString(recArr))
assertEquals("[[[...], null]]", Arrays.deepToString(Array[AnyRef](recArr)))
assertEquals("[[[...], null]]", Arrays.deepToString(Array[AnyRef](recArr)))
recArr(1) = Array[AnyRef](null, Array[AnyRef](null, recArr, Array[AnyRef](recArr)))
assertEquals("[[...], [null, [null, [...], [[...]]]]]", Arrays.deepToString(recArr))
}
}
|
japgolly/scala-js
|
test-suite/shared/src/test/scala/org/scalajs/testsuite/javalib/util/ArraysTest.scala
|
Scala
|
bsd-3-clause
| 38,585 |
import scala.tools.partest.SessionTest
object Test extends SessionTest {
// Expected REPL transcript: pasting a raw file containing a top-level `val`
// must be rejected with "expected class or object definition" and the session
// must report compilation errors. The transcript text must match byte-for-byte.
def session =
s"""|
|scala> :paste -raw $pastie
|Pasting file $pastie...
|$pastie:3: error: expected class or object definition
|val nope = 42
|^
|There were compilation errors!
|
|scala> :quit"""
// Companion fixture: same path as this test file, with a .pastie extension.
def pastie = testPath changeExtension "pastie"
}
|
felixmulder/scala
|
test/files/run/repl-paste-raw-c.scala
|
Scala
|
bsd-3-clause
| 357 |
package delta.util
import delta._
import delta.write._
import scala.concurrent.{ ExecutionContext, Future }
import scala.util.control.NonFatal
import scuff.concurrent.Threads.PiggyBack
/**
* [[delta.write.Repository]] wrapper for non-Event-source
* repositories, while still publishing events.
*/
abstract class PublishingRepository[ID, T <: AnyRef, EVT](
val impl: Repository[ID, T] with ImmutableEntity { type Loaded = (T, Revision) },
publishCtx: ExecutionContext)
extends Repository[ID, (T, List[EVT])]
with ImmutableEntity {
// Subclass hook: how events are actually published (e.g. to a message bus).
protected def publish(id: ID, revision: Revision, events: List[EVT], metadata: Metadata): Unit
// Best-effort publishing: a failure in `publish` must not fail the repository
// operation itself, so non-fatal errors are reported to `publishCtx` instead
// of being rethrown. Fatal errors still propagate.
private def publishEvents(id: ID, revision: Revision, events: List[EVT], metadata: Metadata): Unit = {
if (events.nonEmpty) try publish(id, revision, events, metadata) catch {
case NonFatal(e) => publishCtx.reportFailure(e)
}
}
type Loaded = impl.Loaded
def revision(loaded: Loaded): Int = loaded._2
def exists(id: ID): Future[Option[Revision]] = impl.exists(id)
def load(id: ID): Future[Loaded] = impl.load(id)
protected def update[R](
updateThunk: Loaded => Future[UT[R]],
id: ID, expectedRevision: Option[Revision])(
implicit
metadata: Metadata): Future[UM[R]] = {
// Captured by the update thunk below; @volatile because the thunk may run on
// a different thread (PiggyBack) than the one reading it in `foreach`. The
// write happens-before `updated` completes, so the read in `foreach` is safe.
@volatile var toPublish: List[EVT] = Nil
val updated = impl.update(id, expectedRevision) { loaded =>
updateThunk(loaded).map {
case (result, events) =>
toPublish = events
result
}(PiggyBack)
}
// Publish only after the underlying update succeeds; the callback receives
// the new revision number produced by `impl.update`.
updated.foreach(publishEvents(id, _, toPublish, metadata))(publishCtx)
updated
}
def insert(id: => ID, entity: Entity)(
implicit
metadata: Metadata): Future[ID] = {
val (state, events) = entity
val inserted = impl.insert(id, state)
// A fresh insert is always revision 0; publish only on success.
inserted.foreach(publishEvents(_, 0, events, metadata))(publishCtx)
inserted
}
}
|
nilskp/delta
|
src/main/scala/delta/util/PublishingRepository.scala
|
Scala
|
mit
| 1,867 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.bwsw.common.jdbc
import com.bwsw.sj.common.config.SettingsUtils
import com.bwsw.sj.common.dal.repository.ConnectionRepository
import com.typesafe.scalalogging.Logger
import scaldi.Injectable.inject
import scaldi.Injector
/**
* Build [[JdbcClient]]. You can not create [[JdbcClient]] directly
*/
object JdbcClientBuilder {
  private val logger = Logger(this.getClass)

  // Connection parameters, populated through the fluent setters below.
  private var hosts: Option[Array[String]] = None
  private var driver: Option[String] = None
  private var username: Option[String] = None
  private var password: Option[String] = None
  private var database: Option[String] = None
  private var table: Option[String] = None

  /** Fail fast when a mandatory string field is absent or empty. */
  private def requireDeclared(field: Option[String], name: String): Unit = field match {
    case Some("") | None => throw new RuntimeException(s"$name field must be declared.")
    case _ =>
  }

  /** Warn (but proceed) when an optional string field is absent or empty. */
  private def warnIfMissing(field: Option[String], name: String): Unit = field match {
    case Some("") | None => logger.warn(s"$name is not declared. It can lead to errors in the following.")
    case _ =>
  }

  /**
   * Validate the builder state before constructing a client.
   *
   * Driver, username, password and hosts are mandatory; database and table
   * only produce warnings when missing. Checks run in the original order, so
   * the first failing mandatory field determines which exception is thrown.
   */
  def buildCheck(): Unit = {
    requireDeclared(driver, "Driver")
    warnIfMissing(database, "Database")
    warnIfMissing(table, "Table")
    requireDeclared(username, "Username")
    requireDeclared(password, "Password")
    if (hosts.isEmpty) throw new RuntimeException("Hosts field must be declared.")
  }

  /**
   * Build a [[JdbcClient]] from the accumulated settings.
   *
   * Calls [[buildCheck]] first, so the `.get` calls below are safe: all
   * mandatory fields are known to be present.
   */
  def build()(implicit injector: Injector): JdbcClient = {
    buildCheck()
    val jdbcClientConnectionData = new JdbcClientConnectionData(
      hosts.get,
      driver.get,
      username.get,
      password.get,
      database,
      table,
      inject[SettingsUtils])
    new JdbcClient(jdbcClientConnectionData, inject[ConnectionRepository].getFileStorage)
  }

  /** Set the JDBC hosts; returns this builder for chaining. */
  def setHosts(hosts: Array[String]): JdbcClientBuilder.type = {
    this.hosts = Option(hosts)
    this
  }

  /** Set the JDBC driver name; returns this builder for chaining. */
  def setDriver(driver: String): JdbcClientBuilder.type = {
    this.driver = Option(driver)
    this
  }

  /** Set the username; returns this builder for chaining. */
  def setUsername(username: String): JdbcClientBuilder.type = {
    this.username = Option(username)
    this
  }

  /** Set the password; returns this builder for chaining. */
  def setPassword(password: String): JdbcClientBuilder.type = {
    this.password = Option(password)
    this
  }

  /** Set the database name; returns this builder for chaining. */
  def setDatabase(database: String): JdbcClientBuilder.type = {
    this.database = Option(database)
    this
  }

  /** Set the table name; returns this builder for chaining. */
  def setTable(table: String): JdbcClientBuilder.type = {
    this.table = Option(table)
    this
  }

  /** Copy all connection fields from an existing connection-data object. */
  def setJdbcClientConnectionData(jdbcClientConnectionData: JdbcClientConnectionData): JdbcClientBuilder.type = {
    hosts = Option(jdbcClientConnectionData.hosts)
    driver = Option(jdbcClientConnectionData.driver)
    username = Option(jdbcClientConnectionData.username)
    password = Option(jdbcClientConnectionData.password)
    database = jdbcClientConnectionData.database
    table = jdbcClientConnectionData.table
    this
  }
}
|
bwsw/sj-platform
|
core/sj-common/src/main/scala/com/bwsw/common/jdbc/JdbcClientBuilder.scala
|
Scala
|
apache-2.0
| 3,804 |
/**
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package rx.lang.scala.schedulers
import scala.concurrent.duration.Duration
import rx.lang.scala.Scheduler
import rx.schedulers
/**
* Provides constructors for `TestScheduler`.
*/
object TestScheduler {
  /** Create a `TestScheduler` wrapping a fresh underlying RxJava test scheduler. */
  def apply(): TestScheduler = {
    new TestScheduler(new schedulers.TestScheduler())
  }
}
/**
* Scheduler with artificial time, useful for testing.
*
* For example, you could test the `Observable.interval` operation using a `TestScheduler` as follows:
*
* {{{
* @Test def testInterval() {
* import org.mockito.Matchers._
* import org.mockito.Mockito._
* import rx.lang.scala.JavaConversions._
*
* val scheduler = TestScheduler()
* val observer = mock(classOf[rx.Observer[Long]])
*
* val o = Observable.interval(1 second, scheduler)
* val sub = o.subscribe(toScalaObserver(new TestObserver(observer)))
*
* verify(observer, never).onNext(0L)
* verify(observer, never).onCompleted()
* verify(observer, never).onError(any(classOf[Throwable]))
*
* scheduler.advanceTimeTo(2 seconds)
*
* val inOrdr = inOrder(observer);
* inOrdr.verify(observer, times(1)).onNext(0L)
* inOrdr.verify(observer, times(1)).onNext(1L)
* inOrdr.verify(observer, never).onNext(2L)
* verify(observer, never).onCompleted()
* verify(observer, never).onError(any(classOf[Throwable]))
*
* sub.unsubscribe();
* scheduler.advanceTimeTo(4 seconds)
* verify(observer, never).onNext(2L)
* verify(observer, times(1)).onCompleted()
* verify(observer, never).onError(any(classOf[Throwable]))
* }
* }}}
*/
class TestScheduler private[scala] (val asJavaScheduler: rx.schedulers.TestScheduler) extends Scheduler {

  /** Advance the scheduler's virtual clock by the given amount of time. */
  def advanceTimeBy(time: Duration): Unit = {
    asJavaScheduler.advanceTimeBy(time.length, time.unit)
  }

  /** Advance the scheduler's virtual clock to the given absolute time. */
  def advanceTimeTo(time: Duration): Unit = {
    asJavaScheduler.advanceTimeTo(time.length, time.unit)
  }

  /** Delegate to the underlying scheduler's `triggerActions`. */
  def triggerActions(): Unit = {
    asJavaScheduler.triggerActions()
  }
}
|
jbripley/RxScala
|
src/main/scala/rx/lang/scala/schedulers/TestScheduler.scala
|
Scala
|
apache-2.0
| 2,529 |
package colossus.extensions.util.bson.reader
import java.nio.ByteBuffer
import scala.collection.mutable.ArrayBuffer
trait Reader[T] {

  /** The buffer this reader consumes; its position advances as bytes are read. */
  def buffer: ByteBuffer

  /** Read a null-terminated (C-style) string from the buffer, decoded as UTF-8. */
  def readCString(): String = readCString(new ArrayBuffer[Byte](16))

  @scala.annotation.tailrec
  private def readCString(array: ArrayBuffer[Byte]): String = {
    val byte = buffer.get()
    if (byte == 0x00)
      new String(array.toArray, "UTF-8")
    else readCString(array += byte)
  }

  /**
   * Read a length-prefixed string: a 4-byte length that includes the trailing
   * null terminator, then the string bytes, then the terminator itself.
   *
   * Fixed: decode explicitly as UTF-8. The previous `new String(array)` used
   * the platform default charset, inconsistent with `readCString` and wrong
   * on platforms where the default is not UTF-8.
   */
  def readString(): String = {
    val size = buffer.getInt()
    val array = new Array[Byte](size - 1)
    buffer.get(array)
    buffer.get() // consume the trailing null terminator
    new String(array, "UTF-8")
  }

  /** Read exactly `num` raw bytes from the buffer. */
  def readBytes(num: Int): Array[Byte] = {
    val array = new Array[Byte](num)
    buffer.get(array)
    array
  }

  /** Parse one complete value of type T from the buffer, if available. */
  def read: Option[T]
}
|
fehmicansaglam/colossus-extensions
|
mongo/src/main/scala/colossus/extensions/util/bson/reader/Reader.scala
|
Scala
|
apache-2.0
| 769 |
package com.twitter.server.lint
import com.twitter.finagle.Stack
import com.twitter.finagle.client.StackClient
import com.twitter.finagle.param.{Label, ProtocolLibrary}
import com.twitter.finagle.util.StackRegistry
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class StackRegistryDuplicatesRuleTest extends FunSuite {

  // Fresh registry per test; the registry name is arbitrary but required.
  def newRegistry(): StackRegistry =
    new StackRegistry { def registryName: String = "client" }

  test("Empty registry doesn't create issues") {
    val registry = newRegistry()
    val rule = StackRegistryDuplicatesRule(registry, Set())
    assert(rule().size == 0)
  }

  test("Single client doesn't create issues") {
    val registry = newRegistry()
    registry.register("localhost:11212", StackClient.newStack, Stack.Params.empty)
    val rule = StackRegistryDuplicatesRule(registry, Set())
    assert(rule().size == 0)
  }

  // Two registrations under the same Label should be flagged once.
  test("Multiple same name clients does create issues") {
    val registry = newRegistry()
    val params = Stack.Params.empty + Label("thrift-test-client")
    registry.register("localhost:1234", StackClient.newStack, params)
    registry.register("localhost:2345", StackClient.newStack, params)
    val rule = StackRegistryDuplicatesRule(registry, Set())
    assert(rule().size == 1)
  }

  // A whitelist that does not mention the duplicated label has no effect.
  test("Multiple same name non-whitelisted clients does create issues") {
    val registry = newRegistry()
    val params = Stack.Params.empty + Label("thrift-test-client")
    registry.register("localhost:1234", StackClient.newStack, params)
    registry.register("localhost:2345", StackClient.newStack, params)
    val rule = StackRegistryDuplicatesRule(registry, Set("special", "very-special"))
    assert(rule().size == 1)
  }

  // Whitelisting the duplicated label suppresses the issue.
  test("Multiple same name whitelisted clients does not create issues") {
    val registry = newRegistry()
    val params = Stack.Params.empty + Label("thrift-test-client")
    registry.register("localhost:1234", StackClient.newStack, params)
    registry.register("localhost:2345", StackClient.newStack, params)
    val rule = StackRegistryDuplicatesRule(registry, Set("special", "thrift-test-client"))
    assert(rule().size == 0)
  }

  // Duplicates using the "memcached" protocol library are not flagged.
  test("Multiple same name memcache clients doesn't create issues") {
    val registry = newRegistry()
    val params = Stack.Params.empty + Label("memcache-test-client") + ProtocolLibrary("memcached")
    registry.register("localhost:11211", StackClient.newStack, params)
    registry.register("localhost:11212", StackClient.newStack, params)
    val rule = StackRegistryDuplicatesRule(registry, Set())
    assert(rule().size == 0)
  }
}
|
BuoyantIO/twitter-server
|
src/test/scala/com/twitter/server/lint/StackRegistryDuplicatesRuleTest.scala
|
Scala
|
apache-2.0
| 2,647 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.api.scala.hadoop.mapreduce
import org.apache.flink.api.scala._
import org.apache.flink.hadoopcompatibility.scala.HadoopInputs
import org.apache.flink.test.testdata.WordCountData
import org.apache.flink.test.util.{TestBaseUtils, JavaProgramTestBase}
import org.apache.flink.util.OperatingSystem
import org.apache.hadoop.fs.Path
import org.apache.hadoop.io.{Text, LongWritable}
import org.apache.hadoop.mapreduce.Job
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat
import org.apache.hadoop.mapreduce.lib.output.{FileOutputFormat, TextOutputFormat}
import org.junit.{Assume, Before}
class WordCountMapreduceITCase extends JavaProgramTestBase {
  // Input file path and job output directory; set in preSubmit.
  protected var textPath: String = null
  protected var resultPath: String = null

  @Before
  def checkOperatingSystem() {
    // FLINK-5164 - see https://wiki.apache.org/hadoop/WindowsProblems
    Assume.assumeTrue("This test can't run successfully on Windows.", !OperatingSystem.isWindows)
  }

  // Write the input text to a temp file and reserve an output directory.
  protected override def preSubmit() {
    textPath = createTempFile("text.txt", WordCountData.TEXT)
    resultPath = getTempDirPath("result")
  }

  // Compare job output against expected counts, skipping files prefixed "." or "_".
  protected override def postSubmit() {
    TestBaseUtils.compareResultsByLinesInMemory(WordCountData.COUNTS,
      resultPath, Array[String](".", "_"))
  }

  // Run the word count twice — via the deprecated readHadoopFile API and via
  // HadoopInputs — verifying output after each run (fresh output dir in between).
  protected def testProgram() {
    internalRun(testDeprecatedAPI = true)
    postSubmit()
    resultPath = getTempDirPath("result2")
    internalRun(testDeprecatedAPI = false)
    postSubmit()
  }

  private def internalRun (testDeprecatedAPI: Boolean): Unit = {
    val env = ExecutionEnvironment.getExecutionEnvironment
    val input =
      if (testDeprecatedAPI) {
        env.readHadoopFile(new TextInputFormat, classOf[LongWritable], classOf[Text], textPath)
      } else {
        env.createInput(HadoopInputs.readHadoopFile(new TextInputFormat, classOf[LongWritable],
          classOf[Text], textPath))
      }
    // Classic word count: tokenize on non-word characters, lower-case, group, sum.
    val counts = input
      .map(_._2.toString)
      .flatMap(_.toLowerCase.split("\\\\W+").filter(_.nonEmpty).map( (_, 1)))
      .groupBy(0)
      .sum(1)
    // Convert to Hadoop writable types for the Hadoop output format.
    val words = counts
      .map( t => (new Text(t._1), new LongWritable(t._2)) )
    val job = Job.getInstance()
    val hadoopOutputFormat = new HadoopOutputFormat[Text, LongWritable](
      new TextOutputFormat[Text, LongWritable],
      job)
    hadoopOutputFormat.getConfiguration.set("mapred.textoutputformat.separator", " ")
    FileOutputFormat.setOutputPath(job, new Path(resultPath))
    words.output(hadoopOutputFormat)
    env.execute("Hadoop Compat WordCount")
  }
}
|
oscarceballos/flink-1.3.2
|
flink-tests/src/test/scala/org/apache/flink/api/scala/hadoop/mapreduce/WordCountMapreduceITCase.scala
|
Scala
|
apache-2.0
| 3,401 |
/**
* Copyright 2011-2016 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.recorder.ui.swing.frame
import java.awt.Color
import scala.collection.mutable
import scala.swing._
import scala.swing.Swing.MatteBorder
import scala.util.Try
import io.gatling.commons.util.StringHelper.RichString
private[swing] object ValidationHelper {

  /**
   * Validation rule attached to a text field.
   *
   * @param condition predicate applied to the field's current text
   * @param successCallback applied to the component when validation passes
   * @param failureCallback applied to the component when validation fails
   * @param alwaysValid when true the field is always reported as valid; the
   *                    callbacks only provide visual feedback
   */
  case class Validator(
    condition: String => Boolean,
    successCallback: Component => Unit = setStandardBorder,
    failureCallback: Component => Unit = setErrorBorder,
    alwaysValid: Boolean = false
  )

  // Lazy vals to avoid unnecessary component creation when they're not needed (e.g. tests)
  private lazy val standardBorder = new TextField().border
  private lazy val errorBorder = MatteBorder(2, 2, 2, 2, Color.red)

  /* Default validators */

  // Valid port numbers span 0 to 65535; the previous upper bound (65536) was
  // off by one and accepted an invalid port.
  private val portRange = 0 to 65535

  /** True if `s` parses as an integer within the valid port range. */
  def isValidPort(s: String) = Try(s.toInt).toOption.exists(portRange.contains)

  /** True if `s` contains at least one non-whitespace character. */
  def isNonEmpty(s: String) = s.trimToOption.isDefined

  private val validPackageNameRegex = """^[a-z_\$][\w\$]*(?:\.[a-z_\$][\w\$]*)*$"""

  /** True for the empty string or a dot-separated lower-case package name. */
  def isValidPackageName(s: String) =
    s.isEmpty ||
      s.matches(validPackageNameRegex)

  /** True if `s` is a legal Java identifier without underscores. */
  def isValidSimpleClassName(s: String) =
    isNonEmpty(s) &&
      !s.contains('_') &&
      Character.isJavaIdentifierStart(s.charAt(0)) &&
      !s.substring(1, s.length).exists(!Character.isJavaIdentifierPart(_))

  /* Default callbacks */
  def setStandardBorder(c: Component): Unit = { c.border = standardBorder }
  def setErrorBorder(c: Component): Unit = { c.border = errorBorder }

  // Registered rules and the last computed validity per field.
  private val validators = mutable.Map.empty[TextField, Validator]
  private val status = mutable.Map.empty[TextField, Boolean]

  /** Register (or replace) the validator used for `textField`. */
  def registerValidator(textField: TextField, validator: Validator): Unit = {
    validators += (textField -> validator)
  }

  /**
   * Re-validate `field`, apply the visual callback, and record its status.
   *
   * @throws IllegalStateException if no validator was registered for `field`
   */
  def updateValidationStatus(field: TextField) = validators.get(field) match {
    case Some(validator) =>
      val isValid = validator.condition(field.text)
      val callback = if (isValid) validator.successCallback else validator.failureCallback
      callback(field)
      status += (field -> (validator.alwaysValid || isValid))
    case None =>
      throw new IllegalStateException(s"No validator registered for component : $field")
  }

  /** Re-validate every registered field and report the overall status. */
  def allValid = {
    // foreach, not map: updateValidationStatus is invoked purely for its side
    // effects; the previous `map` built and discarded a collection.
    validators.keys.foreach(updateValidationStatus)
    validationStatus
  }

  /** True if every validated field currently passes. */
  def validationStatus = status.values.forall(identity)
}
|
GabrielPlassard/gatling
|
gatling-recorder/src/main/scala/io/gatling/recorder/ui/swing/frame/ValidationHelper.scala
|
Scala
|
apache-2.0
| 2,982 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.input
import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, DataOutputStream}
import scala.collection.JavaConverters._
import com.google.common.io.{Closeables, ByteStreams}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.hadoop.mapreduce.{InputSplit, JobContext, RecordReader, TaskAttemptContext}
import org.apache.hadoop.mapreduce.lib.input.{CombineFileInputFormat, CombineFileRecordReader, CombineFileSplit}
import org.apache.spark.deploy.SparkHadoopUtil
/**
* A general format for reading whole files in as streams, byte arrays,
* or other functions to be added
*/
private[spark] abstract class StreamFileInputFormat[T]
  extends CombineFileInputFormat[String, T]
{
  // Whole files are read as single records, so individual files are never split.
  override protected def isSplitable(context: JobContext, file: Path): Boolean = false

  /**
   * Allow minPartitions set by end-user in order to keep compatibility with old Hadoop API
   * which is set through setMaxSplitSize
   */
  def setMinPartitions(context: JobContext, minPartitions: Int) {
    // Total bytes across all non-directory input files.
    val totalLen = listStatus(context).asScala.filterNot(_.isDir).map(_.getLen).sum
    // Choose a split size that yields roughly minPartitions splits (at least 1).
    val maxSplitSize = math.ceil(totalLen / math.max(minPartitions, 1.0)).toLong
    super.setMaxSplitSize(maxSplitSize)
  }

  // Subclasses supply the concrete record reader for a split.
  def createRecordReader(split: InputSplit, taContext: TaskAttemptContext): RecordReader[String, T]
}
/**
* An abstract class of [[org.apache.hadoop.mapreduce.RecordReader RecordReader]]
* to reading files out as streams
*/
private[spark] abstract class StreamBasedRecordReader[T](
    split: CombineFileSplit,
    context: TaskAttemptContext,
    index: Integer)
  extends RecordReader[String, T] {

  // True means the current file has been processed, then skip it.
  private var processed = false

  // Key is the file path; value is the result of parsing the file's stream.
  private var key = ""
  private var value: T = null.asInstanceOf[T]

  override def initialize(split: InputSplit, context: TaskAttemptContext): Unit = {}

  override def close(): Unit = {}

  // Whole-file granularity: progress is all-or-nothing.
  override def getProgress: Float = if (processed) 1.0f else 0.0f

  override def getCurrentKey: String = key

  override def getCurrentValue: T = value

  // Emits exactly one record per file (the split entry at `index`).
  override def nextKeyValue: Boolean = {
    if (!processed) {
      val fileIn = new PortableDataStream(split, context, index)
      value = parseStream(fileIn)
      key = fileIn.getPath
      processed = true
      true
    } else {
      false
    }
  }

  /**
   * Parse the stream (and close it afterwards) and return the value as in type T
   * @param inStream the stream to be read in
   * @return the data formatted as
   */
  def parseStream(inStream: PortableDataStream): T
}
/**
* Reads the record in directly as a stream for other objects to manipulate and handle
*/
/**
 * Record reader that hands the [[PortableDataStream]] back unchanged, letting
 * callers consume the raw stream themselves.
 */
private[spark] class StreamRecordReader(
    split: CombineFileSplit,
    context: TaskAttemptContext,
    index: Integer)
  extends StreamBasedRecordReader[PortableDataStream](split, context, index) {

  // Identity parse: the stream itself is the record value.
  def parseStream(stream: PortableDataStream): PortableDataStream = stream
}
/**
* The format for the PortableDataStream files
*/
/**
 * Input format producing [[PortableDataStream]] records, one per file.
 */
private[spark] class StreamInputFormat extends StreamFileInputFormat[PortableDataStream] {
  override def createRecordReader(split: InputSplit, taContext: TaskAttemptContext)
    : CombineFileRecordReader[String, PortableDataStream] = {
    // The Hadoop API hands us a generic InputSplit; combine-file readers
    // require the CombineFileSplit subtype.
    val combineSplit = split.asInstanceOf[CombineFileSplit]
    new CombineFileRecordReader[String, PortableDataStream](
      combineSplit, taContext, classOf[StreamRecordReader])
  }
}
/**
* A class that allows DataStreams to be serialized and moved around by not creating them
* until they need to be read
* @note TaskAttemptContext is not serializable resulting in the confBytes construct
* @note CombineFileSplit is not serializable resulting in the splitBytes construct
*/
class PortableDataStream(
    isplit: CombineFileSplit,
    context: TaskAttemptContext,
    index: Integer)
  extends Serializable {

  // Hadoop Configuration serialized to bytes at construction time, because
  // TaskAttemptContext itself is not serializable (see class note above).
  private val confBytes = {
    val baos = new ByteArrayOutputStream()
    SparkHadoopUtil.get.getConfigurationFromJobContext(context).
      write(new DataOutputStream(baos))
    baos.toByteArray
  }

  // CombineFileSplit serialized to bytes, for the same reason.
  private val splitBytes = {
    val baos = new ByteArrayOutputStream()
    isplit.write(new DataOutputStream(baos))
    baos.toByteArray
  }

  // Lazily deserialized split; @transient so only the bytes travel.
  @transient private lazy val split = {
    val bais = new ByteArrayInputStream(splitBytes)
    val nsplit = new CombineFileSplit()
    nsplit.readFields(new DataInputStream(bais))
    nsplit
  }

  // Lazily deserialized configuration; @transient so only the bytes travel.
  @transient private lazy val conf = {
    val bais = new ByteArrayInputStream(confBytes)
    val nconf = new Configuration()
    nconf.readFields(new DataInputStream(bais))
    nconf
  }

  /**
   * Calculate the path name independently of opening the file
   */
  @transient private lazy val path = {
    val pathp = split.getPath(index)
    pathp.toString
  }

  /**
   * Create a new DataInputStream from the split and context. The user of this method is responsible
   * for closing the stream after usage.
   */
  def open(): DataInputStream = {
    val pathp = split.getPath(index)
    val fs = pathp.getFileSystem(conf)
    fs.open(pathp)
  }

  /**
   * Read the file as a byte array
   */
  def toArray(): Array[Byte] = {
    val stream = open()
    try {
      ByteStreams.toByteArray(stream)
    } finally {
      // swallowIOException = true: a failure while closing does not mask the result
      Closeables.close(stream, true)
    }
  }

  /**
   * Closing the PortableDataStream is not needed anymore. The user either can use the
   * PortableDataStream to get a DataInputStream (which the user needs to close after usage),
   * or a byte array.
   */
  @deprecated("Closing the PortableDataStream is not needed anymore.", "1.6.0")
  def close(): Unit = {
  }

  /** Path of the file backing this stream. */
  def getPath(): String = path
}
|
chenc10/Spark-PAF
|
core/src/main/scala/org/apache/spark/input/PortableDataStream.scala
|
Scala
|
apache-2.0
| 6,446 |
/*
* Copyright 2020 Precog Data
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.api.push
import slamdata.Predef._
import quasar.api.ColumnType
import quasar.api.push.param.ParamError
import quasar.api.resource.ResourcePath
import cats.data.NonEmptyList
/** Errors that can arise when managing result pushes to a destination. */
sealed trait ResultPushError[+D] extends Product with Serializable

object ResultPushError {

  /** No destination exists with the given id. */
  final case class DestinationNotFound[D](destinationId: D)
    extends ResultPushError[D]

  /** No push of `path` is known for the given destination. */
  final case class PushNotFound[D](destinationId: D, path: ResourcePath)
    extends ResultPushError[D]

  /** A push of `path` to the destination is already in progress. */
  final case class PushAlreadyRunning[D](destinationId: D, path: ResourcePath)
    extends ResultPushError[D]

  /** The destination does not support full pushes. */
  final case class FullNotSupported[D](destinationId: D)
    extends ResultPushError[D]

  /** The destination does not support incremental pushes. */
  final case class IncrementalNotSupported[D](destinationId: D)
    extends ResultPushError[D]

  /** The scalar type of `column` cannot be coerced to the type at `typeIndex`. */
  final case class InvalidCoercion[D](
      destinationId: D,
      column: String,
      scalar: ColumnType.Scalar,
      typeIndex: TypeIndex)
    extends ResultPushError[D]

  /** The destination declares no type at the given index. */
  final case class TypeNotFound[D](
      destinationId: D,
      column: String,
      index: TypeIndex)
    extends ResultPushError[D]

  /** Constructing the destination type `typeLabel` failed with parameter errors. */
  final case class TypeConstructionFailed[D](
      destinationId: D,
      column: String,
      typeLabel: String,
      errors: NonEmptyList[ParamError])
    extends ResultPushError[D]
}
|
djspiewak/quasar
|
api/src/main/scala/quasar/api/push/ResultPushError.scala
|
Scala
|
apache-2.0
| 1,896 |
package com.evalonlabs.myinbox.model
import org.subethamail.smtp.MessageContext
import javax.mail.internet.MimeMessage
/** Request to persist an inbound message together with its SMTP delivery context. */
case class PersistMsgReq(ctx: MessageContext,
                         msg: Message[MimeMessage])
|
epappas/myinbox
|
common/src/main/scala/com/evalonlabs/myinbox/model/PersistMsgReq.scala
|
Scala
|
mit
| 218 |
package org.biancama.algorithms.sort
/**
* Created by massimo on 30/04/16.
*/
object QuickSort {
  /**
   * Functional quicksort, modelled on the Haskell definition:
   *
   * quicksort :: (Ord a) => [a] -> [a]
   * quicksort [] = []
   * quicksort (x:xs) =
   *   let smallerSorted = quicksort [a | a <- xs, a <= x]
   *       biggerSorted = quicksort [a | a <- xs, a > x]
   *   in smallerSorted ++ [x] ++ biggerSorted
   *
   * Improvements over the previous version: the three-way partition is done
   * in a single traversal (instead of three separate `filter` passes), and
   * the list size is computed once per call (instead of repeated O(n)
   * `size`/indexed accesses in the pivot selection).
   *
   * @param unsortedList the list to sort
   * @param ord ordering for the element type
   * @return a new list with the elements in ascending order
   */
  def sort[A](unsortedList: List[A])(implicit ord: Ordering[A]): List[A] = {

    // Median of first, middle and last element; only called on lists of
    // size >= 2, so head/last are safe.
    def selectPivot(la: List[A], size: Int): A = {
      val left = la.head
      val center = la(size / 2)
      val right = la.last
      def median(lo: A, mid: A, hi: A): Boolean =
        (ord.lteq(lo, mid) && ord.lteq(mid, hi)) ||
          (ord.gteq(lo, mid) && ord.gteq(mid, hi))
      if (median(left, center, right)) center
      else if (median(center, left, right)) left
      else right
    }

    // Single pass splitting into (< pivot, > pivot, == pivot); foldRight
    // preserves the relative order of equal elements within each bucket.
    def partition(la: List[A], pivot: A): (List[A], List[A], List[A]) =
      la.foldRight((List.empty[A], List.empty[A], List.empty[A])) {
        case (x, (less, more, equal)) =>
          val c = ord.compare(x, pivot)
          if (c < 0) (x :: less, more, equal)
          else if (c > 0) (less, x :: more, equal)
          else (less, more, x :: equal)
      }

    val size = unsortedList.size
    if (size <= 1) unsortedList
    else {
      val pivot = selectPivot(unsortedList, size)
      val (less, more, equal) = partition(unsortedList, pivot)
      sort(less) ::: equal ::: sort(more)
    }
  }
}
|
biancama/data-structures-scala
|
src/main/scala/org/biancama/algorithms/sort/QuickSort.scala
|
Scala
|
gpl-3.0
| 1,398 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.benchmark
import org.apache.spark.benchmark.Benchmark
import org.apache.spark.sql.internal.SQLConf
/**
* Benchmark to measure whole stage codegen performance.
* To run this benchmark:
* {{{
* 1. without sbt: bin/spark-submit --class <this class> <spark sql test jar>
* 2. build/sbt "sql/test:runMain <this class>"
* 3. generate result: SPARK_GENERATE_BENCHMARK_FILES=1 build/sbt "sql/test:runMain <this class>"
* Results will be written to "benchmarks/MiscBenchmark-results.txt".
* }}}
*/
object MiscBenchmark extends SqlBasedBenchmark {

  // Filter on a bit test, then aggregate the whole range without grouping keys.
  def filterAndAggregateWithoutGroup(numRows: Long): Unit = {
    runBenchmark("filter & aggregate without group") {
      codegenBenchmark("range/filter/sum", numRows) {
        spark.range(numRows).filter("(id & 1) = 1").groupBy().sum().collect()
      }
    }
  }

  // Apply a LIMIT before the ungrouped aggregation.
  // NOTE(review): the runBenchmark label duplicates the case name here,
  // unlike the descriptive labels used by the other methods.
  def limitAndAggregateWithoutGroup(numRows: Long): Unit = {
    runBenchmark("range/limit/sum") {
      codegenBenchmark("range/limit/sum", numRows) {
        spark.range(numRows).limit(1000000).groupBy().sum().collect()
      }
    }
  }

  // Sample 1% of the range, with and without replacement.
  def sample(numRows: Int): Unit = {
    runBenchmark("sample") {
      codegenBenchmark("sample with replacement", numRows) {
        spark.range(numRows).sample(withReplacement = true, 0.01).groupBy().sum().collect()
      }
      codegenBenchmark("sample without replacement", numRows) {
        spark.range(numRows).sample(withReplacement = false, 0.01).groupBy().sum().collect()
      }
    }
  }

  // Measure collect() cost at 1x, 2x and 4x of numRows.
  def collect(numRows: Int): Unit = {
    runBenchmark("collect") {
      val benchmark = new Benchmark("collect", numRows, output = output)
      benchmark.addCase("collect 1 million") { iter =>
        spark.range(numRows).collect()
      }
      benchmark.addCase("collect 2 millions") { iter =>
        spark.range(numRows * 2).collect()
      }
      benchmark.addCase("collect 4 millions") { iter =>
        spark.range(numRows * 4).collect()
      }
      benchmark.run()
    }
  }

  // Measure collect() with a LIMIT smaller than the underlying range.
  def collectLimit(numRows: Int): Unit = {
    runBenchmark("collect limit") {
      val benchmark = new Benchmark("collect limit", numRows, output = output)
      benchmark.addCase("collect limit 1 million") { iter =>
        spark.range(numRows * 4).limit(numRows).collect()
      }
      benchmark.addCase("collect limit 2 millions") { iter =>
        spark.range(numRows * 4).limit(numRows * 2).collect()
      }
      benchmark.run()
    }
  }

  // Generator benchmarks: explode/posexplode/inline over arrays, maps and
  // (optionally nested) struct arrays.
  def explode(numRows: Int): Unit = {
    runBenchmark("generate explode") {
      codegenBenchmark("generate explode array", numRows) {
        val df = spark.range(numRows).selectExpr(
          "id as key",
          "array(rand(), rand(), rand(), rand(), rand()) as values")
        df.selectExpr("key", "explode(values) value").count()
      }
      codegenBenchmark("generate explode map", numRows) {
        val df = spark.range(numRows).selectExpr(
          "id as key",
          "map('a', rand(), 'b', rand(), 'c', rand(), 'd', rand(), 'e', rand()) pairs")
        df.selectExpr("key", "explode(pairs) as (k, v)").count()
      }
      codegenBenchmark("generate posexplode array", numRows) {
        val df = spark.range(numRows).selectExpr(
          "id as key",
          "array(rand(), rand(), rand(), rand(), rand()) as values")
        df.selectExpr("key", "posexplode(values) as (idx, value)").count()
      }
      codegenBenchmark("generate inline array", numRows) {
        val df = spark.range(numRows).selectExpr(
          "id as key",
          "array((rand(), rand()), (rand(), rand()), (rand(), 0.0d)) as values")
        df.selectExpr("key", "inline(values) as (r1, r2)").count()
      }
      // Smaller row count: each row carries an M-element struct array.
      val M = 60000
      codegenBenchmark("generate big struct array", M) {
        import spark.implicits._
        val df = spark.sparkContext.parallelize(Seq(("1",
          Array.fill(M)({
            val i = math.random
            (i.toString, (i + 1).toString, (i + 2).toString, (i + 3).toString)
          })))).toDF("col", "arr")
        df.selectExpr("*", "explode(arr) as arr_col")
          .select("col", "arr_col.*").count
      }
      // Same data nested one level deeper, with nested-schema pruning enabled.
      withSQLConf(SQLConf.NESTED_PRUNING_ON_EXPRESSIONS.key -> "true") {
        codegenBenchmark("generate big nested struct array", M) {
          import spark.implicits._
          val df = spark.sparkContext.parallelize(Seq(("1",
            Array.fill(M)({
              val i = math.random
              (i.toString, (i + 1).toString, (i + 2).toString, (i + 3).toString)
            })))).toDF("col", "arr")
            .selectExpr("col", "struct(col, arr) as st")
            .selectExpr("col", "st.col as col1", "explode(st.arr) as arr_col")
          df.collect()
        }
      }
    }
  }

  // Benchmark the stack() generator over five derived columns.
  def stack(numRows: Int): Unit = {
    runBenchmark("generate regular generator") {
      codegenBenchmark("generate stack", numRows) {
        val df = spark.range(numRows).selectExpr(
          "id as key",
          "id % 2 as t1",
          "id % 3 as t2",
          "id % 5 as t3",
          "id % 7 as t4",
          "id % 13 as t5")
        df.selectExpr("key", "stack(4, t1, t2, t3, t4, t5)").count()
      }
    }
  }

  override def runBenchmarkSuite(mainArgs: Array[String]): Unit = {
    filterAndAggregateWithoutGroup(500L << 22)
    limitAndAggregateWithoutGroup(500L << 20)
    sample(500 << 18)
    collect(1 << 20)
    collectLimit(1 << 20)
    explode(1 << 24)
    stack(1 << 24)
  }
}
|
bdrillard/spark
|
sql/core/src/test/scala/org/apache/spark/sql/execution/benchmark/MiscBenchmark.scala
|
Scala
|
apache-2.0
| 6,231 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.types
import org.apache.spark.annotation.InterfaceStability
/**
* The data type representing `NULL` values. Please use the singleton `DataTypes.NullType`.
*
* @since 1.3.0
*/
@InterfaceStability.Stable
class NullType private() extends DataType {
  // The companion object and this class is separated so the companion object also subclasses
  // this type. Otherwise, the companion object would be of type "NullType$" in byte code.
  // Defined with a private constructor so the companion object is the only possible instantiation.

  // Nominal size estimate of a NULL value: 1 byte.
  override def defaultSize: Int = 1

  // NullType is already nullable, so its nullable form is itself.
  private[spark] override def asNullable: NullType = this
}

/**
 * @since 1.3.0
 */
@InterfaceStability.Stable
case object NullType extends NullType
|
bravo-zhang/spark
|
sql/catalyst/src/main/scala/org/apache/spark/sql/types/NullType.scala
|
Scala
|
apache-2.0
| 1,556 |
/*
Copyright (c) 2016, Rice University
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
3. Neither the name of Rice University
nor the names of its contributors may be used to endorse or
promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.apache.spark.rdd.cl
import org.apache.spark.mllib.linalg.DenseVector
import org.apache.spark.mllib.linalg.Vectors
// Native (pinned, off-JVM-heap) OpenCL input buffers for a batch of MLlib
// DenseVectors, laid out as three parallel device arrays: one flat values
// array plus per-vector sizes and offsets.
// NOTE(review): OpenCLBridge is a project-local JNI facade; the semantics of
// clMalloc/pin/setNativePinnedArrayArg are inferred from usage here — confirm
// against its documentation.
class DenseVectorNativeInputBuffers(val vectorElementCapacity : Int,
val vectorCapacity : Int, val denseVectorStructSize : Int,
val blockingCopies : Boolean, val tiling : Int, val dev_ctx : Long)
extends NativeInputBuffers[DenseVector] {
// Device allocation for the flattened vector values (8 bytes per double),
// plus its pinned host mapping used for host<->device transfers.
val clValuesBuffer : Long = OpenCLBridge.clMalloc(dev_ctx, vectorElementCapacity * 8)
val valuesBuffer : Long = OpenCLBridge.pin(dev_ctx, clValuesBuffer)
// Per-vector lengths (4 bytes per int) and their pinned host mapping.
val clSizesBuffer : Long = OpenCLBridge.clMalloc(dev_ctx, vectorCapacity * 4)
val sizesBuffer : Long = OpenCLBridge.pin(dev_ctx, clSizesBuffer)
// Per-vector start offsets into the values array, plus pinned host mapping.
val clOffsetsBuffer : Long = OpenCLBridge.clMalloc(dev_ctx, vectorCapacity * 4)
val offsetsBuffer : Long = OpenCLBridge.pin(dev_ctx, clOffsetsBuffer)
// Number of vectors / total elements staged for the next copyToDevice call;
// -1 until a producer fills them in.
var vectorsToCopy : Int = -1
var elementsToCopy : Int = -1
// Index of the next vector to be handed out by next().
var iter : Int = 0
// Vectors are deserialized from the native buffers `tiling` at a time; these
// fields track the currently materialized tile of value arrays.
val next_buffered : Array[Array[Double]] = new Array[Array[Double]](tiling)
var next_buffered_iter : Int = 0
var n_next_buffered : Int = 0
// Frees the three device-side allocations created above.
override def releaseOpenCLArrays() {
OpenCLBridge.clFree(clValuesBuffer, dev_ctx)
OpenCLBridge.clFree(clSizesBuffer, dev_ctx)
OpenCLBridge.clFree(clOffsetsBuffer, dev_ctx)
}
// Binds this buffer set as six consecutive kernel arguments starting at
// `argnum` and returns the number of argument slots consumed.
override def copyToDevice(argnum : Int, ctx : Long, dev_ctx : Long,
cacheID : CLCacheID, persistent : Boolean) : Int = {
// Array of structs for each item
OpenCLBridge.setArgUnitialized(ctx, dev_ctx, argnum,
denseVectorStructSize * vectorCapacity, persistent)
// values array, size of double = 8
OpenCLBridge.setNativePinnedArrayArg(ctx, dev_ctx, argnum + 1, valuesBuffer,
clValuesBuffer, elementsToCopy * 8)
// Sizes of each vector
OpenCLBridge.setNativePinnedArrayArg(ctx, dev_ctx, argnum + 2, sizesBuffer,
clSizesBuffer, vectorsToCopy * 4)
// Offsets of each vector
OpenCLBridge.setNativePinnedArrayArg(ctx, dev_ctx, argnum + 3,
offsetsBuffer, clOffsetsBuffer, vectorsToCopy * 4)
// Number of vectors
OpenCLBridge.setIntArg(ctx, argnum + 4, vectorsToCopy)
// Tiling
OpenCLBridge.setIntArg(ctx, argnum + 5, tiling)
return 6
}
// Returns the next DenseVector, refilling the tile-sized staging arrays from
// the pinned native buffers whenever the current tile is exhausted.
override def next() : DenseVector = {
if (next_buffered_iter == n_next_buffered) {
next_buffered_iter = 0
// The final tile may be short: clamp to the vectors that remain.
n_next_buffered = if (vectorsToCopy - iter > tiling) tiling else vectorsToCopy - iter
OpenCLBridge.deserializeStridedValuesFromNativeArray(
next_buffered.asInstanceOf[Array[java.lang.Object]],
n_next_buffered, valuesBuffer, sizesBuffer, offsetsBuffer, iter, tiling)
}
val result : DenseVector = Vectors.dense(next_buffered(next_buffered_iter))
.asInstanceOf[DenseVector]
next_buffered_iter += 1
iter += 1
result
}
// True while vectors staged for this batch remain unconsumed.
override def hasNext() : Boolean = {
iter < vectorsToCopy
}
}
|
agrippa/spark-swat
|
swat/src/main/scala/org/apache/spark/rdd/cl/DenseVectorNativeInputBuffers.scala
|
Scala
|
bsd-3-clause
| 4,401 |
package org.jetbrains.plugins.scala
package lang
package completion3
import com.intellij.codeInsight.completion.CompletionType
import org.junit.Assert
class ScalaEndMarkerCompletionTest extends ScalaCodeInsightTestBase {
import ScalaCodeInsightTestBase._
// End markers are a Scala 3 language feature, so these tests only run on Scala 3.
override protected def supportedIn(version: ScalaVersion): Boolean =
version >= LatestScalaVersions.Scala_3_0
/**
 * Runs completion on `fileText` and verifies that a lookup element with the given
 * lookup string, rendered presentation text and type text is offered, and that
 * applying it produces `resultText`.
 *
 * @param presentationText expected rendered item+tail text; defaults to `lookupStr` when null
 * @param typeText         expected type hint text; null when no type hint is expected
 */
private def checkLookupElement(fileText: String,
resultText: String,
lookupStr: String,
presentationText: String = null,
typeText: String = null,
completionType: CompletionType = CompletionType.BASIC): Unit =
doRawCompletionTest(fileText, resultText, completionType = completionType) { lookup =>
// The rendered presentation is the item text plus an optional tail text.
val actualPresentation = createPresentation(lookup)
val actualPresentationText = actualPresentation.getItemText + Option(actualPresentation.getTailText).getOrElse("")
val actualTypeText = actualPresentation.getTypeText
hasLookupString(lookup, lookupStr) &&
Option(presentationText).getOrElse(lookupStr) == actualPresentationText &&
actualTypeText == typeText
}
/** Verifies that the "end ..." completion items appear in exactly the given order. */
private def checkLookupElementsOrder(fileText: String, expectedItems: List[String]): Unit = {
val (_, items) = activeLookupWithItems(fileText, CompletionType.BASIC, DEFAULT_TIME)()
// Only end-marker items are compared; all other completion items are ignored.
val actualItems = items.toList.map(_.getLookupString).filter(_.startsWith("end "))
Assert.assertArrayEquals(expectedItems.toArray[AnyRef], actualItems.toArray[AnyRef])
}
/**
 * Asserts that no end-marker completion item — by lookup string or rendered
 * item text starting with "end " — is offered for `fileText`.
 */
private def checkNoCompletion(fileText: String): Unit =
super.checkNoCompletion(fileText) { lookup =>
lookup.getLookupString.startsWith("end ") ||
createPresentation(lookup).getItemText.startsWith("end ")
}
/** Asserts that no completion item with the exact lookup string `item` is offered. */
private def checkNoCompletionFor(fileText: String, item: String): Unit =
super.checkNoCompletion(fileText) { lookup =>
hasLookupString(lookup, item)
}
// Typing "e" right after a dot is a member-access position, not a statement
// position, so no end-marker completion must be offered there.
def testNoCompletionAfterDot(): Unit = checkNoCompletion(
fileText =
s"""def foo =
| 1
| 2
| 3.e$CARET
|""".stripMargin
)
/// anonymous class
def testAnonClass(): Unit = checkLookupElement(
fileText =
s"""class C
|
|new C:
| def foo = true
|e$CARET
|""".stripMargin,
resultText =
s"""class C
|
|new C:
| def foo = true
|end new
|$CARET
|""".stripMargin,
lookupStr = "end new",
typeText = "C"
)
def testAnonClassComplexTypeText(): Unit = checkLookupElement(
fileText =
s"""class SomeClass
|class AnotherClass
|
|new SomeClass with AnotherClass:
| def foo = true
|e$CARET
|""".stripMargin,
resultText =
s"""class SomeClass
|class AnotherClass
|
|new SomeClass with AnotherClass:
| def foo = true
|end new
|$CARET
|""".stripMargin,
lookupStr = "end new",
typeText = "SomeClass with ..."
)
def testAnonClassComplexTypeText2(): Unit = checkLookupElement(
fileText =
s"""class SomeClass
|class AnotherClass
|
|new SomeClass
| with AnotherClass:
| def foo = true
|e$CARET
|""".stripMargin,
resultText =
s"""class SomeClass
|class AnotherClass
|
|new SomeClass
| with AnotherClass:
| def foo = true
|end new
|$CARET
|""".stripMargin,
lookupStr = "end new",
typeText = "SomeClass with ..."
)
def testAnonClassWithoutInput(): Unit = checkLookupElement(
fileText =
s"""class C
|
|new C:
| def foo = true
|$CARET
|""".stripMargin,
resultText =
s"""class C
|
|new C:
| def foo = true
|end new
|$CARET
|""".stripMargin,
lookupStr = "end new",
typeText = "C"
)
def testAnonClassAfterEndKeyword(): Unit = checkLookupElement(
fileText =
s"""class C
|
|new C:
| def foo = true
|end $CARET
|""".stripMargin,
resultText =
s"""class C
|
|new C:
| def foo = true
|end new
|$CARET
|""".stripMargin,
lookupStr = "new",
presentationText = "new",
typeText = "C"
)
def testEmptyAnonClass(): Unit = checkLookupElement(
fileText =
s"""class C
|
|new C:
|e$CARET
|""".stripMargin,
resultText =
s"""class C
|
|new C:
|end new
|$CARET
|""".stripMargin,
lookupStr = "end new",
typeText = "C"
)
def testNoCompletionForAnonClassWithoutTemplateBody(): Unit = checkNoCompletion(
fileText =
s"""class C
|
|new C
|e$CARET
|""".stripMargin
)
def testNoCompletionForAnonClassWithoutTemplateBodyAfterEndKeyword(): Unit = checkNoCompletion(
fileText =
s"""class C
|
|new C
|end $CARET
|""".stripMargin
)
def testNoCompletionForAnonClassWithBraces(): Unit = checkNoCompletion(
fileText =
s"""class C
|
|new C {
| def foo = true
|}
|e$CARET
|""".stripMargin
)
def testNoCompletionForAnonClassWithEndMarker(): Unit = checkNoCompletion(
fileText =
s"""class C
|
|new C:
| def foo = true
|end new
|e$CARET
|""".stripMargin
)
/// class
def testClass(): Unit = checkLookupElement(
fileText =
s"""class C:
| def foo = true
|e$CARET
|""".stripMargin,
resultText =
s"""class C:
| def foo = true
|end C
|$CARET
|""".stripMargin,
lookupStr = "end C"
)
def testClassWithoutInput(): Unit = checkLookupElement(
fileText =
s"""class C:
| def foo = true
|$CARET
|""".stripMargin,
resultText =
s"""class C:
| def foo = true
|end C
|$CARET
|""".stripMargin,
lookupStr = "end C"
)
def testClassAfterEndKeyword(): Unit = checkLookupElement(
fileText =
s"""class C:
| def foo = true
|end $CARET
|""".stripMargin,
resultText =
s"""class C:
| def foo = true
|end C
|$CARET
|""".stripMargin,
lookupStr = "C",
presentationText = "C"
)
def testEmptyClass(): Unit = checkLookupElement(
fileText =
s"""class C:
|e$CARET
|""".stripMargin,
resultText =
s"""class C:
|end C
|$CARET
|""".stripMargin,
lookupStr = "end C"
)
def testNoCompletionForClassWithoutTemplateBody(): Unit = checkNoCompletion(
fileText =
s"""class C
|e$CARET
|""".stripMargin
)
def testNoCompletionForClassWithoutTemplateBodyAfterEndKeyword(): Unit = checkNoCompletion(
fileText =
s"""class C
|end $CARET
|""".stripMargin
)
def testNoCompletionForClassWithBraces(): Unit = checkNoCompletion(
fileText =
s"""class C {
| def foo = true
|}
|e$CARET
|""".stripMargin
)
def testNoCompletionForClassWithEndMarker(): Unit = checkNoCompletion(
fileText =
s"""class C:
| def foo = true
|end C
|e$CARET
|""".stripMargin
)
/// trait
def testTrait(): Unit = checkLookupElement(
fileText =
s"""trait T:
| def foo
|e$CARET
|""".stripMargin,
resultText =
s"""trait T:
| def foo
|end T
|$CARET
|""".stripMargin,
lookupStr = "end T"
)
def testTraitWithoutInput(): Unit = checkLookupElement(
fileText =
s"""trait T:
| def foo
|$CARET
|""".stripMargin,
resultText =
s"""trait T:
| def foo
|end T
|$CARET
|""".stripMargin,
lookupStr = "end T"
)
def testTraitAfterEndKeyword(): Unit = checkLookupElement(
fileText =
s"""trait T:
| def foo
|end $CARET
|""".stripMargin,
resultText =
s"""trait T:
| def foo
|end T
|$CARET
|""".stripMargin,
lookupStr = "T",
presentationText = "T"
)
def testEmptyTrait(): Unit = checkLookupElement(
fileText =
s"""trait T:
|e$CARET
|""".stripMargin,
resultText =
s"""trait T:
|end T
|$CARET
|""".stripMargin,
lookupStr = "end T"
)
def testNoCompletionForTraitWithoutTemplateBody(): Unit = checkNoCompletion(
fileText =
s"""trait T
|e$CARET
|""".stripMargin
)
def testNoCompletionForTraitWithoutTemplateBodyAfterEndKeyword(): Unit = checkNoCompletion(
fileText =
s"""trait T
|end $CARET
|""".stripMargin
)
def testNoCompletionForTraitWithBraces(): Unit = checkNoCompletion(
fileText =
s"""trait T {
| def foo
|}
|e$CARET
|""".stripMargin
)
def testNoCompletionForTraitWithEndMarker(): Unit = checkNoCompletion(
fileText =
s"""trait T:
| def foo
|end T
|e$CARET
|""".stripMargin
)
/// object
def testObject(): Unit = checkLookupElement(
fileText =
s"""object O:
| def foo = true
|e$CARET
|""".stripMargin,
resultText =
s"""object O:
| def foo = true
|end O
|$CARET
|""".stripMargin,
lookupStr = "end O"
)
def testObjectWithoutInput(): Unit = checkLookupElement(
fileText =
s"""object O:
| def foo = true
|$CARET
|""".stripMargin,
resultText =
s"""object O:
| def foo = true
|end O
|$CARET
|""".stripMargin,
lookupStr = "end O"
)
def testObjectAfterEndKeyword(): Unit = checkLookupElement(
fileText =
s"""object O:
| def foo = true
|end $CARET
|""".stripMargin,
resultText =
s"""object O:
| def foo = true
|end O
|$CARET
|""".stripMargin,
lookupStr = "O",
presentationText = "O"
)
def testEmptyObject(): Unit = checkLookupElement(
fileText =
s"""object O:
|e$CARET
|""".stripMargin,
resultText =
s"""object O:
|end O
|$CARET
|""".stripMargin,
lookupStr = "end O"
)
def testNoCompletionForObjectWithoutTemplateBody(): Unit = checkNoCompletion(
fileText =
s"""object O
|e$CARET
|""".stripMargin
)
def testNoCompletionForObjectWithoutTemplateBodyAfterEndKeyword(): Unit = checkNoCompletion(
fileText =
s"""object O
|end $CARET
|""".stripMargin
)
def testNoCompletionForObjectWithBraces(): Unit = checkNoCompletion(
fileText =
s"""object O {
| def foo = true
|}
|e$CARET
|""".stripMargin
)
def testNoCompletionForObjectWithEndMarker(): Unit = checkNoCompletion(
fileText =
s"""object O:
| def foo = true
|end O
|e$CARET
|""".stripMargin
)
/// enum
def testEnum(): Unit = checkLookupElement(
fileText =
s"""enum E:
| case C
|e$CARET
|""".stripMargin,
resultText =
s"""enum E:
| case C
|end E
|$CARET
|""".stripMargin,
lookupStr = "end E"
)
def testEnumWithoutInput(): Unit = checkLookupElement(
fileText =
s"""enum E:
| case C
|$CARET
|""".stripMargin,
resultText =
s"""enum E:
| case C
|end E
|$CARET
|""".stripMargin,
lookupStr = "end E"
)
def testEnumAfterEndKeyword(): Unit = checkLookupElement(
fileText =
s"""enum E:
| case C
|end $CARET
|""".stripMargin,
resultText =
s"""enum E:
| case C
|end E
|$CARET
|""".stripMargin,
lookupStr = "E",
presentationText = "E"
)
def testEmptyEnum(): Unit = checkLookupElement(
fileText =
s"""enum E:
|e$CARET
|""".stripMargin,
resultText =
s"""enum E:
|end E
|$CARET
|""".stripMargin,
lookupStr = "end E"
)
def testNoCompletionForEnumWithoutTemplateBody(): Unit = checkNoCompletion(
fileText =
s"""enum E
|e$CARET
|""".stripMargin
)
def testNoCompletionForEnumWithoutTemplateBodyAfterEndKeyword(): Unit = checkNoCompletion(
fileText =
s"""enum E
|end $CARET
|""".stripMargin
)
def testNoCompletionForEnumWithBraces(): Unit = checkNoCompletion(
fileText =
s"""enum E {
| case C
|}
|e$CARET
|""".stripMargin
)
def testNoCompletionForEnumWithEndMarker(): Unit = checkNoCompletion(
fileText =
s"""enum E:
| case C
|end E
|e$CARET
|""".stripMargin
)
/// constructor
def testConstructor(): Unit = checkLookupElement(
fileText =
s"""class C(i: Int):
| def this(i: Int, s: String) =
| this(i)
| println("multiline")
| e$CARET
|""".stripMargin,
resultText =
s"""class C(i: Int):
| def this(i: Int, s: String) =
| this(i)
| println("multiline")
| end this
| $CARET
|""".stripMargin,
lookupStr = "end this"
)
def testConstructorWithoutInput(): Unit = checkLookupElement(
fileText =
s"""class C(i: Int):
| def this(i: Int, s: String) =
| this(i)
| println("multiline")
| $CARET
|""".stripMargin,
resultText =
s"""class C(i: Int):
| def this(i: Int, s: String) =
| this(i)
| println("multiline")
| end this
| $CARET
|""".stripMargin,
lookupStr = "end this"
)
def testConstructorAfterEndKeyword(): Unit = checkLookupElement(
fileText =
s"""class C(i: Int):
| def this(i: Int, s: String) =
| this(i)
| println("multiline")
| end $CARET
|""".stripMargin,
resultText =
s"""class C(i: Int):
| def this(i: Int, s: String) =
| this(i)
| println("multiline")
| end this
| $CARET
|""".stripMargin,
lookupStr = "this",
presentationText = "this"
)
def testNoCompletionForConstructorWithoutBody(): Unit = checkNoCompletionFor(
fileText =
s"""class C(i: Int):
| def this(i: Int, s: String)
| e$CARET
|""".stripMargin,
item = "end this"
)
def testNoCompletionForConstructorIfIndentIsLessThanConstructorIndent(): Unit = checkNoCompletionFor(
fileText =
s"""class C(i: Int):
| def this(i: Int, s: String) =
| this(i)
| e$CARET
|""".stripMargin,
item = "end this"
)
def testNoCompletionForConstructorWithBraces(): Unit = checkNoCompletionFor(
fileText =
s"""class C(i: Int):
| def this(i: Int, s: String) = {
| this(i)
| println("multiline")
| }
| e$CARET
|""".stripMargin,
item = "end this"
)
def testNoCompletionForConstructorWithEndMarker(): Unit = checkNoCompletionFor(
fileText =
s"""class C(i: Int):
| def this(i: Int, s: String) =
| this(i)
| println("multiline")
| end this
| e$CARET
|""".stripMargin,
item = "end this"
)
def testClassIfIndentIsGreaterThanClassIndent(): Unit = checkLookupElement(
fileText =
s"""class C(i: Int):
| def this(i: Int, s: String) =
| this(i)
| println("multiline")
| e$CARET
|""".stripMargin,
resultText =
s"""class C(i: Int):
| def this(i: Int, s: String) =
| this(i)
| println("multiline")
|end C
|$CARET
|""".stripMargin,
lookupStr = "end C"
)
/// value
def testValue(): Unit = checkLookupElement(
fileText =
s"""val v =
| 1 +
| 41
|e$CARET
|""".stripMargin,
resultText =
s"""val v =
| 1 +
| 41
|end v
|$CARET
|""".stripMargin,
lookupStr = "end v"
)
def testValueWithoutInput(): Unit = checkLookupElement(
fileText =
s"""val v =
| 1 + 2 match
| case 3 => 0
| case _ => 1
|$CARET
|""".stripMargin,
resultText =
s"""val v =
| 1 + 2 match
| case 3 => 0
| case _ => 1
|end v
|$CARET
|""".stripMargin,
lookupStr = "end v"
)
def testValueAfterEndKeyword(): Unit = checkLookupElement(
fileText =
s"""val v =
| 0
| 42
|end $CARET
|""".stripMargin,
resultText =
s"""val v =
| 0
| 42
|end v
|$CARET
|""".stripMargin,
lookupStr = "v",
presentationText = "v"
)
def testNoCompletionForValueWithOneLinerBody(): Unit = checkNoCompletion(
fileText =
s"""val v =
| 42
|e$CARET
|""".stripMargin
)
def testNoCompletionForValueWithBraces(): Unit = checkNoCompletion(
fileText =
s"""val v = {
| 0
| 42
|}
|e$CARET
|""".stripMargin
)
def testNoCompletionForValueWithEndMarker(): Unit = checkNoCompletion(
fileText =
s"""val v =
| 1 +
| 41
|end v
|e$CARET
|""".stripMargin
)
def testNoCompletionForAbstractValue(): Unit = checkNoCompletion(
fileText =
s"""val v
|e$CARET
|""".stripMargin
)
def testNoCompletionForAbstractValueAfterEndKeyword(): Unit = checkNoCompletion(
fileText =
s"""val v
|end $CARET
|""".stripMargin
)
def testNoCompletionForValueIfEndIsNotOnTheNewLine(): Unit = checkNoCompletion(
fileText =
s"""val v =
| 0
| 42 e$CARET
|""".stripMargin
)
/// variable
def testVariable(): Unit = checkLookupElement(
fileText =
s"""var v =
| 0
| 42
|e$CARET
|""".stripMargin,
resultText =
s"""var v =
| 0
| 42
|end v
|$CARET
|""".stripMargin,
lookupStr = "end v"
)
def testVariableWithoutInput(): Unit = checkLookupElement(
fileText =
s"""var v =
| 0
| 42
|$CARET
|""".stripMargin,
resultText =
s"""var v =
| 0
| 42
|end v
|$CARET
|""".stripMargin,
lookupStr = "end v"
)
def testVariableAfterEndKeyword(): Unit = checkLookupElement(
fileText =
s"""var v =
| 0
| 42
|end $CARET
|""".stripMargin,
resultText =
s"""var v =
| 0
| 42
|end v
|$CARET
|""".stripMargin,
lookupStr = "v",
presentationText = "v"
)
def testNoCompletionForVariableWithOneLinerBody(): Unit = checkNoCompletion(
fileText =
s"""var v =
| 42
|e$CARET
|""".stripMargin
)
def testNoCompletionForVariableWithBraces(): Unit = checkNoCompletion(
fileText =
s"""var v = {
| 0
| 42
|}
|e$CARET
|""".stripMargin
)
def testNoCompletionForVariableWithEndMarker(): Unit = checkNoCompletion(
fileText =
s"""var v =
| 0
| 42
|end v
|e$CARET
|""".stripMargin
)
def testNoCompletionForAbstractVariable(): Unit = checkNoCompletion(
fileText =
s"""var v
|e$CARET
|""".stripMargin
)
def testNoCompletionForAbstractVariableAfterEndKeyword(): Unit = checkNoCompletion(
fileText =
s"""var v
|end $CARET
|""".stripMargin
)
def testNoCompletionForVariableIfEndIsNotOnTheNewLine(): Unit = checkNoCompletion(
fileText =
s"""var v =
| 0
| 42 e$CARET
|""".stripMargin
)
/// value binding pattern
def testValueBinding(): Unit = checkLookupElement(
fileText =
s"""val h :: t =
| List(1,
| 2, 3)
|e$CARET
|""".stripMargin,
resultText =
s"""val h :: t =
| List(1,
| 2, 3)
|end val
|$CARET
|""".stripMargin,
lookupStr = "end val"
)
def testValueBindingWithoutInput(): Unit = checkLookupElement(
fileText =
s"""val h :: t =
| List(1,
| 2, 3)
|$CARET
|""".stripMargin,
resultText =
s"""val h :: t =
| List(1,
| 2, 3)
|end val
|$CARET
|""".stripMargin,
lookupStr = "end val"
)
def testValueBindingAfterEndKeyword(): Unit = checkLookupElement(
fileText =
s"""val h :: t =
| List(1,
| 2, 3)
|end $CARET
|""".stripMargin,
resultText =
s"""val h :: t =
| List(1,
| 2, 3)
|end val
|$CARET
|""".stripMargin,
lookupStr = "val",
presentationText = "val"
)
def testNoCompletionForValueBindingWithOneLinerBody(): Unit = checkNoCompletion(
fileText =
s"""val h :: t =
| List(1, 2, 3)
|e$CARET
|""".stripMargin
)
def testNoCompletionForValueBindingWithoutAssign(): Unit = checkNoCompletion(
fileText =
s"""val h :: t
|e$CARET
|""".stripMargin
)
def testNoCompletionForValueBindingWithoutAssignAfterEndKeyword(): Unit = checkNoCompletion(
fileText =
s"""val h :: t
|end $CARET
|""".stripMargin
)
def testNoCompletionForValueBindingWithBraces(): Unit = checkNoCompletion(
fileText =
s"""val h :: t = {
| List(1,
| 2, 3)
|}
|e$CARET
|""".stripMargin
)
def testNoCompletionForValueBindingWithEndMarker(): Unit = checkNoCompletion(
fileText =
s"""val h :: t =
| List(1,
| 2, 3)
|end val
|e$CARET
|""".stripMargin
)
/// variable binding pattern
def testNoCompletionForVariableBinding(): Unit = checkNoCompletion(
fileText =
s"""var h :: t =
| List(1,
| 2, 3)
|e$CARET
|""".stripMargin
)
def testNoCompletionForVariableBindingAfterEndKeyword(): Unit = checkNoCompletion(
fileText =
s"""var h :: t =
| List(1,
| 2, 3)
|end $CARET
|""".stripMargin
)
def testNoCompletionForVariableBindingWithOneLinerBody(): Unit = checkNoCompletion(
fileText =
s"""var h :: t =
| List(1, 2, 3)
|e$CARET
|""".stripMargin
)
def testNoCompletionForVariableBindingWithoutAssign(): Unit = checkNoCompletion(
fileText =
s"""var h :: t
|e$CARET
|""".stripMargin
)
def testNoCompletionForVariableBindingWithoutAssignAfterEndKeyword(): Unit = checkNoCompletion(
fileText =
s"""var h :: t
|end $CARET
|""".stripMargin
)
def testNoCompletionForVariableBindingWithBraces(): Unit = checkNoCompletion(
fileText =
s"""var h :: t = {
| List(1,
| 2, 3)
|}
|e$CARET
|""".stripMargin
)
/// given
def testAnonymousGivenAlias(): Unit = checkLookupElement(
fileText =
s"""given Int =
| 0
| 42
|e$CARET
|""".stripMargin,
resultText =
s"""given Int =
| 0
| 42
|end given
|$CARET
|""".stripMargin,
lookupStr = "end given"
)
def testAnonymousGivenAliasWithoutInput(): Unit = checkLookupElement(
fileText =
s"""given Int =
| 0
| 42
|$CARET
|""".stripMargin,
resultText =
s"""given Int =
| 0
| 42
|end given
|$CARET
|""".stripMargin,
lookupStr = "end given"
)
def testAnonymousGivenAliasAfterEndKeyword(): Unit = checkLookupElement(
fileText =
s"""given Int =
| 0
| 42
|end $CARET
|""".stripMargin,
resultText =
s"""given Int =
| 0
| 42
|end given
|$CARET
|""".stripMargin,
lookupStr = "given",
presentationText = "given"
)
def testNoCompletionForAnonymousGivenAliasWithOneLinerBody(): Unit = checkNoCompletion(
fileText =
s"""given Int =
| 42
|e$CARET
|""".stripMargin
)
def testNoCompletionForAnonymousGivenAliasWithoutAssign(): Unit = checkNoCompletion(
fileText =
s"""given Int
|e$CARET
|""".stripMargin
)
def testNoCompletionForAnonymousGivenAliasWithoutAssignAfterEndKeyword(): Unit = checkNoCompletion(
fileText =
s"""given Int
|end $CARET
|""".stripMargin
)
def testNoCompletionForAnonymousGivenAliasWithBraces(): Unit = checkNoCompletion(
fileText =
s"""given Int = {
| 0
| 42
|}
|e$CARET
|""".stripMargin
)
def testNoCompletionForAnonymousGivenAliasWithEndMarker(): Unit = checkNoCompletion(
fileText =
s"""given Int =
| 0
| 42
|end given
|e$CARET
|""".stripMargin
)
def testGivenAlias(): Unit = checkLookupElement(
fileText =
s"""given someGiven: Int =
| 0
| 42
|e$CARET
|""".stripMargin,
resultText =
s"""given someGiven: Int =
| 0
| 42
|end someGiven
|$CARET
|""".stripMargin,
lookupStr = "end someGiven"
)
def testGivenAliasWithoutInput(): Unit = checkLookupElement(
fileText =
s"""given someGiven: Int =
| 0
| 42
|$CARET
|""".stripMargin,
resultText =
s"""given someGiven: Int =
| 0
| 42
|end someGiven
|$CARET
|""".stripMargin,
lookupStr = "end someGiven"
)
def testGivenAliasAfterEndKeyword(): Unit = checkLookupElement(
fileText =
s"""given someGiven: Int =
| 0
| 42
|end $CARET
|""".stripMargin,
resultText =
s"""given someGiven: Int =
| 0
| 42
|end someGiven
|$CARET
|""".stripMargin,
lookupStr = "someGiven",
presentationText = "someGiven"
)
def testNoCompletionForGivenAliasWithOneLinerBody(): Unit = checkNoCompletion(
fileText =
s"""given someGiven: Int =
| 42
|e$CARET
|""".stripMargin
)
def testNoCompletionForGivenAliasWithoutAssign(): Unit = checkNoCompletion(
fileText =
s"""given someGiven: Int
|e$CARET
|""".stripMargin
)
def testNoCompletionForGivenAliasWithoutAssignAfterEndKeyword(): Unit = checkNoCompletion(
fileText =
s"""given someGiven: Int
|end $CARET
|""".stripMargin
)
def testNoCompletionForGivenAliasWithBraces(): Unit = checkNoCompletion(
fileText =
s"""given someGiven: Int = {
| 0
| 42
|}
|e$CARET
|""".stripMargin
)
def testNoCompletionForGivenAliasWithEndMarker(): Unit = checkNoCompletion(
fileText =
s"""given someGiven: Int =
| 0
| 42
|end someGiven
|e$CARET
|""".stripMargin
)
def testAnonymousGivenDefinition(): Unit = checkLookupElement(
fileText =
s"""trait Ord[T]:
| def compare(x: T, y: T): Int
|given Ord[Int] with
| def compare(x: Int, y: Int): Int =
| x.compareTo(y)
|e$CARET
|""".stripMargin,
resultText =
s"""trait Ord[T]:
| def compare(x: T, y: T): Int
|given Ord[Int] with
| def compare(x: Int, y: Int): Int =
| x.compareTo(y)
|end given
|$CARET
|""".stripMargin,
lookupStr = "end given"
)
def testAnonymousGivenDefinitionAfterEndKeyword(): Unit = checkLookupElement(
fileText =
s"""trait Ord[T]:
| def compare(x: T, y: T): Int
|given Ord[Int] with
| def compare(x: Int, y: Int): Int =
| x.compareTo(y)
|end $CARET
|""".stripMargin,
resultText =
s"""trait Ord[T]:
| def compare(x: T, y: T): Int
|given Ord[Int] with
| def compare(x: Int, y: Int): Int =
| x.compareTo(y)
|end given
|$CARET
|""".stripMargin,
lookupStr = "given",
presentationText = "given"
)
def testNoCompletionForAnonymousGivenDefinitionWithOneLinerBody(): Unit = checkNoCompletion(
fileText =
s"""trait Ord[T]:
| def compare(x: T, y: T): Int
|given Ord[Int] with
| def compare(x: Int, y: Int): Int = x.compareTo(y)
|e$CARET
|""".stripMargin,
)
def testNoCompletionForAnonymousGivenDefinitionWithBraces(): Unit = checkNoCompletion(
fileText =
s"""trait Ord[T]:
| def compare(x: T, y: T): Int
|given Ord[Int] with {
| def compare(x: Int, y: Int): Int =
| x.compareTo(y)
|}
|e$CARET
|""".stripMargin,
)
def testNoCompletionForAnonymousGivenDefinitionWithEndMarker(): Unit = checkNoCompletion(
fileText =
s"""trait Ord[T]:
| def compare(x: T, y: T): Int
|given Ord[Int] with
| def compare(x: Int, y: Int): Int =
| x.compareTo(y)
|end given
|e$CARET
|""".stripMargin
)
def testGivenDefinition(): Unit = checkLookupElement(
fileText =
s"""trait Ord[T]:
| def compare(x: T, y: T): Int
|given intOrd: Ord[Int] with
| def compare(x: Int, y: Int): Int =
| if x < y then -1 else if x > y then +1 else 0
|e$CARET
|""".stripMargin,
resultText =
s"""trait Ord[T]:
| def compare(x: T, y: T): Int
|given intOrd: Ord[Int] with
| def compare(x: Int, y: Int): Int =
| if x < y then -1 else if x > y then +1 else 0
|end intOrd
|$CARET
|""".stripMargin,
lookupStr = "end intOrd"
)
def testGivenDefinitionAfterEndKeyword(): Unit = checkLookupElement(
fileText =
s"""trait Ord[T]:
| def compare(x: T, y: T): Int
|given intOrd: Ord[Int] with
| def compare(x: Int, y: Int): Int =
| if x < y then -1 else if x > y then +1 else 0
|end $CARET
|""".stripMargin,
resultText =
s"""trait Ord[T]:
| def compare(x: T, y: T): Int
|given intOrd: Ord[Int] with
| def compare(x: Int, y: Int): Int =
| if x < y then -1 else if x > y then +1 else 0
|end intOrd
|$CARET
|""".stripMargin,
lookupStr = "intOrd",
presentationText = "intOrd"
)
def testNoCompletionForGivenDefinitionWithOneLinerBody(): Unit = checkNoCompletion(
fileText =
s"""trait Ord[T]:
| def compare(x: T, y: T): Int
|given intOrd: Ord[Int] with
| def compare(x: Int, y: Int): Int = if x < y then -1 else if x > y then +1 else 0
|e$CARET
|""".stripMargin
)
def testNoCompletionForGivenDefinitionWithBraces(): Unit = checkNoCompletion(
fileText =
s"""trait Ord[T]:
| def compare(x: T, y: T): Int
|given intOrd: Ord[Int] with {
| def compare(x: Int, y: Int): Int =
| if x < y then -1 else if x > y then +1 else 0
|}
|e$CARET
|""".stripMargin,
)
def testNoCompletionForGivenDefinitionWithEndMarker(): Unit = checkNoCompletion(
fileText =
s"""trait Ord[T]:
| def compare(x: T, y: T): Int
|given intOrd: Ord[Int] with
| def compare(x: Int, y: Int): Int =
| if x < y then -1 else if x > y then +1 else 0
|end intOrd
|e$CARET
|""".stripMargin
)
def testNoCompletionForPatternBoundGiven(): Unit = checkNoCompletionFor(
fileText =
s"""for
| given Int <- List(1, 2, 3)
| e$CARET
|do ()
|""".stripMargin,
item = "end given"
)
/// extension
def testExtension(): Unit = checkLookupElement(
fileText =
s"""extension (x: String)
| def < (y: String): Boolean =
| ???
|e$CARET
|""".stripMargin,
resultText =
s"""extension (x: String)
| def < (y: String): Boolean =
| ???
|end extension
|$CARET
|""".stripMargin,
lookupStr = "end extension"
)
// Completion must also trigger on an empty new line (no "e" typed yet) after
// an indentation-based extension body, mirroring the other *WithoutInput
// tests in this suite (e.g. testClassWithoutInput, testObjectWithoutInput).
// Previously this test's fileText contained "e$CARET", making it a duplicate
// of testExtension instead of covering the empty-input scenario.
def testExtensionWithoutInput(): Unit = checkLookupElement(
fileText =
s"""extension (x: String)
| def < (y: String): Boolean =
| ???
|$CARET
|""".stripMargin,
resultText =
s"""extension (x: String)
| def < (y: String): Boolean =
| ???
|end extension
|$CARET
|""".stripMargin,
lookupStr = "end extension"
)
def testExtensionAfterEndKeyword(): Unit = checkLookupElement(
fileText =
s"""extension (x: String)
| def < (y: String): Boolean =
| ???
|end $CARET
|""".stripMargin,
resultText =
s"""extension (x: String)
| def < (y: String): Boolean =
| ???
|end extension
|$CARET
|""".stripMargin,
lookupStr = "extension",
presentationText = "extension"
)
def testNoCompletionForExtensionWithOneLinerBody(): Unit = checkNoCompletion(
fileText =
s"""extension (x: String)
| def < (y: String): Boolean = ???
|e$CARET
|""".stripMargin
)
def testNoCompletionForExtensionWithBraces(): Unit = checkNoCompletion(
fileText =
s"""extension (x: String) {
| def < (y: String): Boolean =
| ???
|}
|e$CARET
|""".stripMargin
)
def testNoCompletionForExtensionWithEndMarker(): Unit = checkNoCompletion(
fileText =
s"""extension (x: String)
| def < (y: String): Boolean =
| ???
|end extension
|e$CARET
|""".stripMargin
)
def testNoCompletionForExtensionWithOneLinerFunctionOnTheSameLine(): Unit = checkNoCompletion(
fileText =
s"""extension (i: Int) def isZero: Boolean = i == 0
|e$CARET
|""".stripMargin
)
def testExtensionWithMultilineFunctionOnTheSameLine(): Unit = checkLookupElement(
fileText =
s"""extension (i: Int) def isZero: Boolean =
| i == 0
|e$CARET
|""".stripMargin,
resultText =
s"""extension (i: Int) def isZero: Boolean =
| i == 0
|end extension
|$CARET
|""".stripMargin,
lookupStr = "end extension"
)
def testExtensionWithMultilineFunctionOnTheSameLineAfterEndKeyword(): Unit = checkLookupElement(
fileText =
s"""extension (i: Int) def isZero: Boolean =
| i == 0
|end $CARET
|""".stripMargin,
resultText =
s"""extension (i: Int) def isZero: Boolean =
| i == 0
|end extension
|$CARET
|""".stripMargin,
lookupStr = "extension",
presentationText = "extension"
)
def testNoCompletionForMultilineExtensionFunctionOnTheSameLine(): Unit = checkNoCompletionFor(
fileText =
s"""extension (i: Int) def isZero: Boolean =
| i == 0
|e$CARET
|""".stripMargin,
item = "end isZero"
)
// todo: uncomment when SCL-19689 is resolved
// def testNoCompletionForExtensionWithMultilineFunctionOnTheSameLineWithEndMarker(): Unit = checkNoCompletion(
// fileText =
// s"""extension (i: Int) def isZero: Boolean =
// | i == 0
// |end extension
// |e$CARET
// |""".stripMargin
// )
def testNoCompletionForExtensionWithoutFunctions(): Unit = checkNoCompletion(
fileText =
s"""extension (ss: Seq[String])
|e$CARET
|""".stripMargin
)
def testNoCompletionForExtensionWithoutFunctionsAfterEndKeyword(): Unit = checkNoCompletion(
fileText =
s"""extension (ss: Seq[String])
|end $CARET
|""".stripMargin
)
/// function: "end <name>" marker completion for indentation-based function bodies
// Typing `e` after a multiline indented body offers "end largeMethod".
def testFunction(): Unit = checkLookupElement(
fileText =
s"""def largeMethod(n: Int) =
|  val x = n / 2
|  if x * 2 == n then
|    x
|  else
|    x + 1
|e$CARET
|""".stripMargin,
resultText =
s"""def largeMethod(n: Int) =
|  val x = n / 2
|  if x * 2 == n then
|    x
|  else
|    x + 1
|end largeMethod
|$CARET
|""".stripMargin,
lookupStr = "end largeMethod"
)
// The marker is offered even with no typed prefix at the caret.
def testFunctionWithoutInput(): Unit = checkLookupElement(
fileText =
s"""def largeMethod(n: Int) =
|  val x = n / 2
|  if x * 2 == n then
|    x
|  else
|    x + 1
|$CARET
|""".stripMargin,
resultText =
s"""def largeMethod(n: Int) =
|  val x = n / 2
|  if x * 2 == n then
|    x
|  else
|    x + 1
|end largeMethod
|$CARET
|""".stripMargin,
lookupStr = "end largeMethod"
)
// After an explicit `end `, only the function name is suggested (and shown as such).
def testFunctionAfterEndKeyword(): Unit = checkLookupElement(
fileText =
s"""def largeMethod(n: Int) =
|  val x = n / 2
|  if x * 2 == n then
|    x
|  else
|    x + 1
|end $CARET
|""".stripMargin,
resultText =
s"""def largeMethod(n: Int) =
|  val x = n / 2
|  if x * 2 == n then
|    x
|  else
|    x + 1
|end largeMethod
|$CARET
|""".stripMargin,
lookupStr = "largeMethod",
presentationText = "largeMethod"
)
// One-liner bodies do not need an end marker.
def testNoCompletionForFunctionWithOneLinerBody(): Unit = checkNoCompletion(
fileText =
s"""def foo(str: String) =
|  str.length
|e$CARET
|""".stripMargin
)
// Abstract (bodyless) declarations never get an end marker.
def testNoCompletionForFunctionWithoutBody(): Unit = checkNoCompletionFor(
fileText =
s"""def foo: Int
|e$CARET
|""".stripMargin,
item = "end foo"
)
// Bodyless declaration, caret after `end `: the name must not be suggested.
def testNoCompletionForFunctionWithoutBodyAfterEndKeyword(): Unit = checkNoCompletionFor(
fileText =
s"""def foo: Int
|end $CARET
|""".stripMargin,
item = "foo"
)
// Brace-delimited bodies do not use end markers.
def testNoCompletionForFunctionWithBraces(): Unit = checkNoCompletion(
fileText =
s"""def largeMethod(n: Int) = {
|  val x = n / 2
|  if x * 2 == n then
|    x
|  else
|    x + 1
|}
|e$CARET
|""".stripMargin
)
// An already-present end marker suppresses further suggestions.
def testNoCompletionForFunctionWithEndMarker(): Unit = checkNoCompletion(
fileText =
s"""def largeMethod(n: Int) =
|  val x = n / 2
|  if x * 2 == n then
|    x
|  else
|    x + 1
|end largeMethod
|e$CARET
|""".stripMargin
)
/// package: "end <lastSegment>" marker for explicit (colon-delimited) packagings
// The marker uses only the last segment of the package path.
def testPackage(): Unit = checkLookupElement(
fileText =
s"""package p1.p2.p3:
|e$CARET
|""".stripMargin,
resultText =
s"""package p1.p2.p3:
|end p3
|$CARET
|""".stripMargin,
lookupStr = "end p3"
)
// Offered with no typed prefix as well.
def testPackageWithoutInput(): Unit = checkLookupElement(
fileText =
s"""package p1.p2.p3:
|$CARET
|""".stripMargin,
resultText =
s"""package p1.p2.p3:
|end p3
|$CARET
|""".stripMargin,
lookupStr = "end p3"
)
// After `end ` only the segment name is suggested.
def testPackageAfterEndKeyword(): Unit = checkLookupElement(
fileText =
s"""package p1.p2.p3:
|end $CARET
|""".stripMargin,
resultText =
s"""package p1.p2.p3:
|end p3
|$CARET
|""".stripMargin,
lookupStr = "p3",
presentationText = "p3"
)
// A plain (non-colon) package declaration takes no end marker.
def testNoCompletionForNonExplicitPackage(): Unit = checkNoCompletion(
s"""package p1.p2.p3
|e$CARET""".stripMargin
)
// Same, with the caret after `end `.
def testNoCompletionForNonExplicitPackageAfterEndKeyword(): Unit = checkNoCompletion(
s"""package p1.p2.p3
|end $CARET""".stripMargin
)
// An existing end marker suppresses the suggestion.
def testNoCompletionForPackageWithEndMarker(): Unit = checkNoCompletion(
fileText =
s"""package p1.p2.p3:
|end p3
|e$CARET
|""".stripMargin
)
/// if: "end if" marker completion
// Multiline then-branch: "end if" is offered.
def testIf(): Unit = checkLookupElement(
fileText =
s"""if 1 > 2 then
|  println("wow")
|  println("impossible")
|e$CARET
|""".stripMargin,
resultText =
s"""if 1 > 2 then
|  println("wow")
|  println("impossible")
|end if
|$CARET
|""".stripMargin,
lookupStr = "end if"
)
// Old-style parenthesized condition still qualifies.
def testIfOldStyle(): Unit = checkLookupElement(
fileText =
s"""if (1 > 2)
|  println("wow")
|  println("impossible")
|e$CARET
|""".stripMargin,
resultText =
s"""if (1 > 2)
|  println("wow")
|  println("impossible")
|end if
|$CARET
|""".stripMargin,
lookupStr = "end if"
)
// The multiline else-branch alone is enough to offer the marker.
def testIfOldStyleWithElse(): Unit = checkLookupElement(
fileText =
s"""if (1 > 2)
|  println("wow")
|else
|  println()
|  println("ok")
|e$CARET
|""".stripMargin,
resultText =
s"""if (1 > 2)
|  println("wow")
|else
|  println()
|  println("ok")
|end if
|$CARET
|""".stripMargin,
lookupStr = "end if"
)
// Incomplete `if` (no condition/then expression) must still complete gracefully.
def testIfOldStyleWithElseWithoutConditionAndThenExpr(): Unit = checkLookupElement(
fileText =
s"""if else
|  println()
|  println("ok")
|e$CARET
|""".stripMargin,
resultText =
s"""if else
|  println()
|  println("ok")
|end if
|$CARET
|""".stripMargin,
lookupStr = "end if"
)
// Braces around the then-branch only: the indented else still permits the marker.
def testIfWithBracesAroundThenExpr(): Unit = checkLookupElement(
fileText =
s"""if (1 > 2) {
|  println("wow")
|} else
|  println()
|  println("ok")
|e$CARET
|""".stripMargin,
resultText =
s"""if (1 > 2) {
|  println("wow")
|} else
|  println()
|  println("ok")
|end if
|$CARET
|""".stripMargin,
lookupStr = "end if"
)
// One-liner then with a multiline else.
def testIfWithMultilineElse(): Unit = checkLookupElement(
fileText =
s"""if 1 > 2 then println("wow")
|else
|  println("ok")
|  println(1 - 2)
|e$CARET
|""".stripMargin,
resultText =
s"""if 1 > 2 then println("wow")
|else
|  println("ok")
|  println(1 - 2)
|end if
|$CARET
|""".stripMargin,
lookupStr = "end if"
)
// Offered without any typed prefix.
def testIfWithoutInput(): Unit = checkLookupElement(
fileText =
s"""if 1 > 2 then
|  println("wow")
|  println("impossible")
|$CARET
|""".stripMargin,
resultText =
s"""if 1 > 2 then
|  println("wow")
|  println("impossible")
|end if
|$CARET
|""".stripMargin,
lookupStr = "end if"
)
// After `end ` only the keyword `if` is suggested.
def testIfAfterEndKeyword(): Unit = checkLookupElement(
fileText =
s"""if 1 > 2 then
|  println("wow")
|  println("impossible")
|end $CARET
|""".stripMargin,
resultText =
s"""if 1 > 2 then
|  println("wow")
|  println("impossible")
|end if
|$CARET
|""".stripMargin,
lookupStr = "if",
presentationText = "if"
)
// Brace-delimited then-branch: no marker.
def testNoCompletionForIfWithBraces1(): Unit = checkNoCompletion(
fileText =
s"""if (1 > 2) {
|  println("wow")
|  println("impossible")
|}
|e$CARET
|""".stripMargin
)
// Brace-delimited else-branch last: no marker.
def testNoCompletionForIfWithBraces2(): Unit = checkNoCompletion(
fileText =
s"""if (1 > 2)
|  println("wow")
|  println("impossible")
|else {
|  println("ok")
|}
|e$CARET
|""".stripMargin
)
// Already terminated by `end if`: no marker.
def testNoCompletionForIfWithEndMarker(): Unit = checkNoCompletion(
fileText =
s"""if 1 > 2 then
|  println("wow")
|  println("impossible")
|end if
|e$CARET
|""".stripMargin
)
// One-liner then, no else: too short for a marker.
def testNoCompletionForIfWithOneLinerThenWithoutElse(): Unit = checkNoCompletion(
fileText =
s"""if 1 > 2 then
|  println("wow")
|e$CARET
|""".stripMargin
)
// One-liner then and else: too short for a marker.
def testNoCompletionForIfWithOneLinerThenAndElse(): Unit = checkNoCompletion(
fileText =
s"""if 1 > 2 then
|  println("wow")
|else
|  println("ok")
|e$CARET
|""".stripMargin
)
// Inner `if`: the marker is inserted at the caret's nesting level.
def testNestedIf(): Unit = checkLookupElement(
fileText =
s"""if 1 > 2 then
|  if 2 > 3 then
|    println("wow")
|    println(2 - 3)
|  $CARET
|  println("impossible")
|end if
|""".stripMargin,
resultText =
s"""if 1 > 2 then
|  if 2 > 3 then
|    println("wow")
|    println(2 - 3)
|  end if
|  $CARET
|  println("impossible")
|end if
|""".stripMargin,
lookupStr = "end if"
)
// `if` starting on the same line as a value definition cannot take `end if`.
def testNoCompletionForIfOnTheSameLineAsValueDefinition(): Unit = checkNoCompletionFor(
fileText =
s"""val v = if 1 > 2 then
|  println("hmm")
|  3
|else 4
|e$CARET
|""".stripMargin,
item = "end if"
)
/// while: "end while" marker completion
// Multiline do-body: "end while" is offered.
def testWhile(): Unit = checkLookupElement(
fileText =
s"""var x = 5
|while x > 0 do
|  x -= 2
|  x += 1
|e$CARET
|""".stripMargin,
resultText =
s"""var x = 5
|while x > 0 do
|  x -= 2
|  x += 1
|end while
|$CARET
|""".stripMargin,
lookupStr = "end while"
)
// Old-style parenthesized condition still qualifies.
def testWhileOldStyle(): Unit = checkLookupElement(
fileText =
s"""var x = 5
|while (x > 0)
|  x -= 2
|  x += 1
|e$CARET
|""".stripMargin,
resultText =
s"""var x = 5
|while (x > 0)
|  x -= 2
|  x += 1
|end while
|$CARET
|""".stripMargin,
lookupStr = "end while"
)
// Offered without any typed prefix.
def testWhileWithoutInput(): Unit = checkLookupElement(
fileText =
s"""var x = 5
|while x > 0 do
|  x -= 2
|  x += 1
|$CARET
|""".stripMargin,
resultText =
s"""var x = 5
|while x > 0 do
|  x -= 2
|  x += 1
|end while
|$CARET
|""".stripMargin,
lookupStr = "end while"
)
// After `end ` only the keyword `while` is suggested.
def testWhileAfterEndKeyword(): Unit = checkLookupElement(
fileText =
s"""var x = 5
|while x > 0 do
|  x -= 2
|  x += 1
|end $CARET
|""".stripMargin,
resultText =
s"""var x = 5
|while x > 0 do
|  x -= 2
|  x += 1
|end while
|$CARET
|""".stripMargin,
lookupStr = "while",
presentationText = "while"
)
// One-liner body: too short for a marker.
def testNoCompletionForWhileWithOneLinerBody(): Unit = checkNoCompletion(
fileText =
s"""var x = 5
|while x > 0 do
|  x -= 1
|e$CARET
|""".stripMargin
)
// Brace-delimited body after `do`: no marker.
def testNoCompletionForWhileWithBraces1(): Unit = checkNoCompletion(
fileText =
s"""var x = 5
|while x > 0 do {
|  x -= 2
|  x += 1
|}
|e$CARET
|""".stripMargin
)
// Old-style loop with braces: no marker.
def testNoCompletionForWhileWithBraces2(): Unit = checkNoCompletion(
fileText =
s"""var x = 5
|while (x > 0) {
|  x -= 2
|  x += 1
|}
|e$CARET
|""".stripMargin
)
// Already terminated by `end while`: no marker.
def testNoCompletionForWhileWithEndMarker(): Unit = checkNoCompletion(
fileText =
s"""var x = 5
|while x > 0 do
|  x -= 2
|  x += 1
|end while
|e$CARET
|""".stripMargin
)
/// for: "end for" marker completion
// Multiline do-body: "end for" is offered.
def testFor(): Unit = checkLookupElement(
fileText =
s"""for x <- 0 to 5 do
|  println(x)
|  println(x * 2)
|e$CARET
|""".stripMargin,
resultText =
s"""for x <- 0 to 5 do
|  println(x)
|  println(x * 2)
|end for
|$CARET
|""".stripMargin,
lookupStr = "end for"
)
// Old-style `for` without `do` still qualifies.
def testForOldStyle(): Unit = checkLookupElement(
fileText =
s"""for x <- 0 to 5
|  println(x)
|  println(x * 2)
|e$CARET
|""".stripMargin,
resultText =
s"""for x <- 0 to 5
|  println(x)
|  println(x * 2)
|end for
|$CARET
|""".stripMargin,
lookupStr = "end for"
)
// Parenthesized enumerators with an indented body.
def testForWithParensAroundEnumerators(): Unit = checkLookupElement(
fileText =
s"""for (x <- 0 to 5)
|  println(x)
|  println(x * 2)
|e$CARET
|""".stripMargin,
resultText =
s"""for (x <- 0 to 5)
|  println(x)
|  println(x * 2)
|end for
|$CARET
|""".stripMargin,
lookupStr = "end for"
)
// Braces around the enumerators only; the body is still indentation-based.
def testForWithBracesAroundEnumerators(): Unit = checkLookupElement(
fileText =
s"""for {
|  x <- 0 to 5
|} do
|  println(x)
|  println(x * 2)
|e$CARET
|""".stripMargin,
resultText =
s"""for {
|  x <- 0 to 5
|} do
|  println(x)
|  println(x * 2)
|end for
|$CARET
|""".stripMargin,
lookupStr = "end for"
)
// Offered without any typed prefix.
def testForWithoutInput(): Unit = checkLookupElement(
fileText =
s"""for x <- 0 to 5 do
|  println(x)
|  println(x * 2)
|$CARET
|""".stripMargin,
resultText =
s"""for x <- 0 to 5 do
|  println(x)
|  println(x * 2)
|end for
|$CARET
|""".stripMargin,
lookupStr = "end for"
)
// After `end ` only the keyword `for` is suggested.
def testForAfterEndKeyword(): Unit = checkLookupElement(
fileText =
s"""for x <- 0 to 5 do
|  println(x)
|  println(x * 2)
|end $CARET
|""".stripMargin,
resultText =
s"""for x <- 0 to 5 do
|  println(x)
|  println(x * 2)
|end for
|$CARET
|""".stripMargin,
lookupStr = "for",
presentationText = "for"
)
// One-liner body: too short for a marker.
def testNoCompletionForForWithOneLinerBody(): Unit = checkNoCompletion(
fileText =
s"""for x <- 0 to 5 do
|  println(x)
|e$CARET
|""".stripMargin
)
// Brace-delimited body after `do`: no marker.
def testNoCompletionForForWithBracesAroundBody1(): Unit = checkNoCompletion(
fileText =
s"""for {
|  x <- 0 to 5
|} do {
|  println(x)
|  println(x * 2)
|}
|e$CARET
|""".stripMargin
)
// Brace-delimited body with no `do`: no marker.
def testNoCompletionForForWithBracesAroundBody2(): Unit = checkNoCompletion(
fileText =
s"""for {
|  x <- 0 to 5
|} {
|  println(x)
|  println(x * 2)
|}
|e$CARET
|""".stripMargin
)
// Indented enumerators but brace-delimited body: no marker.
def testNoCompletionForForWithBracesAroundBody3(): Unit = checkNoCompletion(
fileText =
s"""for
|  x <- 0 to 5
|  y <- 0 to 5
|do {
|  println(x)
|  println(x * 2)
|}
|e$CARET
|""".stripMargin
)
// Already terminated by `end for`: no marker.
def testNoCompletionForForWithEndMarker(): Unit = checkNoCompletion(
fileText =
s"""for x <- 0 to 5 do
|  println(x)
|  println(x * 2)
|end for
|e$CARET
|""".stripMargin
)
/// try: "end try" marker completion
// Multiline try body with a finally clause.
def testTry(): Unit = checkLookupElement(
fileText =
s"""var x = 0
|try
|  x -= 2
|  x += 1
|finally
|  println(x)
|e$CARET
|""".stripMargin,
resultText =
s"""var x = 0
|try
|  x -= 2
|  x += 1
|finally
|  println(x)
|end try
|$CARET
|""".stripMargin,
lookupStr = "end try"
)
// One-liner try body but a multiline finally clause.
def testTry2(): Unit = checkLookupElement(
fileText =
s"""var x = 0
|try
|  x += 1
|finally
|  println(x)
|  println(x * 2)
|e$CARET
|""".stripMargin,
resultText =
s"""var x = 0
|try
|  x += 1
|finally
|  println(x)
|  println(x * 2)
|end try
|$CARET
|""".stripMargin,
lookupStr = "end try"
)
// try/catch/finally with multiline (multi-case) catch.
def testTry3(): Unit = checkLookupElement(
fileText =
s"""var x = 0
|try
|  x += 1
|catch
|  case e: NumberFormatException => ()
|  case e: Exception => ()
|finally
|  println(x)
|e$CARET
|""".stripMargin,
resultText =
s"""var x = 0
|try
|  x += 1
|catch
|  case e: NumberFormatException => ()
|  case e: Exception => ()
|finally
|  println(x)
|end try
|$CARET
|""".stripMargin,
lookupStr = "end try"
)
// try/catch without finally.
def testTry4(): Unit = checkLookupElement(
fileText =
s"""var x = 0
|try
|  x += 1
|catch
|  case e: NumberFormatException => ()
|  case e: Exception => ()
|e$CARET
|""".stripMargin,
resultText =
s"""var x = 0
|try
|  x += 1
|catch
|  case e: NumberFormatException => ()
|  case e: Exception => ()
|end try
|$CARET
|""".stripMargin,
lookupStr = "end try"
)
// Braces around the try expression only; the finally clause is indentation-based.
def testTryWithBracesAroundExpression(): Unit = checkLookupElement(
fileText =
s"""var x = 0
|try {
|  x -= 2
|  x += 1
|} finally
|  println(x)
|  println(x + 1)
|e$CARET
|""".stripMargin,
resultText =
s"""var x = 0
|try {
|  x -= 2
|  x += 1
|} finally
|  println(x)
|  println(x + 1)
|end try
|$CARET
|""".stripMargin,
lookupStr = "end try"
)
// Braces around the catch clauses only.
def testTryWithBracesAroundCatchCaseClauses(): Unit = checkLookupElement(
fileText =
s"""var x = 0
|try
|  x -= 2
|  x += 1
|catch {
|  case e: NumberFormatException => ()
|  case e: Exception => ()
|}
|finally
|  println(x)
|e$CARET
|""".stripMargin,
resultText =
s"""var x = 0
|try
|  x -= 2
|  x += 1
|catch {
|  case e: NumberFormatException => ()
|  case e: Exception => ()
|}
|finally
|  println(x)
|end try
|$CARET
|""".stripMargin,
lookupStr = "end try"
)
// Braces around both try expression and catch clauses; indented finally.
def testTryWithBracesAroundTryExprAndCatchCaseClauses(): Unit = checkLookupElement(
fileText =
s"""var x = 0
|try {
|  x -= 2
|  x += 1
|} catch {
|  case e: NumberFormatException => ()
|  case e: Exception => ()
|}
|finally
|  println(x)
|  println(x + 1)
|e$CARET
|""".stripMargin,
resultText =
s"""var x = 0
|try {
|  x -= 2
|  x += 1
|} catch {
|  case e: NumberFormatException => ()
|  case e: Exception => ()
|}
|finally
|  println(x)
|  println(x + 1)
|end try
|$CARET
|""".stripMargin,
lookupStr = "end try"
)
// Offered without any typed prefix.
def testTryWithoutInput(): Unit = checkLookupElement(
fileText =
s"""var x = 0
|try
|  x -= 2
|  x += 1
|finally
|  println(x)
|$CARET
|""".stripMargin,
resultText =
s"""var x = 0
|try
|  x -= 2
|  x += 1
|finally
|  println(x)
|end try
|$CARET
|""".stripMargin,
lookupStr = "end try"
)
// After `end ` only the keyword `try` is suggested.
def testTryAfterEndKeyword(): Unit = checkLookupElement(
fileText =
s"""var x = 0
|try
|  x -= 2
|  x += 1
|finally
|  println(x)
|end $CARET
|""".stripMargin,
resultText =
s"""var x = 0
|try
|  x -= 2
|  x += 1
|finally
|  println(x)
|end try
|$CARET
|""".stripMargin,
lookupStr = "try",
presentationText = "try"
)
// All clauses one-liners: too short for a marker.
def testNoCompletionForTryWithOneLinerBlocks(): Unit = checkNoCompletion(
fileText =
s"""var x = 0
|try
|  x += 1
|catch
|  case e: Exception => ()
|finally
|  println(x)
|e$CARET
|""".stripMargin
)
// Braced try/catch with a one-liner finally: no marker.
def testNoCompletionForTryWithBraces(): Unit = checkNoCompletion(
fileText =
s"""var x = 0
|try {
|  x -= 2
|  x += 1
|} catch {
|  case e: NumberFormatException => ()
|  case e: Exception => ()
|}
|finally
|  println(x)
|e$CARET
|""".stripMargin
)
// Brace-delimited finally clause last: no marker.
def testNoCompletionForTryWithBraces2(): Unit = checkNoCompletion(
fileText =
s"""var x = 0
|try
|  x -= 2
|  x += 1
|catch
|  case e: NumberFormatException => ()
|  case e: Exception => ()
|finally {
|  println(x)
|}
|e$CARET
|""".stripMargin
)
// Brace-delimited catch clause last (no finally): no marker.
def testNoCompletionForTryWithBraces3(): Unit = checkNoCompletion(
fileText =
s"""var x = 0
|try
|  x -= 2
|  x += 1
|catch {
|  case e: NumberFormatException => ()
|  case e: Exception => ()
|}
|e$CARET
|""".stripMargin
)
// Already terminated by `end try`: no marker.
def testNoCompletionForTryWithEndMarker(): Unit = checkNoCompletion(
fileText =
s"""var x = 0
|try
|  x -= 2
|  x += 1
|finally
|  println(x)
|end try
|e$CARET
|""".stripMargin
)
/// match: "end match" marker completion
// Multiline case clauses: "end match" is offered.
def testMatch(): Unit = checkLookupElement(
fileText =
s"""val x = ???
|x match
|  case 0 => println("0")
|  case _ =>
|e$CARET
|""".stripMargin,
resultText =
s"""val x = ???
|x match
|  case 0 => println("0")
|  case _ =>
|end match
|$CARET
|""".stripMargin,
lookupStr = "end match"
)
// Offered without any typed prefix.
def testMatchWithoutInput(): Unit = checkLookupElement(
fileText =
s"""val x = ???
|x match
|  case 0 => println("0")
|  case _ =>
|$CARET
|""".stripMargin,
resultText =
s"""val x = ???
|x match
|  case 0 => println("0")
|  case _ =>
|end match
|$CARET
|""".stripMargin,
lookupStr = "end match"
)
// After `end ` only the keyword `match` is suggested.
def testMatchAfterEndKeyword(): Unit = checkLookupElement(
fileText =
s"""val x = ???
|x match
|  case 0 => println("0")
|  case _ =>
|end $CARET
|""".stripMargin,
resultText =
s"""val x = ???
|x match
|  case 0 => println("0")
|  case _ =>
|end match
|$CARET
|""".stripMargin,
lookupStr = "match",
presentationText = "match"
)
// A single case clause: too short for a marker.
def testNoCompletionForMatchWithOneLinerCaseClauses(): Unit = checkNoCompletion(
fileText =
s"""val x = ???
|x match
|  case 0 => println("0")
|e$CARET
|""".stripMargin
)
// Brace-delimited case clauses: no marker.
def testNoCompletionForMatchWithBraces(): Unit = checkNoCompletion(
fileText =
s"""val x = ???
|x match {
|  case 0 => println("0")
|  case _ =>
|}
|e$CARET
|""".stripMargin
)
// Already terminated by `end match`: no marker.
def testNoCompletionForMatchWithEndMarker(): Unit = checkNoCompletion(
fileText =
s"""val x = ???
|x match
|  case 0 => println("0")
|  case _ =>
|end match
|e$CARET
|""".stripMargin
)
/// misaligned markers: the caret sits deeper than the construct being closed;
/// inserting a marker re-aligns it to the indentation of the chosen construct.
/// NOTE(review): the fixture indentation below was reconstructed from layout — verify
/// against the original file before relying on exact column positions.
// Closing the innermost construct (`x match`) re-indents to the match's column.
def testMisalignedEndMarker1(): Unit = checkLookupElement(
fileText =
s"""package p1.p2:
|  abstract class C():
|    def this(x: Int) =
|      this()
|      if x > 0 then
|        try
|          x match
|            case 0 => println("0")
|            case _ =>
|            e$CARET
|""".stripMargin,
s"""package p1.p2:
|  abstract class C():
|    def this(x: Int) =
|      this()
|      if x > 0 then
|        try
|          x match
|            case 0 => println("0")
|            case _ =>
|          end match
|          $CARET
|""".stripMargin,
lookupStr = "end match"
)
// Closing the enclosing `try` re-indents to the try's column.
def testMisalignedEndMarker2(): Unit = checkLookupElement(
fileText =
s"""package p1.p2:
|  abstract class C():
|    def this(x: Int) =
|      this()
|      if x > 0 then
|        try
|          x match
|            case 0 => println("0")
|            case _ =>
|            e$CARET
|""".stripMargin,
s"""package p1.p2:
|  abstract class C():
|    def this(x: Int) =
|      this()
|      if x > 0 then
|        try
|          x match
|            case 0 => println("0")
|            case _ =>
|        end try
|        $CARET
|""".stripMargin,
lookupStr = "end try"
)
// Closing the enclosing `if` re-indents to the if's column.
def testMisalignedEndMarker3(): Unit = checkLookupElement(
fileText =
s"""package p1.p2:
|  abstract class C():
|    def this(x: Int) =
|      this()
|      if x > 0 then
|        try
|          x match
|            case 0 => println("0")
|            case _ =>
|            e$CARET
|""".stripMargin,
s"""package p1.p2:
|  abstract class C():
|    def this(x: Int) =
|      this()
|      if x > 0 then
|        try
|          x match
|            case 0 => println("0")
|            case _ =>
|      end if
|      $CARET
|""".stripMargin,
lookupStr = "end if"
)
// Closing the auxiliary constructor uses the `end this` form.
def testMisalignedEndMarker4(): Unit = checkLookupElement(
fileText =
s"""package p1.p2:
|  abstract class C():
|    def this(x: Int) =
|      this()
|      if x > 0 then
|        try
|          x match
|            case 0 => println("0")
|            case _ =>
|            e$CARET
|""".stripMargin,
s"""package p1.p2:
|  abstract class C():
|    def this(x: Int) =
|      this()
|      if x > 0 then
|        try
|          x match
|            case 0 => println("0")
|            case _ =>
|    end this
|    $CARET
|""".stripMargin,
lookupStr = "end this"
)
// Closing the class uses its name.
def testMisalignedEndMarker5(): Unit = checkLookupElement(
fileText =
s"""package p1.p2:
|  abstract class C():
|    def this(x: Int) =
|      this()
|      if x > 0 then
|        try
|          x match
|            case 0 => println("0")
|            case _ =>
|            e$CARET
|""".stripMargin,
s"""package p1.p2:
|  abstract class C():
|    def this(x: Int) =
|      this()
|      if x > 0 then
|        try
|          x match
|            case 0 => println("0")
|            case _ =>
|    end C
|    $CARET
|""".stripMargin,
lookupStr = "end C"
)
// Closing the packaging re-indents all the way to column zero.
def testMisalignedEndMarker6(): Unit = checkLookupElement(
fileText =
s"""package p1.p2:
|  abstract class C():
|    def this(x: Int) =
|      this()
|      if x > 0 then
|        try
|          x match
|            case 0 => println("0")
|            case _ =>
|            e$CARET
|""".stripMargin,
s"""package p1.p2:
|  abstract class C():
|    def this(x: Int) =
|      this()
|      if x > 0 then
|        try
|          x match
|            case 0 => println("0")
|            case _ =>
|end p2
|$CARET
|""".stripMargin,
lookupStr = "end p2"
)
/// sorting: markers are offered innermost-first
// All enclosing constructs are closable: innermost (match) through outermost (package).
def testLookupElementsSorting1(): Unit = checkLookupElementsOrder(
fileText =
s"""package p1.p2:
|  abstract class C():
|    def this(x: Int) =
|      this()
|      if x > 0 then
|        try
|          x match
|            case 0 => println("0")
|            case _ =>
|            e$CARET
|""".stripMargin,
expectedItems = List("end match", "end try", "end if", "end this", "end C", "end p2")
)
// A following member at class level cuts off the class/package markers.
def testLookupElementsSorting2(): Unit = checkLookupElementsOrder(
fileText =
s"""package p1.p2:
|  abstract class C():
|    def this(x: Int) =
|      this()
|      if x > 0 then
|        try
|          x match
|            case 0 => println("0")
|            case _ =>
|            e$CARET
|    def this(x: String) = this()
|""".stripMargin,
expectedItems = List("end match", "end try", "end if", "end this")
)
// A trailing `finally` keeps the try open, so only the match can be closed.
def testLookupElementsSorting3(): Unit = checkLookupElementsOrder(
fileText =
s"""package p1.p2:
|  abstract class C():
|    def this(x: Int) =
|      this()
|      if x > 0 then
|        try
|          x match
|            case 0 => println("0")
|            case _ =>
|            e$CARET
|        finally
|          println("done")
|""".stripMargin,
expectedItems = List("end match")
)
}
|
JetBrains/intellij-scala
|
scala/scala-impl/test/org/jetbrains/plugins/scala/lang/completion3/ScalaEndMarkerCompletionTest.scala
|
Scala
|
apache-2.0
| 67,763 |
package cache
import models.BaseModel
import play.api.cache.CacheApi
import util.FutureOption
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.concurrent.duration.Duration
import scala.util.Success
/**
* Cache for all entities of a specific [[models.BaseModel]], indexed by id.
*
* @param cache injected play cache
* @param prefix prefix for the typed cache (must be unique)
* @param expiration expiration date for all data put inside the cache
* @tparam T base model type
*/
class BaseModelMapCache[T <: BaseModel[T]](cache: CacheApi,
                                           prefix: String,
                                           expiration: Duration = Duration.Inf) {

  /** A complete snapshot of all entities, keyed by entity id. */
  type ModelMap = Map[String, T]

  // Single cache key under which the whole model map is stored.
  private[this] val key = prefix + "/$all"

  /**
   * Read the whole model map from the cache.
   *
   * @return the map, or [[scala.None]] when nothing has been loaded yet
   */
  def getAll: Option[ModelMap] = cache.get(key)

  /**
   * Look up a single entity by id.
   *
   * @param id entity id
   * @return [[scala.Some]] entity when the map is loaded and contains the id, otherwise [[scala.None]]
   */
  def get(id: String): Option[T] = getAll.flatMap(_.get(id))

  /**
   * Update a single entry of the cached map. A no-op when the map has not
   * been loaded yet (there is nothing consistent to update in that case).
   *
   * @param id     entity id to replace or remove
   * @param entity [[scala.Some]] replacement entity, or [[scala.None]] to remove the entry
   * @return true when the map was loaded and the update was applied
   */
  private[this] def replaceEntry(id: String, entity: Option[T]): Boolean =
    getAll match {
      case Some(map) =>
        // None removes the entry; Some replaces (or adds) it.
        val updated = entity.fold(map - id)(newEntity => map + (id -> newEntity))
        setAll(updated)
        true
      case None =>
        false
    }

  /**
   * Store a complete model map, replacing any previous snapshot.
   *
   * @param models the model map
   */
  def setAll(models: ModelMap): Unit = cache.set(key, models, expiration)

  /**
   * Insert or replace one entity; its cache key is taken from the entity itself.
   *
   * @param entity entity to store
   */
  def set(entity: T): Unit = replaceEntry(entity._id, Some(entity))

  /**
   * Remove one entity from the cached map; a no-op when it is absent.
   *
   * @param id entity id to delete
   */
  def remove(id: String): Unit = replaceEntry(id, None)

  /** Drop the whole cached map. */
  def removeAll(): Unit = cache.remove(key)

  /**
   * Return the cached model map, or populate it. On a cache miss the given
   * provider is evaluated and, when it succeeds, its result is stored.
   *
   * @param block asynchronous provider of all entities
   * @tparam A concrete collection type returned by the provider
   * @return [[scala.concurrent.Future]] of the model map
   */
  def getAllOrElseAsync[A <: Iterable[T]](block: => Future[A]): Future[ModelMap] =
    getAll.fold {
      block
        .map(models => models.iterator.map(model => model._id -> model).toMap)
        .andThen { case Success(result) => setAll(result) }
    }(Future.successful)

  /**
   * Look up one entity, populating the whole map first when the cache is cold.
   *
   * @param id    entity id
   * @param block asynchronous provider of all entities
   * @tparam A concrete collection type returned by the provider
   * @return [[util.FutureOption]] of the entity
   */
  def getOrElseAsync[A <: Iterable[T]](id: String, block: => Future[A]): FutureOption[T] =
    FutureOption(getAllOrElseAsync(block).map(_.get(id)))
}
|
metaxmx/FridayNightBeer
|
modules/datamodel/src/main/scala/cache/BaseModelMapCache.scala
|
Scala
|
apache-2.0
| 3,688 |
package functionalProgramming.memoization
import org.scalatest.{FunSuite, WordSpecLike}
/**
* Created by yujieshui on 2017/2/2.
*/
// Smoke test for the memoized Fibonacci kata: prints solution values for a
// sample of indices (0, 1, 5, 10, 100) rather than asserting on them.
class FibonacciTest extends WordSpecLike {
import Fibonacci._
"aa" in {
println(
solution(Seq(0,1,5,10,100))
)
}
}
|
1178615156/hackerrank
|
src/test/scala/functionalProgramming/memoization/FibonacciTest.scala
|
Scala
|
apache-2.0
| 274 |
package com.thoughtworks.deeplearning.plugins
/** A plugin that enables all other DeepLearning.scala built-in plugins.
*
* @example When creating a [[Builtins]] from [[com.thoughtworks.feature.Factory]],
*
* {{{
* import com.thoughtworks.feature.Factory
* val hyperparameters = Factory[plugins.Builtins].newInstance()
* }}}
*
* and `import` anything in [[implicits]],
*
* {{{
* import hyperparameters.implicits._
* }}}
*
* then all DeepLearning.scala built-in features should be enabled.
*
* <hr/>
*
* Creating weights:
*
* {{{
* import org.nd4j.linalg.factory.Nd4j
* import org.nd4j.linalg.api.ndarray.INDArray
* }}}
* {{{
* val numberOfInputFeatures = 8
* val numberOfOutputFeatures = 1
* val initialValueOfWeight: INDArray = Nd4j.rand(numberOfInputFeatures, numberOfOutputFeatures)
* val weight: hyperparameters.INDArrayWeight = hyperparameters.INDArrayWeight(initialValueOfWeight)
* }}}
*
* Creating neural network layers,
*
* {{{
* def fullyConnectedLayer(input: INDArray): hyperparameters.INDArrayLayer = {
* input dot weight
* }
* }}}
*
* or loss functions:
*
* {{{
* def hingeLoss(scores: hyperparameters.INDArrayLayer, label: INDArray): hyperparameters.DoubleLayer = {
* hyperparameters.max(0.0, 1.0 - label * scores).sum
* }
* }}}
*
* Training:
* {{{
* import scalaz.std.stream._
* import com.thoughtworks.future._
* import com.thoughtworks.each.Monadic._
* }}}
*
* {{{
* val batchSize = 4
* val numberOfIterations = 10
* val input = Nd4j.rand(batchSize, numberOfInputFeatures)
* val label = Nd4j.rand(batchSize, numberOfOutputFeatures)
* }}}
*
* {{{
* @monadic[Future]
* def train: Future[Stream[Double]] = {
* for (iteration <- (0 until numberOfIterations).toStream) yield {
* hingeLoss(fullyConnectedLayer(input), label).train.each
* }
* }
* }}}
*
* When the training is done,
* the loss of the last iteration should be no more than the loss of the first iteration
*
* {{{
* train.map { lossesByIteration =>
* lossesByIteration.last should be <= lossesByIteration.head
* }
* }}}
*
* @author 杨博 (Yang Bo)
*/
// NOTE(review): mixin order determines trait linearization, so the order of the
// `with` clauses below is significant (e.g. Cumulative*Layers must follow the
// plain *Layers plugins they refine) — do not reorder.
trait Builtins
extends ImplicitsSingleton
with Layers
with Weights
with Logging
with Names
with Operators
with FloatTraining
with FloatLiterals
with FloatWeights
with FloatLayers
with CumulativeFloatLayers
with DoubleTraining
with DoubleLiterals
with DoubleWeights
with DoubleLayers
with CumulativeDoubleLayers
with INDArrayTraining
with INDArrayLiterals
with INDArrayWeights
with INDArrayLayers
with CumulativeINDArrayLayers {
// Aggregates the ImplicitsApi of every mixed-in plugin that contributes implicits,
// so one `import implicits._` enables all built-in conversions and syntax.
trait ImplicitsApi
extends super[Layers].ImplicitsApi
with super[Weights].ImplicitsApi
with super[Operators].ImplicitsApi
with super[FloatTraining].ImplicitsApi
with super[FloatLiterals].ImplicitsApi
with super[FloatLayers].ImplicitsApi
with super[DoubleTraining].ImplicitsApi
with super[DoubleLiterals].ImplicitsApi
with super[DoubleLayers].ImplicitsApi
with super[INDArrayTraining].ImplicitsApi
with super[INDArrayLiterals].ImplicitsApi
with super[INDArrayLayers].ImplicitsApi
// Refine the abstract Implicits member to the combined API above.
type Implicits <: ImplicitsApi
// Layers gain both logging and naming capabilities.
trait LayerApi extends super[Logging].LayerApi with super[Names].LayerApi { this: Layer =>
}
type Layer <: LayerApi
// Weights gain both logging and naming capabilities.
trait WeightApi extends super[Logging].WeightApi with super[Names].WeightApi { this: Weight =>
}
type Weight <: WeightApi
}
|
izhangzhihao/DeepLearning.scala
|
plugins-Builtins/src/main/scala-2.11/com/thoughtworks/deeplearning/plugins/Builtins.scala
|
Scala
|
apache-2.0
| 4,097 |
// EvaluationOrder.scala
// Demonstrates that && binds more tightly than ||, so an unparenthesized mixed
// expression groups as (a && b) || (c && d), not left-to-right.
val sunny = true
val hoursSleep = 6
val exercise = false
val temp = 55
// Parsed as (sunny && temp > 50) || (exercise && hoursSleep > 7).
val happy1 = sunny && temp > 50 ||
exercise && hoursSleep > 7
println(happy1) // true
// Explicit parentheses make the default grouping visible.
val sameHappy1 = (sunny && temp > 50) || (exercise && hoursSleep > 7)
println(sameHappy1) // true
// Different parenthesization yields a different result.
val notSame =
(sunny && temp > 50 || exercise) && hoursSleep > 7
println(notSame) // false
|
5x5x5x5/Back2Basics
|
atomicScala/EvaluationOrder.scala
|
Scala
|
unlicense
| 381 |
package es.weso.rdf.turtle.parser
import org.scalatest.FunSpec
import com.typesafe.config._
import com.hp.hpl.jena.rdf.model.ModelFactory
import java.io.FileOutputStream
import java.io.FileInputStream
import scala.collection.JavaConverters._
import org.scalatest.Matchers
/**
 * Runs the W3C Turtle conformance report and registers one ScalaTest case per
 * report item; also contains an (ignored) generator for the EARL report file.
 */
class RunTurtleW3cTestsSuite extends FunSpec with Matchers {

  // Report is computed once; each item becomes its own numbered test below.
  val report = RunTurtleW3cTests.createReport

  describe("W3c tests report") {
    // Number the tests 1..n so failures are easy to locate in the report.
    for ((r, n) <- report.items zip (1 to report.items.length))
      it("Should pass test " + n + ": " + r.name) {
        if (r.passed) info("Info: " + r)
        else fail("Test did not pass: " + r) // fixed: separator was missing from the failure message
      }
  }

  // Ignored to pass Travis tests
  // TODO: Refactor this to generate the report only when it is needed
  ignore("Generate W3c EARL report") {
    val passedCount = 291 // Number of tests that have to be passed
    it("Should Generate EARL report with " + passedCount + " passed values") {
      // Write the EARL model to the configured file, read it back, and count
      // the resources whose earl:outcome is earl:passed.
      val earlModel = report.generateEARL
      val conf: Config = ConfigFactory.load()
      val outFile = conf.getString("EarlReportFile")
      earlModel.write(new FileOutputStream(outFile), "TURTLE")
      val readModel = ModelFactory.createDefaultModel()
      readModel.read(new FileInputStream(outFile), "", "TURTLE")
      val earl = "http://www.w3.org/ns/earl#"
      val earl_outcome = readModel.createProperty(earl + "outcome")
      val earl_passed = readModel.createResource(earl + "passed")
      val passed = readModel.listResourcesWithProperty(earl_outcome, earl_passed).toList.asScala
      passed.length should be(passedCount)
    }
  }
}
|
labra/turtleparser-with-combinators
|
turtleparser/jvm/src/test/scala/es/weso/rdf/turtle/parser/RunTurtleW3cTestsSuite.scala
|
Scala
|
lgpl-3.0
| 1,578 |
package postgresweb.components.items
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.prefix_<^._
import japgolly.scalajs.react.{ReactComponentB, _}
import postgresweb.css.CommonStyles
import postgresweb.models.{JSONQueryFilter, Table, JSONSchema, JSONQuery}
import postgresweb.services.{GlobalState, ModelClient}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.util.{Failure, Success}
import scalacss.Defaults._
import scalacss.ScalaCssReact._
// Companion holding the ScalaCSS styles used by the Tables component.
object Tables{
object Style extends StyleSheet.Inline {
import dsl._
// Material Design Lite data-table classes plus a 2dp shadow.
val table = style(addClassNames("mdl-data-table","mdl-js-data-table","mdl-shadow--2dp"))
// Non-numeric (left-aligned) cell styling.
val td = style(addClassNames("mdl-data-table__cell--non-numeric"))
// Wrapper class for the filter-operator <select>.
val select = style(addClassName("select-wrap"))
// Compact text input used for per-column filtering.
val input = style(
addClassName("mdl-textfield__input"),
width(100.px),
fontSize(11.px),
display.inherit,
lineHeight(15.px)
)
// Boolean style: highlight a row with the indigo tint when it is selected.
val selected = styleF.bool( selected => styleS(
mixinIf(selected)(
backgroundColor(c"#C5CAE9")
)
)
)
}
}
case class Tables(model:String) {
case class State(table:Table,page:Int,selectedRow:Vector[(String,String)],query:JSONQuery)
class Backend(scope:BackendScope[Unit,State]) {
val client = ModelClient(model)
def load(jq: JSONQuery):Future[Table] = {
println("load")
client.Helpers.filter2table(jq).map { table =>
println("laoded")
scope.modState(_.copy(table = table)).runNow()
table
}
}
load(JSONQuery.baseFilter)
/**
* When a row is selected set the global state with the id of the selected row
* and set the component state
*
* @param headers table headers
* @param row selected row
* @return A callback that do the action
*/
def selectRow(headers: Vector[String], row: Vector[String]):Callback = {
GlobalState.selectedId = row.headOption
for{
_ <- Callback.log("Selected row: " + row)
result <- scope.modState(_.copy(selectedRow = headers.zip(row)))
} yield result
}
/**
* Helpler method to generate the options for the filter field according to the type of the field
*
* @param `type` type of the field (you can use JSONSchema::typeOfTitle to retrive it
* @return A list of options according to the type
*/
def filterOption(`type`:String):Seq[TagMod] = `type` match {
case "string" => Seq(
<.option(^.value := "=", ^.selected := true,"="),
<.option(^.value := "like","Like")
)
case "number" => Seq(
<.option(^.value := "=", ^.selected := true,"="),
<.option(^.value := "<","<"),
<.option(^.value := ">",">"),
<.option(^.value := "not","not")
)
case _ => {
println("Type not found: " + `type`)
Seq(
<.option(^.value := "=", "=")
)
}
}
def refresh() = for{
state <- scope.state
table <- {
println("refresh:" + state.query)
Callback(load(state.query))
}
} yield table
def modOperator(s:State, field:String)(e: ReactEventI):Callback = {
val operator = e.target.value
println(operator)
val value = s.query.filter.lift(field).map(_.value).getOrElse("")
val newFilter = s.query.filter + (field -> JSONQueryFilter(value,Some(operator)))
val newQuery = s.query.copy(filter = newFilter)
scope.modState(_.copy(query = newQuery)) >>
Callback.log("State operator for " + field + "changed") >>
Callback(load(newQuery))
}
def modFilter(s:State, field:String)(e: ReactEventI):Callback = {
val value = e.target.value
val operator:Option[String] = s.query.filter.lift(field).flatMap(_.operator)
val newFilter = if(value.size > 0) {
s.query.filter + (field -> JSONQueryFilter(value,operator))
} else {
println("Remove filter to field" + field)
s.query.filter - field
}
val newQuery = s.query.copy(filter = newFilter)
println(newQuery)
scope.modState(_.copy(query = newQuery)) >>
Callback.log("State filter for " + field + "changed") >>
Callback(load(newQuery))
}
  /** Renders the table view: one header row with the column titles, one
    * filter row (a text input plus an operator dropdown per column) and one
    * body row per data row. Clicking a data row selects it.
    */
  def render(S:State) = {
    import Tables._
    <.div(CommonStyles.row,
      <.div(CommonStyles.fullWidth,
        <.div(CommonStyles.scroll,
          <.table(Style.table,
            <.thead(
              <.tr(
                S.table.headers.map(title => <.th(Style.td,title))
              )
            ),
            <.tbody(
              // Filter row: one input + operator select per column.
              <.tr(
                S.table.headers.map(title => <.td(Style.td,
                  <.input(Style.input,^.onChange ==> modFilter(S,title)), //TODO should not be the title here but the key
                  <.span(Style.select,
                    <.select(
                      ^.onChange ==> modOperator(S,title), //TODO should not be the title here but the key
                      filterOption(S.table.schema.typeOfTitle(title))
                    )
                  )
                ))
              ),
              // Data rows; a row is styled as selected when its cell values
              // equal the values stored in S.selectedRow.
              // NOTE(review): this compares the row vector against
              // S.selectedRow.map(_._2) — confirm both sides have the same
              // collection type, otherwise the equality is always false.
              S.table.rows.map{row =>
                <.tr( Style.selected(row == S.selectedRow.map(_._2)),
                  ^.onClick --> selectRow(S.table.headers,row),
                  row.map{ cell =>
                    <.td(Style.td,cell.toString)
                  }
                )
              }
            )
          )
        )
      )
    )
  }
}
  // React component definition: unit props, rendering delegated to the
  // Backend above. NOTE(review): the initial-state arguments (empty table,
  // `1`, empty Vector, base filter) depend on State's declaration elsewhere
  // in this file — confirm their meaning there.
  val component = ReactComponentB[Unit]("ItemsInfo")
    .initialState(State(Table.empty,1,Vector(),JSONQuery.baseFilter))
    .renderBackend[Backend]
    .buildU
  /** Creates an instance of the component. */
  def apply() = component()
}
|
minettiandrea/postgres-web
|
src/main/scala/postgresweb/components/items/Tables.scala
|
Scala
|
apache-2.0
| 5,802 |
/*
* Copyright (C) 2013 Alcatel-Lucent.
*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
* Licensed to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package molecule
package io
import stream.{ IChan, OChan }
/**
* The Molecule IO Monad
*
* @param ask function called to "execute" or "ask for" the result of the IO action.
*/
final class IO[+A](
  private[io] final val ask: (impl.UThreadContext, A => Unit) => Unit) {
  /**
   * Bind the result of this action to the argument of a
   * function resulting in the next IO action to execute.
   * In other words, the new action created by `bind` passes control to the action
   * created as a reaction to the result of the current action.
   *
   * @param react the function used to create the next action.
   * @tparam B the type of the next action.
   * @return the next action.
   */
  def bind[B](react: A => IO[B]): IO[B] =
    new IO[B]((t, k) =>
      ask(t, a => react(a).ask(t, k))
    )
  /**
   * The monadic `bind` operator.
   *
   * @param react the function used to create the next action.
   * @tparam B the type of the next action.
   * @return the next action.
   */
  @inline
  final def >>\\[B](react: A => IO[B]): IO[B] = bind(react)
  /**
   * An operator that acts like `bind` but returns the result
   * of the previous action.
   *
   * @param react the function used to create the next action.
   * @tparam B the type of the next action.
   * @return the result of the previous action.
   */
  def >>&\\[B](react: A => IO[B]): IO[A] = for {
    a <- this
    _ <- react(a)
  } yield a
  /**
   * Sequence an action after this one and return the result of that action.
   *
   * @param next the next action.
   * @tparam B the type of the next action.
   * @return the result of the next action.
   */
  @inline
  final def andThen[B](next: => IO[B]): IO[B] = bind(_ => next)
  /**
   * Operator equivalent to `andThen`.
   *
   * @param next the next action.
   * @tparam B the type of the next action.
   * @return the result of the next action.
   */
  @inline
  final def >>[B](next: => IO[B]): IO[B] = bind(_ => next)
  /**
   * Sequence two unrelated actions and return the result of the first action.
   *
   * @param next the next action.
   * @tparam B the type of the next action.
   * @return the result of the first action.
   */
  def >>&[B](next: => IO[B]): IO[A] = for {
    a <- this
    _ <- next
  } yield a
  /**
   * Sequence an action after this one and return a pair containing the results of
   * both actions.
   *
   * @param next the next action.
   * @tparam B the type of the next action.
   * @return the pair with both results (implicit conversions allow to match ~ as a tuple, otherwise
   * use flatten x methods).
   */
  def seq[B](next: IO[B]): IO[A ~ B] =
    for {
      a <- this
      b <- next
    } yield new ~(a, b)
  /**
   * Sequence an action after this one and return a pair containing the results of
   * both actions.
   *
   * @param next the next action.
   * @tparam B the type of the next action.
   * @return the pair with both results (implicit conversions allow to match ~ as a tuple, otherwise
   * use flatten x methods).
   */
  @inline
  final def ~[B](next: IO[B]): IO[A ~ B] = this seq next
  /**
   * Sequence an action after this one and return the result of that action.
   *
   * @param next the next action.
   * @tparam B the type of the next action.
   * @return the result of the next action.
   */
  @inline
  final def ~>[B](next: => IO[B]): IO[B] = bind(_ => next)
  /**
   * Sequence an action after this one and return the result of this action.
   *
   * @param next the next action.
   * @tparam B the type of the next action.
   * @return the result of this action.
   */
  final def <~[B](next: => IO[B]): IO[A] =
    for {
      a <- this
      _ <- next
    } yield a
  /**
   * An operator that acts like `bind` but returns the result
   * of the previous action.
   *
   * @param react the function used to create the next action.
   * @tparam B the type of the next action.
   * @return the result of the previous action.
   */
  final def <~\\[B](react: A => IO[B]): IO[A] = for {
    a <- this
    _ <- react(a)
  } yield a
  /** Same as `bind` */
  @inline
  final def ~>\\[B](f: A => IO[B]): IO[B] = bind(f)
  /** Same as `bind` */
  @inline
  final def flatMap[B](f: A => IO[B]): IO[B] = bind(f)
  /**
   * Apply a function to the result of this action.
   *
   * @param f the function to apply.
   * @tparam B the type of the resulting action.
   * @return the transformed action.
   */
  def map[B](f: A => B): IO[B] =
    new IO[B]({ (t, k) =>
      ask(t, a => k(f(a)))
    })
  /**
   * Apply a function to the result of this action.
   *
   * @param f the function to apply.
   * @tparam B the type of the resulting action.
   * @return the transformed action.
   */
  @inline
  final def $[B](f: A => B): IO[B] = map(f)
  /**
   * Execute an action or catch a user-level exception raised by this action.
   * If a user-level exception is thrown, any resource dynamically acquired by
   * this action is shutdown. Then, if its signal matches a signal for which
   * the partial function is defined the action results in the action defined
   * by the partial function handler. Otherwise, the signal is propagated
   * further in the parent's context.
   *
   * @param f the partial function invoked if a user-level exception occurs.
   * @return a managed action.
   */
  def orCatch[B >: A](f: PartialFunction[Signal, IO[B]]): IO[B] = new IO[B]({ (t, k) =>
    t.askOrHandle(ask, k, { signal =>
      if (f.isDefinedAt(signal))
        f(signal).ask(t, k)
      else
        t.raise(signal)
    })
  })
  /** Function required by Scala for pattern matching */
  def filter(p: A => Boolean): IO[A] = new IO[A]({ (t, k) =>
    ask(t, a => if (p(a)) k(a) else sys.error("filter predicate not matched"))
  })
  import utils._
  /**
   * Interleave the execution of this action with the execution of another action.
   *
   * @param other the other action
   * @return the pair of both results (implicit conversions allow to match ~ as a tuple, otherwise
   * use flatten x methods).
   */
  def par[B](other: IO[B])(implicit ma: Message[A], mb: Message[B]): IO[A ~ B] = this |~| other
  /**
   * Operator for `par`.
   *
   * @param other the other action
   * @return an action that returns the results of both actions in a pair.
   */
  def |~|[B](other: IO[B])(implicit ma: Message[A], mb: Message[B]): IO[A ~ B] = new IO[A ~ B]({ (t, k) =>
    // Protocol: `first` is null until one side completes; it then holds
    // Right(result) of the side that finished first, and Left(signal) once
    // the pair has been delivered or a failure has been raised.
    var first: Either[Signal, Any] = null
    val ka: A => Unit = { a =>
      if (first == null)
        first = Right(a)
      else first match {
        case Right(b) =>
          first = Left(EOS)
          k(new ~(a, b.asInstanceOf[B]))
        case Left(signal) =>
          // Signal already raised
          ma.poison(a, signal)
      }
    }
    val kb: B => Unit = { b =>
      if (first == null)
        first = Right(b)
      else first match {
        case Right(a) =>
          first = Left(EOS)
          k(new ~(a.asInstanceOf[A], b))
        case Left(signal) =>
          // Signal already raised
          mb.poison(b, signal)
      }
    }
    import impl.Context
    // Both threads will share the same context such that all of them fail
    // if there is an uncaught exception (crash loudly)
    // Note that even if the context is poisoned by action a, kb could still
    // be executed just after since the continuation may have been submitted
    // in the meantime. That is why we need to track termination in first.
    val ca = new Context()
    val cb = new Context()
    // Error handler factory: shuts down both contexts, poisons the already
    // produced result (if any) and raises the signal exactly once.
    val error: ((Any, Signal) => Unit) => (Signal => Unit) => Signal => Unit = { poison =>
      raise => signal =>
        ca.shutdown(signal)
        cb.shutdown(signal)
        if (first == null) {
          first = Left(EOS)
          raise(signal)
        } else first match {
          case Right(any) =>
            first = Left(EOS)
            poison(any, signal)
            raise(signal)
          case Left(signal) =>
            // Signal already raised
        }
    }
    val ea = error((any, signal) => mb.poison(any.asInstanceOf[B], signal))
    val eb = error((any, signal) => ma.poison(any.asInstanceOf[A], signal))
    // We yield the stack for (rare) situations where this is called recursively
    // (If you can, use parl instead)
    t.submit {
      t.askOrHandle(ca, this.ask, ka, ea(t.raise), ea(t.fatal))
      t.askOrHandle(cb, other.ask, kb, eb(t.raise), eb(t.fatal))
    }
  })
  /**
   * Same as `par` but discard the result.
   *
   * @param action the other action.
   * @return an action that returns Unit after both actions have terminated.
   */
  def |*|[B](action: IO[B])(implicit ma: Message[A], mb: Message[B]): IO[Unit] = { this |~| action } >> IO()
}
object IO {
  import molecule.{ process => proc }
  /**
   * Create an action that returns the result of a thunk of code.
   *
   * Note: the thunk is call-by-name and is evaluated each time the action
   * is executed.
   *
   * @param a a call-by-name argument.
   * @return an action that returns the result of the call-by-name argument.
   */
  @inline final def apply[A](a: => A): IO[A] =
    new IO((t, k) => k(a))
  // Strict variant: the value is evaluated once, at construction time.
  private[this] def strict[A](a: A): IO[A] =
    new IO((t, k) => k(a))
  // Single shared unit action, reused by `apply()`.
  private[this] final val unit = strict(())
  /**
   * The unit action.
   *
   * @return an action that returns unit.
   */
  final def apply() = unit
  import channel.{ ROChan, RIChan }
  /**
   * Launch a process
   *
   * @param process a process that returns a result of type R.
   * @return the action that returns the result channel of the process.
   */
  def launch[R: Message](process: proc.Process[R]): IO[RIChan[R]] =
    new IO[RIChan[R]]({ (t, k) =>
      k(t.platform.launch(process))
    })
  /**
   * Launch a process
   *
   * @param process a process that returns a result of type R.
   * @param rc the return channel
   * @return unit
   */
  def launch[R: Message](process: proc.Process[R], rc: ROChan[R]): IO[Unit] =
    new IO[Unit]({ (t, k) =>
      t.submit(k(t.platform.launch(process, rc)))
    })
  /**
   * Launch a process
   *
   * @param p a process action that returns a result of type R.
   * @return the result channel
   */
  def launch[R: Message](p: IO[proc.Process[R]]): IO[RIChan[R]] =
    p >>\\ { launch(_) }
  /**
   * Launch a process
   *
   * @param ff a process action that returns a result of type R.
   * @param rc the return channel
   * @return unit
   */
  def launch[R: Message](ff: IO[proc.Process[R]], rc: ROChan[R]): IO[Unit] =
    ff >>\\ { f => launch(f, rc) }
  /**
   * Use an input channel in the context of the process.
   * The returned process-level input is attached as a resource
   * to the process context, and will get poisoned automatically
   * when the process terminates, unless the input is explicitly
   * released before (see API of Input[A]).
   *
   * @param id an identifier
   * @param ichan a first-class input channel.
   * @return an action that returns the process-level channel.
   */
  private[io] def use[A: Message](id: Int, ichan: IChan[A]): IO[Input[A]] =
    new IO[Input[A]]((t, k) => k(Input(t, id, ichan)))
  /**
   * Use an output channel in the context of the process.
   * The returned process-level output is attached as a resource
   * to the process context, and will get closed automatically
   * when the process terminates, unless the output is explicitly
   * released before (see API of Output[A]).
   *
   * @param id an identifier
   * @param ochan a first-class output channel.
   * @return an action that returns the process-level channel.
   */
  private[io] def use[A: Message](id: Int, ochan: OChan[A]): IO[Output[A]] =
    new IO[Output[A]]((t, k) => k(Output(t, id, ochan)))
}
|
molecule-labs/molecule
|
molecule-io/src/main/scala/molecule/io/IO.scala
|
Scala
|
apache-2.0
| 12,294 |
package argonaut
import scalaz._, Isomorphism._
import syntax.either._, std.string._, std.tuple._
/** The result of decoding a JSON value: either a failure, described by a
  * message and the cursor history that led to it, or a decoded value of
  * type `A`. Thin wrapper over a scalaz disjunction.
  */
case class DecodeResult[A](result: (String, CursorHistory) \\/ A) {
  /** Catamorphism: applies `failure` to the error pair or `value` to the result. */
  def fold[X](
    failure: (String, CursorHistory) => X,
    value: A => X
  ): X = result.fold({ case (m, h) => failure(m, h) }, value)
  /** Tail-recursive loop over successive decode results (see DecodeResults.loop). */
  final def loop[X](e: (String, CursorHistory) => X, f: A => X \\/ DecodeResult[A]): X =
    DecodeResult.loop(this, e, f)
  def isError: Boolean =
    result.isLeft
  def map[B](f: A => B): DecodeResult[B] =
    DecodeResult(result map f)
  def flatMap[B](f: A => DecodeResult[B]): DecodeResult[B] =
    DecodeResult(result flatMap (f(_).result))
  /** The failure message, if this is a failure. */
  def message: Option[String] =
    failure map (_._1)
  /** The cursor history of the failure, if this is a failure. */
  def history: Option[CursorHistory] =
    failure map (_._2)
  def toOption: Option[A] =
    result.toOption
  def toDisjunction: (String, CursorHistory) \\/ A =
    result
  def toEither: Either[(String, CursorHistory), A] =
    result.toEither
  /** The decoded value, or `els` on failure. */
  def getOr[AA >: A](els: => AA): AA =
    toOption.getOrElse(els)
  /** alias for `toOption` */
  def value: Option[A] =
    result.toOption
  def failure: Option[(String, CursorHistory)] =
    result.swap.toOption
  /** Turns a failure on a succeeded cursor step into a hard failure;
    * any other failure becomes a successful `None`.
    */
  def option: DecodeResult[Option[A]] =
    result.fold(
      { case (s, h) => h.head filter (_.succeeded) match {
        case None => DecodeResult.ok(None)
        case Some(_) => DecodeResult.fail(s, h)
      }},
      a => DecodeResult.ok(Some(a))
    )
  /** Falls back to `r` when this result is a failure. */
  def |||[AA >: A](r: => DecodeResult[AA]): DecodeResult[AA] =
    DecodeResult(result.fold(_ => r.result, _ => result))
  override def toString(): String = "DecodeResult(%s)".format(result)
}
object DecodeResult extends DecodeResults {
  /** Wraps a successfully decoded value. */
  def ok[A](value: A): DecodeResult[A] =
    DecodeResult(\\/-(value))
  /** Wraps a decode failure with its message and cursor history. */
  def fail[A](s: String, h: CursorHistory): DecodeResult[A] =
    DecodeResult(-\\/((s, h)))
  /** Lifts a message-or-value disjunction into a DecodeResult. */
  def fromDisjunction[A](d: String \\/ A, h: CursorHistory): DecodeResult[A] =
    d.fold(fail(_, h), ok)
}
/** Constructors, lenses and type-class instances for [[DecodeResult]]. */
trait DecodeResults {
  def okResult[A](value: A): DecodeResult[A] =
    DecodeResult.ok(value)
  def failResult[A](s: String, h: CursorHistory): DecodeResult[A] =
    DecodeResult.fail(s, h)
  /** Iterates `f` over successive results until it yields a Left (the final X),
    * or aborts with `e` as soon as a result is a failure.
    */
  @annotation.tailrec
  final def loop[A, X](d: DecodeResult[A], e: (String, CursorHistory) => X, f: A => X \\/ DecodeResult[A]): X =
    if (d.isError)
      e(d.message.get, d.history.get)
    else
      f(d.value.get) match {
        case -\\/(x) => x
        case \\/-(a) => loop(a, e, f)
      }
  /** Partial lens focusing on the (message, history) pair of a failed result. */
  def failedResultL[A]: DecodeResult[A] @?> (String, CursorHistory) =
    PLens(_.result.fold(q => Some(Store(r => failResult(r._1, r._2), q)),_ => None))
  /** Partial lens focusing on the message of a failed result. */
  def failedResultMessageL[A]: DecodeResult[A] @?> String =
    ~Lens.firstLens compose failedResultL[A]
  /** Partial lens focusing on the cursor history of a failed result. */
  def failedResultHistoryL[A]: DecodeResult[A] @?> CursorHistory =
    ~Lens.secondLens compose failedResultL[A]
  implicit def DecodeResultMonad: Monad[DecodeResult] = new Monad[DecodeResult] {
    def point[A](a: => A) = DecodeResult.ok(a)
    def bind[A, B](a: DecodeResult[A])(f: A => DecodeResult[B]) = a flatMap f
    override def map[A, B](a: DecodeResult[A])(f: A => B) = a map f
  }
  type DecodeEither[A] = (String, CursorHistory) \\/ A
  // Isomorphism between DecodeResult and its underlying disjunction, used to
  // derive Equal and Show instances below.
  val decodeResultIsoFunctor: IsoFunctor[DecodeResult, DecodeEither] = new IsoFunctorTemplate[DecodeResult, DecodeEither] {
    def to[A](decodeResult: DecodeResult[A]) = decodeResult.result
    def from[A](either: DecodeEither[A]) = DecodeResult[A](either)
  }
  def decodeResultIsoSet[A]: IsoSet[DecodeResult[A], DecodeEither[A]] = new IsoSet[DecodeResult[A], DecodeEither[A]] {
    def to = decodeResultIsoFunctor.to[A]
    def from = decodeResultIsoFunctor.from[A]
  }
  implicit def DecodeResultEqual[A: Equal]: Equal[DecodeResult[A]] = new IsomorphismEqual[DecodeResult[A], DecodeEither[A]] {
    def G = \\/.DisjunctionEqual(implicitly, implicitly)
    def iso = decodeResultIsoSet
  }
  implicit def DecodeResultShow[A : Show]: Show[DecodeResult[A]] = new IsomorphismShow[DecodeResult[A], DecodeEither[A]] {
    def G = \\/.DisjunctionShow(implicitly, implicitly)
    def iso = decodeResultIsoSet
  }
}
|
etorreborre/argonaut
|
src/main/scala/argonaut/DecodeResult.scala
|
Scala
|
bsd-3-clause
| 4,078 |
/**
* Copyright 2015 www.alaraph.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* This is my solution to problem: https://www.hackerrank.com/challenges/jumping-bunnies
* [email protected]
*/
package com.alaraph.hackerrank.jpbunnies
object Solution {
  /** Reads the number of bunnies and their jump distances from stdin, then
    * prints the least common multiple of the distances, i.e. the first point
    * where all bunnies land together again.
    */
  def main(args: Array[String]) {
    val n = scala.io.StdIn.readInt
    val jumps = scala.io.StdIn.readLine.split(" ").map(_.toInt).sortWith(_ > _)
    require(jumps.size == n)
    println(mcm(jumps.toList))
  }
  /** Extracts the multiplicity of `i` in `n`.
    *
    * @param n the number to divide (must satisfy n >= i)
    * @param i the candidate factor (must be > 1)
    * @return a pair `(e, r)` where `e` is the largest exponent such that
    *         `i^e` divides `n`, and `r` is `n / i^e`.
    */
  def p(n: Int, i: Int): (Int, Int) = {
    require(n >= i && i > 1)
    @annotation.tailrec
    def extract(m: Int, acc: Int): (Int, Int) =
      if (m % i == 0) extract(m / i, acc + 1) else (acc, m)
    extract(m = n, acc = 0)
  }
  /** Decomposes `n` into its prime factorisation.
    *
    * Trial division from 2 upwards: composites never appear because their
    * prime factors are stripped first.
    *
    * @return `(prime, exponent)` pairs, primes in descending order.
    */
  def dec(n: Int): List[(Int, Int)] = {
    @annotation.tailrec
    def loop(m: Int, i: Int, acc: List[(Int, Int)]): List[(Int, Int)] =
      if (i > m) acc
      else p(m, i) match {
        case (0, _) => loop(m, i + 1, acc)
        case (e, r) => loop(r, i + 1, (i, e) :: acc)
      }
    loop(n, 2, Nil)
  }
  /** Least common multiple of all numbers, computed as the product over all
    * primes of the prime raised to its maximum exponent.
    *
    * Returns 1 when no prime factors occur (e.g. an empty list or all ones),
    * where the previous `reduce`-based version threw.
    */
  def mcm(nn: List[Int]): BigInt = {
    val factors = nn.flatMap(dec)
    factors
      .groupBy(_._1)
      .map { case (prime, occurrences) => BigInt(prime).pow(occurrences.map(_._2).max) }
      .product
  }
}
|
maumorelli/alaraph
|
hackerrank/src/com/alaraph/hackerrank/jpbunnies/Solution.scala
|
Scala
|
apache-2.0
| 1,723 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations
import play.api.libs.json.{Format, Json}
import uk.gov.hmrc.ct._
import uk.gov.hmrc.ct.box.formats._
/** Play JSON `Format` instances for the computation box types.
  *
  * Each box wraps a single primitive value; the dedicated *Format wrapper
  * classes (OptionalIntegerFormat, IntegerFormat, DateFormat, ...) serialise
  * that value directly. Every public implicit carries an explicit type
  * annotation so implicit resolution is stable and macro-derived formats
  * (Json.format) are not re-derived at each use site.
  */
package object formats {
  implicit val ap1Format: Format[AP1] = new OptionalIntegerFormat[AP1](AP1.apply)
  implicit val ap2Format: Format[AP2] = new OptionalIntegerFormat[AP2](AP2.apply)
  implicit val ap3Format: Format[AP3] = new OptionalIntegerFormat[AP3](AP3.apply)
  implicit val cp1Format: Format[CP1] = new DateFormat[CP1](CP1.apply)
  implicit val cp2Format: Format[CP2] = new DateFormat[CP2](CP2.apply)
  implicit val cp6Format: Format[CP6] = new IntegerFormat[CP6](CP6.apply)
  implicit val cp7Format: Format[CP7] = new OptionalIntegerFormat[CP7](CP7.apply)
  implicit val cp8Format: Format[CP8] = new OptionalIntegerFormat[CP8](CP8.apply)
  implicit val cp14Format: Format[CP14] = new IntegerFormat[CP14](CP14.apply)
  implicit val cp15Format: Format[CP15] = new OptionalIntegerFormat[CP15](CP15.apply)
  implicit val cp16Format: Format[CP16] = new OptionalIntegerFormat[CP16](CP16.apply)
  implicit val cp17Format: Format[CP17] = new OptionalIntegerFormat[CP17](CP17.apply)
  implicit val cp18Format: Format[CP18] = new OptionalIntegerFormat[CP18](CP18.apply)
  implicit val cp19Format: Format[CP19] = new OptionalIntegerFormat[CP19](CP19.apply)
  implicit val cp20Format: Format[CP20] = new OptionalIntegerFormat[CP20](CP20.apply)
  implicit val cp21Format: Format[CP21] = new OptionalIntegerFormat[CP21](CP21.apply)
  implicit val cp22Format: Format[CP22] = new OptionalIntegerFormat[CP22](CP22.apply)
  implicit val cp23Format: Format[CP23] = new OptionalIntegerFormat[CP23](CP23.apply)
  implicit val cp24Format: Format[CP24] = new OptionalIntegerFormat[CP24](CP24.apply)
  implicit val cp25Format: Format[CP25] = new OptionalIntegerFormat[CP25](CP25.apply)
  implicit val cp26Format: Format[CP26] = new OptionalIntegerFormat[CP26](CP26.apply)
  implicit val cp27Format: Format[CP27] = new OptionalIntegerFormat[CP27](CP27.apply)
  implicit val cp28Format: Format[CP28] = new OptionalIntegerFormat[CP28](CP28.apply)
  implicit val cp29Format: Format[CP29] = new OptionalIntegerFormat[CP29](CP29.apply)
  implicit val cp30Format: Format[CP30] = new OptionalIntegerFormat[CP30](CP30.apply)
  implicit val cp31Format: Format[CP31] = new OptionalIntegerFormat[CP31](CP31.apply)
  implicit val cp32Format: Format[CP32] = new OptionalIntegerFormat[CP32](CP32.apply)
  implicit val cp33Format: Format[CP33] = new OptionalIntegerFormat[CP33](CP33.apply)
  implicit val cp34Format: Format[CP34] = new OptionalIntegerFormat[CP34](CP34.apply)
  implicit val cp35Format: Format[CP35] = new OptionalIntegerFormat[CP35](CP35.apply)
  implicit val cp36Format: Format[CP36] = new OptionalIntegerFormat[CP36](CP36.apply)
  implicit val cp37Format: Format[CP37] = new OptionalIntegerFormat[CP37](CP37.apply)
  implicit val cp38Format: Format[CP38] = new IntegerFormat[CP38](CP38.apply)
  implicit val cp40Format: Format[CP40] = new IntegerFormat[CP40](CP40.apply)
  implicit val cp43Format: Format[CP43] = new OptionalIntegerFormat[CP43](CP43.apply)
  implicit val cp44Format: Format[CP44] = new IntegerFormat[CP44](CP44.apply)
  implicit val cp45Format: Format[CP45] = new IntegerFormat[CP45](CP45.apply)
  implicit val cp46Format: Format[CP46] = new OptionalIntegerFormat[CP46](CP46.apply)
  implicit val cp47Format: Format[CP47] = new OptionalIntegerFormat[CP47](CP47.apply)
  implicit val cp48Format: Format[CP48] = new OptionalIntegerFormat[CP48](CP48.apply)
  implicit val cp49Format: Format[CP49] = new OptionalIntegerFormat[CP49](CP49.apply)
  implicit val cp50Format: Format[CP50] = new OptionalIntegerFormat[CP50](CP50.apply)
  implicit val cp51Format: Format[CP51] = new OptionalIntegerFormat[CP51](CP51.apply)
  implicit val cp52Format: Format[CP52] = new OptionalIntegerFormat[CP52](CP52.apply)
  implicit val cp53Format: Format[CP53] = new OptionalIntegerFormat[CP53](CP53.apply)
  implicit val cp54Format: Format[CP54] = new IntegerFormat[CP54](CP54.apply)
  implicit val cp55Format: Format[CP55] = new OptionalIntegerFormat[CP55](CP55.apply)
  implicit val cp56Format: Format[CP56] = new IntegerFormat[CP56](CP56.apply)
  implicit val cp57Format: Format[CP57] = new OptionalIntegerFormat[CP57](CP57.apply)
  implicit val cp58Format: Format[CP58] = new IntegerFormat[CP58](CP58.apply)
  implicit val cp59Format: Format[CP59] = new IntegerFormat[CP59](CP59.apply)
  implicit val cp78Format: Format[CP78] = new OptionalIntegerFormat[CP78](CP78.apply)
  implicit val cp79Format: Format[CP79] = new OptionalIntegerFormat[CP79](CP79.apply)
  implicit val cp80Format: Format[CP80] = new OptionalIntegerFormat[CP80](CP80.apply)
  implicit val cp81Format: Format[CP81] = new IntegerFormat[CP81](CP81.apply)
  implicit val cp81InputFormat: Format[CP81Input] = new OptionalIntegerFormat[CP81Input](CP81Input.apply)
  implicit val cp82Format: Format[CP82] = new OptionalIntegerFormat[CP82](CP82.apply)
  implicit val cp83Format: Format[CP83] = new OptionalIntegerFormat[CP83](CP83.apply)
  implicit val cp84Format: Format[CP84] = new OptionalIntegerFormat[CP84](CP84.apply)
  implicit val cp85Format: Format[CP85] = new OptionalIntegerFormat[CP85](CP85.apply)
  implicit val cp86Format: Format[CP86] = new OptionalIntegerFormat[CP86](CP86.apply)
  implicit val cp87InputFormat: Format[CP87Input] = new OptionalIntegerFormat[CP87Input](CP87Input.apply)
  implicit val cp88Format: Format[CP88] = new OptionalIntegerFormat[CP88](CP88.apply)
  implicit val cp89Format: Format[CP89] = new OptionalIntegerFormat[CP89](CP89.apply)
  implicit val cp90Format: Format[CP90] = new OptionalIntegerFormat[CP90](CP90.apply)
  implicit val cp91Format: Format[CP91] = new OptionalIntegerFormat[CP91](CP91.apply)
  implicit val cp91InputFormat: Format[CP91Input] = new OptionalIntegerFormat[CP91Input](CP91Input.apply)
  implicit val cp92Format: Format[CP92] = new OptionalIntegerFormat[CP92](CP92.apply)
  implicit val cp93Format: Format[CP93] = new OptionalIntegerFormat[CP93](CP93.apply)
  implicit val cp95Format: Format[CP95] = new OptionalIntegerFormat[CP95](CP95.apply)
  implicit val cp96Format: Format[CP96] = new OptionalIntegerFormat[CP96](CP96.apply)
  implicit val cp98Format: Format[CP98] = new OptionalIntegerFormat[CP98](CP98.apply)
  implicit val cp99Format: Format[CP99] = new IntegerFormat[CP99](CP99.apply)
  implicit val cp100Format: Format[CP100] = new IntegerFormat[CP100](CP100.apply)
  implicit val cp101Format: Format[CP101] = new OptionalIntegerFormat[CP101](CP101.apply)
  implicit val cp102Format: Format[CP102] = new OptionalIntegerFormat[CP102](CP102.apply)
  implicit val cp103Format: Format[CP103] = new OptionalIntegerFormat[CP103](CP103.apply)
  implicit val cp104Format: Format[CP104] = new OptionalIntegerFormat[CP104](CP104.apply)
  implicit val cp106Format: Format[CP106] = new OptionalIntegerFormat[CP106](CP106.apply)
  implicit val cp107Format: Format[CP107] = new OptionalIntegerFormat[CP107](CP107.apply)
  implicit val cp108Format: Format[CP108] = new OptionalIntegerFormat[CP108](CP108.apply)
  implicit val cp111Format: Format[CP111] = new IntegerFormat[CP111](CP111.apply)
  implicit val cp113Format: Format[CP113] = new IntegerFormat[CP113](CP113.apply)
  implicit val cp114Format: Format[CP114] = new IntegerFormat[CP114](CP114.apply)
  implicit val cp115Format: Format[CP115] = new IntegerFormat[CP115](CP115.apply)
  implicit val cp116Format: Format[CP116] = new IntegerFormat[CP116](CP116.apply)
  implicit val cp117Format: Format[CP117] = new IntegerFormat[CP117](CP117.apply)
  implicit val cp118Format: Format[CP118] = new IntegerFormat[CP118](CP118.apply)
  implicit val cp186Format: Format[CP186] = new OptionalIntegerFormat[CP186](CP186.apply)
  implicit val cp234Format: Format[CP234] = new OptionalIntegerFormat[CP234](CP234.apply)
  implicit val cp235Format: Format[CP235] = new OptionalIntegerFormat[CP235](CP235.apply)
  implicit val cp237Format: Format[CP237] = new OptionalIntegerFormat[CP237](CP237.apply)
  implicit val cp238Format: Format[CP238] = new OptionalIntegerFormat[CP238](CP238.apply)
  implicit val cp239Format: Format[CP239] = new IntegerFormat[CP239](CP239.apply)
  implicit val cp240Format: Format[CP240] = new OptionalIntegerFormat[CP240](CP240.apply)
  implicit val cp245Format: Format[CP245] = new OptionalIntegerFormat[CP245](CP245.apply)
  implicit val cp246Format: Format[CP246] = new OptionalIntegerFormat[CP246](CP246.apply)
  implicit val cp247Format: Format[CP247] = new OptionalIntegerFormat[CP247](CP247.apply)
  implicit val cp248Format: Format[CP248] = new OptionalIntegerFormat[CP248](CP248.apply)
  implicit val cp249Format: Format[CP249] = new OptionalIntegerFormat[CP249](CP249.apply)
  implicit val cp251Format: Format[CP251] = new IntegerFormat[CP251](CP251.apply)
  implicit val cp252Format: Format[CP252] = new OptionalIntegerFormat[CP252](CP252.apply)
  implicit val cp253Format: Format[CP253] = new IntegerFormat[CP253](CP253.apply)
  implicit val cp256Format: Format[CP256] = new IntegerFormat[CP256](CP256.apply)
  implicit val cp257Format: Format[CP257] = new OptionalIntegerFormat[CP257](CP257.apply)
  implicit val cp258Format: Format[CP258] = new IntegerFormat[CP258](CP258.apply)
  implicit val cp259Format: Format[CP259] = new IntegerFormat[CP259](CP259.apply)
  implicit val cp264Format: Format[CP264] = new IntegerFormat[CP264](CP264.apply)
  implicit val cp265Format: Format[CP265] = new IntegerFormat[CP265](CP265.apply)
  implicit val cp266Format: Format[CP266] = new IntegerFormat[CP266](CP266.apply)
  implicit val cp273Format: Format[CP273] = new IntegerFormat[CP273](CP273.apply)
  implicit val cp274Format: Format[CP274] = new IntegerFormat[CP274](CP274.apply)
  implicit val cp278Format: Format[CP278] = new IntegerFormat[CP278](CP278.apply)
  implicit val cp279Format: Format[CP279] = new OptionalIntegerFormat[CP279](CP279.apply)
  implicit val cp281Format: Format[CP281] = new OptionalIntegerFormat[CP281](CP281.apply)
  implicit val cp282Format: Format[CP282] = new OptionalIntegerFormat[CP282](CP282.apply)
  implicit val cp283Format: Format[CP283] = new OptionalIntegerFormat[CP283](CP283.apply)
  implicit val cp284Format: Format[CP284] = new OptionalIntegerFormat[CP284](CP284.apply)
  implicit val cp285Format: Format[CP285] = new OptionalDateFormat[CP285](CP285.apply)
  implicit val cp286Format: Format[CP286] = new OptionalIntegerFormat[CP286](CP286.apply)
  implicit val cp287Format: Format[CP287] = new OptionalIntegerFormat[CP287](CP287.apply)
  implicit val cp288Format: Format[CP288] = new OptionalIntegerFormat[CP288](CP288.apply)
  implicit val cp289Format: Format[CP289] = new OptionalIntegerFormat[CP289](CP289.apply)
  implicit val cp290Format: Format[CP290] = new OptionalIntegerFormat[CP290](CP290.apply)
  implicit val cp291Format: Format[CP291] = new OptionalIntegerFormat[CP291](CP291.apply)
  implicit val cp292Format: Format[CP292] = new IntegerFormat[CP292](CP292.apply)
  implicit val cp293Format: Format[CP293] = new IntegerFormat[CP293](CP293.apply)
  implicit val cp294Format: Format[CP294] = new IntegerFormat[CP294](CP294.apply)
  implicit val cp295Format: Format[CP295] = new IntegerFormat[CP295](CP295.apply)
  implicit val cp301Format: Format[CP301] = new OptionalIntegerFormat[CP301](CP301.apply)
  implicit val cp302Format: Format[CP302] = new OptionalIntegerFormat[CP302](CP302.apply)
  implicit val cp303Format: Format[CP303] = new OptionalIntegerFormat[CP303](CP303.apply)
  implicit val cp305Format: Format[CP305] = new IntegerFormat[CP305](CP305.apply)
  implicit val cp501Format: Format[CP501] = new OptionalIntegerFormat[CP501](CP501.apply)
  implicit val cp502Format: Format[CP502] = new OptionalIntegerFormat[CP502](CP502.apply)
  implicit val cp503Format: Format[CP503] = new OptionalIntegerFormat[CP503](CP503.apply)
  implicit val cp504Format: Format[CP504] = new IntegerFormat[CP504](CP504.apply)
  implicit val cp505Format: Format[CP505] = new OptionalIntegerFormat[CP505](CP505.apply)
  implicit val cp506Format: Format[CP506] = new OptionalIntegerFormat[CP506](CP506.apply)
  implicit val cp507Format: Format[CP507] = new IntegerFormat[CP507](CP507.apply)
  implicit val cp508Format: Format[CP508] = new IntegerFormat[CP508](CP508.apply)
  implicit val cp509Format: Format[CP509] = new IntegerFormat[CP509](CP509.apply)
  implicit val cp510Format: Format[CP510] = new OptionalIntegerFormat[CP510](CP510.apply)
  implicit val cp511Format: Format[CP511] = new IntegerFormat[CP511](CP511.apply)
  implicit val cp512Format: Format[CP512] = new IntegerFormat[CP512](CP512.apply)
  implicit val cp513Format: Format[CP513] = new OptionalIntegerFormat[CP513](CP513.apply)
  implicit val cp514Format: Format[CP514] = new IntegerFormat[CP514](CP514.apply)
  implicit val cp515Format: Format[CP515] = new OptionalIntegerFormat[CP515](CP515.apply)
  implicit val cp666Format: Format[CP666] = new OptionalIntegerFormat[CP666](CP666.apply)
  implicit val cp667Format: Format[CP667] = new OptionalIntegerFormat[CP667](CP667.apply)
  implicit val cp668Format: Format[CP668] = new OptionalIntegerFormat[CP668](CP668.apply)
  implicit val cp669Format: Format[CP669] = new OptionalIntegerFormat[CP669](CP669.apply)
  implicit val cp670Format: Format[CP670] = new OptionalIntegerFormat[CP670](CP670.apply)
  implicit val cp671Format: Format[CP671] = new OptionalIntegerFormat[CP671](CP671.apply)
  implicit val cp672Format: Format[CP672] = new OptionalIntegerFormat[CP672](CP672.apply)
  implicit val cp673Format: Format[CP673] = new OptionalIntegerFormat[CP673](CP673.apply)
  implicit val cp674Format: Format[CP674] = new OptionalIntegerFormat[CP674](CP674.apply)
  implicit val cp998Format: Format[CP998] = new OptionalIntegerFormat[CP998](CP998.apply)
  implicit val cp999Format: Format[CP999] = new IntegerFormat[CP999](CP999.apply)
  implicit val cpAux1Format: Format[CPAux1] = new IntegerFormat[CPAux1](CPAux1.apply)
  implicit val cpAux2Format: Format[CPAux2] = new IntegerFormat[CPAux2](CPAux2.apply)
  implicit val cpAux3Format: Format[CPAux3] = new IntegerFormat[CPAux3](CPAux3.apply)
  implicit val cpq1000Format: Format[CPQ1000] = new OptionalBooleanFormat[CPQ1000](CPQ1000.apply)
  implicit val cpq7Format: Format[CPQ7] = new OptionalBooleanFormat[CPQ7](CPQ7.apply)
  implicit val cpq8Format: Format[CPQ8] = new OptionalBooleanFormat[CPQ8](CPQ8.apply)
  implicit val cpq10Format: Format[CPQ10] = new OptionalBooleanFormat[CPQ10](CPQ10.apply)
  implicit val cpq17Format: Format[CPQ17] = new OptionalBooleanFormat[CPQ17](CPQ17.apply)
  implicit val cpq18Format: Format[CPQ18] = new OptionalBooleanFormat[CPQ18](CPQ18.apply)
  implicit val cpq19Format: Format[CPQ19] = new OptionalBooleanFormat[CPQ19](CPQ19.apply)
  implicit val cpq20Format: Format[CPQ20] = new OptionalBooleanFormat[CPQ20](CPQ20.apply)
  implicit val cpq21Format: Format[CPQ21] = new OptionalBooleanFormat[CPQ21](CPQ21.apply)
  // Explicit type added for consistency with every other implicit here
  // (public implicits should always carry an explicit type annotation).
  implicit val carFormatter: Format[Car] = Json.format[Car]
  implicit val lec01Format: Format[LEC01] = Json.format[LEC01]
  implicit val cato01Format: Format[CATO01] = new IntegerFormat[CATO01](CATO01.apply)
  implicit val cato02Format: Format[CATO02] = new IntegerFormat[CATO02](CATO02.apply)
  implicit val cato03Format: Format[CATO03] = new IntegerFormat[CATO03](CATO03.apply)
  implicit val cato04Format: Format[CATO04] = new BigDecimalFormat[CATO04](CATO04.apply)
  implicit val cato10Format: Format[CATO10] = new BooleanFormat[CATO10](CATO10.apply)
  implicit val cato11Format: Format[CATO11] = new OptionalStringFormat[CATO11](CATO11.apply)
  implicit val cato12Format: Format[CATO12] = new OptionalStringFormat[CATO12](CATO12.apply)
  implicit val cato13Format: Format[CATO13] = new IntegerFormat[CATO13](CATO13.apply)
  implicit val cato14Format: Format[CATO14] = new IntegerFormat[CATO14](CATO14.apply)
  implicit val cato15Format: Format[CATO15] = new IntegerFormat[CATO15](CATO15.apply)
  implicit val cato16Format: Format[CATO16] = new IntegerFormat[CATO16](CATO16.apply)
  implicit val cato19Format: Format[CATO19] = new BooleanFormat[CATO19](CATO19.apply)
  implicit val cato20Format: Format[CATO20] = new IntegerFormat[CATO20](CATO20.apply)
  implicit val cato21Format: Format[CATO21] = new BigDecimalFormat[CATO21](CATO21.apply)
  implicit val cato22Format: Format[CATO22] = new BigDecimalFormat[CATO22](CATO22.apply)
}
|
pncampbell/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/computations/formats/package.scala
|
Scala
|
apache-2.0
| 17,112 |
package edu.umd.mith.hathi.api
import edu.umd.mith.util.DispatchUtils
import org.jboss.netty.util.Timer
import scala.concurrent.ExecutionContext
import scala.concurrent.duration._
/** Default basic configuration and properties for any API client.
*/
trait BasicClient extends DispatchUtils {
  /** Maximum number of retries; presumably consumed by back-off helpers in
    * [[DispatchUtils]] — TODO confirm. Override to tune.
    */
  def backoffMaxRetries = 8
  /** Delay between back-off retries (see note on [[backoffMaxRetries]]). */
  def backoffDelay = 1.second
  /** Netty timer; must be supplied by the concrete client. Stopped by [[shutdown]]. */
  implicit def timer: Timer
  /** Execution context for future composition; supplied by the concrete client. */
  implicit def executor: ExecutionContext
  /** Releases client resources: shuts down the HTTP executor provided by
    * `DispatchUtils`, then stops the timer.
    */
  def shutdown(): Unit = {
    http.shutdown()
    timer.stop()
  }
}
|
umd-mith/hathi
|
core/src/main/scala/hathi/api/client.scala
|
Scala
|
apache-2.0
| 496 |
/*
* Copyright (c) 2013, Scodec
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors
* may be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package scodec.bits
/** Provides types related to base conversion -- e.g., binary, hexadecimal, and base 64. */
object Bases {
  /** Partial mapping between characters and indices used in base conversions.
    */
  trait Alphabet {
    /** Converts the specified index to a character.
      * @throws IndexOutOfBoundsException
      *   if the specified index is not supported by this alphabet
      */
    def toChar(index: Int): Char
    /** Converts the specified char to an index.
      * @throws IllegalArgumentException
      *   if the specified char is not supported by this alphabet
      */
    def toIndex(c: Char): Int
    /** Indicates whether the specified character should be ignored (skipped when decoding).
      */
    def ignore(c: Char): Boolean
  }
  /** An alphabet that supports padding with a pad character. */
  trait PaddedAlphabet extends Alphabet {
    /** Padding character. */
    val pad: Char
  }
  /** An alphabet that supports binary conversion. */
  trait BinaryAlphabet extends Alphabet
  /** An alphabet that supports hexadecimal conversion. */
  trait HexAlphabet extends Alphabet
  /** An alphabet that supports base 32 conversion. */
  trait Base32Alphabet extends PaddedAlphabet
  /** An alphabet that supports base 64 conversion. */
  trait Base64Alphabet extends PaddedAlphabet
  /** Predefined alphabets for use in base conversions. */
  object Alphabets {
    /** Binary alphabet that uses `{0, 1}` and allows whitespace and underscores for separation. */
    object Binary extends BinaryAlphabet {
      def toChar(i: Int) = if (i == 0) '0' else '1'
      def toIndex(c: Char) =
        c match {
          case '0' => 0
          case '1' => 1
          case _ => throw new IllegalArgumentException
        }
      def ignore(c: Char) = c.isWhitespace || c == '_'
    }
    /** Binary alphabet that uses `{t, f}` and allows whitespace and underscores for separation. */
    object Truthy extends BinaryAlphabet {
      // 't' maps to index 0 and 'f' to index 1, mirroring '0'/'1' in `Binary`.
      def toChar(i: Int) = if (i == 0) 't' else 'f'
      def toIndex(c: Char) =
        c match {
          case 't' | 'T' => 0
          case 'f' | 'F' => 1
          case _ => throw new IllegalArgumentException
        }
      def ignore(c: Char) = c.isWhitespace || c == '_'
    }
    /** Abstract hex alphabet that supports `{0-9, A-F, a-f}` for looking up an index from a
      * char; concrete subclasses fix the case used when rendering characters.
      */
    private[bits] abstract class LenientHex extends HexAlphabet {
      def toIndex(c: Char) =
        c match {
          case c if c >= '0' && c <= '9' => c - '0'
          case c if c >= 'a' && c <= 'f' => 10 + (c - 'a')
          case c if c >= 'A' && c <= 'F' => 10 + (c - 'A')
          case _ => throw new IllegalArgumentException
        }
      def ignore(c: Char) = c.isWhitespace || c == '_'
    }
    /** Base 16 alphabet that uses `{0-9, a-f}`. Whitespace and underscores are ignored. */
    object HexLowercase extends LenientHex {
      private val Chars =
        Array('0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f')
      def toChar(i: Int) = Chars(i)
    }
    /** Base 16 alphabet that uses `{0-9, A-F}`. Whitespace and underscores are ignored. */
    object HexUppercase extends LenientHex {
      private val Chars =
        Array('0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F')
      def toChar(i: Int) = Chars(i)
    }
    /** Builds a dense char-to-index lookup table from a sparse map: returns the smallest mapped
      * character (as an Int offset) together with an array in which slot `c - min` holds the
      * index for character `c`, or -1 for characters without a mapping.
      */
    private def charIndicesLookupArray(indicesMap: Map[Char, Int]): (Int, Array[Int]) = {
      val indicesMin: Int = indicesMap.keys.min.toInt
      val indices: Array[Int] = Array.tabulate[Int](indicesMap.keys.max - indicesMin + 1) { i =>
        indicesMap.getOrElse((i + indicesMin).toChar, -1)
      }
      (indicesMin, indices)
    }
    /** Base 32 alphabet, with padding, as defined by
      * [[https://tools.ietf.org/html/rfc4648#section-6 RFC 4648 section 6]]. Whitespace is ignored.
      */
    object Base32 extends Base32Alphabet {
      private val Chars: Array[Char] = (('A' to 'Z') ++ ('2' to '7')).toArray
      private val (indicesMin, indices) = charIndicesLookupArray(Chars.zipWithIndex.toMap)
      val pad = '='
      def toChar(i: Int) = Chars(i)
      def toIndex(c: Char) = {
        // -1 entries in the lookup table mark characters outside the alphabet.
        val lookupIndex = c - indicesMin
        if (lookupIndex >= 0 && lookupIndex < indices.length && indices(lookupIndex) >= 0)
          indices(lookupIndex)
        else throw new IllegalArgumentException
      }
      def ignore(c: Char) = c.isWhitespace
    }
    /** Base 32 Crockford alphabet as defined by [[https://www.crockford.com/base32.html]].
      * Decoding is case-insensitive and maps the look-alikes O/o to 0 and I/i/L/l to 1;
      * whitespace and hyphens are ignored.
      */
    object Base32Crockford extends Base32Alphabet {
      private val Chars: Array[Char] =
        (('0' to '9') ++ ('A' to 'H') ++ ('J' to 'K') ++ ('M' to 'N') ++ ('P' to 'T') ++ ('V' to 'Z')).toArray
      private val (indicesMin, indices) = charIndicesLookupArray {
        // Accept lowercase input plus the commonly-confused characters mapped to their look-alikes.
        val map = (Chars.zipWithIndex ++ Chars.map(_.toLower).zipWithIndex).toMap
        map ++ Map(
          'O' -> map('0'),
          'o' -> map('0'),
          'I' -> map('1'),
          'i' -> map('1'),
          'L' -> map('1'),
          'l' -> map('1')
        )
      }
      val pad = '='
      def toChar(i: Int) = Chars(i)
      def toIndex(c: Char) = {
        val lookupIndex = c - indicesMin
        if (lookupIndex >= 0 && lookupIndex < indices.length && indices(lookupIndex) >= 0)
          indices(lookupIndex)
        else throw new IllegalArgumentException
      }
      def ignore(c: Char) = c == '-' || c.isWhitespace
    }
    /** Base 58 alphabet as defined by
      * [[https://en.bitcoin.it/wiki/Base58Check_encoding#Base58_symbol_chart]]. IPFS hashes use
      * the same ordering.
      */
    object Base58 extends Alphabet {
      private val Chars = (('1' to '9') ++ ('A' to 'Z') ++ ('a' to 'z'))
        .filterNot(c => List('O', 'I', 'l').contains(c))
        .toArray
      def toChar(i: Int) = Chars(i)
      // The summed constants are the sizes of the preceding contiguous ranges:
      // 9 digits, 8 chars A-H, 5 chars J-N, 11 chars P-Z, 11 chars a-k.
      def toIndex(c: Char) =
        c match {
          case c if c >= '1' && c <= '9' => c - '1'
          case c if c >= 'A' && c <= 'H' => c - 'A' + 9
          case c if c >= 'J' && c <= 'N' => c - 'J' + 9 + 8
          case c if c >= 'P' && c <= 'Z' => c - 'P' + 9 + 8 + 5
          case c if c >= 'a' && c <= 'k' => c - 'a' + 9 + 8 + 5 + 11
          case c if c >= 'm' && c <= 'z' => c - 'm' + 9 + 8 + 5 + 11 + 11
          case _ => throw new IllegalArgumentException
        }
      def ignore(c: Char) = c.isWhitespace
    }
    // Companion object holding the character table shared by the sealed trait below.
    private object Base64Base {
      private val Chars = (('A' to 'Z') ++ ('a' to 'z') ++ ('0' to '9') :+ '+' :+ '/').toArray
    }
    sealed trait Base64Base extends Base64Alphabet {
      override val pad = '='
      override def toChar(i: Int) = Base64Base.Chars(i)
      override def toIndex(c: Char) =
        c match {
          case c if c >= 'A' && c <= 'Z' => c - 'A'
          case c if c >= 'a' && c <= 'z' => c - 'a' + 26
          case c if c >= '0' && c <= '9' => c - '0' + 26 + 26
          case '+' => 62
          case '/' => 63
          case _ => throw new IllegalArgumentException
        }
      override def ignore(c: Char) = c.isWhitespace
    }
    /** Base 64 alphabet, with padding, as defined by
      * [[https://tools.ietf.org/html/rfc4648#section-4 RFC 4648 section 4]]. Whitespace is ignored.
      */
    object Base64 extends Base64Base with PaddedAlphabet
    /** Base 64 alphabet, without padding, as defined by
      * [[https://tools.ietf.org/html/rfc4648#section-4 RFC 4648 section 4]]. Whitespace is ignored.
      */
    object Base64NoPad extends Base64Base {
      // NUL acts as the "no padding" sentinel.
      override val pad = 0.toChar
    }
    // Companion object holding the character table shared by the sealed trait below.
    private object Base64UrlBase {
      private val Chars = (('A' to 'Z') ++ ('a' to 'z') ++ ('0' to '9') :+ '-' :+ '_').toArray
    }
    /** URL-safe base 64 behaviour ('-' and '_' in place of '+' and '/'), shared by
      * [[Base64Url]] and [[Base64UrlNoPad]]; see
      * [[https://tools.ietf.org/html/rfc4648#section-5 RFC 4648 section 5]]. Whitespace is ignored.
      */
    sealed trait Base64UrlBase extends Base64Alphabet {
      override val pad = '='
      override def toChar(i: Int) = Base64UrlBase.Chars(i)
      override def toIndex(c: Char) =
        c match {
          case c if c >= 'A' && c <= 'Z' => c - 'A'
          case c if c >= 'a' && c <= 'z' => c - 'a' + 26
          case c if c >= '0' && c <= '9' => c - '0' + 26 + 26
          case '-' => 62
          case '_' => 63
          case _ => throw new IllegalArgumentException
        }
      override def ignore(c: Char) = c.isWhitespace
    }
    /** URL-safe base 64 alphabet, with padding (RFC 4648 section 5). */
    object Base64Url extends Base64UrlBase
    /** URL-safe base 64 alphabet, without padding (RFC 4648 section 5). */
    object Base64UrlNoPad extends Base64UrlBase {
      // NUL acts as the "no padding" sentinel.
      override val pad = 0.toChar
    }
  }
}
|
scodec/scodec-bits
|
core/shared/src/main/scala/scodec/bits/Bases.scala
|
Scala
|
bsd-3-clause
| 10,311 |
package org.jetbrains.plugins.scala.lang.completion3
import com.intellij.codeInsight.completion.CompletionType
import org.jetbrains.plugins.scala.codeInsight.ScalaCodeInsightTestBase
import org.jetbrains.plugins.scala.lang.completion.lookups.ScalaLookupItem
import org.jetbrains.plugins.scala.lang.formatting.settings.ScalaCodeStyleSettings
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.{ScClass, ScObject}
/**
* User: Alefas
* Date: 27.03.12
*/
class ScalaClassNameCompletionTest extends ScalaCodeInsightTestBase {

  /** Runs `body` with fully-qualified imports disabled, restoring the previous code-style
    * setting in a `finally` block even when `body` throws.
    */
  def withRelativeImports(body: => Unit): Unit = {
    val settings: ScalaCodeStyleSettings = ScalaCodeStyleSettings.getInstance(getProjectAdapter)
    val oldValue = settings.isAddFullQualifiedImports
    settings.setAddFullQualifiedImports(false)
    try {
      body
    } finally {
      settings.setAddFullQualifiedImports(oldValue)
    }
  }

  /** Completing a renamed import (`ArrayList => BLLLL`) must insert the alias, not the
    * original class name.
    */
  def testClassNameRenamed() {
    val fileText =
      """
        |import java.util.{ArrayList => BLLLL}
        |object Test extends App {
        |  val al: java.util.List[Int] = new BL<caret>
        |}
      """.stripMargin.replaceAll("\\r", "").trim()
    configureFromFileTextAdapter("dummy.scala", fileText)
    val (activeLookup, _) = complete(2, CompletionType.BASIC)
    val resultText =
      """
        |import java.util.{ArrayList => BLLLL}
        |object Test extends App {
        |  val al: java.util.List[Int] = new BLLLL[Int](<caret>)
        |}
      """.stripMargin.replaceAll("\\r", "").trim()
    completeLookupItem(activeLookup.find(le => le.getLookupString == "BLLLL").get, '\t')
    checkResultByText(resultText)
  }

  /** A class whose simple name clashes with an existing import, completed in expression
    * position, must be inserted with a qualifier (`mutable.HashSet`).
    */
  def testExpressionSameName() {
    val fileText =
      """
        |import collection.immutable.HashSet
        |
        |object Sandbox extends App {
        |  val x: HashSet[Int] = new HashSet[Int]
        |  HashSet<caret>
        |}
      """.stripMargin.replaceAll("\\r", "").trim()
    configureFromFileTextAdapter("dummy.scala", fileText)
    val (activeLookup, _) = complete(2, CompletionType.BASIC)
    val resultText =
      """
        |import collection.immutable.HashSet
        |import scala.collection.mutable
        |
        |object Sandbox extends App {
        |  val x: HashSet[Int] = new HashSet[Int]
        |  mutable.HashSet<caret>
        |}
      """.stripMargin.replaceAll("\\r", "").trim()
    completeLookupItem(activeLookup.find {
      case le: ScalaLookupItem =>
        le.element match {
          case c: ScObject if c.qualifiedName == "scala.collection.mutable.HashSet" => true
          case _ => false
        }
      case _ => false
    }.get, '\t')
    checkResultByText(resultText)
  }

  /** Same clash as above but in type position: the completed class must be qualified. */
  def testClassSameName() {
    val fileText =
      """
        |import collection.immutable.HashSet
        |
        |object Sandbox extends App {
        |  val x: HashSet[Int] = new HashSet[Int]
        |  val y: HashSet<caret>
        |}
      """.stripMargin.replaceAll("\\r", "").trim()
    configureFromFileTextAdapter("dummy.scala", fileText)
    val (activeLookup, _) = complete(2, CompletionType.BASIC)
    val resultText =
      """
        |import collection.immutable.HashSet
        |import scala.collection.mutable
        |
        |object Sandbox extends App {
        |  val x: HashSet[Int] = new HashSet[Int]
        |  val y: mutable.HashSet<caret>
        |}
      """.stripMargin.replaceAll("\\r", "").trim()
    completeLookupItem(activeLookup.find {
      case le: ScalaLookupItem =>
        le.element match {
          case c: ScClass if c.qualifiedName == "scala.collection.mutable.HashSet" => true
          case _ => false
        }
      case _ => false
    }.get, '\t')
    checkResultByText(resultText)
  }

  /** Completion must smart-join imports from the same package into a single import statement.
    *
    * FIX: the `importsWithPrefix` setting is now restored in a `finally` block. The previous
    * version restored it only inside a `catch` clause, which (a) leaked the cleared setting
    * into subsequent tests when this test passed, and (b) swallowed the exception, silently
    * masking any failure of this test.
    */
  def testSmartJoining() {
    val settings = ScalaCodeStyleSettings.getInstance(getProjectAdapter)
    val oldValue = settings.getImportsWithPrefix
    settings.setImportsWithPrefix(Array.empty)
    try {
      val fileText =
        """
          |import collection.mutable.{Builder, Queue}
          |import scala.collection.immutable.HashMap
          |import collection.mutable.ArrayBuffer
          |
          |object Sandbox extends App {
          |  val m: ListM<caret>
          |}
        """.stripMargin.replaceAll("\\r", "").trim()
      configureFromFileTextAdapter("dummy.scala", fileText)
      val (activeLookup, _) = complete(2, CompletionType.BASIC)
      val resultText =
        """
          |import scala.collection.mutable.{ListMap, Builder, Queue, ArrayBuffer}
          |import scala.collection.immutable.HashMap
          |
          |object Sandbox extends App {
          |  val m: ListMap
          |}
        """.stripMargin.replaceAll("\\r", "").trim()
      completeLookupItem(activeLookup.find {
        case le: ScalaLookupItem =>
          le.element match {
            case c: ScClass if c.qualifiedName == "scala.collection.mutable.ListMap" => true
            case _ => false
          }
        case _ => false
      }.get, '\t')
      checkResultByText(resultText)
    } finally {
      settings.setImportsWithPrefix(oldValue)
    }
  }

  /** Completion in a file with overlapping wildcard/selective imports must add the minimal
    * imports needed to keep existing references valid.
    */
  def testImportsMess() {
    val fileText =
      """
        |import scala.collection.immutable.{BitSet, HashSet, ListMap, SortedMap}
        |import scala.collection.mutable._
        |
        |class Test2 {
        |  val x: HashMap[String, String] = HashMap.empty
        |  val z: ListSet<caret> = null
        |}
      """.stripMargin.replaceAll("\\r", "").trim()
    configureFromFileTextAdapter("dummy.scala", fileText)
    val (activeLookup, _) = complete(2, CompletionType.BASIC)
    val resultText =
      """
        |import scala.collection.immutable._
        |import scala.collection.mutable.HashMap
        |import scala.collection.mutable._
        |
        |class Test2 {
        |  val x: HashMap[String, String] = HashMap.empty
        |  val z: ListSet<caret> = null
        |}
      """.stripMargin.replaceAll("\\r", "").trim()
    completeLookupItem(activeLookup.find {
      case le: ScalaLookupItem =>
        le.element match {
          case c: ScClass if c.qualifiedName == "scala.collection.immutable.ListSet" => true
          case _ => false
        }
      case _ => false
    }.get, '\t')
    checkResultByText(resultText)
  }

  /** Completing a member provided by an implicit class must import that implicit class. */
  def testImplicitClass() {
    val fileText =
      """
        |package a
        |
        |object A {
        |
        |  implicit class B(i: Int) {
        |    def foo = 1
        |  }
        |
        |}
        |
        |object B {
        |  1.<caret>
        |}
      """.stripMargin.replaceAll("\\r", "").trim()
    configureFromFileTextAdapter("dummy.scala", fileText)
    val (activeLookup, _) = complete(2, CompletionType.BASIC)
    val resultText =
      """
        |package a
        |
        |import a.A.B
        |
        |object A {
        |
        |  implicit class B(i: Int) {
        |    def foo = 1
        |  }
        |
        |}
        |
        |object B {
        |  1.foo
        |}
      """.stripMargin.replaceAll("\\r", "").trim()
    completeLookupItem(activeLookup.find(_.getLookupString == "foo").get, '\t')
    checkResultByText(resultText)
  }

  /** SCL-4087: with relative imports enabled, completion through a renamed package
    * (`import a.{b => c}`) must import via the alias (`import c.XXXX`).
    */
  def testSCL4087() {
    withRelativeImports {
      val fileText =
        """
          |package a.b {
          |
          |  class XXXX
          |
          |}
          |
          |import a.{b => c}
          |
          |trait Y {
          |  val x: XXXX<caret>
          |}
        """.stripMargin.replaceAll("\\r", "").trim()
      configureFromFileTextAdapter("dummy.scala", fileText)
      val (activeLookup, _) = complete(2, CompletionType.BASIC)
      val resultText =
        """
          |package a.b {
          |
          |  class XXXX
          |
          |}
          |
          |import a.{b => c}
          |import c.XXXX
          |
          |trait Y {
          |  val x: XXXX
          |}
        """.stripMargin.replaceAll("\\r", "").trim()
      completeLookupItem(activeLookup.find(_.getLookupString == "XXXX").get, '\t')
      checkResultByText(resultText)
    }
  }
}
|
double-y/translation-idea-plugin
|
test/org/jetbrains/plugins/scala/lang/completion3/ScalaClassNameCompletionTest.scala
|
Scala
|
apache-2.0
| 8,082 |
package metabrowse
import scala.concurrent.ExecutionContext.Implicits.global
import scala.scalajs.js
import monaco.CancellationToken
import monaco.editor.ITextModel
import monaco.languages.DocumentSymbolProvider
import monaco.languages.SymbolInformation
import monaco.languages.SymbolKind
import scala.meta.internal.{semanticdb => s}
import scala.{meta => m}
import scala.meta.internal.semanticdb.Scala._
/** Supplies the document outline (symbol list) for a Scala file to the Monaco editor,
  * derived from the file's SemanticDB occurrences and symbol information.
  */
class ScalaDocumentSymbolProvider(index: MetabrowseSemanticdbIndex)
    extends DocumentSymbolProvider {

  /** Collects one [[DocumentSymbol]] per global definition that has both a recognizable
    * symbol kind and a resolvable definition position.
    */
  private def getDocumentSymbols(doc: s.TextDocument): Seq[DocumentSymbol] = {
    // Index the document's symbol information by symbol string for constant-time lookup.
    val infoForSymbol = doc.symbols.map(info => info.symbol -> info).toMap
    // NOTE(review): occurrences are read from `index.document`, not from the `doc`
    // parameter — confirm both always refer to the same document.
    for {
      occurrence <- index.document.occurrences
      if occurrence.role.isDefinition && occurrence.symbol.isGlobal
      denotation <- infoForSymbol.get(occurrence.symbol)
      kind <- symbolKind(denotation)
      definition <- index.definition(occurrence.symbol)
    } yield DocumentSymbol(denotation, kind, definition)
  }

  override def provideDocumentSymbols(
      model: ITextModel,
      token: CancellationToken
  ) = {
    for {
      // Pattern filter: documents without semanticdb data yield no symbols.
      Some(doc) <- index.semanticdb(model.uri.path)
    } yield {
      val entries = getDocumentSymbols(doc).map {
        case DocumentSymbol(denotation, kind, definition) =>
          val entry = jsObject[SymbolInformation]
          entry.name = denotation.displayName
          // TODO: print signature instead of `denotation.symbol`: https://github.com/scalameta/metabrowse/issues/99
          entry.containerName = denotation.symbol
          entry.kind = kind
          entry.location = resolveLocation(definition)
          entry
      }
      js.Array[SymbolInformation](entries: _*)
    }
  }.toMonacoThenable

  /** Maps a SemanticDB symbol kind to the closest Monaco [[SymbolKind]], or `None` for
    * symbols that should not appear in the outline (parameters and type parameters).
    * The ordering of the checks is significant (e.g. fields before methods).
    */
  def symbolKind(denotation: s.SymbolInformation): Option[SymbolKind] = {
    import denotation.kind._
    import s.SymbolInformation.Property
    def hasProperty(flag: Int): Boolean =
      (denotation.properties & flag) != 0
    val isValOrVar = hasProperty(Property.VAL.value | Property.VAR.value)
    if (isParameter || isTypeParameter) None
    else if (isField || isValOrVar) Some(SymbolKind.Variable)
    else if (isMethod) Some(SymbolKind.Function)
    else if (isConstructor) Some(SymbolKind.Constructor)
    else if (isClass) Some(SymbolKind.Class)
    else if (isObject) Some(SymbolKind.Object)
    else if (isTrait) Some(SymbolKind.Interface)
    else if (isPackage || isPackageObject) Some(SymbolKind.Package)
    else if (isType) Some(SymbolKind.Namespace) // Note: no type related symbol kind exists
    else None
  }
}
|
scalameta/metadoc
|
metabrowse-js/src/main/scala/metabrowse/ScalaDocumentSymbolProvider.scala
|
Scala
|
apache-2.0
| 2,616 |
/**
* Swaggy Jenkins
* Jenkins API clients generated from Swagger / Open API specification
*
* The version of the OpenAPI document: 1.1.2-pre.0
* Contact: [email protected]
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
package org.openapitools.client.api
import org.openapitools.client.model.BranchImpl
import org.openapitools.client.model.FavoriteImpl
import org.openapitools.client.model.GithubOrganization
import org.openapitools.client.model.GithubScm
import org.openapitools.client.model.MultibranchPipeline
import org.openapitools.client.model.Organisation
import org.openapitools.client.model.Pipeline
import org.openapitools.client.model.PipelineActivity
import org.openapitools.client.model.PipelineFolderImpl
import org.openapitools.client.model.PipelineImpl
import org.openapitools.client.model.PipelineRun
import org.openapitools.client.model.PipelineRunNode
import org.openapitools.client.model.PipelineStepImpl
import org.openapitools.client.model.QueueItemImpl
import org.openapitools.client.model.User
import org.openapitools.client.core.JsonSupport._
import sttp.client._
import sttp.model.Method
object BlueOceanApi {
  // Factory; the default base URL targets a Jenkins instance running on localhost.
  def apply(baseUrl: String = "http://localhost") = new BlueOceanApi(baseUrl)
}
class BlueOceanApi(baseUrl: String) {
/**
* Delete queue item from an organization pipeline queue
*
* Expected answers:
* code 200 : (Successfully deleted queue item)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param organization Name of the organization
* @param pipeline Name of the pipeline
* @param queue Name of the queue item
*/
  def deletePipelineQueueItem(username: String, password: String)(organization: String, pipeline: String, queue: String
): Request[Either[ResponseError[Exception], Unit], Nothing] =
    // DELETE with HTTP basic auth; the response body is decoded as Unit (discarded).
    basicRequest
      .method(Method.DELETE, uri"$baseUrl/blue/rest/organizations/${organization}/pipelines/${pipeline}/queue/${queue}")
      .contentType("application/json")
      .auth.basic(username, password)
      .response(asJson[Unit])
/**
* Retrieve authenticated user details for an organization
*
* Expected answers:
* code 200 : User (Successfully retrieved authenticated user details)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param organization Name of the organization
*/
  def getAuthenticatedUser(username: String, password: String)(organization: String
): Request[Either[ResponseError[Exception], User], Nothing] =
    // GET with HTTP basic auth; note the trailing slash on /user/ is part of the endpoint path.
    basicRequest
      .method(Method.GET, uri"$baseUrl/blue/rest/organizations/${organization}/user/")
      .contentType("application/json")
      .auth.basic(username, password)
      .response(asJson[User])
/**
* Get a list of class names supported by a given class
*
* Expected answers:
* code 200 : String (Successfully retrieved class names)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param `class` Name of the class
*/
  def getClasses(username: String, password: String)(`class`: String
): Request[Either[ResponseError[Exception], String], Nothing] =
    // `class` is backtick-escaped because it is a Scala keyword; raw JSON is returned as String.
    basicRequest
      .method(Method.GET, uri"$baseUrl/blue/rest/classes/${`class`}")
      .contentType("application/json")
      .auth.basic(username, password)
      .response(asJson[String])
/**
* Retrieve JSON Web Key
*
* Expected answers:
* code 200 : String (Successfully retrieved JWT token)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* @param key Key ID received as part of JWT header field kid
*/
  def getJsonWebKey(key: Int
): Request[Either[ResponseError[Exception], String], Nothing] =
    // No auth step: unlike the other endpoints, the JWKS lookup is issued without credentials.
    basicRequest
      .method(Method.GET, uri"$baseUrl/jwt-auth/jwks/${key}")
      .contentType("application/json")
      .response(asJson[String])
/**
* Retrieve JSON Web Token
*
* Expected answers:
* code 200 : String (Successfully retrieved JWT token)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* @param expiryTimeInMins Token expiry time in minutes, default: 30 minutes
* @param maxExpiryTimeInMins Maximum token expiry time in minutes, default: 480 minutes
*/
  def getJsonWebToken(expiryTimeInMins: Option[Int] = None, maxExpiryTimeInMins: Option[Int] = None
): Request[Either[ResponseError[Exception], String], Nothing] =
    // No auth step; Option values are interpolated directly into the query string
    // (rendering of None is delegated to sttp's uri interpolator — confirm desired output).
    basicRequest
      .method(Method.GET, uri"$baseUrl/jwt-auth/token?expiryTimeInMins=${ expiryTimeInMins }&maxExpiryTimeInMins=${ maxExpiryTimeInMins }")
      .contentType("application/json")
      .response(asJson[String])
/**
* Retrieve organization details
*
* Expected answers:
* code 200 : Organisation (Successfully retrieved pipeline details)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
* code 404 : (Pipeline cannot be found on Jenkins instance)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param organization Name of the organization
*/
  def getOrganisation(username: String, password: String)(organization: String
): Request[Either[ResponseError[Exception], Organisation], Nothing] =
    // GET with HTTP basic auth; JSON body decoded into Organisation.
    basicRequest
      .method(Method.GET, uri"$baseUrl/blue/rest/organizations/${organization}")
      .contentType("application/json")
      .auth.basic(username, password)
      .response(asJson[Organisation])
/**
* Retrieve all organizations details
*
* Expected answers:
* code 200 : Seq[Organisation] (Successfully retrieved pipelines details)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*/
  def getOrganisations(username: String, password: String)(
): Request[Either[ResponseError[Exception], Seq[Organisation]], Nothing] =
    // GET with HTTP basic auth; JSON body decoded into Seq[Organisation].
    basicRequest
      .method(Method.GET, uri"$baseUrl/blue/rest/organizations/")
      .contentType("application/json")
      .auth.basic(username, password)
      .response(asJson[Seq[Organisation]])
/**
* Retrieve pipeline details for an organization
*
* Expected answers:
* code 200 : Pipeline (Successfully retrieved pipeline details)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
* code 404 : (Pipeline cannot be found on Jenkins instance)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param organization Name of the organization
* @param pipeline Name of the pipeline
*/
  def getPipeline(username: String, password: String)(organization: String, pipeline: String
): Request[Either[ResponseError[Exception], Pipeline], Nothing] =
    // GET with HTTP basic auth; JSON body decoded into Pipeline.
    basicRequest
      .method(Method.GET, uri"$baseUrl/blue/rest/organizations/${organization}/pipelines/${pipeline}")
      .contentType("application/json")
      .auth.basic(username, password)
      .response(asJson[Pipeline])
/**
* Retrieve all activities details for an organization pipeline
*
* Expected answers:
* code 200 : Seq[PipelineActivity] (Successfully retrieved all activities details)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param organization Name of the organization
* @param pipeline Name of the pipeline
*/
  def getPipelineActivities(username: String, password: String)(organization: String, pipeline: String
): Request[Either[ResponseError[Exception], Seq[PipelineActivity]], Nothing] =
    // GET with HTTP basic auth; JSON body decoded into Seq[PipelineActivity].
    basicRequest
      .method(Method.GET, uri"$baseUrl/blue/rest/organizations/${organization}/pipelines/${pipeline}/activities")
      .contentType("application/json")
      .auth.basic(username, password)
      .response(asJson[Seq[PipelineActivity]])
/**
* Retrieve branch details for an organization pipeline
*
* Expected answers:
* code 200 : BranchImpl (Successfully retrieved branch details)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param organization Name of the organization
* @param pipeline Name of the pipeline
* @param branch Name of the branch
*/
  def getPipelineBranch(username: String, password: String)(organization: String, pipeline: String, branch: String
): Request[Either[ResponseError[Exception], BranchImpl], Nothing] =
    // GET with HTTP basic auth; JSON body decoded into BranchImpl (trailing slash is part of the path).
    basicRequest
      .method(Method.GET, uri"$baseUrl/blue/rest/organizations/${organization}/pipelines/${pipeline}/branches/${branch}/")
      .contentType("application/json")
      .auth.basic(username, password)
      .response(asJson[BranchImpl])
/**
* Retrieve branch run details for an organization pipeline
*
* Expected answers:
* code 200 : PipelineRun (Successfully retrieved run details)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param organization Name of the organization
* @param pipeline Name of the pipeline
* @param branch Name of the branch
* @param run Name of the run
*/
  def getPipelineBranchRun(username: String, password: String)(organization: String, pipeline: String, branch: String, run: String
): Request[Either[ResponseError[Exception], PipelineRun], Nothing] =
    // GET with HTTP basic auth; JSON body decoded into PipelineRun.
    basicRequest
      .method(Method.GET, uri"$baseUrl/blue/rest/organizations/${organization}/pipelines/${pipeline}/branches/${branch}/runs/${run}")
      .contentType("application/json")
      .auth.basic(username, password)
      .response(asJson[PipelineRun])
/**
* Retrieve all branches details for an organization pipeline
*
* Expected answers:
* code 200 : MultibranchPipeline (Successfully retrieved all branches details)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param organization Name of the organization
* @param pipeline Name of the pipeline
*/
  def getPipelineBranches(username: String, password: String)(organization: String, pipeline: String
): Request[Either[ResponseError[Exception], MultibranchPipeline], Nothing] =
    // GET with HTTP basic auth; JSON body decoded into MultibranchPipeline.
    basicRequest
      .method(Method.GET, uri"$baseUrl/blue/rest/organizations/${organization}/pipelines/${pipeline}/branches")
      .contentType("application/json")
      .auth.basic(username, password)
      .response(asJson[MultibranchPipeline])
/**
* Retrieve pipeline folder for an organization
*
* Expected answers:
* code 200 : PipelineFolderImpl (Successfully retrieved folder details)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param organization Name of the organization
* @param folder Name of the folder
*/
  def getPipelineFolder(username: String, password: String)(organization: String, folder: String
): Request[Either[ResponseError[Exception], PipelineFolderImpl], Nothing] =
    // GET with HTTP basic auth; folders are addressed via the same /pipelines/ path segment.
    basicRequest
      .method(Method.GET, uri"$baseUrl/blue/rest/organizations/${organization}/pipelines/${folder}/")
      .contentType("application/json")
      .auth.basic(username, password)
      .response(asJson[PipelineFolderImpl])
/**
* Retrieve pipeline details for an organization folder
*
* Expected answers:
* code 200 : PipelineImpl (Successfully retrieved pipeline details)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param organization Name of the organization
* @param pipeline Name of the pipeline
* @param folder Name of the folder
*/
  def getPipelineFolderPipeline(username: String, password: String)(organization: String, pipeline: String, folder: String
): Request[Either[ResponseError[Exception], PipelineImpl], Nothing] =
    // GET with HTTP basic auth; note the URL nests the pipeline under the folder.
    basicRequest
      .method(Method.GET, uri"$baseUrl/blue/rest/organizations/${organization}/pipelines/${folder}/pipelines/${pipeline}")
      .contentType("application/json")
      .auth.basic(username, password)
      .response(asJson[PipelineImpl])
/**
* Retrieve queue details for an organization pipeline
*
* Expected answers:
* code 200 : Seq[QueueItemImpl] (Successfully retrieved queue details)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param organization Name of the organization
* @param pipeline Name of the pipeline
*/
  def getPipelineQueue(username: String, password: String)(organization: String, pipeline: String
): Request[Either[ResponseError[Exception], Seq[QueueItemImpl]], Nothing] =
    // GET with HTTP basic auth; JSON body decoded into Seq[QueueItemImpl].
    basicRequest
      .method(Method.GET, uri"$baseUrl/blue/rest/organizations/${organization}/pipelines/${pipeline}/queue")
      .contentType("application/json")
      .auth.basic(username, password)
      .response(asJson[Seq[QueueItemImpl]])
/**
* Retrieve run details for an organization pipeline
*
* Expected answers:
* code 200 : PipelineRun (Successfully retrieved run details)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param organization Name of the organization
* @param pipeline Name of the pipeline
* @param run Name of the run
*/
def getPipelineRun(username: String, password: String)(organization: String, pipeline: String, run: String
): Request[Either[ResponseError[Exception], PipelineRun], Nothing] =
basicRequest
.method(Method.GET, uri"$baseUrl/blue/rest/organizations/${organization}/pipelines/${pipeline}/runs/${run}")
.contentType("application/json")
.auth.basic(username, password)
.response(asJson[PipelineRun])
/**
* Get log for a pipeline run
*
* Expected answers:
* code 200 : String (Successfully retrieved pipeline run log)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param organization Name of the organization
* @param pipeline Name of the pipeline
* @param run Name of the run
* @param start Start position of the log
* @param download Set to true in order to download the file, otherwise it's passed as a response body
*/
def getPipelineRunLog(username: String, password: String)(organization: String, pipeline: String, run: String, start: Option[Int] = None, download: Option[Boolean] = None
): Request[Either[ResponseError[Exception], String], Nothing] =
basicRequest
.method(Method.GET, uri"$baseUrl/blue/rest/organizations/${organization}/pipelines/${pipeline}/runs/${run}/log?start=${ start }&download=${ download }")
.contentType("application/json")
.auth.basic(username, password)
.response(asJson[String])
/**
* Retrieve run node details for an organization pipeline
*
* Expected answers:
* code 200 : PipelineRunNode (Successfully retrieved run node details)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param organization Name of the organization
* @param pipeline Name of the pipeline
* @param run Name of the run
* @param node Name of the node
*/
def getPipelineRunNode(username: String, password: String)(organization: String, pipeline: String, run: String, node: String
): Request[Either[ResponseError[Exception], PipelineRunNode], Nothing] =
basicRequest
.method(Method.GET, uri"$baseUrl/blue/rest/organizations/${organization}/pipelines/${pipeline}/runs/${run}/nodes/${node}")
.contentType("application/json")
.auth.basic(username, password)
.response(asJson[PipelineRunNode])
/**
* Retrieve run node details for an organization pipeline
*
* Expected answers:
* code 200 : PipelineStepImpl (Successfully retrieved run node step details)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param organization Name of the organization
* @param pipeline Name of the pipeline
* @param run Name of the run
* @param node Name of the node
* @param step Name of the step
*/
def getPipelineRunNodeStep(username: String, password: String)(organization: String, pipeline: String, run: String, node: String, step: String
): Request[Either[ResponseError[Exception], PipelineStepImpl], Nothing] =
basicRequest
.method(Method.GET, uri"$baseUrl/blue/rest/organizations/${organization}/pipelines/${pipeline}/runs/${run}/nodes/${node}/steps/${step}")
.contentType("application/json")
.auth.basic(username, password)
.response(asJson[PipelineStepImpl])
/**
* Get log for a pipeline run node step
*
* Expected answers:
* code 200 : String (Successfully retrieved pipeline run node step log)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param organization Name of the organization
* @param pipeline Name of the pipeline
* @param run Name of the run
* @param node Name of the node
* @param step Name of the step
*/
def getPipelineRunNodeStepLog(username: String, password: String)(organization: String, pipeline: String, run: String, node: String, step: String
): Request[Either[ResponseError[Exception], String], Nothing] =
basicRequest
.method(Method.GET, uri"$baseUrl/blue/rest/organizations/${organization}/pipelines/${pipeline}/runs/${run}/nodes/${node}/steps/${step}/log")
.contentType("application/json")
.auth.basic(username, password)
.response(asJson[String])
/**
* Retrieve run node steps details for an organization pipeline
*
* Expected answers:
* code 200 : Seq[PipelineStepImpl] (Successfully retrieved run node steps details)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param organization Name of the organization
* @param pipeline Name of the pipeline
* @param run Name of the run
* @param node Name of the node
*/
def getPipelineRunNodeSteps(username: String, password: String)(organization: String, pipeline: String, run: String, node: String
): Request[Either[ResponseError[Exception], Seq[PipelineStepImpl]], Nothing] =
basicRequest
.method(Method.GET, uri"$baseUrl/blue/rest/organizations/${organization}/pipelines/${pipeline}/runs/${run}/nodes/${node}/steps")
.contentType("application/json")
.auth.basic(username, password)
.response(asJson[Seq[PipelineStepImpl]])
/**
* Retrieve run nodes details for an organization pipeline
*
* Expected answers:
* code 200 : Seq[PipelineRunNode] (Successfully retrieved run nodes details)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param organization Name of the organization
* @param pipeline Name of the pipeline
* @param run Name of the run
*/
def getPipelineRunNodes(username: String, password: String)(organization: String, pipeline: String, run: String
): Request[Either[ResponseError[Exception], Seq[PipelineRunNode]], Nothing] =
basicRequest
.method(Method.GET, uri"$baseUrl/blue/rest/organizations/${organization}/pipelines/${pipeline}/runs/${run}/nodes")
.contentType("application/json")
.auth.basic(username, password)
.response(asJson[Seq[PipelineRunNode]])
/**
* Retrieve all runs details for an organization pipeline
*
* Expected answers:
* code 200 : Seq[PipelineRun] (Successfully retrieved runs details)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param organization Name of the organization
* @param pipeline Name of the pipeline
*/
def getPipelineRuns(username: String, password: String)(organization: String, pipeline: String
): Request[Either[ResponseError[Exception], Seq[PipelineRun]], Nothing] =
basicRequest
.method(Method.GET, uri"$baseUrl/blue/rest/organizations/${organization}/pipelines/${pipeline}/runs")
.contentType("application/json")
.auth.basic(username, password)
.response(asJson[Seq[PipelineRun]])
/**
* Retrieve all pipelines details for an organization
*
* Expected answers:
* code 200 : Seq[Pipeline] (Successfully retrieved pipelines details)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param organization Name of the organization
*/
def getPipelines(username: String, password: String)(organization: String
): Request[Either[ResponseError[Exception], Seq[Pipeline]], Nothing] =
basicRequest
.method(Method.GET, uri"$baseUrl/blue/rest/organizations/${organization}/pipelines/")
.contentType("application/json")
.auth.basic(username, password)
.response(asJson[Seq[Pipeline]])
/**
* Retrieve SCM details for an organization
*
* Expected answers:
* code 200 : GithubScm (Successfully retrieved SCM details)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param organization Name of the organization
* @param scm Name of SCM
*/
def getSCM(username: String, password: String)(organization: String, scm: String
): Request[Either[ResponseError[Exception], GithubScm], Nothing] =
basicRequest
.method(Method.GET, uri"$baseUrl/blue/rest/organizations/${organization}/scm/${scm}")
.contentType("application/json")
.auth.basic(username, password)
.response(asJson[GithubScm])
/**
* Retrieve SCM organization repositories details for an organization
*
* Expected answers:
* code 200 : Seq[GithubOrganization] (Successfully retrieved SCM organization repositories details)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param organization Name of the organization
* @param scm Name of SCM
* @param scmOrganisation Name of the SCM organization
* @param credentialId Credential ID
* @param pageSize Number of items in a page
* @param pageNumber Page number
*/
def getSCMOrganisationRepositories(username: String, password: String)(organization: String, scm: String, scmOrganisation: String, credentialId: Option[String] = None, pageSize: Option[Int] = None, pageNumber: Option[Int] = None
): Request[Either[ResponseError[Exception], Seq[GithubOrganization]], Nothing] =
basicRequest
.method(Method.GET, uri"$baseUrl/blue/rest/organizations/${organization}/scm/${scm}/organizations/${scmOrganisation}/repositories?credentialId=${ credentialId }&pageSize=${ pageSize }&pageNumber=${ pageNumber }")
.contentType("application/json")
.auth.basic(username, password)
.response(asJson[Seq[GithubOrganization]])
/**
* Retrieve SCM organization repository details for an organization
*
* Expected answers:
* code 200 : Seq[GithubOrganization] (Successfully retrieved SCM organizations details)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param organization Name of the organization
* @param scm Name of SCM
* @param scmOrganisation Name of the SCM organization
* @param repository Name of the SCM repository
* @param credentialId Credential ID
*/
def getSCMOrganisationRepository(username: String, password: String)(organization: String, scm: String, scmOrganisation: String, repository: String, credentialId: Option[String] = None
): Request[Either[ResponseError[Exception], Seq[GithubOrganization]], Nothing] =
basicRequest
.method(Method.GET, uri"$baseUrl/blue/rest/organizations/${organization}/scm/${scm}/organizations/${scmOrganisation}/repositories/${repository}?credentialId=${ credentialId }")
.contentType("application/json")
.auth.basic(username, password)
.response(asJson[Seq[GithubOrganization]])
/**
* Retrieve SCM organizations details for an organization
*
* Expected answers:
* code 200 : Seq[GithubOrganization] (Successfully retrieved SCM organizations details)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param organization Name of the organization
* @param scm Name of SCM
* @param credentialId Credential ID
*/
def getSCMOrganisations(username: String, password: String)(organization: String, scm: String, credentialId: Option[String] = None
): Request[Either[ResponseError[Exception], Seq[GithubOrganization]], Nothing] =
basicRequest
.method(Method.GET, uri"$baseUrl/blue/rest/organizations/${organization}/scm/${scm}/organizations?credentialId=${ credentialId }")
.contentType("application/json")
.auth.basic(username, password)
.response(asJson[Seq[GithubOrganization]])
/**
* Retrieve user details for an organization
*
* Expected answers:
* code 200 : User (Successfully retrieved users details)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param organization Name of the organization
* @param user Name of the user
*/
def getUser(username: String, password: String)(organization: String, user: String
): Request[Either[ResponseError[Exception], User], Nothing] =
basicRequest
.method(Method.GET, uri"$baseUrl/blue/rest/organizations/${organization}/users/${user}")
.contentType("application/json")
.auth.basic(username, password)
.response(asJson[User])
/**
* Retrieve user favorites details for an organization
*
* Expected answers:
* code 200 : Seq[FavoriteImpl] (Successfully retrieved users favorites details)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param user Name of the user
*/
def getUserFavorites(username: String, password: String)(user: String
): Request[Either[ResponseError[Exception], Seq[FavoriteImpl]], Nothing] =
basicRequest
.method(Method.GET, uri"$baseUrl/blue/rest/users/${user}/favorites")
.contentType("application/json")
.auth.basic(username, password)
.response(asJson[Seq[FavoriteImpl]])
/**
* Retrieve users details for an organization
*
* Expected answers:
* code 200 : User (Successfully retrieved users details)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param organization Name of the organization
*/
def getUsers(username: String, password: String)(organization: String
): Request[Either[ResponseError[Exception], User], Nothing] =
basicRequest
.method(Method.GET, uri"$baseUrl/blue/rest/organizations/${organization}/users/")
.contentType("application/json")
.auth.basic(username, password)
.response(asJson[User])
/**
* Replay an organization pipeline run
*
* Expected answers:
* code 200 : QueueItemImpl (Successfully replayed a pipeline run)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param organization Name of the organization
* @param pipeline Name of the pipeline
* @param run Name of the run
*/
def postPipelineRun(username: String, password: String)(organization: String, pipeline: String, run: String
): Request[Either[ResponseError[Exception], QueueItemImpl], Nothing] =
basicRequest
.method(Method.POST, uri"$baseUrl/blue/rest/organizations/${organization}/pipelines/${pipeline}/runs/${run}/replay")
.contentType("application/json")
.auth.basic(username, password)
.response(asJson[QueueItemImpl])
/**
* Start a build for an organization pipeline
*
* Expected answers:
* code 200 : QueueItemImpl (Successfully started a build)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param organization Name of the organization
* @param pipeline Name of the pipeline
*/
def postPipelineRuns(username: String, password: String)(organization: String, pipeline: String
): Request[Either[ResponseError[Exception], QueueItemImpl], Nothing] =
basicRequest
.method(Method.POST, uri"$baseUrl/blue/rest/organizations/${organization}/pipelines/${pipeline}/runs")
.contentType("application/json")
.auth.basic(username, password)
.response(asJson[QueueItemImpl])
/**
* Favorite/unfavorite a pipeline
*
* Expected answers:
* code 200 : FavoriteImpl (Successfully favorited/unfavorited a pipeline)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param organization Name of the organization
* @param pipeline Name of the pipeline
* @param body Set JSON string body to {\\"favorite\\": true} to favorite, set value to false to unfavorite
*/
def putPipelineFavorite(username: String, password: String)(organization: String, pipeline: String, body: Boolean
): Request[Either[ResponseError[Exception], FavoriteImpl], Nothing] =
basicRequest
.method(Method.PUT, uri"$baseUrl/blue/rest/organizations/${organization}/pipelines/${pipeline}/favorite")
.contentType("application/json")
.auth.basic(username, password)
.body(body)
.response(asJson[FavoriteImpl])
/**
* Stop a build of an organization pipeline
*
* Expected answers:
* code 200 : PipelineRun (Successfully stopped a build)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param organization Name of the organization
* @param pipeline Name of the pipeline
* @param run Name of the run
* @param blocking Set to true to make blocking stop, default: false
* @param timeOutInSecs Timeout in seconds, default: 10 seconds
*/
def putPipelineRun(username: String, password: String)(organization: String, pipeline: String, run: String, blocking: Option[String] = None, timeOutInSecs: Option[Int] = None
): Request[Either[ResponseError[Exception], PipelineRun], Nothing] =
basicRequest
.method(Method.PUT, uri"$baseUrl/blue/rest/organizations/${organization}/pipelines/${pipeline}/runs/${run}/stop?blocking=${ blocking }&timeOutInSecs=${ timeOutInSecs }")
.contentType("application/json")
.auth.basic(username, password)
.response(asJson[PipelineRun])
/**
* Search for any resource details
*
* Expected answers:
* code 200 : String (Successfully retrieved search result)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param `q` Query string
*/
def search(username: String, password: String)(`q`: String
): Request[Either[ResponseError[Exception], String], Nothing] =
basicRequest
.method(Method.GET, uri"$baseUrl/blue/rest/search/?q=${ `q` }")
.contentType("application/json")
.auth.basic(username, password)
.response(asJson[String])
/**
* Get classes details
*
* Expected answers:
* code 200 : String (Successfully retrieved search result)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param `q` Query string containing an array of class names
*/
def searchClasses(username: String, password: String)(`q`: String
): Request[Either[ResponseError[Exception], String], Nothing] =
basicRequest
.method(Method.GET, uri"$baseUrl/blue/rest/classes/?q=${ `q` }")
.contentType("application/json")
.auth.basic(username, password)
.response(asJson[String])
}
|
cliffano/swaggy-jenkins
|
clients/scala-sttp/generated/src/main/scala/org/openapitools/client/api/BlueOceanApi.scala
|
Scala
|
mit
| 36,503 |
package spire.math
import spire.algebra.{IsIntegral, Order, Rig, Signed}
object UByte extends UByteInstances {
  /** Wraps a signed byte, reinterpreting its bit pattern as an unsigned value (0..255). */
  @inline final def apply(n: Byte) = new UByte(n)
  /** Truncates an Int to its low 8 bits and wraps the result. */
  @inline final def apply(n: Int) = new UByte(n.toByte)
  // Smallest unsigned byte: 0.
  @inline final def MinValue = UByte(0)
  // -1 truncates to 0xff, i.e. 255 — the largest unsigned byte.
  @inline final def MaxValue = UByte(-1)
}
/**
 * An unsigned 8-bit integer (0 to 255) stored as a signed `Byte` inside a
 * value class. The unsigned interpretation is recovered by masking with 0xff;
 * arithmetic wraps modulo 256, which coincides with two's-complement byte
 * arithmetic for +, -, and *.
 */
class UByte(val signed: Byte) extends AnyVal with scala.math.ScalaNumericAnyConversions {
  override def toByte: Byte = signed
  // Widening conversions mask with 0xff to drop sign extension and recover 0..255.
  override def toChar: Char = (signed & 0xff).toChar
  override def toShort: Short = (signed & 0xff).toShort
  override def toInt: Int = signed & 0xff
  override def toLong: Long = signed & 0xffL
  override def toFloat: Float = toInt.toFloat
  override def toDouble: Double = toInt.toDouble
  def toBigInt: BigInt = BigInt(toInt)
  // java.lang.Number-style accessors required by ScalaNumericAnyConversions.
  def byteValue(): Byte = toByte
  def shortValue(): Short = toShort
  def intValue(): Int = toInt
  def longValue(): Long = toLong
  def floatValue(): Float = toFloat
  def doubleValue(): Double = toDouble
  def isWhole(): Boolean = true
  def underlying(): Any = signed
  // Values 128..255 are stored as negative bytes, so only a non-negative
  // `signed` round-trips through a signed Byte.
  override def isValidByte = signed >= 0
  override def isValidShort = true
  override def isValidChar = true
  override def isValidInt = true
  def isValidLong = true
  override def toString: String = toInt.toString
  // Equality can compare raw bytes directly: equal unsigned values have
  // identical bit patterns.
  def == (that: UByte): Boolean = this.signed == that.signed
  def != (that: UByte): Boolean = this.signed != that.signed
  // Ordering must go through toInt so 0x80..0xff compare as 128..255, not as
  // negative bytes.
  def <= (that: UByte) = this.toInt <= that.toInt
  def < (that: UByte) = this.toInt < that.toInt
  def >= (that: UByte) = this.toInt >= that.toInt
  def > (that: UByte) = this.toInt > that.toInt
  // Negation, addition, subtraction and multiplication work on the signed
  // bytes directly: truncation to 8 bits makes them correct modulo 256.
  def unary_- = UByte(-this.signed)
  def + (that: UByte) = UByte(this.signed + that.signed)
  def - (that: UByte) = UByte(this.signed - that.signed)
  def * (that: UByte) = UByte(this.signed * that.signed)
  // Division and remainder are NOT truncation-compatible, so they use the
  // unsigned Int values.
  def / (that: UByte) = UByte(this.toInt / that.toInt)
  def % (that: UByte) = UByte(this.toInt % that.toInt)
  def unary_~ = UByte(~this.signed)
  // NOTE: the shift distance is masked to 3 bits (shift & 7), so shifting by
  // 8 or more wraps the distance (e.g. << 8 behaves like << 0) instead of
  // producing 0.
  def << (shift: Int) = UByte((signed & 0xff) << (shift & 7))
  // Both >> and >>> are logical (zero-fill) shifts: there is no sign bit to
  // propagate in an unsigned value.
  def >> (shift: Int) = UByte((signed & 0xff) >>> (shift & 7))
  def >>> (shift: Int) = UByte((signed & 0xff) >>> (shift & 7))
  def & (that: UByte) = UByte((this.signed & 0xff) & (that.signed & 0xff))
  def | (that: UByte) = UByte((this.signed & 0xff) | (that.signed & 0xff))
  def ^ (that: UByte) = UByte((this.signed & 0xff) ^ (that.signed & 0xff))
  // Exponentiation via spire's Long pow, truncated back to 8 bits (mod 256).
  def ** (that: UByte) = UByte(pow(this.toLong, that.toLong).toInt)
}
trait UByteInstances {
  // Explicit result types on implicit definitions: inferred implicit types
  // are fragile across compiler versions and are rejected under -Xsource:3 /
  // Scala 3. Annotating them is backward-compatible for all call sites.
  implicit final val UByteAlgebra: UByteAlgebra = new UByteAlgebra
  implicit final val UByteBitString: UByteBitString = new UByteBitString
}
/** Rig (semiring with 0 and 1) instance: UByte addition and multiplication mod 256. */
private[math] trait UByteIsRig extends Rig[UByte] {
  def zero: UByte = UByte(0)
  def one: UByte = UByte(1)
  def plus(a: UByte, b: UByte): UByte = a + b
  override def times(a: UByte, b: UByte): UByte = a * b
  // Rig exponents must be non-negative; reject anything else up front.
  override def pow(a: UByte, b: Int): UByte =
    if (b < 0) throw new IllegalArgumentException("negative exponent: %s" format b)
    else a ** UByte(b)
}
/** Total order on UByte using the unsigned interpretation (0..255). */
private[math] trait UByteOrder extends Order[UByte] {
  // Compare the unsigned Int values; -1 / 0 / 1 exactly as before.
  def compare(x: UByte, y: UByte) = {
    val a = x.toInt
    val b = y.toInt
    if (a < b) -1 else if (a > b) 1 else 0
  }
  // Delegate the comparison operators to UByte's own unsigned comparisons.
  override def eqv(x: UByte, y: UByte) = x == y
  override def neqv(x: UByte, y: UByte) = x != y
  override def lt(x: UByte, y: UByte) = x < y
  override def lteqv(x: UByte, y: UByte) = x <= y
  override def gt(x: UByte, y: UByte) = x > y
  override def gteqv(x: UByte, y: UByte) = x >= y
}
/** Signed instance: unsigned values are never negative. */
private[math] trait UByteIsSigned extends Signed[UByte] {
  // 0 for zero, 1 for every non-zero value (an unsigned byte cannot be negative).
  def signum(a: UByte): Int = if (a.signed == 0) 0 else 1
  // Every unsigned value is its own absolute value.
  def abs(a: UByte): UByte = a
}
/** IsIntegral instance combining the unsigned order and sign behaviour. */
private[math] trait UByteIsReal extends IsIntegral[UByte] with UByteOrder with UByteIsSigned {
  // Exact: every value in 0..255 is representable as a Double.
  def toDouble(n: UByte): Double = n.toDouble
}
@SerialVersionUID(0L)
/** BitString instance treating a UByte as an 8-bit unsigned bit pattern. */
private[math] class UByteBitString extends BitString[UByte] with Serializable {
  /** All eight bits set: 0xff (255). */
  def one: UByte = UByte(-1: Byte)
  def zero: UByte = UByte(0: Byte)
  def and(a: UByte, b: UByte): UByte = a & b
  def or(a: UByte, b: UByte): UByte = a | b
  def complement(a: UByte): UByte = ~a
  override def xor(a: UByte, b: UByte): UByte = a ^ b
  def signed: Boolean = false
  def width: Int = 8
  def toHexString(n: UByte): String = Integer.toHexString(n.toInt)
  def bitCount(n: UByte): Int = Integer.bitCount(n.toInt)
  def highestOneBit(n: UByte): UByte = UByte(Integer.highestOneBit(n.toInt))
  def lowestOneBit(n: UByte): UByte = UByte(Integer.lowestOneBit(n.toInt))
  // Integer.numberOfLeadingZeros counts over 32 bits; subtract the 24 high
  // bits that are always zero for a value in 0..255 so the result lies in
  // 0..8, consistent with `width == 8`. (Previously returned 24..32.)
  def numberOfLeadingZeros(n: UByte): Int = Integer.numberOfLeadingZeros(n.toInt) - 24
  // Clamp to the 8-bit width: Integer.numberOfTrailingZeros(0) is 32, but for
  // an 8-bit string the correct answer is 8. Non-zero inputs are unaffected.
  def numberOfTrailingZeros(n: UByte): Int = math.min(Integer.numberOfTrailingZeros(n.toInt), 8)
  def leftShift(n: UByte, i: Int): UByte = n << i
  def rightShift(n: UByte, i: Int): UByte = n >> i
  def signedRightShift(n: UByte, i: Int): UByte = n >>> i
  // Rotations reduce the distance mod 8. UByte's shift operators mask their
  // own distance to 3 bits, which keeps the j == 0 case correct
  // (n >>> 8 behaves as n >>> 0, and n | n == n).
  def rotateLeft(n: UByte, i: Int): UByte = {
    val j = i & 7
    (n << j) | (n >>> (8 - j))
  }
  def rotateRight(n: UByte, i: Int): UByte = {
    val j = i & 7
    (n >>> j) | (n << (8 - j))
  }
}
@SerialVersionUID(0L)
// Concrete algebra object combining the Rig, Order and Signed instances above.
private[math] class UByteAlgebra extends UByteIsRig with UByteIsReal with Serializable
|
lrytz/spire
|
core/src/main/scala/spire/math/UByte.scala
|
Scala
|
mit
| 4,968 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.