| code (stringlengths 5-1M) | repo_name (stringlengths 5-109) | path (stringlengths 6-208) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64 5-1M) |
---|---|---|---|---|---|
import org.apache.spark.sql.{DataFrame, SparkSession}
/**
 * Placeholder case class for tweets: no fields are declared, so the columns
 * inferred from the JSON input are kept unchanged.
 */
case class Tweet()
class SparkAnalysis() {
var session: SparkSession = null
/**
* Creates a Spark session
*
* @return SparkSession
*/
private def createSession(): SparkSession = {
val session = SparkSession
.builder()
.appName("WSA")
// .config("spark.some.config.option", "some-value")
.config("spark.master", "local")
.config("spark.driver.bindAddress", "127.0.0.1")
.config("spark.executor.cores","2")
.getOrCreate()
this.session = session
session
}
/**
 * Saves a DataFrame as a single CSV file with a header row.
 *
 * @param filePath destination path for the CSV output
 * @param dataFrame data to persist
 */
private def saveToCsv(filePath:String, dataFrame: DataFrame): Unit = {
dataFrame.coalesce(1).write
.format("com.databricks.spark.csv")
.option("header", "true")
.mode("overwrite")
.save(filePath)
}
/**
* Runs a Spark Session and executes Spark SQL requests
*/
def run(): Unit = {
val spark = this.createSession()
import spark.implicits._
// we analyze political tweets only, so the wildcard is *.politics.json
val tweets = spark.read.json("./output/*.politics.json").as[Tweet]
.toDF() // we don't add parameters because we want to keep the same column names
.cache()
tweets.createOrReplaceTempView("tweets")
// first query
val totalTweetParJour1 = spark.sql("" +
"SELECT candidate as Candidat, Count(Distinct id_str) As totalTweet " +
"FROM tweets " +
"WHERE candidate IS NOT NULL AND date_format(cast(unix_timestamp(created_at, 'EEE MMM dd HH:mm:ss ZZZZZ yyyy') AS TIMESTAMP), 'yyyy-MM-dd') " +
"BETWEEN '2017-04-10' AND '2017-04-23' " +
"GROUP BY candidate " +
"ORDER BY totalTweet Desc")
saveToCsv("./output/graph-2a.csv", totalTweetParJour1)
// second query
val totalTweetParJour2 = spark.sql("" +
"SELECT candidate as Candidat, Count(Distinct id_str) As totalTweet " +
"FROM tweets " +
"WHERE (candidate = 'macron' OR candidate = 'le pen') " +
"AND date_format(cast(unix_timestamp(created_at, 'EEE MMM dd HH:mm:ss ZZZZZ yyyy') AS TIMESTAMP), 'yyyy-MM-dd') " +
"BETWEEN '2017-04-24' AND '2017-05-07' " +
"GROUP BY candidate " +
"ORDER BY totalTweet DESC")
saveToCsv("./output/graph-2b.csv", totalTweetParJour2)
// third query
val totalSentiParJour1 = spark.sql("" +
"SELECT date_format(cast(unix_timestamp(created_at, 'EEE MMM dd HH:mm:ss ZZZZZ yyyy') AS TIMESTAMP), 'yyyy-MM-dd') AS Jour, candidate as Candidat, Count(Distinct id_str) As nbrJournalier, Round(Avg(sentiment)*10,2) as Sentiment, Count(Distinct id_str) * Round(Avg(sentiment)*10,2) as Score " +
"FROM tweets " +
"WHERE candidate IS NOT NULL AND date_format(cast(unix_timestamp(created_at, 'EEE MMM dd HH:mm:ss ZZZZZ yyyy') AS TIMESTAMP), 'yyyy-MM-dd') " +
"BETWEEN '2017-04-10' AND '2017-04-23' " +
"GROUP BY date_format(cast(unix_timestamp(created_at, 'EEE MMM dd HH:mm:ss ZZZZZ yyyy') AS TIMESTAMP), 'yyyy-MM-dd'), candidate " +
"ORDER BY Jour ASC, Score DESC")
saveToCsv("./output/graph-3a.csv", totalSentiParJour1)
// fourth query
val totalSentiParJour2 = spark.sql("" +
"SELECT date_format(cast(unix_timestamp(created_at, 'EEE MMM dd HH:mm:ss ZZZZZ yyyy') AS TIMESTAMP), 'yyyy-MM-dd') AS Jour, candidate as Candidat, Count(Distinct id_str) As nbrJournalier, Round(Avg(sentiment)*10,2) as Sentiment, Count(Distinct id_str) * Round(Avg(sentiment)*10,2) as Score " +
"FROM tweets " +
"WHERE (candidate = 'macron' OR candidate = 'le pen') " +
"AND date_format(cast(unix_timestamp(created_at, 'EEE MMM dd HH:mm:ss ZZZZZ yyyy') AS TIMESTAMP), 'yyyy-MM-dd') " +
"BETWEEN '2017-04-24' AND '2017-05-07' " +
"GROUP BY date_format(cast(unix_timestamp(created_at, 'EEE MMM dd HH:mm:ss ZZZZZ yyyy') AS TIMESTAMP), 'yyyy-MM-dd'), candidate " +
"ORDER BY Jour ASC, Score DESC")
saveToCsv("./output/graph-3b.csv", totalSentiParJour2)
}
/**
* Stops the Spark session
*/
def stop(): Unit = {
this.session.stop
}
}
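/**
 * Hedged usage sketch (not part of the original file): how SparkAnalysis is
 * presumably driven end to end. The object name is an assumption made only for
 * this illustration.
 */
object SparkAnalysisRunner {
  def main(args: Array[String]): Unit = {
    val analysis = new SparkAnalysis()
    analysis.run()  // builds the local session, runs the four queries, writes the CSV files
    analysis.stop() // stops the SparkSession created inside run()
  }
}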
| Qathom/wsa-extract | src/main/scala/SparkAnalysis.scala | Scala | apache-2.0 | 4,209 |
/*
* La Trobe University - Distributed Deep Learning System
* Copyright 2014 Matthias Langer ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.latrobe.kernels
import edu.latrobe._
import edu.latrobe.sizes._
import scala.util.hashing._
import spire.implicits._
final class Kernel4(override val size: (Int, Int, Int, Int),
override val stride: (Int, Int, Int, Int),
override val padding0: (Int, Int, Int, Int),
override val padding1: (Int, Int, Int, Int))
extends Kernel(size._1 * size._2 * size._3 * size._4)
with CartesianKernel[(Int, Int, Int, Int)] {
require(size._1 > 0)
require(size._2 > 0)
require(size._3 > 0)
require(size._4 > 0)
require(stride._1 >= 0)
require(stride._2 >= 0)
require(stride._3 >= 0)
require(stride._4 >= 0)
require(padding0._1 < size._1 && padding1._1 < size._1)
require(padding0._2 < size._2 && padding1._2 < size._2)
require(padding0._3 < size._3 && padding1._3 < size._3)
require(padding0._4 < size._4 && padding1._4 < size._4)
override def toString
: String = s"Kernel4[$size, $stride, $padding0, $padding1]"
override def canEqual(that: Any): Boolean = that.isInstanceOf[Kernel4]
override def hashCode(): Int = {
var tmp = super.hashCode()
tmp = MurmurHash3.mix(tmp, size.hashCode())
tmp = MurmurHash3.mix(tmp, stride.hashCode())
tmp = MurmurHash3.mix(tmp, padding0.hashCode())
tmp = MurmurHash3.mix(tmp, padding1.hashCode())
tmp
}
override protected def doEquals(other: Equatable)
: Boolean = super.doEquals(other) && (other match {
case other: Kernel4 =>
size == other.size &&
stride == other.stride &&
padding0 == other.padding0 &&
padding1 == other.padding1
case _ =>
false
})
override val ensuresAllValid: Boolean = {
val x = padding0._1 <= 0 && padding1._1 <= 0
val y = padding0._2 <= 0 && padding1._2 <= 0
val z = padding0._3 <= 0 && padding1._3 <= 0
val w = padding0._4 <= 0 && padding1._4 <= 0
x && y && z && w
}
override def isCentered: Boolean = {
val x = CartesianKernel.isCentered(size._1, padding0._1, padding1._1)
val y = CartesianKernel.isCentered(size._2, padding0._2, padding1._2)
val z = CartesianKernel.isCentered(size._3, padding0._3, padding1._3)
val w = CartesianKernel.isCentered(size._4, padding0._4, padding1._4)
x && y && z && w
}
override def hasUnitStride: Boolean = {
stride._1 == 1 && stride._2 == 1 && stride._3 == 1 && stride._4 == 1
}
val noValuesPerPlane: Int = size._1 * size._2
val noValuesPerBox: Int = size._1 * size._2 * size._3
// ---------------------------------------------------------------------------
// Pair index conversion related.
// ---------------------------------------------------------------------------
override def localPairNoOf(localPairPos: (Int, Int, Int, Int)): Int = {
require(localPairPos._1 >= 0 && localPairPos._1 < size._1)
require(localPairPos._2 >= 0 && localPairPos._2 < size._2)
require(localPairPos._3 >= 0 && localPairPos._3 < size._3)
require(localPairPos._4 >= 0 && localPairPos._4 < size._4)
val x = localPairPos._1
val y = localPairPos._2 * size._1
val z = localPairPos._3 * noValuesPerPlane
val w = localPairPos._4 * noValuesPerBox
x + y + z + w
}
override def localPairPositionOf(localPairNo: Int): (Int, Int, Int, Int) = {
require(localPairNo >= 0 && localPairNo < noValues)
val w = localPairNo / noValuesPerBox
val wRem = localPairNo % noValuesPerBox
val z = wRem / noValuesPerPlane
val zRem = wRem % noValuesPerPlane
val y = zRem / size._1
val x = zRem % size._1
(x, y, z, w)
}
override def localPairPositionOfCenterPair
: (Int, Int, Int, Int) = (size._1 / 2, size._2 / 2, size._3 / 2, size._4 / 2)
// ---------------------------------------------------------------------------
// Offset lookup.
// ---------------------------------------------------------------------------
override def offsetOfFirstPair(inputSize: Size): Int = inputSize match {
case inputSize: Size4 => offsetOfFirstPair(inputSize)
}
def offsetOfFirstPair(inputSize: Size4): Int = {
val inpStride = inputSize.stride
val x = padding0._1 * inpStride._1
val y = padding0._2 * inpStride._2
val z = padding0._3 * inpStride._3
val w = padding0._4 * inpStride._4
-x - y - z - w
}
override def relativeFirstOffsetOfPairOf(inputSize: Size, localPairNo: Int)
: Int = inputSize match {
case inputSize: Size4 => relativeOffsetOfPairOf(inputSize, localPairNo)
}
def relativeOffsetOfPairOf(inputSize: Size4, localPairNo: Int)
: Int = relativeOffsetOfPairOf(inputSize, localPairPositionOf(localPairNo))
override def relativeFirstOffsetOfPairOf(inputSize: Size,
localPairPosition: (Int, Int, Int, Int))
: Int = inputSize match {
case inputSize: Size4 =>
relativeOffsetOfPairOf(inputSize, localPairPosition)
}
def relativeOffsetOfPairOf(inputSize: Size4, localPairPosition: (Int, Int, Int, Int))
: Int = {
require(localPairPosition._1 >= 0 && localPairPosition._1 < size._1)
require(localPairPosition._2 >= 0 && localPairPosition._2 < size._2)
require(localPairPosition._3 >= 0 && localPairPosition._3 < size._3)
require(localPairPosition._4 >= 0 && localPairPosition._4 < size._4)
val inpStride = inputSize.stride
val x = localPairPosition._1 * inpStride._1
val y = localPairPosition._2 * inpStride._2
val z = localPairPosition._3 * inpStride._3
val w = localPairPosition._4 * inpStride._4
x + y + z + w
}
// ---------------------------------------------------------------------------
// Derived metrics.
// ---------------------------------------------------------------------------
override def inputSizeFor(noChannels: Int)
: Size4 = Size4(size, noChannels)
def outputSizeFor(inputSize: Size4, noMaps: Int)
: Size4 = outputSizeFor(inputSize, noMaps, CartesianKernel.outputSize)
override protected def doOutputSizeFor(inputSize: Size,
noMaps: Int,
callback: (Int, Int, Int, Int, Int) => Int)
: Size = inputSize match {
case inputSize: Size4 => outputSizeFor(inputSize, noMaps, callback)
}
def outputSizeFor(inputSize: Size4,
noMaps: Int,
callback: (Int, Int, Int, Int, Int) => Int)
: Size4 = Size4(
callback(inputSize.dims._1, size._1, stride._1, padding0._1, padding1._1),
callback(inputSize.dims._2, size._2, stride._2, padding0._2, padding1._2),
callback(inputSize.dims._3, size._3, stride._3, padding0._3, padding1._3),
callback(inputSize.dims._4, size._4, stride._4, padding0._4, padding1._4),
noMaps
)
// ---------------------------------------------------------------------------
// Iteration methods.
// ---------------------------------------------------------------------------
override def foreachOutput(inputSize: Size,
noMaps: Int,
fn: (Int, Int, Int) => Unit)
: Unit = {
if (ensuresAllValid) {
inputSize match {
case inputSize: Size4 => doForeachOutputSafe(inputSize, noMaps, fn)
case _ => throw new IllegalArgumentException
}
}
else {
inputSize match {
case inputSize: Size4 => doForeachOutputUnsafe(inputSize, noMaps, fn)
case _ => throw new IllegalArgumentException
}
}
}
protected def doForeachOutputSafe(inputSize: Size4,
noMaps: Int,
fn: (Int, Int, Int) => Unit)
: Unit = {
val outputSize = outputSizeFor(inputSize, noMaps)
doForeachOutputSafe(
inputSize,
outputSize,
0,
outputSize.noValues,
offsetOfFirstPair(inputSize),
fn
)
}
protected def doForeachOutputSafe(inputSize: Size4,
outputSize: Size4,
baseIndex: Int,
endIndex: Int,
baseOffset: Int,
fn: (Int, Int, Int) => Unit)
: Unit = {
// Pre-compute frequently used values.
val outStep = outputSize.stride
val (inpStepX, gapY, gapZ, gapW) = {
val inpStride = inputSize.stride
val inpStepX = stride._1 * inpStride._1
val inpStepY = stride._2 * inpStride._2
val inpStepZ = stride._3 * inpStride._3
val inpStepW = stride._4 * inpStride._4
val gapY = inpStepY - inpStepX * outputSize.dims._1
val gapZ = inpStepZ - inpStepY * outputSize.dims._2
val gapW = inpStepW - inpStepZ * outputSize.dims._3
(inpStepX, gapY, gapZ, gapW)
}
// Move kernel through input.
var offset = baseOffset
var i0 = baseIndex
while (i0 < endIndex) {
val nextGapW = i0 + outStep._4
while (i0 < nextGapW) {
val nextGapZ = i0 + outStep._3
while (i0 < nextGapZ) {
val nextGapY = i0 + outStep._2
while (i0 < nextGapY) {
val i1 = i0 + outStep._1
fn(i0, i1, offset)
offset += inpStepX
i0 = i1
}
offset += gapY
}
offset += gapZ
}
offset += gapW
}
}
protected def doForeachOutputUnsafe(inputSize: Size4,
noMaps: Int,
fn: (Int, Int, Int) => Unit)
: Unit = {
val outputSize = outputSizeFor(inputSize, noMaps)
doForeachOutputUnsafe(
inputSize, outputSize, 0, outputSize.noValues, offsetOfFirstPair(inputSize), fn
)
}
// TODO: Could be done faster!
protected def doForeachOutputUnsafe(inputSize: Size4,
outputSize: Size4,
baseIndex: Int,
endIndex: Int,
baseOffset: Int,
fn: (Int, Int, Int) => Unit)
: Unit = doForeachOutputUnsafe(
inputSize, outputSize, baseIndex, endIndex, baseOffset,
(i0, i1, offset0, x0, y0, z0, w0) => fn(i0, i1, offset0)
)
protected def doForeachOutputUnsafe(inputSize: Size4,
outputSize: Size4,
baseIndex: Int,
endIndex: Int,
baseOffset: Int,
fn: (Int, Int, Int, Int, Int, Int, Int) => Unit)
: Unit = {
// Pre-compute frequently used values.
val outStep = outputSize.stride
val (inpStepX, gapY, gapZ, gapW) = {
val inpStride = inputSize.stride
val inpStepX = stride._1 * inpStride._1
val inpStepY = stride._2 * inpStride._2
val inpStepZ = stride._3 * inpStride._3
val inpStepW = stride._4 * inpStride._4
val gapY = inpStepY - inpStepX * outputSize.dims._1
val gapZ = inpStepZ - inpStepY * outputSize.dims._2
val gapW = inpStepW - inpStepZ * outputSize.dims._3
(inpStepX, gapY, gapZ, gapW)
}
// Move kernel through input.
var w0 = -padding0._4
var offset = baseOffset
var i0 = baseIndex
while (i0 < endIndex) {
var z0 = -padding0._3
val nextGapW = i0 + outStep._4
while (i0 < nextGapW) {
var y0 = -padding0._2
val nextGapZ = i0 + outStep._3
while (i0 < nextGapZ) {
var x0 = -padding0._1
val nextGapY = i0 + outStep._2
while (i0 < nextGapY) {
val i1 = i0 + outStep._1
fn(i0, i1, offset, x0, y0, z0, w0)
offset += inpStepX
x0 += stride._1
i0 = i1
}
offset += gapY
y0 += stride._2
}
offset += gapZ
z0 += stride._3
}
offset += gapW
w0 += stride._4
}
}
override def foreachValidPairEx(inputSize: Size,
noMaps: Int,
fn: (Int, Int, Int) => ((Int, Int, Int, Int) => Unit, () => Unit))
: Unit = {
if (ensuresAllValid) {
inputSize match {
case inputSize: Size4 => doForeachValidPairExSafe(inputSize, noMaps, fn)
case _ => throw new IllegalArgumentException
}
}
else {
inputSize match {
case inputSize: Size4 => doForeachValidPairExUnsafe(inputSize, noMaps, fn)
case _ => throw new IllegalArgumentException
}
}
}
protected def doForeachValidPairExSafe(inputSize: Size4,
noMaps: Int,
fn: (Int, Int, Int) => ((Int, Int, Int, Int) => Unit, () => Unit))
: Unit = {
val outputSize = outputSizeFor(inputSize, noMaps)
doForeachValidPairExSafe(
inputSize,
outputSize,
0,
outputSize.noValues,
offsetOfFirstPair(inputSize),
fn
)
}
protected def doForeachValidPairExSafe(inputSize: Size4,
outputSize: Size4,
baseIndex: Int,
endIndex: Int,
baseOffset: Int,
fn: (Int, Int, Int) => ((Int, Int, Int, Int) => Unit, () => Unit))
: Unit = {
// Pre-compute frequently used values.
val noValuesX = size._1 * inputSize.noChannels
val noValuesXY = size._2 * noValuesX
val noValuesXYZ = size._3 * noValuesXY
val noValuesXYZW = size._4 * noValuesXYZ
val (gapY, gapZ, gapW) = {
val inpStride = inputSize.stride
val gapY = inpStride._2 - noValuesX // noValuesX == size._1 * inpStride._1
val gapZ = inpStride._3 - size._2 * inpStride._2
val gapW = inpStride._4 - size._3 * inpStride._3
(gapY, gapZ, gapW)
}
// Foreach outputs, foreach pair.
doForeachOutputSafe(inputSize, outputSize, baseIndex, endIndex, baseOffset,
(i0: Int, i1: Int, outputOffset: Int) => {
val (fnPair, fnPost) = fn(i0, i1, outputOffset)
// Cycle through kernel dimensions, starting at this output's input offset.
var offset0 = outputOffset
var j0 = 0
while (j0 < noValuesXYZW) {
val nextGapW = j0 + noValuesXYZ
while (j0 < nextGapW) {
val nextGapZ = j0 + noValuesXY
while (j0 < nextGapZ) {
val nextGapY = j0 + noValuesX
while (j0 < nextGapY) {
val j1 = j0 + inputSize.noChannels
val offset1 = offset0 + inputSize.noChannels
fnPair(j0, j1, offset0, offset1)
offset0 = offset1
j0 = j1
}
offset0 += gapY
}
offset0 += gapZ
}
offset0 += gapW
}
// Call post.
fnPost()
}
)
}
protected def doForeachValidPairExUnsafe(inputSize: Size4,
noMaps: Int,
fn: (Int, Int, Int) => ((Int, Int, Int, Int) => Unit, () => Unit))
: Unit = {
val outputSize = outputSizeFor(inputSize, noMaps)
doForeachValidPairExUnsafe(
inputSize,
outputSize,
0,
outputSize.noValues,
offsetOfFirstPair(inputSize),
fn
)
}
protected def doForeachValidPairExUnsafe(inputSize: Size4,
outputSize: Size4,
baseIndex: Int,
endIndex: Int,
baseOffset: Int,
fn: (Int, Int, Int) => ((Int, Int, Int, Int) => Unit, () => Unit))
: Unit = {
// Pre-compute frequently used values.
val maxX = inputSize.dims._1 + Math.min(padding1._1, 0)
val maxY = inputSize.dims._2 + Math.min(padding1._2, 0)
val maxZ = inputSize.dims._3 + Math.min(padding1._3, 0)
val maxW = inputSize.dims._4 + Math.min(padding1._4, 0)
val (gapY, gapZ, gapW) = {
val inpStride = inputSize.stride
val gapY = inpStride._2 - size._1 * inpStride._1
val gapZ = inpStride._3 - size._2 * inpStride._2
val gapW = inpStride._4 - size._3 * inpStride._3
(gapY, gapZ, gapW)
}
/*
val inpStride = inputSize.stride
val limitL = inpStride._1 * Math.max(padding._1, 0)
val limitR = inpStride._1 * Math.min(Math.max(size._1 - padding._1, 0), size._1)
val noValuesX = inpStride._1 * size._1
val limitT = noValuesX * Math.max(padding._2, 0)
val limitB = noValuesX * Math.min(Math.max(size._2 - padding._2, 0), size._2)
val noValuesXY = noValuesX * size._2
val limitN = noValuesXY * Math.max(padding._3, 0)
val limitF = noValuesXY * Math.min(Math.max(size._3 - padding._3, 0), size._3)
val noValuesXYZ = noValuesXY * size._3
val limit0 = noValuesXYZ * Math.max(padding._4, 0)
val limit1 = noValuesXYZ * Math.min(Math.max(size._4 - padding._4, 0), size._4)
val noValuesXYZW = noValuesXYZ * size._4
*/
// Foreach outputs, foreach pair.
doForeachOutputUnsafe(inputSize, outputSize, baseIndex, endIndex, baseOffset,
// TODO: Can do this slightly faster!
(i0: Int, i1: Int, baseOffset: Int, x0: Int, y0: Int, z0: Int, w0: Int) => {
val (fnPair, fnPost) = fn(i0, i1, baseOffset)
// TODO: Could be done slightly faster! (Call safe method if safe!)
val x1 = x0 + size._1
val y1 = y0 + size._2
val z1 = z0 + size._3
val w1 = w0 + size._4
/*
// If unsafe kernel instance (left, right, top, bottom, near, far, past, late)
val begX = if (x0 == 0) limitL else 0
val begY = if (y0 == 0) limitT else 0
val begZ = if (z0 == 0) limitN else 0
val begW = if (w0 == 0) limit0 else 0
val endX = if (x0 == outputSize.dims._1 - 1) limitR else noValuesX
val endY = if (y0 == outputSize.dims._2 - 1) limitB else noValuesXY
val endZ = if (z0 == outputSize.dims._3 - 1) limitF else noValuesXYZ
val endW = if (w0 == outputSize.dims._4 - 1) limit0 else noValuesXYZW
val remY = noValuesX - endX
val remZ = noValuesXY - endY
val remW = noValuesXYZ - endZ
val gapY = inpStride._2 - endX // Note different gaps!
//val gapZ = (inputSize.height - yEnd) * inputSize.width
// TODO: Can do this slightly faster!
val gapZ = inpStride._3 - inpStride._2 * (endY / noValuesX)
val gapW = inpStride._4 - inpStride._3 * (endZ / noValuesXY)
*/
// Cycle through kernel dimensions.
var offset0 = baseOffset
var j0 = 0
cfor(w0)(_ < w1, _ + 1)(w => {
cfor(z0)(_ < z1, _ + 1)(z => {
cfor(y0)(_ < y1, _ + 1)(y => {
cfor(x0)(_ < x1, _ + 1)(x => {
val j1 = j0 + inputSize.noChannels
val offset1 = offset0 + inputSize.noChannels
// TODO: Could do this slightly faster!
if (x >= 0 && x < maxX && y >= 0 && y < maxY && z >= 0 && z < maxZ && w >= 0 && w < maxW) {
fnPair(j0, j1, offset0, offset1)
}
offset0 = offset1
j0 = j1
})
offset0 += gapY
})
offset0 += gapZ
})
})
/*
var offset0 = baseOffset
var j0 = begW
while (j0 < endW) {
val nextGapZ = j0 + endZ
j0 += begZ
while (j0 < nextGapZ) {
val nextGapY = j0 + endY
j0 += begY
while (j0 < nextGapY) {
val nextGapX = j0 + endX
j0 += begX
while (j0 < nextGapX) {
val j1 = j0 + inpStride._1
val offset1 = offset0 + inpStride._1
fnPair(j0, j1, offset0, offset1)
offset0 = offset1
j0 = j1
}
offset0 += gapY
j0 += remY
}
offset0 += gapZ
j0 += remZ
}
offset0 += gapW
j0 += remW
}
*/
// Call post.
fnPost()
}
)
}
}
object Kernel4 {
final def apply(size: (Int, Int, Int, Int))
: Kernel4 = apply(size, (1, 1, 1, 1))
final def apply(size: (Int, Int, Int, Int),
stride: (Int, Int, Int, Int))
: Kernel4 = apply(size, stride, (0, 0, 0, 0))
final def apply(size: (Int, Int, Int, Int),
stride: (Int, Int, Int, Int),
padding: (Int, Int, Int, Int))
: Kernel4 = apply(size, stride, padding, padding)
final def apply(size: (Int, Int, Int, Int),
stride: (Int, Int, Int, Int),
padding0: (Int, Int, Int, Int),
padding1: (Int, Int, Int, Int))
: Kernel4 = new Kernel4(size, stride, padding0, padding1)
final def centered(size: (Int, Int, Int, Int))
: Kernel4 = centered(size, (1, 1, 1, 1))
final def centered(size: (Int, Int, Int, Int),
stride: (Int, Int, Int, Int))
: Kernel4 = apply(
size,
stride,
(size._1 / 2, size._2 / 2, size._3 / 2, size._4 / 2),
((size._1 - 1) / 2, (size._2 - 1) / 2, (size._3 - 1) / 2, (size._4 - 1) / 2)
)
}
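/**
 * Hedged illustration (not from the original source): a worked round trip through
 * the pair-index conversion defined above, for a small 3x3x2x2 kernel with unit
 * stride and zero padding. The numbers follow directly from localPairNoOf and
 * localPairPositionOf.
 */
object Kernel4IndexExample {
  def main(args: Array[String]): Unit = {
    val kernel = Kernel4((3, 3, 2, 2))
    // noValuesPerPlane = 9, noValuesPerBox = 18, so (1, 2, 0, 1) maps to 1 + 2*3 + 0*9 + 1*18 = 25.
    val no = kernel.localPairNoOf((1, 2, 0, 1))
    assert(no == 25)
    // ...and the conversion is reversible.
    assert(kernel.localPairPositionOf(no) == (1, 2, 0, 1))
  }
}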
| bashimao/ltudl | base/src/main/scala/edu/latrobe/kernels/Kernel4.scala | Scala | apache-2.0 | 22,379 |
/**
* Copyright (C) 2015-2016 Philipp Haller
*/
package lacasa.neg
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
import org.junit.Test
import lacasa.util._
@RunWith(classOf[JUnit4])
class ControlThrowableSpec {
@Test
def test1() {
println(s"ControlThrowableSpec.test1")
expectError("propagated") {
"""
class C {
import scala.util.control.ControlThrowable
import lacasa.Box._
def m(): Unit = {
try {
val x = 0
val y = x + 10
println(s"res: ${x + y}")
} catch {
case t: ControlThrowable =>
println("hello")
uncheckedCatchControl
}
}
}
"""
}
}
@Test
def test2() {
println(s"ControlThrowableSpec.test2")
expectError("propagated") {
"""
class C {
import scala.util.control.ControlThrowable
def m(): Unit = {
try {
throw new ControlThrowable {}
} catch {
case t: Throwable =>
println("hello")
}
}
}
"""
}
}
@Test
def test3() {
println(s"ControlThrowableSpec.test3")
expectError("propagated") {
"""
class SpecialException(msg: String) extends RuntimeException
class C {
import scala.util.control.ControlThrowable
def m(): Unit = {
val res = try { 5 } catch {
case s: SpecialException => println("a")
case c: ControlThrowable => println("b")
case t: Throwable => println("c")
}
}
}
"""
}
}
}
| phaller/lacasa | plugin/src/test/scala/lacasa/neg/ControlThrowable.scala | Scala | bsd-3-clause | 1,725 |
/**
* Copyright 2014 Dropbox, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package djinni
import djinni.ast.TypeDef
import scala.collection.immutable
package object meta {
case class MExpr(base: Meta, args: Seq[MExpr])
abstract sealed class Meta
{
val numParams: Int
}
case class MParam(name: String) extends Meta { val numParams = 0 }
case class MDef(name: String, override val numParams: Int, defType: DefType, body: TypeDef) extends Meta
abstract sealed class MOpaque extends Meta { val idlName: String }
abstract sealed class DefType
case object DEnum extends DefType
case object DInterface extends DefType
case object DRecord extends DefType
case class MPrimitive(_idlName: String, jName: String, jniName: String, cName: String, jBoxed: String, jSig: String, objcName: String, objcBoxed: String) extends MOpaque { val numParams = 0; val idlName = _idlName }
case object MString extends MOpaque { val numParams = 0; val idlName = "string" }
case object MBinary extends MOpaque { val numParams = 0; val idlName = "binary" }
case object MOptional extends MOpaque { val numParams = 1; val idlName = "optional" }
case object MList extends MOpaque { val numParams = 1; val idlName = "list" }
case object MSet extends MOpaque { val numParams = 1; val idlName = "set" }
case object MMap extends MOpaque { val numParams = 2; val idlName = "map" }
val defaults: Map[String,MOpaque] = immutable.HashMap(
("i8", MPrimitive("i8", "byte", "jbyte", "int8_t", "Byte", "B", "int8_t", "NSNumber")),
("i16", MPrimitive("i16", "short", "jshort", "int16_t", "Short", "S", "int16_t", "NSNumber")),
("i32", MPrimitive("i32", "int", "jint", "int32_t", "Integer", "I", "int32_t", "NSNumber")),
("i64", MPrimitive("i64", "long", "jlong", "int64_t", "Long", "J", "int64_t", "NSNumber")),
("f64", MPrimitive("f64", "double", "jdouble", "double", "Double", "D", "double", "NSNumber")),
("bool", MPrimitive("bool", "boolean", "jboolean", "bool", "Boolean", "Z", "BOOL", "NSNumber")),
("string", MString),
("binary", MBinary),
("optional", MOptional),
("list", MList),
("set", MSet),
("map", MMap))
}
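/**
 * Hedged illustration (not part of the original file): how the metas above compose
 * into an MExpr describing a nested IDL type such as map<string, list<i32>>.
 * The object name is an assumption used only for this sketch.
 */
object MetaExample {
  import djinni.meta._

  // MMap takes two type arguments (numParams = 2), MList takes one (numParams = 1).
  val mapOfStringToListOfI32: MExpr =
    MExpr(MMap, Seq(
      MExpr(MString, Seq.empty),
      MExpr(MList, Seq(MExpr(defaults("i32"), Seq.empty)))))
}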
| Bennyyyy/djinni | src/source/meta.scala | Scala | apache-2.0 | 2,703 |
class A {
def m(x: Int, y: Int, z: Int, y: Int, t: Int) { }
}
object Main { def main(args: Array[String]) { } }
| tobast/compil-petitscala | tests/typing/bad/testfile-multiple_parameters1-1.scala | Scala | gpl-3.0 | 116 |
import sbt._
import Keys._
import sbtrelease._
import com.typesafe.sbt.SbtSite.SiteKeys._
import com.typesafe.sbt.SbtGhPages.GhPagesKeys._
object Build extends Build {
def executeTask(task: TaskKey[_], info: String): State => State = (st: State) => {
st.log.info(info)
val extracted = Project.extract(st)
val ref: ProjectRef = extracted.get(thisProjectRef)
val (newState, _) = extracted.runTask(task in ref, st)
newState
}
lazy val generateAndPushDocs: ReleaseStep = { st: State =>
val st2 = executeTask(makeSite, "Making doc site")(st)
executeTask(pushSite, "Publishing doc site")(st2)
}
}
| derrickburns/generalized-kmeans-clustering | project/Build.scala | Scala | apache-2.0 | 629 |
/** Copyright 2015 TappingStone, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prediction.workflow
import akka.actor.Actor
import akka.event.Logging
import io.prediction.data.storage.EngineInstance
import org.json4s.JValue
class PluginsActor(engineVariant: String) extends Actor {
implicit val system = context.system
val log = Logging(system, this)
val pluginContext = EngineServerPluginContext(log, engineVariant)
def receive: PartialFunction[Any, Unit] = {
case (ei: EngineInstance, q: JValue, p: JValue) =>
pluginContext.outputSniffers.values.foreach(_.process(ei, q, p, pluginContext))
case h: PluginsActor.HandleREST =>
try {
sender() ! pluginContext.outputSniffers(h.pluginName).handleREST(h.pluginArgs)
} catch {
case e: Exception =>
sender() ! s"""{"message":"${e.getMessage}"}"""
}
case _ =>
log.error("Unknown message sent to the Engine Server output sniffer plugin host.")
}
}
object PluginsActor {
case class HandleREST(pluginName: String, pluginArgs: Seq[String])
}
| adamharish/PredictionIO | core/src/main/scala/io/prediction/workflow/EngineServerPluginsActor.scala | Scala | apache-2.0 | 1,611 |
package com.twitter.finagle.client.utils
import com.twitter.finagle.client.{StackClient, StdStackClient, Transporter}
import com.twitter.finagle.dispatch.SerialClientDispatcher
import com.twitter.finagle.netty4.Netty4Transporter
import com.twitter.finagle.param.ProtocolLibrary
import com.twitter.finagle.stats.NullStatsReceiver
import com.twitter.finagle.transport.{Transport, TransportContext}
import com.twitter.finagle.{Service, ServiceFactory, Stack}
import io.netty.channel.{
ChannelHandlerContext,
ChannelOutboundHandlerAdapter,
ChannelPipeline,
ChannelPromise
}
import io.netty.handler.codec.string.{StringDecoder, StringEncoder}
import java.net.SocketAddress
import java.nio.charset.StandardCharsets.UTF_8
object StringClient {
val protocolLibrary = "string"
object StringClientPipeline extends (ChannelPipeline => Unit) {
private class DelimEncoder(delim: Char) extends ChannelOutboundHandlerAdapter {
override def write(ctx: ChannelHandlerContext, msg: Any, p: ChannelPromise): Unit = {
val delimMsg = msg match {
case m: String => m + delim
case m => m
}
ctx.write(delimMsg, p)
}
}
def apply(pipeline: ChannelPipeline): Unit = {
pipeline.addLast("stringEncode", new StringEncoder(UTF_8))
pipeline.addLast("stringDecode", new StringDecoder(UTF_8))
pipeline.addLast("line", new DelimEncoder('\\n'))
}
}
private[finagle] object NoDelimStringPipeline extends (ChannelPipeline => Unit) {
def apply(pipeline: ChannelPipeline): Unit = {
pipeline.addLast("stringEncode", new StringEncoder(UTF_8))
pipeline.addLast("stringDecode", new StringDecoder(UTF_8))
}
}
val DefaultParams: Stack.Params = Stack.Params.empty + ProtocolLibrary(protocolLibrary)
case class Client(
stack: Stack[ServiceFactory[String, String]] = StackClient.newStack,
params: Stack.Params = DefaultParams,
appendDelimiter: Boolean = true)
extends StdStackClient[String, String, Client]
with Stack.Transformable[Client] {
protected def copy1(
stack: Stack[ServiceFactory[String, String]] = this.stack,
params: Stack.Params = this.params
): Client = copy(stack, params)
protected type In = String
protected type Out = String
protected type Context = TransportContext
protected def newTransporter(
addr: SocketAddress
): Transporter[String, String, TransportContext] =
if (appendDelimiter) Netty4Transporter.raw(StringClientPipeline, addr, params)
else Netty4Transporter.raw(NoDelimStringPipeline, addr, params)
protected def newDispatcher(
transport: Transport[In, Out] { type Context <: Client.this.Context }
): Service[String, String] =
new SerialClientDispatcher(transport, NullStatsReceiver)
def withEndpoint(s: Service[String, String]): Client =
withStack(
stack.replace(
StackClient.Role.prepConn,
(_: ServiceFactory[String, String]) => ServiceFactory.const(s)
)
)
override def transformed(t: Stack.Transformer): Client =
withStack(t(stack))
}
def client: Client = Client()
}
| twitter/finagle | finagle-core/src/test/scala/com/twitter/finagle/client/utils/StringClient.scala | Scala | apache-2.0 | 3,167 |
/*
* Some of the traits, objects and classes are based on the source code from Functional Programming in Scala.
* The original source code from Functional Programming in Scala can be found at https://github.com/fpinscala/fpinscala.
* These code sections are copyrighted by Manning Publications, Co:
*
*
* Copyright (c) 2012, Manning Publications, Co.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without limitation the
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
* Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
* WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
* OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
* OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
*
* All other code sections are copyrighted by Emil Nilsson:
*
*
* The MIT License (MIT)
*
* Copyright (c) 2015 Emil Nilsson
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package io.github.enil.chapter05
/**
* From Functional Programming in Scala.
*/
sealed trait Stream[+A] {
/**
* From Functional Programming in Scala.
*/
def foldRight[B](z: => B)(f: (A, => B) => B): B = this match {
case Cons(h, t) => f(h(), t().foldRight(z)(f))
case _ => z
}
/**
* @author Emil Nilsson
*/
def toList: List[A] = this match {
case Empty => Nil
case Cons(h, t) => h() :: t().toList
}
/**
* @author Emil Nilsson
*/
def take(n: Int): Stream[A] = this match {
case Empty => Stream.empty
case Cons(h, t) => if (n > 0) Stream.cons(h(), t().take(n - 1)) else Stream.empty
}
/**
* @author Emil Nilsson
*/
def drop(n: Int): Stream[A] = this match {
case Empty => Stream.empty
case Cons(h, t) => if (n > 0) t().drop(n - 1) else this
}
/**
* @author Emil Nilsson
*/
def takeWhile(f: A => Boolean): Stream[A] = this match {
case Cons(h, t) if f(h()) => Stream.cons(h(), t().takeWhile(f))
case _ => Stream.empty
}
/**
* @author Emil Nilsson
*/
def forAll(p: A => Boolean): Boolean = this match {
case Cons(h, t) if p(h()) => t().forAll(p)
case Cons(h, t) => false
case Empty => true
}
/**
* @author Emil Nilsson
*/
def takeWhile2(p: A => Boolean): Stream[A] =
foldRight(Stream.empty[A])((h, t) => if (p(h)) Stream.cons(h, t) else Stream.empty[A])
/**
* @author Emil Nilsson
*/
def headOption: Option[A] =
this.foldRight[Option[A]](None)((h, _) => Some(h))
/**
* @author Emil Nilsson
*/
def map[B](f: A => B): Stream[B] =
foldRight(Stream.empty[B])((h, t) => Stream.cons(f(h), t))
/**
* @author Emil Nilsson
*/
def filter(p: A => Boolean): Stream[A] =
foldRight(Stream.empty[A])((h, t) => if (p(h)) Stream.cons(h, t) else t)
/**
* @author Emil Nilsson
*/
def append[B >: A](bs: => Stream[B]): Stream[B] =
foldRight(bs)(Stream.cons(_, _))
/**
* @author Emil Nilsson
*/
def flatMap[B](f: A => Stream[B]): Stream[B] =
foldRight(Stream.empty[B])((h, t) => f(h) append t)
/**
* @author Emil Nilsson
*/
def map2[B](f: A => B): Stream[B] =
Stream.unfold(this) {
case Empty => None
case Cons(h, t) => Some(f(h()), t())
}
/**
* @author Emil Nilsson
*/
def take2(n: Int): Stream[A] =
Stream.unfold((this, n)) {
case (Cons(h, t), r) if r > 0 => Some(h(), (t(), r - 1))
case _ => None
}
/**
* @author Emil Nilsson
*/
def takeWhile3(p: A => Boolean): Stream[A] =
Stream.unfold(this) {
case Cons(h, t) if p(h()) => Some(h(), t())
case _ => None
}
/**
* @author Emil Nilsson
*/
def zipWith[B, C](bs: Stream[B])(f: (A, B) => C): Stream[C] =
Stream.unfold(this, bs) {
case (Cons(ah, at), Cons(bh, bt)) => Some(f(ah(), bh()), (at(), bt()))
case _ => None
}
/**
* @author Emil Nilsson
*/
def zipAll[B](s2: Stream[B]): Stream[(Option[A], Option[B])] =
Stream.unfold(this, s2) {
case (Empty, Empty) => None
case ((Cons(ah, at), Cons(bh, bt))) => Some((Some(ah()), Some(bh())), (at(), bt()))
case ((Cons(ah, at), Empty)) => Some((Some(ah()), None), (at(), Empty))
case (Empty, Cons(bh, bt)) => Some((None, Some(bh())), (Empty, bt()))
}
/**
* @author Emil Nilsson
*/
def tails: Stream[Stream[A]] =
Stream.unfold(Option(this)) {
case None => None
case Some(Empty) => Some(Empty, None)
case Some(as@Cons(_, t)) => Some(as, Some(t()))
}
/**
* @author Emil Nilsson
*/
def startsWith[B >: A](s: Stream[B]): Boolean =
zipAll(s) map {
case (Some(a), Some(b)) if a == b => true
case (Some(_), None) => true
case _ => false
} forAll identity
/**
* @author Emil Nilsson
*/
def scanRight[B](z: B)(f: (A, => B) => B): Stream[B] = {
foldRight(Stream(z), z) { case (a, (bs, b)) =>
lazy val x = f(a, b)
(Stream.cons(x, bs), x)
}._1
}
/**
* @author Emil Nilsson
*/
def tails2: Stream[Stream[A]] = scanRight(Empty: Stream[A])(Stream.cons(_, _))
}
/**
* From Functional Programming in Scala.
*/
case object Empty extends Stream[Nothing]
/**
* From Functional Programming in Scala.
*/
case class Cons[+A](h: () => A, t: () => Stream[A]) extends Stream[A] {
/**
* A comparison method is needed to work in assert without having to convert to lists.
*
* @author Emil Nilsson
*/
override def equals(obj: Any): Boolean = obj match {
case that: Cons[A] => that.h() == h() && that.t() == t()
case _ => false
}
}
/**
* From Functional Programming in Scala.
*/
object Stream {
/**
* From Functional Programming in Scala.
*/
def cons[A](hd: => A, tl: => Stream[A]): Stream[A] = {
lazy val head = hd
lazy val tail = tl
Cons(() => head, () => tail)
}
/**
* From Functional Programming in Scala.
*/
def empty[A]: Stream[A] = Empty
/**
* From Functional Programming in Scala.
*/
def apply[A](as: A*): Stream[A] =
if (as.isEmpty) empty else cons(as.head, apply(as.tail: _*))
/**
* @author Emil Nilsson
*/
def constant[A](a: A): Stream[A] =
cons(a, constant(a))
/**
* @author Emil Nilsson
*/
def from(n: Int): Stream[Int] =
cons(n, from(n + 1))
/**
* @author Emil Nilsson
*/
def unfold[A, S](z: S)(f: S => Option[(A, S)]): Stream[A] =
f(z).map { case (a, s) =>
cons(a, unfold(s)(f))
}.getOrElse(empty)
}
/**
* Exercise 5.1: implement Stream.toList.
*
* @author Emil Nilsson
*/
object Exercise51 {
def main(args: Array[String]): Unit = {
assert(Stream(1, 2, 3).toList == List(1, 2, 3))
assert(Stream().toList == List())
}
}
/**
* Exercise 5.2: implement Stream.take and Stream.drop.
*
* @author Emil Nilsson
*/
object Exercise52 {
def main (args: Array[String]): Unit = {
assert(Stream(1, 2, 3).take(2) == Stream(1, 2))
assert(Stream(1, 2, 3).take(4) == Stream(1, 2, 3))
assert(Stream(1, 2, 3).take(0) == Empty)
assert(Stream(1, 2, 3).drop(2) == Stream(3))
assert(Stream(1, 2, 3).drop(4) == Empty)
assert(Stream(1).drop(0) == Stream(1))
}
}
/**
* Exercise 5.3: implement Stream.takeWhile.
*
* @author Emil Nilsson
*/
object Exercise53 {
def main(args: Array[String]): Unit = {
assert(Stream(1, 2, 3).takeWhile((x: Int) => x < 3) == Stream(1, 2))
assert(Stream(1, 2, 3).takeWhile((x: Int) => x > 3) == Empty)
assert(Empty.takeWhile((x: Int) => x < 3) == Empty)
}
}
/**
* Exercise 5.4: implement Stream.forAll.
*
* @author Emil Nilsson
*/
object Exercise54 {
def main(args: Array[String]): Unit = {
assert(Stream(1, 3, 5).forAll(_ % 2 == 1) == true)
assert(Stream(1, 2, 3, 5).forAll(_ % 2 == 1) == false)
assert(Stream(1, 3, 5, 6).forAll(_ % 2 == 1) == false)
assert(Empty.forAll((x: Int) => x % 2 == 1) == true)
}
}
/**
* Exercise 5.5: implement Stream.takeWhile using Stream.foldRight.
*
* @author Emil Nilsson
*/
object Exercise55 {
def main(args: Array[String]): Unit = {
assert(Stream(1, 2, 3).takeWhile2((x: Int) => x < 3) == Stream(1, 2))
assert(Stream(1, 2, 3).takeWhile2((x: Int) => x > 3) == Empty)
assert(Empty.takeWhile2((x: Int) => x < 3) == Empty)
}
}
/**
* Exercise 5.6: implement Stream.headOption using Stream.foldRight.
*
* @author Emil Nilsson
*/
object Exercise56 {
def main(args: Array[String]): Unit = {
assert(Stream(1, 2, 3).headOption == Some(1))
assert(Empty.headOption == None)
}
}
/**
* Exercise 5.7: implement Stream.map, Stream.filter, Stream.append and Stream.flatMap using Stream.foldRight.
*
* @author Emil Nilsson
*/
object Exercise57 {
def main(args: Array[String]): Unit = {
assert(Stream(1, 2, 3).map(_.toString) == Stream("1", "2", "3"))
assert(Stream(1, 2, 3).map(_ + 1) == Stream(2, 3, 4))
assert(Empty.map(_.toString) == Empty)
assert(Stream(1, 2, 3, 4, 5).filter(_ % 2 == 0) == Stream(2, 4))
assert(Stream(1, 3, 5, 7).filter(_ % 2 == 0) == Empty)
assert(Empty.filter(_ => false) == Empty)
assert(Stream(1, 2, 3).append(Stream(4, 5, 6)) == Stream(1, 2, 3, 4, 5, 6))
assert(Stream(1, 2, 3).append(Empty) == Stream(1, 2, 3))
assert(Empty.append(Stream(4, 5, 6)) == Stream(4, 5, 6))
assert(Stream(1, 2, 3).flatMap(i => Stream(i, i)) == Stream(1, 1, 2, 2, 3, 3))
assert(Stream(1, 2, 3).flatMap(_ => Empty) == Empty)
assert(Empty.flatMap(_ => Empty) == Empty)
}
}
/**
* Exercise 5.8: implement Stream.constant.
*
* @author Emil Nilsson
*/
object Exercise58 {
def main(args: Array[String]): Unit = {
assert(Stream.constant(1).take(3) == Stream(1, 1, 1))
assert(Stream.constant("foo").take(2) == Stream("foo", "foo"))
}
}
/**
* Exercise 5.9: implement Stream.from.
*
* @author Emil Nilsson
*/
object Exercise59 {
def main(args: Array[String]): Unit = {
assert(Stream.from(0).take(3) == Stream(0, 1, 2))
assert(Stream.from(-10).take(3) == Stream(-10, -9, -8))
}
}
/**
* Exercise 5.10: implement fib using Stream.
*
* @author Emil Nilsson
*/
object Exercise510 {
def main(args: Array[String]): Unit = {
assert(fib.take(8) == Stream(0, 1, 1, 2, 3, 5, 8, 13))
}
def fib: Stream[Int] = {
def go(s: (Int, Int)): Stream[Int] =
Stream.cons(s._1, go(s._2, s._1 + s._2))
go(0, 1)
}
}
/**
* Exercise 5.12: implement fib, from, constant and ones using Stream.unfold.
*
* @author Emil Nilsson
*/
object Exercise512 {
def main(args: Array[String]): Unit = {
assert(fib.take(8) == Stream(0, 1, 1, 2, 3, 5, 8, 13))
assert(from(0).take(3) == Stream(0, 1, 2))
assert(from(-10).take(3) == Stream(-10, -9, -8))
assert(constant(1).take(3) == Stream(1, 1, 1))
assert(constant("foo").take(2) == Stream("foo", "foo"))
assert(ones.take(3) == Stream(1, 1, 1))
}
def fib = Stream.unfold(0, 1) { case(left, right) => Some(left, (right, left + right)) }
def from(n: Int) = Stream.unfold(n)(x => Some(x, x + 1))
def constant[A](a: A) = Stream.unfold()(_ => Some(a, ()))
def ones = Stream.unfold()(_ => Some(1, ()))
}
/**
* Exercise 5.13: implement Stream.map, Stream.take, Stream.takeWhile, Stream.zipWith and Stream.zipAll using
* Stream.unfold.
*
* @author Emil Nilsson
*/
object Exercise513 {
def main(args: Array[String]): Unit = {
assert(Stream(1, 2, 3).map2(_.toString) == Stream("1", "2", "3"))
assert(Stream(1, 2, 3).map2(_ + 1) == Stream(2, 3, 4))
assert(Empty.map2(_.toString) == Empty)
assert(Stream(1, 2, 3).take2(2) == Stream(1, 2))
assert(Stream(1, 2, 3).take2(4) == Stream(1, 2, 3))
assert(Stream(1, 2, 3).take2(0) == Empty)
assert(Stream(1, 2, 3).takeWhile3((x: Int) => x < 3) == Stream(1, 2))
assert(Stream(1, 2, 3).takeWhile3((x: Int) => x > 3) == Empty)
assert(Empty.takeWhile3((x: Int) => x < 3) == Empty)
assert(Stream(1, 2, 3).zipWith(Stream(4, 5, 6))(_ + _) == Stream(5, 7, 9))
assert(Stream("foo", "bar").zipWith(Stream(1, 2, 3))(_ + _.toString) == Stream("foo1", "bar2"))
assert(Stream("foo", "bar").zipWith(Empty)(_ + _.toString) == Empty)
assert((Empty:Stream[String]).zipWith(Empty)(_ + _.toString) == Empty)
assert(Stream(1, 2).zipAll(Stream(1, 4)) == Stream((Some(1), Some(1)), (Some(2), Some(4))))
assert(Stream(1, 2).zipAll(Stream(1)) == Stream((Some(1), Some(1)), (Some(2), None)))
assert(Stream(1).zipAll(Stream(1, 4)) == Stream((Some(1), Some(1)), (None, Some(4))))
}
}
/**
* Exercise 5.14: implement Stream.startsWith.
*
* @author Emil Nilsson
*/
object Exercise514 {
def main(args: Array[String]): Unit = {
assert(Stream(1, 2, 3).startsWith(Stream(1, 2)) == true)
assert(Stream(1, 2).startsWith(Stream(1, 2, 3)) == false)
assert(Stream(1, 2, 3).startsWith(Empty) == true)
assert(Empty.startsWith(Stream(1)) == false)
assert(Empty.startsWith(Empty) == true)
}
}
/**
* Exercise 5.15: implement Stream.tails using Stream.unfold.
*
* @author Emil Nilsson
*/
object Exercise515 {
def main(args: Array[String]): Unit = {
assert(Stream(1, 2, 3).tails == Stream(Stream(1, 2, 3), Stream(2, 3), Stream(3), Stream()))
assert(Empty.tails == Stream(Empty))
}
}
/**
* Exercise 5.16: implement Stream.scanRight and Stream.tails using Stream.scanRight.
*
* @author Emil Nilsson
*/
object Exercise516 {
def main(args: Array[String]): Unit = {
assert(Stream(1, 2, 3).tails2 == Stream(Stream(1, 2, 3), Stream(2, 3), Stream(3), Stream()))
assert(Empty.tails2 == Stream(Empty))
}
}
| enil/fpis-exercises | src/main/scala/io/github/enil/chapter05/ch05.scala | Scala | mit | 15,175 |
/**
*
*/
package io.sipstack.application
import akka.actor.Actor
import akka.actor.Props
import io.sipstack.netty.codec.sip.SipMessageEvent
import akka.actor.ActorRef
import akka.actor.Terminated
import io.sipstack.transport.FlowActor.IncomingMessage
import io.sipstack.transport.FlowActor.Message
object ApplicationSupervisor {
def props(): Props = Props(new ApplicationSupervisor())
}
final class ApplicationSupervisor extends Actor {
override def preStart() {
println("Starting a new ApplicationSupservisor: " + this)
}
def receive = {
case Terminated(actor) => // ignore for now
case msg:IncomingMessage => dispatch(msg)
case que => println("unknown event, ignoring. Got " + que.getClass())
}
private def dispatch(event:IncomingMessage): Unit = {
val callId = event.callId
val child = context.child(callId)
if (child.isDefined) {
child.get forward event
} else {
context.watch(context.actorOf(ApplicationActor.props(), callId)) forward event
}
}
}
| jonbo372/sipstack | sipstack-transaction/src/main/scala/io/sipstack/application/ApplicationSupervisor.scala | Scala | mit | 1,032 |
package scala.meta.internal.semanticdb.javac
import com.sun.source.tree.{ClassTree, CompilationUnitTree}
import com.sun.source.util.{TaskEvent, TaskListener, TreePath, Trees}
import java.nio.file.{Path, Paths}
import javax.lang.model.element.TypeElement
import javax.tools.JavaFileObject
import scala.collection.mutable
import scala.collection.JavaConverters._
import scala.meta.io.AbsolutePath
class SemanticdbListener(targetRoot: Path, sourceRoot: Path, trees: Trees) extends TaskListener {
private val toplevelsProcessed =
mutable.Map[CompilationUnitTree, mutable.Map[TypeElement, Boolean]]()
private def singleFileGen(sourceFile: JavaFileObject, elems: Seq[TypeElement]): Unit = {
val sourceRelativePath = sourceRoot.relativize(Paths.get(sourceFile.toUri))
val gen = new SemanticdbGen(sourceRelativePath, elems)
gen.populate()
gen.persist(targetRoot)
}
override def started(e: TaskEvent): Unit = {}
override def finished(e: TaskEvent): Unit = e.getKind match {
case TaskEvent.Kind.ENTER if e.getSourceFile.getKind == JavaFileObject.Kind.SOURCE =>
val cu = e.getCompilationUnit
// pull the top-level TypeElements from the compilation unit
val elements = cu.getTypeDecls.asScala.map {
case tree: ClassTree =>
val elemPath = TreePath.getPath(cu, tree)
trees.getElement(elemPath).asInstanceOf[TypeElement]
}
assert(
!toplevelsProcessed.contains(cu),
"the same compilation unit has been entered twice, something is very wrong")
toplevelsProcessed(cu) = mutable.Map(elements.map(_ -> false): _*)
case TaskEvent.Kind.ANALYZE =>
val cu = e.getCompilationUnit
val elem = e.getTypeElement
val remainingToplevels = toplevelsProcessed(cu)
remainingToplevels(elem) = true
if (remainingToplevels.values.reduce(_ & _)) {
toplevelsProcessed.remove(cu)
singleFileGen(e.getSourceFile, remainingToplevels.keys.toSeq)
}
case _ => ()
}
}
| olafurpg/scalameta | semanticdb/javac/src/main/scala/scala/meta/internal/semanticdb/javac/SemanticdbListener.scala | Scala | bsd-3-clause | 2,004 |
package fuel.func
import fuel.util.Options
import fuel.core.StatePop
import fuel.util.Counter
object Termination {
object MaxTime {
def apply(opt: Options): Any => Boolean = apply(opt.paramInt("maxTime", 86400000, _ > 0))
def apply(maxMillisec: Int): Any => Boolean = {
val startTime = System.currentTimeMillis()
def timeElapsed = System.currentTimeMillis() - startTime
s: Any => timeElapsed > maxMillisec
}
}
class NoImprovement[S, E] {
def apply(ref: () => (S, E))(implicit ord: PartialOrdering[E]) = {
(s: StatePop[(S, E)]) => !s.exists(
es => ord.tryCompare(es._2, ref()._2).getOrElse(0) < 0)
}
}
class Count {
def apply(cnt: Counter, max: Long) = {
s: Any => cnt.count >= max
}
}
object MaxIter extends Count {
def apply[S](cnt: Counter)(implicit opt: Options): Any => Boolean = {
apply(cnt, opt('maxGenerations, 50, (_: Int) > 0))
}
def apply[S](cnt: Counter, maxGenerations: Int): Any => Boolean = {
super.apply(cnt, maxGenerations)
}
}
def apply[S, E](otherCond: (S, E) => Boolean = (_: S, _: E) => false)(implicit config: Options) = Seq(
MaxTime(config),
(s: StatePop[(S, E)]) => s.exists(es => otherCond(es._1, es._2)))
}
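/**
 * Hedged usage sketch (not part of the original file): MaxTime yields a plain
 * `Any => Boolean` predicate, so a search loop can simply poll it each iteration.
 */
object TerminationExample {
  def main(args: Array[String]): Unit = {
    val timedOut: Any => Boolean = Termination.MaxTime(5000) // stop after roughly five seconds
    while (!timedOut(())) {
      Thread.sleep(100) // placeholder for one iteration of the actual search
    }
  }
}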
| kkrawiec/fuel | src/main/scala/fuel/func/Termination.scala | Scala | mit | 1,258 |
package com.robot.command
object CommandExecutor {
def execute[T](item: T,
commands: Seq[Command[T]]) =
commands.foldLeft(item)((z, c) => c.execute(z))
}
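/**
 * Hedged usage sketch (not from the original repo): CommandExecutor threads an item
 * through each command left to right, each command receiving the previous result.
 * It assumes Command[T] exposes `def execute(item: T): T`, which is only inferred
 * from the `c.execute(z)` call above.
 */
object CommandExecutorExample {
  final case class Add(n: Int) extends Command[Int] {
    def execute(item: Int): Int = item + n // assumed Command signature
  }

  def main(args: Array[String]): Unit =
    println(CommandExecutor.execute(1, Seq(Add(2), Add(3)))) // 1 + 2 + 3 = 6
}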
| ratheeshmohan/robotappscala | src/main/scala/com/robot/command/commandexecutor.scala | Scala | apache-2.0 | 179 |
package org.webant.worker
import akka.actor._
import akka.routing.RoundRobinPool
import org.apache.commons.lang3.StringUtils
import org.apache.log4j.LogManager
import org.webant.commons.entity.{HttpDataEntity, Link}
import org.webant.worker.config.ConfigManager
import org.webant.worker.http.HttpResponse
sealed trait WorkerMessage
case class LinksMessage(links: Iterable[Link]) extends WorkerMessage
case class LinkMessage(link: Link) extends WorkerMessage
case class ResponseMessage(srcLink: Link, resp: HttpResponse[HttpDataEntity]) extends WorkerMessage
case class ResultMessage(link: Link, resp: HttpResponse[HttpDataEntity])
class Worker extends Actor {
def receive: PartialFunction[Any, Unit] = {
case LinkMessage(link) =>
sender ! ResponseMessage(link, extract(link))
}
private def extract(link: Link): HttpResponse[HttpDataEntity] = {
if (link == null || StringUtils.isBlank(link.getTaskId)
|| StringUtils.isBlank(link.getSiteId) || StringUtils.isBlank(link.getUrl))
return null
val task = ConfigManager.getTaskManager(link.getTaskId)
if (task == null) return null
val site = task.getSiteManager(link.getSiteId)
if (site == null) return null
val processor = site.getSiteProcessor
if (processor == null) return null
val resp = processor.process(link)
resp
}
}
class Master(workerNum: Int, listener: ActorRef) extends Actor {
// create a router
val workerRouter = context.actorOf(Props[Worker].withRouter(RoundRobinPool(workerNum)), name = "workerRouter")
def receive = {
case LinksMessage(links) =>
links.foreach(link => {
workerRouter ! LinkMessage(link)
})
case LinkMessage(link) =>
workerRouter ! LinkMessage(link)
// process complete, send a message to listener
case ResponseMessage(srcLink, content) =>
listener ! ResultMessage(srcLink, content)
}
}
class Listener extends Actor {
def receive = {
case ResultMessage(srcLink, resp) =>
if (resp != null && resp.links != null && !resp.links.isEmpty) {
srcLink.setStatus(Link.LINK_STATUS_SUCCESS)
} else {
srcLink.setStatus(Link.LINK_STATUS_FAIL)
}
}
}
class WorkerReactor() {
}
object WorkerReactor {
private val logger = LogManager.getLogger(WorkerReactor.getClass)
var master: ActorRef = _
var system: ActorSystem = _
def apply(): WorkerReactor = {
new WorkerReactor()
}
def start(): Unit = {
if (master == null) {
system = ActorSystem("WebantWorkerActor")
val listener = system.actorOf(Props[Listener], name = "listener")
val workerNum = ConfigManager.getWorkerConfig.threadNum
master = system.actorOf(Props(new Master(workerNum, listener)), name = "master")
logger.info("webant worker start successful.")
}
}
def submit(links: Iterable[Link]): Unit = {
if (links == null || links.isEmpty) return
master ! LinksMessage(links)
}
def submit(link: Link): Unit = {
if (link == null) return
master ! LinkMessage(link)
}
def exit(): Unit = {
if (system != null)
system.shutdown()
}
}
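/**
 * Hedged usage sketch (not part of the original file): the reactor is driven through
 * its companion object. The no-arg Link constructor and the setters below are
 * assumptions, mirrored from the getters read in Worker.extract above.
 */
object WorkerReactorExample {
  def main(args: Array[String]): Unit = {
    WorkerReactor.start()              // boots the actor system, master and listener
    val link = new Link()              // assumed constructor
    link.setTaskId("task-1")           // assumed setter (getTaskId is read above)
    link.setSiteId("site-1")           // assumed setter (getSiteId is read above)
    link.setUrl("http://example.com/") // assumed setter (getUrl is read above)
    WorkerReactor.submit(link)         // forwarded to the master, then to a pooled Worker
    WorkerReactor.exit()               // shuts the actor system down
  }
}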
| sutine/webant | webant-worker/src/main/scala/org/webant/worker/WorkerReactor.scala | Scala | apache-2.0 | 3,129 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.log
import java.io.PrintWriter
import com.yammer.metrics.Metrics
import com.yammer.metrics.core.Gauge
import kafka.utils.{MockTime, TestUtils}
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.test.TestUtils.DEFAULT_MAX_WAIT_MS
import org.apache.kafka.common.record.{CompressionType, RecordBatch}
import org.junit.Assert.{assertFalse, assertTrue, fail}
import org.junit.Test
import scala.collection.JavaConverters.mapAsScalaMapConverter
/**
* This is an integration test that tests the fully integrated log cleaner
*/
class LogCleanerIntegrationTest extends AbstractLogCleanerIntegrationTest {
val codec: CompressionType = CompressionType.LZ4
val time = new MockTime()
val topicPartitions = Array(new TopicPartition("log", 0), new TopicPartition("log", 1), new TopicPartition("log", 2))
@Test(timeout = DEFAULT_MAX_WAIT_MS)
def testMarksPartitionsAsOfflineAndPopulatesUncleanableMetrics() {
val largeMessageKey = 20
val (_, largeMessageSet) = createLargeSingleMessageSet(largeMessageKey, RecordBatch.CURRENT_MAGIC_VALUE)
val maxMessageSize = largeMessageSet.sizeInBytes
cleaner = makeCleaner(partitions = topicPartitions, maxMessageSize = maxMessageSize, backOffMs = 100)
def breakPartitionLog(tp: TopicPartition): Unit = {
val log = cleaner.logs.get(tp)
writeDups(numKeys = 20, numDups = 3, log = log, codec = codec)
val partitionFile = log.logSegments.last.log.file()
val writer = new PrintWriter(partitionFile)
writer.write("jogeajgoea")
writer.close()
writeDups(numKeys = 20, numDups = 3, log = log, codec = codec)
}
def getGauge[T](metricName: String, metricScope: String): Gauge[T] = {
Metrics.defaultRegistry.allMetrics.asScala
.filterKeys(k => {
k.getName.endsWith(metricName) && k.getScope.endsWith(metricScope)
})
.headOption
.getOrElse { fail(s"Unable to find metric $metricName") }
.asInstanceOf[(Any, Gauge[Any])]
._2
.asInstanceOf[Gauge[T]]
}
breakPartitionLog(topicPartitions(0))
breakPartitionLog(topicPartitions(1))
cleaner.startup()
val log = cleaner.logs.get(topicPartitions(0))
val log2 = cleaner.logs.get(topicPartitions(1))
val uncleanableDirectory = log.dir.getParent
val uncleanablePartitionsCountGauge = getGauge[Int]("uncleanable-partitions-count", uncleanableDirectory)
val uncleanableBytesGauge = getGauge[Long]("uncleanable-bytes", uncleanableDirectory)
TestUtils.waitUntilTrue(() => uncleanablePartitionsCountGauge.value() == 2, "There should be 2 uncleanable partitions", 2000L)
val expectedTotalUncleanableBytes = LogCleaner.calculateCleanableBytes(log, 0, log.logSegments.last.baseOffset)._2 +
LogCleaner.calculateCleanableBytes(log2, 0, log2.logSegments.last.baseOffset)._2
TestUtils.waitUntilTrue(() => uncleanableBytesGauge.value() == expectedTotalUncleanableBytes,
s"There should be $expectedTotalUncleanableBytes uncleanable bytes", 1000L)
val uncleanablePartitions = cleaner.cleanerManager.uncleanablePartitions(uncleanableDirectory)
assertTrue(uncleanablePartitions.contains(topicPartitions(0)))
assertTrue(uncleanablePartitions.contains(topicPartitions(1)))
assertFalse(uncleanablePartitions.contains(topicPartitions(2)))
}
}
| gf53520/kafka | core/src/test/scala/unit/kafka/log/LogCleanerIntegrationTest.scala | Scala | apache-2.0 | 4,169 |
/*
* Copyright (C) 2005, The Beangle Software.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.beangle.commons.lang
import org.scalatest.funspec.AnyFunSpec
import org.scalatest.matchers.should.Matchers
class TypeTest extends AnyFunSpec with Matchers {
describe("Types") {
it("java primitives") {
val a = java.lang.Double.valueOf("0")
assert(a.getClass == classOf[java.lang.Double])
}
}
}
|
beangle/commons
|
core/src/test/scala/org/beangle/commons/lang/TypeTest.scala
|
Scala
|
lgpl-3.0
| 1,053 |
object Run extends App {
  def funcionEjemplo(): Unit = {
    println("Hola")
  }
}
|
jaimeguzman/learning
|
test_list.scala
|
Scala
|
apache-2.0
| 81 |
/*
* Copyright (c) 2009, Ken Faulkner
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Ken Faulkner nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package FARQ.utils
import net.lag.configgy.Configgy
import net.lag.logging.Logger
import java.net._
import java.io._
object BulkSetData
{
Configgy.configure("farq.cfg")
var serverPort = Integer.parseInt( Configgy.config.getString("port", "9999" ) )
var serverIP = Configgy.config.getString("server_ip", "127.0.0.1" )
def sendSet( args:Array[String]) =
{
var i = Integer.parseInt( args(0))
for ( a <- 0 to i )
{
var data = a.toString.getBytes
var s = new Socket(serverIP, serverPort)
var out = s.getOutputStream
var dos = new DataOutputStream( out )
var request = "SETMYKEY|".getBytes
dos.write( request )
dos.writeInt( data.length )
dos.write( data, 0, data.length )
s.close()
}
}
def main(args: Array[String]) =
{
sendSet( args)
}
}
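// Hedged companion sketch (editor-added, not part of the original file): a minimal
// reader for the frame that sendSet writes above — the literal "SETMYKEY|" prefix,
// a 4-byte length from writeInt, then the payload. The object and method names are
// assumptions; the real FARQ server may parse requests differently.
object ReadSetFrameExample
{
  def readSetFrame(in: DataInputStream): Array[Byte] =
  {
    val prefix = new Array[Byte]("SETMYKEY|".getBytes.length)
    in.readFully(prefix) // command prefix written by sendSet
    val len = in.readInt() // payload length written with writeInt
    val data = new Array[Byte](len)
    in.readFully(data) // payload bytes
    data
  }
}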
|
kpfaulkner/farq
|
src/farq/utils/bulksetdata.scala
|
Scala
|
bsd-3-clause
| 2,467 |
// Copyright: 2010 - 2017 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
/*
* This file contains derivative works that require the following
* header to be displayed:
*
* Copyright 2002-2014 EPFL.
* Copyright 2011-2014 Typesafe, Inc.
* All rights reserved.
*
* Permission to use, copy, modify, and distribute this software in
* source or binary form for any purpose with or without fee is hereby
* granted, provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above
* copyright notice, this list of conditions and the following
* disclaimer.
* 2. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials
* provided with the distribution.
* 3. Neither the name of the EPFL nor the names of its
* contributors may be used to endorse or promote products
* derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
package org.ensime.core
import org.ensime.indexer._
// Transform symbols to scaladoc components, with anchors to select specific
// members of a container type.
//
// See scala/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala for
// details related to link construction.
trait DocFinding { self: RichPresentationCompiler with SymbolToFqn =>
// companions don't exist in Java, and the $ are . in javadocs
private def cleanClass(n: String): String = n.replaceAll("\\$$", "").replace("$", ".")
private def javaFqn(tpe: Type): DocFqn = {
toFqn(tpe.typeSymbol) match {
case PackageName(parts) =>
DocFqn(parts.mkString("."), "package")
case ClassName(pkg, clazz) =>
DocFqn(pkg.fqnString, cleanClass(clazz))
case FieldName(ClassName(pkg, clazz), field) =>
DocFqn(pkg.fqnString, s"${cleanClass(clazz)}.$field")
case MethodName(ClassName(pkg, clazz), method, _) =>
DocFqn(pkg.fqnString, s"${cleanClass(clazz)}.$method")
}
}
private def isRoot(s: Symbol) = (s eq NoSymbol) || s.isRootSymbol || s.isEmptyPackage || s.isEmptyPackageClass
private def fullPackage(sym: Symbol): String =
sym.ownerChain.reverse.filterNot(isRoot)
.takeWhile(_.hasPackageFlag).map(_.nameString).mkString(".")
private def fullTypeName(sym: Symbol, nestedTypeSep: String, nameString: (Symbol => String)): String =
sym.ownerChain.takeWhile(!_.hasPackageFlag).reverse.map(nameString).mkString(nestedTypeSep)
private def scalaFqn(sym: Symbol): DocFqn = {
def nameString(s: Symbol) = s.nameString + (if ((s.isModule || s.isModuleClass) && !s.hasPackageFlag) "$" else "")
if (sym.isPackageObjectOrClass) {
DocFqn(fullPackage(sym.owner), "package")
} else if (sym.hasPackageFlag) {
DocFqn(fullPackage(sym), ".package")
} else {
DocFqn(fullPackage(sym), fullTypeName(sym, "$", nameString))
}
}
private def linkName(sym: Symbol, java: Boolean): DocFqn = {
if (java) javaFqn(sym.tpe) else scalaFqn(sym)
}
private def signatureString(sym: Symbol, java: Boolean): String =
if (!java)
sym.nameString + sym.signatureString.replaceAll("[\\s]", "")
else toFqn(sym) match {
case PackageName(parts) => ""
case ClassName(pkg, clazz) => ""
case FieldName(_, field) => field
case MethodName(_, method, desc) => method + desc.params.map {
case a: ArrayDescriptor => cleanClass(a.reifier.fqnString) + "[]"
case c: ClassName => cleanClass(c.fqnString)
}.mkString("(", ", ", ")")
}
def docSignature(sym: Symbol, pos: Option[Position]): Option[DocSigPair] = {
def docSig(java: Boolean) = {
val owner = sym.owner
if (sym.isCaseApplyOrUnapply) {
Some(DocSig(linkName(owner.companionClass, java), None))
} else if (sym.isClass || sym.isModule || sym.isTrait || sym.hasPackageFlag)
Some(DocSig(linkName(sym, java), None))
else if (owner.isClass || owner.isModule || owner.isTrait || owner.hasPackageFlag) {
val ownerAtSite = pos.flatMap(specificOwnerOfSymbolAt).getOrElse(owner)
Some(DocSig(linkName(ownerAtSite, java), Some(signatureString(sym, java))))
} else
sym.tpe.typeSymbol.toOption.map(tsym => DocSig(linkName(tsym, java), None))
}
(docSig(java = false), docSig(java = true)) match {
case (Some(scalaSig), Some(javaSig)) => Some(DocSigPair(scalaSig, javaSig))
case _ => None
}
}
}
|
VlachJosef/ensime-server
|
core/src/main/scala/org/ensime/core/DocFinding.scala
|
Scala
|
gpl-3.0
| 5,366 |
/*
* Copyright 2019 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.play.frontend.controller
import org.slf4j.MDC
import play.api.Logger
import play.api.mvc._
import uk.gov.hmrc.http.{HeaderNames, SessionKeys}
import scala.concurrent.Future
object UnauthorisedAction {
def apply(body: (Request[AnyContent] => Result), sensitiveDataFormKeys: Seq[String] = Seq.empty): Action[AnyContent] =
unauthedAction(ActionWithMdc(body), sensitiveDataFormKeys)
def async(
body: (Request[AnyContent] => Future[Result]),
sensitiveDataFormKeys: Seq[String] = Seq.empty): Action[AnyContent] =
unauthedAction(Action.async(body), sensitiveDataFormKeys)
private def unauthedAction(body: Action[AnyContent], sensitiveDataFormKeys: Seq[String]): Action[AnyContent] = body
}
/**
* Use this Action with your endpoints, if they are synchronous and require
* the header carrier values to be logged.
*
* For .async actions the MdcLoggingExecutionContext takes care of it.
*/
object ActionWithMdc extends ActionBuilder[Request] {
private def storeHeaders(request: RequestHeader) {
request.session.get(SessionKeys.userId).foreach(MDC.put(HeaderNames.authorisation, _))
request.session.get(SessionKeys.token).foreach(MDC.put(HeaderNames.token, _))
request.session.get(SessionKeys.sessionId).foreach(MDC.put(HeaderNames.xSessionId, _))
request.headers.get(HeaderNames.xForwardedFor).foreach(MDC.put(HeaderNames.xForwardedFor, _))
request.headers.get(HeaderNames.xRequestId).foreach(MDC.put(HeaderNames.xRequestId, _))
Logger.debug("Request details added to MDC")
}
override def invokeBlock[A](request: Request[A], block: (Request[A]) => Future[Result]): Future[Result] = {
Logger.debug("Invoke block, setting up MDC due to Action creation")
storeHeaders(request)
val r = block(request)
Logger.debug("Clearing MDC")
MDC.clear()
r
}
}
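// Hedged usage sketch (editor-added, not part of the original file): the scaladoc
// above says ActionWithMdc is for synchronous endpoints that need the header-carrier
// values logged. ExampleController and its methods are assumed names; only
// ActionWithMdc and UnauthorisedAction come from this file.
class ExampleController extends Controller {
  // Synchronous endpoint: invokeBlock copies session/header values into the MDC before the body runs
  def hello: Action[AnyContent] = ActionWithMdc { request =>
    Ok(s"hello from ${request.path}")
  }
  // Unauthenticated synchronous endpoint built on the same builder
  def ping: Action[AnyContent] = UnauthorisedAction(_ => Ok("pong"))
}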
|
hmrc/frontend-bootstrap
|
src/main/scala/uk/gov/hmrc/play/frontend/controller/Actions.scala
|
Scala
|
apache-2.0
| 2,455 |
package org.akoshterek.backgammon.nn.encog.activation
import org.encog.engine.network.activation.ActivationFunction
import org.encog.mathutil.BoundNumbers
import org.encog.util.obj.ActivationUtil
/**
* Created by Alex on 20-05-17.
*/
@SerialVersionUID(1L)
class ActivationSoftplus extends ActivationFunction {
private val params = Array[Double] ()
override def activationFunction(x: Array[Double], start: Int, size: Int): Unit = {
var i = start
while (i < start + size) {
x(i) = Math.log(1 + BoundNumbers.bound(Math.exp(x(i))))
i += 1
}
}
override def clone = new ActivationSoftplus()
override def derivativeFunction(b: Double, a: Double): Double = {
// 1.0 / (1 + BoundNumbers.bound(Math.exp(-b))) alternative form
val e = BoundNumbers.bound(Math.exp(b))
e / (e + 1)
}
override def getParamNames: Array[String] = Array[String]()
override def getParams: Array[Double] = params
override def hasDerivative = true
override def setParam(index: Int, value: Double): Unit = {
params(index) = value
}
override def getFactoryCode: String = ActivationUtil.generateActivationFactory("softplus", this)
}
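// Hedged usage sketch (editor-added, not part of the original file): applies the
// softplus activation in place and checks the derivative form noted above.
// SoftplusDemo is an assumed name; it only uses the class defined in this file.
object SoftplusDemo {
  def main(args: Array[String]): Unit = {
    val act = new ActivationSoftplus
    val xs = Array(-2.0, 0.0, 2.0)
    act.activationFunction(xs, 0, xs.length) // each xs(i) becomes log(1 + exp(xs(i)))
    println(xs.mkString(", "))
    // derivative at b = 0 is the logistic value exp(0) / (exp(0) + 1) = 0.5
    println(act.derivativeFunction(0.0, Math.log(2)))
  }
}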
|
akoshterek/MultiGammonJava
|
multi-gammon-core/src/main/java/org/akoshterek/backgammon/nn/encog/activation/ActivationSoftplus.scala
|
Scala
|
gpl-3.0
| 1,171 |
package scala.reflect.internal
import org.junit.Assert._
import org.junit.{After, Assert, Before, Test}
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
import scala.collection.mutable
import scala.tools.nsc.settings.ScalaVersion
import scala.tools.nsc.symtab.SymbolTableForUnitTesting
import language.higherKinds
@RunWith(classOf[JUnit4])
class TypesTest {
object symbolTable extends SymbolTableForUnitTesting
import symbolTable._, definitions._
@Test
def testRefinedTypeSI8611(): Unit = {
def stringNarrowed = StringTpe.narrow
assert(stringNarrowed != stringNarrowed)
assert(!(stringNarrowed =:= stringNarrowed))
def boolWithString = refinedType(BooleanTpe :: StringTpe :: Nil, NoSymbol)
assert(boolWithString != boolWithString)
assert(boolWithString =:= boolWithString)
val boolWithString1 = boolWithString
val boolWithString1narrow1 = boolWithString1.narrow
val boolWithString1narrow2 = boolWithString1.narrow
// Two narrowings of the same refinement end up =:=. This was the root
// cause of scala/bug#8611. See `narrowUniquely` in `Logic` for the workaround.
assert(boolWithString1narrow1 =:= boolWithString1narrow2)
val uniquelyNarrowed1 = refinedType(boolWithString1narrow1 :: Nil, NoSymbol)
val uniquelyNarrowed2 = refinedType(boolWithString1narrow2 :: Nil, NoSymbol)
assert(uniquelyNarrowed1 =:= uniquelyNarrowed2)
}
@Test
def testTransitivityWithModuleTypeRef(): Unit = {
import rootMirror.EmptyPackageClass
val (module, moduleClass) = EmptyPackageClass.newModuleAndClassSymbol(TermName("O"), NoPosition, 0L)
val minfo = ClassInfoType(List(ObjectTpe), newScope, moduleClass)
module.moduleClass setInfo minfo
module setInfo module.moduleClass.tpe
val tp1 = TypeRef(ThisType(EmptyPackageClass), moduleClass, Nil)
val tp2 = SingleType(ThisType(EmptyPackageClass), module)
val tp3 = ThisType(moduleClass)
val tps = List(tp1, tp2, tp3)
val results = mutable.Buffer[String]()
tps.permutations.foreach {
case ts @ List(a, b, c) =>
def tsShownRaw = ts.map(t => showRaw(t)).mkString(", ")
if (a <:< b && b <:< c && !(a <:< c)) results += s"<:< intransitive: $tsShownRaw"
if (a =:= b && b =:= c && !(a =:= c)) results += s"=:= intransitive: $tsShownRaw"
}
results.toList match {
case Nil => // okay
case xs =>
Assert.fail(xs.mkString("\n"))
}
}
@Test
def testRefinementContains(): Unit = {
val refinement = typeOf[{def foo: Int}]
assert(refinement.isInstanceOf[RefinedType])
assert(refinement.contains(IntClass))
val elem0 = refinement.baseTypeSeq(0)
assert(elem0.isInstanceOf[RefinementTypeRef])
assert(elem0.contains(IntClass))
}
@Test
def testRefinedLubs(): Unit = {
// https://github.com/scala/scala-dev/issues/168
assertEquals(typeOf[Option[AnyVal]], lub(typeOf[Option[Int] with Option[Char]] :: typeOf[Option[Boolean] with Option[Short]] :: Nil))
assertEquals(typeOf[Option[AnyVal]], lub(typeOf[Option[Int] with Option[Char]] :: typeOf[Option[Boolean]] :: Nil))
assertEquals(typeOf[Option[AnyVal]], lub((typeOf[Option[Int] with Option[Char]] :: typeOf[Option[Boolean] with Option[Short]] :: Nil).reverse))
assertEquals(typeOf[Option[AnyVal]], lub((typeOf[Option[Int] with Option[Char]] :: typeOf[Option[Boolean]] :: Nil).reverse))
}
@Test
def testExistentialRefinement(): Unit = {
import rootMirror.EmptyPackageClass
// class M[A]
val MClass = EmptyPackageClass.newClass("M")
val A = MClass.newTypeParameter("A").setInfo(TypeBounds.empty)
MClass.setInfo(PolyType(A :: Nil, ClassInfoType(ObjectClass.tpeHK :: Nil, newScopeWith(), MClass)))
// (M[Int] with M[X] { def m: Any }) forSome { type X }
val X = NoSymbol.newExistential("X").setInfo(TypeBounds.empty)
val T: Type = {
val decls = newScopeWith(MClass.newMethod("m").setInfo(NullaryMethodType(AnyClass.tpeHK)))
val refined = refinedType(appliedType(MClass, IntClass.tpeHK) :: appliedType(MClass, X.tpeHK) :: Nil, NoSymbol, decls, NoPosition)
newExistentialType(X :: Nil, refined)
}
val RefinementClass = T.underlying.typeSymbol
assertTrue(RefinementClass.isRefinementClass)
TypeRef(NoPrefix, RefinementClass, Nil) match {
case rtr : RefinementTypeRef =>
// ContainsCollector needs to look inside the info of symbols of RefinementTypeRefs
assert(rtr.contains(X))
}
val underlying = T.underlying
val baseTypeSeqIndices = T.baseTypeSeq.toList.indices
for (i <- baseTypeSeqIndices) {
// Elements of the existential type should have the same type symbol as underlying
assertEquals(T.baseTypeSeq.typeSymbol(i), underlying.baseTypeSeq.typeSymbol(i))
}
// Type symbols should be distinct
def checkDistinctTypeSyms(bts: BaseTypeSeq): Unit = {
val syms = baseTypeSeqIndices.map(bts.typeSymbol)
assertEquals(syms, syms.distinct)
}
checkDistinctTypeSyms(T.baseTypeSeq)
checkDistinctTypeSyms(T.underlying.baseTypeSeq)
// This is the entry for the refinement class
assertTrue(T.baseTypeSeq.typeSymbol(0).isRefinementClass)
assertEquals("M[Int] with M[X]{def m: Any} forSome { type X }", T.baseTypeSeq.rawElem(0).toString)
// This is the entry for M. The raw entry is an existential over a RefinedType which encodes a lazily computed base type
assertEquals(T.baseTypeSeq.typeSymbol(1), MClass)
assertEquals("M[X] with M[Int] forSome { type X }", T.baseTypeSeq.rawElem(1).toString)
// calling `apply` merges the prefix/args of the elements of the RefinedType and rewraps in the existential
assertEquals("M[_1] forSome { type X; type _1 >: X with Int }", T.baseTypeSeq.apply(1).toString)
}
@Test
def testExistentialMerge(): Unit = {
val ts = typeOf[Set[Any]] :: typeOf[Set[X] forSome { type X <: Y; type Y <: Int}] :: Nil
def merge(ts: List[Type]) = mergePrefixAndArgs(ts, Variance.Contravariant, lubDepth(ts))
val merged1 = merge(ts)
val merged2 = merge(ts.reverse)
assert(ts.forall(_ <:< merged1)) // used to fail before the fix to mergePrefixAndArgs for existentials
assert(ts.forall(_ <:< merged2))
assert(merged1 =:= merged2)
}
class Foo[A]
class Bar[+T, A]
class Baz {
def f[F[_]] = ()
def g[G[_, _]] = ()
}
var storedXsource: ScalaVersion = null
@Before
def storeXsource: Unit = {
storedXsource = settings.source.value
}
@After
def restoreXsource: Unit = {
settings.source.value = storedXsource
}
@Test
def testHigherKindedTypeVarUnification(): Unit = {
import rootMirror.EmptyPackageClass
import Flags._
val FooTpe = typeOf[Foo[Int]] match {
case TypeRef(pre, sym, _) =>
sym.typeParams // doing it for the side effect
TypeRef(pre, sym, Nil)
}
val BarTpe = typeOf[Bar[Int, Int]] match {
case TypeRef(pre, sym, _) =>
sym.typeParams // doing it for the side effect
TypeRef(pre, sym, Nil)
}
// apply Foo to type argument A
def Foo(A: Type) = FooTpe match {
case TypeRef(pre, sym, Nil) => TypeRef(pre, sym, A :: Nil)
}
// apply Bar to type arguments A, B
def Bar(A: Type, B: Type) = BarTpe match {
case TypeRef(pre, sym, Nil) => TypeRef(pre, sym, A :: B :: Nil)
}
val F0 = typeOf[Baz].member(TermName("f")).typeSignature.typeParams.head
val G0 = typeOf[Baz].member(TermName("g")).typeSignature.typeParams.head
// since TypeVars are mutable, we will be creating fresh ones
def F() = TypeVar(F0)
def G() = TypeVar(G0)
def polyType(f: TypeVar => Type, flags: Long = 0L): Type = {
val A = EmptyPackageClass.newTypeParameter(newTypeName("A"), newFlags = flags)
A.setInfo(TypeBounds.empty)
val A_ = TypeVar(A)
PolyType(A :: Nil, f(A_))
}
def coPolyType(f: TypeVar => Type): Type =
polyType(f, COVARIANT)
def polyType2(f: (TypeVar, TypeVar) => Type): Type = {
val A = EmptyPackageClass.newTypeParameter(newTypeName("A"))
val B = EmptyPackageClass.newTypeParameter(newTypeName("B"))
A.setInfo(TypeBounds.empty)
B.setInfo(TypeBounds.empty)
val A_ = TypeVar(A)
val B_ = TypeVar(B)
PolyType(A :: B :: Nil, f(A_, B_))
}
val Any = typeOf[Any]
val Int = typeOf[Int]
settings.source.value = ScalaVersion("2.13")
// test that ?F unifies with Foo
assert(F() <:< FooTpe)
assert(FooTpe <:< F())
assert(F() =:= FooTpe)
assert(FooTpe =:= F())
// test that ?F unifies with [A]Foo[A]
assert(F() <:< polyType(A => Foo(A)))
assert(polyType(A => Foo(A)) <:< F())
assert(F() =:= polyType(A => Foo(A)))
assert(polyType(A => Foo(A)) =:= F())
// test that ?F unifies with [A]Bar[Int, A]
assert(F() <:< polyType(A => Bar(Int, A)))
assert(polyType(A => Bar(Int, A)) <:< F())
assert(F() =:= polyType(A => Bar(Int, A)))
assert(polyType(A => Bar(Int, A)) =:= F())
// test that ?F unifies with [A]Bar[A, Int]
assert(F() <:< polyType(A => Bar(A, Int)))
assert(polyType(A => Bar(A, Int)) <:< F())
assert(F() =:= polyType(A => Bar(A, Int)))
assert(polyType(A => Bar(A, Int)) =:= F())
// test that ?F unifies with [+A]Bar[A, Int]
assert(F() <:< coPolyType(A => Bar(A, Int)))
assert(coPolyType(A => Bar(A, Int)) <:< F())
assert(F() =:= coPolyType(A => Bar(A, Int)))
assert(coPolyType(A => Bar(A, Int)) =:= F())
// test that ?F unifies with [A]Foo[Foo[A]]
assert(F() <:< polyType(A => Foo(Foo(A))))
assert(polyType(A => Foo(Foo(A))) <:< F())
assert(F() =:= polyType(A => Foo(Foo(A))))
assert(polyType(A => Foo(Foo(A))) =:= F())
// test that ?F unifies with [A]Foo[Bar[A, A]]
assert(F() <:< polyType(A => Foo(Bar(A, A))))
assert(polyType(A => Foo(Bar(A, A))) <:< F())
assert(F() =:= polyType(A => Foo(Bar(A, A))))
assert(polyType(A => Foo(Bar(A, A))) =:= F())
// test that ?F unifies with [A]Bar[Foo[A], Foo[A]]
assert(F() <:< polyType(A => Bar(Foo(A), Foo(A))))
assert(polyType(A => Bar(Foo(A), Foo(A))) <:< F())
assert(F() =:= polyType(A => Bar(Foo(A), Foo(A))))
assert(polyType(A => Bar(Foo(A), Foo(A))) =:= F())
// test that ?F unifies with [A]A
assert(F() <:< polyType(A => A))
assert(polyType(A => A) <:< F())
assert(F() =:= polyType(A => A))
assert(polyType(A => A) =:= F())
// test that ?F unifies with [A]Int
assert(F() <:< polyType(A => Int))
assert(polyType(A => Int) <:< F())
assert(F() =:= polyType(A => Int))
assert(polyType(A => Int) =:= F())
// test that ?F unifies with [A]Foo[Int]
assert(F() <:< polyType(A => Foo(Int)))
assert(polyType(A => Foo(Int)) <:< F())
assert(F() =:= polyType(A => Foo(Int)))
assert(polyType(A => Foo(Int)) =:= F())
// test that ?G unifies with Bar
assert(G() <:< BarTpe)
assert(BarTpe <:< G())
assert(G() =:= BarTpe)
assert(BarTpe =:= G())
// test that ?G unifies with [A, B]Bar[A, B]
assert(G() <:< polyType2((A, B) => Bar(A, B)))
assert(polyType2((A, B) => Bar(A, B)) <:< G())
assert(G() =:= polyType2((A, B) => Bar(A, B)))
assert(polyType2((A, B) => Bar(A, B)) =:= G())
// test that ?G unifies with [A, B]Bar[B, A]
assert(G() <:< polyType2((A, B) => Bar(B, A)))
assert(polyType2((B, A) => Bar(A, B)) <:< G())
assert(G() =:= polyType2((A, B) => Bar(B, A)))
assert(polyType2((B, A) => Bar(A, B)) =:= G())
// test that ?G unifies with [A, B]Bar[Bar[B, A], A]
assert(G() <:< polyType2((A, B) => Bar(Bar(B, A), A)))
assert(polyType2((A, B) => Bar(Bar(B, A), A)) <:< G())
assert(G() =:= polyType2((A, B) => Bar(Bar(B, A), A)))
assert(polyType2((A, B) => Bar(Bar(B, A), A)) =:= G())
// test that [A]Bar[Int, A] <:< ?F <:< [A]Bar[Any, A]
F() match { case _F =>
assert(polyType(A => Bar(Int, A)) <:< _F && _F <:< polyType(A => Bar(Any, A)))
}
}
@Test
def testAnyNothing(): Unit = {
object Foo { val a: Any = 23 ; val n: Nothing = ??? }
val aSym = typeOf[Foo.type].member(TermName("a"))
val nSym = typeOf[Foo.type].member(TermName("n"))
assert(typeIsAnyOrJavaObject(AnyTpe))
assert(typeIsNothing(NothingTpe))
assert(!typeIsAnyOrJavaObject(LiteralType(Constant(1))))
assert(!typeIsAnyOrJavaObject(SingleType(NoPrefix, aSym)))
assert(!typeIsNothing(SingleType(NoPrefix, nSym)))
}
@Test
def testSameTypesLub(): Unit = {
def testSameType(tpe: Type, num: Int = 5) = assert(lub(List.fill(num)(tpe)) =:= tpe)
testSameType(IntTpe)
testSameType(StringTpe)
testSameType(typeOf[Class[String]])
testSameType(LiteralType(Constant(1)))
testSameType(LiteralType(Constant("test")))
}
@Test
def testTypesLub(): Unit = {
val interestingCombos: Map[Type, List[List[Type]]] = Map(
IntTpe -> List(
List(ConstantType(Constant(0)), IntTpe),
List(ConstantType(Constant(0)), LiteralType(Constant(1))),
List(LiteralType(Constant(0)), ConstantType(Constant(1)))
),
StringTpe -> List(
List(LiteralType(Constant("a")), LiteralType(Constant("b"))),
List(LiteralType(Constant("a")), StringTpe),
List(ConstantType(Constant("a")), StringTpe),
List(ConstantType(Constant("a")), LiteralType(Constant("b"))),
List(ConstantType(Constant("a")), LiteralType(Constant("b")))
),
LiteralType(Constant(1)) -> List(
List(LiteralType(Constant(1)), LiteralType(Constant(1))),
List(ConstantType(Constant(1)), LiteralType(Constant(1))),
List(LiteralType(Constant(1)), ConstantType(Constant(1)))
),
LiteralType(Constant("a")) -> List(
List(LiteralType(Constant("a")), LiteralType(Constant("a"))),
List(ConstantType(Constant("a")), LiteralType(Constant("a"))),
List(LiteralType(Constant("a")), ConstantType(Constant("a")))
),
AnyValTpe -> List(
List(LiteralType(Constant(1)), IntTpe, DoubleTpe)
),
typeOf[Class[String]] -> List(
List(typeOf[Class[String]], typeOf[Class[String]])
),
typeOf[Class[_ >: String <: Object]] -> List(
List(typeOf[Class[String]], typeOf[Class[Object]])
)
)
interestingCombos foreach { case (result, checks) =>
checks.foreach(check => assert(lub(check) =:= result))
}
}
}
|
martijnhoekstra/scala
|
test/junit/scala/reflect/internal/TypesTest.scala
|
Scala
|
apache-2.0
| 14,428 |
package com.insweat.hssd.lib.util.logging
abstract class Handler(protected var _level: Int) extends FilteredObject {
def level: Int = _level
def level_=(l: Int): Unit = _level = l
def handle(record: Record): Boolean = {
val rv = filter(record)
if(rv) {
emit(record)
}
rv
}
protected def emit(record: Record): Unit
protected def format(record: Record): String = {
val levelName = getLevelName(record.level)
record.exception match {
case Some(s) =>
s"${record.timestamp} [${levelName}] ${record.msg} ${s}"
case None =>
s"${record.timestamp} [${levelName}] ${record.msg}"
}
}
override protected def filter(record: Record): Boolean = {
this.level <= record.level && super.filter(record)
}
}
class ConsoleHandler(_level: Int) extends Handler(_level) {
override protected def emit(record: Record): Unit = {
if(record.level >= LEVEL_WARNING) {
Console.err.println(format(record))
}
else {
Console.out.println(format(record))
}
}
}
|
insweat/hssd
|
com.insweat.hssd.lib/src/com/insweat/hssd/lib/util/logging/Handler.scala
|
Scala
|
lgpl-3.0
| 1,212 |
package com.eevolution.context.dictionary.infrastructure.repository
import java.util.UUID
import com.eevolution.context.dictionary.domain._
import com.eevolution.context.dictionary.domain.model.ViewTrl
import com.eevolution.context.dictionary.infrastructure.db.DbContext._
import com.eevolution.utils.PaginatedSequence
import com.lightbend.lagom.scaladsl.persistence.jdbc.JdbcSession
import scala.concurrent.{ExecutionContext, Future}
/**
* Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* Email: [email protected], http://www.e-evolution.com , http://github.com/e-Evolution
* Created by [email protected] , www.e-evolution.com
*/
/**
* View Trl Repository
* @param session
* @param executionContext
*/
class ViewTrlRepository (session: JdbcSession)(implicit executionContext: ExecutionContext)
extends api.repository.ViewTrlRepository[ViewTrl , Int]
with ViewTrlMapping {
def getById(id: Int): Future[ViewTrl] = {
getByLanguage(id , "en_US")
}
def getByLanguage(id: Int , lang : String): Future[ViewTrl] = {
Future(run(queryViewTrl.filter(view => view.viewId == lift(id)
&& view.language == lift(lang))).headOption.get)
}
def getByUUID(uuid: UUID): Future[ViewTrl] = {
Future(run(queryViewTrl.filter(_.uuid == lift(uuid.toString))).headOption.get)
}
def getByViewTrlId(id : Int) : Future[List[ViewTrl]] = {
Future(run(queryViewTrl))
}
def getAll() : Future[List[ViewTrl]] = {
Future(run(queryViewTrl))
}
def getAllByPage(page: Int, pageSize: Int): Future[PaginatedSequence[ViewTrl]] = {
val offset = page * pageSize
val limit = (page + 1) * pageSize
for {
count <- countViewTrl()
elements <- if (offset > count) Future.successful(Nil)
else selectViewTrl(offset, limit)
} yield {
PaginatedSequence(elements, page, pageSize, count)
}
}
private def countViewTrl() = {
Future(run(queryViewTrl.size).toInt)
}
private def selectViewTrl(offset: Int, limit: Int): Future[Seq[ViewTrl]] = {
Future(run(queryViewTrl).drop(offset).take(limit).toSeq)
}
}
|
adempiere/ADReactiveSystem
|
dictionary-impl/src/main/scala/com/eevolution/context/dictionary/infrastructure/repository/ViewTrlRepository.scala
|
Scala
|
gpl-3.0
| 2,802 |
package controllers.your_income
import org.specs2.mutable._
import utils.WithApplication
class GStatutoryMaternityPaternityAdoptionPayFormSpec extends Specification {
section ("unit", models.domain.StatutoryMaternityPaternityAdoptionPay.id)
"Statutory Maternity Paternity Adoption Pay Form" should {
val whoPaysYou = "The Man"
val howMuch = "12"
val yes = "yes"
val no = "no"
val monthlyFrequency = "Monthly"
val paymentType = "MaternityOrPaternityPay"
"map data into case class" in new WithApplication {
GStatutoryMaternityPaternityAdoptionPay.form.bind(
Map(
"paymentTypesForThisPay" -> paymentType,
"stillBeingPaidThisPay_paternityMaternityAdoption" -> yes,
"whenDidYouLastGetPaid" -> "",
"whoPaidYouThisPay_paternityMaternityAdoption" -> whoPaysYou,
"amountOfThisPay" -> howMuch,
"howOftenPaidThisPay" -> monthlyFrequency,
"howOftenPaidThisPayOther" -> ""
)
).fold(
formWithErrors => "This mapping should not happen." must equalTo("Error"),
f => {
f.paymentTypesForThisPay must equalTo(paymentType)
f.stillBeingPaidThisPay must equalTo(yes)
f.whenDidYouLastGetPaid must equalTo(None)
f.whoPaidYouThisPay must equalTo(whoPaysYou)
f.amountOfThisPay must equalTo(howMuch)
f.howOftenPaidThisPay must equalTo(monthlyFrequency)
f.howOftenPaidThisPayOther must equalTo(None)
})
}
"reject invalid yesNo answers" in new WithApplication {
GStatutoryMaternityPaternityAdoptionPay.form.bind(
Map(
"stillBeingPaidThisPay_paternityMaternityAdoption" -> "INVALID"
)
).fold(
formWithErrors => {
formWithErrors.errors.length must equalTo(5)
formWithErrors.errors(0).message must equalTo("error.required")
formWithErrors.errors(1).message must equalTo("yesNo.invalid")
},
f => "This mapping should not happen." must equalTo("Valid"))
}
"reject a howOften frequency of other with no other text entered" in new WithApplication {
GStatutoryMaternityPaternityAdoptionPay.form.bind(
Map(
"paymentTypesForThisPay" -> paymentType,
"stillBeingPaidThisPay_paternityMaternityAdoption" -> yes,
"whenDidYouLastGetPaid" -> "",
"whoPaidYouThisPay_paternityMaternityAdoption" -> whoPaysYou,
"amountOfThisPay" -> howMuch,
"howOftenPaidThisPay" -> "Other",
"howOftenPaidThisPayOther" -> ""
)
).fold(
formWithErrors => {
formWithErrors.errors.length must equalTo(1)
formWithErrors.errors(0).message must equalTo("howOftenPaidThisPay.required")
},
f => "This mapping should not happen." must equalTo("Valid"))
}
"reject when last paid blank when answer is no" in new WithApplication {
GStatutoryMaternityPaternityAdoptionPay.form.bind(
Map(
"paymentTypesForThisPay" -> paymentType,
"stillBeingPaidThisPay_paternityMaternityAdoption" -> no,
"whenDidYouLastGetPaid" -> "",
"whoPaidYouThisPay_paternityMaternityAdoption" -> whoPaysYou,
"amountOfThisPay" -> howMuch,
"howOftenPaidThisPay" -> monthlyFrequency,
"howOftenPaidThisPayOther" -> ""
)
).fold(
formWithErrors => {
formWithErrors.errors.length must equalTo(1)
formWithErrors.errors(0).message must equalTo("whenDidYouLastGetPaid.required")
},
f => "This mapping should not happen." must equalTo("Valid"))
}
}
section ("unit", models.domain.StatutoryMaternityPaternityAdoptionPay.id)
}
|
Department-for-Work-and-Pensions/ClaimCapture
|
c3/test/controllers/your_income/GStatutoryMaternityPaternityAdoptionPayFormSpec.scala
|
Scala
|
mit
| 3,787 |
package org.scaladebugger.api.profiles.java.info
import com.sun.jdi._
import org.scaladebugger.api.profiles.traits.info._
import org.scaladebugger.api.virtualmachines.ScalaVirtualMachine
import org.scaladebugger.test.helpers.ParallelMockFunSpec
import org.scalamock.scalatest.MockFactory
import org.scalatest.{FunSpec, Matchers, ParallelTestExecution}
class JavaValueInfoSpec extends ParallelMockFunSpec
{
private val mockScalaVirtualMachine = mock[ScalaVirtualMachine]
private val mockInfoProducerProfile = mock[InfoProducer]
private val mockValue = mock[Value]
private val mockNewPrimitiveProfile = mockFunction[PrimitiveValue, PrimitiveInfo]
private val mockNewObjectProfile = mockFunction[ObjectReference, ObjectInfo]
private val mockNewArrayProfile = mockFunction[ArrayReference, ArrayInfo]
private val mockNewThreadProfile = mockFunction[ThreadReference, ThreadInfo]
private val mockNewThreadGroupProfile = mockFunction[ThreadGroupReference, ThreadGroupInfo]
private val mockNewClassObjectProfile = mockFunction[ClassObjectReference, ClassObjectInfo]
private val mockNewClassLoaderProfile = mockFunction[ClassLoaderReference, ClassLoaderInfo]
private val mockNewStringProfile = mockFunction[StringReference, StringInfo]
describe("JavaValueInfo") {
describe("#toJavaInfo") {
it("should return a new instance of the Java profile representation") {
val expected = mock[ValueInfo]
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mockValue
)
// Get Java version of info producer
(mockInfoProducerProfile.toJavaInfo _).expects()
.returning(mockInfoProducerProfile).once()
// Create new info profile using Java version of info producer
(mockInfoProducerProfile.newValueInfo _)
.expects(mockScalaVirtualMachine, mockValue)
.returning(expected).once()
val actual = javaValueInfoProfile.toJavaInfo
actual should be (expected)
}
}
describe("#isJavaInfo") {
it("should return true") {
val expected = true
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mockValue
)
val actual = javaValueInfoProfile.isJavaInfo
actual should be (expected)
}
}
describe("#toJdiInstance") {
it("should return the JDI instance this profile instance represents") {
val expected = mockValue
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mockValue
)
val actual = javaValueInfoProfile.toJdiInstance
actual should be (expected)
}
}
describe("#typeInfo") {
it("should supply a type info wrapper even if the value is null") {
val expected = mock[TypeInfo]
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
null
) {
// NOTE: ScalaMock does not allow us to supply null to mock argument,
// so throwing an error if we aren't supplied with null
override protected def newTypeProfile(_type: Type): TypeInfo = {
require(_type == null)
expected
}
}
val actual = javaValueInfoProfile.`type`
actual should be (expected)
}
it("should should return a new type info profile wrapping the type") {
val expected = mock[TypeInfo]
val mockType = mock[Type]
(mockValue.`type` _).expects().returning(mockType).once()
val mockNewTypeProfileFunction = mockFunction[Type, TypeInfo]
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mockValue
) {
override protected def newTypeProfile(_type: Type): TypeInfo =
mockNewTypeProfileFunction(_type)
}
mockNewTypeProfileFunction.expects(mockType).returning(expected).once()
val actual = javaValueInfoProfile.`type`
actual should be (expected)
}
}
describe("#toArrayInfo") {
it("should throw an assertion error if the value is null") {
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
null
)
intercept[AssertionError] {
javaValueInfoProfile.toArrayInfo
}
}
it("should throw an assertion error if the value is not an array") {
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mock[Value]
)
intercept[AssertionError] {
javaValueInfoProfile.toArrayInfo
}
}
it("should return an array reference wrapped in a profile") {
val expected = mock[ArrayInfo]
val mockArrayReference = mock[ArrayReference]
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mockArrayReference
) {
override protected def newArrayProfile(
arrayReference: ArrayReference
): ArrayInfo = mockNewArrayProfile(arrayReference)
}
mockNewArrayProfile.expects(mockArrayReference)
.returning(expected).once()
val actual = javaValueInfoProfile.toArrayInfo
actual should be (expected)
}
}
describe("#toClassLoaderInfo") {
it("should throw an assertion error if the value is null") {
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
null
)
intercept[AssertionError] {
javaValueInfoProfile.toClassLoaderInfo
}
}
it("should throw an assertion error if the value is not an class loader") {
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mock[Value]
)
intercept[AssertionError] {
javaValueInfoProfile.toClassLoaderInfo
}
}
it("should return an class loader reference wrapped in a profile") {
val expected = mock[ClassLoaderInfo]
val mockClassLoaderReference = mock[ClassLoaderReference]
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mockClassLoaderReference
) {
override protected def newClassLoaderProfile(
classLoaderReference: ClassLoaderReference
): ClassLoaderInfo = mockNewClassLoaderProfile(classLoaderReference)
}
mockNewClassLoaderProfile.expects(mockClassLoaderReference)
.returning(expected).once()
val actual = javaValueInfoProfile.toClassLoaderInfo
actual should be (expected)
}
}
describe("#toClassObjectInfo") {
it("should throw an assertion error if the value is null") {
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
null
)
intercept[AssertionError] {
javaValueInfoProfile.toClassObjectInfo
}
}
it("should throw an assertion error if the value is not an class object") {
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mock[Value]
)
intercept[AssertionError] {
javaValueInfoProfile.toClassObjectInfo
}
}
it("should return an class object reference wrapped in a profile") {
val expected = mock[ClassObjectInfo]
val mockClassObjectReference = mock[ClassObjectReference]
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mockClassObjectReference
) {
override protected def newClassObjectProfile(
classObjectReference: ClassObjectReference
): ClassObjectInfo = mockNewClassObjectProfile(classObjectReference)
}
mockNewClassObjectProfile.expects(mockClassObjectReference)
.returning(expected).once()
val actual = javaValueInfoProfile.toClassObjectInfo
actual should be (expected)
}
}
describe("#toThreadGroupInfo") {
it("should throw an assertion error if the value is null") {
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
null
)
intercept[AssertionError] {
javaValueInfoProfile.toThreadGroupInfo
}
}
it("should throw an assertion error if the value is not an thread group") {
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mock[Value]
)
intercept[AssertionError] {
javaValueInfoProfile.toThreadGroupInfo
}
}
it("should return an thread group reference wrapped in a profile") {
val expected = mock[ThreadGroupInfo]
val mockThreadGroupReference = mock[ThreadGroupReference]
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mockThreadGroupReference
) {
override protected def newThreadGroupProfile(
threadGroupReference: ThreadGroupReference
): ThreadGroupInfo = mockNewThreadGroupProfile(threadGroupReference)
}
mockNewThreadGroupProfile.expects(mockThreadGroupReference)
.returning(expected).once()
val actual = javaValueInfoProfile.toThreadGroupInfo
actual should be (expected)
}
}
describe("#toThreadInfo") {
it("should throw an assertion error if the value is null") {
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
null
)
intercept[AssertionError] {
javaValueInfoProfile.toThreadInfo
}
}
it("should throw an assertion error if the value is not an thread") {
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mock[Value]
)
intercept[AssertionError] {
javaValueInfoProfile.toThreadInfo
}
}
it("should return an thread reference wrapped in a profile") {
val expected = mock[ThreadInfo]
val mockThreadReference = mock[ThreadReference]
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mockThreadReference
) {
override protected def newThreadProfile(
threadReference: ThreadReference
): ThreadInfo = mockNewThreadProfile(threadReference)
}
mockNewThreadProfile.expects(mockThreadReference)
.returning(expected).once()
val actual = javaValueInfoProfile.toThreadInfo
actual should be (expected)
}
}
describe("#toStringInfo") {
it("should throw an assertion error if the value is null") {
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
null
)
intercept[AssertionError] {
javaValueInfoProfile.toStringInfo
}
}
it("should throw an assertion error if the value is not a string") {
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mock[Value]
)
intercept[AssertionError] {
javaValueInfoProfile.toStringInfo
}
}
it("should return a string reference wrapped in a profile") {
val expected = mock[StringInfo]
val mockStringReference = mock[StringReference]
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mockStringReference
) {
override protected def newStringProfile(
stringReference: StringReference
): StringInfo = mockNewStringProfile(stringReference)
}
mockNewStringProfile.expects(mockStringReference)
.returning(expected).once()
val actual = javaValueInfoProfile.toStringInfo
actual should be (expected)
}
}
describe("#toPrimitiveInfo") {
it("should throw an assertion error if the value is null") {
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
null
)
intercept[AssertionError] {
javaValueInfoProfile.toPrimitiveInfo
}
}
it("should throw an assertion error if the value is not an primitive") {
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mock[Value]
)
intercept[AssertionError] {
javaValueInfoProfile.toPrimitiveInfo
}
}
it("should return a primitive value wrapped in a profile") {
val expected = mock[PrimitiveInfo]
val mockPrimitiveValue = mock[PrimitiveValue]
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mockPrimitiveValue
) {
override protected def newPrimitiveProfile(
primitiveValue: PrimitiveValue
): PrimitiveInfo = mockNewPrimitiveProfile(primitiveValue)
}
mockNewPrimitiveProfile.expects(mockPrimitiveValue)
.returning(expected).once()
val actual = javaValueInfoProfile.toPrimitiveInfo
actual should be (expected)
}
it("should return a void value wrapped in a profile") {
val expected = mock[PrimitiveInfo]
val mockVoidValue = mock[VoidValue]
val mockNewPrimitiveProfile = mockFunction[VoidValue, PrimitiveInfo]
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mockVoidValue
) {
override protected def newPrimitiveProfile(
voidValue: VoidValue
): PrimitiveInfo = mockNewPrimitiveProfile(voidValue)
}
mockNewPrimitiveProfile.expects(mockVoidValue)
.returning(expected).once()
val actual = javaValueInfoProfile.toPrimitiveInfo
actual should be (expected)
}
}
describe("#toObjectInfo") {
it("should throw an assertion error if the value is null") {
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
null
)
intercept[AssertionError] {
javaValueInfoProfile.toObjectInfo
}
}
it("should throw an assertion error if the value is not an object") {
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mock[Value]
)
intercept[AssertionError] {
javaValueInfoProfile.toObjectInfo
}
}
it("should return an object reference wrapped in a profile") {
val expected = mock[ObjectInfo]
val mockObjectReference = mock[ObjectReference]
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mockObjectReference
) {
override protected def newObjectProfile(
arrayReference: ObjectReference
): ObjectInfo = mockNewObjectProfile(arrayReference)
}
mockNewObjectProfile.expects(mockObjectReference)
.returning(expected).once()
val actual = javaValueInfoProfile.toObjectInfo
actual should be (expected)
}
}
describe("#toLocalValue") {
it("should return null if the value is null") {
val expected: Any = null
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
null
)
val actual = javaValueInfoProfile.toLocalValue
actual should be (expected)
}
it("should convert the remote value to its underlying value") {
val expected = "some value"
val mockStringReference = mock[StringReference]
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mockStringReference
)
(mockStringReference.value _).expects().returning(expected).once()
val actual = javaValueInfoProfile.toLocalValue
actual should be (expected)
}
}
describe("#isPrimitive") {
it("should return false if the value is null") {
val expected = false
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
null
)
val actual = javaValueInfoProfile.isPrimitive
actual should be (expected)
}
it("should return false if the value is not a primitive") {
val expected = false
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mock[Value]
)
val actual = javaValueInfoProfile.isPrimitive
actual should be (expected)
}
it("should return true if the value is a primitive") {
val expected = true
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mock[PrimitiveValue]
)
val actual = javaValueInfoProfile.isPrimitive
actual should be (expected)
}
it("should return true if the value is void (considered primitive)") {
val expected = true
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mock[VoidValue]
)
val actual = javaValueInfoProfile.isPrimitive
actual should be (expected)
}
}
describe("#isVoid") {
it("should return false if the value is null") {
val expected = false
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
null
)
val actual = javaValueInfoProfile.isVoid
actual should be (expected)
}
it("should return false if the value is not void") {
val expected = false
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mock[Value]
)
val actual = javaValueInfoProfile.isVoid
actual should be (expected)
}
it("should return true if the value is void") {
val expected = true
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mock[VoidValue]
)
val actual = javaValueInfoProfile.isVoid
actual should be (expected)
}
}
describe("#isObject") {
it("should return false if the value is null") {
val expected = false
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
null
)
val actual = javaValueInfoProfile.isObject
actual should be (expected)
}
it("should return false if the value is not a object") {
val expected = false
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mock[Value]
)
val actual = javaValueInfoProfile.isObject
actual should be (expected)
}
it("should return true if the value is a object") {
val expected = true
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mock[ObjectReference]
)
val actual = javaValueInfoProfile.isObject
actual should be (expected)
}
}
describe("#isString") {
it("should return false if the value is null") {
val expected = false
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
null
)
val actual = javaValueInfoProfile.isString
actual should be (expected)
}
it("should return false if the value is not a string") {
val expected = false
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mock[Value]
)
val actual = javaValueInfoProfile.isString
actual should be (expected)
}
it("should return true if the value is a string") {
val expected = true
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mock[StringReference]
)
val actual = javaValueInfoProfile.isString
actual should be (expected)
}
}
describe("#isArray") {
it("should return false if the value is null") {
val expected = false
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
null
)
val actual = javaValueInfoProfile.isArray
actual should be (expected)
}
it("should return false if the value is not a array") {
val expected = false
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mock[Value]
)
val actual = javaValueInfoProfile.isArray
actual should be (expected)
}
it("should return true if the value is a array") {
val expected = true
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mock[ArrayReference]
)
val actual = javaValueInfoProfile.isArray
actual should be (expected)
}
}
describe("#isClassLoader") {
it("should return false if the value is null") {
val expected = false
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
null
)
val actual = javaValueInfoProfile.isClassLoader
actual should be (expected)
}
it("should return false if the value is not a class loader") {
val expected = false
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mock[Value]
)
val actual = javaValueInfoProfile.isClassLoader
actual should be (expected)
}
it("should return true if the value is a class loader") {
val expected = true
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mock[ClassLoaderReference]
)
val actual = javaValueInfoProfile.isClassLoader
actual should be (expected)
}
}
describe("#isClassObject") {
it("should return false if the value is null") {
val expected = false
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
null
)
val actual = javaValueInfoProfile.isClassObject
actual should be (expected)
}
it("should return false if the value is not a class object") {
val expected = false
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mock[Value]
)
val actual = javaValueInfoProfile.isClassObject
actual should be (expected)
}
it("should return true if the value is a class object") {
val expected = true
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mock[ClassObjectReference]
)
val actual = javaValueInfoProfile.isClassObject
actual should be (expected)
}
}
describe("#isThreadGroup") {
it("should return false if the value is null") {
val expected = false
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
null
)
val actual = javaValueInfoProfile.isThreadGroup
actual should be (expected)
}
it("should return false if the value is not a thread group") {
val expected = false
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mock[Value]
)
val actual = javaValueInfoProfile.isThreadGroup
actual should be (expected)
}
it("should return true if the value is a thread group") {
val expected = true
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mock[ThreadGroupReference]
)
val actual = javaValueInfoProfile.isThreadGroup
actual should be (expected)
}
}
describe("#isThread") {
it("should return false if the value is null") {
val expected = false
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
null
)
val actual = javaValueInfoProfile.isThread
actual should be (expected)
}
it("should return false if the value is not a thread") {
val expected = false
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mock[Value]
)
val actual = javaValueInfoProfile.isThread
actual should be (expected)
}
it("should return true if the value is a thread") {
val expected = true
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mock[ThreadReference]
)
val actual = javaValueInfoProfile.isThread
actual should be (expected)
}
}
describe("#isNull") {
it("should return true if the value is null") {
val expected = true
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
null
)
val actual = javaValueInfoProfile.isNull
actual should be (expected)
}
it("should return false if the value is not null") {
val expected = false
val javaValueInfoProfile = new JavaValueInfo(
mockScalaVirtualMachine,
mockInfoProducerProfile,
mock[Value]
)
val actual = javaValueInfoProfile.isNull
actual should be (expected)
}
}
}
}
|
ensime/scala-debugger
|
scala-debugger-api/src/test/scala/org/scaladebugger/api/profiles/java/info/JavaValueInfoSpec.scala
|
Scala
|
apache-2.0
| 28,014 |
package com.pwootage.metroidprime.formats.dgrp
import com.pwootage.metroidprime.formats.BinarySerializable
import com.pwootage.metroidprime.formats.io.PrimeDataFile
import com.pwootage.metroidprime.utils.DataTypeConversion
class Resource extends BinarySerializable {
var typ: Int = -1
var id: Int = -1
override def write(f: PrimeDataFile): Unit = {
f.write32(id)
f.write32(typ)
}
override def read(f: PrimeDataFile): Unit = {
id = f.read32()
typ = f.read32()
}
override def toString = s"Dependency(${id.toHexString}.${DataTypeConversion.intContainingCharsAsStr(typ)})"
}
|
Pwootage/prime-patcher
|
src/main/scala/com/pwootage/metroidprime/formats/dgrp/Resource.scala
|
Scala
|
gpl-3.0
| 605 |
package org.bste.euler.problems
import org.bste.euler.Problem
class P1_MultiplesThreeAndFive(limit: Int = 1000) extends Problem {
val title = "Problem 1 - Multiples of 3 and 5"
val description = """|If we list all the natural numbers below 10 that are multiples of 3 or 5, we get 3, 5, 6 and 9.
|The sum of these multiples is 23.
|Find the sum of all the multiples of 3 or 5 below 1000.""".stripMargin
def answer = solve().toString
private def solve() = {
(0 until limit).filter { x => x % 3 == 0 || x % 5 == 0 }.sum
}
}
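// Hedged usage sketch (editor-added, not part of the original file): P1Demo is an
// assumed name; with the default limit of 1000 the printed answer is the well-known
// value 233168.
object P1Demo extends App {
  val p1 = new P1_MultiplesThreeAndFive()
  println(s"${p1.title}: ${p1.answer}")
}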
|
benjstephenson/upschema
|
src/main/scala/org/bste/euler/problems/P1_MultiplesThreeAndFive.scala
|
Scala
|
gpl-2.0
| 545 |
// Copyright 2017,2018,2019,2020 Commonwealth Bank of Australia
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package commbank.grimlock.framework.environment
import commbank.grimlock.framework.{
Cell,
Matrix,
Matrix1D,
Matrix2D,
Matrix3D,
Matrix4D,
Matrix5D,
Matrix6D,
Matrix7D,
Matrix8D,
Matrix9D,
MatrixWithParseErrors,
MultiDimensionMatrix,
SaveStringsAsText
}
import commbank.grimlock.framework.content.{ Content, Contents, IndexedContents }
import commbank.grimlock.framework.encoding.Value
import commbank.grimlock.framework.partition.Partitions
import commbank.grimlock.framework.position.{
Coordinates1,
Coordinates2,
Coordinates3,
Coordinates4,
Coordinates5,
Coordinates6,
Coordinates7,
Coordinates8,
Coordinates9,
Position,
Positions
}
import shapeless.{ ::, =:!=, HList, HNil }
import shapeless.nat.{ _0, _1, _2, _3, _4, _5, _6, _7, _8 }
/** Defines standard functional operations for dealing with distributed lists. */
trait NativeOperations[X, C <: Context[C]] {
/** Return the union of this `U` and `other`. */
def ++(other: C#U[X]): C#U[X]
/** Filter and map elements according to `pf`. */
def collect[Y : C#D](pf: PartialFunction[X, Y]): C#U[Y]
/** Keep only items that satisfy the predicate `f`. */
def filter(f: (X) => Boolean): C#U[X]
/** Apply function `f`, then flatten the results. */
def flatMap[Y : C#D](f: (X) => TraversableOnce[Y]): C#U[Y]
/** Map each element using the function `f`. */
def map[Y : C#D](f: (X) => Y): C#U[Y]
/** Return a new list by taking the first `num` elements of the original. */
def take(num: Int)(implicit enc: C#D[X]): C#U[X]
}
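// Illustrative chain over a distributed list once a concrete Context is in scope; the
// value `data` and its element type are hypothetical, the methods are those declared above:
//   data.filter(_ >= 0).map(_.toString).take(10)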
/** Defines operations for dealing with user defined values. */
trait ValueOperations[X, E[_]] {
/** Cross this value with `that`. */
def cross[Y](that: E[Y])(implicit ev: Y =:!= Nothing): E[(X, Y)]
/** Apply the function `f` to the value. */
def map[Y](f: (X) => Y): E[Y]
}
/** Defines convenience implicits for dealing with distributed cells. */
trait CellImplicits[C <: Context[C]] {
/** Converts a `Cell[P]` into a `C#U[Cell[P]]`. */
implicit def cellToU[P <: HList](c: Cell[P])(implicit ctx: C): C#U[Cell[P]]
/** Converts a `List[Cell[P]]` into a `C#U[Cell[P]]`. */
implicit def listCellToU[P <: HList](l: List[Cell[P]])(implicit ctx: C): C#U[Cell[P]]
}
/** Defines convenience implicits for dealing with distributed contents. */
trait ContentImplicits[C <: Context[C]] {
/** Converts a `C#U[Content]` to a `Contents`. */
implicit def toContents(data: C#U[Content]): Contents[C]
/** Converts a `C#U[(Position[P], Content)]` to an `IndexedContents`. */
implicit def toIndexed[P <: HList](data: C#U[(Position[P], Content)]): IndexedContents[P, C]
}
/** Defines convenience implicits for dealing with distributed strings. */
trait EnvironmentImplicits[C <: Context[C]] {
/** Converts a `C#U[String]` to a `SaveStringsAsText`. */
implicit def saveStringsAsText(data: C#U[String]): SaveStringsAsText[C]
/** Make available native functions of `C#U`. */
implicit def nativeFunctions[X](data: C#U[X]): NativeOperations[X, C]
/** Make available functions of `C#E`. */
implicit def valueFunctions[X](value: C#E[X]): ValueOperations[X, C#E]
/** Convert a `C#E` to a `C#U`. */
implicit def eToU[X : C#D](value: C#E[X])(implicit ctx: C): C#U[X]
}
/** Defines convenience implicits for dealing with matrices. */
trait MatrixImplicits[C <: Context[C]] {
/** Converts a `C#U[Cell[P]]` to a `Matrix`. */
implicit def toMatrix[P <: HList](data: C#U[Cell[P]]): Matrix[P, C]
/** Conversion from `C#U[Cell[V1 :: HNil]]` to a `Matrix1D`. */
implicit def toMatrix1D[
V1 <: Value[_]
](
data: C#U[Cell[V1 :: HNil]]
)(implicit
ev1: Position.IndexConstraints.Aux[V1 :: HNil, _0, V1]
): Matrix1D[V1, C]
/** Conversion from `C#U[Cell[V1 :: V2 :: HNil]]` to a `Matrix2D`. */
implicit def toMatrix2D[
V1 <: Value[_],
V2 <: Value[_]
](
data: C#U[Cell[V1 :: V2 :: HNil]]
)(implicit
ev1: Position.IndexConstraints.Aux[V1 :: V2 :: HNil, _0, V1],
ev2: Position.IndexConstraints.Aux[V1 :: V2 :: HNil, _1, V2]
): Matrix2D[V1, V2, C]
/** Conversion from `C#U[Cell[V1 :: V2 :: V3 :: HNil]]` to a `Matrix3D`. */
implicit def toMatrix3D[
V1 <: Value[_],
V2 <: Value[_],
V3 <: Value[_]
](
data: C#U[Cell[V1 :: V2 :: V3 :: HNil]]
)(implicit
ev1: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: HNil, _0, V1],
ev2: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: HNil, _1, V2],
ev3: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: HNil, _2, V3]
): Matrix3D[V1, V2, V3, C]
/** Conversion from `C#U[Cell[V1 :: V2 :: V3 :: V4 :: HNil]]` to a `Matrix4D`. */
implicit def toMatrix4D[
V1 <: Value[_],
V2 <: Value[_],
V3 <: Value[_],
V4 <: Value[_]
](
data: C#U[Cell[V1 :: V2 :: V3 :: V4 :: HNil]]
)(implicit
ev1: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: HNil, _0, V1],
ev2: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: HNil, _1, V2],
ev3: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: HNil, _2, V3],
ev4: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: HNil, _3, V4]
): Matrix4D[V1, V2, V3, V4, C]
/** Conversion from `C#U[Cell[V1 :: V2 :: V3 :: V4 :: V5 :: HNil]]` to a `Matrix5D`. */
implicit def toMatrix5D[
V1 <: Value[_],
V2 <: Value[_],
V3 <: Value[_],
V4 <: Value[_],
V5 <: Value[_]
](
data: C#U[Cell[V1 :: V2 :: V3 :: V4 :: V5 :: HNil]]
)(implicit
ev1: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: HNil, _0, V1],
ev2: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: HNil, _1, V2],
ev3: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: HNil, _2, V3],
ev4: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: HNil, _3, V4],
ev5: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: HNil, _4, V5]
): Matrix5D[V1, V2, V3, V4, V5, C]
/** Conversion from `C#U[Cell[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: HNil]]` to a `Matrix6D`. */
implicit def toMatrix6D[
V1 <: Value[_],
V2 <: Value[_],
V3 <: Value[_],
V4 <: Value[_],
V5 <: Value[_],
V6 <: Value[_]
](
data: C#U[Cell[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: HNil]]
)(implicit
ev1: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: HNil, _0, V1],
ev2: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: HNil, _1, V2],
ev3: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: HNil, _2, V3],
ev4: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: HNil, _3, V4],
ev5: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: HNil, _4, V5],
ev6: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: HNil, _5, V6]
): Matrix6D[V1, V2, V3, V4, V5, V6, C]
/** Conversion from `C#U[Cell[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: HNil]]` to a `Matrix7D`. */
implicit def toMatrix7D[
V1 <: Value[_],
V2 <: Value[_],
V3 <: Value[_],
V4 <: Value[_],
V5 <: Value[_],
V6 <: Value[_],
V7 <: Value[_]
](
data: C#U[Cell[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: HNil]]
)(implicit
ev1: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: HNil, _0, V1],
ev2: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: HNil, _1, V2],
ev3: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: HNil, _2, V3],
ev4: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: HNil, _3, V4],
ev5: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: HNil, _4, V5],
ev6: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: HNil, _5, V6],
ev7: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: HNil, _6, V7]
): Matrix7D[V1, V2, V3, V4, V5, V6, V7, C]
/** Conversion from `C#U[Cell[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: HNil]]` to a `Matrix8D`. */
implicit def toMatrix8D[
V1 <: Value[_],
V2 <: Value[_],
V3 <: Value[_],
V4 <: Value[_],
V5 <: Value[_],
V6 <: Value[_],
V7 <: Value[_],
V8 <: Value[_]
](
data: C#U[Cell[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: HNil]]
)(implicit
ev1: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: HNil, _0, V1],
ev2: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: HNil, _1, V2],
ev3: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: HNil, _2, V3],
ev4: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: HNil, _3, V4],
ev5: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: HNil, _4, V5],
ev6: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: HNil, _5, V6],
ev7: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: HNil, _6, V7],
ev8: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: HNil, _7, V8]
): Matrix8D[V1, V2, V3, V4, V5, V6, V7, V8, C]
/** Conversion from `C#U[Cell[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: V9 :: HNil]]` to a `Matrix9D`. */
implicit def toMatrix9D[
V1 <: Value[_],
V2 <: Value[_],
V3 <: Value[_],
V4 <: Value[_],
V5 <: Value[_],
V6 <: Value[_],
V7 <: Value[_],
V8 <: Value[_],
V9 <: Value[_]
](
data: C#U[Cell[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: V9 :: HNil]]
)(implicit
ev1: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: V9 :: HNil, _0, V1],
ev2: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: V9 :: HNil, _1, V2],
ev3: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: V9 :: HNil, _2, V3],
ev4: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: V9 :: HNil, _3, V4],
ev5: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: V9 :: HNil, _4, V5],
ev6: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: V9 :: HNil, _5, V6],
ev7: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: V9 :: HNil, _6, V7],
ev8: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: V9 :: HNil, _7, V8],
ev9: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: V9 :: HNil, _8, V9]
): Matrix9D[V1, V2, V3, V4, V5, V6, V7, V8, V9, C]
/** Converts a `C#U[Cell[P]]` to a `MultiDimensionMatrix`. */
implicit def toMultiDimensionMatrix[
P <: HList
](
data: C#U[Cell[P]]
)(implicit
ev: Position.IsMultiDimensionalConstraints[P]
): MultiDimensionMatrix[P, C]
/** Converts a `List[Cell[P]]` to a `Matrix`. */
implicit def listToMatrix[P <: HList](data: List[Cell[P]])(implicit ctx: C): Matrix[P, C]
/** Conversion from `List[Cell[V1 :: HNil]]` to a `Matrix1D`. */
implicit def listToMatrix1D[
V1 <: Value[_]
](
data: List[Cell[V1 :: HNil]]
)(implicit
ctx: C,
ev1: Position.IndexConstraints.Aux[V1 :: HNil, _0, V1]
): Matrix1D[V1, C]
/** Conversion from `List[Cell[V1 :: V2 :: HNil]]` to a `Matrix2D`. */
implicit def listToMatrix2D[
V1 <: Value[_],
V2 <: Value[_]
](
data: List[Cell[V1 :: V2 :: HNil]]
)(implicit
ctx: C,
ev1: Position.IndexConstraints.Aux[V1 :: V2 :: HNil, _0, V1],
ev2: Position.IndexConstraints.Aux[V1 :: V2 :: HNil, _1, V2]
): Matrix2D[V1, V2, C]
/** Conversion from `List[Cell[V1 :: V2 :: V3 :: HNil]]` to a `Matrix3D`. */
implicit def listToMatrix3D[
V1 <: Value[_],
V2 <: Value[_],
V3 <: Value[_]
](
data: List[Cell[V1 :: V2 :: V3 :: HNil]]
)(implicit
ctx: C,
ev1: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: HNil, _0, V1],
ev2: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: HNil, _1, V2],
ev3: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: HNil, _2, V3]
): Matrix3D[V1, V2, V3, C]
/** Conversion from `List[Cell[V1 :: V2 :: V3 :: V4 :: HNil]]` to a `Matrix4D`. */
implicit def listToMatrix4D[
V1 <: Value[_],
V2 <: Value[_],
V3 <: Value[_],
V4 <: Value[_]
](
data: List[Cell[V1 :: V2 :: V3 :: V4 :: HNil]]
)(implicit
ctx: C,
ev1: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: HNil, _0, V1],
ev2: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: HNil, _1, V2],
ev3: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: HNil, _2, V3],
ev4: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: HNil, _3, V4]
): Matrix4D[V1, V2, V3, V4, C]
/** Conversion from `List[Cell[V1 :: V2 :: V3 :: V4 :: V5 :: HNil]]` to a `Matrix5D`. */
implicit def listToMatrix5D[
V1 <: Value[_],
V2 <: Value[_],
V3 <: Value[_],
V4 <: Value[_],
V5 <: Value[_]
](
data: List[Cell[V1 :: V2 :: V3 :: V4 :: V5 :: HNil]]
)(implicit
ctx: C,
ev1: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: HNil, _0, V1],
ev2: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: HNil, _1, V2],
ev3: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: HNil, _2, V3],
ev4: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: HNil, _3, V4],
ev5: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: HNil, _4, V5]
): Matrix5D[V1, V2, V3, V4, V5, C]
/** Conversion from `List[Cell[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: HNil]]` to a `Matrix6D`. */
implicit def listToMatrix6D[
V1 <: Value[_],
V2 <: Value[_],
V3 <: Value[_],
V4 <: Value[_],
V5 <: Value[_],
V6 <: Value[_]
](
data: List[Cell[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: HNil]]
)(implicit
ctx: C,
ev1: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: HNil, _0, V1],
ev2: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: HNil, _1, V2],
ev3: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: HNil, _2, V3],
ev4: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: HNil, _3, V4],
ev5: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: HNil, _4, V5],
ev6: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: HNil, _5, V6]
): Matrix6D[V1, V2, V3, V4, V5, V6, C]
/** Conversion from `List[Cell[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: HNil]]` to a `Matrix7D`. */
implicit def listToMatrix7D[
V1 <: Value[_],
V2 <: Value[_],
V3 <: Value[_],
V4 <: Value[_],
V5 <: Value[_],
V6 <: Value[_],
V7 <: Value[_]
](
data: List[Cell[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: HNil]]
)(implicit
ctx: C,
ev1: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: HNil, _0, V1],
ev2: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: HNil, _1, V2],
ev3: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: HNil, _2, V3],
ev4: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: HNil, _3, V4],
ev5: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: HNil, _4, V5],
ev6: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: HNil, _5, V6],
ev7: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: HNil, _6, V7]
): Matrix7D[V1, V2, V3, V4, V5, V6, V7, C]
/** Conversion from `List[Cell[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: HNil]]` to a `Matrix8D`. */
implicit def listToMatrix8D[
V1 <: Value[_],
V2 <: Value[_],
V3 <: Value[_],
V4 <: Value[_],
V5 <: Value[_],
V6 <: Value[_],
V7 <: Value[_],
V8 <: Value[_]
](
data: List[Cell[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: HNil]]
)(implicit
ctx: C,
ev1: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: HNil, _0, V1],
ev2: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: HNil, _1, V2],
ev3: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: HNil, _2, V3],
ev4: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: HNil, _3, V4],
ev5: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: HNil, _4, V5],
ev6: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: HNil, _5, V6],
ev7: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: HNil, _6, V7],
ev8: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: HNil, _7, V8]
): Matrix8D[V1, V2, V3, V4, V5, V6, V7, V8, C]
/** Conversion from `List[Cell[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: V9 :: HNil]]` to a `Matrix9D`. */
implicit def listToMatrix9D[
V1 <: Value[_],
V2 <: Value[_],
V3 <: Value[_],
V4 <: Value[_],
V5 <: Value[_],
V6 <: Value[_],
V7 <: Value[_],
V8 <: Value[_],
V9 <: Value[_]
](
data: List[Cell[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: V9 :: HNil]]
)(implicit
ctx: C,
ev1: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: V9 :: HNil, _0, V1],
ev2: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: V9 :: HNil, _1, V2],
ev3: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: V9 :: HNil, _2, V3],
ev4: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: V9 :: HNil, _3, V4],
ev5: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: V9 :: HNil, _4, V5],
ev6: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: V9 :: HNil, _5, V6],
ev7: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: V9 :: HNil, _6, V7],
ev8: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: V9 :: HNil, _7, V8],
ev9: Position.IndexConstraints.Aux[V1 :: V2 :: V3 :: V4 :: V5 :: V6 :: V7 :: V8 :: V9 :: HNil, _8, V9]
): Matrix9D[V1, V2, V3, V4, V5, V6, V7, V8, V9, C]
/** Converts a `List[Cell[P]]` to a `MultiDimensionMatrix`. */
implicit def listToMultiDimensionMatrix[
P <: HList
](
data: List[Cell[P]]
)(implicit
ctx: C,
ev: Position.IsMultiDimensionalConstraints[P]
): MultiDimensionMatrix[P, C]
/** Conversion from `List[(T1, Content)]` to a `Matrix`. */
implicit def tuple1ToMatrix[
T1 <% Value[T1]
](
list: List[(T1, Content)]
)(implicit
ctx: C
): Matrix[Coordinates1[T1], C]
/** Conversion from `List[(T1, Content)]` to a `Matrix1D`. */
implicit def tuple1ToMatrix1D[
T1 <% Value[T1]
](
list: List[(T1, Content)]
)(implicit
ctx: C,
ev1: Position.IndexConstraints.Aux[Coordinates1[T1], _0, Value[T1]]
): Matrix1D[Value[T1], C]
/** Conversion from `List[(T1, T2, Content)]` to a `Matrix`. */
implicit def tuple2ToMatrix[
T1 <% Value[T1],
T2 <% Value[T2]
](
list: List[(T1, T2, Content)]
)(implicit
ctx: C
): Matrix[Coordinates2[T1, T2], C]
/** Conversion from `List[(T1, T2, Content)]` to a `Matrix2D`. */
implicit def tuple2ToMatrix2D[
T1 <% Value[T1],
T2 <% Value[T2]
](
list: List[(T1, T2, Content)]
)(implicit
ctx: C,
ev1: Position.IndexConstraints.Aux[Coordinates2[T1, T2], _0, Value[T1]],
ev2: Position.IndexConstraints.Aux[Coordinates2[T1, T2], _1, Value[T2]]
): Matrix2D[Value[T1], Value[T2], C]
/** Conversion from `List[(T1, T2, Content)]` to a `MultiDimensionMatrix`. */
implicit def tuple2ToMultiDimensionMatrix[
T1 <% Value[T1],
T2 <% Value[T2]
](
list: List[(T1, T2, Content)]
)(implicit
ctx: C
): MultiDimensionMatrix[Coordinates2[T1, T2], C]
/** Conversion from `List[(T1, T2, T3, Content)]` to a `Matrix`. */
implicit def tuple3ToMatrix[
T1 <% Value[T1],
T2 <% Value[T2],
T3 <% Value[T3]
](
list: List[(T1, T2, T3, Content)]
)(implicit
ctx: C
): Matrix[Coordinates3[T1, T2, T3], C]
/** Conversion from `List[(T1, T2, T3, Content)]` to a `Matrix3D`. */
implicit def tuple3ToMatrix3D[
T1 <% Value[T1],
T2 <% Value[T2],
T3 <% Value[T3]
](
list: List[(T1, T2, T3, Content)]
)(implicit
ctx: C,
ev1: Position.IndexConstraints.Aux[Coordinates3[T1, T2, T3], _0, Value[T1]],
ev2: Position.IndexConstraints.Aux[Coordinates3[T1, T2, T3], _1, Value[T2]],
ev3: Position.IndexConstraints.Aux[Coordinates3[T1, T2, T3], _2, Value[T3]]
): Matrix3D[Value[T1], Value[T2], Value[T3], C]
/** Conversion from `List[(T1, T2, T3, Content)]` to a `MultiDimensionMatrix`. */
implicit def tuple3ToMultiDimensionMatrix[
T1 <% Value[T1],
T2 <% Value[T2],
T3 <% Value[T3]
](
list: List[(T1, T2, T3, Content)]
)(implicit
ctx: C
): MultiDimensionMatrix[Coordinates3[T1, T2, T3], C]
/** Conversion from `List[(T1, T2, T3, T4, Content)]` to a `Matrix`. */
implicit def tuple4ToMatrix[
T1 <% Value[T1],
T2 <% Value[T2],
T3 <% Value[T3],
T4 <% Value[T4]
](
list: List[(T1, T2, T3, T4, Content)]
)(implicit
ctx: C
): Matrix[Coordinates4[T1, T2, T3, T4], C]
/** Conversion from `List[(T1, T2, T3, T4, Content)]` to a `Matrix4D`. */
implicit def tuple4ToMatrix4D[
T1 <% Value[T1],
T2 <% Value[T2],
T3 <% Value[T3],
T4 <% Value[T4]
](
list: List[(T1, T2, T3, T4, Content)]
)(implicit
ctx: C,
ev1: Position.IndexConstraints.Aux[Coordinates4[T1, T2, T3, T4], _0, Value[T1]],
ev2: Position.IndexConstraints.Aux[Coordinates4[T1, T2, T3, T4], _1, Value[T2]],
ev3: Position.IndexConstraints.Aux[Coordinates4[T1, T2, T3, T4], _2, Value[T3]],
ev4: Position.IndexConstraints.Aux[Coordinates4[T1, T2, T3, T4], _3, Value[T4]]
): Matrix4D[Value[T1], Value[T2], Value[T3], Value[T4], C]
/** Conversion from `List[(T1, T2, T3, T4, Content)]` to a `MultiDimensionMatrix`. */
implicit def tuple4ToMultiDimensionMatrix[
T1 <% Value[T1],
T2 <% Value[T2],
T3 <% Value[T3],
T4 <% Value[T4]
](
list: List[(T1, T2, T3, T4, Content)]
)(implicit
ctx: C
): MultiDimensionMatrix[Coordinates4[T1, T2, T3, T4], C]
/** Conversion from `List[(T1, T2, T3, T4, T5, Content)]` to a `Matrix`. */
implicit def tuple5ToMatrix[
T1 <% Value[T1],
T2 <% Value[T2],
T3 <% Value[T3],
T4 <% Value[T4],
T5 <% Value[T5]
](
list: List[(T1, T2, T3, T4, T5, Content)]
)(implicit
ctx: C
): Matrix[Coordinates5[T1, T2, T3, T4, T5], C]
/** Conversion from `List[(T1, T2, T3, T4, T5, Content)]` to a `Matrix5D`. */
implicit def tuple5ToMatrix5D[
T1 <% Value[T1],
T2 <% Value[T2],
T3 <% Value[T3],
T4 <% Value[T4],
T5 <% Value[T5]
](
list: List[(T1, T2, T3, T4, T5, Content)]
)(implicit
ctx: C,
ev1: Position.IndexConstraints.Aux[Coordinates5[T1, T2, T3, T4, T5], _0, Value[T1]],
ev2: Position.IndexConstraints.Aux[Coordinates5[T1, T2, T3, T4, T5], _1, Value[T2]],
ev3: Position.IndexConstraints.Aux[Coordinates5[T1, T2, T3, T4, T5], _2, Value[T3]],
ev4: Position.IndexConstraints.Aux[Coordinates5[T1, T2, T3, T4, T5], _3, Value[T4]],
ev5: Position.IndexConstraints.Aux[Coordinates5[T1, T2, T3, T4, T5], _4, Value[T5]]
): Matrix5D[Value[T1], Value[T2], Value[T3], Value[T4], Value[T5], C]
/** Conversion from `List[(T1, T2, T3, T4, T5, Content)]` to a `MultiDimensionMatrix`. */
implicit def tuple5ToMultiDimensionMatrix[
T1 <% Value[T1],
T2 <% Value[T2],
T3 <% Value[T3],
T4 <% Value[T4],
T5 <% Value[T5]
](
list: List[(T1, T2, T3, T4, T5, Content)]
)(implicit
ctx: C
): MultiDimensionMatrix[Coordinates5[T1, T2, T3, T4, T5], C]
/** Conversion from `List[(T1, T2, T3, T4, T5, T6, Content)]` to a `Matrix`. */
implicit def tuple6ToMatrix[
T1 <% Value[T1],
T2 <% Value[T2],
T3 <% Value[T3],
T4 <% Value[T4],
T5 <% Value[T5],
T6 <% Value[T6]
](
list: List[(T1, T2, T3, T4, T5, T6, Content)]
)(implicit
ctx: C
): Matrix[Coordinates6[T1, T2, T3, T4, T5, T6], C]
/** Conversion from `List[(T1, T2, T3, T4, T5, T6, Content)]` to a `Matrix6D`. */
implicit def tuple6ToMatrix6D[
T1 <% Value[T1],
T2 <% Value[T2],
T3 <% Value[T3],
T4 <% Value[T4],
T5 <% Value[T5],
T6 <% Value[T6]
](
list: List[(T1, T2, T3, T4, T5, T6, Content)]
)(implicit
ctx: C,
ev1: Position.IndexConstraints.Aux[Coordinates6[T1, T2, T3, T4, T5, T6], _0, Value[T1]],
ev2: Position.IndexConstraints.Aux[Coordinates6[T1, T2, T3, T4, T5, T6], _1, Value[T2]],
ev3: Position.IndexConstraints.Aux[Coordinates6[T1, T2, T3, T4, T5, T6], _2, Value[T3]],
ev4: Position.IndexConstraints.Aux[Coordinates6[T1, T2, T3, T4, T5, T6], _3, Value[T4]],
ev5: Position.IndexConstraints.Aux[Coordinates6[T1, T2, T3, T4, T5, T6], _4, Value[T5]],
ev6: Position.IndexConstraints.Aux[Coordinates6[T1, T2, T3, T4, T5, T6], _5, Value[T6]]
): Matrix6D[Value[T1], Value[T2], Value[T3], Value[T4], Value[T5], Value[T6], C]
/** Conversion from `List[(T1, T2, T3, T4, T5, T6, Content)]` to a `MultiDimensionMatrix`. */
implicit def tuple6ToMultiDimensionMatrix[
T1 <% Value[T1],
T2 <% Value[T2],
T3 <% Value[T3],
T4 <% Value[T4],
T5 <% Value[T5],
T6 <% Value[T6]
](
list: List[(T1, T2, T3, T4, T5, T6, Content)]
)(implicit
ctx: C
): MultiDimensionMatrix[Coordinates6[T1, T2, T3, T4, T5, T6], C]
/** Conversion from `List[(T1, T2, T3, T4, T5, T6, T7, Content)]` to a `Matrix`. */
implicit def tuple7ToMatrix[
T1 <% Value[T1],
T2 <% Value[T2],
T3 <% Value[T3],
T4 <% Value[T4],
T5 <% Value[T5],
T6 <% Value[T6],
T7 <% Value[T7]
](
list: List[(T1, T2, T3, T4, T5, T6, T7, Content)]
)(implicit
ctx: C
): Matrix[Coordinates7[T1, T2, T3, T4, T5, T6, T7], C]
/** Conversion from `List[(T1, T2, T3, T4, T5, T6, T7, Content)]` to a `Matrix7D`. */
implicit def tuple7ToMatrix7D[
T1 <% Value[T1],
T2 <% Value[T2],
T3 <% Value[T3],
T4 <% Value[T4],
T5 <% Value[T5],
T6 <% Value[T6],
T7 <% Value[T7]
](
list: List[(T1, T2, T3, T4, T5, T6, T7, Content)]
)(implicit
ctx: C,
ev1: Position.IndexConstraints.Aux[Coordinates7[T1, T2, T3, T4, T5, T6, T7], _0, Value[T1]],
ev2: Position.IndexConstraints.Aux[Coordinates7[T1, T2, T3, T4, T5, T6, T7], _1, Value[T2]],
ev3: Position.IndexConstraints.Aux[Coordinates7[T1, T2, T3, T4, T5, T6, T7], _2, Value[T3]],
ev4: Position.IndexConstraints.Aux[Coordinates7[T1, T2, T3, T4, T5, T6, T7], _3, Value[T4]],
ev5: Position.IndexConstraints.Aux[Coordinates7[T1, T2, T3, T4, T5, T6, T7], _4, Value[T5]],
ev6: Position.IndexConstraints.Aux[Coordinates7[T1, T2, T3, T4, T5, T6, T7], _5, Value[T6]],
ev7: Position.IndexConstraints.Aux[Coordinates7[T1, T2, T3, T4, T5, T6, T7], _6, Value[T7]]
): Matrix7D[Value[T1], Value[T2], Value[T3], Value[T4], Value[T5], Value[T6], Value[T7], C]
/** Conversion from `List[(T1, T2, T3, T4, T5, T6, T7, Content)]` to a `MultiDimensionMatrix`. */
implicit def tuple7ToMultiDimensionMatrix[
T1 <% Value[T1],
T2 <% Value[T2],
T3 <% Value[T3],
T4 <% Value[T4],
T5 <% Value[T5],
T6 <% Value[T6],
T7 <% Value[T7]
](
list: List[(T1, T2, T3, T4, T5, T6, T7, Content)]
)(implicit
ctx: C
): MultiDimensionMatrix[Coordinates7[T1, T2, T3, T4, T5, T6, T7], C]
/** Conversion from `List[(T1, T2, T3, T4, T5, T6, T7, T8, Content)]` to a `Matrix`. */
implicit def tuple8ToMatrix[
T1 <% Value[T1],
T2 <% Value[T2],
T3 <% Value[T3],
T4 <% Value[T4],
T5 <% Value[T5],
T6 <% Value[T6],
T7 <% Value[T7],
T8 <% Value[T8]
](
list: List[(T1, T2, T3, T4, T5, T6, T7, T8, Content)]
)(implicit
ctx: C
): Matrix[Coordinates8[T1, T2, T3, T4, T5, T6, T7, T8], C]
/** Conversion from `List[(T1, T2, T3, T4, T5, T6, T7, T8, Content)]` to a `Matrix8D`. */
implicit def tuple8ToMatrix8D[
T1 <% Value[T1],
T2 <% Value[T2],
T3 <% Value[T3],
T4 <% Value[T4],
T5 <% Value[T5],
T6 <% Value[T6],
T7 <% Value[T7],
T8 <% Value[T8]
](
list: List[(T1, T2, T3, T4, T5, T6, T7, T8, Content)]
)(implicit
ctx: C,
ev1: Position.IndexConstraints.Aux[Coordinates8[T1, T2, T3, T4, T5, T6, T7, T8], _0, Value[T1]],
ev2: Position.IndexConstraints.Aux[Coordinates8[T1, T2, T3, T4, T5, T6, T7, T8], _1, Value[T2]],
ev3: Position.IndexConstraints.Aux[Coordinates8[T1, T2, T3, T4, T5, T6, T7, T8], _2, Value[T3]],
ev4: Position.IndexConstraints.Aux[Coordinates8[T1, T2, T3, T4, T5, T6, T7, T8], _3, Value[T4]],
ev5: Position.IndexConstraints.Aux[Coordinates8[T1, T2, T3, T4, T5, T6, T7, T8], _4, Value[T5]],
ev6: Position.IndexConstraints.Aux[Coordinates8[T1, T2, T3, T4, T5, T6, T7, T8], _5, Value[T6]],
ev7: Position.IndexConstraints.Aux[Coordinates8[T1, T2, T3, T4, T5, T6, T7, T8], _6, Value[T7]],
ev8: Position.IndexConstraints.Aux[Coordinates8[T1, T2, T3, T4, T5, T6, T7, T8], _7, Value[T8]]
): Matrix8D[Value[T1], Value[T2], Value[T3], Value[T4], Value[T5], Value[T6], Value[T7], Value[T8], C]
/** Conversion from `List[(T1, T2, T3, T4, T5, T6, T7, T8, Content)]` to a `MultiDimensionMatrix`. */
implicit def tuple8ToMultiDimensionMatrix[
T1 <% Value[T1],
T2 <% Value[T2],
T3 <% Value[T3],
T4 <% Value[T4],
T5 <% Value[T5],
T6 <% Value[T6],
T7 <% Value[T7],
T8 <% Value[T8]
](
list: List[(T1, T2, T3, T4, T5, T6, T7, T8, Content)]
)(implicit
ctx: C
): MultiDimensionMatrix[Coordinates8[T1, T2, T3, T4, T5, T6, T7, T8], C]
/** Conversion from `List[(T1, T2, T3, T4, T5, T6, T7, T8, T9, Content)]` to a `Matrix`. */
implicit def tuple9ToMatrix[
T1 <% Value[T1],
T2 <% Value[T2],
T3 <% Value[T3],
T4 <% Value[T4],
T5 <% Value[T5],
T6 <% Value[T6],
T7 <% Value[T7],
T8 <% Value[T8],
T9 <% Value[T9]
](
list: List[(T1, T2, T3, T4, T5, T6, T7, T8, T9, Content)]
)(implicit
ctx: C
): Matrix[Coordinates9[T1, T2, T3, T4, T5, T6, T7, T8, T9], C]
/** Conversion from `List[(T1, T2, T3, T4, T5, T6, T7, T8, T9, Content)]` to a `Matrix9D`. */
implicit def tuple9ToMatrix9D[
T1 <% Value[T1],
T2 <% Value[T2],
T3 <% Value[T3],
T4 <% Value[T4],
T5 <% Value[T5],
T6 <% Value[T6],
T7 <% Value[T7],
T8 <% Value[T8],
T9 <% Value[T9]
](
list: List[(T1, T2, T3, T4, T5, T6, T7, T8, T9, Content)]
)(implicit
ctx: C,
ev1: Position.IndexConstraints.Aux[Coordinates9[T1, T2, T3, T4, T5, T6, T7, T8, T9], _0, Value[T1]],
ev2: Position.IndexConstraints.Aux[Coordinates9[T1, T2, T3, T4, T5, T6, T7, T8, T9], _1, Value[T2]],
ev3: Position.IndexConstraints.Aux[Coordinates9[T1, T2, T3, T4, T5, T6, T7, T8, T9], _2, Value[T3]],
ev4: Position.IndexConstraints.Aux[Coordinates9[T1, T2, T3, T4, T5, T6, T7, T8, T9], _3, Value[T4]],
ev5: Position.IndexConstraints.Aux[Coordinates9[T1, T2, T3, T4, T5, T6, T7, T8, T9], _4, Value[T5]],
ev6: Position.IndexConstraints.Aux[Coordinates9[T1, T2, T3, T4, T5, T6, T7, T8, T9], _5, Value[T6]],
ev7: Position.IndexConstraints.Aux[Coordinates9[T1, T2, T3, T4, T5, T6, T7, T8, T9], _6, Value[T7]],
ev8: Position.IndexConstraints.Aux[Coordinates9[T1, T2, T3, T4, T5, T6, T7, T8, T9], _7, Value[T8]],
ev9: Position.IndexConstraints.Aux[Coordinates9[T1, T2, T3, T4, T5, T6, T7, T8, T9], _8, Value[T9]]
): Matrix9D[Value[T1], Value[T2], Value[T3], Value[T4], Value[T5], Value[T6], Value[T7], Value[T8], Value[T9], C]
/** Conversion from `List[(T1, T2, T3, T4, T5, T6, T7, T8, T9, Content)]` to a `MultiDimensionMatrix`. */
implicit def tuple9ToMultiDimensionMatrix[
T1 <% Value[T1],
T2 <% Value[T2],
T3 <% Value[T3],
T4 <% Value[T4],
T5 <% Value[T5],
T6 <% Value[T6],
T7 <% Value[T7],
T8 <% Value[T8],
T9 <% Value[T9]
](
list: List[(T1, T2, T3, T4, T5, T6, T7, T8, T9, Content)]
)(implicit
ctx: C
): MultiDimensionMatrix[Coordinates9[T1, T2, T3, T4, T5, T6, T7, T8, T9], C]
/** Conversion from matrix with errors tuple to `MatrixWithParseErrors`. */
implicit def tupleToParseErrors[
P <: HList
](
t: (C#U[Cell[P]], C#U[Throwable])
): MatrixWithParseErrors[P, C#U] = MatrixWithParseErrors(t._1, t._2)
}
/** Defines convenience implicits for dealing with distributed partitions. */
trait PartitionImplicits[C <: Context[C]] {
/** Conversion from `C#U[(I, Cell[P])]` to a `Partitions`. */
implicit def toPartitions[P <: HList, I : Ordering](data: C#U[(I, Cell[P])]): Partitions[P, I, C]
}
/** Defines convenience implicits for dealing with distributed positions. */
trait PositionImplicits[C <: Context[C]] {
/** Converts a `T` to a `C#U[Position[Coordinates1[T]]]`. */
implicit def tToU[T <% Value[T]](t: T)(implicit ctx: C): C#U[Position[Coordinates1[T]]]
/** Converts a `List[T]` to a `C#U[Position[Coordinates1[T]]]`. */
implicit def listTToU[T <% Value[T]](l: List[T])(implicit ctx: C): C#U[Position[Coordinates1[T]]]
/** Converts a `V` to a `C#U[Position[V :: HNil]]`. */
implicit def valueToU[V <: Value[_]](v: V)(implicit ctx: C): C#U[Position[V :: HNil]]
/** Converts a `List[V]` to a `C#U[Position[V :: HNil]]`. */
implicit def listValueToU[V <: Value[_]](l: List[V])(implicit ctx: C): C#U[Position[V :: HNil]]
/** Converts a `Position[P]` to a `C#U[Position[P]]`. */
implicit def positionToU[P <: HList](p: Position[P])(implicit ctx: C): C#U[Position[P]]
/** Converts a `List[Position[P]]` to a `C#U[Position[P]]`. */
implicit def listPositionToU[P <: HList](l: List[Position[P]])(implicit ctx: C): C#U[Position[P]]
/** Converts a `C#U[Position[P]]` to a `Positions`. */
implicit def toPositions[P <: HList](data: C#U[Position[P]]): Positions[P, C]
/** Converts a `(T, Cell.Predicate[P])` to a `List[(C#U[Position[S]], Cell.Predicate[P])]`. */
implicit def predicateToU[
P <: HList,
S <: HList,
T <% C#U[Position[S]]
](
t: (T, Cell.Predicate[P])
)(implicit
ctx: C
): List[(C#U[Position[S]], Cell.Predicate[P])] = {
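// Lift the single (position-like value, predicate) pair into the one-element list form
// used by the list-based overloads; the view bound on T converts t._1 to C#U[Position[S]].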
val u: C#U[Position[S]] = t._1
List((u, t._2))
}
/** Converts a `List[(T, Cell.Predicate[P])]` to a `List[(C#U[Position[S]], Cell.Predicate[P])]`. */
implicit def listPredicateToU[
P <: HList,
S <: HList,
T <% C#U[Position[S]]
](
l: List[(T, Cell.Predicate[P])]
)(implicit
ctx: C
): List[(C#U[Position[S]], Cell.Predicate[P])] = l
.map { case (i, p) =>
val u: C#U[Position[S]] = i
(u, p)
}
}
/** Capture all implicits together. */
trait Implicits[C <: Context[C]] {
/** Environment related implicits. */
val environment: EnvironmentImplicits[C]
}
/** Capture all matrix specific implicits together. */
trait PrimeImplicits[C <: MatrixContext[C]] extends Implicits[C] {
/** Cell related implicits. */
val cell: CellImplicits[C]
/** Content related implicits. */
val content: ContentImplicits[C]
/** Matrix related implicits. */
val matrix: MatrixImplicits[C]
/** Partition related implicits. */
val partition: PartitionImplicits[C]
/** Position related implicits. */
val position: PositionImplicits[C]
}
|
CommBank/grimlock
|
grimlock-core/src/main/scala/commbank/grimlock/framework/Implicits.scala
|
Scala
|
apache-2.0
| 35,922 |
/*
* Copyright 2016 okumin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package influent.internal.msgpack
import influent.internal.msgpack.MsgpackUnpackerArbitrary._
import java.nio.ByteBuffer
import org.msgpack.core.MessagePack
import org.msgpack.value.ImmutableValue
import org.scalacheck.Shrink
import org.scalatest.WordSpec
import org.scalatest.prop.GeneratorDrivenPropertyChecks
class MsgpackIncrementalUnpackerSpec extends WordSpec with GeneratorDrivenPropertyChecks {
"MsgpackIncrementalUnpacker" should {
"decode msgpack values" in {
implicit val shrinkValue: Shrink[ImmutableValue] = Shrink.shrinkAny
implicit val shrinkInt: Shrink[Int] = Shrink.shrinkAny
forAll { (value: ImmutableValue, groupSize: Int) =>
whenever(groupSize > 0) {
val packer = MessagePack.newDefaultBufferPacker()
packer.packValue(value)
val asBytes = packer.toByteArray
var unpacker: MsgpackIncrementalUnpacker = FormatUnpacker.getInstance()
val buffer = new InfluentByteBuffer(Int.MaxValue)
val chunks = asBytes.grouped(groupSize).toList
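// Feed every chunk except the last; the unpacker must stay incomplete after each
// partial push and only yield the original value once the final chunk arrives.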
chunks.init.foreach { bytes =>
val buf = ByteBuffer.allocate(1024)
buf.put(bytes).flip()
buffer.push(buf)
val result = unpacker.unpack(buffer)
assert(!result.isCompleted)
unpacker = result.next()
}
val buf = ByteBuffer.allocate(1024)
buf.put(chunks.last).flip()
buffer.push(buf)
val actual = unpacker.unpack(buffer)
assert(actual.isCompleted)
assert(actual.value() === value)
}
}
}
}
}
|
okumin/influent
|
influent-java/src/test/scala/influent/internal/msgpack/MsgpackIncrementalUnpackerSpec.scala
|
Scala
|
apache-2.0
| 2,194 |
package org.hrscala.sbt
import java.util.Properties
object ProjectInformation {
lazy val properties = {
val tmp = new Properties()
tmp.load(getClass.getResourceAsStream("project.properties"))
tmp
}
lazy val version = properties.getProperty("version")
lazy val buildDate = properties.getProperty("build-date")
}
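// Standalone sketch of the same lookup pattern with an in-memory source instead of a
// classpath resource; the keys come from the getProperty calls above, the values are
// illustrative.
object ProjectInformationSketch extends App {
  val props = new java.util.Properties()
  props.load(new java.io.StringReader("version=0.1.0\nbuild-date=2015-01-01"))
  println(props.getProperty("version"))    // 0.1.0
  println(props.getProperty("build-date")) // 2015-01-01
}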
|
HRScala/its-so-sbt
|
50-recursion/src/main/scala/org/hrscala/sbt/ProjectInformation.scala
|
Scala
|
unlicense
| 334 |
/*
* Copyright (C) 2015 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.sparta.plugin.input.fileSystem
import java.io.{Serializable => JSerializable}
import com.stratio.sparta.sdk.properties.ValidatingPropertyMap._
import com.stratio.sparta.sdk.pipeline.input.Input
import org.apache.spark.sql.Row
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream.DStream
/**
* This input creates a stream of Rows given the path to an HDFS-compatible directory.
* Spark monitors the directory and only processes files that appear after the
* stream starts.
* @param properties
*/
class FileSystemInput(properties: Map[String, JSerializable]) extends Input(properties) {
def initStream(ssc: StreamingContext, storageLevel: String): DStream[Row] = {
ssc.textFileStream(properties.getString("directory", "")).map(data => Row(data))
}
}
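// Hedged usage sketch; apart from FileSystemInput and the "directory" property key,
// the names below are assumptions:
//   val input = new FileSystemInput(Map("directory" -> "/data/incoming"))
//   val rows = input.initStream(ssc, "MEMORY_ONLY") // ssc: an already-built StreamingContext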
|
Frannie-Ludmilla/sparta
|
plugins/src/main/scala/com/stratio/sparta/plugin/input/fileSystem/FileSystemInput.scala
|
Scala
|
apache-2.0
| 1,432 |
package org.jetbrains.plugins.scala
package lang
package psi
package impl
package expr
import com.intellij.lang.ASTNode
import com.intellij.psi.PsiElement
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.psi.api.expr._
/**
* Author: Alexander Podkhalyuzin
* Date: 06.03.2008
*/
class ScCatchBlockImpl(node: ASTNode) extends ScExpressionImplBase(node) with ScCatchBlock {
def getLeftParenthesis: Option[PsiElement] = {
val leftParenthesis = findChildByType[PsiElement](ScalaTokenTypes.tLPARENTHESIS)
Option(leftParenthesis)
}
def getRightParenthesis: Option[PsiElement] = {
val rightParenthesis = findChildByType[PsiElement](ScalaTokenTypes.tRPARENTHESIS)
Option(rightParenthesis)
}
override def toString: String = "CatchBlock"
}
|
jastice/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScCatchBlockImpl.scala
|
Scala
|
apache-2.0
| 816 |
package lang.lambdaref
import name.Gensym
import name.Identifier.ID
import ref.{Declaration, RefGraph, Reference, Structural}
case class App(e1: Exp, e2: Exp) extends Exp {
override def retarget(retargeting: Map[Reference, Option[Declaration]]): Exp =
App(e1.retarget(retargeting), e2.retarget(retargeting))
override def resolveRefs: RefGraph = e1.resolveRefs + e2.resolveRefs
def substGraph(w: String, e: Exp) = App(e1.substGraph(w, e), e2.substGraph(w, e))
def normalizeGraph = e1.normalizeGraph match {
case Lam(x, body) => body.substGraph(x, e2).normalizeGraph
case v1 => App(v1, e2.normalizeGraph)
}
override def equiv(obj: Structural, eqDecls: Map[ID, Declaration]): Boolean = obj match {
case that: App => this.e1.equiv(that.e1, eqDecls) && this.e2.equiv(that.e2, eqDecls)
case _ => false
}
override def asNominal(implicit gensym: Gensym) = lang.lambda.App(e1.asNominal, e2.asNominal)
}
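// Worked example of the beta reduction performed by normalizeGraph; Lam is referenced
// above, while Var is an assumed variable-reference node in this package:
//   App(Lam("x", Var("x")), Var("y")).normalizeGraph
//   // the Lam branch substitutes Var("y") for "x" in the body, yielding Var("y")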
|
seba--/hygienic-transformations
|
scala/src/main/scala/lang/lambdaref/App.scala
|
Scala
|
lgpl-3.0
| 937 |
/**
* Copyright (c) 2013 Saddle Development Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.saddle
import vec._
import index._
import groupby._
import ops._
import stats._
import util.Concat.Promoter
import scalar.Scalar
import java.io.OutputStream
import org.saddle.mat.MatCols
/**
* `Frame` is an immutable container for 2D data which is indexed along both axes
* (rows, columns) by associated keys (i.e., indexes).
*
* The primary use case is homogeneous data, but a secondary concern is to support
* heterogeneous data that is homogeneous only within any given column.
*
* The row index, column index, and constituent value data are all backed ultimately
* by arrays.
*
* `Frame` is effectively a doubly-indexed associative map whose row keys and col keys
* each have an ordering provided by the natural (provided) order of their backing
* arrays.
*
* Several factory and access methods are provided. In the following examples, assume
* that:
*
* {{{
* val f = Frame('a'->Vec(1,2,3), 'b'->Vec(4,5,6))
* }}}
*
* The `apply` method takes a row and col key and returns a slice of the original Frame:
*
* {{{
* f(0,'a') == Frame('a'->Vec(1))
* }}}
*
* `apply` also accepts a [[org.saddle.index.Slice]]:
*
* {{{
* f(0->1, 'b') == Frame('b'->Vec(4,5))
* f(0, *) == Frame('a'->Vec(1), 'b'->Vec(4))
* }}}
*
* You may slice using the `col` and `row` methods respectively, as follows:
*
* {{{
* f.col('a') == Frame('a'->Vec(1,2,3))
* f.row(0) == Frame('a'->Vec(1), 'b'->Vec(4))
* f.row(0->1) == Frame('a'->Vec(1,2), 'b'->Vec(4,5))
* }}}
*
* You can achieve a similar effect with `rowSliceBy` and `colSliceBy`
*
* The `colAt` and `rowAt` methods take an integer offset i into the Frame, and
* return a Series indexed by the opposing axis:
*
* {{{
* f.rowAt(0) == Series('a'->1, 'b'->4)
* }}}
*
* If there is a one-to-one relationship between offset i and key (i.e., no duplicate
* keys in the index), you may achieve the same effect via key as follows:
*
* {{{
* f.first(0) == Series('a'->1, 'b'->4)
* f.firstCol('a') == Series(1,2,3)
* }}}
*
* The `at` method returns an instance of a [[org.saddle.scalar.Scalar]], which behaves
* much like an `Option`; it can be either an instance of [[org.saddle.scalar.NA]] or a
* [[org.saddle.scalar.Value]] case class:
*
* {{{
* f.at(0, 0) == scalar.Scalar(1)
* }}}
*
* The `rowSlice` and `colSlice` methods allow slicing the Frame for locations in [i, j)
* irrespective of the value of the keys at those locations.
*
* {{{
* f.rowSlice(0,1) == Frame('a'->Vec(1), 'b'->Vec(4))
* }}}
*
* Finally, the method `raw` accesses a value directly, which may reveal the underlying
* representation of a missing value (so be careful).
*
* {{{
* f.raw(0,0) == 1
* }}}
*
* `Frame` may be used in arithmetic expressions which operate on two `Frame`s or on a
* `Frame` and a scalar value. In the former case, the two Frames will automatically
* align along their indexes:
*
* {{{
* f + f.shift(1) == Frame('a'->Vec(NA,3,5), 'b'->Vec(NA,9,11))
* }}}
*
* @param values A sequence of Vecs which comprise the columns of the Frame
* @param rowIx An index for the rows
* @param colIx An index for the columns
* @tparam RX The type of row keys
* @tparam CX The type of column keys
* @tparam T The type of entries in the frame
*/
class Frame[RX: ST: ORD, CX: ST: ORD, T: ST](
private[saddle] val values: MatCols[T], val rowIx: Index[RX], val colIx: Index[CX])
extends NumericOps[Frame[RX, CX, T]] with Serializable {
require(values.numRows == rowIx.length, "Row index length is incorrect")
require(values.numCols == colIx.length, "Col index length is incorrect")
private var cachedMat: Option[Mat[T]] = None
private var cachedRows: Option[MatCols[T]] = None
/**
* Number of rows in the Frame
*/
def numRows: Int = values.numRows
/**
* Number of cols in the Frame
*/
def numCols: Int = values.numCols
/**
* Returns true if there are no values in the Frame
*/
def isEmpty: Boolean = (values.numRows == 0)
/**
* The transpose of the frame (swapping the axes)
*/
def T: Frame[CX, RX, T] = Frame(rows(), colIx, rowIx)
// ---------------------------------------------------------------
// extract columns by associated key(s); ignore non-existent keys
/**
* Given one or more column keys, slice out the corresponding column(s)
* @param keys Column key(s) (sequence)
*/
def col(keys: CX*): Frame[RX, CX, T] = col(keys.toArray)
/**
* Given a Slice of type of column key, slice out corresponding column(s)
* @param slice Slice containing appropriate key bounds
*/
def col(slice: Slice[CX]): Frame[RX, CX, T] = {
val (a, b) = slice(colIx)
Frame(values.slice(a, b), rowIx, colIx.sliceBy(slice))
}
/**
* Given an array of column keys, slice out the corresponding column(s)
* @param keys Array of keys
*/
def col(keys: Array[CX]): Frame[RX, CX, T] = {
if (values.numCols == 0)
Frame.empty[RX, CX, T]
else {
val locs = array.filter[Int](_ != -1)(colIx(keys))
colAt(locs)
}
}
/**
* Slice out a set of columns from the frame
* @param from Key from which to begin slicing
* @param to Key at which to end slicing
* @param inclusive Whether to include 'to' key; true by default
*/
def colSliceBy(from: CX, to: CX, inclusive: Boolean = true): Frame[RX, CX, T] = {
val tmp = Series(values : _*).setIndex(colIx)
val res = tmp.sliceBy(from, to, inclusive)
Frame(res.values.toArray, rowIx, res.index)
}
// -----------------------------------------
// access columns by particular location(s)
/**
* Access frame column at a particular integer offset
* @param loc integer offset
*/
def colAt(loc: Int): Series[RX, T] = Series(values(loc), rowIx)
/**
* Access frame columns at particular integer offsets
* @param locs a sequence of integer offsets
*/
def colAt(locs: Int*): Frame[RX, CX, T] = colAt(locs.toArray)
/**
* Access frame columns at particular integer offsets
* @param locs an array of integer offsets
*/
def colAt(locs: Array[Int]): Frame[RX, CX, T] =
if (values.numCols == 0)
Frame.empty[RX, CX, T]
else
Frame(values.take(locs), rowIx, colIx.take(locs))
/**
* Access frame columns specified by a slice
* @param slice a slice specifier
*/
def colAt(slice: Slice[Int]): Frame[RX, CX, T] = {
val idx = IndexIntRange(numCols)
val pair = slice(idx)
Frame(values.slice(pair._1, pair._2), rowIx, colIx.slice(pair._1, pair._2))
}
/**
* Access frame columns between two integer offsets, [from, until)
* @param from Beginning offset
* @param until One past ending offset
* @param stride Optional increment between offsets
*/
def colSlice(from: Int, until: Int, stride: Int = 1): Frame[RX, CX, T] = {
val lb = math.max(0, from)
val ub = math.min(numCols, until)
val taker = array.range(lb, ub, stride)
Frame(values.take(taker), rowIx, colIx.take(taker))
}
/**
* Split Frame into two frames at column position c
* @param c Position at which to split Frame
*/
def colSplitAt(c: Int): (Frame[RX, CX, T], Frame[RX, CX, T]) =
(colSlice(0, c), colSlice(c, numCols))
/**
* Split Frame into two frames at column key k
* @param k Key at which to split Frame
*/
def colSplitBy(k: CX): (Frame[RX, CX, T], Frame[RX, CX, T]) =
colSplitAt(colIx.lsearch(k))
// ---------------------------------------------------------------
// extract rows by associated key(s); ignore non-existent keys
/**
* Given one or more row keys, slice out the corresponding row(s)
* @param keys Row key(s) (sequence)
*/
def row(keys: RX*): Frame[RX, CX, T] = row(keys.toArray)
/**
* Given a Slice of type of row key, slice out corresponding row(s)
* @param slice Slice containing appropriate key bounds
*/
def row(slice: Slice[RX]): Frame[RX, CX, T] = {
val (a, b) = slice(rowIx)
Frame(values.map(v => v.slice(a, b)), rowIx.sliceBy(slice), colIx)
}
/**
* Given an array of row keys, slice out the corresponding row(s)
* @param keys Array of keys
*/
def row(keys: Array[RX]): Frame[RX, CX, T] = {
if (values.numRows == 0)
Frame.empty[RX, CX, T]
else {
val locs = array.filter[Int](_ != -1)(rowIx(keys))
rowAt(locs)
}
}
/**
* Slice out a set of rows from the frame
* @param from Key from which to begin slicing
* @param to Key at which to end slicing
* @param inclusive Whether to include 'to' key; true by default
*/
def rowSliceBy(from: RX, to: RX, inclusive: Boolean = true): Frame[RX, CX, T] = {
val start = rowIx.lsearch(from)
val end = if (inclusive) rowIx.rsearch(to) else rowIx.lsearch(to)
Frame(values.map(v => v.slice(start, end)), rowIx.slice(start, end), colIx)
}
// -----------------------------------------
// access rows by particular location(s)
/**
* Access frame row at a particular integer offset
* @param loc integer offset
*/
def rowAt(loc: Int): Series[CX, T] = Series(rows()(loc), colIx)
/**
* Access frame rows at particular integer offsets
* @param locs a sequence of integer offsets
*/
def rowAt(locs: Int*): Frame[RX, CX, T] = rowAt(locs.toArray)
/**
* Access frame rows at particular integer offsets
* @param locs an array of integer offsets
*/
def rowAt(locs: Array[Int]): Frame[RX, CX, T] =
Frame(values.map(v => v.take(locs)), rowIx.take(locs), colIx)
/**
* Access frame rows specified by a slice
* @param slice a slice specifier
*/
def rowAt(slice: Slice[Int]): Frame[RX, CX, T] = {
val idx = IndexIntRange(numRows)
val pair = slice(idx)
Frame(values.map(_.slice(pair._1, pair._2)), rowIx.slice(pair._1, pair._2), colIx)
}
/**
* Access frame rows between two integer offsets, [from, until)
* @param from Beginning offset
* @param until One past ending offset
* @param stride Optional increment between offsets
*/
def rowSlice(from: Int, until: Int, stride: Int = 1): Frame[RX, CX, T] = {
Frame(values.map(v => v.slice(from, until, stride)), rowIx.slice(from, until, stride), colIx)
}
/**
* Split Frame into two frames at row position r
* @param r Position at which to split Frame
*/
def rowSplitAt(r: Int): (Frame[RX, CX, T], Frame[RX, CX, T]) =
(rowSlice(0, r), rowSlice(r, numRows))
/**
* Split Frame into two frames at row key k
* @param k Key at which to split Frame
*/
def rowSplitBy(k: RX): (Frame[RX, CX, T], Frame[RX, CX, T]) =
rowSplitAt(rowIx.lsearch(k))
// --------------------------------------------
// access a two dimensional sub-block by key(s)
/**
* Slice frame by row and column slice specifiers
* @param rix A row slice
* @param cix A col slice
*/
def apply(rix: Slice[RX], cix: Slice[CX]): Frame[RX, CX, T] = col(cix).row(rix)
/**
* Slice frame by row slice and array of column keys
* @param rix A row slice
* @param cix An array of column keys
*/
def apply(rix: Slice[RX], cix: Array[CX]): Frame[RX, CX, T] = col(cix).row(rix)
/**
* Slice frame by array of row keys and a col slice
* @param rix An array of row keys
* @param cix A col slice
*/
def apply(rix: Array[RX], cix: Slice[CX]): Frame[RX, CX, T] = col(cix).row(rix)
/**
* Slice frame by an array of row keys and an array of col keys
* @param rix An array of row keys
* @param cix An array of col keys
*/
def apply(rix: Array[RX], cix: Array[CX]): Frame[RX, CX, T] = col(cix).row(rix)
// -----------------------------------------
// access grid by particular location(s)
/**
* Access a (Scalar-boxed) value from within the Frame
* @param r Integer row offset
* @param c Integer col offset
*/
def at(r: Int, c: Int): Scalar[T] = values.at(r, c)
/**
* Access a slice of the Frame by integer offsets
* @param r Array of row offsets
* @param c Array of col offsets
*/
def at(r: Array[Int], c: Array[Int]): Frame[RX, CX, T] = rowAt(r).colAt(c)
/**
* Access a slice of the Frame by integer offsets
* @param r Array of row offsets
* @param c Integer col offset
*/
def at(r: Array[Int], c: Int): Series[RX, T] = rowAt(r).colAt(c)
/**
* Access a slice of the Frame by integer offsets
* @param r Integer row offset
* @param c Array of col offsets
*/
def at(r: Int, c: Array[Int]): Series[CX, T] = colAt(c).rowAt(r)
/**
* Access a slice of the Frame by Slice parameters
* @param r Slice to apply to rows
* @param c Slice to apply to cols
*/
def at(r: Slice[Int], c: Slice[Int]): Frame[RX, CX, T] = rowAt(r).colAt(c)
/**
* Access the raw (unboxed) value at an offset within the Frame
* @param r Integer row offset
* @param c Integer col offset
*/
def raw(r: Int, c: Int): T = values(r, c)
// -----------------------------------------
// re-index frame; non-existent keys map to NA
/**
* Create a new Frame whose indexes are formed from the provided arguments, and whose values
* are derived from the original Frame. Keys in the provided indices which do not map to
* existing values will map to NA in the new Frame.
* @param rix Sequence of keys to be the row index of the result Frame
* @param cix Sequence of keys to be the col index of the result Frame
*/
def reindex(rix: Index[RX], cix: Index[CX]): Frame[RX, CX, T] =
reindexRow(rix).reindexCol(cix)
/**
* Create a new Frame whose row index is formed of the provided argument, and whose values
* are derived from the original Frame.
* @param rix Sequence of keys to be the row index of the result Frame
*/
def reindexRow(rix: Index[RX]): Frame[RX, CX, T] = {
val ixer = rowIx.getIndexer(rix)
ixer.map { i =>
Frame(values.map(v => Vec(array.take(v, i, v.scalarTag.missing))), rix, colIx)
} getOrElse this
}
/**
* Create a new Frame whose col index is formed of the provided argument, and whose values
* are derived from the original Frame.
* @param cix Sequence of keys to be the col index of the result Frame
*/
def reindexCol(cix: Index[CX]): Frame[RX, CX, T] = {
val ixer = colIx.getIndexer(cix)
ixer.map { i =>
Frame(values.take(i), rowIx, cix)
} getOrElse this
}
// -----------------------------------------
// access columns by type
/**
* Extract columns from a heterogeneous Frame which match the provided type.
* The result is a homogeneous frame consisting of the selected data.
* @tparam U The type of columns to extract
*/
def colType[U: ST]: Frame[RX, CX, U] = {
val (columns, locs) = values.takeType[U]
Frame(columns, rowIx, colIx.take(locs))
}
/**
* Extract columns from a heterogeneous Frame which match either of the provided
* types. The result is a heterogeneous frame consisting of the selected data.
* @tparam U1 First type of columns to extract
* @tparam U2 Second type of columns to extract
*/
def colType[U1: ST, U2: ST]: Frame[RX, CX, Any] = {
val (columns1, locs1) = values.takeType[U1]
val (columns2, locs2) = values.takeType[U2]
val frm = Panel(columns1 ++ columns2, rowIx, colIx.take(locs1) concat colIx.take(locs2))
val tkr = array.argsort(array.flatten(Seq(locs1, locs2)))
frm.colAt(tkr)
}
// ----------------------------------------
// generate or use a new index
/**
* Create a new Frame using the current values but with the new row index. Positions
* of the values do not change. Length of new index must be equal to number of rows.
* @param newIx A new Index
* @tparam Y Type of elements of new Index
*/
def setRowIndex[Y: ST: ORD](newIx: Index[Y]): Frame[Y, CX, T] =
Frame(values, newIx, colIx) withMat cachedMat
/**
* Create a new Frame using the current values but with the new row index specified
* by the column at a particular offset, and with that column removed from the frame
* data body.
*/
def withRowIndex(col: Int)(implicit ordT: ORD[T]): Frame[T, CX, T] =
this.setRowIndex(Index(this.colAt(col).toVec)).filterAt(_ != col)
/**
* Overloaded method to create hierarchical index from two cols.
*/
def withRowIndex(col1: Int, col2: Int)(implicit ordT: ORD[T]): Frame[(T, T), CX, T] = {
val newIx: Index[(T, T)] = Index.make(this.colAt(col1).toVec, this.colAt(col2).toVec)
this.setRowIndex(newIx).filterAt { case c => !Set(col1, col2).contains(c) }
}
/**
* Map a function over the row index, resulting in a new Frame
*
* @param fn The function RX => Y with which to map
* @tparam Y Result type of index, ie Index[Y]
*/
def mapRowIndex[Y: ST: ORD](fn: RX => Y): Frame[Y, CX, T] =
Frame(values, rowIx.map(fn), colIx) withMat cachedMat
/**
* Create a new Frame using the current values but with the new col index. Positions
* of the values do not change. Length of new index must be equal to number of cols.
* @param newIx A new Index
* @tparam Y Type of elements of new Index
*/
def setColIndex[Y: ST: ORD](newIx: Index[Y]): Frame[RX, Y, T] =
Frame(values, rowIx, newIx) withMat cachedMat
/**
* Create a new Frame using the current values but with the new col index specified
* by the row at a particular offset, and with that row removed from the frame
* data body.
*/
def withColIndex(row: Int)(implicit ordT: ORD[T]): Frame[RX, T, T] =
this.setColIndex(Index(this.rowAt(row).toVec)).rfilterAt(_ != row)
/**
* Overloaded method to create hierarchical index from two rows.
*/
def withColIndex(row1: Int, row2: Int)(implicit ordT: ORD[T]): Frame[RX, (T, T), T] = {
val newIx: Index[(T, T)] = Index.make(this.rowAt(row1).toVec, this.rowAt(row2).toVec)
this.setColIndex(newIx).rfilterAt { case r => !Set(row1, row2).contains(r) }
}
/**
* Map a function over the col index, resulting in a new Frame
*
* @param fn The function CX => Y with which to map
* @tparam Y Result type of index, ie Index[Y]
*/
def mapColIndex[Y: ST: ORD](fn: CX => Y): Frame[RX, Y, T] =
Frame(values, rowIx, colIx.map(fn)) withMat cachedMat
/**
* Create a new Frame whose values are the same, but whose row index has been changed
* to the bound [0, numRows), as in an array.
*/
def resetRowIndex: Frame[Int, CX, T] =
Frame(values, IndexIntRange(numRows), colIx) withMat cachedMat
/**
* Create a new Frame whose values are the same, but whose col index has been changed
* to the bound [0, numCols), as in an array.
*/
def resetColIndex: Frame[RX, Int, T] =
Frame(values, rowIx, IndexIntRange(numCols)) withMat cachedMat
// ----------------------------------------
// some helpful ops
/**
* Extract first n rows
*
* @param n number of rows to extract
*/
def head(n: Int): Frame[RX, CX, T] = transform(_.head(n))
/**
* Extract last n rows
*
* @param n number of rows to extract
*/
def tail(n: Int): Frame[RX, CX, T] = transform(_.tail(n))
/**
* Extract first n columns
*
* @param n number of columns to extract
*/
def headCol(n: Int) = Frame(values.take(n), rowIx, colIx.head(n))
/**
* Extract last n columns
*
* @param n number of columns to extract
*/
def tailCol(n: Int) = Frame(values.takeRight(n), rowIx, colIx.tail(n))
/**
* Extract first row matching a particular key
*
* @param k Key to match
*/
def first(k: RX): Series[CX, T] = {
val loc = rowIx.getFirst(k)
if (loc == -1) emptyRow else rowAt(loc)
}
/**
* Extract last row matching a particular key
*
* @param k Key to match
*/
def last(k: RX): Series[CX, T] = {
val loc = rowIx.getLast(k)
if (loc == -1) Series.empty[CX, T] else rowAt(loc)
}
/**
* Extract first col matching a particular key
*
* @param k Key to match
*/
def firstCol(k: CX): Series[RX, T] = {
val loc = colIx.getFirst(k)
if (loc == -1) emptyCol else colAt(loc)
}
/**
   * Extract last col matching a particular key
*
* @param k Key to match
*/
def lastCol(k: CX): Series[RX, T] = {
val loc = colIx.getLast(k)
if (loc == -1) emptyCol else colAt(loc)
}
/**
* Return empty series of type equivalent to a row of frame
*
*/
def emptyRow: Series[CX, T] = Series.empty[CX, T]
/**
* Return empty series of type equivalent to a column of frame
*
*/
def emptyCol: Series[RX, T] = Series.empty[RX, T]
/**
* Create a new Frame whose rows are sorted according to the row
* index keys
*/
def sortedRIx: Frame[RX, CX, T] = if (rowIx.isMonotonic) this else {
val taker = rowIx.argSort
Frame(values.map(_.take(taker)), rowIx.take(taker), colIx)
}
/**
* Create a new Frame whose cols are sorted according to the col
* index keys
*/
def sortedCIx: Frame[RX, CX, T] = if (colIx.isMonotonic) this else {
val taker = colIx.argSort
Frame(values.take(taker), rowIx, colIx.take(taker))
}
/**
* Create a new Frame whose rows are sorted primarily on the values
* in the first column specified in the argument list, and then on
* the values in the next column, etc.
* @param locs Location of columns containing values to sort on
*/
def sortedRows(locs: Int*)(implicit ev: ORD[T]) = {
var order = array.range(0, numRows)
var j = locs.length - 1
while(j >= 0) {
val tosort = colAt(locs(j)).values.take(order)
val reordr = Index(tosort).argSort
order = array.take(order, reordr, sys.error("Logic error"))
j -= 1
}
Frame(values.map(_.take(order)), rowIx.take(order), colIx)
}
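  // Usage sketch (hypothetical data): sort rows by the values held in particular columns.
  //   val f = Frame(Vec(3, 1, 2), Vec(30, 10, 20))
  //   f.sortedRows(0)      // rows reordered so that column 0 reads 1, 2, 3
  //   f.sortedRows(1, 0)   // primary sort on column 1, ties broken by column 0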
/**
* Create a new Frame whose cols are sorted primarily on the values
* in the first row specified in the argument list, and then on
* the values in the next row, etc.
* @param locs Location of rows containing values to sort on
*/
def sortedCols(locs: Int*)(implicit ev: ORD[T]) = {
var order = array.range(0, numCols)
var j = locs.length - 1
while(j >= 0) {
val tosort = rowAt(locs(j)).values.take(order)
val reordr = Index(tosort).argSort
order = array.take(order, reordr, sys.error("Logic error"))
j -= 1
}
Frame(values.take(order), rowIx, colIx.take(order))
}
/**
* Create a new Frame whose rows are sorted by the result of a function
* acting on each row.
* @param f Function from a single row (represented as series) to a value having an
* ordering
* @tparam Q Result type of the function
*/
def sortedRowsBy[Q: ORD](f: Series[CX, T] => Q): Frame[RX, CX, T] = {
val perm = array.range(0, numRows).sortBy((i: Int) => f(rowAt(i)))
rowAt(perm)
}
/**
* Create a new Frame whose cols are sorted by the result of a function
* acting on each col.
* @param f Function from a single col (represented as series) to a value having an
* ordering
* @tparam Q Result type of the function
*/
def sortedColsBy[Q: ORD](f: Series[RX, T] => Q): Frame[RX, CX, T] = {
val perm = array.range(0, numCols).sortBy((i: Int) => f(colAt(i)))
colAt(perm)
}
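  // Usage sketch (hypothetical data; assumes Vec.raw(i) accesses the i-th element, which is
  // not defined in this file):
  //   val f = Frame(Vec(3, 1, 2), Vec(30, 10, 20))
  //   f.sortedRowsBy(r => r.toVec.raw(0))   // sort rows by their value in the first column
  //   f.sortedColsBy(c => c.toVec.raw(0))   // sort cols by their value in the first row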
/**
* Map over each triple (r, c, v) in the Frame, returning a new frame from the resulting
* triples.
*/
def map[SX: ST: ORD, DX: ST: ORD, U: ST](f: ((RX, CX, T)) => (SX, DX, U)): Frame[SX, DX, U] = {
Series(toSeq.map(f).map { case (sx, dx, u) => ((sx, dx) -> u) } : _*).pivot
}
/**
* Map over each triple (r, c, v) in the Frame, flattening results, and returning a new frame from
* the resulting triples.
*/
def flatMap[SX: ST: ORD, DX: ST: ORD, U: ST](f: ((RX, CX, T)) => Traversable[(SX, DX, U)]): Frame[SX, DX, U] = {
Series(toSeq.flatMap(f).map { case (sx, dx, u) => ((sx, dx) -> u) } : _*).pivot
}
/**
* Map over the values of the Frame. Applies a function to each (non-na) value in the frame,
* returning a new frame whose indices remain the same.
*
* @param f Function from T to U
* @tparam U The type of the resulting values
*/
def mapValues[U: ST](f: T => U): Frame[RX, CX, U] = Frame(values.map(v => v.map(f)), rowIx, colIx)
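  // Usage sketch (hypothetical data): shape and indexes are preserved, only values change.
  //   val f = Frame("a" -> Vec(1, 2), "b" -> Vec(3, 4))
  //   f.mapValues(_ * 10)   // same 2 x 2 frame holding 10, 20, 30, 40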
/**
   * Create a new Frame in which every value for which the mask predicate function
   * evaluates to true is replaced with NA
* @param f Function from T to Boolean
*/
def mask(f: T => Boolean): Frame[RX, CX, T] = Frame(values.map(v => v.mask(f)), rowIx, colIx)
/**
* Create a new Frame whose columns follow the rule that, wherever the mask Vec is true,
* the column value is masked with NA
* @param m Mask Vec[Boolean]
*/
def mask(m: Vec[Boolean]): Frame[RX, CX, T] = Frame(values.map(v => v.mask(m)), rowIx, colIx)
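  // Usage sketch (hypothetical data):
  //   val f = Frame("a" -> Vec(1, 2), "b" -> Vec(3, 4))
  //   f.mask(_ > 2)               // values greater than 2 become NA
  //   f.mask(Vec(false, true))    // the second slot of every column (i.e. row 1) becomes NA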
/**
* Joins two frames along both their indexes and applies a function to each pair
* of values; when either value is NA, the result of the function is forced to be NA.
* @param other Other Frame
* @param rhow The type of join to effect on the rows
* @param chow The type of join to effect on the cols
* @param f The function to apply
* @tparam U The type of other frame values
* @tparam V The result type of the function
*/
def joinMap[U: ST, V: ST](other: Frame[RX, CX, U],
rhow: JoinType = LeftJoin,
chow: JoinType = RightJoin)(f: (T, U) => V): Frame[RX, CX, V] = {
val (l, r) = align(other, rhow, chow)
val result = l.values.zip(r.values).map { case (v1, v2) => VecImpl.zipMap(v1, v2)(f) }
Frame(result, l.rowIx, l.colIx)
}
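  // Usage sketch (hypothetical data): element-wise combination of two frames after alignment.
  //   val a = Frame("x" -> Vec(1, 2))
  //   val b = Frame("x" -> Vec(10, 20))
  //   a.joinMap(b)(_ + _)   // single column "x" holding 11, 22; NA wherever either side is NA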
/**
* Map a function over each column vector and collect the results into a Frame respecting
* the original indexes.
* @param f Function acting on Vec[T] and producing another Vec
* @tparam U Type of result Vec of the function
*/
def mapVec[U: ST](f: Vec[T] => Vec[U]): Frame[RX, CX, U] = Frame(values.map(f), rowIx, colIx)
/**
* Apply a function to each column series which results in a single value, and return the
* series of results indexed by original column index.
* @param f Function taking a column (series) to a value
* @tparam U The output type of the function
*/
def reduce[U: ST](f: Series[RX, T] => U): Series[CX, U] =
Series(Vec(values.map(v => f(Series(v, rowIx))) : _*), colIx)
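  // Usage sketch (hypothetical data): collapse each column to a single value.
  //   val f = Frame("a" -> Vec(1, 2), "b" -> Vec(3, 4))
  //   f.reduce(_.hasNA)   // Series[String, Boolean] flagging, per column, whether any value is NA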
/**
* Apply a function to each column series which results in another series (having possibly
   * a different index); return a new frame whose row index is the full outer join of all
* the intermediately produced series (fast when all series have the same index), and having
* the original column index.
* @param f Function to operate on each column as a series
* @tparam U Type of values of result series of function
* @tparam SX Type of index of result series of function
*/
def transform[U: ST, SX: ST: ORD](f: Series[RX, T] => Series[SX, U]): Frame[SX, CX, U] =
Frame(values.map(v => f(Series(v, rowIx))), colIx)
// groupBy functionality (on rows)
/**
* Construct a [[org.saddle.groupby.FrameGrouper]] with which further computations, such
* as combine or transform, may be performed. The groups are constructed from the keys of
* the row index, with each unique key corresponding to a group.
*/
def groupBy = FrameGrouper(this)
/**
* Construct a [[org.saddle.groupby.FrameGrouper]] with which further computations, such
* as combine or transform, may be performed. The groups are constructed from the result
* of the function applied to the keys of the row index; each unique result of calling the
* function on elements of the row index corresponds to a group.
* @param fn Function from RX => Y
* @tparam Y Type of function codomain
*/
def groupBy[Y: ST: ORD](fn: RX => Y) = FrameGrouper(this.rowIx.map(fn), this)
/**
* Construct a [[org.saddle.groupby.FrameGrouper]] with which further computations, such
* as combine or transform, may be performed. The groups are constructed from the keys of
* the provided index, with each unique key corresponding to a group.
* @param ix Index with which to perform grouping
* @tparam Y Type of elements of ix
*/
def groupBy[Y: ST: ORD](ix: Index[Y]) = FrameGrouper(ix, this)
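  // Usage sketch (hypothetical data): rows sharing a key fall into the same group; combine or
  // transform can then be applied through the returned FrameGrouper.
  //   val f = Frame(Seq(Series("a" -> 1, "a" -> 2, "b" -> 3)), Index("v"))
  //   f.groupBy                            // two groups: the rows keyed "a" and the row keyed "b"
  //   f.groupBy((k: String) => k.length)   // groups derived from a function of the row keys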
// concatenate two frames together (vertically), must have same number of columns
/**
* Concatenate two Frame instances together (vertically) whose indexes share the same type
* of elements, and where there exists some way to join the values of the Frames. For
* instance, Frame[X, Y, Double] `concat` Frame[X, Y, Int] will promote Int to Double as
* a result of the implicit existence of a Promoter[Double, Int, Double] instance.
* The resulting row index will simply be the concatenation of the input row indexes, and
* the column index will be the joint index (with join type specified as argument).
*
* @param other Frame[RX, CX, U] to concat
* @param pro Implicit evidence of Promoter
* @tparam U type of other Frame values
* @tparam V type of resulting Frame values
*/
def concat[U, V](other: Frame[RX, CX, U], how: JoinType = OuterJoin)(
implicit pro: Promoter[T, U, V], mu: ST[U], md: ST[V]): Frame[RX, CX, V] = {
val ixc = colIx.join(other.colIx, how)
val lft = ixc.lTake.map(x => values.take(x)) getOrElse values
val rgt = ixc.rTake.map(x => other.values.take(x)) getOrElse other.values
val mfn = (v: Vec[T], u: Vec[U]) => v concat u
val zpp = lft zip rgt
val dat = zpp.map { case (top, bot) => mfn(top, bot) }
val idx = rowIx concat other.rowIx
Frame(dat, idx, ixc.index)
}
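  // Usage sketch (hypothetical data): Int values are promoted to Double via the implicit Promoter.
  //   val d = Frame("a" -> Vec(1.5, 2.5))
  //   val i = Frame("a" -> Vec(1, 2))
  //   d.concat(i)   // 4 x 1 Frame[Int, String, Double]; row index is 0, 1, 0, 1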
/**
   * Create a Frame retaining only those columns whose corresponding entry in the
   * Vec[Boolean] or Series[_, Boolean] predicate contains a true value.
* @param pred Series[_, Boolean] (or Vec[Boolean] which will implicitly convert)
*/
def where(pred: Series[_, Boolean]): Frame[RX, CX, T] = {
val newVals = values.zipWithIndex.flatMap(z => if (pred.values(z._2)) Seq(z._1) else Seq.empty[Vec[T]] )
val newIdx = VecImpl.where(Vec(this.colIx.toArray))(pred.values.toArray)
Frame(newVals, rowIx, Index(newIdx))
}
/**
* Shift the sequence of values relative to the row index by some offset,
* dropping those values which no longer associate with a key, and having
* those keys which no longer associate to a value instead map to NA.
* @param n Number to shift
*/
def shift(n: Int = 1): Frame[RX, CX, T] = Frame(values.map(_.shift(n)), rowIx, colIx)
/**
* In each column, replaces all NA values for which there is a non-NA value at
* a prior offset with the corresponding most-recent, non-NA value. See Vec.pad
*/
def pad: Frame[RX, CX, T] = mapVec(_.pad)
/**
* Same as above, but limits the number of observations padded. See Vec.padAtMost
*/
def padAtMost(n: Int): Frame[RX, CX, T] = mapVec(_.padAtMost(n))
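  // Usage sketch (hypothetical data):
  //   val f = Frame("a" -> Vec(1, 2, 3))
  //   f.shift(1)           // values are offset by one slot against the row index; the vacated slot is NA
  //   f.mask(_ == 2).pad   // the masked middle slot is refilled with the most recent prior value, 1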
/**
* Return Frame whose columns satisfy a predicate function operating on that
* column
* @param pred Predicate function from Series[RX, T] => Boolean
*/
def filter(pred: Series[RX, T] => Boolean) = where(reduce(v => pred(v)))
/**
* Return Frame whose columns satisfy a predicate function operating on the
* column index
* @param pred Predicate function from CX => Boolean
*/
def filterIx(pred: CX => Boolean) = where(colIx.toVec.map(pred))
/**
* Return Frame whose columns satisfy a predicate function operating on the
* column index offset
   * @param pred Predicate function from Int => Boolean
*/
def filterAt(pred: Int => Boolean) = where(vec.range(0, numCols).map(pred))
/**
* Return Frame excluding any of those columns which have an NA value
*/
def dropNA: Frame[RX, CX, T] = filter(s => !s.hasNA)
/**
* Produce a Frame each of whose columns are the result of executing a function
* on a sliding window of each column series.
* @param winSz Window size
* @param f Function Series[X, T] => B to operate on sliding window
* @tparam B Result type of function
*/
def rolling[B: ST](winSz: Int, f: Series[RX, T] => B): Frame[RX, CX, B] = {
val tmp = values.map { v => Series(v, rowIx).rolling(winSz, f).values }
Frame(tmp, rowIx.slice(winSz - 1, values.numRows), colIx)
}
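  // Usage sketch (hypothetical data; assumes Vec.raw(i), which is not defined in this file):
  //   val f = Frame("a" -> Vec(1, 2, 3, 4))
  //   f.rolling(2, (w: Series[Int, Int]) => w.toVec.raw(0) + w.toVec.raw(1))
  //   // one column of pairwise sums 3, 5, 7, indexed by the last row key of each window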
/**
* Create a Series by rolling over winSz number of rows of the Frame at a
* time, and applying a function that takes those rows to a single value.
*
* @param winSz Window size to roll with
* @param f Function taking the (sub) frame to B
* @tparam B Result element type of Series
*/
def rollingFtoS[B: ST](winSz: Int, f: Frame[RX, CX, T] => B): Series[RX, B] = {
val buf = new Array[B](numRows - winSz + 1)
var i = winSz
while (i <= numRows) {
buf(i - winSz) = f(rowSlice(i - winSz, i))
i += 1
}
Series(Vec(buf), rowIx.slice(winSz - 1, numRows))
}
// ----------------------------------------
// joining
/**
* Perform a join with another Series[RX, T] according to the row index. The `how`
* argument dictates how the join is to be performed:
*
* - Left [[org.saddle.index.LeftJoin]]
* - Right [[org.saddle.index.RightJoin]]
* - Inner [[org.saddle.index.InnerJoin]]
* - Outer [[org.saddle.index.OuterJoin]]
*
* The result is a Frame whose row index is the result of the join, and whose column
   * index has been reset to [0, numCols), and whose values are sourced from the original
* Frame and Series.
*
* @param other Series to join with
* @param how How to perform the join
*/
def joinS(other: Series[RX, T], how: JoinType = LeftJoin): Frame[RX, Int, T] = {
val indexer = rowIx.join(other.index, how)
val lft = indexer.lTake.map { loc => values.map(_.take(loc)) } getOrElse values
val rgt = indexer.rTake.map { loc => other.values.take(loc) } getOrElse other.values
Frame(lft :+ rgt, indexer.index, IndexIntRange(colIx.length + 1))
}
/**
* Same as `joinS`, but preserve the column index, adding the specified index value,
* `newColIx` as an index for the `other` Series.
*/
def joinSPreserveColIx(other: Series[RX, T], how: JoinType = LeftJoin, newColIx: CX): Frame[RX, CX, T] = {
val resultingFrame = joinS(other, how)
val newColIndex = colIx.concat(Index(newColIx))
resultingFrame.setColIndex(newColIndex)
}
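  // Usage sketch (hypothetical data):
  //   val f = Frame("a" -> Series(1 -> 10, 2 -> 20))
  //   val s = Series(2 -> 200, 3 -> 300)
  //   f.joinS(s)                               // left join on row keys 1, 2; columns renumbered 0, 1
  //   f.joinSPreserveColIx(s, OuterJoin, "b")  // row keys 1, 2, 3; columns keep labels "a" and "b"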
/**
* Perform a join with another Frame[RX, CX, T] according to the row index. The `how`
* argument dictates how the join is to be performed:
*
* - Left [[org.saddle.index.LeftJoin]]
* - Right [[org.saddle.index.RightJoin]]
* - Inner [[org.saddle.index.InnerJoin]]
* - Outer [[org.saddle.index.OuterJoin]]
*
* The result is a Frame whose row index is the result of the join, and whose column
* index has been reset to [0, M + N), where M is the number of columns in the left
* frame and N in the right, and whose values are sourced from the original Frames.
*
* @param other Frame to join with
* @param how How to perform the join
*/
def join(other: Frame[RX, _, T], how: JoinType = LeftJoin): Frame[RX, Int, T] = {
val indexer = rowIx.join(other.rowIx, how)
val lft = indexer.lTake.map { loc => values.map(_.take(loc))} getOrElse values
val rgt = indexer.rTake.map { loc => other.values.map(_.take(loc))} getOrElse other.values
Frame(lft ++ rgt, indexer.index, IndexIntRange(colIx.length + other.colIx.length))
}
/**
* Same as `join`, but preserves column index
*/
def joinPreserveColIx(other: Frame[RX, CX, T], how: JoinType = LeftJoin): Frame[RX, CX, T] = {
val resultingFrame = join(other, how)
val newColIndex = colIx.concat(other.colIx)
resultingFrame.setColIndex(newColIndex)
}
/**
   * Same as joinS, but the values of the Series to join with may be of type Any, so that the
* resulting Frame may be heterogeneous in its column types.
*/
def joinAnyS(other: Series[RX, _], how: JoinType = LeftJoin): Frame[RX, Int, Any] = {
val indexer = rowIx.join(other.index, how)
val lft = indexer.lTake.map { loc => values.map(_.take(loc)) } getOrElse values
val rgt = indexer.rTake.map { loc => other.values.take(loc) } getOrElse other.values
Panel(lft :+ rgt, indexer.index, IndexIntRange(colIx.length + 1))
}
/**
* Same as `joinAnyS`, but preserve the column index, adding the specified index value,
* `newColIx` as an index for the `other` Series.
*/
def joinAnySPreserveColIx(other: Series[RX, _], how: JoinType = LeftJoin,
newColIx: CX): Frame[RX, CX, Any] = {
val resultingFrame = joinAnyS(other, how)
val newColIndex = colIx.concat(Index(newColIx))
resultingFrame.setColIndex(newColIndex)
}
/**
   * Same as join, but the values of the Frame to join with may be of type Any, so that the
* resulting Frame may be heterogeneous in its column types.
*/
def joinAny(other: Frame[RX, _, _], how: JoinType = LeftJoin): Frame[RX, Int, Any] = {
val indexer = rowIx.join(other.rowIx, how)
val lft = indexer.lTake.map { loc => values.map(_.take(loc))} getOrElse values
val rgt = indexer.rTake.map { loc => other.values.map(_.take(loc))} getOrElse other.values
Panel(lft ++ rgt, indexer.index, IndexIntRange(colIx.length + other.colIx.length))
}
/**
* Same as `joinAny`, but preserves column index
*/
def joinAnyPreserveColIx(other: Frame[RX, CX, _], how: JoinType = LeftJoin): Frame[RX, CX, Any] = {
val resultingFrame = joinAny(other, how)
val newColIndex = colIx.concat(other.colIx)
resultingFrame.setColIndex(newColIndex)
}
/**
* Aligns this frame with another frame, returning the left and right frames aligned
   * to each other's indexes according to the provided parameters
*
* @param other Other frame to align with
* @param rhow How to perform the join on the row indexes
* @param chow How to perform the join on the col indexes
*/
def align[U: ST](other: Frame[RX, CX, U],
rhow: JoinType = OuterJoin,
chow: JoinType = OuterJoin): (Frame[RX, CX, T], Frame[RX, CX, U]) = {
val rJoin = rowIx.join(other.rowIx, rhow)
val cJoin = colIx.join(other.colIx, chow)
val lvals: MatCols[T] = cJoin.lTake.map(locs => values.take(locs)).getOrElse(values)
val rvals: MatCols[U] = cJoin.rTake.map(locs => other.values.take(locs)).getOrElse(other.values)
val vecs = for (i <- 0 until lvals.length) yield {
val lvec: Vec[T] = rJoin.lTake.map(locs => lvals(i).take(locs)).getOrElse(lvals(i))
val rvec: Vec[U] = rJoin.rTake.map(locs => rvals(i).take(locs)).getOrElse(rvals(i))
(lvec, rvec)
}
val (lvecs, rvecs) = vecs.unzip
(Frame(lvecs, rJoin.index, cJoin.index), Frame(rvecs, rJoin.index, cJoin.index))
}
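  // Usage sketch: after align, both frames share identical (outer-joined by default) indexes,
  // so element-wise operations line up position by position.
  //   val (l, r) = f.align(g)
  //   // l.rowIx == r.rowIx and l.colIx == r.colIx hold by construction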
// ------------------------------------------------
// reshaping
/**
* Drop all columns from the Frame which have nothing but NA values.
*/
def squeeze: Frame[RX, CX, T] = filter(s => !VecImpl.isAllNA(s.toVec))
/**
* Melt stacks the row index of arity N with the column index of arity M to form a result index
* of arity N + M, producing a 1D Series whose values are from the original Frame as indexed by
* the corresponding keys.
*
* For example, given:
*
* {{{
* Frame(1 -> Series('a' -> 1, 'b' -> 3), 2 -> Series('a' -> 2, 'b' -> 4)).melt
* }}}
*
* produces:
*
* {{{
* res0: org.saddle.Series[(Char, Int),Int] =
* [4 x 1]
* a 1 => 1
* 2 => 2
* b 1 => 3
* 2 => 4
* }}}
*
*
* @param melter Implicit evidence for a Melter for the two indexes
* @tparam W Output type (tuple of arity N + M)
*/
def melt[W](implicit melter: Melter[RX, CX, W]): Series[W, T] = {
val ix = Array.ofDim[W](numRows * numCols)(melter.tag)
var k = 0
var i = 0
while (i < numRows) {
var j = 0
while (j < numCols) {
ix(k) = melter(rowIx.raw(i), colIx.raw(j))
k += 1
j += 1
}
i += 1
}
implicit val ord = melter.ord
implicit val tag = melter.tag
Series[W, T](toMat.toVec, Index(ix))
}
/**
* Stack pivots the innermost column labels to the innermost row labels. That is, it splits
* a col index of tuple keys of arity N into a new col index having arity N-1 and a remaining
* index C, and forms a new row index by stacking the existing row index with C. The
* resulting Frame has values as in the original Frame indexed by the corresponding keys. It
* does the reverse of unstack.
*
* @param splt An implicit instance of Splitter to do the splitting
* @param stkr An implicit instance of Stacker to do the stacking
* @tparam O1 The N-1 arity column index type
* @tparam O2 The 1-arity type of split-out index C
* @tparam V The type of the stacked row index
*/
def stack[O1, O2, V](implicit splt: Splitter[CX, O1, O2], stkr: Stacker[RX, O2, V],
ord1: ORD[O1], ord2: ORD[O2], m1: ST[O1], m2: ST[O2]): Frame[V, O1, T] = {
T.unstack.T
}
/**
* Unstack pivots the innermost row labels to the innermost col labels. That is, it splits
* a row index of tuple keys of arity N into a new row index having arity N-1 and a remaining
* index R, and forms a new col index by stacking the existing col index with R. The
* resulting Frame has values as in the original Frame indexed by the corresponding keys.
*
* For example:
*
* {{{
* scala> Frame(Series(Vec(1,2,3,4), Index(('a',1),('a',2),('b',1),('b',2))), Series(Vec(5,6,7,8), Index(('a',1),('a',2),('b',1),('b',2))))
* res1: org.saddle.Frame[(Char, Int),Int,Int] =
* [4 x 2]
* 0 1
* -- --
* a 1 -> 1 5
* 2 -> 2 6
* b 1 -> 3 7
* 2 -> 4 8
*
* scala> res1.unstack
* res2: org.saddle.Frame[Char,(Int, Int),Int] =
* [2 x 4]
* 0 1
* 1 2 1 2
* -- -- -- --
* a -> 1 2 5 6
* b -> 3 4 7 8
* }}}
*
* @param splt An implicit instance of Splitter to do the splitting
* @param stkr An implicit instance of Stacker to do the stacking
* @tparam O1 The N-1 arity row index type
* @tparam O2 The 1-arity type of split-out index R
* @tparam V The type of the stacked col index
*/
def unstack[O1, O2, V](implicit splt: Splitter[RX, O1, O2], stkr: Stacker[CX, O2, V],
ord1: ORD[O1], ord2: ORD[O2], m1: ST[O1], m2: ST[O2]): Frame[O1, V, T] = {
implicit def ordV = stkr.ord
implicit def clmV = stkr.tag
val (lft, rgt) = splt(rowIx) // lft = row index w/o pivot level; rgt = pivot level
val rix = lft.uniques // Final row index
val uix = rgt.uniques
val cix = stkr(colIx, uix) // Final col index (colIx stacked w/unique pivot labels)
val grps = IndexGrouper(rgt, sorted = false).groups // Group by pivot label. Each unique label will get its
// own column in the final frame.
if (values.length > 0) {
val len = uix.length
var off = 0
var loc = 0
val result = Array.ofDim[Vec[T]](cix.length) // accumulates result columns
for ((_, taker) <- grps) { // For each pivot label grouping,
val gIdx = lft.take(taker) // use group's (lft) row index labels
val ixer = rix.join(gIdx) // to compute map to final (rix) locations;
for (currVec <- values) { // For each column vec of original frame
val vals = currVec.take(taker) // take values corresponding to current pivot label
val v = ixer.rTake.map(vals.take(_)).getOrElse(vals) // map values to be in correspondence to rix
result(loc) = v // and save vec in array.
loc += len // Increment offset into result array
if (loc >= cix.length) { off += 1; loc = off }
}
}
Frame[O1, V, T](result, rix, cix)
}
else Frame.empty[O1, V, T]
}
/**
* Extract the Mat embodied in the values of the Frame (dropping any indexing
* information)
*/
def toMat: Mat[T] = {
val st = implicitly[ST[T]]
synchronized {
if (cachedMat.isEmpty) {
val m = Mat(values.numCols, values.numRows, st.concat(values)).T
withMat(Some(m))
}
cachedMat.get
}
}
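  // Usage sketch: drop the indexes and work with the raw matrix.
  //   f.toMat   // numRows x numCols Mat; subsequent calls return the cached instance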
// ---------------------------------------------------------------
// Row-wise versions of all the ops that operate on cols by default
/**
* See mask; operates row-wise
*/
def rmask(f: T => Boolean): Frame[RX, CX, T] = T.mask(f).T
/**
* See mask; operates row-wise
*/
def rmask(b: Vec[Boolean]): Frame[RX, CX, T] = T.mask(b).T
/**
* See mapVec; operates row-wise
*/
def rmapVec[U: ST](f: Vec[T] => Vec[U]) = T.mapVec(f).T
/**
* See reduce; operates row-wise
*/
def rreduce[U: ST](f: Series[CX, T] => U): Series[RX, U] = T.reduce(f)
/**
* See transform; operates row-wise
*/
def rtransform[U: ST, SX: ST: ORD](f: Series[CX, T] => Series[SX, U]): Frame[RX, SX, U] = T.transform(f).T
/**
* See concat; operates row-wise
*/
def rconcat[U, V](other: Frame[RX, CX, U], how: JoinType = OuterJoin)(
implicit wd1: Promoter[T, U, V], mu: ST[U], md: ST[V]): Frame[RX, CX, V] = T.concat(other.T, how).T
/**
* See where; operates row-wise
*/
def rwhere(pred: Series[_, Boolean]): Frame[RX, CX, T] = {
val predv = pred.values
new Frame(new MatCols(values.map(v => v.where(predv))), Index(rowIx.toVec.where(predv)), colIx)
}
/**
* See shift; operates col-wise
*/
def cshift(n: Int = 1): Frame[RX, CX, T] = T.shift(n).T
/**
* See filter; operates row-wise
*/
def rfilter(pred: Series[CX, T] => Boolean) = rwhere(rreduce(v => pred(v)))
/**
* See filterIx; operates row-wise
*/
def rfilterIx(pred: RX => Boolean) = rwhere(rowIx.toVec.map(pred))
/**
* See filterAt; operates row-wise
*/
def rfilterAt(pred: Int => Boolean) = rwhere(vec.range(0, numRows).map(pred))
/**
* See joinS; operates row-wise
*/
def rjoinS(other: Series[CX, T], how: JoinType = LeftJoin): Frame[Int, CX, T] = T.joinS(other, how).T
/**
* See joinSPreserveColIx; operates row-wise
*/
def rjoinSPreserveRowIx(other: Series[CX, T], how: JoinType = LeftJoin, newRowIx: RX): Frame[RX, CX, T] = T.joinSPreserveColIx(other, how, newRowIx).T
/**
* See join; operates row-wise
*/
def rjoin(other: Frame[_, CX, T], how: JoinType = LeftJoin): Frame[Int, CX, T] = T.join(other.T, how).T
/**
* See joinPreserveColIx; operates row-wise
*/
def rjoinPreserveRowIx(other: Frame[RX, CX, T], how: JoinType = LeftJoin): Frame[RX, CX, T] = T.joinPreserveColIx(other.T, how).T
/**
* See joinAnyS; operates row-wise
*/
def rjoinAnyS(other: Series[CX, _], how: JoinType = LeftJoin): Frame[Int, CX, Any] = T.joinAnyS(other, how).T
/**
* See joinAnySPreserveColIx; operates row-wise
*/
def rjoinAnySPreserveRowIx(other: Series[CX, _], how: JoinType = LeftJoin, newRowIx: RX): Frame[RX, CX, Any] = T.joinAnySPreserveColIx(other, how, newRowIx).T
/**
* See joinAny; operates row-wise
*/
def rjoinAny(other: Frame[_, CX, _], how: JoinType = LeftJoin): Frame[Int, CX, Any] = T.joinAny(other.T, how).T
/**
* See joinAnyPreserveColIx; operates row-wise
*/
def rjoinAnyPreserveRowIx(other: Frame[RX, CX, _], how: JoinType = LeftJoin): Frame[RX, CX, Any] = T.joinAnyPreserveColIx(other.T, how).T
/**
* See dropNA; operates row-wise
*/
def rdropNA: Frame[RX, CX, T] = rfilter(v => !v.hasNA)
/**
* See squeeze; operates row-wise
*/
def rsqueeze: Frame[RX, CX, T] = rfilter(s => !VecImpl.isAllNA(s.toVec))
// todo: describe
// --------------------------------------
// for iterating over rows/cols/elements
/**
* Produce an indexed sequence of pairs of row index value and
* row Series
*/
def toRowSeq: IndexedSeq[(RX, Series[CX, T])] =
for (i <- array.range(0, numRows)) yield (rowIx.raw(i), rowAt(i))
/**
* Produce an indexed sequence of pairs of column index value and
* column Series.
*/
def toColSeq: IndexedSeq[(CX, Series[RX, T])] =
for (i <- array.range(0, numCols)) yield (colIx.raw(i), colAt(i))
/**
* Produce an indexed sequence of triples of values in the Frame
* in row-major order.
*/
def toSeq: IndexedSeq[(RX, CX, T)] =
(Range(0, numRows) zip rowIx.toSeq).flatMap { case(i, rx) =>
rowAt(i).toSeq.map { case (cx, t) =>
(rx, cx, t)
}
}
// ------------------------------------------------------
// internal contiguous caching of row data for efficiency
private def withMat(m: Option[Mat[T]]): Frame[RX, CX, T] = {
cachedMat = m
this
}
private def rows(): MatCols[T] = {
if (cachedRows.isEmpty) {
cachedRows = Some(toMat.rows())
}
cachedRows.get
}
// --------------------------------------
// pretty-printing
override def toString: String = stringify()
/**
* Creates a string representation of Frame
* @param nrows Max number of rows to include
   * @param ncols Max number of cols to include
*/
def stringify(nrows: Int = 10, ncols: Int = 10): String = {
val buf = new StringBuilder()
if (numCols == 0 || numRows == 0)
buf.append("Empty Frame")
else {
buf.append("[%d x %d]\\n".format(numRows, numCols))
val rhalf = nrows / 2
val maxf = (a: List[Int], b: List[String]) => (a zip b).map(v => math.max(v._1, v._2.length))
// calc row index width
val rsca = rowIx.scalarTag
val rarr = rowIx.toArray
val rinit = rsca.strList(rarr(0)).map(_.length)
val rlens = util.grab(rarr, rhalf).map(rsca.strList(_)).foldLeft(rinit)(maxf)
val maxrl = rlens.sum + (rlens.length - 1)
// calc each col str width
val clens = MatCols.colLens(values, numCols, ncols)
val csca = colIx.scalarTag
def clen(c: Int) = clens(c) max {
val lst = csca.strList(colIx.raw(c)).map(_.length)
if (lst.length > 0) lst.max else 0
}
var prevColMask = clens.map(x => (x._1, false)) // recalls whether we printed a column's label at level L-1
var prevColLabel = "" // recalls previous column's label at level L
// build columns header
def createColHeader(l: Int) = (c: Int) => {
val labs = csca.strList(colIx.raw(c))
val currLab = labs(l)
val fmt = "%" + clen(c) + "s "
val res = if (l == labs.length - 1 || currLab != prevColLabel || prevColMask.get(c).getOrElse(false)) {
prevColMask = prevColMask.updated(c, true)
currLab.formatted(fmt)
}
else {
prevColMask = prevColMask.updated(c, false)
"".formatted(fmt)
}
prevColLabel = currLab
res
}
def colBreakStr = {
prevColLabel = ""
" " * 5
}
val spacer = " " * (maxrl + 4)
val sz = colIx.scalarTag.strList(colIx.raw(0)).size
for (i <- 0 until sz) {
buf.append(spacer)
buf.append(util.buildStr(ncols, numCols, createColHeader(i), colBreakStr))
buf.append("\\n")
}
def createColDivide(c: Int) = "-" * clen(c) + " "
buf.append(spacer)
buf.append(util.buildStr(ncols, numCols, createColDivide))
buf.append("\\n")
// for building row labels
def enumZip[A, B](a: List[A], b: List[B]): List[(Int, A, B)] =
for ( v <- (a.zipWithIndex zip b) ) yield (v._1._2, v._1._1, v._2)
val prevRowLabels = Array.fill(rowIx.scalarTag.strList(rowIx.raw(0)).size)("")
def resetRowLabels(k: Int) { for (i <- k until prevRowLabels.length) prevRowLabels(i) = "" }
def createIx(r: Int) = {
val vls = rsca.strList(rowIx.raw(r))
val lst = for ( (i, l, v) <- enumZip(rlens, vls)) yield {
val fmt = "%" + l + "s"
val res = if (i == vls.length - 1 || prevRowLabels(i) != v) {
resetRowLabels(i+1)
v.formatted(fmt)
} else "".formatted(fmt)
prevRowLabels(i) = v
res
}
lst.mkString(" ")
}
// for building frame entries
def createVals(r: Int) = {
val elem = (col: Int) => "%" + clen(col) + "s " format values(col).scalarTag.show(values(r, col))
        util.buildStr(ncols, numCols, elem) + "\n"
}
def rowBreakStr = {
resetRowLabels(0)
"...\\n"
}
// now build row strings
buf.append(util.buildStr(nrows, numRows, (r: Int) => createIx(r) + " -> " + createVals(r), rowBreakStr) )
}
buf.toString()
}
/**
* Pretty-printer for Frame, which simply outputs the result of stringify.
* @param nrows Number of rows to display
* @param ncols Number of cols to display
*/
def print(nrows: Int = 10, ncols: Int = 10, stream: OutputStream = System.out) {
stream.write(stringify(nrows, ncols).getBytes)
}
override def hashCode(): Int =
values.hashCode() * 31 * 31 + rowIx.hashCode() * 31 + colIx.hashCode()
override def equals(other: Any): Boolean = other match {
case f: Frame[_, _, _] => (this eq f) || rowIx == f.rowIx && colIx == f.colIx && values == f.values
case _ => false
}
}
object Frame extends BinOpFrame {
// --------------------------------
// stats implicits
/**
* Enrich a Frame to provide statistical methods
*/
implicit def frameToStats[RX, CX, T: ST](f: Frame[RX, CX, T]) = new FrameStats[RX, CX, T](f)
// --------------------------------
// instantiations
/**
* Factory method to create an empty Frame
* @tparam RX Type of row keys
* @tparam CX Type of col keys
* @tparam T Type of values
*/
def empty[RX: ST: ORD, CX: ST: ORD, T: ST]: Frame[RX, CX, T] =
new Frame[RX, CX, T](MatCols.empty[T], Index.empty[RX], Index.empty[CX])
// --------------------------------
// Construct using sequence of vectors
/**
* Factory method to create a Frame from a sequence of Vec objects
*/
def apply[T: ST](values: Vec[T]*): Frame[Int, Int, T] =
if (values.isEmpty) empty[Int, Int, T]
else {
val asIdxSeq = values.toIndexedSeq
apply(asIdxSeq, IndexIntRange(asIdxSeq(0).length), IndexIntRange(asIdxSeq.length))
}
/**
* Factory method to create a Frame from a sequence of Vec objects,
* a row index, and a column index.
*/
def apply[RX: ST: ORD, CX: ST: ORD, T: ST](
values: Seq[Vec[T]], rowIx: Index[RX], colIx: Index[CX]): Frame[RX, CX, T] =
if (values.isEmpty) empty[RX, CX, T]
else
new Frame[RX, CX, T](MatCols[T](values : _*), rowIx, colIx)
/**
* Factory method to create a Frame from a sequence of Vec objects
* and a column index.
*/
def apply[CX: ST: ORD, T: ST](values: Seq[Vec[T]], colIx: Index[CX]): Frame[Int, CX, T] =
if (values.isEmpty) empty[Int, CX, T]
else {
val asIdxSeq = values.toIndexedSeq
apply(asIdxSeq, IndexIntRange(asIdxSeq(0).length), colIx)
}
/**
* Factory method to create a Frame from tuples whose first element is
* the column label and the second is a Vec of values.
*/
def apply[CX: ST: ORD, T: ST](values: (CX, Vec[T])*): Frame[Int, CX, T] = {
val asIdxSeq = values.map(_._2).toIndexedSeq
val idx = Index(values.map(_._1).toArray)
asIdxSeq.length match {
case 0 => empty[Int, CX, T]
case _ => Frame(asIdxSeq, IndexIntRange(asIdxSeq(0).length), idx)
}
}
// --------------------------------
// Construct using sequence of series
// dummy type, extra implicit parameter allows us to disambiguate the following
// overloaded apply method
private type ID[T] = T => T
/**
* Factory method to create a Frame from a sequence of Series. The row labels
* of the result are the outer join of the indexes of the series provided.
*/
def apply[RX: ST: ORD, T: ST: ID](values: Series[RX, T]*): Frame[RX, Int, T] = {
val asIdxSeq = values.toIndexedSeq
asIdxSeq.length match {
case 0 => empty[RX, Int, T]
case 1 => Frame(asIdxSeq.map(_.values), asIdxSeq(0).index, IndexIntRange(1))
case _ => {
val init = Frame(IndexedSeq(asIdxSeq(0).values), asIdxSeq(0).index, Array(0))
val temp = asIdxSeq.tail.foldLeft(init)(_.joinS(_, OuterJoin))
Frame(temp.values, temp.rowIx, IndexIntRange(temp.numCols))
}
}
}
/**
* Factory method to create a Frame from a sequence of series, also specifying
* the column index to use. The row labels of the result are the outer join of
* the indexes of the series provided.
*/
def apply[RX: ST: ORD, CX: ST: ORD, T: ST](
values: Seq[Series[RX, T]], colIx: Index[CX]): Frame[RX, CX, T] = {
val asIdxSeq = values.toIndexedSeq
asIdxSeq.length match {
case 0 => empty[RX, CX, T]
case 1 => Frame(asIdxSeq.map(_.values), asIdxSeq(0).index, colIx)
case _ => {
val init = Frame(Seq(asIdxSeq(0).values), asIdxSeq(0).index, Index(0))
val temp = values.tail.foldLeft(init)(_.joinS(_, OuterJoin))
Frame(temp.values, temp.rowIx, colIx)
}
}
}
/**
* Factory method to create a Frame from a sequence of tuples, where the
* first element of the tuple is a column label, and the second a series
* of values. The row labels of the result are the outer join of the
* indexes of the series provided.
*/
def apply[RX: ST: ORD, CX: ST: ORD, T: ST](
values: (CX, Series[RX, T])*): Frame[RX, CX, T] = {
val asIdxSeq = values.map(_._2).toIndexedSeq
val idx = Index(values.map(_._1).toArray)
asIdxSeq.length match {
case 0 => empty[RX, CX, T]
case 1 => Frame(asIdxSeq.map(_.values), asIdxSeq(0).index, idx)
case _ => {
val init = Frame(Seq(asIdxSeq(0).values), asIdxSeq(0).index, Array(0))
val temp = asIdxSeq.tail.foldLeft(init)(_.joinS(_, OuterJoin))
Frame(temp.values, temp.rowIx, idx)
}
}
}
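  // Usage sketch (hypothetical data): the row index is the outer join of the series indexes.
  //   Frame("x" -> Series("a" -> 1, "b" -> 2), "y" -> Series("b" -> 20, "c" -> 30))
  //   // 3 x 2 frame over row keys a, b, c; cells absent from a given series are NA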
// --------------------------------
// Construct using matrix
/**
* Build a Frame from a provided Mat
*/
def apply[T: ST](values: Mat[T]): Frame[Int, Int, T] =
apply(values, new IndexIntRange(values.numRows), new IndexIntRange(values.numCols))
/**
* Build a Frame from a provided Mat, row index, and col index
*/
def apply[RX: ST: ORD, CX: ST: ORD, T: ST](mat: Mat[T], rowIx: Index[RX], colIx: Index[CX]): Frame[RX, CX, T] =
if (mat.length == 0)
empty[RX, CX, T]
else {
new Frame[RX, CX, T](mat.cols(), rowIx, colIx) withMat Some(mat)
}
}
/**
* Convenience constructors for a Frame[RX, CX, Any] that accept arbitrarily-typed Vectors
* and Series as constructor parameters, leaving their internal representations unchanged.
*/
object Panel {
/**
* Factory method to create an empty Frame whose columns have type Any
* @tparam RX Type of row keys
* @tparam CX Type of col keys
*/
def empty[RX: ST: ORD, CX: ST: ORD]: Frame[RX, CX, Any] =
new Frame[RX, CX, Any](MatCols.empty, Index.empty[RX], Index.empty[CX])
// --------------------------------
// Construct using sequence of vectors
/**
* Factory method to create a Frame from a sequence of Vec objects
*/
def apply(values: Vec[_]*): Frame[Int, Int, Any] =
if (values.isEmpty) empty[Int, Int]
else {
val asIdxSeq = values.toIndexedSeq
apply(asIdxSeq, IndexIntRange(asIdxSeq(0).length), IndexIntRange(asIdxSeq.length))
}
/**
* Factory method to create a Frame from a sequence of Vec objects,
* a row index, and a column index.
*/
def apply[RX: ST: ORD, CX: ST: ORD](
values: Seq[Vec[_]], rowIx: Index[RX], colIx: Index[CX]): Frame[RX, CX, Any] = {
val anySeq = values.toIndexedSeq
if (values.isEmpty)
empty[RX, CX]
else
Frame(toSeqVec(anySeq), rowIx, colIx)
}
/**
* Factory method to create a Frame from a sequence of Vec objects
* and a column index.
*/
def apply[CX: ST: ORD](values: Seq[Vec[_]], colIx: Index[CX]): Frame[Int, CX, Any] =
if (values.isEmpty) empty[Int, CX]
else {
val asIdxSeq = values.toIndexedSeq
apply(asIdxSeq, IndexIntRange(asIdxSeq(0).length), colIx)
}
private def toSeqVec(anySeq: Seq[Vec[_]]): IndexedSeq[Vec[Any]] =
anySeq.toIndexedSeq.asInstanceOf[IndexedSeq[Vec[Any]]]
/**
* Factory method to create a Frame from tuples whose first element is
* the column label and the second is a Vec of values.
*/
def apply[CX: ST: ORD, T: ST](
values: (CX, Vec[_])*): Frame[Int, CX, Any] = {
val asIdxSeq = values.map(_._2).toIndexedSeq
val idx = Index(values.map(_._1).toArray)
asIdxSeq.length match {
case 0 => empty[Int, CX]
case _ => Frame(toSeqVec(asIdxSeq), IndexIntRange(asIdxSeq(0).length), idx)
}
}
// --------------------------------
// Construct using sequence of series
private def toSeqSeries[RX](anySeq: Seq[Series[RX, _]]) =
anySeq.toIndexedSeq.asInstanceOf[IndexedSeq[Series[RX, Any]]]
/**
* Factory method to create a Frame from a sequence of Series. The row labels
* of the result are the outer join of the indexes of the series provided.
*/
def apply[RX: ST: ORD](values: Series[RX, _]*): Frame[RX, Int, Any] = {
val asIdxSeq = toSeqSeries(values)
asIdxSeq.length match {
case 0 => empty[RX, Int]
case 1 => Frame(asIdxSeq.map(_.values), asIdxSeq(0).index, IndexIntRange(1))
case _ => {
val init = Frame(Seq(asIdxSeq(0).values), asIdxSeq(0).index, Array(0))
val temp = asIdxSeq.tail.foldLeft(init)( _.joinS(_, OuterJoin))
Frame(temp.values, temp.rowIx, IndexIntRange(temp.numCols))
}
}
}
/**
* Factory method to create a Frame from a sequence of series, also specifying
* the column index to use. The row labels of the result are the outer join of
* the indexes of the series provided.
*/
def apply[RX: ST: ORD, CX: ST: ORD](
values: Seq[Series[RX, _]], colIx: Index[CX]): Frame[RX, CX, Any] = {
val asIdxSeq = toSeqSeries(values)
asIdxSeq.length match {
case 0 => empty[RX, CX]
case 1 => Frame(asIdxSeq.map(_.values), asIdxSeq(0).index, colIx)
case _ => {
val init = Frame(Seq(asIdxSeq(0).values), asIdxSeq(0).index, Index(0))
val temp = asIdxSeq.tail.foldLeft(init)(_.joinS(_, OuterJoin))
Frame(temp.values, temp.rowIx, colIx)
}
}
}
/**
* Factory method to create a Frame from a sequence of tuples, where the
* first element of the tuple is a column label, and the second a series
* of values. The row labels of the result are the outer join of the
* indexes of the series provided.
*/
def apply[RX: ST: ORD, CX: ST: ORD](
values: (CX, Series[RX, _])*): Frame[RX, CX, Any] = {
val asIdxSeq = toSeqSeries(values.map(_._2))
val idx = Index(values.map(_._1).toArray)
asIdxSeq.length match {
case 0 => empty[RX, CX]
case 1 => Frame(asIdxSeq.map(_.values), asIdxSeq(0).index, idx)
case _ => {
val init = Frame(Seq(asIdxSeq(0).values), asIdxSeq(0).index, Array(0))
val temp = asIdxSeq.tail.foldLeft(init)(_.joinS(_, OuterJoin))
Frame(temp.values, temp.rowIx, idx)
}
}
}
}
|
jyt109/saddle
|
saddle-core/src/main/scala/org/saddle/Frame.scala
|
Scala
|
apache-2.0
| 64,094 |
import stainless.lang._
object ObjectParamMutation9 {
case class A(var x: Int)
def foo(y: Int, a: A): Unit = {
a.x = y
}
def update(a: A): Unit = {
foo(10, a)
} ensuring(_ => a.x == 10)
}
|
epfl-lara/stainless
|
frontends/benchmarks/imperative/valid/ObjectParamMutation9.scala
|
Scala
|
apache-2.0
| 211 |
package com.es.scala.chapter06
import org.specs2.mutable.Specification
class SetTest6 extends Specification {
"com.es.scala.chapter06.IntSet6" should {
"verify that no element belongs to EmptySet" in {
new EmptySet6().contains(1) must beFalse
}
"verify that correct elements belong to NonEmptySet" in {
val set = new EmptySet6
val set1 = set.incl(3)
set1.contains(1) must beFalse
set1.contains(3) must beTrue
}
"verify that union works as expected" in {
val set = new EmptySet6
val set1 = set.incl(3).incl(5).incl(1)
val set2 = set.incl(4).incl(2)
val set3 = set1.union(set2)
set3.contains(1) must beTrue
set3.contains(2) must beTrue
set3.contains(3) must beTrue
set3.contains(4) must beTrue
set3.contains(5) must beTrue
}
"verify that intersection works as expected" in {
val set = new EmptySet6
val set1 = set.incl(1).incl(3).incl(4).incl(5)
println("set1 = " + set1.toString)
val set2 = set.incl(3).incl(4)
println("set2 = " + set2.toString)
val set3 = set1.intersection(set2)
println("set3 = " + set3.toString)
println("")
set3.contains(1) must beFalse
set3.contains(2) must beFalse
set3.contains(3) must beTrue
set3.contains(4) must beTrue
set3.contains(5) must beFalse
set3.intersection(set).contains(1) must beFalse
}
}
}
|
elephantscale/learning-scala
|
ScalaByExample/src/test/scala/com/es/scala/chapter06/SetTest6.scala
|
Scala
|
apache-2.0
| 1,436 |
/*
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package play.core.server.common
import play.api._
import play.api.mvc._
import play.api.http.HeaderNames._
import play.api.libs.iteratee._
import scala.concurrent.{ Future, Promise }
object ServerResultUtils {
/** Save allocation by caching an empty array */
private val emptyBytes = new Array[Byte](0)
sealed trait ResultStreaming
final case class CannotStream(reason: String, alternativeResult: Result) extends ResultStreaming
final case class StreamWithClose(enum: Enumerator[Array[Byte]]) extends ResultStreaming
final case class StreamWithKnownLength(enum: Enumerator[Array[Byte]]) extends ResultStreaming
final case class StreamWithStrictBody(body: Array[Byte]) extends ResultStreaming
final case class UseExistingTransferEncoding(transferEncodedEnum: Enumerator[Array[Byte]]) extends ResultStreaming
final case class PerformChunkedTransferEncoding(enum: Enumerator[Array[Byte]]) extends ResultStreaming
/**
* Analyze the Result and determine how best to send it. This may involve looking at
* headers, buffering the enumerator, etc. The returned value will indicate how to
* stream the result and will provide an Enumerator or Array with the result body
* that should be streamed. CannotStream will be returned if the Result cannot be
* streamed to the given client. This can happen if a result requires Transfer-Encoding
* but the client uses HTTP 1.0. It can also happen if there is an error in the
* Result headers.
*/
def determineResultStreaming(result: Result, isHttp10: Boolean): Future[ResultStreaming] = {
result match {
case _ if result.header.headers.exists(_._2 == null) =>
Future.successful(CannotStream(
"A header was set to null",
Results.InternalServerError("")
))
case _ if (result.connection == HttpConnection.Close) =>
Future.successful(StreamWithClose(result.body))
case _ if (result.header.headers.contains(TRANSFER_ENCODING)) =>
if (isHttp10) {
Future.successful(CannotStream(
"Chunked response to HTTP/1.0 request",
Results.HttpVersionNotSupported("The response to this request is chunked and hence requires HTTP 1.1 to be sent, but this is a HTTP 1.0 request.")
))
} else {
Future.successful(UseExistingTransferEncoding(result.body))
}
case _ if (result.header.headers.contains(CONTENT_LENGTH)) =>
Future.successful(StreamWithKnownLength(result.body))
case _ =>
import play.api.libs.iteratee.Execution.Implicits.trampoline
val bodyReadAhead = readAheadOne(result.body >>> Enumerator.eof)
bodyReadAhead.map {
case Left(bodyOption) =>
val body = bodyOption.getOrElse(emptyBytes)
StreamWithStrictBody(body)
case Right(bodyEnum) =>
if (isHttp10) {
StreamWithClose(bodyEnum) // HTTP 1.0 doesn't support chunked encoding
} else {
PerformChunkedTransferEncoding(bodyEnum)
}
}
}
}
/**
* Start reading an Enumerator and see if it is only zero or one
* elements long.
* - If zero-length, return Left(None).
* - If one-length, return the element in Left(Some(el))
* - If more than one element long, return Right(enumerator) where
* enumerator is an Enumerator that contains *all* the input. Any
* already-read elements will still be included in this Enumerator.
*/
def readAheadOne[A](enum: Enumerator[A]): Future[Either[Option[A], Enumerator[A]]] = {
import Execution.Implicits.trampoline
val result = Promise[Either[Option[A], Enumerator[A]]]()
val it: Iteratee[A, Unit] = for {
taken <- Iteratee.takeUpTo(1)
emptyAfterTaken <- Iteratee.isEmpty
_ <- {
if (emptyAfterTaken) {
assert(taken.length <= 1)
result.success(Left(taken.headOption))
Done[A, Unit](())
} else {
val (remainingIt, remainingEnum) = Concurrent.joined[A]
result.success(Right(Enumerator.enumerate(taken) >>> remainingEnum))
remainingIt
}
}
} yield ()
enum(it)
result.future
}
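  // Usage sketch (illustrative only; requires an implicit ExecutionContext such as
  // Execution.Implicits.trampoline):
  //   readAheadOne(Enumerator(1, 2, 3)).map {
  //     case Left(None)     => // the enumerator was empty
  //     case Left(Some(el)) => // it held exactly one element
  //     case Right(whole)   => // two or more; `whole` replays everything, including what was read ahead
  //   }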
def cleanFlashCookie(requestHeader: RequestHeader, result: Result): Result = {
val header = result.header
val flashCookie = {
header.headers.get(SET_COOKIE)
.map(Cookies.decode(_))
.flatMap(_.find(_.name == Flash.COOKIE_NAME)).orElse {
Option(requestHeader.flash).filterNot(_.isEmpty).map { _ =>
Flash.discard.toCookie
}
}
}
flashCookie.map { newCookie =>
result.withHeaders(SET_COOKIE -> Cookies.merge(header.headers.get(SET_COOKIE).getOrElse(""), Seq(newCookie)))
}.getOrElse(result)
}
/**
* Given a map of headers, split it into a sequence of individual headers.
* Most headers map into a single pair in the new sequence. The exception is
* the `Set-Cookie` header which we split into a pair for each cookie it
* contains. This allows us to work around issues with clients that can't
* handle combined headers. (Also RFC6265 says multiple headers shouldn't
* be folded together, which Play's API unfortunately does.)
*/
def splitHeadersIntoSeq(headers: Map[String, String]): Seq[(String, String)] = {
headers.to[Seq].flatMap {
case (SET_COOKIE, value) => {
val cookieParts: Seq[Cookie] = Cookies.decode(value)
cookieParts.map { cookiePart =>
(SET_COOKIE, Cookies.encode(Seq(cookiePart)))
}
}
case (name, value) =>
Seq((name, value))
}
}
}
|
jyotikamboj/container
|
pf-framework/src/play-server/src/main/scala/play/core/server/common/ServerResultUtils.scala
|
Scala
|
mit
| 5,705 |
/*
* JBoss, Home of Professional Open Source
* Copyright 2010 Red Hat Inc. and/or its affiliates and other
* contributors as indicated by the @author tags. All rights reserved.
* See the copyright.txt in the distribution for a full listing of
* individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.infinispan.server.core
import org.testng.annotations.Test
import java.util.Properties
import org.infinispan.server.core.Main._
import org.infinispan.manager.EmbeddedCacheManager
import org.testng.Assert._
import java.lang.reflect.Method
import org.infinispan.util.TypedProperties
import test.Stoppable
import org.infinispan.test.fwk.TestCacheManagerFactory
/**
* Abstract protocol server test.
*
 * @author Galder Zamarreño
* @since 4.1
*/
@Test(groups = Array("functional"), testName = "server.core.AbstractProtocolServerTest")
class AbstractProtocolServerTest {
def testValidateNegativeMasterThreads() {
val p = new Properties
p.setProperty(PROP_KEY_MASTER_THREADS, "-1")
expectIllegalArgument(p, createServer)
}
def testValidateNegativeWorkerThreads() {
val p = new Properties
p.setProperty(PROP_KEY_WORKER_THREADS, "-1")
expectIllegalArgument(p, createServer)
}
def testValidateNegativeIdleTimeout() {
val p = new Properties
p.setProperty(PROP_KEY_IDLE_TIMEOUT, "-1")
expectIllegalArgument(p, createServer)
}
def testValidateIllegalTcpNoDelay() {
val p = new Properties
p.setProperty(PROP_KEY_TCP_NO_DELAY, "blah")
expectIllegalArgument(p, createServer)
}
def testValidateNegativeSendBufSize() {
val p = new Properties
p.setProperty(PROP_KEY_SEND_BUF_SIZE, "-1")
expectIllegalArgument(p, createServer)
}
def testValidateNegativeRecvBufSize() {
val p = new Properties
p.setProperty(PROP_KEY_RECV_BUF_SIZE, "-1")
expectIllegalArgument(p, createServer)
}
def testHostPropertySubstitution(m: Method) {
var host = "1.2.3.4"
var p = new Properties
p.setProperty(PROP_KEY_HOST, host)
Stoppable.useServer(createServer) { server =>
Stoppable.useCacheManager(TestCacheManagerFactory.createCacheManager) { cm =>
server.start(p, cm)
assertEquals(server.getHost, host)
}
host = "${" + m.getName + "-myhost:5.6.7.8}"
p = new Properties
p.setProperty(PROP_KEY_HOST, host)
Stoppable.useCacheManager(TestCacheManagerFactory.createCacheManager) { cm =>
server.start(p, cm)
assertEquals(server.getHost, "5.6.7.8")
}
host = "${" + m.getName + "-myhost:9.10.11.12}"
System.setProperty(m.getName + "-myhost", "13.14.15.16")
p = new Properties
p.setProperty(PROP_KEY_HOST, host)
Stoppable.useCacheManager(TestCacheManagerFactory.createCacheManager) { cm =>
server.start(p, cm)
assertEquals(server.getHost, "13.14.15.16")
}
host = "${" + m.getName + "-otherhost}"
p = new Properties
p.setProperty(PROP_KEY_HOST, host)
Stoppable.useCacheManager(TestCacheManagerFactory.createCacheManager) { cm =>
server.start(p, cm)
assertEquals(server.getHost, host)
}
host = "${" + m.getName + "-otherhost}"
System.setProperty(m.getName + "-otherhost", "17.18.19.20")
p = new Properties
p.setProperty(PROP_KEY_HOST, host)
Stoppable.useCacheManager(TestCacheManagerFactory.createCacheManager) { cm =>
server.start(p, cm)
assertEquals(server.getHost, "17.18.19.20")
}
}
}
def testPortPropertySubstitution(m: Method) {
var port = "123"
var p = new Properties
p.setProperty(PROP_KEY_PORT, port)
Stoppable.useServer(createServer) { server =>
Stoppable.useCacheManager(TestCacheManagerFactory.createCacheManager) { cm =>
server.start(p, cm)
assertEquals(server.getPort, port.toInt)
}
port = "${" + m.getName + "-myport:567}"
p = new Properties
p.setProperty(PROP_KEY_PORT, port)
Stoppable.useCacheManager(TestCacheManagerFactory.createCacheManager) { cm =>
server.start(p, cm)
assertEquals(server.getPort, 567)
}
port = "${" + m.getName + "-myport:891}"
System.setProperty(m.getName + "-myport", "234")
p = new Properties
p.setProperty(PROP_KEY_PORT, port)
Stoppable.useCacheManager(TestCacheManagerFactory.createCacheManager) { cm =>
server.start(p, cm)
assertEquals(server.getPort, 234)
}
port = "${" + m.getName + "-otherport}"
p = new Properties
p.setProperty(PROP_KEY_PORT, port)
Stoppable.useCacheManager(TestCacheManagerFactory.createCacheManager) { cm =>
server.start(p, cm)
assertEquals(server.getPort, 12345)
}
port = "${" + m.getName + "-otherport}"
System.setProperty(m.getName + "-otherport", "5567")
p = new Properties
p.setProperty(PROP_KEY_PORT, port)
Stoppable.useCacheManager(TestCacheManagerFactory.createCacheManager) { cm =>
server.start(p, cm)
assertEquals(server.getPort, 5567)
}
}
}
def testTcpNoDelayPropertySubstitution(m: Method) {
var tcpNoDelay = "true"
var p = new Properties
p.setProperty(PROP_KEY_TCP_NO_DELAY, tcpNoDelay)
Stoppable.useServer(createServer) { server =>
Stoppable.useCacheManager(TestCacheManagerFactory.createCacheManager) { cm =>
server.start(p, cm)
assertEquals(server.asInstanceOf[MockProtocolServer]
.tcpNoDelay, tcpNoDelay.toBoolean)
}
tcpNoDelay = "${" + m.getName + "-mytcpnodelay:false}"
p = new Properties
p.setProperty(PROP_KEY_TCP_NO_DELAY, tcpNoDelay)
Stoppable.useCacheManager(TestCacheManagerFactory.createCacheManager) { cm =>
server.start(p, cm)
assertEquals(server.asInstanceOf[MockProtocolServer].tcpNoDelay, false)
}
tcpNoDelay = "${" + m.getName + "-mytcpnodelay:true}"
System.setProperty(m.getName + "-mytcpnodelay", "false")
p = new Properties
p.setProperty(PROP_KEY_TCP_NO_DELAY, tcpNoDelay)
Stoppable.useCacheManager(TestCacheManagerFactory.createCacheManager) { cm =>
server.start(p, cm)
assertEquals(server.asInstanceOf[MockProtocolServer].tcpNoDelay, false)
}
tcpNoDelay = "${" + m.getName + "-othertcpnodelay}"
p = new Properties
p.setProperty(PROP_KEY_TCP_NO_DELAY, tcpNoDelay)
Stoppable.useCacheManager(TestCacheManagerFactory.createCacheManager) { cm =>
server.start(p, cm)
        // Boolean.parseBoolean() returns false for anything other than "true", so no exception is thrown
assertEquals(server.asInstanceOf[MockProtocolServer].tcpNoDelay, false)
}
tcpNoDelay = "${" + m.getName + "-othertcpnodelay}"
System.setProperty(m.getName + "-othertcpnodelay", "true")
p = new Properties
p.setProperty(PROP_KEY_PORT, tcpNoDelay)
Stoppable.useCacheManager(TestCacheManagerFactory.createCacheManager) { cm =>
server.start(p, cm)
assertEquals(server.asInstanceOf[MockProtocolServer].tcpNoDelay, true)
}
}
}
private def expectIllegalArgument(p: Properties, server: ProtocolServer) {
try {
Stoppable.useCacheManager(TestCacheManagerFactory.createCacheManager) { cm =>
server.start(p, cm)
}
} catch {
case i: IllegalArgumentException => // expected
} finally {
server.stop
}
}
private def createServer : MockProtocolServer = new MockProtocolServer
class MockProtocolServer extends AbstractProtocolServer("Mock") {
var tcpNoDelay: Boolean = _
override def start(properties: Properties, cacheManager: EmbeddedCacheManager) {
super.start(properties, cacheManager, 12345)
}
override def getEncoder = null
override def getDecoder = null
override def startTransport(idleTimeout: Int, tcpNoDelay: Boolean,
sendBufSize: Int, recvBufSize: Int, typedProps: TypedProperties) {
this.tcpNoDelay = tcpNoDelay
}
}
}
|
nmldiegues/stibt
|
infinispan/server/core/src/test/scala/org/infinispan/server/core/AbstractProtocolServerTest.scala
|
Scala
|
apache-2.0
| 9,267 |
/*
* Copyright 2013 Twitter inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.storehaus
import com.twitter.storehaus.kafka.KafkaSink.Dispatcher
/**
* @author Mansur Ashraf
* @since 11/23/13
*/
package object kafka {
implicit def dispatch[K, V](sink: KafkaSink[K, V]): () => Dispatcher[K, V] = sink.write
}
|
MansurAshraf/storehaus
|
storehaus-kafka/src/main/scala/com/twitter/storehaus/kafka/package.scala
|
Scala
|
apache-2.0
| 879 |
package com.karasiq.shadowcloud.actors.utils
import akka.actor.ActorRef
sealed trait ActorState {
def isActive: Boolean
}
object ActorState {
final case class Active(dispatcher: ActorRef) extends ActorState {
override def isActive: Boolean = true
}
case object Suspended extends ActorState {
override def isActive: Boolean = false
}
  def ifActive(state: ActorState, action: ActorRef ⇒ Unit): Unit = {
state match {
      case Active(dispatcher) ⇒
        action(dispatcher)
      case _ ⇒
// Pass
}
}
}
|
Karasiq/shadowcloud
|
utils/.jvm/src/main/scala/com/karasiq/shadowcloud/actors/utils/ActorState.scala
|
Scala
|
apache-2.0
| 549 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.memory
import org.apache.spark.SparkConf
import org.apache.spark.storage.BlockId
/**
* A [[MemoryManager]] that enforces a soft boundary between execution and storage such that
* either side can borrow memory from the other.
*
* The region shared between execution and storage is a fraction of (the total heap space - 300MB)
* configurable through `spark.memory.fraction` (default 0.6). The position of the boundary
* within this space is further determined by `spark.memory.storageFraction` (default 0.5).
* This means the size of the storage region is 0.6 * 0.5 = 0.3 of the heap space by default.
*
* Storage can borrow as much execution memory as is free until execution reclaims its space.
* When this happens, cached blocks will be evicted from memory until sufficient borrowed
* memory is released to satisfy the execution memory request.
*
* Similarly, execution can borrow as much storage memory as is free. However, execution
* memory is *never* evicted by storage due to the complexities involved in implementing this.
* The implication is that attempts to cache blocks may fail if execution has already eaten
* up most of the storage space, in which case the new blocks will be evicted immediately
* according to their respective storage levels.
*
* @param onHeapStorageRegionSize Size of the storage region, in bytes.
* This region is not statically reserved; execution can borrow from
* it if necessary. Cached blocks can be evicted only if actual
* storage memory usage exceeds this region.
*/
private[spark] class UnifiedMemoryManager private[memory] (
conf: SparkConf,
val maxHeapMemory: Long,
onHeapStorageRegionSize: Long,
numCores: Int)
extends MemoryManager(
conf,
numCores,
onHeapStorageRegionSize,
maxHeapMemory - onHeapStorageRegionSize) {
private def assertInvariants(): Unit = {
assert(onHeapExecutionMemoryPool.poolSize + onHeapStorageMemoryPool.poolSize == maxHeapMemory)
assert(
offHeapExecutionMemoryPool.poolSize + offHeapStorageMemoryPool.poolSize == maxOffHeapMemory)
}
assertInvariants()
override def maxOnHeapStorageMemory: Long = synchronized {
maxHeapMemory - onHeapExecutionMemoryPool.memoryUsed
}
override def maxOffHeapStorageMemory: Long = synchronized {
maxOffHeapMemory - offHeapExecutionMemoryPool.memoryUsed
}
/**
* Try to acquire up to `numBytes` of execution memory for the current task and return the
* number of bytes obtained, or 0 if none can be allocated.
*
* This call may block until there is enough free memory in some situations, to make sure each
* task has a chance to ramp up to at least 1 / 2N of the total memory pool (where N is the # of
* active tasks) before it is forced to spill. This can happen if the number of tasks increase
* but an older task had a lot of memory already.
*/
override private[memory] def acquireExecutionMemory(
numBytes: Long,
taskAttemptId: Long,
memoryMode: MemoryMode): Long = synchronized {
assertInvariants()
assert(numBytes >= 0)
val (executionPool, storagePool, storageRegionSize, maxMemory) = memoryMode match {
case MemoryMode.ON_HEAP => (
onHeapExecutionMemoryPool,
onHeapStorageMemoryPool,
onHeapStorageRegionSize,
maxHeapMemory)
case MemoryMode.OFF_HEAP => (
offHeapExecutionMemoryPool,
offHeapStorageMemoryPool,
offHeapStorageMemory,
maxOffHeapMemory)
}
/**
* Grow the execution pool by evicting cached blocks, thereby shrinking the storage pool.
*
* When acquiring memory for a task, the execution pool may need to make multiple
* attempts. Each attempt must be able to evict storage in case another task jumps in
* and caches a large block between the attempts. This is called once per attempt.
*/
def maybeGrowExecutionPool(extraMemoryNeeded: Long): Unit = {
if (extraMemoryNeeded > 0) {
// There is not enough free memory in the execution pool, so try to reclaim memory from
// storage. We can reclaim any free memory from the storage pool. If the storage pool
// has grown to become larger than `storageRegionSize`, we can evict blocks and reclaim
// the memory that storage has borrowed from execution.
val memoryReclaimableFromStorage = math.max(
storagePool.memoryFree,
storagePool.poolSize - storageRegionSize)
if (memoryReclaimableFromStorage > 0) {
// Only reclaim as much space as is necessary and available:
val spaceToReclaim = storagePool.freeSpaceToShrinkPool(
math.min(extraMemoryNeeded, memoryReclaimableFromStorage))
storagePool.decrementPoolSize(spaceToReclaim)
executionPool.incrementPoolSize(spaceToReclaim)
}
}
}
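// Rough numeric illustration (hypothetical figures): with storageRegionSize = 500 MB,
// storagePool.poolSize = 700 MB and storagePool.memoryFree = 50 MB,
// memoryReclaimableFromStorage = max(50, 700 - 500) = 200 MB, so up to 200 MB can be
// reclaimed from storage (evicting blocks if needed) and moved into the execution pool.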
/**
* The size the execution pool would have after evicting storage memory.
*
* The execution memory pool divides this quantity among the active tasks evenly to cap
* the execution memory allocation for each task. It is important to keep this greater
* than the execution pool size, which doesn't take into account potential memory that
* could be freed by evicting storage. Otherwise we may hit SPARK-12155.
*
* Additionally, this quantity should be kept below `maxMemory` to arbitrate fairness
* in execution memory allocation across tasks. Otherwise, a task may occupy more than
* its fair share of execution memory, mistakenly thinking that other tasks can acquire
* the portion of storage memory that cannot be evicted.
*/
def computeMaxExecutionPoolSize(): Long = {
maxMemory - math.min(storagePool.memoryUsed, storageRegionSize)
}
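// Rough numeric illustration (hypothetical figures): with maxMemory = 1000 MB, no storage
// usage and N = 4 active tasks, computeMaxExecutionPoolSize() = 1000 MB, so the call below
// lets each task ramp up to at least 1000 / (2 * 4) = 125 MB before being forced to spill,
// matching the 1 / 2N guarantee described in the method's Scaladoc.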
executionPool.acquireMemory(
numBytes, taskAttemptId, maybeGrowExecutionPool, computeMaxExecutionPoolSize)
}
override def acquireStorageMemory(
blockId: BlockId,
numBytes: Long,
memoryMode: MemoryMode): Boolean = synchronized {
assertInvariants()
assert(numBytes >= 0)
val (executionPool, storagePool, maxMemory) = memoryMode match {
case MemoryMode.ON_HEAP => (
onHeapExecutionMemoryPool,
onHeapStorageMemoryPool,
maxOnHeapStorageMemory)
case MemoryMode.OFF_HEAP => (
offHeapExecutionMemoryPool,
offHeapStorageMemoryPool,
maxOffHeapStorageMemory)
}
if (numBytes > maxMemory) {
// Fail fast if the block simply won't fit
logInfo(s"Will not store $blockId as the required space ($numBytes bytes) exceeds our " +
s"memory limit ($maxMemory bytes)")
return false
}
if (numBytes > storagePool.memoryFree) {
// There is not enough free memory in the storage pool, so try to borrow free memory from
// the execution pool.
val memoryBorrowedFromExecution = Math.min(executionPool.memoryFree,
numBytes - storagePool.memoryFree)
executionPool.decrementPoolSize(memoryBorrowedFromExecution)
storagePool.incrementPoolSize(memoryBorrowedFromExecution)
}
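// Rough numeric illustration (hypothetical figures): if numBytes = 300 MB while
// storagePool.memoryFree = 100 MB and executionPool.memoryFree = 500 MB, then
// memoryBorrowedFromExecution = min(500, 300 - 100) = 200 MB is moved from the execution
// pool into the storage pool before the block is acquired below.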
storagePool.acquireMemory(blockId, numBytes)
}
override def acquireUnrollMemory(
blockId: BlockId,
numBytes: Long,
memoryMode: MemoryMode): Boolean = synchronized {
acquireStorageMemory(blockId, numBytes, memoryMode)
}
}
object UnifiedMemoryManager {
// Set aside a fixed amount of memory for non-storage, non-execution purposes.
// This serves a function similar to `spark.memory.fraction`, but guarantees that we reserve
// sufficient memory for the system even for small heaps. E.g. if we have a 1GB JVM, then
// the memory used for execution and storage will be (1024 - 300) * 0.6 = 434MB by default.
private val RESERVED_SYSTEM_MEMORY_BYTES = 300 * 1024 * 1024
def apply(conf: SparkConf, numCores: Int): UnifiedMemoryManager = {
val maxMemory = getMaxMemory(conf)
new UnifiedMemoryManager(
conf,
maxHeapMemory = maxMemory,
onHeapStorageRegionSize =
(maxMemory * conf.getDouble("spark.memory.storageFraction", 0.5)).toLong,
numCores = numCores)
}
/**
* Return the total amount of memory shared between execution and storage, in bytes.
*/
private def getMaxMemory(conf: SparkConf): Long = {
val systemMemory = conf.getLong("spark.testing.memory", Runtime.getRuntime.maxMemory)
val reservedMemory = conf.getLong("spark.testing.reservedMemory",
if (conf.contains("spark.testing")) 0 else RESERVED_SYSTEM_MEMORY_BYTES)
val minSystemMemory = (reservedMemory * 1.5).ceil.toLong
if (systemMemory < minSystemMemory) {
throw new IllegalArgumentException(s"System memory $systemMemory must " +
s"be at least $minSystemMemory. Please increase heap size using the --driver-memory " +
s"option or spark.driver.memory in Spark configuration.")
}
// SPARK-12759 Check executor memory to fail fast if memory is insufficient
if (conf.contains("spark.executor.memory")) {
val executorMemory = conf.getSizeAsBytes("spark.executor.memory")
if (executorMemory < minSystemMemory) {
throw new IllegalArgumentException(s"Executor memory $executorMemory must be at least " +
s"$minSystemMemory. Please increase executor memory using the " +
s"--executor-memory option or spark.executor.memory in Spark configuration.")
}
}
val usableMemory = systemMemory - reservedMemory
val memoryFraction = conf.getDouble("spark.memory.fraction", 0.6)
(usableMemory * memoryFraction).toLong
}
}
|
mike0sv/spark
|
core/src/main/scala/org/apache/spark/memory/UnifiedMemoryManager.scala
|
Scala
|
apache-2.0
| 10,490 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.toree.kernel.api
import org.apache.toree.kernel.protocol.v5
import org.apache.toree.kernel.protocol.v5.{KMBuilder, KernelMessage}
import org.apache.toree.kernel.protocol.v5.kernel.ActorLoader
/**
* Represents the methods available to send display content from the kernel to the
* client.
*/
class DisplayMethods(
private val actorLoader: ActorLoader,
private val parentMessage: KernelMessage,
private val kernelMessageBuilder: KMBuilder)
extends DisplayMethodsLike {
private[api] val kmBuilder = kernelMessageBuilder.withParent(parentMessage)
override def content(mimeType: String, data: String): Unit = {
val displayData = v5.content.DisplayData("user", Map(mimeType -> data), Map())
val kernelMessage = kmBuilder
.withIds(Seq(v5.content.DisplayData.toTypeString))
.withHeader(v5.content.DisplayData.toTypeString)
.withContentString(displayData).build
actorLoader.load(v5.SystemActorType.KernelMessageRelay) ! kernelMessage
}
override def clear(wait: Boolean = false): Unit = {
val clearOutput = v5.content.ClearOutput(wait)
val kernelMessage = kmBuilder
.withIds(Seq(v5.content.ClearOutput.toTypeString))
.withHeader(v5.content.ClearOutput.toTypeString)
.withContentString(clearOutput).build
actorLoader.load(v5.SystemActorType.KernelMessageRelay) ! kernelMessage
}
}
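// Usage sketch (hypothetical, assuming an in-scope DisplayMethods instance named `display`):
//   display.content("text/html", "<b>Hello from the kernel</b>") // push HTML to the client
//   display.clear(wait = true) // ask the client to clear previous output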
|
kapil-malik/incubator-toree
|
kernel/src/main/scala/org/apache/toree/kernel/api/DisplayMethods.scala
|
Scala
|
apache-2.0
| 2,195 |
package org.sisioh.aws4s.dynamodb.document
import com.amazonaws.services.dynamodbv2.document.{ PrimaryKey, TableKeysAndAttributes }
import org.sisioh.aws4s.PimpedType
import scala.collection.JavaConverters._
object TableKeysAndAttributeFactory {
def apply(tableName: String): TableKeysAndAttributes = new TableKeysAndAttributes(tableName)
}
class RichTableKeysAndAttributes(val underlying: TableKeysAndAttributes) extends AnyVal with PimpedType[TableKeysAndAttributes] {
def primaryKeys: Seq[PrimaryKey] = underlying.getPrimaryKeys.asScala.toVector
def withAttributeNames(attributeNames: Seq[String]): TableKeysAndAttributes = underlying.withAttributeNames(attributeNames.asJava)
def attributeNames: Set[String] = underlying.getAttributeNames.asScala.toSet
def tableName: String = underlying.getTableName
def projectionExpression: String = underlying.getProjectionExpression
def nameMap: Map[String, String] = underlying.getNameMap.asScala.toMap
def withNameMap(nameMap: Map[String, String]): TableKeysAndAttributes = underlying.withNameMap(nameMap.asJava)
}
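// Usage sketch (hypothetical table and attribute names):
//   val tka = TableKeysAndAttributeFactory("users")
//   val restricted = new RichTableKeysAndAttributes(tka).withAttributeNames(Seq("id", "name"))
//   // `restricted` is the underlying TableKeysAndAttributes limited to the given attributes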
|
everpeace/aws4s
|
aws4s-dynamodb/src/main/scala/org/sisioh/aws4s/dynamodb/document/RichTableKeysAndAttributes.scala
|
Scala
|
mit
| 1,089 |
package java.util.regex
trait MatchResult {
def groupCount(): Int
def start(): Int
def end(): Int
def group(): String
def start(group: Int): Int
def end(group: Int): Int
def group(group: Int): String
}
|
jmnarloch/scala-js
|
javalib/src/main/scala/java/util/regex/MatchResult.scala
|
Scala
|
bsd-3-clause
| 219 |
import com.danielasfregola.twitter4s.TwitterRestClient
import scala.concurrent.ExecutionContext.Implicits.global
object RestExample extends App {
// Make sure to define the following env variables:
// TWITTER_CONSUMER_TOKEN_KEY and TWITTER_CONSUMER_TOKEN_SECRET
// TWITTER_ACCESS_TOKEN_KEY and TWITTER_ACCESS_TOKEN_SECRET
val restClient = TwitterRestClient()
restClient.homeTimeline().map { ratedData =>
val tweets = ratedData.data
tweets.foreach(tweet => println(tweet.text))
}
}
|
DanielaSfregola/twitter4s.g8
|
src/main/g8/src/main/scala/RestExample.scala
|
Scala
|
apache-2.0
| 506 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.fixture
/**
* A function that takes no parameters (<em>i.e.</em>, a <code>Function0</code> or "no-arg" function) and results in <code>Unit</code>, which when
* invoked executes the body of the constructor of the class into which this trait is mixed.
*
* <p>
* This trait extends <code>DelayedInit</code> and defines a <code>delayedInit</code> method that
* saves the body of the constructor (passed to <code>delayedInit</code>) for later execution when <code>apply</code> is invoked.
* </p>
*
* <p>
* This trait is somewhat magical and therefore may be challenging for your colleagues to understand, so please use it as a last resort only when the
* simpler options described in the "<a href="../FlatSpec.html#sharedFixtures">shared fixtures</a>" section of your chosen style trait won't do
* the job. <code>NoArg</code> is
* intended to address a specific use case that will likely be rare, and is unlikely to be useful outside of its intended use case, but
* it is quite handy for its intended use case (described in the next paragraph).
* One potential gotcha, for example, is that a subclass's constructor body could in theory be executed multiple times by simply invoking <code>apply</code> multiple
* times. In the intended use case for this trait, however, the body will be executed only once.
* </p>
*
* <p>
* The intended use case for this method is (relatively rare) situations in which you want to extend a different instance of the same class
* for each test, with the body of the test inheriting the members of that class, and with code executed before and/or after
* the body of the test.
* </p>
*
* <p>
* For example, Akka's <code>TestKit</code> class takes an <code>ActorSystem</code>,
* which must have a unique name. To run a suite of tests in parallel, each test must get its own <code>ActorSystem</code>, to
* ensure the tests run in isolation. At the end of each test, the <code>ActorSystem</code> must be shutdown. With <code>NoArg</code>,
* you can achieve this by first defining a class that extends <code>TestKit</code> and mixes in <code>NoArg</code>.
* Here's an example taken with permission from the book <a href="http://www.artima.com/shop/akka_concurrency"><em>Akka Concurrency</em></a>, by Derek Wyatt:
* </p>
*
* <pre class="stHighlight">
* import akka.actor.ActorSystem
* import akka.testkit.{TestKit, ImplicitSender}
* import java.util.concurrent.atomic.AtomicInteger
* import org.scalatest.fixture.NoArg
*
* object ActorSys {
* val uniqueId = new AtomicInteger(0)
* }
*
* class ActorSys(name: String) extends
* TestKit(ActorSystem(name))
* with ImplicitSender
* with NoArg {
*
* def this() = this(
* "TestSystem%05d".format(
* ActorSys.uniqueId.getAndIncrement()))
*
* def shutdown(): Unit = system.shutdown()
*
* override def apply() {
* try super.apply()
* finally shutdown()
* }
* }
* </pre>
*
* <p>
* Given this implementation of <code>ActorSys</code>, which will invoke <code>shutdown</code> after the constructor code
* is executed, you can run each test in a suite in a subclass of <code>TestKit</code>, giving each test's <code>TestKit</code>
* an <code>ActorSystem</code> with a unique name, allowing you to safely run those tests in parallel. Here's an example
* from <em>Akka Concurrency</em>:
* </p>
*
* <pre class="stHighlight">
* class MyActorSpec extends fixture.WordSpec
* with Matchers
* with UnitFixture
* with ParallelTestExecution {
*
* def makeActor(): ActorRef =
* system.actorOf(Props[MyActor], "MyActor")
*
* "My Actor" should {
* "throw when made with the wrong name" in new ActorSys {
* an [Exception] should be thrownBy {
* // use a generated name
* val a = system.actorOf(Props[MyActor])
* }
* }
* "construct without exception" in new ActorSys {
* val a = makeActor()
* // The throw will cause the test to fail
* }
* "respond with a Pong to a Ping" in new ActorSys {
* val a = makeActor()
* a ! Ping
* expectMsg(Pong)
* }
* }
* }
* </pre>
*
* <p>
* <a href="UnitFixture.html"><code>UnitFixture</code></a> is used in this example, because in this case, the <code>fixture.WordSpec</code> feature enabling tests to be defined as
* functions from fixture objects of type <code>FixtureParam</code> to <code>Unit</code> is not being used. Rather, only the secondary feature that enables
* tests to be defined as functions from <em>no parameters</em> to <code>Unit</code> is being used. This secondary feature is described in the second-to-last
* paragraph on the main Scaladoc documentation of <a href="WordSpec.html"><code>fixture.WordSpec</code></a>, which says:
* </p>
*
* <blockquote>
* If a test doesn't need the fixture, you can indicate that by providing a no-arg instead of a one-arg function, ...
* In other words, instead of starting your function literal
* with something like “<code>db =></code>”, you'd start it with “<code>() =></code>”. For such tests, <code>runTest</code>
* will not invoke <code>withFixture(OneArgTest)</code>. It will instead directly invoke <code>withFixture(NoArgTest)</code>.
* </blockquote>
*
* <p>
* Since <code>FixtureParam</code> is unused in this use case, it could
* be anything. Making it <code>Unit</code> will hopefully help readers more easily recognize that it is not being used.
* </p>
*
* <p>
* Note: As of Scala 2.11, <code>DelayedInit</code> (which is used by <code>NoArg</code>) has been deprecated, to indicate it is buggy and should be avoided
* if possible. Those in charge of the Scala compiler and standard library have promised that <code>DelayedInit</code> will not be removed from Scala
* unless an alternate way to achieve the same goal is provided. Thus it <em>should</em> be safe to use <code>NoArg</code>, but if you'd rather
* not you can achieve the same effect with a bit more boilerplate by extending (<code>() => Unit</code>) instead of <code>NoArg</code> and placing
* your code in an explicit <code>body</code> method. Here's an example:
* </p>
*
* <pre class="stHighlight">
* import akka.actor.ActorSystem
* import akka.testkit.{TestKit, ImplicitSender}
* import java.util.concurrent.atomic.AtomicInteger
* import org.scalatest.fixture.NoArg
*
* object ActorSys {
* val uniqueId = new AtomicInteger(0)
* }
*
* class ActorSys(name: String) extends
* TestKit(ActorSystem(name))
* with ImplicitSender
* with (() => Unit) {
*
* def this() = this(
* "TestSystem%05d".format(
* ActorSys.uniqueId.getAndIncrement()))
*
* def shutdown(): Unit = system.shutdown()
* def body(): Unit
*
* override def apply() = {
* try body()
* finally shutdown()
* }
* }
* </pre>
*
* <p>
* Using this version of <code>ActorSys</code> will require an explicit
* <code>body</code> method in the tests:
*
* </p>
*
* <pre class="stHighlight">
* class MyActorSpec extends fixture.WordSpec
* with Matchers
* with UnitFixture
* with ParallelTestExecution {
*
* def makeActor(): ActorRef =
* system.actorOf(Props[MyActor], "MyActor")
*
* "My Actor" should {
* "throw when made with the wrong name" in new ActorSys {
* def body() =
* an [Exception] should be thrownBy {
* // use a generated name
* val a = system.actorOf(Props[MyActor])
* }
* }
* "construct without exception" in new ActorSys {
* def body() = {
* val a = makeActor()
* // The throw will cause the test to fail
* }
* }
* "respond with a Pong to a Ping" in new ActorSys {
* def body() = {
* val a = makeActor()
* a ! Ping
* expectMsg(Pong)
* }
* }
* }
* }
* </pre>
*
*/
trait NoArg extends DelayedInit with (() => Unit) {
private var theBody: () => Unit = _
/**
* Saves the body of the constructor, passed as <code>body</code>, for later execution by <code>apply</code>.
*/
final def delayedInit(body: => Unit): Unit = {
synchronized { theBody = (() => body) }
}
/**
* Executes the body of the constructor that was passed to <code>delayedInit</code>.
*/
def apply(): Unit = synchronized { if (theBody != null) theBody() }
/**
* This method exists to cause a compile-time type error if someone accidentally
* tries to mix this trait into a <code>Suite</code>.
*
* <p>
* This trait is intended to be mixed
* into classes that are constructed within the body (or as the body) of tests, not mixed into <code>Suite</code>s themselves. For an example,
* see the main Scaladoc comment for this trait.
* </p>
*/
final val styleName: Int = 0 // So can't mix into Suite
}
|
dotty-staging/scalatest
|
scalatest/src/main/scala/org/scalatest/fixture/NoArg.scala
|
Scala
|
apache-2.0
| 9,536 |
/**
* Generated by Scrooge
* version: 4.7.0
* rev: d9d56174937f524a1981b38ebd6280eef7eeda4a
* built at: 20160427-121531
*/
package com.komanov.serialization.domain.thriftscala
import com.twitter.scrooge.{
LazyTProtocol,
TFieldBlob, ThriftException, ThriftStruct, ThriftStructCodec3, ThriftStructFieldInfo,
ThriftStructMetaData, ThriftUtil}
import org.apache.thrift.protocol._
import org.apache.thrift.transport.{TMemoryBuffer, TTransport}
import java.nio.ByteBuffer
import java.util.Arrays
import scala.collection.immutable.{Map => immutable$Map}
import scala.collection.mutable.Builder
import scala.collection.mutable.{
ArrayBuffer => mutable$ArrayBuffer, Buffer => mutable$Buffer,
HashMap => mutable$HashMap, HashSet => mutable$HashSet}
import scala.collection.{Map, Set}
object PageAddedPb extends ThriftStructCodec3[PageAddedPb] {
private val NoPassthroughFields = immutable$Map.empty[Short, TFieldBlob]
val Struct = new TStruct("PageAddedPb")
val PathField = new TField("path", TType.STRING, 1)
val PathFieldManifest = implicitly[Manifest[String]]
/**
* Field information in declaration order.
*/
lazy val fieldInfos: scala.List[ThriftStructFieldInfo] = scala.List[ThriftStructFieldInfo](
new ThriftStructFieldInfo(
PathField,
true,
false,
PathFieldManifest,
_root_.scala.None,
_root_.scala.None,
immutable$Map.empty[String, String],
immutable$Map.empty[String, String]
)
)
lazy val structAnnotations: immutable$Map[String, String] =
immutable$Map.empty[String, String]
/**
* Checks that all required fields are non-null.
*/
def validate(_item: PageAddedPb): Unit = {
}
def withoutPassthroughFields(original: PageAddedPb): PageAddedPb =
new Immutable(
path =
{
val field = original.path
field.map { field =>
field
}
}
)
override def encode(_item: PageAddedPb, _oproto: TProtocol): Unit = {
_item.write(_oproto)
}
private[this] def lazyDecode(_iprot: LazyTProtocol): PageAddedPb = {
var pathOffset: Int = -1
var _passthroughFields: Builder[(Short, TFieldBlob), immutable$Map[Short, TFieldBlob]] = null
var _done = false
val _start_offset = _iprot.offset
_iprot.readStructBegin()
while (!_done) {
val _field = _iprot.readFieldBegin()
if (_field.`type` == TType.STOP) {
_done = true
} else {
_field.id match {
case 1 =>
_field.`type` match {
case TType.STRING =>
pathOffset = _iprot.offsetSkipString
case _actualType =>
val _expectedType = TType.STRING
throw new TProtocolException(
"Received wrong type for field 'path' (expected=%s, actual=%s).".format(
ttypeToString(_expectedType),
ttypeToString(_actualType)
)
)
}
case _ =>
if (_passthroughFields == null)
_passthroughFields = immutable$Map.newBuilder[Short, TFieldBlob]
_passthroughFields += (_field.id -> TFieldBlob.read(_field, _iprot))
}
_iprot.readFieldEnd()
}
}
_iprot.readStructEnd()
new LazyImmutable(
_iprot,
_iprot.buffer,
_start_offset,
_iprot.offset,
pathOffset,
if (_passthroughFields == null)
NoPassthroughFields
else
_passthroughFields.result()
)
}
override def decode(_iprot: TProtocol): PageAddedPb =
_iprot match {
case i: LazyTProtocol => lazyDecode(i)
case i => eagerDecode(i)
}
private[this] def eagerDecode(_iprot: TProtocol): PageAddedPb = {
var path: _root_.scala.Option[String] = _root_.scala.None
var _passthroughFields: Builder[(Short, TFieldBlob), immutable$Map[Short, TFieldBlob]] = null
var _done = false
_iprot.readStructBegin()
while (!_done) {
val _field = _iprot.readFieldBegin()
if (_field.`type` == TType.STOP) {
_done = true
} else {
_field.id match {
case 1 =>
_field.`type` match {
case TType.STRING =>
path = _root_.scala.Some(readPathValue(_iprot))
case _actualType =>
val _expectedType = TType.STRING
throw new TProtocolException(
"Received wrong type for field 'path' (expected=%s, actual=%s).".format(
ttypeToString(_expectedType),
ttypeToString(_actualType)
)
)
}
case _ =>
if (_passthroughFields == null)
_passthroughFields = immutable$Map.newBuilder[Short, TFieldBlob]
_passthroughFields += (_field.id -> TFieldBlob.read(_field, _iprot))
}
_iprot.readFieldEnd()
}
}
_iprot.readStructEnd()
new Immutable(
path,
if (_passthroughFields == null)
NoPassthroughFields
else
_passthroughFields.result()
)
}
def apply(
path: _root_.scala.Option[String] = _root_.scala.None
): PageAddedPb =
new Immutable(
path
)
def unapply(_item: PageAddedPb): _root_.scala.Option[_root_.scala.Option[String]] = _root_.scala.Some(_item.path)
@inline private def readPathValue(_iprot: TProtocol): String = {
_iprot.readString()
}
@inline private def writePathField(path_item: String, _oprot: TProtocol): Unit = {
_oprot.writeFieldBegin(PathField)
writePathValue(path_item, _oprot)
_oprot.writeFieldEnd()
}
@inline private def writePathValue(path_item: String, _oprot: TProtocol): Unit = {
_oprot.writeString(path_item)
}
object Immutable extends ThriftStructCodec3[PageAddedPb] {
override def encode(_item: PageAddedPb, _oproto: TProtocol): Unit = { _item.write(_oproto) }
override def decode(_iprot: TProtocol): PageAddedPb = PageAddedPb.decode(_iprot)
override lazy val metaData: ThriftStructMetaData[PageAddedPb] = PageAddedPb.metaData
}
/**
* The default read-only implementation of PageAddedPb. You typically should not need to
* directly reference this class; instead, use the PageAddedPb.apply method to construct
* new instances.
*/
class Immutable(
val path: _root_.scala.Option[String],
override val _passthroughFields: immutable$Map[Short, TFieldBlob])
extends PageAddedPb {
def this(
path: _root_.scala.Option[String] = _root_.scala.None
) = this(
path,
Map.empty
)
}
/**
* This is another Immutable; this one, however, keeps strings as lazy values that are lazily decoded from the backing
* byte array on read.
*/
private[this] class LazyImmutable(
_proto: LazyTProtocol,
_buf: Array[Byte],
_start_offset: Int,
_end_offset: Int,
pathOffset: Int,
override val _passthroughFields: immutable$Map[Short, TFieldBlob])
extends PageAddedPb {
override def write(_oprot: TProtocol): Unit = {
_oprot match {
case i: LazyTProtocol => i.writeRaw(_buf, _start_offset, _end_offset - _start_offset)
case _ => super.write(_oprot)
}
}
lazy val path: _root_.scala.Option[String] =
if (pathOffset == -1)
None
else {
Some(_proto.decodeString(_buf, pathOffset))
}
/**
* Override the super hash code to make it a lazy val rather than def.
*
* Calculating the hash code can be expensive; caching it where possible
* can provide significant performance wins (for instance, when used as a key in a hash map).
* Doing so is usually not safe, since the normal constructor will accept a mutable map or
* set as an arg.
* Here, however, we control how the class is generated from serialized data.
* With the class private and the contract that we throw away our mutable references,
* making the hash code lazy here is safe.
*/
override lazy val hashCode = super.hashCode
}
/**
* This Proxy trait allows you to extend the PageAddedPb trait with additional state or
* behavior and implement the read-only methods from PageAddedPb using an underlying
* instance.
*/
trait Proxy extends PageAddedPb {
protected def _underlying_PageAddedPb: PageAddedPb
override def path: _root_.scala.Option[String] = _underlying_PageAddedPb.path
override def _passthroughFields = _underlying_PageAddedPb._passthroughFields
}
}
trait PageAddedPb
extends ThriftStruct
with scala.Product1[Option[String]]
with java.io.Serializable
{
import PageAddedPb._
def path: _root_.scala.Option[String]
def _passthroughFields: immutable$Map[Short, TFieldBlob] = immutable$Map.empty
def _1 = path
/**
* Gets a field value encoded as a binary blob using TCompactProtocol. If the specified field
* is present in the passthrough map, that value is returned. Otherwise, if the specified field
* is known and not optional and set to None, then the field is serialized and returned.
*/
def getFieldBlob(_fieldId: Short): _root_.scala.Option[TFieldBlob] = {
lazy val _buff = new TMemoryBuffer(32)
lazy val _oprot = new TCompactProtocol(_buff)
_passthroughFields.get(_fieldId) match {
case blob: _root_.scala.Some[TFieldBlob] => blob
case _root_.scala.None => {
val _fieldOpt: _root_.scala.Option[TField] =
_fieldId match {
case 1 =>
if (path.isDefined) {
writePathValue(path.get, _oprot)
_root_.scala.Some(PageAddedPb.PathField)
} else {
_root_.scala.None
}
case _ => _root_.scala.None
}
_fieldOpt match {
case _root_.scala.Some(_field) =>
val _data = Arrays.copyOfRange(_buff.getArray, 0, _buff.length)
_root_.scala.Some(TFieldBlob(_field, _data))
case _root_.scala.None =>
_root_.scala.None
}
}
}
}
/**
* Collects TCompactProtocol-encoded field values according to `getFieldBlob` into a map.
*/
def getFieldBlobs(ids: TraversableOnce[Short]): immutable$Map[Short, TFieldBlob] =
(ids flatMap { id => getFieldBlob(id) map { id -> _ } }).toMap
/**
* Sets a field using a TCompactProtocol-encoded binary blob. If the field is a known
* field, the blob is decoded and the field is set to the decoded value. If the field
* is unknown and passthrough fields are enabled, then the blob will be stored in
* _passthroughFields.
*/
def setField(_blob: TFieldBlob): PageAddedPb = {
var path: _root_.scala.Option[String] = this.path
var _passthroughFields = this._passthroughFields
_blob.id match {
case 1 =>
path = _root_.scala.Some(readPathValue(_blob.read))
case _ => _passthroughFields += (_blob.id -> _blob)
}
new Immutable(
path,
_passthroughFields
)
}
/**
* If the specified field is optional, it is set to None. Otherwise, if the field is
* known, it is reverted to its default value; if the field is unknown, it is removed
* from the passthroughFields map, if present.
*/
def unsetField(_fieldId: Short): PageAddedPb = {
var path: _root_.scala.Option[String] = this.path
_fieldId match {
case 1 =>
path = _root_.scala.None
case _ =>
}
new Immutable(
path,
_passthroughFields - _fieldId
)
}
/**
* If the specified field is optional, it is set to None. Otherwise, if the field is
* known, it is reverted to its default value; if the field is unknown, it is removed
* from the passthroughFields map, if present.
*/
def unsetPath: PageAddedPb = unsetField(1)
override def write(_oprot: TProtocol): Unit = {
PageAddedPb.validate(this)
_oprot.writeStructBegin(Struct)
if (path.isDefined) writePathField(path.get, _oprot)
if (_passthroughFields.nonEmpty) {
_passthroughFields.values.foreach { _.write(_oprot) }
}
_oprot.writeFieldStop()
_oprot.writeStructEnd()
}
def copy(
path: _root_.scala.Option[String] = this.path,
_passthroughFields: immutable$Map[Short, TFieldBlob] = this._passthroughFields
): PageAddedPb =
new Immutable(
path,
_passthroughFields
)
override def canEqual(other: Any): Boolean = other.isInstanceOf[PageAddedPb]
override def equals(other: Any): Boolean =
canEqual(other) &&
_root_.scala.runtime.ScalaRunTime._equals(this, other) &&
_passthroughFields == other.asInstanceOf[PageAddedPb]._passthroughFields
override def hashCode: Int = _root_.scala.runtime.ScalaRunTime._hashCode(this)
override def toString: String = _root_.scala.runtime.ScalaRunTime._toString(this)
override def productArity: Int = 1
override def productElement(n: Int): Any = n match {
case 0 => this.path
case _ => throw new IndexOutOfBoundsException(n.toString)
}
override def productPrefix: String = "PageAddedPb"
}
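// Usage sketch (hypothetical values):
//   val page = PageAddedPb(path = Some("/home")) // build via the generated apply
//   val moved = page.copy(path = Some("/about")) // copy with a new path
//   val cleared = moved.unsetPath // path reverts to None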
|
dkomanov/scala-serialization
|
scala-serialization/src/main/scala/com/komanov/serialization/domain/thriftscala/PageAddedPb.scala
|
Scala
|
mit
| 13,073 |
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package org.scalajs.testsuite.javalib.util
import org.junit.Assert._
import org.junit.Test
import java.util.Random
import org.scalajs.testsuite.utils.AssertThrows.assertThrows
class RandomTest {
@Test def nextBitsSeed10(): Unit = {
val random = new HackRandom(10)
assertEquals(747, random.next(10))
assertEquals(0, random.next(1))
assertEquals(16, random.next(6))
assertEquals(432970, random.next(20))
assertEquals(254270492, random.next(32))
}
@Test def nextBitsSeedNeg5(): Unit = {
val random = new HackRandom(-5)
assertEquals(275, random.next(10))
assertEquals(0, random.next(1))
assertEquals(21, random.next(6))
assertEquals(360349, random.next(20))
assertEquals(1635930704, random.next(32))
}
@Test def nextBitsSeedMaxLong(): Unit = {
val random = new HackRandom(Long.MaxValue)
assertEquals(275, random.next(10))
assertEquals(0, random.next(1))
assertEquals(0, random.next(6))
assertEquals(574655, random.next(20))
assertEquals(-1451336087, random.next(32))
}
@Test def nextBitsSeedMinInt(): Unit = {
val random = new HackRandom(Int.MinValue)
assertEquals(388, random.next(10))
assertEquals(0, random.next(1))
assertEquals(25, random.next(6))
assertEquals(352095, random.next(20))
assertEquals(-2140124682, random.next(32))
}
@Test def setSeed(): Unit = {
val random = new HackRandom(11)
assertEquals(747, random.next(10))
assertEquals(1, random.next(1))
assertEquals(27, random.next(6))
random.setSeed(11)
assertEquals(747, random.next(10))
assertEquals(1, random.next(1))
assertEquals(27, random.next(6))
}
@Test def setSeedNextGaussian(): Unit = {
val random = new Random(-1)
assertEquals(1.7853314409882288, random.nextGaussian(), 0.0)
random.setSeed(-1)
assertEquals(1.7853314409882288, random.nextGaussian(), 0.0)
}
@Test def nextDouble(): Unit = {
val random = new Random(-45)
assertEquals(0.27288421395636253, random.nextDouble(), 0.0)
assertEquals(0.5523165360074201, random.nextDouble(), 0.0)
assertEquals(0.5689979434708298, random.nextDouble(), 0.0)
assertEquals(0.9961166166874871, random.nextDouble(), 0.0)
assertEquals(0.5368984665202684, random.nextDouble(), 0.0)
assertEquals(0.19849067496547423, random.nextDouble(), 0.0)
assertEquals(0.6021019223595357, random.nextDouble(), 0.0)
assertEquals(0.06132131151816378, random.nextDouble(), 0.0)
assertEquals(0.7303867762743866, random.nextDouble(), 0.0)
assertEquals(0.7426529384056163, random.nextDouble(), 0.0)
}
@Test def nextBoolean(): Unit = {
val random = new Random(4782934)
assertFalse(random.nextBoolean())
assertTrue(random.nextBoolean())
assertTrue(random.nextBoolean())
assertFalse(random.nextBoolean())
assertFalse(random.nextBoolean())
assertFalse(random.nextBoolean())
assertTrue(random.nextBoolean())
assertFalse(random.nextBoolean())
}
@Test def nextInt(): Unit = {
val random = new Random(-84638)
assertEquals(-1217585344, random.nextInt())
assertEquals(1665699216, random.nextInt())
assertEquals(382013296, random.nextInt())
assertEquals(1604432482, random.nextInt())
assertEquals(-1689010196, random.nextInt())
assertEquals(1743354032, random.nextInt())
assertEquals(454046816, random.nextInt())
assertEquals(922172344, random.nextInt())
assertEquals(-1890515287, random.nextInt())
assertEquals(1397525728, random.nextInt())
}
@Test def nextIntInt(): Unit = {
val random = new Random(7)
assertEquals(32736, random.nextInt(76543))
assertThrows(classOf[Exception], random.nextInt(0))
assertEquals(29, random.nextInt(45))
assertEquals(60, random.nextInt(945))
assertEquals(20678044, random.nextInt(35694839))
assertEquals(23932, random.nextInt(35699))
assertEquals(2278, random.nextInt(3699))
assertEquals(8, random.nextInt(10))
}
@Test def nextIntIntPowersOf2(): Unit = {
val random = new Random(-56938)
assertEquals(8, random.nextInt(32))
assertEquals(3, random.nextInt(8))
assertEquals(3, random.nextInt(128))
assertEquals(1950, random.nextInt(4096))
assertEquals(3706, random.nextInt(8192))
assertEquals(4308, random.nextInt(8192))
assertEquals(3235, random.nextInt(8192))
assertEquals(7077, random.nextInt(8192))
assertEquals(2392, random.nextInt(8192))
assertEquals(31, random.nextInt(32))
}
@Test def nextLong(): Unit = {
val random = new Random(205620432625028L)
assertEquals(3710537363280377478L, random.nextLong())
assertEquals(4121778334981170700L, random.nextLong())
assertEquals(289540773990891960L, random.nextLong())
assertEquals(307008980197674441L, random.nextLong())
assertEquals(7527069864796025013L, random.nextLong())
assertEquals(-4563192874520002144L, random.nextLong())
assertEquals(7619507045427546529L, random.nextLong())
assertEquals(-7888117030898487184L, random.nextLong())
assertEquals(-3499168703537933266L, random.nextLong())
assertEquals(-1998975913933474L, random.nextLong())
}
@Test def nextFloat(): Unit = {
val random = new Random(-3920005825473L)
assertEquals(0.059591234f, random.nextFloat(), 0.0f)
assertEquals(0.7007871f, random.nextFloat(), 0.0f)
assertEquals(0.39173192f, random.nextFloat(), 0.0f)
assertEquals(0.0647918f, random.nextFloat(), 0.0f)
assertEquals(0.9029677f, random.nextFloat(), 0.0f)
assertEquals(0.18226051f, random.nextFloat(), 0.0f)
assertEquals(0.94444054f, random.nextFloat(), 0.0f)
assertEquals(0.008844078f, random.nextFloat(), 0.0f)
assertEquals(0.08891684f, random.nextFloat(), 0.0f)
assertEquals(0.06482434f, random.nextFloat(), 0.0f)
}
@Test def nextBytes(): Unit = {
val random = new Random(7399572013373333L)
def test(exps: Int*): Unit = {
val exp = exps.map(_.toByte).toArray
val buf = new Array[Byte](exp.length)
random.nextBytes(buf)
assertArrayEquals(exp, buf)
}
test(62, 89, 68, -91, 10, 0, 85)
test(-89, -76, 88, 121, -25, 47, 58, -8, 78, 20, -77, 84, -3,
-33, 58, -9, 11, 57, -118, 40, -74, -86, 78, 123, 58)
test(-77, 112, -116)
test()
test(-84, -96, 108)
test(57, -106, 42, -100, -47, -84, 67, -48, 45)
}
@Test def nextGaussian(): Unit = {
val random = new Random(2446004)
assertEquals(-0.5043346938630431, random.nextGaussian(), 0.0)
assertEquals(-0.3250983270156675, random.nextGaussian(), 0.0)
assertEquals(-0.23799457294994966, random.nextGaussian(), 0.0)
assertEquals(0.4164610631507695, random.nextGaussian(), 0.0)
assertEquals(0.22086348814760687, random.nextGaussian(), 0.0)
assertEquals(-0.706833209972521, random.nextGaussian(), 0.0)
assertEquals(0.6730758289772553, random.nextGaussian(), 0.0)
assertEquals(0.2797393696191283, random.nextGaussian(), 0.0)
assertEquals(-0.2979099632667685, random.nextGaussian(), 0.0)
assertEquals(0.37443415981434314, random.nextGaussian(), 0.0)
assertEquals(0.9584801742918951, random.nextGaussian(), 0.0)
assertEquals(1.1762179112229345, random.nextGaussian(), 0.0)
assertEquals(0.8736960092848826, random.nextGaussian(), 0.0)
assertEquals(0.12301554931271008, random.nextGaussian(), 0.0)
assertEquals(-0.6052081187207353, random.nextGaussian(), 0.0)
assertEquals(-0.2015925608755316, random.nextGaussian(), 0.0)
assertEquals(-1.0071216119742104, random.nextGaussian(), 0.0)
assertEquals(0.6734222041441913, random.nextGaussian(), 0.0)
assertEquals(0.3990565555091522, random.nextGaussian(), 0.0)
assertEquals(2.0051627385915154, random.nextGaussian(), 0.0)
}
/** Helper class to access next */
class HackRandom(seed: Long) extends Random(seed) {
override def next(bits: Int): Int = super.next(bits)
}
}
|
scala-js/scala-js
|
test-suite/shared/src/test/scala/org/scalajs/testsuite/javalib/util/RandomTest.scala
|
Scala
|
apache-2.0
| 8,178 |
package com.datastax.spark.connector.rdd.partitioner
import java.net.InetAddress
import com.datastax.spark.connector.rdd.partitioner.dht.{CassandraNode, LongToken}
import org.junit.Assert._
import org.junit.Test
class TokenRangeClustererTest {
type TokenRange = com.datastax.spark.connector.rdd.partitioner.dht.TokenRange[Long, LongToken]
val addr1 = InetAddress.getByName("192.168.123.1")
val addr2 = InetAddress.getByName("192.168.123.2")
val addr3 = InetAddress.getByName("192.168.123.3")
val addr4 = InetAddress.getByName("192.168.123.4")
val addr5 = InetAddress.getByName("192.168.123.5")
val node1 = CassandraNode(addr1, addr1)
val node2 = CassandraNode(addr2, addr2)
val node3 = CassandraNode(addr3, addr3)
val node4 = CassandraNode(addr4, addr4)
val node5 = CassandraNode(addr5, addr5)
private def token(x: Long) = new com.datastax.spark.connector.rdd.partitioner.dht.LongToken(x)
@Test
def testEmpty() {
val trc = new TokenRangeClusterer(10)
val groups = trc.group(Seq.empty)
assertEquals(0, groups.size)
}
@Test
def testTrivialClustering() {
val tr1 = new TokenRange(token(0), token(10), Set(node1), Some(5))
val tr2 = new TokenRange(token(10), token(20), Set(node1), Some(5))
val trc = new TokenRangeClusterer[Long, LongToken](10)
val groups = trc.group(Seq(tr1, tr2))
assertEquals(1, groups.size)
assertEquals(Set(tr1, tr2), groups.head.toSet)
}
@Test
def testSplitByHost() {
val tr1 = new TokenRange(token(0), token(10), Set(node1), Some(2))
val tr2 = new TokenRange(token(10), token(20), Set(node1), Some(2))
val tr3 = new TokenRange(token(20), token(30), Set(node2), Some(2))
val tr4 = new TokenRange(token(30), token(40), Set(node2), Some(2))
val trc = new TokenRangeClusterer[Long, LongToken](10)
val groups = trc.group(Seq(tr1, tr2, tr3, tr4)).map(_.toSet).toSet
assertEquals(2, groups.size)
assertTrue(groups.contains(Set(tr1, tr2)))
assertTrue(groups.contains(Set(tr3, tr4)))
}
@Test
def testSplitByCount() {
val tr1 = new TokenRange(token(0), token(10), Set(node1), Some(5))
val tr2 = new TokenRange(token(10), token(20), Set(node1), Some(5))
val tr3 = new TokenRange(token(20), token(30), Set(node1), Some(5))
val tr4 = new TokenRange(token(30), token(40), Set(node1), Some(5))
val trc = new TokenRangeClusterer[Long, LongToken](10)
val groups = trc.group(Seq(tr1, tr2, tr3, tr4)).map(_.toSet).toSet
assertEquals(2, groups.size)
assertTrue(groups.contains(Set(tr1, tr2)))
assertTrue(groups.contains(Set(tr3, tr4)))
}
@Test
def testTooLargeRanges() {
val tr1 = new TokenRange(token(0), token(10), Set(node1), Some(100000))
val tr2 = new TokenRange(token(10), token(20), Set(node1), Some(100000))
val trc = new TokenRangeClusterer[Long, LongToken](10)
val groups = trc.group(Seq(tr1, tr2)).map(_.toSet).toSet
assertEquals(2, groups.size)
assertTrue(groups.contains(Set(tr1)))
assertTrue(groups.contains(Set(tr2)))
}
@Test
def testMultipleEndpoints() {
val tr1 = new TokenRange(token(0), token(10), Set(node2, node1, node3), Some(1))
val tr2 = new TokenRange(token(10), token(20), Set(node1, node3, node4), Some(1))
val tr3 = new TokenRange(token(20), token(30), Set(node3, node1, node5), Some(1))
val tr4 = new TokenRange(token(30), token(40), Set(node3, node1, node4), Some(1))
val trc = new TokenRangeClusterer[Long, LongToken](10)
val groups = trc.group(Seq(tr1, tr2, tr3, tr4))
assertEquals(1, groups.size)
assertEquals(4, groups.head.size)
assertFalse(groups.head.map(_.endpoints).reduce(_ intersect _).isEmpty)
}
@Test
def testMaxClusterSize() {
val tr1 = new TokenRange(token(0), token(10), Set(node1, node2, node3), Some(1))
val tr2 = new TokenRange(token(10), token(20), Set(node1, node2, node3), Some(1))
val tr3 = new TokenRange(token(20), token(30), Set(node1, node2, node3), Some(1))
val trc = new TokenRangeClusterer[Long, LongToken](maxRowCountPerGroup = 10, maxGroupSize = 1)
val groups = trc.group(Seq(tr1, tr2, tr3))
assertEquals(3, groups.size)
}
}
|
brkyvz/spark-cassandra-connector
|
spark-cassandra-connector/src/test/scala/com/datastax/spark/connector/rdd/partitioner/TokenRangeClustererTest.scala
|
Scala
|
apache-2.0
| 4,167 |
package sttp.client3.curl
import sttp.client3.curl.CurlApi.SlistHandle
class CurlList(val ptr: SlistHandle) extends AnyVal {}
|
softwaremill/sttp
|
core/src/main/scalanative/sttp/client3/curl/CurlList.scala
|
Scala
|
apache-2.0
| 128 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources
import java.io.File
import java.sql.Timestamp
import java.text.SimpleDateFormat
import org.apache.spark.sql.{AnalysisException, Column, DataFrame, QueryTest, Row}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types.{IntegerType, LongType, StringType, StructField, StructType}
class FileMetadataStructSuite extends QueryTest with SharedSparkSession {
val data0: Seq[Row] = Seq(Row("jack", 24, Row(12345L, "uom")))
val data1: Seq[Row] = Seq(Row("lily", 31, Row(54321L, "ucb")))
val schema: StructType = new StructType()
.add(StructField("name", StringType))
.add(StructField("age", IntegerType))
.add(StructField("info", new StructType()
.add(StructField("id", LongType))
.add(StructField("university", StringType))))
val schemaWithNameConflicts: StructType = new StructType()
.add(StructField("name", StringType))
.add(StructField("age", IntegerType))
.add(StructField("_METADATA", new StructType()
.add(StructField("id", LongType))
.add(StructField("university", StringType))))
private val METADATA_FILE_PATH = "_metadata.file_path"
private val METADATA_FILE_NAME = "_metadata.file_name"
private val METADATA_FILE_SIZE = "_metadata.file_size"
private val METADATA_FILE_MODIFICATION_TIME = "_metadata.file_modification_time"
/**
* This test wrapper will test for both row-based and column-based file formats:
* (json and parquet) with nested schema:
* 1. create df0 and df1 and save them as testFileFormat under /data/f0 and /data/f1
* 2. read the path /data, return the df for further testing
* 3. create actual metadata maps for both files under /data/f0 and /data/f1 for further testing
*
* The final df will have data:
* jack | 24 | {12345, uom}
* lily | 31 | {54321, ucb}
*
* The schema of the df will be the `fileSchema` provided to this method
*
* This test wrapper will provide a `df` and actual metadata map `f0`, `f1`
*/
private def metadataColumnsTest(
testName: String, fileSchema: StructType)
(f: (DataFrame, Map[String, Any], Map[String, Any]) => Unit): Unit = {
Seq("json", "parquet").foreach { testFileFormat =>
test(s"metadata struct ($testFileFormat): " + testName) {
withTempDir { dir =>
import scala.collection.JavaConverters._
// 1. create df0 and df1 and save under /data/f0 and /data/f1
val df0 = spark.createDataFrame(data0.asJava, fileSchema)
val f0 = new File(dir, "data/f0").getCanonicalPath
df0.coalesce(1).write.format(testFileFormat).save(f0)
val df1 = spark.createDataFrame(data1.asJava, fileSchema)
val f1 = new File(dir, "data/f1").getCanonicalPath
df1.coalesce(1).write.format(testFileFormat).save(f1)
// 2. read both f0 and f1
val df = spark.read.format(testFileFormat).schema(fileSchema)
.load(new File(dir, "data").getCanonicalPath + "/*")
val realF0 = new File(dir, "data/f0").listFiles()
.filter(_.getName.endsWith(s".$testFileFormat")).head
val realF1 = new File(dir, "data/f1").listFiles()
.filter(_.getName.endsWith(s".$testFileFormat")).head
// 3. create f0 and f1 metadata data
val f0Metadata = Map(
METADATA_FILE_PATH -> realF0.toURI.toString,
METADATA_FILE_NAME -> realF0.getName,
METADATA_FILE_SIZE -> realF0.length(),
METADATA_FILE_MODIFICATION_TIME -> new Timestamp(realF0.lastModified())
)
val f1Metadata = Map(
METADATA_FILE_PATH -> realF1.toURI.toString,
METADATA_FILE_NAME -> realF1.getName,
METADATA_FILE_SIZE -> realF1.length(),
METADATA_FILE_MODIFICATION_TIME -> new Timestamp(realF1.lastModified())
)
f(df, f0Metadata, f1Metadata)
}
}
}
}
metadataColumnsTest("read partial/all metadata struct fields", schema) { (df, f0, f1) =>
// read all available metadata struct fields
checkAnswer(
df.select("name", "age", "info",
METADATA_FILE_NAME, METADATA_FILE_PATH,
METADATA_FILE_SIZE, METADATA_FILE_MODIFICATION_TIME),
Seq(
Row("jack", 24, Row(12345L, "uom"),
f0(METADATA_FILE_NAME), f0(METADATA_FILE_PATH),
f0(METADATA_FILE_SIZE), f0(METADATA_FILE_MODIFICATION_TIME)),
Row("lily", 31, Row(54321L, "ucb"),
f1(METADATA_FILE_NAME), f1(METADATA_FILE_PATH),
f1(METADATA_FILE_SIZE), f1(METADATA_FILE_MODIFICATION_TIME))
)
)
// read a part of metadata struct fields
checkAnswer(
df.select("name", "info.university", METADATA_FILE_NAME, METADATA_FILE_SIZE),
Seq(
Row("jack", "uom", f0(METADATA_FILE_NAME), f0(METADATA_FILE_SIZE)),
Row("lily", "ucb", f1(METADATA_FILE_NAME), f1(METADATA_FILE_SIZE))
)
)
}
metadataColumnsTest("read metadata struct fields with random ordering", schema) { (df, f0, f1) =>
// read a part of metadata struct fields with random ordering
checkAnswer(
df.select(METADATA_FILE_NAME, "name", METADATA_FILE_SIZE, "info.university"),
Seq(
Row(f0(METADATA_FILE_NAME), "jack", f0(METADATA_FILE_SIZE), "uom"),
Row(f1(METADATA_FILE_NAME), "lily", f1(METADATA_FILE_SIZE), "ucb")
)
)
}
metadataColumnsTest("read metadata struct fields with expressions", schema) { (df, f0, f1) =>
checkAnswer(
df.select(
// substring of file name
substring(col(METADATA_FILE_NAME), 1, 3),
// format timestamp
date_format(col(METADATA_FILE_MODIFICATION_TIME), "yyyy-MM")
.as("_file_modification_year_month"),
// convert to kb
col(METADATA_FILE_SIZE).divide(lit(1024)).as("_file_size_kb"),
// get the file format
substring_index(col(METADATA_FILE_PATH), ".", -1).as("_file_format")
),
Seq(
Row(
f0(METADATA_FILE_NAME).toString.substring(0, 3), // sql substring vs scala substring
new SimpleDateFormat("yyyy-MM").format(f0(METADATA_FILE_MODIFICATION_TIME)),
f0(METADATA_FILE_SIZE).asInstanceOf[Long] / 1024.toDouble,
f0(METADATA_FILE_PATH).toString.split("\\.").takeRight(1).head
),
Row(
f1(METADATA_FILE_NAME).toString.substring(0, 3), // sql substring vs scala substring
new SimpleDateFormat("yyyy-MM").format(f1(METADATA_FILE_MODIFICATION_TIME)),
f1(METADATA_FILE_SIZE).asInstanceOf[Long] / 1024.toDouble,
f1(METADATA_FILE_PATH).toString.split("\\.").takeRight(1).head
)
)
)
}
metadataColumnsTest("select all will not select metadata struct fields", schema) { (df, _, _) =>
checkAnswer(
df.select("*"),
Seq(
Row("jack", 24, Row(12345L, "uom")),
Row("lily", 31, Row(54321L, "ucb"))
)
)
}
metadataColumnsTest("metadata will not overwrite user data",
schemaWithNameConflicts) { (df, _, _) =>
// the user data has the schema: name, age, _metadata.id, _metadata.university
// select user data
checkAnswer(
df.select("name", "age", "_METADATA", "_metadata"),
Seq(
Row("jack", 24, Row(12345L, "uom"), Row(12345L, "uom")),
Row("lily", 31, Row(54321L, "ucb"), Row(54321L, "ucb"))
)
)
// select metadata will fail when analysis
val ex = intercept[AnalysisException] {
df.select("name", METADATA_FILE_NAME).collect()
}
assert(ex.getMessage.contains("No such struct field file_name in id, university"))
}
metadataColumnsTest("select only metadata", schema) { (df, f0, f1) =>
checkAnswer(
df.select(METADATA_FILE_NAME, METADATA_FILE_PATH,
METADATA_FILE_SIZE, METADATA_FILE_MODIFICATION_TIME),
Seq(
Row(f0(METADATA_FILE_NAME), f0(METADATA_FILE_PATH),
f0(METADATA_FILE_SIZE), f0(METADATA_FILE_MODIFICATION_TIME)),
Row(f1(METADATA_FILE_NAME), f1(METADATA_FILE_PATH),
f1(METADATA_FILE_SIZE), f1(METADATA_FILE_MODIFICATION_TIME))
)
)
checkAnswer(
df.select("_metadata"),
Seq(
Row(Row(f0(METADATA_FILE_PATH), f0(METADATA_FILE_NAME),
f0(METADATA_FILE_SIZE), f0(METADATA_FILE_MODIFICATION_TIME))),
Row(Row(f1(METADATA_FILE_PATH), f1(METADATA_FILE_NAME),
f1(METADATA_FILE_SIZE), f1(METADATA_FILE_MODIFICATION_TIME)))
)
)
}
metadataColumnsTest("select and re-select", schema) { (df, f0, f1) =>
checkAnswer(
df.select("name", "age", "info",
METADATA_FILE_NAME, METADATA_FILE_PATH,
METADATA_FILE_SIZE, METADATA_FILE_MODIFICATION_TIME)
.select("name", "file_path"), // cast _metadata.file_path as file_path
Seq(
Row("jack", f0(METADATA_FILE_PATH)),
Row("lily", f1(METADATA_FILE_PATH))
)
)
}
metadataColumnsTest("alias", schema) { (df, f0, f1) =>
val aliasDF = df.select(
Column("name").as("myName"),
Column("age").as("myAge"),
Column(METADATA_FILE_NAME).as("myFileName"),
Column(METADATA_FILE_SIZE).as("myFileSize")
)
// check schema
val expectedSchema = new StructType()
.add(StructField("myName", StringType))
.add(StructField("myAge", IntegerType))
.add(StructField("myFileName", StringType))
.add(StructField("myFileSize", LongType))
assert(aliasDF.schema.fields.toSet == expectedSchema.fields.toSet)
// check data
checkAnswer(
aliasDF,
Seq(
Row("jack", 24, f0(METADATA_FILE_NAME), f0(METADATA_FILE_SIZE)),
Row("lily", 31, f1(METADATA_FILE_NAME), f1(METADATA_FILE_SIZE))
)
)
}
metadataColumnsTest("filter", schema) { (df, f0, _) =>
checkAnswer(
df.select("name", "age", METADATA_FILE_NAME)
.where(Column(METADATA_FILE_NAME) === f0(METADATA_FILE_NAME)),
Seq(
// _file_name == f0's name, so we will only have 1 row
Row("jack", 24, f0(METADATA_FILE_NAME))
)
)
}
Seq(true, false).foreach { caseSensitive =>
metadataColumnsTest(s"upper/lower case when case " +
s"sensitive is $caseSensitive", schemaWithNameConflicts) { (df, f0, f1) =>
withSQLConf(SQLConf.CASE_SENSITIVE.key -> caseSensitive.toString) {
// file schema: name, age, _METADATA.id, _METADATA.university
if (caseSensitive) {
// for case sensitive mode:
// _METADATA is user data
// _metadata is metadata
checkAnswer(
df.select("name", "age", "_METADATA", "_metadata"),
Seq(
Row("jack", 24, Row(12345L, "uom"),
Row(f0(METADATA_FILE_PATH), f0(METADATA_FILE_NAME),
f0(METADATA_FILE_SIZE), f0(METADATA_FILE_MODIFICATION_TIME))),
Row("lily", 31, Row(54321L, "ucb"),
Row(f1(METADATA_FILE_PATH), f1(METADATA_FILE_NAME),
f1(METADATA_FILE_SIZE), f1(METADATA_FILE_MODIFICATION_TIME)))
)
)
} else {
// for case insensitive mode:
// _METADATA and _metadata are both user data
// select user data
checkAnswer(
df.select("name", "age",
// user columns
"_METADATA", "_metadata",
"_metadata.ID", "_METADATA.UniVerSity"),
Seq(
Row("jack", 24, Row(12345L, "uom"), Row(12345L, "uom"), 12345L, "uom"),
Row("lily", 31, Row(54321L, "ucb"), Row(54321L, "ucb"), 54321L, "ucb")
)
)
// select metadata will fail when analysis - metadata cannot overwrite user data
val ex = intercept[AnalysisException] {
df.select("name", "_metadata.file_name").collect()
}
assert(ex.getMessage.contains("No such struct field file_name in id, university"))
val ex1 = intercept[AnalysisException] {
df.select("name", "_METADATA.file_NAME").collect()
}
assert(ex1.getMessage.contains("No such struct field file_NAME in id, university"))
}
}
}
}
Seq("true", "false").foreach { offHeapColumnVectorEnabled =>
withSQLConf("spark.sql.columnVector.offheap.enabled" -> offHeapColumnVectorEnabled) {
metadataColumnsTest(s"read metadata with " +
s"offheap set to $offHeapColumnVectorEnabled", schema) { (df, f0, f1) =>
// read all available metadata struct fields
checkAnswer(
df.select("name", "age", "info",
METADATA_FILE_NAME, METADATA_FILE_PATH,
METADATA_FILE_SIZE, METADATA_FILE_MODIFICATION_TIME),
Seq(
Row("jack", 24, Row(12345L, "uom"), f0(METADATA_FILE_NAME), f0(METADATA_FILE_PATH),
f0(METADATA_FILE_SIZE), f0(METADATA_FILE_MODIFICATION_TIME)),
Row("lily", 31, Row(54321L, "ucb"), f1(METADATA_FILE_NAME), f1(METADATA_FILE_PATH),
f1(METADATA_FILE_SIZE), f1(METADATA_FILE_MODIFICATION_TIME))
)
)
// read a part of metadata struct fields
checkAnswer(
df.select("name", "info.university", METADATA_FILE_NAME, METADATA_FILE_SIZE),
Seq(
Row("jack", "uom", f0(METADATA_FILE_NAME), f0(METADATA_FILE_SIZE)),
Row("lily", "ucb", f1(METADATA_FILE_NAME), f1(METADATA_FILE_SIZE))
)
)
}
}
}
Seq("true", "false").foreach { enabled =>
withSQLConf("spark.sql.optimizer.nestedSchemaPruning.enabled" -> enabled) {
metadataColumnsTest(s"read metadata with " +
s"nestedSchemaPruning set to $enabled", schema) { (df, f0, f1) =>
// read a part of data: schema pruning
checkAnswer(
df.select("name", "info.university", METADATA_FILE_NAME, METADATA_FILE_SIZE),
Seq(
Row("jack", "uom", f0(METADATA_FILE_NAME), f0(METADATA_FILE_SIZE)),
Row("lily", "ucb", f1(METADATA_FILE_NAME), f1(METADATA_FILE_SIZE))
)
)
}
}
}
}
|
holdenk/spark
|
sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileMetadataStructSuite.scala
|
Scala
|
apache-2.0
| 14,975 |
/*
* Copyright (c) 2014-2020 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.subjects
import monix.execution.Ack.Continue
import monix.execution.Scheduler
import monix.execution.exceptions.DummyException
import monix.reactive.{MulticastStrategy, Observer}
object ConcurrentAsyncSubjectSuite extends BaseConcurrentSubjectSuite {
def alreadyTerminatedTest(expectedElems: Seq[Long])(implicit s: Scheduler) = {
val c = ConcurrentSubject(MulticastStrategy.async[Long])
Sample(c, expectedElems.lastOption.getOrElse(0))
}
def continuousStreamingTest(expectedElems: Seq[Long])(implicit s: Scheduler) = None
test("while active, keep adding subscribers, but don't emit anything") { implicit s =>
var wereCompleted = 0
var sum = 0L
def createObserver = new Observer[Long] {
def onNext(elem: Long) = {
sum += elem
Continue
}
def onError(ex: Throwable) = ()
def onComplete() = {
wereCompleted += 1
}
}
val channel = ConcurrentSubject(MulticastStrategy.async[Long])
channel.unsafeSubscribeFn(createObserver)
channel.unsafeSubscribeFn(createObserver)
channel.unsafeSubscribeFn(createObserver)
for (x <- Seq(10, 20, 30)) channel.onNext(x.toLong)
s.tick()
assertEquals(sum, 0)
assertEquals(wereCompleted, 0)
channel.onComplete()
channel.onComplete()
s.tick()
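// ConcurrentSubject(MulticastStrategy.async) is backed by an AsyncSubject: it caches only
// the last value and emits it to every subscriber on completion, hence the 30 * 3 below.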
assertEquals(sum, 30 * 3)
assertEquals(wereCompleted, 3)
channel.unsafeSubscribeFn(createObserver)
s.tick()
assertEquals(sum, 30 * 4)
assertEquals(wereCompleted, 4)
}
test("should interrupt on error without emitting anything") { implicit s =>
var wereCompleted = 0
var sum = 0L
def createObserver = new Observer[Long] {
def onNext(elem: Long) = {
sum += elem
Continue
}
def onComplete() = ()
def onError(ex: Throwable) = ex match {
case DummyException("dummy1") =>
wereCompleted += 1
case _ =>
()
}
}
val channel = ConcurrentSubject.async[Long]
channel.unsafeSubscribeFn(createObserver)
channel.unsafeSubscribeFn(createObserver)
channel.unsafeSubscribeFn(createObserver)
channel.onNext(10)
channel.onNext(20)
channel.onNext(30)
s.tick()
assertEquals(sum, 0)
assertEquals(wereCompleted, 0)
channel.onError(DummyException("dummy1"))
channel.onError(DummyException("dummy2"))
s.tick()
assertEquals(sum, 0)
assertEquals(wereCompleted, 3)
channel.unsafeSubscribeFn(createObserver)
assertEquals(sum, 0)
assertEquals(wereCompleted, 4)
}
test("should interrupt when empty") { implicit s =>
var wereCompleted = 0
var sum = 0L
def createObserver = new Observer[Long] {
def onNext(elem: Long) = {
sum += elem
Continue
}
def onComplete() = wereCompleted += 1
def onError(ex: Throwable) = ()
}
val channel = ConcurrentSubject.async[Long]
channel.unsafeSubscribeFn(createObserver)
channel.unsafeSubscribeFn(createObserver)
channel.unsafeSubscribeFn(createObserver)
channel.onComplete()
s.tick()
assertEquals(sum, 0)
assertEquals(wereCompleted, 3)
channel.unsafeSubscribeFn(createObserver)
s.tick()
assertEquals(sum, 0)
assertEquals(wereCompleted, 4)
}
}
|
alexandru/monifu
|
monix-reactive/shared/src/test/scala/monix/reactive/subjects/ConcurrentAsyncSubjectSuite.scala
|
Scala
|
apache-2.0
| 3,971 |
package com.github.suzuki0keiichi.nomorescript.trees
case class NoMoreScriptSelect(name: String, child: NoMoreScriptTree) extends NoMoreScriptTree {
override def toJs(terminate: Boolean) = {
val childJs = child.toJs(false)
if (name == "<init>") {
if (terminate) Util.addLast(childJs, ";") else childJs
} else if (childJs == Nil) {
List(name + (if (terminate) ";" else ""))
} else {
Util.addLast(childJs, "." + name + (if (terminate) ";" else ""))
}
}
}
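// Hedged usage sketch (not part of the original file). It assumes NoMoreScriptTree's only
// abstract member is toJs(terminate: Boolean): List[String]; if that assumption is wrong,
// the stubbed child below would need adjusting.
// object NoMoreScriptSelectExample {
//   val foo = new NoMoreScriptTree { override def toJs(terminate: Boolean) = List("foo") }
//   NoMoreScriptSelect("bar", foo).toJs(true)  // expected: List("foo.bar;")
//   NoMoreScriptSelect("baz", foo).toJs(false) // expected: List("foo.baz")
// }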
|
suzuki0keiichi/nomorescript
|
nomorescript-plugin/src/main/scala/com/github/suzuki0keiichi/nomorescript/trees/NoMoreScriptSelect.scala
|
Scala
|
mit
| 497 |
package mesosphere.marathon
package api.v2
import mesosphere.UnitTest
import mesosphere.marathon.api.{ JsonTestHelper, TaskKiller, TestAuthFixture }
import mesosphere.marathon.core.async.ExecutionContexts.global
import mesosphere.marathon.core.group.GroupManager
import mesosphere.marathon.core.health.HealthCheckManager
import mesosphere.marathon.core.instance.{ Instance, TestInstanceBuilder }
import mesosphere.marathon.core.task.Task
import mesosphere.marathon.core.task.termination.KillService
import mesosphere.marathon.core.task.tracker.{ InstanceTracker, InstanceStateOpProcessor }
import mesosphere.marathon.state.PathId._
import mesosphere.marathon.state.{ PathId, _ }
import mesosphere.marathon.test.{ GroupCreation, SettableClock }
import org.mockito.Matchers
import org.mockito.Mockito._
import play.api.libs.json.Json
import scala.concurrent.Future
import scala.concurrent.duration._
class SpecInstancesResourceTest extends UnitTest with GroupCreation {
case class Fixture(
auth: TestAuthFixture = new TestAuthFixture,
service: MarathonSchedulerService = mock[MarathonSchedulerService],
taskTracker: InstanceTracker = mock[InstanceTracker],
stateOpProcessor: InstanceStateOpProcessor = mock[InstanceStateOpProcessor],
taskKiller: TaskKiller = mock[TaskKiller],
healthCheckManager: HealthCheckManager = mock[HealthCheckManager],
config: MarathonConf = mock[MarathonConf],
groupManager: GroupManager = mock[GroupManager]) {
val identity = auth.identity
val appsTaskResource = new AppTasksResource(
taskTracker,
taskKiller,
healthCheckManager,
config,
groupManager,
auth.auth,
auth.auth
)
config.zkTimeoutDuration returns 1.second
}
case class FixtureWithRealTaskKiller(
auth: TestAuthFixture = new TestAuthFixture,
service: MarathonSchedulerService = mock[MarathonSchedulerService],
taskTracker: InstanceTracker = mock[InstanceTracker],
stateOpProcessor: InstanceStateOpProcessor = mock[InstanceStateOpProcessor],
healthCheckManager: HealthCheckManager = mock[HealthCheckManager],
config: MarathonConf = mock[MarathonConf],
groupManager: GroupManager = mock[GroupManager]) {
val identity = auth.identity
val killService = mock[KillService]
val taskKiller = new TaskKiller(
taskTracker, stateOpProcessor, groupManager, service, config, auth.auth, auth.auth, killService)
val appsTaskResource = new AppTasksResource(
taskTracker,
taskKiller,
healthCheckManager,
config,
groupManager,
auth.auth,
auth.auth
)
config.zkTimeoutDuration returns 1.second
}
"SpecInstancesResource" should {
"deleteMany" in new Fixture {
val appId = "/my/app".toRootPath
val host = "host"
val clock = new SettableClock()
val instance1 = TestInstanceBuilder.newBuilderWithLaunchedTask(appId, now = clock.now(), version = clock.now()).addTaskStaged().getInstance()
val instance2 = TestInstanceBuilder.newBuilderWithLaunchedTask(appId, now = clock.now(), version = clock.now()).addTaskStaged().getInstance()
val toKill = Seq(instance1, instance2)
config.zkTimeoutDuration returns 5.seconds
taskKiller.kill(any, any, any)(any) returns Future.successful(toKill)
groupManager.runSpec(appId) returns Some(AppDefinition(appId))
healthCheckManager.statuses(appId) returns Future.successful(collection.immutable.Map.empty)
val response = appsTaskResource.deleteMany(appId.toString, host, scale = false, force = false, wipe = false, auth.request)
response.getStatus shouldEqual 200
val expected =
s"""
|{ "tasks": [
| {
| "appId" : "/my/app",
| "healthCheckResults" : [ ],
| "host" : "host.some",
| "id" : "${instance1.appTask.taskId.idString}",
| "ipAddresses" : [ ],
| "ports" : [ ],
| "servicePorts" : [ ],
| "slaveId" : "agent-1",
| "state" : "TASK_STAGING",
| "stagedAt" : "2015-04-09T12:30:00.000Z",
| "version" : "2015-04-09T12:30:00.000Z",
| "localVolumes" : [ ]
| }, {
| "appId" : "/my/app",
| "healthCheckResults" : [ ],
| "host" : "host.some",
| "id" : "${instance2.appTask.taskId.idString}",
| "ipAddresses" : [ ],
| "ports" : [ ],
| "servicePorts" : [ ],
| "slaveId" : "agent-1",
| "state" : "TASK_STAGING",
| "stagedAt" : "2015-04-09T12:30:00.000Z",
| "version" : "2015-04-09T12:30:00.000Z",
| "localVolumes" : [ ]
| } ]
|}
""".stripMargin
JsonTestHelper
.assertThatJsonString(response.getEntity.asInstanceOf[String])
.correspondsToJsonString(expected)
}
"deleteMany with scale and wipe fails" in new Fixture {
val appId = "/my/app"
val host = "host"
val exception = intercept[BadRequestException] {
appsTaskResource.deleteMany(appId, host, scale = true, force = false, wipe = true, auth.request)
}
exception.getMessage shouldEqual "You cannot use scale and wipe at the same time."
}
"deleteMany with wipe delegates to taskKiller with wipe value" in new Fixture {
val appId = "/my/app"
val host = "host"
healthCheckManager.statuses(appId.toRootPath) returns Future.successful(collection.immutable.Map.empty)
taskKiller.kill(any, any, any)(any) returns Future.successful(Seq.empty[Instance])
val response = appsTaskResource.deleteMany(appId, host, scale = false, force = false, wipe = true, auth.request)
response.getStatus shouldEqual 200
verify(taskKiller).kill(any, any, Matchers.eq(true))(any)
}
"deleteOne" in new Fixture {
val clock = new SettableClock()
val appId = PathId("/my/app")
val instance1 = TestInstanceBuilder.newBuilderWithLaunchedTask(appId, now = clock.now(), version = clock.now()).getInstance()
val instance2 = TestInstanceBuilder.newBuilderWithLaunchedTask(appId, now = clock.now(), version = clock.now()).getInstance()
val toKill = Seq(instance1)
config.zkTimeoutDuration returns 5.seconds
taskTracker.specInstances(appId) returns Future.successful(Seq(instance1, instance2))
taskKiller.kill(any, any, any)(any) returns Future.successful(toKill)
groupManager.app(appId) returns Some(AppDefinition(appId))
healthCheckManager.statuses(appId) returns Future.successful(collection.immutable.Map.empty)
val response = appsTaskResource.deleteOne(
appId.toString, instance1.instanceId.idString, scale = false, force = false, wipe = false, auth.request
)
response.getStatus shouldEqual 200
val expected =
s"""
|{ "task":
| {
| "appId" : "/my/app",
| "healthCheckResults" : [ ],
| "host" : "host.some",
| "id" : "${instance1.appTask.taskId.idString}",
| "ipAddresses" : [ ],
| "ports" : [ ],
| "servicePorts" : [ ],
| "slaveId" : "agent-1",
| "state" : "TASK_STAGING",
| "stagedAt" : "2015-04-09T12:30:00.000Z",
| "version" : "2015-04-09T12:30:00.000Z",
| "localVolumes" : [ ]
| }
|}""".stripMargin
JsonTestHelper
.assertThatJsonString(response.getEntity.asInstanceOf[String])
.correspondsToJsonString(expected)
verify(taskKiller).kill(equalTo(appId), any, any)(any)
verifyNoMoreInteractions(taskKiller)
}
"deleteOne with scale and wipe fails" in new Fixture {
val appId = PathId("/my/app")
val id = Task.Id.forRunSpec(appId)
healthCheckManager.statuses(appId) returns Future.successful(collection.immutable.Map.empty)
val exception = intercept[BadRequestException] {
appsTaskResource.deleteOne(appId.toString, id.toString, scale = true, force = false, wipe = true, auth.request)
}
exception.getMessage shouldEqual "You cannot use scale and wipe at the same time."
}
"deleteOne with wipe delegates to taskKiller with wipe value" in new Fixture {
val clock = new SettableClock()
val appId = PathId("/my/app")
val instance1 = TestInstanceBuilder.newBuilderWithLaunchedTask(appId, now = clock.now(), version = clock.now()).getInstance()
val instance2 = TestInstanceBuilder.newBuilderWithLaunchedTask(appId, now = clock.now(), version = clock.now()).getInstance()
val toKill = Seq(instance1)
config.zkTimeoutDuration returns 5.seconds
taskTracker.specInstances(appId) returns Future.successful(Seq(instance1, instance2))
taskKiller.kill(any, any, any)(any) returns Future.successful(toKill)
groupManager.app(appId) returns Some(AppDefinition(appId))
healthCheckManager.statuses(appId) returns Future.successful(collection.immutable.Map.empty)
val response = appsTaskResource.deleteOne(
appId.toString, instance1.instanceId.idString, scale = false, force = false, wipe = true, auth.request
)
response.getStatus shouldEqual 200
val expected =
s"""
|{ "task":
| {
| "appId" : "/my/app",
| "healthCheckResults" : [ ],
| "host" : "host.some",
| "id" : "${instance1.appTask.taskId.idString}",
| "ipAddresses" : [ ],
| "ports" : [ ],
| "servicePorts" : [ ],
| "slaveId" : "agent-1",
| "state" : "TASK_STAGING",
| "stagedAt" : "2015-04-09T12:30:00.000Z",
| "version" : "2015-04-09T12:30:00.000Z",
| "localVolumes" : [ ]
| }
|}""".stripMargin
JsonTestHelper
.assertThatJsonString(response.getEntity.asInstanceOf[String])
.correspondsToJsonString(expected)
verify(taskKiller).kill(equalTo(appId), any, org.mockito.Matchers.eq(true))(any)
verifyNoMoreInteractions(taskKiller)
}
"get tasks" in new Fixture {
val clock = new SettableClock()
val appId = PathId("/my/app")
val instance1 = TestInstanceBuilder.newBuilderWithLaunchedTask(appId, clock.now()).getInstance()
val instance2 = TestInstanceBuilder.newBuilderWithLaunchedTask(appId, clock.now()).getInstance()
config.zkTimeoutDuration returns 5.seconds
taskTracker.instancesBySpec returns Future.successful(InstanceTracker.InstancesBySpec.of(InstanceTracker.SpecInstances.forInstances(appId, Seq(instance1, instance2))))
healthCheckManager.statuses(appId) returns Future.successful(collection.immutable.Map.empty)
groupManager.app(appId) returns Some(AppDefinition(appId))
val response = appsTaskResource.indexJson("/my/app", auth.request)
response.getStatus shouldEqual 200
val expected =
s"""
|{ "tasks": [
| {
| "appId" : "/my/app",
| "healthCheckResults" : [ ],
| "host" : "host.some",
| "id" : "${instance1.appTask.taskId.idString}",
| "ipAddresses" : [ ],
| "ports" : [ ],
| "servicePorts" : [ ],
| "slaveId" : "agent-1",
| "state" : "TASK_STAGING",
| "stagedAt" : "2015-04-09T12:30:00.000Z",
| "version" : "2015-04-09T12:30:00.000Z",
| "localVolumes" : [ ]
| }, {
| "appId" : "/my/app",
| "healthCheckResults" : [ ],
| "host" : "host.some",
| "id" : "${instance2.appTask.taskId.idString}",
| "ipAddresses" : [ ],
| "ports" : [ ],
| "servicePorts" : [ ],
| "slaveId" : "agent-1",
| "state" : "TASK_STAGING",
| "stagedAt" : "2015-04-09T12:30:00.000Z",
| "version" : "2015-04-09T12:30:00.000Z",
| "localVolumes" : [ ]
| } ]
|}
""".stripMargin
JsonTestHelper
.assertThatJsonString(response.getEntity.asInstanceOf[String])
.correspondsToJsonString(expected)
}
"access without authentication is denied" in new Fixture {
Given("An unauthenticated request")
auth.authenticated = false
val req = auth.request
groupManager.rootGroup() returns createRootGroup()
When("the indexJson is fetched")
val indexJson = appsTaskResource.indexJson("", req)
Then("we receive a NotAuthenticated response")
indexJson.getStatus should be(auth.NotAuthenticatedStatus)
When("the index as txt is fetched")
val indexTxt = appsTaskResource.indexTxt("", req)
Then("we receive a NotAuthenticated response")
indexTxt.getStatus should be(auth.NotAuthenticatedStatus)
When("One task is deleted")
val deleteOne = appsTaskResource.deleteOne("appId", "taskId", false, false, false, req)
Then("we receive a NotAuthenticated response")
deleteOne.getStatus should be(auth.NotAuthenticatedStatus)
When("multiple tasks are deleted")
val deleteMany = appsTaskResource.deleteMany("appId", "host", false, false, false, req)
Then("we receive a NotAuthenticated response")
deleteMany.getStatus should be(auth.NotAuthenticatedStatus)
}
"access to indexJson without authorization leads to a 404 if the app does not exist" in new Fixture {
Given("An unauthorized request")
auth.authenticated = true
auth.authorized = false
val req = auth.request
Given("the app does not exist")
taskTracker.instancesBySpec returns Future.successful(InstanceTracker.InstancesBySpec.empty)
groupManager.app("/app".toRootPath) returns None
When("the indexJson is fetched")
val indexJson = appsTaskResource.indexJson("/app", req)
Then("we receive a 404")
indexJson.getStatus should be(404)
}
"access to indexJson without authorization is not allowed if the app exists" in new Fixture {
Given("An unauthorized request")
auth.authenticated = true
auth.authorized = false
val req = auth.request
Given("the app exists")
taskTracker.instancesBySpec returns Future.successful(InstanceTracker.InstancesBySpec.empty)
groupManager.app("/app".toRootPath) returns Some(AppDefinition("/app".toRootPath))
When("the indexJson is fetched")
val indexJson = appsTaskResource.indexJson("/app", req)
Then("we receive a not authorized response")
indexJson.getStatus should be(auth.UnauthorizedStatus)
}
"access to indexJson without authorization leads to a 404 if the group does not exist" in new Fixture {
Given("An unauthorized request")
auth.authenticated = true
auth.authorized = false
val req = auth.request
Given("the group does not exist")
taskTracker.instancesBySpec returns Future.successful(InstanceTracker.InstancesBySpec.empty)
groupManager.group("/group".toRootPath) returns None
When("the indexJson is fetched")
val indexJson = appsTaskResource.indexJson("/group/*", req)
Then("we receive a 404")
indexJson.getStatus should be(404)
}
"access to indexJson without authorization is not allowed if the group exists" in new Fixture {
Given("An unauthorized request")
auth.authenticated = true
auth.authorized = false
val req = auth.request
Given("the group exists")
val groupPath = "/group".toRootPath
groupManager.group(groupPath) returns Some(createGroup(groupPath))
taskTracker.instancesBySpec returns Future.successful(InstanceTracker.InstancesBySpec.empty)
When("the indexJson is fetched")
val indexJson = appsTaskResource.indexJson("/group/*", req)
Then("we receive a not authorized response")
indexJson.getStatus should be(auth.UnauthorizedStatus)
}
"access to indexTxt without authorization is not allowed if the app exists" in new Fixture {
Given("An unauthorized request")
auth.authenticated = true
auth.authorized = false
val req = auth.request
Given("The app exists")
groupManager.app("/app".toRootPath) returns Some(AppDefinition("/app".toRootPath))
taskTracker.instancesBySpec returns Future.successful(InstanceTracker.InstancesBySpec.empty)
When("the index as txt is fetched")
val indexTxt = appsTaskResource.indexTxt("/app", req)
Then("we receive a not authorized response")
indexTxt.getStatus should be(auth.UnauthorizedStatus)
}
"access to indexTxt without authorization leads to a 404 if the the app does not exist" in new Fixture {
Given("An unauthorized request")
auth.authenticated = true
auth.authorized = false
val req = auth.request
Given("The app not exists")
groupManager.app("/app".toRootPath) returns None
taskTracker.instancesBySpec returns Future.successful(InstanceTracker.InstancesBySpec.empty)
When("the index as txt is fetched")
val indexTxt = appsTaskResource.indexTxt("/app", req)
Then("we receive a not authorized response")
indexTxt.getStatus should be(404)
}
"access to deleteOne without authorization is not allowed if the app exists" in new FixtureWithRealTaskKiller() {
Given("An unauthorized request")
auth.authenticated = true
auth.authorized = false
val req = auth.request
val taskId = Task.Id.forRunSpec(PathId("/app"))
Given("The app exists")
groupManager.runSpec("/app".toRootPath) returns Some(AppDefinition("/app".toRootPath))
taskTracker.instancesBySpec returns Future.successful(InstanceTracker.InstancesBySpec.empty)
When("deleteOne is called")
val deleteOne = appsTaskResource.deleteOne("app", taskId.toString, false, false, false, req)
Then("we receive a not authorized response")
deleteOne.getStatus should be(auth.UnauthorizedStatus)
}
"access to deleteOne without authorization leads to a 404 if the the app does not exist" in new FixtureWithRealTaskKiller() {
Given("An unauthorized request")
auth.authenticated = true
auth.authorized = false
val req = auth.request
val taskId = Task.Id.forRunSpec(PathId("/app"))
Given("The app not exists")
groupManager.runSpec("/app".toRootPath) returns None
taskTracker.instancesBySpec returns Future.successful(InstanceTracker.InstancesBySpec.empty)
When("deleteOne is called")
val deleteOne = appsTaskResource.deleteOne("app", taskId.toString, false, false, false, req)
Then("we receive a not authorized response")
deleteOne.getStatus should be(404)
}
"access to deleteMany without authorization is not allowed if the app exists" in new FixtureWithRealTaskKiller() {
Given("An unauthorized request")
auth.authenticated = true
auth.authorized = false
val req = auth.request
Given("The app exists")
groupManager.runSpec("/app".toRootPath) returns Some(AppDefinition("/app".toRootPath))
taskTracker.instancesBySpec returns Future.successful(InstanceTracker.InstancesBySpec.empty)
When("deleteMany is called")
val deleteMany = appsTaskResource.deleteMany("app", "host", false, false, false, req)
Then("we receive a not authorized response")
deleteMany.getStatus should be(auth.UnauthorizedStatus)
}
"access to deleteMany without authorization leads to a 404 if the the app does not exist" in new FixtureWithRealTaskKiller() {
Given("An unauthorized request")
auth.authenticated = true
auth.authorized = false
val req = auth.request
Given("The app not exists")
groupManager.runSpec("/app".toRootPath) returns None
When("deleteMany is called")
val deleteMany = appsTaskResource.deleteMany("app", "host", false, false, false, req)
Then("we receive a not authorized response")
deleteMany.getStatus should be(404)
}
}
}
|
janisz/marathon
|
src/test/scala/mesosphere/marathon/api/v2/SpecInstancesResourceTest.scala
|
Scala
|
apache-2.0
| 20,296 |
package com.jetyun.ansj
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.mllib.feature.HashingTF
import org.apache.spark.mllib.feature.IDF
import org.apache.spark.mllib.linalg.Vector
import org.apache.spark.rdd.RDD
object SimpleTFIDF {
def main(args: Array[String]) {
val sc = new SparkContext("local[2]", "First Spark App")
val filePath = "/Users/seraph/code/workspace/ansj4s/output/segment/part-00000"
// Load documents (one per line).
val documents: RDD[Seq[String]] = sc.textFile(filePath).map(_.split("\\t").toSeq)
val hashingTF = new HashingTF()
val tf: RDD[Vector] = hashingTF.transform(documents)
tf.collect().foreach{x =>
val v:Vector = x
println(v)
}
println("===============================================================================")
tf.cache()
val idf = new IDF().fit(tf)
val tfidf: RDD[Vector] = idf.transform(tf)
tfidf.collect().foreach{x =>
val v:Vector = x
println(v)
}
}
}
|
seraph115/ansj4s
|
src/main/scala-2.11/com/jetyun/ansj/SimpleTFIDF.scala
|
Scala
|
apache-2.0
| 1,066 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jp.gihyo.spark.ch03.pairrdd_action
import jp.gihyo.spark.{SparkFunSuite, TestSparkContext}
class CollectAsMapExampleSuite extends SparkFunSuite with TestSparkContext {
test("run") {
CollectAsMapExample.run(sc)
}
}
|
yu-iskw/gihyo-spark-book-example
|
src/test/scala/jp/gihyo/spark/ch03/pairrdd_action/CollectAsMapExampleSuite.scala
|
Scala
|
apache-2.0
| 1,035 |
package scala.slick.test.jdbc
import scala.language.higherKinds
import scala.slick.testutil._
import scala.slick.testutil.TestDBs._
import com.typesafe.slick.testkit.util.JdbcTestDB
object ExecutorTest extends DBTestObject()
class ExecutorTest(val tdb: JdbcTestDB) extends DBTest {
import tdb.profile.backend.Database.dynamicSession
import tdb.profile.simple._
def all[E, C[_]](q: Query[_, E, C]) = {
// static tests if the implicit conversions can be applied
q.list
q.run
}
}
|
dvinokurov/slick
|
slick-testkit/src/test/scala/scala/slick/test/jdbc/ExecutorTest.scala
|
Scala
|
bsd-2-clause
| 498 |
package xyztr
/**
* Represents an audio file
*/
case class AudioData(name: String, data: Array[Byte])
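// Hedged usage sketch (not part of the original file; the path argument is illustrative):
// building an AudioData value from the raw bytes of a file on disk.
object AudioDataExample {
  import java.nio.file.{Files, Paths}

  def fromDisk(path: String): AudioData =
    AudioData(Paths.get(path).getFileName.toString, Files.readAllBytes(Paths.get(path)))
}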
|
matshenricson/xyztr
|
src/main/scala/xyztr/AudioData.scala
|
Scala
|
gpl-3.0
| 107 |
package io.kaitai.struct.format
import java.nio.charset.Charset
import io.kaitai.struct.Utils
import io.kaitai.struct.datatype.DataType
import io.kaitai.struct.datatype.DataType._
import io.kaitai.struct.exprlang.Ast.expr
import io.kaitai.struct.exprlang.{Ast, Expressions}
import io.kaitai.struct.problems.KSYParseError
import scala.collection.JavaConversions._
case class ConditionalSpec(ifExpr: Option[Ast.expr], repeat: RepeatSpec)
trait AttrLikeSpec extends MemberSpec {
def dataType: DataType
def cond: ConditionalSpec
def valid: Option[ValidationSpec]
def doc: DocSpec
def isArray: Boolean = cond.repeat != NoRepeat
override def dataTypeComposite: DataType = {
if (isArray) {
ArrayTypeInStream(dataType)
} else {
dataType
}
}
override def isNullable: Boolean = {
if (cond.ifExpr.isDefined) {
true
} else if (isArray) {
// for potential future languages using null flags (like C++)
// and having switchBytesOnlyAsRaw = false (unlike C++)
false
} else {
dataType match {
case st: SwitchType =>
st.isNullable
case _ =>
false
}
}
}
def isNullableSwitchRaw: Boolean = {
if (cond.ifExpr.isDefined) {
true
} else if (isArray) {
false
} else {
dataType match {
case st: SwitchType =>
st.isNullableSwitchRaw
case _ =>
false
}
}
}
/**
* Determines if this attribute is to be parsed lazily (i.e. on first use),
* or eagerly (during object construction, usually in a `_read` method)
* @return True if this attribute is lazy, false if it's eager
*/
def isLazy: Boolean
}
case class AttrSpec(
path: List[String],
id: Identifier,
dataType: DataType,
cond: ConditionalSpec = ConditionalSpec(None, NoRepeat),
valid: Option[ValidationSpec] = None,
doc: DocSpec = DocSpec.EMPTY
) extends AttrLikeSpec with MemberSpec {
override def isLazy = false
}
case class YamlAttrArgs(
size: Option[Ast.expr],
sizeEos: Boolean,
encoding: Option[String],
terminator: Option[Int],
include: Boolean,
consume: Boolean,
eosError: Boolean,
padRight: Option[Int],
contents: Option[Array[Byte]],
enumRef: Option[String],
parent: Option[Ast.expr],
process: Option[ProcessExpr]
) {
def getByteArrayType(path: List[String]) = {
(size, sizeEos) match {
case (Some(bs: expr), false) =>
BytesLimitType(bs, terminator, include, padRight, process)
case (None, true) =>
BytesEosType(terminator, include, padRight, process)
case (None, false) =>
terminator match {
case Some(term) =>
BytesTerminatedType(term, include, consume, eosError, process)
case None =>
throw KSYParseError("'size', 'size-eos' or 'terminator' must be specified", path).toException
}
case (Some(_), true) =>
throw KSYParseError("only one of 'size' or 'size-eos' must be specified", path).toException
}
}
}
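// Hedged summary (not from the original source) of how getByteArrayType maps its inputs:
//   size defined, size-eos false     -> BytesLimitType(size, ...)
//   size absent,  size-eos true      -> BytesEosType(...)
//   neither, terminator defined      -> BytesTerminatedType(terminator, ...)
//   neither, no terminator           -> KSYParseError ("'size', 'size-eos' or 'terminator' must be specified")
//   both size and size-eos defined   -> KSYParseError (the two are mutually exclusive)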
object AttrSpec {
val LEGAL_KEYS = Set(
"id",
"doc",
"doc-ref",
"type",
"if",
"terminator",
"consume",
"include",
"eos-error",
"valid",
"repeat"
)
val LEGAL_KEYS_BYTES = Set(
"contents",
"size",
"size-eos",
"pad-right",
"parent",
"process"
)
val LEGAL_KEYS_STR = Set(
"size",
"size-eos",
"pad-right",
"encoding"
)
val LEGAL_KEYS_ENUM = Set(
"enum"
)
def fromYaml(src: Any, path: List[String], metaDef: MetaSpec, idx: Int): AttrSpec = {
val srcMap = ParseUtils.asMapStr(src, path)
val id = ParseUtils.getOptValueStr(srcMap, "id", path) match {
case Some(idStr) =>
try {
NamedIdentifier(idStr)
} catch {
case _: InvalidIdentifier =>
throw KSYParseError.invalidId(idStr, "attribute", path ++ List("id"))
}
case None => NumberedIdentifier(idx)
}
fromYaml(srcMap, path, metaDef, id)
}
def fromYaml(srcMap: Map[String, Any], path: List[String], metaDef: MetaSpec, id: Identifier): AttrSpec = {
try {
fromYaml2(srcMap, path, metaDef, id)
} catch {
case (epe: Expressions.ParseException) =>
throw KSYParseError.expression(epe, path)
}
}
def fromYaml2(srcMap: Map[String, Any], path: List[String], metaDef: MetaSpec, id: Identifier): AttrSpec = {
val doc = DocSpec.fromYaml(srcMap, path)
val process = ProcessExpr.fromStr(ParseUtils.getOptValueStr(srcMap, "process", path), path)
// TODO: add proper path propagation
val contents = srcMap.get("contents").map(parseContentSpec(_, path ++ List("contents")))
val size = ParseUtils.getOptValueExpression(srcMap, "size", path)
val sizeEos = ParseUtils.getOptValueBool(srcMap, "size-eos", path).getOrElse(false)
val ifExpr = ParseUtils.getOptValueExpression(srcMap, "if", path)
val encoding = ParseUtils.getOptValueStr(srcMap, "encoding", path)
val terminator = ParseUtils.getOptValueInt(srcMap, "terminator", path)
val consume = ParseUtils.getOptValueBool(srcMap, "consume", path).getOrElse(true)
val include = ParseUtils.getOptValueBool(srcMap, "include", path).getOrElse(false)
val eosError = ParseUtils.getOptValueBool(srcMap, "eos-error", path).getOrElse(true)
val padRight = ParseUtils.getOptValueInt(srcMap, "pad-right", path)
val enum = ParseUtils.getOptValueStr(srcMap, "enum", path)
val parent = ParseUtils.getOptValueExpression(srcMap, "parent", path)
val valid = srcMap.get("valid").map(ValidationSpec.fromYaml(_, path ++ List("valid")))
// Convert value of `contents` into validation spec and merge it in, if possible
val valid2: Option[ValidationSpec] = (contents, valid) match {
case (None, _) => valid
case (Some(byteArray), None) =>
Some(ValidationEq(Ast.expr.List(
byteArray.map(x => Ast.expr.IntNum(x & 0xff))
)))
case (Some(_), Some(_)) =>
throw KSYParseError.withText(s"`contents` and `valid` can't be used together", path)
}
val typObj = srcMap.get("type")
val yamlAttrArgs = YamlAttrArgs(
size, sizeEos,
encoding, terminator, include, consume, eosError, padRight,
contents, enum, parent, process
)
// Unfortunately, this monstrous match can't be rewritten in a simpler way due to Java type erasure
val dataType: DataType = typObj match {
case None =>
DataType.fromYaml(
None, path, metaDef, yamlAttrArgs
)
case Some(x) =>
x match {
case simpleType: String =>
DataType.fromYaml(
Some(simpleType), path, metaDef, yamlAttrArgs
)
case switchMap: Map[Any, Any] =>
val switchMapStr = ParseUtils.anyMapToStrMap(switchMap, path)
parseSwitch(switchMapStr, path, metaDef, yamlAttrArgs)
case unknown =>
throw KSYParseError.withText(s"expected map or string, found $unknown", path ++ List("type"))
}
}
val (repeatSpec, legalRepeatKeys) = RepeatSpec.fromYaml(srcMap, path)
val legalKeys = LEGAL_KEYS ++ legalRepeatKeys ++ (dataType match {
case _: BytesType => LEGAL_KEYS_BYTES
case _: StrFromBytesType => LEGAL_KEYS_STR
case _: UserType => LEGAL_KEYS_BYTES
case EnumType(_, _) => LEGAL_KEYS_ENUM
case _: SwitchType => LEGAL_KEYS_BYTES
case _ => Set()
})
ParseUtils.ensureLegalKeys(srcMap, legalKeys, path)
AttrSpec(path, id, dataType, ConditionalSpec(ifExpr, repeatSpec), valid2, doc)
}
def parseContentSpec(c: Any, path: List[String]): Array[Byte] = {
c match {
case s: String =>
s.getBytes(Charset.forName("UTF-8"))
case objects: List[_] =>
val bb = new scala.collection.mutable.ArrayBuffer[Byte]
objects.zipWithIndex.foreach { case (value, idx) =>
value match {
case s: String =>
bb.appendAll(Utils.strToBytes(s))
case integer: Integer =>
bb.append(Utils.clampIntToByte(integer))
case el =>
throw KSYParseError.withText(s"unable to parse fixed content in array: $el", path ++ List(idx.toString))
}
}
bb.toArray
case _ =>
throw KSYParseError.withText(s"unable to parse fixed content: $c", path)
}
}
val LEGAL_KEYS_SWITCH = Set(
"switch-on",
"cases"
)
private def parseSwitch(
switchSpec: Map[String, Any],
path: List[String],
metaDef: MetaSpec,
arg: YamlAttrArgs
): DataType = {
val on = ParseUtils.getValueExpression(switchSpec, "switch-on", path)
val _cases = ParseUtils.getValueMapStrStr(switchSpec, "cases", path)
ParseUtils.ensureLegalKeys(switchSpec, LEGAL_KEYS_SWITCH, path)
val cases = _cases.map { case (condition, typeName) =>
val casePath = path ++ List("cases", condition)
val condType = DataType.fromYaml(
Some(typeName), casePath, metaDef,
arg
)
try {
Expressions.parse(condition) -> condType
} catch {
case epe: Expressions.ParseException =>
throw KSYParseError.expression(epe, casePath)
}
}
// If we have size defined, and we don't have any "else" case already, add
// an implicit "else" case that will at least catch everything else as
// "untyped" byte array of given size
val addCases: Map[Ast.expr, DataType] = if (cases.containsKey(SwitchType.ELSE_CONST)) {
Map()
} else {
(arg.size, arg.sizeEos) match {
case (Some(sizeValue), false) =>
Map(SwitchType.ELSE_CONST -> BytesLimitType(sizeValue, None, false, None, arg.process))
case (None, true) =>
Map(SwitchType.ELSE_CONST -> BytesEosType(None, false, None, arg.process))
case (None, false) =>
Map()
case (Some(_), true) =>
throw KSYParseError.withText("can't have both `size` and `size-eos` defined", path)
}
}
SwitchType(on, cases ++ addCases)
}
}
|
kaitai-io/kaitai_struct_compiler
|
shared/src/main/scala/io/kaitai/struct/format/AttrSpec.scala
|
Scala
|
gpl-3.0
| 10,063 |
/*
* Copyright 2015 eleflow.com.br.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eleflow.uberdata
import java.sql.Timestamp
import java.text.DecimalFormat
import eleflow.uberdata.core.exception.UnexpectedValueException
import eleflow.uberdata.enums.SupportedAlgorithm._
import ml.dmlc.xgboost4j.scala.spark.XGBoostModel
import org.apache.spark
import org.apache.spark._
import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession
import org.apache.spark.ml._
import org.apache.spark.ml.evaluation.TimeSeriesEvaluator
import org.apache.spark.ml.feature.{StringIndexer, VectorAssembler}
import org.apache.spark.ml.tuning.ParamGridBuilder
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.types._
import org.apache.spark.sql.{DataFrame, Row}
import org.apache.spark.sql.functions._
import scala.reflect.ClassTag
/**
* Created by celio on 11/04/16.
*/
object ForecastPredictor {
def apply(): ForecastPredictor = new ForecastPredictor
}
class ForecastPredictor extends Serializable {
lazy val defaultRange = (0 to 2).toArray
trait TimestampOrd extends Ordering[Timestamp] {
override def compare(x: Timestamp, y: Timestamp): Int =
if (x.getTime < y.getTime) -1
else if (x.getTime == y.getTime) 0
else 1
}
implicit object TimestampOrdering extends TimestampOrd
protected def defaultARIMAParamMap[T <: ArimaParams](estimator: T, paramRange: Array[Int]) =
new ParamGridBuilder()
.addGrid(estimator.arimaP, paramRange)
.addGrid(estimator.arimaQ, paramRange)
.addGrid(estimator.arimaD, paramRange)
.build()
.filter(f =>
f.get[Int](estimator.arimaP).getOrElse(0) != 0 ||
f.get[Int](estimator.arimaQ).getOrElse(0) != 0)
def prepareARIMAPipeline[L](
groupByCol: String,
labelCol: String = "label",
validationCol: String = "validation",
timeCol: String = "Date",
nFutures: Int,
paramRange: Array[Int] = defaultRange
)(implicit kt: ClassTag[L]): Pipeline = {
val transformer =
createTimeSeriesGenerator[L](groupByCol, labelCol, timeCol)
prepareARIMAPipelineInt[L](
groupByCol,
labelCol,
validationCol,
nFutures,
paramRange,
Array(transformer))
}
val metricName: String = "rmspe"
protected def prepareARIMAPipelineInt[L](
groupBycol: String,
labelCol: String,
validationCol: String,
nFutures: Int,
paramRange: Array[Int],
transformer: Array[Transformer]
)(implicit kt: ClassTag[L]) = {
val timeSeriesEvaluator: TimeSeriesEvaluator[L] =
new TimeSeriesEvaluator[L]()
.setValidationCol(validationCol)
.setLabelCol(labelCol)
.setMetricName(metricName)
val arima = new ArimaBestModelFinder[L]()
.setTimeSeriesEvaluator(timeSeriesEvaluator)
.setGroupByCol(groupBycol)
.setValidationCol(validationCol)
.setNFutures(nFutures)
val paramGrid = defaultARIMAParamMap[ArimaBestModelFinder[L]](arima, paramRange)
val arimaBestModelFinder: ArimaBestModelFinder[L] = arima.setEstimatorParamMaps(paramGrid)
preparePipeline(arimaBestModelFinder, preTransformers = transformer)
}
def prepareHOLTWintersPipeline[T](
groupByCol: String,
labelCol: String = "label",
validationCol: String = "validation",
timeCol: String = "Date",
nFutures: Int = 6
)(implicit kt: ClassTag[T]): Pipeline = {
val transformer = createTimeSeriesGenerator(groupByCol, labelCol, timeCol)
val timeSeriesEvaluator: TimeSeriesEvaluator[T] =
new TimeSeriesEvaluator[T]()
.setValidationCol(validationCol)
.setLabelCol(labelCol)
.setMetricName(metricName)
val holtWinters = new HoltWintersBestModelFinder[T]()
.setTimeSeriesEvaluator(timeSeriesEvaluator)
.setGroupByCol(groupByCol)
.setLabelCol(labelCol)
.setValidationCol(validationCol)
.setNFutures(nFutures)
.asInstanceOf[HoltWintersBestModelFinder[Double]]
preparePipeline(holtWinters, preTransformers = Array(transformer))
}
def prepareMovingAveragePipeline[G](
groupBycol: String,
featuresCol: String = "features",
validationCol: String = "validation",
timeCol: String = "Date",
windowSize: Int = 8
)(implicit kt: ClassTag[G]): Pipeline = {
val transformer = createTimeSeriesGenerator(groupBycol, featuresCol, timeCol)
val movingAverage = new MovingAverage[G]()
.setOutputCol(validationCol)
.setInputCol(featuresCol)
.setWindowSize(windowSize)
new Pipeline().setStages(Array(transformer, movingAverage))
}
private def createTimeSeriesGenerator[L](
groupByCol: String,
featuresCol: String,
timeCol: String
)(implicit kt: ClassTag[L]): TimeSeriesGenerator[L] = {
new TimeSeriesGenerator[L]()
.setFeaturesCol(featuresCol)
.setGroupByCol(groupByCol)
.setTimeCol(timeCol)
.setOutputCol("features")
}
private def preparePipeline(timeSeriesBestModelFinder: TimeSeriesBestModelFinder,
preTransformers: Array[_ <: Transformer]): Pipeline = {
new Pipeline().setStages(preTransformers ++ Array(timeSeriesBestModelFinder))
}
def prepareBestForecastPipeline[L](
labelCol: String,
featuresCol: String,
validationCol: String,
timeCol: String,
nFutures: Int,
meanAverageWindowSize: Seq[Int],
paramRange: Array[Int]
)(implicit kt: ClassTag[L]): Pipeline = {
val transformer =
createTimeSeriesGenerator[L](labelCol, featuresCol, timeCol)
val timeSeriesEvaluator: TimeSeriesEvaluator[L] =
new TimeSeriesEvaluator[L]()
.setValidationCol(validationCol)
.setLabelCol(featuresCol)
.setMetricName(metricName)
val findBestForecast = new ForecastBestModelFinder[L, ForecastBestModel[L]]
.setWindowParams(meanAverageWindowSize)
.setTimeSeriesEvaluator(timeSeriesEvaluator)
.setLabelCol(labelCol)
.setValidationCol(validationCol)
.setNFutures(nFutures)
.asInstanceOf[ForecastBestModelFinder[L, ForecastBestModel[L]]]
val paramGrid = defaultARIMAParamMap[ForecastBestModelFinder[L, ForecastBestModel[L]]](
findBestForecast,
paramRange)
findBestForecast.setEstimatorParamMaps(paramGrid)
preparePipeline(findBestForecast, Array(transformer))
}
def prepareXGBoostSmallModel[L, G](
labelCol: String,
featuresCol: Seq[String],
validationCol: String,
timeCol: String,
idCol: String,
groupByCol: String,
schema: StructType
)(implicit kl: ClassTag[L], kg: ClassTag[G]): Pipeline = {
val timeSeriesEvaluator: TimeSeriesEvaluator[G] =
new TimeSeriesEvaluator[G]()
.setValidationCol(validationCol)
.setLabelCol(labelCol)
.setMetricName(metricName)
val xgboost = new XGBoostBestSmallModelFinder[L, G]()
.setTimeSeriesEvaluator(timeSeriesEvaluator)
.setLabelCol(labelCol)
.setGroupByCol(groupByCol)
.setIdCol(idCol)
.setValidationCol(validationCol)
.setTimeCol(timeCol)
new Pipeline().setStages(
createXGBoostPipelineStages(labelCol, featuresCol, groupByCol, Some(idCol), timeCol,
schema = schema) :+ xgboost)
}
def createXGBoostPipelineStages(labelCol: String,
featuresCol: Seq[String],
groupByCol: String,
idCol: Option[String] = None,
timeCol:String = "",
schema: StructType): Array[PipelineStage] = {
val allColumns = schema.map(_.name).toArray
val stringColumns = schema
.filter(f => f.dataType.isInstanceOf[StringType] && featuresCol.contains(f.name))
.map(_.name)
val nonStringColumns = allColumns.filter(
f =>
!stringColumns.contains(f)
&& featuresCol.contains(f))
val stringIndexers = stringColumns.map { column =>
new StringIndexer().setInputCol(column).setOutputCol(s"${column}Index")
}.toArray
val nonStringIndex = "nonStringIndex"
val columnIndexers = new VectorizeEncoder()
.setInputCol(nonStringColumns)
.setOutputCol(nonStringIndex)
.setLabelCol(labelCol)
.setGroupByCol(groupByCol)
.setIdCol(idCol.getOrElse(""))
.setTimeCol(timeCol)
val assembler = new VectorAssembler()
.setInputCols(stringColumns.map(f => s"${f}Index").toArray :+ nonStringIndex)
.setOutputCol(IUberdataForecastUtil.FEATURES_COL_NAME)
stringIndexers :+ columnIndexers :+ assembler
}
//label, GroupBy
def predict[L, G](train: DataFrame,
test: DataFrame,
labelCol: String,
featuresCol: Seq[String] = Seq.empty[String],
timeCol: String,
idCol: String,
groupByCol: String,
algorithm: Algorithm = FindBestForecast,
nFutures: Int = 6,
meanAverageWindowSize: Seq[Int] = Seq(8, 16, 26),
paramRange: Array[Int] = defaultRange)(
implicit kt: ClassTag[L],
ord: Ordering[L] = null,
gt: ClassTag[G]): (DataFrame, PipelineModel, Double) = {
require(featuresCol.nonEmpty, "featuresCol parameter can't be empty")
val validationCol = idCol + algorithm.toString
algorithm match {
case Arima | HoltWinters | MovingAverage8 | MovingAverage16 | MovingAverage26 |
FindBestForecast =>
predictSmallModelFuture[L](
train,
test,
groupByCol,
featuresCol.head,
timeCol,
idCol,
algorithm,
validationCol,
nFutures,
meanAverageWindowSize,
paramRange)
case XGBoostAlgorithm =>
predictSmallModelFeatureBased[L, G](
train,
test,
labelCol,
featuresCol,
timeCol,
idCol,
groupByCol,
algorithm,
validationCol)
case _ =>
throw new UnexpectedValueException(
s"Algorithm $algorithm can't be used to predict a Forecast")
}
}
def predictSmallModelFeatureBased[L, G](
train: DataFrame,
test: DataFrame,
labelCol: String,
featuresCol: Seq[String],
timeCol: String,
idCol: String,
groupByCol: String,
algorithm: Algorithm = XGBoostAlgorithm,
validationCol: String
)(implicit kt: ClassTag[L], ord: Ordering[L], gt: ClassTag[G]): (DataFrame, PipelineModel, Double) = {
require(
algorithm == XGBoostAlgorithm,
"The accepted algorithm for this method is XGBoostAlgorithm")
val pipeline = prepareXGBoostSmallModel[L, G](
labelCol,
featuresCol,
validationCol,
timeCol,
idCol,
groupByCol,
train.schema)
val cachedTrain = train.cache
val cachedTest = test.cache()
val model = pipeline.fit(cachedTrain)
val result = model.transform(cachedTest).cache
val joined =
result.select(idCol, IUberdataForecastUtil.FEATURES_PREDICTION_COL_NAME, groupByCol, timeCol, IUberdataForecastUtil.METRIC_COL_NAME)
val dfToBeReturned = joined.withColumnRenamed("featuresPrediction", "prediction").select(idCol, "prediction", groupByCol, timeCol)
(dfToBeReturned.sort(idCol), model, calculateAccuracySmallModelFeatureBased(joined))
}
def prepareSmallModelPipeline[G](train: DataFrame,
test: DataFrame,
groupByCol: String,
labelCol: String,
timeCol: String,
idCol: String,
algorithm: Algorithm,
validationCol: String,
nFutures: Int,
meanAverageWindowSize: Seq[Int],
paramRange: Array[Int])(implicit ct: ClassTag[G]): Pipeline = {
algorithm match {
case Arima =>
prepareARIMAPipeline[G](groupByCol, labelCol, validationCol, timeCol, nFutures, paramRange)
case HoltWinters =>
prepareHOLTWintersPipeline[G](
groupByCol,
labelCol,
validationCol,
timeCol,
nFutures
)
case MovingAverage8 =>
prepareMovingAveragePipeline[G](
groupByCol,
labelCol,
validationCol,
timeCol,
8
)
case MovingAverage16 =>
prepareMovingAveragePipeline[G](
groupByCol,
labelCol,
validationCol,
timeCol,
16
)
case MovingAverage26 =>
prepareMovingAveragePipeline[G](
groupByCol,
labelCol,
validationCol,
timeCol,
26
)
case FindBestForecast =>
prepareBestForecastPipeline[G](
groupByCol,
labelCol,
validationCol,
timeCol,
nFutures,
meanAverageWindowSize,
paramRange)
case _ =>
throw new UnexpectedValueException(
s"Algorithm $algorithm can't be used to predict a Forecast")
}
}
def predictSmallModelFuture[G](
train: DataFrame,
test: DataFrame,
groupByCol: String,
labelCol: String,
timeCol: String,
idCol: String,
algorithm: Algorithm = FindBestForecast,
validationCol: String,
nFutures: Int = 6,
meanAverageWindowSize: Seq[Int] = Seq(8, 16, 26),
paramRange: Array[Int] = defaultRange
)(implicit kt: ClassTag[G], ord: Ordering[G] = null): (DataFrame, PipelineModel, Double) = {
require(
algorithm != XGBoostAlgorithm,
"The accepted algorithms for this method doesn't include XGBoost")
val pipeline = prepareSmallModelPipeline(
train,
test,
groupByCol,
labelCol,
timeCol,
idCol,
algorithm,
validationCol,
nFutures,
meanAverageWindowSize,
paramRange)
val cachedTrain = train.cache
val model = pipeline.fit(cachedTrain)
val result = model.transform(cachedTrain)
val timeColIndex = test.columns.indexOf(timeCol)
val sparkContext = train.sqlContext.sparkContext
val timeColIndexBc = sparkContext.broadcast(timeColIndex)
val labelColBc = sparkContext.broadcast(groupByCol)
val validationColBc = sparkContext.broadcast(validationCol)
val validationColIndexBc =
sparkContext.broadcast(result.columns.indexOf(validationCol))
val labelColIndexBc =
sparkContext.broadcast(result.columns.indexOf(groupByCol))
val featuresColIndexBc =
sparkContext.broadcast(result.columns.indexOf("features"))
val featuresValidationColIndexBc =
sparkContext.broadcast(result.columns.indexOf("featuresValidation"))
val groupedTest = test.rdd
.groupBy(row => row.getAs[G](labelColBc.value))
.map {
case (key, values) =>
val sort = values.toArray.map { row =>
IUberdataForecastUtil.convertColumnToLongAddAtEnd(row, timeColIndexBc.value)
}.sortBy(row => row.getAs[Long](row.size - 1))
(key, sort)
}
.cache
val keyValueResult = result.rdd
.map(
row =>
(row.getAs[G](labelColBc.value),
(row
.getAs[org.apache.spark.ml.linalg.Vector](
validationColBc.value
)
.toArray,
row))
)
.cache
val forecastResult = keyValueResult.join(groupedTest).flatMap {
case (key, ((predictions, row), ids)) =>
val filteredRow = row.schema.zipWithIndex.filter {
case (value, index) =>
index != validationColIndexBc.value &&
index != labelColIndexBc.value && index != featuresColIndexBc.value &&
index != featuresValidationColIndexBc.value && value.name != "featuresPrediction"
}
ids.zip(predictions).map {
case (id, prediction) =>
val seq = id.toSeq
val (used, _) = seq.splitAt(seq.length - 1)
Row(
used ++ filteredRow.map { case (_, index) => row.get(index) } :+ Math.round(
prediction): _*
)
}
}
val sqlContext = train.sqlContext
val schema = result.schema.fields
.filter(
f =>
f.name != validationCol && f.name != groupByCol && f.name != "features"
&& f.name != "featuresValidation" && f.name != "featuresPrediction"
)
.foldLeft(test.schema) {
case (testSchema, field) => testSchema.add(field)
}
.add(StructField("prediction", LongType))
val df = sqlContext.createDataFrame(forecastResult, schema)
(df, model, calculateAccuracySmallModelFuture(result))
}
def saveResult[T](toBeSaved: RDD[(T, Long)], path: String): Unit = {
toBeSaved.map {
case (key, value) => s"$key,$value"
}.coalesce(1).saveAsTextFile(path)
}
def predictBigModelFuture(
train: DataFrame,
test: DataFrame,
algorithm: Algorithm,
labelCol: String,
idCol: String,
timeCol: String,
featuresCol: Seq[String],
rounds: Int = 2000,
params: Map[String, Any] = Map.empty[String, Any]): (DataFrame, PipelineModel, Double) = {
val pipeline = algorithm match {
case XGBoostAlgorithm =>
prepareXGBoostBigModel(labelCol, idCol, featuresCol, timeCol, train.schema, rounds, params)
case _ => throw new UnsupportedOperationException()
}
val model = pipeline.fit(train.cache)
val predictions = model.transform(test).cache
val index = (train.count()*0.2).toInt
val trainForValidation = train.limit(index)
if(train.columns.contains(idCol)) {
val validation = model.transform(trainForValidation).cache.withColumnRenamed(idCol, "id1").select("id1", "prediction")
val joined = validation.join(train, validation("id1") === train(idCol)).select(idCol, "prediction", labelCol)
.filter(s"${labelCol} > 0")
val joinedWithError = joined.withColumn("Error", abs(joined(labelCol) - joined("prediction")) / joined(labelCol))
(predictions.sort(idCol), model, calculateAccuracyBigModelFuture(joinedWithError))
}else{
(predictions.sort(idCol), model, 0.0)
}
}
def prepareXGBoostBigModel[L, G](
labelCol: String,
idCol: String,
featuresCol: Seq[String],
timeCol: String,
schema: StructType,
rounds: Int,
params: Map[String, Any])(implicit ct: ClassTag[L], gt: ClassTag[G]): Pipeline = {
val validationCol: String = "validation"
val timeSeriesEvaluator: TimeSeriesEvaluator[G] = new TimeSeriesEvaluator[G]()
.setValidationCol(validationCol)
.setLabelCol(labelCol)
.setMetricName("rmspe")
val xgboost = new XGBoostBestBigModelFinder[L, G]()
.setTimeSeriesEvaluator(timeSeriesEvaluator)
.setLabelCol(labelCol)
.setIdCol(idCol)
.setXGBoostLinearParams(params)
.setXGBoostRounds(rounds)
.setTimeCol(timeCol)
.setValidationCol(validationCol)
new Pipeline().setStages(
createXGBoostPipelineStages(labelCol, featuresCol, "", Some(idCol), timeCol, schema = schema)
:+ xgboost)
}
private def calculateAccuracySmallModelFuture(df: DataFrame): Double = {
/*val spark = SparkSession.builder.
master("local")
.appName("tester")
.getOrCreate()
import spark.implicits._*/
if(df.columns.contains("featuresValidation")) {
val errorsArray = df.select("features", "featuresValidation").rdd.map { case Row(v1: org.apache.spark.ml.linalg.Vector, v2: org.apache.spark.ml.linalg.Vector) =>
val zipArray = v1.toArray.zip(v2.toArray).map {
f => if (f._1 == 0) {
0
} else {
Math.abs(f._1 - f._2) / f._1
}
}
zipArray.sum / zipArray.length
}.collect
1.0 - errorsArray.sum / errorsArray.length
}else{
// TODO: accuracy still needs to be implemented for the case where 'featuresValidation' does not exist - for example: ARIMA
1.0
}
}
private def calculateAccuracySmallModelFeatureBased(df: DataFrame): Double = {
val rmspe_medio = df.agg(avg("metric")).first.get(0).asInstanceOf[Double]
val n = df.count
val accuracy = 1.0 - ((rmspe_medio*rmspe_medio*n)*(rmspe_medio*rmspe_medio*n))/n
accuracy
}
private def calculateAccuracyBigModelFuture(df: DataFrame): Double = {
val erro_medio = df.agg(avg("Error")).first.get(0).asInstanceOf[Double]
val accuracy = 1.0 - erro_medio
accuracy
}
}
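// Hedged usage sketch (not part of the original source; the column names and the
// [Double, Int] type parameters are illustrative assumptions): forecasting a per-group
// series with the default FindBestForecast algorithm.
object ForecastPredictorExample {
  import org.apache.spark.sql.DataFrame

  def run(train: DataFrame, test: DataFrame): DataFrame = {
    val (forecast, _, _) = ForecastPredictor().predict[Double, Int](
      train, test,
      labelCol = "sales", featuresCol = Seq("sales"),
      timeCol = "Date", idCol = "id", groupByCol = "store")
    forecast
  }
}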
|
eleflow/uberdata
|
iuberdata_core/src/main/scala/eleflow/uberdata/ForecastPredictor.scala
|
Scala
|
apache-2.0
| 23,526 |
package dotty.tools.dotc
package transform
import MegaPhase._
import core.DenotTransformers._
import core.Symbols._
import core.Contexts._
import core.Types._
import core.Flags._
import core.Decorators._
import core.NameKinds.LiftedTreeName
import NonLocalReturns._
import util.Store
/** Lifts try's that might be executed on non-empty expression stacks
* to their own methods. I.e.
*
* try body catch handler
*
* is lifted to
*
* { def liftedTree$n() = try body catch handler; liftedTree$n() }
*
* However, don't lift try's without catch expressions (try-finally).
* Lifting is needed only for try-catch expressions that are evaluated in a context
* where the stack might not be empty. `finally` does not attempt to continue evaluation
* after an exception, so the fact that values on the stack are 'lost' does not matter
* (copied from https://github.com/scala/scala/pull/922).
*/
class LiftTry extends MiniPhase with IdentityDenotTransformer { thisPhase =>
import ast.tpd._
override def phaseName: String = LiftTry.name
override def description: String = LiftTry.description
private var NeedLift: Store.Location[Boolean] = _
private def needLift(using Context): Boolean = ctx.store(NeedLift)
override def initContext(ctx: FreshContext): Unit =
NeedLift = ctx.addLocation(false)
private def liftingCtx(p: Boolean)(using Context) =
if (needLift == p) ctx else ctx.fresh.updateStore(NeedLift, p)
override def prepareForApply(tree: Apply)(using Context): Context =
liftingCtx(true)
override def prepareForDefDef(tree: DefDef)(using Context): Context =
liftingCtx(false)
override def prepareForValDef(tree: ValDef)(using Context): Context =
if !tree.symbol.exists
|| tree.symbol.isSelfSym
|| tree.symbol.owner == ctx.owner.enclosingMethod
&& !tree.symbol.is(Lazy)
// The current implementation wraps initializers of lazy vals in
// calls to an initialize method, which means that a `try` in the
// initializer needs to be lifted. Note that the new scheme proposed
// in #6979 would avoid this.
then ctx
else liftingCtx(true)
override def prepareForAssign(tree: Assign)(using Context): Context =
if (tree.lhs.symbol.maybeOwner == ctx.owner.enclosingMethod) ctx
else liftingCtx(true)
override def prepareForReturn(tree: Return)(using Context): Context =
if (!isNonLocalReturn(tree)) ctx
else liftingCtx(true)
override def prepareForTemplate(tree: Template)(using Context): Context =
liftingCtx(false)
override def transformTry(tree: Try)(using Context): Tree =
if (needLift && tree.cases.nonEmpty) {
report.debuglog(i"lifting tree at ${tree.span}, current owner = ${ctx.owner}")
val fn = newSymbol(
ctx.owner, LiftedTreeName.fresh(), Synthetic | Method,
MethodType(Nil, tree.tpe.widenIfUnstable), coord = tree.span)
tree.changeOwnerAfter(ctx.owner, fn, thisPhase)
Block(DefDef(fn, tree) :: Nil, ref(fn).appliedToNone)
}
else tree
}
object LiftTry:
val name = "liftTry"
val description: String = "lift any try that might be executed on a non-empty expression stack"
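// Hedged concrete illustration (not part of the original source): conceptually, an expression
//   val x = 1 + (try f() catch { case _: Exception => 0 })
// becomes
//   val x = 1 + { def liftedTree$1() = try f() catch { case _: Exception => 0 }; liftedTree$1() }
// so the try/catch is never evaluated on a non-empty operand stack; try/finally is left alone.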
|
dotty-staging/dotty
|
compiler/src/dotty/tools/dotc/transform/LiftTry.scala
|
Scala
|
apache-2.0
| 3,210 |
/* NSC -- new Scala compiler
* Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package dotty.tools.io
import scala.language.unsafeNulls
import java.io.{
IOException, InputStream, OutputStream, BufferedOutputStream,
ByteArrayOutputStream
}
import java.net.URL
import java.nio.file.{FileAlreadyExistsException, Files, Paths}
/**
* An abstraction over files for use in the reflection/compiler libraries.
*
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*
* @author Philippe Altherr
* @version 1.0, 23/03/2004
*/
object AbstractFile {
def getFile(path: String): AbstractFile = getFile(File(path))
def getDirectory(path: String): AbstractFile = getDirectory(Directory(path))
def getFile(path: JPath): AbstractFile = getFile(File(path))
def getDirectory(path: JPath): AbstractFile = getDirectory(Directory(path))
/**
* If the specified File exists and is a regular file, returns an
* abstract regular file backed by it. Otherwise, returns `null`.
*/
def getFile(path: Path): AbstractFile =
if (path.isFile) new PlainFile(path) else null
/**
* If the specified File exists and is either a directory or a
* readable zip or jar archive, returns an abstract directory
* backed by it. Otherwise, returns `null`.
*/
def getDirectory(path: Path): AbstractFile =
if (path.isDirectory) new PlainFile(path)
else if (path.isFile && Path.isExtensionJarOrZip(path.jpath)) ZipArchive fromFile path.toFile
else null
/**
* If the specified URL exists and is a regular file or a directory, returns an
* abstract regular file or an abstract directory, respectively, backed by it.
* Otherwise, returns `null`.
*/
def getURL(url: URL): AbstractFile =
if (url.getProtocol != "file") null
else new PlainFile(new Path(Paths.get(url.toURI)))
def getResources(url: URL): AbstractFile = ZipArchive fromManifestURL url
}
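// Hedged usage sketch (illustrative only, not part of the original source; the paths are made up):
//   AbstractFile.getDirectory("src")            // existing directory            -> PlainFile
//   AbstractFile.getDirectory("lib/tools.jar")  // readable zip/jar archive      -> ZipArchive
//   AbstractFile.getFile("Missing.scala")       // not an existing regular file  -> null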
/**
* <p>
* This class and its children serve to unify handling of files and
* directories. These files and directories may or may not have some
 * real counterpart within the file system. For example, some file
* handles reference files within a zip archive or virtual ones
* that exist only in memory.
* </p>
* <p>
* Every abstract file has a path (i.e. a full name) and a name
* (i.e. a short name) and may be backed by some real File. There are
* two different kinds of abstract files: regular files and
* directories. Regular files may be read and have a last modification
* time. Directories may list their content and look for subfiles with
* a specified name or path and of a specified kind.
* </p>
* <p>
 * The interface does <b>not</b> allow access to the content.
* The class `symtab.classfile.AbstractFileReader` accesses
* bytes, knowing that the character set of classfiles is UTF-8. For
* all other cases, the class `SourceFile` is used, which honors
* `global.settings.encoding.value`.
* </p>
*
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
abstract class AbstractFile extends Iterable[AbstractFile] {
/** Returns the name of this abstract file. */
def name: String
/** Returns the path of this abstract file. */
def path: String
/** Returns the absolute path of this abstract file. */
def absolutePath: String = path
/** Returns the path of this abstract file in a canonical form. */
def canonicalPath: String = if (jpath == null) path else jpath.normalize.toString
/** Checks extension case insensitively. */
def hasExtension(other: String): Boolean = extension == other.toLowerCase
val extension: String = Path.extension(name)
/** The absolute file, if this is a relative file. */
def absolute: AbstractFile
/** Returns the containing directory of this abstract file */
def container : AbstractFile
/** Returns the underlying File if any and null otherwise. */
def file: JFile = try {
if (jpath == null) null
else jpath.toFile
} catch {
case _: UnsupportedOperationException => null
}
/** Returns the underlying Path if any and null otherwise. */
def jpath: JPath
/** An underlying source, if known. Mostly, a zip/jar file. */
def underlyingSource: Option[AbstractFile] = None
/** Does this abstract file denote an existing file? */
def exists: Boolean = {
(jpath eq null) || Files.exists(jpath)
}
/** Does this abstract file represent something which can contain classfiles? */
def isClassContainer: Boolean = isDirectory || (jpath != null && (extension == "jar" || extension == "zip"))
/** Create a file on disk, if one does not exist already. */
def create(): Unit
/** Delete the underlying file or directory (recursively). */
def delete(): Unit
/** Is this abstract file a directory? */
def isDirectory: Boolean
/** Is this abstract file virtual, i.e. not backed by anything on-disk? */
def isVirtual: Boolean = false
/** Returns the time that this abstract file was last modified. */
def lastModified: Long
/** Returns an input stream so the file can be read. */
def input: InputStream
/** Returns an output stream for writing the file */
def output: OutputStream
/** Returns a buffered output stream for writing the file - defaults to out */
def bufferedOutput: BufferedOutputStream = new BufferedOutputStream(output)
/** size of this file if it is a concrete file. */
def sizeOption: Option[Int] = None
def toURL: URL = if (jpath == null) null else jpath.toUri.toURL
/** Returns contents of file (if applicable) in a Char array.
* warning: use `Global.getSourceFile()` to use the proper
* encoding when converting to the char array.
*/
@throws(classOf[IOException])
def toCharArray: Array[Char] = new String(toByteArray).toCharArray
/** Returns contents of file (if applicable) in a byte array.
*/
@throws(classOf[IOException])
def toByteArray: Array[Byte] = {
val in = input
sizeOption match {
case Some(size) =>
var rest = size
val arr = new Array[Byte](rest)
while (rest > 0) {
val res = in.read(arr, arr.length - rest, rest)
if (res == -1)
throw new IOException("read error")
rest -= res
}
in.close()
arr
case None =>
val out = new ByteArrayOutputStream()
var c = in.read()
while(c != -1) {
out.write(c)
c = in.read()
}
in.close()
out.toByteArray()
}
}
/** Returns all abstract subfiles of this abstract directory. */
def iterator(): Iterator[AbstractFile]
/** Drill down through subdirs looking for the target, as in lookupName.
 * The target name is the last element of `parts`.
*/
final def lookupPath(parts: Seq[String], directory: Boolean): AbstractFile =
var file: AbstractFile = this
var i = 0
val n = parts.length - 1
while file != null && i < n do
file = file.lookupName(parts(i), directory = true)
i += 1
if file == null then null else file.lookupName(parts(i), directory = directory)
end lookupPath
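// For example (editor's illustration, hypothetical receiver and path):
//   pkgRoot.lookupPath(List("scala", "collection", "List.class"), directory = false)
// drills into scala/collection and then looks up the regular file List.class.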
/** Returns the abstract file in this abstract directory with the specified
* name. If there is no such file, returns `null`. The argument
* `directory` tells whether to look for a directory or
* a regular file.
*/
def lookupName(name: String, directory: Boolean): AbstractFile
/** Returns an abstract file with the given name. It does not
* check that it exists.
*/
def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile
/** Return an abstract file that does not check that `path` denotes
* an existing file.
*/
def lookupPathUnchecked(path: String, directory: Boolean): AbstractFile = {
lookup((f, p, dir) => f.lookupNameUnchecked(p, dir), path, directory)
}
private def lookup(getFile: (AbstractFile, String, Boolean) => AbstractFile,
path0: String,
directory: Boolean): AbstractFile = {
val separator = java.io.File.separatorChar
// trim a trailing separator, if any
val path: String = if (path0.last == separator) path0 dropRight 1 else path0
val length = path.length()
assert(length > 0 && !(path.last == separator), path)
var file = this
var start = 0
while (true) {
val index = path.indexOf(separator, start)
assert(index < 0 || start < index, ((path, directory, start, index)))
val name = path.substring(start, if (index < 0) length else index)
file = getFile(file, name, if (index < 0) directory else true)
if ((file eq null) || index < 0) return file
start = index + 1
}
file
}
private def fileOrSubdirectoryNamed(name: String, isDir: Boolean): AbstractFile =
lookupName(name, isDir) match {
case null =>
// the optional exception may be thrown for symlinks, notably /tmp on macOS.
// isDirectory tests for an existing directory; by default Files.isDirectory(jpath) follows symlinks (as if a hypothetical FOLLOW_LINKS option were passed).
try Files.createDirectories(jpath)
catch { case _: FileAlreadyExistsException if Files.isDirectory(jpath) => }
// a race condition in creating the entry after the failed lookup may throw
val path = jpath.resolve(name)
if (isDir) Files.createDirectory(path)
else Files.createFile(path)
new PlainFile(new File(path))
case lookup => lookup
}
/**
* Get the file in this directory with the given name,
 * creating an empty file if it does not already exist.
*/
def fileNamed(name: String): AbstractFile = {
assert(isDirectory, "Tried to find '%s' in '%s' but it is not a directory".format(name, path))
fileOrSubdirectoryNamed(name, isDir = false)
}
/**
* Get the subdirectory with a given name, creating it if it
* does not already exist.
*/
def subdirectoryNamed(name: String): AbstractFile = {
assert (isDirectory, "Tried to find '%s' in '%s' but it is not a directory".format(name, path))
fileOrSubdirectoryNamed(name, isDir = true)
}
protected def unsupported(): Nothing = unsupported(null)
protected def unsupported(msg: String): Nothing = throw new UnsupportedOperationException(msg)
/** Returns the path of this abstract file. */
override def toString(): String = path
}
|
dotty-staging/dotty
|
compiler/src/dotty/tools/io/AbstractFile.scala
|
Scala
|
apache-2.0
| 10,418 |
/*
* Copyright (c) 2015 Alexandros Pappas p_alx hotmail com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package gr.gnostix.freeswitch.actors
import akka.actor.SupervisorStrategy.Restart
import akka.actor._
import akka.pattern.{ask, pipe}
import akka.util.Timeout
import scala.language.postfixOps
import scala.concurrent.duration._
import scala.concurrent.Future
object CallRouter {
def props(wsLiveEventsActor: ActorRef, completedCallsActor: ActorRef): Props =
Props(new CallRouter(wsLiveEventsActor, completedCallsActor))
}
class CallRouter(wsLiveEventsActor: ActorRef, completedCallsActor: ActorRef) extends Actor with ActorLogging {
import gr.gnostix.freeswitch.actors.ActorsProtocol._
import context.dispatcher
override val supervisorStrategy =
OneForOneStrategy() {
case _ => Restart
}
implicit val timeout = Timeout(1 seconds) // needed for `?` below
// create the FailedCallsActor in advance
val failedCallsActor = context.actorOf(Props(classOf[FailedCallsActor],wsLiveEventsActor), "failedCallsActor")
// get reference of CompletedCallsActor
//val completedCallsActor = context.actorSelection("/user/centralMessageRouter/completedCallsActor")
def idle(activeCalls: scala.collection.Map[String, ActorRef]): Receive = {
case x@CallNew(uuid, eventName, fromUser, toUser, readCodec, writeCodec, fromUserIP, toUserIP, callUUID,
callerChannelCreatedTime, callerChannelAnsweredTime, freeSWITCHHostname, freeSWITCHIPv4, callDirection,
pdd, ringTimeSec, None, None)
if callUUID != "_UNKNOWN" =>
log info x.toString
(activeCalls get callUUID) match {
case None =>
// wsLiveEventsActor ! ActorsJsonProtocol.newCallToJson(x)
wsLiveEventsActor ! x
val actor = context.actorOf(Props[CallActor], callUUID)
actor ! x
context watch actor
val newMap = activeCalls updated(callUUID, actor)
context become idle(newMap)
case Some(actor) =>
actor ! x
log info s"Call $callUUID already active, sending the second channel .."
}
case x@CallNew(uuid, eventName, fromUser, toUser, readCodec, writeCodec, fromUserIP, toUserIP, callUUID,
callerChannelCreatedTime, callerChannelAnsweredTime, freeSWITCHHostname, freeSWITCHIPv4, callDirection,
pdd, ringTimeSec, None, None) =>
log info "_UNKNOWN" + x.toString
case x@CallEnd(uuid, eventName, fromUser, toUser, readCodec, writeCodec, fromUserIP, toUserIP, callUUID,
callerChannelCreatedTime, callerChannelAnsweredTime, callerChannelHangupTime, freeSWITCHHostname,
freeSWITCHIPv4, hangupCause, billSec, rtpQualityPerc, otherLegUniqueId, hangupDisposition, callDirection, mos,
pdd, ringTimeSec, None, None)
if x.callUUID != "_UNKNOWN" =>
log info "-----> " + x.toString
(activeCalls get callUUID) match {
case None =>
(activeCalls get otherLegUniqueId) match {
case None =>
x.callerChannelAnsweredTime match {
case None => failedCallsActor ! x
case Some(a) => log warning s"Call $callUUID doesn't exist! with answered time " + a
}
log info s"Call $callUUID doesn't exist!"
case Some(actor) =>
actor ! x
log info s"Call otherLegUniqueId $otherLegUniqueId already active"
}
case Some(actor) =>
actor ! x
log info s"Call $callUUID already active"
}
case x@CallEnd(uuid, eventName, fromUser, toUser, readCodec, writeCodec, fromUserIP, toUserIP, callUUID,
callerChannelCreatedTime, callerChannelAnsweredTime, callerChannelHangupTime, freeSWITCHHostname,
freeSWITCHIPv4, hangupCause, billSec, rtpQualityPerc, otherLegUniqueId, hangupDisposition, callDirection, mos,
pdd, ringTimeSec, None, None) =>
log info s"no uuid $uuid" + x.toString
case GetConcurrentCalls =>
val calls = activeCalls.keys.toList
//log info s"======== $calls"
// channels / 2 (each call has two channels)
sender() ! GetCallsResponse(calls.size, calls)
case x@GetTotalConcurrentCalls =>
// log info "call router GetConcurrentCalls received .."
sender ! ConcurrentCallsNum(activeCalls.size)
case x@GetFailedCalls =>
log info "--------> ask for failed calls"
failedCallsActor forward x
case x@GetFailedCallsByDate =>
failedCallsActor forward x
case x@GetTotalFailedCalls =>
failedCallsActor forward x
case x @ GetFailedCallsAnalysis(fromNumberOfDigits, toNumberOfDigits) =>
failedCallsActor forward x
case x @ GetFailedCallsChannelByTime(t) =>
failedCallsActor forward x
case x@GetCallInfo(callUuid) =>
(activeCalls get callUuid) match {
case None =>
val response = s"Invalid call $callUuid"
log warning response
sender() ! response
case Some(actor) =>
// get both channels from the next call actor
log info "----> sending request for call info to actor"
actor forward x
}
case x@GetChannelInfo(callUuid, channeluuid) =>
(activeCalls get callUuid) match {
case None =>
val response = s"Invalid call $callUuid"
log warning response
sender() ! response
case Some(actor) =>
actor forward x
}
case x @ GetConcurrentCallsChannel =>
log info "call router got event GetConcurrentCallsChannel"
val f: List[Future[Option[CallNew]]] = activeCalls.map{
case (a,y) => (y ? x).mapTo[Option[CallNew]]
}.toList
Future.sequence(f) pipeTo sender
case x @ GetFailedCallsChannel =>
failedCallsActor forward x
case x @ GetConcurrentCallsChannelByIpPrefix(ip, prefix) =>
val f: List[Future[Option[CallNew]]] = activeCalls.map{
case (a,y) => (y ? x).mapTo[Option[CallNew]]
}.toList
Future.sequence(f) pipeTo sender
case CallTerminated(callEnd) =>
val completedCall = activeCalls.filter(_._2 == sender())
completedCall.size match {
case 0 => log error "this call doesn't exist in concurrent calls when completed !!!"
case _ => completedCallsActor ! CompletedCall(completedCall.head._1, callEnd.callerChannelHangupTime, completedCall.head._2)
}
wsLiveEventsActor ! callEnd
//wsLiveEventsActor ! ActorsJsonProtocol.endCallToJson(callEnd)
val newMap = activeCalls.filter(_._2 != sender())
context become idle(newMap)
/*
case Terminated(actor: ActorRef) =>
val completedCall = activeCalls.filter(_._2 == sender())
completedCallsActor ! completedCall
val newMap = activeCalls.filter(_._2 != sender())
context become idle(newMap)
*/
case x =>
log.info("---- I don't know this event: " + x.toString)
}
def receive: Receive =
idle(scala.collection.Map.empty[String, ActorRef])
}
|
gnostix/freeswitch-monitoring
|
src/main/scala/gr/gnostix/freeswitch/actors/CallRouter.scala
|
Scala
|
apache-2.0
| 7,554 |
package scala.meta
package internal
package quasiquotes
import scala.compat.Platform.EOL
import scala.language.experimental.macros
import scala.reflect.macros.whitebox.Context
import scala.meta.internal.trees.{Reflection => AstReflection}
// NOTE: we don't have the signature as [O, I] to keep symmetry with Unlift
object Lift {
def apply[I](outside: Any): I = macro ConversionMacros.liftApply[I]
def unapply[I](outside: Any): Option[I] = macro ConversionMacros.liftUnapply[I]
}
// NOTE: here we can't have the signature be [I, O], because we never know I
// in the case of Unlift.apply, we've just assembled the reified result and don't know its type yet
// in the case of Unlift.unapply, we only know the expected type of the unquote, not its actual type
// it would be nice if Scala supported partially provided type argument lists
object Unlift {
def apply[O](inside: Any): O = macro ConversionMacros.unliftApply[O]
def unapply[O](inside: Any): Option[O] = macro ConversionMacros.unliftUnapply[O]
}
class ConversionMacros(val c: Context) extends AstReflection {
lazy val u: c.universe.type = c.universe
lazy val mirror: u.Mirror = c.mirror
val XtensionQuasiquoteTerm = "shadow scala.meta quasiquotes"
import u._
val MetaLift = mirror.staticClass("scala.meta.quasiquotes.Lift")
val MetaUnlift = mirror.staticClass("scala.meta.quasiquotes.Unlift")
private def typeMismatchMessage(found: c.Type, req: c.Type): String = {
val g = c.universe.asInstanceOf[scala.tools.nsc.Global]
val msg = g.analyzer.foundReqMsg(found.asInstanceOf[g.Type], req.asInstanceOf[g.Type])
val foundReqMessage = msg.replace("meta.", "scala.meta.").replace("scala.scala.", "scala.")
"type mismatch when unquoting" + foundReqMessage
}
def liftApply[I](outside: c.Tree)(implicit I: c.WeakTypeTag[I]): c.Tree = {
val outsideTpe = outside.tpe
val insideTpe = I.tpe
if (outsideTpe <:< insideTpe) {
val needsCast = !(outsideTpe <:< insideTpe)
if (needsCast) q"$outside.asInstanceOf[$insideTpe]"
else outside
} else {
val liftable =
c.inferImplicitValue(appliedType(MetaLift, outsideTpe, insideTpe), silent = true)
if (liftable.nonEmpty) {
q"$liftable.apply($outside)"
} else {
c.abort(c.enclosingPosition, typeMismatchMessage(outsideTpe, insideTpe))
}
}
}
def liftUnapply[I](outside: c.Tree)(implicit I: c.WeakTypeTag[I]): c.Tree = {
// NOTE: Here's an interesting idea that I'd like to explore.
// How about we allow things like `42 match { case q"$x" => x }`?
// For that to work, we just need to wrap the reification result into `Lift.unapply`!
???
}
def unliftApply[O](inside: c.Tree)(implicit O: c.WeakTypeTag[O]): c.Tree = {
// NOTE: here we just disregard the expected outside type, because I can't find uses for it
// duality is a fun thing, but it looks like here it just led me into a dead-end
q"$inside"
}
def unliftUnapply[O](inside: c.Tree)(implicit O: c.WeakTypeTag[O]): c.Tree = {
val insideTpe = inside.tpe
val outsideTpe = O.tpe
if (insideTpe <:< outsideTpe) {
q"_root_.scala.Some($inside: $insideTpe)"
} else {
val unliftable =
c.inferImplicitValue(appliedType(MetaUnlift, insideTpe, outsideTpe), silent = true)
if (unliftable.nonEmpty) {
q"$unliftable.apply($inside)"
} else {
c.abort(c.enclosingPosition, typeMismatchMessage(insideTpe, outsideTpe))
}
}
}
}
|
scalameta/scalameta
|
scalameta/quasiquotes/shared/src/main/scala/scala/meta/internal/quasiquotes/ConversionMacros.scala
|
Scala
|
bsd-3-clause
| 3,499 |
package org.locationtech.geomesa.plugin
import org.apache.wicket.behavior.SimpleAttributeModifier
import org.apache.wicket.markup.html.form.{Form, FormComponent}
import org.apache.wicket.markup.html.panel.Panel
import org.apache.wicket.model.{IModel, ResourceModel}
import org.geoserver.web.data.store.StoreEditPanel
import org.geoserver.web.data.store.panel.{ParamPanel, PasswordParamPanel, TextParamPanel}
import org.geoserver.web.util.MapModel
import org.geotools.data.DataAccessFactory.Param
/*
* Copyright 2014 Commonwealth Computer Research, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
abstract class GeoMesaStoreEditPanel (componentId: String, storeEditForm: Form[_])
extends StoreEditPanel(componentId, storeEditForm) {
def addTextPanel(paramsModel: IModel[_], param: Param): FormComponent[_] = {
val paramName = param.key
val resourceKey = getClass.getSimpleName + "." + paramName
val required = param.required
val textParamPanel =
new TextParamPanel(paramName,
new MapModel(paramsModel, paramName).asInstanceOf[IModel[_]],
new ResourceModel(resourceKey, paramName), required)
addPanel(textParamPanel, param, resourceKey)
}
def addPasswordPanel(paramsModel: IModel[_], param: Param): FormComponent[_] = {
val paramName = param.key
val resourceKey = getClass.getSimpleName + "." + paramName
val required = param.required
val passParamPanel =
new PasswordParamPanel(paramName,
new MapModel(paramsModel, paramName).asInstanceOf[IModel[_]],
new ResourceModel(resourceKey, paramName), required)
addPanel(passParamPanel, param, resourceKey)
}
def addPanel(paramPanel: Panel with ParamPanel, param: Param, resourceKey: String): FormComponent[_] = {
paramPanel.getFormComponent.setType(classOf[String])
val defaultTitle = String.valueOf(param.description)
val titleModel = new ResourceModel(resourceKey + ".title", defaultTitle)
val title = String.valueOf(titleModel.getObject)
paramPanel.add(new SimpleAttributeModifier("title", title))
add(paramPanel)
paramPanel.getFormComponent
}
}
|
mmatz-ccri/geomesa
|
geomesa-plugin/src/main/scala/org/locationtech/geomesa/plugin/GeoMesaStoreEditPanel.scala
|
Scala
|
apache-2.0
| 2,641 |
/*
* Copyright (c) 2013-2014 TelefΓ³nica InvestigaciΓ³n y Desarrollo S.A.U.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package es.tid.cosmos.servicemanager.services
import es.tid.cosmos.servicemanager.ServiceInstance
import es.tid.cosmos.servicemanager.services.dependencies.ServiceDependencies
/** Representation of a service definition.
*
 * Derived classes should be objects, as only one instance per service is assumed.
*/
trait Service {
/** Type of the values that parametrize it to have a service instance */
type Parametrization
val name: String
/** Default parametrization of the service.
*
 * Services returning a value different from None can be instantiated with a default configuration.
 * That is especially useful when services are instantiated as transitive dependencies
 * of other services.
*/
def defaultParametrization: Option[Parametrization] = None
/** Direct service dependencies */
val dependencies: ServiceDependencies = ServiceDependencies.none
def instance(parametrization: Parametrization): ServiceInstance[this.type] =
ServiceInstance[this.type](this, parametrization)
lazy val defaultInstance: Option[ServiceInstance[this.type]] =
defaultParametrization.map(instance)
}
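// Editor's note: an illustrative, hypothetical service definition (not part of the
// original source), showing how the trait above is meant to be extended. Because
// `defaultParametrization` is defined, `defaultInstance` is non-empty and the
// service can be instantiated as a transitive dependency of other services.
object ExampleService extends Service {
  override type Parametrization = Int  // e.g. a replication factor (hypothetical)
  override val name = "exampleService"
  override def defaultParametrization: Option[Parametrization] = Some(3)
}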
|
telefonicaid/fiware-cosmos-platform
|
service-manager/src/main/scala/es/tid/cosmos/servicemanager/services/Service.scala
|
Scala
|
apache-2.0
| 1,781 |
import sbt._
import Keys._
import play.Project._
object ApplicationBuild extends Build {
val appName = "websocket-chat"
val appVersion = "1.0"
val mandubianRepo = Seq(
"Mandubian repository snapshots" at "https://github.com/mandubian/mandubian-mvn/raw/master/snapshots/",
"Mandubian repository releases" at "https://github.com/mandubian/mandubian-mvn/raw/master/releases/"
)
val appDependencies = Seq(
"org.mandubian" %% "play-actor-room" % "0.1"
)
val main = play.Project(appName, appVersion, appDependencies).settings(
resolvers ++= mandubianRepo
)
}
|
atrout/hackday-websocket-chat
|
samples/websocket-chat/project/Build.scala
|
Scala
|
apache-2.0
| 595 |
package io.jvm.uuid
package bench
import java.util.Locale
import java.util.concurrent.TimeUnit
import org.openjdk.jmh.annotations._
@State(Scope.Thread)
@BenchmarkMode(Array(Mode.AverageTime))
@OutputTimeUnit(TimeUnit.NANOSECONDS)
class ToStringBenchmark {
var randomUUID: UUID = _
@Setup(Level.Invocation)
def setup(): Unit = {
randomUUID = UUID.random
}
@Benchmark
def legacyToLowerString: String =
randomUUID.toString
@Benchmark
def optimizedToLowerString: String =
randomUUID.string
@Benchmark
def legacyToUpperString: String =
randomUUID.toString.toUpperCase(Locale.ROOT)
@Benchmark
def optimizedToUpperString: String =
randomUUID.toUpperCase
}
|
melezov/scala-uuid
|
bench/src/main/scala/io/jvm/uuid/bench/ToStringBenchmark.scala
|
Scala
|
bsd-3-clause
| 702 |
import scala.reflect.TypeTest
object Test {
def test[S, T](using TypeTest[S, T]): Unit = ()
val a: A = ???
test[Any, Any]
test[Int, Int]
test[Int, Any]
test[String, Any]
test[String, AnyRef]
test[Any, Int]
test[Any, String]
test[Any, Some[_]]
test[Any, Array[Int]]
test[Seq[Int], List[Int]]
test[Any, Some[Int]] // error
test[Any, a.X] // error
test[a.X, a.Y] // error
}
class A {
type X
type Y <: X
}
|
lampepfl/dotty
|
tests/neg-custom-args/fatal-warnings/type-test-syntesize.scala
|
Scala
|
apache-2.0
| 443 |
package scala.virtualization.lms
package epfl
package test5
import common._
import test1._
import java.io.PrintWriter
import java.io.FileOutputStream
trait JSGenFunctions extends JSGenEffect with BaseGenFunctions {
import IR._
override def emitNode(sym: Sym[Any], rhs: Def[Any]) = rhs match {
case Lambda(fun, x, y) =>
stream.println("var " + quote(sym) + " = function(" + quote(x) + ") {")
emitBlock(y)
stream.println("return " + quote(getBlockResult(y)))
stream.println("}")
case Apply(fun, arg) =>
emitValDef(sym, quote(fun) + "(" + quote(arg) + ")")
case _ => super.emitNode(sym, rhs)
}
}
trait JSGenTupledFunctions extends JSGenFunctions {
val IR: TupledFunctionsExp
import IR._
override def emitNode(sym: Sym[Any], rhs: Def[Any]) = rhs match {
case Lambda(fun, UnboxedTuple(xs), y) =>
stream.println("var " + quote(sym) + " = function" + xs.map(quote).mkString("(", ",", ")") + " {")
emitBlock(y)
stream.println("return " + quote(getBlockResult(y)))
stream.println("}")
case Apply(fun, UnboxedTuple(args)) =>
emitValDef(sym, quote(fun) + args.map(quote).mkString("(", ",", ")"))
case _ => super.emitNode(sym, rhs)
}
override def quote(x: Exp[Any]) : String = x match {
case UnboxedTuple(t) =>
t.zipWithIndex.map({ case(el, i) => "_" + (i+1) + ":" + quote(el)}).mkString("{", ",", "}")
case _ => super.quote(x)
}
}
trait JSGenStruct extends JSGenBase {
val IR: StructExp
import IR._
override def emitNode(sym: Sym[Any], rhs: Def[Any]) = rhs match {
case Struct(tag, elems) =>
registerStruct(structName(sym.tp), elems)
emitValDef(sym, "{__structName:" + structName(sym.tp) + "," + elems.map(e => e._1+":"+quote(e._2)).mkString(",") + "}")
// printlog("WARNING: emitting " + structName(sym.tp) + " struct " + quote(sym))
case FieldApply(struct, index) =>
emitValDef(sym, quote(struct) + "." + index)
// printlog("WARNING: emitting field access: " + quote(struct) + "." + index)
case FieldUpdate(struct, index, rhs) =>
emitValDef(sym, quote(struct) + "." + index + " = " + quote(rhs))
// printlog("WARNING: emitting field update: " + quote(struct) + "." + index)
case _ => super.emitNode(sym, rhs)
}
}
trait JSGenTupleOps extends JSGenBase with JSGenStruct {
val IR: TupleOpsExp
import IR._
/* FIXME: now using structs to implement tuples
override def emitNode(sym: Sym[Any], rhs: Def[Any]) = rhs match {
case ETuple2(a,b) =>
emitValDef(sym, "{_1:"+ quote(a) + ",_2:" + quote(b) + "}")
case Tuple2Access1(t) => emitValDef(sym, quote(t) + "._1")
case Tuple2Access2(t) => emitValDef(sym, quote(t) + "._2")
case _ => super.emitNode(sym, rhs)
}
*/
}
trait FunctionsProg { this: Print with Functions with IfThenElse with Equal =>
def test(x: Rep[Any]): Rep[Any] = {
val f = fun { x : Rep[Any] =>
print("foo")
x
}
f(f(x))
}
def test2(x: Rep[Double]): Rep[Double => Double] =
fun {(y: Rep[Double]) => if(y == x) unit(2 : Double) else y}
}
trait FunctionsRecursiveProg { this: Arith with Print with Functions =>
def test(x: Rep[Any]): Rep[Any] = {
val f = fun { x : Rep[Any] =>
print("foo")
x
}
lazy val g : Rep[Any => Any] = fun { x =>
print("bar")
g(x)
}
val h = fun { x : Rep[Any] =>
print("baz")
1
}
h(g(f(f(x))))
}
}
trait TwoArgsFunProg { this: TupledFunctions =>
def test(x: Rep[Double]): Rep[(Double, Double)] = {
val f = fun { (a : Rep[Double], b : Rep[Double]) => (b,a) }
f(f(x, x))
}
}
trait TupleFunProg { this: Arith with TupledFunctions =>
def test (x: Rep[Double]): Rep[(Double, Double)] = {
val f = fun { t : Rep[(Double, Double)] => t }
f(1.0, x)
}
}
trait NoArgFunProg { this: TupledFunctions =>
def test (x: Rep[Any]): Rep[Any] = {
val f = fun { () => x }
f()
}
}
trait TwoArgsRecursiveFunProg { this: TupledFunctions with Arith with Equal with IfThenElse =>
def test(x: Rep[Double]): Rep[Double] = {
lazy val iter : Rep[((Double,Double)) => Double] = fun { (n, acc) =>
if (n == 0) acc else iter(n-1, n*acc)
}
iter(x, unit(1.0))
}
}
trait SchedFunProg { this: Functions with Arith with Equal with IfThenElse =>
def foo: Rep[Double => Double] = fun { a =>
def iter : Rep[Double => Double] = fun { b =>
if (b == 0) a
else iter(b-1)
}
iter(a)
}
def test(x: Rep[Double]): Rep[Double] = {
foo(x)
}
}
class TestFunctions extends FileDiffSuite {
val prefix = home + "test-out/epfl/test5-"
def testFunctions = {
withOutFile(prefix+"functions") {
println("-- begin")
new FunctionsProg with PrintExp with FunctionsExp with IfThenElseExp with EqualExp{ self =>
val codegen = new ScalaGenPrint with ScalaGenFunctions with ScalaGenIfThenElse with ScalaGenEqual{ val IR: self.type = self }
val f = (x: Rep[Double]) => test(x)
codegen.emitSource(f, "Test", new PrintWriter(System.out))
val g = (x: Rep[Double]) => test2(x)
codegen.emitSource(g, "Test2", new PrintWriter(System.out))
}
new FunctionsProg with PrintExp with FunctionsExp with IfThenElseExp with EqualExp{ self =>
val codegen = new JSGenPrint with JSGenFunctions with JSGenIfThenElse with JSGenEqual{ val IR: self.type = self }
val f = (x: Rep[Double]) => test(x)
codegen.emitSource(f, "main", new PrintWriter(System.out))
val g = (x: Rep[Double]) => test2(x)
codegen.emitSource(g, "main2", new PrintWriter(System.out))
}
println("-- end")
}
assertFileEqualsCheck(prefix+"functions")
}
def testFunRetFun = {
withOutFile(prefix+"funretfun") {
println("-- begin")
new FunctionsProg with PrintExp with FunctionsExp with IfThenElseExp with EqualExp{ self =>
val codegen = new ScalaGenPrint with ScalaGenFunctions with ScalaGenIfThenElse with ScalaGenEqual{
val IR: self.type = self
}
val f = (x: Rep[Double]) => doLambda{(y: Rep[Int]) => test(x)}
codegen.emitSource(f, "Test", new PrintWriter(System.out))
}
new FunctionsProg with PrintExp with FunctionsExp with IfThenElseExp with EqualExp{ self =>
val codegen = new JSGenPrint with JSGenFunctions with JSGenIfThenElse with JSGenEqual{ val IR: self.type = self }
val f = (x: Rep[Double]) => doLambda{(y: Rep[Int]) => test(x)}
codegen.emitSource(f, "main", new PrintWriter(System.out))
}
println("-- end")
}
assertFileEqualsCheck(prefix+"funretfun")
}
def testFunctionsRecursive = {
withOutFile(prefix+"functionsrecursive") {
println("-- begin")
new FunctionsRecursiveProg with ArithExpOpt with PrintExp with FunctionsRecursiveExp { self =>
val codegen = new ScalaGenArith with ScalaGenPrint with ScalaGenFunctions { val IR: self.type = self }
val f = (x: Rep[Double]) => test(x)
codegen.emitSource(f, "Test", new PrintWriter(System.out))
}
new FunctionsRecursiveProg with ArithExpOpt with PrintExp with FunctionsRecursiveExp { self =>
val codegen = new JSGenArith with JSGenPrint with JSGenFunctions { val IR: self.type = self }
val f = (x: Rep[Double]) => test(x)
codegen.emitSource(f, "main", new PrintWriter(System.out))
}
println("-- end")
}
assertFileEqualsCheck(prefix+"functionsrecursive")
}
def testTwoArgsFun = {
withOutFile(prefix+"twoargsfun") {
new TwoArgsFunProg with TupledFunctionsExp { self =>
val codegen = new JSGenTupledFunctions with JSGenTupleOps with GenericGenUnboxedTupleAccess { val IR: self.type = self }
codegen.emitSource(test _, "main", new PrintWriter(System.out))
}
}
assertFileEqualsCheck(prefix+"twoargsfun")
}
def testTupleFun = {
withOutFile(prefix+"tuplefun") {
new TupleFunProg with ArithExp with TupledFunctionsExp { self =>
val codegen = new JSGenTupledFunctions with JSGenTupleOps with GenericGenUnboxedTupleAccess { val IR: self.type = self }
codegen.emitSource(test _, "main", new PrintWriter(System.out))
}
}
assertFileEqualsCheck(prefix+"tuplefun")
}
def testNoArgFun = {
withOutFile(prefix+"noargfun") {
new NoArgFunProg with TupledFunctionsRecursiveExp { self =>
val codegen = new JSGenTupledFunctions with JSGenTupleOps with GenericGenUnboxedTupleAccess { val IR: self.type = self }
codegen.emitSource(test _, "main", new PrintWriter(System.out))
}
}
assertFileEqualsCheck(prefix+"noargfun")
}
def testTwoArgsRecursiveFun = {
withOutFile(prefix+"twoargsrecfun") {
new TwoArgsRecursiveFunProg with TupledFunctionsRecursiveExp with ArithExpOpt with EqualExp with IfThenElseExp { self =>
val codegen = new JSGenTupledFunctions with JSGenArith with JSGenEqual with JSGenIfThenElse with JSGenTupleOps with GenericGenUnboxedTupleAccess { val IR: self.type = self }
codegen.emitSource(test _, "main", new PrintWriter(System.out))
}
}
assertFileEqualsCheck(prefix+"twoargsrecfun")
}
def testSchedFun = {
withOutFile(prefix+"schedfun") {
new SchedFunProg with FunctionsRecursiveExp with ArithExpOpt with EqualExp with IfThenElseExp { self =>
val codegen = new JSGenFunctions with JSGenArith with JSGenEqual with JSGenIfThenElse { val IR: self.type = self }
val f = (x: Rep[Double]) => test(x)
codegen.emitSource(f, "Test", new PrintWriter(System.out))
}
}
assertFileEqualsCheck(prefix+"schedfun")
}
}
|
afernandez90/virtualization-lms-core
|
test-src/epfl/test5-js/TestFunctions.scala
|
Scala
|
bsd-3-clause
| 9,699 |
package com.equalinformation.carapi.scala.test
/**
* Created by bpupadhyaya on 3/27/17.
*/
class Test4 {
//TODO clean up later
}
|
bpupadhyaya/CodingChallenge
|
CarAPIV1/src/test/scala/com/equalinformation/carapi/scala/test/Test4.scala
|
Scala
|
mit
| 137 |
package controllers.s_self_employment
import controllers.mappings.Mappings
import org.specs2.mutable._
import utils.WithApplication
class GAboutSelfEmploymentFormSpec extends Specification {
section("unit", models.domain.SelfEmployment.id)
"About Self Employment - About Self Employment Form" should {
val typeOfWork = "gardener"
"map data into case class" in new WithApplication {
GSelfEmploymentDates.form.bind(
Map(
"typeOfWork" -> typeOfWork,
"stillSelfEmployed" -> "yes",
"moreThanYearAgo" -> "yes",
"haveAccounts" -> "yes")
).fold(
formWithErrors => "This mapping should not happen." must equalTo("Error"),
f => {
f.typeOfWork mustEqual(typeOfWork)
f.stillSelfEmployed mustEqual("yes")
f.moreThanYearAgo mustEqual("yes")
f.haveAccounts mustEqual(Some("yes"))
}
)
}
"reject if typeOfWork is not filled" in new WithApplication {
GSelfEmploymentDates.form.bind(
Map(
"typeOfWork" -> "",
"stillSelfEmployed" -> "yes",
"moreThanYearAgo" -> "yes",
"haveAccounts" -> "yes")
).fold(
formWithErrors => formWithErrors.errors.head.message must equalTo(Mappings.errorRequired),
f => "This mapping should not happen." must equalTo("Valid")
)
}
"reject if stillSelfEmployed is not filled" in new WithApplication {
GSelfEmploymentDates.form.bind(
Map(
"typeOfWork" -> typeOfWork,
"stillSelfEmployed" -> "",
"moreThanYearAgo" -> "yes",
"haveAccounts" -> "yes")
).fold(
formWithErrors => formWithErrors.errors.head.message must equalTo(Mappings.errorRequired),
f => "This mapping should not happen." must equalTo("Valid")
)
}
}
section("unit", models.domain.SelfEmployment.id)
}
|
Department-for-Work-and-Pensions/ClaimCapture
|
c3/test/controllers/s_self_employment/GAboutSelfEmploymentFormSpec.scala
|
Scala
|
mit
| 1,903 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive
import java.io.{InputStream, OutputStream}
import java.rmi.server.UID
import org.apache.avro.Schema
import scala.collection.JavaConverters._
import scala.language.implicitConversions
import scala.reflect.ClassTag
import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.io.{Input, Output}
import com.google.common.base.Objects
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.hadoop.hive.ql.exec.{UDF, Utilities}
import org.apache.hadoop.hive.ql.plan.{FileSinkDesc, TableDesc}
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFMacro
import org.apache.hadoop.hive.serde2.ColumnProjectionUtils
import org.apache.hadoop.hive.serde2.avro.{AvroGenericRecordWritable, AvroSerdeUtils}
import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector
import org.apache.hadoop.io.Writable
import org.apache.spark.Logging
import org.apache.spark.sql.types.Decimal
import org.apache.spark.util.Utils
private[hive] object HiveShim {
// Precision and scale to pass for unlimited decimals; these are the same as the precision and
// scale Hive 0.13 infers for BigDecimals from sources that don't specify them (e.g. UDFs)
val UNLIMITED_DECIMAL_PRECISION = 38
val UNLIMITED_DECIMAL_SCALE = 18
val HIVE_GENERIC_UDF_MACRO_CLS = "org.apache.hadoop.hive.ql.udf.generic.GenericUDFMacro"
/*
   * This function became private in hive-0.13, but we have to do this to work around a hive bug
*/
private def appendReadColumnNames(conf: Configuration, cols: Seq[String]) {
val old: String = conf.get(ColumnProjectionUtils.READ_COLUMN_NAMES_CONF_STR, "")
val result: StringBuilder = new StringBuilder(old)
var first: Boolean = old.isEmpty
for (col <- cols) {
if (first) {
first = false
} else {
result.append(',')
}
result.append(col)
}
conf.set(ColumnProjectionUtils.READ_COLUMN_NAMES_CONF_STR, result.toString)
}
/*
   * Cannot use ColumnProjectionUtils.appendReadColumns directly if ids is null or empty
*/
def appendReadColumns(conf: Configuration, ids: Seq[Integer], names: Seq[String]) {
if (ids != null && ids.nonEmpty) {
ColumnProjectionUtils.appendReadColumns(conf, ids.asJava)
}
if (names != null && names.nonEmpty) {
appendReadColumnNames(conf, names)
}
}
/*
* Bug introduced in hive-0.13. AvroGenericRecordWritable has a member recordReaderID that
   * needs to be initialized before serialization.
*/
def prepareWritable(w: Writable, serDeProps: Seq[(String, String)]): Writable = {
w match {
case w: AvroGenericRecordWritable =>
w.setRecordReaderID(new UID())
// In Hive 1.1, the record's schema may need to be initialized manually or a NPE will
// be thrown.
if (w.getFileSchema() == null) {
serDeProps
.find(_._1 == AvroSerdeUtils.AvroTableProperties.SCHEMA_LITERAL.getPropName())
.foreach { kv =>
w.setFileSchema(new Schema.Parser().parse(kv._2))
}
}
case _ =>
}
w
}
def toCatalystDecimal(hdoi: HiveDecimalObjectInspector, data: Any): Decimal = {
if (hdoi.preferWritable()) {
Decimal(hdoi.getPrimitiveWritableObject(data).getHiveDecimal().bigDecimalValue,
hdoi.precision(), hdoi.scale())
} else {
Decimal(hdoi.getPrimitiveJavaObject(data).bigDecimalValue(), hdoi.precision(), hdoi.scale())
}
}
/**
* This class provides the UDF creation and also the UDF instance serialization and
* de-serialization cross process boundary.
*
* Detail discussion can be found at https://github.com/apache/spark/pull/3640
*
* @param functionClassName UDF class name
* @param instance optional UDF instance which contains additional information (for macro)
*/
private[hive] case class HiveFunctionWrapper(var functionClassName: String,
private var instance: AnyRef = null) extends java.io.Externalizable {
// for Serialization
def this() = this(null)
override def hashCode(): Int = {
if (functionClassName == HIVE_GENERIC_UDF_MACRO_CLS) {
Objects.hashCode(functionClassName, instance.asInstanceOf[GenericUDFMacro].getBody())
} else {
functionClassName.hashCode()
}
}
override def equals(other: Any): Boolean = other match {
case a: HiveFunctionWrapper if functionClassName == a.functionClassName =>
// In case of udf macro, check to make sure they point to the same underlying UDF
if (functionClassName == HIVE_GENERIC_UDF_MACRO_CLS) {
a.instance.asInstanceOf[GenericUDFMacro].getBody() ==
instance.asInstanceOf[GenericUDFMacro].getBody()
} else {
true
}
case _ => false
}
@transient
def deserializeObjectByKryo[T: ClassTag](
kryo: Kryo,
in: InputStream,
clazz: Class[_]): T = {
val inp = new Input(in)
val t: T = kryo.readObject(inp, clazz).asInstanceOf[T]
inp.close()
t
}
@transient
def serializeObjectByKryo(
kryo: Kryo,
plan: Object,
out: OutputStream) {
val output: Output = new Output(out)
kryo.writeObject(output, plan)
output.close()
}
def deserializePlan[UDFType](is: java.io.InputStream, clazz: Class[_]): UDFType = {
deserializeObjectByKryo(Utilities.runtimeSerializationKryo.get(), is, clazz)
.asInstanceOf[UDFType]
}
def serializePlan(function: AnyRef, out: java.io.OutputStream): Unit = {
serializeObjectByKryo(Utilities.runtimeSerializationKryo.get(), function, out)
}
def writeExternal(out: java.io.ObjectOutput) {
// output the function name
out.writeUTF(functionClassName)
// Write a flag if instance is null or not
out.writeBoolean(instance != null)
if (instance != null) {
// Some of the UDFs are serializable, but others are not
// Hive Utilities can handle both cases
val baos = new java.io.ByteArrayOutputStream()
serializePlan(instance, baos)
val functionInBytes = baos.toByteArray
// output the function bytes
out.writeInt(functionInBytes.length)
out.write(functionInBytes, 0, functionInBytes.length)
}
}
def readExternal(in: java.io.ObjectInput) {
// read the function name
functionClassName = in.readUTF()
if (in.readBoolean()) {
// if the instance is not null
// read the function in bytes
val functionInBytesLength = in.readInt()
val functionInBytes = new Array[Byte](functionInBytesLength)
in.readFully(functionInBytes)
// deserialize the function object via Hive Utilities
instance = deserializePlan[AnyRef](new java.io.ByteArrayInputStream(functionInBytes),
Utils.getContextOrSparkClassLoader.loadClass(functionClassName))
}
}
def createFunction[UDFType <: AnyRef](): UDFType = {
if (instance != null) {
instance.asInstanceOf[UDFType]
} else {
val func = Utils.getContextOrSparkClassLoader
.loadClass(functionClassName).newInstance.asInstanceOf[UDFType]
if (!func.isInstanceOf[UDF]) {
// We cache the function if it's not a simple UDF,
// as we always have to create a new instance for simple UDFs
instance = func
}
func
}
}
}
/*
* Bug introduced in hive-0.13. FileSinkDesc is serializable, but its member path is not.
* Fix it through wrapper.
*/
implicit def wrapperToFileSinkDesc(w: ShimFileSinkDesc): FileSinkDesc = {
val f = new FileSinkDesc(new Path(w.dir), w.tableInfo, w.compressed)
f.setCompressCodec(w.compressCodec)
f.setCompressType(w.compressType)
f.setTableInfo(w.tableInfo)
f.setDestTableId(w.destTableId)
f
}
/*
* Bug introduced in hive-0.13. FileSinkDesc is serializable, but its member path is not.
* Fix it through wrapper.
*/
private[hive] class ShimFileSinkDesc(
var dir: String,
var tableInfo: TableDesc,
var compressed: Boolean)
extends Serializable with Logging {
var compressCodec: String = _
var compressType: String = _
var destTableId: Int = _
def setCompressed(compressed: Boolean) {
this.compressed = compressed
}
def getDirName(): String = dir
def setDestTableId(destTableId: Int) {
this.destTableId = destTableId
}
def setTableInfo(tableInfo: TableDesc) {
this.tableInfo = tableInfo
}
def setCompressCodec(intermediateCompressorCodec: String) {
compressCodec = intermediateCompressorCodec
}
def setCompressType(intermediateCompressType: String) {
compressType = intermediateCompressType
}
}
}
|
chenc10/Spark-PAF
|
sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveShim.scala
|
Scala
|
apache-2.0
| 9,671 |
package foo
class Foo extends Top(Bar.vvvv)
|
Kwestor/scala-ide
|
org.scala-ide.sdt.core.tests/test-workspace/find-references/super/src/foo/Foo.scala
|
Scala
|
bsd-3-clause
| 44 |
import scala.util.parsing.combinator.JavaTokenParsers
class TestParsers extends JavaTokenParsers {
/*start*/floatingPointNumber ^^ { _.toDouble }/*end*/
}
//TestParsers.this.Parser[Double]
|
ilinum/intellij-scala
|
testdata/typeInference/bugs5/SCL3076.scala
|
Scala
|
apache-2.0
| 191 |
/*
* Copyright 2013 - 2015, Daniel Krzywicki <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package pl.edu.agh.scalamas.examples
import pl.edu.agh.scalamas.app.{ConcurrentStack, SynchronousEnvironment}
import pl.edu.agh.scalamas.emas.EmasLogic
import pl.edu.agh.scalamas.genetic.RastriginProblem
import scala.concurrent.duration._
/**
* Example app.
*/
object EmasApp extends ConcurrentStack("emas")
with SynchronousEnvironment
with EmasLogic
with RastriginProblem {
def main(args: Array[String]) {
run(10 seconds)
this.populationStrategy
}
}
|
ros3n/IntOb
|
examples/src/main/scala/pl/edu/agh/scalamas/examples/EmasApp.scala
|
Scala
|
mit
| 1,631 |
package org.mrgeo.spark
import com.esotericsoftware.kryo.io.{Input, Output}
import com.esotericsoftware.kryo.{Kryo, Serializer}
import org.mrgeo.data.raster.RasterWritable
import org.mrgeo.utils.Bounds
class Serializers {
}
class RasterWritableSerializer extends Serializer[RasterWritable] {
override def write(kryo: Kryo, output: Output, rw: RasterWritable) = {
println("RasterWritableSerializer.write")
val bytes = rw.getBytes
output.writeInt(bytes.length)
output.writeBytes(bytes)
}
override def read(kryo: Kryo, input: Input, `type`: Class[RasterWritable]): RasterWritable = {
println("RasterWritableSerializer.read")
val length = input.readInt()
new RasterWritable(input.readBytes(length))
}
}
class BoundsSerializer extends Serializer[Bounds] {
override def write(kryo: Kryo, output: Output, bounds: Bounds): Unit = {
output.writeDouble(bounds.getMinX)
output.writeDouble(bounds.getMinY)
output.writeDouble(bounds.getMaxX)
output.writeDouble(bounds.getMaxY)
}
override def read(kryo: Kryo, input: Input, `type`: Class[Bounds]): Bounds = {
new Bounds(input.readDouble(), input.readDouble(), input.readDouble(), input.readDouble())
}
}
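// Editor's note: an illustrative sketch (not part of the original source) of how
// these serializers would typically be wired up, e.g. from a Spark KryoRegistrator.
// The class name is hypothetical; kryo.register(Class, Serializer) is standard Kryo API.
class SerializersRegistrationSketch {
  def registerAll(kryo: Kryo): Unit = {
    kryo.register(classOf[RasterWritable], new RasterWritableSerializer)
    kryo.register(classOf[Bounds], new BoundsSerializer)
  }
}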
|
tjkrell/MrGEO
|
mrgeo-core/src/main/scala/org/mrgeo/spark/Serializers.scala
|
Scala
|
apache-2.0
| 1,224 |
package system.s3upload
import fly.play.s3.{BucketFilePart, BucketFilePartUploadTicket}
import play.api.Logger
import play.api.libs.iteratee.{Cont, Input, Iteratee, Done}
import scala.concurrent.{Await, Future, ExecutionContext}
import scala.concurrent.duration._
import scala.util.{Failure, Success}
class MultipartUploadIteratee(uploader: MultipartUploader, uploadThreshold: Int)(implicit ec: ExecutionContext) {
type IterateeType = Iteratee[Array[Byte], Unit]
type StepFunction = (PartState) => (Input[Array[Byte]]) => IterateeType
def step(state: PartState)(input: Input[Array[Byte]]): IterateeType = input match {
case Input.El(bytes) => handleEl(step, state, bytes)
case Input.Empty => handleEmpty(step, state)
case Input.EOF => handleEOF(state)
}
def handleEmpty(step: StepFunction, state: PartState)(implicit ec: ExecutionContext): IterateeType =
Cont[Array[Byte], Unit](i => step(state)(i))
def handleEl(step: StepFunction, state: PartState, bytes: Array[Byte])(implicit ec: ExecutionContext): IterateeType = {
val newState = state.addBytes(bytes)
if (newState.accumulatedBytes.length >= uploadThreshold) {
// Controversial. Blocks the iteratee until the chunk has been uploaded. If we don't do this
// then the iteratee will happily consume all the incoming data and buffer it up in memory,
// only discarding chunks when they've finished uploading. This could potentially lead
// to out-of-memory errors. Blocking creates back-pressure to slow the data coming in, at
// the cost of thread starvation if several uploads happen concurrently. Use a different thread
// context, perhaps?
val tickets = Await.result(uploadPart(newState), 10 minutes)
Cont[Array[Byte], Unit](i => step(newState.nextPart(tickets))(i))
} else {
Cont[Array[Byte], Unit](i => step(newState)(i))
}
}
def uploadPart(state: PartState)(implicit ec: ExecutionContext): Future[List[BucketFilePartUploadTicket]] = {
Logger.info(s"Pushing part ${state.partNumber} with ${state.accumulatedBytes.length} bytes. ${state.totalSize} bytes uploaded so far")
val f = uploader.uploadPart(BucketFilePart(state.partNumber.n, state.accumulatedBytes.toArray)).map(t => t +: state.uploadTickets)
f.onFailure {
case e =>
Logger.error("Error during upload of chunk, aborting multipart upload", e)
uploader.abortMultipartUpload
}
f
}
def handleEOF(state: PartState)(implicit ec: ExecutionContext): IterateeType = {
val finalLength: DataLength = state.totalSize.add(state.accumulatedBytes.length)
Logger.info(s"Got EOF as part number ${state.partNumber}. Total data size is $finalLength")
Logger.info(s"Pushing final part ${state.partNumber} with ${state.accumulatedBytes.length} bytes")
val finalState: Future[IterateeType] = uploadPart(state).flatMap { _ =>
Logger.info(s"Completing upload with tickets for ${state.uploadTickets.size} parts")
uploader.completeMultipartUpload(state.uploadTickets.reverse).map {
case MultipartUploadSuccess => Logger.info(s"Multipart upload response completed successfully")
case MultipartUploadError(status, error) =>
Logger.info(s"Response to multipart upload completion was $status ($error). Aborting.")
uploader.abortMultipartUpload
}
}.map(_ => Done(0, Input.EOF))
finalState.onFailure {
case e =>
Logger.error("Error during upload of chunk, aborting multipart upload", e)
uploader.abortMultipartUpload
}
// Here we want to wait until the upload is completed so that the docker push does not exit
// before the data is actually on the registry.
Await.result(finalState, 10 minutes)
}
}
|
WiredThing/hither
|
app/system/s3upload/MultipartUploadIteratee.scala
|
Scala
|
mit
| 3,767 |
package org.cloudfun.framework.scheduler.single
import _root_.org.cloudfun.framework.time.{Clock, Time}
import org.cloudfun.framework.scheduler.{Task, Scheduler}
import java.util.PriorityQueue
/**
* Simple single-threaded scheduler that can be advanced by calling an update function.
* Useful for integrating in a render-update loop on a client.
*/
class SingleThreadedScheduler(clock: Clock, updateManually: Boolean = false) extends Scheduler {
val updatesPerSecond = conf[Int]("ups", "updates-per-second", 60, "Number of times per second to check and invoke scheduled tasks. This value only has effect if updateManually is set to false in the code (it is false by default).")
private case class ScheduledTask(time: Time, task: Task) extends Comparable[ScheduledTask] {
def compareTo(o: ScheduledTask) = if (time.ms < o.time.ms) -1 else if (time.ms > o.time.ms) 1 else 0
}
private val tasks: PriorityQueue[ScheduledTask] = new PriorityQueue[ScheduledTask]
private var updater: Thread = null
private var active = false
def scheduleCallback(time: Time, task: Task) = tasks add ScheduledTask(time, task)
def update() {
val currentTime = clock.currentGameTime
while(tasks.peek != null && tasks.peek.time.ms <= currentTime.ms) {
tasks.poll.task()
}
}
def isActive: Boolean = active
override def onStart() {
if (!updateManually) {
if (isActive) onStop()
updater = new Thread(new Runnable {
def run = {
while(active) {
update()
Thread.sleep(1000 / updatesPerSecond())
}
}
})
updater.setDaemon(true)
active = true
updater.start()
}
}
override def onStop() {
if (!updateManually) {
if (isActive) {
active = false
updater.join
updater = null
}
}
}
}
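// Editor's note: an illustrative sketch (not part of the original source) of how this
// scheduler is typically driven from a client render-update loop when updateManually
// is true. `clock` and `myTask` are hypothetical values of the Clock and Task types.
//
//   val scheduler = new SingleThreadedScheduler(clock, updateManually = true)
//   scheduler.scheduleCallback(clock.currentGameTime, myTask)
//   // once per frame:
//   scheduler.update()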
|
zzorn/cloudfun
|
src/main/scala/org/cloudfun/framework/scheduler/single/SingleThreadedScheduler.scala
|
Scala
|
lgpl-3.0
| 1,859 |
import java.io.{File}
object PurchaseOrderTest extends TestBase {
val inFile = new File("integration/src/test/resources/ipo.xsd")
val usageFile = new File(tmp, "PurchaseOrderUsage.scala")
copyFileFromResource("PurchaseOrderUsage.scala", usageFile)
// override val module = new scalaxb.compiler.xsd.Driver with Verbose
lazy val generated = module.process(inFile, "ipo", tmp)
"ipo.scala file must compile so Address can be used" in {
(List("import ipo._",
"Address(\"\", \"\", \"\").toString"),
generated) must evaluateTo("Address(,,)", outdir = "./tmp")
}
"ipo.scala file must compile together with PurchaseOrderUsage.scala" in {
(List("import ipo._",
"PurchaseOrderUsage.allTests"),
usageFile :: generated) must evaluateTo(true, outdir = "./tmp")
}
}
|
justjoheinz/scalaxb
|
integration/src/test/scala/PurchaseOrderTest.scala
|
Scala
|
mit
| 819 |
package org.jetbrains.plugins.scala
package lang
package parser
package parsing
package top
import com.intellij.psi.tree.IElementType
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.parser.parsing.base.Modifier
import org.jetbrains.plugins.scala.lang.parser.parsing.builder.ScalaPsiBuilder
import org.jetbrains.plugins.scala.lang.parser.parsing.expressions.Annotation
/**
* @author Alexander Podkhalyuzin
* Date: 05.02.2008
*/
/*
 * TmplDef ::= {Annotation} {Modifier}
 *               [case] class ClassDef
 *             | [case] object ObjectDef
 *             | trait TraitDef
 */
object TmplDef {
def parse(builder: ScalaPsiBuilder): Boolean = {
val templateMarker = builder.mark()
templateMarker.setCustomEdgeTokenBinders(ScalaTokenBinders.PRECEEDING_COMMENTS_TOKEN, null)
val annotationsMarker = builder.mark()
while (Annotation.parse(builder)) {
}
annotationsMarker.done(ScalaElementTypes.ANNOTATIONS)
annotationsMarker.setCustomEdgeTokenBinders(ScalaTokenBinders.DEFAULT_LEFT_EDGE_BINDER, null)
val modifierMarker = builder.mark()
while (Modifier.parse(builder)) {
}
val caseState = isCaseState(builder)
modifierMarker.done(ScalaElementTypes.MODIFIERS)
templateParser(builder.getTokenType, caseState) match {
case Some((parse, elementType)) =>
builder.advanceLexer()
if (parse(builder)) {
templateMarker.done(elementType)
} else {
templateMarker.drop()
}
true
case None =>
templateMarker.rollbackTo()
false
}
}
private def isCaseState(builder: ScalaPsiBuilder) = {
val caseMarker = builder.mark()
val result = builder.getTokenType match {
case ScalaTokenTypes.kCASE =>
builder.advanceLexer() // Ate case
true
case _ => false
}
builder.getTokenType match {
case ScalaTokenTypes.kTRAIT if result =>
caseMarker.rollbackTo()
builder.error(ErrMsg("wrong.case.modifier"))
builder.advanceLexer() // Ate case
case _ => caseMarker.drop()
}
result
}
private def templateParser(tokenType: IElementType, caseState: Boolean) = tokenType match {
case ScalaTokenTypes.kCLASS => Some(ClassDef.parse _, ScalaElementTypes.CLASS_DEFINITION)
case ScalaTokenTypes.kOBJECT => Some(ObjectDef.parse _, ScalaElementTypes.OBJECT_DEFINITION)
case ScalaTokenTypes.kTRAIT =>
def parse(builder: ScalaPsiBuilder): Boolean = {
val result = TraitDef.parse(builder)
if (caseState) true else result
}
Some(parse _, ScalaElementTypes.TRAIT_DEFINITION)
case _ => None
}
}
|
jastice/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/scala/lang/parser/parsing/top/TmplDef.scala
|
Scala
|
apache-2.0
| 2,697 |
package com.featurefm.riversong.routes
/**
* Created by yardena on 1/4/16.
*/
trait RiverSongRouting {
def routes: akka.http.scaladsl.server.Route
}
|
ListnPlay/RiverSong
|
src/main/scala/com/featurefm/riversong/routes/RiverSongRouting.scala
|
Scala
|
mit
| 154 |
/*
* Copyright 2017 Datamountaineer.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datamountaineer.streamreactor.connect.mongodb.sink
import com.datamountaineer.kcql.{Kcql, WriteModeEnum}
import com.datamountaineer.streamreactor.common.errors.{NoopErrorPolicy, ThrowErrorPolicy}
import com.datamountaineer.streamreactor.connect.mongodb.config.{MongoConfig, MongoConfigConstants, MongoSettings}
import com.datamountaineer.streamreactor.connect.mongodb.{Json, Transaction}
import com.mongodb.client.MongoCursor
import com.mongodb.client.model.{Filters, InsertOneModel}
import com.mongodb.{AuthenticationMechanism, MongoClient}
import de.flapdoodle.embed.mongo.config.{MongodConfigBuilder, Net}
import de.flapdoodle.embed.mongo.distribution.Version
import de.flapdoodle.embed.mongo.{MongodExecutable, MongodProcess, MongodStarter}
import de.flapdoodle.embed.process.runtime.Network
import org.apache.kafka.common.config.SslConfigs
import org.apache.kafka.common.config.types.Password
import org.apache.kafka.connect.data.{Schema, SchemaBuilder, Struct}
import org.apache.kafka.connect.sink.SinkRecord
import org.bson.Document
import org.json4s.jackson.JsonMethods._
import org.json4s.jackson.Serialization
import org.scalatest.BeforeAndAfterAll
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import java.util.UUID
import scala.collection.JavaConverters._
import scala.collection.immutable.{ListMap, ListSet}
class MongoWriterTest extends AnyWordSpec with Matchers with BeforeAndAfterAll {
val starter = MongodStarter.getDefaultInstance
val port = 12345
val mongodConfig = new MongodConfigBuilder()
.version(Version.Main.PRODUCTION)
.net(new Net(port, Network.localhostIsIPv6()))
.build()
var mongodExecutable: Option[MongodExecutable] = None
var mongod: Option[MongodProcess] = None
var mongoClient: Option[MongoClient] = None
override def beforeAll(): Unit = {
mongodExecutable = Some(starter.prepare(mongodConfig))
mongod = mongodExecutable.map(_.start())
mongoClient = Some(new MongoClient("localhost", port))
}
override def afterAll(): Unit = {
mongod.foreach(_.stop())
mongodExecutable.foreach(_.stop())
}
// create SinkRecord from JSON strings, no schema
def createSRStringJson(json: String, recordNum: Int): SinkRecord = {
new SinkRecord("topicA", 0, null, null, Schema.STRING_SCHEMA, json, recordNum)
}
"MongoWriter" should {
"insert records into the target Mongo collection with Schema.String and payload json" in {
val settings = MongoSettings("localhost",
"",
new Password(""),
AuthenticationMechanism.SCRAM_SHA_1,
"local",
Set(Kcql.parse("INSERT INTO insert_string_json SELECT * FROM topicA")),
Map.empty,
Map("topicA" -> Map("*" -> "*")),
Map("topicA" -> Set.empty),
NoopErrorPolicy())
val records = for (i <- 1 to 4) yield {
val json = scala.io.Source.fromFile(getClass.getResource(s"/transaction$i.json").toURI.getPath).mkString
new SinkRecord("topicA", 0, null, null, Schema.STRING_SCHEMA, json, i)
}
runInserts(records, settings)
}
"upsert records into the target Mongo collection with Schema.String and payload json" in {
val settings = MongoSettings("localhost",
"",
new Password(""),
AuthenticationMechanism.SCRAM_SHA_1,
"local",
Set(Kcql.parse("UPSERT INTO upsert_string_json SELECT * FROM topicA PK lock_time")),
Map("topicA" -> Set("lock_time")),
Map("topicA" -> Map("*" -> "*")),
Map("topicA" -> Set.empty),
NoopErrorPolicy())
val records = for (i <- 1 to 4) yield {
val json = scala.io.Source.fromFile(getClass.getResource(s"/transaction$i.json").toURI.getPath).mkString
.replace("\"size\": 807", "\"size\": 1010" + (i - 1))
new SinkRecord("topicA", 0, null, null, Schema.STRING_SCHEMA, json, i)
}
runUpserts(records, settings)
}
"upsert records into the target Mongo collection with single-field key" in {
val settings = MongoSettings("localhost",
"",
new Password(""),
AuthenticationMechanism.SCRAM_SHA_1,
"local",
kcql = Set(Kcql.parse("UPSERT INTO upsert_string_json_single_key SELECT * FROM topicA PK C")),
keyBuilderMap = Map("topicA" -> Set("C")),
Map("topicA" -> Map("*" -> "*")),
Map("topicA" -> Set.empty),
NoopErrorPolicy())
val records = List(
"""{"A": 0, "B": "0", "C": 10 }""",
"""{"A": 1, "B": "1", "C": "11" }"""
)
runUpsertsTestKeys(
records,
createSRStringJson,
settings,
expectedKeys = Map(
0 -> ListMap("C"->10),
1 -> ListMap("C"->"11")
))
}
"upsert records into the target Mongo collection with multi-field key" in {
val settings = MongoSettings("localhost",
"",
new Password(""),
AuthenticationMechanism.SCRAM_SHA_1,
"local",
kcql = Set(Kcql.parse("UPSERT INTO upsert_string_json_multikey SELECT * FROM topicA PK B,C")),
keyBuilderMap = Map("topicA" -> ListSet("B", "C")),
Map("topicA" -> Map("*" -> "*")),
Map("topicA" -> Set.empty),
NoopErrorPolicy())
val records = List(
"""{"A": 0, "B": "0", "C": 10 }""",
"""{"A": 1, "B": "1", "C": "11" }"""
)
runUpsertsTestKeys(
records,
createSRStringJson,
settings,
expectedKeys = Map(
0 -> ListMap("B"->"0", "C"-> 10),
1 -> ListMap("B"->"1", "C"-> "11")
))
}
"upsert records into the target Mongo collection with multi-field keys embedded in document" in {
val settings = MongoSettings("localhost",
"",
new Password(""),
AuthenticationMechanism.SCRAM_SHA_1,
"local",
kcql = Set(Kcql.parse("UPSERT INTO upsert_string_json_multikey_embedded SELECT * FROM topicA PK B, C.M, C.N.Y")),
keyBuilderMap = Map("topicA" -> ListSet("B", "C.M", "C.N.Y")),
Map("topicA" -> Map("*" -> "*")),
Map("topicA" -> Set.empty),
NoopErrorPolicy())
val records = List(
"""{"A": 0, "B": "0", "C": {"M": "1000", "N": {"X": 10, "Y": 100} } }""",
"""{"A": 1, "B": "1", "C": {"M": "1001", "N": {"X": 11, "Y": 101} } }"""
)
runUpsertsTestKeys(
records,
createSRStringJson,
settings,
expectedKeys = Map(
0 -> ListMap("B"->"0", "M"-> "1000", "Y"-> 100),
1 -> ListMap("B"->"1", "M"-> "1001", "Y"-> 101)
))
1 shouldBe 1
}
"insert records into the target Mongo collection with Schema.Struct and payload Struct" in {
val settings = MongoSettings("localhost",
"",
new Password(""),
AuthenticationMechanism.SCRAM_SHA_1,
"local",
Set(Kcql.parse("INSERT INTO insert_struct SELECT * FROM topicA")),
Map.empty,
Map("topicA" -> Map.empty),
Map("topicA" -> Set.empty),
NoopErrorPolicy())
val records = for (i <- 1 to 4) yield {
val json = scala.io.Source.fromFile(getClass.getResource(s"/transaction$i.json").toURI.getPath).mkString
val tx = Json.fromJson[Transaction](json)
new SinkRecord("topicA", 0, null, null, Transaction.ConnectSchema, tx.toStruct(), i)
}
runInserts(records, settings)
}
"upsert records into the target Mongo collection with Schema.Struct and payload Struct" in {
val settings = MongoSettings("localhost",
"",
new Password(""),
AuthenticationMechanism.SCRAM_SHA_1,
"local",
Set(Kcql.parse("UPSERT INTO upsert_struct SELECT * FROM topicA PK lock_time")),
Map("topicA" -> Set("lock_time")),
Map("topicA" -> Map.empty),
Map("topicA" -> Set.empty),
ThrowErrorPolicy())
val records = for (i <- 1 to 4) yield {
val json = scala.io.Source.fromFile(getClass.getResource(s"/transaction$i.json").toURI.getPath).mkString
val tx = Json.fromJson[Transaction](json)
new SinkRecord("topicA", 0, null, null, Transaction.ConnectSchema, tx.copy(size = 10100 + (i - 1)).toStruct(), i)
}
runUpserts(records, settings)
}
"insert records into the target Mongo collection with schemaless records and payload as json" in {
val settings = MongoSettings("localhost",
"",
new Password(""),
AuthenticationMechanism.SCRAM_SHA_1,
"local",
Set(Kcql.parse("INSERT INTO insert_schemaless_json SELECT * FROM topicA")),
Map.empty,
Map("topicA" -> Map("*" -> "*")),
Map("topicA" -> Set.empty),
NoopErrorPolicy())
val records = for (i <- 1 to 4) yield {
val json = scala.io.Source.fromFile(getClass.getResource(s"/transaction$i.json").toURI.getPath).mkString
val tx = Json.fromJson[Transaction](json)
new SinkRecord("topicA", 0, null, null, null, tx.toHashMap, i)
}
runInserts(records, settings)
}
"upsert records into the target Mongo collection with schemaless records and payload as json" in {
val settings = MongoSettings("localhost",
"",
new Password(""),
AuthenticationMechanism.SCRAM_SHA_1,
"local",
Set(Kcql.parse("INSERT INTO upsert_schemaless_json SELECT * FROM topicA")),
Map("topicA" -> Set("lock_time")),
Map("topicA" -> Map.empty),
Map("topicA" -> Set.empty),
NoopErrorPolicy())
val records = for (i <- 1 to 4) yield {
val json = scala.io.Source.fromFile(getClass.getResource(s"/transaction$i.json").toURI.getPath).mkString
val tx = Json.fromJson[Transaction](json)
new SinkRecord("topicA", 0, null, null, null, tx.copy(size = 10100 + (i - 1)).toHashMap, i)
}
runInserts(records, settings)
}
"MongoClientProvider should set authentication mechanism to plain" in {
val settings = MongoSettings("mongodb://localhost",
"test",
new Password("test"),
AuthenticationMechanism.PLAIN,
"local",
Set(Kcql.parse("INSERT INTO insert_string_json SELECT * FROM topicA")),
Map.empty,
Map("topicA" -> Map("*" -> "*")),
Map("topicA" -> Set.empty),
NoopErrorPolicy())
val client = MongoClientProvider(settings = settings)
val auth = client.getCredential
auth.getAuthenticationMechanism shouldBe (AuthenticationMechanism.PLAIN)
}
"MongoClientProvider should set authentication mechanism to GSSAPI" in {
val settings = MongoSettings("mongodb://localhost",
"test",
new Password("test"),
AuthenticationMechanism.GSSAPI,
"local",
Set(Kcql.parse("INSERT INTO insert_string_json SELECT * FROM topicA")),
Map.empty,
Map("topicA" -> Map("*" -> "*")),
Map("topicA" -> Set.empty),
NoopErrorPolicy())
val client = MongoClientProvider(settings = settings)
val auth = client.getCredential
auth.getAuthenticationMechanism shouldBe (AuthenticationMechanism.GSSAPI)
}
"MongoClientProvider should set authentication mechanism to SCRAM_SHA_1" in {
val settings = MongoSettings("mongodb://localhost",
"test",
new Password("test"),
AuthenticationMechanism.SCRAM_SHA_1,
"local",
Set(Kcql.parse("INSERT INTO insert_string_json SELECT * FROM topicA")),
Map.empty,
Map("topicA" -> Map("*" -> "*")),
Map("topicA" -> Set.empty),
NoopErrorPolicy())
val client = MongoClientProvider(settings = settings)
val auth = client.getCredential
auth.getAuthenticationMechanism shouldBe (AuthenticationMechanism.SCRAM_SHA_1)
}
"MongoClientProvider should set have ssl enabled" in {
val settings = MongoSettings("mongodb://localhost/?ssl=true",
"test",
new Password("test"),
AuthenticationMechanism.SCRAM_SHA_256,
"local",
Set(Kcql.parse("INSERT INTO insert_string_json SELECT * FROM topicA")),
Map.empty,
Map("topicA" -> Map("*" -> "*")),
Map("topicA" -> Set.empty),
NoopErrorPolicy())
val client = MongoClientProvider(settings = settings)
val auth = client.getCredential
auth.getAuthenticationMechanism shouldBe (AuthenticationMechanism.SCRAM_SHA_256)
client.getMongoClientOptions.isSslEnabled shouldBe true
}
"MongoClientProvider should set authentication mechanism to SCRAM_SHA_1 as username not set and it is the mongo default" in {
val settings = MongoSettings("mongodb://localhost",
"",
new Password(""),
AuthenticationMechanism.SCRAM_SHA_256,
"local",
Set(Kcql.parse("INSERT INTO insert_string_json SELECT * FROM topicA")),
Map.empty,
Map("topicA" -> Map.empty),
Map("topicA" -> Set.empty),
NoopErrorPolicy())
val client = MongoClientProvider(settings = settings)
client.getMongoClientOptions.isSslEnabled shouldBe false
}
"MongoClientProvider should set SSL and jvm props in SSL in URI" in {
val truststoreFilePath = getClass.getResource("/truststore.jks").getPath
val keystoreFilePath = getClass.getResource("/keystore.jks").getPath
val map = Map(
MongoConfigConstants.DATABASE_CONFIG -> "database1",
MongoConfigConstants.CONNECTION_CONFIG -> "mongodb://localhost:27017/?ssl=true",
MongoConfigConstants.KCQL_CONFIG -> "INSERT INTO collection1 SELECT * FROM topic1",
SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG -> truststoreFilePath,
SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG -> "truststore-password",
SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG -> keystoreFilePath,
SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG -> "keystore-password"
).asJava
val config = MongoConfig(map)
val settings = MongoSettings(config)
settings.trustStoreLocation shouldBe Some(truststoreFilePath)
settings.keyStoreLocation shouldBe Some(keystoreFilePath)
settings.trustStorePassword shouldBe Some("truststore-password")
settings.keyStorePassword shouldBe Some("keystore-password")
val clientProvider = MongoClientProvider(settings)
clientProvider.getMongoClientOptions.isSslEnabled shouldBe true
val props = System.getProperties
props.containsKey("javax.net.ssl.keyStorePassword") shouldBe true
props.get("javax.net.ssl.keyStorePassword") shouldBe "keystore-password"
props.containsKey("javax.net.ssl.keyStore") shouldBe true
props.get("javax.net.ssl.keyStore") shouldBe keystoreFilePath
props.containsKey("javax.net.ssl.keyStoreType") shouldBe true
props.get("javax.net.ssl.keyStoreType") shouldBe "JKS"
props.containsKey("javax.net.ssl.trustStorePassword") shouldBe true
props.get("javax.net.ssl.trustStorePassword") shouldBe "truststore-password"
props.containsKey("javax.net.ssl.trustStore") shouldBe true
props.get("javax.net.ssl.trustStore") shouldBe truststoreFilePath
props.containsKey("javax.net.ssl.trustStoreType") shouldBe true
props.get("javax.net.ssl.trustStoreType") shouldBe "JKS"
}
"MongoClientProvider should select nested fields on INSERT in schemaless JSON" in {
val collectionName = UUID.randomUUID().toString
val map = Map(
MongoConfigConstants.DATABASE_CONFIG -> "database1",
MongoConfigConstants.CONNECTION_CONFIG -> "mongodb://localhost:27017/?ssl=true",
MongoConfigConstants.KCQL_CONFIG -> s"INSERT INTO $collectionName SELECT vehicle, vehicle.fullVIN, header.applicationId FROM topicA",
).asJava
val config = MongoConfig(map)
val settings = MongoSettings(config)
val mongoWriter = new MongoWriter(settings, mongoClient.get)
val records = for (i <- 1 to 4) yield {
val json = scala.io.Source.fromFile(getClass.getResource(s"/vehicle$i.json").toURI.getPath).mkString
new SinkRecord("topicA", 0, null, null, Schema.STRING_SCHEMA, json, i)
}
mongoWriter.write(records)
val actualCollection = mongoClient.get
.getDatabase(settings.database)
.getCollection(collectionName)
actualCollection.countDocuments() shouldBe 4
actualCollection.find().iterator().forEachRemaining(r => System.out.println(r))
}
// FIXME:
"MongoClientProvider should select nested fields on UPSERT in schemaless JSON and PK" in {
val collectionName = UUID.randomUUID().toString
val map = Map(
MongoConfigConstants.DATABASE_CONFIG -> "database1",
MongoConfigConstants.CONNECTION_CONFIG -> "mongodb://localhost:27017/?ssl=true",
MongoConfigConstants.KCQL_CONFIG -> s"UPSERT INTO $collectionName SELECT vehicle.fullVIN, header.applicationId FROM topicA pk vehicle.fullVIN",
).asJava
val config = MongoConfig(map)
val settings = MongoSettings(config)
val mongoWriter = new MongoWriter(settings, mongoClient.get)
val records = for (i <- 1 to 4) yield {
val json = scala.io.Source.fromFile(getClass.getResource(s"/vehicle$i.json").toURI.getPath).mkString
new SinkRecord("topicA", 0, null, null, Schema.STRING_SCHEMA, json, i)
}
mongoWriter.write(records)
val actualCollection = mongoClient.get
.getDatabase(settings.database)
.getCollection(collectionName)
actualCollection.countDocuments() shouldBe 3
actualCollection.find().iterator().forEachRemaining(r => System.out.println(r))
}
"MongoClientProvider should select nested fields on UPSERT in AVRO" in {
val collectionName = UUID.randomUUID().toString
val map = Map(
MongoConfigConstants.DATABASE_CONFIG -> "database1",
MongoConfigConstants.CONNECTION_CONFIG -> "mongodb://localhost:27017/?ssl=true",
MongoConfigConstants.KCQL_CONFIG -> s"UPSERT INTO $collectionName SELECT sensorID, location.lon as lon, location.lat as lat FROM topicA pk location.lon",
).asJava
val config = MongoConfig(map)
val settings = MongoSettings(config)
val mongoWriter = new MongoWriter(settings, mongoClient.get)
val locationSchema = SchemaBuilder.struct().name("location")
.field("lat", Schema.STRING_SCHEMA)
.field("lon", Schema.STRING_SCHEMA)
.build();
val schema = SchemaBuilder.struct().name("com.example.device")
.field("sensorID", Schema.STRING_SCHEMA)
.field("temperature", Schema.FLOAT64_SCHEMA)
.field("humidity", Schema.FLOAT64_SCHEMA)
.field("ts", Schema.INT64_SCHEMA)
.field("location", locationSchema)
.build()
val locStruct = new Struct(locationSchema)
.put("lat", "37.98")
.put("lon", "23.72")
val struct1 = new Struct(schema).put("sensorID", "sensor-123").put("temperature", 60.4).put("humidity", 90.1).put("ts", 1482180657010L).put("location", locStruct)
val struct2 = new Struct(schema).put("sensorID", "sensor-123").put("temperature", 62.1).put("humidity", 103.3).put("ts", 1482180657020L).put("location", locStruct)
val struct3 = new Struct(schema).put("sensorID", "sensor-789").put("temperature", 64.5).put("humidity", 101.1).put("ts", 1482180657030L).put("location", locStruct)
val sinkRecord1 = new SinkRecord("topicA", 0, null, null, schema, struct1, 1)
val sinkRecord2 = new SinkRecord("topicA", 0, null, null, schema, struct2, 2)
val sinkRecord3 = new SinkRecord("topicA", 0, null, null, schema, struct3, 3)
mongoWriter.write(Seq(sinkRecord1, sinkRecord2, sinkRecord3))
val actualCollection = mongoClient.get
.getDatabase(settings.database)
.getCollection(collectionName)
actualCollection.find().iterator().forEachRemaining(r => System.out.println(r))
actualCollection.countDocuments() shouldBe 1
val doc = actualCollection.find().iterator().next()
doc.values().size() shouldBe 4
doc.getString("_id") shouldBe "23.72"
doc.getString("sensorID") shouldBe "sensor-789"
doc.getString("lon") shouldBe "23.72"
doc.getString("lat") shouldBe "37.98"
}
}
private def runInserts(records: Seq[SinkRecord], settings: MongoSettings) = {
val mongoWriter = new MongoWriter(settings, mongoClient.get)
mongoWriter.write(records)
val databases = MongoIterableFn(mongoClient.get.listDatabaseNames()).toSet
databases.contains(settings.database) shouldBe true
val collections = MongoIterableFn(mongoClient.get.getDatabase(settings.database).listCollectionNames())
.toSet
val collectionName = settings.kcql.head.getTarget
collections.contains(collectionName) shouldBe true
val actualCollection = mongoClient.get
.getDatabase(settings.database)
.getCollection(collectionName)
actualCollection.countDocuments() shouldBe 4
actualCollection.countDocuments(Filters.eq("lock_time", 9223372036854775807L)) shouldBe 1
actualCollection.countDocuments(Filters.eq("lock_time", 427856)) shouldBe 1
actualCollection.countDocuments(Filters.eq("lock_time", 7856)) shouldBe 1
actualCollection.countDocuments(Filters.eq("lock_time", 0)) shouldBe 1
}
private def runUpserts(records: Seq[SinkRecord], settings: MongoSettings): Unit = {
require(settings.kcql.size == 1)
require(settings.kcql.head.getWriteMode == WriteModeEnum.UPSERT)
val db = mongoClient.get.getDatabase(settings.database)
db.createCollection(settings.kcql.head.getTarget)
val collection = db.getCollection(settings.kcql.head.getTarget)
val inserts = for (i <- 1 to 4) yield {
val json = scala.io.Source.fromFile(getClass.getResource(s"/transaction$i.json").toURI.getPath).mkString
val tx = Json.fromJson[Transaction](json)
val doc = new Document(tx.toHashMap.asInstanceOf[java.util.Map[String, AnyRef]])
new InsertOneModel[Document](doc)
}
collection.bulkWrite(inserts.asJava)
val mongoWriter = new MongoWriter(settings, mongoClient.get)
mongoWriter.write(records)
val databases = MongoIterableFn(mongoClient.get.listDatabaseNames()).toSet
databases.contains(settings.database) shouldBe true
val collections = MongoIterableFn(mongoClient.get.getDatabase(settings.database).listCollectionNames())
.toSet
val collectionName = settings.kcql.head.getTarget
collections.contains(collectionName) shouldBe true
val actualCollection = mongoClient.get
.getDatabase(settings.database)
.getCollection(collectionName)
actualCollection.countDocuments() shouldBe 4
val keys = Seq(9223372036854775807L, 427856L, 0L, 7856L)
keys.zipWithIndex.foreach { case (k, index) =>
var docOption = MongoIterableFn(actualCollection.find(Filters.eq("lock_time", k))).headOption
docOption.isDefined shouldBe true
docOption.get.get("size") shouldBe 10100 + index
}
}
// Map of record number to Map of key field names and field values.
type KeyInfo = Map[ Int, Map[String, Any] ]
// Note: the head entry of each inner map in expectedKeys is assumed to be the 'identifying'
// field, so use a ListMap as the inner map if you have more than one key field:
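// For example (values mirror the upsert specs above), a composite key on B and C is described as
//   expectedKeys = Map(0 -> ListMap("B" -> "0", "C" -> 10))
// where "B", the head entry, is the field used to look the upserted document back up.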
private def runUpsertsTestKeys(
records: Seq[String], // json records to upsert
recordToSinkRecordFn: (String, Int) => SinkRecord, // (json, recordNum)
settings: MongoSettings,
expectedKeys: KeyInfo,
markIt: Boolean = false) = {
implicit val jsonFormats = org.json4s.DefaultFormats
require(settings.kcql.size == 1)
require(settings.kcql.head.getWriteMode == WriteModeEnum.UPSERT)
val db = mongoClient.get.getDatabase(settings.database)
db.createCollection(settings.kcql.head.getTarget)
val collection = db.getCollection(settings.kcql.head.getTarget)
// Do an initial insert of all records with the _id we would expect
val inserts = records.zipWithIndex.map{ case (record, i) =>
val keys = expectedKeys(i)
// If key is one field, set _id to that field's value directly.
// If key is more than one field, set _id to the map object.
val idJson = keys.size match {
case 1 => Serialization.write(Map("_id"->keys.head._2))
case n if (n > 1) => Serialization.write(Map("_id"->keys))
case _ => fail()
}
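// As a rough illustration (hypothetical keys): ListMap("B" -> "0", "C" -> 10) should serialize
// to {"_id":{"B":"0","C":10}}, while a single-entry ListMap("C" -> 10) yields {"_id":10}.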
val rec = compact(parse(record) merge parse(idJson))
println(s"writing rec: $rec")
val doc = Document.parse(rec)
new InsertOneModel[Document](doc)
}
collection.bulkWrite(inserts.asJava)
// Now do upsert with an added field
val mongoWriter = new MongoWriter(settings, mongoClient.get)
val sinkRecords = records.zipWithIndex.map{ case (rec, i) =>
val modRec = compact(parse(rec) merge parse(s"""{"newField": $i}"""))
recordToSinkRecordFn(modRec, i)
}
mongoWriter.write(sinkRecords)
val databases = MongoIterableFn(mongoClient.get.listDatabaseNames()).toSet
databases.contains(settings.database) shouldBe true
val collections = MongoIterableFn(mongoClient.get.getDatabase(settings.database).listCollectionNames())
.toSet
val collectionName = settings.kcql.head.getTarget
collections.contains(collectionName) shouldBe true
val actualCollection = mongoClient.get
.getDatabase(settings.database)
.getCollection(collectionName)
// check the keys
expectedKeys.foreach{ case (index, keys) =>
//(field, k)
val identifyingField = keys.headOption.get._1 // (must have at least one key)
val ifValue = keys.headOption.get._2
val docOption: Option[Document] =
MongoIterableFn(actualCollection.find(Filters.eq(identifyingField, ifValue))).headOption
// If a head key value was unexpected, this will trigger here b/c we probably can't find the record to test against:
docOption.isDefined shouldBe true
val doc: Document = docOption.get
// Check the field we added in the upsert is actually there
// If a non-head key value was unexpected, this will trigger here:
doc.get("newField") shouldBe index
doc.get("_id") match {
case subDoc: Document =>
keys.map{ case (k,v) =>
subDoc.get(k) shouldBe v
}
case x => {
keys.size shouldBe 1
x shouldBe keys.head._2
}
}
}
actualCollection.countDocuments() shouldBe records.size
}
}
|
datamountaineer/stream-reactor
|
kafka-connect-mongodb/src/test/scala/com/datamountaineer/streamreactor/connect/mongodb/sink/MongoWriterTest.scala
|
Scala
|
apache-2.0
| 27,094 |
package epic.framework
import breeze.linalg._
import breeze.stats.distributions.Rand
import scala.collection.mutable.ArrayBuffer
import scala.collection.GenTraversableOnce
import breeze.util.SerializableLogging
/*
class StructSVM[Datum](val model: Model[Datum],
maxIter: Int = 100,
batchSize: Int = 100,
maxSMOIterations: Int = 100,
C: Double = 100) extends SerializableLogging {
import model._
def train(data: IndexedSeq[Datum]) = {
val weights = new ModelObjective(model, data).initialWeightVector(randomize = true)
var alphas = DenseVector.zeros[Double](0)
var constraints = IndexedSeq.empty[Constraint]
var converged = false
val numBatches = (data.length + batchSize - 1)/batchSize
for(i <- 0 until maxIter if !converged) {
val newWeights = weights.copy
for(i <- 0 until numBatches) {
val smoTol = if (i < 5) math.pow(10, -(i + 1)) else 1E-6
val inf = model.inferenceFromWeights(newWeights)
val batch = Rand.subsetsOfSize(data, batchSize).draw()
constraints ++= findNewConstraints(inf, batch)
alphas = DenseVector.vertcat(alphas, DenseVector.zeros[Double](constraints.size - alphas.size))
smo(inf, newWeights, alphas, constraints, smoTol)
val (newAlphas, newConstraints) = removeOldConstraints(alphas, constraints)
constraints = newConstraints
alphas = newAlphas
}
logger.info(s"${constraints.size} total constraints. ${alphas.findAll(_.abs > 1E-5).size} active.")
converged = constraints.size == 0 || (weights - newWeights).norm(Double.PositiveInfinity) < 1E-6
weights := newWeights
}
weights
}
private case class Constraint(loss: Double, gold: Datum, guess: Datum, ftf: Double) {
def dot(w: DenseVector[Double]) = {
val counts = model.countsFromMarginal(d, guessMarginal)
model.accumulateCounts(d, goldMarginal, counts, -1)
val feats = model.expectedCountsToObjective(counts)._2
feats dot w
}
lazy val ftf = {
}
var age = 0
def axpy(scale: Double, weights: DenseVector[Double]) = {
val ec = model.emptyCounts
model.accumulateCounts(d, guessMarginal, ec, scale)
model.accumulateCounts(d, goldMarginal, ec, -scale)
weights += model.expectedCountsToObjective(ec)._2
}
}
private def findNewConstraints(inf: model.Inference, data: IndexedSeq[Datum]): GenTraversableOnce[Constraint] = {
for {
d <- data.par
guessMarginal = inf.marginal(d)
goldMarginal = inf.goldMarginal(d)
if guessMarginal.logPartition > goldMarginal.logPartition
} yield {
val counts = model.countsFromMarginal(d, guessMarginal)
model.accumulateCounts(d, goldMarginal, counts, -1)
val feats = model.expectedCountsToObjective(counts)._2
val ftf = feats dot feats
Constraint(d, gm, m, ftf)
}
}
private def removeOldConstraints(alphas: DenseVector[Double],
constraints: IndexedSeq[Constraint]):(DenseVector[Double], IndexedSeq[Constraint]) = {
val newAlphas = Array.newBuilder[Double]
val newConstraints = new ArrayBuffer[Constraint]()
for( i <- 0 until alphas.length) {
if (alphas(i).abs < 1E-5) constraints(i).age += 1
else constraints(i).age = 0
if (constraints(i).age < MAX_CONSTRAINT_AGE) {
newConstraints += constraints(i)
newAlphas += alphas(i)
}
}
new DenseVector(newAlphas.result()) -> newConstraints
}
val MAX_CONSTRAINT_AGE = 50
private def smo(inf: model.Inference,
weights: DenseVector[Double],
alphas: DenseVector[Double],
constraints: IndexedSeq[Constraint],
smoTol: Double): Unit = {
if (alphas.sum < C) {
alphas += (C-alphas.sum)/alphas.length
}
for(i <- 0 until alphas.length) {
if (alphas(i) != 0.0) {
constraints(i).axpy(alphas(i), weights)
}
}
var largestChange = 10000.0
for(iter <- 0 until maxSMOIterations if largestChange > smoTol) {
largestChange = 0.0
val perm = Rand.permutation(constraints.length).draw()
for( i <- perm) {
val con1 = constraints(i)
val oldA1 = alphas(i)
val j = perm(i)
val oldA2 = alphas(j)
if ( (oldA1 != 0 && oldA2 != 0)) {
val con2 = constraints(j)
var t = ((con1.loss - con2.loss) - ( (con2.dot(weights)) - (con1.dot(weights))))/(con1.ftf + con2.ftf)
val tt = t
if (!t.isNaN && t != 0.0) {
t = t max (-oldA1)
val newA1 = (oldA1 + t) min (oldA1 + oldA2)
val newA2 = (oldA2 - t) max 0
alphas(i) = newA1
alphas(j) = newA2
println(newA1,newA2, tt, t, oldA1, oldA2)
con1.axpy(oldA1 - newA1, weights)
con2.axpy(oldA2 - newA2, weights)
largestChange = largestChange max (oldA1 - newA1).abs
largestChange = largestChange max (oldA2 - newA2).abs
}
}
}
}
}
}
*/
|
langkilde/epic
|
src/main/scala/epic/framework/StructSVM.scala
|
Scala
|
apache-2.0
| 5,155 |
/*
* Copyright 2012 Comcast Cable Communications Management, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.comcast.money.http.client
object HttpTraceConfig {
lazy val HttpResponseTimeTraceKey: String = "http-call-duration"
lazy val HttpFullResponseTimeTraceKey: String = "http-call-with-body-duration"
lazy val ProcessResponseTimeTraceKey: String = "http-process-response-duration"
lazy val HttpResponseCodeTraceKey: String = "http-response-code"
}
|
Comcast/money
|
money-http-client/src/main/scala/com/comcast/money/http/client/HttpTraceConfig.scala
|
Scala
|
apache-2.0
| 993 |
/*******************************************************************************
 * Copyright (c) 2014 Łukasz Szpakowski.
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
******************************************************************************/
package pl.luckboy.purfuncor.frontend.instant
import scala.util.parsing.input.NoPosition
import scalaz._
import scalaz.Scalaz._
import pl.luckboy.purfuncor.common._
import pl.luckboy.purfuncor.frontend._
import pl.luckboy.purfuncor.frontend.kinder.InferredKind
import pl.luckboy.purfuncor.frontend.typer.InferredType
import pl.luckboy.purfuncor.frontend.typer.TypeValueTerms._
object BuiltinInstanceTree
{
def builtinInstanceTree[T, U] = InstanceTree.fromInstanceTables[AbstractPolyFunction[T], U, GlobalInstance[T]](Map(
// construct
ConstructFunction -> InstanceTable.fromTuples(Seq(
// Byte
GlobalInstanceType(InferredType(zeroType & byteType, Seq())) -> ZeroIntegerConstructInstance(IntegerTypeFunction.Byte),
GlobalInstanceType(InferredType(nonZeroType & byteType, Seq())) -> NonZeroIntegerConstructInstance(IntegerTypeFunction.Byte),
// Short
GlobalInstanceType(InferredType(zeroType & shortType, Seq())) -> ZeroIntegerConstructInstance(IntegerTypeFunction.Short),
GlobalInstanceType(InferredType(nonZeroType & shortType, Seq())) -> NonZeroIntegerConstructInstance(IntegerTypeFunction.Short),
// Int
GlobalInstanceType(InferredType(zeroType & intType, Seq())) -> ZeroIntegerConstructInstance(IntegerTypeFunction.Int),
GlobalInstanceType(InferredType(nonZeroType & intType, Seq())) -> NonZeroIntegerConstructInstance(IntegerTypeFunction.Int),
// Long
GlobalInstanceType(InferredType(zeroType & longType, Seq())) -> ZeroIntegerConstructInstance(IntegerTypeFunction.Long),
GlobalInstanceType(InferredType(nonZeroType & longType, Seq())) -> NonZeroIntegerConstructInstance(IntegerTypeFunction.Long),
// Array
GlobalInstanceType(InferredType(emptyType & arrayType(typeParam(0, 0)), Seq(InferredKind(Star(KindType, NoPosition))))) -> EmptyArrayConstructInstance,
GlobalInstanceType(InferredType(nonEmptyType & arrayType(typeParam(0, 0)), Seq(InferredKind(Star(KindType, NoPosition))))) -> NonEmptyArrayConstructInstance)),
// select
SelectFunction -> InstanceTable.fromTuples(Seq(
// Byte
GlobalInstanceType(InferredType((zeroType | nonZeroType) & byteType, Seq())) -> IntegerSelectInstance(IntegerTypeFunction.Byte),
// Short
GlobalInstanceType(InferredType((zeroType | nonZeroType) & shortType, Seq())) -> IntegerSelectInstance(IntegerTypeFunction.Short),
// Int
GlobalInstanceType(InferredType((zeroType | nonZeroType) & intType, Seq())) -> IntegerSelectInstance(IntegerTypeFunction.Int),
// Long
GlobalInstanceType(InferredType((zeroType | nonZeroType) & longType, Seq())) -> IntegerSelectInstance(IntegerTypeFunction.Long),
// Array
GlobalInstanceType(InferredType((emptyType | nonEmptyType) & arrayType(typeParam(0, 0)), Seq(InferredKind(Star(KindType, NoPosition))))) -> ArraySelectInstance))))
}
|
luckboy/Purfuncor
|
src/main/scala/pl/luckboy/purfuncor/frontend/instant/BuiltinInstanceTree.scala
|
Scala
|
mpl-2.0
| 3,391 |
package blanky
import blanky.domain.{SignUpUserDto, User}
import spray.json.DefaultJsonProtocol
object AppJsonProtocol extends DefaultJsonProtocol {
implicit val userFormat = jsonFormat3(User)
implicit val signUpUserDtoFormat = jsonFormat4(SignUpUserDto)
}
|
vadim-shb/blanky
|
server/src/main/scala/blanky/AppJsonProtocol.scala
|
Scala
|
mit
| 264 |
/*
* Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
*/
package docs.home.scaladsl.serialization.v1
//#add-optional
case class ItemAdded(shoppingCartId: String, productId: String, quantity: Int)
//#add-optional
|
rcavalcanti/lagom
|
docs/manual/scala/guide/cluster/code/docs/home/scaladsl/serialization/v1/ItemAdded.scala
|
Scala
|
apache-2.0
| 234 |
/*
* Copyright (c) 2010-2011 Belmont Technology Pty Ltd. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sodatest.runtime.processing.running.testFixtures
import org.sodatest.api.SodaFixture
class GoodTestFixture extends SodaFixture {
def createReport(name: String) = None
def createEvent(name: String) = None
}
|
GrahamLea/SodaTest
|
sodatest-runtime/src/test/scala/org/sodatest/runtime/processing/running/testFixtures/GoodTestFixture.scala
|
Scala
|
apache-2.0
| 865 |
package io.github.oxlade39.storrent.core
import java.io.File
import akka.util.ByteString
import java.net.URI
import scala.io.Source
import java.security.MessageDigest
import java.util.Date
object Torrent {
val extension = ".torrent"
val mediaType = "application/x-bittorrent"
val encoding = "ISO-8859-1"
def apply(file: File) = fromFile(file)
def fromFile(file: File): Torrent = {
val tfr = new TorrentFileReader(file)
Torrent(
tfr.name,
tfr.comment,
tfr.createdBy,
tfr.creationDate,
tfr.files,
tfr.infoHash,
tfr.announceList,
tfr.pieceLength,
tfr.pieces
)
}
def hash(bytes: ByteString): ByteString = {
val md = MessageDigest.getInstance("SHA-1")
md.update(bytes.toArray)
ByteString(md.digest())
}
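// For example, with the well-known SHA-1 test vector for "abc",
// hash(ByteString("abc")).map("%02x" format _).mkString should be
// "a9993e364706816aba3e25717850c26c9cd0d89d".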
}
class TorrentFileReader(file: File) {
/** The character encoding used when parsing the torrent file's byte strings. */
val BYTE_ENCODING = "ISO-8859-1"
private[this] val source = Source.fromFile(file, BYTE_ENCODING)
private[this] val main = try {
val byteArray = source.map(_.toByte).toArray
BencodeParser.parse(ByteString.fromArray(byteArray)) match {
case Some(map: BMap) => map
case other => throw new IllegalArgumentException(s"Unsupported file contents: $other")
}
} finally {
source.close()
}
private[this] val info = main("info") match {
case bmap: BMap => bmap
case _ => throw new IllegalArgumentException("no info")
}
val name: String = info("name").asInstanceOf[BBytes].value.utf8String
val files: List[TorrentFile] = info.values.get(BBytes("files")) match {
case Some(items: BList) =>
items.values.map {
case file: BMap =>
val path = file("path") match {
case p: BList => p.values.map {case b: BBytes => b.value.utf8String }.mkString(File.separator)
}
val size = file("length") match {
case BInt(i) => i
}
TorrentFile(path, size)
}.toList
case _ =>
List(TorrentFile(name, info("length").asInstanceOf[BInt].value))
}
val announceList = main.get("announce-list") match {
case Some(l: BList) => {
val tiers = l.values
.map(_.asInstanceOf[BList].values.toList
.map(_.asInstanceOf[BBytes].value.utf8String)
.map(new URI(_)))
tiers.toList
}
case _ =>
List(List(new URI(main("announce").asInstanceOf[BBytes].value.utf8String)))
}
val comment = main.get("comment").map(_.asInstanceOf[BBytes].stringValue)
val createdBy = main.get("created by").map(_.asInstanceOf[BBytes].stringValue)
val creationDate = main.get("creation date").map(_.asInstanceOf[BInt].value.toLong)
val infoHash = hash(info.encode.toArray)
val pieceLength = info("piece length").asInstanceOf[BInt].value
val pieces = info("pieces").asInstanceOf[BBytes].value.grouped(20).toList
def hash(byteArray: Array[Byte]) = Torrent.hash(ByteString(byteArray))
}
case class Torrent(name: String,
comment: Option[String] = None,
createdBy: Option[String] = None,
creationTimestamp: Option[Long] = None,
files: List[TorrentFile],
infoHash: ByteString,
announceList: List[List[URI]],
pieceSize: Int,
pieceHashes: List[ByteString],
seeder: Boolean = false) {
def creationDate = creationTimestamp.map(new Date(_))
def isMultifile = files.size > 1
lazy val getSize = files.foldLeft(0L)(_ + _.size)
lazy val hexInfoHash = infoHash.map("%02X" format _).mkString
lazy val trackerCount = announceList.flatten.toSet.size
def pieceCount = pieceHashes.size
}
case class TorrentFile(name: String, size: Long)
|
oxlade39/STorrent
|
src/main/scala/io/github/oxlade39/storrent/core/Torrent.scala
|
Scala
|
apache-2.0
| 3,782 |
/*
* Exposure.scala
* (Eisenerz)
*
* Copyright (c) 2016 Hanns Holger Rutz. All rights reserved.
*
* This software is published under the GNU General Public License v2+
*
*
* For further information, please contact Hanns Holger Rutz at
* [email protected]
*/
package de.sciss.eisenerz
import com.hopding.jrpicam.RPiCamera
import com.hopding.jrpicam.enums.{AWB, Encoding, MeteringMode, Exposure => PiExposure}
import com.pi4j.io.gpio.GpioFactory
import de.sciss.file._
import scala.annotation.{switch, tailrec}
import scala.util.Try
object Exposure {
private[this] val outputDir = file("/") / "media" / "pi" / "exposure" / "Pictures"
sealed trait State
case object StatePause extends State
case object StateRecord extends State
case object StateShutdown extends State
@volatile
private[this] var state: State = StatePause
def main(args: Array[String]): Unit = {
// keyMatrix()
// dualColorLED()
run()
}
/* Delay between taking photos in milliseconds. */
private[this] val Delay = 10000
def run(): Unit = {
require(outputDir.isDirectory && outputDir.canWrite, s"Cannot write to $outputDir")
val countSite = outputDir.children(_.name.startsWith("site-")).flatMap { f =>
val n = f.name
Try(n.substring(5).toInt).toOption
} .sorted.lastOption.getOrElse(0) + 1
val siteDir = outputDir / s"site-$countSite"
require(siteDir.mkdir())
println(s"Next site will be #$countSite")
val keys = new KeyMatrix
val led = new DualColorLED
val cam = new RPiCamera(siteDir.path)
// cf. https://raspberrypi.stackexchange.com/questions/14047/
cam.setShutter(4000) // 500000
cam.setISO(100)
cam.setExposure(PiExposure.SPORTS)
cam.setAWB(AWB.HORIZON)
cam.setMeteringMode(MeteringMode.AVERAGE)
cam.setHorizontalFlipOn()
cam.setVerticalFlipOn()
// cam.setAWB(AWB.OFF)
val width = 3280 // /2
val height = 2464 // /2
val encoding = Encoding.JPG
cam.setEncoding(encoding)
cam.turnOffPreview()
// cam.setTimeout()
val ext = encoding.toString
led.pulseGreen() // 'ready'
// // XXX TODO --- this could be slow for lots of pictures; perhaps use 'jumping'
// var count = outputDir.children(_.name.startsWith("frame-")).flatMap { f =>
// val n = f.name
// Try(n.substring(6, n.indexOf('.', 6)).toInt).toOption
// } .sorted.lastOption.getOrElse(0) + 1
var count = 1
// println(s"Next frame will be #$count")
while (state != StateShutdown) {
if (state == StateRecord) {
val name = s"frame-$count.$ext"
cam.takeStill(name, width, height)
count += 1
}
var dlyRemain = Delay
while (dlyRemain > 0) {
Thread.sleep(100)
(keys.read(): @switch) match {
case '1' =>
if (state != StateRecord) {
state = StateRecord
dlyRemain = 0
led.pulseRed()
}
case '2' =>
if (state != StatePause) {
state = StatePause
led.pulseGreen()
}
case '9' =>
state = StateShutdown
dlyRemain = 0
led.blinkRed()
case _ =>
}
dlyRemain -= 100
}
}
import scala.sys.process._
Seq("sudo", "shutdown", "now").!
}
def keyMatrix(): Unit = {
println("Running 'Key Matrix'...")
val keys = new KeyMatrix
val io = GpioFactory.getInstance
@tailrec def loop(): Char = {
val c = keys.read()
if (c == KeyMatrix.NotPressed) loop() else c
}
val res = loop()
println(s"Key pressed: $res")
io.shutdown()
}
// XXX TODO --- use pwm, so we can balance red and green intensity for orange mix
def dualColorLED(): Unit = {
val led = new DualColorLED
val io = GpioFactory.getInstance
led.red()
Thread.sleep(2000)
led.green()
Thread.sleep(2000)
io.shutdown()
}
}
|
Sciss/Eisenerz
|
exposure/src/main/scala/de/sciss/eisenerz/Exposure.scala
|
Scala
|
gpl-2.0
| 3,992 |
package notebook.client
import java.io.File
import akka.actor.{Actor, ActorRef, Props}
import notebook.OutputTypes._
import notebook.PresentationCompiler
import notebook.kernel._
import notebook.JobTracking
import notebook.kernel.repl.common.ReplT
import org.joda.time.LocalDateTime
import com.datafellas.utils.{CustomResolvers, Deps}
import com.datafellas.utils._
import sbt._
import scala.collection.immutable.Queue
import scala.concurrent.Future
import scala.concurrent.duration._
import scala.util.{Failure => TFailure, Success => TSuccess}
import notebook.repl.{ReplCommand, command_interpreters}
import notebook.repl.command_interpreters.combineIntepreters
/**
* @param _initScripts List of scala source strings to be executed during REPL startup.
* @param customSparkConf Map configuring the notebook (spark configuration).
* @param compilerArgs Command line arguments to pass to the REPL compiler
*/
class ReplCalculator(
notebookName:String,
customLocalRepo: Option[String],
customRepos: Option[List[String]],
customDeps: Option[List[String]],
customImports: Option[List[String]],
customArgs: Option[List[String]],
customSparkConf: Option[Map[String, String]],
remoteCalcActorRef:ActorRef, // From CalcWebSocketService !
_initScripts: List[(String, String)],
compilerArgs: List[String]
) extends Actor with akka.actor.ActorLogging {
private val remoteLogger = context.actorSelection("/user/remote-logger")
remoteLogger ! remoteCalcActorRef
private val authRegex = """(?s)^\s*\(([^\)]+)\)\s*$""".r
private val credRegex = """"([^"]+)"\s*,\s*"([^"]+)"""".r //"
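// For instance (hypothetical input), a value such as ("user", "pass") matches authRegex as a
// whole, and credRegex then extracts the two quoted credentials from the captured group.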
// note: the resolver list is a superset of Spark's list in o.a.spark.deploy.SparkSubmit
// except that the local ivy repo isn't included
private var resolvers: List[Resolver] = {
val mavenLocal = Resolver.mavenLocal
val defaultLocal = Resolver.defaultLocal
val local = {
val pats = List(
sys.props("user.home") + "/.ivy2/" + "/local/" + Resolver.localBasePattern,
sys.props("user.home") + "/.ivy2/" + "/cache/" + Resolver.localBasePattern
)
FileRepository("snb-local", Resolver.defaultFileConfiguration, Patterns(pats, pats, false))
}
val defaultShared = Resolver.defaultShared
val mavenReleases = sbt.DefaultMavenRepository
val typesafeReleases = Resolver.typesafeIvyRepo("releases")
val jCenterReleases = Resolver.jcenterRepo
val sonatypeReleases = Resolver.sonatypeRepo("releases")
val spReleases = new MavenRepository("spark-packages", "http://dl.bintray.com/spark-packages/maven/")
val defaults = defaultLocal :: local :: mavenLocal :: defaultShared :: mavenReleases :: spReleases :: typesafeReleases :: jCenterReleases :: sonatypeReleases :: Nil
customRepos.getOrElse(List.empty[String]).map(CustomResolvers.fromString).map(_._2) ::: defaults
}
private var repo: File = customLocalRepo.map { x =>
new File(notebook.util.StringUtils.updateWithVarEnv(x))
}.getOrElse {
val tmp = new File(System.getProperty("java.io.tmpdir"))
val snb = new File(tmp, "spark-notebook")
if (!snb.exists) snb.mkdirs
val repo = new File(snb, "repo")
if (!repo.exists) repo.mkdirs
val r = new File(repo, java.util.UUID.randomUUID.toString)
if (!r.exists) r.mkdirs
r
}
def codeRepo = new File(repo, "code")
val (depsJars, depsScript): (List[String], (String, () => String)) = customDeps.map { d =>
val customDeps = d.mkString("\n")
val deps = Deps.script(customDeps, resolvers, repo, notebook.BuildInfo.xSparkVersion).toOption.getOrElse(List.empty[String])
(deps, ("deps", () => {
s"""
|val CustomJars = ${deps.mkString("Array(\"", "\",\"", "\")").replace("\\","\\\\")}
""".stripMargin
}))
}.getOrElse((List.empty[String], ("deps", () => "val CustomJars = Array.empty[String]\n")))
val ImportsScripts = ("imports", () => customImports.map(_.mkString("\n") + "\n").getOrElse("\n"))
private var _repl: Option[ReplT] = None
private def repl: ReplT = _repl getOrElse {
val r = ReplT.create(compilerArgs, depsJars)
_repl = Some(r)
r
}
private var _presentationCompiler: Option[PresentationCompiler] = None
private def presentationCompiler: PresentationCompiler = _presentationCompiler getOrElse {
val r = new PresentationCompiler(depsJars)
_presentationCompiler = Some(r)
r
}
val chat = new notebook.front.gadgets.Chat()
// +/- copied from https://github.com/scala/scala/blob/v2.11.4/src%2Flibrary%2Fscala%2Fconcurrent%2Fduration%2FDuration.scala
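// A couple of illustrative values (assuming the default English FiniteDuration.toString):
//   toCoarsest(FiniteDuration(90000, MILLISECONDS)) == "1 minute 30 seconds"
//   toCoarsest(FiniteDuration(0, SECONDS)) == "0 seconds"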
final def toCoarsest(d: FiniteDuration): String = {
def loop(length: Long, unit: TimeUnit, acc: String): String = {
def coarserOrThis(coarser: TimeUnit, divider: Int) = {
if (length == divider)
loop(1, coarser, acc)
else if (length < divider)
FiniteDuration(length, unit).toString + " " + acc
else {
val _acc = if (length % divider == 0) {
acc
} else {
FiniteDuration(length % divider, unit).toString + " " + acc
}
loop(length / divider, coarser, _acc)
}
}
unit match {
case DAYS => d.toString + " " + acc
case HOURS => coarserOrThis(DAYS, 24)
case MINUTES => coarserOrThis(HOURS, 60)
case SECONDS => coarserOrThis(MINUTES, 60)
case MILLISECONDS => coarserOrThis(SECONDS, 1000)
case MICROSECONDS => coarserOrThis(MILLISECONDS, 1000)
case NANOSECONDS => coarserOrThis(MICROSECONDS, 1000)
}
}
if (d.unit == DAYS || d.length == 0) d.toString()
else loop(d.length, d.unit, "").trim
}
// Make a child actor so we don't block the execution on the main thread, so that interruption can work
private val executor = context.actorOf(Props(new Actor {
implicit val ec = context.dispatcher
private var queue: Queue[(ActorRef, ExecuteRequest)] = Queue.empty
private var currentlyExecutingTask: Option[Future[(String, EvaluationResult)]] = None
def eval(b: => String, notify: Boolean = true)(success: => String = "",
failure: String => String = (s: String) => "Error evaluating " + b + ": " + s) {
repl.evaluate(b)._1 match {
case Failure(str) =>
if (notify) {
eval( """""", notify = false)()
}
log.error(failure(str))
case _ =>
if (notify) {
eval( """""", notify = false)()
}
log.info(success)
}
}
def receive = {
case "process-next" =>
log.debug(s"Processing next asked, queue is ${queue.size} length now")
currentlyExecutingTask = None
if (queue.nonEmpty) { //queue could be empty if InterruptRequest was asked!
log.debug("Dequeuing execute request current size: " + queue.size)
val (executeRequest, queueTail) = queue.dequeue
queue = queueTail
val (ref, er) = executeRequest
log.debug("About to execute request from the queue")
execute(ref, er)
}
case er@ExecuteRequest(_, _, code) =>
log.debug("Enqueuing execute request at: " + queue.size)
queue = queue.enqueue((sender(), er))
// if queue contains only the new task, and no task is currently executing, execute it straight away
// otherwise the execution will start once the evaluation of earlier cell(s) finishes
if (currentlyExecutingTask.isEmpty && queue.size == 1) {
self ! "process-next"
}
case InterruptCellRequest(killCellId) =>
// kill job(s) still waiting for execution to start, if any
val (jobsInQueueToKill, nonAffectedJobs) = queue.partition { case (_, ExecuteRequest(cellIdInQueue, _, _)) =>
cellIdInQueue == killCellId
}
log.debug(s"Canceling $killCellId jobs still in queue (if any):\\n $jobsInQueueToKill")
queue = nonAffectedJobs
log.debug(s"Interrupting the cell: $killCellId")
val jobGroupId = JobTracking.jobGroupId(killCellId)
// make sure sparkContext is already available!
if (jobsInQueueToKill.isEmpty && repl.sparkContextAvailable) {
log.info(s"Killing job Group $jobGroupId")
val thisSender = sender()
repl.evaluate(
s"""globalScope.sparkContext.cancelJobGroup("${jobGroupId}")""",
msg => thisSender ! StreamResponse(msg, "stdout")
)
}
// StreamResponse shows error msg
sender() ! StreamResponse("The cell was cancelled.\\n", "stderr")
// ErrorResponse to marks cell as ended
sender() ! ErrorResponse("The cell was cancelled.\\n", incomplete = false)
case InterruptRequest =>
log.debug("Interrupting the spark context")
val thisSender = sender()
log.debug("Clearing the queue of size " + queue.size)
queue = scala.collection.immutable.Queue.empty
repl.evaluate(
"globalScope.sparkContext.cancelAllJobs()",
msg => {
thisSender ! StreamResponse(msg, "stdout")
}
)
}
private var commandInterpreters = combineIntepreters(command_interpreters.defaultInterpreters)
def execute(sender: ActorRef, er: ExecuteRequest): Unit = {
val generatedReplCode: ReplCommand = commandInterpreters(er)
val start = System.currentTimeMillis
val thisSelf = self
val thisSender = sender
val result = scala.concurrent.Future {
// this future is required to allow InterruptRequest messages to be received and processed
// so that spark jobs can be killed and control handed back to the user to refine their tasks
val cellId = er.cellId
def replEvaluate(code:String, cellId:String) = {
val cellResult = try {
repl.evaluate(s"""
|globalScope.sparkContext.setJobGroup("${JobTracking.jobGroupId(cellId)}", "${JobTracking.jobDescription(code, start)}")
|$code
""".stripMargin,
msg => thisSender ! StreamResponse(msg, "stdout"),
nameDefinition => thisSender ! nameDefinition
)
}
finally {
repl.evaluate("globalScope.sparkContext.clearJobGroup()")
}
cellResult
}
val result = replEvaluate(generatedReplCode.replCommand, cellId)
val d = toCoarsest(Duration(System.currentTimeMillis - start, MILLISECONDS))
(d, result._1)
}
currentlyExecutingTask = Some(result)
result foreach {
case (timeToEval, Success(result)) =>
val evalTimeStats = s"Took: $timeToEval, at ${new LocalDateTime().toString("Y-M-d H:m")}"
thisSender ! ExecuteResponse(generatedReplCode.outputType, result.toString(), evalTimeStats)
case (timeToEval, Failure(stackTrace)) =>
thisSender ! ErrorResponse(stackTrace, incomplete = false)
case (timeToEval, notebook.kernel.Incomplete) =>
thisSender ! ErrorResponse("Incomplete (hint: check the parenthesis)", incomplete = true)
}
result onComplete {
_ => thisSelf ! "process-next"
}
}
}))
def preStartLogic() {
log.info("ReplCalculator preStart")
val dummyScript = ("dummy", () => s"""val dummy = ();\n""")
val SparkHookScript = (
"class server",
() => s"""@transient val _5C4L4_N0T3800K_5P4RK_HOOK = "${repl.classServerUri.get.replaceAll("\\\\\\\\", "\\\\\\\\\\\\\\\\")}";\\n"""
)
// Must escape last remaining '\', which could be for windows paths.
val nbName = notebookName.replaceAll("\"", "").replace("\\", "\\\\")
val SparkConfScript = {
val m = customSparkConf .getOrElse(Map.empty[String, String])
m .map { case (k, v) =>
"( \\"" + k + "\\" β \\"" + v + "\\" )"
}.mkString(",")
}
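// For illustration (hypothetical conf): customSparkConf = Map("spark.master" -> "local[2]")
// renders SparkConfScript as:  ( "spark.master" → "local[2]" )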
val CustomSparkConfFromNotebookMD = ("custom conf", () => s"""
|@transient val notebookName = "$nbName"
|@transient val _5C4L4_N0T3800K_5P4RK_C0NF:Map[String, String] = Map(
| $SparkConfScript
|)\n
""".stripMargin
)
def eval(script: () => String): Option[String] = {
val sc = script()
log.debug("script is :\\n" + sc)
if (sc.trim.length > 0) {
val (result, _) = repl.evaluate(sc)
result match {
case Failure(str) =>
log.error("Error in init script: \\n%s".format(str))
None
case _ =>
if (log.isDebugEnabled) log.debug("\n" + sc)
log.info("Init script processed successfully")
Some(sc)
}
} else None
}
val allInitScrips: List[(String, () => String)] = dummyScript ::
SparkHookScript ::
depsScript ::
ImportsScripts ::
CustomSparkConfFromNotebookMD ::
( _initScripts ::: repl.endInitCommand ).map(x => (x._1, () => x._2))
for ((name, script) <- allInitScrips) {
log.info(s" INIT SCRIPT: $name")
eval(script).map { sc =>
presentationCompiler.addScripts(sc)
}
}
repl.setInitFinished()
}
override def preStart() {
preStartLogic()
super.preStart()
}
override def postStop() {
log.info("ReplCalculator postStop")
presentationCompiler.stop()
super.postStop()
}
override def preRestart(reason: Throwable, message: Option[Any]) {
log.info("ReplCalculator preRestart " + message)
reason.printStackTrace()
super.preRestart(reason, message)
}
override def postRestart(reason: Throwable) {
log.info("ReplCalculator postRestart")
reason.printStackTrace()
super.postRestart(reason)
}
def receive = {
case msgThatShouldBeFromTheKernel =>
msgThatShouldBeFromTheKernel match {
case req @ InterruptCellRequest(_) =>
executor.forward(req)
case InterruptRequest => executor.forward(InterruptRequest)
case req@ExecuteRequest(_, _, code) => executor.forward(req)
case CompletionRequest(line, cursorPosition) =>
val (matched, candidates) = presentationCompiler.complete(line, cursorPosition)
sender ! CompletionResponse(cursorPosition, candidates, matched)
case ObjectInfoRequest(code, position) =>
val completions = repl.objectInfo(code, position)
val resp = if (completions.length == 0) {
ObjectInfoResponse(found = false, code, "", "")
} else {
ObjectInfoResponse(found = true, code, completions.mkString("\n"), "")
}
sender ! resp
}
}
}
|
antonkulaga/spark-notebook
|
modules/kernel/src/main/scala-2.10/notebook/ReplCalculator.scala
|
Scala
|
apache-2.0
| 15,000 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{Matchers, WordSpec}
import uk.gov.hmrc.ct.CATO01
import uk.gov.hmrc.ct.box.CtValidation
import uk.gov.hmrc.ct.computations.retriever.ComputationsBoxRetriever
class CPQ18Spec extends WordSpec with Matchers with MockitoSugar {
"CPQ18" should {
"when empty" when {
"fail validation when CPQ17 is false and CPQ19 is not defined" in new SetUpboxRetriever {
when(retriever.cpQ17()).thenReturn(CPQ17(Some(false)))
CPQ18(None).validate(retriever) shouldBe Set(CtValidation(Some("CPQ18"), "error.CPQ18.required"))
}
"pass validation when CPQ17 is true and CPQ19 is not defined" in new SetUpboxRetriever {
when(retriever.cpQ17()).thenReturn(CPQ17(Some(true)))
CPQ18(None).validate(retriever) shouldBe empty
}
"fail validation when CPQ19 is false and CPQ17 is not defined" in new SetUpboxRetriever {
when(retriever.cpQ19()).thenReturn(CPQ19(Some(false)))
CPQ18(None).validate(retriever) shouldBe Set(CtValidation(Some("CPQ18"), "error.CPQ18.required"))
}
"pass validation when CPQ19 is true and CPQ17 is not defined" in new SetUpboxRetriever {
when(retriever.cpQ19()).thenReturn(CPQ19(Some(true)))
CPQ18(None).validate(retriever) shouldBe empty
}
"pass validation when CPQ17 is not defined and CPQ19 is not defined" in new SetUpboxRetriever {
CPQ18(None).validate(retriever) shouldBe empty
}
"pass validation when CPQ20 is true and CP118 == 0" in new SetUpboxRetriever {
when(retriever.cpQ20()).thenReturn(CPQ20(Some(true)))
CPQ18(None).validate(retriever) shouldBe empty
}
"fail validation when CP117 and CP118 == zero and CATO01 > 0" in new SetUpboxRetriever {
when(retriever.cato01()).thenReturn(CATO01(1))
CPQ18(None).validate(retriever) shouldBe Set(CtValidation(Some("CPQ18"), "error.CPQ18.required"))
}
"pass validation when TP no NTP CP284 == CP117, CPQ17 = true" in new SetUpboxRetriever {
when(retriever.cp117()).thenReturn(CP117(1))
when(retriever.cp284()).thenReturn(CP284(Some(1)))
when(retriever.cpQ17()).thenReturn(CPQ17(Some(true)))
when(retriever.cpQ18()).thenReturn(CPQ18(Some(true)))
CPQ18(None).validate(retriever) shouldBe empty
}
"pass validation when TL no NTP with CPQ20 = true" in new SetUpboxRetriever {
when(retriever.cp118()).thenReturn(CP118(1))
when(retriever.cpQ20()).thenReturn(CPQ20(Some(true)))
CPQ18(None).validate(retriever) shouldBe empty
}
}
"when true" when {
"pass validation when CPQ17 is false and CPQ19 is not defined" in new SetUpboxRetriever {
when(retriever.cpQ17()).thenReturn(CPQ17(Some(false)))
CPQ18(Some(true)).validate(retriever) shouldBe empty
}
"fail validation when CPQ17 is true and CPQ19 is not defined" in new SetUpboxRetriever {
when(retriever.cpQ17()).thenReturn(CPQ17(Some(true)))
CPQ18(Some(true)).validate(retriever) shouldBe Set(CtValidation(Some("CPQ18"), "error.CPQ18.cannot.exist"))
}
"pass validation when CPQ19 is false and CPQ17 is not defined" in new SetUpboxRetriever {
when(retriever.cpQ19()).thenReturn(CPQ19(Some(false)))
CPQ18(Some(true)).validate(retriever) shouldBe empty
}
"fail validation when true and CPQ19 is true and CPQ17 is not defined" in new SetUpboxRetriever {
when(retriever.cpQ19()).thenReturn(CPQ19(Some(true)))
CPQ18(Some(true)).validate(retriever) shouldBe Set(CtValidation(Some("CPQ18"), "error.CPQ18.cannot.exist"))
}
"fail validation when CPQ17 is not defined and CPQ19 is not defined" in new SetUpboxRetriever {
CPQ18(Some(true)).validate(retriever) shouldBe Set(CtValidation(Some("CPQ18"), "error.CPQ18.cannot.exist"))
}
"fail validation when true and CPQ20 is true and CP118 == 0" in new SetUpboxRetriever {
when(retriever.cpQ20()).thenReturn(CPQ20(Some(true)))
CPQ18(Some(true)).validate(retriever) shouldBe Set(CtValidation(Some("CPQ18"), "error.CPQ18.cannot.exist"))
}
"pass validation when CP117 and CP118 == zero and CATO01 > 0" in new SetUpboxRetriever {
when(retriever.cato01()).thenReturn(CATO01(1))
CPQ18(Some(true)).validate(retriever) shouldBe empty
}
"pass validation when TP no NTP CP284 > CP117, CPQ17 = true" in new SetUpboxRetriever {
when(retriever.cp117()).thenReturn(CP117(1))
when(retriever.cp284()).thenReturn(CP284(Some(2)))
when(retriever.cpQ17()).thenReturn(CPQ17(Some(true)))
CPQ18(Some(true)).validate(retriever) shouldBe empty
}
"pass validation when TP with NTP CP284 + CATO01 > 0, CPQ17 = true" in new SetUpboxRetriever {
when(retriever.cp117()).thenReturn(CP117(1))
when(retriever.cp284()).thenReturn(CP284(Some(1)))
when(retriever.cato01()).thenReturn(CATO01(1))
when(retriever.cpQ17()).thenReturn(CPQ17(Some(true)))
CPQ18(Some(true)).validate(retriever) shouldBe empty
}
"fail validation when TP no NTP CP284 == CP117, CPQ17 = true" in new SetUpboxRetriever {
when(retriever.cp117()).thenReturn(CP117(1))
when(retriever.cp284()).thenReturn(CP284(Some(1)))
when(retriever.cpQ17()).thenReturn(CPQ17(Some(true)))
CPQ18(Some(true)).validate(retriever) shouldBe Set(CtValidation(Some("CPQ18"), "error.CPQ18.cannot.exist"))
}
"fail validation when TL no NTP with CPQ20 = true" in new SetUpboxRetriever {
when(retriever.cp118()).thenReturn(CP118(1))
when(retriever.cpQ20()).thenReturn(CPQ20(Some(true)))
CPQ18(Some(true)).validate(retriever) shouldBe Set(CtValidation(Some("CPQ18"), "error.CPQ18.cannot.exist"))
}
"pass vaildation when TL with NTP CPQ19 = true, CP118 - CATO01 < 0" in new SetUpboxRetriever {
when(retriever.cp118()).thenReturn(CP118(1))
when(retriever.cato01()).thenReturn(CATO01(2))
when(retriever.cpQ19()).thenReturn(CPQ19(Some(true)))
CPQ18(Some(true)).validate(retriever) shouldBe empty
}
}
"when false" when {
"pass validation when CPQ17 is false and CPQ19 is not defined" in new SetUpboxRetriever {
when(retriever.cpQ17()).thenReturn(CPQ17(Some(false)))
CPQ18(Some(false)).validate(retriever) shouldBe empty
}
"fail validation when CPQ17 is true and CPQ19 is not defined" in new SetUpboxRetriever {
when(retriever.cpQ17()).thenReturn(CPQ17(Some(true)))
CPQ18(Some(false)).validate(retriever) shouldBe Set(CtValidation(Some("CPQ18"), "error.CPQ18.cannot.exist"))
}
"pass validation when CPQ19 is false and CPQ17 is not defined" in new SetUpboxRetriever {
when(retriever.cpQ19()).thenReturn(CPQ19(Some(false)))
CPQ18(Some(false)).validate(retriever) shouldBe empty
}
"fail validation when CPQ19 is true and CPQ17 is not defined" in new SetUpboxRetriever {
when(retriever.cpQ19()).thenReturn(CPQ19(Some(true)))
CPQ18(Some(false)).validate(retriever) shouldBe Set(CtValidation(Some("CPQ18"), "error.CPQ18.cannot.exist"))
}
"fail validation when CPQ19 is true and CPQ17 is not defined and boxes as per scenario" in new SetUpboxRetriever {
when(retriever.cpQ19()).thenReturn(CPQ19(Some(true)))
when(retriever.cpQ20()).thenReturn(CPQ20(Some(true)))
when(retriever.cp287()).thenReturn(CP287(Some(10000)))
when(retriever.cp288()).thenReturn(CP288(Some(13404)))
when(retriever.cato01()).thenReturn(CATO01(1300))
when(retriever.cp118()).thenReturn(CP118(24704))
when(retriever.cp117()).thenReturn(CP117(0))
CPQ18(Some(false)).validate(retriever) shouldBe Set(CtValidation(Some("CPQ18"), "error.CPQ18.cannot.exist"))
}
"fail validation when CPQ17 is not defined and CPQ19 is not defined" in new SetUpboxRetriever {
CPQ18(Some(false)).validate(retriever) shouldBe Set(CtValidation(Some("CPQ18"), "error.CPQ18.cannot.exist"))
}
"fail validation when CPQ20 is true and CP118 == 0" in new SetUpboxRetriever {
when(retriever.cpQ20()).thenReturn(CPQ20(Some(true)))
CPQ18(Some(false)).validate(retriever) shouldBe Set(CtValidation(Some("CPQ18"), "error.CPQ18.cannot.exist"))
}
"pass validation when CP117 and CP118 == zero and CATO01 > 0" in new SetUpboxRetriever {
when(retriever.cato01()).thenReturn(CATO01(1))
CPQ18(Some(false)).validate(retriever) shouldBe empty
}
"pass validation when TP no NTP CP284 > CP117, CPQ17 = true and CPQ18 true" in new SetUpboxRetriever {
when(retriever.cp117()).thenReturn(CP117(1))
when(retriever.cp284()).thenReturn(CP284(Some(2)))
when(retriever.cpQ17()).thenReturn(CPQ17(Some(true)))
CPQ18(Some(false)).validate(retriever) shouldBe empty
}
"pass validation when TP with NTP CP284 + CATO01 > 0, CPQ17 = true and CPQ18 true" in new SetUpboxRetriever {
when(retriever.cp117()).thenReturn(CP117(1))
when(retriever.cp284()).thenReturn(CP284(Some(1)))
when(retriever.cato01()).thenReturn(CATO01(1))
when(retriever.cpQ17()).thenReturn(CPQ17(Some(true)))
CPQ18(Some(false)).validate(retriever) shouldBe empty
}
"fail validation when TP no NTP CP284 == CP117, CPQ17 = true" in new SetUpboxRetriever {
when(retriever.cp117()).thenReturn(CP117(1))
when(retriever.cp284()).thenReturn(CP284(Some(1)))
when(retriever.cpQ17()).thenReturn(CPQ17(Some(true)))
CPQ18(Some(false)).validate(retriever) shouldBe Set(CtValidation(Some("CPQ18"), "error.CPQ18.cannot.exist"))
}
"fail validation when TL no NTP with CPQ20 = true" in new SetUpboxRetriever {
when(retriever.cp118()).thenReturn(CP118(1))
when(retriever.cpQ20()).thenReturn(CPQ20(Some(true)))
CPQ18(Some(false)).validate(retriever) shouldBe Set(CtValidation(Some("CPQ18"), "error.CPQ18.cannot.exist"))
}
"pass vaildation when TL with NTP CPQ19 = true, CP118 - CATO01 < 0, CPQ18 = true" in new SetUpboxRetriever {
when(retriever.cp118()).thenReturn(CP118(1))
when(retriever.cato01()).thenReturn(CATO01(2))
when(retriever.cpQ19()).thenReturn(CPQ19(Some(true)))
CPQ18(Some(false)).validate(retriever) shouldBe empty
}
}
}
}
trait SetUpboxRetriever extends MockitoSugar {
val retriever: ComputationsBoxRetriever = mock[ComputationsBoxRetriever]
def setUpRetriever() = {
when(retriever.cpQ17()).thenReturn(CPQ17(None))
when(retriever.cpQ18()).thenReturn(CPQ18(None))
when(retriever.cpQ19()).thenReturn(CPQ19(None))
when(retriever.cpQ20()).thenReturn(CPQ20(None))
when(retriever.cp117()).thenReturn(CP117(0))
when(retriever.cp118()).thenReturn(CP118(0))
when(retriever.cp284()).thenReturn(CP284(None))
when(retriever.cato01()).thenReturn(CATO01(0))
}
setUpRetriever()
}
|
pncampbell/ct-calculations
|
src/test/scala/uk/gov/hmrc/ct/computations/CPQ18Spec.scala
|
Scala
|
apache-2.0
| 11,808 |
package adtoyou.spark.streamming
/**
* Created by qiyu.wang on 2017/10/12.
*/
import java.util.Calendar
import kafka.serializer.StringDecoder
import org.apache.spark.SparkConf
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.streaming._
import org.apache.spark.streaming.kafka._
import org.json4s.JValue
import org.json4s.JsonAST.{JBool, JInt, JString}
import org.json4s.jackson.JsonMethods.parse
/**
* Consumes app-ranking messages from one or more topics in Kafka (usage text adapted from the DirectKafkaWordCount example).
* Usage: DirectKafkaWordCount <brokers> <topics> <consumer_group>
* <brokers> is a list of one or more Kafka brokers
* <topics> is a list of one or more kafka topics to consume from
* <consumer_group> is the id of consumer group
*
* Example:
* $ bin/run-example streaming.DirectKafkaWordCount broker1-host:port,broker2-host:port \
* topic1,topic2
*/
object ASOStreaming_1 {
def main(args: Array[String]) {
if (args.length < 3) {
System.err.println(
s"""
|Usage: DirectKafkaWordCount <brokers> <topics> <consumer_group>
| <brokers> is a list of one or more Kafka brokers
| <topics> is a list of one or more kafka topics to consume from
| <consumer_group> is the id of consumer group
|
""".stripMargin)
System.exit(1)
}
// Create context with 2 second batch interval
val sparkConf = new SparkConf()
.setAppName("MAC_TEST")
.set("redis.host", "192.168.3.156")
.set("redis.port", "9801")
.set("redis.timeout", "30000")
.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
// .registerKryoClasses(Array(classOf[AppRank]))
// .set("spark.streaming.blockInterval", "100ms")
val ssc = new StreamingContext(sparkConf, Seconds(10))
ssc.checkpoint("/drp/tmp/aso_checkpoint")
lazy val hiveContext = new HiveContext(ssc.sparkContext)
val Array(brokers, topics, group) = args
// Create direct kafka stream with brokers and topics
val topicsSet = topics.split(",").toSet
val kafkaParams = Map[String, String]("metadata.broker.list" -> brokers,
"group.id" -> group
)
val messages = KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](
ssc, kafkaParams, topicsSet)
messages
.map(m => decode(m._2))
.filter(_._2 != AppRank.empty)
.updateStateByKey(updateAppRanks)
.foreachRDD { (rdd, time) =>
val calendar = Calendar.getInstance()
calendar.setTimeInMillis(time.milliseconds)
val currentHH = calendar.get(Calendar.HOUR_OF_DAY)
val currentMM = calendar.get(Calendar.MINUTE)
println("HH=" + currentHH + ",MM=" + currentMM)
if (currentMM == 0) {
// flush redis to hive(hh-1)
}
if (!rdd.isEmpty()) {
val appBoard = rdd.filter(_._2.markToSink) map { a =>
val hm = a._1.split("_")(0).split(":")
val h = hm(0).toInt
val m = hm(1).toInt
(a._1, (h, m, a._2))
}
val toRedis = appBoard.filter(a => a._2._1 == currentHH && a._2._2 != 0)
val toHive = appBoard.filter(a => a._2._1 != currentHH || a._2._2 == 0)
toRedis.foreach(t =>
println("redis:" + t._1 + "," + t._2._3.appRanks.mkString("\\n")))
toHive.foreach(t =>
println("hive:" + t._1 + "," + t._2._3.appRanks.mkString("\\n")))
// rdd.sparkContext.toRedisKV(toRedis)
}
}
// Start the computation
ssc.start()
ssc.awaitTermination()
}
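// Illustrative input for decode below (field names inferred from the reads it performs; the
// concrete values are hypothetical, not taken from the actual producer):
//   {"lang":"en","popId":27,"genreId":6014,"isLast":false,"timestamp":1507775400000,
//    "value":"{\"id\":\"12345\",\"name\":\"3. Some App\"}"}
// For a timestamp falling at 10:30 local time this decodes to the key
// "10:30_en_27_6014_12345_3" and an AppRank with rank 3; a "name" without a leading
// "<rank>. " prefix yields rank -1, and unparseable JSON falls back to ("", AppRank.empty).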
def decode(s: String): (String, AppRank) = {
try {
val jo = parse(s)
val lang = optString(jo, "lang")
val popId = optInt(jo, "popId")
val genreId = optInt(jo, "genreId")
val isLast = optBoolean(jo, "isLast")
val timestamp = optLong(jo, "timestamp")
val calendar = Calendar.getInstance()
calendar.setTimeInMillis(timestamp)
val hm = calendar.get(Calendar.HOUR_OF_DAY) + ":" + calendar.get(Calendar.MINUTE)
val valueStr = optString(jo, "value")
val value = parse(valueStr)
val appId = optString(value, "id")
val rankName = optString(value, "name").split("\\. ", 2)
val rank = if (rankName.length == 2) rankName(0).toInt else -1
val idTuple = (hm, lang, popId, genreId, appId, rank)
val id = idTuple.productIterator.mkString("_")
(id, AppRank(timestamp, lang, popId, genreId, appId, rank, isLast))
} catch {
case _: com.fasterxml.jackson.core.JsonParseException =>
("", AppRank.empty)
}
}
def optString(json: JValue, key: String): String = {
json \ key match {
case JString(x) => x
case _ => ""
}
}
def optBoolean(json: JValue, key: String): Boolean = {
json \ key match {
case JBool(x) => x
case _ => false
}
}
def optInt(json: JValue, key: String): Int = {
json \ key match {
case JInt(x) => x.intValue()
case _ => -1
}
}
def optLong(json: JValue, key: String): Long = {
json \ key match {
case JInt(x) => x.longValue()
case _ => -1L
}
}
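// updateAppRanks is the updateStateByKey function: as far as the code reads, it accumulates
// AppRank batches per key. If the carried-over state was already marked to sink, the new batch
// starts a fresh board; otherwise, if any incoming rank has isLast set, the merged board is
// marked to sink (note this branch appears to rely on a previous state being present);
// otherwise the new ranks are simply appended to the existing (or a brand-new) board.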
def updateAppRanks(newRanks: Seq[AppRank],
state: Option[AppBoard]): Option[AppBoard] = {
val newState = state
.map(prev => AppBoard(prev.appRanks ++ newRanks, prev.markToSink))
.orElse(Some(AppBoard(newRanks)))
if (newState.get.markToSink)
Some(AppBoard(newRanks))
else if (newRanks.exists(_.isLast))
state
.map(prev => AppBoard(prev.appRanks ++ newRanks, true))
else
newState
}
}
|
7u/spark-learning
|
spark.learning/spark_test/src/main/scala/adtoyou/spark/streamming/ASOStreaming_1.scala
|
Scala
|
apache-2.0
| 5,887 |
object Solution {
def gcd(x: Int, y: Int): Int =
{
// You only need to fill up this function
// To return the value of the GCD of x and y
if (x % y == 0) y
else if (y % x == 0) x
else if (x > y) gcd(y, x % y)
else gcd(x, y % x)
}
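// Worked by hand: gcd(12, 18) == 6 and gcd(7, 3) == 1. The first two branches are base cases;
// the last two apply Euclid's algorithm, recursing on the remainder.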
/**This part handles the input/output. Do not change or modify it **/
def acceptInputAndComputeGCD(pair:List[Int]) = {
println(gcd(pair.head,pair.reverse.head))
}
def main(args: Array[String]) {
/** The part relates to the input/output. Do not change or modify it **/
acceptInputAndComputeGCD(readLine().trim().split(" ").map(x=>x.toInt).toList)
}
}
|
franklingu/HackerRank
|
functional-programming/recursion/computing-the-gcd/computing_the_gcd.scala
|
Scala
|
mit
| 765 |
package com.monsanto.stats.tables.clustering
import com.monsanto.stats.tables.utils.ProfileUtils
import scala.util.Random
import scala.collection.parallel.immutable.ParVector
import scala.annotation.elidable
import elidable._
import ProfileUtils.time
import scala.annotation.tailrec
class CRP(params: ModelParams, allTopicVectorResults: Vector[TopicVectorInput]) { thisCRP =>
private val cache: Array[Double] = new Array[Double](allTopicVectorResults.length)
{
var idx: Int = 0
while (idx < allTopicVectorResults.length) {
cache(idx) = logGamma(idx.toDouble + params.beta)
idx += 1
}
}
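// cache(i) holds logGamma(i + params.beta), pre-computed for every count i below the number of
// topic vectors, so estimateCSmoothingFirst can look small counts up instead of recomputing them.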
// Array(key0, value0, key1, value1, key2, value2, ... plus possibly some unused elements at the end)
final class VecMap private (private val pairs: Array[Int], val size: Int) {
// Ensure array length is even
// require((pairs.length & 1) == 0)
// require(pairs.length >= size) // Pairs can have extra slots
def toMap: Map[Int, Int] = {
var result = Map.empty[Int, Int]
val doubleSize = size * 2
var idx = 0
while (idx < doubleSize) {
result += pairs(idx) -> pairs(idx + 1)
idx += 2
}
result
}
def +(that: VecMap): VecMap = {
val thisLen = this.size * 2 // Length of used portion of this.pairs array
val thatLen = that.size * 2 // Length of used portion of that.pairs array
val newPairs: Array[Int] = new Array[Int](thisLen + thatLen)
var thisIdx = 0
var thatIdx = 0
var newIdx = 0
while (thisIdx < thisLen && thatIdx < thatLen) {
val thisKey = this.pairs(thisIdx)
val thatKey = that.pairs(thatIdx)
if (thisKey == thatKey) {
newPairs(newIdx) = thisKey
newPairs(newIdx + 1) = this.pairs(thisIdx + 1) + that.pairs(thatIdx + 1)
thisIdx += 2
thatIdx += 2
}
else if (thisKey < thatKey) {
newPairs(newIdx) = thisKey
newPairs(newIdx + 1) = this.pairs(thisIdx + 1)
thisIdx += 2
}
else {
newPairs(newIdx) = thatKey
newPairs(newIdx + 1) = that.pairs(thatIdx + 1)
thatIdx += 2
}
newIdx += 2
}
if (thisIdx < thisLen) {
// that.pairs is spent. Just finish off this
while (thisIdx < thisLen) {
newPairs(newIdx) = this.pairs(thisIdx)
newPairs(newIdx + 1) = this.pairs(thisIdx + 1)
thisIdx += 2
newIdx += 2
}
}
else if (thatIdx < thatLen) {
// this.pairs is spent. Just finish off that
while (thatIdx < thatLen) {
newPairs(newIdx) = that.pairs(thatIdx)
newPairs(newIdx + 1) = that.pairs(thatIdx + 1)
thatIdx += 2
newIdx += 2
}
}
assert((newIdx & 1) == 0)
new VecMap(newPairs, newIdx / 2)
}
def -(that: VecMap): VecMap = {
val thisLen = this.size * 2 // Length of used portion of this.pairs array
val thatLen = that.size * 2 // Length of used portion of that.pairs array
val newPairs: Array[Int] = new Array[Int](thisLen)
var thisIdx = 0
var thatIdx = 0
var newIdx = 0
while (thisIdx < thisLen && thatIdx < thatLen) {
val thisKey = this.pairs(thisIdx)
val thatKey = that.pairs(thatIdx)
if (thisKey == thatKey) {
newPairs(newIdx) = thisKey
newPairs(newIdx + 1) = this.pairs(thisIdx + 1) - that.pairs(thatIdx + 1)
thisIdx += 2
thatIdx += 2
newIdx += 2
}
else if (thisKey < thatKey) {
newPairs(newIdx) = thisKey
newPairs(newIdx + 1) = this.pairs(thisIdx + 1)
thisIdx += 2
newIdx += 2
}
else {
// Leave this one out entirely, and don't increment newIdx
thatIdx += 2
}
}
if (thisIdx < thisLen) {
// that.pairs is spent. Just finish off this
while (thisIdx < thisLen) {
newPairs(newIdx) = this.pairs(thisIdx)
newPairs(newIdx + 1) = this.pairs(thisIdx + 1)
thisIdx += 2
newIdx += 2
}
} // else this is spent, so we're done
assert((newIdx & 1) == 0)
new VecMap(newPairs, newIdx / 2)
}
override def equals(other: Any): Boolean = {
other match {
case that: VecMap => this.pairs.take(this.size * 2).deep == that.pairs.take(that.size * 2).deep
case _ => false
}
}
override def hashCode: Int = java.util.Arrays.hashCode(pairs)
override def toString: String = {
val eleStrs = toMap map { case (k, v) => s"$k -> $v" }
eleStrs.mkString("VecMap(", ", ", ")")
}
/*
* C is just the result of this integral. C tells you the probability that someone is going to sit somewhere and
* the probability of your uncertainty about what the parameters of that table truly are. If you toss 10 coins and
* get 6 heads 4 tails, you'd guess it is 60/40, but you wouldn't be very certain. If you had 1000 samples you'd be
* more certain, and likely be closer to 50/50. C is accounting for that uncertainty.
*/
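// In symbols (treating absent entries as zero): C = sum_i logGamma(a_i + beta) - logGamma(sum_i (a_i + beta)),
// where i ranges over all params.topicVectorSize components; the "smalls" bookkeeping below just
// handles the zero entries of the sparse vector without materialising them.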
def estimateCSmoothingFirst: Double = {
// Compute partSumAi and partSumLogGammaAi by iterating through all
// values in the WeightedVector's vecMap and computing the sum of the
// values and their logGammas.
var partSumAi: Double = 0.0
var partSumLogGammaAi: Double = 0.0
var idx = 1
val len = size * 2
while (idx < len) {
val v = pairs(idx)
partSumAi += v + params.beta // add beta to this and the next value to smooth the curve
val logGammaSmoothingFirst =
if (v < allTopicVectorResults.length) cache(v)
else logGamma(v + params.beta)
partSumLogGammaAi += logGammaSmoothingFirst
idx += 2
}
// The number of "smalls" (used in smoothing) is going to be the topic vector size minus the size
// of the weighted vector, because our weighted vectors are sparse (zeros are left out). When we
// "smooth," we change those zeros into small numbers.
val countOfSmalls: Int = params.topicVectorSize - size
// Because wv is a sparse vector, it does not contain zero values. To smooth, we want to add params.beta
// to each conceptual value, including the zeros. So each zero should become params.beta. Then we'll want
// to sum them all up. The sum of just these small numbers will be the countOfSmalls * params.beta:
val sumOfSmalls: Double = countOfSmalls * params.beta
// The total of the unlogged values is that partSumAi, which is the sum of all the non-zero values
// (which existed in the wv sparse vector) plus the sumOfSmalls calculated above.
val totalSumAi: Double = partSumAi + sumOfSmalls
// Similarly, the logGammaOfSmalls is the logGamma of params.beta multiplied by the number of zero values,
// which are left out of the wv sparse vector.
val logGammaOfSmalls: Double = countOfSmalls * logGamma(params.beta)
// Similar to above, the total log gamma sum is the one computed from the non-zero values that appeared
// in the sparse vector plus the logGammaOfSmalls, computed above.
val totalLogGammaAi: Double = partSumLogGammaAi + logGammaOfSmalls
// Result is the total log gamma sum minus the logGamma of the total regular sum
totalLogGammaAi - logGamma(totalSumAi)
}
}
object VecMap {
// Array(key0, value0, key1, value1, key2, value2, ...)
def apply(map: Map[Int, Int]): VecMap = {
val sorted: Vector[(Int, Int)] = map.toVector.sortBy(_._1)
val pairs = new Array[Int](sorted.size * 2)
var it = sorted.iterator
var idx = 0
while (it.hasNext) {
val (k, v) = it.next()
pairs(idx) = k
pairs(idx + 1) = v
idx += 2
}
new VecMap(pairs, sorted.size)
}
}
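// Illustration of the sparse encoding (hypothetical values): VecMap(Map(2 -> 5, 0 -> 3)) is stored,
// sorted by key, as Array(0, 3, 2, 5). Addition merges by key, e.g.
// (VecMap(Map(0 -> 3)) + VecMap(Map(0 -> 1, 4 -> 2))).toMap == Map(0 -> 4, 4 -> 2), and
// subtraction simply ignores keys that exist only on the right-hand side.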
case class TopicVector(id: Long, vecMap: VecMap) {
def toTopicVectorResult: TopicVectorInput = TopicVectorInput(id, vecMap.toMap)
}
object TopicVector {
def from(tvr: TopicVectorInput): TopicVector = TopicVector(tvr.id, VecMap(tvr.vecMap))
}
val allTopicVectors: Vector[TopicVector] = allTopicVectorResults.map(tvr => TopicVector(tvr.id, VecMap(tvr.vecMap)))
case class Cluster(topicVectors: Vector[TopicVector], topicCountsSums: VecMap) {
Cluster.requireTopicVectorsSizeLessThanOrEqualToTotal(topicVectors, params.topicVectorSize) // This is #3
def size: Int = topicVectors.size
val cValue: Double = topicCountsSums.estimateCSmoothingFirst
def posteriorLogPdf(tv: TopicVector): Double = {
val newTopicCountsSums: VecMap = tv.vecMap + topicCountsSums
// Now newTopicCountsSums is a sparse vector vecMap that represents the sum of the tv.vecMap
// and the topicCountSums with no smoothing yet.
newTopicCountsSums.estimateCSmoothingFirst - cValue
}
def hasOneOccupant: Boolean = topicVectors.length == 1
def addOccupant(topicVector: TopicVector): Cluster = {
if (topicVectors.exists(_.id == topicVector.id)) { // Now this if check is showing up #2 anon
throw new IllegalStateException
}
val newTopicVectors = topicVector +: topicVectors
val newTopicCountsSums = topicVector.vecMap + this.topicCountsSums
copy(topicVectors = newTopicVectors, topicCountsSums = newTopicCountsSums)
}
def removeOccupant(topicId: Long): Cluster = {
if (topicVectors.length <= 1) { // TODO: requireState
throw new IllegalStateException
}
val topicIdx = topicVectors.indexWhere(tv => tv.id == topicId) // Now this one is up there, #1 anon
if (topicIdx == -1) {
throw new IllegalArgumentException("topicId not found")
}
val tvToRemove = topicVectors(topicIdx)
Cluster(topicVectors.take(topicIdx) ++ topicVectors.drop(topicIdx + 1), topicCountsSums - tvToRemove.vecMap)
}
}
object Cluster {
def apply(topicVectors: Vector[TopicVector]): Cluster = {
val topicCountsSums: VecMap = computeTopicCountsSums(topicVectors)
apply(topicVectors, topicCountsSums)
}
// This is # 1 right now
def computeTopicCountsSums(topicVectors: Vector[TopicVector]): VecMap = {
var idx = 0
var sum = VecMap(Map.empty)
while (idx < topicVectors.length) {
val vecMap = topicVectors(idx).vecMap
sum += vecMap
idx += 1
}
sum
}
@elidable(ASSERTION) def requireTopicVectorsSizeLessThanOrEqualToTotal(topicVectors: Vector[TopicVector], topicVectorSize: Int): Unit = {
if (topicVectors exists { case TopicVector(_, mp) => mp.size > topicVectorSize }) {
val probs: Vector[(Long, Int)] = topicVectors.collect { case TopicVector(id, mp) if mp.size > topicVectorSize => (id, mp.size) }
throw new IllegalArgumentException(s"${probs.size} topic vector maps had size > params.topicVectorSize $topicVectorSize: ${probs}")
}
}
}
case class Restaurant private (
clusters: Vector[Cluster],
private val assignments: Array[Int] // Array index is TopicVector idx; element is cluster idx
) { thisRestaurant =>
def assignment(topicVectorIdx: Int): Int = assignments(topicVectorIdx)
// Insert the topic vector
def insert(topicVectorIdx: Int, optClusterIdx: Option[Int]): Restaurant = {
optClusterIdx match {
case None => // Put in new cluster at end
val newAssignments = assignments.clone()
newAssignments(topicVectorIdx) = clusters.length
new Restaurant(
clusters :+ Cluster(Vector(allTopicVectors(topicVectorIdx))),
newAssignments // assignments + (topicVectorIdx -> clusters.length)
)
case Some(clusterIdx) =>
val newCluster = clusters(clusterIdx).addOccupant(allTopicVectors(topicVectorIdx))
val newClusters = clusters.updated(clusterIdx, newCluster)
val newAssignments = assignments.clone()
newAssignments(topicVectorIdx) = clusterIdx
new Restaurant(
newClusters,
newAssignments // assignments + (topicVectorIdx -> clusterIdx)
)
}
}
def extract(topicVectorIdx: Int): Extraction = {
require(topicVectorIdx >= 0 && topicVectorIdx < allTopicVectors.length)
val topicVector = allTopicVectors(topicVectorIdx)
val clusterIdx = thisRestaurant.assignment(topicVectorIdx)
val cluster = thisRestaurant.clusters(clusterIdx)
val newClusterOpt =
if (cluster.hasOneOccupant) None
else Some(cluster.removeOccupant(topicVector.id))
val (newClusters, newAssignments) =
newClusterOpt match {
case None => // remove cluster
val theNewClusters = thisRestaurant.clusters.take(clusterIdx) ++ thisRestaurant.clusters.drop(clusterIdx + 1)
val theNewAssignments = assignments.clone()
// val it: Iterator[(Int, Int)] = assignments.iterator
var tvIdx = 0
while (tvIdx < theNewAssignments.length) {
val cIdx = theNewAssignments(tvIdx)
if (cIdx == clusterIdx) { // drop the entry for the cluster we just removed
theNewAssignments(tvIdx) = -1
}
else if (cIdx > clusterIdx) {
theNewAssignments(tvIdx) = cIdx - 1
}
tvIdx += 1
}
(theNewClusters, theNewAssignments)
case Some(newCluster) =>
val theNewClusters = thisRestaurant.clusters.updated(clusterIdx, newCluster)
val theNewAssignments = assignments.clone()
theNewAssignments(topicVectorIdx) = -1
// val theNewAssignments = assignments - topicVectorIdx
(theNewClusters, theNewAssignments)
}
val newRestaurant = new Restaurant(newClusters, newAssignments)
Extraction(allTopicVectors(topicVectorIdx), topicVectorIdx, newRestaurant)
}
override def equals(other: Any): Boolean = {
other match {
case that: Restaurant => this.clusters == that.clusters
case _ => false
}
}
override def hashCode: Int = clusters.hashCode
def dataLikelihoodIgnoringClusterCount: Double = {
var idx = 0
var sum: Double = 0.0
while (idx < clusters.length) {
sum += clusters(idx).cValue
idx += 1
}
sum
}
// Returned array has key0, value0, key1, value1, key2, value2, ...
def clusterSizeHistogram: Array[Int] = {
val histoMap: Map[Int, Int] = clusters.groupBy(_.size).mapValues(_.size)
var idx = 0
val result = new Array[Int](2 * histoMap.size)
val it = histoMap.iterator
while (it.hasNext) {
val (k, v) = it.next()
result(idx) = k
result(idx + 1) = v
idx += 2
}
result
}
def clusterCountPart: Double =
clusters.size * (logGamma(params.beta) * params.topicVectorSize - logGamma(params.beta * params.topicVectorSize))
def logPosteriorProbability: Double = {
val dLICC = dataLikelihoodIgnoringClusterCount
val ccP = clusterCountPart
val cA = Restaurant.logProbabilityOfClusteringArrangement(clusters.size, params.alpha, clusterSizeHistogram)
(dLICC - ccP) + cA
}
}
object Restaurant {
def apply(clusters: Vector[Cluster]): Restaurant = {
val assignments = new Array[Int](allTopicVectors.length)
java.util.Arrays.fill(assignments, -1)
var clusterIdx = 0
while (clusterIdx < clusters.length) {
val cluster = clusters(clusterIdx)
var ctvIdx = 0
while (ctvIdx < cluster.topicVectors.length) {
val clustersTopicVector = cluster.topicVectors(ctvIdx)
val topicVectorIdx = allTopicVectors.indexWhere(tv => tv.id == clustersTopicVector.id)
assignments(topicVectorIdx) = clusterIdx
ctvIdx += 1
}
clusterIdx += 1
}
new Restaurant(clusters, assignments)
}
def logProbabilityOfClusteringArrangement(
clusterCount: Int,
alpha: Double,
clusterSizeHistogram: Array[Int] // key0, value0, key1, value1, ...
): Double = {
var sum = 0.0
var idx = 0
while (idx < clusterSizeHistogram.length) {
val clusterSize = clusterSizeHistogram(idx)
val countOfThisSize = clusterSizeHistogram(idx + 1)
sum += countOfThisSize * Math.log(clusterSize) + logGamma(countOfThisSize + 1.0)
idx += 2
}
clusterCount * Math.log(alpha) - sum
}
}
case class Extraction(
extracted: TopicVector,
extractedIdx: Int,
restaurant: Restaurant
) {
val clusters: Vector[Cluster] = restaurant.clusters
def assignment(topicVectorIdx: Int): Int = restaurant.assignment(topicVectorIdx)
}
def initializeClusters(shuffle: Boolean = true): Vector[Cluster] = {
// Throw IAE if contained map has more elements than the passed topicVectorSize.
val unshuffled = allTopicVectors.map(tv => Cluster(Vector(tv)))
if (shuffle) Random.shuffle(unshuffled) else unshuffled
}
/*
* LogGamma is a function used in the collapsed Gibbs sampler. (In the uncollapsed sampler we would also have
* to estimate the parameters of all the multinomials.) The collapsed sampler folds all of that into
* one function, and that function happens to be expressed as the difference of sums in estimateC that uses
* this log function. Gamma comes up a lot in multinomial stuff. Gamma is the continuous version of factorial.
* The datumbox author did a Taylor series to 5 digits of precision and hard-coded the numbers.
*/
def logGamma(x: Double): Double = {
val tmp: Double = (x - 0.5) * Math.log(x + 4.5) - (x + 4.5)
val ser: Double = 1.0 + 76.18009173 / (x + 0) - 86.50532033 / (x + 1) +
24.01409822 / (x + 2) - 1.231739516 / (x + 3) +
0.00120858003 / (x + 4) - 0.00000536382 / (x + 5)
tmp + Math.log(ser * Math.sqrt(2 * Math.PI))
}
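// Rough sanity check: since Gamma(n) = (n-1)!, logGamma(5.0) should come out close to log(24) ~= 3.178.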
def parClusterWeights(tv: TopicVector, clustersMinusTv: Vector[Cluster]): ParVector[Double] = {
clustersMinusTv.par.map { c =>
val pdf = c.posteriorLogPdf(tv) // marginalLogLikelihoodXi
val mixingTv = c.size / (params.alpha + allTopicVectorResults.length - 1) // double mixingXi = ck.size()/(alpha+n-1)
pdf + Math.log(mixingTv)
}
}
private final val ClusterWeightsDenom = params.alpha + allTopicVectorResults.length - 1
/*
* This method computes in one go for each topic vector (each customer, whose id is the key in the returned map), the probability
* that they will be seated at a new cluster. One element at a time we'll add this to the end of the table assignment probabilities
* vector using this line of code in tableAssignmentProbabilities:
*
* val unnormalizedWeights: Vector[Double] = clusterWeights(params, tv, clustersMinusTv) :+ newClusterWeights(tv.id)
*/
private final val cNew: Cluster = Cluster(Vector.empty)
private final val probNewCluster: Double = params.alpha / ClusterWeightsDenom // This is the only one that changes
private final val logProbNewCluster: Double = Math.log(probNewCluster)
val newClusterWeights: Array[Double] = {
val result = new Array[Double](allTopicVectors.length)
var idx = 0
while (idx < allTopicVectors.length) {
val priorLogPredictive: Double = cNew.posteriorLogPdf(allTopicVectors(idx))
result(idx) = (priorLogPredictive + logProbNewCluster)
idx += 1
}
result
}
// Note: possible performance enhancement for weightsForStartingANewCluster: Could just leave out the priorLogPredictive here, and then
// add it in later (in selectCluster) then we don't have to do it every time.
// Result is one longer than the number of clusters, because last one is the probability we'll stick them in the new cluster
/*
* This method estimates the probability of assigning topic vector, tv, to the elements of the cluster vector (which
* currently do not include tv, as it has been extracted).
*/
def clusterWeights(tv: TopicVector, clustersMinusTv: Vector[Cluster]): Array[Double] = {
val cmtLen = clustersMinusTv.length
val result = new Array[Double](cmtLen + 1) // Room for one more at the end
var idx = 0
while (idx < cmtLen) {
val c = clustersMinusTv(idx)
val pdf = c.posteriorLogPdf(tv) // marginalLogLikelihoodXi
val mixingTv = c.size / ClusterWeightsDenom // double mixingXi = ck.size()/(alpha+n-1)
result(idx) = pdf + Math.log(mixingTv)
idx += 1
}
result
}
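// tableAssignmentProbabilities below turns these log-space weights into normalised probabilities.
// Subtracting the max before exponentiating is the usual log-sum-exp stabilisation: the final
// ratios are unchanged, but exp no longer underflows to zero for every entry at once.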
def tableAssignmentProbabilities(tvIdx: Int, tv: TopicVector, clustersMinusTv: Vector[Cluster]): Array[Double] = {
// Here is where I'm sticking the start new weight to the end!
val unnormalizedWeights: Array[Double] = clusterWeights(tv, clustersMinusTv) // multiPurpose is first, unnormalizedWeights
val uwLen = unnormalizedWeights.length
unnormalizedWeights(uwLen - 1) = newClusterWeights(tvIdx)
val maxWeight: Double = unnormalizedWeights.max
val unloggedMaxRemovedWeights = unnormalizedWeights
var idx = 0
while (idx < uwLen) {
val uw = unnormalizedWeights(idx)
unloggedMaxRemovedWeights(idx) = Math.exp(uw - maxWeight)
idx += 1
}
val sum: Double = unloggedMaxRemovedWeights.sum
val result = unloggedMaxRemovedWeights
idx = 0
while (idx < uwLen) {
val umrw = unloggedMaxRemovedWeights(idx)
result(idx) = umrw / sum
idx += 1
}
result
}
// Result is one longer than the number of clusters, because last one is the probability we'll stick them in the new cluster
def parTableAssignmentProbabilities(tvIdx: Int, tv: TopicVector, clustersMinusTv: Vector[Cluster]): Vector[Double] = {
// Here is where I'm sticking the start new weight to the end!
val unnormalizedWeights: ParVector[Double] = parClusterWeights(tv, clustersMinusTv) :+ newClusterWeights(tvIdx)
val maxWeight: Double = unnormalizedWeights.max
val unloggedMaxRemovedWeights: ParVector[Double] = unnormalizedWeights.map(uw => Math.exp(uw - maxWeight))
val sum: Double = unloggedMaxRemovedWeights.sum
unloggedMaxRemovedWeights.map(uw => uw / sum).seq // might have to avoid divide by zero as in the Java version
}
def weightedProbabilitySampling(choices: Array[Double], rng: RandomNumGen): Int = {
val randomNumber0To1: Double = rng.next()
@tailrec
def recurseWPS(sum: Double, idx: Int): Int = {
if (idx == choices.length) 0
else {
val newSum = sum + choices(idx)
if (newSum > randomNumber0To1) idx
else recurseWPS(newSum, idx + 1)
}
}
recurseWPS(0.0, 0)
}
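// Example (hypothetical values): with normalised choices Array(0.2, 0.5, 0.3) and a drawn random
// number of 0.6, the cumulative sums are 0.2 then 0.7, so index 1 is returned; a draw of 0.95
// returns index 2.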
// The parallel version has already boxed it, so take a Vector[Double]
def weightedProbabilitySamplingForPar(choices: Vector[Double], rng: RandomNumGen): Int = {
val randomNumber0To1: Double = rng.next()
@tailrec
def recurseWPS(sum: Double, idx: Int): Int = {
if (idx == choices.length) 0
else {
val newSum = sum + choices(idx)
if (newSum > randomNumber0To1) idx
else recurseWPS(newSum, idx + 1)
}
}
recurseWPS(0.0, 0)
}
def extract(topicVectorIdx: Int, restaurant: Restaurant): Extraction = restaurant.extract(topicVectorIdx)
def insert(optClusterIdx: Option[Int], extraction: Extraction): Restaurant = {
val requirementSatisfied =
optClusterIdx match {
case None => true
case Some(clusterIdx) =>
clusterIdx >= 0 && clusterIdx < extraction.restaurant.clusters.length
}
require(requirementSatisfied, s"optClusterIdx was: $optClusterIdx")
extraction.restaurant.insert(extraction.extractedIdx, optClusterIdx)
}
def reseatAllCustomers(
restaurant: Restaurant,
rng: RandomNumGen,
selector: (Extraction, RandomNumGen) => Option[Int]
): Restaurant = {
@tailrec
def recurseRAC(topicVectorIdx: Int, rst: Restaurant): Restaurant = {
if (topicVectorIdx >= allTopicVectors.length) rst
else {
val extraction =
extract(topicVectorIdx, rst)
val clusterIdxOpt =
selector(extraction, rng)
val newRestaurant =
insert(clusterIdxOpt, extraction)
recurseRAC(topicVectorIdx + 1, newRestaurant)
}
}
recurseRAC(0, restaurant)
}
@elidable(INFO) def printInfo(msg: String): Unit = println(msg)
def findClusters(
maxIterations: Int,
rng: RandomNumGen,
selector: (Extraction, RandomNumGen) => Option[Int],
shuffle: Boolean = true
): CRPResult = {
@tailrec
def recurseFC(iterationCount: Int, restaurant: Restaurant, bestRestaurant: Restaurant, bestScore: Double): CRPResult = {
// printInfo(s"iteration $iterationCount: cluster count was ${restaurant.clusters.size} --> ${restaurant.clusters.map(cluster => cluster.topicVectors.size)}")
if (iterationCount == maxIterations) {
val clusterResults = bestRestaurant.clusters.zipWithIndex.map{case (c, clusterIndex) =>
def topicVectorResult(tv: TopicVector): TopicVectorResult = {
val unnormalizedWeights: ParVector[Double] = parClusterWeights(tv, bestRestaurant.clusters)
val maxWeight: Double = unnormalizedWeights.max
val unloggedMaxRemovedWeights: ParVector[Double] = unnormalizedWeights.map(uw => Math.exp(uw - maxWeight))
val sum: Double = unloggedMaxRemovedWeights.sum
val belongProb = unloggedMaxRemovedWeights(clusterIndex) / sum
TopicVectorResult(tv.id, tv.vecMap.toMap, belongProb)
}
ClusterResult(
params,
c.topicVectors.map(topicVectorResult),
c.topicCountsSums.toMap
)
}
CRPResult(iterationCount, clusterResults)
}
else {
val (newRestaurant, millis) = time { reseatAllCustomers(restaurant, rng, selector) }
// printInfo(s" --> ${restaurant.clusters.map(cluster => cluster.topicVectors.size)}")
val newScore = newRestaurant.logPosteriorProbability
val (newBestRestaurant, newBestScore) =
if (newScore > bestScore) {
// println("And we have a new winner!")
(newRestaurant, newScore)
}
else (bestRestaurant, bestScore)
// println("logPosteriorProbability was " + newScore)
val asterisk: String = if (newScore > bestScore) "*" else ""
printInfo(f"Iteration ${iterationCount + 1}: cluster count was ${restaurant.clusters.size}, reseat: $millis, score: ${newScore}%.5f${asterisk}")
recurseFC(iterationCount + 1, newRestaurant, newBestRestaurant, newBestScore)
}
}
val initClusters = initializeClusters(shuffle)
val initRestaurant = Restaurant(initClusters)
val initScore = initRestaurant.logPosteriorProbability
recurseFC(0, initRestaurant, initRestaurant, initScore)
}
def selectCluster(
extraction: Extraction,
rng: RandomNumGen
): Option[Int] = {
val (choice, probsLen): (Int, Int) =
if (extraction.restaurant.clusters.length > 1000) {
val probs: Vector[Double] = parTableAssignmentProbabilities(extraction.extractedIdx, extraction.extracted, extraction.restaurant.clusters)
(weightedProbabilitySamplingForPar(probs, rng), probs.length)
}
else {
val probs: Array[Double] = tableAssignmentProbabilities(extraction.extractedIdx, extraction.extracted, extraction.restaurant.clusters)
(weightedProbabilitySampling(probs, rng), probs.length)
}
if (choice == probsLen - 1) None else Some(choice)
}
}
|
MonsantoCo/chinese-restaurant-process
|
src/main/scala/com/monsanto/stats/tables/clustering/CRP.scala
|
Scala
|
bsd-3-clause
| 27,762 |
package cmdreader.std
import cmdreader.Command
import types._
import run._
class Disassembly extends Command {
override def getName(): String = "dsa"
override def isValidArg0(n: Int): Boolean = n == 1
override def apply(args: Array[Type]): Type = {
args(0) match {
case f: TBinFunc => new TString(Disassembler.disassemble(f.toBytecode))
case _ => new TError(1)
}
}
}
|
bluebear94/bag
|
src/main/scala/cmdreader/std/Disassembly.scala
|
Scala
|
gpl-3.0
| 396 |
package com.twitter.finatra.http.benchmarks
import com.twitter.inject.Test
class FinagleRequestScopeBenchmarkTest extends Test {
"test" in {
val benchmark = new FinagleRequestScopeBenchmark()
benchmark.timeServiceWithRequestScopeFilter()
}
}
|
syamantm/finatra
|
benchmarks/src/test/scala/com/twitter/finatra/http/benchmarks/FinagleRequestScopeBenchmarkTest.scala
|
Scala
|
apache-2.0
| 257 |
package glaux.interfaces.akka
package service
import akka.actor.{Props, Actor, ActorLogging}
import Protocols.{Response, Confirmed}
import glaux.interfaces.api.domain.{SessionId, ProfileId, Reading}
import Protocols.Agent._
import glaux.interfaces.api.persistence.SessionPersistence
import glaux.reinforcementlearning._
import scala.concurrent.Future
import scala.util.{Failure, Success}
class AgentForUser[AT <: QAgent: SessionPersistence](qAgent: AT, sessionId: SessionId) extends Actor with ActorLogging {
import qAgent.Session
private val repo = implicitly[SessionPersistence[AT]] //todo: use cached implicit from shapeless here.
import context.dispatcher
private lazy val previousSessionF: Future[Option[Session]] = repo.get(qAgent, sessionId)
def receive: Receive = initializing(Vector.empty)
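// Two behaviours, as read from the code below: `initializing` buffers readings until qAgent.start
// can build a session (and until the persisted previous session has been loaded), then switches to
// `inSession`, which reports readings/rewards, serves actions, and on termination persists the
// closed session and stops the actor.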
def initializing(initReadings: Vector[Reading]): Receive = {
case Report(reading, _) ⇒
val (newContext, response) = tryStart(initReadings :+ reading)
sender ! response
context become newContext
case m @ (RequestAction | QueryStatus) ⇒ tryStart(initReadings) match {
case (newContext, ActionsAvailable) ⇒
self forward m
context become newContext
case (newContext, response) ⇒
sender ! response
context become newContext
}
}
def inSession(session: Session): Receive = {
case Report(reading, reward) ⇒
context become inSession(qAgent.report(reading, reward, session))
sender ! ActionsAvailable
case RequestAction ⇒
val (action, newSession) = qAgent.requestAction(session)
sender ! ActionResult(action)
context become inSession(newSession)
case ReportTermination(reading, reward) ⇒
val newS = qAgent.report(reading, reward, session)
val closed = qAgent.close(newS)
val replyTo = sender
storeSession(closed).map { _ ⇒
replyTo ! Confirmed
}.onFailure {
case e: Throwable ⇒ throw e
}
context stop self
case QueryStatus ⇒
sender ! AgentStatus(session.iteration.memory.size, session.iteration.loss)
}
private def tryStart(readings: Vector[Reading]): (Receive, Response) = {
def startAgent(previousSession: Option[Session]): (Receive, Response) =
qAgent.start(readings, previousSession) match {
case Left(m) ⇒
(initializing(readings), PendingMoreReadings)
case Right(session) ⇒
(inSession(session), ActionsAvailable)
}
previousSessionF.value match {
case Some(Success(previousSession)) ⇒
startAgent(previousSession)
case Some(Failure(e)) ⇒
throw e
case None ⇒ //future not completed yet
(initializing(readings), Initializing)
}
}
private def storeSession(session: Session): Future[Unit] = repo.upsert(qAgent, sessionId)(session)
}
object AgentForUser {
def props[AT <: QAgent: SessionPersistence](qAgent: AT, sessionId: SessionId): Props =
Props(new AgentForUser(qAgent, sessionId))
}
|
typeAI/glaux
|
akka-interface-service/src/main/scala/glaux/interfaces.akka/service/AgentForUser.scala
|
Scala
|
apache-2.0
| 3,043 |