| code (stringlengths, 5 to 1M) | repo_name (stringlengths, 5 to 109) | path (stringlengths, 6 to 208) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64, 5 to 1M) |
|---|---|---|---|---|---|
package com.catinthedark.ld36
import java.net.URI
import com.badlogic.gdx.{Gdx, Input, InputAdapter}
import com.catinthedark.lib.YieldUnit
class ConnectState(address: String) extends YieldUnit[String, Shared0] {
var shared0: Shared0 = _
var hardSkip: Boolean = false
override def onActivate(data: String): Unit = {
shared0 = Shared0(new URI(address))
Gdx.input.setInputProcessor(new InputAdapter {
override def keyDown(keyCode: Int): Boolean = {
keyCode match {
case Input.Keys.BACKSPACE => hardSkip = true
case _ =>
}
true
}
})
shared0.start()
shared0.networkControl.onServerHello.ports += onServerHello
def onServerHello(u: Unit): Unit = {
shared0.networkControl.hello(data)
}
}
override def onExit(): Unit = {
Gdx.input.setInputProcessor(null)
}
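// Poll each frame: move on once the network layer reports a connection, or immediately if the user forced a skip with BACKSPACE.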
override def run(delta: Float): Option[Shared0] = {
if (hardSkip) {
hardSkip = false
println("WARNING hard skip of network connection")
return Some(shared0)
}
if (shared0 != null && shared0.networkControl.isConnected.isDefined) {
Some(shared0)
} else {
None
}
}
}
| cat-in-the-dark/old48_36_game | client/src/main/scala/com/catinthedark/ld36/ConnectState.scala | Scala | mit | 1,184 |
package byte_walk
import byteR._
import byteR.cfg._
import exceptions._
import scala.collection.mutable.{HashMap,HashSet,Map}
object CFGWalk {
def apply(instructions: List[JVMInstruction]): JVMCFG = {
// This represents the CFG. It goes from source locations
// that are at the start of a basic block to source locations
// that are at the start of the BB's that follow.
val cfgMap = new HashMap[BBStart, (BBEnd, BBPred, BBSucc)]()
// Int is the instruction index of the BB visited.
val visitedBBs = new HashSet[BBStart]()
// The successors are easy to set as the map is built up.
// The predecessors must be added after each BB has had
// successors accumulated.
var bbPreds = new HashMap[BBStart, BBPred]()
// Create the map:
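// followMap sends each instruction index to the (index, instruction) pairs of its possible successors.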
val followMap = BWalk.walk(instructions)
if (instructions.length > 0) {
// Do a DFS of the BB's. At each BB, keep track of the live variables
// set.
var bbList = List(BBStart(0))
bbPreds(BBStart(0)) = new BBPred(List())
// Stop the while loop from running forever.
var loopCount = 0
while (bbList.length != 0) {
loopCount += 1
if (loopCount == 1000000) {
throw new ICE("CFGWalk running forever")
}
val currentBB = bbList.head
bbList = bbList.tail
if (!visitedBBs.contains(currentBB)) {
val (bbEndIndex, next) = bbApply(currentBB,
instructions(currentBB.index),
followMap)
bbList = bbList ::: next.succs
// Add this block to the list of successors of BBs in the next set.
next.succs.foreach {
case nextBB => if (bbPreds.contains(nextBB)) {
bbPreds(nextBB) = BBPred(currentBB :: bbPreds(nextBB).preds)
} else {
bbPreds(nextBB) = BBPred(List(currentBB))
}
}
// Build the CFG:
// The BBPred is later overwritten.
cfgMap(currentBB) = (bbEndIndex, BBPred(List()), next)
visitedBBs += currentBB
}
}
// Now, set the predecessors for each BB.
cfgMap.foreach {
case (start, (end, pred, succs)) =>
cfgMap(start) = (end, bbPreds(start), succs)
}
}
new JVMCFG(cfgMap, instructions.toArray)
}
/* Given some instruction that is the start of a basic block,
* go through the basic block until we reach a symbol indicating the
* end of the BB.
*
* Return the range that constitutes this BB.
*
* The BB is identified by the index of the first instruction in it.
*/
def bbApply(instructionNo: BBStart, startInstruction: JVMInstruction,
instructionMap: Map[Int, List[(Int, JVMInstruction)]]):
(BBEnd, BBSucc) = {
// Prevent the loop from running forever.
var loopCount = 0;
// Keep track of the index of the last instruction we have looked at.
var bbCurrIndex = instructionNo.index
var bbEndInstruction = startInstruction
// Keep track of label instructions, which signal the end of BBs
var endOfBB = false;
while (instructionMap(bbCurrIndex).length == 1
&& !endOfBB) {
loopCount += 1
if (loopCount == 100000)
throw new ICE("BB identification appears to be running indefinitely")
// Note that this pattern match is safe, as we know that the length
// of the list is 1.
val (nextInd, instruction) = instructionMap(bbCurrIndex) match {
case List((index, instr)) => (index, instr)
case _ => throw new UnreachableException()
}
bbEndInstruction = instruction
bbCurrIndex = nextInd
instruction match {
case labelInstruction: JVMLabelInstruction =>
// This is where the BB terminates. This might happen if we have
// e.g. a label mark, which could be some other BB jumping in here.
endOfBB = true
case other =>
}
}
val bbEnd = bbEndInstruction match {
// If the end index is a label mark, we need the next BB to
// start with the same label mark.
case lMark : JVMLabelMark => BBEnd(bbCurrIndex - 1)
case other => BBEnd(bbCurrIndex)
}
val nextInstructionNos = instructionMap(bbEnd.index) map {
case (no, instr) => BBStart(no)
}
(bbEnd, BBSucc(nextInstructionNos))
}
}
| j-c-w/mlc | src/main/scala/byte_walk/CFGWalk.scala | Scala | gpl-3.0 | 4,422 |
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
/*
Copyright (c) 2007-2016, Rickard Nilsson
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the EPFL nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.scalacheck.util
sealed trait FreqMap[T] extends Serializable {
protected val underlying: scala.collection.immutable.Map[T,Int]
val total: Int
def +(t: T): FreqMap[T] = new FreqMap[T] {
private val n = FreqMap.this.underlying.get(t) match {
case None => 1
case Some(n) => n+1
}
val underlying = FreqMap.this.underlying + (t -> n)
val total = FreqMap.this.total + 1
}
def -(t: T): FreqMap[T] = new FreqMap[T] {
val underlying = FreqMap.this.underlying.get(t) match {
case None => FreqMap.this.underlying
case Some(n) => FreqMap.this.underlying + (t -> (n-1))
}
val total = FreqMap.this.total + 1
}
def ++(fm: FreqMap[T]): FreqMap[T] = new FreqMap[T] {
private val keys = FreqMap.this.underlying.keySet ++ fm.underlying.keySet
private val mappings = keys.toStream.map { x =>
(x, fm.getCount(x).getOrElse(0) + FreqMap.this.getCount(x).getOrElse(0))
}
val underlying = scala.collection.immutable.Map(mappings: _*)
val total = FreqMap.this.total + fm.total
}
def --(fm: FreqMap[T]): FreqMap[T] = new FreqMap[T] {
val underlying = FreqMap.this.underlying transform {
case (x,n) => n - fm.getCount(x).getOrElse(0)
}
lazy val total = (0 /: underlying.valuesIterator) (_ + _)
}
def getCount(t: T) = underlying.get(t)
def getCounts: List[(T,Int)] = underlying.toList.sortBy(-_._2)
def getRatio(t: T) = for(c <- getCount(t)) yield c.toDouble/total
def getRatios = for((t,c) <- getCounts) yield (t, c.toDouble/total)
override def toString = underlying.toString
}
object FreqMap {
def empty[T]: FreqMap[T] = new FreqMap[T] {
val underlying = scala.collection.immutable.Map.empty[T,Int]
val total = 0
}
}
| sirthias/swave | core/src/test/scala/org/scalacheck/util/FreqMap.scala | Scala | mpl-2.0 | 3,486 |
package spire.math
import spire.math.ArbitrarySupport.{Positive, NonNegative}
import scala.util.Try
import org.scalatest.FunSuite
import spire.implicits.{eqOps => _, _}
import spire.laws.arb.{interval => interval_, rational}
import spire.random.{Uniform, Dist}
import org.scalatest.Matchers
import org.scalacheck.Arbitrary._
import org.scalatest._
import prop._
import org.scalacheck._
import Gen._
import Arbitrary.arbitrary
class IntervalTest extends FunSuite {
def cc(n1: Double, n2: Double) = Interval.closed(n1, n2)
def co(n1: Double, n2: Double) = Interval.openUpper(n1, n2)
def oc(n1: Double, n2: Double) = Interval.openLower(n1, n2)
def oo(n1: Double, n2: Double) = Interval.open(n1, n2)
val e = Interval.empty[Double]
val all = Interval.all[Double]
test("[2, inf] is a superset of empty") { assert(Interval.atOrAbove(2).isSupersetOf(Interval.empty[Int])) }
test("empty is empty") { assert(e.isEmpty) }
test("point is point") { assert(Interval.point(2).isPoint) }
test("[2,2] is point") { assert(Interval.closed(2, 2).isPoint) }
test("[3,2] is empty") { assert(Interval.closed(3, 2).isEmpty) }
test("empty interval is not above -1") { assert(!Interval.empty[Int].hasAbove(-1)) }
test("empty interval is not below 1") { assert(!Interval.empty[Int].hasBelow(1)) }
test("[2] has above 0") { assert(Interval.point(2).hasAbove(0)) }
test("[-2] has below 0") { assert(Interval.point(-2).hasBelow(0)) }
test("[0, 1] has at or above 1") { assert(Interval.closed(0, 1).hasAtOrAbove(1)) }
test("[1, 2] has at or above 1") { assert(Interval.closed(1, 2).hasAtOrAbove(1)) }
test("[1, 2] has above 1") { assert(Interval.closed(1, 2).hasAtOrAbove(1)) }
test("(1, 2] has above 1") { assert(Interval.openLower(1, 2).hasAtOrAbove(1)) }
test("Interval.point(2).toString == [2]") { assert(Interval.point(2).toString === "[2]") }
test("Interval.empty.toString == (Γ)") { assert(Interval.empty[Int].toString === "(Γ)") }
val a = cc(0.0, 4.0)
test("a.contains(0.0) is true") { assert(a.contains(0.0) === true) }
test("a.crosses(0.0) is false") { assert(a.crosses(0.0) === false) }
test("a.contains(3.334) is true") { assert(a.contains(3.334) === true) }
test("a.contains(8.334) is false") { assert(a.contains(8.334) === false) }
val b = cc(-8.0, 2.0)
test("b.contains(0.0) is true") { assert(b.contains(0.0) === true) }
test("b.crosses(0.0) is true") { assert(b.crosses(0.0) === true) }
val c = oc(0.0, 1.0)
test("c.contains(0.0) is false") { assert(c.contains(0.0) === false) }
test("c.crosses(0.0) is false") { assert(c.crosses(0.0) === false) }
test("[3, 6] -- [3, 6] = nil") { assert(cc(3D, 6D) -- cc(3D, 6D) === Nil) }
test("[3, 6] -- empty = [3, 6]") { assert(cc(3D, 6D) -- e === List(cc(3D, 6D))) }
test("[3, 6] -- all = nil") { assert(cc(3D, 6D) -- all === Nil) }
test("[3, 6] -- [4, 6] = [3, 4)") { assert(cc(3D, 6D) -- cc(4D, 6D) === List(co(3D, 4D))) }
test("[3, 6] -- [4, 5] = [3, 4), (5, 6]") { assert(cc(3D, 6D) -- cc(4D, 5D) === List(co(3D, 4D), oc(5D, 6D))) }
}
class RingIntervalTest extends FunSuite {
def cc(n1: Double, n2: Double) = Interval.closed(n1, n2)
val a = cc(0.0, 4.0)
test("a + a") { assert(a + a === cc(0.0, 8.0)) }
test("a - a") { assert(a - a === cc(-4.0, 4.0)) }
test("a * a") { assert(a * a === cc(0.0, 16.0)) }
val b = cc(-8.0, 2.0)
test("b + b") { assert(b + b === cc(-16.0, 4.0)) }
test("b - b") { assert(b - b === cc(-10.0, 10.0)) }
test("b * b") { assert(b * b === cc(-16.0, 64.0)) }
import interval.{Open, Unbound, Closed}
val c = 4.0
test("-(c, β) = (-β, -c)") {
assert( -Interval.fromBounds(Open(c), Unbound()) ===
Interval.fromBounds(Unbound(), Open(-c)) )
}
test("-(-β, c] = [-c, β)") {
assert( -Interval.fromBounds(Unbound(), Closed(c)) ===
Interval.fromBounds(Closed(-c), Unbound()) )
}
test("(c, β) * (-c) = (-β, -c * c), c > 0") {
assert( Interval.fromBounds(Open(c), Unbound()) * (-c) ===
Interval.fromBounds(Unbound(), Open(-c*c)) )
}
test("(-β, c] * (-c) = [-c * c, β), c > 0") {
assert( Interval.fromBounds(Unbound(), Closed(c)) * (-c) ===
Interval.fromBounds(Closed(-c*c), Unbound()) )
}
test("Interval multiplication bug #372") {
val a = Interval(-1, 1)
val b = Interval.above(1)
val x = -1
val y = 10
assert(a.contains(x))
assert(b.contains(y))
assert((a*b).contains(x*y))
}
test("Interval multiplication bug 1") {
val a = Interval(-3, -2)
val b = Interval.above(-10)
val x = -3
val y = -9
assert(a.contains(x))
assert(b.contains(y))
assert((a*b).contains(x*y))
}
test("Interval multiplication bug 2") {
val a = Interval.atOrBelow(0)
val b = Interval.below(-1)
assert((a*b).contains(0))
}
test("Interval multiplication bug 3") {
val a = Interval.atOrBelow(0)
val b = Interval.open(-2, -1)
assert((a*b).contains(0))
}
test("Interval multiplication bug 4") {
val a = Interval.above(2)
val b = Interval.closed(0, 1)
assert((a*b).contains(0))
}
}
class IntervalGeometricPartialOrderTest extends FunSuite {
import spire.optional.intervalGeometricPartialOrder._
import Interval.{openUpper, openLower, closed, open, point}
test("[2, 3) === [2, 3)") { assert(openUpper(2, 3).partialCompare(openUpper(2, 3)) == 0.0) }
test("[2, 3) < [3, 4]") { assert(openUpper(2, 3) < closed(3, 4)) }
test("[2, 3] < (3, 4]") { assert(closed(2, 3) < openLower(3, 4)) }
test("[2, 3] cannot be compared to [3, 4]") { assert(closed(2, 3).partialCompare(closed(3, 4)).isNaN) }
test("[3, 4] > [2, 3)") { assert(closed(3, 4) > openUpper(2, 3)) }
test("[2, 3) <= [3, 4]") { assert(openUpper(2, 3) <= closed(3, 4)) }
test("[3, 4] >= [2, 3)") { assert(closed(3, 4) >= openUpper(2, 3)) }
test("not [2, 3] < [3, 4]") { assert(!(closed(2, 3) < closed(3, 4))) }
test("not [2, 3] <= [3, 4]") { assert(!(closed(2, 3) <= closed(3, 4))) }
test("not [3, 4] > [3, 4]") { assert(!(closed(2, 3) > closed(3, 4))) }
test("not [3, 4] >= [3, 4]") { assert(!(closed(2, 3) >= closed(3, 4))) }
test("empty.partialCompare(empty) == 0.0") { assert(open(2, 2).partialCompare(open(3, 3)) == 0.0) }
test("empty cannot be compared to [2, 3]") { assert(open(2, 2).partialCompare(closed(2, 3)).isNaN) }
test("[2, 3] cannot be compared to empty") { assert(closed(2, 3).partialCompare(open(2, 2)).isNaN) }
test("Minimal and maximal elements of {[1], [2, 3], [2, 4]}") {
val intervals = Seq(point(1), closed(2, 3), closed(2, 4))
assert(intervals.pmin.toSet == Set(point(1)))
assert(intervals.pmax.toSet == Set(closed(2, 3), closed(2, 4)))
}
}
class IntervalSubsetPartialOrderTest extends FunSuite {
import spire.optional.intervalSubsetPartialOrder._
import Interval.{openUpper, openLower, closed, open, point}
test("Minimal and maximal elements of {[1, 3], [3], [2], [1]} by subset partial order") {
val intervals = Seq(closed(1, 3), point(3), point(2), point(1))
assert(intervals.pmin.toSet == Set(point(1), point(2), point(3)))
assert(intervals.pmax.toSet == Set(closed(1, 3)))
}
}
// TODO: this is just the tip of the iceberg... we also need to worry about
// unbounded intervals, closed vs open bounds, etc.
class ContinuousIntervalTest extends FunSuite {
def cc(n1: Double, n2: Double) = Interval.closed(n1, n2)
val a = 2.0
val b = 5.0
val c = 1.0
val d = 4.0
// numerator interval crosses zero
test("[-a,b] / [c,d]") { assert(cc(-a, b) / cc(c, d) === cc(-a / c, b / c)) }
test("[-a,b] / [-d,-c]") { assert(cc(-a, b) / cc(-d, -c) === cc(b / -c, -a / -c)) }
// numerator interval is positive
test("[a,b] / [-d,-c]") { assert(cc(a, b) / cc(-d, -c) === cc(b / -c, a / -d)) }
test("[a,b] / [c,d]") { assert(cc(a, b) / cc(c, d) === cc(a / d, b / c)) }
// numerator interval is negative
test("[-b,-a] / [-d,-c]") { assert(cc(-b, -a) / cc(-d, -c) === cc(-a / -d, -b / -c)) }
test("[-b,-a] / [c,d]") { assert(cc(-b, -a) / cc(c, d) === cc(-b / c, -a / d)) }
}
class IntervalReciprocalTest extends FunSuite {
def t(a: Interval[Rational], b: Interval[Rational]): Unit =
test(s"[1]/$a = $b") { assert(a.reciprocal === b) }
def error(a: Interval[Rational]): Unit =
test(s"[1]/$a = error") {
intercept[ArithmeticException] { a.reciprocal }
}
// point(x)
t(Interval.point(r"1/5"), Interval.point(r"5"))
t(Interval.point(r"-99"), Interval.point(r"-1/99"))
error(Interval.point(r"0"))
// above(x)
t(Interval.above(r"3"), Interval.open(r"0", r"1/3"))
t(Interval.above(r"0"), Interval.above(r"0")) //fixme
error(Interval.above(r"-1"))
// atOrAbove(x)
t(Interval.atOrAbove(r"1/9"), Interval.openLower(r"0", r"9"))
error(Interval.atOrAbove(r"0"))
error(Interval.atOrAbove(r"-2"))
// closed(x, y)
t(Interval.closed(r"1/2", r"4"), Interval.closed(r"1/4", r"2"))
error(Interval.closed(r"0", r"6"))
error(Interval.closed(r"-2", r"1/5"))
error(Interval.closed(r"-1/9", r"0"))
t(Interval.closed(r"-70", r"-14"), Interval.closed(r"-1/14", r"-1/70"))
// openLower(x, y)
t(Interval.openLower(r"1/2", r"4"), Interval.openUpper(r"1/4", r"2"))
t(Interval.openLower(r"0", r"6"), Interval.atOrAbove(r"1/6")) //fixme
error(Interval.openLower(r"-2", r"1/5"))
error(Interval.openLower(r"-1/9", r"0"))
t(Interval.openLower(r"-70", r"-14"), Interval.openUpper(r"-1/14", r"-1/70"))
// openUpper(x, y)
t(Interval.openUpper(r"1/2", r"4"), Interval.openLower(r"1/4", r"2"))
error(Interval.openUpper(r"0", r"6"))
error(Interval.openUpper(r"-2", r"1/5"))
t(Interval.openUpper(r"-1/9", r"0"), Interval.atOrBelow(r"-9")) //fixme
t(Interval.openUpper(r"-70", r"-14"), Interval.openLower(r"-1/14", r"-1/70"))
// open
t(Interval.open(r"1/2", r"4"), Interval.open(r"1/4", r"2"))
t(Interval.open(r"0", r"6"), Interval.above(r"1/6")) //fixme
error(Interval.open(r"-2", r"1/5"))
t(Interval.open(r"-1/9", r"0"), Interval.below(r"-9")) //fixme
t(Interval.open(r"-70", r"-14"), Interval.open(r"-1/14", r"-1/70"))
// below(x)
error(Interval.below(r"3"))
t(Interval.below(r"0"), Interval.below(r"0")) //fixme
t(Interval.below(r"-1"), Interval.open(r"-1", r"0")) //fixme
// atOrBelow(x)
error(Interval.atOrBelow(r"1/9"))
error(Interval.atOrBelow(r"0"))
t(Interval.atOrBelow(r"-2"), Interval.openUpper(r"-1/2", r"0")) //fixme
}
class IntervalCheck extends PropSpec with Matchers with GeneratorDrivenPropertyChecks {
property("x β x") {
forAll { (x: Interval[Rational]) => (x isSupersetOf x) shouldBe true }
}
property("x β (x | y) && y β (x | y)") {
forAll { (x: Interval[Rational], y: Interval[Rational]) =>
val z = x | y
(z isSupersetOf x) shouldBe true
(z isSupersetOf y) shouldBe true
}
}
property("(x & y) β x && (x & y) β y") {
forAll { (x: Interval[Rational], y: Interval[Rational]) =>
val z = x & y
(x isSupersetOf z) shouldBe true
(y isSupersetOf z) shouldBe true
}
}
val rng = spire.random.GlobalRng
property("(x -- y) β x && (x -- y) & y = Γ") {
forAll { (x: Interval[Rational], y: Interval[Rational]) =>
(x -- y).foreach { zi =>
(zi isSubsetOf x) shouldBe true
(zi intersects y) shouldBe false
}
}
}
property("(x -- Γ) = x") {
forAll { (x: Interval[Rational]) =>
if (x.nonEmpty) {
(x -- Interval.empty[Rational]) shouldBe List(x)
}
}
}
property("(x -- x) = Γ") {
forAll { (x: Interval[Rational]) =>
(x -- x) shouldBe Nil
}
}
property("(x -- (-β, β)) = Γ") {
forAll { (x: Interval[Rational]) =>
(x -- Interval.all[Rational]) shouldBe Nil
}
}
def sample(int: Interval[Rational], n: Int): Array[Rational] =
if (int.isEmpty) {
Array.empty[Rational]
} else {
import spire.math.interval.ValueBound
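// Pick a sampler that matches the interval's shape: bounded intervals mix the endpoints with uniform interior
// points, half-bounded ones spread Gaussian-scaled values away from the known bound, and unbounded ones sample freely.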
val underlyingf: () => Rational = (int.lowerBound, int.upperBound) match {
case (ValueBound(x) , ValueBound(y)) => () => rng.nextInt(10) match {
case 0 => x
case 9 => y
case _ => x + Rational(rng.nextDouble) * (y - x)
}
case (ValueBound(x) , _) => () => rng.nextInt(5) match {
case 0 => x
case _ => x + (Rational(rng.nextGaussian).abs * Long.MaxValue)
}
case (_, ValueBound(y)) => () => rng.nextInt(5) match {
case 4 => y
case _ => y - (Rational(rng.nextGaussian).abs * Long.MaxValue)
}
case (_ , _) => () => Rational(rng.nextGaussian) * Long.MaxValue
}
def nextf(): Rational = {
val r = underlyingf()
if (int.contains(r)) r else nextf()
}
Array.fill(n)(nextf())
}
val tries = 100
def testUnop(f: Interval[Rational] => Interval[Rational])(g: Rational => Rational): Unit = {
forAll { (a: Interval[Rational]) =>
val c: Interval[Rational] = f(a)
sample(a, tries).foreach { x =>
val ok = c.contains(g(x))
if (!ok) println("%s failed on %s" format (a, x.toString))
ok shouldBe true
}
}
}
def testBinop(f: (Interval[Rational], Interval[Rational]) => Interval[Rational])(g: (Rational, Rational) => Rational): Unit = {
forAll { (a: Interval[Rational], b: Interval[Rational]) =>
val c: Interval[Rational] = f(a, b)
sample(a, tries).zip(sample(b, tries)).foreach { case (x, y) =>
if (!a.contains(x)) println("%s does not contain %s" format (a, x))
if (!b.contains(y)) println("%s does not contain %s" format (b, y))
val ok = c.contains(g(x, y))
if (!ok) println("(%s, %s) failed on (%s, %s)" format (a, b, x.toString, y.toString))
ok shouldBe true
}
}
}
property("sampled unop abs") { testUnop(_.abs)(_.abs) }
property("sampled unop -") { testUnop(-_)(-_) }
property("sampled unop pow(2)") { testUnop(_.pow(2))(_.pow(2)) }
property("sampled unop pow(3)") { testUnop(_.pow(3))(_.pow(3)) }
property("sampled binop +") { testBinop(_ + _)(_ + _) }
property("sampled binop -") { testBinop(_ - _)(_ - _) }
property("sampled binop *") { testBinop(_ * _)(_ * _) }
property("sampled binop vmin") { testBinop(_ vmin _)(_ min _) }
property("sampled binop vmax") { testBinop(_ vmax _)(_ max _) }
property("toString/apply") {
forAll { (x: Interval[Rational]) =>
Interval(x.toString) shouldBe x
}
}
property("points compare as scalars") {
import spire.optional.intervalGeometricPartialOrder._
import spire.algebra.{Order, PartialOrder}
forAll { (x: Rational, y: Rational) =>
val a = Interval.point(x)
val b = Interval.point(y)
PartialOrder[Interval[Rational]].tryCompare(a, b).get shouldBe Order[Rational].compare(x, y)
val Some(Point(vmin)) = a.pmin(b)
vmin shouldBe x.min(y)
val Some(Point(vmax)) = a.pmax(b)
vmax shouldBe x.max(y)
}
}
property("(-inf, a] < [b, inf) if a < b") {
import spire.optional.intervalGeometricPartialOrder._
import spire.algebra.{Order, PartialOrder}
forAll { (a: Rational, w: Positive[Rational]) =>
val b = a + w.num
// a < b
val i = Interval.atOrBelow(a)
val j = Interval.atOrAbove(b)
(i < j) shouldBe true
(i >= j) shouldBe false
(j > i) shouldBe true
(j <= i) shouldBe false
}
}
property("(-inf, a] does not compare to [b, inf) if a >= b") {
import spire.optional.intervalGeometricPartialOrder._
import spire.algebra.{Order, PartialOrder}
forAll { (a: Rational, w: NonNegative[Rational]) =>
val b = a - w.num
// a >= b
val i = Interval.atOrBelow(a)
val j = Interval.atOrAbove(b)
i.partialCompare(j).isNaN shouldBe true
j.partialCompare(i).isNaN shouldBe true
}
}
property("(-inf, inf) does not compare with [a, b]") {
import spire.optional.intervalGeometricPartialOrder._
import spire.algebra.{Order, PartialOrder}
forAll { (a: Rational, b: Rational) =>
val i = Interval.all[Rational]
val j = Interval.closed(a, b)
i.partialCompare(j).isNaN shouldBe true
j.partialCompare(i).isNaN shouldBe true
}
}
property("empty intervals are equal") {
forAll { (x: Rational, y: Rational) =>
import spire.algebra.Eq
val a = Interval.open(x, x)
val b = Interval.open(y, y)
val c = Interval.openUpper(x, x)
val d = Interval.openLower(x, x)
val e = Interval.empty[Rational]
a shouldBe e
a shouldBe b
b shouldBe e
c shouldBe e
d shouldBe e
e shouldBe e
Eq[Interval[Rational]].eqv(a, e) shouldBe true
Eq[Interval[Rational]].eqv(a, b) shouldBe true
Eq[Interval[Rational]].eqv(b, e) shouldBe true
Eq[Interval[Rational]].eqv(c, e) shouldBe true
Eq[Interval[Rational]].eqv(d, e) shouldBe true
Eq[Interval[Rational]].eqv(e, e) shouldBe true
}
}
}
class IntervalIteratorCheck extends PropSpec with Matchers with GeneratorDrivenPropertyChecks {
property("bounded intervals are ok") {
forAll { (n1: Rational, n2: Rational, num0: Byte) =>
val (x, y) = if (n1 <= n2) (n1, n2) else (n2, n1)
val num = ((num0 & 255) % 13) + 1
def testEndpoints(interval: Interval[Rational], step: Rational, hasLower: Boolean, hasUpper: Boolean): Unit = {
val ns = interval.iterator(step).toSet
ns(x) shouldBe hasLower
ns(y) shouldBe hasUpper
val extra = if (hasLower && hasUpper) 2 else if (hasLower || hasUpper) 1 else 0
ns.size shouldBe (num - 1 + extra)
}
val cc = Interval.closed(x, y) // [x, y]
val oo = Interval.open(x, y) // (x, y)
val oc = Interval.openLower(x, y) // (x, y]
val co = Interval.openUpper(x, y) // [x, y)
val step = (y - x) / num
if (step.isZero) {
List(cc, oo, oc, co).foreach { xs =>
Try(xs.iterator(0)).isFailure shouldBe true
}
} else {
val triples = List((cc, true, true), (oo, false, false), (oc, false, true), (co, true, false))
triples.foreach { case (interval, hasLower, hasUpper) =>
testEndpoints(interval, step, hasLower, hasUpper)
testEndpoints(interval, -step, hasLower, hasUpper)
}
}
}
}
property("half-unbound intervals are ok") {
forAll { (n: Rational, s: Rational) =>
val step0 = s.abs
val cu = Interval.atOrAbove(n) // [n, ∞)
val ou = Interval.above(n) // (n, ∞)
val uc = Interval.atOrBelow(n) // (-∞, n]
val uo = Interval.below(n) // (-∞, n)
if (step0.isZero) {
List(cu, ou, uc, uo).foreach { xs =>
Try(xs.iterator(0)).isFailure shouldBe true
}
} else {
val triples = List((cu, true, 1), (ou, false, 1), (uc, true, -1), (uo, false, -1))
triples.foreach { case (interval, hasN, mult) =>
val step = step0 * mult
val it = interval.iterator(step)
val expected = if (hasN) n else n + step
it.next() shouldBe expected
Try(interval.iterator(-step)).isFailure shouldBe true
}
}
}
}
property("unbound intervals are not supported") {
forAll { (step: Rational) =>
Try(Interval.all[Rational].iterator(step)).isFailure shouldBe true
}
}
}
| JackSullivan/spire | tests/src/test/scala/spire/math/IntervalTest.scala | Scala | mit | 19,334 |
package almhirt.domain
import scala.language.postfixOps
import java.time.{ ZonedDateTime, LocalDateTime }
import scala.concurrent._
import scala.concurrent.duration._
import org.reactivestreams.Subscriber
import akka.actor._
import almhirt.common._
import almhirt.streaming._
import akka.stream.scaladsl._
import akka.testkit._
import org.scalatest._
import almhirt.context.AlmhirtContext
class AggregateRootNexusTests(_system: ActorSystem)
extends TestKit(_system) with fixture.WordSpecLike with Matchers with BeforeAndAfterAll {
def this() = this(ActorSystem("AggregateRootNexusTests", almhirt.TestConfigs.logErrorConfig))
implicit val executionContext = system.dispatchers.defaultGlobalDispatcher
implicit val ccuad = CanCreateUuidsAndDateTimes()
implicit def implicitFlowMaterializer = akka.stream.ActorMaterializer()(_system)
val counter = new java.util.concurrent.atomic.AtomicInteger(1)
def nextCounter = counter.getAndIncrement()
val rnd = new scala.util.Random()
def createId(pre: Int): String = {
s"${pre}-${rnd.nextInt(1000)}"
}
private case class StatusEventResults(initiated: Int, executed: Int, failed: Int) {
override def toString(): String =
s"(initiated: $initiated, executed: $executed, failed: $failed)"
}
"The AggregateRootNexus" when {
import almhirt.eventlog.AggregateRootEventLog._
import almhirt.aggregates._
import aggregatesforthelazyones._
import almhirt.tracking._
def splitStatusEvents(events: Seq[Any]): (Seq[CommandStatusChanged], Seq[CommandStatusChanged], Seq[CommandStatusChanged]) =
events.collect { case x: CommandStatusChanged ⇒ x }.foldLeft((Seq[CommandStatusChanged](), Seq[CommandStatusChanged](), Seq[CommandStatusChanged]())) {
case ((a, b, c), cur) ⇒
cur match {
case x @ CommandStatusChanged(_, _, CommandStatus.Initiated) ⇒ (a :+ x, b, c)
case x @ CommandStatusChanged(_, _, CommandStatus.Executed) ⇒ (a, b :+ x, c)
case x @ CommandStatusChanged(_, _, CommandStatus.NotExecuted(_)) ⇒ (a, b, c :+ x)
}
}
def assertStatusEvents(initiated: Int, ok: Int, failed: Int, events: Seq[Any]) {
val (i, o, f) = splitStatusEvents(events)
StatusEventResults(i.size, o.size, f.size) should equal(StatusEventResults(initiated, ok, failed))
}
"receiving valid commands" when {
"an aggregate root is created" should {
"should emit the status events [Initiated, Executed]" in { fixture β
val FixtureParam(testId, commandSubscriber, eventlog, streams) = fixture
val statusProbe = TestProbe()
Source.fromPublisher(streams.eventStream).collect { case e: SystemEvent ⇒ e }.to(Sink.actorRef(statusProbe.ref, "done")).run()
within(15 seconds) {
Source[Command](CreateUser(CommandHeader(), createId(1), 0L, "hans", "meier") :: Nil).to(Sink.actorRef(commandSubscriber, "done")).run()
statusProbe.expectMsgType[CommandStatusChanged].status should equal(CommandStatus.Initiated)
statusProbe.expectMsgType[CommandStatusChanged].status should equal(CommandStatus.Executed)
}
}
}
"2 aggregate roots are created" should {
"emit the status events [Initiated(a), Executed(a)] and [Initiated(b), Executed(b)]" in { fixture β
val FixtureParam(testId, commandSubscriber, eventlog, streams) = fixture
val statusProbe = TestProbe()
Source.fromPublisher(streams.eventStream).collect { case e: SystemEvent ⇒ e }.to(Sink.actorRef(statusProbe.ref, "done")).run()
within(15 seconds) {
Source[Command](List(
CreateUser(CommandHeader(), createId(1), 0L, "hans", "meier"),
CreateUser(CommandHeader(), createId(2), 0L, "hans", "meier"))).to(Sink.actorRef(commandSubscriber, "done")).run()
assertStatusEvents(initiated = 2, ok = 2, failed = 0, statusProbe.receiveN(2 * 2, 30 seconds))
}
}
}
// val nn = 100
// val ids = (1 to nn).map(createId).toVector
// val n = ids.size
// s"$n aggregate roots are created" should {
// s"emit the status events [Initiated, Executed] $n times" in { fixture β
// val FixtureParam(testId, commandSubscriber, eventlog, streams) = fixture
// val statusProbe = TestProbe()
// Source.fromPublisher(streams.eventStream).collect { case e: SystemEvent ⇒ e }.to(Sink.fromSubscriber(DelegatingSubscriber[SystemEvent](statusProbe.ref))).run()
// val start = Deadline.now
// within(15 seconds) {
// Source[Command](ids.toStream.map(id ⇒ CreateUser(CommandHeader(), id, 0L, "hans", "meier"))).to(Sink.fromSubscriber(commandSubscriber)).run()
// assertStatusEvents(initiated = n, ok = n, failed = 0, statusProbe.receiveN(n * 2, 30 seconds))
// val time = start.lap
// info(s"Dispatched ${n} in ${start.lap.defaultUnitString}((${(n * 1000).toDouble / time.toMillis}/s)).")
// }
// }
// }
// s"$n aggregate roots are created and then updated" should {
// s"emit the status events ([Initiated(a), Executed]x2) $n times" in { fixture β
// val FixtureParam(testId, commandSubscriber, eventlog, streams) = fixture
// val statusProbe = TestProbe()
// Source.fromPublisher(streams.eventStream).collect { case e: SystemEvent ⇒ e }.to(Sink.actorRef(statusProbe.ref, "done")).run()
// val flow = Source[Command](ids.toStream.map(id ⇒ CreateUser(CommandHeader(), id, 0L, "hans", "meier"): AggregateRootCommand) ++
// ids.toStream.map(id ⇒ ChangeUserLastname(CommandHeader(), id, 1L, "müller"): AggregateRootCommand))
// val start = Deadline.now
// within(15 seconds) {
// flow.to(Sink.fromSubscriber(commandSubscriber)).run()
// assertStatusEvents(initiated = 2 * n, ok = 2 * n, failed = 0, statusProbe.receiveN(2 * n * 2, 30 seconds))
// val time = start.lap
// info(s"Dispatched ${n} commands in ${start.lap.defaultUnitString}((${(n * 1000).toDouble / time.toMillis}/s)).")
// }
// }
// }
// s"$n aggregate roots are created, updated and then deleted" should {
// s"emit the status events ([Initiated, Executed]x3) $n times" in { fixture β
// val FixtureParam(testId, commandSubscriber, eventlog, streams) = fixture
// val statusProbe = TestProbe()
// Source.fromPublisher(streams.eventStream).collect { case e: SystemEvent ⇒ e }.to(Sink.fromSubscriber(DelegatingSubscriber[SystemEvent](statusProbe.ref))).run()
// val flow = Source[Command](ids.toStream.map(id ⇒ CreateUser(CommandHeader(), id, 0L, "hans", "meier"): AggregateRootCommand) ++
// ids.toStream.map(id ⇒ ChangeUserLastname(CommandHeader(), id, 1L, "müller"): AggregateRootCommand) ++
// ids.toStream.map(id ⇒ ConfirmUserDeath(CommandHeader(), id, 2L): AggregateRootCommand))
// val start = Deadline.now
// within(30 seconds) {
// flow.to(Sink.fromSubscriber(commandSubscriber)).run()
// assertStatusEvents(initiated = 3 * n, ok = 3 * n, failed = 0, statusProbe.receiveN(3 * n * 2, 30 seconds))
// val time = start.lap
// info(s"Dispatched ${n} commands in ${start.lap.defaultUnitString}((${(n * 1000).toDouble / time.toMillis}/s)).")
// }
// }
// val bigN = 500
// s"$bigN aggregate roots are created, updated and then deleted" should {
// s"emit the status events ([Initiated, Executed]x3) $bigN times" in { fixture β
// val FixtureParam(testId, commandSubscriber, eventlog, streams) = fixture
// val statusProbe = TestProbe()
// Source.fromPublisher(streams.eventStream).collect { case e: SystemEvent ⇒ e }.to(Sink.fromSubscriber(DelegatingSubscriber[SystemEvent](statusProbe.ref))).run()
// val flow = Source[Command]((1 to bigN).toSeq.map(id ⇒ CreateUser(CommandHeader(), s"$id", 0L, s"hans-$id", s"meier-$id"): AggregateRootCommand) ++
// (1 to bigN).toSeq.map(id ⇒ ChangeUserLastname(CommandHeader(), s"$id", 1L, s"müller-$id"): AggregateRootCommand) ++
// (1 to bigN).toSeq.map(id ⇒ ConfirmUserDeath(CommandHeader(), s"$id", 2L): AggregateRootCommand))
// val start = Deadline.now
// within(30 seconds) {
// flow.to(Sink.fromSubscriber(commandSubscriber)).run()
// assertStatusEvents(initiated = 3 * bigN, ok = 3 * bigN, failed = 0, statusProbe.receiveN(3 * bigN * 2, 30 seconds))
// }
// val time = start.lap
// info(s"Dispatched ${bigN} commands in ${start.lap.defaultUnitString}((${(bigN * 1000).toDouble / time.toMillis}/s)).")
// }
// }
// }
}
}
private val currentTestId = new java.util.concurrent.atomic.AtomicInteger(1)
def nextTestId = currentTestId.getAndIncrement()
case class FixtureParam(
testId: Int,
commandSubscriber: ActorRef,
eventlog: ActorRef,
streams: AlmhirtStreams)
def withFixture(test: OneArgTest) = {
import scalaz.syntax.validation._
import almhirt.aggregates._
import almhirt.akkax._
val testId = nextTestId
//info(s"Test $testId")
val eventlogProps: Props = almhirt.eventlog.InMemoryAggregateRootEventLog.props()
val eventlogActor: ActorRef = system.actorOf(eventlogProps, s"eventlog-$testId")
implicit val almhirtContext = AlmhirtContext.TestContext.noComponentsDefaultGlobalDispatcher(s"almhirt-context-$testId", AggregateRootNexusTests.this.ccuad, 5.seconds.dilated).awaitResultOrEscalate(5.seconds.dilated)
def droneProps(ars: ActorRef, ss: Option[SnapshottingForDrone]): Props = Props(
new AggregateRootDrone[User, UserEvent] with ActorLogging with UserEventHandler with UserCommandHandler with UserUpdater with AggregateRootDroneCommandHandlerAdaptor[User, UserCommand, UserEvent] {
def ccuad = AggregateRootNexusTests.this.ccuad
val arTag = scala.reflect.ClassTag[User](classOf[User])
val snapshotting = ss
def futuresContext: ExecutionContext = executionContext
def aggregateEventLog: ActorRef = ars
val eventsBroker: StreamBroker[Event] = almhirtContext.eventBroker
val notifyHiveAboutUndispatchedEventsAfter: Option[FiniteDuration] = None
val notifyHiveAboutUnstoredEventsAfterPerEvent: Option[FiniteDuration] = None
def retryEventLogActionDelay: Option[FiniteDuration] = None
val preStoreActionFor = (e: UserEvent) ⇒ PreStoreEventAction.NoAction
val returnToUnitializedAfter = None
override val aggregateCommandValidator = AggregateRootCommandValidator.Validated
override val tag = scala.reflect.ClassTag[UserCommand](classOf[UserCommand])
})
val droneFactory = new AggregateRootDroneFactory {
import scalaz._, Scalaz._
def propsForCommand(command: AggregateRootCommand, ars: ActorRef, snapshotting: Option[(ActorRef, almhirt.snapshots.SnapshottingPolicyProvider)]): AlmValidation[Props] = {
command match {
case c: UserCommand ⇒
snapshotting match {
case None ⇒ droneProps(ars, None).success
case Some((repo, provider)) ⇒ provider.apply("user").map(policy ⇒ droneProps(ars, Some(SnapshottingForDrone(repo, policy))))
}
case x ⇒
NoSuchElementProblem(s"I don't have props for command $x").failure
}
}
}
def hiveProps(descriptor: HiveDescriptor) = Props(
new AggregateRootHive(
descriptor,
NoResolvingRequired(eventlogActor),
None,
ResolveSettings.default,
maxParallelism = 4,
droneFactory = droneFactory,
almhirtContext.eventBroker,
enqueuedEventsThrottlingThreshold = 8))
val hiveFactory = new AggregateRootHiveFactory {
def props(descriptor: HiveDescriptor): AlmValidation[Props] = hiveProps(descriptor).success
}
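// Spread commands over five hives, selected by the aggregate id's hash code modulo 5.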
val hiveSelector: Seq[(HiveDescriptor, AggregateRootCommand ⇒ Boolean)] =
Seq(
(HiveDescriptor("0"), cmd ⇒ Math.abs(cmd.aggId.hashCode % 5) == 0),
(HiveDescriptor("1"), cmd ⇒ Math.abs(cmd.aggId.hashCode % 5) == 1),
(HiveDescriptor("2"), cmd ⇒ Math.abs(cmd.aggId.hashCode % 5) == 2),
(HiveDescriptor("3"), cmd ⇒ Math.abs(cmd.aggId.hashCode % 5) == 3),
(HiveDescriptor("4"), cmd ⇒ Math.abs(cmd.aggId.hashCode % 5) == 4))
val nexusProps = AggregateRootNexus.propsRaw(hiveSelector, hiveFactory)(almhirtContext)
val nexusActor = system.actorOf(nexusProps, s"nexus-$testId")
val nexusSubscriber = akka.stream.actor.ActorSubscriber[Command](nexusActor)
Thread.sleep(200)
try {
withFixture(test.toNoArgTest(FixtureParam(testId, nexusActor, eventlogActor, almhirtContext)))
} finally {
system.stop(nexusActor)
system.stop(eventlogActor)
almhirtContext.stop()
}
}
override def beforeAll() {
}
override def afterAll() {
TestKit.shutdownActorSystem(system)
}
}
| chridou/almhirt | almhirt-core/src/test/scala/almhirt/domain/AggregateRootNexusTests.scala | Scala | apache-2.0 | 13,517 |
object Test {
def main(args: Array[String]): Unit = {
println(sum(1, 2, 3))
}
inline def sum(inline i: Int, inline j: Int, inline k: Int): Int = ${ Macros.sum('i, 'j, 'k) }
}
| som-snytt/dotty | tests/run-macros/inline-varargs-1/Main_2.scala | Scala | apache-2.0 | 188 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.tools
import org.scalatest._
import org.scalactic.Requirements._
import java.util.concurrent.ExecutorService
import java.util.concurrent.Future
import java.util.concurrent.LinkedBlockingQueue
/**
* This Distributor can be used by multiple threads.
*
* @author Bill Venners
*/
private[scalatest] class ConcurrentDistributor(args: Args, execSvc: ExecutorService) extends Distributor {
private val futureQueue = new LinkedBlockingQueue[Future[_]]
def apply(suite: Suite, tracker: Tracker): Unit = {
apply(suite, args.copy(tracker = tracker))
}
def apply(suite: Suite, args: Args): Status = {
requireNonNull(suite, args)
val status = new ScalaTestStatefulStatus
val suiteRunner = new SuiteRunner(suite, args, status)
val future: Future[_] = execSvc.submit(suiteRunner)
futureQueue.put(future)
status
}
def poll() = None
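// Drain the queue, blocking on each submitted suite's Future until all of them have completed.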
def waitUntilDone(): Unit = {
while (futureQueue.peek != null)
futureQueue.poll().get()
}
}
| dotty-staging/scalatest | scalatest/src/main/scala/org/scalatest/tools/ConcurrentDistributor.scala | Scala | apache-2.0 | 1,597 |
package rere.ql.queries
import rere.ql.options.all._
import rere.ql.options.{ComposableOptions, Options}
import rere.ql.ql2.Term.TermType
import rere.ql.typeclasses.{DatumSelector, Transmuter}
import rere.ql.types._
trait JoinQueries {
// inner_join
trait InnerJoinInfiniteStreamLikeQuery[T <: ReqlDatum] extends ReqlInfiniteStream[T]
trait InnerJoinFiniteStreamLikeQuery[T <: ReqlDatum] extends ReqlFiniteStream[T]
trait InnerJoinArrayLikeQuery[T <: ReqlDatum] extends ReqlArray[T]
implicit class InnerJoinOnInfiniteStreamLikeOp[T <: ReqlValue : Transmuter](val infiniteStreamLike: ReqlInfiniteStreamLike[T]) {
def innerJoin[
U <: ReqlValue : Transmuter
](
otherSeq: ReqlFiniteSequence[U],
predicate: (T, U) => ReqlBoolean
): InnerJoinInfiniteStreamLikeQuery[ReqlJoinResult[T, U]] = new InnerJoinInfiniteStreamLikeQuery[ReqlJoinResult[T, U]] {
val command = TermType.INNER_JOIN
val string = "inner_join"
val arguments = infiniteStreamLike :: otherSeq :: Func.wrap2(predicate) :: Nil
val options = Options.empty
}
}
implicit class InnerJoinOnFiniteStreamLikeOp[T <: ReqlValue : Transmuter](val finiteStreamLike: ReqlFiniteStreamLike[T]) {
def innerJoin[
U <: ReqlValue : Transmuter
](
otherSeq: ReqlFiniteSequence[U],
predicate: (T, U) => ReqlBoolean
): InnerJoinFiniteStreamLikeQuery[ReqlJoinResult[T, U]] = new InnerJoinFiniteStreamLikeQuery[ReqlJoinResult[T, U]] {
val command = TermType.INNER_JOIN
val string = "inner_join"
val arguments = finiteStreamLike :: otherSeq :: Func.wrap2(predicate) :: Nil
val options = Options.empty
}
}
implicit class InnerJoinOnArrayLikeOp[T <: ReqlDatum : Transmuter](val arrayLike: ReqlFiniteArrayLike[T]) {
def innerJoin[
U <: ReqlDatum : Transmuter
](
otherSeq: ReqlFiniteSequence[U],
predicate: (T, U) => ReqlBoolean
): InnerJoinArrayLikeQuery[ReqlJoinResult[T, U]] = new InnerJoinArrayLikeQuery[ReqlJoinResult[T, U]] {
val command = TermType.INNER_JOIN
val string = "inner_join"
val arguments = arrayLike :: otherSeq :: Func.wrap2(predicate) :: Nil
val options = Options.empty
}
}
// outer_join
trait OuterJoinInfiniteStreamLikeQuery[T <: ReqlDatum] extends ReqlInfiniteStream[T]
trait OuterJoinFiniteStreamLikeQuery[T <: ReqlDatum] extends ReqlFiniteStream[T]
trait OuterJoinArrayLikeQuery[T <: ReqlDatum] extends ReqlArray[T]
implicit class OuterJoinOnInfiniteStreamLikeOp[T <: ReqlDatum : Transmuter](val infiniteStreamLike: ReqlInfiniteStreamLike[T]) {
def outerJoin[
U <: ReqlDatum : Transmuter
](
otherSeq: ReqlFiniteSequence[U],
predicate: (T, U) => ReqlBoolean
): OuterJoinInfiniteStreamLikeQuery[ReqlJoinResult[T, U]] = new OuterJoinInfiniteStreamLikeQuery[ReqlJoinResult[T, U]] {
val command = TermType.OUTER_JOIN
val string = "outer_join"
val arguments = infiniteStreamLike :: otherSeq :: Func.wrap2(predicate) :: Nil
val options = Options.empty
}
}
implicit class OuterJoinOnFiniteStreamLikeOp[T <: ReqlDatum : Transmuter](val finiteStreamLike: ReqlFiniteStreamLike[T]) {
def outerJoin[
U <: ReqlDatum : Transmuter
](
otherSeq: ReqlFiniteSequence[U],
predicate: (T, U) => ReqlBoolean
): OuterJoinFiniteStreamLikeQuery[ReqlJoinResult[T, U]] = new OuterJoinFiniteStreamLikeQuery[ReqlJoinResult[T, U]] {
val command = TermType.OUTER_JOIN
val string = "outer_join"
val arguments = finiteStreamLike :: otherSeq :: Func.wrap2(predicate) :: Nil
val options = Options.empty
}
}
implicit class OuterJoinOnArrayLikeOp[T <: ReqlDatum : Transmuter](val arrayLike: ReqlFiniteArrayLike[T]) {
def outerJoin[
U <: ReqlDatum : Transmuter
](
otherSeq: ReqlFiniteSequence[U],
predicate: (T, U) => ReqlBoolean
): OuterJoinArrayLikeQuery[ReqlJoinResult[T, U]] = new OuterJoinArrayLikeQuery[ReqlJoinResult[T, U]] {
val command = TermType.OUTER_JOIN
val string = "outer_join"
val arguments = arrayLike :: otherSeq :: Func.wrap2(predicate) :: Nil
val options = Options.empty
}
}
// eq_join
trait EqJoinInfiniteStreamLikeQuery[T <: ReqlDatum] extends ReqlInfiniteStream[T]
trait EqJoinFiniteStreamLikeQuery[T <: ReqlDatum] extends ReqlFiniteStream[T]
trait EqJoinArrayLikeQuery[T <: ReqlDatum] extends ReqlArray[T]
//TODO: maybe make the index argument explicit and remove the selector wrapper ??? passing the default index is easy; finding the wrapped one is hard
implicit class EqJoinOnInfiniteStreamLikeOp[T0 <: ReqlDatum](val infiniteStreamLike: ReqlInfiniteStreamLike[T0]) {
def eqJoin[T1, PK1 <: PrimaryKey, Selected <: ReqlDatum](
selector: DatumSelector[T0, Selected],
secondTable: ReqlTable[T1, PK1],
secondTableIndex: IndexOptions = DefaultIndex,
ordered: OrderingOptions = NotOrdered
): EqJoinInfiniteStreamLikeQuery[ReqlJoinResult[T0, T1]] = new EqJoinInfiniteStreamLikeQuery[ReqlJoinResult[T0, T1]] {
val command = TermType.EQ_JOIN
val string = "eq_join"
val arguments = infiniteStreamLike :: selector :: secondTable :: Nil
val options = ComposableOptions.compose(secondTableIndex, ordered)
}
}
implicit class EqJoinOnFiniteStreamLikeOp[T0 <: ReqlDatum](val finiteStreamLike: ReqlFiniteStreamLike[T0]) {
def eqJoin[T1, PK1 <: PrimaryKey, Selected <: ReqlDatum](
selector: DatumSelector[T0, Selected],
secondTable: ReqlTable[T1, PK1],
secondTableIndex: IndexOptions = DefaultIndex,
ordered: OrderingOptions = NotOrdered
): EqJoinFiniteStreamLikeQuery[ReqlJoinResult[T0, T1]] = new EqJoinFiniteStreamLikeQuery[ReqlJoinResult[T0, T1]] {
val command = TermType.EQ_JOIN
val string = "eq_join"
val arguments = finiteStreamLike :: selector :: secondTable :: Nil
val options = ComposableOptions.compose(secondTableIndex, ordered)
}
}
implicit class EqJoinOnArrayLikeOp[T0 <: ReqlDatum](val arrayLike: ReqlFiniteArrayLike[T0]) {
def eqJoin[T1, PK1 <: PrimaryKey, Selected <: ReqlDatum](
selector: DatumSelector[T0, Selected],
table: ReqlTable[T1, PK1],
tableIndex: IndexOptions = DefaultIndex,
ordered: OrderingOptions = NotOrdered
): EqJoinArrayLikeQuery[ReqlJoinResult[T0, T1]] = new EqJoinArrayLikeQuery[ReqlJoinResult[T0, T1]] {
val command = TermType.EQ_JOIN
val string = "eq_join"
val arguments = arrayLike :: selector :: table :: Nil
val options = ComposableOptions.compose(tableIndex, ordered)
}
}
// zip
//TODO: maybe zip should be allowed only on join results, not on generic streams and arrays
// TODO: Documentation says that zip works only on join results - stream and array. In practice it can work on any
// sequence if all elements have both "left" and "right" fields.
trait ZipInfiniteStreamQuery[T <: ReqlDatum] extends ReqlInfiniteStream[T]
trait ZipFiniteStreamQuery[T <: ReqlDatum] extends ReqlFiniteStream[T]
trait ZipArrayQuery[T <: ReqlDatum] extends ReqlArray[T]
//TODO: special input type LeftRight[T, U]
implicit class ZipOnInfiniteStreamOp[
LeftType, RightType, Zipped <: ReqlDatum
](val infiniteStream: ReqlInfiniteStream[ReqlJoinResult[LeftType, RightType]]) {
def zip(): ZipInfiniteStreamQuery[Zipped] = new ZipInfiniteStreamQuery[Zipped] {
val command = TermType.ZIP
val string = "zip"
val arguments = infiniteStream :: Nil
val options = Options.empty
}
}
implicit class ZipOnFiniteStreamOp[
LeftType, RightType, Zipped <: ReqlDatum
](val finiteStream: ReqlFiniteStream[ReqlJoinResult[LeftType, RightType]]) {
def zip(): ZipFiniteStreamQuery[Zipped] = new ZipFiniteStreamQuery[Zipped] {
val command = TermType.ZIP
val string = "zip"
val arguments = finiteStream :: Nil
val options = Options.empty
}
}
implicit class ZipOnArrayOp[
LeftType, RightType, Zipped <: ReqlDatum
](val array: ReqlArray[ReqlJoinResult[LeftType, RightType]]) {
def zip(): ZipArrayQuery[Zipped] = new ZipArrayQuery[Zipped] {
val command = TermType.ZIP
val string = "zip"
val arguments = array :: Nil
val options = Options.empty
}
}
}
| pbaun/rere | modules/ql/src/main/scala/rere/ql/queries/JoinQueries.scala | Scala | apache-2.0 | 8,283 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.utils.tf.loaders
import java.nio.ByteOrder
import com.intel.analytics.bigdl.Module
import com.intel.analytics.bigdl.dllib.nn.ops.{TruncateDiv => TruncateDivOps}
import com.intel.analytics.bigdl.dllib.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.dllib.utils.tf.Context
import org.tensorflow.framework.{DataType, NodeDef}
import scala.reflect.ClassTag
class TruncateDiv extends TensorflowOpsLoader {
import Utils._
override def build[T: ClassTag](nodeDef: NodeDef, byteOrder: ByteOrder, context: Context[T])
(implicit ev: TensorNumeric[T]): Module[T] = {
val t = getType(nodeDef.getAttrMap, "T")
if (t == DataType.DT_INT32) {
TruncateDivOps[T, Int]()
} else {
throw new UnsupportedOperationException(s"Not support load TruncateDiv when type is ${t}")
}
}
}
| intel-analytics/BigDL | scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/utils/tf/loaders/TruncateDiv.scala | Scala | apache-2.0 | 1,468 |
package com.softwaremill.macwire.dependencyLookup
import com.softwaremill.macwire.{PositionUtil, Debug, TypeCheckUtil}
import scala.annotation.tailrec
import scala.reflect.macros.blackbox.Context
private[dependencyLookup] class ValuesOfTypeInParentsFinder[C <: Context](val c: C, debug: Debug) {
import c.universe._
private val typeCheckUtil = new TypeCheckUtil[c.type](c, debug)
private val positionUtil = new PositionUtil[c.type](c)
def find(t: Type, implicitValue: Option[Tree]): List[Tree] = {
def checkCandidate(tpt: Type): Boolean = {
val typesToCheck = tpt :: (tpt match {
case NullaryMethodType(resultType) => List(resultType)
case MethodType(_, resultType) => List(resultType)
case _ => Nil
})
typesToCheck.exists(ty => ty <:< t && typeCheckUtil.isNotNullOrNothing(ty))
}
def findInParent(parent: Tree): Set[Name] = {
debug.withBlock(s"Checking parent: [$parent]") {
val parentType = if (parent.tpe == null) {
debug("Parent type is null. Creating an expression of parent's type and type-checking that expression ...")
/*
It sometimes happens that the parent type is not yet calculated; this seems to be the case if for example
the parent is in the same compilation unit, but different package.
To get the type we need to invoke type-checking on some expression that has the type of the parent. There's
a lot of expressions to choose from, here we are using the expression "identity[<parent>](null)".
In order to construct the tree, we borrow some elements from a reified expression for String. To get the
desired expression we need to swap the String part with parent.
*/
typeCheckUtil.typeCheckExpressionOfType(parent)
} else {
parent.tpe
}
val names: Set[String] = parentType.members.filter { symbol =>
// filter out values already found by implicitValuesFinder
implicitValue.map(iv => !positionUtil.samePosition(iv.symbol.pos, symbol.pos)).getOrElse(true) &&
checkCandidate(symbol.typeSignature)
}.map { symbol =>
// For (lazy) vals, the names have a space at the end of the name (probably some compiler internals).
// Hence the trim.
symbol.name.decodedName.toString.trim()
}(collection.breakOut)
if (names.size > 0) {
debug(s"Found ${names.size} matching name(s): [${names.mkString(", ")}]")
}
names.map(TermName(_))
}
}
@tailrec
def findInParents(parents: List[Tree], acc: Set[Name]): Set[Name] = {
parents match {
case Nil => acc
case parent :: tail => findInParents(tail, findInParent(parent) ++ acc)
}
}
val parents = c.enclosingClass match {
case ClassDef(_, _, _, Template(pp, _, _)) => pp
case ModuleDef(_, _, Template(pp, _, _)) => pp
case e =>
c.error(c.enclosingPosition, s"Unknown type of enclosing class: ${e.getClass}")
Nil
}
findInParents(parents, Set()).map(Ident(_))(collection.breakOut)
}
}
| rcirka/macwire | macros/src/main/scala/com/softwaremill/macwire/dependencyLookup/ValuesOfTypeInParentsFinder.scala | Scala | apache-2.0 | 3,171 |
import collection.immutable._
// ticket #3508
object Test {
def main(args: Array[String]) {
assert(Stream.tabulate(123)(_ + 1).toList == List.tabulate(123)(_ + 1))
}
}
| felixmulder/scala | test/files/run/t3508.scala | Scala | bsd-3-clause | 180 |
package com.github.vitalsoftware.scalaredox.models
import java.time.LocalDate
import com.github.vitalsoftware.macros.jsonDefaults
import com.github.vitalsoftware.util.RobustPrimitives
import com.github.vitalsoftware.util.JsonImplicits.jodaISO8601Format
import org.joda.time.DateTime
import play.api.libs.json._
/**
* @param Source Source of the specimen. [Allowed values](http://phinvads.cdc.gov/vads/ViewValueSet.action?id=C9271C18-7B67-DE11-9B52-0015173D1785)
* @param BodySite Body site from which the specimen was collected. [Allowed values](http://www.hl7.org/FHIR/v2/0163/index.html)
* @param ID ID of the collected specimen
*/
@jsonDefaults case class Specimen(
Source: Option[String] = None,
BodySite: Option[String] = None,
ID: Option[String] = None
)
object Specimen extends RobustPrimitives
object OrderPriorityTypes extends Enumeration {
val Stat = Value("Stat")
val ASAP = Value("ASAP")
val Routine = Value("Routine")
val Preoperative = Value("Preoperative")
val TimingCritical = Value("Timing Critical")
val Other = Value("Other")
def defaultValue = Other
lazy val mappings = Map(
"ST" -> "Stat",
"RT" -> "Routine"
)
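// Reads first maps known HL7 abbreviations (e.g. "ST", "RT") to their full names and then parses the enum by name; Writes emits the enum name unchanged.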
@transient implicit lazy val jsonFormat: Format[OrderPriorityTypes.Value] = Format(
Reads {
case JsString(v) => JsSuccess(JsString(mappings.getOrElse(v, v)))
case _ => JsError("error.expected.jsstring")
} andThen Reads.enumNameReads(OrderPriorityTypes),
Writes.enumNameWrites
)
}
/**
* List of supplementary clinical information associated with the order. Often these are answers to Ask at Order Entry (AOE) questions.
*
* @param Code Code for the information element
* @param Codeset Code set used to identify the information element. Codeset will be blank for system-defined codes. LOINC is used for a subset of AOE questions.
* @param Description Description of the information element. For AOEs, this is typically the text of the AOE question
* @param Value Value of the information element. For AOEs, this is typically the full answer
* @param Units Units of the value. If the Value is a time range, this may be "WK"
* @param Abbreviation Abbreviation of the value of the information element. Typically only present for text answer AOEs
* @param Notes Notes related to the clinical info
*/
@jsonDefaults case class ClinicalInfo(
Code: Option[String] = None,
Codeset: Option[String] = None,
Description: Option[String] = None,
Value: Option[String] = None,
Units: Option[String] = None,
Abbreviation: Option[String] = None,
Notes: Seq[String] = Seq.empty
) extends Codeset
object ClinicalInfo extends RobustPrimitives
/** The "Producer" is typically the Lab which did the resulting. */
@jsonDefaults case class OrderProducer(
ID: Option[String] = None,
IDType: Option[String] = None,
Name: Option[String] = None,
Address: Option[Address] = None
)
object OrderProducer extends RobustPrimitives
/**
* @param NPI A National Provider Identifier or NPI is a unique 10-digit identification number issued to health care providers in the United States
*/
@jsonDefaults case class OrderProvider(
NPI: Option[String] = None,
FirstName: Option[String] = None,
LastName: Option[String] = None,
Type: Option[String] = None,
Credentials: Seq[String] = Seq.empty,
Address: Option[Address] = None,
Location: Option[CareLocation] = None,
PhoneNumber: Option[PhoneNumber] = None,
EmailAddresses: Seq[String] = Seq.empty,
Role: Option[BasicCode] = None
) extends ProviderLike {
def ID: Option[String] = NPI
def IDType: Option[String] = Some("NPI")
}
object OrderProvider extends RobustPrimitives
/** Facility this order was placed in */
@jsonDefaults case class OrderingFacility(
Name: Option[String] = None,
Address: Option[Address] = None,
PhoneNumber: Option[String] = None
)
object OrderingFacility extends RobustPrimitives
/**
* Order messages communicate details of diagnostic tests such as labs, radiology imaging, etc.
*
* @param ID ID of the order assigned by the placing system
* @param TransactionDateTime DateTime the order was placed
* @param CollectionDateTime DateTime the specimen was collected
* @param Specimen Source of the specimen.
* @param Procedure Procedure that was ordered
* @param Provider Provider making the order
* @param OrderingFacility Facility this order was placed in
* @param Priority Priority of the order. One of the following: "Stat", "ASAP", "Routine", "Preoperative", "Timing Critical".
* @param Expiration Date when the order becomes invalid. In YYYY-MM-DD format
* @param Comments Clinically relevant comments regarding the order
* @param Notes Order-level notes
* @param Diagnoses List of diagnoses associated with this order
* @param ClinicalInfo List of supplementary clinical information associated with the order. Often these are answers to Ask at Order Entry (AOE) questions.
*/
@jsonDefaults case class Order(
ID: String,
TransactionDateTime: Option[DateTime] = None,
CollectionDateTime: Option[DateTime] = None,
Specimen: Option[Specimen] = None,
Procedure: Option[BasicCodeset] = None,
Provider: Option[OrderProvider] = None,
OrderingFacility: Option[OrderingFacility] = None,
Priority: Option[OrderPriorityTypes.Value] = None,
Expiration: Option[LocalDate] = None,
Comments: Option[String] = None,
Notes: Seq[String] = Seq.empty,
Diagnoses: Seq[CodesetWithName] = Seq.empty,
ClinicalInfo: Seq[ClinicalInfo] = Seq.empty
)
object Order extends RobustPrimitives
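/**
 * Illustrative sketch (added for clarity, not part of the original file): constructing an
 * Order. Only ID is mandatory; every other field falls back to its default. The values below are hypothetical.
 */
object OrderExample {
  val minimal: Order = Order(ID = "hypothetical-order-id")
  val commented: Order = minimal.copy(Comments = Some("Patient is fasting"))
}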
|
vital-software/scala-redox
|
src/main/scala/com/github/vitalsoftware/scalaredox/models/Order.scala
|
Scala
|
mit
| 5,526 |
/*
* Copyright (c) 2014-2021 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.internal.operators
import cats.effect.IO
import monix.execution.Scheduler
import monix.reactive.Observable
import scala.concurrent.duration._
object MapEffectSuite extends BaseOperatorSuite {
def createObservable(sourceCount: Int) = Some {
val o = Observable.range(0L, sourceCount.toLong).mapEvalF(x => IO(x))
Sample(o, count(sourceCount), sum(sourceCount), waitFirst, waitNext)
}
def count(sourceCount: Int) =
sourceCount
def waitFirst = Duration.Zero
def waitNext = Duration.Zero
def observableInError(sourceCount: Int, ex: Throwable) =
if (sourceCount == 1) None
else
Some {
val o = createObservableEndingInError(Observable.range(0L, sourceCount.toLong), ex)
.mapEvalF(i => IO.pure(i))
Sample(o, count(sourceCount), sum(sourceCount), waitFirst, waitNext)
}
  // closed form for 0 + 1 + ... + (sourceCount - 1), i.e. the sum emitted by Observable.range(0, sourceCount)
  def sum(sourceCount: Int) = {
    sourceCount * (sourceCount - 1) / 2
  }
def brokenUserCodeObservable(sourceCount: Int, ex: Throwable) = Some {
val o = Observable.range(0L, sourceCount.toLong).mapEvalF { i =>
if (i == sourceCount - 1)
throw ex
else
IO.pure(i)
}
Sample(o, count(sourceCount - 1), sum(sourceCount - 1), waitFirst, waitNext)
}
def toList[A](o: Observable[A])(implicit s: Scheduler) = {
o.foldLeft(Vector.empty[A])(_ :+ _)
.runAsyncGetLast
.map(_.getOrElse(Vector.empty))
}
override def cancelableObservables(): Seq[Sample] = {
val sample = Observable
.range(0, 100)
.delayOnNext(1.second)
.mapEvalF(x => IO(x))
Seq(
Sample(sample, 0, 0, 0.seconds, 0.seconds),
Sample(sample, 1, 1, 1.seconds, 0.seconds)
)
}
}
|
monifu/monifu
|
monix-reactive/shared/src/test/scala/monix/reactive/internal/operators/MapEffectSuite.scala
|
Scala
|
apache-2.0
| 2,376 |
package models
import slick.driver.MySQLDriver.api._
package object school {
/**
* Table queries for school models
*/
object tableQueries {
/**
* The [[TableQuery]] for [[school.Topics]]
*/
val topics =
TableQuery[school.Topics]
/**
* The [[TableQuery]] for [[school.TopicRevisions]]
*/
val topicRevisions =
TableQuery[school.TopicRevisions]
/**
* The [[TableQuery]] for [[school.TopicRevisionProposals]]
*/
val topicRevisionProposals =
TableQuery[school.TopicRevisionProposals]
/**
* The [[TableQuery]] for [[school.Microdegrees]]
*/
val microdegrees =
TableQuery[school.Microdegrees]
/**
* The [[TableQuery]] for [[school.MicrodegreeRevisions]]
*/
val microdegreeRevisions =
TableQuery[school.MicrodegreeRevisions]
/**
* The [[TableQuery]] for [[school.MicrodegreeRevisionProposals]]
*/
val microdegreeRevisionProposals =
TableQuery[school.MicrodegreeRevisionProposals]
/**
* The [[TableQuery]] for [[school.TopicRequirements]]
*/
val topicRequirements =
TableQuery[school.TopicRequirements]
/**
* The [[TableQuery]] for [[school.Enrollments]]
*/
val enrollments =
TableQuery[school.Enrollments]
}
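  /**
   * Illustrative usage sketch (added for clarity, not part of the original API): running one
   * of the table queries with Slick. A configured [[Database]] is assumed to be supplied by
   * the surrounding application.
   */
  object tableQueryExamples {
    import scala.concurrent.Future
    def allTopics(db: Database): Future[Seq[school.Topics#TableElementType]] =
      db.run(tableQueries.topics.result)
  }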
}
|
Xanho/xanho-api
|
app/models/school/package.scala
|
Scala
|
apache-2.0
| 1,338 |
package org.gbougeard.model.projects
/**
* Created with IntelliJ IDEA.
* User: gbougeard
* Date: 13/07/13
* Time: 19:07
* To change this template use File | Settings | File Templates.
*/
case class DashboardInput(id: Option[String],
commit_message: Option[String])
object DashboardInput {
import play.api.libs.json.Json
import play.api.libs.functional.syntax._
implicit val format = Json.format[DashboardInput]
}
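/**
 * Illustrative sketch (added for clarity, not part of the original file): serialising a
 * DashboardInput to JSON with the implicit format defined above. The id and message are hypothetical.
 */
object DashboardInputExample {
  import play.api.libs.json.Json
  val asJson = Json.toJson(DashboardInput(Some("hypothetical-change-id"), Some("Update commit message")))
}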
|
gbougeard/gas
|
src/main/scala/org/gbougeard/model/projects/DashboardInput.scala
|
Scala
|
apache-2.0
| 455 |
package ca.pgx.rest.service
package com.eventhub.rest.service
import akka.actor.{ActorLogging, Actor, ActorSystem, Props}
import akka.io.IO
import ca.pgx.common.communication.email.SmtpMailer
import spray.can.Http
import net.liftweb.util.{Props => LiftProps}
import scala.reflect.ClassTag
import scala.sys.SystemProperties
import net.liftweb.common.Full
import java.io.FileInputStream
import akka.event.LoggingAdapter
import ca.pgx.eventhub.backup.EventReceptionMonitor
/**
* Entry point for the service. It runs as a standalone application and acts as an HTTP server.
*
 * IMPORTANT: the initialization order here matters; getting it wrong risks data corruption.
 *
 * If you add an initialization step that can fail, run it before the web service binds to the port and starts
 * servicing requests, if possible.
 *
 * If the service you are initializing does not protect you from race conditions, you may be able to place its
 * initialization call after the port bind. That way, on a single machine, binding the port acts as a crude lock
 * that prevents multiple instances of this application from running. Keep in mind that this is not a bulletproof
 * solution, but it is usually acceptable.
*
* Mongeez and Liquibase acquire database level locks and don't require any other synchronization mechanisms.
*/
final class Boot[RouteActor <: Actor with ActorLogging with RestService : ClassTag] private () extends BootDbInit { //CriticalExceptionHandlerImpl
// FIXME: now Lift logging is used somehow
implicit def log: LoggingAdapter = ??? //implicitly[RouteActor].log
implicit lazy val system = ActorSystem("eventhub-actor-system")
/**
* Initializes all modules and starts up the service.
*/
def start(): Unit = {
initApp()
initMail()
initDb()
initWebService()
initEventMonitor()
}
protected def initApp(): Unit = {
initProps()
val banner = """
_____ _ _ _ _
| ____|_ _____ _ __ | |_| | | |_ _| |__
| _| \ \ / / _ \ '_ \| __| |_| | | | | '_ \
| |___ \ V / __/ | | | |_| _ | |_| | |_) |
|_____| \_/ \___|_| |_|\__|_| |_|\__,_|_.__/
"""//.stripMargin
info(banner)
info(s"BOOT: Application is starting up. Running in [${LiftProps.mode}] mode.")
sys.addShutdownHook(info("APPLICATION HAS BEEN SHUT DOWN.")) // TODO: check if it actually is able to print after shutdown, maybe use println?
}
protected def initProps(): Unit = {
val props = new SystemProperties
val filename = props.get("propsfile")
filename foreach {
f =>
warn(s"OVERRIDING APPLICATION SETTINGS WITH SETTINGS FROM PROVIDED FILE: [$f] !")
LiftProps.whereToLook = () => ((f, () => Full(new FileInputStream(f))) :: Nil)
}
}
protected def initMail(): Unit = {
doOrDie {
SmtpMailer.init
info("SMTP mail sender initialized.")
}
}
protected def initEventMonitor(): Unit = {
EventReceptionMonitor.init(system)
}
protected def initWebService(): Unit = {
val restPort = LiftProps.getInt("service.restPort", 8090)
lazy val service = system.actorOf(Props[RouteActor], "eventhub-rest-service-actor")
IO(Http) ! Http.Bind(service, interface = LiftProps.get("interface", "0.0.0.0"), port = restPort)
info(s"Started listening to web requests on port [$restPort].")
info("BOOT: Application startup is complete. Ready to receive requests.")
}
}
/**
* Instances of this service can be created via this companion object only. This forces clients to provide correct
* type arguments.
*/
object Boot {
def apply[RouteActor <: Actor with ActorLogging with RestService : ClassTag](): Boot[RouteActor] =
new Boot[RouteActor]
}
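/**
 * Illustrative usage sketch (added for clarity, not part of the original file). A concrete
 * application would supply its own route actor type; `MyRestServiceActor` below is a
 * hypothetical name, not a class defined in this project:
 *
 *   object Main extends App {
 *     Boot[MyRestServiceActor]().start()
 *   }
 */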
|
pgxcentre/eventhub
|
rest/src/main/scala/ca/pgx/rest/service/Boot.scala
|
Scala
|
apache-2.0
| 3,658 |
package org.jetbrains.jps.incremental.scala
package data
import java.io.File
import org.jetbrains.jps.ModuleChunk
import org.jetbrains.jps.builders.java.JavaBuilderUtil
import org.jetbrains.jps.incremental.CompileContext
import org.jetbrains.jps.incremental.scala._
import org.jetbrains.jps.incremental.scala.model.{IncrementalityType, LibrarySettings}
import org.jetbrains.jps.model.java.JpsJavaSdkType
import org.jetbrains.jps.model.module.JpsModule
import com.intellij.openapi.diagnostic.{Logger => JpsLogger}
import org.jetbrains.jps.cmdline.ProjectDescriptor
import scala.collection.JavaConverters._
/**
* @author Pavel Fatin
*/
case class CompilerData(compilerJars: Option[CompilerJars], javaHome: Option[File], incrementalType: IncrementalityType)
object CompilerData {
private val Log: JpsLogger = JpsLogger.getInstance(CompilerData.getClass.getName)
def from(context: CompileContext, chunk: ModuleChunk): Either[String, CompilerData] = {
val project = context.getProjectDescriptor
val target = chunk.representativeTarget
val module = target.getModule
val compilerJars = if (SettingsManager.hasScalaSdk(module)) {
compilerJarsIn(module).flatMap { case jars: CompilerJars =>
val compileJars =
if (useHydraCompiler(project, module, jars)) {
getHydraCompilerJars(project, module, jars)
} else jars
Log.info("Compiler jars: " + compileJars.files.map(_.getName))
val absentJars = compileJars.files.filter(!_.exists)
Either.cond(absentJars.isEmpty,
Some(compileJars),
"Scala compiler JARs not found (module '" + chunk.representativeTarget().getModule.getName + "'): "
+ absentJars.map(_.getPath).mkString(", "))
}
} else {
Right(None)
}
compilerJars.flatMap { jars =>
val incrementalityType = SettingsManager.getProjectSettings(project.getProject).getIncrementalityType
javaHome(context, module).map(CompilerData(jars, _, incrementalityType))
}
}
def javaHome(context: CompileContext, module: JpsModule): Either[String, Option[File]] = {
val project = context.getProjectDescriptor
val model = project.getModel
Option(module.getSdk(JpsJavaSdkType.INSTANCE))
.toRight("No JDK in module " + module.getName)
.flatMap { moduleJdk =>
val globalSettings = SettingsManager.getGlobalSettings(model.getGlobal)
val jvmSdk = if (globalSettings.isCompileServerEnabled && JavaBuilderUtil.CONSTANT_SEARCH_SERVICE.get(context) != null) {
Option(globalSettings.getCompileServerSdk).flatMap { sdkName =>
val libraries = model.getGlobal.getLibraryCollection.getLibraries(JpsJavaSdkType.INSTANCE).asScala
libraries.find(_.getName == sdkName).map(_.getProperties)
}
} else {
Option(model.getProject.getSdkReferencesTable.getSdkReference(JpsJavaSdkType.INSTANCE))
.flatMap(references => Option(references.resolve)).map(_.getProperties)
}
if (jvmSdk.contains(moduleJdk)) Right(None)
else {
val directory = new File(moduleJdk.getHomePath)
        Either.cond(directory.exists, Some(directory), "JDK home directory does not exist: " + directory)
}
}
}
def isDottyModule(module: JpsModule): Boolean = {
compilerJarsIn(module) match {
case Right(jars) => jars.dotty.isDefined
case _ => false
}
}
def needNoBootCp(chunk: ModuleChunk): Boolean = {
chunk.getModules.asScala.forall(needNoBootCp)
}
def compilerVersion(module: JpsModule): Option[String] = compilerJarsIn(module) match {
case Right(CompilerJars(_, compiler, _)) => version(compiler)
case Left(error) => Log.error(error)
None
}
private def needNoBootCp(module: JpsModule): Boolean = {
def tooOld(version: Option[String]) = version.exists(v => v.startsWith("2.8") || v.startsWith("2.9"))
compilerJarsIn(module) match {
case Right(jars @ CompilerJars(_, compiler, _)) => jars.dotty.isEmpty && !tooOld(version(compiler))
case _ => false
}
}
private def compilerJarsIn(module: JpsModule): Either[String, CompilerJars] = {
val sdk = SettingsManager.getScalaSdk(module)
if (sdk == null) return Left(s"Scala SDK not found in module ${module.getName}")
val files = sdk.getProperties.asInstanceOf[LibrarySettings].getCompilerClasspath
val library = find(files, "scala-library", ".jar") match {
case Left(error) => Left(error + " in Scala compiler classpath in Scala SDK " + sdk.getName)
case right => right
}
library.flatMap { libraryJar =>
val compiler = find(files, "scala-compiler", ".jar") match {
case Left(error) => Left(error + " in Scala compiler classpath in Scala SDK " + sdk.getName)
case right => right
}
compiler.flatMap { compilerJar =>
val extraJars = files.filterNot(file => file == libraryJar || file == compilerJar)
val reflectJarError = {
version(compilerJar).flatMap {
case version if version.startsWith("2.10") => // TODO implement a better version comparison
find(extraJars, "scala-reflect", ".jar").left.toOption
.map(_ + " in Scala compiler classpath in Scala SDK " + sdk.getName)
case _ => None
}
}
reflectJarError.toLeft(CompilerJars(libraryJar, compilerJar, extraJars))
}
}
}
private def find(files: Seq[File], prefix: String, suffix: String): Either[String, File] = {
files.filter(it => it.getName.startsWith(prefix) && it.getName.endsWith(suffix)) match {
case Seq() =>
Left("No '%s*%s'".format(prefix, suffix))
case Seq(file) =>
Right(file)
case Seq(duplicates @ _*) =>
Left("Multiple '%s*%s' files (%s)".format(prefix, suffix, duplicates.map(_.getName).mkString(", ")))
}
}
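  // Illustrative behaviour of `find` (added for clarity, not part of the original file):
  //   find(files, "scala-library", ".jar") yields Right(jar) when exactly one "scala-library*.jar"
  //   is present, Left("No 'scala-library*.jar'") when none is, and
  //   Left("Multiple 'scala-library*.jar' files (...)") when several match.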
private def version(compiler: File): Option[String] = readProperty(compiler, "compiler.properties", "version.number")
private def useHydraCompiler(project: ProjectDescriptor, module: JpsModule, jars: CompilerJars): Boolean = {
val hydraGlobalSettings = SettingsManager.getGlobalHydraSettings(project.getModel.getGlobal)
val hydraProjectSettings = SettingsManager.getHydraSettings(project.getProject)
val enabled = hydraProjectSettings.isHydraEnabled
val compilerVer = compilerVersion(module)
val hydraArtifactsExist = compilerVer.map(v => hydraGlobalSettings.containsArtifactsFor(v, hydraProjectSettings.getHydraVersion)).getOrElse(false)
val res = enabled && hydraArtifactsExist
if (enabled && !res) {
val reason =
if (compilerVer.isEmpty) s"could not extract compiler version from module $module, ${compilerJarsIn(module)}"
else s"Hydra artifacts not found for ${compilerVer.get} and ${hydraProjectSettings.getHydraVersion}."
Log.error(s"Not using Hydra compiler for ${module.getName} because $reason")
}
res
}
private def getHydraCompilerJars(project: ProjectDescriptor, module: JpsModule, jars: CompilerJars) = {
val scalaVersion = compilerVersion(module).get
val hydraData = HydraData(project.getProject, scalaVersion)
val hydraOtherJars = hydraData.otherJars
val extraJars = if(hydraOtherJars.nonEmpty) hydraOtherJars else jars.extra
CompilerJars(jars.library, hydraData.getCompilerJar.getOrElse(jars.compiler), extraJars)
}
}
|
triplequote/intellij-scala
|
scala/compiler-jps/src/org/jetbrains/jps/incremental/scala/data/CompilerData.scala
|
Scala
|
apache-2.0
| 7,447 |
object Test {
def main(args: Array[String]): Unit = {
{
(erased x: Int) => 42
}
println("ok")
}
}
|
som-snytt/dotty
|
tests/run-custom-args/erased/erased-19.scala
|
Scala
|
apache-2.0
| 122 |
package ua.t3hnar.plugins.cmdsupport.lang.parser
import com.intellij.openapi.project.Project
import com.intellij.lang.{ASTNode, ParserDefinition}
import com.intellij.psi.tree.IFileElementType
import com.intellij.psi.util.PsiUtilCore
import com.intellij.psi.FileViewProvider
import ua.t3hnar.plugins.cmdsupport.lang.lexer.{CmdTokenType, CmdLexer}
import ua.t3hnar.plugins.cmdsupport.lang.CmdLanguage
import ua.t3hnar.plugins.cmdsupport.lang.psi.CmdFile
class CmdParserDefinition extends ParserDefinition {
def getStringLiteralElements = CmdTokenType.STRING_LITERALS
def getFileNodeType = CmdParserDefinition.FILE
def createElement(node: ASTNode) = PsiUtilCore.NULL_PSI_ELEMENT
def createParser(project: Project) = new CmdParser
def getWhitespaceTokens = CmdTokenType.WHITE_SPACES
def createLexer(project: Project) = new CmdLexer
def getCommentTokens = CmdTokenType.COMMENTS
def spaceExistanceTypeBetweenTokens(node1: ASTNode, node2: ASTNode) = null
def createFile(provider: FileViewProvider) = new CmdFile(provider)
}
private object CmdParserDefinition {
val FILE = new IFileElementType(CmdLanguage)
}
|
t3hnar/CmdSupport
|
src/ua/t3hnar/plugins/cmdsupport/lang/parser/CmdParserDefinition.scala
|
Scala
|
apache-2.0
| 1,124 |
package model
import scala.slick.lifted.{TableQuery, AbstractTable}
import Database.profile.simple._
trait CrudSupport[T <: AbstractTable[_], K] extends Insertable[T]
with Searchable[T, K]
with Updatable[T, K]
with Removable[T, K]
with Selectable[T, K]
sealed trait Requestable[T <: AbstractTable[_]] extends Profile {
val entities: TableQuery[T]
val session: Session
implicit val innerSession = session
}
trait Selectable[T <: AbstractTable[_], K] {
def selectBy(entity: T#TableElementType): Query[T, T#TableElementType]
def selectById(id: K): Query[T, T#TableElementType]
}
trait Insertable[T <: AbstractTable[_]] extends Requestable[T] {
def insert(entity: T#TableElementType) {
entities += entity
}
def insertAll(data: T#TableElementType*) {
data.foreach(insert)
}
}
trait Updatable[T <: AbstractTable[_], K] extends Requestable[T] {
this: Selectable[T, K] =>
def update(entity: T#TableElementType) {
selectBy(entity).update(entity)
}
def update(id: K, entity: T#TableElementType) {
selectById(id).update(entity)
}
}
trait Removable[T <: AbstractTable[_], K] extends Requestable[T] {
this: Selectable[T, K] =>
def delete(entity: T#TableElementType) {
selectBy(entity).mutate(_.delete())
}
def deleteById(id: K) {
selectById(id).mutate(_.delete())
}
}
trait Searchable[T <: AbstractTable[_], K] extends Requestable[T] {
this: Selectable[T, K] =>
def findAll(): List[T#TableElementType] = {
entities.list
}
def findPage(pageNumber: Int, pageSize: Int): List[T#TableElementType] = {
entities.drop(pageSize * (pageNumber - 1)).take(pageSize).list
}
def find(limit: Int): List[T#TableElementType] = {
entities.take(limit).list
}
def findById(id: K): T#TableElementType = {
selectById(id).first()
}
def count: Int = {
Query(entities.length).first()
}
}
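/**
 * Illustrative sketch (added for clarity, not part of the original file): a concrete DAO would
 * mix these traits together and implement the two selectors. `Users` and its `id` column are
 * hypothetical names used only for illustration:
 *
 *   class UserDao(val entities: TableQuery[Users], val session: Session)
 *     extends CrudSupport[Users, Long] {
 *     def selectById(id: Long) = entities.filter(_.id === id)
 *     def selectBy(user: Users#TableElementType) = selectById(user.id)
 *   }
 */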
|
evancasey/tinder-bandits
|
src/main/scala/model/CrudSupport.scala
|
Scala
|
mit
| 1,883 |
/***********************************************************************
* Copyright (c) 2013-2015 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0 which
* accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.accumulo.index
import org.geotools.feature.simple.SimpleFeatureTypeBuilder
import org.junit.runner.RunWith
import org.locationtech.geomesa.accumulo._
import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.opengis.feature.simple.SimpleFeatureType
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class TemporalIndexCheckTest extends Specification {
// setup the basic types
def noDTGType = SimpleFeatureTypes.createType("noDTGType", s"foo:String,bar:Geometry,baz:String,$DEFAULT_GEOMETRY_PROPERTY_NAME:Geometry")
def oneDTGType = SimpleFeatureTypes.createType("oneDTGType", s"foo:String,bar:Geometry,baz:String,$DEFAULT_GEOMETRY_PROPERTY_NAME:Geometry,$DEFAULT_DTG_PROPERTY_NAME:Date")
def twoDTGType = SimpleFeatureTypes.createType("twoDTGType", s"foo:String,bar:Geometry,baz:String,$DEFAULT_GEOMETRY_PROPERTY_NAME:Geometry,$DEFAULT_DTG_PROPERTY_NAME:Date,$DEFAULT_DTG_END_PROPERTY_NAME:Date")
val DEFAULT_DATE_KEY = org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.DEFAULT_DATE_KEY
def copy(sft: SimpleFeatureType) = {
val b = new SimpleFeatureTypeBuilder()
b.init(sft)
b.buildFeatureType()
}
"TemporalIndexCheck" should {
"detect no valid DTG" in {
val testType = copy(noDTGType)
TemporalIndexCheck.validateDtgField(testType)
testType.getDtgField must beNone
}
"detect no valid DTG even if SF_PROPERTY_START_TIME is set incorrectly" in {
val testType = copy(noDTGType)
testType.getUserData.put(DEFAULT_DATE_KEY, DEFAULT_DTG_PROPERTY_NAME)
TemporalIndexCheck.validateDtgField(testType)
testType.getDtgField must beNone
}
"detect a valid DTG if SF_PROPERTY_START_TIME is not set" in {
val testType = copy(oneDTGType)
testType.getUserData.remove(DEFAULT_DATE_KEY)
TemporalIndexCheck.validateDtgField(testType)
testType.getDtgField must beSome(DEFAULT_DTG_PROPERTY_NAME)
}
"detect a valid DTG if SF_PROPERTY_START_TIME is not properly set" in {
val testType = copy(oneDTGType)
testType.getUserData.put(DEFAULT_DATE_KEY, "no_such_dtg")
TemporalIndexCheck.validateDtgField(testType)
testType.getDtgField must beSome(DEFAULT_DTG_PROPERTY_NAME)
}
"present no DTG candidate if SF_PROPERTY_START_TIME is set properly" in {
val testType = copy(oneDTGType)
testType.setDtgField(DEFAULT_DTG_PROPERTY_NAME)
TemporalIndexCheck.validateDtgField(testType)
testType.getDtgField must beSome(DEFAULT_DTG_PROPERTY_NAME)
}
"detect valid DTG candidates and select the first if SF_PROPERTY_START_TIME is not set correctly" in {
val testType = copy(twoDTGType)
testType.getUserData.put(DEFAULT_DATE_KEY, "no_such_dtg")
TemporalIndexCheck.validateDtgField(testType)
testType.getDtgField must beSome(DEFAULT_DTG_PROPERTY_NAME)
}
"present no DTG candidate if SF_PROPERTY_START_TIME is set properly and there are multiple Date attributes" in {
val testType = copy(twoDTGType)
testType.getUserData.put(DEFAULT_DATE_KEY, DEFAULT_DTG_PROPERTY_NAME)
TemporalIndexCheck.validateDtgField(testType)
testType.getDtgField must beSome(DEFAULT_DTG_PROPERTY_NAME)
}
}
"getDTGFieldName" should {
"return a dtg field name if SF_PROPERTY_START_TIME is set properly" in {
val testType = copy(oneDTGType)
testType.setDtgField(DEFAULT_DTG_PROPERTY_NAME)
testType.getDtgField must beSome(DEFAULT_DTG_PROPERTY_NAME)
}
"not return a dtg field name if SF_PROPERTY_START_TIME is not set correctly" in {
val testType = copy(noDTGType)
testType.setDtgField(DEFAULT_DTG_PROPERTY_NAME) must throwAn[IllegalArgumentException]
testType.getDtgField must beNone
}
}
"getDTGDescriptor" should {
"return a dtg attribute descriptor if SF_PROPERTY_START_TIME is set properly" in {
val testType = copy(oneDTGType)
testType.setDtgField(DEFAULT_DTG_PROPERTY_NAME)
testType.getDtgDescriptor must beSome(oneDTGType.getDescriptor(DEFAULT_DTG_PROPERTY_NAME))
}
"not return a dtg attribute descriptor if SF_PROPERTY_START_TIME is not set correctly" in {
val testType = copy(noDTGType)
testType.setDtgField(DEFAULT_DTG_PROPERTY_NAME) must throwAn[IllegalArgumentException]
testType.getDtgDescriptor must beNone
}
}
}
|
giserh/geomesa
|
geomesa-accumulo/geomesa-accumulo-datastore/src/test/scala/org/locationtech/geomesa/accumulo/index/TemporalIndexCheckTest.scala
|
Scala
|
apache-2.0
| 5,000 |
/*
* Copyright 2015 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.play.http.ws
import play.api.libs.json.{Json, Writes}
import uk.gov.hmrc.play.audit.http.HeaderCarrier
import uk.gov.hmrc.play.http.logging.MdcLoggingExecutionContext._
import uk.gov.hmrc.play.http.{HttpPatch, HttpResponse}
import scala.concurrent.Future
trait WSPatch extends HttpPatch with WSRequest {
def doPatch[A](url: String, body: A)(implicit rds: Writes[A], hc: HeaderCarrier): Future[HttpResponse] = {
buildRequest(url).patch(Json.toJson(body)).map (new WSHttpResponse(_))
}
}
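/**
 * Illustrative sketch (added for clarity, not part of the original file): a concrete HTTP client
 * object would mix WSPatch in alongside the other WS verb traits it needs, e.g.
 *
 *   object WSHttp extends WSPatch // plus WSGet, WSPost, ... as required
 *
 * after which `WSHttp.doPatch(url, body)` issues a PATCH with the JSON-serialised body.
 */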
|
liquidarmour/http-verbs
|
src/main/scala/uk/gov/hmrc/play/http/ws/WSPatch.scala
|
Scala
|
apache-2.0
| 1,123 |
package scalautils
import scalautils.Bash._
/**
* @author Holger Brandl
*/
object BashTest extends App {
// import scalautils.Bash
Bash.eval("""cd ~/unit_tests
echo baum > baum.txt
echo haus > haus.txt""")
Bash.eval("cd ~/unit_tests\\n echo baum > baum2.txt\\necho haus > haus2.txt")
val jobCmed = "cd '/home/brandl/unit_tests'; mysub \\"Users__brandl__633224592__1112201512d4102\\" 'cd /home/brandl/unit_tests ## lsfutils:change into wd\\nsleep 60; echo \\"this is task 1\\" > task_1.txt ' -q short | joblist /home/brandl/unit_tests/.test_tasks"
Bash.eval(jobCmed)
}
object BashPlayground {
import better.files.File
import scala.language.postfixOps
import scalautils._
import sys.process._
  //http://oldfashionedsoftware.com/2009/07/10/scala-code-review-foldleft-and-foldright/
List("/bin/bash", "-c", s"'kaka'").foldLeft("")((b, a) => b + " " + a).trim
def R(rcmd: String) {
Seq("/bin/bash", "-c", s"echo '$rcmd' | Rscript --vanilla -") !
}
Bash.eval("echo test 1>&2",
redirectStderr = File("/Users/brandl/Desktop/stderr_redir.txt"),
redirectStdout = File("/Users/brandl/Desktop/stdout_redir.txt"))
Bash.eval("which ls", redirectStdout = File("/Users/brandl/Desktop/stdin_redir.txt"))
//http://docs.scala-lang.org/tutorials/tour/operators.html
//Any method which takes a single parameter can be used as an infix operator in Scala. x
//R "1+1"
R("1+1")
File("/home/brandl/.bash_profile") // .head
//import scala.sys.process._
//val cmd = "uname -a" // Your command
//val output = cmd.!!.trim // Captures the output
// or
// Process("cat temp.txt")!
eval("om $(pwd)")
}
|
holgerbrandl/scalautils
|
src/test/scala/scalautils/BashTest.scala
|
Scala
|
bsd-2-clause
| 1,659 |
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.algebird.util.summer
import java.util.concurrent.ArrayBlockingQueue
import com.twitter.algebird._
import com.twitter.util.Future
import scala.collection.JavaConverters._
import scala.collection.mutable.ListBuffer
/**
* @author Ian O Connell
*/
case class SyncSummingQueue[Key, Value](bufferSize: BufferSize,
override val flushFrequency: FlushFrequency,
override val softMemoryFlush: MemoryFlushPercent,
override val memoryIncr: Incrementor,
override val timeoutIncr: Incrementor,
sizeIncr: Incrementor,
insertOps: Incrementor,
tuplesIn: Incrementor,
tuplesOut: Incrementor)(implicit semigroup: Semigroup[Value])
extends AsyncSummer[(Key, Value), Map[Key, Value]]
with WithFlushConditions[(Key, Value), Map[Key, Value]] {
require(bufferSize.v > 0, "Use the Null summer for an empty async summer")
protected override val emptyResult = Map.empty[Key, Value]
private[this] final val squeue: CustomSummingQueue[Map[Key, Value]] =
new CustomSummingQueue(bufferSize.v, sizeIncr, insertOps)
override def isFlushed: Boolean = squeue.isFlushed
def flush: Future[Map[Key, Value]] = {
val tups = squeue.flush.getOrElse(Map.empty)
tuplesOut.incrBy(tups.size)
Future.value(tups)
}
def addAll(vals: TraversableOnce[(Key, Value)]): Future[Map[Key, Value]] = {
val outputs = squeue
.put(Monoid.sum(vals.map { i =>
tuplesIn.incr
Map(i)
}))
.getOrElse(Map.empty)
tuplesOut.incrBy(outputs.size)
Future.value(outputs)
}
}
class CustomSummingQueue[V](capacity: Int, sizeIncr: Incrementor, putCalls: Incrementor)(
override implicit val semigroup: Semigroup[V])
extends StatefulSummer[V] {
private val queueOption: Option[ArrayBlockingQueue[V]] =
if (capacity > 0) Some(new ArrayBlockingQueue[V](capacity, true)) else None
/**
   * Puts an item into the queue and, when the queue is full, sums up its contents and returns the value.
   * This never blocks internally: it uses offer, and if the queue is full, we drain and
   * sum the queue.
*/
final def put(item: V): Option[V] =
if (queueOption.isDefined) {
putCalls.incr
queueOption.flatMap { queue =>
if (!queue.offer(item)) {
sizeIncr.incr
// Queue is full, do the work:
Monoid.plus(flush, Some(item))
} else {
// We are in the queue
None
}
}
} else { Some(item) }
def apply(v: V): Option[V] = put(v)
/**
* drain the queue and return the sum. If empty, return None
*/
def flush: Option[V] =
queueOption.flatMap { queue =>
val toSum = ListBuffer[V]()
queue.drainTo(toSum.asJava)
Semigroup.sumOption(toSum)
}
def isFlushed: Boolean = queueOption.map { _.size == 0 }.getOrElse(true)
}
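/**
 * Illustrative behaviour of CustomSummingQueue (added for clarity, not part of the original file),
 * using the Int semigroup and assuming two Incrementor instances are available:
 *
 *   val queue = new CustomSummingQueue[Int](capacity = 2, sizeIncr, putCalls)
 *   queue.put(1)  // None: buffered
 *   queue.put(2)  // None: buffered
 *   queue.put(3)  // Some(6): the queue was full, so 1 + 2 were drained and summed with 3
 *   queue.flush   // None: the queue was already drained
 */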
|
nevillelyh/algebird
|
algebird-util/src/main/scala/com/twitter/algebird/util/summer/SyncSummingQueue.scala
|
Scala
|
apache-2.0
| 3,638 |
/*******************************************************************************
* Copyright 2010 Maxime LΓ©vesque
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
***************************************************************************** */
package org.squeryl.dsl
class Group[K](k: K) {
def key = k
}
class Measures[M](m: M) {
def measures = m
}
class GroupWithMeasures[K,M](k: K, m: M) {
def key = k
def measures = m
override def toString = {
val sb = new StringBuffer
sb.append("GroupWithMeasures[")
sb.append("key=")
sb.append(key)
sb.append(",measures=")
sb.append(measures)
sb.append("]")
sb.toString
}
}
object GroupWithMeasures {
def unapply[K, M](x: GroupWithMeasures[K, M]) = Some((x.key, x.measures))
}
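/**
 * Illustrative sketch (added for clarity, not part of the original file): the extractor above lets
 * grouped query results be pattern matched directly, e.g.
 *
 *   groupedQuery.foreach { case GroupWithMeasures(key, measures) =>
 *     println(s"$key -> $measures")
 *   }
 *
 * where `groupedQuery` is assumed to be an Iterable[GroupWithMeasures[K, M]] produced elsewhere.
 */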
|
Krasnyanskiy/squeryl
|
src/main/scala/org/squeryl/dsl/Group.scala
|
Scala
|
apache-2.0
| 1,324 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hbase
import org.apache.hadoop.hbase._
import org.apache.spark.sql.Row
/**
 * HBase minicluster query test against the string-format table.
*/
class HBaseTpcStringFormatMiniTestSuite extends TestBase {
private val tableName = "store_sales_stringformat"
private val hbaseTableName = "STORE_SALES_STRINGFORMAT"
private val hbaseFamilies = Seq("f")
private val csvPaths = Array("src/test/resources", "sql/hbase/src/test/resources")
private val csvFile = "store_sales_stringformat.txt"
private val tpath = for (csvPath <- csvPaths if new java.io.File(csvPath).exists()) yield {
logInfo(s"Following path exists $csvPath\n")
csvPath
}
private[hbase] val csvPath = tpath(0)
override protected def beforeAll() = {
super.beforeAll()
val hbaseAdmin = TestHbase.hbaseAdmin
/**
* create hbase table if it does not exists
*/
if (!hbaseAdmin.tableExists(TableName.valueOf(hbaseTableName))) {
val descriptor = new HTableDescriptor(TableName.valueOf(hbaseTableName))
hbaseFamilies.foreach { f => descriptor.addFamily(new HColumnDescriptor(f)) }
try {
hbaseAdmin.createTable(descriptor)
} catch {
case e: TableExistsException =>
logError(s"HBase table $hbaseTableName already exists.", e)
}
}
/**
* drop the existing logical table if it exists
*/
if (TestHbase.catalog.tableExists(Seq(tableName))) {
val dropSql = "DROP TABLE " + tableName
try {
runSql(dropSql)
} catch {
case e: IllegalStateException =>
logError(s"Error occurs while dropping the table $tableName", e)
}
}
/**
* create table
*/
val createSql =
s"""CREATE TABLE store_sales_stringformat (
strkey STRING,
ss_sold_date_sk INTEGER,
ss_sold_time_sk INTEGER,
ss_item_sk INTEGER,
ss_customer_sk INTEGER,
ss_cdemo_sk INTEGER,
ss_hdemo_sk INTEGER,
ss_addr_sk INTEGER,
ss_store_sk INTEGER,
ss_promo_sk INTEGER,
ss_ticket_number INTEGER,
ss_quantity INTEGER,
ss_wholesale_cost FLOAT,
ss_list_price FLOAT,
ss_sales_price FLOAT,
ss_ext_discount_amt FLOAT,
ss_ext_sales_price FLOAT,
ss_ext_wholesale_cost FLOAT,
ss_ext_list_price FLOAT,
ss_ext_tax FLOAT,
ss_coupon_amt FLOAT,
ss_net_paid FLOAT,
ss_net_paid_inc_tax FLOAT,
ss_net_profit FLOAT,
PRIMARY KEY(strkey))
MAPPED BY
(STORE_SALES_STRINGFORMAT, COLS=[
ss_sold_date_sk=f.ss_sold_date_sk,
ss_sold_time_sk=f.ss_sold_time_sk,
ss_item_sk=f.ss_item_sk,
ss_customer_sk=f.ss_customer_sk,
ss_cdemo_sk=f.ss_cdemo_sk,
ss_hdemo_sk=f.ss_hdemo_sk,
ss_addr_sk=f.ss_addr_sk,
ss_store_sk=f.ss_store_sk,
ss_promo_sk=f.ss_promo_sk,
ss_ticket_number=f.ss_ticket_number,
ss_quantity=f.ss_quantity,
ss_wholesale_cost=f.ss_wholesale_cost,
ss_list_price=f.ss_list_price,
ss_sales_price=f.ss_sales_price,
ss_ext_discount_amt=f.ss_ext_discount_amt,
ss_ext_sales_price=f.ss_ext_sales_price,
ss_ext_wholesale_cost=f.ss_ext_wholesale_cost,
ss_ext_list_price=f.ss_ext_list_price,
ss_ext_tax=f.ss_ext_tax,
ss_coupon_amt=f.ss_coupon_amt,
ss_net_paid=f.ss_net_paid,
ss_net_paid_inc_tax=f.ss_net_paid_inc_tax,
ss_net_profit=f.ss_net_profit
]) IN STRINGFORMAT""".stripMargin
try {
runSql(createSql)
} catch {
case e: IllegalStateException =>
logError(s"Error occurs while creating the table $tableName", e)
}
/**
* load the data
*/
val loadSql = "LOAD DATA LOCAL INPATH '" + s"$csvPath/$csvFile" +
"' INTO TABLE " + tableName
try {
runSql(loadSql)
} catch {
case e: IllegalStateException =>
logError(s"Error occurs while loading the data $tableName", e)
}
}
override protected def afterAll() = {
runSql("DROP TABLE " + tableName)
super.afterAll()
}
test("Query 0") {
val sql = "SELECT count(1) FROM store_sales_stringformat"
val rows = runSql(sql)
assert(rows.size == 1)
assert(rows(0).get(0) == 10)
}
test("Query 1") {
val sql = "SELECT ss_quantity, ss_wholesale_cost, ss_list_price FROM store_sales_stringformat WHERE ss_item_sk = 574 AND ss_ticket_number = 29"
val rows = runSql(sql)
// printRows(rows)
assert(rows.size == 1)
assert(rows(0).get(0) == 33)
assert(rows(0).get(1) == 68.24f)
assert(rows(0).get(2) == 116.69f)
}
test("Query 2") {
val sql =
s"""SELECT ss_sold_date_sk, ss_sold_time_sk, ss_store_sk
|FROM store_sales_stringformat WHERE ss_item_sk = 3163 AND ss_ticket_number = 7"""
.stripMargin
val rows = runSql(sql)
// printRows(rows)
assert(rows.size == 1)
assert(rows(0).get(0) == 2452260)
assert(rows(0).get(1) == 46712)
assert(rows(0).get(2) == 19)
}
test("Query 3") {
val sql =
s"""SELECT ss_customer_sk, ss_promo_sk, ss_coupon_amt, ss_net_profit
|FROM store_sales_stringformat
|WHERE ss_item_sk = 18814 AND ss_ticket_number = 29"""
.stripMargin
val rows = runSql(sql)
// printRows(rows)
assert(rows.size == 1)
assert(rows(0).get(0) == null)
assert(rows(0).get(1) == null)
assert(rows(0).get(2) == 0.00f)
assert(rows(0).get(3) == -4398.98f)
}
test("Query 4") {
val sql =
s"""SELECT ss_ticket_number, count(1)
|FROM store_sales_stringformat
|GROUP BY ss_ticket_number
|ORDER BY ss_ticket_number"""
.stripMargin
val rows = runSql(sql)
// printRows(rows)
assert(rows.length == 5)
assert(rows(0).get(0) == 7)
assert(rows(0).get(1) == 2)
assert(rows(1).get(0) == 10)
assert(rows(1).get(1) == 2)
assert(rows(2).get(0) == 11)
assert(rows(2).get(1) == 1)
assert(rows(3).get(0) == 29)
assert(rows(3).get(1) == 3)
assert(rows(4).get(0) == 30)
assert(rows(4).get(1) == 2)
}
test("Query 5") {
val sql =
"""SELECT ss_item_sk, ss_ticket_number, count(1)
|FROM store_sales_stringformat
|WHERE ss_item_sk > 14000 AND ss_item_sk < 18000
|GROUP BY ss_item_sk, ss_ticket_number"""
.stripMargin
val rows = runSql(sql)
// printRows(rows)
assert(rows.length == 1)
assert(rows(0).get(0) == 16335)
assert(rows(0).get(1) == 10)
assert(rows(0).get(2) == 1)
}
test("Query 6") {
val sql =
s"""SELECT ss_item_sk, avg(ss_quantity) as avg_qty, count(ss_quantity) as cnt_qty
|FROM store_sales_stringformat
|WHERE ss_item_sk = 707
|GROUP BY ss_item_sk
|ORDER BY ss_item_sk"""
.stripMargin
val rows = runSql(sql)
// printRows(rows)
assert(rows.length == 1)
assert(rows(0).get(0) == 707)
assert(rows(0).get(1) == 83.0f)
assert(rows(0).get(2) == 1)
}
test("Query 7") {
val sql =
s"""SELECT ss_item_sk, ss_ticket_number, sum(ss_wholesale_cost) as sum_wholesale_cost
|FROM store_sales_stringformat
|WHERE ss_item_sk > 9000 AND ss_item_sk < 18000
|GROUP BY ss_item_sk, ss_ticket_number"""
.stripMargin
val rows = runSql(sql)
// printRows(rows)
assert(rows.length == 2)
assert(rows(0).get(0) == 16335)
assert(rows(0).get(1) == 10)
assert(rows(0).get(2) == 82.3499984741211)
assert(rows(1).get(0) == 12919)
assert(rows(1).get(1) == 30)
assert(rows(1).get(2) == 61.959999084472656)
}
test("Query 8") {
val sql =
s"""SELECT ss_item_sk, ss_ticket_number,
|min(ss_wholesale_cost) as min_wholesale_cost,
|max(ss_wholesale_cost) as max_wholesale_cost,
|avg(ss_wholesale_cost) as avg_wholesale_cost
|FROM store_sales_stringformat
|WHERE ss_item_sk > 1000 AND ss_item_sk < 18000
|GROUP BY ss_item_sk, ss_ticket_number"""
.stripMargin
val rows = runSql(sql)
// printRows(rows)
assert(rows.length == 5)
assert(rows(0).get(0) == 16335)
assert(rows(0).get(1) == 10)
    assert(rows(0).get(2) == 82.35f)
    assert(rows(0).get(3) == 82.35f)
    assert(rows(0).get(4) == 82.3499984741211)
assert(rows(4).get(0) == 3163)
assert(rows(4).get(1) == 7)
    assert(rows(4).get(2) == 69.53f)
    assert(rows(4).get(3) == 69.53f)
    assert(rows(4).get(4) == 69.52999877929688)
}
test("Query 9") {
val sql =
s"""SELECT ss_item_sk, count(ss_customer_sk) as count_ss_customer_sk
|FROM store_sales_stringformat
|WHERE ss_item_sk > 0 AND ss_item_sk <= 18813
|GROUP BY ss_item_sk
|ORDER BY ss_item_sk"""
.stripMargin
val rows = runSql(sql)
// printRows(rows)
assert(rows.length == 9)
assert(rows(0).get(0) == 7)
assert(rows(0).get(1) == 1)
assert(rows(1).get(0) == 574)
assert(rows(1).get(1) == 1)
assert(rows(2).get(0) == 707)
assert(rows(2).get(1) == 1)
assert(rows(3).get(0) == 1579)
assert(rows(3).get(1) == 1)
assert(rows(4).get(0) == 1857)
assert(rows(4).get(1) == 1)
assert(rows(5).get(0) == 3163)
assert(rows(5).get(1) == 1)
assert(rows(6).get(0) == 12919)
assert(rows(6).get(1) == 1)
assert(rows(7).get(0) == 16335)
assert(rows(7).get(1) == 1)
assert(rows(8).get(0) == 18669)
assert(rows(8).get(1) == 1)
}
test("Query 10") {
val sql = "SELECT count(*) FROM store_sales_stringformat WHERE ss_net_profit < 100"
val rows = runSql(sql)
// printRows(rows)
assert(rows(0).get(0) == 8)
}
test("Query 11") {
val sql =
s"""SELECT count(*) FROM store_sales_stringformat
|WHERE ss_coupon_amt < 500 AND ss_ext_discount_amt < 500
|AND ss_net_paid < 500 AND ss_net_paid_inc_tax < 500"""
.stripMargin
val rows = runSql(sql)
// printRows(rows)
assert(rows(0).get(0) == 2)
}
test("Query 12") {
val sql = "SELECT count(distinct ss_customer_sk) as count_distinct_customer FROM store_sales_stringformat"
val rows = runSql(sql)
// printRows(rows)
assert(rows(0).get(0) == 5)
}
test("Query 13") {
val sql = "SELECT * FROM store_sales_stringformat LIMIT 5"
val rows = runSql(sql)
// printRows(rows)
assert(rows.length == 5)
}
test("Query 14") {
val sql =
s"""SELECT ss_customer_sk, count(*)
|FROM store_sales_stringformat
|WHERE ss_item_sk >= 4000 AND ss_item_sk <= 18000
|GROUP BY ss_customer_sk
|ORDER BY ss_customer_sk"""
.stripMargin
val rows = runSql(sql)
// printRows(rows)
assert(rows.length == 2)
assert(rows(0).get(0) == 75937)
assert(rows(0).get(1) == 1)
assert(rows(1).get(0) == 180451)
assert(rows(1).get(1) == 1)
}
test("Query 15") {
val sql =
s"""SELECT count(ss_customer_sk) as count_customer
|FROM store_sales_stringformat
|WHERE ss_customer_sk IN (1,25,50,75937,180451)"""
.stripMargin
val rows = runSql(sql)
// printRows(rows)
assert(rows(0).get(0) == 4)
}
test("Query 16") {
val sql =
s"""SELECT count(ss_customer_sk) as count_customer
|FROM store_sales_stringformat
|WHERE ss_customer_sk <= 147954 AND ss_quantity < 5000"""
.stripMargin
val rows = runSql(sql)
// printRows(rows)
assert(rows(0).get(0) == 7)
}
test("Query 17") {
val sql =
s"""SELECT count(ss_customer_sk) AS count_customer
|FROM store_sales_stringformat
|WHERE ss_customer_sk > 100"""
.stripMargin
val rows = runSql(sql)
// printRows(rows)
assert(rows(0).get(0) == 9)
}
test("Query 18") {
val sql =
s"""SELECT ss_ticket_number, ss_quantity, ss_wholesale_cost, ss_list_price
|FROM store_sales_stringformat
|WHERE ss_ticket_number = 10 OR ss_wholesale_cost < 17.33"""
.stripMargin
val rows = runSql(sql)
// printRows(rows)
assert(rows.length == 3)
assert(rows(0).get(0) == 10)
assert(rows(0).get(1) == 83)
assert(rows(0).get(2) == 10.26f)
assert(rows(0).get(3) == 17.33f)
assert(rows(1).get(0) == 10)
assert(rows(1).get(1) == 66)
assert(rows(1).get(2) == 82.35f)
assert(rows(1).get(3) == 137.52f)
assert(rows(2).get(0) == 11)
assert(rows(2).get(1) == 68)
assert(rows(2).get(2) == 7.16f)
assert(rows(2).get(3) == 12.88f)
}
test("Query 19") {
val sql =
s"""SELECT ss_ticket_number, ss_sold_date_sk, ss_sold_time_sk, ss_store_sk
|FROM store_sales_stringformat
|WHERE ss_ticket_number = 10 OR ss_sold_date_sk >= 2451966
|ORDER BY ss_ticket_number"""
.stripMargin
val rows = runSql(sql)
// printRows(rows)
assert(rows.length == 5)
assert(rows(0).get(0) == 7)
assert(rows(0).get(1) == 2452260)
assert(rows(0).get(2) == 46712)
assert(rows(0).get(3) == 19)
assert(rows(1).get(0) == 7)
assert(rows(1).get(1) == 2452260)
assert(rows(1).get(2) == 46712)
assert(rows(1).get(3) == 19)
assert(rows(2).get(0) == 10)
assert(rows(2).get(1) == 2451966)
assert(rows(2).get(2) == 60226)
assert(rows(2).get(3) == 13)
assert(rows(3).get(0) == 10)
assert(rows(3).get(1) == 2451966)
assert(rows(3).get(2) == 60226)
assert(rows(3).get(3) == 13)
assert(rows(4).get(0) == 11)
assert(rows(4).get(1) == 2452420)
assert(rows(4).get(2) == 68961)
assert(rows(4).get(3) == 25)
}
test("Query 20") {
val sql =
s"""SELECT ss_ticket_number, ss_sold_date_sk, ss_customer_sk, ss_promo_sk, ss_coupon_amt
|FROM store_sales_stringformat
|WHERE ss_ticket_number = 10
|OR (ss_sold_date_sk > 2451121 AND ss_sold_date_sk <= 2451966)
|ORDER BY ss_ticket_number""".stripMargin
val rows = runSql(sql)
// printRows(rows)
assert(rows.length == 4)
assert(rows(0).get(0) == 10)
assert(rows(0).get(1) == 2451966)
assert(rows(0).get(2) == 180451)
assert(rows(0).get(3) == 145)
assert(rows(0).get(4) == 0.00f)
assert(rows(1).get(0) == 10)
assert(rows(1).get(1) == 2451966)
assert(rows(1).get(2) == 180451)
assert(rows(1).get(3) == 175)
assert(rows(1).get(4) == 0.00f)
assert(rows(2).get(0) == 30)
assert(rows(2).get(1) == 2451390)
assert(rows(2).get(2) == 75937)
assert(rows(2).get(3) == 231)
assert(rows(2).get(4) == 0.00f)
assert(rows(3).get(0) == 30)
assert(rows(3).get(1) == 2451390)
assert(rows(3).get(2) == 75937)
assert(rows(3).get(3) == 200)
assert(rows(3).get(4) == 210.72f)
}
test("Query 21") {
val sql =
s"""SELECT strkey, ss_item_sk, ss_ticket_number, count(1)
|FROM store_sales_stringformat
|WHERE ss_ticket_number >= 10 and ss_ticket_number <= 20
|GROUP BY strkey, ss_item_sk, ss_ticket_number"""
.stripMargin
val rows = runSql(sql)
// printRows(rows)
assert(rows.length == 3)
assert(rows(0).get(0) == "00707000000010")
assert(rows(0).get(1) == 707)
assert(rows(0).get(2) == 10)
assert(rows(0).get(3) == 1)
assert(rows(1).get(0) == "18669000000011")
assert(rows(1).get(1) == 18669)
assert(rows(1).get(2) == 11)
assert(rows(1).get(3) == 1)
assert(rows(2).get(0) == "16335000000010")
assert(rows(2).get(1) == 16335)
assert(rows(2).get(2) == 10)
assert(rows(2).get(3) == 1)
}
test("Query 22") {
val sql =
s"""SELECT strkey, ss_item_sk, ss_ticket_number, SUM(ss_wholesale_cost) AS sum_wholesale_cost
|FROM store_sales_stringformat
|WHERE ss_ticket_number >= 10 and ss_ticket_number <= 20
|GROUP BY strkey, ss_item_sk, ss_ticket_number"""
.stripMargin
val rows = runSql(sql)
// printRows(rows)
assert(rows.length == 3)
assert(rows(0).get(0) == "00707000000010")
assert(rows(0).get(1) == 707)
assert(rows(0).get(2) == 10)
assert(rows(0).get(3) == 10.260000228881836)
assert(rows(1).get(0) == "18669000000011")
assert(rows(1).get(1) == 18669)
assert(rows(1).get(2) == 11)
assert(rows(1).get(3) == 7.159999847412109)
assert(rows(2).get(0) == "16335000000010")
assert(rows(2).get(1) == 16335)
assert(rows(2).get(2) == 10)
assert(rows(2).get(3) == 82.3499984741211)
}
test("Query 23") {
val sql =
s"""SELECT ss_item_sk, ss_ticket_number,
|min(ss_wholesale_cost) as min_wholesale_cost,
|max(ss_wholesale_cost) as max_wholesale_cost,
|avg(ss_wholesale_cost) as avg_wholesale_cost
|FROM store_sales_stringformat
|WHERE (ss_ticket_number >= 10 AND ss_ticket_number <= 20)
|AND (ss_sold_date_sk > 2451121 AND ss_sold_date_sk <= 2451966)
|GROUP BY ss_item_sk, ss_ticket_number
|ORDER BY ss_item_sk, ss_ticket_number""".stripMargin
val rows = runSql(sql)
// printRows(rows)
assert(rows.length == 2)
assert(rows(0).get(0) == 707)
assert(rows(0).get(1) == 10)
assert(rows(0).get(2) == 10.26f)
assert(rows(0).get(3) == 10.26f)
assert(rows(0).get(4) == 10.260000228881836)
assert(rows(1).get(0) == 16335)
assert(rows(1).get(1) == 10)
assert(rows(1).get(2) == 82.35f)
assert(rows(1).get(3) == 82.35f)
assert(rows(1).get(4) == 82.3499984741211)
}
test("Query 24") {
val sql =
s"""SELECT ss_item_sk, ss_ticket_number,
|min(ss_ext_wholesale_cost) as min_ss_ext_wholesale_cost,
|max(ss_ext_wholesale_cost) as max_ss_ext_wholesale_cost,
|avg(ss_ext_wholesale_cost) as avg_ss_ext_wholesale_cost
|FROM store_sales_stringformat
|WHERE (ss_ticket_number >= 10 AND ss_ticket_number <= 100)
|AND (ss_customer_sk > 0 AND ss_customer_sk <= 147954)
|AND (ss_sold_date_sk = 2451121 OR ss_sold_date_sk = 2451390)
|GROUP BY ss_item_sk, ss_ticket_number
|ORDER BY ss_item_sk, ss_ticket_number"""
.stripMargin
val rows = runSql(sql)
// printRows(rows)
assert(rows.length == 4)
assert(rows(0).get(0) == 7)
assert(rows(0).get(1) == 29)
assert(rows(0).get(2) == 1726.89f)
assert(rows(0).get(3) == 1726.89f)
assert(rows(0).get(4) == 1726.8900146484375)
assert(rows(1).get(0) == 574)
assert(rows(1).get(1) == 29)
assert(rows(1).get(2) == 2251.92f)
assert(rows(1).get(3) == 2251.92f)
assert(rows(1).get(4) == 2251.919921875)
assert(rows(2).get(0) == 1579)
assert(rows(2).get(1) == 30)
assert(rows(2).get(2) == 1344.0f)
assert(rows(2).get(3) == 1344.0f)
assert(rows(2).get(4) == 1344.0)
assert(rows(3).get(0) == 12919)
assert(rows(3).get(1) == 30)
assert(rows(3).get(2) == 2044.68f)
assert(rows(3).get(3) == 2044.68f)
assert(rows(3).get(4) == 2044.6800537109375)
}
test("Query 25") {
val sql =
s"""SELECT *
|FROM store_sales_stringformat
|WHERE strkey > '03163000000007'"""
.stripMargin
val rows = runSql(sql)
// printRows(rows)
assert(rows.length == 4)
assert(rows(0).get(0) == "12919000000030")
assert(rows(0).get(1) == 2451390)
assert(rows(0).get(5) == 499127)
assert(rows(0).get(23) == -1765.35f)
assert(rows(1).get(0) == "16335000000010")
assert(rows(1).get(1) == 2451966)
assert(rows(1).get(5) == 71288)
assert(rows(1).get(23) == 10.56f)
assert(rows(2).get(0) == "18669000000011")
assert(rows(2).get(1) == 2452420)
assert(rows(2).get(5) == 781292)
assert(rows(2).get(23) == -209.76f)
assert(rows(3).get(0) == "18814000000029")
assert(rows(3).get(1) == 2451121)
assert(rows(3).get(5) == null)
assert(rows(3).get(23) == -4398.98f)
}
test("Query 26") {
val sql =
s"""SELECT *
|FROM store_sales_stringformat
|WHERE ss_wholesale_cost >= 33
|AND ss_quantity > 40"""
.stripMargin
val rows = runSql(sql)
// printRows(rows)
assert(rows.length == 3)
assert(rows(0).get(0) == "01857000000007")
assert(rows(0).get(1) == 2452260)
assert(rows(0).get(5) == 890396)
assert(rows(0).get(23) == 1150.23f)
assert(rows(1).get(0) == "03163000000007")
assert(rows(1).get(1) == 2452260)
assert(rows(1).get(5) == 890396)
assert(rows(1).get(23) == -2900.34f)
assert(rows(2).get(0) == "16335000000010")
assert(rows(2).get(1) == 2451966)
assert(rows(2).get(5) == 71288)
assert(rows(2).get(23) == 10.56f)
}
test("Query 27") {
val sql = "SELECT * FROM store_sales_stringformat WHERE ss_ticket_number + 0 = 10 and ss_sold_date_sk + 0 > 0"
val rows = runSql(sql)
// printRows(rows)
assert(rows.length == 2)
assert(rows(0).get(0) == "00707000000010")
assert(rows(0).get(2) == 60226)
assert(rows(0).get(8) == 13)
assert(rows(0).get(23) == -89.64f)
assert(rows(1).get(0) == "16335000000010")
assert(rows(1).get(2) == 60226)
assert(rows(1).get(8) == 13)
assert(rows(1).get(23) == 10.56f)
}
test("Query 28") {
val sql =
s"""SELECT * FROM store_sales_stringformat
|WHERE ss_cdemo_sk IS NULL""".stripMargin
val rows = runSql(sql)
// printRows(rows)
assert(rows.length == 1)
assert(rows(0).get(0) == "18814000000029")
assert(rows(0).get(2) == null)
assert(rows(0).get(8) == null)
assert(rows(0).get(23) == -4398.98f)
}
test("Query 28_1") {
val sql =
s"""SELECT ss_cdemo_sk FROM store_sales_stringformat
|WHERE ss_cdemo_sk IS NULL"""
.stripMargin
val rows = runSql(sql)
// printRows(rows)
assert(rows.length == 1)
assert(rows(0).get(0) == null)
}
test("Query 28_2") {
val sql =
s"""SELECT ss_cdemo_sk FROM store_sales_stringformat
|WHERE ss_cdemo_sk IS NOT NULL"""
.stripMargin
val rows = runSql(sql)
// printRows(rows)
assert(rows.length == 9)
}
test("Query 29") {
val sql =
s"""SELECT * FROM store_sales_stringformat
|WHERE ss_cdemo_sk IS NOT NULL"""
.stripMargin
val rows = runSql(sql)
// printRows(rows)
assert(rows.length == 9)
}
test("Query 30") {
val sql =
s"""SELECT * FROM store_sales_stringformat
|WHERE ss_cdemo_sk IS NOT NULL AND ss_ticket_number = 29"""
.stripMargin
val rows = runSql(sql)
// printRows(rows)
assert(rows.length == 2)
assert(rows(0).get(0) == "00007000000029")
assert(rows(0).get(2) == 45001)
assert(rows(0).get(8) == 14)
assert(rows(0).get(23) == 1192.95f)
assert(rows(1).get(0) == "00574000000029")
assert(rows(1).get(2) == 45001)
assert(rows(1).get(8) == 14)
assert(rows(1).get(23) == -1421.81f)
}
test("Query 31") {
val sql =
s"""SELECT * FROM store_sales_stringformat
|WHERE ss_cdemo_sk IS NULL AND ss_ticket_number = 29"""
.stripMargin
val rows = runSql(sql)
// printRows(rows)
assert(rows.length == 1)
assert(rows(0).get(0) == "18814000000029")
assert(rows(0).get(2) == null)
assert(rows(0).get(8) == null)
assert(rows(0).get(22) == null)
assert(rows(0).get(23) == -4398.98f)
}
test("Query 32") {
val sql =
s"""SELECT * FROM store_sales_stringformat
|WHERE ss_cdemo_sk IS NULL
|OR ss_ticket_number = 29"""
.stripMargin
val rows = runSql(sql)
// printRows(rows)
assert(rows.length == 3)
assert(rows(0).get(0) == "00007000000029")
assert(rows(0).get(2) == 45001)
assert(rows(0).get(8) == 14)
assert(rows(0).get(22) == 2949.03f)
assert(rows(0).get(23) == 1192.95f)
assert(rows(1).get(0) == "00574000000029")
assert(rows(1).get(2) == 45001)
assert(rows(1).get(8) == 14)
assert(rows(1).get(22) == 896.51f)
assert(rows(1).get(23) == -1421.81f)
assert(rows(2).get(0) == "18814000000029")
assert(rows(2).get(2) == null)
assert(rows(2).get(8) == null)
assert(rows(2).get(22) == null)
assert(rows(2).get(23) == -4398.98f)
}
private def printRows(rows: Array[Row]) = {
println("======= QUERY RESULTS ======")
for (i <- 0 until rows.size) {
println(rows(i).mkString(" | "))
}
println("============================")
}
}
|
yzhou2001/HSpark
|
src/test/scala/org/apache/spark/sql/hbase/HBaseTpcStringFormatMiniTestSuite.scala
|
Scala
|
apache-2.0
| 25,394 |
package scavlink.link.nav
import org.scalatest.{Matchers, WordSpec}
import scavlink.coord.NED
import scavlink.link.mission.MissionTestData
import scavlink.message.{SystemId, VehicleId}
import scavlink.state.LocationState
import scala.concurrent.duration._
class GotoLocationsSpec extends WordSpec with Matchers with MissionTestData {
val vehicle = VehicleId.fromLink("spec", SystemId(1))
val course = GotoLocations(waypoints, hasArrived = withinMeters(1, 1), maxEta = 1.hour, smoothingWindow = 5.seconds)
val points = Vector(
waypoints(0) + NED(-30, -30, 40),
waypoints(0) + NED(-29.8, -29, 40),
waypoints(0) + NED(-29.8, -28, 39),
waypoints(0) + NED(-29.5, -26, 37),
waypoints(0) + NED(-29.4, -25.7, 34),
waypoints(0) + NED(-29.2, -23, 31),
waypoints(0) + NED(-28.7, -20, 28),
waypoints(0) + NED(-25.3, -14, 23),
waypoints(0) + NED(-22.1, -9, 17),
waypoints(0) + NED(-18.3, -6, 13.4)
)
val telemetry = points.zipWithIndex.map { case (p, i) =>
LocationState(vehicle, timeIndex = i * 1000, location = p)
}
"the GotoLocations course" should {
"update waypoint to the next location upon arrival at current waypoint" in {
course.waypoint shouldBe waypoints(0)
val newCourse = course.update(LocationState(vehicle, location = waypoints(0)))
newCourse.waypoint shouldBe waypoints(1)
}
"update distance value and distances vector" in {
def updateAndCheckCourse(course: GotoLocations, index: Int): Unit = {
course.update(telemetry(index)) match {
case nc: GotoLocations =>
println(s"index=$index dist=${nc.distance} dc=${nc.distanceChangeRate} wp=${waypoints(0)}")
println(s"last=${course.current} this=${nc.current}")
println(nc.distances)
nc.current shouldBe Some(points(index))
val distance = points(index).haversineDistance(waypoints(0))
nc.distance shouldBe distance
nc.distances.last shouldBe((index * 1000).toLong, distance)
if (!course.distance.isNaN) {
assert(nc.distance < course.distance)
}
// at this point in the telemetry, we should start seeing a distance change calculation
if (index > 5) {
assert(nc.distanceChangeRate < 0)
}
nc.status shouldBe CourseStatus.OK
if (index < telemetry.length - 1) {
updateAndCheckCourse(nc, index + 1)
}
case _ => fail()
}
}
updateAndCheckCourse(course, 0)
}
"not add a new time/distance sample if it's identical to the last one" in {
val newCourse = course.update(telemetry(0))
newCourse.update(telemetry(0)) match {
case nc: GotoLocations =>
nc.distances.length shouldBe 1
case _ => fail()
}
}
"flag the course as Error when LocationStates send it away from waypoint" in {
val telemetry = points.zipWithIndex.map { case (p, i) =>
LocationState(vehicle, timeIndex = (points.length - i - 1) * 1000, location = p)
}
def updateAndCheckCourse(course: GotoLocations, index: Int): Unit = {
course.update(telemetry(index)) match {
case nc: GotoLocations =>
println(s"index=$index dist=${nc.distance} dc=${nc.distanceChangeRate} wp=${waypoints(0)}")
println(s"last=${course.current} this=${nc.current}")
println(nc.distances)
if (!course.distance.isNaN) {
assert(nc.distance > course.distance)
}
if (index < 5) {
assert(nc.distanceChangeRate > 0)
nc.status shouldBe CourseStatus.Error
} else {
nc.status shouldBe CourseStatus.OK
}
if (index > 0) {
updateAndCheckCourse(nc, index - 1)
}
case _ => //
}
}
updateAndCheckCourse(course, telemetry.length - 1)
}
"flag the course as Error when LocationStates indicate extremely slow progress" in {
val telemetry = (0 to 9).map { i =>
LocationState(vehicle, timeIndex = i * 1000, location = points(0))
}
def updateAndCheckCourse(course: GotoLocations, index: Int): Unit = {
course.update(telemetry(index)) match {
case nc: GotoLocations =>
println(s"index=$index dist=${nc.distance} dc=${nc.distanceChangeRate} wp=${waypoints(0)}")
println(s"last=${course.current} this=${nc.current}")
println(nc.distances)
if (index > 4) {
nc.status shouldBe CourseStatus.Error
} else {
nc.status shouldBe CourseStatus.OK
}
if (index < telemetry.length - 1) {
updateAndCheckCourse(nc, index + 1)
}
case _ => //
}
}
updateAndCheckCourse(course, 0)
}
"flag the course as Warning when LocationStates indicate moderately slow progress" ignore {
// spread the telemetry samples over > 30 minutes
val telemetry = points.zipWithIndex.map { case (p, i) =>
LocationState(vehicle, timeIndex = i * 1000 * 2000, location = p)
}
def updateAndCheckCourse(course: GotoLocations, index: Int): Unit = {
course.update(telemetry(index)) match {
case nc: GotoLocations =>
println(s"index=$index dist=${nc.distance} dc=${nc.distanceChangeRate} wp=${waypoints(0)}")
println(s"last=${course.current} this=${nc.current}")
println(nc.distances)
if (index > 9) {
nc.status shouldBe CourseStatus.Error
} else {
nc.status shouldBe CourseStatus.OK
}
if (index < telemetry.length - 1) {
updateAndCheckCourse(nc, index + 1)
}
case _ => //
}
}
updateAndCheckCourse(course, 0)
}
"flag the course as Error when current location moves outside the fence" ignore {
// val initCourse = course.copy(fence )
}
}
}
|
nickolasrossi/scavlink
|
src/test/scala/scavlink/link/nav/GotoLocationsSpec.scala
|
Scala
|
mit
| 6,125 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.tools
import joptsimple.OptionParser
import org.apache.kafka.common.security._
import kafka.utils.{CommandLineUtils, Exit, Logging, ZKGroupTopicDirs, ZkUtils}
object VerifyConsumerRebalance extends Logging {
def main(args: Array[String]) {
val parser = new OptionParser()
val zkConnectOpt = parser.accepts("zookeeper.connect", "ZooKeeper connect string.").
withRequiredArg().defaultsTo("localhost:2181").ofType(classOf[String])
val groupOpt = parser.accepts("group", "Consumer group.").
withRequiredArg().ofType(classOf[String])
parser.accepts("help", "Print this message.")
if(args.length == 0)
CommandLineUtils.printUsageAndDie(parser, "Validate that all partitions have a consumer for a given consumer group.")
val options = parser.parse(args : _*)
if (options.has("help")) {
parser.printHelpOn(System.out)
Exit.exit(0)
}
CommandLineUtils.checkRequiredArgs(parser, options, groupOpt)
val zkConnect = options.valueOf(zkConnectOpt)
val group = options.valueOf(groupOpt)
var zkUtils: ZkUtils = null
try {
zkUtils = ZkUtils(zkConnect,
30000,
30000,
JaasUtils.isZkSecurityEnabled())
debug("zkConnect = %s; group = %s".format(zkConnect, group))
// check if the rebalancing operation succeeded.
try {
if(validateRebalancingOperation(zkUtils, group))
println("Rebalance operation successful !")
else
println("Rebalance operation failed !")
} catch {
case e2: Throwable => error("Error while verifying current rebalancing operation", e2)
}
}
finally {
if (zkUtils != null)
zkUtils.close()
}
}
private def validateRebalancingOperation(zkUtils: ZkUtils, group: String): Boolean = {
info("Verifying rebalancing operation for consumer group " + group)
var rebalanceSucceeded: Boolean = true
/**
* A successful rebalancing operation would select an owner for each available partition
* This means that for each partition registered under /brokers/topics/[topic]/[broker-id], an owner exists
* under /consumers/[consumer_group]/owners/[topic]/[broker_id-partition_id]
*/
val consumersPerTopicMap = zkUtils.getConsumersPerTopic(group, excludeInternalTopics = false)
val partitionsPerTopicMap = zkUtils.getPartitionsForTopics(consumersPerTopicMap.keySet.toSeq)
partitionsPerTopicMap.foreach { case (topic, partitions) =>
val topicDirs = new ZKGroupTopicDirs(group, topic)
info("Alive partitions for topic %s are %s ".format(topic, partitions.toString))
info("Alive consumers for topic %s => %s ".format(topic, consumersPerTopicMap.get(topic)))
val partitionsWithOwners = zkUtils.getChildrenParentMayNotExist(topicDirs.consumerOwnerDir)
if(partitionsWithOwners.isEmpty) {
error("No owners for any partitions for topic " + topic)
rebalanceSucceeded = false
}
debug("Children of " + topicDirs.consumerOwnerDir + " = " + partitionsWithOwners.toString)
val consumerIdsForTopic = consumersPerTopicMap.get(topic)
// for each available partition for topic, check if an owner exists
partitions.foreach { partition =>
// check if there is a node for [partition]
if(!partitionsWithOwners.contains(partition.toString)) {
error("No owner for partition [%s,%d]".format(topic, partition))
rebalanceSucceeded = false
}
        // try reading the partition owner path to see if a valid consumer id exists there
val partitionOwnerPath = topicDirs.consumerOwnerDir + "/" + partition
val partitionOwner = zkUtils.readDataMaybeNull(partitionOwnerPath)._1 match {
case Some(m) => m
case None => null
}
if(partitionOwner == null) {
error("No owner for partition [%s,%d]".format(topic, partition))
rebalanceSucceeded = false
}
else {
// check if the owner is a valid consumer id
consumerIdsForTopic match {
case Some(consumerIds) =>
if(!consumerIds.map(c => c.toString).contains(partitionOwner)) {
error(("Owner %s for partition [%s,%d] is not a valid member of consumer " +
"group %s").format(partitionOwner, topic, partition, group))
rebalanceSucceeded = false
}
else
info("Owner of partition [%s,%d] is %s".format(topic, partition, partitionOwner))
case None => {
error("No consumer ids registered for topic " + topic)
rebalanceSucceeded = false
}
}
}
}
}
rebalanceSucceeded
}
}
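// A hypothetical invocation sketch (not part of the original source), using the standard
// Kafka launcher script; the consumer group name below is illustrative:
//
//   bin/kafka-run-class.sh kafka.tools.VerifyConsumerRebalance \
//     --zookeeper.connect localhost:2181 --group my-consumer-group
//
// --zookeeper.connect defaults to localhost:2181 if omitted; --group is required.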
|
ijuma/kafka
|
core/src/main/scala/kafka/tools/VerifyConsumerRebalance.scala
|
Scala
|
apache-2.0
| 5,627 |
package java.time
private[time] object Preconditions {
// Like scala.Predef.require, but throws a DateTimeException.
def requireDateTime(requirement: Boolean, message: => Any): Unit = {
if (!requirement)
throw new DateTimeException(message.toString)
}
}
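// A minimal usage sketch (not part of the original source): shows the intended call
// pattern from java.time implementations; the month-range check is hypothetical.
private[time] object PreconditionsExample {
  def checkMonth(month: Int): Unit =
    Preconditions.requireDateTime(1 <= month && month <= 12, s"Invalid month: $month")
}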
|
jasonchaffee/scala-js
|
javalib/src/main/scala/java/time/Preconditions.scala
|
Scala
|
bsd-3-clause
| 271 |
package com.seremis.geninfusion.registry
import com.seremis.geninfusion.api.GIApiInterface.IDataTypeRegistry
import com.seremis.geninfusion.api.util.{DataType, INBTTagable}
import net.minecraft.nbt.NBTTagCompound
import scala.collection.mutable.HashMap
class DataTypeRegistry extends IDataTypeRegistry {
private var register: HashMap[Class[_], DataType[_]] = HashMap()
override def register[A](data: DataType[A], clzz: Class[A]): Unit = {
register += (clzz -> data)
}
@throws[IllegalArgumentException]
override def getDataTypeForClass[A](clzz: Class[A]): DataType[A] = {
val option = register.get(clzz).asInstanceOf[Option[DataType[A]]]
if(option.nonEmpty) {
option.get
} else {
noRegisteredDataType(clzz)
}
}
override def hasDataTypeForClass(clzz: Class[_]): Boolean = register.get(clzz).nonEmpty
@throws[IllegalArgumentException]
override def readValueFromNBT[A](compound: NBTTagCompound, name: String, dataClass: Class[A]): A = getDataTypeForClass(dataClass).readFromNBT(compound, name)
@throws[IllegalArgumentException]
override def writeValueToNBT[A](compound: NBTTagCompound, name: String, dataClass: Class[A], data: A): Unit = getDataTypeForClass(dataClass).writeToNBT(compound, name, data)
@throws[IllegalArgumentException]
def noRegisteredDataType(clzz: Class[_]) = {
if(classOf[INBTTagable].isAssignableFrom(clzz)) {
throw new IllegalArgumentException("There is no registered DataType for class " + clzz.getName + ", even though there should be one, as it implements INBTTagable. Make sure the DataType is registered before using it.")
} else {
throw new IllegalArgumentException("There is no registered DataType for class " + clzz.getName + ". Make sure to register the DataType before using it.")
}
}
}
|
Seremis/Genetic-Infusion
|
src/main/scala/com/seremis/geninfusion/registry/DataTypeRegistry.scala
|
Scala
|
gpl-3.0
| 1,894 |
package vuescale
package scaladsl
import scala.scalajs.js
import scala.scalajs.js.annotation._
import vuescale.facade.CreateElement
import vuescale.facade.Vue
trait DataOptions[V] extends js.Object {
def data: js.UndefOr[js.Function0[js.Object]] = js.undefined
def props: js.UndefOr[js.Object] = js.undefined
def propsData: js.UndefOr[js.Object] = js.undefined
def computed: js.UndefOr[Handler[_]] = js.undefined
def methods: js.UndefOr[Handler[_]] = js.undefined
  // TODO stronger type
def watch: js.UndefOr[js.Dictionary[js.Any]] = js.undefined
}
trait DomOptions extends js.Object {
def el: js.UndefOr[String] = js.undefined
def template: js.UndefOr[String] = js.undefined
  // TODO stronger type
def render: js.UndefOr[CreateElement] = js.undefined
def renderError: js.UndefOr[js.Function] = js.undefined
}
trait LifecycleOptions[V] extends js.Object {
type LifecycleHook = js.ThisFunction0[V, Unit]
def beforeCreate: js.UndefOr[LifecycleHook] = js.undefined
def created: js.UndefOr[LifecycleHook] = js.undefined
def beforeMount: js.UndefOr[LifecycleHook] = js.undefined
def mounted: js.UndefOr[LifecycleHook] = js.undefined
def beforeUpdate: js.UndefOr[LifecycleHook] = js.undefined
def updated: js.UndefOr[LifecycleHook] = js.undefined
def activated: js.UndefOr[LifecycleHook] = js.undefined
def deactivated: js.UndefOr[LifecycleHook] = js.undefined
def beforeDestroy: js.UndefOr[LifecycleHook] = js.undefined
def destroyed: js.UndefOr[LifecycleHook] = js.undefined
}
trait AssetOptions extends js.Object {
  // TODO clearer type
def directives: js.UndefOr[js.Object] = js.undefined
def filters: js.UndefOr[js.Object] = js.undefined
def components: js.UndefOr[js.Dictionary[js.Any]] = js.undefined
}
trait CompositionOptions extends js.Object {
def parent: js.UndefOr[Vue] = js.undefined
  // TODO give a clearer type
def mixins: js.UndefOr[js.Array[js.Object]] = js.undefined
@JSName("extends")
def `extends`: js.UndefOr[js.Object] = js.undefined
}
trait Component[V]
extends DataOptions[V]
with DomOptions
with LifecycleOptions[V]
with AssetOptions
with CompositionOptions
{
def name: js.UndefOr[String] = js.undefined
def functional: js.UndefOr[Boolean] = js.undefined
// TODO define `ModelOptions`
def model: js.UndefOr[js.Object] = js.undefined
}
object Component {
type ComponentDefs = Iterable[(String, Component[_])]
/** Create new component options.
*/
def apply[V <: Vue](
el: js.UndefOr[String] = js.undefined,
data: js.UndefOr[js.Object] = js.undefined,
props: js.UndefOr[js.Object] = js.undefined,
computed: js.UndefOr[Handler[_]] = js.undefined,
methods: js.UndefOr[Handler[_]] = js.undefined,
template: js.UndefOr[String] = js.undefined,
components: ComponentDefs = Nil,
render: js.UndefOr[js.Function] = js.undefined
): Component[V] =
applyInternal[V](data, components)(
"el" -> el,
"props" -> props,
"computed" -> computed,
"methods" -> methods,
"template" -> template,
"render" -> render
)
private def applyInternal[V <: Vue](
data: js.UndefOr[js.Object],
components: ComponentDefs
)(rest: (String, js.Any)*): Component[V] = {
val wrappedDataFn: js.UndefOr[js.Function] =
data.map(x => { () => x })
val opts: js.Dictionary[js.Any] = js.Dictionary()
if (!js.isUndefined(wrappedDataFn)) {
opts("data") = wrappedDataFn
}
if (components.nonEmpty) {
opts("components") = js.Dictionary(components.toSeq: _*)
}
    rest.filterNot { case (_, opt) => js.isUndefined(opt) }.foreach { case (key, opt) =>
opts(key) = opt
}
opts.asInstanceOf[Component[V]]
}
def builder[V <: Vue](name: String): Builder[V] = new Builder[V](name)
def builder[V <: Vue]: Builder[V] = new Builder[V]()
}
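// A minimal usage sketch (not part of the original source): builds a bare component
// definition via Component.apply. The selector, template and data field are hypothetical
// values chosen for illustration.
object ComponentExample {
  def hello: Component[Vue] =
    Component[Vue](
      el = "#app",
      data = js.Dynamic.literal(message = "Hello").asInstanceOf[js.Object],
      template = "<p>{{ message }}</p>"
    )
}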
|
lettenj61/vuescale
|
vuescale-core/src/main/scala/vuescale/scaladsl/Component.scala
|
Scala
|
mit
| 3,853 |
package fr.laas.fape.anml
import fr.laas.fape.anml.model.AnmlProblem
object Parsing extends App {
val repetitions = 10
val file =
if(args.size == 0)
"resources/test.anml"
else
args(0)
println("Parsing: "+file)
for(i <- 0 until repetitions) {
val start = System.currentTimeMillis()
val pb = new AnmlProblem()
val parsed = System.currentTimeMillis()
pb.extendWithAnmlFile(file)
val extended = System.currentTimeMillis()
println(s"Time parsing: ${parsed - start}")
println(s"Time extending: ${extended - parsed}")
}
}
|
athy/fape
|
anml-parser/src/main/scala/fr/laas/fape/anml/Parsing.scala
|
Scala
|
bsd-2-clause
| 581 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.expressions
import org.apache.flink.table.api._
import org.apache.flink.table.planner.expressions.utils.RowTypeTestBase
import org.apache.flink.table.planner.utils.DateTimeTestUtil.{localDate, localDateTime, localTime => gLocalTime}
import org.junit.Test
class RowTypeTest extends RowTypeTestBase {
@Test
def testRowLiteral(): Unit = {
// primitive literal
testAllApis(
row(1, "foo", true),
"row(1, 'foo', true)",
"ROW(1, 'foo', true)",
"(1, foo, TRUE)")
// special literal
testTableApi(
row(
localDate("1985-04-11"),
gLocalTime("14:15:16"),
localDateTime("1985-04-11 14:15:16"),
BigDecimal("0.1").bigDecimal,
array(1, 2, 3),
map("foo", "bar"),
row(1, true)),
"(1985-04-11, 14:15:16, 1985-04-11 14:15:16, 0.1, [1, 2, 3], {foo=bar}, (1, TRUE))")
testSqlApi(
"ROW(DATE '1985-04-11', TIME '14:15:16', TIMESTAMP '1985-04-11 14:15:16', " +
"CAST(0.1 AS DECIMAL(2, 1)), ARRAY[1, 2, 3], MAP['foo', 'bar'], row(1, true))",
"(1985-04-11, 14:15:16, 1985-04-11 14:15:16, 0.1, [1, 2, 3], {foo=bar}, (1, TRUE))")
testSqlApi(
"ROW(DATE '1985-04-11', TIME '14:15:16', TIMESTAMP '1985-04-11 14:15:16.123456', " +
"CAST(0.1 AS DECIMAL(2, 1)), ARRAY[1, 2, 3], MAP['foo', 'bar'], row(1, true))",
"(1985-04-11, 14:15:16, 1985-04-11 14:15:16.123456, 0.1, [1, 2, 3], {foo=bar}, (1, TRUE))")
testAllApis(
row(1 + 1, 2 * 3, nullOf(DataTypes.STRING())),
"row(1 + 1, 2 * 3, Null(STRING))",
"ROW(1 + 1, 2 * 3, NULLIF(1, 1))",
"(2, 6, NULL)"
)
testSqlApi("(1, 'foo', true)", "(1, foo, TRUE)")
}
@Test
def testRowField(): Unit = {
testAllApis(
row('f0, 'f1),
"row(f0, f1)",
"(f0, f1)",
"(NULL, 1)"
)
testAllApis(
'f2,
"f2",
"f2",
"(2, foo, TRUE)"
)
testAllApis(
row('f2, 'f5),
"row(f2, f5)",
"(f2, f5)",
"((2, foo, TRUE), (foo, NULL))"
)
testAllApis(
'f4,
"f4",
"f4",
"(1984-03-12, 0.00000000, [1, 2, 3])"
)
testAllApis(
row('f1, "foo", true),
"row(f1, 'foo', true)",
"(f1, 'foo',true)",
"(1, foo, TRUE)"
)
}
@Test
def testRowOperations(): Unit = {
testAllApis(
'f5.get("f0"),
"f5.get('f0')",
"f5.f0",
"foo"
)
testAllApis(
'f3.get("f1").get("f2"),
"f3.get('f1').get('f2')",
"f3.f1.f2",
"TRUE"
)
// SQL API for row value constructor follow by field access is not supported
testTableApi(
row('f1, 'f6, 'f2).get("f1").get("f1"),
"row(f1, f6, f2).get('f1').get('f1')",
"NULL"
)
}
@Test
def testUnsupportedCastTableApi(): Unit = {
expectedException.expect(classOf[ValidationException])
testTableApi(
'f5.cast(DataTypes.BIGINT()),
""
)
}
@Test
def testUnsupportedCastSqlApi(): Unit = {
expectedException.expect(classOf[ValidationException])
expectedException.expectMessage("Cast function cannot convert value")
testSqlApi(
"CAST(f5 AS BIGINT)",
""
)
}
}
|
apache/flink
|
flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/expressions/RowTypeTest.scala
|
Scala
|
apache-2.0
| 4,016 |
package org.sparklinedata.hive.hook
import org.junit.{Before, Test}
class BasicTest {
/*
   * to get a plan file: in the Hive debugger, break in Driver.execute and execute:
* org.apache.commons.io.FileUtils.writeStringToFile(new java.io.File("/tmp/q1.plan"), Utilities.serializeObject(this.plan))
*/
@Test
def testQ1() : Unit = {
val ins = getClass.getClassLoader.getResourceAsStream("sampleplans/q1.plan")
val qp = HivePlanUtils.readQueryPlan(ins)
val opNode = HivePlanUtils.querPlanToOperatorGraph(qp)
println(opNode.toStringTree())
}
@Test
def testQ27() : Unit = {
val ins = getClass.getClassLoader.getResourceAsStream("sampleplans/q27.plan")
val qp = HivePlanUtils.readQueryPlan(ins)
val opNode = HivePlanUtils.querPlanToOperatorGraph(qp)
println(opNode.toStringTree())
}
}
|
hbutani/hive-lineage
|
hivehook/src/test/scala/org/sparklinedata/hive/hook/BasicTest.scala
|
Scala
|
apache-2.0
| 839 |
package org.qi4j.sample.scala
import org.qi4j.library.constraints.annotation.MaxLength
trait HelloWorldMixin2
{
def sayHello(@MaxLength(10) name: String ): String = "Hello " + name
}
|
joobn72/qi4j-sdk
|
libraries/lang-scala/src/test/scala/org/qi4j/sample/scala/HelloWorldMixin2.scala
|
Scala
|
apache-2.0
| 186 |
package gsd.linux.cnf
import gsd.linux._
import org.kiama.rewriting.Rewriter._
import org.kiama.rewriting.Strategy
import collection.mutable.HashMap
object IdMap {
def apply(es: Iterable[BExpr]): Map[String, Int] = {
val map = new HashMap[String, Int]
es foreach { e =>
e.identifiers filter { !map.contains(_) } foreach { id =>
map += id -> (map.size + 1)
}
}
Map() ++ map
}
}
object CNFBuilder {
val sDistributeRule: Strategy = oncetd {
rule[BExpr] {
case BOr(BAnd(x,y),z) => BAnd(BOr(x,z), BOr(y,z))
case BOr(x,BAnd(y,z)) => BAnd(BOr(x,y), BOr(x,z))
}
}
val sIffRule = everywheretd {
rule[BExpr] {
case BIff(x,y) => (!x | y) & (!y | x)
}
}
val sImpliesRule = everywheretd {
rule[BExpr] {
case BImplies(x,y) => !x | y
}
}
/**
* Run until we reach a fixpoint.
*/
def distribute(e: BExpr): List[BExpr] = {
val result = rewrite(sDistributeRule)(e)
if (result == e) result.splitConjunctions
else result.splitConjunctions flatMap distribute
}
/**
* @param idMap Maps identifiers in the expression to an integer
*/
def toClause(e: BExpr, idMap: collection.Map[String, Int]): Clause = e match {
case BNot(BId(v)) => List(-idMap(v))
case BId(v) => List(idMap(v))
case BOr(x, y) => toClause(x, idMap) ::: toClause(y, idMap)
case _ => sys.error("Wrong format. Expression is not a clause: " + e)
}
/**
* @param idMap Maps identifiers in the expression to an integer
*/
def toCNF(e: BExpr, idMap: collection.Map[String, Int]) =
rewrite(sIffRule <* sImpliesRule)(e)
.simplify
.splitConjunctions
.filter { _ != BTrue }
.flatMap { distribute }
.map { toClause(_, idMap) }
}
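// A minimal usage sketch (not part of the original source): wires IdMap and CNFBuilder
// together on a hypothetical expression; the variable names "A", "B", "C" are illustrative.
object CNFBuilderExample {
  def main(args: Array[String]): Unit = {
    val expr: BExpr = BImplies(BId("A"), BAnd(BId("B"), BId("C")))
    val idMap = IdMap(List(expr))           // e.g. Map("A" -> 1, "B" -> 2, "C" -> 3)
    val cnf = CNFBuilder.toCNF(expr, idMap) // each clause is a list of signed literals
    cnf foreach println
  }
}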
|
matachi/linux-variability-analysis-tools.fm-translation
|
src/main/scala/gsd/linux/cnf/CNF.scala
|
Scala
|
gpl-3.0
| 1,778 |
/**
* Copyright (C) 2020 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.xml.dom
import org.orbeon.dom
// TODO: Rename this.
object Converter {
implicit class ScalaElemConverterOps(private val e: scala.xml.Elem) extends AnyVal {
def toDocument: dom.Document = IOSupport.readOrbeonDom(e.toString)
}
}
|
orbeon/orbeon-forms
|
src/main/scala/org/orbeon/oxf/xml/dom/Converter.scala
|
Scala
|
lgpl-2.1
| 909 |
package com.faacets.qalg
package syntax
package algos
trait AllSyntax
extends CatSyntax
with DeterminantSyntax
with FactorySyntax
with GramSchmidtSyntax
with KronSyntax
with LUSyntax
with PrimeSyntax
with RankSyntax
with RrefSyntax
with ShiftSyntax
with TraceSyntax
|
denisrosset/qalg
|
core/src/main/scala/qalg/syntax/algos/AllSyntax.scala
|
Scala
|
mit
| 311 |
package mesosphere.marathon.api
import javax.ws.rs.WebApplicationException
import javax.ws.rs.core.Response.Status
import javax.ws.rs.core.{ MediaType, Response }
import javax.ws.rs.ext.{ ExceptionMapper, Provider }
import com.fasterxml.jackson.core.JsonParseException
import com.fasterxml.jackson.databind.JsonMappingException
import com.google.inject.Singleton
import com.sun.jersey.api.NotFoundException
import mesosphere.marathon.api.v2.Validation._
import mesosphere.marathon.{ Exception => _, _ }
import org.slf4j.LoggerFactory
import play.api.libs.json.{ JsResultException, JsValue, Json }
import scala.concurrent.TimeoutException
@Provider
@Singleton
class MarathonExceptionMapper extends ExceptionMapper[Exception] {
private[this] val log = LoggerFactory.getLogger(getClass.getName)
def toResponse(exception: Exception): Response = {
// WebApplicationException are things like invalid requests etc, no need to log a stack trace
exception match {
case e: WebApplicationException =>
log.info("mapping exception to status code", exception)
case _ =>
log.warn("mapping exception to status code", exception)
}
Response
.status(statusCode(exception))
.entity(Json.stringify(entity(exception)))
.`type`(MediaType.APPLICATION_JSON)
.build
}
// TODO: Use one of the many enums that we already have.
private def statusCode(exception: Exception): Int = exception match {
case e: TimeoutException => 503 // Service Unavailable
case e: UnknownAppException => 404 // Not found
case e: UnknownGroupException => 404 // Not found
case e: AppLockedException => 409 // Conflict
case e: ConflictingChangeException => 409 // Conflict
case e: BadRequestException => 400 // Bad Request
case e: JsonParseException => 400 // Bad Request
case e: JsResultException => 400 // Bad Request
case e: JsonMappingException => 400 // Bad Request
case e: IllegalArgumentException => 422 // Unprocessable entity
case e: ValidationFailedException => 422 // Unprocessable Entity
case e: WebApplicationException => e.getResponse.getStatus
case _ => 500 // Internal server error
}
private def entity(exception: Exception): JsValue = exception match {
case e: NotFoundException =>
Json.obj("message" -> s"URI not found: ${e.getNotFoundUri.getRawPath}")
case e: AppLockedException =>
Json.obj(
"message" -> e.getMessage,
"deployments" -> e.deploymentIds.map(id => Json.obj("id" -> id))
)
case e: JsonParseException =>
Json.obj(
"message" -> "Invalid JSON",
"details" -> e.getOriginalMessage
)
case e: JsonMappingException =>
Json.obj(
"message" -> "Please specify data in JSON format",
"details" -> e.getMessage
)
case e: JsResultException =>
val errors = e.errors.map {
case (path, errs) => Json.obj("path" -> path.toString(), "errors" -> errs.map(_.message))
}
Json.obj(
"message" -> "Invalid JSON",
"details" -> errors
)
case ValidationFailedException(obj, failure) => Json.toJson(failure)
case e: WebApplicationException =>
Option(Status.fromStatusCode(e.getResponse.getStatus)).fold {
Json.obj("message" -> e.getMessage)
} { status =>
Json.obj("message" -> status.getReasonPhrase)
}
case _ =>
Json.obj("message" -> exception.getMessage)
}
}
|
timcharper/marathon
|
src/main/scala/mesosphere/marathon/api/MarathonExceptionMapper.scala
|
Scala
|
apache-2.0
| 3,467 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package iht.views.application.tnrb
import iht.views.ViewTestHelper
import iht.views.html.application.tnrb.tnrb_overview_table_row
import uk.gov.hmrc.play.bootstrap.frontend.controller.FrontendController
import iht.config.AppConfig
import play.api.mvc.Call
import play.twirl.api.Html
import iht.constants.Constants._
class TnrbOverviewTableRowViewTest extends ViewTestHelper {
lazy val id = "home-in-uk"
lazy val questionText = "Sample question"
lazy val questionScreenReaderText = "Sample screen reader"
lazy val questionCategory = "questionAnswer"
lazy val link = iht.controllers.application.tnrb.routes.PermanentHomeController.onPageLoad()
lazy val answerValue = "Sample value"
lazy val linkID = appConfig.TnrbSpousePermanentHomeInUKID
def tnrbOverviewTableRow(id: String = "home-in-uk",
questionText:Html = Html("Sample question"),
questionScreenReaderText: String = "Sample screen reader",
questionCategory:String = "questionAnswer",
answerValue:String = "Sample value",
answerValueFormatted:Option[Html] = None,
link:Option[Call] = None,
linkScreenReader:String = "",
linkID: String = appConfig.TnrbSpousePermanentHomeInUKID
) = {
lazy val tnrbOverviewTableRowView: tnrb_overview_table_row = app.injector.instanceOf[tnrb_overview_table_row]
implicit val request = createFakeRequest()
val view = tnrbOverviewTableRowView(id,
questionText,
questionScreenReaderText,
questionCategory,
answerValue,
answerValueFormatted,
      link,
linkScreenReader,
linkID
).toString
asDocument(view)
}
"TnrbOverviewTableRow" must {
"have no message keys in html" in {
noMessageKeysShouldBePresent(tnrbOverviewTableRow().toString)
}
"have the correct id" in {
val view = tnrbOverviewTableRow()
assertRenderedById(view, id)
}
"have the correct question text" in {
val view = tnrbOverviewTableRow()
assertRenderedById(view, s"$id-text")
}
"show the value if it has" in {
val view = tnrbOverviewTableRow()
val value = view.getElementById(s"$id-value")
value.text mustBe answerValue
}
"not show the value when there is not" in {
val view = tnrbOverviewTableRow(answerValue = "")
val value = view.getElementById(s"$id-value")
value.text mustBe empty
}
"show the correct link with text" in {
val view = tnrbOverviewTableRow(link = Some(link))
val questionLink = view.getElementById(s"$linkID")
questionLink.attr("href") mustBe link.url
questionLink.text() mustBe messagesApi("iht.change")
}
"show the correct question category when answer value is empty" in {
val view = tnrbOverviewTableRow(answerValue = "", link = Some(link))
val questionLink = view.getElementById(s"$linkID")
questionLink.text() mustBe messagesApi("site.link.giveAnswer")
}
}
}
|
hmrc/iht-frontend
|
test/iht/views/application/tnrb/TnrbOverviewTableRowViewTest.scala
|
Scala
|
apache-2.0
| 3,795 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.avro
import java.io.{FileNotFoundException, IOException}
import java.util.Locale
import scala.collection.JavaConverters._
import org.apache.avro.Schema
import org.apache.avro.file.{DataFileReader, FileReader}
import org.apache.avro.file.DataFileConstants.{BZIP2_CODEC, DEFLATE_CODEC, SNAPPY_CODEC, XZ_CODEC, ZSTANDARD_CODEC}
import org.apache.avro.generic.{GenericDatumReader, GenericRecord}
import org.apache.avro.mapred.{AvroOutputFormat, FsInput}
import org.apache.avro.mapreduce.AvroJob
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileStatus
import org.apache.hadoop.mapreduce.Job
import org.apache.spark.SparkException
import org.apache.spark.internal.Logging
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.avro.AvroOptions.ignoreExtensionKey
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.execution.datasources.OutputWriterFactory
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
import org.apache.spark.util.Utils
private[sql] object AvroUtils extends Logging {
def inferSchema(
spark: SparkSession,
options: Map[String, String],
files: Seq[FileStatus]): Option[StructType] = {
val conf = spark.sessionState.newHadoopConfWithOptions(options)
val parsedOptions = new AvroOptions(options, conf)
if (parsedOptions.parameters.contains(ignoreExtensionKey)) {
logWarning(s"Option $ignoreExtensionKey is deprecated. Please use the " +
"general data source option pathGlobFilter for filtering file names.")
}
    // Users can specify an optional Avro JSON schema.
val avroSchema = parsedOptions.schema
.getOrElse {
inferAvroSchemaFromFiles(files, conf, parsedOptions.ignoreExtension,
spark.sessionState.conf.ignoreCorruptFiles)
}
SchemaConverters.toSqlType(avroSchema).dataType match {
case t: StructType => Some(t)
case _ => throw new RuntimeException(
s"""Avro schema cannot be converted to a Spark SQL StructType:
|
|${avroSchema.toString(true)}
|""".stripMargin)
}
}
def supportsDataType(dataType: DataType): Boolean = dataType match {
case _: AtomicType => true
case st: StructType => st.forall { f => supportsDataType(f.dataType) }
case ArrayType(elementType, _) => supportsDataType(elementType)
case MapType(keyType, valueType, _) =>
supportsDataType(keyType) && supportsDataType(valueType)
case udt: UserDefinedType[_] => supportsDataType(udt.sqlType)
case _: NullType => true
case _ => false
}
def prepareWrite(
sqlConf: SQLConf,
job: Job,
options: Map[String, String],
dataSchema: StructType): OutputWriterFactory = {
val parsedOptions = new AvroOptions(options, job.getConfiguration)
val outputAvroSchema: Schema = parsedOptions.schema
.getOrElse(SchemaConverters.toAvroType(dataSchema, nullable = false,
parsedOptions.recordName, parsedOptions.recordNamespace))
AvroJob.setOutputKeySchema(job, outputAvroSchema)
if (parsedOptions.compression == "uncompressed") {
job.getConfiguration.setBoolean("mapred.output.compress", false)
} else {
job.getConfiguration.setBoolean("mapred.output.compress", true)
logInfo(s"Compressing Avro output using the ${parsedOptions.compression} codec")
val codec = parsedOptions.compression match {
case DEFLATE_CODEC =>
val deflateLevel = sqlConf.avroDeflateLevel
logInfo(s"Avro compression level $deflateLevel will be used for $DEFLATE_CODEC codec.")
job.getConfiguration.setInt(AvroOutputFormat.DEFLATE_LEVEL_KEY, deflateLevel)
DEFLATE_CODEC
case codec @ (SNAPPY_CODEC | BZIP2_CODEC | XZ_CODEC | ZSTANDARD_CODEC) => codec
case unknown => throw new IllegalArgumentException(s"Invalid compression codec: $unknown")
}
job.getConfiguration.set(AvroJob.CONF_OUTPUT_CODEC, codec)
}
new AvroOutputWriterFactory(dataSchema,
outputAvroSchema.toString,
parsedOptions.positionalFieldMatching)
}
private def inferAvroSchemaFromFiles(
files: Seq[FileStatus],
conf: Configuration,
ignoreExtension: Boolean,
ignoreCorruptFiles: Boolean): Schema = {
    // Schema evolution is not supported yet. Here we only pick the first readable sample
    // file to figure out the schema of the whole dataset.
val avroReader = files.iterator.map { f =>
val path = f.getPath
if (!ignoreExtension && !path.getName.endsWith(".avro")) {
None
} else {
Utils.tryWithResource {
new FsInput(path, conf)
} { in =>
try {
Some(DataFileReader.openReader(in, new GenericDatumReader[GenericRecord]()))
} catch {
case e: IOException =>
if (ignoreCorruptFiles) {
logWarning(s"Skipped the footer in the corrupted file: $path", e)
None
} else {
throw new SparkException(s"Could not read file: $path", e)
}
}
}
}
}.collectFirst {
case Some(reader) => reader
}
avroReader match {
case Some(reader) =>
try {
reader.getSchema
} finally {
reader.close()
}
case None =>
throw new FileNotFoundException(
"No Avro files found. If files don't have .avro extension, set ignoreExtension to true")
}
}
// The trait provides iterator-like interface for reading records from an Avro file,
// deserializing and returning them as internal rows.
trait RowReader {
protected val fileReader: FileReader[GenericRecord]
protected val deserializer: AvroDeserializer
protected val stopPosition: Long
private[this] var completed = false
private[this] var currentRow: Option[InternalRow] = None
def hasNextRow: Boolean = {
while (!completed && currentRow.isEmpty) {
val r = fileReader.hasNext && !fileReader.pastSync(stopPosition)
if (!r) {
fileReader.close()
completed = true
currentRow = None
} else {
val record = fileReader.next()
// the row must be deserialized in hasNextRow, because AvroDeserializer#deserialize
// potentially filters rows
currentRow = deserializer.deserialize(record).asInstanceOf[Option[InternalRow]]
}
}
currentRow.isDefined
}
def nextRow: InternalRow = {
if (currentRow.isEmpty) {
hasNextRow
}
val returnRow = currentRow
currentRow = None // free up hasNextRow to consume more Avro records, if not exhausted
returnRow.getOrElse {
throw new NoSuchElementException("next on empty iterator")
}
}
}
/** Wrapper for a pair of matched fields, one Catalyst and one corresponding Avro field. */
case class AvroMatchedField(
catalystField: StructField,
catalystPosition: Int,
avroField: Schema.Field)
/**
* Helper class to perform field lookup/matching on Avro schemas.
*
* This will match `avroSchema` against `catalystSchema`, attempting to find a matching field in
* the Avro schema for each field in the Catalyst schema and vice-versa, respecting settings for
* case sensitivity. The match results can be accessed using the getter methods.
*
* @param avroSchema The schema in which to search for fields. Must be of type RECORD.
* @param catalystSchema The Catalyst schema to use for matching.
* @param avroPath The seq of parent field names leading to `avroSchema`.
* @param catalystPath The seq of parent field names leading to `catalystSchema`.
* @param positionalFieldMatch If true, perform field matching in a positional fashion
* (structural comparison between schemas, ignoring names);
* otherwise, perform field matching using field names.
*/
class AvroSchemaHelper(
avroSchema: Schema,
catalystSchema: StructType,
avroPath: Seq[String],
catalystPath: Seq[String],
positionalFieldMatch: Boolean) {
if (avroSchema.getType != Schema.Type.RECORD) {
throw new IncompatibleSchemaException(
s"Attempting to treat ${avroSchema.getName} as a RECORD, but it was: ${avroSchema.getType}")
}
private[this] val avroFieldArray = avroSchema.getFields.asScala.toArray
private[this] val fieldMap = avroSchema.getFields.asScala
.groupBy(_.name.toLowerCase(Locale.ROOT))
.mapValues(_.toSeq) // toSeq needed for scala 2.13
/** The fields which have matching equivalents in both Avro and Catalyst schemas. */
val matchedFields: Seq[AvroMatchedField] = catalystSchema.zipWithIndex.flatMap {
case (sqlField, sqlPos) =>
getAvroField(sqlField.name, sqlPos).map(AvroMatchedField(sqlField, sqlPos, _))
}
/**
* Validate that there are no Catalyst fields which don't have a matching Avro field, throwing
* [[IncompatibleSchemaException]] if such extra fields are found. If `ignoreNullable` is false,
* consider nullable Catalyst fields to be eligible to be an extra field; otherwise,
* ignore nullable Catalyst fields when checking for extras.
*/
def validateNoExtraCatalystFields(ignoreNullable: Boolean): Unit =
catalystSchema.zipWithIndex.foreach { case (sqlField, sqlPos) =>
if (getAvroField(sqlField.name, sqlPos).isEmpty &&
(!ignoreNullable || !sqlField.nullable)) {
if (positionalFieldMatch) {
throw new IncompatibleSchemaException("Cannot find field at position " +
s"$sqlPos of ${toFieldStr(avroPath)} from Avro schema (using positional matching)")
} else {
throw new IncompatibleSchemaException(
s"Cannot find ${toFieldStr(catalystPath :+ sqlField.name)} in Avro schema")
}
}
}
/**
* Validate that there are no Avro fields which don't have a matching Catalyst field, throwing
* [[IncompatibleSchemaException]] if such extra fields are found.
*/
def validateNoExtraAvroFields(): Unit = {
(avroFieldArray.toSet -- matchedFields.map(_.avroField)).foreach { extraField =>
if (positionalFieldMatch) {
throw new IncompatibleSchemaException(s"Found field '${extraField.name()}' at position " +
s"${extraField.pos()} of ${toFieldStr(avroPath)} from Avro schema but there is no " +
s"match in the SQL schema at ${toFieldStr(catalystPath)} (using positional matching)")
} else {
throw new IncompatibleSchemaException(
s"Found ${toFieldStr(avroPath :+ extraField.name())} in Avro schema but there is no " +
"match in the SQL schema")
}
}
}
/**
* Extract a single field from the contained avro schema which has the desired field name,
* performing the matching with proper case sensitivity according to SQLConf.resolver.
*
* @param name The name of the field to search for.
* @return `Some(match)` if a matching Avro field is found, otherwise `None`.
*/
private[avro] def getFieldByName(name: String): Option[Schema.Field] = {
// get candidates, ignoring case of field name
val candidates = fieldMap.getOrElse(name.toLowerCase(Locale.ROOT), Seq.empty)
// search candidates, taking into account case sensitivity settings
candidates.filter(f => SQLConf.get.resolver(f.name(), name)) match {
case Seq(avroField) => Some(avroField)
case Seq() => None
case matches => throw new IncompatibleSchemaException(s"Searching for '$name' in Avro " +
s"schema at ${toFieldStr(avroPath)} gave ${matches.size} matches. Candidates: " +
matches.map(_.name()).mkString("[", ", ", "]")
)
}
}
/** Get the Avro field corresponding to the provided Catalyst field name/position, if any. */
def getAvroField(fieldName: String, catalystPos: Int): Option[Schema.Field] = {
if (positionalFieldMatch) {
avroFieldArray.lift(catalystPos)
} else {
getFieldByName(fieldName)
}
}
}
/**
* Convert a sequence of hierarchical field names (like `Seq(foo, bar)`) into a human-readable
* string representing the field, like "field 'foo.bar'". If `names` is empty, the string
* "top-level record" is returned.
*/
private[avro] def toFieldStr(names: Seq[String]): String = names match {
case Seq() => "top-level record"
case n => s"field '${n.mkString(".")}'"
}
}
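// A minimal usage sketch (not part of the original source): matches a one-field Avro
// record against a one-field Catalyst schema with AvroSchemaHelper. The record and
// field names are hypothetical.
private[sql] object AvroUtilsExample {
  import org.apache.avro.SchemaBuilder

  def matchedFieldNames(): Seq[String] = {
    val avroSchema = SchemaBuilder.record("example").fields().requiredInt("id").endRecord()
    val catalystSchema = StructType(Seq(StructField("id", IntegerType)))
    val helper = new AvroUtils.AvroSchemaHelper(
      avroSchema, catalystSchema, avroPath = Nil, catalystPath = Nil, positionalFieldMatch = false)
    helper.matchedFields.map(_.catalystField.name)
  }
}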
|
shaneknapp/spark
|
external/avro/src/main/scala/org/apache/spark/sql/avro/AvroUtils.scala
|
Scala
|
apache-2.0
| 13,500 |
/**
* (c) Copyright 2013 WibiData, Inc.
*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kiji.schema.shell.ddl
import scala.collection.JavaConversions._
import org.kiji.annotations.ApiAudience
import org.kiji.annotations.ApiStability
import org.kiji.annotations.Inheritance
import org.kiji.schema.KijiURI
import org.kiji.schema.avro.ColumnDesc
import org.kiji.schema.avro.FamilyDesc
import org.kiji.schema.avro.LocalityGroupDesc
import org.kiji.schema.avro.TableLayoutDesc
import org.kiji.schema.layout.KijiTableLayout
import org.kiji.schema.shell.DDLException
import org.kiji.schema.shell.Environment
import org.kiji.schema.shell.TableNotFoundException
import org.kiji.schema.shell.spi.EnvironmentPlugin
import org.kiji.schema.shell.spi.ParserPluginFactory
/**
* Abstract base class for DDL command implementations.
*/
@ApiAudience.Framework
@ApiStability.Evolving
@Inheritance.Extensible
abstract class DDLCommand {
/**
* Get the environment in which the command should be executed.
*
* <p>This must return the same Environment for the lifetime of a `DDLCommand`.
*
* @return the environment in which the command is executed.
*/
def env(): Environment
/**
* Method called by the runtime to execute this parsed command.
* @return the environment object to use in subsequent commands.
*/
def exec(): Environment
/** Return the Kiji instance name being operated on. */
final def getKijiURI(): KijiURI = {
env.instanceURI
}
/**
* Print the supplied string to the output with a newline. Output is typically
* stdout, but can be redirected e.g. for testing.
* @param s the string to emit.
*/
final protected def echo(s: String): Unit = {
env.printer.println(s)
}
/**
* Print the supplied string to the output with no trailing newline. Output is typically
* stdout, but can be redirected e.g. for testing.
* @param s the string to emit.
*/
final protected def echoNoNL(s: String): Unit = {
env.printer.print(s)
}
/**
* For interactive users, print the specified prompt message and ensure that the
* user returns an affirmative response. This throws DDLException if they don't.
* A non-interactive environment will silently affirm.
*
* @param message to display to the user in an interactive terminal.
* @throws DDLException if the user responds 'no'.
*/
final protected def checkConfirmationPrompt(message: String): Unit = {
if (!env.isInteractive) {
return // non-interactive user doesn't get a prompt.
}
echo(message)
val maybeInput = env.inputSource.readLine("y/N> ")
maybeInput match {
case None => { throw new DDLException("User canceled operation.") /* out of input. */ }
case Some(input) => {
if (input.toUpperCase == "Y" || input.toUpperCase == "YES") {
return // Got confirmation from the user.
} else {
throw new DDLException("User canceled operation. (Please respond 'y' or 'n'.)")
}
}
}
}
// For cases where DDLCommand instances have been created by a ParserPlugin,
// keep a reference to the ParserPluginFactory. If it also extends EnvironmentPlugin,
// the command can use this in getExtensionState() and updateExtensionState()
// to access and update its additional environment data.
private var mCurrentExtensionModule: Option[ParserPluginFactory] = None
/**
* Returns the environment extension associated with the current extension module.
*
* If this DDLCommand is the output of a ParserPlugin whose ParserPluginFactory
* is also an EnvironmentPlugin, return the environment extension data associated
* with the plugin. If this is not from a ParserPlugin, or the plugin does not extend
* the environment, throws DDLException.
*
* @return the current plugin's environment extension data.
* @throws DDLException if this is not running in a plugin, or the plugin does not
* extend EnvironmentPlugin.
*
*/
final protected def getExtensionState[T](): T = {
mCurrentExtensionModule match {
case None => throw new DDLException("This DDLCommand is not being run from an extension")
case Some(pluginFactory) => {
if (pluginFactory.isInstanceOf[EnvironmentPlugin[_]]) {
try {
// This plugin has environment data; return it, typecasting to the user's
// specified state type.
return env.extensionMapping(pluginFactory.getName()).asInstanceOf[T]
} catch {
case nsee: NoSuchElementException =>
throw new DDLException("No extension data associated with plugin "
+ pluginFactory.getName())
}
} else {
throw new DDLException("The module " + pluginFactory.getName()
+ " does not extend EnvironmentPlugin")
}
}
}
}
/**
* Updates the environment extension data associated with the current plugin.
*
* This will return a new Environment object that contains the updated state information
* for the current module.
*
* @return a new Environment containing the updated extension state for the plugin associated
* with this DDLCommand.
* @throws DDLException if this is not being run from a plugin, or the plugin does not extend
* EnvironmentPlugin.
*/
final protected def setExtensionState[T](newState: T): Environment = {
mCurrentExtensionModule match {
case None => throw new DDLException("This DDLCommand is not being run from an extension")
case Some(pluginFactory) => {
if (pluginFactory.isInstanceOf[EnvironmentPlugin[_]]) {
return env.updateExtension(pluginFactory.asInstanceOf[EnvironmentPlugin[T]], newState)
} else {
throw new DDLException("The module " + pluginFactory.getName()
+ " does not extend EnvironmentPlugin")
}
}
}
}
/**
* A method used by the parser to tell a newly-created DDLCommand which plugin, if any,
* generated the DDLCommand instance.
*
* The main DDL parser, or a `ParserPlugin`, can both create DDLCommand instances.
* If this was generated by a ParserPlugin, the associated `ParserPluginFactory` is
* recorded here, so the DDLCommand instance has access to the extension state of
* this plugin.
*
* @param plugin that created this DDLCommand instance.
*/
final private[shell] def setCurrentPlugin(plugin: ParserPluginFactory): Unit = {
mCurrentExtensionModule = Some(plugin)
}
}
|
kijiproject/kiji-schema-shell
|
src/main/scala/org/kiji/schema/shell/ddl/DDLCommand.scala
|
Scala
|
apache-2.0
| 7,179 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.codegen.calls
import org.apache.flink.table.planner.codegen.GenerateUtils.generateNonNullField
import org.apache.flink.table.planner.codegen.{CodeGeneratorContext, GeneratedExpression}
import org.apache.flink.table.types.logical.LogicalType
import org.apache.flink.table.types.logical.LogicalTypeRoot.{DATE, TIMESTAMP_WITHOUT_TIME_ZONE,TIMESTAMP_WITH_LOCAL_TIME_ZONE, TIME_WITHOUT_TIME_ZONE}
/**
* Generates function call to determine current time point (as date/time/timestamp) in
* local timezone or not.
*/
class CurrentTimePointCallGen(local: Boolean, isStreaming: Boolean) extends CallGenerator {
override def generate(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression],
returnType: LogicalType): GeneratedExpression = returnType.getTypeRoot match {
// LOCALTIME in Streaming mode
case TIME_WITHOUT_TIME_ZONE if local && isStreaming =>
val time = ctx.addReusableRecordLevelLocalTime()
generateNonNullField(returnType, time)
// LOCALTIME in Batch mode
case TIME_WITHOUT_TIME_ZONE if local && !isStreaming =>
val time = ctx.addReusableQueryLevelLocalTime()
generateNonNullField(returnType, time)
// LOCALTIMESTAMP in Streaming mode
case TIMESTAMP_WITHOUT_TIME_ZONE if local && isStreaming =>
val timestamp = ctx.addReusableRecordLevelLocalDateTime()
generateNonNullField(returnType, timestamp)
// LOCALTIMESTAMP in Batch mode
case TIMESTAMP_WITHOUT_TIME_ZONE if local && !isStreaming =>
val timestamp = ctx.addReusableQueryLevelLocalDateTime()
generateNonNullField(returnType, timestamp)
// CURRENT_DATE in Streaming mode
case DATE if isStreaming =>
val date = ctx.addReusableRecordLevelCurrentDate()
generateNonNullField(returnType, date)
// CURRENT_DATE in Batch mode
case DATE if !isStreaming =>
val date = ctx.addReusableQueryLevelCurrentDate()
generateNonNullField(returnType, date)
// CURRENT_TIME in Streaming mode
case TIME_WITHOUT_TIME_ZONE if isStreaming =>
val time = ctx.addReusableRecordLevelLocalTime()
generateNonNullField(returnType, time)
// CURRENT_TIME in Batch mode
case TIME_WITHOUT_TIME_ZONE if !isStreaming =>
val time = ctx.addReusableQueryLevelLocalTime()
generateNonNullField(returnType, time)
// CURRENT_TIMESTAMP in Streaming mode
case TIMESTAMP_WITH_LOCAL_TIME_ZONE if isStreaming =>
val timestamp = ctx.addReusableRecordLevelCurrentTimestamp()
generateNonNullField(returnType, timestamp)
// CURRENT_TIMESTAMP in Batch mode
case TIMESTAMP_WITH_LOCAL_TIME_ZONE if !isStreaming =>
val timestamp = ctx.addReusableQueryLevelCurrentTimestamp()
generateNonNullField(returnType, timestamp)
}
}
|
apache/flink
|
flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/calls/CurrentTimePointCallGen.scala
|
Scala
|
apache-2.0
| 3,618 |
/*                     __                                               *\
**     ________ ___   / /  ___      __ ____  Scala.js API               **
**    / __/ __// _ | / /  / _ | __ / // __/  (c) 2013, LAMP/EPFL        **
**  __\ \/ /__/ __ |/ /__/ __ |/_// /_\ \    http://scala-lang.org/     **
** /____/\___/_/ |_/____/_/ | |__/ /____/                               **
**                          |/____/                                     **
\*                                                                      */
/**
* All doc-comments marked as "MDN" are by Mozilla Contributors,
* distributed under the Creative Commons Attribution-ShareAlike license from
* https://developer.mozilla.org/en-US/docs/Web/Reference/API
*/
package scala.scalajs.js
import annotation.JSBracketAccess
/** Dictionary "view" of a JavaScript value. */
sealed trait Dictionary[A] extends Object {
/** Reads a field of this object by its name. */
@JSBracketAccess
def apply(key: String): A
/** Writes a field of this object by its name. */
@JSBracketAccess
def update(key: String, value: A): Unit
/** Deletes a property of this object by its name.
* The property must be configurable.
* This method is equivalent to the "delete" keyword in JavaScript.
* @return true on success (the property did not exist or was configurable),
* false otherwise
*/
def delete(key: String): Boolean = sys.error("stub")
}
/** Factory for [[Dictionary]] instances. */
object Dictionary {
/** Returns a new empty dictionary */
def empty[A]: Dictionary[A] = (new Object).asInstanceOf[Dictionary[A]]
def apply[A](properties: (String, A)*): Dictionary[A] = {
val result = empty[A]
for ((key, value) <- properties)
result(key) = value
result
}
/** Returns the names of all the enumerable properties of this object. */
@deprecated("Use js.Object.properties(obj) instead", "0.5.0")
def propertiesOf(obj: Any): Array[String] =
Object.properties(obj.asInstanceOf[Object])
}
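// A minimal usage sketch (not part of the original source): exercises apply, update and
// delete on a Dictionary; the keys and values are illustrative only.
object DictionaryExample {
  def demo(): String = {
    val dict = Dictionary("en" -> "hello", "fr" -> "bonjour")
    dict("de") = "hallo"            // update: dict["de"] = "hallo" in JavaScript
    dict.delete("fr")               // delete keyword semantics; returns true on success
    dict("en")                      // apply: dict["en"]
  }
}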
|
swhgoon/scala-js
|
library/src/main/scala/scala/scalajs/js/Dictionary.scala
|
Scala
|
bsd-3-clause
| 2,025 |
package ammonite
import ammonite.runtime.{History, Storage}
import ammonite.interp.Interpreter
import ammonite.main.Defaults
import ammonite.ops._
import ammonite.runtime.tools.DependencyConstructor._
import ammonite.TestUtils._
import utest._
object CachingTests extends TestSuite{
val tests = TestSuite{
println("ScriptTests")
val scriptPath = pwd/'amm/'src/'test/'resources/'scripts
val resourcesPath = pwd/'amm/'src/'test/'resources
val tempDir = tmp.dir(prefix="ammonite-tester")
'noAutoIncrementWrapper{
val storage = Storage.InMemory()
val interp = createTestInterp(storage)
interp.interpApi.load.module(scriptPath/"ThreeBlocks.sc")
try{
Class.forName("cmd0")
assert(false)
} catch {
case e: ClassNotFoundException => assert(true)
case e: Exception => assert(false)
}
}
'blocks{
def check(fileName: String, expected: Int) = {
val storage = Storage.InMemory()
val interp = createTestInterp(storage)
val n0 = storage.compileCache.size
assert(n0 == 1) // customLolz predef
interp.interpApi.load.module(scriptPath/fileName)
val n = storage.compileCache.size
assert(n == expected)
}
* - check("OneBlock.sc", 2)
* - check("TwoBlocks.sc", 3)
* - check("ThreeBlocks.sc", 4)
}
'processModuleCaching{
def check(script: RelPath){
val storage = new Storage.Folder(tempDir)
val interp1 = createTestInterp(
storage,
Defaults.predefString
)
interp1.interpApi.load.module(resourcesPath/script)
assert(interp1.compiler != null)
val interp2 = createTestInterp(
storage,
Defaults.predefString
)
assert(interp2.compiler == null)
interp2.interpApi.load.module(resourcesPath/script)
assert(interp2.compiler == null)
}
'testOne - check('scriptLevelCaching/"scriptTwo.sc")
'testTwo - check('scriptLevelCaching/"scriptOne.sc")
'testThree - check('scriptLevelCaching/"QuickSort.sc")
'testLoadModule - check('scriptLevelCaching/"testLoadModule.sc")
'testFileImport - check('scriptLevelCaching/"testFileImport.sc")
'testIvyImport - check('scriptLevelCaching/"ivyCacheTest.sc")
'testIvyResource- {
if (!scala2_12) check('scriptLevelCaching/"ivyCachedResourceTest.sc")
}
}
'testRunTimeExceptionForCachedScripts{
val storage = new Storage.Folder(tempDir)
val numFile = pwd/'amm/'target/'test/'resources/'scriptLevelCaching/"num.value"
rm(numFile)
write(numFile, "1")
val interp1 = createTestInterp(
storage,
Defaults.predefString
)
interp1.interpApi.load.module(resourcesPath/'scriptLevelCaching/"runTimeExceptions.sc")
val interp2 = createTestInterp(
storage,
Defaults.predefString
)
val res = intercept[java.lang.ArithmeticException]{
interp2.interpApi.load.module(
resourcesPath/'scriptLevelCaching/"runTimeExceptions.sc"
)
}
assert(interp2.compiler == null &&
res.toString == "java.lang.ArithmeticException: / by zero")
}
'persistence{
val tempDir = ammonite.ops.Path(
java.nio.file.Files.createTempDirectory("ammonite-tester-x")
)
val interp1 = createTestInterp(new Storage.Folder(tempDir))
val interp2 = createTestInterp(new Storage.Folder(tempDir))
interp1.interpApi.load.module(scriptPath/"OneBlock.sc")
interp2.interpApi.load.module(scriptPath/"OneBlock.sc")
val n1 = interp1.compilationCount
val n2 = interp2.compilationCount
assert(n1 == 2) // customLolz predef + OneBlock.sc
assert(n2 == 0) // both should be cached
}
'tags{
val storage = Storage.InMemory()
val interp = createTestInterp(storage)
interp.interpApi.load.module(scriptPath/"TagBase.sc")
interp.interpApi.load.module(scriptPath/"TagPrevCommand.sc")
interp.interpApi.load.ivy("com.lihaoyi" %% "scalatags" % "0.6.2")
interp.interpApi.load.module(scriptPath/"TagBase.sc")
val n = storage.compileCache.size
assert(n == 5) // customLolz predef + two blocks for each loaded file
}
'compilerInit{
val tempDir = ammonite.ops.Path(
java.nio.file.Files.createTempDirectory("ammonite-tester-x")
)
val interp1 = createTestInterp(new Storage.Folder(tempDir))
val interp2 = createTestInterp(new Storage.Folder(tempDir))
interp1.interpApi.load.module(scriptPath/"cachedCompilerInit.sc")
interp2.interpApi.load.module(scriptPath/"cachedCompilerInit.sc")
assert(interp2.compilationCount == 0)
}
// commenting out this one, which seems not to pass on the CI (fine locally for me though)
/*'changeScriptInvalidation{
// This makes sure that the compile caches are properly utilized, and
// flushed, in a variety of circumstances: changes to the number of
// blocks in the predef, predefs containing magic imports, and changes
// to the script being run. For each change, the caches should be
// invalidated, and subsequently a single compile should be enough
// to re-fill the caches
val predefFile = tmp("""
val x = 1337
@
val y = x
import $ivy.`com.lihaoyi::scalatags:0.6.2`, scalatags.Text.all._
""")
val scriptFile = tmp("""div("<('.'<)", y).render""")
def processAndCheckCompiler(f: ammonite.interp.Compiler => Boolean) ={
val interp = createTestInterp(
new Storage.Folder(tempDir){
override val predef = predefFile
},
Defaults.predefString
)
interp.interpApi.load.module(scriptFile)
assert(f(interp.compiler))
}
processAndCheckCompiler(_ != null)
processAndCheckCompiler(_ == null)
rm! predefFile
write(
predefFile,
"""
import $ivy.`com.lihaoyi::scalatags:0.6.2`; import scalatags.Text.all._
val y = 31337
"""
)
processAndCheckCompiler(_ != null)
processAndCheckCompiler(_ == null)
rm! scriptFile
write(
scriptFile,
"""div("(>'.')>", y).render"""
)
processAndCheckCompiler(_ != null)
processAndCheckCompiler(_ == null)
}*/
}
}
|
alexarchambault/ammonium
|
amm/src/test/scala/ammonite/CachingTests.scala
|
Scala
|
mit
| 6,429 |
/*
* Copyright 2011-2019 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.spark.compiler
package operator
import scala.collection.JavaConversions._
import org.apache.spark.broadcast.{ Broadcast => Broadcasted }
import org.objectweb.asm.Opcodes
import com.asakusafw.lang.compiler.model.graph.{ MarkerOperator, OperatorInput }
import com.asakusafw.lang.compiler.planning.PlanMarker
import com.asakusafw.runtime.core.GroupView
import com.asakusafw.spark.compiler.graph.BroadcastIds
import com.asakusafw.spark.tools.asm._
import com.asakusafw.spark.tools.asm.MethodBuilder._
import com.asakusafw.spark.tools.asm4s._
trait ViewFields extends ClassBuilder {
implicit def context: ViewFields.Context
def operatorInputs: Seq[OperatorInput]
lazy val viewInputs: Seq[OperatorInput] =
operatorInputs.filter(_.getInputUnit == OperatorInput.InputUnit.WHOLE)
override def defFields(fieldDef: FieldDef): Unit = {
super.defFields(fieldDef)
viewInputs.zipWithIndex.foreach {
case (input, i) =>
fieldDef.newField(
Opcodes.ACC_PRIVATE | Opcodes.ACC_FINAL,
s"view${i}",
classOf[GroupView[_]].asType)
}
}
def initViewFields(broadcastsVar: Var)(implicit mb: MethodBuilder): Unit = {
val thisVar :: _ = mb.argVars
viewInputs.zipWithIndex.foreach {
case (input, i) =>
val marker: Option[MarkerOperator] = {
val opposites = input.getOpposites
assert(opposites.size <= 1,
s"The size of broadcast inputs should be 0 or 1: ${opposites.size}")
opposites.headOption.map { opposite =>
val operator = opposite.getOwner
assert(operator.isInstanceOf[MarkerOperator],
s"The master input should be marker operator: ${operator} [${operator}]")
assert(
operator.asInstanceOf[MarkerOperator].getAttribute(classOf[PlanMarker])
== PlanMarker.BROADCAST,
s"The master input should be BROADCAST marker operator: ${
operator.asInstanceOf[MarkerOperator].getAttribute(classOf[PlanMarker])
} [${operator}]")
operator.asInstanceOf[MarkerOperator]
}
}
val keyElementTypes = input.dataModelRef.groupingTypes(input.getGroup.getGrouping)
val mapGroupViewType = MapGroupViewClassBuilder.getOrCompile(keyElementTypes)
thisVar.push().putField(
s"view${i}", {
val mapGroupView = pushNew(mapGroupViewType)
mapGroupView.dup().invokeInit(
marker.map { marker =>
applyMap(
broadcastsVar.push(), context.broadcastIds.getField(marker))
.cast(classOf[Broadcasted[_]].asType)
.invokeV("value", classOf[AnyRef].asType)
.cast(classOf[Map[_, _]].asType)
}.getOrElse {
buildMap(_ => ())
})
mapGroupView.asType(classOf[GroupView[_]].asType)
})
}
}
def getViewField(input: OperatorInput)(implicit mb: MethodBuilder): Stack = {
val thisVar :: _ = mb.argVars
val i = viewInputs.indexOf(input)
assert(i >= 0,
s"The input unit of ${input} is not InputUnit.WHOLE: ${input.getInputUnit}")
thisVar.push().getField(s"view${i}", classOf[GroupView[_]].asType)
}
}
object ViewFields {
trait Context
extends CompilerContext
with DataModelLoaderProvider {
def broadcastIds: BroadcastIds
}
}
|
ueshin/asakusafw-spark
|
compiler/src/main/scala/com/asakusafw/spark/compiler/operator/ViewFields.scala
|
Scala
|
apache-2.0
| 4,050 |
package com.github.saurfang.parquet.proto.spark.sql
import com.google.protobuf.{ByteString, AbstractMessage}
import com.google.protobuf.Descriptors.{EnumValueDescriptor, FieldDescriptor}
import com.google.protobuf.Descriptors.FieldDescriptor.JavaType._
import org.apache.spark.sql.Row
object ProtoRDDConversions {
def messageToRow[A <: AbstractMessage](message: A): Row = {
import collection.JavaConversions._
def toRowData(fd: FieldDescriptor, obj: AnyRef) = {
fd.getJavaType match {
case BYTE_STRING => obj.asInstanceOf[ByteString].toByteArray
case ENUM => obj.asInstanceOf[EnumValueDescriptor].getName
case MESSAGE => messageToRow(obj.asInstanceOf[AbstractMessage])
case _ => obj
}
}
val fieldDescriptors = message.getDescriptorForType.getFields
val fields = message.getAllFields
Row(
fieldDescriptors.map{
fd =>
if(fields.containsKey(fd)) {
val obj = fields.get(fd)
if(fd.isRepeated) {
obj.asInstanceOf[java.util.List[Object]].map(toRowData(fd, _)).toSeq
} else {
toRowData(fd, obj)
}
} else if(fd.isRepeated) {
Seq()
} else null
}.toSeq: _*
)
}
}
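// A minimal usage sketch. `Person` is a hypothetical protoc-generated message class (with a
// singular `name` field and a repeated `email` field); it is not part of this repository.
//
//   val person = Person.newBuilder()
//     .setName("Ada")
//     .addEmail("ada@example.org")
//     .build()
//   val row = ProtoRDDConversions.messageToRow(person)
//   // row(0) == "Ada"; row(1) == Seq("ada@example.org")
//   // unset repeated fields become Seq(), other unset fields become null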
|
saurfang/sparksql-protobuf
|
src/main/scala/com/github/saurfang/parquet/proto/spark/sql/ProtoRDDConversions.scala
|
Scala
|
apache-2.0
| 1,270 |
/*
* Copyright 2015 LG CNS.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package scouter.server.db;
import scouter.server.core.cache.TextCache
import scouter.server.db.text.TextTable
import scouter.util.HashUtil
object TextRD {
def getString(date: String, divs: String, hash: Int): String = {
val out = TextCache.get(divs, hash);
if (out != null)
return out;
try {
val divhash = HashUtil.hash(divs);
if (TextPermWR.isA(divhash)) {
return TextPermRD.getString(divs, hash);
}
val table = TextWR.open(date)
val b = table.get(divhash, hash);
if (b == null)
return null;
val text = new String(b, "UTF-8");
TextCache.put(divhash, hash, text);
return text;
} catch {
case e: Exception => e.printStackTrace()
}
return null;
}
}
|
jhshin9/scouter
|
scouter.server/src/scouter/server/db/TextRD.scala
|
Scala
|
apache-2.0
| 1,477 |
/*
* Copyright (c) 2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mercuree.transformations.core
import org.scalatest.FlatSpec
import org.scalamock.scalatest.MockFactory
/**
* [[Transformations]] test.
* <p>
*
* @author Alexander Valyugin
*/
class TransformationsSpec extends FlatSpec with MockFactory {
trait MockStoredTransformations extends StoredTransformations {
val mocked = stub[StoredTransformations]
override def findAllExcept(names: Set[String]): Seq[StoredTransformation] = mocked.findAllExcept(names)
override def findById(name: String): Option[StoredTransformation] = mocked.findById(name)
override def insert(transformation: LocalTransformation): Unit = mocked.insert(transformation)
override def delete(transformation: Transformation): Unit = mocked.delete(transformation)
override def update(transformation: LocalTransformation): Unit = mocked.update(transformation)
override def applyScript(script: String): Unit = mocked.applyScript(script)
override def transform[A](f: => A): A = f
override def transactional[A](f: => A): A = f
}
class TestTransformations(override val localTransformations: List[Transformation])
extends Transformations with LocalTransformations with MockStoredTransformations
"A new transformation" should "be applied" in {
val local = LocalTransformation("test", "create", "", ApplyMode.Once, true)
val pack = new TestTransformations(List(local))
import pack.mocked._
(findById _).when("test").returns(None)
(findAllExcept _).when(Set("test")).returns(Nil)
pack.run
inSequence {
(findById _).verify("test")
(applyScript _).verify("create")
(insert _).verify(local)
}
}
"Disabled transformation" should "not be applied" in {
val disabled = DisabledTransformation("test")
val pack = new TestTransformations(List(disabled))
import pack.mocked._
(findById _).when("test").returns(None)
(findAllExcept _).when(Set("test")).returns(Nil)
pack.run
(applyScript _).verify(*).never
(insert _).verify(*).never
(delete _).verify(*).never
(update _).verify(*).never
}
"Disabled transformation" should "be rolled back if had been applied previously" in {
val disabled = DisabledTransformation("test")
val stored = StoredTransformation("test", "", "", "rollback", "")
val pack = new TestTransformations(List(disabled))
import pack.mocked._
(findById _).when("test").returns(Some(stored))
(findAllExcept _).when(Set("test")).returns(Nil)
pack.run
inSequence {
(findById _).verify("test")
(applyScript _).verify("rollback")
(delete _).verify(stored)
}
}
"Locally removed transformation" should "be rolled back" in {
val stored = StoredTransformation("test", "", "", "rollback", "")
val pack = new TestTransformations(List())
import pack.mocked._
(findById _).when("test").returns(Some(stored))
(findAllExcept _).when(Set[String]()).returns(List(stored))
pack.run
inSequence {
(findById _).verify("test")
(applyScript _).verify("rollback")
(delete _).verify(stored)
}
}
"Modified transformation" should "be rolled back and applied again" in {
val local = LocalTransformation("test", "update", "", ApplyMode.Modified, true)
val stored = StoredTransformation("test", "", "", "rollback", local.rollbackScriptHash)
val pack = new TestTransformations(List(local))
import pack.mocked._
(findById _).when("test").returns(Some(stored))
(findAllExcept _).when(Set("test")).returns(Nil)
pack.run
inSequence {
(findById _).verify("test")
(applyScript _).verify("rollback")
(applyScript _).verify("update")
(update _).verify(local)
}
}
"Modified transformation" should "not be applied if set to apply once" in {
val local = LocalTransformation("test", "update", "", ApplyMode.Once, true)
val stored = StoredTransformation("test", "", "", "rollback", local.rollbackScriptHash)
val pack = new TestTransformations(List(local))
import pack.mocked._
(findById _).when("test").returns(Some(stored))
(findAllExcept _).when(Set("test")).returns(Nil)
pack.run
inSequence {
(findById _).verify("test")
}
}
"Run always transformation" should "be rolled back and applied again" in {
val local = LocalTransformation("test", "update", "rollback", ApplyMode.Always, true)
val stored = StoredTransformation("test", "update", local.updateScriptHash, "rollback", local.rollbackScriptHash)
val pack = new TestTransformations(List(local))
import pack.mocked._
(findById _).when("test").returns(Some(stored))
(findAllExcept _).when(Set("test")).returns(Nil)
pack.run
inSequence {
(findById _).verify("test")
(applyScript _).verify("rollback")
(applyScript _).verify("update")
(update _).verify(local)
}
}
"If rollback script modified it" should "only update the stored transformation" in {
val local = LocalTransformation("test", "", "A", ApplyMode.Once, true)
val stored = StoredTransformation("test", "", local.updateScriptHash, "", "")
val pack = new TestTransformations(List(local))
import pack.mocked._
(findById _).when("test").returns(Some(stored))
(findAllExcept _).when(Set("test")).returns(Nil)
pack.run
inSequence {
(findById _).verify("test")
(update _).verify(local)
}
}
"Transformations" should "be applied in the given order" in {
val local1 = LocalTransformation("test1", "update1", "", ApplyMode.Once, true)
val local2 = LocalTransformation("test2", "update2", "", ApplyMode.Once, true)
val stored = StoredTransformation("test3", "", "", "rollback", "")
val pack = new TestTransformations(List(local2, local1))
import pack.mocked._
(findById _).when("test1").returns(None)
(findById _).when("test2").returns(None)
(findById _).when("test3").returns(Some(stored))
(findAllExcept _).when(Set("test1", "test2")).returns(List(stored))
pack.run
inSequence {
(findById _).verify("test2")
(applyScript _).verify("update2")
(insert _).verify(local2)
(findById _).verify("test1")
(applyScript _).verify("update1")
(insert _).verify(local1)
(findById _).verify("test3")
(applyScript _).verify("rollback")
(delete _).verify(stored)
}
}
}
|
zeddius1983/transformations
|
transformations-core/src/test/scala/org/mercuree/transformations/core/TransformationsSpec.scala
|
Scala
|
apache-2.0
| 7,040 |
/*
 * This file is part of the \BlueLaTeX project.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gnieh.blue
package core
package impl
package paper
import http._
import couch.Paper
import common._
import permission._
import com.typesafe.config.Config
import tiscaf._
import gnieh.diffson._
import scala.io.Source
import scala.util.{
Try,
Success
}
import gnieh.sohva.control.CouchClient
/** Handle JSON Patches that modify paper data such as paper name
*
* @author Lucas Satabin
*/
class ModifyPaperLet(paperId: String, val couch: CouchClient, config: Config, logger: Logger) extends SyncPermissionLet(paperId, config, logger) {
def permissionAct(user: Option[UserInfo], role: Role, permissions: Set[Permission])(implicit talk: HTalk): Try[Unit] = permissions match {
case Configure() =>
// only authors may modify this list
(talk.req.octets, talk.req.header("if-match")) match {
case (Some(octets), knownRev @ Some(_)) =>
val manager = entityManager("blue_papers")
// the modification must be sent as a JSON Patch document
// retrieve the paper object from the database
manager.getComponent[Paper](paperId) flatMap {
case Some(paper) if paper._rev == knownRev =>
talk.readJson[JsonPatch] match {
case Some(patch) =>
// the revision matches, we can apply the patch
val paper1 = patch(paper).withRev(knownRev)
// and save the new paper data
for(p <- manager.saveComponent(paperId, paper1))
// save successfully, return ok with the new ETag
// we are sure that the revision is not empty because it comes from the database
yield talk.writeJson(true, p._rev.get)
case None =>
// nothing to do
Success(
talk
.setStatus(HStatus.NotModified)
.writeJson(ErrorResponse("nothing_to_do", "No changes sent")))
}
case Some(_) =>
// nothing to do
Success(
talk
.setStatus(HStatus.Conflict)
.writeJson(ErrorResponse("conflict", "Old paper info revision provided")))
case None =>
// unknown paper
Success(
talk
.setStatus(HStatus.NotFound)
.writeJson(ErrorResponse("nothing_to_do", s"Unknown paper $paperId")))
}
case (None, _) =>
// nothing to do
Success(
talk
.setStatus(HStatus.NotModified)
.writeJson(ErrorResponse("nothing_to_do", "No changes sent")))
case (_, None) =>
// known revision was not sent, precondition failed
Success(
talk
.setStatus(HStatus.Conflict)
.writeJson(ErrorResponse("conflict", "Paper revision not provided")))
}
case _ =>
Success(
talk
.setStatus(HStatus.Forbidden)
.writeJson(ErrorResponse("no_sufficient_rights", "You have no permission to modify the paper data")))
}
}
|
tdurieux/bluelatex
|
blue-core/src/main/scala/gnieh/blue/core/impl/paper/ModifyPaperLet.scala
|
Scala
|
apache-2.0
| 3,778 |
package com.seanshubin.todo.application.domain
class RedirectHandlerRequest(redirects: Map[String, String]) extends RequestValueHandler {
override def handle(request: RequestValue): Option[ResponseValue] = {
redirects.get(request.uri.path).map {
destination =>
val response = ResponseValue.redirect(destination)
response
}
}
}
|
SeanShubin/todo-application
|
domain/src/main/scala/com/seanshubin/todo/application/domain/RedirectHandlerRequest.scala
|
Scala
|
unlicense
| 362 |
package waldap.core.model
import org.apache.directory.api.ldap.model.name.Dn
trait Account {
val userName: String
val isAdmin: Boolean
}
case class AdminAccount(username: String) extends Account {
override val userName: String = username
override val isAdmin: Boolean = true
}
case class UserAccount(username: String, dn: Dn) extends Account {
override val userName: String = username
override val isAdmin: Boolean = false
}
|
kounoike/waldap
|
src/main/scala/waldap/core/model/Account.scala
|
Scala
|
apache-2.0
| 441 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ui.jobs
import javax.servlet.http.HttpServletRequest
import scala.xml.{Node, NodeSeq}
import org.apache.spark.scheduler.Schedulable
import org.apache.spark.ui.{UIUtils, WebUIPage}
/** Page showing list of all ongoing and recently finished stages and pools */
private[ui] class AllStagesPage(parent: StagesTab) extends WebUIPage("") {
private val sc = parent.sc
private val listener = parent.progressListener
private def isFairScheduler = parent.isFairScheduler
def render(request: HttpServletRequest): Seq[Node] = {
listener.synchronized {
val activeStages = listener.activeStages.values.toSeq
val pendingStages = listener.pendingStages.values.toSeq
val completedStages = listener.completedStages.reverse
val numCompletedStages = listener.numCompletedStages
val failedStages = listener.failedStages.reverse
val numFailedStages = listener.numFailedStages
val subPath = "stages"
val activeStagesTable =
new StageTableBase(request, activeStages, "active", "activeStage", parent.basePath, subPath,
parent.progressListener, parent.isFairScheduler,
killEnabled = parent.killEnabled, isFailedStage = false)
val pendingStagesTable =
new StageTableBase(request, pendingStages, "pending", "pendingStage", parent.basePath,
subPath, parent.progressListener, parent.isFairScheduler,
killEnabled = false, isFailedStage = false)
val completedStagesTable =
new StageTableBase(request, completedStages, "completed", "completedStage", parent.basePath,
subPath, parent.progressListener, parent.isFairScheduler,
killEnabled = false, isFailedStage = false)
val failedStagesTable =
new StageTableBase(request, failedStages, "failed", "failedStage", parent.basePath, subPath,
parent.progressListener, parent.isFairScheduler,
killEnabled = false, isFailedStage = true)
// For now, pool information is only accessible in live UIs
val pools = sc.map(_.getAllPools).getOrElse(Seq.empty[Schedulable])
val poolTable = new PoolTable(pools, parent)
val shouldShowActiveStages = activeStages.nonEmpty
val shouldShowPendingStages = pendingStages.nonEmpty
val shouldShowCompletedStages = completedStages.nonEmpty
val shouldShowFailedStages = failedStages.nonEmpty
val completedStageNumStr = if (numCompletedStages == completedStages.size) {
s"$numCompletedStages"
} else {
s"$numCompletedStages, only showing ${completedStages.size}"
}
val summary: NodeSeq =
<div>
<ul class="unstyled">
{
if (shouldShowActiveStages) {
<li>
<a href="#active"><strong>Active Stages:</strong></a>
{activeStages.size}
</li>
}
}
{
if (shouldShowPendingStages) {
<li>
<a href="#pending"><strong>Pending Stages:</strong></a>
{pendingStages.size}
</li>
}
}
{
if (shouldShowCompletedStages) {
<li id="completed-summary">
<a href="#completed"><strong>Completed Stages:</strong></a>
{completedStageNumStr}
</li>
}
}
{
if (shouldShowFailedStages) {
<li>
<a href="#failed"><strong>Failed Stages:</strong></a>
{numFailedStages}
</li>
}
}
</ul>
</div>
var content = summary ++
{
if (sc.isDefined && isFairScheduler) {
<h4>{pools.size} Fair Scheduler Pools</h4> ++ poolTable.toNodeSeq
} else {
Seq.empty[Node]
}
}
if (shouldShowActiveStages) {
content ++= <h4 id="active">Active Stages ({activeStages.size})</h4> ++
activeStagesTable.toNodeSeq
}
if (shouldShowPendingStages) {
content ++= <h4 id="pending">Pending Stages ({pendingStages.size})</h4> ++
pendingStagesTable.toNodeSeq
}
if (shouldShowCompletedStages) {
content ++= <h4 id="completed">Completed Stages ({completedStageNumStr})</h4> ++
completedStagesTable.toNodeSeq
}
if (shouldShowFailedStages) {
content ++= <h4 id ="failed">Failed Stages ({numFailedStages})</h4> ++
failedStagesTable.toNodeSeq
}
UIUtils.headerSparkPage("Stages for All Jobs", content, parent)
}
}
}
|
akopich/spark
|
core/src/main/scala/org/apache/spark/ui/jobs/AllStagesPage.scala
|
Scala
|
apache-2.0
| 5,492 |
package com.ebay.neutrino.handler.ops
import java.util
import com.ebay.neutrino.NeutrinoRequest
import com.ebay.neutrino.channel.NeutrinoEvent
import com.ebay.neutrino.util.Utilities
import com.typesafe.scalalogging.slf4j.StrictLogging
import io.netty.buffer.{ByteBuf, ByteBufHolder}
import io.netty.channel.ChannelHandler.Sharable
import io.netty.channel._
import io.netty.handler.codec.http._
/**
* Hook the Channel session to audit incoming/outgoing channel events for post-
* analysis.
*
*/
@Sharable
class NeutrinoAuditHandler extends ChannelDuplexHandler with StrictLogging
{
import com.ebay.neutrino.handler.ops.NeutrinoAuditHandler._
import com.ebay.neutrino.handler.ops.AuditActivity._
@inline def calculateSize(msg: AnyRef) = msg match {
case data: ByteBuf => data.readableBytes()
case data: ByteBufHolder => data.content.readableBytes
case data => 0
}
override def userEventTriggered(ctx: ChannelHandlerContext, event: AnyRef): Unit = {
ctx.channel.audit(UserEvent(event))
ctx.fireUserEventTriggered(event)
}
override def channelRead(ctx: ChannelHandlerContext, msg: AnyRef): Unit = {
ctx.channel.audit(
msg match {
case data: HttpRequest => Request(data)
case data: HttpResponse => Response(data)
case data: LastHttpContent => Content(data.content.readableBytes, true)
case data: HttpContent => Content(data.content.readableBytes)
case data: ByteBuf => ReadData(data.readableBytes)
})
ctx.fireChannelRead(msg)
}
override def write(ctx: ChannelHandlerContext, msg: Object, promise: ChannelPromise): Unit = {
ctx.channel.audit(
msg match {
case data: HttpRequest => Request(data)
case data: HttpResponse => Response(data)
case data: LastHttpContent => Content(data.content.readableBytes, true)
case data: HttpContent => Content(data.content.readableBytes)
case data: ByteBuf => WriteData(data.readableBytes)
})
ctx.write(msg, promise)
}
override def flush(ctx: ChannelHandlerContext): Unit = {
ctx.channel.audit(Flush())
ctx.flush()
}
override def close(ctx: ChannelHandlerContext, future: ChannelPromise): Unit = {
ctx.channel.audit(Close())
ctx.close(future)
}
override def exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) = {
ctx.channel.audit(Error(cause))
ctx.fireExceptionCaught(cause)
}
}
object NeutrinoAuditHandler extends StrictLogging {
import Utilities._
implicit class NeutrinoAuditRequestSupport(val self: NeutrinoRequest) extends AnyVal {
/**
* Retrieve the activity-state, creating if necessary.
* @return valid state
*/
def state =
self.get(classOf[AuditState]) match {
case None =>
val state = AuditState()
self.set(classOf[AuditState], state)
state
case Some(state) =>
state
}
// Add the activity provided
def audit(channel: Channel, data: => AuditActivity) =
state.add(channel, data)
// Add the activity provided
//def audit(channel: Channel, data: Unit => AuditActivity) =
//state.add(channel, data())
// Add the activity provided
def audit(channel: Channel, datafx: PartialFunction[Channel, AuditActivity]) =
if (datafx.isDefinedAt(channel)) state.add(channel, datafx.apply(channel))
// Clear the audit state from the request provided
def clearstate() = self.clear(classOf[AuditState])
}
implicit class NeutrinoAuditSupport(val channel: Channel) extends AnyVal {
// Retrieve the associated state
def auditstate = AuditState.state(channel)
// Add the activity provided
def audit(data: => AuditActivity) = AuditState.audit(channel, data)
// Clear audit-state, if present
def clear() = AuditState.clear(channel)
}
class NeutrinoAuditLogger(request: NeutrinoRequest) extends ChannelFutureListener {
// Log requests that exceed the request's threshold
override def operationComplete(future: ChannelFuture): Unit = {
// Audit support; if our transaction has taken more than 5 seconds, dump the diagnostics here
val settings = request.session.service.settings.channel
val channel = future.channel
// Generate audit-records and throw debug
request.audit(channel, AuditActivity.Event(NeutrinoEvent.ResponseCompleted(request)))
settings.auditThreshold map { threshold =>
// Grab the request's data
val state = request.clear(classOf[AuditState])
state map { state =>
if (request.elapsed > threshold)
logger.warn("Audit state on long running transaction: {}\n{}", channel.toStringExt, state)
else
logger.debug("Audit state on transaction: {}\n{}", channel.toStringExt, state)
}
}
}
}
}
sealed abstract class AuditActivity {
val time = System.nanoTime
}
object AuditActivity {
// Supported types
case class Request(request: HttpRequest) extends AuditActivity
case class Response(response: HttpResponse) extends AuditActivity
case class Content(size: Int, last: Boolean=false) extends AuditActivity
case class ReadData(size: Int) extends AuditActivity
case class WriteData(size: Int) extends AuditActivity
case class Error(cause: Throwable) extends AuditActivity
case class Downstream() extends AuditActivity
case class DownstreamConnect(success: Boolean) extends AuditActivity
case class ChannelAssigned(channel: Channel) extends AuditActivity
case class ChannelException(channel: Channel, cause: Throwable) extends AuditActivity
case class Event(event: NeutrinoEvent) extends AuditActivity
case class UserEvent(event: AnyRef) extends AuditActivity
case class Detail(value: String) extends AuditActivity
case class Flush() extends AuditActivity
case class Close() extends AuditActivity
}
case class AuditState() {
import com.ebay.neutrino.handler.ops.AuditActivity._
import com.ebay.neutrino.util.AttributeSupport._
import scala.collection.JavaConversions.asScalaSet
private val activity = new util.LinkedList[(Channel, AuditActivity)]
// Add the activity provided
def add(data: (Channel, AuditActivity)) =
this.synchronized { activity.add(data) }
def headerStr(msg: HttpMessage) =
s"headers = [${msg.headers.names.mkString(",")}]"
override def toString = {
val builder = new StringBuilder("AuditState:\n")
val start = if (activity.isEmpty) 0L else activity.peekFirst._2.time
val iter = activity.iterator
// Iterate the activity
while (iter.hasNext) {
val (channel, item) = iter.next()
builder
.append(" ").append(String.format("%9s", ""+(item.time-start)/1000)).append(" micros:\t")
.append(
if (channel.service.isDefined) "Sx"+channel.id
else "0x"+channel.id
)
.append('\t')
builder.append(item match {
case Request (data: FullHttpRequest) => s"FullRequest (${data.uri}), ${headerStr(data)}"
case Request (data) => s"Request (${data.uri}), ${headerStr(data)}"
case Response(data: FullHttpResponse) => s"FullResponse, ${headerStr(data)}"
case Response(data) => s"Response, ${headerStr(data)}"
case Content(size, true) => s"LastContent($size)"
case Content(size, false) => s"Content($size)"
case Error(cause: Throwable) => s"Error($cause)"
case _ => item.toString
})
builder.append('\n')
}
builder.toString
}
}
/**
* Static helper methods.
*
* We define them here to ensure the anonymous lambda classes aren't constructed by
* the value classes.
*/
object AuditState {
import com.ebay.neutrino.handler.ops.NeutrinoAuditHandler._
import com.ebay.neutrino.util.AttributeSupport._
def request(channel: Channel): Option[NeutrinoRequest] =
channel.request orElse (channel.session flatMap (_.channel.request))
def state(channel: Channel): Option[AuditState] =
request(channel) map (_.state)
def audit(channel: Channel, data: => AuditActivity) =
request(channel) map (_.state.add((channel, data)))
// Clear audit-state, if present
def clear(channel: Channel) =
request(channel) map (_.clear(classOf[AuditState]))
}
|
eBay/Neutrino
|
src/main/scala/com/ebay/neutrino/handler/ops/NeutrinoAuditHandler.scala
|
Scala
|
apache-2.0
| 8,389 |
/**
* Copyright (C) 2016 DANS - Data Archiving and Networked Services ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.knaw.dans.easy.multideposit.parser
import better.files.File
import cats.data.NonEmptyList
import nl.knaw.dans.common.lang.dataset.AccessCategory
import nl.knaw.dans.easy.multideposit.PathExplorer.InputPathExplorer
import nl.knaw.dans.easy.multideposit.TestSupportFixture
import nl.knaw.dans.easy.multideposit.model.{ ContributorRole, CreatorOrganization, CreatorPerson, Profile }
import nl.knaw.dans.easy.multideposit.parser.Headers.Header
import org.joda.time.DateTime
trait ProfileTestObjects {
lazy val profileCSV @ profileCSVRow1 :: profileCSVRow2 :: Nil = List(
Map(
Headers.Title -> "title1",
Headers.Description -> "descr1",
Headers.CreatorInitials -> "A.",
Headers.CreatorSurname -> "Jones",
Headers.CreatorRole -> "Supervisor",
Headers.Created -> "2016-07-30",
Headers.Available -> "2016-07-31",
Headers.Audience -> "D30000",
Headers.AccessRights -> "REQUEST_PERMISSION"
),
Map(
Headers.Title -> "title2",
Headers.Description -> "descr2",
Headers.Audience -> "D37000"
)
)
lazy val profileCSVRows = List(
DepositRow(2, profileCSVRow1),
DepositRow(3, profileCSVRow2),
)
lazy val profile: Profile = Profile(
titles = NonEmptyList.of("title1", "title2"),
descriptions = NonEmptyList.of("descr1", "descr2"),
creators = NonEmptyList.of(CreatorPerson(initials = "A.", surname = "Jones", role = Option(ContributorRole.SUPERVISOR))),
created = DateTime.parse("2016-07-30"),
available = DateTime.parse("2016-07-31"),
audiences = NonEmptyList.of("D30000", "D37000"),
accessright = AccessCategory.REQUEST_PERMISSION
)
}
class ProfileParserSpec extends TestSupportFixture with ProfileTestObjects {
self =>
private val parser = new ProfileParser with ParserUtils with InputPathExplorer {
val multiDepositDir: File = self.multiDepositDir
}
import parser._
"extractProfile" should "convert the csv input to the corresponding output" in {
extractProfile(2, profileCSVRows).value shouldBe profile
}
it should "fail if there are no values for DC_TITLE, DC_DESCRIPTION, creator, DDM_CREATED, DDM_AUDIENCE and DDM_ACCESSRIGHTS" in {
val rows = DepositRow(2, Map.empty[Header, String]) ::
DepositRow(3, Map.empty[Header, String]) :: Nil
extractProfile(2, rows).invalidValue.toNonEmptyList.toList should contain inOrderOnly(
ParseError(2, "There should be at least one non-empty value for DC_TITLE"),
ParseError(2, "There should be at least one non-empty value for DC_DESCRIPTION"),
ParseError(2, "There should be at least one non-empty value for the creator fields"),
ParseError(2, "There should be one non-empty value for DDM_CREATED"),
ParseError(2, "There should be at least one non-empty value for DDM_AUDIENCE"),
ParseError(2, "There should be one non-empty value for DDM_ACCESSRIGHTS"),
)
}
it should "fail if there are multiple values for DDM_CREATED, DDM_AVAILABLE and DDM_ACCESSRIGHTS" in {
val rows = DepositRow(2, profileCSVRow1) ::
DepositRow(3, profileCSVRow2.updated(Headers.Created, "2015-07-30")
.updated(Headers.Available, "2015-07-31")
.updated(Headers.AccessRights, "NO_ACCESS")) :: Nil
extractProfile(2, rows).invalidValue.toNonEmptyList.toList should contain inOrderOnly(
ParseError(2, "Only one row is allowed to contain a value for the column 'DDM_CREATED'. Found: [2016-07-30, 2015-07-30]"),
ParseError(2, "At most one row is allowed to contain a value for the column 'DDM_AVAILABLE'. Found: [2016-07-31, 2015-07-31]"),
ParseError(2, "Only one row is allowed to contain a value for the column 'DDM_ACCESSRIGHTS'. Found: [REQUEST_PERMISSION, NO_ACCESS]"),
)
}
"accessCategory" should "convert the value for DDM_ACCESSRIGHTS into the corresponding enum object" in {
accessCategory(2, Headers.AccessRights)("ANONYMOUS_ACCESS").value shouldBe AccessCategory.ANONYMOUS_ACCESS
}
it should "fail if the DDM_ACCESSRIGHTS value does not correspond to an object in the enum" in {
accessCategory(2, Headers.AccessRights)("unknown value").invalidValue shouldBe
ParseError(2, "Value 'unknown value' is not a valid accessright in column DDM_ACCESSRIGHTS").chained
}
"creator" should "return None if none of the fields are defined" in {
val row = DepositRow(2, Map(
Headers.CreatorTitles -> "",
Headers.CreatorInitials -> "",
Headers.CreatorInsertions -> "",
Headers.CreatorSurname -> "",
Headers.CreatorOrganization -> "",
Headers.CreatorDAI -> "",
Headers.CreatorRole -> ""
))
creator(row) shouldBe empty
}
it should "succeed with an organisation when only the DCX_CREATOR_ORGANIZATION is defined" in {
val row = DepositRow(2, Map(
Headers.CreatorTitles -> "",
Headers.CreatorInitials -> "",
Headers.CreatorInsertions -> "",
Headers.CreatorSurname -> "",
Headers.CreatorOrganization -> "org",
Headers.CreatorDAI -> "",
Headers.CreatorRole -> ""
))
creator(row).value.value shouldBe CreatorOrganization("org", None)
}
it should "succeed with an organisation when only the DCX_CREATOR_ORGANIZATION and DCX_CREATOR_ROLE are defined" in {
val row = DepositRow(2, Map(
Headers.CreatorTitles -> "",
Headers.CreatorInitials -> "",
Headers.CreatorInsertions -> "",
Headers.CreatorSurname -> "",
Headers.CreatorOrganization -> "org",
Headers.CreatorDAI -> "",
Headers.CreatorRole -> "ProjectManager"
))
creator(row).value.value shouldBe CreatorOrganization("org", Some(ContributorRole.PROJECT_MANAGER))
}
it should "succeed with a person when only DCX_CREATOR_INITIALS and DCX_CREATOR_SURNAME are defined" in {
val row = DepositRow(2, Map(
Headers.CreatorTitles -> "",
Headers.CreatorInitials -> "A.",
Headers.CreatorInsertions -> "",
Headers.CreatorSurname -> "Jones",
Headers.CreatorOrganization -> "",
Headers.CreatorDAI -> "",
Headers.CreatorRole -> ""
))
creator(row).value.value shouldBe CreatorPerson(None, "A.", None, "Jones", None, None, None)
}
it should "succeed with a more extensive person when more fields are filled in" in {
val row = DepositRow(2, Map(
Headers.CreatorTitles -> "Dr.",
Headers.CreatorInitials -> "A.",
Headers.CreatorInsertions -> "X",
Headers.CreatorSurname -> "Jones",
Headers.CreatorOrganization -> "org",
Headers.CreatorDAI -> "dai123",
Headers.CreatorRole -> "rElAtEdpErsOn"
))
creator(row).value.value shouldBe CreatorPerson(Some("Dr."), "A.", Some("X"), "Jones", Some("org"), Some(ContributorRole.RELATED_PERSON), Some("dai123"))
}
it should "fail if DCX_CREATOR_INITIALS is not defined" in {
val row = DepositRow(2, Map(
Headers.CreatorTitles -> "Dr.",
Headers.CreatorInitials -> "",
Headers.CreatorInsertions -> "",
Headers.CreatorSurname -> "Jones",
Headers.CreatorOrganization -> "",
Headers.CreatorDAI -> "",
Headers.CreatorRole -> ""
))
creator(row).value.invalidValue shouldBe ParseError(2, "Missing value for: DCX_CREATOR_INITIALS").chained
}
it should "fail if DCX_CREATOR_SURNAME is not defined" in {
val row = DepositRow(2, Map(
Headers.CreatorTitles -> "Dr.",
Headers.CreatorInitials -> "A.",
Headers.CreatorInsertions -> "",
Headers.CreatorSurname -> "",
Headers.CreatorOrganization -> "",
Headers.CreatorDAI -> "",
Headers.CreatorRole -> ""
))
creator(row).value.invalidValue shouldBe ParseError(2, "Missing value for: DCX_CREATOR_SURNAME").chained
}
it should "fail if DCX_CREATOR_INITIALS and DCX_CREATOR_SURNAME are both not defined" in {
val row = DepositRow(2, Map(
Headers.CreatorTitles -> "Dr.",
Headers.CreatorInitials -> "",
Headers.CreatorInsertions -> "",
Headers.CreatorSurname -> "",
Headers.CreatorOrganization -> "",
Headers.CreatorDAI -> "",
Headers.CreatorRole -> ""
))
creator(row).value.invalidValue shouldBe ParseError(2, "Missing value(s) for: [DCX_CREATOR_INITIALS, DCX_CREATOR_SURNAME]").chained
}
it should "fail if DCX_CREATOR_ROLE has an invalid value" in {
val row = DepositRow(2, Map(
Headers.CreatorTitles -> "Dr.",
Headers.CreatorInitials -> "A.",
Headers.CreatorInsertions -> "",
Headers.CreatorSurname -> "Jones",
Headers.CreatorOrganization -> "",
Headers.CreatorDAI -> "",
Headers.CreatorRole -> "invalid!"
))
creator(row).value.invalidValue shouldBe ParseError(2, "Value 'invalid!' is not a valid creator role").chained
}
}
|
DANS-KNAW/easy-split-multi-deposit
|
src/test/scala/nl.knaw.dans.easy.multideposit/parser/ProfileParserSpec.scala
|
Scala
|
apache-2.0
| 9,408 |
package chapter.eight
object ExerciseEight extends App {
class Person(val name: String) {
override def toString = s"""${getClass.getName} [name="${name}"]"""
}
class SecretAgent(codename: String) extends Person(codename) {
override val name = "secret"
override val toString = "secret"
}
}
|
deekim/impatient-scala
|
src/main/scala/chapter/eight/ExerciseEight.scala
|
Scala
|
apache-2.0
| 313 |
import ammonite.ops._
import ammonite.shell._
import ammonite.ops.ImplicitWd._
/*
TODO:
* add syntax for custom operators
* file atomicity
 * stronger type-checking (e.g. `write` should only accept `Path`)
* the results list in runMap could keep metadata info for each target that's been added to it (or each rule that's been run)
* as a Map or List, and this could be pattern-matched on too, and passed along, as in graph.clj, when the type of the target...
* Key is not a file/file-pattern.
* use sorted Map
* If there are duplicate rules, they should be dissoc'ed between iterations. This way the first rule acts like the first case in a pattern.
* so if a previous rule doesn't create an optional object (say file), put the rule requiring that object first . . . but would have
* to make sure that the optional obj. rule is defined before that. Maybe use Maybe Monad for this?
* currently don't use topological sort, so running this would run rules that weren't necessary. This is bad because we want user
* to be able to pick what to run (e.g. clean, install, etc.)
*/
/*
what we'd like to see:
{ "*.md" => { ("*.txt", "echo") -> (a, b) => a + b}
hmmm anonymous function types needed.
*/
val m = Map({ "*.md" -> { List("*.txt") -> ((x:Path) => cp(x, x %= "silly")) } } )
val getInputs = ((x:List[String]) => x.map( s => ls! cwd |? (_ ~= s)))
def anyEmpty(seq:Seq[Seq[Any]]) = seq.map(_.isEmpty).reduce(_ || _)
/* Instead of running through by order, runMap accepts a key (target) which is the final goal,
   and recursively requests the rules necessary to build the target. */
def runMap (m: Map[String, (List[String], Path => Unit)]): Unit = {
  def runMapP (m: Map[String, (List[String], Path => Unit)], results: List[List[Path]]): Unit = {
    if (m.isEmpty) return ()
    val next = m.filterNot(x => anyEmpty(getInputs(x._2._1))).head // this raises exception if can't keep going
    // run the rule's action on each matched input; still approximate, only records the matched inputs
    val newResults = getInputs(next._2._1).map { paths => paths.foreach(next._2._2); paths.toList }
    return runMapP(m - next._1, results ++ newResults)
  }
  runMapP(m, List())
}
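// e.g. with the rule map `m` above, runMap(m) finds every "*.txt" in cwd and copies it to a
// ".silly" sibling; the result bookkeeping is still a sketch (see the TODOs at the top of this file).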
def fileReg (s:String) = ("(" ++ s.replaceAll("\\.", "\\.").replaceAll("\\*", ".*") ++ ")").r
val noExt = ((x:String) => x.split('.').init.mkString("."))
val swapExt = ( (s:String, ext:String) => (noExt(s)) ++ "." ++ ext)
def dirname (p:Path) : Path = cwd.make(p.segments.init, 0)
implicit class Iff(val a: Path) extends AnyVal {
def -= = dirname(a)/noExt(a.last)
def %= (b: String) : Path = dirname(a)/swapExt(a.last, b)
def += (b: String) : Path = dirname(a)/(a.last ++ "." ++ b)
def ~= (b: String) : Boolean = fileReg(b).unapplySeq(a.last).isDefined
}
// maybe check for .PHONY results: (create phoney object string wrapper I guess)
//def getInputsP (xs:List[String]) = xs.map( s => ls! cwd |? (_ ~= s)))
// but getInputs needs to include the inputs that have already been created.
// remove from map once completed with `-` `key`
/* Examples */
ls! cwd |? (_ ~= "*.txt")
ls! cwd |? (_ ~= "*.txt") | (x => cp(x, x %= "silly"))
ls! cwd |? (_ ~= "*.silly") | (x => rm! x)
def `list?` (s:String) = ls! cwd |? (_ ~= s)
val tupleMap = { "*.md" -> { ("*.yaml", "*.cfg") -> ((x:Path) => cp(x, x %= "silly")) } }
//scala splat
// Function.tupled(add _)(1, 3)   // (requires an add function in scope)
// have to use `import shapeless._` to get the mapping + splatting
// to work together.
def list(s:String) = ls! cwd |? (_ ~= s)
//val inputs = .map( a => a match {case s :: Nil => List(ls! cwd |? (_ ~= s))
// case xs => xs.map(x => ls! cwd |? (_ ~= x))})
//val universe: scala.reflect.runtime.universe.type = scala.reflect.runtime.universe
//import universe._
//val t = q"""{ case "*.txt" => true
// case _ => false}"""
//extract the cases,
//try matching on all of them and catch a match error.
//val l = t.children.map(_.children(0))
// .getClass -> Ident or Literal
//t.children(0).children(0)
|
averagehat/scala-make
|
graph.scala
|
Scala
|
bsd-3-clause
| 3,877 |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.compiler.v2_3.tracing.rewriters
import org.neo4j.cypher.internal.frontend.v2_3.Rewriter
import org.neo4j.cypher.internal.frontend.v2_3.test_helpers.CypherFunSuite
class RewriterStepSequencerTest extends CypherFunSuite {
test("if no conditions are used, what goes in is what comes out") {
val dummyRewriter1 = Rewriter.noop
val dummyRewriter2 = Rewriter.lift { case x: AnyRef => x }
RewriterStepSequencer.newValidating("test")() should equal(RewriterContract(Seq(), Set()))
RewriterStepSequencer.newValidating("test")(ApplyRewriter("1", dummyRewriter1), ApplyRewriter("2", dummyRewriter2)) should equal(RewriterContract(Seq(dummyRewriter1, dummyRewriter2), Set()))
}
test("Should enable conditions between rewriters and collect the post conditions at the end") {
val dummyCond1 = RewriterCondition("a", (x: Any) => Seq("1"))
val dummyCond2 = RewriterCondition("b", (x: Any) => Seq("2"))
val dummyRewriter1 = Rewriter.noop
val dummyRewriter2 = Rewriter.lift { case x: AnyRef => x }
val sequencer = RewriterStepSequencer.newValidating("test")(
ApplyRewriter("1", dummyRewriter1),
EnableRewriterCondition(dummyCond1),
ApplyRewriter("2", dummyRewriter2),
EnableRewriterCondition(dummyCond2)
)
sequencer.childRewriters should equal(Seq(
dummyRewriter1,
RunConditionRewriter("test", Some("1"), Set(dummyCond1)),
dummyRewriter2,
RunConditionRewriter("test", Some("2"), Set(dummyCond1, dummyCond2))
))
sequencer.postConditions should equal(Set(dummyCond1, dummyCond2))
}
test("Should enable/disable conditions between rewriters and collect the post conditions at the end") {
val dummyCond1 = RewriterCondition("a", (x: Any) => Seq("1"))
val dummyCond2 = RewriterCondition("b", (x: Any) => Seq("2"))
val dummyRewriter1 = Rewriter.noop
val dummyRewriter2 = Rewriter.lift { case x: AnyRef => x}
val dummyRewriter3 = Rewriter.noop
val sequencer = RewriterStepSequencer.newValidating("test")(
ApplyRewriter("1", dummyRewriter1),
EnableRewriterCondition(dummyCond1),
ApplyRewriter("2", dummyRewriter2),
EnableRewriterCondition(dummyCond2),
ApplyRewriter("3", dummyRewriter3),
DisableRewriterCondition(dummyCond2)
)
sequencer.childRewriters should equal(Seq(
dummyRewriter1,
RunConditionRewriter("test", Some("1"), Set(dummyCond1)),
dummyRewriter2,
RunConditionRewriter("test", Some("2"), Set(dummyCond1, dummyCond2)),
dummyRewriter3,
RunConditionRewriter("test", Some("3"), Set(dummyCond1))
))
sequencer.postConditions should equal(Set(dummyCond1))
}
}
|
HuangLS/neo4j
|
community/cypher/cypher-compiler-2.3/src/test/scala/org/neo4j/cypher/internal/compiler/v2_3/tracing/rewriters/RewriterStepSequencerTest.scala
|
Scala
|
apache-2.0
| 3,502 |
package com.twitter.finagle.netty4.channel
import com.twitter.finagle.Stack
import com.twitter.finagle.param.Stats
import com.twitter.finagle.stats.InMemoryStatsReceiver
import io.netty.channel._
import io.netty.channel.embedded.EmbeddedChannel
import io.netty.channel.nio.NioEventLoopGroup
import java.net.SocketAddress
import org.scalatestplus.mockito.MockitoSugar
import org.scalatest.funsuite.AnyFunSuite
class HandlerEventTest extends AnyFunSuite with MockitoSugar {
// verify that custom channel handlers don't swallow pipeline events.
val handlers = List(
new ChannelRequestStatsHandler(
new ChannelRequestStatsHandler.SharedChannelRequestStats(new InMemoryStatsReceiver)
),
new ChannelStatsHandler(
new SharedChannelStats(Stack.Params.empty + Stats(new InMemoryStatsReceiver))
),
new SimpleChannelSnooper("test"),
new ByteBufSnooper("test")
)
val loop = new NioEventLoopGroup()
for (handler <- handlers) testHandler(handler)
private[this] def testHandler(ch: ChannelHandler): Unit = {
val handler = new TestDuplexHandler
val pipeline = new EmbeddedChannel(ch, handler).pipeline
val name = ch.getClass.getCanonicalName
// inbound events
test(s"$name doesn't suppress ChannelActive event") {
pipeline.fireChannelActive()
assert(handler.channelActiveFired, "suppressed ChannelActive event")
}
test(s"$name doesn't suppress ChannelRead event") {
pipeline.fireChannelRead(new Object)
assert(handler.channelReadFired, "suppressed ChannelRead event")
}
test(s"$name doesn't suppress ChannelInactive event") {
pipeline.fireChannelInactive()
assert(handler.channelInactiveFired, "suppressed ChannelInactive event")
}
test(s"$name doesn't suppress ChannelReadComplete event") {
pipeline.fireChannelReadComplete()
assert(handler.channelReadCompleteFired, "suppressed ChannelReadComplete event")
}
test(s"$name doesn't suppress ChannelRegistered event") {
pipeline.fireChannelRegistered()
assert(handler.channelRegisteredFired, "suppressed ChannelRegistered event")
}
test(s"$name doesn't suppress ChannelUnregistered event") {
pipeline.fireChannelUnregistered()
assert(handler.channelUnregisteredFired, "suppressed ChannelUnregistered event")
}
test(s"$name doesn't suppress ChannelWritabilityChanged event") {
pipeline.fireChannelWritabilityChanged()
assert(handler.channelWritabilityChangedFired, "suppressed ChannelWritabilityChanged event")
}
test(s"$name doesn't suppress ExceptionCaught event") {
pipeline.fireExceptionCaught(new Exception)
assert(handler.exceptionCaughtFired, "suppressed ExceptionCaught event")
}
test(s"$name doesn't suppress UserEventTriggered event") {
pipeline.fireUserEventTriggered(new Object)
assert(handler.userEventTriggeredFired, "suppressed UserEventTriggered event")
}
// outbound actions
test(s"$name doesn't suppress Flush event") {
pipeline.flush()
assert(handler.flushFired, "suppressed Flush event")
}
test(s"$name doesn't suppress Write event") {
pipeline.write(new Object)
assert(handler.writeFired, "suppressed Write event")
}
test(s"$name doesn't suppress Deregister event") {
pipeline.deregister()
assert(handler.deregisterFired, "suppressed Deregister event")
}
test(s"$name doesn't suppress Read event") {
pipeline.read()
assert(handler.readFired, "suppressed Read event")
}
test(s"$name doesn't suppress Connect event") {
pipeline.connect(mock[SocketAddress])
assert(handler.connectFired, "suppressed Connect event")
}
test(s"$name doesn't suppress Bind event") {
pipeline.bind(mock[SocketAddress])
assert(handler.bindFired, "suppressed Bind event")
}
// note: we don't test disconnects because the channel types
// we care about are connection oriented and disconnect
// isn't a meaningful operation for them so netty turns them into
// closes.
test(s"$name doesn't suppress Close event") {
pipeline.close()
assert(handler.closeFired, "suppressed Close event")
}
}
/**
* a channel duplex handler which records which events have fired
*/
class TestDuplexHandler extends ChannelDuplexHandler {
// outbound events
var flushFired = false
override def flush(ctx: ChannelHandlerContext): Unit = {
flushFired = true
super.flush(ctx)
}
var writeFired = false
override def write(
ctx: ChannelHandlerContext,
msg: scala.Any,
promise: ChannelPromise
): Unit = {
writeFired = true
super.write(ctx, msg, promise)
}
var closeFired = false
override def close(ctx: ChannelHandlerContext, future: ChannelPromise): Unit = {
closeFired = true
super.close(ctx, future)
}
var deregisterFired = false
override def deregister(ctx: ChannelHandlerContext, future: ChannelPromise): Unit = {
deregisterFired = true
super.deregister(ctx, future)
}
var readFired = false
override def read(ctx: ChannelHandlerContext): Unit = {
readFired = true
super.read(ctx)
}
var connectFired = false
override def connect(
ctx: ChannelHandlerContext,
remoteAddress: SocketAddress,
localAddress: SocketAddress,
future: ChannelPromise
): Unit = {
connectFired = true
super.connect(ctx, remoteAddress, localAddress, future)
}
var bindFired = false
override def bind(
ctx: ChannelHandlerContext,
localAddress: SocketAddress,
future: ChannelPromise
): Unit = {
bindFired = true
super.bind(ctx, localAddress, future)
}
// inbound events
var exceptionCaughtFired = false
override def exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable): Unit = {
exceptionCaughtFired = true
super.exceptionCaught(ctx, cause)
}
var channelActiveFired = false
override def channelActive(ctx: ChannelHandlerContext): Unit = {
channelActiveFired = true
super.channelActive(ctx)
}
var channelReadFired = false
override def channelRead(ctx: ChannelHandlerContext, msg: scala.Any): Unit = {
channelReadFired = true
super.channelRead(ctx, msg)
}
var channelUnregisteredFired = false
override def channelUnregistered(ctx: ChannelHandlerContext): Unit = {
channelUnregisteredFired = true
super.channelUnregistered(ctx)
}
var channelInactiveFired = false
override def channelInactive(ctx: ChannelHandlerContext): Unit = {
channelInactiveFired = true
super.channelInactive(ctx)
}
var channelWritabilityChangedFired = false
override def channelWritabilityChanged(ctx: ChannelHandlerContext): Unit = {
channelWritabilityChangedFired = true
super.channelWritabilityChanged(ctx)
}
var userEventTriggeredFired = false
override def userEventTriggered(ctx: ChannelHandlerContext, evt: scala.Any): Unit = {
userEventTriggeredFired = true
super.userEventTriggered(ctx, evt)
}
var channelRegisteredFired = false
override def channelRegistered(ctx: ChannelHandlerContext): Unit = {
channelRegisteredFired = true
super.channelRegistered(ctx)
}
var channelReadCompleteFired = false
override def channelReadComplete(ctx: ChannelHandlerContext): Unit = {
channelReadCompleteFired = true
super.channelReadComplete(ctx)
}
var handlerRemovedFired = false
override def handlerRemoved(ctx: ChannelHandlerContext): Unit = {
handlerRemovedFired = true
super.handlerRemoved(ctx)
}
var handlerAddedFired = false
override def handlerAdded(ctx: ChannelHandlerContext): Unit = {
handlerAddedFired = true
super.handlerAdded(ctx)
}
}
}
|
twitter/finagle
|
finagle-netty4/src/test/scala/com/twitter/finagle/netty4/channel/HandlerEventTest.scala
|
Scala
|
apache-2.0
| 7,958 |
package com.twitter.scrooge.frontend
class ParseException(reason: String, cause: Throwable) extends Exception(reason, cause) {
def this(reason: String) = this(reason, null)
}
// severe errors
class NegativeFieldIdException(name: String)
extends ParseException("Negative user-provided id in field " + name)
class DuplicateFieldIdException(name: String)
extends ParseException("Duplicate user-provided id in field " + name)
class RepeatingEnumValueException(name: String, value: Int)
extends ParseException("Repeating enum value in " + name + ": " + value)
class UnionFieldInvalidNameException(union: String, field: String)
extends ParseException("Field " + field + " in union " + union + " is prohibited")
// warnings (non-severe errors). If the strict mode is on, Scrooge will throw these exceptions;
// otherwise it merely prints warnings.
class ParseWarning(reason: String, cause: Throwable)
extends ParseException(reason, cause)
{
def this(reason: String) = this(reason, null)
}
class UnionFieldRequiredException(union: String, field: String)
extends ParseWarning("Field " + field + " in union " + union + " cannot be required")
class UnionFieldOptionalException(union: String, field: String)
extends ParseWarning("Field " + field + " in union " + union + " cannot be optional")
object UnionFieldRequirednessException {
def apply(union: String, field: String, requiredness: String): ParseWarning = {
requiredness.toLowerCase match {
case "required" => new UnionFieldRequiredException(union, field)
case "optional" => new UnionFieldOptionalException(union, field)
}
}
}
class InvalidThriftFilenameException(filename: String, regex: String)
extends ParseWarning("Thrift filename " + filename + " is invalid, did not pass this check: " + regex)
/**
* ScroogeInternalException indicates a Scrooge bug
*/
class ScroogeInternalException(msg: String) extends Exception(msg)
|
nshkrob/scrooge
|
scrooge-generator/src/main/scala/com/twitter/scrooge/frontend/ParseException.scala
|
Scala
|
apache-2.0
| 1,922 |
package ru.wordmetrix.vector
import org.scalacheck.{Arbitrary, Gen}
object CheckVectorList extends TestVector {
val empty: Vector[Int] = VectorList.empty[Int]
implicit def VS = Arbitrary[Vector[Int]](for {
keys <- Gen.containerOf[List, Int](for {
k <- Gen.choose(0, 10)
} yield (k)) map (_.distinct.take(5).sortBy(x => x))
values <- Gen.containerOf[List, Double](for {
k1 <- Gen.choose(accuracy, 100)
k2 <- Gen.choose(-100, -accuracy)
} yield (k1 + k2))
} yield {
VectorList[Int](keys.zip(values))
})
}
|
electricmind/utils
|
src/test/scala/ru/wordmetrix/vector/CheckVectorList.scala
|
Scala
|
apache-2.0
| 556 |
package mesosphere.marathon.io.storage
import java.io._
/**
* The local file system implementation.
*
* @param file the underlying file
* @param path the relative path, this item is identified with.
*/
case class FileStorageItem(file: File, basePath: File, path: String, baseUrl: String) extends StorageItem {
def store(fn: OutputStream => Unit): FileStorageItem = {
createDirectory(file.getParentFile)
using(new FileOutputStream(file)) { fn }
this
}
def moveTo(path: String): FileStorageItem = {
val to = new File(basePath, path)
moveFile(file, to)
cleanUpDir(file.getParentFile)
FileStorageItem(to, basePath, path, url)
}
def url: String = s"$baseUrl/$path"
def inputStream(): InputStream = new FileInputStream(file)
def lastModified: Long = file.lastModified()
def length: Long = file.length()
def exists: Boolean = file.exists()
def delete() {
file.delete()
cleanUpDir(file.getParentFile)
}
private def cleanUpDir(dir: File) {
if (!dir.isFile && dir != basePath && dir.list().isEmpty) {
dir.delete()
cleanUpDir(dir.getParentFile)
}
}
}
/**
* The local file system storage implementation.
*
* @param basePath the base path to the managed asset directory
*/
class FileStorageProvider(val url: String, val basePath: File) extends StorageProvider {
require(basePath.exists(), "Base path does not exist: %s. Configuration error?".format(basePath.getAbsolutePath))
def item(path: String): FileStorageItem = {
val file: File = new File(basePath, path)
//make sure, no file from outside base path is created
if (!file.getCanonicalPath.startsWith(basePath.getCanonicalPath)) throw new IOException("Access Denied")
new FileStorageItem(file, basePath, path, url)
}
}
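/**
 * A minimal usage sketch; the base URL, directory and payload below are illustrative only
 * (the provider requires its base directory to exist, hence the mkdirs() call).
 */
object FileStorageProviderExample {
  def main(args: Array[String]): Unit = {
    val base = new File(System.getProperty("java.io.tmpdir"), "assets")
    base.mkdirs()
    val provider = new FileStorageProvider("http://assets.example.org", base)
    val item = provider.item("app/hello.txt")
    item.store(out => out.write("hello".getBytes("UTF-8"))) // creates parent directories, then writes the file
    println(item.url)                                       // http://assets.example.org/app/hello.txt
    item.delete()                                           // removes the file and prunes now-empty parent directories
  }
}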
|
14Zen/marathon
|
src/main/scala/mesosphere/marathon/io/storage/FileStorageProvider.scala
|
Scala
|
apache-2.0
| 1,791 |
package com.expedia.gps.geo.reactive101.client.scala
import com.codahale.metrics.Timer.Context
import com.codahale.metrics._
import com.expedia.gps.geo.reactive101.scala.client._
import org.json4s._
import org.json4s.jackson.JsonMethods._
import scala.concurrent.Future
/**
*
* @author [email protected]
* @since 2015-11-13
*/
object BasicScalaTest extends AbstractTest {
val NB_CALLS = 1000
import scala.concurrent.ExecutionContext.Implicits.global
val client = new DispatchRESTClient
def main(args: Array[String]) {
val mainContext: Timer.Context = mainTimer.time()
val futures: Future[Seq[String]] = doMultipleCall(client, metrics)
futures onComplete { foods =>
println(s"Nb food prepared: ${foods.get.size}")
mainContext.close()
reporter.report()
sys.exit(0)
}
}
def orderAndGetFood: Future[String] = {
val subContext: Context = subTimer.time()
client
.callAsync("localhost:4200", "/food/takeOrder")
.collect({ case success: CallSuccess => success })
.map { success =>
val food = (parse(success.content) \\ "order").extract[String]
food.charAt(0).toUpper + food.substring(1)
}
.flatMap {
food => client.callAsync("localhost:4200", s"/food/prepare$food")
}
.collect({ case success: CallSuccess => success })
.map { success =>
val foodPrepared = (parse(success.content) \\ "food").extract[String]
subContext.close()
foodPrepared
}
}
def doMultipleCall(client: ScalaRESTClient, metrics: MetricRegistry): Future[Seq[String]] = {
val allCallsFutures = (1 to NB_CALLS) map { _ =>
orderAndGetFood
}
Future.sequence(allCallsFutures)
}
}
|
olmartinATexpedia/reactive101
|
src/test/scala/com/expedia/gps/geo/reactive101/client/scala/BasicScalaTest.scala
|
Scala
|
apache-2.0
| 1,732 |
package io.scalajs.nodejs.path
import scala.scalajs.js
/**
* Path Object
* @author [email protected]
*/
class PathObject(val root: js.UndefOr[String] = js.undefined,
val dir: js.UndefOr[String] = js.undefined,
val base: js.UndefOr[String] = js.undefined,
val ext: js.UndefOr[String] = js.undefined,
val name: js.UndefOr[String] = js.undefined)
extends js.Object
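// Hedged sketch (not in the original source): constructing a PathObject by name,
// mirroring the fields Node's path.format() expects. All values are illustrative.
object PathObjectExample {
  def example(): PathObject =
    new PathObject(dir = "/home/user", base = "report.pdf", ext = ".pdf", name = "report")
}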
|
scalajs-io/nodejs
|
app/common/src/main/scala/io/scalajs/nodejs/path/PathObject.scala
|
Scala
|
apache-2.0
| 453 |
package org.scalaperf
package statistics
import scala.annotation.tailrec
import Statistics.arrayOfDoubleToStatsArrayOfDouble
import scala.math.{sqrt, round, min}
case class Outliers(median: Double,
iqr: Double,
lowExtreme: List[(Double, Int)],
lowMild: List[(Double, Int)],
highExtreme: List[(Double, Int)],
highMild: List[(Double, Int)]) {
def isEmpty: Boolean = (lowExtreme.isEmpty && lowMild.isEmpty && highExtreme.isEmpty && highMild.isEmpty)
}
case class SerialCorrelation(N: Int,
nbOutsideExpected: Int,
K: Int,
outsideBelow: List[(Double, Double, Int)],
outsideAbove: List[(Double, Double, Int)]) {
def isEmpty: Boolean = (outsideBelow.isEmpty && outsideAbove.isEmpty)
def isMeaningless = (N < 50)
def hasMoreThanExpected = (outsideBelow.size + outsideAbove.size) > nbOutsideExpected
def size = (outsideBelow.size + outsideAbove.size)
}
/**
* Scala translation of Brent Boyer code
* http://www.ellipticgroup.com/html/benchmarkingArticle.html
*/
object Diagnostics {
// @see bb.util.Stats.diagnoseSdOfActions(double)
def diagnoseSdForAction(blockStats: Stats, nbAction: Int): Option[Double] = {
import scala.math.{sqrt, min}
if (nbAction < 16) return None
val muB = blockStats.mean
val sigmaB = blockStats.sd
val muA = muB / nbAction
val sigmaA = sigmaB / sqrt(nbAction)
val tMin = 0
val muGMin = (muA + tMin) / 2
val sigmaG = min( (muGMin - tMin) / 4, sigmaA)
if (sigmaB == 0) return None
val cMax1 = cMaxSolver(nbAction, muB, sigmaB, muA, sigmaG, tMin)
val cMax2 = cMaxSolver(nbAction, muB, sigmaB, muA, sigmaG, muGMin)
val cMax = min(cMax1, cMax2)
if (cMax == 0) return None
val var1 = varianceOutliers(nbAction, sigmaB, sigmaG, 1)
val var2 = varianceOutliers(nbAction, sigmaB, sigmaG, cMax)
val cOutMin = if (var1 < var2) 1L else cMax
val varOutMin = if (var1 < var2) var1 else var2
val varBGOutMin = (sigmaB * sigmaB) - ((nbAction - cOutMin) * (sigmaG * sigmaG))
val muGOutMin = muA - sqrt( (cOutMin * varBGOutMin) / (nbAction * (nbAction - cOutMin)))
val UOutMin = muA + sqrt( ((nbAction - cOutMin) * varBGOutMin) / (nbAction * cOutMin))
val fractionVarOutlierMin = varOutMin / (sigmaB * sigmaB)
if (fractionVarOutlierMin < 0.01) None
else Some(fractionVarOutlierMin)
}
// @see bb.util.Stats.cMaxSolver(double, double, double, double, double, double)
private def cMaxSolver(a: Double, muB: Double, sigmaB: Double, muA: Double, sigmaG: Double, x: Double) = {
import scala.math.{floor, sqrt}
val k0 = -a * a * (muA - x) * (muA - x)
val k1 = (sigmaB * sigmaB) - (a * sigmaG * sigmaG) + (a * (muA - x) * (muA -x))
val k2 = sigmaG * sigmaG
val determinant = (k1 * k1) - (4 * k2 * k0)
val cMax = floor(-2 * k0 / (k1 + sqrt(determinant))).toLong
cMax
}
// @see bb.util.Stats.varianceOutliers(double, double, double, double)
private def varianceOutliers(a: Double, sigmaB: Double, sigmaG: Double, c: Double) = {
((a - c) / a) * ((sigmaB * sigmaB) - ((a - c) * (sigmaG * sigmaG)))
}
// @see bb.util.Benchmark.diagnoseOutliers()
def diagnoseOutliers(sample: Array[Double]): Outliers = {
val sorted = sample.sortWith(_ < _)
val quartile1 = sorted.quantile(1, 4)
val median = sorted.median
val quartile3 = sorted.quantile(3, 4)
val iqr = quartile3 - quartile1
val lowExtreme = quartile1 - (3 * iqr)
val lowMild = quartile1 - (1.5 * iqr)
val highExtreme = quartile3 + (3 * iqr)
val highMild = quartile3 + (1.5 * iqr)
@tailrec
def partition(data: List[(Double, Int)], current: Outliers): Outliers = {
(data, current) match {
case (Nil, _) => current
case ((d, i)::tail, Outliers(median, iqr, les, lms, hes, hms)) if d < lowExtreme => partition(tail, Outliers(median, iqr, (d, i + 1)::les, lms, hes, hms))
case ((d, i)::tail, Outliers(median, iqr, les, lms, hes, hms)) if d < lowMild => partition(tail, Outliers(median, iqr, les, (d, i + 1)::lms, hes, hms))
case ((d, i)::tail, Outliers(median, iqr, les, lms, hes, hms)) if d > highExtreme => partition(tail, Outliers(median, iqr, les, lms, (d, i + 1)::hes, hms))
case ((d, i)::tail, Outliers(median, iqr, les, lms, hes, hms)) if d > highMild => partition(tail, Outliers(median, iqr, les, lms, hes, (d, i + 1)::hms))
case (_::tail, outliners) => partition(tail, outliners)
}
}
val ols = partition(sorted.toList.zipWithIndex, Outliers(median, iqr, Nil, Nil, Nil, Nil))
Outliers(median,
iqr,
ols.lowExtreme.reverse,
ols.lowMild.reverse,
ols.highExtreme.reverse,
ols.highMild.reverse)
}
// @see bb.util.Benchmark.diagnoseSerialCorrelation()
def diagnoseSerialCorrelation(sample: Array[Double]): SerialCorrelation = {
val N = sample.length
if (N < 50) SerialCorrelation(N, 0, 0, Nil, Nil)
else {
val (r, ciLower, ciUpper) = autocorrelation(sample)
val K = min(round(N / 4d).toInt, 20)
val nbOutsideExpected = round( (1 - 0.95) * K ).toInt
@tailrec
def partition(data: List[(Double, Int)], current: SerialCorrelation): SerialCorrelation = {
(data, current) match {
case (Nil, _) =>
current
case ((_, k)::tail, SerialCorrelation(N, nbExpected, K, below, above)) if k == 0 =>
partition(tail, SerialCorrelation(N, nbExpected, K, below, above))
case ((d, k)::tail, SerialCorrelation(N, nbExpected, K, below, above)) if r(k) < ciLower(k) || r(k) > ciUpper(k) =>
val mean = (ciUpper(k) + ciLower(k)) / 2
val sigma = (ciUpper(k) - ciLower(k)) / (2 * 1.96)
val diff = r(k) - mean
val scale = diff / sigma
if (diff > 0) partition(tail, SerialCorrelation(N, nbExpected, K, below, (r(k), scale, k)::above))
else partition(tail, SerialCorrelation(N, nbExpected, K, (r(k), scale, k)::below, above))
case (_::tail, SerialCorrelation(N, nbExpected, K, below, above)) =>
partition(tail, SerialCorrelation(N, nbExpected, K, below, above))
}
}
val sc = partition(r.toList.zipWithIndex, SerialCorrelation(N, nbOutsideExpected, K, Nil, Nil))
SerialCorrelation(sc.N,
sc.nbOutsideExpected,
sc.K,
sc.outsideBelow.reverse,
sc.outsideAbove.reverse)
}
}
// @see bb.science.Math2.autocorrelation(double[])
private def autocorrelation(numbers: Array[Double]): (Array[Double], Array[Double], Array[Double]) = {
val N = numbers.length
val c = autocovariance(numbers)
val r = c.zipWithIndex.map { case (value, index) =>
if (index == 0) 1d
else value / c(0)
}
val llse = r.zipWithIndex.map { case (_, index) =>
if (index == 0) (0d, 0)
else {
val vark = Some((1 until index).foldLeft(0d) { (vark, i) =>
vark + (r(i) * r(i))
}).map(_ * 2).map(_ + 1).map(_ / N).map(sqrt(_)).get
(vark, index)
}
}
val meanr = -1d / N
def ci(op: (Double, Double) => Double) = {
llse.map { case (value, index) =>
if (index == 0) 1d
else {
val delta = 1.96 * value
op(meanr, delta)
}
}
}
val ciLower = ci(_ -_)
val ciUpper = ci(_ + _)
(r, ciLower, ciUpper)
}
// @see bb.science.Math2.autocovariance(double[])
private def autocovariance(numbers: Array[Double]): Array[Double] = {
val mean = numbers.mean
val N = numbers.length
(0 until N - 1).map { k =>
val sum = (0 until N - k).foldLeft[Double](0) { (sum, i) =>
sum + ((numbers(i) - mean) * (numbers(i + k) - mean))
}
sum / N
}.toArray
}
}
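// --- Hedged usage sketch (not part of the original file) ---
// A single large value in an otherwise flat sample should be reported as a high
// outlier. The literal sample is illustrative; the exact thresholds depend on the
// quantile implementation in Statistics.
object DiagnosticsExample {
  def main(args: Array[String]): Unit = {
    val sample = Array(10.0, 10.1, 9.9, 10.0, 10.2, 9.8, 10.1, 25.0)
    val outliers = Diagnostics.diagnoseOutliers(sample)
    println(outliers.isEmpty)     // expected: false
    println(outliers.highExtreme) // expected: one entry for 25.0
  }
}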
|
tonymagne/scalaperf
|
src/main/scala/org/scalaperf/statistics/Diagnostics.scala
|
Scala
|
lgpl-3.0
| 8,641 |
/*
* Copyright 2016 Nicolas Rinaudo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kantan.regex.scalaz
import kantan.regex.laws.{IllegalGroup, IllegalMatch, LegalGroup, LegalMatch}
import kantan.regex.laws.discipline.{DisciplineSuite, GroupDecoderTests, MatchDecoderTests}
import kantan.regex.scalaz.arbitrary._
import org.scalacheck.Arbitrary
import scalaz.Maybe
class MaybeDecoderTests extends DisciplineSuite {
implicit val legalGroup: Arbitrary[LegalGroup[Maybe[Int]]] = arbLegalMaybe
implicit val illegalGroup: Arbitrary[IllegalGroup[Maybe[Int]]] = arbIllegalMaybe
implicit val legalMatch: Arbitrary[LegalMatch[Maybe[Int]]] = arbLegalMaybe
implicit val illegalMatch: Arbitrary[IllegalMatch[Maybe[Int]]] = arbIllegalMaybe
checkAll("GroupDecoder[Maybe[Int]]", GroupDecoderTests[Maybe[Int]].decoder[Int, Int])
checkAll("MatchDecoder[Maybe[Int]]", MatchDecoderTests[Maybe[Int]].decoder[Int, Int])
}
|
nrinaudo/kantan.regex
|
scalaz/shared/src/test/scala/kantan/regex/scalaz/MaybeDecoderTests.scala
|
Scala
|
apache-2.0
| 1,450 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark
import org.apache.spark.internal.{Logging => ILogging}
/**
* Extension of Logging class - buyer beware.
*/
trait Logging extends ILogging {
}
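// Hedged sketch (not part of the original file): any test helper can mix the trait in
// to reach Spark's internal logging API. The class name is illustrative.
class ExampleWithLogging extends Logging {
  def touch(): Unit = logInfo("hello from spark-testing-base Logging")
}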
|
holdenk/spark-testing-base
|
src/main/2.0/scala/org/apache/spark/Logging.scala
|
Scala
|
apache-2.0
| 973 |
import scala.quoted._
object Macros {
inline def blackbox: Int = ${one}
transparent inline def whitebox: Int = ${one}
private def one(using Quotes): Expr[Int] = Expr(1)
}
|
dotty-staging/dotty
|
tests/run-macros/whitebox-inline-macro/Macro_1.scala
|
Scala
|
apache-2.0
| 182 |
package base.utils
import java.text.SimpleDateFormat
import java.util.{Date, TimeZone}
import base.controllers.EnvironmentAll
import base.MyConfigImplicit._
import org.apache.commons.lang3.StringEscapeUtils
import play.twirl.api.{Html, JavaScript}
object Formatter {
def date(d: Date, format: String = "MM/dd/yyyy hh:mm:ss aa")(implicit env: EnvironmentAll):String = {
Option(d).fold{
""
} { date =>
val f = new SimpleDateFormat(format)
f.setTimeZone(TimeZone.getTimeZone(env.config.appTimeZone))
f.format(date)
}
}
  def dateShort(d: Date)(implicit env:EnvironmentAll):String = date(d,"MM/dd/yy hh:mm aa")
def date(dateAsLong: Long)(implicit env: EnvironmentAll):String = date(new Date(dateAsLong))(env)
def time(x: Long):String = {
Option(x).fold{
""
} { x =>
val hours = x / 3600
      val secondsLeft = x % 3600
val minutes = secondsLeft / 60
val seconds = secondsLeft % 60
s"$hours:$minutes:$seconds"
}
}
def jsTemplate(x: Html):Html = Html(JavaScript(x.toString.replace('\\n',' ')).toString)
def htmlFormatN2Br(s: String) = Option(s).fold(""){ s=>
StringEscapeUtils.escapeHtml4(s).replace("'", "'").replaceAll("\\\\n", "<br/>")
}
def replaceInHtmlTemplate(s: String, token: String, value: String): String = s.replace("$" + token + "$", htmlFormatN2Br(value))
def conditionalReplaceInHtmlTemplate(s: String, token: String, value: String): String = {
if (Option(value).forall(!_.isEmpty))
s.replaceAll("(?s)\\\\Q$" + token + "${\\\\E.*?\\\\Q}\\\\E", "")
else
s.replaceAll("(?s)\\\\Q$" + token + "${\\\\E(.*?)\\\\Q}\\\\E", "$1").replace("$" + token + "$", htmlFormatN2Br(value))
}
}
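// --- Hedged usage sketch (not part of the original file) ---
// Exercises the helper that needs no Play environment; date/dateShort require an
// implicit EnvironmentAll and are omitted here. The input value is illustrative.
object FormatterExample {
  def main(args: Array[String]): Unit = {
    println(Formatter.time(125)) // "0:2:5" (0h 2m 5s)
  }
}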
|
tarasbilinsky/tam
|
app/base/utils/Formatter.scala
|
Scala
|
apache-2.0
| 1,709 |
/**
* ____ __ ____ ____ ____,,___ ____ __ __ ____
* ( _ \ /__\ (_ )(_ _)( ___)/ __) ( _ \( )( )( _ \ Read
* ) / /(__)\ / /_ _)(_ )__) \__ \ )___/ )(__)( ) _ < README.txt
* (_)\_)(__)(__)(____)(____)(____)(___/ (__) (______)(____/ LICENSE.txt
*/
package razie.hosting
import com.mongodb.casbah.Imports._
import com.novus.salat._
import razie.Logging
import razie.audit.Audit
import razie.db.RazMongo
import razie.db.RazSalatContext._
import razie.diesel.engine.DieselAppContext
import razie.diesel.model.{DieselMsg, DieselMsgString, DieselTarget, ScheduledDieselMsg, ScheduledDieselMsgString}
import razie.tconf.hosting.Reactors
import razie.wiki.admin.GlobalData
import razie.wiki.model._
import razie.wiki.util.DslProps
import razie.wiki.{Services, WikiConfig}
import scala.collection.mutable.ListBuffer
import scala.concurrent.{Future, Promise}
import scala.util.Try
/**
* reactor management (multi-tenant) - we can host multiple wikis/websites, each is a "reactor"
*
* this holds all the reactors hosted in this process
*/
object WikiReactors extends Logging with Reactors {
// reserved reactors
final val RK = WikiConfig.RK // original, still decoupling code
final val WIKI = "wiki" // main reactor
final val SPECS = "specs" // main reactor
final val NOTES = WikiConfig.NOTES
final val ALIASES = Map ("www" -> "specs")
/** lower case index of loaded reactors - reactor names are insensitive */
val lowerCase = new collection.mutable.HashMap[String,String]()
// loaded in Global
val reactors = new collection.mutable.HashMap[String,Reactor]()
// todo mt-safe use collection.concurrent.TrieMap ?
// all possible reactors, some loaded some not
val allReactors = new collection.mutable.HashMap[String,WikiEntry]()
var allLowercase = List[String]()
/** find the mixins from realm properties */
private def getMixins (we:Option[WikiEntry]) =
new DslProps(we, "website,properties")
.prop("mixins")
.map(_
.split(",")
.filter(_.trim.length > 0))
.filter(_.size > 0) // not mixins = NOTHING
.getOrElse{
// the basic cannot depend on anyone other than what they want
if(we.exists(we=> Array(RK,NOTES,WIKI) contains we.name)) Array.empty[String]
else Array(WIKI)
}
def findWikiEntry(name:String, cat:String = "Reactor") =
RazMongo(Wikis.TABLE_NAME)
.findOne(Map("category" -> cat, "name" -> name))
.map(grater[WikiEntry].asObject(_))
def rk = reactors(RK)
def wiki = reactors(WIKI)
def add (realm:String, we:WikiEntry): Reactor = synchronized {
assert(! reactors.contains(realm), "Sorry, SITE_ERR: Reactor already active ???")
val r = Services.mkReactor(realm, getMixins(Some(we)).flatMap(reactors.get).toList, Some(we))
reactors.put(realm, r)
lowerCase.put(realm.toLowerCase, realm)
r
}
def contains (realm:String) : Boolean = reactors.contains(realm)
def apply (realm:String = Wikis.RK) : Reactor = {
// preload
if(reactors.isEmpty) loadReactors()
// anything not preloaded, load now
if(!reactors.contains(realm) && allReactors.contains(realm)) loadReactor(realm)
reactors.getOrElse(realm, rk)
} // using RK as a fallback
def maybeFind (realm:String = Wikis.RK) : Option[Reactor] = {
reactors.get(realm)
}
override def getProperties (realm:String) : Map[String,String] = {
apply(realm).websiteProps.props
}
// ====================================== loading
// stays on even after they're loaded, so it's never done again
@volatile var loading = false
private def loadReactors(): Unit = synchronized {
if(loading) {
Audit.logdb("DEBUG-WARNING", "Already Loading reactors " + Thread.currentThread().getName)
return
}
loading = true
//todo - sharding... now all realms currently loaded in this node
val res = reactors
// load reserved reactors: rk and wiki first
var toload = Services.config.preload.split(",")
// list all reactors to be loaded and pre-fetch wikis
//todo with large numbers of reactors, this will leak
RazMongo(Wikis.TABLE_NAME)
.find(Map("category" -> "Reactor"))
.map(grater[WikiEntry].asObject(_))
.toList
.foreach(we=>allReactors.put(we.name, we))
allLowercase = allReactors.keySet.map(_.toLowerCase).toList
// filter toload and keep only what's actually there - on localhost, not all are there
toload = toload.filter(allLowercase.contains)
clog <<
s"""
|=======================================================
| Preloading reactors: ${toload.mkString}
|=======================================================
|""".stripMargin
// load the basic reactors ahead of everyone that might depend on them
if(toload contains RK) loadReactor(RK)
if(toload contains NOTES) { // todo does not have a wiki - damn, can't use loadReactor
res.put (NOTES, Services.mkReactor(NOTES, Nil, None))
lowerCase.put(NOTES, NOTES)
}
if(toload contains WIKI) loadReactor(WIKI)
// todo this will create issues such that for a while after startup things are weird
razie.Threads.fork {
synchronized {
try {
// the basic were already loaded, will be ignored
Audit.logdb("DEBUG", "Loading reactors " + Thread.currentThread().getName)
toload.foreach(loadReactor(_, None))
// now load the rest
val rest = allReactors.filter(x => !reactors.contains(x._1)).map(_._1)
rest.foreach(loadReactor(_, None))
} catch {
case t: Throwable =>
error("while loading reactors", t)
Audit.logdb("DEBUG-ERR", "EXCEPTION loading reactors " + t)
}
Audit.logdb("DEBUG", "DONE Loaded reactors " + Thread.currentThread().getName)
}
// all loaded - start other problematic services
clog <<
"""
|=======================================================
|
| Reactors loaded
|
|=======================================================
|""".stripMargin
DieselAppContext.start
GlobalData.reactorsLoaded = true
GlobalData.reactorsLoadedP.success(true)
}
}
/** lazy load a reactor
*
* @param r
* @param useThis when reloading a new version
*/
private def loadReactor(r:String, useThis:Option[WikiEntry] = None, reload:Boolean=false) : Unit = synchronized {
clog <<
s"""
|=========== loadReactor: $r
|""".stripMargin
if (!reload && lowerCase.contains(r.toLowerCase) && useThis.isEmpty) return;
try {
var toLoad = new ListBuffer[WikiEntry]()
toLoad append useThis.getOrElse(allReactors(r))
val max = 20 // linearized mixins max
var curr = 0
// lazy depys
while (curr < max && !toLoad.isEmpty) {
curr += 1
val copy = toLoad.toList
toLoad.clear()
// todo smarter linearization of mixins
copy.foreach { we =>
val mixins = getMixins(Some(we))
val realm = we.wid.name
if (mixins.foldLeft(true) { (a, b) => a && lowerCase.contains(b.toLowerCase) }) {
// all mixins are loaded, go ahead
clog << "LOADING REACTOR " + realm
val re = Services.mkReactor(we.name, mixins.toList.map(x => reactors(x)), Some(we))
reactors.put(we.name, re)
lowerCase.put(we.name.toLowerCase, we.name)
val envSettings = re.wiki.find("Spec", "EnvironmentSettings")
val startupFlow =
s"""
|$$send diesel.realm.configure(realm="$realm")
|
|$$send diesel.realm.loaded(realm="$realm")
|
|$$send diesel.realm.ready(realm="$realm")
|""".stripMargin
// send realm.config and load messages if anyone used it
if (envSettings.exists(_.content.contains(DieselMsg.REALM.REALM_CONFIGURE)) ||
envSettings.exists(_.content.contains(DieselMsg.REALM.REALM_LOADED))) {
Services ! ScheduledDieselMsgString("1 second", DieselMsgString(
startupFlow,
DieselTarget.ENV(realm),
Map("realm" -> realm)
))
}
// if (envSettings.exists(_.content.contains(DieselMsg.REALM.REALM_CONFIGURE))) {
// Services ! ScheduledDieselMsg("1 second", DieselMsg(
// DieselMsg.REALM.ENTITY,
// DieselMsg.REALM.CONFIGURE,
// Map("realm" -> realm),
// DieselTarget.ENV(realm)
// ))
// }
//
// if (envSettings.exists(_.content.contains(DieselMsg.REALM.REALM_LOADED))) {
// Services ! ScheduledDieselMsg("10 seconds", DieselMsg(
// DieselMsg.REALM.ENTITY,
// DieselMsg.REALM.LOADED,
// Map("realm" -> realm),
// DieselTarget.ENV(realm)
// ))
// }
} else {
clog << s"NEED TO LOAD LATER REACTOR ${we.wid.name} depends on ${mixins.mkString(",")}"
toLoad appendAll mixins.filterNot(x => lowerCase.contains(x.toLowerCase)).map(allReactors.apply)
toLoad += we
}
}
}
} catch {
case t: Throwable =>
error("while loading reactor "+r, t)
Audit.logdb("DEBUG-ERR", "EXCEPTION loading reactor " + r + " - " + t)
}
}
def reload(r:String): Unit = synchronized {
// can't remove them first, so we can reload RK reactor
findWikiEntry(r).foreach{we=>
// first, refresh the loaded copy
allReactors.put(r, we)
// todo no mixins? just wiki ?
// reactors.put (we.name, Services.mkReactor(we.name, List(wiki), Some(we)))
// then reload
loadReactor(r, Some(we), true)
}
}
lazy val fallbackProps = new DslProps(WID("Reactor", "wiki").r("wiki").page, "properties,properties")
WikiObservers mini {
// todo on remote nodes, load the Some(x) from id
case WikiEvent(_, "WikiEntry", _, Some(x), _, _, _) => {
// reload fallbacks when mixins change
if (fallbackProps.we.exists(_.uwid == x.asInstanceOf[WikiEntry].uwid)) {
fallbackProps.reload(x.asInstanceOf[WikiEntry])
}
// reload reactor when changes
if (x.isInstanceOf[WikiEntry] && x.asInstanceOf[WikiEntry].category == "Reactor") {
val we = x.asInstanceOf[WikiEntry]
razie.audit.Audit.logdb("DEBUG", "event.reloadreactor", we.wid.wpath)
loadReactor(we.name, Some(we))
// WikiReactors.reload(we.name);
Website.clean(we.name + ".dieselapps.com")
new Website(we).prop("domain").map(Website.clean)
}
// reload EnvironmentSettings when changes
if (x.isInstanceOf[WikiEntry]
&& x.asInstanceOf[WikiEntry].category == "Spec"
&& x.asInstanceOf[WikiEntry].name == "EnvironmentSettings") {
val we = x.asInstanceOf[WikiEntry]
razie.audit.Audit.logdb("DEBUG", "event.realm.configure", we.wid.wpath)
// send realm.config message if anyone used it
if(we.content.contains(DieselMsg.REALM.REALM_CONFIGURE)) {
Services ! ScheduledDieselMsg("1 milliseconds", DieselMsg(
DieselMsg.REALM.ENTITY,
DieselMsg.REALM.CONFIGURE,
Map("realm" -> we.realm),
DieselTarget.ENV(we.realm)
))
}
}
}
}
WikiIndex.init()
}
|
razie/diesel-hydra
|
diesel/src/main/scala/razie/hosting/WikiReactors.scala
|
Scala
|
apache-2.0
| 11,766 |
/**
* This file is part of the TA Buddy project.
* Copyright (c) 2014 Alexey Aksenov [email protected]
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Affero General Global License version 3
* as published by the Free Software Foundation with the addition of the
* following permission added to Section 15 as permitted in Section 7(a):
* FOR ANY PART OF THE COVERED WORK IN WHICH THE COPYRIGHT IS OWNED
 * BY Limited Liability Company «MEZHGALAKTICHESKIJ TORGOVYJ ALIANS»,
 * Limited Liability Company «MEZHGALAKTICHESKIJ TORGOVYJ ALIANS» DISCLAIMS
* THE WARRANTY OF NON INFRINGEMENT OF THIRD PARTY RIGHTS.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Affero General Global License for more details.
* You should have received a copy of the GNU Affero General Global License
* along with this program; if not, see http://www.gnu.org/licenses or write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA, 02110-1301 USA, or download the license from the following URL:
* http://www.gnu.org/licenses/agpl.html
*
* The interactive user interfaces in modified source and object code versions
* of this program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU Affero General Global License.
*
* In accordance with Section 7(b) of the GNU Affero General Global License,
* you must retain the producer line in every report, form or document
* that is created or manipulated using TA Buddy.
*
* You can be released from the requirements of the license by purchasing
* a commercial license. Buying such a license is mandatory as soon as you
* develop commercial activities involving the TA Buddy software without
* disclosing the source code of your own applications.
* These activities include: offering paid services to customers,
* serving files in a web or/and network application,
* shipping TA Buddy with a closed source product.
*
* For more information, please contact Digimead Team at this
* address: [email protected]
*/
package org.digimead.tabuddy.desktop.logic.script
import com.google.common.cache.{ CacheBuilder, CacheLoader, LoadingCache, RemovalListener, RemovalNotification }
import java.util.concurrent.TimeUnit
import org.digimead.digi.lib.api.XDependencyInjection
import org.digimead.digi.lib.log.api.XLoggable
import scala.language.implicitConversions
import scala.util.DynamicVariable
/**
* Global, application wide cache with script containers.
*/
class Cache {
/** Thread local script evaluator. */
  protected val evaluator = new DynamicVariable[Option[() ⇒ Script.Container[_]]](None)
/** Cache storage. */
val storage = CacheBuilder.newBuilder().
softValues().
maximumSize(Cache.maxSize).
expireAfterAccess(Cache.entryTTL, TimeUnit.SECONDS).
removalListener(new Cache.ScriptRemovalListener).
build(new Cache.ScriptLoader)
}
object Cache extends XLoggable {
implicit def cache2implementation(c: Cache.type): Cache = c.inner
implicit def cache2storage(c: Cache.type): LoadingCache[String, Script.Container[_]] = c.inner.storage
/** Get Cache entry time to live (in seconds). */
def entryTTL = DI.entryTTL
/** Get Cache implementation. */
def inner = DI.implementation
/** Get Cache maximum size. */
def maxSize = DI.maxSize
/** Get cached or evaluate new script. */
  def withScript[T](unique: String)(f: ⇒ Script.Container[T]): Script.Container[T] =
    inner.evaluator.withValue(Some(() ⇒ f)) { inner.storage.get(unique).asInstanceOf[Script.Container[T]] }
/**
* Thread local loader.
*/
class ScriptLoader extends CacheLoader[String, Script.Container[_]] {
def load(unique: String): Script.Container[_] = {
Cache.log.debug("Looking up script container with key:" + unique)
val evaluator = inner.evaluator.value.get // throw NoSuchElementException as expected
val container = evaluator()
if (container.className != "Evaluator__" + unique)
throw new IllegalArgumentException(s"Expect ${"Evaluator__" + unique} but found ${container.className}")
container
}
}
/**
* Clear script container before dispose.
*/
class ScriptRemovalListener extends RemovalListener[String, Script.Container[_]] {
/** Synchronization lock. */
protected val lock = new Object
def onRemoval(notification: RemovalNotification[String, Script.Container[_]]) = lock.synchronized {
Cache.log.debug(s"Script container associated with the key(${notification.getKey()}) is removed.")
Option(notification.getValue()).foreach(_.clear()) // value maybe GC'ed
}
}
/**
* Dependency injection routines
*/
private object DI extends XDependencyInjection.PersistentInjectable {
/** Cache implementation. */
lazy val implementation = injectOptional[Cache] getOrElse new Cache
/** Cache maximum size. */
lazy val maxSize = injectOptional[Int]("Script.Cache.MaxSize") getOrElse 100
/** Cache entry time to live (in seconds). */
lazy val entryTTL = injectOptional[Long]("Script.Cache.TTL") getOrElse 3600L // 1h
}
}
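// Hedged usage note (not part of the original file): callers are expected to go through
// Cache.withScript, which stashes the evaluation thunk in the thread-local `evaluator`
// so that ScriptLoader can run it on a cache miss, roughly:
//   Cache.withScript[Unit](unique) { buildContainer(unique) }
// where buildContainer is a hypothetical factory returning a Script.Container[Unit]
// whose className is "Evaluator__" + unique.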
|
digimead/digi-TABuddy-desktop
|
part-logic/src/main/scala/org/digimead/tabuddy/desktop/logic/script/Cache.scala
|
Scala
|
agpl-3.0
| 5,297 |
package me.archdev.restapi
package object core {
type UserId = String
type AuthToken = String
final case class AuthTokenContent(userId: UserId)
final case class AuthData(id: UserId, username: String, email: String, password: String) {
require(id.nonEmpty, "id.empty")
require(username.nonEmpty, "username.empty")
require(email.nonEmpty, "email.empty")
require(password.nonEmpty, "password.empty")
}
final case class UserProfile(id: UserId, firstName: String, lastName: String) {
    require(id.nonEmpty, "id.empty")
require(firstName.nonEmpty, "firstName.empty")
require(lastName.nonEmpty, "lastName.empty")
}
final case class UserProfileUpdate(firstName: Option[String] = None, lastName: Option[String] = None) {
def merge(profile: UserProfile): UserProfile =
UserProfile(profile.id, firstName.getOrElse(profile.firstName), lastName.getOrElse(profile.lastName))
}
}
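// --- Hedged usage sketch (not part of the original file) ---
// Shows how a partial update merges into an existing profile; the id and names are
// illustrative only.
object UserProfileUpdateExample {
  def main(args: Array[String]): Unit = {
    val profile = core.UserProfile("42", "Ada", "Lovelace")
    val update  = core.UserProfileUpdate(firstName = Some("Augusta"))
    println(update.merge(profile)) // UserProfile(42,Augusta,Lovelace)
  }
}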
|
ArchDev/akka-http-rest
|
src/main/scala/me/archdev/restapi/core/package.scala
|
Scala
|
mit
| 936 |
package dotty.tools
package dotc
package core
package tasty
import Contexts._, Symbols._, Types._, Scopes._, SymDenotations._, Names._, NameOps._
import StdNames._, Denotations._, Flags._, Constants._, Annotations._
import util.Positions._
import ast.{tpd, Trees, untpd}
import Trees._
import Decorators._
import TastyUnpickler._, TastyBuffer._, PositionPickler._
import scala.annotation.{tailrec, switch}
import scala.collection.mutable.ListBuffer
import scala.collection.{ mutable, immutable }
import typer.Mode
import config.Printers.pickling
/** Unpickler for typed trees
* @param reader the reader from which to unpickle
* @param tastyName the nametable
*/
class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
import TastyFormat._
import TastyName._
import tpd._
private var readPositions = false
private var totalRange = NoPosition
private var positions: collection.Map[Addr, Position] = _
/** Make a subsequent call to `unpickle` return trees with positions
* @param totalRange the range position enclosing all returned trees,
* or NoPosition if positions should not be unpickled
* @param positions a map from tree addresses to their positions relative
* to positions of parent nodes.
*/
def usePositions(totalRange: Position, positions: collection.Map[Addr, Position]): Unit = {
readPositions = true
this.totalRange = totalRange
this.positions = positions
}
private val symAtAddr = new mutable.HashMap[Addr, Symbol]
private val unpickledSyms = new mutable.HashSet[Symbol]
private val treeAtAddr = new mutable.HashMap[Addr, Tree]
private val typeAtAddr = new mutable.HashMap[Addr, Type] // currently populated only for types that are known to be SHAREd.
private var stubs: Set[Symbol] = Set()
private var roots: Set[SymDenotation] = null
private def registerSym(addr: Addr, sym: Symbol) = {
symAtAddr(addr) = sym
unpickledSyms += sym
}
/** Enter all toplevel classes and objects into their scopes
* @param roots a set of SymDenotations that should be overwritten by unpickling
*/
def enterTopLevel(roots: Set[SymDenotation])(implicit ctx: Context): Unit = {
this.roots = roots
new TreeReader(reader).fork.indexStats(reader.endAddr)
}
/** The unpickled trees */
def unpickle()(implicit ctx: Context): List[Tree] = {
assert(roots != null, "unpickle without previous enterTopLevel")
val stats = new TreeReader(reader)
.readTopLevel()(ctx.addMode(Mode.AllowDependentFunctions))
normalizePos(stats, totalRange)
stats
}
def toTermName(tname: TastyName): TermName = tname match {
case Simple(name) => name
case Qualified(qual, name) => toTermName(qual) ++ "." ++ toTermName(name)
case Signed(original, params, result) => toTermName(original)
case Shadowed(original) => toTermName(original).shadowedName
case Expanded(prefix, original) => toTermName(original).expandedName(toTermName(prefix))
case ModuleClass(original) => toTermName(original).moduleClassName.toTermName
case SuperAccessor(accessed) => ???
case DefaultGetter(meth, num) => ???
}
def toTermName(ref: NameRef): TermName = toTermName(tastyName(ref))
def toTypeName(ref: NameRef): TypeName = toTermName(ref).toTypeName
class Completer(reader: TastyReader) extends LazyType {
import reader._
def complete(denot: SymDenotation)(implicit ctx: Context): Unit = {
treeAtAddr(currentAddr) = new TreeReader(reader).readIndexedDef()
}
}
class TreeReader(val reader: TastyReader) {
import reader._
def forkAt(start: Addr) = new TreeReader(subReader(start, endAddr))
def fork = forkAt(currentAddr)
def skipTree(tag: Int): Unit =
if (tag >= firstLengthTreeTag) goto(readEnd())
else if (tag >= firstNatASTTreeTag) { readNat(); skipTree() }
else if (tag >= firstASTTreeTag) skipTree()
else if (tag >= firstNatTreeTag) readNat()
def skipTree(): Unit = skipTree(readByte())
def skipParams(): Unit =
while (nextByte == PARAMS || nextByte == TYPEPARAM) skipTree()
/** The next tag, following through SHARED tags */
def nextUnsharedTag: Int = {
val tag = nextByte
if (tag == SHARED) {
val lookAhead = fork
lookAhead.reader.readByte()
forkAt(lookAhead.reader.readAddr()).nextUnsharedTag
}
else tag
}
def readName(): TermName = toTermName(readNameRef())
def readNameSplitSig()(implicit ctx: Context): Any /* TermName | (TermName, Signature) */ =
tastyName(readNameRef()) match {
case Signed(original, params, result) =>
var sig = Signature(params map toTypeName, toTypeName(result))
if (sig == Signature.NotAMethod) sig = Signature.NotAMethod
(toTermName(original), sig)
case name =>
toTermName(name)
}
// ------ Reading types -----------------------------------------------------
/** Read names in an interleaved sequence of (parameter) names and types/bounds */
def readParamNames[N <: Name](end: Addr): List[N] =
until(end) {
val name = readName().asInstanceOf[N]
skipTree()
name
}
/** Read types or bounds in an interleaved sequence of (parameter) names and types/bounds */
def readParamTypes[T <: Type](end: Addr)(implicit ctx: Context): List[T] =
until(end) { readNat(); readType().asInstanceOf[T] }
    /** Read reference to definition and return symbol created at that definition */
def readSymRef()(implicit ctx: Context): Symbol = {
val start = currentAddr
val addr = readAddr()
symAtAddr get addr match {
case Some(sym) => sym
case None =>
// Create a stub; owner might be wrong but will be overwritten later.
forkAt(addr).createSymbol()
val sym = symAtAddr(addr)
ctx.log(i"forward reference to $sym")
stubs += sym
sym
}
}
/** Read a type */
def readType()(implicit ctx: Context): Type = {
val start = currentAddr
val tag = readByte()
pickling.println(s"reading type ${astTagToString(tag)} at $start")
def registeringType[T](tp: Type, op: => T): T = {
typeAtAddr(start) = tp
op
}
def readLengthType(): Type = {
val end = readEnd()
def readNamesSkipParams[N <: Name]: (List[N], TreeReader) = {
val nameReader = fork
nameReader.skipTree() // skip result
val paramReader = nameReader.fork
(nameReader.readParamNames[N](end), paramReader)
}
val result =
(tag: @switch) match {
case SUPERtype =>
SuperType(readType(), readType())
case REFINEDtype =>
val parent = readType()
var name: Name = readName()
val ttag = nextUnsharedTag
if (ttag == TYPEBOUNDS || ttag == TYPEALIAS) name = name.toTypeName
RefinedType(parent, name, rt => registeringType(rt, readType()))
// Note that the lambda "rt => ..." is not equivalent to a wildcard closure!
// Eta expansion of the latter puts readType() out of the expression.
case APPLIEDtype =>
readType().appliedTo(until(end)(readType()))
case TYPEBOUNDS =>
TypeBounds(readType(), readType())
case TYPEALIAS =>
val alias = readType()
val variance =
if (nextByte == COVARIANT) { readByte(); 1 }
else if (nextByte == CONTRAVARIANT) { readByte(); -1 }
else 0
TypeAlias(alias, variance)
case ANNOTATED =>
AnnotatedType(readType(), Annotation(readTerm()))
case ANDtype =>
AndType(readType(), readType())
case ORtype =>
OrType(readType(), readType())
case BIND =>
val sym = ctx.newSymbol(ctx.owner, readName().toTypeName, BindDefinedType, readType())
registerSym(start, sym)
TypeRef.withFixedSym(NoPrefix, sym.name, sym)
case POLYtype =>
val (names, paramReader) = readNamesSkipParams[TypeName]
val result = PolyType(names)(
pt => registeringType(pt, paramReader.readParamTypes[TypeBounds](end)),
pt => readType())
goto(end)
result
case METHODtype =>
val (names, paramReader) = readNamesSkipParams[TermName]
val result = MethodType(names, paramReader.readParamTypes[Type](end))(
mt => registeringType(mt, readType()))
goto(end)
result
case PARAMtype =>
readTypeRef() match {
case binder: PolyType => PolyParam(binder, readNat())
case binder: MethodType => MethodParam(binder, readNat())
}
case CLASSconst =>
ConstantType(Constant(readType()))
case ENUMconst =>
ConstantType(Constant(readTermRef().termSymbol))
}
assert(currentAddr == end, s"$start $currentAddr $end ${astTagToString(tag)}")
result
}
def readSimpleType(): Type = (tag: @switch) match {
case TYPEREFdirect | TERMREFdirect =>
NamedType.withFixedSym(NoPrefix, readSymRef())
case TYPEREFsymbol | TERMREFsymbol =>
readSymNameRef()
case TYPEREFpkg =>
readPackageRef().moduleClass.typeRef
case TERMREFpkg =>
readPackageRef().termRef
case TYPEREF =>
val name = readName().toTypeName
TypeRef(readType(), name)
case TERMREF =>
readNameSplitSig() match {
case name: TermName => TermRef.all(readType(), name)
case (name: TermName, sig: Signature) => TermRef.withSig(readType(), name, sig)
}
case THIS =>
ThisType.raw(readType().asInstanceOf[TypeRef])
case REFINEDthis =>
RefinedThis(readTypeRef().asInstanceOf[RefinedType])
case SHARED =>
val ref = readAddr()
typeAtAddr.getOrElseUpdate(ref, forkAt(ref).readType())
case UNITconst =>
ConstantType(Constant(()))
case TRUEconst =>
ConstantType(Constant(true))
case FALSEconst =>
ConstantType(Constant(false))
case BYTEconst =>
ConstantType(Constant(readInt().toByte))
case SHORTconst =>
ConstantType(Constant(readInt().toShort))
case CHARconst =>
ConstantType(Constant(readNat().toChar))
case INTconst =>
ConstantType(Constant(readInt()))
case LONGconst =>
ConstantType(Constant(readLongInt()))
case FLOATconst =>
ConstantType(Constant(java.lang.Float.intBitsToFloat(readInt())))
case DOUBLEconst =>
ConstantType(Constant(java.lang.Double.longBitsToDouble(readLongInt())))
case STRINGconst =>
ConstantType(Constant(readName().toString))
case NULLconst =>
ConstantType(Constant(null))
case BYNAMEtype =>
ExprType(readType())
}
if (tag < firstLengthTreeTag) readSimpleType() else readLengthType()
}
private def readSymNameRef()(implicit ctx: Context): Type = {
val sym = readSymRef()
val prefix = readType()
val res = NamedType.withSymAndName(prefix, sym, sym.name)
prefix match {
case prefix: ThisType if prefix.cls eq sym.owner => res.withDenot(sym.denot)
// without this precaution we get an infinite cycle when unpickling pos/extmethods.scala
// the problem arises when a self type of a trait is a type parameter of the same trait.
case _ => res
}
}
private def readPackageRef()(implicit ctx: Context): TermSymbol = {
val name = readName()
if (name == nme.ROOT) defn.RootPackage
else if (name == nme.EMPTY_PACKAGE) defn.EmptyPackageVal
else ctx.requiredPackage(name)
}
def readTypeRef(): Type =
typeAtAddr(readAddr())
def readPath()(implicit ctx: Context): Type = {
val tp = readType()
assert(tp.isInstanceOf[SingletonType])
tp
}
def readTermRef()(implicit ctx: Context): TermRef =
readType().asInstanceOf[TermRef]
// ------ Reading definitions -----------------------------------------------------
private def noRhs(end: Addr): Boolean =
currentAddr == end || isModifierTag(nextByte)
private def localContext(owner: Symbol)(implicit ctx: Context) = {
val lctx = ctx.fresh.setOwner(owner)
if (owner.isClass) lctx.setScope(owner.unforcedDecls) else lctx.setNewScope
}
private def normalizeFlags(tag: Int, givenFlags: FlagSet, name: Name, isAbstractType: Boolean, rhsIsEmpty: Boolean)(implicit ctx: Context): FlagSet = {
val lacksDefinition =
rhsIsEmpty &&
name.isTermName && !name.isConstructorName && !givenFlags.is(ParamOrAccessor) ||
isAbstractType
var flags = givenFlags
if (lacksDefinition && tag != PARAM) flags |= Deferred
if (tag == DEFDEF) flags |= Method
if (givenFlags is Module)
flags = flags | (if (tag == VALDEF) ModuleCreationFlags else ModuleClassCreationFlags)
if (ctx.owner.isClass) {
if (tag == TYPEPARAM) flags |= Param
else if (tag == PARAM) flags |= ParamAccessor
}
else if (isParamTag(tag)) flags |= Param
flags
}
/** Create symbol of definition node and enter in symAtAddr map
* @return the largest subset of {NoInits, PureInterface} that a
* trait owning this symbol can have as flags.
*/
def createSymbol()(implicit ctx: Context): FlagSet = {
val start = currentAddr
val tag = readByte()
val end = readEnd()
val rawName = tastyName(readNameRef())
var name: Name = toTermName(rawName)
if (tag == TYPEDEF || tag == TYPEPARAM) name = name.toTypeName
skipParams()
val ttag = nextUnsharedTag
val isAbstractType = ttag == TYPEBOUNDS
val isClass = ttag == TEMPLATE
val templateStart = currentAddr
skipTree() // tpt
val rhsIsEmpty = noRhs(end)
if (!rhsIsEmpty) skipTree()
val (givenFlags, annots, privateWithin) = readModifiers(end)
val expandedFlag = if (rawName.isInstanceOf[TastyName.Expanded]) ExpandedName else EmptyFlags
pickling.println(i"creating symbol $name at $start with flags $givenFlags")
val flags = normalizeFlags(tag, givenFlags | expandedFlag, name, isAbstractType, rhsIsEmpty)
def adjustIfModule(completer: LazyType) =
if (flags is Module) ctx.adjustModuleCompleter(completer, name) else completer
val sym =
roots.find(root => (root.owner eq ctx.owner) && root.name == name) match {
case Some(rootd) =>
pickling.println(i"overwriting ${rootd.symbol} # ${rootd.hashCode}")
rootd.info = adjustIfModule(
new Completer(subReader(start, end)) with SymbolLoaders.SecondCompleter)
rootd.flags = flags &~ Touched // allow one more completion
rootd.privateWithin = privateWithin
rootd.symbol
case _ =>
val completer = adjustIfModule(new Completer(subReader(start, end)))
if (isClass)
ctx.newClassSymbol(ctx.owner, name.asTypeName, flags, completer,
privateWithin, coord = start.index)
else {
val sym = symAtAddr.get(start) match {
case Some(preExisting) =>
assert(stubs contains preExisting)
stubs -= preExisting
preExisting
case none =>
ctx.newNakedSymbol(start.index)
}
val denot = ctx.SymDenotation(symbol = sym, owner = ctx.owner, name, flags, completer, privateWithin)
sym.denot = denot
sym
}
} // TODO set position
sym.annotations = annots
ctx.enter(sym)
registerSym(start, sym)
if (isClass) {
sym.completer.withDecls(newScope)
forkAt(templateStart).indexTemplateParams()(localContext(sym))
}
if (isClass) NoInits
else if (sym.isType || sym.isConstructor || flags.is(Deferred)) NoInitsInterface
else if (tag == VALDEF) EmptyFlags
else NoInits
}
/** Read modifier list into triplet of flags, annotations and a privateWithin
     *  boundary symbol.
*/
def readModifiers(end: Addr)(implicit ctx: Context): (FlagSet, List[Annotation], Symbol) = {
var flags: FlagSet = EmptyFlags
var annots = new mutable.ListBuffer[Annotation]
var privateWithin: Symbol = NoSymbol
while (currentAddr.index != end.index) {
def addFlag(flag: FlagSet) = {
flags |= flag
readByte()
}
nextByte match {
case PRIVATE => addFlag(Private)
case INTERNAL => ??? // addFlag(Internal)
case PROTECTED => addFlag(Protected)
case ABSTRACT =>
readByte()
nextByte match {
case OVERRIDE => addFlag(AbsOverride)
case _ => flags |= Abstract
}
case FINAL => addFlag(Final)
case SEALED => addFlag(Sealed)
case CASE => addFlag(Case)
case IMPLICIT => addFlag(Implicit)
case LAZY => addFlag(Lazy)
case OVERRIDE => addFlag(Override)
case INLINE => addFlag(Inline)
case STATIC => addFlag(JavaStatic)
case OBJECT => addFlag(Module)
case TRAIT => addFlag(Trait)
case LOCAL => addFlag(Local)
case SYNTHETIC => addFlag(Synthetic)
case ARTIFACT => addFlag(Artifact)
case MUTABLE => addFlag(Mutable)
case LABEL => addFlag(Label)
case FIELDaccessor => addFlag(Accessor)
case CASEaccessor => addFlag(CaseAccessor)
case COVARIANT => addFlag(Covariant)
case CONTRAVARIANT => addFlag(Contravariant)
case SCALA2X => addFlag(Scala2x)
case DEFAULTparameterized => addFlag(DefaultParameterized)
case INSUPERCALL => addFlag(InSuperCall)
case STABLE => addFlag(Stable)
case PRIVATEqualified =>
readByte()
privateWithin = readType().typeSymbol
case PROTECTEDqualified =>
addFlag(Protected)
privateWithin = readType().typeSymbol
case ANNOTATION =>
readByte()
val end = readEnd()
val sym = readType().typeSymbol
val lazyAnnotTree = readLater(end, rdr => ctx => rdr.readTerm()(ctx))
annots += Annotation.deferred(sym, _ => lazyAnnotTree.complete)
case _ =>
assert(false, s"illegal modifier tag at $currentAddr")
}
}
(flags, annots.toList, privateWithin)
}
    /** Create symbols for all definitions in the statement sequence between
* current address and `end`.
* @return the largest subset of {NoInits, PureInterface} that a
* trait owning the indexed statements can have as flags.
*/
def indexStats(end: Addr)(implicit ctx: Context): FlagSet = {
val flagss =
until(end) {
nextByte match {
case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM =>
createSymbol()
case IMPORT =>
skipTree()
NoInitsInterface
case PACKAGE =>
processPackage { (pid, end) => implicit ctx => indexStats(end) }
case _ =>
skipTree()
EmptyFlags
}
}
(NoInitsInterface /: flagss)(_ & _)
}
/** Process package with given operation `op`. The operation takes as arguments
* - a `RefTree` representing the `pid` of the package,
* - an end address,
     *  - a context which has the processed package as owner
*/
def processPackage[T](op: (RefTree, Addr) => Context => T)(implicit ctx: Context): T = {
readByte()
val end = readEnd()
val pid = ref(readTermRef()).asInstanceOf[RefTree]
op(pid, end)(localContext(pid.symbol.moduleClass))
}
    /** Create symbols for the longest consecutive sequence of parameters with given
* `tag` starting at current address.
*/
def indexParams(tag: Int)(implicit ctx: Context) =
while (nextByte == tag) createSymbol()
/** Create symbols for all type and value parameters of template starting
* at current address.
*/
def indexTemplateParams()(implicit ctx: Context) = {
assert(readByte() == TEMPLATE)
readEnd()
indexParams(TYPEPARAM)
indexParams(PARAM)
}
/** If definition was already read by a completer, return the previously read tree
* or else read definition.
*/
def readIndexedDef()(implicit ctx: Context): Tree = treeAtAddr.remove(currentAddr) match {
case Some(tree) => skipTree(); tree
case none => readNewDef()
}
private def readNewDef()(implicit ctx: Context): Tree = {
val start = currentAddr
val sym = symAtAddr(start)
val tag = readByte()
val end = readEnd()
def readParams[T <: MemberDef](tag: Int)(implicit ctx: Context): List[T] = {
fork.indexParams(tag)
readIndexedParams(tag)
}
def readParamss(implicit ctx: Context): List[List[ValDef]] = {
collectWhile(nextByte == PARAMS) {
readByte()
readEnd()
readParams[ValDef](PARAM)
}
}
def readRhs(implicit ctx: Context) =
if (noRhs(end)) EmptyTree
else readLater(end, rdr => ctx => rdr.readTerm()(ctx))
def localCtx = localContext(sym)
def DefDef(tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree) =
ta.assignType(
untpd.DefDef(
sym.name.asTermName, tparams, vparamss, tpt, readRhs(localCtx)),
sym)
def ta = ctx.typeAssigner
val name = readName()
pickling.println(s"reading def of $name at $start")
val tree: MemberDef = tag match {
case DEFDEF =>
val tparams = readParams[TypeDef](TYPEPARAM)(localCtx)
val vparamss = readParamss(localCtx)
val tpt = readTpt()
val typeParams = tparams.map(_.symbol)
val valueParamss = ctx.normalizeIfConstructor(
vparamss.nestedMap(_.symbol), name == nme.CONSTRUCTOR)
val resType = ctx.effectiveResultType(sym, typeParams, tpt.tpe)
sym.info = ctx.methodType(typeParams, valueParamss, resType)
DefDef(tparams, vparamss, tpt)
case VALDEF =>
sym.info = readType()
ValDef(sym.asTerm, readRhs(localCtx))
case TYPEDEF | TYPEPARAM =>
if (sym.isClass) {
val companion = sym.scalacLinkedClass
if (companion != NoSymbol && unpickledSyms.contains(companion)) {
import transform.SymUtils._
if (sym is Flags.ModuleClass) sym.registerCompanionMethod(nme.COMPANION_CLASS_METHOD, companion)
else sym.registerCompanionMethod(nme.COMPANION_MODULE_METHOD, companion)
}
ta.assignType(untpd.TypeDef(sym.name.asTypeName, readTemplate(localCtx)), sym)
} else {
sym.info = readType()
TypeDef(sym.asType)
}
case PARAM =>
val info = readType()
if (noRhs(end)) {
sym.info = info
ValDef(sym.asTerm)
}
else {
sym.setFlag(Method)
sym.info = ExprType(info)
pickling.println(i"reading param alias $name -> $currentAddr")
DefDef(Nil, Nil, TypeTree(info))
}
}
val mods =
if (sym.annotations.isEmpty) EmptyModifiers
else Modifiers(annotations = sym.annotations.map(_.tree))
tree.withMods(mods) // record annotations in tree so that tree positions can be filled in.
goto(end)
setPos(start, tree)
}
private def readTemplate(implicit ctx: Context): Template = {
val start = currentAddr
val cls = ctx.owner.asClass
def setClsInfo(parents: List[TypeRef], selfType: Type) =
cls.info = ClassInfo(cls.owner.thisType, cls, parents, cls.unforcedDecls, selfType)
setClsInfo(Nil, NoType)
val localDummy = ctx.newLocalDummy(cls)
assert(readByte() == TEMPLATE)
val end = readEnd()
val tparams = readIndexedParams[TypeDef](TYPEPARAM)
val vparams = readIndexedParams[ValDef](PARAM)
val parents = collectWhile(nextByte != SELFDEF && nextByte != DEFDEF) {
nextByte match {
case APPLY | TYPEAPPLY => readTerm()
case _ => readTpt()
}
}
val parentRefs = ctx.normalizeToClassRefs(parents.map(_.tpe), cls, cls.unforcedDecls)
val self =
if (nextByte == SELFDEF) {
readByte()
untpd.ValDef(readName(), readTpt(), EmptyTree).withType(NoType)
}
else EmptyValDef
setClsInfo(parentRefs, if (self.isEmpty) NoType else self.tpt.tpe)
cls.setApplicableFlags(fork.indexStats(end))
val constr = readIndexedDef().asInstanceOf[DefDef]
def mergeTypeParamsAndAliases(tparams: List[TypeDef], stats: List[Tree]): (List[Tree], List[Tree]) =
(tparams, stats) match {
case (tparam :: tparams1, (alias: TypeDef) :: stats1)
if tparam.name == alias.name.expandedName(cls) =>
val (tas, stats2) = mergeTypeParamsAndAliases(tparams1, stats1)
(tparam :: alias :: tas, stats2)
case _ =>
(tparams, stats)
}
val lazyStats = readLater(end, rdr => implicit ctx => {
val stats0 = rdr.readIndexedStats(localDummy, end)
val (tparamsAndAliases, stats) = mergeTypeParamsAndAliases(tparams, stats0)
tparamsAndAliases ++ vparams ++ stats
})
setPos(start,
untpd.Template(constr, parents, self, lazyStats)
.withType(localDummy.nonMemberTermRef))
}
def skipToplevel()(implicit ctx: Context): Unit= {
if (!isAtEnd)
nextByte match {
case IMPORT | PACKAGE =>
skipTree()
skipToplevel()
case _ =>
}
}
def readTopLevel()(implicit ctx: Context): List[Tree] = {
@tailrec def read(acc: ListBuffer[Tree]): List[Tree] = nextByte match {
case IMPORT | PACKAGE =>
acc += readIndexedStat(NoSymbol)
if (!isAtEnd)
read(acc)
else acc.toList
case _ => // top-level trees which are not imports or packages are not part of tree
acc.toList
}
read(new ListBuffer[tpd.Tree])
}
def readIndexedStat(exprOwner: Symbol)(implicit ctx: Context): Tree = nextByte match {
case TYPEDEF | VALDEF | DEFDEF =>
readIndexedDef()
case IMPORT =>
readImport()
case PACKAGE =>
val start = currentAddr
processPackage { (pid, end) => implicit ctx =>
setPos(start, PackageDef(pid, readIndexedStats(exprOwner, end)(ctx)))
}
case _ =>
readTerm()(ctx.withOwner(exprOwner))
}
def readImport()(implicit ctx: Context): Tree = {
readByte()
readEnd()
val expr = readTerm()
def readSelectors(): List[untpd.Tree] = nextByte match {
case RENAMED =>
readByte()
readEnd()
untpd.Pair(untpd.Ident(readName()), untpd.Ident(readName())) :: readSelectors()
case IMPORTED =>
readByte()
untpd.Ident(readName()) :: readSelectors()
case _ =>
Nil
}
Import(expr, readSelectors())
}
def readIndexedStats(exprOwner: Symbol, end: Addr)(implicit ctx: Context): List[Tree] =
until(end)(readIndexedStat(exprOwner))
def readStats(exprOwner: Symbol, end: Addr)(implicit ctx: Context): List[Tree] = {
fork.indexStats(end)
readIndexedStats(exprOwner, end)
}
def readIndexedParams[T <: MemberDef](tag: Int)(implicit ctx: Context): List[T] =
collectWhile(nextByte == tag) { readIndexedDef().asInstanceOf[T] }
// ------ Reading terms -----------------------------------------------------
def readTerm()(implicit ctx: Context): Tree = {
val start = currentAddr
val tag = readByte()
pickling.println(s"reading term ${astTagToString(tag)} at $start")
def readPathTerm(): Tree = {
goto(start)
readPath() match {
case path: TermRef => ref(path)
case path: ThisType => This(path.cls)
case path: ConstantType => Literal(path.value)
}
}
def readSimpleTerm(): Tree = tag match {
case IDENT =>
untpd.Ident(readName()).withType(readType())
case SELECT =>
def readQual(name: Name) = {
val localCtx =
if (name == nme.CONSTRUCTOR) ctx.addMode(Mode.InSuperCall) else ctx
readTerm()(localCtx)
}
def readRest(name: Name, sig: Signature) = {
val unshadowed = if (name.isShadowedName) name.revertShadowed else name
val qual = readQual(name)
untpd.Select(qual, unshadowed)
.withType(TermRef.withSig(qual.tpe.widenIfUnstable, name.asTermName, sig))
}
readNameSplitSig match {
case name: Name => readRest(name, Signature.NotAMethod)
case (name: Name, sig: Signature) => readRest(name, sig)
}
case NEW =>
New(readTpt())
case _ =>
readPathTerm()
}
def readLengthTerm(): Tree = {
val end = readEnd()
val result =
(tag: @switch) match {
case SUPER =>
val qual = readTerm()
val mixClass = ifBefore(end)(readType().typeSymbol, NoSymbol)
val mixName = if (mixClass.exists) mixClass.name.asTypeName else tpnme.EMPTY
tpd.Super(qual, mixName, ctx.mode.is(Mode.InSuperCall), mixClass)
case APPLY =>
val fn = readTerm()
val isJava = fn.tpe.isInstanceOf[JavaMethodType]
def readArg() = readTerm() match {
case SeqLiteral(elems) if isJava => JavaSeqLiteral(elems)
case arg => arg
}
tpd.Apply(fn, until(end)(readArg()))
case TYPEAPPLY =>
tpd.TypeApply(readTerm(), until(end)(readTpt()))
case PAIR =>
Pair(readTerm(), readTerm())
case TYPED =>
Typed(readTerm(), readTpt())
case NAMEDARG =>
NamedArg(readName(), readTerm())
case ASSIGN =>
Assign(readTerm(), readTerm())
case BLOCK =>
val exprReader = fork
skipTree()
val localCtx = ctx.fresh.setNewScope
val stats = readStats(ctx.owner, end)(localCtx)
val expr = exprReader.readTerm()(localCtx)
Block(stats, expr)
case IF =>
If(readTerm(), readTerm(), readTerm())
case LAMBDA =>
val meth = readTerm()
val tpt = ifBefore(end)(readTpt(), EmptyTree)
Closure(Nil, meth, tpt)
case MATCH =>
Match(readTerm(), readCases(end))
case RETURN =>
val from = readSymRef()
val expr = ifBefore(end)(readTerm(), EmptyTree)
Return(expr, Ident(from.termRef))
case TRY =>
Try(readTerm(), readCases(end), ifBefore(end)(readTerm(), EmptyTree))
case REPEATED =>
SeqLiteral(until(end)(readTerm()))
case BIND =>
val name = readName()
val info = readType()
val sym = ctx.newSymbol(ctx.owner, name, EmptyFlags, info)
registerSym(start, sym)
Bind(sym, readTerm())
case ALTERNATIVE =>
Alternative(until(end)(readTerm()))
case UNAPPLY =>
val fn = readTerm()
val implicitArgs =
collectWhile(nextByte == IMPLICITarg) {
readByte()
readTerm()
}
val patType = readType()
val argPats = until(end)(readTerm())
UnApply(fn, implicitArgs, argPats, patType)
case _ =>
readPathTerm()
}
assert(currentAddr == end, s"$start $currentAddr $end ${astTagToString(tag)}")
result
}
val tree = if (tag < firstLengthTreeTag) readSimpleTerm() else readLengthTerm()
tree.overwriteType(tree.tpe.simplified)
setPos(start, tree)
}
def readTpt()(implicit ctx: Context) = {
val start = currentAddr
val tp = readType()
if (tp.exists) setPos(start, TypeTree(tp)) else EmptyTree
}
def readCases(end: Addr)(implicit ctx: Context): List[CaseDef] =
collectWhile(nextByte == CASEDEF && currentAddr != end) { readCase()(ctx.fresh.setNewScope) }
def readCase()(implicit ctx: Context): CaseDef = {
val start = currentAddr
readByte()
val end = readEnd()
val pat = readTerm()
val rhs = readTerm()
val guard = ifBefore(end)(readTerm(), EmptyTree)
setPos(start, CaseDef(pat, guard, rhs))
}
def readLater[T <: AnyRef](end: Addr, op: TreeReader => Context => T): Trees.Lazy[T] = {
val localReader = fork
goto(end)
new LazyReader(localReader, op)
}
// ------ Hooks for positions ------------------------------------------------
/** Record address from which tree was created as a temporary position in the tree.
* The temporary position contains deltas relative to the position of the (as yet unknown)
* parent node. It is marked as a non-synthetic source position.
*/
def setPos[T <: Tree](addr: Addr, tree: T): T = {
if (readPositions)
tree.setPosUnchecked(positions.getOrElse(addr, Position(0, 0, 0)))
tree
}
}
private def setNormalized(tree: Tree, parentPos: Position): Unit =
tree.setPosUnchecked(
if (tree.pos.exists)
Position(parentPos.start + offsetToInt(tree.pos.start), parentPos.end - tree.pos.end)
else
parentPos)
def normalizePos(x: Any, parentPos: Position)(implicit ctx: Context): Unit =
traverse(x, parentPos, setNormalized)
class LazyReader[T <: AnyRef](reader: TreeReader, op: TreeReader => Context => T) extends Trees.Lazy[T] with DeferredPosition {
def complete(implicit ctx: Context): T = {
pickling.println(i"starting to read at ${reader.reader.currentAddr}")
val res = op(reader)(ctx.addMode(Mode.AllowDependentFunctions))
normalizePos(res, parentPos)
res
}
}
class LazyAnnotationReader(sym: Symbol, reader: TreeReader)
extends LazyAnnotation(sym) with DeferredPosition {
def complete(implicit ctx: Context) = {
val res = reader.readTerm()
normalizePos(res, parentPos)
res
}
}
}
|
densh/dotty
|
src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala
|
Scala
|
bsd-3-clause
| 35,421 |
package slinky.native
import slinky.core.ExternalComponent
import slinky.core.annotations.react
import scala.scalajs.js
import scala.scalajs.js.annotation.JSImport
@react object SafeAreaView extends ExternalComponent {
case class Props(style: js.UndefOr[js.Object] = js.undefined)
@js.native
@JSImport("react-native", "SafeAreaView")
object Component extends js.Object
override val component = Component
}
|
shadaj/slinky
|
native/src/main/scala/slinky/native/SafeAreaView.scala
|
Scala
|
mit
| 421 |
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package org.scalajs.nscplugin.test
import org.scalajs.nscplugin.test.util._
import org.scalajs.ir.Names._
import org.scalajs.ir.Trees._
import org.scalajs.ir.Types._
import org.junit.Test
// scalastyle:off line.size.limit
class BinaryCompatTest extends JSASTTest {
@Test
def emitDefaultAccessorsOfJSNativeDefs(): Unit = {
val XDefaultAccessorName = MethodName("foo$default$1", Nil, IntRef)
/* Check that, even with the fix to #4553, we still emit default accessors
* for JS native defs, unless they are `= js.native`.
*/
"""
import scala.scalajs.js, js.annotation._
object Container {
@js.native
@JSGlobal("foo")
def foo(x: Int = 5): Int = js.native
def bar(x: Int): Int = x
}
""".hasExactly(1, "default accessor for x in foo") {
case MethodDef(flags, MethodIdent(XDefaultAccessorName), _, _, _, _) =>
}
// Check that it is not emitted for `= js.native`.
"""
import scala.scalajs.js, js.annotation._
object Container {
@js.native
@JSGlobal("foo")
def foo(x: Int = js.native): Int = js.native
def bar(x: Int): Int = x
}
""".hasNot("default accessor for x in foo") {
case MethodDef(flags, MethodIdent(XDefaultAccessorName), _, _, _, _) =>
}
}
}
|
scala-js/scala-js
|
compiler/src/test/scala/org/scalajs/nscplugin/test/BinaryCompatTest.scala
|
Scala
|
apache-2.0
| 1,568 |
import _root_.io.gatling.core.scenario.Simulation
import ch.qos.logback.classic.{Level, LoggerContext}
import io.gatling.core.Predef._
import io.gatling.http.Predef._
import org.slf4j.LoggerFactory
import scala.concurrent.duration._
/**
* Performance test for the Measurement entity.
*/
class MeasurementGatlingTest extends Simulation {
val context: LoggerContext = LoggerFactory.getILoggerFactory.asInstanceOf[LoggerContext]
// Log all HTTP requests
//context.getLogger("io.gatling.http").setLevel(Level.valueOf("TRACE"))
// Log failed HTTP requests
//context.getLogger("io.gatling.http").setLevel(Level.valueOf("DEBUG"))
val baseURL = Option(System.getProperty("baseURL")) getOrElse """http://127.0.0.1:8080"""
val httpConf = http
.baseURL(baseURL)
.inferHtmlResources()
.acceptHeader("*/*")
.acceptEncodingHeader("gzip, deflate")
.acceptLanguageHeader("fr,fr-fr;q=0.8,en-us;q=0.5,en;q=0.3")
.connection("keep-alive")
.userAgentHeader("Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:33.0) Gecko/20100101 Firefox/33.0")
val headers_http = Map(
"Accept" -> """application/json"""
)
val headers_http_authenticated = Map(
"Accept" -> """application/json""",
"X-CSRF-TOKEN" -> "${csrf_token}"
)
val scn = scenario("Test the Measurement entity")
.exec(http("First unauthenticated request")
.get("/api/account")
.headers(headers_http)
.check(status.is(401))
.check(headerRegex("Set-Cookie", "CSRF-TOKEN=(.*); [P,p]ath=/").saveAs("csrf_token")))
.pause(10)
.exec(http("Authentication")
.post("/api/authentication")
.headers(headers_http_authenticated)
.formParam("j_username", "admin")
.formParam("j_password", "admin")
.formParam("remember-me", "true")
.formParam("submit", "Login"))
.pause(1)
.exec(http("Authenticated request")
.get("/api/account")
.headers(headers_http_authenticated)
.check(status.is(200))
.check(headerRegex("Set-Cookie", "CSRF-TOKEN=(.*); [P,p]ath=/").saveAs("csrf_token")))
.pause(10)
.repeat(2) {
exec(http("Get all measurements")
.get("/api/measurements")
.headers(headers_http_authenticated)
.check(status.is(200)))
.pause(10 seconds, 20 seconds)
.exec(http("Create new measurement")
.put("/api/measurements")
.headers(headers_http_authenticated)
.body(StringBody("""{"id":null, "date":"2020-01-01T00:00:00.000Z", "left_arm":null, "right_arm":null, "waist":null, "left_thigh":null, "right_thigh":null, "weight_kgrs":null}""")).asJSON
.check(status.is(201))
.check(headerRegex("Location", "(.*)").saveAs("new_measurement_url")))
.pause(10)
.repeat(5) {
exec(http("Get created measurement")
.get("${new_measurement_url}")
.headers(headers_http_authenticated))
.pause(10)
}
.exec(http("Delete created measurement")
.delete("${new_measurement_url}")
.headers(headers_http_authenticated))
.pause(10)
}
val users = scenario("Users").exec(scn)
setUp(
users.inject(rampUsers(100) over (1 minutes))
).protocols(httpConf)
}
|
poolebu/healthlog
|
src/test/gatling/simulations/MeasurementGatlingTest.scala
|
Scala
|
mit
| 3,454 |
package com.technophobia.substeps.domain.events
/**
* @author rbarefield
*/
class DomainEventPublisher {
def publish(event: SubstepsDomainEvent) {
DomainEventPublisher.subscribers.get().foreach(_.handle(event))
}
def subscribe(subscriber: DomainEventSubscriber) {
val subscribers = DomainEventPublisher.subscribers.get()
DomainEventPublisher.subscribers.set(subscriber :: subscribers)
}
def reset() {
DomainEventPublisher.subscribers.set(Nil)
}
}
object DomainEventPublisher {
val subscribers = new ThreadLocal[List[DomainEventSubscriber]] {
override def initialValue = Nil
}
def instance() = new DomainEventPublisher
}
|
G2G3Digital/substeps-scala-core
|
src/main/scala/com/technophobia/substeps/domain/events/DomainEventPublisher.scala
|
Scala
|
lgpl-3.0
| 671 |
package leibniz
import leibniz.inhabitance.Proposition
import leibniz.internal.Unsafe
import leibniz.variance.{Contravariant, Covariant}
sealed abstract class As1[A, B] { ab =>
type Upper >: A
type Lower <: (B with Upper)
def lower: A === Lower
def upper: B === Upper
def loosen: A <~< B = {
type f1[x] = x <~< Upper
type f2[x] = A <~< x
upper.flip.subst[f2](
lower.flip.subst[f1](
As.refl[Lower] : Lower <~< Upper))
}
def substCt[F[-_]](fb: F[B]): F[A] =
lower.flip.subst[F](upper.subst[F](fb) : F[Lower])
def substCo[F[+_]](fa: F[A]): F[B] =
upper.flip.subst[F](lower.subst[F](fa) : F[Upper])
def coerce(a: A): B = {
type f[+x] = x
substCo[f](a)
}
def liftCoF[F[_]](implicit F: Covariant[F]): F[A] As1 F[B] =
F(ab.loosen).fix
def liftCtF[F[_]](implicit F: Contravariant[F]): F[B] As1 F[A] =
F(ab.loosen).fix
def substCoF[F[_]](fa: F[A])(implicit F: Covariant[F]): F[B] =
liftCoF[F].coerce(fa)
def substCtF[F[_]](fb: F[B])(implicit F: Contravariant[F]): F[A] =
liftCtF[F].coerce(fb)
}
object As1 {
private[this] final case class Refl[A]() extends As1[A, A] {
type Lower = A
type Upper = A
def lower: A === A = Is.refl[A]
def upper: A === A = Is.refl[A]
}
implicit def proposition[A, B]: Proposition[As1[A, B]] =
Proposition[As[A, B]].isomap(Iso.unsafe(a => a.fix, a => a.loosen))
def apply[A, B](implicit ev: A As1 B): A As1 B = ev
def refl[A]: A As1 A = new Refl[A]()
implicit def fix[A, B](implicit ab: A <~< B): A As1 B = ab.fix[A, B]
def proved[A, B, B1 >: A, A1 <: (B with B1)](a: A Is A1, b: B Is B1): As1[A, B] = new As1[A, B] {
type Upper = B1
type Lower = A1
def lower: A Is Lower = a
def upper: B Is Upper = b
}
}
|
alexknvl/leibniz
|
src/main/scala/leibniz/As1.scala
|
Scala
|
mit
| 1,779 |
package com.sksamuel.elastic4s
import org.elasticsearch.action.termvector.{TermVectorRequestBuilder, TermVectorResponse}
import org.elasticsearch.client.Client
import scala.concurrent.Future
trait TermVectorDsl {
def termVector(index: String, `type`: String, id: String) = TermVectorDefinition(index, `type`, id)
implicit object TermVectorExecutable
extends Executable[TermVectorDefinition, TermVectorResponse, TermVectorResponse] {
override def apply(client: Client, t: TermVectorDefinition): Future[TermVectorResponse] = {
injectFuture(t.build(client.prepareTermVector).execute)
}
}
}
case class TermVectorDefinition(private val index: String,
private val `type`: String,
private val id: String,
private val positions: Option[Boolean] = None,
private val payloads: Option[Boolean] = None,
private val offsets: Option[Boolean] = None,
private val routing: Option[String] = None,
private val termStatistics: Option[Boolean] = None,
private val fieldStatistics: Option[Boolean] = None,
private val fields: Option[Seq[String]] = None) {
def build(builder: TermVectorRequestBuilder): TermVectorRequestBuilder = {
builder.setIndex(index)
builder.setType(`type`)
builder.setId(id)
termStatistics.foreach(builder.setTermStatistics)
fieldStatistics.foreach(builder.setFieldStatistics)
positions.foreach(builder.setPositions)
payloads.foreach(builder.setPayloads)
offsets.foreach(builder.setOffsets)
routing.foreach(builder.setRouting)
fields.foreach(flds => builder.setSelectedFields(flds: _ *))
builder
}
def withTermStatistics(boolean: Boolean = true): TermVectorDefinition = copy(termStatistics = Option(boolean))
def withFieldStatistics(boolean: Boolean = true): TermVectorDefinition = copy(fieldStatistics = Option(boolean))
def withFields(fields: String*): TermVectorDefinition = copy(fields = Option(fields))
def withRouting(routing: String): TermVectorDefinition = copy(routing = Option(routing))
def withOffets(boolean: Boolean = true): TermVectorDefinition = copy(offsets = Option(boolean))
def withPayloads(boolean: Boolean = true): TermVectorDefinition = copy(payloads = Option(boolean))
def withPositions(boolean: Boolean = true): TermVectorDefinition = copy(positions = Option(boolean))
}
|
ExNexu/elastic4s
|
elastic4s-core/src/main/scala/com/sksamuel/elastic4s/TermVectorDsl.scala
|
Scala
|
apache-2.0
| 2,577 |
/* ___ _ ___ _ _ *\\
** / __| |/ (_) | | Your SKilL scala Binding **
** \\__ \\ ' <| | | |__ generated: 01.02.2019 **
** |___/_|\\_\\_|_|____| by: feldentm **
\\* */
package de.ust.skill.sir.api.internal
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.ListBuffer
import scala.collection.mutable.HashMap
import scala.collection.mutable.HashSet
import scala.collection.mutable.WrappedArray
import java.nio.BufferUnderflowException
import java.nio.MappedByteBuffer
import java.util.Arrays
import de.ust.skill.common.jvm.streams.MappedInStream
import de.ust.skill.common.jvm.streams.MappedOutStream
import de.ust.skill.common.scala.api.PoolSizeMissmatchError
import de.ust.skill.common.scala.api.RestrictionCheckFailed
import de.ust.skill.common.scala.api.SkillObject
import de.ust.skill.common.scala.internal.AutoField
import de.ust.skill.common.scala.internal.BulkChunk
import de.ust.skill.common.scala.internal.Chunk
import de.ust.skill.common.scala.internal.DistributedField
import de.ust.skill.common.scala.internal.IgnoredField
import de.ust.skill.common.scala.internal.FieldDeclaration
import de.ust.skill.common.scala.internal.KnownField
import de.ust.skill.common.scala.internal.LazyField
import de.ust.skill.common.scala.internal.SimpleChunk
import de.ust.skill.common.scala.internal.SingletonStoragePool
import de.ust.skill.common.scala.internal.fieldTypes._
import de.ust.skill.common.scala.internal.restrictions._
/**
* string CustomFieldOption.name
*/
final class F_CustomFieldOption_name(
_index : Int,
_owner : CustomFieldOptionPool,
_type : FieldType[java.lang.String])
extends FieldDeclaration[java.lang.String,_root_.de.ust.skill.sir.CustomFieldOption](_type,
"name",
_index,
_owner)
with KnownField[java.lang.String,_root_.de.ust.skill.sir.CustomFieldOption] {
override def createKnownRestrictions : Unit = {
}
override def read(part : MappedInStream, target : Chunk) {
val d = owner.data
val in = part.view(target.begin.toInt, target.end.toInt)
try {
target match {
        case c : SimpleChunk ⇒
var i = c.bpo.toInt
val high = i + c.count
while (i != high) {
d(i).asInstanceOf[_root_.de.ust.skill.sir.CustomFieldOption].Internal_name = t.read(in).asInstanceOf[java.lang.String]
i += 1
}
        case bci : BulkChunk ⇒
val blocks = owner.blocks
var blockIndex = 0
while (blockIndex < bci.blockCount) {
val b = blocks(blockIndex)
blockIndex += 1
var i = b.bpo
val end = i + b.dynamicCount
while (i != end) {
d(i).asInstanceOf[_root_.de.ust.skill.sir.CustomFieldOption].Internal_name = t.read(in).asInstanceOf[java.lang.String]
i += 1
}
}
}
} catch {
      case e : BufferUnderflowException ⇒
throw new PoolSizeMissmatchError(dataChunks.size - 1,
part.position() + target.begin,
part.position() + target.end,
this, in.position())
}
if(!in.eof())
throw new PoolSizeMissmatchError(dataChunks.size - 1,
part.position() + target.begin,
part.position() + target.end,
this, in.position())
}
def offset: Unit = {
val data = owner.data
var result = 0L
dataChunks.last match {
      case c : SimpleChunk ⇒
var i = c.bpo.toInt
val high = i + c.count
while (i != high) {
val v = data(i).asInstanceOf[_root_.de.ust.skill.sir.CustomFieldOption].Internal_name
result += t.offset(v)
i += 1
}
      case bci : BulkChunk ⇒
val blocks = owner.blocks
var blockIndex = 0
while (blockIndex < bci.blockCount) {
val b = blocks(blockIndex)
blockIndex += 1
var i = b.bpo
val end = i + b.dynamicCount
while (i != end) {
val v = data(i).asInstanceOf[_root_.de.ust.skill.sir.CustomFieldOption].Internal_name
result += t.offset(v)
i += 1
}
}
}
cachedOffset = result
}
def write(out: MappedOutStream): Unit = {
val data = owner.data
dataChunks.last match {
      case c : SimpleChunk ⇒
var i = c.bpo.toInt
val high = i + c.count
while (i != high) {
val v = data(i).asInstanceOf[_root_.de.ust.skill.sir.CustomFieldOption].Internal_name
t.write(v, out)
i += 1
}
      case bci : BulkChunk ⇒
val blocks = owner.blocks
var blockIndex = 0
while (blockIndex < bci.blockCount) {
val b = blocks(blockIndex)
blockIndex += 1
var i = b.bpo
val end = i + b.dynamicCount
while (i != end) {
val v = data(i).asInstanceOf[_root_.de.ust.skill.sir.CustomFieldOption].Internal_name
t.write(v, out)
i += 1
}
}
}
}
// note: reflective field access will raise exception for ignored fields
override def getR(i : SkillObject) : java.lang.String = i.asInstanceOf[_root_.de.ust.skill.sir.CustomFieldOption].name
override def setR(i : SkillObject, v : java.lang.String) {
i.asInstanceOf[_root_.de.ust.skill.sir.CustomFieldOption].name = v.asInstanceOf[java.lang.String]
}
}
|
skill-lang/skill
|
src/main/scala/de/ust/skill/sir/api/internal/F_CustomFieldOption_name.scala
|
Scala
|
bsd-3-clause
| 5,845 |
/**
 * Copyright (c) 2016 Intel Corporation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.trustedanalytics.sparktk.frame.internal.rdd
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.expressions.GenericRow
import org.apache.spark.sql.types._
import org.scalatest.Matchers
import org.trustedanalytics.sparktk.frame.internal.FrameState
import org.trustedanalytics.sparktk.frame.internal.ops.timeseries.TimeSeriesFunctions
import org.trustedanalytics.sparktk.frame.{ Column, DataTypes, FrameSchema }
import org.trustedanalytics.sparktk.testutils._
class FrameRddTest extends TestingSparkContextWordSpec with Matchers {
"FrameRdd" should {
/**
* Method that accepts FrameState as a parameter (for testing implicit conversion).
* @return Returns schema column column and rdd row count.
*/
def frameStateColumnCount(frameState: FrameState): (Int, Long) = {
(frameState.schema.columns.length, frameState.rdd.count())
}
/**
* Method that accepts FrameRdd as a parameter (for testing implicit conversion)
* @return Returns schema column column and rdd row count.
*/
def frameRddColumnCount(frameRdd: FrameRdd): (Int, Long) = {
(frameRdd.frameSchema.columns.length, frameRdd.count())
}
"implicitly convert between FrameState and FrameRdd" in {
val schema = FrameSchema(Vector(Column("num", DataTypes.int32), Column("name", DataTypes.string)))
val rows = FrameRdd.toRowRDD(schema, sparkContext.parallelize((1 to 100).map(i => Array(i.toLong, i.toString))).repartition(3))
val frameRdd = new FrameRdd(schema, rows)
val frameState = FrameState(rows, schema)
// Call both methods with FrameState
assert(frameStateColumnCount(frameState) == (2, 100))
assert(frameRddColumnCount(frameState) == (2, 100))
// Call both methods with FrameRdd
assert(frameRddColumnCount(frameRdd) == (2, 100))
assert(frameStateColumnCount(frameRdd) == (2, 100))
}
/**
* Tests converting from a FrameRdd to a DataFrame and then back to a FrameRdd.
*/
"converting between FrameRdd and Spark DataFrame" in {
val schema = FrameSchema(Vector(Column("id", DataTypes.int32), Column("name", DataTypes.string), Column("bday", DataTypes.datetime)))
val rows: Array[Row] = Array(
new GenericRow(Array[Any](1, "Bob", "1950-05-12T03:25:21.123Z")),
new GenericRow(Array[Any](2, "Susan", "1979-08-05T07:51:28.000Z")),
new GenericRow(Array[Any](3, "Jane", "1986-10-17T11:45:00.000Z"))
)
val frameRDD = new FrameRdd(schema, sparkContext.parallelize(rows))
// Convert FrameRDD to DataFrame
val dataFrame = frameRDD.toDataFrame
// Check the schema and note that the datetime column is represented as a long in the DataFrame
assert(dataFrame.schema.fields.sameElements(Array(StructField("id", IntegerType, true),
StructField("name", StringType, true),
StructField("bday", LongType, true))))
// Add a column that converts the bday (LongType) to a timestamp column that uses the TimestampType
val dfWithTimestamp = dataFrame.withColumn("timestamp", TimeSeriesFunctions.toTimestamp(dataFrame("bday")))
assert(dfWithTimestamp.schema.fields.sameElements(Array(StructField("id", IntegerType, true),
StructField("name", StringType, true),
StructField("bday", LongType, true),
StructField("timestamp", TimestampType, true))))
// Convert DataFrame back to a FrameRDD
val frameRddWithTimestamp = FrameRdd.toFrameRdd(dfWithTimestamp)
// Check schema
val fields = frameRddWithTimestamp.schema.columns
assert(frameRddWithTimestamp.schema.columnNames.sameElements(Vector("id", "name", "bday", "timestamp")))
assert(frameRddWithTimestamp.schema.columnDataType("id") == DataTypes.int32)
assert(frameRddWithTimestamp.schema.columnDataType("name") == DataTypes.string)
assert(frameRddWithTimestamp.schema.columnDataType("bday") == DataTypes.int64)
assert(frameRddWithTimestamp.schema.columnDataType("timestamp") == DataTypes.datetime)
}
}
}
|
aayushidwivedi01/spark-tk
|
sparktk-core/src/test/scala/org/trustedanalytics/sparktk/frame/internal/rdd/FrameRddTest.scala
|
Scala
|
apache-2.0
| 4,762 |
package com.ing.baker.il
import com.ing.baker.compiler.RecipeCompiler
import com.ing.baker.recipe.TestRecipe._
import com.ing.baker.recipe.scaladsl.Recipe
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike
import scala.language.postfixOps
class RecipeVisualizerSpec extends AnyWordSpecLike with Matchers {
"The Recipe visualisation module" should {
"be able to visualize a a created compile recipe" in {
val recipe: Recipe = getRecipe("VisualizationRecipe")
val compiledRecipe: CompiledRecipe = RecipeCompiler.compileRecipe(recipe)
val dot: String = RecipeVisualizer.visualizeRecipe(compiledRecipe, RecipeVisualStyle.default)
dot should include("interactionOneIngredient -> InteractionThree")
// baker.dumpToFile("TestRecipe.svg", compiledRecipe.getVisualRecipeAsSVG)
}
"be able to visualize the created interactions with a filter" in {
val recipe: Recipe = getRecipe("filteredVisualRecipe")
val compileRecipe: CompiledRecipe = RecipeCompiler.compileRecipe(recipe)
val dot: String = RecipeVisualizer.visualizeRecipe(compileRecipe, RecipeVisualStyle.default, filter = e => !e.contains("interactionFour"))
dot shouldNot contain("interactionFour")
}
"should visualize missing events with a red color" in {
val recipe: Recipe = Recipe("missingEvent")
.withInteraction(interactionOne.withRequiredEvent(secondEvent))
.withSensoryEvent(initialEvent)
val compileRecipe: CompiledRecipe = RecipeCompiler.compileRecipe(recipe)
val dot: String = RecipeVisualizer.visualizeRecipe(compileRecipe, RecipeVisualStyle.default)
dot should include("#EE0000")
}
"should visualize missing ingredients with a red color" in {
val recipe: Recipe = Recipe("missingEvent")
.withInteraction(interactionOne)
.withSensoryEvent(secondEvent)
val compileRecipe: CompiledRecipe = RecipeCompiler.compileRecipe(recipe)
val dot: String = RecipeVisualizer.visualizeRecipe(compileRecipe, RecipeVisualStyle.default)
dot should include("#EE0000")
}
}
}
|
ing-bank/baker
|
core/akka-runtime/src/test/scala/com/ing/baker/il/RecipeVisualizerSpec.scala
|
Scala
|
mit
| 2,131 |
/*
* Copyright (C) 2005, The Beangle Software.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.beangle.data.orm
object NamingPolicy {
/**
   * Maximum table name length
*/
val DefaultMaxLength = 30
}
/**
* Entity table and Collection Table Naming Strategy.
*
* @author chaostone
*/
trait NamingPolicy {
/**
* Convert class to table name
*
* @param clazz
* @param entityName
*/
def classToTableName(clazz: Class[_], entityName: String): Name
/**
* Convert collection to table name
*
* @param clazz
* @param entityName
* @param tableName
* @param collectionName
*/
def collectionToTableName(clazz: Class[_], entityName: String, tableName: String, collectionName: String): Name
def propertyToColumnName(clazz: Class[_], property: String): String
}
case class Name(schema: Option[String], text: String)
|
beangle/data
|
orm/src/main/scala/org/beangle/data/orm/Naming.scala
|
Scala
|
lgpl-3.0
| 1,491 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers.registration.returns
import config.{BaseControllerComponents, FrontendAppConfig}
import controllers.BaseController
import forms.SendEuGoodsForm
import models.api.returns.OverseasCompliance
import play.api.mvc.{Action, AnyContent}
import services.{ApplicantDetailsService, ReturnsService, SessionProfile, SessionService}
import uk.gov.hmrc.auth.core.AuthConnector
import views.html.returns.SendEUGoodsView
import javax.inject.{Inject, Singleton}
import scala.concurrent.{ExecutionContext, Future}
@Singleton
class SendEUGoodsController @Inject()(val authConnector: AuthConnector,
val sessionService: SessionService,
val applicantDetailsService: ApplicantDetailsService,
returnsService: ReturnsService,
sendEUGoodsPage: SendEUGoodsView
)(implicit appConfig: FrontendAppConfig,
val executionContext: ExecutionContext,
baseControllerComponents: BaseControllerComponents)
extends BaseController with SessionProfile {
val show: Action[AnyContent] = isAuthenticatedWithProfile() {
implicit request =>
implicit profile =>
returnsService.getReturns.map { returns =>
returns.overseasCompliance match {
case Some(OverseasCompliance(_, Some(sendEuGoods), _, _, _, _)) => Ok(sendEUGoodsPage(SendEuGoodsForm.form.fill(sendEuGoods)))
case _ => Ok(sendEUGoodsPage(SendEuGoodsForm.form))
}
}
}
val submit: Action[AnyContent] = isAuthenticatedWithProfile() {
implicit request =>
implicit profile =>
SendEuGoodsForm.form.bindFromRequest.fold(
badForm => Future.successful(BadRequest(sendEUGoodsPage(badForm))),
successForm => {
for {
returns <- returnsService.getReturns
updatedReturns = returns.copy(
overseasCompliance = returns.overseasCompliance.map(_.copy(
goodsToEu = Some(successForm)
))
)
_ <- returnsService.submitReturns(updatedReturns)
} yield {
Redirect(controllers.registration.returns.routes.StoringGoodsController.show)
}
}
)
}
}
|
hmrc/vat-registration-frontend
|
app/controllers/registration/returns/SendEUGoodsController.scala
|
Scala
|
apache-2.0
| 3,015 |
package se.digiplant.imagemagick.plugin
import org.specs2.mutable.Around
import org.specs2.specification.Scope
import org.specs2.execute.{AsResult, Result}
import play.api.test._
import play.api.test.Helpers._
import java.io.File
import org.apache.commons.io.FileUtils
import util.Random
trait ScalrContext extends Around with TempFile {
implicit val app: FakeApplication = new FakeApplication(
additionalConfiguration = Map(
"res.default" -> "tmp/default",
"res.imagemagickcache" -> "tmp/imagemagickcache",
"imagemagick.cache" -> "imagemagickcache",
"imagemagick.cachedir" -> "tmp/imagemagickcachedir"
)
)
def around[T : AsResult](t: =>T) = Helpers.running(app) {
val result = AsResult.effectively(t)
tmp.delete()
result
}
}
trait TempFile extends Scope {
lazy val tmp = new File("tmp")
lazy val logo = new File("test/resources/digiplant.jpg")
lazy val LargeLogo = new File("test/resources/digiplant_large.jpg")
def testFile: File = {
tmp.mkdir()
val chars = ('a' to 'z') ++ ('A' to 'Z') ++ ('1' to '9')
val rand = (1 to 20).map(x => chars(Random.nextInt(chars.length))).mkString
val tmpFile = new File("tmp", rand + ".jpg")
FileUtils.copyFile(logo, tmpFile)
tmpFile
}
def largeTestFile: File = {
tmp.mkdir()
val chars = ('a' to 'z') ++ ('A' to 'Z') ++ ('1' to '9')
val rand = (1 to 20).map(x => chars(Random.nextInt(chars.length))).mkString
val tmpFile = new File("tmp", rand + ".jpg")
FileUtils.copyFile(LargeLogo, tmpFile)
tmpFile
}
}
|
digiPlant/play-imagemagick
|
test/se/digiplant/imagemagick/plugin/Spec.scala
|
Scala
|
mit
| 1,561 |
package lila.study
import scala.util.chaining._
import chess.format.pgn.{ Tag, TagType, Tags }
object PgnTags {
def apply(tags: Tags): Tags =
tags pipe filterRelevant pipe removeContradictingTermination pipe sort
def setRootClockFromTags(c: Chapter): Option[Chapter] =
c.updateRoot { _.setClockAt(c.tags.clockConfig map (_.limit), Path.root) } filter (c !=)
private def filterRelevant(tags: Tags) =
Tags(tags.value.filter { t =>
relevantTypeSet(t.name) && !unknownValues(t.value)
})
private def removeContradictingTermination(tags: Tags) =
if (tags.resultColor.isDefined)
Tags(tags.value.filterNot { t =>
t.name == Tag.Termination && t.value.toLowerCase == "unterminated"
})
else tags
private val unknownValues = Set("", "?", "unknown")
private val sortedTypes: List[TagType] = {
import Tag._
List(
White,
WhiteElo,
WhiteTitle,
WhiteTeam,
Black,
BlackElo,
BlackTitle,
BlackTeam,
TimeControl,
Date,
Result,
Termination,
Site,
Event,
Round,
Board,
Annotator
)
}
val typesToString = sortedTypes mkString ","
private val relevantTypeSet: Set[TagType] = sortedTypes.toSet
private val typePositions: Map[TagType, Int] = sortedTypes.zipWithIndex.toMap
private def sort(tags: Tags) =
Tags {
tags.value.sortBy { t =>
typePositions.getOrElse(t.name, Int.MaxValue)
}
}
}
|
luanlv/lila
|
modules/study/src/main/PgnTags.scala
|
Scala
|
mit
| 1,487 |
import com.google.inject.AbstractModule
import services._
/**
* This class is a Guice module that tells Guice how to bind several
* different types. This Guice module is created when the Play
* application starts.
* Play will automatically use any class called `Module` that is in
* the root package. You can create modules in other locations by
* adding `play.modules.enabled` settings to the `application.conf`
* configuration file.
*/
class Module extends AbstractModule {
override def configure(): Unit = {
bind(classOf[FileService]).to(classOf[FileServiceImpl])
}
}
|
alexandremenif/tal-gantt
|
app/Module.scala
|
Scala
|
mit
| 599 |
/**
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY
package sbt.protocol.testing
/** Called for each class or equivalent grouping. */
final class StartTestGroupEvent private (
val name: String) extends sbt.protocol.testing.TestMessage() with Serializable {
override def equals(o: Any): Boolean = o match {
case x: StartTestGroupEvent => (this.name == x.name)
case _ => false
}
override def hashCode: Int = {
37 * (37 * (17 + "StartTestGroupEvent".##) + name.##)
}
override def toString: String = {
"StartTestGroupEvent(" + name + ")"
}
protected[this] def copy(name: String = name): StartTestGroupEvent = {
new StartTestGroupEvent(name)
}
def withName(name: String): StartTestGroupEvent = {
copy(name = name)
}
}
object StartTestGroupEvent {
def apply(name: String): StartTestGroupEvent = new StartTestGroupEvent(name)
}
|
Duhemm/sbt
|
testing/src/main/contraband-scala/sbt/protocol/testing/StartTestGroupEvent.scala
|
Scala
|
bsd-3-clause
| 951 |
// Copyright (c) 2013-2020 Rob Norris and Contributors
// This software is licensed under the MIT License (MIT).
// For more information see LICENSE or https://opensource.org/licenses/MIT
package doobie.postgres
import cats.effect.IO
import cats.implicits.catsSyntaxApplicativeId
import doobie.ConnectionIO
import doobie.implicits._
import doobie.postgres.implicits._
import doobie.util.transactor.Transactor
import munit.CatsEffectSuite
import org.postgresql.ds.PGSimpleDataSource
import javax.sql.DataSource
class Issue1512 extends CatsEffectSuite {
val minChunkSize = 200
val datasource: DataSource = {
val ds = new PGSimpleDataSource
ds.setUser("postgres")
ds.setPassword("")
ds
}
val xa: Transactor[IO] =
Transactor.fromDataSource[IO](datasource, scala.concurrent.ExecutionContext.global)
val setup: IO[Int] =
sql"""
DROP TABLE IF EXISTS demo;
CREATE TABLE demo(id BIGSERIAL PRIMARY KEY NOT NULL, data BIGINT NOT NULL);
""".update.run
.transact(xa)
test("A stream with a Pure effect inserts items properly") {
setup.unsafeRunSync()
// A pure stream is fine - can copy many items
val count = 10000
val stream = fs2.Stream.emits(1 to count)
sql"COPY demo(data) FROM STDIN".copyIn(stream, minChunkSize).transact(xa).unsafeRunSync()
val queryCount =
sql"SELECT count(*) from demo".query[Int].unique.transact(xa).unsafeRunSync()
assertEquals(queryCount, count)
}
test("A stream with a ConnectionIO effect copies <= than minChunkSize items") {
setup.unsafeRunSync()
// Can copy up to minChunkSize just fine with ConnectionIO
val inputs = 1 to minChunkSize
val stream = fs2.Stream.emits[ConnectionIO, Int](inputs)
.evalMap(i => (i + 2).pure[ConnectionIO])
val copiedRows = sql"COPY demo(data) FROM STDIN".copyIn(stream, minChunkSize).transact(xa).unsafeRunSync()
assertEquals(copiedRows, inputs.size.toLong)
val queryCount =
sql"SELECT count(*) from demo".query[Int].unique.transact(xa).unsafeRunSync()
assertEquals(queryCount, minChunkSize)
}
test("A stream with a ConnectionIO effect copies items with count > minChunkSize") {
setup.unsafeRunSync()
val inputs = 1 to minChunkSize + 1
val stream = fs2.Stream.emits[ConnectionIO, Int](inputs)
.evalMap(i => (i + 2).pure[ConnectionIO])
val copiedRows = sql"COPY demo(data) FROM STDIN".copyIn(stream, minChunkSize).transact(xa).unsafeRunSync()
assertEquals(copiedRows, inputs.size.toLong)
val queryCount =
sql"SELECT count(*) from demo".query[Int].unique.transact(xa).unsafeRunSync()
assertEquals(queryCount, minChunkSize + 1)
}
}
|
tpolecat/doobie
|
modules/postgres/src/test/scala/doobie/postgres/Issue1512.scala
|
Scala
|
mit
| 2,698 |
package im.mange.flyby
import im.mange.common.{OnShutdown, ProcessRunner}
import im.mange.driveby.DriveByConfig
object FlyServer {
DriveByConfig.flyBinary match {
case Some(fly) => {
val flyProcess = new ProcessRunner("Fly", fly)
flyProcess.start()
OnShutdown.execute("Stop Fly", () => flyProcess.stop() )
}
case None => throw new RuntimeException("Unable to start fly, please set DriveByConfig.flyBinary")
}
///all bets are off from here .....
//make browser/start stopping also be BrowserCommands and end/start thread as required
//TODO: flyBinaryLocation should be in DriveByConfig/FlyByConfig
//TODO: do win32 ..OS.xxxx?
//don't forget to check fly in ... (and probably add license)
//private val script = if (OS.windows_?) "startFly.bat" else "startFly.sh"
//private val fly = new ProcessRunner("Fly", "./" + script, "bin/fly/")
//actually maybe be better to embed the binary in the jar anyway ...
//... in which case startFly should be ported to scala
//DriveByConfig.pool = remote(url)|local
//or DriveByConfig.pool = localhost (optimise) | hostname:port
// on suite startup .. start fly and remote pool - or local pool as required
// - or perhaps always use a remote pool? (no doubt it's a bit slower)
// or DriveByConfig.keepBrowsersOpen ....
//TODO: have a jar that can be launched ...
//e.g. driveby/flyby -server|-client ip etc
//launch space and as appropriate
//locate (and bundle) fly exec in in jar (see jruby classpath stuff for example)
//TODO: server needs to find browser to target command for
//browser will need to announce the type and it's id
//class is RemoteBrowser
//fly.write(Browser(browser.id), Long.MaxValue)
}
|
alltonp/driveby
|
src/main/scala/im/mange/flyby/FlyServer.scala
|
Scala
|
apache-2.0
| 1,730 |
/*
* Copyright 2011-2022 GatlingCorp (https://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling
import org.scalatest.flatspec.AnyFlatSpecLike
import org.scalatest.matchers.should.Matchers
import org.scalatestplus.mockito.MockitoSugar
import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks
trait BaseSpec extends AnyFlatSpecLike with Matchers with MockitoSugar with ScalaCheckDrivenPropertyChecks
|
gatling/gatling
|
gatling-commons/src/test/scala/io/gatling/BaseSpec.scala
|
Scala
|
apache-2.0
| 952 |
package com.sksamuel.elastic4s.streams
import akka.actor.ActorSystem
import com.sksamuel.elastic4s.jackson.ElasticJackson
import com.sksamuel.elastic4s.searches.RichSearchHit
import com.sksamuel.elastic4s.testkit.{AbstractElasticSugar, ClassLocalNodeProvider, ElasticSugar}
import org.reactivestreams.Publisher
import org.reactivestreams.tck.{PublisherVerification, TestEnvironment}
import org.scalatest.testng.TestNGSuiteLike
class ScrollPublisherVerificationTest
extends PublisherVerification[RichSearchHit](
new TestEnvironment(DEFAULT_TIMEOUT_MILLIS),
PUBLISHER_REFERENCE_CLEANUP_TIMEOUT_MILLIS
) with AbstractElasticSugar with TestNGSuiteLike with ClassLocalNodeProvider {
import ElasticJackson.Implicits._
implicit val system = ActorSystem()
ensureIndexExists("scrollpubver")
client.execute {
bulk(
indexInto("scrollpubver" / "empires")source Empire("Parthian", "Persia", "Ctesiphon"),
indexInto("scrollpubver" / "empires")source Empire("Ptolemaic", "Egypt", "Alexandria"),
indexInto("scrollpubver" / "empires")source Empire("British", "Worldwide", "London"),
indexInto("scrollpubver" / "empires")source Empire("Achaemenid", "Persia", "Babylon"),
indexInto("scrollpubver" / "empires")source Empire("Sasanian", "Persia", "Ctesiphon"),
indexInto("scrollpubver" / "empires")source Empire("Mongol", "East Asia", "Avarga"),
indexInto("scrollpubver" / "empires")source Empire("Roman", "Mediterranean", "Rome"),
indexInto("scrollpubver" / "empires")source Empire("Sumerian", "Mesopotamia", "Uruk"),
indexInto("scrollpubver" / "empires")source Empire("Klingon", "Space", "Kronos"),
indexInto("scrollpubver" / "empires")source Empire("Romulan", "Space", "Romulus"),
indexInto("scrollpubver" / "empires")source Empire("Cardassian", "Space", "Cardassia Prime"),
indexInto("scrollpubver" / "empires")source Empire("Egyptian", "Egypt", "Memphis"),
indexInto("scrollpubver" / "empires")source Empire("Babylonian", "Levant", "Babylon")
)
}
blockUntilCount(13, "scrollpubver")
val query = search("scrollpubver") query "*:*" scroll "1m" limit 2
override def boundedDepthOfOnNextAndRequestRecursion: Long = 2l
override def createFailedPublisher(): Publisher[RichSearchHit] = null
override def createPublisher(elements: Long): Publisher[RichSearchHit] = {
new ScrollPublisher(client, query, elements)
}
}
case class Empire(name: String, location: String, capital: String)
|
ulric260/elastic4s
|
elastic4s-streams/src/test/scala/com/sksamuel/elastic4s/streams/ScrollPublisherVerificationTest.scala
|
Scala
|
apache-2.0
| 2,495 |
package com.twitter.finagle.memcachedx.integration
import _root_.java.lang.{Boolean => JBoolean}
import java.net.{SocketAddress, InetSocketAddress}
import org.jboss.netty.buffer.ChannelBuffers
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{BeforeAndAfter, FunSuite}
import com.twitter.finagle.memcachedx.{CacheNodeGroup, KetamaClientBuilder}
import com.twitter.finagle.memcachedx.util.ChannelBufferUtils._
import com.twitter.finagle.netty3.ChannelBufferBuf
import com.twitter.finagle.{Group, Name}
import com.twitter.io.{Buf, Charsets}
import com.twitter.util.{Await, Future}
@RunWith(classOf[JUnitRunner])
class KetamaClientTest extends FunSuite with BeforeAndAfter {
/**
* We already proved above that we can hit a real memcache server,
* so we can use our own for the partitioned client test.
*/
var server1: InProcessMemcached = null
var server2: InProcessMemcached = null
var address1: InetSocketAddress = null
var address2: InetSocketAddress = null
before {
server1 = new InProcessMemcached(new InetSocketAddress(0))
address1 = server1.start().localAddress.asInstanceOf[InetSocketAddress]
server2 = new InProcessMemcached(new InetSocketAddress(0))
address2 = server2.start().localAddress.asInstanceOf[InetSocketAddress]
}
after {
server1.stop()
server2.stop()
}
test("doesn't blow up") {
val client = KetamaClientBuilder()
.nodes("localhost:%d,localhost:%d".format(address1.getPort, address2.getPort))
.build()
Await.result(client.delete("foo"))
assert(Await.result(client.get("foo")) === None)
Await.result(client.set("foo", Buf.Utf8("bar")))
val Buf.Utf8(res) = Await.result(client.get("foo")).get
assert(res === "bar")
}
test("using Name doesn't blow up") {
val name = Name.bound(address1, address2)
val client = KetamaClientBuilder().dest(name).build()
Await.result(client.delete("foo"))
assert(Await.result(client.get("foo")) === None)
Await.result(client.set("foo", Buf.Utf8("bar")))
val Buf.Utf8(res) = Await.result(client.get("foo")).get
assert(res === "bar")
}
test("using Group[InetSocketAddress] doesn't blow up") {
val mutableGroup = Group(address1, address2).map{_.asInstanceOf[SocketAddress]}
val client = KetamaClientBuilder()
.group(CacheNodeGroup(mutableGroup, true))
.build()
Await.result(client.delete("foo"))
assert(Await.result(client.get("foo")) === None)
Await.result(client.set("foo", Buf.Utf8("bar")))
val Buf.Utf8(res) = Await.result(client.get("foo")).get
assert(res === "bar")
}
test("using custom keys doesn't blow up") {
val client = KetamaClientBuilder()
.nodes("localhost:%d:1:key1,localhost:%d:1:key2".format(address1.getPort, address2.getPort))
.build()
Await.result(client.delete("foo"))
assert(Await.result(client.get("foo")) === None)
Await.result(client.set("foo", Buf.Utf8("bar")))
val Buf.Utf8(res) = Await.result(client.get("foo")).get
assert(res === "bar")
}
test("even in future pool") {
lazy val client = KetamaClientBuilder()
.nodes("localhost:%d,localhost:%d".format(address1.getPort, address2.getPort))
.build()
val futureResult = Future.value(true) flatMap {
_ => client.get("foo")
}
assert(Await.result(futureResult) === None)
}
}
|
yancl/finagle-6.22.0
|
finagle-memcachedx/src/test/scala/com/twitter/finagle/memcachedx/integration/KetamaClientTest.scala
|
Scala
|
apache-2.0
| 3,400 |
package org.eigengo.scalalp.streams
import akka.actor.ActorSystem
import akka.stream.scaladsl.Flow
import akka.stream.{FlowMaterializer, MaterializerSettings}
import nak.core.{FeaturizedClassifier, IndexedClassifier}
import scala.io.{Codec, Source}
object TwentyNewsGroups {
import nak.NakContext._
/**
* Holds the label with the classified value
* @param label the computed label
* @param value the value
* @tparam L type of L
* @tparam V type of V
*/
case class Classified[L, V](label: L, value: V)
/**
* A newsgroup message with just the ``header``, ``subject`` headers, and its ``text``
* @param from the from header
* @param subject the subject header
* @param text the body
*/
case class Message(from: String, subject: String, text: String)
/**
* Companion object for the ``Message`` case class. It contains the method ``parse``, which parses a String and
* returns a ``Message``.
*
* It is usual pattern to have convenience methods in the companion object. In our case, the [[Message#parse]] method
* does just that.
*/
object Message {
/**
* Parse a message in format
*
* ```
* From: [email protected] (dean.kaflowitz)
* Subject: Re: about the bible quiz answers
* Organization: AT&T
* Distribution: na
* Lines: 18
*
* In article <[email protected]>, ...
* ```
* @param line the line with the headers on each line, with body separated by two \\n\\n
* @return the parsed message
*/
def parse(line: String): Message = {
// The headers are separated from the body by ``\\n\\n`` (the character backslash and n), not the \\n control
// character.
val headerIndex = line.indexOf("\\\\n\\\\n")
val header = line.substring(0, headerIndex)
// We're splitting the string on the ``\\n`` characters. ``\\n`` does not mean the new line character.
// Now, since the parameter of String#split is a regular expression, it must be ``\\\\n``.
// Therefore, we have four backslashes in the String literal.
val headerElements = header.split("\\\\\\\\n").flatMap { e =>
// e is for example ``Subject: Foobarbaz``
val i = e.indexOf(':')
// check whether we found ``:``, and whether the following String has any content
// Notice that we return tuple (header-name, header-value)
if (i != -1 && i + 2 < e.length) Some(e.substring(0, i) -> e.substring(i + 2)) else None
}.toMap // convert the list of tuples (k, v) to a map
val text = line.substring(headerIndex + 3)
Message(headerElements("From"), headerElements("Subject"), text)
}
}
/**
* Main entyr point
* @param args the args
*/
def main(args: Array[String]) {
// load the pre-trained classifier
val classifier = loadClassifierFromResource[IndexedClassifier[String] with FeaturizedClassifier[String, String]]("/20news.classify")
// prepare the actor system
implicit val system = ActorSystem("Sys")
// construct the default materializer
implicit val materializer = FlowMaterializer(MaterializerSettings(system))
// load the source file
val source = Source.fromURI(getClass.getResource("/20news-test.txt").toURI)(Codec.ISO8859)
// this brings into scope the ``implicit ec: ExecutionContext``, which is required for ``onComplete``
import system.dispatcher
Flow(source.getLines()). // construct a flow by consuming the source lines and then:
filter(_.length > 10). // filtering for lines that are at least 10 characters long
map(Message.parse). // parsing each filtered line to turn it into a Message
map(m => Classified(classifier.predict(m.text), m)). // predicting the message's topic using the trained classifier
foreach(println). // displaying the output
// At this moment, we have a flow that we want, but it is not executing yet.
// To kick off the execution, we call one of the terminating combinators: in this case, onComplete.
onComplete {
case _ => system.shutdown() // when the execution completes, we shutdown the ActorSystem
}
// Compare the block above with the same flow on plain collections:
// source.getLines()
// .filter(_.length > 10)
// .map(Message.parse)
// .map(m => Classified(classifier.predict(m.text), m))
// .foreach(println)
}
}
|
eigengo/scala-launchpad
|
src/main/scala/org/eigengo/scalalp/streams/TwentyNewsGroups.scala
|
Scala
|
apache-2.0
| 4,441 |
package TutorialSolutions
import Chisel._
import scala.collection.mutable.HashMap
import scala.util.Random
class Parity extends Component {
val io = new Bundle {
val in = Bool(INPUT)
val out = Bool(OUTPUT) }
val s_even :: s_odd :: Nil = Enum(2){ UFix() }
val state = Reg(resetVal = s_even)
when (io.in) {
when (state === s_even) { state := s_odd }
.otherwise { state := s_even }
}
io.out := (state === s_odd)
}
class ParityTests(c: Parity) extends Tester(c, Array(c.io)) {
defTests {
var allGood = true
val vars = new HashMap[Node, Node]()
val rnd = new Random()
var isOdd = false
for (t <- 0 until 10) {
vars.clear()
val bit = rnd.nextInt(2)
vars(c.io.in) = Bool(bit == 1)
vars(c.io.out) = Bool(isOdd)
isOdd = if (bit == 1) !isOdd else isOdd
allGood = step(vars) && allGood
}
allGood
}
}
|
seyedmaysamlavasani/GorillaPP
|
chisel/chisel-tutorial/src/solutions/Parity.scala
|
Scala
|
bsd-3-clause
| 940 |
/*
* Copyright 2011-2022 GatlingCorp (https://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.core.action.builder
import io.gatling.core.action.{ GroupEnd, GroupStart }
import io.gatling.core.session.Expression
private[core] object GroupBuilder {
def start(groupName: Expression[String]): ActionBuilder =
(ctx, next) => new GroupStart(groupName, ctx.coreComponents.statsEngine, ctx.coreComponents.clock, next)
val End: ActionBuilder =
(ctx, next) => new GroupEnd(ctx.coreComponents.statsEngine, ctx.coreComponents.clock, next)
}
|
gatling/gatling
|
gatling-core/src/main/scala/io/gatling/core/action/builder/GroupBuilder.scala
|
Scala
|
apache-2.0
| 1,089 |
/*
* Copyright 2016-2017 original author or authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tap.message
/**
* Created by [email protected] on 1/3/17.
*/
object Exception {
case class UnknownAnalysisType(message: String) extends Exception(message)
}
|
uts-cic/tap
|
src/main/scala/tap/message/Exception.scala
|
Scala
|
apache-2.0
| 797 |
package org.jetbrains.plugins.scala.codeInspection.cast
import com.intellij.codeInspection.LocalInspectionTool
import org.jetbrains.plugins.scala.codeInspection.ScalaQuickFixTestBase
class ScalaRedundantConversionInspectionTest extends ScalaQuickFixTestBase {
override protected val classOfInspection: Class[_ <: LocalInspectionTool] =
classOf[ScalaRedundantConversionInspection]
override protected val description = "Casting '<from>' to '<to>' is redundant"
override protected def descriptionMatches(s: String): Boolean = s != null && s.startsWith("Casting '")
def test_int(): Unit = {
checkTextHasError(s"val x = 3$START.toInt$END")
testQuickFix(
"val x = 3.toInt",
"val x = 3",
"Remove Redundant Conversion"
)
}
def test_string(): Unit = {
checkTextHasError(s"""val x = ""$START.toString$END""")
testQuickFix(
"""val x = "".toString """,
"""val x = "" """,
"Remove Redundant Conversion"
)
}
//SCL-17290
def test_toString_on_variable(): Unit = {
checkTextHasError(
s"""
|val x = ""
|val y = x$START.toString$END
|""".stripMargin)
testQuickFix(
s"""
|val x = ""
|val y = x.toString
|""".stripMargin,
s"""
|val x = ""
|val y = x
|""".stripMargin,
"Remove Redundant Conversion"
)
}
val tryDef =
"""
|class Try[+T](value: T) {
| def fold[U](f1: Any => U, f2: T => U): U = f1(())
|}
|def Try[T](a: T): Try[T] = new Try(a)
|
|""".stripMargin
def test_SLC16197(): Unit = {
checkTextHasError(tryDef + s"""val x: String = Try("Hello").fold(_.toString, _$START.toString$END)""")
testQuickFix(
tryDef + """val x: String = Try("Hello").fold(_.toString, _.toString)""",
tryDef + """val x: String = Try("Hello").fold(_.toString, identity)""",
"Remove Redundant Conversion"
)
}
def test_SLC16197_neg(): Unit = {
checkTextHasError(tryDef + s"""val x: String = Try("Hello").fold(_.toString, _$START.toString$END + 3)""")
testQuickFix(
tryDef + """val x: String = Try("Hello").fold(_.toString, _.toString + 3)""",
tryDef + """val x: String = Try("Hello").fold(_.toString, _ + 3)""",
"Remove Redundant Conversion"
)
}
def test_toString_removal(): Unit = testQuickFix(
"""
|def test(arg: String, i: Int) = ()
|test("".toString, 3)
|""".stripMargin,
"""
|def test(arg: String, i: Int) = ()
|test("", 3)
|""".stripMargin,
"Remove Redundant Conversion"
)
}
|
JetBrains/intellij-scala
|
scala/scala-impl/test/org/jetbrains/plugins/scala/codeInspection/cast/ScalaRedundantConversionInspectionTest.scala
|
Scala
|
apache-2.0
| 2,611 |
/*
* Derived from https://github.com/spray/spray/blob/v1.1-M7/spray-http/src/main/scala/spray/http/parser/CacheControlHeader.scala
*
* Copyright (C) 2011-2012 spray.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.http4s
package parser
import java.util.concurrent.TimeUnit
import scala.concurrent.duration._
import org.parboiled2.{Rule1, ParserInput}
import org.http4s.headers.`Cache-Control`
import org.http4s.CacheDirective._
import org.http4s.util.string._
private[parser] trait CacheControlHeader {
def CACHE_CONTROL(value: String): ParseResult[`Cache-Control`] =
new CacheControlParser(value).parse
private class CacheControlParser(input: ParserInput) extends Http4sHeaderParser[`Cache-Control`](input) {
def entry: Rule1[`Cache-Control`] = rule {
oneOrMore(CacheDirective).separatedBy(ListSep) ~ EOI ~> { xs: Seq[CacheDirective] =>
`Cache-Control`(xs.head, xs.tail:_*)
}
}
def CacheDirective: Rule1[CacheDirective] = rule {
("no-cache" ~ optional("=" ~ FieldNames)) ~> (fn => `no-cache`(fn.map(_.map(_.ci)).getOrElse(Nil))) |
"no-store" ~ push(`no-store`) |
"no-transform" ~ push(`no-transform`) |
"max-age=" ~ DeltaSeconds ~> (s => `max-age`(s)) |
"max-stale" ~ optional("=" ~ DeltaSeconds) ~> (s => `max-stale`(s)) |
"min-fresh=" ~ DeltaSeconds ~> (s => `min-fresh`(s)) |
"only-if-cached" ~ push(`only-if-cached`) |
"public" ~ push(`public`) |
"private" ~ optional("=" ~ FieldNames) ~> (fn => `private`(fn.map(_.map(_.ci)).getOrElse(Nil))) |
"must-revalidate" ~ push(`must-revalidate`) |
"proxy-revalidate" ~ push(`proxy-revalidate`) |
"s-maxage=" ~ DeltaSeconds ~> (s => `s-maxage`(s)) |
"stale-if-error=" ~ DeltaSeconds ~> (s => `stale-if-error`(s)) |
"stale-while-revalidate=" ~ DeltaSeconds ~> (s => `stale-while-revalidate`(s)) |
(Token ~ optional("=" ~ (Token | QuotedString)) ~> { (name: String, arg: Option[String]) => org.http4s.CacheDirective(name.ci, arg) })
}
def FieldNames: Rule1[Seq[String]] = rule { oneOrMore(QuotedString).separatedBy(ListSep) }
def DeltaSeconds: Rule1[Duration] = rule { capture(oneOrMore(Digit)) ~> {s: String => Duration(s.toLong, TimeUnit.SECONDS)} }
}
}
|
hvesalai/http4s
|
core/src/main/scala/org/http4s/parser/CacheControlHeader.scala
|
Scala
|
apache-2.0
| 2,780 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600.calculations
import org.joda.time.{Days, LocalDate}
import uk.gov.hmrc.ct.box.{EndDate, StartDate}
import uk.gov.hmrc.ct.computations.HmrcAccountingPeriod
import uk.gov.hmrc.ct.utils.DateImplicits._
object AccountingPeriodHelper extends AccountingPeriodHelper
trait AccountingPeriodHelper {
def daysInAccountingPeriod(accountingPeriod: HmrcAccountingPeriod) = daysBetween(accountingPeriod.start.value, accountingPeriod.end.value)
def accountingPeriodDaysInFinancialYear(year: Int, accountingPeriod: HmrcAccountingPeriod): BigDecimal = {
val (fyStartDate, fyEndDate) = financialYearStartingIn(year)
val start = if (accountingPeriod.start.value < fyStartDate) fyStartDate else accountingPeriod.start.value
val end = if (accountingPeriod.end.value > fyEndDate) fyEndDate else accountingPeriod.end.value
BigDecimal(daysBetween(start, end))
}
def accountingPeriodSpansTwoFinancialYears(accountingPeriod: HmrcAccountingPeriod): Boolean = {
endingFinancialYear(accountingPeriod.end) > startingFinancialYear(accountingPeriod.start)
}
def financialYearStartingIn(year: Int): (LocalDate, LocalDate) = (new LocalDate(year, 4, 1), new LocalDate(year + 1, 3, 31))
def startingFinancialYear(date: StartDate): Int = financialYearForDate(date.value)
def endingFinancialYear(date: EndDate): Int = financialYearForDate(date.value)
def financialYearForDate(date: LocalDate): Int = if (date.getMonthOfYear < 4) date.getYear - 1 else date.getYear
def daysBetween(start: LocalDate, end: LocalDate): Int = Days.daysBetween(start, end).getDays + 1
def validateAccountingPeriod(accountingPeriod: HmrcAccountingPeriod) = {
if (accountingPeriod.start.value > accountingPeriod.end.value) {
throw new InvalidAccountingPeriodException("Accounting Period start date must be before the end date")
}
if (daysBetween(accountingPeriod.start.value, accountingPeriod.end.value) > maximumNumberOfDaysInAccountingPeriod(accountingPeriod)) {
throw new InvalidAccountingPeriodException("Accounting Period must not be longer than one calendar year")
}
if (accountingPeriod.start.value < new LocalDate(2006, 10, 2)) {
throw new InvalidAccountingPeriodException("Accounting Period must not be before 1st October 2006")
}
}
private def maximumNumberOfDaysInAccountingPeriod(accountingPeriod: HmrcAccountingPeriod): BigDecimal = {
val startDate = accountingPeriod.start.value
val endDate = startDate.withYear(startDate.getYear + 1)
daysBetween(startDate, endDate) - 1
}
}
class InvalidAccountingPeriodException(message: String) extends Exception(message)
|
hmrc/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/ct600/calculations/AccountingPeriodHelper.scala
|
Scala
|
apache-2.0
| 3,270 |