code (string, 5-1M) | repo_name (string, 5-109) | path (string, 6-208) | language (1 class) | license (15 classes) | size (int64, 5-1M)
---|---|---|---|---|---|
package co.blocke.scalajack
package dynamodb
import org.json4s.JsonAST.{ JNothing, JObject, JValue }
object JsonDiff {
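  // Recursively compares two JSON values and returns one JsonDiff entry per differing leaf.
  // Objects are compared field by field; array diffing is intentionally left out (see the
  // commented-out block below).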
def compare(
left: JValue,
right: JValue,
leftLabel: String = "left",
rightLabel: String = "right"): Seq[JsonDiff] = {
(left, right) match {
case (JObject(leftFields), JObject(rightFields)) =>
val allFieldNames =
(leftFields.map(_._1) ++ rightFields.map(_._1)).distinct
allFieldNames.sorted flatMap { fieldName =>
val leftFieldValue = leftFields
.collectFirst({ case (`fieldName`, fieldValue) => fieldValue })
.getOrElse(JNothing)
val rightFieldValue = rightFields
.collectFirst({ case (`fieldName`, fieldValue) => fieldValue })
.getOrElse(JNothing)
compare(leftFieldValue, rightFieldValue, leftLabel, rightLabel)
}
// ---- Not used/needed at present, and I have questions about the correct behavior here. Exactly how do you
// "diff" two arrays (not necessarily homogeneous typed)?
//
// case (JArray(leftElements), JArray(rightElements)) =>
// (0 until (leftElements.size max rightElements.size)) flatMap { elementIndex =>
// val leftElement = leftElements.applyOrElse(elementIndex, (_: Int) => JNothing)
// val rightElement = rightElements.applyOrElse(elementIndex, (_: Int) => JNothing)
  //          compare(path \ elementIndex, leftElement, rightElement, leftLabel, rightLabel)
// }
case _ =>
if (left == right) {
Seq.empty
} else {
val outerLeft = left
val outerRight = right
Seq(new JsonDiff {
override val left: JValue = outerLeft
override val right: JValue = outerRight
override def toString: String =
s"JsonDiff($leftLabel: $left, $rightLabel: $right)"
})
}
}
}
}
trait JsonDiff {
val left: JValue
val right: JValue
}
| gzoller/ScalaJack | dynamodb/src/test/scala/co.blocke.scalajack/dynamodb/JsonDiff.scala | Scala | mit | 2,050 |
package at.bhuemer.scala.playground.github.monad
/**
* The millionth definition of a monad in Scala (no need to use scalaz in this example - less than 50 lines in total).
*/
trait Monad[M[_]] {
def unit[A](a: => A): M[A]
def map[A, B](ma: M[A])(f: A => B): M[B]
def flatMap[A, B](ma: M[A])(f: A => M[B]): M[B]
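  // Sketch added for illustration (not part of the original file): a typical instance for
  // Option, assuming only the standard library, showing how the three operations line up.
  //
  //   implicit val optionMonad: Monad[Option] = new Monad[Option] {
  //     def unit[A](a: => A): Option[A] = Option(a)
  //     def map[A, B](ma: Option[A])(f: A => B): Option[B] = ma map f
  //     def flatMap[A, B](ma: Option[A])(f: A => Option[B]): Option[B] = ma flatMap f
  //   }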
} | bhuemer/scala-playground | github-future-monad/src/main/scala/at/bhuemer/scala/playground/github/monad/Monad.scala | Scala | apache-2.0 | 320 |
package com.twitter.finagle.http.filter
import com.twitter.conversions.time._
import com.twitter.finagle.{Deadline, Service}
import com.twitter.finagle.context.Contexts
import com.twitter.finagle.http.{Status, Response, Request}
import com.twitter.finagle.http.codec.HttpContext
import com.twitter.util.{Await, Future}
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class ContextFilterTest extends FunSuite {
test("parses Finagle-Ctx headers") {
val writtenDeadline = Deadline.ofTimeout(5.seconds)
val service =
new ClientContextFilter[Request, Response] andThen
new ServerContextFilter[Request, Response] andThen
Service.mk[Request, Response] { req =>
assert(Deadline.current.get == writtenDeadline)
Future.value(Response())
}
Contexts.broadcast.let(Deadline, writtenDeadline) {
val req = Request()
HttpContext.write(req)
// Clear the deadline value in the context
Contexts.broadcast.letClear(Deadline) {
// ensure the deadline was cleared
assert(Deadline.current == None)
val rsp = Await.result(service(req))
assert(rsp.status == Status.Ok)
}
}
}
test("does not set incorrectly encoded context headers") {
val service =
new ClientContextFilter[Request, Response] andThen
new ServerContextFilter[Request, Response] andThen
Service.mk[Request, Response] { _ =>
assert(Contexts.broadcast.marshal.isEmpty)
Future.value(Response())
}
val req = Request()
req.headers().add("Finagle-Ctx-com.twitter.finagle.Deadline", "foo")
val rsp = Await.result(service(req))
assert(rsp.status == Status.Ok)
}
}
| lukiano/finagle | finagle-http/src/test/scala/com/twitter/finagle/http/filter/ContextFilterTest.scala | Scala | apache-2.0 | 1,773 |
/*
* Copyright 2011 Delving B.V.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package processors
import models.dos.Task
import org.im4java.process.OutputConsumer
import org.im4java.core.{ ImageCommand, IMOperation }
import java.io.{ File, InputStreamReader, BufferedReader, InputStream }
import libs.Normalizer
import org.apache.commons.io.FileUtils
import models.OrganizationConfiguration
/**
* This processor normalizes original TIFs, so that tiling works nicely with it. Original images are moved to a new subdirectory called "_original"
*
* @author Manuel Bernhardt <[email protected]>
*/
object TIFFNormalizationProcessor extends Processor {
def process(task: Task, processorParams: Map[String, AnyRef])(implicit configuration: OrganizationConfiguration) {
val originalDir = new File(task.pathAsFile, "_original")
val workDir = new File(task.pathAsFile, "_temp")
originalDir.mkdir()
workDir.mkdir()
val images = task.pathAsFile.listFiles().filter(f => isImage(f.getName))
Task.dao(task.orgId).setTotalItems(task, images.size)
info(task, s"Starting to normalize images for path '${task.path}', parameters: ${parameterList(task)}")
for (i <- images; if (!task.isCancelled)) {
try {
Normalizer.normalize(i, workDir).map { file =>
i.renameTo(new File(originalDir, i.getName))
file.renameTo(new File(task.pathAsFile, i.getName))
info(task, """Image %s normalized succesfully, moved original to directory "_original"""".format(i.getName), Some(i.getAbsolutePath), Some(file.getAbsolutePath))
}
} catch {
case t: Throwable =>
t.printStackTrace()
error(task, s"Error while normalizing image ${i.getAbsolutePath}: ${t.getMessage}")
}
Task.dao(task.orgId).incrementProcessedItems(task, 1)
}
FileUtils.deleteDirectory(workDir)
}
} | delving/culture-hub | modules/dos/app/processors/TIFFNormalizationProcessor.scala | Scala | apache-2.0 | 2,407 |
package com.intenthq.icicle
import java.util
import com.intenthq.icicle.redis.RoundRobinRedisPool
import org.specs2.mutable._
object JedisIcicleIntegrationSpec extends Specification {
"constructor" should {
"sets the correct host and port if a valid host and port is passed" in {
val underTest = new JedisIcicle("localhost:6379")
val jedis = underTest.getJedisPool.getResource
try {
(jedis.getClient.getHost must_== "localhost") and
(jedis.getClient.getPort must_== 6379)
} finally {
jedis.close()
}
}
}
"works with a real ID generator" in {
val jedisIcicle = new JedisIcicle("localhost:6379")
val roundRobinRedisPool = new RoundRobinRedisPool(util.Arrays.asList(jedisIcicle))
val idGenerator = new IcicleIdGenerator(roundRobinRedisPool)
val jedis = jedisIcicle.getJedisPool.getResource
try {
jedis.set(logicalShardIdRedisKey, "1")
jedis.del(sequenceRedisKey)
} finally {
jedis.close()
}
val n = 100000
val ids = (1 to n).map { _ => idGenerator.generateId() }
// They were all successful...
(ids.forall(_.isPresent) must beTrue) and
// They were all unique...
(ids.map(_.get).toSet.size must_== n) and
// And they were in order!
(ids.map(_.get).map(_.getId) must beSorted)
}
val sequenceRedisKey = "icicle-generator-sequence"
val logicalShardIdRedisKey = "icicle-generator-logical-shard-id"
}
| intenthq/icicle | icicle-jedis/src/it/scala/com/intenthq/icicle/JedisIcicleIntegrationSpec.scala | Scala | mit | 1,463 |
/*
* Copyright 2017 PayPal
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.squbs.streams
import akka.actor.ActorSystem
import akka.event.Logging
import akka.stream._
import akka.stream.scaladsl.Flow
import akka.stream.stage._
import com.codahale.metrics.MetricRegistry
import org.squbs.metrics.MetricsExtension
object DemandSupplyMetrics {
/**
* Creates a linear [[GraphStage]] that captures (downstream) demand and (upstream) supply metrics.
*
* @tparam T the type of the elements flowing from upstream to downstream
* @param name A name to identify this instance's metric
* @param system The Actor system
* @return a [[DemandSupplyMetricsStage]] that can be joined with a [[Flow]] with the corresponding type to capture
* demand/supply metrics.
*/
def apply[T](name: String)(implicit system: ActorSystem): DemandSupplyMetricsStage[T] =
new DemandSupplyMetricsStage[T](name)
}
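// Usage sketch (added for illustration, not in the original file): a stage with a FlowShape
// slots into a stream with `.via`. The stream name "my-stream" and the implicit ActorSystem
// are assumptions of this example.
//
//   implicit val system: ActorSystem = ActorSystem("demo")
//   val metered = Flow[Int].via(DemandSupplyMetrics[Int]("my-stream"))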
/**
* A linear [[GraphStage]] that is used to capture (downstream) demand and (upstream) supply metrics for
* backpressure visibility.
*/
class DemandSupplyMetricsStage[T](name: String)(implicit system: ActorSystem) extends GraphStage[FlowShape[T, T]] {
val domain = MetricsExtension(system).Domain
val metrics = MetricsExtension(system).metrics
val in = Inlet[T](Logging.simpleName(this) + ".in")
val out = Outlet[T](Logging.simpleName(this) + ".out")
override val shape = FlowShape.of(in, out)
// naming convention "domain:key-property-list"
val upstreamCounter = MetricRegistry.name(domain, s"$name-upstream-counter")
val downstreamCounter = MetricRegistry.name(domain, s"$name-downstream-counter")
override def createLogic(inheritedAttributes: Attributes): GraphStageLogic =
new GraphStageLogic(shape) {
setHandler(in, new InHandler {
override def onPush(): Unit = {
val elem = grab(in)
metrics.meter(upstreamCounter).mark
push(out, elem)
}
})
setHandler(out, new OutHandler {
override def onPull(): Unit = {
metrics.meter(downstreamCounter).mark
pull(in)
}
})
}
}
| akara/squbs | squbs-ext/src/main/scala/org/squbs/streams/DemandSupplyMetrics.scala | Scala | apache-2.0 | 2,669 |
package finatra.filters
import com.twitter.finagle.http.filter.AddResponseHeadersFilter
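/**
 * Convenience factory for a Finagle AddResponseHeadersFilter that sets the standard CORS
 * response headers: allowed origin, allowed methods and allowed request headers.
 */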
object CorsFilter {
def apply(origin: String = "*", methods: String = "GET", headers: String = "x-requested-with") =
new AddResponseHeadersFilter(
Map("Access-Control-Allow-Origin" -> origin,
"Access-Control-Allow-Methods" -> methods,
"Access-Control-Allow-Headers" -> headers))
}
| pedrovgs/HaveANiceDay | src/main/scala/finatra/filters/CorsFilter.scala | Scala | gpl-3.0 | 404 |
package com.github.mrpowers.spark.daria.sql
import org.apache.spark.sql.DataFrame
case class MissingDataFrameColumnsException(smth: String) extends Exception(smth)
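/**
 * Checks a DataFrame for the presence of a set of required columns and throws a
 * MissingDataFrameColumnsException with a descriptive message when any are missing.
 */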
private[sql] class DataFrameColumnsChecker(df: DataFrame, requiredColNames: Seq[String]) {
val missingColumns = requiredColNames.diff(df.columns.toSeq)
def missingColumnsMessage(): String = {
val missingColNames = missingColumns.mkString(", ")
val allColNames = df.columns.mkString(", ")
s"The [${missingColNames}] columns are not included in the DataFrame with the following columns [${allColNames}]"
}
def validatePresenceOfColumns(): Unit = {
if (missingColumns.nonEmpty) {
throw MissingDataFrameColumnsException(missingColumnsMessage())
}
}
}
| MrPowers/spark-daria | src/main/scala/com/github/mrpowers/spark/daria/sql/DataFrameColumnsChecker.scala | Scala | mit | 758 |
package docs.scaladsl.services.headerfilters
package compose {
import com.lightbend.lagom.scaladsl.api.transport.{HeaderFilter, RequestHeader, ResponseHeader}
import com.lightbend.lagom.scaladsl.api.{Service, ServiceCall}
import org.slf4j.LoggerFactory
//#verbose-filter
class VerboseFilter(name: String) extends HeaderFilter {
private val log = LoggerFactory.getLogger(getClass)
def transformClientRequest(request: RequestHeader) = {
log.debug(name + " - transforming Client Request")
request
}
def transformServerRequest(request: RequestHeader) = {
log.debug(name + " - transforming Server Request")
request
}
def transformServerResponse(response: ResponseHeader,
request: RequestHeader) = {
log.debug(name + " - transforming Server Response")
response
}
def transformClientResponse(response: ResponseHeader,
request: RequestHeader) = {
log.debug(name + " - transforming Client Response")
response
}
}
//#verbose-filter
trait HelloService extends Service {
def sayHello: ServiceCall[String, String]
//#header-filter-composition
def descriptor = {
import Service._
named("hello").withCalls(
call(sayHello)
).withHeaderFilter(HeaderFilter.composite(
new VerboseFilter("Foo"),
new VerboseFilter("Bar")
))
}
//#header-filter-composition
}
}
| edouardKaiser/lagom | docs/manual/scala/guide/services/code/HeaderFilters.scala | Scala | apache-2.0 | 1,433 |
package net.nomadicalien.ch2
import java.util.Date
/**
* The Account abstraction (product type)
*/
object Listing2_8 {
sealed trait Account {
def number: String
def name: String
}
case class CheckingAccount(number: String, name: String, dateOfOpening: Date) extends Account
case class SavingsAccount(number: String, name: String, dateOfOpening: Date, rateOfInterest: BigDecimal)
extends Account
}
| BusyByte/functional-n-reactive-domain-modeling | chapter2/src/main/scala/Listing2_8.scala | Scala | apache-2.0 | 425 |
package com.twitter.util
import java.util.concurrent.atomic.AtomicInteger
import java.util.concurrent.{CancellationException, ExecutorService}
import org.junit.runner.RunWith
import org.mockito.ArgumentCaptor
import org.mockito.Matchers.any
import org.mockito.Mockito.{never, verify, when}
import org.scalatest.FunSuite
import org.scalatest.concurrent.Eventually
import org.scalatest.junit.JUnitRunner
import org.scalatest.mock.MockitoSugar
import org.scalatest.time.{Millis, Seconds, Span}
import com.twitter.conversions.time._
@RunWith(classOf[JUnitRunner])
class TimerTest extends FunSuite with MockitoSugar with Eventually {
implicit override val patienceConfig =
PatienceConfig(timeout = scaled(Span(4, Seconds)), interval = scaled(Span(5, Millis)))
test("ThreadStoppingTimer should stop timers in a different thread") {
val executor = mock[ExecutorService]
val underlying = mock[Timer]
val timer = new ThreadStoppingTimer(underlying, executor)
verify(executor, never()).submit(any[Runnable])
timer.stop()
verify(underlying, never()).stop()
val runnableCaptor = ArgumentCaptor.forClass(classOf[Runnable])
verify(executor).submit(runnableCaptor.capture())
runnableCaptor.getValue.run()
verify(underlying).stop()
}
test("ReferenceCountingTimer calls the factory when it is first acquired") {
val underlying = mock[Timer]
val factory = mock[() => Timer]
when(factory()).thenReturn(underlying)
val refcounted = new ReferenceCountingTimer(factory)
verify(factory, never()).apply()
refcounted.acquire()
verify(factory).apply()
}
test("ReferenceCountingTimer stops the underlying timer when acquire count reaches 0") {
val underlying = mock[Timer]
val factory = mock[() => Timer]
when(factory()).thenReturn(underlying)
val refcounted = new ReferenceCountingTimer(factory)
refcounted.acquire()
refcounted.acquire()
refcounted.acquire()
verify(factory).apply()
refcounted.stop()
verify(underlying, never()).stop()
refcounted.stop()
verify(underlying, never()).stop()
refcounted.stop()
verify(underlying).stop()
}
test("ScheduledThreadPoolTimer should initialize and stop") {
val timer = new ScheduledThreadPoolTimer(1)
assert(timer != null)
timer.stop()
}
test("ScheduledThreadPoolTimer should increment a counter") {
val timer = new ScheduledThreadPoolTimer
val counter = new AtomicInteger(0)
timer.schedule(100.millis, 200.millis) {
counter.incrementAndGet()
}
eventually { assert(counter.get() >= 2) }
timer.stop()
}
test("ScheduledThreadPoolTimer should schedule(when)") {
val timer = new ScheduledThreadPoolTimer
val counter = new AtomicInteger(0)
timer.schedule(Time.now + 200.millis) {
counter.incrementAndGet()
}
eventually { assert(counter.get() === 1) }
timer.stop()
}
test("ScheduledThreadPoolTimer should cancel schedule(when)") {
val timer = new ScheduledThreadPoolTimer
val counter = new AtomicInteger(0)
val task = timer.schedule(Time.now + 200.millis) {
counter.incrementAndGet()
}
task.cancel()
Thread.sleep(1.seconds.inMillis)
assert(counter.get() != 1)
timer.stop()
}
test("JavaTimer should not stop working when an exception is thrown") {
var errors = 0
var latch = new CountDownLatch(1)
val timer = new JavaTimer {
override def logError(t: Throwable) {
errors += 1
latch.countDown
}
}
timer.schedule(Time.now) {
throw new scala.MatchError("huh")
}
latch.await(30.seconds)
assert(errors == 1)
var result = 0
latch = new CountDownLatch(1)
timer.schedule(Time.now) {
result = 1 + 1
latch.countDown
}
latch.await(30.seconds)
assert(result == 2)
assert(errors == 1)
}
test("JavaTimer should schedule(when)") {
val timer = new JavaTimer
val counter = new AtomicInteger(0)
timer.schedule(Time.now + 20.millis) {
counter.incrementAndGet()
}
Thread.sleep(40.milliseconds.inMillis)
eventually { assert(counter.get() == 1) }
timer.stop()
}
test("JavaTimer should schedule(pre-epoch)") {
val timer = new JavaTimer
val counter = new AtomicInteger(0)
timer.schedule(Time.Bottom) {
counter.incrementAndGet()
}
eventually { assert(counter.get() == 1) }
timer.stop()
}
test("JavaTimer should cancel schedule(when)") {
val timer = new JavaTimer
val counter = new AtomicInteger(0)
val task = timer.schedule(Time.now + 20.millis) {
counter.incrementAndGet()
}
task.cancel()
Thread.sleep(1.seconds.inMillis)
assert(counter.get() != 1)
timer.stop()
}
test("Timer should doLater") {
val result = "boom"
Time.withCurrentTimeFrozen { ctl =>
val timer = new MockTimer
val f = timer.doLater(1.millis)(result)
assert(!f.isDefined)
ctl.advance(2.millis)
timer.tick()
assert(f.isDefined)
assert(Await.result(f) == result)
}
}
test("Timer should doLater throws exception") {
Time.withCurrentTimeFrozen { ctl =>
val timer = new MockTimer
val ex = new Exception
def task: String = throw ex
val f = timer.doLater(1.millis)(task)
assert(!f.isDefined)
ctl.advance(2.millis)
timer.tick()
assert(f.isDefined)
intercept[Throwable] { Await.result(f, 0.millis) }
}
}
test("Timer should interrupt doLater") {
val result = "boom"
Time.withCurrentTimeFrozen { ctl =>
val timer = new MockTimer
val f = timer.doLater(1.millis)(result)
assert(!f.isDefined)
f.raise(new Exception)
ctl.advance(2.millis)
timer.tick()
assert(f.isDefined)
intercept[CancellationException] { Await.result(f) }
}
}
test("Timer should doAt") {
val result = "boom"
Time.withCurrentTimeFrozen { ctl =>
val timer = new MockTimer
val f = timer.doAt(Time.now + 1.millis)(result)
assert(!f.isDefined)
ctl.advance(2.millis)
timer.tick()
assert(f.isDefined)
assert(Await.result(f) == result)
}
}
test("Timer should cancel doAt") {
val result = "boom"
Time.withCurrentTimeFrozen { ctl =>
val timer = new MockTimer
val f = timer.doAt(Time.now + 1.millis)(result)
assert(!f.isDefined)
val exc = new Exception
f.raise(exc)
ctl.advance(2.millis)
timer.tick()
assert {
f.poll match {
case Some(Throw(e: CancellationException)) if e.getCause eq exc => true
case _ => false
}
}
}
}
test("Timer should schedule(when)") {
Time.withCurrentTimeFrozen { ctl =>
val timer = new MockTimer
val counter = new AtomicInteger(0)
timer.schedule(Time.now + 1.millis)(counter.incrementAndGet())
ctl.advance(2.millis)
timer.tick()
assert(counter.get() == 1)
}
}
test("Timer should cancel schedule(when)") {
Time.withCurrentTimeFrozen { ctl =>
val timer = new MockTimer
val counter = new AtomicInteger(0)
val task = timer.schedule(Time.now + 1.millis)(counter.incrementAndGet())
task.cancel()
ctl.advance(2.millis)
timer.tick()
assert(counter.get() == 0)
}
}
test("Timer should cancel schedule(duration)") {
Time.withCurrentTimeFrozen { ctl =>
val timer = new MockTimer
val counter = new AtomicInteger(0)
val task = timer.schedule(1.millis)(counter.incrementAndGet())
ctl.advance(2.millis)
timer.tick()
task.cancel()
ctl.advance(2.millis)
timer.tick()
assert(counter.get() == 1)
}
}
}
| tdyas/util | util-core/src/test/scala/com/twitter/util/TimerTest.scala | Scala | apache-2.0 | 7,756 |
object PascalTriangle {
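  // Computes the binomial coefficient C(row, col) (the Pascal's triangle entry) by memoized
  // recursion: previously computed entries are cached in lookup_table.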
val lookup_table = new collection.mutable.HashMap[(Int, Int), Int]
def compute(row: Int, col: Int): Int = {
    require(col >= 0 && row >= 0, "row and col arguments must be >= 0")
require(col <= row, "col must be <= row")
if (col == 0 || col == row) {
1
} else {
val r1c: Int = this.lookup_table.getOrElseUpdate((row-1, col),
compute(row-1, col))
val r1c1: Int = this.lookup_table.getOrElseUpdate((row-1, col-1),
compute(row-1, col-1))
r1c + r1c1
}
}
def main(args: Array[String]): Unit = {
val row = args(0).toInt;
val col = args(1).toInt;
println(this.compute(row, col))
}
} | dansok/algos | PascalTriangle.scala | Scala | gpl-3.0 | 638 |
import argonaut._
import argonaut.Argonaut._
import cats.Show
import cats.syntax.all._
import treelog._
final case class Thing(id: Int, name: String)
object Thing {
implicit def ThingCodecJson: CodecJson[Thing] =
CodecJson(
(t: Thing) =>
("id" := t.id) ->:
("name" := t.name) ->:
jEmptyObject,
c =>
for {
        id <- (c --\ "id").as[Int]
        name <- (c --\ "name").as[String]
} yield Thing(id, name)
)
implicit val show: Show[Int] = new Show[Int] {
override def show(k: Int) = k.toString
}
}
// This defines the Argonaut JSON encoders and decoders we need in order to serialize and deserialize the serializable form
// of the DescribedComputation
object Codecs {
implicit val logTreeLabelEncoder: EncodeJson[LogTreeLabel[Int]] = EncodeJson { l =>
("success" := l.success) ->:
("annotations" := l.annotations) ->:
l.fold(d => ("description" := d.description) ->: jEmptyObject, _ => jEmptyObject)
}
implicit val logTreeLabelDecoder: DecodeJson[LogTreeLabel[Int]] = DecodeJson { c =>
    if ((c --\ "description").succeeded)
      for {
        success <- (c --\ "success").as[Boolean]
        annotations <- (c --\ "annotations").as[Set[Int]]
        description <- (c --\ "description").as[String]
      } yield DescribedLogTreeLabel(description, success, annotations)
    else
      for {
        success <- (c --\ "success").as[Boolean]
        annotations <- (c --\ "annotations").as[Set[Int]]
} yield UndescribedLogTreeLabel(success, annotations)
}
implicit val logTreeLabelCodec: CodecJson[LogTreeLabel[Int]] = CodecJson.derived[LogTreeLabel[Int]]
implicit val serializableTreeEncoder: EncodeJson[SerializableTree[Int]] = EncodeJson { t =>
("label" := t.label) ->:
("children" := t.children) ->:
jEmptyObject
}
implicit val serializableTreeDecoder: DecodeJson[SerializableTree[Int]] = DecodeJson { c =>
for {
      label <- (c --\ "label").as[LogTreeLabel[Int]]
      children <- (c --\ "children").as[List[SerializableTree[Int]]]
} yield SerializableTree(label, children)
}
implicit val serializableTreeCodec: CodecJson[SerializableTree[Int]] = CodecJson.derived[SerializableTree[Int]]
}
object SerializationExample extends App with LogTreeSyntax[Int] {
val result: DescribedComputation[List[String]] = listOfThings() ~>* ("Here are some things", things)
println("Before serialization:")
showDescribedComputation(result)
// The above will print:
// The log is:
// Here are some things
// Here I described Thing1 - [1]
// Here I described Thing2 - [2]
//
// The value is:
  // \/-(List(Hello Thing1, Hello Thing2))
// Now let's serialize the DescribedComputation into JSON
// Turn the DescribedComputation into a serializable form.
val serializableDescribedComputation: SerializableDescribedComputation[List[String]] = toSerializableForm(result)
import Codecs._
val json = serializableDescribedComputation.asJson.spaces2
println()
println("Serialized:")
println(json)
// The above renders:
/*
[
{
"Right" : [
"Hello Thing1",
"Hello Thing2"
]
},
{
"children" : [
{
"children" : [
],
"label" : {
"description" : "Here I described Thing1",
"annotations" : [
1
],
"success" : true
}
},
{
"children" : [
],
"label" : {
"description" : "Here I described Thing2",
"annotations" : [
2
],
"success" : true
}
}
],
"label" : {
"description" : "Here are some things",
"annotations" : [
],
"success" : true
}
}
]
*/
// Now let's deserialize
private val parsed: Either[String, Json] = Parse.parse(json)
private val decoded =
parsed.flatMap(_.jdecode[SerializableDescribedComputation[List[String]]].toEither)
private val deserialized =
decoded.map(ds => fromSerializableForm(ds))
// That's all we need to do to deserialize
deserialized.foreach { d =>
println()
println("After serializing and deserializing:")
showDescribedComputation(d)
// The above will print:
// The log is:
// Here are some things
// Here I described Thing1 - [1]
// Here I described Thing2 - [2]
//
// The value is:
    // \/-(List(Hello Thing1, Hello Thing2))
// Now let's carry on doing something:
val moreStuff = "FTW!" ~< (for {
things1And2 <- d ~> "Some things that have been serialized and deserialized"
things3And4 <- List(
Thing(3, "Thing3"),
Thing(4, "Thing4")
) ~>* ("Things that have not been serialized and deserialized", things)
} yield things1And2 ::: things3And4)
println()
println("After adding some things:")
showDescribedComputation(moreStuff)
// The above will print:
// The log is:
// The log is:
// FTW!
// Some things that have been serialized and deserialized
// Here are some things
// Here I described Thing1 - [1]
// Here I described Thing2 - [2]
// Things that have not been serialized and deserialized
// Here I described Thing3 - [3]
// Here I described Thing4 - [4]
//
// The value is:
    // \/-(List(Hello Thing1, Hello Thing2, Hello Thing3, Hello Thing4))
}
private def showDescribedComputation(dc: DescribedComputation[List[String]]): Unit = {
val runResult = dc.value
// This will not compile unless we define a scalaz.Show for Thing (as above)
println("The log is:")
println(runResult.written.show)
println()
println("The value is:")
println(runResult.value)
}
// The '~~' operator annotates the node on the left with the object on the right
private def things(thing: Thing) = s"Hello ${thing.name}" ~> s"Here I described ${thing.name}" ~~ thing.id
private def listOfThings() = Thing(1, "Thing1") :: Thing(2, "Thing2") :: Nil
}
| lancewalton/treelog | src/test/scala/SerializationExample.scala | Scala | mit | 6,202 |
package mesosphere.marathon.state
import mesosphere.marathon.metrics.Metrics
import scala.concurrent.Future
/**
* This responsibility is in transit:
*
* Current state:
* - all applications are stored as part of the root group in the group repository for every user intended change
* - all applications are stored again in the app repository, if the deployment of that application starts
*
* Future plan:
* - the applications should be always loaded via the groupManager or groupRepository.
* - the app repository is used to store versions of the application
*
* Until this plan is implemented, please think carefully when to use the app repository!
*/
class AppRepository(
val store: EntityStore[AppDefinition],
val maxVersions: Option[Int] = None,
val metrics: Metrics)
extends EntityRepository[AppDefinition] {
import scala.concurrent.ExecutionContext.Implicits.global
def allPathIds(): Future[Iterable[PathId]] = allIds().map(_.map(PathId.fromSafePath))
def currentVersion(appId: PathId): Future[Option[AppDefinition]] = currentVersion(appId.safePath)
def listVersions(appId: PathId): Future[Iterable[Timestamp]] = listVersions(appId.safePath)
def expunge(appId: PathId): Future[Iterable[Boolean]] = expunge(appId.safePath)
/**
* Returns the app with the supplied id and version.
*/
def app(appId: PathId, version: Timestamp): Future[Option[AppDefinition]] =
entity(appId.safePath, version)
/**
* Stores the supplied app, now the current version for that apps's id.
*/
def store(appDef: AppDefinition): Future[AppDefinition] =
storeWithVersion(appDef.id.safePath, appDef.version, appDef)
/**
* Returns the current version for all apps.
*/
def apps(): Future[Iterable[AppDefinition]] = current()
/**
* Returns a map from PathIds to current app timestamps.
*/
def currentAppVersions(): Future[Map[PathId, Timestamp]] =
for (as <- apps()) yield as.map { a => a.id -> a.version }.toMap
}
| vivekjuneja/marathon | src/main/scala/mesosphere/marathon/state/AppRepository.scala | Scala | apache-2.0 | 2,000 |
import play.api.libs.json._
import play.api.libs.json.Reads._
import play.api.libs.functional.syntax._
val json: JsValue = Json.parse("""
{
"name" : "Watership Down",
"location" : {
"lat" : 51.235685,
"long" : -1.309197
},
"residents" : [ {
"name" : "Fiver",
"age" : 4,
"role" : null
}, {
"name" : "Bigwig",
"age" : 6,
"role" : "Owsla"
} ]
}
""")
case class Location(lat: Double, long: Double)
case class Resident(name: String, age: Int, role: Option[String])
case class Place(name: String, location: Location, residents: Seq[Resident])
implicit val residentFormat = Json.format[Resident]
implicit val locationFormat = Json.format[Location]
implicit val placeFormat = Json.format[Place]
val place = json.as[Place]
//this should fail to read as age is non-optional and Peter has not sent his age
val invalidJson: JsValue = Json.parse("""
{
"name" : "Watership Up",
"location" : {
"lat" : 91.235685,
"long" : -1.309197
},
"residents" : [ {
"name" : "Peter",
"role" : "boss"
}, {
"name" : "Peterik",
"age" : 4,
"role" : "chlapcek"
} ]
}
""")
val home = invalidJson.validate[Place] match {
case place:JsSuccess[Place] => place
case e:JsError => println(s"Ooops! Validation error. This does not pass! ${e.toString()}")
}
| PeterPerhac/cheat-sheets | scala-scratches/json-working-with-json-in-play.scala | Scala | unlicense | 1,311 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util.collection
import java.util.Comparator
import org.apache.spark.storage.DiskBlockObjectWriter
/**
 * A common interface for size-tracking collections of key-value pairs that
 * - Have an associated partition for each key-value pair.
 * - Support a memory-efficient sorted iterator
 * - Support a WritablePartitionedIterator for writing the contents directly as bytes.
 */
private[spark] trait WritablePartitionedPairCollection[K, V] {
/**
   * Insert a key-value pair with a partition into the collection.
*/
def insert(partition: Int, key: K, value: V): Unit
/**
   * Iterate through the data in order of partition ID and then the given comparator. This may
   * destroy the underlying collection.
*/
def partitionedDestructiveSortedIterator(keyComparator: Option[Comparator[K]])
: Iterator[((Int, K), V)]
/**
   * Iterate through the data and write out the elements instead of returning them. Records are
   * returned in order of their partition ID and then the given comparator.
   * This may destroy the underlying collection.
*/
def destructiveSortedWritablePartitionedIterator(keyComparator: Option[Comparator[K]])
: WritablePartitionedIterator = {
val it = partitionedDestructiveSortedIterator(keyComparator)
new WritablePartitionedIterator {
private[this] var cur = if (it.hasNext) it.next() else null
def writeNext(writer: DiskBlockObjectWriter): Unit = {
writer.write(cur._1._2, cur._2)
cur = if (it.hasNext) it.next() else null
}
def hasNext(): Boolean = cur != null
def nextPartition(): Int = cur._1._1
}
}
}
private[spark] object WritablePartitionedPairCollection {
/**
   * A comparator for (Int, K) pairs that orders them by only their partition ID.
*/
def partitionComparator[K]: Comparator[(Int, K)] = new Comparator[(Int, K)] {
override def compare(a: (Int, K), b: (Int, K)): Int = {
a._1 - b._1
}
}
/**
   * A comparator for (Int, K) pairs that orders them both by their partition ID and a key ordering:
   * entries are compared by partition ID first, then by the given key comparator as a secondary sort.
*/
def partitionKeyComparator[K](keyComparator: Comparator[K]): Comparator[(Int, K)] = {
new Comparator[(Int, K)] {
override def compare(a: (Int, K), b: (Int, K)): Int = {
val partitionDiff = a._1 - b._1
if (partitionDiff != 0) {
partitionDiff
} else {
keyComparator.compare(a._2, b._2)
}
}
}
}
}
/**
 * Iterator that writes elements to a DiskBlockObjectWriter instead of returning them. Each element
 * has an associated partition.
*/
private[spark] trait WritablePartitionedIterator {
  /**
   * Writes the key and value of the current element, but does not write its partition ID.
   */
def writeNext(writer: DiskBlockObjectWriter): Unit
def hasNext(): Boolean
def nextPartition(): Int
}
| tophua/spark1.52 | core/src/main/scala/org/apache/spark/util/collection/WritablePartitionedPairCollection.scala | Scala | apache-2.0 | 4,374 |
package io.github.binaryfoo.lagotto.mmap
import java.nio.ByteBuffer
import scala.collection.immutable.StringLike
/**
* Odd (and probably failed) attempt at processing records faster by using a memory mapped buffer.
*/
case class LiteString(buf: ByteBuffer, start: Int = 0, override val length: Int) extends CharSequence {
val absoluteEnd = start + length - 1
override def charAt(index: Int): Char = {
if (index < 0 || index >= length)
throw new IllegalArgumentException(s"Invalid index $index. Not in [0-$length)")
buf.get(start + index).asInstanceOf[Char]
}
override def subSequence(start: Int, end: Int): LiteString = LiteString(buf, this.start + start, end - start)
override def toString: String = {
buf.synchronized {
val bytes = new Array[Byte](length)
buf.position(start)
buf.get(bytes)
new String(bytes)
}
}
def substring(start: Int, end: Int): LiteString = subSequence(start, end)
def substring(start: Int): LiteString = subSequence(start, length)
def contains(s: CharSequence): Boolean = indexOf(s) >= 0
def indexOf(s: CharSequence): Int = indexOf(s, 0)
def indexOf(s: CharSequence, from: Int): Int = {
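    // Naive substring search in the spirit of java.lang.String.indexOf: scan for a byte
    // matching the first target character, then try to match the rest of the region;
    // returns -1 when no match is found.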
def source(i: Int) = buf.get(i)
def target(i: Int) = s.charAt(i).asInstanceOf[Byte]
val first = target(0)
var i = start + from
def regionMatch(i: Int): Boolean = {
var j = i + 1
var k = 1
while (j <= absoluteEnd && k < s.length) {
if (source(j) != target(k)) {
return false
}
j += 1
k += 1
}
k == s.length
}
while (i <= absoluteEnd) {
while (i < absoluteEnd && source(i) != first) {
i += 1
}
if (i <= absoluteEnd) {
if (regionMatch(i)) {
return i - start
}
}
i += 1
}
-1
}
override def equals(other: Any): Boolean = other match {
case o: CharSequence =>
val i = indexOf(o)
i == 0 && o.length() == length
case _ => false
}
def indexOf(c: Char, from: Int = 0): Int = {
val i = start + from
if (i > absoluteEnd) {
-1
} else if (buf.get(i) == c) {
from
} else {
indexOf(c, from + 1)
}
}
def split(c: Char, from: Int = 0): Stream[LiteString] = {
val i = indexOf(c, from)
if (i == -1) {
subSequence(from, length) #:: Stream.empty
} else {
subSequence(from, i) #:: split(c, i + 1)
}
}
def unapply(s: LiteString): Option[LiteString] = {
if (s != null && s.equals(this))
Option(s)
else
None
}
}
object LiteString {
def untilEnd(buf: ByteBuffer, start: Int = 0) = {
val length = buf.limit() - start
LiteString(buf, start, length)
}
def lite(s: String) = untilEnd(ByteBuffer.wrap(s.getBytes))
}
| binaryfoo/lagotto | src/main/scala/io/github/binaryfoo/lagotto/mmap/LiteString.scala | Scala | mit | 2,798 |
package concrete
package constraint
package semantic
import bitvectors.BitVector
import concrete.util.IntIntMap
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
object Element {
def apply(result: Variable, index: Variable, varsIdx: Seq[(Int, Variable)]): Seq[Constraint] = {
val lastIndex = varsIdx.map(_._1).max
val vars = Array.ofDim[Variable](lastIndex + 1)
for ((i, v) <- varsIdx) {
vars(i) = v
}
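    // Dispatch on structure: result == index uses the specialised ElementRI, an array of
    // assigned (constant) variables uses ElementVal, and the general case uses ElementWatch.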
if (result eq index) {
Seq(new ElementRI(result, vars))
} else if (vars.forall(v => (v eq null) || v.initDomain.isAssigned)) {
val values = vars.map(Option(_).map(_.initDomain.singleValue))
Seq(new ElementVal(result, index, values))
} else {
Seq(new ElementWatch(result, index, vars))
}
}
}
trait Element extends Constraint {
private lazy val vars2pos = {
val scopeIndices = scope.zipWithIndex.drop(2).toMap
Array.tabulate(vars.length)(i => Option(vars(i)).map(scopeIndices).getOrElse(-1))
}
def vars: Array[Variable]
def index: Variable
def result: Variable
override def init(ps: ProblemState): Outcome = {
ps.filterDom(index)(i => i >= 0 && i < vars.length && (vars(i) ne null))
}
def check(tuple: Array[Int]): Boolean = {
tuple(1) < vars.length &&
(vars(tuple(1)) ne null) &&
(tuple(0) == tuple(vars2pos(tuple(1))))
}
protected def reviseAssignedIndex(ps: ProblemState, index: Int, resultDom: Domain): Outcome = {
val selectedVar = vars(index)
//println(selectedVar.toString(ps))
val intersect = ps.dom(selectedVar) & resultDom
// println(s"${ps.dom(selectedVar)} & $resultDom = $intersect")
ps
.updateDom(selectedVar, intersect)
.updateDom(result, intersect)
.entailIf(this, _ => intersect.isAssigned)
}
}
/**
* Special case of Element to be used when the array contains only constants.
* Special case of binary constraint: indices have exactly one support
*
* @param result : result variable
* @param index : index variables
* @param valuesOpt : array of optional values
*/
class ElementVal(val result: Variable, val index: Variable, val valuesOpt: Array[Option[Int]])
extends Constraint(Array(result, index)) {
private val offset = valuesOpt.flatten.min
var values: Array[Int] = _
var indices: Array[Array[Int]] = _
def advise(ps: ProblemState, event: Event, position: Int): Int = ps.card(result) + ps.card(index)
def check(tuple: Array[Int]): Boolean = {
valuesOpt(tuple(1)).contains(tuple(0))
}
def init(ps: ProblemState): Outcome = {
values = valuesOpt.map {
case Some(v) => v
case None => Int.MinValue
}
val allIndices = new mutable.HashMap[Int, mutable.Seq[Int]]().withDefaultValue(new ArrayBuffer())
for (i <- ps.dom(index); value <- valuesOpt(i)) {
allIndices(value) :+= i
}
indices = Array.tabulate(allIndices.keysIterator.max - offset + 1)(i => allIndices(i + offset).toArray)
ps.filterDom(index)(valuesOpt(_).isDefined)
.filterDom(result)(allIndices(_).nonEmpty)
}
def revise(ps: ProblemState, mod: BitVector): Outcome = {
val res = ps.dom(result)
ps.filterDom(index)(i => res.contains(values(i)))
.andThen { ps =>
val iDom = ps.dom(index)
ps.filterDom(result)(v => indices(v - offset).exists(iDom))
}
.entailIf(this, _.dom(index).isAssigned)
}
def simpleEvaluation: Int = 1
}
/**
* Special case of Element to be used when result = index
*
* @param resultIndex : result/index variable
* @param vars : array variables
*/
class ElementRI(val resultIndex: Variable, val vars: Array[Variable]) extends Constraint(resultIndex +: vars.flatMap(Option(_))) {
private lazy val vars2pos = {
val scopeIndices = scope.zipWithIndex.drop(1).toMap
Array.tabulate(vars.length)(i => Option(vars(i)).map(scopeIndices).getOrElse(-1))
}
def advise(problemState: ProblemState, event: Event, position: Int): Int = arity
def check(tuple: Array[Int]): Boolean = {
tuple(0) < vars.length &&
(vars(tuple(0)) ne null) &&
(tuple(0) == tuple(vars2pos(tuple(0))))
}
def init(ps: ProblemState): Outcome = ps
def revise(ps: ProblemState, mod: BitVector): Outcome = {
ps.filterDom(resultIndex) { i =>
ps.dom(vars(i)).contains(i)
}.andThen { ps =>
if (ps.dom(resultIndex).isAssigned) {
val value = ps.dom(resultIndex).singleValue
ps.tryAssign(vars(value), value).entail(this)
} else {
ps
}
}
}
def simpleEvaluation: Int = 2
}
/**
* Standard Element constraint implemented with watches
*
* @param result : result variable
* @param index : index variable
* @param vars : array variables
*/
class ElementWatch(val result: Variable,
val index: Variable,
val vars: Array[Variable])
extends Constraint(result +: index +: vars.filter(_ ne null)) with Element {
require(result ne index)
private val pos2vars = Array.fill(arity)(-1) //new Array[Int](arity)
fillPos2Vars(2, 0)
/**
   * indexResidues(i) contains a value v that supports index i (i.e. both result and vars(i) contain v)
*/
private[semantic] val indexResidues = new IntIntMap(index.initDomain.size)
private val card: Int = vars.filter(_ ne null).map(_.initDomain.size).max
/**
   * resultResidues(v) contains an index i that supports value v (i.e. i is a valid index
   * and vars(i) contains v)
*/
private[semantic] var resultResidues = new IntIntMap(result.initDomain.size)
def advise(ps: ProblemState, event: Event, pos: Int): Int = {
card * ps.card(index)
}
def revise(ps: ProblemState, mod: BitVector): Outcome = {
val rDom = ps.dom(result)
val afterIndex = if (mod(1) && mod.size == 1) {
// Index revision is skipped if it is the only one modified
ps
} else {
ps.filterDom(index) { i =>
val validResidue = indexResidues.get(i)
.exists(v => rDom.contains(v) && ps.dom(vars(i)).contains(v))
validResidue || {
val support = (rDom & ps.dom(vars(i))).headOption
for (v <- support) indexResidues.justPut(i, v)
support.isDefined
}
}
}
afterIndex.andThen { ps =>
val iDom = ps.dom(index)
if (iDom.isAssigned) {
reviseAssignedIndex(ps, iDom.head, rDom)
} else {
ps.filterDom(result) { v =>
val validResidue = resultResidues.get(v)
.exists(i => iDom.contains(i) && ps.dom(vars(i)).contains(v))
validResidue || {
val support = iDom.find(i => ps.dom(vars(i)).contains(v))
for (i <- support) resultResidues.justPut(v, i)
support.nonEmpty
}
}
}
}
}
override def toString(ps: ProblemState): String = toString(ps, "AC")
def toString(ps: ProblemState, consistency: String): String = {
s"${result.toString(ps)} =$consistency= ${index.toString(ps)}th of [${
      vars.map(Option(_).map(_.toString(ps)).getOrElse("{}")).mkString("\n")
}]"
}
def simpleEvaluation: Int = 3
private def fillPos2Vars(scopeI: Int, varsI: Int): Unit = {
if (scopeI < arity) {
Option(vars(varsI)) match {
case None => fillPos2Vars(scopeI, varsI + 1)
case Some(v) =>
assert(scope(scopeI) == v)
pos2vars(scopeI) = varsI
fillPos2Vars(scopeI + 1, varsI + 1)
}
}
}
}
| concrete-cp/concrete | src/main/scala/concrete/constraint/semantic/Element.scala | Scala | lgpl-2.1 | 7,471 |
package com.mesosphere.universe.v3.model
import com.twitter.util.Return
import com.twitter.util.Throw
import com.twitter.util.Try
import org.scalatest.Assertion
import org.scalatest.FreeSpec
class DcosReleaseVersionParserSpec extends FreeSpec {
private[this] val regex = DcosReleaseVersionParser.fullRegex.toString
"DcosReleaseVersionParser should" - {
"succeed for" - {
"1" in {
val Return(parse) = DcosReleaseVersionParser.parse("1")
assertResult(DcosReleaseVersion(DcosReleaseVersion.Version(1)))(parse)
}
// scalastyle:off magic.number
"10.200.3000.40000.500000-oneMillion" in {
val Return(parse) = DcosReleaseVersionParser.parse("10.200.3000.40000.500000-oneMillion")
val expected = DcosReleaseVersion(
DcosReleaseVersion.Version(10),
List(DcosReleaseVersion.Version(200), DcosReleaseVersion.Version(3000), DcosReleaseVersion.Version(40000), DcosReleaseVersion.Version(500000)),
Some(DcosReleaseVersion.Suffix("oneMillion"))
)
assertResult(expected)(parse)
}
// scalastyle:off magic.number
"1-ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789" in {
val Return(parse) = DcosReleaseVersionParser.parse("1-ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789")
val expected = DcosReleaseVersion(
DcosReleaseVersion.Version(1),
List.empty,
Some(DcosReleaseVersion.Suffix("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"))
)
assertResult(expected)(parse)
}
"1.2.3-alpha.7" in {
val Return(parse) = DcosReleaseVersionParser.parse("1.2.3-alpha.7")
val expected = DcosReleaseVersion(
DcosReleaseVersion.Version(1),
List(DcosReleaseVersion.Version(2), DcosReleaseVersion.Version(3)),
Some(DcosReleaseVersion.Suffix("alpha.7"))
)
assertResult(expected)(parse)
}
"1.2.3-abc123-aA.123-pP.99-ts" in {
val Return(parse) = DcosReleaseVersionParser.parse("1.2.3-abc123-aA.123-pP.99-ts")
val expected = DcosReleaseVersion(
DcosReleaseVersion.Version(1),
List(DcosReleaseVersion.Version(2), DcosReleaseVersion.Version(3)),
Some(DcosReleaseVersion.Suffix("abc123-aA.123-pP.99-ts"))
)
assertResult(expected)(parse)
}
}
"fail for" - {
"empty string" in {
assertAssertionError(s"assertion failed: Value must not be empty") {
DcosReleaseVersionParser.parse("")
}
}
"only spaces" in {
assertAssertionError(s"assertion failed: Value must not be empty") {
DcosReleaseVersionParser.parse(" ")
}
}
"01" in {
assertAssertionError(s"assertion failed: Value '01' does not conform to expected format $regex") {
DcosReleaseVersionParser.parse("01")
}
}
"1.2.3-" in {
assertAssertionError(s"assertion failed: Value '1.2.3-' does not conform to expected format $regex") {
DcosReleaseVersionParser.parse("1.2.3-")
}
}
"2.01" in {
assertAssertionError(s"assertion failed: Value '2.01' does not conform to expected format $regex") {
DcosReleaseVersionParser.parse("2.01")
}
}
}
}
private[this] def assertAssertionError[T](expectedMessage: String)(f: => Try[T]): Assertion = {
f match {
case Throw(ae: AssertionError) =>
assertResult(expectedMessage)(ae.getMessage)
case Throw(t) =>
fail("unexpected throwable", t)
case Return(r) =>
fail(s"Return($r) when expected an assertion error")
}
}
}
| dcos/cosmos | cosmos-test-common/src/test/scala/com/mesosphere/universe/v3/model/DcosReleaseVersionParserSpec.scala | Scala | apache-2.0 | 3,718 |
package com.lyrx.text
import java.io.File
import com.lyrx.latex.LTXPDFProcessor
/**
* Created by extawe on 10/13/16.
*/
trait Processor {
def output():Either[File,String]
def process(fileName:String): Processor
}
| lyrx/lyrxgenerator | src/main/scala/com/lyrx/text/Processor.scala | Scala | gpl-3.0 | 227 |
/*
* ============= Ryft-Customized BSD License ============
* Copyright (c) 2015, Ryft Systems, Inc.
* All rights reserved.
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
* 3. All advertising materials mentioning features or use of this software must display the following acknowledgement:
* This product includes software developed by Ryft Systems, Inc.
* 4. Neither the name of Ryft Systems, Inc. nor the names of its contributors may be used
* to endorse or promote products derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY RYFT SYSTEMS, INC. ''AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL RYFT SYSTEMS, INC. BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* ============
*/
package com.ryft.spark.connector.query.value
/**
* Implementation of Ryft edit distance search
*/
case class EditValue(expression: String,
distance: Int,
width: Int,
caseSensitive: Boolean = false) extends RyftQueryValue { // toDo: check optionality
  require(distance >= 0, "Distance must not be negative")
  require(width >= 0, "Width must not be negative")
override def toString: String = {
s"""FEDS("$expression", CS=$caseSensitive, DIST=$distance, WIDTH=$width)"""
}
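  // Worked example (added for illustration): EditValue("hello", distance = 2, width = 10)
  // renders via toString above as: FEDS("hello", CS=false, DIST=2, WIDTH=10)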
}
| getryft/spark-ryft-connector | spark-ryft-connector/src/main/scala/com/ryft/spark/connector/query/value/EditValue.scala | Scala | bsd-3-clause | 2,288 |
package com.paypal.genio
import org.json4s.JsonAST.{JString, JObject, JValue}
/**
* Created by akgoel on 03/07/15.
*/
trait Service {
def serviceName(): String
def servicePath(): String
def serviceRoot(): String
def schemas(): Map[String, Schema]
def resources(): Map[String, Resource]
}
class ServiceGDD(parsedSpec: Map[String, Any]) extends Service {
override def serviceName():String = parsedSpec.get("name").get.asInstanceOf[String]
override def resources(): Map[String, Resource] = ???
override def schemas(): Map[String, Schema] = ???
override def servicePath(): String = parsedSpec.get("servicePath").get.asInstanceOf[String]
override def serviceRoot(): String = parsedSpec.get("rootUrl").get.asInstanceOf[String]
}
class ServiceSwagger(parsedSpec: Map[String, Any]) extends Service {
override def serviceName(): String = Utils.readMapEntity(parsedSpec, "info.title")
override def resources(): Map[String, Resource] = ???
  override def schemas(): Map[String, Schema] = ???
override def servicePath():String = parsedSpec.get("basePath").get.asInstanceOf[String]
override def serviceRoot():String = parsedSpec.get("host").get.asInstanceOf[String]
} | prannamalai/genio-scala | src/main/scala/Parser.scala | Scala | apache-2.0 | 1,204 |
package barneshut
import java.util.concurrent._
import scala.collection._
import org.scalatest.FunSuite
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import common._
import scala.math._
import scala.collection.parallel._
import barneshut.conctrees.ConcBuffer
@RunWith(classOf[JUnitRunner])
class BarnesHutSuite extends FunSuite {
// test cases for quad tree
import FloatOps._
test("Empty: center of mass should be the center of the cell") {
val quad = Empty(51f, 46.3f, 5f)
assert(quad.massX == 51f, s"${quad.massX} should be 51f")
assert(quad.massY == 46.3f, s"${quad.massY} should be 46.3f")
}
test("Empty: mass should be 0") {
val quad = Empty(51f, 46.3f, 5f)
assert(quad.mass == 0f, s"${quad.mass} should be 0f")
}
test("Empty: total should be 0") {
val quad = Empty(51f, 46.3f, 5f)
assert(quad.total == 0, s"${quad.total} should be 0")
}
test("Leaf with 1 body") {
val b = new Body(123f, 18f, 26f, 0f, 0f)
val quad = Leaf(17.5f, 27.5f, 5f, Seq(b))
assert(quad.mass ~= 123f, s"${quad.mass} should be 123f")
assert(quad.massX ~= 18f, s"${quad.massX} should be 18f")
assert(quad.massY ~= 26f, s"${quad.massY} should be 26f")
assert(quad.total == 1, s"${quad.total} should be 1")
}
test("Fork with 3 empty quadrants and 1 leaf (nw)") {
val b = new Body(123f, 18f, 26f, 0f, 0f)
val nw = Leaf(17.5f, 27.5f, 5f, Seq(b))
val ne = Empty(22.5f, 27.5f, 5f)
val sw = Empty(17.5f, 32.5f, 5f)
val se = Empty(22.5f, 32.5f, 5f)
val quad = Fork(nw, ne, sw, se)
assert(quad.centerX == 20f, s"${quad.centerX} should be 20f")
assert(quad.centerY == 30f, s"${quad.centerY} should be 30f")
assert(quad.mass ~= 123f, s"${quad.mass} should be 123f")
assert(quad.massX ~= 18f, s"${quad.massX} should be 18f")
assert(quad.massY ~= 26f, s"${quad.massY} should be 26f")
assert(quad.total == 1, s"${quad.total} should be 1")
}
test("Empty.insert(b) should return a Leaf with only that body") {
val quad = Empty(51f, 46.3f, 5f)
val b = new Body(3f, 54f, 46f, 0f, 0f)
val inserted = quad.insert(b)
inserted match {
case Leaf(centerX, centerY, size, bodies) =>
assert(centerX == 51f, s"$centerX should be 51f")
assert(centerY == 46.3f, s"$centerY should be 46.3f")
assert(size == 5f, s"$size should be 5f")
assert(bodies == Seq(b), s"$bodies should contain only the inserted body")
case _ =>
fail("Empty.insert() should have returned a Leaf, was $inserted")
}
}
// test cases for Body
test("Body.updated should do nothing for Empty quad trees") {
val b1 = new Body(123f, 18f, 26f, 0f, 0f)
val body = b1.updated(Empty(50f, 60f, 5f))
assert(body.xspeed == 0f)
assert(body.yspeed == 0f)
}
test("Body.updated should take bodies in a Leaf into account") {
val b1 = new Body(123f, 18f, 26f, 0f, 0f)
val b2 = new Body(524.5f, 24.5f, 25.5f, 0f, 0f)
val b3 = new Body(245f, 22.4f, 41f, 0f, 0f)
val quad = Leaf(15f, 30f, 20f, Seq(b2, b3))
val body = b1.updated(quad)
assert(body.xspeed ~= 12.587037f)
assert(body.yspeed ~= 0.015557117f)
}
// test cases for sector matrix
test("'SectorMatrix.+=' should add a body at (25,47) to the correct bucket of a sector matrix of size 96") {
val body = new Body(5, 25, 47, 0.1f, 0.1f)
val boundaries = new Boundaries()
boundaries.minX = 1
boundaries.minY = 1
boundaries.maxX = 97
boundaries.maxY = 97
val sm = new SectorMatrix(boundaries, SECTOR_PRECISION)
sm += body
val res = sm(2, 3).size == 1 && sm(2, 3).find(_ == body).isDefined
assert(res, s"Body not found in the right sector")
}
// test("'Simulator.computeBoundaries' should compute correctly for 2 boundaries") {
// val b1 = new Boundaries()
// b1.minX = 1
// b1.minY = 4
// b1.maxX = 97
// b1.maxY = 150
//
// val b2 = new Boundaries()
// b2.minX = 20
// b2.minY = 1
// b2.maxX = 160
// b2.maxY = 130
//
// val b3 = new Boundaries()
// b3.minX = 1
// b3.minY = 1
// b3.maxX = 160
// b3.maxY = 150
//
// val res = computeBoundaries
// assert(res, s"Body not found in the right sector")
// }
}
object FloatOps {
private val precisionThreshold = 1e-4
/** Floating comparison: assert(float ~= 1.7f). */
implicit class FloatOps(val self: Float) extends AnyVal {
def ~=(that: Float): Boolean =
abs(self - that) < precisionThreshold
}
/** Long floating comparison: assert(double ~= 1.7). */
implicit class DoubleOps(val self: Double) extends AnyVal {
def ~=(that: Double): Boolean =
abs(self - that) < precisionThreshold
}
/** Floating sequences comparison: assert(floatSeq ~= Seq(0.5f, 1.7f). */
implicit class FloatSequenceOps(val self: Seq[Float]) extends AnyVal {
def ~=(that: Seq[Float]): Boolean =
self.size == that.size &&
self.zip(that).forall { case (a, b) =>
abs(a - b) < precisionThreshold
}
}
}
| yurii-khomenko/fpScalaSpec | c3w4barneshut/src/test/scala/barneshut/BarnesHutSuite.scala | Scala | gpl-3.0 | 5,041 |
/*
* Copyright 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.vertx.scala.tests.lang
import org.vertx.scala.core.http.HttpServerRequest
import org.vertx.scala.platform.Verticle
class VerticleClass extends Verticle {
override def start() {
vertx.createHttpServer().requestHandler { req: HttpServerRequest =>
req.response().end("Hello verticle class!")
}.listen(8080)
}
} | vert-x/mod-lang-scala | src/test/scala/org/vertx/scala/tests/lang/VerticleClass.scala | Scala | apache-2.0 | 956 |
package spark.deploy.client
/**
* Callbacks invoked by deploy client when various events happen. There are currently four events:
* connecting to the cluster, disconnecting, being given an executor, and having an executor
* removed (either due to failure or due to revocation).
*
* Users of this API should *not* block inside the callback methods.
*/
private[spark] trait ClientListener {
def connected(jobId: String): Unit
def disconnected(): Unit
def executorAdded(id: String, workerId: String, host: String, cores: Int, memory: Int): Unit
def executorRemoved(id: String, message: String): Unit
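  // Illustrative sketch (not part of the original file): a minimal, non-blocking implementation
  // might simply log each event, e.g.
  //   class LoggingClientListener extends ClientListener {
  //     def connected(jobId: String): Unit = println(s"connected to job $jobId")
  //     def disconnected(): Unit = println("disconnected")
  //     def executorAdded(id: String, workerId: String, host: String, cores: Int, memory: Int): Unit = ()
  //     def executorRemoved(id: String, message: String): Unit = ()
  //   }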
}
| joeywen/spark_cpp_api | core/src/main/scala/spark/deploy/client/ClientListener.scala | Scala | bsd-3-clause | 617 |
package security
import javax.inject.{Inject, Singleton}
@Singleton
class SurveyActionBuilder @Inject()(builder: Intake24RestrictedActionBuilder) {
}
| digitalinteraction/intake24 | ApiPlayServer/app/security/SurveyActionBuilder.scala | Scala | apache-2.0 | 155 |
package eventstore
package akka
package examples
import _root_.akka.actor._
import scala.concurrent.duration._
import eventstore.akka.tcp.ConnectionActor
object CountAll extends App {
val system = ActorSystem()
val connection = system.actorOf(ConnectionActor.props(), "connection")
val countAll = system.actorOf(Props[CountAll](), "count-all")
system.actorOf(SubscriptionActor.props(connection, countAll, None, None, Settings.Default), "subscription")
}
class CountAll extends Actor with ActorLogging {
context.setReceiveTimeout(1.second)
def receive = count(0)
def count(n: Long, printed: Boolean = false): Receive = {
case _: IndexedEvent => context become count(n + 1)
case LiveProcessingStarted => log.info("live processing started")
case ReceiveTimeout if !printed =>
log.info("count {}", n)
context become count(n, printed = true)
}
} | EventStore/EventStore.JVM | examples/src/main/scala/eventstore/akka/examples/CountAll.scala | Scala | bsd-3-clause | 891 |
package com.rocketfuel.sdbc.cassandra.datastax.implementation
import com.datastax.driver.core.{Row => CRow}
private[sdbc] trait RowMethods {
self: ParameterValues with IndexImplicits =>
implicit class Row(underlying: CRow) {
def get[T](ix: Index)(implicit getter: RowGetter[T]): Option[T] = {
getter(underlying, ix)
}
def getParameters(implicit getter: RowGetter[ParameterValue]): IndexedSeq[Option[ParameterValue]] = {
IndexedSeq.tabulate(underlying.getColumnDefinitions.size())(ix => get[ParameterValue](ix))
}
def getParametersByName(implicit getter: RowGetter[ParameterValue]): Map[String, Option[ParameterValue]] = {
getParameters.zipWithIndex.foldLeft(Map.empty[String, Option[ParameterValue]]) {
case (accum, (value, ix)) =>
accum + (underlying.getColumnDefinitions.getName(ix) -> value)
}
}
}
}
| wdacom/sdbc | cassandra/src/main/scala/com/rocketfuel/sdbc/cassandra/datastax/implementation/RowMethods.scala | Scala | bsd-3-clause | 883 |
package scala.models
import scala.generator._
import com.bryzek.apidoc.generator.v0.models.{File, InvocationForm}
import org.scalatest.{ ShouldMatchers, FunSpec }
class Kafka10ConsumerSpec extends FunSpec with ShouldMatchers {
import KafkaUtil._
import CaseClassUtil._
val json = models.TestHelper.buildJson("""
"imports": [],
"headers": [],
"info": [],
"enums": [],
"unions": [],
"attributes": [],
"models": [
{
"name": "member",
"attributes": [],
"plural": "members",
"fields": [
{ "name": "id", "type": "long", "required": true, "attributes": [] },
{ "name": "email", "type": "string", "required": true, "attributes": [] },
{ "name": "name", "type": "string", "required": false, "attributes": [] },
{ "name": "foo", "type": "string", "required": true, "attributes": [] }
]
},
{
"name": "kafka_member",
"plural": "kafka_members",
"fields": [{ "name": "v0", "type": "member", "required": true, "attributes": [] }],
"attributes": [
{ "name": "kafka_props",
"value": {
"data_type": "member",
"message_generate_key": "v0.id",
"topic": "s\\"mc.data.member-internal.$instance.$tenant\\""
}
}
]
}
],
"resources": []
""")
lazy val service = models.TestHelper.service(json)
lazy val form = InvocationForm(service)
describe("kafka consumer generator") {
it("generates kafka consumer") {
val generated = Kafka10Consumer.generateCode(form)
models.TestHelper.assertEqualsFile(
"/kafka-10-consumer.txt",
generated(0).contents
)
}
}
describe("topic regex") {
it("replaces tenants correctly") {
Seq(
"s\\"mc.data.member.$tenant\\"" → "s\\"mc.data.member.($tenantsPattern)\\"",
"s\\"mc.data.member.${tenant}\\"" → "s\\"mc.data.member.($tenantsPattern)\\"",
"s\\"mc.data.member.$apiVersion.$instance.$tenant\\"" → "s\\"mc.data.member.$apiVersion.$instance.($tenantsPattern)\\""
) foreach {
case (topicFn, expectedTopicRegex) ⇒
KafkaConsumer.generateTopicRegex(topicFn, "ignored") shouldBe expectedTopicRegex
}
}
}
}
| movio/movio-apidoc-generator | scala-generator/src/test/scala/models/Kafka10ConsumerSpec.scala | Scala | mit | 2,373 |
package io.aos.spark.mllib.scaling
import org.apache.spark.SparkContext
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext._
import org.apache.spark.rdd.RDD
// used for computing meand and standard deviation
import org.apache.spark.rdd.DoubleRDDFunctions
import Scaling._
object SelectVariables {
import GlobalScalingContext._
def main(args: Array[String]) {
if (args.length < 3) {
println("""
Missing arguments...
Usage:
SelectVariables <infile> <outfile> <feature1> ...
""")
System.exit(-1)
}
val infile = args(0)
val outfile = args(1)
val features = args.drop(2).toList
// load the csv file...
val file = sc.textFile(infile)
// extract column names
val colNames = file.first.split("\t").map( _.replaceAll("\"","") )
.zipWithIndex.map(tup => (tup._1 -> tup._2)).toMap
println(colNames)
/* this method extracts the list of features for each record */
def getDataForFeatures(lines: RDD[String]): RDD[List[Double]] = {
val feats = features
val names = colNames
lines.zipWithIndex.filter(elt => elt._2 != 0).map(elt => elt._1).map( line => {
// the row with all features
val fullRow = line.split("\t")
// extract the featute from this row
feats.map( feature => {
if (! colNames.contains(feature)) {
println(110)
println("TEST")
println(s"###### FAILED WITH KEY: $feature " + colNames)
}
val idx = colNames(feature)
if (fullRow.size < idx + 1) println(fullRow)
fullRow(idx).toDouble
})
})
}
    /* This method returns the column identified by feature name
*/
def getDataForFeature(lines: RDD[String], feature: String): RDD[Double] = {
val idx: Int = colNames(feature)
lines.zipWithIndex.filter(elt => elt._2 != 0).map(elt => elt._1).map( line => {
line.split(",")(idx).toDouble
})
}
// now, build a filterd version of the dataset
// each row will be an Array of Double, ordered as features
val filtered = getDataForFeatures(file)
println(filtered.count)
println("saving data in " + outfile)
// create the header entry
val hlst = Array[String](features.mkString(","))
println(hlst)
val hdr: RDD[String] = sc.parallelize(hlst)
println(hdr.count)
println("header ready, cat with data...")
val txtrdd = (hdr ++ filtered.map(elt => elt.mkString(",")))
outfile.startsWith("hdfs://") match {
case false => {
// cat the header with data as strings and save file
println("coalescing the" + txtrdd.count + " lines")
val txt = txtrdd.coalesce(1, true)
println(txt.first)
txt.saveAsTextFile(outfile)
println("saved data")
}
case true => {
println("saving the " + txtrdd.count + " lines")
txtrdd.saveAsTextFile(outfile)
println("saved data")
}
}
}
}
| echalkpad/t4f-data | spark/mllib/src/main/scala/io/aos/spark/mllib/scaling/SelectVariables.scala | Scala | apache-2.0 | 2,823 |
package com.twitter.finagle.service
import com.twitter.finagle.stats.StatsReceiver
import com.twitter.finagle.{BackupRequestLost, SourcedException, Service, SimpleFilter}
import com.twitter.util.{Future, Stopwatch, Throw, Return}
import java.util.concurrent.atomic.AtomicInteger
class StatsFilter[Req, Rep](statsReceiver: StatsReceiver)
extends SimpleFilter[Req, Rep]
{
private[this] val outstandingRequestCount = new AtomicInteger(0)
private[this] val dispatchCount = statsReceiver.counter("requests")
private[this] val successCount = statsReceiver.counter("success")
private[this] val latencyStat = statsReceiver.stat("request_latency_ms")
private[this] val outstandingRequestCountgauge =
statsReceiver.addGauge("pending") { outstandingRequestCount.get }
def apply(request: Req, service: Service[Req, Rep]): Future[Rep] = {
val elapsed = Stopwatch.start()
outstandingRequestCount.incrementAndGet()
service(request) respond { response =>
outstandingRequestCount.decrementAndGet()
response match {
case Throw(BackupRequestLost) =>
// We blackhole this request. It doesn't count for anything.
// After the Failure() patch, this should no longer need to
// be a special case.
case Throw(e) =>
dispatchCount.incr()
latencyStat.add(elapsed().inMilliseconds)
def flatten(ex: Throwable): Seq[String] =
if (ex eq null) Seq[String]() else ex.getClass.getName +: flatten(ex.getCause)
statsReceiver.scope("failures").counter(flatten(e): _*).incr()
e match {
case sourced: SourcedException if sourced.serviceName != "unspecified" =>
statsReceiver
.scope("sourcedfailures")
.counter(sourced.serviceName +: flatten(sourced): _*)
.incr()
case _ =>
}
case Return(_) =>
dispatchCount.incr()
successCount.incr()
latencyStat.add(elapsed().inMilliseconds)
}
}
}
}
| joshbedo/finagle | finagle-core/src/main/scala/com/twitter/finagle/service/StatsFilter.scala | Scala | apache-2.0 | 2,045 |
package slamdata.engine
import slamdata.Predef._
import scala.reflect.ClassTag
import org.specs2.matcher._
import scalaz._
trait TreeMatchers {
def beTree[A](expected: A)(implicit RA: RenderTree[A]): Matcher[A] = new Matcher[A] {
def apply[S <: A](s: Expectable[S]) = {
val v = s.value
def diff = (RA.render(v) diff RA.render(expected)).draw.mkString("\\n")
result(v == expected, s"trees match:\\n$diff", s"trees do not match:\\n$diff", s)
}
}
}
trait TermLogicalPlanMatchers {
import slamdata.engine.analysis.fixplate._
import slamdata.engine.fp._
import slamdata.engine.RenderTree
case class equalToPlan(expected: Term[LogicalPlan]) extends Matcher[Term[LogicalPlan]] {
val equal = Equal[Term[LogicalPlan]].equal _
def apply[S <: Term[LogicalPlan]](s: Expectable[S]) = {
def diff(l: S, r: Term[LogicalPlan]): String = {
val lt = RenderTree[Term[LogicalPlan]].render(l)
val rt = RenderTree[Term[LogicalPlan]].render(r)
RenderTree.show(lt diff rt)(new RenderTree[RenderedTree] { override def render(v: RenderedTree) = v }).toString
}
result(equal(expected, s.value),
"\\ntrees are equal:\\n" + diff(s.value, expected),
"\\ntrees are not equal:\\n" + diff(s.value, expected),
s)
}
}
}
| wemrysi/quasar | core/src/test/scala/slamdata/engine/matchers.scala | Scala | apache-2.0 | 1,318 |
package chat.tox.antox.wrapper
import chat.tox.antox.tox.ToxSingleton
import scala.collection.JavaConversions._
class Group(val key: GroupKey,
val groupNumber: Int,
var name: String,
var alias: String,
var topic: String,
val peers: PeerList) {
var connected = false
def addPeer(tox: ToxCore, peerNumber: Int): Unit = {
val peerKey = tox.getGroupPeerPublicKey(key, peerNumber)
var peerName = tox.getGroupPeerName(key, peerNumber)
if (peerName == null) peerName = ""
// this.peers.addGroupPeer(new GroupPeer(peerKey, peerName, ignored = false))
printPeerList()
}
def printPeerList(): Unit = {
var number = 0
for (peer <- peers.all()) {
number += 1
}
}
def getPeerCount: Int = {
peers.all().size()
}
def clearPeerList(): Unit = {
peers.clear()
}
def leave(partMessage: String): Unit = {
ToxSingleton.tox.deleteGroup(key, partMessage)
}
override def toString: String = name
}
| subliun/Antox | app/src/main/scala/chat/tox/antox/wrapper/Group.scala | Scala | gpl-3.0 | 1,019 |
package pl.touk.nussknacker.engine.api
import pl.touk.nussknacker.engine.api.component.Component
import pl.touk.nussknacker.engine.api.process.ComponentUseCase
import pl.touk.nussknacker.engine.api.test.InvocationCollectors
import pl.touk.nussknacker.engine.api.typed.typing.TypingResult
import scala.concurrent.{ExecutionContext, Future}
/**
* Interface of Enricher/Processor. It has to have one method annotated with
* [[pl.touk.nussknacker.engine.api.MethodToInvoke]]. This method is called for every service invocation.
*
* This could be scala-trait, but we leave it as abstract class for now for java compatibility.
*
* TODO We should consider separate interfaces for java implementation, but right now we convert ProcessConfigCreator
 * from the java one to the scala one, and it seems difficult to convert java CustomStreamTransformer, Service etc. into scala ones
*
* IMPORTANT lifecycle notice:
* Implementations of this class *must not* allocate resources (connections, file handles etc.) unless open() *or* appropriate @MethodToInvoke
* is called
*/
abstract class Service extends Lifecycle with Component
/*
  This is a marker interface for services which have lazy/dynamic parameters. Invocation is handled with ServiceInvoker.
  Lifecycle is handled on the EagerService level (like in a standard Service).
  A sample use case is as follows:
  - Enrichment with data from an SQL database: the ConnectionPool is created at the EagerService level
  - Each ServiceInvoker has a different SQL query and stores its own PreparedStatement
  Please see EagerLifecycleService to see how such a scenario can be achieved; an illustrative sketch also follows the ServiceInvoker trait below.
*/
abstract class EagerService extends Service
trait ServiceInvoker {
def invokeService(params: Map[String, Any])(implicit ec: ExecutionContext,
collector: InvocationCollectors.ServiceInvocationCollector,
contextId: ContextId,
componentUseCase: ComponentUseCase): Future[Any]
}
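
/*
  Illustrative sketch (not part of the original file) of the SQL-enrichment scenario
  described above. All names below (ServiceInvokerSketch, SqlEnricherSketch, invokerFor,
  the dummy result) are assumptions for illustration only, and the sketch assumes the
  Lifecycle hooks have default implementations; a real service would manage its
  connection pool in those hooks and run the query inside invokeService.
 */
private object ServiceInvokerSketch {
  class SqlEnricherSketch extends EagerService {
    // one invoker per configured query; the shared connection pool would live in this service
    def invokerFor(query: String): ServiceInvoker = new ServiceInvoker {
      override def invokeService(params: Map[String, Any])(implicit ec: ExecutionContext,
                                                           collector: InvocationCollectors.ServiceInvocationCollector,
                                                           contextId: ContextId,
                                                           componentUseCase: ComponentUseCase): Future[Any] =
        // placeholder instead of a real database call
        Future.successful(Map("query" -> query, "params" -> params))
    }
  }
}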
| TouK/nussknacker | components-api/src/main/scala/pl/touk/nussknacker/engine/api/Service.scala | Scala | apache-2.0 | 2,054 |
// Project: angulate2-examples
// Module: 06 AngelloLite
// Description: Component for editing Story items
// Copyright (c) 2016. Distributed under the MIT License (see included LICENSE file).
package angellolite
import angulate2._
import scala.scalajs.js
@Component(
selector = "story-form",
templateUrl = "src/main/resources/html/story-form.html",
inputs = @@("story")
)
class StoryFormComponent(storyService: StoryService) {
var story: Story = _
val statuses = js.Array( "Back Log", "To Do", "In Progress", "Code Review", "QA Review", "Verified", "Done")
val types = js.Array("Feature","Enhancement","Bug","Spike")
def save() = storyService.saveStory(story)
}
@Data
case class Status(name: String)
| jokade/angulate2-examples | archive/06_angelloLite/js/src/main/scala/angellolite/StoryFormComponent.scala | Scala | mit | 726 |
package models
import akka.stream.{KillSwitches, SharedKillSwitch}
import play.api.libs.json.{Format, JsPath, Json, Reads}
import scala.language.postfixOps
/**
* Copyright (c) 2017 A. Roberto Fischer
*
* @author A. Roberto Fischer <[email protected]> on 3/14/2017
*/
final case class TwitterAuthCredentials(id: String,
name: String,
screenName: String,
accessToken: String,
accessTokenSecret: String) extends HasID[String] {
lazy val killSwitch: SharedKillSwitch = KillSwitches.shared(toString)
override lazy val toString: String =
id.toString + "__" +
name + "__" +
screenName
}
object TwitterAuthCredentials {
implicit val ordering = new Ordering[TwitterAuthCredentials] {
def compare(a: TwitterAuthCredentials, b: TwitterAuthCredentials): Int = {
a.name compare b.name
}
}
implicit val jsonFormat: Format[TwitterAuthCredentials] = Json.format[TwitterAuthCredentials]
}
| Queendimimi/twitter_extractor | app/models/TwitterAuthCredentials.scala | Scala | apache-2.0 | 1,096 |
package org.bitcoins.util
import org.bitcoinj.core.DumpedPrivateKey
import org.bitcoins.config.TestNet3
import org.bitcoins.crypto.ECFactory
/**
* Created by chris on 3/7/16.
*/
trait CryptoTestUtil {
def privateKeyBase58 = "cVLwRLTvz3BxDAWkvS3yzT9pUcTCup7kQnfT2smRjvmmm1wAP6QT"
def privateKeyBytes = BitcoinSUtil.decodeBase58(privateKeyBase58)
def privateKeyHex = BitcoinSUtil.encodeHex(privateKeyBytes)
def bitcoinjDumpedPrivateKey = new DumpedPrivateKey(BitcoinJTestUtil.params,privateKeyBase58)
def bitcoinjPrivateKey = bitcoinjDumpedPrivateKey.getKey
def privateKey = ECFactory.fromBase58ToPrivateKey(privateKeyBase58,TestNet3)
}
object CryptoTestUtil extends CryptoTestUtil
| Christewart/scalacoin | src/test/scala/org/bitcoins/util/CryptoTestUtil.scala | Scala | mit | 699 |
/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.adam.converters
import htsjdk.samtools.{
CigarElement,
SAMReadGroupRecord,
SAMRecord,
SAMUtils
}
import org.bdgenomics.utils.misc.Logging
import org.bdgenomics.adam.models.{
Attribute,
RecordGroupDictionary,
SequenceDictionary,
SequenceRecord,
TagType
}
import org.bdgenomics.adam.util.AttributeUtils
import org.bdgenomics.formats.avro.AlignmentRecord
import scala.collection.JavaConverters._
class SAMRecordConverter extends Serializable with Logging {
def convert(
samRecord: SAMRecord,
dict: SequenceDictionary,
readGroups: RecordGroupDictionary): AlignmentRecord = {
try {
val cigar: String = samRecord.getCigarString
val startTrim = if (cigar == "*") {
0
} else {
val count = cigar.takeWhile(_.isDigit).toInt
val operator = cigar.dropWhile(_.isDigit).head
if (operator == 'H') {
count
} else {
0
}
}
val endTrim = if (cigar.endsWith("H")) {
// must reverse string as takeWhile is not implemented in reverse direction
cigar.dropRight(1).reverse.takeWhile(_.isDigit).reverse.toInt
} else {
0
}
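      // Illustrative note (not in the original source): for a CIGAR string of "5H20M3H"
      // the logic above yields startTrim = 5 and endTrim = 3; without hard clips both stay 0.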
val builder: AlignmentRecord.Builder = AlignmentRecord.newBuilder
.setReadName(samRecord.getReadName)
.setSequence(samRecord.getReadString)
.setCigar(cigar)
.setBasesTrimmedFromStart(startTrim)
.setBasesTrimmedFromEnd(endTrim)
.setOrigQual(SAMUtils.phredToFastq(samRecord.getOriginalBaseQualities))
// if the quality string is "*", then we null it in the record
// or, in other words, we only set the quality string if it is not "*"
val qual = samRecord.getBaseQualityString
if (qual != "*") {
builder.setQual(qual)
}
// Only set the reference information if the read is aligned, matching the mate reference
// This prevents looking up a -1 in the sequence dictionary
val readReference: Int = samRecord.getReferenceIndex
if (readReference != SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX) {
builder.setContigName(samRecord.getReferenceName)
// set read alignment flag
val start: Int = samRecord.getAlignmentStart
assert(start != 0, "Start cannot equal 0 if contig is set.")
builder.setStart((start - 1))
// set OP and OC flags, if applicable
if (samRecord.getAttribute("OP") != null) {
builder.setOldPosition(samRecord.getIntegerAttribute("OP").toLong - 1)
builder.setOldCigar(samRecord.getStringAttribute("OC"))
}
val end = start.toLong - 1 + samRecord.getCigar.getReferenceLength
builder.setEnd(end)
// set mapping quality
val mapq: Int = samRecord.getMappingQuality
if (mapq != SAMRecord.UNKNOWN_MAPPING_QUALITY) {
builder.setMapq(mapq)
}
}
// set mapping flags
// oddly enough, it appears that reads can show up with mapping
// info (mapq, cigar, position)
// even if the read unmapped flag is set...
// While the meaning of the ReadMapped, ReadNegativeStand,
// PrimaryAlignmentFlag and SupplementaryAlignmentFlag
// are unclear when the read is not mapped or reference is not defined,
// it is nonetheless favorable to set these flags in the ADAM file
// in same way as they appear in the input BAM inorder to match exactly
// the statistics output by other programs, specifically Samtools Flagstat
builder.setReadMapped(!samRecord.getReadUnmappedFlag)
builder.setReadNegativeStrand(samRecord.getReadNegativeStrandFlag)
builder.setPrimaryAlignment(!samRecord.getNotPrimaryAlignmentFlag)
builder.setSupplementaryAlignment(samRecord.getSupplementaryAlignmentFlag)
// Position of the mate/next segment
val mateReference: Int = samRecord.getMateReferenceIndex
if (mateReference != SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX) {
builder.setMateContigName(samRecord.getMateReferenceName)
val mateStart = samRecord.getMateAlignmentStart
if (mateStart > 0) {
// We subtract one here to be 0-based offset
builder.setMateAlignmentStart(mateStart - 1)
}
}
// The Avro scheme defines all flags as defaulting to 'false'. We only
// need to set the flags that are true.
if (samRecord.getFlags != 0) {
if (samRecord.getReadPairedFlag) {
builder.setReadPaired(true)
if (samRecord.getMateNegativeStrandFlag) {
builder.setMateNegativeStrand(true)
}
if (!samRecord.getMateUnmappedFlag) {
builder.setMateMapped(true)
}
if (samRecord.getProperPairFlag) {
builder.setProperPair(true)
}
if (samRecord.getFirstOfPairFlag) {
builder.setReadInFragment(0)
}
if (samRecord.getSecondOfPairFlag) {
builder.setReadInFragment(1)
}
}
if (samRecord.getDuplicateReadFlag) {
builder.setDuplicateRead(true)
}
if (samRecord.getReadFailsVendorQualityCheckFlag) {
builder.setFailedVendorQualityChecks(true)
}
}
var tags = List[Attribute]()
val tlen = samRecord.getInferredInsertSize
if (tlen != 0) {
builder.setInferredInsertSize(tlen)
}
if (samRecord.getAttributes != null) {
samRecord.getAttributes.asScala.foreach {
attr =>
if (attr.tag == "MD") {
builder.setMismatchingPositions(attr.value.toString)
} else {
tags ::= AttributeUtils.convertSAMTagAndValue(attr)
}
}
}
if (tags.nonEmpty) {
builder.setAttributes(tags.mkString("\\t"))
}
val recordGroup: SAMReadGroupRecord = samRecord.getReadGroup
if (recordGroup != null) {
builder.setRecordGroupName(recordGroup.getReadGroupId)
.setRecordGroupSample(recordGroup.getSample)
}
builder.build
} catch {
case t: Throwable => {
log.error("Conversion of read: " + samRecord + " failed.")
throw t
}
}
}
}
| erictu/adam | adam-core/src/main/scala/org/bdgenomics/adam/converters/SAMRecordConverter.scala | Scala | apache-2.0 | 7,029 |
// Copyright (c) 2013, Johns Hopkins University. All rights reserved.
// This software is released under the 2-clause BSD license.
// See /LICENSE.txt
// Travis Wolfe, [email protected], 30 July 2013
package edu.jhu.hlt.parma.util
import collection.JavaConversions._
class Alphabet[T] extends Serializable {
private[this] val t2i = new java.util.HashMap[T, Integer]
private[this] val i2t = new java.util.ArrayList[T]
private[this] var growthStopped = false
def keys = i2t.toSeq
def indices = (0 until size)
def apply(t: T) = lookupIndex(t)
def apply(i: Int) = lookupObject(i)
def contains(t: T) = t2i.containsKey(t)
def lookupObject(i: Int): T = {
if(i < 0) throw new RuntimeException("need non-negative indices: " + i)
if(i >= i2t.size) throw new RuntimeException("that index hasn't been assigned: " + i)
return i2t.get(i)
}
def lookupIndex(t: T): Int = {
val i = t2i.get(t)
if(i == null) {
if( ! growthStopped ) {
i2t.add(t)
t2i.put(t, t2i.size)
return t2i.size - 1
}
else throw new RuntimeException(t + " does not exist!")
}
else return i
}
def lookupIndex(t: T, addIfNotPresent: Boolean = false): Int = {
val i = t2i.get(t)
if(i == null) {
if(addIfNotPresent) {
i2t.add(t)
t2i.put(t, t2i.size)
return t2i.size - 1
}
else throw new RuntimeException(t + " does not exist!")
}
else return i
}
def stopGrowth = {
growthStopped = true
}
def size = {
assert(i2t.size == t2i.size)
i2t.size
}
}
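// Illustrative usage (not part of the original file): indices are handed out in insertion
// order, and lookups are symmetric until growth is stopped.
//   val alphabet = new Alphabet[String]
//   alphabet.lookupIndex("NOUN")   // 0 (added, because growth is not stopped yet)
//   alphabet.lookupIndex("VERB")   // 1
//   alphabet.lookupObject(0)       // "NOUN"
//   alphabet.stopGrowth            // unknown lookups now throw instead of adding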
| hltcoe/parma | src/main/scala/edu/jhu/hlt/parma/util/Alphabet.scala | Scala | bsd-2-clause | 1,664 |
/*
* Copyright © 2015 Reactific Software LLC. All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package rxmongo.messages.cmds
import rxmongo.bson.BSONObject
import rxmongo.messages.{ AuthMechanism, Command }
/** logout
* @see [[http://docs.mongodb.org/master/reference/command/logout]]
* @param db
*/
case class LogoutCmd(db : String) extends Command(db, BSONObject("logout" → 1))
/** authenticate
* Starts an authenticated session using a username and password.
* @see [[http://docs.mongodb.org/master/reference/command/authenticate/#dbcmd.authenticate]]
* @param db
* @param user
* @param pass
* @param mechanism
*/
case class AuthenticateCmd(
db : String,
user : String,
pass : String,
mechanism : AuthMechanism) extends Command(db, BSONObject(
"authenticate" -> 1, "username" -> user, "password" -> pass, "mechanism" -> mechanism.asStr)
)
/** copydbgetnonce
* This is an internal command to generate a one-time password for use with the copydb command.
* @see [[http://docs.mongodb.org/master/reference/command/copydbgetnonce/#dbcmd.copydbgetnonce]]
* @param db
*/
case class CopyDbGetNonceCmd(
db : String) extends Command(db, BSONObject("copydbgetnonce" -> 1))
/** getnonce
* This is an internal command to generate a one-time password for authentication.
* @see [[http://docs.mongodb.org/master/reference/command/getnonce/#dbcmd.getnonce]]
* @param db The name of the database
*/
case class GetNonceCmd(
db : String) extends Command(db, BSONObject("getnonce" -> 1))
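
/*
 * Illustrative usage note (not part of the original file): each command is constructed per
 * database and sent like any other Command, e.g. LogoutCmd("admin") or GetNonceCmd("admin");
 * AuthenticateCmd additionally takes the user, password and an AuthMechanism value (whose
 * concrete members are not shown in this file).
 */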
| reactific/RxMongo | messages/src/main/scala/rxmongo/messages/cmds/AuthenticationCommands.scala | Scala | mit | 2,584 |
// Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
package com.twitter.intellij.pants.testFramework.performance
import java.io.File
import com.intellij.ProjectTopics
import com.intellij.ide.plugins.PluginManagerCore
import com.intellij.openapi.project.DumbService
import com.intellij.openapi.roots.{ModuleRootEvent, ModuleRootListener}
import com.intellij.openapi.util.Ref
import com.intellij.openapi.util.io.FileUtil
import com.twitter.intellij.pants.model.PantsTargetAddress
import com.twitter.intellij.pants.testFramework.PantsIntegrationTestCase
import com.twitter.intellij.pants.util.PantsUtil
import scala.jdk.CollectionConverters._
object PantsPerformanceBenchmark {
def main(args: Array[String]): Unit = {
def nextOption(map : Map[String, String], list: List[String]) : Map[String, String] = {
list match {
case Nil => map
case "-target" :: value :: tail =>
nextOption(map ++ Map("target" -> value), tail)
case "-output" :: value :: tail =>
nextOption(map ++ Map("output" -> value), tail)
case "-disabled-plugins-file" :: value :: tail =>
nextOption(map ++ Map("plugins" -> value), tail)
case option :: tail =>
println("Unknown option " + option)
System.exit(1)
Map()
}
}
val options = nextOption(Map(), args.toList)
runBenchmarkAndOutput(options)
println("Finished!")
System.exit(0)
}
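  // Illustrative invocation (not part of the original file), matching the options parsed
  // above; the paths are placeholders:
  //   -target path/to/project:target -output /tmp/bench.txt -disabled-plugins-file disabled-plugins.txt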
def runBenchmarkAndOutput(options: Map[String, String]) = {
val pluginsToDisable = options.get("plugins").map(FileUtil.loadLines).map(_.asScala).getOrElse(List())
val timings = runBenchmark(options("target"), pluginsToDisable.toSet)
val msg = s"Imported ${timings.target} in " +
s"${timings.projectCreation / 1000}s / " +
s"indexed in ${timings.projectIndexing / 1000}s"
FileUtil.appendToFile(new File(options("output")), s"\\n$msg")
println(msg)
}
def runBenchmark(path: String, pluginsToDisable: Set[String]): Timings = {
val address = PantsTargetAddress.fromString(path)
val buildRoot = PantsUtil.findBuildRoot(new File(address.getPath))
val benchmark = new PantsPerformanceBenchmark(buildRoot.get(), pluginsToDisable)
benchmark.setName("performance benchmark")
benchmark.setUp()
try {
benchmark.run(address.getRelativePath)
}
finally {
try {
benchmark.tearDown()
}
catch {
case ignored: Throwable =>
}
}
}
}
case class Timings(
target: String,
projectCreation: Long = 0L,
projectIndexing: Long = 0L
)
class PantsPerformanceBenchmark(projectFolder: File, pluginsToDisable: Set[String]) extends PantsIntegrationTestCase {
override protected def getProjectFolder = projectFolder
override protected def getRequiredPluginIds = {
val allPluginIds = PluginManagerCore.getLoadedPlugins.asScala.map(_.getPluginId.getIdString).toSet
(allPluginIds -- pluginsToDisable).toArray
}
def run(target: String): Timings = {
println(s"Running performance test with ${PluginManagerCore.getPlugins.count(_.isEnabled)} plugins enabled.")
val importEnd = new Ref(-1L)
val messageBusConnection = myProject.getMessageBus.connect()
messageBusConnection.subscribe(
ProjectTopics.PROJECT_ROOTS,
new ModuleRootListener {
override def beforeRootsChange(event: ModuleRootEvent) = {
// import ends with changing of all the roots
importEnd.set(System.currentTimeMillis)
}
}
)
try {
val importStart = System.currentTimeMillis
doImport(target)
DumbService.getInstance(myProject).waitForSmartMode()
val indexingStart = importEnd.get()
val indexingEnd = System.currentTimeMillis
Timings(
target = target,
projectCreation = importEnd.get() - importStart,
projectIndexing = indexingEnd - indexingStart
)
}
finally {
messageBusConnection.disconnect()
}
}
}
| pantsbuild/intellij-pants-plugin | testFramework/src/main/scala/com/twitter/intellij/pants/testFramework/performance/PantsPerformanceBenchmark.scala | Scala | apache-2.0 | 4,085 |
/**
* sbt-osgi-manager - OSGi development bridge based on Bnd and Tycho.
*
* Copyright (c) 2013-2014 Alexey Aksenov [email protected]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package sbt.osgi.manager.tycho
import java.io.{ PrintWriter, StringWriter }
import org.codehaus.plexus.logging.AbstractLogger
import sbt.osgi.manager.Plugin
import sbt.osgi.manager.support.Support.logPrefix
class Logger(threshold: Int, name: String) extends AbstractLogger(threshold, name) {
def getChildLogger(name: String) = this
def debug(message: String, throwable: Throwable) = Logger.debug(message, throwable)
def error(message: String, throwable: Throwable) = Logger.error(message, throwable)
def fatalError(message: String, throwable: Throwable) = Logger.fatalError(message, throwable)
def info(message: String, throwable: Throwable) = Logger.info(message, throwable)
def warn(message: String, throwable: Throwable) = Logger.warn(message, throwable)
}
object Logger {
def debug(message: String, throwable: Throwable) = logger match {
case Some(logger) ⇒
Option(throwable) match {
case Some(t) ⇒ logger.debug(logPrefix("*") + message + "\n" + getThrowableDump(t))
case None ⇒ logger.debug(logPrefix("*") + message)
}
case None ⇒
Option(throwable) match {
case Some(t) ⇒ System.err.println(logPrefix("*") + "DEBUG: " + message + "\n" + getThrowableDump(t))
case None ⇒ System.err.println(logPrefix("*") + "DEBUG: " + message)
}
}
def error(message: String, throwable: Throwable) = logger match {
case Some(logger) ⇒
Option(throwable) match {
case Some(t) ⇒ logger.error(logPrefix("*") + message + "\n" + getThrowableDump(t))
case None ⇒ logger.error(logPrefix("*") + message)
}
case None ⇒
Option(throwable) match {
case Some(t) ⇒ System.err.println(logPrefix("*") + "ERROR: " + message + "\n" + getThrowableDump(t))
case None ⇒ System.err.println(logPrefix("*") + "ERROR: " + message)
}
}
def fatalError(message: String, throwable: Throwable) = logger match {
case Some(logger) ⇒
Option(throwable) match {
case Some(t) ⇒ logger.error(logPrefix("*") + "FATAL: " + message + "\n" + getThrowableDump(t))
case None ⇒ logger.error(logPrefix("*") + "FATAL: " + message)
}
case None ⇒
Option(throwable) match {
case Some(t) ⇒ System.err.println(logPrefix("*") + "FATAL: " + message + "\n" + getThrowableDump(t))
case None ⇒ System.err.println(logPrefix("*") + "FATAL: " + message)
}
}
def info(message: String, throwable: Throwable) = logger match {
case Some(logger) ⇒
Option(throwable) match {
case Some(t) ⇒ logger.info(logPrefix("*") + message + "\n" + t)
case None ⇒ logger.info(logPrefix("*") + message)
}
case None ⇒
Option(throwable) match {
case Some(t) ⇒ System.err.println(logPrefix("*") + "INFO: " + message + "\n" + getThrowableDump(t))
case None ⇒ System.err.println(logPrefix("*") + "INFO: " + message)
}
}
def warn(message: String, throwable: Throwable) = logger match {
case Some(logger) ⇒
Option(throwable) match {
case Some(t) ⇒ logger.warn(logPrefix("*") + message + "\n" + getThrowableDump(t))
case None ⇒ logger.warn(logPrefix("*") + message)
}
case None ⇒
Option(throwable) match {
case Some(t) ⇒ System.err.println(logPrefix("*") + "WARN: " + message + "\n" + getThrowableDump(t))
case None ⇒ System.err.println(logPrefix("*") + "WARN: " + message)
}
}
/** Get the default logger */
def logger: Option[sbt.Logger] =
Plugin.getLastKnownState.map(_.log)
/** Get throwable as string */
protected def getThrowableDump(throwable: Throwable): String = {
val writer = new StringWriter()
val printWriter = new PrintWriter(writer)
if (throwable != null) throwable.printStackTrace(printWriter)
writer.toString()
}
}
| digimead/sbt-osgi-manager | src/main/scala/sbt/osgi/manager/tycho/Logger.scala | Scala | apache-2.0 | 4,613 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.tuning
import java.util.{List => JList, Locale}
import scala.collection.JavaConverters._
import scala.concurrent.Future
import scala.concurrent.duration.Duration
import org.apache.hadoop.fs.Path
import org.json4s.DefaultFormats
import org.apache.spark.annotation.Since
import org.apache.spark.internal.Logging
import org.apache.spark.ml.{Estimator, Model}
import org.apache.spark.ml.evaluation.Evaluator
import org.apache.spark.ml.param.{IntParam, ParamMap, ParamValidators}
import org.apache.spark.ml.param.shared.{HasCollectSubModels, HasParallelism}
import org.apache.spark.ml.util._
import org.apache.spark.mllib.util.MLUtils
import org.apache.spark.sql.{DataFrame, Dataset}
import org.apache.spark.sql.types.StructType
import org.apache.spark.util.ThreadUtils
/**
* Params for [[CrossValidator]] and [[CrossValidatorModel]].
*/
private[ml] trait CrossValidatorParams extends ValidatorParams {
/**
* Param for number of folds for cross validation. Must be >= 2.
* Default: 3
*
* @group param
*/
val numFolds: IntParam = new IntParam(this, "numFolds",
"number of folds for cross validation (>= 2)", ParamValidators.gtEq(2))
/** @group getParam */
def getNumFolds: Int = $(numFolds)
setDefault(numFolds -> 3)
}
/**
* K-fold cross validation performs model selection by splitting the dataset into a set of
 * non-overlapping randomly partitioned folds which are used as separate training and test datasets,
* e.g., with k=3 folds, K-fold cross validation will generate 3 (training, test) dataset pairs,
* each of which uses 2/3 of the data for training and 1/3 for testing. Each fold is used as the
* test set exactly once.
*/
@Since("1.2.0")
class CrossValidator @Since("1.2.0") (@Since("1.4.0") override val uid: String)
extends Estimator[CrossValidatorModel]
with CrossValidatorParams with HasParallelism with HasCollectSubModels
with MLWritable with Logging {
@Since("1.2.0")
def this() = this(Identifiable.randomUID("cv"))
/** @group setParam */
@Since("1.2.0")
def setEstimator(value: Estimator[_]): this.type = set(estimator, value)
/** @group setParam */
@Since("1.2.0")
def setEstimatorParamMaps(value: Array[ParamMap]): this.type = set(estimatorParamMaps, value)
/** @group setParam */
@Since("1.2.0")
def setEvaluator(value: Evaluator): this.type = set(evaluator, value)
/** @group setParam */
@Since("1.2.0")
def setNumFolds(value: Int): this.type = set(numFolds, value)
/** @group setParam */
@Since("2.0.0")
def setSeed(value: Long): this.type = set(seed, value)
/**
* Set the maximum level of parallelism to evaluate models in parallel.
* Default is 1 for serial evaluation
*
* @group expertSetParam
*/
@Since("2.3.0")
def setParallelism(value: Int): this.type = set(parallelism, value)
/**
* Whether to collect submodels when fitting. If set, we can get submodels from
* the returned model.
*
* Note: If set this param, when you save the returned model, you can set an option
* "persistSubModels" to be "true" before saving, in order to save these submodels.
* You can check documents of
* {@link org.apache.spark.ml.tuning.CrossValidatorModel.CrossValidatorModelWriter}
* for more information.
*
* @group expertSetParam
*/
@Since("2.3.0")
def setCollectSubModels(value: Boolean): this.type = set(collectSubModels, value)
@Since("2.0.0")
override def fit(dataset: Dataset[_]): CrossValidatorModel = {
val schema = dataset.schema
transformSchema(schema, logging = true)
val sparkSession = dataset.sparkSession
val est = $(estimator)
val eval = $(evaluator)
val epm = $(estimatorParamMaps)
// Create execution context based on $(parallelism)
val executionContext = getExecutionContext
val instr = Instrumentation.create(this, dataset)
instr.logParams(numFolds, seed, parallelism)
logTuningParams(instr)
val collectSubModelsParam = $(collectSubModels)
var subModels: Option[Array[Array[Model[_]]]] = if (collectSubModelsParam) {
Some(Array.fill($(numFolds))(Array.fill[Model[_]](epm.length)(null)))
} else None
// Compute metrics for each model over each split
val splits = MLUtils.kFold(dataset.toDF.rdd, $(numFolds), $(seed))
val metrics = splits.zipWithIndex.map { case ((training, validation), splitIndex) =>
val trainingDataset = sparkSession.createDataFrame(training, schema).cache()
val validationDataset = sparkSession.createDataFrame(validation, schema).cache()
instr.logDebug(s"Train split $splitIndex with multiple sets of parameters.")
// Fit models in a Future for training in parallel
val foldMetricFutures = epm.zipWithIndex.map { case (paramMap, paramIndex) =>
Future[Double] {
val model = est.fit(trainingDataset, paramMap).asInstanceOf[Model[_]]
if (collectSubModelsParam) {
subModels.get(splitIndex)(paramIndex) = model
}
// TODO: duplicate evaluator to take extra params from input
val metric = eval.evaluate(model.transform(validationDataset, paramMap))
instr.logDebug(s"Got metric $metric for model trained with $paramMap.")
metric
} (executionContext)
}
// Wait for metrics to be calculated
val foldMetrics = foldMetricFutures.map(ThreadUtils.awaitResult(_, Duration.Inf))
// Unpersist training & validation set once all metrics have been produced
trainingDataset.unpersist()
validationDataset.unpersist()
foldMetrics
}.transpose.map(_.sum / $(numFolds)) // Calculate average metric over all splits
instr.logInfo(s"Average cross-validation metrics: ${metrics.toSeq}")
val (bestMetric, bestIndex) =
if (eval.isLargerBetter) metrics.zipWithIndex.maxBy(_._1)
else metrics.zipWithIndex.minBy(_._1)
instr.logInfo(s"Best set of parameters:\\n${epm(bestIndex)}")
instr.logInfo(s"Best cross-validation metric: $bestMetric.")
val bestModel = est.fit(dataset, epm(bestIndex)).asInstanceOf[Model[_]]
instr.logSuccess(bestModel)
copyValues(new CrossValidatorModel(uid, bestModel, metrics)
.setSubModels(subModels).setParent(this))
}
@Since("1.4.0")
override def transformSchema(schema: StructType): StructType = transformSchemaImpl(schema)
@Since("1.4.0")
override def copy(extra: ParamMap): CrossValidator = {
val copied = defaultCopy(extra).asInstanceOf[CrossValidator]
if (copied.isDefined(estimator)) {
copied.setEstimator(copied.getEstimator.copy(extra))
}
if (copied.isDefined(evaluator)) {
copied.setEvaluator(copied.getEvaluator.copy(extra))
}
copied
}
// Currently, this only works if all [[Param]]s in [[estimatorParamMaps]] are simple types.
// E.g., this may fail if a [[Param]] is an instance of an [[Estimator]].
// However, this case should be unusual.
@Since("1.6.0")
override def write: MLWriter = new CrossValidator.CrossValidatorWriter(this)
}
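
// Illustrative usage sketch (not part of the original file). With numFolds = 3 each candidate
// ParamMap is trained on 2/3 of the data and evaluated on the held-out 1/3, three times, and
// avgMetrics holds the per-ParamMap averages. `pipeline`, `evaluator` and `paramGrid` below are
// placeholders for whatever estimator, evaluator and ParamGridBuilder output is being tuned.
//   val cv = new CrossValidator()
//     .setEstimator(pipeline)
//     .setEvaluator(evaluator)
//     .setEstimatorParamMaps(paramGrid)
//     .setNumFolds(3)
//     .setParallelism(2)
//   val cvModel = cv.fit(trainingData)   // CrossValidatorModel exposing bestModel and avgMetrics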
@Since("1.6.0")
object CrossValidator extends MLReadable[CrossValidator] {
@Since("1.6.0")
override def read: MLReader[CrossValidator] = new CrossValidatorReader
@Since("1.6.0")
override def load(path: String): CrossValidator = super.load(path)
private[CrossValidator] class CrossValidatorWriter(instance: CrossValidator) extends MLWriter {
ValidatorParams.validateParams(instance)
override protected def saveImpl(path: String): Unit =
ValidatorParams.saveImpl(path, instance, sc)
}
private class CrossValidatorReader extends MLReader[CrossValidator] {
/** Checked against metadata when loading model */
private val className = classOf[CrossValidator].getName
override def load(path: String): CrossValidator = {
implicit val format = DefaultFormats
val (metadata, estimator, evaluator, estimatorParamMaps) =
ValidatorParams.loadImpl(path, sc, className)
val cv = new CrossValidator(metadata.uid)
.setEstimator(estimator)
.setEvaluator(evaluator)
.setEstimatorParamMaps(estimatorParamMaps)
metadata.getAndSetParams(cv, skipParams = Option(List("estimatorParamMaps")))
cv
}
}
}
/**
* CrossValidatorModel contains the model with the highest average cross-validation
* metric across folds and uses this model to transform input data. CrossValidatorModel
* also tracks the metrics for each param map evaluated.
*
* @param bestModel The best model selected from k-fold cross validation.
* @param avgMetrics Average cross-validation metrics for each paramMap in
* `CrossValidator.estimatorParamMaps`, in the corresponding order.
*/
@Since("1.2.0")
class CrossValidatorModel private[ml] (
@Since("1.4.0") override val uid: String,
@Since("1.2.0") val bestModel: Model[_],
@Since("1.5.0") val avgMetrics: Array[Double])
extends Model[CrossValidatorModel] with CrossValidatorParams with MLWritable {
/** A Python-friendly auxiliary constructor. */
private[ml] def this(uid: String, bestModel: Model[_], avgMetrics: JList[Double]) = {
this(uid, bestModel, avgMetrics.asScala.toArray)
}
private var _subModels: Option[Array[Array[Model[_]]]] = None
private[tuning] def setSubModels(subModels: Option[Array[Array[Model[_]]]])
: CrossValidatorModel = {
_subModels = subModels
this
}
// A Python-friendly auxiliary method
private[tuning] def setSubModels(subModels: JList[JList[Model[_]]])
: CrossValidatorModel = {
_subModels = if (subModels != null) {
Some(subModels.asScala.toArray.map(_.asScala.toArray))
} else {
None
}
this
}
/**
   * @return submodels represented as a two-dimensional array. The index of the outer array is the
   *         fold index, and the index of the inner array corresponds to the ordering of
* estimatorParamMaps
* @throws IllegalArgumentException if subModels are not available. To retrieve subModels,
* make sure to set collectSubModels to true before fitting.
*/
@Since("2.3.0")
def subModels: Array[Array[Model[_]]] = {
require(_subModels.isDefined, "subModels not available, To retrieve subModels, make sure " +
"to set collectSubModels to true before fitting.")
_subModels.get
}
@Since("2.3.0")
def hasSubModels: Boolean = _subModels.isDefined
@Since("2.0.0")
override def transform(dataset: Dataset[_]): DataFrame = {
transformSchema(dataset.schema, logging = true)
bestModel.transform(dataset)
}
@Since("1.4.0")
override def transformSchema(schema: StructType): StructType = {
bestModel.transformSchema(schema)
}
@Since("1.4.0")
override def copy(extra: ParamMap): CrossValidatorModel = {
val copied = new CrossValidatorModel(
uid,
bestModel.copy(extra).asInstanceOf[Model[_]],
avgMetrics.clone()
).setSubModels(CrossValidatorModel.copySubModels(_subModels))
copyValues(copied, extra).setParent(parent)
}
@Since("1.6.0")
override def write: CrossValidatorModel.CrossValidatorModelWriter = {
new CrossValidatorModel.CrossValidatorModelWriter(this)
}
}
@Since("1.6.0")
object CrossValidatorModel extends MLReadable[CrossValidatorModel] {
private[CrossValidatorModel] def copySubModels(subModels: Option[Array[Array[Model[_]]]])
: Option[Array[Array[Model[_]]]] = {
subModels.map(_.map(_.map(_.copy(ParamMap.empty).asInstanceOf[Model[_]])))
}
@Since("1.6.0")
override def read: MLReader[CrossValidatorModel] = new CrossValidatorModelReader
@Since("1.6.0")
override def load(path: String): CrossValidatorModel = super.load(path)
/**
* Writer for CrossValidatorModel.
* @param instance CrossValidatorModel instance used to construct the writer
*
* CrossValidatorModelWriter supports an option "persistSubModels", with possible values
* "true" or "false". If you set the collectSubModels Param before fitting, then you can
* set "persistSubModels" to "true" in order to persist the subModels. By default,
* "persistSubModels" will be "true" when subModels are available and "false" otherwise.
* If subModels are not available, then setting "persistSubModels" to "true" will cause
* an exception.
*/
@Since("2.3.0")
final class CrossValidatorModelWriter private[tuning] (
instance: CrossValidatorModel) extends MLWriter {
ValidatorParams.validateParams(instance)
override protected def saveImpl(path: String): Unit = {
val persistSubModelsParam = optionMap.getOrElse("persistsubmodels",
if (instance.hasSubModels) "true" else "false")
require(Array("true", "false").contains(persistSubModelsParam.toLowerCase(Locale.ROOT)),
s"persistSubModels option value ${persistSubModelsParam} is invalid, the possible " +
"values are \\"true\\" or \\"false\\"")
val persistSubModels = persistSubModelsParam.toBoolean
import org.json4s.JsonDSL._
val extraMetadata = ("avgMetrics" -> instance.avgMetrics.toSeq) ~
("persistSubModels" -> persistSubModels)
ValidatorParams.saveImpl(path, instance, sc, Some(extraMetadata))
val bestModelPath = new Path(path, "bestModel").toString
instance.bestModel.asInstanceOf[MLWritable].save(bestModelPath)
if (persistSubModels) {
require(instance.hasSubModels, "When persisting tuning models, you can only set " +
"persistSubModels to true if the tuning was done with collectSubModels set to true. " +
"To save the sub-models, try rerunning fitting with collectSubModels set to true.")
val subModelsPath = new Path(path, "subModels")
for (splitIndex <- 0 until instance.getNumFolds) {
val splitPath = new Path(subModelsPath, s"fold${splitIndex.toString}")
for (paramIndex <- 0 until instance.getEstimatorParamMaps.length) {
val modelPath = new Path(splitPath, paramIndex.toString).toString
instance.subModels(splitIndex)(paramIndex).asInstanceOf[MLWritable].save(modelPath)
}
}
}
}
}
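
  // Illustrative usage (not part of the original file): assuming the standard MLWriter option
  // mechanism referenced above, sub-models can be persisted explicitly with something like
  //   cvModel.write.option("persistSubModels", "true").save(path)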
private class CrossValidatorModelReader extends MLReader[CrossValidatorModel] {
/** Checked against metadata when loading model */
private val className = classOf[CrossValidatorModel].getName
override def load(path: String): CrossValidatorModel = {
implicit val format = DefaultFormats
val (metadata, estimator, evaluator, estimatorParamMaps) =
ValidatorParams.loadImpl(path, sc, className)
val numFolds = (metadata.params \\ "numFolds").extract[Int]
val bestModelPath = new Path(path, "bestModel").toString
val bestModel = DefaultParamsReader.loadParamsInstance[Model[_]](bestModelPath, sc)
val avgMetrics = (metadata.metadata \\ "avgMetrics").extract[Seq[Double]].toArray
val persistSubModels = (metadata.metadata \\ "persistSubModels")
.extractOrElse[Boolean](false)
val subModels: Option[Array[Array[Model[_]]]] = if (persistSubModels) {
val subModelsPath = new Path(path, "subModels")
val _subModels = Array.fill(numFolds)(Array.fill[Model[_]](
estimatorParamMaps.length)(null))
for (splitIndex <- 0 until numFolds) {
val splitPath = new Path(subModelsPath, s"fold${splitIndex.toString}")
for (paramIndex <- 0 until estimatorParamMaps.length) {
val modelPath = new Path(splitPath, paramIndex.toString).toString
_subModels(splitIndex)(paramIndex) =
DefaultParamsReader.loadParamsInstance(modelPath, sc)
}
}
Some(_subModels)
} else None
val model = new CrossValidatorModel(metadata.uid, bestModel, avgMetrics)
.setSubModels(subModels)
model.set(model.estimator, estimator)
.set(model.evaluator, evaluator)
.set(model.estimatorParamMaps, estimatorParamMaps)
metadata.getAndSetParams(model, skipParams = Option(List("estimatorParamMaps")))
model
}
}
}
| lxsmnv/spark | mllib/src/main/scala/org/apache/spark/ml/tuning/CrossValidator.scala | Scala | apache-2.0 | 16,802 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.job.local
import java.util
import org.apache.samza.SamzaException
import org.apache.samza.config.{Config, JobConfig, TaskConfigJava}
import org.apache.samza.config.TaskConfig._
import org.apache.samza.container.TaskName
import org.apache.samza.coordinator.JobModelManager
import org.apache.samza.coordinator.stream.CoordinatorStreamManager
import org.apache.samza.job.{CommandBuilder, ShellCommandBuilder, StreamJob, StreamJobFactory}
import org.apache.samza.metrics.MetricsRegistryMap
import org.apache.samza.storage.ChangelogStreamManager
import org.apache.samza.util.{Logging, Util}
import scala.collection.JavaConversions._
/**
 * Creates a standalone ProcessJob with the specified config.
*/
class ProcessJobFactory extends StreamJobFactory with Logging {
def getJob(config: Config): StreamJob = {
val containerCount = JobConfig.Config2Job(config).getContainerCount
if (containerCount > 1) {
throw new SamzaException("Container count larger than 1 is not supported for ProcessJobFactory")
}
val metricsRegistry = new MetricsRegistryMap()
val coordinatorStreamManager = new CoordinatorStreamManager(config, metricsRegistry)
coordinatorStreamManager.register(getClass.getSimpleName)
coordinatorStreamManager.start
coordinatorStreamManager.bootstrap
val changelogStreamManager = new ChangelogStreamManager(coordinatorStreamManager)
val coordinator = JobModelManager(coordinatorStreamManager.getConfig, changelogStreamManager.readPartitionMapping(), metricsRegistry)
val jobModel = coordinator.jobModel
val taskPartitionMappings: util.Map[TaskName, Integer] = new util.HashMap[TaskName, Integer]
for (containerModel <- jobModel.getContainers.values) {
for (taskModel <- containerModel.getTasks.values) {
taskPartitionMappings.put(taskModel.getTaskName, taskModel.getChangelogPartition.getPartitionId)
}
}
changelogStreamManager.writePartitionMapping(taskPartitionMappings)
coordinatorStreamManager.stop()
//create necessary checkpoint and changelog streams
val checkpointManager = new TaskConfigJava(jobModel.getConfig).getCheckpointManager(metricsRegistry)
if (checkpointManager != null) {
checkpointManager.createResources()
}
ChangelogStreamManager.createChangelogStreams(jobModel.getConfig, jobModel.maxChangeLogStreamPartitions)
val containerModel = coordinator.jobModel.getContainers.get(0)
val fwkPath = JobConfig.getFwkPath(config) // see if split deployment is configured
info("Process job. using fwkPath = " + fwkPath)
val commandBuilder = {
config.getCommandClass match {
case Some(cmdBuilderClassName) => {
// A command class was specified, so we need to use a process job to
// execute the command in its own process.
Util.getObj(cmdBuilderClassName, classOf[CommandBuilder])
}
case _ => {
info("Defaulting to ShellCommandBuilder")
new ShellCommandBuilder
}
}
}
// JobCoordinator is stopped by ProcessJob when it exits
coordinator.start
commandBuilder
.setConfig(config)
.setId("0")
.setUrl(coordinator.server.getUrl)
.setCommandPath(fwkPath)
new ProcessJob(commandBuilder, coordinator)
}
}
| bharathkk/samza | samza-core/src/main/scala/org/apache/samza/job/local/ProcessJobFactory.scala | Scala | apache-2.0 | 4,152 |
package com.sksamuel.elastic4s.searches.queries.term
import com.sksamuel.elastic4s.DocumentRef
import com.sksamuel.elastic4s.searches.queries.Query
import com.sksamuel.exts.OptionImplicits._
case class TermsQuery[T](field: String,
values: Iterable[T],
boost: Option[Double] = None,
ref: Option[DocumentRef] = None,
routing: Option[String] = None,
path: Option[String] = None,
queryName: Option[String] = None)(implicit val buildable: BuildableTermsQuery[T])
extends Query {
def ref(index: String, `type`: String, id: String): TermsQuery[T] = ref(DocumentRef(index, `type`, id))
def ref(ref: DocumentRef): TermsQuery[T] = copy(ref = ref.some)
def routing(routing: String): TermsQuery[T] = copy(routing = routing.some)
def path(path: String): TermsQuery[T] = copy(path = path.some)
def boost(boost: Double): TermsQuery[T] = copy(boost = boost.some)
def queryName(queryName: String): TermsQuery[T] = copy(queryName = queryName.some)
}
trait BuildableTermsQuery[T] {
def build(q: TermsQuery[T]): Any
}
| Tecsisa/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/searches/queries/term/TermsQuery.scala | Scala | apache-2.0 | 1,289 |
package pl.suder.scala.auctionHouse
import akka.actor._
import akka.testkit.{ TestProbe, ImplicitSender, TestActorRef, TestKit }
import org.scalatest.{ Matchers, OneInstancePerTest, WordSpecLike }
import pl.suder.scala.auctionHouse._
import pl.suder.scala.auctionHouse.Message._
import scala.concurrent.duration.`package`.DurationInt
class AuctionTest extends TestKit(ActorSystem("AuctionHouseTest"))
with WordSpecLike with Matchers with ImplicitSender with OneInstancePerTest {
val NAME = "name"
val parentTestProbe = TestProbe()
val auctionSearchTestProbe = TestProbe()
system.actorOf(Props(new Actor() {
override def receive = {
case x => auctionSearchTestProbe.ref forward x
}
}), "AuctionSearch")
val underTest = TestActorRef(Props(classOf[Auction], NAME), parentTestProbe.ref, NAME)
"An auction" should {
"send ItemBuyed after AuctionEnded to winner" in {
underTest ! Bid(1)
expectMsg(Auction.BidTimer + (1 second), ItemBuyed)
}
"tell parent that item is sold" when {
"sb bid it" in {
underTest ! Bid(1)
parentTestProbe.expectMsg(Auction.BidTimer + (1 second), AuctionEnded(true))
}
}
"tell parent that auction is deleted" when {
"sb does not bid it" in {
parentTestProbe.expectMsg(Auction.BidTimer + (1 second) + Auction.DeleteTimer, AuctionDeleted)
}
"sb bid it" in {
underTest ! Bid(1)
parentTestProbe.expectMsg(Auction.BidTimer + (1 second), AuctionEnded(true))
parentTestProbe.expectMsg(Auction.BidTimer + (1 second) + Auction.DeleteTimer, AuctionDeleted)
}
}
"tell winner that is beaten" in {
val buyer2 = TestProbe()
underTest ! Bid(1)
buyer2.send(underTest, Bid(2))
expectMsg(1 second, Beaten(2))
}
"tell buyer that give too small bid" in {
val buyer2 = TestProbe()
underTest ! Bid(2)
buyer2.send(underTest, Bid(1))
buyer2.expectMsg(1 second, NotEnough(2))
}
"tell buyer that item is sold" when {
"he send bid after auction is end" in {
val buyer2 = TestProbe()
underTest ! Bid(1)
Thread sleep (Auction.BidTimer + (1 second)).length * 1000
buyer2.send(underTest, Bid(2))
buyer2.expectMsg(1 second, ItemSold)
}
}
"be terminated after auction end" in {
watch(underTest)
expectTerminated(underTest, Auction.BidTimer + (1 second) + Auction.DeleteTimer)
}
"register in auction search" in {
auctionSearchTestProbe.expectMsg(1 second, Register(NAME))
}
}
}
| Materix/Sem7-Scala | src/main/test/pl/suder/scala/auctionHouse/AuctionTest.scala | Scala | mit | 2,581 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.entity.test
import common.{StreamLogging, WskActorSystem}
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{FlatSpec, Matchers}
import spray.json._
import spray.json.DefaultJsonProtocol._
import org.apache.openwhisk.core.entity.ExecManifest
import org.apache.openwhisk.core.entity.ExecManifest._
import org.apache.openwhisk.core.entity.size._
import org.apache.openwhisk.core.entity.ByteSize
import scala.util.Success
@RunWith(classOf[JUnitRunner])
class ExecManifestTests extends FlatSpec with WskActorSystem with StreamLogging with Matchers {
behavior of "ExecManifest"
private def manifestFactory(runtimes: JsObject) = {
JsObject("runtimes" -> runtimes)
}
it should "parse an image name" in {
Map(
"i" -> ImageName("i"),
"i:t" -> ImageName("i", tag = Some("t")),
"i:tt" -> ImageName("i", tag = Some("tt")),
"ii" -> ImageName("ii"),
"ii:t" -> ImageName("ii", tag = Some("t")),
"ii:tt" -> ImageName("ii", tag = Some("tt")),
"p/i" -> ImageName("i", None, Some("p")),
"pre/img" -> ImageName("img", None, Some("pre")),
"pre/img:t" -> ImageName("img", None, Some("pre"), Some("t")),
"hostname:1234/img" -> ImageName("img", Some("hostname:1234"), None),
"hostname:1234/img:t" -> ImageName("img", Some("hostname:1234"), None, Some("t")),
"pre1/pre2/img" -> ImageName("img", None, Some("pre1/pre2")),
"pre1/pre2/img:t" -> ImageName("img", None, Some("pre1/pre2"), Some("t")),
"hostname:1234/pre1/pre2/img" -> ImageName("img", Some("hostname:1234"), Some("pre1/pre2")),
"hostname.com:3121/pre1/pre2/img:t" -> ImageName("img", Some("hostname.com:3121"), Some("pre1/pre2"), Some("t")),
"hostname.com:3121/pre1/pre2/img:t@sha256:77af4d6b9913e693e8d0b4b294fa62ade6054e6b2f1ffb617ac955dd63fb0182" ->
ImageName("img", Some("hostname.com:3121"), Some("pre1/pre2"), Some("t")))
.foreach {
case (s, v) => ImageName.fromString(s) shouldBe Success(v)
}
Seq("ABC", "x:8080:10/abc", "p/a:x:y", "p/a:t@sha256:77af4d6b9").foreach { s =>
a[DeserializationException] should be thrownBy ImageName.fromString(s).get
}
}
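  // Note: the "???" strings used as image names below are just arbitrary test values,
  // not Scala's `???` placeholder.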
it should "read a valid configuration without default prefix, default tag or blackbox images" in {
val k1 = RuntimeManifest("k1", ImageName("???"))
val k2 = RuntimeManifest("k2", ImageName("???"), default = Some(true))
val p1 = RuntimeManifest("p1", ImageName("???"))
val s1 = RuntimeManifest("s1", ImageName("???"), stemCells = Some(List(StemCell(2, 256.MB))))
val mf = manifestFactory(JsObject("ks" -> Set(k1, k2).toJson, "p1" -> Set(p1).toJson, "s1" -> Set(s1).toJson))
val runtimes = ExecManifest.runtimes(mf, RuntimeManifestConfig()).get
Seq("k1", "k2", "p1", "s1").foreach {
runtimes.knownContainerRuntimes.contains(_) shouldBe true
}
runtimes.knownContainerRuntimes.contains("k3") shouldBe false
runtimes.resolveDefaultRuntime("k1") shouldBe Some(k1)
runtimes.resolveDefaultRuntime("k2") shouldBe Some(k2)
runtimes.resolveDefaultRuntime("p1") shouldBe Some(p1)
runtimes.resolveDefaultRuntime("s1") shouldBe Some(s1)
runtimes.resolveDefaultRuntime("ks:default") shouldBe Some(k2)
runtimes.resolveDefaultRuntime("p1:default") shouldBe Some(p1)
runtimes.resolveDefaultRuntime("s1:default") shouldBe Some(s1)
}
it should "read a valid configuration where an image may omit registry, prefix or tag" in {
val i1 = RuntimeManifest("i1", ImageName("???"))
val i2 = RuntimeManifest("i2", ImageName("???", Some("rrr")))
val i3 = RuntimeManifest("i3", ImageName("???", Some("rrr"), Some("ppp")), default = Some(true))
val i4 = RuntimeManifest("i4", ImageName("???", Some("rrr"), Some("ppp"), Some("ttt")))
val j1 = RuntimeManifest("j1", ImageName("???", None, None, Some("ttt")))
val k1 = RuntimeManifest("k1", ImageName("???", None, Some("ppp")))
val p1 = RuntimeManifest("p1", ImageName("???", None, Some("ppp"), Some("ttt")))
val q1 = RuntimeManifest("q1", ImageName("???", Some("rrr"), None, Some("ttt")))
val s1 = RuntimeManifest("s1", ImageName("???"), stemCells = Some(List(StemCell(2, 256.MB))))
val mf =
JsObject(
"runtimes" -> JsObject(
"is" -> Set(i1, i2, i3, i4).toJson,
"js" -> Set(j1).toJson,
"ks" -> Set(k1).toJson,
"ps" -> Set(p1).toJson,
"qs" -> Set(q1).toJson,
"ss" -> Set(s1).toJson))
val rmc = RuntimeManifestConfig()
val runtimes = ExecManifest.runtimes(mf, rmc).get
runtimes.resolveDefaultRuntime("i1").get.image.resolveImageName() shouldBe "???"
runtimes.resolveDefaultRuntime("i2").get.image.resolveImageName() shouldBe "rrr/???"
runtimes.resolveDefaultRuntime("i3").get.image.resolveImageName() shouldBe "rrr/ppp/???"
runtimes.resolveDefaultRuntime("i4").get.image.resolveImageName() shouldBe "rrr/ppp/???:ttt"
runtimes.resolveDefaultRuntime("j1").get.image.resolveImageName() shouldBe "???:ttt"
runtimes.resolveDefaultRuntime("k1").get.image.resolveImageName() shouldBe "ppp/???"
runtimes.resolveDefaultRuntime("p1").get.image.resolveImageName() shouldBe "ppp/???:ttt"
runtimes.resolveDefaultRuntime("q1").get.image.resolveImageName() shouldBe "rrr/???:ttt"
runtimes.resolveDefaultRuntime("s1").get.image.resolveImageName() shouldBe "???"
runtimes.resolveDefaultRuntime("s1").get.stemCells.get(0).count shouldBe 2
runtimes.resolveDefaultRuntime("s1").get.stemCells.get(0).memory shouldBe 256.MB
}
it should "read a valid configuration with blackbox images but without default registry, prefix or tag" in {
val imgs = Set(
ImageName("???"),
ImageName("???", Some("rrr")),
ImageName("???", Some("rrr"), Some("ppp")),
ImageName("???", Some("rrr"), Some("ppp"), Some("ttt")),
ImageName("???", None, None, Some("ttt")),
ImageName("???", None, Some("ppp")),
ImageName("???", None, Some("ppp"), Some("ttt")),
ImageName("???", Some("rrr"), None, Some("ttt")))
val mf = JsObject("runtimes" -> JsObject.empty, "blackboxes" -> imgs.toJson)
val runtimes = ExecManifest.runtimes(mf, RuntimeManifestConfig()).get
runtimes.blackboxImages shouldBe imgs
imgs.foreach(img => runtimes.skipDockerPull(img) shouldBe true)
runtimes.skipDockerPull(ImageName("???", Some("aaa"))) shouldBe false
runtimes.skipDockerPull(ImageName("???", None, Some("bbb"))) shouldBe false
}
it should "read a valid configuration with blackbox images, which may omit registry, prefix or tag" in {
val imgs = List(
ImageName("???"),
ImageName("???", Some("rrr")),
ImageName("???", Some("rrr"), Some("ppp")),
ImageName("???", Some("rrr"), Some("ppp"), Some("ttt")),
ImageName("???", None, None, Some("ttt")),
ImageName("???", None, Some("ppp")),
ImageName("???", None, Some("ppp"), Some("ttt")),
ImageName("???", Some("rrr"), None, Some("ttt")))
val mf = JsObject("runtimes" -> JsObject.empty, "blackboxes" -> imgs.toJson)
val rmc = RuntimeManifestConfig()
val runtimes = ExecManifest.runtimes(mf, rmc).get
runtimes.blackboxImages shouldBe imgs.toSet
imgs.forall(runtimes.skipDockerPull(_)) shouldBe true
runtimes.skipDockerPull(ImageName("xxx")) shouldBe false
runtimes.skipDockerPull(ImageName("???", Some("rrr"), Some("bbb"))) shouldBe false
runtimes.skipDockerPull(ImageName("???", Some("rrr"), Some("ppp"), Some("test"))) shouldBe false
runtimes.skipDockerPull(ImageName("???", None, None, Some("test"))) shouldBe false
}
it should "reject runtimes with multiple defaults" in {
val k1 = RuntimeManifest("k1", ImageName("???"), default = Some(true))
val k2 = RuntimeManifest("k2", ImageName("???"), default = Some(true))
val mf = manifestFactory(JsObject("ks" -> Set(k1, k2).toJson))
an[IllegalArgumentException] should be thrownBy ExecManifest.runtimes(mf, RuntimeManifestConfig()).get
}
it should "reject finding a default when none specified for multiple versions in the same family" in {
val k1 = RuntimeManifest("k1", ImageName("???"))
val k2 = RuntimeManifest("k2", ImageName("???"))
val mf = manifestFactory(JsObject("ks" -> Set(k1, k2).toJson))
an[IllegalArgumentException] should be thrownBy ExecManifest.runtimes(mf, RuntimeManifestConfig()).get
}
it should "prefix image name with overrides without registry" in {
val name = "xyz"
ExecManifest.ImageName(name, Some(""), Some(""), Some("")).resolveImageName() shouldBe name
Seq(
(ExecManifest.ImageName(name), name),
(ExecManifest.ImageName(name, None, None, Some("t")), s"$name:t"),
(ExecManifest.ImageName(name, None, Some("pre")), s"pre/$name"),
(ExecManifest.ImageName(name, None, Some("pre"), Some("t")), s"pre/$name:t"),
).foreach {
case (image, exp) =>
image.resolveImageName() shouldBe exp
image.resolveImageName(Some("")) shouldBe exp
image.resolveImageName(Some("r")) shouldBe s"r/$exp"
image.resolveImageName(Some("r/")) shouldBe s"r/$exp"
}
}
it should "prefix image name with overrides with registry" in {
val name = "xyz"
ExecManifest.ImageName(name, Some(""), Some(""), Some("")).resolveImageName() shouldBe name
Seq(
(ExecManifest.ImageName(name, Some("hostname.com")), s"hostname.com/$name"),
(ExecManifest.ImageName(name, Some("hostname.com"), None, Some("t")), s"hostname.com/$name:t"),
(ExecManifest.ImageName(name, Some("hostname.com"), Some("pre")), s"hostname.com/pre/$name"),
(ExecManifest.ImageName(name, Some("hostname.com"), Some("pre"), Some("t")), s"hostname.com/pre/$name:t"),
).foreach {
case (image, exp) =>
image.resolveImageName() shouldBe exp
image.resolveImageName(Some("")) shouldBe exp
image.resolveImageName(Some("r")) shouldBe exp
image.resolveImageName(Some("r/")) shouldBe exp
}
}
it should "indicate image is local if it matches deployment docker prefix" in {
val mf = JsObject.empty
val rmc = RuntimeManifestConfig(bypassPullForLocalImages = Some(true), localImagePrefix = Some("localpre"))
val manifest = ExecManifest.runtimes(mf, rmc)
manifest.get.skipDockerPull(ImageName(prefix = Some("x"), name = "y")) shouldBe false
manifest.get.skipDockerPull(ImageName(prefix = Some("localpre"), name = "y")) shouldBe true
}
it should "de/serialize stem cell configuration" in {
val cell = StemCell(3, 128.MB)
val cellAsJson = JsObject("count" -> JsNumber(3), "memory" -> JsString("128 MB"))
stemCellSerdes.write(cell) shouldBe cellAsJson
stemCellSerdes.read(cellAsJson) shouldBe cell
an[IllegalArgumentException] shouldBe thrownBy {
StemCell(-1, 128.MB)
}
an[IllegalArgumentException] shouldBe thrownBy {
StemCell(0, 128.MB)
}
an[IllegalArgumentException] shouldBe thrownBy {
val cellAsJson = JsObject("count" -> JsNumber(0), "memory" -> JsString("128 MB"))
stemCellSerdes.read(cellAsJson)
}
the[IllegalArgumentException] thrownBy {
val cellAsJson = JsObject("count" -> JsNumber(1), "memory" -> JsString("128"))
stemCellSerdes.read(cellAsJson)
} should have message {
ByteSize.formatError
}
}
it should "parse manifest from JSON string" in {
val json = """
|{ "runtimes": {
| "nodef": [
| {
| "kind": "nodejs:6",
| "deprecated": true,
| "image": {
| "name": "nodejsaction"
| },
| "stemCells": [{
| "count": 1,
| "memory": "128 MB"
| }]
| }, {
| "kind": "nodejs:8",
| "default": true,
| "image": {
| "name": "nodejsaction"
| },
| "stemCells": [{
| "count": 1,
| "memory": "128 MB"
| }, {
| "count": 1,
| "memory": "256 MB"
| }]
| }
| ],
| "pythonf": [{
| "kind": "python",
| "image": {
| "name": "pythonaction"
| },
| "stemCells": [{
| "count": 2,
| "memory": "256 MB"
| }]
| }],
| "swiftf": [{
| "kind": "swift",
| "image": {
| "name": "swiftaction"
| },
| "stemCells": []
| }],
| "phpf": [{
| "kind": "php",
| "image": {
| "name": "phpaction"
| }
| }]
| }
|}
|""".stripMargin.parseJson.asJsObject
val js6 = RuntimeManifest(
"nodejs:6",
ImageName("nodejsaction"),
deprecated = Some(true),
stemCells = Some(List(StemCell(1, 128.MB))))
val js8 = RuntimeManifest(
"nodejs:8",
ImageName("nodejsaction"),
default = Some(true),
stemCells = Some(List(StemCell(1, 128.MB), StemCell(1, 256.MB))))
val py = RuntimeManifest("python", ImageName("pythonaction"), stemCells = Some(List(StemCell(2, 256.MB))))
val sw = RuntimeManifest("swift", ImageName("swiftaction"), stemCells = Some(List.empty))
val ph = RuntimeManifest("php", ImageName("phpaction"))
val mf = ExecManifest.runtimes(json, RuntimeManifestConfig()).get
mf shouldBe {
Runtimes(
Set(
RuntimeFamily("nodef", Set(js6, js8)),
RuntimeFamily("pythonf", Set(py)),
RuntimeFamily("swiftf", Set(sw)),
RuntimeFamily("phpf", Set(ph))),
Set.empty,
None)
}
def stemCellFactory(m: RuntimeManifest, cells: List[StemCell]) = cells.map { c =>
(m.kind, m.image, c.count, c.memory)
}
mf.stemcells.flatMap {
case (m, cells) =>
cells.map { c =>
(m.kind, m.image, c.count, c.memory)
}
}.toList should contain theSameElementsAs List(
(js6.kind, js6.image, 1, 128.MB),
(js8.kind, js8.image, 1, 128.MB),
(js8.kind, js8.image, 1, 256.MB),
(py.kind, py.image, 2, 256.MB))
}
}
| jasonpet/openwhisk | tests/src/test/scala/org/apache/openwhisk/core/entity/test/ExecManifestTests.scala | Scala | apache-2.0 | 15,639 |
package org.jetbrains.plugins.scala
package codeInspection.collections
import java.awt.{Component, GridLayout}
import java.util
import com.intellij.codeInspection.{ProblemHighlightType, ProblemsHolder}
import com.intellij.openapi.ui.{InputValidator, Messages}
import com.intellij.openapi.wm.IdeFocusManager
import com.intellij.psi.PsiElement
import com.intellij.ui._
import javax.swing._
import javax.swing.event.{ChangeEvent, ChangeListener}
import org.jetbrains.plugins.scala.codeInspection.collections.OperationOnCollectionInspectionBase._
import org.jetbrains.plugins.scala.codeInspection.{AbstractInspection, InspectionBundle}
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.settings.{ScalaApplicationSettings, ScalaProjectSettingsUtil}
import org.jetbrains.plugins.scala.util.JListCompatibility
/**
* Nikolay.Tropin
* 5/17/13
*/
object OperationOnCollectionInspectionBase {
val inspectionId = InspectionBundle.message("operation.on.collection.id")
val inspectionName = InspectionBundle.message("operation.on.collection.name")
val likeOptionClassesDefault = Array("scala.Option", "scala.Some", "scala.None")
val likeCollectionClassesDefault = Array("scala.collection._", "scala.Array", "scala.Option", "scala.Some", "scala.None")
private val likeOptionKey = "operation.on.collection.like.option"
private val likeCollectionKey = "operation.on.collection.like.collection"
private val inputMessages = Map(
likeCollectionKey -> InspectionBundle.message("operation.on.collection.like.collection.input.message"),
likeOptionKey -> InspectionBundle.message("operation.on.collection.like.option.input.message")
)
private val inputTitles = Map(
likeCollectionKey -> InspectionBundle.message("operation.on.collection.like.collection.input.title"),
likeOptionKey -> InspectionBundle.message("operation.on.collection.like.option.input.title")
)
private val panelTitles = Map(
likeCollectionKey -> InspectionBundle.message("operation.on.collection.like.collection.panel.title"),
likeOptionKey -> InspectionBundle.message("operation.on.collection.like.option.panel.title")
)
}
abstract class OperationOnCollectionInspectionBase extends AbstractInspection(inspectionName) {
private val settings = ScalaApplicationSettings.getInstance()
override protected def actionFor(implicit holder: ProblemsHolder): PartialFunction[PsiElement, Any] = {
case expr: ScExpression => simplifications(expr).foreach {
case s@Simplification(toReplace, _, hint, rangeInParent) =>
val quickFix = OperationOnCollectionQuickFix(s)
holder.registerProblem(toReplace.getElement, hint, highlightType, rangeInParent, quickFix)
}
}
def highlightType: ProblemHighlightType = ProblemHighlightType.GENERIC_ERROR_OR_WARNING
private def simplifications(expr: ScExpression): Array[Simplification] = {
def simplificationTypes = for {
(st, idx) <- possibleSimplificationTypes.zipWithIndex
if getSimplificationTypesEnabled(idx)
} yield st
simplificationTypes.flatMap(st => st.getSimplifications(expr) ++ st.getSimplification(expr))
}
def getLikeCollectionClasses: Array[String] = settings.getLikeCollectionClasses
def getLikeOptionClasses: Array[String] = settings.getLikeOptionClasses
def setLikeCollectionClasses(values: Array[String]): Unit = settings.setLikeCollectionClasses(values)
def setLikeOptionClasses(values: Array[String]): Unit = settings.setLikeOptionClasses(values)
def possibleSimplificationTypes: Array[SimplificationType]
def getSimplificationTypesEnabled: Array[java.lang.Boolean]
def setSimplificationTypesEnabled(values: Array[java.lang.Boolean])
private val patternLists = Map(
likeCollectionKey -> getLikeCollectionClasses _,
likeOptionKey -> getLikeOptionClasses _
)
private val setPatternLists = {
Map(
likeCollectionKey -> setLikeCollectionClasses _,
likeOptionKey -> setLikeOptionClasses _
)
}
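  // The options panel built below stacks an (optional) column of check boxes, one per
  // simplification type, above two side-by-side pattern lists: "collection-like" classes on the
  // left and "Option-like" classes on the right.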
override def createOptionsPanel: JComponent = {
def checkBoxesPanel(): JComponent = {
val innerPanel = new JPanel()
innerPanel.setLayout(new BoxLayout(innerPanel, BoxLayout.Y_AXIS))
for (i <- possibleSimplificationTypes.indices) {
val enabled: Array[java.lang.Boolean] = getSimplificationTypesEnabled
val checkBox = new JCheckBox(possibleSimplificationTypes(i).description, enabled(i))
checkBox.getModel.addChangeListener(new ChangeListener {
def stateChanged(e: ChangeEvent) {
enabled(i) = checkBox.isSelected
setSimplificationTypesEnabled(enabled)
}
})
innerPanel.add(checkBox)
}
val extPanel = new JPanel()
extPanel.setLayout(new BoxLayout(extPanel, BoxLayout.X_AXIS))
extPanel.add(innerPanel)
extPanel.add(Box.createHorizontalGlue())
extPanel
}
def createPatternListPanel(parent: JComponent, patternListKey: String): JComponent = {
val patternList: Array[String] = patternLists(patternListKey)()
val listModel = JListCompatibility.createDefaultListModel()
patternList.foreach(JListCompatibility.add(listModel, listModel.size, _))
val patternJBList = JListCompatibility.createJBListFromModel(listModel)
def resetValues() {
val newArray = listModel.toArray collect {case s: String => s}
setPatternLists(patternListKey)(newArray)
}
val panel = ToolbarDecorator.createDecorator(patternJBList).setAddAction(new AnActionButtonRunnable {
def addPattern(pattern: String) {
if (pattern == null) return
val index: Int = - util.Arrays.binarySearch (listModel.toArray, pattern) - 1
if (index < 0) return
JListCompatibility.add(listModel, index, pattern)
resetValues()
patternJBList.setSelectedValue (pattern, true)
ScrollingUtil.ensureIndexIsVisible(patternJBList, index, 0)
IdeFocusManager.getGlobalInstance.requestFocus(patternJBList, false)
}
def run(button: AnActionButton) {
val validator: InputValidator = ScalaProjectSettingsUtil.getPatternValidator
val inputMessage = inputMessages(patternListKey)
val inputTitle = inputTitles(patternListKey)
val newPattern: String = Messages.showInputDialog(parent, inputMessage, inputTitle, Messages.getWarningIcon, "", validator)
addPattern(newPattern)
}
}).setRemoveAction(new AnActionButtonRunnable {
def run(t: AnActionButton) {
patternJBList.getSelectedIndices.foreach(listModel.removeElementAt)
resetValues()
}
}).disableUpDownActions.createPanel
val title = panelTitles(patternListKey)
val border = BorderFactory.createTitledBorder(title)
panel.setBorder(border)
panel
}
def patternsPanel(): JComponent = {
val panel = new JPanel(new GridLayout(1,2))
val likeCollectionPanel = createPatternListPanel(panel, likeCollectionKey)
val likeOptionPanel = createPatternListPanel(panel, likeOptionKey)
panel.add(likeCollectionPanel)
panel.add(likeOptionPanel)
panel
}
val panel = new JPanel()
panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS))
if (possibleSimplificationTypes.length > 1) {
val chbPanel = checkBoxesPanel()
chbPanel.setAlignmentX(Component.LEFT_ALIGNMENT)
      panel.add(chbPanel) // add the panel whose alignment was just configured, not a fresh one
}
panel.add(Box.createVerticalGlue())
panel.add(patternsPanel())
panel
}
}
| jastice/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/codeInspection/collections/OperationOnCollectionInspectionBase.scala | Scala | apache-2.0 | 7,563 |
package com.twitter.finagle.mux.transport
import io.netty.channel.{ChannelHandler, ChannelPipeline}
import io.netty.handler.codec.{LengthFieldBasedFrameDecoder, LengthFieldPrepender}
private[transport] object Netty4Framer {
val MaxFrameLength = 0x7FFFFFFF
val LengthFieldOffset = 0
val LengthFieldLength = 4
val LengthAdjustment = 0
val InitialBytesToStrip = 4
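  // Taken together these constants describe the wire format: every mux message is prefixed by a
  // 4-byte big-endian length field (at offset 0, no adjustment) which the decoder strips before
  // the frame reaches the mux decoder, and which the encoder prepends on the way out.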
}
/**
* An implementation of a mux framer using netty4 primitives.
*/
private[mux] abstract class Netty4Framer extends (ChannelPipeline => Unit) {
def bufferManagerName: String
def bufferManager: ChannelHandler
def apply(pipeline: ChannelPipeline): Unit = {
pipeline.addLast(
"frameDecoder",
new LengthFieldBasedFrameDecoder(
Netty4Framer.MaxFrameLength,
Netty4Framer.LengthFieldOffset,
Netty4Framer.LengthFieldLength,
Netty4Framer.LengthAdjustment,
Netty4Framer.InitialBytesToStrip
)
)
pipeline.addLast("frameEncoder", new LengthFieldPrepender(Netty4Framer.LengthFieldLength))
pipeline.addLast(bufferManagerName, bufferManager)
}
}
}
| luciferous/finagle | finagle-mux/src/main/scala/com/twitter/finagle/mux/transport/Netty4Framer.scala | Scala | apache-2.0 | 1,092
/***********************************************************************
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.index.conf
import java.nio.charset.StandardCharsets
import java.util.concurrent.TimeUnit
import com.github.benmanes.caffeine.cache.Caffeine
import org.geotools.feature.simple.SimpleFeatureTypeBuilder
import org.locationtech.geomesa.features.SerializationOption.SerializationOptions
import org.locationtech.geomesa.features.SimpleFeatureSerializer
import org.locationtech.geomesa.features.kryo.{KryoFeatureSerializer, ProjectingKryoFeatureSerializer}
import org.locationtech.geomesa.filter.FilterHelper
import org.locationtech.geomesa.index.metadata.TableBasedMetadata
import org.locationtech.geomesa.utils.cache.CacheKeyGenerator
import org.locationtech.geomesa.utils.geotools.Transform.Transforms
import org.locationtech.geomesa.utils.geotools.{SimpleFeatureTypes, Transform}
import org.locationtech.geomesa.utils.index.VisibilityLevel
import org.opengis.feature.simple.SimpleFeatureType
import org.opengis.filter.Filter
class ColumnGroups {
import org.locationtech.geomesa.filter.RichTransform.RichTransform
import org.locationtech.geomesa.utils.geotools.RichAttributeDescriptors.RichAttributeDescriptor
import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
import scala.collection.JavaConverters._
/**
* Gets the column groups for a simple feature type. The default group will contain all columns
*
* @param sft simple feature type
* @return
*/
def apply(sft: SimpleFeatureType): Seq[(Array[Byte], SimpleFeatureType)] = {
val key = CacheKeyGenerator.cacheKey(sft)
var groups = ColumnGroups.cache.getIfPresent(key)
if (groups == null) {
if (sft.getVisibilityLevel == VisibilityLevel.Attribute) {
groups = IndexedSeq((ColumnGroups.Attributes, sft))
} else {
val map = scala.collection.mutable.Map.empty[String, SimpleFeatureTypeBuilder]
sft.getAttributeDescriptors.asScala.foreach { descriptor =>
descriptor.getColumnGroups().foreach { group =>
map.getOrElseUpdate(group, new SimpleFeatureTypeBuilder()).add(descriptor)
}
}
val sfts = map.map { case (group, builder) =>
builder.setName(sft.getTypeName)
val subset = SimpleFeatureTypes.immutable(builder.buildFeatureType(), sft.getUserData)
(group.getBytes(StandardCharsets.UTF_8), subset)
} + (ColumnGroups.Default -> sft)
        // return the smallest groups first; for consistency, ties are broken by comparing the group names as strings
groups = sfts.toIndexedSeq.sortBy { case (group, subset) =>
(subset.getAttributeCount, new String(group, StandardCharsets.UTF_8))
}
}
ColumnGroups.cache.put(key, groups)
}
groups
}
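  /*
   * Behaviour sketch (hypothetical feature type): if attribute "name" is tagged with column
   * groups "a" and "b" while "dtg" is tagged only with "a", this returns three entries:
   * group "b" (name), group "a" (name, dtg) and the default group containing every attribute,
   * ordered smallest group first.
   */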
/**
* Get serializers for each column group
*
* @param sft simple feature type
* @return
*/
def serializers(sft: SimpleFeatureType): Seq[(Array[Byte], SimpleFeatureSerializer)] = {
apply(sft).map { case (colFamily, subset) =>
if (colFamily.eq(ColumnGroups.Default) || colFamily.eq(ColumnGroups.Attributes)) {
(colFamily, KryoFeatureSerializer(subset, SerializationOptions.withoutId))
} else {
(colFamily, new ProjectingKryoFeatureSerializer(sft, subset, SerializationOptions.withoutId))
}
}
}
/**
* Find a column group that supports the given transform and filter
*
* @param sft simple feature type
* @param transform transform definitions
* @param ecql filter, if any
* @return
*/
def group(sft: SimpleFeatureType, transform: Option[String], ecql: Option[Filter]): (Array[Byte], SimpleFeatureType) = {
val groups = apply(sft)
transform.map(Transforms(sft, _)) match {
case None => groups.last
case Some(definitions) =>
val iter = groups.iterator
var group = iter.next
// last group has all the columns, so just return the last one if nothing else matches
while (iter.hasNext && !supports(group._2, definitions, ecql)) {
group = iter.next
}
group
}
}
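  // Continuing the sketch above: a transform of just "name" with no filter resolves to the small
  // group "b", while a filter touching "dtg" pushes the choice up to group "a" (or to the default
  // group if nothing smaller satisfies both the transform and the filter).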
/**
* Validate that the column groups do not overlap with reserved column groups
*
* @param sft simple feature type
*/
def validate(sft: SimpleFeatureType): Unit = {
val groups = sft.getAttributeDescriptors.asScala.flatMap(_.getColumnGroups()).distinct
groups.foreach { group =>
if (group == ColumnGroups.DefaultString || group == ColumnGroups.AttributesString) {
throw new IllegalArgumentException(s"Column group '$group' is reserved for internal use - " +
"please choose another name")
}
}
if (sft.getVisibilityLevel == VisibilityLevel.Attribute && groups.nonEmpty) {
throw new IllegalArgumentException("Column groups are not supported when using attribute-level visibility")
}
}
/**
* Does the simple feature type contain the fields required to evaluate the transform and filter
*
* @param sft simple feature type
* @param transforms transform definitions
* @param filter filter
* @return
*/
private def supports(sft: SimpleFeatureType, transforms: Seq[Transform], filter: Option[Filter]): Boolean = {
filter.forall(FilterHelper.propertyNames(_, sft).forall(sft.indexOf(_) != -1)) &&
transforms.flatMap(_.properties).forall(sft.indexOf(_) != -1)
}
}
object ColumnGroups {
private val DefaultString = "d"
private val AttributesString = "a"
val Default: Array[Byte] = DefaultString.getBytes(StandardCharsets.UTF_8)
val Attributes: Array[Byte] = AttributesString.getBytes(StandardCharsets.UTF_8)
private val cache =
Caffeine.newBuilder()
.expireAfterWrite(TableBasedMetadata.Expiry.toDuration.get.toMillis, TimeUnit.MILLISECONDS)
.build[String, IndexedSeq[(Array[Byte], SimpleFeatureType)]]()
}
| ccri/geomesa | geomesa-index-api/src/main/scala/org/locationtech/geomesa/index/conf/ColumnGroups.scala | Scala | apache-2.0 | 6,304 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations
import uk.gov.hmrc.ct.box.{CtBoxIdentifier, CtOptionalInteger, Linked}
case class CP671(value: Option[Int]) extends CtBoxIdentifier with CtOptionalInteger
object CP671 extends Linked[CP91, CP671] {
override def apply(source: CP91): CP671 = CP671(source.value)
}
| hmrc/ct-calculations | src/main/scala/uk/gov/hmrc/ct/computations/CP671.scala | Scala | apache-2.0 | 909 |
package controllers
import io.apibuilder.api.v0.models.json._
import io.apibuilder.api.v0.models.Domain
import db.{MembershipsDao, OrganizationDomainsDao, OrganizationsDao}
import javax.inject.{Inject, Singleton}
import lib.Validation
import play.api.mvc._
import play.api.libs.json._
@Singleton
class Domains @Inject() (
val apibuilderControllerComponents: ApibuilderControllerComponents,
organizationDomainsDao: OrganizationDomainsDao
) extends ApibuilderController {
def post(orgKey: String) = Identified(parse.json) { request =>
withOrgAdmin(request.user, orgKey) { org =>
request.body.validate[Domain] match {
case e: JsError => {
Conflict(Json.toJson(Validation.invalidJson(e)))
}
case s: JsSuccess[Domain] => {
val form = s.get
organizationDomainsDao.findAll(
organizationGuid = Some(org.guid),
domain = Some(form.name)
).headOption match {
case None => {
val od = organizationDomainsDao.create(request.user, org, form.name)
Ok(Json.toJson(od.domain))
}
case Some(_) => {
Conflict(Json.toJson(Validation.error("domain has already been registered")))
}
}
}
}
}
}
def deleteByName(orgKey: String, name: String) = Identified { request =>
withOrgAdmin(request.user, orgKey) { org =>
org.domains.find(_.name == name).foreach { domain =>
organizationDomainsDao.findAll(organizationGuid = Some(org.guid), domain = Some(domain.name)).foreach { orgDomain =>
organizationDomainsDao.softDelete(request.user, orgDomain)
}
}
NoContent
}
}
}
| gheine/apidoc | api/app/controllers/Domains.scala | Scala | mit | 1,721 |
package org.dsa.iot.rx.core
import org.dsa.iot.rx.RxMerger2
/**
* First emits the items emitted by the first source, and then the items emitted by the second.
*
* <img width="640" height="380" src="https://raw.githubusercontent.com/wiki/ReactiveX/RxJava/images/rx-operators/concat.png" alt="" />
*/
class Concat[T] extends RxMerger2[T, T, T] {
protected def compute = source1.in ++ source2.in
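  // Behaviour sketch: if source1 emits 1, 2, 3 and source2 emits 4, 5, the output emits
  // 1, 2, 3, 4, 5; items from the second source are only emitted once the first has completed.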
}
/**
* Factory for [[Concat]] instances.
*/
object Concat {
/**
* Creates a new Concat instance.
*/
def apply[T]: Concat[T] = new Concat[T]
}
| IOT-DSA/dslink-scala-ignition | src/main/scala/org/dsa/iot/rx/core/Concat.scala | Scala | apache-2.0 | 560
/***********************************************************************
* Copyright (c) 2013-2022 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa
import org.apache.parquet.filter2.predicate.FilterPredicate
import org.geotools.feature.simple.SimpleFeatureTypeBuilder
import org.locationtech.geomesa.filter.FilterHelper
import org.locationtech.geomesa.utils.geotools.Transform.{ExpressionTransform, PropertyTransform, RenameTransform, Transforms}
import org.opengis.feature.simple.SimpleFeatureType
import org.opengis.filter.Filter
package object parquet {
/**
* Schema to read and schema to return.
*
   * If we have to return a different feature type than the one we read, we need to apply a secondary transform.
   * Otherwise, we can just do the transform on read and skip the secondary transform.
*
* @param read read schema, includes fields to filter on
* @param transform return schema, if different from read schema
*/
case class ReadSchema(read: SimpleFeatureType, transform: Option[(String, SimpleFeatureType)])
/**
* Filter to read files
*
* @param parquet parquet filter that we can push down to the file format
* @param residual residual geotools filter that we have to apply after read
*/
case class ReadFilter(parquet: Option[FilterPredicate], residual: Option[Filter])
object ReadSchema {
import org.locationtech.geomesa.filter.RichTransform.RichTransform
/**
* Calculates the read schema
*
* @param sft simple feature type
* @param filter query filter
* @param transform query transform
* @return
*/
def apply(
sft: SimpleFeatureType,
filter: Option[Filter],
transform: Option[(String, SimpleFeatureType)]): ReadSchema = {
transform match {
case None => ReadSchema(sft, None)
case Some((tdefs, _)) =>
val definitions = Transforms(sft, tdefs)
val secondary = definitions.exists {
case _: PropertyTransform => false
case _: RenameTransform => false
case _: ExpressionTransform => true
}
val transformCols = definitions.flatMap(_.properties).distinct
val filterCols = filter match {
case None => Seq.empty
case Some(f) => FilterHelper.propertyNames(f, sft).filterNot(transformCols.contains)
}
val projectedSft = {
val builder = new SimpleFeatureTypeBuilder()
builder.setName(sft.getName)
transformCols.foreach(a => builder.add(sft.getDescriptor(a)))
filterCols.foreach(a => builder.add(sft.getDescriptor(a)))
builder.buildFeatureType()
}
projectedSft.getUserData.putAll(sft.getUserData)
ReadSchema(projectedSft, if (secondary || filterCols.nonEmpty) { transform } else { None })
}
}
}
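  // ReadSchema sketch: for a property-only transform of "name" combined with a filter on "geom",
  // the read schema contains both "name" and "geom"; because the filter adds a column the caller
  // did not ask for, the original transform is kept and re-applied after reading. If there is no
  // filter, or it only touches transformed properties, projection happens entirely on read and no
  // secondary transform is returned.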
object ReadFilter {
/**
* Create a read filter
*
* @param sft simple feature type
* @param filter query filter
* @return
*/
def apply(sft: SimpleFeatureType, filter: Option[Filter]): ReadFilter = {
val (parquet, residual) = filter match {
case None | Some(Filter.INCLUDE) => (None, None)
case Some(f) => FilterConverter.convert(sft, f)
}
ReadFilter(parquet, residual)
}
}
}
| locationtech/geomesa | geomesa-fs/geomesa-fs-storage/geomesa-fs-storage-parquet/src/main/scala/org/locationtech/geomesa/parquet/package.scala | Scala | apache-2.0 | 3,700 |
package scalamachine.core.tests
import org.specs2._
import mock._
import org.mockito.{Matchers => MM}
import scalamachine.core._
import Resource._
import v3.WebmachineDecisions
import HTTPHeaders._
import HTTPMethods._
class V3ColGSpecs extends Specification with Mockito with SpecsHelper with WebmachineDecisions { def is =
"Webmachine V3 Column G".title ^
p^
"G7 - Resource Exists?" ^
"Sets the Vary header after conneg" ^
"vary header contains all values if all 3 headers were used in conneg" ! testVaryAll ^
"vary header does not contain accept if list has size 0" ! testVaryContentTypesProvided0 ^
"vary header does not contain accept if list has size 1" ! testVaryContentTypesProvided1 ^
"vary header does not contain accept-charset if short circuited" ! testVaryCharsetsShortCircuit ^
"vary header does not contain accept-charset if list has size 0" ! testVaryCharsetsProvided0 ^
"vary header does not contain accept-charset if list has size 1" ! testVaryCharsetsProvided1 ^
"vary header does not contain accept-encoding if short circuited" ! testVaryEncodingsShortCircuit ^
"vary header does not contain accept-encoding if list has size 0" ! testVaryEncodingsProvided0 ^
"vary header does not contain accept-encoding if list has size 1" ! testVaryEncodingsProvided1 ^
"if resource returns non-empty list, those values are additional" ! testVaryResourceAdditional ^p^
"if resource exists, returns decision G8" ! testResourceExistsTrue ^
"otherwise H7 returned" ! testResourceExistsFalse ^
p^
"G8 - If-Match Exists?" ^
"if If-Match header exists, G9 is returned" ! testG8IfMatchExists ^
"otherwise H10 is returned" ! testG8IfMatchMissing ^
p^
"G9 - If-Match: *?" ^
"""if If-Match has value "*", H10 is returned""" ! testIfMatchStar ^
"otherwise G11 is returned" ! testIfMatchNotStar ^
p^
"G11 - ETag in If-Match" ^
"if ETag for resource is in list of etags in If-Match, H10 is returned" ! testIfMatchHasEtag ^
"otherwise a response with code 412 is returned" ! testIfMatchMissingEtag ^
end
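  // Each testVary* case below stubs the resource's content negotiation callbacks and checks which
  // of Accept / Accept-Charset / Accept-Encoding end up in the Vary header: a header is only added
  // when the corresponding provided list offers a real choice (more than one entry).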
def testVaryAll = {
import Res._
val ctypes: ContentTypesProvided =
(ContentType("text/plain"), (d: ReqRespData) => (d,result(FixedLengthBody("")))) ::
(ContentType("application/json"), (d: ReqRespData) => (d,result(FixedLengthBody("")))) ::
Nil
val charsets: CharsetsProvided = Some(("charset1", identity[Array[Byte]](_)) :: ("charset2", identity[Array[Byte]](_)) :: Nil)
val encodings: EncodingsProvided = Some(("identity", identity[Array[Byte]](_)) :: ("gzip", identity[Array[Byte]](_)) :: Nil)
testDecisionResultHasData(
g7,
resource => {
resource.contentTypesProvided(any) answers mkAnswer(ctypes)
resource.charsetsProvided(any) answers mkAnswer(charsets)
resource.encodingsProvided(any) answers mkAnswer(encodings)
resource.variances(any) answers mkAnswer(Nil)
resource.resourceExists(any) answers mkAnswer(true)
}
) {
_.responseHeader(Vary) must beSome.like {
case vary => vary must contain("Accept") and contain("Accept-Encoding") and contain("Accept-Charset")
}
}
}
def testVaryContentTypesProvided0 = {
import Res._
val ctypes: ContentTypesProvided = Nil
val charsets: CharsetsProvided = Some(("charset1", identity[Array[Byte]](_)) :: ("charset2", identity[Array[Byte]](_)) :: Nil)
val encodings: EncodingsProvided = Some(("identity", identity[Array[Byte]](_)) :: ("gzip", identity[Array[Byte]](_)) :: Nil)
testDecisionResultHasData(
g7,
resource => {
resource.contentTypesProvided(any) answers mkAnswer(ctypes)
resource.charsetsProvided(any) answers mkAnswer(charsets)
resource.encodingsProvided(any) answers mkAnswer(encodings)
resource.variances(any) answers mkAnswer(Nil)
resource.resourceExists(any) answers mkAnswer(true)
}
) {
_.responseHeader(Vary) must beSome.like {
case vary => vary must not =~("""Accept[^-]""")
}
}
}
def testVaryContentTypesProvided1 = {
import Res._
val ctypes: ContentTypesProvided = (ContentType("application/json"), (d: ReqRespData) => (d, result(FixedLengthBody("")))) :: Nil
val charsets: CharsetsProvided = Some(("charset1", identity[Array[Byte]](_)) :: ("charset2", identity[Array[Byte]](_)) :: Nil)
val encodings: EncodingsProvided = Some(("identity", identity[Array[Byte]](_)) :: ("gzip", identity[Array[Byte]](_)) :: Nil)
testDecisionResultHasData(
g7,
resource => {
resource.contentTypesProvided(any) answers mkAnswer(ctypes)
resource.charsetsProvided(any) answers mkAnswer(charsets)
resource.encodingsProvided(any) answers mkAnswer(encodings)
resource.variances(any) answers mkAnswer(Nil)
resource.resourceExists(any) answers mkAnswer(true)
}
) {
_.responseHeader(Vary) must beSome.like {
case vary => vary must not =~("""Accept[^-]""")
}
}
}
def testVaryCharsetsShortCircuit = {
import Res._
val ctypes: ContentTypesProvided =
(ContentType("text/plain"), (d: ReqRespData) => (d, result(FixedLengthBody("")))) ::
(ContentType("application/json"), (d: ReqRespData) => (d, result(FixedLengthBody("")))) ::
Nil
val charsets: CharsetsProvided = None
val encodings: EncodingsProvided = Some(("identity", identity[Array[Byte]](_)) :: ("gzip", identity[Array[Byte]](_)) :: Nil)
testDecisionResultHasData(
g7,
resource => {
resource.contentTypesProvided(any) answers mkAnswer(ctypes)
resource.charsetsProvided(any) answers mkAnswer(charsets)
resource.encodingsProvided(any) answers mkAnswer(encodings)
resource.variances(any) answers mkAnswer(Nil)
resource.resourceExists(any) answers mkAnswer(true)
}
) {
_.responseHeader(Vary) must beSome.like {
case vary => vary must not contain("Accept-Charset")
}
}
}
def testVaryCharsetsProvided0 = {
import Res._
val ctypes: ContentTypesProvided =
(ContentType("text/plain"), (d: ReqRespData) => (d, result(HTTPBody.Empty))) ::
(ContentType("application/json"), (d: ReqRespData) => (d, result(HTTPBody.Empty))) ::
Nil
val charsets: CharsetsProvided = Some(Nil)
val encodings: EncodingsProvided = Some(("identity", identity[Array[Byte]](_)) :: ("gzip", identity[Array[Byte]](_)) :: Nil)
testDecisionResultHasData(
g7,
resource => {
resource.contentTypesProvided(any) answers mkAnswer(ctypes)
resource.charsetsProvided(any) answers mkAnswer(charsets)
resource.encodingsProvided(any) answers mkAnswer(encodings)
resource.variances(any) answers mkAnswer(Nil)
resource.resourceExists(any) answers mkAnswer(true)
}
) {
_.responseHeader(Vary) must beSome.like {
case vary => vary must not contain("Accept-Charset")
}
}
}
def testVaryCharsetsProvided1 = {
import Res._
val ctypes: ContentTypesProvided =
(ContentType("text/plain"), (d: ReqRespData) => (d, result(HTTPBody.Empty))) ::
(ContentType("application/json"), (d: ReqRespData) => (d, result(HTTPBody.Empty))) ::
Nil
val charsets: CharsetsProvided = Some(("charset2", identity[Array[Byte]](_)) :: Nil)
val encodings: EncodingsProvided = Some(("identity", identity[Array[Byte]](_)) :: ("gzip", identity[Array[Byte]](_)) :: Nil)
testDecisionResultHasData(
g7,
resource => {
resource.contentTypesProvided(any) answers mkAnswer(ctypes)
resource.charsetsProvided(any) answers mkAnswer(charsets)
resource.encodingsProvided(any) answers mkAnswer(encodings)
resource.variances(any) answers mkAnswer(Nil)
resource.resourceExists(any) answers mkAnswer(true)
}
) {
_.responseHeader(Vary) must beSome.like {
case vary => vary must not contain("Accept-Charset")
}
}
}
def testVaryEncodingsShortCircuit = {
import Res._
val ctypes: ContentTypesProvided =
(ContentType("text/plain"), (d: ReqRespData) => (d, result(HTTPBody.Empty))) ::
(ContentType("application/json"), (d: ReqRespData) => (d, result(HTTPBody.Empty))) ::
Nil
val charsets: CharsetsProvided = Some(("charset1", identity[Array[Byte]](_)) :: ("charset2", identity[Array[Byte]](_)) :: Nil)
val encodings: EncodingsProvided = None
testDecisionResultHasData(
g7,
resource => {
resource.contentTypesProvided(any) answers mkAnswer(ctypes)
resource.charsetsProvided(any) answers mkAnswer(charsets)
resource.encodingsProvided(any) answers mkAnswer(encodings)
resource.variances(any) answers mkAnswer(Nil)
resource.resourceExists(any) answers mkAnswer(true)
}
) {
_.responseHeader(Vary) must beSome.like {
case vary => vary must not contain("Accept-Encoding")
}
}
}
def testVaryEncodingsProvided0 = {
import Res._
val ctypes: ContentTypesProvided =
(ContentType("text/plain"), (d: ReqRespData) => (d, result(HTTPBody.Empty))) ::
(ContentType("application/json"), (d: ReqRespData) => (d, result(HTTPBody.Empty))) ::
Nil
val charsets: CharsetsProvided = Some(("charset1", identity[Array[Byte]](_)) :: ("charset2", identity[Array[Byte]](_)) :: Nil)
val encodings: EncodingsProvided = Some(Nil)
testDecisionResultHasData(
g7,
resource => {
resource.contentTypesProvided(any) answers mkAnswer(ctypes)
resource.charsetsProvided(any) answers mkAnswer(charsets)
resource.encodingsProvided(any) answers mkAnswer(encodings)
resource.variances(any) answers mkAnswer(Nil)
resource.resourceExists(any) answers mkAnswer(true)
}
) {
_.responseHeader(Vary) must beSome.like {
case vary => vary must not contain("Accept-Encoding")
}
}
}
def testVaryEncodingsProvided1 = {
import Res._
val ctypes: ContentTypesProvided =
(ContentType("text/plain"), (d: ReqRespData) => (d, result(HTTPBody.Empty))) ::
(ContentType("application/json"), (d: ReqRespData) => (d, result(HTTPBody.Empty))) ::
Nil
val charsets: CharsetsProvided = Some(("charset1", identity[Array[Byte]](_)) :: ("charset2", identity[Array[Byte]](_)) :: Nil)
val encodings: EncodingsProvided = Some(("gzip", identity[Array[Byte]](_)) :: Nil)
testDecisionResultHasData(
g7,
resource => {
resource.contentTypesProvided(any) answers mkAnswer(ctypes)
resource.charsetsProvided(any) answers mkAnswer(charsets)
resource.encodingsProvided(any) answers mkAnswer(encodings)
resource.variances(any) answers mkAnswer(Nil)
resource.resourceExists(any) answers mkAnswer(true)
}
) {
_.responseHeader(Vary) must beSome.like {
case vary => vary must not contain("Accept-Encoding")
}
}
}
def testVaryResourceAdditional = {
import Res._
val ctypes: ContentTypesProvided =
(ContentType("text/plain"), (d: ReqRespData) => (d, result(HTTPBody.Empty))) ::
(ContentType("application/json"), (d: ReqRespData) => (d, result(HTTPBody.Empty))) ::
Nil
val charsets: CharsetsProvided = Some(("charset1", identity[Array[Byte]](_)) :: ("charset2", identity[Array[Byte]](_)) :: Nil)
val encodings: EncodingsProvided = Some(("identity", identity[Array[Byte]](_)) :: ("gzip", identity[Array[Byte]](_)) :: Nil)
testDecisionResultHasData(
g7,
resource => {
resource.contentTypesProvided(any) answers mkAnswer(ctypes)
resource.charsetsProvided(any) answers mkAnswer(charsets)
resource.encodingsProvided(any) answers mkAnswer(encodings)
resource.variances(any) answers mkAnswer("One" :: "Two" :: Nil)
resource.resourceExists(any) answers mkAnswer(true)
}
) {
_.responseHeader(Vary) must beSome.like {
case vary => vary must contain("One") and contain("Two")
}
}
}
def testResourceExistsTrue = {
testDecisionReturnsDecision(
g7,
g8,
resource => {
resource.contentTypesProvided(any) answers mkAnswer(Nil)
resource.charsetsProvided(any) answers mkAnswer(None)
resource.encodingsProvided(any) answers mkAnswer(None)
resource.variances(any) answers mkAnswer(Nil)
resource.resourceExists(any) answers mkAnswer(true)
}
)
}
def testResourceExistsFalse = {
testDecisionReturnsDecision(
g7,
h7,
resource => {
resource.contentTypesProvided(any) answers mkAnswer(Nil)
resource.charsetsProvided(any) answers mkAnswer(None)
resource.encodingsProvided(any) answers mkAnswer(None)
resource.variances(any) answers mkAnswer(Nil)
resource.resourceExists(any) answers mkAnswer(false)
}
)
}
def testG8IfMatchExists = {
testDecisionReturnsDecision(g8,g9, r => {}, data = createData(headers = Map(IfMatch -> "*")))
}
def testG8IfMatchMissing = {
testDecisionReturnsDecision(g8,h10, r => {})
}
def testIfMatchStar = {
testDecisionReturnsDecision(g9,h10, r => {}, data = createData(headers = Map(IfMatch -> "*")))
}
def testIfMatchNotStar = {
testDecisionReturnsDecision(g9,g11, r => {}, data = createData(headers = Map(IfMatch -> "1")))
}
def testIfMatchHasEtag = {
testDecisionReturnsDecision(g11,h10,_.generateEtag(any) answers mkAnswer(Some("1")), data = createData(headers = Map(IfMatch -> "1,2")))
}
def testIfMatchMissingEtag = {
testDecisionReturnsData(g11,_.generateEtag(any) answers mkAnswer(None), data = createData(headers = Map(IfMatch -> "1"))) {
_.statusCode must beEqualTo(412)
}
}
}
| stackmob/scalamachine | core/src/test/scala/scalamachine/core/tests/V3ColGSpecs.scala | Scala | apache-2.0 | 15,062
package org.scalatra
import test.scalatest.ScalatraFunSuite
class GetResponseStatusSupportTestServlet extends ScalatraServlet {
before() {
session // Establish a session before we commit the response
}
after() {
session("status") = status.toString
}
get("/status/:status") {
response.setStatus(params("status").toInt)
status.toString
}
get("/redirect") {
response.sendRedirect("/")
}
get("/session-status") {
session.getOrElse("status", "none")
}
get("/send-error/:status") {
response.sendError(params("status").toInt)
}
get("/send-error/:status/:msg") {
response.sendError(params("status").toInt, params("msg"))
}
}
class GetResponseStatusSupportTest extends ScalatraFunSuite {
addServlet(classOf[GetResponseStatusSupportTestServlet], "/*")
test("remember status after setStatus") {
get("/status/404") {
body should equal ("404")
}
}
test("remembers status after sendRedirect") {
session {
get("/redirect") {}
get("/session-status") { body should equal ("302") }
}
}
test("remembers status after sendError without a message") {
session {
get("/send-error/500") {}
get("/session-status") { body should equal ("500") }
}
}
test("remembers status after sendError with a message") {
session {
get("/send-error/504/Gateway%20Timeout") {}
get("/session-status") { body should equal ("504") }
}
}
}
| louk/scalatra | core/src/test/scala/org/scalatra/GetResponseStatusSupportTest.scala | Scala | bsd-2-clause | 1,458 |
/***********************************************************************
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.web.core
import javax.servlet.http.{HttpServletRequest, HttpServletRequestWrapper}
import scala.annotation.tailrec
/**
* Reverts geoserver's path handling in order to fix scalatra's servlet mapping.
*
 * @see org.geoserver.platform.AdvancedDispatchFilter
* @param request request to wrap
*/
class PathHandlingServletRequest(request: HttpServletRequest) extends HttpServletRequestWrapper(request) {
private lazy val unwrapped = PathHandlingServletRequest.unwrap(request)
override def getPathInfo: String = unwrapped.getPathInfo
override def getServletPath: String = unwrapped.getServletPath
}
object PathHandlingServletRequest {
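  /** Walks the wrapper chain (such as the wrappers introduced by GeoServer's dispatch handling)
    * down to the container's original request, so that pathInfo and servletPath reflect the real
    * servlet mapping. */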
@tailrec
def unwrap(request: HttpServletRequest): HttpServletRequest = {
request match {
case r: HttpServletRequestWrapper => unwrap(r.getRequest.asInstanceOf[HttpServletRequest])
case r => r
}
}
}
| ronq/geomesa | geomesa-web/geomesa-web-core/src/main/scala/org/locationtech/geomesa/web/core/PathHandlingServletRequest.scala | Scala | apache-2.0 | 1,374 |
/*
* Copyright 2006 - 2013
* Stefan Balev <[email protected]>
* Julien Baudry <[email protected]>
* Antoine Dutot <[email protected]>
* Yoann Pigné <[email protected]>
* Guilhelm Savin <[email protected]>
*
* This file is part of GraphStream <http://graphstream-project.org>.
*
* GraphStream is a library whose purpose is to handle static or dynamic
* graph, create them from scratch, file or any source and display them.
*
* This program is free software distributed under the terms of two licenses, the
* CeCILL-C license that fits European law, and the GNU Lesser General Public
* License. You can use, modify and/ or redistribute the software under the terms
* of the CeCILL-C license as circulated by CEA, CNRS and INRIA at the following
* URL <http://www.cecill.info> or under the terms of the GNU LGPL as published by
* the Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* The fact that you are presently reading this means that you have had
* knowledge of the CeCILL-C and LGPL licenses and that you accept their terms.
*/
package org.graphstream.ui.j2dviewer.renderer.test
import org.graphstream.graph.implementations.SingleGraph
import org.graphstream.algorithm.generator.GridGenerator
object TestSpeed {
def main(args:Array[String]) = (new TestSpeed).test
}
class TestSpeed {
def test() {
System.setProperty("gs.ui.renderer", "org.graphstream.ui.j2dviewer.J2DGraphRenderer")
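        // Count down for roughly a minute before doing anything, presumably to leave time to
        // attach a profiler or start a screen recording before the graph is displayed.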
for(i <- 1 until 60) {
Console.err.println("%d".format(i))
Thread.sleep(1000)
}
val graph = new SingleGraph("simple")
var gridg = new GridGenerator(false, false, true)
graph.addAttribute("ui.log", "fps.log")
graph.display(false)
gridg.addSink(graph)
gridg.begin
gridg.nextEvents
Thread.sleep(1000)
for(i <- 1 until 10) {
gridg.nextEvents
Thread.sleep(100)
}
for(i <- 1 until 100) {
gridg.nextEvents
Thread.sleep(10)
}
gridg.end
gridg.removeSink(graph)
}
}
| prismsoul/gedgraph | sources/prismsoul.genealogy.gedgraph/gs-ui/org/graphstream/ui/j2dviewer/renderer/test/TestSpeed.scala | Scala | gpl-2.0 | 2,709
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andre White.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.truthencode.ddo.model.feats
import io.truthencode.ddo.model.race.Race
import io.truthencode.ddo.support.requisite.{FeatRequisiteImpl, RaceRequisite, RequiresAnyOfRace}
/**
* Created by adarr on 2/20/2017.
*/
trait SmallSizeBonus
extends FeatRequisiteImpl with RaceRequisite with Passive with RequiresAnyOfRace {
self: RacialFeat =>
override def anyOfRace: Seq[(Race, Int)] =
List((Race.Gnome, 1), (Race.DeepGnome, 1), (Race.Halfling, 1))
override def grantsToRace: Seq[(Race, Int)] =
List((Race.Gnome, 1), (Race.DeepGnome, 1), (Race.Halfling, 1))
}
}
| adarro/ddo-calc | subprojects/common/ddo-core/src/main/scala/io/truthencode/ddo/model/feats/SmallSizeBonus.scala | Scala | apache-2.0 | 1,261
/*
* Distributed as part of the Stanford Topic Modeling Toolbox.
* Copyright (c) 2009- The Board of Trustees of the Leland
* Stanford Junior University.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA,
* 02110-1301, USA.
*/
package edu.stanford.nlp.tmt;
package learn;
import java.util.concurrent.CountDownLatch;
import scala.collection.IterableLike;
import scala.collection.TraversableView.NoBuilder;
import scala.collection.generic.CanBuildFrom;
import scala.concurrent.FutureTaskRunner;
import scala.concurrent.ops._;
import scalanlp.collection.LazyIterable;
/**
* A unified collection-like view of a set of shards, each of which is
* IterableLike. Calls to map, filter, reduce, foreach, etc., are evaluated
* with the help of a FutureTaskRunner. Note: callers should use the
* provided <code>reduce</code> method for a parallel reduction instead of
* <code>reduceLeft</code> and <code>reduceRight</code>.
*
* @author dramage
*/
class Sharded[A](val shards : List[List[A]])(implicit val ft : FutureTaskRunner)
extends Iterable[A] with IterableLike[A,Sharded[A]] {
type Repr = Sharded[A];
override def newBuilder =
new NoBuilder[A];
def iterator : Iterator[A] = new Iterator[A] {
val iterators = shards.map(_.iterator).toArray;
var which = 0;
override def hasNext =
iterators(which).hasNext;
override def next = {
val rv = iterators(which).next;
which = (which + 1) % iterators.length;
rv;
}
}
/** Applies the given function to each shard. */
def foreachShard[B](f : (List[A],Int) => B) : Unit = {
val latch = new CountDownLatch(shards.length);
for ((shard,index) <- shards.zipWithIndex) {
future({f(shard,index); latch.countDown;})(ft)
}
latch.await;
}
override def map[B,That](f : A=>B)(implicit bf : CanBuildFrom[Repr, B, That]) : That =
new Sharded[B](shards.map(s => future(s.map(f))(ft)).map(_.apply)).asInstanceOf[That];
override def filter(f : A=>Boolean) : Repr =
new Sharded[A](shards.map(s => future(s.filter(f))(ft)).map(_.apply)).asInstanceOf[Repr];
override def foreach[B](f: A => B) : Unit =
foreachShard((s,i) => s.foreach(f));
override def reduce[A1 >: A](op: (A1, A1) => A1) : A1 =
shards.filter(!_.isEmpty).map(s => future(s.reduceLeft(op))(ft)).map(_.apply).reduceLeft(op);
override def reduceLeft[B >: A] (op: (B, A) ⇒ B) : B =
throw new ShardedException("reduceLeft called on Sharded. Use reduce instead.");
override def reduceRight[B >: A] (op: (A, B) ⇒ B) : B =
throw new ShardedException("reduceRight called on Sharded. Use reduce instead.");
override def max [B >: A] (implicit cmp: Ordering[B]) : A =
shards.map(_.max(cmp)).max(cmp);
override def min [B >: A] (implicit cmp: Ordering[B]) : A =
shards.map(_.min(cmp)).min(cmp);
override def sum [B >: A] (implicit num: Numeric[B]) : B =
shards.map(_.sum(num)).sum(num);
override def product [B >: A] (implicit num: Numeric[B]) : B =
shards.map(_.product(num)).product(num);
override def size =
shards.map(_.size).sum;
override def toList =
iterator.toList;
}
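/*
 * Usage sketch (relying on Sharded.apply's default ThreadRunner; any FutureTaskRunner works):
 *
 *   val sharded = Sharded(1 to 1000000);            // one shard per available processor
 *   val total   = sharded.map(_ * 2).reduce(_ + _); // each shard is mapped and reduced in parallel
 *
 * As noted above, use reduce rather than reduceLeft/reduceRight, which deliberately throw.
 */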
object Sharded {
def apply[A](coll : Iterable[A], numShards : Int = Runtime.getRuntime.availableProcessors)
(implicit ft : FutureTaskRunner = new scala.concurrent.ThreadRunner) : Sharded[A] =
toLists(coll, numShards, (a : A) => a);
def toLists[A,B]
(coll : Iterable[A], numShards : Int = Runtime.getRuntime.availableProcessors, mapper : (A=>B))
(implicit ft : FutureTaskRunner = new scala.concurrent.ThreadRunner) = {
val sizeHint = if (coll.isInstanceOf[Seq[_]]) Some(coll.size / numShards) else None;
val channels = Array.fill(numShards)(new scala.concurrent.SyncVar[Option[A]]());
val results = new Array[List[B]](numShards);
val futures = Array.tabulate(numShards)(i => future({
var builder = List.newBuilder[B];
if (sizeHint.isDefined) {
builder.sizeHint(sizeHint.get);
}
var done = false;
while (!done) {
channels(i).get match {
case Some(value) => builder += mapper(value);
case None => done = true;
}
}
results(i) = builder.result;
})(ft));
var i = 0;
for (item <- coll) {
channels(i % numShards).put(Some(item));
i += 1;
}
channels.foreach(_.put(None));
futures.foreach(_.apply());
new Sharded[B](results.toList);
}
def shard[V](iterable : Iterable[V], numShards : Int) : List[LazyIterable[V]] =
shard(iterable.iterator, numShards).map(iter => LazyIterable[V](iter));
def shard[V](iterator : Iterator[V], numShards : Int) : List[Iterator[V]] = {
val cHasNext = Array.fill(numShards)(new scala.concurrent.SyncVar[Boolean]);
val cNext = Array.fill(numShards)(new scala.concurrent.SyncVar[V]);
scala.concurrent.ops.spawn {
var i = 0;
for (item <- iterator) {
cHasNext(i % numShards).put(true);
cNext(i % numShards).put(item);
i += 1;
}
cHasNext.foreach(_.put(false));
}
List.tabulate(numShards)(i => {
new Iterator[V] {
var knowsHasNext = false;
var valofHasNext = false;
override def hasNext = {
if (!knowsHasNext) {
knowsHasNext = true;
valofHasNext = cHasNext(i).get;
}
valofHasNext;
}
override def next = {
if (!hasNext) {
throw new NoSuchElementException();
}
knowsHasNext = false;
cNext(i).get;
}
}
});
}
implicit def canBuildFromIterables[B] : CanBuildFrom[Sharded[_],B,Sharded[B]] =
new CanBuildFrom[Sharded[_],B,Sharded[B]] {
override def apply(from: Sharded[_]) =
new NoBuilder[B];
override def apply() =
new NoBuilder[B];
}
}
class ShardedException(msg : String) extends RuntimeException(msg);
| quhfus/DoSeR | Stanford Topic Model/src/main/scala/edu/stanford/nlp/tmt/learn/Sharded.scala | Scala | gpl-2.0 | 6,560 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.play.asyncmvc.async
import java.util.UUID
import play.api.libs.json.Json
import play.api.mvc.{AnyContent, Request}
import uk.gov.hmrc.play.asyncmvc.model.AsyncMvcSession
import uk.gov.hmrc.time.DateTimeUtils
trait SessionHandler {
Self: AsyncValidation =>
final val AsyncMVCSessionId = "ASYNC_MVC_ID"
  def getClientTimeout: Long // Returns the client timeout in milliseconds. The client can only wait this long for the request before either the result or a timeout page is displayed.
def clearASyncIdFromSession(data: Map[String, String]): Map[String, String] = data - AsyncMVCSessionId
def buildUniqueId() = UUID.randomUUID().toString
def buildSession(id: String, uniqueId: String): String = {
implicit val format = Json.format[AsyncMvcSession]
val asyncMvcSession = AsyncMvcSession(id, uniqueId, DateTimeUtils.now.getMillis + getClientTimeout)
Json.stringify(Json.toJson(asyncMvcSession))
}
def buildSessionWithMVCSessionId(id: String, uniqueId: String, data: Map[String, String])(implicit request: Request[AnyContent]): Map[String, String] = {
data - AsyncMVCSessionId + (AsyncMVCSessionId -> buildSession(id, uniqueId))
}
def getSessionObject()(implicit request: Request[AnyContent]): Option[AsyncMvcSession] = {
request.session.data.get(AsyncMVCSessionId) match {
case Some(e) =>
implicit val format = Json.format[AsyncMvcSession]
Json.parse(e).asOpt[AsyncMvcSession]
case _ => None
}
}
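  /**
   * Hedged usage sketch (not part of the original source): `taskId` is an assumed
   * identifier of the running async task and `request` an implicit Request[AnyContent].
   *
   * (e.g.)
   *   val sessionData = buildSessionWithMVCSessionId(taskId, buildUniqueId(), request.session.data)
   *   // ... on a subsequent request, look up the async task, if any ...
   *   getSessionObject() match {
   *     case Some(asyncSession) => // poll the task recorded in asyncSession
   *     case None               => // no async task in flight
   *   }
   */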
}
| hmrc/play-async | src/main/scala/uk/gov/hmrc/play/asyncmvc/async/SessionHandler.scala | Scala | apache-2.0 | 2,096 |
package com.hashmapinc.tempus
import org.specs._
import org.specs.runner.{ConsoleRunner, JUnit4}
//
//class MySpecTest extends JUnit4(MySpec)
////class MySpecSuite extends ScalaTestSuite(MySpec)
//object MySpecRunner extends ConsoleRunner(MySpec)
//
//object MySpec extends Specification {
// "This wonderful system" should {
// "save the world" in {
// val list = Nil
// list must beEmpty
// }
// }
//}
| hashmapinc/witsml-client | src/examples/scala/src/test/scala/com/hashmapinc/tempus/MySpec.scala | Scala | apache-2.0 | 423 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.utils
import org.apache.flink.table.planner.plan.stats._
import org.apache.flink.table.planner.plan.utils.ColumnIntervalUtil._
import org.junit.Assert.{assertEquals, assertTrue}
import org.junit.Test
import java.lang
import java.math.BigInteger
import java.util.Date
class ColumnIntervalUtilTest {
@Test
def testConvertNumberToString(): Unit = {
assertEquals(Some("1"), convertNumberToString(1))
assertEquals(Some("1"), convertNumberToString(new lang.Integer(1)))
assertEquals(Some("1"), convertNumberToString(1L))
assertEquals(Some("1"), convertNumberToString(new lang.Long(1L)))
assertEquals(Some("1.11"), convertNumberToString(1.11f))
assertEquals(Some("1.11"), convertNumberToString(new lang.Float(1.11f)))
assertEquals(Some("1.11"), convertNumberToString(1.11))
assertEquals(Some("1.11"), convertNumberToString(new lang.Double(1.11)))
assertEquals(Some("1"), convertNumberToString(new BigInt(new BigInteger("1"))))
assertEquals(Some("1"), convertNumberToString(new BigInteger("1")))
assertEquals(Some("1.11"),
convertNumberToString(new BigDecimal(new java.math.BigDecimal("1.11"))))
assertEquals(Some("1.11"), convertNumberToString(new java.math.BigDecimal("1.11")))
assertEquals(None, convertNumberToString("123"))
assertEquals(None, convertNumberToString(new Date()))
}
@Test
def testNegativeValueInterval(): Unit = {
assertEquals(
toBigDecimalInterval(ValueInterval(-2, -1, includeLower = true, includeUpper = true)),
getNegativeOfValueInterval(ValueInterval(1, 2, includeLower = true, includeUpper = true))
)
assertEquals(
toBigDecimalInterval(ValueInterval(-2, -1, includeLower = true, includeUpper = false)),
getNegativeOfValueInterval(ValueInterval(1, 2, includeLower = false, includeUpper = true))
)
assertEquals(
toBigDecimalInterval(RightSemiInfiniteValueInterval(-2, includeLower = true)),
getNegativeOfValueInterval(LeftSemiInfiniteValueInterval(2, includeUpper = true))
)
assertEquals(
toBigDecimalInterval(LeftSemiInfiniteValueInterval(2, includeUpper = true)),
getNegativeOfValueInterval(RightSemiInfiniteValueInterval(-2, includeLower = true))
)
assertEquals(
null,
getNegativeOfValueInterval(ValueInterval("1", "2", includeLower = true, includeUpper = true))
)
assertEquals(
null,
getNegativeOfValueInterval(
ValueInterval(new Date(), new Date(), includeLower = true, includeUpper = true))
)
assertEquals(
toBigDecimalInterval(ValueInterval(-2.2f, -1.1f, includeLower = true, includeUpper = true)),
getNegativeOfValueInterval(
ValueInterval(1.1f, 2.2f, includeLower = true, includeUpper = true))
)
}
@Test
def testGetValueIntervalOfPlus(): Unit = {
assertEquals(
toBigDecimalInterval(ValueInterval(2, 6, includeLower = true, includeUpper = false)),
getValueIntervalOfPlus(
ValueInterval(-1, 2, includeLower = true, includeUpper = false),
ValueInterval(3, 4, includeLower = true, includeUpper = true))
)
assertEquals(
toBigDecimalInterval(LeftSemiInfiniteValueInterval(5, includeUpper = false)),
getValueIntervalOfPlus(
ValueInterval(-1, 2, includeLower = true, includeUpper = false),
LeftSemiInfiniteValueInterval(3, includeUpper = true))
)
assertEquals(
toBigDecimalInterval(LeftSemiInfiniteValueInterval(2, includeUpper = true)),
getValueIntervalOfPlus(
LeftSemiInfiniteValueInterval(-1, includeUpper = true),
LeftSemiInfiniteValueInterval(3, includeUpper = true))
)
assertEquals(
toBigDecimalInterval(RightSemiInfiniteValueInterval(2, includeLower = false)),
getValueIntervalOfPlus(
ValueInterval(-1, 2, includeLower = true, includeUpper = false),
RightSemiInfiniteValueInterval(3, includeLower = false))
)
assertEquals(
toBigDecimalInterval(RightSemiInfiniteValueInterval(6, includeLower = false)),
getValueIntervalOfPlus(
RightSemiInfiniteValueInterval(3, includeLower = true),
RightSemiInfiniteValueInterval(3, includeLower = false))
)
assertEquals(
null,
getValueIntervalOfPlus(
EmptyValueInterval,
ValueInterval(-1, 2, includeLower = true, includeUpper = false))
)
assertEquals(
null,
getValueIntervalOfPlus(
EmptyValueInterval,
LeftSemiInfiniteValueInterval(3, includeUpper = true))
)
assertEquals(
null,
getValueIntervalOfPlus(
EmptyValueInterval,
RightSemiInfiniteValueInterval(3, includeLower = false))
)
}
@Test
def testGetValueIntervalOfMultiply(): Unit = {
assertEquals(
toBigDecimalInterval(ValueInterval(-4, 2, includeLower = false, includeUpper = true)),
getValueIntervalOfMultiply(
ValueInterval(-1, 2, includeLower = true, includeUpper = false),
ValueInterval(-2, 1, includeLower = true, includeUpper = false))
)
assertEquals(
toBigDecimalInterval(ValueInterval(-2, 4, includeLower = false, includeUpper = false)),
getValueIntervalOfMultiply(
ValueInterval(-1, 2, includeLower = true, includeUpper = true),
ValueInterval(1, 2, includeLower = true, includeUpper = false))
)
assertEquals(
toBigDecimalInterval(ValueInterval(1, 4, includeLower = false, includeUpper = false)),
getValueIntervalOfMultiply(
ValueInterval(-2, -1, includeLower = false, includeUpper = false),
ValueInterval(-2, -1, includeLower = false, includeUpper = false))
)
assertEquals(
null,
getValueIntervalOfMultiply(
ValueInterval(-2, -1, includeLower = false, includeUpper = false),
EmptyValueInterval)
)
assertEquals(
null,
getValueIntervalOfMultiply(
ValueInterval(-2, -1, includeLower = false, includeUpper = false),
LeftSemiInfiniteValueInterval(1, includeUpper = false))
)
assertEquals(
null,
getValueIntervalOfMultiply(
ValueInterval(-2, -1, includeLower = false, includeUpper = false),
RightSemiInfiniteValueInterval(1, includeLower = false))
)
assertEquals(
null,
getValueIntervalOfMultiply(
LeftSemiInfiniteValueInterval(1, includeUpper = false),
RightSemiInfiniteValueInterval(1, includeLower = false))
)
}
}
| tzulitai/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/utils/ColumnIntervalUtilTest.scala | Scala | apache-2.0 | 7,276 |
package edu.umass.ciir.kbbridge.data
import collection.mutable.HashMap
import edu.umass.ciir.kbbridge.util.ConfInfo
/**
 * Bidirectional lookup tables between TAC KB entity ids and Wikipedia ids/titles,
 * loaded from the tab-separated id-map file configured by ConfInfo.idmap.
 */
object TacId2WikiTitleMap {
private val f = io.Source.fromFile(ConfInfo.idmap)
private val wikiId2TacId = new HashMap[Int, String]()
private val tacId2wikiId = new HashMap[ String, Int]()
private val tacId2tacType = new HashMap[String, String]()
private val wikiTitle2TacId = new HashMap[String, String]()
private val wikiTitle2WikiId = new HashMap[String, Int]()
private val tacId2WikiTitle = new HashMap[String, String]()
private val tacId2TacTitle = new HashMap[String, String]()
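  // Hedged illustration (not part of the original source): each line of the id-map
  // file is expected to be tab-separated with at least five columns, i.e.
  //   <wikiId>\t<wikiTitle>\t<tacTitle>\t<tacId>\t<tacType>
  // Spaces in the Wikipedia title are replaced with underscores before the maps are built.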
for(line <- f.getLines()){
val sp = line.split("\\t")
val wikiId = Integer.parseInt(sp(0))
val wikiTitle = sp(1).replaceAll(" ","_")
val tacTitle = sp(2)
// load wiki title. put underscores, build mapping wikititle -> tac id and back
val tacId = sp(3)
val tacType = sp(4)
wikiId2TacId += (wikiId -> tacId)
tacId2wikiId += (tacId -> wikiId)
tacId2tacType += (tacId -> tacType)
wikiTitle2TacId += (wikiTitle -> tacId)
wikiTitle2WikiId += (wikiTitle -> wikiId)
tacId2WikiTitle += (tacId -> wikiTitle)
tacId2TacTitle += (tacId -> tacTitle)
}
f.close()
val wikiId2tacIdMap = wikiId2TacId.result()
val tacId2wikiIdMap = tacId2wikiId.result()
val tacId2tacTypeMap = tacId2tacType.result()
val tacId2WikiTitleMap = tacId2WikiTitle.result()
val tacId2TacTitleMap = tacId2TacTitle.result()
val wikiTitle2TacIdMap = wikiTitle2TacId.result()
val wikiTitle2WikiIdMap = wikiTitle2WikiId.result().withDefault(wikititle => {-1})
} | daltonj/KbBridge | src/main/scala/edu/umass/ciir/kbbridge/data/TacId2WikiTitleMap.scala | Scala | apache-2.0 | 1,628 |
object ReturnWithMutation {
case class Counter(var x: BigInt)
def increment(c: Counter): BigInt = {
if (c.x == 100) {
c.x += 1
return c.x
c.x += 1
0
} else {
c.x += 1
c.x += 2
c.x
}
}
def test: Unit = {
val c = Counter(100)
val x1 = increment(c)
val x2 = increment(c)
assert(x1 == 101)
assert(x2 == 104)
assert(c.x == 104)
}
}
| epfl-lara/stainless | frontends/benchmarks/imperative/valid/ReturnWithMutation.scala | Scala | apache-2.0 | 417 |
import ingredients.caseenum._
import org.scalatest.{ Matchers, WordSpec }
class CaseEnumSpec extends WordSpec with Matchers {
sealed trait Planet extends CaseEnum
object Planet {
case object Mercury extends Planet
case object Venus extends Planet
case object Earth extends Planet
}
"CaseEnumMacro" should {
"construct a sensible CaseEnumSerialization" in {
val serialization = CaseEnumSerialization.caseEnumSerialization[Planet]
val pairs = List(
Planet.Mercury -> "Mercury",
Planet.Venus -> "Venus",
Planet.Earth -> "Earth")
for ((co, str) <- pairs) {
serialization.caseToString(co).shouldBe(str)
serialization.caseFromString(str).shouldBe(Some(co))
}
}
}
"SerializationSupport" should {
"provide the typeclass instance" in {
trait FakeJsonSerializer[T] {
def toString(value: T): String
def fromString(str: String): Either[String, T]
}
implicit def fakeJsonSerializer[T <: CaseEnum](implicit instance: CaseEnumSerialization[T]) = new FakeJsonSerializer[T] {
def toString(value: T): String = instance.caseToString(value)
def fromString(str: String): Either[String, T] = instance.caseFromString(str) match {
case Some(v) => Right(v)
case None => Left(
s"$str is not a valid ${instance.name}. Valid values are: ${instance.values.mkString(", ")}"
)
}
}
implicitly[FakeJsonSerializer[Planet]].fromString("Mercury").shouldBe(Right(Planet.Mercury))
implicitly[FakeJsonSerializer[Planet]].fromString("Wrong").shouldBe(Left(
"Wrong is not a valid Planet. Valid values are: Mercury, Venus, Earth"
))
}
}
}
| buildo/ingredients | caseenum/src/test/scala/CaseEnumSpec.scala | Scala | mit | 1,743 |
/*
* Copyright 2016 Nikolay Smelik
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package scalabot.examples.teamnotification.conversations
import java.text.SimpleDateFormat
import scalabot.common.bot.{BotState, Conversation, Exit, Reply}
import scalabot.common.message._
import scalabot.examples.teamnotification.{Flag, TeamNotificationBot, TeamNotificationData}
import scala.util.Try
/**
* Created by Nikolay.Smelik on 8/17/2016.
*/
trait SickConversationProvider {
this: TeamNotificationBot =>
case class SickConversation(data: TeamNotificationData) extends Conversation {
val sickUntilState: BotState = BotState {
case TextIntent(sender, text) =>
val sdf = new SimpleDateFormat("dd MM yyyy")
val sickDateOpt = Try(sdf.parse(text)).toOption
sickDateOpt match {
case Some(date) =>
val user = data.users(sender)
data.updateUsers(sender, user.copy(isSick = true))
Reply(Exit).withIntent(ReplyMessageIntent(sender, s"I will not disturb you until ${sdf.format(date)}.\\nGet well soon!"))
case None =>
Reply(sickUntilState).withIntent(ReplyMessageIntent(sender, "Invalid date format"))
}
}
val sickUntilChoose: BotState = BotState {
case PositiveIntent(sender, _) =>
Reply(sickUntilState).withIntent(ReplyMessageIntent(sender, "Please enter the end date of your sick leave in format dd mm yyyy"))
case NegativeIntent(sender, _) =>
Reply(Exit).withIntent(ReplyMessageIntent(sender, "Okay, I will ask you about it when your turn of flag holder will come.\\nGet well soon!"))
}
override def initialState: BotState = BotState {
case intent: Intent =>
val flags = data.teams.foldLeft(Seq[Flag]()){case (list, team) => list ++ team.flags}
val user = data.users(intent.sender)
data.updateUsers(intent.sender, user.copy(isSick = true))
val intents = flags.filter(flag => flag.holder == intent.sender).map(flag => {
val oldHolder = flag.holder
val newHolder = flag.participants.head
flag.participants = flag.participants.tail :+ oldHolder
flag.holder = newHolder
ReplyMessageIntent(newHolder, s"You are new flag holder for flag ${flag.name} because of current flag holder has ill")
})
Reply(sickUntilChoose).withIntent(ReplyMessageIntent(intent.sender, "Do you know the end date of your sick leave?"))
.withIntent(intents)
}
}
}
| kerzok/ScalaBot | Examples/src/main/scala/scalabot/examples/teamnotification/conversations/SickConversationProvider.scala | Scala | apache-2.0 | 3,021 |
/**
* Copyright 2011-2017 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.core.body
import java.io.Writer
import java.lang.{ StringBuilder => JStringBuilder }
import java.util.{ HashMap => JHashMap, Map => JMap }
import scala.collection.JavaConverters._
import scala.util.control.NonFatal
import io.gatling.commons.validation._
import io.gatling.core.session.Session
import com.mitchellbosecke.pebble.PebbleEngine
import com.mitchellbosecke.pebble.loader.StringLoader
import com.mitchellbosecke.pebble.template.PebbleTemplate
import com.typesafe.scalalogging.StrictLogging
object Pebble extends StrictLogging {
private val Engine = new PebbleEngine.Builder().loader(new StringLoader).build
private def matchMap(map: Map[String, Any]): JMap[String, AnyRef] = {
val jMap: JMap[String, AnyRef] = new JHashMap(map.size)
for ((k, v) <- map) {
v match {
case c: Iterable[Any] => jMap.put(k, c.asJava)
case any: AnyRef => jMap.put(k, any) //The AnyVal case is not addressed, as an AnyVal will be in an AnyRef wrapper
}
}
jMap
}
def parseStringTemplate(string: String): Validation[PebbleTemplate] =
try {
Pebble.Engine.getTemplate(string).success
} catch {
case NonFatal(e) =>
logger.error("Error while parsing Pebble string", e)
e.getMessage.failure
}
def evaluateTemplate(template: PebbleTemplate, session: Session): Validation[String] = {
val context = matchMap(session.attributes)
val writer = StringBuilderWriter.pooled
try {
template.evaluate(writer, context)
writer.toString.success
} catch {
case NonFatal(e) =>
logger.info("Error while evaluate Pebble template", e)
e.getMessage.failure
}
}
}
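/*
 * Hedged usage sketch (not part of the original source): the template string is
 * illustrative and `session` is assumed to be a Gatling Session whose attributes
 * contain a "name" entry.
 *
 *   val rendered: Validation[String] =
 *     Pebble.parseStringTemplate("Hello {{ name }}!")
 *       .flatMap(template => Pebble.evaluateTemplate(template, session))
 */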
object StringBuilderWriter {
private val Pool = ThreadLocal.withInitial[StringBuilderWriter](() => new StringBuilderWriter)
def pooled: StringBuilderWriter = {
val writer = Pool.get()
writer.reset()
writer
}
}
class StringBuilderWriter extends Writer {
val stringBuilder = new JStringBuilder
override def flush(): Unit = {}
def reset(): Unit =
stringBuilder.setLength(0)
override def write(cbuf: Array[Char], off: Int, len: Int): Unit =
throw new UnsupportedOperationException
override def write(string: String): Unit =
stringBuilder.append(string)
override def write(cbuf: Array[Char]): Unit =
stringBuilder.append(cbuf)
override def toString: String =
stringBuilder.toString
override def close(): Unit = {}
}
| timve/gatling | gatling-core/src/main/scala/io/gatling/core/body/Pebble.scala | Scala | apache-2.0 | 3,097 |
/**
*
* HttpClient
* Ledger wallet
*
* Created by Pierre Pollastri on 27/01/15.
*
* The MIT License (MIT)
*
* Copyright (c) 2015 Ledger
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*/
package com.ledger.ledgerwallet.remote
import java.net.{HttpURLConnection, URLConnection, URL}
import java.security.cert.X509Certificate
import java.util.Locale
import javax.net.ssl.X509TrustManager
import android.net.Uri
import com.koushikdutta.async.future.FutureCallback
import com.koushikdutta.async.http
import com.koushikdutta.async.http.AsyncHttpClient.WebSocketConnectCallback
import com.koushikdutta.async.http._
import com.koushikdutta.async.http.body.{JSONObjectBody, AsyncHttpRequestBody}
import com.koushikdutta.async.http.callback.HttpConnectCallback
import com.ledger.ledgerwallet.app.Config
import com.ledger.ledgerwallet.utils.logs.Logger
import org.apache.http.conn.ssl.SSLSocketFactory
import org.apache.http.impl.DefaultHttpRequestFactory
import org.apache.http.HttpRequest
import org.apache.http.client.methods.{HttpDelete, HttpPost, HttpGet}
import org.json.{JSONArray, JSONObject}
import scala.collection.mutable
import scala.concurrent.{Future, Promise}
import scala.util.Try
import scala.concurrent.ExecutionContext.Implicits.global
class HttpClient(baseUrl: Uri) {
type ParametersMap = Map[String, String]
type HeadersMap = Map[String, String]
private type HttpFuture[T] = com.koushikdutta.async.future.Future[T]
val headers = new mutable.HashMap[String, String]()
val _client = AsyncHttpClient.getDefaultInstance
/*
_client.getSSLSocketMiddleware.setTrustManagers(Array(
new X509TrustManager {
override def getAcceptedIssuers: Array[X509Certificate] = null
override def checkClientTrusted(chain: Array[X509Certificate], authType: String): Unit = {}
override def checkServerTrusted(chain: Array[X509Certificate], authType: String): Unit = {}
}
))
_client.getSSLSocketMiddleware.setHostnameVerifier(SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER)
*/
def getJsonObject(url: String,
params: Option[ParametersMap] = None,
body: Option[AsyncHttpRequestBody[_]] = None,
headers: Option[HeadersMap] = None)
: Request[JSONObject] = Request.getJsonObject(url, params, body, headers)
def getJsonArray(url: String,
params: Option[ParametersMap] = None,
body: Option[AsyncHttpRequestBody[_]] = None,
headers: Option[HeadersMap] = None)
: Request[JSONArray] = Request.getJsonArray(url, params, body, headers)
def post(url: String,
params: Option[ParametersMap] = None,
body: Option[AsyncHttpRequestBody[_]] = None,
headers: Option[HeadersMap] = None)
: Request[JSONObject] = Request.post(url, params, body, headers)
def postJsonObject(url: String,
params: Option[ParametersMap] = None,
body: Option[JSONObject] = None,
headers: Option[HeadersMap] = None)
: Request[JSONObject] = Request.post(url, params, body.map(new JSONObjectBody(_)), headers)
def delete(url: String,
params: Option[ParametersMap] = None,
body: Option[AsyncHttpRequestBody[_]] = None,
header: Option[HeadersMap] = None)
: Request[Unit] = Request.delete(url, params, body, header)
def websocket(url: String,
protocol: Option[String] = None,
params: Option[ParametersMap] = None,
headers: Option[HeadersMap] = None)
: Future[WebSocket] = Request.websocket(url, protocol, params, headers).future
def testPageExistence(url: String): Future[Boolean] = Future {
val urlValue = new URL(url)
val connection = urlValue.openConnection().asInstanceOf[HttpURLConnection]
connection.setRequestMethod("GET")
connection.connect()
connection.getResponseCode() != 404
}
trait Request[T] {
def future: Future[T]
def cancel(): Unit
def request: AsyncHttpRequest
def response: Future[AsyncHttpResponse]
}
private class RequestImpl[T](_request: AsyncHttpRequest) extends Request[T] {
var httpFuture: HttpFuture[String] = _
val resultPromise = Promise[T]()
val responsePromise = Promise[AsyncHttpResponse]()
override def future: Future[T] = resultPromise.future
override def cancel(): Unit = httpFuture.cancel()
override def request: AsyncHttpRequest = _request
override def response: Future[AsyncHttpResponse] = responsePromise.future
}
private object Request {
def getJsonObject(url: String, params: Option[ParametersMap], body: Option[AsyncHttpRequestBody[_]], headers: Option[HeadersMap])
: Request[JSONObject] = executeJsonObject(new HttpGet(url), params, body, headers)
def getJsonArray(url: String, params: Option[ParametersMap], body: Option[AsyncHttpRequestBody[_]], headers: Option[HeadersMap])
: Request[JSONArray] = executeJsonArray(new HttpGet(url), params, body, headers)
def post(url: String, params: Option[ParametersMap], body: Option[AsyncHttpRequestBody[_]], headers: Option[HeadersMap])
: Request[JSONObject] = executeJsonObject(new HttpPost(url), params, body, headers)
def delete(url: String, params: Option[ParametersMap], body: Option[AsyncHttpRequestBody[_]], headers: Option[HeadersMap])
: Request[Unit] = execute(new HttpDelete(url), params, body, headers)
private[this] def execute(httpRequest: HttpRequest,
params: Option[ParametersMap],
body: Option[AsyncHttpRequestBody[_]],
headers: Option[HeadersMap])
: Request[Unit] = {
val httpAsyncRequest = configureRequest(httpRequest, params, body, headers)
val request = new RequestImpl[Unit](httpAsyncRequest)
request.httpFuture = _client.executeString(httpAsyncRequest, new VoidCallback(request))
request
}
private[this] def executeJsonObject(httpRequest: HttpRequest,
params: Option[ParametersMap],
body: Option[AsyncHttpRequestBody[_]],
headers: Option[HeadersMap])
: Request[JSONObject] = {
val httpAsyncRequest = configureRequest(httpRequest, params, body, headers)
val request = new RequestImpl[JSONObject](httpAsyncRequest)
request.httpFuture = _client.executeString(httpAsyncRequest, new JSONObjectCallback(request))
request
}
private[this] def executeJsonArray(httpRequest: HttpRequest,
params: Option[ParametersMap],
body: Option[AsyncHttpRequestBody[_]],
headers: Option[HeadersMap])
: Request[JSONArray] = {
val httpAsyncRequest = configureRequest(httpRequest, params, body, headers)
val request = new RequestImpl[JSONArray](httpAsyncRequest)
request.httpFuture = _client.executeString(httpAsyncRequest, new JSONArrayCallback(request))
request
}
def websocket(url: String,
protocol: Option[String],
params: Option[ParametersMap],
headers: Option[HeadersMap])
: Request[WebSocket] = {
val httpAsyncRequest = configureRequest(new HttpGet(url), params, None, headers)
val request = new RequestImpl[WebSocket](httpAsyncRequest)
_client.websocket(httpAsyncRequest, protocol.orNull, null).setCallback(new FutureCallback[http.WebSocket] {
override def onCompleted(ex: Exception, webSocket: http.WebSocket): Unit = {
if (ex == null && webSocket != null) {
request.responsePromise.success(null)
request.resultPromise.success(new WebSocket(webSocket))
} else if (ex != null) {
request.responsePromise.failure(ex)
request.resultPromise.failure(ex)
} else {
val e = new Exception("Websocket call completed without any socket nor exception")
request.responsePromise.failure(e)
request.resultPromise.failure(e)
}
}
})
request
}
private[this] def configureRequest(request: HttpRequest,
params: Option[ParametersMap],
body: Option[AsyncHttpRequestBody[_]],
headers: Option[HeadersMap]
)
: AsyncHttpRequest = {
var requestUri = Uri.parse(request.getRequestLine.getUri)
if (requestUri.isRelative) {
val path = requestUri.toString.split('/')
val builder = baseUrl.buildUpon()
path foreach builder.appendPath
requestUri = builder.build()
} else if (requestUri.isRelative) {
requestUri = baseUrl
}
val uriBuilder = requestUri.buildUpon()
params.foreach { _ foreach {case (key, value) => uriBuilder.appendQueryParameter(key, value.toString)} }
val configuredRequest = (new DefaultHttpRequestFactory).newHttpRequest(request.getRequestLine.getMethod, uriBuilder.toString)
HttpClient.this.headers foreach {case (key, value) => configuredRequest.setHeader(key, value) }
headers foreach {_ foreach {case (key, value) => configuredRequest.setHeader(key, value)}}
val asyncRequest = AsyncHttpRequest.create(configuredRequest)
body foreach asyncRequest.setBody
asyncRequest
}
private class JSONObjectCallback(request: RequestImpl[JSONObject]) extends com.koushikdutta.async.http.AsyncHttpClient.StringCallback {
override def onCompleted(e: Exception, source: AsyncHttpResponse, result: String): Unit = {
if (request.responsePromise.isCompleted) return
if (source == null) {
request.responsePromise.failure(e)
request.resultPromise.failure(e)
} else if (result == null && e != null) {
request.responsePromise.success(source)
request.resultPromise.failure(e)
} else {
request.responsePromise.success(source)
request.resultPromise.tryComplete(Try(new JSONObject(if (result.trim.length > 0) result else "{}")))
}
}
}
private class VoidCallback(request: RequestImpl[Unit]) extends com.koushikdutta.async.http.AsyncHttpClient.StringCallback {
override def onCompleted(ex: Exception, response: AsyncHttpResponse, result: String): Unit = {
if (request.responsePromise.isCompleted) return
if (response == null) {
request.responsePromise.failure(ex)
request.resultPromise.failure(ex)
} else {
request.responsePromise.success(response)
        request.resultPromise.success(())
}
}
}
private class JSONArrayCallback(request: RequestImpl[JSONArray]) extends com.koushikdutta.async.http.AsyncHttpClient.StringCallback {
override def onCompleted(e: Exception, source: AsyncHttpResponse, result: String): Unit = {
if (request.responsePromise.isCompleted) return
if (source == null) {
request.responsePromise.failure(e)
request.resultPromise.failure(e)
} else if (result == null && e != null) {
request.responsePromise.success(source)
request.resultPromise.failure(e)
} else {
request.responsePromise.success(source)
request.resultPromise.tryComplete(Try(new JSONArray(if (result.trim.length > 0) result else "[]")))
}
}
}
}
}
object HttpClient {
private[this] lazy val _defaultInstance = new HttpClient(Config.ApiBaseUri)
private[this] lazy val _websocketInstance = new HttpClient(Config.WebSocketBaseUri)
def defaultInstance = configure(_defaultInstance)
def websocketInstance = configure(_websocketInstance)
private[this] def configure(client: HttpClient): HttpClient = {
client.headers("X-Ledger-Locale") = Locale.getDefault.toString
client.headers("X-Ledger-Platform") = "android"
client.headers("X-Ledger-Environment") = Config.Env
client
}
}
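/**
 * Hedged usage sketch (not part of the original source): "users" is a made-up
 * relative path. Shows the Future-based API exposed by Request[T] and the
 * preconfigured defaultInstance.
 *
 *   import scala.concurrent.ExecutionContext.Implicits.global
 *   val request = HttpClient.defaultInstance.getJsonObject("users")
 *   request.future foreach { json => println(json) }
 *   // request.cancel() aborts the underlying AsyncHttpClient call
 */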
| Morveus/ledger-wallet-android | app/src/main/scala/com/ledger/ledgerwallet/remote/HttpClient.scala | Scala | mit | 13,203 |
package org.jetbrains.plugins.scala
package lang
package actions
package editor
package enter
import com.intellij.openapi.project.Project
import org.jetbrains.plugins.scala.lang.actions.editor.enter.ChainedMethodCallEnterTest.DATA_PATH
import org.junit.runner.RunWith
import org.junit.runners.AllTests
@RunWith(classOf[AllTests])
class ChainedMethodCallEnterTest extends AbstractEnterActionTestBase(DATA_PATH) {
override protected def setSettings(project: Project): Unit = {
super.setSettings(project)
val settings = getCommonSettings(project)
settings.getIndentOptions.INDENT_SIZE = 2
settings.ALIGN_MULTILINE_CHAINED_METHODS = true
}
}
object ChainedMethodCallEnterTest {
val DATA_PATH = "/actions/editor/enter/align_method_call_chain/"
def suite = new ChainedMethodCallEnterTest
} | JetBrains/intellij-scala | scala/scala-impl/test/org/jetbrains/plugins/scala/lang/actions/editor/enter/ChainedMethodCallEnterTest.scala | Scala | apache-2.0 | 811 |
package fs2
object concurrent {
/**
* Nondeterministically merges a stream of streams (`outer`) in to a single stream,
* opening at most `maxOpen` streams at any point in time.
*
* The outer stream is evaluated and each resulting inner stream is run concurrently,
* up to `maxOpen` stream. Once this limit is reached, evaluation of the outer stream
* is paused until one or more inner streams finish evaluating.
*
* When the outer stream stops gracefully, all inner streams continue to run,
* resulting in a stream that will stop when all inner streams finish
* their evaluation.
*
* When the outer stream fails, evaluation of all inner streams is interrupted
* and the resulting stream will fail with same failure.
*
   * When any of the inner streams fails, the outer stream and all other inner
   * streams are interrupted, resulting in a stream that fails with the error of the
   * stream that caused the initial failure.
*
* Finalizers on each inner stream are run at the end of the inner stream,
* concurrently with other stream computations.
*
* Finalizers on the outer stream are run after all inner streams have been pulled
* from the outer stream -- hence, finalizers on the outer stream will likely run
* before the last finalizer on the last inner stream.
*
* Finalizers on the returned stream are run after the outer stream has finished
* and all open inner streams have finished.
*
* @param maxOpen Maximum number of open inner streams at any time. Must be > 0.
* @param outer Stream of streams to join.
*/
def join[F[_],O](maxOpen: Int)(outer: Stream[F,Stream[F,O]])(implicit F: Async[F]): Stream[F,O] = {
assert(maxOpen > 0,"maxOpen must be > 0, was: " + maxOpen)
def throttle[A](checkIfKilled: F[Boolean]): Pipe[F,Stream[F,A],Unit] = {
def runInnerStream(inner: Stream[F,A], onInnerStreamDone: F[Unit]): Pull[F,Nothing,Unit] = {
val startInnerStream: F[F.Ref[Unit]] = {
F.bind(F.ref[Unit]) { gate =>
F.map(F.start(
Stream.eval(checkIfKilled).
flatMap { killed => if (killed) Stream.empty else inner }.
onFinalize { F.bind(F.setPure(gate)(())) { _ => onInnerStreamDone } }.
run
)) { _ => gate }}
}
Pull.acquire(startInnerStream) { gate => F.get(gate) }.map { _ => () }
}
def go(doneQueue: async.mutable.Queue[F,Unit])(open: Int): (Stream.Handle[F,Stream[F,A]], Stream.Handle[F,Unit]) => Pull[F,Nothing,Unit] = (h, d) => {
if (open < maxOpen)
Pull.receive1Option[F,Stream[F,A],Nothing,Unit] {
case Some(inner #: h) => runInnerStream(inner, F.map(F.start(doneQueue.enqueue1(())))(_ => ())).flatMap { gate => go(doneQueue)(open + 1)(h, d) }
case None => Pull.done
}(h)
else
d.receive1 { case _ #: d => go(doneQueue)(open - 1)(h, d) }
}
in => Stream.eval(async.unboundedQueue[F,Unit]).flatMap { doneQueue => in.pull2(doneQueue.dequeue)(go(doneQueue)(0)) }
}
for {
killSignal <- Stream.eval(async.signalOf(false))
outputQueue <- Stream.eval(async.mutable.Queue.synchronousNoneTerminated[F,Either[Throwable,Chunk[O]]])
o <- outer.map { inner =>
inner.chunks.attempt.evalMap { o => outputQueue.enqueue1(Some(o)) }.interruptWhen(killSignal)
}.through(throttle(killSignal.get)).onFinalize {
outputQueue.enqueue1(None)
}.mergeDrainL {
outputQueue.dequeue.through(pipe.unNoneTerminate).flatMap {
case Left(e) => Stream.eval(killSignal.set(true)).flatMap { _ => Stream.fail(e) }
case Right(c) => Stream.chunk(c)
}
}.onFinalize { killSignal.set(true) }
} yield o
}
}
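/**
 * Hedged usage sketch (not part of the original source): assumes an effect type
 * `F` with an `Async[F]` instance in implicit scope, as required by `join`.
 *
 *   val inner: Stream[F, Int] = Stream.emits(1 to 10)
 *   val outer: Stream[F, Stream[F, Int]] = Stream(inner, inner, inner, inner)
 *   val merged: Stream[F, Int] = concurrent.join(maxOpen = 2)(outer)
 */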
| japgolly/scalaz-stream | core/src/main/scala/fs2/concurrent.scala | Scala | mit | 3,836 |
package riftwarp.serialization.common
import scalaz._, Scalaz._
import scalaz.Validation.FlatMap._
import almhirt.common._
import almhirt.tracking.CommandStatusChanged
import riftwarp._
import riftwarp.std.kit._
import riftwarp.std.WarpObjectLookUp
object CommandStatusChangedWarpPackaging extends EventWarpPackagingTemplate[CommandStatusChanged] with RegisterableWarpPacker {
val warpDescriptor = WarpDescriptor("CommandStatusChanged")
val alternativeWarpDescriptors = WarpDescriptor(classOf[CommandStatusChanged]) :: Nil
override def addEventParams(what: CommandStatusChanged, into: WarpObject)(implicit packers: WarpPackers): AlmValidation[WarpPackage] =
into ~>
With("commandHeader", what.commandHeader, CommandHeaderWarpPackaging) ~>
With("status", what.status, CommandStatusWarpPackaging)
override def extractEventParams(from: WarpObjectLookUp, header: EventHeader)(implicit unpackers: WarpUnpackers): AlmValidation[CommandStatusChanged] =
for {
commandHeader ← from.getWith("commandHeader", CommandHeaderWarpPackaging)
status ← from.getWith("status", CommandStatusWarpPackaging)
} yield CommandStatusChanged(header, commandHeader, status)
} | chridou/almhirt | riftwarp/src/main/scala/riftwarp/serialization/common/CommandStatusChangedWarpPackaging.scala | Scala | apache-2.0 | 1,206 |
package com.theseventhsense.datetime
import cats.implicits._
import com.theseventhsense.utils.types.SSDateTime.{Instant, TimeZone}
/**
* Created by erik on 6/15/16.
*/
class MomentRichTimeZone(timeZone: TimeZone)
extends AbstractRichTimeZone(timeZone) {
override def valid: Boolean = false
override def offsetSecondsAt(instant: Instant): Integer = 0
}
class MomentRichTimezoneOps extends AbstractRichTimeZoneOps {
override def parse(s: String): Either[TimeZone.ParseError, TimeZone] =
Either.left(TimeZone.ParseError.NotImplemented)
}
| 7thsense/utils-datetime | js/src/main/scala/com/theseventhsense/datetime/MomentRichTimeZone.scala | Scala | mit | 557 |
import sbt._
import Keys._
import PlayProject._
object ApplicationBuild extends Build {
val appName = "Cima"
val appVersion = "1.0"
val appDependencies = Seq(
// Add your project dependencies here,
"net.databinder" %% "dispatch-http" % "0.8.8" withSources,
"com.google.code.facebookapi" % "facebook-java-api" % "3.0.2" withSources
)
val main = PlayProject(appName, appVersion, appDependencies, mainLang = SCALA).settings(
resolvers ++= Seq(
"sbt-idea-repo" at "http://mpeltonen.github.com/maven/"
)
)
}
| niklassaers/Cima | project/Build.scala | Scala | apache-2.0 | 584 |
/*
* Copyright (C) 2013 Alcatel-Lucent.
*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
* Licensed to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package molecule
package benchmarks.comparison
package molecule.executors
import platform.Executor
import platform.executors.TrampolineExecutor
import platform.{ ThreadFactory, MoleculeThread }
import jsr166y._
/**
* Executor used by the Flow Parallel Scheduler
*/
final class TrampolineFJExecutorLog(pool: Executor, group: ThreadGroup) extends Executor {
/**
   * One task queue per kernel thread. A kernel thread will submit a task to the
   * thread pool only if there is more than one task in its local task queue.
*/
private final val context = new ThreadLocal[TrampolineTask]() {
override protected def initialValue() = null
}
private[this] final class TrampolineTask( final var nextTask: Runnable) extends Runnable {
def run() = {
// When we reach here, the next task is null
context.set(this)
while (nextTask != null) {
val task = nextTask
nextTask = null
task.run()
}
}
}
def execute(task: Runnable) {
//println(Thread.currentThread())
//println(Thread.currentThread().getThreadGroup() + "==" + group)
    // it is necessary to check the marker trait because some frameworks, like Swing,
    // copy the thread group of the thread that started them...
try {
val thread = Thread.currentThread()
if ((thread.getThreadGroup() eq group) && thread.isInstanceOf[MoleculeThread]) {
val trampoline = context.get()
if (trampoline.nextTask != null) {
TrampolineFJExecutorLog.submitCount.getAndIncrement()
pool.execute(new TrampolineTask(trampoline.nextTask))
} else {
TrampolineFJExecutorLog.bounceCount.getAndIncrement()
}
trampoline.nextTask = task
} else {
TrampolineFJExecutorLog.submitCount.getAndIncrement()
pool.execute(new TrampolineTask(task))
}
} catch {
case t: java.util.concurrent.RejectedExecutionException =>
// stdin is never gracefully shutdown and may submit a last key event
// to this pool, which has been shutdown.
if (Thread.currentThread.getThreadGroup().getName() != "stdin")
throw t
}
}
def shutdownNow() =
pool.shutdownNow()
/**
   * Execute the shutdown task.
*/
def shutdown() =
pool.shutdown()
}
object TrampolineFJExecutorLog {
import java.util.concurrent.atomic.AtomicInteger
val submitCount = new AtomicInteger(0)
val bounceCount = new AtomicInteger(0)
def reset() = {
submitCount.set(0)
bounceCount.set(0)
}
import java.util.concurrent.{ TimeUnit, LinkedBlockingQueue, ThreadPoolExecutor }
// def apply(tf:ThreadFactory, nbThreads:Int):TrampolineFJExecutorLog =
// new TrampolineFJExecutorLog(new ThreadPoolExecutor(nbThreads, nbThreads,
// 0L, TimeUnit.MILLISECONDS,
// new LinkedBlockingQueue[Runnable](),
// tf), tf.group)
def apply(tf: ThreadFactory, nbThreads: Int): TrampolineFJExecutorLog = {
val f: ForkJoinPool.ForkJoinWorkerThreadFactory = new ForkJoinPool.ForkJoinWorkerThreadFactory {
def newThread(pool: ForkJoinPool): ForkJoinWorkerThread = {
return new ForkJoinWorkerThread(pool, tf);
}
}
new TrampolineFJExecutorLog(Executor.wrap(new ForkJoinPool(nbThreads, f, null, false)), tf.group)
}
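/**
 * Hedged usage sketch (not part of the original source): `tf` is an assumed
 * platform.ThreadFactory. Shows how the instrumentation counters are intended
 * to be read around a benchmark run.
 *
 *   TrampolineFJExecutorLog.reset()
 *   val executor = TrampolineFJExecutorLog(tf, nbThreads = 4)
 *   // ... run the benchmark, submitting tasks to `executor` ...
 *   println("submits=" + TrampolineFJExecutorLog.submitCount.get +
 *           " bounces=" + TrampolineFJExecutorLog.bounceCount.get)
 *   executor.shutdown()
 */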
} | molecule-labs/molecule | molecule-benchmarks/src/main/scala/molecule/benchmarks/comparison/molecule/executors/TrampolineFJExecutorLog.scala | Scala | apache-2.0 | 4,115 |
/**
* Copyright (C) 2011 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.xforms.analysis
import org.junit.{Assume, Test}
import org.orbeon.oxf.common.Version
import org.orbeon.oxf.test.DocumentTestBase
import org.orbeon.oxf.xml.dom.Converter._
import org.scalatestplus.junit.AssertionsForJUnit
class ItemsetDependenciesTest extends DocumentTestBase with AssertionsForJUnit {
// See: [ #315557 ] XPath analysis: Checkbox with both itemset and value changing ends up in incorrect state
// http://forge.ow2.org/tracker/?func=detail&atid=350207&aid=315557&group_id=168
@Test def selectValueDependingOnItemset(): Unit = {
Assume.assumeTrue(Version.isPE) // only test this feature if we are the PE version
this setupDocument
<xh:html xmlns:xh="http://www.w3.org/1999/xhtml"
xmlns:xf="http://www.w3.org/2002/xforms"
xmlns:xxf="http://orbeon.org/oxf/xml/xforms"
xmlns:xs="http://www.w3.org/2001/XMLSchema">
<xh:head>
<xf:model id="model" xxf:xpath-analysis="true" xxf:encrypt-item-values="false">
<xf:instance id="instance">
<instance xmlns="">
<selection>1 2</selection>
<value>1</value>
<value>2</value>
<index>1</index>
</instance>
</xf:instance>
</xf:model>
</xh:head>
<xh:body>
<xf:select id="checkbox" ref="selection" appearance="full">
<xf:item>
<xf:label/>
<xf:value ref="../value[xs:integer(../index)]"/>
</xf:item>
</xf:select>
<xf:select1 id="value-selection" ref="index" appearance="full">
<xf:item>
<xf:label>1</xf:label>
<xf:value>1</xf:value>
</xf:item>
<xf:item>
<xf:label>2</xf:label>
<xf:value>2</xf:value>
</xf:item>
</xf:select1>
</xh:body>
</xh:html>.toDocument
assert(getControlExternalValue("checkbox") === "1")
assert(getControlExternalValue("value-selection") === "1")
assert(getItemset("checkbox") === """[{"label":"","value":"1"}]""")
setControlValue("value-selection", "2")
assert(getControlExternalValue("checkbox") === "2")
assert(getItemset("checkbox") === """[{"label":"","value":"2"}]""")
}
} | orbeon/orbeon-forms | xforms/jvm/src/test/scala/org/orbeon/oxf/xforms/analysis/ItemsetDependenciesTest.scala | Scala | lgpl-2.1 | 2,978 |
import sbt._
import Keys._
object BuildSettings {
val paradiseVersion = "2.0.0"
val buildSettings = Defaults.defaultSettings ++ Seq(
organization := "us.insolit",
version := "0.1-SNAPSHOT",
scalacOptions ++= Seq("-feature", "-unchecked", "-deprecation"), //, "-Ymacro-debug-lite"),
scalaVersion := "2.11.1",
crossScalaVersions := Seq("2.11.0", "2.11.1"),
resolvers += Resolver.sonatypeRepo("snapshots"),
resolvers += Resolver.sonatypeRepo("releases"),
addCompilerPlugin("org.scalamacros" % "paradise" % paradiseVersion cross CrossVersion.full),
libraryDependencies <+= (scalaVersion)("org.scala-lang" % "scala-reflect" % _),
libraryDependencies <+= (scalaVersion)("org.scala-lang" % "scala-compiler" % _),
libraryDependencies += "org.scalatest" % "scalatest_2.11" % "2.2.0",
publishArtifact in Test := false
)
}
object FreezeBuild extends Build {
import BuildSettings._
lazy val root: Project = Project(
id = "root",
base = file("."),
settings = buildSettings ++ Seq(publishArtifact := false)
) aggregate(macros, test)
lazy val macros: Project = Project(
id = "freeze",
base = file("macros"),
settings = buildSettings ++ Seq(
libraryDependencies <+= (scalaVersion)("org.scala-lang" % "scala-reflect" % _),
libraryDependencies ++= (
if (scalaVersion.value.startsWith("2.10")) List("org.scalamacros" %% "quasiquotes" % paradiseVersion)
else Nil
)
)
)
lazy val test: Project = Project(
id = "test",
base = file("test"),
settings = buildSettings ++ Seq(publishArtifact := false)
) dependsOn(macros)
}
| jmgao/freeze | project/Build.scala | Scala | mit | 1,644 |
package chandu0101.scalajs.react.components
package materialui
import chandu0101.macros.tojs.JSMacro
import japgolly.scalajs.react._
import japgolly.scalajs.react.raw.React
import japgolly.scalajs.react.vdom.VdomNode
import org.scalajs.dom
import scala.scalajs.js
import scala.scalajs.js.`|`
/**
* This file is generated - submit issues instead of PR against it
*/
case class MuiStep(key: js.UndefOr[String] = js.undefined,
ref: js.UndefOr[String] = js.undefined,
/* Sets the step as active. Is passed to child components. */
active: js.UndefOr[Boolean] = js.undefined,
/* Mark the step as completed. Is passed to child components. */
completed: js.UndefOr[Boolean] = js.undefined,
                   /* Mark the step as disabled; this will also disable the button if
`StepButton` is a child of `Step`. Is passed to child components. */
disabled: js.UndefOr[Boolean] = js.undefined,
/* Used internally for numbering. */
index: js.UndefOr[Double] = js.undefined,
last: js.UndefOr[Boolean] = js.undefined,
/* Override the inline-style of the root element. */
style: js.UndefOr[CssProperties] = js.undefined) {
/**
* @param children Should be `Step` sub-components such as `StepLabel`.
*/
def apply(children: VdomNode*) = {
val props = JSMacro[MuiStep](this)
val component = JsComponent[js.Object, Children.Varargs, Null](Mui.Step)
component(props)(children: _*)
}
}
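/**
 * Hedged usage sketch (not part of the original source): `MuiStepLabel` is assumed
 * to be the companion wrapper for the `StepLabel` sub-component mentioned above.
 *
 *   MuiStep(active = true, completed = false)(MuiStepLabel()("Shipping address"))
 */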
| rleibman/scalajs-react-components | core/src/main/scala/chandu0101/scalajs/react/components/materialui/MuiStep.scala | Scala | apache-2.0 | 1,612 |
package controllers
import play.api.Logger
import play.api.data.Form
import play.api.data.Forms._
import play.api.i18n.Messages
import play.api.mvc._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.util.control.NonFatal
class PasswordReset extends AbstractCaveController {
val startResetPasswordForm = Form(
"email" -> email
)
val confirmResetPasswordForm = Form(
tuple(
"password1" -> text,
"password2" -> text
) verifying(Messages("cave.login.signup.passwordDoesntMatch"), fields => fields match {
case (password1: String, password2: String) => password1 == password2
})
)
def startResetPassword = Action { implicit request =>
Ok(views.html.loginscreen.startResetPassword(startResetPasswordForm))
}
def handleStartResetPassword = Action.async { implicit request =>
withCaveClient { client =>
startResetPasswordForm.bindFromRequest().fold(
formWithErrors => Future.successful(BadRequest(views.html.loginscreen.startResetPassword(formWithErrors))),
email => {
client.Users.postForgotPassword(email) map {
_ => Redirect(routes.Authentication.login).flashing("success" -> Messages("cave.login.passwordReset.checkYourEmail", email))
} recover {
case NonFatal(e) =>
Logger.error(s"Password reset error for $email", e)
Redirect(routes.Authentication.login).flashing("error" -> Messages("cave.login.passwordReset.error"))
}
}
)
}
}
def resetPassword(mailToken: String) = Action { implicit request =>
Ok(views.html.loginscreen.resetPasswordPage(confirmResetPasswordForm, mailToken))
}
def handleResetPassword(mailToken: String) = Action.async { implicit request =>
withCaveClient { client =>
confirmResetPasswordForm.bindFromRequest().fold(
formWithErrors => Future.successful(BadRequest(views.html.loginscreen.resetPasswordPage(formWithErrors, mailToken))),
values => {
client.Users.postResetPassword(values._1, mailToken) map {
_ => debug(s"Password changed")
Redirect(routes.Authentication.login).flashing("success" -> Messages("cave.login.password.reset.success"))
} recover {
case NonFatal(e) =>
Logger.error("Password change error", e)
Redirect(routes.Authentication.login).flashing("error" -> Messages("cave.login.passwordReset.error"))
}
}
)
}
}
} | gilt/cave | www/app/controllers/PasswordReset.scala | Scala | mit | 2,543 |
package model
import skinny.orm._, feature._
import scalikejdbc._
import org.joda.time._
case class LoginUserAppendix(
loginUserInfoId: Long,
sortNum: Long
)
object LoginUserAppendix extends SkinnyCRUDMapper[LoginUserAppendix] {
override lazy val tableName = "login_user_appendix"
override lazy val defaultAlias = createAlias("lua")
override lazy val primaryKeyFieldName = "loginUserInfoId"
/*
* If you're familiar with ScalikeJDBC/Skinny ORM, using #autoConstruct makes your mapper simpler.
* (e.g.)
* override def extract(rs: WrappedResultSet, rn: ResultName[LoginUserAppendix]) = autoConstruct(rs, rn)
*
* Be aware of excluding associations like this:
* (e.g.)
* case class Member(id: Long, companyId: Long, company: Option[Company] = None)
* object Member extends SkinnyCRUDMapper[Member] {
* override def extract(rs: WrappedResultSet, rn: ResultName[Member]) =
* autoConstruct(rs, rn, "company") // "company" will be skipped
* }
*/
override def extract(rs: WrappedResultSet, rn: ResultName[LoginUserAppendix]): LoginUserAppendix = new LoginUserAppendix(
loginUserInfoId = rs.get(rn.loginUserInfoId),
sortNum = rs.get(rn.sortNum)
)
/**
   * Updates the sort order.
   * @param loginUserInfoId login user info ID
   * @param sortNum sort order
* @param session Session
*/
def updateSortNum(loginUserInfoId: Long, sortNum: Long)(implicit session: DBSession): Unit = {
LoginUserAppendix.updateById(loginUserInfoId).withAttributes(
'sortNum -> sortNum
)
}
}
| nemuzuka/vss-kanban | src/main/scala/model/LoginUserAppendix.scala | Scala | mit | 1,564 |
package actors
case class ChatMessage(name:String,text: String)
case class Stats(users:Set[String])
object JoinChatRoom
object Tick
object GetStats
| tnddn/iv-web | portal/rest-portal/app/actors/ChatProtocol.scala | Scala | apache-2.0 | 150 |
/*
* Copyright (c) 2017 Magomed Abdurakhmanov, Hypertino
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*
*/
package com.hypertino.hyperbus.util
trait IdGeneratorBase {
final protected val base64t = "-0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz" // sorted by char code
protected def appendInt(sb: StringBuilder, i: Int): Unit = {
sb.append(base64t.charAt(i >> 24 & 63))
sb.append(base64t.charAt(i >> 18 & 63))
sb.append(base64t.charAt(i >> 12 & 63))
sb.append(base64t.charAt(i >> 6 & 63))
sb.append(base64t.charAt(i & 63))
}
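  // Hedged illustration (not part of the original source): appendInt packs the 30
  // low-order bits of `i` into five characters of the alphabet above, most
  // significant 6-bit group first. For i = 1 the groups are (0, 0, 0, 0, 1), so the
  // appended characters are "----0".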
}
| hypertino/hyperbus | hyperbus/src/main/scala/com/hypertino/hyperbus/util/IdGeneratorBase.scala | Scala | mpl-2.0 | 734 |
package scalaxy.json.json4s.base
import scalaxy.json.base._
import scala.util.control.NonFatal
import scala.language.experimental.macros
import scala.reflect.macros.blackbox.Context
trait JSONStringInterpolationMacros extends JsonDriverMacros {
def parse(str: String, useBigDecimalForDouble: Boolean = false): JSONValueType
def interpolateJsonString(c: Context)(args: c.Expr[Any]*)(implicit tag: c.WeakTypeTag[JSONValueType]): c.Expr[JSONValueType] = {
import c.universe._
val Select(Apply(jsonStringContext, List(Apply(Select(scalaStringContext, applyName), fragmentTrees))), jsonName) = c.prefix.tree
val fragments = fragmentTrees map {
case t @ Literal(Constant(s: String)) =>
s -> t.pos
}
var typedArgs = args.map(arg => c.typecheck(arg.tree))
val Placeholders(placeholders, argNames, posMap, _) = ExtractibleJSONStringContext.preparePlaceholders(
fragments, i => {
val typedArg = typedArgs(i)
val tpe = typedArg.tpe
isJField(c)(tpe) || isJFieldOption(c)(tpe)
},
i => typedArgs(i).pos)
val valNames = (1 to typedArgs.size).map(_ => TermName(c.freshName))
val valDefs = typedArgs.zip(valNames).map({
case (typedArg, valName) =>
ValDef(NoMods, valName, TypeTree(typedArg.tpe), typedArg): Tree
}).toList
val replacements: Map[String, (Tree, Type)] =
typedArgs.zip(valNames).zip(argNames).map({
case ((typedArg, valName), argName) =>
argName -> (Ident(valName) -> typedArg.tpe)
}).toMap
c.Expr[JSONValueType](
try { Block(valDefs, reifyJsonValue(c)(parse(placeholders), replacements).tree) }
catch { case NonFatal(ex) =>
if (!reportParsingException(c)(ex, posMap))
c.error(c.enclosingPosition, ex.getMessage)
q"null"
}
)
}
}
| nativelibs4java/Scalaxy | JSON/Json4s/Core/src/main/scala/scalaxy/json/base/implementation/JSONStringInterpolationMacros.scala | Scala | bsd-3-clause | 1,834 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.history.yarn.failures
import java.net.{URI, URL}
import org.apache.spark.SparkConf
import org.apache.spark.deploy.history.yarn.YarnTimelineUtils._
import org.apache.spark.deploy.history.yarn.integration.AbstractHistoryIntegrationTests
import org.apache.spark.deploy.history.yarn.rest.{HttpRequestException, JerseyBinding}
import org.apache.spark.deploy.history.yarn.server.{TimelineQueryClient, YarnHistoryProvider}
import org.apache.spark.deploy.history.yarn.testtools.YarnTestUtils._
/**
* Test reporting of connectivity problems to the caller, specifically how
* the `YarnHistoryProvider` handles the initial binding & reporting of problems.
*
*/
class WebsiteDiagnosticsSuite extends AbstractHistoryIntegrationTests {
var failingHistoryProvider: FailingYarnHistoryProvider = _
/**
   * Create a failing history provider instance, with the flag set to indicate that
   * the initial endpoint check has not been executed.
* @param conf configuration
* @return the instance
*/
override protected def createHistoryProvider(conf: SparkConf): YarnHistoryProvider = {
val yarnConf = sc.hadoopConfiguration
val client = new TimelineQueryClient(timelineRootEndpoint(),
yarnConf, JerseyBinding.createClientConfig())
failingHistoryProvider = new
FailingYarnHistoryProvider(client, false, client.getTimelineURI(), conf)
failingHistoryProvider
}
def timelineRootEndpoint(): URI = {
val realTimelineEndpoint = getTimelineEndpoint(sc.hadoopConfiguration).toURL
new URL(realTimelineEndpoint, "/").toURI
}
/**
   * Issue a GET request against the Web UI and expect it to fail with an error
   * message indicating that `text/html` is not a supported type, with error text
   * indicating that the failure happened during the endpoint check.
* @param webUI URL to the web UI
* @param provider the provider
*/
def expectApplicationLookupToFailInEndpointCheck(webUI: URL,
provider: YarnHistoryProvider): Unit = {
val connector = createUrlConnector()
val appURL = new URL(webUI, "/history/app-0001")
describe(s"Expecting endpoint checks to fail while retrieving $appURL")
awaitURL(webUI, TEST_STARTUP_DELAY)
try {
assert(!failingHistoryProvider.endpointCheckSuccess())
val body = getHtmlPage(appURL, Nil)
fail(s"Expected a failure from GET $appURL -but got\n$body")
} catch {
case ex: HttpRequestException =>
assertContains(ex.toString, TimelineQueryClient.MESSAGE_CHECK_URL)
}
}
test("Probe UI with Endpoint check") {
def probeUIWithFailureCaught(webUI: URL, provider: YarnHistoryProvider): Unit = {
awaitURL(webUI, TEST_STARTUP_DELAY)
getHtmlPage(webUI, YarnHistoryProvider.TEXT_NEVER_UPDATED :: Nil)
}
webUITest("Probe UI with Endpoint check", probeUIWithFailureCaught)
}
test("Probe App ID with Endpoint check") {
def expectAppIdToFail(webUI: URL, provider: YarnHistoryProvider): Unit = {
expectApplicationLookupToFailInEndpointCheck(webUI, provider)
}
webUITest("Probe App ID with Endpoint check", expectAppIdToFail)
}
}
| steveloughran/spark-timeline-integration | yarn-timeline-history/src/test/scala/org/apache/spark/deploy/history/yarn/failures/WebsiteDiagnosticsSuite.scala | Scala | apache-2.0 | 3,943 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.partial
import scala.collection.Map
import scala.collection.mutable.HashMap
import scala.reflect.ClassTag
import org.apache.spark.util.collection.OpenHashMap
/**
* An ApproximateEvaluator for counts by key. Returns a map of key to confidence interval.
*/
private[spark] class GroupedCountEvaluator[T : ClassTag](totalOutputs: Int, confidence: Double)
extends ApproximateEvaluator[OpenHashMap[T, Long], Map[T, BoundedDouble]] {
private var outputsMerged = 0
private val sums = new OpenHashMap[T, Long]() // Sum of counts for each key
override def merge(outputId: Int, taskResult: OpenHashMap[T, Long]): Unit = {
outputsMerged += 1
taskResult.foreach { case (key, value) =>
sums.changeValue(key, value, _ + value)
}
}
override def currentResult(): Map[T, BoundedDouble] = {
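    // Once every partition has reported, the counts are exact; otherwise extrapolate each key's
    // count by the fraction of outputs merged so far and wrap it in a confidence interval.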
if (outputsMerged == totalOutputs) {
sums.map { case (key, sum) => (key, new BoundedDouble(sum, 1.0, sum, sum)) }.toMap
} else if (outputsMerged == 0) {
new HashMap[T, BoundedDouble]
} else {
val p = outputsMerged.toDouble / totalOutputs
sums.map { case (key, sum) => (key, CountEvaluator.bound(confidence, sum, p)) }.toMap
}
}
}
| sh-cho/cshSpark | partial/GroupedCountEvaluator.scala | Scala | apache-2.0 | 2,011 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.api
import org.apache.flink.api.common.RuntimeExecutionMode
import org.apache.flink.api.common.typeinfo.Types.STRING
import org.apache.flink.api.scala._
import org.apache.flink.configuration.ExecutionOptions
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment
import org.apache.flink.streaming.api.scala.{StreamExecutionEnvironment => ScalaStreamExecutionEnvironment}
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment
import org.apache.flink.table.api.bridge.scala.{StreamTableEnvironment => ScalaStreamTableEnvironment, _}
import org.apache.flink.table.api.config.TableConfigOptions
import org.apache.flink.table.api.internal.{TableEnvironmentImpl, TableEnvironmentInternal}
import org.apache.flink.table.catalog._
import org.apache.flink.table.functions.TestGenericUDF
import org.apache.flink.table.planner.factories.utils.TestCollectionTableFactory
import org.apache.flink.table.planner.runtime.utils.TestingAppendSink
import org.apache.flink.table.planner.utils.TableTestUtil.{readFromResource, replaceStageId}
import org.apache.flink.table.planner.utils.{TableTestUtil, TestTableSourceSinks, TestTableSourceWithTime}
import org.apache.flink.types.{Row, RowKind}
import org.apache.flink.util.{CollectionUtil, FileUtils, TestLogger}
import org.junit.Assert.{assertEquals, assertFalse, assertTrue, fail}
import org.junit.rules.{ExpectedException, TemporaryFolder}
import org.junit.runner.RunWith
import org.junit.runners.Parameterized
import org.junit.{Assert, Before, Rule, Test}
import _root_.java.io.{File, FileFilter}
import _root_.java.lang.{Long => JLong}
import _root_.java.util
import _root_.scala.collection.mutable
@RunWith(classOf[Parameterized])
class TableEnvironmentITCase(tableEnvName: String, isStreaming: Boolean) extends TestLogger {
// used for accurate exception information checking.
val expectedException: ExpectedException = ExpectedException.none()
@Rule
def thrown: ExpectedException = expectedException
private val _tempFolder = new TemporaryFolder()
@Rule
def tempFolder: TemporaryFolder = _tempFolder
var tEnv: TableEnvironment = _
private val settings = if (isStreaming) {
EnvironmentSettings.newInstance().inStreamingMode().build()
} else {
EnvironmentSettings.newInstance().inBatchMode().build()
}
@Before
def setup(): Unit = {
tableEnvName match {
case "TableEnvironment" =>
tEnv = TableEnvironmentImpl.create(settings)
case "StreamTableEnvironment" =>
tEnv = StreamTableEnvironment.create(
StreamExecutionEnvironment.getExecutionEnvironment, settings)
case _ => throw new UnsupportedOperationException("unsupported tableEnvName: " + tableEnvName)
}
TestTableSourceSinks.createPersonCsvTemporaryTable(tEnv, "MyTable")
}
@Test
  def testSetPlannerType(): Unit = {
tEnv.getConfig.getConfiguration.set(TableConfigOptions.TABLE_PLANNER, PlannerType.OLD)
TestTableSourceSinks.createCsvTemporarySinkTable(
tEnv, new TableSchema(Array("first"), Array(STRING)), "MySink1")
thrown.expect(classOf[IllegalArgumentException])
thrown.expectMessage(
"Mismatch between configured planner and actual planner. " +
"Currently, the 'table.planner' can only be set " +
"when instantiating the table environment. Subsequent changes are not supported. " +
"Please instantiate a new TableEnvironment if necessary."
)
tEnv.executeSql("insert into MySink1 select first from MyTable")
}
@Test
def testSetExecutionMode(): Unit = {
if (isStreaming) {
tEnv.getConfig.getConfiguration.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.BATCH)
} else {
tEnv.getConfig.getConfiguration.set(ExecutionOptions.RUNTIME_MODE,
RuntimeExecutionMode.STREAMING)
}
thrown.expect(classOf[IllegalArgumentException])
thrown.expectMessage(
"Mismatch between configured runtime mode and actual runtime mode. " +
"Currently, the 'execution.runtime-mode' can only be set when instantiating the " +
"table environment. Subsequent changes are not supported. " +
"Please instantiate a new TableEnvironment if necessary."
)
tEnv.explainSql("select first from MyTable")
}
@Test
def testExecuteTwiceUsingSameTableEnv(): Unit = {
val sink1Path = TestTableSourceSinks.createCsvTemporarySinkTable(
tEnv, new TableSchema(Array("first"), Array(STRING)), "MySink1")
val sink2Path = TestTableSourceSinks.createCsvTemporarySinkTable(
tEnv, new TableSchema(Array("first"), Array(STRING)), "MySink2")
checkEmptyFile(sink1Path)
checkEmptyFile(sink2Path)
val table1 = tEnv.sqlQuery("select first from MyTable")
tEnv.insertInto(table1, "MySink1")
tEnv.execute("test1")
assertFirstValues(sink1Path)
checkEmptyFile(sink2Path)
// delete first csv file
new File(sink1Path).delete()
assertFalse(new File(sink1Path).exists())
val table2 = tEnv.sqlQuery("select last from MyTable")
tEnv.insertInto(table2, "MySink2")
tEnv.execute("test2")
assertFalse(new File(sink1Path).exists())
assertLastValues(sink2Path)
}
@Test
def testExplainAndExecuteSingleSink(): Unit = {
val sinkPath = TestTableSourceSinks.createCsvTemporarySinkTable(
tEnv, new TableSchema(Array("first"), Array(STRING)), "MySink1")
val table1 = tEnv.sqlQuery("select first from MyTable")
tEnv.insertInto(table1, "MySink1")
tEnv.explain(false)
tEnv.execute("test1")
assertFirstValues(sinkPath)
}
@Test
def testExplainAndExecuteMultipleSink(): Unit = {
val sink1Path = TestTableSourceSinks.createCsvTemporarySinkTable(
tEnv, new TableSchema(Array("first"), Array(STRING)), "MySink1")
val sink2Path = TestTableSourceSinks.createCsvTemporarySinkTable(
tEnv, new TableSchema(Array("first"), Array(STRING)), "MySink2")
val table1 = tEnv.sqlQuery("select first from MyTable")
tEnv.insertInto(table1, "MySink1")
val table2 = tEnv.sqlQuery("select last from MyTable")
tEnv.insertInto(table2, "MySink2")
tEnv.explain(false)
tEnv.execute("test1")
assertFirstValues(sink1Path)
assertLastValues(sink2Path)
}
@Test
def testExplainTwice(): Unit = {
val sink1Path = TestTableSourceSinks.createCsvTemporarySinkTable(
tEnv, new TableSchema(Array("first"), Array(STRING)), "MySink1")
val sink2Path = TestTableSourceSinks.createCsvTemporarySinkTable(
tEnv, new TableSchema(Array("first"), Array(STRING)), "MySink2")
val table1 = tEnv.sqlQuery("select first from MyTable")
tEnv.insertInto(table1, "MySink1")
val table2 = tEnv.sqlQuery("select last from MyTable")
tEnv.insertInto(table2, "MySink2")
val result1 = tEnv.explain(false)
val result2 = tEnv.explain(false)
assertEquals(replaceStageId(result1), replaceStageId(result2))
}
@Test
def testSqlUpdateAndToDataStream(): Unit = {
if (!tableEnvName.equals("StreamTableEnvironment")) {
return
}
val streamEnv = StreamExecutionEnvironment.getExecutionEnvironment
val streamTableEnv = StreamTableEnvironment.create(streamEnv, settings)
TestTableSourceSinks.createPersonCsvTemporaryTable(streamTableEnv, "MyTable")
val sink1Path = TestTableSourceSinks.createCsvTemporarySinkTable(
streamTableEnv, new TableSchema(Array("first"), Array(STRING)), "MySink1")
checkEmptyFile(sink1Path)
streamTableEnv.sqlUpdate("insert into MySink1 select first from MyTable")
val table = streamTableEnv.sqlQuery("select last from MyTable where id > 0")
val resultSet = streamTableEnv.toAppendStream(table, classOf[Row])
val sink = new TestingAppendSink
resultSet.addSink(sink)
val explain = streamTableEnv.explain(false)
assertEquals(
replaceStageId(readFromResource("/explain/testSqlUpdateAndToDataStream.out")),
replaceStageId(explain))
streamTableEnv.execute("test1")
assertFirstValues(sink1Path)
// the DataStream program is not executed
assertFalse(sink.isInitialized)
deleteFile(sink1Path)
streamEnv.execute("test2")
assertEquals(getExpectedLastValues.sorted, sink.getAppendResults.sorted)
// the table program is not executed again
assertFileNotExist(sink1Path)
}
@Test
def testToDataStreamAndSqlUpdate(): Unit = {
if (!tableEnvName.equals("StreamTableEnvironment")) {
return
}
val streamEnv = StreamExecutionEnvironment.getExecutionEnvironment
val streamTableEnv = StreamTableEnvironment.create(streamEnv, settings)
TestTableSourceSinks.createPersonCsvTemporaryTable(streamTableEnv, "MyTable")
val sink1Path = TestTableSourceSinks.createCsvTemporarySinkTable(
streamTableEnv, new TableSchema(Array("first"), Array(STRING)), "MySink1")
checkEmptyFile(sink1Path)
val table = streamTableEnv.sqlQuery("select last from MyTable where id > 0")
val resultSet = streamTableEnv.toAppendStream(table, classOf[Row])
val sink = new TestingAppendSink
resultSet.addSink(sink)
streamTableEnv.sqlUpdate("insert into MySink1 select first from MyTable")
val explain = streamTableEnv.explain(false)
assertEquals(
replaceStageId(readFromResource("/explain/testSqlUpdateAndToDataStream.out")),
replaceStageId(explain))
streamEnv.execute("test2")
// the table program is not executed
checkEmptyFile(sink1Path)
assertEquals(getExpectedLastValues.sorted, sink.getAppendResults.sorted)
streamTableEnv.execute("test1")
assertFirstValues(sink1Path)
// the DataStream program is not executed again because the result in sink is not changed
assertEquals(getExpectedLastValues.sorted, sink.getAppendResults.sorted)
}
@Test
def testFromToDataStreamAndSqlUpdate(): Unit = {
if (!tableEnvName.equals("StreamTableEnvironment")) {
return
}
val streamEnv = ScalaStreamExecutionEnvironment.getExecutionEnvironment
val streamTableEnv = ScalaStreamTableEnvironment.create(streamEnv, settings)
val t = streamEnv.fromCollection(getPersonData)
.toTable(streamTableEnv, 'first, 'id, 'score, 'last)
streamTableEnv.registerTable("MyTable", t)
val sink1Path = TestTableSourceSinks.createCsvTemporarySinkTable(
streamTableEnv, new TableSchema(Array("first"), Array(STRING)), "MySink1")
checkEmptyFile(sink1Path)
val table = streamTableEnv.sqlQuery("select last from MyTable where id > 0")
val resultSet = streamTableEnv.toAppendStream[Row](table)
val sink = new TestingAppendSink
resultSet.addSink(sink)
streamTableEnv.sqlUpdate("insert into MySink1 select first from MyTable")
val explain = streamTableEnv.explain(false)
assertEquals(
replaceStageId(readFromResource("/explain/testFromToDataStreamAndSqlUpdate.out")),
replaceStageId(explain))
streamEnv.execute("test2")
// the table program is not executed
checkEmptyFile(sink1Path)
assertEquals(getExpectedLastValues.sorted, sink.getAppendResults.sorted)
streamTableEnv.execute("test1")
assertFirstValues(sink1Path)
// the DataStream program is not executed again because the result in sink is not changed
assertEquals(getExpectedLastValues.sorted, sink.getAppendResults.sorted)
}
@Test
def testExecuteSqlWithInsertInto(): Unit = {
val sinkPath = TestTableSourceSinks.createCsvTemporarySinkTable(
tEnv, new TableSchema(Array("first"), Array(STRING)), "MySink1")
checkEmptyFile(sinkPath)
val tableResult = tEnv.executeSql("insert into MySink1 select first from MyTable")
checkInsertTableResult(tableResult, "default_catalog.default_database.MySink1")
assertFirstValues(sinkPath)
}
@Test
def testExecuteSqlWithInsertOverwrite(): Unit = {
    if (isStreaming) {
      // Streaming mode does not support overwrite for FileSystemTableSink.
return
}
val sinkPath = _tempFolder.newFolder().toString
tEnv.executeSql(
s"""
|create table MySink (
| first string
|) with (
| 'connector' = 'filesystem',
| 'path' = '$sinkPath',
| 'format' = 'testcsv'
|)
""".stripMargin
)
val tableResult1 = tEnv.executeSql("insert overwrite MySink select first from MyTable")
checkInsertTableResult(tableResult1, "default_catalog.default_database.MySink")
assertFirstValues(sinkPath)
val tableResult2 = tEnv.executeSql("insert overwrite MySink select first from MyTable")
checkInsertTableResult(tableResult2, "default_catalog.default_database.MySink")
assertFirstValues(sinkPath)
}
@Test
def testExecuteSqlAndSqlUpdate(): Unit = {
val sink1Path = TestTableSourceSinks.createCsvTemporarySinkTable(
tEnv, new TableSchema(Array("first"), Array(STRING)), "MySink1")
val sink2Path = TestTableSourceSinks.createCsvTemporarySinkTable(
tEnv, new TableSchema(Array("last"), Array(STRING)), "MySink2")
checkEmptyFile(sink1Path)
checkEmptyFile(sink2Path)
val tableResult = tEnv.executeSql("insert into MySink1 select first from MyTable")
checkInsertTableResult(tableResult, "default_catalog.default_database.MySink1")
assertFirstValues(sink1Path)
checkEmptyFile(sink2Path)
// delete first csv file
new File(sink1Path).delete()
assertFalse(new File(sink1Path).exists())
val table2 = tEnv.sqlQuery("select last from MyTable")
tEnv.insertInto(table2, "MySink2")
tEnv.execute("test2")
assertFalse(new File(sink1Path).exists())
assertLastValues(sink2Path)
}
@Test
def testExecuteSqlAndToDataStream(): Unit = {
if (!tableEnvName.equals("StreamTableEnvironment")) {
return
}
val streamEnv = StreamExecutionEnvironment.getExecutionEnvironment
val streamTableEnv = StreamTableEnvironment.create(streamEnv, settings)
TestTableSourceSinks.createPersonCsvTemporaryTable(streamTableEnv, "MyTable")
val sink1Path = TestTableSourceSinks.createCsvTemporarySinkTable(
streamTableEnv, new TableSchema(Array("first"), Array(STRING)), "MySink1")
checkEmptyFile(sink1Path)
val table = streamTableEnv.sqlQuery("select last from MyTable where id > 0")
val resultSet = streamTableEnv.toAppendStream(table, classOf[Row])
val sink = new TestingAppendSink
resultSet.addSink(sink)
val tableResult = streamTableEnv.executeSql("insert into MySink1 select first from MyTable")
checkInsertTableResult(tableResult, "default_catalog.default_database.MySink1")
assertFirstValues(sink1Path)
// the DataStream program is not executed
assertFalse(sink.isInitialized)
deleteFile(sink1Path)
streamEnv.execute("test2")
assertEquals(getExpectedLastValues.sorted, sink.getAppendResults.sorted)
// the table program is not executed again
assertFileNotExist(sink1Path)
}
@Test
def testExecuteInsert(): Unit = {
val sinkPath = TestTableSourceSinks.createCsvTemporarySinkTable(
tEnv, new TableSchema(Array("first"), Array(STRING)), "MySink")
checkEmptyFile(sinkPath)
val table = tEnv.sqlQuery("select first from MyTable")
val tableResult = table.executeInsert("MySink")
checkInsertTableResult(tableResult, "default_catalog.default_database.MySink")
assertFirstValues(sinkPath)
}
@Test
def testExecuteInsertOverwrite(): Unit = {
    if (isStreaming) {
      // Streaming mode does not support overwrite for FileSystemTableSink.
return
}
val sinkPath = _tempFolder.newFolder().toString
tEnv.executeSql(
s"""
|create table MySink (
| first string
|) with (
| 'connector' = 'filesystem',
| 'path' = '$sinkPath',
| 'format' = 'testcsv'
|)
""".stripMargin
)
val tableResult1 = tEnv.sqlQuery("select first from MyTable").executeInsert("MySink", true)
checkInsertTableResult(tableResult1, "default_catalog.default_database.MySink")
assertFirstValues(sinkPath)
val tableResult2 = tEnv.sqlQuery("select first from MyTable").executeInsert("MySink", true)
checkInsertTableResult(tableResult2, "default_catalog.default_database.MySink")
assertFirstValues(sinkPath)
}
@Test
def testTableDMLSync(): Unit = {
    tEnv.getConfig.getConfiguration.set(TableConfigOptions.TABLE_DML_SYNC, Boolean.box(true))
val sink1Path = _tempFolder.newFolder().toString
tEnv.executeSql(
s"""
|create table MySink1 (
| first string,
| last string
|) with (
| 'connector' = 'filesystem',
| 'path' = '$sink1Path',
| 'format' = 'testcsv'
|)
""".stripMargin
)
val sink2Path = _tempFolder.newFolder().toString
tEnv.executeSql(
s"""
|create table MySink2 (
| first string
|) with (
| 'connector' = 'filesystem',
| 'path' = '$sink2Path',
| 'format' = 'testcsv'
|)
""".stripMargin
)
val sink3Path = _tempFolder.newFolder().toString
tEnv.executeSql(
s"""
|create table MySink3 (
| last string
|) with (
| 'connector' = 'filesystem',
| 'path' = '$sink3Path',
| 'format' = 'testcsv'
|)
""".stripMargin
)
val tableResult1 =
tEnv.sqlQuery("select first, last from MyTable").executeInsert("MySink1", false)
val stmtSet = tEnv.createStatementSet()
stmtSet.addInsertSql("INSERT INTO MySink2 select first from MySink1")
stmtSet.addInsertSql("INSERT INTO MySink3 select last from MySink1")
val tableResult2 = stmtSet.execute()
    // checkInsertTableResult waits until the job is finished,
    // so we assert the file values first to verify that the job has finished
assertFirstValues(sink2Path)
assertLastValues(sink3Path)
// check TableResult after verifying file values
checkInsertTableResult(
tableResult2,
"default_catalog.default_database.MySink2",
"default_catalog.default_database.MySink3" )
// Verify it's no problem to invoke await twice
tableResult1.await()
tableResult2.await()
}
@Test
def testStatementSet(): Unit = {
val sink1Path = TestTableSourceSinks.createCsvTemporarySinkTable(
tEnv, new TableSchema(Array("first"), Array(STRING)), "MySink1")
val sink2Path = TestTableSourceSinks.createCsvTemporarySinkTable(
tEnv, new TableSchema(Array("last"), Array(STRING)), "MySink2")
val stmtSet = tEnv.createStatementSet()
stmtSet.addInsert("MySink1", tEnv.sqlQuery("select first from MyTable"))
.addInsertSql("insert into MySink2 select last from MyTable")
val actual = stmtSet.explain()
val expected = TableTestUtil.readFromResource("/explain/testStatementSet.out")
assertEquals(replaceStageId(expected), replaceStageId(actual))
val tableResult = stmtSet.execute()
checkInsertTableResult(
tableResult,
"default_catalog.default_database.MySink1",
"default_catalog.default_database.MySink2")
assertFirstValues(sink1Path)
assertLastValues(sink2Path)
}
@Test
def testStatementSetWithOverwrite(): Unit = {
    if (isStreaming) {
      // Streaming mode does not support overwrite for FileSystemTableSink.
return
}
val sink1Path = _tempFolder.newFolder().toString
tEnv.executeSql(
s"""
|create table MySink1 (
| first string
|) with (
| 'connector' = 'filesystem',
| 'path' = '$sink1Path',
| 'format' = 'testcsv'
|)
""".stripMargin
)
val sink2Path = _tempFolder.newFolder().toString
tEnv.executeSql(
s"""
|create table MySink2 (
| last string
|) with (
| 'connector' = 'filesystem',
| 'path' = '$sink2Path',
| 'format' = 'testcsv'
|)
""".stripMargin
)
val stmtSet = tEnv.createStatementSet()
stmtSet.addInsert("MySink1", tEnv.sqlQuery("select first from MyTable"), true)
stmtSet.addInsertSql("insert overwrite MySink2 select last from MyTable")
val tableResult1 = stmtSet.execute()
checkInsertTableResult(
tableResult1,
"default_catalog.default_database.MySink1",
"default_catalog.default_database.MySink2")
assertFirstValues(sink1Path)
assertLastValues(sink2Path)
// execute again using same StatementSet instance
stmtSet.addInsert("MySink1", tEnv.sqlQuery("select first from MyTable"), true)
.addInsertSql("insert overwrite MySink2 select last from MyTable")
val tableResult2 = stmtSet.execute()
checkInsertTableResult(
tableResult2,
"default_catalog.default_database.MySink1",
"default_catalog.default_database.MySink2")
assertFirstValues(sink1Path)
assertLastValues(sink2Path)
}
@Test
def testStatementSetWithSameSinkTableNames(): Unit = {
    if (isStreaming) {
      // Streaming mode does not support overwrite for FileSystemTableSink.
return
}
val sinkPath = _tempFolder.newFolder().toString
tEnv.executeSql(
s"""
|create table MySink (
| first string
|) with (
| 'connector' = 'filesystem',
| 'path' = '$sinkPath',
| 'format' = 'testcsv'
|)
""".stripMargin
)
val stmtSet = tEnv.createStatementSet()
stmtSet.addInsert("MySink", tEnv.sqlQuery("select first from MyTable"), true)
stmtSet.addInsertSql("insert overwrite MySink select last from MyTable")
val tableResult = stmtSet.execute()
// only check the schema
checkInsertTableResult(
tableResult,
"default_catalog.default_database.MySink_1",
"default_catalog.default_database.MySink_2")
}
@Test
def testExecuteSelect(): Unit = {
val query =
"""
|select id, concat(concat(`first`, ' '), `last`) as `full name`
|from MyTable where mod(id, 2) = 0
""".stripMargin
val tableResult = tEnv.executeSql(query)
assertTrue(tableResult.getJobClient.isPresent)
assertEquals(ResultKind.SUCCESS_WITH_CONTENT, tableResult.getResultKind)
assertEquals(
ResolvedSchema.of(
Column.physical("id", DataTypes.INT()),
Column.physical("full name", DataTypes.STRING())),
tableResult.getResolvedSchema)
val expected = util.Arrays.asList(
Row.of(Integer.valueOf(2), "Bob Taylor"),
Row.of(Integer.valueOf(4), "Peter Smith"),
Row.of(Integer.valueOf(6), "Sally Miller"),
Row.of(Integer.valueOf(8), "Kelly Williams"))
val actual = CollectionUtil.iteratorToList(tableResult.collect())
actual.sort(new util.Comparator[Row]() {
override def compare(o1: Row, o2: Row): Int = {
o1.getField(0).asInstanceOf[Int].compareTo(o2.getField(0).asInstanceOf[Int])
}
})
assertEquals(expected, actual)
}
@Test
def testExecuteSelectWithUpdateChanges(): Unit = {
val tableResult = tEnv.sqlQuery("select count(*) as c from MyTable").execute()
assertTrue(tableResult.getJobClient.isPresent)
assertEquals(ResultKind.SUCCESS_WITH_CONTENT, tableResult.getResultKind)
assertEquals(
ResolvedSchema.of(Column.physical("c", DataTypes.BIGINT().notNull())),
tableResult.getResolvedSchema)
val expected = if (isStreaming) {
util.Arrays.asList(
Row.ofKind(RowKind.INSERT, JLong.valueOf(1)),
Row.ofKind(RowKind.UPDATE_BEFORE, JLong.valueOf(1)),
Row.ofKind(RowKind.UPDATE_AFTER, JLong.valueOf(2)),
Row.ofKind(RowKind.UPDATE_BEFORE, JLong.valueOf(2)),
Row.ofKind(RowKind.UPDATE_AFTER, JLong.valueOf(3)),
Row.ofKind(RowKind.UPDATE_BEFORE, JLong.valueOf(3)),
Row.ofKind(RowKind.UPDATE_AFTER, JLong.valueOf(4)),
Row.ofKind(RowKind.UPDATE_BEFORE, JLong.valueOf(4)),
Row.ofKind(RowKind.UPDATE_AFTER, JLong.valueOf(5)),
Row.ofKind(RowKind.UPDATE_BEFORE, JLong.valueOf(5)),
Row.ofKind(RowKind.UPDATE_AFTER, JLong.valueOf(6)),
Row.ofKind(RowKind.UPDATE_BEFORE, JLong.valueOf(6)),
Row.ofKind(RowKind.UPDATE_AFTER, JLong.valueOf(7)),
Row.ofKind(RowKind.UPDATE_BEFORE, JLong.valueOf(7)),
Row.ofKind(RowKind.UPDATE_AFTER, JLong.valueOf(8))
)
} else {
util.Arrays.asList(Row.of(JLong.valueOf(8)))
}
val actual = CollectionUtil.iteratorToList(tableResult.collect())
assertEquals(expected, actual)
}
@Test
def testExecuteSelectWithTimeAttribute(): Unit = {
val data = Seq("Mary")
val schema = new TableSchema(Array("name", "pt"), Array(Types.STRING, Types.LOCAL_DATE_TIME))
val sourceType = Types.STRING
val tableSource = new TestTableSourceWithTime(true, schema, sourceType, data, null, "pt")
// TODO refactor this after FLINK-16160 is finished
tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal("T", tableSource)
val tableResult = tEnv.executeSql("select * from T")
assertTrue(tableResult.getJobClient.isPresent)
assertEquals(ResultKind.SUCCESS_WITH_CONTENT, tableResult.getResultKind)
assertEquals(
ResolvedSchema.of(
Column.physical("name", DataTypes.STRING()),
Column.physical("pt", DataTypes.TIMESTAMP_LTZ(3))),
tableResult.getResolvedSchema)
val it = tableResult.collect()
assertTrue(it.hasNext)
val row = it.next()
assertEquals(2, row.getArity)
assertEquals("Mary", row.getField(0))
assertFalse(it.hasNext)
}
@Test
def testClearOperation(): Unit = {
TestCollectionTableFactory.reset()
val tableEnv = TableEnvironmentImpl.create(settings)
tableEnv.executeSql("create table dest1(x map<int,bigint>) with('connector' = 'COLLECTION')")
tableEnv.executeSql("create table dest2(x int) with('connector' = 'COLLECTION')")
tableEnv.executeSql("create table src(x int) with('connector' = 'COLLECTION')")
try {
// it would fail due to query and sink type mismatch
tableEnv.executeSql("insert into dest1 select count(*) from src")
Assert.fail("insert is expected to fail due to type mismatch")
} catch {
case _: Exception => //expected
}
tableEnv.executeSql("drop table dest1")
tableEnv.executeSql("insert into dest2 select x from src").await()
}
@Test
def testTemporaryOperationListener(): Unit = {
val listener = new ListenerCatalog("listener_cat")
val currentCat = tEnv.getCurrentCatalog
tEnv.registerCatalog(listener.getName, listener)
// test temporary table
tEnv.executeSql("create temporary table tbl1 (x int)")
assertEquals(0, listener.numTempTable)
tEnv.executeSql(s"create temporary table ${listener.getName}.`default`.tbl1 (x int)")
assertEquals(1, listener.numTempTable)
val tableResult = tEnv.asInstanceOf[TableEnvironmentInternal].getCatalogManager
.getTable(ObjectIdentifier.of(listener.getName, "default", "tbl1"))
assertTrue(tableResult.isPresent)
assertEquals(listener.tableComment, tableResult.get().getTable.getComment)
tEnv.executeSql("drop temporary table tbl1")
assertEquals(1, listener.numTempTable)
tEnv.executeSql(s"drop temporary table ${listener.getName}.`default`.tbl1")
assertEquals(0, listener.numTempTable)
tEnv.useCatalog(listener.getName)
tEnv.executeSql("create temporary table tbl1 (x int)")
assertEquals(1, listener.numTempTable)
tEnv.executeSql("drop temporary table tbl1")
assertEquals(0, listener.numTempTable)
tEnv.useCatalog(currentCat)
// test temporary view
tEnv.executeSql("create temporary view v1 as select 1")
assertEquals(0, listener.numTempTable)
tEnv.executeSql(s"create temporary view ${listener.getName}.`default`.v1 as select 1")
assertEquals(1, listener.numTempTable)
val viewResult = tEnv.asInstanceOf[TableEnvironmentInternal].getCatalogManager
.getTable(ObjectIdentifier.of(listener.getName, "default", "v1"))
assertTrue(viewResult.isPresent)
assertEquals(listener.tableComment, viewResult.get().getTable.getComment)
tEnv.executeSql("drop temporary view v1")
assertEquals(1, listener.numTempTable)
tEnv.executeSql(s"drop temporary view ${listener.getName}.`default`.v1")
assertEquals(0, listener.numTempTable)
tEnv.useCatalog(listener.getName)
tEnv.executeSql("create temporary view v1 as select 1")
assertEquals(1, listener.numTempTable)
tEnv.executeSql("drop temporary view v1")
assertEquals(0, listener.numTempTable)
tEnv.useCatalog(currentCat)
// test temporary function
val clzName = "foo.class.name"
try {
tEnv.executeSql(s"create temporary function func1 as '${clzName}'")
fail("Creating a temporary function with invalid class should fail")
} catch {
case _: Exception => //expected
}
assertEquals(0, listener.numTempFunc)
tEnv.executeSql(
s"create temporary function ${listener.getName}.`default`.func1 as '${clzName}'")
assertEquals(1, listener.numTempFunc)
tEnv.executeSql("drop temporary function if exists func1")
assertEquals(1, listener.numTempFunc)
tEnv.executeSql(s"drop temporary function ${listener.getName}.`default`.func1")
assertEquals(0, listener.numTempFunc)
tEnv.useCatalog(listener.getName)
tEnv.executeSql(s"create temporary function func1 as '${clzName}'")
assertEquals(1, listener.numTempFunc)
tEnv.executeSql("drop temporary function func1")
assertEquals(0, listener.numTempFunc)
tEnv.useCatalog(currentCat)
listener.close()
}
def getPersonData: List[(String, Int, Double, String)] = {
val data = new mutable.MutableList[(String, Int, Double, String)]
data.+=(("Mike", 1, 12.3, "Smith"))
data.+=(("Bob", 2, 45.6, "Taylor"))
data.+=(("Sam", 3, 7.89, "Miller"))
data.+=(("Peter", 4, 0.12, "Smith"))
data.+=(("Liz", 5, 34.5, "Williams"))
data.+=(("Sally", 6, 6.78, "Miller"))
data.+=(("Alice", 7, 90.1, "Smith"))
data.+=(("Kelly", 8, 2.34, "Williams"))
data.toList
}
private def assertFirstValues(csvFilePath: String): Unit = {
val expected = List("Mike", "Bob", "Sam", "Peter", "Liz", "Sally", "Alice", "Kelly")
val actual = readFile(csvFilePath)
assertEquals(expected.sorted, actual.sorted)
}
private def assertLastValues(csvFilePath: String): Unit = {
val actual = readFile(csvFilePath)
assertEquals(getExpectedLastValues.sorted, actual.sorted)
}
private def getExpectedLastValues: List[String] = {
List("Smith", "Taylor", "Miller", "Smith", "Williams", "Miller", "Smith", "Williams")
}
private def checkEmptyFile(csvFilePath: String): Unit = {
assertTrue(FileUtils.readFileUtf8(new File(csvFilePath)).isEmpty)
}
private def deleteFile(path: String): Unit = {
new File(path).delete()
assertFalse(new File(path).exists())
}
private def assertFileNotExist(path: String): Unit = {
assertFalse(new File(path).exists())
}
private def checkInsertTableResult(tableResult: TableResult, fieldNames: String*): Unit = {
assertTrue(tableResult.getJobClient.isPresent)
assertEquals(ResultKind.SUCCESS_WITH_CONTENT, tableResult.getResultKind)
assertEquals(
util.Arrays.asList(fieldNames: _*),
tableResult.getResolvedSchema.getColumnNames)
    // the result is not returned until the job is finished
val it = tableResult.collect()
assertTrue(it.hasNext)
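    // Insert jobs report an unknown affected-row count, which surfaces as -1 per sink table.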
val affectedRowCounts = fieldNames.map(_ => JLong.valueOf(-1L))
assertEquals(Row.of(affectedRowCounts: _*), it.next())
assertFalse(it.hasNext)
}
private def readFile(csvFilePath: String): List[String] = {
val file = new File(csvFilePath)
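    // The filesystem sink may produce either a single file or a directory of part files,
    // so merge the lines from every regular file found under the path.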
if (file.isDirectory) {
file.listFiles(new FileFilter() {
override def accept(f: File): Boolean = f.isFile
}).map(FileUtils.readFileUtf8).flatMap(_.split("\\n")).toList
} else {
FileUtils.readFileUtf8(file).split("\\n").toList
}
}
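  /**
   * In-memory catalog that counts the temporary tables/views and functions routed to it via
   * [[TemporaryOperationListener]], rewriting them with a fixed comment / function class so the
   * test can observe that the hooks were invoked.
   */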
class ListenerCatalog(name: String)
extends GenericInMemoryCatalog(name) with TemporaryOperationListener {
val tableComment: String = "listener_comment"
val funcClzName: String = classOf[TestGenericUDF].getName
var numTempTable = 0
var numTempFunc = 0
override def onCreateTemporaryTable(tablePath: ObjectPath, table: CatalogBaseTable)
: CatalogBaseTable = {
numTempTable += 1
if (table.isInstanceOf[CatalogTable]) {
new CatalogTableImpl(table.getSchema, table.getOptions, tableComment)
} else {
val view = table.asInstanceOf[CatalogView]
new CatalogViewImpl(view.getOriginalQuery, view.getExpandedQuery,
view.getSchema, view.getOptions, tableComment)
}
}
override def onDropTemporaryTable(tablePath: ObjectPath): Unit = numTempTable -= 1
override def onCreateTemporaryFunction(functionPath: ObjectPath, function: CatalogFunction)
: CatalogFunction = {
numTempFunc += 1
new CatalogFunctionImpl(funcClzName, function.getFunctionLanguage)
}
override def onDropTemporaryFunction(functionPath: ObjectPath): Unit = numTempFunc -= 1
}
}
object TableEnvironmentITCase {
@Parameterized.Parameters(name = "{0}:isStream={1}")
def parameters(): util.Collection[Array[_]] = {
util.Arrays.asList(
Array("TableEnvironment", true),
Array("TableEnvironment", false),
Array("StreamTableEnvironment", true)
)
}
}
| tillrohrmann/flink | flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/TableEnvironmentITCase.scala | Scala | apache-2.0 | 34,162 |
/*
* Copyright 2016 David Russell
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.onetapbeyond.lambda.spark.executor.examples
import io.onetapbeyond.lambda.spark.executor.Gateway._
import io.onetapbeyond.aws.gateway.executor._
import org.apache.spark._
import scala.collection.JavaConverters._
/*
* RESTIntegration
*
* A sample Spark application that demonstrates the basic usage of
* SAMBA to call a REST API on the AWS API Gateway.
*/
object RESTIntegration {
def main(args:Array[String]):Unit = {
try {
val sc = initSparkContext()
/*
* Initialize a basic batch data source for the example by
* generating an RDD[Int].
*/
val dataRDD = sc.parallelize(1 to BATCH_DATA_SIZE)
/*
* Simple RDD.max represents minimal Apache Spark application.
*/
val max = dataRDD.max
/*
* Call API_REPORT_ENDPOINT on the API indicated by our instance
* of API_GATEWAY, pushing a parameter value on the REST API call
* representing the max data value detected within our Spark driver
* program. As we are using a mock API on the AWS API Gateway there
* is no response data, in this case the result simply indicates
* success or failure.
*/
val aTaskResult = AWS.Task(API_GATEWAY)
.resource(API_REPORT_ENDPOINT)
.input(Map("max" -> max).asJava)
.post()
.execute()
/*
* Verify REST call on API_REPORT_ENDPOINT was a success.
*/
println("RESTIntegration: report call success=" + aTaskResult.success)
} catch {
case t:Throwable => println("RESTIntegration: caught ex=" + t)
}
}
def initSparkContext():SparkContext = {
val conf = new SparkConf().setAppName(APP_NAME)
new SparkContext(conf)
}
private val APP_NAME = "SAMBA REST Integration Example"
private val BATCH_DATA_SIZE = 10
private val API_ID = "06ti6xmgg2"
private val API_STAGE = "mock"
private val API_REPORT_ENDPOINT = "/report"
private val API_GATEWAY:AWSGateway = AWS.Gateway(API_ID)
.region(AWS.Region.OREGON)
.stage(API_STAGE)
.build()
}
| onetapbeyond/lambda-spark-executor | examples/scala/rest-integration/src/main/scala/io/onetapbeyond/lambda/spark/executor/examples/RESTIntegration.scala | Scala | apache-2.0 | 2,853 |
package org.cakesolutions.akkapatterns.main
import akka.actor.{Props, ActorSystem}
import com.aphelia.amqp.{ChannelOwner, ConnectionOwner}
import com.aphelia.amqp.Amqp._
import akka.util.Timeout
import com.rabbitmq.client.{DefaultConsumer, Channel, Envelope, ConnectionFactory}
import com.rabbitmq.client.AMQP.BasicProperties
import com.aphelia.amqp.Amqp.ReturnedMessage
import com.aphelia.amqp.Amqp.Publish
import com.aphelia.amqp.Amqp.ChannelParameters
import scala.Some
import com.aphelia.amqp.RpcClient.Request
import com.aphelia.amqp.Amqp.QueueParameters
import com.aphelia.amqp.Amqp.Delivery
import akka.actor.SupervisorStrategy.Stop
/**
* @author janmachacek
*/
object ClientDemo {
import scala.concurrent.ExecutionContext.Implicits.global
implicit val timeout = Timeout(100000L)
def main(args: Array[String]) {
val actorSystem = ActorSystem("AkkaPatterns")
// RabbitMQ connection factory
val connectionFactory = new ConnectionFactory()
connectionFactory.setHost("localhost")
connectionFactory.setVirtualHost("/")
// create a "connection owner" actor, which will try and reconnect automatically if the connection is lost
val connection = actorSystem.actorOf(Props(new ConnectionOwner(connectionFactory)))
val madhouse = ClientDemo.getClass.getResource("/madouse.jpg").getPath
Thread.sleep(1000)
val count = 16
val clients = (0 until count).map(_ => ConnectionOwner.createChildActor(connection, Props(new RpcStreamingClient())))
clients.foreach(_ ! Request(Publish("amq.direct", "image.key", madhouse.getBytes) :: Nil))
Thread.sleep(100000)
clients.foreach(_ ! Stop)
}
class RpcStreamingClient(channelParams: Option[ChannelParameters] = None) extends ChannelOwner(channelParams) {
var queue: String = ""
var consumer: Option[DefaultConsumer] = None
override def onChannel(channel: Channel) {
// create a private, exclusive reply queue; its name will be randomly generated by the broker
queue = declareQueue(channel, QueueParameters("", passive = false, exclusive = true)).getQueue
consumer = Some(new DefaultConsumer(channel) {
override def handleDelivery(consumerTag: String, envelope: Envelope, properties: BasicProperties, body: Array[Byte]) {
self ! Delivery(consumerTag, envelope, properties, body)
}
})
channel.basicConsume(queue, false, consumer.get)
}
when(ChannelOwner.Connected) {
case Event(p: Publish, ChannelOwner.Connected(channel)) => {
val props = p.properties.getOrElse(new BasicProperties()).builder.replyTo(queue).build()
channel.basicPublish(p.exchange, p.key, p.mandatory, p.immediate, props, p.body)
stay()
}
case Event(Stop, ChannelOwner.Connected(channel)) =>
channel.close()
stop()
case Event(Request(publish, numberOfResponses), ChannelOwner.Connected(channel)) => {
publish.foreach(p => {
val props = p.properties.getOrElse(new BasicProperties()).builder.replyTo(queue).build()
channel.basicPublish(p.exchange, p.key, p.mandatory, p.immediate, props, p.body)
})
stay()
}
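      // A streamed response chunk arrived on the private reply queue: ack it and log its size.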
case Event(delivery@Delivery(consumerTag: String, envelope: Envelope, properties: BasicProperties, body: Array[Byte]), ChannelOwner.Connected(channel)) => {
channel.basicAck(envelope.getDeliveryTag, false)
println("|"+ delivery.body.length + "|")
//val fos = new FileOutputStream("x.jpeg")
//fos.write(delivery.body)
//fos.close()
stay()
}
case Event(msg@ReturnedMessage(replyCode, replyText, exchange, routingKey, properties, body), ChannelOwner.Connected(channel)) => {
stay()
}
}
}
}
| anand-singh/akka-patterns | sbt/src/main/scala/org/cakesolutions/akkapatterns/main/ClientDemo.scala | Scala | apache-2.0 | 3,747 |
object test4 {
abstract class SeqFactory[CC[X]] {
def unapplySeq[A](x : Seq[A]) : scala.Some[Seq[A]] = null
}
object SeqExtractor extends SeqFactory[Seq]
val ss: Seq[String] = null
ss match {
case SeqExtractor(a) => /*start*/a/*end*/
}
}
//String
| LPTK/intellij-scala | testdata/typeInference/bugs4/SCL2731D.scala | Scala | apache-2.0 | 266
package org.scalatra
package test
package specs2
import javax.servlet.http.{HttpServlet, HttpServletRequest, HttpServletResponse}
class ScalatraSpecSpec extends ScalatraSpec { def is =
s2"""
get / should
return 'Hello, world.' $e1
"""
// scalatra-specs2 does not depend on Scalatra, so we'll create our own
// simple servlet for a sanity check
addServlet(new HttpServlet {
override def doGet(req: HttpServletRequest, res: HttpServletResponse) {
res.getWriter.write("Hello, world.")
}
}, "/*")
def e1 = get("/") {
body must_== "Hello, world."
}
}
| etorreborre/scalatra | specs2/src/test/scala/org/scalatra/test/specs2/ScalatraSpecSpec.scala | Scala | bsd-2-clause | 582 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.api.java
import org.apache.spark.streaming.dstream.InputDStream
import scala.language.implicitConversions
import scala.reflect.ClassTag
/**
* A Java-friendly interface to [[org.apache.spark.streaming.dstream.InputDStream]] of
* key-value pairs.
*/
class JavaPairInputDStream[K, V](val inputDStream: InputDStream[(K, V)])(
implicit val kClassTag: ClassTag[K], implicit val vClassTag: ClassTag[V]
) extends JavaPairDStream[K, V](inputDStream) {
}
object JavaPairInputDStream {
/**
* Convert a scala [[org.apache.spark.streaming.dstream.InputDStream]] of pairs to a
* Java-friendly [[org.apache.spark.streaming.api.java.JavaPairInputDStream]].
*/
implicit def fromInputDStream[K: ClassTag, V: ClassTag](
inputDStream: InputDStream[(K, V)]): JavaPairInputDStream[K, V] = {
new JavaPairInputDStream[K, V](inputDStream)
}
}
| pronix/spark | streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaPairInputDStream.scala | Scala | apache-2.0 | 1,694 |
/*
* Copyright 2012 Jahziah Wagner <jahziah[dot]wagner[at]gmail[dot]com>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.openlobby.communication
import com.openlobby.commons.CommonsService
import com.openlobby.commons.thread.ServiceThreadImpl
import com.openlobby.constants.commons.ServerConstants
import java.io.BufferedReader
import java.io.BufferedWriter
import java.io.IOException
import java.io.InputStreamReader
import java.io.OutputStreamWriter
import java.net.Socket
import java.net.UnknownHostException
import java.util.LinkedList
import org.osgi.service.log.LogService
/**
* Thread started by OSGi.
*/
class CommunicationServiceImpl extends ServiceThreadImpl with ListenerService with MessengerService {
  @volatile private var serverConstants: ServerConstants = _
  @volatile private var commonsService: CommonsService = _
  @volatile private var logService: LogService = _
private var listenerObservers = new LinkedList[ListenerObserver]
private var in : BufferedReader = _
private var os : BufferedWriter = _
def added(obs : ListenerObserver) {
logService.log(LogService.LOG_INFO, "Registered as a listener observer: " + obs +".")
listenerObservers add obs
}
def removed(obs : ListenerObserver) {
logService.log(LogService.LOG_INFO, "Unregistered as a listener observer: " + obs +".")
listenerObservers remove obs
}
override def run {
logService.log(LogService.LOG_INFO, "Communication Thread started.")
connectToServer
try {
while(getRunState) {
val msg = listen(in)
update(msg)
}
} catch {
case e: IOException => logService.log(LogService.LOG_ERROR, e.getMessage) // TODO notify listeners then stop
}
}
private def connectToServer {
while(in == null) {
logService.log(LogService.LOG_INFO, "Connecting to server.")
try {
val t = connect
in = t._1
os = t._2
} catch {
case e : UnknownHostException =>
logService.log(LogService.LOG_WARNING, "Lobby server host was unavailable. Re-trying in 25 seconds.")
}
if(in == null || os == null) {
        Thread.sleep(25000) // sleep for 25s then retry
}
}
}
@throws(classOf[UnknownHostException])
private def connect:(BufferedReader, BufferedWriter)= {
val socket = new Socket(serverConstants.getLobbyServer, serverConstants.getLobbyServerPort)
return (new BufferedReader(new InputStreamReader(socket.getInputStream)),
new BufferedWriter(new OutputStreamWriter(socket.getOutputStream)))
}
@throws(classOf[IOException])
private def listen(in : BufferedReader):String= {
    if (in == null) {
      throw new IOException("No connection to remote server.")
}
return in.readLine
}
/**
* Notifies listeners of message.
*/
private def update(msg : String) {
logService.log(LogService.LOG_DEBUG, msg)
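    // Split the raw line into tab-separated sentences and space-separated arguments;
    // the first token is the command name passed to the observers.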
val sentences = msg.split("\\t")
val args = msg.split(" ")
val cmd = args.head
val it = listenerObservers.iterator
while(it.hasNext) {
val obs : ListenerObserver = it.next
obs.update(cmd, args, sentences)
}
}
def send(cmd : String) {
logService.log(LogService.LOG_DEBUG, "Sent command: " + cmd)
os.write(cmd)
os.flush
}
}
| jahwag/OpenLobby | modules/Communication/src/main/scala/com/openlobby/communication/CommunicationServiceImpl.scala | Scala | apache-2.0 | 3,879 |
import org.junit.runner.RunWith
import org.specs2._
@RunWith(classOf[org.specs2.runner.JUnitRunner])
class B extends Specification
{
// sequential=true is necessary to get junit in the call stack
// otherwise, junit calls specs, which then runs tests on separate threads
def is = args(sequential=true) ^ s2"""
This should
fail if 'succeed' file is missing $succeedNeeded
not run via JUnit $noJUnit
"""
def succeedNeeded = {
val f = new java.io.File("succeed")
f.exists must_== true
}
def noJUnit = {
println("Trace: " + RunBy.trace.mkString("\\n\\t", "\\n\\t", ""))
RunBy.junit must_== false
}
}
object RunBy
{
def trace = (new Exception).getStackTrace.map(_.getClassName)
def junit = trace.exists(_.contains("org.junit"))
}
| pdalpra/sbt | sbt/src/sbt-test/tests/one-class-multi-framework/src/test/scala/Test.scala | Scala | bsd-3-clause | 830 |
package com.boxpizza.actors
import akka.actor.{Actor, ActorLogging, ActorRef, Props}
import spray.json.DefaultJsonProtocol._
object WaiterActor {
case class Cost(pizza: String, cost: Double, client: ActorRef)
case class Bill(pizza: String, bill: Double)
object Bill {
implicit val goJson = jsonFormat2(Bill.apply)
}
}
class WaiterActor extends Actor with ActorLogging {
import WaiterActor._
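  // `CookActor` and its `Cook` message are assumed to live elsewhere in com.boxpizza.actors:
  // each order is handed to a fresh cook, and the resulting Cost is relayed back to the client as a Bill.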
def receive = {
case pizza: String =>
log.info(s"$pizza ordered")
context.actorOf(Props[CookActor]) ! Cook(pizza, sender)
case Cost(pizza, cost, client) =>
client ! Bill(s"$pizza", cost)
case _ => log.info("Still waiting for an ordination")
}
}
| codepr/boxpizza | src/main/scala/com/boxpizza/actors/WaiterActor.scala | Scala | mit | 695 |
package mesosphere.marathon
package core.appinfo
import mesosphere.marathon.raml.PodStatus
import mesosphere.marathon.state.PathId
import scala.concurrent.Future
trait PodStatusService {
/**
* @return the status of the pod at the given path, if such a pod exists
*/
def selectPodStatus(id: PathId, selector: PodSelector = Selector.all): Future[Option[PodStatus]]
/**
* @return the statuses of the pods at the given paths, if the pod exists
*/
def selectPodStatuses(ids: Set[PathId], selector: PodSelector = Selector.all): Future[Seq[PodStatus]]
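  // Illustrative usage only (not part of the original interface): a caller might fetch a single
  // pod with selectPodStatus(PathId("/prod/api")) or a batch with selectPodStatuses(ids).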
}
| gsantovena/marathon | src/main/scala/mesosphere/marathon/core/appinfo/PodStatusService.scala | Scala | apache-2.0 | 576 |
package au.com.nicta
package postmark
package sending
import org.scalacheck.{Choose, Gen, Arbitrary}, Arbitrary.arbitrary, Gen._
import scalaz._, std.option._, syntax.show._, syntax.apply._, scalacheck.ScalaCheckBinding._
import org.joda.time.DateTime
import au.com.nicta.test.BasicArbitraries
import au.com.nicta.postmark.common.Header
import org.apache.commons.codec.binary.Base64
trait PostmarkArbitraries extends BasicArbitraries {
implicit def AttachmentArbitrary: Arbitrary[Attachment] =
Arbitrary(for {
name <- arbitrary[NonEmptyString]
content <- arbitrary[NonEmptyString]
contentType <- arbitrary[ContentTypeString]
} yield Attachment(name.value + ".txt", Base64.encodeBase64String(content.value.toCharArray.map(_.toByte)), contentType.value))
implicit def SentEmailArbitrary: Arbitrary[SentEmail] =
Arbitrary(for {
id <- arbitrary[String]
submittedAt <- arbitrary[DateTime]
to <- genLimitedList[EmailString](0, 20)
} yield SentEmail(id, submittedAt, to.map(_.value)))
implicit def EmailArbitrary: Arbitrary[Email] = Arbitrary(for {
from <- arbitrary[EmailString]
to <- genLimitedList[EmailString](1, 20)
cc <- genLimitedList[EmailString](0, 20)
bcc <- genLimitedList[EmailString](0, 20)
subject <- arbitrary[String]
tag <- arbitrary[Option[String]]
html <- arbitrary[Option[String]]
text <- arbitrary[Option[String]]
replyTo <- arbitrary[EmailString]
headers <- genLimitedList[Header](0, 20)
attachments <- genLimitedList[Attachment](0, 20)
backupText <- arbitrary[NonEmptyString]
} yield {
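    // Guarantee the generated email always has at least one body: when both text and html are
    // None, fall back to backupText for both.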
val (textToUse, htmlToUse) = (text, html) match {
case (None, None) => (Some(backupText.value), Some(backupText.value))
case (a, b) => (a, b)
}
Email(
from.value,
to.map(_.value), cc.map(_.value), bcc.map(_.value),
subject, tag, htmlToUse, textToUse, replyTo.value,
headers,
attachments)
})
case class ContentTypeString(value: String)
implicit def ContentTypeStringArbitrary: Arbitrary[ContentTypeString] =
Arbitrary(Gen.oneOf(List("text/plain")) map ContentTypeString)
}
| NICTA/postmarkapp-client | src/test/scala/au/com/nicta/postmark/sending/PostmarkArbitraries.scala | Scala | bsd-3-clause | 2,151 |
package at.forsyte.apalache.tla.pp.passes
import at.forsyte.apalache.infra.passes.{Pass, TlaModuleMixin}
/**
* An optimization pass that applies to KerA+.
*
* @author Igor Konnov
*/
trait OptPass extends Pass with TlaModuleMixin
| konnov/dach | tla-pp/src/main/scala/at/forsyte/apalache/tla/pp/passes/OptPass.scala | Scala | apache-2.0 | 239 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.k8s.submit.submitsteps.hadoopsteps
import java.io.File
import org.apache.spark.SparkConf
import org.apache.spark.deploy.k8s.{HadoopConfBootstrapImpl, HadoopConfUtils, HadoopUGIUtilImpl, OptionRequirements}
import org.apache.spark.deploy.k8s.HadoopConfSparkUserBootstrapImpl
import org.apache.spark.deploy.k8s.config._
import org.apache.spark.internal.Logging
/**
* Returns the complete ordered list of steps required to configure the hadoop configurations.
*/
private[spark] class HadoopStepsOrchestrator(
kubernetesResourceNamePrefix: String,
namespace: String,
hadoopConfigMapName: String,
submissionSparkConf: SparkConf,
hadoopConfDir: String) extends Logging {
private val isKerberosEnabled = submissionSparkConf.get(KUBERNETES_KERBEROS_SUPPORT)
private val maybePrincipal = submissionSparkConf.get(KUBERNETES_KERBEROS_PRINCIPAL)
private val maybeKeytab = submissionSparkConf.get(KUBERNETES_KERBEROS_KEYTAB)
.map(k => new File(k))
private val maybeExistingSecret = submissionSparkConf.get(KUBERNETES_KERBEROS_DT_SECRET_NAME)
private val maybeExistingSecretItemKey =
submissionSparkConf.get(KUBERNETES_KERBEROS_DT_SECRET_ITEM_KEY)
private val maybeRenewerPrincipal =
submissionSparkConf.get(KUBERNETES_KERBEROS_RENEWER_PRINCIPAL)
private val hadoopConfigurationFiles = HadoopConfUtils.getHadoopConfFiles(hadoopConfDir)
private val hadoopUGI = new HadoopUGIUtilImpl
logInfo(s"Hadoop Conf directory: $hadoopConfDir")
require(maybeKeytab.forall( _ => isKerberosEnabled ),
"You must enable Kerberos support if you are specifying a Kerberos Keytab")
require(maybeExistingSecret.forall( _ => isKerberosEnabled ),
"You must enable Kerberos support if you are specifying a Kerberos Secret")
OptionRequirements.requireBothOrNeitherDefined(
maybeKeytab,
maybePrincipal,
"If a Kerberos keytab is specified you must also specify a Kerberos principal",
"If a Kerberos principal is specified you must also specify a Kerberos keytab")
OptionRequirements.requireBothOrNeitherDefined(
maybeExistingSecret,
maybeExistingSecretItemKey,
"If a secret storing a Kerberos Delegation Token is specified you must also" +
" specify the label where the data is stored",
"If a secret data item-key where the data of the Kerberos Delegation Token is specified" +
" you must also specify the name of the secret")
def getHadoopSteps(): Seq[HadoopConfigurationStep] = {
val hadoopConfBootstrapImpl = new HadoopConfBootstrapImpl(
hadoopConfigMapName,
hadoopConfigurationFiles)
val hadoopConfMounterStep = new HadoopConfMounterStep(
hadoopConfigMapName,
hadoopConfigurationFiles,
hadoopConfBootstrapImpl,
hadoopConfDir)
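    // With Kerberos enabled, reuse an existing delegation-token secret when one is configured,
    // otherwise resolve credentials from the keytab; without Kerberos, only propagate the simple Spark user.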
val maybeKerberosStep =
if (isKerberosEnabled) {
maybeExistingSecret.map(existingSecretName => Some(new HadoopKerberosSecretResolverStep(
submissionSparkConf,
existingSecretName,
maybeExistingSecretItemKey.get))).getOrElse(Some(
new HadoopKerberosKeytabResolverStep(
kubernetesResourceNamePrefix,
submissionSparkConf,
maybePrincipal,
maybeKeytab,
maybeRenewerPrincipal,
hadoopUGI)))
} else {
Some(new HadoopConfSparkUserStep(new HadoopConfSparkUserBootstrapImpl(hadoopUGI)))
}
Seq(hadoopConfMounterStep) ++ maybeKerberosStep.toSeq
}
}
| apache-spark-on-k8s/spark | resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/submitsteps/hadoopsteps/HadoopStepsOrchestrator.scala | Scala | apache-2.0 | 4,316 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark
import org.scalatest.FunSuite
import org.apache.spark.rdd.RDD
class ImplicitOrderingSuite extends FunSuite with LocalSparkContext {
// Tests that PairRDDFunctions grabs an implicit Ordering in various cases where it should.
test("basic inference of Orderings"){
sc = new SparkContext("local", "test")
val rdd = sc.parallelize(1 to 10)
// These RDD methods are in the companion object so that the unserializable ScalaTest Engine
// won't be reachable from the closure object
// Infer orderings after basic maps to particular types
val basicMapExpectations = ImplicitOrderingSuite.basicMapExpectations(rdd)
basicMapExpectations.map({case (met, explain) => assert(met, explain)})
// Infer orderings for other RDD methods
val otherRDDMethodExpectations = ImplicitOrderingSuite.otherRDDMethodExpectations(rdd)
otherRDDMethodExpectations.map({case (met, explain) => assert(met, explain)})
}
}
private object ImplicitOrderingSuite {
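  // Three key types with increasing ordering support: no ordering at all, Java Comparable,
  // and Scala Ordered. Only the first should leave keyOrdering empty.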
class NonOrderedClass {}
class ComparableClass extends Comparable[ComparableClass] {
override def compareTo(o: ComparableClass): Int = ???
}
class OrderedClass extends Ordered[OrderedClass] {
override def compare(o: OrderedClass): Int = ???
}
def basicMapExpectations(rdd: RDD[Int]) = {
List((rdd.map(x => (x, x)).keyOrdering.isDefined,
"rdd.map(x => (x, x)).keyOrdering.isDefined"),
(rdd.map(x => (1, x)).keyOrdering.isDefined,
"rdd.map(x => (1, x)).keyOrdering.isDefined"),
(rdd.map(x => (x.toString, x)).keyOrdering.isDefined,
"rdd.map(x => (x.toString, x)).keyOrdering.isDefined"),
(rdd.map(x => (null, x)).keyOrdering.isDefined,
"rdd.map(x => (null, x)).keyOrdering.isDefined"),
(rdd.map(x => (new NonOrderedClass, x)).keyOrdering.isEmpty,
"rdd.map(x => (new NonOrderedClass, x)).keyOrdering.isEmpty"),
(rdd.map(x => (new ComparableClass, x)).keyOrdering.isDefined,
"rdd.map(x => (new ComparableClass, x)).keyOrdering.isDefined"),
(rdd.map(x => (new OrderedClass, x)).keyOrdering.isDefined,
"rdd.map(x => (new OrderedClass, x)).keyOrdering.isDefined"))
}
def otherRDDMethodExpectations(rdd: RDD[Int]) = {
List((rdd.groupBy(x => x).keyOrdering.isDefined,
"rdd.groupBy(x => x).keyOrdering.isDefined"),
(rdd.groupBy(x => new NonOrderedClass).keyOrdering.isEmpty,
"rdd.groupBy(x => new NonOrderedClass).keyOrdering.isEmpty"),
(rdd.groupBy(x => new ComparableClass).keyOrdering.isDefined,
"rdd.groupBy(x => new ComparableClass).keyOrdering.isDefined"),
(rdd.groupBy(x => new OrderedClass).keyOrdering.isDefined,
"rdd.groupBy(x => new OrderedClass).keyOrdering.isDefined"),
(rdd.groupBy((x: Int) => x, 5).keyOrdering.isDefined,
"rdd.groupBy((x: Int) => x, 5).keyOrdering.isDefined"),
(rdd.groupBy((x: Int) => x, new HashPartitioner(5)).keyOrdering.isDefined,
"rdd.groupBy((x: Int) => x, new HashPartitioner(5)).keyOrdering.isDefined"))
}
}
| Dax1n/spark-core | core/src/test/scala/org/apache/spark/ImplicitOrderingSuite.scala | Scala | apache-2.0 | 3,959 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.reflect.reify
package codegen
trait GenNames {
self: Reifier =>
import global._
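  /** Reify a name by calling the mirror's TermName/TypeName factory on its string form. */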
def reifyName(name: Name) = {
val factory = if (name.isTypeName) nme.TypeName else nme.TermName
mirrorCall(factory, Literal(Constant(name.toString)))
}
}
| lrytz/scala | src/compiler/scala/reflect/reify/codegen/GenNames.scala | Scala | apache-2.0 | 557 |