| code (string, 5-1M chars) | repo_name (string, 5-109 chars) | path (string, 6-208 chars) | language (string, 1 class) | license (string, 15 classes) | size (int64, 5-1M) |
|---|---|---|---|---|---|
package cli.printer
import java.io.OutputStream
import core.Pipeline
/**
* Created by OpenDataFlow on 31/12/15.
*/
object Printer {
def getPrinter(name: String): Printer = {
name match {
case "text" ⇒ new PlainTextPrinter()
case "dot" ⇒ new DotPrinter()
// if we don't have it let's assume it's a class name
case _ ⇒
  val c = Class.forName(name)
  // Note: `Printer.getClass` is the class of the companion object (`Printer$`),
  // not the trait, so the conformance check must use `classOf[Printer]`.
  if (classOf[Printer].isAssignableFrom(c)) {
    c.newInstance().asInstanceOf[Printer]
  } else {
    throw new Exception(s"Class ${name} does not extend Printer")
  }
}
}
}
trait Printer {
def renderPipeline(p: Pipeline, o: OutputStream)
}
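// Hypothetical usage sketch (not part of the original file): resolving a printer by
// one of the built-in names ("text", "dot") or by a fully qualified class name;
// `pipeline` stands in for a real core.Pipeline instance.
//
//   val printer = Printer.getPrinter("dot")
//   printer.renderPipeline(pipeline, System.out)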
| opendataflow/opendataflow | opendataflow-cli/src/main/scala/cli/printer/Printer.scala | Scala | apache-2.0 | 689 |
package eu.semberal.dbstress.actor
import akka.actor.{Actor, ActorRef, Props, Status}
import akka.event.LoggingReceive
import com.typesafe.scalalogging.LazyLogging
import eu.semberal.dbstress.actor.ControllerActor._
import eu.semberal.dbstress.actor.DatabaseActor.{InitConnection, StartUnitRun}
import eu.semberal.dbstress.model.Configuration.ScenarioConfig
import eu.semberal.dbstress.model.Results._
import eu.semberal.dbstress.util.IdGen
class ControllerActor(sc: ScenarioConfig) extends Actor with LazyLogging {
private val totalConnections = sc.units.map(_.parallelConnections).sum
override def receive: Receive = LoggingReceive { case RunScenario =>
val scenarioId = IdGen.genScenarioId()
logger.info(s"Starting the scenario (ID: $scenarioId)")
sc.units.foreach(u => {
(1 to u.parallelConnections).foreach { parId =>
val a = context.actorOf(
DatabaseActor.props(scenarioId, u.name, u.config),
s"dbactor_${u.name}_$parId"
)
a ! InitConnection
}
})
context.become(waitForInitConfirmation(sender(), 0))
logger.info(
s"Waiting for $totalConnections database connections to be initialized"
)
}
private def waitForInitConfirmation(
client: ActorRef,
collectedCount: Int
): Receive = LoggingReceive {
case UnitRunInitializationFinished
if collectedCount + 1 == totalConnections =>
logger.info(
"All database connections initialized, proceeding to the query execution phase"
)
context.children.foreach { ref =>
ref ! StartUnitRun
}
context.become(waitForFinish(client, Nil))
case UnitRunInitializationFinished =>
logger.debug(
s"Initialized database connections: ${collectedCount + 1}/$totalConnections"
)
context.become(waitForInitConfirmation(client, collectedCount + 1))
case UnitRunInitializationFailed(e) =>
val msg = e match {
case _: ClassNotFoundException => s"Class ${e.getMessage} not found"
case _ => e.getMessage
}
logger.error(s"Cannot initialize database connection: $msg")
context.stop(self)
client ! Status.Failure(new ConnectionInitException(e))
}
private def waitForFinish(
client: ActorRef,
unitRunResults: List[(String, UnitRunResult)]
): Receive = LoggingReceive {
case UnitRunFinished(unitName, result) =>
logger.info(
s"Finished unit runs: ${unitRunResults.length + 1}/$totalConnections"
)
context.become(
waitForFinish(client, (unitName -> result) :: unitRunResults)
)
if (totalConnections - unitRunResults.length == 1) self ! Done
case UnitRunError(e) =>
context.stop(self)
client ! Status.Failure(e)
case Done =>
val allCalls = unitRunResults.flatMap(_._2.callResults)
val failedCalls = allCalls.count(_.isInstanceOf[DbCallFailure])
val msg =
if (failedCalls > 0) s"($failedCalls/${allCalls.length} calls failed)"
else ""
logger.info("All database operations finished {}", msg)
val unitResultMap =
unitRunResults.groupBy(_._1).view.mapValues(_.map(_._2))
client ! ScenarioResult(
sc.units.map(conf => UnitResult(conf, unitResultMap(conf.name))).toList
)
}
}
object ControllerActor {
case object RunScenario
private[actor] case object UnitRunInitializationFinished
private[actor] final case class UnitRunInitializationFailed(e: Throwable)
private[actor] final case class UnitRunFinished(
unitName: String,
result: UnitRunResult
)
private[actor] final case class UnitRunError(e: UnitRunException)
private case object Done
def props(sc: ScenarioConfig) = Props(classOf[ControllerActor], sc)
}
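// Hypothetical usage sketch (not part of the original file): a client starts the
// scenario by sending RunScenario; the actor replies with a ScenarioResult, or a
// Status.Failure if initialization or a unit run fails. With the ask pattern
// (assuming an implicit Timeout is in scope):
//
//   val controller = system.actorOf(ControllerActor.props(scenarioConfig))
//   val result = (controller ? RunScenario).mapTo[ScenarioResult]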
| semberal/dbstress | src/main/scala/eu/semberal/dbstress/actor/ControllerActor.scala | Scala | apache-2.0 | 3,800 |
import sbt._
import Keys._
object Settings {
val appName = "coral"
val buildOrganization = "io.coral"
val buildVersion = "1.0.0-SNAPSHOT"
val buildScalaVersion = "2.11.5"
val buildSettings = Defaults.coreDefaultSettings ++ Seq (
organization := buildOrganization,
version := buildVersion,
scalaVersion := buildScalaVersion,
shellPrompt := ShellPrompt.buildShellPrompt,
scalacOptions := Seq("-deprecation", "-feature", "-encoding", "utf8")
)
}
// Shell prompt which shows the current project,
// git branch and build version
object ShellPrompt {
object devnull extends ProcessLogger {
def info (s: => String) {}
def error (s: => String) { }
def buffer[T] (f: => T): T = f
}
def currBranch = (
(("git status -sb" lines_! devnull).headOption)
getOrElse "-" stripPrefix "## "
)
val buildShellPrompt = {
(state: State) => {
val currProject = Project.extract (state).currentProject.id
"%s:%s:%s> ".format (
currProject, currBranch, Settings.buildVersion
)
}
}
}
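// Hypothetical usage sketch (not part of the original file): in an sbt 0.13-style
// build definition, these settings would be applied to a project like so:
//
//   lazy val coral = Project(Settings.appName, file("."))
//     .settings(Settings.buildSettings: _*)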
| daishichao/coral | project/settings.scala | Scala | apache-2.0 | 1,078 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This code is a modified version of the original Spark 1.0.2 K-Means implementation.
*/
package com.massivedatascience.clusterer
import com.massivedatascience.linalg.WeightedVector
import com.massivedatascience.util.XORShiftRandom
import org.apache.spark.Logging
import scala.annotation.tailrec
import scala.collection.mutable.ArrayBuffer
/**
* This implements the
* <a href="http://ilpubs.stanford.edu:8090/778/1/2006-13.pdf">KMeans++ initialization algorithm</a>
*
* @param ops distance function
*/
class KMeansPlusPlus(ops: BregmanPointOps) extends Serializable with Logging {
/**
 * Select centers in rounds. On each round, select 'perRound' centers, with probability of
 * selection proportional to the product of each candidate's weight and its distance to the
 * closest cluster center selected in previous rounds.
*
* This version allows some centers to be pre-selected.
*
* @param seed a random number seed
* @param candidateCenters the candidate centers
* @param weights the weights on the candidate centers
* @param totalRequested the total number of centers to select
* @param perRound the number of centers to add per round
* @param numPreselected the number of pre-selected centers
 * @return an indexed sequence of at most `totalRequested` cluster centers
*/
def goodCenters(
seed: Long,
candidateCenters: IndexedSeq[BregmanCenter],
weights: IndexedSeq[Double],
totalRequested: Int,
perRound: Int,
numPreselected: Int): IndexedSeq[BregmanCenter] = {
require(candidateCenters.length > 0)
require(totalRequested > 0 && totalRequested <= candidateCenters.length)
require(numPreselected >= 0 && numPreselected <= totalRequested)
require(perRound <= totalRequested)
if (candidateCenters.length < totalRequested)
logWarning(s"# requested $totalRequested exceeds number of points ${candidateCenters.length}")
val points = reWeightedPoints(candidateCenters, weights)
val rand = new XORShiftRandom(seed)
val centers = new ArrayBuffer[BregmanCenter](totalRequested)
@tailrec
def moreCenters(distances: IndexedSeq[Double]): Unit = {
val needed = totalRequested - centers.length
if (needed > 0) {
val cumulative = cumulativeWeights(points.zip(distances).map { case (p, d) => p.weight * d })
val selected = (0 until perRound).par.flatMap { _ =>
pickWeighted(rand, cumulative).iterator
}
val additionalCenters = selected.map(candidateCenters(_)).toIndexedSeq
val newDistances = updateDistances(points, distances, additionalCenters)
centers ++= additionalCenters.take(needed)
if (additionalCenters.nonEmpty) moreCenters(newDistances)
}
}
centers ++= (if (numPreselected == 0) {
pickWeighted(rand, cumulativeWeights(weights)).map(candidateCenters(_))
} else {
candidateCenters.take(numPreselected)
})
val maxDistances = IndexedSeq.fill(candidateCenters.length)(Double.MaxValue)
val initialDistances = updateDistances(points, maxDistances, centers)
moreCenters(initialDistances)
centers.take(totalRequested)
}
private[this] def reWeightedPoints(
candidateCenters: IndexedSeq[BregmanCenter],
weights: IndexedSeq[Double]): IndexedSeq[KMeansPlusPlus.this.ops.P] = {
candidateCenters.zip(weights).map {
case (c, w) =>
WeightedVector.fromInhomogeneousWeighted(c.inhomogeneous, w)
}.map(ops.toPoint)
}
/**
* Update the distance of each point to its closest cluster center, given the cluster
* centers that were added.
*
 * @param points set of candidate initial cluster centers
 * @param distances current distance of each point to its closest cluster center
 * @param centers newly added cluster centers
 * @return the updated distance of each point to its closest cluster center
*/
private[this] def updateDistances(
points: IndexedSeq[BregmanPoint],
distances: IndexedSeq[Double],
centers: IndexedSeq[BregmanCenter]): IndexedSeq[Double] = {
val newDistances = points.zip(distances).par.map {
case (p, d) =>
Math.min(ops.pointCost(centers, p), d)
}
newDistances.toIndexedSeq
}
def cumulativeWeights(weights: IndexedSeq[Double]): IndexedSeq[Double] =
weights.scanLeft(0.0)(_ + _).tail
/**
* Pick a point at random, weighing the choices by the given cumulative weight vector.
*
* @param rand random number generator
* @param cumulative the cumulative weights of the points
* @return the index of the point chosen
*/
private[this] def pickWeighted(rand: XORShiftRandom, cumulative: IndexedSeq[Double]): Seq[Int] = {
val r = rand.nextDouble() * cumulative.last
val index = cumulative.indexWhere(x => x > r)
if (index == -1) Seq[Int]() else Seq(index)
}
}
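// Hypothetical usage sketch (not part of the original file): selecting ten centers
// from a weighted candidate set, one per round, with no pre-selected centers.
// `pointOps`, `candidates` and `weights` are assumed to be defined elsewhere.
//
//   val kpp = new KMeansPlusPlus(pointOps)
//   val centers = kpp.goodCenters(seed = 42L, candidates, weights,
//     totalRequested = 10, perRound = 1, numPreselected = 0)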
| derrickburns/generalized-kmeans-clustering | src/main/scala/com/massivedatascience/clusterer/KMeansPlusPlus.scala | Scala | apache-2.0 | 5,522 |
package cmwell.analytics.main
import cmwell.analytics.data.InfotonWithKeyFields
import cmwell.analytics.util.{CmwellConnector, DatasetFilter}
import cmwell.analytics.util.DatasetFilter._
import cmwell.analytics.util.TimestampConversion.timestampConverter
import org.apache.log4j.LogManager
import org.rogach.scallop.{ScallopConf, ScallopOption}
object DumpInfotonWithKeyFields {
def main(args: Array[String]): Unit = {
val logger = LogManager.getLogger(DumpInfotonWithKeyFields.getClass)
// Here, the parallelism defines how many partitions are produced.
// Having too many partitions (esp. with a shuffle) creates pathological I/O patterns.
val defaultParallelism = 1 max (Runtime.getRuntime.availableProcessors / 2)
try {
object Opts extends ScallopConf(args) {
val parallelism: ScallopOption[Int] = opt[Int]("parallelism", short = 'p', descr = "The parallelism level", default = Some(defaultParallelism))
val lastModifiedGteFilter: ScallopOption[java.sql.Timestamp] = opt[java.sql.Timestamp]("lastmodified-gte-filter", descr = "Filter on lastModified >= <value>, where value is an ISO8601 timestamp", default = None)(timestampConverter)
val pathPrefixFilter: ScallopOption[String] = opt[String]("path-prefix-filter", descr = "Filter on the path prefix matching <value>", default = None)
val out: ScallopOption[String] = opt[String]("out", short = 'o', descr = "The path to save the output to ", required = true)
val shell: ScallopOption[Boolean] = opt[Boolean]("spark-shell", short = 's', descr = "Run a Spark shell", required = false, default = Some(false))
val url: ScallopOption[String] = trailArg[String]("url", descr = "A CM-Well URL", required = true)
val format: ScallopOption[String] = opt[String]("format", short = 'f', descr = "The output format: csv | parquet", required = false, default = Some("parquet"))
validateOpt(format) {
case Some("parquet") | Some("csv") => Right(Unit)
case _ => Left(s"Invalid format - must be 'csv' or 'parquet'.")
}
verify()
}
CmwellConnector(
cmwellUrl = Opts.url(),
appName = "Dump infoton table - uuid, lastModified, path",
sparkShell = Opts.shell()
).withSparkSessionDo { spark =>
val datasetFilter = DatasetFilter(
lastModifiedGte = Opts.lastModifiedGteFilter.toOption,
pathPrefix = Opts.pathPrefixFilter.toOption)
val ds = InfotonWithKeyFields(Some(datasetFilter))(spark)
.coalesce(Opts.parallelism() * CmwellConnector.coalesceParallelismMultiplier)
Opts.format() match {
case "parquet" => ds.write.parquet(Opts.out())
case "csv" => ds.write.csv(Opts.out())
}
}
}
catch {
case ex: Throwable =>
logger.error(ex.getMessage, ex)
System.exit(1)
}
}
}
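// Hypothetical invocation sketch (not part of the original file); the option names
// follow the ScallopConf definition above, while the URL and paths are made up:
//
//   DumpInfotonWithKeyFields --parallelism 4 --format csv \
//     --lastmodified-gte-filter 2018-01-01T00:00:00Z \
//     --out /tmp/infotons http://cmwell.example.org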
| dudi3001/CM-Well | tools/dataConsistencyTool/cmwell-spark-analysis/src/main/scala/cmwell/analytics/main/DumpInfotonWithKeyFields.scala | Scala | apache-2.0 | 2,908 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala
/** `Nothing` is - together with [[scala.Null]] - at the bottom of Scala's type hierarchy.
*
* `Nothing` is a subtype of every other type (including [[scala.Null]]); there exist
* ''no instances'' of this type. Although type `Nothing` is uninhabited, it is
* nevertheless useful in several ways. For instance, the Scala library defines a value
* [[scala.collection.immutable.Nil]] of type `List[Nothing]`. Because lists are covariant in Scala,
* this makes [[scala.collection.immutable.Nil]] an instance of `List[T]`, for any element of type `T`.
*
* Another usage for Nothing is the return type for methods which never return normally.
* One example is method error in [[scala.sys]], which always throws an exception.
*/
sealed trait Nothing
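// Illustrative sketch (not part of the original file): because `Nothing` is a
// subtype of every type, a method that always throws can be used wherever any
// return type is expected.
//
//   def fail(msg: String): Nothing = throw new RuntimeException(msg)
//   def head[A](xs: List[A]): A = if (xs.isEmpty) fail("empty list") else xs.head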
| lrytz/scala | test/scaladoc/resources/doc-root/Nothing.scala | Scala | apache-2.0 | 1,068 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.streaming.test
import java.io.File
import java.util.ConcurrentModificationException
import java.util.Locale
import java.util.concurrent.TimeUnit
import scala.concurrent.duration._
import org.apache.hadoop.fs.Path
import org.apache.spark.SparkConf
import org.mockito.ArgumentMatchers.{any, eq => meq}
import org.mockito.Mockito._
import org.scalatest.BeforeAndAfter
import org.apache.spark.sql._
import org.apache.spark.sql.execution.streaming._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.sources.{StreamSinkProvider, StreamSourceProvider}
import org.apache.spark.sql.streaming.{OutputMode, StreamTest, StreamingQuery, StreamingQueryException}
import org.apache.spark.sql.streaming.Trigger._
import org.apache.spark.sql.types._
import org.apache.spark.util.Utils
object LastOptions {
var mockStreamSourceProvider = mock(classOf[StreamSourceProvider])
var mockStreamSinkProvider = mock(classOf[StreamSinkProvider])
var parameters: Map[String, String] = null
var schema: Option[StructType] = null
var partitionColumns: Seq[String] = Nil
def clear(): Unit = {
parameters = null
schema = null
partitionColumns = null
reset(mockStreamSourceProvider)
reset(mockStreamSinkProvider)
}
}
/** Dummy provider: returns no-op source/sink and records options in [[LastOptions]]. */
class DefaultSource extends StreamSourceProvider with StreamSinkProvider {
private val fakeSchema = StructType(StructField("a", IntegerType) :: Nil)
override def sourceSchema(
spark: SQLContext,
schema: Option[StructType],
providerName: String,
parameters: Map[String, String]): (String, StructType) = {
LastOptions.parameters = parameters
LastOptions.schema = schema
LastOptions.mockStreamSourceProvider.sourceSchema(spark, schema, providerName, parameters)
("dummySource", fakeSchema)
}
override def createSource(
spark: SQLContext,
metadataPath: String,
schema: Option[StructType],
providerName: String,
parameters: Map[String, String]): Source = {
LastOptions.parameters = parameters
LastOptions.schema = schema
LastOptions.mockStreamSourceProvider.createSource(
spark, metadataPath, schema, providerName, parameters)
new Source {
override def schema: StructType = fakeSchema
override def getOffset: Option[Offset] = Some(new LongOffset(0))
override def getBatch(start: Option[Offset], end: Offset): DataFrame = {
import spark.implicits._
spark.internalCreateDataFrame(spark.sparkContext.emptyRDD, schema, isStreaming = true)
}
override def stop(): Unit = {}
}
}
override def createSink(
spark: SQLContext,
parameters: Map[String, String],
partitionColumns: Seq[String],
outputMode: OutputMode): Sink = {
LastOptions.parameters = parameters
LastOptions.partitionColumns = partitionColumns
LastOptions.mockStreamSinkProvider.createSink(spark, parameters, partitionColumns, outputMode)
(_: Long, _: DataFrame) => {}
}
}
class DataStreamReaderWriterSuite extends StreamTest with BeforeAndAfter {
override def sparkConf: SparkConf =
super.sparkConf
.setAppName("test")
.set("spark.sql.parquet.columnarReaderBatchSize", "4096")
.set("spark.sql.sources.useV1SourceList", "avro")
.set("spark.sql.extensions", "com.intel.oap.ColumnarPlugin")
.set("spark.sql.execution.arrow.maxRecordsPerBatch", "4096")
//.set("spark.shuffle.manager", "org.apache.spark.shuffle.sort.ColumnarShuffleManager")
.set("spark.memory.offHeap.enabled", "true")
.set("spark.memory.offHeap.size", "50m")
.set("spark.sql.join.preferSortMergeJoin", "false")
.set("spark.sql.columnar.codegen.hashAggregate", "false")
.set("spark.oap.sql.columnar.wholestagecodegen", "false")
.set("spark.sql.columnar.window", "false")
.set("spark.unsafe.exceptionOnMemoryLeak", "false")
//.set("spark.sql.columnar.tmp_dir", "/codegen/nativesql/")
.set("spark.sql.columnar.sort.broadcastJoin", "true")
.set("spark.oap.sql.columnar.preferColumnar", "true")
private def newMetadataDir =
Utils.createTempDir(namePrefix = "streaming.metadata").getCanonicalPath
after {
spark.streams.active.foreach(_.stop())
}
test("write cannot be called on streaming datasets") {
val e = intercept[AnalysisException] {
spark.readStream
.format("org.apache.spark.sql.streaming.test")
.load()
.write
.save()
}
Seq("'write'", "not", "streaming Dataset/DataFrame").foreach { s =>
assert(e.getMessage.toLowerCase(Locale.ROOT).contains(s.toLowerCase(Locale.ROOT)))
}
}
test("resolve default source") {
spark.readStream
.format("org.apache.spark.sql.streaming.test")
.load()
.writeStream
.format("org.apache.spark.sql.streaming.test")
.option("checkpointLocation", newMetadataDir)
.start()
.stop()
}
test("resolve full class") {
spark.readStream
.format("org.apache.spark.sql.streaming.test.DefaultSource")
.load()
.writeStream
.format("org.apache.spark.sql.streaming.test")
.option("checkpointLocation", newMetadataDir)
.start()
.stop()
}
test("options") {
val map = new java.util.HashMap[String, String]
map.put("opt3", "3")
val df = spark.readStream
.format("org.apache.spark.sql.streaming.test")
.option("opt1", "1")
.options(Map("opt2" -> "2"))
.options(map)
.load()
assert(LastOptions.parameters("opt1") == "1")
assert(LastOptions.parameters("opt2") == "2")
assert(LastOptions.parameters("opt3") == "3")
LastOptions.clear()
df.writeStream
.format("org.apache.spark.sql.streaming.test")
.option("opt1", "1")
.options(Map("opt2" -> "2"))
.options(map)
.option("checkpointLocation", newMetadataDir)
.start()
.stop()
assert(LastOptions.parameters("opt1") == "1")
assert(LastOptions.parameters("opt2") == "2")
assert(LastOptions.parameters("opt3") == "3")
}
test("partitioning") {
val df = spark.readStream
.format("org.apache.spark.sql.streaming.test")
.load()
df.writeStream
.format("org.apache.spark.sql.streaming.test")
.option("checkpointLocation", newMetadataDir)
.start()
.stop()
assert(LastOptions.partitionColumns == Nil)
df.writeStream
.format("org.apache.spark.sql.streaming.test")
.option("checkpointLocation", newMetadataDir)
.partitionBy("a")
.start()
.stop()
assert(LastOptions.partitionColumns == Seq("a"))
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") {
df.writeStream
.format("org.apache.spark.sql.streaming.test")
.option("checkpointLocation", newMetadataDir)
.partitionBy("A")
.start()
.stop()
assert(LastOptions.partitionColumns == Seq("a"))
}
intercept[AnalysisException] {
df.writeStream
.format("org.apache.spark.sql.streaming.test")
.option("checkpointLocation", newMetadataDir)
.partitionBy("b")
.start()
.stop()
}
}
test("stream paths") {
val df = spark.readStream
.format("org.apache.spark.sql.streaming.test")
.option("checkpointLocation", newMetadataDir)
.load("/test")
assert(LastOptions.parameters("path") == "/test")
LastOptions.clear()
df.writeStream
.format("org.apache.spark.sql.streaming.test")
.option("checkpointLocation", newMetadataDir)
.start("/test")
.stop()
assert(LastOptions.parameters("path") == "/test")
}
test("test different data types for options") {
val df = spark.readStream
.format("org.apache.spark.sql.streaming.test")
.option("intOpt", 56)
.option("boolOpt", false)
.option("doubleOpt", 6.7)
.load("/test")
assert(LastOptions.parameters("intOpt") == "56")
assert(LastOptions.parameters("boolOpt") == "false")
assert(LastOptions.parameters("doubleOpt") == "6.7")
LastOptions.clear()
df.writeStream
.format("org.apache.spark.sql.streaming.test")
.option("intOpt", 56)
.option("boolOpt", false)
.option("doubleOpt", 6.7)
.option("checkpointLocation", newMetadataDir)
.start("/test")
.stop()
assert(LastOptions.parameters("intOpt") == "56")
assert(LastOptions.parameters("boolOpt") == "false")
assert(LastOptions.parameters("doubleOpt") == "6.7")
}
test("unique query names") {
/** Start a query with a specific name */
def startQueryWithName(name: String = ""): StreamingQuery = {
spark.readStream
.format("org.apache.spark.sql.streaming.test")
.load("/test")
.writeStream
.format("org.apache.spark.sql.streaming.test")
.option("checkpointLocation", newMetadataDir)
.queryName(name)
.start()
}
/** Start a query without specifying a name */
def startQueryWithoutName(): StreamingQuery = {
spark.readStream
.format("org.apache.spark.sql.streaming.test")
.load("/test")
.writeStream
.format("org.apache.spark.sql.streaming.test")
.option("checkpointLocation", newMetadataDir)
.start()
}
/** Get the names of active streams */
def activeStreamNames: Set[String] = {
val streams = spark.streams.active
val names = streams.map(_.name).toSet
assert(streams.length === names.size, s"names of active queries are not unique: $names")
names
}
val q1 = startQueryWithName("name")
// Should not be able to start another query with the same name
intercept[IllegalArgumentException] {
startQueryWithName("name")
}
assert(activeStreamNames === Set("name"))
// Should be able to start queries with other names
val q3 = startQueryWithName("another-name")
assert(activeStreamNames === Set("name", "another-name"))
// Should be able to start queries with auto-generated names
val q4 = startQueryWithoutName()
assert(activeStreamNames.contains(q4.name))
// Should not be able to start a query with same auto-generated name
intercept[IllegalArgumentException] {
startQueryWithName(q4.name)
}
// Should be able to start query with that name after stopping the previous query
q1.stop()
val q5 = startQueryWithName("name")
assert(activeStreamNames.contains("name"))
spark.streams.active.foreach(_.stop())
}
test("trigger") {
val df = spark.readStream
.format("org.apache.spark.sql.streaming.test")
.load("/test")
var q = df.writeStream
.format("org.apache.spark.sql.streaming.test")
.option("checkpointLocation", newMetadataDir)
.trigger(ProcessingTime(10.seconds))
.start()
q.stop()
assert(q.asInstanceOf[StreamingQueryWrapper].streamingQuery.trigger == ProcessingTime(10000))
q = df.writeStream
.format("org.apache.spark.sql.streaming.test")
.option("checkpointLocation", newMetadataDir)
.trigger(ProcessingTime(100, TimeUnit.SECONDS))
.start()
q.stop()
assert(q.asInstanceOf[StreamingQueryWrapper].streamingQuery.trigger == ProcessingTime(100000))
}
test("source metadataPath") {
LastOptions.clear()
val checkpointLocation = new Path(newMetadataDir)
val df1 = spark.readStream
.format("org.apache.spark.sql.streaming.test")
.load()
val df2 = spark.readStream
.format("org.apache.spark.sql.streaming.test")
.load()
val q = df1.union(df2).writeStream
.format("org.apache.spark.sql.streaming.test")
.option("checkpointLocation", checkpointLocation.toString)
.trigger(ProcessingTime(10.seconds))
.start()
q.processAllAvailable()
q.stop()
verify(LastOptions.mockStreamSourceProvider).createSource(
any(),
meq(s"${new Path(makeQualifiedPath(checkpointLocation.toString)).toString}/sources/0"),
meq(None),
meq("org.apache.spark.sql.streaming.test"),
meq(Map.empty))
verify(LastOptions.mockStreamSourceProvider).createSource(
any(),
meq(s"${new Path(makeQualifiedPath(checkpointLocation.toString)).toString}/sources/1"),
meq(None),
meq("org.apache.spark.sql.streaming.test"),
meq(Map.empty))
}
private def newTextInput = Utils.createTempDir(namePrefix = "text").getCanonicalPath
test("check foreach() catches null writers") {
val df = spark.readStream
.format("org.apache.spark.sql.streaming.test")
.load()
var w = df.writeStream
var e = intercept[IllegalArgumentException](w.foreach(null))
Seq("foreach", "null").foreach { s =>
assert(e.getMessage.toLowerCase(Locale.ROOT).contains(s.toLowerCase(Locale.ROOT)))
}
}
test("check foreach() does not support partitioning") {
val df = spark.readStream
.format("org.apache.spark.sql.streaming.test")
.load()
val foreachWriter = new ForeachWriter[Row] {
override def open(partitionId: Long, version: Long): Boolean = false
override def process(value: Row): Unit = {}
override def close(errorOrNull: Throwable): Unit = {}
}
var w = df.writeStream.partitionBy("value")
var e = intercept[AnalysisException](w.foreach(foreachWriter).start())
Seq("foreach", "partitioning").foreach { s =>
assert(e.getMessage.toLowerCase(Locale.ROOT).contains(s.toLowerCase(Locale.ROOT)))
}
}
test("prevent all column partitioning") {
withTempDir { dir =>
val path = dir.getCanonicalPath
intercept[AnalysisException] {
spark.range(10).writeStream
.outputMode("append")
.partitionBy("id")
.format("parquet")
.start(path)
}
}
}
private def testMemorySinkCheckpointRecovery(chkLoc: String, provideInWriter: Boolean): Unit = {
import testImplicits._
val ms = new MemoryStream[Int](0, sqlContext)
val df = ms.toDF().toDF("a")
val tableName = "test"
def startQuery: StreamingQuery = {
val writer = df.groupBy("a")
.count()
.writeStream
.format("memory")
.queryName(tableName)
.outputMode("complete")
if (provideInWriter) {
writer.option("checkpointLocation", chkLoc)
}
writer.start()
}
// no exception here
val q = startQuery
ms.addData(0, 1)
q.processAllAvailable()
q.stop()
checkAnswer(
spark.table(tableName),
Seq(Row(0, 1), Row(1, 1))
)
spark.sql(s"drop table $tableName")
// verify table is dropped
intercept[AnalysisException](spark.table(tableName).collect())
val q2 = startQuery
ms.addData(0)
q2.processAllAvailable()
checkAnswer(
spark.table(tableName),
Seq(Row(0, 2), Row(1, 1))
)
q2.stop()
}
test("MemorySink can recover from a checkpoint in Complete Mode") {
val checkpointLoc = newMetadataDir
val checkpointDir = new File(checkpointLoc, "offsets")
checkpointDir.mkdirs()
assert(checkpointDir.exists())
testMemorySinkCheckpointRecovery(checkpointLoc, provideInWriter = true)
}
test("SPARK-18927: MemorySink can recover from a checkpoint provided in conf in Complete Mode") {
val checkpointLoc = newMetadataDir
val checkpointDir = new File(checkpointLoc, "offsets")
checkpointDir.mkdirs()
assert(checkpointDir.exists())
withSQLConf(SQLConf.CHECKPOINT_LOCATION.key -> checkpointLoc) {
testMemorySinkCheckpointRecovery(checkpointLoc, provideInWriter = false)
}
}
test("append mode memory sink's do not support checkpoint recovery") {
import testImplicits._
val ms = new MemoryStream[Int](0, sqlContext)
val df = ms.toDF().toDF("a")
val checkpointLoc = newMetadataDir
val checkpointDir = new File(checkpointLoc, "offsets")
checkpointDir.mkdirs()
assert(checkpointDir.exists())
val e = intercept[AnalysisException] {
df.writeStream
.format("memory")
.queryName("test")
.option("checkpointLocation", checkpointLoc)
.outputMode("append")
.start()
}
assert(e.getMessage.contains("does not support recovering"))
assert(e.getMessage.contains("checkpoint location"))
}
test("SPARK-18510: use user specified types for partition columns in file sources") {
import org.apache.spark.sql.functions.udf
import testImplicits._
withTempDir { src =>
val createArray = udf { (length: Long) =>
for (i <- 1 to length.toInt) yield i.toString
}
spark.range(4).select(createArray('id + 1) as 'ex, 'id, 'id % 4 as 'part).coalesce(1).write
.partitionBy("part", "id")
.mode("overwrite")
.parquet(src.toString)
// Specify a random ordering of the schema, partition column in the middle, etc.
// Also let's say that the partition columns are Strings instead of Longs.
// partition columns should go to the end
val schema = new StructType()
.add("id", StringType)
.add("ex", ArrayType(StringType))
val sdf = spark.readStream
.schema(schema)
.format("parquet")
.load(src.toString)
assert(sdf.schema.toList === List(
StructField("ex", ArrayType(StringType)),
StructField("part", IntegerType), // inferred partitionColumn dataType
StructField("id", StringType))) // used user provided partitionColumn dataType
val sq = sdf.writeStream
.queryName("corruption_test")
.format("memory")
.start()
sq.processAllAvailable()
checkAnswer(
spark.table("corruption_test"),
// notice how `part` is ordered before `id`
Row(Array("1"), 0, "0") :: Row(Array("1", "2"), 1, "1") ::
Row(Array("1", "2", "3"), 2, "2") :: Row(Array("1", "2", "3", "4"), 3, "3") :: Nil
)
sq.stop()
}
}
test("user specified checkpointLocation precedes SQLConf") {
import testImplicits._
withTempDir { checkpointPath =>
withTempPath { userCheckpointPath =>
assert(!userCheckpointPath.exists(), s"$userCheckpointPath should not exist")
withSQLConf(SQLConf.CHECKPOINT_LOCATION.key -> checkpointPath.getAbsolutePath) {
val queryName = "test_query"
val ds = MemoryStream[Int].toDS
ds.writeStream
.format("memory")
.queryName(queryName)
.option("checkpointLocation", userCheckpointPath.getAbsolutePath)
.start()
.stop()
assert(checkpointPath.listFiles().isEmpty,
"SQLConf path is used even if user specified checkpointLoc: " +
s"${checkpointPath.listFiles()} is not empty")
assert(userCheckpointPath.exists(),
  s"The user specified checkpointLoc ($userCheckpointPath) is not created")
}
}
}
}
test("use SQLConf checkpoint dir when checkpointLocation is not specified") {
import testImplicits._
withTempDir { checkpointPath =>
withSQLConf(SQLConf.CHECKPOINT_LOCATION.key -> checkpointPath.getAbsolutePath) {
val queryName = "test_query"
val ds = MemoryStream[Int].toDS
ds.writeStream.format("memory").queryName(queryName).start().stop()
// Should use query name to create a folder in `checkpointPath`
val queryCheckpointDir = new File(checkpointPath, queryName)
assert(queryCheckpointDir.exists(), s"$queryCheckpointDir doesn't exist")
assert(
checkpointPath.listFiles().size === 1,
s"${checkpointPath.listFiles().toList} has 0 or more than 1 files ")
}
}
}
test("use SQLConf checkpoint dir when checkpointLocation is not specified without query name") {
import testImplicits._
withTempDir { checkpointPath =>
withSQLConf(SQLConf.CHECKPOINT_LOCATION.key -> checkpointPath.getAbsolutePath) {
val ds = MemoryStream[Int].toDS
ds.writeStream.format("console").start().stop()
// Should create a random folder in `checkpointPath`
assert(
checkpointPath.listFiles().size === 1,
s"${checkpointPath.listFiles().toList} has 0 or more than 1 files ")
}
}
}
test("configured checkpoint dir should not be deleted if a query is stopped without errors and" +
" force temp checkpoint deletion enabled") {
import testImplicits._
withTempDir { checkpointPath =>
withSQLConf(SQLConf.CHECKPOINT_LOCATION.key -> checkpointPath.getAbsolutePath,
SQLConf.FORCE_DELETE_TEMP_CHECKPOINT_LOCATION.key -> "true") {
val ds = MemoryStream[Int].toDS
val query = ds.writeStream.format("console").start()
assert(checkpointPath.exists())
query.stop()
assert(checkpointPath.exists())
}
}
}
test("temp checkpoint dir should be deleted if a query is stopped without errors") {
import testImplicits._
val query = MemoryStream[Int].toDS.writeStream.format("console").start()
query.processAllAvailable()
val checkpointDir = new Path(
query.asInstanceOf[StreamingQueryWrapper].streamingQuery.resolvedCheckpointRoot)
val fs = checkpointDir.getFileSystem(spark.sessionState.newHadoopConf())
assert(fs.exists(checkpointDir))
query.stop()
assert(!fs.exists(checkpointDir))
}
testQuietly("temp checkpoint dir should not be deleted if a query is stopped with an error") {
testTempCheckpointWithFailedQuery(false)
}
testQuietly("temp checkpoint should be deleted if a query is stopped with an error and force" +
" temp checkpoint deletion enabled") {
withSQLConf(SQLConf.FORCE_DELETE_TEMP_CHECKPOINT_LOCATION.key -> "true") {
testTempCheckpointWithFailedQuery(true)
}
}
private def testTempCheckpointWithFailedQuery(checkpointMustBeDeleted: Boolean): Unit = {
import testImplicits._
val input = MemoryStream[Int]
val query = input.toDS.map(_ / 0).writeStream.format("console").start()
val checkpointDir = new Path(
query.asInstanceOf[StreamingQueryWrapper].streamingQuery.resolvedCheckpointRoot)
val fs = checkpointDir.getFileSystem(spark.sessionState.newHadoopConf())
assert(fs.exists(checkpointDir))
input.addData(1)
intercept[StreamingQueryException] {
query.awaitTermination()
}
if (!checkpointMustBeDeleted) {
assert(fs.exists(checkpointDir))
} else {
assert(!fs.exists(checkpointDir))
}
}
test("SPARK-20431: Specify a schema by using a DDL-formatted string") {
spark.readStream
.format("org.apache.spark.sql.streaming.test")
.schema("aa INT")
.load()
assert(LastOptions.schema.isDefined)
assert(LastOptions.schema.get === StructType(StructField("aa", IntegerType) :: Nil))
LastOptions.clear()
}
test("SPARK-26586: Streams should have isolated confs") {
import testImplicits._
val input = MemoryStream[Int]
input.addData(1 to 10)
spark.conf.set("testKey1", 0)
val queries = (1 to 10).map { i =>
spark.conf.set("testKey1", i)
input.toDF().writeStream
.foreachBatch { (df: Dataset[Row], id: Long) =>
val v = df.sparkSession.conf.get("testKey1").toInt
if (i != v) {
throw new ConcurrentModificationException(s"Stream $i has the wrong conf value $v")
}
}
.start()
}
try {
queries.foreach(_.processAllAvailable())
} finally {
queries.foreach(_.stop())
}
}
}
| Intel-bigdata/OAP | oap-native-sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamReaderWriterSuite.scala | Scala | apache-2.0 | 24,425 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.api
import java.time.Duration
import org.apache.kafka.clients.consumer.{ConsumerConfig, KafkaConsumer}
import kafka.utils.TestUtils
import kafka.utils.Implicits._
import java.util.Properties
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig}
import kafka.server.KafkaConfig
import kafka.integration.KafkaServerTestHarness
import org.apache.kafka.clients.admin.{AdminClient, AdminClientConfig}
import org.apache.kafka.common.network.{ListenerName, Mode}
import org.apache.kafka.common.serialization.{ByteArrayDeserializer, ByteArraySerializer, Deserializer, Serializer}
import org.junit.{After, Before}
import scala.collection.mutable
/**
* A helper class for writing integration tests that involve producers, consumers, and servers
*/
abstract class IntegrationTestHarness extends KafkaServerTestHarness {
protected def brokerCount: Int
protected def logDirCount: Int = 1
val producerConfig = new Properties
val consumerConfig = new Properties
val adminClientConfig = new Properties
val serverConfig = new Properties
private val consumers = mutable.Buffer[KafkaConsumer[_, _]]()
private val producers = mutable.Buffer[KafkaProducer[_, _]]()
private val adminClients = mutable.Buffer[AdminClient]()
protected def interBrokerListenerName: ListenerName = listenerName
protected def modifyConfigs(props: Seq[Properties]): Unit = {
configureListeners(props)
props.foreach(_ ++= serverConfig)
}
override def generateConfigs: Seq[KafkaConfig] = {
val cfgs = TestUtils.createBrokerConfigs(brokerCount, zkConnect, interBrokerSecurityProtocol = Some(securityProtocol),
trustStoreFile = trustStoreFile, saslProperties = serverSaslProperties, logDirCount = logDirCount)
modifyConfigs(cfgs)
cfgs.map(KafkaConfig.fromProps)
}
protected def configureListeners(props: Seq[Properties]): Unit = {
props.foreach { config =>
config.remove(KafkaConfig.InterBrokerSecurityProtocolProp)
config.setProperty(KafkaConfig.InterBrokerListenerNameProp, interBrokerListenerName.value)
val listenerNames = Set(listenerName, interBrokerListenerName)
val listeners = listenerNames.map(listenerName => s"${listenerName.value}://localhost:${TestUtils.RandomPort}").mkString(",")
val listenerSecurityMap = listenerNames.map(listenerName => s"${listenerName.value}:${securityProtocol.name}").mkString(",")
config.setProperty(KafkaConfig.ListenersProp, listeners)
config.setProperty(KafkaConfig.ListenerSecurityProtocolMapProp, listenerSecurityMap)
}
}
@Before
override def setUp() {
doSetup(createOffsetsTopic = true)
}
def doSetup(createOffsetsTopic: Boolean): Unit = {
// Generate client security properties before starting the brokers in case certs are needed
producerConfig ++= clientSecurityProps("producer")
consumerConfig ++= clientSecurityProps("consumer")
adminClientConfig ++= clientSecurityProps("adminClient")
super.setUp()
producerConfig.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList)
producerConfig.putIfAbsent(ProducerConfig.ACKS_CONFIG, "-1")
producerConfig.putIfAbsent(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[ByteArraySerializer].getName)
producerConfig.putIfAbsent(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[ByteArraySerializer].getName)
consumerConfig.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList)
consumerConfig.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")
consumerConfig.putIfAbsent(ConsumerConfig.GROUP_ID_CONFIG, "group")
consumerConfig.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, classOf[ByteArrayDeserializer].getName)
consumerConfig.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, classOf[ByteArrayDeserializer].getName)
adminClientConfig.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList)
if (createOffsetsTopic)
TestUtils.createOffsetsTopic(zkClient, servers)
}
def clientSecurityProps(certAlias: String): Properties = {
TestUtils.securityConfigs(Mode.CLIENT, securityProtocol, trustStoreFile, certAlias, TestUtils.SslCertificateCn,
clientSaslProperties)
}
def createProducer[K, V](keySerializer: Serializer[K] = new ByteArraySerializer,
valueSerializer: Serializer[V] = new ByteArraySerializer,
configOverrides: Properties = new Properties): KafkaProducer[K, V] = {
val props = new Properties
props ++= producerConfig
props ++= configOverrides
val producer = new KafkaProducer[K, V](props, keySerializer, valueSerializer)
producers += producer
producer
}
def createConsumer[K, V](keyDeserializer: Deserializer[K] = new ByteArrayDeserializer,
valueDeserializer: Deserializer[V] = new ByteArrayDeserializer,
configOverrides: Properties = new Properties,
configsToRemove: List[String] = List()): KafkaConsumer[K, V] = {
val props = new Properties
props ++= consumerConfig
props ++= configOverrides
configsToRemove.foreach(props.remove(_))
val consumer = new KafkaConsumer[K, V](props, keyDeserializer, valueDeserializer)
consumers += consumer
consumer
}
def createAdminClient(configOverrides: Properties = new Properties): AdminClient = {
val props = new Properties
props ++= adminClientConfig
props ++= configOverrides
val adminClient = AdminClient.create(props)
adminClients += adminClient
adminClient
}
@After
override def tearDown() {
producers.foreach(_.close(Duration.ZERO))
consumers.foreach(_.wakeup())
consumers.foreach(_.close(Duration.ZERO))
adminClients.foreach(_.close(Duration.ZERO))
producers.clear()
consumers.clear()
adminClients.clear()
super.tearDown()
}
}
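// Hypothetical usage sketch (not part of the original file): a concrete test only
// needs to fix brokerCount; clients created through the helpers are closed
// automatically in tearDown().
//
//   class MyApiIntegrationTest extends IntegrationTestHarness {
//     override protected def brokerCount: Int = 3
//
//     @Test
//     def testProduceAndConsume(): Unit = {
//       val producer = createProducer[Array[Byte], Array[Byte]]()
//       val consumer = createConsumer[Array[Byte], Array[Byte]]()
//       // ... produce to and consume from a test topic ...
//     }
//   }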
| KevinLiLu/kafka | core/src/test/scala/integration/kafka/api/IntegrationTestHarness.scala | Scala | apache-2.0 | 6,700 |
package org.pdfextractor.algorithm.phrase
/**
* Marker for specifying the packages to scan for annotated services
* in type-safe manner.
*/
class PhrasePackageMarker {}
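// Hypothetical usage sketch (not part of the original file), assuming a
// Spring-style component scan configured with marker classes instead of
// package-name strings:
//
//   @ComponentScan(basePackageClasses = Array(classOf[PhrasePackageMarker]))
//   class PhraseConfiguration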
| kveskimae/pdfalg | src/main/scala/org/pdfextractor/algorithm/phrase/PhrasePackageMarker.scala | Scala | mit | 176 |
/*
* Copyright 2011-2018 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.commons.util
import java.io.{ InputStream, OutputStream }
import java.util.zip.{ CRC32, Deflater, DeflaterOutputStream }
import io.gatling.commons.util.Io._
object GzipHelper {
def gzip(string: String): Array[Byte] = gzip(string.getBytes)
def gzip(bytes: Array[Byte]): Array[Byte] =
gzip(new FastByteArrayInputStream(bytes))
def gzip(in: InputStream): Array[Byte] =
withCloseable(in) { is =>
val out = FastByteArrayOutputStream.pooled()
val gzip = ReusableGzipOutputStream.forStream(out)
try {
gzip.writeHeader()
in.copyTo(gzip, 1024)
gzip.finish()
} finally {
gzip.reset()
}
out.toByteArray
}
}
class SettableOutputStream(var target: OutputStream) extends OutputStream {
override def write(b: Array[Byte], off: Int, len: Int): Unit =
target.write(b, off, len)
override def write(b: Array[Byte]): Unit =
target.write(b)
override def write(b: Int): Unit =
target.write(b.toByte)
}
object ReusableGzipOutputStream {
private val GzipMagic = 0x8b1f
private val TrailerSize = 8
private val Streams = new ThreadLocal[ReusableGzipOutputStream] {
override protected def initialValue() = new ReusableGzipOutputStream(new SettableOutputStream(null))
}
def forStream(target: OutputStream): ReusableGzipOutputStream = {
val gzip = Streams.get()
gzip.os.target = target
gzip
}
}
class ReusableGzipOutputStream(val os: SettableOutputStream)
extends DeflaterOutputStream(os, new Deflater(Deflater.DEFAULT_COMPRESSION, true)) {
import ReusableGzipOutputStream._
private val crc = new CRC32
override def write(buf: Array[Byte], off: Int, len: Int): Unit = {
super.write(buf, off, len)
crc.update(buf, off, len)
}
override def finish(): Unit =
if (!`def`.finished) {
`def`.finish()
while (!`def`.finished) {
var len = `def`.deflate(buf, 0, buf.length)
if (`def`.finished && len <= buf.length - TrailerSize) {
writeTrailer(buf, len)
len = len + TrailerSize
os.write(buf, 0, len)
return
}
if (len > 0)
out.write(buf, 0, len)
}
val trailer = Array.ofDim[Byte](TrailerSize)
writeTrailer(trailer, 0)
out.write(trailer)
}
def writeHeader(): Unit =
os.write(Array(
GzipMagic.toByte, // Magic number (short)
(GzipMagic >> 8).toByte, // Magic number (short)
Deflater.DEFLATED.toByte, // Compression method (CM)
0.toByte, // Flags (FLG)
0.toByte, // Modification time MTIME (int)
0.toByte, // Modification time MTIME (int)
0.toByte, // Modification time MTIME (int)
0.toByte, // Modification time MTIME (int)
0.toByte, // Extra flags (XFLG)
0.toByte // Operating system (OS)
))
private def writeTrailer(buf: Array[Byte], offset: Int): Unit = {
writeInt(crc.getValue.toInt, buf, offset)
writeInt(`def`.getTotalIn, buf, offset + 4)
}
/*
* Writes integer in Intel byte order to a byte array, starting at a given offset.
*/
private def writeInt(i: Int, buf: Array[Byte], offset: Int): Unit = {
writeShort(i & 0xffff, buf, offset)
writeShort((i >> 16) & 0xffff, buf, offset + 2)
}
/*
* Writes short integer in Intel byte order to a byte array, starting at a given offset
*/
private def writeShort(s: Int, buf: Array[Byte], offset: Int): Unit = {
buf(offset) = (s & 0xff).toByte
buf(offset + 1) = ((s >> 8) & 0xff).toByte
}
def reset(): Unit = {
crc.reset()
`def`.reset()
os.target = null
}
}
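// Hypothetical usage sketch (not part of the original file): compressing a string
// in one call; the gzip stream is pooled per thread and reset after each use.
//
//   val compressed: Array[Byte] = GzipHelper.gzip("""{"status":"ok"}""")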
| wiacekm/gatling | gatling-commons/src/main/scala/io/gatling/commons/util/GzipHelper.scala | Scala | apache-2.0 | 4,238 |
/*
* Copyright (c) 2015-2020 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.snowplow.scalatracker.emitters.id
import java.io.FileWriter
import cats._
import cats.data.NonEmptyList
import cats.implicits._
import io.circe.Json
import io.circe.syntax._
import io.circe.parser.parse
import com.snowplowanalytics.iglu.core.{SchemaKey, SchemaVer, SelfDescribingData}
import com.snowplowanalytics.iglu.core.circe.implicits._
import com.snowplowanalytics.snowplow.scalatracker.Tracker.{DeviceCreatedTimestamp, Timestamp, TrueTimestamp}
import com.snowplowanalytics.snowplow.scalatracker.{Emitter, SelfDescribingJson, Tracker}
import com.snowplowanalytics.snowplow.scalatracker.Emitter._
import org.scalacheck.Gen
/**
* Ad-hoc load testing
*/
object StressTest {
import com.snowplowanalytics.snowplow.scalatracker.syntax.id._
/** ADT for all possible event types Tracker can accept */
sealed trait EventArguments
case class PageView(
url: String,
title: Option[String],
referrer: Option[String],
contexts: Option[List[SelfDescribingJson]],
timestamp: Option[Timestamp]
) extends EventArguments
// Parser typeclass. Useless so far
trait Read[A] { def reads(line: String): A }
object Read {
def apply[A: Read] = implicitly[Read[A]]
}
implicit val sdJsonsRead = new Read[List[SelfDescribingJson]] {
def parseJson(json: Json): SelfDescribingData[Json] =
json
.asObject
.flatMap { obj =>
val schemaKeyOpt = obj.toMap.get("schema").flatMap(_.asString).flatMap(SchemaKey.fromUri(_).toOption)
val dataOpt = obj.toMap.get("data")
(schemaKeyOpt, dataOpt).mapN((key, data) => SelfDescribingData(key, data))
}
.getOrElse(throw new RuntimeException("Failed the test while parsing JSON"))
def reads(line: String): List[SelfDescribingJson] = parse(line).toOption match {
case Some(json) => json.asArray.getOrElse(Vector.empty).map(parseJson).toList
case None => Nil
}
}
implicit val tstmpRead = new Read[Option[Timestamp]] {
def reads(line: String): Option[Timestamp] =
line.split(":").toList match {
case List("ttm", tstamp) => Some(TrueTimestamp(tstamp.toLong))
case List("dtm", tstamp) => Some(DeviceCreatedTimestamp(tstamp.toLong))
case _ => None
}
}
implicit val eventRead = new Read[EventArguments] {
def reads(line: String): EventArguments = {
val cols = line.split("\\t", -1).lift
(cols(0), cols(1)) match {
case (Some("pv"), Some(url)) =>
val ctx: Option[List[SelfDescribingJson]] = cols(4).map(sdJsonsRead.reads)
val timestamp = cols(5).flatMap(tstmpRead.reads)
PageView(url, cols(2), cols(3), ctx, timestamp)
case _ => throw new RuntimeException("Failed the test while reading the arguments")
}
}
}
// Generate valid pseudo-URL
val urlGen = for {
protocol <- Gen.oneOf(List("http://", "https://"))
port <- Gen.oneOf(List("", ":80", ":8080", ":443", ":10505"))
lengthDomain <- Gen.choose(1, 3)
topDomain <- Gen.oneOf(List("com", "ru", "co.uk", "org", "mobi", "by"))
domainList <- Gen.containerOfN[List, String](lengthDomain, Gen.alphaLowerStr)
lengthUrl <- Gen.choose(0, 5)
urlList <- Gen.containerOfN[List, String](lengthUrl, Gen.alphaNumStr)
url = new java.net.URL(protocol + domainList.mkString(".") + s".$topDomain" + port + "/" + urlList.mkString("/"))
} yield url
// Generate geolocation context
val geoLocationGen = for {
latitude <- Gen.choose[Double](-90, 90)
longitude <- Gen.choose[Double](-180, 180)
data = Json.obj("latitude" := latitude, "longitude" := longitude)
sd = SelfDescribingData[Json](
SchemaKey("com.snowplowanalytics.snowplow", "geolocation_context", "jsonschema", SchemaVer.Full(1, 1, 0)),
data
)
} yield sd
// Generate timestamp
val timestampGen = for {
tstType <- Gen.option(Gen.oneOf(List(TrueTimestamp.apply _, DeviceCreatedTimestamp.apply _)))
tstamp <- Gen.choose[Long](1508316432000L - (2 * 365 * 86400 * 1000L), 1508316432000L)
result = tstType.map(x => x(tstamp))
} yield result
// Generate whole pageview event
val pageViewGen = for {
url <- urlGen.map(_.toString)
title <- Gen.option(Gen.alphaNumStr)
referrer <- Gen.option(urlGen.map(_.toString))
ctx <- Gen.option(geoLocationGen.map(x => List(x)))
tstamp <- timestampGen
} yield PageView(url, title, referrer, ctx, tstamp)
def writeContext(sd: List[SelfDescribingData[Json]]): String =
Json.fromValues(sd.map(s => s.normalize)).noSpaces
def writeTimestamp(tstamp: Timestamp): String = tstamp match {
case TrueTimestamp(tst) => s"ttm:$tst"
case DeviceCreatedTimestamp(tst) => s"dtm:$tst"
}
def writeEvent(event: PageView) =
  s"pv\t${event.url}\t${event.title.getOrElse("")}\t${event
    .referrer
    .getOrElse("")}\t${event.contexts.map(writeContext).getOrElse("")}\t${event.timestamp.map(writeTimestamp).getOrElse("")}"
def write(path: String, cardinality: Int): Unit = {
var i = 0
val fw = new FileWriter(path)
while (i < cardinality) {
pageViewGen.sample.map(writeEvent) match {
case Some(line) => fw.write(line + "\n")
case None => ()
}
i = i + 1
}
fw.close()
}
/**
 * Thread imitating an application's worker thread that has access to the tracker.
 * The constructor blocks until the events are loaded into memory.
*/
class TrackerThread(path: String, tracker: Tracker[Id]) {
// It can take some time
val events = scala.io.Source.fromFile(path).getLines().map(Read[EventArguments].reads).toList
println(s"TrackerThread for $path initialized")
def getWorker: Thread = {
val worker = new Thread {
private var i = 0
override def run(): Unit = {
events.foreach {
case PageView(url, title, referrer, contexts, timestamp) =>
tracker.trackPageView(url, title, referrer, contexts.getOrElse(Nil), timestamp)
i = i + 1
if (i % 1000 == 0) {
println(s"One more 1000 from $path")
}
}
println(s"TrackerThread for $path sent $i events")
i = 0
}
}
worker.setDaemon(true)
worker
}
}
/**
 * Main method. Starts the specified number of separate threads sharing a tracker,
 * each reading its own file and sending events via the same tracker.
 * All threads are prepared (events parsed and stored in memory) during
 * construction. When the function returns, they are ready to be started by foreach(_.run()):
* ```
* println(System.currentTimeMillis)
* res0.foreach(_.run())
* res0.foreach(_.join())
* println(System.currentTimeMillis)
* ```
*
* @param collector single collector for all threads
 * @param dir directory with temporary event TSVs
 * @param cardinality number of events in each TSV
 * @param threads number of parallel threads
* @return list of threads
*/
def testAsyncBatch(
collector: Emitter.EndpointParams,
dir: String,
cardinality: Int,
threads: Int = 1,
callback: Option[Callback[Id]]
) = {
import scala.concurrent.ExecutionContext.Implicits.global
val files = List.fill(threads)(dir).zipWithIndex.map { case (path, i) => s"$path/events-$i.tsv" }
files.foreach { file =>
write(file, cardinality)
}
println(s"Writing to files completed. ${files.mkString(", ")}")
val emitter = AsyncEmitter.createAndStart(collector, BufferConfig.EventsCardinality(50))
val tracker = new Tracker(NonEmptyList.of(emitter), "test-tracker-ns", "test-app")
files.map(file => new TrackerThread(file, tracker).getWorker)
}
}
| snowplow/snowplow-scala-tracker | modules/id-emitter/src/test/scala/com/snowplowanalytics/snowplow/scalatracker/emitters/id/StressTest.scala | Scala | apache-2.0 | 8,569 |
package edison.search.algorithm
import edison.search.Samples
import edison.search.tree._
import edison.search.tree.select.Selector
import edison.util.SmartSpec
import org.scalamock.scalatest.MockFactory
class UctSelectionContextTest extends SmartSpec with MockFactory {
val algorithm = new UctAlgorithm(UctConfig(alpha = 2.0, expandThreshold = 10))
val nodeWithNoSamples = IntegerTree.empty(Range(1, 2))
val nodeWithGoodSample = IntegerTree(Range(1, 2), Nil, samples = Samples(1 -> 100.0))
val nodeWithBadSample = IntegerTree(Range(1, 2), Nil, samples = Samples(1 -> 10.0))
behavior of "UcbSelectionContext"
it must "create orderings that prefer nodes with no samples" in {
val root = IntegerTree(Range.inclusive(1, 2), List.empty, samples = Samples(1 -> 100.0))
val ordering = algorithm.UcbSelectionContext(root).getOrdering
ordering.compare(nodeWithNoSamples, nodeWithGoodSample) shouldBe -1
ordering.compare(nodeWithGoodSample, nodeWithNoSamples) shouldBe 1
ordering.compare(nodeWithNoSamples, nodeWithNoSamples) shouldBe 0
}
it must "create orderings that prefer nodes with higher ucb" in {
val root = IntegerTree(Range.inclusive(1, 2), List.empty, samples = Samples(1 -> 100.0, 1 -> 10.0))
val ordering = algorithm.UcbSelectionContext(root).getOrdering
ordering.compare(nodeWithGoodSample, nodeWithBadSample) shouldBe -1
ordering.compare(nodeWithBadSample, nodeWithGoodSample) shouldBe 1
ordering.compare(nodeWithGoodSample, nodeWithGoodSample) shouldBe 0
}
it must "compute UCB using correct total number of samples" in {
class MockableSamples extends Samples(Nil)
val samplesMock = mock[MockableSamples]
(samplesMock.ucb _).expects(2.0, 2).returning(Some(100.0))
val nodeWithGoodSampleAndCheck = nodeWithGoodSample.updated(samplesMock)
val root = IntegerTree(
Range.inclusive(1, 2),
List(nodeWithBadSample, nodeWithGoodSampleAndCheck),
Samples(1 -> 100.0, 1 -> 10.0)
)
val selector = Selector(algorithm.UcbSelectionContext(root))
selector(root).tree shouldBe nodeWithGoodSampleAndCheck
}
}
| pawel-wiejacha/edison | core/src/test/scala/edison/search/algorithm/UctSelectionContextTest.scala | Scala | mit | 2,122 |
package controllers.api
import models.ZkKafka
import models.ZkKafka._
import play.api._
import play.api.libs.json._
import play.api.mvc._
import _root_.utils.JsonFormats._
object Status extends Controller {
def current(topoRoot: String, topic: String) = Action {
val stormState = ZkKafka.getSpoutState(topoRoot, topic)
val zkState = ZkKafka.getKafkaState(topic)
val totalsAndDeltas = ZkKafka.getTopologyDeltas(topoRoot, topic)
Ok(Json.toJson(
totalsAndDeltas._2.toSeq
))
}
}
| evertrue/capillary | app/controllers/api/Status.scala | Scala | mit | 512 |
package org.jetbrains.plugins.scala
package lang.psi.api.expr
/**
* @author Alefas
* @since 25/03/14.
*/
trait ScInfixArgumentExpression extends ScExpression {
/**
   * Returns true if this expression is the argument of a method invocation.
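   *
   * For example (illustrative): in the infix call `a foo b`, the context of `b` is the
   * `ScInfixExpr` and `infix.getArgExpr == b`, so `isCall` returns `true` for `b`.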
*/
def isCall: Boolean = {
getContext match {
case infix: ScInfixExpr => infix.getArgExpr == this
case _ => false
}
}
}
|
loskutov/intellij-scala
|
src/org/jetbrains/plugins/scala/lang/psi/api/expr/ScInfixArgumentExpression.scala
|
Scala
|
apache-2.0
| 388 |
package org.firedancer3d.scenegraph.geometry;
class Sphere {
}
|
cyberthinkers/FireDancer3D
|
firedancer3d_shared/src/main/scala/org/firedancer3d/scenegraph/geometry/Sphere.scala
|
Scala
|
mit
| 70 |
/* Copyright 2017-18, Emmanouil Antonios Platanios. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.platanios.tensorflow.api.core.distributed
import org.platanios.tensorflow.api.config.ClusterConfig
import org.platanios.tensorflow.api.core.DeviceSpecification
import org.platanios.tensorflow.api.ops.OpSpecification
/** Device placement strategy to use in a replicated training setup.
*
* @param psNumTasks Number of parameter server tasks.
* @param psDevice Name of the parameter server device. If empty, no parameter server job is used.
* @param workerDevice Name of the worker device. If empty, no worker job is used.
* @param psOpTypes Set of strings representing op types that need to be placed on parameter server devices.
* @param psStrategy Function invoked for every parameter server op (i.e., matched by `psOpTypes`), that takes the
* op and returns the parameter server task index to use.
*
* @author Emmanouil Antonios Platanios
*/
class ReplicaDevicePlacer private[distributed](
psNumTasks: Int, psDevice: String, workerDevice: String, psOpTypes: Set[String],
psStrategy: (OpSpecification) => Int) {
def apply(opSpecification: OpSpecification): String = {
val currentDevice = DeviceSpecification.fromString(opSpecification.device)
    // The `psJob` will be used for the specified ops (`psOpTypes`) whenever it is present and `psNumTasks` is
    // non-zero. However, its task number will only be set (using `psStrategy`) if the job field of `psJob` is not
    // going to be overridden by the job field (if present) of `currentDevice`.
if (psNumTasks > 0 && psDevice != null && psOpTypes.contains(opSpecification.opType)) {
var psDeviceSpec = DeviceSpecification.fromString(psDevice)
val currentJob = currentDevice.job
val psJob = psDeviceSpec.job
if (psJob != null && (currentJob == null || currentJob == psJob))
psDeviceSpec = psDeviceSpec.copy(task = psStrategy(opSpecification))
DeviceSpecification.merge(psDeviceSpec, currentDevice).toString
} else {
val workerDeviceSpec = DeviceSpecification.fromString(if (workerDevice != null) workerDevice else "")
DeviceSpecification.merge(workerDeviceSpec, currentDevice).toString
}
}
}
/** Contains helper methods for dealing with [[ReplicaDevicePlacer]]s. */
object ReplicaDevicePlacer {
   * Returns a device function to use when building a graph for replicas.
*
* Device functions are used in the `Op.createWith(deviceFunction = ...)` statement to automatically assign ops to
* devices as they are being constructed. Device constraints are added from the inner-most context first, working
* outwards. The merging behavior adds constraints to fields that are yet unset by a more general inner context.
* Currently the fields include `job`, `task`, and `cpu`/`gpu`.
*
   * If `clusterConfig` is provided, the number of parameter server tasks is derived from it;
   * otherwise `psNumTasks` is used. If the resulting task count is 0, `null` is returned (i.e., no device function).
*
* By default, only variable ops are placed on parameter server tasks and the placement strategy is round-robin over
* all parameter server tasks. A custom `psStrategy` may be used to do more intelligent device placement.
*
* For example:
* {{{
* // To build a cluster with two parameter server jobs on hosts `ps0` and `ps1`, and 3 worker jobs on hosts
* // `worker0`, `worker1`, and `worker2`.
* val clusterConfig = ClusterConfig(Map(
* "ps" -> JobConfig.fromAddresses(
* "ps0:2222",
* "ps1:2222"),
* "worker" -> JobConfig.fromAddresses(
* "worker0:2222",
* "worker1:2222",
* "worker2:2222")))
   * Op.createWith(ReplicaDevicePlacer(clusterConfig = clusterConfig)) {
* // Build the graph.
* val v1 = tf.variable(...) // Assigned to device `/job:ps/task:0`
* val v2 = tf.variable(...) // Assigned to device `/job:ps/task:1`
* val v3 = tf.variable(...) // Assigned to device `/job:ps/task:0`
* }
* // Run computation.
* }}}
*
* @param psNumTasks Number of parameter server tasks. Ignored if `clusterConfig` is provided.
* @param psDevice Name of the parameter server device. If empty, no parameter server job is used.
* @param workerDevice Name of the worker device. If empty, no worker job is used.
* @param clusterConfig Cluster configuration.
* @param psOpTypes Set of strings representing op types that need to be placed on parameter server devices.
* @param psStrategy Function invoked for every parameter server op (i.e., matched by `psOpTypes`), that takes
* the op and returns the parameter server task index to use. If `null`, defaults to a
* round-robin strategy across all parameter server devices.
* @return [[ReplicaDevicePlacer]], whose `apply` method can be passed to `Op.createWith(deviceFunction = ...)`.
*/
def apply(
psNumTasks: Int = 0, psDevice: String = "/job:ps", workerDevice: String = "/job:worker",
clusterConfig: ClusterConfig = null, psOpTypes: Set[String] = Set("Variable", "VariableV2", "VarHandleOp"),
psStrategy: (OpSpecification) => Int = null): ReplicaDevicePlacer = {
val numTasks = {
if (clusterConfig != null) {
// Get `psJob` from `psDevice` by stripping "/job:".
val psJob = DeviceSpecification.fromString(psDevice).job
val psJobTasks = clusterConfig.jobTasks(psJob)
if (psJobTasks.isEmpty || psJobTasks.get == null) 0 else psJobTasks.get.size
} else {
psNumTasks
}
}
if (numTasks == 0) {
null
} else {
// TODO: [DISTRIBUTED] !!! Variables in the LOCAL_VARIABLES collection should not be placed on the parameter server.
new ReplicaDevicePlacer(
numTasks, psDevice, workerDevice, psOpTypes,
if (psStrategy == null) RoundRobinDeviceSetter(numTasks).apply else psStrategy)
}
}
/** Device placement strategy which returns the next parameter server task index for placement in round-robin order.
*
* @param psNumTasks Number of parameter server tasks to cycle among.
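    *
    * For example (illustrative), with `psNumTasks = 2` the strategy yields task indices
    * `0, 1, 0, 1, ...` for successive ops:
    * {{{
    *   val setter = RoundRobinDeviceSetter(psNumTasks = 2)
    *   // setter(op1) == 0, setter(op2) == 1, setter(op3) == 0, ...
    * }}}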
*/
private[distributed] case class RoundRobinDeviceSetter(psNumTasks: Int) {
private[this] var nextTask: Int = 0
def apply(opSpecification: OpSpecification): Int = {
val task = nextTask
nextTask = (nextTask + 1) % psNumTasks
task
}
}
}
|
eaplatanios/tensorflow
|
tensorflow/scala/api/src/main/scala/org/platanios/tensorflow/api/core/distributed/ReplicaDevicePlacer.scala
|
Scala
|
apache-2.0
| 7,095 |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.metamx.tranquility.test
import com.github.nscala_time.time.Imports._
import com.metamx.common.scala.Logging
import com.metamx.common.scala.Predef._
import com.metamx.common.scala.timekeeper.TestingTimekeeper
import com.metamx.tranquility.beam.Beam
import com.metamx.tranquility.spark.BeamFactory
import com.metamx.tranquility.spark.BeamRDD._
import com.metamx.tranquility.test.common.CuratorRequiringSuite
import com.metamx.tranquility.test.common.DruidIntegrationSuite
import com.metamx.tranquility.test.common.JulUtils
import org.apache.curator.framework.CuratorFrameworkFactory
import org.apache.curator.retry.BoundedExponentialBackoffRetry
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.BeforeAndAfterAll
import org.scalatest.FunSuite
import scala.collection.mutable
@RunWith(classOf[JUnitRunner])
class SparkDruidTest
extends FunSuite with DruidIntegrationSuite with CuratorRequiringSuite with Logging with BeforeAndAfterAll
{
private var sparkContext: SparkContext = null
private var ssc : StreamingContext = null
override def beforeAll(): Unit = {
sparkContext = new SparkContext(
new SparkConf().setMaster("local").setAppName("SparkDruidTest")
)
ssc = new StreamingContext(sparkContext, Seconds(3))
}
override def afterAll(): Unit = {
if (ssc != null) {
ssc.stop()
}
if (sparkContext != null) {
sparkContext.stop()
}
}
JulUtils.routeJulThroughSlf4j()
test("Spark to Druid") {
withDruidStack {
(curator, broker, coordinator, overlord) =>
val zkConnect = curator.getZookeeperClient.getCurrentConnectionString
val now = new DateTime().hourOfDay().roundFloorCopy()
val inputs = DirectDruidTest.generateEvents(now)
val lines = mutable.Queue[RDD[SimpleEvent]]()
lines += sparkContext.makeRDD(inputs, 2)
val dstream = ssc.queueStream(lines)
dstream.foreachRDD(rdd => rdd.propagate(new SimpleEventBeamFactory(zkConnect)))
ssc.start()
runTestQueriesAndAssertions(
broker, new TestingTimekeeper withEffect {
timekeeper =>
timekeeper.now = now
}
)
ssc.stop(true, true)
ssc.awaitTermination()
}
}
}
class SimpleEventBeamFactory(zkConnect: String) extends BeamFactory[SimpleEvent]
{
override def makeBeam: Beam[SimpleEvent] = {
SimpleEventBeamFactory.instance(zkConnect)
}
}
object SimpleEventBeamFactory
{
// zkConnect -> beams
private val beams = mutable.HashMap[String, Beam[SimpleEvent]]()
def instance(zkConnect: String) = {
beams.synchronized {
val aDifferentCurator = CuratorFrameworkFactory.newClient(
zkConnect,
new BoundedExponentialBackoffRetry(100, 1000, 5)
)
aDifferentCurator.start()
val builder = DirectDruidTest.newBuilder(
aDifferentCurator, new TestingTimekeeper withEffect {
timekeeper =>
timekeeper.now = DateTime.now
}
)
builder.buildBeam()
}
}
}
|
druid-io/tranquility
|
spark/src/test/scala/com/metamx/tranquility/test/SparkDruidTest.scala
|
Scala
|
apache-2.0
| 4,089 |
package io.finch
import scala.reflect.ClassTag
import cats.Alternative
import cats.data.NonEmptyList
import com.twitter.finagle.Service
import com.twitter.finagle.http.{Cookie, Request, Response, Status}
import com.twitter.util.{Future, Return, Throw, Try}
import io.catbird.util.Rerunnable
import io.finch.internal._
import java.nio.charset.Charset
import shapeless._
import shapeless.ops.adjoin.Adjoin
import shapeless.ops.hlist.Tupler
/**
* An `Endpoint` represents the HTTP endpoint.
*
* It is well known and widely adopted in Finagle that "Your Server is a Function"
* (i.e., `Request => Future[Response]`). In a REST/HTTP API setting this function may be viewed as
 * `Request =1=> (A =2=> Future[B]) =3=> Future[Response]`, where transformation `1` is request
 * decoding (deserialization), transformation `2` is the business logic, and transformation `3` is
 * response encoding (serialization). The only interesting part here is transformation `2` (i.e.,
 * `A => Future[B]`), which represents the application's business logic.
*
 * An `Endpoint` transformation (`map`, `mapAsync`, etc.) encodes the business logic, while the
 * rest of the Finch ecosystem takes care of both serialization and deserialization.
*
* A typical way to transform (or map) the `Endpoint` is to use [[Mapper]] and `Endpoint.apply`
* method, which, depending on the argument type, delegates the map operation to the underlying
* function.
*
* {{{
* import io.finch._
*
* case class Foo(i: Int)
* case class Bar(s: String)
*
* val foo: Endpoint[Foo] = get("foo") { Ok(Foo(42)) }
* val bar: Endpoint[Bar] = get("bar" :: string) { s: String => Ok(Bar(s)) }
* }}}
*
* `Endpoint`s are also composable in terms of or-else combinator (known as a "space invader"
* operator `:+:`) that takes two `Endpoint`s and returns a coproduct `Endpoint`.
*
* {{{
* import io.finch._
*
* val foobar: Endpoint[Foo :+: Bar :+: CNil] = foo :+: bar
* }}}
*
* An `Endpoint` might be converted into a Finagle [[Service]] with `Endpoint.toService` method so
* it can be served within Finagle HTTP.
*
* {{{
* import com.twitter.finagle.Http
*
* Http.server.serve(foobar.toService)
* }}}
*/
trait Endpoint[A] { self =>
/**
   * Request item (part) that this endpoint works with.
*/
def item: items.RequestItem = items.MultipleItems
/**
* Maps this endpoint to either `A => Output[B]` or `A => Output[Future[B]]`.
*/
final def apply(mapper: Mapper[A]): Endpoint[mapper.Out] = mapper(self)
// There is a reason why `apply` can't be renamed to `run` as per
// https://github.com/finagle/finch/issues/371.
// More details are here:
// http://stackoverflow.com/questions/32064375/magnet-pattern-and-overloaded-methods
/**
* Runs this endpoint.
*/
def apply(input: Input): Endpoint.Result[A]
/**
* Maps this endpoint to the given function `A => B`.
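   *
   * For example (an illustrative sketch):
   * {{{
   *   val len: Endpoint[Int] = string.map(_.length)
   * }}}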
*/
final def map[B](fn: A => B): Endpoint[B] =
mapAsync(fn.andThen(Future.value))
/**
* Maps this endpoint to the given function `A => Future[B]`.
*/
final def mapAsync[B](fn: A => Future[B]): Endpoint[B] = new Endpoint[B] {
def apply(input: Input): Endpoint.Result[B] =
self(input).map {
case (remainder, output) => remainder -> output.flatMapF(oa => oa.traverse(a => fn(a)))
}
override def item = self.item
override def toString = self.toString
}
/**
* Maps this endpoint to the given function `A => Output[B]`.
*/
final def mapOutput[B](fn: A => Output[B]): Endpoint[B] =
mapOutputAsync(fn.andThen(Future.value))
/**
* Maps this endpoint to the given function `A => Future[Output[B]]`.
*/
final def mapOutputAsync[B](fn: A => Future[Output[B]]): Endpoint[B] = new Endpoint[B] {
def apply(input: Input): Endpoint.Result[B] =
self(input).map {
case (remainder, output) => remainder -> output.flatMapF { oa =>
val fob = oa.traverse(fn).map(oob => oob.flatten)
fob.map { ob =>
val ob1 = oa.headers.foldLeft(ob)((acc, x) => acc.withHeader(x))
val ob2 = oa.cookies.foldLeft(ob1)((acc, x) => acc.withCookie(x))
ob2
}
}
}
override def item = self.item
override def toString = self.toString
}
/**
* Transforms this endpoint to the given function `Future[Output[A]] => Future[Output[B]]`.
   *
* Might be useful to perform some extra action on the underlying `Future`. For example, time
* the latency of the given endpoint.
*
* {{{
* import io.finch._
* import com.twitter.finagle.stats._
*
* def time[A](stat: Stat, e: Endpoint[A]): Endpoint[A] =
* e.transform(f => Stat.timeFuture(s)(f))
* }}}
*/
final def transform[B](fn: Future[Output[A]] => Future[Output[B]]): Endpoint[B] = new Endpoint[B] {
override def apply(input: Input): Endpoint.Result[B] = {
self(input).map {
case (remainder, output) => remainder -> new Rerunnable[Output[B]] {
override def run: Future[Output[B]] = fn(output.run)
}
}
}
}
final def product[B](other: Endpoint[B]): Endpoint[(A, B)] = new Endpoint[(A, B)] {
private[this] def join(both: (Try[Output[A]], Try[Output[B]])): Future[Output[(A, B)]] =
both match {
case (Return(oa), Return(ob)) => Future.value(oa.flatMap(a => ob.map(b => (a, b))))
case (Throw(oa), Throw(ob)) => Future.exception(collectExceptions(oa, ob))
case (Throw(e), _) => Future.exception(e)
case (_, Throw(e)) => Future.exception(e)
}
private[this] def collectExceptions(a: Throwable, b: Throwable): Error.RequestErrors = {
def collect(e: Throwable): Seq[Throwable] = e match {
case Error.RequestErrors(errors) => errors
case rest => Seq(rest)
}
Error.RequestErrors(collect(a) ++ collect(b))
}
def apply(input: Input): Endpoint.Result[(A, B)] =
self(input).flatMap {
case (remainder1, outputA) => other(remainder1).map {
case (remainder2, outputB) =>
remainder2 -> outputA.liftToTry.product(outputB.liftToTry).flatMapF(join)
}
}
override def item = self.item
override def toString = self.toString
}
/**
* Maps this endpoint to `Endpoint[A => B]`.
*/
@deprecated("Use product or Applicative[Endpoint].ap", "0.11.0")
final def ap[B](fn: Endpoint[A => B]): Endpoint[B] = product(fn).map {
case (a, f) => f(a)
}
/**
* Composes this endpoint with the given `other` endpoint. The resulting endpoint will succeed
   * only if both this and the `other` endpoint succeed.
*/
final def adjoin[B](other: Endpoint[B])(implicit
pairAdjoin: PairAdjoin[A, B]
): Endpoint[pairAdjoin.Out] = new Endpoint[pairAdjoin.Out] {
val inner = self.product(other).map {
case (a, b) => pairAdjoin(a, b)
}
def apply(input: Input): Endpoint.Result[pairAdjoin.Out] = inner(input)
override def item = items.MultipleItems
override def toString = s"${self.toString}/${other.toString}"
}
/**
* Composes this endpoint with the given [[Endpoint]].
*/
@deprecated("Use :: instead", "0.11")
final def ?[B](other: Endpoint[B])(implicit adjoin: PairAdjoin[A, B]): Endpoint[adjoin.Out] =
self.adjoin(other)
/**
* Composes this endpoint with the given [[Endpoint]].
*/
@deprecated("Use :: instead", "0.11")
final def /[B](other: Endpoint[B])(implicit adjoin: PairAdjoin[A, B]): Endpoint[adjoin.Out] =
self.adjoin(other)
/**
* Composes this endpoint with the given [[Endpoint]].
*/
final def ::[B](other: Endpoint[B])(implicit adjoin: PairAdjoin[B, A]): Endpoint[adjoin.Out] =
other.adjoin(self)
/**
* Sequentially composes this endpoint with the given `other` endpoint. The resulting endpoint
   * will succeed if either this or the `other` endpoint succeeds.
*/
final def |[B >: A](other: Endpoint[B]): Endpoint[B] = new Endpoint[B] {
private[this] def aToB(o: Endpoint.Result[A]): Endpoint.Result[B] =
o.asInstanceOf[Endpoint.Result[B]]
def apply(input: Input): Endpoint.Result[B] =
(self(input), other(input)) match {
case (aa @ Some((a, _)), bb @ Some((b, _))) =>
if (a.path.length <= b.path.length) aToB(aa) else bb
case (a, b) => aToB(a).orElse(b)
}
override def item = items.MultipleItems
override def toString = s"(${self.toString}|${other.toString})"
}
/**
* Composes this endpoint with another in such a way that coproducts are flattened.
*/
final def :+:[B](that: Endpoint[B])(implicit a: Adjoin[B :+: A :+: CNil]): Endpoint[a.Out] =
that.map(x => a(Inl[B, A :+: CNil](x))) |
self.map(x => a(Inr[B, A :+: CNil](Inl[A, CNil](x))))
final def withHeader(header: (String, String)): Endpoint[A] =
withOutput(o => o.withHeader(header))
final def withCookie(cookie: Cookie): Endpoint[A] =
withOutput(o => o.withCookie(cookie))
final def withCharset(charset: Charset): Endpoint[A] =
withOutput(o => o.withCharset(charset))
/**
* Converts this endpoint to a Finagle service `Request => Future[Response]` that serves JSON.
*/
final def toService(implicit
tr: ToResponse.Aux[A, Application.Json],
tre: ToResponse.Aux[Exception, Application.Json]
): Service[Request, Response] = toServiceAs[Application.Json]
/**
* Converts this endpoint to a Finagle service `Request => Future[Response]` that serves custom
* content-type `CT`.
*/
final def toServiceAs[CT <: String](implicit
tr: ToResponse.Aux[A, CT],
tre: ToResponse.Aux[Exception, CT]
): Service[Request, Response] = new Service[Request, Response] {
private[this] val basicEndpointHandler: PartialFunction[Throwable, Output[Nothing]] = {
case e: io.finch.Error => Output.failure(e, Status.BadRequest)
}
private[this] val safeEndpoint = self.handle(basicEndpointHandler)
def apply(req: Request): Future[Response] = safeEndpoint(Input.request(req)) match {
case Some((remainder, output)) if remainder.isEmpty =>
output.map(oa => oa.toResponse[CT](req.version)).run
case _ => Future.value(Response(req.version, Status.NotFound))
}
}
/**
   * Recovers from any exception occurring in this endpoint by creating a new endpoint that will
* handle any matching throwable from the underlying future.
*/
final def rescue[B >: A](pf: PartialFunction[Throwable, Future[Output[B]]]): Endpoint[B] =
transform(foa => foa.rescue(pf))
/**
   * Recovers from any exception occurring in this endpoint by creating a new endpoint that will
* handle any matching throwable from the underlying future.
*/
final def handle[B >: A](pf: PartialFunction[Throwable, Output[B]]): Endpoint[B] =
rescue(pf.andThen(Future.value))
/**
* Validates the result of this endpoint using a `predicate`. The rule is used for error
* reporting.
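   * For example (an illustrative sketch):
   * {{{
   *   val adult: Endpoint[Int] = param("age").as[Int].should("be at least 18")(_ >= 18)
   * }}}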
*
* @param rule text describing the rule being validated
* @param predicate returns true if the data is valid
*
* @return an endpoint that will return the value of this reader if it is valid.
* Otherwise the future fails with an [[Error.NotValid]] error.
*/
final def should(rule: String)(predicate: A => Boolean): Endpoint[A] = mapAsync(a =>
if (predicate(a)) Future.value(a)
else Future.exception(Error.NotValid(self.item, "should " + rule))
)
/**
* Validates the result of this endpoint using a `predicate`. The rule is used for error reporting.
*
* @param rule text describing the rule being validated
* @param predicate returns false if the data is valid
*
* @return an endpoint that will return the value of this reader if it is valid.
* Otherwise the future fails with a [[Error.NotValid]] error.
*/
final def shouldNot(rule: String)(predicate: A => Boolean): Endpoint[A] =
should(s"not $rule.")(x => !predicate(x))
/**
* Validates the result of this endpoint using a predefined `rule`. This method allows for rules
* to be reused across multiple endpoints.
*
* @param rule the predefined [[ValidationRule]] that will return true if the data is
* valid
*
* @return an endpoint that will return the value of this reader if it is valid.
* Otherwise the future fails with an [[Error.NotValid]] error.
*/
final def should(rule: ValidationRule[A]): Endpoint[A] = should(rule.description)(rule.apply)
/**
* Validates the result of this endpoint using a predefined `rule`. This method allows for rules
* to be reused across multiple endpoints.
*
* @param rule the predefined [[ValidationRule]] that will return false if the data is
* valid
*
* @return an endpoint that will return the value of this reader if it is valid.
* Otherwise the future fails with a [[Error.NotValid]] error.
*/
final def shouldNot(rule: ValidationRule[A]): Endpoint[A] = shouldNot(rule.description)(rule.apply)
/**
* Lifts this endpoint into one that always succeeds, with an empty `Option` representing failure.
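   *
   * For example (an illustrative sketch):
   * {{{
   *   val q: Endpoint[Option[String]] = param("q").lift // yields None instead of failing
   * }}}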
*/
final def lift: Endpoint[Option[A]] = new Endpoint[Option[A]] {
def apply(input: Input): Endpoint.Result[Option[A]] =
self(input).map {
case (remainder, output) =>
remainder -> output.liftToTry
.map(toa => toa.toOption.fold(Output.None: Output[Option[A]])(o => o.map(Some.apply)))
}
override def item = self.item
override def toString = self.toString
}
private[this] def withOutput[B](fn: Output[A] => Output[B]): Endpoint[B] =
transform(foa => foa.map(oa => fn(oa)))
}
/**
* Provides extension methods for [[Endpoint]] to support coproduct and path syntax.
*/
object Endpoint {
type Result[A] = Option[(Input, Rerunnable[Output[A]])]
/**
* Creates an [[Endpoint]] from the given [[Output]].
*/
def apply(mapper: Mapper[shapeless.HNil]): Endpoint[mapper.Out] = mapper(/)
/**
* Creates an empty [[Endpoint]] (an endpoint that never matches) for a given type.
*/
def empty[A]: Endpoint[A] = new Endpoint[A] {
override def apply(input: Input): Result[A] = None
}
private[finch] def embed[A](i: items.RequestItem)(f: Input => Result[A]): Endpoint[A] =
new Endpoint[A] {
def apply(input: Input): Result[A] = f(input)
override def item: items.RequestItem = i
override def toString: String =
s"${item.kind}${item.nameOption.map(n => "(" + n + ")").getOrElse("")}"
}
final implicit class ValueEndpointOps[B](val self: Endpoint[B]) extends AnyVal {
/**
* Converts this endpoint to one that returns any type with `B :: HNil` as its representation.
*/
def as[A](implicit gen: Generic.Aux[A, B :: HNil]): Endpoint[A] =
self.map(value => gen.from(value :: HNil))
}
final implicit class HListEndpointOps[L <: HList](val self: Endpoint[L]) extends AnyVal {
/**
* Converts this endpoint to one that returns any type with this [[shapeless.HList]] as its
* representation.
*/
def as[A](implicit gen: Generic.Aux[A, L]): Endpoint[A] = self.map(gen.from)
/**
* Converts this endpoint to one that returns a tuple with the same types as this
* [[shapeless.HList]].
*
     * Note that this will fail at compile time if this [[shapeless.HList]] contains more than
* 22 elements.
*/
def asTuple(implicit tupler: Tupler[L]): Endpoint[tupler.Out] = self.map(tupler(_))
}
private[this] def notParsed[A](
e: Endpoint[_], tag: ClassTag[_]
): PartialFunction[Throwable, Try[A]] = {
case exc => Throw[A](Error.NotParsed(e.item, tag, exc))
}
/**
   * Implicit conversion that allows calling `as[A]` on any `Endpoint[String]` to perform a type
   * conversion based on an implicit `Decode[A]`, which must be in scope.
*
   * The resulting endpoint will fail when type conversion fails.
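   *
   * For example (an illustrative sketch):
   * {{{
   *   val id: Endpoint[Int] = string.as[Int]
   * }}}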
*/
implicit class StringEndpointOps(val self: Endpoint[String]) extends AnyVal {
def as[A](implicit decoder: Decode[A], tag: ClassTag[A]): Endpoint[A] =
self.mapAsync(value => Future.const(decoder(value).rescue(notParsed[A](self, tag))))
}
/**
* Implicit conversion that adds convenience methods to endpoint for optional values.
*/
implicit class OptionEndpointOps[A](val self: Endpoint[Option[A]]) extends AnyVal {
private[finch] def failIfNone: Endpoint[A] = self.mapAsync {
case Some(value) => Future.value(value)
case None => Future.exception(Error.NotPresent(self.item))
}
/**
     * If the endpoint is empty, returns the provided default value.
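     *
     * For example (an illustrative sketch):
     * {{{
     *   val page: Endpoint[String] = param("page").lift.withDefault("1")
     * }}}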
*/
def withDefault[B >: A](default: => B): Endpoint[B] = self.map(_.getOrElse(default))
/**
     * If the endpoint is empty, returns the provided alternative.
*/
def orElse[B >: A](alternative: => Option[B]): Endpoint[Option[B]] =
self.map(_.orElse(alternative))
}
/**
   * Implicit conversion that allows calling `as[A]` on any `Endpoint[NonEmptyList[String]]` to perform a
   * type conversion based on an implicit `Decode[A]`, which must be in scope.
*
* The resulting endpoint will fail when type conversion fails on one
* or more of the elements in the `NonEmptyList`. It will succeed if type conversion succeeds for all elements.
*/
implicit class StringNelEndpointOps(val self: Endpoint[NonEmptyList[String]]) {
/* IMPLEMENTATION NOTE: This implicit class should extend AnyVal like all the other ones, to
     * avoid instance creation for each invocation of the extension method. However, this lets us
* run into a compiler bug when we compile for Scala 2.10:
* https://issues.scala-lang.org/browse/SI-8018. The bug is caused by the combination of four
* things: 1) an implicit class, 2) extending AnyVal, 3) wrapping a class with type parameters,
* 4) a partial function in the body. 2) is the only thing we can easily remove here, otherwise
* we'd need to move the body of the method somewhere else. Once we drop support for Scala 2.10,
* this class can safely extends AnyVal.
*/
def as[A](implicit decoder: Decode[A], tag: ClassTag[A]): Endpoint[NonEmptyList[A]] =
self.mapAsync { items =>
val decoded = items.toList.map(decoder.apply)
val errors = decoded.collect {
case Throw(e) => Error.NotParsed(self.item, tag, e)
}
if (errors.isEmpty) Future.const(Try.collect(decoded).map(seq => NonEmptyList(seq.head, seq.tail.toList)))
else Future.exception(Error.RequestErrors(errors))
}
}
/**
   * Implicit conversion that allows calling `as[A]` on any `Endpoint[Seq[String]]` to perform a
   * type conversion based on an implicit `Decode[A]`, which must be in scope.
*
* The resulting endpoint will fail when the result is non-empty and type conversion fails on one
* or more of the elements in the `Seq`. It will succeed if the result is empty or type conversion
* succeeds for all elements.
*/
implicit class StringSeqEndpointOps(val self: Endpoint[Seq[String]]) {
/* IMPLEMENTATION NOTE: This implicit class should extend AnyVal like all the other ones, to
     * avoid instance creation for each invocation of the extension method. However, this lets us
* run into a compiler bug when we compile for Scala 2.10:
* https://issues.scala-lang.org/browse/SI-8018. The bug is caused by the combination of four
* things: 1) an implicit class, 2) extending AnyVal, 3) wrapping a class with type parameters,
* 4) a partial function in the body. 2) is the only thing we can easily remove here, otherwise
* we'd need to move the body of the method somewhere else. Once we drop support for Scala 2.10,
* this class can safely extends AnyVal.
*/
def as[A](implicit decoder: Decode[A], tag: ClassTag[A]): Endpoint[Seq[A]] =
self.mapAsync { items =>
val decoded = items.map(decoder.apply)
val errors = decoded.collect {
case Throw(e) => Error.NotParsed(self.item, tag, e)
}
if (errors.isEmpty) Future.const(Try.collect(decoded))
else Future.exception(Error.RequestErrors(errors))
}
}
/**
   * Implicit conversion that allows calling `as[A]` on any `Endpoint[Option[String]]` to perform a
   * type conversion based on an implicit `Decode[A]`, which must be in scope.
*
* The resulting endpoint will fail when the result is non-empty and type conversion fails. It
* will succeed if the result is empty or type conversion succeeds.
*/
implicit class StringOptionEndpointOps(val self: Endpoint[Option[String]]) extends AnyVal {
def as[A](implicit decoder: Decode[A], tag: ClassTag[A]): Endpoint[Option[A]] =
self.mapAsync {
case Some(value) =>
Future.const(decoder(value).rescue(notParsed[A](self, tag)).map(Some.apply))
case None =>
Future.None
}
}
class GenericDerivation[A] {
def fromParams[Repr <: HList](implicit
gen: LabelledGeneric.Aux[A, Repr],
fp: FromParams[Repr]
): Endpoint[A] = fp.endpoint.map(gen.from)
}
/**
* Generically derive a very basic instance of [[Endpoint]] for a given type `A`.
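   *
   * For example (an illustrative sketch):
   * {{{
   *   case class Foo(i: Int, s: String)
   *   val foo: Endpoint[Foo] = Endpoint.derive[Foo].fromParams
   * }}}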
*/
def derive[A]: GenericDerivation[A] = new GenericDerivation[A]
implicit val endpointInstance: Alternative[Endpoint] = new Alternative[Endpoint] {
override def ap[A, B](ff: Endpoint[A => B])(fa: Endpoint[A]): Endpoint[B] = ff.product(fa).map {
case (f, a) => f(a)
}
override def map[A, B](fa: Endpoint[A])(f: A => B): Endpoint[B] = fa.map(f)
override def product[A, B](fa: Endpoint[A], fb: Endpoint[B]): Endpoint[(A, B)] = fa.product(fb)
override def pure[A](x: A): Endpoint[A] = new Endpoint[A] {
override def apply(input: Input): Result[A] = Some(input -> Rerunnable(Output.payload(x)))
}
override def empty[A]: Endpoint[A] = Endpoint.empty[A]
override def combineK[A](x: Endpoint[A], y: Endpoint[A]): Endpoint[A] = x | y
}
}
|
ilya-murzinov/finch
|
core/src/main/scala/io/finch/Endpoint.scala
|
Scala
|
apache-2.0
| 22,034 |
package boatcraft.core.blocks
import net.minecraft.block.Block
import net.minecraft.block.material.Material
import net.minecraft.world.World
import boatcraft.core.blocks.tileentites.TileDock
import net.minecraftforge.common.util.ForgeDirection
import net.minecraft.entity.EntityLivingBase
import com.ibm.icu.impl.duration.impl.Utils
import net.minecraft.util.MathHelper
import net.minecraft.item.ItemStack
import net.minecraft.creativetab.CreativeTabs
import net.minecraft.block.BlockContainer
class BlockDock extends BlockContainer(Material.iron) {
setCreativeTab(CreativeTabs.tabRedstone)
setBlockName("dock")
override def createNewTileEntity(world: World, meta: Int) = new TileDock
override def onBlockPlacedBy(world: World, x: Int, y: Int, z: Int, entity: EntityLivingBase, stack: ItemStack) {
super.onBlockPlacedBy(world, x, y, z, entity, stack)
val orientationTable = Array[ForgeDirection](ForgeDirection.SOUTH,
ForgeDirection.WEST, ForgeDirection.NORTH, ForgeDirection.EAST)
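    // Map the placer's yaw onto one of the four horizontal directions: 90-degree sectors,
    // offset by 45 degrees so each sector is centred on an axis.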
val orientationIndex = MathHelper.floor_double((entity.rotationYaw + 45) / 90) & 3
val orientation = orientationTable(orientationIndex)
world.setBlockMetadataWithNotify(x, y, z, orientation.getOpposite.ordinal, 1)
}
override def rotateBlock(world: World, x: Int, y: Int, z: Int, axis: ForgeDirection): Boolean = {
val meta = world.getBlockMetadata(x, y, z)
import ForgeDirection._
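    // Cycle the dock's facing one step: SOUTH -> EAST -> NORTH -> WEST -> SOUTH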
ForgeDirection.getOrientation(meta) match {
case WEST =>
world.setBlockMetadataWithNotify(x, y, z, ForgeDirection.SOUTH.ordinal, 3)
case EAST =>
world.setBlockMetadataWithNotify(x, y, z, ForgeDirection.NORTH.ordinal, 3)
case NORTH =>
world.setBlockMetadataWithNotify(x, y, z, ForgeDirection.WEST.ordinal, 3)
case SOUTH =>
world.setBlockMetadataWithNotify(x, y, z, ForgeDirection.EAST.ordinal, 3)
case _ => //NOOP
}
world.markBlockForUpdate(x, y, z)
return true
}
}
object BlockDock extends BlockDock
|
Open-Code-Developers/BoatCraft
|
src/main/scala/boatcraft/core/blocks/BlockDock.scala
|
Scala
|
mit
| 1,945 |
package au.com.agiledigital.rest.security
import javax.inject._
import au.com.agiledigital.rest.controllers.transport.{ MessageLevel, Message, JsonApiResponse }
import play.api._
import play.api.http.{ Writeable, DefaultHttpErrorHandler, MimeTypes }
import play.api.libs.json.{ JsValue, Json }
import play.api.mvc._
import play.api.routing.Router
import scala.concurrent.{ Future, ExecutionContext }
/**
* Provides JSON error responses when the requestor accepts JSON. If the requestor does not accept a JSON response,
* defers handling back to the [[DefaultHttpErrorHandler]].
*
* @param env the environment of the application (chiefly, whether it is in DEV or PROD mode).
* @param config the application's configuration.
* @param sourceMapper the source mapper.
* @param router the router provider.
*/
class ErrorHandler @Inject() (
env: Environment,
config: Configuration,
sourceMapper: OptionalSourceMapper,
router: Provider[Router]
)(implicit val ec: ExecutionContext) extends DefaultHttpErrorHandler(env, config, sourceMapper, router) {
override def onClientError(request: RequestHeader, statusCode: Int, message: String): Future[Result] = {
if (isJsonRequest(request)) {
super.onClientError(request, statusCode, message).map { result =>
val writeable: Writeable[JsValue] = implicitly[Writeable[JsValue]]
Result(result.header, writeable.toEntity(Json.toJson(JsonApiResponse[String](None, Message(message, MessageLevel.Error)))))
}
}
else {
super.onClientError(request, statusCode, message)
}
}
override protected def onDevServerError(request: RequestHeader, exception: UsefulException): Future[Result] = {
if (isJsonRequest(request)) {
Future.successful(
JsonApiResponse.internalServerErrorResponse(s"A server error occurred - [$exception].", exception)
)
}
else {
super.onDevServerError(request, exception)
}
}
override protected def onProdServerError(request: RequestHeader, exception: UsefulException) = {
if (isJsonRequest(request)) {
Future.successful(
JsonApiResponse.internalServerErrorResponse(s"A server error occurred [${exception.id}].")
)
}
else {
super.onProdServerError(request, exception)
}
}
private def isJsonRequest(request: RequestHeader): Boolean = request.accepts(MimeTypes.JSON)
}
|
agiledigital/play-rest-support
|
core/src/main/scala/au/com/agiledigital/rest/security/ErrorHandler.scala
|
Scala
|
apache-2.0
| 2,398 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.feature
import org.apache.spark.SparkFunSuite
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.util.MLlibTestSparkContext
//Hashes terms into feature indices and returns term frequencies
/**
 * HashingTF computes a term-frequency vector of a given size from a document. Hashing is used to
 * keep terms and vector positions in correspondence: each term is mapped to a number between 0 and
 * S-1 by taking its hash value modulo the desired vector length S, which guarantees an S-dimensional
 * vector. Once the term-frequency vector is built, IDF computes the inverse document frequencies,
 * which are multiplied with the term frequencies to obtain TF-IDF.
 */
class HashingTFSuite extends SparkFunSuite with MLlibTestSparkContext {
test("hashing tf on a single doc") {//散列在一个单一的文件
val hashingTF = new HashingTF(1000)
val doc = "a a b b c d".split(" ")
val n = hashingTF.numFeatures
    // term frequencies
val termFreqs = Seq(
(hashingTF.indexOf("a"), 2.0),
(hashingTF.indexOf("b"), 2.0),
(hashingTF.indexOf("c"), 1.0),
(hashingTF.indexOf("d"), 1.0))
//termFreqs: Seq[(Int, Double)] = List((97,2.0), (98,2.0), (99,1.0), (100,1.0))
assert(termFreqs.map(_._1).forall(i => i >= 0 && i < n),
"index must be in range [0, #features)")//索引必须在范围内
assert(termFreqs.map(_._1).toSet.size === 4, "expecting perfect hashing")//期待完美的哈希
val expected = Vectors.sparse(n, termFreqs)
    // transform maps each input document to a Vector
    // (in the ML pipeline API, transform() converts one DataFrame into another)
assert(hashingTF.transform(doc) === expected)
}
test("hashing tf on an RDD") {//散列TF在RDD
val hashingTF = new HashingTF
val localDocs: Seq[Seq[String]] = Seq(
"a a b b b c d".split(" "),
"a b c d a b c".split(" "),
"c b a c b a a".split(" "))
val docs = sc.parallelize(localDocs, 2)
    // transform() converts one DataFrame into another
assert(hashingTF.transform(docs).collect().toSet === localDocs.map(hashingTF.transform).toSet)
}
}
|
tophua/spark1.52
|
mllib/src/test/scala/org/apache/spark/mllib/feature/HashingTFSuite.scala
|
Scala
|
apache-2.0
| 2,951 |
package wandou.math.vector
import java.util.Random
import java.util.StringTokenizer
import wandou.math.timeseries.Null
import wandou.collection.ArrayList
/**
 * Sparse implementation of Vec. It does not store zero-valued elements.
 *
 * This class keeps its elements sorted by index.
 *
 * Create a <code>SparseVec</code> whose items are copied from
 * <code>src</code>.
 *
 * @param src the array from which items are copied
*
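 * For example (an illustrative sketch):
 * {{{
 *   val v = new SparseVec(5)
 *   v(2) = 3.0       // only non-zero elements are stored
 *   v.values         // Array(0.0, 0.0, 3.0, 0.0, 0.0)
 * }}}
 *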
* @author Caoyuan Deng
*/
class SparseVec(src: Array[VecItem]) extends Vec {
import SparseVec._
private var _items: Array[VecItem] = src
private var _dimension: Int = _
/**
* Create a zero items <code>SparseVec</code>.
*/
def this() = {
this(new Array[VecItem](0))
}
/**
* Create a <code>SparseVec</code> of the desired dimension initialized to zero.
*
* @param dimension the dimension of the new <code>SparseVec</code>
*/
def this(dimension: Int) = {
this(new Array[VecItem](0))
_dimension = dimension
}
/**
* Create a <code>SparseVec</code> whose items are copied from
* <code>src</code>.
*
* @param src the <code>Vec</code> to be used as src
*/
  def this(src: Vec) {
    this(null.asInstanceOf[Array[VecItem]])
    _dimension = src.dimension // must be set before copy, which checks dimension equality
    copy(src)
  }
def dimension = _dimension
def dimension_=(dimension: Int) {
this._dimension = dimension
}
def setTo(src: Array[VecItem]) {
_items = src
}
  def add(value: Double): Unit = {
    assert(false, "SparseVec does not support this method, because the elements must be kept sorted by index")
  }
def values: Array[Double] = {
val _values = new Array[Double](dimension)
/** as all values has been initialed to 0 , we only need to: */
for (item <- _items) {
_values(item.index) = item.value
}
_values
}
  def values_=(values: Array[Double]) {
    if (dimension != values.length) {
      throw new ArrayIndexOutOfBoundsException("Doing operations with sources of different sizes.")
    }
    // keep the item array compact and index-sorted: store non-zero elements only
    val itemBuf = new ArrayList[VecItem]
    for (i <- 0 until dimension) {
      val value = values(i)
      if (value != 0) {
        itemBuf += VecItem(i, value)
      }
    }
    _items = itemBuf.toArray
  }
def checkDimensionEquality(comp: Vec): Unit = {
if (comp.dimension != this.dimension) {
throw new ArrayIndexOutOfBoundsException("Doing operations with SparseVec instances of different sizes.");
}
}
override def clone: SparseVec = {
new SparseVec(this)
}
def metric(other: Vec): Double = {
this.minus(other).normTwo
}
def equals(another: Vec): Boolean = {
if (dimension != another.dimension) {
return false
}
another match {
      case x: SparseVec =>
        // Stored items are non-zero by construction and sorted by index, so the two
        // vectors are equal only if their compact item arrays match pairwise.
        val itemsA = this._items
        val itemsB = x._items
        if (itemsA.length != itemsB.length) {
          return false
        }
        var i = 0
        while (i < itemsA.length) {
          if (itemsA(i).index != itemsB(i).index || itemsA(i).value != itemsB(i).value) {
            return false
          }
          i += 1
        }
case _ =>
for (i <- 0 until dimension) {
if (apply(i) != another(i)) {
return false
}
}
}
true
}
def itemOfByPosition(position: Int): VecItem = {
_items(position)
}
def apply(dimensionIdx: Int): Double = {
var i = 0
while (i < _items.length) {
if (_items(i).index == dimensionIdx) {
return _items(i).value
}
i += 1
}
0.0
}
  def update(dimensionIdx: Int, value: Double) {
    val itemIdx = itemIdxOf(dimensionIdx)
    if (itemIdx >= 0) {
      // the dimension already holds an item: replace it in place
      _items(itemIdx) = VecItem(dimensionIdx, value)
    } else {
      // insert a new item, keeping the array sorted by index
      val newItems = new Array[VecItem](_items.length + 1)
      var added = false
      var i = 0
      while (i < newItems.length) {
        if (!added && (i == _items.length || _items(i).index > dimensionIdx)) {
          newItems(i) = VecItem(dimensionIdx, value)
          added = true
        } else {
          newItems(i) = if (added) _items(i - 1) else _items(i)
        }
        i += 1
      }
      _items = newItems
    }
  }
  private def itemIdxOf(dimensionIdx: Int): Int = {
    var i = 0
    while (i < _items.length) {
      if (_items(i).index == dimensionIdx) {
        // return the position within the compact item array, not the dimension index
        return i
      }
      i += 1
    }
    -1
  }
def itemOf(dimensionIdx: Int): VecItem = {
val i = itemIdxOf(dimensionIdx)
if (i >= 0) {
_items(i)
} else {
null
}
}
def setAll(value: Double) {
    if (value == 0) {
      _items = new Array[VecItem](0) // zeros are not stored explicitly; avoid a null array
} else {
_items = new Array[VecItem](dimension)
var i = 0
while (i < dimension) {
_items(i) = VecItem(i, value)
i += 1
}
}
}
def copy(src: Vec) {
checkDimensionEquality(src)
_items = src match {
case x: SparseVec =>
val srcItems = x._items
val newItems = new Array[VecItem](srcItems.length)
System.arraycopy(srcItems, 0, newItems, 0, srcItems.length)
newItems
case _ =>
val itemBuf = new ArrayList[VecItem]
var i = 0
while (i < src.dimension) {
val value = src(i)
if (value != 0) {
itemBuf += VecItem(i, value)
}
i += 1
}
itemBuf.toArray
}
}
def copy(src: Vec, srcPos: Int, destPos: Int, length: Int) {
/** todo */
//System.arraycopy(src.toDoubleArray(), srcPos, items, destPos, length);
}
def plus(operand: Vec): Vec = {
checkDimensionEquality(operand)
val result = new SparseVec(dimension)
for (i <- 0 until dimension) {
val value = this(i) + operand(i)
if (value != 0) {
result(i) = value
}
}
result
}
def minus(operand: Vec): Vec = {
checkDimensionEquality(operand)
val result = new SparseVec(dimension)
for (i <- 0 until operand.dimension) {
val value = this(i) - operand(i)
if (value != 0) {
result(i) = value
}
}
result
}
def innerProduct(operand: Vec): Double = {
checkDimensionEquality(operand)
var result = 0d
operand match {
case x: SparseVec =>
        /** A fast merge-join when both vectors are SparseVec: walk the two index-sorted arrays in step */
val itemsA = this._items
val itemsB = x._items
val lenA = itemsA.length
val lenB = itemsB.length
var idxA = 0
var idxB = 0
while (idxA < lenA && idxB < lenB) {
val itemA = itemsA(idxA)
val itemB = itemsB(idxB)
if (itemA.index == itemB.index) {
result += itemA.value * itemB.value
idxA += 1
idxB += 1
} else if (itemA.index > itemB.index) {
idxB += 1
} else {
idxA += 1
}
}
case _ =>
        /** for the inner product, we only need to visit the stored non-zero values */
for (i <- 0 until _items.length) {
val item = _items(i)
result += item.value * operand(item.index)
}
}
result
}
def square: Double = {
var result = 0d
var i = 0
while (i < _items.length) {
val value = _items(i).value
result += value * value
i += 1
}
result
}
def plus(operand: Double): Vec = {
val result = new SparseVec(this)
var i = 0
while (i < _items.length) {
val item = _items(i)
result._items(i) = VecItem(item.index, item.value + operand)
i += 1
}
result
}
def times(operand: Double): Vec = {
val result = new SparseVec(this)
var i = 0
while (i < _items.length) {
val item = _items(i)
result._items(i) = VecItem(item.index, item.value * operand)
i += 1
}
result
}
def compactSize: Int = {
_items.length
}
def compactData: Array[VecItem] = {
_items
}
def normOne: Double = {
var result = 0d
    /** for the norm-1 operation, we only need to visit the stored non-zero values */
var i = 0
while (i < _items.length) {
result += math.abs(_items(i).value)
i += 1
}
result
}
def normTwo: Double = {
var result = 0.0
    /** for the norm-2 operation, we only need to visit the stored non-zero values */
var i = 0
while (i < _items.length) {
result += math.pow(_items(i).value, 2)
i += 1
}
result = math.sqrt(result)
result
}
def checkValidation: Boolean = {
var i = 0
while (i < _items.length) {
if (Null.is(_items(i).value)) {
return false
}
i += 1
}
true
}
def randomize(min: Double, max: Double) {
val source = new Random(System.currentTimeMillis + Runtime.getRuntime.freeMemory)
var i = 0
while (i < dimension) {
/**
* @NOTICE
* source.nextDouble() returns a pseudorandom value between 0.0 and 1.0
*/
update(i, source.nextDouble * (max - min) + min)
i += 1
}
}
override def toString: String = {
val sb = new StringBuffer()
sb.append("[")
var i = 0
while (i < dimension) {
sb.append(this(i)).append(ITEM_SEPARATOR)
i += 1
}
sb.append("]")
sb.toString
}
}
object SparseVec {
val ITEM_SEPARATOR = " "
/**
* Parses a String into a <code>DefaultVec</code>.
   * Elements are separated by <code>SparseVec.ITEM_SEPARATOR</code>.
*
* @param str the String to parse
* @return the resulting <code>DefaultVec</code>
   * @see SparseVec#ITEM_SEPARATOR
*/
def parseVec(str: String): Vec = {
val st = new StringTokenizer(str, ITEM_SEPARATOR)
val dimension = st.countTokens
val result = new DefaultVec(dimension)
for (i <- 0 until dimension) {
result(i) = st.nextToken.toDouble
}
result
}
}
|
wandoulabs/wandou-math
|
wandou-math/src/main/scala/wandou/math/vector/SparseVec.scala
|
Scala
|
apache-2.0
| 10,037 |
package mesosphere.marathon
package integration
import mesosphere.AkkaIntegrationTest
import mesosphere.marathon.integration.facades.ITEnrichedTask
import mesosphere.marathon.integration.setup._
import mesosphere.marathon.raml.App
import mesosphere.marathon.state.PathId._
import org.scalatest.Inside
import scala.concurrent.duration._
@IntegrationTest
class TaskUnreachableIntegrationTest extends AkkaIntegrationTest with EmbeddedMarathonTest with Inside {
override lazy val mesosNumMasters = 1
override lazy val mesosNumSlaves = 2
override val marathonArgs: Map[String, String] = Map(
"reconciliation_initial_delay" -> "5000",
"reconciliation_interval" -> "5000",
"scale_apps_initial_delay" -> "5000",
"scale_apps_interval" -> "5000",
"min_revive_offers_interval" -> "100",
"task_lost_expunge_gc" -> "30000",
"task_lost_expunge_initial_delay" -> "1000",
"task_lost_expunge_interval" -> "1000"
)
// TODO unreachable tests for pods
before {
zkServer.start()
mesosCluster.masters.foreach(_.start())
mesosCluster.agents.head.start()
mesosCluster.agents(1).stop()
mesosCluster.waitForLeader().futureValue
cleanUp()
}
"TaskUnreachable" should {
"A task unreachable update will trigger a replacement task" in {
Given("a new app with proper timeouts")
val strategy = raml.UnreachableEnabled(inactiveAfterSeconds = 10, expungeAfterSeconds = 5 * 60)
val app = appProxy(testBasePath / "unreachable", "v1", instances = 1, healthCheck = None).copy(
unreachableStrategy = Option(strategy)
)
waitForDeployment(marathon.createAppV2(app))
val task = waitForTasks(app.id.toPath, 1).head
When("the slave is partitioned")
mesosCluster.agents(0).stop()
Then("the task is declared unreachable")
waitForEventMatching("Task is declared unreachable") {
matchEvent("TASK_UNREACHABLE", task)
}
And("the task is declared unreachable inactive")
waitForEventWith("instance_changed_event", _.info("condition") == "UnreachableInactive")
And("a replacement task is started on a different slave")
mesosCluster.agents(1).start() // Start an alternative slave
waitForEventWith("status_update_event", _.info("taskStatus") == "TASK_RUNNING")
val tasks = marathon.tasks(app.id.toPath).value
tasks should have size 2
tasks.groupBy(_.state).keySet should be(Set("TASK_RUNNING", "TASK_UNREACHABLE"))
val replacement = tasks.find(_.state == "TASK_RUNNING").get
When("the first slaves comes back")
mesosCluster.agents(0).start()
Then("the task reappears as running")
waitForEventMatching("Task is declared running") {
matchEvent("TASK_RUNNING", task)
}
And("the replacement task is killed")
waitForEventMatching("Replacement task is killed") {
matchEvent("TASK_KILLED", replacement)
}
And("there is only one running task left")
marathon.tasks(app.id.toPath).value should have size 1
marathon.tasks(app.id.toPath).value.head.state should be("TASK_RUNNING")
}
"A task unreachable update with inactiveAfterSeconds 0 will trigger a replacement task instantly" in {
Given("a new app with proper timeouts")
val strategy = raml.UnreachableEnabled(inactiveAfterSeconds = 0, expungeAfterSeconds = 60)
val app = appProxy(testBasePath / "unreachable-instant", "v1", instances = 1, healthCheck = None).copy(
unreachableStrategy = Option(strategy)
)
waitForDeployment(marathon.createAppV2(app))
val task = waitForTasks(app.id.toPath, 1).head
When("the slave is partitioned")
mesosCluster.agents(0).stop()
mesosCluster.agents(1).start() // Start an alternative agent
Then("the task is declared unreachable")
waitForEventMatching("Task is declared unreachable") {
matchEvent("TASK_UNREACHABLE", task)
}
Then("the replacement task is running")
      // wait only a few seconds; with inactiveAfterSeconds = 0 the replacement should start almost immediately
waitForEventMatching("Replacement task is declared running", 6.seconds) {
matchEvent("TASK_RUNNING", app)
}
// immediate replacement should be started
val tasks = marathon.tasks(app.id.toPath).value
tasks should have size 2
tasks.groupBy(_.state).keySet should be(Set("TASK_RUNNING", "TASK_UNREACHABLE"))
}
// regression test for https://github.com/mesosphere/marathon/issues/4059
"Scaling down an app with constraints and unreachable task will succeed" in {
Given("an app that is constrained to a unique hostname")
val constraint = Set(Seq("node", "MAX_PER", "1"))
// start both slaves
mesosCluster.agents.foreach(_.start())
val strategy = raml.UnreachableEnabled(inactiveAfterSeconds = 3 * 60, expungeAfterSeconds = 4 * 60)
val app = appProxy(testBasePath / "regression", "v1", instances = 2, healthCheck = None)
.copy(constraints = constraint, unreachableStrategy = Option(strategy))
waitForDeployment(marathon.createAppV2(app))
val enrichedTasks = waitForTasks(app.id.toPath, num = 2)
val clusterState = mesosCluster.state.value
val slaveId = clusterState.agents.find(_.attributes.attributes("node").toString.toDouble.toInt == 0).getOrElse(
throw new RuntimeException(s"failed to find agent1: attributes by agent=${clusterState.agents.map(_.attributes.attributes)}")
)
val task = enrichedTasks.find(t => t.slaveId.contains(slaveId.id)).getOrElse(
throw new RuntimeException("No matching task found on slave1")
)
When("agent1 is stopped")
mesosCluster.agents.head.stop()
Then("one task is declared unreachable")
waitForEventMatching("Task is declared lost") {
matchEvent("TASK_UNREACHABLE", task)
}
And("the task is not removed from the task list")
inside(waitForTasks(app.id.toPath, num = 2)) {
case tasks =>
tasks should have size 2
tasks.exists(_.state == "TASK_UNREACHABLE") shouldBe true
}
When("we try to scale down to one instance")
val update = marathon.updateApp(app.id.toPath, raml.AppUpdate(instances = Some(1)))
waitForEventMatching("deployment to scale down should be triggered") {
matchDeploymentStart(app.id)
}
Then("the update deployment will eventually finish")
waitForDeployment(update)
And("The unreachable task is expunged")
eventually(inside(marathon.tasks(app.id.toPath).value) {
case t :: Nil =>
t.state shouldBe "TASK_RUNNING"
})
marathon.listDeploymentsForBaseGroup().value should have size 0
}
}
def matchEvent(status: String, app: App): CallbackEvent => Boolean = { event =>
event.info.get("taskStatus").contains(status) &&
event.info.get("appId").contains(app.id)
}
def matchEvent(status: String, task: ITEnrichedTask): CallbackEvent => Boolean = { event =>
event.info.get("taskStatus").contains(status) &&
event.info.get("taskId").contains(task.id)
}
private def matchDeploymentStart(appId: String): CallbackEvent => Boolean = { event =>
val infoString = event.info.toString()
event.eventType == "deployment_info" && matchScaleApplication(infoString, appId)
}
private def matchScaleApplication(infoString: String, appId: String): Boolean = {
infoString.contains(s"List(Map(actions -> List(Map(action -> ScaleApplication, app -> $appId)))))")
}
}
|
janisz/marathon
|
src/test/scala/mesosphere/marathon/integration/TaskUnreachableIntegrationTest.scala
|
Scala
|
apache-2.0
| 7,559 |
/*
* Copyright (c) 2015.
* Created by MrTJP.
* All rights reserved.
*/
package mrtjp.core.math
import java.util.Random
import net.minecraft.world.World
object PerlinNoiseGenerator extends PerlinNoiseGenerator
{
protected def floor(x:Double) = if (x >= 0) x.toInt else x.toInt-1
protected def fade(x:Double) = x*x*x*(x*(x*6-15)+10)
protected def lerp(x:Double, y:Double, z:Double) = y+x*(z-y)
protected def grad(hash1:Int, x:Double, y:Double, z:Double) =
{
val hash = hash1&15
val u = if (hash < 8) x else y
val v = if (hash < 4) y else if (hash == 12 || hash == 14) x else z
(if ((hash&1) == 0) u else -u)+(if ((hash&2) == 0) v else -v)
}
}
class PerlinNoiseGenerator(rand:Random)
{
def this() = this(new Random)
def this(w:World) = this(w.rand)
def this(seed:Long) = this(new Random(seed))
private final val perm = new Array[Int](512)
private val offsetX = rand.nextDouble*256
private val offsetY = rand.nextDouble*256
private val offsetZ = rand.nextDouble*256
{
for (i <- 0 until 256) perm(i) = rand.nextInt(256)
for (i <- 0 until 256)
{
val pos = rand.nextInt(256-i)+i
val old = perm(i)
perm(i) = perm(pos)
perm(pos) = old
perm(i+256) = perm(i)
}
}
def noise(x:Double):Double = noise(x, 0, 0)
def noise(x:Double, y:Double):Double = noise(x, y, 0)
/**
* Computes and returns the 3D noise for the given coordinates in 3D space
*
* @param x X coordinate
* @param y Y coordinate
* @param z Z coordinate
   * @return Noise at the given location, in the range -1 to 1
*/
def noise(x:Double, y:Double, z:Double):Double =
{
var x1 = x+offsetX
var y1 = y+offsetY
var z1 = z+offsetZ
import PerlinNoiseGenerator._
val floorX = floor(x1)
val floorY = floor(y1)
val floorZ = floor(z1)
// Find unit cube containing the point
val X = floorX&255
val Y = floorY&255
val Z = floorZ&255
// Get relative xyz coordinates of the point within the cube
x1 -= floorX
y1 -= floorY
z1 -= floorZ
// Compute fade curves for xyz
val fX = fade(x1)
val fY = fade(y1)
val fZ = fade(z1)
// Hash coordinates of the cube corners
val A = perm(X)+Y
val AA = perm(A)+Z
val AB = perm(A+1)+Z
val B = perm(X+1)+Y
val BA = perm(B)+Z
val BB = perm(B+1)+Z
lerp(fZ, lerp(fY, lerp(fX, grad(perm(AA), x1, y1, z1), grad(perm(BA), x1-1, y1, z1)), lerp(fX, grad(perm(AB),
x1, y1-1, z1), grad(perm(BB), x1-1, y1-1, z1))), lerp(fY, lerp(fX, grad(perm(AA+1), x1, y1, z1-1),
grad(perm(BA+1), x1-1, y1, z1-1)), lerp(fX, grad(perm(AB+1), x1, y1-1, z1-1),
grad(perm(BB+1), x1-1, y1-1, z1-1))))
}
/**
* Generates noise for the 3D coordinates using the specified number of
* octaves and parameters
*
* @param x X-coordinate
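   * For example (an illustrative sketch): 4 octaves with `frequency = 2` and `amplitude = 0.5`
   * add finer detail at each octave while halving its weight:
   * {{{
   *   val gen = new PerlinNoiseGenerator(42L)
   *   val n = gen.noise(1.5, 2.5, 3.5, 4, 2.0, 0.5, true) // normalized to [-1, 1]
   * }}}
   *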
* @param y Y-coordinate
* @param z Z-coordinate
* @param octaves Number of octaves to use
* @param frequency How much to alter the frequency by each octave
* @param amplitude How much to alter the amplitude by each octave
* @param normalized If true, normalize the value to [-1, 1]
* @return Resulting noise
*/
def noise(x:Double, y:Double, z:Double, octaves:Int, frequency:Double, amplitude:Double, normalized:Boolean):Double =
{
var result = 0.0D
var amp = 1.0D
var freq = 1.0D
var max = 0.0D
for (i <- 0 until octaves)
{
result += noise(x*freq, y*freq, z*freq)*amp
max += amp
freq *= frequency
amp *= amplitude
}
if (normalized) result /= max
result
}
}
|
MrTJP/MrTJPCore
|
src/main/scala/mrtjp/core/math/PerlinNoiseGenerator.scala
|
Scala
|
lgpl-3.0
| 3,962 |
package build.unstable.sonicd.source
import akka.actor._
import akka.event.Logging.LogLevel
import akka.http.scaladsl.model._
import akka.http.scaladsl.settings.ConnectionPoolSettings
import akka.stream.actor.ActorPublisher
import akka.util.ByteString
import build.unstable.sonic.JsonProtocol._
import build.unstable.sonic.model._
import build.unstable.sonicd.source.ElasticSearch.ESQuery
import build.unstable.sonicd.source.http.HttpSupervisor
import build.unstable.sonicd.source.http.HttpSupervisor.{HttpRequestCommand, Traceable}
import build.unstable.sonicd.{SonicdConfig, SonicdLogging}
import build.unstable.tylog.Variation
import org.slf4j.event.Level
import spray.json._
import scala.collection.mutable
import scala.concurrent.duration.{Duration, FiniteDuration, _}
import scala.util.Try
object ElasticSearch {
def getSupervisorName(nodeUrl: String, port: Int): String = s"elasticsearch_${nodeUrl}_$port"
case class ESQuery(extractedFrom: Option[Long], extractedSize: Option[Long], payload: JsObject)
case object ESQueryJsonFormat {
//returns the index (default "_all"), the optional _type and the parsed ES query
def read(json: JsValue): (String, Option[String], ESQuery) = {
val obj = json.asJsObject
val fields = obj.fields
val index = fields.get("_index").flatMap(_.convertTo[Option[String]]).getOrElse("_all")
val typeHint = fields.get("_type").flatMap(_.convertTo[Option[String]])
(index, typeHint,
ESQuery(fields.get("from").flatMap(_.convertTo[Option[Long]]),
fields.get("size").flatMap(_.convertTo[Option[Long]]), obj))
}
def write(obj: ESQuery, from: Long, size: Long): JsValue = {
val fields = mutable.Map.empty ++ obj.payload.fields
//ES rejects the search if these routing hints remain in the payload, so strip them
fields.remove("_type")
fields.remove("_index")
fields.update("from", JsNumber(from))
fields.update("size", JsNumber(size))
JsObject(fields.toMap)
}
}
case class Shards(total: Int, successful: Int, failed: Int)
case class Hit(_index: String, _type: String, _id: String, _score: Float, _source: Option[JsObject])
case class Hits(total: Long, max_score: Float, hits: Vector[Hit])
case class QueryResults(traceId: Option[String], took: Long, timed_out: Boolean,
_shards: Shards, hits: Hits)
extends HttpSupervisor.Traceable {
def id = traceId.get
def setTraceId(newId: String): Traceable = this.copy(traceId = Some(newId))
}
implicit val shardsFormat: RootJsonFormat[Shards] = jsonFormat3(Shards.apply)
implicit val hitFormat: RootJsonFormat[Hit] = jsonFormat5(Hit.apply)
implicit val hitsFormat: RootJsonFormat[Hits] = jsonFormat3(Hits.apply)
implicit val queryResultsFormat: RootJsonFormat[QueryResults] = jsonFormat5(QueryResults.apply)
}
class ElasticSearchSource(query: Query, actorContext: ActorContext, context: RequestContext)
extends SonicdSource(query, actorContext, context) {
def elasticsearchSupervisorProps(nodeUrl: String, masterPort: Int): Props =
Props(classOf[ElasticSearchSupervisor], nodeUrl, masterPort)
val nodeUrl: String = getConfig[String]("url")
val nodePort: Int = getConfig[Int]("port")
val (index, typeHint, esQuery) = ElasticSearch.ESQueryJsonFormat.read(query.query.parseJson)
val supervisorName = ElasticSearch.getSupervisorName(nodeUrl, nodePort)
def getSupervisor(name: String): ActorRef = {
actorContext.child(name).getOrElse {
actorContext.actorOf(elasticsearchSupervisorProps(nodeUrl, nodePort), supervisorName)
}
}
lazy val publisher: Props = {
//if no ES supervisor has been initialized yet for this ES cluster, initialize one
val supervisor = getSupervisor(supervisorName)
Props(classOf[ElasticSearchPublisher], query.traceId.get, esQuery,
index, typeHint, SonicdConfig.ES_QUERY_SIZE, supervisor, SonicdConfig.ES_WATERMARK, context)
}
}
class ElasticSearchSupervisor(val masterUrl: String, val port: Int) extends HttpSupervisor[ElasticSearch.QueryResults] {
lazy val jsonFormat: RootJsonFormat[ElasticSearch.QueryResults] = ElasticSearch.queryResultsFormat
lazy val poolSettings: ConnectionPoolSettings = ConnectionPoolSettings(SonicdConfig.ES_CONNECTION_POOL_SETTINGS)
lazy val httpEntityTimeout: FiniteDuration = SonicdConfig.ES_HTTP_ENTITY_TIMEOUT
lazy val extraHeaders = scala.collection.immutable.Seq.empty[HttpHeader]
lazy val debug: Boolean = false
override def cancelRequestFromResult(t: ElasticSearch.QueryResults): Option[HttpRequest] = None
}
class ElasticSearchPublisher(traceId: String,
query: ESQuery,
index: String,
typeHint: Option[String],
querySize: Long,
supervisor: ActorRef,
watermark: Long)
(implicit ctx: RequestContext)
extends ActorPublisher[SonicMessage] with SonicdPublisher with SonicdLogging {
import akka.stream.actor.ActorPublisherMessage._
override def subscriptionTimeout: Duration = 1.minute
@throws[Exception](classOf[Exception])
override def postStop(): Unit = {
log.info("stopping ES publisher {}", traceId)
context unwatch supervisor
}
@throws[Exception](classOf[Exception])
override def preStart(): Unit = {
log.debug("starting ES publisher {}", traceId)
context watch supervisor
}
/* HELPERS */
def nextRequest: HttpRequestCommand = {
val payload: String = ElasticSearch.ESQueryJsonFormat.write(query, nextFrom, nextSize).compactPrint
log.trace("sending query: {}", payload)
val entity: RequestEntity =
HttpEntity.Strict.apply(ContentTypes.`application/json`, ByteString(payload, ByteString.UTF_8))
val httpRequest = HttpRequest.apply(HttpMethods.POST, uri, entity = entity)
HttpRequestCommand(traceId, httpRequest)
}
def tryPushDownstream() {
while (isActive && totalDemand > 0 && buffer.nonEmpty) {
onNext(buffer.dequeue())
}
}
def tryPullUpstream() {
if (target > 0 && fetched + nextSize > target) nextSize = target - fetched
if (!resultsPending && (buffer.isEmpty || shouldQueryAhead)) {
resultsPending = true
supervisor ! nextRequest
}
}
def shouldQueryAhead: Boolean = watermark > 0 && buffer.length < watermark
def getTypeMetadata(hit: ElasticSearch.Hit): TypeMetadata = {
TypeMetadata(hit._source.map(_.fields.toVector).getOrElse(Vector.empty))
}
val uri = typeHint.map(t ⇒ s"/$index/$t/_search").getOrElse(s"/$index/_search")
val limit = query.extractedSize.getOrElse(-1L)
/* STATE */
var target = limit
val buffer: mutable.Queue[SonicMessage] = mutable.Queue(StreamStarted(ctx.traceId))
var nextSize = if (limit > 0) Math.min(limit, querySize) else querySize
var nextFrom = query.extractedFrom.getOrElse(0L)
var fetched = 0L
var resultsPending = false
/* BEHAVIOUR */
def terminating(done: StreamCompleted): Receive = {
tryPushDownstream()
if (buffer.isEmpty && isActive && totalDemand > 0) {
onNext(done)
onCompleteThenStop()
}
    {
      // on further demand, re-enter this state so the body above retries the final push
      case r: Request ⇒ context.become(terminating(done))
    }
}
def getSelect(query: ESQuery): Vector[String] = {
val payload = query.payload.fields
payload.get("stored_fields")
.orElse(payload.get("fields"))
.orElse(payload.get("_source"))
.flatMap(f ⇒ Try(f.convertTo[Vector[String]]).toOption)
.getOrElse(Vector.empty)
}
def materialized: Receive = commonReceive orElse {
case Request(n) ⇒
tryPushDownstream()
tryPullUpstream()
case r: ElasticSearch.QueryResults ⇒
val nhits = r.hits.hits.size
resultsPending = false
fetched += nhits
if (target < 0L) target = r.hits.total
// emit artificial TypeMetadata to conform to sonic protocol
if (r.hits.total == 0 || r.hits.hits.forall(_._source.isEmpty)) {
val select = getSelect(query)
// no need to use updateMeta as if hits is 0 we only
// get here once and complete stream
meta = TypeMetadata(select.map(i ⇒ i → JsNull))
buffer.enqueue(meta)
}
r.hits.hits.foreach { h ⇒
val extracted = getTypeMetadata(h)
if (updateMeta(extracted)) {
buffer.enqueue(meta)
}
// ignore if hit doesn't contain _source
h._source.foreach(f ⇒ buffer.enqueue(alignOutput(f.fields, meta)))
}
if (nhits < nextSize || fetched == target) {
log.tylog(Level.INFO, traceId, ExecuteStatement, Variation.Success, "fetched {} documents", fetched)
context.become(terminating(StreamCompleted.success))
} else {
nextFrom += nhits
tryPullUpstream()
}
tryPushDownstream()
case Status.Failure(e) ⇒
log.tylog(Level.INFO, traceId, ExecuteStatement, Variation.Failure(e), "something went wrong with the http request")
context.become(terminating(StreamCompleted.error(e)))
}
def commonReceive: Receive = {
case Cancel ⇒
log.debug("client canceled")
onCompleteThenStop()
}
def receive: Receive = commonReceive orElse {
case SubscriptionTimeoutExceeded ⇒
log.info(s"no subscriber in within subs timeout $subscriptionTimeout")
onCompleteThenStop()
//first time client requests
case Request(n) ⇒
log.tylog(Level.INFO, traceId, ExecuteStatement, Variation.Attempt,
"send query to supervisor in path {}", supervisor.path)
tryPullUpstream()
tryPushDownstream()
context.become(materialized)
}
}
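// Hedged round-trip sketch (not part of the original file): shows how
// ESQueryJsonFormat strips the _index/_type routing hints from the payload and
// injects pagination, which is how ElasticSearchPublisher pages through results
// with nextFrom/nextSize. The query body is illustrative.
object ESQueryPagingExample {
  import spray.json._
  def main(args: Array[String]): Unit = {
    val raw = """{"_index":"logs","query":{"match_all":{}},"size":10}""".parseJson
    val (index, typeHint, esQuery) = ElasticSearch.ESQueryJsonFormat.read(raw)
    val uri = typeHint.map(t => s"/$index/$t/_search").getOrElse(s"/$index/_search")
    // first page: from=0, size=10; the publisher then advances `from` by the
    // number of hits each response returns
    val firstPage = ElasticSearch.ESQueryJsonFormat.write(esQuery, from = 0, size = 10)
    println(s"POST $uri ${firstPage.compactPrint}")
  }
}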
|
ernestrc/sonicd
|
server/src/main/scala/build/unstable/sonicd/source/ElasticSearchSource.scala
|
Scala
|
mit
| 9,646 |
package xyz.mattclifton.play.swagger.stringent.test
import com.iheart.playSwagger.{PrefixDomainModelQualifier, SwaggerSpecGenerator}
import org.specs2.mutable.Specification
import play.api.libs.json.{JsString, JsArray, JsObject, JsValue}
import xyz.mattclifton.play.swagger.stringent.EndPointSpecBuilderFactory
class EndpointSpecBuilderSpec extends Specification {
implicit val cl = getClass.getClassLoader
"EndpointSpecBuilder" >> {
lazy val json = SwaggerSpecGenerator(PrefixDomainModelQualifier(Seq("models"):_*), endpointSpecBuilder = EndPointSpecBuilderFactory.apply).generate("routes.routes").get
lazy val pathJson = json \ "paths"
lazy val definitionsJson = json \ "definitions"
lazy val okJson = (pathJson \ "/ok" \ "get").as[JsObject]
lazy val multipleResultsJson = (pathJson \ "/multiple" \ "post").as[JsObject]
lazy val okWithContentJson = (pathJson \ "/okWithContent" \ "post").as[JsObject]
lazy val noDocJson = (pathJson \ "/noDoc" \ "post").as[JsObject]
lazy val existingStatusJson = (pathJson \ "/existingStatus" \ "post").as[JsObject]
def parametersOf(json: JsValue): Option[JsArray] = {
(json \ "parameters").asOpt[JsArray]
}
"generate basic response code" >> {
(okJson \ "responses" \ "200").asOpt[JsValue].nonEmpty === true
}
"generate multiple response codes" >> {
(multipleResultsJson \ "responses" \ "200").asOpt[JsValue].nonEmpty === true
(multipleResultsJson \ "responses" \ "400").asOpt[JsValue].nonEmpty === true
}
"generate response with schema" >> {
val status200 = (okWithContentJson \ "responses" \ "200").asOpt[JsObject]
status200.nonEmpty === true
val schema = (status200.get \ "schema").asOpt[JsValue]
schema.nonEmpty === true
val refValue = (status200.get \ "schema" \ "$ref").asOpt[JsString]
refValue.nonEmpty === true
refValue.get.value === "#/definitions/models.TestContent"
}
"includes expected definition" >> {
val modelJson = definitionsJson \ "models.TestContent"
val propertiesJson = modelJson \ "properties"
val idProperty = (propertiesJson \ "id").asOpt[JsObject]
val nameProperty = (propertiesJson \ "name").asOpt[JsObject]
idProperty.nonEmpty === true
nameProperty.nonEmpty === true
}
"includes status when no doc specified" >> {
(noDocJson \ "responses" \ "200").asOpt[JsValue].nonEmpty === true
}
"includes other status when existing status is already documented" >> {
(existingStatusJson \ "responses" \ "200").asOpt[JsValue].nonEmpty === true
(existingStatusJson \ "responses" \ "400").asOpt[JsValue].nonEmpty === true
}
"includes description" >> {
(okJson \ "responses" \ "200" \ "description").asOpt[JsValue].nonEmpty === true
}
// throw error when response type doesn't match existing user doc?
// status codes
lazy val createdResultJson = (pathJson \ "/createdResult" \ "post").as[JsObject]
lazy val createdWithContentJson = (pathJson \ "/createdWithContent" \ "post").as[JsObject]
// Created
"created result" >> {
(createdResultJson \ "responses" \ "201").asOpt[JsValue].nonEmpty === true
}
"created with content" >> {
(createdWithContentJson \ "responses" \ "201").asOpt[JsValue].nonEmpty === true
(createdWithContentJson \ "responses" \ "201" \ "schema" \ "$ref").as[JsString].value === "#/definitions/models.TestContent"
}
lazy val acceptedResultJson = (pathJson \ "/acceptedResult" \ "post").as[JsObject]
lazy val acceptedWithContentJson = (pathJson \ "/acceptedWithContent" \ "post").as[JsObject]
// Accepted
"accepted result" >> {
(acceptedResultJson \ "responses" \ "202").asOpt[JsValue].nonEmpty === true
}
"created with content" >> {
(acceptedWithContentJson \ "responses" \ "202").asOpt[JsValue].nonEmpty === true
(acceptedWithContentJson \ "responses" \ "202" \ "schema" \ "$ref").as[JsString].value === "#/definitions/models.TestContent"
}
lazy val nonAuthoritativeInformationResultJson = (pathJson \ "/nonAuthoritativeInformationResult" \ "post").as[JsObject]
lazy val nonAuthoritativeInformationWithContentJson = (pathJson \ "/nonAuthoritativeInformationWithContent" \ "post").as[JsObject]
// NonAuthoritativeInformation
"NonAuthoritativeInformation result" >> {
(nonAuthoritativeInformationResultJson \ "responses" \ "203").asOpt[JsValue].nonEmpty === true
}
"NonAuthoritativeInformation with content" >> {
(nonAuthoritativeInformationWithContentJson \ "responses" \ "203").asOpt[JsValue].nonEmpty === true
(nonAuthoritativeInformationWithContentJson \ "responses" \ "203" \ "schema" \ "$ref").as[JsString].value === "#/definitions/models.TestContent"
}
lazy val noContentResultJson = (pathJson \ "/noContentResult" \ "post").as[JsObject]
// NoContent
"NoContent result" >> {
(noContentResultJson \ "responses" \ "204").asOpt[JsValue].nonEmpty === true
}
lazy val resetContentResultJson = (pathJson \ "/resetContentResult" \ "post").as[JsObject]
// ResetContent
"ResetContent result" >> {
(resetContentResultJson \ "responses" \ "205").asOpt[JsValue].nonEmpty === true
}
lazy val partialContentResultJson = (pathJson \ "/partialContentResult" \ "post").as[JsObject]
lazy val partialContentWithContentJson = (pathJson \ "/partialContentWithContent" \ "post").as[JsObject]
// PartialContent
"PartialContent result" >> {
(partialContentResultJson \ "responses" \ "206").asOpt[JsValue].nonEmpty === true
}
"PartialContent with content" >> {
(partialContentWithContentJson \ "responses" \ "206").asOpt[JsValue].nonEmpty === true
(partialContentWithContentJson \ "responses" \ "206" \ "schema" \ "$ref").as[JsString].value === "#/definitions/models.TestContent"
}
lazy val multiStatusResultJson = (pathJson \ "/multiStatusResult" \ "post").as[JsObject]
lazy val multiStatusWithContentJson = (pathJson \ "/multiStatusWithContent" \ "post").as[JsObject]
// MultiStatus
"MultiStatus result" >> {
(multiStatusResultJson \ "responses" \ "207").asOpt[JsValue].nonEmpty === true
}
"MultiStatus with content" >> {
(multiStatusWithContentJson \ "responses" \ "207").asOpt[JsValue].nonEmpty === true
(multiStatusWithContentJson \ "responses" \ "207" \ "schema" \ "$ref").as[JsString].value === "#/definitions/models.TestContent"
}
lazy val movedPermanentlyResultJson = (pathJson \ "/movedPermanentlyResult" \ "post").as[JsObject]
// MovedPermanently
"MovedPermanently result" >> {
(movedPermanentlyResultJson \ "responses" \ "301").asOpt[JsValue].nonEmpty === true
}
lazy val foundResultJson = (pathJson \ "/foundResult" \ "post").as[JsObject]
// Found
"Found result" >> {
(foundResultJson \ "responses" \ "302").asOpt[JsValue].nonEmpty === true
}
lazy val seeOtherResultJson = (pathJson \ "/seeOtherResult" \ "post").as[JsObject]
// SeeOther
"SeeOther result" >> {
(seeOtherResultJson \ "responses" \ "303").asOpt[JsValue].nonEmpty === true
}
lazy val notModifiedResultJson = (pathJson \ "/notModifiedResult" \ "post").as[JsObject]
// NotModified
"NotModified result" >> {
(notModifiedResultJson \ "responses" \ "304").asOpt[JsValue].nonEmpty === true
}
lazy val temporaryRedirectResultJson = (pathJson \ "/temporaryRedirectResult" \ "post").as[JsObject]
// TemporaryRedirect
"TemporaryRedirect result" >> {
(temporaryRedirectResultJson \ "responses" \ "307").asOpt[JsValue].nonEmpty === true
}
lazy val permanentRedirectResultJson = (pathJson \ "/permanentRedirectResult" \ "post").as[JsObject]
// PermanentRedirect
"PermanentRedirect result" >> {
(permanentRedirectResultJson \ "responses" \ "308").asOpt[JsValue].nonEmpty === true
}
lazy val badRequestResultJson = (pathJson \ "/badRequestResult" \ "post").as[JsObject]
lazy val badRequestWithContentJson = (pathJson \ "/badRequestWithContent" \ "post").as[JsObject]
// BadRequest
"BadRequest result" >> {
(badRequestResultJson \ "responses" \ "400").asOpt[JsValue].nonEmpty === true
}
"BadRequest with content" >> {
(badRequestWithContentJson \ "responses" \ "400").asOpt[JsValue].nonEmpty === true
(badRequestWithContentJson \ "responses" \ "400" \ "schema" \ "$ref").as[JsString].value === "#/definitions/models.TestContent"
}
lazy val unauthorizedResultJson = (pathJson \ "/unauthorizedResult" \ "post").as[JsObject]
lazy val unauthorizedWithContentJson = (pathJson \ "/unauthorizedWithContent" \ "post").as[JsObject]
// Unauthorized
"Unauthorized result" >> {
(unauthorizedResultJson \ "responses" \ "401").asOpt[JsValue].nonEmpty === true
}
"Unauthorized with content" >> {
(unauthorizedWithContentJson \ "responses" \ "401").asOpt[JsValue].nonEmpty === true
(unauthorizedWithContentJson \ "responses" \ "401" \ "schema" \ "$ref").as[JsString].value === "#/definitions/models.TestContent"
}
lazy val paymentRequiredResultJson = (pathJson \ "/paymentRequiredResult" \ "post").as[JsObject]
lazy val paymentRequiredWithContentJson = (pathJson \ "/paymentRequiredWithContent" \ "post").as[JsObject]
// PaymentRequired
"PaymentRequired result" >> {
(paymentRequiredResultJson \ "responses" \ "402").asOpt[JsValue].nonEmpty === true
}
"PaymentRequired with content" >> {
(paymentRequiredWithContentJson \ "responses" \ "402").asOpt[JsValue].nonEmpty === true
(paymentRequiredWithContentJson \ "responses" \ "402" \ "schema" \ "$ref").as[JsString].value === "#/definitions/models.TestContent"
}
lazy val forbiddenResultJson = (pathJson \ "/forbiddenResult" \ "post").as[JsObject]
lazy val forbiddenWithContentJson = (pathJson \ "/forbiddenWithContent" \ "post").as[JsObject]
// Forbidden
"Forbidden result" >> {
(forbiddenResultJson \ "responses" \ "403").asOpt[JsValue].nonEmpty === true
}
"Forbidden with content" >> {
(forbiddenWithContentJson \ "responses" \ "403").asOpt[JsValue].nonEmpty === true
(forbiddenWithContentJson \ "responses" \ "403" \ "schema" \ "$ref").as[JsString].value === "#/definitions/models.TestContent"
}
lazy val notFoundResultJson = (pathJson \ "/notFoundResult" \ "post").as[JsObject]
lazy val notFoundWithContentJson = (pathJson \ "/notFoundWithContent" \ "post").as[JsObject]
// NotFound
"NotFound result" >> {
(notFoundResultJson \ "responses" \ "404").asOpt[JsValue].nonEmpty === true
}
"NotFound with content" >> {
(notFoundWithContentJson \ "responses" \ "404").asOpt[JsValue].nonEmpty === true
(notFoundWithContentJson \ "responses" \ "404" \ "schema" \ "$ref").as[JsString].value === "#/definitions/models.TestContent"
}
lazy val methodNotAllowedResultJson = (pathJson \ "/methodNotAllowedResult" \ "post").as[JsObject]
lazy val methodNotAllowedWithContentJson = (pathJson \ "/methodNotAllowedWithContent" \ "post").as[JsObject]
// MethodNotAllowed
"MethodNotAllowed result" >> {
(methodNotAllowedResultJson \ "responses" \ "405").asOpt[JsValue].nonEmpty === true
}
"MethodNotAllowed with content" >> {
(methodNotAllowedWithContentJson \ "responses" \ "405").asOpt[JsValue].nonEmpty === true
(methodNotAllowedWithContentJson \ "responses" \ "405" \ "schema" \ "$ref").as[JsString].value === "#/definitions/models.TestContent"
}
lazy val notAcceptableResultJson = (pathJson \ "/notAcceptableResult" \ "post").as[JsObject]
lazy val notAcceptableWithContentJson = (pathJson \ "/notAcceptableWithContent" \ "post").as[JsObject]
// NotAcceptable
"NotAcceptable result" >> {
(notAcceptableResultJson \ "responses" \ "406").asOpt[JsValue].nonEmpty === true
}
"NotAcceptable with content" >> {
(notAcceptableWithContentJson \ "responses" \ "406").asOpt[JsValue].nonEmpty === true
(notAcceptableWithContentJson \ "responses" \ "406" \ "schema" \ "$ref").as[JsString].value === "#/definitions/models.TestContent"
}
lazy val requestTimeoutResultJson = (pathJson \ "/requestTimeoutResult" \ "post").as[JsObject]
lazy val requestTimeoutWithContentJson = (pathJson \ "/requestTimeoutWithContent" \ "post").as[JsObject]
// RequestTimeout
"RequestTimeout result" >> {
(requestTimeoutResultJson \ "responses" \ "408").asOpt[JsValue].nonEmpty === true
}
"RequestTimeout with content" >> {
(requestTimeoutWithContentJson \ "responses" \ "408").asOpt[JsValue].nonEmpty === true
(requestTimeoutWithContentJson \ "responses" \ "408" \ "schema" \ "$ref").as[JsString].value === "#/definitions/models.TestContent"
}
lazy val conflictResultJson = (pathJson \ "/conflictResult" \ "post").as[JsObject]
lazy val conflictWithContentJson = (pathJson \ "/conflictWithContent" \ "post").as[JsObject]
// Conflict
"Conflict result" >> {
(conflictResultJson \ "responses" \ "409").asOpt[JsValue].nonEmpty === true
}
"Conflict with content" >> {
(conflictWithContentJson \ "responses" \ "409").asOpt[JsValue].nonEmpty === true
(conflictWithContentJson \ "responses" \ "409" \ "schema" \ "$ref").as[JsString].value === "#/definitions/models.TestContent"
}
lazy val goneResultJson = (pathJson \ "/goneResult" \ "post").as[JsObject]
lazy val goneWithContentJson = (pathJson \ "/goneWithContent" \ "post").as[JsObject]
// Gone
"Gone result" >> {
(goneResultJson \ "responses" \ "410").asOpt[JsValue].nonEmpty === true
}
"Gone with content" >> {
(goneWithContentJson \ "responses" \ "410").asOpt[JsValue].nonEmpty === true
(goneWithContentJson \ "responses" \ "410" \ "schema" \ "$ref").as[JsString].value === "#/definitions/models.TestContent"
}
lazy val preconditionFailedResultJson = (pathJson \ "/preconditionFailedResult" \ "post").as[JsObject]
lazy val preconditionFailedWithContentJson = (pathJson \ "/preconditionFailedWithContent" \ "post").as[JsObject]
// PreconditionFailed
"PreconditionFailed result" >> {
(preconditionFailedResultJson \ "responses" \ "412").asOpt[JsValue].nonEmpty === true
}
"PreconditionFailed with content" >> {
(preconditionFailedWithContentJson \ "responses" \ "412").asOpt[JsValue].nonEmpty === true
(preconditionFailedWithContentJson \ "responses" \ "412" \ "schema" \ "$ref").as[JsString].value === "#/definitions/models.TestContent"
}
lazy val entityTooLargeResultJson = (pathJson \ "/entityTooLargeResult" \ "post").as[JsObject]
lazy val entityTooLargeWithContentJson = (pathJson \ "/entityTooLargeWithContent" \ "post").as[JsObject]
// EntityTooLarge
"EntityTooLarge result" >> {
(entityTooLargeResultJson \ "responses" \ "413").asOpt[JsValue].nonEmpty === true
}
"EntityTooLarge with content" >> {
(entityTooLargeWithContentJson \ "responses" \ "413").asOpt[JsValue].nonEmpty === true
(entityTooLargeWithContentJson \ "responses" \ "413" \ "schema" \ "$ref").as[JsString].value === "#/definitions/models.TestContent"
}
lazy val uriTooLongResultJson = (pathJson \ "/uriTooLongResult" \ "post").as[JsObject]
lazy val uriTooLongWithContentJson = (pathJson \ "/uriTooLongWithContent" \ "post").as[JsObject]
// UriTooLong
"UriTooLong result" >> {
(uriTooLongResultJson \ "responses" \ "414").asOpt[JsValue].nonEmpty === true
}
"UriTooLong with content" >> {
(uriTooLongWithContentJson \ "responses" \ "414").asOpt[JsValue].nonEmpty === true
(uriTooLongWithContentJson \ "responses" \ "414" \ "schema" \ "$ref").as[JsString].value === "#/definitions/models.TestContent"
}
lazy val unsupportedMediaTypeResultJson = (pathJson \ "/unsupportedMediaTypeResult" \ "post").as[JsObject]
lazy val unsupportedMediaTypeWithContentJson = (pathJson \ "/unsupportedMediaTypeWithContent" \ "post").as[JsObject]
// UnsupportedMediaType
"UnsupportedMediaType result" >> {
(unsupportedMediaTypeResultJson \ "responses" \ "415").asOpt[JsValue].nonEmpty === true
}
"UnsupportedMediaType with content" >> {
(unsupportedMediaTypeWithContentJson \ "responses" \ "415").asOpt[JsValue].nonEmpty === true
(unsupportedMediaTypeWithContentJson \ "responses" \ "415" \ "schema" \ "$ref").as[JsString].value === "#/definitions/models.TestContent"
}
lazy val expectationFailedResultJson = (pathJson \ "/expectationFailedResult" \ "post").as[JsObject]
lazy val expectationFailedWithContentJson = (pathJson \ "/expectationFailedWithContent" \ "post").as[JsObject]
// ExpectationFailed
"ExpectationFailed result" >> {
(expectationFailedResultJson \ "responses" \ "417").asOpt[JsValue].nonEmpty === true
}
"ExpectationFailed with content" >> {
(expectationFailedWithContentJson \ "responses" \ "417").asOpt[JsValue].nonEmpty === true
(expectationFailedWithContentJson \ "responses" \ "417" \ "schema" \ "$ref").as[JsString].value === "#/definitions/models.TestContent"
}
lazy val unprocessableEntityResultJson = (pathJson \ "/unprocessableEntityResult" \ "post").as[JsObject]
lazy val unprocessableEntityWithContentJson = (pathJson \ "/unprocessableEntityWithContent" \ "post").as[JsObject]
// UnprocessableEntity
"UnprocessableEntity result" >> {
(unprocessableEntityResultJson \ "responses" \ "422").asOpt[JsValue].nonEmpty === true
}
"UnprocessableEntity with content" >> {
(unprocessableEntityWithContentJson \ "responses" \ "422").asOpt[JsValue].nonEmpty === true
(unprocessableEntityWithContentJson \ "responses" \ "422" \ "schema" \ "$ref").as[JsString].value === "#/definitions/models.TestContent"
}
lazy val lockedResultJson = (pathJson \ "/lockedResult" \ "post").as[JsObject]
lazy val lockedWithContentJson = (pathJson \ "/lockedWithContent" \ "post").as[JsObject]
// Locked
"Locked result" >> {
(lockedResultJson \ "responses" \ "423").asOpt[JsValue].nonEmpty === true
}
"Locked with content" >> {
(lockedWithContentJson \ "responses" \ "423").asOpt[JsValue].nonEmpty === true
(lockedWithContentJson \ "responses" \ "423" \ "schema" \ "$ref").as[JsString].value === "#/definitions/models.TestContent"
}
lazy val failedDependencyResultJson = (pathJson \ "/failedDependencyResult" \ "post").as[JsObject]
lazy val failedDependencyWithContentJson = (pathJson \ "/failedDependencyWithContent" \ "post").as[JsObject]
// FailedDependency
"FailedDependency result" >> {
(failedDependencyResultJson \ "responses" \ "424").asOpt[JsValue].nonEmpty === true
}
"FailedDependency with content" >> {
(failedDependencyWithContentJson \ "responses" \ "424").asOpt[JsValue].nonEmpty === true
(failedDependencyWithContentJson \ "responses" \ "424" \ "schema" \ "$ref").as[JsString].value === "#/definitions/models.TestContent"
}
lazy val tooManyRequestsResultJson = (pathJson \ "/tooManyRequestsResult" \ "post").as[JsObject]
lazy val tooManyRequestsWithContentJson = (pathJson \ "/tooManyRequestsWithContent" \ "post").as[JsObject]
// TooManyRequests
"TooManyRequests result" >> {
(tooManyRequestsResultJson \ "responses" \ "429").asOpt[JsValue].nonEmpty === true
}
"TooManyRequests with content" >> {
(tooManyRequestsWithContentJson \ "responses" \ "429").asOpt[JsValue].nonEmpty === true
(tooManyRequestsWithContentJson \ "responses" \ "429" \ "schema" \ "$ref").as[JsString].value === "#/definitions/models.TestContent"
}
lazy val internalServerErrorResultJson = (pathJson \ "/internalServerErrorResult" \ "post").as[JsObject]
lazy val internalServerErrorWithContentJson = (pathJson \ "/internalServerErrorWithContent" \ "post").as[JsObject]
// InternalServerError
"InternalServerError result" >> {
(internalServerErrorResultJson \ "responses" \ "500").asOpt[JsValue].nonEmpty === true
}
"InternalServerError with content" >> {
(internalServerErrorWithContentJson \ "responses" \ "500").asOpt[JsValue].nonEmpty === true
(internalServerErrorWithContentJson \ "responses" \ "500" \ "schema" \ "$ref").as[JsString].value === "#/definitions/models.TestContent"
}
lazy val notImplementedResultJson = (pathJson \ "/notImplementedResult" \ "post").as[JsObject]
lazy val notImplementedWithContentJson = (pathJson \ "/notImplementedWithContent" \ "post").as[JsObject]
// NotImplemented
"NotImplemented result" >> {
(notImplementedResultJson \ "responses" \ "501").asOpt[JsValue].nonEmpty === true
}
"NotImplemented with content" >> {
(notImplementedWithContentJson \ "responses" \ "501").asOpt[JsValue].nonEmpty === true
(notImplementedWithContentJson \ "responses" \ "501" \ "schema" \ "$ref").as[JsString].value === "#/definitions/models.TestContent"
}
lazy val badGatewayResultJson = (pathJson \ "/badGatewayResult" \ "post").as[JsObject]
lazy val badGatewayWithContentJson = (pathJson \ "/badGatewayWithContent" \ "post").as[JsObject]
// BadGateway
"BadGateway result" >> {
(badGatewayResultJson \ "responses" \ "502").asOpt[JsValue].nonEmpty === true
}
"BadGateway with content" >> {
(badGatewayWithContentJson \ "responses" \ "502").asOpt[JsValue].nonEmpty === true
(badGatewayWithContentJson \ "responses" \ "502" \ "schema" \ "$ref").as[JsString].value === "#/definitions/models.TestContent"
}
lazy val serviceUnavailableResultJson = (pathJson \ "/serviceUnavailableResult" \ "post").as[JsObject]
lazy val serviceUnavailableWithContentJson = (pathJson \ "/serviceUnavailableWithContent" \ "post").as[JsObject]
// ServiceUnavailable
"ServiceUnavailable result" >> {
(serviceUnavailableResultJson \ "responses" \ "503").asOpt[JsValue].nonEmpty === true
}
"ServiceUnavailable with content" >> {
(serviceUnavailableWithContentJson \ "responses" \ "503").asOpt[JsValue].nonEmpty === true
(serviceUnavailableWithContentJson \ "responses" \ "503" \ "schema" \ "$ref").as[JsString].value === "#/definitions/models.TestContent"
}
lazy val gatewayTimeoutResultJson = (pathJson \ "/gatewayTimeoutResult" \ "post").as[JsObject]
lazy val gatewayTimeoutWithContentJson = (pathJson \ "/gatewayTimeoutWithContent" \ "post").as[JsObject]
// GatewayTimeout
"GatewayTimeout result" >> {
(gatewayTimeoutResultJson \ "responses" \ "504").asOpt[JsValue].nonEmpty === true
}
"GatewayTimeout with content" >> {
(gatewayTimeoutWithContentJson \ "responses" \ "504").asOpt[JsValue].nonEmpty === true
(gatewayTimeoutWithContentJson \ "responses" \ "504" \ "schema" \ "$ref").as[JsString].value === "#/definitions/models.TestContent"
}
lazy val httpVersionNotSupportedResultJson = (pathJson \ "/httpVersionNotSupportedResult" \ "post").as[JsObject]
lazy val httpVersionNotSupportedWithContentJson = (pathJson \ "/httpVersionNotSupportedWithContent" \ "post").as[JsObject]
// HttpVersionNotSupported
"HttpVersionNotSupported result" >> {
(httpVersionNotSupportedResultJson \ "responses" \ "505").asOpt[JsValue].nonEmpty === true
}
"HttpVersionNotSupported with content" >> {
(httpVersionNotSupportedWithContentJson \ "responses" \ "505").asOpt[JsValue].nonEmpty === true
(httpVersionNotSupportedWithContentJson \ "responses" \ "505" \ "schema" \ "$ref").as[JsString].value === "#/definitions/models.TestContent"
}
lazy val insufficientStorageResultJson = (pathJson \ "/insufficientStorageResult" \ "post").as[JsObject]
lazy val insufficientStorageWithContentJson = (pathJson \ "/insufficientStorageWithContent" \ "post").as[JsObject]
// InsufficientStorage
"InsufficientStorage result" >> {
(insufficientStorageResultJson \ "responses" \ "507").asOpt[JsValue].nonEmpty === true
}
"InsufficientStorage with content" >> {
(insufficientStorageWithContentJson \ "responses" \ "507").asOpt[JsValue].nonEmpty === true
(insufficientStorageWithContentJson \ "responses" \ "507" \ "schema" \ "$ref").as[JsString].value === "#/definitions/models.TestContent"
}
}
}
|
lynx44/play-swagger-reflect
|
play-swagger-stringent/src/test/scala/xyz/mattclifton/play/swagger/stringent/test/EndpointSpecBuilderSpec.scala
|
Scala
|
mit
| 24,507 |
package sma.digging
case class Digging(user: String, network: String, term: String, action: String) {
val key: String = s"${term}!${user}@${network}"
def mkString = key
}
case class DiggingReply()
case class BulkDigging(messages: Seq[Digging], version: Int) {
def apply() = messages
def mkString = messages.mkString(", ")
}
case class BulkDiggingReply()
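// Hedged sketch (not part of the original file): demonstrates the
// "term!user@network" key format encoded by Digging.key. Values are illustrative.
object DiggingExample {
  def main(args: Array[String]): Unit = {
    val d = Digging(user = "alice", network = "twitter", term = "scala", action = "follow")
    println(d.mkString) // scala!alice@twitter
    val bulk = BulkDigging(Seq(d, d.copy(term = "akka")), version = 1)
    println(bulk.mkString) // case-class toString of each message, comma-separated
  }
}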
|
eduardo-lago-aguilar/sma
|
src/main/scala/sma/digging/Digging.scala
|
Scala
|
mit
| 370 |
/**
* Copyright 2015 Zaradai
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.zaradai.lattrac.store
trait Observer[Subject] {
def onUpdate(subject: Subject)
}
trait Observable[Subject] {
private var observers: List[Observer[Subject]] = Nil
def addObserver(observer: Observer[Subject]): Unit = observers = observer :: observers
def removeObserver(observer: Observer[Subject]): Unit = observers = observers diff List(observer)
def notify(subject: Subject) = observers.foreach(_.onUpdate(subject))
}
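// Hedged usage sketch (not part of the original file): a minimal subject mixing
// in Observable, with one printing Observer. TradeStored and TradeLog are
// illustrative names, not types from this project.
object ObservableExample {
  final case class TradeStored(id: Long)

  class TradeLog extends Observable[TradeStored] {
    def store(id: Long): Unit = notify(TradeStored(id)) // fan out after "storing"
  }

  def main(args: Array[String]): Unit = {
    val log = new TradeLog
    val printer = new Observer[TradeStored] {
      def onUpdate(subject: TradeStored): Unit = println(s"stored: ${subject.id}")
    }
    log.addObserver(printer)
    log.store(42L) // prints "stored: 42"
    log.removeObserver(printer)
    log.store(43L) // no observers left, nothing printed
  }
}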
|
zaradai/lattrac
|
src/main/scala/com/zaradai/lattrac/store/Observable.scala
|
Scala
|
apache-2.0
| 1,034 |
package com.themillhousegroup.scoup
import org.jsoup.{ Connection, Jsoup }
import org.jsoup.nodes.Document
import scala.collection.JavaConverters._
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
import com.themillhousegroup.scoup.options.ScoupOptions
import org.jsoup.Connection.{ Response, Method }
import scala.collection.mutable
import java.net.URL
object Scoup extends Scoup(new RealJsoup(), ScoupOptions()) {}
/**
* Instantiate a Scoup of your own if you want to set custom
* options to be applied to all of its operations
*/
class Scoup(impl: JSoupProvider = new RealJsoup(), scoupOptions: ScoupOptions = ScoupOptions()) {
private def basicJsoup(url: String, options: ScoupOptions, withCookies: Map[String, String], method: Method, data: Map[String, String]): Connection = {
impl
.connect(url)
.userAgent(options.userAgent)
.timeout(options.timeout.toMillis.toInt)
.cookies(withCookies.asJava)
.ignoreContentType(options.ignoreContentType)
.followRedirects(options.followRedirects)
.ignoreHttpErrors(options.ignoreHttpErrors)
.data(data.asJava)
.method(method)
}
private def executeAsync(url: String, options: ScoupOptions, withCookies: Map[String, String], method: Method = Method.GET, data: Map[String, String] = Map()): Future[Response] = {
Future(basicJsoup(url, options, withCookies, method, data).execute)
}
/** Perform a GET on the URL, parsing the resulting Document */
def parse(url: String, options: ScoupOptions = scoupOptions, withCookies: Map[String, String] = Map()): Future[Document] = {
executeAsync(url, options, withCookies).map(_.parse)
}
/** Perform a GET on the URL, parsing the resulting Document and any cookies into the returned tuple */
def parseWithCookies(url: String, options: ScoupOptions = scoupOptions, withCookies: Map[String, String] = Map()): Future[(Document, Map[String, String])] = {
executeAsync(url, options, withCookies).map { resp =>
(resp.parse, resp.cookies.asScala.toMap)
}
}
/** Perform a POST on the URL, parsing the resulting Document */
def parsePost(url: String, data: Map[String, String], options: ScoupOptions = scoupOptions, withCookies: Map[String, String] = Map()): Future[Document] = {
executeAsync(url, options, withCookies, Method.POST, data).map(_.parse)
}
/** Perform a POST on the URL, parsing the resulting Document and any cookies into the returned tuple */
def parsePostWithCookies(url: String, data: Map[String, String], options: ScoupOptions = scoupOptions, withCookies: Map[String, String] = Map()): Future[(Document, Map[String, String])] = {
executeAsync(url, options, withCookies, Method.POST, data).map { resp =>
(resp.parse, resp.cookies.asScala.toMap)
}
}
/** Perform a GET on the URL, and return the response body. I.E. you are just using Scoup/JSoup as a HTTP client :-) */
def get(url: String, options: ScoupOptions = scoupOptions, withCookies: Map[String, String] = Map()): Future[String] = {
val acceptNonHtmlOptions = options.copy(ignoreContentType = true)
executeAsync(url, acceptNonHtmlOptions, withCookies).map(_.body)
}
/** Perform a POST on the URL, and return the response body. I.E. you are just using Scoup/JSoup as a HTTP client :-) */
def post(url: String, data: Map[String, String], options: ScoupOptions = scoupOptions, withCookies: Map[String, String] = Map()): Future[String] = {
val acceptNonHtmlOptions = options.copy(ignoreContentType = true)
executeAsync(url, acceptNonHtmlOptions, withCookies, Method.POST, data).map(_.body)
}
def parseHTML(html: String): Document = {
impl.parse(html)
}
}
/** Indirection to allow JSoup's static API to be mocked */
private[scoup] trait JSoupProvider {
def connect(url: String): Connection
def parse(html: String): Document
}
private[scoup] class RealJsoup extends JSoupProvider {
def connect(url: String): Connection = Jsoup.connect(url)
def parse(html: String): Document = Jsoup.parse(html)
}
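// Hedged usage sketch (not part of the original file): a Scoup instance with
// custom options applied to all of its operations, per the class comment above.
// It assumes ScoupOptions exposes a `timeout` parameter (consistent with how
// basicJsoup reads it); the URL and durations are illustrative, and Await is
// used only to keep the example short.
object ScoupExample {
  import scala.concurrent.Await
  import scala.concurrent.duration._

  def main(args: Array[String]): Unit = {
    val patient = new Scoup(scoupOptions = ScoupOptions(timeout = 30.seconds))
    val doc = Await.result(patient.parse("https://example.com"), 35.seconds)
    println(doc.title)
  }
}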
|
themillhousegroup/scoup
|
src/main/scala/com/themillhousegroup/scoup/Scoup.scala
|
Scala
|
mit
| 4,065 |
package com.github.saurfang.parquet.proto
import com.google.protobuf.GeneratedMessage
import org.apache.hadoop.mapreduce.{InputSplit, RecordReader, TaskAttemptContext}
import org.apache.parquet.hadoop.{ParquetInputFormat, ParquetRecordReader}
class ProtoMessageParquetInputFormat[T <: GeneratedMessage] extends ParquetInputFormat[T](classOf[ProtoLISTReadSupport[T]]) {
override def createRecordReader(inputSplit: InputSplit, taskAttemptContext: TaskAttemptContext): RecordReader[Void, T] = {
val reader = super.createRecordReader(inputSplit, taskAttemptContext).asInstanceOf[ParquetRecordReader[_ <: GeneratedMessage.Builder[_]]]
new MessageRecordReader(reader)
}
}
private[proto] class MessageRecordReader[T <: GeneratedMessage](reader: ParquetRecordReader[_ <: GeneratedMessage.Builder[_]]) extends RecordReader[Void, T] {
override def initialize(split: InputSplit, context: TaskAttemptContext): Unit = reader.initialize(split, context)
override def getProgress: Float = reader.getProgress
override def nextKeyValue(): Boolean = reader.nextKeyValue()
override def getCurrentValue: T = reader.getCurrentValue.build.asInstanceOf[T]
override def getCurrentKey: Void = reader.getCurrentKey
override def close(): Unit = reader.close()
}
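// Hedged wiring sketch (not part of the original file): registering this input
// format on a Hadoop MapReduce job. The GeneratedMessage type argument keeps the
// sketch compilable; a real job would name a concrete generated protobuf class.
// The job name and input path are illustrative.
object ProtoParquetJobExample {
  import org.apache.hadoop.conf.Configuration
  import org.apache.hadoop.fs.Path
  import org.apache.hadoop.mapreduce.Job
  import org.apache.hadoop.mapreduce.lib.input.FileInputFormat

  def configure(input: String): Job = {
    val job = Job.getInstance(new Configuration(), "proto-parquet-read")
    job.setInputFormatClass(classOf[ProtoMessageParquetInputFormat[GeneratedMessage]])
    FileInputFormat.addInputPath(job, new Path(input))
    job
  }
}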
|
saurfang/sparksql-protobuf
|
src/main/scala/com/github/saurfang/parquet/proto/ProtoMessageParquetInputFormat.scala
|
Scala
|
apache-2.0
| 1,267 |
package org.monarchinitiative.dosdp.cli
import caseapp._
import org.apache.jena.sys.JenaSystem
import org.monarchinitiative.dosdp.DOSDP
import scribe._
import scribe.filter._
import zio._
object Main extends ZCommandApp[Config] {
override def appName: String = "dosdp-tools"
override def progName: String = "dosdp-tools"
override def run(config: Config, args: RemainingArgs): ZIO[ZEnv, Nothing, ExitCode] =
ZIO.effectTotal(JenaSystem.init()) *>
ZIO.effectTotal(
scribe.Logger.root
.clearHandlers()
.clearModifiers()
.withModifier(select(packageName(DOSDP.getClass.getPackage.getName)).include(level >= Level.Info))
.withHandler(minimumLevel = Some(Level.Warn))
.replace()
) *>
config.run.exitCode
}
|
balhoff/dosdp-scala
|
src/main/scala/org/monarchinitiative/dosdp/cli/Main.scala
|
Scala
|
mit
| 803 |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.frontend.v2_3.ast.functions
import org.neo4j.cypher.internal.frontend.v2_3.ast.{Function, SimpleTypedFunction}
import org.neo4j.cypher.internal.frontend.v2_3.symbols._
case object Range extends Function with SimpleTypedFunction {
def name = "range"
val signatures = Vector(
Signature(argumentTypes = Vector(CTInteger, CTInteger), outputType = CTCollection(CTInteger)),
Signature(argumentTypes = Vector(CTInteger, CTInteger, CTInteger), outputType = CTCollection(CTInteger))
)
}
|
HuangLS/neo4j
|
community/cypher/frontend-2.3/src/main/scala/org/neo4j/cypher/internal/frontend/v2_3/ast/functions/Range.scala
|
Scala
|
apache-2.0
| 1,325 |
package org.jetbrains.plugins.scala.codeInspection.collections
import org.jetbrains.plugins.scala.codeInspection.InspectionBundle
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScExpression
/**
* @author Nikolay.Tropin
*/
class ZeroIndexToHeadInspection extends OperationOnCollectionInspection {
override def possibleSimplificationTypes: Array[SimplificationType] = Array(ZeroIndexToHead)
}
object ZeroIndexToHead extends SimplificationType() {
override def hint: String = InspectionBundle.message("replace.with.head")
override def getSimplification(expr: ScExpression): Option[Simplification] = {
expr match {
case qual`.apply`(literal("0")) if isSeq(qual) && !isIndexedSeq(qual) =>
Some(replace(expr).withText(invocationText(qual, "head")).highlightFrom(qual))
case _ => None
}
}
}
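// Hedged illustration (not part of the inspection source): the before/after this
// inspection suggests. IndexedSeq is excluded in the guard above because indexed
// access is already constant-time there.
object ZeroIndexToHeadExample {
  def main(args: Array[String]): Unit = {
    val xs = List(1, 2, 3)
    val flagged = xs(0)     // reported: apply(0) on a linear Seq
    val preferred = xs.head // suggested replacement
    assert(flagged == preferred)
  }
}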
|
ilinum/intellij-scala
|
src/org/jetbrains/plugins/scala/codeInspection/collections/ZeroIndexToHeadInspection.scala
|
Scala
|
apache-2.0
| 832 |
package com.twitter.zipkin.json
import com.twitter.zipkin.common.Annotation
case class JsonAnnotation(timestamp: Long, value: String, endpoint: Option[JsonEndpoint])
object JsonAnnotation extends (Annotation => JsonAnnotation) {
override def apply(a: Annotation) =
JsonAnnotation(a.timestamp, a.value, a.host.map(JsonService))
def invert(a: JsonAnnotation) =
Annotation(a.timestamp, a.value, a.endpoint.map(JsonService.invert))
}
|
betable/zipkin
|
zipkin-common/src/main/scala/com/twitter/zipkin/json/JsonAnnotation.scala
|
Scala
|
apache-2.0
| 446 |
/*
Copyright 2013 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.summingbird.storm.option
import com.twitter.summingbird.storm.StormMetric
import com.twitter.tormenta.spout.Metric
import java.io.Serializable
import org.apache.storm.metric.api.IMetric
/**
* Options used by the flatMapping stage of a storm topology.
*
* @author Oscar Boykin
* @author Sam Ritchie
* @author Ashu Singhal
*/
/**
* This workaround is necessary because val parameters can't be
* call-by-name. We pass a function so that the metrics aren't
* serialized. Beyond the storm IMetric not being serializable,
* passing a value also causes problems with the instance registered
* in the bolt being different from the one used in the summingbird
* job.
*/
object FlatMapStormMetrics {
def apply(metrics: => TraversableOnce[StormMetric[IMetric]]) = new FlatMapStormMetrics(() => metrics)
def unapply(metrics: FlatMapStormMetrics) = Some(metrics.metrics)
}
/**
* When a bolt is prepared, these metrics will be use by being called with the TopologyContext for the storm
* bolt.
*/
class FlatMapStormMetrics(val metrics: () => TraversableOnce[StormMetric[IMetric]])
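// Hedged usage sketch (not part of the original file): because `metrics` is
// call-by-name in apply, the collection below is rebuilt inside the bolt on each
// invocation instead of being serialized with the topology, which is exactly the
// workaround the comment above describes. The empty Seq stands in for real
// StormMetric instances.
object FlatMapMetricsExample {
  def freshMetrics: Seq[StormMetric[IMetric]] = Seq.empty // constructed lazily, per call
  val opts = FlatMapStormMetrics(freshMetrics)            // not evaluated here
}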
object SpoutStormMetrics {
def apply(metrics: => TraversableOnce[StormMetric[IMetric]]) = new SpoutStormMetrics(() => metrics)
def unapply(metrics: SpoutStormMetrics) = Some(metrics.metrics)
}
class SpoutStormMetrics(val metrics: () => TraversableOnce[StormMetric[IMetric]]) extends Serializable {
def toSpoutMetrics: () => TraversableOnce[Metric[IMetric]] =
{ () => metrics().map { x: StormMetric[IMetric] => Metric(x.name, x.metric, x.interval.inSeconds) } }
}
/**
* This signals that the storm bolts should use localOrShuffleGrouping, which means that if the downstream bolt
* has a task on the same local worker, the output will only go to those tasks. Otherwise, shuffling
* happens normally. This is important to understand as this can create hot spots in the topology.
*/
case class PreferLocalDependency(get: Boolean)
/**
* If this is set to true, this means that a bolt will ack a tuple as soon as it is received and processing begins;
* otherwise, the tuple will be acked when the bolt completes. Acking signals to storm that a tuple has been fully
* processed, so if a tuple is acked on entry and then there is a failure it will not be replayed per storm's
* normal replay mechanisms.
*/
case class AckOnEntry(get: Boolean)
/**
 * Maximum number of elements to execute per second for a given task
*/
case class MaxExecutePerSecond(lowerBound: Long, upperBound: Long, rampUptimeMS: Long) {
require(rampUptimeMS >= 0L, "Ramp up time must be greater than or equal to zero")
}
|
twitter/summingbird
|
summingbird-storm/src/main/scala/com/twitter/summingbird/storm/option/FlatMapOptions.scala
|
Scala
|
apache-2.0
| 3,175 |
package memnets.fx.games.wta
import memnets.fx._
import memnets.linalg.W
import memnets.model._
import memnets.models.neuro.swta.SoftWTAGrid
import scalafx.scene.paint.Color
import scala.collection.mutable.{ArrayBuffer, ListBuffer}
import scala.util.Random
trait BoardItem
object Board {
type BItems = List[BoardItem]
val unknownColor = Color.DeepPink.sat(0.6)
}
class Board(val rows: Int, val cols: Int, val tau: Double = 5.0, val defaultThresh: Double = 4.0)(
implicit val sys: DynamicSystem)
extends GridLike[Board.BItems]
with Logging {
import Board._
val grid = new SoftWTAGrid(rows, cols, tau = tau, unitViz = Viz.Skip)
val memAct = (x: Double) => if (x < 0.0) 0.0 else if (x > 1.0) 1.0 else x
val memTie = Param("mem", max = 1.0, init = 0.4)
object memGrid extends YGrid(rows, cols) {
override def create(r: Int, c: Int) = {
val m = Y("mem", decay = 0.0, tau = 30.0 * tau, threshold = 0.2)
m.out = memAct
grid(r, c) --> m tie = memTie
m.ui.color = Color.White
m.ui.skip()
m
}
}
object thresGrid extends YGrid(rows, cols) {
override def create(r: Int, c: Int) = {
// not adding thresh here (in Wall) so can see pattern in Source
val thres = Y("thres", decay = -1.0, act = Activation.Relu, tau = 10.0)
thres.ui.skip()
thres
}
}
// can override to customize...
/*
def createGridSource(grid : YGrid, loc : Loc) : GridSource = {
grid.skip()
grid.loc = loc
val gs = new GridSource(grid)
gs.imageHints = ImageHints(scaleX = 0.6, scaleY = 0.6)
sys.elements += gs
gs
}
*/
def createGridSource(grid: YGrid, loc: Loc): YGrid = {
// grid.skip()
grid.loc = loc
grid.hints = GridHints(scaleX = 0.6, scaleY = 0.6)
// sys.elements += gs
grid
}
val rowCtr = Loc().up(20)
val thresSource = createGridSource(thresGrid, rowCtr.left(310))
val gridSource = createGridSource(grid, rowCtr)
val memSource = createGridSource(memGrid, rowCtr.right(310))
val rowData = Array.fill[BItems](rows, cols) { Nil }
def apply(r: Int, c: Int): BItems = rowData(r)(c)
def update(r: Int, c: Int, bi: BoardItem): Unit = rowData(r)(c) = bi :: apply(r, c)
def randPicks(n: Int): Seq[(Int, Int)] = for (i <- 0 until n) yield randPick()
def randPick(): (Int, Int) = {
var res: Option[(Int, Int)] = None
while (res.isEmpty) {
val pick = (Random.nextInt(rows), Random.nextInt(cols))
if (apply(pick._1, pick._2).isEmpty)
res = Some(pick)
}
res.get
}
val combos = ArrayBuffer[Combo]()
}
trait ConveyorBase extends BoardItem {}
case class Start(r: Int, c: Int)(implicit board: Board) extends BoardItem {
board(r, c) = this
board.grid(r, c).threshold = -0.001 // start loc
}
case class WormHole(row: Int, col: Int)(implicit board: Board) extends BoardItem with Linkable {
board(row, col) = this
val start = board.grid(row, col)
start.ui.color = Color.web("#222")
def src: Y = start
}
case class WallBox(row: Int, col: Int, rows: Int, cols: Int, skipRow: Int = -1, skipCol: Int = -1)(
implicit board: Board) {
val units = ListBuffer[Wall]()
for {
r <- 0 until rows
c <- 0 until cols
} {
// one or the other here
if (r != skipRow && c != skipCol) {
if (r == 0 || r == rows - 1)
units += Wall(row + r, col + c)
else if (c == 0 || c == cols - 1)
units += Wall(row + r, col + c)
}
}
}
case class ConveyorBelt(row: Int, col: Int, length: Int, color: Color = Color.Crimson)(implicit board: Board)
extends ConveyorBase {
import board.sys
val units = (0 until length).map { i =>
val c = col + i
board(row, c) = this
board.grid(row, c)
}
chain(units, 1.3)
for (u <- units)
u.ui.color = color
}
case class ConveyorBox(row: Int, col: Int, rows: Int, cols: Int, color: Color = Color.Crimson)(implicit board: Board)
extends ConveyorBase
with Logging {
import board.sys
val units = ListBuffer[Y]()
logger.debug(s"ConveyorBox[r= $row, c= $col, rows= $rows, cols= $cols]")
private def create(r: Int, c: Int): Y = {
logger.debug(s"unit created: [$r, $c]")
board(r, c) = this
board.grid(r, c)
}
// top row
for (c <- 0 until cols)
units += create(row, col + c)
// right col
for (r <- 1 until rows)
units += create(row + r, col + cols - 1)
// bottom // skip guy above
for (c <- cols - 2 until -1 by -1)
units += create(row + rows - 1, col + c)
// left col // skip guy above + 1st row
for (r <- rows - 2 until 0 by -1)
units += create(row + r, col)
logger.debug("units size:" + units.length)
chain(units, 1.3)
for (u <- units)
u.ui.color = color
}
case class Wall(row: Int, col: Int)(implicit board: Board) extends BoardItem with Linkable {
board(row, col) = this
val thres = board.thresGrid(row, col)
thres.threshold = -board.defaultThresh
val unit = board.grid(row, col)
thres --> unit w = -10.0
def src: Y = thres
}
case class Unknown(row: Int, col: Int)(implicit board: Board) extends BoardItem with Linkable {
board(row, col) = this
val unit = board.grid(row, col)
val mem = board.memGrid(row, col)
unit.ui.color = Board.unknownColor
mem.ui.color = Board.unknownColor
def src: Y = mem
}
case class Combo(prior: Y, ctl: Y)(implicit board: Board) extends BoardItem {
board.combos += this
private val entries = ListBuffer[(Y, Y, W)]()
def size = entries.length
def apply(i: Int): Y = entries(i)._2
def apply(y: Y): Y = apply(entries.indexWhere(_._1 == y))
def update(i: Int, v: Double): Unit = { entries(i)._2.update(v) }
def add(r: Int, c: Int): Unit = {
board(r, c) = this
val u = board.grid(r, c)
val m = board.memGrid(r, c)
u.ui.color = ctl.ui.color.get
m.ui.color = ctl.ui.color.get
val m2c = m --> ctl
val tuple = (u, m, m2c)
entries += tuple
val size = entries.length
for (link <- entries) link._3.w = 1.0 / size
ctl --> m w = -0.3 // turn off
}
}
case class ComboInput(combo: Combo, start: Int, msg: String = "", dur: Int = 300) extends Logging {
val indices = 0 until combo.size
def tick(t: Int): Unit = {
val t2 = t - start
if (t2 >= 0 && t2 <= dur) {
if (t2 == 0)
logger.debug(this.toString)
for (i <- indices) {
val t3 = t2 - i * 100
val amp = Math.max(0.0, t3 * 0.03)
combo(i) = if (amp > 1.0) 1.0 else amp
}
} else if (t2 == dur + 20) { // reset inputs shortly after the input window ends
for (i <- indices)
combo(i) = 0.0
}
}
override def toString: String = s"ComboInput[start=$start, msg= $msg]"
}
|
MemoryNetworks/memnets
|
fx/src/main/scala/memnets/fx/games/wta/Board.scala
|
Scala
|
apache-2.0
| 6,569 |
package org.jetbrains.plugins.scala.runner
import com.intellij.openapi.project.Project
import com.intellij.psi._
import com.intellij.psi.impl.light.LightElement
import com.intellij.psi.scope.PsiScopeProcessor
import com.intellij.psi.search.GlobalSearchScope
import com.intellij.util.IncorrectOperationException
import org.jetbrains.plugins.scala.ScalaLanguage
import org.jetbrains.plugins.scala.caches.ModTracker
import org.jetbrains.plugins.scala.extensions.PsiClassExt
import org.jetbrains.plugins.scala.lang.psi.adapters.PsiClassAdapter
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunction
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.ScParameter
import org.jetbrains.plugins.scala.lang.psi.impl.toplevel.PsiClassFake
import org.jetbrains.plugins.scala.lang.psi.stubs.index.ScalaIndexKeys
import org.jetbrains.plugins.scala.lang.psi.types.TypePresentationContext
import org.jetbrains.plugins.scala.macroAnnotations.CachedInUserData
import org.jetbrains.plugins.scala.runner.Scala3MainMethodSyntheticClass.MainMethodParameters.CustomParameter
import org.jetbrains.plugins.scala.runner.Scala3MainMethodSyntheticClass.{MainMethodParameters, extractNameFromFqn}
/**
* This class is only needed during application configuration verification
*
* @see [[com.intellij.execution.application.ApplicationConfiguration#checkConfiguration()]]
* @see [[com.intellij.execution.configurations.JavaRunConfigurationModule#checkClassName(java.lang.String, java.lang.String)]]
*/
private final class Scala3MainMethodSyntheticClassFinder(project: Project)
extends PsiElementFinder {
override def findClass(qualifiedName: String, scope: GlobalSearchScope): PsiClass = {
if (qualifiedName.isEmpty) return null
val project = scope.getProject
if (project == null) return null
findClass(qualifiedName, scope, project)
}
private def findClass(qualifiedName: String, scope: GlobalSearchScope, project: Project): Scala3MainMethodSyntheticClass = {
import ScalaIndexKeys.StubIndexKeyExt
val results = ScalaIndexKeys.ANNOTATED_MAIN_FUNCTION_BY_PKG_KEY.elements(qualifiedName, scope)(project)
if (results.nonEmpty) {
val function = results.head
syntheticClassForFunction(function, qualifiedName)
}
else null
}
@CachedInUserData(function, ModTracker.anyScalaPsiChange)
private def syntheticClassForFunction(function: ScFunction, qualifiedName: String): Scala3MainMethodSyntheticClass = {
val params = function.parameterList.params
val mainParams = if (isDefaultMainVarargs(params))
MainMethodParameters.Default
else {
val customParams = params.map(param => customParameter(param))
MainMethodParameters.Custom(customParams)
}
new Scala3MainMethodSyntheticClass(
PsiManager.getInstance(project),
// HACK: the file doesn't actually contain the function,
// but it's required during accessibility checks in application configuration validation
function.getContainingFile,
qualifiedName,
mainParams
)
}
private def isDefaultMainVarargs(params: Seq[ScParameter]): Boolean = {
if (params.size == 1) {
val param = params.head
param.isVarArgs && {
val typ = param.`type`()
typ.exists(_.extractClass.exists(_.qualifiedName == "java.lang.String"))
}
} else {
false
}
}
private def customParameter(param: ScParameter): CustomParameter = {
val typeText = param.`type`().fold(_ => "", _.presentableText(TypePresentationContext.emptyContext))
CustomParameter(param.name, typeText, param.isVarArgs)
}
// Not implemented because it isn't required during application configuration verification
override def findClasses(qualifiedName: String, scope: GlobalSearchScope): Array[PsiClass] = PsiClass.EMPTY_ARRAY
}
private final class Scala3MainMethodSyntheticClass(
psiManager: PsiManager,
containingFile: PsiFile,
qualifiedName: String,
val parameters: MainMethodParameters,
) extends LightElement(psiManager, ScalaLanguage.INSTANCE)
with PsiNameIdentifierOwner
with PsiClassAdapter
with PsiClassFake {
override val getName: String = extractNameFromFqn(qualifiedName)
override def getQualifiedName: String = qualifiedName
override def getText = ""
override def getNameIdentifier: PsiIdentifier = null
override def getContainingFile: PsiFile = containingFile
override def getContext: PsiFile = containingFile
override def toString = s"synthetic class for scala @main method: $qualifiedName"
override def setName(newName: String): PsiElement = throw new IncorrectOperationException("nonphysical element")
override def copy = throw new IncorrectOperationException("nonphysical element")
override def accept(v: PsiElementVisitor): Unit = throw new IncorrectOperationException("should not call")
override def processDeclarations(
processor: PsiScopeProcessor,
state: ResolveState,
lastParent: PsiElement,
place: PsiElement
): Boolean = {
// NOTE: we probably need add some fake psi file with all fake @main method classes declarations
// strictly speaking ScalaMainMethodSyntheticClass can't declare itself, but this solution works...
processor.execute(this, state)
false
}
}
private object Scala3MainMethodSyntheticClass {
sealed trait MainMethodParameters
object MainMethodParameters {
object Default extends MainMethodParameters // (args: String*)
case class Custom(parameterNames: Seq[CustomParameter]) extends MainMethodParameters
case class CustomParameter(name: String, typ: String, isVararg: Boolean)
}
private def extractNameFromFqn(qualifiedName: String): String =
qualifiedName.lastIndexOf('.') match {
case -1 => qualifiedName // in root package
case idx => qualifiedName.substring(idx + 1) // drop the package prefix and the dot
}
}
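// Hedged illustration (not part of the plugin source): the kind of Scala 3 input
// this finder resolves, shown as comments because the surrounding plugin code is
// Scala 2. A plain String-vararg signature maps to MainMethodParameters.Default;
// anything else becomes Custom with one CustomParameter per parameter. Names are
// illustrative.
//
//   @main def fetch(url: String, retries: Int): Unit = // -> Custom(url: String, retries: Int)
//     println(s"fetching $url ($retries retries)")
//
//   @main def run(args: String*): Unit = args.foreach(println) // -> Default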
|
JetBrains/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/scala/runner/ScalaMainMethodSyntheticClassFinder.scala
|
Scala
|
apache-2.0
| 5,840 |
package com.durooma.api.route
import akka.http.scaladsl.model.StatusCodes
import com.durooma.api.model.{Account, AccountBody}
object AccountResource extends CustomDirectives with JsonSupport {
val route = pathPrefix("account") {
authenticateToken { implicit session =>
pathEnd {
get {
complete(Account.all)
} ~
post {
entity(as[AccountBody]) { account =>
complete((StatusCodes.Created, Account.create(account)))
}
}
} ~
path(LongNumber) { id =>
get {
complete(Account.get(id))
} ~
put {
entity(as[AccountBody]) { account =>
completeDbMutation(Account.update(id, account))
}
} ~
delete {
completeDbMutation(Account.remove(id), StatusCodes.NoContent)
}
}
}
}
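// Illustrative requests handled by this route (paths relative to the service root):
//   GET    /account       -> list all accounts
//   POST   /account       -> create an account (201 Created)
//   GET    /account/<id>  -> fetch one account
//   PUT    /account/<id>  -> update an account
//   DELETE /account/<id>  -> remove an account (204 No Content)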
}
|
durooma/api
|
src/main/scala/com/durooma/api/route/AccountResource.scala
|
Scala
|
mit
| 873 |
package com.sksamuel.elastic4s
import com.sksamuel.elastic4s.analyzers.{StemmerTokenFilter, SnowballTokenFilter, ShingleTokenFilter, NGramTokenFilter, EdgeNGramTokenFilter, CommonGramsTokenFilter}
// a dumping ground for deprecated syntax, keeps the main file clear
trait DeprecatedElasticDsl {
@deprecated("use scoreSort, geoSort, fieldSort or scriptSort", "1.6.0")
case object by {
def score: ScoreSortDefinition = ElasticDsl.score.sort
def geo(field: String): GeoDistanceSortDefinition = ElasticDsl.geo sort field
def field(field: String): FieldSortDefinition = ElasticDsl.field.sort(field)
def script(script: String) = ElasticDsl.script.sort(script)
}
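// Illustrative migration, per the deprecation message above (the exact
// replacement signatures are assumptions, not verified against ElasticDsl):
//   by.score           ->  scoreSort
//   by.field("name")   ->  fieldSort("name")
//   by.geo("location") ->  geoSort("location")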
@deprecated("prefer the method commonGramsTokenFilter(\\"name\\")", "2.0.0")
case object commonGrams {
@deprecated("prefer the method commonGramsTokenFilter(\\"name\\")", "2.0.0")
def tokenfilter(name: String): CommonGramsTokenFilter = CommonGramsTokenFilter(name)
}
@deprecated("prefer the method edgeNGramTokenFilter(\\"name\\")", "2.0.0")
case object edgeNGram {
@deprecated("prefer the method edgeNGramTokenFilter(\\"name\\")", "2.0.0")
def tokenfilter(name: String): EdgeNGramTokenFilter = EdgeNGramTokenFilter(name)
}
@deprecated("prefer the method edgeNGramTokenFilter(\\"name\\") <-- note capitalization", "2.0.0")
def edgeNGramTokenfilter(name: String): EdgeNGramTokenFilter = EdgeNGramTokenFilter(name)
@deprecated("prefer the method ngramTokenFilter(\\"name\\")", "2.0.0")
case object ngram {
@deprecated("prefer the method ngramTokenFilter(\\"name\\")", "2.0.0")
def tokenfilter(name: String): NGramTokenFilter = NGramTokenFilter(name)
}
@deprecated("use optimizeIndex(index)", "1.6.2")
def optimize(indexes: String*): ForceMergeDefinition = new ForceMergeDefinition(indexes.toSeq)
@deprecated("prefer the method shingleTokenFilter(\\"name\\")", "2.0.0")
case object shingle {
@deprecated("prefer the method shingleTokenFilter(\\"name\\")", "2.0.0")
def tokenfilter(name: String): ShingleTokenFilter = ShingleTokenFilter(name)
}
@deprecated("prefer the method snowballTokenFilter(\\"name\\")", "2.0.0")
case object snowball {
@deprecated("prefer the method snowballTokenFilter(\\"name\\")", "2.0.0")
def tokenfilter(name: String): SnowballTokenFilter = SnowballTokenFilter(name)
}
@deprecated("use score sort, geo sort, field sort or script sort", "1.6.1")
case object sortby {
def score: ScoreSortDefinition = new ScoreSortDefinition
def geo(field: String): GeoDistanceSortDefinition = new GeoDistanceSortDefinition(field)
def field(field: String): FieldSortDefinition = new FieldSortDefinition(field)
def script(script: String): ScriptSortDefinition = new ScriptSortDefinition(script)
}
@deprecated("prefer the method stemmerTokenFilter(\\"name\\")", "2.0.0")
case object stemmer {
@deprecated("prefer the method stemmerTokenFilter(\\"name\\")", "2.0.0")
def tokenfilter(name: String): StemmerTokenFilter = StemmerTokenFilter(name)
}
}
|
k4200/elastic4s
|
elastic4s-core/src/main/scala/com/sksamuel/elastic4s/DeprecatedElasticDsl.scala
|
Scala
|
apache-2.0
| 3,027 |
package org.jetbrains.plugins.hocon
import scala.collection.JavaConversions
import scala.collection.convert.{DecorateAsJava, DecorateAsScala}
object JavaInterop extends DecorateAsJava with DecorateAsScala {
type JIterator[A] = java.util.Iterator[A]
type JIterable[A] = java.lang.Iterable[A]
type JCollection[A] = java.util.Collection[A]
type JList[A] = java.util.List[A]
type JArrayList[A] = java.util.ArrayList[A]
type JLinkedList[A] = java.util.LinkedList[A]
type JSet[A] = java.util.Set[A]
type JHashSet[A] = java.util.HashSet[A]
type JSortedSet[A] = java.util.SortedSet[A]
type JNavigableSet[A] = java.util.NavigableSet[A]
type JTreeSet[A] = java.util.TreeSet[A]
type JMap[K, V] = java.util.Map[K, V]
type JHashMap[K, V] = java.util.HashMap[K, V]
type JLinkedHashMap[K, V] = java.util.LinkedHashMap[K, V]
type JSortedMap[K, V] = java.util.SortedMap[K, V]
type JNavigableMap[K, V] = java.util.NavigableMap[K, V]
type JTreeMap[K, V] = java.util.TreeMap[K, V]
object JList {
def apply[A](values: A*): JList[A] =
JavaConversions.seqAsJavaList(values)
}
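// Example usage (illustrative): JList("a", "b") returns a java.util.List[String]
// backed by the given Scala varargs via JavaConversions.seqAsJavaList.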
}
|
ilinum/intellij-scala
|
src/org/jetbrains/plugins/hocon/JavaInterop.scala
|
Scala
|
apache-2.0
| 1,108 |
/*
* Copyright (C) 2012 Mikołaj Sochacki mikolajsochacki AT gmail.com
* This file is part of VRegister (Virtual Register)
* Apache License Version 2.0, January 2004 http://www.apache.org/licenses/
*/
package eu.brosbit.opos.snippet.page
import net.liftweb.http.js.JsCmds.SetHtml
import eu.brosbit.opos.lib.Formater
import scala.xml.{Unparsed}
import _root_.net.liftweb.util._
import net.liftweb.json.JsonDSL._
import net.liftweb.common._
import eu.brosbit.opos.model.page._
import eu.brosbit.opos.model.User
import _root_.net.liftweb.http.{S, SHtml}
import Helpers._
class PageSn extends FlashTileSn {
val user = User.currentUser
val isTeacher = if (user.isEmpty) false
else {
user.openOrThrowException("Niemożliwe").role.get match {
case "a" => true
case "n" => true
case "d" => true
case _ => false
}
}
val dep = S.param("id").openOr("0")
val pageDep = PageDepartment.find(dep).getOrElse(PageDepartment.create)
val what = S.param("w").getOrElse("n") //all news by default
//adding slide for department
def slide = {
val tile = appendTile("/page/" + dep)
"#slideImg [src]" #> pageDep.img &
"#slideInfo *" #> Unparsed(pageDep.info) &
"#flashTile" #> tile
}
def buildMenu() = {
val pathStart = "/page/" + dep + "?w="
val addArticle = if(isTeacher) <a href={"/editarticle/0?d=" + dep}
class="list-group-item">
<span class="glyphicon glyphicon-plus"></span> Dodaj artykuł</a>
else <span></span>
"a" #> (List(addArticle, <a href={pathStart + "n"} class="list-group-item"> Aktualności </a>) :::
ArticleHead.findAll(("news" -> false)~("departmentId" -> dep), "prior" -> 1).map(
art =>
<a href={pathStart + art._id.toString} class="list-group-item">{art.title}</a>
))
}
def switchContent() = {
what match {
case "n" => {
val newses = ArticleHead.findAll(("news" -> true)~("departmentId" -> dep), "_id" -> -1)
showNewses(newses)
}
case idArt:String => {
ArticleHead.find(idArt) match {
case Some(artH) => pageContent(artH)
case _ => S.redirectTo("/page/" + dep)
}
}
case _ => S.redirectTo("/page/" + dep)
}
}
def showNewses(newses: List[ArticleHead]) = {
val sizeP = 10
val page = S.param("p").getOrElse("1")
val pageInt = tryo(page.toInt).getOrElse(1)
val pages = newses.size / sizeP + (if (newses.size % sizeP > 0) 1 else 0)
val endNews = if (sizeP * pageInt > newses.size) newses.size else sizeP * pageInt
val beginNews = if (endNews - sizeP < 0) 0 else endNews - sizeP
val toShowNewses = newses.slice(beginNews, endNews)
"#articleCont" #> "" &
"#departmentInfo *" #> (if(newses.size > 0) newses.head.departmentName else "") &
".newsInfo" #> <div>
{toShowNewses.map(news => createPinBox(news))}
</div> &
"li" #> (1 to pages).map(p => {
<li><a href={"/page/" + dep + "?w=n&p=" + p.toString}
class={ (if (p == pageInt) "actualPage" else "")}>
{p.toString}
</a></li>
})
}
def showOneNews() = {
def create(id: String) = {
ArticleHead.find(id) match {
case Some(newsHead) => {
val contentOption = ArticleContent.find(newsHead.content)
<div class="pagebody">
{Unparsed(contentOption.getOrElse(ArticleContent.create).content)}
</div> ++
<div class="pageinfo">
{if (isOwner(newsHead.authorId)) {
<span class="edit">
<a href={"/editarticle/" + newsHead._id.toString} class="btn btn-info">
<span class="glyphicon glyphicon-pencil"></span>
Edytuj</a>
</span>
}
else <span></span>}<span class="btn btn-small btn-danger closeNewsButton" onclick="closeNews()">
<span class="glyphicon glyphicon-remove"></span>
Zamknij</span>
</div>
}
case _ => <div>Błąd - brak wybranej lekcji</div>
}
}
"#hiddenAjaxText" #> SHtml.ajaxText("", id => SetHtml("ajaxNews", create(id)))
}
private def isOwner(idFromArticle: Long): Boolean = {
User.currentUser match {
case Full(u) => idFromArticle == u.id.get || u.role.get == "a"
case _ => false
}
}
private def createPinBox(news: ArticleHead) = {
<div class="row pine-box">
<div class="col-md-3">
<img class="img-box featurette-image img-responsive" src={news.thumbnailLink} />
</div>
<div class="col-md-9 innerBox">
<h2>
{news.title}
</h2>
<div class="footPrint">
<span class="glyphicon glyphicon-user"></span>
<span class="fullname">
{news.authorName}
</span>
<span class="glyphicon glyphicon-calendar"></span>
<span class="date">
{Formater.formatDate(news._id.getDate)}
</span>
</div>
<div class="textBox">
<div class="introNews">
{Unparsed(news.introduction)}
</div>
<span class="btn btn-small btn-info" onclick={"return showNews('" + news._id + "', this)"}>Czytaj dalej</span>
</div>
</div>
<div style="clear:both;"></div>
</div>
}
private def pageContent(articleHead: ArticleHead) = {
val contentOption = ArticleContent.find(articleHead.content)
"#newsCont" #> "" &
"#depBody" #> <div id="pagecontent">
<h1>
{articleHead.title}
</h1>
<div id="pagebody">
{Unparsed(contentOption.getOrElse(ArticleContent.create).content)}
</div>
<hr/>
<p id="pageinfo">
<span class="fullname">
{articleHead.authorName}
</span>
<span class="date">
{Formater.formatTime(articleHead._id.getDate)}
</span>{if (isOwner(articleHead.authorId)) <span class="edit">
<a href={"/editarticle/" + articleHead._id.toString} class="btn btn-info">
<span class="glyphicon glyphicon-pencil"></span>Edytuj</a>
</span>
else <span></span>}
</p>
</div>
}
}
|
mikolajs/osp
|
src/main/scala/eu/brosbit/opos/snippet/page/PageSn.scala
|
Scala
|
agpl-3.0
| 6,286 |
package com.softwaremill.demo.step1
import akka.actor.ActorSystem
import org.json4s.JValue
import org.json4s.JsonAST.JNothing
import org.supler.Supler
import org.supler.field.ActionResult
import spray.http.MediaTypes
import spray.http.StatusCodes._
import spray.httpx.Json4sSupport
import spray.routing.{Route, SimpleRoutingApp}
/**
* - basic form definition
* - getting the form
* - displaying on the frontend
* - basic validation
*/
object ServerStep1 extends App with Json4sSupport with SimpleRoutingApp {
implicit val actorSystem = ActorSystem()
implicit val json4sFormats = org.json4s.DefaultFormats
import com.softwaremill.demo.step1.Forms._
import com.softwaremill.demo.step1.Instances._
var troll = aTroll
def getJson(route: Route) = get { respondWithMediaType(MediaTypes.`application/json`) { route } }
startServer(interface = "localhost", port = 8080) {
path("rest" / "form1.json") {
getJson {
complete {
trollForm(troll).generateJSON
}
}
} ~
pathPrefix("site") {
getFromResourceDirectory("")
} ~
path("") {
redirect("/site/index.html", Found)
}
}
println(s"Server starting... open http://localhost:8080")
}
|
softwaremill/supler-pres
|
src/main/scala/com/softwaremill/demo/step1/ServerStep1.scala
|
Scala
|
apache-2.0
| 1,222 |
package org.flowpaint.raster.tasks
import org.flowpaint.raster.tile.TileId
import java.lang.Object
import java.util.{HashSet, LinkedList}
import javax.swing.SwingUtilities
import java.util.concurrent.{TimeUnit, Executors}
/**
* Allows spawning render jobs for tiles on different threads, speeding up the application on multi-core
* processors, and allowing rendering to be done in the background.
*/
object TaskService {
private var listeners: List[TaskListener] = Nil
private val queue: LinkedList[Operation] = new LinkedList[Operation]()
private val tilesLeft: HashSet[TileId] = new HashSet[TileId]()
private var initialTilesLeft: Int = 0
private var currentOperation: Operation = null
private val lock = new Object()
// Create a new executor with thread number equal to available processors
private val taskExecutor = Executors.newFixedThreadPool(Runtime.getRuntime.availableProcessors())
/**
* Run an operation, splitting it into tiles, and processing each tile separately, using all available cores.
*/
def queueOperation(operation: Operation) {
require(operation != null)
if (operationRunning) {
lock synchronized {
queue.addFirst(operation)
}
}
else {
lock synchronized {
startOperation(operation)
}
}
}
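// Illustrative usage (a sketch; assumes an Operation implementation exposing
// the two members this service uses, affectedTiles and doOperation; the names
// dirtyTiles and renderTile are hypothetical):
//
//   TaskService.queueOperation(new Operation {
//     def affectedTiles: Set[TileId] = dirtyTiles
//     def doOperation(tileId: TileId) { renderTile(tileId) }
//   })
//
// Each affected tile is then processed in parallel on the executor above.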
def operationRunning: Boolean = {
lock synchronized {
currentOperation != null
}
}
def stopAll() {
// Shut down ongoing tasks, and wait for a while for them to finish / stop
taskExecutor.shutdownNow()
taskExecutor.awaitTermination(10, TimeUnit.MINUTES)
lock synchronized {
// Cleanup
queue.clear()
tilesLeft.clear()
val ongoingOp = currentOperation
currentOperation = null
initialTilesLeft = 0
// Notify listeners of stop
if (ongoingOp != null) {
SwingUtilities.invokeLater(new Runnable {
def run() {
listeners foreach {_.onFinished(ongoingOp)}
}
})
}
}
}
def addTaskListener(listener: TaskListener) {
listeners ::= listener
}
def removeTaskListener(listener: TaskListener) {
listeners = listeners.filterNot(_ == listener)
}
private def startOperation(operation: Operation) {
currentOperation = operation
// Keep track of the tiles that are part of the operation
tilesLeft.clear()
operation.affectedTiles foreach {ti => tilesLeft.add(ti)}
initialTilesLeft = tilesLeft.size()
// Notify any listeners, in the main event dispatch thread
SwingUtilities.invokeLater(new Runnable {
def run() {
listeners foreach {_.onStarted(operation)}
}
})
// Schedule calculation of the tiles
operation.affectedTiles foreach { tileId =>
taskExecutor.submit(Task(operation,tileId))
}
}
private def removeRemainingTileId(tileId: TileId) {
lock synchronized {
tilesLeft.remove(tileId)
if (tilesLeft.isEmpty) {
onOperationFinished()
}
else {
// Notify any listeners about progress, in the main event dispatch thread
val op = currentOperation
val progress = if (initialTilesLeft == 0) 1.0f else 1.0f - 1.0f * tilesLeft.size() / initialTilesLeft
SwingUtilities.invokeLater(new Runnable {
def run() {
listeners foreach {_.onProgress(op, progress)}
}
})
}
}
}
private def onOperationFinished() {
val completedOperation = currentOperation
currentOperation = null
// Notify any listeners, in the main event dispatch thread
SwingUtilities.invokeLater(new Runnable {
def run() {
listeners foreach {_.onFinished(completedOperation)}
}
})
// Start next operation if one is queued
if (!queue.isEmpty) {
startOperation(queue.removeLast())
}
}
private final case class Task(operation: Operation, tileId: TileId) extends Runnable {
def run() {
try {
operation.doOperation(tileId)
}
finally {
removeRemainingTileId(tileId)
}
}
}
}
|
zzorn/flowpaint
|
src/main/scala/org/flowpaint/raster/tasks/TaskService.scala
|
Scala
|
gpl-2.0
| 4,092 |
package com.twitter.finagle.http.exp
import com.twitter.finagle.{CancelledRequestException, Failure}
import com.twitter.finagle.context.{Contexts, RemoteInfo}
import com.twitter.finagle.transport.Transport
import com.twitter.finagle.util.DefaultTimer
import com.twitter.logging.{Level, Logger}
import com.twitter.util._
import java.util.concurrent.atomic.AtomicReference
private[finagle] object GenStreamingSerialServerDispatcher {
private val logger = Logger.get()
// Note: this is a slightly different Eof than the finagle-core version, but I don't think it matters
private val Eof = Future.exception(Failure("EOF"))
private val cancelled = new CancelledRequestException
private sealed trait DispatchState
private case object Idle extends DispatchState
private case object Running extends DispatchState
private case object Closing extends DispatchState
}
/**
* A generic version of
* [[com.twitter.finagle.dispatch.SerialServerDispatcher SerialServerDispatcher]],
* allowing the implementor to furnish custom dispatchers & handlers.
*/
private[finagle] abstract class GenStreamingSerialServerDispatcher[Req, Rep, In, Out](
trans: StreamTransport[In, Out])
extends Closable {
def this(trans: Transport[In, Out]) = this(new IdentityStreamTransport(trans))
import GenStreamingSerialServerDispatcher._
private[this] val state = new AtomicReference[DispatchState](Idle)
/**
* Dispatches a request. The request arrives from the transport paired with an
* `eos` (end-of-stream) promise (see [[Multi]]); `eos` must be fulfilled when
* the request is complete.
*
* For non-streaming requests, `eos.setDone()` should be called immediately,
* since the entire request is present. For streaming requests,
* `eos.setDone()` must be called at the end of the stream (in HTTP, this is
* on receipt of the last chunk). Refer to the implementation in
* [[com.twitter.finagle.http.codec.HttpServerDispatcher]].
*/
protected def dispatch(req: Out): Future[Rep]
protected def handle(rep: Rep): Future[Unit]
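// Illustrative subclass sketch (hypothetical names; `service` and the exact
// write call are assumptions, not part of this file). A minimal non-streaming
// implementation could look like:
//
//   protected def dispatch(req: Out): Future[Rep] = service(req)
//   protected def handle(rep: Rep): Future[Unit] = trans.write(rep)
//
// where the eos promise paired with each request is satisfied immediately,
// since the whole request was read at once.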
/**
* Only the dispatch loop can make state transitions to Idle and Running but close
* operations can transition the state to Closing. If the loop finds that the state
* has been transitioned from Idle -> Closing, it is the closer's job to close the
* transport. If the loop finds that the state has transitioned from Running -> Closing,
* it has been given a chance to drain the last connection and will ensure that the
* transport is closed.
*/
private[this] def loop(): Future[Unit] = {
trans
.read()
.flatMap(dispatchAndHandleFn)
.transform(continueLoopFn)
}
private[this] val handleFn: Rep => Future[Unit] = handle(_)
// Dispatches and handles a message from the transport or closes down if necessary
private[this] val dispatchAndHandleFn: Multi[Out] => Future[Unit] = {
case Multi(req, eos) =>
if (state.compareAndSet(Idle, Running)) {
val save = Local.save()
val dispatched = try {
Contexts.local.let(RemoteInfo.Upstream.AddressCtx, trans.context.remoteAddress) {
val peerCertificates = trans.context.sslSessionInfo.peerCertificates
if (peerCertificates.isEmpty) dispatch(req)
else
Contexts.local.let(Transport.peerCertCtx, peerCertificates.head) {
dispatch(req)
}
}
} finally Local.restore(save)
val handled = dispatched.flatMap(handleFn)
// This version of `Future.join` doesn't collect the values from the Futures, but
// since they are both Future[Unit], we know what the result is and can avoid the
// overhead of collecting two Units just to throw them away via another flatMap.
Future.join(handled :: eos :: Nil)
} else {
// must have transitioned from Idle to Closing, by someone else who is
// responsible for closing the transport
val st = state.get
if (st == Closing) Eof
else {
// Something really bad happened. Shutdown and log as loudly as possible.
trans.close()
val msg = s"Dispatch loop found in illegal state: $st"
val ex = new IllegalStateException(msg)
logger.error(ex, msg)
Future.exception(ex)
}
}
}
// Checks the state after a dispatch and continues or shuts down the transport if necessary
private[this] val continueLoopFn: Try[Unit] => Future[Unit] = { res =>
if (res.isReturn && state.compareAndSet(Running, Idle)) loop()
else {
// The loop has been canceled and we have been given the opportunity to drain so
// we need to close the transport.
// Note: We don't sequence the transport.close() Future because we don't care to wait
// for it and also don't want to clobber the result of the loop.
if (logger.isLoggable(Level.TRACE)) {
if (res.isThrow) {
logger.trace(res.throwable, s"closing $trans due to read error")
} else {
logger.trace(
s"closing $trans due to status.cas failure, state is ${state.get()}, expect Running"
)
}
}
trans.close()
Future.const(res)
}
}
// Clear all locals to start the loop; we want a clean slate.
private[this] val looping = Local.letClear { loop() }
trans.onClose.ensure {
state.set(Closing)
looping.raise(cancelled)
}
/** Exposed for testing */
protected[exp] def isClosing: Boolean = state.get() == Closing
/** Exposed for testing */
private[exp] def timer: Timer = DefaultTimer
// Note: this is racy, but that's inherent in draining (without
// protocol support). Presumably, half-closing a TCP connection is
// also possible.
def close(deadline: Time): Future[Unit] = {
// What to do next depends on the state of the dispatcher:
// - Idle: we can close the transport immediately.
// - Running: we need to allow time to drain. Set a timer to ensure it closes by the deadline
// - Closing: close has already been called or the transport closed: return the trans.onClose future.
state.getAndSet(Closing) match {
case Idle => trans.close(deadline)
case Running =>
trans.onClose.by(timer, deadline).onFailure { _ =>
trans.close(deadline) // The dispatcher took too long, ask the transport to close
}
case Closing => () // No action required.
}
trans.onClose.unit
}
}
|
luciferous/finagle
|
finagle-base-http/src/main/scala/com/twitter/finagle/http/exp/GenStreamingSerialServerDispatcher.scala
|
Scala
|
apache-2.0
| 6,475 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.kubernetes
import io.fabric8.kubernetes.api.model.{Container, Pod}
private[spark] case class PodWithDetachedInitContainer(
pod: Pod,
initContainer: Container,
mainContainer: Container)
|
kimoonkim/spark
|
resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/PodWithDetachedInitContainer.scala
|
Scala
|
apache-2.0
| 1,032 |
/***********************************************************************
* Copyright (c) 2013-2019 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.geojson.query
import org.geotools.filter.text.ecql.ECQL
import org.junit.runner.RunWith
import org.locationtech.geomesa.features.kryo.json.JsonPathParser.PathAttribute
import org.locationtech.geomesa.geojson.GeoMesaIndexPropertyTransformer
import org.locationtech.geomesa.geojson.query.GeoJsonQuery._
import org.locationtech.geomesa.utils.text.WKTUtils
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class GeoJsonQueryTest extends Specification {
"GeoJsonQuery" should {
"parse json predicates" in {
GeoJsonQuery("""{"status":"A"}""") mustEqual Equals("status", "A")
GeoJsonQuery("""{"status":"A","age":{"$lt":30}}""") mustEqual
And(Equals("status", "A"), LessThan("age", 30, inclusive = false))
GeoJsonQuery("""{"$or":[{"status":"A"},{"age":{"$lte":30}}]}""") mustEqual
Or(Equals("status", "A"), LessThan("age", 30, inclusive = true))
GeoJsonQuery("""{"loc":{"$within":{"$geometry":{"type":"Polygon","coordinates":[[[0,0],[3,6],[6,1],[0,0]]]}}}}""") mustEqual
Within("loc", WKTUtils.read("POLYGON ((0 0, 3 6, 6 1, 0 0))"))
GeoJsonQuery("""{"loc":{"$bbox":[-180,-90.0,180,90.0]}}""") mustEqual
Bbox("loc", -180.0, -90.0, 180.0, 90.0)
}
"unparse json predicates" in {
val queries = Seq(
"""{"status":"A"}""",
"""{"status":"A","age":{"$lt":30}}""",
"""{"$or":[{"status":"A"},{"age":{"$lte":30}}]}""",
"""{"loc":{"$within":{"$geometry":{"type":"Polygon","coordinates":[[[0.1,0.1],[3.1,6.1],[6.1,1.1],[0.1,0.1]]]}}}}""",
"""{"loc":{"$bbox":[-180.0,-90.0,180.0,90.0]}}"""
)
forall(queries) { q => GeoJsonQuery(q).toString mustEqual q }
}
"apply" in {
val geom = WKTUtils.read("POINT (10 10)")
GeoJsonQuery.Include mustEqual GeoJsonQuery.Include
GeoJsonQuery.LessThan("age", 30, inclusive = false) mustEqual LessThan("age", 30, inclusive = false)
GeoJsonQuery.GreaterThan("age", 30, inclusive = false) mustEqual GreaterThan("age", 30, inclusive = false)
GeoJsonQuery.Bbox(-10, -20, 10, 20) mustEqual Bbox(GeoJsonQuery.defaultGeom, -10, -20, 10, 20)
GeoJsonQuery.Contains(geom) mustEqual Contains(GeoJsonQuery.defaultGeom, geom)
GeoJsonQuery.Within(geom) mustEqual Within(GeoJsonQuery.defaultGeom, geom)
GeoJsonQuery.Intersects(geom) mustEqual Intersects(GeoJsonQuery.defaultGeom, geom)
}
"translate to CQL" in {
ECQL.toCQL(GeoJsonQuery("""{"id":"foo"}""").toFilter(new GeoMesaIndexPropertyTransformer(None, None))) mustEqual """"$.json.id" = 'foo'"""
ECQL.toCQL(GeoJsonQuery("""{"id":"foo"}""").toFilter(new GeoMesaIndexPropertyTransformer(Some(Seq(PathAttribute("id"))), None))) mustEqual "IN ('foo')"
ECQL.toCQL(GeoJsonQuery("""{"loc":{"$bbox":[-180,-90.0,180,90.0]}}""").toFilter(new GeoMesaIndexPropertyTransformer(None, None))) mustEqual
"BBOX($.json.loc, -180.0,-90.0,180.0,90.0)" // TODO this won't work with non-default geoms due to CQL parsing...
ECQL.toCQL(GeoJsonQuery("""{"geometry":{"$bbox":[-180,-90.0,180,90.0]}}""").toFilter(new GeoMesaIndexPropertyTransformer(None, None))) mustEqual
"BBOX(geom, -180.0,-90.0,180.0,90.0)"
}
}
}
|
elahrvivaz/geomesa
|
geomesa-geojson/geomesa-geojson-api/src/test/scala/org/locationtech/geomesa/geojson/query/GeoJsonQueryTest.scala
|
Scala
|
apache-2.0
| 3,748 |
/***
* Copyright 2018 Rackspace US, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rackspace.com.papi.components.checker.wadl
import com.rackspace.com.papi.components.checker.{LogAssertions, Config}
import org.apache.logging.log4j.Level
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class WADLCheckerTenantFailSpec extends BaseCheckerSpec with LogAssertions {
//
// Namespaces
//
register ("xsd", "http://www.w3.org/2001/XMLSchema")
register ("wadl","http://wadl.dev.java.net/2009/02")
register ("chk","http://www.rackspace.com/repose/wadl/checker")
//
// Custom Configs
//
val maskConfig = {
val c = new Config()
c.enableRaxRolesExtension = true
c.maskRaxRoles403 = true
c.checkPlainParams = true
c
}
val raxRolesConfig = {
val c = new Config()
c.enableRaxRolesExtension = true
c.maskRaxRoles403 = false
c.checkPlainParams = true
c
}
feature("The WADLCheckerBuilder identitifes errors related to tenanted roles and fails") {
info("As a delveloper")
info("I want to catch errors with tenanted roles early in the loading of a WADL")
info("so that I can more easly debug a WADL, and anomolies to appear in production")
//
// The two scenarios below are a stop gap, while we implement
// support for mask rax:roles.
//
scenario ("Given a transform with Mask Rax-Roles enabled") {
Given("a WADL which leverages rax:roles")
val inWADL = <application xmlns="http://wadl.dev.java.net/2009/02"
xmlns:rax="http://docs.rackspace.com/api"
xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<grammars/>
<resources base="https://test.api.openstack.com">
<resource path="path/to/my/{id}/resource/{stepType}" rax:roles="admin/{id}">
<param name="id" style="template" type="xsd:string"/>
<param name="stepType" style="template" type="xsd:string"/>
<method href="#getMethod" />
</resource>
</resources>
<method id="getMethod" name="GET">
<response status="200 203"/>
</method>
</application>
When("The WADL is translated")
val checkerLog = log(Level.ERROR) {
intercept[WADLException] {
val checker = builder.build(inWADL, maskConfig)
println (checker) // Should never print!
}
}
Then ("There should be an error detailing that rax:roles mask is currently not supported")
assert(checkerLog,"not implemented")
}
scenario ("Given a transform with Mask Rax-Roles enabled (no tenant match)") {
Given("a WADL which leverages rax:roles mask when there is no tenant match")
val inWADL = <application xmlns="http://wadl.dev.java.net/2009/02"
xmlns:rax="http://docs.rackspace.com/api"
xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<grammars/>
<resources base="https://test.api.openstack.com">
<resource path="path/to/my/{id}/resource/{stepType}" rax:roles="admin/{foo}">
<param name="id" style="template" type="xsd:string"/>
<param name="stepType" style="template" type="xsd:string"/>
<method href="#getMethod" />
</resource>
</resources>
<method id="getMethod" name="GET">
<response status="200 203"/>
</method>
</application>
When("The WADL is translated")
val checkerLog = log(Level.ERROR) {
intercept[WADLException] {
val checker = builder.build(inWADL, maskConfig)
println (checker) // Should never print!
}
}
Then ("There should be an error detailing that rax:roles mask is currently not supported")
assert(checkerLog,"not implemented")
}
scenario ("Given a transform with Rax-Roles on a teant but missing the tenant parameter") {
Given("a WADL which leverages rax:roles when there is no tenant match")
val inWADL = <application xmlns="http://wadl.dev.java.net/2009/02"
xmlns:rax="http://docs.rackspace.com/api"
xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<grammars/>
<resources base="https://test.api.openstack.com">
<resource path="path/to/my/{id}/resource/{stepType}" rax:roles="admin/{foo}">
<param name="id" style="template" type="xsd:string"/>
<param name="stepType" style="template" type="xsd:string"/>
<method href="#getMethod" />
</resource>
</resources>
<method id="getMethod" name="GET">
<response status="200 203"/>
</method>
</application>
When("The WADL is translated")
val checkerLog = log(Level.ERROR) {
intercept[WADLException] {
val checker = builder.build(inWADL, raxRolesConfig)
println (checker) // Should never print!
}
}
Then ("There should be an error detailing that rax:roles mask is currently not supported")
assert(checkerLog, "no defined param named 'foo'")
}
scenario ("Given a transform with Rax-Roles on a teant but mismatch in case of tenant parameter (uri)") {
Given("a WADL which leverages rax:roles when there is no tenant match")
val inWADL = <application xmlns="http://wadl.dev.java.net/2009/02"
xmlns:rax="http://docs.rackspace.com/api"
xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<grammars/>
<resources base="https://test.api.openstack.com">
<resource path="path/to/my/{id}/resource/{stepType}" rax:roles="admin/{ID}">
<param name="id" style="template" type="xsd:string"/>
<param name="stepType" style="template" type="xsd:string"/>
<method href="#getMethod" />
</resource>
</resources>
<method id="getMethod" name="GET">
<response status="200 203"/>
</method>
</application>
When("The WADL is translated")
val checkerLog = log(Level.ERROR) {
intercept[WADLException] {
val checker = builder.build(inWADL, raxRolesConfig)
println (checker) // Should never print!
}
}
Then ("There should be an error detailing that rax:roles mask is currently not supported")
assert(checkerLog, "no defined param named 'ID'")
}
scenario ("Given a transform with Rax-Roles on a teant but mismatch in case of tenant parameter (xpath)") {
Given("a WADL which leverages rax:roles when there is no tenant match")
val inWADL = <application xmlns="http://wadl.dev.java.net/2009/02"
xmlns:rax="http://docs.rackspace.com/api"
xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<grammars/>
<resources base="https://test.api.openstack.com">
<resource path="path/to/my/resource/{stepType}" rax:roles="admin/{ID}">
<param name="stepType" style="template" type="xsd:string"/>
<method href="#postMethod" />
</resource>
</resources>
<method id="postMethod" name="POST">
<request>
<representation mediaType="application/xml">
<param name="id" style="plain" path="//@id" required="true"/>
</representation>
</request>
<response status="200 203"/>
</method>
</application>
When("The WADL is translated")
val checkerLog = log(Level.ERROR) {
intercept[WADLException] {
val checker = builder.build(inWADL, raxRolesConfig)
println (checker) // Should never print!
}
}
Then ("There should be an error detailing that rax:roles mask is currently not supported")
assert(checkerLog, "no defined param named 'ID'")
}
scenario ("Given a transform with Rax-Roles on a teant but mismatch in case of tenant parameter (xpath-json)") {
Given("a WADL which leverages rax:roles when there is no tenant match")
val inWADL = <application xmlns="http://wadl.dev.java.net/2009/02"
xmlns:rax="http://docs.rackspace.com/api"
xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<grammars/>
<resources base="https://test.api.openstack.com">
<resource path="path/to/my/resource/{stepType}" rax:roles="admin/{ID}">
<param name="stepType" style="template" type="xsd:string"/>
<method href="#postMethod" />
</resource>
</resources>
<method id="postMethod" name="POST">
<request>
<representation mediaType="application/json">
<param name="id" style="plain" path="$body(@id)" required="true"/>
</representation>
</request>
<response status="200 203"/>
</method>
</application>
When("The WADL is translated")
val checkerLog = log(Level.ERROR) {
intercept[WADLException] {
val checker = builder.build(inWADL, raxRolesConfig)
println (checker) // Should never print!
}
}
Then ("There should be an error detailing that rax:roles mask is currently not supported")
assert(checkerLog, "no defined param named 'ID'")
}
scenario ("Given a transform with Rax-Roles on a teant but mismatch in case of tenant parameter (header)") {
Given("a WADL which leverages rax:roles when there is no tenant match")
val inWADL = <application xmlns="http://wadl.dev.java.net/2009/02"
xmlns:rax="http://docs.rackspace.com/api"
xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<grammars/>
<resources base="https://test.api.openstack.com">
<resource path="path/to/my/resource/{stepType}" rax:roles="admin/{ID}">
<param name="id" style="header" repeating="true" required="true"/>
<param name="stepType" style="template" type="xsd:string"/>
<method href="#postMethod" />
</resource>
</resources>
<method id="postMethod" name="POST">
<request>
<representation mediaType="application/json">
</representation>
</request>
<response status="200 203"/>
</method>
</application>
When("The WADL is translated")
val checkerLog = log(Level.ERROR) {
intercept[WADLException] {
val checker = builder.build(inWADL, raxRolesConfig)
println (checker) // Should never print!
}
}
Then ("There should be an error detailing that rax:roles mask is currently not supported")
assert(checkerLog, "no defined param named 'ID'")
}
scenario ("Given a transform with Rax-Roles on a teant but mismatch in case of tenant parameter (header any)") {
Given("a WADL which leverages rax:roles when there is no tenant match")
val inWADL = <application xmlns="http://wadl.dev.java.net/2009/02"
xmlns:rax="http://docs.rackspace.com/api"
xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<grammars/>
<resources base="https://test.api.openstack.com">
<resource path="path/to/my/resource/{stepType}" rax:roles="admin/{ID}">
<param name="id" style="header" repeating="true" required="true" rax:anyMatch="true" fixed="foo"/>
<param name="id" style="header" repeating="true" required="true" rax:anyMatch="true" fixed="bar"/>
<param name="stepType" style="template" type="xsd:string"/>
<method href="#postMethod" />
</resource>
</resources>
<method id="postMethod" name="POST">
<request>
<representation mediaType="application/json">
</representation>
</request>
<response status="200 203"/>
</method>
</application>
When("The WADL is translated")
val checkerLog = log(Level.ERROR) {
intercept[WADLException] {
val checker = builder.build(inWADL, raxRolesConfig)
println (checker) // Should never print!
}
}
Then ("There should be an error detailing that rax:roles mask is currently not supported")
assert(checkerLog, "no defined param named 'ID'")
}
scenario ("Given a transform with Rax-Roles on a teant but mismatch in case of tenant parameter (header any, first match)") {
Given("a WADL which leverages rax:roles when there is no tenant match")
val inWADL = <application xmlns="http://wadl.dev.java.net/2009/02"
xmlns:rax="http://docs.rackspace.com/api"
xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<grammars/>
<resources base="https://test.api.openstack.com">
<resource path="path/to/my/resource/{stepType}" rax:roles="admin/{ID}">
<param name="id" style="header" repeating="true" required="true" rax:anyMatch="true" fixed="foo"/>
<param name="ID" style="header" repeating="true" required="true" rax:anyMatch="true" fixed="bar"/>
<param name="stepType" style="template" type="xsd:string"/>
<method href="#postMethod" />
</resource>
</resources>
<method id="postMethod" name="POST">
<request>
<representation mediaType="application/json">
</representation>
</request>
<response status="200 203"/>
</method>
</application>
When("The WADL is translated")
val checkerLog = log(Level.ERROR) {
intercept[WADLException] {
val checker = builder.build(inWADL, raxRolesConfig)
println (checker) // Should never print!
}
}
Then ("There should be an error detailing that rax:roles mask is currently not supported")
assert(checkerLog, "no defined param named 'ID'")
}
scenario ("Given a transform with Rax-Roles on a teant but mismatch in case of tenant parameter (header all)") {
Given("a WADL which leverages rax:roles when there is no tenant match")
val inWADL = <application xmlns="http://wadl.dev.java.net/2009/02"
xmlns:rax="http://docs.rackspace.com/api"
xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<grammars/>
<resources base="https://test.api.openstack.com">
<resource path="path/to/my/resource/{stepType}" rax:roles="admin/{ID}">
<param name="id" style="header" repeating="true" required="true" fixed="foo"/>
<param name="id" style="header" repeating="true" required="true" fixed="bar"/>
<param name="stepType" style="template" type="xsd:string"/>
<method href="#postMethod" />
</resource>
</resources>
<method id="postMethod" name="POST">
<request>
<representation mediaType="application/json">
</representation>
</request>
<response status="200 203"/>
</method>
</application>
When("The WADL is translated")
val checkerLog = log(Level.ERROR) {
intercept[WADLException] {
val checker = builder.build(inWADL, raxRolesConfig)
println (checker) // Should never print!
}
}
Then ("There should be an error detailing that rax:roles mask is currently not supported")
assert(checkerLog, "no defined param named 'ID'")
}
scenario ("Given a transform with Rax-Roles on a teant but mismatch in case of tenant parameter (header all, first match)") {
Given("a WADL which leverages rax:roles when there is no tenant match")
val inWADL = <application xmlns="http://wadl.dev.java.net/2009/02"
xmlns:rax="http://docs.rackspace.com/api"
xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<grammars/>
<resources base="https://test.api.openstack.com">
<resource path="path/to/my/resource/{stepType}" rax:roles="admin/{ID}">
<param name="id" style="header" repeating="true" required="true" fixed="foo"/>
<param name="ID" style="header" repeating="true" required="true" fixed="bar"/>
<param name="stepType" style="template" type="xsd:string"/>
<method href="#postMethod" />
</resource>
</resources>
<method id="postMethod" name="POST">
<request>
<representation mediaType="application/json">
</representation>
</request>
<response status="200 203"/>
</method>
</application>
When("The WADL is translated")
val checkerLog = log(Level.ERROR) {
intercept[WADLException] {
val checker = builder.build(inWADL, raxRolesConfig)
println (checker) // Should never print!
}
}
Then ("There should be an error detailing that rax:roles mask is currently not supported")
assert(checkerLog, "no defined param named 'ID'")
}
scenario ("Given a transform with Rax-Roles on a teant but mismatch in case of tenant parameter (header single)") {
Given("a WADL which leverages rax:roles when there is no tenant match")
val inWADL = <application xmlns="http://wadl.dev.java.net/2009/02"
xmlns:rax="http://docs.rackspace.com/api"
xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<grammars/>
<resources base="https://test.api.openstack.com">
<resource path="path/to/my/resource/{stepType}" rax:roles="admin/{ID}">
<param name="id" style="header" repeating="false" required="true"/>
<param name="stepType" style="template" type="xsd:string"/>
<method href="#postMethod" />
</resource>
</resources>
<method id="postMethod" name="POST">
<request>
<representation mediaType="application/json">
</representation>
</request>
<response status="200 203"/>
</method>
</application>
When("The WADL is translated")
val checkerLog = log(Level.ERROR) {
intercept[WADLException] {
val checker = builder.build(inWADL, raxRolesConfig)
println (checker) // Should never print!
}
}
Then ("There should be an error detailing that rax:roles mask is currently not supported")
assert(checkerLog, "no defined param named 'ID'")
}
scenario ("Given a transform with Rax-Roles on a teant but mismatch in case of tenant parameter (capture header)") {
Given("a WADL which leverages rax:roles when there is no tenant match")
val inWADL = <application xmlns="http://wadl.dev.java.net/2009/02"
xmlns:rax="http://docs.rackspace.com/api"
xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<grammars/>
<resources base="https://test.api.openstack.com">
<resource path="path/to/my/resource/{stepType}" rax:roles="admin/{ID}">
<param name="stepType" style="template" type="xsd:string"/>
<method href="#postMethod" />
</resource>
</resources>
<method id="postMethod" name="POST">
<request>
<representation mediaType="application/json">
<rax:captureHeader name="id" path="//@id"/>
</representation>
</request>
<response status="200 203"/>
</method>
</application>
When("The WADL is translated")
val checkerLog = log(Level.ERROR) {
intercept[WADLException] {
val checker = builder.build(inWADL, raxRolesConfig)
println (checker) // Should never print!
}
}
Then ("There should be an error detailing that rax:roles mask is currently not supported")
assert(checkerLog, "no defined param named 'ID'")
}
scenario ("Given a transform with Rax-Roles on a teant with a role name that contains a \\\\") {
Given("a WADL which leverages rax:roles tenanted with a role name that contains a \\\\")
val inWADL = <application xmlns="http://wadl.dev.java.net/2009/02"
xmlns:rax="http://docs.rackspace.com/api"
xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<grammars/>
<resources base="https://test.api.openstack.com">
<resource path="path/to/my/{id}/resource/{stepType}" rax:roles="ad\\m\\in/{header}">
<param name="id" style="template" type="xsd:string"/>
<param name="stepType" style="template" type="xsd:string"/>
<method href="#getMethod" />
</resource>
<rax:captureHeader path="55" name="header"/>
</resources>
<method id="getMethod" name="GET">
<response status="200 203"/>
</method>
</application>
When("The WADL is translated")
val checker = builder.build(inWADL, raxRolesConfig)
Then ("The backslash should be properly encoded")
assert(checker,"exists(/chk:checker/chk:step[@type='CAPTURE_HEADER' and @matchingRoles='ad\\\\m\\\\in/{header}'])")
assert(checker,"exists(/chk:checker/chk:step[@type='CAPTURE_HEADER' and @name='X-RELEVANT-ROLES' and contains(@path, 'ad\\\\m\\\\in/{header}')])")
}
scenario ("Given a transform with Rax-Roles on a teant with a role name that contains a \\\\\\\\") {
Given("a WADL which leverages rax:roles tenanted with a role name that contains a \\\\\\\\")
val inWADL = <application xmlns="http://wadl.dev.java.net/2009/02"
xmlns:rax="http://docs.rackspace.com/api"
xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<grammars/>
<resources base="https://test.api.openstack.com">
<resource path="path/to/my/{id}/resource/{stepType}" rax:roles="ad\\\\m\\\\in/{header}">
<param name="id" style="template" type="xsd:string"/>
<param name="stepType" style="template" type="xsd:string"/>
<method href="#getMethod" />
</resource>
<rax:captureHeader path="55" name="header"/>
</resources>
<method id="getMethod" name="GET">
<response status="200 203"/>
</method>
</application>
When("The WADL is translated")
val checker = builder.build(inWADL, raxRolesConfig)
Then ("The backslash should be properly encoded")
assert(checker,"exists(/chk:checker/chk:step[@type='CAPTURE_HEADER' and @matchingRoles='ad\\\\\\\\m\\\\\\\\in/{header}'])")
assert(checker,"exists(/chk:checker/chk:step[@type='CAPTURE_HEADER' and @name='X-RELEVANT-ROLES' and contains(@path, 'ad\\\\\\\\m\\\\\\\\in/{header}')])")
}
scenario ("Given a transform with Rax-Roles on a teant with a role name that contains a space") {
Given("a WADL which leverages rax:roles tenanted with a role name that contains a space")
val inWADL = <application xmlns="http://wadl.dev.java.net/2009/02"
xmlns:rax="http://docs.rackspace.com/api"
xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<grammars/>
<resources base="https://test.api.openstack.com">
<resource path="path/to/my/{id}/resource/{stepType}" rax:roles="ad m in/{header}">
<param name="id" style="template" type="xsd:string"/>
<param name="stepType" style="template" type="xsd:string"/>
<method href="#getMethod" />
</resource>
<rax:captureHeader path="55" name="header"/>
</resources>
<method id="getMethod" name="GET">
<response status="200 203"/>
</method>
</application>
When("The WADL is translated")
val checker = builder.build(inWADL, raxRolesConfig)
Then ("The backslash should be properly encoded")
assert(checker,"exists(/chk:checker/chk:step[@type='CAPTURE_HEADER' and @matchingRoles='ad m in/{header}'])")
assert(checker,"exists(/chk:checker/chk:step[@type='CAPTURE_HEADER' and @name='X-RELEVANT-ROLES' and contains(@path, 'ad m in/{header}')])")
}
}
}
|
wdschei/api-checker
|
core/src/test/scala/com/rackspace/com/papi/components/checker/wadl/WADLCheckerTenantFailSpec.scala
|
Scala
|
apache-2.0
| 25,763 |
package dispatch.spec
import org.scalacheck._
import org.scalacheck.Prop._
object UriSpecification extends Properties("Uri") {
/** java.net.URLDecoder should *NOT* be used for testing URI segment decoding
* because it implements completely different functionality: query parameter decoding
*/
property("Encodes and decodes basic strings") = Prop.forAll { (path: String) =>
!path.contains(":") ==> {
new java.net.URI(dispatch.UriEncode.path(path)).getPath == path
} // else Prop.throws(classOf[java.net.URISyntaxException])
}
/** if there is nothing to escape, encoder must return original reference */
property("Does nothing if there's nothing eo encode") = Prop.forAll(Gen.choose(0,100)) { (n: Int) =>
val path = "A" * n
dispatch.UriEncode.path(path) eq path
}
property("Encodes emoji correctly") = forAll(Gen.const("unused")) { (sample: String) =>
val path = "roma🇮🇹"
new java.net.URI(dispatch.UriEncode.path(path)).getPath == (path)
}
}
|
dispatch/reboot
|
core/src/test/scala/uri.scala
|
Scala
|
lgpl-3.0
| 1,003 |
package org.broadinstitute.dsde.firecloud.webservice
import akka.http.scaladsl.client.RequestBuilding
import akka.http.scaladsl.server.{Directives, Route}
import org.broadinstitute.dsde.firecloud.model.ModelJsonProtocol._
import org.broadinstitute.dsde.firecloud.model._
import org.broadinstitute.dsde.firecloud.service.NihService
import org.broadinstitute.dsde.firecloud.utils.StandardUserInfoDirectives
import org.slf4j.LoggerFactory
import scala.concurrent.ExecutionContext
trait NihApiService extends Directives with RequestBuilding with StandardUserInfoDirectives {
implicit val executionContext: ExecutionContext
lazy val log = LoggerFactory.getLogger(getClass)
val nihServiceConstructor: () => NihService
val syncRoute: Route =
path("sync_whitelist" / Segment) { whitelistName =>
post {
complete { nihServiceConstructor().syncWhitelistAllUsers(whitelistName) }
}
} ~ path("sync_whitelist") {
post {
complete { nihServiceConstructor().syncAllNihWhitelistsAllUsers() }
}
}
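// Illustrative requests handled by syncRoute (paths relative to the service root):
//   POST /sync_whitelist         -> sync every NIH whitelist for all users
//   POST /sync_whitelist/<name>  -> sync the named whitelist for all users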
val nihRoutes: Route =
requireUserInfo() { userInfo =>
pathPrefix("nih") {
// api/nih/callback: accept JWT, update linkage + lastlogin
path("callback") {
post {
entity(as[JWTWrapper]) { jwtWrapper =>
complete { nihServiceConstructor().updateNihLinkAndSyncSelf(userInfo, jwtWrapper) }
}
}
} ~
path ("status") {
complete { nihServiceConstructor().getNihStatus(userInfo) }
}
}
}
}
|
broadinstitute/firecloud-orchestration
|
src/main/scala/org/broadinstitute/dsde/firecloud/webservice/NihApiService.scala
|
Scala
|
bsd-3-clause
| 1,559 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.utils
import kafka.api.LeaderAndIsr
import kafka.common.TopicAndPartition
import kafka.controller.{IsrChangeNotificationListener, LeaderIsrAndControllerEpoch}
import kafka.utils.ZkUtils._
import org.apache.zookeeper.data.Stat
import scala.collection._
object ReplicationUtils extends Logging {
private val IsrChangeNotificationPrefix = "isr_change_"
def updateLeaderAndIsr(zkUtils: ZkUtils, topic: String, partitionId: Int, newLeaderAndIsr: LeaderAndIsr, controllerEpoch: Int,
zkVersion: Int): (Boolean,Int) = {
debug("Updated ISR for partition [%s,%d] to %s".format(topic, partitionId, newLeaderAndIsr.isr.mkString(",")))
val path = getTopicPartitionLeaderAndIsrPath(topic, partitionId)
val newLeaderData = zkUtils.leaderAndIsrZkData(newLeaderAndIsr, controllerEpoch)
// use the epoch of the controller that made the leadership decision, instead of the current controller epoch
val updatePersistentPath: (Boolean, Int) = zkUtils.conditionalUpdatePersistentPath(path, newLeaderData, zkVersion, Some(checkLeaderAndIsrZkData))
updatePersistentPath
}
def propagateIsrChanges(zkUtils: ZkUtils, isrChangeSet: Set[TopicAndPartition]): Unit = {
val isrChangeNotificationPath: String = zkUtils.createSequentialPersistentPath(
ZkUtils.IsrChangeNotificationPath + "/" + IsrChangeNotificationPrefix,
generateIsrChangeJson(isrChangeSet))
debug("Added " + isrChangeNotificationPath + " for " + isrChangeSet)
}
def checkLeaderAndIsrZkData(zkUtils: ZkUtils, path: String, expectedLeaderAndIsrInfo: String): (Boolean,Int) = {
try {
val writtenLeaderAndIsrInfo = zkUtils.readDataMaybeNull(path)
val writtenLeaderOpt = writtenLeaderAndIsrInfo._1
val writtenStat = writtenLeaderAndIsrInfo._2
val expectedLeader = parseLeaderAndIsr(expectedLeaderAndIsrInfo, path, writtenStat)
writtenLeaderOpt match {
case Some(writtenData) =>
val writtenLeader = parseLeaderAndIsr(writtenData, path, writtenStat)
(expectedLeader,writtenLeader) match {
case (Some(expectedLeader),Some(writtenLeader)) =>
if(expectedLeader == writtenLeader)
return (true, writtenStat.getVersion())
case _ =>
}
case None =>
}
} catch {
case _: Exception =>
}
(false,-1)
}
def getLeaderIsrAndEpochForPartition(zkUtils: ZkUtils, topic: String, partition: Int):Option[LeaderIsrAndControllerEpoch] = {
val leaderAndIsrPath = getTopicPartitionLeaderAndIsrPath(topic, partition)
val (leaderAndIsrOpt, stat) = zkUtils.readDataMaybeNull(leaderAndIsrPath)
leaderAndIsrOpt.flatMap(leaderAndIsrStr => parseLeaderAndIsr(leaderAndIsrStr, leaderAndIsrPath, stat))
}
private def parseLeaderAndIsr(leaderAndIsrStr: String, path: String, stat: Stat)
: Option[LeaderIsrAndControllerEpoch] = {
Json.parseFull(leaderAndIsrStr).flatMap {m =>
val leaderIsrAndEpochInfo = m.asInstanceOf[Map[String, Any]]
val leader = leaderIsrAndEpochInfo.get("leader").get.asInstanceOf[Int]
val epoch = leaderIsrAndEpochInfo.get("leader_epoch").get.asInstanceOf[Int]
val isr = leaderIsrAndEpochInfo.get("isr").get.asInstanceOf[List[Int]]
val controllerEpoch = leaderIsrAndEpochInfo.get("controller_epoch").get.asInstanceOf[Int]
val zkPathVersion = stat.getVersion
debug("Leader %d, Epoch %d, Isr %s, Zk path version %d for leaderAndIsrPath %s".format(leader, epoch,
isr.toString(), zkPathVersion, path))
Some(LeaderIsrAndControllerEpoch(LeaderAndIsr(leader, epoch, isr, zkPathVersion), controllerEpoch))}
}
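// For reference, parseLeaderAndIsr expects JSON of this shape (field names are
// taken from the accessors above; values are illustrative):
//   {"leader":0,"leader_epoch":2,"isr":[0,1],"controller_epoch":1}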
private def generateIsrChangeJson(isrChanges: Set[TopicAndPartition]): String = {
val partitions = isrChanges.map(tp => Map("topic" -> tp.topic, "partition" -> tp.partition)).toArray
Json.encode(Map("version" -> IsrChangeNotificationListener.version, "partitions" -> partitions))
}
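// Illustrative output (the version value depends on
// IsrChangeNotificationListener.version):
//   {"version":1,"partitions":[{"topic":"t1","partition":0}]}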
}
|
geeag/kafka
|
core/src/main/scala/kafka/utils/ReplicationUtils.scala
|
Scala
|
apache-2.0
| 4,744 |
/*
* Copyright 2014–2020 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.impl.datasource
import slamdata.Predef._
import quasar.api.datasource.DatasourceType
import quasar.api.resource.{ResourceName, ResourcePath, ResourcePathType}
import quasar.connector.{MonadResourceErr, ResourceError}
import quasar.connector.datasource._
import quasar.contrib.scalaz._
import scala.util.{Left, Right}
import cats.data.NonEmptyList
import cats.effect.Sync
import cats.instances.option._
import cats.syntax.applicative._
import cats.syntax.eq._
import cats.syntax.flatMap._
import cats.syntax.functor._
import cats.syntax.option._
import cats.syntax.traverse._
import fs2.{Pull, Stream}
import monocle.Lens
import shims.{equalToCats, monadToScalaz}
/** A datasource transformer that augments underlying datasources by adding an aggregate resource
* `p / **` for every prefix `p`. An aggregate resource `p / **` will aggregate
* all descendant resources of the prefix `p`.
*/
final class AggregatingDatasource[F[_]: MonadResourceErr: Sync, Q, R] private(
underlying: Datasource[F, Stream[F, ?], Q, R, ResourcePathType.Physical],
queryPath: Lens[Q, ResourcePath])
extends Datasource[F, Stream[F, ?], Q, CompositeResult[F, R], ResourcePathType] {
def kind: DatasourceType =
underlying.kind
lazy val loaders: NonEmptyList[Loader[F, Q, CompositeResult[F, R]]] =
underlying.loaders map {
case Loader.Batch(BatchLoader.Full(full)) =>
Loader.Batch(BatchLoader.Full(aggregateFull(full)))
case Loader.Batch(seek @ BatchLoader.Seek(_)) =>
Loader.Batch(seek.map[CompositeResult[F, R]](Left(_)))
}
def pathIsResource(path: ResourcePath): F[Boolean] =
underlying.pathIsResource(path).ifM(true.pure[F], aggPathExists(path))
def prefixedChildPaths(prefixPath: ResourcePath)
: F[Option[Stream[F, (ResourceName, ResourcePathType)]]] =
aggPath(prefixPath) match {
case None =>
underlying.prefixedChildPaths(prefixPath)
.flatMap(os => ofPrefix(os).ifM(
os.map(s => Stream.emit((AggName, ResourcePathType.AggregateResource)) ++ s).pure[F],
os.map(_.covaryOutput[(ResourceName, ResourcePathType)]).pure[F]))
case Some(_) =>
pathIsResource(prefixPath).ifM(
Stream.empty.covary[F].covaryOutput[(ResourceName, ResourcePathType)].some.pure[F],
none.pure[F])
}
////
private val AggName: ResourceName = ResourceName("**")
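// For example (illustrative): if the underlying datasource exposes physical
// resources `a/b` and `a/c`, this datasource additionally reports `a/**`, and
// loading `a/**` aggregates the results of loading `a/b` and `a/c`.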
private def aggPath(path: ResourcePath): Option[ResourcePath] =
path.unsnoc.flatMap {
case (p, n) => if (n === AggName) Some(p) else None
}
// `p / *` exists iff underlying `p` is prefix/prefixresource
private def aggPathExists(path: ResourcePath): F[Boolean] =
aggPath(path)
.map(p => underlying.prefixedChildPaths(p).flatMap(ofPrefix))
.getOrElse(false.pure[F])
// checks whether the provided stream is that of a prefix/prefixresource
private def ofPrefix[A](os: Option[Stream[F, A]]): F[Boolean] =
os.traverse(s => s.pull.peek1.flatMap {
case None => Pull.output1(false)
case _ => Pull.output1(true)
}.stream.compile.last).map(_.flatten getOrElse false)
private def aggregateFull(full: Q => F[R])(q: Q): F[CompositeResult[F, R]] = {
def aggregate(p: ResourcePath): F[AggregateResult[F, R]] =
aggPath(p).map(doAggregate).getOrElse(MonadResourceErr[F].raiseError(ResourceError.pathNotFound(p)))
@SuppressWarnings(Array("org.wartremover.warts.Recursion"))
def doAggregate(p: ResourcePath): F[AggregateResult[F, R]] =
underlying.prefixedChildPaths(p) flatMap {
case Some(s) =>
val children: AggregateResult[F, R] =
s.collect {
case (n, ResourcePathType.LeafResource) => p / n
case (n, ResourcePathType.PrefixResource) => p / n
} .evalMap(r => full(queryPath.set(r)(q)).tupleLeft(r).attempt)
.map(_.toOption)
.unNone
val nested: AggregateResult[F, R] =
s.collect {
case (n, ResourcePathType.Prefix) => p / n
case (n, ResourcePathType.PrefixResource) => p / n
} .evalMap(doAggregate).flatten
(children ++ nested).pure[F]
case None =>
MonadResourceErr[F].raiseError(ResourceError.pathNotFound(p))
}
val qpath = queryPath.get(q)
underlying.pathIsResource(qpath).ifM(
full(q).map(Left(_)),
aggregate(qpath).map(Right(_)))
}
}
object AggregatingDatasource {
def apply[F[_]: MonadResourceErr: Sync, Q, R](
underlying: Datasource[F, Stream[F, ?], Q, R, ResourcePathType.Physical],
queryPath: Lens[Q, ResourcePath])
: Datasource[F, Stream[F, ?], Q, CompositeResult[F, R], ResourcePathType] =
new AggregatingDatasource(underlying, queryPath)
}
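// Illustrative usage sketch (not part of the original file): given a physical
// datasource `ds` and a monocle lens `qp: Lens[Q, ResourcePath]` focusing the
// resource path inside a query (both hypothetical), wrapping yields a
// datasource in which loading `p / **` aggregates every descendant of `p`:
//
//   val aggregated = AggregatingDatasource(ds, qp)
//
// A `Left` result is a plain load of a physical resource; a `Right` result is
// the stream of (path, result) pairs produced by the aggregation.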
|
slamdata/quasar
|
impl/src/main/scala/quasar/impl/datasource/AggregatingDatasource.scala
|
Scala
|
apache-2.0
| 5,382 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.catalog
import java.util.{Collections, Properties}
import com.google.common.collect.Lists
import org.apache.calcite.config.{CalciteConnectionConfigImpl, CalciteConnectionProperty}
import org.apache.calcite.jdbc.CalciteSchema
import org.apache.calcite.prepare.CalciteCatalogReader
import org.apache.calcite.schema.SchemaPlus
import org.apache.calcite.sql.parser.SqlParser
import org.apache.calcite.sql.validate.SqlMonikerType
import org.apache.flink.table.calcite.{CalciteConfig, FlinkTypeFactory, FlinkTypeSystem}
import org.apache.flink.table.plan.schema.TableSourceTable
import org.apache.flink.table.runtime.utils.CommonTestData
import org.apache.flink.table.sources.CsvTableSource
import org.junit.Assert._
import org.junit.{Before, Test}
import scala.collection.JavaConverters._
class ExternalCatalogSchemaTest {
private val schemaName: String = "test"
private var externalCatalogSchema: SchemaPlus = _
private var calciteCatalogReader: CalciteCatalogReader = _
private val db = "db1"
private val tb = "tb1"
@Before
def setUp(): Unit = {
val rootSchemaPlus: SchemaPlus = CalciteSchema.createRootSchema(true, false).plus()
val catalog = CommonTestData.getInMemoryTestCatalog
ExternalCatalogSchema.registerCatalog(rootSchemaPlus, schemaName, catalog)
    externalCatalogSchema = rootSchemaPlus.getSubSchema(schemaName)
val typeFactory = new FlinkTypeFactory(new FlinkTypeSystem())
val prop = new Properties()
prop.setProperty(CalciteConnectionProperty.CASE_SENSITIVE.camelName, "false")
val calciteConnConfig = new CalciteConnectionConfigImpl(prop)
calciteCatalogReader = new CalciteCatalogReader(
CalciteSchema.from(rootSchemaPlus),
Collections.emptyList(),
typeFactory,
calciteConnConfig
)
}
@Test
def testGetSubSchema(): Unit = {
val allSchemaObjectNames = calciteCatalogReader
.getAllSchemaObjectNames(Lists.newArrayList(schemaName))
val subSchemas = allSchemaObjectNames.asScala
.filter(_.getType.equals(SqlMonikerType.SCHEMA))
.map(_.getFullyQualifiedNames.asScala.toList).toSet
assertTrue(Set(List(schemaName), List(schemaName, "db1"),
List(schemaName, "db2")) == subSchemas)
}
@Test
def testGetTable(): Unit = {
val relOptTable = calciteCatalogReader.getTable(Lists.newArrayList(schemaName, db, tb))
assertNotNull(relOptTable)
val tableSourceTable = relOptTable.unwrap(classOf[TableSourceTable[_]])
tableSourceTable match {
case tst: TableSourceTable[_] =>
assertTrue(tst.tableSource.isInstanceOf[CsvTableSource])
case _ =>
fail("unexpected table type!")
}
}
@Test
def testGetNotExistTable(): Unit = {
val relOptTable = calciteCatalogReader.getTable(
Lists.newArrayList(schemaName, db, "nonexist-tb"))
assertNull(relOptTable)
}
}
|
zimmermatt/flink
|
flink-libraries/flink-table/src/test/scala/org/apache/flink/table/catalog/ExternalCatalogSchemaTest.scala
|
Scala
|
apache-2.0
| 3,688 |
/*
* Copyright DataGenerator Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.finra.datagenerator.common.SocialNetwork_Example
import org.finra.datagenerator.common.NodeData.NodeData
import org.finra.datagenerator.common.NodeData.NodeDataType.NodeDataType
import scala.beans.BeanProperty
/*
Social network user
*/
class User( private var _dataType: UserType.UserType,
@BeanProperty var firstName: String,
@BeanProperty var lastName: String,
@BeanProperty val dateOfBirth: java.sql.Date, // Assert > 13 years old when creating
@BeanProperty var geographicalLocation: (Double, Double),
@BeanProperty var isSecret: Boolean,
@BeanProperty val socialNetworkId: Long) extends NodeData(None) {
override def defaultDisplayableDataId: String = s"$socialNetworkId (${dataType.name}): $lastName, $firstName"
override def getDataType: UserType.UserType = {
_dataType
}
}
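// Illustrative sketch (not part of the original file): constructing a User
// node. `someUserType` and all field values are hypothetical.
//
//   val user = new User(someUserType, "Ada", "Lovelace",
//     java.sql.Date.valueOf("1990-12-10"), (47.56, 7.59),
//     isSecret = false, socialNetworkId = 42L)
//   user.defaultDisplayableDataId // "42 (<type name>): Lovelace, Ada"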
|
mibrahim/DataGenerator
|
dg-common/src/main/scala/org/finra/datagenerator/common/SocialNetwork_Example/User.scala
|
Scala
|
apache-2.0
| 1,487 |
/*
* Copyright 2016 University of Basel, Graphics and Vision Research Group
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package scalismo.faces.color
import scalismo.color.RGB
/** HSV color value with Hue in [0.0,2*Pi), Saturation in [0.0,1.0] and Value in [0.0,1.0] */
case class HSV(hue: Double, saturation: Double, value: Double) {
  /** Convert to an RGB value. May throw or produce an undefined result if the components are outside their ranges. */
def toRGB: RGB = {
val hs = hue / (math.Pi / 3.0)
val h: Int = hs.toInt
val f: Double = hs - h
val p: Double = value * (1.0 - saturation)
val q: Double = value * (1.0 - saturation * f)
val t: Double = value * (1.0 - saturation * (1.0 - f))
h match {
case 0 => RGB(value, t, p)
case 1 => RGB(q, value, p)
case 2 => RGB(p, value, t)
case 3 => RGB(p, q, value)
case 4 => RGB(t, p, value)
case 5 => RGB(value, p, q)
case 6 => RGB(value, t, p)
case _ => throw new RuntimeException(s"Invalid hue value (${h}) in conversion of color ${this}.")
}
}
}
object HSV {
/** convert from RGB value. */
def apply(rgb: RGB): HSV = {
val maxCh: Double = math.max(rgb.r, math.max(rgb.g, rgb.b))
val minCh: Double = math.min(rgb.r, math.min(rgb.g, rgb.b))
val RGB(r, g, b) = rgb
val h: Double = maxCh match {
case `minCh` => 0.0
case `r` => math.Pi / 3.0 * (0.0 + (g - b) / (maxCh - minCh))
case `g` => math.Pi / 3.0 * (2.0 + (b - r) / (maxCh - minCh))
case `b` => math.Pi / 3.0 * (4.0 + (r - g) / (maxCh - minCh))
}
val s = if (maxCh > 0.0) (maxCh - minCh) / maxCh else 0.0
val v = maxCh
HSV( if(h<0) h+2.0*math.Pi else h, s, v)
}
/** ColorBlender for HSV colors */
implicit object HSVBlender extends ColorBlender[HSV] {
    /** Blend two colors (or other objects), needed for interpolation in images; `l` lies in [0, 1] and weights `obj1`. */
override def blend(obj1: HSV, obj2: HSV, l: Double): HSV = {
val m = 1.0 - l
val h1 = obj1.hue
val h2 = obj2.hue
      // circular interpolation: blend the hue unit vectors, then recover the angle
      val hue_nonSanitized = math.atan2(l * math.sin(h1) + m * math.sin(h2), l * math.cos(h1) + m * math.cos(h2))
val hue = if(hue_nonSanitized < 0){
hue_nonSanitized % (2 * Math.PI) + (2 * Math.PI)
} else {
hue_nonSanitized % (2 * Math.PI)
}
HSV(
hue,
obj1.saturation * l + obj2.saturation * m,
obj1.value * l + obj2.value * m
)
}
}
}
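/** Illustrative sketch (not part of the original file): a round trip through
  * the conversions above; the object name is hypothetical. */
object HSVRoundTripExample {
  def main(args: Array[String]): Unit = {
    val red = RGB(1.0, 0.0, 0.0)
    val hsv = HSV(red)   // HSV(0.0, 1.0, 1.0): hue 0, fully saturated, full value
    val back = hsv.toRGB // recovers RGB(1.0, 0.0, 0.0)
    println(s"$red -> $hsv -> $back")
  }
}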
|
unibas-gravis/scalismo-faces
|
src/main/scala/scalismo/faces/color/HSV.scala
|
Scala
|
apache-2.0
| 2,975 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.analysis
import org.apache.log4j.Level
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.expressions.{Ascending, AttributeReference, Literal, SortOrder}
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
import org.apache.spark.sql.catalyst.plans.Inner
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.IntegerType
class ResolveHintsSuite extends AnalysisTest {
import org.apache.spark.sql.catalyst.analysis.TestRelations._
test("invalid hints should be ignored") {
checkAnalysisWithoutViewWrapper(
UnresolvedHint("some_random_hint_that_does_not_exist", Seq("TaBlE"), table("TaBlE")),
testRelation,
caseSensitive = false)
}
test("case-sensitive or insensitive parameters") {
checkAnalysisWithoutViewWrapper(
UnresolvedHint("MAPJOIN", Seq("TaBlE"), table("TaBlE")),
ResolvedHint(testRelation, HintInfo(strategy = Some(BROADCAST))),
caseSensitive = false)
checkAnalysisWithoutViewWrapper(
UnresolvedHint("MAPJOIN", Seq("table"), table("TaBlE")),
ResolvedHint(testRelation, HintInfo(strategy = Some(BROADCAST))),
caseSensitive = false)
checkAnalysisWithoutViewWrapper(
UnresolvedHint("MAPJOIN", Seq("TaBlE"), table("TaBlE")),
ResolvedHint(testRelation, HintInfo(strategy = Some(BROADCAST))),
caseSensitive = true)
checkAnalysisWithoutViewWrapper(
UnresolvedHint("MAPJOIN", Seq("table"), table("TaBlE")),
testRelation,
caseSensitive = true)
}
test("multiple broadcast hint aliases") {
checkAnalysisWithoutViewWrapper(
UnresolvedHint("MAPJOIN", Seq("table", "table2"), table("table").join(table("table2"))),
Join(ResolvedHint(testRelation, HintInfo(strategy = Some(BROADCAST))),
ResolvedHint(testRelation2, HintInfo(strategy = Some(BROADCAST))),
Inner, None, JoinHint.NONE),
caseSensitive = false)
}
test("do not traverse past existing broadcast hints") {
checkAnalysisWithoutViewWrapper(
UnresolvedHint("MAPJOIN", Seq("table"),
ResolvedHint(table("table").where('a > 1), HintInfo(strategy = Some(BROADCAST)))),
ResolvedHint(testRelation.where('a > 1), HintInfo(strategy = Some(BROADCAST))).analyze,
caseSensitive = false)
}
test("should work for subqueries") {
checkAnalysisWithoutViewWrapper(
UnresolvedHint("MAPJOIN", Seq("tableAlias"), table("table").as("tableAlias")),
ResolvedHint(testRelation, HintInfo(strategy = Some(BROADCAST))),
caseSensitive = false)
checkAnalysisWithoutViewWrapper(
UnresolvedHint("MAPJOIN", Seq("tableAlias"), table("table").subquery('tableAlias)),
ResolvedHint(testRelation, HintInfo(strategy = Some(BROADCAST))),
caseSensitive = false)
// Negative case: if the alias doesn't match, don't match the original table name.
checkAnalysisWithoutViewWrapper(
UnresolvedHint("MAPJOIN", Seq("table"), table("table").as("tableAlias")),
testRelation,
caseSensitive = false)
}
test("do not traverse past subquery alias") {
checkAnalysisWithoutViewWrapper(
UnresolvedHint("MAPJOIN", Seq("table"), table("table").where('a > 1).subquery('tableAlias)),
testRelation.where('a > 1).analyze,
caseSensitive = false)
}
test("should work for CTE") {
checkAnalysisWithoutViewWrapper(
CatalystSqlParser.parsePlan(
"""
|WITH ctetable AS (SELECT * FROM table WHERE a > 1)
|SELECT /*+ BROADCAST(ctetable) */ * FROM ctetable
""".stripMargin
),
ResolvedHint(testRelation.where('a > 1).select('a), HintInfo(strategy = Some(BROADCAST)))
.select('a).analyze,
caseSensitive = false,
inlineCTE = true)
}
test("should not traverse down CTE") {
checkAnalysisWithoutViewWrapper(
CatalystSqlParser.parsePlan(
"""
|WITH ctetable AS (SELECT * FROM table WHERE a > 1)
|SELECT /*+ BROADCAST(table) */ * FROM ctetable
""".stripMargin
),
testRelation.where('a > 1).select('a).select('a).analyze,
caseSensitive = false,
inlineCTE = true)
}
test("coalesce and repartition hint") {
checkAnalysisWithoutViewWrapper(
UnresolvedHint("COALESCE", Seq(Literal(10)), table("TaBlE")),
Repartition(numPartitions = 10, shuffle = false, child = testRelation))
checkAnalysisWithoutViewWrapper(
UnresolvedHint("coalesce", Seq(Literal(20)), table("TaBlE")),
Repartition(numPartitions = 20, shuffle = false, child = testRelation))
checkAnalysisWithoutViewWrapper(
UnresolvedHint("REPARTITION", Seq(Literal(100)), table("TaBlE")),
Repartition(numPartitions = 100, shuffle = true, child = testRelation))
checkAnalysisWithoutViewWrapper(
UnresolvedHint("RePARTITion", Seq(Literal(200)), table("TaBlE")),
Repartition(numPartitions = 200, shuffle = true, child = testRelation))
val errMsg = "COALESCE Hint expects a partition number as a parameter"
assertAnalysisError(
UnresolvedHint("COALESCE", Seq.empty, table("TaBlE")),
Seq(errMsg))
assertAnalysisError(
UnresolvedHint("COALESCE", Seq(Literal(10), Literal(false)), table("TaBlE")),
Seq(errMsg))
assertAnalysisError(
UnresolvedHint("COALESCE", Seq(Literal(1.0)), table("TaBlE")),
Seq(errMsg))
checkAnalysisWithoutViewWrapper(
UnresolvedHint("RePartition", Seq(Literal(10), UnresolvedAttribute("a")), table("TaBlE")),
RepartitionByExpression(Seq(AttributeReference("a", IntegerType)()), testRelation, 10))
checkAnalysisWithoutViewWrapper(
UnresolvedHint("REPARTITION", Seq(Literal(10), UnresolvedAttribute("a")), table("TaBlE")),
RepartitionByExpression(Seq(AttributeReference("a", IntegerType)()), testRelation, 10))
checkAnalysisWithoutViewWrapper(
UnresolvedHint("REPARTITION", Seq(UnresolvedAttribute("a")), table("TaBlE")),
RepartitionByExpression(
Seq(AttributeReference("a", IntegerType)()), testRelation, None))
val e = intercept[AnalysisException] {
checkAnalysis(
UnresolvedHint("REPARTITION",
Seq(SortOrder(AttributeReference("a", IntegerType)(), Ascending)),
table("TaBlE")),
RepartitionByExpression(
Seq(SortOrder(AttributeReference("a", IntegerType)(), Ascending)), testRelation, 10)
)
}
    assert(e.getMessage.contains("For range partitioning use REPARTITION_BY_RANGE instead"))
checkAnalysisWithoutViewWrapper(
UnresolvedHint(
"REPARTITION_BY_RANGE", Seq(Literal(10), UnresolvedAttribute("a")), table("TaBlE")),
RepartitionByExpression(
Seq(SortOrder(AttributeReference("a", IntegerType)(), Ascending)), testRelation, 10))
checkAnalysisWithoutViewWrapper(
UnresolvedHint(
"REPARTITION_BY_RANGE", Seq(UnresolvedAttribute("a")), table("TaBlE")),
RepartitionByExpression(
Seq(SortOrder(AttributeReference("a", IntegerType)(), Ascending)),
testRelation, None))
val errMsg2 = "REPARTITION Hint parameter should include columns, but"
assertAnalysisError(
UnresolvedHint("REPARTITION", Seq(Literal(true)), table("TaBlE")),
Seq(errMsg2))
assertAnalysisError(
UnresolvedHint("REPARTITION",
Seq(Literal(1.0), AttributeReference("a", IntegerType)()),
table("TaBlE")),
Seq(errMsg2))
val errMsg3 = "REPARTITION_BY_RANGE Hint parameter should include columns, but"
assertAnalysisError(
UnresolvedHint("REPARTITION_BY_RANGE",
Seq(Literal(1.0), AttributeReference("a", IntegerType)()),
table("TaBlE")),
Seq(errMsg3))
assertAnalysisError(
UnresolvedHint("REPARTITION_BY_RANGE",
Seq(Literal(10), Literal(10)),
table("TaBlE")),
Seq(errMsg3))
assertAnalysisError(
UnresolvedHint("REPARTITION_BY_RANGE",
Seq(Literal(10), Literal(10), UnresolvedAttribute("a")),
table("TaBlE")),
Seq(errMsg3))
}
test("log warnings for invalid hints") {
val logAppender = new LogAppender("invalid hints")
withLogAppender(logAppender) {
checkAnalysisWithoutViewWrapper(
UnresolvedHint("unknown_hint", Seq("TaBlE"), table("TaBlE")),
testRelation,
caseSensitive = false)
}
assert(logAppender.loggingEvents.exists(
e => e.getLevel == Level.WARN &&
e.getRenderedMessage.contains("Unrecognized hint: unknown_hint")))
}
test("SPARK-30003: Do not throw stack overflow exception in non-root unknown hint resolution") {
checkAnalysisWithoutViewWrapper(
Project(testRelation.output, UnresolvedHint("unknown_hint", Seq("TaBlE"), table("TaBlE"))),
Project(testRelation.output, testRelation),
caseSensitive = false)
}
test("Supports multi-part table names for join strategy hint resolution") {
Seq(("MAPJOIN", BROADCAST),
("MERGEJOIN", SHUFFLE_MERGE),
("SHUFFLE_HASH", SHUFFLE_HASH),
("SHUFFLE_REPLICATE_NL", SHUFFLE_REPLICATE_NL)).foreach { case (hintName, st) =>
// local temp table (single-part identifier case)
checkAnalysisWithoutViewWrapper(
UnresolvedHint(hintName, Seq("table", "table2"),
table("TaBlE").join(table("TaBlE2"))),
Join(
ResolvedHint(testRelation, HintInfo(strategy = Some(st))),
ResolvedHint(testRelation2, HintInfo(strategy = Some(st))),
Inner,
None,
JoinHint.NONE),
caseSensitive = false)
checkAnalysisWithoutViewWrapper(
UnresolvedHint(hintName, Seq("TaBlE", "table2"),
table("TaBlE").join(table("TaBlE2"))),
Join(
ResolvedHint(testRelation, HintInfo(strategy = Some(st))),
testRelation2,
Inner,
None,
JoinHint.NONE),
caseSensitive = true)
// global temp table (multi-part identifier case)
checkAnalysisWithoutViewWrapper(
UnresolvedHint(hintName, Seq("GlOBal_TeMP.table4", "table5"),
table("global_temp", "table4").join(table("global_temp", "table5"))),
Join(
ResolvedHint(testRelation4, HintInfo(strategy = Some(st))),
ResolvedHint(testRelation5, HintInfo(strategy = Some(st))),
Inner,
None,
JoinHint.NONE),
caseSensitive = false)
checkAnalysisWithoutViewWrapper(
UnresolvedHint(hintName, Seq("global_temp.TaBlE4", "table5"),
table("global_temp", "TaBlE4").join(table("global_temp", "TaBlE5"))),
Join(
ResolvedHint(testRelation4, HintInfo(strategy = Some(st))),
testRelation5,
Inner,
None,
JoinHint.NONE),
caseSensitive = true)
}
}
test("SPARK-35786: Support optimize repartition by expression in AQE") {
checkAnalysisWithoutViewWrapper(
UnresolvedHint("REBALANCE", Seq(UnresolvedAttribute("a")), table("TaBlE")),
RebalancePartitions(Seq(AttributeReference("a", IntegerType)()), testRelation))
checkAnalysisWithoutViewWrapper(
UnresolvedHint("REBALANCE", Seq.empty, table("TaBlE")),
RebalancePartitions(Seq.empty, testRelation))
withSQLConf(SQLConf.ADAPTIVE_EXECUTION_ENABLED.key -> "false") {
checkAnalysisWithoutViewWrapper(
UnresolvedHint("REBALANCE", Seq(UnresolvedAttribute("a")), table("TaBlE")),
testRelation)
checkAnalysisWithoutViewWrapper(
UnresolvedHint("REBALANCE", Seq.empty, table("TaBlE")),
testRelation)
}
assertAnalysisError(
UnresolvedHint("REBALANCE", Seq(Literal(1)), table("TaBlE")),
Seq("Hint parameter should include columns"))
}
}
|
chuckchen/spark
|
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ResolveHintsSuite.scala
|
Scala
|
apache-2.0
| 12,684 |
package net.flaviusb.atomish
import scala.util.parsing.combinator._
import scala.collection.mutable.{Map => MMap, MutableList => MList}
import scala.language.postfixOps
class PreReader {
def read(str: AtomishString): AtomishCode = {
AtomishParser.parseAll(AtomishParser.code, str.value) match {
case AtomishParser.Success(atomval, _) => atomval
case AtomishParser.Failure(msg, next) => { println(msg); println(next.toString()); null } // Should trigger condition system
case AtomishParser.Error(msg, wtf) => { println(msg); println(wtf.toString()); null } // Should trigger condition system
}
}
val alien_read = AlienProxy(shallowwrapstrtocode(read))
}
object AtomishParser extends RegexParsers {
override type Elem = Char
override def skipWhitespace = false
  def nll: Parser[AtomishCode] = "[.]|\\n|\\n\\r".r ^^ { x => AtomishNL }
def wss: Parser[String] = "[ ]+".r ^^ { x => "" }
  def rational = "[+-]?[0-9]+\\.[0-9]+".r ^^ { (double: String) => AtomishDecimal(double.toDouble) }
def dinteger: Parser[AtomishCode] = "[+-]?[0-9]+".r ^^ { (int: String) => AtomishInt(int.toInt) }
def hinteger: Parser[AtomishCode] = "0x" ~ "[0-9a-fA-F]+".r ^^ { case "0x" ~ (int: String) => AtomishInt(Integer.parseInt(int, 16)) }
def integer: Parser[AtomishCode] = (hinteger | dinteger)
// In the actual AtomishLanguage, flagcheck should be implemented as a reader macro
def flagcheck = "#" ~ "[_+:]*[a-zA-Z][a-zA-Z0-9_:$!?%=<>-]*".r ^^ {
case "#" ~ flagName => {
AtomishCall("flag", Array(AtomishString(flagName)))
}
}
// In the actual AtomishLanguage, %w etc should be implemented as part of the MMOP
  def pct_w: Parser[AtomishCode] = "%w{" ~ (("[^\\s}]+".r ~ "[\\s]*".r)*) ~ "}" ^^ {
case "%w{" ~ List() ~ "}" => AtomishArray(Array())
case "%w{" ~ x ~ "}" => {
AtomishArray(x.map(_ match {
case value ~ _ => AtomishString(value)
}).toArray)
}
}
def commated_code = code ~ ((("," ~ code) ^^ {case x ~ y => y})*) ^^ { case x ~ y => x::y }
def square_array: Parser[AtomishCode] = "[" ~ ((nlws*) ~ (commated_code ~ ((nlws*) ~ "]"))) ^^ {
case "[" ~ (x ~ (y ~ (z ~ "]"))) => AtomishCall("Array", y.toArray)
}
def square_map: Parser[AtomishCode] = "{" ~ ((nlws*) ~ (commated_code ~ ((nlws*) ~ "}"))) ^^ {
case "{" ~ (x ~ (y ~ (z ~ "}"))) => AtomishCall("Map", y.toArray)
}
  def regex_escapes = ("""\/""" | """\\""" | """\n""" | """\r""") ^^ {
    case """\\""" => """\"""
    case """\n""" => "\n"
    case """\r""" => "\r"
    case """\/""" => "/"
  }
  def regex: Parser[AtomishRegex] = ("/" ~ ((regex_escapes | """[^/\\]""".r)*) ~ "/" ~ (("[a-zA-Z]".r)*)) ^^ {
case "/" ~ regex_chunks ~ "/" ~ flags => AtomishRegex(regex_chunks.mkString, flags.distinct)
}
def at_square = ("[" ~ code ~ "]") ^^ { case "[" ~ the_code ~ "]" => AtomishCall("at", Array(the_code)) }
  def qstring_escapes = ("""\\""" | """\n""" | """\"""" | """\r""") ^^ {
    case """\\""" => """\"""
    case """\n""" => "\n"
    case """\r""" => "\r"
    case """\"""" => "\""
  }
  def sstring_escapes = ("""\\""" | """\n""" | """\]""" | """\r""") ^^ {
    case """\\""" => """\"""
    case """\n""" => "\n"
    case """\r""" => "\r"
    case """\]""" => "]"
  }
def interpolated_section: Parser[AtomishCode] = "#{" ~ code ~ "}" ^^ { case "#{" ~ interpolated_code ~ "}" => interpolated_code }
  def qstring: Parser[AtomishCode] = ("\"" ~ ((interpolated_section | (("""([^"\\])""".r | qstring_escapes) ^^ { AtomishString(_) }))*) ~ "\"") ^^ {
    case "\"" ~ List() ~ "\"" => AtomishString("")
    case "\"" ~ List(AtomishString(x)) ~ "\"" => AtomishString(x)
    case "\"" ~ chunks ~ "\"" => {
var interpolated_list = MList[AtomishCode]()
var accumulated_string = ""
for(chunk <- chunks) {
chunk match {
case AtomishString(x) => accumulated_string += x
case x: AtomishCode => {
if(accumulated_string != "") {
interpolated_list += AtomishString(accumulated_string)
accumulated_string = ""
}
interpolated_list += x
}
}
}
if(accumulated_string != "") {
interpolated_list += AtomishString(accumulated_string)
accumulated_string = ""
}
interpolated_list match {
case MList(x: AtomishString) => x
case _ => AtomishInterpolatedString(interpolated_list.toList)
}
}
}
  def sstring: Parser[AtomishCode] = ("#[" ~ ((("""([^\]\\])""".r | sstring_escapes)*) ~ "]")) ^^ { case "#[" ~ (str ~ "]") => AtomishString(str.foldLeft("")(_ + _)) }
def string: Parser[AtomishCode] = (sstring | qstring)
def symbol: Parser[AtomishCall] = ":" ~ identifier ^^ { case ":" ~ symb => AtomishCall(":", Array(symb)) }
  def identifier: Parser[AtomishMessage] = ("([_+]+[_+:]*)?[a-zA-Z][a-zA-Z0-9_:$!?%=<>-]*".r | "[~!@$%^&*_=\'`/?×÷≠→←⇒⇐⧺⧻§∘≢∨∪∩□∀⊃∈+<>-]+[:~!@$%^&*_=\'`/?×÷≠→←⇒⇐⧺⧻§∘≢∨∪∩□∀⊃∈+<>-]*".r | "[]"
| "{}" | "…") ^^ { AtomishMessage(_) }
def literal = ((regex: Parser[AtomishRegex]) | string | rational | integer | symbol | pct_w | square_array | square_map)
def lside = (literal | rside_bit)
def rside_bit = (comment | at_square | literal | commated | atomish_call | identifier | flagcheck)
def rside: Parser[List[AtomishCode]] = (rside_bit ~ ((((wss) ~ rside_bit) ^^ {
case x ~ y => y })*)) ^^ {
case x ~ List() => List(x)
case (x:AtomishCode) ~ (y:List[AtomishCode]) => x +: y
}
def limb: Parser[AtomishCode] = (((((wss?) ~ ((lside) ~ (wss?))) ^^ { case w ~ (x ~ y) => x })) ~ (((rside ~ (wss?)) ^^ { case x ~ y => x })?)) ^^ {
//case None ~ Some(List(x)) => x
//case None ~ Some(x) => AtomishForm(x)
case x ~ Some(List()) => x
case x ~ Some(y) => AtomishForm(x::y)
//case None ~ None => AtomishForm(List())
case x ~ None => x
}
def nlws = ((wss?) ~ (nll ~ (wss?))) ^^ { case _ ~ (x ~ _) => x }
def code : Parser[AtomishCode] = ((((nlws*) ~ (limb ~ (((
((((nlws+) ~ limb) ^^ {case x ~ y => x :+ y})*) ^^ { case x: List[List[AtomishCode]] => x.flatten }): Parser[List[AtomishCode]]) ~ (nlws*))
)) ^^ {
case n ~ (x ~ (List() ~ List())) => {
//println(x.toString())
x
}
case n ~ (x ~ (y ~ z)) => {
val ret = AtomishForm(n ++ (x::y ++ z))
//println(ret.toString())
ret
}
}) | (((wss?) ~ (((nll ~ (wss?)) ^^ {case x ~ y => x})+)) ^^ {
case a ~ List(x) => x
case a ~ x => AtomishForm(x)
}))
def commated_bit: Parser[List[AtomishCode]] = (("," ~ (wss?) ~ code)*) ^^ { _.map { case "," ~ x ~ frag => frag } }
def commated: Parser[AtomishCommated] = "(" ~ (wss?) ~ ((code ~ (wss?) ~ commated_bit)?) ~ (wss?) ~ ")" ^^ {
case opb ~ wsa ~ Some((code1: AtomishCode) ~ wsb ~ (code2: List[AtomishCode])) ~ wsc ~ opc => AtomishCommated(Array[AtomishCode](code1) ++
(code2.toArray[AtomishCode]))
case opb ~ wsa ~ None ~ wsb ~ opc => AtomishCommated(Array())
}
def atomish_call: Parser[AtomishCall] = identifier ~ commated ^^ {
case AtomishMessage(name) ~ AtomishCommated(args) => AtomishCall(name, args)
}
  def comment: Parser[AtomishComment] = ("""#[.][^.]*[.]""".r | "#;[^\\n\\r]*".r | "؟[^\\n\\r]*".r | "#!/[^\\n\\r]*".r) ^^ { AtomishComment(_) }
}
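/** Illustrative sketch (not part of the original file): driving the parser
  * directly; the object name and input string are hypothetical. */
object AtomishParserExample {
  def main(args: Array[String]): Unit = {
    AtomishParser.parseAll(AtomishParser.code, "%w{foo bar baz}") match {
      case AtomishParser.Success(ast, _) => println(ast) // an AtomishArray of three AtomishStrings
      case failure => println(failure)
    }
  }
}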
|
flaviusb/Atomish
|
bootstrap/PreReader.scala
|
Scala
|
gpl-3.0
| 7,446 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.consumer
import kafka.cluster.Broker
import kafka.server.AbstractFetcherThread
import kafka.message.ByteBufferMessageSet
import kafka.api.{Request, OffsetRequest, FetchResponsePartitionData}
import kafka.common.TopicAndPartition
class ConsumerFetcherThread(name: String,
val config: ConsumerConfig,
sourceBroker: Broker,
partitionMap: Map[TopicAndPartition, PartitionTopicInfo],
val consumerFetcherManager: ConsumerFetcherManager)
extends AbstractFetcherThread(name = name,
clientId = config.clientId + "-" + name,
sourceBroker = sourceBroker,
socketTimeout = config.socketTimeoutMs,
socketBufferSize = config.socketReceiveBufferBytes,
fetchSize = config.fetchMessageMaxBytes,
fetcherBrokerId = Request.OrdinaryConsumerId,
maxWait = config.fetchWaitMaxMs,
minBytes = config.fetchMinBytes,
isInterruptible = true) {
// process fetched data
def processPartitionData(topicAndPartition: TopicAndPartition, fetchOffset: Long, partitionData: FetchResponsePartitionData) {
val pti = partitionMap(topicAndPartition)
if (pti.getFetchOffset != fetchOffset)
throw new RuntimeException("Offset doesn't match for partition [%s,%d] pti offset: %d fetch offset: %d"
.format(topicAndPartition.topic, topicAndPartition.partition, pti.getFetchOffset, fetchOffset))
pti.enqueue(partitionData.messages.asInstanceOf[ByteBufferMessageSet])
}
// handle a partition whose offset is out of range and return a new fetch offset
def handleOffsetOutOfRange(topicAndPartition: TopicAndPartition): Long = {
    val startTimestamp = config.autoOffsetReset match {
      case OffsetRequest.SmallestTimeString => OffsetRequest.EarliestTime
      case OffsetRequest.LargestTimeString => OffsetRequest.LatestTime
      case _ => OffsetRequest.LatestTime
    }
val newOffset = simpleConsumer.earliestOrLatestOffset(topicAndPartition, startTimestamp, Request.OrdinaryConsumerId)
val pti = partitionMap(topicAndPartition)
pti.resetFetchOffset(newOffset)
pti.resetConsumeOffset(newOffset)
newOffset
}
// any logic for partitions whose leader has changed
def handlePartitionsWithErrors(partitions: Iterable[TopicAndPartition]) {
partitions.foreach(tap => removePartition(tap.topic, tap.partition))
consumerFetcherManager.addPartitionsWithError(partitions)
}
}
|
archieco/kafka
|
core/src/main/scala/kafka/consumer/ConsumerFetcherThread.scala
|
Scala
|
apache-2.0
| 3,665 |
/*
* Copyright 2015 Magnus Madsen.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dk.umonitor.language
import java.nio.file.Path
import dk.umonitor.language.Compiler.CompileError._
import dk.umonitor.util.Validation._
import dk.umonitor.util.{PropertyMap, Validation}
object Compiler {
private type Out[Result] = Validation[Result, CompileError]
/**
* Attempts to compile all the given `paths` into a single program.
*
   * Emits error messages if compilation of one or more units fails.
*
* Returns the minimal program which is consistent.
*/
def compile(paths: Seq[Path]): Validation[Program, CompileError] = {
// sort the paths before parsing
val sortedPaths = paths.sorted
// iterate through each valid ast
Validation.collect(sortedPaths map parse) flatMap {
case asts =>
// collect all declarations
val declarations = asts flatMap (ast => ast.decls)
val actionDecls = declarations.collect { case d: Ast.Declaration.Action => d }
val bindDecls = declarations.collect { case d: Ast.Declaration.Bind => d }
val contactDecls = declarations.collect { case d: Ast.Declaration.Contact => d }
val monitorDecls = declarations.collect { case d: Ast.Declaration.Monitor => d }
val notifyDecls = declarations.collect { case d: Ast.Declaration.Notify => d }
val serviceDecls = declarations.collect { case d: Ast.Declaration.Service => d }
/*
* Actions
*/
val actionsVal: Out[Map[String, Program.Action]] = Validation.fold(actionDecls, Map.empty[String, Program.Action]) {
      case (m, action@Ast.Declaration.Action(Ast.Ident(name, location), _)) => m.get(name) match {
case None => // Unused action name. Compile and update map.
Action.compileAction(action) map (result => m + (name -> result))
case Some(otherAction) => // Existing action name. Raise duplicate error.
DuplicateAction(name, otherAction.location, location).toFailure
}
}
/*
* Bindings
*/
val bindingsVal: Out[Map[String, Program.Bind]] = Validation.fold(bindDecls, Map.empty[String, Program.Bind]) {
case (m, Ast.Declaration.Bind(Ast.Ident(service, _), Ast.Ident(monitor, _), Ast.Ident(binding, location))) => m.get(binding) match {
case None => // Unused binding name. Update map.
(m + (binding -> Program.Bind(binding, service, monitor, location))).toSuccess
case Some(otherTarget) => // Existing binding name. Raise duplicate error.
DuplicateBinding(binding, otherTarget.location, location).toFailure
}
}
/*
* Contacts
*/
val contactsVal: Out[Map[String, Program.Contact]] = Validation.fold(contactDecls, Map.empty[String, Program.Contact]) {
      case (m, contact@Ast.Declaration.Contact(Ast.Ident(name, location), properties)) => m.get(name) match {
case None => // Unused contact name. Compile and update map.
Contact.compileContact(contact) map (result => m + (name -> result))
case Some(otherContact) => // Existing contact name. Raise duplicate error.
DuplicateContact(name, otherContact.location, location).toFailure
}
}
/*
* Monitors
*/
val monitorsVal: Out[Map[String, Program.Monitor]] = Validation.fold(monitorDecls, Map.empty[String, Program.Monitor]) {
      case (m, monitor@Ast.Declaration.Monitor(Ast.Ident(name, location), body)) => m.get(name) match {
case None => // Unused monitor name. Compile and update map.
Monitor.compileMonitor(monitor) map (result => m + (name -> result))
case Some(otherMonitor) => // Existing monitor name. Raise duplicate error.
DuplicateMonitor(name, otherMonitor.location, location).toFailure
}
}
/*
* Notifies
*/
val notifiesVal: Out[Map[String, Set[Ast.Ident]]] = Validation.fold(notifyDecls, Map.empty[String, Set[Ast.Ident]]) {
      case (m, Ast.Declaration.Notify(contact@Ast.Ident(contactName, _), Ast.Ident(targetName, _))) => m.get(targetName) match {
case None => (m + (targetName -> Set(contact))).toSuccess
case Some(contacts) => (m + (targetName -> (contacts + contact))).toSuccess
}
}
/*
* Services
*/
val servicesVal: Out[Map[String, Program.Service]] = Validation.fold(serviceDecls, Map.empty[String, Program.Service]) {
      case (m, service@Ast.Declaration.Service(Ast.Ident(name, location), connector)) => m.get(name) match {
case None => // Unused service name. Compile and update map.
Service.compileService(service) map (result => m + (name -> result))
case Some(otherService) => // Existing service name. Raise duplicate error.
DuplicateService(name, otherService.location, location).toFailure
}
}
/*
* Targets
*/
@@(actionsVal, bindingsVal, contactsVal, monitorsVal, servicesVal) flatMap {
case (actionMap, bindingMap, contactMap, monitorMap, serviceMap) =>
// loop through each binding.
val targetsVal = collect(bindingMap.values.toSeq.map {
case Program.Bind(name, serviceName, monitorName, location) =>
// lookup the monitor and service.
val monitorVal = monitorMap.get(monitorName).toSuccessOr[CompileError](MonitorNotFound(monitorName, location))
val serviceVal = serviceMap.get(serviceName).toSuccessOr[CompileError](ServiceNotFound(serviceName, location))
// lookup the contacts.
val contactsVal = notifiesVal flatMap {
case m => Validation.collect(m.getOrElse(name, Set.empty).toSeq map {
case Ast.Ident(contactName, notifyLocation) =>
contactMap.get(contactName).toSuccessOr[CompileError](ContactNotFound(contactName, notifyLocation))
})
}
// construct a target if all the components are well-defined.
@@(contactsVal, monitorVal, serviceVal) map {
case (contacts, monitor, service) =>
val boundMonitor = monitor.bind(serviceName)
Program.Target(name, service, boundMonitor, contacts.toSet, location)
}
})
targetsVal map {
case targets => Program(actionMap, contactMap, monitorMap, serviceMap, targets)
}
}
}
}
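  // Illustrative usage sketch (not part of the original file; the path is
  // hypothetical):
  //
  //   val result: Validation[Program, CompileError] =
  //     Compiler.compile(Seq(java.nio.file.Paths.get("monitors.cfg")))
  //
  // On success the value is the compiled Program; on failure it carries
  // CompileErrors whose `format` strings are printable messages.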
/////////////////////////////////////////////////////////////////////////////
// Compiler Errors //
/////////////////////////////////////////////////////////////////////////////
sealed trait CompileError {
/**
* Returns a pretty formatted string of the compiler error.
*/
def format: String
}
object CompileError {
/**
* An error that represents a parse error.
*
* @param message the error message
* @param path the path of the source file.
*/
case class ParseError(message: String, path: Path) extends CompileError {
val format = s"Error: Unable to parse '$path' (this file will be ignored.)\\n" +
s" $message.\\n"
}
/**
* An error that represents a missing property.
*
* @param name the name of the property.
* @param expectedType the expected type.
*/
case class MissingProperty(name: String, expectedType: Type) extends CompileError {
val format =
s"Error: Missing required property '$name'.\\n" +
s" Expected a value of type '$expectedType'.\\n"
}
/**
* An error that represents a property which has a value of an incorrect type.
*
* @param name the name of the property.
* @param expectedType the expected type.
* @param actualType the actual type.
* @param location the location of the property.
*/
case class IllegalPropertyType(name: String, expectedType: Type, actualType: Type, location: SourceLocation) extends CompileError {
val format =
s"Error: Illegal property value for '$name' at '${location.format}'.\\n" +
s" Expected a value of type '$expectedType', but got a value of type '$actualType'.\\n"
}
/**
* An error that represents a duplicate declaration of an action with the given `name`.
*
* @param name the name of the property.
* @param location1 the location of the first declaration.
* @param location2 the location of the second declaration.
*/
case class DuplicateAction(name: String, location1: SourceLocation, location2: SourceLocation) extends CompileError {
val format =
s"Error: Duplicated action declaration '$name'.\\n" +
s" 1st declaration was here: '${location1.format}' (this one will be used).\\n" +
s" 2nd declaration was here: '${location2.format}' (this one will be ignored).\\n"
}
/**
* An error that represents a duplicate declaration of a binding with the given `name`.
*
* @param name the name of the binding.
* @param location1 the location of the first declaration.
* @param location2 the location of the second declaration.
*/
case class DuplicateBinding(name: String, location1: SourceLocation, location2: SourceLocation) extends CompileError {
val format =
s"Error: Duplicated binding declaration '$name'.\\n" +
s" 1st declaration was here: '${location1.format}' (this one will be used).\\n" +
s" 2nd declaration was here: '${location2.format}' (this one will be ignored).\\n"
}
/**
* An error that represents a duplicate declaration of a contact with the given `name`.
*
* @param name the name of the property.
* @param location1 the location of the first declaration.
* @param location2 the location of the second declaration.
*/
case class DuplicateContact(name: String, location1: SourceLocation, location2: SourceLocation) extends CompileError {
val format =
s"Error: Duplicated contact declaration '$name'.\\n" +
s" 1st declaration was here: '${location1.format}' (this one will be used).\\n" +
s" 2nd declaration was here: '${location2.format}' (this one will be ignored).\\n"
}
/**
* An error that represents a duplicate declaration of a property with the given `name`.
*
* @param name the name of the property.
* @param location1 the location of the first declaration.
* @param location2 the location of the second declaration.
*/
case class DuplicateProperty(name: String, location1: SourceLocation, location2: SourceLocation) extends CompileError {
val format =
s"Error: Duplicated property declaration '$name'.\\n" +
s" 1st declaration was here: '${location1.format}' (this one will be used).\\n" +
s" 2nd declaration was here: '${location2.format}' (this one will be ignored).\\n"
}
/**
* An error that represents a duplicate declaration of a monitor with the given `name`.
*
* @param name the name of the monitor.
* @param location1 the location of the first declaration.
* @param location2 the location of the second declaration.
*/
case class DuplicateMonitor(name: String, location1: SourceLocation, location2: SourceLocation) extends CompileError {
val format =
s"Error: Duplicated monitor declaration '$name'.\\n" +
s" 1st declaration was here: '${location1.format}' (this one will be used).\\n" +
s" 2nd declaration was here: '${location2.format}' (this one will be ignored).\\n"
}
/**
* An error that represents a duplicate declaration of a service with the given `name`.
*
* @param name the name of the service.
* @param location1 the location of the first declaration.
* @param location2 the location of the second declaration.
*/
case class DuplicateService(name: String, location1: SourceLocation, location2: SourceLocation) extends CompileError {
val format =
s"Error: Duplicated service declaration '$name'.\\n" +
s" 1st declaration was here: '${location1.format}' (this one will be used).\\n" +
s" 2nd declaration was here: '${location2.format}' (this one will be ignored).\\n"
}
/**
* An error that represents that the named action will not be available due to previous errors.
*
* @param name the name of the action.
* @param location the location of the declaration.
*/
case class ActionUnavailable(name: String, location: SourceLocation) extends CompileError {
val format =
s"Error: The action with name '$name' declared at '${location.format}' is unavailable due to previous errors.\\n"
}
/**
* An error that represents that the named contact will not be available due to previous errors.
*
* @param name the name of the contact.
* @param location the location of the declaration.
*/
case class ContactUnavailable(name: String, location: SourceLocation) extends CompileError {
val format =
s"Error: The contact with name '$name' declared at '${location.format}' is unavailable due to previous errors.\\n"
}
/**
* An error that represents that the named service will not be available due to previous errors.
*
* @param name the name of the service.
* @param location the location of the declaration.
*/
case class ServiceUnavailable(name: String, location: SourceLocation) extends CompileError {
val format =
s"Error: The service with name '$name' declared at '${location.format}' is unavailable due to previous errors.\\n"
}
/**
* An error that represents a reference to an unknown contact.
*
* @param name the name of the contact.
* @param location the location of the binding which refers to the monitor.
*/
case class ContactNotFound(name: String, location: SourceLocation) extends CompileError {
val format =
s"Error: The contact with name '$name' referenced at '${location.format}' does not exist.\\n"
}
/**
* An error that represents a reference to an unknown monitor.
*
* @param name the name of the monitor.
* @param location the location of the binding which refers to the monitor.
*/
case class MonitorNotFound(name: String, location: SourceLocation) extends CompileError {
val format =
s"Error: The monitor with name '$name' referenced at '${location.format}' does not exist.\\n"
}
/**
* An error that represents a reference to an unknown service.
*
* @param name the name of the service.
* @param location the location of the binding which refers to the service.
*/
case class ServiceNotFound(name: String, location: SourceLocation) extends CompileError {
val format =
s"Error: The service with name '$name' referenced at '${location.format}' does not exist.\\n"
}
/**
* An error that represents a generic error.
*
* @param message the error message.
*/
case class GenericError(message: String) extends CompileError {
      val format = message + "\n"
}
}
/**
* Returns the abstract syntax tree of the given `path`.
*/
private def parse(path: Path): Validation[Ast.Root, CompileError] = Parser.parse(path) match {
case scala.util.Success(root) =>
root.toSuccess
case scala.util.Failure(exception) =>
ParseError(exception.getMessage, path).toFailure
}
/////////////////////////////////////////////////////////////////////////////
// Actions //
/////////////////////////////////////////////////////////////////////////////
object Action {
/**
* Returns the compiled `action`.
*/
def compileAction(action: Ast.Declaration.Action): Out[Program.Action] = {
Validation.flatten(action.run.map(compileRun)) map {
case runs => Program.Action(action.ident.name, runs, action.ident.location)
} onFailure {
ActionUnavailable(action.ident.name, action.ident.location).toFailure
}
}
/**
* Returns the compiled `run`.
*/
def compileRun(run: Ast.Action.Run): Out[Program.Run] = {
getPropertyMap(run.properties) flatMap { case properties =>
lookupStrSeq("exec", properties, optional = false) map { case command =>
// NB: the parser guarantees that the sequence is non-empty.
val path = command.head
val args = command.tail
val opts = getOpts(properties)
Program.Run(path, args, opts)
}
}
}
}
/////////////////////////////////////////////////////////////////////////////
// Contacts //
/////////////////////////////////////////////////////////////////////////////
object Contact {
/**
* Returns the compiled `contact`.
*/
def compileContact(contact: Ast.Declaration.Contact): Out[Program.Contact] =
getPropertyMap(contact.properties) flatMap { case opts =>
val emailAddressVal = lookupStrOpt("email", opts)
val phoneNumberVal = lookupStrOpt("phone", opts)
val location = contact.ident.location
@@(emailAddressVal, phoneNumberVal) map {
case (emailAddress, phoneNumber) =>
Program.Contact(contact.ident.name, emailAddress, phoneNumber, location)
}
} onFailure {
ContactUnavailable(contact.ident.name, contact.ident.location).toFailure
}
}
/////////////////////////////////////////////////////////////////////////////
// Connectors //
/////////////////////////////////////////////////////////////////////////////
object Connector {
/**
* Returns the compiled `connector`.
*/
def compileConnector(connector: Ast.Connector): Out[Program.Connector] = connector match {
case c: Ast.Connector.AllOf => compile(c)
case c: Ast.Connector.OneOf => compile(c)
case c: Ast.Connector.Cmd => compile(c)
case c: Ast.Connector.Dns => compile(c)
case c: Ast.Connector.File => compile(c)
case c: Ast.Connector.Ftp => compile(c)
case c: Ast.Connector.Http => compile(c)
case c: Ast.Connector.Icmp => compile(c)
case c: Ast.Connector.Imap => compile(c)
case c: Ast.Connector.Pop3 => compile(c)
case c: Ast.Connector.Rdp => compile(c)
case c: Ast.Connector.Smtp => compile(c)
case c: Ast.Connector.Ssh => compile(c)
case c: Ast.Connector.SshCmd => compile(c)
}
/**
* Returns the compiled allOf `connector`.
*/
def compile(connector: Ast.Connector.AllOf): Out[Program.Connector.AllOf] = {
val connectors = Validation.flatten(connector.xs.map(compileConnector))
connectors map {
case xs => Program.Connector.AllOf(xs)
}
}
/**
* Returns the compiled oneOf `connector`.
*/
def compile(connector: Ast.Connector.OneOf): Out[Program.Connector.OneOf] = {
val connectors = Validation.flatten(connector.xs.map(compileConnector))
connectors map {
case xs => Program.Connector.OneOf(xs)
}
}
/**
* Returns the compiled cmd `connector`.
*/
def compile(connector: Ast.Connector.Cmd): Out[Program.Connector.Cmd] =
getPropertyMap(connector.properties) flatMap { case properties =>
val pathProp = lookupStr("path", properties)
val exitCodeProp = lookupIntOpt("exit-code", properties)
val goodWordsProp = lookupStrSeq("good-words", properties, optional = true)
val badWordsProp = lookupStrSeq("bad-words", properties, optional = true)
val opts = getOpts(properties)
@@(pathProp, exitCodeProp, goodWordsProp, badWordsProp) map {
case (path, exitCode, goodWords, badWords) =>
Program.Connector.Cmd(path, exitCode, goodWords.toSet, badWords.toSet, opts)
}
}
/**
* Returns the compiled dns `connector`.
*/
def compile(connector: Ast.Connector.Dns): Out[Program.Connector.Dns] =
getPropertyMap(connector.properties) flatMap { case properties =>
val hostProp = lookupStr("host", properties)
val domainProp = lookupStr("domain", properties)
val addressProp = lookupStr("address", properties)
val opts = getOpts(properties)
@@(hostProp, domainProp, addressProp) map {
case (host, domain, address) =>
Program.Connector.Dns(host, domain, address, opts)
}
}
/**
* Returns the compiled file `connector`.
*/
def compile(connector: Ast.Connector.File): Out[Program.Connector.File] =
getPropertyMap(connector.properties) flatMap { case properties =>
val pathProp = lookupStr("path", properties)
val goodWordsProp = lookupStrSeq("good-words", properties, optional = true)
val badWordsProp = lookupStrSeq("bad-words", properties, optional = true)
val opts = getOpts(properties)
@@(pathProp, goodWordsProp, badWordsProp) map {
case (path, goodWords, badWords) =>
Program.Connector.File(path, goodWords.toSet, badWords.toSet, opts)
}
}
/**
* Returns the compiled ftp `connector`.
*/
def compile(connector: Ast.Connector.Ftp): Out[Program.Connector.Ftp] =
getPropertyMap(connector.properties) flatMap { case properties =>
val hostProp = lookupStr("host", properties)
val portProp = lookupIntOpt("port", properties)
val opts = getOpts(properties)
@@(hostProp, portProp) map {
case (host, port) =>
Program.Connector.Ftp(host, port, opts)
}
}
/**
* Returns the compiled http `connector`.
*/
def compile(connector: Ast.Connector.Http): Out[Program.Connector.Http] =
getPropertyMap(connector.properties) flatMap { case properties =>
val urlProp = lookupStr("url", properties)
val statusCodeProp = lookupIntOpt("status-code", properties)
val goodWordsProp = lookupStrSeq("good-words", properties, optional = true)
val badWordsProp = lookupStrSeq("bad-words", properties, optional = true)
val opts = getOpts(properties)
@@(urlProp, statusCodeProp, goodWordsProp, badWordsProp) map {
case (url, statusCode, goodWords, badWords) =>
Program.Connector.Http(url, statusCode, goodWords.toSet, badWords.toSet, opts)
}
}
/**
* Returns the compiled icmp `connector`.
*/
def compile(connector: Ast.Connector.Icmp): Out[Program.Connector.Icmp] =
getPropertyMap(connector.properties) flatMap { case properties =>
val hostProp = lookupStr("host", properties)
val opts = getOpts(properties)
hostProp map {
case host =>
Program.Connector.Icmp(host, opts)
}
}
/**
* Returns the compiled imap `connector`.
*/
def compile(connector: Ast.Connector.Imap): Out[Program.Connector.Imap] =
getPropertyMap(connector.properties) flatMap { case properties =>
val hostProp = lookupStr("host", properties)
val portProp = lookupIntOpt("port", properties)
val opts = getOpts(properties)
@@(hostProp, portProp) map {
case (host, port) =>
Program.Connector.Imap(host, port, opts)
}
}
/**
* Returns the compiled pop3 `connector`.
*/
def compile(connector: Ast.Connector.Pop3): Out[Program.Connector.Pop3] =
getPropertyMap(connector.properties) flatMap { case properties =>
val hostProp = lookupStr("host", properties)
val portProp = lookupIntOpt("port", properties)
val opts = getOpts(properties)
@@(hostProp, portProp) map {
case (host, port) =>
Program.Connector.Pop3(host, port, opts)
}
}
/**
* Returns the compiled rdp `connector`.
*/
def compile(connector: Ast.Connector.Rdp): Out[Program.Connector.Rdp] =
getPropertyMap(connector.properties) flatMap { case properties =>
val hostProp = lookupStr("host", properties)
val portProp = lookupIntOpt("port", properties)
val opts = getOpts(properties)
@@(hostProp, portProp) map {
case (host, port) =>
Program.Connector.Rdp(host, port, opts)
}
}
/**
* Returns the compiled smtp `connector`.
*/
def compile(connector: Ast.Connector.Smtp): Out[Program.Connector.Smtp] =
getPropertyMap(connector.properties) flatMap { case properties =>
val hostProp = lookupStr("host", properties)
val portProp = lookupIntOpt("port", properties)
val opts = getOpts(properties)
@@(hostProp, portProp) map {
case (host, port) =>
Program.Connector.Smtp(host, port, opts)
}
}
/**
* Returns the compiled ssh `connector`.
*/
def compile(connector: Ast.Connector.Ssh): Out[Program.Connector.Ssh] =
getPropertyMap(connector.properties) flatMap { case properties =>
val hostProp = lookupStr("host", properties)
val portProp = lookupIntOpt("port", properties)
val opts = getOpts(properties)
@@(hostProp, portProp) map {
case (host, port) =>
Program.Connector.Ssh(host, port, opts)
}
}
/**
* Returns the compiled sshcmd `connector`.
*/
def compile(connector: Ast.Connector.SshCmd): Out[Program.Connector.SshCmd] =
getPropertyMap(connector.properties) flatMap { case properties =>
val hostProp = lookupStr("host", properties)
val portProp = lookupIntOpt("port", properties)
val usernameProp = lookupStr("username", properties)
val passwordProp = lookupStr("password", properties)
val commandProp = lookupStr("command", properties)
val exitCodeProp = lookupIntOpt("exit-code", properties)
val goodWordsProp = lookupStrSeq("good-words", properties, optional = true)
val badWordsProp = lookupStrSeq("bad-words", properties, optional = true)
val opts = getOpts(properties)
@@(hostProp, portProp, usernameProp, passwordProp, commandProp, exitCodeProp, goodWordsProp, badWordsProp) map {
case (host, port, username, password, command, exitCode, goodWords, badWords) =>
Program.Connector.SshCmd(host, port, username, password, command, exitCode, goodWords.toSet, badWords.toSet, opts)
}
}
}
/////////////////////////////////////////////////////////////////////////////
// Monitors //
/////////////////////////////////////////////////////////////////////////////
object Monitor {
/**
* Returns the compiled `monitor`.
*/
def compileMonitor(monitor: Ast.Declaration.Monitor): Out[Program.Monitor] = {
val Ast.Declaration.Monitor(Ast.Ident(name, location), body) = monitor
val states = body.collect({ case d: Ast.Monitor.States => d }).flatMap(_.idents).map(_.name)
val clocks = body.collect({ case d: Ast.Monitor.Clocks => d }).flatMap(_.idents).map(_.name)
val when = body.collect({ case d: Ast.Monitor.When => d }) flatMap {
case Ast.Monitor.When(Ast.Ident(src, _), transitions) => transitions map {
case transition =>
val srcState = Program.State(src)
val symbol = transition.event match {
case Ast.Event.Up(Ast.Ident(monitorName, _)) => Program.Symbol.Up(monitorName)
case Ast.Event.Dn(Ast.Ident(monitorName, _)) => Program.Symbol.Dn(monitorName)
}
val dstState = Program.State(transition.dst.name)
val guards = transition.guards.map(Guard.compileGuard)
val resets = transition.resets.map(_.name)
val actions = transition.actions.map(_.name)
Program.Transition(srcState, symbol, dstState, guards, resets, actions)
}
}
if (states.isEmpty) {
GenericError(s"Error: The monitor declared at '${monitor.ident.location.format}' has no states.").toFailure
} else {
Program.Monitor(name, states.toSet, clocks.toSet, when.toSet, location).toSuccess
}
}
}
/////////////////////////////////////////////////////////////////////////////
// Guards //
/////////////////////////////////////////////////////////////////////////////
object Guard {
/**
* Returns the compiled `guard`.
*/
def compileGuard(guard: Ast.Guard): Program.Guard = guard match {
case Ast.Guard.Clock(ident, duration) => duration match {
case Ast.Duration.Second(seconds) => Program.Guard.Clock(ident.name, seconds)
case Ast.Duration.Minute(minutes) => Program.Guard.Clock(ident.name, 60 * minutes)
case Ast.Duration.Hour(hours) => Program.Guard.Clock(ident.name, 60 * 60 * hours)
case Ast.Duration.Day(days) => Program.Guard.Clock(ident.name, 24 * 60 * 60 * days)
case Ast.Duration.Week(weeks) => Program.Guard.Clock(ident.name, 7 * 24 * 60 * 60 * weeks)
}
case Ast.Guard.TimeOfDay(b, e) => (b, e) match {
case (Ast.HourMin(beginHour, beginMin, beginAmPm), Ast.HourMin(endHour, endMin, endAmPm)) =>
Program.Guard.TimeOfDay(beginHour + offset(beginAmPm), beginMin, endHour + offset(endAmPm), endMin)
}
case Ast.Guard.DayOfWeek(days) => Program.Guard.DayOfWeek(days.toSet[Ast.DayOfWeek].map {
case Ast.DayOfWeek.Monday => 1
case Ast.DayOfWeek.Tuesday => 2
case Ast.DayOfWeek.Wednesday => 3
case Ast.DayOfWeek.Thursday => 4
case Ast.DayOfWeek.Friday => 5
case Ast.DayOfWeek.Saturday => 6
case Ast.DayOfWeek.Sunday => 7
})
}
/**
* Returns the time offset for the given time-of-day.
*/
def offset(ampm: Ast.TimeOfDay): Int = ampm match {
case Ast.TimeOfDay.AM => 0
case Ast.TimeOfDay.PM => 12
}
}
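  // Example (illustrative, not part of the original file): a clock guard of
  // 2 minutes compiles to Program.Guard.Clock(name, 120), and a PM bound in a
  // time-of-day guard adds offset(Ast.TimeOfDay.PM) = 12 to its hour.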
/////////////////////////////////////////////////////////////////////////////
// Services //
/////////////////////////////////////////////////////////////////////////////
object Service {
/**
* Returns the compiled `service`.
*/
def compileService(service: Ast.Declaration.Service): Out[Program.Service] = {
val Ast.Ident(name, location) = service.ident
Connector.compileConnector(service.connector) map {
case connector => Program.Service(name, connector, location)
} onFailure {
ServiceUnavailable(service.ident.name, service.ident.location).toFailure
}
}
}
/////////////////////////////////////////////////////////////////////////////
// Properties //
/////////////////////////////////////////////////////////////////////////////
/**
* Returns a map from string keys to ast (identifier, literal) pairs.
*
   * Fails the entire computation if one of the individual properties fails.
*/
def getPropertyMap(xs: Seq[Ast.Property]): Out[Map[String, (Ast.Ident, Ast.Literal)]] = {
Validation.fold(xs, Map.empty[String, (Ast.Ident, Ast.Literal)]) {
      case (m, Ast.Property(ident @ Ast.Ident(name, location), literal)) => m.get(name) match {
        case None => // Unseen property name. Add to map.
(m + (name -> ((ident, literal)))).toSuccess[Map[String, (Ast.Ident, Ast.Literal)], CompileError]
case Some((otherIdent, otherLiteral)) => // Existing property name. Raise duplicate error.
DuplicateProperty(name, otherIdent.location, location).toFailure
}
}
}
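  // Sketch of the duplicate check: given `host = "a"` followed by `host = "b"`,
  // the fold reaches the Some branch on the second binding, so the whole
  // computation fails with DuplicateProperty("host", firstLocation, secondLocation).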
/**
* Optionally returns the string value of the given key `k` in map `m`.
*/
private def lookupStrOpt(k: String, m: Map[String, (Ast.Ident, Ast.Literal)]): Out[Option[String]] = m.get(k) match {
case None =>
None.toSuccess
case Some((Ast.Ident(name, location), Ast.Literal.Str(s))) =>
Some(s).toSuccess
case Some((Ast.Ident(name, location), literal)) =>
IllegalPropertyType(name = k, expectedType = Type.Str, actualType = typeOf(literal), location = location).toFailure
}
/**
   * Returns the string value of the given key `k` in map `m`.
*/
private def lookupStr(k: String, m: Map[String, (Ast.Ident, Ast.Literal)]): Out[String] = m.get(k) match {
case None => MissingProperty(k, Type.Str).toFailure
case Some((Ast.Ident(name, location), Ast.Literal.Str(s))) => s.toSuccess
case Some((Ast.Ident(name, location), literal)) =>
IllegalPropertyType(name = k, expectedType = Type.Str, actualType = typeOf(literal), location = location).toFailure
}
/**
   * Optionally returns the int value of the given key `k` in map `m`.
*/
private def lookupIntOpt(k: String, m: Map[String, (Ast.Ident, Ast.Literal)]): Out[Option[Int]] = m.get(k) match {
case None => None.toSuccess
case Some((Ast.Ident(name, location), Ast.Literal.Int(i))) => Some(i).toSuccess
case Some((Ast.Ident(name, location), literal)) =>
IllegalPropertyType(name = k, expectedType = Type.Int, actualType = typeOf(literal), location = location).toFailure
}
/**
* Returns the sequence of strings for the given key `k`.
*/
private def lookupStrSeq(k: String, m: Map[String, (Ast.Ident, Ast.Literal)], optional: Boolean): Out[Seq[String]] = m.get(k) match {
case None if optional => Seq.empty[String].toSuccess
case None => MissingProperty(k, Type.Seq(Seq(Type.Str))).toFailure
case Some((Ast.Ident(name, location), literal)) => literal match {
case Ast.Literal.Str(x) => Seq(x).toSuccess
case Ast.Literal.Seq(xs) => (xs map unwrap map (_.toString)).toSuccess
      case _ => IllegalPropertyType(name = k, expectedType = Type.Seq(Seq(Type.Str)), actualType = typeOf(literal), location = location).toFailure
}
}
/**
* Returns the property map of the given `properties`.
*/
private def getOpts(properties: Map[String, (Ast.Ident, Ast.Literal)]): PropertyMap = {
val inner = properties.foldLeft(Map.empty[String, Any]) {
case (macc, (key, (ident, literal))) => macc + (key -> unwrap(literal))
}
PropertyMap(inner)
}
/**
* Returns the given `literal` as a plain Scala value.
*/
private def unwrap(literal: Ast.Literal): Any = literal match {
case Ast.Literal.Bool(b) => b
case Ast.Literal.Int(i) => i
case Ast.Literal.Str(s) => s
case Ast.Literal.Seq(xs) => xs map unwrap
}
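  // For instance, unwrap(Ast.Literal.Seq(List(Ast.Literal.Int(1), Ast.Literal.Str("a"))))
  // yields the plain Scala value List(1, "a").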
/////////////////////////////////////////////////////////////////////////////
// Types //
/////////////////////////////////////////////////////////////////////////////
/**
* A common-super type for types.
*/
sealed trait Type
object Type {
/**
* Boolean type.
*/
case object Bool extends Type
/**
* Integer type.
*/
case object Int extends Type
/**
* String type.
*/
case object Str extends Type
/**
* Sequence type.
*/
case class Seq(parameters: scala.Seq[Type]) extends Type
}
/**
* Returns the type of the given `literal`.
*/
def typeOf(literal: Ast.Literal): Type = literal match {
case Ast.Literal.Bool(_) => Type.Bool
case Ast.Literal.Int(_) => Type.Int
case Ast.Literal.Str(_) => Type.Str
case Ast.Literal.Seq(xs) => Type.Seq(xs map typeOf)
}
}
|
magnus-madsen/umonitor
|
src/dk/umonitor/language/Compiler.scala
|
Scala
|
apache-2.0
| 36,579 |
package com.szadowsz.cadisainmduit.ships.uboat
import java.io.{File, StringReader}
import java.sql.Date
import java.text.SimpleDateFormat
import com.szadowsz.common.io.delete.DeleteUtil
import com.szadowsz.common.io.read.{CsvReader, FReader}
import com.szadowsz.common.io.write.CsvWriter
import com.szadowsz.common.io.zip.ZipperUtil
import com.szadowsz.spark.ml.{Lineage, LocalDataframeIO}
import com.szadowsz.spark.ml.feature.{CsvColumnExtractor, RegexGroupExtractor, StringCapitaliser, StringMapper, StringStatistics, _}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.apache.spark.sql.{DataFrame, Row, SQLContext, SparkSession}
import org.slf4j.LoggerFactory
import org.supercsv.io.CsvListReader
import org.supercsv.prefs.CsvPreference
import scala.collection.JavaConverters._
import org.apache.spark.sql.functions._
import scala.util.Try
/**
* Created on 27/04/2016.
*/
object UboatPreparer extends LocalDataframeIO {
private val _logger = LoggerFactory.getLogger(UboatPreparer.getClass)
val dateUdf = udf[Date, String]((s: String) => {
val sDf = new SimpleDateFormat("dd MMM yyyy")
val sDf2 = new SimpleDateFormat("MMM yyyy")
val sDf3 = new SimpleDateFormat("yyyy")
Try(sDf.parse(s)).orElse(Try(sDf2.parse(s))).orElse(Try(sDf3.parse(s))).map(d => new Date(d.getTime)).toOption.orNull
})
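  // Parsing sketch (assuming an English default locale): "3 Sep 1939" matches the
  // first pattern, "Sep 1939" the second and "1939" the third; unparsable or null
  // input fails every Try and ends up as null via orNull.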
  val dateComboUdf = udf[Date, Date, Date]((d1: Date, d2: Date) => if (d1 == null) d2 else d1)
private def buildClassPipe(): Lineage = {
val pipe = new Lineage("UboatClass")
pipe.addStage(classOf[CsvColumnExtractor], "inputCol" -> "fields", "outputCols" -> UboatSchema.classSchema, "size" -> UboatSchema.classSchema.length)
pipe.addStage(classOf[CastTransformer], "inputCol" -> "built", "outputDataType" -> IntegerType)
pipe.addStage(classOf[CastTransformer], "inputCol" -> "planned", "outputDataType" -> IntegerType)
pipe.addPassThroughTransformer(classOf[StringStatistics], Map("isDebug" -> true, "debugPath" -> "./data/debug/uboat/"))
pipe
}
private def buildShipPipe(): Lineage = {
val pipe = new Lineage("UboatShip")
pipe.addStage(classOf[CsvColumnExtractor], "inputCol" -> "fields", "outputCols" -> UboatSchema.shipSchema, "size" -> UboatSchema.shipSchema.length)
pipe.addPassThroughTransformer(classOf[StringStatistics], Map("isDebug" -> true, "debugPath" -> "./data/debug/uboat/"))
pipe
}
private def buildInfoPipe(): Lineage = {
val pipe = new Lineage("UboatInfo")
pipe.addStage(classOf[StringMapper], "inputCol" -> "navy", "outputCol" -> "country", "mapping" -> UboatSchema.alliesMap)
pipe.addStage(classOf[StringMapper], "inputCol" -> "navy", "mapping" -> UboatSchema.navyMap)
pipe.addStage(classOf[StringCapitaliser], "inputCol" -> "type")
pipe.addStage(classOf[StringTrimmer], "inputCol" -> "type")
pipe.addStage(classOf[StringTrimmer], "inputCol" -> "class")
pipe.addStage(classOf[StringTrimmer], "inputCol" -> "commissioned")
pipe.addStage(classOf[StringTrimmer], "inputCol" -> "endService")
    pipe.addStage(classOf[RegexGroupExtractor], "inputCol" -> "name", "pattern" -> "^(.+?)( \\(.+?\\)){0,1}$")
pipe.addStage(classOf[RegexGroupExtractor], "inputCol" -> "name", "pattern" -> "^(?:[HU][A-Z]{1,3}[SC] ){0,1}(.+?)$")
    pipe.addStage(classOf[RegexValidator], "inputCol" -> "name", "pattern" -> "^\\D+$")
pipe.addStage(classOf[RegexValidator], "inputCol" -> "name", "pattern" -> "^((?!HMS).)*$")
pipe.addStage(classOf[RegexValidator], "inputCol" -> "name", "pattern" -> "^((?!USS).)*$")
pipe.addPassThroughTransformer(classOf[StringStatistics], Map("isDebug" -> true, "debugPath" -> "./data/debug/uboat/"))
pipe
}
private def buildExtPipe(): Lineage = {
val pipe = new Lineage("UboatExt")
pipe.addPassThroughTransformer(classOf[StringStatistics], Map("isDebug" -> true, "debugPath" -> "./data/debug/uboat/"))
pipe
}
def main(args: Array[String]): Unit = {
val sess = SparkSession.builder()
.config("spark.driver.host", "localhost")
.master("local[4]")
.getOrCreate()
DeleteUtil.delete(new File("./data/web/uboat"))
ZipperUtil.unzip("./archives/data/web/uboat.zip", "./data/web/uboat")
val dfShip = extractFile(sess, new File("./data/web/uboat/uboatShip.csv"), false)
val dfClass = extractFile(sess, new File("./data/web/uboat/uboatClass.csv"), false)
val cPipe = buildClassPipe()
val rClModel = cPipe.fit(dfClass)
val rdfClass = rClModel.transform(dfClass)
val sPipe = buildShipPipe()
val rSpModel = sPipe.fit(dfShip)
val rdfShip = rSpModel.transform(dfShip)
val rdf = rdfShip.join(rdfClass, Seq("classUrl"), "outer")
val iPipe = buildInfoPipe()
val rInModel = iPipe.fit(rdf)
val rInf = rInModel.transform(rdf)
val serviceUDF = udf[Boolean, String]((s: String) => s != null && s.length > 0)
val rInfTmp = rInf.select("name", "type", "class", "navy", "country", "commissioned", "endService", "lost")
.filter(col("country") =!= "Other")
.filter(col("class") =!= "[No specific class]")
//.withColumn("served", serviceUDF(col("commissioned")))
.withColumn("startDate", dateUdf(col("commissioned")))
.withColumn("endServiceDate", dateUdf(col("endService")))
.withColumn("lossDate", dateUdf(col("lost")))
.withColumn("endDate", dateComboUdf(col("endServiceDate"),col("lossDate")))
.withColumn("daysActive", datediff(col("endDate"),col("startDate")))
.drop("commissioned", "endService", "lost", "endServiceDate","lossDate")
val ePipe = buildExtPipe()
val rEModel = ePipe.fit(rInfTmp)
val rInfx = rEModel.transform(rInfTmp)
val finalOrd: Ordering[Seq[String]] = Ordering.by(seq => seq.head)
writeDF(rInfx, "./data/web/uboat/uboatInfo.csv", "UTF-8", (s: Seq[String]) => true, finalOrd)
}
}
|
zakski/project-cadisainmduit
|
module/dataprep-people/src/main/scala/com/szadowsz/cadisainmduit/ships/uboat/UboatPreparer.scala
|
Scala
|
apache-2.0
| 5,899 |
package com.zczapran.freshmail.model
case class Auth(
key: String,
sign: String
)
object Auth {
  def build(key: String, path: String, json: String, secret: String): Auth = {
val md = java.security.MessageDigest.getInstance("SHA-1")
Auth(
key = key,
sign = md.digest((key ++ path ++ json ++ secret).getBytes("UTF-8")).map("%02x".format(_)).mkString
)
}
}
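// Usage sketch (hypothetical values): Auth.build("key", "/rest/ping", """{"a":1}""", "secret").sign
// is the lowercase hex SHA-1 digest of the concatenation key + path + json + secret.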
|
zczapran/freshmail-scala
|
src/main/scala/com/zczapran/freshmail/model/Auth.scala
|
Scala
|
mit
| 381 |
package example
import java.util.NoSuchElementException
import org.scalatest.FunSuite
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
/**
* This class implements a ScalaTest test suite for the methods in object
* `Lists` that need to be implemented as part of this assignment. A test
* suite is simply a collection of individual tests for some specific
* component of a program.
*
* A test suite is created by defining a class which extends the type
* `org.scalatest.FunSuite`. When running ScalaTest, it will automatically
* find this class and execute all of its tests.
*
* Adding the `@RunWith` annotation enables the test suite to be executed
* inside eclipse using the built-in JUnit test runner.
*
* You have two options for running this test suite:
*
* - Start the sbt console and run the "test" command
 * - Right-click this file in eclipse and choose "Run As" - "JUnit Test"
*/
@RunWith(classOf[JUnitRunner])
class ListsSuite extends FunSuite {
/**
* Tests are written using the `test` operator which takes two arguments:
*
* - A description of the test. This description has to be unique, no two
* tests can have the same description.
* - The test body, a piece of Scala code that implements the test
*
* The most common way to implement a test body is using the method `assert`
* which tests that its argument evaluates to `true`. So one of the simplest
* successful tests is the following:
*/
test("one plus one is two")(assert(1 + 1 == 2))
/**
* In Scala, it is allowed to pass an argument to a method using the block
* syntax, i.e. `{ argument }` instead of parentheses `(argument)`.
*
* This allows tests to be written in a more readable manner:
*/
test("one plus one is three?") {
    assert(1 + 1 != 3) // This assertion has been fixed and now passes.
}
/**
* One problem with the previous (failing) test is that ScalaTest will
* only tell you that a test failed, but it will not tell you what was
* the reason for the failure. The output looks like this:
*
* {{{
* [info] - one plus one is three? *** FAILED ***
* }}}
*
* This situation can be improved by using a special equality operator
* `===` instead of `==` (this is only possible in ScalaTest). So if you
* run the next test, ScalaTest will show the following output:
*
* {{{
* [info] - details why one plus one is not three *** FAILED ***
* [info] 2 did not equal 3 (ListsSuite.scala:67)
* }}}
*
* We recommend to always use the `===` equality operator when writing tests.
*/
test("details why one plus one is not three") {
    assert(2 + 1 === 3) // Fixed: the sum on the left really is three now.
}
/**
   * In order to test the exceptional behavior of a method, ScalaTest offers
* the `intercept` operation.
*
* In the following example, we test the fact that the method `intNotZero`
* throws an `IllegalArgumentException` if its argument is `0`.
*/
test("intNotZero throws an exception if its argument is 0") {
intercept[IllegalArgumentException] {
intNotZero(0)
}
}
def intNotZero(x: Int): Int = {
if (x == 0) throw new IllegalArgumentException("zero is not allowed")
else x
}
/**
* Now we finally write some tests for the list functions that have to be
   * implemented for this assignment. We first import all members of the
   * `Lists` object.
*/
import Lists._
/**
* We only provide two very basic tests for you. Write more tests to make
* sure your `sum` and `max` methods work as expected.
*
* In particular, write tests for corner cases: negative numbers, zeros,
* empty lists, lists with repeated elements, etc.
*
* It is allowed to have multiple `assert` statements inside one test,
* however it is recommended to write an individual `test` statement for
* every tested aspect of a method.
*/
test("sum of List(1,2,0)") {
assert(sum(List(1,2,0)) === 3)
}
test("sum of List(-1,-2,-3)") {
assert(sum(List(-1,-2,-3)) === -6)
}
test("sum of List(0,0)") {
assert(sum(List(0,0)) === 0)
}
test("sum of List(), empty list") {
assert(sum(List()) === 0)
}
test("sum of List(-3,-3,-3)") {
assert(sum(List(-3,-3,-3)) === -9)
}
test("max of an empty list throws exception") {
intercept[java.util.NoSuchElementException] {
max(List())
}
}
test("max of List(3,7,2) is 7") {
assert(max(List(3, 7, 2)) === 7)
}
test("max of List(12) is 12") {
assert(max(List(12)) === 12)
}
test("max of List(2,2,3) is 3") {
assert(max(List(2,2,3)) === 3)
}
test("max of List(-5,-2,-3,-5,-4) is -2") {
assert(max(List(-5,-2,-3,-5,-4)) === -2)
}
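  // Two more corner-case tests in the same style, assuming the assignment's
  // usual semantics for `sum` and `max`:
  test("sum of List(5,-5,5) is 5") {
    assert(sum(List(5, -5, 5)) === 5)
  }
  test("max of List(0,-1) is 0") {
    assert(max(List(0, -1)) === 0)
  }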
}
|
jeffreylloydbrown/classwork
|
FunctionalProgrammingWithScala/example/src/test/scala/example/ListsSuite.scala
|
Scala
|
unlicense
| 4,745 |
package dk.gp.cogp.testutils
import breeze.linalg.DenseMatrix
import breeze.linalg._
import java.io._
import dk.gp.cogp.model.Task
object loadToyModelData {
def apply(n: Int = Int.MaxValue, y0Filter: (Double) => Boolean = (x) => true, y1Filter: (Double) => Boolean = (x) => true): Array[Task] = {
val allData = csvread(new File("src/test/resources/cogp/cogp_no_missing_points.csv"))
val data = n match {
case Int.MaxValue => allData
case _ => allData(0 until n, ::)
}
val y0Idx = data(::, 0).findAll { x => y0Filter(x) }
val data0 = data(y0Idx, ::).toDenseMatrix
val task0 = Task(data0(::, 0 to 0), data0(::, 1))
val y1Idx = data(::, 0).findAll { x => y1Filter(x) }
val data1 = data(y1Idx, ::).toDenseMatrix
val task1 = Task(data1(::, 0 to 0), data1(::, 2))
Array(task0, task1)
}
}
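// Usage sketch: loadToyModelData(200, y0Filter = _ < 0.5) keeps the first 200 rows
// and, for task 0, only the rows whose x value (column 0) is below 0.5.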
|
danielkorzekwa/bayes-scala-gp
|
src/test/scala/dk/gp/cogp/testutils/loadToyModelData.scala
|
Scala
|
bsd-2-clause
| 855 |
/*
* Copyright (C) 2009-2013 Typesafe Inc. <http://www.typesafe.com>
*/
package play.api.libs.openid
import org.specs2.mutable.Specification
object UserInfoSpec extends Specification {
val claimedId = "http://example.com/openid?id=C123"
val identity = "http://example.com/openid?id=C123&id"
val defaultSigned = "op_endpoint,claimed_id,identity,return_to,response_nonce,assoc_handle"
"UserInfo" should {
"successfully be created using the value of the openid.claimed_id field" in {
val userInfo = UserInfo(createDefaultResponse(claimedId, identity, defaultSigned))
userInfo.id must be equalTo claimedId
userInfo.attributes must beEmpty
}
"successfully be created using the value of the openid.identity field" in {
      // For this test the claimed_id is removed to check that id falls back to the identity value.
val userInfo = UserInfo(createDefaultResponse(claimedId, identity, defaultSigned) - "openid.claimed_id")
userInfo.id must be equalTo identity
userInfo.attributes must beEmpty
}
}
"UserInfo" should {
"not include attributes that are not signed" in {
val requestParams = createDefaultResponseWithAttributeExchange ++ Map[String, Seq[String]](
"openid.ext1.type.email" -> "http://schema.openid.net/contact/email",
"openid.ext1.value.email" -> "[email protected]",
"openid.signed" -> defaultSigned) // the email attribute is not in the list of signed fields
val userInfo = UserInfo(requestParams)
userInfo.attributes.get("email") must beNone
}
"include attributes that are signed" in {
val requestParams = createDefaultResponseWithAttributeExchange ++ Map[String, Seq[String]](
"openid.ext1.type.email" -> "http://schema.openid.net/contact/email",
"openid.ext1.value.email" -> "[email protected]", // the email attribute *is* in the list of signed fields
"openid.signed" -> (defaultSigned + "ns.ext1,ext1.mode,ext1.type.email,ext1.value.email"))
val userInfo = UserInfo(requestParams)
userInfo.attributes.get("email") must beSome("[email protected]")
}
"include multi valued attributes that are signed" in {
val requestParams = createDefaultResponseWithAttributeExchange ++ Map[String, Seq[String]](
"openid.ext1.type.fav_movie" -> "http://example.com/schema/favourite_movie",
"openid.ext1.count.fav_movie" -> "2",
"openid.ext1.value.fav_movie.1" -> "Movie1",
"openid.ext1.value.fav_movie.2" -> "Movie2",
"openid.signed" -> (defaultSigned + "ns.ext1,ext1.mode,ext1.type.fav_movie,ext1.value.fav_movie.1,ext1.value.fav_movie.2,ext1.count.fav_movie"))
val userInfo = UserInfo(requestParams)
userInfo.attributes.size must be equalTo 2
userInfo.attributes.get("fav_movie.1") must beSome("Movie1")
userInfo.attributes.get("fav_movie.2") must beSome("Movie2")
}
}
"only include attributes that have a value" in {
val requestParams = createDefaultResponseWithAttributeExchange ++ Map[String, Seq[String]](
"openid.ext1.type.firstName" -> "http://axschema.org/namePerson/first",
"openid.ext1.value.firstName" -> Nil,
"openid.signed" -> (defaultSigned + "ns.ext1,ext1.mode,ext1.type.email,ext1.value.email,ext1.type.firstName,ext1.value.firstName"))
val userInfo = UserInfo(requestParams)
userInfo.attributes.get("firstName") must beNone
}
// http://openid.net/specs/openid-attribute-exchange-1_0.html#fetch_response
private def createDefaultResponseWithAttributeExchange = Map[String, Seq[String]](
"openid.ns.ext1" -> "http://openid.net/srv/ax/1.0",
"openid.ext1.mode" -> "fetch_response"
) ++ createDefaultResponse(claimedId, identity, defaultSigned)
}
|
jyotikamboj/container
|
pf-framework/src/play-ws/src/test/scala/play/api/libs/openid/UserInfoSpec.scala
|
Scala
|
mit
| 3,749 |
import sbt.Keys._
import sbt._
object BuildSettings {
val app = "kipper-calendar"
val dist = TaskKey[Unit]("dist")
val kipperCalendarBuildSettings: Seq[Setting[_]] = Seq[Setting[_]](
scalacOptions := Seq( "-feature", "-language:_", "-deprecation", "-unchecked"),
logBuffered := true,
logBuffered in Test := false,
javaOptions ++= Seq("-Xmx1G", "-Xss4m", "-server")
)
}
object SbtBuild extends Build {
import BuildSettings._
import sbt._
lazy val kipperCalendar = Project("KipperCalendar", file(".")).settings(kipperCalendarBuildSettings: _*)
}
|
kipperjim/kipper-calendar
|
project/SbtBuild.scala
|
Scala
|
mit
| 577 |
import sbt._
import Keys._
object Dependencies {
lazy val scala282 = "2.8.2"
lazy val scala292 = "2.9.2"
lazy val scala293 = "2.9.3"
lazy val scala210 = "2.10.5"
lazy val scala211 = "2.11.7"
lazy val jline = "jline" % "jline" % "2.11"
lazy val ivy = "org.scala-sbt.ivy" % "ivy" % "2.3.0-sbt-d592b1b0f77cf706e882b1b8e0162dee28165fb2"
lazy val jsch = "com.jcraft" % "jsch" % "0.1.46" intransitive ()
lazy val sbinary = "org.scala-tools.sbinary" %% "sbinary" % "0.4.2"
lazy val sbtSerialization = "org.scala-sbt" %% "serialization" % "0.1.2"
lazy val scalaCompiler = Def.setting { "org.scala-lang" % "scala-compiler" % scalaVersion.value }
lazy val scalaReflect = Def.setting { "org.scala-lang" % "scala-reflect" % scalaVersion.value }
lazy val testInterface = "org.scala-sbt" % "test-interface" % "1.0"
lazy val scalaCheck = "org.scalacheck" %% "scalacheck" % "1.11.4"
lazy val specs2 = "org.specs2" %% "specs2" % "2.3.11"
lazy val junit = "junit" % "junit" % "4.11"
lazy val launcherInterface = "org.scala-sbt" % "launcher-interface" % "1.0.0-M1"
lazy val rawLauncher = "org.scala-sbt" % "launcher" % "1.0.0-M1"
private def scala211Module(name: String, moduleVersion: String) =
Def.setting {
scalaVersion.value match {
case sv if (sv startsWith "2.9.") || (sv startsWith "2.10.") => Nil
case _ => ("org.scala-lang.modules" %% name % moduleVersion) :: Nil
}
}
lazy val scalaXml = scala211Module("scala-xml", "1.0.1")
lazy val scalaParsers = scala211Module("scala-parser-combinators", "1.0.1")
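  // For example, with scalaVersion "2.10.5" scalaXml evaluates to Nil, while with
  // "2.11.7" it evaluates to ("org.scala-lang.modules" %% "scala-xml" % "1.0.1") :: Nil.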
  // Maven related dependency craziness
//val mvnEmbedder = "org.apache.maven" % "maven-embedder" % mvnVersion
val mvnWagonVersion = "2.4"
val mvnVersion = "3.2.3"
val aetherVersion = "1.0.1.v20141111"
val mvnAether = "org.apache.maven" % "maven-aether-provider" % mvnVersion
val aether = "org.eclipse.aether" % "aether" % aetherVersion
val aetherImpl = "org.eclipse.aether" % "aether-impl" % aetherVersion
val aetherUtil = "org.eclipse.aether" % "aether-util" % aetherVersion
val aetherTransportFile = "org.eclipse.aether" % "aether-transport-file" % aetherVersion
val aetherTransportWagon = "org.eclipse.aether" % "aether-transport-wagon" % aetherVersion
val aetherTransportHttp = "org.eclipse.aether" % "aether-transport-http" % aetherVersion
val aetherConnectorBasic = "org.eclipse.aether" % "aether-connector-basic" % aetherVersion
val sisuPlexus = ("org.eclipse.sisu" % "org.eclipse.sisu.plexus" % "0.3.0.M1").exclude("javax.enterprise", "cdi-api").exclude("com.google.code.findbugs", "jsr305")
val guice = "com.google.inject" % "guice" % "3.0"
val guava = "com.google.guava" % "guava" % "18.0"
val javaxInject = "javax.inject" % "javax.inject" % "1"
//val sisuGuice = ("org.eclipse.sisu" % "sisu-guice" % "3.1.0").classifier("no_aop").exclude("javax.enterprise", "cdi-api", )
/*
val mvnWagon = "org.apache.maven.wagon" % "wagon-http" % mvnWagonVersion
val mvnWagonProviderApi = "org.apache.maven.wagon" % "wagon-provider-api" % mvnWagonVersion
val mvnWagonLwHttp = "org.apache.maven.wagon" % "wagon-http-lightweight" % mvnWagonVersion
val mvnWagonFile = "org.apache.maven.wagon" % "wagon-file" % mvnWagonVersion
*/
def aetherLibs =
Seq(
guava,
javaxInject,
sisuPlexus,
aetherImpl,
aetherConnectorBasic,
mvnAether)
}
|
adriaanm/sbt
|
project/Dependencies.scala
|
Scala
|
bsd-3-clause
| 3,388 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.bwsw.sj.engine.output.processing
import com.bwsw.common.es.ElasticsearchClient
import com.bwsw.sj.common.dal.model.stream.ESStreamDomain
import com.bwsw.sj.common.engine.core.entities.{OutputEnvelope, TStreamEnvelope}
import com.bwsw.sj.common.engine.core.output.Entity
import com.bwsw.sj.common.engine.core.reporting.PerformanceMetrics
import com.bwsw.sj.engine.core.output.types.es.ElasticsearchCommandBuilder
import com.bwsw.sj.engine.output.task.OutputTaskManager
import org.elasticsearch.action.bulk.BulkRequestBuilder
import scaldi.Injector
/**
* ref. [[OutputProcessor]] object
*/
class EsOutputProcessor[T <: AnyRef](esStream: ESStreamDomain,
performanceMetrics: PerformanceMetrics,
manager: OutputTaskManager,
entity: Entity[_])
(implicit injector: Injector)
extends AsyncOutputProcessor[T](esStream, performanceMetrics) {
private val esService = esStream.service
private val esClient = openConnection()
private var maybeBulkRequestBuilder: Option[BulkRequestBuilder] = None
private var inputEnvelopesInBulk: Long = 0
private val maxInputEnvelopesPerBulk = Seq(manager.outputInstance.checkpointInterval / outputParallelism, 1L).max
private var lastInputEnvelopeId: Long = 0
override protected val commandBuilder: ElasticsearchCommandBuilder =
new ElasticsearchCommandBuilder(transactionFieldName, entity.asInstanceOf[Entity[String]])
private def openConnection(): ElasticsearchClient = {
logger.info(s"Open a connection to elasticsearch at address: '${esService.provider.hosts}'.")
val hosts = esService.provider.hosts.map(splitHost).toSet
val maybeUsername = Option(esService.provider.login)
val maybePassword = Option(esService.provider.password)
new ElasticsearchClient(hosts, maybeUsername, maybePassword)
}
private def splitHost(host: String): (String, Int) = {
val parts = host.split(":")
(parts(0), parts(1).toInt)
}
def delete(envelope: TStreamEnvelope[T]): Unit = {
val index = esService.index
val streamName = esStream.name
logger.debug(s"Delete a transaction: '${envelope.id}' from elasticsearch stream.")
if (esClient.doesIndexExist(index)) {
val query = commandBuilder.buildDelete(envelope.id)
esClient.deleteDocuments(index, streamName, query)
}
}
override def send(envelope: OutputEnvelope, inputEnvelope: TStreamEnvelope[T]): Unit = {
if (maybeBulkRequestBuilder.isEmpty)
maybeBulkRequestBuilder = Some(esClient.createBulk())
if (lastInputEnvelopeId != inputEnvelope.id) {
lastInputEnvelopeId = inputEnvelope.id
inputEnvelopesInBulk += 1
}
val bulkRequestBuilder = maybeBulkRequestBuilder.get
val esFieldsValue = envelope.getFieldsValue
val data = commandBuilder.buildInsert(inputEnvelope.id, esFieldsValue)
logger.debug(s"Task: ${manager.taskName}. Write an output envelope to elasticsearch stream.")
esClient.addToBulk(bulkRequestBuilder, data, esService.index, esStream.name)
if (inputEnvelopesInBulk > maxInputEnvelopesPerBulk) sendBulk()
}
override def checkpoint(): Unit = {
sendBulk()
super.checkpoint()
}
override def close(): Unit = {
esClient.close()
}
private def sendBulk(): Unit = {
maybeBulkRequestBuilder match {
case Some(bulkRequestBuilder) =>
runInFuture(() => bulkRequestBuilder.get())
inputEnvelopesInBulk = 0
maybeBulkRequestBuilder = None
case None =>
}
}
}
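// Batching sketch (illustrative numbers): with checkpointInterval = 100 and
// outputParallelism = 4, maxInputEnvelopesPerBulk = max(100 / 4, 1) = 25, so the
// bulk is flushed once a 26th distinct input envelope arrives, or at the next
// checkpoint, whichever comes first.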
|
bwsw/sj-platform
|
core/sj-output-streaming-engine/src/main/scala/com/bwsw/sj/engine/output/processing/EsOutputProcessor.scala
|
Scala
|
apache-2.0
| 4,411 |
package edu.rice.habanero.benchmarks.concdict
import edu.rice.habanero.benchmarks.BenchmarkRunner
import edu.rice.hj.Module0._
import edu.rice.hj.api.HjSuspendable
import edu.rice.hj.experimental.actors.ReaderWriterPolicy
/**
*
* @author <a href="http://shams.web.rice.edu/">Shams Imam</a> ([email protected])
*/
object DictionaryHabaneroRWWriterFirstBenchmark {
def main(args: Array[String]) {
BenchmarkRunner.runBenchmark(args, new DictionaryHabaneroRWWriterFirstBenchmark)
}
private final class DictionaryHabaneroRWWriterFirstBenchmark extends DictionaryHabaneroRWAbstractBenchmark.DictionaryHabaneroRWAbstractBenchmark {
def runIteration() {
finish(new HjSuspendable {
override def run() = {
val numWorkers: Int = DictionaryConfig.NUM_ENTITIES
val numMessagesPerWorker: Int = DictionaryConfig.NUM_MSGS_PER_WORKER
val master = new DictionaryHabaneroRWAbstractBenchmark.Master(numWorkers, numMessagesPerWorker, ReaderWriterPolicy.WRITER_PRIORITY)
master.start()
}
})
}
}
}
|
shamsmahmood/savina
|
src/main/scala/edu/rice/habanero/benchmarks/concdict/DictionaryHabaneroRWWriterFirstBenchmark.scala
|
Scala
|
gpl-2.0
| 1,070 |
/***********************************************************************
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.utils.conf
import com.typesafe.config.Config
object ConfConversions {
implicit class RichConfig(val base: Config) {
def getStringOpt(path: String): Option[String] =
if (base.hasPath(path)) Some(base.getString(path)) else None
def getBooleanOpt(path: String): Option[Boolean] =
if (base.hasPath(path)) Some(base.getBoolean(path)) else None
def getIntOpt(path: String): Option[Int] =
if (base.hasPath(path)) Some(base.getInt(path)) else None
def getLongOpt(path: String): Option[Long] =
if (base.hasPath(path)) Some(base.getLong(path)) else None
def getDoubleOpt(path: String): Option[Double] =
if (base.hasPath(path)) Some(base.getDouble(path)) else None
def getConfigOpt(path: String): Option[Config] =
if (base.hasPath(path)) Some(base.getConfig(path)) else None
def getConfigListOpt(path: String): Option[java.util.List[_ <: Config]] =
if (base.hasPath(path)) Some(base.getConfigList(path)) else None
def getStringListOpt(path: String): Option[java.util.List[String]] =
if (base.hasPath(path)) Some(base.getStringList(path)) else None
}
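  // Usage sketch (hypothetical key): given a Config value `conf` and
  // `import ConfConversions._` in scope, conf.getIntOpt("geomesa.batch.size")
  // yields Some(n) when the path resolves to an int and None, rather than an
  // exception, when the path is absent.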
}
|
ronq/geomesa
|
geomesa-utils/src/main/scala/org/locationtech/geomesa/utils/conf/ConfConversions.scala
|
Scala
|
apache-2.0
| 1,633 |
package org.http4s
package server
package middleware
package authentication
import java.util.concurrent.Executors
import org.http4s.dsl._
import org.http4s.headers._
import org.http4s.parser.HttpHeaderParser
import org.http4s.util.CaseInsensitiveString
import scalaz.concurrent.Task
import scala.concurrent.duration._
class AuthenticationSpec extends Http4sSpec {
def nukeService(launchTheNukes: => Unit) = AuthedService[String] {
case GET -> Root / "launch-the-nukes" as user =>
for {
_ <- Task.delay(launchTheNukes)
r <- Response(Gone).withBody(s"Oops, ${user} launched the nukes.")
} yield r
}
val realm = "Test Realm"
val username = "Test User"
val password = "Test Password"
def authStore(u: String) = Task.now {
if (u == username) Some(u -> password)
else None
}
def validatePassword(creds: BasicCredentials) = Task.now {
if (creds.username == username && creds.password == password) Some(creds.username)
else None
}
val service = AuthedService[String] {
case GET -> Root as user => Ok(user)
case req as _ => Response.notFound(req)
}
"Failure to authenticate" should {
"not run unauthorized routes" in {
val req = Request(uri = Uri(path = "/launch-the-nukes"))
var isNuked = false
val authedValidateNukeService = BasicAuth(realm, validatePassword _)(nukeService { isNuked = true })
val res = authedValidateNukeService.run(req).run
isNuked must_== false
res.status must_== (Unauthorized)
}
}
"BasicAuthentication" should {
val basicAuthedService = BasicAuth(realm, validatePassword _)(service)
"Respond to a request without authentication with 401" in {
val req = Request(uri = Uri(path = "/"))
val res = basicAuthedService.run(req).run
res.status must_== (Unauthorized)
res.headers.get(`WWW-Authenticate`).map(_.value) must_== (Some(Challenge("Basic", realm, Nil.toMap).toString))
}
"Respond to a request with unknown username with 401" in {
val req = Request(uri = Uri(path = "/"), headers = Headers(Authorization(BasicCredentials("Wrong User", password))))
val res = basicAuthedService.run(req).run
res.status must_== (Unauthorized)
res.headers.get(`WWW-Authenticate`).map(_.value) must_== (Some(Challenge("Basic", realm, Nil.toMap).toString))
}
"Respond to a request with wrong password with 401" in {
val req = Request(uri = Uri(path = "/"), headers = Headers(Authorization(BasicCredentials(username, "Wrong Password"))))
val res = basicAuthedService.run(req).run
res.status must_== (Unauthorized)
res.headers.get(`WWW-Authenticate`).map(_.value) must_== (Some(Challenge("Basic", realm, Nil.toMap).toString))
}
"Respond to a request with correct credentials" in {
val req = Request(uri = Uri(path = "/"), headers = Headers(Authorization(BasicCredentials(username, password))))
val res = basicAuthedService.run(req).run
res.status must_== (Ok)
}
}
private def parse(value: String) = HttpHeaderParser.WWW_AUTHENTICATE(value).fold(err => sys.error(s"Couldn't parse: $value"), identity)
"DigestAuthentication" should {
"Respond to a request without authentication with 401" in {
val authedService = DigestAuth(realm, authStore)(service)
val req = Request(uri = Uri(path = "/"))
val res = authedService.run(req).run
res.status must_== (Status.Unauthorized)
val opt = res.headers.get(`WWW-Authenticate`).map(_.value)
opt.isDefined must beTrue
val challenge = parse(opt.get).values.head
(challenge match {
case Challenge("Digest", realm, _) => true
case _ => false
}) must_== true
ok
}
// Send a request without authorization, receive challenge.
def doDigestAuth1(digest: HttpService) = {
// Get auth data
val req = Request(uri = Uri(path = "/"))
val res = digest.apply(req).run
res.status must_== (Unauthorized)
val opt = res.headers.get(`WWW-Authenticate`).map(_.value)
opt.isDefined must beTrue
val challenge = parse(opt.get).values.head
challenge
}
// Respond to a challenge with a correct response.
// If withReplay is true, also send a replayed request.
def doDigestAuth2(digest: HttpService, challenge: Challenge, withReplay: Boolean) = {
// Second request with credentials
val method = "GET"
val uri = "/"
val qop = "auth"
val nc = "00000001"
val cnonce = "abcdef"
val nonce = challenge.params("nonce")
val response = DigestUtil.computeResponse(method, username, realm, password, uri, nonce, nc, cnonce, qop)
val params: Map[String, String] = Map("username" -> username, "realm" -> realm, "nonce" -> nonce,
"uri" -> uri, "qop" -> qop, "nc" -> nc, "cnonce" -> cnonce, "response" -> response,
"method" -> method)
val header = Authorization(GenericCredentials(CaseInsensitiveString("Digest"), params))
val req2 = Request(uri = Uri(path = "/"), headers = Headers(header))
val res2 = digest.apply(req2).run
if (withReplay) {
val res3 = digest.apply(req2).run
(res2, res3)
} else
(res2, null)
}
"Respond to a request with correct credentials" in {
val digestAuthService = DigestAuth(realm, authStore)(service)
val challenge = doDigestAuth1(digestAuthService)
(challenge match {
case Challenge("Digest", realm, _) => true
case _ => false
}) must_== true
val (res2, res3) = doDigestAuth2(digestAuthService, challenge, true)
res2.status must_== (Ok)
// Digest prevents replay
res3.status must_== (Unauthorized)
ok
}
"Respond to many concurrent requests while cleaning up nonces" in {
val n = 100
val sched = Executors.newFixedThreadPool(4)
val digestAuthService = DigestAuth(realm, authStore, 2.millis, 2.millis)(service)
val tasks = (1 to n).map(i =>
Task {
val challenge = doDigestAuth1(digestAuthService)
(challenge match {
case Challenge("Digest", realm, _) => true
case _ => false
}) must_== true
val res = doDigestAuth2(digestAuthService, challenge, false)._1
// We don't check whether res.status is Ok since it may not
// be due to the low nonce stale timer. Instead, we check
// that it's found.
res.status mustNotEqual (NotFound)
}(sched))
Task.gatherUnordered(tasks).run
ok
}
"Avoid many concurrent replay attacks" in {
val n = 100
val sched = Executors.newFixedThreadPool(4)
val digestAuthService = DigestAuth(realm, authStore)(service)
val challenge = doDigestAuth1(digestAuthService)
val tasks = (1 to n).map(i =>
Task {
val res = doDigestAuth2(digestAuthService, challenge, false)._1
res.status
}(sched))
val res = Task.gatherUnordered(tasks).run
res.filter(s => s == Ok).size must_== 1
res.filter(s => s == Unauthorized).size must_== n - 1
ok
}
"Respond to invalid requests with 401" in {
val digestAuthService = DigestAuth(realm, authStore)(service)
val method = "GET"
val uri = "/"
val qop = "auth"
val nc = "00000001"
val cnonce = "abcdef"
val nonce = "abcdef"
val response = DigestUtil.computeResponse(method, username, realm, password, uri, nonce, nc, cnonce, qop)
val params: Map[String, String] = Map("username" -> username, "realm" -> realm, "nonce" -> nonce,
"uri" -> uri, "qop" -> qop, "nc" -> nc, "cnonce" -> cnonce, "response" -> response,
"method" -> method)
val expected = (0 to params.size).map(i => Unauthorized)
val result = (0 to params.size).map(i => {
val invalid_params = params.take(i) ++ params.drop(i + 1)
val header = Authorization(GenericCredentials(CaseInsensitiveString("Digest"), invalid_params))
val req = Request(uri = Uri(path = "/"), headers = Headers(header))
val res = digestAuthService.run(req).run
res.status
})
expected must_== result
ok
}
}
}
|
m4dc4p/http4s
|
server/src/test/scala/org/http4s/server/middleware/authentication/AuthenticationSpec.scala
|
Scala
|
apache-2.0
| 8,282 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600e.v3
import uk.gov.hmrc.ct.box._
import uk.gov.hmrc.ct.ct600e.v3.retriever.CT600EBoxRetriever
case class E45(value: Option[Boolean]) extends CtBoxIdentifier("during the period covered by these supplementary pages have you over claimed tax?") with CtOptionalBoolean with Input with ValidatableBox[CT600EBoxRetriever] {
override def validate(boxRetriever: CT600EBoxRetriever): Set[CtValidation] = validateBooleanAsMandatory("E45", this)
}
|
hmrc/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/ct600e/v3/E45.scala
|
Scala
|
apache-2.0
| 1,074 |
package uk.gov.gds.ier.transaction.overseas.confirmation.blocks
trait DateOfBirthBlocks {
self: ConfirmationBlock =>
def dateOfBirth = {
val dob = form.dateOfBirth map { dob =>
dob.toString("d MMMM yyyy")
} getOrElse ""
ConfirmationQuestion(
title = "Date of birth",
editLink = overseas.DateOfBirthStep.routing.editGet.url,
changeName = "date of birth",
content = ifComplete(keys.dob) {
List(dob)
}
)
}
}
|
michaeldfallen/ier-frontend
|
app/uk/gov/gds/ier/transaction/overseas/confirmation/blocks/DateOfBirthBlocks.scala
|
Scala
|
mit
| 474 |
/*
* Copyright (c) 2014-2015 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics
package snowplow
package enrich
package common
package utils
package shredder
// Snowplow Utils
import util.Tap._
// Snowplow Common Enrich
import outputs.EnrichedEvent
// Specs2
import org.specs2.Specification
class ShredderSpec extends Specification /*with DataTables with ValidationMatchers*/ { def is =
"This is a specification to test the Shredder functionality" ^
p^
"makePartialHierarchy should initialize a partial TypeHierarchy" ! e1^
"shred should extract the JSONs from an unstructured event with multiple contexts" ! e2^
end
val EventId = "f81d4fae-7dec-11d0-a765-00a0c91e6bf6"
val CollectorTimestamp = "2014-04-29 09:00:54.000"
implicit val resolver = SpecHelpers.IgluResolver
def e1 =
Shredder.makePartialHierarchy(EventId, CollectorTimestamp) must_==
TypeHierarchy(
rootId = EventId,
rootTstamp = CollectorTimestamp,
refRoot = "events",
refTree = List("events"),
refParent = "events")
def e2 = {
val event = new EnrichedEvent().tap { e =>
e.event_id = EventId
e.collector_tstamp = CollectorTimestamp
e.unstruct_event = """{"schema":"iglu:com.snowplowanalytics.snowplow/unstruct_event/jsonschema/1-0-0","data":{"schema":"iglu:com.snowplowanalytics.snowplow/link_click/jsonschema/1-0-0","data":{"targetUrl":"http://snowplowanalytics.com/blog/page2","elementClasses":["next"]}}}"""
e.contexts = """{"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-0","data":[{"schema":"iglu:org.schema/WebPage/jsonschema/1-0-0","data":{"datePublished":"2014-07-23T00:00:00Z","author":"Jonathan Almeida","inLanguage":"en-US","genre":"blog","breadcrumb":["blog","releases"],"keywords":["snowplow","analytics","java","jvm","tracker"]}},{"schema":"iglu:org.schema/WebPage/jsonschema/1-0-0","data":{"datePublished":"2014-07-23T00:00:00Z","author":"Jonathan Almeida","inLanguage":"en-US","genre":"blog","breadcrumb":["blog","releases"],"keywords":["snowplow","analytics","java","jvm","tracker"]}}]}"""
}
// TODO: check actual contents (have already confirmed in REPL)
Shredder.shred(event).toOption.get must have size(3)
}
}
|
mdavid/lessig-bigdata
|
lib/snowplow/3-enrich/scala-common-enrich/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/utils/shredder/ShredderSpec.scala
|
Scala
|
mit
| 3,122 |
/*
* Copyright 2011-2022 GatlingCorp (https://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.javaapi.http.internal
import java.{ lang => jl, util => ju }
import java.util.{ function => juf }
import io.gatling.commons.validation.{ safely, SuccessWrapper, Validation }
import io.gatling.core.session.{ Expression, Session => ScalaSession }
import io.gatling.core.session.el._
import io.gatling.http.response.Response
import io.gatling.javaapi.core.{ CheckBuilder, Session }
import io.gatling.javaapi.core.internal.Expressions._
import io.gatling.javaapi.core.internal.JavaExpression
import io.gatling.javaapi.http.HttpRequestActionBuilder
object ScalaHttpRequestActionBuilderConditions {
def untyped(context: io.gatling.http.request.builder.HttpRequestBuilder, condition: String): Untyped =
new Untyped(context, condition.el)
def untyped(context: io.gatling.http.request.builder.HttpRequestBuilder, condition: JavaExpression[jl.Boolean]): Untyped =
new Untyped(context, javaBooleanFunctionToExpression(condition))
final class Untyped(context: io.gatling.http.request.builder.HttpRequestBuilder, condition: Expression[Boolean]) {
def then_(checkBuilders: ju.List[CheckBuilder]): HttpRequestActionBuilder =
new HttpRequestActionBuilder(context.checkIf(condition)(HttpChecks.toScalaChecks(checkBuilders): _*))
}
def typed(context: io.gatling.http.request.builder.HttpRequestBuilder, condition: juf.BiFunction[Response, Session, jl.Boolean]): Typed =
new Typed(context, (u, session) => safely()(condition.apply(u, new Session(session)).booleanValue.success))
final class Typed(context: io.gatling.http.request.builder.HttpRequestBuilder, condition: (Response, ScalaSession) => Validation[Boolean]) {
def then_(checkBuilders: ju.List[CheckBuilder]): HttpRequestActionBuilder =
new HttpRequestActionBuilder(context.checkIf(condition)(HttpChecks.toScalaChecks(checkBuilders): _*))
}
}
|
gatling/gatling
|
gatling-http-java/src/main/scala/io/gatling/javaapi/http/internal/ScalaHttpRequestActionBuilderConditions.scala
|
Scala
|
apache-2.0
| 2,475 |
package org.mms.core.transform
import org.mms.core._
import org.mms.core.Property
import org.mms.core.codemodel.SourceMember
import org.mms.core.codemodel.IType
import org.mms.core.Type
import org.mms.core.runtime.RuntimeProperty
import org.mms.core.runtime.IRuntimeProperty
import org.mms.core.runtime.ICollectionProperty
import org.mms.core.runtime.ICollectionProperty
import org.mms.core.codemodel.SourceMember
import org.mms.core.codemodel.SourceMember
import org.mms.core.codemodel.SourceType
import org.mms.core.codemodel.SourceType
import org.mms.core.codemodel.IModelElement
import org.mms.core.codemodel.IMember
import org.mms.core.codemodel.IType
import javax.lang.model.element.PackageElement
import org.mms.core.codemodel.Package
import org.mms.core.ParentChildAssertion
import org.mms.core.codemodel.CodeModel
import org.mms.core.codemodel.SourceUnit
import org.mms.core.codemodel.Package
import org.mms.core.codegen.SimpleJavaPOJOCodeGen
import org.mms.core.codegen.FileSystemUnitWriter
import java.io.File
import org.mms.core.codemodel.ISourceType
import org.mms.core.ParentChildAssertion
/**
* code model related models
*/
object ITypeModel extends AbstractType {
}
object CodeModelModel extends ModelType {
val name = key(str);
val children = list(propOf(PackageElementModel));
}
object PackageElementModel extends ModelType() {
val name = key(str);
val parent = required(propOf(CodeModelModel));
val children = list(propOf(SourceUnitModel));
  ParentChildAssertion(parent, CodeModelModel.children); // a more convenient way is to mark the prop with parent
}
object SourceTypeModel extends ModelType(ITypeModel) {
val name = key(str);
val superClass = propOf(classOf[IType]);
val children = list(propOf(SourceMemberModel));
val parent = required(propOf(SourceUnitModel))
  ParentChildAssertion(parent, SourceUnitModel.children); // a more convenient way is to mark the prop with parent
//listof
}
object SourceUnitModel extends ModelType{
val name = key(str);
val children = list(propOf(SourceTypeModel));
val parent = required(propOf(PackageElementModel));
  ParentChildAssertion(parent, PackageElementModel.children); // a more convenient way is to mark the prop with parent
}
object SourceMemberModel extends ModelType {
val name = str;
val elementsType = propOf(classOf[IType])
val isList = bool;
val isReq = bool;
}
/**
 * meta model related models
*/
object TypeModel extends AbstractType {
val typeNameProp = str withName ("typeName");
val superTypeProp = propOf(TypeModel) withName ("superType")
  val toModel = computed(propOf(SourceTypeModel)).withName("toModelIfPossible")
}
object ModelTypeModel extends ModelType(TypeModel) {
val props = list(propOf(PropertyModelModel)).withName("declaredProperties");
val modelPackage = str.withName("packageName");
}
object Universe extends ModelType{
val types = listOf(ModelTypeModel);
}
object PropertyModelModel extends ModelType {
val name = str;
val range = propOf(classOf[Type]);
}
object ListPropModel extends ModelType(PropertyModelModel);
//this type is a member of both models
object BuiltInTypeModel extends ModelType(null, withTrait(ITypeModel, TypeModel))
//Knowledge data should not be global!!! //FIXME
object Mappings extends AssertionContainer {
def typeMappings() {
ITypeModel <=> classOf[IType];
SourceTypeModel <=> classOf[SourceType];
    PackageElementModel <=> classOf[Package];
    CodeModelModel <=> classOf[CodeModel];
    ListPropModel <=> classOf[ListProp[_, _]];
    SourceMemberModel <=> classOf[SourceMember]; // We should be able to build transform proto without mapping
    SourceUnitModel <=> classOf[SourceUnit]
    TypeModel <=> classOf[Type]
    PropertyModelModel <=> classOf[Property[_, _]] // We should check compatibility when stating it
    ModelTypeModel <=> classOf[ModelType[_]];
    Universe <=> classOf[TypeUniverse];
BuiltInTypeModel
}
//first init mappings to classes;
def definitions() = {
    ModelTypeModel <=> SourceTypeModel;
    // type to source type conversion
    ModelTypeModel.props <=> SourceTypeModel.children;
    ListPropModel <=> (SourceMemberModel.isList <=> true)
    TypeModel.typeNameProp <=> SourceTypeModel.name;
    TypeModel.superTypeProp <=> SourceTypeModel.superClass
    TypeModel pretransform TypeModel.toModel;
    SourceTypeModel.parent.$.parent.$.name <=> ModelTypeModel.modelPackage;
    SourceTypeModel.parent.$.name <=> TypeModel.typeNameProp;
    // Property to SourceMember conversion
    PropertyModelModel.name <=> SourceMemberModel.name;
    PropertyModelModel.range <=> SourceMemberModel.elementsType;
    Universe.types <=> CodeModelModel.children.$.children.$.children; // mind crash!!!
}
}
import Entity._;
object TestApp extends App {
Mappings.learn();
  val u = new TypeUniverse();
  u.add(SourceTypeModel);
  val cm = Transformers.transform(u, classOf[CodeModel]);
println(cm);
new SimpleJavaPOJOCodeGen(new FileSystemUnitWriter(new File("/Users/kor/Documents/scala/demo/src"))).doGenerate(cm);
}
|
petrochenko-pavel-a/mms.core
|
org.mms.core/src/main/scala/org/mms/core/transform/modelToSource.scala
|
Scala
|
epl-1.0
| 5,002 |
package com.enkidu.lignum.parsers.ast.expression.discardable.instantiation
import com.enkidu.lignum.parsers.ast.expression.ArrayInitializer
import com.enkidu.lignum.parsers.ast.expression.types.Type
case class InitializedArrayInstantiation(`type`: Type, initializer: ArrayInitializer) extends ArrayInstantiation {
override def dispatch(visitor: Visitor): Unit = {
`type`.dispatch(visitor)
initializer.dispatch(visitor)
apply(visitor)
}
}
|
marek1840/java-parser
|
src/main/scala/com/enkidu/lignum/parsers/ast/expression/discardable/instantiation/InitializedArrayInstantiation.scala
|
Scala
|
mit
| 456 |
var i = 0
while (i < args.length) {
  if (i != 0)
    print(" ")
  print(args(i))
  i += 1
}
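// For example, `scala echoargs.scala Hello world` prints: Hello world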
|
jmlb23/scala
|
libro_odersky/scripts_CH2/echoargs.scala
|
Scala
|
gpl-3.0
| 93 |
package domain.model.account
import scala.util.Try
trait AccountRepository {
def nextIdentity(): Try[AccountId]
def accountOfIdentity(id: AccountId): Try[Account]
def accountOfMail(mail: AccountMail): Try[Account]
def save(account: Account): Try[Account]
}
|
tarugo07/play-chat
|
app/domain/model/account/AccountRepository.scala
|
Scala
|
mit
| 272 |
//package muster
//
//import scala.language.experimental.macros
//import scala.reflect.macros._
//
//
//object TypeHints {
// def apply[T](fName: String) = new TypeHints[T] { val fieldName: String = fName }
//}
//
//trait TypeHints[T] {
// def fieldName: String
//
// def short[C <: T](): TypeHint[C] = macro shortImpl[C]
// def shortImpl[C <: T: c.WeakTypeTag](c: blackbox.Context): c.Expr[TypeHint[C]] = {
// val tpe = c.weakTypeOf[C].dealias
// generate[C, c.type](c)(fieldName, tpe.typeSymbol.name.decodedName.toString, tpe)
// }
//
// def full[C <: T](): TypeHint[C] = macro fullImpl[C]
// def fullImpl[C <: T: c.WeakTypeTag](c: blackbox.Context): c.Expr[TypeHint[C]] = {
// val tpe = c.weakTypeOf[C].dealias
// generate[C, c.type](c)(fieldName, tpe.typeSymbol.fullName, tpe)
// }
//
// def custom[C <: T](stringValue: String): TypeHint[C] = macro customImpl[C]
// def customImpl[C <: T: c.WeakTypeTag](c: blackbox.Context)(stringValue: c.Expr[String]): c.Expr[TypeHint[C]] = {
// val tpe = c.weakTypeOf[C].dealias
// generate[C, c.type](c)(fieldName, stringValue.splice, tpe)
// }
//
// private def generate[C <: T : c.WeakTypeTag, CT <: blackbox.Context](c: CT)(field: String, nameValue: String, tpe: c.Type): c.Expr[TypeHint[C]] = {
// import c.universe._
// val fn = tpe.typeSymbol.fullName
// val ct = appliedType(typeOf[Consumer[Any]], tpe :: Nil)
// val pt = appliedType(typeOf[Producer[Any]], tpe :: Nil)
// val ce = c.inferImplicitValue(ct) match {
// case EmptyTree =>
// c.abort(c.enclosingPosition, s"Couldn't find a muster.Consumer[$fn], try bringing an implicit value for ${ct.typeSymbol.fullName} in scope by importing one or defining one.")
// case resolved => c.Expr[Consumer[C]](resolved)
// }
// val pe = c.inferImplicitValue(pt) match {
// case EmptyTree =>
// c.abort(c.enclosingPosition, s"Couldn't find a muster.Producer[$fn], try bringing an implicit value for ${pt.typeSymbol.fullName} in scope by importing one or defining one.")
// case resolved => c.Expr[Producer[C]](resolved)
// }
//
// reify {
// new TypeHint[T] {
// val fieldName: String = c.Expr[String](Literal(Constant(field))).splice
// val value: String = c.Expr[String](Literal(Constant(nameValue))).splice
// private[muster] val consumer: Consumer[C] = ce.splice
// private[muster] val producer: Producer[C] = pe.splice
// }
// }
// }
//
//
// trait TypeHint[C <: T] {
// def value: String
// def consumer: Consumer[C]
// def producer: Producer[C]
// }
//}
|
json4s/muster
|
core/src/main/scala/muster/type_hints.scala
|
Scala
|
mit
| 2,597 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.expressions
import org.apache.flink.table.api._
import org.apache.flink.table.planner.expressions.utils.ArrayTypeTestBase
import org.apache.flink.table.planner.utils.DateTimeTestUtil.{localDate, localDateTime, localTime => gLocalTime}
import org.junit.Test
import java.time.{LocalDateTime => JLocalDateTime}
class ArrayTypeTest extends ArrayTypeTestBase {
@Test
def testInputTypeGeneralization(): Unit = {
testAllApis(
array(1, 2.0, 3.0),
"array(1, 2.0, 3.0)",
"ARRAY[1, cast(2.0 AS DOUBLE), cast(3.0 AS DOUBLE)]",
"[1.0, 2.0, 3.0]")
}
@Test
def testArrayLiterals(): Unit = {
// primitive literals
testAllApis(array(1, 2, 3), "array(1, 2, 3)", "ARRAY[1, 2, 3]", "[1, 2, 3]")
testAllApis(
array(true, true, true),
"array(true, true, true)",
"ARRAY[TRUE, TRUE, TRUE]",
"[TRUE, TRUE, TRUE]")
// object literals
testTableApi(array(BigDecimal(1), BigDecimal(1)), "array(1p, 1p)", "[1, 1]")
testAllApis(
array(array(array(1), array(1))),
"array(array(array(1), array(1)))",
"ARRAY[ARRAY[ARRAY[1], ARRAY[1]]]",
"[[[1], [1]]]")
testAllApis(
array(1 + 1, 3 * 3),
"array(1 + 1, 3 * 3)",
"ARRAY[1 + 1, 3 * 3]",
"[2, 9]")
testAllApis(
array(nullOf(DataTypes.INT), 1),
"array(Null(INT), 1)",
"ARRAY[NULLIF(1,1), 1]",
"[NULL, 1]")
testAllApis(
array(array(nullOf(DataTypes.INT), 1)),
"array(array(Null(INT), 1))",
"ARRAY[ARRAY[NULLIF(1,1), 1]]",
"[[NULL, 1]]")
// implicit conversion
testTableApi(
Array(1, 2, 3),
"array(1, 2, 3)",
"[1, 2, 3]")
testTableApi(
Array[Integer](1, 2, 3),
"array(1, 2, 3)",
"[1, 2, 3]")
testAllApis(
Array(localDate("1985-04-11"), localDate("2018-07-26")),
"array('1985-04-11'.toDate, '2018-07-26'.toDate)",
"ARRAY[DATE '1985-04-11', DATE '2018-07-26']",
"[1985-04-11, 2018-07-26]")
testAllApis(
Array(gLocalTime("14:15:16"), gLocalTime("17:18:19")),
"array('14:15:16'.toTime, '17:18:19'.toTime)",
"ARRAY[TIME '14:15:16', TIME '17:18:19']",
"[14:15:16, 17:18:19]")
// There is no timestamp literal function in Java String Table API,
// toTimestamp is casting string to TIMESTAMP(3) which is not the same to timestamp literal.
testTableApi(
Array(localDateTime("1985-04-11 14:15:16"), localDateTime("2018-07-26 17:18:19")),
"[1985-04-11 14:15:16, 2018-07-26 17:18:19]")
testSqlApi(
"ARRAY[TIMESTAMP '1985-04-11 14:15:16', TIMESTAMP '2018-07-26 17:18:19']",
"[1985-04-11 14:15:16, 2018-07-26 17:18:19]")
    // localDateTime uses DateTimeUtils.timestampStringToUnixDate to parse a time string,
    // which only supports millisecond precision.
testTableApi(
Array(
JLocalDateTime.of(1985, 4, 11, 14, 15, 16, 123456789),
JLocalDateTime.of(2018, 7, 26, 17, 18, 19, 123456789)),
"[1985-04-11 14:15:16.123456789, 2018-07-26 17:18:19.123456789]")
testTableApi(
Array(
JLocalDateTime.of(1985, 4, 11, 14, 15, 16, 123456700),
JLocalDateTime.of(2018, 7, 26, 17, 18, 19, 123456700)),
"[1985-04-11 14:15:16.1234567, 2018-07-26 17:18:19.1234567]")
testTableApi(
Array(
JLocalDateTime.of(1985, 4, 11, 14, 15, 16, 123456000),
JLocalDateTime.of(2018, 7, 26, 17, 18, 19, 123456000)),
"[1985-04-11 14:15:16.123456, 2018-07-26 17:18:19.123456]")
testTableApi(
Array(
JLocalDateTime.of(1985, 4, 11, 14, 15, 16, 123400000),
JLocalDateTime.of(2018, 7, 26, 17, 18, 19, 123400000)),
"[1985-04-11 14:15:16.1234, 2018-07-26 17:18:19.1234]")
testSqlApi(
"ARRAY[TIMESTAMP '1985-04-11 14:15:16.123456789', TIMESTAMP '2018-07-26 17:18:19.123456789']",
"[1985-04-11 14:15:16.123456789, 2018-07-26 17:18:19.123456789]")
testSqlApi(
"ARRAY[TIMESTAMP '1985-04-11 14:15:16.1234567', TIMESTAMP '2018-07-26 17:18:19.1234567']",
"[1985-04-11 14:15:16.1234567, 2018-07-26 17:18:19.1234567]")
testSqlApi(
"ARRAY[TIMESTAMP '1985-04-11 14:15:16.123456', TIMESTAMP '2018-07-26 17:18:19.123456']",
"[1985-04-11 14:15:16.123456, 2018-07-26 17:18:19.123456]")
testSqlApi(
"ARRAY[TIMESTAMP '1985-04-11 14:15:16.1234', TIMESTAMP '2018-07-26 17:18:19.1234']",
"[1985-04-11 14:15:16.1234, 2018-07-26 17:18:19.1234]")
testAllApis(
Array(BigDecimal(2.0002), BigDecimal(2.0003)),
"Array(2.0002p, 2.0003p)",
"ARRAY[CAST(2.0002 AS DECIMAL(10,4)), CAST(2.0003 AS DECIMAL(10,4))]",
"[2.0002, 2.0003]")
testAllApis(
      Array(Array(true)),
"Array(Array(true))",
"ARRAY[ARRAY[TRUE]]",
"[[TRUE]]")
testAllApis(
Array(Array(1, 2, 3), Array(3, 2, 1)),
"Array(Array(1, 2, 3), Array(3, 2, 1))",
"ARRAY[ARRAY[1, 2, 3], ARRAY[3, 2, 1]]",
"[[1, 2, 3], [3, 2, 1]]")
    // implicit type casts only work in the SQL API.
testSqlApi("ARRAY[CAST(1 AS DOUBLE), CAST(2 AS FLOAT)]", "[1.0, 2.0]")
}
@Test
def testArrayField(): Unit = {
testAllApis(
array('f0, 'f1),
"array(f0, f1)",
"ARRAY[f0, f1]",
"[NULL, 42]")
testAllApis(
array('f0, 'f1),
"array(f0, f1)",
"ARRAY[f0, f1]",
"[NULL, 42]")
testAllApis(
'f2,
"f2",
"f2",
"[1, 2, 3]")
testAllApis(
'f3,
"f3",
"f3",
"[1984-03-12, 1984-02-10]")
testAllApis(
'f5,
"f5",
"f5",
"[[1, 2, 3], NULL]")
testAllApis(
'f6,
"f6",
"f6",
"[1, NULL, NULL, 4]")
testAllApis(
'f2,
"f2",
"f2",
"[1, 2, 3]")
testAllApis(
'f2.at(1),
"f2.at(1)",
"f2[1]",
"1")
testAllApis(
'f3.at(1),
"f3.at(1)",
"f3[1]",
"1984-03-12")
testAllApis(
'f3.at(2),
"f3.at(2)",
"f3[2]",
"1984-02-10")
testAllApis(
'f5.at(1).at(2),
"f5.at(1).at(2)",
"f5[1][2]",
"2")
testAllApis(
'f5.at(2).at(2),
"f5.at(2).at(2)",
"f5[2][2]",
"NULL")
testAllApis(
'f4.at(2).at(2),
"f4.at(2).at(2)",
"f4[2][2]",
"NULL")
testAllApis(
'f11.at(1),
"f11.at(1)",
"f11[1]",
"1")
}
@Test
def testArrayOperations(): Unit = {
// cardinality
testAllApis(
'f2.cardinality(),
"f2.cardinality()",
"CARDINALITY(f2)",
"3")
testAllApis(
'f4.cardinality(),
"f4.cardinality()",
"CARDINALITY(f4)",
"NULL")
testAllApis(
'f11.cardinality(),
"f11.cardinality()",
"CARDINALITY(f11)",
"1")
// element
testAllApis(
'f9.element(),
"f9.element()",
"ELEMENT(f9)",
"1")
testAllApis(
'f8.element(),
"f8.element()",
"ELEMENT(f8)",
"4.0")
testAllApis(
'f10.element(),
"f10.element()",
"ELEMENT(f10)",
"NULL")
testAllApis(
'f4.element(),
"f4.element()",
"ELEMENT(f4)",
"NULL")
testAllApis(
'f11.element(),
"f11.element()",
"ELEMENT(f11)",
"1")
// comparison
testAllApis(
'f2 === 'f5.at(1),
"f2 === f5.at(1)",
"f2 = f5[1]",
"TRUE")
testAllApis(
'f6 === array(1, 2, 3),
"f6 === array(1, 2, 3)",
"f6 = ARRAY[1, 2, 3]",
"FALSE")
testAllApis(
'f2 !== 'f5.at(1),
"f2 !== f5.at(1)",
"f2 <> f5[1]",
"FALSE")
testAllApis(
'f2 === 'f7,
"f2 === f7",
"f2 = f7",
"FALSE")
testAllApis(
'f2 !== 'f7,
"f2 !== f7",
"f2 <> f7",
"TRUE")
testAllApis(
'f11 === 'f11,
"f11 === f11",
"f11 = f11",
"TRUE")
testAllApis(
'f11 === 'f9,
"f11 === f9",
"f11 = f9",
"TRUE")
testAllApis(
'f11 !== 'f11,
"f11 !== f11",
"f11 <> f11",
"FALSE")
testAllApis(
'f11 !== 'f9,
"f11 !== f9",
"f11 <> f9",
"FALSE")
}
@Test
def testArrayTypeCasting(): Unit = {
testTableApi(
'f3.cast(DataTypes.ARRAY(DataTypes.DATE)),
"f3.cast(OBJECT_ARRAY(SQL_DATE))",
"[1984-03-12, 1984-02-10]"
)
}
@Test
def testArrayIndexStaticCheckForTable(): Unit = {
thrown.expect(classOf[ValidationException])
thrown.expectMessage("Array element access needs an index starting at 1 but was 0.")
testTableApi('f2.at(0), "1")
}
@Test
def testArrayIndexStaticCheckForSql(): Unit = {
thrown.expect(classOf[ValidationException])
thrown.expectMessage("Array element access needs an index starting at 1 but was 0.")
testSqlApi("f2[0]", "1")
}
@Test
def testReturnNullWhenArrayIndexOutOfBounds(): Unit = {
// ARRAY<INT NOT NULL>
testAllApis(
'f2.at(4),
"f2.at(4)",
"f2[4]",
"NULL")
// ARRAY<INT>
testAllApis(
'f11.at(3),
"f11.at(3)",
"f11[4]",
"NULL")
}
}
|
lincoln-lil/flink
|
flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/expressions/ArrayTypeTest.scala
|
Scala
|
apache-2.0
| 9,932 |
package com.sksamuel.elastic4s
import org.elasticsearch.action.explain.{ExplainAction, ExplainRequest, ExplainRequestBuilder, ExplainResponse}
import org.elasticsearch.action.support.QuerySourceBuilder
import org.elasticsearch.client.Client
import scala.concurrent.Future
/** @author Stephen Samuel */
trait ExplainDsl {
implicit object ExplainDefinitionExecutable extends Executable[ExplainDefinition, ExplainResponse, ExplainResponse] {
override def apply(c: Client, t: ExplainDefinition): Future[ExplainResponse] = {
val builder = t.build(c.prepareExplain(t.index, t.`type`, t.id))
injectFuture(builder.execute)
}
}
class ExplainExpectsIndex(id: String) {
def in(indexAndTypes: IndexAndTypes): ExplainDefinition = {
new ExplainDefinition(indexAndTypes.index, indexAndTypes.types.head, id)
}
}
}
case class ExplainDefinition(index: String,
`type`: String,
id: String,
query: Option[QueryDefinition] = None,
fetchSource: Option[Boolean] = None,
parent: Option[String] = None,
preference: Option[String] = None,
routing: Option[String] = None) extends Serializable {
// used by testing to get the full builder without a client
private[elastic4s] def request: ExplainRequest = {
build(new ExplainRequestBuilder(ProxyClients.client, ExplainAction.INSTANCE, index, `type`, id)).request()
}
def build(builder: ExplainRequestBuilder): ExplainRequestBuilder = {
// need to set the query on the request - workaround for ES internals
query.foreach(q => builder.request.source(new QuerySourceBuilder().setQuery(q.builder)))
query.foreach(q => builder.setQuery(q.builder))
fetchSource.foreach(builder.setFetchSource)
parent.foreach(builder.setParent)
preference.foreach(builder.setPreference)
routing.foreach(builder.setRouting)
builder
}
def query(string: String): ExplainDefinition = query(new QueryStringQueryDefinition(string))
def query(block: => QueryDefinition): ExplainDefinition = copy(query = Option(block))
def fetchSource(fetchSource: Boolean): ExplainDefinition = copy(fetchSource = Option(fetchSource))
def parent(parent: String): ExplainDefinition = copy(parent = Option(parent))
def preference(preference: String): ExplainDefinition = copy(preference = Option(preference))
def routing(routing: String): ExplainDefinition = copy(routing = Option(routing))
}
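// Usage sketch (hypothetical index/type/id, using only the methods defined above):
//   ExplainDefinition("places", "cities", "1").query("name:london").fetchSource(true)
// build(...) then copies each optional setting onto the underlying ExplainRequestBuilder.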
|
k4200/elastic4s
|
elastic4s-core/src/main/scala/com/sksamuel/elastic4s/ExplainDsl.scala
|
Scala
|
apache-2.0
| 2,575 |
/*
* Copyright 2007-2010 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb {
package widgets {
package gravatar {
import _root_.scala.xml.NodeSeq
import _root_.java.security._
import _root_.java.util._
import _root_.java.io._
import _root_.net.liftweb.common.Loggable
/**
* To make a Gravatar:
*
* <pre><code>
* Gravatar("[email protected]") // => Produces a gravatar thats 42x42 with a G rating
* Gravatar("[email protected]", 50) // => Produces a gravatar thats 50x50 with a G rating
* Gravatar("[email protected]", 50, "R") // => Produces a gravatar thats 50x50 with a R rating
* </code></pre>
*
*/
object Gravatar extends Loggable {
val defaultSize: Int = 42
val defaultRating: String = "G"
val avatarEndpoint: String = "http://www.gravatar.com/avatar/"
/**
* @param e The email address of the recipient
*/
def apply(e: String): NodeSeq = url(e,defaultSize,defaultRating)
/**
* @param e The email address of the recipient
* @param s The square size of the output gravatar
*/
def apply(e: String, s: Int): NodeSeq = url(e,s,defaultRating)
/**
* @param e The email address of the recipient
* @param s The square size of the output gravatar
   * @param r The rating of the Gravatar; the default is G
*/
def apply(e: String, s: Int, r: String) = url(e,s,r)
private def url(email: String, size: Int, rating: String): NodeSeq = {
html(avatarEndpoint + getMD5(email) + "?s=" + size.toString + "&r=" + rating)
}
private def html(in: String): NodeSeq = {
<div id="gravatar_wrapper"><div id="gravatar_image"><img src={in} alt="Gravater" /></div></div>
}
  private def getMD5(message: String): String = {
    try {
      // the two calls below are what can actually throw the exceptions handled here
      val md: MessageDigest = MessageDigest.getInstance("MD5")
      val bytes = message.getBytes("CP1252")
      // Gravatar expects a 32-character hex digest, but BigInt.toString(16)
      // drops leading zeros, so pad the result back out to 32 characters.
      val hex = BigInt(1, md.digest(bytes)).toString(16)
      "0" * (32 - hex.length) + hex
    } catch {
      case a: NoSuchAlgorithmException => logger.error("[Gravatar] No Algorithm.", a); ""
      case x: UnsupportedEncodingException => logger.warn("[Gravatar] Unsupported Encoding.", x); ""
      case _: Throwable => logger.warn("[Gravatar] Unknown error."); ""
    }
  }
}
}
}
}
|
lift/lift
|
framework/lift-modules/lift-widgets/src/main/scala/net/liftweb/widgets/gravatar/Gravatar.scala
|
Scala
|
apache-2.0
| 2,680 |
package net.liftweb.json.scalaz
import scalaz._
import Scalaz._
import JsonScalaz._
import net.liftweb.json._
import org.specs2.mutable.Specification
object TupleExample extends Specification {
"Parse tuple from List" in {
val json = JsonParser.parse(""" [1,2,3] """)
    fromJSON[Tuple3[Int, Int, Int]](json) mustEqual Success((1, 2, 3))
}
}
|
lzpfmh/framework-2
|
core/json-scalaz/src/test/scala/net/lifweb/json/scalaz/TupleExample.scala
|
Scala
|
apache-2.0
| 352 |
package lila.tournament
import org.joda.time.DateTime
private[tournament] case class WaitingUsers(
hash: Map[String, DateTime],
clock: Option[chess.Clock],
date: DateTime) {
// 1+0 -> 5 -> 7
// 3+0 -> 9 -> 11
// 5+0 -> 17 -> 17
// 10+0 -> 32 -> 30
private val waitSeconds = {
(clock.fold(60)(_.estimateTotalTime) / 20) + 2
} min 30 max 7
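  // `x min 30 max 7` evaluates left to right, so the wait is clamped to the range [7, 30] seconds.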
lazy val all = hash.keys.toList
lazy val size = hash.size
def isOdd = size % 2 == 1
// skips the most recent user if odd
def evenNumber: List[String] = {
if (isOdd) hash.toList.sortBy(-_._2.getMillis).drop(1).map(_._1)
else all
}
def waitSecondsOf(userId: String) = hash get userId map { d =>
nowSeconds - d.getSeconds
}
def waiting = {
val since = date minusSeconds waitSeconds
hash.collect {
case (u, d) if d.isBefore(since) => u
}.toList
}
def update(us: Seq[String], clock: Option[chess.Clock]) = {
val newDate = DateTime.now
copy(
date = newDate,
clock = clock,
hash = hash.filterKeys(us.contains) ++
us.filterNot(hash.contains).map { _ -> newDate }
)
}
def intersect(us: Seq[String]) = copy(hash = hash filterKeys us.contains)
def diff(us: Set[String]) = copy(hash = hash filterKeys { k => !us.contains(k) })
}
private[tournament] object WaitingUsers {
def empty = WaitingUsers(Map.empty, none, DateTime.now)
}
|
Happy0/lila
|
modules/tournament/src/main/WaitingUsers.scala
|
Scala
|
mit
| 1,398 |
/*
* Copyright 2015 - 2016 Red Bull Media House GmbH <http://www.redbullmediahouse.com> - all rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rbmhtechnology.eventuate.crdt.pure
import akka.actor._
import akka.remote.testkit._
import akka.remote.transport.ThrottlerTransportAdapter.Direction
import akka.testkit.TestProbe
import com.rbmhtechnology.eventuate._
import com.rbmhtechnology.eventuate.crdt.pure.AWSetService.AWSet
import com.rbmhtechnology.eventuate.crdt.pure.CRDTTypes.Operation
import com.typesafe.config.ConfigFactory
class ReplicatedAWSetSpecLeveldb extends ReplicatedAWSetSpec with MultiNodeSupportLeveldb
class ReplicatedAWSetSpecLeveldbMultiJvmNode1 extends ReplicatedAWSetSpecLeveldb
class ReplicatedAWSetSpecLeveldbMultiJvmNode2 extends ReplicatedAWSetSpecLeveldb
object ReplicatedORSetConfig extends MultiNodeReplicationConfig {
val nodeA = role("nodeA")
val nodeB = role("nodeB")
val customConfig = ConfigFactory.parseString(
"""
|eventuate.log.write-batch-size = 200
|eventuate.log.replication.remote-read-timeout = 2s
""".stripMargin)
testTransport(on = true)
setConfig(customConfig.withFallback(MultiNodeConfigLeveldb.providerConfig))
}
abstract class ReplicatedAWSetSpec extends MultiNodeSpec(ReplicatedORSetConfig) with MultiNodeWordSpec with MultiNodeReplicationEndpoint {
import ReplicatedORSetConfig._
import CRDTTestDSL.AWSetCRDT
def initialParticipants: Int =
roles.size
muteDeadLetters(classOf[AnyRef])(system)
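  // An AWSet is an add-wins set CRDT: an add that is concurrent with a remove of
  // the same element wins, which is exactly what the partition scenario below checks.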
"A replicated AWSet" must {
"converge" in {
val probe = TestProbe()
runOn(nodeA) {
val endpoint = createEndpoint(nodeA.name, Set(node(nodeB).address.toReplicationConnection))
val service = new AWSetService[Int]("A", endpoint.log) {
override private[crdt] def onChange(crdt: AWSet[Int], operation: Option[Operation]): Unit = probe.ref ! ops.value(crdt)
}
service.add("x", 1)
probe.expectMsg(Set(1))
probe.expectMsg(Set(1, 2))
// network partition
testConductor.blackhole(nodeA, nodeB, Direction.Both).await
enterBarrier("broken")
// this is concurrent to service.remove("x", 1) on node B
service.add("x", 1)
probe.expectMsg(Set(1, 2))
enterBarrier("repair")
testConductor.passThrough(nodeA, nodeB, Direction.Both).await
probe.expectMsg(Set(1, 2))
service.remove("x", 2)
probe.expectMsg(Set(1))
}
runOn(nodeB) {
val endpoint = createEndpoint(nodeB.name, Set(node(nodeA).address.toReplicationConnection))
val service = new AWSetService[Int]("B", endpoint.log) {
override private[crdt] def onChange(crdt: AWSet[Int], operation: Option[Operation]): Unit = probe.ref ! ops.value(crdt)
}
service.value("x")
probe.expectMsg(Set(1))
service.add("x", 2)
probe.expectMsg(Set(1, 2))
enterBarrier("broken")
// this is concurrent to service.add("x", 1) on node A
service.remove("x", 1)
probe.expectMsg(Set(2))
enterBarrier("repair")
// add has precedence over (concurrent) remove
probe.expectMsg(Set(1, 2))
probe.expectMsg(Set(1))
}
enterBarrier("finish")
}
}
}
|
RBMHTechnology/eventuate
|
eventuate-crdt-pure/src/multi-jvm/scala/com/rbmhtechnology/eventuate/crdt/ReplicatedAWSetSpec.scala
|
Scala
|
apache-2.0
| 3,825 |
package edu.stanford.cme323.spark.smti.utils
import java.io.File
import java.io.FileWriter
import scala.io.Source
import org.apache.spark.Logging
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import edu.stanford.cme323.spark.smti._
object IO extends Logging {
/**
   * Modified Random Generated SMTI Instances (RGSI) format.
* Gent, I. P. and Prosser, P. An Empirical Study of the Stable Marriage
* Problem with Ties and Incomplete Lists. In ECAI 2002
* Munera, D. et al. Solving Hard Stable Matching Problems via Local Search
* and Cooperative Parallelization. In AAAI 2015
*
* Each line is as "index: raw preference list"
* Raw preference list is the index of opposite sex people, ordered from the
* most preferred to the least, and separated by space.
* Ties are represented using negative indices, i.e., negative index has the
* same preference rank as the one before it.
*/
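  // Example (hypothetical line): "4: 7 -2 9" gives person 4 the preference list
  // [7, 2, 9], where 7 and 2 share rank 1 and 9 has rank 2.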
def loadModifiedRGSIPrefLists(sc: SparkContext, dir: String, file: String): RDD[(Index, PrefList)] = {
logInfo(f"Load RGSI preference list from $dir/$file")
sc.textFile(new File(dir, file).toString).map{ line =>
val spLine = line.split(":")
assert(spLine.length == 2)
val index: Index = spLine(0).toLong
val rawPrefList: Array[Index] = spLine(1).trim().split(" ").map( x => x.toLong )
      // Compose the rank list
      val rankList = new Array[Rank](rawPrefList.length)
      var rank = 0
for ( i <- 0 until rawPrefList.length ) {
// do not increase rank when index is negative
if (rawPrefList(i) > 0) rank += 1
rankList(i) = rank
}
val prefList = (rankList zip rawPrefList).map( tuple => Pref(tuple._1, tuple._2.abs) )
(index, prefList)
}
}
def storeModifiedRGSIPrefLists(sc: SparkContext, prefLists: RDD[(Index, PrefList)], dir: String, file: String) = {
logInfo(f"Store RGSI preference list to $dir/$file")
val rawLists: RDD[(Index, Array[Index])] = prefLists
.mapValues{ prefList =>
        val rawPrefList: Array[Index] = new Array[Index](prefList.length)
rawPrefList(0) = prefList(0).index
for ( i <- 1 until prefList.length) {
if (prefList(i).rank == prefList(i-1).rank) {
rawPrefList(i) = -prefList(i).index
} else {
rawPrefList(i) = prefList(i).index
}
}
rawPrefList
}
val content: Array[String] = rawLists
.map{ case(index, rawPrefList) =>
index.toString + ": " + rawPrefList.mkString(" ")
}
.collect()
val writer = new FileWriter(new File(dir, file).toString)
content.foreach(line => writer.write(line + "\\n"))
writer.close()
}
}
|
gaomy3832/spark-smti
|
src/main/scala/edu/stanford/cme323/spark/smti/utils/IO.scala
|
Scala
|
apache-2.0
| 2,749 |
// See LICENSE.txt for license details.
package problems
import chisel3._
import chisel3.util._
// Problem:
//
// Implement a vending machine using 'when' states.
// 'nickel' is a 5 cent coin
// 'dime' is a 10 cent coin
// 'sOk' is reached when there are coins totalling 20 cents or more in the machine.
// The vending machine should return to the 'sIdle' state from the 'sOk' state.
//
class VendingMachine extends Module {
val io = IO(new Bundle {
val nickel = Input(Bool())
val dime = Input(Bool())
val valid = Output(Bool())
})
val sIdle :: s5 :: s10 :: s15 :: sOk :: Nil = Enum(5)
val state = RegInit(sIdle)
// Implement below ----------
state := s5
// Implement above ----------
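  // One possible solution, left commented out so the exercise stays open -- a
  // sketch using 'when', as the problem statement above asks:
  // when (state === sIdle) { when (io.nickel) { state := s5 };  when (io.dime) { state := s10 } }
  // when (state === s5)    { when (io.nickel) { state := s10 }; when (io.dime) { state := s15 } }
  // when (state === s10)   { when (io.nickel) { state := s15 }; when (io.dime) { state := sOk } }
  // when (state === s15)   { when (io.nickel) { state := sOk }; when (io.dime) { state := sOk } }
  // when (state === sOk)   { state := sIdle }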
io.valid := (state === sOk)
}
|
timtian090/Playground
|
chiselTutorial/src/main/scala/problems/VendingMachine.scala
|
Scala
|
mit
| 752 |
package object models {
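  // Two id families: ResourceId-based aliases are plain string keys, while the
  // UniqueId-based aliases (and UserId) are java.util.UUID values.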
type ResourceId = String
type UserId = java.util.UUID
type UniqueId = java.util.UUID
type TripId = UniqueId
type ScheduleId = UniqueId
type DayId = UniqueId
type VisitId = UniqueId
type TransportId = UniqueId
type TransportModalityId = ResourceId
type CityId = ResourceId
type RegionId = ResourceId
type POIId = ResourceId
type FeatureId = Long
}
|
joaoraf/tripspace
|
app/models/package.scala
|
Scala
|
apache-2.0
| 408 |
package fpinscala.state
import fpinscala.state.RNG._
trait RNG {
def nextInt: (Int, RNG) // Should generate a random `Int`. We'll later define other functions in terms of `nextInt`.
}
object RNG {
// NB - this was called SimpleRNG in the book text
case class Simple(seed: Long) extends RNG {
def nextInt: (Int, RNG) = {
val newSeed = (seed * 0x5DEECE66DL + 0xBL) & 0xFFFFFFFFFFFFL // `&` is bitwise AND. We use the current seed to generate a new seed.
val nextRNG = Simple(newSeed) // The next state, which is an `RNG` instance created from the new seed.
val n = (newSeed >>> 16).toInt // `>>>` is right binary shift with zero fill. The value `n` is our new pseudo-random integer.
(n, nextRNG) // The return value is a tuple containing both a pseudo-random integer and the next `RNG` state.
}
}
type Rand[+A] = RNG => (A, RNG)
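  // A Rand[A] is a state action: it consumes an RNG and returns a value together
  // with the next RNG, so random computations compose without mutable state.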
val int: Rand[Int] = _.nextInt
def unit[A](a: A): Rand[A] =
rng => (a, rng)
def map[A,B](s: Rand[A])(f: A => B): Rand[B] =
rng => {
val (a, rng2) = s(rng)
(f(a), rng2)
}
def mapViaFlatMap[A,B](s: Rand[A])(f: A => B): Rand[B] =
flatMap(s)(a => unit(f(a)))
def nonNegativeInt(rng: RNG): (Int, RNG) = {
val (i, r) = rng.nextInt
if(i < 0) (-(i + 1), r) else (i, r)
}
val nonNegativeIntRand: Rand[Int] = nonNegativeInt
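  // Dividing by Int.MaxValue + 1 (i.e. 2^31) maps a non-negative Int into [0, 1).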
def double(rng: RNG): (Double, RNG) = {
val (i,r) = nonNegativeInt(rng)
(i / (Int.MaxValue.toDouble + 1), r)
}
def doubleViaMap: Rand[Double] = map(nonNegativeInt)(_/(Int.MaxValue.toDouble + 1))
def intDouble(rng: RNG): ((Int,Double), RNG) = {
val (i, r1) = nonNegativeInt(rng)
val (d, r2) = double(r1)
((i, d), r2)
}
def intDoubleViaMap2: Rand[(Int, Double)] = map2(nonNegativeInt, doubleViaMap)((_,_))
def doubleInt(rng: RNG): ((Double,Int), RNG) = {
val ((i, d), r) = intDouble(rng)
((d, i), r)
}
def doubleIntViaMap2: Rand[(Double,Int)] = map2(doubleViaMap, nonNegativeInt)((_,_))
def double3(rng: RNG): ((Double,Double,Double), RNG) = {
val (d1, r1) = double(rng)
val (d2, r2) = double(r1)
val (d3, r3) = double(r2)
((d1, d2, d3),r3)
}
def ints(count: Int)(rng: RNG): (List[Int], RNG) = {
def go(n: Int, r: (Int, RNG), acc:List[Int]):(List[Int], RNG) = {
if (n > 0)
go(n - 1, RNG.nonNegativeInt(r._2), r._1::acc)
else
(acc, r._2)
}
go(count, RNG.nonNegativeInt(rng), List())
}
def map2[A,B,C](ra: Rand[A], rb: Rand[B])(f: (A, B) => C): Rand[C] =
rng => {
val (a, r1) = ra(rng)
val (b, r2) = rb(r1)
(f(a,b), r2)
}
  def map2ViaFlatMap[A,B,C](ra: Rand[A], rb: Rand[B])(f: (A, B) => C): Rand[C] =
    flatMap(ra)(a => map(rb)(b => f(a, b)))
  // e.g. sequence(List(double, double, double)) combines three Rand[Double] actions into one Rand[List[Double]]
def sequence[A](fs: List[Rand[A]]): Rand[List[A]] = rnd => {
fs.foldRight((List[A](), rnd))((rand: Rand[A], z:(List[A], RNG)) => z match {
case (l, r1) =>
val (a, r2) = rand(r1)
(l:::List(a), r2)
})
}
def sequenceViaUnit[A](fs: List[Rand[A]]): Rand[List[A]] =
fs.foldRight(unit(List[A]()))((ra, z) => map2(ra, z)(_::_))
def flatMap[A,B](f: Rand[A])(g: A => Rand[B]): Rand[B] = rnd => {
val (a, r) = f(rnd)
g(a)(r)
}
}
case class State[S,+A](run: S => (A, S)) {
  def map[B](f: A => B): State[S, B] =
    flatMap(a => State(s => (f(a), s)))
def map2[B,C](sb: State[S, B])(f: (A, B) => C): State[S, C] =
State(s => {
val (a, s1) = run(s)
val (b, s2) = sb.run(s1)
(f(a,b), s2)
})
def flatMap[B](f: A => State[S, B]): State[S, B] =
State(s => {
val (a, s1) = run(s)
f(a).run(s1)
})
}
sealed trait Input
case object Coin extends Input
case object Turn extends Input
case class Machine(locked: Boolean, candies: Int, coins: Int)
object State {
type Rand[A] = State[RNG, A]
  // Standard candy-machine rules: a coin unlocks a locked machine that still has
  // candy, a turn on an unlocked machine dispenses one candy and locks it again,
  // and every other input (or an empty machine) changes nothing.
  def simulateMachine(inputs: List[Input]): State[Machine, (Int, Int)] =
    State(machine => {
      val end = inputs.foldLeft(machine) { (m, input) =>
        (input, m) match {
          case (_, Machine(_, 0, _)) => m
          case (Coin, Machine(false, _, _)) => m
          case (Turn, Machine(true, _, _)) => m
          case (Coin, Machine(true, candies, coins)) => Machine(false, candies, coins + 1)
          case (Turn, Machine(false, candies, coins)) => Machine(true, candies - 1, coins + 1)
        }
      }
      ((end.coins, end.candies), end)
    })
}
object StateMain{
def main(args: Array[String]) {
println(sequence(List.fill(5)(nonNegativeIntRand))(Simple(1)))
println(sequenceViaUnit(List.fill(5)(nonNegativeIntRand))(Simple(1)))
}
}
|
siwulus/fpinscala
|
exercises/src/main/scala/fpinscala/state/State.scala
|
Scala
|
mit
| 4,097 |
/*
* Copyright 2019 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.play.microservice.bootstrap
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import ch.qos.logback.classic.Level
import org.mockito.Mockito._
import org.scalatest.LoneElement
import org.scalatest.concurrent.{Eventually, ScalaFutures}
import org.scalatest.mockito.MockitoSugar
import play.api.libs.json.Json
import play.api.mvc.RequestHeader
import play.api.{GlobalSettings, Logger}
import uk.gov.hmrc.http.{BadRequestException, NotFoundException, UnauthorizedException, Upstream5xxResponse}
import uk.gov.hmrc.play.test.{LogCapturing, UnitSpec}
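/** Supplies the implicit ActorSystem and ActorMaterializer that the spec's
  * body-reading helpers (e.g. jsonBodyOf) need to consume Play results. */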
trait MaterializerSupport {
implicit val system = ActorSystem("Sys")
implicit val materializer = ActorMaterializer()
}
class JsonErrorHandlingSpec
extends UnitSpec
with ScalaFutures
with MockitoSugar
with LogCapturing
with LoneElement
with Eventually
with MaterializerSupport {
"error handling in onError function" should {
"convert a NotFoundException to NotFound response" in new Setup {
val resultF = jsh.onError(requestHeader, new NotFoundException("test")).futureValue
resultF.header.status shouldBe 404
jsonBodyOf(resultF) shouldBe Json.parse("""{"statusCode":404,"message":"test"}""")
}
"convert a BadRequestException to NotFound response" in new Setup {
val resultF = jsh.onError(requestHeader, new BadRequestException("bad request")).futureValue
resultF.header.status shouldBe 400
jsonBodyOf(resultF) shouldBe Json.parse("""{"statusCode":400,"message":"bad request"}""")
}
"convert an UnauthorizedException to Unauthorized response" in new Setup {
val resultF = jsh.onError(requestHeader, new UnauthorizedException("unauthorized")).futureValue
resultF.header.status shouldBe 401
jsonBodyOf(resultF) shouldBe Json.parse("""{"statusCode":401,"message":"unauthorized"}""")
}
"convert an Exception to InternalServerError" in new Setup {
val resultF = jsh.onError(requestHeader, new Exception("any application exception")).futureValue
resultF.header.status shouldBe 500
jsonBodyOf(resultF) shouldBe Json.parse("""{"statusCode":500,"message":"any application exception"}""")
}
"log one error message for each exception" in new Setup {
when(requestHeader.method).thenReturn(method)
when(requestHeader.uri).thenReturn(uri)
withCaptureOfLoggingFrom(Logger) { logEvents =>
jsh.onError(requestHeader, new Exception("any application exception")).futureValue
verify(requestHeader).method
verify(requestHeader).uri
verifyNoMoreInteractions(requestHeader)
eventually {
val event = logEvents.loneElement
event.getLevel shouldBe Level.ERROR
event.getMessage shouldBe s"! Internal server error, for ($method) [$uri] -> "
}
}
}
"log a warning for upstream code in the warning list" when {
val requestHeader = mock[RequestHeader]
"an UpstreamErrorResponse exception occurs" in {
val weh = new GlobalSettings with JsonErrorHandling {
override val upstreamWarnStatuses: Seq[Int] = Seq(500)
}
withCaptureOfLoggingFrom(Logger) { logEvents =>
weh.onError(requestHeader, Upstream5xxResponse("any application exception", 500, 502)).futureValue
eventually {
val event = logEvents.loneElement
event.getLevel shouldBe Level.WARN
event.getMessage shouldBe s"any application exception"
}
}
}
"a HttpException occurs" in {
val weh = new GlobalSettings with JsonErrorHandling {
override val upstreamWarnStatuses: Seq[Int] = Seq(400)
}
withCaptureOfLoggingFrom(Logger) { logEvents =>
weh.onError(requestHeader, new BadRequestException("any application exception")).futureValue
eventually {
val event = logEvents.loneElement
event.getLevel shouldBe Level.WARN
event.getMessage shouldBe s"any application exception"
}
}
}
}
sealed trait Setup {
val method = "some-method"
val uri = "some-uri"
val requestHeader = mock[RequestHeader]
val jsh = new GlobalSettings with JsonErrorHandling {}
}
}
}
|
hmrc/microservice-bootstrap
|
src/test/scala/uk/gov/hmrc/play/microservice/bootstrap/JsonErrorHandlingSpec.scala
|
Scala
|
apache-2.0
| 4,954 |
/*
* Copyright 2020 Precog Data
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.qsu
import slamdata.Predef.{Map => SMap, _}
import quasar.RenderTreeT
import quasar.common.effect.NameGenerator
import quasar.contrib.cats.stateT._
import quasar.contrib.iota.copkTraverse
import quasar.fp.symbolOrder
import quasar.frontend.logicalplan.JoinDir
import quasar.qscript.{construction, JoinSide, LeftSide, MonadPlannerErr, RightSide}
import quasar.qscript.PlannerError.InternalError
import quasar.qscript.RecFreeS._
import cats.data.StateT
import matryoshka.{BirecursiveT, ShowT}
import scalaz.Tags.Disjunction
import scalaz.{Monad, NonEmptyList, IList, Scalaz, Tag, \\/, \\/-, -\\/, Free, OptionT}, Scalaz._
import shims.{monadToCats, monadToScalaz}
/** Extracts `MapFunc` expressions from operations by requiring an argument
* to be a function of one or more sibling arguments and creating an
* autojoin if not.
*/
final class ExtractFreeMap[T[_[_]]: BirecursiveT: RenderTreeT: ShowT] private () extends QSUTTypes[T] {
import QScriptUniform._
import QSUGraph.Extractors
def apply[F[_]: Monad: NameGenerator: MonadPlannerErr](graph: QSUGraph)
: F[QSUGraph] = {
type G[A] = StateT[F, RevIdx, A]
graph.rewriteM[G](extract[G]).runA(graph.generateRevIndex)
}
////
private type QSU[A] = QScriptUniform[A]
private val func = construction.Func[T]
private def extract[F[_]: Monad: NameGenerator: MonadPlannerErr: RevIdxM]
: PartialFunction[QSUGraph, F[QSUGraph]] = {
case graph @ Extractors.GroupBy(src, key) =>
unifyShapePreserving[F](graph, src.root, NonEmptyList(key.root))("group_source", "group_key") {
case (sym, fms) => DimEdit(sym, DTrans.Group(fms.head))
}
case graph @ Extractors.LPFilter(src, predicate) =>
unifyShapePreserving[F](graph, src.root, NonEmptyList(predicate.root))("filter_source", "filter_predicate") {
case (sym, fms) => QSFilter(sym, fms.head.asRec)
}
case graph @ Extractors.LPJoin(left, right, cond, jtype, lref, rref) => {
val graph0 = graph.foldMapUp[IList[(Symbol, Symbol)]] {
case g @ Extractors.JoinSideRef(`lref`) => IList((g.root, left.root))
case g @ Extractors.JoinSideRef(`rref`) => IList((g.root, right.root))
case _ => IList()
}.foldLeft(graph) {
case (g, (src, target)) => g.replace(src, target)
}
MappableRegion.funcOf(replaceRefs(graph, lref, rref), graph refocus cond.root).
cata(jf => graph.overwriteAtRoot(ThetaJoin(left.root, right.root, jf, jtype, combiner)).point[F],
{
val msg = (desc: String) => InternalError(desc, None)
val max = MappableRegion.maximal(graph refocus cond.root)
val nonMappable: IList[QSUGraph] => Option[NonEmptyList[Symbol]] =
_.filterNot(mappableOf(_, lref, rref)).map(_.root).toNel
max.toIList.partition(hasJoinRef(_, lref)).bimap(nonMappable(_), nonMappable(_)) match {
case (Some(lefts), None) =>
unifyJoin[F](graph0, left.root, lefts, LeftSide, JoinSideRef(lref), JoinSideRef(rref), max)("left_source", "left_target") {
case (newSrc, on, repair) => ThetaJoin(newSrc, right.root, on, jtype, repair)
}.getOrElseF(
MonadPlannerErr[F].raiseError[QSUGraph](msg(s"Unable to unify targets: $lefts")))
case (None, Some(rights)) =>
unifyJoin[F](graph0, right.root, rights, RightSide, JoinSideRef(lref), JoinSideRef(rref), max)("right_source", "right_target") {
case (newSrc, on, repair) => ThetaJoin(left.root, newSrc, on, jtype, repair)
}.getOrElseF(
MonadPlannerErr[F].raiseError[QSUGraph](msg(s"Unable to unify targets: $rights")))
case (Some(lefts), Some(rights)) => {
val leftUnify =
UnifyTargets[T, F](withName[F](_))(graph, left.root, lefts)("left_source", "left_target")
val rightUnify =
UnifyTargets[T, F](withName[F](_))(graph, right.root, rights)("right_source", "right_target")
(leftUnify |@| rightUnify) {
case ((leftGraph, leftOrig, leftMap0), (rightGraph, rightOrig, rightMap0)) => {
val leftMap = SMap(leftMap0.toList: _*)
val rightMap = SMap(rightMap0.toList: _*)
val repair = combiner >>= (_.fold(leftOrig.as[JoinSide](LeftSide), rightOrig.as[JoinSide](RightSide)))
max.map(partialRefReplace(_, lref, rref))
.traverseM[Option, JoinSide] {
case -\\/(side) =>
Free.pure[MapFunc, JoinSide](side).some
case \\/-(g) =>
leftMap.get(g.root).map(_.as[JoinSide](LeftSide))
.orElse(rightMap.get(g.root).map(_.as[JoinSide](RightSide)))
}.cata(on => {
val node = ThetaJoin(leftGraph.root, rightGraph.root, on, jtype, repair)
(graph0.overwriteAtRoot(node) :++ leftGraph :++ rightGraph).point[F]
}, MonadPlannerErr[F].raiseError[QSUGraph](msg(s"Unable to unify targets. Left: $lefts, Right: $rights")))
}
}.join
}
case _ =>
MonadPlannerErr[F].raiseError[QSUGraph](
InternalError(s"Invalid join condition, $cond, must be a mappable function of $left and $right.", None))
}
})
}
case graph @ Extractors.LPSort(src, keys) =>
unifyShapePreserving[F](graph, src.root, keys map (_._1.root))("sort_source", "sort_key") {
case (sym, fms) => QSSort(sym, Nil, fms fzip keys.seconds)
}
}
private def combiner: JoinFunc =
func.StaticMapS(
JoinDir.Left.name -> func.LeftSide,
JoinDir.Right.name -> func.RightSide)
private def hasJoinRef(g: QSUGraph, refId: Symbol): Boolean =
Tag.unwrap[Boolean, Disjunction](g.foldMapUp({
case Extractors.JoinSideRef(rid) if refId === rid => Tag(true)
case _ => Tag(false)
}))
private def replaceRefs(g: QSUGraph, l: Symbol, r: Symbol)
: Symbol => Option[JoinSide] =
s => g.vertices.get(s) collect {
case JoinSideRef(`l`) => LeftSide
case JoinSideRef(`r`) => RightSide
}
private def partialRefReplace(g: QSUGraph, l: Symbol, r: Symbol): JoinSide \\/ QSUGraph =
replaceRefs(g, l, r)(g.root).cata(_.left[QSUGraph], g.right[JoinSide])
private def mappableOf(g: QSUGraph, l: Symbol, r: Symbol): Boolean =
replaceRefs(g, l, r)(g.root).isDefined
private def unifyShapePreserving[F[_]: Monad: NameGenerator: RevIdxM](
graph: QSUGraph,
source: Symbol,
targets: NonEmptyList[Symbol])(
sourceName: String,
targetPrefix: String)(
buildNode: (Symbol, NonEmptyList[FreeMap]) => QScriptUniform[Symbol]): F[QSUGraph] =
UnifyTargets[T, F](withName[F](_))(graph, source, targets)(sourceName, targetPrefix) flatMap {
case (newSrc, original, targetExprs) =>
val node = buildNode(newSrc.root, targetExprs.seconds)
if (newSrc.root === source)
graph.overwriteAtRoot(node).point[F]
else
withName[F](node) map { inter =>
graph.overwriteAtRoot(Map(inter.root, original.asRec)) :++ inter :++ newSrc
}
}
private def unifyJoin[F[_]: Monad: NameGenerator: RevIdxM](
graph: QSUGraph,
source: Symbol,
targets: NonEmptyList[Symbol],
reshapeSide: JoinSide,
lref: JoinSideRef[T, Symbol],
rref: JoinSideRef[T, Symbol],
max: FreeMapA[QSUGraph])(
sourceName: String,
targetPrefix: String)(
buildNode: (Symbol, JoinFunc, JoinFunc) => QScriptUniform[Symbol]): OptionT[F, QSUGraph] =
UnifyTargets[T, F](withName[F](_))(graph, source, targets)(sourceName, targetPrefix).liftM[OptionT] >>= {
case (newSrc, original, targets) => {
val repair: JoinFunc = combiner >>= {
case side if side === reshapeSide => original.as(side)
case other => Free.pure(other)
}
val targetMap = SMap(targets.toList: _*)
OptionT(max.map(partialRefReplace(_, lref.id, rref.id))
.traverseM[Option, JoinSide] {
case -\\/(side) => Free.pure(side).some
case \\/-(g) => targetMap.get(g.root) map (_.as(reshapeSide))
}.point[F]) map (on => graph.overwriteAtRoot(buildNode(newSrc.root, on, repair)) :++ newSrc)
}
}
private def withName[F[_]: Monad: NameGenerator: RevIdxM](node: QScriptUniform[Symbol]): F[QSUGraph] =
QSUGraph.withName[T, F]("efm")(node)
}
object ExtractFreeMap {
def apply[
T[_[_]]: BirecursiveT: RenderTreeT: ShowT,
F[_]: Monad: NameGenerator: MonadPlannerErr]
(graph: QSUGraph[T])
: F[QSUGraph[T]] =
taggedInternalError("ExtractFreeMap", new ExtractFreeMap[T].apply[F](graph))
}
|
djspiewak/quasar
|
qsu/src/main/scala/quasar/qsu/ExtractFreeMap.scala
|
Scala
|
apache-2.0
| 9,565 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.matchers.dsl
import org.scalatest.matchers.MatchersHelper.checkExpectedException
import org.scalatest.Resources
import org.scalatest.matchers.MatchersHelper.indicateSuccess
import org.scalatest.matchers.MatchersHelper.indicateFailure
import org.scalactic._
import scala.reflect.ClassTag
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="../Matchers.html"><code>Matchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class ResultOfATypeInvocation[T](val clazzTag: ClassTag[T]) {
val clazz: Class[T] = clazzTag.runtimeClass.asInstanceOf[Class[T]]
def this(c: Class[_]) = this(ClassTag(c).asInstanceOf[ClassTag[T]])
/**
* This method enables the following syntax:
*
* <pre class="stHighlight">
* a [RuntimeException] should be thrownBy { ... }
* ^
* </pre>
**/
def should(beWord: BeWord)(implicit prettifier: Prettifier, pos: source.Position): ResultOfBeWordForAType[T] =
new ResultOfBeWordForAType[T](clazz, prettifier, pos)
/**
* This method enables the following syntax:
*
* <pre class="stHighlight">
* a [RuntimeException] should not
* ^
* </pre>
*
* This method is here to direct people trying to use the above syntax to use <code>noException</code> instead.
*/
def should(notWord: NotWord): PleaseUseNoExceptionShouldSyntaxInstead =
new PleaseUseNoExceptionShouldSyntaxInstead
/**
* This method enables the following syntax:
*
* <pre class="stHighlight">
* a [RuntimeException] shouldBe thrownBy { ... }
* ^
* </pre>
**/
def shouldBe(thrownBy: ResultOfThrownByApplication)(implicit prettifier: Prettifier, pos: source.Position): org.scalatest.Assertion = {
val caught = try {
thrownBy.execute()
None
}
catch {
case u: Throwable => Some(u)
}
if (caught.isEmpty) {
val message = Resources.exceptionExpected(clazz.getName)
indicateFailure(message, None, pos)
} else {
val u = caught.get
if (!clazz.isAssignableFrom(u.getClass)) {
val s = Resources.wrongException(clazz.getName, u.getClass.getName)
indicateFailure(s, Some(u), pos)
} else indicateSuccess(Resources.exceptionThrown(u.getClass.getName))
}
}
/**
* This method enables the following syntax:
*
* <pre class="stHighlight">
* a [IllegalArgumentException] should (be thrownBy { ... })
* ^
* </pre>
**/
def should(beThrownBy: ResultOfBeThrownBy)(implicit prettifier: Prettifier, pos: source.Position): org.scalatest.Assertion = {
val throwables = beThrownBy.throwables
val noThrowable = throwables.find(_.isEmpty)
if (noThrowable.isDefined) {
val message = Resources.exceptionExpected(clazz.getName)
indicateFailure(message, None, pos)
}
else {
val unmatch = throwables.map(_.get).find(t => !clazz.isAssignableFrom(t.getClass))
if (unmatch.isDefined) {
val u = unmatch.get
val s = Resources.wrongException(clazz.getName, u.getClass.getName)
indicateFailure(s, Some(u), pos)
}
      else indicateSuccess(Resources.exceptionThrown(clazz.getName))
}
}
/**
* This method enables the following syntax:
*
* <pre class="stHighlight">
* a [RuntimeException] must be thrownBy { ... }
* ^
* </pre>
**/
def must(beWord: BeWord)(implicit prettifier: Prettifier, pos: source.Position): ResultOfBeWordForAType[T] =
new ResultOfBeWordForAType[T](clazz, prettifier, pos)
/**
* This method enables the following syntax:
*
* <pre class="stHighlight">
* a [RuntimeException] must not
* ^
* </pre>
*
* This method is here to direct people trying to use the above syntax to use <code>noException</code> instead.
*/
def must(notWord: NotWord): PleaseUseNoExceptionShouldSyntaxInstead =
new PleaseUseNoExceptionShouldSyntaxInstead
/**
* This method enables the following syntax:
*
* <pre class="stHighlight">
* a [RuntimeException] mustBe thrownBy { ... }
* ^
* </pre>
**/
def mustBe(thrownBy: ResultOfThrownByApplication)(implicit prettifier: Prettifier, pos: source.Position): org.scalatest.Assertion = {
val caught = try {
thrownBy.execute()
None
}
catch {
case u: Throwable => Some(u)
}
if (caught.isEmpty) {
val message = Resources.exceptionExpected(clazz.getName)
indicateFailure(message, None, pos)
} else {
val u = caught.get
if (!clazz.isAssignableFrom(u.getClass)) {
val s = Resources.wrongException(clazz.getName, u.getClass.getName)
indicateFailure(s, Some(u), pos)
} else indicateSuccess(Resources.exceptionThrown(u.getClass.getName))
}
}
/**
* This method enables the following syntax:
*
* <pre class="stHighlight">
* a [IllegalArgumentException] must (be thrownBy { ... })
* ^
* </pre>
**/
def must(beThrownBy: ResultOfBeThrownBy)(implicit prettifier: Prettifier, pos: source.Position): org.scalatest.Assertion = {
val throwables = beThrownBy.throwables
val noThrowable = throwables.find(_.isEmpty)
if (noThrowable.isDefined) {
val message = Resources.exceptionExpected(clazz.getName)
indicateFailure(message, None, pos)
}
else {
val unmatch = throwables.map(_.get).find(t => !clazz.isAssignableFrom(t.getClass))
if (unmatch.isDefined) {
val u = unmatch.get
val s = Resources.wrongException(clazz.getName, u.getClass.getName)
indicateFailure(s, Some(u), pos)
}
      else indicateSuccess(Resources.exceptionThrown(clazz.getName))
}
}
override def toString: String = "a [" + clazz.getName + "]"
}
|
dotty-staging/scalatest
|
scalatest/src/main/scala/org/scalatest/matchers/dsl/ResultOfATypeInvocation.scala
|
Scala
|
apache-2.0
| 6,563 |
/* Copyright 2009-2016 EPFL, Lausanne */
package leon.integration.purescala
import leon.test._
import leon.purescala.Common._
import leon.purescala.Expressions._
import leon.purescala.Types._
import leon.datagen._
import leon.evaluators._
class DataGenSuite extends LeonTestSuiteWithProgram with helpers.ExpressionsDSL {
val sources = List(
"""|import leon.lang._
|object Program {
| sealed abstract class List
| case class Cons(head: Int, tail: List) extends List
| case object Nil extends List
|
| def size(lst: List): BigInt = lst match {
| case Cons(_, xs) => 1 + size(xs)
| case Nil => BigInt(0)
| }
|
| def isSorted(lst: List) : Boolean = lst match {
| case Nil => true
| case Cons(_, Nil) => true
| case Cons(x, xs @ Cons(y, ys)) => x < y && isSorted(xs)
| }
|
| def content(lst: List) : Set[Int] = lst match {
| case Nil => Set.empty[Int]
| case Cons(x, xs) => Set(x) ++ content(xs)
| }
|
| def insertSpec(elem: Int, list: List, res: List) : Boolean = {
| isSorted(res) && content(res) == (content(list) ++ Set(elem))
| }
|}""".stripMargin
)
test("Lists") { implicit fix =>
val ctx = fix._1
val pgm = fix._2
val eval = new DefaultEvaluator(ctx, pgm)
val generator = new GrammarDataGen(eval)
    assert(generator.generate(BooleanType).toSet.size === 2)
    assert(generator.generate(TupleType(Seq(BooleanType,BooleanType))).toSet.size === 4)
// Make sure we target our own lists
val listType = classDef("Program.List").typed(Seq())
assert(generator.generate(listType).take(100).toSet.size === 100, "Should be able to generate 100 different lists")
val l1 = FreshIdentifier("l1", listType).toVariable
val l2 = FreshIdentifier("l2", listType).toVariable
def size(x: Expr) = fcall("Program.size")(x)
def content(x: Expr) = fcall("Program.content")(x)
def sorted(x: Expr) = fcall("Program.isSorted")(x)
def spec(elem: Expr, list: Expr, res: Expr) = fcall("Program.insertSpec")(elem, list, res)
def cons(h: Expr, t: Expr) = cc("Program.Cons")(h, t)
assert(generator.generateFor(
Seq(l1.id),
GreaterThan(size(l1), bi(0)),
10,
500
).size === 10, "Should find 10 non-empty lists in the first 500 enumerated")
assert(generator.generateFor(
Seq(l1.id, l2.id),
And(Equals(content(l1), content(l2)), sorted(l2)),
10,
500
).size === 10, "Should find 2x 10 lists with same content in the first 500 enumerated")
assert(generator.generateFor(
Seq(l1.id, l2.id),
And(Seq(Equals(content(l1), content(l2)), sorted(l1), sorted(l2), Not(Equals(l1, l2)))),
1,
500
).isEmpty, "There should be no models for this problem")
assert(generator.generateFor(
Seq(l1.id, l2.id, b.id, a.id),
And(Seq(
LessThan(a, b),
sorted(cons(a, l1)),
spec(b, l1, l2)
)),
10,
500
).size >= 5, "There should be at least 5 models for this problem.")
}
}
|
epfl-lara/leon
|
src/test/scala/leon/integration/purescala/DataGenSuite.scala
|
Scala
|
gpl-3.0
| 3,161 |
package autoregister.plugin
import scala.tools.nsc.plugins.PluginComponent
import scala.tools.nsc.{ Global, Phase }
import scala.tools.nsc.transform.TypingTransformers
import scala.tools.nsc.transform.Transform
import scala.tools.nsc.ast.TreeDSL
import scala.annotation.tailrec
import Utils._
class RegistryPluginPhase(
val global: Global,
regs: () => Map[Option[String], Set[Value.ToRegister]],
dones: String => Unit,
reporter: (Option[String], Set[String]) => Unit)
extends PluginComponent
with TypingTransformers
with Transform
with TreeDSL { t =>
import global._
import global.definitions._
val runsAfter = List("autoregister:inventory")
override val runsBefore = List("autoregister:check")
val phaseName = "autoregister:registry"
override def newTransformer(unit: CompilationUnit) = {
new RegistryTransformer(unit, regs())
}
class RegistryTransformer(unit: CompilationUnit, registries: Map[Option[String], Set[Value.ToRegister]])
extends TypingTransformer(unit) {
sealed trait CallRegisterable[T] {
def transform(in: T, registeringType: Option[Tree]): Tree
}
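    // Re-indents a one-line tree dump: it breaks after '(' and ',', indenting one
    // space per nesting level, and dedents again at ')'.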
def prettyPrint(s: String): String = {
@tailrec
def loop(i: Int, done: String, nl: Boolean)(implicit tabs: Int): String =
if (i >= s.length) done
else {
val start = if (nl) done + "\\n" + " " * tabs else done
def eolc(cnt: Int = 0) = "\\n" + " " * (tabs + cnt)
def eol = eolc()
s.charAt(i) match {
case '(' =>
s.indexWhere(c => "(,)".contains(c), i + 1) match {
case x if x != -1 && s.charAt(x) == ')' => loop(i + 1, s"$start(", false)(tabs + 1)
case _ => loop(i + 1, s"$start(", true)(tabs + 1)
}
case ')' => loop(i + 1, s"$start)", true)(tabs - 1)
case ',' => loop(i + 1, s"$done,", true)
case '\\n' => loop(i + 1, s"$start", true)
case ' ' => loop(i + 1, start, false)
case x => loop(i + 1, s"$start$x", false)
}
}
loop(0, "", false)(0)
}
object OriginalTypeTree {
def unapply(tpe: TypeTree): Option[Tree] =
if (!tpe.original.isEmpty) Some(tpe.original)
else None
}
def fromRegistry(m: ModuleDef)(registry: Set[(String, Value.ToRegister)]): Option[Set[Tree]] =
if (registry.isEmpty) None
else {
import CODE._
registry foreach { s => dones(s._2.name) }
val registers =
(for {
s <- registry
owner = m.symbol.tpe.typeSymbol
registerMethod = m.symbol.tpe.member(TermName(s._1))
member <- m.impl.body
DefDef(_, tname, _, List(args, _*), _, _) <- member
if tname.decoded == s._1
} yield {
val res: Tree =
s._2.tpe match {
case RegisteringType.CaseClass =>
args match {
case List(ValDef(_, _, _: TypeTree, _), ValDef(_, _, _, _), ValDef(_, _, _, _)) =>
val List(ValDef(_, _, _: TypeTree, _), ValDef(_, _, _, _), ValDef(_, _, _, _)) = args
val cls = rootMirror.getRequiredClass(s._2.name)
val mod = cls.companionModule
/*val fields = cls.tpe.decls.collect {
case s: TermSymbol if s.isVal && s.isCaseAccessor =>
(TermName(treeBuilder.fresh.newName(s.name.toString.trim)), s.typeSignature)
}
//val app = TermName(s"app${cls.fullNameAsName('_')}")
//val appargs: List[Tree] = (for { (name, symbol) <- fields } yield q"val $name:$symbol").toList
//treeBuilder.makeFunctionTypeTree(appargs, treeBuilder.)
//val appargsv = for { (name, _) <- fields } yield Ident(name.decoded)
//val appdef = q"""private def $app(..$appargs):$cls = $mod.apply(..$appargsv)"""
fields.size match {
case 0 =>
q"""
$registerMethod(classOf[$cls], (_: Unit) => $mod.apply(), (_:$cls) => ())
"""
case 1 =>
q"""
$registerMethod(classOf[$cls], ($mod.apply _), ($mod.unapply _).andThen(_.get))
"""
case _ =>
q"""
$registerMethod(classOf[$cls], ($mod.apply _).tupled, ($mod.unapply _).andThen(_.get))
"""
}*/
//val tpe = mod
val tpes = cls.tpe.decls.collect {
case s: TermSymbol if s.isVal && s.isCaseAccessor => s.typeSignature
}
val fname = TermName(s"fromTuple${cls.fullNameAsName('_')}")
val tname = TermName(s"toTuple${cls.fullNameAsName('_')}")
tpes match {
case Nil =>
/*val $fname: (Unit) => $cls = (_:Unit) => $mod.apply()
val $tname: ($cls) => Unit = _ => ()*/
q"""
$registerMethod(classOf[$cls], (_:Unit) => $mod.apply(), (_:$cls) => ())
"""
//q""" () """
case head :: Nil =>
/*q"""val $fname: ($head) => $cls = ($mod.apply _)""",
q"""val $tname: ($cls) => (..$tpes) = ($mod.unapply
_).andThen(_.get)""",*/
q"""$registerMethod(classOf[$cls], ($mod.apply _), ($mod.unapply _).andThen(_.get))"""
//q""" () """
case _ =>
/*q"""val $fname: ((..$tpes)) => $cls = ($mod.apply _).tupled""",
q"""val $tname: ($cls) => (..$tpes) = """,*/
q"""$registerMethod(classOf[$cls], ($mod.apply _).tupled, ($mod.unapply _).andThen(_.get))"""
//
}
//q""" val $fname : $cls = null """
//println(defs.mkString("\\n"))
//Block(defs: _*)
//q""" $registerMethod(classOf[$cls], $mod.fromTuple, $mod.toTuple) """
case _ =>
abort(s"${registerMethod.fullNameString} must have 3 args : (Class[C], T => C, C => T)")
}
case RegisteringType.ConcreteClass =>
args match {
case List(ValDef(_, _, OriginalTypeTree(AppliedTypeTree(mainTpe, _)), _))
if mainTpe.symbol.decodedName == "Class" =>
val cls = rootMirror.getRequiredClass(s._2.name)
q"""$registerMethod(classOf[$cls])"""
case _ =>
abort(s"${registerMethod.fullNameString} must have 1 arg : (Class[C])")
}
case RegisteringType.Object =>
args match {
case List(ValDef(_, _, _, _)) =>
val mod = rootMirror.getRequiredModule(s._2.name)
q"""$registerMethod($mod)"""
case _ =>
abort(s"${registerMethod.fullNameString} must have 1 arg")
}
}
val typed = localTyper.atOwner(m.impl.symbol).typed(res)
typed
})
Some(registers)
}
override def transform(tree: Tree): Tree = tree match {
case m @ ModuleDef(_, _, _) =>
val r = (fromRegistry(m) {
m.impl.body.flatMap {
case d @ DefDef(_, tname, _, _, _, _) =>
def process(key: Option[String]) = {
reporter(key, registries(key).map(_.name))
registries(key).map(s => tname.decoded -> s)
}
val registryAnnot =
m.symbol.getAnnotation(typeOf[autoregister.annotations.Registry].typeSymbol).toSeq.flatMap { annot =>
process(annot.args.headOption collect {
case Literal(Constant(key: String)) => key
})
}
val registryOpt =
if (tname.decoded == "register") {
process(None)
} else {
Set()
}
val meth = process(Some(s"${m.symbol.fullNameString}.${tname.decoded}"))
(meth ++ registryAnnot ++ registryOpt)
case _ =>
Nil
}.toSet
} match {
case Some(registers) if registers.nonEmpty =>
val z = treeCopy.ModuleDef(
m,
m.mods,
m.name,
treeCopy.Template(m.impl, m.impl.parents, m.impl.self, m.impl.body.map(transform) ++ registers))
//println(registers.mkString("\\n"))
//println(showCode(z))
//localTyper.typed(z)
//localTyper.atOwner(m.symbol).typed(z)
z
case _ =>
m
})
r
case _ @PackageDef(_, _) =>
super.transform(tree)
case _ @Template(_, _, _) =>
super.transform(tree)
case _ =>
tree
}
}
}
|
math85360/autoregister
|
src/main/scala/autoregister/plugin/RegistryPluginPhase.scala
|
Scala
|
mit
| 9,767 |
// @SOURCE:/home/baptiste/check_my_ride/conf/routes
// @HASH:66277d7cd60a62c63c0557fa462785079a0dca5e
// @DATE:Mon Aug 18 16:34:53 CEST 2014
import play.core._
import play.core.Router._
import play.core.j._
import play.api.mvc._
import Router.queryString
object Routes extends Router.Routes {
private var _prefix = "/"
def setPrefix(prefix: String) {
_prefix = prefix
List[(String,Routes)]().foreach {
case (p, router) => router.setPrefix(prefix + (if(prefix.endsWith("/")) "" else "/") + p)
}
}
def prefix = _prefix
lazy val defaultPrefix = { if(Routes.prefix.endsWith("/")) "" else "/" }
// @LINE:8
private[this] lazy val controllers_Application_index0 = Route("GET", PathPattern(List(StaticPart(Routes.prefix))))
// @LINE:11
private[this] lazy val controllers_Authentification_verifyUser1 = Route("POST", PathPattern(List(StaticPart(Routes.prefix),StaticPart(Routes.defaultPrefix),StaticPart("verifyUser"))))
// @LINE:12
private[this] lazy val controllers_Users_createUser2 = Route("POST", PathPattern(List(StaticPart(Routes.prefix),StaticPart(Routes.defaultPrefix),StaticPart("user"))))
// @LINE:15
private[this] lazy val controllers_Assets_at3 = Route("GET", PathPattern(List(StaticPart(Routes.prefix),StaticPart(Routes.defaultPrefix),StaticPart("assets/"),DynamicPart("file", """.+""",false))))
def documentation = List(("""GET""", prefix,"""controllers.Application.index"""),("""POST""", prefix + (if(prefix.endsWith("/")) "" else "/") + """verifyUser""","""controllers.Authentification.verifyUser"""),("""POST""", prefix + (if(prefix.endsWith("/")) "" else "/") + """user""","""controllers.Users.createUser"""),("""GET""", prefix + (if(prefix.endsWith("/")) "" else "/") + """assets/$file<.+>""","""controllers.Assets.at(path:String = "/public", file:String)""")).foldLeft(List.empty[(String,String,String)]) { (s,e) => e.asInstanceOf[Any] match {
case r @ (_,_,_) => s :+ r.asInstanceOf[(String,String,String)]
case l => s ++ l.asInstanceOf[List[(String,String,String)]]
}}
def routes:PartialFunction[RequestHeader,Handler] = {
// @LINE:8
case controllers_Application_index0(params) => {
call {
invokeHandler(controllers.Application.index, HandlerDef(this, "controllers.Application", "index", Nil,"GET", """ Single endpoint for serving AngularJS""", Routes.prefix + """"""))
}
}
// @LINE:11
case controllers_Authentification_verifyUser1(params) => {
call {
invokeHandler(controllers.Authentification.verifyUser, HandlerDef(this, "controllers.Authentification", "verifyUser", Nil,"POST", """ User management API""", Routes.prefix + """verifyUser"""))
}
}
// @LINE:12
case controllers_Users_createUser2(params) => {
call {
invokeHandler(controllers.Users.createUser, HandlerDef(this, "controllers.Users", "createUser", Nil,"POST", """""", Routes.prefix + """user"""))
}
}
// @LINE:15
case controllers_Assets_at3(params) => {
call(Param[String]("path", Right("/public")), params.fromPath[String]("file", None)) { (path, file) =>
invokeHandler(controllers.Assets.at(path, file), HandlerDef(this, "controllers.Assets", "at", Seq(classOf[String], classOf[String]),"GET", """ Map static resources from the /public folder to the /assets URL path""", Routes.prefix + """assets/$file<.+>"""))
}
}
}
}
|
Viewtiful/Check-my-ride
|
target/scala-2.10/src_managed/main/routes_routing.scala
|
Scala
|
apache-2.0
| 3,379 |
package gitbucket.core.view
import java.util.Date
import gitbucket.core.model.Account
import gitbucket.core.service.{SystemSettingsService, RequestCache}
import gitbucket.core.controller.Context
import SystemSettingsService.SystemSettings
import javax.servlet.http.HttpServletRequest
import play.twirl.api.Html
import org.scalatest.FunSpec
import org.scalatest.mock.MockitoSugar
import org.mockito.Mockito._
class AvatarImageProviderSpec extends FunSpec with MockitoSugar {
val request = mock[HttpServletRequest]
when(request.getRequestURL).thenReturn(new StringBuffer("http://localhost:8080/path.html"))
when(request.getRequestURI).thenReturn("/path.html")
when(request.getContextPath).thenReturn("")
describe("getAvatarImageHtml") {
it("should show Gravatar image for no image account if gravatar integration is enabled") {
implicit val context = Context(createSystemSettings(true), None, request)
val provider = new AvatarImageProviderImpl(Some(createAccount(None)))
assert(provider.toHtml("user", 32).toString ==
"<img src=\\"https://www.gravatar.com/avatar/d41d8cd98f00b204e9800998ecf8427e?s=32&d=retro&r=g\\" class=\\"avatar\\" style=\\"width: 32px; height: 32px;\\" />")
}
it("should show uploaded image even if gravatar integration is enabled") {
implicit val context = Context(createSystemSettings(true), None, request)
val provider = new AvatarImageProviderImpl(Some(createAccount(Some("icon.png"))))
assert(provider.toHtml("user", 32).toString ==
"<img src=\\"/user/_avatar\\" class=\\"avatar\\" style=\\"width: 32px; height: 32px;\\" />")
}
it("should show local image for no image account if gravatar integration is disabled") {
implicit val context = Context(createSystemSettings(false), None, request)
val provider = new AvatarImageProviderImpl(Some(createAccount(None)))
assert(provider.toHtml("user", 32).toString ==
"<img src=\\"/user/_avatar\\" class=\\"avatar\\" style=\\"width: 32px; height: 32px;\\" />")
}
it("should show Gravatar image for specified mail address if gravatar integration is enabled") {
implicit val context = Context(createSystemSettings(true), None, request)
val provider = new AvatarImageProviderImpl(None)
assert(provider.toHtml("user", 20, "[email protected]").toString ==
"<img src=\\"https://www.gravatar.com/avatar/4712f9b0e63f56ad952ad387eaa23b9c?s=20&d=retro&r=g\\" class=\\"avatar-mini\\" style=\\"width: 20px; height: 20px;\\" />")
}
it("should show unknown image for unknown user if gravatar integration is enabled") {
implicit val context = Context(createSystemSettings(true), None, request)
val provider = new AvatarImageProviderImpl(None)
assert(provider.toHtml("user", 20).toString ==
"<img src=\\"/_unknown/_avatar\\" class=\\"avatar-mini\\" style=\\"width: 20px; height: 20px;\\" />")
}
it("should show unknown image for specified mail address if gravatar integration is disabled") {
implicit val context = Context(createSystemSettings(false), None, request)
val provider = new AvatarImageProviderImpl(None)
assert(provider.toHtml("user", 20, "[email protected]").toString ==
"<img src=\\"/_unknown/_avatar\\" class=\\"avatar-mini\\" style=\\"width: 20px; height: 20px;\\" />")
}
it("should add tooltip if it's enabled") {
implicit val context = Context(createSystemSettings(false), None, request)
val provider = new AvatarImageProviderImpl(None)
assert(provider.toHtml("user", 20, "[email protected]", true).toString ==
"<img src=\\"/_unknown/_avatar\\" class=\\"avatar-mini\\" style=\\"width: 20px; height: 20px;\\" data-toggle=\\"tooltip\\" title=\\"user\\"/>")
}
}
private def createAccount(image: Option[String]) =
Account(
userName = "user",
fullName = "user@localhost",
mailAddress = "",
password = "",
isAdmin = false,
url = None,
registeredDate = new Date(),
updatedDate = new Date(),
lastLoginDate = None,
image = image,
isGroupAccount = false,
isRemoved = false)
private def createSystemSettings(useGravatar: Boolean) =
SystemSettings(
baseUrl = None,
information = None,
allowAccountRegistration = false,
allowAnonymousAccess = true,
isCreateRepoOptionPublic = true,
gravatar = useGravatar,
notification = false,
activityLogLimit = None,
ssh = false,
sshHost = None,
sshPort = None,
useSMTP = false,
smtp = None,
ldapAuthentication = false,
ldap = None)
/**
* Adapter to test AvatarImageProviderImpl.
*/
class AvatarImageProviderImpl(account: Option[Account]) extends AvatarImageProvider with RequestCache {
def toHtml(userName: String, size: Int, mailAddress: String = "", tooltip: Boolean = false)
(implicit context: Context): Html = getAvatarImageHtml(userName, size, mailAddress, tooltip)
override def getAccountByMailAddress(mailAddress: String)(implicit context: Context): Option[Account] = account
override def getAccountByUserName(userName: String)(implicit context: Context): Option[Account] = account
}
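  // The Gravatar URLs asserted above embed an MD5 hex digest of the trimmed,
  // lower-cased mail address. A minimal sketch of that derivation (an
  // illustrative helper, not part of the original spec):
  private def gravatarHash(mailAddress: String): String =
    java.security.MessageDigest.getInstance("MD5")
      .digest(mailAddress.trim.toLowerCase.getBytes("UTF-8"))
      .map("%02x".format(_))
      .mkString
  // gravatarHash("") yields "d41d8cd98f00b204e9800998ecf8427e", the digest of
  // the empty mail address set by createAccount above.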
}
|
marklacroix/gitbucket
|
src/test/scala/gitbucket/core/view/AvatarImageProviderSpec.scala
|
Scala
|
apache-2.0
| 5,460 |
/*
* Shadowsocks - A shadowsocks client for Android
* Copyright (C) 2015 <[email protected]>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*
 *                            ___====-_  _-====___
 *                      _--^^^#####//      \\#####^^^--_
 *                   _-^##########// (    ) \\##########^-_
 *                  -############//  |\^^/|  \\############-
 *                _/############//   (@::@)   \\############\_
 *               /#############((     \\//     ))#############\
 *              -###############\\    (oo)    //###############-
 *             -#################\\  / VV \  //#################-
 *            -###################\\/      \//###################-
 *           _#/|##########/\######(   /\   )######/\##########|\#_
 *           |/ |#/\#/\#/\/  \#/\##\  |  |  /##/\#/  \/\#/\#/\#| \|
 *           `  |/  V  V  `   V  \#\| |  | |/#/  V   '  V  V  \|  '
 *              `   `  `      `   / | |  | | \   '      '  '   '
 *                               (  | |  | |  )
 *                              __\ | |  | | /__
 *                             (vvv(VVV)(VVV)vvv)
*
* HERE BE DRAGONS
*
*/
package com.github.shadowsocks.utils
import java.io.{File, IOException}
import java.util.concurrent.Executors
import android.net.{LocalServerSocket, LocalSocket, LocalSocketAddress}
import android.util.Log
import com.github.shadowsocks.BaseService
class TrafficMonitorThread() extends Thread {
val TAG = "TrafficMonitorThread"
val PATH = "/data/data/com.github.shadowsocks/stat_path"
@volatile var serverSocket: LocalServerSocket = null
@volatile var isRunning: Boolean = true
def closeServerSocket() {
if (serverSocket != null) {
try {
serverSocket.close()
} catch {
case _: Exception => // ignore
}
serverSocket = null
}
}
def stopThread() {
isRunning = false
closeServerSocket()
}
override def run() {
try {
new File(PATH).delete()
} catch {
case _: Exception => // ignore
}
try {
val localSocket = new LocalSocket
localSocket.bind(new LocalSocketAddress(PATH, LocalSocketAddress.Namespace.FILESYSTEM))
serverSocket = new LocalServerSocket(localSocket.getFileDescriptor)
} catch {
case e: IOException =>
Log.e(TAG, "unable to bind", e)
return
}
val pool = Executors.newFixedThreadPool(1)
while (isRunning) {
try {
val socket = serverSocket.accept()
pool.execute(() => {
try {
val input = socket.getInputStream
val output = socket.getOutputStream
val buffer = new Array[Byte](256)
val size = input.read(buffer)
val array = new Array[Byte](size)
Array.copy(buffer, 0, array, 0, size)
            val stat = new String(array, "UTF-8").split("\\|")
if (stat.length == 2) {
TrafficMonitor.update(stat(0).toLong, stat(1).toLong)
}
output.write(0)
input.close()
output.close()
} catch {
case e: Exception =>
Log.e(TAG, "Error when recv traffic stat", e)
}
// close socket
try {
socket.close()
} catch {
case _: Exception => // ignore
}
})
} catch {
case e: IOException =>
Log.e(TAG, "Error when accept socket", e)
return
}
}
}
}
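// A minimal client-side sketch of the protocol served above (an assumption
// for illustration, not part of the original file): a reporter connects to
// the same filesystem socket, writes "<tx>|<rx>" as UTF-8 and blocks until
// the monitor answers with its single-byte ack.
object TrafficReporterSketch {
  def report(tx: Long, rx: Long) {
    val socket = new LocalSocket
    socket.connect(new LocalSocketAddress(
      "/data/data/com.github.shadowsocks/stat_path",
      LocalSocketAddress.Namespace.FILESYSTEM))
    try {
      socket.getOutputStream.write(s"$tx|$rx".getBytes("UTF-8"))
      socket.getInputStream.read() // wait for the ack byte written by the monitor
    } finally {
      socket.close()
    }
  }
}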
|
baohaojun/shadowsocks-android
|
src/main/scala/com/github/shadowsocks/utils/TrafficMonitorThread.scala
|
Scala
|
gpl-3.0
| 4,128 |
// Starter Code for Exercise 1
// From "String Interpolation" atom
import com.atomicscala.AtomicTest._
val gnome = new GardenGnome(20.0, 110.0, false)
gnome.show() is "20.0 110.0 false true"
val bob = new GardenGnome("Bob")
bob.show() is "15.0 100.0 true true"
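// A minimal GardenGnome sketch that would satisfy the two assertions above
// (an assumption for illustration: the exercise expects the reader to write
// this class, and the field names here are invented):
class GardenGnome(val height: Double, val weight: Double, val painted: Boolean) {
  val magic = true // accounts for the trailing "true" in both expected strings
  def this(name: String) = this(15.0, 100.0, true) // named gnomes get defaults
  def show() = s"$height $weight $painted $magic"
}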
|
P7h/ScalaPlayground
|
Atomic Scala/atomic-scala-solutions/29_StringInterpolation/Starter-1.scala
|
Scala
|
apache-2.0
| 262 |
/*
* -╥⌐⌐⌐⌐ -⌐⌐⌐⌐-
* ≡╢░░░░⌐\░░░φ ╓╝░░░░⌐░░░░╪╕
* ╣╬░░` `░░░╢┘ φ▒╣╬╝╜ ░░╢╣Q
* ║╣╬░⌐ ` ╤▒▒▒Å` ║╢╬╣
* ╚╣╬░⌐ ╔▒▒▒▒`«╕ ╢╢╣▒
* ╫╬░░╖ .░ ╙╨╨ ╣╣╬░φ ╓φ░╢╢Å
* ╙╢░░░░⌐"░░░╜ ╙Å░░░░⌐░░░░╝`
* ``˚¬ ⌐ ˚˚⌐´
*
* Copyright © 2016 Flipkart.com
*/
package com.flipkart.connekt.receptors.wire
import akka.http.scaladsl.marshalling.{PredefinedToEntityMarshallers, ToEntityMarshaller}
import akka.http.scaladsl.model.MediaTypes
import akka.http.scaladsl.unmarshalling.{FromEntityUnmarshaller, PredefinedFromEntityUnmarshallers}
import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper}
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper
import com.flipkart.connekt.receptors.wire.GenericJsonSupport._
import scala.collection.mutable
import scala.reflect.ClassTag
/**
 * Builds on [[akka.http.scaladsl.marshalling.PredefinedToEntityMarshallers]] and [[akka.http.scaladsl.unmarshalling.PredefinedFromEntityUnmarshallers]]
 * to provide implicit generic JSON (un)marshallers. As per the akka-http documentation, the `akka-http-spray-json`
 * module can be used along with RootJsonReader/RootJsonWriter implementations for every model type T. <br>
 *
 * This implementation, however, relies on [[https://github.com/FasterXML/jackson-module-scala scala-jackson]]
 * and hence does not use [[http://doc.akka.io/docs/akka-stream-and-http-experimental/2.0-M1/scala/http/common/json-support.html akka-http-spray-json]]
 * <br>
*
* Gist Attribution [[https://gist.github.com/chadselph Chad Selph]]
* @see
* http://doc.akka.io/docs/akka-stream-and-http-experimental/2.0-M1/scala/http/common/marshalling.html
* http://doc.akka.io/docs/akka-stream-and-http-experimental/2.0-M1/scala/http/common/unmarshalling.html
*
*/
object GenericJsonSupport {
val jacksonModules = Seq(DefaultScalaModule)
val mapper = new ObjectMapper() with ScalaObjectMapper
mapper.registerModules(jacksonModules: _*)
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
val m: mutable.Map[Class[_], ToEntityMarshaller[_]] = mutable.Map.empty[Class[_], ToEntityMarshaller[_]]
val um: mutable.Map[Class[_], FromEntityUnmarshaller[_]] = mutable.Map.empty[Class[_], FromEntityUnmarshaller[_]]
}
trait JsonToEntityMarshaller extends PredefinedToEntityMarshallers {
implicit def findMarshaller[T](implicit cTag: ClassTag[T]): ToEntityMarshaller[T] =
m.getOrElseUpdate(cTag.runtimeClass, genericMarshaller[T]).asInstanceOf[ToEntityMarshaller[T]]
def genericMarshaller[T]: ToEntityMarshaller[T] =
stringMarshaller(MediaTypes.`application/json`)
.compose[T](mapper.writeValueAsString)
}
trait JsonFromEntityUnmarshaller extends PredefinedFromEntityUnmarshallers {
implicit def findUnmarshaller[T](implicit cTag: ClassTag[T]): FromEntityUnmarshaller[T] =
um.getOrElseUpdate(cTag.runtimeClass, genericUnmarshaller[T](cTag)).asInstanceOf[FromEntityUnmarshaller[T]]
def genericUnmarshaller[T](cTag: ClassTag[T]): FromEntityUnmarshaller[T] =
stringUnmarshaller.forContentTypes(MediaTypes.`application/json`)
.map(mapper.readValue(_, cTag.runtimeClass).asInstanceOf[T])
}
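// A hedged usage sketch (the route object and the Ping model are assumptions,
// not part of connekt): mixing both traits into a route gives automatic JSON
// (un)marshalling for any case class via the shared Jackson mapper.
object EchoRouteSketch extends JsonToEntityMarshaller with JsonFromEntityUnmarshaller {
  import akka.http.scaladsl.server.Directives._
  case class Ping(message: String)
  val route = post {
    entity(as[Ping]) { ping => // findUnmarshaller derives the FromEntityUnmarshaller
      complete(ping)           // findMarshaller derives the ToEntityMarshaller
    }
  }
}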
|
Flipkart/connekt
|
receptors/src/main/scala/com/flipkart/connekt/receptors/wire/JsonToEntityMarshaller.scala
|
Scala
|
mit
| 3,532 |
package toledo17.communication
import java.io.Serializable
import com.amazonaws.services.sqs.AmazonSQSClient
import com.amazonaws.services.sqs.model.Message
import scala.collection.JavaConverters._
class Communication[Payload <: Serializable] {
  private[this] val client: AmazonSQSClient = new AmazonSQSClient() // one shared client rather than a new one per call
  def fromVisitors(items: Iterable[Payload]): Unit = {
items foreach {
item =>
println(" Submitting "+item.toString)
val serialized = Serializer.serialize[Payload](item)
//TODO consider using sendMessageBatch
//TODO make it generic, don't hard-code queue name
client.sendMessage(Infrastructure.SQS_FROM_VISITORS_TO_MERGERS, serialized)
}
}
  def toMergers(action: Payload => Unit): Unit = {
//TODO consider receiving in batches (?)
val messages = client.receiveMessage(Infrastructure.SQS_FROM_VISITORS_TO_MERGERS).getMessages.asScala
messages foreach {
      msg: Message =>
try {
val body = msg.getBody
val item = Serializer.deserialize[Payload](body)
action(item)
} finally {
//TODO should it be deleted only on success?
client.deleteMessage(Infrastructure.SQS_FROM_VISITORS_TO_MERGERS, msg.getReceiptHandle)
}
}
}
}
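// A hedged usage sketch (the payload type and the queued values are
// assumptions): one process enqueues visitor results, another drains them.
object CommunicationSketch {
  def main(args: Array[String]): Unit = {
    val comms = new Communication[java.lang.Long]
    comms.fromVisitors(Seq(1L, 2L, 3L).map(java.lang.Long.valueOf)) // producer side
    comms.toMergers(item => println(s"merging item $item"))         // consumer side
  }
}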
|
mccartney/toledo17
|
src/main/scala/toledo17/communication/Communication.scala
|
Scala
|
gpl-3.0
| 1,292 |
package com.eevolution.context.dictionary.infrastructure.service.impl
import java.util.UUID
import com.eevolution.context.dictionary.infrastructure.repository.UserDefinedTabRepository
import com.eevolution.context.dictionary.infrastructure.service.UserDefinedTabService
import com.lightbend.lagom.scaladsl.api.ServiceCall
import com.lightbend.lagom.scaladsl.persistence.PersistentEntityRegistry
/**
* Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* Email: [email protected], http://www.e-evolution.com , http://github.com/EmerisScala
* Created by [email protected] , www.e-evolution.com on 21/11/17.
*/
/**
* User Defined Tab Service Implementation
* @param registry
* @param userDefinedTabRepository
*/
class UserDefinedTabServiceImpl (registry: PersistentEntityRegistry, userDefinedTabRepository: UserDefinedTabRepository) extends UserDefinedTabService {
private val DefaultPageSize = 10
override def getAll() = ServiceCall {_ => userDefinedTabRepository.getAll()}
override def getAllByPage(page : Option[Int], pageSize : Option[Int]) = ServiceCall{_ => userDefinedTabRepository.getAllByPage(page.getOrElse(0) , pageSize.getOrElse(DefaultPageSize))}
override def getById(id: Int) = ServiceCall { _ => userDefinedTabRepository.getById(id)}
override def getByUUID(uuid: UUID) = ServiceCall { _ => userDefinedTabRepository.getByUUID(uuid)}
}
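// A hedged wiring sketch (loader and component names are assumptions, not
// part of this module): in a Lagom application loader the implementation is
// typically bound along these lines:
//
//   lazy val lagomServer = serverFor[UserDefinedTabService](
//     new UserDefinedTabServiceImpl(persistentEntityRegistry, userDefinedTabRepository))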
|
adempiere/ADReactiveSystem
|
dictionary-impl/src/main/scala/com/eevolution/context/dictionary/infrastructure/service/impl/UserDefinedTabServiceImpl.scala
|
Scala
|
gpl-3.0
| 2,095 |