code | repo_name | path | language | license | size
---|---|---|---|---|---
stringlengths 5–1M | stringlengths 5–109 | stringlengths 6–208 | stringclasses 1 value | stringclasses 15 values | int64 5–1M
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.tools.testkit
import java.lang.management.ManagementFactory
import org.junit.Assert.{assertEquals, assertTrue, fail}
object AllocationTest {
val allocationCounter = ManagementFactory.getThreadMXBean.asInstanceOf[com.sun.management.ThreadMXBean]
assertTrue(allocationCounter.isThreadAllocatedMemorySupported)
allocationCounter.setThreadAllocatedMemoryEnabled(true)
val cost = {
val id = Thread.currentThread().getId
for (i <- 1 to 1000) yield {
val before = allocationCounter.getThreadAllocatedBytes(id)
val after = allocationCounter.getThreadAllocatedBytes(id)
(after - before)
}
}.min
println(s"cost of tracking allocations = $cost")
}
trait AllocationTest {
import AllocationTest._
def nonAllocatingEqual(expected: Boolean, a: AnyRef, b: AnyRef): Unit = {
assertEquals(expected, nonAllocating(a == b))
}
def nonAllocating[T](fn: => T)(implicit execution: AllocationExecution = AllocationExecution()): T = {
val result = allocationInfo(fn)
if (result.min != 0) {
result.allocations foreach {
x => println(s"allocation $x")
}
fail(s"allocating min = ${result.min}")
}
result.result
}
def allocationInfo[T](fn: => T)(implicit execution: AllocationExecution = AllocationExecution()): AllocationInfo[T] = {
val expected = fn
val id = Thread.currentThread().getId
//warmup
for (i <- 0 until execution.warmupCount) {
val actual = fn
assertEquals(s"warmup at index $i $expected $actual", expected, actual)
}
//test
val counts = new Array[Long](execution.executionCount)
for (i <- 0 until execution.executionCount) {
val before = allocationCounter.getThreadAllocatedBytes(id)
val actual = fn
val after = allocationCounter.getThreadAllocatedBytes(id)
counts(i) = after - cost - before
assertEquals(s"at index $i $expected $actual", expected, actual)
}
AllocationInfo(expected, counts)
}
}
case class AllocationExecution(executionCount: Int = 1000, warmupCount: Int = 1000)
case class AllocationInfo[T](result: T, allocations: Array[Long]) {
def min = allocations.iterator.min
}
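// A minimal usage sketch (hypothetical test class, assuming JUnit 4 on the
// classpath): mix the AllocationTest trait above into a test and assert that
// evaluating an expression allocates zero bytes.
import org.junit.Test

class NilAllocationSpec extends AllocationTest {
  // fails with the per-run allocation counts if any measured run allocates
  @Test def emptyListDoesNotAllocate(): Unit = nonAllocating(Nil)
}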
|
martijnhoekstra/scala
|
src/testkit/scala/tools/testkit/AllocationTest.scala
|
Scala
|
apache-2.0
| 2,496 |
package geotrellis.statistics
import geotrellis._
import math.{abs, ceil, min, max, sqrt}
object ArrayHistogram {
def apply(size:Int) = new ArrayHistogram(Array.fill[Int](size)(0), 0)
def apply(counts:Array[Int], total:Int) = new ArrayHistogram(counts, total)
def fromRaster(r:Raster, n:Int) = {
val h = ArrayHistogram(n)
r.foreach(z => if (z != NODATA) h.countItem(z, 1))
h
}
def fromHistograms(hs:List[Histogram], n:Int) = {
val total:Histogram = ArrayHistogram(n)
hs.foreach(h => total.update(h))
total
}
}
// TODO: can currently only handle non-negative integers
/**
* Data object representing a histogram that uses an array for internal storage.
*/
class ArrayHistogram(val counts:Array[Int], var total:Int) extends Histogram {
def size = counts.length
def getTotalCount = total
def copy = ArrayHistogram(counts.clone, total)
//def getValues = (0 until counts.length).toArray
def getValues = (0 until counts.length).filter(counts(_) > 0).toArray
def setItem(i:Int, count:Int) {
total = total - counts(i) + count
counts(i) = count
}
def uncountItem(i:Int) {
total -= counts(i)
counts(i) = 0
}
def countItem(i:Int, count:Int=1) {
total += count
counts(i) += count
}
def getItemCount(i:Int) = counts(i)
// REFACTOR: use Option
def getMinValue:Int = {
var i = 0
val limit = counts.length
while (i < limit) {
if (counts(i) > 0) return i
i += 1
}
return Int.MaxValue
}
// REFACTOR: use Option
def getMaxValue:Int = {
var i = counts.length - 1
while (i >= 0) {
if (counts(i) > 0) return i
i -= 1
}
return Int.MinValue
}
}
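// A minimal usage sketch (hypothetical object name): count a few values and
// read back the aggregates; uses only the methods defined above.
object ArrayHistogramExample {
  def main(args: Array[String]): Unit = {
    val h = ArrayHistogram(10)   // histogram over values 0..9
    h.countItem(3)               // default count = 1
    h.countItem(3)
    h.countItem(7, 5)
    println(h.getTotalCount)     // 7
    println(h.getValues.toList)  // List(3, 7)
    println(h.getMinValue)       // 3
    println(h.getMaxValue)       // 7
  }
}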
|
Tjoene/thesis
|
Case_Programs/geotrellis-0.7.0/src/main/scala/geotrellis/statistics/ArrayHistogram.scala
|
Scala
|
gpl-2.0
| 1,697 |
package t1000531
class A[B] extends java.lang.Iterable[B] {
import scala.collection.JavaConversions._
def iterator = Iterator.empty
}
|
Kwestor/scala-ide
|
org.scala-ide.sdt.core.tests/test-workspace/pc/src/t1000531/A.scala
|
Scala
|
bsd-3-clause
| 138 |
package org.bitcoins.rpc.client.v17
import akka.actor.ActorSystem
import org.bitcoins.core.crypto.ECPrivateKey
import org.bitcoins.core.protocol.transaction.Transaction
import org.bitcoins.core.script.crypto.HashType
import org.bitcoins.rpc.client.common.{
BitcoindRpcClient,
BitcoindVersion,
RpcOpts
}
import org.bitcoins.rpc.config.BitcoindInstance
import org.bitcoins.rpc.jsonmodels.{
SignRawTransactionResult,
TestMempoolAcceptResult
}
import org.bitcoins.rpc.serializers.JsonSerializers._
import org.bitcoins.rpc.serializers.JsonWriters._
import play.api.libs.json.{JsArray, JsBoolean, JsString, Json}
import scala.concurrent.Future
import scala.util.Try
/**
* This class is compatible with version 0.17 of Bitcoin Core.
*
* @see [[org.bitcoins.rpc.client.common.BitcoindRpcClient BitcoindRpcClient Scaladocs]]
*
* @define signRawTx Bitcoin Core 0.17 had a breaking change in the API
* for signing raw transactions. Previously the same
* RPC call was used for signing a TX with existing keys
* in the Bitcoin Core wallet or a manually provided private key.
* These RPC calls are now separated out into two distinct calls.
*/
class BitcoindV17RpcClient(override val instance: BitcoindInstance)(
implicit
actorSystem: ActorSystem)
extends BitcoindRpcClient(instance)
with V17LabelRpc
with V17PsbtRpc {
override def version: BitcoindVersion = BitcoindVersion.V17
/**
* $signRawTx
*
* This RPC call signs the raw transaction with keys found in
* the Bitcoin Core wallet.
*/
def signRawTransactionWithWallet(
transaction: Transaction,
utxoDeps: Vector[RpcOpts.SignRawTransactionOutputParameter] = Vector.empty,
sigHash: HashType = HashType.sigHashAll
): Future[SignRawTransactionResult] =
bitcoindCall[SignRawTransactionResult]("signrawtransactionwithwallet",
List(JsString(transaction.hex),
Json.toJson(utxoDeps),
Json.toJson(sigHash)))
/**
* $signRawTx
*
* This RPC call signs the raw transaction with keys provided
* manually.
*/
def signRawTransactionWithKey(
transaction: Transaction,
keys: Vector[ECPrivateKey],
utxoDeps: Vector[RpcOpts.SignRawTransactionOutputParameter] = Vector.empty,
sigHash: HashType = HashType.sigHashAll
): Future[SignRawTransactionResult] =
bitcoindCall[SignRawTransactionResult]("signrawtransactionwithkey",
List(JsString(transaction.hex),
Json.toJson(keys),
Json.toJson(utxoDeps),
Json.toJson(sigHash)))
// testmempoolaccept expects (and returns) a list of txes,
// but currently only lists of length 1 are supported
def testMempoolAccept(
transaction: Transaction,
allowHighFees: Boolean = false): Future[TestMempoolAcceptResult] = {
bitcoindCall[Vector[TestMempoolAcceptResult]](
"testmempoolaccept",
List(JsArray(Vector(Json.toJson(transaction))), JsBoolean(allowHighFees)))
.map(_.head)
}
}
object BitcoindV17RpcClient {
/**
* Creates an RPC client from the given instance.
*
* Behind the scenes, we create an actor system for
* you. You can use `withActorSystem` if you want to
* manually specify an actor system for the RPC client.
*/
def apply(instance: BitcoindInstance): BitcoindV17RpcClient = {
implicit val system = ActorSystem.create(BitcoindRpcClient.ActorSystemName)
withActorSystem(instance)
}
/**
* Creates an RPC client from the given instance,
* together with the given actor system. This is for
* advanced users, where you need fine-grained control
* over the RPC client.
*/
def withActorSystem(instance: BitcoindInstance)(
implicit system: ActorSystem): BitcoindV17RpcClient =
new BitcoindV17RpcClient(instance)
def fromUnknownVersion(
rpcClient: BitcoindRpcClient): Try[BitcoindV17RpcClient] =
Try {
new BitcoindV17RpcClient(rpcClient.instance)(rpcClient.system)
}
}
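// A minimal usage sketch (hypothetical `instance` and `unsignedTx` values,
// assuming a locally running, configured bitcoind):
//
//   implicit val ec = scala.concurrent.ExecutionContext.global
//   val client = BitcoindV17RpcClient(instance) // creates its own ActorSystem
//   val signedF: Future[SignRawTransactionResult] =
//     client.signRawTransactionWithWallet(unsignedTx)
//   signedF.foreach(result => println(result))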
|
bitcoin-s/bitcoin-s-core
|
bitcoind-rpc/src/main/scala/org/bitcoins/rpc/client/v17/BitcoindV17RpcClient.scala
|
Scala
|
mit
| 4,318 |
package satisfaction.engine.actors
import satisfaction.notifier.Notifier
import akka.actor.Actor
import akka.actor.ActorLogging
import satisfaction.Track
import satisfaction.notifier.Notified
import satisfaction.retry.Retryable
import satisfaction.Goal
import akka.actor.ActorSystem
import scala.concurrent.duration.Duration
import java.util.concurrent.TimeUnit
import scala.concurrent.ExecutionContext
import ExecutionContext.Implicits.global
import satisfaction.GoalState
import akka.actor.ActorRef
import satisfaction.track.TrackHistory
import satisfaction.GoalStatus
import satisfaction.TrackDescriptor
import satisfaction.Witness
import org.joda.time.DateTime
import com.codahale.metrics.MetricRegistry
import nl.grons.metrics.scala.ReceiveCounterActor
import nl.grons.metrics.scala.Counter
/**
* JMX Agent intercepts Satisfy and GoalSuccess
* messages, and updates
* JMX counters.
*
* JMX Agent updates counter runs
*/
class JMXAgent extends Actor with ActorLogging with satisfaction.engine.Instrumented {
val counterMap = scala.collection.mutable.Map[String,Counter]()
/**
* Only count job successes and failures for now
* until we rethink agent publishing ..
* and job lifecycle
*/
def receive = {
case GoalFailure(goalStatus) =>
getCounter( goalStatus, "failure") += 1
case GoalSuccess(goalStatus) =>
getCounter( goalStatus, "success") += 1
case unexpected : Any =>
log.warning(s" Unexpected message $unexpected in JMX Agent")
}
def getCounter(goalStatus : GoalStatus, event : String ) : Counter = {
val counterName = s"${goalStatus.track.trackName}.${goalStatus.goalName}.$event"
// cache counters on first use, so repeated events update the same JMX metric
counterMap.getOrElseUpdate(counterName, metrics.counter("satisfaction", counterName))
}
}
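// A minimal usage sketch (hypothetical actor/system names): the agent is
// deployed as a plain actor and fed GoalSuccess/GoalFailure events.
//
//   val system = ActorSystem("satisfaction")
//   val jmxAgent = system.actorOf(akka.actor.Props[JMXAgent], "jmxAgent")
//   jmxAgent ! GoalSuccess(goalStatus) // bumps "<track>.<goal>.success"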
|
jeromebanks/satisfaction
|
modules/engine/src/main/scala/satisfaction/engine/actors/JMXAgent.scala
|
Scala
|
apache-2.0
| 1,922 |
/**
* ____ __ ____ ____ ____,,___ ____ __ __ ____
* ( _ \ /__\ (_ )(_ _)( ___)/ __) ( _ \( )( )( _ \ Read
* ) / /(__)\ / /_ _)(_ )__) \__ \ )___/ )(__)( ) _ < README.txt
* (_)\_)(__)(__)(____)(____)(____)(___/ (__) (______)(____/ LICENSE.txt
*/
package razie.wiki.model
import com.mongodb.DBObject
import com.mongodb.casbah.Imports._
import com.novus.salat._
import controllers.{VErrors, Validation}
import play.api.Play.current
import play.api.cache._
import razie.audit.Audit
import razie.db.RazSalatContext._
import razie.db.{RMany, RazMongo}
import razie.diesel.dom.WikiDomain
import razie.hosting.WikiReactors
import razie.tconf.Visibility.PUBLIC
import razie.tconf.parser.{BaseAstNode, LeafAstNode, SpecParserSettings, StrAstNode}
import razie.wiki.model.features.{WForm, WikiForm}
import razie.wiki.parser.WAST
import razie.wiki.util.QueryParms
import razie.wiki.{Enc, Services, WikiConfig}
import razie.{Logging, clog, ctrace}
import scala.collection.mutable.ListBuffer
object WikiCache {
def set[T](id:String, w:T, i:Int) = {
clog << "WIKI_CACHE_SET - "+id
Cache.set(id, w, 300) // 5 minutes
}
def getEntry(id:String) : Option[WikiEntry] = {
Cache.getAs[WikiEntry](id).map{x=>
clog << "WIKI_CACHED FULL - "+id
x
}
}
def getDb(id:String) : Option[DBObject] = {
Cache.getAs[DBObject](id).map{x=>
clog << "WIKI_CACHED DB - "+id
x
}
}
def getString(id:String) : Option[String] = {
Cache.getAs[String](id).map{x=>
clog << "WIKI_CACHED FRM - "+id
x
}
}
def remove(id:String) = {
clog << "WIKI_CACHE_CLEAR - "+id
Cache.remove(id)
}
}
/** wiki factory and utils */
object Wikis extends Logging with Validation {
/** create the data section */
def mkFormData(spec: WikiEntry, defaults: Map[String, String] = Map.empty) = {
// build the defaults - cross check with formSpec
var defaultStr = ""
defaults.filter(x=> spec.form.fields.contains(x._1)).map { t =>
val (k, v) = t
defaultStr = defaultStr + s""", "$k":"$v" """
}
val content = s"""
{{.section:formData}}
{"formState":"created" $defaultStr }
{{/section}}
"""
content
}
def isEvent(cat: String) = "Race" == cat || "Event" == cat || "Training" == cat
//todo configure per realm
/** these categories are persisted in their own tables */
final val PERSISTED = Array("Item", "Event", "Training", "Note", "Entry", "Form", "JSON")
// "DslReactor", "DslElement", "DslDomain", "JSON", "DslEntity")
/** customize table names per category */
final val TABLE_NAME = "WikiEntry"
// map all Dsl type entities in the same table
final val TABLE_NAMES = Map.empty[String,String]
//("DslReactor" -> "weDsl", "DslElement" -> "weDsl", "DslDomain" -> "weDsl", "DslEntity" -> "weDslEntity")
final val RK = WikiConfig.RK
final val DFLT = RK // todo replace with RK
def apply(realm: String = RK) = WikiReactors(realm).wiki
def rk = WikiReactors(RK).wiki
def dflt = WikiReactors(WikiReactors.WIKI).wiki
def fromGrated[T <: AnyRef](o: DBObject)(implicit m: Manifest[T]) = grater[T](ctx, m).asObject(o)
/** safe to call before reactors are initialized */
def findSimple (wid:WID) = {
RazMongo(Wikis.TABLE_NAME).findOne(Map("category" -> wid.cat, "name" -> wid.name)) map (grater[WikiEntry].asObject(_))
}
// TODO refactor convenience
def find(wid: WID): Option[WikiEntry] =
apply(wid.getRealm).find(wid)
// TODO find by ID is bad, no - how to make it work across wikis ?
/** @deprecated optimize with realm */
def findById(id: String) = find(new ObjectId(id))
/** @deprecated optimize with realm */
def find(id: ObjectId) =
WikiReactors.reactors.foldLeft(None.asInstanceOf[Option[WikiEntry]])((a, b) => a orElse b._2.wiki.find(id))
/** @deprecated optimize with realm */
def findById(cat: String, id: ObjectId): Option[WikiEntry] =
WikiReactors.reactors.foldLeft(None.asInstanceOf[Option[WikiEntry]])((a, b) => a orElse b._2.wiki.findById(cat, id))
def linksFrom(to: UWID) = RMany[WikiLink]("from.cat" -> to.cat, "from.id" -> to.id)
def linksTo(to: UWID) = RMany[WikiLink]("to.cat" -> to.cat, "to.id" -> to.id)
def childrenOf(parent: UWID) =
RMany[WikiLink]("to.id" -> parent.id, "how" -> "Child").map(_.from)
def linksFrom(from: UWID, role: String) =
RMany[WikiLink]("from.id" -> from.id, "how" -> role)
// not taking realm into account...
def linksTo(cat: String, to: UWID, role: String) =
RMany[WikiLink]("from.cat" -> cat, "to.cat" -> to.cat, "to.id" -> to.id, "how" -> role)
// leave these vvvvvvvvvvvvvvvvvvvvvvvvvv
def label(wid: WID): String = /*wid.page map (_.label) orElse*/
apply(wid.getRealm).label(wid)
def label(wid: UWID): String = /*wid.page map (_.label) orElse*/
wid.wid.map(x => label(x)).getOrElse(wid.nameOrId)
// leave these ^^^^^^^^^^^^^^^^^^^^^^^^^^
//todo refactor in own utils vvv
final val MD = "md"
final val TEXT = "text"
final val JS = "js"
final val SCALA = "scala"
final val JSON = "json"
final val XML = "xml"
final val HTML = "html"
/** helper to deal with the different markups */
object markups {
final val list = Seq(
MD -> "Markdown",
TEXT -> "Text",
JSON -> "JSON",
XML -> "XML",
JS -> "JavaScript",
SCALA -> "Scala",
HTML -> "Raw html"
) // todo per reactor type - hackers like stuff
def contains(s: String) = list.exists(_._1 == s)
def isDsl(s: String) =
s == JS || s == XML || s == JSON || s == SCALA
}
def formFor(we: WikiEntry) = {
we.attr("wiki.form") orElse WikiDomain(we.realm).prop(we.category, "inst.form")
}
def templateFor(we: WikiEntry) = {
we.attr("wiki.template") orElse WikiDomain(we.realm).prop(we.category, "inst.template")
}
private def iformatName(name: String, pat: String, pat2: String = "") =
name.replaceAll(pat, "_").replaceAll(pat2, "").replaceAll("_+", "_").replaceFirst("_$", "")
/** format a simple name - try NOT to use this */
/** these are the safe url characters. I also included ', which confuses many sites */
val SAFECHARS =
"""[^0-9a-zA-Z\$\-_()',]""" // DO NOT TOUCH THIS PATTERN!
def formatName(name: String): String = iformatName(name, SAFECHARS, "") // DO NOT TOUCH THIS PATTERN!
/** format a complex name cat:name */
def formatName(wid: WID): String =
if ("WikiLink" == wid.cat)
iformatName(wid.name, """[ /{}\[\]]""")
else
formatName(wid.name)
/** format an even more complex name
*
* @param rk force links back to RK main or leave them
*/
def formatWikiLink(curRealm: String, wid: WID, nicename: String, label: String, role: Option[String], hover: Option[String] = None, rk: Boolean = false, max: Int = -1) = {
val name = formatName(wid.name)
val title = hover.map("title=\"" + _ + "\"") getOrElse ("")
def trim(s: String) = {
if (max < 0) s
else {
if (s.length > max) s.substring(0, max - 3) + "..."
else s
}
}
val tlabel = trim(label)
val r = wid.realm.getOrElse(curRealm)
// all pages without a realm are assumed to be in the current realm
val bigName = Wikis.apply(r).index.getForLower(name.toLowerCase())
if (bigName.isDefined || wid.cat.matches("User")) {
var newwid = Wikis.apply(r).index.getWids(bigName.get).headOption.map(_.copy(section = wid.section)) getOrElse wid.copy(name = bigName.get)
var u = newwid.formatted.urlRelative(curRealm)
if (rk && (u startsWith "/")) u = "http://" + Services.config.home + u
(s"""<a href="$u" title="$title">$tlabel</a>""", Some(ILink(newwid, label, role)))
} else if (rk) {
val sup = "" //"""<sup><b style="color:red">^</b></sup></a>"""
(
s"""<a href="http://${Services.config.home}${wid.formatted.urlRelative}" title="$title">$tlabel$sup</a>""",
Some(ILink(wid, label, role)))
} else {
// topic not found in index - hide it from google
// val prefix = if (wid.realm.isDefined && wid.getRealm != curRealm) s"/we/${wid.getRealm}" else "/wikie"
val prefix = "/wikie"
val plusplus = if (Wikis.PERSISTED.contains(wid.cat)) "" else """<sup><b style="color:red">++</b></sup>"""
(
s"""<a href="$prefix/show/${wid.wpath}" title="%s">$tlabel$plusplus</a>""".format
(hover.getOrElse("Missing page")),
Some(ILink(wid, label, role)))
}
}
def shouldFlag(name: String, label: String, content: String): Option[String] = {
val a = Array(name, label, content)
if (a.exists(_.matches("(?i)^.*<(" + SpecParserSettings.hnok + ")([^>]*)>"))) Some("WIKI_FORBIDDEN_HTML")
else if (hasBadWords(content, adultWords)) Some("WIKI_HAS_ADULT")
else None
}
private def include(wid: WID, c2: String, we: Option[WikiEntry] = None, firstTime: Boolean = false)(implicit errCollector: VErrors): Option[String] = {
// todo this is not cached as the underlying page may change - need to pick up changes
var done = false
val collecting = we.exists(_.depys.isEmpty) // should collect depys
val res = try {
val INCLUDE = """(?<!`)\[\[include(WithSection)?:([^\]]*)\]\]""".r
var res1 = INCLUDE.replaceAllIn(c2, { m =>
val content = for (
iwid <- WID.fromPath(m.group(2)).map(w => if (w.realm.isDefined) w else w.r(wid.getRealm)) orErr ("bad format for page");
c <- (if (m.group(1) == null) iwid.content else iwid.findSection.map(_.original)) orErr s"content for ${iwid.wpath} not found"
) yield {
if (collecting && we.isDefined)
we.get.depys = iwid.uwid.toList ::: we.get.depys
c
}
done = true
// IF YOUR content changes - review this escape here
//regexp uses $ as a substitution
val xx = content
.map(
_.replaceAllLiterally("\\", "\\\\")
.replaceAll("\\$", "\\\\\\$")
)
// .map(_.replaceAllLiterally("$", "\\$"))
// .map(_.replaceAll("\\\\", "\\\\\\\\"))
.getOrElse("`[ERR Can't include $1 " + errCollector.mkString + "]`")
xx
})
if (!res1.contains("{{.wiki.noTemplate")) {
var hadTemplate = false
val TEMPLATE = """(?<!`)\{\{\.?wiki.template[: ]*([^\}]*)\}\}""".r
res1 = TEMPLATE.replaceAllIn(res1, { m =>
done = true
hadTemplate = true
//todo this is parse-ahead, maybe i can make it lazy?
val parms = WikiForm.parseFormData(c2)
val content = template(m.group(1), Map() ++ parms)
// IF YOUR content changes - review this escape here
//regexp uses $ as a substitution
content
.replaceAllLiterally("\\", "\\\\")
.replaceAll("\\$", "\\\\\\$")
})
// check cat for preloaded cats that will trigger stackoverflow
// also, while domain is loading itself, i'm not processing instance templates
if (firstTime && !hadTemplate && wid.cat != "Category" && wid.cat != "Reactor" && !WikiDomain(wid.getRealm).isLoading)
WikiDomain(wid.getRealm).prop(wid.cat, "inst.template").map { t =>
done = true
val parms = WikiForm.parseFormData(c2)
val content = template(t, Map() ++ parms)
res1 = content + "\n\n" + res1
}
}
res1
} catch {
case s: Throwable => log("Error: ", s); "`[ERR Can't process an include]`"
}
if (done) Some(res) else None
}
def preprocessIncludes(wid: WID, markup: String, content: String, page: Option[WikiEntry] = None) = markup match {
case MD =>
implicit val errCollector = new VErrors()
var c2 = content
// TODO stupid - 3 levels of include...
include(wid, c2, page, true).map {
c2 = _
}.flatMap { x =>
include(wid, c2, page, false).map {
c2 = _
}.flatMap { x =>
include(wid, c2, page, false).map {
c2 = _
}
}
}
c2
case _ => content
}
// TODO better escaping of all url chars in wiki name
/** pre-process this wiki: do AST, includes etc */
def preprocess(wid: WID, markup: String, content: String, page: Option[WikiEntry]) : (BaseAstNode, String) = {
implicit val errCollector = new VErrors()
def includes (c:String) = {
var c2 = c
if (c2 contains "[[./")
c2 = c.replaceAll("""\[\[\./""", """[[%s/""".format(wid.realm.map(_ + ".").mkString + wid.cat + ":" + wid.name)) // child topics
if (c2 contains "[[../")
c2 = c2.replaceAll("""\[\[\../""", """[[%s""".format(wid.parentWid.map(wp => wp.realm.map(_ + ".").mkString + wp.cat + ":" + wp.name + "/").getOrElse(""))) // siblings topics
// TODO stupid - 3 levels of include...
include(wid, c2, page, true).map { x =>
page.map(_.cacheable = false) // simply mark dirty when there are includes - no dependency to manage
c2 = x
}.flatMap { x =>
include(wid, c2, page, false).map {
c2 = _
}.flatMap { x =>
include(wid, c2, page, false).map {
c2 = _
}
}
}
c2
}
try {
markup match {
case MD =>
val t1 = System.currentTimeMillis
var c2 = includes(content)
// pre-mods
page.orElse(wid.page).map { x =>
// WikiMods will dirty the we.cacheable if needed
c2 = razie.wiki.mods.WikiMods.modPreParsing(x, Some(c2)).getOrElse(c2)
}
val res = WikiReactors(wid.getRealm).wiki.mkParser apply c2
val t2 = System.currentTimeMillis
ctrace << s"wikis.preprocessed ${t2 - t1} millis for ${wid.name}"
(res, c2)
case TEXT => {
val c2 = content.replaceAll("""\[\[([^]]*)\]\]""", """[[\(1\)]]""")
(StrAstNode(c2), c2)
}
case JSON | XML | JS | SCALA => {
(StrAstNode(content), content)
}
case HTML => {
// trick: parse it like we normally would, for properties and includes, but then discard
val x = preprocess(wid, MD, content, page)
(LeafAstNode(x._2, x._1), x._2)
}
case _ => (StrAstNode("UNKNOWN_MARKUP " + markup + " - " + content), content)
}
} catch {
case t: Throwable =>
razie.Log.error("EXCEPTION_PARSING " + markup + " - " + wid.wpath, t)
razie.audit.Audit.logdb("EXCEPTION_PARSING", markup + " - " + wid.wpath + " " + t.getLocalizedMessage())
(StrAstNode("EXCEPTION_PARSING " + markup + " - " + t.getLocalizedMessage() + " - " + content), content)
}
}
/** html for later */
def propLater (id:String, url:String) =
s"""<script async>require(['jquery'],function($$){$$("#$id").load("$url");});</script>"""
/** partial formatting function
*
* @param wid - the wid being formatted
* @param markup - markup language being formatted
* @param icontent - the content being formatted or "" if there is a WikiEntry being formatted
* @param we - optional page for context for formatting
* @return
*/
private def format1(wid: WID, markup: String, icontent: String, we: Option[WikiEntry], user:Option[WikiUser]) = {
val res = try {
var content =
(if(icontent == null || icontent.isEmpty) {
if (wid.section.isDefined)
preprocess(wid, markup, noBadWords(wid.content.mkString), we)._1
else
// use preprocessed cache
we.flatMap(_.ipreprocessed.map(_._1)).orElse(
we.map(_.preprocess(user))
).getOrElse(
preprocess(wid, markup, noBadWords(icontent), we)._1
)
}
else
preprocess(wid, markup, noBadWords(icontent), we)._1
).fold(WAST.context(we, user)).s
// apply md templates first
content = Wikis(wid.getRealm).applyTemplates(wid, content, "md")
// TODO index nobadwords when saving/loading page, in the WikiIndex
// TODO have a pre-processed and formatted page index I can use - for non-scripted pages, refreshed on save
// run scripts
val S_PAT = """`\{\{(call):([^#}]*)#([^}]*)\}\}`""".r
try {
// to evaluate scripts without a page, we need this trick:
val tempPage = we orElse None //Some(new WikiEntry("Temp", "fiddle", "fiddle", "md", content, new ObjectId(), Seq("temp"), ""))
// warn against duplicated included scripts
val duplicates = new ListBuffer[String]()
content = S_PAT replaceSomeIn (content, { m =>
we.map(_.cacheable = false)
try {
// find the page with signed scripts and call them
// inline scripts are expanded into the html page
val scriptName = m group 3
val scriptPath = m group 2
val pageWithScripts = WID.fromPath(scriptPath).flatMap(x => Wikis(x.getRealm).find(x)).orElse(tempPage)
val y=pageWithScripts.flatMap(_.scripts.find(_.name == scriptName)).filter(_.checkSignature(user)).map{s=>
val warn = if(duplicates contains s.name) {
s"`WARNING: script named '${s.name}' duplicated - check your includes`\n\n"
} else ""
duplicates.append(s.name)
if("inline" == s.stype) {
val wix = Wikis(wid.getRealm).mkWixJson(we, user, Map.empty, "")
warn + s"""<!-- WikiScript: ${s} -->
|<script>
|withJquery(function(){
|${wix}\n
|${s.content}
|;});
|</script>
""".stripMargin
} else
runScript(s.content, "js", we, user)
}
// dollar sign (jquery) in embedded JS needs to be escaped ... don't remember why
y
.map(_.replaceAll("\\$", "\\\\\\$"))
// also, any escaped double quote needs re-escaping... likely for the same reason as the dollar sign
// wix.toJson can escape realm props including "" and they get lost somehow if I don't do this
.map(_.replaceAll("\\\"", "\\\\\\\""))
} catch {
case t: Throwable => {
log("exception in script", t)
Some("`!?!`")
}
}
})
} catch {
// sometimes the pattern itself blows
case t: Throwable => log("exception in script", t);
}
// cannot have these expanded in the AST parser because then i recurse forever when resolving XPATHs...
val XP_PAT = """`\{\{\{(xp[l]*):([^}]*)\}\}\}`""".r
content = XP_PAT replaceSomeIn (content, { m =>
we.map(_.cacheable = false)
try {
we.map(x => runXp(m group 1, x, m group 2))
} catch { case _: Throwable => Some("!?!") }
})
// for forms
we.map { x => content = new WForm(x).formatFields(content) }
// pre-mods
we.map {x =>
// we don't mark cacheable false - the WikiMods does that
content = razie.wiki.mods.WikiMods.modPreHtml(x, Some(content)).getOrElse(content)
}
//todo plugins register and define formatting for different content types
markup match {
case MD => {
object DTimer {
def apply[A](desc:String)(f: => A): A = {
val t1 = System.currentTimeMillis
val res:A = f
val t2 = System.currentTimeMillis
cdebug << s"$desc took ${t2 - t1} millis"
res
}
}
val res = DTimer ("wikis.mdhtml for "+wid.name) {
val ast = DTimer ("wikis.mdast for "+wid.name) {
val parser = org.commonmark.parser.Parser.builder().build();
parser.parse(content);
}
val renderer = org.commonmark.renderer.html.HtmlRenderer.builder().build();
renderer.render(ast); // "<p>This is <em>Sparta</em></p>\n"
}
res
}
case TEXT => content
case JSON | SCALA | JS => "<pre>" + content.replaceAll("\n", "<br/>") + "</pre>"
case XML | HTML => content
case _ => "UNKNOWN_MARKUP " + markup + " - " + content
}
} catch {
case e : Throwable => {
Audit.logdbWithLink("ERR_FORMATTING", wid.ahref, "[[ERROR FORMATTING]]: " + wid.wpath + " err: " + e.toString)
log("[[ERROR FORMATTING]]: ", e)
if(Services.config.isLocalhost) throw e
"[[ERROR FORMATTING]] - sorry, dumb program here! The content is not lost: try editing this topic... also, please report this topic with the error and we'll fix it for you!"
}
}
res
}
def prepUrl (url:String) = {
if(Services.config.isDevMode && Services.config.isLocalhost)
url
.replace("http://cdn.razie.com/", "/admin/img/Users/raz/w/razie.github.io/")
.replace("https://cdn.razie.com/", "/admin/img/")
// .replace("https://cdn.razie.com/", "http://localhost:9000/asset/../../")
// .replace("https://cdn.razie.com/", "file://Users/raz/w/razie.github.io/")
else url
}
def irunXp(what: String, w: WikiEntry, path: String) = {
var root = new razie.Snakk.Wrapper(new WikiWrapper(w.wid), WikiXpSolver)
var xpath = path // TODO why am I doing this?
val ROOT_ALL = """root\(\*\)/(.*)""".r
val ROOT = """root\(([^:]*):([^:)/]*)\)/(.*)""".r //\[[@]*(\w+)[ \t]*([=!~]+)[ \t]*[']*([^']*)[']*\]""".r
path match {
case ROOT_ALL(rest) => {
root = new razie.Snakk.Wrapper(new WikiWrapper(WID("Admin", "*").r(w.realm)), WikiXpSolver)
xpath = rest //path.replace("root(*)/", "")
}
case ROOT(cat, name, rest) => {
root = new razie.Snakk.Wrapper(new WikiWrapper(WID(cat, name).r(w.realm)), WikiXpSolver)
xpath = rest
}
}
val res: List[_] =
if (razie.GPath(xpath).isAttr) (root xpla xpath).filter(_.length > 0) // sometimes attributes come as zero value?
else {
(root xpl xpath).collect {
case ww: WikiWrapper => formatWikiLink(w.realm, ww.wid, ww.wid.name, ww.page.map(_.label).getOrElse(ww.wid.name), None)._1
}
}
res
}
/** a list to html */
def toUl (res:List[Any]) =
"<ul>" +
res.take(100).map { x: Any =>
"<li>" + x.toString + "</li>"
}.mkString +
(if(res.size>100)"<li>...</li>" else "") +
"</ul>"
def runXp(what: String, w: WikiEntry, path: String) = {
val res = irunXp(what, w, path)
what match {
case "xp" => res.headOption.getOrElse("?").toString
case "xpl" => toUl(res)
// case "xmap" => res.take(100).map { x: Any => "<li>" + x.toString + "</li>" }.mkString
}
// else "TOO MANY to list"), None))
}
// scaled-down formatting of just some content
def sformat(content: String, markup:String="md", realm:String, user:Option[WikiUser]=None) =
format (WID("1","2").r(realm), markup, content, None, user)
/** main formatting function
*
* @param wid - the wid being formatted
* @param markup - markup language being formatted
* @param icontent - the content being formatted or "" if there is a WikiEntry being formatted
* @param we - optional page for context for formatting
* @return
*/
def formatJson(wid: WID, markup: String, icontent: String, we: Option[WikiEntry] = None) = {
val content =
if(icontent == null || icontent.isEmpty) wid.content.mkString
else icontent
content
}
/** main formatting function
*
* @param wid - the wid being formatted
* @param markup - markup language being formatted
* @param icontent - the content being formatted or "" if there is a WikiEntry being formatted
* @param we - optional page for context for formatting
* @return
*/
def format(we: WikiEntry, user:Option[WikiUser]) : String = {
format (we.wid, we.markup, "", Some(we), user)
}
WikiObservers mini {
case ev@WikiEvent(action, "WikiEntry", _, entity, _, _, _) => {
action match {
case WikiAudit.UPD_RENAME => {
val oldWid = ev.oldId.flatMap(WID.fromPath)
Wikis.clearCache(oldWid.get)
}
case a if WikiAudit.isUpd(a) => {
val wid = WID.fromPath(ev.id)
Wikis.clearCache(wid.get)
}
case _ => {}
}
}
}
/** clearing all possible versions of this WID from the cache */
def clearCache(wids : WID*) = {
wids.foreach(wid=>
Array(
wid.r("rk"), // yea, stupid but...
wid,
wid.copy(parent=None, section=None),
wid.copy(realm = None, section=None),
wid.copy(realm = None, parent=None, section=None),
wid.copy(realm = None, parent=None, section=None, cat="")
).foreach {wid=>
val key = wid.wpathFull
WikiCache.remove(key + ".db")
WikiCache.remove(key + ".formatted")
WikiCache.remove(key + ".page")
})
}
/** main formatting function
*
* @param wid - the wid being formatted
* @param markup - markup language being formatted
* @param icontent - the content being formatted or "" if there is a WikiEntry being formatted
* @param we - optional page for context for formatting
* @return
*/
def format(wid: WID, markup: String, icontent: String, we: Option[WikiEntry], user:Option[WikiUser]) : String = {
if (JSON == wid.cat || JSON == markup || XML == wid.cat || XML == markup || TEXT == markup)
formatJson(wid, markup, icontent, we)
else {
var res = {
val cacheFormatted = Services.config.cacheFormat
if(cacheFormatted &&
we.exists(w=> w.cacheable && w.category != "-" && w.category != "") &&
(icontent == null || icontent == "") &&
wid.section.isEmpty) {
WikiCache.getString(we.get.wid.wpathFull+".formatted").map{x=>
x
}.getOrElse {
val n = format1(wid, markup, icontent, we, user)
if(we.exists(_.cacheable)) // format can change cacheable
WikiCache.set(we.get.wid.wpathFull+".formatted", n, 300) // 5 minutes
n
}
} else
format1(wid, markup, icontent, we, user)
}
// mark the external links
val sup = "" //"""<sup> <b style="color:darkred">^</b></sup>""")
val A_PAT = """(<a +href="http://)([^>]*)>([^<]*)(</a>)""".r
res = A_PAT replaceSomeIn (res, { m =>
if (Option(m group 2) exists (s=> !s.startsWith(Services.config.hostport) &&
!Services.isSiteTrusted("", s))
)
Some("""$1$2 title="External site"><i>$3</i>"""+sup+"$4")
else None
})
// replace all divs - limitation of the markdown parser
val DPAT1 = "\\{\\{div ([^}]*)\\}\\}".r
res = DPAT1 replaceSomeIn (res, { m =>
Some("<div "+Enc.unescapeHtml(m group 1)+">")
})
res = res.replaceAll("\\{\\{/div *\\}\\}", "</div>")
// // modify external sites mapped to external URLs
// // TODO optimize - either this logic or a parent-based approach
// for (site <- Wikis.urlmap)
// res = res.replaceAll ("""<a +href="%s""".format(site._1), """<a href="%s""".format(site._2))
// get some samples of what people get stuck on...
if(res contains "CANNOT PARSE")
Audit.logdbWithLink(
"CANNOT_PARSE",
wid.urlRelative,
s"""${wid.wpath} ver ${we.map(_.ver)}""")
res
}
}
def divLater(x:String) = {
val y = x.replaceAll("\\{\\{div.later ([^ ]*) ([^}]*)\\}\\}",
"""
| <div id=$1>div.later</div>
| <script>
| withJquery(function(){
| \$("#$1").attr("src","$2");
| });
| </script>
| """.stripMargin)
y
}
// todo protect this from trespassers
def runScript(s: String, lang:String, page: Option[WikiEntry], au:Option[WikiUser]) = {
// prefer the user the page was preprocessed for, then au, then thread statics - the least reliable
val up = page.flatMap(_.ipreprocessed.flatMap(_._2)) orElse au
//todo use au not up
val q = razie.NoStaticS.get[QueryParms]
Services.runScript(s, lang, page, up, q.map(_.q.map(t => (t._1, t._2.mkString))).getOrElse(Map()))
}
/** format content from a template, given some parms
*
* - this is used only when creating new pages from spec
*
* DO NOT mess with this - one side effect is only replacing the ${} it understands...
*
* TODO: should be reconciled with templateFromContent
*/
def template(wpath: String, parms:Map[String,String]) = {
(for (
wid <- WID.fromPath(wpath).map(x=>if(x.realm.isDefined) x else x.r("wiki")); // templates are in wiki or rk
c <- wid.content
) yield {
var extraParms = Map.empty[String,String]
val TIF = """(?s)\{\{\.*(tif)([: ])?([^ :}]*)([ :]+)?([^}]+)?\}\}((?>.*?(?=\{\{/[^`])))\{\{/\.*tif\}\}""".r
var res = TIF.replaceAllIn(c, { m =>
if(parms.get(m.group(3)).exists(_.length > 0)) "$6"
else if(m.group(5) != null) { // default value
extraParms = extraParms + (m.group(3) -> m.group(5))
"$6"
} else ""
})
val s1 = (parms ++ extraParms).foldLeft(res){(a,b)=>
a.replaceAll("\\{\\{\\$\\$"+b._1+"\\}\\}", b._2)
}
s1.replaceAll("\\{\\{`", "{{").replaceAll("\\[\\[`", "[[")
}) getOrElse (
"No content template for: " + wpath + "\n\nAttributes:\n\n" + parms.map{t=>s"* ${t._1} = ${t._2}\n"}.mkString
)
}
/** format content from a template, given some parms
*
* @param parms will resolve expressions from the template into Strings. you can use a Map.
* parms("*") should return some details for debugging
*/
def templateFromContent(content: String, parms:String=>String) = {
val PAT = """\\$\\{([^\\}]*)\\}""".r
val s1 = PAT.replaceAllIn(content, {m =>
parms(m.group(1))
})
}
def noBadWords(s: String) = badWords.foldLeft(s)((x, y) => x.replaceAll("""\b%s\b""".format(y), "BLIP"))
def hasBadWords(s: String, what: Array[String] = badWords): Boolean = s.toLowerCase.split("""\w""").exists(what.contains(_))
def flag(we: WikiEntry) { flag(we.wid) }
def flag(wid: WID, reason: String = "") {
Audit.logdb("WIKI_FLAGGED", reason, wid.toString)
}
final val badWords = "boohoo,hell".split(",")
final val adultWords = "damn,heck".split(",")
//todo who uses this
def updateUserName(uold: String, unew: String) = {
// TODO 1 optimize with find()
// TODO 2 rename references
val we = RazMongo("WikiEntry")
for (u <- we.findAll() if "User" == u.get("category") && uold == u.get("name")) {
u.put("name", unew)
we.save(u)
}
val weo = RazMongo("WikiEntryOld")
for (u <- weo.findAll() if "User" == u.get("category") && uold == u.get("name")) {
u.put("name", unew)
weo.save(u)
}
}
def w(we: UWID):String = we.wid.map(wid=>w(wid)).getOrElse("ERR_NO_URL_FOR_"+we.toString)
def w(we: WID, shouldCount: Boolean = true):String =
we.urlRelative + (if (!shouldCount) "?count=0" else "")
/** make a relative href for the given tag. give more tags with 1/2/3 */
def hrefTag(wid:WID, t:String,label:String) = {
if(Array("Blog","Forum") contains wid.cat) {
s"""<b><a href="${w(wid)}/tag/$t">$label</a></b>"""
} else {
if(wid.parentWid.isDefined) {
s"""<b><a href="${w(wid.parentWid.get)}/tag/$t">$label</a></b>"""
} else {
s"""<b><a href="/tag/$t">$label</a></b>"""
}
}
}
/////////////////// visibility for new wikis
def mkVis(wid:WID, realm:String) = wid.findParent
.flatMap(_.props.get("visibility"))
.orElse(WikiReactors(realm).props.prop("default.visibility"))
.getOrElse(
WikiReactors(realm)
.wiki
.visibilityFor(wid.cat)
.headOption
.getOrElse(PUBLIC))
/** extract wvis (edit permissions) prop from wiki */
protected def wvis(props: Option[Map[String, String]]): Option[String] =
props.flatMap(p => p.get("wvis").orElse(p.get("visibility"))).map(_.asInstanceOf[String])
def mkwVis(wid:WID, realm:String) = wvis(wid.findParent.map(_.props))
.orElse(WikiReactors(realm).props.prop("default.wvis"))
.getOrElse(
WikiReactors(realm)
.wiki
.visibilityFor(wid.cat)
.headOption
.getOrElse(PUBLIC))
/** see if a exists otherwise return b */
def fallbackPage (a:String, b:String) : String = {
WID.fromPath(a).flatMap(find).map(x => a).getOrElse(b)
}
}
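// A minimal usage sketch (hypothetical realm id): render a markdown snippet
// through the same pipeline full pages use; `sformat` wraps `format` with a
// throwaway WID.
//
//   val html = Wikis.sformat("hello *world*", "md", "rk")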
|
razie/diesel-hydra
|
diesel/src/main/scala/razie/wiki/model/Wikis.scala
|
Scala
|
apache-2.0
| 32,648 |
package collins.util
import scala.collection.mutable.StringBuilder
import scala.concurrent.duration.Duration
import scala.sys.process.Process
import scala.sys.process.ProcessLogger
import play.api.Logger
import collins.models.IpmiInfo
import collins.shell.CommandResult
import collins.util.concurrent.BackgroundProcess
import collins.util.config.AppConfig
abstract class IpmiCommand extends BackgroundProcess[Option[CommandResult]] {
val interval: Duration
var debug: Boolean = false
protected def ipmiInfo: IpmiInfo
protected def ipmiCommand: String
protected val logger = Logger(getClass)
protected lazy val (address, username, password) = {
val ipmi = ipmiInfo
(ipmi.dottedAddress(), ipmi.username, ipmi.decryptedPassword())
}
def shouldRun(): Boolean = {
AppConfig.isProd() || debug
}
def run(): Option[CommandResult] = {
if (!shouldRun) {
return None
}
val command = substitute(ipmiCommand)
val process = Process(command, None, ("IPMI_PASSWORD" -> password))
val stdout = new StringBuilder()
val stderr = new StringBuilder()
val exitStatus = try {
process ! ProcessLogger(
s => stdout.append(s + "\n"),
e => stderr.append(e + "\n")
)
} catch {
case e: Throwable =>
stderr.append(e.getMessage)
-1
}
val stdoutString = stdout.toString.trim
val stderrString = stderr.toString.trim
val cr = CommandResult(exitStatus, stdoutString, Some(stderrString))
if (!cr.isSuccess) {
logger.error("Error running command '%s'".format(command))
logger.error(cr.toString)
} else {
logger.info("Ran command %s".format(command))
logger.info(cr.toString)
}
Some(cr)
}
protected def substitute(cmd: String): String = {
cmd.replace("<host>", address)
.replace("<username>", username)
.replace("<password>", password)
.replace("<interval>", interval.toSeconds.toString)
}
}
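// A minimal concrete subclass sketch (hypothetical command string): the
// <host>, <username> and <interval> placeholders are filled in by
// substitute() above; the password itself travels via the IPMI_PASSWORD
// environment variable set in run(), hence ipmitool's -E flag.
//
//   class PowerStatusCommand(override protected val ipmiInfo: IpmiInfo) extends IpmiCommand {
//     override val interval: Duration = scala.concurrent.duration.Duration(60, "seconds")
//     override protected def ipmiCommand: String =
//       "ipmitool -H <host> -U <username> -E -I lanplus chassis power status"
//   }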
|
funzoneq/collins
|
app/collins/util/IpmiCommand.scala
|
Scala
|
apache-2.0
| 1,968 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.examples.mllib
import org.apache.log4j.{Level, Logger}
import scopt.OptionParser
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.mllib.regression.LinearRegressionWithSGD
import org.apache.spark.mllib.util.MLUtils
import org.apache.spark.mllib.optimization.{SimpleUpdater, SquaredL2Updater, L1Updater}
/**
* An example app for linear regression. Run with
* {{{
* bin/run-example org.apache.spark.examples.mllib.LinearRegression
* }}}
* A synthetic dataset can be found at `data/mllib/sample_linear_regression_data.txt`.
* If you use it as a template to create your own app, please use `spark-submit` to submit your app.
*/
object LinearRegression extends App {
object RegType extends Enumeration {
type RegType = Value
val NONE, L1, L2 = Value
}
import RegType._
case class Params(
input: String = null,
numIterations: Int = 100,
stepSize: Double = 1.0,
regType: RegType = L2,
regParam: Double = 0.1)
val defaultParams = Params()
val parser = new OptionParser[Params]("LinearRegression") {
head("LinearRegression: an example app for linear regression.")
opt[Int]("numIterations")
.text("number of iterations")
.action((x, c) => c.copy(numIterations = x))
opt[Double]("stepSize")
.text(s"initial step size, default: ${defaultParams.stepSize}")
.action((x, c) => c.copy(stepSize = x))
opt[String]("regType")
.text(s"regularization type (${RegType.values.mkString(",")}), " +
s"default: ${defaultParams.regType}")
.action((x, c) => c.copy(regType = RegType.withName(x)))
opt[Double]("regParam")
.text(s"regularization parameter, default: ${defaultParams.regParam}")
arg[String]("<input>")
.required()
.text("input paths to labeled examples in LIBSVM format")
.action((x, c) => c.copy(input = x))
note(
"""
|For example, the following command runs this app on a synthetic dataset:
|
| bin/spark-submit --class org.apache.spark.examples.mllib.LinearRegression \
| examples/target/scala-*/spark-examples-*.jar \
| data/mllib/sample_linear_regression_data.txt
""".stripMargin)
}
parser.parse(args, defaultParams).map { params =>
run(params)
} getOrElse {
sys.exit(1)
}
def run(params: Params) {
val conf = new SparkConf().setAppName(s"LinearRegression with $params")
val sc = new SparkContext(conf)
Logger.getRootLogger.setLevel(Level.WARN)
val examples = MLUtils.loadLibSVMFile(sc, params.input, multiclass = true).cache()
val splits = examples.randomSplit(Array(0.8, 0.2))
val training = splits(0).cache()
val test = splits(1).cache()
val numTraining = training.count()
val numTest = test.count()
println(s"Training: $numTraining, test: $numTest.")
examples.unpersist(blocking = false)
val updater = params.regType match {
case NONE => new SimpleUpdater()
case L1 => new L1Updater()
case L2 => new SquaredL2Updater()
}
val algorithm = new LinearRegressionWithSGD()
algorithm.optimizer
.setNumIterations(params.numIterations)
.setStepSize(params.stepSize)
.setUpdater(updater)
.setRegParam(params.regParam)
val model = algorithm.run(training)
val prediction = model.predict(test.map(_.features))
val predictionAndLabel = prediction.zip(test.map(_.label))
val loss = predictionAndLabel.map { case (p, l) =>
val err = p - l
err * err
}.reduce(_ + _)
val rmse = math.sqrt(loss / numTest)
println(s"Test RMSE = $rmse.")
sc.stop()
}
}
|
adobe-research/spark-cluster-deployment
|
initial-deployment-puppet/modules/spark/files/spark/examples/src/main/scala/org/apache/spark/examples/mllib/LinearRegression.scala
|
Scala
|
apache-2.0
| 4,469 |
package net.orfjackal.dimdwarf.domain
import java.util.concurrent.atomic.AtomicReference
import javax.annotation.concurrent.ThreadSafe
import scala.annotation.tailrec
@ThreadSafe
class Clock(startingValue: Timestamp) {
private val next = new AtomicReference[Timestamp](startingValue)
@tailrec final def nextTimestamp(): Timestamp = {
val current = next.get
if (next.compareAndSet(current, current.next)) {
current
} else {
nextTimestamp()
}
}
}
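// A minimal usage sketch (hypothetical starting Timestamp value): every call
// returns a unique, monotonically increasing value even under contention,
// because losers of the compareAndSet race simply retry.
//
//   val clock = new Clock(someStartingTimestamp)
//   val t1 = clock.nextTimestamp()
//   val t2 = clock.nextTimestamp()
//   // t1 comes strictly before t2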
|
orfjackal/dimdwarf
|
dimdwarf-core/src/main/scala/net/orfjackal/dimdwarf/domain/Clock.scala
|
Scala
|
apache-2.0
| 482 |
package knot.core.stream
import knot.core.stream.flows.IdleTimeoutDuplex
import knot.core.stream.graphs.builders.{DuplexBuilder, DuplexFromFlowsBuilder}
import knot.core.stream.graphs.{DuplexGraph, MatSelector}
import knot.core.stream.ops.DuplexOps
import knot.core.{Decorations, Done}
import scala.concurrent.duration.FiniteDuration
trait Duplex[I1, O1, I2, O2, M] extends StreamNode[DuplexOps[I1, O1, I2, O2, M]] {
}
object Duplex {
def from[I1, O1, I2, O2, M](duplex: Duplex[I1, O1, I2, O2, M]): DuplexGraph[I1, O1, I2, O2, M] = {
val b = DuplexBuilder(duplex, Decorations.empty, MatSelector.up)
new DuplexGraph[I1, O1, I2, O2, M](b)
}
def idleTimeoutDuplex[I, O](timeout: FiniteDuration): DuplexGraph[I, I, O, O, Done] =
from(IdleTimeoutDuplex(timeout))
def fromFlows[I1, O1, I2, O2, M1, M2, M](flow1: Flow[I1, O1, M1], flow2: Flow[I2, O2, M2])(matSelector: (M1, M2) => M): DuplexGraph[I1, O1, I2, O2, M] = {
val b = DuplexFromFlowsBuilder(flow1, Decorations.empty, flow2, Decorations.empty, matSelector)
new DuplexGraph[I1, O1, I2, O2, M](b)
}
}
|
defvar/knot
|
knot-core/src/main/scala/knot/core/stream/Duplex.scala
|
Scala
|
mit
| 1,088 |
package controllers
import javax.inject._
import akka.actor.{ ActorRef, ActorSystem }
import akka.stream.Materializer
import akka.util.Timeout
import com.github.j5ik2o.spetstore.adaptor.http.{ CreateCustomerJson, CustomerSupport }
import com.github.j5ik2o.spetstore.usecase.CustomerUseCase
import com.github.tototoshi.play2.json4s.Json4s
import org.json4s._
import play.api.mvc.{ Action, Controller }
import scala.concurrent.ExecutionContext
import scala.concurrent.duration._
@Singleton
class CustomerController @Inject() (json4s: Json4s, @Named("customer-aggregate") customerAggregate: ActorRef)(implicit exec: ExecutionContext, actorSystem: ActorSystem, materializer: Materializer)
extends Controller with CustomerSupport {
import json4s._
implicit val formats = DefaultFormats
implicit val timeout = Timeout(10 seconds)
override val customerUseCase: CustomerUseCase = CustomerUseCase(customerAggregate)
def create: Action[JValue] = Action.async(json) { implicit request =>
val createCustomerJson = request.body.extract[CreateCustomerJson]
createCustomerGraph(createCustomerJson).run().map(e => Ok(Extraction.decompose(e)))
}
}
|
j5ik2o/spetstore-cqrs-es-akka
|
play2-application/app/controllers/CustomerController.scala
|
Scala
|
mit
| 1,165 |
package com.outr.stripe.support
import com.outr.stripe.connect.{Acceptance, Account, DeclineChargeOn, LegalEntity, TransferSchedule}
import com.outr.stripe.{Deleted, Implicits, QueryConfig, ResponseError, Stripe, StripeList}
import scala.concurrent.Future
class AccountsSupport(stripe: Stripe) extends Implicits {
def create(country: Option[String] = None,
email: Option[String] = None,
custom: Boolean = false,
accountToken: Option[String] = None,
businessLogo: Option[String] = None,
businessName: Option[String] = None,
businessPrimaryColor: Option[String] = None,
businessURL: Option[String] = None,
legalEntity: Option[LegalEntity] = None,
tosAcceptance: Option[Acceptance] = None): Future[Either[ResponseError, Account]] = {
val data = List(
write("type", if (custom) "custom" else "standard"),
write("country", country),
write("email", email),
write("account_token", accountToken),
write("business_logo", businessLogo),
write("business_name", businessName),
write("business_primary_color", businessPrimaryColor),
write("business_url", businessURL),
write("legal_entity", legalEntity),
write("tos_acceptance", tosAcceptance)
).flatten
stripe.post[Account]("accounts", QueryConfig.default, data: _*)
}
def byId(accountId: String): Future[Either[ResponseError, Account]] = {
stripe.get[Account](s"accounts/$accountId", QueryConfig.default)
}
def update(accountId: String,
businessLogo: Option[String] = None,
businessName: Option[String] = None,
businessPrimaryColor: Option[String] = None,
businessUrl: Option[String] = None,
debitNegativeBalances: Option[Boolean] = None,
declineChargeOn: Option[DeclineChargeOn] = None,
defaultCurrency: Option[String] = None,
email: Option[String] = None,
externalAccount: Option[String] = None,
legalEntity: Option[LegalEntity] = None,
metadata: Map[String, String] = Map.empty,
productDescription: Option[String] = None,
statementDescriptor: Option[String] = None,
supportEmail: Option[String] = None,
supportPhone: Option[String] = None,
supportUrl: Option[String] = None,
tosAcceptance: Option[Acceptance] = None,
transferSchedule: Option[TransferSchedule] = None,
transferStatementDescriptor: Option[String] = None): Future[Either[ResponseError, Account]] = {
val data = List(
write("business_logo", businessLogo),
write("business_name", businessName),
write("business_primary_color", businessPrimaryColor),
write("business_url", businessUrl),
write("debit_negative_balances", debitNegativeBalances),
write("decline_charges_on", declineChargeOn),
write("default_currency", defaultCurrency),
write("email", email),
write("external_account", externalAccount),
write("legal_entity", legalEntity),
write("metadata", metadata),
write("product_description", productDescription),
write("statement_descriptor", statementDescriptor),
write("support_email", supportEmail),
write("support_phone", supportPhone),
write("support_url", supportUrl),
write("tos_acceptance", tosAcceptance),
write("transfer_schedule", transferSchedule),
write("transfer_statement_descriptor", transferStatementDescriptor)
).flatten
stripe.post[Account](s"accounts/$accountId", QueryConfig.default, data: _*)
}
def delete(accountId: String): Future[Either[ResponseError, Deleted]] = {
stripe.delete[Deleted](s"accounts/$accountId", QueryConfig.default)
}
def reject(accountId: String, reason: String): Future[Either[ResponseError, Account]] = {
stripe.post[Account](s"accounts/$accountId/reject", QueryConfig.default, "reason" -> reason)
}
def list(config: QueryConfig = QueryConfig.default): Future[Either[ResponseError, StripeList[Account]]] = {
stripe.get[StripeList[Account]]("accounts", config)
}
object external {
lazy val bankAccounts = new ExternalBankAccountsSupport(stripe)
lazy val cards = new ExternalCreditCardsSupport(stripe)
}
}
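// A minimal usage sketch (hypothetical `stripe` handle and field access):
// create a custom account, then fetch it back; every call returns a
// Future[Either[ResponseError, ...]].
//
//   val accounts = new AccountsSupport(stripe)
//   accounts.create(country = Some("US"), email = Some("jane@example.com"), custom = true).foreach {
//     case Right(account) => accounts.byId(account.id)
//     case Left(error)    => println(error)
//   }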
|
outr/scala-stripe
|
core/jvm/src/main/scala/com/outr/stripe/support/AccountsSupport.scala
|
Scala
|
mit
| 4,379 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.sinks
import org.apache.flink.api.java.tuple.{Tuple2 => JTuple2}
import org.apache.flink.api.java.typeutils.{GenericTypeInfo, TupleTypeInfo}
import org.apache.flink.api.scala.typeutils.CaseClassTypeInfo
import org.apache.flink.table.api._
import org.apache.flink.table.catalog.{CatalogTable, ObjectIdentifier}
import org.apache.flink.table.dataformat.BaseRow
import org.apache.flink.table.operations.CatalogSinkModifyOperation
import org.apache.flink.table.planner.calcite.FlinkTypeFactory
import org.apache.flink.table.planner.plan.utils.RelOptUtils
import org.apache.flink.table.runtime.typeutils.BaseRowTypeInfo
import org.apache.flink.table.sinks._
import org.apache.flink.table.types.DataType
import org.apache.flink.table.types.inference.TypeTransformations.{legacyDecimalToDefaultDecimal, toNullable}
import org.apache.flink.table.types.logical.utils.{LogicalTypeCasts, LogicalTypeChecks}
import org.apache.flink.table.types.logical.{LegacyTypeInformationType, LogicalType, RowType}
import org.apache.flink.table.types.utils.DataTypeUtils
import org.apache.flink.table.types.utils.TypeConversions.{fromLegacyInfoToDataType, fromLogicalToDataType}
import org.apache.flink.table.utils.{TableSchemaUtils, TypeMappingUtils}
import org.apache.flink.types.Row
import org.apache.calcite.rel.RelNode
import _root_.scala.collection.JavaConversions._
object TableSinkUtils {
/**
* Checks if the given query can be written into the given sink. It checks that the
* field types are compatible (types must be equal, including precision). If the types
* are not compatible but can be implicitly cast, a cast projection is applied.
* Otherwise, an exception is thrown.
*
* @param query the query to be checked
* @param sinkSchema the schema of sink to be checked
* @param typeFactory type factory
* @return the query RelNode which may be applied the implicitly cast projection.
*/
def validateSchemaAndApplyImplicitCast(
query: RelNode,
sinkSchema: TableSchema,
typeFactory: FlinkTypeFactory,
sinkIdentifier: Option[String] = None): RelNode = {
val queryLogicalType = FlinkTypeFactory.toLogicalRowType(query.getRowType)
val sinkLogicalType = DataTypeUtils
// we recognize legacy decimal is the same to default decimal
.transform(sinkSchema.toRowDataType, legacyDecimalToDefaultDecimal)
.getLogicalType
.asInstanceOf[RowType]
if (LogicalTypeCasts.supportsImplicitCast(queryLogicalType, sinkLogicalType)) {
// the query can be written into sink
// but we may need to add a cast project if the types are not compatible
if (LogicalTypeChecks.areTypesCompatible(
nullableLogicalType(queryLogicalType), nullableLogicalType(sinkLogicalType))) {
// types are compatible except for nullability, so no cast projection is needed
// we ignore nullability to avoid a cast projection, as casting non-null to nullable is redundant
query
} else {
// otherwise, add a cast project
val castedDataType = typeFactory.buildRelNodeRowType(
sinkLogicalType.getFieldNames,
sinkLogicalType.getFields.map(_.getType))
RelOptUtils.createCastRel(query, castedDataType)
}
} else {
// format query and sink schema strings
val srcSchema = queryLogicalType.getFields
.map(f => s"${f.getName}: ${f.getType}")
.mkString("[", ", ", "]")
val sinkSchema = sinkLogicalType.getFields
.map(f => s"${f.getName}: ${f.getType}")
.mkString("[", ", ", "]")
val sinkDesc: String = sinkIdentifier.getOrElse("")
throw new ValidationException(
s"Field types of query result and registered TableSink $sinkDesc do not match.\n" +
s"Query schema: $srcSchema\n" +
s"Sink schema: $sinkSchema")
}
}
/**
* Make the logical type nullable recursively.
*/
private def nullableLogicalType(logicalType: LogicalType): LogicalType = {
DataTypeUtils.transform(fromLogicalToDataType(logicalType), toNullable).getLogicalType
}
/**
* It checks whether the [[TableSink]] is compatible to the INSERT INTO clause, e.g.
* whether the sink is a [[PartitionableTableSink]] and the partitions are valid.
*
* @param sinkOperation The sink operation with the query that is supposed to be written.
* @param sinkIdentifier The path of the sink. It is needed just for logging. It does not
* participate in the validation.
* @param sink The sink that we want to write to.
* @param partitionKeys The partition keys of this table.
*/
def validateTableSink(
sinkOperation: CatalogSinkModifyOperation,
sinkIdentifier: ObjectIdentifier,
sink: TableSink[_],
partitionKeys: Seq[String]): Unit = {
// check partitions are valid
if (partitionKeys.nonEmpty) {
sink match {
case _: PartitionableTableSink =>
case _ => throw new ValidationException("We need PartitionableTableSink to write data to" +
s" partitioned table: $sinkIdentifier")
}
}
val staticPartitions = sinkOperation.getStaticPartitions
if (staticPartitions != null && !staticPartitions.isEmpty) {
staticPartitions.map(_._1) foreach { p =>
if (!partitionKeys.contains(p)) {
throw new ValidationException(s"Static partition column $p should be in the partition" +
s" fields list $partitionKeys for Table($sinkIdentifier).")
}
}
}
sink match {
case overwritableTableSink: OverwritableTableSink =>
overwritableTableSink.setOverwrite(sinkOperation.isOverwrite)
case _ =>
assert(!sinkOperation.isOverwrite, "INSERT OVERWRITE requires " +
s"${classOf[OverwritableTableSink].getSimpleName} but actually got " +
sink.getClass.getName)
}
}
/**
 * Infers the physical schema of a [[TableSink]]. The physical schema ignores the change flag
 * field and normalizes physical types (which can be generic or POJO types) into a [[TableSchema]].
 *
 * @param queryLogicalType the logical type of the query; used to fill in the sink physical
 *                         schema if the sink physical type is not specified.
* @param sink the instance of [[TableSink]]
*/
def inferSinkPhysicalSchema(
queryLogicalType: RowType,
sink: TableSink[_]): TableSchema = {
val withChangeFlag = sink match {
case _: RetractStreamTableSink[_] | _: UpsertStreamTableSink[_] => true
case _: StreamTableSink[_] => false
case dsts: DataStreamTableSink[_] => dsts.withChangeFlag
}
inferSinkPhysicalSchema(sink.getConsumedDataType, queryLogicalType, withChangeFlag)
}
/**
 * Infers the physical schema of a [[TableSink]]. The physical schema ignores the change flag
 * field and normalizes physical types (which can be generic or POJO types) into a [[TableSchema]].
 *
 * @param consumedDataType the consumed data type of the sink
 * @param queryLogicalType the logical type of the query; used to fill in the sink physical
 *                         schema if the sink physical type is not specified.
 * @param withChangeFlag true if the emitted records contain change flags.
*/
def inferSinkPhysicalSchema(
consumedDataType: DataType,
queryLogicalType: RowType,
withChangeFlag: Boolean): TableSchema = {
// the requested output physical type which ignores the flag field
val requestedOutputType = inferSinkPhysicalDataType(
consumedDataType,
queryLogicalType,
withChangeFlag)
if (LogicalTypeChecks.isCompositeType(requestedOutputType.getLogicalType)) {
DataTypeUtils.expandCompositeTypeToSchema(requestedOutputType)
} else {
// atomic type
TableSchema.builder().field("f0", requestedOutputType).build()
}
}
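  // For example (a sketch): a sink consuming a composite type such as
  // ROW<a INT, b STRING> expands into a two-field schema, while a sink
  // consuming an atomic type such as STRING yields the single-field
  // schema [f0: STRING] built above.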
/**
 * Infers the physical data type of a [[TableSink]]. The physical data type ignores
 * the change flag field.
 *
 * @param consumedDataType the consumed data type of the sink
 * @param queryLogicalType the logical type of the query; used to fill in the sink physical
 *                         schema if the sink physical type is not specified.
 * @param withChangeFlag true if the emitted records contain change flags.
*/
def inferSinkPhysicalDataType(
consumedDataType: DataType,
queryLogicalType: RowType,
withChangeFlag: Boolean): DataType = {
consumedDataType.getLogicalType match {
case lt: LegacyTypeInformationType[_] =>
val requestedTypeInfo = if (withChangeFlag) {
lt.getTypeInformation match {
// Scala tuple
case t: CaseClassTypeInfo[_]
if t.getTypeClass == classOf[(_, _)] && t.getTypeAt(0) == Types.BOOLEAN =>
t.getTypeAt[Any](1)
// Java tuple
case t: TupleTypeInfo[_]
if t.getTypeClass == classOf[JTuple2[_, _]] && t.getTypeAt(0) == Types.BOOLEAN =>
t.getTypeAt[Any](1)
case _ => throw new TableException(
"Don't support " + consumedDataType + " conversion for the retract sink")
}
} else {
lt.getTypeInformation
}
      // The type may have been inferred by invoking [[TypeExtractor.createTypeInfo]] based on the
      // class of the resulting type, e.g. when converting the given [[Table]] into
      // an append [[DataStream]]. If the class is Row, the returned type is only
      // [[GenericTypeInfo[Row]]], so it should be converted to [[RowTypeInfo]] for
      // better serialization performance.
requestedTypeInfo match {
case gt: GenericTypeInfo[Row] if gt.getTypeClass == classOf[Row] =>
fromLogicalToDataType(queryLogicalType).bridgedTo(classOf[Row])
case gt: GenericTypeInfo[BaseRow] if gt.getTypeClass == classOf[BaseRow] =>
fromLogicalToDataType(queryLogicalType).bridgedTo(classOf[BaseRow])
case bt: BaseRowTypeInfo =>
val fields = bt.getFieldNames.zip(bt.getLogicalTypes).map { case (n, t) =>
DataTypes.FIELD(n, fromLogicalToDataType(t))
}
DataTypes.ROW(fields: _*).bridgedTo(classOf[BaseRow])
case _ =>
fromLegacyInfoToDataType(requestedTypeInfo)
}
case _ =>
consumedDataType
}
}
/**
* Checks whether the logical schema (from DDL) and physical schema
* (from TableSink.getConsumedDataType()) of sink are compatible.
*
* @param catalogTable the catalog table of sink
* @param sink the instance of [[TableSink]]
* @param queryLogicalType the logical type of query
*/
def validateLogicalPhysicalTypesCompatible(
catalogTable: CatalogTable,
sink: TableSink[_],
queryLogicalType: RowType): Unit = {
// there may be generated columns in DDL, only get the physical part of DDL
val logicalSchema = TableSchemaUtils.getPhysicalSchema(catalogTable.getSchema)
// infer the physical schema from TableSink#getConsumedDataType
val physicalSchema = TableSinkUtils.inferSinkPhysicalSchema(
queryLogicalType,
sink)
// check for valid type info
if (logicalSchema.getFieldCount != physicalSchema.getFieldCount) {
throw new ValidationException("The field count of logical schema of the table does" +
" not match with the field count of physical schema\n. " +
s"The logical schema: [${logicalSchema.getFieldDataTypes.mkString(",")}]\n" +
s"The physical schema: [${physicalSchema.getFieldDataTypes.mkString(",")}].")
}
for (i <- 0 until logicalSchema.getFieldCount) {
val logicalFieldType = DataTypeUtils.transform(
logicalSchema.getFieldDataTypes()(i), toNullable) // ignore nullabilities
val logicalFieldName = logicalSchema.getFieldNames()(i)
val physicalFieldType = DataTypeUtils.transform(
physicalSchema.getFieldDataTypes()(i), toNullable) // ignore nullabilities
val physicalFieldName = physicalSchema.getFieldNames()(i)
TypeMappingUtils.checkPhysicalLogicalTypeCompatible(
physicalFieldType.getLogicalType,
logicalFieldType.getLogicalType,
physicalFieldName,
logicalFieldName,
false)
}
}
}
|
gyfora/flink
|
flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/sinks/TableSinkUtils.scala
|
Scala
|
apache-2.0
| 13,178 |
package org.vds.discoverscala.core.ch07
import org.scalatest.{Matchers, WordSpec}
/**
* Example for ch7. Control Structures
*/
class ControlStructures extends WordSpec with Matchers {
"For structures for (i <- i to n){}" when {
"We use range" should {
"Interact from 1 to 4" in {
var res = 1;
for (i <- 1 to 4) {
res = i;
}
assert(res == 4)
}
"able to filering numbers" in {
var res = 0;
for (
i <- 1 to 4
if i % 2 == 0
) res = res + i;
assert(res == (2 + 4))
}
"Return collections using loop expession" in {
val res = for (
i <- 1 to 4
if i % 2 == 0
) yield i;
res should contain allOf(2, 4);
}
}
"We use match expression" when {
"Define simple match" should {
"select corresponding cese block" in {
val firstArg = "chips"
val res =
firstArg match {
case "salt" => "pepper"
case "chips" => "salsa"
case "eggs" => "bacon"
case _ => "huh?"
}
assert(res == "salsa")
}
}
}
}
}
|
dawid-swist/discover-scala
|
src/test/scala/org/vds/discoverscala/core/ch07/ControlStructures.scala
|
Scala
|
gpl-3.0
| 1,224 |
package com.sksamuel.elastic4s.searches.aggs.pipeline
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy
import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModelBuilder
case class MovAvgDefinition(name: String,
bucketsPath: String,
format: Option[String] = None,
gapPolicy: Option[GapPolicy] = None,
minimise: Option[Boolean] = None,
modelBuilder: Option[MovAvgModelBuilder] = None,
numPredictions: Option[Integer] = None,
settings: Map[String, AnyRef] = Map.empty,
window: Option[Integer] = None,
metadata: Map[String, AnyRef] = Map.empty) extends PipelineAggregationDefinition {
type T = MovAvgDefinition
def minimise(minimise: Boolean): MovAvgDefinition = copy(minimise = Some(minimise))
def modelBuilder(modelBuilder: MovAvgModelBuilder): MovAvgDefinition = copy(modelBuilder = Some(modelBuilder))
def numPredictions(numPredictions: Integer): MovAvgDefinition = copy(numPredictions = Some(numPredictions))
  def settings(settings: Map[String, AnyRef]): MovAvgDefinition = copy(settings = settings)
def window(window: Integer): MovAvgDefinition = copy(window = Some(window))
def format(format: String): MovAvgDefinition = copy(format = Some(format))
def gapPolicy(gapPolicy: GapPolicy): MovAvgDefinition = copy(gapPolicy = Some(gapPolicy))
def metadata(metadata: Map[String, AnyRef]): MovAvgDefinition = copy(metadata = metadata)
}
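// A minimal usage sketch of the fluent setters (the aggregation name, buckets
// path and window value are hypothetical):
//
//   val agg = MovAvgDefinition("thirty_day_avg", "sales_per_day")
//     .window(7)
//     .gapPolicy(GapPolicy.SKIP)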
|
aroundus-inc/elastic4s
|
elastic4s-core/src/main/scala/com/sksamuel/elastic4s/searches/aggs/pipeline/MovAvgDefinition.scala
|
Scala
|
apache-2.0
| 1,656 |
package leo.datastructures.blackboard
import leo.datastructures.context.Context
import leo.modules.output.StatusSZS
/**
* Marker Interface for any Event for the Agents
*/
trait Event {
}
/**
* Marker Interface for a message from Agent to Agent
*
* @author Max Wisniewski
* @since 11/19/14
*/
trait Message extends Event {
}
/**
 * Encapsulates a Formula that was recently added or modified in the blackboard.
* @param f - Modified formula
*/
private class FormulaEvent(f : FormulaStore) extends Event {
def getF : FormulaStore = f
}
/**
* Creates and deconstructs an Event containing a single formula
*/
object FormulaEvent{
def apply(f : FormulaStore) : Event = new FormulaEvent(f)
def unapply(e : Event) : Option[FormulaStore] = e match {
case f : FormulaEvent => Some(f.getF)
case _ => None
}
}
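// A short sketch of how the extractor is meant to be used by an agent
// (the `event` value and `process` handler are hypothetical):
//
//   event match {
//     case FormulaEvent(f) => process(f)
//     case _               => ()
//   }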
private class ContextEvent(c : Context) extends Event {
def getC : Context = c
}
object ContextEvent {
def apply(c : Context) : Event = new ContextEvent(c)
def unapply(e : Event) : Option[Context] = e match {
case c : ContextEvent => Some(c.getC)
case _ => None
}
}
private class StatusEvent(val c : Context, val s : StatusSZS) extends Event {}
object StatusEvent {
def apply(c : Context, s : StatusSZS) : Event = new StatusEvent(c,s)
def unapply(e : Event) : Option[(Context, StatusSZS)] = e match {
case c : StatusEvent => Some(c.c, c.s)
case _ => None
}
}
class DoneEvent() extends Event {}
object DoneEvent {
def apply() = new DoneEvent()
}
|
cbenzmueller/LeoPARD
|
src/main/scala/leo/datastructures/blackboard/BlackboardMarker.scala
|
Scala
|
bsd-3-clause
| 1,534 |
package org.zbritva.main
/**
* Created by zBritva on 04.05.16.
*/
import org.apache.spark.{SparkConf, SparkContext, SparkEnv}
import org.apache.log4j.{Level, Logger}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.Row
import org.zbritva.graph.{CubeTree, Simplex}
import tests.org.zbritva.graph.TestOptimizationTask
object SparkCubeRun extends App {
case class Person(name: String, handedness: String, height: Double, weight: Double, avg: Double, HR: Int)
override def main(args: Array[String]) {
val conf = new SparkConf()
conf.setJars(Seq("out\\artifacts\\CustomFunctions_jar\\CustomFunctions.jar"))
// val sc = new SparkContext(master = "spark://192.168.142.176:7077", appName = "SparkCubeRun", conf)
val sc = new SparkContext(master = "local[4]", appName = "SparkCubeRun", conf)
val sqlContext = new org.apache.spark.sql.SQLContext(sc)
sc.addJar("out\\artifacts\\CustomFunctions_jar\\CustomFunctions.jar")
import sqlContext.implicits._
import org.zbritva.rdd.DataFrameCubeExtension._
// val dataAndHeader = sc.textFile("C:\\SparkData\\baseball_data.csv")
val dataAndHeader = sc.textFile("D:\\SparkData\\baseball_data.csv")
// split / clean data
val headerString = dataAndHeader.first()
val header = headerString.split(",")
// val data = dataAndHeader.filter(str => org.zbritva.udf.CustomFunctions.notEqual(str, headerString))
// print(data.count())
val dataAndHeader_df = dataAndHeader.filter(r => r != "name,handedness,height,weight,avg,HR").map[Array[String]](_.split(",")).map[Person](
p =>
try {
Person(
p(0).trim,
p(1).trim,
p(2).trim.toDouble,
p(3).trim.toDouble,
p(4).trim.toDouble,
p(5).trim.toInt)
}
catch {
case e: Exception =>
println("EXCEPTION:")
throw e
}
).toDF()
dataAndHeader_df.cubePipeSort(
dataAndHeader_df.col("height"),
dataAndHeader_df.col("weight"),
dataAndHeader_df.col("avg"),
dataAndHeader_df.col("HR"))
//test simplex method
// val table2: Array[Array[Double]]
// = Array(
// Array(21, 5, 7),
// Array(8, -1, 3),
// Array(0, -1, -2))
//
// val simplex2 = new Simplex(table2)
// val result = simplex2.Calculate()
//
// println(result)
//list of columns of cubing
// val lst: List[String] = List(
// "A", "B", "C"
// )
//
// val cubtree = new CubeTree(lst)
//
// val tree = cubtree.getTree
//
// new TestOptimizationTask().walkOnTree(tree.getRoot())
//
// val task = tree._constructSimpexOptimizationTask()
//
// println(task)
//
// tree.solveOptimizationTask()
//
// print("DONE")
}
}
|
zBritva/SparkCUBE
|
src/org/zbritva/main/SparkCubeRun.scala
|
Scala
|
apache-2.0
| 2,897 |
/*
* Copyright (C) 2005, The OpenURP Software.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openurp.edu.grade.model
import org.beangle.data.model.LongId
import org.openurp.code.edu.model.GradingMode
/**
 * Abstract base class for grade states
*
* @author chaostone
*/
abstract class AbstractGradeState extends LongId with GradeState {
/**
   * Grading mode
*/
var gradingMode: GradingMode = _
/**
   * Grade entry status
*/
var status: Int = Grade.Status.New
/**
   * Operator
*/
var operator: String = _
/**
   * Both confirmed and published grades count as confirmed
*/
def confirmed: Boolean = status >= Grade.Status.Confirmed
def published: Boolean = status == Grade.Status.Published
}
|
openurp/api
|
edu/src/main/scala/org/openurp/edu/grade/model/AbstractGradeState.scala
|
Scala
|
lgpl-3.0
| 1,358 |
/*
* Copyright 2014 Michael Krolikowski
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.mkroli.dns4s.examples.simple.client
import java.net.InetSocketAddress
import scala.concurrent.duration.DurationInt
import scala.language.postfixOps
import com.github.mkroli.dns4s.akka.Dns
import com.github.mkroli.dns4s.dsl._
import akka.actor.ActorSystem
import akka.io.IO
import akka.pattern.ask
import akka.util.Timeout
object DnsClient extends App {
implicit val system = ActorSystem("DnsServer")
implicit val timeout = Timeout(5 seconds)
import system.dispatcher
IO(Dns) ? Dns.DnsPacket(Query ~ Questions(QName("google.de")), new InetSocketAddress("8.8.8.8", 53)) onSuccess {
case Response(Answers(answers)) =>
answers.collect {
case ARecord(arecord) => println(arecord.address.getHostAddress)
}
system.shutdown
}
}
|
mkroli/dns4s
|
examples/simple-client/src/main/scala/com/github/mkroli/dns4s/examples/simple/client/DnsClient.scala
|
Scala
|
apache-2.0
| 1,388 |
package blended.streams.file
import blended.container.context.api.ContainerContext
import blended.streams.FlowHeaderConfig
import blended.streams.message.{FlowMessage, MsgProperty}
import blended.util.config.Implicits._
import com.typesafe.config.Config
import scala.jdk.CollectionConverters._
import scala.concurrent.duration._
object FilePollConfig {
val PATH_ID = "id"
val PATH_INTERVAL = "interval"
val PATH_SOURCEDIR = "sourceDirectory"
val PATH_PATTERN = "pattern"
val PATH_BACKUP = "backup"
val PATH_BACKUP_TST = "backupTimestamp"
val PATH_LOCK = "lock"
val PATH_ASTEXT = "asText"
val PATH_TMP_EXT = "extension"
val PATH_ACKTIMEOUT = "ackTimeout"
val PATH_FILENAME_PROP = "filenameProperty"
val PATH_FILEPATH_PROP = "filepathProperty"
val PATH_BATCHSIZE = "batchSize"
val PATH_CHARSET = "charset"
val PATH_HEADER = "header"
val DEFAULT_BATCH_SIZE : Int = 10
def apply(cfg : Config, ctCtxt : ContainerContext) : FilePollConfig = {
val props : FlowMessage.FlowMessageProps = if (cfg.hasPath(PATH_HEADER)) {
cfg.getConfig(PATH_HEADER).entrySet().asScala.map { e =>
val k = e.getKey()
val v = ctCtxt.resolveString(cfg.getConfig(PATH_HEADER).getString(k, "")).get.toString()
k -> MsgProperty(v).get
}.toMap
} else {
Map.empty
}
apply(cfg, FlowHeaderConfig.create(ctCtxt), props)
}
def apply(cfg : Config, headerCfg : FlowHeaderConfig, header : FlowMessage.FlowMessageProps = Map.empty) : FilePollConfig = {
new FilePollConfig(
id = cfg.getString(PATH_ID),
headerCfg = headerCfg,
interval = cfg.getDuration(PATH_INTERVAL, 1.second),
sourceDir = cfg.getString(PATH_SOURCEDIR),
pattern = cfg.getStringOption(PATH_PATTERN),
lock = cfg.getStringOption(PATH_LOCK),
backup = cfg.getStringOption(PATH_BACKUP),
backupTimestamp = cfg.getBoolean(PATH_BACKUP_TST, true),
charSet = cfg.getStringOption(PATH_CHARSET),
ackTimeout = cfg.getDuration(PATH_ACKTIMEOUT, 1.second),
asText = cfg.getBoolean(PATH_ASTEXT, false),
tmpExt = cfg.getString(PATH_TMP_EXT, "_to_send"),
filenameProp = cfg.getString(PATH_FILENAME_PROP, "BlendedFileName"),
filepathProp = cfg.getString(PATH_FILEPATH_PROP, "BlendedFilePath"),
batchSize = cfg.getInt(PATH_BATCHSIZE, DEFAULT_BATCH_SIZE),
header = header
)
}
}
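// A minimal example of a HOCON block this factory can parse (all values are
// hypothetical; `interval` and `ackTimeout` fall back to 1 second when absent,
// assuming the duration helper accepts HOCON-style durations):
//
//   {
//     id = "inbound-poller"
//     interval = 5 seconds
//     sourceDirectory = "/data/inbound"
//     pattern = ".*\\.xml"
//     backup = "/data/backup"
//     header {
//       ResourceType = "order"
//     }
//   }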
case class FilePollConfig(
id : String,
headerCfg : FlowHeaderConfig,
interval : FiniteDuration,
sourceDir : String,
pattern : Option[String],
lock : Option[String],
backup : Option[String],
backupTimestamp : Boolean,
asText: Boolean,
charSet : Option[String],
ackTimeout : FiniteDuration,
batchSize : Int,
filenameProp : String,
filepathProp : String,
tmpExt : String,
header : FlowMessage.FlowMessageProps
)
|
woq-blended/blended
|
blended.streams/src/main/scala/blended/streams/file/FilePollConfig.scala
|
Scala
|
apache-2.0
| 2,925 |
/** Copyright 2015 TappingStone, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prediction.controller
import scala.language.implicitConversions
/** Defines an engine parameters generator.
*
* Implementations of this trait can be supplied to "pio eval" as the second
* command line argument.
*
* @group Evaluation
*/
trait EngineParamsGenerator {
protected[this] var epList: Seq[EngineParams] = _
protected[this] var epListSet: Boolean = false
/** Returns the list of [[EngineParams]] of this [[EngineParamsGenerator]]. */
def engineParamsList: Seq[EngineParams] = {
assert(epListSet, "EngineParamsList not set")
epList
}
/** Sets the list of [[EngineParams]] of this [[EngineParamsGenerator]]. */
def engineParamsList_=(l: Seq[EngineParams]) {
assert(!epListSet, "EngineParamsList can bet set at most once")
epList = Seq(l:_*)
epListSet = true
}
}
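// A minimal sketch of an implementation, assuming EngineParams provides
// default constructor arguments (the object name is hypothetical):
//
//   object MyEngineParamsList extends EngineParamsGenerator {
//     engineParamsList = Seq(new EngineParams())
//   }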
|
ch33hau/PredictionIO
|
core/src/main/scala/io/prediction/controller/EngineParamsGenerator.scala
|
Scala
|
apache-2.0
| 1,445 |
/*
* Copyright 2011-2022 GatlingCorp (https://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.core.feeder
import java.nio.channels.{ Channels, ReadableByteChannel }
import java.nio.charset.Charset
import scala.jdk.CollectionConverters._
import org.simpleflatmapper.lightningcsv.CsvParser
private[gatling] object SeparatedValuesParser {
val DefaultQuoteChar: Char = '"'
val CommaSeparator: Char = ','
val SemicolonSeparator: Char = ';'
  val TabulationSeparator: Char = '\t'
def stream(columnSeparator: Char, quoteChar: Char, charset: Charset): ReadableByteChannel => Feeder[String] = {
val parser = CsvParser
.separator(columnSeparator)
.quote(quoteChar)
channel => {
val reader = Channels.newReader(new Utf8BomSkipReadableByteChannel(channel), charset.newDecoder, -1)
val it = parser.iterator(reader)
require(it.hasNext, "Feeder source is empty")
val headers = it.next().map(_.trim)
require(headers.nonEmpty, "CSV sources must have a non empty first line containing the headers")
headers.foreach { header =>
require(header.nonEmpty, "CSV headers can't be empty")
}
it.asScala.collect { case row if !(row.length == 1 && row(0).isEmpty) => ArrayBasedMap(headers, row) }
}
}
}
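// A minimal wiring sketch (the file path is hypothetical):
//
//   import java.nio.channels.FileChannel
//   import java.nio.charset.StandardCharsets
//   import java.nio.file.Paths
//
//   val feeder: Feeder[String] =
//     stream(CommaSeparator, DefaultQuoteChar, StandardCharsets.UTF_8)
//       .apply(FileChannel.open(Paths.get("/tmp/users.csv")))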
|
gatling/gatling
|
gatling-core/src/main/scala/io/gatling/core/feeder/SeparatedValuesParser.scala
|
Scala
|
apache-2.0
| 1,819 |
/**
* Copyright (C) 2007 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.xforms.control.controls
import org.apache.commons.io.FileUtils
import org.apache.commons.lang3.StringUtils
import org.orbeon.dom.Element
import org.orbeon.oxf.util.NetUtils
import org.orbeon.oxf.util.StringUtils._
import org.orbeon.oxf.xforms.XFormsConstants._
import org.orbeon.oxf.xforms.control.XFormsControl.{ControlProperty, ImmutableControlProperty, MutableControlProperty}
import org.orbeon.oxf.xforms.control.controls.FileMetadata._
import org.orbeon.oxf.xforms.control.{ControlAjaxSupport, XFormsControl, XFormsValueControl}
import org.orbeon.oxf.xforms.event.Dispatch
import org.orbeon.oxf.xforms.event.events.XXFormsBindingErrorEvent
import org.orbeon.oxf.xforms.model.DataModel
import org.orbeon.oxf.xforms.upload.UploaderServer
import org.orbeon.saxon.om.NodeInfo
import org.xml.sax.helpers.AttributesImpl
import scala.util.control.NonFatal
// This trait is used by controls that support nested file metadata such as "filename"
trait FileMetadata extends XFormsValueControl {
self: XFormsControl ⇒
// Children elements
// TODO: Don't deal with elements here, this should be part of the ElementAnalysis
private val mediatypeElement = Option(self.element.element(XFORMS_MEDIATYPE_QNAME))
private val filenameElement = Option(self.element.element(XFORMS_FILENAME_QNAME))
private val sizeElement = Option(self.element.element(XXFORMS_SIZE_QNAME))
private class FileMetadataProperty(evaluator: Evaluator) extends MutableControlProperty[String] {
protected def evaluateValue() = evaluator.evaluate(self)
override protected def nonRelevantValue = evaluator.default
protected def isRelevant = self.isRelevant
protected def wasRelevant = self.wasRelevant
// No dependencies yet
protected def requireUpdate = true
protected def notifyCompute() = ()
protected def notifyOptimized() = ()
}
// Supported file metadata properties
private var props: Map[String, ControlProperty[String]] =
supportedFileMetadata map (name ⇒ name → new FileMetadataProperty(Evaluators(name))) toMap
// Properties to support
def supportedFileMetadata: Seq[String]
// Evaluate all properties
def evaluateFileMetadata(relevant: Boolean) =
props.values foreach (_.value)
// Mark all properties dirty
def markFileMetadataDirty() =
props.values foreach (_.handleMarkDirty())
// Getters
def state = props("state") .value
def fileMediatype = props("mediatype").value.trimAllToOpt
def filename = props("filename") .value.trimAllToOpt
def fileSize = props("size") .value.trimAllToOpt
def iterateProperties = props.iterator map {
case (k, v) ⇒ k → Option(v.value)
}
def humanReadableFileSize = fileSize filter StringUtils.isNotBlank map humanReadableBytes
// "Instant" evaluators which go straight to the bound nodes if possible
def boundFileMediatype = Evaluators("mediatype").evaluate(self)
def boundFilename = Evaluators("filename").evaluate(self)
// Setters
def setFileMediatype(mediatype: String): Unit =
setInfoValue(mediatypeElement, mediatype)
def setFilename(filename: String): Unit = {
    // Depending on the web browser, the filename may or may not contain a path (sending the path is fairly
    // insecure and a bad idea, but some browsers do it). For consistency and security we keep just the filename.
val justFileName = StringUtils.split(filename, """\\/""").lastOption getOrElse ""
setInfoValue(filenameElement, justFileName)
}
def setFileSize(size: String): Unit =
setInfoValue(sizeElement, size)
def addFileMetadataAttributes(attributesImpl: AttributesImpl, previousControlOpt: Option[FileMetadata]): Boolean = {
val uploadControl2 = self
var added: Boolean = false
def addAtt(name: String, getValue: FileMetadata ⇒ String): Unit = {
val value1 = previousControlOpt map getValue orNull
val value2 = getValue(uploadControl2)
if (value1 != value2) {
val attributeValue = StringUtils.defaultString(value2)
added |= ControlAjaxSupport.addAttributeIfNeeded(attributesImpl, name, attributeValue, previousControlOpt.isEmpty, attributeValue == "")
}
}
// Add attributes for each property with a different value
props foreach {
case (name @ "size", _) ⇒ addAtt(name, _.humanReadableFileSize.orNull) // special case size so we can format
case (name, _) ⇒ addAtt(name, _.props(name).value)
}
added
}
// True if all metadata is the same (NOTE: the names must match)
def compareFileMetadata(other: FileMetadata) =
props.size == other.props.size && (props forall { case (name, prop) ⇒ prop.value == other.props(name).value })
// Update other with an immutable version of the metadata
def updateFileMetadataCopy(other: FileMetadata) =
other.props = props map { case (name, prop) ⇒ name → new ImmutableControlProperty(prop.value) }
private def setInfoValue(element: Option[Element], value: String) =
if (value ne null)
element foreach { e ⇒
val contextStack = self.getContextStack
contextStack.setBinding(self.bindingContext)
contextStack.pushBinding(e, self.getEffectiveId, self.getChildElementScope(e))
contextStack.getCurrentBindingContext.singleNodeOpt foreach { currentSingleNode ⇒
DataModel.setValueIfChanged(
nodeInfo = currentSingleNode,
newValue = value,
onSuccess = oldValue ⇒ DataModel.logAndNotifyValueChange(
containingDocument = self.container.getContainingDocument,
source = "file metadata",
nodeInfo = currentSingleNode,
oldValue = oldValue,
newValue = value,
isCalculate = false,
collector = Dispatch.dispatchEvent
),
reason ⇒ Dispatch.dispatchEvent(new XXFormsBindingErrorEvent(self, self.getLocationData, reason))
)
}
}
}
object FileMetadata {
case class Evaluator(evaluate: FileMetadata ⇒ String, default: String)
// How to evaluate each property and default values used when control is non-relevant
private val Evaluators = Map[String, Evaluator](
"state" → Evaluator(m ⇒ if (StringUtils.isBlank(m.getValue)) "empty" else "file", "empty"),
"mediatype" → Evaluator(m ⇒ m.mediatypeElement map (childMetadataValue(m, _)) orNull, null),
"filename" → Evaluator(m ⇒ m.filenameElement map (childMetadataValue(m, _)) orNull, null),
"size" → Evaluator(m ⇒ m.sizeElement map (childMetadataValue(m, _)) orNull, null),
"progress-state" → Evaluator(m ⇒ progress(m) map (_.state.name) orNull, null),
"progress-received" → Evaluator(m ⇒ progress(m) map (_.receivedSize.toString) orNull, null),
"progress-expected" → Evaluator(m ⇒ progress(m) flatMap (_.expectedSize) map (_.toString) orNull, null)
)
// All possible property names
val AllMetadataNames: Seq[String] = Evaluators.keys.toList
def progress(metadata: FileMetadata) = {
val option = UploaderServer.getUploadProgress(NetUtils.getExternalContext.getRequest, metadata.containingDocument.getUUID, metadata.getEffectiveId)
option filter (_.fieldName == metadata.getEffectiveId)
}
private def childMetadataValue(m: FileMetadata, element: Element) = {
val contextStack = m.getContextStack
contextStack.setBinding(m.bindingContext)
contextStack.pushBinding(element, m.getEffectiveId, m.getChildElementScope(element))
DataModel.getValue(contextStack.getCurrentBindingContext.getSingleItem)
}
// Format a string containing a number of bytes to a human-readable string
// If the input string doesn't represent a Long, return the string unchanged
def humanReadableBytes(size: String) =
try FileUtils.byteCountToDisplaySize(size.toLong)
catch { case NonFatal(_) ⇒ size }
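  // For example (a sketch): humanReadableBytes("1024") yields "1 KB", while a
  // non-numeric input such as humanReadableBytes("n/a") is returned unchanged.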
}
|
brunobuzzi/orbeon-forms
|
xforms/jvm/src/main/scala/org/orbeon/oxf/xforms/control/controls/FileMetadata.scala
|
Scala
|
lgpl-2.1
| 8,820 |
package adtoyou.spark.analysis
import org.apache.spark.{SparkConf, SparkContext}
/**
* Created by tuyou006 on 2017/5/10.
*/
object HintExtract {
def main(args: Array[String]): Unit = {
val conf = new SparkConf().setAppName("Spark HintExtract").setMaster("yarn-client")
val sc = new SparkContext(conf)
val lines = sc.textFile("hdfs:////asmgo-data/output/2017*/1004/part*")
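    // Field 8 of each '|'-delimited record holds a space-separated list of hints;
    // the reduceByKey below keeps only the first value per key and is used purely
    // to de-duplicate the hint strings.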
val hints = lines.map(_.split("\\|")).
map(arr => arr(7)).
flatMap(_.split(' ')).
map(s => (s, 1.00)).
reduceByKey((v1, v2) => v1).
map(tup => tup._1)
hints.saveAsTextFile("hdfs:///asmgo-data/client/1001/appNames.txt")
sc.stop()
}
}
|
7u/spark-learning
|
spark.learning/spark_test/src/main/scala/adtoyou/spark/analysis/HintExtract.scala
|
Scala
|
apache-2.0
| 695 |
package com.ignition.frame
import org.apache.spark.sql.Row
import org.junit.runner.RunWith
import org.specs2.runner.JUnitRunner
import com.ignition.TestDataHelper
import com.ignition.script.RichString
import com.ignition.types.{ RichStructType, date, double, fieldToRichStruct, string }
@RunWith(classOf[JUnitRunner])
class FormulaSpec extends FrameFlowSpecification with TestDataHelper {
val schema = string("name") ~ date("dob") ~ double("total") ~ string("xml") ~ string("json")
val row0 = Row("john", javaDate(1990, 12, 5), 123.45, <a><b>123</b></a>.toString, """{"books" : ["a", "b", "c"]}""")
val row1 = Row("jane", javaDate(1975, 2, 12), 25.1, <a><b>45</b></a>.toString, """{"books" : ["x"]}""")
val row2 = Row("jack", javaDate(1984, 4, 21), 349.0, <a><b>67</b></a>.toString, """{"books" : []}""")
val row3 = Row("jake", javaDate(1999, 11, 25), 4.22, <a><c></c></a>.toString, """{"movies" : ["a", "b", "c"]}""")
val row4 = Row("jill", javaDate(1970, 7, 2), 77.13, <a></a>.toString, """{"books" : ["c", "c", "c"]}""")
val row5 = Row("josh", javaDate(1981, 7, 18), 13.6, <a><b></b></a>.toString, """{"books" : ["1", "2"]}""")
val row6 = Row("judd", javaDate(1994, 2, 20), 5.999, <a><b>x</b></a>.toString, """{}""")
val row7 = Row("jess", javaDate(1974, 1, 27), 15.0, <b>...</b>.toString, """{"books" : "some"}""")
val grid = DataGrid(schema, Seq(row0, row1, row2, row3, row4, row5, row6, row7))
"Formula" should {
"work for XPath expressions" in {
val formula = Formula("X" -> "b".xpath("xml"))
grid --> formula
assertSchema(schema ~ string("X"), formula, 0)
assertOutput(formula, 0,
apd(row0, "<b>123</b>"), apd(row1, "<b>45</b>"), apd(row2, "<b>67</b>"), apd(row3, ""),
apd(row4, ""), apd(row5, "<b/>"), apd(row6, "<b>x</b>"), apd(row7, "<b>...</b>"))
}
"work for JsonPath expressions" in {
val formula = Formula("Y" -> "$.books[1]".json("json"))
grid --> formula
assertSchema(schema ~ string("Y"), formula, 0)
assertOutput(formula, 0,
apd(row0, "b"), apd(row1, ""), apd(row2, ""), apd(row3, ""),
apd(row4, "c"), apd(row5, "2"), apd(row6, ""), apd(row7, ""))
}
"work for Mvel expressions" in {
val formula = Formula("Z" -> "YEAR(dob) + total / 2".mvel)
grid --> formula
assertSchema(schema ~ double("Z"), formula, 0)
assertOutput(formula, 0,
apd(row0, 2051.725), apd(row1, 1987.55), apd(row2, 2158.5), apd(row3, 2001.11),
apd(row4, 2008.565), apd(row5, 1987.8), apd(row6, 1996.9995), apd(row7, 1981.5))
}
"work for multiple expressions" in {
val formula = Formula("X" -> "b".xpath("xml"), "Y" -> "$.books[1]".json("json"),
"Z" -> "YEAR(dob) + total / 2".mvel)
grid --> formula
assertSchema(schema ~ string("X") ~ string("Y") ~ double("Z"), formula, 0)
assertOutput(formula, 0,
apd(row0, "<b>123</b>", "b", 2051.725), apd(row1, "<b>45</b>", "", 1987.55),
apd(row2, "<b>67</b>", "", 2158.5), apd(row3, "", "", 2001.11),
apd(row4, "", "c", 2008.565), apd(row5, "<b/>", "2", 1987.8),
apd(row6, "<b>x</b>", "", 1996.9995), apd(row7, "<b>...</b>", "", 1981.5))
}
"save to/load from xml" in {
val formula = Formula("X" -> "b".xpath("xml"), "Y" -> "$.books[1]".json("json"),
"Z" -> "YEAR(dob) + total / 2".mvel)
formula.toXml must ==/(
<formula>
<field name="X"><xpath source="xml">b</xpath></field>
<field name="Y"><json source="json">$.books[1]</json></field>
<field name="Z"><mvel>YEAR(dob) + total / 2</mvel></field>
</formula>)
Formula.fromXml(formula.toXml) === formula
}
"save to/load from json" in {
import org.json4s.JsonDSL._
val formula = Formula("X" -> "b".xpath("xml"), "Y" -> "$.books[1]".json("json"),
"Z" -> "YEAR(dob) + total / 2".mvel)
formula.toJson === ("tag" -> "formula") ~ ("fields" -> List(
("name" -> "X") ~ ("type" -> "xpath") ~ ("source" -> "xml") ~ ("query" -> "b"),
("name" -> "Y") ~ ("type" -> "json") ~ ("source" -> "json") ~ ("query" -> "$.books[1]"),
("name" -> "Z") ~ ("type" -> "mvel") ~ ("expression" -> "YEAR(dob) + total / 2")))
Formula.fromJson(formula.toJson) === formula
}
"be unserializable" in assertUnserializable(Formula("Y" -> "$.books[1]".json("json")))
}
private def apd(row: Row, values: Any*) = Row(row.toSeq ++ values: _*)
}
|
uralian/ignition
|
src/test/scala/com/ignition/frame/FormulaSpec.scala
|
Scala
|
apache-2.0
| 4,471 |
/**
* Copyright 2017 Daniel Götten
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.dangoe.freda
import java.sql.{Blob, CallableStatement, Clob, Connection, DatabaseMetaData, NClob, PreparedStatement, SQLWarning, SQLXML, Savepoint, Statement, Struct}
import java.util.Properties
import java.{sql, util}
import java.util.concurrent.Executor
/**
 * A wrapper around a given [[java.sql.Connection]] that restricts it to
 * non-management operations (e.g. `prepareCall`, `prepareStatement`).
*
* @param delegate The delegate `Connection`.
*/
private[freda] class RestrictedConnection private(delegate: Connection) extends Connection {
override def createArrayOf(typeName: String, elements: Array[AnyRef]): sql.Array = delegate.createArrayOf(typeName, elements)
override def createBlob(): Blob = delegate.createBlob()
override def createClob(): Clob = delegate.createClob()
override def createNClob(): NClob = delegate.createNClob()
override def createSQLXML(): SQLXML = delegate.createSQLXML()
override def createStatement(): Statement = delegate.createStatement()
override def createStatement(resultSetType: Int, resultSetConcurrency: Int): Statement = delegate.createStatement(resultSetType, resultSetConcurrency)
override def createStatement(resultSetType: Int, resultSetConcurrency: Int, resultSetHoldability: Int): Statement = delegate.createStatement(resultSetType, resultSetConcurrency, resultSetHoldability)
override def createStruct(typeName: String, attributes: Array[AnyRef]): Struct = delegate.createStruct(typeName, attributes)
override def isClosed: Boolean = delegate.isClosed
override def isReadOnly: Boolean = delegate.isReadOnly
override def isValid(timeout: Int): Boolean = delegate.isValid(timeout)
override def nativeSQL(sql: String): String = delegate.nativeSQL(sql)
override def prepareCall(sql: String): CallableStatement = delegate.prepareCall(sql)
override def prepareCall(sql: String, resultSetType: Int, resultSetConcurrency: Int): CallableStatement = delegate.prepareCall(sql, resultSetType, resultSetConcurrency)
override def prepareCall(sql: String, resultSetType: Int, resultSetConcurrency: Int, resultSetHoldability: Int): CallableStatement = delegate.prepareCall(sql, resultSetType, resultSetConcurrency, resultSetHoldability)
override def prepareStatement(sql: String): PreparedStatement = delegate.prepareStatement(sql)
override def prepareStatement(sql: String, autoGeneratedKeys: Int): PreparedStatement = delegate.prepareStatement(sql, autoGeneratedKeys)
override def prepareStatement(sql: String, columnIndexes: Array[Int]): PreparedStatement = delegate.prepareStatement(sql, columnIndexes)
override def prepareStatement(sql: String, columnNames: Array[String]): PreparedStatement = delegate.prepareStatement(sql, columnNames)
override def prepareStatement(sql: String, resultSetType: Int, resultSetConcurrency: Int): PreparedStatement = delegate.prepareStatement(sql, resultSetType, resultSetConcurrency)
override def prepareStatement(sql: String, resultSetType: Int, resultSetConcurrency: Int, resultSetHoldability: Int): PreparedStatement = delegate.prepareStatement(sql, resultSetType, resultSetConcurrency, resultSetHoldability)
override def abort(executor: Executor): Unit = throw new UnsupportedOperationException(s"RestrictedConnection does not allow operation 'abort'.")
override def clearWarnings(): Unit = throw new UnsupportedOperationException(s"RestrictedConnection does not allow operation 'clearWarnings'.")
override def close(): Unit = throw new UnsupportedOperationException(s"RestrictedConnection does not allow operation 'close'.")
override def commit(): Unit = throw new UnsupportedOperationException(s"RestrictedConnection does not allow operation 'commit'.")
override def getAutoCommit: Boolean = throw new UnsupportedOperationException(s"RestrictedConnection does not allow operation 'getAutoCommit'.")
override def getCatalog: String = throw new UnsupportedOperationException(s"RestrictedConnection does not allow operation 'getCatalog'.")
override def getClientInfo(name: String): String = throw new UnsupportedOperationException(s"RestrictedConnection does not allow operation 'getClientInfo'.")
override def getClientInfo: Properties = throw new UnsupportedOperationException(s"RestrictedConnection does not allow operation 'getClientInfo'.")
override def getHoldability: Int = throw new UnsupportedOperationException(s"RestrictedConnection does not allow operation 'getHoldability'.")
override def getMetaData: DatabaseMetaData = throw new UnsupportedOperationException(s"RestrictedConnection does not allow operation 'getMetaData'.")
override def getNetworkTimeout: Int = throw new UnsupportedOperationException(s"RestrictedConnection does not allow operation 'getNetworkTimeout'.")
override def getSchema: String = throw new UnsupportedOperationException(s"RestrictedConnection does not allow operation 'getSchema'.")
override def getTransactionIsolation: Int = throw new UnsupportedOperationException(s"RestrictedConnection does not allow operation 'getTransactionIsolation'.")
override def getTypeMap: util.Map[String, Class[_]] = throw new UnsupportedOperationException(s"RestrictedConnection does not allow operation 'getTypeMap'.")
override def getWarnings: SQLWarning = throw new UnsupportedOperationException(s"RestrictedConnection does not allow operation 'getWarnings'.")
override def isWrapperFor(iface: Class[_]): Boolean = throw new UnsupportedOperationException(s"RestrictedConnection does not allow operation 'isWrapperFor'.")
override def releaseSavepoint(savepoint: Savepoint): Unit = throw new UnsupportedOperationException(s"RestrictedConnection does not allow operation 'releaseSavepoint'.")
override def rollback(): Unit = throw new UnsupportedOperationException(s"RestrictedConnection does not allow operation 'rollback'.")
override def rollback(savepoint: Savepoint): Unit = throw new UnsupportedOperationException(s"RestrictedConnection does not allow operation 'rollback'.")
override def setAutoCommit(autoCommit: Boolean): Unit = throw new UnsupportedOperationException(s"RestrictedConnection does not allow operation 'setAutoCommit'.")
override def setCatalog(catalog: String): Unit = throw new UnsupportedOperationException(s"RestrictedConnection does not allow operation 'setCatalog'.")
override def setClientInfo(name: String, value: String): Unit = throw new UnsupportedOperationException(s"RestrictedConnection does not allow operation 'setClientInfo'.")
override def setClientInfo(properties: Properties): Unit = throw new UnsupportedOperationException(s"RestrictedConnection does not allow operation 'setClientInfo'.")
override def setHoldability(holdability: Int): Unit = throw new UnsupportedOperationException(s"RestrictedConnection does not allow operation 'setHoldability'.")
override def setNetworkTimeout(executor: Executor, milliseconds: Int): Unit = throw new UnsupportedOperationException(s"RestrictedConnection does not allow operation 'setNetworkTimeout'.")
override def setReadOnly(readOnly: Boolean): Unit = throw new UnsupportedOperationException(s"RestrictedConnection does not allow operation 'setReadOnly'.")
override def setSavepoint(): Savepoint = throw new UnsupportedOperationException(s"RestrictedConnection does not allow operation 'setSavepoint'.")
override def setSavepoint(name: String): Savepoint = throw new UnsupportedOperationException(s"RestrictedConnection does not allow operation 'setSavepoint'.")
override def setSchema(schema: String): Unit = throw new UnsupportedOperationException(s"RestrictedConnection does not allow operation 'setSchema'.")
override def setTransactionIsolation(level: Int): Unit = throw new UnsupportedOperationException(s"RestrictedConnection does not allow operation 'setTransactionIsolation'.")
override def setTypeMap(map: util.Map[String, Class[_]]): Unit = throw new UnsupportedOperationException(s"RestrictedConnection does not allow operation 'setTypeMap'.")
override def unwrap[T](iface: Class[T]): T = throw new UnsupportedOperationException(s"RestrictedConnection does not allow operation 'unwrap'.")
}
private[freda] object RestrictedConnection {
def apply(connection: Connection): Connection = new RestrictedConnection(connection)
}
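// A short usage sketch (the `rawConnection` value is hypothetical):
//
//   val restricted = RestrictedConnection(rawConnection)
//   restricted.prepareStatement("SELECT 1") // delegated to the wrapped connection
//   restricted.commit()                     // throws UnsupportedOperationException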
|
dangoe/freda
|
core/src/main/scala/de/dangoe/freda/RestrictedConnection.scala
|
Scala
|
apache-2.0
| 8,881 |
package com.codiply.barrio.tests.helpers
import org.scalatest.FlatSpec
import com.codiply.barrio.helpers.LongQuantityStats
import com.codiply.barrio.helpers.LongQuantityStatsContainer
class LongQuantityStatsContainerSpec extends FlatSpec {
"LongQuantityStatsContainer.empty" should "return an empty container with expected stats" in {
val container = LongQuantityStatsContainer.empty
assert(container.count == 0, "when testing count")
val stats = container.toStats
assert(stats.min == Long.MaxValue, "when testing min")
assert(stats.max == Long.MinValue, "when testing max")
assert(stats.mean == 0.0, "when testing mean")
assert(stats.standardError == Double.MaxValue, "when testing standardError")
}
it should "return a container with expected stats after adding one value" in {
val value = 3
val container = LongQuantityStatsContainer.empty.add(value)
assert(container.count == 1, "when testing count")
val stats = container.toStats
assert(stats.min == value, "when testing min")
assert(stats.max == value, "when testing max")
assert(stats.mean == value.toDouble, "when testing mean")
assert(stats.standardError == Double.MaxValue, "when testing standardError")
}
it should "return a container with expected stats after adding one value twice" in {
val value = 3
val container = LongQuantityStatsContainer.empty.add(value).add(value)
assert(container.count == 2, "when testing count")
val stats = container.toStats
assert(stats.min == value, "when testing min")
assert(stats.max == value, "when testing max")
assert(stats.mean == value.toDouble, "when testing mean")
assert(stats.standardError == 0.0, "when testing standardError")
}
it should "return a container with expected stats after adding two values" in {
val value1 = -2
val value2 = 2
val expectedMean = 0.0
val expectedStandardError = 2.0
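    // Assuming SE = sample standard deviation / sqrt(n):
    // sample std dev = sqrt(((-2 - 0)^2 + (2 - 0)^2) / (2 - 1)) = sqrt(8),
    // so SE = sqrt(8) / sqrt(2) = 2.0.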
val container = LongQuantityStatsContainer.empty.add(value1).add(value2)
assert(container.count == 2, "when testing count")
assert(container.sum == 0, "when testing sum")
val stats = container.toStats
assert(stats.min == value1, "when testing min")
assert(stats.max == value2, "when testing max")
assert(stats.mean == expectedMean, "when testing mean")
assert(stats.standardError == expectedStandardError, "when testing standardError")
}
"LongQuantityStatsContainer.apply" should "return a container with expected stats when passing in a large value" in {
val value = Long.MinValue
val expectedMean = value
val container = LongQuantityStatsContainer(value)
assert(container.count == 1, "when testing count")
assert(container.sum == value, "when testing sum")
val stats = container.toStats
assert(stats.min == value, "when testing min")
assert(stats.max == value, "when testing max")
assert(stats.mean == expectedMean, "when testing mean")
assert(stats.standardError == Double.MaxValue, "when testing standardError")
}
it should "return a container with expected stats when passing in a list with the same large value several times" in {
val value = Long.MaxValue
val count = 100
val values = (1 to count).map { _ => value }
val expectedMean = value
val expectedStandardError = 0.0
val container = LongQuantityStatsContainer(values)
assert(container.count == count, "when testing count")
assert(container.sum == BigInt(value) * count, "when testing sum")
val stats = container.toStats
assert(stats.min == value, "when testing min")
assert(stats.max == value, "when testing max")
assert(stats.mean == expectedMean, "when testing mean")
    assert(stats.standardError == expectedStandardError, "when testing standardError")
}
it should "return a container with expected stats when passing in a list of different values" in {
val count = 5
val values = (1 to count).map { _.toLong }
val expectedMean = 3.0
val expectedStandardErrorLowerBound = 0.70710
val expectedStandardErrorUpperBound = 0.70711
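    // Assuming SE = sample standard deviation / sqrt(n):
    // sample std dev = sqrt((4 + 1 + 0 + 1 + 4) / 4) = sqrt(2.5) ≈ 1.5811,
    // so SE ≈ 1.5811 / sqrt(5) ≈ 0.70711, hence the bounds above.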
val container = LongQuantityStatsContainer(values)
assert(container.count == count, "when testing count")
assert(container.sum == values.sum, "when testing sum")
val stats = container.toStats
assert(stats.min == values.min, "when testing min")
assert(stats.max == values.max, "when testing max")
assert(stats.mean == expectedMean, "when testing mean")
assert(stats.standardError > expectedStandardErrorLowerBound, "when testing standardError lower bound")
assert(stats.standardError < expectedStandardErrorUpperBound, "when testing standardError upper bound")
}
}
|
codiply/barrio
|
src/test/scala/com/codiply/barrio/helpers/StatsContainerSpec.scala
|
Scala
|
apache-2.0
| 4,720 |
/*
* Copyright (c) 2014-2020 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.observers
import minitest.TestSuite
import monix.execution.Ack
import monix.execution.Ack.{Continue, Stop}
import monix.execution.schedulers.TestScheduler
import monix.reactive.Observer
import monix.reactive.OverflowStrategy.DropNew
import monix.execution.exceptions.DummyException
import scala.concurrent.{Future, Promise}
object OverflowStrategyDropNewSuite extends TestSuite[TestScheduler] {
def setup() = TestScheduler()
def tearDown(s: TestScheduler) = {
assert(s.state.tasks.isEmpty, "TestScheduler should have no pending tasks")
}
test("should not lose events, synchronous test 1") { implicit s =>
var number = 0
var wasCompleted = false
val underlying = new Observer[Int] {
def onNext(elem: Int): Future[Ack] = {
number += 1
Continue
}
def onError(ex: Throwable): Unit = {
s.reportFailure(ex)
}
def onComplete(): Unit = {
wasCompleted = true
}
}
val buffer = BufferedSubscriber[Int](Subscriber(underlying, s), DropNew(1000))
for (i <- 0 until 1000) buffer.onNext(i)
buffer.onComplete()
assert(!wasCompleted)
s.tick()
assert(number == 1000)
assert(wasCompleted)
}
test("should not lose events, synchronous test 2") { implicit s =>
var number = 0
var completed = false
val underlying = new Observer[Int] {
def onNext(elem: Int): Future[Ack] = {
number += 1
Continue
}
def onError(ex: Throwable): Unit = {
s.reportFailure(ex)
}
def onComplete(): Unit = {
completed = true
}
}
val buffer = BufferedSubscriber[Int](Subscriber(underlying, s), DropNew(1000))
def loop(n: Int): Unit =
if (n > 0)
s.executeAsync { () =>
buffer.onNext(n); loop(n - 1)
} else
buffer.onComplete()
loop(10000)
assert(!completed)
assertEquals(number, 0)
s.tick()
assert(completed)
assertEquals(number, 10000)
}
test("should not lose events, async test 1") { implicit s =>
var number = 0
var wasCompleted = false
val underlying = new Observer[Int] {
def onNext(elem: Int) = Future {
number += 1
Continue
}
def onError(ex: Throwable): Unit = {
s.reportFailure(ex)
}
def onComplete(): Unit = {
wasCompleted = true
}
}
val buffer = BufferedSubscriber[Int](Subscriber(underlying, s), DropNew(1000))
for (i <- 0 until 1000) buffer.onNext(i)
buffer.onComplete()
assert(!wasCompleted)
s.tick()
assert(number == 1000)
assert(wasCompleted)
}
test("should not lose events, async test 2") { implicit s =>
var number = 0
var completed = false
val underlying = new Observer[Int] {
def onNext(elem: Int) = Future {
number += 1
Continue
}
def onError(ex: Throwable): Unit =
s.reportFailure(ex)
def onComplete(): Unit =
completed = true
}
val buffer = BufferedSubscriber[Int](Subscriber(underlying, s), DropNew(10000))
def loop(n: Int): Unit =
if (n > 0)
s.executeAsync { () =>
buffer.onNext(n); loop(n - 1)
} else
buffer.onComplete()
loop(10000)
assert(!completed)
assertEquals(number, 0)
s.tick()
assert(completed)
assertEquals(number, 10000)
}
test("should drop incoming when over capacity") { implicit s =>
var received = 0
var wasCompleted = false
val promise = Promise[Ack]()
val underlying = new Observer[Int] {
def onNext(elem: Int) = {
received += elem
promise.future
}
def onError(ex: Throwable) = ()
def onComplete() = {
wasCompleted = true
}
}
val buffer = BufferedSubscriber[Int](Subscriber(underlying, s), DropNew(5))
for (i <- 1 to 9)
assertEquals(buffer.onNext(i), Continue)
for (i <- 0 until 5)
assertEquals(buffer.onNext(10 + i), Continue)
s.tick()
assertEquals(received, 1)
promise.success(Continue); s.tick()
assertEquals(received, (1 to 8).sum)
for (i <- 1 to 8) assertEquals(buffer.onNext(i), Continue)
s.tick()
assertEquals(received, (1 to 8).sum * 2)
buffer.onComplete(); s.tick()
assert(wasCompleted, "wasCompleted should be true")
}
test("should send onError when empty") { implicit s =>
var errorThrown: Throwable = null
val buffer = BufferedSubscriber[Int](
new Subscriber[Int] {
def onError(ex: Throwable) = {
errorThrown = ex
}
def onNext(elem: Int) = throw new IllegalStateException()
def onComplete() = throw new IllegalStateException()
val scheduler = s
},
DropNew(5)
)
buffer.onError(DummyException("dummy"))
s.tickOne()
assertEquals(errorThrown, DummyException("dummy"))
val r = buffer.onNext(1)
assertEquals(r, Stop)
}
test("should send onError when in flight") { implicit s =>
var errorThrown: Throwable = null
val buffer = BufferedSubscriber[Int](
new Subscriber[Int] {
def onError(ex: Throwable) = {
errorThrown = ex
}
def onNext(elem: Int) = Continue
def onComplete() = throw new IllegalStateException()
val scheduler = s
},
DropNew(5)
)
buffer.onNext(1)
buffer.onError(DummyException("dummy"))
s.tickOne()
assertEquals(errorThrown, DummyException("dummy"))
}
test("should send onError when at capacity") { implicit s =>
var errorThrown: Throwable = null
val promise = Promise[Ack]()
val buffer = BufferedSubscriber[Int](
new Subscriber[Int] {
def onError(ex: Throwable) = {
errorThrown = ex
}
def onNext(elem: Int) = promise.future
def onComplete() = throw new IllegalStateException()
val scheduler = s
},
DropNew(5)
)
buffer.onNext(1)
buffer.onNext(2)
buffer.onNext(3)
buffer.onNext(4)
buffer.onNext(5)
buffer.onError(DummyException("dummy"))
promise.success(Continue)
s.tick()
assertEquals(errorThrown, DummyException("dummy"))
}
test("should do onComplete only after all the queue was drained") { implicit s =>
var sum = 0L
var wasCompleted = false
val startConsuming = Promise[Continue.type]()
val buffer = BufferedSubscriber[Long](
new Subscriber[Long] {
def onNext(elem: Long) = {
sum += elem
startConsuming.future
}
def onError(ex: Throwable) = throw ex
def onComplete() = wasCompleted = true
val scheduler = s
},
DropNew(10000)
)
(0 until 9999).foreach { x => buffer.onNext(x.toLong); () }
buffer.onComplete()
startConsuming.success(Continue)
s.tick()
assert(wasCompleted)
assert(sum == (0 until 9999).sum)
}
test("should do onComplete only after all the queue was drained, test2") { implicit s =>
var sum = 0L
var wasCompleted = false
val buffer = BufferedSubscriber[Long](
new Subscriber[Long] {
def onNext(elem: Long) = {
sum += elem
Continue
}
def onError(ex: Throwable) = throw ex
def onComplete() = wasCompleted = true
val scheduler = s
},
DropNew(10000)
)
(0 until 9999).foreach { x => buffer.onNext(x.toLong); () }
buffer.onComplete()
s.tick()
assert(wasCompleted)
assert(sum == (0 until 9999).sum)
}
test("should do onError only after the queue was drained") { implicit s =>
var sum = 0L
var errorThrown: Throwable = null
val startConsuming = Promise[Continue.type]()
val buffer = BufferedSubscriber[Long](
new Subscriber[Long] {
def onNext(elem: Long) = {
sum += elem
startConsuming.future
}
def onError(ex: Throwable) = errorThrown = ex
def onComplete() = throw new IllegalStateException()
val scheduler = s
},
DropNew(10000)
)
(0 until 9999).foreach { x => buffer.onNext(x.toLong); () }
buffer.onError(DummyException("dummy"))
startConsuming.success(Continue)
s.tick()
assertEquals(errorThrown, DummyException("dummy"))
assertEquals(sum, (0 until 9999).sum.toLong)
}
test("should do onError only after all the queue was drained, test2") { implicit s =>
var sum = 0L
var errorThrown: Throwable = null
val buffer = BufferedSubscriber[Long](
new Subscriber[Long] {
def onNext(elem: Long) = {
sum += elem
Continue
}
def onError(ex: Throwable) = errorThrown = ex
def onComplete() = throw new IllegalStateException()
val scheduler = s
},
DropNew(10000)
)
(0 until 9999).foreach { x => buffer.onNext(x.toLong); () }
buffer.onError(DummyException("dummy"))
s.tick()
assertEquals(errorThrown, DummyException("dummy"))
assertEquals(sum, (0 until 9999).sum.toLong)
}
test("subscriber STOP after a synchronous onNext") { implicit s =>
var received = 0
var wasCompleted = false
val underlying = new Subscriber[Int] {
val scheduler = s
def onNext(elem: Int): Future[Ack] = {
received += elem
Stop
}
def onError(ex: Throwable): Unit =
throw ex
def onComplete(): Unit =
wasCompleted = true
}
val buffer = BufferedSubscriber[Int](underlying, DropNew(16))
assertEquals(buffer.onNext(1), Continue)
s.tick()
assertEquals(buffer.onNext(2), Stop)
buffer.onComplete(); s.tick()
assert(!wasCompleted, "!wasCompleted")
assertEquals(received, 1)
}
test("subscriber STOP after an asynchronous onNext") { implicit s =>
var received = 0
var wasCompleted = false
val underlying = new Subscriber[Int] {
val scheduler = s
def onNext(elem: Int): Future[Ack] = Future {
received += elem
Stop
}
def onError(ex: Throwable): Unit =
throw ex
def onComplete(): Unit =
wasCompleted = true
}
val buffer = BufferedSubscriber[Int](underlying, DropNew(16))
assertEquals(buffer.onNext(1), Continue)
s.tick()
assertEquals(received, 1)
buffer.onNext(2); s.tick() // uncertain
assertEquals(buffer.onNext(3), Stop)
buffer.onComplete(); s.tick()
assert(!wasCompleted, "!wasCompleted")
assertEquals(received, 1)
}
test("stop after a synchronous Failure(ex)") { implicit s =>
var received = 0
var wasCompleted = false
var errorThrown: Throwable = null
val dummy = new RuntimeException("dummy")
val underlying = new Subscriber[Int] {
val scheduler = s
def onNext(elem: Int): Future[Ack] = {
received += elem
Future.failed(dummy)
}
def onError(ex: Throwable): Unit =
errorThrown = ex
def onComplete(): Unit =
wasCompleted = true
}
val buffer = BufferedSubscriber[Int](underlying, DropNew(16))
assertEquals(buffer.onNext(1), Continue)
s.tick()
assertEquals(buffer.onNext(2), Stop)
buffer.onComplete(); s.tick()
assert(!wasCompleted, "!wasCompleted")
assertEquals(received, 1)
assertEquals(errorThrown, dummy)
}
test("stop after an asynchronous Failure(ex)") { implicit s =>
var received = 0
var wasCompleted = false
var errorThrown: Throwable = null
val dummy = new RuntimeException("dummy")
val underlying = new Subscriber[Int] {
val scheduler = s
def onNext(elem: Int): Future[Ack] = Future {
received += elem
throw dummy
}
def onError(ex: Throwable): Unit =
errorThrown = ex
def onComplete(): Unit =
wasCompleted = true
}
val buffer = BufferedSubscriber[Int](underlying, DropNew(16))
assertEquals(buffer.onNext(1), Continue)
    s.tick(); buffer.onNext(2) // ack uncertain: the failure is signaled asynchronously
s.tick()
assertEquals(buffer.onNext(3), Stop)
buffer.onComplete(); s.tick()
assert(!wasCompleted, "!wasCompleted")
assertEquals(received, 1)
assertEquals(errorThrown, dummy)
}
test("should protect against user-code in onNext") { implicit s =>
var received = 0
var wasCompleted = false
var errorThrown: Throwable = null
val dummy = new RuntimeException("dummy")
val underlying = new Subscriber[Int] {
val scheduler = s
def onNext(elem: Int): Future[Ack] = {
received += elem
throw dummy
}
def onError(ex: Throwable): Unit =
errorThrown = ex
def onComplete(): Unit =
wasCompleted = true
}
val buffer = BufferedSubscriber[Int](underlying, DropNew(16))
assertEquals(buffer.onNext(1), Continue)
s.tick()
assertEquals(buffer.onNext(2), Stop)
buffer.onComplete(); s.tick()
assert(!wasCompleted, "!wasCompleted")
assertEquals(received, 1)
assertEquals(errorThrown, dummy)
}
test("should protect against user-code in onComplete") { implicit s =>
var received = 0
var errorThrown: Throwable = null
val dummy = new RuntimeException("dummy")
val underlying = new Subscriber[Int] {
val scheduler = s
def onNext(elem: Int): Future[Ack] = {
received += elem
Continue
}
def onError(ex: Throwable): Unit =
errorThrown = ex
def onComplete(): Unit =
throw dummy
}
val buffer = BufferedSubscriber[Int](underlying, DropNew(16))
buffer.onNext(1)
buffer.onComplete()
s.tick()
assertEquals(received, 1)
assertEquals(errorThrown, null)
assertEquals(s.state.lastReportedError, dummy)
}
test("should protect against user-code in onError") { implicit s =>
var received = 0
var errorThrown: Throwable = null
val dummy1 = new RuntimeException("dummy1")
val dummy2 = new RuntimeException("dummy2")
val underlying = new Subscriber[Int] {
val scheduler = s
def onNext(elem: Int): Future[Ack] = {
received += elem
Future.failed(dummy1)
}
def onError(ex: Throwable): Unit = {
errorThrown = ex
throw dummy2
}
def onComplete(): Unit =
throw new IllegalStateException("onComplete")
}
val buffer = BufferedSubscriber[Int](underlying, DropNew(16))
buffer.onNext(1)
s.tick()
assertEquals(received, 1)
assertEquals(errorThrown, dummy1)
assertEquals(s.state.lastReportedError, dummy2)
}
test("streaming null is not allowed") { implicit s =>
var errorThrown: Throwable = null
val underlying = new Subscriber[String] {
val scheduler = s
def onNext(elem: String) =
Continue
def onError(ex: Throwable): Unit =
errorThrown = ex
def onComplete(): Unit =
throw new IllegalStateException("onComplete")
}
val buffer = BufferedSubscriber[String](underlying, DropNew(16))
buffer.onNext(null)
s.tick()
assert(errorThrown != null, "errorThrown != null")
assert(errorThrown.isInstanceOf[NullPointerException], "errorThrown.isInstanceOf[NullPointerException]")
}
test("buffer size is required to be greater than 1") { implicit s =>
intercept[IllegalArgumentException] {
BufferedSubscriber[Int](Subscriber.empty[Int], DropNew(1))
()
}
()
}
}
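// A minimal sketch of the DropNew semantics exercised above (assumptions: a Monix
// TestScheduler `s` and some Subscriber `underlying`, as in the tests):
//
//   val buffer = BufferedSubscriber[Int](underlying, DropNew(8))
//   (1 to 100).foreach { i => buffer.onNext(i); () } // events beyond the 8-slot capacity are dropped
//   buffer.onComplete(); s.tick()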
|
alexandru/monifu
|
monix-reactive/shared/src/test/scala/monix/reactive/observers/OverflowStrategyDropNewSuite.scala
|
Scala
|
apache-2.0
| 16,091 |
/**
* Copyright 2013-2015 PayPal
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import com.typesafe.sbt.SbtSite.SiteKeys._
import java.io.PrintWriter
import org.eclipse.jgit.storage.file.FileRepositoryBuilder
import sbt._
import Keys._
import sbtunidoc.Plugin._
import com.typesafe.sbt.pgp.PgpKeys._
import com.typesafe.sbt.SbtSite._
import com.typesafe.sbt.SbtGit._
import GitKeys._
import com.typesafe.sbt.SbtGhPages._
import GhPagesKeys._
import sbtrelease._
import sbtrelease.Utilities._
import scala.io.Source
import net.virtualvoid.sbt.graph.Plugin
import org.scalastyle.sbt.ScalastylePlugin
import org.scalastyle.sbt.ScalastylePlugin._
import de.johoop.jacoco4sbt._
import JacocoPlugin._
import ReleaseStateTransformations._
import ReleasePlugin._
import ReleaseKeys._
object BuildUtilitiesKeys {
lazy val ghpagesDir = SettingKey[String]("build-utilities-ghpages-directory", "unique folder structure for the git project gh-pages branch")
lazy val readmeTemplateMappings = SettingKey[Map[String, String]]("build-utilities-readme-template-mappings", "Mappings for generating readme file")
}
object BuildSettings {
import AdditionalReleaseSteps._
import BuildUtilitiesKeys._
val org = "com.paypal"
val scalaVsn = "2.10.4"
val nexusHost = "https://oss.sonatype.org"
private val gitDir = new File(".", ".git")
private val repo = FileRepositoryBuilder.create(gitDir)
private val originUrl = repo.getConfig.getString("remote", "origin", "url")
private def extractDirStructure(str: String): String = {
val gitRemoved = str.replace(".git", "")
val colonsReplaced = gitRemoved.replace(":", "/")
val splitStr = colonsReplaced.split('/')
val repo = splitStr(splitStr.length - 1)
val name = splitStr(splitStr.length - 2)
s"$name/$repo"
}
lazy val standardPluginSettings = Defaults.coreDefaultSettings ++
releaseSettings ++
Plugin.graphSettings ++
ScalastylePlugin.projectSettings ++
jacoco.settings ++
site.settings ++
ghpages.settings ++
unidocSettings ++
Seq(
ghpagesNoJekyll := false,
ghpagesDir := extractDirStructure(originUrl),
repository <<= ghpagesDir.apply (dir => file(System.getProperty("user.home")) / ".sbt" / "ghpages" / dir),
siteMappings <++= (mappings in (ScalaUnidoc, packageDoc), version).map { (mapping, ver) =>
for((file, path) <- mapping) yield (file, s"api/$ver/$path")
},
synchLocal <<= (privateMappings, updatedRepository, gitRunner, streams).map { (mappings, repo, git, s) =>
val betterMappings = mappings.map { case (file, t) => (file, repo / t) }
IO.copy(betterMappings)
repo
},
git.remoteRepo := originUrl,
tagName <<= (version in ThisBuild).map(a => a),
readmeTemplateMappings <<= (version in ThisBuild) { ver =>
Map("version" -> ver)
},
readmeTemplateMappings ++= Map("auto-gen" -> "THIS FILE WAS AUTO GENERATED BY THE README TEMPLATE. DO NOT EDIT DIRECTLY."),
releaseProcess := Seq[ReleaseStep](
checkSnapshotDependencies,
inquireVersions,
ensureChangelogEntry,
runTest,
setReleaseVersion,
commitReleaseVersion,
generateReadme,
tagRelease,
publishArtifacts.copy(action = publishSignedAction),
generateAndPushDocs,
setNextVersion,
commitNextVersion,
pushChanges
)
)
lazy val standardSettings = standardPluginSettings ++ Seq(
organization := org,
name := "horizon",
scalaVersion := scalaVsn,
sbtPlugin := true,
conflictManager := ConflictManager.strict,
fork := true,
scalacOptions ++= Seq("-deprecation", "-unchecked", "-feature", "-Xlint", "-target:jvm-1.7"),
scalacOptions in Test ++= Seq("-Yrangepos"),
dependencyOverrides <++= scalaVersion { vsn => Set(
"org.scala-lang" % "scala-library" % vsn,
"org.scala-lang" % "scala-compiler" % vsn
)},
addSbtPlugin("com.github.gseitz" % "sbt-release" % "0.8.5"),
addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.5.2" exclude("com.typesafe.sbt", "sbt-git")),
addSbtPlugin("com.typesafe.sbt" % "sbt-site" % "0.7.0"),
addSbtPlugin("com.typesafe.sbt" % "sbt-git" % "0.6.4" exclude ("org.eclipse.jgit", "org.eclipse.jgit")),
addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.0.0"),
addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.3.0"),
libraryDependencies ++= Seq(
"org.eclipse.jgit" % "org.eclipse.jgit" % "3.3.0.201403021825-r",
"org.specs2" %% "specs2" % "2.4.15" % "test"
),
apiURL := Some(url("http://paypal.github.io/horizon/api/")),
autoAPIMappings := true,
publishTo := {
val nexus = s"$nexusHost/"
if (isSnapshot.value) {
Some("snapshots" at nexus + "content/repositories/snapshots")
} else {
Some("releases" at nexus + "service/local/staging/deploy/maven2")
}
},
// scalaz-stream_2.10 is not on Maven Central, until that changes, this line needs to stay in
resolvers += Resolver.bintrayRepo("scalaz", "releases"),
scalastyleConfigUrl in Compile := Option(url("https://raw.githubusercontent.com/paypal/scala-style-guide/develop/scalastyle-config.xml")),
publishMavenStyle := true,
publishArtifact in Test := false,
pomIncludeRepository := { _ => false },
pomExtra :=
<url>https://github.com/paypal/horizon</url>
<licenses>
<license>
<name>Apache 2</name>
<url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
<distribution>repo</distribution>
</license>
</licenses>
<scm>
<url>[email protected]:paypal/horizon.git</url>
<connection>scm:git:[email protected]:paypal/horizon.git</connection>
</scm>
<developers>
<developer>
<id>msv</id>
<name>Matt Vaznaian</name>
<url>https://github.com/msv</url>
</developer>
<developer>
<id>arschles</id>
<name>Aaron Schlesinger</name>
<url>https://github.com/arschles</url>
</developer>
<developer>
<id>taylorleese</id>
<name>Taylor Leese</name>
<url>https://github.com/taylorleese</url>
</developer>
</developers>
)
}
object UtilitiesBuild extends Build {
import BuildSettings._
lazy val root = Project(id = "root", base = file("."), settings = standardSettings)
}
/**
 * Adds release steps to ensure an entry for the current release version is present in the
 * changelog, to generate and push ScalaDocs to the gh-pages branch, and to generate the
 * readme with the release version injected.
 */
object AdditionalReleaseSteps {
import BuildUtilitiesKeys._
lazy val ensureChangelogEntry: ReleaseStep = { st: State =>
try {
checkChangelog(st)
st
} catch {
case entry: ChangelogEntryMissingException => sys.error(entry.getMessage)
}
}
val changelog = "CHANGELOG.md"
class ChangelogEntryMissingException(e: Throwable) extends Exception(e)
private def getReleasedVersion(st: State): String = st.get(versions).getOrElse(sys.error("No versions are set! Was this release part executed before inquireVersions?"))._1
private def checkChangelog(st: State) {
try {
val currentChangelog = Source.fromFile(changelog).mkString
val version = getReleasedVersion(st)
if (!currentChangelog.contains(version)) {
throw new Exception(s"No changelog entry found for current release version $version.")
}
} catch {
case e: Throwable => throw new ChangelogEntryMissingException(e)
}
}
lazy val publishSignedAction: State => State = { st: State =>
val extracted = st.extract
val ref = extracted.get(thisProjectRef)
extracted.runAggregated(publishSigned in Global in ref, st)
}
lazy val generateAndPushDocs: ReleaseStep = { st: State =>
val st2 = executeTask(makeSite, "Making doc site")(st)
executeTask(pushSite, "Publishing doc site")(st2)
}
private def executeTask(task: TaskKey[_], info: String) = (st: State) => {
st.log.info(info)
val extracted = st.extract
val ref: ProjectRef = extracted.get(thisProjectRef)
val (newState, _) = extracted.runTask(task in ref, st)
newState
}
val readme = "README.md"
val readmeTemplate = "Readme-Template.md"
lazy val generateReadme: ReleaseStep = { st: State =>
val version = getReleasedVersion(st)
generateReadmeFromMappings(st, version)
commitReadme(st, version)
st
}
private def generateReadmeFromMappings(st: State, newVersion: String): Unit = {
val extracted = st.extract
val templateMappings = extracted.get(readmeTemplateMappings)
val template = Source.fromFile(readmeTemplate).mkString
val out = new PrintWriter(readme, "UTF-8")
try {
val newReadme = templateMappings.foldLeft(template) { (currentReadme, mapping) =>
val (regKey, replacement) = mapping
val regex = s"\\\\{\\\\{$regKey\\\\}\\\\}".r
regex.replaceAllIn(currentReadme, replacement)
}
newReadme.foreach(out.write(_))
} finally {
out.close()
}
}
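  // Illustrative example (hypothetical template content): with readmeTemplateMappings
  // containing "version" -> "1.2.3", a template line such as
  //   libraryDependencies += "com.paypal" %% "horizon" % "{{version}}"
  // is written to README.md as
  //   libraryDependencies += "com.paypal" %% "horizon" % "1.2.3"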
private def commitReadme(st: State, newVersion: String): Unit = {
val vcs = st.extract.get(versionControlSystem).getOrElse(sys.error("Unable to get version control system."))
vcs.add(readme) !! st.log
vcs.commit(s"README.md updated to $newVersion") ! st.log
}
}
|
webhost/horizon
|
project/Build.scala
|
Scala
|
apache-2.0
| 9,924 |
/**
* Copyright 2015 ICT.
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.ac.ict.acs.netflow.metrics
import java.io.{ FileInputStream, InputStream }
import java.util.Properties
import scala.collection.mutable
import cn.ac.ict.acs.netflow.Logging
import scala.util.matching.Regex
class MetricsConfig(val configFile: Option[String]) extends Logging {
private val DEFAULT_PREFIX = "*"
private val INSTANCE_REGEX = "^(\\\\*|[a-zA-Z]+)\\\\.(.+)".r
private val DEFAULT_METRICS_CONF_FILENAME = "metrics.properties"
private[metrics] val properties = new Properties()
private[metrics] var propertyCategories: mutable.HashMap[String, Properties] = null
def initialize() {
    // If no config file was given, try to load the default file from the class path
val isOpt: Option[InputStream] = configFile.map(new FileInputStream(_)).orElse {
try {
Option(getClass.getClassLoader.getResourceAsStream(DEFAULT_METRICS_CONF_FILENAME))
} catch {
case e: Exception =>
logError("Error loading default configuration file", e)
None
}
}
isOpt.foreach { is =>
try {
properties.load(is)
} finally {
is.close()
}
}
propertyCategories = subProperties(properties, INSTANCE_REGEX)
if (propertyCategories.contains(DEFAULT_PREFIX)) {
import scala.collection.JavaConversions._
val defaultProperty = propertyCategories(DEFAULT_PREFIX)
for {
(inst, prop) <- propertyCategories
if (inst != DEFAULT_PREFIX)
(k, v) <- defaultProperty
if (prop.getProperty(k) == null)
} {
prop.setProperty(k, v)
}
}
}
def subProperties(prop: Properties, regex: Regex): mutable.HashMap[String, Properties] = {
val subProperties = new mutable.HashMap[String, Properties]
import scala.collection.JavaConversions._
prop.foreach { kv =>
if (regex.findPrefixOf(kv._1).isDefined) {
val regex(prefix, suffix) = kv._1
subProperties.getOrElseUpdate(prefix, new Properties).setProperty(suffix, kv._2)
}
}
subProperties
}
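  // Example: given the properties
  //   *.sink.console.period = 10
  //   master.source.jvm.class = JvmSource
  // subProperties(properties, INSTANCE_REGEX) returns a map with keys "*" and "master",
  // where the "master" Properties holds "source.jvm.class" -> "JvmSource"
  // and the "*" Properties holds "sink.console.period" -> "10".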
def getInstance(inst: String): Properties = {
propertyCategories.get(inst) match {
case Some(s) => s
case None => propertyCategories.getOrElse(DEFAULT_PREFIX, new Properties)
}
}
}
|
ayscb/netflow
|
common/src/main/scala/cn/ac/ict/acs/netflow/metrics/MetricsConfig.scala
|
Scala
|
apache-2.0
| 3,073 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala
package reflect
package io
import java.net.URL
import java.io.{ByteArrayInputStream, FilterInputStream, IOException, InputStream}
import java.io.{File => JFile}
import java.util.concurrent.{ArrayBlockingQueue, TimeUnit}
import java.util.zip.{ZipEntry, ZipFile, ZipInputStream}
import java.util.jar.Manifest
import scala.annotation.tailrec
import scala.collection.mutable
import scala.jdk.CollectionConverters._
import scala.reflect.internal.JDK9Reflectors
import ZipArchive._
/** An abstraction for zip files and streams. Everything is written the way
* it is for performance: we come through here a lot on every run. Be careful
* about changing it.
*
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
object ZipArchive {
private[io] val closeZipFile = sys.props.get("scala.classpath.closeZip").map(_.toBoolean).getOrElse(false)
private[io] final val RootEntry = "/"
/**
* @param file a File
* @return A ZipArchive if `file` is a readable zip file, otherwise null.
*/
def fromFile(file: File): FileZipArchive = fromFile(file.jfile)
def fromFile(file: JFile): FileZipArchive =
try { new FileZipArchive(file) }
catch { case _: IOException => null }
/**
* @param url the url of a zip file
* @return A ZipArchive backed by the given url.
*/
def fromURL(url: URL): URLZipArchive = new URLZipArchive(url)
def fromManifestURL(url: URL): AbstractFile = new ManifestResources(url)
private def dirName(path: String) = splitPath(path, front = true)
private def baseName(path: String) = splitPath(path, front = false)
private def splitPath(path0: String, front: Boolean): String = {
val isDir = path0.charAt(path0.length - 1) == '/'
val path = if (isDir) path0.substring(0, path0.length - 1) else path0
val idx = path.lastIndexOf('/')
if (idx < 0)
if (front) RootEntry
else path
else
if (front) path.substring(0, idx + 1)
else path.substring(idx + 1)
}
@deprecated("Kept for compatibility", "2.13.1")
def pathToDotted(path: String): String = {
if (RootEntry == path) ""
else {
val slashEnd = path.endsWith("/")
val len = path.length - (if (slashEnd) 1 else 0)
val result = new Array[Char](len)
var i = 0
while (i < len) {
val char = path.charAt(i)
result(i) = if (char == '/') '.' else char
i += 1
}
new String(result)
}
}
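  // For example, pathToDotted("scala/collection/") == "scala.collection"
  // and pathToDotted("/") == "".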
}
/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
abstract class ZipArchive(override val file: JFile, release: Option[String]) extends AbstractFile with Equals {
self =>
def this(file: JFile) = this(file, None)
override lazy val canonicalPath = super.canonicalPath
override def underlyingSource = Some(this)
def isDirectory = true
def lookupName(name: String, directory: Boolean) = unsupported()
def lookupNameUnchecked(name: String, directory: Boolean) = unsupported()
def create() = unsupported()
def delete() = unsupported()
def output = unsupported()
def container = unsupported()
def absolute = unsupported()
/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
sealed abstract class Entry(path: String) extends VirtualFile(baseName(path), path) {
// have to keep this name for compat with sbt's compiler-interface
def getArchive: ZipFile = null
override def underlyingSource = Some(self)
override def toString = self.path + "(" + path + ")"
override def unsafeToByteArray: Array[Byte] = toByteArray
}
/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
class DirEntry(path: String) extends Entry(path) {
val entries = mutable.HashMap[String, Entry]()
override def isDirectory = true
override def iterator: Iterator[Entry] = entries.valuesIterator
override def lookupName(name: String, directory: Boolean): Entry = {
if (directory) entries.get(name + "/").orNull
else entries.get(name).orNull
}
}
protected def getDir(dirs: java.util.Map[String, DirEntry], entry: ZipEntry): DirEntry = {
def ensureDir(path: String): DirEntry =
dirs.get(path) match {
case null =>
val parent = ensureDir(dirName(path))
val dir = new DirEntry(path)
parent.entries(baseName(path)) = dir
dirs.put(path, dir)
dir
case dir => dir
}
val name = if (entry.isDirectory) entry.getName else dirNameUsingLast(entry.getName)
ensureDir(name)
}
@volatile private[this] var lastDirName: String = RootEntry
private def dirNameUsingLast(name: String): String = {
val last = lastDirName
if (name.length > last.length + 1 && name.startsWith(last) && name.charAt(last.length) == '/' && name.indexOf('/', last.length + 1) == -1) {
// OPT: Avoid string allocation when reading successive entries in a zip index from the same directory.
lastDirName
} else {
val result = dirName(name)
lastDirName = result
result
}
}
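  // For example, successive zip-index entries "com/example/A.class" and "com/example/B.class"
  // both resolve to the parent directory "com/example/"; caching the last result is intended
  // to avoid re-splitting and re-allocating that string for every entry in the same directory.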
def close(): Unit
}
/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
final class FileZipArchive(file: JFile, release: Option[String]) extends ZipArchive(file, release) {
def this(file: JFile) = this(file, None)
private object zipFilePool {
private[this] val zipFiles = new ArrayBlockingQueue[ZipFile](Runtime.getRuntime.availableProcessors())
def acquire: ZipFile = {
val zf = zipFiles.poll(0, TimeUnit.MILLISECONDS)
zf match {
case null =>
openZipFile()
case _ =>
zf
}
}
def release(zf: ZipFile): Unit = {
if (!zipFiles.offer(zf, 0, TimeUnit.MILLISECONDS))
zf.close()
}
def close(): Unit = {
val zipFilesToClose = new java.util.ArrayList[ZipFile]
zipFiles.drainTo(zipFilesToClose)
zipFilesToClose.iterator().forEachRemaining(_.close())
}
}
private[this] def openZipFile(): ZipFile = try {
release match {
case Some(r) if file.getName.endsWith(".jar") =>
val releaseVersion = JDK9Reflectors.runtimeVersionParse(r)
JDK9Reflectors.newJarFile(file, true, ZipFile.OPEN_READ, releaseVersion)
case _ =>
new ZipFile(file)
}
} catch {
case ioe: IOException => throw new IOException("Error accessing " + file.getPath, ioe)
}
private[this] class LazyEntry(
name: String,
time: Long,
size: Int
) extends Entry(name) {
override def lastModified: Long = time // could be stale
override def input: InputStream = {
val zipFile = openZipFile()
val entry = zipFile.getEntry(name) // with `-release`, returns the correct version under META-INF/versions
val delegate = zipFile.getInputStream(entry)
new FilterInputStream(delegate) {
override def close(): Unit = { zipFile.close() }
}
}
override def sizeOption: Option[Int] = Some(size) // could be stale
}
  // Keeps file handle(s) to the ZipFile open in the pool `zipFilePool`,
  // which forbids file mutation on Windows and leaks memory on all OSes (typically by stopping
  // classloaders from being garbage collected), but it is slightly faster than LazyEntry.
//
// Note: scala/scala#7366 / scala/scala#7644, LeakyEntry _does_ close the file when `Global.close` is called,
// or after a short delay specified by FileBasedCache.deferCloseMs if classpath caching is enabled.
// So the file handle "leak" is far less a problem than it used do be.
private[this] class LeakyEntry(
name: String,
time: Long,
size: Int
) extends Entry(name) {
override def lastModified: Long = time // could be stale
override def input: InputStream = {
val zipFile = zipFilePool.acquire
val entry = zipFile.getEntry(name) // with `-release`, returns the correct version under META-INF/versions
val delegate = zipFile.getInputStream(entry)
new FilterInputStream(delegate) {
override def close(): Unit = { zipFilePool.release(zipFile) }
}
}
override def sizeOption: Option[Int] = Some(size)
}
private[this] val dirs = new java.util.HashMap[String, DirEntry]()
lazy val root: DirEntry = {
val root = new DirEntry(RootEntry)
dirs.put(RootEntry, root)
val zipFile = openZipFile()
val enum = zipFile.entries()
try {
while (enum.hasMoreElements) {
val zipEntry = enum.nextElement
if (!zipEntry.getName.startsWith("META-INF/versions/")) {
if (!zipEntry.isDirectory) {
val dir = getDir(dirs, zipEntry)
val mrEntry = if (release.isDefined) {
zipFile.getEntry(zipEntry.getName)
} else zipEntry
val f =
if (ZipArchive.closeZipFile)
new LazyEntry(
zipEntry.getName,
mrEntry.getTime,
mrEntry.getSize.toInt)
else
new LeakyEntry(zipEntry.getName,
mrEntry.getTime,
mrEntry.getSize.toInt)
dir.entries(f.name) = f
}
}
}
} finally {
if (!ZipArchive.closeZipFile)
zipFilePool.release(zipFile)
}
root
}
lazy val allDirs: java.util.Map[String, DirEntry] = { root; dirs }
def iterator: Iterator[Entry] = root.iterator
def name = file.getName
def path = file.getPath
def input = File(file).inputStream()
def lastModified = file.lastModified
override def sizeOption = Some(file.length.toInt)
override def canEqual(other: Any) = other.isInstanceOf[FileZipArchive]
override def hashCode() = file.hashCode
override def equals(that: Any) = that match {
case x: FileZipArchive => file.getAbsoluteFile == x.file.getAbsoluteFile
case _ => false
}
override def close(): Unit = {
zipFilePool.close()
}
}
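// A minimal usage sketch (hypothetical jar path; `fromFile` returns null if the
// file is not a readable zip):
//
//   val archive = ZipArchive.fromFile(new java.io.File("lib/example.jar"))
//   if (archive != null)
//     try archive.iterator.foreach(e => println(e.path)) // top-level entries only
//     finally archive.close()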
/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
final class URLZipArchive(val url: URL) extends ZipArchive(null) {
def iterator: Iterator[Entry] = {
val root = new DirEntry(RootEntry)
val dirs = new java.util.HashMap[String, DirEntry]()
dirs.put(RootEntry, root)
val in = new ZipInputStream(new ByteArrayInputStream(Streamable.bytes(input)))
closeables ::= in
@tailrec def loop(): Unit = {
val zipEntry = in.getNextEntry()
class EmptyFileEntry() extends Entry(zipEntry.getName) {
override def toByteArray: Array[Byte] = null
override def sizeOption = Some(0)
}
class FileEntry() extends Entry(zipEntry.getName) {
override val toByteArray: Array[Byte] = {
val len = zipEntry.getSize().toInt
val arr = if (len == 0) Array.emptyByteArray else new Array[Byte](len)
var offset = 0
@tailrec
def loop(): Unit = {
if (offset < len) {
val read = in.read(arr, offset, len - offset)
if (read >= 0) {
offset += read
loop()
}
}
}
loop()
if (offset == arr.length) arr
else throw new IOException("Input stream truncated: read %d of %d bytes".format(offset, len))
}
override def sizeOption = Some(zipEntry.getSize().toInt)
}
if (zipEntry != null) {
val dir = getDir(dirs, zipEntry)
if (zipEntry.isDirectory)
dir
else {
val f = if (zipEntry.getSize() == 0) new EmptyFileEntry() else new FileEntry()
dir.entries(f.name) = f
}
in.closeEntry()
loop()
}
}
loop()
try root.iterator
finally dirs.clear()
}
def name = url.getFile()
def path = url.getPath()
def input = url.openStream()
def lastModified =
try url.openConnection().getLastModified()
catch { case _: IOException => 0 }
override def canEqual(other: Any) = other.isInstanceOf[URLZipArchive]
override def hashCode() = url.hashCode
override def equals(that: Any) = that match {
case x: URLZipArchive => url == x.url
case _ => false
}
private[this] var closeables: List[java.io.Closeable] = Nil
def close(): Unit = {
closeables.foreach(_.close())
}
}
final class ManifestResources(val url: URL) extends ZipArchive(null) {
def iterator = {
val root = new DirEntry(RootEntry)
val dirs = new java.util.HashMap[String, DirEntry]
dirs.put(RootEntry, root)
val manifest = new Manifest(input)
closeables ::= input
val iter = manifest.getEntries().keySet().iterator.asScala.filter(_.endsWith(".class")).map(new ZipEntry(_))
for (zipEntry <- iter) {
val dir = getDir(dirs, zipEntry)
if (!zipEntry.isDirectory) {
class FileEntry() extends Entry(zipEntry.getName) {
override def lastModified = zipEntry.getTime()
override def input = resourceInputStream(path)
override def sizeOption = None
}
val f = new FileEntry()
dir.entries(f.name) = f
}
}
try root.iterator
finally dirs.clear()
}
def name = path
def path: String = {
val s = url.getPath
val n = s.lastIndexOf('!')
s.substring(0, n)
}
def input = url.openStream()
def lastModified =
try url.openConnection().getLastModified()
catch { case _: IOException => 0 }
override def canEqual(other: Any) = other.isInstanceOf[ManifestResources]
override def hashCode() = url.hashCode
override def equals(that: Any) = that match {
case x: ManifestResources => url == x.url
case _ => false
}
private def resourceInputStream(path: String): InputStream = {
new FilterInputStream(null) {
override def read(): Int = {
      if (in == null) in = Thread.currentThread().getContextClassLoader().getResourceAsStream(path)
      if (in == null) throw new RuntimeException(path + " not found")
super.read()
}
override def close(): Unit = {
super.close()
in = null
}
}
}
private[this] var closeables: List[java.io.Closeable] = Nil
override def close(): Unit = {
closeables.foreach(_.close())
}
}
|
lrytz/scala
|
src/reflect/scala/reflect/io/ZipArchive.scala
|
Scala
|
apache-2.0
| 14,832 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.runtime.stream.sql
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.java.typeutils.RowTypeInfo
import org.apache.flink.api.scala._
import org.apache.flink.api.scala.typeutils.Types
import org.apache.flink.table.api.scala._
import org.apache.flink.table.dataformat.{BaseRow, GenericRow}
import org.apache.flink.table.planner.runtime.utils.{StreamingTestBase, TestData, TestSinkUtil, TestingAppendBaseRowSink, TestingAppendSink, TestingAppendTableSink}
import org.apache.flink.table.runtime.typeutils.BaseRowTypeInfo
import org.apache.flink.table.types.logical.{BigIntType, IntType, VarCharType}
import org.apache.flink.types.Row
import org.junit.Assert._
import org.junit._
class CalcITCase extends StreamingTestBase {
@Test
def testGenericRowAndBaseRow(): Unit = {
val sqlQuery = "SELECT * FROM MyTableRow"
val rowData: GenericRow = new GenericRow(3)
rowData.setInt(0, 1)
rowData.setInt(1, 1)
rowData.setLong(2, 1L)
val data = List(rowData)
implicit val tpe: TypeInformation[GenericRow] =
new BaseRowTypeInfo(
new IntType(),
new IntType(),
new BigIntType()).asInstanceOf[TypeInformation[GenericRow]]
val ds = env.fromCollection(data)
val t = ds.toTable(tEnv, 'a, 'b, 'c)
tEnv.registerTable("MyTableRow", t)
val outputType = new BaseRowTypeInfo(
new IntType(),
new IntType(),
new BigIntType())
val result = tEnv.sqlQuery(sqlQuery).toAppendStream[BaseRow]
val sink = new TestingAppendBaseRowSink(outputType)
result.addSink(sink)
env.execute()
val expected = List("0|1,1,1")
assertEquals(expected.sorted, sink.getAppendResults.sorted)
}
@Test
def testRowAndBaseRow(): Unit = {
val sqlQuery = "SELECT * FROM MyTableRow WHERE c < 3"
val data = List(
Row.of("Hello", "Worlds", Int.box(1)),
Row.of("Hello", "Hiden", Int.box(5)),
Row.of("Hello again", "Worlds", Int.box(2)))
implicit val tpe: TypeInformation[Row] = new RowTypeInfo(
Types.STRING,
Types.STRING,
Types.INT)
val ds = env.fromCollection(data)
val t = ds.toTable(tEnv, 'a, 'b, 'c)
tEnv.registerTable("MyTableRow", t)
val outputType = new BaseRowTypeInfo(
new VarCharType(VarCharType.MAX_LENGTH),
new VarCharType(VarCharType.MAX_LENGTH),
new IntType())
val result = tEnv.sqlQuery(sqlQuery).toAppendStream[BaseRow]
val sink = new TestingAppendBaseRowSink(outputType)
result.addSink(sink)
env.execute()
val expected = List("0|Hello,Worlds,1","0|Hello again,Worlds,2")
assertEquals(expected.sorted, sink.getAppendResults.sorted)
}
@Test
def testGenericRowAndRow(): Unit = {
val sqlQuery = "SELECT * FROM MyTableRow"
val rowData: GenericRow = new GenericRow(3)
rowData.setInt(0, 1)
rowData.setInt(1, 1)
rowData.setLong(2, 1L)
val data = List(rowData)
implicit val tpe: TypeInformation[GenericRow] =
new BaseRowTypeInfo(
new IntType(),
new IntType(),
new BigIntType()).asInstanceOf[TypeInformation[GenericRow]]
val ds = env.fromCollection(data)
val t = ds.toTable(tEnv, 'a, 'b, 'c)
tEnv.registerTable("MyTableRow", t)
val result = tEnv.sqlQuery(sqlQuery).toAppendStream[Row]
val sink = new TestingAppendSink
result.addSink(sink)
env.execute()
val expected = List("1,1,1")
assertEquals(expected.sorted, sink.getAppendResults.sorted)
}
@Test
def testRowAndRow(): Unit = {
val sqlQuery = "SELECT * FROM MyTableRow WHERE c < 3"
val data = List(
Row.of("Hello", "Worlds", Int.box(1)),
Row.of("Hello", "Hiden", Int.box(5)),
Row.of("Hello again", "Worlds", Int.box(2)))
implicit val tpe: TypeInformation[Row] = new RowTypeInfo(
Types.STRING,
Types.STRING,
Types.INT)
val ds = env.fromCollection(data)
val t = ds.toTable(tEnv, 'a, 'b, 'c)
tEnv.registerTable("MyTableRow", t)
val result = tEnv.sqlQuery(sqlQuery).toAppendStream[Row]
val sink = new TestingAppendSink
result.addSink(sink)
env.execute()
val expected = List("Hello,Worlds,1","Hello again,Worlds,2")
assertEquals(expected.sorted, sink.getAppendResults.sorted)
}
@Test
def testPrimitiveMapType(): Unit = {
val sqlQuery = "SELECT MAP[b, 30, 10, a] FROM MyTableRow"
val t = env.fromCollection(TestData.smallTupleData3)
.toTable(tEnv, 'a, 'b, 'c)
tEnv.registerTable("MyTableRow", t)
val result = tEnv.sqlQuery(sqlQuery).toAppendStream[Row]
val sink = new TestingAppendSink
result.addSink(sink)
env.execute()
val expected = List(
"{1=30, 10=1}",
"{2=30, 10=2}",
"{2=30, 10=3}")
assertEquals(expected.sorted, sink.getAppendResults.sorted)
}
@Test
def testNonPrimitiveMapType(): Unit = {
val sqlQuery = "SELECT MAP[a, c] FROM MyTableRow"
val t = env.fromCollection(TestData.smallTupleData3)
.toTable(tEnv, 'a, 'b, 'c)
tEnv.registerTable("MyTableRow", t)
val result = tEnv.sqlQuery(sqlQuery).toAppendStream[Row]
val sink = new TestingAppendSink
result.addSink(sink)
env.execute()
val expected = List(
"{1=Hi}",
"{2=Hello}",
"{3=Hello world}")
assertEquals(expected.sorted, sink.getAppendResults.sorted)
}
@Test
def testSelectStarFromNestedTable(): Unit = {
val sqlQuery = "SELECT * FROM MyTable"
val table = tEnv.fromDataStream(env.fromCollection(Seq(
((0, 0), "0"),
((1, 1), "1"),
((2, 2), "2")
)))
tEnv.registerTable("MyTable", table)
val result = tEnv.sqlQuery(sqlQuery)
val sink = TestSinkUtil.configureSink(result, new TestingAppendTableSink())
tEnv.registerTableSink("MySink", sink)
tEnv.insertInto("MySink", result)
tEnv.execute("test")
val expected = List("0,0,0", "1,1,1", "2,2,2")
assertEquals(expected.sorted, sink.getAppendResults.sorted)
}
@Test
def testIn(): Unit = {
val sqlQuery = "SELECT * FROM MyTable WHERE b in (1,3,4,5,6)"
val t = env.fromCollection(TestData.tupleData3)
.toTable(tEnv, 'a, 'b, 'c)
tEnv.registerTable("MyTable", t)
val result = tEnv.sqlQuery(sqlQuery).toAppendStream[Row]
val sink = new TestingAppendSink
result.addSink(sink)
env.execute()
val expected = List(
"1,1,Hi", "4,3,Hello world, how are you?", "5,3,I am fine.", "6,3,Luke Skywalker",
"7,4,Comment#1", "8,4,Comment#2", "9,4,Comment#3", "10,4,Comment#4", "11,5,Comment#5",
"12,5,Comment#6", "13,5,Comment#7", "14,5,Comment#8", "15,5,Comment#9", "16,6,Comment#10",
"17,6,Comment#11", "18,6,Comment#12", "19,6,Comment#13", "20,6,Comment#14", "21,6,Comment#15")
assertEquals(expected.sorted, sink.getAppendResults.sorted)
}
@Test
def testNotIn(): Unit = {
val sqlQuery = "SELECT * FROM MyTable WHERE b not in (1,3,4,5,6)"
val t = env.fromCollection(TestData.tupleData3)
.toTable(tEnv, 'a, 'b, 'c)
tEnv.registerTable("MyTable", t)
val result = tEnv.sqlQuery(sqlQuery).toAppendStream[Row]
val sink = new TestingAppendSink
result.addSink(sink)
env.execute()
val expected = List("2,2,Hello", "3,2,Hello world")
assertEquals(expected.sorted, sink.getAppendResults.sorted)
}
@Test
def testLongProjectionList(): Unit = {
val t = env.fromCollection(TestData.smallTupleData3)
.toTable(tEnv, 'a, 'b, 'c)
tEnv.createTemporaryView("MyTable", t)
val selectList = Stream.range(3, 200)
.map(i => s"CASE WHEN a IS NOT NULL AND a > $i THEN 0 WHEN a < 0 THEN 0 ELSE $i END")
.mkString(",")
val sqlQuery = s"select $selectList from MyTable"
val result = tEnv.sqlQuery(sqlQuery).toAppendStream[Row]
val sink = new TestingAppendSink
result.addSink(sink)
env.execute()
val expected = Stream.range(3, 200).map(_.toString).mkString(",")
assertEquals(sink.getAppendResults.size, TestData.smallTupleData3.size)
sink.getAppendResults.foreach( result =>
assertEquals(expected, result)
)
}
}
|
bowenli86/flink
|
flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/CalcITCase.scala
|
Scala
|
apache-2.0
| 8,927 |
package org.jetbrains.plugins.scala
package lang
package completion
package filters.expression
import com.intellij.psi.filters.ElementFilter
import com.intellij.psi.{PsiElement, _}
import org.jetbrains.annotations.NonNls
import org.jetbrains.plugins.scala.extensions.PsiFileExt
import org.jetbrains.plugins.scala.lang.completion.ScalaCompletionUtil._
import org.jetbrains.plugins.scala.lang.parser.ScalaElementTypes
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.scaladoc.psi.api.ScDocComment
/**
* @author Alexander Podkhalyuzin
* Date: 22.05.2008
*/
class FinallyFilter extends ElementFilter {
def isAcceptable(element: Object, context: PsiElement): Boolean = {
if (context.isInstanceOf[PsiComment]) return false
val leaf = getLeafByOffset(context.getTextRange.getStartOffset, context)
if (leaf != null) {
var i = getPrevNotWhitespaceAndComment(context.getTextRange.getStartOffset - 1, context)
var leaf1 = getLeafByOffset(i, context)
while (leaf1 != null && !leaf1.isInstanceOf[ScTryStmt]) leaf1 = leaf1.getParent
if (leaf1 == null) return false
if (leaf1.getNode.getChildren(null).exists(_.getElementType == ScalaElementTypes.FINALLY_BLOCK)) return false
i = getNextNotWhitespaceAndComment(context.getTextRange.getEndOffset, context)
if (Array("catch", "finally").contains(getLeafByOffset(i, context).getText)) return false
return true
}
false
}
def isClassAcceptable(hintClass: java.lang.Class[_]): Boolean = {
true
}
@NonNls
override def toString: String = {
"statements keyword filter"
}
}
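// Illustrative (hypothetical caret position): completion offers "finally" after
//   try { risky() } <caret>
// but not when the enclosing ScTryStmt already has a finally block, when the next
// token is "catch" or "finally", or when the caret is inside a comment.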
|
jastice/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/scala/lang/completion/filters/expression/FinallyFilter.scala
|
Scala
|
apache-2.0
| 1,640 |
import de.wayofquality.sbt.testlogconfig.TestLogConfig.autoImport._
import sbt._
import blended.sbt.Dependencies
object BlendedJmsBridge extends ProjectFactory {
private[this] val helper : ProjectSettings = new ProjectSettings(
projectName = "blended.jms.bridge",
description = "A generic JMS bridge to connect the local JMS broker to en external JMS",
deps = Seq(
Dependencies.akkaActor,
Dependencies.akkaStream,
Dependencies.typesafeConfig,
Dependencies.logbackCore % "test",
Dependencies.logbackClassic % "test",
Dependencies.activeMqBroker % "test",
Dependencies.scalatest % "test",
Dependencies.scalacheck % "test"
),
adaptBundle = b => b.copy(
bundleActivator = s"${b.bundleSymbolicName}.internal.BridgeActivator"
)
) {
override def settings: Seq[sbt.Setting[_]] = defaultSettings ++ Seq(
Test / testlogLogPackages ++= Map("" +
"App" -> "DEBUG",
"blended" -> "TRACE"
)
)
}
override val project = helper.baseProject.dependsOn(
BlendedUtil.project,
BlendedUtilLogging.project,
BlendedJmsUtils.project,
BlendedDomino.project,
BlendedAkka.project,
BlendedStreams.project,
BlendedActivemqBrokerstarter.project % "test",
BlendedTestsupport.project % "test",
BlendedTestsupportPojosr.project % "test",
BlendedStreamsTestsupport.project % "test"
)
}
|
lefou/blended
|
project/BlendedJmsBridge.scala
|
Scala
|
apache-2.0
| 1,417 |
package com.typesafe.sbt.webpack
import sbt._
import com.typesafe.sbt.web.{PathMapping, SbtWeb}
import com.typesafe.sbt.web.js.JS
import com.typesafe.sbt.web.pipeline.Pipeline
import sbt.Keys._
import sbt.Task
object Import {
// For development only, could execute before "run"
// eg: run in Compile <<= (run in Compile) dependsOn webpack
val webpack = TaskKey[Seq[File]]("webpack", "Invoke the webpack module bundler in dev mode.")
// For production
val webpackStage = TaskKey[Pipeline.Stage]("webpack-stage", "Invoke the webpack module bundler.")
object WebpackKeys {
val command = SettingKey[String]("webpack-command", "The webpack command in dev mode.")
val outputPath = SettingKey[String]("webpack-output-path", "Path to the generated asset file in dev mode.")
val stageCommand = SettingKey[String]("webpack-stage-command", "The webpack command.")
val stageOutputPath = SettingKey[String]("webpack-stage-output-path", "Path to the generated asset file.")
}
}
object SbtWebpack extends AutoPlugin {
override def requires = SbtWeb
override def trigger = AllRequirements
val autoImport = Import
import SbtWeb.autoImport._
import WebKeys._
import autoImport._
import WebpackKeys._
override def projectSettings = Seq(
command := "npm run build",
outputPath := "web/main/public",
webpack := webpackDevelopTask.value,
stageCommand := "npm run build release",
stageOutputPath := "webpack",
webpackStage := webpackStageTask.value
)
def webpackDevelopTask: Def.Initialize[Task[Seq[File]]] = Def.task {
exec(command.value)
Seq()
}
def webpackStageTask: Def.Initialize[Task[Pipeline.Stage]] = Def.task { mappings =>
exec(stageCommand.value)
val outputDir = target.value / stageOutputPath.value
val outputFiles = outputDir ** "*.*"
val newMappings = outputFiles pair relativeTo(outputDir)
    // Replace existing mappings with the freshly generated ones
val newNames = newMappings map (_._2)
val (existed, other) = mappings partition (newNames contains _._2)
newMappings ++ other
}
// Execute NPM command
def exec(cmd: String) = {
try {
val rc = Process(cmd, file(".")).!
if (rc != 0) {
sys.error(s"NPM generated non-zero return code: $rc")
}
} catch {
case e: java.io.IOException => {
// For windows
val rc = Process("cmd /c " + cmd, file(".")).!
if (rc != 0) {
sys.error(s"NPM generated non-zero return code: $rc")
}
}
}
}
}
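// A minimal wiring sketch for a consuming build (assumptions: sbt 0.13-style keys
// as used above, and sbt-web's pipelineStages for the production bundle):
//
//   // build.sbt
//   WebpackKeys.command := "npm run build"
//   run in Compile <<= (run in Compile) dependsOn webpack
//   pipelineStages := Seq(webpackStage)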
|
zhengcan/play-webpack-react
|
project/WebpackPlugin.scala
|
Scala
|
mit
| 2,516 |
// Test from https://lrytz.github.io/scala-aladdin-bugtracker/displayItem.do%3Fid=763.html
// and expanded with package object variants
trait Foo { type T; def apply() : T }
object e extends Foo { type T = Int; def apply() = 42 }
package p {
trait T[X] { def O : { def apply(): X } }
object `package` extends T[Int] {
def O: { def apply(): Int } = new { def apply(): Int = 42 }
}
object Test {
val x: Int = O()
}
}
object Test {
val f = new Foo { type T = Int; def apply() = 42 }
def main(args: Array[String]): Unit = {
val g = new Foo { type T = Int; def apply() = 42 }
(e: Foo)()
val ee: Int = e()
(f: Foo)()
val ff: Int = f()
(g: Foo)()
val gg: Int = g()
val pp: Int = p.O()
}
}
|
scala/scala
|
test/files/pos/alladin763.scala
|
Scala
|
apache-2.0
| 749 |
package com.pulptunes.relay.models
import javax.inject.Inject
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import cats.data.Xor
import cats.syntax.option._
import cats.syntax.xor._
import play.api.db.slick.{DatabaseConfigProvider, HasDatabaseConfigProvider}
import slick.driver.JdbcProfile
case class Server(id: String, status: String, publicDns: String, online: String, onlineTest: String)
class ServerProvider @Inject() (protected val dbConfigProvider: DatabaseConfigProvider)
extends HasDatabaseConfigProvider[JdbcProfile] {
import driver.api._
def getAll: Future[List[Server]] = db.run {
servers.to[List].result
}
def getById(id: String): Future[Throwable Xor Server] = {
val res = db.run {
servers.filter(_.id === id).result.headOption
}
    res.map(_.toRightXor(new Exception(s"Server $id not found in the database"))).recover {
case t => t.left
}
}
def isUp(id: String, testStatus: Boolean): Future[Boolean] = {
getById(id).map(_.fold(
_ => false,
server => {
if (testStatus) server.status == "test" && server.onlineTest == "true"
else server.status == "production" && server.online == "true"
}
))
}
val servers = TableQuery[Servers]
class Servers(_tableTag: Tag) extends Table[Server](_tableTag, "servers") {
def * = (id, status, publicDns, online, onlineTest) <> (Server.tupled, Server.unapply)
/** Maps whole row to an option. Useful for outer joins. */
def ? = (Rep.Some(id), Rep.Some(status), Rep.Some(publicDns), Rep.Some(online), Rep.Some(onlineTest)).shaped.<>({r=>import r._; _1.map(_=> Server.tupled((_1.get, _2.get, _3.get, _4.get, _5.get)))}, (_:Any) => throw new Exception("Inserting into ? projection not supported."))
/** Database column id SqlType(VARCHAR), PrimaryKey, Length(100,true) */
val id: Rep[String] = column[String]("id", O.PrimaryKey, O.Length(100,varying=true))
/** Database column status SqlType(VARCHAR), Length(20,true) */
val status: Rep[String] = column[String]("status", O.Length(20,varying=true))
/** Database column public_dns SqlType(VARCHAR), Length(100,true) */
val publicDns: Rep[String] = column[String]("public_dns", O.Length(100,varying=true))
/** Database column online SqlType(VARCHAR), Length(10,true) */
val online: Rep[String] = column[String]("online", O.Length(10,varying=true))
/** Database column online_test SqlType(VARCHAR), Length(10,true) */
val onlineTest: Rep[String] = column[String]("online_test", O.Length(10,varying=true))
}
}
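// Usage sketch (assuming an injected `serverProvider: ServerProvider` and a
// hypothetical server id):
//
//   serverProvider.isUp("relay-01", testStatus = false).foreach { up =>
//     if (!up) println("server relay-01 is not serving production traffic")
//   }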
|
alpeb/pulptunes-relay
|
app/com/pulptunes/relay/models/Server.scala
|
Scala
|
mpl-2.0
| 2,601 |
/**
* Copyright 2015 Thomson Reuters
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmwell.ws
import java.util.Properties
import akka.actor.Actor
import cmwell.common.ExitWithError
import cmwell.common.OffsetsService
import com.typesafe.scalalogging.LazyLogging
import k.grid.Grid
import kafka.utils.ZkUtils
import org.I0Itec.zkclient.ZkClient
import org.I0Itec.zkclient.serialize.ZkSerializer
import org.apache.kafka.clients.consumer.KafkaConsumer
import org.apache.kafka.common.TopicPartition
import org.joda.time.DateTime
import scala.collection.JavaConverters._
import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Failure, Success}
/**
* Created by israel on 01/12/2016.
*/
class BGMonitorActor(zkServers: String,
offsetService: OffsetsService,
implicit val ec: ExecutionContext = concurrent.ExecutionContext.Implicits.global)
extends Actor
with LazyLogging {
val zkClient = new ZkClient(zkServers, 10000, 10000, ZKLikeStringSerializer)
val zkUtils = ZkUtils(zkClient, false)
val allBrokers = zkUtils
.getAllBrokersInCluster()
.map { b =>
val endPoint = b.endPoints.head
s"${endPoint.host}:${endPoint.port}"
}
.mkString(",")
val topics = Seq("persist_topic", "persist_topic.priority", "index_topic", "index_topic.priority")
val partitionsForTopics = zkUtils.getPartitionsForTopics(topics)
val topicsPartitionsAndGroups = partitionsForTopics.flatMap {
case ("persist_topic", partitions) =>
partitions.map { partition =>
(new TopicPartition("persist_topic", partition), s"imp.$partition")
}
case ("persist_topic.priority", partitions) =>
partitions.map { partition =>
(new TopicPartition("persist_topic.priority", partition), s"imp.p.$partition")
}
case ("index_topic", partitions) =>
partitions.map { partition =>
(new TopicPartition("index_topic", partition), s"indexer.$partition")
}
case ("index_topic.priority", partitions) =>
partitions.map { partition =>
(new TopicPartition("index_topic.priority", partition), s"indexer.p.$partition")
}
case x@(topicName, partition) => logger.error(s"Unexpected topicName: $topicName . Partition: $partition"); ???
}
val topicsPartitionsAndConsumers = topicsPartitionsAndGroups.map {
case (topicPartition, groupId) =>
val kafkaConsumerProps = new Properties()
kafkaConsumerProps.put("bootstrap.servers", allBrokers)
kafkaConsumerProps.put("group.id", groupId)
kafkaConsumerProps.put("key.deserializer", "org.apache.kafka.common.serialization.ByteArrayDeserializer")
kafkaConsumerProps.put("key.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer")
kafkaConsumerProps.put("value.deserializer", "org.apache.kafka.common.serialization.ByteArrayDeserializer")
kafkaConsumerProps.put("value.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer")
(topicPartition, new KafkaConsumer[Array[Byte], Array[Byte]](kafkaConsumerProps))
}
val topicsPartitions = topicsPartitionsAndConsumers.keys
var previousOffsetInfo: OffsetsInfo = OffsetsInfo(Map.empty[String, PartitionOffsetsInfo], DateTime.now())
@volatile var currentOffsetInfo: OffsetsInfo = OffsetsInfo(Map.empty[String, PartitionOffsetsInfo], DateTime.now())
var lastFetchDuration: Long = 0
import java.util.concurrent.ConcurrentHashMap
val redSince: collection.concurrent.Map[Int, Long] = new ConcurrentHashMap[Int, Long]().asScala
self ! CalculateOffsetInfo
override def receive: Receive = {
case GetOffsetInfo =>
logger.debug(s"got GetOffsetInfo message returning $currentOffsetInfo")
sender() ! currentOffsetInfo
case CalculateOffsetInfo =>
logger.debug(s"got inner request to generate new offsets info")
generateOffsetsInfo
}
@volatile var statusesCheckedTime: Long = System.currentTimeMillis()
private def generateOffsetsInfo = {
logger.debug(s"generating offsets info")
def calculateOffsetInfo(): Future[(OffsetsInfo, Long)] = {
import concurrent._
Future {
blocking {
val start = System.currentTimeMillis()
val topicPartitionsWriteOffsets =
topicsPartitionsAndConsumers.head._2.endOffsets(topicsPartitions.asJavaCollection)
val partitionsOffsetsInfo: Map[String, PartitionOffsetsInfo] = topicPartitionsWriteOffsets.asScala.map {
case (topicPartition, writeOffset) =>
val streamId = topicPartition.topic() match {
case "persist_topic" => s"imp.${topicPartition.partition()}_offset"
case "persist_topic.priority" => s"imp.${topicPartition.partition()}.p_offset"
case "index_topic" => s"indexer.${topicPartition.partition()}_offset"
case "index_topic.priority" => s"indexer.${topicPartition.partition()}.p_offset"
}
val readOffset = offsetService.read(streamId).getOrElse(0L)
((topicPartition.topic() + topicPartition.partition()),
PartitionOffsetsInfo(topicPartition.topic(), topicPartition.partition(), readOffset, writeOffset))
}.toMap
val end = System.currentTimeMillis()
(OffsetsInfo(partitionsOffsetsInfo, new DateTime()), end - start)
}
}
}
calculateOffsetInfo().onComplete {
case Success((info, duration)) =>
logger.debug(s"calculate offset info successful: \nInfo:$info\nDuration:$duration")
val now = System.currentTimeMillis()
if (now - statusesCheckedTime > 1 * 60 * 1000) {
logger.debug(s"more than 1 minute has past since last checked statuses, let's check")
statusesCheckedTime = now
previousOffsetInfo = currentOffsetInfo
try {
val partitionsOffsetInfoUpdated = info.partitionsOffsetInfo.map {
case (key, partitionInfo) =>
val readDiff = partitionInfo.readOffset - previousOffsetInfo.partitionsOffsetInfo
.get(key)
.map {
_.readOffset
}
.getOrElse(0L)
val partitionStatus = {
if (readDiff > 0)
Green
else if (partitionInfo.readOffset - partitionInfo.writeOffset == 0) {
Green
                  } else if (previousOffsetInfo.partitionsOffsetInfo
.get(key)
.map { _.partitionStatus }
.getOrElse(Green) == Green) {
Yellow
} else {
Red
}
}
if (partitionStatus == Red) {
val currentTime = System.currentTimeMillis()
redSince.get(partitionInfo.partition) match {
case None =>
logger.warn(s"BG status for partition ${partitionInfo.partition} turned RED")
redSince.putIfAbsent(partitionInfo.partition, currentTime)
case Some(since) if ((currentTime - since) > 15 * 60 * 1000) =>
logger.error(
s"BG status for partition ${partitionInfo.partition} is RED for more than 15 minutes. sending it an exit message"
)
Grid.serviceRef(s"BGActor${partitionInfo.partition}") ! ExitWithError
redSince.replace(partitionInfo.partition, currentTime)
case Some(since) =>
logger.warn(
s"BG for partition ${partitionInfo.partition} is RED since ${(currentTime - since) / 1000} seconds ago"
)
}
}
key -> partitionInfo.copy(partitionStatus = partitionStatus)
}
currentOffsetInfo = info.copy(partitionsOffsetInfo = partitionsOffsetInfoUpdated)
} catch {
case t: Throwable => logger.error("exception ingesting offset info", t)
}
} else if (currentOffsetInfo.partitionsOffsetInfo.nonEmpty) {
currentOffsetInfo = info.copy(partitionsOffsetInfo = info.partitionsOffsetInfo.map {
case (topic, info) =>
(topic, info.copy(partitionStatus = currentOffsetInfo.partitionsOffsetInfo(topic).partitionStatus))
})
} else {
currentOffsetInfo = info
}
lastFetchDuration = duration
logger.debug(s"updated currentOffsetInfo: $currentOffsetInfo")
context.system.scheduler
.scheduleOnce(math.max(10000, lastFetchDuration).milliseconds, self, CalculateOffsetInfo)
case Failure(exception) =>
logger.error("failed to calculate offset info", exception)
context.system.scheduler
.scheduleOnce(math.max(10000, lastFetchDuration).milliseconds, self, CalculateOffsetInfo)
}
}
}
object BGMonitorActor {
def serviceName = classOf[BGMonitorActor].getName
}
object ZKLikeStringSerializer extends ZkSerializer {
def serialize(data: Object): Array[Byte] = data.asInstanceOf[String].getBytes("UTF-8")
def deserialize(bytes: Array[Byte]): Object = {
if (bytes == null)
null
else
new String(bytes, "UTF-8")
}
}
case object GetOffsetInfo
case object CalculateOffsetInfo
case class OffsetsInfo(partitionsOffsetInfo: Map[String, PartitionOffsetsInfo], timeStamp: DateTime)
trait PartitionStatus
case object Green extends PartitionStatus
case object Yellow extends PartitionStatus
case object Red extends PartitionStatus
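// Transition summary (as computed in BGMonitorActor.generateOffsetsInfo): a partition
// stays Green while its read offset advances or equals the write offset; it turns
// Yellow on the first check without progress, and Red if it still makes no progress
// on the following check.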
case class PartitionOffsetsInfo(topic: String,
partition: Int,
readOffset: Long,
writeOffset: Long,
partitionStatus: PartitionStatus = Green) {
def toShortInfoString =
s"${topic.head}${if (topic.contains(".p")) ".p" else ""}:${writeOffset - readOffset}:${partitionStatus.toString.head}"
}
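// For example:
//   PartitionOffsetsInfo("persist_topic", 0, 10L, 15L).toShortInfoString == "p:5:G"
//   PartitionOffsetsInfo("index_topic.priority", 0, 7L, 7L).toShortInfoString == "i.p:0:G"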
|
bryaakov/CM-Well
|
server/cmwell-ws/app/BGMonitorActor.scala
|
Scala
|
apache-2.0
| 10,742 |
package amphip.data
import scala.annotation.implicitNotFound
import scala.math.BigDecimal.RoundingMode.{ HALF_UP => HalfUp, DOWN => Down }
import java.math.MathContext.{ DECIMAL128 => D128 }
/*
import scalaz.std.list._, listSyntax._
import scalaz.std.option._, optionSyntax._
import scalaz.syntax.foldable1._
import scalaz.syntax.show._
import scalaz.syntax.std.map._
*/
import scalaz.Scalaz._
import spire.math._
import spire.implicits._
import amphip.base._
import amphip.model.ast._
import amphip.model.show._
import amphip.data.ModelData._
import amphip.data.ModelData.SimpleData._
object eval {
val key = DataKey
@implicitNotFound("Eval is not defined for ${A}")
trait Eval[A, B] {
def eval(expr: A)(implicit modelData: ModelData): B
}
def apply[A, B](expr: A)(implicit modelData: ModelData, Eval: Eval[A, B]): B = Eval.eval(expr)
def apply[A, B](expr: A, modelData: => ModelData)(implicit Eval: Eval[A, B]): B = Eval.eval(expr)(modelData)
private def from[A, B](f: ModelData => A => B): Eval[A, B] = new Eval[A, B] {
def eval(a: A)(implicit modelData: ModelData): B = f(modelData)(a)
}
// STATEMENTS
private val PFSetAssing: PartialFunction[SetAtt, SetData] = {
case SetAssign(SetLit(values @ _*)) =>
values.toList.map { l =>
SetTuple(l.collect {
case NumLit (num) => SimpleNum(num)
case StringLit(str) => SimpleStr(str)
})
}
}
private val PFSetDefault: PartialFunction[SetAtt, SetData] = {
case SetDefault(SetLit(values @ _*)) =>
values.toList.map { l =>
SetTuple(l.collect {
case NumLit (num) => SimpleNum(num)
case StringLit(str) => SimpleStr(str)
})
}
}
/**
   * Helper to get the value of a specific attribute.
   * It assumes that the set is expanded, i.e., the domain is `none`.
*/
private def evalAtt(expansion: SetStat, dataPF: PartialFunction[SetAtt, SetData]): Option[SetData] =
expansion.atts.collect(dataPF).headOption
private val PFParamAssing: PartialFunction[ParamAtt, SimpleData] = {
case ParamAssign(NumLit (num)) => SimpleNum(num)
case ParamAssign(StringLit(str)) => SimpleStr(str)
}
private val PFParamDefault: PartialFunction[ParamAtt, SimpleData] = {
case ParamDefault(NumLit (num)) => SimpleNum(num)
case ParamDefault(StringLit(str)) => SimpleStr(str)
}
  /**
   * Helper to get the value of a specific attribute.
   * It assumes that the parameter is expanded, i.e., the domain is `none'.
   */
private def evalAtt(expansion: ParamStat, dataPF: PartialFunction[ParamAtt, SimpleData]): Option[SimpleData] =
expansion.atts.collect(dataPF).headOption
def expand(stat: Stat)(implicit modelData: ModelData): LazyExpansion[Stat] = stat match {
case x: SetStat => expand(x)
case x: ParamStat => expand(x)
case x: VarStat => expand(x)
case x: ConstraintStat => expand(x)
case x: ObjectiveStat => expand(x)
}
@inline private[this] def thunk[A](x: => A): () => A = { () => x }
def expand(set: SetStat)(implicit modelData: ModelData): LazyExpansion[SetStat] = set match {
case SetStat(name, alias, domain, atts) =>
domain match {
case None => LinkedMap(key(name) -> thunk(SetStat(name, alias, none, atts.map(eval(_)))))
case Some(indExpr) =>
val pairs =
for {
localData <- eval(indExpr)
} yield {
val k = key(name, localData.values.toList)
k -> thunk(SetStat(name, alias, none, atts.map(eval(_, modelData.plusParams(localData)))))
}
LinkedMap(pairs: _*)
}
}
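  // For example (hypothetical model), a set declared as `S{i in 1..2}' expands
  // into the keyed entries S[1] and S[2], each a SetStat with its domain removed
  // and its attributes evaluated under the local binding of `i'.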
def expand(param: ParamStat)(implicit modelData: ModelData): LazyExpansion[ParamStat] = param match {
case ParamStat(name, alias, domain, atts) =>
domain match {
case None => LinkedMap(key(name) -> thunk(ParamStat(name, alias, none, atts.map(eval(_)))))
case Some(indExpr) =>
val pairs =
for {
localData <- eval(indExpr)
} yield {
val k = key(name, localData.values.toList)
k -> thunk(ParamStat(name, alias, none, atts.map(eval(_, modelData.plusParams(localData)))))
}
LinkedMap(pairs: _*)
}
}
def expand(xvar: VarStat)(implicit modelData: ModelData): LazyExpansion[VarStat] = xvar match {
case VarStat(name, alias, domain, atts) =>
domain match {
case None => LinkedMap(key(name) -> thunk(VarStat(name, alias, none, atts.map(eval(_)))))
case Some(indExpr) =>
val pairs =
for {
localData <- eval(indExpr)
} yield {
val k = key(name, localData.values.toList)
k -> thunk(VarStat(name, alias, none, atts.map(eval(_, modelData.plusParams(localData)))))
}
LinkedMap(pairs: _*)
}
}
def expand(ctr: ConstraintStat)(implicit modelData: ModelData): LazyExpansion[ConstraintStat] = ctr match {
case EqConstraintStat(name, alias, domain, left, right) =>
domain match {
case None => LinkedMap(key(name) -> thunk(EqConstraintStat(name, alias, none, eval(left), eval(right))))
case Some(indExpr) =>
val pairs =
for {
localData <- eval(indExpr)
} yield {
val k = key(name, localData.values.toList)
k -> thunk(EqConstraintStat(name, alias, none, eval(left, modelData.plusParams(localData)), eval(right, modelData.plusParams(localData))))
}
LinkedMap(pairs: _*)
}
case LTEConstraintStat(name, alias, domain, left, right) =>
domain match {
case None => LinkedMap(key(name) -> thunk(LTEConstraintStat(name, alias, none, eval(left), eval(right))))
case Some(indExpr) =>
val pairs =
for {
localData <- eval(indExpr)
} yield {
val k = key(name, localData.values.toList)
k -> thunk(LTEConstraintStat(name, alias, none, eval(left, modelData.plusParams(localData)), eval(right, modelData.plusParams(localData))))
}
LinkedMap(pairs: _*)
}
case GTEConstraintStat(name, alias, domain, left, right) =>
domain match {
case None => LinkedMap(key(name) -> thunk(GTEConstraintStat(name, alias, none, eval(left), eval(right))))
case Some(indExpr) =>
val pairs =
for {
localData <- eval(indExpr)
} yield {
val k = key(name, localData.values.toList)
k -> thunk(GTEConstraintStat(name, alias, none, eval(left, modelData.plusParams(localData)), eval(right, modelData.plusParams(localData))))
}
LinkedMap(pairs: _*)
}
case DLTEConstraintStat(name, alias, domain, lower, expr, upper) =>
domain match {
case None => LinkedMap(key(name) -> thunk(DLTEConstraintStat(name, alias, none, NumLit(eval(lower)), eval(expr), NumLit(eval(upper)))))
case Some(indExpr) =>
val pairs =
for {
localData <- eval(indExpr)
} yield {
val k = key(name, localData.values.toList)
k -> thunk(DLTEConstraintStat(
name,
alias,
none,
NumLit(eval(lower, modelData.plusParams(localData))),
eval(expr, modelData.plusParams(localData)),
NumLit(eval(upper, modelData.plusParams(localData)))))
}
LinkedMap(pairs: _*)
}
case DGTEConstraintStat(name, alias, domain, lower, expr, upper) =>
domain match {
case None => LinkedMap(key(name) -> thunk(DGTEConstraintStat(name, alias, none, NumLit(eval(lower)), eval(expr), NumLit(eval(upper)))))
case Some(indExpr) =>
val pairs =
for {
localData <- eval(indExpr)
} yield {
val k = key(name, localData.values.toList)
k -> thunk(DGTEConstraintStat(
name,
alias,
none,
NumLit(eval(lower, modelData.plusParams(localData))),
eval(expr, modelData.plusParams(localData)),
NumLit(eval(upper, modelData.plusParams(localData)))))
}
LinkedMap(pairs: _*)
}
}
def expand(obj: ObjectiveStat)(implicit modelData: ModelData): LazyExpansion[ObjectiveStat] = obj match {
case Minimize(name, alias, domain, expr) =>
domain match {
case None => LinkedMap(key(name) -> thunk(Minimize(name, alias, none, eval(expr))))
case Some(indExpr) =>
val pairs =
for {
localData <- eval(indExpr)
} yield {
val k = key(name, localData.values.toList)
k -> thunk(Minimize(name, alias, none, eval(expr, modelData.plusParams(localData))))
}
LinkedMap(pairs: _*)
}
case Maximize(name, alias, domain, expr) =>
domain match {
case None => LinkedMap(key(name) -> thunk(Maximize(name, alias, none, eval(expr))))
case Some(indExpr) =>
val pairs =
for {
localData <- eval(indExpr)
} yield {
val k = key(name, localData.values.toList)
k -> thunk(Maximize(name, alias, none, eval(expr, modelData.plusParams(localData))))
}
LinkedMap(pairs: _*)
}
}
implicit val StatEval: Eval[Stat, Map[DataKey, Stat]] = from(implicit modelData =>
{
case x: SetStat => eval(x)
case x: ParamStat => eval(x)
case x: VarStat => eval(x)
case x: ConstraintStat => eval(x)
case x: ObjectiveStat => eval(x)
})
private[this] def force[A](exp: LazyExpansion[A]): Map[DataKey, A] = exp.mapValues(_())
implicit val SetStatEval: Eval[SetStat, Map[DataKey, SetStat]] = from(implicit modelData => x => force(expand(x)))
implicit val ParamStatEval: Eval[ParamStat, Map[DataKey, ParamStat]] = from(implicit modelData => x => force(expand(x)))
implicit val VarStatEval: Eval[VarStat, Map[DataKey, VarStat]] = from(implicit modelData => x => force(expand(x)))
implicit val ConstraintStatEval: Eval[ConstraintStat, Map[DataKey, ConstraintStat]] = from(implicit modelData => x => force(expand(x)))
implicit val ObjectiveStatEval: Eval[ObjectiveStat, Map[DataKey, ObjectiveStat]] = from(implicit modelData => x => force(expand(x)))
implicit val SetAttEval: Eval[SetAtt, SetAtt] = from(implicit modelData =>
{
case SetDimen(n) => SetDimen(n)
case SetWithin(expr) => SetWithin(asSetLit(eval(expr)))
case SetAssign(expr) => SetAssign(asSetLit(eval(expr)))
case SetDefault(expr) => SetDefault(asSetLit(eval(expr)))
})
implicit val ParamAttEval: Eval[ParamAtt, ParamAtt] = from(implicit modelData =>
{
case ParamLT(expr) => ParamLT(eval(expr).fold(NumLit, StringLit))
case ParamLTE(expr) => ParamLTE(eval(expr).fold(NumLit, StringLit))
case ParamEq(expr) => ParamEq(eval(expr).fold(NumLit, StringLit))
case ParamNEq(expr) => ParamNEq(eval(expr).fold(NumLit, StringLit))
case ParamGT(expr) => ParamGT(eval(expr).fold(NumLit, StringLit))
case ParamGTE(expr) => ParamGTE(eval(expr).fold(NumLit, StringLit))
case ParamIn(expr) => ParamIn(asSetLit(eval(expr)))
case ParamAssign(expr) => ParamAssign(eval(expr).fold(NumLit, StringLit))
case ParamDefault(expr) => ParamDefault(eval(expr).fold(NumLit, StringLit))
case x => x
})
implicit val VarAttEval: Eval[VarAtt, VarAtt] = from(implicit modelData =>
{
case VarLTE(expr) => VarLTE(NumLit(eval(expr)))
case VarEq(expr) => VarEq(NumLit(eval(expr)))
case VarGTE(expr) => VarGTE(NumLit(eval(expr)))
case x => x
})
// EXPRESSIONS
implicit val ExprEval: Eval[Expr, Any] = from(implicit modelData =>
{
case x: SimpleExpr => eval(x)
case x: SetExpr => eval(x)
case x: LogicExpr => eval(x)
case x: LinExpr => eval(x)
})
// SIMPLE
implicit val SimpleExprEval: Eval[SimpleExpr, SimpleData] = from(implicit modelData =>
{
case x: ParamRef => eval(x)
case x: DummyIndRef => eval(x)
case x: NumExpr => SimpleNum(eval(x))
case x: SymExpr => SimpleStr(eval(x))
})
implicit val NumExprWithSymExprEval: Eval[NumExpr with SymExpr, SimpleData] = from(implicit modelData =>
{
case x: ParamRef => eval(x)
case x: DummyIndRef => eval(x)
})
implicit val ParamRefEval: Eval[ParamRef, SimpleData] = from(implicit modelData =>
{
case ParamRef(param, subscript) =>
val k = key(param.name, eval(subscript))
val expParam =
modelData.paramsExpansion.get(k)
.orElse(expand(param).get(k).map(_())) // only calculates the expansion if it is not preloaded
.err(s"subscript `${subscript.shows}' does not " +
s"conform to parameter `${param.name}' definition")
      val assignData = evalAtt(expParam, PFParamAssign)
val defaultData = evalAtt(expParam, PFParamDefault)
// FIXME check lower and upper bounds
assignData
.orElse(modelData.params.get(k))
.orElse(defaultData)
.err(s"no data found for `$k'")
})
implicit val DummyIndRefEval: Eval[DummyIndRef, SimpleData] = from(implicit modelData =>
{
case DummyIndRef(dummyInd) =>
modelData.params.get(key(dummyInd.name))
.err(s"dummy index `${dummyInd.name}' out of scope")
})
// NUMERIC
implicit val NumExprEval: Eval[NumExpr, BigDecimal] = from(implicit modelData =>
{
case CondNumExpr(test, ifTrue, otherwise) => if (eval(test)) eval(ifTrue) else otherwise.fold[BigDecimal](0)(eval(_))
case NumAdd(left, right) => eval(left) + eval(right)
case NumSub(left, right) => eval(left) - eval(right)
case NumLess(left, right) => max(eval(left) - eval(right), 0)
case NumSum(indexing, integrand) => eval(indexing -> integrand).sum
case NumProd(indexing, integrand) => eval(indexing -> integrand).product
case NumMax(indexing, integrand) => eval(indexing -> integrand).max
case NumMin(indexing, integrand) => eval(indexing -> integrand).min
case NumMult(left, right) => eval(left) * eval(right)
case NumDiv(left, right) => eval(left).apply(D128) / eval(right)
case NumDivExact(left, right) => eval(left) /~ eval(right)
case NumMod(left, right) => eval(left) % eval(right)
case NumUnaryPlus(expr) => eval(expr)
case NumUnaryMinus(expr) => -eval(expr)
case NumRaise(left, right) => eval(left) fpow eval(right)
case x: ParamRef =>
eval(x).fold(identity, _ => sys.error(typeMismatchNumExpr(x.param.name, "param")))
case x: DummyIndRef =>
eval(x).fold(identity, _ => sys.error(typeMismatchNumExpr(x.dummyInd.name, "dummy index")))
case x: NumFuncRef => eval(x)
case NumLit(num) => num
})
  // XXX unseeded shared PRNG: Irand224/Uniform01 results are not reproducible across runs
private val rnd = new scala.util.Random()
implicit val NumFuncRefEval: Eval[NumFuncRef, BigDecimal] = from(implicit modelData =>
{
case Abs(expr) => eval(expr).abs
case Atan(expr) => atan(eval(expr))
case Atan2(x1, x2) => atan2(eval(x1), eval(x2))
case Card(expr) => eval(expr).size
case Ceil(expr) => ceil(eval(expr))
case Cos(expr) => cos(eval(expr))
case Exp(expr) => exp(eval(expr))
case Floor(expr) => floor(eval(expr))
case Gmtime() => new java.util.Date().getTime
case Length(expr) => eval(expr).length
case Log(expr) => log(eval(expr))
case Log10(expr) => log10(eval(expr).toDouble) // no implementation of log10 for BigDecimal ...
case Max(expr @ _*) => expr.map(eval(_)).max
case Min(expr @ _*) => expr.map(eval(_)).min
case Round(expr, n) => n.fold(round(eval(expr)))(n => eval(expr).setScale(eval(n).toIntExact, HalfUp))
case Sin(expr) => sin(eval(expr))
case Sqrt(expr) => sqrt(eval(expr))
case Str2time(s @_, f @_) => sys.error("`str2time' not yet supported")
case Trunc(expr, n) => {
def trunc(x: BigDecimal, n: BigDecimal): BigDecimal = x.setScale(n.toIntExact, Down)
n.fold(trunc(eval(expr), 0))(n => trunc(eval(expr), eval(n)))
}
case Irand224() => rnd.nextInt(2 ** 24)
case Uniform01() => rnd.nextDouble()
})
// SYMBOLIC
implicit val SymExprEval: Eval[SymExpr, String] = from(implicit modelData =>
{
case CondSymExpr(test, ifTrue, otherwise) => if (eval(test)) eval(ifTrue) else otherwise.fold("0")(eval(_))
case Concat(left, right) => eval(left) + eval(right)
case SymNumExpr(expr) => eval(expr).toString
case x: ParamRef => eval(x).fold(_.toString, identity)
case x: DummyIndRef => eval(x).fold(_.toString, identity)
case x: SymFuncRef => eval(x)
case StringLit(str) => str
})
implicit val SymFuncRefEval: Eval[SymFuncRef, String] = from(implicit modelData =>
{
case Substr(expr, from, length) =>
val text = eval(expr)
val start = eval(from).toIntExact
val end = start + length.fold(text.length)(l => eval(l).toIntExact)
text.substring(start, end)
case Time2str(t @_, f @_) => sys.error("`time2str' not yet supported")
})
// SET
implicit val SetExprEval: Eval[SetExpr, SetData] = from(implicit modelData =>
{
case CondSetExpr(test, ifTrue, otherwise) => if (eval(test)) eval(ifTrue) else eval(otherwise)
case Union(left, right) => (eval(left) ::: eval(right)).distinct
case Diff(left, right) => eval(left).diff(eval(right))
case SymDiff(left, right) =>
val l = eval(left)
val r = eval(right)
l.union(r).diff(l.intersect(r)).distinct
case Inter(left, right) => eval(left).intersect(eval(right))
case Cross(left, right) =>
for {
x <- eval(left)
y <- eval(right)
} yield {
SetTuple(x.values ::: y.values)
}
case SetOf(indexing, integrand) => eval(indexing -> integrand)
case ArithSet(t0, tf, deltaT) =>
val t0Val = eval(t0)
val tfVal = eval(tf)
val deltaTVal = deltaT.fold[BigDecimal](1)(eval(_))
(t0Val to tfVal by deltaTVal).map(x => SetTuple(List(SimpleNum(x)))).toList
case x: SetRef => eval(x)
case SetLit(values @ _*) =>
values.toList.map(l => SetTuple(eval(l)))
case IndExprSet(indexing) =>
eval(indexing).map(_.values.toList).map(SetTuple(_))
})
implicit val SetRefEval: Eval[SetRef, SetData] = from(implicit modelData => {
case SetRef(set, subscript) =>
val k = key(set.name, eval(subscript))
val expSet =
modelData.setsExpansion.get(k)
.orElse(expand(set).get(k).map(_())) // only calculates the expansion if it is not preloaded
.err(s"subscript `${subscript.shows}' does not conform to set `${set.name}' definition")
      val assignData = evalAtt(expSet, PFSetAssign)
val defaultData = evalAtt(expSet, PFSetDefault)
assignData
.orElse(modelData.sets.get(k))
.orElse(defaultData)
.err(s"no data found for `$k'")
})
// INDEXING
implicit val IndExprEval: Eval[IndExpr, IndexingData] = from(implicit modelData =>
{
expr =>
val entriesData = expr.entries.foldLeft(List(LinkedMap.empty[DataKey, SimpleData])) { (lastData, entry) =>
for {
data <- lastData
newData <- eval(entry, modelData.plusParams(data))
} yield {
data ++ newData
}
}
entriesData.filter { data =>
expr.predicate.fold(true)(f => eval(f, modelData.plusParams(data)))
}
})
implicit val IndEntryEval: Eval[IndEntry, IndexingData] = from(implicit modelData =>
{
case IndEntry(indices, set, predicate) =>
def localData(indices: List[DummyIndDecl], setD: SetTuple): LinkedMap[DataKey, SimpleData] =
(indices, setD) match {
case (Nil, SetTuple(Nil)) => LinkedMap.empty
case (i :: is, SetTuple(x :: xs)) => LinkedMap(key(i.name) -> x) ++ localData(is, SetTuple(xs))
case (_, SetTuple(_)) => sys.error(s"`${indices.shows}' has incompatible size for `${set.shows}'")
}
val setEv = eval(set)
val effInd = effectiveIndices(setEv, indices)
val filtered = setEv.map(localData(effInd, _))
.filter { lData =>
predicate.fold(true)(f => eval(f, modelData.plusParams(lData)))
}
val predExprMap = predicate.fold(Map.empty[String, SimpleExpr])(p => IndEntry.extract(p).mapKeys(_.name))
/*
* the final expression must have only the values not appearing in the predicate expression
*/
filtered.map(_.filter { case (k, _) => predExprMap.get(k.name).isEmpty })
})
def effectiveIndices(setEv: SetData, indices: List[DummyIndDecl], nameHint: Option[String] = None, gen: Gen = gen): List[DummyIndDecl] = {
if (indices.isEmpty) {
val dimen = setEv.headOption.fold(0)(_.values.size)
List.fill(dimen)(DummyIndDecl(gen.dummy(nameHint).freshName, synthetic = true))
} else {
indices
}
}
implicit val IndExprNumIntegrandEval: Eval[(IndExpr, NumExpr), List[BigDecimal]] = from(implicit modelData =>
{
case (indexing, integrand) =>
for {
localData <- eval(indexing)
} yield {
eval(integrand, modelData.plusParams(localData))
}
})
implicit val IndExprSetIntegrandEval: Eval[(IndExpr, List[SimpleExpr]), SetData] = from(implicit modelData =>
{
case (indexing, integrand) =>
for {
localData <- eval(indexing)
} yield {
integrand match {
case Nil => SetTuple(Nil) // XXX delete if not needed
case l => SetTuple(eval(l, modelData.plusParams(localData)))
}
}
})
implicit val IndExprLogicIntegrandEval: Eval[(IndExpr, LogicExpr), List[Boolean]] = from(implicit modelData =>
{
case (indexing, integrand) =>
for {
localData <- eval(indexing)
} yield {
eval(integrand, modelData.plusParams(localData))
}
})
// LOGIC
implicit val LogicExprEval: Eval[LogicExpr, Boolean] = from(implicit modelData =>
{
case Disj(left, right) => eval(left) || eval(right)
case Forall(indexing, integrand) => eval(indexing -> integrand).forall(identity)
case Exists(indexing, integrand) => eval(indexing -> integrand).exists(identity)
case Conj(left, right) => eval(left) && eval(right)
case Neg(expr) => !eval(expr)
case LT(left, right) => eval(left) < eval(right)
case LTE(left, right) => eval(left) <= eval(right)
case GT(left, right) => eval(left) > eval(right)
case GTE(left, right) => eval(left) >= eval(right)
case NEq(left, right) => eval(left) =!= eval(right)
case Eq(left, right) => eval(left) === eval(right)
case NotIn(values, set) =>
val ev = eval(values)
!eval(set).exists(x => ev == x.values)
case In(values, set) =>
val ev = eval(values)
eval(set).exists(x => ev == x.values)
case NotWithin(left, right) =>
val r = eval(right).toSet
!eval(left).forall(r)
case Within(left, right) =>
val r = eval(right).toSet
eval(left).forall(r)
case x: NumExpr => eval(x) != 0
})
//LINEAR
implicit val LinExprEval: Eval[LinExpr, LinExpr] = from(implicit modelData =>
{
case CondLinExpr(test, ifTrue, otherwise) => if (eval(test)) eval(ifTrue) else otherwise.fold[LinExpr](NumLit(0))(eval(_))
case LinAdd(left, right) => LinAdd(eval(left), eval(right))
case LinSub(left, right) => LinSub(eval(left), eval(right))
case LinSum(indexing, integrand) =>
val evIntegrand =
for {
localData <- eval(indexing)
} yield {
eval(integrand, modelData.plusParams(localData))
}
LinSumExp(evIntegrand)
case LinSumExp(summands) => LinSumExp(summands.map(eval(_)))
/* case LinSum(indexing, integrand) =>
val evIntegrand =
for {
localData <- eval(indexing)
} yield {
eval(integrand, modelData.plusParams(localData))
}
evIntegrand.toNel.fold[LinExpr](NumLit(0))(_.foldLeft1((expr1, expr2) => LinAdd(expr1, expr2))) */
case LinMult(left, right) => LinMult(NumLit(eval(left)), eval(right))
case LinDiv(left, right) => LinDiv(eval(left), NumLit(eval(right)))
case LinUnaryPlus(x) => LinUnaryPlus(eval(x))
case LinUnaryMinus(x) => LinUnaryMinus(eval(x))
case x: VarRef => eval(x)
case x: NumExpr => NumLit(eval(x))
})
implicit val VarRefEval: Eval[VarRef, VarRef] = from(implicit modelData => {
case VarRef(xvar, subscript) => VarRef(xvar, eval(subscript).map(_.fold(NumLit(_), StringLit(_))))
})
// BASIC
implicit val SubscriptEval: Eval[List[SimpleExpr], List[SimpleData]] = from(implicit modelData => _.map(eval(_)))
def typeMismatchNumExpr(decl: SymName, declType: String) = s"$declType `$decl' has incorrect type. Expected `NumExpr', found `SymExpr'."
private def asSetLit(data: SetData): SetLit = {
val tuples = data.map(_.values.map(_.fold(NumLit, StringLit)))
SetLit(tuples: _*)
}
}
|
gerferra/amphip
|
core/src/main/scala/amphip/data/eval.scala
|
Scala
|
mpl-2.0
| 25,703 |
package chapter21
/*
How does -> work? That is, how can "Hello" -> 42 and 42 -> "Hello" be pairs ("Hello", 42) and (42, "Hello")?
Hint: Predef.any2ArrowAssoc.
*/
object Exercise1 {
/*
implicit final class ArrowAssoc[A](private val self: A) extends AnyVal {
@inline def -> [B](y: B): Tuple2[A, B] = Tuple2(self, y)
def →[B](y: B): Tuple2[A, B] = ->(y)
}
1. "Hello" -> 42
2. ArrowAssoc[String]("Hello") -> 42
  3. ArrowAssoc[String]("Hello").->(42) == Tuple2[String, Int]("Hello", 42)
*/
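  // A minimal runnable check of the desugaring sketched above:
  def demo(): Unit = {
    val p1: (String, Int) = "Hello" -> 42 // ArrowAssoc[String]("Hello").->(42)
    val p2: (Int, String) = 42 -> "Hello" // ArrowAssoc[Int](42).->("Hello")
    assert(p1 == ("Hello", 42) && p2 == (42, "Hello"))
  }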
}
|
vsuharnikov/books-exercises
|
scala/scala-for-the-impatient/src/main/scala/chapter21/Exercise1.scala
|
Scala
|
mit
| 517 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import java.util.Properties
import kafka.log.LogConfig._
import kafka.server.KafkaConfig.fromProps
import kafka.server.QuotaType._
import kafka.utils.TestUtils._
import kafka.utils.CoreUtils._
import kafka.utils.TestUtils
import kafka.zk.ZooKeeperTestHarness
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
import org.apache.kafka.common.TopicPartition
import org.junit.Assert._
import org.junit.{After, Test}
import scala.collection.JavaConverters._
/**
 * This is the main test which ensures Replication Quotas work correctly.
 *
 * The test will fail if the quota is < 1MB/s, as 1MB is the default for replica.fetch.max.bytes.
 * So with a throttle of 100KB/s, one fetch of one partition would fill 10s of quota, in turn
 * causing the throttled broker to pause for > 10s.
 *
 * Anything over 100MB/s tends to fail, as this is the non-throttled replication rate.
 */
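// Worked numbers for the sizing above (taken from the test body below):
//   msg = 100 KB, msgCount = 100, expectedDuration = 10 s, so
//   throttle = 100 * 100,000 B / 10 s = 1,000,000 B/s (~1 MB/s),
// i.e. right at the documented lower bound for a reliable run.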
class ReplicationQuotasTest extends ZooKeeperTestHarness {
def percentError(percent: Int, value: Long): Long = Math.round(value * percent / 100)
val msg100KB = new Array[Byte](100000)
var brokers: Seq[KafkaServer] = null
val topic = "topic1"
var producer: KafkaProducer[Array[Byte], Array[Byte]] = null
@After
override def tearDown() {
producer.close()
shutdownServers(brokers)
super.tearDown()
}
@Test
def shouldBootstrapTwoBrokersWithLeaderThrottle(): Unit = {
shouldMatchQuotaReplicatingThroughAnAsymmetricTopology(true)
}
@Test
def shouldBootstrapTwoBrokersWithFollowerThrottle(): Unit = {
shouldMatchQuotaReplicatingThroughAnAsymmetricTopology(false)
}
def shouldMatchQuotaReplicatingThroughAnAsymmetricTopology(leaderThrottle: Boolean): Unit = {
    /**
     * In short we have 8 brokers, 2 of which are not started. We assign replicas to the two
     * non-started brokers, so that when we start them we can monitor replication from the 6 to the 2.
     *
     * We also have two non-throttled partitions on two of the 6 brokers, just to make sure
     * regular replication works as expected.
     */
brokers = (100 to 105).map { id => createServer(fromProps(createBrokerConfig(id, zkConnect))) }
    //Given six partitions, led on brokers 100-105 but with followers on brokers 106,107 (not started yet)
//And two extra partitions 6,7, which we don't intend on throttling.
val assignment = Map(
0 -> Seq(100, 106), //Throttled
1 -> Seq(101, 106), //Throttled
2 -> Seq(102, 106), //Throttled
3 -> Seq(103, 107), //Throttled
4 -> Seq(104, 107), //Throttled
5 -> Seq(105, 107), //Throttled
6 -> Seq(100, 106), //Not Throttled
7 -> Seq(101, 107) //Not Throttled
)
TestUtils.createTopic(zkClient, topic, assignment, brokers)
val msg = msg100KB
val msgCount = 100
val expectedDuration = 10 //Keep the test to N seconds
var throttle: Long = msgCount * msg.length / expectedDuration
if (!leaderThrottle) throttle = throttle * 3 //Follower throttle needs to replicate 3x as fast to get the same duration as there are three replicas to replicate for each of the two follower brokers
//Set the throttle limit on all 8 brokers, but only assign throttled replicas to the six leaders, or two followers
(100 to 107).foreach { brokerId =>
adminZkClient.changeBrokerConfig(Seq(brokerId),
propsWith(
(DynamicConfig.Broker.LeaderReplicationThrottledRateProp, throttle.toString),
(DynamicConfig.Broker.FollowerReplicationThrottledRateProp, throttle.toString)
))
}
//Either throttle the six leaders or the two followers
if (leaderThrottle)
adminZkClient.changeTopicConfig(topic, propsWith(LeaderReplicationThrottledReplicasProp, "0:100,1:101,2:102,3:103,4:104,5:105" ))
else
adminZkClient.changeTopicConfig(topic, propsWith(FollowerReplicationThrottledReplicasProp, "0:106,1:106,2:106,3:107,4:107,5:107"))
//Add data equally to each partition
producer = createProducer(getBrokerListStrFromServers(brokers), acks = 1)
(0 until msgCount).foreach { _ =>
(0 to 7).foreach { partition =>
producer.send(new ProducerRecord(topic, partition, null, msg))
}
}
    //Ensure data is fully written: broker 100 has partition 0, broker 101 has partition 1, etc.
(0 to 5).foreach { id => waitForOffsetsToMatch(msgCount, id, 100 + id) }
//Check the non-throttled partitions too
waitForOffsetsToMatch(msgCount, 6, 100)
waitForOffsetsToMatch(msgCount, 7, 101)
val start = System.currentTimeMillis()
//When we create the 2 new, empty brokers
createBrokers(106 to 107)
//Check that throttled config correctly migrated to the new brokers
(106 to 107).foreach { brokerId =>
assertEquals(throttle, brokerFor(brokerId).quotaManagers.follower.upperBound())
}
if (!leaderThrottle) {
(0 to 2).foreach { partition => assertTrue(brokerFor(106).quotaManagers.follower.isThrottled(tp(partition))) }
(3 to 5).foreach { partition => assertTrue(brokerFor(107).quotaManagers.follower.isThrottled(tp(partition))) }
}
//Wait for non-throttled partitions to replicate first
(6 to 7).foreach { id => waitForOffsetsToMatch(msgCount, id, 100 + id) }
val unthrottledTook = System.currentTimeMillis() - start
    //Wait for partitions 0,1,2,3,4,5 to be fully replicated to brokers 106,107
(0 to 2).foreach { id => waitForOffsetsToMatch(msgCount, id, 106) }
(3 to 5).foreach { id => waitForOffsetsToMatch(msgCount, id, 107) }
val throttledTook = System.currentTimeMillis() - start
//Check the times for throttled/unthrottled are each side of what we expect
val throttledLowerBound = expectedDuration * 1000 * 0.9
val throttledUpperBound = expectedDuration * 1000 * 3
assertTrue(s"Expected $unthrottledTook < $throttledLowerBound", unthrottledTook < throttledLowerBound)
assertTrue(s"Expected $throttledTook > $throttledLowerBound", throttledTook > throttledLowerBound)
assertTrue(s"Expected $throttledTook < $throttledUpperBound", throttledTook < throttledUpperBound)
// Check the rate metric matches what we expect.
// In a short test the brokers can be read unfairly, so assert against the average
val rateUpperBound = throttle * 1.1
val rateLowerBound = throttle * 0.5
val rate = if (leaderThrottle) avRate(LeaderReplication, 100 to 105) else avRate(FollowerReplication, 106 to 107)
assertTrue(s"Expected ${rate} < $rateUpperBound", rate < rateUpperBound)
assertTrue(s"Expected ${rate} > $rateLowerBound", rate > rateLowerBound)
}
def tp(partition: Int): TopicPartition = new TopicPartition(topic, partition)
@Test
def shouldThrottleOldSegments(): Unit = {
/**
* Simple test which ensures throttled replication works when the dataset spans many segments
*/
//2 brokers with 1MB Segment Size & 1 partition
val config: Properties = createBrokerConfig(100, zkConnect)
config.put("log.segment.bytes", (1024 * 1024).toString)
brokers = Seq(createServer(fromProps(config)))
TestUtils.createTopic(zkClient, topic, Map(0 -> Seq(100, 101)), brokers)
//Write 20MBs and throttle at 5MB/s
val msg = msg100KB
val msgCount: Int = 200
val expectedDuration = 4
val throttle: Long = msg.length * msgCount / expectedDuration
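    // i.e. 100,000 B * 200 / 4 s = 5,000,000 B/s (5 MB/s) against 20 MB of data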
//Set the throttle to only limit leader
adminZkClient.changeBrokerConfig(Seq(100), propsWith(DynamicConfig.Broker.LeaderReplicationThrottledRateProp, throttle.toString))
adminZkClient.changeTopicConfig(topic, propsWith(LeaderReplicationThrottledReplicasProp, "0:100"))
//Add data
addData(msgCount, msg)
val start = System.currentTimeMillis()
//Start the new broker (and hence start replicating)
debug("Starting new broker")
brokers = brokers :+ createServer(fromProps(createBrokerConfig(101, zkConnect)))
waitForOffsetsToMatch(msgCount, 0, 101)
val throttledTook = System.currentTimeMillis() - start
assertTrue(s"Throttled replication of ${throttledTook}ms should be > ${expectedDuration * 1000 * 0.9}ms",
throttledTook > expectedDuration * 1000 * 0.9)
assertTrue(s"Throttled replication of ${throttledTook}ms should be < ${expectedDuration * 1500}ms",
throttledTook < expectedDuration * 1000 * 1.5)
}
def addData(msgCount: Int, msg: Array[Byte]): Unit = {
producer = createProducer(getBrokerListStrFromServers(brokers), acks = 0)
(0 until msgCount).map(_ => producer.send(new ProducerRecord(topic, msg))).foreach(_.get)
waitForOffsetsToMatch(msgCount, 0, 100)
}
private def waitForOffsetsToMatch(offset: Int, partitionId: Int, brokerId: Int): Unit = {
waitUntilTrue(() => {
offset == brokerFor(brokerId).getLogManager.getLog(new TopicPartition(topic, partitionId))
.map(_.logEndOffset).getOrElse(0)
}, s"Offsets did not match for partition $partitionId on broker $brokerId", 60000)
}
private def brokerFor(id: Int): KafkaServer = brokers.filter(_.config.brokerId == id).head
def createBrokers(brokerIds: Seq[Int]): Unit = {
brokerIds.foreach { id =>
brokers = brokers :+ createServer(fromProps(createBrokerConfig(id, zkConnect)))
}
}
private def avRate(replicationType: QuotaType, brokers: Seq[Int]): Double = {
brokers.map(brokerFor).map(measuredRate(_, replicationType)).sum / brokers.length
}
private def measuredRate(broker: KafkaServer, repType: QuotaType): Double = {
val metricName = broker.metrics.metricName("byte-rate", repType.toString)
broker.metrics.metrics.asScala(metricName).metricValue.asInstanceOf[Double]
}
}
|
mihbor/kafka
|
core/src/test/scala/unit/kafka/server/ReplicationQuotasTest.scala
|
Scala
|
apache-2.0
| 10,433 |
package com.twitter.monoloco.tricks
import java.util.concurrent.TimeUnit.SECONDS
import com.twitter.monoloco.CodeTrick
import javax.crypto.{KeyGenerator, Cipher}
import java.security.SecureRandom
import java.math.BigInteger
import java.util.concurrent.{Executors, ExecutorService}
class CipherUpdate extends Runnable {
def run() {
val keyGenerator = KeyGenerator.getInstance("AES")
keyGenerator.init(256)
val secretKey = keyGenerator.generateKey()
val cipher = Cipher.getInstance("AES")
cipher.init(Cipher.ENCRYPT_MODE, secretKey)
(1 to 100000) foreach { x =>
if (Thread.interrupted()) {
return
} else {
cipher.update(new BigInteger(131232, new SecureRandom).toString(32).getBytes)
}
}
}
}
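// Design note: BurnCpu below schedules one CipherUpdate per available core, so
// the AES work keeps every processor busy; the Thread.interrupted() check above
// lets pool.shutdownNow() stop workers before the 100000 updates complete.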
class BurnCpu extends CodeTrick {
def duration() = (30L, SECONDS)
var pool:ExecutorService = null
  def start() {
    val i = Runtime.getRuntime.availableProcessors()
    pool = Executors.newFixedThreadPool(i)
    (1 to i) foreach { _ => pool.execute(new CipherUpdate) }
  }
def stop() {
pool.shutdownNow()
}
}
|
capotej/monoloco
|
src/main/scala/com/twitter/monoloco/tricks/BurnCpu.scala
|
Scala
|
apache-2.0
| 1,111 |
// This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
package ducttape.workflow
import collection._
import ducttape.hyperdag.PackedVertex
import ducttape.hyperdag.meta.MetaHyperDag
import ducttape.workflow.Types.UnpackState
import ducttape.workflow.Types.UnpackedWorkVert
import ducttape.workflow.Types.WorkflowEdge
import ducttape.workflow.Types.PackedWorkVert
import ducttape.syntax.Namespace
import ducttape.syntax.AbstractSyntaxTree.Spec
import ducttape.syntax.AbstractSyntaxTree.PackageDef
import ducttape.syntax.AbstractSyntaxTree.SubmitterDef
import ducttape.syntax.AbstractSyntaxTree.VersionerDef
import ducttape.syntax.AbstractSyntaxTree.WorkflowDefinition
import ducttape.hyperdag.HyperEdge
import ducttape.hyperdag.UnpackedVertex
import ducttape.hyperdag.meta.PhantomMetaHyperDag
import ducttape.hyperdag.meta.UnpackedMetaVertex
import ducttape.hyperdag.walker.UnpackedPhantomMetaDagWalker
import ducttape.hyperdag.walker.PackedPhantomMetaDagWalker
import ducttape.hyperdag.walker.MetaVertexFilter
import ducttape.hyperdag.walker.RealizationMunger
import ducttape.hyperdag.walker.Traversal
import ducttape.hyperdag.walker.Arbitrary
import ducttape.workflow.SpecTypes.SpecPair
import grizzled.slf4j.Logging
object HyperWorkflow {
type ExplainCallback = (=>String, =>String, Boolean) => Unit
type UnpackedWalker = UnpackedPhantomMetaDagWalker[TaskTemplate,BranchPoint,Branch,SpecGroup,Branch,UnpackState]
type HyperWorkflowMunger = RealizationMunger[Option[TaskTemplate], Branch, SpecGroup, Branch, UnpackState]
/**
* See also [[ducttape.workflow.Types.UnpackState]] for an explanation
* of how the UnpackState functions.
*/
trait HyperWorkflowStateMunger extends HyperWorkflowMunger {
// heBranch might be None if this vertex has no incoming hyperedge
override def initHyperedge(heBranch: Option[Branch]): UnpackState = heBranch match {
case None => UnpackState.empty
// This line is cool: It cleanly states that branches introduced at a hyperedge
// are never subject to the grafts of that hyperedge's component edges
//
// For example, consider a task t1 that has a realization BP1.b1. We also have a
// task t2 has an incoming hyperedge that introduces branch BP1.b2, but one of
// its component edges matches and grafts away BP1.b1 such that it doesn't conflict
// with BP1.b1. The separation of the hyperedgeState and edgeState allows this.
// This line also shows that just-introduced branches such as BP1.b2 can never be
// grafted away by component edges.
case Some(branch: Branch) => new UnpackState(
hyperedgeState = UnpackState.emptyMap + ((branch.branchPoint, branch)),
edgeState = UnpackState.emptyMap)
}
override def toRealization(state: UnpackState): Seq[Branch] = {
assert(state.edgeState.isEmpty)
state.hyperedgeState.values.toSeq
}
}
}
// final type parameter TaskDef is for storing the source of input edges
// each element of plan is a set of branches that are mutually compatible
// - not specifying a branch point indicates that any value is acceptable
// TODO: Multimap (just use typedef?)
class HyperWorkflow(val dag: PhantomMetaHyperDag[TaskTemplate,BranchPoint,Branch,SpecGroup],
val wd: WorkflowDefinition,
val packageDefs: Map[Namespace,PackageDef],
val plans: Seq[RealizationPlan],
val submitters: Seq[SubmitterDef], // TODO: Resolve earlier?
val versioners: Seq[VersionerDef],
val branchPointFactory: BranchPointFactory,
val branchFactory: BranchFactory,
traversal: Traversal = Arbitrary)
extends Logging {
import HyperWorkflow._
val vertexMap = new mutable.HashMap[TaskTemplate, PackedWorkVert]
for (v: PackedVertex[Option[TaskTemplate]] <- dag.vertices()) {
val tt: TaskTemplate = v.value.get
vertexMap += tt -> v
}
def toPackedVertex(task: TaskTemplate): PackedWorkVert = vertexMap(task)
def packedWalker(): PackedPhantomMetaDagWalker[TaskTemplate] = dag.packedWalker
// TODO: Currently only used by initial pass to find goals
// TODO: Document different use cases of planFilter vs plannedVertices
// NOTE: explainCallback can be used to provide the user with
// useful information about why certain realizations are not produced
def NO_EXPLAIN(vertexName: => String, msg: => String, accepted: Boolean) {}
def unpackedWalker(policy: PlanPolicy,
explainCallback: ( =>String, =>String, Boolean) => Unit = NO_EXPLAIN,
traversal: Traversal = Arbitrary)
: UnpackedWalker = {
// TODO: XXX: HACK: This shouldn't be called for nulls generated by epsilons
def toD(branch: Branch): Branch = if (branch != null) branch else Task.NO_BRANCH
def observe(v: UnpackedVertex[Option[TaskTemplate], Branch, SpecGroup, Branch])
= explainCallback(v.packed.toString, v.realization.mkString(Realization.delimiter), true)
// the inPlanConstraint implements both a RealizationMunger and a MetaVertexFilter
val inPlanConstraint = new InPlanConstraint(policy, explainCallback)
// order is important!
// 1) first, add each edge's state into a holding buffer: edgeState
// 2) apply grafts
// NOTE: some branches may disappear in grafting
// 3) enforce global branch point consistency
// 4) merge the edge state's holding buffer into the hyperedgeState
// 5) check that the final hyperedge state for plan membership
val munger = EdgeStateInitializer.
andThen(new BranchGraftMunger(dag, explainCallback)).
andThen(GlobalBranchPointConstraint).
andThen(EdgeStateMerger).
andThen(inPlanConstraint)
dag.unpackedWalker[Branch,UnpackState](munger, inPlanConstraint, toD, traversal, observe)(RealizationOrdering)
}
}
|
jhclark/ducttape
|
src/main/scala/ducttape/workflow/HyperWorkflow.scala
|
Scala
|
mpl-2.0
| 6,090 |
package com.nxtwv.graphs.neo
import play.api.libs.concurrent.Execution.Implicits._
import play.api.libs.functional.syntax._
import play.api.libs.json.Json.JsValueWrapper
import play.api.libs.json._
import play.api.libs.ws.ning.NingAsyncHttpClientConfigBuilder
import play.api.libs.ws.{DefaultWSClientConfig, WS, WSAuthScheme}
import scala.concurrent.Future
//import play.api.Play.current
trait NeoService {
sealed class Neo4JServer(val host:String, val port:Int, val path:String){
def url(part:String) = {
"http://%s:%s%s%s".format(host,port,path,part)
}
}
val clientConfig = new DefaultWSClientConfig()
val secureDefaults:com.ning.http.client.AsyncHttpClientConfig = new NingAsyncHttpClientConfigBuilder(clientConfig).build()
// You can directly use the builder for specific options once you have secure TLS defaults...
val builder = new com.ning.http.client.AsyncHttpClientConfig.Builder(secureDefaults)
builder.setCompressionEnabled(true)
val secureDefaultsWithSpecificOptions:com.ning.http.client.AsyncHttpClientConfig = builder.build()
implicit val implicitClient = new play.api.libs.ws.ning.NingWSClient(secureDefaultsWithSpecificOptions)
def batchCypher(statments: List[String])(implicit neoServer:Neo4JServer): Future[JsValue] = {
val jsonStatments: List[JsValue] = statments.map{
case s:String => Json.obj("statement" -> s)
}
// TODO: move to config
val req = WS.clientUrl(neoServer.url("transaction/commit"))
.withHeaders( ("Accept","application/json; charset=UTF-8"), ("Content-Type", "application/json") )
.withAuth("neo4j","p@ssword", WSAuthScheme.BASIC)
.post(Json.obj("statements" -> jsonStatments))
req.map { res =>
println(res.json)
res.json
}
}
case class Q(q: String, params: JsObject = Json.obj()) {
type Cols = List[String]
type Values = List[List[String]]
type ValuesNode = List[JsValue]
case class Neo4PlayException(msg: String) extends Exception(msg)
case class ResultError(code: String, message: String)
implicit val errorReads: Reads[ResultError] = (
(JsPath \ "code").read[String] and
(JsPath \\ "message").read[String]
)(ResultError.apply _)
def use(params: (String, JsValueWrapper)*): Q = Q(q, Json.obj(params: _*))
def getSingle[T](column: String)(implicit neoServer:Neo4JServer, reader: Reads[T]) = neoPost.map( l => (l.head \ column).as[T])
def getMultiple[T](column: String)(implicit neoServer:Neo4JServer, reader: Reads[T]) = neoPost.map(l => l.map(js => (js \ column).as[T]))
def getOneJs(implicit neoServer:Neo4JServer): Future[JsObject] = neoPost.map(_.head)
def getOne[T](implicit neoServer:Neo4JServer, r: Reads[T]): Future[T] = neoPost.map(_.head.as[T])
def getManyJs(implicit neoServer:Neo4JServer): Future[JsArray] = neoPost.map(JsArray)
def getMany[T](implicit neoServer:Neo4JServer, r: Reads[T]): Future[Seq[T]] = neoPost.map { results =>
results.map(_.as[T])
}
import scala.reflect.runtime.universe._
def getFields[T: TypeTag] = typeOf[T].members.collect {
case m: MethodSymbol if m.isCaseAccessor => m.name.toString
}.toList
def withReturn[T: TypeTag](node: String = "n"): Q = {
val newq = q + getFields[T].map( f => s"$node.$f as $f").mkString(" RETURN ", ", ", ";")
Q(newq, params)
}
def transactWith(queries: Q*) = ???
    /*
     * Because of the heterogeneous response type, we can only parse queries that
     * RETURN a single node (like RETURN p) or RETURN projections (like
     * RETURN p.id, p.name as provider). A query with no RETURN always returns an empty Seq.
     *
     * Neo4j always uses http 200 or 201 codes, so there is no 40x for errors: we need
     * to inspect the json "errors" field.
     */
private def neoPost()(implicit neoServer:Neo4JServer): Future[Seq[JsObject]] = {
val toPost = Json.obj("statements" -> Json.arr(Json.obj("statement" -> q.stripMargin, "parameters" -> params)))
WS.clientUrl(neoServer.url("transaction/commit"))
.withHeaders( ("Accept","application/json; charset=UTF-8"), ("Content-Type", "application/json") )
.withAuth("neo4j","p@ssword", WSAuthScheme.BASIC)
.post(toPost)
.map { res =>
println(res.json)
if(res.json.toString.contains("IN"))println(toPost)
val errors = (res.json \ "errors").as[Seq[ResultError]]
val results = (res.json \ "results")(0)
val values = (results \\ "row").map(_.as[Seq[JsValue]])
(errors, values) match {
case (err, _) if !err.isEmpty => throw Neo4PlayException(s"neo4j exception: ${errors.head.message} - ${errors.head.code}")
case (_, rows) if rows.isEmpty => Seq.empty
case _ => parseResult(values, res.json)
}
}
}
private def parseResult(values: Seq[Seq[JsValue]], json: JsValue) = {
val results = (json \ "results")(0)
val cols = (results \ "columns").as[Seq[String]]
values.head.head match {
case str: JsString => values.map(row => JsObject(cols.zip(row)))
case obj: JsObject => values.map { row =>
if(row.size > 1)
throw Neo4PlayException(s"Cannot parse multi node RETURN. $q")
else row.head.as[JsObject]
}
case _ => throw Neo4PlayException(s"neo4j exception: Cannot parse result request: $json")
}
}
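    /*
     * Usage sketch (server coordinates and the Person mapping are assumptions,
     * shown only to illustrate the single-node RETURN restriction above):
     *
     *   implicit val neo = new Neo4JServer("localhost", 7474, "/db/data/")
     *   case class Person(id: String, name: String)
     *   implicit val personReads = Json.reads[Person]
     *
     *   Q("MATCH (p:Person) WHERE p.id = {id}")
     *     .use("id" -> "42")
     *     .withReturn[Person]("p")
     *     .getOne[Person]
     */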
}
}
|
coreyauger/agent-smith
|
src/main/scala/com/nxtwv/graphs/neo/NeoService.scala
|
Scala
|
mit
| 5,416 |
/*
* Copyright (c) 2014-2016
* nonblocking.at gmbh [http://www.nonblocking.at]
*
* This file is part of Cliwix.
*
* Cliwix is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package at.nonblocking.cliwix.core.validation
import javax.xml.bind.{ValidationEventLocator, ValidationEvent, ValidationEventHandler}
import scala.collection.mutable
class XMLSchemaValidationEventHandler extends ValidationEventHandler {
private val _validationErrors = new mutable.MutableList[ValidationError]()
def validationErrors: List[ValidationError] = _validationErrors.toList
override def handleEvent(event: ValidationEvent): Boolean = {
def locationToString(location: ValidationEventLocator) = s"Line:${location.getLineNumber},Column:${location.getColumnNumber}"
_validationErrors += new ValidationError(event.getMessage, locationToString(event.getLocator), event.getLinkedException)
true
}
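  // Note: returning `true` from handleEvent tells JAXB to keep validating after
  // each recoverable error, so a single pass accumulates every problem instead
  // of aborting on the first one.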
}
|
nonblocking/cliwix
|
cliwix-core/src/main/scala/at/nonblocking/cliwix/core/validation/XMLSchemaValidationEventHandler.scala
|
Scala
|
agpl-3.0
| 1,518 |
/* Copyright 2015 Alessandro Maria Rizzi
* Copyright 2016 Eugenio Gianniti
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package profiler
class Execution(name: String, tasks: Seq[Record], allDurations: Duration) {
private lazy val taskNames: Seq[String] = tasks map { _.name }
lazy val taskId: String = taskNames.head
lazy val duration: Long = allDurations obtainTotalDuration taskNames
lazy val locations: Set[String] = tasks.map(_.location).toSet
def tasks(taskType: TaskType): Seq[Record] = tasks filter { _.taskType == taskType }
def numTasks(taskType: TaskType): Int = tasks(taskType).length
def validTasks(taskType: TaskType): Seq[Record] = tasks(taskType) filter { _.durationMSec > 0 }
def numValidTasks(taskType: TaskType): Int = validTasks(taskType).length
def sum(taskType: TaskType): Long = validTasks(taskType).map(_.durationMSec).sum
def max(taskType: TaskType): Long = validTasks(taskType).map(_.durationMSec).max
def min(taskType: TaskType): Long = validTasks(taskType).map(_.durationMSec).min
def avg(taskType: TaskType): Long = sum(taskType) / numValidTasks(taskType)
def tasks(vertex: String): Seq[Record] = tasks filter { _.vertex == vertex }
def numTasks(vertex: String): Long = tasks(vertex).length
def validTasks(vertex: String): Seq[Record] = tasks(vertex) filter { _.durationMSec > 0 }
def numValidTasks(vertex: String): Long = validTasks(vertex).length
def sum(vertex: String): Long = validTasks(vertex).map(_.durationMSec).sum
def max(vertex: String): Long = validTasks(vertex).map(_.durationMSec).max
def min(vertex: String): Long = validTasks(vertex).map(_.durationMSec).min
def avg(vertex: String): Long = sum(vertex) / numValidTasks(vertex)
lazy val vertices: List[String] = tasks.map(_.vertex).toList.distinct sortBy { _.split(" ").last.toInt }
lazy val isNonTrivialDag: Boolean = {
    // lengthCompare inspects at most 3 elements instead of traversing the whole list
val moreThanTwo = vertices filterNot { _ startsWith "Shuffle" } lengthCompare 2
moreThanTwo > 0
}
lazy val shuffleBytes: Seq[Long] = tasks(ShuffleTask) map { _.bytes }
lazy val sumShuffleBytes: Long = shuffleBytes.sum
lazy val maxShuffleBytes: Long = shuffleBytes.max
lazy val minShuffleBytes: Long = shuffleBytes.min
lazy val avgShuffleBytes: Long = sumShuffleBytes / numTasks(ShuffleTask)
def shuffleBytes(vertex: String): Seq[Long] = tasks(vertex) map { _.bytes }
def sumShuffleBytes(vertex: String): Long = shuffleBytes(vertex).sum
def maxShuffleBytes(vertex: String): Long = shuffleBytes(vertex).max
def minShuffleBytes(vertex: String): Long = shuffleBytes(vertex).min
def avgShuffleBytes(vertex: String): Long = sumShuffleBytes(vertex) / numTasks(vertex)
lazy val nodes: Seq[String] = tasks.map( _.node ).toList.distinct.sorted
private lazy val tasksByNodes: Map[String, Seq[Record]] = tasks groupBy { _.node }
def tasks(vertex: String, node: String): Seq[Record] = tasksByNodes getOrElse
(node, Seq[Record]()) filter { _.vertex == vertex }
def numTasks(vertex: String, node: String): Long = tasks(vertex, node).length
def validTasks(vertex: String, node: String): Seq[Record] = tasks(vertex, node) filter { _.durationMSec > 0 }
def numValidTasks(vertex: String, node: String): Long = validTasks(vertex, node).length
def sum(vertex: String, node: String): Option[Long] = try {
Some(validTasks(vertex, node).map( _.durationMSec ).sum)
} catch {
case e: UnsupportedOperationException => None
}
def max(vertex: String, node: String): Option[Long] = try {
Some(validTasks(vertex, node).map( _.durationMSec ).max)
} catch {
case e: UnsupportedOperationException => None
}
def min(vertex: String, node: String): Option[Long] = try {
Some(validTasks(vertex, node).map( _.durationMSec ).min)
} catch {
case e: UnsupportedOperationException => None
}
def avg(vertex: String, node: String): Option[Long] = sum(vertex, node) map { _ / numValidTasks(vertex, node) }
def cleanOverlaps(dependencies: Map[String, List[String]]): Execution = {
val groups = tasks groupBy { _.vertex }
val nextTasks = groups flatMap {
case (vertex, theseTasks) =>
val actualVertexName = vertex replace ("Shuffle", "Reducer")
dependencies get actualVertexName map {
list =>
val predecessorCompletions = list flatMap groups.apply map { _.stopMSec }
val lastCompletion = predecessorCompletions.max
theseTasks map { _ cutFrontOverlap lastCompletion }
} getOrElse theseTasks
}
new Execution(name, nextTasks.toSeq, allDurations)
}
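  // For example (hypothetical vertices): if "Reducer 2" depends on "Map 1" and
  // the last "Map 1" task stops at t = 1000 ms, every "Shuffle 2"/"Reducer 2"
  // task has the part of its run before t = 1000 ms trimmed via cutFrontOverlap,
  // so ramp-up time overlapped with predecessors is not double-counted.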
}
object Execution {
def apply(text: String, duration: Duration, shuffle: Shuffle, vertices: Vertices,
nodes: Nodes): Execution = {
val lines = text split "\\n"
new Execution(lines.head, nodes(shuffle(vertices(lines map Record.apply))), duration)
}
}
|
deib-polimi/Profiler
|
src/main/scala-2.11/profiler/Execution.scala
|
Scala
|
apache-2.0
| 5,397 |
object Test {
def test() = {
java.util.Arrays.asList(Array(1,2,3):_*)
}
def main(args: Array[String]) {
println(test())
}
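  // With the `: _*` ascription the array is expanded into varargs, so this
  // prints `[1, 2, 3]` rather than a single-element list holding the array.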
}
|
felixmulder/scala
|
test/files/run/t3199b.scala
|
Scala
|
bsd-3-clause
| 143 |
package io.getquill
import com.datastax.oss.driver.api.core.CqlSession
import com.typesafe.config.Config
import io.getquill.context.{ AsyncFutureCache, CassandraSession, SyncCache }
import io.getquill.util.LoadConfig
import zio.{ Has, ZIO, ZLayer, ZManaged }
case class CassandraZioSession(
override val session: CqlSession,
override val preparedStatementCacheSize: Long
) extends CassandraSession with SyncCache with AsyncFutureCache with AutoCloseable
object CassandraZioSession {
val live: ZLayer[Has[CassandraContextConfig], Throwable, Has[CassandraZioSession]] =
(for {
config <- ZManaged.service[CassandraContextConfig]
// Evaluate the configuration inside of 'effect' and then create the session from it
session <- ZManaged.fromAutoCloseable(
ZIO.effect(CassandraZioSession(config.session, config.preparedStatementCacheSize))
)
} yield session).toLayer
def fromContextConfig(config: CassandraContextConfig): ZLayer[Any, Throwable, Has[CassandraZioSession]] =
ZLayer.succeed(config) >>> live
def fromConfig(config: Config) = fromContextConfig(CassandraContextConfig(config))
// Call the by-name constructor for the construction to fail inside of the effect if it fails
def fromPrefix(configPrefix: String) = fromContextConfig(CassandraContextConfig(LoadConfig(configPrefix)))
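  // Usage sketch (the "ctx" prefix is an assumption, not a shipped default):
  //   val layer = CassandraZioSession.fromPrefix("ctx")
  //   program.provideCustomLayer(layer)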
}
|
getquill/quill
|
quill-cassandra-zio/src/main/scala/io/getquill/CassandraZioSession.scala
|
Scala
|
apache-2.0
| 1,365 |
package launchers
import java.io.{File,FileWriter, BufferedWriter}
import java.util.TimerTask
import org.apache.spark.launcher.SparkLauncher
import org.joda.time.DateTime
import models.Project
import scala.sys.process
import scala.sys.process._
import scala.sys.process.ProcessLogger
class PTCrawlerLauncher(crawlerPath:String, projectsPath: String, tempProjectJsonPath: String) extends TimerTask {
def run {
val source = scala.io.Source.fromFile(projectsPath)
try {
val lines = source.getLines().mkString("\n")
val projects = Project.projectsFromJson(lines)
val today = DateTime.now()
var newProjects = List[Project]()
// 1. The crawler is launched
for (project: Project <- projects) {
println("Today : " + today)
println("Project day : " + project.start.toDate)
        if (project.start.dayOfYear().get() <= today.dayOfYear().get() &&
            project.start.plusDays(1).dayOfYear().get() > today.dayOfYear().get()) {
newProjects = project :: newProjects
}
}
launchNewProjects(newProjects)
launchAllProjects
// 2. The Spark process is launched
// println("Launching the Spark app")
}finally{
source.close()
}
}
def sparkLauncher():Unit = {
val spark = new SparkLauncher()
.setSparkHome("/opt/sds/spark/")
.setAppResource("/home/jvmarcos/DummySparkApp-assembly-1.0.jar")
.setMainClass("DummySparkApp")
.setMaster("mesos://192.168.1.12:5050")
.launch()
spark.waitFor()
}
def launchNewProjects(projects: List[Project]): Unit = {
println("Launching new project")
writeToTempJson(projects, tempProjectJsonPath)
val commandStr = "python " + crawlerPath + " new_projects " + tempProjectJsonPath
println(commandStr)
val stdout = new StringBuilder
val stderr = new StringBuilder
val status = commandStr.lines_!(ProcessLogger(stdout append _, stderr append _))
//println(status)
//println("stdout: " + stdout)
//println("stderr: " + stderr)
//print(stdout)
println("Continuing")
}
def launchAllProjects(): Unit = {
println("Launching all projects")
}
def writeToTempJson(projects: List[Project], filePath: String): Unit = {
val file = new File(filePath)
val bw = new BufferedWriter(new FileWriter(file))
bw.write(Project.toJson(projects))
bw.close()
}
}
object PTCrawlerLauncher {
//def main(args: Array[String]) {
// println("pwd".!!)
// val pt_crawler = new PTCrawlerLauncher("/home/cnavarro/workspace/mixedemotions/me_extractors/crawlers/paradigma_python_harvester/paradigma_harvester.py",
// "/home/cnavarro/workspace/mixedemotions/me_extractors/BRMProjectManager/src/main/resources/projects.json",
// "/home/cnavarro/workspace/mixedemotions/me_extractors/crawlers/paradigma_python_harvester/temp.json")
// pt_crawler.run
//}
}
|
canademar/me_extractors
|
BRMProjectManager/src/main/scala/launchers/PTCrawlerLauncher.scala
|
Scala
|
gpl-2.0
| 2,999 |
package com.stefansavev.randomprojections.implementation
import java.io.File
import com.stefansavev.randomprojections.datarepr.dense.DataFrameView
import com.stefansavev.randomprojections.implementation.query.NearestNeigbhorQueryScratchBuffer
import com.stefansavev.randomprojections.interface.Index
import com.stefansavev.randomprojections.serialization.{PointSignaturesSerializer, BinaryFileSerializer}
import com.stefansavev.randomprojections.tuning.PerformanceCounters
class IndexImpl(val signatures: PointSignatures, val totalNumPoints: Int, val leaf2Points: Leaf2Points, val id2Label: Array[Int]) extends Index{
def toFile(file: File): Unit = {
val ser = new BinaryFileSerializer(file)
PointSignaturesSerializer.toBinary(ser.stream, signatures)
ser.putIntArrays(leaf2Points.starts, leaf2Points.points)
ser.putInt(leaf2Points.numberOfLeaves())
ser.putInt(totalNumPoints)
ser.putIntArray(id2Label)
ser.close()
}
def fillCountsWithoutWeights(query: Array[Double], settings: SearcherSettings, trainingSet: DataFrameView, bucketIds: Array[Int], pointCounts: Array[Int], maxIndex: Int, countsOfCounts: Array[Int]): Unit = {
val signatures = this.signatures
val sigVectors = settings.randomTrees.signatureVecs
val querySigs = sigVectors.computePointSignatures(query)
val leafEnd = bucketIds.length
var i = 0
while (i < leafEnd) {
val leaf = bucketIds(i)
val pointsStart = leaf2Points.starts(leaf)
val pointsEnd = leaf2Points.starts(leaf + 1)
PerformanceCounters.touchedPointsDuringSearch(pointsEnd - pointsStart)
var j = pointsStart
while (j < pointsEnd) {
val pointId = leaf2Points.points(j)
val prevCount = pointCounts(pointId)
val inc = if (prevCount == 0) {1 + signatures.overlap(querySigs, pointId)} else 1
if (prevCount < maxIndex){
countsOfCounts(prevCount) -= 1
countsOfCounts(prevCount + inc) += 1
}
pointCounts(pointId) = prevCount + inc
j += 1
}
i += 1
}
countsOfCounts(0) = 0
}
def clearCountsWithoutWeights(bucketIds: Array[Int], pointCounts: Array[Int], pruningThreshold: Int, maxIndex: Int, countsOfCounts: Array[Int], buffer: Array[KNN]): Unit = {
val leafEnd = bucketIds.length
var i = 0 //leafStart
while (i < leafEnd) {
val leaf = bucketIds(i)
//val bucketScore = bucketScores(i) //not used
val pointsStart = leaf2Points.starts(leaf)
val pointsEnd = leaf2Points.starts(leaf + 1)
var j = pointsStart
while (j < pointsEnd) {
val pointId = leaf2Points.points(j)
val count = pointCounts(pointId)
if (count >= pruningThreshold) {
val index = Math.min(count, maxIndex) - 1
buffer(buffer.length - countsOfCounts(index) - 1) = new KNN(neighborId = pointId, count = count)
countsOfCounts(index) += 1
}
if (count != 0) {
pointCounts(pointId) = 0 //also serves to avoid processing the same point twice
}
j += 1
}
i += 1
}
}
def fillCountsWithWeights(bucketIds: Array[Int], bucketScores: Array[Double], pointCounts: Array[Int], pointWeights: Array[Double]): Unit = {
//bucket scores are useful when you have only a few trees and you go very fine grained
val leafEnd = bucketIds.length
var i = 0 //leafStart
while (i < leafEnd) {
val leaf = bucketIds(i)
val bucketScore = bucketScores(i)
val pointsStart = leaf2Points.starts(leaf)
val pointsEnd = leaf2Points.starts(leaf + 1)
PerformanceCounters.touchedPointsDuringSearch(pointsEnd - pointsStart)
var j = pointsStart
while (j < pointsEnd) {
val pointId = leaf2Points.points(j)
pointCounts(pointId) += 1
bucketScores(pointId) += bucketScore
j += 1
}
i += 1
}
}
def getPruningThreshold(maxIndex: Int, countsOfCounts: Array[Int], threshold: Int): (Int, Int) = {
var i = maxIndex
var sumCounts = countsOfCounts(maxIndex)
while(i >= 1 && sumCounts < threshold){
i -= 1
sumCounts += countsOfCounts(i)
}
PerformanceCounters.nonPrunedPoints(sumCounts)
val pruningThreshold = Math.max(i, 1) //cannot be zero
(pruningThreshold, sumCounts)
}
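  // Worked example (hypothetical numbers): countsOfCounts = [0, 5, 3, 2] with
  // maxIndex = 3 and threshold = 4 accumulates 2, then 2 + 3 = 5 >= 4 and stops,
  // returning (2, 5): only points counted at least twice survive pruning, and
  // five such points remain.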
def partialSumOnCounts(threshold: Int, countsOfCounts: Array[Int]): Unit = {
var i = threshold
countsOfCounts(i - 1) = 0
while(i < countsOfCounts.length){
countsOfCounts(i) += countsOfCounts(i - 1)
i += 1
}
}
def fillDistances(query: Array[Double], rescoreExactlyTopK: Int, buffer: Array[KNN], settings: SearcherSettings, trainingSet: DataFrameView): Unit = {
var i = 0
while(i < buffer.length){
val knn = buffer(i)
val pointId = knn.neighborId
knn.label = id2Label(pointId)
knn.dist = trainingSet.cosineForNormalizedData(query, pointId)
i += 1
}
PerformanceCounters.evaluatedPointsDuringSearch(buffer.length)
}
override def getNearestNeighbors(k: Int, pointName: Int, query: Array[Double], settings: SearcherSettings, searchBucketsResult: SearchBucketsResult, scratchBuffer: NearestNeigbhorQueryScratchBuffer): KNNS = {
val bucketIds = searchBucketsResult.bucketIndexBuffer.toArray()
    //TODO: val usesWeights = settings.usesPointWeights
    //TODO: val bucketScores = if (usesWeights) searchBucketsResult.bucketScoreBuffer.toArray() else null
val pointCounts = scratchBuffer.pointCounts
val dataset = settings.trainingSet
    val maxIndex = 64*settings.randomTrees.signatureVecs.numSignatures + settings.randomTrees.trees.length //signature bits plus one per tree
    val countsOfCounts = Array.ofDim[Int](maxIndex + 1) //histogram over point counts; a point can score at most maxIndex
//Stage 1: Fill the counts of the points (in how many trees(buckets) does the point appear)
fillCountsWithoutWeights(query, settings, dataset, bucketIds, pointCounts, maxIndex, countsOfCounts)
//Stage 2: extract the counts and clear the memory
val (pruningThreshold, expectedCounts) = getPruningThreshold(maxIndex, countsOfCounts, settings.pointScoreSettings.topKCandidates)
PerformanceCounters.pruningThreshold(pruningThreshold)
partialSumOnCounts(pruningThreshold, countsOfCounts)
//Clear the counts
val buffer = Array.ofDim[KNN](expectedCounts)
clearCountsWithoutWeights(bucketIds, pointCounts, pruningThreshold, maxIndex, countsOfCounts, buffer)
PerformanceCounters.thresholdedPointsDuringSearch(buffer.size)
PerformanceCounters.sortedPointsDuringSearch(buffer.size)
fillDistances(query, settings.pointScoreSettings.rescoreExactlyTopK, buffer, settings, dataset)
java.util.Arrays.sort(buffer, new KNNDistanceComparator())
KNNS(k, pointName, buffer)
}
}
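// Illustrative sketch (not part of the original file): partialSumOnCounts turns the
// counts-of-counts histogram into running offsets, the same trick as the offset table
// of a counting sort. A standalone demo with hypothetical values:
object PartialSumDemo {
  def main(args: Array[String]): Unit = {
    val countsOfCounts = Array(0, 5, 3, 2) // countsOfCounts(c) = number of points seen exactly c times
    val threshold = 2                      // pretend pruning threshold
    countsOfCounts(threshold - 1) = 0
    var i = threshold
    while (i < countsOfCounts.length) {
      countsOfCounts(i) += countsOfCounts(i - 1)
      i += 1
    }
    println(countsOfCounts.mkString(", ")) // prints "0, 0, 3, 5" -> slot offsets for the KNN buffer
  }
}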
|
codeaudit/random-projections-at-berlinbuzzwords
|
src/main/scala/com/stefansavev/randomprojections/implementation/IndexImpl.scala
|
Scala
|
apache-2.0
| 7,839 |
package ml.combust.bundle.test.ops
/**
* Created by hollinwilkins on 8/21/16.
*/
trait Transformer {
val uid: String
}
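/**
 * Illustrative only (not part of the original test ops): a concrete op simply
 * carries a stable uid, e.g.
 */
case class DummyTransformer(uid: String = "dummy_transformer") extends Transformer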
|
combust/mleap
|
bundle-ml/src/test/scala/ml/combust/bundle/test/ops/Transformer.scala
|
Scala
|
apache-2.0
| 126 |
object A {
def f :Int = {
class B {
println("B")
      return 10 // error: "return outside method definition" - return is illegal in a class constructor body
}
new B
20
}
def main(args: Array[String]) {
f
}
}
|
felixmulder/scala
|
test/files/neg/t1033.scala
|
Scala
|
bsd-3-clause
| 150 |
/*
* Copyright 2011-2014 Chris de Vreeze
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.ebpi.yaidom.queryapitests.dom
import scala.Vector
import scala.collection.immutable
import org.junit.Test
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import nl.ebpi.yaidom.convert
import nl.ebpi.yaidom.core.EName
import nl.ebpi.yaidom.core.QName
import nl.ebpi.yaidom.core.Scope
import nl.ebpi.yaidom.simple.Document
import nl.ebpi.yaidom.simple.Elem
import nl.ebpi.yaidom.simple.ElemBuilder
import nl.ebpi.yaidom.simple.NodeBuilder
import nl.ebpi.yaidom.dom.DomElem
import nl.ebpi.yaidom.queryapi.HasENameApi.ToHasElemApi
import nl.ebpi.yaidom.queryapitests.AbstractElemLikeQueryTest
import nl.ebpi.yaidom.resolved
import javax.xml.parsers.DocumentBuilderFactory
/**
* Query test case for DOM wrapper elements.
*
* @author Chris de Vreeze
*/
@RunWith(classOf[JUnitRunner])
class QueryTest extends AbstractElemLikeQueryTest {
final type E = DomElem
@Test def testQueryAll(): Unit = {
require(bookstore.localName == "Bookstore")
val elems = bookstore.findAllElemsOrSelf
assertResult(true) {
!elems.isEmpty
}
}
@Test def testQueryBookOrMagazineTitlesUsingParent(): Unit = {
// XPath: doc("bookstore.xml")/Bookstore/(Book | Magazine)/Title
require(bookstore.localName == "Bookstore")
val bookOrMagazineTitles =
for {
title <- bookstore filterElems { _.resolvedName == EName("Title") }
if (title.parent.resolvedName == EName("Book")) || (title.parent.resolvedName == EName("Magazine"))
} yield title
assertResult(Set(
"A First Course in Database Systems",
"Database Systems: The Complete Book",
"Hector and Jeff's Database Hints",
"Jennifer's Economical Database Hints",
"National Geographic",
"Newsweek")) {
val result = bookOrMagazineTitles map { _.trimmedText }
result.toSet
}
}
@Test def testQueryTitlesUsingPaths(): Unit = {
// XPath: doc("bookstore.xml")/Bookstore/*/Title
require(bookstore.localName == "Bookstore")
val titles =
for {
title <- bookstore findTopmostElems { _.resolvedName == EName("Title") }
if title.parentOption.isDefined && title.parent.parentOption.map(_.localName) == Some("Bookstore")
} yield title
assertResult(Set(
"A First Course in Database Systems",
"Database Systems: The Complete Book",
"Hector and Jeff's Database Hints",
"Jennifer's Economical Database Hints",
"National Geographic",
"Newsweek")) {
val result = titles map { _.trimmedText }
result.toSet
}
}
@Test def testQueryCheapBookTitlesUsingParent(): Unit = {
// XPath: doc("bookstore.xml")/Bookstore/Book[@Price < 90]/Title
require(bookstore.localName == "Bookstore")
val titles = bookstore.findAllElems filter { e =>
(e.resolvedName == EName("Title")) && {
val parentElm = e.parent
parentElm.localName == "Book" && parentElm.attribute(EName("Price")).toInt < 90
}
}
assertResult(Set(
"A First Course in Database Systems",
"Hector and Jeff's Database Hints",
"Jennifer's Economical Database Hints")) {
val result = titles map { _.trimmedText }
result.toSet
}
}
@Test def testQueryTitlesOfCheapBooksByUllmanUsingParent(): Unit = {
// XPath: doc("bookstore.xml")/Bookstore/Book[@Price < 90 and Authors/Author/Last_Name = "Ullman"]/Title
require(bookstore.localName == "Bookstore")
val bookTitles =
bookstore findTopmostElems { e => e.localName == "Last_Name" && e.trimmedText == "Ullman" } flatMap { elm =>
require(elm.resolvedName == EName("Last_Name"))
val bookOption = elm findAncestor { e => e.resolvedName == EName("Book") && e.attribute(EName("Price")).toInt < 90 }
val titleOption = bookOption flatMap { bookElm => bookElm findElem { e => e.resolvedName == EName("Title") } }
titleOption
}
assertResult(Set(
"A First Course in Database Systems",
"Hector and Jeff's Database Hints")) {
val result = bookTitles map { _.trimmedText }
result.toSet
}
}
@Test def testQueryTitlesOfCheapBooksByJeffreyUllmanUsingParent(): Unit = {
// XPath: doc("bookstore.xml")/Bookstore/Book[@Price < 90 and Authors/Author[Last_Name = "Ullman" and First_Name = "Jeffrey"]]/Title
require(bookstore.localName == "Bookstore")
def authorLastAndFirstName(authorElem: DomElem): (String, String) = {
val lastNames = authorElem.filterChildElems(EName("Last_Name")) map { _.text.trim }
val firstNames = authorElem.filterChildElems(EName("First_Name")) map { _.text.trim }
(lastNames.mkString, firstNames.mkString)
}
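    // e.g. an Author element holding <First_Name>Jeffrey</First_Name> and
    // <Last_Name>Ullman</Last_Name> yields ("Ullman", "Jeffrey")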
val bookTitles2 =
for {
authorElem <- bookstore filterElemsOrSelf { _.resolvedName == EName("Author") }
(lastName, firstName) = authorLastAndFirstName(authorElem)
if lastName == "Ullman" && firstName == "Jeffrey"
bookElem <- authorElem findAncestor { _.resolvedName == EName("Book") }
if bookElem.attributeOption(EName("Price")).map(_.toInt).getOrElse(0) < 90
} yield bookElem.getChildElem(EName("Title"))
assertResult(Set(
"A First Course in Database Systems",
"Hector and Jeff's Database Hints")) {
val result = bookTitles2 map { _.trimmedText }
result.toSet
}
val bookTitles3 =
for {
authorElem <- bookstore \\ EName("Author")
(lastName, firstName) = authorLastAndFirstName(authorElem)
if lastName == "Ullman" && firstName == "Jeffrey"
bookElem <- authorElem findAncestor { _.resolvedName == EName("Book") }
if (bookElem \@ EName("Price")).map(_.toInt).getOrElse(0) < 90
} yield (bookElem \ EName("Title")).head
assertResult(Set(
"A First Course in Database Systems",
"Hector and Jeff's Database Hints")) {
val result = bookTitles3 map { _.trimmedText }
result.toSet
}
}
@Test def testQueryBooksByJeffreyUllmanUsingParent(): Unit = {
// Own example
require(bookstore.localName == "Bookstore")
val ullmanBookElms =
for {
authorElm <- bookstore filterElems { e =>
(e.localName == "Author") &&
((e.getChildElem(_.localName == "First_Name")).text == "Jeffrey") &&
((e.getChildElem(_.localName == "Last_Name")).text == "Ullman")
}
bookElm = authorElm.parent.parent
} yield {
require(bookElm.localName == "Book")
bookElm
}
assertResult(Set(
"A First Course in Database Systems",
"Database Systems: The Complete Book",
"Hector and Jeff's Database Hints")) {
val result = ullmanBookElms map { e => e.getChildElem(_.localName == "Title").text }
result.toSet
}
}
@Test def testQueryElementsWithParentNotBookOrBookstore(): Unit = {
// XPath: doc("bookstore.xml")//*[name(parent::*) != "Bookstore" and name(parent::*) != "Book"]
require(bookstore.localName == "Bookstore")
val elms =
for {
e <- bookstore.findAllElems
parent = e.parent
if parent.qname != QName("Bookstore") && parent.qname != QName("Book")
} yield e
assert(elms.size > 10, "Expected more than 10 matching elements")
assertResult(Set(QName("Title"), QName("Author"), QName("First_Name"), QName("Last_Name"))) {
val result = elms map { e => e.qname }
result.toSet
}
}
/**
* The equivalent of XQuery:
* {{{
* for $b in doc("bookstore.xml")/Bookstore/Book
   * where some $fn in $b/Authors/Author/First_Name satisfies contains($b/Title, $fn)
   * return <Book>
   * { $b/Title }
   * { for $fn in $b/Authors/Author/First_Name where contains($b/Title, $fn) return $fn }
* </Book>
* }}}
*/
@Test def testQueryBooksWithAuthorInTitle(): Unit = {
require(bookstore.localName == "Bookstore")
import NodeBuilder._
val titleAndFirstNames =
for {
book <- bookstore \ (_.localName == "Book")
title = book.getChildElem(EName("Title"))
authorFirstNames = {
val result = book.filterElems(EName("Author")) map { _.getChildElem(EName("First_Name")).trimmedText }
result.toSet
}
searchedForFirstNames = authorFirstNames filter { firstName => title.trimmedText.indexOf(firstName) >= 0 }
if !searchedForFirstNames.isEmpty
} yield {
val titleElem = convert.DomConversions.convertToElem(title.wrappedNode, book.scope)
elem(
qname = QName("Book"),
children = Vector(
fromElem(titleElem)(Scope.Empty),
textElem(QName("First_Name"), searchedForFirstNames.head))).build()
}
assertResult(2) {
titleAndFirstNames.size
}
assertResult(Set("Hector and Jeff's Database Hints", "Jennifer's Economical Database Hints")) {
val titleElms = titleAndFirstNames map { e => e.filterElems(EName("Title")) }
val result = titleElms.flatten map { e => e.trimmedText }
result.toSet
}
}
/**
* The equivalent of XQuery:
* {{{
* let $a := avg(doc("bookstore.xml")/Bookstore/Book/@Price)
* for $b in doc("bookstore.xml")/Bookstore/Book
* where $b/@Price < $a
* return <Book>
* { $b/Title }
* <Price> { $b/data(@Price) } </Price>
* </Book>
* }}}
*/
@Test def testQueryBooksPricedBelowAverage(): Unit = {
require(bookstore.localName == "Bookstore")
import NodeBuilder._
val prices: immutable.IndexedSeq[Double] =
for {
book <- bookstore \ (_.localName == "Book")
price = book.attribute(EName("Price")).toDouble
} yield price
val avg: Double = prices.sum.toDouble / prices.size
val cheapBooks =
for {
book <- bookstore \ (_.localName == "Book")
price = book.attribute(EName("Price")).toDouble
if price < avg
} yield {
val title = book.getChildElem(EName("Title"))
val titleElem = convert.DomConversions.convertToElem(title.wrappedNode, book.scope)
elem(
qname = QName("Book"),
children = Vector(
fromElem(titleElem)(Scope.Empty),
textElem(QName("Price"), price.toString))).build()
}
assertResult(2) {
cheapBooks.size
}
assertResult(Set(50, 25)) {
val result = cheapBooks flatMap { e => e.filterElems(EName("Price")) } map { e => e.trimmedText.toDouble.intValue }
result.toSet
}
assertResult(Set("Hector and Jeff's Database Hints", "Jennifer's Economical Database Hints")) {
val result = cheapBooks flatMap { e => e.filterElems(EName("Title")) } map { e => e.trimmedText }
result.toSet
}
}
/**
* The equivalent of XQuery:
* {{{
* for $b in doc("bookstore.xml")/Bookstore/Book
* order by $b/@Price
* return <Book>
* { $b/Title }
* <Price> { $b/data(@Price) } </Price>
* </Book>
* }}}
*/
@Test def testQueryBooksOrderedByPrice(): Unit = {
require(bookstore.localName == "Bookstore")
import NodeBuilder._
def cheaper(book1: DomElem, book2: DomElem): Boolean = {
val price1 = book1.attribute(EName("Price")).toInt
val price2 = book2.attribute(EName("Price")).toInt
price1 < price2
}
val books = {
for {
book <- bookstore \ (_.localName == "Book") sortWith { cheaper _ }
price = book.attribute(EName("Price")).toDouble
} yield {
val title = book.getChildElem(EName("Title"))
val titleElem = convert.DomConversions.convertToElem(title.wrappedNode, book.scope)
elem(
qname = QName("Book"),
children = Vector(
fromElem(titleElem)(Scope.Empty),
textElem(QName("Price"), price.toString))).build()
}
}
assertResult(4) {
books.size
}
assertResult(List(25, 50, 85, 100)) {
books flatMap { e => e.filterElems(EName("Price")) } map { e => e.trimmedText.toDouble.intValue }
}
assertResult(List(
"Jennifer's Economical Database Hints",
"Hector and Jeff's Database Hints",
"A First Course in Database Systems",
"Database Systems: The Complete Book")) {
books flatMap { e => e.filterElems(EName("Title")) } map { e => e.trimmedText }
}
}
private val book1Builder: ElemBuilder = {
import NodeBuilder._
elem(
qname = QName("Book"),
attributes = Vector(QName("ISBN") -> "ISBN-0-13-713526-2", QName("Price") -> "85", QName("Edition") -> "3rd"),
children = Vector(
textElem(QName("Title"), "A First Course in Database Systems"),
elem(
qname = QName("Authors"),
children = Vector(
elem(
qname = QName("Author"),
children = Vector(
textElem(QName("First_Name"), "Jeffrey"),
textElem(QName("Last_Name"), "Ullman"))),
elem(
qname = QName("Author"),
children = Vector(
textElem(QName("First_Name"), "Jennifer"),
textElem(QName("Last_Name"), "Widom")))))))
}
private val book2Builder: ElemBuilder = {
import NodeBuilder._
elem(
qname = QName("Book"),
attributes = Vector(QName("ISBN") -> "ISBN-0-13-815504-6", QName("Price") -> "100"),
children = Vector(
textElem(QName("Title"), "Database Systems: The Complete Book"),
elem(
qname = QName("Authors"),
children = Vector(
elem(
qname = QName("Author"),
children = Vector(
textElem(QName("First_Name"), "Hector"),
textElem(QName("Last_Name"), "Garcia-Molina"))),
elem(
qname = QName("Author"),
children = Vector(
textElem(QName("First_Name"), "Jeffrey"),
textElem(QName("Last_Name"), "Ullman"))),
elem(
qname = QName("Author"),
children = Vector(
textElem(QName("First_Name"), "Jennifer"),
textElem(QName("Last_Name"), "Widom"))))),
textElem(QName("Remark"), "Buy this book bundled with \"A First Course\" - a great deal!")))
}
private val book3Builder: ElemBuilder = {
import NodeBuilder._
elem(
qname = QName("Book"),
attributes = Vector(QName("ISBN") -> "ISBN-0-11-222222-3", QName("Price") -> "50"),
children = Vector(
textElem(QName("Title"), "Hector and Jeff's Database Hints"),
elem(
qname = QName("Authors"),
children = Vector(
elem(
qname = QName("Author"),
children = Vector(
textElem(QName("First_Name"), "Jeffrey"),
textElem(QName("Last_Name"), "Ullman"))),
elem(
qname = QName("Author"),
children = Vector(
textElem(QName("First_Name"), "Hector"),
textElem(QName("Last_Name"), "Garcia-Molina"))))),
textElem(QName("Remark"), "An indispensable companion to your textbook")))
}
private val book4Builder: ElemBuilder = {
import NodeBuilder._
elem(
qname = QName("Book"),
attributes = Vector(QName("ISBN") -> "ISBN-9-88-777777-6", QName("Price") -> "25"),
children = Vector(
textElem(QName("Title"), "Jennifer's Economical Database Hints"),
elem(
qname = QName("Authors"),
children = Vector(
elem(
qname = QName("Author"),
children = Vector(
textElem(QName("First_Name"), "Jennifer"),
textElem(QName("Last_Name"), "Widom")))))))
}
private val magazine1Builder: ElemBuilder = {
import NodeBuilder._
elem(
qname = QName("Magazine"),
attributes = Vector(QName("Month") -> "January", QName("Year") -> "2009"),
children = Vector(
textElem(QName("Title"), "National Geographic")))
}
private val magazine2Builder: ElemBuilder = {
import NodeBuilder._
elem(
qname = QName("Magazine"),
attributes = Vector(QName("Month") -> "February", QName("Year") -> "2009"),
children = Vector(
textElem(QName("Title"), "National Geographic")))
}
private val magazine3Builder: ElemBuilder = {
import NodeBuilder._
elem(
qname = QName("Magazine"),
attributes = Vector(QName("Month") -> "February", QName("Year") -> "2009"),
children = Vector(
textElem(QName("Title"), "Newsweek")))
}
private val magazine4Builder: ElemBuilder = {
import NodeBuilder._
elem(
qname = QName("Magazine"),
attributes = Vector(QName("Month") -> "March", QName("Year") -> "2009"),
children = Vector(
textElem(QName("Title"), "Hector and Jeff's Database Hints")))
}
protected final val bookstore: DomElem = {
import NodeBuilder._
val resultElem: Elem =
elem(
qname = QName("Bookstore"),
children = Vector(
book1Builder, book2Builder, book3Builder, book4Builder,
magazine1Builder, magazine2Builder, magazine3Builder, magazine4Builder)).build(Scope.Empty)
val resultDoc: Document = Document(resultElem)
val dbf = DocumentBuilderFactory.newInstance
val db = dbf.newDocumentBuilder
val domDoc = db.newDocument()
new DomElem(convert.DomConversions.convertElem(resultDoc.documentElement)(domDoc))
}
protected final def toResolvedElem(elem: E): resolved.Elem =
resolved.Elem(convert.DomConversions.convertToElem(elem.wrappedNode, Scope.Empty))
}
|
EBPI/yaidom
|
src/test/scala/nl/ebpi/yaidom/queryapitests/dom/QueryTest.scala
|
Scala
|
apache-2.0
| 18,307 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sparklinedata.druid.client.test
import com.github.nscala_time.time.Imports._
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.sparklinedata.druid.{DruidQuery, GroupByQuerySpec, SearchQuerySpec}
import org.sparklinedata.spark.dateTime.dsl.expressions._
import scala.language.postfixOps
class DruidRewritesTest extends BaseTest {
test("profile",
"select count(distinct o_custkey) from orderLineItemPartSupplier",
1,
true,
true
)
test("basicAggB",
"select l_returnflag, l_linestatus, " +
"count(*), sum(l_extendedprice) as s, max(ps_supplycost) as m, avg(ps_availqty) as a," +
"count(distinct o_orderkey) " +
"from orderLineItemPartSupplierBase group by l_returnflag, l_linestatus",
0,
true
)
test("basicAgg",
"select l_returnflag, l_linestatus, " +
"count(*), sum(l_extendedprice) as s, max(ps_supplycost) as m, avg(ps_availqty) as a," +
"count(distinct o_orderkey) " +
"from orderLineItemPartSupplier group by l_returnflag, l_linestatus",
2,
true
)
test("view",
"""
|SELECT COUNT(DISTINCT CAST(`orderLineItemPartSupplier`.`l_shipdate` AS TIMESTAMP))
| AS `ctd_date_string_ok`
|FROM `orderLineItemPartSupplier`
| HAVING (COUNT(1) > 0)
""".stripMargin,
2,
true,
true
)
test("noAggs",
"select l_returnflag, l_linestatus " +
"from orderLineItemPartSupplier " +
"group by l_returnflag, l_linestatus",
1,
true,
true
)
test("basicAggWithProject",
"select f, s, " +
"count(*) " +
"from (select l_returnflag f, l_linestatus s " +
"from orderLineItemPartSupplier) t group by f, s",
1,
true,
false,
true
)
test("dateFilter", {
val shipDtPredicate = dateTime('l_shipdate) <= (dateTime("1997-12-01") - 90.day)
date"""
select f, s, count(*) as count_order
from
(
select l_returnflag as f, l_linestatus as s, l_shipdate, s_region
from orderLineItemPartSupplier
) t
where $shipDtPredicate
group by f,s
order by f,s
"""},
1,
true
)
test("intervalFilter", {
val shipDtPredicate = dateTime('l_shipdate) <= (dateTime("1997-12-01") - 90.day)
date"""
select f, s, count(*) as count_order
from
(
select l_returnflag as f, l_linestatus as s, l_shipdate, s_region, s_nation, c_nation
from orderLineItemPartSupplier
) t
where $shipDtPredicate and ((s_nation = 'FRANCE' and c_nation = 'GERMANY') or
(c_nation = 'FRANCE' and s_nation = 'GERMANY')
)
group by f,s
order by f,s
"""
},
1, true
)
test("intervalFilter2", {
val shipDtPredicate = dateTime('l_shipdate) <= (dateTime("1997-12-01") - 90.day)
val shipDtPredicate2 = dateTime('l_shipdate) > (dateTime("1995-12-01"))
date"""
select f, s, count(*) as count_order
from
(
select l_returnflag as f, l_linestatus as s, l_shipdate, s_region, s_nation, c_nation
from orderLineItemPartSupplier
) t
where $shipDtPredicate and $shipDtPredicate2
group by f,s
order by f,s
"""
},
1,
true
)
test("intervalFilter3", {
val shipDtPredicate = dateTime('l_shipdate) <= (dateTime("1997-12-01") - 90.day)
val shipDtPredicate2 = dateTime('l_shipdate) < (dateTime("1995-12-01"))
date"""
select f, s, count(*) as count_order
from
(
select l_returnflag as f, l_linestatus as s, l_shipdate, s_region, s_nation, c_nation
from orderLineItemPartSupplier
) t
where $shipDtPredicate and $shipDtPredicate2
group by f,s
order by f,s
"""
},
1,
true)
test("intervalFilter4", {
val shipDtPredicate = dateTime('l_shipdate) <= (dateTime("1997-12-01") - 90.day)
val shipDtPredicate2 = dateTime('l_shipdate) > (dateTime("1997-12-02"))
date"""
select f, s, count(*) as count_order
from
(
select l_returnflag as f, l_linestatus as s, l_shipdate, s_region, s_nation, c_nation
from orderLineItemPartSupplier
) t
where $shipDtPredicate and $shipDtPredicate2
group by f,s
order by f,s
"""
},
1,
true
)
test("dimFilter2", {
val shipDtPredicate = dateTime('l_shipdate) <= (dateTime("1997-12-01") - 90.day)
date"""
select f, s, count(*) as count_order
from
(
select l_returnflag as f, l_linestatus as s, l_shipdate,
s_region, s_nation, c_nation, p_type
from orderLineItemPartSupplier
) t
where $shipDtPredicate and ((s_nation = 'FRANCE' and c_nation = 'GERMANY') or
(c_nation = 'FRANCE' and s_nation = 'GERMANY')
) and p_type = 'ECONOMY ANODIZED STEEL'
group by f,s
order by f,s
"""
},
1,
true,
true
)
test("dimFilter3", {
val shipDtPredicate = dateTime('l_shipdate) <= (dateTime("1997-12-01") - 90.day)
date"""
select s_nation, count(*) as count_order
from
(
select l_returnflag as f, l_linestatus as s, l_shipdate,
s_region, s_nation, c_nation, p_type
from orderLineItemPartSupplier
) t
where $shipDtPredicate and ((s_nation = 'FRANCE' and c_nation = 'GERMANY') or
(c_nation = 'FRANCE' and s_nation = 'GERMANY')
)
group by s_nation
order by s_nation
"""
},
1,
true
)
test("dimFilter4", {
val shipDtPredicate = dateTime('l_shipdate) <= (dateTime("1997-12-01") - 90.day)
date"""
select s_nation, count(*) as count_order
from
(
select l_returnflag as f, l_linestatus as s, l_shipdate,
s_region, s_nation, c_nation, p_type
from orderLineItemPartSupplier
) t
where $shipDtPredicate and s_nation >= 'FRANCE'
group by s_nation
order by s_nation
"""
},
1,
true
)
test("projFilterAgg", {
val shipDtPredicate = dateTime('l_shipdate) <= (dateTime("1997-12-01") - 90.day)
val shipDtPredicate2 = dateTime('l_shipdate) > (dateTime("1995-12-01"))
date"""
select s_nation,
count(*) as count_order,
sum(l_extendedprice) as s,
max(ps_supplycost) as m,
avg(ps_availqty) as a,
count(distinct o_orderkey)
from
(
select l_returnflag as f, l_linestatus as s, l_shipdate,
s_region, s_nation, c_nation, p_type,
l_extendedprice, ps_supplycost, ps_availqty, o_orderkey
from orderLineItemPartSupplier
where p_type = 'ECONOMY ANODIZED STEEL'
) t
where $shipDtPredicate and
$shipDtPredicate2 and ((s_nation = 'FRANCE' and c_nation = 'GERMANY') or
(c_nation = 'FRANCE' and s_nation = 'GERMANY')
)
group by s_nation
order by s_nation
"""
},
2,
true
)
test("ShipDateYearAgg", {
val shipDtYrGroup = dateTime('l_shipdate) year
date"""select l_returnflag, l_linestatus, $shipDtYrGroup, count(*),
sum(l_extendedprice) as s, max(ps_supplycost) as m, avg(ps_availqty) as a,
count(distinct o_orderkey)
from orderLineItemPartSupplier group by l_returnflag, l_linestatus, $shipDtYrGroup"""
},
2,
true
)
test("OrderDateYearAgg", {
val orderDtYrGroup = dateTime('o_orderdate) year
date"""select l_returnflag, l_linestatus, $orderDtYrGroup, count(*),
sum(l_extendedprice) as s, max(ps_supplycost) as m, avg(ps_availqty) as a,
count(distinct o_orderkey)
from orderLineItemPartSupplier group by l_returnflag, l_linestatus, $orderDtYrGroup"""
},
2,
true
)
test("noRewrite",
"""select *
|from orderLineItemPartSupplier
|limit 3""".stripMargin,
0,
true
)
test("noMetricsCName", {
"""select c_name
from orderLineItemPartSupplier group by c_name"""
},
1,
true, true
)
test("noMetricsCNameSort", {
"""select c_name
from orderLineItemPartSupplier group by c_name
order by c_name"""
},
1,
true, true,
false,
Seq(
{ dq : DruidQuery =>
dq.q.isInstanceOf[SearchQuerySpec]
}
)
)
test("noMetricsPSize", {
"""select p_size
from orderLineItemPartSupplier group by p_size"""
},
1,
true, true,
false,
Seq(
{ dq : DruidQuery => dq.q.isInstanceOf[SearchQuerySpec]}
)
)
test("noMetricsPRetailPrice", {
"""select p_retailprice
from orderLineItemPartSupplier group by p_retailprice"""
},
1,
true,
true,
false,
Seq(
{ dq : DruidQuery => dq.q.isInstanceOf[SearchQuerySpec]}
)
)
test("noMetricsYearExpr", {
val orderDtYrGroup = dateTime('o_orderdate) year
date"""select $orderDtYrGroup
from orderLineItemPartSupplier group by $orderDtYrGroup"""
},
1,
true, true
)
test("noMetricsTimestamp", {
val orderDtYrGroup = dateTime('o_orderdate) year
date"""select Cast(Concat(To_date(l_shipdate), ' 00:00:00') AS TIMESTAMP)
from orderLineItemPartSupplier
group by Cast(Concat(To_date(l_shipdate), ' 00:00:00') AS TIMESTAMP)"""
},
1,
true, true
)
test("noMetricsCNameSortWithIntervalFilter", {
"""select c_name
from orderLineItemPartSupplier
where l_shipdate > '1994'
group by c_name
order by c_name"""
},
1,
true, true,
false,
Seq(
{ dq : DruidQuery =>
dq.q.isInstanceOf[GroupByQuerySpec]
}
)
)
// scalastyle:off line.size.limit
test("countDistinctAndJS",
"""
|SELECT CAST((MONTH(CAST(`sp_ad_demo_qube`.`l_shipdate` AS TIMESTAMP)) - 1) / 3 + 1 AS BIGINT) AS `qr_l_shipdate_ok`,
|COUNT(DISTINCT `sp_ad_demo_qube`.`s_region`) AS `usr_calculation_622270835564429312_ok`,
|YEAR(CAST(`sp_ad_demo_qube`.`l_shipdate` AS TIMESTAMP)) AS `yr_l_shipdate_ok`
|FROM `default`.`orderLineItemPartSupplier` `sp_ad_demo_qube`
|WHERE ((CAST(`sp_ad_demo_qube`.`l_shipdate` AS TIMESTAMP) >= CAST('2016-02-29 16:00:00' AS TIMESTAMP)) AND
|(CAST(`sp_ad_demo_qube`.`l_shipdate` AS TIMESTAMP) <= CAST('2016-03-27 16:00:00' AS TIMESTAMP)))
|GROUP BY CAST((MONTH(CAST(`sp_ad_demo_qube`.`l_shipdate` AS TIMESTAMP)) - 1) / 3 + 1 AS BIGINT),
|YEAR(CAST(`sp_ad_demo_qube`.`l_shipdate` AS TIMESTAMP))
""".stripMargin,
1,
true,
true
)
test("aggExprIncludesGByExpr",
"""
|SELECT SUM(`sparkline_paysafe`.`l_extendedprice`) AS `temp_tc___1602391293__0_`,
|SUM(`sparkline_paysafe`.`l_extendedprice`) AS `temp_temp_tc___4123911036__0___3760310399__0_`,
|MIN(CAST(CAST(CAST(`sparkline_paysafe`.`l_shipdate` AS TIMESTAMP) AS TIMESTAMP) AS TIMESTAMP))
| AS `temp_temp_tc___4123911036__0___4190136418__0_`,
| CAST(`sparkline_paysafe`.`l_shipdate` AS TIMESTAMP) AS `l_shipdate`
| FROM `default`.`orderLineItemPartSupplier` `sparkline_paysafe`
| GROUP BY CAST(`sparkline_paysafe`.`l_shipdate` AS TIMESTAMP)
""".stripMargin,
1,
true,
true)
test("topN",
"select l_returnflag, " +
"count(*), sum(l_extendedprice) as s, max(ps_supplycost) as m, avg(ps_availqty) as a " +
"from orderLineItemPartSupplier group by l_returnflag " +
"order by max(ps_supplycost) " +
"limit 200",
1,
true,
true,
false,
Seq(
isTopN _,
TopNThresholdCheck(200)
)
)
test("topNDesc",
"select l_returnflag, " +
"count(*), sum(l_extendedprice) as s, max(ps_supplycost) as m, avg(ps_availqty) as a " +
"from orderLineItemPartSupplier group by l_returnflag " +
"order by max(ps_supplycost) desc " +
"limit 200",
1,
true,
true,
false,
Seq(
isTopN _,
TopNThresholdCheck(200)
)
)
test("topNNotPushedLargeLimit",
"select l_returnflag, " +
"count(*), sum(l_extendedprice) as s, max(ps_supplycost) as m, avg(ps_availqty) as a " +
"from orderLineItemPartSupplier group by l_returnflag " +
"order by max(ps_supplycost) " +
"limit 100001",
1,
false,
false,
false,
Seq(
isGBy _
)
)
test("topNNotPushedMultiDim",
"select l_returnflag, l_linestatus, " +
"count(*), sum(l_extendedprice) as s, max(ps_supplycost) as m, avg(ps_availqty) as a " +
"from orderLineItemPartSupplier " +
"group by l_returnflag, l_linestatus " +
"order by max(ps_supplycost) " +
"limit 10",
1,
false,
false,
false,
Seq(
isGBy _
)
)
test("topNNotPushedOByDim",
"select l_returnflag, " +
"count(*), sum(l_extendedprice) as s, max(ps_supplycost) as m, avg(ps_availqty) as a " +
"from orderLineItemPartSupplier " +
"group by l_returnflag " +
"order by l_returnflag " +
"limit 10",
1,
false,
false,
false,
Seq(
isGBy _
)
)
test("countSName",
"select " +
"count(s_name) " +
"from orderLineItemPartSupplier " +
"limit 10",
1,
true,
true,
false
)
}
|
SparklineData/spark-druid-olap
|
src/test/scala/org/sparklinedata/druid/client/test/DruidRewritesTest.scala
|
Scala
|
apache-2.0
| 14,070 |
package lara.epfl.scalasca.rules
import lara.epfl.scalasca.core._
import scala.tools.nsc._
case class UnusedCodeRemovalBlocks(blocksToRemove: Map[Global#Position, Global#Tree]) extends RuleResult with TreeTransformer {
override def warning = Notice("GEN_UNUSED_CODE_REMOVAL",
"Removing code that no execution path traverses",
Console.GREEN + "No unused code found" + Console.RESET,
GeneralCategory())
override def toString: String =
if (blocksToRemove.size > 0)
warning.formattedWarning + " - " + Console.BLUE + blocksToRemove.size + " unused block(s) removed" + Console.RESET
else
warning.formattedDefaultMessage
override def isSuccess: Boolean = blocksToRemove.size == 0
override def getTransformation[T <: Global](global: T, tree: T#Tree): T#Tree = {
import global._
object transformer extends Transformer {
override def transform(tree: Tree): Tree = tree match {
case q"$t" if blocksToRemove.contains(tree.pos) =>
transform(blocksToRemove(tree.pos.asInstanceOf[Position]).asInstanceOf[Tree])
case _ =>
super.transform(tree)
}
}
transformer.transform(tree.asInstanceOf[Tree])
}
}
case class UnusedCodeRemovalTraversalState(blocksToRemove: Map[Global#Position, Global#Tree]) extends TraversalState
/**
* GEN_UNUSED_CODE_REMOVAL
*
* Removes dead code
*
*/
class UnusedCodeRemoval[T <: Global](val global: T, inputResults: List[RuleResult] = List()) extends ASTRule with ConstantPropagationEvaluator {
type TS = UnusedCodeRemovalTraversalState
type RR = UnusedCodeRemovalBlocks
override val ruleName = "GEN_UNUSED_CODE_REMOVAL"
import global._
override def getDefaultState(): TS = UnusedCodeRemovalTraversalState(Map())
private val inputSymbolMap = SymbolMapper.getLiteralMapping(inputResults)
override def step(tree: Global#Tree, state: TS): List[(Option[TT], TS)] = tree match {
// case q"$mods def $tname[..$tparams](...$paramss): $tpt = $expr" =>
// goto(expr, state.copy(inMethod = Some(tree.symbol)))
case q"if($cond) $thenP else $elseP" =>
val evaluatedCond =
if (inputSymbolMap.isEmpty)
cond match {
case q"true" => Some(true)
case q"false" => Some(false)
case _ => None
}
else
evaluateToConstant(cond)(global)(inputSymbolMap) match {
case Some(value) if value.isInstanceOf[Boolean] =>
Some(value.asInstanceOf[Boolean])
case _ => None
}
evaluatedCond match {
case Some(c) if c =>
goto(thenP, state.copy(blocksToRemove = state.blocksToRemove + (tree.pos -> thenP)))
case Some(c) if !c =>
goto(elseP, state.copy(blocksToRemove = state.blocksToRemove + (tree.pos -> elseP)))
case _ =>
goto(List(cond, thenP, elseP), state)
}
case _ =>
goto(tree.children, state)
}
override def getRuleResult(state: TS): RR = UnusedCodeRemovalBlocks(state.blocksToRemove)
override def apply(syntaxTree: Tree): RR = {
ASTRule.apply(global)(syntaxTree, List(this)) match {
case result :: rest => result match {
case p @ UnusedCodeRemovalBlocks(_) => p
case _ => UnusedCodeRemovalBlocks(Map())
}
case _ => UnusedCodeRemovalBlocks(Map())
}
}
override def mergeStates(s1: TS, s2: TS): TS =
UnusedCodeRemovalTraversalState(s1.blocksToRemove ++ s2.blocksToRemove)
}
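// Hypothetical usage sketch (names assumed, not from the original file):
//   val rule = new UnusedCodeRemoval(global)
//   val deadBlocks = rule(syntaxTree)                              // collect branches whose conditions are compile-time constants
//   val pruned = deadBlocks.getTransformation(global, syntaxTree)  // rewrite each such if-expression to its live branch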
|
jean-andre-gauthier/scalasca
|
src/main/scala/lara/epfl/scalasca/rules/UnusedCodeRemoval.scala
|
Scala
|
bsd-3-clause
| 3,297 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import scala.collection.mutable
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow}
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, CodegenFallback, ExprCode}
import org.apache.spark.sql.catalyst.util.{ArrayData, MapData}
import org.apache.spark.sql.types._
/**
* An expression that produces zero or more rows given a single input row.
*
* Generators produce multiple output rows instead of a single value like other expressions,
* and thus they must have a schema to associate with the rows that are output.
*
* However, unlike row producing relational operators, which are either leaves or determine their
* output schema functionally from their input, generators can contain other expressions that
* might result in their modification by rules. This structure means that they might be copied
* multiple times after first determining their output schema. If a new output schema is created for
* each copy references up the tree might be rendered invalid. As a result generators must
* instead define a function `makeOutput` which is called only once when the schema is first
* requested. The attributes produced by this function will be automatically copied anytime rules
* result in changes to the Generator or its children.
*/
trait Generator extends Expression {
override def dataType: DataType = ArrayType(elementSchema)
override def foldable: Boolean = false
override def nullable: Boolean = false
/**
* The output element schema.
*/
def elementSchema: StructType
/** Should be implemented by child classes to perform specific Generators. */
override def eval(input: InternalRow): TraversableOnce[InternalRow]
/**
   * Notifies the generator that there are no more rows to process. Clean-up code can
   * run here, and any final rows can be emitted.
*/
def terminate(): TraversableOnce[InternalRow] = Nil
/**
* Check if this generator supports code generation.
*/
def supportCodegen: Boolean = !isInstanceOf[CodegenFallback]
}
/**
* A collection producing [[Generator]]. This trait provides a different path for code generation,
* by allowing code generation to return either an [[ArrayData]] or a [[MapData]] object.
*/
trait CollectionGenerator extends Generator {
/** The position of an element within the collection should also be returned. */
def position: Boolean
/** Rows will be inlined during generation. */
def inline: Boolean
/** The type of the returned collection object. */
def collectionType: DataType = dataType
}
/**
* A generator that produces its output using the provided lambda function.
*/
case class UserDefinedGenerator(
elementSchema: StructType,
function: Row => TraversableOnce[InternalRow],
children: Seq[Expression])
extends Generator with CodegenFallback {
@transient private[this] var inputRow: InterpretedProjection = _
@transient private[this] var convertToScala: (InternalRow) => Row = _
private def initializeConverters(): Unit = {
inputRow = new InterpretedProjection(children)
convertToScala = {
val inputSchema = StructType(children.map { e =>
StructField(e.simpleString, e.dataType, nullable = true)
})
CatalystTypeConverters.createToScalaConverter(inputSchema)
}.asInstanceOf[InternalRow => Row]
}
override def eval(input: InternalRow): TraversableOnce[InternalRow] = {
if (inputRow == null) {
initializeConverters()
}
// Convert the objects into Scala Type before calling function, we need schema to support UDT
function(convertToScala(inputRow(input)))
}
override def toString: String = s"UserDefinedGenerator(${children.mkString(",")})"
}
/**
* Separate v1, ..., vk into n rows. Each row will have k/n columns. n must be constant.
* {{{
* SELECT stack(2, 1, 2, 3) ->
* 1 2
* 3 NULL
* }}}
*/
@ExpressionDescription(
usage = "_FUNC_(n, expr1, ..., exprk) - Separates `expr1`, ..., `exprk` into `n` rows.",
extended = """
Examples:
> SELECT _FUNC_(2, 1, 2, 3);
1 2
3 NULL
""")
case class Stack(children: Seq[Expression]) extends Generator {
private lazy val numRows = children.head.eval().asInstanceOf[Int]
private lazy val numFields = Math.ceil((children.length - 1.0) / numRows).toInt
/**
* Return true iff the first child exists and has a foldable IntegerType.
*/
def hasFoldableNumRows: Boolean = {
children.nonEmpty && children.head.dataType == IntegerType && children.head.foldable
}
override def checkInputDataTypes(): TypeCheckResult = {
if (children.length <= 1) {
TypeCheckResult.TypeCheckFailure(s"$prettyName requires at least 2 arguments.")
} else if (children.head.dataType != IntegerType || !children.head.foldable || numRows < 1) {
TypeCheckResult.TypeCheckFailure("The number of rows must be a positive constant integer.")
} else {
for (i <- 1 until children.length) {
val j = (i - 1) % numFields
if (children(i).dataType != elementSchema.fields(j).dataType) {
return TypeCheckResult.TypeCheckFailure(
s"Argument ${j + 1} (${elementSchema.fields(j).dataType}) != " +
s"Argument $i (${children(i).dataType})")
}
}
TypeCheckResult.TypeCheckSuccess
}
}
def findDataType(index: Int): DataType = {
// Find the first data type except NullType.
val firstDataIndex = ((index - 1) % numFields) + 1
for (i <- firstDataIndex until children.length by numFields) {
if (children(i).dataType != NullType) {
return children(i).dataType
}
}
// If all values of the column are NullType, use it.
NullType
}
override def elementSchema: StructType =
StructType(children.tail.take(numFields).zipWithIndex.map {
case (e, index) => StructField(s"col$index", e.dataType)
})
override def eval(input: InternalRow): TraversableOnce[InternalRow] = {
val values = children.tail.map(_.eval(input)).toArray
for (row <- 0 until numRows) yield {
val fields = new Array[Any](numFields)
for (col <- 0 until numFields) {
val index = row * numFields + col
fields.update(col, if (index < values.length) values(index) else null)
}
InternalRow(fields: _*)
}
}
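  // Example from the doc above: stack(2, 1, 2, 3) gives numRows = 2 and numFields = 2;
  // row 0 reads values(0..1) = (1, 2), row 1 reads values(2..3) where index 3 is out of
  // range and padded with null, hence the rows (1, 2) and (3, NULL).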
/**
* Only support code generation when stack produces 50 rows or less.
*/
override def supportCodegen: Boolean = numRows <= 50
override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
// Rows - we write these into an array.
val rowData = ctx.freshName("rows")
ctx.addMutableState("InternalRow[]", rowData, s"$rowData = new InternalRow[$numRows];")
val values = children.tail
val dataTypes = values.take(numFields).map(_.dataType)
val code = ctx.splitExpressions(ctx.INPUT_ROW, Seq.tabulate(numRows) { row =>
val fields = Seq.tabulate(numFields) { col =>
val index = row * numFields + col
if (index < values.length) values(index) else Literal(null, dataTypes(col))
}
val eval = CreateStruct(fields).genCode(ctx)
s"${eval.code}\\n$rowData[$row] = ${eval.value};"
})
// Create the collection.
val wrapperClass = classOf[mutable.WrappedArray[_]].getName
ctx.addMutableState(
s"$wrapperClass<InternalRow>",
ev.value,
s"${ev.value} = $wrapperClass$$.MODULE$$.make($rowData);")
ev.copy(code = code, isNull = "false")
}
}
/**
* Wrapper around another generator to specify outer behavior. This is used to implement functions
* such as explode_outer. This expression gets replaced during analysis.
*/
case class GeneratorOuter(child: Generator) extends UnaryExpression with Generator {
final override def eval(input: InternalRow = null): TraversableOnce[InternalRow] =
throw new UnsupportedOperationException(s"Cannot evaluate expression: $this")
final override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode =
throw new UnsupportedOperationException(s"Cannot evaluate expression: $this")
override def elementSchema: StructType = child.elementSchema
override lazy val resolved: Boolean = false
}
/**
* A base class for [[Explode]] and [[PosExplode]].
*/
abstract class ExplodeBase extends UnaryExpression with CollectionGenerator with Serializable {
override val inline: Boolean = false
override def checkInputDataTypes(): TypeCheckResult = child.dataType match {
case _: ArrayType | _: MapType =>
TypeCheckResult.TypeCheckSuccess
case _ =>
TypeCheckResult.TypeCheckFailure(
s"input to function explode should be array or map type, not ${child.dataType}")
}
// hive-compatible default alias for explode function ("col" for array, "key", "value" for map)
override def elementSchema: StructType = child.dataType match {
case ArrayType(et, containsNull) =>
if (position) {
new StructType()
.add("pos", IntegerType, nullable = false)
.add("col", et, containsNull)
} else {
new StructType()
.add("col", et, containsNull)
}
case MapType(kt, vt, valueContainsNull) =>
if (position) {
new StructType()
.add("pos", IntegerType, nullable = false)
.add("key", kt, nullable = false)
.add("value", vt, valueContainsNull)
} else {
new StructType()
.add("key", kt, nullable = false)
.add("value", vt, valueContainsNull)
}
}
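  // Example: explode(map('a', 1)) yields the row (key = 'a', value = 1), or
  // (pos = 0, key = 'a', value = 1) when `position` is true.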
override def eval(input: InternalRow): TraversableOnce[InternalRow] = {
child.dataType match {
case ArrayType(et, _) =>
val inputArray = child.eval(input).asInstanceOf[ArrayData]
if (inputArray == null) {
Nil
} else {
val rows = new Array[InternalRow](inputArray.numElements())
inputArray.foreach(et, (i, e) => {
rows(i) = if (position) InternalRow(i, e) else InternalRow(e)
})
rows
}
case MapType(kt, vt, _) =>
val inputMap = child.eval(input).asInstanceOf[MapData]
if (inputMap == null) {
Nil
} else {
val rows = new Array[InternalRow](inputMap.numElements())
var i = 0
inputMap.foreach(kt, vt, (k, v) => {
rows(i) = if (position) InternalRow(i, k, v) else InternalRow(k, v)
i += 1
})
rows
}
}
}
override def collectionType: DataType = child.dataType
override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
child.genCode(ctx)
}
}
/**
* Given an input array produces a sequence of rows for each value in the array.
*
* {{{
* SELECT explode(array(10,20)) ->
* 10
* 20
* }}}
*/
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "_FUNC_(expr) - Separates the elements of array `expr` into multiple rows, or the elements of map `expr` into multiple rows and columns.",
extended = """
Examples:
> SELECT _FUNC_(array(10, 20));
10
20
""")
// scalastyle:on line.size.limit
case class Explode(child: Expression) extends ExplodeBase {
override val position: Boolean = false
}
/**
* Given an input array produces a sequence of rows for each position and value in the array.
*
* {{{
* SELECT posexplode(array(10,20)) ->
* 0 10
* 1 20
* }}}
*/
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "_FUNC_(expr) - Separates the elements of array `expr` into multiple rows with positions, or the elements of map `expr` into multiple rows and columns with positions.",
extended = """
Examples:
> SELECT _FUNC_(array(10,20));
0 10
1 20
""")
// scalastyle:on line.size.limit
case class PosExplode(child: Expression) extends ExplodeBase {
override val position = true
}
/**
* Explodes an array of structs into a table.
*/
@ExpressionDescription(
usage = "_FUNC_(expr) - Explodes an array of structs into a table.",
extended = """
Examples:
> SELECT _FUNC_(array(struct(1, 'a'), struct(2, 'b')));
1 a
2 b
""")
case class Inline(child: Expression) extends UnaryExpression with CollectionGenerator {
override val inline: Boolean = true
override val position: Boolean = false
override def checkInputDataTypes(): TypeCheckResult = child.dataType match {
case ArrayType(st: StructType, _) =>
TypeCheckResult.TypeCheckSuccess
case _ =>
TypeCheckResult.TypeCheckFailure(
s"input to function $prettyName should be array of struct type, not ${child.dataType}")
}
override def elementSchema: StructType = child.dataType match {
case ArrayType(st: StructType, _) => st
}
override def collectionType: DataType = child.dataType
private lazy val numFields = elementSchema.fields.length
override def eval(input: InternalRow): TraversableOnce[InternalRow] = {
val inputArray = child.eval(input).asInstanceOf[ArrayData]
if (inputArray == null) {
Nil
} else {
for (i <- 0 until inputArray.numElements())
yield inputArray.getStruct(i, numFields)
}
}
override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
child.genCode(ctx)
}
}
|
saturday-shi/spark
|
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala
|
Scala
|
apache-2.0
| 14,186 |
import sbt._
import Keys._
import sbtassembly.Plugin._
import sbtjflex.SbtJFlexPlugin._
import AssemblyKeys._
object Versions{
val scalaMajorVersion = "2.11"
val scalaMinorVersion = "7"
val factorieVersion = "1.2-SNAPSHOT"
}
object FactorieBuild extends Build {
import Dependencies._
lazy val overrideSettings = {
lazy val publishSetting = publishTo <<= (version) {
version: String =>
def repo(name: String) = name at "https://dev-iesl.cs.umass.edu/nexus/content/repositories/" + name
val isSnapshot = version.trim.endsWith("SNAPSHOT")
val repoName = if(isSnapshot) "snapshots" else "releases"
Some(repo(repoName))
}
lazy val credentialsSetting = credentials += {
Seq("build.publish.user", "build.publish.password").map(k => Option(System.getProperty(k))) match {
case Seq(Some(user), Some(pass)) =>
Credentials("Sonatype Nexus Repository Manager", "iesl.cs.umass.edu", user, pass)
case _ =>
Credentials(Path.userHome / ".ivy2" / ".credentials")
}
}
}
val NoNLP = config("no-nlp-resources") extend(Runtime)
val WithNLP = config("with-nlp-resources") extend(Runtime)
lazy val factorie = Project("factorie", file(".")).
configs(NoNLP, WithNLP).
settings(jflexSettings ++ Seq(
organization := s"cc.factorie_${Versions.scalaMajorVersion}",
version := Versions.factorieVersion,
scalaVersion := s"${Versions.scalaMajorVersion}.${Versions.scalaMinorVersion}",
      // no verbose deprecation warnings: the octal escapes in the jflex-generated file would trigger too many
scalacOptions := Seq("-unchecked", "-encoding", "utf8"),
resolvers ++= resolutionRepos,
libraryDependencies ++= Seq(
CompileDependencies.mongodb,
CompileDependencies.colt,
CompileDependencies.compiler,
CompileDependencies.junit,
CompileDependencies.acompress,
CompileDependencies.acommonslang,
CompileDependencies.snappy,
CompileDependencies.bliki,
CompileDependencies.json4s,
CompileDependencies.guava,
TestDependencies.scalatest,
TestDependencies.slf4j,
TestDependencies.fongo
),
unmanagedSourceDirectories in Compile <+= (sourceDirectory in jflex),
sourceGenerators in Compile <+= generate in jflex
):_*).
settings(inConfig(NoNLP)(
Classpaths.configSettings ++ Defaults.defaultSettings ++ baseAssemblySettings ++ jflexSettings ++ Seq(
test in assembly := {},
target in assembly <<= target,
assemblyDirectory in assembly := cacheDirectory.value / "assembly-no-nlp-resources",
jarName in assembly := "%s_%s-%s-%s" format (name.value, Versions.scalaMajorVersion, version.value, "jar-with-dependencies.jar")
)): _*).
settings(inConfig(WithNLP)(
Classpaths.configSettings ++ Defaults.defaultSettings ++ baseAssemblySettings ++ jflexSettings ++ Seq(
test in assembly := {},
target in assembly <<= target,
assemblyDirectory in assembly := cacheDirectory.value / "assembly-with-nlp-resources",
jarName in assembly := "%s_%s-%s-%s" format (name.value, Versions.scalaMajorVersion, version.value, "nlp-jar-with-dependencies.jar"),
libraryDependencies ++= Seq(Resources.nlpresources)
)): _*)
}
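// Hypothetical usage (assuming the standard sbt-assembly tasks wired above): build the two
// fat jars with `sbt no-nlp-resources:assembly` and `sbt with-nlp-resources:assembly`.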
object Dependencies {
val resolutionRepos = Seq(
"Scala tools" at "https://oss.sonatype.org/content/groups/scala-tools",
"OSS Snapshots" at "https://oss.sonatype.org/content/repositories/snapshots",
"OSS Releases" at "https://oss.sonatype.org/content/repositories/releases",
"UMass Releases" at "https://dev-iesl.cs.umass.edu/nexus/content/repositories/public",
"UMass Snapshots" at "https://dev-iesl.cs.umass.edu/nexus/content/repositories/public-snapshots"
)
object CompileDependencies {
val mongodb = "org.mongodb" % "mongo-java-driver" % "2.12.3"
val colt = "org.jblas" % "jblas" % "1.2.3"
val compiler = "org.scala-lang" % "scala-compiler" % s"${Versions.scalaMajorVersion}.${Versions.scalaMinorVersion}"
val junit = "junit" % "junit" % "4.10"
val acompress = "org.apache.commons" % "commons-compress" % "1.8"
val acommonslang = "commons-lang" % "commons-lang" % "2.6"
val snappy = "org.xerial.snappy" % "snappy-java" % "1.1.1.3"
val bliki = "info.bliki.wiki" % "bliki-core" % "3.0.19"
val json4s = "org.json4s" % s"json4s-jackson_${Versions.scalaMajorVersion}" % "3.2.9"
val guava = "com.google.guava" % "guava" % "12.0"
}
object TestDependencies {
val scalatest = "org.scalatest" % s"scalatest_${Versions.scalaMajorVersion}" % "2.2.2" % Test
val slf4j = "org.slf4j" % "slf4j-log4j12" % "1.7.7" % Test
val fongo = "com.github.fakemongo" % "fongo" % "1.5.10" % Test
}
object Resources {
// This may be brittle, but intransitive() avoids creating a circular dependency.
val nlpresources = "cc.factorie.app.nlp" % "all-models" % Versions.factorieVersion % "with-nlp-resources" intransitive()
}
}
|
patverga/factorie
|
project/Build.scala
|
Scala
|
apache-2.0
| 4,993 |
package com.twitter.finagle.http
import com.twitter.finagle.{Status => CoreStatus}
import com.twitter.finagle.http.codec.ConnectionManager
import com.twitter.finagle.http.exp.{Multi, StreamTransportProxy, StreamTransport}
import com.twitter.util.{Future, Promise}
import scala.util.control.NonFatal
/**
* A Transport with close behavior managed by ConnectionManager.
*
* @note the connection manager will close connections as required by RFC 2616 § 8
* irrespective of any pending requests in the dispatcher.
*/
private[finagle] class HttpTransport[A <: Message, B <: Message](
self: StreamTransport[A, B],
manager: ConnectionManager)
extends StreamTransportProxy[A, B](self) {
private[this] val readFn: Multi[B] => Unit = { case Multi(m, onFinish) =>
manager.observeMessage(m, onFinish)
if (manager.shouldClose)
self.close()
}
def this(self: StreamTransport[A, B]) = this(self, new ConnectionManager)
def read(): Future[Multi[B]] =
self.read().onSuccess(readFn)
def write(m: A): Future[Unit] =
try {
val p = Promise[Unit]
manager.observeMessage(m, p)
val f = self.write(m)
p.become(f)
if (manager.shouldClose) f.before(self.close())
else f
} catch {
case NonFatal(e) => Future.exception(e)
}
override def status: CoreStatus = if (manager.shouldClose) CoreStatus.Closed else self.status
}
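// Illustrative wiring (hypothetical, not from the original file):
//   val transport = new HttpTransport(streamTransport) // a fresh ConnectionManager enforces RFC 2616 close semantics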
|
sveinnfannar/finagle
|
finagle-http/src/main/scala/com/twitter/finagle/http/HttpTransport.scala
|
Scala
|
apache-2.0
| 1,402 |
package com.twitter.inject.thrift.internal.filters
import com.twitter.finagle.stats.Counter
import com.twitter.finagle.{Service, SimpleFilter}
import com.twitter.util._
private[thrift] class IncrementCounterFilter[Req, Rep](counter: Counter)
extends SimpleFilter[Req, Rep] {
override def apply(request: Req, service: Service[Req, Rep]): Future[Rep] = {
counter.incr()
service(request)
}
}
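// Illustrative usage (hypothetical stats receiver and service, not from the original file):
//   val counted = new IncrementCounterFilter[Request, Response](stats.counter("requests")).andThen(service)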
|
twitter/finatra
|
inject/inject-thrift-client/src/main/scala/com/twitter/inject/thrift/internal/filters/IncrementCounterFilter.scala
|
Scala
|
apache-2.0
| 408 |
package org.scalajs.testsuite.niocharset
import scala.language.implicitConversions
import scala.scalajs.niocharset.StandardCharsets._
import java.nio.charset._
import org.junit.Test
import org.junit.Assert._
class CharsetJSTest {
@Test def defaultCharset(): Unit = {
assertSame(UTF_8, Charset.defaultCharset())
}
@Test def forName(): Unit = {
assertSame(ISO_8859_1, Charset.forName("ISO-8859-1"))
assertSame(ISO_8859_1, Charset.forName("Iso8859-1"))
assertSame(ISO_8859_1, Charset.forName("iso_8859_1"))
assertSame(ISO_8859_1, Charset.forName("LaTin1"))
assertSame(ISO_8859_1, Charset.forName("l1"))
assertSame(US_ASCII, Charset.forName("US-ASCII"))
assertSame(US_ASCII, Charset.forName("Default"))
assertSame(UTF_8, Charset.forName("UTF-8"))
assertSame(UTF_8, Charset.forName("utf-8"))
assertSame(UTF_8, Charset.forName("UtF8"))
assertSame(UTF_8, Charset.forName("UTF-8"))
assertSame(UTF_16BE, Charset.forName("UTF-16BE"))
assertSame(UTF_16BE, Charset.forName("Utf_16BE"))
assertSame(UTF_16BE, Charset.forName("UnicodeBigUnmarked"))
assertSame(UTF_16LE, Charset.forName("UTF-16le"))
assertSame(UTF_16LE, Charset.forName("Utf_16le"))
assertSame(UTF_16LE, Charset.forName("UnicodeLittleUnmarked"))
assertSame(UTF_16, Charset.forName("UTF-16"))
assertSame(UTF_16, Charset.forName("Utf_16"))
assertSame(UTF_16, Charset.forName("unicode"))
assertSame(UTF_16, Charset.forName("UnicodeBig"))
}
}
|
lrytz/scala-js
|
test-suite/js/src/test/scala/org/scalajs/testsuite/niocharset/CharsetJSTest.scala
|
Scala
|
bsd-3-clause
| 1,498 |
package mesosphere.marathon.event.http
import java.util.UUID
import javax.inject.{ Inject, Named }
import javax.servlet.http.HttpServletRequest
import akka.actor.ActorRef
import mesosphere.marathon.ModuleNames
import mesosphere.marathon.event.http.HttpEventStreamActor._
import org.eclipse.jetty.servlets.EventSource.Emitter
import org.eclipse.jetty.servlets.{ EventSource, EventSourceServlet }
import scala.concurrent.blocking
/**
* The Stream handle implementation for SSE.
* @param request the initial http request.
 * @param emitter the emitter used to send event data to the client
*/
class HttpEventSSEHandle(request: HttpServletRequest, emitter: Emitter) extends HttpEventStreamHandle {
lazy val id: String = UUID.randomUUID().toString
override def remoteAddress: String = request.getRemoteAddr
override def close(): Unit = emitter.close()
override def sendEvent(event: String, message: String): Unit = blocking(emitter.event(event, message))
override def toString: String = s"HttpEventSSEHandle($id on $remoteAddress)"
}
/**
* Handle a server side event client stream by delegating events to the stream actor.
*/
class HttpEventStreamServlet @Inject() (@Named(ModuleNames.HTTP_EVENT_STREAM) streamActor: ActorRef)
extends EventSourceServlet {
override def newEventSource(request: HttpServletRequest): EventSource = new EventSource {
@volatile private var handler: Option[HttpEventSSEHandle] = None
override def onOpen(emitter: Emitter): Unit = {
val handle = new HttpEventSSEHandle(request, emitter)
this.handler = Some(handle)
streamActor ! HttpEventStreamConnectionOpen(handle)
}
override def onClose(): Unit = {
handler.foreach(streamActor ! HttpEventStreamConnectionClosed(_))
handler = None
}
}
}
|
ss75710541/marathon
|
src/main/scala/mesosphere/marathon/event/http/HttpEventStreamServlet.scala
|
Scala
|
apache-2.0
| 1,778 |
package io.iohk.ethereum.domain
import akka.util.ByteString
trait TransactionOutcome
case class HashOutcome(stateHash: ByteString) extends TransactionOutcome
case object SuccessOutcome extends TransactionOutcome
case object FailureOutcome extends TransactionOutcome
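// Consumption sketch (the mapping to receipt fields is an assumption, not
// stated in this file): callers typically pattern match on the outcome, e.g.
//   outcome match {
//     case HashOutcome(stateHash) => ... // receipt carries an intermediate state root
//     case SuccessOutcome         => ... // receipt carries a success status
//     case FailureOutcome         => ... // receipt carries a failure status
//   }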
|
input-output-hk/etc-client
|
src/main/scala/io/iohk/ethereum/domain/TransactionOutcome.scala
|
Scala
|
mit
| 271 |
package ml.wolfe.nlp.converters
import java.util.Properties
import edu.arizona.sista.processors.{Document => SistaDocument, Sentence => SistaSentence}
import edu.arizona.sista.processors.corenlp.{CoreNLPProcessor, CoreNLPDocument}
import edu.arizona.sista.processors.fastnlp.FastNLPProcessor
import ml.wolfe.nlp.{Document => WolfeDocument, Sentence => WolfeSentence, Token => WolfeToken}
import ml.wolfe.nlp.ie.CorefAnnotation
import scala.collection.JavaConversions._
/**
* Convenience methods for processing NLP documents.
*
* @author Sebastian Riedel
* @author Jason Naradowsky
*/
object SISTAProcessors {
// The main SISTA wrapper for most CoreNLP processes
lazy val sistaCoreNLPProcessor = new CoreNLPProcessor(basicDependencies = false)
// A separate processor for calls to MaltParser wrapper returning basic dependencies
lazy val maltSistaCoreNLPProcessor = new FastNLPProcessor(useMalt = true)
// A separate processor for calls to Stanford Neural Parser wrapper returning basic dependencies
lazy val nnSistaCoreNLPProcessor = new FastNLPProcessor(useMalt = false, useBasicDependencies = true)
// Another processor for basic dependencies extracted from the Stanford constituent parser
lazy val basicSistaCoreNLPProcessor = new CoreNLPProcessor(basicDependencies = true)
/**
* Applies tokenization and sentence splitting to the text.
* @param text text to process.
* @return a document containing sentences with basic tokens.
*/
def mkDocument(text: String): WolfeDocument = {
println("making document...")
val result = sistaCoreNLPProcessor.mkDocument(text)
val sentences = result.sentences map SISTAConverter.toWolfeSentence
WolfeDocument(text, sentences)
}
/**
* Applies tokenization, sentence splitting, and parsing to the text.
* @param text text to process.
* @return a document containing sentences with basic tokens and parse structure.
*/
def mkParsedDocument(text: String): WolfeDocument = {
val result = sistaCoreNLPProcessor.mkDocument(text)
sistaCoreNLPProcessor.parse(result)
val sentences = result.sentences map SISTAConverter.toFullWolfeSentence
WolfeDocument(text, sentences)
}
/**
* Calls the full SISTA CoreNLP pipeline and returns a wolfe document.
* @param text the text to process.
* @return a document with full annotation.
*/
def annotate(text: String): WolfeDocument = {
val result = sistaCoreNLPProcessor.annotate(text)
val sentences = result.sentences map SISTAConverter.toWolfeSentence
val coref = SISTAConverter.toWolfeCoreference(result.coreferenceChains.get).toArray
WolfeDocument(text, sentences, coref = CorefAnnotation(coref))
}
/**
* Calls the SISTA CoreNLP components as specified by the arguments
* @param text the text to process
* @param posTagger part-of-speech tagger
* @param lemmatizer lemmatizer
* @param parser constituent and dependency parses
* @param ner named entity recognition
* @param coreference coreference resolution
* @param srl (NOT SUPPORTED BY CoreNLP) semantic role labeling
   * @param prereqs if true, also runs whatever components the requested ones depend on
   * @return fully annotated document
*/
def annotateWithParse(text: String,
posTagger: Boolean=false,
lemmatizer: Boolean=false,
parser: Option[ParserModel] = None,
ner: Boolean=false,
coreference: Boolean=false,
srl: Boolean = false,
prereqs: Boolean = false): WolfeDocument = {
val result = sistaCoreNLPProcessor.mkDocument(text)
if (posTagger || (prereqs && (coreference || parser.isDefined || ner))) sistaCoreNLPProcessor.tagPartsOfSpeech(result)
    if (parser.isDefined || (prereqs && coreference)) parse(result, parser.getOrElse(StanfordCollapsedDependency)) //sistaCoreNLPProcessor.parse(result)
if (lemmatizer || (prereqs && (coreference || ner))) sistaCoreNLPProcessor.lemmatize(result)
if (ner || (prereqs && coreference)) sistaCoreNLPProcessor.recognizeNamedEntities(result)
if (srl) ??? // sistaCoreNLPProcessor.labelSemanticRoles(result)
if (coreference && !prereqs) {
require(posTagger && lemmatizer && ner && parser.isDefined, "Coreference resolution requires execution of POS tagger, lemmatizer, NER and parser")
sistaCoreNLPProcessor.resolveCoreference(result)
}
SISTAConverter.sistaToWolfeDocument(result, text = text)
}
def annotate(text: String, posTagger: Boolean=false, lemmatizer: Boolean=false, parser: Boolean=false,ner: Boolean=false,coreference: Boolean=false,srl: Boolean = false, prereqs: Boolean = false): WolfeDocument = {
annotateWithParse(text, posTagger, lemmatizer, if (parser) Some(StanfordCollapsedDependency) else None, ner, coreference, srl, prereqs)
}
// def parse(doc: WolfeDocument, model: ParserModel = StanfordCollapsedDependency): WolfeDocument = {
// SISTAConverter.sistaToWolfeDocument(sistaCoreNLPProcessor.mkDocument(doc.source))
// }
def parse(doc: SistaDocument, model: ParserModel = StanfordCollapsedDependency): SistaDocument = {
sistaCoreNLPProcessor.tagPartsOfSpeech(doc)
sistaCoreNLPProcessor.lemmatize(doc)
model match {
case MaltParser => maltSistaCoreNLPProcessor.parse(doc)
case StanfordBasicDependency => basicSistaCoreNLPProcessor.parse(doc)
case StanfordCollapsedDependency => sistaCoreNLPProcessor.parse(doc)
case StanfordNeuralDependency => nnSistaCoreNLPProcessor.parse(doc)
}
doc
}
}
sealed trait ParserModel
case object MaltParser extends ParserModel
case object StanfordBasicDependency extends ParserModel
case object StanfordCollapsedDependency extends ParserModel
case object StanfordNeuralDependency extends ParserModel
// def main(args: Array[String]): Unit = {
// val sent = "the quick brown fox jumped over the lazy dog ."
// val tokens = sent.split(" ").map(w => WolfeToken(word = w))
// parse(tokens.map(_.word))
// annotate(sent, ner = true, parser = true, prereqs = true)
// }
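// Usage sketch (assumes the SISTA/CoreNLP models are on the classpath; the
// sentence is arbitrary):
//   val doc = SISTAProcessors.annotateWithParse("John loves Mary.",
//     posTagger = true, lemmatizer = true, parser = Some(MaltParser), prereqs = true)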
|
wolfe-pack/wolfe
|
wolfe-nlp/src/main/scala/ml/wolfe/nlp/converters/SISTAProcessors.scala
|
Scala
|
apache-2.0
| 5,966 |
package core.engine
/**
* Created by OpenDataFlow on 02/01/16.
*/
abstract class JobResult
|
opendataflow/opendataflow
|
opendataflow-core/src/main/scala/core/engine/JobResult.scala
|
Scala
|
apache-2.0
| 99 |
package com.caibowen.prma.logger.jul
import java.net.InetAddress
import java.util.logging.{LogRecord => JulLogRecord}
import javax.annotation.{Nonnull, Nullable}
import com.caibowen.gplume.misc.Str
import com.caibowen.prma.api.LogLevel.LogLevel
import com.caibowen.prma.api.model.{EventVO, ExceptionVO}
import com.caibowen.prma.api.{EventAdaptor, LogLevel}
import com.caibowen.prma.logger.logback.LogbackEventAdaptor
import scala.collection.immutable.Vector
import scala.collection.mutable.ArrayBuffer
/**
* @author BowenCai
* @since 04/12/2014.
*/
class JulRecordAdaptor(private[this] val formatter: MsgFormatter = new SimpleMsgFormatter) extends EventAdaptor[JulLogRecord] {
override def from(ev: JulLogRecord): EventVO = {
val le = JulRecordAdaptor.levelMap(ev.getLevel.intValue())
val st = JulRecordAdaptor.getCallerST(ev)
val msg = formatter.fmt(ev)
val loggerName = if (ev.getLoggerName == null) "" else ev.getLoggerName
new EventVO(ev.getMillis, le,
loggerName, ev.getThreadID.toString, st,
msg,
JulRecordAdaptor.localIP,
null,
getExcepts(ev),
null)
}
override def to(vo: EventVO): JulLogRecord = ???
@Nullable
def getExcepts(ev: JulLogRecord): Vector[ExceptionVO] = {
@inline
val toVO = (th: Throwable, start: Int) => {
val stps = th.getStackTrace
new ExceptionVO(th.getClass.getName,
th.getMessage,
stps.take(stps.length - start).toVector)
}
val _t = ev.getThrown
if (_t == null)
return null
var cause = _t.getCause
if (cause == null)
return Vector(toVO(_t, 0))
val buf = Vector.newBuilder[ExceptionVO]
buf.sizeHint(16)
buf += toVO(_t, 0)
val cs = JulRecordAdaptor.commonFrames(_t, cause)
do {
buf += toVO(cause, cs)
cause = cause.getCause
} while (cause != null)
buf.result()
}
}
object JulRecordAdaptor {
val localIP = Str.Utils.ipV4ToLong(InetAddress.getLocalHost.getHostAddress)
@Nonnull
def getCallerST(record: JulLogRecord): StackTraceElement = {
if (record.getSourceClassName != null)
new StackTraceElement(record.getSourceClassName, record.getSourceMethodName, record.getSourceClassName, -1)
else {
val sts = new Throwable().getStackTrace
if (sts != null && sts.length > 2)
sts(2)
else EventVO.NA_ST
}
}
  @inline
  def commonFrames(t1: Throwable, t2: Throwable): Int = {
    val s1 = t1.getStackTrace
    val s2 = t2.getStackTrace
    if (s1 == null || s2 == null)
      0
    else {
      // Align the two traces from the bottom of the stack and count the shared
      // suffix, stopping at the first frame that differs.
      (s1.reverse zip s2.reverse).takeWhile { case (i, j) => i == j }.length
    }
  }
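  // Worked example (illustrative): for traces t1 = [a, b, c, d] and
  // t2 = [x, c, d], the shared bottom-of-stack suffix is [c, d], so
  // commonFrames(t1, t2) == 2.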
private[jul] val levelMap = Map[Int, LogLevel](
Int.MinValue -> LogLevel.ALL,
300 -> LogLevel.TRACE,
400 -> LogLevel.DEBUG,
500 -> LogLevel.DEBUG,
700 -> LogLevel.INFO,
800 -> LogLevel.INFO,
900 -> LogLevel.WARN,
1000 -> LogLevel.ERROR,
Int.MaxValue -> LogLevel.OFF)
}
/* Reference: JUL levels and the PRMA LogLevel each one maps to.
 OFF     = new Level("OFF",     Integer.MAX_VALUE, defaultBundle)  -> OFF    32
 SEVERE  = new Level("SEVERE",  1000, defaultBundle)               -> ERROR  16
 WARNING = new Level("WARNING",  900, defaultBundle)               -> WARN    8
 INFO    = new Level("INFO",     800, defaultBundle)               -> INFO
 CONFIG  = new Level("CONFIG",   700, defaultBundle)               -> INFO    4
 FINE    = new Level("FINE",     500, defaultBundle)               -> DEBUG
 FINER   = new Level("FINER",    400, defaultBundle)               -> DEBUG   2
 FINEST  = new Level("FINEST",   300, defaultBundle)               -> TRACE   1
 ALL     = new Level("ALL", Integer.MIN_VALUE, defaultBundle)      -> ALL     0
*/
|
xkommando/PRMA
|
logger/src/main/scala/com/caibowen/prma/logger/jul/JulRecordAdaptor.scala
|
Scala
|
lgpl-3.0
| 3,529 |
package scavlink.link
import scavlink.state.{State, StateGenerator}
package object telemetry {
type StateGenerators = Set[StateGenerator[_ <: State]]
}
|
nickolasrossi/scavlink
|
src/main/scala/scavlink/link/telemetry/package.scala
|
Scala
|
mit
| 156 |
/*                     __                                               *\
**     ________ ___   / /  ___      __ ____  Scala.js IR                **
**    / __/ __// _ | / /  / _ | __ / // __/  (c) 2014, LAMP/EPFL        **
**  __\ \/ /__/ __ |/ /__/ __ |/_// /_\ \    http://scala-js.org/       **
** /____/\___/_/ |_/____/_/ | |__/ /____/                               **
**                          |/____/                                     **
\*                                                                      */
package org.scalajs.core.ir
import java.io._
import Infos._
object InfoSerializers {
/** Scala.js IR File Magic Number
*
* CA FE : first part of magic number of Java class files
* 4A 53 : "JS" in ASCII
*
*/
final val IRMagicNumber = 0xCAFE4A53
def serialize(stream: OutputStream, classInfo: ClassInfo): Unit = {
new Serializer().serialize(stream, classInfo)
}
def deserialize(stream: InputStream): ClassInfo = {
deserializeWithVersion(stream)._2
}
def deserializeWithVersion(stream: InputStream): (String, ClassInfo) = {
new Deserializer(stream).deserialize()
}
private final class Serializer {
def serialize(stream: OutputStream, classInfo: ClassInfo): Unit = {
val s = new DataOutputStream(stream)
def writeSeq[A](seq: Seq[A])(writeElem: A => Unit): Unit = {
s.writeInt(seq.size)
seq.foreach(writeElem)
}
def writeStrings(seq: Seq[String]): Unit =
writeSeq(seq)(s.writeUTF(_))
// Write the Scala.js IR magic number
s.writeInt(IRMagicNumber)
// Write the Scala.js Version
s.writeUTF(ScalaJSVersions.binaryEmitted)
import classInfo._
s.writeUTF(encodedName)
s.writeBoolean(isExported)
s.writeByte(ClassKind.toByte(kind))
s.writeUTF(superClass.getOrElse(""))
writeStrings(interfaces)
def writeMethodInfo(methodInfo: MethodInfo): Unit = {
import methodInfo._
def writePerClassStrings(m: Map[String, List[String]]): Unit = {
writeSeq(m.toSeq) {
case (cls, items) => s.writeUTF(cls); writeStrings(items)
}
}
s.writeUTF(encodedName)
s.writeBoolean(isStatic)
s.writeBoolean(isAbstract)
s.writeBoolean(isExported)
writePerClassStrings(staticFieldsRead)
writePerClassStrings(staticFieldsWritten)
writePerClassStrings(methodsCalled)
writePerClassStrings(methodsCalledStatically)
writePerClassStrings(staticMethodsCalled)
writeStrings(instantiatedClasses)
writeStrings(accessedModules)
writeStrings(usedInstanceTests)
writeStrings(accessedClassData)
}
writeSeq(methods)(writeMethodInfo(_))
s.flush()
}
}
private final class Deserializer(stream: InputStream) {
private[this] val input = new DataInputStream(stream)
def readList[A](readElem: => A): List[A] =
List.fill(input.readInt())(readElem)
def readStrings(): List[String] =
readList(input.readUTF())
def deserialize(): (String, ClassInfo) = {
val version = readHeader()
import input._
val useHacks065 =
Set("0.6.0", "0.6.3", "0.6.4", "0.6.5").contains(version)
val useHacks0614 =
useHacks065 || Set("0.6.6", "0.6.8", "0.6.13", "0.6.14").contains(version)
val encodedName = readUTF()
val isExported = readBoolean()
val kind = ClassKind.fromByte(readByte())
val superClass0 = readUTF()
val superClass = if (superClass0 == "") None else Some(superClass0)
val interfaces = readList(readUTF())
def readMethod(): MethodInfo = {
def readPerClassStrings(): Map[String, List[String]] =
readList(readUTF() -> readStrings()).toMap
val encodedName = readUTF()
val isStatic = readBoolean()
val isAbstract = readBoolean()
val isExported = readBoolean()
val staticFieldsRead =
if (useHacks0614) Map.empty[String, List[String]]
else readPerClassStrings()
val staticFieldsWritten =
if (useHacks0614) Map.empty[String, List[String]]
else readPerClassStrings()
val methodsCalled = readPerClassStrings()
val methodsCalledStatically = readPerClassStrings()
val staticMethodsCalled = readPerClassStrings()
val instantiatedClasses = readStrings()
val accessedModules = readStrings()
val usedInstanceTests = readStrings()
val accessedClassData = readStrings()
MethodInfo(encodedName, isStatic, isAbstract, isExported,
staticFieldsRead, staticFieldsWritten,
methodsCalled, methodsCalledStatically, staticMethodsCalled,
instantiatedClasses, accessedModules, usedInstanceTests,
accessedClassData)
}
val methods0 = readList(readMethod())
val methods = if (useHacks065) {
methods0.filter(m => !Definitions.isReflProxyName(m.encodedName))
} else {
methods0
}
val info = ClassInfo(encodedName, isExported, kind,
superClass, interfaces, methods)
(version, info)
}
/** Reads the Scala.js IR header and verifies the version compatibility.
* Returns the emitted binary version.
*/
def readHeader(): String = {
// Check magic number
if (input.readInt() != IRMagicNumber)
throw new IOException("Not a Scala.js IR file")
// Check that we support this version of the IR
val version = input.readUTF()
val supported = ScalaJSVersions.binarySupported
if (!supported.contains(version)) {
throw new IRVersionNotSupportedException(version, supported,
s"This version ($version) of Scala.js IR is not supported. " +
s"Supported versions are: ${supported.mkString(", ")}")
}
version
}
}
}
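// Round-trip sketch (assumes some `info: Infos.ClassInfo` is in scope):
//   val out = new java.io.ByteArrayOutputStream()
//   InfoSerializers.serialize(out, info)
//   val back = InfoSerializers.deserialize(new java.io.ByteArrayInputStream(out.toByteArray))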
|
xuwei-k/scala-js
|
ir/src/main/scala/org/scalajs/core/ir/InfoSerializers.scala
|
Scala
|
bsd-3-clause
| 5,913 |
/* __ *\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
// Developer note:
// scala -J-Dscala.process.debug
// for process debugging output.
//
package scala.sys {
/** This package handles the execution of external processes. The contents of
* this package can be divided in three groups, according to their
* responsibilities:
*
* - Indicating what to run and how to run it.
* - Handling a process input and output.
* - Running the process.
*
* For simple uses, the only group that matters is the first one. Running an
* external command can be as simple as `"ls".!`, or as complex as building a
* pipeline of commands such as this:
*
* {{{
* import scala.sys.process._
* "ls" #| "grep .scala" #&& Seq("sh", "-c", "scalac *.scala") #|| "echo nothing found" lineStream
* }}}
*
* We describe below the general concepts and architecture of the package,
* and then take a closer look at each of the categories mentioned above.
*
* ==Concepts and Architecture==
*
* The underlying basis for the whole package is Java's `Process` and
* `ProcessBuilder` classes. While there's no need to use these Java classes,
* they impose boundaries on what is possible. One cannot, for instance,
* retrieve a ''process id'' for whatever is executing.
*
* When executing an external process, one can provide a command's name,
* arguments to it, the directory in which it will be executed and what
* environment variables will be set. For each executing process, one can
* feed its standard input through a `java.io.OutputStream`, and read from
* its standard output and standard error through a pair of
* `java.io.InputStream`. One can wait until a process finishes execution and
* then retrieve its return value, or one can kill an executing process.
* Everything else must be built on those features.
*
* This package provides a DSL for running and chaining such processes,
* mimicking Unix shells ability to pipe output from one process to the input
* of another, or control the execution of further processes based on the
* return status of the previous one.
*
* In addition to this DSL, this package also provides a few ways of
* controlling input and output of these processes, going from simple and
* easy to use to complex and flexible.
*
* When processes are composed, a new `ProcessBuilder` is created which, when
* run, will execute the `ProcessBuilder` instances it is composed of
* according to the manner of the composition. If piping one process to
* another, they'll be executed simultaneously, and each will be passed a
* `ProcessIO` that will copy the output of one to the input of the other.
*
* ==What to Run and How==
*
* The central component of the process execution DSL is the
* [[scala.sys.process.ProcessBuilder]] trait. It is `ProcessBuilder` that
* implements the process execution DSL, that creates the
* [[scala.sys.process.Process]] that will handle the execution, and return
* the results of such execution to the caller. We can see that DSL in the
   * introductory example: `#|`, `#&&` and `#||` are methods on
* `ProcessBuilder` used to create a new `ProcessBuilder` through
* composition.
*
* One creates a `ProcessBuilder` either through factories on the
* [[scala.sys.process.Process]]'s companion object, or through implicit
* conversions available in this package object itself. Implicitly, each
* process is created either out of a `String`, with arguments separated by
* spaces -- no escaping of spaces is possible -- or out of a
* [[scala.collection.Seq]], where the first element represents the command
* name, and the remaining elements are arguments to it. In this latter case,
* arguments may contain spaces.
*
   * To further control how the process will be run, such as specifying
* the directory in which it will be run, see the factories on
* [[scala.sys.process.Process]]'s companion object.
*
* Once the desired `ProcessBuilder` is available, it can be executed in
* different ways, depending on how one desires to control its I/O, and what
* kind of result one wishes for:
*
* - Return status of the process (`!` methods)
* - Output of the process as a `String` (`!!` methods)
* - Continuous output of the process as a `Stream[String]` (`lineStream` methods)
* - The `Process` representing it (`run` methods)
*
* Some simple examples of these methods:
* {{{
* import scala.sys.process._
*
* // This uses ! to get the exit code
* def fileExists(name: String) = Seq("test", "-f", name).! == 0
*
* // This uses !! to get the whole result as a string
* val dirContents = "ls".!!
*
* // This "fire-and-forgets" the method, which can be lazily read through
* // a Stream[String]
* def sourceFilesAt(baseDir: String): Stream[String] = {
* val cmd = Seq("find", baseDir, "-name", "*.scala", "-type", "f")
* cmd.lineStream
* }
* }}}
*
* We'll see more details about controlling I/O of the process in the next
* section.
*
* ==Handling Input and Output==
*
* In the underlying Java model, once a `Process` has been started, one can
* get `java.io.InputStream` and `java.io.OutputStream` representing its
* output and input respectively. That is, what one writes to an
* `OutputStream` is turned into input to the process, and the output of a
* process can be read from an `InputStream` -- of which there are two, one
* representing normal output, and the other representing error output.
*
* This model creates a difficulty, which is that the code responsible for
* actually running the external processes is the one that has to take
* decisions about how to handle its I/O.
*
* This package presents an alternative model: the I/O of a running process
* is controlled by a [[scala.sys.process.ProcessIO]] object, which can be
* passed _to_ the code that runs the external process. A `ProcessIO` will
* have direct access to the java streams associated with the process I/O. It
* must, however, close these streams afterwards.
*
* Simpler abstractions are available, however. The components of this
* package that handle I/O are:
*
* - [[scala.sys.process.ProcessIO]]: provides the low level abstraction.
* - [[scala.sys.process.ProcessLogger]]: provides a higher level abstraction
* for output, and can be created through its companion object.
* - [[scala.sys.process.BasicIO]]: a library of helper methods for the
* creation of `ProcessIO`.
* - This package object itself, with a few implicit conversions.
*
* Some examples of I/O handling:
* {{{
* import scala.sys.process._
*
* // An overly complex way of computing size of a compressed file
* def gzFileSize(name: String) = {
* val cat = Seq("zcat", name)
* var count = 0
* def byteCounter(input: java.io.InputStream) = {
* while(input.read() != -1) count += 1
* input.close()
* }
* val p = cat run new ProcessIO(_.close(), byteCounter, _.close())
* p.exitValue()
* count
* }
*
* // This "fire-and-forgets" the method, which can be lazily read through
* // a Stream[String], and accumulates all errors on a StringBuffer
* def sourceFilesAt(baseDir: String): (Stream[String], StringBuffer) = {
* val buffer = new StringBuffer()
* val cmd = Seq("find", baseDir, "-name", "*.scala", "-type", "f")
* val lineStream = cmd lineStream_! ProcessLogger(buffer append _)
* (lineStream, buffer)
* }
* }}}
*
* Instances of the java classes `java.io.File` and `java.net.URL` can both
* be used directly as input to other processes, and `java.io.File` can be
* used as output as well. One can even pipe one to the other directly
* without any intervening process, though that's not a design goal or
* recommended usage. For example, the following code will copy a web page to
* a file:
* {{{
* import java.io.File
* import java.net.URL
* import scala.sys.process._
* new URL("http://www.scala-lang.org/") #> new File("scala-lang.html") !
* }}}
*
* More information about the other ways of controlling I/O can be found
* in the Scaladoc for the associated objects, traits and classes.
*
* ==Running the Process==
*
* Paradoxically, this is the simplest component of all, and the one least
* likely to be interacted with. It consists solely of
* [[scala.sys.process.Process]], and it provides only two methods:
*
   *   - `exitValue()`: blocks until the process exits, and then returns the exit
* value. This is what happens when one uses the `!` method of
* `ProcessBuilder`.
* - `destroy()`: this will kill the external process and close the streams
* associated with it.
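   *
   * As a small usage sketch (not part of the original documentation), one can
   * start a process in the background and later kill it instead of waiting:
   * {{{
   * import scala.sys.process._
   * val p = "sleep 100".run()  // returns a Process immediately
   * // ... decide the result is no longer needed ...
   * p.destroy()                // kills it; p.exitValue() would have blocked
   * }}}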
*/
package object process extends ProcessImplicits {
/** The input stream of this process */
def stdin = java.lang.System.in
/** The output stream of this process */
def stdout = java.lang.System.out
/** The error stream of this process */
def stderr = java.lang.System.err
}
// private val shell: String => Array[String] =
// if (isWin) Array("cmd.exe", "/C", _)
// else Array("sh", "-c", _)
package process {
// These are in a nested object instead of at the package level
// due to the issues described in tickets #3160 and #3836.
private[process] object processInternal {
final val processDebug = props contains "scala.process.debug"
dbg("Initializing process package.")
type =?>[-A, +B] = PartialFunction[A, B]
type Closeable = java.io.Closeable
type File = java.io.File
type IOException = java.io.IOException
type InterruptedIOException = java.io.InterruptedIOException
type InputStream = java.io.InputStream
type JProcess = java.lang.Process
type JProcessBuilder = java.lang.ProcessBuilder
type LinkedBlockingQueue[T] = java.util.concurrent.LinkedBlockingQueue[T]
type OutputStream = java.io.OutputStream
type SyncVar[T] = scala.concurrent.SyncVar[T]
type URL = java.net.URL
def onError[T](handler: Throwable => T): Throwable =?> T = {
case e @ _ => handler(e)
}
def onIOInterrupt[T](handler: => T): Throwable =?> T = {
case _: InterruptedIOException => handler
}
def onInterrupt[T](handler: => T): Throwable =?> T = {
case _: InterruptedException => handler
}
def ioFailure[T](handler: IOException => T): Throwable =?> T = {
case e: IOException => handler(e)
}
def dbg(msgs: Any*) = if (processDebug) {
Console.println("[process] " + (msgs mkString " "))
}
}
}
}
|
rorygraves/perf_tester
|
corpus/scala-library/src/main/scala/sys/process/package.scala
|
Scala
|
apache-2.0
| 11,675 |
/*
* AnnealingSmokersTest.scala
* Bayesian network example tests.
*
* Created By: Avi Pfeffer ([email protected])
* Creation Date: Jan 1, 2009
*
* Copyright 2013 Avrom J. Pfeffer and Charles River Analytics, Inc.
* See http://www.cra.com or email [email protected] for information.
*
* See http://www.github.com/p2t2/figaro for a copy of the software license.
*/
package com.cra.figaro.test.example
import org.scalatest.Matchers
import org.scalatest.WordSpec
import com.cra.figaro.algorithm._
import com.cra.figaro.language._
import com.cra.figaro.algorithm.factored._
import com.cra.figaro.algorithm.sampling._
import com.cra.figaro.library.compound.^^
import com.cra.figaro.test._
class AnnealingSmokersTest extends WordSpec with Matchers {
"A simple AnnealingSmokersTest" should {
"produce the correct probability under Metropolis-Hastings Annealing" taggedAs (ExampleTest) in {
Universe.createNew
class Person {
val smokes = Flip(0.6)
}
val alice, bob, clara = new Person
val friends = List((alice, bob), (bob, clara))
clara.smokes.observe(true)
def smokingInfluence(pair: (Boolean, Boolean)) =
      if (pair._1 == pair._2) 3.0 else 1.0
for { (p1, p2) <- friends } {
^^(p1.smokes, p2.smokes).setConstraint(smokingInfluence)
}
val alg = MetropolisHastingsAnnealer(ProposalScheme.default, Schedule.default(3.0))
alg.start()
Thread.sleep(1000)
alg.stop()
alg.mostLikelyValue(alice.smokes) should be(true)
alg.mostLikelyValue(bob.smokes) should be(true)
alg.kill
}
}
}
|
wkretschmer/figaro
|
Figaro/src/test/scala/com/cra/figaro/test/example/AnnealingSmokersTest.scala
|
Scala
|
bsd-3-clause
| 1,626 |
package org.jetbrains.plugins.dotty.lang.psi.types
import org.jetbrains.plugins.scala.lang.psi.types.ScType
import org.jetbrains.plugins.scala.lang.psi.types.api.{TypeVisitor, ValueType}
import org.jetbrains.plugins.scala.project.ProjectContext
/**
* @author adkozlov
*/
trait DottyType extends ScType {
override def typeSystem: DottyTypeSystem = DottyTypeSystem.instance
}
// is value type?
class DottyNoType(implicit val projectContext: ProjectContext) extends DottyType with ValueType {
override def visitType(visitor: TypeVisitor): Unit = visitor match {
case dottyVisitor: DottyTypeVisitor => dottyVisitor.visitNoType(this)
case _ =>
}
override def isFinalType = true
override def equals(other: Any): Boolean = other.isInstanceOf[DottyNoType]
override def hashCode(): Int = DottyNoType.hashCode()
}
object DottyNoType {
def apply()(implicit projectContext: ProjectContext) = new DottyNoType()
def unapply(t: DottyNoType): Boolean = true
}
trait DottyConstantType extends DottyType
|
jastice/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/dotty/lang/psi/types/DottyType.scala
|
Scala
|
apache-2.0
| 1,021 |
/*
* Copyright (c) 2014 Dufresne Management Consulting LLC.
*/
package testActors
import org.scalatest.Suites
class TestSuite extends Suites (
new HtmlCleanerSpec,
new HotHouseJazzSpec,
new CrawlerHelperSpec,
new EventCrawlerSpec)
|
reactivecore01/livejazznear.me
|
play/test/testActors/TestSuites.scala
|
Scala
|
apache-2.0
| 243 |
package org.jetbrains.sbt
package codeInspection
import com.intellij.codeInspection.{ProblemHighlightType, ProblemsHolder}
import com.intellij.openapi.project.Project
import com.intellij.psi.PsiElement
import org.jetbrains.plugins.scala.codeInspection.{AbstractFixOnPsiElement, AbstractInspection}
import org.jetbrains.plugins.scala.lang.psi.api.ScalaRecursiveElementVisitor
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.ScReferencePattern
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScMethodCall
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScPatternDefinition
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory.createExpressionFromText
import org.jetbrains.plugins.scala.lang.psi.impl.base.ScLiteralImpl
/**
* @author Nikolay Obedin
* @since 8/5/14.
*/
class SbtReplaceProjectWithProjectInInspection extends AbstractInspection {
def actionFor(holder: ProblemsHolder): PartialFunction[PsiElement, Any] = {
case defn: ScPatternDefinition if defn.getContainingFile.getFileType.getName == Sbt.Name =>
(defn.expr, defn.bindings) match {
case (Some(call: ScMethodCall), Seq(projectNamePattern: ScReferencePattern)) =>
findPlaceToFix(call, projectNamePattern.getText).foreach { place =>
holder.registerProblem(place, SbtBundle("sbt.inspection.projectIn.name"),
ProblemHighlightType.GENERIC_ERROR_OR_WARNING,
new SbtReplaceProjectWithProjectInQuickFix(place))
}
case _ => // do nothing
}
}
private def findPlaceToFix(call: ScMethodCall, projectName: String): Option[ScMethodCall] = {
var placeToFix: Option[ScMethodCall] = None
val visitor = new ScalaRecursiveElementVisitor {
override def visitMethodCallExpression(call: ScMethodCall): Unit = call match {
case ScMethodCall(expr, Seq(ScLiteralImpl.string(name), _))
if expr.getText == "Project" && name == projectName =>
placeToFix = Some(call)
case _ =>
super.visitMethodCallExpression(call)
}
}
call.accept(visitor)
placeToFix
}
}
class SbtReplaceProjectWithProjectInQuickFix(call: ScMethodCall)
extends AbstractFixOnPsiElement(SbtBundle("sbt.inspection.projectIn.name"), call) {
def doApplyFix(project: Project): Unit = {
val place = getElement
place match {
case ScMethodCall(_, Seq(_, pathElt)) =>
place.replace(createExpressionFromText("project.in(" + pathElt.getText + ")")(place.getManager))
case _ => // do nothing
}
}
}
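// Effect sketch: for a definition whose binding name matches the literal
// project name, the quick fix rewrites
//   lazy val foo = Project("foo", file("foo"))
// into
//   lazy val foo = project.in(file("foo"))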
|
ilinum/intellij-scala
|
src/org/jetbrains/sbt/codeInspection/SbtReplaceProjectWithProjectInInspection.scala
|
Scala
|
apache-2.0
| 2,623 |
package com.github.mjreid.flinkwrapper
import java.time.LocalDateTime
import com.github.mjreid.flinkwrapper.util.Readers
import play.api.libs.json.{JsPath, Reads}
import play.api.libs.functional.syntax._
import scala.concurrent.duration.Duration
case class JobVertex(
id: String,
name: String,
parallelism: Int,
status: ExecutionStatus.ExecutionStatus,
startTime: Option[LocalDateTime],
endTime: Option[LocalDateTime],
duration: Option[Duration],
taskCounts: VertexTaskCounts,
metrics: VertexMetrics
)
object JobVertex {
implicit val reads: Reads[JobVertex] = (
(JsPath \ "id").read[String] and
(JsPath \ "name").read[String] and
(JsPath \ "parallelism").read[Int] and
(JsPath \ "status").read[ExecutionStatus.ExecutionStatus] and
(JsPath \ "start-time").read[Option[LocalDateTime]](Readers.millisOptionalDateTimeReader) and
(JsPath \ "end-time").read[Option[LocalDateTime]](Readers.millisOptionalDateTimeReader) and
(JsPath \ "duration").read[Option[Duration]](Readers.millisOptionalDurationReader) and
(JsPath \ "tasks").read[VertexTaskCounts] and
(JsPath \ "metrics").read[VertexMetrics]
)(JobVertex.apply _)
}
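// Shape of the JSON this Reads expects (values illustrative; per the reader
// names, times are epoch millis and duration is in millis):
//   { "id": "...", "name": "...", "parallelism": 4, "status": "RUNNING",
//     "start-time": 1490000000000, "end-time": -1, "duration": 120000,
//     "tasks": { ... }, "metrics": { ... } }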
|
mjreid/flink-rest-scala-wrapper
|
api/src/main/scala/com/github/mjreid/flinkwrapper/JobVertex.scala
|
Scala
|
apache-2.0
| 1,195 |
package org.jetbrains.plugins.scala.debugger.evaluateExpression
import org.jetbrains.plugins.scala.debugger._
/**
* User: Alefas
* Date: 17.10.11
*/
class ScalaMethodEvaluationTest extends ScalaMethodEvaluationTestBase {
override implicit val version: ScalaVersion = Scala_2_11
}
class ScalaMethodEvaluationTest_212 extends ScalaMethodEvaluationTestBase {
override implicit val version: ScalaVersion = Scala_2_12
}
abstract class ScalaMethodEvaluationTestBase extends ScalaDebuggerTestCase {
addFileWithBreakpoints("SmartBoxing.scala",
s"""
|object SmartBoxing {
| def foo(x: AnyVal) = 1
| def goo(x: Int) = x + 1
| def main(args: Array[String]) {
| val z = java.lang.Integer.valueOf(5)
| ""$bp
| }
|}
""".stripMargin.trim()
)
def testSmartBoxing() {
runDebugger() {
waitForBreakpoint()
evalEquals("foo(1)", "1")
evalEquals("goo(z)", "6")
}
}
addFileWithBreakpoints("FunctionWithSideEffects.scala",
s"""
|object FunctionWithSideEffects {
| var i = 1
| def foo = {
| i = i + 1
| i
| }
| def main(args: Array[String]) {
| ""$bp
| }
|}
""".stripMargin.trim()
)
def testFunctionWithSideEffects() {
runDebugger() {
waitForBreakpoint()
evalEquals("foo", "2")
evalEquals("foo", "3")
}
}
addFileWithBreakpoints("SimpleFunction.scala",
s"""
|object SimpleFunction {
| def foo() = 2
| def main(args: Array[String]) {
| ""$bp
| }
|}
""".stripMargin.trim()
)
def testSimpleFunction() {
runDebugger() {
waitForBreakpoint()
evalEquals("foo", "2")
}
}
addFileWithBreakpoints("PrivateMethods.scala",
s"""
|import PrivateMethods._
|object PrivateMethods {
| private def foo() = 2
| def main(args: Array[String]) {
| ""$bp
| }
|}
|class PrivateMethods {
| private def bar() = 1
|}
""".stripMargin.trim()
)
def testPrivateMethods() {
runDebugger() {
waitForBreakpoint()
evalEquals("foo", "2")
evalEquals("new PrivateMethods().bar()", "1")
}
}
addFileWithBreakpoints("ApplyCall.scala",
s"""
|object ApplyCall {
| class A {
| def apply(x: Int) = x + 1
| }
| def main(args : Array[String]) {
| val a = new A()
| ""$bp
| }
|}
""".stripMargin.trim()
)
def testApplyCall() {
runDebugger() {
waitForBreakpoint()
evalEquals("a(-1)", "0")
evalEquals("Array(\\"a\\", \\"b\\")", "[a,b]")
}
}
addFileWithBreakpoints("CurriedFunction.scala",
s"""
|object CurriedFunction {
| def foo(x: Int)(y: Int) = x * 2 + y
| def main(args: Array[String]) {
| ""$bp
| }
|}
""".stripMargin.trim()
)
def testCurriedFunction() {
runDebugger() {
waitForBreakpoint()
evalEquals("foo(1)(2)", "4")
}
}
addFileWithBreakpoints("ArrayApplyFunction.scala",
s"""
|object ArrayApplyFunction {
| def main(args : Array[String]) {
| val s = Array.ofDim[String](2, 2)
| s(1)(1) = "test"
| ""$bp
| }
|}
""".stripMargin.trim()
)
def testArrayApplyFunction() {
runDebugger() {
waitForBreakpoint()
evalEquals("s(1)(1)", "test")
}
}
addFileWithBreakpoints("OverloadedFunction.scala",
s"""
|object OverloadedFunction {
| def foo(x: Int) = 1
| def foo(x: String) = 2
| def main(args: Array[String]) {
| ""$bp
| }
|}
""".stripMargin.trim()
)
def testOverloadedFunction() {
runDebugger() {
waitForBreakpoint()
evalEquals("foo(1)", "1")
evalEquals("foo(\\"\\")", "2")
}
}
addFileWithBreakpoints("ImplicitConversion.scala",
s"""
|object ImplicitConversion {
| def main(args : Array[String]) {
| ""$bp
| }
|}
""".stripMargin.trim()
)
def testImplicitConversion() {
runDebugger() {
waitForBreakpoint()
evalEquals("\\"test\\".dropRight(2)", "te")
evalEquals("\\"3\\" -> \\"3\\"", "(3,3)")
evalEquals("(1 - 3).abs", "2")
}
}
addFileWithBreakpoints("SequenceArgument.scala",
s"""
|object SequenceArgument {
| def moo(x: String*) = x.foldLeft(0)(_ + _.length())
| def main(args: Array[String]) {
| val x = Seq("a", "b")
| ""$bp
| }
|}
""".stripMargin.trim()
)
def testSequenceArgument() {
runDebugger() {
waitForBreakpoint()
evalEquals("moo(x: _*)", "2")
}
}
addFileWithBreakpoints("ArrayLengthFunction.scala",
s"""
|object ArrayLengthFunction {
| def main(args : Array[String]) {
| val s = Array(1, 2, 3)
| ""$bp
| }
|}
""".stripMargin.trim()
)
def testArrayLengthFunction() {
runDebugger() {
waitForBreakpoint()
evalEquals("s.length", "3")
}
}
addFileWithBreakpoints("SimpleFunctionFromInner.scala",
s"""
|object SimpleFunctionFromInner {
| def foo() = 2
| def main(args: Array[String]) {
| val x = 1
| val r = () => {
| x
| ""$bp
| }
| r()
| }
|}
""".stripMargin.trim()
)
def testSimpleFunctionFromInner() {
runDebugger() {
waitForBreakpoint()
evalEquals("foo", "2")
}
}
addFileWithBreakpoints("LibraryFunctions.scala",
s"""
|object LibraryFunctions {
| def main(args: Array[String]) {
| ""$bp
| }
|}
""".stripMargin.trim()
)
def testLibraryFunctions() {
runDebugger() {
waitForBreakpoint()
evalStartsWith("scala.collection.mutable.ArrayBuffer.newBuilder", "ArrayBuffer()")
evalStartsWith("\\"test\\".substring(0, 2)", "te")
evalStartsWith("\\"test\\".substring(2)", "st")
evalEquals("List[Int](1, 2)", "List(1, 2)")
evalEquals("List(1, 2)", "List(1, 2)")
evalEquals("Some(\\"a\\")", "Some(a)")
evalEquals("Option(\\"a\\")", "Some(a)")
evalStartsWith("1 -> 2", "(1,2)")
evalEquals("123.toString", "123")
evalStartsWith("BigInt(2)", "2")
evalEquals("Seq(4, 3, 2, 1).sorted", "List(1, 2, 3, 4)")
}
}
addFileWithBreakpoints("DynamicFunctionApplication.scala",
s"""
|class A
|class B extends A {
| def foo() = 1
| def bar(s: String) = s
|}
|object DynamicFunctionApplication {
| def main(args: Array[String]) {
| val a: A = new B
| ""$bp
| }
|}
""".stripMargin.trim()
)
def testDynamicFunctionApplication() {
runDebugger() {
waitForBreakpoint()
evalEquals("a.foo()", "1")
evalEquals("a.bar(\\"hi\\")", "hi")
}
}
addFileWithBreakpoints("NonStaticFunction.scala",
s"""
|object NonStaticFunction {
| def foo() = 2
| val x = 1
| def main(args: Array[String]) {
| def moo() {}
| class A {
| val x = 1
| def goo() = 2
| def foo() {
| val r = () => {
| moo()
| x
| ""$bp
| }
| r()
| }
| }
|
| new A().foo()
| }
|}
""".stripMargin.trim()
)
def testNonStaticFunction() {
runDebugger() {
waitForBreakpoint()
evalStartsWith("goo", "2")
}
}
addFileWithBreakpoints("DefaultAndNamedParameters.scala",
s"""
|object DefaultAndNamedParameters {
| def foo(x: Int, y: Int = 1, z: Int)(h: Int = x + y, m: Int) = x + y + z + h + m
| def main(args: Array[String]) {
| ""$bp
| }
|}
""".stripMargin.trim()
)
def testDefaultAndNamedParameters() {
runDebugger() {
waitForBreakpoint()
evalEquals("foo(1, z = 1)(m = 1)", "6")
evalEquals("foo(1, 2, 1)(m = 1)", "8")
evalEquals("foo(1, 2, 1)(1, m = 1)", "6")
}
}
addFileWithBreakpoints("RepeatedParameters.scala",
s"""
|object RepeatedParameters {
| def foo(x: String*) = x.foldLeft("")(_ + _)
| def main(args: Array[String]) {
| ""$bp
| }
|}
""".stripMargin.trim()
)
def testRepeatedParameters() {
runDebugger() {
waitForBreakpoint()
evalEquals("foo(\\"a\\", \\"b\\", \\"c\\")", "abc")
evalEquals("foo(\\"a\\")", "a")
evalEquals("foo()", "")
evalEquals("Array[Byte](0, 1)", "[0,1]")
}
}
addFileWithBreakpoints("ImplicitParameters.scala",
s"""
|object ImplicitParameters {
| def moo(x: Int)(implicit s: String) = x + s.length()
| def foo(x: Int)(implicit y: Int) = {
| implicit val s = "test"
| ""$bp
| x + y
| }
| def main(args: Array[String]) {
| implicit val x = 1
| foo(1)
| }
|}
""".stripMargin.trim()
)
def testImplicitParameters() {
runDebugger() {
waitForBreakpoint()
evalEquals("foo(1)", "2")
evalEquals("foo(1)(2)", "3")
evalEquals("moo(1)", "5")
evalEquals("moo(1)(\\"a\\")", "2")
}
}
addFileWithBreakpoints("CaseClasses.scala",
s"""
|case class CCA(x: Int)
|object CaseClasses {
| case class CCB(x: Int)
| def main(args: Array[String]) {
| ""$bp
| }
|}
""".stripMargin.trim()
)
def testCaseClasses() {
runDebugger() {
waitForBreakpoint()
evalEquals("CCA(1)", "CCA(1)")
evalEquals("CCA.apply(1)", "CCA(1)")
evalEquals("CCB(1)", "CCB(1)")
evalEquals("CCB.apply(1)", "CCB(1)")
}
}
addFileWithBreakpoints("PrivateInTrait.scala",
s"""
|trait Privates {
|
| private[this] def privThis(i: Int) = i + 1
|
| private def priv(i: Int) = i + 2
|
| private val privConst = 42
|
| def open() = {
| ""$bp
| }
|}
|
|object PrivateInTrait {
| class A extends Privates
|
| def main(args: Array[String]) {
| val a = new A
| a.open()
| }
|}""".stripMargin)
def testPrivateInTrait(): Unit = {
runDebugger() {
waitForBreakpoint()
evalEquals("priv(0)", "2")
evalEquals("privThis(0)", "1")
evalEquals("privConst", "42")
}
}
addFileWithBreakpoints("LocalsInTrait.scala",
s"""trait TTT {
| def foo() = {
| def bar() = {
| def baz() = 1
| baz()$bp
| }
| bar()
| }
|}
|
|object LocalsInTrait {
| class A extends TTT
|
| def main(args: Array[String]) {
| val a = new A
| a.foo()
| }
|}
""".stripMargin)
def testLocalsInTrait(): Unit = {
runDebugger() {
waitForBreakpoint()
evalEquals("bar()", "1")
evalEquals("bar", "1")
evalEquals("baz()", "1")
evalEquals("foo()", "1")
evalEquals("foo + bar", "2")
}
}
// tests for local functions ----------------------------------------------
addFileWithBreakpoints("LocalFunctions.scala",
s"""
|object LocalFunctions {
| val field = 1
|
| def main(args: Array[String]) {
| simple()
| withParameters()
| withParamFromLocal()
| withDiffParams1()
| withDiffParams2()
| withDiffParams3()
| withObject()
| withAnonfunField()
| useField()
| }
|
| def simple() {
| def foo1: Int = 1
| ""$bp
| }
|
| def withParameters() {
| val y = "test"
| def foo2(x: Int): Int = x + y.length
| ""$bp
| }
|
| def withParamFromLocal() {
| val x = 2
| def foo3: Int = x - 1
| ""$bp
| }
|
| def withDiffParams1() {
| val x = 2
| val y = "c"
| def foo4: Int = x - y.length()
| ""$bp
| }
|
| def withDiffParams2() {
| val y = "c"
| val x = 2
| def foo5(): Int = x - y.length()
| ""$bp
| }
|
| def withDiffParams3() {
| val y = "c"
| val x = 2
| def foo6: Int = - y.length() + x
| ""$bp
| }
|
| def withObject() {
| object y {val y = 1}
| val x = 2
| def foo7: Int = x - y.y
| ""$bp
| }
|
| def withAnonfunField() {
| val g = 1
| def moo(x: Int) = g + x
| val zz = (y: Int) => {
| val uu = (x: Int) => {
| g
| ""$bp
| }
| uu(1)
| }
| zz(2)
| }
|
| def useField() {
| val x = 2
| def foo8: Int = x - field
| ""$bp
| }
|}
""".stripMargin.trim()
)
def testLocalFunctions() {
runDebugger() {
waitForBreakpoint()
evalEquals("foo1", "1")
atNextBreakpoint {
evalEquals("foo2(3)", "7")
}
atNextBreakpoint {
evalEquals("foo3", "1")
}
atNextBreakpoint {
evalEquals("foo4", "1")
}
atNextBreakpoint {
evalEquals("foo5", "1")
}
atNextBreakpoint {
evalEquals("foo6", "1")
}
atNextBreakpoint {
evalEquals("foo7", "1")
}
atNextBreakpoint {
evalEquals("moo(x)", "2")
}
atNextBreakpoint {
evalEquals("foo8", "1")
}
}
}
addFileWithBreakpoints("Closure.scala",
s"""
|object Closure {
| def main(args: Array[String]) {
| def outer() {
| val s = "start"
| def inner(a: String, b: String): String = {
| ""$bp
| s + a + b
| }
| inner("aa", "bb")
| }
| outer()
| }
|}
""".stripMargin.trim()
)
def testClosure() {
runDebugger() {
waitForBreakpoint()
evalEquals("a", "aa")
evalEquals("b", "bb")
evalEquals("s", "start")
evalEquals("inner(\\"qq\\", \\"ww\\")", "startqqww")
}
}
addFileWithBreakpoints("LocalWithDefaultAndNamedParams.scala",
s"""
|object LocalWithDefaultAndNamedParams {
| def main(args: Array[String]) {
| def outer() {
| def inner(a: String, b: String = "default", c: String = "other"): String = {
| ""$bp
| a + b + c
| }
| inner("aa")
| }
| outer()
| }
|}
""".stripMargin.trim()
)
def testLocalWithDefaultAndNamedParams() {
runDebugger() {
waitForBreakpoint()
evalEquals("a", "aa")
evalEquals("b", "default")
evalEquals("c", "other")
evalEquals("inner(\\"aa\\", \\"bb\\")", "aabbother")
evalEquals("inner(\\"aa\\")", "aadefaultother")
evalEquals("inner(\\"aa\\", c = \\"cc\\")", "aadefaultcc")
}
}
addFileWithBreakpoints("LocalMethodsWithSameName.scala",
s"""
|object LocalMethodsWithSameName {
| def main(args: Array[String]) {
| def foo(i: Int = 1) = {
| def foo(j: Int = 2) = j
| i$bp
| }
| ""$bp
| def other() {
| def foo(i: Int = 3) = i
| ""$bp
| }
| def third() {
| def foo(i: Int = 4) = i
| ""$bp
| }
| foo()
| other()
| third()
| }
|}
""".stripMargin.trim())
def testLocalMethodsWithSameName() {
runDebugger() {
waitForBreakpoint()
evalEquals("foo()", "1")
atNextBreakpoint {
evalEquals("foo()", "2")
}
atNextBreakpoint {
evalEquals("foo()", "3")
}
atNextBreakpoint {
evalEquals("foo()", "4")
}
}
}
addFileWithBreakpoints("ClosureWithDefaultParameter.scala",
s"""
|object ClosureWithDefaultParameter {
| def main(args: Array[String]) {
| def outer() {
| val s = "start"
| val d = "default"
| def inner(a: String, b: String = d): String = {
| ""$bp
| s + a + b
| }
| inner("aa")
| }
| outer()
| }
|}
""".stripMargin.trim()
)
def testClosureWithDefaultParameter() {
runDebugger() {
waitForBreakpoint()
evalEquals("a", "aa")
evalEquals("b", "default")
evalEquals("s", "start")
evalEquals("inner(\\"aa\\", \\"bb\\")", "startaabb")
evalEquals("inner(\\"aa\\")", "startaadefault")
}
}
addFileWithBreakpoints("FunctionsWithLocalParameters.scala",
s"""
|object FunctionsWithLocalParameters {
| def main(args: Array[String]) {
| val x = 1
| val y = 2
| def outer() = {
| val s = "start"
| val d = "default"
| def inner(a: String, b: String = d): String = {
| val z = s + a + b + y
| def inInner() = {
| z + x
| }
| inInner()
| ""$bp
| z
| }
| inner("aa")
| }
| outer()
| }
|}
""".stripMargin.trim()
)
def testFunctionsWithLocalParameters(): Unit = {
runDebugger() {
waitForBreakpoint()
evalEquals("a", "aa")
evalEquals("b", "default")
evalEquals("x", "1")
evalEquals("y", "2")
evalEquals("s", "start")
evalEquals("z", "startaadefault2")
evalEquals("inInner()", "startaadefault21")
evalEquals("inner(\\"aa\\", \\"bb\\")", "startaabb2")
evalEquals("inner(\\"aa\\")", "startaadefault2")
evalEquals("outer()", "startaadefault2")
}
}
addFileWithBreakpoints("WithFieldsFromOtherThread.scala",
s"""object WithFieldsFromOtherThread {
| val field = "field"
| def main(args: Array[String]) {
| def localFun1() = "localFun1"
|
| val inMain = "inMain"
| val inMainNotUsed = ":("
| inOtherThread {
| def localFun2 = "localFun2"
|
| val inFirst = "inFirst"
| var inFirstVar = "inFirstVar"
| val inFirstVarNotUsed = ":("
| inOtherThread {
| val local = "local"
| inMain + inFirst + inFirstVar
| ""$bp
| }
| }
| }
|
| def inOtherThread(action: => Unit) = {
| new Thread {
| override def run(): Unit = action
| }.start()
| }
|}
""".stripMargin.trim)
def testWithFieldsFromOtherThread(): Unit = {
runDebugger() {
waitForBreakpoint()
evalEquals("field", "field")
evalEquals("inMain", "inMain")
evalEquals("inFirst", "inFirst")
evalEquals("inFirstVar", "inFirstVar")
evalEquals("local", "local")
evalEquals("localFun2", "localFun2")
evalEquals("localFun1()", "localFun1")
}
}
addFileWithBreakpoints("InForStmt.scala",
s"""
|object InForStmt {
| def main(args: Array[String]) {
| for {
| x <- Seq(1, 2)
| x1 = x + 1
| y <- Seq(3, 4)
| y1 = y + 1
| if x == 1 && y == 3
| } {
| class Inner {
| def foo = x$bp
| }
| def getX = x
| def getX1 = x1
| def getY = y
| def getY1 = y1
| new Inner().foo
| ""$bp
| }
| }
|}
""".stripMargin.trim)
def testInForStmt(): Unit = {
runDebugger() {
waitForBreakpoint()
evalEquals("getX", "1")
evalEquals("getX1", "2")
evalEquals("getY()", "3")
evalEquals("getY1", "4")
evalEquals("new Inner().foo", "1")
atNextBreakpoint {
evalEquals("getX", "1")
evalEquals("getX1", "2")
evalEquals("getY()", "3")
evalEquals("getY1", "4")
evalEquals("new Inner().foo", "1")
}
}
}
addFileWithBreakpoints("QualifierNamedAsPackage.scala",
s"""
|object QualifierNamedAsPackage {
| def main(args: Array[String]) {
| val invoke = "invoke"
| val text = "text"
| val ref = "ref"
| ""$bp
| }
|}
""".stripMargin.trim)
def testQualifierNamedAsPackage(): Unit = {
runDebugger() {
waitForBreakpoint()
evalEquals("invoke.charAt(0)", "i")
evalEquals("text.length", "4")
evalEquals("ref.isEmpty()", "false")
evalEquals("ref + text", "reftext")
}
}
addFileWithBreakpoints("DefaultArgsInTrait.scala",
s"""object DefaultArgsInTrait extends SomeTrait {
| def main(args: Array[String]): Unit = {
| traitMethod("", true)
| }
|
|}
|
|trait SomeTrait {
| def traitMethod(s: String, firstArg: Boolean = false): String = {
| def local(firstArg: Boolean = false, secondArg: Boolean = false): String = {
| if (firstArg) "1"
| else if (secondArg) "2"
| else "0"
| }
| "stop here"$bp
| local(firstArg)
| }
|}
""".stripMargin.trim)
def testDefaultArgsInTrait(): Unit = {
runDebugger() {
waitForBreakpoint()
evalEquals("local()", "0")
evalEquals("local(false)", "0")
evalEquals("local(false, true)", "2")
evalEquals("local(secondArg = true)", "2")
evalEquals("local(firstArg = firstArg)", "1")
evalEquals("""traitMethod("")""", "0")
evalEquals("""traitMethod("", true)""", "1")
evalEquals("""traitMethod("", firstArg = false)""", "0")
}
}
}
|
ilinum/intellij-scala
|
test/org/jetbrains/plugins/scala/debugger/evaluateExpression/ScalaMethodEvaluationTest.scala
|
Scala
|
apache-2.0
| 22,136 |
package org.bfn.ninetynineprobs
import org.scalatest._
class P51Spec extends UnitSpec {
// TODO
}
|
bfontaine/99Scala
|
src/test/scala/P51Spec.scala
|
Scala
|
mit
| 105 |
package controllers
import lila.app._
import lila.api.Context
import lila.common.HTTPRequest
import lila.game.{ Game => GameModel, GameRepo }
import play.api.http.ContentTypes
import views._
object Search extends LilaController {
private def paginator = Env.game.paginator
private def env = Env.gameSearch
def searchForm = env.forms.search
def index(page: Int) = OpenBody { implicit ctx =>
NoBot {
Reasonable(page, 100) {
implicit def req = ctx.body
searchForm.bindFromRequest.fold(
failure => Ok(html.search.index(failure)).fuccess,
data => env.nonEmptyQuery(data) ?? { query =>
env.paginator(query, page) map (_.some)
} map { pager =>
Ok(html.search.index(searchForm fill data, pager))
}
)
}
}
}
def export = OpenBody { implicit ctx =>
NoBot {
implicit def req = ctx.body
searchForm.bindFromRequest.fold(
failure => Ok(html.search.index(failure)).fuccess,
data => env.nonEmptyQuery(data) ?? { query =>
env.paginator.ids(query, 5000) map { ids =>
import org.joda.time.DateTime
import org.joda.time.format.DateTimeFormat
val date = (DateTimeFormat forPattern "yyyy-MM-dd") print new DateTime
Ok.chunked(Env.api.pgnDump exportGamesFromIds ids).withHeaders(
CONTENT_TYPE -> ContentTypes.TEXT,
CONTENT_DISPOSITION -> ("attachment; filename=" + s"lichess_search_$date.pgn"))
}
}
)
}
}
private def NoBot(res: => Fu[play.api.mvc.Result])(implicit ctx: Context) =
if (HTTPRequest.isBot(ctx.req)) notFound
else res
}
|
pawank/lila
|
app/controllers/Search.scala
|
Scala
|
mit
| 1,693 |
/* Copyright (C) 2008-2016 University of Massachusetts Amherst.
This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
http://factorie.cs.umass.edu, http://github.com/factorie
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package cc.factorie.app.topics.lda
import cc.factorie.util.{TopEntry, TopN}
import cc.factorie.variable.CategoricalDomain
class TopicPhraseCounts(numTopics:Int, multiWordOnly:Boolean = true) {
private val counts = Array.tabulate(numTopics)(i => new CategoricalDomain[String])
counts.foreach(_.gatherCounts = true) // Turn on counting so each call to "index" increments that string's count
def apply(zi:Int): CategoricalDomain[String] = counts(zi)
//val lengths = new ArrayBuffer[Int]
def +=(doc:Doc): Unit = {
var prevzi = -1
val sb = new StringBuffer
val ws = doc.ws
val zs = doc.zs
    def addThenReset(s:String): Unit = {
      // `s` is always the current buffer contents; count it under the topic of
      // the phrase just finished, then clear the buffer for the next phrase.
      if (!multiWordOnly || s.contains("_")) counts(prevzi).index(s)
      //println("phrase="+s)
      sb.setLength(0)
    }
//println("TopicPhraseCounts.+="+doc.breaks+" len="+ws.length+" "+doc.ws.categoryValues.mkString(" "))
for (i <- 0 until ws.length) {
      //if (i+1 < ws.length && (ws.categoryValue(i) == "logistic" || ws.categoryValue(i) == "Logistic")) println("@"+i+" Logistic:"+zs.intValue(i)+" "+(if (doc.breaks.contains(i+1)) "#" else " ")+" "+ws.categoryValue(i+1)+":"+zs.intValue(i+1)+"\t "+doc.ws.categoryValues.mkString(" "))
if (zs.intValue(i) == prevzi && !doc.breaks.contains(i)) sb.append("_")
else if (sb.length > 0) addThenReset(sb.toString)
sb.append(ws.categoryValue(i))
prevzi = zs.intValue(i)
}
if (sb.length > 0) addThenReset(sb.toString)
}
def ++=(docs:Iterable[Doc]): this.type = {
//println("TopicPhraseCounts docs.length="+docs.size)
docs.foreach(+=(_))
//forIndex(numTopics)({i => println("topic %d counts=%d".format(i, counts(i).countsTotal))})
//println("lengths median="+lengths.sorted.apply(lengths.length/2))
//println("lengths mean="+(lengths.sum * 1.0 / lengths.length))
this
}
def topicEntries(zi:Int, n:Int = 10): Seq[TopEntry[String]] = new TopN(n, counts(zi).counts.asDoubleSeq, counts(zi).categories)
def topicPhrases(zi:Int, n:Int = 10, includeCounts:Boolean = true): Seq[String] = topicEntries(zi, n).map(e => if (includeCounts) e.category+":"+e.score.toInt else e.category)
def topicPhrasesSummary(topicIndex:Int, n:Int = 10): String = "Topic "+topicIndex+" "+ topicPhrases(topicIndex, n).mkString(" ")
  def topicsPhrasesSummary(n:Int = 10): String = Range(0, numTopics).map(topicPhrasesSummary(_, n)).mkString("\n")
}
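// Behavior sketch: within `+=`, consecutive tokens assigned the same topic and
// not separated by a document break are joined with '_', so a token/topic
// sequence like ("logistic", 3)("regression", 3)("data", 1) contributes the
// phrase "logistic_regression" to topic 3's counts (with multiWordOnly = true,
// single-word phrases such as "data" are skipped).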
|
strubell/factorie
|
src/main/scala/cc/factorie/app/topics/lda/TopicPhraseCounts.scala
|
Scala
|
apache-2.0
| 3,183 |
package demo
package components
import chandu0101.scalajs.react.components.RCustomStyles
import chandu0101.scalajs.react.components.RCustomStyles._
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.html_<^._
object AppHeader {
object Style {
val headerStyle = Seq(
^.background := "#F2706D",
^.fontSize := "1.5em",
^.padding := "0px",
^.margin := "0px",
^.position := "fixed",
^.width := "100%",
^.zIndex := "5"
).toTagMod
val menuNav = Seq(
MsFlexAlign := "center",
WebkitAlignItems := "center",
WebkitBoxAlign := "center",
^.alignItems := "center",
^.display := "-ms-flexbox",
^.display := "-webkit-box",
^.display := "-webkit-flex",
^.display := "flex",
^.height := "64px",
^.lineHeight := "64px",
^.margin := "0 3rem"
).toTagMod
val logo = Seq(
^.color := "rgb(244, 233, 233)",
^.textDecoration := "none",
^.width := "150px"
).toTagMod
val menuItem =
Seq(^.padding := "20px", ^.color := "rgb(244, 233, 233)", ^.textDecoration := "none").toTagMod
val menuItemHover = Seq(^.background := "#f1453e").toTagMod
}
case class State(menuHover: String = "")
class Backend(t: BackendScope[_, State]) {
def onMouseEnter(menu: String) = t.modState(_.copy(menuHover = menu))
val onMouseLeave = t.modState(_.copy(menuHover = ""))
def render(S: State) = {
val github: String = "Github"
<.header(Style.headerStyle)(
<.nav(Style.menuNav)(
<.a(Style.logo, ^.href := "#")("S J R C"),
<.div(^.marginLeft := "auto")(
<.a(
^.target := "_blank",
Style.menuItemHover.when(S.menuHover == github),
Style.menuItem,
^.href := "https://github.com/chandu0101/scalajs-react-components",
^.onMouseEnter --> onMouseEnter(github),
^.onMouseLeave --> onMouseLeave
)(github)
)
)
)
}
}
val component = ScalaComponent
.builder[Unit]("AppHeader")
.initialState(State())
.renderBackend[Backend]
.build
def apply() = component()
}
|
rleibman/scalajs-react-components
|
demo/src/main/scala/demo/components/AppHeader.scala
|
Scala
|
apache-2.0
| 2,210 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.planning
import org.apache.spark.Logging
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans._
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.trees.TreeNodeRef
/**
* A pattern that matches any number of project or filter operations on top of another relational
* operator. All filter operators are collected and their conditions are broken up and returned
* together with the top project operator.
* [[org.apache.spark.sql.catalyst.expressions.Alias Aliases]] are in-lined/substituted if
* necessary.
*/
object PhysicalOperation extends PredicateHelper {
type ReturnType = (Seq[NamedExpression], Seq[Expression], LogicalPlan)
def unapply(plan: LogicalPlan): Option[ReturnType] = {
val (fields, filters, child, _) = collectProjectsAndFilters(plan)
Some((fields.getOrElse(child.output), filters, child))
}
/**
* Collects all deterministic projects and filters, in-lining/substituting aliases if necessary.
* Here are two examples for alias in-lining/substitution.
* Before:
* {{{
* SELECT c1 FROM (SELECT key AS c1 FROM t1) t2 WHERE c1 > 10
* SELECT c1 AS c2 FROM (SELECT key AS c1 FROM t1) t2 WHERE c1 > 10
* }}}
* After:
* {{{
* SELECT key AS c1 FROM t1 WHERE key > 10
* SELECT key AS c2 FROM t1 WHERE key > 10
* }}}
*/
private def collectProjectsAndFilters(plan: LogicalPlan):
(Option[Seq[NamedExpression]], Seq[Expression], LogicalPlan, Map[Attribute, Expression]) =
plan match {
case Project(fields, child) if fields.forall(_.deterministic) =>
val (_, filters, other, aliases) = collectProjectsAndFilters(child)
val substitutedFields = fields.map(substitute(aliases)).asInstanceOf[Seq[NamedExpression]]
(Some(substitutedFields), filters, other, collectAliases(substitutedFields))
case Filter(condition, child) if condition.deterministic =>
val (fields, filters, other, aliases) = collectProjectsAndFilters(child)
val substitutedCondition = substitute(aliases)(condition)
(fields, filters ++ splitConjunctivePredicates(substitutedCondition), other, aliases)
case other =>
(None, Nil, other, Map.empty)
}
private def collectAliases(fields: Seq[Expression]): Map[Attribute, Expression] = fields.collect {
case a @ Alias(child, _) => a.toAttribute -> child
}.toMap
private def substitute(aliases: Map[Attribute, Expression])(expr: Expression): Expression = {
expr.transform {
case a @ Alias(ref: AttributeReference, name) =>
aliases.get(ref).map(Alias(_, name)(a.exprId, a.qualifiers)).getOrElse(a)
case a: AttributeReference =>
aliases.get(a).map(Alias(_, a.name)(a.exprId, a.qualifiers)).getOrElse(a)
}
}
}
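// Editor's sketch (not part of the original file): a minimal illustration of how a
// planning strategy might destructure a plan with the PhysicalOperation extractor.
// `PhysicalOperationSketch` is a hypothetical helper added purely for illustration.
private[planning] object PhysicalOperationSketch {
  def describe(plan: LogicalPlan): String = plan match {
    case PhysicalOperation(projects, filters, leaf) =>
      s"${projects.size} projection(s) and ${filters.size} filter condition(s) over ${leaf.nodeName}"
  }
}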
/**
 * Matches a logical aggregation that can be performed on distributed data in two steps. The first
 * operates on the data in each partition, performing partial aggregation for each group. The
 * second occurs after the shuffle and completes the aggregation.
*
* This pattern will only match if all aggregate expressions can be computed partially and will
* return the rewritten aggregation expressions for both phases.
*
* The returned values for this match are as follows:
* - Grouping attributes for the final aggregation.
* - Aggregates for the final aggregation.
* - Grouping expressions for the partial aggregation.
* - Partial aggregate expressions.
* - Input to the aggregation.
*/
object PartialAggregation {
type ReturnType =
(Seq[Attribute], Seq[NamedExpression], Seq[Expression], Seq[NamedExpression], LogicalPlan)
def unapply(plan: LogicalPlan): Option[ReturnType] = plan match {
case logical.Aggregate(groupingExpressions, aggregateExpressions, child) =>
// Collect all aggregate expressions.
val allAggregates =
aggregateExpressions.flatMap(_ collect { case a: AggregateExpression1 => a})
// Collect all aggregate expressions that can be computed partially.
val partialAggregates =
aggregateExpressions.flatMap(_ collect { case p: PartialAggregate1 => p})
// Only do partial aggregation if supported by all aggregate expressions.
if (allAggregates.size == partialAggregates.size) {
// Create a map of expressions to their partial evaluations for all aggregate expressions.
val partialEvaluations: Map[TreeNodeRef, SplitEvaluation] =
partialAggregates.map(a => (new TreeNodeRef(a), a.asPartial)).toMap
        // We need to pass all grouping expressions through so the grouping can happen a second
        // time. However, some of them might be unnamed, so we alias them, allowing them to be
        // referenced in the second aggregation.
val namedGroupingExpressions: Seq[(Expression, NamedExpression)] =
groupingExpressions.map {
case n: NamedExpression => (n, n)
case other => (other, Alias(other, "PartialGroup")())
}
// Replace aggregations with a new expression that computes the result from the already
// computed partial evaluations and grouping values.
val rewrittenAggregateExpressions = aggregateExpressions.map(_.transformDown {
case e: Expression if partialEvaluations.contains(new TreeNodeRef(e)) =>
partialEvaluations(new TreeNodeRef(e)).finalEvaluation
case e: Expression =>
namedGroupingExpressions.collectFirst {
case (expr, ne) if expr semanticEquals e => ne.toAttribute
}.getOrElse(e)
}).asInstanceOf[Seq[NamedExpression]]
val partialComputation = namedGroupingExpressions.map(_._2) ++
partialEvaluations.values.flatMap(_.partialEvaluations)
val namedGroupingAttributes = namedGroupingExpressions.map(_._2.toAttribute)
Some(
(namedGroupingAttributes,
rewrittenAggregateExpressions,
groupingExpressions,
partialComputation,
child))
} else {
None
}
case _ => None
}
}
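// Editor's sketch (not part of the original file): how a strategy might consume the
// five-element match result described above when sizing up a two-phase aggregation.
// `PartialAggregationSketch` is a hypothetical helper added purely for illustration.
private[planning] object PartialAggregationSketch {
  def describe(plan: LogicalPlan): String = plan match {
    case PartialAggregation(finalGroupings, finalAggregates, _, partialAggregates, child) =>
      s"partial phase computes ${partialAggregates.size} expression(s) per partition of " +
        s"${child.nodeName}; final phase groups on ${finalGroupings.size} attribute(s), " +
        s"producing ${finalAggregates.size} output column(s)"
    case _ => "not partially aggregable"
  }
}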
/**
* A pattern that finds joins with equality conditions that can be evaluated using equi-join.
*
 * Null-safe equality is transformed into ordinary equality on the join key by replacing null
 * with the key type's default value.
*/
object ExtractEquiJoinKeys extends Logging with PredicateHelper {
/** (joinType, leftKeys, rightKeys, condition, leftChild, rightChild) */
type ReturnType =
(JoinType, Seq[Expression], Seq[Expression], Option[Expression], LogicalPlan, LogicalPlan)
def unapply(plan: LogicalPlan): Option[ReturnType] = plan match {
case join @ Join(left, right, joinType, condition) =>
logDebug(s"Considering join on: $condition")
// Find equi-join predicates that can be evaluated before the join, and thus can be used
// as join keys.
val predicates = condition.map(splitConjunctivePredicates).getOrElse(Nil)
val joinKeys = predicates.flatMap {
case EqualTo(l, r) if canEvaluate(l, left) && canEvaluate(r, right) => Some((l, r))
case EqualTo(l, r) if canEvaluate(l, right) && canEvaluate(r, left) => Some((r, l))
        // Replace null with the type's default value in the join keys, so that rows
        // containing null in those keys can still be joined together.
case EqualNullSafe(l, r) if canEvaluate(l, left) && canEvaluate(r, right) =>
Some((Coalesce(Seq(l, Literal.default(l.dataType))),
Coalesce(Seq(r, Literal.default(r.dataType)))))
case EqualNullSafe(l, r) if canEvaluate(l, right) && canEvaluate(r, left) =>
Some((Coalesce(Seq(r, Literal.default(r.dataType))),
Coalesce(Seq(l, Literal.default(l.dataType)))))
case other => None
}
val otherPredicates = predicates.filterNot {
case EqualTo(l, r) =>
canEvaluate(l, left) && canEvaluate(r, right) ||
canEvaluate(l, right) && canEvaluate(r, left)
case other => false
}
if (joinKeys.nonEmpty) {
val (leftKeys, rightKeys) = joinKeys.unzip
logDebug(s"leftKeys:$leftKeys | rightKeys:$rightKeys")
Some((joinType, leftKeys, rightKeys, otherPredicates.reduceOption(And), left, right))
} else {
None
}
case _ => None
}
}
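// Editor's sketch (not part of the original file): destructuring the extractor's result,
// mirroring how a join strategy would pick out keys and the residual condition.
// `EquiJoinSketch` is a hypothetical helper added purely for illustration.
private[planning] object EquiJoinSketch {
  def describe(plan: LogicalPlan): String = plan match {
    case ExtractEquiJoinKeys(joinType, leftKeys, _, otherCondition, _, _) =>
      s"$joinType equi-join on ${leftKeys.size} key pair(s)" +
        otherCondition.map(c => s", residual predicate: $c").getOrElse("")
    case _ => "no usable equi-join keys"
  }
}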
/**
* A pattern that collects all adjacent unions and returns their children as a Seq.
*/
object Unions {
def unapply(plan: LogicalPlan): Option[Seq[LogicalPlan]] = plan match {
case u: Union => Some(collectUnionChildren(u))
case _ => None
}
private def collectUnionChildren(plan: LogicalPlan): Seq[LogicalPlan] = plan match {
case Union(l, r) => collectUnionChildren(l) ++ collectUnionChildren(r)
case other => other :: Nil
}
}
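// Editor's sketch (not part of the original file): flattening arbitrarily nested unions
// into a single Seq of children, as a physical union operator would consume them.
// `UnionsSketch` is a hypothetical helper added purely for illustration.
private[planning] object UnionsSketch {
  def flatChildren(plan: LogicalPlan): Seq[LogicalPlan] = plan match {
    case Unions(children) => children // e.g. Union(Union(a, b), c) yields Seq(a, b, c)
    case other => other :: Nil
  }
}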
|
pronix/spark
|
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/patterns.scala
|
Scala
|
apache-2.0
| 9,583 |
package com.cloudera.hue.livy.server.sessions
import java.net.URL
import com.cloudera.hue.livy.msgs.ExecuteRequest
import com.cloudera.hue.livy.server.Statement
import scala.annotation.tailrec
import scala.concurrent.Future
object Session {
sealed trait State
case class NotStarted() extends State
case class Starting() extends State
case class Idle() extends State
case class Busy() extends State
case class Error() extends State
case class Dead() extends State
class SessionFailedToStart(msg: String) extends Exception(msg)
class StatementNotFound extends Exception
}
trait Session {
import Session._
def id: String
def lastActivity: Long
def state: State
def url: Option[URL]
  def url_=(url: URL): Unit
def executeStatement(content: ExecuteRequest): Statement
def statement(statementId: Int): Option[Statement]
def statements(): Seq[Statement]
def statements(fromIndex: Integer, toIndex: Integer): Seq[Statement]
def interrupt(): Future[Unit]
def stop(): Future[Unit]
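  /**
   * Blocks the calling thread, polling once per second, until `state` differs from
   * `oldState`, and then evaluates and returns `f`.
   */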
@tailrec
final def waitForStateChange[A](oldState: State, f: => A): A = {
if (state == oldState) {
Thread.sleep(1000)
waitForStateChange(oldState, f)
} else {
f
}
}
}
|
dulems/hue
|
apps/spark/java/livy-server/src/main/scala/com/cloudera/hue/livy/server/sessions/Session.scala
|
Scala
|
apache-2.0
| 1,229 |
package pl.project13.scala.akka.raft.cluster
import akka.testkit.ImplicitSender
import concurrent.duration._
import akka.cluster.ClusterEvent.{CurrentClusterState, MemberUp}
import akka.cluster.Cluster
import akka.actor.Props
import akka.util.Timeout
import clusters._
import pl.project13.scala.akka.raft.example.WordConcatRaftActor
abstract class ClusterRoleAwarenessSpec extends RaftClusterSpec(FourNodesOnlyTwoRaftNodesCluster)
with ImplicitSender {
implicit val defaultTimeout = {
import concurrent.duration._
Timeout(3.seconds)
}
import FourNodesOnlyTwoRaftNodesCluster._
def initialParticipants = nodes.size
behavior of s"Leader election on cluster of $initialParticipants nodes"
it should "not allow raft Members to be started on Nodes without the 'raft' role" in within(20.seconds) {
Cluster(system).subscribe(testActor, classOf[MemberUp])
expectMsgClass(classOf[CurrentClusterState])
Cluster(system) join node(first).address
(1 to initialParticipants) map { idx =>
runOn(nodes(idx)) {
val raftActor = system.actorOf(Props[WordConcatRaftActor], s"raft-$idx")
system.actorOf(ClusterRaftActor.props(raftActor, initialParticipants), s"raft-member-$idx")
}
}
Cluster(system).unsubscribe(testActor)
testConductor.enter("all-nodes-up")
    raftNodes foreach { raftNode =>
      selectActorRefMaybe(node(raftNode).address) should be ('defined)
    }
nonRaftNodes foreach { nonRaftNode =>
selectActorRefMaybe(node(nonRaftNode).address) should be ('empty)
}
}
}
class ClusterRoleAwarenessJvmNode1 extends ClusterRoleAwarenessSpec
class ClusterRoleAwarenessJvmNode2 extends ClusterRoleAwarenessSpec
class ClusterRoleAwarenessJvmNode3 extends ClusterRoleAwarenessSpec
class ClusterRoleAwarenessJvmNode4 extends ClusterRoleAwarenessSpec
|
ktoso/akka-raft
|
src/multi-jvm/scala/pl/project13/scala/akka/raft/cluster/ClusterRoleAwarenessSpec.scala
|
Scala
|
apache-2.0
| 1,849 |
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.scaladsl.testkit
import java.util.concurrent.ConcurrentHashMap
import java.util.function.{ Function => JFunction }
import akka.Done
import akka.actor.ActorRef
import akka.actor.ActorSystem
import akka.actor.Props
import akka.stream.Materializer
import akka.stream.scaladsl.Flow
import akka.stream.scaladsl.Source
import com.lightbend.lagom.internal.testkit.InternalSubscriberStub
import com.lightbend.lagom.internal.testkit.TopicBufferActor
import com.lightbend.lagom.scaladsl.api.broker.Topic.TopicId
import com.lightbend.lagom.scaladsl.api.broker.Message
import com.lightbend.lagom.scaladsl.api.broker.Subscriber
import com.lightbend.lagom.scaladsl.api.broker.Topic
import scala.concurrent.Future
/**
* Factors [[com.lightbend.lagom.scaladsl.testkit.ProducerStub]]'s.
*/
final class ProducerStubFactory(actorSystem: ActorSystem, materializer: Materializer) {
private val topics = new ConcurrentHashMap[String, ProducerStub[_]]
def producer[T](topicId: String): ProducerStub[T] = {
val builder = new JFunction[String, ProducerStub[_]] {
override def apply(t: String) = new ProducerStub[T](t, actorSystem, materializer)
}
topics.computeIfAbsent(topicId, builder).asInstanceOf[ProducerStub[T]]
}
}
/**
* Stubs the production end of a [[com.lightbend.lagom.scaladsl.api.broker.Topic]] so that test writers can mock
* message production from upstream services into topics consumed by services under test.
*/
final class ProducerStub[T] private[lagom] (topicName: String, actorSystem: ActorSystem, materializer: Materializer) {
private lazy val bufferActor = {
val bufferProps: Props = Props.create(classOf[TopicBufferActor])
actorSystem.actorOf(bufferProps)
}
val topic: Topic[T] = new TopicStub[T](TopicId(topicName), bufferActor)(materializer)
/**
* Send a message payload to the topic.
*
* @param message The message to send.
*/
def send(message: T): Unit = bufferActor.tell(Message(message), ActorRef.noSender)
/**
* Send a message wrapped with metadata to the topic.
*
* @param message The message to send.
*/
def send(message: Message[T]): Unit = bufferActor.tell(message, ActorRef.noSender)
}
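// Editor's sketch (not part of the original file): how a test might wire a stubbed topic
// and push a message through it. The topic id "greetings" and the String payload are
// illustrative assumptions only.
private[testkit] object ProducerStubUsageSketch {
  def example(actorSystem: ActorSystem, materializer: Materializer): Unit = {
    val stubFactory = new ProducerStubFactory(actorSystem, materializer)
    val greetings = stubFactory.producer[String]("greetings")
    greetings.send("Hi there!") // subscribers obtained from greetings.topic receive this
  }
}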
private[lagom] class TopicStub[T](val topicId: Topic.TopicId, topicBuffer: ActorRef)(
implicit materializer: Materializer
) extends Topic[T] {
def subscribe = new SubscriberStub[T, T]("default", topicBuffer, _.payload)
class SubscriberStub[Payload, SubscriberPayload](
groupId: String,
topicBuffer: ActorRef,
transform: Message[Payload] => SubscriberPayload
)(implicit materializer: Materializer)
extends InternalSubscriberStub[Payload, Message](groupId, topicBuffer)
with Subscriber[SubscriberPayload] {
override def withMetadata: Subscriber[Message[SubscriberPayload]] =
new SubscriberStub[Payload, Message[SubscriberPayload]](
groupId,
topicBuffer,
msg => msg.withPayload(transform(msg))
)
override def withGroupId(groupId: String): Subscriber[SubscriberPayload] =
new SubscriberStub[Payload, SubscriberPayload](groupId, topicBuffer, transform)
override def atMostOnceSource: Source[SubscriberPayload, _] = super.mostOnceSource.map(transform)
override def atLeastOnce(flow: Flow[SubscriberPayload, Done, _]): Future[Done] =
super.leastOnce(Flow[Message[Payload]].map(transform).via(flow))
}
}
|
lagom/lagom
|
testkit/scaladsl/src/main/scala/com/lightbend/lagom/scaladsl/testkit/ProducerStubFactory.scala
|
Scala
|
apache-2.0
| 3,484 |
package controllers
import com.mohiva.play.silhouette.api.Silhouette
import com.mohiva.play.silhouette.impl.authenticators.CookieAuthenticator
import models.User
import play.api.i18n.I18nSupport
import play.api.mvc.Controller
/**
* Created by gbecan on 9/25/15.
*/
abstract class BaseController extends Controller with I18nSupport with Silhouette[User, CookieAuthenticator] {
implicit def userAwareRequestToViewContext[R](implicit request: UserAwareRequest[R]): ViewContext = ViewContext(request.identity, request.request)
implicit def securedRequestToViewContext[R](implicit request: SecuredRequest[R]): ViewContext = ViewContext(Some(request.identity), request.request)
}
|
gbecan/OpenCompare
|
org.opencompare/play-app/app/controllers/BaseController.scala
|
Scala
|
apache-2.0
| 686 |
package juan.ddd.proto.domain.routing
case class Route(uri: Uri)
|
yoskhdia/ddd-proto
|
src/main/scala/juan/ddd/proto/domain/routing/Route.scala
|
Scala
|
mit
| 66 |
package dao
import org.bson.types.ObjectId
import com.novus.salat.dao.SalatDAO
import se.radley.plugin.salat._
import model.{BlogStatus, Blog}
import play.api.Play.current
import com.mongodb.casbah.commons.MongoDBObject
import org.joda.time.DateTime
/**
* The Class BlogDao.
*
* @author Nguyen Duc Dung
* @since 1/31/14 3:18 AM
*
*/
object BlogDao extends BaseDao[Blog, ObjectId] {
override def dao = new SalatDAO[Blog, ObjectId](collection = mongoCollection("blog")) {}
  /**
   * A blog needs updating when its feed is not currently being updated and it was
   * last updated more than 30 minutes ago.
   * @return the list of blogs that are due for an update
   */
def needToUpdate = find(
MongoDBObject(
"status" -> MongoDBObject("$ne" -> BlogStatus.UPDATING),
"isEnable" -> true,
"lastUpdated" -> MongoDBObject("$lt" -> DateTime.now.minusMinutes(30))
)
).toList
def findByCatId(id: ObjectId) = find(MongoDBObject("categoryId" -> id)).sort(MongoDBObject("read" -> -1)).toList
def findByBlogName(blogName: String) = findOne(MongoDBObject("uniqueName" -> blogName))
def increaseRead(id: ObjectId) = findOneById(id).map(blog => {
save(blog.copy(read = blog.read + 1))
})
def findByName(name: String) = findOne(MongoDBObject("name" -> name))
def top = find(MongoDBObject("homePage" -> true)).sort(MongoDBObject("read" -> -1)).take(10).toList
def canShowInHomePage = find(MongoDBObject("homePage" -> true)).toList
def showAll() = all.foreach(blog => {
save(blog.copy(homePage = true))
})
}
|
SunriseSoftVN/hayhayblog
|
core/app/dao/BlogDao.scala
|
Scala
|
gpl-2.0
| 1,475 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.analysis
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.aggregate._
import org.apache.spark.sql.catalyst.plans.logical.LocalRelation
import org.apache.spark.sql.types.{LongType, StringType, TypeCollection}
class ExpressionTypeCheckingSuite extends SparkFunSuite {
val testRelation = LocalRelation(
'intField.int,
'stringField.string,
'booleanField.boolean,
'decimalField.decimal(8, 0),
'arrayField.array(StringType),
'mapField.map(StringType, LongType))
def assertError(expr: Expression, errorMessage: String): Unit = {
val e = intercept[AnalysisException] {
assertSuccess(expr)
}
assert(e.getMessage.contains(
s"cannot resolve '${expr.sql}' due to data type mismatch:"))
assert(e.getMessage.contains(errorMessage))
}
def assertSuccess(expr: Expression): Unit = {
val analyzed = testRelation.select(expr.as("c")).analyze
SimpleAnalyzer.checkAnalysis(analyzed)
}
def assertErrorForDifferingTypes(expr: Expression): Unit = {
assertError(expr,
s"differing types in '${expr.sql}'")
}
test("check types for unary arithmetic") {
assertError(UnaryMinus('stringField), "(numeric or calendarinterval) type")
assertError(Abs('stringField), "requires numeric type")
assertError(BitwiseNot('stringField), "requires integral type")
}
test("check types for binary arithmetic") {
// We will cast String to Double for binary arithmetic
assertSuccess(Add('intField, 'stringField))
assertSuccess(Subtract('intField, 'stringField))
assertSuccess(Multiply('intField, 'stringField))
assertSuccess(Divide('intField, 'stringField))
assertSuccess(Remainder('intField, 'stringField))
// checkAnalysis(BitwiseAnd('intField, 'stringField))
assertErrorForDifferingTypes(Add('intField, 'booleanField))
assertErrorForDifferingTypes(Subtract('intField, 'booleanField))
assertErrorForDifferingTypes(Multiply('intField, 'booleanField))
assertErrorForDifferingTypes(Divide('intField, 'booleanField))
assertErrorForDifferingTypes(Remainder('intField, 'booleanField))
assertErrorForDifferingTypes(BitwiseAnd('intField, 'booleanField))
assertErrorForDifferingTypes(BitwiseOr('intField, 'booleanField))
assertErrorForDifferingTypes(BitwiseXor('intField, 'booleanField))
assertErrorForDifferingTypes(MaxOf('intField, 'booleanField))
assertErrorForDifferingTypes(MinOf('intField, 'booleanField))
assertError(Add('booleanField, 'booleanField), "requires (numeric or calendarinterval) type")
assertError(Subtract('booleanField, 'booleanField),
"requires (numeric or calendarinterval) type")
assertError(Multiply('booleanField, 'booleanField), "requires numeric type")
assertError(Divide('booleanField, 'booleanField), "requires (double or decimal) type")
assertError(Remainder('booleanField, 'booleanField), "requires numeric type")
assertError(BitwiseAnd('booleanField, 'booleanField), "requires integral type")
assertError(BitwiseOr('booleanField, 'booleanField), "requires integral type")
assertError(BitwiseXor('booleanField, 'booleanField), "requires integral type")
assertError(MaxOf('mapField, 'mapField),
s"requires ${TypeCollection.Ordered.simpleString} type")
assertError(MinOf('mapField, 'mapField),
s"requires ${TypeCollection.Ordered.simpleString} type")
}
test("check types for predicates") {
// We will cast String to Double for binary comparison
assertSuccess(EqualTo('intField, 'stringField))
assertSuccess(EqualNullSafe('intField, 'stringField))
assertSuccess(LessThan('intField, 'stringField))
assertSuccess(LessThanOrEqual('intField, 'stringField))
assertSuccess(GreaterThan('intField, 'stringField))
assertSuccess(GreaterThanOrEqual('intField, 'stringField))
// We will transform EqualTo with numeric and boolean types to CaseKeyWhen
assertSuccess(EqualTo('intField, 'booleanField))
assertSuccess(EqualNullSafe('intField, 'booleanField))
assertErrorForDifferingTypes(EqualTo('intField, 'mapField))
assertErrorForDifferingTypes(EqualNullSafe('intField, 'mapField))
assertErrorForDifferingTypes(LessThan('intField, 'booleanField))
assertErrorForDifferingTypes(LessThanOrEqual('intField, 'booleanField))
assertErrorForDifferingTypes(GreaterThan('intField, 'booleanField))
assertErrorForDifferingTypes(GreaterThanOrEqual('intField, 'booleanField))
assertError(LessThan('mapField, 'mapField),
s"requires ${TypeCollection.Ordered.simpleString} type")
assertError(LessThanOrEqual('mapField, 'mapField),
s"requires ${TypeCollection.Ordered.simpleString} type")
assertError(GreaterThan('mapField, 'mapField),
s"requires ${TypeCollection.Ordered.simpleString} type")
assertError(GreaterThanOrEqual('mapField, 'mapField),
s"requires ${TypeCollection.Ordered.simpleString} type")
assertError(If('intField, 'stringField, 'stringField),
"type of predicate expression in If should be boolean")
assertErrorForDifferingTypes(If('booleanField, 'intField, 'booleanField))
assertError(
CaseWhen(Seq(('booleanField.attr, 'intField.attr), ('booleanField.attr, 'mapField.attr))),
"THEN and ELSE expressions should all be same type or coercible to a common type")
assertError(
CaseKeyWhen('intField, Seq('intField, 'stringField, 'intField, 'mapField)),
"THEN and ELSE expressions should all be same type or coercible to a common type")
assertError(
CaseWhen(Seq(('booleanField.attr, 'intField.attr), ('intField.attr, 'intField.attr))),
"WHEN expressions in CaseWhen should all be boolean type")
}
test("check types for aggregates") {
    // We use AggregateFunction directly here because the error will be thrown from it
    // instead of from AggregateExpression, which is the wrapper of an AggregateFunction.
// We will cast String to Double for sum and average
assertSuccess(Sum('stringField))
assertSuccess(Average('stringField))
assertSuccess(Min('arrayField))
assertError(Min('mapField), "min does not support ordering on type")
assertError(Max('mapField), "max does not support ordering on type")
assertError(Sum('booleanField), "function sum requires numeric type")
assertError(Average('booleanField), "function average requires numeric type")
}
test("check types for others") {
assertError(CreateArray(Seq('intField, 'booleanField)),
"input to function array should all be the same type")
assertError(Coalesce(Seq('intField, 'booleanField)),
"input to function coalesce should all be the same type")
assertError(Coalesce(Nil), "input to function coalesce cannot be empty")
assertError(new Murmur3Hash(Nil), "function hash requires at least one argument")
assertError(Explode('intField),
"input to function explode should be array or map type")
assertError(PosExplode('intField),
"input to function explode should be array or map type")
}
test("check types for CreateNamedStruct") {
assertError(
CreateNamedStruct(Seq("a", "b", 2.0)), "even number of arguments")
assertError(
CreateNamedStruct(Seq(1, "a", "b", 2.0)),
"Only foldable StringType expressions are allowed to appear at odd position")
assertError(
CreateNamedStruct(Seq('a.string.at(0), "a", "b", 2.0)),
"Only foldable StringType expressions are allowed to appear at odd position")
assertError(
CreateNamedStruct(Seq(Literal.create(null, StringType), "a")),
"Field name should not be null")
}
test("check types for CreateMap") {
assertError(CreateMap(Seq("a", "b", 2.0)), "even number of arguments")
assertError(
CreateMap(Seq('intField, 'stringField, 'booleanField, 'stringField)),
"keys of function map should all be the same type")
assertError(
CreateMap(Seq('stringField, 'intField, 'stringField, 'booleanField)),
"values of function map should all be the same type")
}
test("check types for ROUND/BROUND") {
assertSuccess(Round(Literal(null), Literal(null)))
assertSuccess(Round('intField, Literal(1)))
assertError(Round('intField, 'intField), "Only foldable Expression is allowed")
assertError(Round('intField, 'booleanField), "requires int type")
assertError(Round('intField, 'mapField), "requires int type")
assertError(Round('booleanField, 'intField), "requires numeric type")
assertSuccess(BRound(Literal(null), Literal(null)))
assertSuccess(BRound('intField, Literal(1)))
assertError(BRound('intField, 'intField), "Only foldable Expression is allowed")
assertError(BRound('intField, 'booleanField), "requires int type")
assertError(BRound('intField, 'mapField), "requires int type")
assertError(BRound('booleanField, 'intField), "requires numeric type")
}
test("check types for Greatest/Least") {
for (operator <- Seq[(Seq[Expression] => Expression)](Greatest, Least)) {
assertError(operator(Seq('booleanField)), "requires at least 2 arguments")
assertError(operator(Seq('intField, 'stringField)), "should all have the same type")
assertError(operator(Seq('mapField, 'mapField)), "does not support ordering")
}
}
}
|
gioenn/xSpark
|
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala
|
Scala
|
apache-2.0
| 10,335 |
package org.jetbrains.sbt
package project.template
import com.intellij.ide.util.projectWizard.WizardContext
import com.intellij.platform.ProjectTemplate
import org.jetbrains.plugins.scala.project.template.ScalaProjectTemplatesFactoryBase
import org.jetbrains.sbt.project.template.techhub.TechHubProjectTemplate
/**
* User: Dmitry.Naydanov, Pavel Fatin
* Date: 11.03.14.
*/
class SbtProjectTemplateFactory extends ScalaProjectTemplatesFactoryBase {
override def createTemplates(group: String, context: WizardContext): Array[ProjectTemplate] = {
if (context.isCreatingNewProject) {
Array(
new SbtProjectTemplate,
new TechHubProjectTemplate
)
} else {
Array.empty
}
}
}
|
JetBrains/intellij-scala
|
scala/scala-impl/src/org/jetbrains/sbt/project/template/SbtProjectTemplateFactory.scala
|
Scala
|
apache-2.0
| 726 |
package com.nthportal.extrapredef
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import org.scalatest.OptionValues
import scala.collection.immutable.SortedMap
import scala.concurrent.{Await, Future}
import scala.concurrent.duration.Duration
import scala.language.implicitConversions
import scala.util.Try
class ExtraPredefTest extends AnyFlatSpec with Matchers with OptionValues {
import ExtraPredef._
import ExtraPredefTest._
private val _null: Any = null
behavior of "ExtraPredef"
it should "require state correctly" in {
an[IllegalStateException] should be thrownBy requireState(false)
noException should be thrownBy requireState(true)
an[IllegalStateException] should be thrownBy requireState(requirement = false, "message")
noException should be thrownBy requireState(requirement = true, "message")
}
it should "handle impossible conditions correctly" in {
an[AssertionError] should be thrownBy !!!
}
it should "check for null correctly" in {
a[NullPointerException] should be thrownBy _null.nonNull
noException should be thrownBy "a string".nonNull
}
it should "coalesce null references correctly" in {
_null ?? "bar" shouldEqual "bar"
"foo" ?? 4 shouldEqual "foo"
"foo" ?? null shouldEqual "foo"
}
it should "chain comparisons" in {
case class ComparisonChainTest(a: Int, b: Int, c: BasicOrdered) extends Ordered[ComparisonChainTest] {
override def compare(that: ComparisonChainTest): Int = {
(this.a compare that.a)
.thenCompare(this.b, that.b)
.thenCompare(this.c, that.c)
}
}
val test = ComparisonChainTest(1, 2, 3)
test should be > ComparisonChainTest(1, 2, 2)
test should be > ComparisonChainTest(1, 1, 4)
test should be > ComparisonChainTest(0, 3, 4)
test should be < ComparisonChainTest(1, 2, 4)
test should be < ComparisonChainTest(1, 3, 2)
test should be < ComparisonChainTest(2, 1, 2)
test shouldNot be > ComparisonChainTest(1, 2, 3)
test shouldNot be < ComparisonChainTest(1, 2, 3)
}
it should "compare natural ordering correctly" in {
(BasicOrdered(1) <> 2) shouldBe true
(BasicOrdered(2) <> 1) shouldBe true
(BasicOrdered(1) <> 1) shouldBe false
(BasicOrdered(1) !<> 2) shouldBe false
(BasicOrdered(2) !<> 1) shouldBe false
(BasicOrdered(1) !<> 1) shouldBe true
}
it should "create equivalent `Try`s from `Option`s" in {
val t1 = Some("string").toTry
t1.isSuccess shouldBe true
t1.get shouldEqual "string"
val t2 = None.toTry
t2.isFailure shouldBe true
a[NoSuchElementException] should be thrownBy t2.get
}
it should "create equivalent `Future`s from `Option`s" in {
Some("string").toFuture.getNow shouldEqual "string"
a[NoSuchElementException] should be thrownBy None.toFuture.getNow
}
it should "transform `Option`s" in {
Some("string").transform(s => Some(s.toUpperCase), Some("none")).value shouldBe "STRING"
Some("string").transform(s => Some(s.toUpperCase), None).value shouldBe "STRING"
Some("string").transform(_ => None, Some("none")) shouldBe empty
Some("string").transform(_ => None, None) shouldBe empty
None.transform((_: Nothing) => Some("some"), Some("none")).value shouldBe "none"
None.transform((_: Nothing) => Some("some"), None) shouldBe empty
None.transform((_: Nothing) => None, Some("none")).value shouldBe "none"
None.transform((_: Nothing) => None, None) shouldBe empty
}
it should "invert `Option`s" in {
Some("string").invert("none") shouldBe empty
None.invert("some").value shouldBe "some"
Some("string").invertWith(Some("none")) shouldBe empty
Some("string").invertWith(None) shouldBe empty
None.invertWith(Some("some")).value shouldBe "some"
None.invertWith(None) shouldBe empty
}
it should "create equivalent `Future`s from `Try`s" in {
Try("string").toFuture.getNow shouldEqual "string"
val ex = new Exception("foo")
Try(throw ex).toFuture.failed.getNow should be theSameInstanceAs ex
}
it should "create equivalent `Future`s from `Either`s" in {
Right[Throwable, String]("string").toFuture.getNow shouldEqual "string"
val ex = new Exception("foo")
Left(ex).toFuture.failed.getNow should be theSameInstanceAs ex
}
it should "test `SortedMap`s for ordered equality" in {
val sm = SortedMap(1 -> 1, 2 -> 2, 3 -> 3)
sm orderedEquals SortedMap(3 -> 3, 2 -> 2, 1 -> 1) shouldBe true
sm orderedEquals SortedMap(1 -> 1, 2 -> 2, 3 -> 3)(Ordering[Int].reverse) shouldBe false
}
}
object ExtraPredefTest {
implicit final class FinishedFuture[A](private val self: Future[A]) extends AnyVal {
def getNow: A = Await.result(self, Duration.Zero)
}
case class BasicOrdered(int: Int) extends Ordered[BasicOrdered] {
override def compare(that: BasicOrdered): Int = this.int compare that.int
}
implicit def int2BasicOrdered(int: Int): BasicOrdered = BasicOrdered(int)
}
|
NthPortal/extra-predef
|
src/test/scala/com/nthportal/extrapredef/ExtraPredefTest.scala
|
Scala
|
apache-2.0
| 5,028 |
package com.enkidu.lignum.parsers.ast
import com.enkidu.lignum.parsers.ast.expression.discardable.dimension.AbstractDimension
import com.enkidu.lignum.parsers.ast.expression.discardable.literals.BooleanLiteral
import com.enkidu.lignum.parsers.ast.expression.types.annotations.MarkerAnnotation
import com.enkidu.lignum.parsers.ast.expression.types.references.ClassType
import com.enkidu.lignum.parsers.ast.expression.types.templates.{AnyTemplate, ParameterTemplate}
import com.enkidu.lignum.parsers.ast.statement.declaration.LocalVariableDeclaration
import com.enkidu.lignum.parsers.ast.statement.declaration.types.EmptyDeclaration
import com.enkidu.lignum.parsers.ast.statement.declarator.VariableDeclarator
import com.enkidu.lignum.parsers.ast.statement.parameter.InferredParameter
import com.enkidu.lignum.parsers.ast.statement.{Block, EmptyStatement}
import org.scalatest.prop.{Checkers, PropertyChecks}
import org.scalatest.{BeforeAndAfterEach, FreeSpec, Matchers}
abstract class VisitorTest extends FreeSpec with PropertyChecks with Matchers with Checkers with BeforeAndAfterEach {
val dim = AbstractDimension(Vector())
val declarator = VariableDeclarator("")
val ann = MarkerAnnotation("")
val block = Block(Vector())
val expr = BooleanLiteral("1")
val stmt = EmptyStatement
val decl = EmptyDeclaration
val typ = ClassType(Vector(), None, "A", Vector())
val arg = AnyTemplate(Vector())
val argParam = InferredParameter("")
val templParam = ParameterTemplate(Vector(), "")
val local = LocalVariableDeclaration(ann, false, typ, declarator)
val visitor: PartialFunction[Visitable, Unit] = {
case _ => visited += 1
}
protected var visited = 0
override def beforeEach(): Unit = {
visited = 0
}
protected implicit def toSeq[A](a: A): Seq[A] = Seq(a)
protected implicit def toOpt[A](a: A): Option[A] = Some(a)
protected implicit class EasyVisit[A <: Visitable](a: A) {
def visit() = a.dispatch(visitor)
}
}
|
marek1840/java-parser
|
src/test/scala/com/enkidu/lignum/parsers/ast/VisitorTest.scala
|
Scala
|
mit
| 1,966 |
package models.quiz.question.table
import models.quiz.question.MultipleChoiceQuestionOption
import models.quiz.table.multipleChoiceQuestionsTable
import models.support._
import play.api.db.slick.Config.driver.simple._
import play.api.templates.Html
import scala.slick.model.ForeignKeyAction
class MultipleChoiceQuestionOptionsTable(tag: Tag) extends Table[MultipleChoiceQuestionOption](tag, "multiple_choice_question_options") {
def id = column[Long]("id", O.AutoInc, O.PrimaryKey)
def questionId = column[QuestionId]("question_id")
def optionRaw = column[String]("option_raw")
def optionHtml = column[Html]("option_html")
def * = (id, questionId, optionRaw, optionHtml) <> (MultipleChoiceQuestionOption.tupled, MultipleChoiceQuestionOption.unapply _)
def questionIdFK = foreignKey("multiple_choice_question_options__question_id_fk", questionId, multipleChoiceQuestionsTable)(_.id, onDelete = ForeignKeyAction.Cascade)
}
|
kristiankime/web-education-games
|
app/models/quiz/question/table/MultipleChoiceQuestionOptionsTable.scala
|
Scala
|
mit
| 1,181 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions.objects
import java.lang.reflect.Modifier
import scala.collection.mutable.Builder
import scala.language.existentials
import scala.reflect.ClassTag
import scala.util.Try
import org.apache.spark.{SparkConf, SparkEnv}
import org.apache.spark.serializer._
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.encoders.RowEncoder
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, ExprCode}
import org.apache.spark.sql.catalyst.util.{ArrayBasedMapData, ArrayData, GenericArrayData}
import org.apache.spark.sql.types._
/**
* Common base class for [[StaticInvoke]], [[Invoke]], and [[NewInstance]].
*/
trait InvokeLike extends Expression with NonSQLExpression {
def arguments: Seq[Expression]
def propagateNull: Boolean
protected lazy val needNullCheck: Boolean = propagateNull && arguments.exists(_.nullable)
/**
   * Prepares code for the arguments.
   *
   * - generates code for each argument.
   * - uses ctx.splitExpressions() to stay under the 64KB JVM method-size limit while preparing
   *   arguments.
   * - skips nullability checks that are unnecessary because the expression is not nullable.
   * - when needNullCheck == true, short-circuits as soon as one argument evaluates to null,
   *   since preparing the remaining arguments can be skipped in that case.
*
* @param ctx a [[CodegenContext]]
* @return (code to prepare arguments, argument string, result of argument null check)
*/
def prepareArguments(ctx: CodegenContext): (String, String, String) = {
val resultIsNull = if (needNullCheck) {
val resultIsNull = ctx.freshName("resultIsNull")
ctx.addMutableState("boolean", resultIsNull, "")
resultIsNull
} else {
"false"
}
val argValues = arguments.map { e =>
val argValue = ctx.freshName("argValue")
ctx.addMutableState(ctx.javaType(e.dataType), argValue, "")
argValue
}
val argCodes = if (needNullCheck) {
val reset = s"$resultIsNull = false;"
val argCodes = arguments.zipWithIndex.map { case (e, i) =>
val expr = e.genCode(ctx)
val updateResultIsNull = if (e.nullable) {
s"$resultIsNull = ${expr.isNull};"
} else {
""
}
s"""
if (!$resultIsNull) {
${expr.code}
$updateResultIsNull
${argValues(i)} = ${expr.value};
}
"""
}
reset +: argCodes
} else {
arguments.zipWithIndex.map { case (e, i) =>
val expr = e.genCode(ctx)
s"""
${expr.code}
${argValues(i)} = ${expr.value};
"""
}
}
val argCode = ctx.splitExpressions(ctx.INPUT_ROW, argCodes)
(argCode, argValues.mkString(", "), resultIsNull)
}
}
/**
* Invokes a static function, returning the result. By default, any of the arguments being null
* will result in returning null instead of calling the function.
*
* @param staticObject The target of the static call. This can either be the object itself
* (methods defined on scala objects), or the class object
* (static methods defined in java).
* @param dataType The expected return type of the function call
* @param functionName The name of the method to call.
* @param arguments An optional list of expressions to pass as arguments to the function.
* @param propagateNull When true, and any of the arguments is null, null will be returned instead
* of calling the function.
*/
case class StaticInvoke(
staticObject: Class[_],
dataType: DataType,
functionName: String,
arguments: Seq[Expression] = Nil,
propagateNull: Boolean = true) extends InvokeLike {
val objectName = staticObject.getName.stripSuffix("$")
override def nullable: Boolean = true
override def children: Seq[Expression] = arguments
override def eval(input: InternalRow): Any =
throw new UnsupportedOperationException("Only code-generated evaluation is supported.")
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val javaType = ctx.javaType(dataType)
val (argCode, argString, resultIsNull) = prepareArguments(ctx)
val callFunc = s"$objectName.$functionName($argString)"
// If the function can return null, we do an extra check to make sure our null bit is still set
// correctly.
val postNullCheck = if (ctx.defaultValue(dataType) == "null") {
s"${ev.isNull} = ${ev.value} == null;"
} else {
""
}
val code = s"""
$argCode
boolean ${ev.isNull} = $resultIsNull;
final $javaType ${ev.value} = $resultIsNull ? ${ctx.defaultValue(dataType)} : $callFunc;
$postNullCheck
"""
ev.copy(code = code)
}
}
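// Editor's sketch (not part of the original file): constructing a StaticInvoke that calls
// java.lang.Math.abs on a child expression, assuming `child` evaluates to a double.
// `StaticInvokeSketch` is a hypothetical helper added purely for illustration.
private[objects] object StaticInvokeSketch {
  def absOf(child: Expression): StaticInvoke =
    StaticInvoke(classOf[java.lang.Math], DoubleType, "abs", Seq(child))
}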
/**
* Calls the specified function on an object, optionally passing arguments. If the `targetObject`
* expression evaluates to null then null will be returned.
*
* In some cases, due to erasure, the schema may expect a primitive type when in fact the method
* is returning java.lang.Object. In this case, we will generate code that attempts to unbox the
* value automatically.
*
* @param targetObject An expression that will return the object to call the method on.
* @param functionName The name of the method to call.
* @param dataType The expected return type of the function.
 * @param arguments An optional list of expressions whose evaluation results are passed to the
 *                  function.
* @param propagateNull When true, and any of the arguments is null, null will be returned instead
* of calling the function.
* @param returnNullable When false, indicating the invoked method will always return
* non-null value.
*/
case class Invoke(
targetObject: Expression,
functionName: String,
dataType: DataType,
arguments: Seq[Expression] = Nil,
propagateNull: Boolean = true,
returnNullable : Boolean = true) extends InvokeLike {
override def nullable: Boolean = targetObject.nullable || needNullCheck || returnNullable
override def children: Seq[Expression] = targetObject +: arguments
override def eval(input: InternalRow): Any =
throw new UnsupportedOperationException("Only code-generated evaluation is supported.")
@transient lazy val method = targetObject.dataType match {
case ObjectType(cls) =>
val m = cls.getMethods.find(_.getName == functionName)
if (m.isEmpty) {
sys.error(s"Couldn't find $functionName on $cls")
} else {
m
}
case _ => None
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val javaType = ctx.javaType(dataType)
val obj = targetObject.genCode(ctx)
val (argCode, argString, resultIsNull) = prepareArguments(ctx)
val returnPrimitive = method.isDefined && method.get.getReturnType.isPrimitive
val needTryCatch = method.isDefined && method.get.getExceptionTypes.nonEmpty
def getFuncResult(resultVal: String, funcCall: String): String = if (needTryCatch) {
s"""
try {
$resultVal = $funcCall;
} catch (Exception e) {
org.apache.spark.unsafe.Platform.throwException(e);
}
"""
} else {
s"$resultVal = $funcCall;"
}
val evaluate = if (returnPrimitive) {
getFuncResult(ev.value, s"${obj.value}.$functionName($argString)")
} else {
val funcResult = ctx.freshName("funcResult")
// If the function can return null, we do an extra check to make sure our null bit is still
// set correctly.
val assignResult = if (!returnNullable) {
s"${ev.value} = (${ctx.boxedType(javaType)}) $funcResult;"
} else {
s"""
if ($funcResult != null) {
${ev.value} = (${ctx.boxedType(javaType)}) $funcResult;
} else {
${ev.isNull} = true;
}
"""
}
s"""
Object $funcResult = null;
${getFuncResult(funcResult, s"${obj.value}.$functionName($argString)")}
$assignResult
"""
}
val code = s"""
${obj.code}
boolean ${ev.isNull} = true;
$javaType ${ev.value} = ${ctx.defaultValue(dataType)};
if (!${obj.isNull}) {
$argCode
${ev.isNull} = $resultIsNull;
if (!${ev.isNull}) {
$evaluate
}
}
"""
ev.copy(code = code)
}
override def toString: String = s"$targetObject.$functionName"
}
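// Editor's sketch (not part of the original file): an Invoke that calls toString on
// whatever object `target` evaluates to. `InvokeSketch` is a hypothetical helper added
// purely for illustration.
private[objects] object InvokeSketch {
  def toStringOf(target: Expression): Invoke =
    Invoke(target, "toString", ObjectType(classOf[java.lang.String]))
}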
object NewInstance {
def apply(
cls: Class[_],
arguments: Seq[Expression],
dataType: DataType,
propagateNull: Boolean = true): NewInstance =
new NewInstance(cls, arguments, propagateNull, dataType, None)
}
/**
* Constructs a new instance of the given class, using the result of evaluating the specified
* expressions as arguments.
*
* @param cls The class to construct.
* @param arguments A list of expression to use as arguments to the constructor.
* @param propagateNull When true, if any of the arguments is null, then null will be returned
* instead of trying to construct the object.
* @param dataType The type of object being constructed, as a Spark SQL datatype. This allows you
* to manually specify the type when the object in question is a valid internal
* representation (i.e. ArrayData) instead of an object.
* @param outerPointer If the object being constructed is an inner class, the outerPointer for the
 *                     containing class must be specified. This parameter is defined as an optional
 *                     function, which allows us to get the outer pointer lazily, and is useful if
 *                     the inner class is defined in the REPL.
*/
case class NewInstance(
cls: Class[_],
arguments: Seq[Expression],
propagateNull: Boolean,
dataType: DataType,
outerPointer: Option[() => AnyRef]) extends InvokeLike {
private val className = cls.getName
override def nullable: Boolean = needNullCheck
override def children: Seq[Expression] = arguments
override lazy val resolved: Boolean = {
// If the class to construct is an inner class, we need to get its outer pointer, or this
// expression should be regarded as unresolved.
// Note that static inner classes (e.g., inner classes within Scala objects) don't need
// outer pointer registration.
val needOuterPointer =
outerPointer.isEmpty && cls.isMemberClass && !Modifier.isStatic(cls.getModifiers)
childrenResolved && !needOuterPointer
}
override def eval(input: InternalRow): Any =
throw new UnsupportedOperationException("Only code-generated evaluation is supported.")
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val javaType = ctx.javaType(dataType)
val (argCode, argString, resultIsNull) = prepareArguments(ctx)
val outer = outerPointer.map(func => Literal.fromObject(func()).genCode(ctx))
ev.isNull = resultIsNull
val constructorCall = outer.map { gen =>
s"${gen.value}.new ${cls.getSimpleName}($argString)"
}.getOrElse {
s"new $className($argString)"
}
val code = s"""
$argCode
${outer.map(_.code).getOrElse("")}
final $javaType ${ev.value} = ${ev.isNull} ? ${ctx.defaultValue(javaType)} : $constructorCall;
"""
ev.copy(code = code)
}
override def toString: String = s"newInstance($cls)"
}
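// Editor's sketch (not part of the original file): constructing a boxed java.lang.Integer
// through the NewInstance helper above. `NewInstanceSketch` is a hypothetical helper added
// purely for illustration.
private[objects] object NewInstanceSketch {
  def boxedInt(arg: Expression): NewInstance =
    NewInstance(classOf[java.lang.Integer], Seq(arg), ObjectType(classOf[java.lang.Integer]))
}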
/**
 * Given an expression that returns an object of type `Option[_]`, this expression unwraps the
 * option into the specified Spark SQL datatype. In the case of `None`, the null bit is set instead.
*
* @param dataType The expected unwrapped option type.
* @param child An expression that returns an `Option`
*/
case class UnwrapOption(
dataType: DataType,
child: Expression) extends UnaryExpression with NonSQLExpression with ExpectsInputTypes {
override def nullable: Boolean = true
override def inputTypes: Seq[AbstractDataType] = ObjectType :: Nil
override def eval(input: InternalRow): Any =
throw new UnsupportedOperationException("Only code-generated evaluation is supported")
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val javaType = ctx.javaType(dataType)
val inputObject = child.genCode(ctx)
val code = s"""
${inputObject.code}
final boolean ${ev.isNull} = ${inputObject.isNull} || ${inputObject.value}.isEmpty();
$javaType ${ev.value} = ${ev.isNull} ?
${ctx.defaultValue(javaType)} : (${ctx.boxedType(javaType)}) ${inputObject.value}.get();
"""
ev.copy(code = code)
}
}
/**
* Converts the result of evaluating `child` into an option, checking both the isNull bit and
* (in the case of reference types) equality with null.
*
* @param child The expression to evaluate and wrap.
* @param optType The type of this option.
*/
case class WrapOption(child: Expression, optType: DataType)
extends UnaryExpression with NonSQLExpression with ExpectsInputTypes {
override def dataType: DataType = ObjectType(classOf[Option[_]])
override def nullable: Boolean = false
override def inputTypes: Seq[AbstractDataType] = optType :: Nil
override def eval(input: InternalRow): Any =
throw new UnsupportedOperationException("Only code-generated evaluation is supported")
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val inputObject = child.genCode(ctx)
val code = s"""
${inputObject.code}
scala.Option ${ev.value} =
${inputObject.isNull} ?
scala.Option$$.MODULE$$.apply(null) : new scala.Some(${inputObject.value});
"""
ev.copy(code = code, isNull = "false")
}
}
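// Editor's sketch (not part of the original file): WrapOption and UnwrapOption are duals,
// so wrapping and then unwrapping an expression preserves its value/null semantics.
// `OptionRoundTripSketch` is a hypothetical helper added purely for illustration.
private[objects] object OptionRoundTripSketch {
  private val stringObjectType = ObjectType(classOf[java.lang.String])
  def roundTrip(child: Expression): Expression =
    UnwrapOption(stringObjectType, WrapOption(child, stringObjectType))
}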
/**
* A placeholder for the loop variable used in [[MapObjects]]. This should never be constructed
* manually, but will instead be passed into the provided lambda function.
*/
case class LambdaVariable(
value: String,
isNull: String,
dataType: DataType,
nullable: Boolean = true) extends LeafExpression
with Unevaluable with NonSQLExpression {
override def genCode(ctx: CodegenContext): ExprCode = {
ExprCode(code = "", value = value, isNull = if (nullable) isNull else "false")
}
}
/**
* When constructing [[MapObjects]], the element type must be given, which may not be available
* before analysis. This class acts like a placeholder for [[MapObjects]], and will be replaced by
* [[MapObjects]] during analysis after the input data is resolved.
 * Note that, ideally, we should not serialize and send unresolved expressions to executors, but
 * users may do this accidentally (e.g. by mistakenly referencing an encoder instance when
 * implementing Aggregator). Here we mark `function` as transient because it may reference a Scala
 * Type, which is not serializable. Then, even if users mistakenly reference an unresolved
 * expression and serialize it, it is only a performance issue (more network traffic) and will
 * not fail.
*/
case class UnresolvedMapObjects(
@transient function: Expression => Expression,
child: Expression,
customCollectionCls: Option[Class[_]] = None) extends UnaryExpression with Unevaluable {
override lazy val resolved = false
override def dataType: DataType = customCollectionCls.map(ObjectType.apply).getOrElse {
throw new UnsupportedOperationException("not resolved")
}
}
object MapObjects {
private val curId = new java.util.concurrent.atomic.AtomicInteger()
/**
* Construct an instance of MapObjects case class.
*
* @param function The function applied on the collection elements.
* @param inputData An expression that when evaluated returns a collection object.
* @param elementType The data type of elements in the collection.
* @param elementNullable When false, indicating elements in the collection are always
* non-null value.
* @param customCollectionCls Class of the resulting collection (returning ObjectType)
* or None (returning ArrayType)
*/
def apply(
function: Expression => Expression,
inputData: Expression,
elementType: DataType,
elementNullable: Boolean = true,
customCollectionCls: Option[Class[_]] = None): MapObjects = {
val id = curId.getAndIncrement()
val loopValue = s"MapObjects_loopValue$id"
val loopIsNull = s"MapObjects_loopIsNull$id"
val loopVar = LambdaVariable(loopValue, loopIsNull, elementType, elementNullable)
MapObjects(
loopValue, loopIsNull, elementType, function(loopVar), inputData, customCollectionCls)
}
}
/**
* Applies the given expression to every element of a collection of items, returning the result
* as an ArrayType or ObjectType. This is similar to a typical map operation, but where the lambda
* function is expressed using catalyst expressions.
*
* The type of the result is determined as follows:
* - ArrayType - when customCollectionCls is None
* - ObjectType(collection) - when customCollectionCls contains a collection class
*
* The following collection ObjectTypes are currently supported on input:
* Seq, Array, ArrayData, java.util.List
*
 * @param loopValue the name of the loop variable used when iterating over the collection, and
 *                  used as input for the `lambdaFunction`
 * @param loopIsNull the nullability of the loop variable used when iterating over the collection,
 *                   and used as input for the `lambdaFunction`
 * @param loopVarDataType the data type of the loop variable used when iterating over the
 *                        collection, and used as input for the `lambdaFunction`
* @param lambdaFunction A function that take the `loopVar` as input, and used as lambda function
* to handle collection elements.
* @param inputData An expression that when evaluated returns a collection object.
* @param customCollectionCls Class of the resulting collection (returning ObjectType)
* or None (returning ArrayType)
*/
case class MapObjects private(
loopValue: String,
loopIsNull: String,
loopVarDataType: DataType,
lambdaFunction: Expression,
inputData: Expression,
customCollectionCls: Option[Class[_]]) extends Expression with NonSQLExpression {
override def nullable: Boolean = inputData.nullable
override def children: Seq[Expression] = lambdaFunction :: inputData :: Nil
override def eval(input: InternalRow): Any =
throw new UnsupportedOperationException("Only code-generated evaluation is supported")
override def dataType: DataType =
customCollectionCls.map(ObjectType.apply).getOrElse(
ArrayType(lambdaFunction.dataType, containsNull = lambdaFunction.nullable))
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val elementJavaType = ctx.javaType(loopVarDataType)
ctx.addMutableState("boolean", loopIsNull, "")
ctx.addMutableState(elementJavaType, loopValue, "")
val genInputData = inputData.genCode(ctx)
val genFunction = lambdaFunction.genCode(ctx)
val dataLength = ctx.freshName("dataLength")
val convertedArray = ctx.freshName("convertedArray")
val loopIndex = ctx.freshName("loopIndex")
val convertedType = ctx.boxedType(lambdaFunction.dataType)
// Because of the way Java defines nested arrays, we have to handle the syntax specially.
// Specifically, we have to insert the [$dataLength] in between the type and any extra nested
// array declarations (i.e. new String[1][]).
val arrayConstructor = if (convertedType contains "[]") {
val rawType = convertedType.takeWhile(_ != '[')
val arrayPart = convertedType.reverse.takeWhile(c => c == '[' || c == ']').reverse
s"new $rawType[$dataLength]$arrayPart"
} else {
s"new $convertedType[$dataLength]"
}
// In RowEncoder, we use `Object` to represent Array or Seq, so we need to determine the type
// of input collection at runtime for this case.
val seq = ctx.freshName("seq")
val array = ctx.freshName("array")
val determineCollectionType = inputData.dataType match {
case ObjectType(cls) if cls == classOf[Object] =>
val seqClass = classOf[Seq[_]].getName
s"""
$seqClass $seq = null;
$elementJavaType[] $array = null;
if (${genInputData.value}.getClass().isArray()) {
$array = ($elementJavaType[]) ${genInputData.value};
} else {
$seq = ($seqClass) ${genInputData.value};
}
"""
case _ => ""
}
    // Data with a PythonUserDefinedType is actually stored using the data type of its sqlType.
    // When we want to apply MapObjects to such data, we have to use that sqlType.
val inputDataType = inputData.dataType match {
case p: PythonUserDefinedType => p.sqlType
case _ => inputData.dataType
}
val (getLength, getLoopVar) = inputDataType match {
case ObjectType(cls) if classOf[Seq[_]].isAssignableFrom(cls) =>
s"${genInputData.value}.size()" -> s"${genInputData.value}.apply($loopIndex)"
case ObjectType(cls) if cls.isArray =>
s"${genInputData.value}.length" -> s"${genInputData.value}[$loopIndex]"
case ObjectType(cls) if classOf[java.util.List[_]].isAssignableFrom(cls) =>
s"${genInputData.value}.size()" -> s"${genInputData.value}.get($loopIndex)"
case ArrayType(et, _) =>
s"${genInputData.value}.numElements()" -> ctx.getValue(genInputData.value, et, loopIndex)
case ObjectType(cls) if cls == classOf[Object] =>
s"$seq == null ? $array.length : $seq.size()" ->
s"$seq == null ? $array[$loopIndex] : $seq.apply($loopIndex)"
}
// Make a copy of the data if it's unsafe-backed
def makeCopyIfInstanceOf(clazz: Class[_ <: Any], value: String) =
s"$value instanceof ${clazz.getSimpleName}? ${value}.copy() : $value"
val genFunctionValue = lambdaFunction.dataType match {
case StructType(_) => makeCopyIfInstanceOf(classOf[UnsafeRow], genFunction.value)
case ArrayType(_, _) => makeCopyIfInstanceOf(classOf[UnsafeArrayData], genFunction.value)
case MapType(_, _, _) => makeCopyIfInstanceOf(classOf[UnsafeMapData], genFunction.value)
case _ => genFunction.value
}
val loopNullCheck = inputDataType match {
case _: ArrayType => s"$loopIsNull = ${genInputData.value}.isNullAt($loopIndex);"
      // The elements of a primitive array can never be null.
      case ObjectType(cls) if cls.isArray && cls.getComponentType.isPrimitive =>
        s"$loopIsNull = false;"
case _ => s"$loopIsNull = $loopValue == null;"
}
val (initCollection, addElement, getResult): (String, String => String, String) =
customCollectionCls match {
case Some(cls) if classOf[Seq[_]].isAssignableFrom(cls) =>
// Scala sequence
val getBuilder = s"${cls.getName}$$.MODULE$$.newBuilder()"
val builder = ctx.freshName("collectionBuilder")
(
s"""
${classOf[Builder[_, _]].getName} $builder = $getBuilder;
$builder.sizeHint($dataLength);
""",
genValue => s"$builder.$$plus$$eq($genValue);",
s"(${cls.getName}) $builder.result();"
)
case Some(cls) if classOf[java.util.List[_]].isAssignableFrom(cls) =>
// Java list
val builder = ctx.freshName("collectionBuilder")
(
if (cls == classOf[java.util.List[_]] || cls == classOf[java.util.AbstractList[_]] ||
cls == classOf[java.util.AbstractSequentialList[_]]) {
s"${cls.getName} $builder = new java.util.ArrayList($dataLength);"
} else {
val param = Try(cls.getConstructor(Integer.TYPE)).map(_ => dataLength).getOrElse("")
s"${cls.getName} $builder = new ${cls.getName}($param);"
},
genValue => s"$builder.add($genValue);",
s"$builder;"
)
case None =>
// array
(
s"""
$convertedType[] $convertedArray = null;
$convertedArray = $arrayConstructor;
""",
genValue => s"$convertedArray[$loopIndex] = $genValue;",
s"new ${classOf[GenericArrayData].getName}($convertedArray);"
)
}
val code = s"""
${genInputData.code}
${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
if (!${genInputData.isNull}) {
$determineCollectionType
int $dataLength = $getLength;
$initCollection
int $loopIndex = 0;
while ($loopIndex < $dataLength) {
$loopValue = ($elementJavaType) ($getLoopVar);
$loopNullCheck
${genFunction.code}
if (${genFunction.isNull}) {
${addElement("null")}
} else {
${addElement(genFunctionValue)}
}
$loopIndex += 1;
}
${ev.value} = $getResult
}
"""
ev.copy(code = code, isNull = genInputData.isNull)
}
}
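// Illustrative sketch: for a primitive int[] input converted element-by-element, the
// code template above expands to Java roughly like this (identifiers are generated
// fresh at runtime; `lambdaResult` stands in for the generated lambda's output):
//
//   int dataLength = input.length;
//   int[] convertedArray = new int[dataLength];
//   int loopIndex = 0;
//   while (loopIndex < dataLength) {
//     loopValue = (int) (input[loopIndex]);
//     loopIsNull = false;  // primitive elements can never be null
//     /* ...generated lambda body... */
//     convertedArray[loopIndex] = lambdaResult;
//     loopIndex += 1;
//   }
//   result = new GenericArrayData(convertedArray);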
object CollectObjectsToMap {
private val curId = new java.util.concurrent.atomic.AtomicInteger()
/**
* Construct an instance of CollectObjectsToMap case class.
*
* @param keyFunction The function applied on the key collection elements.
* @param valueFunction The function applied on the value collection elements.
* @param inputData An expression that when evaluated returns a map object.
* @param collClass The type of the resulting collection.
*/
def apply(
keyFunction: Expression => Expression,
valueFunction: Expression => Expression,
inputData: Expression,
collClass: Class[_]): CollectObjectsToMap = {
val id = curId.getAndIncrement()
val keyLoopValue = s"CollectObjectsToMap_keyLoopValue$id"
val mapType = inputData.dataType.asInstanceOf[MapType]
val keyLoopVar = LambdaVariable(keyLoopValue, "", mapType.keyType, nullable = false)
val valueLoopValue = s"CollectObjectsToMap_valueLoopValue$id"
val valueLoopIsNull = s"CollectObjectsToMap_valueLoopIsNull$id"
val valueLoopVar = LambdaVariable(valueLoopValue, valueLoopIsNull, mapType.valueType)
CollectObjectsToMap(
keyLoopValue, keyFunction(keyLoopVar),
valueLoopValue, valueLoopIsNull, valueFunction(valueLoopVar),
inputData, collClass)
}
}
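// Hypothetical usage sketch: an encoder/deserializer builder would invoke the factory
// above along these lines, e.g. to rebuild a scala.collection.immutable.Map with
// untouched keys and values (`mapExpr` is an assumed Expression of MapType):
//
//   CollectObjectsToMap(
//     keyFunction   = identity,
//     valueFunction = identity,
//     inputData     = mapExpr,
//     collClass     = classOf[scala.collection.immutable.Map[_, _]])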
/**
* Expression used to convert a Catalyst Map to an external Scala Map.
* The collection is constructed using the associated builder, obtained by calling `newBuilder`
* on the collection's companion object.
*
* @param keyLoopValue the name of the loop variable that is used when iterating over the key
* collection, and which is used as input for the `keyLambdaFunction`
* @param keyLambdaFunction A function that takes the `keyLoopVar` as input, and is used as
* a lambda function to handle collection elements.
* @param valueLoopValue the name of the loop variable that is used when iterating over the value
* collection, and which is used as input for the `valueLambdaFunction`
* @param valueLoopIsNull the nullability of the loop variable that is used when iterating over
* the value collection, and which is used as input for the
* `valueLambdaFunction`
* @param valueLambdaFunction A function that takes the `valueLoopVar` as input, and is used as
* a lambda function to handle collection elements.
* @param inputData An expression that when evaluated returns a map object.
* @param collClass The type of the resulting collection.
*/
case class CollectObjectsToMap private(
keyLoopValue: String,
keyLambdaFunction: Expression,
valueLoopValue: String,
valueLoopIsNull: String,
valueLambdaFunction: Expression,
inputData: Expression,
collClass: Class[_]) extends Expression with NonSQLExpression {
override def nullable: Boolean = inputData.nullable
override def children: Seq[Expression] =
keyLambdaFunction :: valueLambdaFunction :: inputData :: Nil
override def eval(input: InternalRow): Any =
throw new UnsupportedOperationException("Only code-generated evaluation is supported")
override def dataType: DataType = ObjectType(collClass)
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    // Data with a PythonUserDefinedType is actually stored using the data type of its
    // sqlType, so when applying MapObjects to it we have to use that underlying type.
def inputDataType(dataType: DataType) = dataType match {
case p: PythonUserDefinedType => p.sqlType
case _ => dataType
}
val mapType = inputDataType(inputData.dataType).asInstanceOf[MapType]
val keyElementJavaType = ctx.javaType(mapType.keyType)
ctx.addMutableState(keyElementJavaType, keyLoopValue, "")
val genKeyFunction = keyLambdaFunction.genCode(ctx)
val valueElementJavaType = ctx.javaType(mapType.valueType)
ctx.addMutableState("boolean", valueLoopIsNull, "")
ctx.addMutableState(valueElementJavaType, valueLoopValue, "")
val genValueFunction = valueLambdaFunction.genCode(ctx)
val genInputData = inputData.genCode(ctx)
val dataLength = ctx.freshName("dataLength")
val loopIndex = ctx.freshName("loopIndex")
val tupleLoopValue = ctx.freshName("tupleLoopValue")
val builderValue = ctx.freshName("builderValue")
val getLength = s"${genInputData.value}.numElements()"
val keyArray = ctx.freshName("keyArray")
val valueArray = ctx.freshName("valueArray")
val getKeyArray =
s"${classOf[ArrayData].getName} $keyArray = ${genInputData.value}.keyArray();"
val getKeyLoopVar = ctx.getValue(keyArray, inputDataType(mapType.keyType), loopIndex)
val getValueArray =
s"${classOf[ArrayData].getName} $valueArray = ${genInputData.value}.valueArray();"
val getValueLoopVar = ctx.getValue(valueArray, inputDataType(mapType.valueType), loopIndex)
// Make a copy of the data if it's unsafe-backed
def makeCopyIfInstanceOf(clazz: Class[_ <: Any], value: String) =
s"$value instanceof ${clazz.getSimpleName}? $value.copy() : $value"
def genFunctionValue(lambdaFunction: Expression, genFunction: ExprCode) =
lambdaFunction.dataType match {
case StructType(_) => makeCopyIfInstanceOf(classOf[UnsafeRow], genFunction.value)
case ArrayType(_, _) => makeCopyIfInstanceOf(classOf[UnsafeArrayData], genFunction.value)
case MapType(_, _, _) => makeCopyIfInstanceOf(classOf[UnsafeMapData], genFunction.value)
case _ => genFunction.value
}
val genKeyFunctionValue = genFunctionValue(keyLambdaFunction, genKeyFunction)
val genValueFunctionValue = genFunctionValue(valueLambdaFunction, genValueFunction)
val valueLoopNullCheck = s"$valueLoopIsNull = $valueArray.isNullAt($loopIndex);"
val builderClass = classOf[Builder[_, _]].getName
val constructBuilder = s"""
$builderClass $builderValue = ${collClass.getName}$$.MODULE$$.newBuilder();
$builderValue.sizeHint($dataLength);
"""
val tupleClass = classOf[(_, _)].getName
val appendToBuilder = s"""
$tupleClass $tupleLoopValue;
if (${genValueFunction.isNull}) {
$tupleLoopValue = new $tupleClass($genKeyFunctionValue, null);
} else {
$tupleLoopValue = new $tupleClass($genKeyFunctionValue, $genValueFunctionValue);
}
$builderValue.$$plus$$eq($tupleLoopValue);
"""
val getBuilderResult = s"${ev.value} = (${collClass.getName}) $builderValue.result();"
val code = s"""
${genInputData.code}
${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
if (!${genInputData.isNull}) {
int $dataLength = $getLength;
$constructBuilder
$getKeyArray
$getValueArray
int $loopIndex = 0;
while ($loopIndex < $dataLength) {
$keyLoopValue = ($keyElementJavaType) ($getKeyLoopVar);
$valueLoopValue = ($valueElementJavaType) ($getValueLoopVar);
$valueLoopNullCheck
${genKeyFunction.code}
${genValueFunction.code}
$appendToBuilder
$loopIndex += 1;
}
$getBuilderResult
}
"""
ev.copy(code = code, isNull = genInputData.isNull)
}
}
object ExternalMapToCatalyst {
private val curId = new java.util.concurrent.atomic.AtomicInteger()
def apply(
inputMap: Expression,
keyType: DataType,
keyConverter: Expression => Expression,
valueType: DataType,
valueConverter: Expression => Expression,
valueNullable: Boolean): ExternalMapToCatalyst = {
val id = curId.getAndIncrement()
val keyName = "ExternalMapToCatalyst_key" + id
val valueName = "ExternalMapToCatalyst_value" + id
val valueIsNull = "ExternalMapToCatalyst_value_isNull" + id
ExternalMapToCatalyst(
keyName,
keyType,
keyConverter(LambdaVariable(keyName, "false", keyType, false)),
valueName,
valueIsNull,
valueType,
valueConverter(LambdaVariable(valueName, valueIsNull, valueType, valueNullable)),
inputMap
)
}
}
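// Hypothetical usage sketch: a serializer builder could use the factory above roughly
// as follows, where `mapExpr` is an assumed expression yielding a Scala
// Map[String, String] and the converters lift external strings into Catalyst's
// UTF8String format:
//
//   ExternalMapToCatalyst(
//     inputMap       = mapExpr,
//     keyType        = ObjectType(classOf[String]),
//     keyConverter   = k => StaticInvoke(
//       classOf[UTF8String], StringType, "fromString", k :: Nil),
//     valueType      = ObjectType(classOf[String]),
//     valueConverter = v => StaticInvoke(
//       classOf[UTF8String], StringType, "fromString", v :: Nil),
//     valueNullable  = true)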
/**
 * Converts a Scala/Java map object into catalyst format by applying the key/value converters
 * while iterating over the map.
 *
 * @param key the name of the map key variable used when iterating over the map, and used as
 *            input for the `keyConverter`
 * @param keyType the data type of the map key variable used when iterating over the map, and
 *                used as input for the `keyConverter`
 * @param keyConverter A function that takes the `key` as input and converts it to catalyst
 *                     format.
 * @param value the name of the map value variable used when iterating over the map, and used
 *              as input for the `valueConverter`
 * @param valueIsNull the nullability of the map value variable used when iterating over the
 *                    map, and used as input for the `valueConverter`
 * @param valueType the data type of the map value variable used when iterating over the map,
 *                  and used as input for the `valueConverter`
 * @param valueConverter A function that takes the `value` as input and converts it to catalyst
 *                       format.
* @param child An expression that when evaluated returns the input map object.
*/
case class ExternalMapToCatalyst private(
key: String,
keyType: DataType,
keyConverter: Expression,
value: String,
valueIsNull: String,
valueType: DataType,
valueConverter: Expression,
child: Expression)
extends UnaryExpression with NonSQLExpression {
override def foldable: Boolean = false
override def dataType: MapType = MapType(
keyConverter.dataType, valueConverter.dataType, valueContainsNull = valueConverter.nullable)
override def eval(input: InternalRow): Any =
throw new UnsupportedOperationException("Only code-generated evaluation is supported")
override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val inputMap = child.genCode(ctx)
val genKeyConverter = keyConverter.genCode(ctx)
val genValueConverter = valueConverter.genCode(ctx)
val length = ctx.freshName("length")
val index = ctx.freshName("index")
val convertedKeys = ctx.freshName("convertedKeys")
val convertedValues = ctx.freshName("convertedValues")
val entry = ctx.freshName("entry")
val entries = ctx.freshName("entries")
val (defineEntries, defineKeyValue) = child.dataType match {
case ObjectType(cls) if classOf[java.util.Map[_, _]].isAssignableFrom(cls) =>
val javaIteratorCls = classOf[java.util.Iterator[_]].getName
val javaMapEntryCls = classOf[java.util.Map.Entry[_, _]].getName
val defineEntries =
s"final $javaIteratorCls $entries = ${inputMap.value}.entrySet().iterator();"
val defineKeyValue =
s"""
final $javaMapEntryCls $entry = ($javaMapEntryCls) $entries.next();
${ctx.javaType(keyType)} $key = (${ctx.boxedType(keyType)}) $entry.getKey();
${ctx.javaType(valueType)} $value = (${ctx.boxedType(valueType)}) $entry.getValue();
"""
defineEntries -> defineKeyValue
case ObjectType(cls) if classOf[scala.collection.Map[_, _]].isAssignableFrom(cls) =>
val scalaIteratorCls = classOf[Iterator[_]].getName
val scalaMapEntryCls = classOf[Tuple2[_, _]].getName
val defineEntries = s"final $scalaIteratorCls $entries = ${inputMap.value}.iterator();"
val defineKeyValue =
s"""
final $scalaMapEntryCls $entry = ($scalaMapEntryCls) $entries.next();
${ctx.javaType(keyType)} $key = (${ctx.boxedType(keyType)}) $entry._1();
${ctx.javaType(valueType)} $value = (${ctx.boxedType(valueType)}) $entry._2();
"""
defineEntries -> defineKeyValue
}
val valueNullCheck = if (ctx.isPrimitiveType(valueType)) {
s"boolean $valueIsNull = false;"
} else {
s"boolean $valueIsNull = $value == null;"
}
val arrayCls = classOf[GenericArrayData].getName
val mapCls = classOf[ArrayBasedMapData].getName
val convertedKeyType = ctx.boxedType(keyConverter.dataType)
val convertedValueType = ctx.boxedType(valueConverter.dataType)
val code =
s"""
${inputMap.code}
${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
if (!${inputMap.isNull}) {
final int $length = ${inputMap.value}.size();
final Object[] $convertedKeys = new Object[$length];
final Object[] $convertedValues = new Object[$length];
int $index = 0;
$defineEntries
while($entries.hasNext()) {
$defineKeyValue
$valueNullCheck
${genKeyConverter.code}
if (${genKeyConverter.isNull}) {
throw new RuntimeException("Cannot use null as map key!");
} else {
$convertedKeys[$index] = ($convertedKeyType) ${genKeyConverter.value};
}
${genValueConverter.code}
if (${genValueConverter.isNull}) {
$convertedValues[$index] = null;
} else {
$convertedValues[$index] = ($convertedValueType) ${genValueConverter.value};
}
$index++;
}
${ev.value} = new $mapCls(new $arrayCls($convertedKeys), new $arrayCls($convertedValues));
}
"""
ev.copy(code = code, isNull = inputMap.isNull)
}
}
/**
* Constructs a new external row, using the result of evaluating the specified expressions
* as content.
*
* @param children A list of expression to use as content of the external row.
*/
case class CreateExternalRow(children: Seq[Expression], schema: StructType)
extends Expression with NonSQLExpression {
override def dataType: DataType = ObjectType(classOf[Row])
override def nullable: Boolean = false
override def eval(input: InternalRow): Any =
throw new UnsupportedOperationException("Only code-generated evaluation is supported")
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val rowClass = classOf[GenericRowWithSchema].getName
val values = ctx.freshName("values")
ctx.addMutableState("Object[]", values, "")
val childrenCodes = children.zipWithIndex.map { case (e, i) =>
val eval = e.genCode(ctx)
eval.code + s"""
if (${eval.isNull}) {
$values[$i] = null;
} else {
$values[$i] = ${eval.value};
}
"""
}
val childrenCode = ctx.splitExpressions(ctx.INPUT_ROW, childrenCodes)
val schemaField = ctx.addReferenceObj("schema", schema)
val code = s"""
$values = new Object[${children.size}];
$childrenCode
final ${classOf[Row].getName} ${ev.value} = new $rowClass($values, $schemaField);
"""
ev.copy(code = code, isNull = "false")
}
}
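// Illustrative sketch: for a two-column schema, the Java generated above boils down
// to the following (names approximated; each child is evaluated, then null-checked):
//
//   values = new Object[2];
//   /* ...child 0 eval... */ values[0] = child0IsNull ? null : child0Value;
//   /* ...child 1 eval... */ values[1] = child1IsNull ? null : child1Value;
//   final Row result = new GenericRowWithSchema(values, schemaRef);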
/**
* Serializes an input object using a generic serializer (Kryo or Java).
*
* @param kryo if true, use Kryo. Otherwise, use Java.
*/
case class EncodeUsingSerializer(child: Expression, kryo: Boolean)
extends UnaryExpression with NonSQLExpression {
override def eval(input: InternalRow): Any =
throw new UnsupportedOperationException("Only code-generated evaluation is supported")
override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
// Code to initialize the serializer.
val serializer = ctx.freshName("serializer")
val (serializerClass, serializerInstanceClass) = {
if (kryo) {
(classOf[KryoSerializer].getName, classOf[KryoSerializerInstance].getName)
} else {
(classOf[JavaSerializer].getName, classOf[JavaSerializerInstance].getName)
}
}
// try conf from env, otherwise create a new one
val env = s"${classOf[SparkEnv].getName}.get()"
val sparkConf = s"new ${classOf[SparkConf].getName}()"
val serializerInit = s"""
if ($env == null) {
$serializer = ($serializerInstanceClass) new $serializerClass($sparkConf).newInstance();
} else {
$serializer = ($serializerInstanceClass) new $serializerClass($env.conf()).newInstance();
}
"""
ctx.addMutableState(serializerInstanceClass, serializer, serializerInit)
// Code to serialize.
val input = child.genCode(ctx)
val javaType = ctx.javaType(dataType)
val serialize = s"$serializer.serialize(${input.value}, null).array()"
val code = s"""
${input.code}
final $javaType ${ev.value} = ${input.isNull} ? ${ctx.defaultValue(javaType)} : $serialize;
"""
ev.copy(code = code, isNull = input.isNull)
}
override def dataType: DataType = BinaryType
}
/**
 * Deserializes an input object using a generic serializer (Kryo or Java). Note that the ClassTag
* is not an implicit parameter because TreeNode cannot copy implicit parameters.
*
* @param kryo if true, use Kryo. Otherwise, use Java.
*/
case class DecodeUsingSerializer[T](child: Expression, tag: ClassTag[T], kryo: Boolean)
extends UnaryExpression with NonSQLExpression {
override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
// Code to initialize the serializer.
val serializer = ctx.freshName("serializer")
val (serializerClass, serializerInstanceClass) = {
if (kryo) {
(classOf[KryoSerializer].getName, classOf[KryoSerializerInstance].getName)
} else {
(classOf[JavaSerializer].getName, classOf[JavaSerializerInstance].getName)
}
}
// try conf from env, otherwise create a new one
val env = s"${classOf[SparkEnv].getName}.get()"
val sparkConf = s"new ${classOf[SparkConf].getName}()"
val serializerInit = s"""
if ($env == null) {
$serializer = ($serializerInstanceClass) new $serializerClass($sparkConf).newInstance();
} else {
$serializer = ($serializerInstanceClass) new $serializerClass($env.conf()).newInstance();
}
"""
ctx.addMutableState(serializerInstanceClass, serializer, serializerInit)
// Code to deserialize.
val input = child.genCode(ctx)
val javaType = ctx.javaType(dataType)
val deserialize =
s"($javaType) $serializer.deserialize(java.nio.ByteBuffer.wrap(${input.value}), null)"
val code = s"""
${input.code}
final $javaType ${ev.value} = ${input.isNull} ? ${ctx.defaultValue(javaType)} : $deserialize;
"""
ev.copy(code = code, isNull = input.isNull)
}
override def dataType: DataType = ObjectType(tag.runtimeClass)
}
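// Sketch, assuming an expression `objExpr` and a class `cls`: EncodeUsingSerializer and
// DecodeUsingSerializer are duals -- with the same `kryo` flag, decoding an encoded
// value yields the original object:
//
//   val encoded = EncodeUsingSerializer(objExpr, kryo = true)                   // BinaryType
//   val decoded = DecodeUsingSerializer(encoded, ClassTag(cls), kryo = true)    // ObjectType(cls)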
/**
 * Initializes a Java Bean instance by setting its field values via setters.
*/
case class InitializeJavaBean(beanInstance: Expression, setters: Map[String, Expression])
extends Expression with NonSQLExpression {
override def nullable: Boolean = beanInstance.nullable
override def children: Seq[Expression] = beanInstance +: setters.values.toSeq
override def dataType: DataType = beanInstance.dataType
override def eval(input: InternalRow): Any =
throw new UnsupportedOperationException("Only code-generated evaluation is supported.")
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val instanceGen = beanInstance.genCode(ctx)
val javaBeanInstance = ctx.freshName("javaBean")
val beanInstanceJavaType = ctx.javaType(beanInstance.dataType)
ctx.addMutableState(beanInstanceJavaType, javaBeanInstance, "")
val initialize = setters.map {
case (setterMethod, fieldValue) =>
val fieldGen = fieldValue.genCode(ctx)
s"""
${fieldGen.code}
${javaBeanInstance}.$setterMethod(${fieldGen.value});
"""
}
val initializeCode = ctx.splitExpressions(ctx.INPUT_ROW, initialize.toSeq)
val code = s"""
${instanceGen.code}
${javaBeanInstance} = ${instanceGen.value};
if (!${instanceGen.isNull}) {
$initializeCode
}
"""
ev.copy(code = code, isNull = instanceGen.isNull, value = instanceGen.value)
}
}
/**
* Asserts that input values of a non-nullable child expression are not null.
*
* Note that there are cases where `child.nullable == true`, while we still need to add this
* assertion. Consider a nullable column `s` whose data type is a struct containing a non-nullable
* `Int` field named `i`. Expression `s.i` is nullable because `s` can be null. However, for all
* non-null `s`, `s.i` can't be null.
*/
case class AssertNotNull(child: Expression, walkedTypePath: Seq[String] = Nil)
extends UnaryExpression with NonSQLExpression {
override def dataType: DataType = child.dataType
override def foldable: Boolean = false
override def nullable: Boolean = false
override def flatArguments: Iterator[Any] = Iterator(child)
private val errMsg = "Null value appeared in non-nullable field:" +
    walkedTypePath.mkString("\n", "\n", "\n") +
"If the schema is inferred from a Scala tuple/case class, or a Java bean, " +
"please try to use scala.Option[_] or other nullable types " +
"(e.g. java.lang.Integer instead of int/scala.Int)."
override def eval(input: InternalRow): Any = {
val result = child.eval(input)
if (result == null) {
throw new NullPointerException(errMsg)
}
result
}
override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val childGen = child.genCode(ctx)
// Use unnamed reference that doesn't create a local field here to reduce the number of fields
// because errMsgField is used only when the value is null.
val errMsgField = ctx.addReferenceMinorObj(errMsg)
val code = s"""
${childGen.code}
if (${childGen.isNull}) {
throw new NullPointerException($errMsgField);
}
"""
ev.copy(code = code, isNull = "false", value = childGen.value)
}
}
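// Hypothetical sketch for the scaladoc example above -- a nullable struct `s` with a
// non-nullable Int field `i`: a deserializer would guard the field access like this
// (`s` is an assumed expression; the walked-type-path text is illustrative):
//
//   AssertNotNull(
//     GetStructField(s, ordinal = 0, name = Some("i")),
//     walkedTypePath = Seq("""- field (class: "scala.Int", name: "i")"""))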
/**
* Returns the value of field at index `index` from the external row `child`.
* This class can be viewed as [[GetStructField]] for [[Row]]s instead of [[InternalRow]]s.
*
 * Note that the input row and the field we try to get are both guaranteed to be non-null;
 * if either is null, a runtime exception will be thrown.
*/
case class GetExternalRowField(
child: Expression,
index: Int,
fieldName: String) extends UnaryExpression with NonSQLExpression {
override def nullable: Boolean = false
override def dataType: DataType = ObjectType(classOf[Object])
override def eval(input: InternalRow): Any =
throw new UnsupportedOperationException("Only code-generated evaluation is supported")
private val errMsg = s"The ${index}th field '$fieldName' of input row cannot be null."
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
// Use unnamed reference that doesn't create a local field here to reduce the number of fields
// because errMsgField is used only when the field is null.
val errMsgField = ctx.addReferenceMinorObj(errMsg)
val row = child.genCode(ctx)
val code = s"""
${row.code}
if (${row.isNull}) {
throw new RuntimeException("The input external row cannot be null.");
}
if (${row.value}.isNullAt($index)) {
throw new RuntimeException($errMsgField);
}
final Object ${ev.value} = ${row.value}.get($index);
"""
ev.copy(code = code, isNull = "false")
}
}
/**
 * Validates the actual data type of the input expression at runtime. If it doesn't match the
 * expectation, throws an exception.
*/
case class ValidateExternalType(child: Expression, expected: DataType)
extends UnaryExpression with NonSQLExpression with ExpectsInputTypes {
override def inputTypes: Seq[AbstractDataType] = Seq(ObjectType(classOf[Object]))
override def nullable: Boolean = child.nullable
override def dataType: DataType = RowEncoder.externalDataTypeForInput(expected)
override def eval(input: InternalRow): Any =
throw new UnsupportedOperationException("Only code-generated evaluation is supported")
private val errMsg = s" is not a valid external type for schema of ${expected.simpleString}"
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
// Use unnamed reference that doesn't create a local field here to reduce the number of fields
// because errMsgField is used only when the type doesn't match.
val errMsgField = ctx.addReferenceMinorObj(errMsg)
val input = child.genCode(ctx)
val obj = input.value
val typeCheck = expected match {
case _: DecimalType =>
Seq(classOf[java.math.BigDecimal], classOf[scala.math.BigDecimal], classOf[Decimal])
.map(cls => s"$obj instanceof ${cls.getName}").mkString(" || ")
case _: ArrayType =>
s"$obj instanceof ${classOf[Seq[_]].getName} || $obj.getClass().isArray()"
case _ =>
s"$obj instanceof ${ctx.boxedType(dataType)}"
}
val code = s"""
${input.code}
${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
if (!${input.isNull}) {
if ($typeCheck) {
${ev.value} = (${ctx.boxedType(dataType)}) $obj;
} else {
throw new RuntimeException($obj.getClass().getName() + $errMsgField);
}
}
"""
ev.copy(code = code, isNull = input.isNull)
}
}
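// Illustrative sketch: with `expected = ArrayType(IntegerType)`, the check generated
// above reduces to Java of roughly this shape (names approximated):
//
//   if (!inputIsNull) {
//     if (obj instanceof scala.collection.Seq || obj.getClass().isArray()) {
//       result = (scala.collection.Seq) obj;
//     } else {
//       throw new RuntimeException(obj.getClass().getName() + errMsg);
//     }
//   }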
|
saturday-shi/spark
|
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala
|
Scala
|
apache-2.0
| 51,274 |
package org.apache.spark.core.server.actors
import java.util
import akka.actor.{Actor, ActorRef, ActorSelection, Props}
import akka.pattern.ask
import com.typesafe.config.{Config, ConfigFactory}
import org.apache.commons.lang.exception.ExceptionUtils
import org.apache.spark.core.response.{Context, Contexts}
import ContextManagerActor.{IsAwake, NoSuchContext, ContextAlreadyExists}
import org.apache.spark.core.utils.ActorUtils
import org.slf4j.LoggerFactory
import ContextManagerActor._
import JarActor.{GetJarsPathForAll, ResultJarsPathForAll}
import scala.collection.mutable
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.sys.process.{Process, ProcessBuilder}
import scala.util.Success
/**
* Context management messages
*/
object ContextManagerActor {
case class CreateContext(contextName: String, jars: String, config: Config)
case class ContextInitialized(port: String)
case class DeleteContext(contextName: String)
case class ContextProcessTerminated(contextName: String, statusCode: Int)
case class GetContext(contextName: String)
case class GetContextInfo(contextName: String)
case class GetAllContextsForClient()
case class GetAllContexts()
case class NoSuchContext()
case class ContextAlreadyExists()
case class DestroyProcess(process: Process)
case class IsAwake()
case class ContextInfo(contextName: String, sparkUiPort: String, @transient referenceActor: ActorSelection)
}
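// Hypothetical client-side sketch: the messages above form the actor's protocol; a web
// layer would typically drive it with the ask pattern (an implicit akka.util.Timeout
// and a `contextManager` ActorRef are assumed):
//
//   (contextManager ? CreateContext("etl-context", "etl-jobs.jar", config)).map {
//     case ctx: Context                    => // started; Spark UI on ctx.sparkUiPort
//     case ContextAlreadyExists            => // a context with that name already runs
//     case failed: ContextActor.FailedInit => // startup failed; see the failure reason
//   }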
/**
* Actor that creates, monitors and destroys contexts and corresponding processes.
* @param defaultConfig configuration defaults
 * @param jarActor actor responsible for jars which may be included in the context classpath
*/
class ContextManagerActor(defaultConfig: Config, jarActor: ActorRef) extends Actor {
val log = LoggerFactory.getLogger(getClass)
var lastUsedPort = getValueFromConfig(defaultConfig, "appConf.actor.systems.first.port", 11000)
var lastUsedPortSparkUi = getValueFromConfig(defaultConfig, "appConf.spark.ui.first.port", 16000)
val contextMap = new mutable.HashMap[String, ContextInfo]() with mutable.SynchronizedMap[String, ContextInfo]
val processMap = new mutable.HashMap[String, ActorRef]() with mutable.SynchronizedMap[String, ActorRef]
val sparkUIConfigPath: String = "spark.ui.port"
override def receive: Receive = {
case CreateContext(contextName, jars, config) =>
if (contextMap contains contextName) {
sender ! ContextAlreadyExists
} else if (jars.isEmpty) {
sender ! ContextActor.FailedInit("jars property is not defined or is empty.")
} else {
//adding the default configs
var mergedConfig = config.withFallback(defaultConfig)
//The port for the actor system
val port = ActorUtils.findAvailablePort(lastUsedPort)
lastUsedPort = port
//If not defined, setting the spark.ui port
if (!config.hasPath(sparkUIConfigPath)) {
mergedConfig = addSparkUiPortToConfig(mergedConfig)
}
val webSender = sender()
log.info(s"Received CreateContext message : context=$contextName jars=$jars")
val jarsFuture = jarActor ? GetJarsPathForAll(jars, contextName)
jarsFuture map {
case result @ ResultJarsPathForAll(pathForClasspath, pathForSpark) =>
log.info(s"Received jars path: $result")
val processBuilder = createProcessBuilder(contextName, port, pathForClasspath, mergedConfig)
val command = processBuilder.toString
log.info(s"Starting new process for context $contextName: '$command'")
val processActor = context.actorOf(Props(classOf[ContextProcessActor], processBuilder, contextName))
processMap += contextName -> processActor
val host = getValueFromConfig(defaultConfig, ActorUtils.HOST_PROPERTY_NAME, "127.0.0.1")
val actorRef = context.actorSelection(ActorUtils.getContextActorAddress(contextName, host, port))
sendInitMessage(contextName, port, actorRef, webSender, mergedConfig, pathForSpark)
} onFailure {
case e: Exception =>
log.error(s"Failed! ${ExceptionUtils.getStackTrace(e)}")
webSender ! e
}
}
case DeleteContext(contextName) =>
log.info(s"Received DeleteContext message : context=$contextName")
if (contextMap contains contextName) {
for (
contextInfo <- contextMap remove contextName;
processRef <- processMap remove contextName
) {
contextInfo.referenceActor ! ContextActor.ShutDown()
sender ! Success
// Terminate process
processRef ! ContextProcessActor.Terminate()
}
} else {
sender ! NoSuchContext
}
case ContextProcessTerminated(contextName, statusCode) =>
log.info(s"Received ContextProcessTerminated message : context=$contextName, statusCode=$statusCode")
contextMap remove contextName foreach {
case contextInfo: ContextInfo =>
        log.error(s"Removing context $contextName because its process exited with status code $statusCode")
contextInfo.referenceActor ! DeleteContext(contextName)
}
case GetContext(contextName) =>
log.info(s"Received GetContext message : context=$contextName")
if (contextMap contains contextName) {
sender ! contextMap(contextName).referenceActor
} else {
sender ! NoSuchContext
}
case GetContextInfo(contextName) =>
      log.info(s"Received GetContextInfo message : context=$contextName")
if (contextMap contains contextName) {
sender ! Context(contextName, contextMap(contextName).sparkUiPort)
} else {
sender ! NoSuchContext
}
case GetAllContextsForClient() =>
      log.info(s"Received GetAllContextsForClient message.")
sender ! Contexts(contextMap.values.map(contextInfo => Context(contextInfo.contextName, contextInfo.sparkUiPort)).toArray)
case GetAllContexts() =>
sender ! contextMap.values.map(_.referenceActor)
log.info(s"Received GetAllContexts message.")
}
def sendInitMessage(contextName: String, port: Int, actorRef: ActorSelection, sender: ActorRef, config: Config, jarsForSpark: List[String]): Unit = {
val sleepTime = getValueFromConfig(config, "appConf.init.sleep", 3000)
val tries = config.getInt("appConf.init.tries")
val retryTimeOut = config.getLong("appConf.init.retry-timeout") millis
val retryInterval = config.getLong("appConf.init.retry-interval") millis
val sparkUiPort = config.getString(sparkUIConfigPath)
context.system.scheduler.scheduleOnce(sleepTime millis) {
val isAwakeFuture = context.actorOf(ReTry.props(tries, retryTimeOut, retryInterval, actorRef)) ? IsAwake
isAwakeFuture.map {
case isAwake =>
        log.info(s"Remote context actor is awake: $isAwake")
val initializationFuture = actorRef ? ContextActor.Initialize(contextName, config, jarsForSpark)
initializationFuture map {
case success: ContextActor.Initialized =>
log.info(s"Context '$contextName' initialized: $success")
contextMap += contextName -> ContextInfo(contextName, sparkUiPort, actorRef)
sender ! Context(contextName, sparkUiPort)
case error @ ContextActor.FailedInit(reason) =>
log.error(s"Init failed for context $contextName", reason)
sender ! error
processMap.remove(contextName).get ! ContextProcessActor.Terminate()
} onFailure {
case e: Exception =>
log.error("FAILED to send init message!", e)
sender ! ContextActor.FailedInit(ExceptionUtils.getStackTrace(e))
processMap.remove(contextName).get ! ContextProcessActor.Terminate()
}
} onFailure {
case e: Exception =>
        log.error("Gave up waiting for the remote actor; considering it dead!", e)
sender ! ContextActor.FailedInit(ExceptionUtils.getStackTrace(e))
}
}
}
def addSparkUiPortToConfig(config: Config): Config = {
lastUsedPortSparkUi = ActorUtils.findAvailablePort(lastUsedPortSparkUi)
val map = new util.HashMap[String, String]()
map.put(sparkUIConfigPath, lastUsedPortSparkUi.toString)
val newConf = ConfigFactory.parseMap(map)
newConf.withFallback(config)
}
def createProcessBuilder(contextName: String, port: Int, jarsForClasspath: String, config: Config): ProcessBuilder = {
val scriptPath = ContextManagerActor.getClass.getClassLoader.getResource("context_start.sh").getPath
val xmxMemory = getValueFromConfig(config, "driver.xmxMemory", "1g")
// Create context process directory
val processDirName = new java.io.File(defaultConfig.getString("context.contexts-base-dir")).toString + s"/$contextName"
Process(scriptPath, Seq(jarsForClasspath, contextName, port.toString, xmxMemory, processDirName))
}
}
|
linzhe/matrix
|
src/main/scala/org/apache/spark/core/server/actors/ContextManagerActor.scala
|
Scala
|
apache-2.0
| 9,006 |
/**
* Copyright (C) 2014-2015 Really Inc. <http://really.io>
*/
package io.really.model.materializer
import akka.actor.{ FSM, Stash, ActorLogging }
import akka.persistence.{ SaveSnapshotFailure, SaveSnapshotSuccess, SnapshotOffer, PersistentView }
import io.really.CommandError.OperationPartiallyComplete
import io.really._
import _root_.io.really.model.materializer.CollectionViewMaterializer.{ MaterializerData, MaterializerState }
import _root_.io.really.model.{ ReferenceField, FieldKey, Model, Helpers }
import _root_.io.really.model.persistent.ModelRegistry.ModelOperation
import _root_.io.really.gorilla._
import _root_.io.really.model.CollectionActor.CollectionActorEvent
import _root_.io.really.model.materializer.MongoStorage._
import _root_.io.really.protocol.UpdateCommand
import _root_.io.really.protocol.UpdateOp
import play.api.libs.json.{ JsNumber, Json, JsObject }
import akka.pattern.pipe
/**
 * The collection view materializer is an Akka persistent view over the Collection Actor's
 * journal. It receives a message whenever a collection bucket is updated, so that it can
 * replay the ordered events and generate a set of projections optimized for user queries.
 *
 * The materializer is a PersistentView that replays the journal log and restores
 * the last snapshot, constructing a projection database that is purely optimized
 * for read performance, and it sends the events on to the Gorilla Event Center.
*
* @param globals
*/
class CollectionViewMaterializer(val globals: ReallyGlobals) extends PersistentView
with FSM[MaterializerState, MaterializerData] with ActorLogging with Stash with MongoStorage {
import CollectionViewMaterializer._
/**
* Bucket Id is used as identifier for a set of the objects in this collection
*/
val bucketID: BucketID = self.path.name
/**
   * r is the identifier (R) that represents this collection
*/
val r: R = Helpers.getRFromBucketID(bucketID)
/**
   * messageMarker marks the last message consumed and persisted in the DB projection
*/
private var messageMarker: Long = 0L
implicit val ec = context.dispatcher
override def viewId: String = s"view_materializer${bucketID}"
override def persistenceId: String = bucketID
/**
* Returns `lastSequenceNr`.
*/
override def lastSequenceNr: Long = messageMarker
/**
* The Materializer does not fetch events from the journal automatically,
* Collection Actor must explicitly update this view by sending [[Envelope]] request
*/
override def autoUpdate: Boolean = false
log.debug(s"CollectionViewMaterializer with viewId: $viewId for CollectionActor with persistentId: $persistenceId starting with BucketID: $bucketID and R: $r")
/**
   * materializerCurrentState holds the view's current state; it is used for debugging and testing
*/
private var _materializerCurrentState: MaterializerDebuggingState = _
/**
   * Returns `materializerCurrentState`
*/
def materializerCurrentState = _materializerCurrentState
override def preStart() = {
//create indexes
defaultIndexes map (createIndex)
_materializerCurrentState = MaterializerDebuggingState(None, None, lastSequenceNr, "without-model")
super.preStart()
}
startWith(Initialization, Empty)
when(Initialization)(handleRecover orElse handleModelCreated orElse stashMessage)
when(WithModel)(handleModelOperations orElse handleCollectionEvents orElse handleInternalRequest orElse handleSnapshotResponse)
when(WaitingDBOperation)(handleDBResponses orElse stashMessage)
when(WithingReferenceField)(handleReferenceFieldOperations orElse stashMessage)
initialize()
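  // Summary of the FSM wiring above:
  //   Initialization --ModelCreated / snapshot offer--> WithModel
  //   WithModel --Created/Updated/Deleted event--> WaitingDBOperation (no reference fields)
  //                                            --> WithingReferenceField (reference fields present)
  //   WithingReferenceField --all referenced objects received--> WaitingDBOperation
  //   WaitingDBOperation --OperationSucceeded / OperationFailed--> WithModel
  // Messages that arrive in the wrong state are stashed and replayed on transition.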
/**
   * Handles messages while the CollectionViewMaterializer restarts and replays a snapshot
* @return
*/
def handleRecover: StateFunction = {
case Event(SaveSnapshotSuccess(metadata), _) =>
log.debug("Snapshot was taken successfully")
stay
case Event(SaveSnapshotFailure(metadata, cause), _) =>
log.warning("Snapshot was NOT taken: {}", cause)
stay
case Event(SnapshotOffer(metadata, snapshot: SnapshotData), _) =>
log.debug(s"Current state for CollectionViewMaterializer with viewId: $viewId for CollectionActor with " +
s"persistentId: $persistenceId: $materializerCurrentState")
messageMarker = snapshot.marker
_materializerCurrentState = _materializerCurrentState.copy(model = Some(snapshot.model), actorState = "with-model")
unstashAll()
goto(WithModel) using ModelData(snapshot.model, snapshot.referencedCollections)
}
/**
   * Handles the [[ModelOperation.ModelCreated]] message
*/
def handleModelCreated: StateFunction = {
case Event(ModelOperation.ModelCreated(r, model, refCollections), _) =>
log.debug(s"CollectionViewMaterializer with viewId: $viewId for CollectionActor with persistentId: $persistenceId" +
s" receive the model for r: $r")
log.debug(s"Current state for CollectionViewMaterializer with viewId: $viewId for CollectionActor with " +
s"persistentId: $persistenceId: $materializerCurrentState")
// TODO create ReferenceUpdater Actor
messageMarker = super.lastSequenceNr
_materializerCurrentState = _materializerCurrentState.copy(
model = Some(model), lastModelOp = Some("ModelCreated"), lastSequenceNr = lastSequenceNr, actorState = "with-model"
)
unstashAll()
goto(WithModel) using ModelData(model, refCollections.toSet)
}
/**
   * Handles model operation messages such as
   * [[ModelOperation.ModelUpdated]] and [[ModelOperation.ModelDeleted]]
* @return
*/
def handleModelOperations: StateFunction = {
case Event(ModelOperation.ModelUpdated(r, m, refCollections), _) if isPersistent =>
log.debug(s"CollectionViewMaterializer with viewId: $viewId for CollectionActor with persistentId: $persistenceId" +
s" receive new version for model with r: $r")
log.debug(s"Current state for CollectionViewMaterializer with viewId: $viewId for CollectionActor with " +
s"persistentId: $persistenceId: $materializerCurrentState")
messageMarker = super.lastSequenceNr
_materializerCurrentState = _materializerCurrentState.copy(
model = Some(m), lastModelOp = Some("ModelUpdated"), lastSequenceNr = lastSequenceNr
)
notifyGorillaEventCenter(ModelUpdatedEvent(bucketID, m))
stay using ModelData(m, refCollections.toSet)
case Event(ModelOperation.ModelDeleted(r), _) if isPersistent =>
// TODO send clear message to Cleaner to delete all objects on this collection
// TODO send Terminate to ReferenceUpdater
notifyGorillaEventCenter(ModelDeletedEvent(bucketID))
shutdown()
stay
}
/**
   * Handles collection events
* @return
*/
def handleCollectionEvents: StateFunction = {
case Event(evt @ CollectionActorEvent.Created(r, obj, modelVersion, reqContext, _, _), ModelData(model, referencedCollections)) if isPersistent =>
log.debug(s"CollectionViewMaterializer with viewId: $viewId for CollectionActor with persistentId: $persistenceId " +
s"receive create event for obj with R: $r")
log.debug(s"Current state for CollectionViewMaterializer with viewId: $viewId for CollectionActor with " +
s"persistentId: $persistenceId: $materializerCurrentState")
val currentSequence = super.lastSequenceNr
val referenceFields = getReferenceField(model, obj)
if (referenceFields.isEmpty) {
saveObject(obj, model) pipeTo self
goto(WaitingDBOperation) using DBOperationInfo(
DBOperation.Insert,
None,
model,
referencedCollections,
evt,
currentSequence
)
} else {
val expected = askAboutReferenceFields(referenceFields)
goto(WithingReferenceField) using ReferenceFieldsData(
DBOperation.Insert,
obj,
model,
referencedCollections,
Map.empty,
expected,
evt,
currentSequence
)
}
case Event(evt @ CollectionActorEvent.Updated(r, ops, rev, modelVersion, reqContext, _, _), ModelData(model, referencedCollections)) if isPersistent =>
log.debug(s"CollectionViewMaterializer with viewId: $viewId for CollectionActor with persistentId: $persistenceId " +
s"receive update event for obj with R: $r")
log.debug(s"Current state for CollectionViewMaterializer with viewId: $viewId for CollectionActor with " +
s"persistentId: $persistenceId: $materializerCurrentState")
getObject(r) pipeTo self
goto(WaitingDBOperation) using DBOperationInfo(DBOperation.Get, Some(DBOperation.Update), model, referencedCollections, evt, super.lastSequenceNr)
case Event(evt @ CollectionActorEvent.Deleted(r, newRev, modelVersion, reqContext, _, _), ModelData(model, referencedCollections)) if isPersistent =>
log.debug("CollectionViewMaterializer with viewId: {} for CollectionActor with persistentId: {} " +
"receive delete event for obj with R: {}", viewId, persistenceId, r)
log.debug("Current state for CollectionViewMaterializer with viewId: {} for CollectionActor with " +
"persistentId: {}: {}", viewId, persistenceId, materializerCurrentState)
getObject(r) pipeTo self
goto(WaitingDBOperation) using DBOperationInfo(DBOperation.Get, Some(DBOperation.Delete), model, referencedCollections, evt, super.lastSequenceNr)
}
/**
* This function is responsible for handling requests between materializer view actors
* @return
*/
def handleInternalRequest: StateFunction = {
case Event(GetObject(_, r), _) =>
val requester = sender()
getObject(r) map (requester ! _)
stay
}
def handleSnapshotResponse: StateFunction = {
case Event(SaveSnapshotSuccess(metadata), _) =>
log.debug(s"Materializer received SaveSnapshotSuccess for snapshot: ${metadata}")
stay()
case Event(SaveSnapshotFailure(metadata, failure), _) =>
log.error(failure, s"Materializer received SaveSnapshotFailure for snapshot: ${metadata}")
stay()
}
def handleDBResponses: StateFunction = {
case Event(OperationSucceeded(r, obj), DBOperationInfo(DBOperation.Insert, None, model, referencedCollections, event: CollectionActorEvent.Created, currentSequence)) =>
persistAndNotifyGorilla(PersistentCreatedEvent(event), currentSequence, model, referencedCollections)
event.bySender ! event.response
unstashAll()
goto(WithModel) using ModelData(model, referencedCollections)
case Event(OperationSucceeded(r, obj), DBOperationInfo(DBOperation.Get, Some(DBOperation.Update), model, referencedCollections, event: CollectionActorEvent.Updated, currentSequence)) =>
val (referencedOps, ops) = event.ops.partition(o => getModelReferenceField(model).contains(o.key))
if (referencedOps.isEmpty) {
val newObj = applyUpdateOps(obj, ops)
updateObject(newObj, event.rev, event.modelVersion) pipeTo self
stay using (DBOperationInfo(DBOperation.Update, None, model, referencedCollections, event, currentSequence))
} else {
val expected = askAboutReferenceFields(referencedOps.map(o => (o.key, o.value.as[R])).toMap)
val newObj = applyUpdateOps(obj, ops)
goto(WithingReferenceField) using ReferenceFieldsData(
DBOperation.Update,
newObj,
model,
referencedCollections,
Map.empty,
expected,
event,
currentSequence
)
}
case Event(OperationSucceeded(r, obj), DBOperationInfo(DBOperation.Get, Some(DBOperation.Delete), model, referencedCollections, event: CollectionActorEvent.Deleted, currentSequence)) =>
val newObj = obj.copy(obj.fields.filter(_._1.startsWith("_"))) ++ Json.obj(
Model.DeletedField -> true
)
deleteObject(newObj, event.rev, event.modelVersion) pipeTo self
stay using (DBOperationInfo(DBOperation.Delete, None, model, referencedCollections, event, currentSequence))
case Event(OperationSucceeded(r, obj), DBOperationInfo(DBOperation.Update, None, model, referencedCollections, event: CollectionActorEvent.Updated, currentSequence)) =>
persistAndNotifyGorilla(PersistentUpdatedEvent(event, obj), currentSequence, model, referencedCollections)
event.bySender ! event.response
//TODO notify to update any object refer to this object
unstashAll()
goto(WithModel) using ModelData(model, referencedCollections)
case Event(OperationSucceeded(r, obj), DBOperationInfo(DBOperation.Delete, None, model, referencedCollections, event: CollectionActorEvent.Deleted, currentSequence)) =>
persistAndNotifyGorilla(PersistentDeletedEvent(event), currentSequence, model, referencedCollections)
event.bySender ! event.response
//TODO notify to update any object refer to this object
unstashAll()
goto(WithModel) using ModelData(model, referencedCollections)
case Event(OperationFailed(_, failure), DBOperationInfo(_, _, model, referencedCollections, event, _)) =>
context.parent ! failure
//TODO: Should inform event.bySender that we have failed!
event.bySender ! OperationPartiallyComplete(event.r, "view is not updated")
shutdown()
unstashAll()
goto(WithModel) using ModelData(model, referencedCollections)
}
def handleReferenceFieldOperations: StateFunction = {
case e @ Event(OperationSucceeded(r, obj), data @ ReferenceFieldsData(_, _, _, _, _, expected, _, _)) if !expected.contains(r) =>
      log.warning("Unexpected reference; probably a coding bug. Event was: {}", e)
stay
case Event(OperationSucceeded(r, obj), data @ ReferenceFieldsData(DBOperation.Insert, _, _, _, received, expected, _, _)) =>
val referencesObjects = received + (expected(r) -> obj)
val newExpected = expected - r
if (newExpected.isEmpty) {
val newObj = writeReferenceField(data.obj, referencesObjects, data.model)
saveObject(newObj, data.model) pipeTo self
unstashAll()
goto(WaitingDBOperation) using DBOperationInfo(
data.operation,
None,
data.model,
data.referencedCollections,
data.collectionEvent,
data.currentMessageNum
)
} else {
stay using data.copy(received = referencesObjects, expected = newExpected)
}
case Event(OperationSucceeded(r, obj), data @ ReferenceFieldsData(DBOperation.Update, _, _, _, received, expected, event: CollectionActorEvent.Updated, _)) =>
val referencesObjects = received + (expected(r) -> obj)
val newExpected = expected - r
if (newExpected.isEmpty) {
val newObj = writeReferenceField(data.obj, referencesObjects, data.model)
updateObject(newObj, event.rev, event.modelVersion) pipeTo self
unstashAll()
goto(WaitingDBOperation) using DBOperationInfo(
data.operation,
None,
data.model,
data.referencedCollections,
data.collectionEvent,
data.currentMessageNum
)
} else {
stay using data.copy(received = referencesObjects, expected = newExpected)
}
}
  /**
   * Applies the update operations to the object, in order.
   */
  private def applyUpdateOps(obj: JsObject, ops: List[UpdateOp]): JsObject =
    ops.foldLeft(obj) {
      case (acc, UpdateOp(UpdateCommand.Set, key, value, _)) =>
        acc ++ Json.obj(key -> value)
      case (acc, UpdateOp(UpdateCommand.AddNumber, key, JsNumber(v), _)) =>
        acc ++ Json.obj(key -> ((acc \ key).as[JsNumber].value + v))
    }
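  // Worked example: with
  //   obj = {"a": 1, "b": 2}
  //   ops = List(UpdateOp(Set, "a", JsNumber(5), _), UpdateOp(AddNumber, "b", JsNumber(3), _))
  // the fold applies the ops in order: {"a": 5, "b": 2} after the Set,
  // then {"a": 5, "b": 5} after the AddNumber.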
/**
* notify gorillaEventCenter
*/
def notifyGorillaEventCenter(event: ModelEvent) = globals.gorillaEventCenter ! event
/**
* This function is responsible for stashing message if it should handle on another state
* @return
*/
def stashMessage: StateFunction = {
case msg =>
stash()
stay()
}
/**
* send event to gorilla event center and create snapshot
* @param evt
* @param currentSequence
* @param model
* @param referencedCollections
*/
private def persistAndNotifyGorilla(evt: PersistentEvent, currentSequence: Long, model: Model, referencedCollections: Set[R]) = {
globals.gorillaEventCenter ! evt
messageMarker = currentSequence
_materializerCurrentState = _materializerCurrentState.copy(lastSequenceNr = lastSequenceNr)
takeSnapshot(model, referencedCollections, currentSequence)
}
/**
* save snapshot for this view
*/
private def takeSnapshot(model: Model, referencedCollections: Set[R], marker: Long): Unit = {
messageMarker = marker
saveSnapshot(SnapshotData(model, referencedCollections, marker))
}
/**
   * Shuts down the materializer
*/
private def shutdown(): Unit = {
context.stop(self)
}
/**
   * Extracts the reference fields from the object, based on the model schema
* @param model
* @param obj
* @return
*/
def getReferenceField(model: Model, obj: JsObject): Map[FieldKey, R] =
model.fields collect {
case (key, ReferenceField(_, true, _, _)) =>
        key -> (obj \ key).as[R]
      case (key, ReferenceField(_, false, _, _)) if ((obj \ key).asOpt[R]).isDefined =>
        key -> (obj \ key).as[R]
}
def getModelReferenceField(model: Model): Map[FieldKey, ReferenceField] =
model.fields collect {
case f @ (key, rf: ReferenceField) => key -> rf
}
/**
   * Sends messages to fetch the referenced object for every reference field
* @param fields
*/
def askAboutReferenceFields(fields: Map[FieldKey, R]): Map[R, FieldKey] =
fields.map { f =>
globals.materializerView ! GetObject(Helpers.getBucketIDFromR(f._2)(globals.config), f._2)
f._2 -> f._1
}
def writeReferenceField(obj: JsObject, referencedFields: Map[FieldKey, JsObject], model: Model): JsObject = {
val dereferenceFields = referencedFields map {
field =>
val fields = (model.fields(field._1).asInstanceOf[ReferenceField].fields ++ List(Model.RField, Model.RevisionField)).toSet
val obj = JsObject(field._2.value.filter(f => fields.contains(f._1)).toSeq)
        val _r = (field._2 \ "_r").as[R]
field._1 -> Json.obj("value" -> _r, "ref" -> obj)
}
obj deepMerge JsObject(dereferenceFields.toSeq)
}
}
object CollectionViewMaterializer {
sealed trait MaterializerState
case object Initialization extends MaterializerState
case object WithModel extends MaterializerState
case object WaitingDBOperation extends MaterializerState
case object WithingReferenceField extends MaterializerState
sealed trait MaterializerData
case object Empty extends MaterializerData
case class ModelData(model: Model, referencedCollections: Set[R]) extends MaterializerData
case class DBOperationInfo(
operation: DBOperation,
nextOperation: Option[DBOperation],
model: Model,
referencedCollections: Set[R],
collectionEvent: CollectionActorEvent,
currentMessageNum: Long
) extends MaterializerData
case class ReferenceFieldsData(
operation: DBOperation,
obj: JsObject,
model: Model,
referencedCollections: Set[R],
received: Map[FieldKey, JsObject],
expected: Map[R, FieldKey],
collectionEvent: CollectionActorEvent,
currentMessageNum: Long
) extends MaterializerData
trait RoutableToMaterializer {
def bucketId: BucketID
}
case class GetObject(bucketId: BucketID, r: R) extends RoutableToMaterializer
case class Envelope(bucketId: BucketID, message: Any) extends RoutableToMaterializer
case class SnapshotData(model: Model, referencedCollections: Set[R], marker: Long)
case class MaterializerDebuggingState(model: Option[Model], lastModelOp: Option[String], lastSequenceNr: Long, actorState: String)
}
|
reallylabs/really
|
modules/really-core/src/main/scala/io/really/model/materializer/CollectionViewMaterializer.scala
|
Scala
|
apache-2.0
| 20,147 |
package fileSearcher
import java.io.File
/** Converts a [[java.io.File]] into the matching IOObject wrapper. */
object FileConverter {
  /** Returns a DirectoryObject for directories and a FileObject for regular files. */
  def convertToIOObject(file: File) =
    if (file.isDirectory) DirectoryObject(file)
    else FileObject(file)
}
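// Hypothetical usage sketch, assuming the DirectoryObject/FileObject wrappers defined
// elsewhere in this package:
//
//   FileConverter.convertToIOObject(new File("/tmp"))        // => DirectoryObject(...)
//   FileConverter.convertToIOObject(new File("/tmp/a.txt"))  // => FileObject(...)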
|
sagasu/scalaPlayground
|
sbt/src/main/scala-2.11/fileSearcher/FileConverter.scala
|
Scala
|
apache-2.0
| 181 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp.
package scala
/** A function of 5 parameters.
*
*/
trait Function5[-T1, -T2, -T3, -T4, -T5, +R] extends AnyRef { self =>
/** Apply the body of this function to the arguments.
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5): R
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5) == apply(x1, x2, x3, x4, x5)`
*/
@annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5) => self.apply(x1, x2, x3, x4, x5)).curried
}
/** Creates a tupled version of this function: instead of 5 arguments,
* it accepts a single [[scala.Tuple5]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5)) == f(Tuple5(x1, x2, x3, x4, x5)) == apply(x1, x2, x3, x4, x5)`
*/
@annotation.unspecialized def tupled: ((T1, T2, T3, T4, T5)) => R = {
case ((x1, x2, x3, x4, x5)) => apply(x1, x2, x3, x4, x5)
}
override def toString(): String = "<function5>"
}
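// Example: for
//   val f: (Int, Int, Int, Int, Int) => Int = _ + _ + _ + _ + _
// the conversions above satisfy
//   f.curried(1)(2)(3)(4)(5) == f.tupled((1, 2, 3, 4, 5)) == f(1, 2, 3, 4, 5) == 15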
|
lrytz/scala
|
src/library/scala/Function5.scala
|
Scala
|
apache-2.0
| 1,437 |
/**
* Copyright 2011-2012 eBusiness Information, Groupe Excilys (www.excilys.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.excilys.ebi.gatling.recorder.scenario
class TagElement(text: String) extends ScenarioElement {
override def toString = "//" + text
}
|
Tjoene/thesis
|
Case_Programs/gatling-1.4.0/gatling-recorder/src/main/scala/com/excilys/ebi/gatling/recorder/scenario/TagElement.scala
|
Scala
|
gpl-2.0
| 790 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import kafka.cluster.BrokerEndPoint
class ReplicaAlterLogDirsManager(brokerConfig: KafkaConfig,
replicaManager: ReplicaManager,
quotaManager: ReplicationQuotaManager,
brokerTopicStats: BrokerTopicStats)
extends AbstractFetcherManager[ReplicaAlterLogDirsThread](
name = s"ReplicaAlterLogDirsManager on broker ${brokerConfig.brokerId}",
clientId = "ReplicaAlterLogDirs",
numFetchers = brokerConfig.getNumReplicaAlterLogDirsThreads) {
override def createFetcherThread(fetcherId: Int, sourceBroker: BrokerEndPoint): ReplicaAlterLogDirsThread = {
val threadName = s"ReplicaAlterLogDirsThread-$fetcherId"
new ReplicaAlterLogDirsThread(threadName, sourceBroker, brokerConfig, failedPartitions, replicaManager,
quotaManager, brokerTopicStats)
}
def shutdown() {
info("shutting down")
closeAllFetchers()
info("shutdown completed")
}
}
|
KevinLiLu/kafka
|
core/src/main/scala/kafka/server/ReplicaAlterLogDirsManager.scala
|
Scala
|
apache-2.0
| 1,816 |
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest._
object F {
def add(a: Int, b: Int) = a + b
}
@RunWith(classOf[JUnitRunner])
class ScalaTestFunSuite extends FunSuite {
test("add(1, 1) == 2") {
assert(F.add(1, 1) == 2)
}
}
@RunWith(classOf[JUnitRunner])
class ScalaTestFlatSpec extends FlatSpec {
"add(1, 1)" should "return 2" in {
assert(F.add(1, 1) == 2)
}
}
@RunWith(classOf[JUnitRunner])
class ScalaTestFunSpec extends FunSpec {
describe("add(1, 1)") {
it("should return 2") {
assert(F.add(1, 1) == 2)
}
}
}
|
Codewars/codewars-runner
|
frameworks/gradle/src/test/scala/ScalaTestJUnitRunner.scala
|
Scala
|
mit
| 597 |
/**
* Copyright (C) 2007 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.xforms.submission
import org.orbeon.oxf.util.{ConnectionResult, XPathCache}
import org.orbeon.oxf.xforms.XFormsContainingDocument
import org.orbeon.oxf.xforms.action.XFormsActions
import org.orbeon.oxf.xforms.event.Dispatch
import org.orbeon.oxf.xforms.event.events.{ErrorType, XFormsSubmitErrorEvent}
import org.orbeon.oxf.xforms.model.DataModel
import org.orbeon.oxf.xforms.model.DataModel._
import org.orbeon.saxon.om.NodeInfo
/**
* Handle replace="text".
*/
class TextReplacer(submission: XFormsModelSubmission, containingDocument: XFormsContainingDocument)
extends BaseReplacer(submission, containingDocument) {
private var responseBody: String = _
def deserialize(
connectionResult : ConnectionResult,
p : SubmissionParameters,
p2 : SecondPassParameters
): Unit =
connectionResult.readTextResponseBody match {
case Some(responseBody) ⇒
this.responseBody = responseBody
case None ⇒
// Non-text/non-XML result
// Don't store anything for now as per the spec, but we could do something better by going beyond the spec
// NetUtils.inputStreamToAnyURI(pipelineContext, connectionResult.resultInputStream, NetUtils.SESSION_SCOPE);
// XForms 1.1: "For a success response including a body that is both a non-XML media type (i.e. with a
// content type not matching any of the specifiers in [RFC 3023]) and a non-text type (i.e. with a content
// type not matching text/*), when the value of the replace attribute on element submission is "text",
// nothing in the document is replaced and submission processing concludes after dispatching
// xforms-submit-error with appropriate context information, including an error-type of resource-error."
val message =
connectionResult.mediatype match {
case Some(mediatype) ⇒ s"""Mediatype is neither text nor XML for replace="text": $mediatype"""
case None ⇒ s"""No mediatype received for replace="text""""
}
throw new XFormsSubmissionException(
submission,
message,
"reading response body",
new XFormsSubmitErrorEvent(
submission,
ErrorType.ResourceError,
connectionResult
)
)
}
def replace(
connectionResult : ConnectionResult,
p : SubmissionParameters,
p2 : SecondPassParameters
): Runnable = {
// XForms 1.1: "If the replace attribute contains the value "text" and the submission response conforms to an
// XML mediatype (as defined by the content type specifiers in [RFC 3023]) or a text media type (as defined by
// a content type specifier of text/*), then the response data is encoded as text and replaces the content of
// the replacement target node."
// XForms 1.1: "If the processing of the targetref attribute fails, then submission processing ends after
// dispatching the event xforms-submit-error with an error-type of target-error."
def throwSubmissionException(message: String) =
throw new XFormsSubmissionException(
submission,
message,
"processing targetref attribute",
new XFormsSubmitErrorEvent(
submission,
ErrorType.TargetError,
connectionResult
)
)
// Find target location
val destinationNodeInfo =
submission.staticSubmission.targetrefOpt match {
case Some(targetRef) ⇒
// Evaluate destination node
XPathCache.evaluateSingleWithContext(
xpathContext = p.refContext.xpathContext,
contextItem = p.refContext.refNodeInfo,
xpathString = targetRef,
reporter = containingDocument.getRequestStats.addXPathStat
) match {
case n: NodeInfo ⇒ n
case _ ⇒ throwSubmissionException("""targetref attribute doesn't point to a node for replace="text".""")
}
case None ⇒
// Use default destination
submission.findReplaceInstanceNoTargetref(p.refContext.refInstanceOpt).rootElement
}
def handleSetValueSuccess(oldValue: String) =
DataModel.logAndNotifyValueChange(
containingDocument = containingDocument,
source = "submission",
nodeInfo = destinationNodeInfo,
oldValue = oldValue,
newValue = responseBody,
isCalculate = false,
collector = Dispatch.dispatchEvent)(
containingDocument.getIndentedLogger(XFormsActions.LOGGING_CATEGORY)
)
def handleSetValueError(reason: Reason) =
throwSubmissionException(
reason match {
case DisallowedNodeReason ⇒
"""targetref attribute doesn't point to an element without children or to an attribute for replace="text"."""
case ReadonlyNodeReason ⇒
"""targetref attribute points to a readonly node for replace="text"."""
}
)
// Set value into the instance
    // NOTE: Here we decided to use the actions logger, for compatibility with xf:setvalue. Is there anything we
    // would like to log in "submission" mode?
DataModel.setValueIfChanged(
nodeInfo = destinationNodeInfo,
newValue = responseBody,
onSuccess = handleSetValueSuccess,
onError = handleSetValueError
)
// Dispatch xforms-submit-done
submission.sendSubmitDone(connectionResult)
}
}
|
brunobuzzi/orbeon-forms
|
xforms/jvm/src/main/scala/org/orbeon/oxf/xforms/submission/TextReplacer.scala
|
Scala
|
lgpl-2.1
| 6,243 |
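A minimal, self-contained sketch of the success/error callback shape that setValueIfChanged follows above. The toy Node type and the checks below are assumptions for illustration, not Orbeon's DataModel implementation.
object TextReplaceSketch {
  sealed trait Reason
  case object ReadonlyNodeReason   extends Reason
  case object DisallowedNodeReason extends Reason
  final class Node(var value: String, val readonly: Boolean, val hasChildElements: Boolean)
  // Write newValue into the node unless it is read-only or has element
  // children; report the old value on success, a typed reason on failure.
  def setValueIfChanged(
    node      : Node,
    newValue  : String,
    onSuccess : String => Unit,
    onError   : Reason => Unit
  ): Unit =
    if (node.hasChildElements) onError(DisallowedNodeReason)
    else if (node.readonly) onError(ReadonlyNodeReason)
    else if (node.value != newValue) {
      val oldValue = node.value
      node.value = newValue
      onSuccess(oldValue)
    }
}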
/*
* Copyright 2014 IBM Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibm.spark.kernel.protocol.v5.client.execution
import com.ibm.spark.kernel.protocol.v5.UUID
case class DeferredExecutionTuple(id: UUID, de: DeferredExecution)
|
codeaudit/spark-kernel
|
client/src/main/scala/com/ibm/spark/kernel/protocol/v5/client/execution/DeferredExecutionTuple.scala
|
Scala
|
apache-2.0
| 767 |
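For illustration, constructing and destructuring such a tuple; the stub classes below are assumptions that mirror the original (with UUID taken to be a String alias, as in the kernel protocol package).
object DeferredExecutionTupleExample {
  class DeferredExecution // hypothetical stub for the real class
  case class DeferredExecutionTuple(id: String, de: DeferredExecution)
  val tuple = DeferredExecutionTuple(id = "5f2b0a", de = new DeferredExecution)
  val DeferredExecutionTuple(uuid, execution) = tuple // via the generated unapply
}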