| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
| string (lengths 5-1M) | string (lengths 5-109) | string (lengths 6-208) | string (1 class) | string (15 classes) | int64 (5-1M) |
package org.ai4fm.proofprocess.zeves.core.analysis
import scala.collection.JavaConverters._
import net.sourceforge.czt.base.{ast => z}
import net.sourceforge.czt.z.ast._
import net.sourceforge.czt.zeves.util.Factory
/**
* Sub-term computation utilities for CZT terms.
*
* @author Andrius Velykis
*/
object CztSubTerms {
lazy val factory = new Factory
def subTerms(obj: AnyRef, maxDepth: Int): List[z.Term] =
// collect the subterms and reverse the result (it is accumulated backwards)
collectSubTerms(maxDepth, 0)(Nil, obj).distinct.reverse
private def collectSubTerms(maxDepth: Int, depth: Int)(
acc: List[z.Term],
obj: AnyRef): List[z.Term] =
if (depth > maxDepth) {
acc
} else obj match {
case AndTerm(andPred, andTerms) =>
collectSubTermList(andPred :: acc, andTerms, maxDepth, depth)
case SubTerm(zTerm) =>
collectSubTermList(zTerm :: acc, zTerm.getChildren, maxDepth, depth)
case zObj: z.Term =>
collectSubTermList(acc, zObj.getChildren, maxDepth, depth)
case _ => acc
}
private def collectSubTermList(acc: List[z.Term],
objs: Seq[AnyRef],
maxDepth: Int,
depth: Int): List[z.Term] =
(objs foldLeft acc)(collectSubTerms(maxDepth, depth + 1))
def diffSubTerms(t1: z.Term, t2: z.Term): (z.Term, z.Term) = (t1, t2) match {
case (AndTerm(_, al1), AndTerm(_, al2)) => {
val (same, diff1, diff2) = diffsQnt(al1, al2)
// add placeholder if there are same assumptions that will be hidden
val sameAnds = if (same.isEmpty) Nil else List(CztSchemaTerms.createPredPlaceholder())
(AndTerm(sameAnds ::: diff1).get, AndTerm(sameAnds ::: diff2).get)
}
case (p1: Pred, p2: Pred) if sameClass(p1, p2) && (qntEq(p1) == qntEq(p2)) => {
// match - use placeholders
val p = CztSchemaTerms.createPredPlaceholder
(p, p)
}
case (t1: z.Term, t2: z.Term) if sameClass(t1, t2) => {
// decompose
val children1 = t1.getChildren
val children2 = t2.getChildren
if (children1.length == children2.length) {
val newChildren = (children1.toList zip children2.toList) map diffAny
val (new1, new2) = newChildren.unzip
(t1.create(new1.toArray), t2.create(new2.toArray))
} else {
(t1, t2)
}
}
case (unknown1, unknown2) => (unknown1, unknown2)
}
private def diffAny(objPair: (AnyRef, AnyRef)): (AnyRef, AnyRef) = objPair match {
case (t1: z.Term, t2: z.Term) => diffSubTerms(t1, t2)
case other => other
}
private def sameClass(o1: AnyRef, o2: AnyRef): Boolean = o1.getClass == o2.getClass
private def diffPred(e1: Pred, e2: Pred): (Option[Pred], Option[Pred]) =
if (qntEq(e1) == qntEq(e2)) {
val p = CztSchemaTerms.createPredPlaceholder
(Some(p), Some(p))
} else {
(Some(e1), Some(e2))
}
private def diffsQnt[T <: z.Term](l1: List[T], l2: List[T]): (List[T], List[T], List[T]) = {
val qntL1 = l1 map qntEq
val qntL2 = l2 map qntEq
val (sameQnt, diffQnt1, diffQnt2) = diffs(qntL1, qntL2)
val same = sameQnt map (_.term)
val diff1 = diffQnt1 map (_.term)
val diff2 = diffQnt2 map (_.term)
(same, diff1, diff2)
}
private def diffs[A](l1: List[A], l2: List[A]): (List[A], List[A], List[A]) = {
val same = l1 intersect l2
val diffL1 = l1 diff same
val diffL2 = l2 diff same
(same, diffL1, diffL2)
}
private def impPred(assms: Option[Pred], goal: Pred): Pred =
if (assms.isEmpty) goal
else factory.createImpliesPred(assms.get, goal)
def findQuantifiedNames(t: AnyRef): List[ZName] = t match {
case qp: QntPred => findQntQuantifiedNames(qp.getZSchText, qp.getPred)
case qe: QntExpr => findQntQuantifiedNames(qe.getZSchText, qe.getExpr)
case zObj: z.Term => (zObj.getChildren.toList map findQuantifiedNames).flatten
case _ => Nil
}
private def findQntQuantifiedNames(schText: ZSchText, body: AnyRef): List[ZName] = {
val declList = schText.getZDeclList
val declNames = declList.asScala.toList map {
case (v: VarDecl) => {
val declNames = v.getZNameList.asScala.toList map { case zn: ZName => zn }
val exprNames = findQuantifiedNames(v.getExpr)
declNames ::: exprNames
}
case unknown => findQuantifiedNames(unknown)
}
val qSchPredNames = findQuantifiedNames(schText.getPred)
val bodyNames = findQuantifiedNames(body)
declNames.flatten ::: qSchPredNames ::: bodyNames
}
private def replaceNames(t: z.Term, names: Map[ZName, ZName]): z.Term = {
val children = t.getChildren
val replaced = children map { c =>
c match {
case name: ZName => names.getOrElse(name, name)
case zObj: z.Term => replaceNames(zObj, names)
case obj => obj
}
}
t.create(replaced)
}
private def qntEq[T <: z.Term](term: T): QntEquals[T] = new QntEquals(term)
/**
* A wrapper to perform quantifier-equals operations on the term.
*
* Quantifier-equals normalises quantified names to match semantically-same expressions.
*/
sealed class QntEquals[T <: z.Term](val term: T) {
lazy val qntNames = findQuantifiedNames(term)
override def equals(other0: Any): Boolean = other0 match {
case other: QntEquals[_] =>
if (qntNames.isEmpty && other.qntNames.isEmpty) {
// no quantifiers - just perform term equality
term == other.term
} else if (qntNames.size == other.qntNames.size) {
// same quantifier count - map to the same ones and compare
val newNames = List.fill(qntNames.size)(CztSchemaTerms.createNamePlaceholder())
val map1 = qntNames zip newNames
val map2 = other.qntNames zip newNames
val new1 = replaceNames(term, map1.toMap)
val new2 = replaceNames(other.term, map2.toMap)
// try checking equality after normalising the names
new1 == new2
} else {
// different quantifier count - different terms
false
}
case _ => false
}
override lazy val hashCode: Int = term.hashCode
}
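// Worked example (informal, not in the original source): under QntEquals the predicates
// "forall x : NAT @ x >= 0" and "forall y : NAT @ y >= 0" compare equal, because both
// quantified names are first mapped to the same generated placeholder names and the terms
// are only then compared structurally; terms with different quantifier counts never match.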
object AndTerm {
def unapply(zObj: z.Term): Option[(AndPred, List[Pred])] = zObj match {
case andPred: AndPred => Some((andPred, unfoldAndPred(andPred)))
case _ => None
}
private def unfoldAndPred(zObj: Pred): List[Pred] = zObj match {
case andPred: AndPred =>
unfoldAndPred(andPred.getLeftPred) ::: unfoldAndPred(andPred.getRightPred)
case _ => List(zObj)
}
def apply(andTerms: List[Pred]): Option[Pred] = andTerms match {
case Nil => None
case single :: Nil => Some(single)
case first :: rest => {
val joined = (rest foldLeft first)(andPred)
Some(joined)
}
}
// TODO review And type
private def andPred(left: Pred, right: Pred): AndPred =
factory.createAndPred(left, right, And.Wedge)
}
object SubTerm {
def unapply(zObj: z.Term): Option[z.Term] = zObj match {
case exprPred: ExprPred => Option(exprPred.getExpr)
case expr: Expr => Some(expr)
case pred: Pred => Some(pred)
case _ => None
}
}
}
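// Hedged usage sketch (illustrative only, not part of the original file); relies on the
// file-level `ast => z` import above and assumes two parsed CZT predicates `goal1` and
// `goal2` obtained from Z/EVES proof goals.
object CztSubTermsUsageSketch {
  def example(goal1: z.Term, goal2: z.Term): Unit = {
    // collect distinct sub-terms of the first goal up to nesting depth 3
    val subs = CztSubTerms.subTerms(goal1, maxDepth = 3)
    // diff the two goals: matching sub-predicates are collapsed into placeholders
    val (diff1, diff2) = CztSubTerms.diffSubTerms(goal1, goal2)
    println(s"${subs.size} sub-terms; differing parts: $diff1 vs $diff2")
  }
}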
| andriusvelykis/proofprocess | org.ai4fm.proofprocess.zeves.core/src/org/ai4fm/proofprocess/zeves/core/analysis/CztSubTerms.scala | Scala | epl-1.0 | 7,320 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler
import scala.collection.mutable.{ArrayBuffer, HashSet, HashMap, Map}
import scala.language.reflectiveCalls
import scala.util.control.NonFatal
import org.scalatest.{BeforeAndAfter, FunSuiteLike}
import org.scalatest.concurrent.Timeouts
import org.scalatest.time.SpanSugar._
import org.apache.spark._
import org.apache.spark.rdd.RDD
import org.apache.spark.scheduler.SchedulingMode.SchedulingMode
import org.apache.spark.storage.{BlockId, BlockManagerId, BlockManagerMaster}
import org.apache.spark.util.CallSite
import org.apache.spark.executor.TaskMetrics
class DAGSchedulerEventProcessLoopTester(dagScheduler: DAGScheduler)
extends DAGSchedulerEventProcessLoop(dagScheduler) {
override def post(event: DAGSchedulerEvent): Unit = {
try {
// Forward event to `onReceive` directly to avoid processing event asynchronously.
onReceive(event)
} catch {
case NonFatal(e) => onError(e)
}
}
}
/**
* An RDD for passing to DAGScheduler. These RDDs will use the dependencies and
* preferredLocations (if any) that are passed to them. They are deliberately not executable
* so we can test that DAGScheduler does not try to execute RDDs locally.
*/
class MyRDD(
sc: SparkContext,
numPartitions: Int,
dependencies: List[Dependency[_]],
locations: Seq[Seq[String]] = Nil) extends RDD[(Int, Int)](sc, dependencies) with Serializable {
override def compute(split: Partition, context: TaskContext): Iterator[(Int, Int)] =
throw new RuntimeException("should not be reached")
override def getPartitions = (0 until numPartitions).map(i => new Partition {
override def index = i
}).toArray
override def getPreferredLocations(split: Partition): Seq[String] =
if (locations.isDefinedAt(split.index))
locations(split.index)
else
Nil
override def toString: String = "DAGSchedulerSuiteRDD " + id
}
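// Illustrative construction (mirrors the tests below): a two-partition shuffle-map RDD and a
// reduce RDD that depends on it through a ShuffleDependency, neither of which is ever computed.
//   val shuffleMapRdd = new MyRDD(sc, 2, Nil)
//   val shuffleDep = new ShuffleDependency(shuffleMapRdd, null)
//   val reduceRdd = new MyRDD(sc, 1, List(shuffleDep))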
class DAGSchedulerSuiteDummyException extends Exception
class DAGSchedulerSuite extends FunSuiteLike with BeforeAndAfter with LocalSparkContext with Timeouts {
val conf = new SparkConf
/** Set of TaskSets the DAGScheduler has requested executed. */
val taskSets = scala.collection.mutable.Buffer[TaskSet]()
/** Stages for which the DAGScheduler has called TaskScheduler.cancelTasks(). */
val cancelledStages = new HashSet[Int]()
val taskScheduler = new TaskScheduler() {
override def rootPool: Pool = null
override def schedulingMode: SchedulingMode = SchedulingMode.NONE
override def start() = {}
override def stop() = {}
override def executorHeartbeatReceived(execId: String, taskMetrics: Array[(Long, TaskMetrics)],
blockManagerId: BlockManagerId): Boolean = true
override def submitTasks(taskSet: TaskSet) = {
// normally done by TaskSetManager
taskSet.tasks.foreach(_.epoch = mapOutputTracker.getEpoch)
taskSets += taskSet
}
override def cancelTasks(stageId: Int, interruptThread: Boolean) {
cancelledStages += stageId
}
override def setDAGScheduler(dagScheduler: DAGScheduler) = {}
override def defaultParallelism() = 2
}
/** Length of time to wait while draining listener events. */
val WAIT_TIMEOUT_MILLIS = 10000
val sparkListener = new SparkListener() {
val successfulStages = new HashSet[Int]
val failedStages = new ArrayBuffer[Int]
val stageByOrderOfExecution = new ArrayBuffer[Int]
override def onStageCompleted(stageCompleted: SparkListenerStageCompleted) {
val stageInfo = stageCompleted.stageInfo
stageByOrderOfExecution += stageInfo.stageId
if (stageInfo.failureReason.isEmpty) {
successfulStages += stageInfo.stageId
} else {
failedStages += stageInfo.stageId
}
}
}
var mapOutputTracker: MapOutputTrackerMaster = null
var scheduler: DAGScheduler = null
var dagEventProcessLoopTester: DAGSchedulerEventProcessLoop = null
/**
* Set of cache locations to return from our mock BlockManagerMaster.
* Keys are (rdd ID, partition ID). Anything not present will return an empty
* list of cache locations silently.
*/
val cacheLocations = new HashMap[(Int, Int), Seq[BlockManagerId]]
// stub out BlockManagerMaster.getLocations to use our cacheLocations
val blockManagerMaster = new BlockManagerMaster(null, conf, true) {
override def getLocations(blockIds: Array[BlockId]): Seq[Seq[BlockManagerId]] = {
blockIds.map {
_.asRDDId.map(id => (id.rddId -> id.splitIndex)).flatMap(key => cacheLocations.get(key)).
getOrElse(Seq())
}.toSeq
}
override def removeExecutor(execId: String) {
// don't need to propagate to the driver, which we don't have
}
}
/** The list of results that DAGScheduler has collected. */
val results = new HashMap[Int, Any]()
var failure: Exception = _
val jobListener = new JobListener() {
override def taskSucceeded(index: Int, result: Any) = results.put(index, result)
override def jobFailed(exception: Exception) = { failure = exception }
}
before {
// Enable local execution for this test
val conf = new SparkConf().set("spark.localExecution.enabled", "true")
sc = new SparkContext("local", "DAGSchedulerSuite", conf)
sparkListener.successfulStages.clear()
sparkListener.failedStages.clear()
failure = null
sc.addSparkListener(sparkListener)
taskSets.clear()
cancelledStages.clear()
cacheLocations.clear()
results.clear()
mapOutputTracker = new MapOutputTrackerMaster(conf)
scheduler = new DAGScheduler(
sc,
taskScheduler,
sc.listenerBus,
mapOutputTracker,
blockManagerMaster,
sc.env) {
override def runLocally(job: ActiveJob) {
// don't bother with the thread while unit testing
runLocallyWithinThread(job)
}
}
dagEventProcessLoopTester = new DAGSchedulerEventProcessLoopTester(scheduler)
}
override def afterAll() {
super.afterAll()
}
/**
* Type of RDD we use for testing. Note that we should never call the real RDD compute methods.
* This is a pair RDD type so it can always be used in ShuffleDependencies.
*/
type PairOfIntsRDD = RDD[(Int, Int)]
/**
* Process the supplied event as if it were the top of the DAGScheduler event queue, expecting
* the scheduler not to exit.
*
* After processing the event, submit waiting stages as is done on most iterations of the
* DAGScheduler event loop.
*/
private def runEvent(event: DAGSchedulerEvent) {
dagEventProcessLoopTester.post(event)
}
/**
* When we submit dummy Jobs, this is the compute function we supply. Except in a local test
* below, we do not expect this function to ever be executed; instead, we will return results
* directly through CompletionEvents.
*/
private val jobComputeFunc = (context: TaskContext, it: Iterator[(_)]) =>
it.next.asInstanceOf[Tuple2[_, _]]._1
/** Send the given CompletionEvent messages for the tasks in the TaskSet. */
private def complete(taskSet: TaskSet, results: Seq[(TaskEndReason, Any)]) {
assert(taskSet.tasks.size >= results.size)
for ((result, i) <- results.zipWithIndex) {
if (i < taskSet.tasks.size) {
runEvent(CompletionEvent(taskSet.tasks(i), result._1, result._2, null, createFakeTaskInfo(), null))
}
}
}
private def completeWithAccumulator(accumId: Long, taskSet: TaskSet,
results: Seq[(TaskEndReason, Any)]) {
assert(taskSet.tasks.size >= results.size)
for ((result, i) <- results.zipWithIndex) {
if (i < taskSet.tasks.size) {
runEvent(CompletionEvent(taskSet.tasks(i), result._1, result._2,
Map[Long, Any]((accumId, 1)), createFakeTaskInfo(), null))
}
}
}
/** Sends the rdd to the scheduler for scheduling and returns the job id. */
private def submit(
rdd: RDD[_],
partitions: Array[Int],
func: (TaskContext, Iterator[_]) => _ = jobComputeFunc,
allowLocal: Boolean = false,
listener: JobListener = jobListener): Int = {
val jobId = scheduler.nextJobId.getAndIncrement()
runEvent(JobSubmitted(jobId, rdd, func, partitions, allowLocal, CallSite("", ""), listener))
jobId
}
/** Sends TaskSetFailed to the scheduler. */
private def failed(taskSet: TaskSet, message: String) {
runEvent(TaskSetFailed(taskSet, message))
}
/** Sends JobCancelled to the DAG scheduler. */
private def cancel(jobId: Int) {
runEvent(JobCancelled(jobId))
}
test("[SPARK-3353] parent stage should have lower stage id") {
sparkListener.stageByOrderOfExecution.clear()
sc.parallelize(1 to 10).map(x => (x, x)).reduceByKey(_ + _, 4).count()
assert(sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS))
assert(sparkListener.stageByOrderOfExecution.length === 2)
assert(sparkListener.stageByOrderOfExecution(0) < sparkListener.stageByOrderOfExecution(1))
}
test("zero split job") {
var numResults = 0
val fakeListener = new JobListener() {
override def taskSucceeded(partition: Int, value: Any) = numResults += 1
override def jobFailed(exception: Exception) = throw exception
}
submit(new MyRDD(sc, 0, Nil), Array(), listener = fakeListener)
assert(numResults === 0)
}
test("run trivial job") {
submit(new MyRDD(sc, 1, Nil), Array(0))
complete(taskSets(0), List((Success, 42)))
assert(results === Map(0 -> 42))
assertDataStructuresEmpty
}
test("local job") {
val rdd = new PairOfIntsRDD(sc, Nil) {
override def compute(split: Partition, context: TaskContext): Iterator[(Int, Int)] =
Array(42 -> 0).iterator
override def getPartitions = Array( new Partition { override def index = 0 } )
override def getPreferredLocations(split: Partition) = Nil
override def toString = "DAGSchedulerSuite Local RDD"
}
val jobId = scheduler.nextJobId.getAndIncrement()
runEvent(JobSubmitted(jobId, rdd, jobComputeFunc, Array(0), true, CallSite("", ""), jobListener))
assert(results === Map(0 -> 42))
assertDataStructuresEmpty
}
test("local job oom") {
val rdd = new PairOfIntsRDD(sc, Nil) {
override def compute(split: Partition, context: TaskContext): Iterator[(Int, Int)] =
throw new java.lang.OutOfMemoryError("test local job oom")
override def getPartitions = Array( new Partition { override def index = 0 } )
override def getPreferredLocations(split: Partition) = Nil
override def toString = "DAGSchedulerSuite Local RDD"
}
val jobId = scheduler.nextJobId.getAndIncrement()
runEvent(JobSubmitted(jobId, rdd, jobComputeFunc, Array(0), true, CallSite("", ""), jobListener))
assert(results.size == 0)
assertDataStructuresEmpty
}
test("run trivial job w/ dependency") {
val baseRdd = new MyRDD(sc, 1, Nil)
val finalRdd = new MyRDD(sc, 1, List(new OneToOneDependency(baseRdd)))
submit(finalRdd, Array(0))
complete(taskSets(0), Seq((Success, 42)))
assert(results === Map(0 -> 42))
assertDataStructuresEmpty
}
test("cache location preferences w/ dependency") {
val baseRdd = new MyRDD(sc, 1, Nil)
val finalRdd = new MyRDD(sc, 1, List(new OneToOneDependency(baseRdd)))
cacheLocations(baseRdd.id -> 0) =
Seq(makeBlockManagerId("hostA"), makeBlockManagerId("hostB"))
submit(finalRdd, Array(0))
val taskSet = taskSets(0)
assertLocations(taskSet, Seq(Seq("hostA", "hostB")))
complete(taskSet, Seq((Success, 42)))
assert(results === Map(0 -> 42))
assertDataStructuresEmpty
}
test("avoid exponential blowup when getting preferred locs list") {
// Build up a complex dependency graph with repeated zip operations, without preferred locations.
var rdd: RDD[_] = new MyRDD(sc, 1, Nil)
(1 to 30).foreach(_ => rdd = rdd.zip(rdd))
// getPreferredLocs runs quickly, indicating that exponential graph traversal is avoided.
failAfter(10 seconds) {
val preferredLocs = scheduler.getPreferredLocs(rdd,0)
// No preferred locations are returned.
assert(preferredLocs.length === 0)
}
}
test("unserializable task") {
val unserializableRdd = new MyRDD(sc, 1, Nil) {
class UnserializableClass
val unserializable = new UnserializableClass
}
submit(unserializableRdd, Array(0))
assert(failure.getMessage.startsWith(
"Job aborted due to stage failure: Task not serializable:"))
assert(sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS))
assert(sparkListener.failedStages.contains(0))
assert(sparkListener.failedStages.size === 1)
assertDataStructuresEmpty
}
test("trivial job failure") {
submit(new MyRDD(sc, 1, Nil), Array(0))
failed(taskSets(0), "some failure")
assert(failure.getMessage === "Job aborted due to stage failure: some failure")
assert(sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS))
assert(sparkListener.failedStages.contains(0))
assert(sparkListener.failedStages.size === 1)
assertDataStructuresEmpty
}
test("trivial job cancellation") {
val rdd = new MyRDD(sc, 1, Nil)
val jobId = submit(rdd, Array(0))
cancel(jobId)
assert(failure.getMessage === s"Job $jobId cancelled ")
assert(sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS))
assert(sparkListener.failedStages.contains(0))
assert(sparkListener.failedStages.size === 1)
assertDataStructuresEmpty
}
test("job cancellation no-kill backend") {
// make sure that the DAGScheduler doesn't crash when the TaskScheduler
// doesn't implement killTask()
val noKillTaskScheduler = new TaskScheduler() {
override def rootPool: Pool = null
override def schedulingMode: SchedulingMode = SchedulingMode.NONE
override def start() = {}
override def stop() = {}
override def submitTasks(taskSet: TaskSet) = {
taskSets += taskSet
}
override def cancelTasks(stageId: Int, interruptThread: Boolean) {
throw new UnsupportedOperationException
}
override def setDAGScheduler(dagScheduler: DAGScheduler) = {}
override def defaultParallelism() = 2
override def executorHeartbeatReceived(execId: String, taskMetrics: Array[(Long, TaskMetrics)],
blockManagerId: BlockManagerId): Boolean = true
}
val noKillScheduler = new DAGScheduler(
sc,
noKillTaskScheduler,
sc.listenerBus,
mapOutputTracker,
blockManagerMaster,
sc.env) {
override def runLocally(job: ActiveJob) {
// don't bother with the thread while unit testing
runLocallyWithinThread(job)
}
}
dagEventProcessLoopTester = new DAGSchedulerEventProcessLoopTester(noKillScheduler)
val jobId = submit(new MyRDD(sc, 1, Nil), Array(0))
cancel(jobId)
// Because the job wasn't actually cancelled, we shouldn't have received a failure message.
assert(failure === null)
// When the task set completes normally, state should be correctly updated.
complete(taskSets(0), Seq((Success, 42)))
assert(results === Map(0 -> 42))
assertDataStructuresEmpty
assert(sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS))
assert(sparkListener.failedStages.isEmpty)
assert(sparkListener.successfulStages.contains(0))
}
test("run trivial shuffle") {
val shuffleMapRdd = new MyRDD(sc, 2, Nil)
val shuffleDep = new ShuffleDependency(shuffleMapRdd, null)
val shuffleId = shuffleDep.shuffleId
val reduceRdd = new MyRDD(sc, 1, List(shuffleDep))
submit(reduceRdd, Array(0))
complete(taskSets(0), Seq(
(Success, makeMapStatus("hostA", 1)),
(Success, makeMapStatus("hostB", 1))))
assert(mapOutputTracker.getServerStatuses(shuffleId, 0).map(_._1) ===
Array(makeBlockManagerId("hostA"), makeBlockManagerId("hostB")))
complete(taskSets(1), Seq((Success, 42)))
assert(results === Map(0 -> 42))
assertDataStructuresEmpty
}
test("run trivial shuffle with fetch failure") {
val shuffleMapRdd = new MyRDD(sc, 2, Nil)
val shuffleDep = new ShuffleDependency(shuffleMapRdd, null)
val shuffleId = shuffleDep.shuffleId
val reduceRdd = new MyRDD(sc, 2, List(shuffleDep))
submit(reduceRdd, Array(0, 1))
complete(taskSets(0), Seq(
(Success, makeMapStatus("hostA", 1)),
(Success, makeMapStatus("hostB", 1))))
// the 2nd ResultTask failed
complete(taskSets(1), Seq(
(Success, 42),
(FetchFailed(makeBlockManagerId("hostA"), shuffleId, 0, 0, "ignored"), null)))
// this will get called
// blockManagerMaster.removeExecutor("exec-hostA")
// ask the scheduler to try it again
scheduler.resubmitFailedStages()
// have the 2nd attempt pass
complete(taskSets(2), Seq((Success, makeMapStatus("hostA", 1))))
// we can see both result blocks now
assert(mapOutputTracker.getServerStatuses(shuffleId, 0).map(_._1.host) === Array("hostA", "hostB"))
complete(taskSets(3), Seq((Success, 43)))
assert(results === Map(0 -> 42, 1 -> 43))
assertDataStructuresEmpty
}
test("trivial shuffle with multiple fetch failures") {
val shuffleMapRdd = new MyRDD(sc, 2, Nil)
val shuffleDep = new ShuffleDependency(shuffleMapRdd, null)
val shuffleId = shuffleDep.shuffleId
val reduceRdd = new MyRDD(sc, 2, List(shuffleDep))
submit(reduceRdd, Array(0, 1))
complete(taskSets(0), Seq(
(Success, makeMapStatus("hostA", 1)),
(Success, makeMapStatus("hostB", 1))))
// The MapOutputTracker should know about both map output locations.
assert(mapOutputTracker.getServerStatuses(shuffleId, 0).map(_._1.host) ===
Array("hostA", "hostB"))
// The first result task fails, with a fetch failure for the output from the first mapper.
runEvent(CompletionEvent(
taskSets(1).tasks(0),
FetchFailed(makeBlockManagerId("hostA"), shuffleId, 0, 0, "ignored"),
null,
Map[Long, Any](),
createFakeTaskInfo(),
null))
assert(sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS))
assert(sparkListener.failedStages.contains(1))
// The second ResultTask fails, with a fetch failure for the output from the second mapper.
runEvent(CompletionEvent(
taskSets(1).tasks(0),
FetchFailed(makeBlockManagerId("hostA"), shuffleId, 1, 1, "ignored"),
null,
Map[Long, Any](),
createFakeTaskInfo(),
null))
// The SparkListener should not receive redundant failure events.
assert(sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS))
assert(sparkListener.failedStages.size == 1)
}
test("ignore late map task completions") {
val shuffleMapRdd = new MyRDD(sc, 2, Nil)
val shuffleDep = new ShuffleDependency(shuffleMapRdd, null)
val shuffleId = shuffleDep.shuffleId
val reduceRdd = new MyRDD(sc, 2, List(shuffleDep))
submit(reduceRdd, Array(0, 1))
// pretend we were told hostA went away
val oldEpoch = mapOutputTracker.getEpoch
runEvent(ExecutorLost("exec-hostA"))
val newEpoch = mapOutputTracker.getEpoch
assert(newEpoch > oldEpoch)
val taskSet = taskSets(0)
// should be ignored for being too old
runEvent(CompletionEvent(taskSet.tasks(0), Success, makeMapStatus("hostA", 1), null, createFakeTaskInfo(), null))
// should work because it's a non-failed host
runEvent(CompletionEvent(taskSet.tasks(0), Success, makeMapStatus("hostB", 1), null, createFakeTaskInfo(), null))
// should be ignored for being too old
runEvent(CompletionEvent(taskSet.tasks(0), Success, makeMapStatus("hostA", 1), null, createFakeTaskInfo(), null))
// should work because it's a new epoch
taskSet.tasks(1).epoch = newEpoch
runEvent(CompletionEvent(taskSet.tasks(1), Success, makeMapStatus("hostA", 1), null, createFakeTaskInfo(), null))
assert(mapOutputTracker.getServerStatuses(shuffleId, 0).map(_._1) ===
Array(makeBlockManagerId("hostB"), makeBlockManagerId("hostA")))
complete(taskSets(1), Seq((Success, 42), (Success, 43)))
assert(results === Map(0 -> 42, 1 -> 43))
assertDataStructuresEmpty
}
test("run shuffle with map stage failure") {
val shuffleMapRdd = new MyRDD(sc, 2, Nil)
val shuffleDep = new ShuffleDependency(shuffleMapRdd, null)
val reduceRdd = new MyRDD(sc, 2, List(shuffleDep))
submit(reduceRdd, Array(0, 1))
// Fail the map stage. This should cause the entire job to fail.
val stageFailureMessage = "Exception failure in map stage"
failed(taskSets(0), stageFailureMessage)
assert(failure.getMessage === s"Job aborted due to stage failure: $stageFailureMessage")
// Listener bus should get told about the map stage failing, but not the reduce stage
// (since the reduce stage hasn't been started yet).
assert(sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS))
assert(sparkListener.failedStages.toSet === Set(0))
assertDataStructuresEmpty
}
/**
* Makes sure that failures of stage used by multiple jobs are correctly handled.
*
* This test creates the following dependency graph:
*
* shuffleMapRdd1 shuffleMapRDD2
* | \\ |
* | \\ |
* | \\ |
* | \\ |
* reduceRdd1 reduceRdd2
*
* We start both shuffleMapRdds and then fail shuffleMapRdd1. As a result, the job listeners for
* reduceRdd1 and reduceRdd2 should both be informed that the job failed. shuffleMapRDD2 should
* also be cancelled, because it is only used by reduceRdd2 and reduceRdd2 cannot complete
* without shuffleMapRdd1.
*/
test("failure of stage used by two jobs") {
val shuffleMapRdd1 = new MyRDD(sc, 2, Nil)
val shuffleDep1 = new ShuffleDependency(shuffleMapRdd1, null)
val shuffleMapRdd2 = new MyRDD(sc, 2, Nil)
val shuffleDep2 = new ShuffleDependency(shuffleMapRdd2, null)
val reduceRdd1 = new MyRDD(sc, 2, List(shuffleDep1))
val reduceRdd2 = new MyRDD(sc, 2, List(shuffleDep1, shuffleDep2))
// We need to make our own listeners for this test, since by default submit uses the same
// listener for all jobs, and here we want to capture the failure for each job separately.
class FailureRecordingJobListener() extends JobListener {
var failureMessage: String = _
override def taskSucceeded(index: Int, result: Any) {}
override def jobFailed(exception: Exception) = { failureMessage = exception.getMessage }
}
val listener1 = new FailureRecordingJobListener()
val listener2 = new FailureRecordingJobListener()
submit(reduceRdd1, Array(0, 1), listener=listener1)
submit(reduceRdd2, Array(0, 1), listener=listener2)
val stageFailureMessage = "Exception failure in map stage"
failed(taskSets(0), stageFailureMessage)
assert(cancelledStages.toSet === Set(0, 2))
// Make sure the listeners got told about both failed stages.
assert(sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS))
assert(sparkListener.successfulStages.isEmpty)
assert(sparkListener.failedStages.toSet === Set(0, 2))
assert(listener1.failureMessage === s"Job aborted due to stage failure: $stageFailureMessage")
assert(listener2.failureMessage === s"Job aborted due to stage failure: $stageFailureMessage")
assertDataStructuresEmpty
}
test("run trivial shuffle with out-of-band failure and retry") {
val shuffleMapRdd = new MyRDD(sc, 2, Nil)
val shuffleDep = new ShuffleDependency(shuffleMapRdd, null)
val shuffleId = shuffleDep.shuffleId
val reduceRdd = new MyRDD(sc, 1, List(shuffleDep))
submit(reduceRdd, Array(0))
// blockManagerMaster.removeExecutor("exec-hostA")
// pretend we were told hostA went away
runEvent(ExecutorLost("exec-hostA"))
// DAGScheduler will immediately resubmit the stage after it appears to have no pending tasks
// rather than marking it is as failed and waiting.
complete(taskSets(0), Seq(
(Success, makeMapStatus("hostA", 1)),
(Success, makeMapStatus("hostB", 1))))
// have hostC complete the resubmitted task
complete(taskSets(1), Seq((Success, makeMapStatus("hostC", 1))))
assert(mapOutputTracker.getServerStatuses(shuffleId, 0).map(_._1) ===
Array(makeBlockManagerId("hostC"), makeBlockManagerId("hostB")))
complete(taskSets(2), Seq((Success, 42)))
assert(results === Map(0 -> 42))
assertDataStructuresEmpty
}
test("recursive shuffle failures") {
val shuffleOneRdd = new MyRDD(sc, 2, Nil)
val shuffleDepOne = new ShuffleDependency(shuffleOneRdd, null)
val shuffleTwoRdd = new MyRDD(sc, 2, List(shuffleDepOne))
val shuffleDepTwo = new ShuffleDependency(shuffleTwoRdd, null)
val finalRdd = new MyRDD(sc, 1, List(shuffleDepTwo))
submit(finalRdd, Array(0))
// have the first stage complete normally
complete(taskSets(0), Seq(
(Success, makeMapStatus("hostA", 2)),
(Success, makeMapStatus("hostB", 2))))
// have the second stage complete normally
complete(taskSets(1), Seq(
(Success, makeMapStatus("hostA", 1)),
(Success, makeMapStatus("hostC", 1))))
// fail the third stage because hostA went down
complete(taskSets(2), Seq(
(FetchFailed(makeBlockManagerId("hostA"), shuffleDepTwo.shuffleId, 0, 0, "ignored"), null)))
// TODO assert this:
// blockManagerMaster.removeExecutor("exec-hostA")
// have DAGScheduler try again
scheduler.resubmitFailedStages()
complete(taskSets(3), Seq((Success, makeMapStatus("hostA", 2))))
complete(taskSets(4), Seq((Success, makeMapStatus("hostA", 1))))
complete(taskSets(5), Seq((Success, 42)))
assert(results === Map(0 -> 42))
assertDataStructuresEmpty
}
test("cached post-shuffle") {
val shuffleOneRdd = new MyRDD(sc, 2, Nil)
val shuffleDepOne = new ShuffleDependency(shuffleOneRdd, null)
val shuffleTwoRdd = new MyRDD(sc, 2, List(shuffleDepOne))
val shuffleDepTwo = new ShuffleDependency(shuffleTwoRdd, null)
val finalRdd = new MyRDD(sc, 1, List(shuffleDepTwo))
submit(finalRdd, Array(0))
cacheLocations(shuffleTwoRdd.id -> 0) = Seq(makeBlockManagerId("hostD"))
cacheLocations(shuffleTwoRdd.id -> 1) = Seq(makeBlockManagerId("hostC"))
// complete stage 2
complete(taskSets(0), Seq(
(Success, makeMapStatus("hostA", 2)),
(Success, makeMapStatus("hostB", 2))))
// complete stage 1
complete(taskSets(1), Seq(
(Success, makeMapStatus("hostA", 1)),
(Success, makeMapStatus("hostB", 1))))
// pretend stage 0 failed because hostA went down
complete(taskSets(2), Seq(
(FetchFailed(makeBlockManagerId("hostA"), shuffleDepTwo.shuffleId, 0, 0, "ignored"), null)))
// TODO assert this:
// blockManagerMaster.removeExecutor("exec-hostA")
// DAGScheduler should notice the cached copy of the second shuffle and try to get it rerun.
scheduler.resubmitFailedStages()
assertLocations(taskSets(3), Seq(Seq("hostD")))
// allow hostD to recover
complete(taskSets(3), Seq((Success, makeMapStatus("hostD", 1))))
complete(taskSets(4), Seq((Success, 42)))
assert(results === Map(0 -> 42))
assertDataStructuresEmpty
}
test("misbehaved accumulator should not crash DAGScheduler and SparkContext") {
val acc = new Accumulator[Int](0, new AccumulatorParam[Int] {
override def addAccumulator(t1: Int, t2: Int): Int = t1 + t2
override def zero(initialValue: Int): Int = 0
override def addInPlace(r1: Int, r2: Int): Int = {
throw new DAGSchedulerSuiteDummyException
}
})
// Run this on executors
sc.parallelize(1 to 10, 2).foreach { item => acc.add(1) }
// Run this within a local thread
sc.parallelize(1 to 10, 2).map { item => acc.add(1) }.take(1)
// Make sure we can still run local commands as well as cluster commands.
assert(sc.parallelize(1 to 10, 2).count() === 10)
assert(sc.parallelize(1 to 10, 2).first() === 1)
}
test("misbehaved resultHandler should not crash DAGScheduler and SparkContext") {
val e1 = intercept[SparkDriverExecutionException] {
val rdd = sc.parallelize(1 to 10, 2)
sc.runJob[Int, Int](
rdd,
(context: TaskContext, iter: Iterator[Int]) => iter.size,
Seq(0),
allowLocal = true,
(part: Int, result: Int) => throw new DAGSchedulerSuiteDummyException)
}
assert(e1.getCause.isInstanceOf[DAGSchedulerSuiteDummyException])
val e2 = intercept[SparkDriverExecutionException] {
val rdd = sc.parallelize(1 to 10, 2)
sc.runJob[Int, Int](
rdd,
(context: TaskContext, iter: Iterator[Int]) => iter.size,
Seq(0, 1),
allowLocal = false,
(part: Int, result: Int) => throw new DAGSchedulerSuiteDummyException)
}
assert(e2.getCause.isInstanceOf[DAGSchedulerSuiteDummyException])
// Make sure we can still run local commands as well as cluster commands.
assert(sc.parallelize(1 to 10, 2).count() === 10)
assert(sc.parallelize(1 to 10, 2).first() === 1)
}
test("accumulator not calculated for resubmitted result stage") {
// just to register the accumulator
val accum = new Accumulator[Int](0, AccumulatorParam.IntAccumulatorParam)
val finalRdd = new MyRDD(sc, 1, Nil)
submit(finalRdd, Array(0))
completeWithAccumulator(accum.id, taskSets(0), Seq((Success, 42)))
completeWithAccumulator(accum.id, taskSets(0), Seq((Success, 42)))
assert(results === Map(0 -> 42))
assert(Accumulators.originals(accum.id).value === 1)
assertDataStructuresEmpty
}
/**
* Assert that the supplied TaskSet has exactly the given hosts as its preferred locations.
* Note that this checks only the host and not the executor ID.
*/
private def assertLocations(taskSet: TaskSet, hosts: Seq[Seq[String]]) {
assert(hosts.size === taskSet.tasks.size)
for ((taskLocs, expectedLocs) <- taskSet.tasks.map(_.preferredLocations).zip(hosts)) {
assert(taskLocs.map(_.host) === expectedLocs)
}
}
private def makeMapStatus(host: String, reduces: Int): MapStatus =
MapStatus(makeBlockManagerId(host), Array.fill[Long](reduces)(2))
private def makeBlockManagerId(host: String): BlockManagerId =
BlockManagerId("exec-" + host, host, 12345)
private def assertDataStructuresEmpty = {
assert(scheduler.activeJobs.isEmpty)
assert(scheduler.failedStages.isEmpty)
assert(scheduler.jobIdToActiveJob.isEmpty)
assert(scheduler.jobIdToStageIds.isEmpty)
assert(scheduler.stageIdToStage.isEmpty)
assert(scheduler.runningStages.isEmpty)
assert(scheduler.shuffleToMapStage.isEmpty)
assert(scheduler.waitingStages.isEmpty)
}
// Nothing in this test should break if the task info's fields are null, but
// OutputCommitCoordinator requires the task info itself to not be null.
private def createFakeTaskInfo(): TaskInfo = {
val info = new TaskInfo(0, 0, 0, 0L, "", "", TaskLocality.ANY, false)
info.finishTime = 1 // to prevent spurious errors in JobProgressListener
info
}
}
| hengyicai/OnlineAggregationUCAS | core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala | Scala | apache-2.0 | 31,905 |
/***********************************************************************
* Copyright (c) 2013-2015 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0 which
* accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.raster.data
import java.io.Serializable
import java.util.Map.Entry
import java.util.concurrent.{Callable, TimeUnit}
import java.util.{Map => JMap}
import com.google.common.cache.CacheBuilder
import com.google.common.collect.{ImmutableMap, ImmutableSetMultimap}
import com.typesafe.scalalogging.slf4j.Logging
import org.apache.accumulo.core.client.{BatchWriterConfig, Connector, TableExistsException}
import org.apache.accumulo.core.data.{Key, Mutation, Range, Value}
import org.apache.accumulo.core.security.TablePermission
import org.geotools.coverage.grid.GridEnvelope2D
import org.joda.time.DateTime
import org.locationtech.geomesa.accumulo.index.Strategy._
import org.locationtech.geomesa.accumulo.iterators.BBOXCombiner._
import org.locationtech.geomesa.accumulo.stats.{RasterQueryStat, RasterQueryStatTransform, StatWriter}
import org.locationtech.geomesa.accumulo.util.SelfClosingIterator
import org.locationtech.geomesa.raster._
import org.locationtech.geomesa.raster.index.RasterIndexSchema
import org.locationtech.geomesa.raster.util.RasterUtils
import org.locationtech.geomesa.security.AuthorizationsProvider
import org.locationtech.geomesa.utils.geohash.BoundingBox
import org.locationtech.geomesa.utils.stats.{MethodProfiling, NoOpTimings, Timings, TimingsImpl}
import scala.collection.JavaConversions._
class AccumuloRasterStore(val connector: Connector,
val tableName: String,
val authorizationsProvider: AuthorizationsProvider,
val writeVisibilities: String,
writeMemoryConfig: Option[String] = None,
writeThreadsConfig: Option[Int] = None,
queryThreadsConfig: Option[Int] = None,
collectStats: Boolean = false) extends MethodProfiling with StatWriter with Logging {
val writeMemory = writeMemoryConfig.getOrElse("10000").toLong
val writeThreads = writeThreadsConfig.getOrElse(10)
val bwConfig: BatchWriterConfig =
new BatchWriterConfig().setMaxMemory(writeMemory).setMaxWriteThreads(writeThreads)
val numQThreads = queryThreadsConfig.getOrElse(20)
private val tableOps = connector.tableOperations()
private val securityOps = connector.securityOperations
private val profileTable = s"${tableName}_queries"
private def getBoundsRowID = tableName + "_bounds"
def getAuths = authorizationsProvider.getAuthorizations
/**
* Given a query, return a single buffered image that is a mosaic of the matching raster tiles.
* This is primarily used to satisfy WCS/WMS queries.
* @param query raster query selecting the tiles to mosaic
* @param params coverage query parameters (target width, height and envelope)
* @return the mosaicked buffered image
*/
def getMosaicedRaster(query: RasterQuery, params: GeoMesaCoverageQueryParams) = {
implicit val timings = if (collectStats) new TimingsImpl else NoOpTimings
val rasters = getRasters(query)
val (image, numRasters) = profile(
RasterUtils.mosaicChunks(rasters, params.width.toInt, params.height.toInt, params.envelope),
"mosaic")
if (collectStats) {
val stat = RasterQueryStat(tableName,
System.currentTimeMillis(),
query.toString,
timings.time("planning"),
timings.time("scanning") - timings.time("planning"),
timings.time("mosaic"),
numRasters)
this.writeStat(stat, profileTable)
}
image
}
def getRasters(rasterQuery: RasterQuery)(implicit timings: Timings): Iterator[Raster] = {
profile({
val batchScanner = connector.createBatchScanner(tableName, authorizationsProvider.getAuthorizations, numQThreads)
val plan = AccumuloRasterQueryPlanner.getQueryPlan(rasterQuery, getResToGeoHashLenMap, getResToBoundsMap)
plan match {
case Some(qp) =>
configureBatchScanner(batchScanner, qp)
adaptIteratorToChunks(SelfClosingIterator(batchScanner))
case _ => Iterator.empty
}
}, "scanning")
}
def getQueryRecords(numRecords: Int): Iterator[String] = {
val scanner = connector.createScanner(profileTable, authorizationsProvider.getAuthorizations)
scanner.iterator.take(numRecords).map(RasterQueryStatTransform.decodeStat)
}
def getBounds: BoundingBox = {
ensureBoundsTableExists()
val scanner = connector.createScanner(GEOMESA_RASTER_BOUNDS_TABLE, authorizationsProvider.getAuthorizations)
scanner.setRange(new Range(getBoundsRowID))
val resultingBounds = SelfClosingIterator(scanner)
if (resultingBounds.isEmpty) {
BoundingBox(-180, 180, -90, 90)
} else {
//TODO: GEOMESA-646 anti-meridian questions
reduceValuesToBoundingBox(resultingBounds.map(_.getValue))
}
}
def getAvailableBoundingBoxes: Seq[BoundingBox] = getResToBoundsMap.values().toSeq
def getAvailableResolutions: Seq[Double] = getResToGeoHashLenMap.keySet.toSeq.sorted
def getAvailableGeoHashLengths: Set[Int] = getResToGeoHashLenMap.values().toSet
def getResToGeoHashLenMap: ImmutableSetMultimap[Double, Int] =
AccumuloRasterStore.geoHashLenCache.get(tableName, resToGeoHashLenMapCallable)
def resToGeoHashLenMapCallable = new Callable[ImmutableSetMultimap[Double, Int]] {
override def call(): ImmutableSetMultimap[Double, Int] = {
val m = new ImmutableSetMultimap.Builder[Double, Int]()
for {
k <- metaScanner().map(_.getKey)
} {
val resolution = lexiDecodeStringToDouble(k.getColumnQualifier.toString)
val geohashlen = lexiDecodeStringToInt(k.getColumnFamily.toString)
m.put(resolution, geohashlen)
}
m.build()
}
}
def getResToBoundsMap: ImmutableMap[Double, BoundingBox] =
AccumuloRasterStore.extentCache.get(tableName, resToBoundsCallable)
def resToBoundsCallable = new Callable[ImmutableMap[Double, BoundingBox]] {
override def call(): ImmutableMap[Double, BoundingBox] = {
val m = new ImmutableMap.Builder[Double, BoundingBox]()
for {
kv <- metaScanner()
} {
val resolution = lexiDecodeStringToDouble(kv.getKey.getColumnQualifier.toString)
val bounds = valueToBbox(kv.getValue)
m.put(resolution, bounds)
}
m.build()
}
}
def metaScanner = () => {
ensureBoundsTableExists()
val scanner = connector.createScanner(GEOMESA_RASTER_BOUNDS_TABLE, getAuths)
scanner.setRange(new Range(getBoundsRowID))
SelfClosingIterator(scanner)
}
def getGridRange: GridEnvelope2D = {
val bounds = getBounds
val resolutions = getAvailableResolutions
// If no resolutions are available, then we have an empty table so assume default value for now
// TODO: determine what to do about the resolution, arguably should be resolutions.max: https://geomesa.atlassian.net/browse/GEOMESA-868
val resolution = if (resolutions.isEmpty) defaultResolution else resolutions.min
val width = Math.abs(bounds.getWidth / resolution).toInt
val height = Math.abs(bounds.getHeight / resolution).toInt
new GridEnvelope2D(0, 0, width, height)
}
def adaptIteratorToChunks(iter: java.util.Iterator[Entry[Key, Value]]): Iterator[Raster] = {
iter.map(entry => RasterIndexSchema.decode((entry.getKey, entry.getValue)))
}
private def dateToAccTimestamp(dt: DateTime): Long = dt.getMillis / 1000
private def createBoundsMutation(raster: Raster): Mutation = {
// write the bounds mutation
val mutation = new Mutation(getBoundsRowID)
val value = bboxToValue(BoundingBox(raster.metadata.geom.getEnvelopeInternal))
val resolution = lexiEncodeDoubleToString(raster.resolution)
val geohashlen = lexiEncodeIntToString(raster.minimumBoundingGeoHash.map( _.hash.length ).getOrElse(0))
mutation.put(geohashlen, resolution, value)
mutation
}
private def createMutation(raster: Raster): Mutation = {
val (key, value) = RasterIndexSchema.encode(raster, writeVisibilities)
val mutation = new Mutation(key.getRow)
val colFam = key.getColumnFamily
val colQual = key.getColumnQualifier
val colVis = key.getColumnVisibilityParsed
val timestamp: Long = dateToAccTimestamp(raster.time)
mutation.put(colFam, colQual, colVis, timestamp, value)
mutation
}
def putRasters(rasters: Seq[Raster]) = rasters.foreach(putRaster)
def putRaster(raster: Raster) {
writeMutations(tableName, createMutation(raster))
writeMutations(GEOMESA_RASTER_BOUNDS_TABLE, createBoundsMutation(raster))
}
private def writeMutations(tableName: String, mutations: Mutation*) {
val writer = connector.createBatchWriter(tableName, bwConfig)
mutations.foreach { m => writer.addMutation(m) }
writer.flush()
writer.close()
}
def createTableStructure() = {
ensureTableExists(tableName)
ensureBoundsTableExists()
}
def ensureBoundsTableExists() = {
createTable(GEOMESA_RASTER_BOUNDS_TABLE)
if (!tableOps.listIterators(GEOMESA_RASTER_BOUNDS_TABLE).containsKey("GEOMESA_BBOX_COMBINER")) {
val bboxcombinercfg = AccumuloRasterBoundsPlanner.getBoundsScannerCfg(tableName)
tableOps.attachIterator(GEOMESA_RASTER_BOUNDS_TABLE, bboxcombinercfg)
}
}
private def ensureTableExists(tableName: String) {
// TODO: WCS: ensure that this does not duplicate what is done in AccumuloDataStore
// Perhaps consolidate with different default configurations
// GEOMESA-564
val user = connector.whoami
val defaultVisibilities = authorizationsProvider.getAuthorizations.toString.replaceAll(",", "&")
if (!tableOps.exists(tableName)) {
createTables(user, defaultVisibilities, Array(tableName, profileTable):_*)
}
}
private def createTables(user: String, defaultVisibilities: String, tableNames: String*) = {
tableNames.foreach(tableName => {
createTable(tableName)
AccumuloRasterTableConfig.settings(defaultVisibilities).foreach { case (key, value) =>
tableOps.setProperty(tableName, key, value)
}
AccumuloRasterTableConfig.permissions.split(",").foreach { p =>
securityOps.grantTablePermission(user, tableName, TablePermission.valueOf(p))
}
})
}
private def createTable(tableName: String) = {
if(!tableOps.exists(tableName)) {
try {
tableOps.create(tableName)
} catch {
case e: TableExistsException => // this can happen with multiple threads but shouldn't cause any issues
}
}
}
def deleteRasterTable(): Unit = {
deleteMetaData()
deleteTable(profileTable)
deleteTable(tableName)
}
private def deleteTable(table: String): Unit = {
try {
if (tableOps.exists(table)) {
tableOps.delete(table)
}
} catch {
case e: Exception => logger.warn(s"Error occurred when attempting to delete table: $table", e)
}
}
private def deleteMetaData(): Unit = {
try {
if (tableOps.exists(GEOMESA_RASTER_BOUNDS_TABLE)) {
val deleter = connector.createBatchDeleter(GEOMESA_RASTER_BOUNDS_TABLE, getAuths, 3, bwConfig)
val deleteRange = new Range(getBoundsRowID)
deleter.setRanges(Seq(deleteRange))
deleter.delete()
deleter.close()
AccumuloRasterStore.geoHashLenCache.invalidate(tableName)
}
} catch {
case e: Exception => logger.warn(s"Error occurred when attempting to delete Metadata for table: $tableName")
}
}
}
object AccumuloRasterStore {
import org.locationtech.geomesa.accumulo.data.AccumuloDataStoreFactory._
import org.locationtech.geomesa.accumulo.data.AccumuloDataStoreFactory.params._
def apply(username: String,
password: String,
instanceId: String,
zookeepers: String,
tableName: String,
auths: String,
writeVisibilities: String,
useMock: Boolean = false,
writeMemoryConfig: Option[String] = None,
writeThreadsConfig: Option[Int] = None,
queryThreadsConfig: Option[Int] = None,
collectStats: Boolean = false): AccumuloRasterStore = {
val conn = AccumuloStoreHelper.buildAccumuloConnector(username, password, instanceId, zookeepers, useMock)
val authorizationsProvider = AccumuloStoreHelper.getAuthorizationsProvider(auths.split(","), conn)
val rasterStore = new AccumuloRasterStore(conn, tableName, authorizationsProvider, writeVisibilities,
writeMemoryConfig, writeThreadsConfig, queryThreadsConfig, collectStats)
// this will actually create the Accumulo Table
rasterStore.createTableStructure()
rasterStore
}
def apply(config: JMap[String, Serializable]): AccumuloRasterStore = {
val username: String = userParam.lookUp(config).asInstanceOf[String]
val password: String = passwordParam.lookUp(config).asInstanceOf[String]
val instance: String = instanceIdParam.lookUp(config).asInstanceOf[String]
val zookeepers: String = zookeepersParam.lookUp(config).asInstanceOf[String]
val auths: String = authsParam.lookupOpt[String](config).getOrElse("")
val vis: String = visibilityParam.lookupOpt[String](config).getOrElse("")
val tablename: String = tableNameParam.lookUp(config).asInstanceOf[String]
val useMock: Boolean = mockParam.lookUp(config).asInstanceOf[String].toBoolean
val wMem: Option[String] = RasterParams.writeMemoryParam.lookupOpt[String](config)
val wThread: Option[Int] = writeThreadsParam.lookupOpt[Int](config)
val qThread: Option[Int] = queryThreadsParam.lookupOpt[Int](config)
val cStats: Boolean = java.lang.Boolean.valueOf(statsParam.lookupOpt[Boolean](config).getOrElse(false))
AccumuloRasterStore(username, password, instance, zookeepers, tablename,
auths, vis, useMock, wMem, wThread, qThread, cStats)
}
val geoHashLenCache =
CacheBuilder.newBuilder()
.expireAfterAccess(10, TimeUnit.MINUTES)
.expireAfterWrite(10, TimeUnit.MINUTES)
.build[String, ImmutableSetMultimap[Double, Int]]
val extentCache =
CacheBuilder.newBuilder()
.expireAfterAccess(10, TimeUnit.MINUTES)
.expireAfterWrite(10, TimeUnit.MINUTES)
.build[String, ImmutableMap[Double, BoundingBox]]
}
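// Hedged usage sketch (illustrative only; the connection parameters and `someRaster` are
// placeholders). The companion apply builds the Accumulo connector, wires up the
// authorizations provider and creates the raster and bounds tables.
//   val store = AccumuloRasterStore(
//     username = "root", password = "secret",
//     instanceId = "geomesa", zookeepers = "zoo1:2181",
//     tableName = "rasters", auths = "", writeVisibilities = "",
//     useMock = true)
//   store.putRaster(someRaster)   // writes the chunk plus a bounds-table mutation
//   val bounds = store.getBounds  // union of all written bounding boxes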
object AccumuloRasterTableConfig {
/**
* documentation for raster table settings:
*
* table.security.scan.visibility.default
* - The default visibility for the table
*
* table.iterator.majc.vers.opt.maxVersions
* - Versioning iterator setting
* - max versions, major compaction
*
* table.iterator.minc.vers.opt.maxVersions
* - Versioning iterator setting
* - max versions, minor compaction
*
* table.iterator.scan.vers.opt.maxVersions
* - Versioning iterator setting
* - max versions, scan time
*
* table.split.threshold
* - The split threshold for the table; when it is reached,
* Accumulo splits the table into tablets of this size.
*
* @param visibilities default scan visibility string for the table
* @return map of Accumulo table property names to their values
*/
def settings(visibilities: String): Map[String, String] = Map (
"table.security.scan.visibility.default" -> visibilities,
"table.iterator.majc.vers.opt.maxVersions" -> rasterMajcMaxVers,
"table.iterator.minc.vers.opt.maxVersions" -> rasterMincMaxVers,
"table.iterator.scan.vers.opt.maxVersions" -> rasterScanMaxVers,
"table.split.threshold" -> rasterSplitThresh
)
val permissions = "BULK_IMPORT,READ,WRITE,ALTER_TABLE"
}
| drackaer/geomesa | geomesa-raster/src/main/scala/org/locationtech/geomesa/raster/data/AccumuloRasterStore.scala | Scala | apache-2.0 | 15,921 |
package org.jetbrains.sbt
package annotator
import java.io.File
import com.intellij.openapi.util.io.FileUtil
import com.intellij.openapi.util.text.StringUtil
import com.intellij.openapi.vfs.{CharsetToolkit, LocalFileSystem}
import com.intellij.psi.PsiFileFactory
import com.intellij.testFramework.PlatformTestCase
import org.jetbrains.plugins.scala.util.TestUtils
import org.jetbrains.sbt.language.{SbtFileImpl, SbtFileType}
import org.junit.Assert._
/**
* @author Nikolay Obedin
* @since 8/4/14.
*/
abstract class AnnotatorTestBase extends PlatformTestCase {
protected def testdataPath: String = s"${TestUtils.getTestDataPath}/annotator/Sbt/"
def loadTestFile(): SbtFileImpl = {
val filePath = s"$testdataPath/${getTestName(false)}.sbt"
val file = LocalFileSystem.getInstance.findFileByPath(filePath.replace(File.separatorChar, '/'))
assertNotNull(filePath, file)
val fileText = StringUtil.convertLineSeparators(FileUtil.loadFile(new File(file.getCanonicalPath), CharsetToolkit.UTF8))
assertFalse(fileText.isEmpty)
PsiFileFactory.getInstance(getProject).createFileFromText(s"${getTestName(false)}.sbt", SbtFileType, fileText).asInstanceOf[SbtFileImpl]
}
}
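// Hedged usage sketch (hypothetical subclass, not part of the original file): a concrete
// annotator test loads <TestName>.sbt from the testdata directory and asserts on the result.
//   class SbtDependencyAnnotatorTest extends AnnotatorTestBase {
//     def testSimpleDefinition(): Unit = {
//       val file = loadTestFile()
//       assertNotNull(file)
//     }
//   }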
| jastice/intellij-scala | scala/scala-impl/test/org/jetbrains/sbt/annotator/AnnotatorTestBase.scala | Scala | apache-2.0 | 1,194 |
/***********************************************************************
* Copyright (c) 2013-2019 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.geojson
import java.io.Closeable
trait GeoJsonIndex {
/**
* Create a new index by name
*
* @param name identifier for this index
* @param id json-path expression to extract an ID from a geojson feature, used to uniquely identify that feature
* @param dtg json-path expression to extract a date from a geojson feature, used to index by date
* @param points store the geojson geometries as points or as geometries with extents
*/
def createIndex(name: String, id: Option[String] = None, dtg: Option[String] = None, points: Boolean = false): Unit
/**
* Delete an existing index by name
*
* @param name index to delete
*/
def deleteIndex(name: String): Unit
/**
* Add new features to the index
*
* @param name index to modify
* @param json geojson 'Feature' or 'FeatureCollection'
* @return ids of the newly created features
*/
def add(name: String, json: String): Seq[String]
/**
* Update existing features in the index. To use this method, the index must have
* been created with an 'id' json-path in order to determine which features to update
*
* @param name index to modify
* @param json geojson 'Feature' or 'FeatureCollection'
*/
def update(name: String, json: String): Unit
/**
* Update existing features in the index
*
* @param name index to modify
* @param ids ids of the features to update - must correspond to the feature geojson
* @param json geojson 'Feature' or 'FeatureCollection'
*/
def update(name: String, ids: Seq[String], json: String): Unit
/**
* Delete an existing feature
*
* @param name index to modify
* @param id id of a feature to delete
*/
def delete(name: String, id: String): Unit = delete(name, Seq(id))
/**
* Delete existing features
*
* @param name index to modify
* @param ids ids of features to delete
*/
def delete(name: String, ids: Iterable[String]): Unit
/**
* Returns features by id
*
* @param name index to query
* @param ids feature ids
* @return
*/
def get(name: String, ids: Iterable[String], transform: Map[String, String] = Map.empty): Iterator[String] with Closeable
/**
* Query features in the index
*
* @param name index to query
* @param query json query string - @see org.locationtech.geomesa.geojson.query.GeoJsonQuery
* @param transform optional transform for json coming back, consisting of path keys and json-path selector values
* @return matching geojson, or transformed json
*/
def query(name: String, query: String, transform: Map[String, String] = Map.empty): Iterator[String] with Closeable
}
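// Hedged usage sketch (illustrative only; `index` stands for some concrete GeoJsonIndex
// implementation, and the json-path and query strings are assumptions based on the scaladoc above).
//   index.createIndex("points-of-interest", id = Some("$.properties.id"), points = true)
//   val ids = index.add("points-of-interest", featureCollectionJson)
//   val results = index.query("points-of-interest", """{ "properties.name" : "city hall" }""")
//   try results.foreach(println) finally results.close()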
| elahrvivaz/geomesa | geomesa-geojson/geomesa-geojson-api/src/main/scala/org/locationtech/geomesa/geojson/GeoJsonIndex.scala | Scala | apache-2.0 | 3,204 |
/******************************************************************************
* Copyright © 2016 Maxim Karpov *
* *
* Licensed under the Apache License, Version 2.0 (the "License"); *
* you may not use this file except in compliance with the License. *
* You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, software *
* distributed under the License is distributed on an "AS IS" BASIS, *
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
* See the License for the specific language governing permissions and *
* limitations under the License. *
******************************************************************************/
package ru.makkarpov.scalingua
import Compat._
import ru.makkarpov.scalingua.extract.MessageExtractor
import ru.makkarpov.scalingua.plural.Suffix
import ru.makkarpov.scalingua.InsertableIterator._
object Macros {
// All macros variants: (lazy, eager) x (singular, plural) x (interpolation, ctx, non ctx, tagged), 16 total
// Interpolators:
def interpolate[T: c.WeakTypeTag](c: Context)
(args: c.Expr[Any]*)
(lang: c.Expr[Language], outputFormat: c.Expr[OutputFormat[T]]): c.Expr[T] =
{
import c.universe._
val (msg, argsT) = interpolator(c)(args.map(_.tree))
c.Expr[T](generate[T](c)(None, q"$msg", None, argsT)(Some(lang.tree), outputFormat.tree))
}
def lazyInterpolate[T: c.WeakTypeTag](c: Context)
(args: c.Expr[Any]*)
(outputFormat: c.Expr[OutputFormat[T]]): c.Expr[LValue[T]] =
{
import c.universe._
val (msg, argsT) = interpolator(c)(args.map(_.tree))
c.Expr[LValue[T]](generate[T](c)(None, q"$msg", None, argsT)(None, outputFormat.tree))
}
def pluralInterpolate[T: c.WeakTypeTag](c: Context)
(args: c.Expr[Any]*)
(lang: c.Expr[Language], outputFormat: c.Expr[OutputFormat[T]]): c.Expr[T] =
{
import c.universe._
val (msg, msgP, argsT, nVar) = pluralInterpolator(c)(args.map(_.tree))
c.Expr[T](generate[T](c)(None, q"$msg", Some((q"$msgP", nVar, false)), argsT)(Some(lang.tree), outputFormat.tree))
}
def lazyPluralInterpolate[T: c.WeakTypeTag](c: Context)
(args: c.Expr[Any]*)
(outputFormat: c.Expr[OutputFormat[T]]): c.Expr[LValue[T]] =
{
import c.universe._
val (msg, msgP, argsT, nVar) = pluralInterpolator(c)(args.map(_.tree))
c.Expr[LValue[T]](generate[T](c)(None, q"$msg", Some((q"$msgP", nVar, false)), argsT)(None, outputFormat.tree))
}
// Singular:
def singular[T: c.WeakTypeTag](c: Context)
(msg: c.Expr[String], args: c.Expr[(String, Any)]*)
(lang: c.Expr[Language], outputFormat: c.Expr[OutputFormat[T]]): c.Expr[T] =
c.Expr[T](generate[T](c)(None, msg.tree, None, args.map(_.tree))(Some(lang.tree), outputFormat.tree))
def lazySingular[T: c.WeakTypeTag](c: Context)
(msg: c.Expr[String], args: c.Expr[(String, Any)]*)
(outputFormat: c.Expr[OutputFormat[T]]): c.Expr[LValue[T]] =
c.Expr[LValue[T]](generate[T](c)(None, msg.tree, None, args.map(_.tree))(None, outputFormat.tree))
def singularCtx[T: c.WeakTypeTag](c: Context)
(ctx: c.Expr[String], msg: c.Expr[String], args: c.Expr[(String, Any)]*)
(lang: c.Expr[Language], outputFormat: c.Expr[OutputFormat[T]]): c.Expr[T] =
c.Expr[T](generate[T](c)(Some(ctx.tree), msg.tree, None, args.map(_.tree))(Some(lang.tree), outputFormat.tree))
def lazySingularCtx[T: c.WeakTypeTag](c: Context)
(ctx: c.Expr[String], msg: c.Expr[String], args: c.Expr[(String, Any)]*)
(outputFormat: c.Expr[OutputFormat[T]]): c.Expr[LValue[T]] =
c.Expr[LValue[T]](generate[T](c)(Some(ctx.tree), msg.tree, None, args.map(_.tree))(None, outputFormat.tree))
// Plural:
def plural[T: c.WeakTypeTag](c: Context)
(msg: c.Expr[String], msgPlural: c.Expr[String], n: c.Expr[Long], args: c.Expr[(String, Any)]*)
(lang: c.Expr[Language], outputFormat: c.Expr[OutputFormat[T]]): c.Expr[T] =
c.Expr[T](generate[T](c)(None, msg.tree, Some((msgPlural.tree, n.tree, true)), args.map(_.tree))
(Some(lang.tree), outputFormat.tree))
def lazyPlural[T: c.WeakTypeTag](c: Context)
(msg: c.Expr[String], msgPlural: c.Expr[String], n: c.Expr[Long], args: c.Expr[(String, Any)]*)
(outputFormat: c.Expr[OutputFormat[T]]): c.Expr[LValue[T]] =
c.Expr[LValue[T]](generate[T](c)(None, msg.tree, Some((msgPlural.tree, n.tree, true)), args.map(_.tree))
(None, outputFormat.tree))
def pluralCtx[T: c.WeakTypeTag](c: Context)
(ctx: c.Expr[String], msg: c.Expr[String], msgPlural: c.Expr[String], n: c.Expr[Long], args: c.Expr[(String, Any)]*)
(lang: c.Expr[Language], outputFormat: c.Expr[OutputFormat[T]]): c.Expr[T] =
c.Expr[T](generate[T](c)(Some(ctx.tree), msg.tree, Some((msgPlural.tree, n.tree, true)), args.map(_.tree))
(Some(lang.tree), outputFormat.tree))
def lazyPluralCtx[T: c.WeakTypeTag](c: Context)
(ctx: c.Expr[String], msg: c.Expr[String], msgPlural: c.Expr[String], n: c.Expr[Long], args: c.Expr[(String, Any)]*)
(outputFormat: c.Expr[OutputFormat[T]]): c.Expr[LValue[T]] =
c.Expr[LValue[T]](generate[T](c)(Some(ctx.tree), msg.tree, Some((msgPlural.tree, n.tree, true)), args.map(_.tree))
(None, outputFormat.tree))
// Tagged: just forwards all calls to supplied Language.
// These should be implemented as macros since otherwise they would leak a reference to I18n in compiled code.
def singularTag[T: c.WeakTypeTag](c: Context)
(tag: c.Expr[String], args: c.Expr[(String, Any)]*)
(lang: c.Expr[Language], outputFormat: c.Expr[OutputFormat[T]]): c.Expr[T] =
c.Expr[T](tagGenerate(c)(tag.tree, None, args.map(_.tree))(Some(lang.tree), outputFormat.tree))
def lazySingularTag[T: c.WeakTypeTag](c: Context)
(tag: c.Expr[String], args: c.Expr[(String, Any)]*)
(outputFormat: c.Expr[OutputFormat[T]]): c.Expr[LValue[T]] =
c.Expr[LValue[T]](tagGenerate(c)(tag.tree, None, args.map(_.tree))(None, outputFormat.tree))
def pluralTag[T: c.WeakTypeTag](c: Context)
(tag: c.Expr[String], n: c.Expr[Long], args: c.Expr[(String, Any)]*)
(lang: c.Expr[Language], outputFormat: c.Expr[OutputFormat[T]]): c.Expr[T] =
c.Expr[T](tagGenerate(c)(tag.tree, Some(n.tree), args.map(_.tree))(Some(lang.tree), outputFormat.tree))
def lazyPluralTag[T: c.WeakTypeTag](c: Context)
(tag: c.Expr[String], n: c.Expr[Long], args: c.Expr[(String, Any)]*)
(outputFormat: c.Expr[OutputFormat[T]]): c.Expr[LValue[T]] =
c.Expr[LValue[T]](tagGenerate(c)(tag.tree, Some(n.tree), args.map(_.tree))(None, outputFormat.tree))
// Macro internals:
/**
* A generic macro that extracts interpolation string and set of interpolation
* variables from string interpolator invocation.
*
* @param c Macro context
* @param args Arguments of interpolator
* @return (Extracted string, extracted variables)
*/
private def interpolator(c: Context)(args: Seq[c.Tree]): (String, Seq[c.Tree]) = {
import c.universe._
// Extract raw interpolation parts
val parts = c.prefix.tree match {
case Apply(_, List(Apply(_, rawParts))) =>
rawParts.map(stringLiteral(c)(_)).map(processEscapes)
case _ =>
c.abort(c.enclosingPosition, s"failed to match prefix, got ${prettyPrint(c)(c.prefix.tree)}")
}
interpolationString(c)(parts, args)
}
/**
* A macro function that extracts singular and plural strings, arguments and `n` variable from plural interpolation.
*
* E.g.: `I have $n fox${S.es}` ->
* * Singular string: "I have %(n) fox"
* * Plural string: "I have %(n) foxes"
* * Arguments: <| "n" -> n |>
* * N variable: <| n |>
*
* @param c Macro context
* @param args Interpolation arguments
* @return (singular string, plural string, interpolation arguments, plural number variable)
*/
private def pluralInterpolator(c: Context)(args: Seq[c.Tree]): (String, String, Seq[c.Tree], c.Tree) = {
import c.universe._
val parts = c.prefix.tree match {
case Apply(_, List(Apply(_, rawParts))) =>
rawParts.map(stringLiteral(c)(_)).map(processEscapes)
case _ =>
c.abort(c.enclosingPosition, s"failed to match prefix, got ${prettyPrint(c)(c.prefix.tree)}")
}
assert(parts.size == args.size + 1)
def nVarHint(expr: c.Tree): Option[c.Tree] = expr match {
case q"$prefix.int2MacroExtension($arg).nVar" => Some(arg)
case q"$prefix.long2MacroExtension($arg).nVar" => Some(arg)
case _ => None
}
// Find a variable that represents plural number and strip `.nVar`s, if any.
val (filteredArgs, nVar) = {
val intVars = args.indices.filter { i =>
val tpe = typecheck(c)(args(i)).tpe
(tpe <:< typeOf[Int]) || (tpe <:< typeOf[Long])
}
val nVars = args.indices.filter(i => nVarHint(args(i)).isDefined)
val chosenN = (intVars, nVars) match {
case (_, Seq(i)) => i
case (_, Seq(_, _*)) => c.abort(c.enclosingPosition, "multiple `.nVar` annotations present")
case (Seq(i), _) => i
case (Seq(), Seq()) => c.abort(c.enclosingPosition, "no integer variable is present - provide at least one as plural number")
case _ => c.abort(c.enclosingPosition, "multiple integer variables present. Annotate one that represents a plural number with `x.nVar`")
}
val fArgs = args.map(x => nVarHint(x).getOrElse(x))
(fArgs, fArgs(chosenN))
}
// Merge parts separated by a plural suffix - e.g. "fox${S.es}" becomes "fox" (singular) and "foxes" (plural).
val (partsSingular, partsPlural, finalArgs) = {
val itS = parts.iterator.insertable
val itP = parts.iterator.insertable
val retS = Seq.newBuilder[String]
val retP = Seq.newBuilder[String]
val retA = Seq.newBuilder[c.Tree]
for {
arg <- args
tpe = typecheck(c)(arg).tpe
} if (tpe <:< weakTypeOf[Suffix.Generic]) {
arg match {
case q"$prefix.string2SuffixExtension($sing).&>($plur)" =>
itS.unnext(itS.next() + stringLiteral(c)(sing) + itS.next())
itP.unnext(itP.next() + stringLiteral(c)(plur) + itP.next())
case _ =>
c.abort(c.enclosingPosition, s"expression of type `Suffix.Generic` should have `a &> b` form, got instead `${prettyPrint(c)(arg)}`")
}
} else if (tpe <:< weakTypeOf[Suffix]) {
val rawSuffix =
if (tpe <:< weakTypeOf[Suffix.S]) "s"
else if (tpe <:< weakTypeOf[Suffix.ES]) "es"
else c.abort(c.enclosingPosition, s"unknown suffix type: $tpe")
val suffix =
if (itS.head.nonEmpty && Character.isUpperCase(itS.head.last)) rawSuffix.toUpperCase
else rawSuffix
itS.unnext(itS.next() + itS.next())
itP.unnext(itP.next() + suffix + itP.next())
} else {
retS += itS.next()
retP += itP.next()
retA += arg
}
// One part should remain:
retS += itS.next()
retP += itP.next()
(retS.result(), retP.result(), retA.result())
}
// Build interpolation strings by parts
val (sStr, tArgs) = interpolationString(c)(partsSingular, finalArgs)
// These strings are guaranteed to have the same structure, so we can ignore the second args:
val (pStr, _) = interpolationString(c)(partsPlural, finalArgs)
(sStr, pStr, tArgs, nVar)
}
/**
* A generic function to generate interpolation results. Other macros do nothing but call it.
*
* @param c Macro context
* @param ctxTree Optional tree with context argument
* @param msgTree Message argument
* @param pluralTree Optional with (plural message, n, insert "n" arg) arguments
* @param argsTree Supplied args as a trees
* @param lang Language tree that if present means instant evaluation
* @param outputFormat Tree representing `OutputFormat[T]` instance
* @return Tree representing an instance of `T` if language was present, or `LValue[T]` if
* language was absent.
*/
private def generate[T: c.WeakTypeTag](c: Context)
(ctxTree: Option[c.Tree], msgTree: c.Tree, pluralTree: Option[(c.Tree, c.Tree, Boolean)], argsTree: Seq[c.Tree])
(lang: Option[c.Tree], outputFormat: c.Tree): c.Tree =
{
import c.universe._
val session = MessageExtractor.setupSession(c)
// Extract literals:
val rawCtx = ctxTree.map(stringLiteral(c))
val ctx = session.setts.mergeContext(rawCtx)
val args = argsTree.map(tupleLiteral(c)(_)) ++ (pluralTree match {
case Some((_, n, true)) => Seq("n" -> n)
case _ => Nil
})
// Strip off introduced escapes if string does not contain interpolation
def unescape(s: String): String =
if (args.isEmpty) StringUtils.interpolate(s) else s
val msg = unescape(stringLiteral(c)(msgTree))
val plural = pluralTree.map { case (s, n, i) => (unescape(stringLiteral(c)(s)), n, i) }
// Call message extractor:
plural match {
case None => session.singular(c)(rawCtx, msg)
case Some((pl, _, _)) => session.plural(c)(rawCtx, msg, pl)
}
// Verify variables consistency:
def verifyVariables(s: String): Unit = {
if (args.isEmpty)
return // no interpolation, nothing to verify
val varsArg = args.map(_._1).toSet
val varsStr = StringUtils.extractVariables(s).toSet
for (v <- (varsArg diff varsStr) ++ (varsStr diff varsArg))
if (varsArg.contains(v))
c.abort(c.enclosingPosition, s"variable `$v` is not present in interpolation string")
else
c.abort(c.enclosingPosition, s"variable `$v` is not present at arguments section")
}
for ((v, xs) <- args.groupBy(_._1) if xs.length > 1)
c.abort(c.enclosingPosition, s"duplicate variable `$v`")
verifyVariables(msg)
for ((pl, _, _) <- plural)
verifyVariables(pl)
/**
* Given a language tree `lng`, creates a tree that will translate given message.
*/
def translate(lng: c.Tree): c.Tree = {
val str = plural match {
case None => q"$lng.singular(..${ctx.toSeq}, $msg)"
case Some((pl, n, _)) => q"$lng.plural(..${ctx.toSeq}, $msg, $pl, $n)"
}
if (args.isEmpty)
q"$outputFormat.convert($str)"
else {
val argsT = processArgs(c)(args, lng)
q"_root_.ru.makkarpov.scalingua.StringUtils.interpolate[${weakTypeOf[T]}]($str, ..$argsT)"
}
}
lang match {
case Some(lng) => translate(lng)
case None =>
val name = termName(c)("lng")
q"""
new _root_.ru.makkarpov.scalingua.LValue(
($name: _root_.ru.makkarpov.scalingua.Language) => ${translate(q"$name")}
)
"""
}
}
private def tagGenerate[T: c.WeakTypeTag](c: Context)
(tagTree: c.Tree, pluralTree: Option[c.Tree], argsTree: Seq[c.Tree])
(lang: Option[c.Tree], outputFormat: c.Tree): c.Tree =
{
import c.universe._
def translate(lng: c.Tree): c.Tree = {
val str = pluralTree match {
case None => q"$lng.taggedSingular($tagTree)"
case Some(n) => q"$lng.taggedPlural($tagTree, $n)"
}
if (argsTree.isEmpty) q"$outputFormat.convert($str)"
else q"_root_.ru.makkarpov.scalingua.StringUtils.interpolate[${weakTypeOf[T]}]($str, ..$argsTree)"
}
lang match {
case Some(lng) => translate(lng)
case None =>
val name = termName(c)("lng")
q"""
new _root_.ru.makkarpov.scalingua.LValue(
($name: _root_.ru.makkarpov.scalingua.Language) => ${translate(q"$name")}
)
"""
}
}
/**
* Converts name/value pairs to a sequence of `name -> value` tuples, expanding
* language-dependent arguments (an `LValue` argument is applied to the supplied language).
*
* @param c Macro context
* @param args Name/value argument pairs
* @param lang Tree representing the language used to evaluate `LValue` arguments
* @return Trees of `name -> value` pairs suitable for `StringUtils.interpolate`
*/
private def processArgs(c: Context)(args: Seq[(String, c.Tree)], lang: c.Tree): Seq[c.Tree] = args.map {
case (k, v) =>
import c.universe._
val tpe = typecheck(c)(v).tpe
val xv =
if (tpe <:< weakTypeOf[LValue[_]]) q"$v($lang)"
else v
q"$k -> $xv"
}
/**
* Given the parts of interpolation string and trees of interpolation arguments, this function tries to
* guess final string with variable names like "Hello, %(name)!"
*
* @param c Macro context
* @param parts Interpolation string parts
* @param args Interpolation variables
* @return Final string and trees of arguments to `StringUtils.interpolate` (in format of `a -> b`)
*/
private def interpolationString(c: Context)(parts: Seq[String], args: Seq[c.Tree]): (String, Seq[c.Tree]) = {
import c.universe._
assert(parts.size == args.size + 1)
val inferredNames = args.map {
case Ident(name: TermName) => Some(name.decodedName.toString)
case Select(This(_), name: TermName) => Some(name.decodedName.toString)
case _ => None
}
// Match the %(x) explicit variable name specifications in parts and get final variable names
val filtered: Seq[(String /* part */, String /* arg name */, c.Tree /* value */)] =
for {
idx <- args.indices
argName = inferredNames(idx)
part = parts(idx + 1)
} yield {
if (part.startsWith(StringUtils.VariableStartStr)) {
val pos = part.indexOf(StringUtils.VariableParentheses._2)
val name = part.substring(2, pos)
val filtered = part.substring(pos + 1)
(filtered, name, args(idx))
} else if (part.startsWith(StringUtils.VariableEscapeStr)) {
(StringUtils.VariableStr + part.substring(2), argName.get, args(idx))
} else if (part.startsWith(StringUtils.VariableStr)) {
c.abort(c.enclosingPosition, s"Stray '${StringUtils.VariableStr}' at the beginning of part: it should start "+
s"either with '${StringUtils.VariableEscapeStr}' or '${StringUtils.VariableStartStr}'")
} else {
if (argName.isEmpty)
c.abort(c.enclosingPosition, s"No name is defined for part #$idx (${Compat.prettyPrint(c)(args(idx))})")
(part, argName.get, args(idx))
}
}
(StringUtils.escapeInterpolation(parts.head) + filtered.map {
case (part, name, _) => s"%($name)${StringUtils.escapeInterpolation(part)}"
}.mkString, filtered.map {
case (_, name, value) => q"($name, $value)"
})
}
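// Editor's note (illustrative, not from the original source): for an interpolation whose
// parts are Seq("Hello, ", ", you have ", " new messages") with argument trees for `name`
// and `count`, the inferred variable names are "name" and "count", and the resulting
// template is "Hello, %(name), you have %(count) new messages" paired with the argument
// tuples ("name", name) and ("count", count).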
/**
* Matches string against string literal pattern and return literal string if matched. Currently supported
* literal types:
*
* 1) Plain literals like `"123"`
* 2) Strip margin literals like `""" ... """.stripMargin`
* 3) Strip margin literals with custom margin character like `""" ... """.stripMargin('#')`
*
* @param c Macro context
* @param e Tree to match
* @return Extracted string literal
*/
private def stringLiteral(c: Context)(e: c.Tree): String = {
import c.universe._
def stripMargin(str: Tree, ch: Tree): String = (str, ch) match {
case (Literal(Constant(s: String)), Literal(Constant(c: Char))) => s.stripMargin(c).trim
case (Literal(Constant(s: String)), EmptyTree) => s.stripMargin.trim
case (Literal(Constant(_: String)), _) =>
c.abort(c.enclosingPosition, s"Expected character literal, got instead ${prettyPrint(c)(ch)}")
case _ => c.abort(c.enclosingPosition, s"Expected string literal, got instead ${prettyPrint(c)(str)}")
}
e match {
case Literal(Constant(s: String)) => s
case q"scala.this.Predef.augmentString($str).stripMargin" => stripMargin(str, EmptyTree) // 2.11
case q"scala.Predef.augmentString($str).stripMargin" => stripMargin(str, EmptyTree) // 2.12
case q"scala.this.Predef.augmentString($str).stripMargin($ch)" => stripMargin(str, ch) // 2.11
case q"scala.Predef.augmentString($str).stripMargin($ch)" => stripMargin(str, ch) // 2.12
case _ =>
c.abort(c.enclosingPosition, s"Expected string literal or multi-line string, got instead ${prettyPrint(c)(e)}")
}
}
/**
* Matches string against tuple `(String, T)` pattern and returns extracted string literal and tuple value.
* Currently supported literal types:
*
* 1) Plain literals like `("1", x)`
* 2) ArrowAssoc literals like `"1" -> x`
*
* @param c Macro context
* @param e Tree to match
* @return Extracted tuple literal parts
*/
private def tupleLiteral(c: Context)(e: c.Tree): (String, c.Tree) = {
import c.universe._
val (a, b) = e match {
case q"scala.Predef.ArrowAssoc[$aType]($ax).->[$bType]($bx)" => (ax, bx) // 2.12
case q"scala.this.Predef.ArrowAssoc[$aType]($ax).->[$bType]($bx)" => (ax, bx) // 2.11
case q"scala.this.Predef.any2ArrowAssoc[$aType]($ax).->[$bType]($bx)" => (ax, bx) // 2.10
case q"($ax, $bx)" => (ax, bx)
case _ =>
c.abort(c.enclosingPosition, s"Expected tuple definition `x -> y` or `(x, y)`, got instead ${prettyPrint(c)(e)}")
}
(stringLiteral(c)(a), b)
}
}
|
makkarpov/scalingua
|
scalingua/shared/src/main/scala/ru/makkarpov/scalingua/Macros.scala
|
Scala
|
apache-2.0
| 21,449 |
package net.wrap_trap.goju
import com.typesafe.config.Config
import com.typesafe.config.ConfigException
import com.typesafe.config.ConfigFactory
/**
* goju-to: HanoiDB (LSM-trees (Log-Structured Merge Trees) Indexed Storage) clone
* Copyright (c) 2016 Masayuki Takahashi
* This software is released under the MIT License.
* http://opensource.org/licenses/mit-license.php
*/
object Settings {
var config: Option[Config] = None
def getSettings: Settings = {
if (config.isEmpty) {
config = Option(ConfigFactory.load())
}
new Settings(config.get)
}
}
class Settings(config: Config) {
def getString(path: String, default: String): String =
readValue(path, config.getString(path), default)
def getInt(path: String, default: Int): Int = readValue(path, config.getInt(path), default)
def getLong(path: String, default: Long): Long = readValue(path, config.getLong(path), default)
def getBoolean(path: String, default: Boolean): Boolean =
readValue(path, config.getBoolean(path), default)
def hasPath(path: String): Boolean = config.hasPath(path)
private def readValue[T](path: String, v: => T, default: T): T = {
try {
v
} catch {
case _: ConfigException.Missing => default
case _: Throwable => throw new IllegalArgumentException("Failed to get: " + path)
}
}
}
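// Editor's note: a small usage sketch, not part of the original file. The configuration
// paths and default values below are assumptions for illustration only.
object SettingsUsageSketch {
  def example(): Unit = {
    val settings = Settings.getSettings
    // each getter falls back to the supplied default when the path is missing
    val dataDir = settings.getString("goju.data_dir", "./data")
    val writeBufferSize = settings.getInt("goju.write_buffer_size", 4096)
    println(s"data_dir=$dataDir, write_buffer_size=$writeBufferSize")
  }
}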
|
masayuki038/goju
|
src/main/scala/net/wrap_trap/goju/Settings.scala
|
Scala
|
mit
| 1,394 |
package nozzle.logging
import ingredients.logging._
import scala.language.experimental.macros
trait Logging {
def logger(name: String): Logger
def plainOldLogger(name: String): PlainOldLogger
def nameOf[T]: String = macro Logger.nameOf[T]
}
|
utaal/nozzle
|
src/main/scala/logging/Logging.scala
|
Scala
|
mit
| 249 |
// This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
package ducttape.exec
import java.io.File
import collection._
import ducttape.util.Files
import ducttape.util.OrderedSet
import ducttape.util.MutableOrderedSet
import ducttape.workflow.Realization
import ducttape.workflow.VersionedTask
import ducttape.workflow.VersionedTaskId
import ducttape.versioner.WorkflowVersionInfo
import grizzled.slf4j.Logging
// checks the state of a task directory to make sure things completed as expected
// TODO: Return a set object with incomplete nodes that can be handed to future passes
// so that completion checking is atomic
object CompletionChecker extends Logging {
def isExitCodeZero(exitCodeFile: File): Boolean = {
try {
Files.read(exitCodeFile)(0).trim == "0"
} catch {
case _: Throwable => false
}
}
def NO_CALLBACK(task: VersionedTask, msg: String) {}
def isComplete(taskEnv: TaskEnvironment,
incompleteCallback: (VersionedTask, String) => Unit = NO_CALLBACK): Boolean = {
// TODO: Grep stdout/stderr for "error"
// TODO: Move this check and make it check file size and date with fallback to checksums? or always checksums? or checksum only if files are under a certain size?
// use a series of thunks so that we don't try to open non-existent files
val conditions: Seq[(() => Boolean, Option[String])] = (
Seq(( () => taskEnv.where.exists, None), // no message, since this is normal
( () => taskEnv.exitCodeFile.exists, Some("Exit code file does not exist")),
( () => isExitCodeZero(taskEnv.exitCodeFile), Some("Non-zero exit code")),
( () => taskEnv.stdoutFile.exists, Some("Stdout file does not exist")),
( () => taskEnv.stderrFile.exists, Some("Stderr file does not exist")),
( () => !isInvalidated(taskEnv), Some("Previous version is complete, but invalidated"))) ++
taskEnv.outputs.map { case (_, f: String) =>
( () => Files.exists(f), Some(s"${f} does not exist"))
}
)
// check each condition necessary for a task to be complete
// if a condition fails, notify the user why, if a message is provided
conditions.forall { case (cond, msgOpt) =>
val conditionHolds = cond()
if (!conditionHolds) msgOpt match {
case Some(msg) => incompleteCallback(taskEnv.task, msg)
case None => ;
}
conditionHolds
}
}
def isInvalidated(taskEnv: TaskEnvironment): Boolean = taskEnv.invalidatedFile.exists
def invalidate(taskEnv: TaskEnvironment): Boolean = taskEnv.invalidatedFile.createNewFile
// not recommended -- but sometimes manual intervention is required
// and the user just wants the workflow to continue
def forceCompletion(taskEnv: TaskEnvironment) {
Files.write("0", taskEnv.exitCodeFile)
val files = List(taskEnv.stdoutFile, taskEnv.stderrFile) ++ taskEnv.outputs.map {
case (_,f) => new File(f)
}
for (file <- files) {
if (!file.exists) {
Files.write("", file)
}
}
if (!isComplete(taskEnv)) {
throw new RuntimeException("Failed to force completion of task")
}
}
def hasPartialOutput(taskEnv: TaskEnvironment) = taskEnv.where.exists
def isBroken(taskEnv: TaskEnvironment) = taskEnv.where.exists && !taskEnv.versionFile.exists
def isLocked(taskEnv: TaskEnvironment) = taskEnv.lockFile.exists
}
// the initVersioner is generally the MostRecentWorkflowVersioner, so that we can check if
// the most recent result is untouched, invalid, partial, or complete
//
// use incompleteCallback to show info about why tasks aren't complete
class CompletionChecker(dirs: DirectoryArchitect,
unionVersion: WorkflowVersionInfo,
nextWorkflowVersion: Int,
incompleteCallback: (VersionedTask, String) => Unit)
extends UnpackedDagVisitor with Logging {
// we make a single pass to atomically determine what needs to be done
// so that we can then prompt the user for confirmation
// note: this is one of the major reasons that ducttape is a graph specification
// language with an imperative language (bash) nested within --
// if ducttape were turing complete, this multi-pass approach wouldn't be possible
private val _completedVersions = new MutableOrderedSet[VersionedTaskId]
private val _todoVersions = new MutableOrderedSet[VersionedTaskId]
private val _completed = new MutableOrderedSet[(String,Realization)] // TODO: Change datatype of realization?
private val _partial = new MutableOrderedSet[(String,Realization)] // not complete, but has partial output
private val _todo = new MutableOrderedSet[(String,Realization)]
private val _broken = new MutableOrderedSet[(String,Realization)]
private val _locked = new MutableOrderedSet[(String,Realization)]
// NOTE: completed never includes invalidated
// TODO: Change these tuples to "RealTaskId"?
def completedVersions: OrderedSet[VersionedTaskId] = _completedVersions
def todoVersions: OrderedSet[VersionedTaskId] = _todoVersions
def completed: OrderedSet[(String,Realization)] = _completed
def partial: OrderedSet[(String,Realization)] = _partial
def todo: OrderedSet[(String,Realization)] = _todo
def broken: OrderedSet[(String,Realization)] = _broken
def locked: OrderedSet[(String,Realization)] = _locked
// the version of this task will be drawn from the "union" workflow version info
override def visit(task: VersionedTask) {
debug("Checking $task")
val taskEnv = new TaskEnvironment(dirs, task)
if (CompletionChecker.isComplete(taskEnv, incompleteCallback)) {
_completedVersions += task.toVersionedTaskId
_completed += ((task.name, task.realization))
} else {
// do NOT reuse the existing task for its version
// since we're about to create a new version
// and there's no way for the union workflow versioner
// to know whether or not we'll use the existing version or not
//
// The walker is versioning with the "union" version --
// If it has no version for this task, we'll get the "next" workflow version (which is fine)
// But if it gave us a previous version, we want to reject that version and start a new one
_todoVersions += new VersionedTaskId(task.namespace, task.realization.toCanonicalString(), nextWorkflowVersion)
_todo += ((task.name, task.realization))
debug(s"Todo: $task (Version $nextWorkflowVersion)")
// Important: Check for locking *before* checking if something is broken
// since not all output files may exist while another process is working on this task
if (CompletionChecker.isLocked(taskEnv)) {
debug(s"Locked: $task")
_locked += ((task.name, task.realization))
} else if (CompletionChecker.isBroken(taskEnv)) {
debug(s"Broken: $task")
_broken += ((task.name, task.realization))
} else if (CompletionChecker.hasPartialOutput(taskEnv)) {
debug(s"Partially complete: $task")
_partial += ((task.name, task.realization))
}
}
}
}
|
jhclark/ducttape
|
src/main/scala/ducttape/exec/CompletionChecker.scala
|
Scala
|
mpl-2.0
| 7,311 |
package com.github.bepcyc
import org.apache.spark.sql.types.StructField
import org.apache.spark.sql.{Column, DataFrame, DataFrameWriter, SQLContext}
import scala.util.{Failure, Success, Try}
object SparkTools {
implicit class DataFrameOps(df: DataFrame) {
def createTable(name: String, partitions: Seq[String] = Seq.empty) =
HiveTable(name)(df.sqlContext).createTableFrom(df, partitions)
}
case class HiveTable(tableName: String)(implicit val ctx: SQLContext) {
import ctx.implicits._
val (db, name): (String, String) = {
val parts = tableName.split('.')
(parts(0), parts(1))
}
lazy val exists: Boolean =
ctx
.sql(s"SHOW TABLES IN $db")
.where($"tableName" === name)
.count == 1
def createTableFrom(df: DataFrame, partitions: Seq[String]): Unit =
createTableFrom(df = df, partitions = Option(partitions) filter (_.nonEmpty))
def createTableFrom(df: DataFrame,
partitions: Option[Seq[String]] = None): Unit = {
if (!exists) {
val query = CreateTable(df = df, tableName = tableName, partitions = partitions).sqlQuery
ctx sql query
}
}
/**
* Aligns DataFrame's column and data order with a table, respecting partitions order
*
* @param df source DataFrame
* @return DataFrame with aligned columns
*/
def align(df: DataFrame,
partitions: Option[Seq[String]] = None,
sortWithinPartitions: Option[Seq[String]] = None): DataFrame = exists match {
case false => df
case true => val cols: Array[Column] = ctx.table(tableName).columns.map(df(_))
val dfSelect = df.select(cols: _*)
val dfPartitions = partitions orElse discoverPartitions match {
case Some(parts) => dfSelect.repartition(parts.map(df(_)): _*)
case _ => dfSelect
}
sortWithinPartitions match {
case Some(parts) => dfPartitions.sortWithinPartitions(parts.map(dfPartitions(_)): _*)
case _ => dfPartitions
}
}
def discoverPartitions: Option[Seq[String]] = exists match {
case false => None
case true => Try(ctx.sql(s"SHOW PARTITIONS $tableName").as[String].first) match {
case Failure(_) => None
case Success(str) => val parts = str
.split('/')
.filter(_.contains('='))
.map(p => p.split('=').head)
.toSeq
Option(parts).filterNot(_.isEmpty)
}
}
}
trait HiveTableOperation {
val partitions: Option[Seq[String]]
def structFieldSQL(s: StructField): String = s"${s.name} ${s.dataType.simpleString}"
def sqlQuery: String
def fieldsAndPartitionsFrom(df: DataFrame): (Seq[String], Seq[String]) = {
val (f, p) = {
// we want to preserve the order of fields everywhere, so use of partition() is not possible
val flds: Seq[StructField] = df
.schema
.filterNot(f => partitions.getOrElse(Seq.empty).toSet.contains(f.name))
// will fail if no such partition exists - and it's good
val prtns: Seq[StructField] = partitions.getOrElse(Seq.empty).map(p => df
.schema.find(_.name == p).get)
(flds, prtns)
}
(f map structFieldSQL, p map structFieldSQL)
}
}
// TODO: finish or kill it
case class InsertQuery(sinkTableName: String, partitions: Option[Seq[String]], sourceTableName: String = "temp1")
(implicit val sqlContext: SQLContext) extends HiveTableOperation {
val (fieldNames, partitionNames) = fieldsAndPartitionsFrom(sqlContext.table(sinkTableName))
private lazy val template: Seq[(String, Option[String])] = Seq(
"INSERT INTO TABLE %s" -> Some(sinkTableName),
"PARTITION (%s)" -> Option(partitionNames).filter(_.nonEmpty).map(_ mkString ",")
)
lazy val fields: Seq[String] =
sqlContext.table(sinkTableName).schema.fieldNames.filterNot(partitions.getOrElse(Seq.empty).toSet).toSeq
override def sqlQuery: String = ""
}
case class CreateTable(df: DataFrame,
tableName: String,
partitions: Option[Seq[String]] = None,
format: Option[String] = Some("orc"),
tblProperties: Option[Map[String, String]] = Some(Map("orc.compress" -> "ZLIB"))
) extends HiveTableOperation {
val (fieldNames, partitionNames) = fieldsAndPartitionsFrom(df)
private lazy val template: Seq[(String, Option[String])] = Seq(
"CREATE TABLE %s (" -> Some(tableName),
"%s" -> Some(fieldNames.mkString(",")),
")" -> Some(""),
"PARTITIONED BY (%s)" -> Option(partitionNames).filter(_.nonEmpty).map(_ mkString ","),
"STORED AS %s" -> format,
"TBLPROPERTIES(%s)" -> tblProperties.map(_.map { case (k, v) => s"""'$k'='$v'""" }.mkString(","))
)
override def sqlQuery: String =
template
.collect { case (k, Some(v)) => k format v }
.mkString(" ")
.trim
}
}
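// Editor's note: a hypothetical usage sketch of the helpers above, not part of the
// original file. The table name and the "date" partition column are assumptions.
object SparkToolsUsageSketch {
  import SparkTools._
  def example(df: DataFrame)(implicit ctx: SQLContext): DataFrame = {
    // create the Hive table from the DataFrame schema if it does not exist yet
    df.createTable("analytics.events", Seq("date"))
    // align another write with the existing table's column and partition order
    HiveTable("analytics.events").align(df)
  }
}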
|
bepcyc/spark-tools
|
src/main/scala/com/github/bepcyc/SparkTools.scala
|
Scala
|
apache-2.0
| 5,070 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frsse2008.boxes.micro
import uk.gov.hmrc.ct.accounts.retriever.AccountsBoxRetriever
import uk.gov.hmrc.ct.box._
case class AC420(value: Option[Int]) extends CtBoxIdentifier(name = "Current Depreciation and other amounts written off assets")
with CtOptionalInteger with Input
with SelfValidatableBox[AccountsBoxRetriever, Option[Int]] {
override def validate(boxRetriever: AccountsBoxRetriever): Set[CtValidation] = {
validateMoney(value)
}
}
|
liquidarmour/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/accounts/frsse2008/boxes/micro/AC420.scala
|
Scala
|
apache-2.0
| 1,163 |
package scredis.protocol.requests
import scredis.protocol._
import scredis.serialization.{ Reader, Writer }
import scala.collection.generic.CanBuildFrom
import scala.collection.mutable.ListBuffer
import scala.concurrent.duration.FiniteDuration
object KeyRequests {
import scredis.serialization.Implicits.stringReader
import scredis.serialization.Implicits.bytesReader
object Del extends Command("DEL") with WriteCommand
object Dump extends Command("DUMP")
object Exists extends Command("EXISTS")
object Expire extends Command("EXPIRE") with WriteCommand
object ExpireAt extends Command("EXPIREAT") with WriteCommand
object Keys extends Command("KEYS")
object Migrate extends Command("MIGRATE") with WriteCommand
object Move extends Command("MOVE") with WriteCommand
object ObjectRefCount extends Command("OBJECT", "REFCOUNT")
object ObjectEncoding extends Command("OBJECT", "ENCODING")
object ObjectIdleTime extends Command("OBJECT", "IDLETIME")
object Persist extends Command("PERSIST") with WriteCommand
object PExpire extends Command("PEXPIRE") with WriteCommand
object PExpireAt extends Command("PEXPIREAT") with WriteCommand
object PTTL extends Command("PTTL")
object RandomKey extends ZeroArgCommand("RANDOMKEY")
object Rename extends Command("RENAME") with WriteCommand
object RenameNX extends Command("RENAMENX") with WriteCommand
object Restore extends Command("RESTORE") with WriteCommand
object Scan extends Command("SCAN")
object Sort extends Command("SORT")
object TTL extends Command("TTL")
object Type extends Command("TYPE")
protected def generateSortArgs(
key: String,
byOpt: Option[String],
limitOpt: Option[(Long, Long)],
get: Traversable[String],
desc: Boolean,
alpha: Boolean,
storeKeyOpt: Option[String]
): List[Any] = {
val args = ListBuffer[Any]()
args += key
byOpt.foreach {
args += "BY" += _
}
limitOpt.foreach {
case (offset, limit) => args += "LIMIT" += offset += limit
}
get.foreach {
args += "GET" += _
}
if (desc) {
args += "DESC"
}
if (alpha) {
args += "ALPHA"
}
storeKeyOpt.foreach {
args += "STORE" += _
}
args.toList
}
case class Del(keys: String*) extends Request[Long](Del, keys: _*) {
override def decode = {
case IntegerResponse(value) => value
}
}
case class Dump(key: String) extends Request[Option[Array[Byte]]](Dump, key) {
override def decode = {
case b: BulkStringResponse => b.parsed[Array[Byte]]
}
}
case class Exists(key: String) extends Request[Boolean](Exists, key) {
override def decode = {
case i: IntegerResponse => i.toBoolean
}
}
case class Expire(key: String, ttlSeconds: Int) extends Request[Boolean](
Expire, key, ttlSeconds
) {
override def decode = {
case i: IntegerResponse => i.toBoolean
}
}
case class ExpireAt(key: String, timestampSeconds: Long) extends Request[Boolean](
ExpireAt, key, timestampSeconds
) {
override def decode = {
case i: IntegerResponse => i.toBoolean
}
}
case class Keys[CC[X] <: Traversable[X]](pattern: String)(
implicit cbf: CanBuildFrom[Nothing, String, CC[String]]
) extends Request[CC[String]](Keys, pattern) {
override def decode = {
case a: ArrayResponse => a.parsed[String, CC] {
case b: BulkStringResponse => b.flattened[String]
}
}
}
case class Migrate(
key: String,
host: String,
port: Int,
database: Int,
timeout: FiniteDuration,
copy: Boolean,
replace: Boolean
) extends Request[Unit](
Migrate,
{
val args = ListBuffer[Any](host, port, key, database, timeout.toMillis)
if (copy) {
args += "COPY"
}
if (replace) {
args += "REPLACE"
}
args.toList
}: _*
) {
override def decode = {
case s: SimpleStringResponse => ()
}
}
case class Move(key: String, database: Int) extends Request[Boolean](Move, key, database) {
override def decode = {
case i: IntegerResponse => i.toBoolean
}
}
case class ObjectRefCount(key: String) extends Request[Option[Long]](ObjectRefCount, key) {
override def decode = {
case IntegerResponse(value) => Some(value)
case BulkStringResponse(None) => None
}
}
case class ObjectEncoding(key: String) extends Request[Option[String]](ObjectEncoding, key) {
override def decode = {
case b: BulkStringResponse => b.parsed[String]
}
}
case class ObjectIdleTime(key: String) extends Request[Option[Long]](ObjectIdleTime, key) {
override def decode = {
case IntegerResponse(value) => Some(value)
case BulkStringResponse(None) => None
}
}
case class Persist(key: String) extends Request[Boolean](Persist, key) {
override def decode = {
case i: IntegerResponse => i.toBoolean
}
}
case class PExpire(key: String, ttlMillis: Long) extends Request[Boolean](
PExpire, key, ttlMillis
) {
override def decode = {
case i: IntegerResponse => i.toBoolean
}
}
case class PExpireAt(key: String, timestampMillis: Long) extends Request[Boolean](
PExpireAt, key, timestampMillis
) {
override def decode = {
case i: IntegerResponse => i.toBoolean
}
}
case class PTTL(key: String) extends Request[Either[Boolean, Long]](PTTL, key) {
override def decode = {
case IntegerResponse(-2) => Left(false)
case IntegerResponse(-1) => Left(true)
case IntegerResponse(x) => Right(x)
}
}
case class RandomKey() extends Request[Option[String]](RandomKey) {
override def decode = {
case b: BulkStringResponse => b.parsed[String]
}
}
case class Rename(key: String, newKey: String) extends Request[Unit](Rename, key, newKey) {
override def decode = {
case s: SimpleStringResponse => ()
}
}
case class RenameNX(key: String, newKey: String) extends Request[Boolean](
RenameNX, key, newKey
) {
override def decode = {
case i: IntegerResponse => i.toBoolean
}
}
case class Restore[W: Writer](
key: String, value: W, ttlOpt: Option[FiniteDuration]
) extends Request[Unit](
Restore, key, ttlOpt.map(_.toMillis).getOrElse(0), implicitly[Writer[W]].write(value)
) {
override def decode = {
case s: SimpleStringResponse => ()
}
}
case class Scan[CC[X] <: Traversable[X]](
cursor: Long, matchOpt: Option[String], countOpt: Option[Int]
)(implicit cbf: CanBuildFrom[Nothing, String, CC[String]]) extends Request[(Long, CC[String])](
Scan,
generateScanLikeArgs(
keyOpt = None,
cursor = cursor,
matchOpt = matchOpt,
countOpt = countOpt
): _*
) {
override def decode = {
case a: ArrayResponse => a.parsedAsScanResponse[String, CC] {
case a: ArrayResponse => a.parsed[String, CC] {
case b: BulkStringResponse => b.flattened[String]
}
}
}
}
case class Sort[R: Reader, CC[X] <: Traversable[X]](
key: String,
byOpt: Option[String],
limitOpt: Option[(Long, Long)],
get: Traversable[String],
desc: Boolean,
alpha: Boolean
)(
implicit cbf: CanBuildFrom[Nothing, Option[R], CC[Option[R]]]
) extends Request[CC[Option[R]]](
Sort,
generateSortArgs(key, byOpt, limitOpt, get, desc, alpha, None): _*
) {
override def decode = {
case a: ArrayResponse => a.parsed[Option[R], CC] {
case b: BulkStringResponse => b.parsed[R]
}
}
}
case class SortAndStore(
key: String,
targetKey: String,
byOpt: Option[String],
limitOpt: Option[(Long, Long)],
get: Traversable[String],
desc: Boolean,
alpha: Boolean
) extends Request[Long](
Sort,
generateSortArgs(key, byOpt, limitOpt, get, desc, alpha, Some(targetKey)): _*
) {
override def isReadOnly = false
override def decode = {
case IntegerResponse(value) => value
}
}
case class TTL(key: String) extends Request[Either[Boolean, Int]](TTL, key) {
override def decode = {
case IntegerResponse(-2) => Left(false)
case IntegerResponse(-1) => Left(true)
case IntegerResponse(x) => Right(x.toInt)
}
}
case class Type(key: String) extends Request[Option[scredis.Type]](Type, key) {
override def decode = {
case SimpleStringResponse("none") => None
case SimpleStringResponse(value) => Some(scredis.Type(value))
}
}
}
|
Livestream/scredis
|
src/main/scala/scredis/protocol/requests/KeyRequests.scala
|
Scala
|
apache-2.0
| 8,589 |
package edu.gemini.phase2.template.factory
import edu.gemini.pot.sp.{ISPObservation, ISPGroup}
import edu.gemini.spModel.rich.pot.sp._
import scala.collection.JavaConverters._
import edu.gemini.spModel.data.config.{DefaultParameter, IParameter, ISysConfig}
package object impl {
type Maybe[A] = Either[String,A]
type LibraryId = String
type LibraryNumber = Int
def transpose1[A,B](map:Map[A, List[B]]):List[Map[A, B]] = {
val ws = map.values.map(_.length).toList.distinct
require(ws.length == 1, "Values are non-uniform.")
(0 to ws.head - 1).toList.map(n => map.mapValues(_(n)))
}
def transpose2[A,B](ms:List[Map[A,B]]):Map[A,List[B]] = {
val ks = ms.map(_.keys).distinct
require(ks.length == 1, "Keys are non-uniform.")
ks.head.map(k => (k -> ms.map(_(k)))).toMap
}
// Normalizes a sys config to a Map of non-empty Lists
def toMap(sys: ISysConfig): Map[String, List[Any]] = {
def nonEmptyList(p: IParameter): Option[List[Any]] =
Option(p.getValue) flatMap {
_ match {
case c: java.util.Collection[_] => if (c.isEmpty) None else Some(c.asScala.toList)
case x => Some(List(x))
}
}
(Map.empty[String, List[Any]]/:sys.getParameters.asScala) { case (m, p) =>
val entry = for {
name <- Option(p.getName)
values <- nonEmptyList(p)
} yield (name -> values)
entry.map(e => m+e).getOrElse(m)
}
}
def toParams(m: Map[String, List[Any]]): java.util.Collection[IParameter] = {
val params = m map {
case (name, lst) => DefaultParameter.getInstance(name, new java.util.ArrayList[Any](lst.asJavaCollection)).asInstanceOf[IParameter]
}
params.asJavaCollection
}
implicit def pimpSeq[A](as:Seq[A]) = new {
// RCN: note that mapM needs to fold right and mapM_ needs to fold left
/** Returns successful map or first error. */
def mapM[B, C](f:A => Either[B, C]):Either[B, List[C]] =
(as :\ (Right(Nil):Either[B, List[C]])) {
(a, ecs) =>
for {
cs <- ecs.right
c <- f(a).right
} yield c :: cs
}
/** Returns unit or first error. */
def mapM_[B,C](f:A => Either[B, C]):Either[B, Unit] = {
val x = ((Right(Nil):Either[B, List[C]]) /: as) {
(ecs, a) =>
for {
cs <- ecs.right
c <- f(a).right
} yield c :: cs
}
x.right.map(_ => ())
}
}
/** Returns a or result of exception mapping */
def tryFold[A, B](b: => B)(f:Exception => A):Either[A, B] =
try Right(b) catch { case e:Exception => Left(f(e)) }
implicit def obsLookup(grp:ISPGroup) = new Object {
def apply(libraryId:LibraryId):Maybe[ISPObservation] =
grp.findObservation(_.libraryId.exists(_ == libraryId)).toRight("Could not find observation '%s' in group '%s'".format(libraryId, grp.libraryId))
def apply(libraryId:LibraryNumber):Maybe[ISPObservation] = apply(libraryId.toString)
def allObservations:List[ISPObservation] = grp.getAllObservations.asScala.toList
}
def attempt[A](a: => A) = tryFold(a) {e =>
e.printStackTrace()
e.getMessage
}
}
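// Editor's note: a small illustrative sketch of the helpers above, not part of the
// original file; the sample values are made up.
object ImplUsageSketch {
  import impl._
  def example(): Unit = {
    // transpose1 turns a Map of equal-length Lists into a List of Maps:
    // List(Map(a -> 1, b -> 3), Map(a -> 2, b -> 4))
    println(transpose1(Map("a" -> List(1, 2), "b" -> List(3, 4))))
    // mapM returns Right with all results, or the first Left encountered:
    // Right(List(1, 2, 3))
    println(Seq("1", "2", "3").mapM(s => tryFold(s.toInt)(_.getMessage)))
  }
}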
|
arturog8m/ocs
|
bundle/edu.gemini.phase2.skeleton.servlet/src/main/scala/edu/gemini/phase2/template/factory/impl/package.scala
|
Scala
|
bsd-3-clause
| 3,164 |
package mqfiletransferagent.actors
import akka.camel.Consumer
import akka.camel.CamelMessage
import akka.actor.ActorLogging
import akka.event.LoggingReceive
import scala.xml.XML
import mqfiletransferagent.messages._
import akka.actor.ActorRef
class DataQueueConsumer(dataQueue: String, agentCoordinator: ActorRef) extends Consumer with ActorLogging {
def endpointUri = dataQueue
def this(dataQueue: String) = this(dataQueue, null)
def receive = LoggingReceive {
case message: CamelMessage => agentCoordinator ! DataTransferMessage(message.bodyAs[String])
case x: Any => {
log.warning("CommandQueueConsumer received unknown message type: " + x.getClass)
}
}
}
|
antongerbracht/MQFileTransfer
|
MQFileTransferAgent/src/main/scala/mqfiletransferagent/actors/DataQueueConsumer.scala
|
Scala
|
apache-2.0
| 683 |
package backend.data.mongodb.service
import backend.data.mongodb.dao.UserDaoComponentMongo
import backend.data.service.UserDataService
import model.blog.User
/**
* The UserDataService using MongoDB
*
* @author Stefan Bleibinhaus
*
*/
object UserDataServiceMongo extends UserDataService with UserDaoComponentMongo {
override protected var cachedUser: Option[User] = dao.allAsList match {
case head :: Nil => Some(head)
case _ => None
}
}
|
ExNexu/scablo
|
app/backend/data/mongodb/service/UserDataServiceMongo.scala
|
Scala
|
bsd-2-clause
| 460 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan.metadata
import com.google.common.collect.ImmutableList
import org.apache.calcite.rel.metadata._
object FlinkDefaultRelMetadataProvider {
val INSTANCE: RelMetadataProvider = ChainedRelMetadataProvider.of(
ImmutableList.of(
FlinkRelMdPercentageOriginalRows.SOURCE,
FlinkRelMdNonCumulativeCost.SOURCE,
FlinkRelMdCumulativeCost.SOURCE,
FlinkRelMdRowCount.SOURCE,
FlinkRelMdSize.SOURCE,
FlinkRelMdSelectivity.SOURCE,
FlinkRelMdDistinctRowCount.SOURCE,
FlinkRelMdColumnInterval.SOURCE,
FlinkRelMdFilteredColumnInterval.SOURCE,
FlinkRelMdDistribution.SOURCE,
FlinkRelMdColumnNullCount.SOURCE,
FlinkRelMdColumnOriginNullCount.SOURCE,
FlinkRelMdPopulationSize.SOURCE,
FlinkRelMdColumnUniqueness.SOURCE,
FlinkRelMdUniqueKeys.SOURCE,
FlinkRelMdUniqueGroups.SOURCE,
FlinkRelMdModifiedMonotonicity.SOURCE,
RelMdColumnOrigins.SOURCE,
RelMdMaxRowCount.SOURCE,
RelMdMinRowCount.SOURCE,
RelMdPredicates.SOURCE,
RelMdCollation.SOURCE,
RelMdExplainVisibility.SOURCE
)
)
}
|
shaoxuan-wang/flink
|
flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/plan/metadata/FlinkDefaultRelMetadataProvider.scala
|
Scala
|
apache-2.0
| 1,950 |
/**
* Copyright (C) 2015 DANS - Data Archiving and Networked Services ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.knaw.dans.easy.stage.command.fileitem
import java.io.File
import java.nio.file.Paths
import nl.knaw.dans.easy.stage.command.CustomMatchers._
import nl.knaw.dans.easy.stage.command.{ AbstractConfSpec, Configuration }
import org.apache.commons.configuration.PropertiesConfiguration
import org.rogach.scallop.ScallopConf
class FileItemConfSpec extends AbstractConfSpec {
private def resourceDirString: String = Paths.get(getClass.getResource("/").toURI).toAbsolutePath.toString
private def mockedConfiguration = new Configuration("version x.y.z", new PropertiesConfiguration() {
setDelimiterParsingDisabled(true)
load(Paths.get(resourceDirString + "/debug-config", "application.properties").toFile)
}, Map.empty)
private def clo = new FileItemCommandLineOptions("-i i -d http:// -p p -s 0 --format f outdir".split(" "), mockedConfiguration) {
// avoids System.exit() in case of invalid arguments or "--help"
override def verify(): Unit = {}
}
// Overriding verify to create an instance without arguments
// (as done for other README/pom tests) changes the order of the listed options.
// Another test needs a verified instance, so we keep using the dummy here too.
override def getCommandLineOptions: ScallopConf = clo
"synopsis in help info" should "be part of docs/index.md" in {
new File("../docs/index.md") should containTrimmed(clo.synopsis)
}
}
|
DANS-KNAW/easy-stage-dataset
|
command/src/test/scala/nl.knaw.dans.easy.stage.command/fileitem/FileItemConfSpec.scala
|
Scala
|
apache-2.0
| 2,058 |
package akka.pattern
import language.implicitConversions
import scala.concurrent.{ Future, ExecutionContext }
import scala.util.{ Failure, Success }
import akka.actor._
trait PipeToSupport {
final class PipeableFuture[T](val future: Future[T])(implicit executionContext: ExecutionContext) {
def pipeTo(recipient: ActorRef)(implicit sender: ActorRef = Actor.noSender): Future[T] = {
future onComplete {
case Success(r) => recipient ! r
case Failure(f) => recipient ! Status.Failure(f)
}
future
}
/*def pipeToSelection(recipient: ActorSelection)(implicit sender: ActorRef = Actor.noSender): Future[T] = {
future onComplete {
case Success(r) => recipient ! r
case Failure(f) => recipient ! Status.Failure(f)
}
future
}*/
def to(recipient: ActorRef): PipeableFuture[T] = to(recipient, Actor.noSender)
def to(recipient: ActorRef, sender: ActorRef): PipeableFuture[T] = {
pipeTo(recipient)(sender)
this
}
/*def to(recipient: ActorSelection): PipeableFuture[T] = to(recipient, Actor.noSender)
def to(recipient: ActorSelection, sender: ActorRef): PipeableFuture[T] = {
pipeToSelection(recipient)(sender)
this
}*/
}
/**
* Import this implicit conversion to gain the `pipeTo` method on [[scala.concurrent.Future]]:
*
* {{{
* import akka.pattern.pipe
*
* Future { doExpensiveCalc() } pipeTo nextActor
*
* or
*
* pipe(someFuture) to nextActor
*
* }}}
*
* The successful result of the future is sent as a message to the recipient, or
* the failure is sent in a [[akka.actor.Status.Failure]] to the recipient.
*/
implicit def pipe[T](future: Future[T])(implicit executionContext: ExecutionContext): PipeableFuture[T] = new PipeableFuture(future)
}
object PipeTo extends PipeToSupport
|
simonlischka/scala-js-actors
|
actors/src/main/scala/akka/pattern/PipeTo.scala
|
Scala
|
bsd-3-clause
| 1,865 |
// #Sireum
/*
Copyright (c) 2017-2021, Robby, Kansas State University
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.sireum
@datatype trait Either[L, R] {
@pure def isLeft: B
@pure def isRight: B
@pure def leftOpt: Option[L]
@pure def left: L
@pure def rightOpt: Option[R]
@pure def right: R
}
object Either {
@datatype class Left[L, R](val value: L) extends Either[L, R] {
@pure override def isLeft: B = {
Contract(Ensures(Res))
return T
}
@pure override def isRight: B = {
Contract(Ensures(!Res[B]))
return F
}
@pure override def leftOpt: Option[L] = {
Contract(Ensures(Res == Some(value)))
return Some(value)
}
@pure override def left: L = {
Contract(Ensures(Res == value))
return value
}
@pure override def rightOpt: Option[R] = {
Contract(Ensures(Res == None[R]()))
return None()
}
@pure override def right: R = {
Contract(Requires(F))
halt("Invalid 'Either.Left' operation 'right'.")
}
}
@datatype class Right[L, R](val value: R) extends Either[L, R] {
@pure override def isLeft: B = {
Contract(Ensures(!Res[B]))
return F
}
@pure override def isRight: B = {
Contract(Ensures(Res))
return T
}
@pure override def leftOpt: Option[L] = {
Contract(Ensures(Res == None[L]()))
return None()
}
@pure override def left: L = {
Contract(Requires(F))
halt("Invalid 'Either.Right' operation 'left'.")
}
@pure override def rightOpt: Option[R] = {
Contract(Ensures(Res == Some(value)))
return Some(value)
}
@pure override def right: R = {
Contract(Ensures(Res == value))
return value
}
}
@pure def left[L, R](value: L): Either[L, R] = {
return Left(value)
}
@pure def right[L, R](value: R): Either[L, R] = {
return Right(value)
}
}
|
sireum/v3-logika-runtime
|
library/shared/src/main/scala/org/sireum/Either.scala
|
Scala
|
bsd-2-clause
| 3,173 |
/**
* This file is part of the TA Buddy project.
* Copyright (c) 2012-2014 Alexey Aksenov [email protected]
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License version 3
* as published by the Free Software Foundation with the addition of the
* following permission added to Section 15 as permitted in Section 7(a):
* FOR ANY PART OF THE COVERED WORK IN WHICH THE COPYRIGHT IS OWNED
* BY Limited Liability Company «MEZHGALAKTICHESKIJ TORGOVYJ ALIANS»,
* Limited Liability Company «MEZHGALAKTICHESKIJ TORGOVYJ ALIANS» DISCLAIMS
* THE WARRANTY OF NON INFRINGEMENT OF THIRD PARTY RIGHTS.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Affero General Public License for more details.
* You should have received a copy of the GNU Affero General Public License
* along with this program; if not, see http://www.gnu.org/licenses or write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA, 02110-1301 USA, or download the license from the following URL:
* http://www.gnu.org/licenses/agpl.html
*
* The interactive user interfaces in modified source and object code versions
* of this program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU Affero General Public License.
*
* In accordance with Section 7(b) of the GNU Affero General Public License,
* you must retain the producer line in every report, form or document
* that is created or manipulated using TA Buddy.
*
* You can be released from the requirements of the license by purchasing
* a commercial license. Buying such a license is mandatory as soon as you
* develop commercial activities involving the TA Buddy software without
* disclosing the source code of your own applications.
* These activities include: offering paid services to customers,
* serving files in a web or/and network application,
* shipping TA Buddy with a closed source product.
*
* For more information, please contact Digimead Team at this
* address: [email protected]
*/
package org.digimead.tabuddy.desktop.logic.operation.view
import org.digimead.digi.lib.aop.log
import org.digimead.digi.lib.api.XDependencyInjection
import org.digimead.digi.lib.log.api.XLoggable
import org.digimead.tabuddy.desktop.core.definition.Operation
import org.digimead.tabuddy.desktop.logic.operation.view.api.XOperationModifySorting
import org.digimead.tabuddy.desktop.logic.payload.view.Sorting
import org.digimead.tabuddy.model.Model
import org.digimead.tabuddy.model.graph.Graph
/**
* OperationModifySorting base trait.
*/
trait OperationModifySorting extends XOperationModifySorting[Sorting] {
/**
* Create 'Modify sorting' operation.
*
* @param graph graph that contains a sorting
* @param sorting the initial sorting
* @param sortingList the list of existing sortings
* @return 'Modify sorting' operation
*/
override def operation(graph: Graph[_ <: Model.Like], sorting: Sorting, sortingList: Set[Sorting]): OperationModifySorting.Abstract
/**
* Checks that this class can be subclassed.
* <p>
* The API class is intended to be subclassed only at specific,
* controlled point. This method enforces this rule
* unless it is overridden.
* </p><p>
* <em>IMPORTANT:</em> By providing an implementation of this
* method that allows a subclass of a class which does not
* normally allow subclassing to be created, the implementer
* agrees to be fully responsible for the fact that any such
* subclass will likely fail.
* </p>
*/
override protected def checkSubclass() {}
}
/**
* Modify a view's sorting.
*/
object OperationModifySorting extends XLoggable {
/** Stable identifier with OperationModifySorting DI */
def operation = DI.operation.asInstanceOf[Option[OperationModifySorting]]
/**
* Build a new 'Modify sorting' operation.
*
* @param graph graph that contains a sorting
* @param sorting the initial sorting
* @param sortingList the list of existing sortings
* @return 'Modify sorting' operation
*/
@log
def apply(graph: Graph[_ <: Model.Like], sorting: Sorting, sortingList: Set[Sorting]): Option[Abstract] =
operation match {
case Some(operation) ⇒
Some(operation.operation(graph, sorting, sortingList))
case None ⇒
log.error("OperationModifySorting implementation is not defined.")
None
}
/** Bridge between abstract XOperation[Sorting] and concrete Operation[Sorting] */
abstract class Abstract(val graph: Graph[_ <: Model.Like], val sorting: Sorting, val sortingList: Set[Sorting])
extends Operation[Sorting](s"Edit sorting $sorting for graph $graph.") {
this: XLoggable ⇒
}
/**
* Dependency injection routines.
*/
private object DI extends XDependencyInjection.PersistentInjectable {
lazy val operation = injectOptional[XOperationModifySorting[_]]
}
}
|
digimead/digi-TABuddy-desktop
|
part-logic/src/main/scala/org/digimead/tabuddy/desktop/logic/operation/view/OperationModifySorting.scala
|
Scala
|
agpl-3.0
| 5,108 |
/*
* Copyright (C) 2012 The Regents of The University California.
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shark.execution
import java.util.{ArrayList, Arrays}
import scala.reflect.BeanProperty
import org.apache.hadoop.mapred.{FileInputFormat, InputFormat, JobConf}
import org.apache.hadoop.hive.conf.HiveConf
import org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_PARTITION_COLUMNS
import org.apache.hadoop.hive.ql.exec.{TableScanOperator => HiveTableScanOperator}
import org.apache.hadoop.hive.ql.exec.{MapSplitPruning, Utilities}
import org.apache.hadoop.hive.ql.metadata.{Partition, Table}
import org.apache.hadoop.hive.ql.plan.{PlanUtils, PartitionDesc, TableDesc}
import org.apache.hadoop.hive.serde2.objectinspector.{ObjectInspector, ObjectInspectorFactory,
StructObjectInspector}
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory
import org.apache.hadoop.io.Writable
import shark.{SharkConfVars, SharkEnv, Utils}
import shark.api.QueryExecutionException
import shark.execution.serialization.{XmlSerializer, JavaSerializer}
import shark.memstore2.{CacheType, TablePartition, TablePartitionStats}
import shark.tachyon.TachyonException
import spark.RDD
import spark.rdd.{PartitionPruningRDD, UnionRDD}
import org.apache.hadoop.hive.ql.io.HiveInputFormat
class TableScanOperator extends TopOperator[HiveTableScanOperator] with HiveTopOperator {
@transient var table: Table = _
@transient var parts: Array[Object] = _
@BeanProperty var firstConfPartDesc: PartitionDesc = _
@BeanProperty var tableDesc: TableDesc = _
@BeanProperty var localHconf: HiveConf = _
/**
* Initialize the hive TableScanOperator. This initialization propagates
   * downstream. When all Hive TableScanOperators are initialized, all operators
   * in the Hive query plan are initialized.
*/
override def initializeHiveTopOperator() {
val rowObjectInspector = {
if (parts == null) {
val serializer = tableDesc.getDeserializerClass().newInstance()
serializer.initialize(hconf, tableDesc.getProperties)
serializer.getObjectInspector()
} else {
val partProps = firstConfPartDesc.getProperties()
val tableDeser = firstConfPartDesc.getDeserializerClass().newInstance()
tableDeser.initialize(hconf, partProps)
val partCols = partProps.getProperty(META_TABLE_PARTITION_COLUMNS)
val partNames = new ArrayList[String]
val partObjectInspectors = new ArrayList[ObjectInspector]
partCols.trim().split("/").foreach{ key =>
partNames.add(key)
partObjectInspectors.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector)
}
// No need to lock this one (see SharkEnv.objectInspectorLock) because
// this is called on the master only.
val partObjectInspector = ObjectInspectorFactory.getStandardStructObjectInspector(
partNames, partObjectInspectors)
val oiList = Arrays.asList(
tableDeser.getObjectInspector().asInstanceOf[StructObjectInspector],
partObjectInspector.asInstanceOf[StructObjectInspector])
// new oi is union of table + partition object inspectors
ObjectInspectorFactory.getUnionStructObjectInspector(oiList)
}
}
setInputObjectInspector(0, rowObjectInspector)
super.initializeHiveTopOperator()
}
override def initializeOnMaster() {
localHconf = super.hconf
}
override def execute(): RDD[_] = {
assert(parentOperators.size == 0)
val tableKey: String = tableDesc.getTableName.split('.')(1)
// There are three places we can load the table from.
// 1. Tachyon table
// 2. Spark heap (block manager)
// 3. Hive table on HDFS (or other Hadoop storage)
val cacheMode = CacheType.fromString(
tableDesc.getProperties().get("shark.cache").asInstanceOf[String])
if (cacheMode == CacheType.heap) {
// Table should be in Spark heap (block manager).
val rdd = SharkEnv.memoryMetadataManager.get(tableKey).getOrElse {
logError("""|Table %s not found in block manager.
|Are you trying to access a cached table from a Shark session other than
|the one in which it was created?""".stripMargin.format(tableKey))
throw(new QueryExecutionException("Cached table not found"))
}
logInfo("Loading table " + tableKey + " from Spark block manager")
loadRddFromSpark(tableKey, rdd)
} else if (cacheMode == CacheType.tachyon) {
// Table is in Tachyon.
if (!SharkEnv.tachyonUtil.tableExists(tableKey)) {
throw new TachyonException("Table " + tableKey + " does not exist in Tachyon")
}
logInfo("Loading table " + tableKey + " from Tachyon")
var indexToStats: collection.Map[Int, TablePartitionStats] =
SharkEnv.memoryMetadataManager.getStats(tableKey).getOrElse(null)
if (indexToStats == null) {
val statsByteBuffer = SharkEnv.tachyonUtil.getTableMetadata(tableKey)
indexToStats = JavaSerializer.deserialize[collection.Map[Int, TablePartitionStats]](
statsByteBuffer.array())
SharkEnv.memoryMetadataManager.putStats(tableKey, indexToStats)
}
SharkEnv.tachyonUtil.createRDD(tableKey)
} else {
// Table is a Hive table on HDFS (or other Hadoop storage).
super.execute()
}
}
private def loadRddFromSpark(tableKey: String, rdd: RDD[_]): RDD[_] = {
// Stats used for map pruning.
val indexToStats: collection.Map[Int, TablePartitionStats] =
SharkEnv.memoryMetadataManager.getStats(tableKey).get
    // Run map pruning if the flag is set, there is a filter predicate on the
    // input table, and we have statistics on the table.
val prunedRdd: RDD[_] =
if (SharkConfVars.getBoolVar(localHconf, SharkConfVars.MAP_PRUNING) &&
childOperators(0).isInstanceOf[FilterOperator] &&
indexToStats.size == rdd.partitions.size) {
val startTime = System.currentTimeMillis
val printPruneDebug = SharkConfVars.getBoolVar(
localHconf, SharkConfVars.MAP_PRUNING_PRINT_DEBUG)
// Must initialize the condition evaluator in FilterOperator to get the
// udfs and object inspectors set.
val filterOp = childOperators(0).asInstanceOf[FilterOperator]
filterOp.initializeOnSlave()
def prunePartitionFunc(index: Int): Boolean = {
if (printPruneDebug) {
logInfo("\nPartition " + index + "\n" + indexToStats(index))
}
// Only test for pruning if we have stats on the column.
val partitionStats = indexToStats(index)
if (partitionStats != null && partitionStats.stats != null)
MapSplitPruning.test(partitionStats, filterOp.conditionEvaluator)
else true
}
// Do the pruning.
val prunedRdd = PartitionPruningRDD.create(rdd, prunePartitionFunc)
val timeTaken = System.currentTimeMillis - startTime
logInfo("Map pruning %d partitions into %s partitions took %d ms".format(
rdd.partitions.size, prunedRdd.partitions.size, timeTaken))
prunedRdd
} else {
rdd
}
prunedRdd.mapPartitions { iter =>
if (iter.hasNext) {
val tablePartition = iter.next.asInstanceOf[TablePartition]
tablePartition.iterator
} else {
Iterator()
}
}
}
/**
* Create a RDD representing the table (with or without partitions).
*/
override def preprocessRdd(rdd: RDD[_]): RDD[_] = {
if (table.isPartitioned) {
logInfo("Making %d Hive partitions".format(parts.size))
makePartitionRDD(rdd)
} else {
val tablePath = table.getPath.toString
val ifc = table.getInputFormatClass
.asInstanceOf[java.lang.Class[InputFormat[Writable, Writable]]]
logInfo("Table input: %s".format(tablePath))
createHadoopRdd(tablePath, ifc)
}
}
override def processPartition(index: Int, iter: Iterator[_]): Iterator[_] = {
val deserializer = tableDesc.getDeserializerClass().newInstance()
deserializer.initialize(localHconf, tableDesc.getProperties)
iter.map { value =>
value match {
case rowWithPart: Array[Object] => rowWithPart
case v: Writable => deserializer.deserialize(v)
case _ => throw new RuntimeException("Failed to match " + value.toString)
}
}
}
private def makePartitionRDD[T](rdd: RDD[T]): RDD[_] = {
val partitions = parts
val rdds = new Array[RDD[Any]](partitions.size)
var i = 0
partitions.foreach { part =>
val partition = part.asInstanceOf[Partition]
val partDesc = Utilities.getPartitionDesc(partition)
val tablePath = partition.getPartitionPath.toString
val ifc = partition.getInputFormatClass
.asInstanceOf[java.lang.Class[InputFormat[Writable, Writable]]]
val parts = createHadoopRdd(tablePath, ifc)
val serializedHconf = XmlSerializer.serialize(localHconf, localHconf)
val partRDD = parts.mapPartitions { iter =>
// Map each tuple to a row object
val hconf = XmlSerializer.deserialize(serializedHconf).asInstanceOf[HiveConf]
val deserializer = partDesc.getDeserializerClass().newInstance()
deserializer.initialize(hconf, partDesc.getProperties())
// Get partition field info
val partSpec = partDesc.getPartSpec()
val partProps = partDesc.getProperties()
val partCols = partProps.getProperty(META_TABLE_PARTITION_COLUMNS)
val partKeys = partCols.trim().split("/")
val partValues = new ArrayList[String]
partKeys.foreach { key =>
if (partSpec == null) {
partValues.add(new String)
} else {
partValues.add(new String(partSpec.get(key)))
}
}
val rowWithPartArr = new Array[Object](2)
iter.map { value =>
val deserializedRow = deserializer.deserialize(value) // LazyStruct
rowWithPartArr.update(0, deserializedRow)
rowWithPartArr.update(1, partValues)
rowWithPartArr.asInstanceOf[Object]
}
}
rdds(i) = partRDD.asInstanceOf[RDD[Any]]
i += 1
}
// Even if we don't use any partitions, we still need an empty RDD
if (rdds.size == 0) {
SharkEnv.sc.makeRDD(Seq[Object]())
} else {
new UnionRDD(rdds(0).context, rdds)
}
}
private def createHadoopRdd(path: String, ifc: Class[InputFormat[Writable, Writable]])
: RDD[Writable] = {
val conf = new JobConf(localHconf)
if (tableDesc != null) {
Utilities.copyTableJobPropertiesToConf(tableDesc, conf)
}
new HiveInputFormat() {
def doPushFilters() {
pushFilters(conf, hiveOp)
}
}.doPushFilters()
FileInputFormat.setInputPaths(conf, path)
val bufferSize = System.getProperty("spark.buffer.size", "65536")
conf.set("io.file.buffer.size", bufferSize)
// Set s3/s3n credentials. Setting them in conf ensures the settings propagate
// from Spark's master all the way to Spark's slaves.
var s3varsSet = false
    Seq("fs.s3n.awsAccessKeyId", "fs.s3n.awsSecretAccessKey",
"fs.s3.awsAccessKeyId", "fs.s3.awsSecretAccessKey").foreach { variableName =>
if (localHconf.get(variableName) != null) {
s3varsSet = true
conf.set(variableName, localHconf.get(variableName))
}
}
// If none of the s3 credentials are set in Hive conf, try use the environmental
// variables for credentials.
if (!s3varsSet) {
Utils.setAwsCredentials(conf)
}
// Choose the minimum number of splits. If mapred.map.tasks is set, use that unless
// it is smaller than what Spark suggests.
val minSplits = math.max(localHconf.getInt("mapred.map.tasks", 1), SharkEnv.sc.defaultMinSplits)
val rdd = SharkEnv.sc.hadoopRDD(conf, ifc, classOf[Writable], classOf[Writable], minSplits)
// Only take the value (skip the key) because Hive works only with values.
rdd.map(_._2)
}
}
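// The map-pruning step in loadRddFromSpark above drops whole cached partitions whose
// per-partition statistics cannot satisfy the filter predicate. A self-contained sketch
// of the same idea, detached from the Spark/Hive types (illustrative only; the stats
// shape and the range predicate are assumptions, not Shark's actual representation):
object MapPruningSketch {
  final case class ColumnStats(min: Int, max: Int)
  // Keep only partitions whose [min, max] range can overlap the queried range.
  def prunedPartitionIndices(indexToStats: Map[Int, ColumnStats],
                             queryLow: Int,
                             queryHigh: Int): Set[Int] =
    indexToStats.collect {
      case (index, stats) if stats.max >= queryLow && stats.min <= queryHigh => index
    }.toSet
}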
|
vax11780/shark
|
src/main/scala/shark/execution/TableScanOperator.scala
|
Scala
|
apache-2.0
| 12,661 |
package hello
import org.springframework.web.bind.annotation.{RequestMapping, RestController}
@RestController
class PingResource {
@RequestMapping(path = Array("/ping"))
def ping(): String = "pong"
}
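// A hypothetical manual smoke test once the Spring Boot app is running locally
// (assumes the default port 8080; not part of the original sources):
object PingCheck extends App {
  val response = scala.io.Source.fromURL("http://localhost:8080/ping").mkString
  println(s"GET /ping returned: $response") // expected: pong
}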
|
shekhargulati/52-technologies-in-2016
|
37-spring-boot-scala/gs-rest-service/src/main/scala/hello/PingController.scala
|
Scala
|
mit
| 207 |
package example
final class Baz {
val name = "baz"
def hello = s"Hello, $name"
}
|
t-mochizuki/scala-study
|
mockito-example/src/main/scala/Baz.scala
|
Scala
|
mit
| 86 |
package io.github.shogowada.scala.jsonrpc.example.e2e.websocket
import io.github.shogowada.scala.jsonrpc.DisposableFunction1
import scala.concurrent.Future
case class Todo(id: String, description: String)
object TodoEventTypes {
val Add = "Add"
val Remove = "Remove"
}
case class TodoEvent(todo: Todo, eventType: String)
trait TodoRepositoryAPI {
def add(description: String): Future[Todo]
def remove(id: String): Future[Unit]
def register(observer: DisposableFunction1[TodoEvent, Future[Unit]]): Future[String]
def unregister(observerId: String): Future[Unit]
}
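// Client-side usage sketch against the trait above (assumes an `api` handle obtained
// from a JSON-RPC client; the object and method names below are illustrative, not part
// of the published API):
object TodoRepositoryAPIUsageSketch {
  import scala.concurrent.Future
  import scala.concurrent.ExecutionContext.Implicits.global
  def addThenRemove(api: TodoRepositoryAPI): Future[Todo] =
    for {
      todo <- api.add("write docs") // Future[Todo]
      _    <- api.remove(todo.id)   // Future[Unit]
    } yield todo
}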
|
shogowada/scala-json-rpc
|
examples/e2e-web-socket/shared/src/main/scala/io/github/shogowada/scala/jsonrpc/example/e2e/websocket/API.scala
|
Scala
|
mit
| 587 |
/*
* The MIT License (MIT)
*
* Copyright (c) 2014 MineFormers
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package de.mineformers.kybology.core.init
import de.mineformers.core.mod.ItemHolder
import de.mineformers.core.registry.SharedItemRegistry
import de.mineformers.kybology.Core.Names
import de.mineformers.kybology.core.item.ItemWrench
import net.minecraftforge.common.MinecraftForge
/**
* CoreItems
*
* @author PaleoCrafter
*/
class CoreItems extends ItemHolder {
val wrench = new ItemWrench
override def init(): Unit = {
MinecraftForge.EVENT_BUS.register(wrench)
SharedItemRegistry.add(Names.Items.Wrench, wrench)
}
}
|
MineFormers/Kybology
|
src/main/scala/de/mineformers/kybology/core/init/CoreItems.scala
|
Scala
|
mit
| 1,683 |
/* sbt -- Simple Build Tool
* Copyright 2009,2010 Mark Harrah
*/
package sbt
package impl
import StringUtilities.nonEmpty
trait DependencyBuilders
{
final implicit def toGroupID(groupID: String): GroupID =
{
nonEmpty(groupID, "Group ID")
new GroupID(groupID)
}
final implicit def toRepositoryName(name: String): RepositoryName =
{
nonEmpty(name, "Repository name")
new RepositoryName(name)
}
final implicit def moduleIDConfigurable(m: ModuleID): ModuleIDConfigurable =
{
require(m.configurations.isEmpty, "Configurations already specified for module " + m)
new ModuleIDConfigurable(m)
}
}
final class GroupID private[sbt] (groupID: String)
{
def % (artifactID: String) = groupArtifact(artifactID, CrossVersion.Disabled)
def %% (artifactID: String): GroupArtifactID = groupArtifact(artifactID, CrossVersion.binary)
@deprecated(deprecationMessage, "0.12.0")
def %% (artifactID: String, crossVersion: String => String) = groupArtifact(artifactID, CrossVersion.binaryMapped(crossVersion))
@deprecated(deprecationMessage, "0.12.0")
def %% (artifactID: String, alternatives: (String, String)*) = groupArtifact(artifactID, CrossVersion.binaryMapped(Map(alternatives: _*) orElse { case s => s }))
private def groupArtifact(artifactID: String, cross: CrossVersion) =
{
nonEmpty(artifactID, "Artifact ID")
new GroupArtifactID(groupID, artifactID, cross)
}
private[this] def deprecationMessage = """Use the cross method on the constructed ModuleID. For example: ("a" % "b" % "1").cross(...)"""
}
final class GroupArtifactID private[sbt] (groupID: String, artifactID: String, crossVersion: CrossVersion)
{
def % (revision: String): ModuleID =
{
nonEmpty(revision, "Revision")
ModuleID(groupID, artifactID, revision).cross(crossVersion)
}
}
final class ModuleIDConfigurable private[sbt] (moduleID: ModuleID)
{
def % (configuration: Configuration): ModuleID = %(configuration.name)
def % (configurations: String): ModuleID =
{
nonEmpty(configurations, "Configurations")
val c = configurations
moduleID.copy(configurations = Some(c))
}
}
final class RepositoryName private[sbt] (name: String)
{
def at (location: String) =
{
nonEmpty(location, "Repository location")
new MavenRepository(name, location)
}
}
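// What these builders enable in a build definition (sketch; the artifact coordinates
// are illustrative and not taken from this repository):
//
//   libraryDependencies ++= Seq(
//     "org.scala-lang" %  "scala-reflect" % "2.10.4",         // GroupID.%  (no cross-versioning)
//     "org.scalaz"     %% "scalaz-core"   % "7.0.6",           // GroupID.%% (binary cross-versioning)
//     "junit"          %  "junit"         % "4.11"   % "test"  // ModuleIDConfigurable.% (configuration)
//   )
//
//   resolvers += "Sonatype OSS Releases" at "https://oss.sonatype.org/content/repositories/releases"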
|
harrah/xsbt
|
ivy/src/main/scala/sbt/impl/DependencyBuilders.scala
|
Scala
|
bsd-3-clause
| 2,262 |
package core
import com.bryzek.apidoc.spec.v0.models.ParameterLocation
import org.scalatest.{FunSpec, Matchers}
class ServiceValidatorSpec extends FunSpec with Matchers {
it("should detect empty inputs") {
val validator = TestHelper.serviceValidatorFromApiJson("")
validator.errors.mkString should be("No Data")
}
it("should detect invalid json") {
val validator = TestHelper.serviceValidatorFromApiJson(" { ")
validator.errors.mkString.indexOf("expected close marker") should be >= 0
}
it("service name must be a valid name") {
val json =
"""
{
"name": "5@4",
"apidoc": { "version": "0.9.6" }
}
"""
val validator = TestHelper.serviceValidatorFromApiJson(json)
validator.errors.mkString should be("Name[5@4] must start with a letter")
}
it("base url shouldn't end with a '/'") {
val json =
"""
{
"name": "TestApp",
"base_url": "http://localhost:9000/",
"apidoc": { "version": "0.9.6" }
}
"""
val validator = TestHelper.serviceValidatorFromApiJson(json)
validator.errors.mkString should be("base_url[http://localhost:9000/] must not end with a '/'")
}
it("model that is missing fields") {
val json =
"""
{
"name": "Api Doc",
"apidoc": { "version": "0.9.6" },
"models": {
"user": {
"fields": []
}
}
}
"""
val validator = TestHelper.serviceValidatorFromApiJson(json)
validator.errors.mkString should be("Model[user] must have at least one field")
}
it("model has a field with an invalid name") {
val json =
"""
{
"name": "Api Doc",
"apidoc": { "version": "0.9.6" },
"models": {
"user": {
"fields": [
{ "name": "_!@#", "type": "string" }
]
}
}
}
"""
val validator = TestHelper.serviceValidatorFromApiJson(json)
validator.errors.mkString should be("Model[user] field name[_!@#] is invalid: Name can only contain a-z, A-Z, 0-9 and _ characters and Name must start with a letter")
}
it("model with duplicate field names") {
val json =
"""
{
"name": "Api Doc",
"models": {
"user": {
"fields": [
{ "name": "key", "type": "string" },
{ "name": "KEY", "type": "string", "required": false }
]
}
}
}
"""
val validator = TestHelper.serviceValidatorFromApiJson(json)
validator.errors.mkString("") should be("Model[user] field[key] appears more than once")
}
it("reference that points to a non-existent model") {
val json =
"""
{
"name": "Api Doc",
"apidoc": { "version": "0.9.6" },
"models": {
"user": {
"fields": [
{ "name": "foo", "type": "foo" }
]
}
}
}
"""
val validator = TestHelper.serviceValidatorFromApiJson(json)
validator.errors.mkString should be("user.foo has invalid type[foo]")
}
it("types are lowercased in service definition") {
val json =
"""
{
"name": "Api Doc",
"models": {
"user": {
"fields": [
{ "name": "id", "type": "UUID" }
]
}
}
}
"""
val validator = TestHelper.serviceValidatorFromApiJson(json)
validator.errors.mkString should be("")
validator.service.models.head.fields.head.`type` should be("uuid")
}
it("base_url is optional") {
val json =
"""
{
"name": "Api Doc",
"apidoc": { "version": "0.9.6" },
"models": {
"user": {
"fields": [
{ "name": "id", "type": "long" }
]
}
}
}
"""
val validator = TestHelper.serviceValidatorFromApiJson(json)
validator.errors.mkString should be("")
}
it("defaults to a NoContent response") {
val json =
"""
{
"name": "Api Doc",
"apidoc": { "version": "0.9.6" },
"models": {
"user": {
"fields": [
{ "name": "guid", "type": "string" }
]
}
},
"resources": {
"user": {
"operations": [
{
"method": "DELETE",
"path": "/:guid"
}
]
}
}
}
"""
val validator = TestHelper.serviceValidatorFromApiJson(json)
validator.errors.mkString("") should be("")
validator.service.resources.head.operations.head.responses.find(r => TestHelper.responseCode(r.code) == "204").getOrElse {
sys.error("Missing 204 response")
}
}
it("accepts request header params") {
val json =
"""
{
"name": "Api Doc",
"apidoc": { "version": "0.9.6" },
"models": {
"user": {
"fields": [
{ "name": "guid", "type": "string" }
]
}
},
"resources": {
"user": {
"operations": [
{
"method": "DELETE",
"path": "/:guid",
"parameters": [
{ "name": "guid", "type": "%s", "location": "header" }
]
}
]
}
}
}
"""
val validator = TestHelper.serviceValidatorFromApiJson(json.format("string"))
validator.errors.mkString("") should be("")
val guid = validator.service.resources.head.operations.head.parameters.head
guid.`type` should be("string")
guid.location should be(ParameterLocation.Header)
TestHelper.serviceValidatorFromApiJson(json.format("user")).errors.mkString("") should be("Resource[user] DELETE /users/:guid Parameter[guid] has an invalid type[user]. Model and union types are not supported as header parameters.")
}
it("accepts response headers") {
val header = """{ "name": "foo", "type": "%s" }"""
val json =
"""
{
"name": "Api Doc",
"apidoc": { "version": "0.9.6" },
"models": {
"user": {
"fields": [
{ "name": "guid", "type": "string" }
]
}
},
"resources": {
"user": {
"operations": [
{
"method": "GET",
"path": "/:guid",
"responses": {
"200": {
"type": "user",
"headers" : [
%s
]
}
}
}
]
}
}
}
"""
val stringHeader = header.format("string")
val userHeader = header.format("user")
val validator = TestHelper.serviceValidatorFromApiJson(json.format(stringHeader))
validator.errors.mkString("") should be("")
val headers = validator.service.resources.head.operations.head.responses.head.headers
headers.size should be(1)
headers.get(0).name should be("foo")
headers.get(0).`type` should be("string")
TestHelper.serviceValidatorFromApiJson(json.format(s"$stringHeader, $stringHeader")).errors.mkString("") should be("Resource[user] GET /users/:guid response code[200] header[foo] appears more than once")
TestHelper.serviceValidatorFromApiJson(json.format(userHeader)).errors.mkString("") should be("Resource[user] GET /users/:guid response code[200] header[foo] type[user] is invalid: Must be a string or the name of an enum")
}
it("operations w/ a valid response validates correct") {
val json =
"""
{
"name": "Api Doc",
"apidoc": { "version": "0.9.6" },
"models": {
"user": {
"fields": [
{ "name": "guid", "type": "string" }
]
}
},
"resources": {
"user": {
"operations": [
{
"method": "GET",
"path": "/:guid",
"parameters": [
{ "name": "guid", "type": "string" }
],
"responses": {
"200": { "type": "%s" }
}
}
]
}
}
}
"""
TestHelper.serviceValidatorFromApiJson(json.format("user")).errors.mkString("") should be("")
TestHelper.serviceValidatorFromApiJson(json.format("unknown_model")).errors.mkString("") should be("Resource[user] GET /users/:guid response code[200] has an invalid type[unknown_model].")
}
it("operations w/ a valid attributes validates correct") {
val json =
"""
{
"name": "Api Doc",
"apidoc": { "version": "0.9.6" },
"models": {
"user": {
"fields": [
{ "name": "guid", "type": "string" }
]
}
},
"resources": {
"user": {
"operations": [
{
"method": "GET",
"attributes": [
{
"name": "sample",
"value": {}
}
]
}
]
}
}
}
"""
TestHelper.serviceValidatorFromApiJson(json).errors.mkString("") should be("")
}
it("includes path parameter in operations") {
val json =
"""
{
"name": "Api Doc",
"apidoc": { "version": "0.9.6" },
"models": {
"user": {
"fields": [
{ "name": "guid", "type": "uuid" }
]
}
},
"resources": {
"user": {
"operations": [
{
"method": "DELETE",
"path": "/:guid"
}
]
}
}
}
"""
val validator = TestHelper.serviceValidatorFromApiJson(json)
validator.errors.mkString("") should be("")
val op = validator.service.resources.head.operations.head
op.parameters.map(_.name) should be(Seq("guid"))
val guid = op.parameters.head
guid.`type` should be("uuid")
}
it("DELETE supports query parameters") {
val json =
"""
{
"name": "Api Doc",
"apidoc": { "version": "0.9.6" },
"models": {
"user": {
"fields": [
{ "name": "guid", "type": "uuid" }
]
}
},
"resources": {
"user": {
"operations": [
{
"method": "DELETE",
"parameters": [
{ "name": "guid", "type": "[uuid]" }
]
}
]
}
}
}
"""
val validator = TestHelper.serviceValidatorFromApiJson(json)
validator.errors.mkString("") should be("")
val op = validator.service.resources.head.operations.head
op.parameters.map(_.name) should be(Seq("guid"))
val guid = op.parameters.head
guid.`type` should be("[uuid]")
guid.location should be(ParameterLocation.Query)
}
it("path parameters must be required") {
val json =
"""
{
"name": "Api Doc",
"apidoc": { "version": "0.9.6" },
"models": {
"user": {
"fields": [
{ "name": "id", "type": "long" }
]
}
},
"resources": {
"user": {
"operations": [
{
"method": "GET",
"path": "/:id",
"parameters": [
{ "name": "id", "type": "long", "required": false }
]
}
]
}
}
}
"""
val validator = TestHelper.serviceValidatorFromApiJson(json)
validator.errors.mkString("") should be("Resource[user] GET /users/:id path parameter[id] is specified as optional. All path parameters are required")
}
it("infers datatype for a path parameter from the associated model") {
val json =
"""
{
"name": "Api Doc",
"apidoc": { "version": "0.9.6" },
"models": {
"user": {
"fields": [
{ "name": "id", "type": "long" }
]
}
},
"resources": {
"user": {
"operations": [
{
"method": "DELETE",
"path": "/:id"
}
]
}
}
}
"""
val validator = TestHelper.serviceValidatorFromApiJson(json)
validator.errors.mkString("") should be("")
val op = validator.service.resources.head.operations.head
val idParam = op.parameters.head
idParam.name should be("id")
idParam.`type` should be("long")
}
describe("parameter validations") {
val baseJson =
"""
{
"name": "Test Validation of Parameters",
"apidoc": { "version": "0.9.6" },
"models": {
"tag": {
"fields": [
{ "name": "name", "type": "string" }
]
}
},
"resources": {
"tag": {
"operations": [
{
"method": "GET",
"parameters": [
{ "name": "tags", "type": "%s" }
]
}
]
}
}
}
"""
it("lists of primitives are valid in query parameters") {
val validator = TestHelper.serviceValidatorFromApiJson(baseJson.format("[string]"))
validator.errors should be(Nil)
}
it("maps of primitives are valid in query parameters") {
val validator = TestHelper.serviceValidatorFromApiJson(baseJson.format("map[string]"))
validator.errors.mkString("") should be("Resource[tag] GET /tags Parameter[tags] has an invalid type[map[string]]. Maps are not supported as query parameters.")
}
it("lists of models are not valid in query parameters") {
val validator = TestHelper.serviceValidatorFromApiJson(baseJson.format("[tag]"))
validator.errors should be(
Seq("Resource[tag] GET /tags Parameter[tags] has an invalid type[[tag]]. Parameters that are lists must be lists of primitive types or enums.")
)
}
it("models are not valid in query parameters") {
val validator = TestHelper.serviceValidatorFromApiJson(baseJson.format("tag"))
validator.errors.mkString("") should be("Resource[tag] GET /tags Parameter[tags] has an invalid type[tag]. Model and union types are not supported as query parameters.")
}
it("validates type name in collection") {
val validator = TestHelper.serviceValidatorFromApiJson(baseJson.format("[foo]"))
validator.errors.mkString("") should be("Resource[tag] GET /tags Parameter[tags] has an invalid type: [foo]")
}
}
it("model with duplicate plural names") {
val json =
"""
{
"name": "Api Doc",
"models": {
"user": {
"plural": "users",
"fields": [
{ "name": "id", "type": "string" }
]
},
"person": {
"plural": "users",
"fields": [
{ "name": "id", "type": "string" }
]
}
}
}
"""
val validator = TestHelper.serviceValidatorFromApiJson(json)
validator.errors.mkString("") should be("Model with plural[users] appears more than once")
}
}
|
movio/apidoc
|
core/src/test/scala/core/ServiceValidatorSpec.scala
|
Scala
|
mit
| 15,049 |
package com.sksamuel.elastic4s.validate
import com.sksamuel.elastic4s.IndexesAndTypes
import com.sksamuel.elastic4s.searches.queries.QueryDefinition
trait ValidateApi {
def validateIn(indexesAndTypes: IndexesAndTypes): ValidateExpectsQuery = new ValidateExpectsQuery(indexesAndTypes)
class ValidateExpectsQuery(indexesAndTypes: IndexesAndTypes) {
def query(query: QueryDefinition): ValidateDefinition = ValidateDefinition(indexesAndTypes, query)
}
}
|
aroundus-inc/elastic4s
|
elastic4s-core/src/main/scala/com/sksamuel/elastic4s/validate/ValidateApi.scala
|
Scala
|
apache-2.0
| 463 |
package com.mentatlabs.nsa
package scalac
package options
/* -Ynooptimise
* ============
* 2.11.0 - 2.11.8: Clears all the flags set by -optimise. Useful for testing optimizations in isolation.
* 2.12.0: !! missing !!
*/
case object ScalacYNooptimise
extends ScalacOptionBoolean("-Ynooptimise", ScalacVersions.`2.11.0`)
|
mentat-labs/sbt-nsa
|
nsa-core/src/main/scala/com/mentatlabs/nsa/scalac/options/private/ScalacYNooptimise.scala
|
Scala
|
bsd-3-clause
| 342 |
package vultura.util
import org.specs2.Specification
/**
* @author Thomas Geier <[email protected]>
*/
class packageTest extends Specification {
override def is =
s2"tests for disjoint set of sets" ^
(Set(Set(1),Set(2)).isPairwiseDisjoint must beTrue) ^
(Set(Set(1,2),Set(2)).isPairwiseDisjoint must beFalse) ^
(Set(Set[Int](),Set(2)).isPairwiseDisjoint must beTrue) ^
(Set(Set(1),Set(2),Set(3)).isPairwiseDisjoint must beTrue) ^
(Set(Set(1),Set(2),Set(3,2,1)).isPairwiseDisjoint must beFalse) ^
(Set(Set(1)).isPairwiseDisjoint must beTrue) ^
(Set(Set[Int]()).isPairwiseDisjoint must beTrue)
}
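// A minimal sketch of how such a pairwise-disjointness check could be implemented
// (the actual vultura.util implementation may differ):
object PairwiseDisjointSketch {
  def isPairwiseDisjoint[A](sets: Set[Set[A]]): Boolean =
    sets.toSeq.combinations(2).forall { case Seq(a, b) => (a intersect b).isEmpty }
}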
|
ziggystar/vultura-factor
|
util/src/test/scala/vultura/util/packageTest.scala
|
Scala
|
mit
| 652 |
/*
* Copyright 2015 LG CNS.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package scouter.server.db.xlog;
import java.io.IOException
import java.util.ArrayList
import java.util.HashMap
import java.util.Hashtable
import java.util.Iterator
import java.util.List
import java.util.Map
import java.util.Set
import scouter.server.db.io.IndexKeyFile
import scouter.server.db.io.IndexTimeFile
import scouter.io.DataInputX
import scouter.io.DataOutputX
import scouter.util.FileUtil
import scouter.util.IClose;
import scouter.server.util.EnumerScala
object XLogIndex {
val POSTFIX_TIME = "_tim";
val POSTFIX_GID = "_gid";
val POSTFIX_TID = "_tid";
val table = new Hashtable[String, XLogIndex]();
def open(file: String): XLogIndex = {
table.synchronized {
var index = table.get(file);
if (index != null) {
        index.reference += 1;
return index;
} else {
index = new XLogIndex(file);
table.put(file, index);
return index;
}
}
}
}
class XLogIndex(_file: String) extends IClose {
val file = _file
  var reference = 0;
var txidIndex: IndexKeyFile = null
var gxidIndex: IndexKeyFile = null
var timeIndex: IndexTimeFile = null
def setByTxid(txid: Long, pos: Long) {
if (this.txidIndex == null) {
this.txidIndex = new IndexKeyFile(file + XLogIndex.POSTFIX_TID);
}
this.txidIndex.put(DataOutputX.toBytes(txid), DataOutputX.toBytes5(pos));
}
def setByGxid(gxid: Long, pos: Long) {
if (gxid == 0)
return ;
if (this.gxidIndex == null) {
this.gxidIndex = new IndexKeyFile(file + XLogIndex.POSTFIX_GID);
}
this.gxidIndex.put(DataOutputX.toBytes(gxid), DataOutputX.toBytes5(pos));
}
def setByTime(time: Long, pos: Long) {
if (this.timeIndex == null) {
this.timeIndex = new IndexTimeFile(file + XLogIndex.POSTFIX_TIME);
}
this.timeIndex.put(time, DataOutputX.toBytes5(pos));
}
def getByTxid(txid: Long): Long = {
if (this.txidIndex == null) {
this.txidIndex = new IndexKeyFile(file + XLogIndex.POSTFIX_TID);
}
val b = this.txidIndex.get(DataOutputX.toBytes(txid));
if (b == null) -1 else DataInputX.toLong5(b, 0);
}
def getByTxid(txSet: Set[Long]): Map[Long, Long] = {
if (this.txidIndex == null) {
this.txidIndex = new IndexKeyFile(file + XLogIndex.POSTFIX_TID);
}
val map = new HashMap[Long, Long]();
EnumerScala.foreach(txSet.iterator(), (key: Long) => {
val value = this.txidIndex.get(DataOutputX.toBytes(key));
if (value != null) {
map.put(key, DataInputX.toLong5(value, 0));
}
})
return map;
}
def getByGxid(gxid: Long): List[Long] = {
if (this.gxidIndex == null) {
this.gxidIndex = new IndexKeyFile(file + XLogIndex.POSTFIX_GID);
}
val blist = this.gxidIndex.getAll(DataOutputX.toBytes(gxid));
val olist = new ArrayList[Long]();
EnumerScala.foreach(blist.iterator(), (bb: Array[Byte]) => {
olist.add(DataInputX.toLong5(bb, 0))
})
return olist;
}
def readByTxid(handler: (Array[Byte], Array[Byte]) => Any, dr: (Long)=>Array[Byte]) {
if (this.txidIndex == null) {
this.txidIndex = new IndexKeyFile(file + XLogIndex.POSTFIX_TID);
}
this.txidIndex.read(handler, dr);
}
def readByGxid(handler: (Array[Byte], Array[Byte]) => Any, dr: (Long)=>Array[Byte]) {
if (this.gxidIndex == null) {
this.gxidIndex = new IndexKeyFile(file + XLogIndex.POSTFIX_GID);
}
this.gxidIndex.read(handler, dr);
}
override def close() {
XLogIndex.table.synchronized {
      if (this.reference == 0) {
XLogIndex.table.remove(this.file);
FileUtil.close(this.txidIndex);
FileUtil.close(this.gxidIndex);
FileUtil.close(this.timeIndex);
} else {
        this.reference -= 1;
}
}
}
}
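// XLogIndex.open/close above implement a reference-counted cache of index handles:
// open() reuses and bumps the count for an existing handle, and close() frees the
// cached handle only once the count drops back to zero. A self-contained sketch of
// the same pattern (illustrative; not tied to the scouter classes):
object RefCountedHandles {
  final class Handle(val key: String) { var refs = 0 }
  private val table = new java.util.Hashtable[String, Handle]()
  def open(key: String): Handle = table.synchronized {
    val existing = table.get(key)
    if (existing != null) { existing.refs += 1; existing }
    else { val h = new Handle(key); table.put(key, h); h }
  }
  def close(h: Handle): Unit = table.synchronized {
    if (h.refs == 0) table.remove(h.key) // last user: drop the cached handle
    else h.refs -= 1
  }
}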
|
jahnaviancha/scouter
|
scouter.server/src/scouter/server/db/xlog/XLogIndex.scala
|
Scala
|
apache-2.0
| 4,917 |
// Copyright (C) 2011-2012 the original author or authors.
// See the LICENCE.txt file distributed with this work for additional
// information regarding copyright ownership.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.scalastyle.scalariform
import org.scalastyle.PositionError
import org.scalastyle.ScalariformChecker
import org.scalastyle.ScalastyleError
import org.scalastyle.scalariform.VisitorHelper.Clazz
import org.scalastyle.scalariform.VisitorHelper.visit
import _root_.scalariform.parser.AstNode
import _root_.scalariform.parser.GeneralTokens
import _root_.scalariform.parser.InfixExpr
import _root_.scalariform.parser.PrefixExprElement
import _root_.scalariform.lexer.Token
import _root_.scalariform.lexer.Tokens.FALSE
import _root_.scalariform.lexer.Tokens.TRUE
import _root_.scalariform.lexer.Tokens.VARID
import _root_.scalariform.parser.CompilationUnit
class SimplifyBooleanExpressionChecker extends ScalariformChecker {
val errorKey = "simplify.boolean.expression"
def verify(ast: CompilationUnit): List[ScalastyleError] = {
val it1 = for {
List(left, right) <- ast.tokens.sliding(2);
if (left.text == "!" && isBoolean(right))
} yield {
PositionError(left.offset)
}
val it2 = for {
t <- localvisit(ast);
if (matches(t))
} yield {
PositionError(t.position.get)
}
(it1.toList ::: it2.toList).sortWith((a, b) => a.position < b.position)
}
private def matches[T <: AstNode](t: Clazz[T]): Boolean = {
t match {
case t: InfixExprClazz => matchesInfixOp(t.id) && (boolean(t.left) || boolean(t.right))
case _ => false
}
}
private def matchesInfixOp(t: Token) = t.tokenType == VARID && Set("!=", "==", "&&", "||").contains(t.text)
class BaseClazz[+T <: AstNode](val position: Option[Int]) extends Clazz[T]
case class InfixExprClazz(_position: Option[Int], id: Token, left: List[Clazz[_]], right: List[Clazz[_]]) extends BaseClazz[InfixExpr](_position)
case class PrefixExprElementClazz(_position: Option[Int], id: Token, expr: List[Clazz[_]]) extends BaseClazz[PrefixExprElement](_position)
case class GeneralTokensClazz(_position: Option[Int], bool: Boolean) extends BaseClazz[GeneralTokens](_position)
private def localvisit(ast: Any): List[BaseClazz[AstNode]] = ast match {
case t: InfixExpr => List(InfixExprClazz(Some(t.firstToken.offset), t.infixId, localvisit(t.left), localvisit(t.right)))
case t: GeneralTokens => List(GeneralTokensClazz(Some(t.firstToken.offset), isBoolean(t)))
case t: Any => visit(t, localvisit)
}
private def boolean(expr: List[Clazz[_]]) = expr.size == 1 && expr(0).isInstanceOf[GeneralTokensClazz] && expr(0).asInstanceOf[GeneralTokensClazz].bool
private def isBoolean(t: GeneralTokens): Boolean = t.tokens.size == 1 && isBoolean(t.tokens(0))
private def isBoolean(t: Token): Boolean = Set(TRUE, FALSE).contains(t.tokenType)
}
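// Illustrative examples of expressions this checker is designed to flag
// (not part of the original file):
//
//   val a = (flag == true)    // simplify to: val a = flag
//   val b = !false            // simplify to: val b = true
//   if (ready != false) ()    // simplify to: if (ready) ()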
|
scalastyle/scalastyle
|
src/main/scala/org/scalastyle/scalariform/SimplifyBooleanExpressionChecker.scala
|
Scala
|
apache-2.0
| 3,428 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.security.auth
import kafka.common.KafkaException
import kafka.security.authorizer.AclEntry
import org.apache.kafka.common.resource.{PatternType, ResourcePattern}
@deprecated("Use org.apache.kafka.common.resource.ResourcePattern", "Since 2.5")
object Resource {
val Separator = AclEntry.ResourceSeparator
val ClusterResourceName = "kafka-cluster"
val ClusterResource = Resource(Cluster, Resource.ClusterResourceName, PatternType.LITERAL)
val WildCardResource = AclEntry.WildcardResource
@deprecated("This resource name is not used by Kafka and will be removed in a future release", since = "2.1")
val ProducerIdResourceName = "producer-id" // This is not used since we don't have a producer id resource
def fromString(str: String): Resource = {
ResourceType.values.find(resourceType => str.startsWith(resourceType.name + Separator)) match {
case None => throw new KafkaException("Invalid resource string: '" + str + "'")
case Some(resourceType) =>
val remaining = str.substring(resourceType.name.length + 1)
PatternType.values.find(patternType => remaining.startsWith(patternType.name + Separator)) match {
case Some(patternType) =>
val name = remaining.substring(patternType.name.length + 1)
Resource(resourceType, name, patternType)
case None =>
Resource(resourceType, remaining, PatternType.LITERAL)
}
}
}
}
/**
*
* @param resourceType non-null type of resource.
 * @param name non-null name of the resource; for topic this will be the topic name, for group the group name. For cluster type
* it will be a constant string kafka-cluster.
* @param patternType non-null resource pattern type: literal, prefixed, etc.
*/
@deprecated("Use org.apache.kafka.common.resource.ResourcePattern", "Since 2.5")
case class Resource(resourceType: ResourceType, name: String, patternType: PatternType) {
if (!patternType.isSpecific)
throw new IllegalArgumentException(s"patternType must not be $patternType")
/**
* Create an instance of this class with the provided parameters.
* Resource pattern type would default to PatternType.LITERAL.
*
* @param resourceType non-null resource type
* @param name non-null resource name
* @deprecated Since 2.0, use [[kafka.security.auth.Resource(ResourceType, String, PatternType)]]
*/
@deprecated("Use Resource(ResourceType, String, PatternType", "Since 2.0")
def this(resourceType: ResourceType, name: String) = {
this(resourceType, name, PatternType.LITERAL)
}
def toPattern: ResourcePattern = {
new ResourcePattern(resourceType.toJava, name, patternType)
}
override def toString: String = {
resourceType.name + Resource.Separator + patternType + Resource.Separator + name
}
}
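// Round-trip sketch (assumes AclEntry.ResourceSeparator is ":", matching the string
// form used by Kafka's ACL tooling; illustrative only):
//
//   Resource(Topic, "payments", PatternType.LITERAL).toString  // "Topic:LITERAL:payments"
//   Resource.fromString("Topic:LITERAL:payments")              // Resource(Topic, "payments", LITERAL)
//   Resource.fromString("Group:my-group")                      // pattern type defaults to LITERAL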
|
sslavic/kafka
|
core/src/main/scala/kafka/security/auth/Resource.scala
|
Scala
|
apache-2.0
| 3,636 |
package net.codejitsu.tasks.dsl
import java.io.File
import java.util.UUID
import org.scalatest.{FlatSpec, Matchers}
/**
* RmTask tests.
*/
class RmTest extends FlatSpec with Matchers {
import scala.concurrent.duration._
import net.codejitsu.tasks._
import net.codejitsu.tasks.dsl.Tasks._
implicit val timeout = 30 seconds
implicit val stage = new Dev
"Rm task" should "remove a file with given name on given host" in {
val path = getClass.getResource("/program-param.sh").getPath.split("/").init.mkString("/") + "/"
val name = s"${UUID.randomUUID().toString}testfile.txt"
val file2create = new File(path + name)
file2create.exists should be (false)
val touchTask: TaskM[Boolean] = Touch(Localhost, path + name)
val touchResult = touchTask.run()
touchResult.res.isSuccess should be (true)
touchResult.out should be (empty)
touchResult.err should be (empty)
file2create.exists should be (true)
val rmTask: TaskM[Boolean] = Rm(Localhost, path + name)
val rmResult = rmTask.run()
rmResult.res.isSuccess should be (true)
rmResult.out should be (empty)
rmResult.err should be (empty)
file2create.exists should be (false)
}
it should "compose with the touch task" in {
val path = getClass.getResource("/program-param.sh").getPath.split("/").init.mkString("/") + "/"
val name = s"${UUID.randomUUID().toString}testfile.txt"
val file2create = new File(path + name)
file2create.exists should be (false)
val task = for {
tr <- Touch(Localhost, path + name)
rr <- Rm(Localhost, path + name)
} yield rr
val result = task.run()
result.res.isSuccess should be (true)
result.out should be (empty)
result.err should be (empty)
file2create.exists should be (false)
}
it should "compose with the touch task with `andThen`" in {
val path = getClass.getResource("/program-param.sh").getPath.split("/").init.mkString("/") + "/"
val name = s"${UUID.randomUUID().toString}testfile.txt"
val file2create = new File(path + name)
file2create.exists should be (false)
val task =
Touch(Localhost, path + name) andThen
Rm(Localhost, path + name)
val result = task.run()
result.res.isSuccess should be (true)
result.out should be (empty)
result.err should be (empty)
file2create.exists should be (false)
}
it should "return error if file don't exists" in {
val path = getClass.getResource("/program-param.sh").getPath.split("/").init.mkString("/") + "/"
val name = s"${UUID.randomUUID().toString}testfile.txt"
val file2create = new File(path + name)
file2create.exists should be (false)
val task = for {
tr <- Touch(Localhost, path + name)
rr1 <- Rm(Localhost, path + name)
rr2 <- Rm(Localhost, path + name)
} yield rr2
val result = task.run()
result.res.isSuccess should be (false)
file2create.exists should be (false)
}
}
|
codejitsu/tasks
|
tasks-dsl/src/test/scala/RmTest.scala
|
Scala
|
apache-2.0
| 2,977 |
package arqbrowser.lib
import java.io.IOException
import java.io.DataInput
class ArqCommit(val treeHash: Hash) {
}
object ArqCommit {
def read(stream: DataInput): ArqCommit = {
{
val a = stream.readInt();
val b = stream.readShort();
val c = stream.readByte();
if (a != 0x436f6d6d ||
b != 0x6974 ||
c != 0x56) {
throw new IOException("Bad magic");
}
}
val version = Utils.readVersionBytes(stream);
version match {
case 6 => {
val author = Utils.readStringWithIsNotNullHeader(stream);
val comment = Utils.readStringWithIsNotNullHeader(stream);
val parentCommitCount = stream.readLong().toInt;
for (i <- 0 until parentCommitCount) {
Utils.readStringWithIsNotNullHeader(stream);
stream.readBoolean();
}
val treeHash: Hash = Hash.fromString(Utils.readStringWithIsNotNullHeader(stream));
return new ArqCommit(treeHash);
}
case _ => throw new IOException("Unexpected version");
}
}
}
|
pauldoo/scratch
|
ArqBrowser/src/arqbrowser/lib/ArqCommit.scala
|
Scala
|
isc
| 1,058 |
package net.sansa_stack.rdf.common.partition.schema
/**
* Created by Simon Bin on 09/06/17.
*/
case class SchemaStringStringLang(s: String, o: String, l: String)
|
SANSA-Stack/SANSA-RDF
|
sansa-rdf/sansa-rdf-common/src/main/scala/net/sansa_stack/rdf/common/partition/schema/SchemaStringStringLang.scala
|
Scala
|
apache-2.0
| 165 |
package zooowner
package test
import org.apache.zookeeper.ZooKeeper
import org.apache.zookeeper.Watcher.Event.KeeperState
import scala.concurrent.duration._
class ZKConnectionMock(
override val client: ZooKeeper,
connectionWatcher: ZKConnectionWatcher)
extends impl.ZKConnectionImpl("", 1.second, connectionWatcher, None)
{
stateWatcher.dispatch(KeeperState.SyncConnected)
}
// vim: set ts=2 sw=2 et:
|
ataraxer/zooowner
|
zooowner-core/src/main/scala/test/ZKConnectionMock.scala
|
Scala
|
mit
| 418 |
package typelevelcourse.scalaz
import java.net.URI
import scalaz.{\/, Applicative, Foldable, Functor, IList, State, Traverse}
// Add a phantom tparam to this class.
sealed abstract class IntOrString[A]
final case class IOSInt[A](x: Int) extends IntOrString[A]
final case class IOSString[A](x: String) extends IntOrString[A]
object IntOrString {
// Then uncomment and implement this, following the types and laws.
// implicit val intOrStringCovariant: Functor[IntOrString] = ???
}
// Write the Functor, Foldable, Traverse for this.
final case class IntAndA[A](i: Int, a: A)
object IntAndA {
  implicit val intandaCovariant: Functor[IntAndA] = ???
}
object NonScalaZTraversal {
List(1,2,3,4).foldLeft((0,List[String]())) { case ((z:Int,a:List[String]),e: Int) => (z+e, a :+ e.toString) }
}
// Add a type parameter to these to replace URI.
final case class Doc(paras: IList[Para])
final case class Para(elts: IList[String \/ URI])
object Doc {
// Implement this: Functor, Foldable, Traverse
// implicit val docCovariant: Functor[Doc] = ???
}
object Para {
// And this
// implicit val paraCovariant: Functor[Para] = ???
// List the "URLs"
def urls[A](p: Para/*[A]*/): List[A] = ???
// How do I fetch a URI? (Use a side-effect here.)
def getURIContents(u: URI): String = ???
// Hint: extractAndFetch is trivial if you implement this first and
// use it there.
def memo[A, B](f: A => B): A => State[Map[A, B], B] = ???
// Fetch URI contents, but caching as you go.
def extractAndFetch(p: Para/*[URI]*/): State[Map[URI, String], Para/*[String]*/] = ???
// Then, update extractAndFetch to work with Doc, not Para. Then
// with arbitrary Traverse[F].
}
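// One possible sketch of the Functor instance the first exercise asks for
// (assumes scalaz 7's Functor, whose abstract method is map):
//
//   implicit val intAndAFunctor: Functor[IntAndA] = new Functor[IntAndA] {
//     def map[A, B](fa: IntAndA[A])(f: A => B): IntAndA[B] = IntAndA(fa.i, f(fa.a))
//   }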
|
AEtherSurfer/typelevel-course
|
exercises/src/main/scala/scalaz/Functor.scala
|
Scala
|
apache-2.0
| 1,698 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.tools
import java.io.PrintStream
import java.nio.file.Files
import kafka.common.MessageFormatter
import kafka.tools.ConsoleConsumer.ConsumerWrapper
import kafka.utils.{Exit, TestUtils}
import org.apache.kafka.clients.consumer.{ConsumerRecord, MockConsumer, OffsetResetStrategy}
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.test.MockDeserializer
import org.mockito.Mockito._
import org.mockito.ArgumentMatchers
import ArgumentMatchers._
import org.junit.Assert._
import org.junit.{Before, Test}
import scala.collection.JavaConverters._
class ConsoleConsumerTest {
@Before
def setup(): Unit = {
ConsoleConsumer.messageCount = 0
}
@Test
def shouldResetUnConsumedOffsetsBeforeExit() {
val topic = "test"
val maxMessages: Int = 123
val totalMessages: Int = 700
val startOffset: java.lang.Long = 0L
val mockConsumer = new MockConsumer[Array[Byte], Array[Byte]](OffsetResetStrategy.EARLIEST)
val tp1 = new TopicPartition(topic, 0)
val tp2 = new TopicPartition(topic, 1)
val consumer = new ConsumerWrapper(Some(topic), None, None, None, mockConsumer)
mockConsumer.rebalance(List(tp1, tp2).asJava)
mockConsumer.updateBeginningOffsets(Map(tp1 -> startOffset, tp2 -> startOffset).asJava)
0 until totalMessages foreach { i =>
// add all records, each partition should have half of `totalMessages`
mockConsumer.addRecord(new ConsumerRecord[Array[Byte], Array[Byte]](topic, i % 2, i / 2, "key".getBytes, "value".getBytes))
}
val formatter = mock(classOf[MessageFormatter])
ConsoleConsumer.process(maxMessages, formatter, consumer, System.out, skipMessageOnError = false)
assertEquals(totalMessages, mockConsumer.position(tp1) + mockConsumer.position(tp2))
consumer.resetUnconsumedOffsets()
assertEquals(maxMessages, mockConsumer.position(tp1) + mockConsumer.position(tp2))
verify(formatter, times(maxMessages)).writeTo(any(), any())
}
@Test
def shouldLimitReadsToMaxMessageLimit() {
val consumer = mock(classOf[ConsumerWrapper])
val formatter = mock(classOf[MessageFormatter])
val record = new ConsumerRecord("foo", 1, 1, Array[Byte](), Array[Byte]())
val messageLimit: Int = 10
when(consumer.receive()).thenReturn(record)
ConsoleConsumer.process(messageLimit, formatter, consumer, System.out, true)
verify(consumer, times(messageLimit)).receive()
verify(formatter, times(messageLimit)).writeTo(any(), any())
consumer.cleanup()
}
@Test
def shouldStopWhenOutputCheckErrorFails() {
val consumer = mock(classOf[ConsumerWrapper])
val formatter = mock(classOf[MessageFormatter])
val printStream = mock(classOf[PrintStream])
val record = new ConsumerRecord("foo", 1, 1, Array[Byte](), Array[Byte]())
when(consumer.receive()).thenReturn(record)
//Simulate an error on System.out after the first record has been printed
when(printStream.checkError()).thenReturn(true)
ConsoleConsumer.process(-1, formatter, consumer, printStream, true)
verify(formatter).writeTo(any(), ArgumentMatchers.eq(printStream))
verify(consumer).receive()
verify(printStream).checkError()
consumer.cleanup()
}
@Test
def shouldParseValidConsumerValidConfig() {
//Given
val args: Array[String] = Array(
"--bootstrap-server", "localhost:9092",
"--topic", "test",
"--from-beginning")
//When
val config = new ConsoleConsumer.ConsumerConfig(args)
//Then
assertEquals("localhost:9092", config.bootstrapServer)
assertEquals("test", config.topicArg)
assertEquals(true, config.fromBeginning)
}
@Test
def shouldParseValidSimpleConsumerValidConfigWithNumericOffset(): Unit = {
//Given
val args: Array[String] = Array(
"--bootstrap-server", "localhost:9092",
"--topic", "test",
"--partition", "0",
"--offset", "3")
//When
val config = new ConsoleConsumer.ConsumerConfig(args)
//Then
assertEquals("localhost:9092", config.bootstrapServer)
assertEquals("test", config.topicArg)
assertEquals(0, config.partitionArg.get)
assertEquals(3, config.offsetArg)
assertEquals(false, config.fromBeginning)
}
@Test(expected = classOf[IllegalArgumentException])
def shouldExitOnUnrecognizedNewConsumerOption(): Unit = {
Exit.setExitProcedure((_, message) => throw new IllegalArgumentException(message.orNull))
//Given
val args: Array[String] = Array(
"--new-consumer",
"--bootstrap-server", "localhost:9092",
"--topic", "test",
"--from-beginning")
//When
try {
new ConsoleConsumer.ConsumerConfig(args)
} finally {
Exit.resetExitProcedure()
}
}
@Test
def shouldParseValidSimpleConsumerValidConfigWithStringOffset() {
//Given
val args: Array[String] = Array(
"--bootstrap-server", "localhost:9092",
"--topic", "test",
"--partition", "0",
"--offset", "LatEst",
"--property", "print.value=false")
//When
val config = new ConsoleConsumer.ConsumerConfig(args)
//Then
assertEquals("localhost:9092", config.bootstrapServer)
assertEquals("test", config.topicArg)
assertEquals(0, config.partitionArg.get)
assertEquals(-1, config.offsetArg)
assertEquals(false, config.fromBeginning)
assertEquals(false, config.formatter.asInstanceOf[DefaultMessageFormatter].printValue)
}
@Test
def shouldParseValidConsumerConfigWithAutoOffsetResetLatest() {
//Given
val args: Array[String] = Array(
"--bootstrap-server", "localhost:9092",
"--topic", "test",
"--consumer-property", "auto.offset.reset=latest")
//When
val config = new ConsoleConsumer.ConsumerConfig(args)
val consumerProperties = ConsoleConsumer.consumerProps(config)
//Then
assertEquals("localhost:9092", config.bootstrapServer)
assertEquals("test", config.topicArg)
assertEquals(false, config.fromBeginning)
assertEquals("latest", consumerProperties.getProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG))
}
@Test
def shouldParseValidConsumerConfigWithAutoOffsetResetEarliest() {
//Given
val args: Array[String] = Array(
"--bootstrap-server", "localhost:9092",
"--topic", "test",
"--consumer-property", "auto.offset.reset=earliest")
//When
val config = new ConsoleConsumer.ConsumerConfig(args)
val consumerProperties = ConsoleConsumer.consumerProps(config)
//Then
assertEquals("localhost:9092", config.bootstrapServer)
assertEquals("test", config.topicArg)
assertEquals(false, config.fromBeginning)
assertEquals("earliest", consumerProperties.getProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG))
}
@Test
def shouldParseValidConsumerConfigWithAutoOffsetResetAndMatchingFromBeginning() {
//Given
val args: Array[String] = Array(
"--bootstrap-server", "localhost:9092",
"--topic", "test",
"--consumer-property", "auto.offset.reset=earliest",
"--from-beginning")
//When
val config = new ConsoleConsumer.ConsumerConfig(args)
val consumerProperties = ConsoleConsumer.consumerProps(config)
//Then
assertEquals("localhost:9092", config.bootstrapServer)
assertEquals("test", config.topicArg)
assertEquals(true, config.fromBeginning)
assertEquals("earliest", consumerProperties.getProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG))
}
@Test
def shouldParseValidConsumerConfigWithNoOffsetReset() {
//Given
val args: Array[String] = Array(
"--bootstrap-server", "localhost:9092",
"--topic", "test")
//When
val config = new ConsoleConsumer.ConsumerConfig(args)
val consumerProperties = ConsoleConsumer.consumerProps(config)
//Then
assertEquals("localhost:9092", config.bootstrapServer)
assertEquals("test", config.topicArg)
assertEquals(false, config.fromBeginning)
assertEquals("latest", consumerProperties.getProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG))
}
@Test(expected = classOf[IllegalArgumentException])
def shouldExitOnInvalidConfigWithAutoOffsetResetAndConflictingFromBeginning() {
Exit.setExitProcedure((_, message) => throw new IllegalArgumentException(message.orNull))
//Given
val args: Array[String] = Array(
"--bootstrap-server", "localhost:9092",
"--topic", "test",
"--consumer-property", "auto.offset.reset=latest",
"--from-beginning")
try {
val config = new ConsoleConsumer.ConsumerConfig(args)
ConsoleConsumer.consumerProps(config)
} finally {
Exit.resetExitProcedure()
}
}
@Test
def shouldParseConfigsFromFile() {
val propsFile = TestUtils.tempFile()
val propsStream = Files.newOutputStream(propsFile.toPath)
propsStream.write("request.timeout.ms=1000\\n".getBytes())
propsStream.write("group.id=group1".getBytes())
propsStream.close()
val args: Array[String] = Array(
"--bootstrap-server", "localhost:9092",
"--topic", "test",
"--consumer.config", propsFile.getAbsolutePath
)
val config = new ConsoleConsumer.ConsumerConfig(args)
assertEquals("1000", config.consumerProps.getProperty("request.timeout.ms"))
assertEquals("group1", config.consumerProps.getProperty("group.id"))
}
@Test
def groupIdsProvidedInDifferentPlacesMustMatch() {
Exit.setExitProcedure((_, message) => throw new IllegalArgumentException(message.orNull))
// different in all three places
var propsFile = TestUtils.tempFile()
var propsStream = Files.newOutputStream(propsFile.toPath)
propsStream.write("group.id=group-from-file".getBytes())
propsStream.close()
var args: Array[String] = Array(
"--bootstrap-server", "localhost:9092",
"--topic", "test",
"--group", "group-from-arguments",
"--consumer-property", "group.id=group-from-properties",
"--consumer.config", propsFile.getAbsolutePath
)
try {
new ConsoleConsumer.ConsumerConfig(args)
fail("Expected groups ids provided in different places to match")
} catch {
case e: IllegalArgumentException => //OK
}
// the same in all three places
propsFile = TestUtils.tempFile()
propsStream = Files.newOutputStream(propsFile.toPath)
propsStream.write("group.id=test-group".getBytes())
propsStream.close()
args = Array(
"--bootstrap-server", "localhost:9092",
"--topic", "test",
"--group", "test-group",
"--consumer-property", "group.id=test-group",
"--consumer.config", propsFile.getAbsolutePath
)
var config = new ConsoleConsumer.ConsumerConfig(args)
var props = ConsoleConsumer.consumerProps(config)
assertEquals("test-group", props.getProperty("group.id"))
// different via --consumer-property and --consumer.config
propsFile = TestUtils.tempFile()
propsStream = Files.newOutputStream(propsFile.toPath)
propsStream.write("group.id=group-from-file".getBytes())
propsStream.close()
args = Array(
"--bootstrap-server", "localhost:9092",
"--topic", "test",
"--consumer-property", "group.id=group-from-properties",
"--consumer.config", propsFile.getAbsolutePath
)
try {
new ConsoleConsumer.ConsumerConfig(args)
fail("Expected groups ids provided in different places to match")
} catch {
case e: IllegalArgumentException => //OK
}
// different via --consumer-property and --group
args = Array(
"--bootstrap-server", "localhost:9092",
"--topic", "test",
"--group", "group-from-arguments",
"--consumer-property", "group.id=group-from-properties"
)
try {
new ConsoleConsumer.ConsumerConfig(args)
fail("Expected groups ids provided in different places to match")
} catch {
case e: IllegalArgumentException => //OK
}
// different via --group and --consumer.config
propsFile = TestUtils.tempFile()
propsStream = Files.newOutputStream(propsFile.toPath)
propsStream.write("group.id=group-from-file".getBytes())
propsStream.close()
args = Array(
"--bootstrap-server", "localhost:9092",
"--topic", "test",
"--group", "group-from-arguments",
"--consumer.config", propsFile.getAbsolutePath
)
try {
new ConsoleConsumer.ConsumerConfig(args)
fail("Expected groups ids provided in different places to match")
} catch {
case e: IllegalArgumentException => //OK
}
// via --group only
args = Array(
"--bootstrap-server", "localhost:9092",
"--topic", "test",
"--group", "group-from-arguments"
)
config = new ConsoleConsumer.ConsumerConfig(args)
props = ConsoleConsumer.consumerProps(config)
assertEquals("group-from-arguments", props.getProperty("group.id"))
Exit.resetExitProcedure()
}
@Test
def testCustomPropertyShouldBePassedToConfigureMethod(): Unit = {
val args = Array(
"--bootstrap-server", "localhost:9092",
"--topic", "test",
"--property", "print.key=true",
"--property", "key.deserializer=org.apache.kafka.test.MockDeserializer",
"--property", "key.deserializer.my-props=abc"
)
val config = new ConsoleConsumer.ConsumerConfig(args)
assertTrue(config.formatter.isInstanceOf[DefaultMessageFormatter])
assertTrue(config.formatterArgs.containsKey("key.deserializer.my-props"))
val formatter = config.formatter.asInstanceOf[DefaultMessageFormatter]
assertTrue(formatter.keyDeserializer.get.isInstanceOf[MockDeserializer])
assertEquals(1, formatter.keyDeserializer.get.asInstanceOf[MockDeserializer].configs.size)
assertEquals("abc", formatter.keyDeserializer.get.asInstanceOf[MockDeserializer].configs.get("my-props"))
assertTrue(formatter.keyDeserializer.get.asInstanceOf[MockDeserializer].isKey)
}
@Test
def shouldParseGroupIdFromBeginningGivenTogether() {
// Start from earliest
var args: Array[String] = Array(
"--bootstrap-server", "localhost:9092",
"--topic", "test",
"--group", "test-group",
"--from-beginning")
var config = new ConsoleConsumer.ConsumerConfig(args)
assertEquals("localhost:9092", config.bootstrapServer)
assertEquals("test", config.topicArg)
assertEquals(-2, config.offsetArg)
assertEquals(true, config.fromBeginning)
// Start from latest
args = Array(
"--bootstrap-server", "localhost:9092",
"--topic", "test",
"--group", "test-group"
)
config = new ConsoleConsumer.ConsumerConfig(args)
assertEquals("localhost:9092", config.bootstrapServer)
assertEquals("test", config.topicArg)
assertEquals(-1, config.offsetArg)
assertEquals(false, config.fromBeginning)
}
@Test(expected = classOf[IllegalArgumentException])
def shouldExitOnGroupIdAndPartitionGivenTogether() {
Exit.setExitProcedure((_, message) => throw new IllegalArgumentException(message.orNull))
//Given
val args: Array[String] = Array(
"--bootstrap-server", "localhost:9092",
"--topic", "test",
"--group", "test-group",
"--partition", "0")
//When
try {
new ConsoleConsumer.ConsumerConfig(args)
} finally {
Exit.resetExitProcedure()
}
}
@Test(expected = classOf[IllegalArgumentException])
def shouldExitOnOffsetWithoutPartition() {
Exit.setExitProcedure((_, message) => throw new IllegalArgumentException(message.orNull))
//Given
val args: Array[String] = Array(
"--bootstrap-server", "localhost:9092",
"--topic", "test",
"--offset", "10")
//When
try {
new ConsoleConsumer.ConsumerConfig(args)
} finally {
Exit.resetExitProcedure()
}
}
}
|
KevinLiLu/kafka
|
core/src/test/scala/unit/kafka/tools/ConsoleConsumerTest.scala
|
Scala
|
apache-2.0
| 16,592 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.containerpool.docker
import java.io.File
import java.nio.file.Paths
import akka.actor.ActorSystem
import akka.stream.alpakka.file.scaladsl.FileTailSource
import akka.stream.scaladsl.{FileIO, Source => AkkaSource}
import akka.util.ByteString
import scala.concurrent.ExecutionContext
import scala.concurrent.Future
import scala.concurrent.blocking
import spray.json.DefaultJsonProtocol._
import spray.json._
import org.apache.openwhisk.common.Logging
import org.apache.openwhisk.common.TransactionId
import org.apache.openwhisk.core.containerpool.ContainerId
import org.apache.openwhisk.core.containerpool.ContainerAddress
import scala.io.Source
import scala.concurrent.duration.FiniteDuration
class DockerClientWithFileAccess(dockerHost: Option[String] = None,
containersDirectory: File = Paths.get("containers").toFile)(
executionContext: ExecutionContext)(implicit log: Logging, as: ActorSystem)
extends DockerClient(dockerHost)(executionContext)
with DockerApiWithFileAccess {
implicit private val ec = executionContext
/**
* Provides the home directory of the specified Docker container.
*
* Assumes that property "containersDirectory" holds the location of the
* home directory of all Docker containers. Default: directory "containers"
* in the current working directory.
*
* Does not verify that the returned directory actually exists.
*
* @param containerId Id of the desired Docker container
* @return canonical location of the container's home directory
*/
protected def containerDirectory(containerId: ContainerId) = {
new File(containersDirectory, containerId.asString).getCanonicalFile()
}
/**
* Provides the configuration file of the specified Docker container.
*
* Assumes that the file has the well-known location and name.
*
* Does not verify that the returned file actually exists.
*
* @param containerId Id of the desired Docker container
* @return canonical location of the container's configuration file
*/
protected def containerConfigFile(containerId: ContainerId) = {
new File(containerDirectory(containerId), "config.v2.json").getCanonicalFile()
}
/**
* Provides the log file of the specified Docker container written by
* Docker's JSON log driver.
*
* Assumes that the file has the well-known location and name.
*
* Does not verify that the returned file actually exists.
*
* @param containerId Id of the desired Docker container
* @return canonical location of the container's log file
*/
protected def containerLogFile(containerId: ContainerId) = {
new File(containerDirectory(containerId), s"${containerId.asString}-json.log").getCanonicalFile()
}
/**
* Provides the contents of the specified Docker container's configuration
* file as JSON object.
*
* @param configFile the container's configuration file in JSON format
* @return contents of configuration file as JSON object
*/
protected def configFileContents(configFile: File): Future[JsObject] = Future {
blocking { // Needed due to synchronous file operations
val source = Source.fromFile(configFile)
val config = try source.mkString
finally source.close()
config.parseJson.asJsObject
}
}
/**
* Extracts the IP of the container from the local config file of the docker daemon.
*
   * A container may be attached to more than one network and has an
   * IP address in each of them, so the network name is needed to
   * select the right one.
*
* @param id the id of the container to get the IP address from
* @param network name of the network to get the IP address from
* @return the ip address of the container
*/
protected def ipAddressFromFile(id: ContainerId, network: String): Future[ContainerAddress] = {
configFileContents(containerConfigFile(id)).map { json =>
val networks = json.fields("NetworkSettings").asJsObject.fields("Networks").asJsObject
val specifiedNetwork = networks.fields(network).asJsObject
val ipAddr = specifiedNetwork.fields("IPAddress")
ContainerAddress(ipAddr.convertTo[String])
}
}
// See extended trait for description
override def inspectIPAddress(id: ContainerId, network: String)(
implicit transid: TransactionId): Future[ContainerAddress] = {
ipAddressFromFile(id, network).recoverWith {
case _ => super.inspectIPAddress(id, network)
}
}
override def isOomKilled(id: ContainerId)(implicit transid: TransactionId): Future[Boolean] =
configFileContents(containerConfigFile(id))
.map(_.fields("State").asJsObject.fields("OOMKilled").convertTo[Boolean])
.recover { case _ => false }
private val readChunkSize = 8192 // bytes
override def rawContainerLogs(containerId: ContainerId,
fromPos: Long,
pollInterval: Option[FiniteDuration]): AkkaSource[ByteString, Any] =
try {
// If there is no waiting interval, we can end the stream early by reading just what is there from file.
pollInterval match {
case Some(interval) => FileTailSource(containerLogFile(containerId).toPath, readChunkSize, fromPos, interval)
case None => FileIO.fromPath(containerLogFile(containerId).toPath, readChunkSize, fromPos)
}
} catch {
case t: Throwable => AkkaSource.failed(t)
}
}
trait DockerApiWithFileAccess extends DockerApi {
/**
   * Reads logs from the JSON log file written for the container and returns
   * them as a stream of bytes.
*
* @param containerId id of the container to get the logs for
* @param fromPos position to start to read in the file
* @param pollInterval interval to poll for changes of the file
* @return a source emitting chunks read from the log-file
*/
def rawContainerLogs(containerId: ContainerId,
fromPos: Long,
pollInterval: Option[FiniteDuration]): AkkaSource[ByteString, Any]
}
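// --- Editor's note: the sketch below is an added illustration, not part of the original file. ---
// It shows one way a caller might consume `rawContainerLogs`, assuming an Akka 2.5-style
// materializer; the object name, container id and polling interval are made up for the example.
object RawContainerLogsExample {
  import akka.actor.ActorSystem
  import akka.stream.ActorMaterializer
  import akka.stream.scaladsl.Sink
  import scala.concurrent.duration._

  def tailLogs(docker: DockerApiWithFileAccess, id: ContainerId)(implicit system: ActorSystem): Unit = {
    implicit val materializer: ActorMaterializer = ActorMaterializer()
    // Tail the container's log file from the beginning, polling for new data every
    // 100 milliseconds, and print each chunk decoded as UTF-8.
    docker
      .rawContainerLogs(id, fromPos = 0L, pollInterval = Some(100.milliseconds))
      .map(_.utf8String)
      .runWith(Sink.foreach(print))
  }
}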
|
csantanapr/incubator-openwhisk
|
core/invoker/src/main/scala/org/apache/openwhisk/core/containerpool/docker/DockerClientWithFileAccess.scala
|
Scala
|
apache-2.0
| 6,864 |
package com.hyenawarrior.OldNorseGrammar.grammar.nominal
import com.hyenawarrior.OldNorseGrammar.grammar.adjectival.AdjectiveTraits
import com.hyenawarrior.OldNorseGrammar.grammar.adjectival.core.AdjectiveFormType
import com.hyenawarrior.OldNorseGrammar.grammar.calcinfra.Stage
import com.hyenawarrior.OldNorseGrammar.grammar.calcinfra.calculators.Calculator
import com.hyenawarrior.OldNorseGrammar.grammar.morphophonology.U_Umlaut
import com.hyenawarrior.OldNorseGrammar.grammar.phonology.MorphemeProperty.{Stem, StemSuffix, Suffix}
import com.hyenawarrior.OldNorseGrammar.grammar.phonology.PhonemeProperty.Syncopated
import com.hyenawarrior.OldNorseGrammar.grammar.phonology._
/**
* Created by HyenaWarrior on 2018.10.02..
*/
object SyncopeWordCalculator extends Calculator[Word, AdjectiveFormType] {
override def compute(word: Word, declension: AdjectiveFormType, stage: Stage[Word, AdjectiveFormType]) = {
val hasVowelAfterTheStem = word.morphemes
.find(m => !(m is Stem))
.exists(mh => mh.phonemes.headOption.exists(_.isVowel))
if(hasVowelAfterTheStem) {
val newWord = word.transformMorphemes {
case (mh @ SimpleMorpheme(_, Stem | StemSuffix), _) =>
mh.transformPhonemes[Vowel2](
{ case ph: Vowel2 => ph },
{ case (1, v) => v copyWithPropertyOf Syncopated }
)
}
Left(Seq(newWord))
} else {
Left(Seq(word))
}
}
override def shortCode: String = "SYNC"
override def reverseCompute(word: Word, declension: AdjectiveFormType, stage: Stage[Word, AdjectiveFormType]) = {
val optStemMorpheme = word.selectMorpheme(Stem)
val optSuffixMorpheme = word.selectMorpheme(Suffix)
if(optStemMorpheme.isEmpty) {
Right("Error: stem is missing")
} else if(optSuffixMorpheme.isEmpty) {
Right("Error: suffix is missing")
} else Left {
val Some(stemMorpheme) = optStemMorpheme
val Some(suffixMorpheme) = optSuffixMorpheme
val lastCons = stemMorpheme.phonemes.takeRight(2).collect { case c: Consonant => c }
val stemEndsWithTwoConsonants = lastCons.size == 2
val stemEndsWithDoubledConsonants = lastCons.toSet.size == 1
val suffixStartsWithVowel = suffixMorpheme.phonemes.headOption.exists(_.isVowel)
if(suffixStartsWithVowel && stemEndsWithTwoConsonants) {
val newWord = word.transformMorphemes {
case (mh, _) if mh is Stem =>
val stressedVowel = mh.phonemes
.collect { case ph if ph.isVowel => ph.asInstanceOf[Vowel2] }
.head
val possibleSecondVowel = possibleUnstressedVowel(declension, stressedVowel)
val (phs1, phs2) = mh.splitAtTo(-1).get
val seq = (phs1 :+ possibleSecondVowel) ++ phs2
SimpleMorpheme(seq, Stem)
case (mh, _) if mh is Suffix => mh.transformPhonemesRight[Vowel2](
{ case ph: Vowel2 => ph },
{ case (1, v) => v copyWithPropertyOf Syncopated }
)
}
          // Note: both branches currently yield the same candidate forms; the
          // doubled-consonant check does not affect the result yet.
          if(stemEndsWithDoubledConsonants) {
            Seq(word, newWord)
          } else {
            Seq(word, newWord)
          }
} else {
Seq(word)
}
}
}
private def possibleUnstressedVowel(declension: AdjectiveFormType, stressedVowel: Vowel2): SimpleVowel = {
val optTransform = AdjectiveTraits.transformationsFor(declension)
val chr = (optTransform, stressedVowel) match {
// non-productive transformation
case (Some(U_Umlaut), _) => 'u'
// productive U-umlaut
case (None, SimpleVowel('ǫ', _)) => 'u'
//case ? => "i"
case _ => 'a'
}
SimpleVowel(chr, PhonemeProperty.Syncopated)
}
}
|
HyenaSoftware/IG-Dictionary
|
OldNorseGrammarEngine/src/main/scala/com/hyenawarrior/OldNorseGrammar/grammar/nominal/SyncopeWordCalculator.scala
|
Scala
|
lgpl-3.0
| 3,708 |
package uk.co.sprily
package btf.web
package plugins
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.concurrent.blocking
import scala.concurrent.Future
import com.github.kxbmap.configs._
import com.typesafe.scalalogging.LazyLogging
import play.api.Application
import play.api.Play
import play.api.Plugin
import scalaz.concurrent._
import scalaz.stream._
import uk.co.sprily.dh.modbus.ModbusResponse
import uk.co.sprily.mqtt._
class MqttPublisher(app: Application) extends Plugin
with LazyLogging {
import scala.concurrent.ExecutionContext.Implicits.global
type MqttClient = AsyncSimpleClient.Client
private var client: Option[MqttClient] = None
private[this] lazy val options = loadOptions("datahopper.mqtt")
lazy val root = app.configuration.underlying.get[String]("datahopper.mqtt.root")
override def onStart() = {
def initialConnect(attempts: Int): Future[MqttClient] = {
AsyncSimpleClient.connect(options)
.recoverWith {
case (e: Exception) if attempts > 0 =>
logger.error(s"Unable to connect to MQTT broker. Re-trying. $e")
Future { blocking { Thread.sleep(3000) } } flatMap (_ => initialConnect(attempts-1))
case (e: Exception) =>
logger.error(s"Unable to connect to MQTT broker. Giving up. $e")
Future.failed(e)
}
}
logger.info("Attempting to connect to MQTT broker")
initialConnect(5).onSuccess {
case c =>
logger.info(s"Successfully made initial connection to MQTT broker")
client = Some(c)
}
}
override def onStop() = {
try {
logger.info("Disconnecting from MQTT broker")
client.foreach { c =>
Await.ready(AsyncSimpleClient.disconnect(c), 10.seconds)
}
} catch {
case (e: Exception) =>
logger.error(s"Error shutting down MQTT publisher: $e")
}
}
private[this] def loadOptions(key: String) = {
logger.info(s"Attempting to load '$key' MQTT options")
val cfg = app.configuration.underlying.getConfig(key)
logger.info(s"'$key' raw config: $cfg")
val o = MqttOptions(
url = cfg.get[String]("url"),
port = cfg.get[Int]("port"),
clientId = ClientId.random(),
cleanSession = true,
username = Some(cfg.get[String]("username")),
password = Some(cfg.get[String]("password")),
keepAliveInterval = 60.seconds,
InMemory)
val redacted = o.copy(password=o.password.map(_ => "<redacted>"))
logger.info(s"Loaded '$key' MQTT options: ${redacted}")
o
}
}
object MqttPublisher {
lazy val publisherO = Play.current.plugin[MqttPublisher]
def publish: Sink[Task,ModbusResponse] = {
channel.lift[Task,ModbusResponse,Unit] { r =>
Task {
publisherO.map { publisher =>
publisher.client.foreach { client =>
AsyncSimpleClient.publish(
client,
Topic(s"${publisher.root}/${r.device.id.value}/data/raw"),
r.measurement.toArray,
AtMostOnce,
retain=false
)
}
}
}
}
}
}
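// --- Editor's note: the sketch below is an added illustration, not part of the original file. ---
// It shows roughly how the `publish` sink above could be attached to a stream of
// ModbusResponse values with scalaz-stream; `responses` is an assumed stand-in for
// whatever process the application actually produces.
object MqttPublisherExample {
  import scalaz.concurrent.Task
  import scalaz.stream.Process

  def publishAll(responses: Process[Task, ModbusResponse]): Task[Unit] =
    // Pipe every response into the MQTT sink and run the stream to completion for its effects.
    responses.to(MqttPublisher.publish).run
}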
|
sprily/brush-training
|
on-site/app/plugins/MqttPublisher.scala
|
Scala
|
gpl-3.0
| 3,176 |
/**
* Copyright 2015 Eric Loots
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sequoia.sastest
import java.io.{FileOutputStream, OutputStreamWriter}
import java.nio.charset.StandardCharsets
object DelimitedFileRW {
def writeDelimitedFile(results: List[String], fileName: String): Unit = {
def printToFile(f: java.io.File)(op: java.io.PrintWriter => Unit) {
val p = new java.io.PrintWriter(new OutputStreamWriter(new FileOutputStream(f), StandardCharsets.UTF_8), true)
try { op(p) } finally { p.close() }
}
import java.io._
printToFile(new File(fileName)) { p =>
results.foreach(line => p.println(line))
}
}
}
import scala.language.postfixOps
import java.io.BufferedReader
import java.io.FileReader
import java.io.File
/**
* FileLineTraversable
* Source: Scala in Depth by Joshua Suereth
*/
class FileLineTraversable(file: File) extends Traversable[String] {
override def foreach[U](f: String => U): Unit = {
val input = new BufferedReader(new FileReader(file))
try {
var line = input.readLine
while (line != null) {
f(line)
line = input.readLine
}
} finally {
input.close()
}
}
override def toString =
"{Lines of " + file.getAbsolutePath + "}"
}
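// --- Editor's note: the sketch below is an added illustration, not part of the original file. ---
// It combines the two helpers above: read a file with FileLineTraversable, keep the
// non-empty lines, and write them back out with DelimitedFileRW. The path parameters are
// assumptions for the example.
object DelimitedFileRWExample {
  import java.io.File

  def copyNonEmptyLines(inputPath: String, outputPath: String): Unit = {
    val lines = new FileLineTraversable(new File(inputPath))
    // FileLineTraversable streams the file via foreach; filter keeps only the surviving lines.
    val nonEmpty = lines.filter(_.nonEmpty).toList
    DelimitedFileRW.writeDelimitedFile(nonEmpty, outputPath)
  }
}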
|
eloots/ScalaTestSas
|
src/main/scala/com/sequoia/sastest/DelimitedFileRW.scala
|
Scala
|
apache-2.0
| 1,786 |
package views.util
import models.PaperSizeAndWhiteSpace
object Calc {
def labelWidth(paper: PaperSizeAndWhiteSpace, cols: Int): Double = {
(paper.width - 2 * paper.margin - (cols - 1) * paper.interLabelGutter) / cols.toDouble
}
def labelHeight(paper: PaperSizeAndWhiteSpace, rows: Int): Double = {
(paper.height - 2 * paper.margin - (rows - 1) * paper.interLabelGutter) / rows.toDouble
}
}
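// --- Editor's worked example (added for illustration; the measurements below are assumptions). ---
// Plugging sample numbers into labelWidth: a 210-unit-wide sheet, 10-unit margins, a 5-unit
// gutter between labels and 3 columns gives
//   (210 - 2 * 10 - (3 - 1) * 5) / 3 = (210 - 20 - 10) / 3 = 60 units per label.
// labelHeight follows the same pattern using the paper height and the row count.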
|
dmanchester/playfop
|
sample-scala/app/views/util/Calc.scala
|
Scala
|
apache-2.0
| 410 |
package Chapter2
class uncurryTest {
}
|
aromine2/fpinscala
|
src/test/scala/Chapter2/uncurryTest.scala
|
Scala
|
mit
| 41 |
/**
* Copyright 2013 Alex Jones
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package controllers
import akka.stream.scaladsl.{Source => AkkaSource}
import com.mohiva.play.silhouette.api.actions.SecuredRequest
import com.typesafe.scalalogging.StrictLogging
import dao.GameDao
import dates.ZonedDateTimeFactory
import logging.{Fatal, RemoteStream}
import model.{FatalError, Game}
import models.Competition.FRIENDLY
import models.{Competition, FatalErrorReportRel}
import monads.FO.FutureOption
import play.api.i18n.MessagesApi
import play.api.mvc._
import security.Definitions._
import services.GameRowFactory
import update.MainUpdateService
import scala.concurrent.{ExecutionContext, Future}
import scala.util.Failure
/**
* @author alex
*
*/
class Update @javax.inject.Inject() (val secret: SecretToken,
val mainUpdateService: MainUpdateService,
val gameDao: GameDao,
val gameRowFactory: GameRowFactory,
override val messagesApi: MessagesApi,
override val controllerComponents: ControllerComponents,
override val zonedDateTimeFactory: ZonedDateTimeFactory,
val silhouette: DefaultSilhouette,
val auth: Auth,
val fatal: Fatal,
override implicit val ec: ExecutionContext
) extends AbstractController(controllerComponents, zonedDateTimeFactory, ec) with Secure with Secret with LinkFactories with StrictLogging {
/**
* Update all games in the database from the web.
*/
def update(secretPayload: String) = Secret(secretPayload) {
Action { implicit request: Request[AnyContent] =>
chunked(mainUpdateService.processDatabaseUpdates) { count =>
s"There are now $count games."
}
}
}
/**
* Attend or unattend a game.
*/
def attendOrUnattend(gameUpdater: Long => FutureOption[Game], gameId: Long) =
silhouette.SecuredAction.async { implicit request: SecuredRequest[DefaultEnv, AnyContent] =>
json(gameUpdater(gameId)) { game =>
gameRowFactory.toRow(includeAttended = true, gameRowLinksFactory(includeUpdates = true))(game)
}
}
/**
* Attend a game.
*/
def attend(gameId: Long) = attendOrUnattend(mainUpdateService.attendGame, gameId)
/**
* Unattend a game.
*/
def unattend(gameId: Long) = attendOrUnattend(mainUpdateService.unattendGame, gameId)
}
|
unclealex72/west-ham-calendar
|
app/controllers/Update.scala
|
Scala
|
apache-2.0
| 3,400 |
package org.jetbrains.plugins.dotty.lang.parser.parsing.types
import org.jetbrains.plugins.dotty.lang.parser.DottyElementTypes._
import org.jetbrains.plugins.scala.lang.parser.parsing.builder.ScalaPsiBuilder
import org.jetbrains.plugins.scala.lang.parser.parsing.types.Refinement
/**
* @author adkozlov
*/
/*
* RefinedType ::= WithType {[nl] Refinement}
*/
object RefinedType extends org.jetbrains.plugins.scala.lang.parser.parsing.types.Type {
override protected def infixType = InfixType
override def parse(builder: ScalaPsiBuilder, star: Boolean, isPattern: Boolean): Boolean = {
val marker = builder.mark()
if (!WithType.parse(builder, star, isPattern)) {
marker.drop()
return false
}
var isDone = false
while (Refinement.parse(builder)) {
isDone = true
}
if (isDone) {
marker.done(REFINED_TYPE)
} else {
marker.drop()
}
true
}
}
|
jastice/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/dotty/lang/parser/parsing/types/RefinedType.scala
|
Scala
|
apache-2.0
| 922 |
package pl.abankowski.ws
import play.api.libs.iteratee._
import play.api.libs.iteratee.Input._
import play.api.libs.ws.{ WS => PlayWS }
class WSImpl extends WS {
override def client = PlayWS.client(play.api.Play.current)
override def url(url: String) = client.url(url)
override def canonizeUrl(url: String) = url.endsWith("/") match {
case true => url.slice(0, url.length - 1)
case false => url
}
}
|
abankowski/musicbrainz-scala-client
|
src/main/scala/pl/abankowski/ws/WSImpl.scala
|
Scala
|
mit
| 418 |
package testsequencing
import java.io.File
import feature._
import htsjdk.samtools.SAMRecord
import org.scalatest.FunSuite
import sequencing.SamReader
import shared.GTF22Data._
/**
* Created by prussell on 2/8/17.
*/
class SamReaderSuite extends FunSuite {
def makeRecordIter(reader: SamReader, feat: Feature): Iterator[SAMRecord] =
reader.compatibleRecords(feat)
test("Compatible records overlapping transcript - reads paired") {
// Read contains multiple blocks fully contained in exon
assert(!makeRecordIter(samReaderPairedPlus, ENST00000373664).contains(DRR02375226601209_paired))
assert(!makeRecordIter(samReaderPairedMinus, ENST00000373664).contains(DRR02375226601209_paired))
// Mate unmapped
assert(makeRecordIter(samReaderPairedPlus, ENST00000525052).contains(DRR0237521381132_paired))
assert(!makeRecordIter(samReaderPairedMinus, ENST00000525052).contains(DRR0237521381132_paired))
// Unstranded should add up to Plus + Minus
val s8 = makeRecordIter(samReaderPairedPlus, ENST00000525052).size
val s9 = makeRecordIter(samReaderPairedMinus, ENST00000525052).size
val s10 = makeRecordIter(samReaderPairedUnstranded, ENST00000525052).size
assert(s10 == s8 + s9)
// Read 1 minus strand
assert(makeRecordIter(samReaderPairedPlus, ENST00000373664).contains(DRR0237521778081_paired))
assert(!makeRecordIter(samReaderPairedMinus, ENST00000373664).contains(DRR0237521778081_paired))
// Read 1 plus strand
assert(!makeRecordIter(samReaderPairedPlus, ENST00000373664).contains(DRR02375225421084_paired))
assert(makeRecordIter(samReaderPairedMinus, ENST00000373664).contains(DRR02375225421084_paired))
// Read 2 minus strand
assert(!makeRecordIter(samReaderPairedPlus, ENST00000373664).contains(DRR02375221918054_paired))
assert(makeRecordIter(samReaderPairedMinus, ENST00000373664).contains(DRR02375221918054_paired))
// Read 2 plus strand
assert(makeRecordIter(samReaderPairedPlus, ENST00000373664).contains(DRR02375228168172_paired))
assert(!makeRecordIter(samReaderPairedMinus, ENST00000373664).contains(DRR02375228168172_paired))
// Multiple isoforms
assert(makeRecordIter(samReaderPairedMinus, ENST00000373606).contains(DRR02375227278814_paired))
assert(!makeRecordIter(samReaderPairedMinus, ENST00000397152).contains(DRR02375227278814_paired))
assert(!makeRecordIter(samReaderPairedMinus, ENST00000373606).contains(DRR0237529822998_paired))
assert(makeRecordIter(samReaderPairedMinus, ENST00000397152).contains(DRR0237529822998_paired))
// Constructed features for testing off by one errors
assert(makeRecordIter(samReaderPairedPlus, DRR0237527248016_contained1).contains(DRR0237527248016_paired))
assert(makeRecordIter(samReaderPairedPlus, DRR0237527248016_contained2).contains(DRR0237527248016_paired))
assert(makeRecordIter(samReaderPairedPlus, DRR0237527248016_contained3).contains(DRR0237527248016_paired))
assert(!makeRecordIter(samReaderPairedPlus, DRR0237527248016_notContained1).contains(DRR0237527248016_paired))
assert(!makeRecordIter(samReaderPairedPlus, DRR0237527248016_notContained2).contains(DRR0237527248016_paired))
assert(!makeRecordIter(samReaderPairedPlus, DRR0237527248016_notContained3).contains(DRR0237527248016_paired))
assert(!makeRecordIter(samReaderPairedPlus, DRR0237527248016_notContained4).contains(DRR0237527248016_paired))
}
test("Compatible records overlapping transcript - reads unpaired") {
// DRR02375229686457_unpaired
assert(makeRecordIter(samReaderUnpairedPlus, ENST00000346199).contains(DRR02375229686457_unpaired))
assert(!makeRecordIter(samReaderUnpairedPlus, ENST00000445723).contains(DRR02375229686457_unpaired))
assert(!makeRecordIter(samReaderUnpairedMinus, ENST00000346199).contains(DRR02375229686457_unpaired))
assert(!makeRecordIter(samReaderUnpairedMinus, ENST00000445723).contains(DRR02375229686457_unpaired))
assert(makeRecordIter(samReaderUnpairedUnstranded, ENST00000346199).contains(DRR02375229686457_unpaired))
assert(!makeRecordIter(samReaderUnpairedUnstranded, ENST00000445723).contains(DRR02375229686457_unpaired))
// DRR0237526367658_unpaired
assert(!makeRecordIter(samReaderUnpairedPlus, ENST00000346199).contains(DRR0237526367658_unpaired))
assert(!makeRecordIter(samReaderUnpairedPlus, ENST00000445723).contains(DRR0237526367658_unpaired))
assert(makeRecordIter(samReaderUnpairedMinus, ENST00000346199).contains(DRR0237526367658_unpaired))
assert(!makeRecordIter(samReaderUnpairedMinus, ENST00000445723).contains(DRR0237526367658_unpaired))
assert(makeRecordIter(samReaderUnpairedUnstranded, ENST00000346199).contains(DRR0237526367658_unpaired))
assert(!makeRecordIter(samReaderUnpairedUnstranded, ENST00000445723).contains(DRR0237526367658_unpaired))
// DRR02375217157817_unpaired
assert(!makeRecordIter(samReaderUnpairedPlus, ENST00000346199).contains(DRR02375217157817_unpaired))
assert(makeRecordIter(samReaderUnpairedPlus, ENST00000445723).contains(DRR02375217157817_unpaired))
assert(makeRecordIter(samReaderUnpairedMinus, ENST00000346199).contains(DRR02375217157817_unpaired))
assert(!makeRecordIter(samReaderUnpairedMinus, ENST00000445723).contains(DRR02375217157817_unpaired))
assert(makeRecordIter(samReaderUnpairedUnstranded, ENST00000346199).contains(DRR02375217157817_unpaired))
assert(makeRecordIter(samReaderUnpairedUnstranded, ENST00000445723).contains(DRR02375217157817_unpaired))
}
test("Compatible records overlapping transcript - reads unpaired, short reference names") {
// Block with illegal long reference name
intercept[IllegalArgumentException]{makeRecordIter(samReaderUnpairedShortRefNamePlus,
new GenericFeature(Block("chr20", 37682448, 37682602, Plus), None)).size === 9}
// DRR02375229686457_unpaired
assert(makeRecordIter(samReaderUnpairedShortRefNamePlus, ENST00000346199).contains(DRR02375229686457_unpaired_shortRefName))
assert(!makeRecordIter(samReaderUnpairedShortRefNamePlus, ENST00000445723).contains(DRR02375229686457_unpaired_shortRefName))
assert(!makeRecordIter(samReaderUnpairedShortRefNameMinus, ENST00000346199).contains(DRR02375229686457_unpaired_shortRefName))
assert(!makeRecordIter(samReaderUnpairedShortRefNameMinus, ENST00000445723).contains(DRR02375229686457_unpaired_shortRefName))
assert(makeRecordIter(samReaderUnpairedShortRefNameUnstranded, ENST00000346199).contains(DRR02375229686457_unpaired_shortRefName))
assert(!makeRecordIter(samReaderUnpairedShortRefNameUnstranded, ENST00000445723).contains(DRR02375229686457_unpaired_shortRefName))
// DRR0237526367658_unpaired
assert(!makeRecordIter(samReaderUnpairedShortRefNamePlus, ENST00000346199).contains(DRR0237526367658_unpaired_shortRefName))
assert(!makeRecordIter(samReaderUnpairedShortRefNamePlus, ENST00000445723).contains(DRR0237526367658_unpaired_shortRefName))
assert(makeRecordIter(samReaderUnpairedShortRefNameMinus, ENST00000346199).contains(DRR0237526367658_unpaired_shortRefName))
assert(!makeRecordIter(samReaderUnpairedShortRefNameMinus, ENST00000445723).contains(DRR0237526367658_unpaired_shortRefName))
assert(makeRecordIter(samReaderUnpairedShortRefNameUnstranded, ENST00000346199).contains(DRR0237526367658_unpaired_shortRefName))
assert(!makeRecordIter(samReaderUnpairedShortRefNameUnstranded, ENST00000445723).contains(DRR0237526367658_unpaired_shortRefName))
// DRR02375217157817_unpaired
assert(!makeRecordIter(samReaderUnpairedShortRefNamePlus, ENST00000346199).contains(DRR02375217157817_unpaired_shortRefName))
assert(makeRecordIter(samReaderUnpairedShortRefNamePlus, ENST00000445723).contains(DRR02375217157817_unpaired_shortRefName))
assert(makeRecordIter(samReaderUnpairedShortRefNameMinus, ENST00000346199).contains(DRR02375217157817_unpaired_shortRefName))
assert(!makeRecordIter(samReaderUnpairedShortRefNameMinus, ENST00000445723).contains(DRR02375217157817_unpaired_shortRefName))
assert(makeRecordIter(samReaderUnpairedShortRefNameUnstranded, ENST00000346199).contains(DRR02375217157817_unpaired_shortRefName))
assert(makeRecordIter(samReaderUnpairedShortRefNameUnstranded, ENST00000445723).contains(DRR02375217157817_unpaired_shortRefName))
}
test("Iterator size") {
assert(samReaderPairedUnstranded.iterator.size === 45787)
assert(samReaderPairedPlus.iterator.size === 45787)
assert(samReaderPairedMinus.compatibleRecords(ENST00000411780).size === 5)
assert(samReaderPairedUnstranded.compatibleRecords(ENST00000411780).size === 5)
assert(samReaderPairedPlus.compatibleRecords(ENST00000411780).size === 0)
}
test("Iterator size - reader with filter") {
assert(new SamReader(new File(getClass.getResource("/DRR023752_chr20_37Mb_38Mb.bam").getPath),
Unstranded, rec => rec.getNotPrimaryAlignmentFlag && rec.getReadPairedFlag)
.iterator.size === 1024)
assert(new SamReader(new File(getClass.getResource("/DRR023752_chr20_37Mb_38Mb.bam").getPath),
Minus, rec => !rec.getReadPairedFlag)
.iterator.size === 0)
assert(new SamReader(new File(getClass.getResource("/DRR023752_chr20_37Mb_38Mb.bam").getPath),
Plus, rec => rec.getReadPairedFlag)
.iterator.size === 45787)
assert(new SamReader(new File(getClass.getResource("/DRR023752_chr20_37Mb_38Mb.bam").getPath),
Minus, rec => rec.getReadNegativeStrandFlag).compatibleRecords(ENST00000411780).size === 3)
assert(new SamReader(new File(getClass.getResource("/DRR023752_chr20_37Mb_38Mb.bam").getPath),
Minus, rec => !rec.getReadNegativeStrandFlag).compatibleRecords(ENST00000411780).size === 2)
assert(new SamReader(new File(getClass.getResource("/DRR023752_chr20_37Mb_38Mb.bam").getPath),
Plus, rec => !rec.getReadNegativeStrandFlag).compatibleRecords(ENST00000411780).size === 0)
}
test("Count compatible records") {
assert(samReaderPairedMinus.countCompatibleRecords(ENST00000411780) === 5)
assert(samReaderPairedUnstranded.countCompatibleRecords(ENST00000411780) === 5)
assert(samReaderPairedPlus.countCompatibleRecords(ENST00000411780) === 0)
}
test("Count compatible records and fragments - nonexistent chromosome") {
assert(samReaderUnpairedPlus.countCompatibleRecords(new GenericFeature(Block("xxx", 1000, 2000, Plus), None)) === 0)
assert(samReaderPairedPlus.countCompatibleRecords(new GenericFeature(Block("xxx", 1000, 2000, Plus), None)) === 0)
assert(samReaderUnpairedPlus.countCompatibleFragments(new GenericFeature(Block("xxx", 1000, 2000, Plus), None)) === 0)
assert(samReaderPairedPlus.countCompatibleFragments(new GenericFeature(Block("xxx", 1000, 2000, Plus), None)) === 0)
}
test("Iterator size - count compatible records with filter") {
assert(new SamReader(new File(getClass.getResource("/DRR023752_chr20_37Mb_38Mb.bam").getPath),
Minus, rec => rec.getReadNegativeStrandFlag).countCompatibleRecords(ENST00000411780) === 3)
assert(new SamReader(new File(getClass.getResource("/DRR023752_chr20_37Mb_38Mb.bam").getPath),
Minus, rec => !rec.getReadNegativeStrandFlag).countCompatibleRecords(ENST00000411780) === 2)
assert(new SamReader(new File(getClass.getResource("/DRR023752_chr20_37Mb_38Mb.bam").getPath),
Plus, rec => !rec.getReadNegativeStrandFlag).countCompatibleRecords(ENST00000411780) === 0)
}
test("Overlappers") {
assert(samReaderPairedMinus.overlappers(ENST00000411780).size === 5)
assert(samReaderPairedPlus.overlappers(ENST00000411780).size === 1)
assert(samReaderPairedUnstranded.overlappers(ENST00000411780).size === 6)
}
test("No next element entire file") {
val iter1 = samReaderPairedUnstranded.iterator
while(iter1.hasNext) {iter1.next()}
intercept[NoSuchElementException]{iter1.next()}
}
test("No next element one feature") {
val iter2 = samReaderPairedPlus.compatibleRecords(ENST00000525052)
while(iter2.hasNext) {iter2.next()}
intercept[NoSuchElementException]{iter2.next()}
}
test("Invalid header") {
intercept[IllegalArgumentException]{new SamReader(new File(getClass.getResource("/invalid_header.bam").getPath), Plus)}
}
test("Invalid mixed paired and unpaired bam") {
val sr = new SamReader(new File(getClass.getResource("/mixed_paired_unpaired.bam").getPath), Plus)
val it = sr.iterator
intercept[IllegalArgumentException] {
while(it.hasNext) it.next()
}
intercept[IllegalArgumentException] {
sr.countCompatibleRecords(new GenericFeature(Block("20", 37499114, 37510351, Minus), None))
}
}
def applySamReaderPaired(fopStrand: Orientation, fn: SamReader => Boolean): Boolean = {
fopStrand match {
case Plus => fn(samReaderPairedPlus)
case Minus => fn(samReaderPairedMinus)
case Unstranded => fn(samReaderPairedUnstranded)
}
}
def existsRecord(start: Int, end: Int, orientation: Orientation, name: String, fopStrand: Orientation): Boolean = {
def fn (samReader: SamReader) = {
samReader.compatibleRecords(new GenericFeature(Block("20", start, end, orientation), None))
.exists(rec => rec.getReadName == name)
}
applySamReaderPaired(fopStrand, fn)
}
def existsFragment(feat: Feature, name: String, fopStrand: Orientation): Boolean = {
def fn (samReader: SamReader) = {
samReader.compatibleFragments(feat)
.exists(pair => pair._1.getReadName == name && pair._2.getReadName == name
&& pair._1.getFirstOfPairFlag && !pair._2.getFirstOfPairFlag) }
applySamReaderPaired(fopStrand, fn)
}
def notExistsFragment(feat: Feature, name: String, fopStrand: Orientation): Boolean = {
def fn(samReader: SamReader) = {
!samReader.compatibleFragments(feat)
.exists(pair => pair._1.getReadName == name || pair._2.getReadName == name) }
applySamReaderPaired(fopStrand, fn)
}
test("Compatible fragments") {
assert(samReaderPairedPlus.compatibleFragments(new GenericFeature(Block("20", 37500065, 37500486, Minus), None)).size === 4)
// Unpaired
intercept[IllegalArgumentException](samReaderUnpairedPlus.compatibleFragments(new GenericFeature(Block("20", 37500065, 37500486, Minus), None)))
assert(samReaderPairedMinus.compatibleFragments(new GenericFeature(Block("20", 37500065, 37500486, Minus), None)).size === 1)
assert(samReaderPairedUnstranded.compatibleFragments(new GenericFeature(Block("20", 37500065, 37500486, Minus), None)).size === 5)
assert(existsFragment(new GenericFeature(Block("20", 37500065, 37500486, Minus), None), "DRR023752.16260396", Plus))
assert(existsFragment(new GenericFeature(Block("20", 37500065, 37500486, Minus), None), "DRR023752.17105240", Minus))
// No full pairs
assert(samReaderPairedPlus.compatibleFragments(ENST00000373508).size === 0)
assert(samReaderPairedMinus.compatibleFragments(ENST00000373508).size === 0)
// Mate unmapped
assert(notExistsFragment(new GenericFeature(Block("20", 37003677, 37003895, Unstranded), None),
"DRR023752.7710123", Unstranded))
assert(existsRecord(37003677, 37003895, Unstranded, "DRR023752.7710123", Unstranded))
// Both mates mapped on same strand
assert(notExistsFragment(new GenericFeature(Block("20", 37184083, 37184521, Unstranded), None),
"DRR023752.35442471", Unstranded))
assert(existsRecord(37184083, 37184521, Unstranded, "DRR023752.35442471", Unstranded))
// Spliced records
assert(existsFragment(ENST00000397152, "DRR023752.36362659", Minus))
assert(notExistsFragment(ENST00000397152, "DRR023752.36362659", Plus))
// Not primary alignment
assert(notExistsFragment(new GenericFeature(Block("20", 37240662, 37240966, Unstranded), None), "DRR023752.35525881", Unstranded))
assert(existsRecord(37240662, 37240966, Unstranded, "DRR023752.35525881", Unstranded))
}
test("queryRecords with other predicate") {
    assert(samReaderPairedMinus.queryRecords("20", 37945227, 37945318, rec => rec.getReadName == "DRR023752.26575011").size === 1)
    assert(samReaderPairedMinus.queryRecords("20", 37945227, 37945318, rec => rec.getReadName == "???").size === 0)
}
test("Count compatible fragments - reads unpaired") {
assert(samReaderUnpairedMinus.countCompatibleFragments(ENST00000411780) === 2)
assert(samReaderUnpairedUnstranded.countCompatibleFragments(ENST00000411780) === 2)
assert(samReaderUnpairedPlus.countCompatibleFragments(ENST00000411780) === 0)
}
test("Count compatible fragments - reads paired") {
assert(samReaderPairedPlus.countCompatibleFragments(new GenericFeature(Block("20", 37500065, 37500486, Minus), None)) === 4)
assert(samReaderPairedMinus.countCompatibleFragments(new GenericFeature(Block("20", 37500065, 37500486, Minus), None)) === 1)
assert(samReaderPairedUnstranded.countCompatibleFragments(new GenericFeature(Block("20", 37500065, 37500486, Minus), None)) === 5)
assert(samReaderPairedUnstranded.countCompatibleFragments(new GenericFeature(Block("20", 37500065, 37500486, Unstranded), None)) === 5)
// No full pairs
assert(samReaderPairedPlus.countCompatibleFragments(ENST00000373508) === 0)
assert(samReaderPairedMinus.countCompatibleFragments(ENST00000373508) === 0)
}
}
|
pamelarussell/sgxlib
|
src/test/scala/testsequencing/SamReaderSuite.scala
|
Scala
|
mit
| 17,283 |
package scalation
/** The `activity` package contains classes, traits and objects for
* activity-oriented simulation models (for example, Petri Nets).
*/
package object activity { }
|
scalation/fda
|
scalation_1.3/scalation_modeling/src/main/scala/scalation/activity/package.scala
|
Scala
|
mit
| 189 |
/*
* Copyright (C) 2012 Romain Reuillon
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openmole.plugin.environment.ssh
import java.io.File
import org.openmole.core.exception._
import org.openmole.plugin.environment.batch.authentication._
import org.openmole.core.authentication._
import org.openmole.core.preference.Preference
import org.openmole.core.serializer.SerializerService
import org.openmole.tool.crypto.Cypher
import scala.util.Try
object SSHAuthentication {
implicit def isGridScaleAuthentication(implicit cypher: Cypher) = new _root_.gridscale.ssh.SSHAuthentication[org.openmole.plugin.environment.ssh.SSHAuthentication] {
override def login(a: org.openmole.plugin.environment.ssh.SSHAuthentication) = a match {
case a: LoginPassword ⇒ a.login
case a: PrivateKey ⇒ a.login
}
override def authenticate(a: org.openmole.plugin.environment.ssh.SSHAuthentication, sshClient: _root_.gridscale.ssh.sshj.SSHClient) = a match {
case a: LoginPassword ⇒
val gsAuth = gridscale.authentication.UserPassword(a.login, a.password)
implicitly[gridscale.ssh.SSHAuthentication[gridscale.authentication.UserPassword]].authenticate(gsAuth, sshClient)
case a: PrivateKey ⇒
val gsAuth = gridscale.authentication.PrivateKey(a.privateKey, a.password, a.login)
implicitly[gridscale.ssh.SSHAuthentication[gridscale.authentication.PrivateKey]].authenticate(gsAuth, sshClient)
}
}
def apply()(implicit authenticationStore: AuthenticationStore, serializerService: SerializerService) =
Authentication.allByCategory.getOrElse(classOf[SSHAuthentication].getName, Seq.empty).map(_.asInstanceOf[SSHAuthentication])
def find(login: String, host: String, port: Int = 22)(implicit authenticationStore: AuthenticationStore, serializerService: SerializerService): SSHAuthentication = {
val list = apply()
val auth = list.reverse.find { a ⇒ (a.login, a.host, a.port) == (login, host, port) }
auth.getOrElse(throw new UserBadDataError(s"No authentication method found for $login@$host:$port"))
}
def +=(a: SSHAuthentication)(implicit authenticationStore: AuthenticationStore, serializerService: SerializerService) =
Authentication.save[SSHAuthentication](a, eq)
def -=(a: SSHAuthentication)(implicit authenticationStore: AuthenticationStore, serializerService: SerializerService) =
Authentication.remove[SSHAuthentication](a, eq)
def clear()(implicit authenticationStore: AuthenticationStore) = Authentication.clear[SSHAuthentication]
private def eq(a1: SSHAuthentication, a2: SSHAuthentication) = (a1.getClass, a1.login, a1.host, a1.port) == (a2.getClass, a2.login, a2.host, a2.port)
def test(a: SSHAuthentication)(implicit cypher: Cypher, authenticationStore: AuthenticationStore, serializerService: SerializerService, preference: Preference) = {
implicit val intp = gridscale.ssh.SSH()
try
Try {
val server = gridscale.ssh.SSHServer(a.host, a.port, preference(SSHEnvironment.timeOut))(a)
gridscale.ssh.home(server)
}.map(_ ⇒ true)
finally intp().close()
}
}
sealed trait SSHAuthentication {
def host: String
def port: Int
def login: String
}
case class LoginPassword(
val login: String,
val cypheredPassword: String,
val host: String,
val port: Int = 22
) extends SSHAuthentication with CypheredPassword {
override def toString = s"$login@$host:$port using password"
}
case class PrivateKey(
val privateKey: File,
val login: String,
val cypheredPassword: String,
val host: String,
val port: Int = 22
) extends SSHAuthentication with CypheredPassword {
override def toString = s"$login@$host:$port using private key $privateKey"
}
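// --- Editor's note: the sketch below is an added illustration, not part of the original file. ---
// It outlines how the companion object above might be used to register an authentication and
// look it up again. The key path, login and host are made-up values, and the implicit store and
// serializer are assumed to be provided by the surrounding services layer.
object SSHAuthenticationExample {
  def registerAndFind()(implicit store: AuthenticationStore, serializer: SerializerService): SSHAuthentication = {
    val key = PrivateKey(
      privateKey = new File("/home/user/.ssh/id_rsa"),
      login = "user",
      cypheredPassword = "<cyphered password>", // placeholder; normally produced via Cypher
      host = "cluster.example.org")
    SSHAuthentication += key // persist it in the authentication store
    SSHAuthentication.find("user", "cluster.example.org") // later, look it up by login and host (port defaults to 22)
  }
}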
|
openmole/openmole
|
openmole/plugins/org.openmole.plugin.environment.ssh/src/main/scala/org/openmole/plugin/environment/ssh/SSHAuthentication.scala
|
Scala
|
agpl-3.0
| 4,422 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.resource
import java.util.concurrent.ConcurrentHashMap
import scala.collection.JavaConverters._
import org.apache.spark.network.util.JavaUtils
import org.apache.spark.resource.ResourceProfile._
/**
* A set of Executor resource requests. This is used in conjunction with the ResourceProfile to
* programmatically specify the resources needed for an RDD that will be applied at the
* stage level.
*
 * This API is currently private until the rest of the pieces are in place, after which it
 * will become public.
*/
private[spark] class ExecutorResourceRequests() extends Serializable {
private val _executorResources = new ConcurrentHashMap[String, ExecutorResourceRequest]()
def requests: Map[String, ExecutorResourceRequest] = _executorResources.asScala.toMap
/**
* Specify heap memory. The value specified will be converted to MiB.
*
* @param amount Amount of memory. In the same format as JVM memory strings (e.g. 512m, 2g).
* Default unit is MiB if not specified.
*/
def memory(amount: String): this.type = {
val amountMiB = JavaUtils.byteStringAsMb(amount)
val req = new ExecutorResourceRequest(MEMORY, amountMiB)
_executorResources.put(MEMORY, req)
this
}
/**
* Specify overhead memory. The value specified will be converted to MiB.
*
* @param amount Amount of memory. In the same format as JVM memory strings (e.g. 512m, 2g).
* Default unit is MiB if not specified.
*/
def memoryOverhead(amount: String): this.type = {
val amountMiB = JavaUtils.byteStringAsMb(amount)
val req = new ExecutorResourceRequest(OVERHEAD_MEM, amountMiB)
_executorResources.put(OVERHEAD_MEM, req)
this
}
/**
* Specify pyspark memory. The value specified will be converted to MiB.
*
* @param amount Amount of memory. In the same format as JVM memory strings (e.g. 512m, 2g).
* Default unit is MiB if not specified.
*/
def pysparkMemory(amount: String): this.type = {
val amountMiB = JavaUtils.byteStringAsMb(amount)
val req = new ExecutorResourceRequest(PYSPARK_MEM, amountMiB)
_executorResources.put(PYSPARK_MEM, req)
this
}
/**
* Specify number of cores per Executor.
*
* @param amount Number of cores to allocate per Executor.
*/
def cores(amount: Int): this.type = {
val req = new ExecutorResourceRequest(CORES, amount)
_executorResources.put(CORES, req)
this
}
/**
 * Amount of a particular custom resource (GPU, FPGA, etc.) to use. The resource names supported
 * correspond to the regular Spark configs with the prefix removed. For instance, resources
 * like GPUs are gpu (spark configs spark.executor.resource.gpu.*). If you pass in a resource
 * that the cluster manager doesn't support, the result is undefined; it may error or may just
 * be ignored.
*
* @param resourceName Name of the resource.
* @param amount amount of that resource per executor to use.
* @param discoveryScript Optional script used to discover the resources. This is required on
* some cluster managers that don't tell Spark the addresses of
* the resources allocated. The script runs on Executors startup to
   *                        discover the addresses of the resources available.
* @param vendor Optional vendor, required for some cluster managers
*/
def resource(
resourceName: String,
amount: Long,
discoveryScript: String = "",
vendor: String = ""): this.type = {
// a bit weird but for Java api use empty string as meaning None because empty
// string is otherwise invalid for those parameters anyway
val req = new ExecutorResourceRequest(resourceName, amount, discoveryScript, vendor)
_executorResources.put(resourceName, req)
this
}
override def toString: String = {
s"Executor resource requests: ${_executorResources}"
}
}
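// --- Editor's note: the sketch below is an added illustration, not part of the original file. ---
// It shows how the fluent builder above is meant to be chained; the amounts, the "gpu" resource
// name and the discovery script path are assumptions for the example.
private[spark] object ExecutorResourceRequestsExample {
  def build(): ExecutorResourceRequests = {
    val requests = new ExecutorResourceRequests()
      .cores(4)
      .memory("8g")
      .memoryOverhead("1g")
      .resource("gpu", amount = 1, discoveryScript = "/opt/spark/scripts/findGpus.sh")
    // `requests.requests` now holds one ExecutorResourceRequest per call above, keyed by
    // the resource name constants (CORES, MEMORY, OVERHEAD_MEM) and by "gpu".
    requests
  }
}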
|
darionyaphet/spark
|
core/src/main/scala/org/apache/spark/resource/ExecutorResourceRequests.scala
|
Scala
|
apache-2.0
| 4,748 |
package com.ing.baker.test.scaladsl
import com.ing.baker.runtime.scaladsl.EventInstance
import com.ing.baker.test.RecipeAssert
import com.ing.baker.test.recipe.WebshopBaker._
import com.ing.baker.test.recipe.WebshopRecipe
import com.ing.baker.test.recipe.WebshopRecipe.{ItemsReserved, OrderPlaced}
import com.typesafe.scalalogging.StrictLogging
import org.scalatest.Assertions
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import java.util.{Timer, TimerTask, UUID}
import scala.util.{Failure, Try}
class RecipeAssertTest extends AnyFlatSpec with Matchers with StrictLogging {
private def createBakerAssert(async: Boolean = false): RecipeAssert = {
val recipeInstanceId = UUID.randomUUID().toString
val sensoryEvent = EventInstance.unsafeFrom(OrderPlaced("order-1", "item-1" :: Nil))
baker.bake(recipeId, recipeInstanceId)
if (async) {
new Timer().schedule(new TimerTask {
override def run(): Unit = baker.fireEvent(recipeInstanceId, sensoryEvent)
}, 500)
} else {
baker.fireEvent(recipeInstanceId, sensoryEvent)
}
RecipeAssert(baker, recipeInstanceId)
}
private def assertFail[T](assertion: => T): Unit = Try(assertion) match {
case Failure(t: Throwable) => logger.info(t.getMessage)
case default => fail(s"assertion error is expected but got $default")
}
"RecipeAssert object" should "be created" in {
RecipeAssert.apply(baker, "someProcessId")
}
"assertEventsFlow" should "work with happy flow" in {
createBakerAssert()
.waitFor(WebshopRecipe.happyFlow)
.assertEventsFlow(WebshopRecipe.happyFlow)
}
"assertEventsFlow" should "work with some delay" in {
createBakerAssert(true)
.waitFor(WebshopRecipe.happyFlow)
.assertEventsFlow(WebshopRecipe.happyFlow)
}
"assertEventsFlow" should "fail on incorrect events" in assertFail {
createBakerAssert()
.waitFor(WebshopRecipe.happyFlow)
.assertEventsFlow(WebshopRecipe.happyFlow -- classOf[ItemsReserved])
}
"assertIngredient" should "work for isEqual" in {
createBakerAssert()
.waitFor(WebshopRecipe.happyFlow)
.assertIngredient("orderId").isEqual("order-1")
}
"assertIngredient" should "fail for isEqual" in assertFail {
createBakerAssert()
.waitFor(WebshopRecipe.happyFlow)
.assertIngredient("orderId").isEqual("order-2")
}
"assertIngredient" should "work for isAbsent" in {
createBakerAssert()
.waitFor(WebshopRecipe.happyFlow)
.assertIngredient("non-existent").isAbsent
}
"assertIngredient" should "work for isNull" in assertFail {
createBakerAssert()
.waitFor(WebshopRecipe.happyFlow)
.assertIngredient("orderId").isNull
}
"assertIngredient" should "work for customAssert" in {
createBakerAssert()
.waitFor(WebshopRecipe.happyFlow)
.assertIngredient("orderId").is(value => Assertions.assert(value.as[String] == "order-1"))
}
"assertIngredient" should "fail for customAssert" in assertFail {
createBakerAssert()
.waitFor(WebshopRecipe.happyFlow)
.assertIngredient("orderId").is(value => Assertions.assert(value.as[String] == "order-2"))
}
}
|
ing-bank/baker
|
core/baker-test/src/test/scala/com/ing/baker/test/scaladsl/RecipeAssertTest.scala
|
Scala
|
mit
| 3,207 |
package com.socrata.datacoordinator.resources
import javax.servlet.http.HttpServletResponse
import com.rojoma.json.v3.ast.JArray
import com.rojoma.json.v3.util.ArrayIteratorEncode
import com.socrata.datacoordinator.id.DatasetId
import com.socrata.datacoordinator.truth.loader.{MissingVersion, Delogger}
import com.socrata.http.server.HttpRequest
import com.socrata.http.server.responses._
import com.socrata.http.server.implicits._
case class DatasetLogResource[CV](datasetId: DatasetId,
copyNumber: Long,
fetchLog: (DatasetId, Long) => (Iterator[Delogger.LogEvent[CV]] => Unit) => Unit,
formatDatasetId: DatasetId => String) extends ErrorHandlingSodaResource(formatDatasetId) {
override def get = { (req: HttpRequest) => (resp: HttpServletResponse) =>
try {
fetchLog(datasetId, copyNumber) { it =>
val r = OK ~> Write("application/json") { w =>
ArrayIteratorEncode.toText(it.map(_.toString)).foreach(w.write)
}
r(resp)
}
} catch {
case _: MissingVersion =>
(OK ~> Json(JArray.canonicalEmpty))(resp)
}
}
}
|
socrata-platform/data-coordinator
|
coordinator/src/main/scala/com/socrata/datacoordinator/resources/DatasetLogResource.scala
|
Scala
|
apache-2.0
| 1,187 |
package com.tsukaby.bean_validation_scala
import java.util.{Calendar, Date}
import javax.validation.constraints.Past
import javax.validation.{ConstraintValidator, ConstraintValidatorContext}
import org.hibernate.validator.internal.constraintvalidators.bv.past.{PastValidatorForReadablePartial, PastValidatorForReadableInstant, PastValidatorForDate, PastValidatorForCalendar}
import org.joda.time.{ReadableInstant, ReadablePartial}
/**
 * Check that the wrapped Calendar, Date or Joda-Time value being validated is in the past.
*/
class PastValidatorForOption extends ConstraintValidator[Past, Option[_]] {
private var constraintAnnotation: Past = null
override def initialize(constraintAnnotation: Past): Unit = {
this.constraintAnnotation = constraintAnnotation
}
override def isValid(value: Option[_], context: ConstraintValidatorContext): Boolean = {
value match {
case Some(x: Calendar) =>
val v = new PastValidatorForCalendar
v.initialize(constraintAnnotation)
v.isValid(x, context)
case Some(x: Date) =>
val v = new PastValidatorForDate
v.initialize(constraintAnnotation)
v.isValid(x, context)
case Some(x: ReadableInstant) =>
val v = new PastValidatorForReadableInstant
v.initialize(constraintAnnotation)
v.isValid(x, context)
case Some(x: ReadablePartial) =>
val v = new PastValidatorForReadablePartial
v.initialize(constraintAnnotation)
v.isValid(x, context)
case None =>
true
case Some(_) =>
throw new IllegalStateException("oops.")
}
}
}
|
bean-validation-scala/bean-validation-scala
|
src/main/scala/com/tsukaby/bean_validation_scala/PastValidatorForOption.scala
|
Scala
|
mit
| 1,635 |
package com.inocybe.odlexplorer
import akka.actor.{Actor, ActorRef, Props}
object StepParent {
case class SendToChild(msg: Any)
}
class StepParent(childProps: Props, fwd: ActorRef) extends Actor {
import StepParent._
val child = context.actorOf(childProps, "child")
def receive = {
case SendToChild(msg) => child ! msg
case msg => fwd forward msg
}
}
|
jshamash/odl-project-explorer
|
src/test/scala/com/inocybe/odlexplorer/StepParent.scala
|
Scala
|
mit
| 371 |
package com.varunvats.practice.graph
import scala.collection.mutable
object RouteBetweenNodes {
def apply[T](graph: Map[T, Seq[T]], start: T, goal: T): Boolean =
apply(graph, start, goal, mutable.Set(start))
private def apply[T](graph: Map[T, Seq[T]], start: T, goal: T, visited: mutable.Set[T]): Boolean = {
if (start == goal)
true
else if (!graph.contains(start))
false
else
graph(start).exists { t =>
!visited(t) && apply(graph, t, goal, visited += t)
}
}
}
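// --- Editor's note: the sketch below is an added illustration, not part of the original file. ---
// A small made-up adjacency-list graph exercising the depth-first search above, with one
// reachable and one unreachable query.
object RouteBetweenNodesExample {
  val graph: Map[Char, Seq[Char]] = Map(
    'a' -> Seq('b', 'c'),
    'b' -> Seq('d'),
    'c' -> Seq('d'),
    'd' -> Seq.empty
  )

  def main(args: Array[String]): Unit = {
    println(RouteBetweenNodes(graph, 'a', 'd')) // true: a -> b -> d
    println(RouteBetweenNodes(graph, 'd', 'a')) // false: edges are directed
  }
}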
|
varunvats/practice
|
jvm/src/main/scala/com/varunvats/practice/graph/RouteBetweenNodes.scala
|
Scala
|
mit
| 520 |
package core.collection
/**
* Collection that belongs to an organization
*
* @author Manuel Bernhardt <[email protected]>
*/
abstract class OrganizationCollection extends Collection {
val ownerType: OwnerType.OwnerType = OwnerType.ORGANIZATION
}
|
delving/culture-hub
|
web-core/app/core/collection/OrganizationCollection.scala
|
Scala
|
apache-2.0
| 262 |
/***********************************************************************
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.cassandra.tools.commands
import com.beust.jcommander.Parameters
import org.locationtech.geomesa.cassandra.data.CassandraDataStore
import org.locationtech.geomesa.cassandra.tools.CassandraDataStoreCommand
import org.locationtech.geomesa.cassandra.tools.CassandraDataStoreCommand.CassandraDataStoreParams
import org.locationtech.geomesa.cassandra.tools.commands.CassandraDeleteFeaturesCommand.CassandraDeleteFeaturesParams
import org.locationtech.geomesa.tools.data.{DeleteFeaturesCommand, DeleteFeaturesParams}
class CassandraDeleteFeaturesCommand extends DeleteFeaturesCommand[CassandraDataStore] with CassandraDataStoreCommand {
override val params = new CassandraDeleteFeaturesParams
}
object CassandraDeleteFeaturesCommand {
@Parameters(commandDescription = "Delete features from a table in GeoMesa. Does not delete any tables or schema information.")
class CassandraDeleteFeaturesParams extends DeleteFeaturesParams with CassandraDataStoreParams
}
|
ronq/geomesa
|
geomesa-cassandra/geomesa-cassandra-tools/src/main/scala/org/locationtech/geomesa/cassandra/tools/commands/CassandraDeleteFeaturesCommand.scala
|
Scala
|
apache-2.0
| 1,459 |
/***********************************************************************
* Copyright (c) 2013-2019 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.spark
import com.typesafe.scalalogging.LazyLogging
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.IndexPartitioner
import org.geotools.factory.CommonFactoryFinder
import org.locationtech.jts.geom.{Coordinate, Envelope, Geometry}
import org.locationtech.jts.index.strtree.{AbstractNode, Boundable, STRtree}
import org.opengis.feature.simple.SimpleFeature
import org.opengis.filter.FilterFactory2
import scala.collection.mutable.ListBuffer
object RelationUtils extends LazyLogging {
import scala.collection.JavaConverters._
@transient val ff: FilterFactory2 = CommonFactoryFinder.getFilterFactory2
implicit val CoordinateOrdering: Ordering[Coordinate] = Ordering.by {_.x}
def grid(rdd: SpatialRDD, envelopes: List[Envelope], parallelism: Int): RDD[(Int, Iterable[SimpleFeature])] = {
val geom = rdd.schema.indexOf(rdd.schema.getGeometryDescriptor.getLocalName)
rdd.flatMap(RelationUtils.gridIdMapper(_, envelopes, geom)).groupByKey(new IndexPartitioner(parallelism))
}
// Maps a SimpleFeature to the id of the envelope that contains it
// Will duplicate features that belong to more than one envelope
// Returns -1 if no match was found
// TODO: Filter duplicates when querying
def gridIdMapper(sf: SimpleFeature, envelopes: List[Envelope], geometryOrdinal: Int): List[(Int, SimpleFeature)] = {
val geom = sf.getAttribute(geometryOrdinal).asInstanceOf[Geometry]
val mappings = envelopes.indices.flatMap { index =>
if (envelopes(index).intersects(geom.getEnvelopeInternal)) {
Some(index, sf)
} else {
None
}
}
if (mappings.isEmpty) {
List((-1, sf))
} else {
mappings.toList
}
}
// Maps a geometry to the id of the envelope that contains it
// Used to derive partition hints
def gridIdMapper(geom: Geometry, envelopes: List[Envelope]): List[Int] = {
val mappings = envelopes.indices.flatMap { index =>
if (envelopes(index).intersects(geom.getEnvelopeInternal)) {
Some(index)
} else {
None
}
}
if (mappings.isEmpty) {
List(-1)
} else {
mappings.toList
}
}
def getBound(rdd: RDD[SimpleFeature]): Envelope = {
rdd.aggregate[Envelope](new Envelope())(
(env: Envelope, sf: SimpleFeature) => {
env.expandToInclude(sf.getDefaultGeometry.asInstanceOf[Geometry].getEnvelopeInternal)
env
},
(env1: Envelope, env2: Envelope) => {
env1.expandToInclude(env2)
env1
}
)
}
def equalPartitioning(bound: Envelope, numPartitions: Int): List[Envelope] = {
// Compute bounds of each partition
val partitionsPerDim = Math.sqrt(numPartitions).toInt
val partitionWidth = bound.getWidth / partitionsPerDim
val partitionHeight = bound.getHeight / partitionsPerDim
val minX = bound.getMinX
val minY = bound.getMinY
val partitionEnvelopes: ListBuffer[Envelope] = ListBuffer()
// Build partitions
for (xIndex <- 0 until partitionsPerDim) {
val xPartitionStart = minX + (xIndex * partitionWidth)
val xPartitionEnd = xPartitionStart + partitionWidth
for (yIndex <- 0 until partitionsPerDim) {
val yPartitionStart = minY + (yIndex * partitionHeight)
        val yPartitionEnd = yPartitionStart + partitionHeight
partitionEnvelopes += new Envelope(xPartitionStart, xPartitionEnd, yPartitionStart, yPartitionEnd)
}
}
partitionEnvelopes.toList
}
def weightedPartitioning(rawRDD: RDD[SimpleFeature], bound: Envelope, numPartitions: Int, sampleSize: Int): List[Envelope] = {
val width: Int = Math.sqrt(numPartitions).toInt
val binSize = sampleSize / width
val sample = rawRDD.takeSample(withReplacement = false, sampleSize)
val xSample = sample.map{f => f.getDefaultGeometry.asInstanceOf[Geometry].getCoordinates.min.x}
val ySample = sample.map{f => f.getDefaultGeometry.asInstanceOf[Geometry].getCoordinates.min.y}
val xSorted = xSample.sorted
val ySorted = ySample.sorted
val partitionEnvelopes: ListBuffer[Envelope] = ListBuffer()
for (xBin <- 0 until width) {
val minX = xSorted(xBin * binSize)
val maxX = xSorted(((xBin + 1) * binSize) - 1)
for (yBin <- 0 until width) {
        val minY = ySorted(yBin * binSize)
val maxY = ySorted(((yBin + 1) * binSize) - 1)
partitionEnvelopes += new Envelope(minX, maxX, minY, maxY)
}
}
partitionEnvelopes.toList
}
def wholeEarthPartitioning(numPartitions: Int): List[Envelope] = {
equalPartitioning(new Envelope(-180,180,-90,90), numPartitions)
}
// Constructs an RTree based on a sample of the data and returns its bounds as envelopes
// returns one less envelope than requested to account for the catch-all envelope
def rtreePartitioning(
rawRDD: RDD[SimpleFeature],
numPartitions: Int,
sampleSize: Int,
thresholdMultiplier: Double): List[Envelope] = {
val sample = rawRDD.takeSample(withReplacement = false, sampleSize)
val rtree = new STRtree()
sample.foreach{ sf =>
rtree.insert(sf.getDefaultGeometry.asInstanceOf[Geometry].getEnvelopeInternal, sf)
}
val envelopes: java.util.List[Envelope] = new java.util.ArrayList[Envelope]()
// get rtree envelopes, limited to those containing reasonable size
val reasonableSize = sampleSize / numPartitions
val threshold = (reasonableSize * thresholdMultiplier).toInt
val minSize = reasonableSize - threshold
val maxSize = reasonableSize + threshold
rtree.build()
queryBoundary(rtree.getRoot, envelopes, minSize, maxSize)
envelopes.asScala.take(numPartitions - 1).toList
}
// Helper method to get the envelopes of an RTree
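  // The recursion returns the number of leaf entries under `node`; as a side effect, a child
  // subtree whose entry count falls strictly between minSize and maxSize has its bounding
  // envelope appended to `boundaries` (a child is skipped when one of its own children's
  // envelopes has already been added).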
def queryBoundary(node: AbstractNode, boundaries: java.util.List[Envelope], minSize: Int, maxSize: Int): Int = {
// get node's immediate children
val childBoundables: java.util.List[_] = node.getChildBoundables
// True if current node is leaf
var flagLeafnode = true
var i = 0
while (i < childBoundables.size && flagLeafnode) {
val childBoundable = childBoundables.get(i).asInstanceOf[Boundable]
if (childBoundable.isInstanceOf[AbstractNode]) {
flagLeafnode = false
}
i += 1
}
if (flagLeafnode) {
childBoundables.size
} else {
var nodeCount = 0
for ( i <- 0 until childBoundables.size ) {
val childBoundable = childBoundables.get(i).asInstanceOf[Boundable]
childBoundable match {
case child: AbstractNode =>
val childSize = queryBoundary(child, boundaries, minSize, maxSize)
// check boundary for size and existence in chosen boundaries
if (childSize < maxSize && childSize > minSize) {
var alreadyAdded = false
if (node.getLevel != 1) {
child.getChildBoundables.asInstanceOf[java.util.List[AbstractNode]].asScala.foreach { c =>
alreadyAdded = alreadyAdded || boundaries.contains(c.getBounds.asInstanceOf[Envelope])
}
}
if (!alreadyAdded) {
boundaries.add(child.getBounds.asInstanceOf[Envelope])
}
}
nodeCount += childSize
case _ => nodeCount += 1 // negligible difference but accurate
}
}
nodeCount
}
}
}
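// A hypothetical usage sketch (not part of the original file): partition the whole-earth
// bounding box into four grid envelopes and map a point geometry to the envelope that
// contains it, illustrating the grid-id mapping described in the comments above. Only
// JTS types already imported by this file are used.
object RelationUtilsSketch {
  import org.locationtech.jts.geom.GeometryFactory

  def main(args: Array[String]): Unit = {
    val envelopes = RelationUtils.wholeEarthPartitioning(4)
    val point = new GeometryFactory().createPoint(new Coordinate(10.0, 20.0))
    // The point lies strictly inside one envelope, so a single grid id is expected
    println(RelationUtils.gridIdMapper(point, envelopes))
  }
}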
|
elahrvivaz/geomesa
|
geomesa-spark/geomesa-spark-sql/src/main/scala/org/locationtech/geomesa/spark/RelationUtils.scala
|
Scala
|
apache-2.0
| 7,863 |
package com.github.jefersonm.sandbox.scala.learnscala
import java.util.{Date, Locale}
import java.text.DateFormat._
object InteroperateExample {
def main(args: Array[String]){
val now = new Date
val df = getDateInstance(LONG, Locale.GERMAN)
println(df format now)
println(df.format(now))
}
}
|
jefersonm/sandbox
|
languages/scala/LearnScala/src/com/github/jefersonm/sandbox/scala/learnscala/InteroperateExample.scala
|
Scala
|
mit
| 305 |
package org.deeplearning4s.nn.conf.layers
import org.deeplearning4j.nn.conf.Updater
import org.deeplearning4j.nn.conf.distribution.Distribution
import org.deeplearning4j.nn.conf.layers.AutoEncoder.Builder
import org.deeplearning4j.nn.weights.WeightInit
import org.deeplearning4s.nn.conf.ActivationFunction
case class AutoEncoder(
corruptionLevel: Double = Double.NaN,
sparsity: Double = Double.NaN,
override val nIn: Int = Int.MinValue,
override val nOut: Int = Int.MinValue,
override val activationFunction: ActivationFunction = null,
override val weightInit: WeightInit = null,
override val dist: Distribution = null,
override val dropOut: Double = Double.NaN,
override val updater: Updater = null
) extends BasePretrainNetwork {
def asJava = new Builder(corruptionLevel)
.activation(activationFunction.name)
.weightInit(weightInit)
.dist(dist)
.dropOut(dropOut)
.updater(updater)
.nIn(nIn)
.nOut(nOut)
.sparsity(sparsity).build()
}
|
everpeace/deeplearning4s
|
src/main/scala/org/deeplearning4s/nn/conf/layers/AutoEncoder.scala
|
Scala
|
apache-2.0
| 1,211 |
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.accumulo.tools.data
import org.locationtech.geomesa.accumulo.data.AccumuloDataStore
import org.locationtech.geomesa.accumulo.tools.{AccumuloDataStoreCommand, AccumuloDataStoreParams}
import org.locationtech.geomesa.tools.data.{DeleteCatalogCommand, DeleteCatalogParams}
class AccumuloDeleteCatalogCommand extends DeleteCatalogCommand[AccumuloDataStore] with AccumuloDataStoreCommand {
override val params = new AccumuloDeleteCatalogParams
}
class AccumuloDeleteCatalogParams extends DeleteCatalogParams with AccumuloDataStoreParams
|
nagavallia/geomesa
|
geomesa-accumulo/geomesa-accumulo-tools/src/main/scala/org/locationtech/geomesa/accumulo/tools/data/AccumuloDeleteCatalogCommand.scala
|
Scala
|
apache-2.0
| 1,031 |
package spmd
object Util {
def spawnDaemon(f: => Any) {
val t = new Thread(new Runnable {
def run {
f
}
})
t.setDaemon(true)
t.start
}
}
trait Log {
def debug(msg: String) {
if (System.getProperty("spmd.debug") != null) println(msg)
}
}
|
jonifreeman/spmd
|
src/main/scala/spmd/util.scala
|
Scala
|
apache-2.0
| 287 |
import sbt._
import sbt.Process._
/**
* Includes the <code>testng</code> task into the project, which depends upon the
* testng.xml file in the current working directory.
*
 * Note that this continues to have problems when locating files on the classpath.
*/
trait TestNGTestingProject extends DefaultProject
{
val test_ng = "org.testng" % "testng" % "5.9" % "test->default" intransitive()
lazy val testng = task {
import java.io.File
import sbt.FileUtilities._
val pathElements = {
configurationPath( Configurations.Compile ).descendentsExcept( "*.jar", ".svn" ).getPaths ++
configurationPath( Configurations.Test ).descendentsExcept( "*.jar", ".svn" ).getPaths ++
List(
outputPath / "classes",
outputPath / "test-classes",
scalaLibraryJar.getPath,
testResourcesPath
)
}
val classpath = pathElements.mkString( ":" )
val testDir = outputPath / "testng"
createDirectory( testDir, log )
log.debug( "java -cp " + classpath + " org.testng.TestNG -d " + testDir / "test-output" + " testng.xml" )
var code = (
( "java -cp " + classpath + " org.testng.TestNG -d " + testDir / "test-output" + " testng.xml" ! log ) +
( "ant -f src/test/resources/build.xml -Dbasedir=" + testDir ! log )
)
if ( code > 0 ) Some( code.toString ) else None
} dependsOn( testCompile )
}
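// Hypothetical usage sketch (not part of the original file): in the sbt 0.7-style project
// definitions this trait targets, it would be mixed in roughly as below, after which the
// `testng` action runs against the testng.xml in the working directory.
//
//   class MyProject(info: sbt.ProjectInfo) extends sbt.DefaultProject(info) with TestNGTestingProject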
|
emarsys/dyson
|
project/build/TestNGTestingProject.scala
|
Scala
|
gpl-3.0
| 1,559 |
package kamon.test
import akka.actor.{ ActorSystem, Actor, Props }
object SimpleApp extends App {
val system = ActorSystem("test-app")
val testActor = system.actorOf(Props[TestActor], "test-actor")
for(_ <- 1 to 10) {
testActor ! "hello"
}
}
class TestActor extends Actor {
def receive = {
case x => println(x)
}
}
|
ivantopo/sam-serpoosh-kamon-example
|
src/main/scala/kamon/test/SimpleApp.scala
|
Scala
|
apache-2.0
| 340 |
/* __ *\\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\\___/_/ |_/____/_/ | | **
** |/ **
\\* */
package scala
package runtime
final class RichDouble(val self: Double) extends AnyVal with FractionalProxy[Double] {
protected def num = scala.math.Numeric.DoubleIsFractional
protected def ord = scala.math.Ordering.Double
protected def integralNum = scala.math.Numeric.DoubleAsIfIntegral
override def doubleValue() = self
override def floatValue() = self.toFloat
override def longValue() = self.toLong
override def intValue() = self.toInt
override def byteValue() = self.toByte
override def shortValue() = self.toShort
override def isWhole = {
val l = self.toLong
l.toDouble == self || l == Long.MaxValue && self < Double.PositiveInfinity || l == Long.MinValue && self > Double.NegativeInfinity
}
override def isValidByte = self.toByte.toDouble == self
override def isValidShort = self.toShort.toDouble == self
override def isValidChar = self.toChar.toDouble == self
override def isValidInt = self.toInt.toDouble == self
// override def isValidLong = { val l = self.toLong; l.toDouble == self && l != Long.MaxValue }
// override def isValidFloat = self.toFloat.toDouble == self
// override def isValidDouble = !java.lang.Double.isNaN(self)
def isNaN: Boolean = java.lang.Double.isNaN(self)
def isInfinity: Boolean = java.lang.Double.isInfinite(self)
def isPosInfinity: Boolean = Double.PositiveInfinity == self
def isNegInfinity: Boolean = Double.NegativeInfinity == self
override def abs: Double = math.abs(self)
override def max(that: Double): Double = math.max(self, that)
override def min(that: Double): Double = math.min(self, that)
override def signum: Int = math.signum(self).toInt // !!! NaN
def round: Long = math.round(self)
def ceil: Double = math.ceil(self)
def floor: Double = math.floor(self)
/** Converts an angle measured in degrees to an approximately equivalent
* angle measured in radians.
*
* @return the measurement of the angle x in radians.
*/
def toRadians: Double = math.toRadians(self)
/** Converts an angle measured in radians to an approximately equivalent
* angle measured in degrees.
* @return the measurement of the angle x in degrees.
*/
def toDegrees: Double = math.toDegrees(self)
}
|
felixmulder/scala
|
src/library/scala/runtime/RichDouble.scala
|
Scala
|
bsd-3-clause
| 2,806 |
/**
* Copyright 2014-2015 Martin Cooper
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.martincooper.datatable.examples
import com.github.martincooper.datatable.{ GenericColumn, DataColumn, DataTable }
import scala.util.Try
class DataColumnModificationExamples {
def addColumn(dataTable: DataTable): Try[DataTable] = {
val stringCol = new DataColumn[String]("New Column", (1 to 100).map(i => "Another " + i))
dataTable.columns.add(stringCol)
}
def removeColumnByName(dataTable: DataTable): Try[DataTable] = {
dataTable.columns.remove("ColumnToRemove")
}
def removeColumnByIndex(dataTable: DataTable): Try[DataTable] = {
dataTable.columns.remove(1)
}
def removeColumn(dataTable: DataTable, columnToRemove: GenericColumn): Try[DataTable] = {
dataTable.columns.remove(columnToRemove)
}
def insertColumnByName(dataTable: DataTable): Try[DataTable] = {
val stringCol = new DataColumn[String]("New Column", (1 to 100).map(i => "Another " + i))
dataTable.columns.insert("ColumnTwo", stringCol)
}
def insertColumnByIndex(dataTable: DataTable): Try[DataTable] = {
val stringCol = new DataColumn[String]("New Column", (1 to 100).map(i => "Another " + i))
dataTable.columns.insert(2, stringCol)
}
def insertColumn(dataTable: DataTable, insertBeforeColumn: GenericColumn): Try[DataTable] = {
val stringCol = new DataColumn[String]("New Column", (1 to 100).map(i => "Another " + i))
dataTable.columns.insert(insertBeforeColumn, stringCol)
}
def replaceColumnByName(dataTable: DataTable): Try[DataTable] = {
val stringCol = new DataColumn[String]("New Column", (1 to 100).map(i => "Another " + i))
dataTable.columns.replace("ColumnTwo", stringCol)
}
def replaceColumnByIndex(dataTable: DataTable): Try[DataTable] = {
val stringCol = new DataColumn[String]("New Column", (1 to 100).map(i => "Another " + i))
dataTable.columns.replace(2, stringCol)
}
def replaceColumn(dataTable: DataTable, columnToReplace: GenericColumn): Try[DataTable] = {
val stringCol = new DataColumn[String]("New Column", (1 to 100).map(i => "Another " + i))
dataTable.columns.replace(columnToReplace, stringCol)
}
}
|
martincooper/scala-datatable
|
src/test/scala/com/github/martincooper/datatable/examples/DataColumnModificationExamples.scala
|
Scala
|
apache-2.0
| 2,732 |
package scalalab.gui
import javax.swing._
import java.awt.{Color, Font, Dimension, BorderLayout}
import scala.actors._
import Actor._
import scalalab.utils._
import scalalab.core.InterpreterMessages._
import scalalab.core.UserMessages._
import scalaExec.Interpreter._
// p is the processing actor
class ScalalabFrame(private val p: Actor) extends JFrame {
import BetterSwing._
val editor = new OuterEditor(p)
val printer = new PrinterEditor()
private[scalalab] val proc = actor {
loop {
// actor loop
receive {
// receive a GUITask and process it
case GUITask(fn) =>
SwingUtilities.invokeLater(
new Runnable() {
def run {
fn()
}
} // runnable
)
case res: InterpResult => // editor processes Interpreter results
editor process res
case msg: SysoutMessage => // printer processes SysoutMessage and SyserrMessage
printer.process(msg)
case msg: SyserrMessage =>
printer.process(msg)
} // receive
} // actor loop
} // actor
// processing is performed by delegating to the proc actor
def process(res: InterpResult) {
proc ! res
}
def process(msg: SysoutMessage) {
proc ! msg
}
def process(msg: SyserrMessage) {
proc ! msg
}
private def guiTask(task: => Unit) {
proc ! GUITask(() => task)
}
def load(data: scala.xml.Elem) {
editor.load(data)
}
  // On startup, add the GUI task to the processor
guiTask {
def mkMenuBar = {
import scalalab.core.UserMessages._
val mb = new MenuBar {
new Menu("State") {
"Print Cells Interpreter State" does {
}
}
new Menu("File") {
"New" does {
p ! NewFile()
}
"Open File..." does {
p ! OpenFile()
}
---
"Save File Ctrl+S" does {
editor.save(false)
}
"Save As ..." does {
editor.save(true)
}
}
new Menu("Interpreter") {
"Interpret Shift+Enter" does {
editor.interpret
}
"Interpret All Ctrl+Shift+Enter" does {
editor.interpretAll
}
"Restart with default libraries Ctrl+R" does {
p ! Restart()
}
"Restart with EJML based libraries " does {
p ! RestartEJML()
}
"Restart with MTJ based libraries " does {
p ! RestartMTJ()
}
"Restart with Apache Common Maths based libraries " does {
p ! RestartApacheCommons()
}
}
new Menu("CodeCell") {
"New Ctrl+N" does {
editor.mkCodeCell
}
"Delete Ctrl+Del" does {
editor.delCodeCell
}
}
new Menu("Help") {
"Contents F1" does {
p ! ShowHelpDialog()
}
}
}
mb.setFont(new Font(scalalab.utils.Props("InnerEditor.font.name", "Courier New"), 0, 10))
mb
}
//Set up the frame
setJMenuBar(mkMenuBar)
setTitle("ScalaLab Code Cells Build: " + scalaExec.Interpreter.GlobalValues.scalalabBuildVersion)
//setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE)
//val splitPane = new JSplitPane(JSplitPane.VERTICAL_SPLIT)
//splitPane.setTopComponent(new JScrollPane(editor))
//splitPane.setBottomComponent(new JScrollPane(printer))
setContentPane(new JScrollPane(editor))
setLocation(GlobalValues.locX, GlobalValues.locY)
setSize((0.5 * GlobalValues.sizeX).asInstanceOf[Int], GlobalValues.sizeY)
//pack
setVisible(true)
//Have to set size after making the frame visible
// splitPane.setDividerLocation(900)
editor.start // start the editor in order to be able to edit cells
}
// guiTask
private case class GUITask(fn: () => Unit)
}
|
scalalab/scalalab
|
source/src/main/scala/scalalab/gui/ScalaideFrame.scala
|
Scala
|
mit
| 4,092 |
package io.mashin.rich.spark
import org.apache.spark.{SparkConf, SparkContext}
import org.scalatest.{FunSuite, ShouldMatchers}
abstract class RichSparkTestSuite extends FunSuite with ShouldMatchers {
protected def sparkContext(name: String): SparkContext = {
new SparkContext(new SparkConf().setAppName(name).setMaster("local[*]"))
}
def sparkTest(testCaseName: String)(testProcedure: SparkContext => Unit): Unit = {
test(testCaseName) {
implicit val sc = sparkContext(testCaseName)
testProcedure(sc)
sc.stop()
}
}
def time(proc: => Unit): Long = {
val t0 = System.nanoTime
proc
System.nanoTime - t0
}
def formatDuration(nanos: Long): String = {
var rem = nanos
val h = rem / (1000000000L * 60L * 60L)
rem -= h * (1000000000L * 60L * 60L)
val m = rem / (1000000000L * 60L)
rem -= m * (1000000000L * 60L)
val s = rem / 1000000000L
rem -= s * 1000000000L
val ms = rem / 1000000L
rem -= ms * 1000000L
val ns = rem
s"${h}h ${m}m ${s}s ${ms}ms ${ns}ns"
}
}
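// Hypothetical usage sketch (not part of the original file): a suite extending the helper
// above gets a local SparkContext per test case and automatic shutdown afterwards.
//
//   class WordCountSuite extends RichSparkTestSuite {
//     sparkTest("counts words") { sc =>
//       sc.parallelize(Seq("a", "b", "a")).countByValue()("a") shouldBe 2
//     }
//   }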
|
mashin-io/rich-spark
|
main/src/test/scala/io/mashin/rich/spark/RichSparkTestSuite.scala
|
Scala
|
apache-2.0
| 1,062 |
final class NonNullChar(val get: Char) extends AnyVal {
def isEmpty = get == 0.toChar
override def toString = if (isEmpty) "NoChar" else s"'$get'"
}
object NonNullChar {
@inline final val None = new NonNullChar(0.toChar)
}
final class SomeProduct extends Product3[String, Int, List[String]] {
def canEqual(x: Any) = x.isInstanceOf[SomeProduct]
def _1 = "abc"
def _2 = 5
def _3 = List("bippy")
def isEmpty = false
def isDefined = !isEmpty
def get = this
}
object SomeProduct {
def unapply(x: SomeProduct) = x
}
object Test {
def prod(x: SomeProduct): Int = x match {
case SomeProduct(x, y, z) => x.length + y + z.length
case _ => -1
}
def f(x: Char): NonNullChar = x match {
case 'a' => new NonNullChar('a')
case 'b' => new NonNullChar('b')
case 'c' => new NonNullChar('c')
case _ => NonNullChar.None
}
// public char f(char);
// 0: iload_1
// 1: tableswitch { // 97 to 99
// 97: 47
// 98: 42
// 99: 37
// default: 28
// }
// 28: getstatic #19 // Field NonNullChar$.MODULE$:LNonNullChar$;
// 31: invokevirtual #23 // Method NonNullChar$.None:()C
// 34: goto 49
// 37: bipush 99
// 39: goto 49
// 42: bipush 98
// 44: goto 49
// 47: bipush 97
// 49: ireturn
def g(x: Char): Option[Char] = x match {
case 'a' => Some('a')
case 'b' => Some('b')
case 'c' => Some('c')
case _ => None
}
// public scala.Option<java.lang.Object> g(char);
// 0: iload_1
// 1: tableswitch { // 97 to 99
// 97: 64
// 98: 49
// 99: 34
// default: 28
// }
// 28: getstatic #33 // Field scala/None$.MODULE$:Lscala/None$;
// 31: goto 76
// 34: new #35 // class scala/Some
// 37: dup
// 38: bipush 99
// 40: invokestatic #41 // Method scala/runtime/BoxesRunTime.boxToCharacter:(C)Ljava/lang/Character;
// 43: invokespecial #44 // Method scala/Some."<init>":(Ljava/lang/Object;)V
// 46: goto 76
// 49: new #35 // class scala/Some
// 52: dup
// 53: bipush 98
// 55: invokestatic #41 // Method scala/runtime/BoxesRunTime.boxToCharacter:(C)Ljava/lang/Character;
// 58: invokespecial #44 // Method scala/Some."<init>":(Ljava/lang/Object;)V
// 61: goto 76
// 64: new #35 // class scala/Some
// 67: dup
// 68: bipush 97
// 70: invokestatic #41 // Method scala/runtime/BoxesRunTime.boxToCharacter:(C)Ljava/lang/Character;
// 73: invokespecial #44 // Method scala/Some."<init>":(Ljava/lang/Object;)V
// 76: areturn
def main(args: Array[String]): Unit = {
"abcd" foreach (ch => println(f(ch)))
"abcd" foreach (ch => println(g(ch)))
println(prod(new SomeProduct))
}
}
|
yusuke2255/dotty
|
tests/run/value-class-extractor.scala
|
Scala
|
bsd-3-clause
| 3,337 |
package org.qirx.cms.metadata.dsl
import scala.collection.immutable.ListMap
import org.qirx.cms.metadata.DefaultDocumentIdGenerator
import org.qirx.cms.metadata.DocumentMetadata
import org.qirx.cms.evolution.Evolution
import org.qirx.cms.evolution.Evolutions
import org.qirx.cms.metadata.PropertyMetadata
import play.api.libs.json.Json.obj
object Document {
def apply(id: String, idField: String)(properties: (String, PropertyMetadata)*): DocumentMetadata =
DefaultDocument(id, idField, ListMap(properties: _*))
trait IdFieldGenerator { self: DocumentMetadata =>
val idField:String
val idGenerator = new DefaultDocumentIdGenerator(idField)
}
trait ToJson { self: DocumentMetadata =>
lazy val toJson =
obj(
"id" -> id,
"properties" -> properties.map {
case (name, property) => property.toJson ++ obj("name" -> name)
}
)
}
private case class DefaultDocument(
id: String,
idField: String,
properties: ListMap[String, PropertyMetadata],
evolutions: Evolutions = new Evolutions(Seq.empty))
extends DocumentMetadata with IdFieldGenerator with ToJson {
def withEvolutions(evolutions: Evolution*) =
copy(evolutions = this.evolutions.withEvolutions(evolutions))
}
}
|
EECOLOR/play-cms
|
cms/src/main/scala/org/qirx/cms/metadata/dsl/Document.scala
|
Scala
|
mit
| 1,275 |
package view
import org.specs2.mutable._
class GitBucketHtmlSerializerSpec extends Specification {
import GitBucketHtmlSerializer._
"generateAnchorName" should {
"convert whitespace characters to hyphens" in {
val before = "foo bar baz"
val after = generateAnchorName(before)
after mustEqual "foo-bar-baz"
}
"normalize characters with diacritics" in {
val before = "Dónde estará mi vida"
val after = generateAnchorName(before)
after mustEqual "do%cc%81nde-estara%cc%81-mi-vida"
}
"omit special characters" in {
val before = "foo!bar@baz>9000"
val after = generateAnchorName(before)
after mustEqual "foo%21bar%40baz%3e9000"
}
}
}
|
Muscipular/gitbucket
|
src/test/scala/view/GitBucketHtmlSerializerSpec.scala
|
Scala
|
apache-2.0
| 721 |
/*
This file is part of Intake24.
Copyright 2015, 2016 Newcastle University.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package uk.ac.ncl.openlab.intake24.services.foodindex
import uk.ac.ncl.openlab.intake24.api.data.{UserCategoryHeader, UserFoodHeader}
case class MatchedFood(food: UserFoodHeader, matchCost: Int)
case class MatchedCategory(category: UserCategoryHeader, matchCost: Int)
case class IndexLookupResult(foods: Seq[MatchedFood], categories: Seq[MatchedCategory])
trait FoodIndex {
def lookup(description: String, maxFoods: Int, maxCategories: Int): IndexLookupResult
}
object FoodIndex {
val specialFoodSandwich = "$SND"
val specialFoodSalad = "$SLD"
val specialFoodMissing = "$MIS"
val allSpecialFoodCodes = Seq(specialFoodSandwich, specialFoodSalad, specialFoodMissing)
}
|
digitalinteraction/intake24
|
FoodDataServices/src/main/scala/uk/ac/ncl/openlab/intake24/services/foodindex/FoodIndex.scala
|
Scala
|
apache-2.0
| 1,291 |
package com.softwaremill.play24.modules
import com.softwaremill.play24.dao.{CoffeeDao, SupplierDao}
import org.specs2.mock.Mockito
trait MockDaoModule extends Mockito {
lazy val coffeeDao = mock[CoffeeDao]
lazy val supplierDao = mock[SupplierDao]
}
|
adamw/macwire
|
examples/play24/test/com/softwaremill/play24/modules/MockDaoModule.scala
|
Scala
|
apache-2.0
| 255 |
package org.deepdive.inference
case class Factor(id: Long, factorFunction: String, weightId: Long,
variables: List[FactorVariable]) extends CSVFormattable {
def toCSVRow = Array(id.toString, weightId.toString, factorFunction.toString)
}
case class FactorVariable(factorId: Long, position: Long, positive: Boolean,
variableId: Long) extends CSVFormattable {
def toCSVRow = Array(factorId.toString, variableId.toString, position.toString, positive.toString)
}
|
feiranwang/deepdive
|
src/main/scala/org/deepdive/inference/models/Factor.scala
|
Scala
|
apache-2.0
| 472 |
package com.googlecode.kanbanik.commands
import com.googlecode.kanbanik.model.Project
import com.googlecode.kanbanik.builders.ProjectBuilder
import com.googlecode.kanbanik.dtos._
import com.googlecode.kanbanik.dtos.ListDto
import com.googlecode.kanbanik.dtos.ProjectDto
import com.googlecode.kanbanik.dtos.EmptyDto
class GetAllProjectsCommand extends Command[EmptyDto, ListDto[ProjectDto]] {
lazy val projectBuilder = new ProjectBuilder()
def execute(params: EmptyDto): Either[ListDto[ProjectDto], ErrorDto] = {
val dtos = Project.all.map(projectBuilder.buildDto)
Left(ListDto(dtos))
}
}
|
nagyistoce/kanbanik
|
kanbanik-server/src/main/scala/com/googlecode/kanbanik/commands/GetAllProjectsCommand.scala
|
Scala
|
apache-2.0
| 609 |
package org.freetrm.eventstore.http
import java.net.URLDecoder
import java.util.concurrent.atomic.AtomicReference
import akka.NotUsed
import akka.actor.{ActorLogging, Actor, Props, ActorSystem}
import akka.http.scaladsl.coding.{NoCoding, Gzip}
import akka.http.scaladsl.model.StatusCodes._
import akka.http.scaladsl.model.headers.{Allow, HttpChallenge, BasicHttpCredentials, HttpCredentials}
import akka.http.scaladsl.model._
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server._
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Flow, Source}
import de.heikoseeberger.akkasse.{ServerSentEvent, EventStreamMarshalling}
import org.freetrm.eventstore._
import org.freetrm.eventstore.utils._
import spray.json._
import fommil.sjs.FamilyFormats._
import scala.concurrent.{ExecutionContextExecutor, Future}
import scala.util.control.NonFatal
import scala.concurrent.duration.DurationInt
/**
* @param cookie Erlang style 'cookie' that ensures we're allowed to use this service.
*/
class EventStoreHttpServer(writer: EventSourceWriter,
reader: EventSourceReader,
cookie: String)
(implicit system: ActorSystem)
extends Directives with Log {
private val dedup = new DeDuplicator(reader, writer)
private implicit val executionContext: ExecutionContextExecutor = system.dispatcher
private implicit val mat = ActorMaterializer()
private def withGzipSupport = {
(decodeRequestWith(NoCoding) | decodeRequestWith(Gzip))
}
private def authenticator(cred: Option[HttpCredentials]): Future[AuthenticationResult[String]] = {
// the user is just informational, we only really care that the 'password' matches the server cookie.
Future {
cred match {
case Some(BasicHttpCredentials(user, `cookie`)) => Right(user)
case _ => Left(HttpChallenge("Basic", "freetrm"))
}
}
}
implicit def rejectionHandler: RejectionHandler =
RejectionHandler.newBuilder()
.handle { case x ⇒ sys.error("Unhandled rejection: " + x) }
.handleNotFound {
complete((NotFound, "The requested resource could not be found."))
}
.result()
def flow: Flow[HttpRequest, HttpResponse, NotUsed] = {
RouteResult.route2HandlerFlow(route)
}
val route: Route = {
import EventStreamMarshalling._
post {
withGzipSupport {
pathPrefix("es") {
authenticateOrRejectWithChallenge(authenticator(_)) {
user =>
path("publish" / Segment) {
(topic) =>
entity(as[String]) {
data =>
val event = data.parseJson.convertTo[Event]
complete(writeToEventStore(Topic(topic), event))
}
}
}
}
}
} ~
get {
withGzipSupport {
pathPrefix("es") {
authenticateOrRejectWithChallenge(authenticator(_)) {
user =>
pathPrefix("consume") {
path(Segment / LongNumber / LongNumber.?) {
(topic, earliestOffsetToReadFrom, lastOffsetToReadTo) =>
complete {
consume(Topic(topic), earliestOffsetToReadFrom, lastOffsetToReadTo)
}
}
} ~
pathPrefix("consumeNotifications") {
complete {
consumeNotifications
}
} ~
pathPrefix("topicExists") {
path(Segment) {
(topic) =>
complete {
topicExists(Topic(topic))
}
}
} ~
pathPrefix("surface") {
path(Segment) {
(topic) =>
complete {
latestSurface(Topic(topic))
}
}
} ~
pathPrefix("maxTxnNo") {
path(Segment) {
(topic) =>
complete {
maxTxnNo(Topic(topic))
}
}
} ~
pathPrefix("maxSeqNo") {
path(Segment) {
(topic) =>
complete {
maxSeqNo(Topic(topic))
}
}
} ~
pathPrefix("listTopics") {
complete {
listTopics
}
}
}
}
}
}
}
def writeToEventStore(topic: Topic, event: Event): Future[HttpResponse] = {
execAndHandle(
() => dedup.writeWithDuplicateCheckResult(topic, event).map {
case result if !result.wasDuplicate =>
response(OK, EventStoreResponseOK(result.version))
case result =>
response(OK, EventStoreResponseDuplicate(result.version))
},
e =>
s"Failed to publish to event store: $topic: $event"
)
}
def consume(topic: Topic,
earliestOffsetToReadFrom: Long,
lastOffsetToReadTo: Option[Long]): Source[ServerSentEvent, NotUsed] = {
reader
.streamEvents(topic, earliestOffsetToReadFrom, lastOffsetToReadTo)
.map {
e => ServerSentEvent(e.toJson.prettyPrint)
}
.keepAlive(1.second, () => {
ServerSentEvent.heartbeat
})
}
def consumeNotifications: Source[ServerSentEvent, NotUsed] = {
reader
.streamNotifications
.map {
e => ServerSentEvent(e.toJson.prettyPrint)
}
.keepAlive(1.second, () => {
ServerSentEvent.heartbeat
})
}
def latestSurface(topic: Topic): Source[ServerSentEvent, NotUsed] = {
reader
.latestSurface(topic)
.map {
e => ServerSentEvent(e.toJson.prettyPrint)
}
.keepAlive(1.second, () => {
ServerSentEvent.heartbeat
})
}
def topicExists(topic: Topic): Future[HttpResponse] = {
toHttpResponse(reader.topicExists(topic), s"Failed with topicExists($topic)")
}
def listTopics: Future[HttpResponse] = {
toHttpResponse(reader.listTopics, s"Failed with listTopics")
}
def maxTxnNo(topic: Topic): Future[HttpResponse] = {
toHttpResponse(reader.maxTxnNo(topic), s"Failed with maxTxnNo($topic)")
}
def maxSeqNo(topic: Topic): Future[HttpResponse] = {
toHttpResponse(reader.maxSeqNo(topic), s"Failed with maxSeqNo($topic)")
}
private def execAndHandle(f: () => Future[HttpResponse], errorMessage: Throwable => String): Future[HttpResponse] = {
try {
f().recover {
case NonFatal(e) =>
val msg = errorMessage(e)
log.error(msg, e)
response(InternalServerError, EventStoreResponseError(msg, e.getMessage))
}
} catch {
case NonFatal(e) =>
// We shouldn't be here unless we've written some bad code. The call to f should give a failed
        // future, not throw an exception.
val msg = errorMessage(e)
log.error("Bad code path: " + msg, e)
Future.successful(
response(InternalServerError, EventStoreResponseError(msg, e.getMessage))
)
}
}
private def response(status: StatusCode, resp: EventStoreResponse) = {
import fommil.sjs.FamilyFormats._
val json = (resp: EventStoreResponse).toJson.prettyPrint
HttpResponse(status = status, entity = HttpEntity(MediaTypes.`application/json`, json))
}
private def toHttpResponse[T](result: Future[T], errorMessage: String)
(implicit writer: spray.json.JsonWriter[T]): Future[HttpResponse] = {
result.map {
case res =>
HttpResponse(status = OK, entity = HttpEntity(MediaTypes.`application/json`, res.toJson.prettyPrint))
}.recover {
case NonFatal(e) =>
log.error(errorMessage, e)
response(InternalServerError, EventStoreResponseError(errorMessage, e.getMessage))
}
}
}
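// Hypothetical wiring sketch (not part of the original file): `flow` above is an ordinary
// akka-http handler flow, so a server could be bound roughly as below; the writer, reader
// and port values are placeholders, and an import of akka.http.scaladsl.Http is assumed.
//
//   implicit val system = ActorSystem("eventstore-http")
//   implicit val mat = ActorMaterializer()
//   val server = new EventStoreHttpServer(writer, reader, cookie = "secret")
//   Http().bindAndHandle(server.flow, "0.0.0.0", 8080)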
|
freetrm/eventstore
|
base/src/org/freetrm/eventstore/http/EventStoreHttpServer.scala
|
Scala
|
apache-2.0
| 8,183 |
/*
* DialogSource.scala
* (Desktop)
*
* Copyright (c) 2013-2021 Hanns Holger Rutz. All rights reserved.
*
* This software is published under the GNU Lesser General Public License v2.1+
*
*
* For further information, please contact Hanns Holger Rutz at
* [email protected]
*/
package de.sciss
package desktop
import de.sciss.desktop.OptionPane.Result
object DialogSource {
implicit final class Dialog(val source: swing.Dialog) extends DialogSource[Unit] {
def show(window: Option[Window]): Unit = source.open()
}
def exceptionToOptionPane(exception: scala.Throwable): OptionPane[Result.Value] = {
val message = Util.formatException(exception, margin = 40, stackTraceLines = 0)
val optionOk = "Ok"
val options = Seq(optionOk, "Show Stack Trace")
val op = desktop.OptionPane(message = message, messageType = desktop.OptionPane.Message.Error,
optionType = desktop.OptionPane.Options.YesNo, entries = options, initial = Some(optionOk))
op
}
implicit final class Exception(val source: (scala.Throwable, String)) extends DialogSource[Unit] {
def show(window: Option[Window]): Unit = {
val (exception, title) = source
val op = exceptionToOptionPane(exception)
op.title = title
if (op.show(window).id == 1) {
exception.printStackTrace()
}
}
}
}
trait DialogSource[+A] {
def show(window: Option[Window]): A
}
|
Sciss/Desktop
|
core/src/main/scala/de/sciss/desktop/DialogSource.scala
|
Scala
|
lgpl-2.1
| 1,419 |
/*
* Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package play.api.libs
import akka.actor.ActorSystem
import akka.stream.scaladsl._
import akka.stream.{ ActorMaterializer, Materializer }
import akka.util.{ ByteString, Timeout }
import org.specs2.mutable._
import play.api.PlayCoreTestApplication
import play.api.http.ContentTypes
import play.api.libs.json.{ JsString, JsValue }
import play.api.mvc._
import play.core.test.FakeRequest
import scala.concurrent.{ Await, Future }
class CometSpec extends Specification {
class MockController(val materializer: Materializer, action: ActionBuilder[Request, AnyContent]) extends Controller {
//#comet-string
def cometString = action {
implicit val m = materializer
def stringSource: Source[String, _] = Source(List("kiki", "foo", "bar"))
Ok.chunked(stringSource via Comet.string("parent.cometMessage")).as(ContentTypes.HTML)
}
//#comet-string
//#comet-json
def cometJson = action {
implicit val m = materializer
def stringSource: Source[JsValue, _] = Source(List(JsString("jsonString")))
Ok.chunked(stringSource via Comet.json("parent.cometMessage")).as(ContentTypes.HTML)
}
//#comet-json
}
def newTestApplication(): play.api.Application = new PlayCoreTestApplication() {
override lazy val actorSystem = ActorSystem()
override lazy val materializer = ActorMaterializer()(actorSystem)
}
"play comet" should {
"work with string" in {
val app = newTestApplication()
try {
implicit val m = app.materializer
val controller = new MockController(m, ActionBuilder.ignoringBody)
val result = controller.cometString.apply(FakeRequest())
contentAsString(result) must contain("<html><body><script type=\"text/javascript\">parent.cometMessage('kiki');</script><script type=\"text/javascript\">parent.cometMessage('foo');</script><script type=\"text/javascript\">parent.cometMessage('bar');</script>")
} finally {
app.stop()
}
}
"work with json" in {
val app = newTestApplication()
try {
implicit val m = app.materializer
val controller = new MockController(m, ActionBuilder.ignoringBody)
val result = controller.cometJson.apply(FakeRequest())
contentAsString(result) must contain("<html><body><script type=\"text/javascript\">parent.cometMessage(\"jsonString\");</script>")
} finally {
app.stop()
}
}
}
//---------------------------------------------------------------------------
// Can't use play.api.test.ResultsExtractor here as it is not imported
// So, copy the methods necessary to extract string.
import scala.concurrent.duration._
implicit def timeout: Timeout = 20.seconds
def charset(of: Future[Result]): Option[String] = {
Await.result(of, timeout.duration).body.contentType match {
case Some(s) if s.contains("charset=") => Some(s.split("; *charset=").drop(1).mkString.trim)
case _ => None
}
}
/**
* Extracts the content as String.
*/
def contentAsString(of: Future[Result])(implicit mat: Materializer): String =
contentAsBytes(of).decodeString(charset(of).getOrElse("utf-8"))
/**
* Extracts the content as bytes.
*/
def contentAsBytes(of: Future[Result])(implicit mat: Materializer): ByteString = {
val result = Await.result(of, timeout.duration)
Await.result(result.body.consumeData, timeout.duration)
}
}
|
aradchykov/playframework
|
framework/src/play/src/test/scala/play/api/libs/CometSpec.scala
|
Scala
|
apache-2.0
| 3,495 |
package com.twitter.finatra.json.internal.caseclass.jackson
import com.fasterxml.jackson.annotation.JsonProperty
import com.fasterxml.jackson.core.ObjectCodec
import com.fasterxml.jackson.databind._
import com.fasterxml.jackson.databind.`type`.TypeFactory
import com.fasterxml.jackson.databind.annotation.JsonDeserialize
import com.fasterxml.jackson.databind.node.TreeTraversingParser
import com.fasterxml.jackson.databind.util.ClassUtil
import com.twitter.finatra.json.internal.caseclass.exceptions.CaseClassValidationException
import com.twitter.finatra.json.internal.caseclass.exceptions.CaseClassValidationException.PropertyPath
import com.twitter.finatra.json.internal.caseclass.reflection.CaseClassSigParser
import com.twitter.finatra.json.internal.caseclass.reflection.DefaultMethodUtils.defaultFunction
import com.twitter.finatra.json.internal.caseclass.utils.AnnotationUtils._
import com.twitter.finatra.json.internal.caseclass.utils.FieldInjection
import com.twitter.finatra.validation.{ErrorCode, Validation}
import com.twitter.finatra.validation.ValidationResult._
import com.twitter.inject.Logging
import java.lang.annotation.Annotation
import scala.language.existentials
import scala.reflect.NameTransformer
object CaseClassField {
def createFields(clazz: Class[_], namingStrategy: PropertyNamingStrategy, typeFactory: TypeFactory): Seq[CaseClassField] = {
val allAnnotations = constructorAnnotations(clazz)
val constructorParams = CaseClassSigParser.parseConstructorParams(clazz)
assert(allAnnotations.size == constructorParams.size, "Non-static inner 'case classes' not supported")
for {
(constructorParam, idx) <- constructorParams.zipWithIndex
annotations = allAnnotations(idx)
name = jsonNameForField(annotations, namingStrategy, constructorParam.name)
deserializer = deserializerOrNone(annotations)
} yield {
CaseClassField(
name = name,
javaType = JacksonTypes.javaType(typeFactory, constructorParam.scalaType),
parentClass = clazz,
defaultFuncOpt = defaultFunction(clazz, idx),
annotations = annotations,
deserializer = deserializer)
}
}
private[finatra] def constructorAnnotations(clazz: Class[_]): Seq[Array[Annotation]] = {
clazz.getConstructors.head.getParameterAnnotations.toSeq
}
private def jsonNameForField(annotations: Seq[Annotation], namingStrategy: PropertyNamingStrategy, name: String): String = {
findAnnotation[JsonProperty](annotations) match {
case Some(jsonProperty) => jsonProperty.value
case _ =>
val decodedName = NameTransformer.decode(name) //decode unicode escaped field names
namingStrategy.nameForField(//apply json naming strategy (e.g. snake_case)
/* config = */ null,
/* field = */ null,
/* defaultName = */ decodedName)
}
}
private def deserializerOrNone(annotations: Array[Annotation]): Option[JsonDeserializer[Object]] = {
for {
jsonDeserializer <- findAnnotation[JsonDeserialize](annotations)
if jsonDeserializer.using != classOf[JsonDeserializer.None]
} yield ClassUtil.createInstance(jsonDeserializer.using, false).asInstanceOf[JsonDeserializer[Object]]
}
}
case class CaseClassField(
name: String,
javaType: JavaType,
parentClass: Class[_],
defaultFuncOpt: Option[() => Object],
annotations: Seq[Annotation],
deserializer: Option[JsonDeserializer[Object]])
extends Logging {
private val isOption = javaType.getRawClass == classOf[Option[_]]
private val isString = javaType.getRawClass == classOf[String]
private val fieldInjection = new FieldInjection(name, javaType, parentClass, annotations)
private lazy val firstTypeParam = javaType.containedType(0)
private lazy val requiredFieldException = CaseClassValidationException(PropertyPath.leaf(name), Invalid("field is required", ErrorCode.RequiredFieldMissing))
/* Public */
lazy val missingValue = {
if (javaType.isPrimitive)
ClassUtil.defaultValue(javaType.getRawClass)
else
null
}
val validationAnnotations =
filterIfAnnotationPresent[Validation](annotations)
/**
* Parse the field from a JsonNode representing a JSON object
* NOTE: I'd normally return a Try[Object], but instead I'm using exceptions to optimize the non-failure case
* NOTE: Option fields default to None even if no default is specified
*
* @param context DeserializationContext for deserialization
* @param codec Codec for field
* @param objectJsonNode The JSON object
* @return The parsed object for this field
* @throws CaseClassValidationException with reason for the parsing error
*/
def parse(context: DeserializationContext, codec: ObjectCodec, objectJsonNode: JsonNode): Object = {
if (fieldInjection.isInjectable)
fieldInjection.inject(context, codec) orElse defaultValue getOrElse throwRequiredFieldException()
else {
val fieldJsonNode = objectJsonNode.get(name)
if (fieldJsonNode != null)
if (isOption)
Option(
parseFieldValue(codec, fieldJsonNode, firstTypeParam, context))
else
assertNotNull(
fieldJsonNode,
parseFieldValue(codec, fieldJsonNode, javaType, context))
else if (defaultFuncOpt.isDefined)
defaultFuncOpt.get.apply()
else if (isOption)
None
else
throwRequiredFieldException()
}
}
/* Private */
//optimized
private[this] def parseFieldValue(fieldCodec: ObjectCodec, field: JsonNode, fieldType: JavaType, context: DeserializationContext): Object = {
if (isString) {
field.asText()
}
else {
val treeTraversingParser = new TreeTraversingParser(field, fieldCodec)
if (deserializer.isDefined) {
deserializer.get.deserialize(treeTraversingParser, context)
} else {
fieldCodec.readValue[Object](
treeTraversingParser,
fieldType)
}
}
}
//optimized
private[this] def assertNotNull(field: JsonNode, value: Object): Object = {
if (value == null) {
throw new JsonMappingException("error parsing '" + field.asText + "'")
}
value
}
private def defaultValue: Option[Object] = {
if (defaultFuncOpt.isDefined)
defaultFuncOpt map {_()}
else if (isOption)
Some(None)
else
None
}
private def throwRequiredFieldException() = {
throw requiredFieldException
}
}
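// Worked illustration (hypothetical, not part of the original file) of the parse precedence
// documented above, for a field `age` of an illustrative case class
// `case class Person(name: String, age: Option[Int] = Some(18))`:
//
//   {"name": "x", "age": 21}  -> age parses to Some(21)  (value present in the JSON object)
//   {"name": "x"}             -> age parses to Some(18)  (declared default wins over None)
//
// If `age` had no default and were not an Option, the missing value would instead raise a
// CaseClassValidationException via throwRequiredFieldException().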
|
jaume-pinyol/finatra
|
jackson/src/main/scala/com/twitter/finatra/json/internal/caseclass/jackson/CaseClassField.scala
|
Scala
|
apache-2.0
| 6,459 |
/*
* Copyright 2017 Datamountaineer.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datamountaineer.streamreactor.common.config
import com.datamountaineer.streamreactor.common.config.base.traits.KcqlSettings
import org.apache.kafka.common.config.types.Password
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import java.util
class KcqlSettingsTest extends AnyWordSpec with Matchers {
import scala.collection.JavaConverters._
case class KS(kcql: String) extends KcqlSettings {
override def connectorPrefix: String = "66686723939"
override def getString(key: String): String = key match {
case `kcqlConstant` => kcql
case _ => null
}
override def getInt(key: String): Integer = 0
override def getBoolean(key: String): java.lang.Boolean = false
override def getPassword(key: String): Password = null
override def getList(key: String): util.List[String] = List.empty[String].asJava
}
def testUpsertKeys(
kcql: String,
expectedKeys: Set[String],
topic: String = "t",
preserve: Boolean = false) = {
val keys = KS(kcql).getUpsertKeys(preserveFullKeys=preserve)(topic)
// get rid of ListSet to avoid ordering issues:
keys.toList.toSet shouldBe expectedKeys
}
"KcqlSettings.getUpsertKeys()" should {
"return 'basename' of key by default" in {
testUpsertKeys("UPSERT INTO coll SELECT * FROM t PK a", Set("a"))
testUpsertKeys("UPSERT INTO coll SELECT * FROM t PK a, b.m.x", Set("a", "x"))
testUpsertKeys("UPSERT INTO coll SELECT * FROM t PK b.m.x", Set("x"))
}
"return full keys if requested" in {
testUpsertKeys("UPSERT INTO coll SELECT * FROM t PK a", Set("a"), preserve=true)
testUpsertKeys("UPSERT INTO coll SELECT * FROM t PK a, b.m", Set("a", "b.m"), preserve=true)
testUpsertKeys("UPSERT INTO coll SELECT * FROM t PK a, b.m, b.n.x", Set("a", "b.m", "b.n.x"), preserve=true)
testUpsertKeys("UPSERT INTO coll SELECT * FROM t PK b.m.x", Set("b.m.x"), preserve=true)
}
"return keys in the expected order - as listed in the PK clause" in {
val kcql = "UPSERT INTO coll SELECT * FROM t PK a,b,c,d"
val expectedKeys = List("a","b","c","d")
val keys = KS(kcql).getUpsertKeys(preserveFullKeys=true)("t").toList.sorted
// SCALA 2.12 WARNING: If this fails when you upgrade to 2.12, you need to
// modify KcqlSettings to remove all the reverse() calls when constructing
// the ListSets.
keys shouldBe expectedKeys
}
}
}
|
datamountaineer/stream-reactor
|
kafka-connect-common/src/test/scala/com/datamountaineer/streamreactor/common/config/KcqlSettingsTest.scala
|
Scala
|
apache-2.0
| 3,091 |
package com.xenopsconsulting.gamedayapi
import org.scalatest.junit.AssertionsForJUnit
import org.junit.Test
import org.junit.Assert._
import org.junit.Before
import java.text.SimpleDateFormat
import java.util.Date
class LineScoreInningTest extends AssertionsForJUnit {
var game: Game = _
var lineScoreInning: LineScoreInning = _
var date: Date = _
var team: String = _
@Before def initialize {
date = new SimpleDateFormat("yyy-MM-dd").parse("2011-08-13")
team = "sea"
game = new Game(date, team) with TestFetchStrategyProvider
val lineScore = game.boxScore().lineScore()
lineScoreInning = lineScore.innings.head
}
@Test def testInning {
assertEquals("1", lineScoreInning.inning)
}
@Test def testHomeInningRuns {
assertEquals("5", lineScoreInning.homeInningRuns)
}
@Test def testAwayInningRuns {
assertEquals("0", lineScoreInning.awayInningRuns)
}
}
|
ecopony/scala-gameday-api
|
src/test/scala/com/xenopsconsulting/gamedayapi/LineScoreInningTest.scala
|
Scala
|
mit
| 912 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jp.gihyo.spark.ch06
// scalastyle:off println
import org.atilika.kuromoji.Token
import twitter4j.Status
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.twitter.TwitterUtils
import org.apache.spark.streaming.{Seconds, StreamingContext}
object gihyo_6_3_TwitterStream {
def main(args: Array[String]) {
if (args.length != 7) {
      throw new IllegalArgumentException(
        "Expected 7 arguments: consumerKey consumerSecret accessToken accessTokenSecret checkpointDir tagDir wordDir")
}
val Array(cKey, cSecret, aToken, aSecret, cDir, tagDir, wordDir) = args
System.setProperty("twitter4j.oauth.consumerKey", cKey)
System.setProperty("twitter4j.oauth.consumerSecret", cSecret)
System.setProperty("twitter4j.oauth.accessToken", aToken)
System.setProperty("twitter4j.oauth.accessTokenSecret", aSecret)
val f = createStreamingContext(cDir, tagDir, wordDir)
val ssc = StreamingContext.getOrCreate(cDir, f)
sys.ShutdownHookThread {
System.out.println("Gracefully stopping SparkStreaming Application")
ssc.stop(true, true)
System.out.println("SparkStreaming Application stopped")
}
ssc.start
ssc.awaitTermination
}
def createStreamingContext(checkpointDir: String,
tagDir: String,
wordDir: String): () => StreamingContext = { () => {
    /*
     * Factory method that creates the StreamingContext
     */
val conf = new SparkConf().setAppName("gihyoSample_Application")
.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
conf.registerKryoClasses(Array(classOf[UserDic]))
val sc = new SparkContext(conf)
val ssc = new StreamingContext(sc, Seconds(5))
ssc.checkpoint(checkpointDir)
val twitterStream = TwitterUtils.createStream(ssc, None)
run(sc, twitterStream, tagDir, wordDir)
ssc
}
}
def run(sc: SparkContext, stream: InputDStream[Status], tagDir: String, wordDir: String) {
val tokenizer = sc.broadcast(UserDic.getInstance)
val tweets = stream.map(tweet => tweet.getText())
tweets.persist()
val TweetText = tweets
.flatMap(text => {
val tokens = tokenizer.value.tokenize(text).toArray
tokens.filter(t => {
val token = t.asInstanceOf[Token]
((token.getPartOfSpeech.indexOf("名詞") > -1 &&
token.getPartOfSpeech.indexOf("一般") > -1) ||
token.getPartOfSpeech.indexOf("カスタム名詞") > -1) &&
token.getSurfaceForm.length > 1 &&
!(token.getSurfaceForm matches "^[a-zA-Z]+$|^[0-9]+$")
}).map(t => t.asInstanceOf[Token].getSurfaceForm)
})
.countByValue()
.map(x => (x._2, x._1))
.transform(_.sortByKey(false))
.map(x => (x._2, x._1))
val TweetTags = tweets
.flatMap(tweet => tweet.split(" ").filter(_.startsWith("#")))
.countByValue()
.map(x => (x._2, x._1))
.transform(_.sortByKey(false))
.map(x => (x._2, x._1))
TweetText.saveAsTextFiles(wordDir)
TweetTags.saveAsTextFiles(tagDir)
}
}
// scalastyle:on println
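// Hypothetical launch sketch (not part of the original file): the seven positional
// arguments destructured in main above would be supplied roughly as
//
//   spark-submit --class jp.gihyo.spark.ch06.gihyo_6_3_TwitterStream app.jar \
//     <consumerKey> <consumerSecret> <accessToken> <accessTokenSecret> \
//     <checkpointDir> <tagDir> <wordDir>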
|
yu-iskw/gihyo-spark-book-example
|
src/main/scala/jp/gihyo/spark/ch06/gihyo_6_3_TwitterStream.scala
|
Scala
|
apache-2.0
| 3,869 |
package tuner.gui.util
import scala.collection.immutable.SortedMap
import tuner.Table
object Histogram {
private def countData(values:Iterable[Float], numBreaks:Int) = {
// Make sure we have enough values for the histogram
if(values.isEmpty || values.tail.isEmpty) {
throw new IllegalArgumentException("values must have length > 1")
}
// First figure out the breaks
val breaks = new collection.mutable.MutableList[Float]
val min = values.min
val max = values.max
if(min == max) {
// Fake a single bucket
breaks += min
breaks += min + 1e-9f
} else {
val step = (max-min) / (numBreaks+1).toFloat
var cur = min + step
breaks += min
while(cur <= max) {
breaks += cur
cur += step
}
}
// Now figure out the counts
    val counts = breaks.sliding(2) map { lims =>
      val (mn, mx) = (lims(0), lims(1))
      // count the values falling in the half-open interval [mn, mx)
      values.count { x => x >= mn && x < mx }
    }
    (breaks.toList, counts.toList)
}
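  // Worked example (not part of the original file): for values 1,2,3,4,5 and numBreaks = 1,
  // min = 1, max = 5 and step = (5 - 1) / 2 = 2, so the breaks are 1, 3, 5. The half-open
  // buckets [1, 3) and [3, 5) then count 2 and 2 values respectively; the maximum value is
  // excluded because the upper bound of the last bucket is exclusive.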
def countData(field:String, data:Table, numBreaks:Int)
: SortedMap[Float,Int] = {
val (breaks, counts) = countData(data.values(field), numBreaks)
SortedMap[Float,Int]() ++ breaks.zip(counts)
}
def pctData(field:String, data:Table, numBreaks:Int)
: SortedMap[Float,Float] = {
val (breaks, counts) = countData(data.values(field), numBreaks)
val ttlCount = counts.sum.toFloat
val pcts = counts map {x => x.toFloat / ttlCount}
SortedMap[Float,Float]() ++ breaks.zip(pcts)
}
}
|
gabysbrain/tuner
|
src/main/scala/tuner/gui/util/Histogram.scala
|
Scala
|
mit
| 1,583 |
/**
* Copyright (C) 2010 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.xforms.event.events
import org.orbeon.oxf.xforms.control.XFormsControl
import org.orbeon.oxf.xforms.event.XFormsEvent
import XFormsEvent._
/**
* Base class for UI events, that is events only dispatched to controls.
*/
abstract class XFormsUIEvent(
eventName : String,
val targetControl : XFormsControl,
properties : PropertyGetter,
bubbles : Boolean,
cancelable : Boolean
) extends XFormsEvent(
eventName,
targetControl,
properties,
bubbles,
cancelable
) {
def this(eventName: String, target: XFormsControl, properties: PropertyGetter) =
this(eventName, target, properties, bubbles = true, cancelable = false)
require(targetControl ne null)
override def lazyProperties = getters(this, XFormsUIEvent.Getters)
override def newPropertyName(name: String) = XFormsUIEvent.Deprecated.get(name) orElse super.newPropertyName(name)
}
private object XFormsUIEvent {
val Deprecated = Map(
"target-ref" → "xxf:binding",
"alert" → "xxf:alert",
"label" → "xxf:label",
"hint" → "xxf:hint",
"help" → "xxf:help"
)
val Getters = Map[String, XFormsUIEvent ⇒ Option[Any]](
"target-ref" → binding,
xxfName("binding") → binding,
xxfName("control-position") → controlPosition,
"label" → label,
xxfName("label") → label,
"help" → help,
xxfName("help") → help,
"hint" → hint,
xxfName("hint") → hint,
"alert" → alert,
xxfName("alert") → alert
)
def binding(e: XFormsUIEvent) = Option(e.targetControl.binding)
def controlPosition(e: XFormsUIEvent) =
e.targetControl.container.getPartAnalysis.getControlPosition(e.targetControl.getPrefixedId)
def label(e: XFormsUIEvent) = Option(e.targetControl.getLabel)
def help(e: XFormsUIEvent) = Option(e.targetControl.getHelp)
def hint(e: XFormsUIEvent) = Option(e.targetControl.getHint)
def alert(e: XFormsUIEvent) = Option(e.targetControl.getAlert)
}
|
joansmith/orbeon-forms
|
src/main/scala/org/orbeon/oxf/xforms/event/events/XFormsUIEvent.scala
|
Scala
|
lgpl-2.1
| 2,809 |
package com.github.aselab.activerecord.validations
import com.github.aselab.activerecord._
import inner._
class ValidatableSpec extends DatabaseSpecification {
class SaveableImpl extends Saveable {
var calledMethods = List[String]()
override def save() = {
calledMethods :+= "save"
true
}
val isNewRecord = true
}
case class ValidatableModel(e: Seq[String]) extends SaveableImpl with Validatable with ProductModel {
override def doValidate(): Unit = {
e.foreach(errors.add)
calledMethods :+= "doValidate"
}
override def beforeValidation(): Unit = {
calledMethods :+= "beforeValidation"
}
}
"Validatable" should {
"addError" in {
val m = ValidatableModel(Nil)
m.errors.add("global error1")
m.errors.add("global error2")
m.errors.add("s", "field error1")
m.errors.add("i", "field error2")
val mc = m.getClass
"errors" in {
m.errors must contain(exactly(
ValidationError(mc, "", "global error1"),
ValidationError(mc, "", "global error2"),
ValidationError(mc, "s", "field error1"),
ValidationError(mc, "i", "field error2")
))
}
"globalErrors" in {
m.globalErrors must contain(exactly(
ValidationError(mc, "", "global error1"),
ValidationError(mc, "", "global error2")
))
}
"fieldErrors" in {
m.fieldErrors must contain(exactly(
ValidationError(mc, "s", "field error1"),
ValidationError(mc, "i", "field error2")
))
}
"hasErrors" in {
m.hasErrors must beTrue
}
"hasError" in {
m.hasError("s") must beTrue
m.hasError("i") must beTrue
m.hasError("xxx") must beFalse
}
"isValid" in {
m.isValid must beFalse
}
}
"validate success" in {
"validate" in {
val m = new ValidatableModel(Nil)
m.validate() must beTrue
}
"save" in {
val m = new ValidatableModel(Nil)
m.save() must beTrue
m.calledMethods mustEqual List("beforeValidation", "doValidate", "save")
}
"validate twice" in {
val m = new ValidatableModel(Nil)
m.validate() must beTrue
m.validate() must beTrue
m.calledMethods mustEqual List("beforeValidation", "doValidate")
}
}
"validate failure" in {
"validate" in {
val m = new ValidatableModel(Seq("error"))
m.validate() must beFalse
}
"save" in {
val m = new ValidatableModel(Seq("error"))
m.save() must beFalse
m.calledMethods mustEqual List("beforeValidation", "doValidate")
}
"validate twice" in {
val m = new ValidatableModel(Seq("error"))
m.errors.add("manual error")
m.validate() must beFalse
m.validate() must beFalse
m.calledMethods mustEqual List("beforeValidation", "doValidate")
m.errors.toList mustEqual List(
ValidationError(m.getClass, "", "manual error"),
ValidationError(m.getClass, "", "error")
)
}
}
}
}
|
aselab/scala-activerecord
|
activerecord/src/test/scala/validations/ValidatableSpec.scala
|
Scala
|
mit
| 3,170 |
/*
* Copyright 2013 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.zipkin.storage.anormdb
import com.twitter.ostrich.stats.Stats
import java.sql.Connection
/**
* Common database connection code.
*/
trait DBPool {
val db: DB
// If an open database connection is supplied, it supersedes usage of the connection pool.
val openCon: Option[Connection]
/**
* Closes all database connections.
*/
def close() {
if (!openCon.isEmpty) {
openCon.get.close()
}
db.closeConnectionPool()
}
/**
* Borrow a connection from the connection pool.
*
* @return a tuple containing the connection and a timestamp for stats tracking
*/
def borrowConn(): (Connection, Long) = {
val borrowTime = System.currentTimeMillis()
if (openCon.isEmpty) {
(db.getPooledConnection(), borrowTime)
} else {
(openCon.get, borrowTime)
}
}
/**
* Return a borrowed connection to the connection pool.
*/
def returnConn(con: Connection, borrowTime: Long, method: String) = {
if (openCon.isEmpty) {
con.close()
}
Stats.addMetric(method + "_msec", (System.currentTimeMillis() - borrowTime).toInt)
}
}
|
Flipkart/zipkin
|
zipkin-anormdb/src/main/scala/com/twitter/zipkin/storage/anormdb/DBPool.scala
|
Scala
|
apache-2.0
| 1,726 |
package io.cosmicteapot
import java.nio.ByteOrder
/**
* Created by michael on 15/10/2015.
*/
object Colour {
type ColourI = Int
def r(c:ColourI) : Int = c & 0xff
def g(c:ColourI) : Int = (c >>> 8) & 0xff
def b(c:ColourI) : Int = (c >>> 16) & 0xff
def a(c:ColourI) : Int = (c >>> 24) & 0xff
  def rf(c:ColourI) : Float = r(c).toFloat / 255f
  def gf(c:ColourI) : Float = g(c).toFloat / 255f
  def bf(c:ColourI) : Float = b(c).toFloat / 255f
  def af(c:ColourI) : Float = a(c).toFloat / 255f
val maskOffOneBit = ~ Integer.parseInt("00000001000000000000000000000000", 2)
def littleEndian : Boolean = ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN
def colourI(sample:Array[Int]) : ColourI = colourI(sample(0), sample(1), sample(2), sample(3))
def colourI(r:Int, g:Int, b:Int, a:Int) : ColourI = {
val ur = clamp(r, 0, 255)
val ug = clamp(g, 0, 255)
val ub = clamp(b, 0, 255)
val ua = clamp(a, 0, 255)
if(littleEndian) {
// println("we are little endian")
(ua << 24 | ub << 16 | ug << 8 | ur) & maskOffOneBit // mask off one bit just
} else {
// println("we are not little endian!")
ur << 24 | ug << 16 | ub << 8 | ua
}
}
def clamp(n:Int, a:Int, b:Int) : Int = {
if(n < a) a else if(n > b ) b else n
}
val white = colourI(255,255,255,255)
val black = colourI(0, 0, 0, 255)
val transWhite = colourI(255,255,255,0)
val transBlack = colourI(0, 0, 0, 0)
}
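// --- Usage sketch (not part of the original file) ---------------------------
// A minimal, hypothetical example of packing an RGBA colour into a ColourI and
// reading the channels back; the object name and values are illustrative only.
object ColourExample {
  import io.cosmicteapot.Colour._
  def main(args: Array[String]): Unit = {
    val c = colourI(255, 128, 0, 255)       // pack an opaque orange
    println((r(c), g(c), b(c), a(c)))       // integer channels in 0..255
    println((rf(c), gf(c), bf(c), af(c)))   // the same channels scaled to 0..1
  }
}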
|
MichaelShaw/gridme
|
src/main/scala/io/cosmicteapot/Colour.scala
|
Scala
|
mit
| 1,510 |
package main.scala
import java.io.FileReader
import scala.util.parsing.combinator._
object TestParser {
def foo(args : Array[String]) {
val parser = GraphParser
val reader = new FileReader("test.graph")
parser.parseAll(parser.graph, reader)
println("Got a graph named: " + parser.net.name)
println("Got nodes: " + parser.net.graphTopology)
println("Activation: " + parser.net.graphSnapshot)
}
}
|
dbigelow/activation_network
|
src/main/scala/TestParser.scala
|
Scala
|
bsd-2-clause
| 429 |
package org.scalamu.core.testapi
package junit
import _root_.junit.framework.TestCase
import org.junit.Test
import org.junit.runner.{Result, RunWith}
class JUnitFramework(override val arguments: String) extends TestingFramework {
type R = Result
override def name: String = "JUnit"
override def runner: TestRunner[R] = new JUnitRunner(arguments)
override def classFilter: TestClassFilter = new CompositeTestClassFilter(
new AnnotationBasedFilter(classOf[Test], this) with NotAModule with NotAbstract,
new AnnotationBasedFilter(classOf[RunWith], this) with NotAModule with NotAbstract,
new SuperclassBasedFilter(classOf[TestCase], this) with NotAModule with NotAbstract
)
}
object JUnitFramework extends JUnitFramework("") {
def apply(arguments: String): JUnitFramework = new JUnitFramework(arguments)
}
|
sugakandrey/scalamu
|
core/src/main/scala/org/scalamu/core/testapi/junit/JUnitFramework.scala
|
Scala
|
gpl-3.0
| 841 |
package com.datastax.spark.connector.rdd.typeTests
import com.datastax.oss.driver.api.core.cql.Row
import com.datastax.spark.connector.cluster.DefaultCluster
class AsciiTypeTest extends AbstractTypeTest[String, String] with DefaultCluster {
override val typeName = "ascii"
override val typeData: Seq[String] = Seq("row1", "row2", "row3", "row4", "row5")
override val addData: Seq[String] = Seq("row6", "row7", "row8", "row9", "row10")
override def getDriverColumn(row: Row, colName: String): String = {
row.getString(colName)
}
}
|
datastax/spark-cassandra-connector
|
connector/src/it/scala/com/datastax/spark/connector/rdd/typeTests/AsciiTypeTest.scala
|
Scala
|
apache-2.0
| 550 |
package com.avsystem.commons
package spring
import java.{util => ju}
import com.avsystem.commons.spring.AttrNames._
import scala.annotation.nowarn
import com.typesafe.config._
import org.springframework.beans.factory.annotation.Qualifier
import org.springframework.beans.factory.config.ConstructorArgumentValues.ValueHolder
import org.springframework.beans.factory.config.{BeanDefinitionHolder, ConstructorArgumentValues, RuntimeBeanNameReference, RuntimeBeanReference}
import org.springframework.beans.factory.support._
import org.springframework.beans.{MutablePropertyValues, PropertyValue}
import org.springframework.core.io.Resource
class HoconBeanDefinitionReader(registry: BeanDefinitionRegistry)
extends AbstractBeanDefinitionReader(registry) {
import com.typesafe.config.ConfigValueType._
private implicit class ConfigValueExtensions(value: ConfigValue) {
def as[T: HoconType] =
implicitly[HoconType[T]].get(value)
}
private val autowireMapping = AutowireMapping.withDefault {
v => throw new IllegalArgumentException(s"Invalid value $v for $AutowireAttr attribute")
}
private val dependencyCheckMapping = DependencyCheckMapping.withDefault {
v => throw new IllegalArgumentException(s"Invalid value $v for $DependencyCheckAttr attribute")
}
private def setup[T](t: T)(setupFunc: T => Any) = {
setupFunc(t)
t
}
private def iterate(obj: ConfigObject)
(attrFun: (String, ConfigValue) => Any)
(propFun: (String, ConfigValue) => Any) =
obj.asScala.foreach {
case (key, _) if key.startsWith("_") =>
case (key, value) if key.startsWith("%") => attrFun(key, value)
case (key, value) => propFun(key, value)
}
private def validateObj(
required: Set[String] = Set.empty,
requiredAny: Set[String] = Set.empty,
allowed: Set[String] = Set.empty,
props: Boolean = false
)(obj: ConfigObject): Unit = {
require(required.forall(obj.containsKey),
s"Attributes ${required.mkString(", ")} must be present in object at ${obj.origin.description}")
require(requiredAny.isEmpty || requiredAny.exists(obj.containsKey),
s"At least one of ${requiredAny.mkString(", ")} must be present in object at ${obj.origin.description}")
val allAllowed = required ++ requiredAny ++ allowed
iterate(obj) { (key, value) =>
if (!allAllowed.contains(key))
badAttr(key, value)
} { (key, value) =>
if (!props)
badProp(key, value)
}
}
@nowarn("msg=deprecated")
private def getProps(obj: ConfigObject) =
obj.asScala.filterKeys(k => !k.startsWith("%") && !k.startsWith("_"))
private def badAttr(key: String, value: ConfigValue) =
throw new IllegalArgumentException(s"Unexpected attribute $key at ${value.origin.description}")
private def badProp(key: String, value: ConfigValue) =
throw new IllegalArgumentException(s"Unexpected property $key at ${value.origin.description}")
private object BeanDefinition {
val BeanOnlyAttrs = BeanAttrs - MetaAttr
def unapply(obj: ConfigObject) =
if (BeanOnlyAttrs.exists(obj.as[ConfigObject].keySet.contains)) Some(obj) else None
}
private class ObjectWithAttributePresentExtractor(elementAttr: String) {
def unapply(obj: ConfigObject) =
if (obj.containsKey(elementAttr)) Some(obj) else None
}
private object ListDefinition extends ObjectWithAttributePresentExtractor(ListAttr)
private object ArrayDefinition extends ObjectWithAttributePresentExtractor(ArrayAttr)
private object SetDefinition extends ObjectWithAttributePresentExtractor(SetAttr)
private object PropertiesDefinition extends ObjectWithAttributePresentExtractor(PropsAttr)
private object ValueDefinition extends ObjectWithAttributePresentExtractor(ValueAttr)
private object BeanReference extends ObjectWithAttributePresentExtractor(RefAttr)
private object BeanNameReference extends ObjectWithAttributePresentExtractor(IdrefAttr)
private object RawConfig extends ObjectWithAttributePresentExtractor(ConfigAttr)
private def read(value: ConfigValue): Any = value match {
case BeanDefinition(obj) =>
val bd = readBean(obj)
obj.get(NameAttr).as[Option[String]] match {
case Some(name) => new BeanDefinitionHolder(bd, name)
case None => bd
}
case BeanReference(obj) => readRef(obj)
case BeanNameReference(obj) => readIdref(obj)
case ArrayDefinition(obj) => readArray(obj)
case ListDefinition(obj) => readList(obj)
case SetDefinition(obj) => readSet(obj)
case PropertiesDefinition(obj) => readProperties(obj)
case RawConfig(obj) => readRawConfig(obj)
case obj: ConfigObject => readMap(obj)
case list: ConfigList => readRawList(list)
case _ => value.unwrapped
}
private def readRef(obj: ConfigObject) = {
validateObj(required = Set(RefAttr), allowed = Set(ParentAttr))(obj)
new RuntimeBeanReference(obj.get(RefAttr).as[String], obj.get(ParentAttr).as[Option[Boolean]].getOrElse(false))
}
private def readIdref(obj: ConfigObject) = {
validateObj(required = Set(IdrefAttr))(obj)
new RuntimeBeanNameReference(obj.get(IdrefAttr).as[String])
}
private def readList(obj: ConfigObject) = {
validateObj(required = Set(ListAttr), allowed = Set(MergeAttr, ValueTypeAttr))(obj)
setup(new ManagedList[Any]) { list =>
list.addAll(obj.get(ListAttr).as[ConfigList].asScala.map(read).asJavaCollection)
list.setMergeEnabled(obj.get(MergeAttr).as[Option[Boolean]].getOrElse(false))
list.setElementTypeName(obj.get(ValueTypeAttr).as[Option[String]].orNull)
}
}
private def readArray(obj: ConfigObject) = {
validateObj(required = Set(ArrayAttr), allowed = Set(MergeAttr, ValueTypeAttr))(obj)
val elements = obj.get(ArrayAttr).as[ConfigList]
val valueType = obj.get(ValueTypeAttr).as[Option[String]].getOrElse("")
val result = new ManagedArray(valueType, elements.size)
result.addAll(elements.asScala.map(v => read(v).asInstanceOf[AnyRef]).asJavaCollection)
result.setMergeEnabled(obj.get(MergeAttr).as[Option[Boolean]].getOrElse(false))
result
}
private def readSet(obj: ConfigObject) = {
validateObj(required = Set(SetAttr), allowed = Set(MergeAttr, ValueTypeAttr))(obj)
setup(new ManagedSet[Any]) { set =>
set.addAll(obj.get(SetAttr).as[ConfigList].asScala.map(read).asJavaCollection)
set.setMergeEnabled(obj.get(MergeAttr).as[Option[Boolean]].getOrElse(false))
set.setElementTypeName(obj.get(ValueTypeAttr).as[Option[String]].orNull)
}
}
private def readRawList(list: ConfigList) = {
setup(new ManagedList[Any]) { ml =>
ml.addAll(list.asScala.map(read).asJavaCollection)
}
}
private def readMap(obj: ConfigObject) = {
validateObj(allowed = Set(MergeAttr, KeyTypeAttr, ValueTypeAttr, EntriesAttr), props = true)(obj)
setup(new ManagedMap[Any, Any]) { mm =>
mm.setMergeEnabled(obj.get(MergeAttr).as[Option[Boolean]].getOrElse(false))
mm.setKeyTypeName(obj.get(KeyTypeAttr).as[Option[String]].orNull)
mm.setValueTypeName(obj.get(ValueTypeAttr).as[Option[String]].orNull)
obj.get(EntriesAttr).as[Option[ConfigList]].getOrElse(ju.Collections.emptyList).asScala.foreach {
case obj: ConfigObject =>
validateObj(required = Set(KeyAttr, ValueAttr))(obj)
mm.put(read(obj.get(KeyAttr)), read(obj.get(ValueAttr)))
case _ =>
throw new IllegalArgumentException(s"Required an object at ${obj.origin.description}")
}
getProps(obj).foreach {
case (key, value) => mm.put(key, read(value))
}
}
}
private def readProperties(obj: ConfigObject) = {
validateObj(required = Set(PropsAttr), allowed = Set(MergeAttr))(obj)
setup(new ManagedProperties) { mp =>
mp.setMergeEnabled(obj.get(MergeAttr).as[Option[Boolean]].getOrElse(false))
obj.get(PropsAttr).as[Option[Config]].getOrElse(ConfigFactory.empty).entrySet.asScala.foreach {
case entry if Set(STRING, NUMBER, BOOLEAN).contains(entry.getValue.valueType) =>
mp.setProperty(entry.getKey, entry.getValue.unwrapped.toString)
case entry => throw new IllegalArgumentException(s"Bad prop definition at ${entry.getValue.origin.description}")
}
}
}
private def readRawConfig(obj: ConfigObject) = {
validateObj(required = Set(ConfigAttr))(obj)
obj.get(ConfigAttr).as[Config]
}
private def readBean(obj: ConfigObject) = {
val bd = new GenericBeanDefinition
val cargs = new ConstructorArgumentValues
val propertyValues = new MutablePropertyValues
bd.setConstructorArgumentValues(cargs)
bd.setPropertyValues(propertyValues)
bd.setResourceDescription(obj.origin.description)
def addConstructorArg(idxAndValue: (Option[Int], ValueHolder)) = idxAndValue match {
case (Some(idx), valueHolder) => cargs.addIndexedArgumentValue(idx, valueHolder)
case (None, valueHolder) => cargs.addGenericArgumentValue(valueHolder)
}
obj.get(AbstractAttr).as[Option[Boolean]].foreach(bd.setAbstract)
obj.get(AutowireCandidateAttr).as[Option[Boolean]].foreach(bd.setAutowireCandidate)
obj.get(AutowireAttr).as[Option[String]].map(autowireMapping).foreach(bd.setAutowireMode)
obj.get(ClassAttr).as[Option[String]].foreach(bd.setBeanClassName)
readConstructorArgs(obj.get(ConstructorArgsAttr)).foreach(addConstructorArg)
obj.get(DependencyCheckAttr).as[Option[String]].map(dependencyCheckMapping).foreach(bd.setDependencyCheck)
obj.get(DescriptionAttr).as[Option[String]].foreach(bd.setDescription)
obj.get(DestroyMethodAttr).as[Option[String]].foreach(bd.setDestroyMethodName)
obj.get(DependsOnAttr).as[Option[ju.List[String]]].map(_.asScala.toArray).foreach(bd.setDependsOn(_: _*))
obj.get(FactoryBeanAttr).as[Option[String]].foreach(bd.setFactoryBeanName)
obj.get(FactoryMethodAttr).as[Option[String]].foreach(bd.setFactoryMethodName)
obj.get(InitMethodAttr).as[Option[String]].foreach(bd.setInitMethodName)
bd.setLazyInit(obj.get(LazyInitAttr).as[Option[Boolean]].getOrElse(false))
obj.get(LookupMethodsAttr).as[Option[ConfigObject]].foreach { obj =>
validateObj(props = true)(obj)
getProps(obj).foreach {
case (key, value) => bd.getMethodOverrides.addOverride(new LookupOverride(key, value.as[String]))
}
}
obj.get(MetaAttr).as[Option[ConfigObject]].getOrElse(ConfigFactory.empty.root).asScala.foreach {
case (mkey, mvalue) => bd.setAttribute(mkey, mvalue.as[String])
}
obj.get(ParentAttr).as[Option[String]].foreach(bd.setParentName)
obj.get(PrimaryAttr).as[Option[Boolean]].foreach(bd.setPrimary)
obj.get(QualifiersAttr).as[Option[ju.List[ConfigObject]]].getOrElse(ju.Collections.emptyList).asScala.foreach { obj =>
bd.addQualifier(readQualifier(obj))
}
obj.get(ReplacedMethodsAttr).as[Option[ju.List[ConfigObject]]].getOrElse(ju.Collections.emptyList).asScala.foreach { obj =>
bd.getMethodOverrides.addOverride(readReplacedMethod(obj))
}
obj.get(ScopeAttr).as[Option[String]].foreach(bd.setScope)
val construct = obj.get(ConstructAttr).as[Option[Boolean]].getOrElse(false)
getProps(obj).foreach {
case (key, value) =>
if (construct) {
addConstructorArg(readConstructorArg(value, forcedName = key))
} else {
propertyValues.addPropertyValue(readPropertyValue(key, value))
}
}
bd
}
private def readQualifier(obj: ConfigObject) = {
validateObj(allowed = Set(TypeAttr, ValueAttr), props = true)(obj)
val acq = new AutowireCandidateQualifier(obj.get(TypeAttr).as[Option[String]].getOrElse(classOf[Qualifier].getName))
obj.get(ValueAttr).as[Option[String]].foreach(acq.setAttribute(AutowireCandidateQualifier.VALUE_KEY, _))
getProps(obj).foreach {
case (key, value) => acq.setAttribute(key, value.as[String])
}
acq
}
private def readReplacedMethod(obj: ConfigObject) = {
validateObj(required = Set(NameAttr, ReplacerAttr), allowed = Set(ArgTypesAttr))(obj)
val replaceOverride = new ReplaceOverride(obj.get(NameAttr).as[String], obj.get(ReplacerAttr).as[String])
obj.get(ArgTypesAttr).as[Option[ju.List[String]]].getOrElse(ju.Collections.emptyList).asScala.foreach(replaceOverride.addTypeIdentifier)
replaceOverride
}
private def readConstructorArgs(value: ConfigValue) = {
value.as[Option[Either[ConfigList, ConfigObject]]] match {
case Some(Left(list)) =>
list.iterator.asScala.zipWithIndex.map { case (configValue, idx) =>
readConstructorArg(configValue, forcedIndex = idx)
}
case Some(Right(obj)) =>
validateObj(props = true)(obj)
getProps(obj).iterator.map { case (name, configValue) =>
readConstructorArg(configValue, forcedName = name)
}
case None =>
Iterator.empty
}
}
private def readConstructorArg(
value: ConfigValue,
forcedIndex: OptArg[Int] = OptArg.Empty,
forcedName: OptArg[String] = OptArg.Empty
) = value match {
case ValueDefinition(obj) =>
validateObj(required = Set(ValueAttr), allowed = Set(IndexAttr, TypeAttr, NameAttr))(obj)
val vh = new ValueHolder(read(obj.get(ValueAttr)))
obj.get(TypeAttr).as[Option[String]].foreach(vh.setType)
(forcedName.toOption orElse obj.get(NameAttr).as[Option[String]]).foreach(vh.setName)
val indexOpt = forcedIndex.toOption orElse obj.get(IndexAttr).as[Option[Int]]
(indexOpt, vh)
case _ =>
val vh = new ValueHolder(read(value))
forcedName.foreach(vh.setName)
(forcedIndex.toOption, vh)
}
private def readPropertyValue(name: String, value: ConfigValue) = value match {
case ValueDefinition(obj) =>
validateObj(required = Set(ValueAttr), allowed = Set(MetaAttr))(obj)
val pv = new PropertyValue(name, read(obj.get(ValueAttr)))
obj.get(MetaAttr).as[Option[ConfigObject]].getOrElse(ConfigFactory.empty.root).asScala.foreach {
case (mkey, mvalue) => pv.setAttribute(mkey, mvalue.as[String])
}
pv
case _ =>
new PropertyValue(name, read(value))
}
private def readBeans(obj: ConfigObject) = {
validateObj(props = true)(obj)
val beanDefs = getProps(obj).iterator.flatMap {
case (key, value) =>
try {
value.as[Option[ConfigObject]].map(obj => (key, readBean(obj)))
} catch {
case e: Exception => throw new RuntimeException(
s"Could not read definition of bean $key at ${value.origin.description}", e)
}
}.toVector
beanDefs.foreach((registry.registerBeanDefinition _).tupled)
beanDefs.size
}
private def readAliases(obj: ConfigObject): Unit = {
validateObj(props = true)(obj)
getProps(obj).foreach {
case (key, value) => value.as[Option[String]].foreach(registry.registerAlias(_, key))
}
}
def loadBeanDefinitions(config: Config): Int = {
val beans = if (config.hasPath("beans")) config.getObject("beans") else ConfigFactory.empty.root
val aliases = if (config.hasPath("aliases")) config.getObject("aliases") else ConfigFactory.empty.root
val result = readBeans(beans)
readAliases(aliases)
result
}
def loadBeanDefinitions(resource: Resource): Int =
loadBeanDefinitions(ConfigFactory.parseURL(resource.getURL).resolve)
}
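// --- Usage sketch (not part of the original file) ---------------------------
// A minimal, hypothetical example of feeding a HOCON config to the reader and
// registering the resulting bean definitions in a Spring context. The "%class"
// attribute key is an assumption here (it should match ClassAttr in AttrNames).
import com.typesafe.config.ConfigFactory
import org.springframework.context.support.GenericApplicationContext

object HoconBeansExample {
  def main(args: Array[String]): Unit = {
    val config = ConfigFactory.parseString(
      """beans {
        |  greeting { "%class" = java.lang.StringBuilder }
        |}""".stripMargin)
    val ctx = new GenericApplicationContext
    new HoconBeanDefinitionReader(ctx).loadBeanDefinitions(config)
    ctx.refresh()
    println(ctx.getBean("greeting")) // an (empty) StringBuilder bean
  }
}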
|
AVSystem/scala-commons
|
commons-spring/src/main/scala/com/avsystem/commons/spring/HoconBeanDefinitionReader.scala
|
Scala
|
mit
| 15,364 |
/*
* Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package play.api.libs.crypto
import org.specs2.mutable.Specification
import play.api.http.SecretConfiguration
class CookieSignerSpec extends Specification {
"signer.sign" should {
"be able to sign input using HMAC-SHA1 using the config secret" in {
val text = "Play Framework 2.0"
val key = "0123456789abcdef"
val secretConfiguration = SecretConfiguration(key, None)
val signer = new DefaultCookieSigner(secretConfiguration)
signer.sign(text) must be_==("94f63b1470ee74e15dc15fd704e26b0df36ef848")
}
"be able to sign input using HMAC-SHA1 using an explicitly passed in key" in {
val text = "Play Framework 2.0"
val key = "different key"
val secretConfiguration = SecretConfiguration(key, None)
val signer = new DefaultCookieSigner(secretConfiguration)
signer.sign(text, key.getBytes("UTF-8")) must be_==("470037631bddcbd13bb85d80d531c97a340f836f")
}
"be able to sign input using HMAC-SHA1 using an explicitly passed in key (same as secret)" in {
val text = "Play Framework 2.0"
val key = "0123456789abcdef"
val secretConfiguration = SecretConfiguration(key, None)
val signer = new DefaultCookieSigner(secretConfiguration)
signer.sign(text, key.getBytes("UTF-8")) must be_==("94f63b1470ee74e15dc15fd704e26b0df36ef848")
}
}
}
|
ktoso/playframework
|
framework/src/play/src/test/scala/play/api/libs/crypto/CookieSignerSpec.scala
|
Scala
|
apache-2.0
| 1,428 |
/*
* Copyright (c) 2017 Magomed Abdurakhmanov, Hypertino
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*
*/
package com.hypertino.hyperbus.model
object Method {
final val GET = "get"
final val POST = "post"
final val PUT = "put"
final val PATCH = "patch"
final val DELETE = "delete"
final val FEED_GET = "feed:get"
final val FEED_POST = "feed:post"
final val FEED_PUT = "feed:put"
final val FEED_PATCH = "feed:patch"
final val FEED_DELETE = "feed:delete"
}
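// --- Usage sketch (not part of the original file) ---------------------------
// A minimal, hypothetical example of branching on the Method constants, e.g.
// to tell feed (event) methods apart from plain request methods.
object MethodExample {
  import com.hypertino.hyperbus.model.Method
  def isFeedMethod(method: String): Boolean = method match {
    case Method.FEED_GET | Method.FEED_POST | Method.FEED_PUT |
         Method.FEED_PATCH | Method.FEED_DELETE => true
    case _ => false
  }
  def main(args: Array[String]): Unit = {
    println(isFeedMethod(Method.GET))      // false
    println(isFeedMethod(Method.FEED_PUT)) // true
  }
}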
|
hypertino/hyperbus
|
hyperbus/src/main/scala/com/hypertino/hyperbus/model/Method.scala
|
Scala
|
mpl-2.0
| 638 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package org.apache.toree.plugins.dependencies
import org.apache.toree.ReflectionAccessor
import org.scalatest.{FunSpec, Matchers, OneInstancePerTest}
class DependencyManagerSpec extends FunSpec with Matchers with OneInstancePerTest {
private val dependencyManager = new DependencyManager
describe("DependencyManager") {
describe("#Empty") {
it("should return the same dependency manager each time") {
val expected = DependencyManager.Empty
val actual = DependencyManager.Empty
actual should be (expected)
}
it("should not add dependencies when the add method is invoked") {
val d = DependencyManager.Empty
d.add(new Object)
d.add("id", new Object)
d.add(Dependency.fromValue(new Object))
d.toSeq should be (empty)
}
}
describe("#from") {
it("should return a new dependency manager using the dependencies") {
val expected = Seq(
Dependency.fromValue("value1"),
Dependency.fromValue("value2")
)
val actual = DependencyManager.from(expected: _*).toSeq
actual should contain theSameElementsAs (expected)
}
it("should throw an exception if two dependencies have the same name") {
intercept[IllegalArgumentException] {
DependencyManager.from(
Dependency.fromValueWithName("name", "value1"),
Dependency.fromValueWithName("name", "value2")
)
}
}
}
describe("#merge") {
it("should return a new dependency manager with both manager's dependencies") {
val expected = Seq(
Dependency.fromValue("value1"),
Dependency.fromValue("value2"),
Dependency.fromValue("value3"),
Dependency.fromValue("value4")
)
val dm1 = DependencyManager.from(
expected.take(expected.length / 2): _*
)
val dm2 = DependencyManager.from(
expected.takeRight(expected.length / 2): _*
)
val actual = dm1.merge(dm2).toSeq
actual should contain theSameElementsAs (expected)
}
it("should overwrite any dependency with the same name from this manager with the other") {
val expected = Seq(
Dependency.fromValueWithName("name", "value1"),
Dependency.fromValue("value2"),
Dependency.fromValue("value3"),
Dependency.fromValue("value4")
)
val dm1 = DependencyManager.from(
Dependency.fromValueWithName("name", "value5")
)
val dm2 = DependencyManager.from(expected: _*)
val actual = dm1.merge(dm2).toSeq
actual should contain theSameElementsAs (expected)
}
}
describe("#toMap") {
it("should return a map of dependency names to dependency values") {
val expected = Map(
"some name" -> new Object,
"some other name" -> new Object
)
expected.foreach { case (k, v) => dependencyManager.add(k, v) }
val actual = dependencyManager.toMap
actual should be (expected)
}
}
describe("#toSeq") {
it("should return a sequence of dependency objects") {
val expected = Seq(
Dependency.fromValue(new Object),
Dependency.fromValue(new Object)
)
expected.foreach(dependencyManager.add(_: Dependency[_ <: AnyRef]))
val actual = dependencyManager.toSeq
actual should contain theSameElementsAs (expected)
}
}
describe("#add") {
it("should generate a dependency name if not provided") {
dependencyManager.add(new Object)
dependencyManager.toSeq.head.name should not be (empty)
}
it("should use the provided name as the dependency's name") {
val expected = "some name"
dependencyManager.add(expected, new Object)
val actual = dependencyManager.toSeq.head.name
actual should be (expected)
}
it("should use the provided value for the dependency's value") {
val expected = new Object
dependencyManager.add(expected)
val actual = dependencyManager.toSeq.head.value
actual should be (expected)
}
it("should use the reflective type of the value for the dependency's type") {
ReflectionAccessor.useReflection {
import scala.reflect.runtime.universe._
val expected = typeOf[Object]
dependencyManager.add(new Object)
val actual = dependencyManager.toSeq.head.`type`
actual should be (expected)
}
}
it("should add the provided dependency object directly") {
val expected = Dependency.fromValue(new Object)
dependencyManager.add(expected)
val actual = dependencyManager.toSeq.head
actual should be (expected)
}
it("should throw an exception if a dependency with the same name already exists") {
intercept[IllegalArgumentException] {
dependencyManager.add("id", new Object)
dependencyManager.add("id", new Object)
}
}
}
describe("#find") {
it("should return Some(Dependency) if found by name") {
val expected = Some(Dependency.fromValue(new Object))
dependencyManager.add(expected.get)
val actual = dependencyManager.find(expected.get.name)
actual should be (expected)
}
it("should return None if no dependency with a matching name exists") {
val expected = None
val actual = dependencyManager.find("some name")
actual should be (expected)
}
}
describe("#findByType") {
it("should return a collection including of dependencies with the same type") {
ReflectionAccessor.useReflection {
import scala.reflect.runtime.universe._
val expected = Seq(
Dependency("id", typeOf[Object], new Object),
Dependency("id2", typeOf[Object], new Object)
)
expected.foreach(dependencyManager.add(_: Dependency[_ <: AnyRef]))
val actual = dependencyManager.findByType(typeOf[Object])
actual should contain theSameElementsAs (expected)
}
}
it("should return a collection including of dependencies with a sub type") {
ReflectionAccessor.useReflection {
import scala.reflect.runtime.universe._
val expected = Seq(
Dependency("id", typeOf[String], new String),
Dependency("id2", typeOf[String], new String)
)
expected.foreach(dependencyManager.add(_: Dependency[_ <: AnyRef]))
val actual = dependencyManager.findByType(typeOf[Object])
actual should contain theSameElementsAs (expected)
}
}
it("should return an empty collection if no dependency has the type") {
ReflectionAccessor.useReflection {
import scala.reflect.runtime.universe._
val expected = Nil
dependencyManager.add(Dependency("id", typeOf[Object], new Object))
dependencyManager.add(Dependency("id2", typeOf[Object], new Object))
val actual = dependencyManager.findByType(typeOf[String])
actual should contain theSameElementsAs (expected)
}
}
}
describe("#findByTypeClass") {
it("should return a collection including of dependencies with the same class for the type") {
ReflectionAccessor.useReflection {
import scala.reflect.runtime.universe._
val expected = Seq(
Dependency("id", typeOf[Object], new Object),
Dependency("id2", typeOf[Object], new Object)
)
expected.foreach(dependencyManager.add(_: Dependency[_ <: AnyRef]))
val actual = dependencyManager.findByTypeClass(classOf[Object])
actual should contain theSameElementsAs (expected)
}
}
it("should return a collection including of dependencies with a sub class for the type") {
ReflectionAccessor.useReflection {
import scala.reflect.runtime.universe._
val expected = Seq(
Dependency("id", typeOf[String], new String),
Dependency("id2", typeOf[String], new String)
)
expected.foreach(dependencyManager.add(_: Dependency[_ <: AnyRef]))
val actual = dependencyManager.findByTypeClass(classOf[Object])
actual should contain theSameElementsAs (expected)
}
}
it("should return an empty collection if no dependency has a matching class for its type") {
ReflectionAccessor.useReflection {
import scala.reflect.runtime.universe._
val expected = Nil
dependencyManager.add(Dependency("id", typeOf[Object], new Object))
dependencyManager.add(Dependency("id2", typeOf[Object], new Object))
val actual = dependencyManager.findByTypeClass(classOf[String])
actual should contain theSameElementsAs (expected)
}
}
ignore("should throw an exception if the dependency's type class is not found in the provided class' classloader") {
ReflectionAccessor.useReflection {
import scala.reflect.runtime.universe._
intercept[ClassNotFoundException] {
// TODO: Find some class that is in a different classloader and
// create a dependency from it
dependencyManager.add(Dependency("id", typeOf[Object], new Object))
dependencyManager.findByTypeClass(classOf[Object])
}
}
}
}
describe("#findByValueClass") {
it("should return a collection including of dependencies with the same class for the value") {
ReflectionAccessor.useReflection {
import scala.reflect.runtime.universe._
val expected = Seq(
Dependency("id", typeOf[AnyVal], new AnyRef),
Dependency("id2", typeOf[AnyVal], new AnyRef)
)
expected.foreach(dependencyManager.add(_: Dependency[_ <: AnyRef]))
val actual = dependencyManager.findByValueClass(classOf[AnyRef])
actual should contain theSameElementsAs (expected)
}
}
it("should return a collection including of dependencies with a sub class for the value") {
ReflectionAccessor.useReflection {
import scala.reflect.runtime.universe._
val expected = Seq(
Dependency("id", typeOf[AnyVal], new String),
Dependency("id2", typeOf[AnyVal], new String)
)
expected.foreach(dependencyManager.add(_: Dependency[_ <: AnyRef]))
val actual = dependencyManager.findByValueClass(classOf[AnyRef])
actual should contain theSameElementsAs (expected)
}
}
it("should return an empty collection if no dependency has a matching class for its value") {
ReflectionAccessor.useReflection {
import scala.reflect.runtime.universe._
val expected = Nil
dependencyManager.add(Dependency("id", typeOf[String], new Object))
dependencyManager.add(Dependency("id2", typeOf[String], new Object))
val actual = dependencyManager.findByValueClass(classOf[String])
actual should contain theSameElementsAs (expected)
}
}
}
describe("#remove") {
it("should remove the dependency with the matching name") {
val dSeq = Seq(
Dependency.fromValue(new Object),
Dependency.fromValue(new Object)
)
val dToRemove = Dependency.fromValue(new Object)
dSeq.foreach(dependencyManager.add(_: Dependency[_ <: AnyRef]))
dependencyManager.remove(dToRemove.name)
val actual = dependencyManager.toSeq
actual should not contain (dToRemove)
}
it("should return Some(Dependency) representing the removed dependency") {
val expected = Some(Dependency.fromValue(new Object))
dependencyManager.add(expected.get)
val actual = dependencyManager.remove(expected.get.name)
actual should be (expected)
}
it("should return None if no dependency was removed") {
val expected = None
val actual = dependencyManager.remove("some name")
actual should be (expected)
}
}
describe("#removeByType") {
it("should remove dependencies with the specified type") {
ReflectionAccessor.useReflection {
import scala.reflect.runtime.universe._
val expected = Seq(
Dependency("id", typeOf[String], new AnyRef),
Dependency("id2", typeOf[String], new AnyRef)
)
expected.foreach(dependencyManager.add(_: Dependency[_ <: AnyRef]))
val actual = dependencyManager.removeByType(typeOf[String])
actual should contain theSameElementsAs (expected)
}
}
it("should remove dependencies with a type that is a subtype of the specified type") {
ReflectionAccessor.useReflection {
import scala.reflect.runtime.universe._
val expected = Seq(
Dependency("id", typeOf[String], new AnyRef),
Dependency("id2", typeOf[String], new AnyRef)
)
expected.foreach(dependencyManager.add(_: Dependency[_ <: AnyRef]))
val actual = dependencyManager.removeByType(typeOf[CharSequence])
actual should contain theSameElementsAs (expected)
}
}
it("should return a collection of any removed dependencies") {
ReflectionAccessor.useReflection {
import scala.reflect.runtime.universe._
val expected = Seq(
Dependency("id", typeOf[String], new AnyRef),
Dependency("id2", typeOf[CharSequence], new AnyRef)
)
val all = Seq(
Dependency("id3", typeOf[Integer], new AnyRef),
Dependency("id4", typeOf[Boolean], new AnyRef)
) ++ expected
all.foreach(dependencyManager.add(_: Dependency[_ <: AnyRef]))
val actual = dependencyManager.removeByType(typeOf[CharSequence])
actual should contain theSameElementsAs (expected)
}
}
}
describe("#removeByTypeClass") {
it("should remove dependencies with the specified type class") {
ReflectionAccessor.useReflection {
import scala.reflect.runtime.universe._
val expected = Seq(
Dependency("id", typeOf[String], new AnyRef),
Dependency("id2", typeOf[String], new AnyRef)
)
expected.foreach(dependencyManager.add(_: Dependency[_ <: AnyRef]))
val actual = dependencyManager.removeByTypeClass(classOf[String])
actual should contain theSameElementsAs (expected)
}
}
it("should remove dependencies with a type that is a subtype of the specified type class") {
ReflectionAccessor.useReflection {
import scala.reflect.runtime.universe._
val expected = Seq(
Dependency("id", typeOf[String], new AnyRef),
Dependency("id2", typeOf[String], new AnyRef)
)
expected.foreach(dependencyManager.add(_: Dependency[_ <: AnyRef]))
val actual = dependencyManager.removeByTypeClass(classOf[CharSequence])
actual should contain theSameElementsAs (expected)
}
}
it("should return a collection of any removed dependencies") {
ReflectionAccessor.useReflection {
import scala.reflect.runtime.universe._
val expected = Seq(
Dependency("id", typeOf[String], new AnyRef),
Dependency("id2", typeOf[CharSequence], new AnyRef)
)
val all = Seq(
Dependency("id3", typeOf[Integer], new AnyRef),
Dependency("id4", typeOf[Boolean], new AnyRef)
) ++ expected
all.foreach(dependencyManager.add(_: Dependency[_ <: AnyRef]))
val actual = dependencyManager.removeByTypeClass(classOf[CharSequence])
actual should contain theSameElementsAs (expected)
}
}
}
describe("#removeByValueClass") {
it("should remove dependencies with the specified value class") {
ReflectionAccessor.useReflection {
import scala.reflect.runtime.universe._
val expected = Seq(
Dependency("id", typeOf[AnyRef], new String),
Dependency("id2", typeOf[AnyRef], new String)
)
expected.foreach(dependencyManager.add(_: Dependency[_ <: AnyRef]))
val actual = dependencyManager.removeByValueClass(classOf[String])
actual should contain theSameElementsAs (expected)
}
}
it("should remove dependencies with a type that is a subtype of the specified value class") {
ReflectionAccessor.useReflection {
import scala.reflect.runtime.universe._
val expected = Seq(
Dependency("id", typeOf[AnyRef], new String),
Dependency("id2", typeOf[AnyRef], new String)
)
expected.foreach(dependencyManager.add(_: Dependency[_ <: AnyRef]))
val actual = dependencyManager.removeByValueClass(classOf[CharSequence])
actual should contain theSameElementsAs (expected)
}
}
it("should return a collection of any removed dependencies") {
ReflectionAccessor.useReflection {
import scala.reflect.runtime.universe._
val expected = Seq(
Dependency("id", typeOf[AnyRef], new String),
Dependency("id2", typeOf[AnyRef], new CharSequence {
override def charAt(i: Int): Char = ???
override def length(): Int = ???
override def subSequence(i: Int, i1: Int): CharSequence = ???
})
)
val all = Seq(
Dependency("id3", typeOf[AnyRef], Int.box(3)),
Dependency("id4", typeOf[AnyRef], Boolean.box(true))
) ++ expected
all.foreach(dependencyManager.add(_: Dependency[_ <: AnyRef]))
val actual = dependencyManager.removeByValueClass(classOf[CharSequence])
actual should contain theSameElementsAs (expected)
}
}
}
}
}
|
Myllyenko/incubator-toree
|
plugins/src/test/scala/org/apache/toree/plugins/dependencies/DependencyManagerSpec.scala
|
Scala
|
apache-2.0
| 19,184 |
package eu.liderproject.jsonld
sealed trait Json {
def toObj : Object
}
object Json {
  def apply(objs : (String, Json)*) = JsonObject(objs.
    map({case (x, y) => JsonField(x, y)}).iterator)
  // DummyImplicit keeps the two overloads apart: both vararg parameters erase
  // to Seq, so without it the definitions would clash after erasure
  def apply(objs : Json*)(implicit d : DummyImplicit) = JsonArray(objs.iterator)
}
case class JsonArray(val values : Iterator[Json]) extends Json {
def toObj : List[Object] = (values map (_.toObj)).toList
def next = values.next
def hasNext = values.hasNext
}
case class JsonObject(val values : Iterator[JsonField]) extends Json {
def toObj : Map[String, Object] = (values map ({ case JsonField(k,v) => k -> v.toObj })).toMap
def next = values.next
def hasNext = values.hasNext
}
case class JsonField(val key : String, val value : Json)
case class JsonString(val value : String) extends Json {
def toObj = value
}
case class JsonInt(val value : Int) extends Json {
  def toObj = Integer.valueOf(value)
}
case class JsonNumber(val value : Double) extends Json {
  def toObj = java.lang.Double.valueOf(value)
}
case class JsonBoolean(val value : Boolean) extends Json {
  def toObj = java.lang.Boolean.valueOf(value)
}
object JsonNull extends Json {
def toObj = null
}
case class JsonException(message : String = "", cause : Throwable = null) extends
RuntimeException(message, cause)
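// --- Usage sketch (not part of the original file) ---------------------------
// A minimal, hypothetical example of building a small JSON value with this ADT
// and converting it to plain objects with toObj. Note that JsonObject and
// JsonArray wrap Iterators, so each value can only be traversed once.
object JsonExample {
  import eu.liderproject.jsonld._
  def main(args: Array[String]): Unit = {
    val doc = Json(
      "name" -> JsonString("lider"),
      "size" -> JsonInt(3),
      "tags" -> Json(JsonString("a"), JsonString("b"))
    )
    // Prints a Map along the lines of Map(name -> lider, size -> 3, tags -> List(a, b))
    println(doc.toObj)
  }
}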
|
jmccrae/codegen
|
src/main/scala/lider/jsonld/json.scala
|
Scala
|
apache-2.0
| 1,266 |
package io.itdraft.levenshteinautomaton.description.nonparametric
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import io.itdraft.levenshteinautomaton.description.{DefaultCharacteristicVector, State}
import org.specs2.mutable.Specification
import org.specs2.specification.Tables
class ElementaryTransitionSpec extends Specification with Tables {
import io.itdraft.levenshteinautomaton._
import io.itdraft.levenshteinautomaton.description.nonparametric._
import io.itdraft.levenshteinautomaton.util.StringUtil._
"Elementary transition for various positions" should {
"perform transition according to the algorithm's publication" in {
// todo: tests for table including transpositions
"position" | "word" | "alpha" | "degree" | "inclTranspositions" | "state" |>
(0, 0) ! "" ! 'x' ! 0 ! false ! Nil |
(0, 0) ! "" ! 'x' ! 0 ! true ! Nil |
(0, 0) ! "x" ! 'x' ! 0 ! false ! (1, 0) :: Nil |
(0, 0) ! "x" ! 'x' ! 0 ! true ! (1, 0) :: Nil |
(0, 0) ! "" ! 'x' ! 1 ! false ! (0, 1) :: Nil |
(0, 0) ! "" ! 'x' ! 1 ! true ! (0, 1) :: Nil |
(2, 0) ! "00001011" ! '1' ! 5 ! false ! (2, 1) ::(3, 1) ::(5, 2) :: Nil |
(2, 0) ! "00101011" ! '1' ! 5 ! false ! (3, 0) :: Nil |
(2, 0) ! "00000000" ! '1' ! 5 ! false ! (2, 1) ::(3, 1) :: Nil |
(2, 0) ! "00000001" ! '1' ! 5 ! false ! (2, 1) ::(3, 1) ::(8, 5) :: Nil |
(7, 0) ! "00000001" ! '1' ! 5 ! false ! (8, 0) :: Nil |
(7, 0) ! "00000000" ! '1' ! 5 ! false ! (7, 1) ::(8, 1) :: Nil |
(8, 0) ! "00000000" ! '1' ! 5 ! false ! (8, 1) :: Nil |
(8, 5) ! "00000000" ! '1' ! 5 ! false ! Nil | {
(position, word, alpha, degree, inclTranspositions, state: List[(Int, Int)]) =>
implicit val automatonConfig = createLevenshteinAutomatonConfig(word, degree, inclTranspositions)
val transition = ElementaryTransition()
val (i, e) = position
val v = DefaultCharacteristicVector(alpha, toCodePoints(word), i)
transition(i ^# e, v) must be equalTo state
}
}
}
implicit def conversionToState(stateFormat: List[(Int, Int)])
(implicit c: LevenshteinAutomatonConfig): NonparametricState =
stateFormat match {
case Nil => FailureState
case ps => State(ps.map(t => t._1 ^# t._2): _*)
}
}
|
itdraft/levenshtein-automaton
|
src/test/scala-2.11/io/itdraft/levenshteinautomaton/description/nonparametric/ElementaryTransitionSpec.scala
|
Scala
|
apache-2.0
| 2,882 |
package io.finch.petstore
import argonaut.CodecJson
import argonaut.Argonaut._
/**
* Represents the current state of the Petstore and how many pets are currently of which [[Status]].
*/
case class Inventory(available: Int, pending: Int, adopted: Int)
/**
* Provides a codec for encoding and decoding [[Inventory]] objects.
*/
object Inventory {
implicit val inventoryCodec: CodecJson[Inventory] =
CodecJson(
(i: Inventory) =>
("available" := i.available) ->: ("pending" := i.pending) ->: ("adopted" := i.adopted) ->: jEmptyObject,
c => for {
        available <- (c --\ "available").as[Int]
        pending <- (c --\ "pending").as[Int]
        adopted <- (c --\ "adopted").as[Int]
} yield Inventory(available, pending, adopted))
}
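// --- Usage sketch (not part of the original file) ---------------------------
// A minimal, hypothetical example of round-tripping an Inventory through JSON
// using the Argonaut codec defined above.
object InventoryExample {
  import argonaut._, Argonaut._
  def main(args: Array[String]): Unit = {
    val inv = Inventory(available = 5, pending = 2, adopted = 10)
    val json = inv.asJson.nospaces
    println(json)                          // e.g. {"available":5,"pending":2,"adopted":10}
    println(json.decodeOption[Inventory])  // Some(Inventory(5,2,10))
  }
}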
|
peel/finch
|
petstore/src/main/scala/io/finch/petstore/inventory.scala
|
Scala
|
apache-2.0
| 770 |
/*
* Copyright (c) 2015 Alexandros Pappas p_alx hotmail com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
import org.scalatra.atmosphere.{OutboundMessage, ClientFilter, AtmosphereClient}
import scala.concurrent.ExecutionContext.Implicits.global
import org.atmosphere.cpr.AtmosphereResource
/*
class SecureClient extends AtmosphereClient {
var adminUuids: List[String] = List()
// adminUuids is a collection of uuids for admin users. You'd need to
// add each admin user's uuid to the list at connection time.
final protected def OnlyAdmins: ClientFilter = adminUuids.contains(_.uuid)
/**
* Broadcast a message to admin users only.
*/
def adminBroadcast(msg: OutboundMessage) {
broadcast(msg, OnlyAdmins)
}
}*/
|
gnostix/freeswitch-monitoring
|
src/main/scala/gr/gnostix/freeswitch/model/SecureClient.scala
|
Scala
|
apache-2.0
| 1,289 |