| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
| stringlengths 5-1M | stringlengths 5-109 | stringlengths 6-208 | stringclasses 1 value | stringclasses 15 values | int64 5-1M |
package net.rrm.ehour.persistence.dao
import java.{io, util}
import net.rrm.ehour.domain.DomainObject
import net.rrm.ehour.persistence.retry.ExponentialBackoffRetryPolicy
import org.hibernate.Criteria
import org.springframework.stereotype.Repository
import org.springframework.transaction.annotation.Transactional
/**
* Generic DAO base implementation providing CRUD operations on domain objects
*/
@Repository
abstract class AbstractGenericDaoHibernateImpl[PK <: io.Serializable, T <: DomainObject[PK, _]](entityType: Class[T])
extends AbstractAnnotationDaoHibernate4Impl with GenericDao[PK, T] with SingleTypedFindByNamedQuery[T] {
@Transactional(readOnly = true)
override def findAll(): util.List[T] = {
val criteria: Criteria = getSession.createCriteria(entityType)
ExponentialBackoffRetryPolicy retry criteria.list.asInstanceOf[util.List[T]]
}
@Transactional
override def delete(domObj: T) {
ExponentialBackoffRetryPolicy retry getSession.delete(domObj)
}
@Transactional
override def deleteOnId(id: PK) {
val dom = findById(id)
delete(dom)
}
@Transactional
override def persist(domObj: T): T = {
ExponentialBackoffRetryPolicy retry getSession.saveOrUpdate(domObj)
domObj
}
override def flush() { getSession.flush()}
@Transactional(readOnly = true)
override def findById(id: PK): T = ExponentialBackoffRetryPolicy retry getSession.get(entityType, id).asInstanceOf[T]
@Transactional
override def merge(domobj: T): T = ExponentialBackoffRetryPolicy retry getSession.merge(domobj).asInstanceOf[T]
}
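// Hypothetical usage sketch (not part of the original file): a concrete DAO
// fixes the key and entity types and inherits the retrying CRUD methods.
// `Project` here is an assumed domain class, for illustration only.
//
// @Repository
// class ProjectDao extends AbstractGenericDaoHibernateImpl[Integer, Project](classOf[Project])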
| momogentoo/ehour | eHour-persistence/src/main/scala/net/rrm/ehour/persistence/dao/AbstractGenericDaoHibernateImpl.scala | Scala | gpl-2.0 | 1,547 |
package com.twitter.scalding
import cascading.flow.{ Flow, FlowListener, FlowDef, FlowProcess }
import cascading.flow.hadoop.HadoopFlowProcess
import cascading.stats.CascadingStats
import java.util.concurrent.ConcurrentHashMap
import org.slf4j.{ Logger, LoggerFactory }
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.ref.WeakReference
import scala.util.Try
/*
* This can be a bit tricky to use, but it is important that incBy and inc
* are called INSIDE any map or reduce functions.
* Like:
* val stat = Stat("test")
* .map { x =>
* stat.inc
* 2 * x
* }
* NOT: map( { stat.inc; { x => 2*x } } )
* which increments on the submitter before creating the function. See the difference?
*/
trait Stat extends java.io.Serializable {
/**
* increment by the given amount
*/
def incBy(amount: Long): Unit
/** increment by 1L */
def inc: Unit = incBy(1L)
/** increment by -1L (decrement) */
def dec: Unit = incBy(-1L)
def key: StatKey
}
case class StatKey(counter: String, group: String) extends java.io.Serializable
object StatKey {
// This is implicit to allow Stat("c", "g") to work.
implicit def fromCounterGroup(counterGroup: (String, String)): StatKey = counterGroup match {
case (c, g) => StatKey(c, g)
}
// Create a StatKey in the ScaldingGroup
implicit def fromCounterDefaultGroup(counter: String): StatKey =
StatKey(counter, Stats.ScaldingGroup)
implicit def fromStat(stat: Stat): StatKey = stat.key
}
private[scalding] object CounterImpl {
def apply(fp: FlowProcess[_], statKey: StatKey): CounterImpl =
fp match {
case hFP: HadoopFlowProcess => HadoopFlowPCounterImpl(hFP, statKey)
case _ => GenericFlowPCounterImpl(fp, statKey)
}
}
sealed private[scalding] trait CounterImpl {
def increment(amount: Long): Unit
}
private[scalding] case class GenericFlowPCounterImpl(fp: FlowProcess[_], statKey: StatKey) extends CounterImpl {
override def increment(amount: Long): Unit = fp.increment(statKey.group, statKey.counter, amount)
}
private[scalding] case class HadoopFlowPCounterImpl(fp: HadoopFlowProcess, statKey: StatKey) extends CounterImpl {
private[this] val cntr = fp.getReporter().getCounter(statKey.group, statKey.counter)
override def increment(amount: Long): Unit = cntr.increment(amount)
}
object Stat {
def apply(k: StatKey)(implicit uid: UniqueID): Stat = new Stat {
// This is materialized on the mappers, and will throw an exception if users incBy before then
private[this] lazy val cntr = CounterImpl(RuntimeStats.getFlowProcessForUniqueId(uid), k)
def incBy(amount: Long): Unit = cntr.increment(amount)
def key: StatKey = k
}
}
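// Hypothetical usage sketch (not in the original file), following the guidance
// in the comment above trait Stat: the counter must be incremented INSIDE the
// map function so it runs on the mappers. `pipe` and an implicit UniqueID are
// assumed to be in scope.
//
// val rejected = Stat("rejected") // group defaults to Stats.ScaldingGroup
// pipe.map { x =>
//   if (x < 0) rejected.inc
//   x
// }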
object Stats {
// This is the group that we assign all custom counters to
val ScaldingGroup = "Scalding Custom"
// When getting a counter value, the implicitly supplied CascadingStats
// (flow or cascade stats) is queried for the given key.
def getCounterValue(key: StatKey)(implicit cascadingStats: CascadingStats): Long =
cascadingStats.getCounterValue(key.group, key.counter)
// Returns a map of all custom counter names and their counts.
def getAllCustomCounters()(implicit cascadingStats: CascadingStats): Map[String, Long] = {
val counts = for {
counter <- cascadingStats.getCountersFor(ScaldingGroup).asScala
value = getCounterValue(counter)
} yield (counter, value)
counts.toMap
}
}
/**
* Used to inject a typed unique identifier to uniquely name each scalding flow.
* This is here mostly to deal with the case of testing where there are many
* concurrent threads running Flows. Users should never have to worry about
* these
*/
case class UniqueID(get: String) {
assert(get.indexOf(',') == -1, "UniqueID cannot contain ,: " + get)
}
object UniqueID {
val UNIQUE_JOB_ID = "scalding.job.uniqueId"
private val id = new java.util.concurrent.atomic.AtomicInteger(0)
def getRandom: UniqueID = {
// This number is unique as long as we don't create more than 10^6 per milli
// across separate jobs, which seems very unlikely.
val unique = (System.currentTimeMillis << 20) ^ (id.getAndIncrement.toLong)
UniqueID(unique.toString)
}
implicit def getIDFor(implicit fd: FlowDef): UniqueID =
/*
* In real deploys, this can even be a constant, but for testing
* we need to allocate unique IDs to prevent different jobs running
* at the same time from touching each other's counters.
*/
UniqueID(System.identityHashCode(fd).toString)
}
/**
* Wrapper around a FlowProcess, useful e.g. for incrementing counters.
*/
object RuntimeStats extends java.io.Serializable {
@transient private lazy val logger: Logger = LoggerFactory.getLogger(this.getClass)
private val flowMappingStore: mutable.Map[String, WeakReference[FlowProcess[_]]] = {
(new ConcurrentHashMap[String, WeakReference[FlowProcess[_]]]).asScala
}
def getFlowProcessForUniqueId(uniqueId: UniqueID): FlowProcess[_] = {
(for {
weakFlowProcess <- flowMappingStore.get(uniqueId.get)
flowProcess <- weakFlowProcess.get
} yield {
flowProcess
}).getOrElse {
sys.error("Error in job deployment, the FlowProcess for unique id %s isn't available".format(uniqueId))
}
}
private[this] var prevFP: FlowProcess[_] = null
def addFlowProcess(fp: FlowProcess[_]) {
if (!(prevFP eq fp)) {
val uniqueJobIdObj = fp.getProperty(UniqueID.UNIQUE_JOB_ID)
if (uniqueJobIdObj != null) {
// for speed, use a while loop instead of foreach here
var splitted = StringUtility.fastSplit(uniqueJobIdObj.asInstanceOf[String], ",")
while (!splitted.isEmpty) {
val uniqueId = splitted.head
splitted = splitted.tail
logger.debug("Adding flow process id: " + uniqueId)
flowMappingStore.put(uniqueId, new WeakReference(fp))
}
}
prevFP = fp
}
}
/**
* For serialization, you may need to do:
* val keepAlive = RuntimeStats.getKeepAliveFunction
* outside of a closure passed to map/etc..., and then call:
* keepAlive()
* inside of your closure (mapping, reducing function)
*/
def getKeepAliveFunction(implicit flowDef: FlowDef): () => Unit = {
// Don't capture the flowDef, just the id
val id = UniqueID.getIDFor(flowDef)
() => {
val flowProcess = RuntimeStats.getFlowProcessForUniqueId(id)
flowProcess.keepAlive
}
}
}
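// Hypothetical sketch (not in the original file) of the keep-alive pattern
// documented above getKeepAliveFunction: capture the function outside the
// closure, call it inside. `flowDef`, `items` and `process` are assumed names.
//
// val keepAlive = RuntimeStats.getKeepAliveFunction(flowDef)
// items.map { x =>
//   keepAlive() // pings the FlowProcess so long-running tasks are not killed
//   process(x)
// }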
/**
* FlowListener that checks counter values against a function.
*/
class StatsFlowListener(f: Map[StatKey, Long] => Try[Unit]) extends FlowListener {
private var success = true
override def onCompleted(flow: Flow[_]): Unit = {
if (success) {
val stats = flow.getFlowStats
val keys = stats.getCounterGroups.asScala.flatMap(g => stats.getCountersFor(g).asScala.map(c => StatKey(c, g)))
val values = keys.map(k => (k, stats.getCounterValue(k.group, k.counter))).toMap
f(values).get
}
}
override def onThrowable(flow: Flow[_], throwable: Throwable): Boolean = {
success = false
false
}
override def onStarting(flow: Flow[_]): Unit = {}
override def onStopping(flow: Flow[_]): Unit = {}
}
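// Hypothetical attachment sketch (not in the original file): a cascading Flow
// accepts FlowListeners, so a check over the final counter values might look
// like this (assuming a Flow[_] named `flow`):
//
// flow.addListener(new StatsFlowListener(counters => scala.util.Try(println(counters))))
// flow.complete()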
| tglstory/scalding | scalding-core/src/main/scala/com/twitter/scalding/Stats.scala | Scala | apache-2.0 | 7,231 |
package org.apache.spark.examples.mllib
import org.apache.spark.SparkContext
import org.apache.spark.mllib.classification.SVMWithSGD
import org.apache.spark.mllib.evaluation.BinaryClassificationMetrics
import org.apache.spark.mllib.regression.LabeledPoint
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.util.MLUtils
import org.apache.spark.SparkConf
/**
* Spark cookbook p127
*/
object SVMWithSGDExample {
def main(args: Array[String]) {
val conf = new SparkConf().setAppName("SVMWithSGDExample").setMaster("local[4]")
val sc = new SparkContext(conf)
// load the data into an RDD
val svmData = MLUtils.loadLibSVMFile(sc, "../data/mllib/sample_libsvm_data.txt")
// count the number of records
svmData.count
// split the dataset in half: one half for training, the other half for testing
val trainingAndTest = svmData.randomSplit(Array(0.5, 0.5))
// assign the training data and the test data
val trainingData = trainingAndTest(0)
val testData = trainingAndTest(1)
// train the algorithm and build the model over 100 iterations (SGD: stochastic gradient descent)
val model = SVMWithSGD.train(trainingData, 100)
// use the model to predict the label of any data point; test it on the first point of the test data
val label = model.predict(testData.first.features)
// create tuples whose first element is the predicted label for the test data and whose second element is the actual label
val predictionsAndLabels = testData.map(r => (model.predict(r.features), r.label))
// count the records whose predicted label does not match the actual label
predictionsAndLabels.filter(p => p._1 != p._2).count
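// A possible extension (not in the original example): the imported but unused
// BinaryClassificationMetrics could summarize prediction quality, e.g.
// val metrics = new BinaryClassificationMetrics(predictionsAndLabels)
// println("Area under ROC = " + metrics.areaUnderROC())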
}
}
| tophua/spark1.52 | examples/src/main/scala/org/apache/spark/examples/mllib/SVMWithSGDExample.scala | Scala | apache-2.0 | 1,614 |
package rpm4s.repo.data.updateinfo
import java.time.Instant
import cats.Id
import cats.implicits._
import rpm4s.data.{CVE, _}
case class UpdateF[F[_]](
from: F[String],
status: F[UpdateF.Status],
tpe: F[UpdateF.UpdateType],
//TODO: find out what are valid values for version
version: F[String],
id: F[String],
title: F[String],
severity: F[UpdateF.Severity],
release: F[String],
issued: F[Instant],
references: F[Set[UpdateF.Reference]],
description: F[String],
packages: F[Set[UpdateF.PackageF.Package]]
)
object UpdateF {
type Update = UpdateF[cats.Id]
type UpdateBuilder = UpdateF[Option]
sealed trait Reference extends Product with Serializable {
def href: String
def id: String
def title: String
}
case class Bugzilla(
href: String,
id: String,
title: String
) extends Reference
case class CVERef(
href: String,
cve: CVE,
title: String
) extends Reference {
def id: String = cve.string
}
case class Fate(
href: String,
id: String,
title: String
) extends Reference
case class Self(
href: String,
title: String,
id: String
) extends Reference
sealed trait Status extends Product with Serializable
object Status {
def fromString(value: String): Option[Status] = value match {
case "stable" => Some(Stable)
case "final" => Some(Final)
case "retracted" => Some(Retracted)
case "testing" => Some(Testing)
case _ => None
}
def toString(value: Status): String = value match {
case Stable => "stable"
case Final => "final"
case Testing => "testing"
case Retracted => "retracted"
}
case object Stable extends Status
case object Testing extends Status
case object Final extends Status
case object Retracted extends Status
}
sealed trait Severity extends Product with Serializable
object Severity {
def fromString(value: String): Option[Severity] = value.toLowerCase match {
case "critical" => Some(Critical)
case "important" => Some(Important)
case "moderate" => Some(Moderate)
case "low" => Some(Low)
case _ => None
}
def toString(value: Severity): String = value match {
case Important => "important"
case Moderate => "moderate"
case Low => "low"
case Critical => "critical"
}
case object Critical extends Severity
case object Important extends Severity
case object Moderate extends Severity
case object Low extends Severity
}
sealed trait UpdateType extends Product with Serializable
object UpdateType {
def fromString(value: String): Option[UpdateType] = value match {
case "recommended" => Some(Recommended)
case "security" => Some(Security)
case "optional" => Some(Optional)
case "feature" => Some(Feature)
case "enhancement" => Some(Enhancement)
case "bugfix" => Some(Bugfix)
case _ => None
}
def toString(value: UpdateType): String = value match {
case Recommended => "recommended"
case Security => "security"
case Optional => "optional"
case Feature => "feature"
case Enhancement => "enhancement"
case Bugfix => "bugfix"
}
case object Recommended extends UpdateType
case object Security extends UpdateType
case object Optional extends UpdateType
case object Feature extends UpdateType
case object Enhancement extends UpdateType
case object Bugfix extends UpdateType
}
case class PackageF[F[_]](
name: F[Name],
version: F[Version],
release: F[Release],
epoch: F[Epoch],
//TODO: find out how this arch relates to rpm arch
arch: F[Architecture],
src: F[Option[String]],
//TODO: find out if filename is always also in src
filename: F[String],
restartSuggested: F[Boolean],
rebootSuggested: F[Boolean],
reloginSuggested: F[Boolean]
)
object PackageF {
type Package = PackageF[cats.Id]
type PackageBuilder = PackageF[Option]
object PackageBuilder {
def build(packageBuilder: PackageBuilder): Option[Package] = {
(packageBuilder.name,
packageBuilder.version,
packageBuilder.release,
packageBuilder.epoch,
packageBuilder.arch,
packageBuilder.src,
packageBuilder.filename,
packageBuilder.restartSuggested,
packageBuilder.rebootSuggested,
packageBuilder.reloginSuggested
).mapN(PackageF[Id])
}
val empty: PackageBuilder = apply()
def apply(
name: Option[Name] = None,
version: Option[Version] = None,
release: Option[Release] = None,
epoch: Option[Epoch] = None,
arch: Option[Architecture] = None,
src: Option[Option[String]] = None,
filename: Option[String] = None,
restartSuggested: Option[Boolean] = Some(false),
rebootSuggested: Option[Boolean] = Some(false),
reloginSuggested: Option[Boolean] = Some(false)
): PackageBuilder = PackageF(
name, version, release, epoch, arch,
src, filename, restartSuggested, rebootSuggested, reloginSuggested
)
}
}
object UpdateBuilder {
def build(updateBuilder: UpdateBuilder): Option[Update] = {
(updateBuilder.from,
updateBuilder.status,
updateBuilder.tpe,
updateBuilder.version,
updateBuilder.id,
updateBuilder.title,
updateBuilder.severity,
updateBuilder.release,
updateBuilder.issued,
updateBuilder.references,
updateBuilder.description,
updateBuilder.packages
).mapN(UpdateF[Id])
}
val empty: UpdateBuilder = apply()
def apply(
from: Option[String] = None,
status: Option[Status] = None,
tpe: Option[UpdateType] = None,
version: Option[String] = None,
id: Option[String] = None,
title: Option[String] = None,
severity: Option[Severity] = None,
release: Option[String] = None,
issued: Option[Instant] = None,
description: Option[String] = None,
references: Option[Set[Reference]] = None,
packages: Option[Set[UpdateF.PackageF.Package]] = None
): UpdateBuilder = UpdateF(
from, status, tpe, version, id, title,
severity, release, issued, references,
description, packages
)
}
}
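// Hypothetical round-trip sketch (not in the original file): the string codecs
// above are mutual inverses on their supported values, e.g.
// UpdateF.Status.fromString("stable").map(UpdateF.Status.toString) == Some("stable")
// while UpdateF.Status.fromString("unknown") == None.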
| lucidd/rpm4s | repo-utils/shared/src/main/scala/rpm4s/repo/data/updateinfo/UpdateF.scala | Scala | mit | 6,368 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn
import com.intel.analytics.bigdl.nn.BinaryTreeLSTM.{apply => _}
import com.intel.analytics.bigdl.nn.Reshape.{apply => _, createBigDLModule => _, createSerializeBigDLModule => _, getClass => _}
import com.intel.analytics.bigdl.nn.abstractnn.{AbstractModule, Activity}
import com.intel.analytics.bigdl.nn.{Module => _, _}
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.transform.vision.image.util.BboxUtil
import com.intel.analytics.bigdl.utils.Table
import org.apache.log4j.Logger
import DetectionOutputSSD.logger
import scala.reflect.ClassTag
/**
* Layer to Post-process SSD output
* @param nClasses number of classes
* @param shareLocation whether to share location, default is true
* @param bgLabel background label
* @param nmsThresh nms threshold
* @param nmsTopk nms topk
* @param keepTopK result topk
* @param confThresh confidence threshold
* @param varianceEncodedInTarget if variance is encoded in target,
* we simply need to restore the offset predictions,
* else if variance is encoded in bbox,
* we need to scale the offset accordingly.
* @param confPostProcess whether to apply additional post-processing to the confidence prediction
* @tparam T Numeric type of parameter (e.g. weight, bias). Only float/double are supported now
*/
@SerialVersionUID(5253792953255433914L)
class DetectionOutputSSD[T: ClassTag](val nClasses: Int = 21,
val shareLocation: Boolean = true,
val bgLabel: Int = 0,
val nmsThresh: Float = 0.45f,
val nmsTopk: Int = 400,
var keepTopK: Int = 200,
val confThresh: Float = 0.01f,
val varianceEncodedInTarget: Boolean = false,
val confPostProcess: Boolean = true)
(implicit ev: TensorNumeric[T])
extends AbstractModule[Table, Activity, T] {
@transient private var nms: Nms = _
def setTopK(topK: Int): this.type = {
keepTopK = topK
this
}
private def filterBboxes(decodedBboxes: Array[Tensor[Float]],
confScores: Array[Tensor[Float]], indices: Array[Array[Int]],
indicesNum: Array[Int]): Int = {
var numDet = 0
var c = 0
while (c < nClasses) {
if (c != bgLabel) {
val scores = confScores(c)
if (scores.nElement() == 0) {
logger.warn(s"Could not find confidence predictions for label $c")
}
val label = if (shareLocation) decodedBboxes.length - 1 else c
val bboxes = decodedBboxes(label)
if (bboxes == null || bboxes.nElement() == 0) {
logger.warn(s"Could not find location predictions for label $label")
return 0
}
indicesNum(c) = nms.nmsFast(scores, bboxes, nmsThresh,
confThresh, indices(c), nmsTopk, normalized = true)
numDet += indicesNum(c)
}
c += 1
}
if (keepTopK > -1 && numDet > keepTopK) {
val scoreClassIndex = new Array[(Float, Int, Int)](numDet)
var c = 0
var count = 0
while (c < indices.length) {
var j = 0
while (j < indicesNum(c)) {
val idx = indices(c)(j)
scoreClassIndex(count) = (confScores(c).valueAt(idx), c, idx)
count += 1
j += 1
}
indicesNum(c) = 0
c += 1
}
// keep top k results per image
val sortedPairs = scoreClassIndex.sortBy(x => -x._1)
var i = 0
while (i < keepTopK) {
val label = sortedPairs(i)._2
val idx = sortedPairs(i)._3
indices(label)(indicesNum(label)) = idx
indicesNum(label) += 1
i += 1
}
keepTopK
} else {
numDet
}
}
@transient private var allLocPreds: Array[Array[Tensor[Float]]] = _
@transient private var allConfScores: Array[Array[Tensor[Float]]] = _
@transient private var allIndices: Array[Array[Array[Int]]] = _
@transient private var allIndicesNum: Array[Array[Int]] = _
private def init(batch: Int, numLocClasses: Int, nPriors: Int): Unit = {
var i = 0
if (allLocPreds == null || allLocPreds.length < batch) {
// the outer array is the batch, each img contains an array of results, grouped by class
allLocPreds = new Array[Array[Tensor[Float]]](batch)
allConfScores = new Array[Array[Tensor[Float]]](batch)
allIndices = new Array[Array[Array[Int]]](batch)
allIndicesNum = new Array[Array[Int]](batch)
i = 0
while (i < batch) {
allLocPreds(i) = new Array[Tensor[Float]](numLocClasses)
allConfScores(i) = new Array[Tensor[Float]](nClasses)
allIndices(i) = new Array[Array[Int]](nClasses)
allIndicesNum(i) = new Array[Int](nClasses)
var c = 0
while (c < numLocClasses) {
allLocPreds(i)(c) = Tensor[Float](nPriors, 4)
c += 1
}
c = 0
while (c < nClasses) {
allConfScores(i)(c) = Tensor[Float](nPriors)
if (c != bgLabel) allIndices(i)(c) = new Array[Int](nPriors)
c += 1
}
i += 1
}
} else {
i = 0
while (i < batch) {
var c = 0
while (c < numLocClasses) {
allLocPreds(i)(c).resize(nPriors, 4)
c += 1
}
c = 0
while (c < nClasses) {
allConfScores(i)(c).resize(nPriors)
if (c != bgLabel && allIndices(i)(c).length < nPriors) {
allIndices(i)(c) = new Array[Int](nPriors)
}
c += 1
}
i += 1
}
}
}
private val confPost = if (confPostProcess) {
Sequential[T]()
.add(InferReshape[T](Array(0, -1, nClasses)).setName("mbox_conf_reshape"))
.add(TimeDistributed[T](SoftMax[T]()).setName("mbox_conf_softmax"))
.add(InferReshape[T](Array(0, -1)).setName("mbox_conf_flatten"))
} else {
null
}
override def updateOutput(input: Table): Activity = {
if (isTraining()) {
output = input
return output
}
if (nms == null) nms = new Nms()
val loc = input[Tensor[Float]](1)
val conf = if (confPostProcess) {
confPost.forward(input[Tensor[Float]](2)).toTensor[Float]
} else {
input[Tensor[Float]](2)
}
val prior = input[Tensor[Float]](3)
val batch = loc.size(1)
val numLocClasses = if (shareLocation) 1 else nClasses
val nPriors = prior.size(3) / 4
var i = 0
init(batch, numLocClasses, nPriors)
BboxUtil.getLocPredictions(loc, nPriors, numLocClasses, shareLocation,
allLocPreds)
BboxUtil.getConfidenceScores(conf, nPriors, nClasses, allConfScores)
val (priorBoxes, priorVariances) = BboxUtil.getPriorBboxes(prior, nPriors)
val allDecodedBboxes = BboxUtil.decodeBboxesAll(allLocPreds, priorBoxes, priorVariances,
numLocClasses, bgLabel, false, varianceEncodedInTarget, shareLocation,
allLocPreds)
val numKepts = new Array[Int](batch)
var maxDetection = 0
i = 0
while (i < batch) {
val num = filterBboxes(allDecodedBboxes(i), allConfScores(i),
allIndices(i), allIndicesNum(i))
numKepts(i) = num
maxDetection = Math.max(maxDetection, num)
i += 1
}
// the first element of each row is the number of detections
val out = Tensor[Float](batch, 1 + maxDetection * 6)
if (numKepts.sum > 0) {
i = 0
while (i < batch) {
val outi = out(i + 1)
var c = 0
outi.setValue(1, numKepts(i))
var offset = 2
while (c < allIndices(i).length) {
val indices = allIndices(i)(c)
if (indices != null) {
val indicesNum = allIndicesNum(i)(c)
val locLabel = if (shareLocation) allDecodedBboxes(i).length - 1 else c
val bboxes = allDecodedBboxes(i)(locLabel)
var j = 0
while (j < indicesNum) {
val idx = indices(j)
outi.setValue(offset, c)
outi.setValue(offset + 1, allConfScores(i)(c).valueAt(idx))
outi.setValue(offset + 2, bboxes.valueAt(idx, 1))
outi.setValue(offset + 3, bboxes.valueAt(idx, 2))
outi.setValue(offset + 4, bboxes.valueAt(idx, 3))
outi.setValue(offset + 5, bboxes.valueAt(idx, 4))
offset += 6
j += 1
}
}
c += 1
}
i += 1
}
}
output = out
output
}
override def updateGradInput(input: Table, gradOutput: Activity): Table = {
gradInput = gradOutput.toTable
gradInput
}
override def clearState(): DetectionOutputSSD.this.type = {
nms = null
allLocPreds = null
allConfScores = null
allIndices = null
allIndicesNum = null
if (null != confPost) confPost.clearState()
this
}
}
object DetectionOutputSSD {
val logger = Logger.getLogger(getClass)
def apply[@specialized(Float) T: ClassTag]
(param: DetectionOutputParam, postProcess: Boolean = true)
(implicit ev: TensorNumeric[T]): DetectionOutputSSD[T] =
new DetectionOutputSSD[T](param.nClasses,
param.shareLocation,
param.bgLabel,
param.nmsThresh,
param.nmsTopk,
param.keepTopK,
param.confThresh,
param.varianceEncodedInTarget,
postProcess)
}
case class DetectionOutputParam(nClasses: Int = 21, shareLocation: Boolean = true, bgLabel: Int = 0,
nmsThresh: Float = 0.45f, nmsTopk: Int = 400, var keepTopK: Int = 200,
confThresh: Float = 0.01f,
varianceEncodedInTarget: Boolean = false)
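// Hypothetical construction sketch (not in the original file): the companion
// factory builds the layer from a parameter object; Float is the only
// @specialized element type, and BigDL's implicit TensorNumeric[Float] is assumed
// to be in scope.
//
// val layer = DetectionOutputSSD[Float](DetectionOutputParam(nClasses = 21, keepTopK = 100))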
| qiuxin2012/BigDL | spark/dl/src/main/scala/com/intel/analytics/bigdl/nn/DetectionOutputSSD.scala | Scala | apache-2.0 | 10,124 |
package controllers.auth
import javax.inject.Inject
import com.mohiva.play.silhouette.api._
import com.mohiva.play.silhouette.api.exceptions.ProviderException
import com.mohiva.play.silhouette.api.repositories.AuthInfoRepository
import com.mohiva.play.silhouette.impl.authenticators.CookieAuthenticator
import com.mohiva.play.silhouette.impl.providers._
import models.User
import models.services.UserService
import play.api.i18n.{Messages, MessagesApi}
import play.api.libs.concurrent.Execution.Implicits._
import play.api.mvc.Action
import scala.concurrent.Future
/**
* The social auth controller.
*
* @param messagesApi The Play messages API.
* @param env The Silhouette environment.
* @param userService The user service implementation.
* @param authInfoRepository The auth info repository implementation.
* @param socialProviderRegistry The social provider registry.
*/
class SocialAuthController @Inject() (
val messagesApi: MessagesApi,
val env: Environment[User, CookieAuthenticator],
userService: UserService,
authInfoRepository: AuthInfoRepository,
socialProviderRegistry: SocialProviderRegistry)
extends Silhouette[User, CookieAuthenticator] with Logger {
/**
* Authenticates a user against a social provider.
*
* @param provider The ID of the provider to authenticate against.
* @return The result to display.
*/
def authenticate(provider: String) = Action.async { implicit request =>
(socialProviderRegistry.get[SocialProvider](provider) match {
case Some(p: SocialProvider with CommonSocialProfileBuilder) =>
p.authenticate().flatMap {
case Left(result) => Future.successful(result)
case Right(authInfo) => for {
profile <- p.retrieveProfile(authInfo)
user <- userService.save(profile)
authInfo <- authInfoRepository.save(profile.loginInfo, authInfo)
authenticator <- env.authenticatorService.create(profile.loginInfo)
value <- env.authenticatorService.init(authenticator)
result <- env.authenticatorService.embed(value, Redirect("/"))
} yield {
env.eventBus.publish(LoginEvent(user, request, request2Messages))
result
}
}
case _ => Future.failed(new ProviderException(s"Cannot authenticate with unexpected social provider $provider"))
}).recover {
case e: ProviderException =>
logger.error("Unexpected provider error", e)
Redirect(routes.AuthenticationController.signIn()).flashing("error" -> Messages("could.not.authenticate"))
}
}
}
| OpenCompare/OpenCompare | org.opencompare/play-app/app/controllers/auth/SocialAuthController.scala | Scala | apache-2.0 | 2,590 |
package net.aruneko.redmineforspigot
import dispatch._
import dispatch.Defaults._
import org.bukkit.ChatColor
import org.bukkit.command.{Command, CommandExecutor, CommandSender, TabCompleter}
import collection.JavaConverters._
import scala.util.{Failure, Success}
/**
* Class that executes commands related to Redmine tickets.
*/
class IssueCommandExecutor(config: Configuration) extends CommandExecutor with TabCompleter {
/**
* Executes a command.
*
* @param sender the command sender
* @param cmd the command that was sent
* @param label the command label
* @param args the command arguments
* @return true if a command was executed, false otherwise
*/
override def onCommand(sender: CommandSender, cmd: Command, label: String, args: Array[String]): Boolean = {
Utils.canExecCommand(sender, config) match {
case Right(a) => execCommand(sender, args)
case Left(e) =>
sender.sendMessage(e)
true
}
}
/**
* Dispatches the command to the appropriate handler.
* @param sender the command sender
* @param args the command arguments
* @return
*/
def execCommand(sender: CommandSender, args: Array[String]): Boolean = {
args match {
case Array(arg) if arg.equalsIgnoreCase("list") =>
printIssueList(sender)
case Array(num) if Utils.stringToInt(num).isDefined =>
issueDetails(sender, Utils.stringToInt(num).get)
case Array(arg, num) if arg.equalsIgnoreCase("list") && Utils.stringToInt(num).isDefined =>
issueListByProjectId(sender, Utils.stringToInt(num).get)
case Array(arg, _, _, _, _*) if arg.equalsIgnoreCase("time") && checkTimeCommandArgs(args) =>
createNewTimeEntry(sender, args)
case Array(arg, _, _, _, _, _*) if arg.equalsIgnoreCase("new") && checkNewCommandArgs(args) =>
pushNewIssue(sender, args)
case _ => false
}
}
/**
* Implements tab completion.
* @param sender the command sender
* @param cmd the command that was sent
* @param alias the command alias
* @param args the command arguments
* @return the completion candidates
*/
override def onTabComplete(sender: CommandSender, cmd: Command, alias: String, args: Array[String]): java.util.List[String] = {
args.length match {
case 1 if args(0).length == 0 => List("list", "time", "new").asJava
case 1 if "list".startsWith(args(0)) => List("list").asJava
case 1 if "new".startsWith(args(0)) => List("new").asJava
case 1 if "time".startsWith(args(0)) => List("time").asJava
case _ => List("").asJava
}
}
/**
* Command that fetches the list of tickets.
*
* @param sender the command sender
* @return
*/
def printIssueList(sender: CommandSender): Boolean = {
Utils.fetchXmlByApiKey(sender, config, "issues.xml") match {
case Left(e) =>
// report that fetching the tickets failed
sender.sendMessage(e)
case Right(xml) =>
// send the header messages
sender.sendMessage(ChatColor.AQUA + "===== Issues List =====")
sender.sendMessage("Project ID : Issue ID : Issue Subject")
// display every ticket
val issues = xml \\ "issue"
issues foreach {
issue => {
val issueId = issue \ "id"
val pid = issue \ "project" \ "@id"
val subject = issue \ "subject"
sender.sendMessage(pid.text + " : " + issueId.text + " : " + subject.text)
}
}
}
true
}
/**
* Command that fetches the list of tickets for the given project ID.
*
* @param sender the command sender
* @param projectId the project ID
* @return
*/
def issueListByProjectId(sender: CommandSender, projectId: Int): Boolean = {
Utils.fetchXmlByApiKey(sender, config, "issues.xml?project_id=" + projectId.toString) match {
case Left(e) =>
// report that fetching the tickets failed
sender.sendMessage(e)
case Right(xml) =>
// send the header messages
sender.sendMessage(ChatColor.AQUA + "===== Issues List =====")
sender.sendMessage("Issue ID : Issue Subject")
// display every ticket in the project
val issues = xml \\ "issue"
issues foreach {
issue => {
val issueId = issue \ "id"
val subject = issue \ "subject"
sender.sendMessage(issueId.text + " : " + subject.text)
}
}
}
true
}
/**
* Command that displays the details of a ticket.
*
* @param sender the command sender
* @param issueId the ticket ID
* @return
*/
def issueDetails(sender: CommandSender, issueId: Int): Boolean = {
Utils.fetchXmlByApiKey(sender, config, "issues/" + issueId.toString + ".xml") match {
case Left(e) =>
// report that fetching the ticket failed
sender.sendMessage(e)
case Right(xml) =>
// pull the response apart
val issue = xml \\ "issue"
val project = issue \ "project" \ "@name"
val tracker = issue \ "tracker" \ "@name"
val status = issue \ "status" \ "@name"
val priority = issue \ "priority" \ "@name"
val subject = issue \ "subject"
val description = issue \ "description"
val startDate = issue \ "start_date"
val dueDate = issue \ "due_date"
val doneRatio = issue \ "done_ratio"
// display the ticket details
sender.sendMessage(ChatColor.AQUA + "===== Details of Issue \"" + subject.text + "\" =====")
sender.sendMessage("Issue ID : " + issueId)
sender.sendMessage("Project name : " + project.text)
sender.sendMessage("Tracker : " + tracker.text)
sender.sendMessage("Status : " + status.text)
sender.sendMessage("Priority : " + priority.text)
sender.sendMessage("Description : " + description.text)
sender.sendMessage("Start date : " + startDate.text)
sender.sendMessage("Due date : " + dueDate.text)
sender.sendMessage("Done ratio : " + doneRatio.text + "%")
}
true
}
/**
* Validates the arguments for new-issue mode.
*
* @param args the arguments to check
* @return
*/
def checkNewCommandArgs(args: Array[String]): Boolean = {
args match {
case Array(_, projectId, _*) if Utils.stringToInt(projectId).isEmpty => false
case Array(_, _, trackerId, _*) if Utils.stringToInt(trackerId).isEmpty => false
case Array(_, _, _, priorityId, _*) if Utils.stringToInt(priorityId).isEmpty => false
case _ => true
}
}
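// Hypothetical checks (not in the original file) illustrating the guards above:
// checkNewCommandArgs(Array("new", "1", "2", "3", "subject")) // => true
// checkNewCommandArgs(Array("new", "x", "2", "3", "subject")) // => false, projectId is not numeric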
/**
* Command that creates a new ticket.
*
* @param sender the command sender
* @param args the arguments needed to create it
* @return
*/
def pushNewIssue(sender: CommandSender, args: Array[String]): Boolean = {
// normalize the arguments
val projectId = Utils.stringToInt(args(1)).get
val trackerId = Utils.stringToInt(args(2)).get
val priorityId = Utils.stringToInt(args(3)).get
val subject = args.slice(4, args.length + 1).mkString(" ")
// assemble the XML
val reqXML =
s"""<?xml version="1.0"?>
|<issue>
| <project_id>$projectId</project_id>
| <tracker_id>$trackerId</tracker_id>
| <subject>$subject</subject>
| <priority_id>$priorityId</priority_id>
|</issue>
""".stripMargin
// assemble the request
val headers = config.getApiKey(sender) match {
case Right(key) => Map("Content-type" -> "text/xml; charset=UTF-8", "X-Redmine-API-Key" -> key)
case Left(e) =>
sender.sendMessage(e)
Map("Content-type" -> "text/xml; charset=UTF-8")
}
val reqUrl = url(config.url + "issues.xml") << reqXML <:< headers
val res = Http(reqUrl OK as.String)
// show a message depending on the result
res.onComplete {
case Success(_) => sender.sendMessage("Success!")
case Failure(_) => sender.sendMessage("Failed to add an issue.")
}
true
}
/**
* Validates the arguments of the time command.
* @param args the command arguments
* @return
*/
def checkTimeCommandArgs(args: Array[String]): Boolean = {
args match {
case Array(_, issueId, _*) if Utils.stringToInt(issueId).isEmpty => false
case Array(_, _, hours, _*) if Utils.stringToDouble(hours).isEmpty => false
case Array(_, _, _, activityId, _*) if Utils.stringToInt(activityId).isEmpty => false
case _ => true
}
}
/**
* Command that records working time.
* @param sender the player who executed the command
* @param args the command arguments
* @return
*/
def createNewTimeEntry(sender: CommandSender, args: Array[String]): Boolean = {
// normalize the arguments
val issueId = Utils.stringToInt(args(1)).get
val hours = Utils.stringToDouble(args(2)).get
val activityId = Utils.stringToInt(args(3)).get
val comments = if (args.length >= 5) {
args.slice(4, args.length + 1).mkString(" ")
} else {
""
}
// assemble the XML
val reqXML =
s"""<?xml version="1.0"?>
|<time_entry>
| <issue_id>$issueId</issue_id>
| <hours>$hours</hours>
| <activity_id>$activityId</activity_id>
| <comments>$comments</comments>
|</time_entry>
""".stripMargin
// assemble the request
val headers = config.getApiKey(sender) match {
case Right(key) => Map("Content-type" -> "text/xml; charset=UTF-8", "X-Redmine-API-Key" -> key)
case Left(e) =>
sender.sendMessage(e)
Map("Content-type" -> "text/xml; charset=UTF-8")
}
val reqUrl = url(config.url + "time_entries.xml") << reqXML <:< headers
val res = Http(reqUrl OK as.String)
// show a message depending on the result
res.onComplete {
case Success(_) => sender.sendMessage("Success!")
case Failure(_) => sender.sendMessage("Failed to add a time entry.")
}
true
}
}
| aruneko/RedmineForSpigot | src/main/scala/net/aruneko/redmineforspigot/IssueCommandExecutor.scala | Scala | mit | 10,187 |
package ammonite
object TestMain{
def main(args: Array[String]): Unit = {
System.setProperty("ammonite-sbt-build", "true")
val homeFlag = Array("--home", "target/tempAmmoniteHome")
args match{
case Array(first, rest@_*) if first.startsWith("--") => Main.main(args ++ homeFlag)
case Array(first, rest@_*) => Main.main(Array(first) ++ homeFlag ++ Array("--") ++ rest)
case _ => Main.main(homeFlag ++ args)
}
}
}
| alexarchambault/ammonium | amm/src/test/scala/ammonite/TestMain.scala | Scala | mit | 448 |
/*
* Copyright 2015 The SIRIS Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* The SIRIS Project is a cooperation between Beuth University, Berlin and the
* HCI Group at the University of Würzburg. The project is funded by the German
* Federal Ministry of Education and Research (grant no. 17N4409).
*/
package simx.components.ai.mipro.supervisedlearning.slprovider
import simx.components.ai.mipro.supervisedlearning.Mode
import simx.components.ai.mipro.supervisedlearning.slprovider.neuralnetwork.{NeuralNetworkConfiguration, NeuralNetwork}
/**
* Factory class that returns instances of supervised learning methods.
*
* @author Thomas Krause
*/
object SlFactory {
/**
* Get a neural network instance created according to the passed configuration object.
*
* @param aConfigurationObj
* @return
*/
def getNeuralNetworkInstance(aConfigurationObj: NeuralNetworkConfiguration): NeuralNetwork =
{
new NeuralNetwork(aConfigurationObj)
}
/* FUTURE VISION/EXAMPLE
def getSupportVectorMachineInstance(aConfigurationObj: SupportVectorMachineConfiguration): SupportVectorMachine =
{
SupportVectorMachine(SupportVectorMachineConfiguration)
}
*/
}
| simulator-x/feature | src/simx/components/ai/mipro/supervisedlearning/slprovider/SlFactory.scala | Scala | apache-2.0 | 1,750 |
package com.googlecode.kanbanik
import java.util.ArrayList
import com.mongodb.DBObject
package object commons {
type JavaList[T] = java.util.List[T]
class ToJavaConvertableList[T](list: List[T]) {
def toJavaList(): JavaList[T] = {
val javaList = new ArrayList[T]
list.foreach(javaList.add(_))
javaList
}
}
class ToScalaConvertableList[T](list: JavaList[T]) {
def toScalaList(): List[T] = {
list.toArray().toList.asInstanceOf[List[T]]
}
}
class ToDbWithDefault(dbObject: DBObject) {
def getWithDefault[T](key: Any, default: T): T = {
val res = dbObject.get(key.toString())
if (res != null) {
res.asInstanceOf[T]
} else {
default
}
}
}
implicit def makeToJavaConvertableList[T](scalaList: List[T]): ToJavaConvertableList[T] = new ToJavaConvertableList[T](scalaList)
implicit def makeToScalaConvertableList[T](javaList: JavaList[T]): ToScalaConvertableList[T] = new ToScalaConvertableList[T](javaList)
implicit def makeToWithDefault(dbObject: DBObject) = new ToDbWithDefault(dbObject)
}
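// Hypothetical usage sketch (not in the original file): the implicit
// conversions above let Scala and Java lists interoperate directly, e.g.
// val javaList: JavaList[Int] = List(1, 2, 3).toJavaList()
// val scalaList: List[Int] = javaList.toScalaList()
// and dbObject.getWithDefault("name", "unknown") falls back when the key is absent.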
| mortenpoulsen/kanbanik | kanbanik-server/src/main/scala/com/googlecode/kanbanik/commons/package.scala | Scala | apache-2.0 | 1,111 |
import sbt._
import sbt.Keys._
import com.typesafe.sbt.SbtAspectj.{ Aspectj, aspectjSettings, useInstrumentedClasses }
import com.typesafe.sbt.SbtAspectj.AspectjKeys.inputs
object STS2Application extends Build {
lazy val sets2app = Project(
id = "randomSearch",
base = file("."),
settings = Defaults.defaultSettings ++ aspectjSettings ++ Seq(
organization := "com.typesafe.sbt.aspectj",
version := "0.1",
scalaVersion := "2.11.0",
libraryDependencies += "com.typesafe.akka" %% "akka-actor" % "2.3.6",
libraryDependencies += "com.typesafe.akka" %% "akka-cluster" % "2.3.6",
libraryDependencies += "com.assembla.scala-incubator" %% "graph-core" % "1.9.0",
libraryDependencies += "com.assembla.scala-incubator" %% "graph-dot" % "1.9.0",
//libraryDependencies += "com.typesafe.scala-logging" %% "scala-logging-api" % "2.1.2",
libraryDependencies += "ch.qos.logback" % "logback-classic" % "1.1.2",
// For 2.11.0:
libraryDependencies += "org.scala-lang.modules" %% "scala-parser-combinators" % "1.0.2",
// For 2.10.*:
//libraryDependencies += "org.scala-lang" % "scala-swing" % scalaVersion.value,
// add akka-actor as an aspectj input (find it in the update report)
inputs in Aspectj <++= update map { report =>
report.matching(moduleFilter(organization = "com.typesafe.akka", name = "akka-actor*"))
},
// replace the original akka-actor jar with the instrumented classes in runtime
fullClasspath in Runtime <<= useInstrumentedClasses(Runtime)
)
)
}
| NetSys/demi-applications | project/Build.scala | Scala | bsd-2-clause | 1,584 |
package com.nitin.nizhawan.decompiler.structures.constantpool
import com.nitin.nizhawan.decompiler.main.ByteReader
/**
* Created by nitin on 13/12/15.
*/
class StringConstPoolEntry(tag:Int,br:ByteReader,pool:ConstantPool) extends ConstPoolEntry(tag,br,pool){
val index = br.readChar()
override lazy val info = pool.poolEntries(index).info
}
| nitin-nizhawan/jedi | src/com/nitin/nizhawan/decompiler/structures/constantpool/StringConstPoolEntry.scala | Scala | artistic-2.0 | 349 |
class D[T]
class C {
def f() = {
locally {
class dd[U] extends D[U] {
val xx = 1
}
class ee[V] extends dd[(V, V)]
def d[V]: dd[V] = new dd[V]
g[D[Int]](d[Int])
g[D[(Int, Int)]](new ee[Int])
}
}
inline def locally[T](inline body: T): T = body
def g[T](x: T): T = x
}
| dotty-staging/dotty | tests/pos/i9965.scala | Scala | apache-2.0 | 328 |
package org.eigengo.akkapatterns.api
import org.eigengo.akkapatterns.domain.{SuperuserKind, User, UserFormats}
import java.util.UUID
import org.eigengo.akkapatterns.core.{UserGraphDatabaseIndexes, SprayJsonNodeMarshalling, TypedGraphDatabase}
// TODO https://github.com/eigengo/akka-patterns/issues/35
trait Neo4JFixtures extends TypedGraphDatabase with UserFormats with SprayJsonNodeMarshalling with UserGraphDatabaseIndexes {
val RootUserPassword = "*******"
val RootUser = User(UUID.fromString("a3372060-2b3b-11e2-81c1-0800200c9a66"), "root", "", "[email protected]", None, "Jan", "Machacek", SuperuserKind).resetPassword(RootUserPassword)
private def ensureUserSanity: Boolean = {
findOneEntityWithIndex[User] { _.get("username", RootUser.username) } match {
case None =>
withTransaction {
addOneWithIndex(RootUser) { (node, index) =>
index.putIfAbsent(node, "username", RootUser.username)
index.putIfAbsent(node, "id", RootUser.id.toString)
}
}
true
case Some(rootUser) =>
rootUser.checkPassword(RootUserPassword)
}
}
def neo4jFixtures: Boolean = synchronized {
ensureUserSanity
}
}
| eigengo/akka-patterns | server/api/src/test/scala/org/eigengo/akkapatterns/api/neo4jsupport.scala | Scala | apache-2.0 | 1,205 |
/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.utils.io
import java.io.{ File, PrintWriter }
import java.net.{ ServerSocket, URI }
import java.util.concurrent.atomic.AtomicInteger
import com.amazonaws.services.s3.AmazonS3Client
import org.bdgenomics.utils.misc.{ NetworkConnected, S3Test }
import org.scalatest.FunSuite
class ByteAccessSuite extends FunSuite {
lazy val credentials = new CredentialsProperties(Some(new File(System.getProperty("user.home") + "/spark.conf")))
.awsCredentials(Some("s3"))
lazy val bucketName = System.getenv("BUCKET_NAME")
lazy val parquetLocation = System.getenv("PARQUET_LOCATION")
test("ByteArrayByteAccess returns arbitrary subsets of bytes correctly") {
val bytes = Array[Byte](0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
val access = new ByteArrayByteAccess(bytes)
assert(access.length() === bytes.length)
assert(access.readFully(5, 5) === bytes.slice(5, 10))
}
test("ByteArrayByteAccess supports two successive calls with different offsets") {
val bytes = Array[Byte](0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
val access = new ByteArrayByteAccess(bytes)
assert(access.length() === bytes.length)
assert(access.readFully(5, 5) === bytes.slice(5, 10))
assert(access.readFully(3, 5) === bytes.slice(3, 8))
}
test("LocalFileByteAccess returns arbitrary subsets of bytes correctly") {
val content = "abcdefghij"
val temp = File.createTempFile("byteaccesssuite", "test")
val writer = new PrintWriter(temp)
writer.print(content)
writer.close()
val access = new LocalFileByteAccess(temp)
assert(access.length() === content.length())
assert(access.readFully(3, 5) === content.substring(3, 8).getBytes("ASCII"))
}
test("HTTPRangedByteAccess will retry multiple times", NetworkConnected) {
val socket = new ServerSocket(0)
if (!socket.isBound) throw new Exception("Could not bind ServerSocket")
val port = socket.getLocalPort
val count = new AtomicInteger(0)
val thread = new Thread(new Runnable {
def single() = {
val client = socket.accept()
count.getAndIncrement
client.close()
}
override def run(): Unit = {
single()
run()
}
})
thread.start()
val uri = URI.create("http://localhost:" + port + "/")
val http = new HTTPRangedByteAccess(uri, 9)
intercept[Exception] {
http.length()
}
assert(count.get() === 10, "Retrying 9 times should result in 10 connections")
socket.close()
thread.interrupt()
}
test("HTTPRangedByteAccess supports range queries", NetworkConnected) {
val uri = URI.create("https://s3.amazonaws.com/bdgenomics-test/mouse_chrM.bam")
val http = new HTTPRangedByteAccess(uri, 1)
val bytes1 = http.readFully(100, 10)
val bytes2 = http.readFully(100, 100)
assert(bytes1.length === 10)
assert(bytes2.length === 100)
assert(bytes1 === bytes2.slice(0, 10))
// figured this out by executing:
// curl --range 100-109 http://www.cs.berkeley.edu/~massie/bams/mouse_chrM.bam | od -t u1
assert(bytes1 === Array(188, 185, 119, 110, 102, 222, 76, 23, 189, 139).map(_.toByte))
}
test("HTTPRangedByteAccess can retrieve a full range", NetworkConnected) {
val uri = URI.create("https://s3.amazonaws.com/bdgenomics-test/eecslogo.gif")
val http = new HTTPRangedByteAccess(uri, 1)
val bytes = http.readFully(0, http.length().toInt)
assert(bytes.length === http.length())
}
test("Testing S3 byte access", NetworkConnected, S3Test) {
val byteAccess = new S3ByteAccess(new AmazonS3Client(credentials),
bucketName,
parquetLocation)
assert(byteAccess.readFully(0, 1)(0) === 80)
}
}
| nfergu/bdg-utils | utils-io/src/test/scala/org/bdgenomics/utils/io/ByteAccessSuite.scala | Scala | apache-2.0 | 4,474 |
/*
* Copyright 2017-2018 47 Degrees, LLC. <http://www.47deg.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package examples.todolist
package runtime
import java.util.Properties
import cats.effect.IO
import cats.Monad
import com.zaxxer.hikari.{HikariConfig, HikariDataSource}
import doobie._
import doobie.hikari._
import doobie.hikari.implicits._
import doobie.implicits._
import examples.todolist.persistence._
import examples.todolist.persistence.runtime._
import scala.concurrent.ExecutionContext
object implicits extends ExecutionImplicits with RepositoryHandlersImplicits with DoobieImplicits
trait RepositoryHandlersImplicits {
implicit def appRepositoryHandler[F[_]: Monad](
implicit T: Transactor[F]): AppRepository.Handler[F] =
new AppRepositoryHandler[F]
implicit def todoItemRepositoryHandler[F[_]: Monad](
implicit T: Transactor[F]): TodoItemRepository.Handler[F] =
new TodoItemRepositoryHandler[F]
implicit def todoListRepositoryHandler[F[_]: Monad](
implicit T: Transactor[F]): TodoListRepository.Handler[F] =
new TodoListRepositoryHandler[F]
implicit def tagRepositoryHandler[F[_]: Monad](
implicit T: Transactor[F]): TagRepository.Handler[F] =
new TagRepositoryHandler[F]
}
trait DoobieImplicits {
implicit val xa: HikariTransactor[IO] =
HikariTransactor[IO](new HikariDataSource(new HikariConfig(new Properties {
setProperty("driverClassName", "org.h2.Driver")
setProperty("jdbcUrl", "jdbc:h2:mem:todo")
setProperty("username", "sa")
setProperty("password", "")
setProperty("maximumPoolSize", "10")
setProperty("minimumIdle", "10")
setProperty("idleTimeout", "600000")
setProperty("connectionTimeout", "30000")
setProperty("connectionTestQuery", "SELECT 1")
setProperty("maxLifetime", "1800000")
setProperty("autoCommit", "true")
})))
}
trait ExecutionImplicits {
implicit val ec: ExecutionContext =
scala.concurrent.ExecutionContext.Implicits.global
}
| frees-io/freestyle | modules/examples/todolist-http-http4s/src/main/scala/todo/runtime/implicits.scala | Scala | apache-2.0 | 2,537 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.orc
import java.net.URI
import java.nio.charset.StandardCharsets.UTF_8
import java.util.Properties
import scala.collection.JavaConverters._
import scala.util.control.NonFatal
import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.io.Output
import org.apache.commons.codec.binary.Base64
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileStatus, Path}
import org.apache.hadoop.hive.conf.HiveConf.ConfVars
import org.apache.hadoop.hive.ql.io.orc._
import org.apache.hadoop.hive.ql.io.sarg.SearchArgument
import org.apache.hadoop.hive.serde2.objectinspector.{SettableStructObjectInspector, StructObjectInspector}
import org.apache.hadoop.hive.serde2.typeinfo.{StructTypeInfo, TypeInfoUtils}
import org.apache.hadoop.io.{NullWritable, Writable}
import org.apache.hadoop.mapred.{JobConf, OutputFormat => MapRedOutputFormat, RecordWriter, Reporter}
import org.apache.hadoop.mapreduce._
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat
import org.apache.orc.OrcConf.COMPRESS
import org.apache.spark.{SPARK_VERSION_SHORT, TaskContext}
import org.apache.spark.internal.Logging
import org.apache.spark.sql.SPARK_VERSION_METADATA_KEY
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.execution.datasources._
import org.apache.spark.sql.execution.datasources.orc.OrcOptions
import org.apache.spark.sql.hive.{HiveInspectors, HiveShim}
import org.apache.spark.sql.sources.{Filter, _}
import org.apache.spark.sql.types._
import org.apache.spark.util.SerializableConfiguration
/**
* `FileFormat` for reading ORC files. If this is moved or renamed, please update
* `DataSource`'s backwardCompatibilityMap.
*/
class OrcFileFormat extends FileFormat with DataSourceRegister with Serializable {
override def shortName(): String = "orc"
override def toString: String = "ORC"
override def inferSchema(
sparkSession: SparkSession,
options: Map[String, String],
files: Seq[FileStatus]): Option[StructType] = {
val ignoreCorruptFiles = sparkSession.sessionState.conf.ignoreCorruptFiles
OrcFileOperator.readSchema(
files.map(_.getPath.toString),
Some(sparkSession.sessionState.newHadoopConf()),
ignoreCorruptFiles
)
}
override def prepareWrite(
sparkSession: SparkSession,
job: Job,
options: Map[String, String],
dataSchema: StructType): OutputWriterFactory = {
val orcOptions = new OrcOptions(options, sparkSession.sessionState.conf)
val configuration = job.getConfiguration
configuration.set(COMPRESS.getAttribute, orcOptions.compressionCodec)
configuration match {
case conf: JobConf =>
conf.setOutputFormat(classOf[OrcOutputFormat])
case conf =>
conf.setClass(
"mapred.output.format.class",
classOf[OrcOutputFormat],
classOf[MapRedOutputFormat[_, _]])
}
new OutputWriterFactory {
override def newInstance(
path: String,
dataSchema: StructType,
context: TaskAttemptContext): OutputWriter = {
new OrcOutputWriter(path, dataSchema, context)
}
override def getFileExtension(context: TaskAttemptContext): String = {
val compressionExtension: String = {
val name = context.getConfiguration.get(COMPRESS.getAttribute)
OrcFileFormat.extensionsForCompressionCodecNames.getOrElse(name, "")
}
compressionExtension + ".orc"
}
}
}
override def isSplitable(
sparkSession: SparkSession,
options: Map[String, String],
path: Path): Boolean = {
true
}
override def buildReader(
sparkSession: SparkSession,
dataSchema: StructType,
partitionSchema: StructType,
requiredSchema: StructType,
filters: Seq[Filter],
options: Map[String, String],
hadoopConf: Configuration): (PartitionedFile) => Iterator[InternalRow] = {
if (sparkSession.sessionState.conf.orcFilterPushDown) {
// Sets pushed predicates
OrcFilters.createFilter(requiredSchema, filters.toArray).foreach { f =>
hadoopConf.set(OrcFileFormat.SARG_PUSHDOWN, toKryo(f))
hadoopConf.setBoolean(ConfVars.HIVEOPTINDEXFILTER.varname, true)
}
}
val broadcastedHadoopConf =
sparkSession.sparkContext.broadcast(new SerializableConfiguration(hadoopConf))
val ignoreCorruptFiles = sparkSession.sessionState.conf.ignoreCorruptFiles
(file: PartitionedFile) => {
val conf = broadcastedHadoopConf.value.value
val filePath = new Path(new URI(file.filePath))
// SPARK-8501: Empty ORC files always have an empty schema stored in their footer. In this
// case, `OrcFileOperator.readSchema` returns `None`, and we can't read the underlying file
// using the given physical schema. Instead, we simply return an empty iterator.
val isEmptyFile =
OrcFileOperator.readSchema(Seq(filePath.toString), Some(conf), ignoreCorruptFiles).isEmpty
if (isEmptyFile) {
Iterator.empty
} else {
OrcFileFormat.setRequiredColumns(conf, dataSchema, requiredSchema)
val orcRecordReader = {
val job = Job.getInstance(conf)
FileInputFormat.setInputPaths(job, file.filePath)
// Custom OrcRecordReader is used to get
// ObjectInspector during recordReader creation itself and can
// avoid NameNode call in unwrapOrcStructs per file.
// Specifically would be helpful for partitioned datasets.
val orcReader = OrcFile.createReader(filePath, OrcFile.readerOptions(conf))
new SparkOrcNewRecordReader(orcReader, conf, file.start, file.length)
}
val recordsIterator = new RecordReaderIterator[OrcStruct](orcRecordReader)
Option(TaskContext.get())
.foreach(_.addTaskCompletionListener[Unit](_ => recordsIterator.close()))
// Unwraps `OrcStruct`s to `UnsafeRow`s
OrcFileFormat.unwrapOrcStructs(
conf,
dataSchema,
requiredSchema,
Some(orcRecordReader.getObjectInspector.asInstanceOf[StructObjectInspector]),
recordsIterator)
}
}
}
override def supportDataType(dataType: DataType): Boolean = dataType match {
case _: AtomicType => true
case st: StructType => st.forall { f => supportDataType(f.dataType) }
case ArrayType(elementType, _) => supportDataType(elementType)
case MapType(keyType, valueType, _) =>
supportDataType(keyType) && supportDataType(valueType)
case udt: UserDefinedType[_] => supportDataType(udt.sqlType)
case _ => false
}
// HIVE-11253 moved `toKryo` from `SearchArgument` to `storage-api` module.
// This is copied from Hive 1.2's SearchArgumentImpl.toKryo().
private def toKryo(sarg: SearchArgument): String = {
val kryo = new Kryo()
val out = new Output(4 * 1024, 10 * 1024 * 1024)
kryo.writeObject(out, sarg)
out.close()
Base64.encodeBase64String(out.toBytes)
}
}
private[orc] class OrcSerializer(dataSchema: StructType, conf: Configuration)
extends HiveInspectors {
def serialize(row: InternalRow): Writable = {
wrapOrcStruct(cachedOrcStruct, structOI, row)
serializer.serialize(cachedOrcStruct, structOI)
}
private[this] val serializer = {
val table = new Properties()
table.setProperty("columns", dataSchema.fieldNames.mkString(","))
table.setProperty("columns.types", dataSchema.map(_.dataType.catalogString).mkString(":"))
val serde = new OrcSerde
serde.initialize(conf, table)
serde
}
// Object inspector converted from the schema of the relation to be serialized.
private[this] val structOI = {
val typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(dataSchema.catalogString)
OrcStruct.createObjectInspector(typeInfo.asInstanceOf[StructTypeInfo])
.asInstanceOf[SettableStructObjectInspector]
}
private[this] val cachedOrcStruct = structOI.create().asInstanceOf[OrcStruct]
// Wrapper functions used to wrap Spark SQL input arguments into Hive specific format
private[this] val wrappers = dataSchema.zip(structOI.getAllStructFieldRefs().asScala.toSeq).map {
case (f, i) => wrapperFor(i.getFieldObjectInspector, f.dataType)
}
private[this] def wrapOrcStruct(
struct: OrcStruct,
oi: SettableStructObjectInspector,
row: InternalRow): Unit = {
val fieldRefs = oi.getAllStructFieldRefs
var i = 0
val size = fieldRefs.size
while (i < size) {
oi.setStructFieldData(
struct,
fieldRefs.get(i),
wrappers(i)(row.get(i, dataSchema(i).dataType))
)
i += 1
}
}
}
private[orc] class OrcOutputWriter(
path: String,
dataSchema: StructType,
context: TaskAttemptContext)
extends OutputWriter {
private[this] val serializer = new OrcSerializer(dataSchema, context.getConfiguration)
// `OrcRecordWriter.close()` creates an empty file if no rows are written at all. We use this
// flag to decide whether `OrcRecordWriter.close()` needs to be called.
private var recordWriterInstantiated = false
private lazy val recordWriter: RecordWriter[NullWritable, Writable] = {
recordWriterInstantiated = true
new OrcOutputFormat().getRecordWriter(
new Path(path).getFileSystem(context.getConfiguration),
context.getConfiguration.asInstanceOf[JobConf],
path,
Reporter.NULL
).asInstanceOf[RecordWriter[NullWritable, Writable]]
}
override def write(row: InternalRow): Unit = {
recordWriter.write(NullWritable.get(), serializer.serialize(row))
}
override def close(): Unit = {
if (recordWriterInstantiated) {
// Hive 1.2.1 ORC initializes its private `writer` field at the first write.
OrcFileFormat.addSparkVersionMetadata(recordWriter)
recordWriter.close(Reporter.NULL)
}
}
}
private[orc] object OrcFileFormat extends HiveInspectors with Logging {
// This constant duplicates `OrcInputFormat.SARG_PUSHDOWN`, which is unfortunately not public.
private[orc] val SARG_PUSHDOWN = "sarg.pushdown"
// The extensions for ORC compression codecs
val extensionsForCompressionCodecNames = Map(
"NONE" -> "",
"SNAPPY" -> ".snappy",
"ZLIB" -> ".zlib",
"LZO" -> ".lzo")
def unwrapOrcStructs(
conf: Configuration,
dataSchema: StructType,
requiredSchema: StructType,
maybeStructOI: Option[StructObjectInspector],
iterator: Iterator[Writable]): Iterator[InternalRow] = {
val deserializer = new OrcSerde
val mutableRow = new SpecificInternalRow(requiredSchema.map(_.dataType))
val unsafeProjection = UnsafeProjection.create(requiredSchema)
def unwrap(oi: StructObjectInspector): Iterator[InternalRow] = {
val (fieldRefs, fieldOrdinals) = requiredSchema.zipWithIndex.map {
case (field, ordinal) =>
var ref = oi.getStructFieldRef(field.name)
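          // ORC files written by old Hive versions name columns positionally
          // ("_col0", "_col1", ...), so fall back to the field's index in the
          // physical schema when lookup by logical name fails.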
if (ref == null) {
ref = oi.getStructFieldRef("_col" + dataSchema.fieldIndex(field.name))
}
ref -> ordinal
}.unzip
val unwrappers = fieldRefs.map(r => if (r == null) null else unwrapperFor(r))
iterator.map { value =>
val raw = deserializer.deserialize(value)
var i = 0
val length = fieldRefs.length
while (i < length) {
val fieldRef = fieldRefs(i)
val fieldValue = if (fieldRef == null) null else oi.getStructFieldData(raw, fieldRef)
if (fieldValue == null) {
mutableRow.setNullAt(fieldOrdinals(i))
} else {
unwrappers(i)(fieldValue, mutableRow, fieldOrdinals(i))
}
i += 1
}
unsafeProjection(mutableRow)
}
}
maybeStructOI.map(unwrap).getOrElse(Iterator.empty)
}
def setRequiredColumns(
conf: Configuration, dataSchema: StructType, requestedSchema: StructType): Unit = {
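    // Sort by physical column id so that the id list and the name list handed
    // to Hive stay aligned and in ascending column order.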
val ids = requestedSchema.map(a => dataSchema.fieldIndex(a.name): Integer)
val (sortedIDs, sortedNames) = ids.zip(requestedSchema.fieldNames).sorted.unzip
HiveShim.appendReadColumns(conf, sortedIDs, sortedNames)
}
  /**
   * Adds a metadata entry recording the Spark version to the underlying ORC writer.
   */
def addSparkVersionMetadata(recordWriter: RecordWriter[NullWritable, Writable]): Unit = {
try {
val writerField = recordWriter.getClass.getDeclaredField("writer")
writerField.setAccessible(true)
val writer = writerField.get(recordWriter).asInstanceOf[Writer]
writer.addUserMetadata(SPARK_VERSION_METADATA_KEY, UTF_8.encode(SPARK_VERSION_SHORT))
} catch {
case NonFatal(e) => log.warn(e.toString, e)
}
}
}
|
icexelloss/spark
|
sql/hive/src/main/scala/org/apache/spark/sql/hive/orc/OrcFileFormat.scala
|
Scala
|
apache-2.0
| 13,562 |
package teksol
import java.util.Locale
import javax.sql.DataSource
import org.springframework.jdbc.core.JdbcTemplate
import org.springframework.jdbc.datasource.DataSourceTransactionManager
import org.springframework.transaction.PlatformTransactionManager
import org.springframework.transaction.support.TransactionTemplate
import teksol.infrastructure.{EventBus, InMemoryI18n}
import teksol.mybank.domain.services.MyBankAppService
import teksol.mybank.infrastructure.postgres.PostgresMyBankRepository
import teksol.postgres.{PostgresEventBus, PostgresFamilyApp}
trait Config {
def dataSource: DataSource
lazy val jdbcTemplate: JdbcTemplate = new JdbcTemplate(dataSource)
lazy val transactionManager: PlatformTransactionManager = new DataSourceTransactionManager(dataSource)
lazy val transactionTemplate: TransactionTemplate = new TransactionTemplate(transactionManager)
lazy val eventBus: EventBus = new PostgresEventBus(jdbcTemplate)
lazy val app = new PostgresFamilyApp(jdbcTemplate, eventBus)
lazy val myBankRepository = new PostgresMyBankRepository(jdbcTemplate, eventBus)
lazy val myBankService = {
val service = new MyBankAppService(myBankRepository, eventBus)
eventBus.register(service)
service
}
val en_US = Locale.US
val fr_CA = Locale.CANADA_FRENCH
val fr_FR = Locale.FRANCE
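  // Translation values use %{name} placeholders, which InMemoryI18n is assumed
  // to substitute with runtime values when messages are rendered.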
lazy val i18n = new InMemoryI18n(Map(
en_US -> Map(
"salary.none" -> "No completed chores this period",
"salary.positive" -> "%{numUnitsCompleted} completed this week",
"salary.negative" -> "%{numUnitsCompleted} completed this week",
"interests.none" -> "No interests for period",
"interests.negative" -> "Negative interests on $ %{balance} balance, at a rate of %{rate}",
"interests.positive" -> "Interests on $ %{balance} balance, at a rate of %{rate}"),
fr_CA -> Map(
"salary.none" -> "Aucune tâche ménagères complétées cette semaine",
"salary.positive" -> "%{numUnitsCompleted} tâches ménagères complétées cette semaine",
"salary.negative" -> "%{numUnitsCompleted} tâches ménagères complétées cette semaine",
"interests.none" -> "Aucun intérêts pour la période",
"interests.negative" -> "Intérêts négatifs calculés sur un solde de %{balance} $ et un taux de %{rate}",
"interests.positive" -> "Intérêts calculés sur un solde de %{balance} $ et un taux de %{rate}"),
fr_FR -> Map(
"salary.none" -> "Aucune tâche ménagères complétées cette semaine",
"salary.positive" -> "%{numUnitsCompleted} tâches ménagères complétées cette semaine",
"salary.negative" -> "%{numUnitsCompleted} tâches ménagères complétées cette semaine",
"interests.none" -> "Aucun intérêts pour la période",
"interests.negative" -> "Intérêts négatifs calculés sur un solde de %{balance} $ et un taux de %{rate}",
"interests.positive" -> "Intérêts calculés sur un solde de %{balance} $ et un taux de %{rate}")))
}
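// A minimal wiring sketch (illustrative only; the PGSimpleDataSource class and
// connection URL below are assumptions, not part of this repository). A
// concrete config only needs to supply the DataSource; everything else is lazy:
//
//   object DevConfig extends Config {
//     override val dataSource: DataSource = {
//       val ds = new org.postgresql.ds.PGSimpleDataSource()
//       ds.setUrl("jdbc:postgresql://localhost:5432/family")
//       ds
//     }
//   }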
|
francois/family
|
src/main/scala/teksol/Config.scala
|
Scala
|
mit
| 3,146 |
package com.typesafe.slick.docs
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}
import slick.jdbc.H2Profile.api._
object OrmToSlick extends App {
import SqlToSlick.Tables._
// fake ORM
object PeopleFinder{
def getByIds(ids: Seq[Int]): Seq[Person] = Seq()
def getById(id: Int): Person = null
}
implicit class OrmPersonAddress(person: Person){
def address: Address = null
}
implicit class OrmPrefetch(people: Seq[Person]){
def prefetch(f: Person => Address) = people
}
object session{
def createQuery(hql: String) = new HqlQuery
def createCriteria(cls: java.lang.Class[_]) = new Criteria
def save = ()
}
class Criteria{
def add(r: Restriction) = this
}
type Restriction = Criteria
class HqlQuery{
def setParameterList(column: String, values: Array[_]): Unit = ()
}
object Property{
def forName(s:String) = new Property
}
class Property{
def in(array: Array[_]): Restriction = new Restriction
def lt(i: Int) = new Restriction
def gt(i: Int) = new Restriction
}
object Restrictions{
def disjunction = new Criteria
}
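  // The fake ORM above exists only so the documentation snippets below
  // compile; it performs no real persistence.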
val db = Database.forConfig("h2mem1")
try {
val setup = DBIO.seq(
addresses.schema.create,
people.schema.create,
sql"ALTER TABLE PERSON ALTER COLUMN NAME VARCHAR(255) DEFAULT('')".as[Int],
sql"ALTER TABLE PERSON ALTER COLUMN AGE INT DEFAULT(-1)".as[Int],
sql"ALTER TABLE PERSON ALTER COLUMN ADDRESS_ID INT DEFAULT(1)".as[Int],
SqlToSlick.inserts
)
Await.result(db.run(setup), Duration.Inf)
;{
//#ormObjectNavigation
val people: Seq[Person] = PeopleFinder.getByIds(Seq(2,99,17,234))
val addresses: Seq[Address] = people.map(_.address)
//#ormObjectNavigation
};{
//#ormPrefetch
// tell the ORM to load all related addresses at once
val people: Seq[Person] = PeopleFinder.getByIds(Seq(2,99,17,234)).prefetch(_.address)
val addresses: Seq[Address] = people.map(_.address)
//#ormPrefetch
}
;{
//#slickNavigation
val peopleQuery: Query[People,Person,Seq] = people.filter(_.id inSet(Set(2,99,17,234)))
val addressesQuery: Query[Addresses,Address,Seq] = peopleQuery.flatMap(_.address)
//#slickNavigation
//#slickExecution
val addressesAction: DBIO[Seq[Address]] = addressesQuery.result
val addresses: Future[Seq[Address]] = db.run(addressesAction)
//#slickExecution
Await.result(addresses, Duration.Inf)
};{
type Query = HqlQuery
//#hqlQuery
val hql: String = "FROM Person p WHERE p.id in (:ids)"
val q: Query = session.createQuery(hql)
q.setParameterList("ids", Array(2,99,17,234))
//#hqlQuery
};{
//#criteriaQuery
val id = Property.forName("id")
val q = session.createCriteria(classOf[Person])
.add( id in Array(2,99,17,234) )
//#criteriaQuery
//#criteriaQueryComposition
def byIds(c: Criteria, ids: Array[Int]) = c.add( id in ids )
val c = byIds(
session.createCriteria(classOf[Person]),
Array(2,99,17,234)
)
//#criteriaQueryComposition
};{
//#criteriaComposition
val age = Property.forName("age")
val q = session.createCriteria(classOf[Person])
.add(
Restrictions.disjunction
.add(age lt 5)
.add(age gt 65)
)
//#criteriaComposition
};{
//#slickQuery
val q = people.filter(p => p.age < 5 || p.age > 65)
//#slickQuery
};{
//#slickQueryWithTypes
val q = (people: Query[People, Person, Seq]).filter(
(p: People) =>
(
((p.age: Rep[Int]) < 5 || p.age > 65)
: Rep[Boolean]
)
)
//#slickQueryWithTypes
};{
//#slickForComprehension
for( p <- people if p.age < 5 || p.age > 65 ) yield p
//#slickForComprehension
};{
//#slickOrderBy
( for( p <- people if p.age < 5 || p.age > 65 ) yield p ).sortBy(_.name)
//#slickOrderBy
};{
//#slickMap
people.map(p => (p.name, p.age))
//#slickMap
};{
//#ormGetById
PeopleFinder.getById(5)
//#ormGetById
};{
Await.result(
//#slickRun
db.run(people.filter(_.id === 5).result)
//#slickRun
, Duration.Inf)
};{
//#ormWriteCaching
val person = PeopleFinder.getById(5)
//#ormWriteCaching
};{
import scala.language.reflectiveCalls
val person = new {
var name: String = ""
var age: Int = 0
}
//#ormWriteCaching
person.name = "C. Vogt"
person.age = 12345
session.save
//#ormWriteCaching
};{
//#slickUpdate
val personQuery = people.filter(_.id === 5)
personQuery.map(p => (p.name,p.age)).update("C. Vogt", 12345)
//#slickUpdate
//#slickDelete
personQuery.delete // deletes person with id 5
//#slickDelete
};{
//#slickInsert
people.map(p => (p.name,p.age)) += ("S. Zeiger", 54321)
//#slickInsert
};{
import scala.language.higherKinds
//#slickRelationships
implicit class PersonExtensions[C[_]](q: Query[People, Person, C]) {
// specify mapping of relationship to address
def withAddress = q.join(addresses).on(_.addressId === _.id)
}
//#slickRelationships
;{
//#slickRelationships
val chrisQuery = people.filter(_.id === 2)
val stefanQuery = people.filter(_.id === 3)
val chrisWithAddress: Future[(Person, Address)] =
db.run(chrisQuery.withAddress.result.head)
val stefanWithAddress: Future[(Person, Address)] =
db.run(stefanQuery.withAddress.result.head)
//#slickRelationships
Await.result(chrisWithAddress, Duration.Inf)
Await.result(stefanWithAddress, Duration.Inf)
};{
//#relationshipNavigation
val chris: Person = PeopleFinder.getById(2)
val address: Address = chris.address
//#relationshipNavigation
};{
/*
//#relationshipNavigation2
case class Address( … )
case class Person( …, address: Address )
//#relationshipNavigation2
*/
};{
//#slickRelationships2
val chrisQuery: Query[People,Person,Seq] = people.filter(_.id === 2)
val addressQuery: Query[Addresses,Address,Seq] = chrisQuery.withAddress.map(_._2)
val address = db.run(addressQuery.result.head)
//#slickRelationships2
Await.result(address, Duration.Inf)
};{
import scala.concurrent.ExecutionContext.Implicits.global
//#associationTuple
val tupledJoin: Query[(People,Addresses),(Person,Address), Seq]
= people join addresses on (_.addressId === _.id)
case class PersonWithAddress(person: Person, address: Address)
val caseClassJoinResults = db.run(tupledJoin.result).map(_.map(PersonWithAddress.tupled))
//#associationTuple
}
}
} finally db.close
}
|
nmartynenko/slick
|
slick/src/sphinx/code/OrmToSlick.scala
|
Scala
|
bsd-2-clause
| 7,207 |
package scray.cassandra.tools.api
import scray.cassandra.tools.types.LuceneColumnTypes.LuceneColumnType
import scray.querying.description.TableIdentifier
case class LucenIndexedColumn(name: String, dataType: LuceneColumnType, isSorted: Boolean)
trait LuceneIndexStatementGenerator {
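  /**
   * Builds the index-creation statement (e.g. a CQL CREATE CUSTOM INDEX) for
   * the given table and columns. Implementations are expected to return None
   * when the supplied Lucene plugin version is not supported.
   */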
def getIndexString(ti: TableIdentifier, column: List[LucenIndexedColumn], luceneVersion: Tuple3[Int, Int, Int]): Option[String]
}
|
scray/scray
|
scray-cassandra/src/main/scala/scray/cassandra/tools/api/LuceneIndexStatementGenerator.scala
|
Scala
|
apache-2.0
| 426 |
package org.littlewings.javaee7.bootstrap
import javax.annotation.PostConstruct
import javax.ejb.{Singleton, Startup}
import javax.persistence.{EntityManager, PersistenceContext}
import org.hibernate.search.jpa.Search
@Singleton
@Startup
class ContextInitializer {
@PersistenceContext
private var em: EntityManager = _
@PostConstruct
def initialize(): Unit = {
    /* Remove this code when the index storage backend is Infinispan
     * and the nodes run as a cluster:
val fullTextEm = Search.getFullTextEntityManager(em)
fullTextEm.createIndexer().purgeAllOnStart(true).startAndWait()
*/
}
}
|
kazuhira-r/javaee7-scala-examples
|
hibernate-search-with-jpa/src/main/scala/org/littlewings/javaee7/bootstrap/ContextInitializer.scala
|
Scala
|
mit
| 643 |
package com.twitter.finagle.mux.transport
import io.netty.channel.{ChannelHandler, ChannelPipeline}
import io.netty.handler.codec.{LengthFieldBasedFrameDecoder, LengthFieldPrepender}
private[mux] object Netty4Framer {
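  // Mux frames are length-prefixed: a 4-byte length field sits at offset 0
  // with no adjustment, and the decoder strips those 4 bytes before passing
  // the payload downstream. MaxFrameLength caps a frame at Int.MaxValue bytes.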
val MaxFrameLength = 0x7fffffff
val LengthFieldOffset = 0
val LengthFieldLength = 4
val LengthAdjustment = 0
val InitialBytesToStrip = 4
val FrameEncoder: String = "frameEncoder"
val FrameDecoder: String = "frameDecoder"
}
/**
* An implementation of a mux framer using netty4 primitives.
*/
private[mux] abstract class Netty4Framer extends (ChannelPipeline => Unit) {
def bufferManagerName: String
def bufferManager: ChannelHandler
def apply(pipeline: ChannelPipeline): Unit = {
pipeline.addLast(
Netty4Framer.FrameDecoder,
new LengthFieldBasedFrameDecoder(
Netty4Framer.MaxFrameLength,
Netty4Framer.LengthFieldOffset,
Netty4Framer.LengthFieldLength,
Netty4Framer.LengthAdjustment,
Netty4Framer.InitialBytesToStrip
)
)
pipeline.addLast(
Netty4Framer.FrameEncoder,
new LengthFieldPrepender(Netty4Framer.LengthFieldLength))
pipeline.addLast(bufferManagerName, bufferManager)
}
}
|
twitter/finagle
|
finagle-mux/src/main/scala/com/twitter/finagle/mux/transport/Netty4Framer.scala
|
Scala
|
apache-2.0
| 1,209 |
/*^
===========================================================================
Helios - FX
===========================================================================
Copyright (C) 2013-2016 Gianluca Costa
===========================================================================
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
===========================================================================
*/
package info.gianlucacosta.helios.fx.dialogs
import info.gianlucacosta.helios.mathutils.Numbers
import scalafx.Includes._
import scalafx.event.ActionEvent
import scalafx.geometry.{Dimension2D, Insets}
import scalafx.scene.control.Alert.AlertType
import scalafx.scene.control._
import scalafx.scene.layout.VBox
/**
* Shows common input dialogs
*/
case object InputDialogs {
/**
   * Shows a dialog with "Yes", "No" and "Cancel" buttons
*
* @param message
* @param header
* @return Some(true) if the user chooses "Yes", Some(false) if the user chooses "No", None otherwise
*/
def askYesNoCancel(message: String, header: String = ""): Option[Boolean] = {
val yesButton =
new ButtonType("Yes")
val noButton =
new ButtonType("No")
val cancelButton =
new ButtonType("Cancel", ButtonBar.ButtonData.CancelClose)
val alert =
new Alert(Alert.AlertType.Confirmation) {
headerText = header
contentText = message
buttonTypes = List(
yesButton,
noButton,
cancelButton
)
}
Alerts.fix(alert)
val inputResult =
alert.showAndWait()
inputResult match {
case Some(`yesButton`) =>
Some(true)
case Some(`noButton`) =>
Some(false)
case _ =>
None
}
}
/**
* Asks for a string, automatically trimming it
*
* @param message
* @param initialValue
* @param header
* @return Some(user string, trimmed) or None
*/
def askForString(message: String, initialValue: String = "", header: String = ""): Option[String] = {
val inputDialog = new TextInputDialog(initialValue) {
headerText = header
contentText = message
resizable = true
}
val inputResult =
inputDialog.showAndWait()
inputResult
.map(_.trim)
}
/**
* Asks for a string by showing a dedicated dialog with a TextArea
* and then trimming the user's input.
*
* If a validator is provided, it must be a function receiving the trimmed text and returning
* a boolean: <i>true</i> if the dialog can be closed when the user clicks OK,
* <i>false</i> otherwise.
*
* @param message
* @param initialValue
* @param header
* @param validator The validator function - returning <i>true</i>
* if the dialog can be closed when the user clicks OK.
* Error notifications to the user should occur before returning false
* @param textAreaStyle The JavaFX CSS style for the text area.
* If it is an empty string, no style will be applied
* @param textAreaDimension The preferred size for the text area
* @return The trimmed version of the user's input, or None if the user canceled the dialog
*/
def askForText(
message: String,
initialValue: String = "",
header: String = "",
validator: (String => Boolean) = (text) => true,
textAreaStyle: String = "",
textAreaDimension: Dimension2D = new Dimension2D(700, 500)
): Option[String] = {
val dialog =
new Alert(AlertType.Confirmation) {
headerText = header
resizable = true
}
val messageLabel =
new Label {
text = message
padding = Insets(0, 0, 8, 0)
}
val textArea =
new TextArea {
text =
initialValue
prefWidth =
textAreaDimension.width
prefHeight =
textAreaDimension.height
if (textAreaStyle.nonEmpty) {
style =
textAreaStyle
}
}
dialog.dialogPane().content =
new VBox {
children.setAll(
messageLabel,
textArea
)
}
dialog.dialogPane().buttonTypes = List(
ButtonType.OK,
ButtonType.Cancel
)
val okButton: Button =
dialog
.dialogPane()
.lookupButton(ButtonType.OK)
.asInstanceOf[javafx.scene.control.Button]
okButton.filterEvent(ActionEvent.Action) {
event: ActionEvent => {
val canClose =
validator(textArea.text().trim)
if (!canClose) {
event.consume()
}
}
}
Alerts.fix(dialog)
val dialogResult =
dialog.showAndWait()
dialogResult match {
case Some(ButtonType.OK) =>
Some(textArea.text().trim)
case _ =>
None
}
}
/**
* Keeps asking for a double value, checking that it is between the given bounds and
* showing warnings if the user inputs an invalid value.
*
* @param message
* @param initialValue
* @param minValue
* @param maxValue
* @param header
* @param formatter The Double=>String function used to format numbers. Defaults to Numbers.smartString
* @return Some(input double) or None if the user canceled the dialog
*/
def askForDouble(
message: String,
initialValue: Double = 0,
minValue: Double = Double.MinValue,
maxValue: Double = Double.MaxValue,
header: String = "",
formatter: Double => String = Numbers.smartString
): Option[Double] = {
while (true) {
val inputString =
askForString(
message,
formatter(initialValue),
header
)
if (inputString.isEmpty) {
return None
}
try {
val value =
inputString.get.toDouble
if (value < minValue || value > maxValue) {
Alerts.showWarning(
s"Please, enter a number in the range [${formatter(minValue)}; ${formatter(maxValue)}]", header)
} else
return Some(value)
} catch {
case _: NumberFormatException =>
Alerts.showWarning("Please, enter a numeric value", header)
}
}
throw new AssertionError()
}
/**
* Keeps asking for a Long value, checking that it is between the given bounds and
* showing warnings if the user inputs an invalid value.
*
* @param message
* @param initialValue
* @param minValue
* @param maxValue
* @param header
* @return Some(input long) or None if the user canceled the dialog
*/
def askForLong(
message: String,
initialValue: Long = 0,
minValue: Long = Long.MinValue,
maxValue: Long = Long.MaxValue,
header: String = ""
): Option[Long] = {
while (true) {
val inputDoubleResult =
askForDouble(message, initialValue, minValue, maxValue, header)
if (inputDoubleResult.isEmpty) {
return None
}
val longValueOption =
Numbers.asLong(inputDoubleResult.get)
longValueOption match {
case Some(longValue) =>
return longValueOption
case None =>
Alerts.showWarning("Please, input an integer number", header)
}
}
throw new AssertionError()
}
/**
* Asks the user to choose an item from a given list
*
* @param message The message
* @param items The items list. Must not be empty
* @param initialItem The initial item. If None, the first item in the list will be used
* @param header The dialog's header
* @tparam T The type of the items
* @return Some(chosen item) or None
*/
def askForItem[T](message: String, items: Seq[T], initialItem: Option[T] = None, header: String = ""): Option[T] = {
require(items.nonEmpty)
val choiceDialog =
new ChoiceDialog[T](initialItem.getOrElse(items.head), items) {
headerText = header
contentText = message
}
choiceDialog.showAndWait()
}
}
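// Usage sketch (illustrative only; dialogs must be shown on the JavaFX
// application thread):
//
//   InputDialogs.askYesNoCancel("Save changes before closing?") match {
//     case Some(true)  => // save, then close
//     case Some(false) => // close without saving
//     case None        => // cancel closing
//   }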
|
giancosta86/Helios-fx
|
src/main/scala/info/gianlucacosta/helios/fx/dialogs/InputDialogs.scala
|
Scala
|
apache-2.0
| 8,864 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import java.io.{Externalizable, ObjectInput, ObjectOutput}
import java.sql.{Date, Timestamp}
import org.apache.spark.sql.catalyst.encoders.{OuterScopes, RowEncoder}
import org.apache.spark.sql.catalyst.plans.{LeftAnti, LeftSemi}
import org.apache.spark.sql.catalyst.util.sideBySide
import org.apache.spark.sql.execution.{LogicalRDD, RDDScanExec, SortExec}
import org.apache.spark.sql.execution.exchange.{BroadcastExchangeExec, ShuffleExchange}
import org.apache.spark.sql.execution.streaming.MemoryStream
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSQLContext
import org.apache.spark.sql.types._
case class TestDataPoint(x: Int, y: Double, s: String, t: TestDataPoint2)
case class TestDataPoint2(x: Int, s: String)
class DatasetSuite extends QueryTest with SharedSQLContext {
import testImplicits._
private implicit val ordering = Ordering.by((c: ClassData) => c.a -> c.b)
test("checkAnswer should compare map correctly") {
val data = Seq((1, "2", Map(1 -> 2, 2 -> 1)))
checkAnswer(
data.toDF(),
Seq(Row(1, "2", Map(2 -> 1, 1 -> 2))))
}
test("toDS") {
val data = Seq(("a", 1), ("b", 2), ("c", 3))
checkDataset(
data.toDS(),
data: _*)
}
test("toDS with RDD") {
val ds = sparkContext.makeRDD(Seq("a", "b", "c"), 3).toDS()
checkDataset(
ds.mapPartitions(_ => Iterator(1)),
1, 1, 1)
}
test("emptyDataset") {
val ds = spark.emptyDataset[Int]
assert(ds.count() == 0L)
assert(ds.collect() sameElements Array.empty[Int])
}
test("range") {
assert(spark.range(10).map(_ + 1).reduce(_ + _) == 55)
assert(spark.range(10).map{ case i: java.lang.Long => i + 1 }.reduce(_ + _) == 55)
assert(spark.range(0, 10).map(_ + 1).reduce(_ + _) == 55)
assert(spark.range(0, 10).map{ case i: java.lang.Long => i + 1 }.reduce(_ + _) == 55)
assert(spark.range(0, 10, 1, 2).map(_ + 1).reduce(_ + _) == 55)
assert(spark.range(0, 10, 1, 2).map{ case i: java.lang.Long => i + 1 }.reduce(_ + _) == 55)
}
test("SPARK-12404: Datatype Helper Serializability") {
val ds = sparkContext.parallelize((
new Timestamp(0),
new Date(0),
java.math.BigDecimal.valueOf(1),
scala.math.BigDecimal(1)) :: Nil).toDS()
ds.collect()
}
test("collect, first, and take should use encoders for serialization") {
val item = NonSerializableCaseClass("abcd")
val ds = Seq(item).toDS()
assert(ds.collect().head == item)
assert(ds.collectAsList().get(0) == item)
assert(ds.first() == item)
assert(ds.take(1).head == item)
assert(ds.takeAsList(1).get(0) == item)
assert(ds.toLocalIterator().next() === item)
}
test("coalesce, repartition") {
val data = (1 to 100).map(i => ClassData(i.toString, i))
val ds = data.toDS()
intercept[IllegalArgumentException] {
ds.coalesce(0)
}
intercept[IllegalArgumentException] {
ds.repartition(0)
}
assert(ds.repartition(10).rdd.partitions.length == 10)
checkDatasetUnorderly(
ds.repartition(10),
data: _*)
assert(ds.coalesce(1).rdd.partitions.length == 1)
checkDatasetUnorderly(
ds.coalesce(1),
data: _*)
}
test("as tuple") {
val data = Seq(("a", 1), ("b", 2)).toDF("a", "b")
checkDataset(
data.as[(String, Int)],
("a", 1), ("b", 2))
}
test("as case class / collect") {
val ds = Seq(("a", 1), ("b", 2), ("c", 3)).toDF("a", "b").as[ClassData]
checkDataset(
ds,
ClassData("a", 1), ClassData("b", 2), ClassData("c", 3))
assert(ds.collect().head == ClassData("a", 1))
}
test("as case class - reordered fields by name") {
val ds = Seq((1, "a"), (2, "b"), (3, "c")).toDF("b", "a").as[ClassData]
assert(ds.collect() === Array(ClassData("a", 1), ClassData("b", 2), ClassData("c", 3)))
}
test("as case class - take") {
val ds = Seq((1, "a"), (2, "b"), (3, "c")).toDF("b", "a").as[ClassData]
assert(ds.take(2) === Array(ClassData("a", 1), ClassData("b", 2)))
}
test("as seq of case class - reorder fields by name") {
val df = spark.range(3).select(array(struct($"id".cast("int").as("b"), lit("a").as("a"))))
val ds = df.as[Seq[ClassData]]
assert(ds.collect() === Array(
Seq(ClassData("a", 0)),
Seq(ClassData("a", 1)),
Seq(ClassData("a", 2))))
}
test("map") {
val ds = Seq(("a", 1), ("b", 2), ("c", 3)).toDS()
checkDataset(
ds.map(v => (v._1, v._2 + 1)),
("a", 2), ("b", 3), ("c", 4))
}
test("map with type change with the exact matched number of attributes") {
val ds = Seq(("a", 1), ("b", 2), ("c", 3)).toDS()
checkDataset(
ds.map(identity[(String, Int)])
.as[OtherTuple]
.map(identity[OtherTuple]),
OtherTuple("a", 1), OtherTuple("b", 2), OtherTuple("c", 3))
}
test("map with type change with less attributes") {
val ds = Seq(("a", 1, 3), ("b", 2, 4), ("c", 3, 5)).toDS()
checkDataset(
ds.as[OtherTuple]
.map(identity[OtherTuple]),
OtherTuple("a", 1), OtherTuple("b", 2), OtherTuple("c", 3))
}
test("map and group by with class data") {
// We inject a group by here to make sure this test case is future proof
// when we implement better pipelining and local execution mode.
val ds: Dataset[(ClassData, Long)] = Seq(ClassData("one", 1), ClassData("two", 2)).toDS()
.map(c => ClassData(c.a, c.b + 1))
.groupByKey(p => p).count()
checkDatasetUnorderly(
ds,
(ClassData("one", 2), 1L), (ClassData("two", 3), 1L))
}
test("select") {
val ds = Seq(("a", 1), ("b", 2), ("c", 3)).toDS()
checkDataset(
ds.select(expr("_2 + 1").as[Int]),
2, 3, 4)
}
test("SPARK-16853: select, case class and tuple") {
val ds = Seq(("a", 1), ("b", 2), ("c", 3)).toDS()
checkDataset(
ds.select(expr("struct(_2, _2)").as[(Int, Int)]): Dataset[(Int, Int)],
(1, 1), (2, 2), (3, 3))
checkDataset(
ds.select(expr("named_struct('a', _1, 'b', _2)").as[ClassData]): Dataset[ClassData],
ClassData("a", 1), ClassData("b", 2), ClassData("c", 3))
}
test("select 2") {
val ds = Seq(("a", 1), ("b", 2), ("c", 3)).toDS()
checkDataset(
ds.select(
expr("_1").as[String],
expr("_2").as[Int]) : Dataset[(String, Int)],
("a", 1), ("b", 2), ("c", 3))
}
test("select 2, primitive and tuple") {
val ds = Seq(("a", 1), ("b", 2), ("c", 3)).toDS()
checkDataset(
ds.select(
expr("_1").as[String],
expr("struct(_2, _2)").as[(Int, Int)]),
("a", (1, 1)), ("b", (2, 2)), ("c", (3, 3)))
}
test("select 2, primitive and class") {
val ds = Seq(("a", 1), ("b", 2), ("c", 3)).toDS()
checkDataset(
ds.select(
expr("_1").as[String],
expr("named_struct('a', _1, 'b', _2)").as[ClassData]),
("a", ClassData("a", 1)), ("b", ClassData("b", 2)), ("c", ClassData("c", 3)))
}
test("select 2, primitive and class, fields reordered") {
val ds = Seq(("a", 1), ("b", 2), ("c", 3)).toDS()
checkDataset(
ds.select(
expr("_1").as[String],
expr("named_struct('b', _2, 'a', _1)").as[ClassData]),
("a", ClassData("a", 1)), ("b", ClassData("b", 2)), ("c", ClassData("c", 3)))
}
test("filter") {
val ds = Seq(("a", 1), ("b", 2), ("c", 3)).toDS()
checkDataset(
ds.filter(_._1 == "b"),
("b", 2))
}
test("filter and then select") {
val ds = Seq(("a", 1), ("b", 2), ("c", 3)).toDS()
checkDataset(
ds.filter(_._1 == "b").select(expr("_1").as[String]),
"b")
}
test("SPARK-15632: typed filter should preserve the underlying logical schema") {
val ds = spark.range(10)
val ds2 = ds.filter(_ > 3)
assert(ds.schema.equals(ds2.schema))
}
test("foreach") {
val ds = Seq(("a", 1), ("b", 2), ("c", 3)).toDS()
val acc = sparkContext.longAccumulator
ds.foreach(v => acc.add(v._2))
assert(acc.value == 6)
}
test("foreachPartition") {
val ds = Seq(("a", 1), ("b", 2), ("c", 3)).toDS()
val acc = sparkContext.longAccumulator
ds.foreachPartition(_.foreach(v => acc.add(v._2)))
assert(acc.value == 6)
}
test("reduce") {
val ds = Seq(("a", 1), ("b", 2), ("c", 3)).toDS()
assert(ds.reduce((a, b) => ("sum", a._2 + b._2)) == ("sum", 6))
}
test("joinWith, flat schema") {
val ds1 = Seq(1, 2, 3).toDS().as("a")
val ds2 = Seq(1, 2).toDS().as("b")
checkDataset(
ds1.joinWith(ds2, $"a.value" === $"b.value", "inner"),
(1, 1), (2, 2))
}
test("joinWith tuple with primitive, expression") {
val ds1 = Seq(1, 1, 2).toDS()
val ds2 = Seq(("a", 1), ("b", 2)).toDS()
checkDataset(
ds1.joinWith(ds2, $"value" === $"_2"),
(1, ("a", 1)), (1, ("a", 1)), (2, ("b", 2)))
}
test("joinWith class with primitive, toDF") {
val ds1 = Seq(1, 1, 2).toDS()
val ds2 = Seq(ClassData("a", 1), ClassData("b", 2)).toDS()
checkAnswer(
ds1.joinWith(ds2, $"value" === $"b").toDF().select($"_1", $"_2.a", $"_2.b"),
Row(1, "a", 1) :: Row(1, "a", 1) :: Row(2, "b", 2) :: Nil)
}
test("multi-level joinWith") {
val ds1 = Seq(("a", 1), ("b", 2)).toDS().as("a")
val ds2 = Seq(("a", 1), ("b", 2)).toDS().as("b")
val ds3 = Seq(("a", 1), ("b", 2)).toDS().as("c")
checkDataset(
ds1.joinWith(ds2, $"a._2" === $"b._2").as("ab").joinWith(ds3, $"ab._1._2" === $"c._2"),
((("a", 1), ("a", 1)), ("a", 1)),
((("b", 2), ("b", 2)), ("b", 2)))
}
test("joinWith join types") {
val ds1 = Seq(1, 2, 3).toDS().as("a")
val ds2 = Seq(1, 2).toDS().as("b")
val e1 = intercept[AnalysisException] {
ds1.joinWith(ds2, $"a.value" === $"b.value", "left_semi")
}.getMessage
assert(e1.contains("Invalid join type in joinWith: " + LeftSemi.sql))
val e2 = intercept[AnalysisException] {
ds1.joinWith(ds2, $"a.value" === $"b.value", "left_anti")
}.getMessage
assert(e2.contains("Invalid join type in joinWith: " + LeftAnti.sql))
}
test("groupBy function, keys") {
val ds = Seq(("a", 1), ("b", 1)).toDS()
val grouped = ds.groupByKey(v => (1, v._2))
checkDatasetUnorderly(
grouped.keys,
(1, 1))
}
test("groupBy function, map") {
val ds = Seq(("a", 10), ("a", 20), ("b", 1), ("b", 2), ("c", 1)).toDS()
val grouped = ds.groupByKey(v => (v._1, "word"))
val agged = grouped.mapGroups { case (g, iter) => (g._1, iter.map(_._2).sum) }
checkDatasetUnorderly(
agged,
("a", 30), ("b", 3), ("c", 1))
}
test("groupBy function, flatMap") {
val ds = Seq(("a", 10), ("a", 20), ("b", 1), ("b", 2), ("c", 1)).toDS()
val grouped = ds.groupByKey(v => (v._1, "word"))
val agged = grouped.flatMapGroups { case (g, iter) =>
Iterator(g._1, iter.map(_._2).sum.toString)
}
checkDatasetUnorderly(
agged,
"a", "30", "b", "3", "c", "1")
}
test("groupBy function, mapValues, flatMap") {
val ds = Seq(("a", 10), ("a", 20), ("b", 1), ("b", 2), ("c", 1)).toDS()
val keyValue = ds.groupByKey(_._1).mapValues(_._2)
val agged = keyValue.mapGroups { case (g, iter) => (g, iter.sum) }
checkDataset(agged, ("a", 30), ("b", 3), ("c", 1))
val keyValue1 = ds.groupByKey(t => (t._1, "key")).mapValues(t => (t._2, "value"))
val agged1 = keyValue1.mapGroups { case (g, iter) => (g._1, iter.map(_._1).sum) }
checkDataset(agged, ("a", 30), ("b", 3), ("c", 1))
}
test("groupBy function, reduce") {
val ds = Seq("abc", "xyz", "hello").toDS()
val agged = ds.groupByKey(_.length).reduceGroups(_ + _)
checkDatasetUnorderly(
agged,
3 -> "abcxyz", 5 -> "hello")
}
test("groupBy single field class, count") {
val ds = Seq("abc", "xyz", "hello").toDS()
val count = ds.groupByKey(s => Tuple1(s.length)).count()
checkDataset(
count,
(Tuple1(3), 2L), (Tuple1(5), 1L)
)
}
test("typed aggregation: expr") {
val ds = Seq(("a", 10), ("a", 20), ("b", 1), ("b", 2), ("c", 1)).toDS()
checkDatasetUnorderly(
ds.groupByKey(_._1).agg(sum("_2").as[Long]),
("a", 30L), ("b", 3L), ("c", 1L))
}
test("typed aggregation: expr, expr") {
val ds = Seq(("a", 10), ("a", 20), ("b", 1), ("b", 2), ("c", 1)).toDS()
checkDatasetUnorderly(
ds.groupByKey(_._1).agg(sum("_2").as[Long], sum($"_2" + 1).as[Long]),
("a", 30L, 32L), ("b", 3L, 5L), ("c", 1L, 2L))
}
test("typed aggregation: expr, expr, expr") {
val ds = Seq(("a", 10), ("a", 20), ("b", 1), ("b", 2), ("c", 1)).toDS()
checkDatasetUnorderly(
ds.groupByKey(_._1).agg(sum("_2").as[Long], sum($"_2" + 1).as[Long], count("*")),
("a", 30L, 32L, 2L), ("b", 3L, 5L, 2L), ("c", 1L, 2L, 1L))
}
test("typed aggregation: expr, expr, expr, expr") {
val ds = Seq(("a", 10), ("a", 20), ("b", 1), ("b", 2), ("c", 1)).toDS()
checkDatasetUnorderly(
ds.groupByKey(_._1).agg(
sum("_2").as[Long],
sum($"_2" + 1).as[Long],
count("*").as[Long],
avg("_2").as[Double]),
("a", 30L, 32L, 2L, 15.0), ("b", 3L, 5L, 2L, 1.5), ("c", 1L, 2L, 1L, 1.0))
}
test("cogroup") {
val ds1 = Seq(1 -> "a", 3 -> "abc", 5 -> "hello", 3 -> "foo").toDS()
val ds2 = Seq(2 -> "q", 3 -> "w", 5 -> "e", 5 -> "r").toDS()
val cogrouped = ds1.groupByKey(_._1).cogroup(ds2.groupByKey(_._1)) { case (key, data1, data2) =>
Iterator(key -> (data1.map(_._2).mkString + "#" + data2.map(_._2).mkString))
}
checkDatasetUnorderly(
cogrouped,
1 -> "a#", 2 -> "#q", 3 -> "abcfoo#w", 5 -> "hello#er")
}
test("cogroup with complex data") {
val ds1 = Seq(1 -> ClassData("a", 1), 2 -> ClassData("b", 2)).toDS()
val ds2 = Seq(2 -> ClassData("c", 3), 3 -> ClassData("d", 4)).toDS()
val cogrouped = ds1.groupByKey(_._1).cogroup(ds2.groupByKey(_._1)) { case (key, data1, data2) =>
Iterator(key -> (data1.map(_._2.a).mkString + data2.map(_._2.a).mkString))
}
checkDatasetUnorderly(
cogrouped,
1 -> "a", 2 -> "bc", 3 -> "d")
}
test("sample with replacement") {
val n = 100
val data = sparkContext.parallelize(1 to n, 2).toDS()
checkDataset(
data.sample(withReplacement = true, 0.05, seed = 13),
5, 10, 52, 73)
}
test("sample without replacement") {
val n = 100
val data = sparkContext.parallelize(1 to n, 2).toDS()
checkDataset(
data.sample(withReplacement = false, 0.05, seed = 13),
3, 17, 27, 58, 62)
}
test("SPARK-16686: Dataset.sample with seed results shouldn't depend on downstream usage") {
val simpleUdf = udf((n: Int) => {
require(n != 1, "simpleUdf shouldn't see id=1!")
1
})
val df = Seq(
(0, "string0"),
(1, "string1"),
(2, "string2"),
(3, "string3"),
(4, "string4"),
(5, "string5"),
(6, "string6"),
(7, "string7"),
(8, "string8"),
(9, "string9")
).toDF("id", "stringData")
val sampleDF = df.sample(false, 0.7, 50)
// After sampling, sampleDF doesn't contain id=1.
assert(!sampleDF.select("id").collect.contains(1))
// simpleUdf should not encounter id=1.
checkAnswer(sampleDF.select(simpleUdf($"id")), List.fill(sampleDF.count.toInt)(Row(1)))
}
test("SPARK-11436: we should rebind right encoder when join 2 datasets") {
val ds1 = Seq("1", "2").toDS().as("a")
val ds2 = Seq(2, 3).toDS().as("b")
val joined = ds1.joinWith(ds2, $"a.value" === $"b.value")
checkDataset(joined, ("2", 2))
}
test("self join") {
val ds = Seq("1", "2").toDS().as("a")
val joined = ds.joinWith(ds, lit(true), "cross")
checkDataset(joined, ("1", "1"), ("1", "2"), ("2", "1"), ("2", "2"))
}
test("toString") {
val ds = Seq((1, 2)).toDS()
assert(ds.toString == "[_1: int, _2: int]")
}
test("Kryo encoder") {
implicit val kryoEncoder = Encoders.kryo[KryoData]
val ds = Seq(KryoData(1), KryoData(2)).toDS()
assert(ds.groupByKey(p => p).count().collect().toSet ==
Set((KryoData(1), 1L), (KryoData(2), 1L)))
}
test("Kryo encoder self join") {
implicit val kryoEncoder = Encoders.kryo[KryoData]
val ds = Seq(KryoData(1), KryoData(2)).toDS()
assert(ds.joinWith(ds, lit(true), "cross").collect().toSet ==
Set(
(KryoData(1), KryoData(1)),
(KryoData(1), KryoData(2)),
(KryoData(2), KryoData(1)),
(KryoData(2), KryoData(2))))
}
test("Kryo encoder: check the schema mismatch when converting DataFrame to Dataset") {
implicit val kryoEncoder = Encoders.kryo[KryoData]
val df = Seq((1)).toDF("a")
val e = intercept[AnalysisException] {
df.as[KryoData]
}.message
assert(e.contains("cannot cast IntegerType to BinaryType"))
}
test("Java encoder") {
implicit val kryoEncoder = Encoders.javaSerialization[JavaData]
val ds = Seq(JavaData(1), JavaData(2)).toDS()
assert(ds.groupByKey(p => p).count().collect().toSet ==
Set((JavaData(1), 1L), (JavaData(2), 1L)))
}
test("Java encoder self join") {
implicit val kryoEncoder = Encoders.javaSerialization[JavaData]
val ds = Seq(JavaData(1), JavaData(2)).toDS()
assert(ds.joinWith(ds, lit(true), "cross").collect().toSet ==
Set(
(JavaData(1), JavaData(1)),
(JavaData(1), JavaData(2)),
(JavaData(2), JavaData(1)),
(JavaData(2), JavaData(2))))
}
test("SPARK-14696: implicit encoders for boxed types") {
assert(spark.range(1).map { i => i : java.lang.Long }.head == 0L)
}
test("SPARK-11894: Incorrect results are returned when using null") {
val nullInt = null.asInstanceOf[java.lang.Integer]
val ds1 = Seq((nullInt, "1"), (new java.lang.Integer(22), "2")).toDS()
val ds2 = Seq((nullInt, "1"), (new java.lang.Integer(22), "2")).toDS()
checkDataset(
ds1.joinWith(ds2, lit(true), "cross"),
((nullInt, "1"), (nullInt, "1")),
((nullInt, "1"), (new java.lang.Integer(22), "2")),
((new java.lang.Integer(22), "2"), (nullInt, "1")),
((new java.lang.Integer(22), "2"), (new java.lang.Integer(22), "2")))
}
test("change encoder with compatible schema") {
val ds = Seq(2 -> 2.toByte, 3 -> 3.toByte).toDF("a", "b").as[ClassData]
assert(ds.collect().toSeq == Seq(ClassData("2", 2), ClassData("3", 3)))
}
test("verify mismatching field names fail with a good error") {
val ds = Seq(ClassData("a", 1)).toDS()
val e = intercept[AnalysisException] {
ds.as[ClassData2]
}
assert(e.getMessage.contains("cannot resolve '`c`' given input columns: [a, b]"), e.getMessage)
}
test("runtime nullability check") {
val schema = StructType(Seq(
StructField("f", StructType(Seq(
StructField("a", StringType, nullable = true),
StructField("b", IntegerType, nullable = true)
)), nullable = true)
))
def buildDataset(rows: Row*): Dataset[NestedStruct] = {
val rowRDD = spark.sparkContext.parallelize(rows)
spark.createDataFrame(rowRDD, schema).as[NestedStruct]
}
checkDataset(
buildDataset(Row(Row("hello", 1))),
NestedStruct(ClassData("hello", 1))
)
// Shouldn't throw runtime exception when parent object (`ClassData`) is null
assert(buildDataset(Row(null)).collect() === Array(NestedStruct(null)))
val message = intercept[RuntimeException] {
buildDataset(Row(Row("hello", null))).collect()
}.getMessage
assert(message.contains("Null value appeared in non-nullable field"))
}
test("SPARK-12478: top level null field") {
val ds0 = Seq(NestedStruct(null)).toDS()
checkDataset(ds0, NestedStruct(null))
checkAnswer(ds0.toDF(), Row(null))
val ds1 = Seq(DeepNestedStruct(NestedStruct(null))).toDS()
checkDataset(ds1, DeepNestedStruct(NestedStruct(null)))
checkAnswer(ds1.toDF(), Row(Row(null)))
}
test("support inner class in Dataset") {
val outer = new OuterClass
OuterScopes.addOuterScope(outer)
val ds = Seq(outer.InnerClass("1"), outer.InnerClass("2")).toDS()
checkDataset(ds.map(_.a), "1", "2")
}
test("grouping key and grouped value has field with same name") {
val ds = Seq(ClassData("a", 1), ClassData("a", 2)).toDS()
val agged = ds.groupByKey(d => ClassNullableData(d.a, null)).mapGroups {
case (key, values) => key.a + values.map(_.b).sum
}
checkDataset(agged, "a3")
}
test("cogroup's left and right side has field with same name") {
val left = Seq(ClassData("a", 1), ClassData("b", 2)).toDS()
val right = Seq(ClassNullableData("a", 3), ClassNullableData("b", 4)).toDS()
val cogrouped = left.groupByKey(_.a).cogroup(right.groupByKey(_.a)) {
case (key, lData, rData) => Iterator(key + lData.map(_.b).sum + rData.map(_.b.toInt).sum)
}
checkDataset(cogrouped, "a13", "b24")
}
test("give nice error message when the real number of fields doesn't match encoder schema") {
val ds = Seq(ClassData("a", 1), ClassData("b", 2)).toDS()
val message = intercept[AnalysisException] {
ds.as[(String, Int, Long)]
}.message
assert(message ==
"Try to map struct<a:string,b:int> to Tuple3, " +
"but failed as the number of fields does not line up.")
val message2 = intercept[AnalysisException] {
ds.as[Tuple1[String]]
}.message
assert(message2 ==
"Try to map struct<a:string,b:int> to Tuple1, " +
"but failed as the number of fields does not line up.")
}
test("SPARK-13440: Resolving option fields") {
val df = Seq(1, 2, 3).toDS()
val ds = df.as[Option[Int]]
checkDataset(
ds.filter(_ => true),
Some(1), Some(2), Some(3))
}
test("SPARK-13540 Dataset of nested class defined in Scala object") {
checkDataset(
Seq(OuterObject.InnerClass("foo")).toDS(),
OuterObject.InnerClass("foo"))
}
test("SPARK-14000: case class with tuple type field") {
checkDataset(
Seq(TupleClass((1, "a"))).toDS(),
TupleClass(1, "a")
)
}
test("isStreaming returns false for static Dataset") {
val data = Seq(("a", 1), ("b", 2), ("c", 3)).toDS()
assert(!data.isStreaming, "static Dataset returned true for 'isStreaming'.")
}
test("isStreaming returns true for streaming Dataset") {
val data = MemoryStream[Int].toDS()
assert(data.isStreaming, "streaming Dataset returned false for 'isStreaming'.")
}
test("isStreaming returns true after static and streaming Dataset join") {
val static = Seq(("a", 1), ("b", 2), ("c", 3)).toDF("a", "b")
val streaming = MemoryStream[Int].toDS().toDF("b")
val df = streaming.join(static, Seq("b"))
assert(df.isStreaming, "streaming Dataset returned false for 'isStreaming'.")
}
test("SPARK-14554: Dataset.map may generate wrong java code for wide table") {
val wideDF = spark.range(10).select(Seq.tabulate(1000) {i => ('id + i).as(s"c$i")} : _*)
// Make sure the generated code for this plan can compile and execute.
checkDataset(wideDF.map(_.getLong(0)), 0L until 10 : _*)
}
test("SPARK-14838: estimating sizeInBytes in operators with ObjectProducer shouldn't fail") {
val dataset = Seq(
(0, 3, 54f),
(0, 4, 44f),
(0, 5, 42f),
(1, 3, 39f),
(1, 5, 33f),
(1, 4, 26f),
(2, 3, 51f),
(2, 5, 45f),
(2, 4, 30f)
).toDF("user", "item", "rating")
val actual = dataset
.select("user", "item")
.as[(Int, Int)]
.groupByKey(_._1)
.mapGroups { case (src, ids) => (src, ids.map(_._2).toArray) }
.toDF("id", "actual")
dataset.join(actual, dataset("user") === actual("id")).collect()
}
test("SPARK-15097: implicits on dataset's spark can be imported") {
val dataset = Seq(1, 2, 3).toDS()
checkDataset(DatasetTransform.addOne(dataset), 2, 3, 4)
}
test("dataset.rdd with generic case class") {
val ds = Seq(Generic(1, 1.0), Generic(2, 2.0)).toDS()
val ds2 = ds.map(g => Generic(g.id, g.value))
assert(ds.rdd.map(r => r.id).count === 2)
assert(ds2.rdd.map(r => r.id).count === 2)
val ds3 = ds.map(g => new java.lang.Long(g.id))
assert(ds3.rdd.map(r => r).count === 2)
}
test("runtime null check for RowEncoder") {
val schema = new StructType().add("i", IntegerType, nullable = false)
val df = spark.range(10).map(l => {
if (l % 5 == 0) {
Row(null)
} else {
Row(l)
}
})(RowEncoder(schema))
val message = intercept[Exception] {
df.collect()
}.getMessage
assert(message.contains("The 0th field 'i' of input row cannot be null"))
}
test("row nullability mismatch") {
val schema = new StructType().add("a", StringType, true).add("b", StringType, false)
val rdd = spark.sparkContext.parallelize(Row(null, "123") :: Row("234", null) :: Nil)
val message = intercept[Exception] {
spark.createDataFrame(rdd, schema).collect()
}.getMessage
assert(message.contains("The 1th field 'b' of input row cannot be null"))
}
test("createTempView") {
val dataset = Seq(1, 2, 3).toDS()
dataset.createOrReplaceTempView("tempView")
// Overrides the existing temporary view with same name
// No exception should be thrown here.
dataset.createOrReplaceTempView("tempView")
// Throws AnalysisException if temp view with same name already exists
val e = intercept[AnalysisException](
dataset.createTempView("tempView"))
intercept[AnalysisException](dataset.createTempView("tempView"))
assert(e.message.contains("already exists"))
dataset.sparkSession.catalog.dropTempView("tempView")
}
test("SPARK-15381: physical object operator should define `reference` correctly") {
val df = Seq(1 -> 2).toDF("a", "b")
checkAnswer(df.map(row => row)(RowEncoder(df.schema)).select("b", "a"), Row(2, 1))
}
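  // `show` output has 4 non-data lines (top border, header row, header
  // separator, bottom border), so the expected row count is the number of
  // lines in `expected` minus 4.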
private def checkShowString[T](ds: Dataset[T], expected: String): Unit = {
val numRows = expected.split("\\n").length - 4
val actual = ds.showString(numRows, truncate = 20)
if (expected != actual) {
fail(
"Dataset.showString() gives wrong result:\\n\\n" + sideBySide(
"== Expected ==\\n" + expected,
"== Actual ==\\n" + actual
).mkString("\\n")
)
}
}
test("SPARK-15550 Dataset.show() should show contents of the underlying logical plan") {
val df = Seq((1, "foo", "extra"), (2, "bar", "extra")).toDF("b", "a", "c")
val ds = df.as[ClassData]
val expected =
"""+---+---+-----+
|| b| a| c|
|+---+---+-----+
|| 1|foo|extra|
|| 2|bar|extra|
|+---+---+-----+
|""".stripMargin
checkShowString(ds, expected)
}
test("SPARK-15550 Dataset.show() should show inner nested products as rows") {
val ds = Seq(
NestedStruct(ClassData("foo", 1)),
NestedStruct(ClassData("bar", 2))
).toDS()
val expected =
"""+-------+
|| f|
|+-------+
||[foo,1]|
||[bar,2]|
|+-------+
|""".stripMargin
checkShowString(ds, expected)
}
test(
"SPARK-15112: EmbedDeserializerInFilter should not optimize plan fragment that changes schema"
) {
val ds = Seq(1 -> "foo", 2 -> "bar").toDF("b", "a").as[ClassData]
assertResult(Seq(ClassData("foo", 1), ClassData("bar", 2))) {
ds.collect().toSeq
}
assertResult(Seq(ClassData("bar", 2))) {
ds.filter(_.b > 1).collect().toSeq
}
}
test("mapped dataset should resolve duplicated attributes for self join") {
val ds = Seq(1, 2, 3).toDS().map(_ + 1)
val ds1 = ds.as("d1")
val ds2 = ds.as("d2")
checkDatasetUnorderly(ds1.joinWith(ds2, $"d1.value" === $"d2.value"), (2, 2), (3, 3), (4, 4))
checkDatasetUnorderly(ds1.intersect(ds2), 2, 3, 4)
checkDatasetUnorderly(ds1.except(ds1))
}
test("SPARK-15441: Dataset outer join") {
val left = Seq(ClassData("a", 1), ClassData("b", 2)).toDS().as("left")
val right = Seq(ClassData("x", 2), ClassData("y", 3)).toDS().as("right")
val joined = left.joinWith(right, $"left.b" === $"right.b", "left")
val result = joined.collect().toSet
assert(result == Set(ClassData("a", 1) -> null, ClassData("b", 2) -> ClassData("x", 2)))
}
test("better error message when use java reserved keyword as field name") {
val e = intercept[UnsupportedOperationException] {
Seq(InvalidInJava(1)).toDS()
}
assert(e.getMessage.contains(
"`abstract` is a reserved keyword and cannot be used as field name"))
}
test("Dataset should support flat input object to be null") {
checkDataset(Seq("a", null).toDS(), "a", null)
}
test("Dataset should throw RuntimeException if top-level product input object is null") {
val e = intercept[RuntimeException](Seq(ClassData("a", 1), null).toDS())
assert(e.getMessage.contains("Null value appeared in non-nullable field"))
assert(e.getMessage.contains("top level Product input object"))
}
test("dropDuplicates") {
val ds = Seq(("a", 1), ("a", 2), ("b", 1), ("a", 1)).toDS()
checkDataset(
ds.dropDuplicates("_1"),
("a", 1), ("b", 1))
checkDataset(
ds.dropDuplicates("_2"),
("a", 1), ("a", 2))
checkDataset(
ds.dropDuplicates("_1", "_2"),
("a", 1), ("a", 2), ("b", 1))
}
test("dropDuplicates: columns with same column name") {
val ds1 = Seq(("a", 1), ("a", 2), ("b", 1), ("a", 1)).toDS()
val ds2 = Seq(("a", 1), ("a", 2), ("b", 1), ("a", 1)).toDS()
    // The joined dataset has two columns with the same name "_2".
val joined = ds1.join(ds2, "_1").select(ds1("_2").as[Int], ds2("_2").as[Int])
checkDataset(
joined.dropDuplicates(),
(1, 2), (1, 1), (2, 1), (2, 2))
}
test("SPARK-16097: Encoders.tuple should handle null object correctly") {
val enc = Encoders.tuple(Encoders.tuple(Encoders.STRING, Encoders.STRING), Encoders.STRING)
val data = Seq((("a", "b"), "c"), (null, "d"))
val ds = spark.createDataset(data)(enc)
checkDataset(ds, (("a", "b"), "c"), (null, "d"))
}
test("SPARK-16995: flat mapping on Dataset containing a column created with lit/expr") {
val df = Seq("1").toDF("a")
import df.sparkSession.implicits._
checkDataset(
df.withColumn("b", lit(0)).as[ClassData]
.groupByKey(_.a).flatMapGroups { case (x, iter) => List[Int]() })
checkDataset(
df.withColumn("b", expr("0")).as[ClassData]
.groupByKey(_.a).flatMapGroups { case (x, iter) => List[Int]() })
}
test("SPARK-18125: Spark generated code causes CompileException") {
val data = Array(
Route("a", "b", 1),
Route("a", "b", 2),
Route("a", "c", 2),
Route("a", "d", 10),
Route("b", "a", 1),
Route("b", "a", 5),
Route("b", "c", 6))
val ds = sparkContext.parallelize(data).toDF.as[Route]
val grped = ds.map(r => GroupedRoutes(r.src, r.dest, Seq(r)))
.groupByKey(r => (r.src, r.dest))
.reduceGroups { (g1: GroupedRoutes, g2: GroupedRoutes) =>
GroupedRoutes(g1.src, g1.dest, g1.routes ++ g2.routes)
}.map(_._2)
val expected = Seq(
GroupedRoutes("a", "d", Seq(Route("a", "d", 10))),
GroupedRoutes("b", "c", Seq(Route("b", "c", 6))),
GroupedRoutes("a", "b", Seq(Route("a", "b", 1), Route("a", "b", 2))),
GroupedRoutes("b", "a", Seq(Route("b", "a", 1), Route("b", "a", 5))),
GroupedRoutes("a", "c", Seq(Route("a", "c", 2)))
)
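    // Note: the type parameter below shadows the GroupedRoutes case class, so
    // this is really a generic toString-based ordering, used only to sort the
    // results for the unordered comparison.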
implicit def ordering[GroupedRoutes]: Ordering[GroupedRoutes] = new Ordering[GroupedRoutes] {
override def compare(x: GroupedRoutes, y: GroupedRoutes): Int = {
x.toString.compareTo(y.toString)
}
}
checkDatasetUnorderly(grped, expected: _*)
}
test("SPARK-18189: Fix serialization issue in KeyValueGroupedDataset") {
val resultValue = 12345
val keyValueGrouped = Seq((1, 2), (3, 4)).toDS().groupByKey(_._1)
val mapGroups = keyValueGrouped.mapGroups((k, v) => (k, 1))
val broadcasted = spark.sparkContext.broadcast(resultValue)
    // Using a broadcast variable triggers the serialization issue in KeyValueGroupedDataset
val dataset = mapGroups.map(_ => broadcasted.value)
assert(dataset.collect() sameElements Array(resultValue, resultValue))
}
test("SPARK-18284: Serializer should have correct nullable value") {
val df1 = Seq(1, 2, 3, 4).toDF
assert(df1.schema(0).nullable == false)
val df2 = Seq(Integer.valueOf(1), Integer.valueOf(2)).toDF
assert(df2.schema(0).nullable == true)
val df3 = Seq(Seq(1, 2), Seq(3, 4)).toDF
assert(df3.schema(0).nullable == true)
assert(df3.schema(0).dataType.asInstanceOf[ArrayType].containsNull == false)
val df4 = Seq(Seq("a", "b"), Seq("c", "d")).toDF
assert(df4.schema(0).nullable == true)
assert(df4.schema(0).dataType.asInstanceOf[ArrayType].containsNull == true)
val df5 = Seq((0, 1.0), (2, 2.0)).toDF("id", "v")
assert(df5.schema(0).nullable == false)
assert(df5.schema(1).nullable == false)
val df6 = Seq((0, 1.0, "a"), (2, 2.0, "b")).toDF("id", "v1", "v2")
assert(df6.schema(0).nullable == false)
assert(df6.schema(1).nullable == false)
assert(df6.schema(2).nullable == true)
val df7 = (Tuple1(Array(1, 2, 3)) :: Nil).toDF("a")
assert(df7.schema(0).nullable == true)
assert(df7.schema(0).dataType.asInstanceOf[ArrayType].containsNull == false)
val df8 = (Tuple1(Array((null: Integer), (null: Integer))) :: Nil).toDF("a")
assert(df8.schema(0).nullable == true)
assert(df8.schema(0).dataType.asInstanceOf[ArrayType].containsNull == true)
val df9 = (Tuple1(Map(2 -> 3)) :: Nil).toDF("m")
assert(df9.schema(0).nullable == true)
assert(df9.schema(0).dataType.asInstanceOf[MapType].valueContainsNull == false)
val df10 = (Tuple1(Map(1 -> (null: Integer))) :: Nil).toDF("m")
assert(df10.schema(0).nullable == true)
assert(df10.schema(0).dataType.asInstanceOf[MapType].valueContainsNull == true)
val df11 = Seq(TestDataPoint(1, 2.2, "a", null),
TestDataPoint(3, 4.4, "null", (TestDataPoint2(33, "b")))).toDF
assert(df11.schema(0).nullable == false)
assert(df11.schema(1).nullable == false)
assert(df11.schema(2).nullable == true)
assert(df11.schema(3).nullable == true)
assert(df11.schema(3).dataType.asInstanceOf[StructType].fields(0).nullable == false)
assert(df11.schema(3).dataType.asInstanceOf[StructType].fields(1).nullable == true)
}
Seq(true, false).foreach { eager =>
def testCheckpointing(testName: String)(f: => Unit): Unit = {
test(s"Dataset.checkpoint() - $testName (eager = $eager)") {
withTempDir { dir =>
val originalCheckpointDir = spark.sparkContext.checkpointDir
try {
spark.sparkContext.setCheckpointDir(dir.getCanonicalPath)
f
} finally {
// Since the original checkpointDir can be None, we need
// to set the variable directly.
spark.sparkContext.checkpointDir = originalCheckpointDir
}
}
}
}
testCheckpointing("basic") {
val ds = spark.range(10).repartition('id % 2).filter('id > 5).orderBy('id.desc)
val cp = ds.checkpoint(eager)
val logicalRDD = cp.logicalPlan match {
case plan: LogicalRDD => plan
case _ =>
val treeString = cp.logicalPlan.treeString(verbose = true)
fail(s"Expecting a LogicalRDD, but got\\n$treeString")
}
val dsPhysicalPlan = ds.queryExecution.executedPlan
val cpPhysicalPlan = cp.queryExecution.executedPlan
assertResult(dsPhysicalPlan.outputPartitioning) { logicalRDD.outputPartitioning }
assertResult(dsPhysicalPlan.outputOrdering) { logicalRDD.outputOrdering }
assertResult(dsPhysicalPlan.outputPartitioning) { cpPhysicalPlan.outputPartitioning }
assertResult(dsPhysicalPlan.outputOrdering) { cpPhysicalPlan.outputOrdering }
// For a lazy checkpoint() call, the first check also materializes the checkpoint.
checkDataset(cp, (9L to 6L by -1L).map(java.lang.Long.valueOf): _*)
      // Read back from the checkpointed data and check again.
checkDataset(cp, (9L to 6L by -1L).map(java.lang.Long.valueOf): _*)
}
testCheckpointing("should preserve partitioning information") {
val ds = spark.range(10).repartition('id % 2)
val cp = ds.checkpoint(eager)
val agg = cp.groupBy('id % 2).agg(count('id))
agg.queryExecution.executedPlan.collectFirst {
case ShuffleExchange(_, _: RDDScanExec, _) =>
case BroadcastExchangeExec(_, _: RDDScanExec) =>
}.foreach { _ =>
fail(
"No Exchange should be inserted above RDDScanExec since the checkpointed Dataset " +
"preserves partitioning information:\\n\\n" + agg.queryExecution
)
}
checkAnswer(agg, ds.groupBy('id % 2).agg(count('id)))
}
}
test("identity map for primitive arrays") {
val arrayByte = Array(1.toByte, 2.toByte, 3.toByte)
val arrayInt = Array(1, 2, 3)
val arrayLong = Array(1.toLong, 2.toLong, 3.toLong)
val arrayDouble = Array(1.1, 2.2, 3.3)
val arrayString = Array("a", "b", "c")
val dsByte = sparkContext.parallelize(Seq(arrayByte), 1).toDS.map(e => e)
val dsInt = sparkContext.parallelize(Seq(arrayInt), 1).toDS.map(e => e)
val dsLong = sparkContext.parallelize(Seq(arrayLong), 1).toDS.map(e => e)
val dsDouble = sparkContext.parallelize(Seq(arrayDouble), 1).toDS.map(e => e)
val dsString = sparkContext.parallelize(Seq(arrayString), 1).toDS.map(e => e)
checkDataset(dsByte, arrayByte)
checkDataset(dsInt, arrayInt)
checkDataset(dsLong, arrayLong)
checkDataset(dsDouble, arrayDouble)
checkDataset(dsString, arrayString)
}
test("SPARK-18251: the type of Dataset can't be Option of Product type") {
checkDataset(Seq(Some(1), None).toDS(), Some(1), None)
val e = intercept[UnsupportedOperationException] {
Seq(Some(1 -> "a"), None).toDS()
}
assert(e.getMessage.contains("Cannot create encoder for Option of Product type"))
}
test ("SPARK-17460: the sizeInBytes in Statistics shouldn't overflow to a negative number") {
// Since the sizeInBytes in Statistics could exceed the limit of an Int, we should use BigInt
// instead of Int for avoiding possible overflow.
val ds = (0 to 10000).map( i =>
(i, Seq((i, Seq((i, "This is really not that long of a string")))))).toDS()
val sizeInBytes = ds.logicalPlan.stats(sqlConf).sizeInBytes
// sizeInBytes is 2404280404, before the fix, it overflows to a negative number
assert(sizeInBytes > 0)
}
test("SPARK-18717: code generation works for both scala.collection.Map" +
" and scala.collection.imutable.Map") {
val ds = Seq(WithImmutableMap("hi", Map(42L -> "foo"))).toDS
checkDataset(ds.map(t => t), WithImmutableMap("hi", Map(42L -> "foo")))
val ds2 = Seq(WithMap("hi", Map(42L -> "foo"))).toDS
checkDataset(ds2.map(t => t), WithMap("hi", Map(42L -> "foo")))
}
test("SPARK-18746: add implicit encoder for BigDecimal, date, timestamp") {
// For this implicit encoder, 18 is the default scale
assert(spark.range(1).map { x => new java.math.BigDecimal(1) }.head ==
new java.math.BigDecimal(1).setScale(18))
assert(spark.range(1).map { x => scala.math.BigDecimal(1, 18) }.head ==
scala.math.BigDecimal(1, 18))
assert(spark.range(1).map { x => new java.sql.Date(2016, 12, 12) }.head ==
new java.sql.Date(2016, 12, 12))
assert(spark.range(1).map { x => new java.sql.Timestamp(100000) }.head ==
new java.sql.Timestamp(100000))
}
test("SPARK-19896: cannot have circular references in in case class") {
val errMsg1 = intercept[UnsupportedOperationException] {
Seq(CircularReferenceClassA(null)).toDS
}
assert(errMsg1.getMessage.startsWith("cannot have circular references in class, but got the " +
"circular reference of class"))
val errMsg2 = intercept[UnsupportedOperationException] {
Seq(CircularReferenceClassC(null)).toDS
}
assert(errMsg2.getMessage.startsWith("cannot have circular references in class, but got the " +
"circular reference of class"))
val errMsg3 = intercept[UnsupportedOperationException] {
Seq(CircularReferenceClassD(null)).toDS
}
assert(errMsg3.getMessage.startsWith("cannot have circular references in class, but got the " +
"circular reference of class"))
}
test("SPARK-20125: option of map") {
val ds = Seq(WithMapInOption(Some(Map(1 -> 1)))).toDS()
checkDataset(ds, WithMapInOption(Some(Map(1 -> 1))))
}
test("SPARK-20399: do not unescaped regex pattern when ESCAPED_STRING_LITERALS is enabled") {
withSQLConf(SQLConf.ESCAPED_STRING_LITERALS.key -> "true") {
val data = Seq("\\u0020\\u0021\\u0023", "abc")
val df = data.toDF()
val rlike1 = df.filter("value rlike '^\\\\x20[\\\\x20-\\\\x23]+$'")
val rlike2 = df.filter($"value".rlike("^\\\\x20[\\\\x20-\\\\x23]+$"))
val rlike3 = df.filter("value rlike '^\\\\\\\\x20[\\\\\\\\x20-\\\\\\\\x23]+$'")
checkAnswer(rlike1, rlike2)
assert(rlike3.count() == 0)
}
}
test("SPARK-21538: Attribute resolution inconsistency in Dataset API") {
val df = spark.range(3).withColumnRenamed("id", "x")
val expected = Row(0) :: Row(1) :: Row(2) :: Nil
checkAnswer(df.sort("id"), expected)
checkAnswer(df.sort(col("id")), expected)
checkAnswer(df.sort($"id"), expected)
checkAnswer(df.sort('id), expected)
checkAnswer(df.orderBy("id"), expected)
checkAnswer(df.orderBy(col("id")), expected)
checkAnswer(df.orderBy($"id"), expected)
checkAnswer(df.orderBy('id), expected)
}
}
case class WithImmutableMap(id: String, map_test: scala.collection.immutable.Map[Long, String])
case class WithMap(id: String, map_test: scala.collection.Map[Long, String])
case class WithMapInOption(m: Option[scala.collection.Map[Int, Int]])
case class Generic[T](id: T, value: Double)
case class OtherTuple(_1: String, _2: Int)
case class TupleClass(data: (Int, String))
class OuterClass extends Serializable {
case class InnerClass(a: String)
}
object OuterObject {
case class InnerClass(a: String)
}
case class ClassData(a: String, b: Int)
case class ClassData2(c: String, d: Int)
case class ClassNullableData(a: String, b: Integer)
case class NestedStruct(f: ClassData)
case class DeepNestedStruct(f: NestedStruct)
case class InvalidInJava(`abstract`: Int)
/**
* A class used to test serialization using encoders. This class throws exceptions when using
* Java serialization -- so the only way it can be "serialized" is through our encoders.
*/
case class NonSerializableCaseClass(value: String) extends Externalizable {
override def readExternal(in: ObjectInput): Unit = {
throw new UnsupportedOperationException
}
override def writeExternal(out: ObjectOutput): Unit = {
throw new UnsupportedOperationException
}
}
/** Used to test Kryo encoder. */
class KryoData(val a: Int) {
override def equals(other: Any): Boolean = {
a == other.asInstanceOf[KryoData].a
}
override def hashCode: Int = a
override def toString: String = s"KryoData($a)"
}
object KryoData {
def apply(a: Int): KryoData = new KryoData(a)
}
/** Used to test Java encoder. */
class JavaData(val a: Int) extends Serializable {
override def equals(other: Any): Boolean = {
a == other.asInstanceOf[JavaData].a
}
override def hashCode: Int = a
override def toString: String = s"JavaData($a)"
}
object JavaData {
def apply(a: Int): JavaData = new JavaData(a)
}
/** Used to test importing dataset.spark.implicits._ */
object DatasetTransform {
def addOne(ds: Dataset[Int]): Dataset[Int] = {
import ds.sparkSession.implicits._
ds.map(_ + 1)
}
}
case class Route(src: String, dest: String, cost: Int)
case class GroupedRoutes(src: String, dest: String, routes: Seq[Route])
case class CircularReferenceClassA(cls: CircularReferenceClassB)
case class CircularReferenceClassB(cls: CircularReferenceClassA)
case class CircularReferenceClassC(ar: Array[CircularReferenceClassC])
case class CircularReferenceClassD(map: Map[String, CircularReferenceClassE])
case class CircularReferenceClassE(id: String, list: List[CircularReferenceClassD])
|
jlopezmalla/spark
|
sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala
|
Scala
|
apache-2.0
| 45,142 |
package toguru.impl
import org.mockito.scalatest.IdiomaticMockito
import org.scalatest.matchers.must.Matchers
import org.scalatest.wordspec.AnyWordSpec
import toguru.api.{Condition, Toggle}
class ToggleStateSpec extends AnyWordSpec with Matchers with IdiomaticMockito {
def activation(rollout: Option[Rollout] = None, attrs: Map[String, Seq[String]] = Map.empty) =
Seq(ToggleActivation(rollout, attrs))
def rollout(r: Int) = Some(Rollout(r))
val toggles = List(
ToggleState("toggle1", Map("services" -> "toguru"), activation(rollout(30))),
ToggleState("toggle-2", Map.empty[String, String], activation(rollout(100))),
ToggleState(
"toggle-4",
Map.empty[String, String],
activation(attrs = Map("culture" -> Seq("DE", "de-DE"), "version" -> Seq("1", "2")))
),
ToggleState(
"toggle-5",
Map.empty[String, String],
activation(rollout(30), attrs = Map("culture" -> Seq("DE", "de-DE"), "version" -> Seq("1", "2")))
),
ToggleState("toggle6", Map("services" -> "my-service,another-service"), activation(rollout(15))),
ToggleState("toggle7", Map("services" -> "my-service "), activation(rollout(1))),
ToggleState("toggle8", Map("service" -> " my-service"), activation(rollout(100))),
ToggleState("toggle9", Map("services" -> "another-service,yet-another-service,my-service"), activation()),
ToggleState("toggle10", Map("services" -> "another-service, my-service, yet-another-service"), activation())
)
"ToggleState.apply" should {
"transform activations into conditions" in {
val condition = toggles(0).condition
condition mustBe a[UuidDistributionCondition]
val uuidCondition = condition.asInstanceOf[UuidDistributionCondition]
uuidCondition.ranges mustBe List(1 to 30)
}
"Adds AlwayOffCondition if only attribute contitions are given" in {
val condition = toggles(2).condition
condition mustBe All(
Set(AlwaysOffCondition, Attribute("culture", Seq("DE", "de-DE")), Attribute("version", Seq("1", "2")))
)
}
"transform combinations of rollout and attributes to conditions" in {
val condition = toggles(3).condition
condition mustBe All(
Set(
UuidDistributionCondition(List(1 to 30), UuidDistributionCondition.defaultUuidToIntProjection),
Attribute("culture", Seq("DE", "de-DE")),
Attribute("version", Seq("1", "2"))
)
)
}
}
"ToggleState.activations" should {
val activations = new ToggleStateActivations(ToggleStates(Some(10), toggles))
"return toggle conditions for services" in {
val toguruToggles = activations.togglesFor("toguru")
toguruToggles must have size 1
toguruToggles.keySet mustBe Set("toggle1")
val condition = toguruToggles("toggle1")
condition mustBe a[UuidDistributionCondition]
val uuidCondition = condition.asInstanceOf[UuidDistributionCondition]
uuidCondition.ranges mustBe List(1 to 30)
}
"return all toggle activations for a service" in {
val toguruToggles = activations.togglesFor("my-service")
toguruToggles must have size 5
toguruToggles.keySet mustBe Set("toggle6", "toggle7", "toggle8", "toggle9", "toggle10")
}
"return toggle default conditions if toggle is unknown" in {
val condition = mock[Condition]
val toggle = Toggle("toggle-3", condition)
activations.apply(toggle) mustBe condition
}
"apply should return togglestates" in {
activations() mustBe toggles
}
}
}
|
AutoScout24/toguru-scala-client
|
core/src/test/scala/toguru/impl/ToggleStateSpec.scala
|
Scala
|
mit
| 3,575 |
package scalaz.stream
import org.scalacheck._
import org.scalacheck.Prop._
import scalaz.Monoid
import scalaz.std.anyVal._
import scalaz.std.list._
import scalaz.std.list.listSyntax._
import scalaz.std.vector._
import scalaz.std.string._
import scalaz.syntax.equal._
import scalaz.syntax.foldable._
import Process._
import process1._
import TestInstances._
object Process1Spec extends Properties("process1") {
property("all") = forAll { (pi: Process0[Int], ps: Process0[String], n: Int) =>
val li = pi.toList
val ls = ps.toList
val g = (x: Int) => x % 7 === 0
val pf : PartialFunction[Int,Int] = { case x : Int if x % 2 === 0 => x}
val sm = Monoid[String]
("buffer" |: {
pi.buffer(4).toList === li
}) &&
("collect" |: {
pi.collect(pf).toList === li.collect(pf)
}) &&
("collectFirst" |: {
pi.collectFirst(pf).toList === li.collectFirst(pf).toList
}) &&
("drop" |: {
pi.drop(n).toList === li.drop(n)
}) &&
("dropLast" |: {
pi.dropLast.toList === li.dropRight(1)
}) &&
("dropLastIf" |: {
val pred = (_: Int) % 2 === 0
val n = if (li.lastOption.map(pred).getOrElse(false)) 1 else 0
pi.dropLastIf(pred).toList === li.dropRight(n) &&
pi.dropLastIf(_ => false).toList === li
}) &&
("dropWhile" |: {
pi.dropWhile(g).toList === li.dropWhile(g)
}) &&
("exists" |: {
pi.exists(g).toList === List(li.exists(g))
}) &&
("find" |: {
pi.find(_ % 2 === 0).toList === li.find(_ % 2 === 0).toList
}) &&
("filter" |: {
pi.filter(g).toList === li.filter(g)
}) &&
("fold" |: {
pi.fold(0)(_ + _).toList === List(li.fold(0)(_ + _))
}) &&
("foldMap" |: {
pi.foldMap(_.toString).toList.lastOption.toList === List(li.map(_.toString).fold(sm.zero)(sm.append(_,_)))
}) &&
("forall" |: {
pi.forall(g).toList === List(li.forall(g))
}) &&
("id" |: {
((pi |> id) === pi) && ((id |> pi) === pi)
}) &&
("intersperse" |: {
pi.intersperse(0).toList === li.intersperse(0)
}) &&
("lastOr" |: {
pi.lastOr(42).toList.head === li.lastOption.getOrElse(42)
}) &&
("maximum" |: {
pi.maximum.toList === li.maximum.toList
}) &&
("maximumBy" |: {
// enable when switching to scalaz 7.1
//ps.maximumBy(_.length).toList === ls.maximumBy(_.length).toList
true
}) &&
("maximumOf" |: {
ps.maximumOf(_.length).toList === ls.map(_.length).maximum.toList
}) &&
("minimum" |: {
pi.minimum.toList === li.minimum.toList
}) &&
("minimumBy" |: {
// enable when switching to scalaz 7.1
//ps.minimumBy(_.length).toList === ls.minimumBy(_.length).toList
true
}) &&
("minimumOf" |: {
ps.minimumOf(_.length).toList === ls.map(_.length).minimum.toList
}) &&
("reduce" |: {
pi.reduce(_ + _).toList === (if (li.nonEmpty) List(li.reduce(_ + _)) else List())
}) &&
("scan" |: {
li.scan(0)(_ - _) ===
pi.toSource.scan(0)(_ - _).runLog.timed(3000).run.toList
}) &&
("scan1" |: {
li.scan(0)(_ + _).tail ===
pi.toSource.scan1(_ + _).runLog.timed(3000).run.toList
}) &&
("shiftRight" |: {
pi.shiftRight(1, 2).toList === List(1, 2) ++ li
}) &&
("splitWith" |: {
pi.splitWith(_ < n).toList.map(_.toList) === li.splitWith(_ < n)
}) &&
("sum" |: {
pi.toList.sum[Int] ===
pi.toSource.pipe(process1.sum).runLast.timed(3000).run.get
}) &&
("prefixSums" |: {
pi.toList.scan(0)(_ + _) ===
pi.toSource.pipe(process1.prefixSums).runLog.run.toList
}) &&
("take" |: {
pi.take(n).toList === li.take(n)
}) &&
("takeWhile" |: {
pi.takeWhile(g).toList === li.takeWhile(g)
}) &&
("zipWithIndex" |: {
ps.zipWithIndex.toList === ls.zipWithIndex
}) &&
("zipWithIndex[Double]" |: {
ps.zipWithIndex[Double].toList === ls.zipWithIndex.map { case (s, i) => (s, i.toDouble) }
})
}
property("awaitOption") = secure {
Process().awaitOption.toList == List(None) &&
Process(1, 2).awaitOption.toList == List(Some(1))
}
property("chunk") = secure {
Process(0, 1, 2, 3, 4).chunk(2).toList === List(Vector(0, 1), Vector(2, 3), Vector(4))
}
property("chunkBy") = secure {
emitSeq("foo bar baz").chunkBy(_ != ' ').toList.map(_.mkString) ==
List("foo ", "bar ", "baz")
}
property("chunkBy2") = secure {
val s = Process(3, 5, 4, 3, 1, 2, 6)
s.chunkBy2(_ < _).toList === List(Vector(3, 5), Vector(4), Vector(3), Vector(1, 2, 6)) &&
s.chunkBy2(_ > _).toList === List(Vector(3), Vector(5, 4, 3, 1), Vector(2), Vector(6))
}
property("unchunk") = forAll { pi: Process0[List[Int]] =>
pi.pipe(unchunk).toList == pi.toList.flatten
}
property("last") = secure {
var i = 0
Process.range(0, 10).last.map(_ => i += 1).runLog.run
i === 1
}
property("repartition") = secure {
Process("Lore", "m ip", "sum dolo", "r sit amet").repartition(_.split(" ")).toList ==
List("Lorem", "ipsum", "dolor", "sit", "amet") &&
Process("hel", "l", "o Wor", "ld").repartition(_.grouped(2).toVector).toList ==
List("he", "ll", "o ", "Wo", "rl", "d") &&
Process(1, 2, 3, 4, 5).repartition(i => Vector(i, i)).toList ==
List(1, 3, 6, 10, 15, 15) &&
Process[String]().repartition(_ => Vector()).toList.isEmpty &&
Process("hello").repartition(_ => Vector()).toList.isEmpty
}
property("repartition2") = secure {
Process("he", "ll", "o").repartition2(s => (Some(s), None)).toList ===
List("he", "ll", "o") &&
Process("he", "ll", "o").repartition2(s => (None, Some(s))).toList ===
List("hello") &&
Process("he", "ll", "o").repartition2 {
s => (Some(s.take(1)), Some(s.drop(1)))
}.toList === List("h", "e", "l", "lo")
}
property("splitOn") = secure {
Process(0, 1, 2, 3, 4).splitOn(2).toList === List(Vector(0, 1), Vector(3, 4)) &&
Process(2, 0, 1, 2).splitOn(2).toList === List(Vector(), Vector(0, 1), Vector()) &&
Process(2, 2).splitOn(2).toList === List(Vector(), Vector(), Vector())
}
property("stripNone") = secure {
Process(None, Some(1), None, Some(2), None).pipe(stripNone).toList === List(1, 2)
}
property("terminated") = secure {
Process(1, 2, 3).terminated.toList === List(Some(1), Some(2), Some(3), None)
}
property("window") = secure {
def window(n: Int) = Process.range(0, 5).window(n).runLog.run.toList
window(1) === List(Vector(0), Vector(1), Vector(2), Vector(3), Vector(4), Vector()) &&
window(2) === List(Vector(0, 1), Vector(1, 2), Vector(2, 3), Vector(3, 4), Vector(4)) &&
window(3) === List(Vector(0, 1, 2), Vector(1, 2, 3), Vector(2, 3, 4), Vector(3, 4))
}
}
|
jedws/scalaz-stream
|
src/test/scala/scalaz/stream/Process1Spec.scala
|
Scala
|
mit
| 6,803 |
import stainless.collection._
import stainless.annotation._
object InductTacticTest {
case class SharedState(rqs: BigInt => List[BigInt])
@induct
def isTree(s:SharedState, content:List[BigInt], id:BigInt):Boolean = {
require(content.forall(e => e < id && e >= 0))
content match {
case Nil() => true
case Cons(t, ts) =>
isTree(s, ts, id) && isTree(s, s.rqs(t), t)
}
}
}
|
epfl-lara/stainless
|
frontends/benchmarks/verification/unchecked/InductTacticTest.scala
|
Scala
|
apache-2.0
| 415 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.nodes.physical.stream
import org.apache.flink.api.dag.Transformation
import org.apache.flink.table.dataformat.BaseRow
import org.apache.flink.table.planner.codegen.ValuesCodeGenerator
import org.apache.flink.table.planner.delegation.StreamPlanner
import org.apache.flink.table.planner.plan.nodes.exec.{ExecNode, StreamExecNode}
import com.google.common.collect.ImmutableList
import org.apache.calcite.plan._
import org.apache.calcite.rel.RelNode
import org.apache.calcite.rel.`type`.RelDataType
import org.apache.calcite.rel.core.Values
import org.apache.calcite.rex.RexLiteral
import java.util
/**
* Stream physical RelNode for [[Values]].
*/
class StreamExecValues(
cluster: RelOptCluster,
traitSet: RelTraitSet,
tuples: ImmutableList[ImmutableList[RexLiteral]],
outputRowType: RelDataType)
extends Values(cluster, outputRowType, tuples, traitSet)
with StreamPhysicalRel
with StreamExecNode[BaseRow] {
override def requireWatermark: Boolean = false
override def deriveRowType(): RelDataType = outputRowType
override def copy(traitSet: RelTraitSet, inputs: java.util.List[RelNode]): RelNode = {
new StreamExecValues(cluster, traitSet, getTuples, outputRowType)
}
//~ ExecNode methods -----------------------------------------------------------
override def getInputNodes: util.List[ExecNode[StreamPlanner, _]] = {
new util.ArrayList[ExecNode[StreamPlanner, _]]()
}
override def replaceInputNode(
ordinalInParent: Int,
newInputNode: ExecNode[StreamPlanner, _]): Unit = {
replaceInput(ordinalInParent, newInputNode.asInstanceOf[RelNode])
}
override protected def translateToPlanInternal(
planner: StreamPlanner): Transformation[BaseRow] = {
val inputFormat = ValuesCodeGenerator.generatorInputFormat(
planner.getTableConfig,
getRowType,
tuples,
getRelTypeName)
val transformation = planner.getExecEnv.createInput(inputFormat,
inputFormat.getProducedType).getTransformation
transformation.setName(getRelDetailedDescription)
transformation.setParallelism(1)
transformation.setMaxParallelism(1)
transformation
}
}
|
bowenli86/flink
|
flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/nodes/physical/stream/StreamExecValues.scala
|
Scala
|
apache-2.0
| 3,005 |
// Copyright: 2010 - 2018 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.indexer.lucene
import org.apache.lucene.analysis.Analyzer.TokenStreamComponents
import org.apache.lucene.analysis.{ Analyzer, TokenFilter, TokenStream }
import org.apache.lucene.analysis.core.KeywordTokenizer
import org.apache.lucene.analysis.tokenattributes.{
CharTermAttribute,
PositionIncrementAttribute
}
import org.apache.lucene.util.AttributeSource.State
import org.ensime.indexer.lucene.DynamicSynonymFilter._
/**
* `Analyzer` that does no additional analysis (not even lowercasing) other
* than emitting the term itself and its synonyms.
*/
trait DynamicSynonymAnalyzer extends Analyzer with SynonymEngine {
override final def createComponents(
fieldName: String
): TokenStreamComponents = {
val source = new KeywordTokenizer()
val result = new DynamicSynonymFilter(source, this)
new TokenStreamComponents(source, result)
}
}
object DynamicSynonymFilter {
trait SynonymEngine {
/** @return the synonyms of `term` (`term` should not be in the list) */
def synonyms(term: String): Set[String]
}
}
/**
* Splits tokens into synonyms at the same position, taking in a
* simple map from a String to a list of its synonyms (which doesn't
* need to contain the original token).
*
* This has been heavily influenced by SynonymFilter from "Lucene in
* Action" and upgraded for Lucene 4 because bundled
* ```org.apache.lucene.analysis.synonym.SynonymFilter``` requires the
* mappings to be built up in advance.
*
* Apologies for all the mutable state: we're interacting with a
* mutable Java API.
*/
class DynamicSynonymFilter(input: TokenStream, engine: SynonymEngine)
extends TokenFilter(input) {
private val termAtt = addAttribute(classOf[CharTermAttribute])
private val posIncrAtt = addAttribute(classOf[PositionIncrementAttribute])
private var stack: List[String] = Nil
private var current: State = _
// return false at end of stream
override def incrementToken(): Boolean = {
if (stack.nonEmpty) {
val synonym = stack.head
stack = stack.tail
restoreState(current) // brings us back to the original token in case of multiple synonyms
termAtt.setEmpty()
termAtt.append(synonym)
posIncrAtt.setPositionIncrement(0)
return true
}
if (!input.incrementToken())
return false
val term = termAtt.toString
val synonyms = engine.synonyms(term)
if (synonyms.nonEmpty) {
synonyms foreach { synonym =>
if (!synonym.equals(term))
stack = synonym :: stack
}
current = captureState()
}
true
}
// Lucene being stupid higher up the hierarchy
override def equals(other: Any): Boolean = other match {
case that: DynamicSynonymFilter => this eq that
case _ => false
}
}
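// Hypothetical usage sketch (not part of the original file): an analyzer that
// treats "color" as a synonym of "colour". The object names and the field
// name "f" are illustrative only.
object ExampleSynonymAnalyzer extends DynamicSynonymAnalyzer {
def synonyms(term: String): Set[String] =
if (term == "colour") Set("color") else Set.empty
}
object ExampleSynonymMain {
def main(args: Array[String]): Unit = {
val ts = ExampleSynonymAnalyzer.tokenStream("f", new java.io.StringReader("colour"))
val term = ts.addAttribute(classOf[CharTermAttribute])
ts.reset()
while (ts.incrementToken()) println(term.toString) // prints "colour" then "color"
ts.end()
ts.close()
}
}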
|
yyadavalli/ensime-server
|
core/src/main/scala/org/ensime/indexer/lucene/DynamicSynonymFilter.scala
|
Scala
|
gpl-3.0
| 2,920 |
object Test {
def main(args: Array[String]): Unit = {
fun(foo1)(foo2)
}
def foo1: Int = {
println("foo1")
42
}
def foo2: String = {
println("foo2")
"abc"
}
def fun(erased a: Int)(erased b: String): Unit = {
println("fun")
}
}
|
som-snytt/dotty
|
tests/run-custom-args/erased/erased-3.scala
|
Scala
|
apache-2.0
| 271 |
package com.codingkapoor.codingbat
object LogicII {
def makeBricks(small: Int, big: Int, goal: Int): Boolean = {
// greedily use as many big (5-unit) bricks as fit, then fill with small ones
val bigsUsed = math.min(big, goal / 5)
goal - bigsUsed * 5 <= small
}
def loneSum(a: Int, b: Int, c: Int): Int = {
val ls = List(a, b, c)
val distinct = ls.distinct
distinct.diff(ls.diff(distinct)).sum
}
def luckySum(a: Int, b: Int, c: Int): Int = {
val ls = List(a, b, c)
val index = ls.indexOf(13)
if (index < 0) ls.sum else ls.take(index).sum
}
def noTeenSum(a: Int, b: Int, c: Int): Int = {
val ls = (13 to 19).toList.filterNot(x => x == 15 || x == 16)
List(a, b, c).diff(ls).sum
}
def roundSum(a: Int, b: Int, c: Int): Int = {
val ls = List(a, b, c)
ls.map {
x => if (x % 10 >= 5) ((x / 10) + 1) * 10 else (x / 10) * 10
}.sum
}
def closeFar(a: Int, b: Int, c: Int): Boolean = {
if (Math.abs(b - a) <= 1)
Math.abs(c - a) >= 2 && Math.abs(c - b) >= 2
else if (Math.abs(c - a) <= 1)
Math.abs(b - a) >= 2 && Math.abs(b - c) >= 2
else false
}
def blackJack(a: Int, b: Int): Int = {
val ls = List(a, b).filterNot(_ > 21)
// both values bust: return 0; otherwise the largest value <= 21 is closest
if (ls.isEmpty) 0 else ls.max
}
def evenlySpaced(a: Int, b: Int, c: Int): Boolean = {
val ls = List(a, b, c).sorted
Math.abs(ls.tail.head - ls.head) == Math.abs(ls.last - ls.tail.head)
}
def makeChocolate(small: Int, big: Int, goal: Int): Int = {
val smallBar = 1
val bigBar = 5
val diff = goal - (bigBar * big)
val smalls = diff / smallBar
if (diff % smallBar == 0 && smalls <= small) smalls else -1
}
}
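// Quick sanity checks for the solutions above (not part of the original file;
// expected values follow the CodingBat problem statements).
object LogicIICheck {
def main(args: Array[String]): Unit = {
assert(LogicII.makeBricks(3, 1, 8)) // 1 big + 3 small
assert(LogicII.makeBricks(7, 1, 11)) // 1 big + 6 small
assert(!LogicII.makeBricks(3, 1, 9))
assert(LogicII.blackJack(19, 21) == 21)
assert(LogicII.blackJack(22, 23) == 0) // both bust
assert(LogicII.luckySum(1, 2, 3) == 6)
assert(LogicII.luckySum(1, 13, 3) == 1) // values from the 13 onward are ignored
println("all checks passed")
}
}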
|
codingkapoor/scala-coding-bat
|
src/main/scala/com/codingkapoor/codingbat/LogicII.scala
|
Scala
|
mit
| 1,677 |
package io.cqrs.bench
import _root_.akka.util.Timeout
import concurrent.duration._
package object akka {
val atMost = 50 seconds
implicit val timeout: Timeout = atMost
}
|
jmcabrera/cqrs-bench
|
bench/akka/src/main/scala/io/cqrs/bench/package.scala
|
Scala
|
gpl-2.0
| 187 |
package com.azavea.gtfs
import geotrellis.vector._
import geotrellis.slick.Projected
case class TripShape(id: String, line: Projected[Line])
|
flibbertigibbet/open-transit-indicators
|
scala/gtfs/src/main/scala/com/azavea/gtfs/TripShape.scala
|
Scala
|
gpl-3.0
| 144 |
package jsky.app.ot.scilib
import edu.gemini.util.security.auth.keychain.Action._
import edu.gemini.spModel.core._
import jsky.app.ot.OT
import edu.gemini.pot.sp.ISPProgram
import edu.gemini.pot.client.SPDB
import jsky.app.ot.vcs.VcsOtClient
import jsky.app.ot.viewer.open.OpenDialog
import javax.swing.JComponent
object ScienceLibraryHelper {
/** Return peer for the specified site, OR NULL !! */
def peerForSite(s:Site):Peer =
OT.getKeyChain.peerForSite(s).unsafeRun.fold(_ => null, _.orNull)
def checkout(peer:Peer, pid:SPProgramID, checkedOut: Boolean):ISPProgram = {
val reg = VcsOtClient.unsafeGetRegistrar
val db = SPDB.get()
val auth = OT.getKeyChain
if (checkedOut) OpenDialog.update(db, pid, peer, null : JComponent, reg)
else OpenDialog.checkout(db, pid, peer, null : JComponent, reg)
}
}
|
arturog8m/ocs
|
bundle/jsky.app.ot/src/main/scala/jsky/app/ot/scilib/ScienceLibraryHelper.scala
|
Scala
|
bsd-3-clause
| 838 |
// Copyright 2016 Yahoo Inc.
// Licensed under the terms of the Apache 2.0 license.
// Please see LICENSE file in the project root for terms.
package com.yahoo.ml.caffe
import caffe.Caffe._
import com.yahoo.ml.jcaffe._
import java.io.FileReader
import java.nio.file.{StandardCopyOption, Files, Paths}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.slf4j.{LoggerFactory, Logger}
private[caffe] object FSUtils {
private[caffe] val localfsPrefix: String = "file:"
private[caffe] val hdfsPrefix: String = "hdfs:"
private[caffe] val log: Logger = LoggerFactory.getLogger(this.getClass)
def CopyFileToHDFS(localFilePath: String, hdfsPath: String) {
val dest: Path = new Path(hdfsPath)
val fs: FileSystem = dest.getFileSystem(new Configuration())
if (fs.exists(dest)) fs.delete(dest, true)
fs.copyFromLocalFile(new Path("file://" + localFilePath), dest)
}
def CopyFileToLocal(hdfsPath: String, localFilePath: String) {
val src: Path = new Path(hdfsPath)
val fs: FileSystem = src.getFileSystem(new Configuration())
val dest: Path = new Path("file://" + localFilePath)
fs.copyToLocalFile(false, src, dest, true)
}
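// Keeps the ".h5" suffix consistent between source and destination names:
// HDF5 snapshots keep the suffix, non-HDF5 snapshots must not have it.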
private def MatchH5Suffix(src: String, des: String): String = {
var result = des
if ((src.length > 3) && (result.length > 3)) {
if (((src.substring(src.length - 3) == ".h5")) && (!(result.substring(result.length - 3) == ".h5")))
result += ".h5"
else if ((!(src.substring(src.length - 3) == ".h5")) && ((result.substring(result.length - 3) == ".h5")))
result = result.substring(0, result.length - 3)
}
return result
}
private def CopyModelFile(caffeNet: CaffeNet, modelFilename: String, iterId: Int, isState: Boolean, useExactModelFilename: Boolean) {
val localModelFilename: String = System.getProperty("user.dir") + "/" + caffeNet.snapshotFilename(iterId, isState)
var desModelFilename = modelFilename
if (!useExactModelFilename) {
val pathidx: Int = modelFilename.lastIndexOf("/")
val fnidx: Int = localModelFilename.lastIndexOf("/")
desModelFilename = desModelFilename.substring(0, pathidx + 1) + localModelFilename.substring(fnidx + 1)
}
desModelFilename = MatchH5Suffix(localModelFilename, desModelFilename)
log.info("destination file:"+desModelFilename)
if (modelFilename.startsWith(localfsPrefix)) {
desModelFilename = desModelFilename.substring(localfsPrefix.length)
val srcPath: java.nio.file.Path = Paths.get(localModelFilename)
val desPath: java.nio.file.Path = Paths.get(desModelFilename)
log.info(srcPath+"-->"+desPath)
Files.move(srcPath, desPath, StandardCopyOption.REPLACE_EXISTING)
}
else
CopyFileToHDFS(localModelFilename, desModelFilename)
}
def GenModelOrState(caffeNet: CaffeNet, modelFilename: String, genState: Boolean) {
val iterID = caffeNet.snapshot()
CopyModelFile(caffeNet, modelFilename, iterID, false, !genState)
if (genState)
CopyModelFile(caffeNet, modelFilename, iterID, true, false)
}
def GetLocalFileName(fileName: String, tmpFileName: String): String = {
var localFileName = ""
if (fileName.startsWith(localfsPrefix))
localFileName = fileName.substring(localfsPrefix.length)
else if (fileName.length > 3) {
localFileName = System.getProperty("user.dir") + "/" + tmpFileName
if (fileName.substring(fileName.length - 3) == ".h5")
localFileName = localFileName + ".h5"
CopyFileToLocal(fileName, localFileName)
}
return localFileName
}
}
|
yahoo/CaffeOnSpark
|
caffe-grid/src/main/scala/com/yahoo/ml/caffe/FSUtils.scala
|
Scala
|
apache-2.0
| 3,610 |
/*
* Copyright 1998-2015 Linux.org.ru
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ru.org.linux.gallery
import ru.org.linux.user.User
import scala.beans.BeanProperty
object Image {
private val GalleryName = "(gallery/[^.]+)(?:\\.\\w+)".r
private def mediumName(name: String, doubleSize: Boolean): String = {
name match {
case GalleryName(base) ⇒
if (doubleSize) {
s"$base-med-2x.jpg"
} else {
s"$base-med.jpg"
}
case _ ⇒
throw new IllegalArgumentException("Not gallery path: " + name)
}
}
}
case class Image(
@BeanProperty id:Int,
@BeanProperty topicId:Int,
@BeanProperty original: String,
@BeanProperty icon: String
) {
def getMedium = Image.mediumName(original, doubleSize = false)
def getMedium2x = Image.mediumName(original, doubleSize = true)
def getSrcset = s"$getMedium2x ${Screenshot.MEDIUM_2X_WIDTH}w, $getMedium ${Screenshot.MEDIUM_WIDTH}w, $icon ${Screenshot.ICON_WIDTH}w"
}
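// Illustrative example: for original = "gallery/previews/1234.png",
// getMedium == "gallery/previews/1234-med.jpg" and
// getMedium2x == "gallery/previews/1234-med-2x.jpg".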
case class PreparedGalleryItem(
@BeanProperty item:GalleryItem,
@BeanProperty user:User)
|
ymn/lorsource
|
src/main/scala/ru/org/linux/gallery/Image.scala
|
Scala
|
apache-2.0
| 1,631 |
/***********************************************************************
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.arrow.vector
import java.io.Closeable
import org.apache.arrow.memory.BufferAllocator
import org.apache.arrow.vector.NullableBigIntVector
import org.apache.arrow.vector.complex.NullableMapVector
import org.apache.arrow.vector.types.FloatingPointPrecision
import org.locationtech.geomesa.arrow.features.ArrowSimpleFeature
import org.locationtech.geomesa.arrow.vector.SimpleFeatureVector.EncodingPrecision.EncodingPrecision
import org.locationtech.geomesa.arrow.vector.SimpleFeatureVector.SimpleFeatureEncoding
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
import scala.collection.mutable.ArrayBuffer
/**
* Abstraction for using simple features in Arrow vectors
*
* @param sft simple feature type
* @param underlying underlying arrow vector
* @param dictionaries map of field names to dictionary values, used for dictionary encoding fields.
* All values must be provided up front.
* @param encoding options for encoding
* @param allocator buffer allocator
*/
class SimpleFeatureVector private (val sft: SimpleFeatureType,
val underlying: NullableMapVector,
val dictionaries: Map[String, ArrowDictionary],
val encoding: SimpleFeatureEncoding)
(implicit allocator: BufferAllocator) extends Closeable {
// note: writer creates the map child vectors based on the sft, and should be instantiated before the reader
val writer = new Writer(this)
val reader = new Reader(this)
/**
* Clear any simple features currently stored in the vector
*/
def clear(): Unit = underlying.getMutator.setValueCount(0)
override def close(): Unit = {
underlying.close()
writer.close()
}
class Writer(vector: SimpleFeatureVector) {
private [SimpleFeatureVector] val arrowWriter = vector.underlying.getWriter
private val idWriter = ArrowAttributeWriter.id(vector.underlying, vector.encoding.fids)
private [arrow] val attributeWriters = ArrowAttributeWriter(sft, vector.underlying, dictionaries, encoding).toArray
def set(index: Int, feature: SimpleFeature): Unit = {
arrowWriter.setPosition(index)
arrowWriter.start()
idWriter.apply(index, feature.getID)
var i = 0
while (i < attributeWriters.length) {
attributeWriters(i).apply(index, feature.getAttribute(i))
i += 1
}
arrowWriter.end()
}
def setValueCount(count: Int): Unit = {
arrowWriter.setValueCount(count)
attributeWriters.foreach(_.setValueCount(count))
}
private [vector] def close(): Unit = arrowWriter.close()
}
class Reader(vector: SimpleFeatureVector) {
val idReader: ArrowAttributeReader = ArrowAttributeReader.id(vector.underlying, vector.encoding.fids)
val readers: Array[ArrowAttributeReader] =
ArrowAttributeReader(sft, vector.underlying, dictionaries, encoding).toArray
// feature that can be re-populated with calls to 'load'
val feature: ArrowSimpleFeature = new ArrowSimpleFeature(sft, idReader, readers, -1)
def get(index: Int): ArrowSimpleFeature = new ArrowSimpleFeature(sft, idReader, readers, index)
def load(index: Int): Unit = feature.index = index
def getValueCount: Int = vector.underlying.getAccessor.getValueCount
}
}
object SimpleFeatureVector {
val DefaultCapacity = 8096
val FeatureIdField = "id"
val DescriptorKey = "descriptor"
object EncodingPrecision extends Enumeration {
type EncodingPrecision = Value
val Min, Max = Value
}
case class SimpleFeatureEncoding(fids: Boolean, geometry: EncodingPrecision, date: EncodingPrecision)
object SimpleFeatureEncoding {
private val Min = SimpleFeatureEncoding(fids = false, EncodingPrecision.Min, EncodingPrecision.Min)
private val Max = SimpleFeatureEncoding(fids = false, EncodingPrecision.Max, EncodingPrecision.Max)
private val MinWithFids = SimpleFeatureEncoding(fids = true, EncodingPrecision.Min, EncodingPrecision.Min)
private val MaxWithFids = SimpleFeatureEncoding(fids = true, EncodingPrecision.Max, EncodingPrecision.Max)
def min(fids: Boolean): SimpleFeatureEncoding = if (fids) { MinWithFids } else { Min }
def max(fids: Boolean): SimpleFeatureEncoding = if (fids) { MaxWithFids } else { Max }
}
/**
* Create a new simple feature vector
*
* @param sft simple feature type
* @param dictionaries map of field names to dictionary values, used for dictionary encoding fields.
* All values must be provided up front.
* @param encoding options for encoding
* @param capacity initial capacity for number of features able to be stored in vectors
* @param allocator buffer allocator
* @return
*/
def create(sft: SimpleFeatureType,
dictionaries: Map[String, ArrowDictionary],
encoding: SimpleFeatureEncoding = SimpleFeatureEncoding.min(false),
capacity: Int = DefaultCapacity)
(implicit allocator: BufferAllocator): SimpleFeatureVector = {
val underlying = NullableMapVector.empty(sft.getTypeName, allocator)
val vector = new SimpleFeatureVector(sft, underlying, dictionaries, encoding)
// set capacity after all child vectors have been created by the writers, then allocate
underlying.setInitialCapacity(capacity)
underlying.allocateNew()
vector
}
/**
* Creates a simple feature vector based on an existing arrow vector
*
* @param vector arrow vector
* @param dictionaries map of field names to dictionary values, used for dictionary encoding fields.
* All values must be provided up front.
* @param allocator buffer allocator
* @return
*/
def wrap(vector: NullableMapVector, dictionaries: Map[String, ArrowDictionary])
(implicit allocator: BufferAllocator): SimpleFeatureVector = {
import scala.collection.JavaConversions._
var includeFids = false
val attributes = ArrayBuffer.empty[String]
vector.getField.getChildren.foreach { field =>
if (field.getName == FeatureIdField) {
includeFids = true
} else {
attributes.append(field.getMetadata.get(DescriptorKey))
}
}
val sft = SimpleFeatureTypes.createType(vector.getField.getName, attributes.mkString(","))
val geomPrecision = {
val geomVector = Option(sft.getGeometryDescriptor).flatMap(d => Option(vector.getChild(d.getLocalName)))
val isDouble = geomVector.exists(v => GeometryFields.precisionFromField(v.getField) == FloatingPointPrecision.DOUBLE)
if (isDouble) { EncodingPrecision.Max } else { EncodingPrecision.Min }
}
val datePrecision = {
import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
val dateVector = sft.getDtgField.flatMap(d => Option(vector.getChild(d)))
val isLong = dateVector.exists(_.isInstanceOf[NullableBigIntVector])
if (isLong) { EncodingPrecision.Max } else { EncodingPrecision.Min }
}
val encoding = SimpleFeatureEncoding(includeFids, geomPrecision, datePrecision)
new SimpleFeatureVector(sft, vector, dictionaries, encoding)
}
/**
* Create a simple feature vector using a new arrow vector
*
* @param vector simple feature vector to copy
* @param underlying arrow vector
* @param allocator buffer allocator
* @return
*/
def clone(vector: SimpleFeatureVector, underlying: NullableMapVector)
(implicit allocator: BufferAllocator): SimpleFeatureVector = {
new SimpleFeatureVector(vector.sft, underlying, vector.dictionaries, vector.encoding)
}
}
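// Hypothetical round-trip sketch (not part of the original file). The schema
// string, the WKT-string geometry conversion, and the attribute values are
// illustrative assumptions, not part of the GeoMesa API contract.
object SimpleFeatureVectorExample {
import org.apache.arrow.memory.RootAllocator
import org.geotools.feature.simple.SimpleFeatureBuilder
def main(args: Array[String]): Unit = {
implicit val allocator: BufferAllocator = new RootAllocator(Long.MaxValue)
val sft = SimpleFeatureTypes.createType("example", "name:String,dtg:Date,*geom:Point:srid=4326")
val vector = SimpleFeatureVector.create(sft, Map.empty)
val builder = new SimpleFeatureBuilder(sft)
builder.add("alice")
builder.add(new java.util.Date)
builder.add("POINT (45 55)") // assumes GeoTools converts WKT strings to geometries
val feature = builder.buildFeature("fid-0")
vector.writer.set(0, feature)
vector.writer.setValueCount(1)
println(vector.reader.get(0).getAttribute("name")) // "alice"
vector.close()
}
}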
|
ronq/geomesa
|
geomesa-arrow/geomesa-arrow-gt/src/main/scala/org/locationtech/geomesa/arrow/vector/SimpleFeatureVector.scala
|
Scala
|
apache-2.0
| 8,258 |
package sbt
package compiler
import java.io.File
import java.net.URLClassLoader
import xsbti.TestCallback
import IO.withTemporaryDirectory
object TestCompile
{
def allVersions = List("2.8.1", "2.9.0-1", "2.8.0")
/** Tests running the compiler interface with the analyzer plugin using a test callback. The test callback saves all information
* that the plugin sends it for post-compile analysis by the provided function.*/
def apply[T](scalaVersion: String, sources: Seq[File], outputDirectory: File, options: Seq[String])
(f: (TestCallback, ScalaInstance, Logger) => T): T =
{
val testCallback = new TestCallback
WithCompiler(scalaVersion) { (compiler, log) =>
compiler(sources, Nil, outputDirectory, options, testCallback, 5, log)
val result = f(testCallback, compiler.scalaInstance, log)
for( (file, src) <- testCallback.apis )
xsbt.api.APIUtil.verifyTypeParameters(src)
result
}
}
/** Tests running the compiler interface with the analyzer plugin. The provided function is given a ClassLoader that can
* load the compiled classes. */
def apply[T](scalaVersion: String, sources: Seq[File])(f: ClassLoader => T): T =
CallbackTest.full(scalaVersion, sources){ case (_, outputDir, _, _) => f(new URLClassLoader(Array(outputDir.toURI.toURL))) }
}
object CallbackTest
{
def simple[T](scalaVersion: String, sources: Seq[File])(f: TestCallback => T): T =
full(scalaVersion, sources){ case (callback, _, _, _) => f(callback) }
def full[T](scalaVersion: String, sources: Seq[File])(f: (TestCallback, File, ScalaInstance, Logger) => T): T =
withTemporaryDirectory { outputDir =>
TestCompile(scalaVersion, sources, outputDir, Nil) { case (callback, instance, log) => f(callback, outputDir, instance, log) }
}
}
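// Hypothetical usage (not part of the original file): compile some sources with
// a given Scala version and load a class from the result. "Example" is an
// illustrative class name.
object TestCompileExample {
def compileAndLoad(sources: Seq[File]): Class[_] =
TestCompile("2.8.1", sources) { loader =>
loader.loadClass("Example")
}
}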
|
ornicar/xsbt
|
compile/src/test/scala/TestCompile.scala
|
Scala
|
bsd-3-clause
| 1,754 |
package com.zgagnon.jsonAdapter
import org.json4s._
import spray.json._
/**
* Created by Zoe on 10/31/2014.
*/
package object sprayAdapter {
import com.zgagnon.jsonAdapter.playAdapter.jValueToJsValue
implicit def sprayString(string: JsString): JString = JString(string.value)
implicit def sprayNumber(number: JsNumber): JDecimal = JDecimal(number.value)
implicit def sprayBoolean(boolean: JsBoolean): JBool = JBool(boolean.value)
implicit def sprayObject(obj: JsObject): JObject = {
val forSFields = for ((name, value) <- obj.fields) yield { (name -> valueTo4S(value)) }
JObject(forSFields.toList)
}
implicit def sprayArray(array: JsArray): JArray = {
val values = for (value <- array.elements) yield { valueTo4S(value) }
JArray(values.toList)
}
implicit def valueTo4S(value: JsValue): JValue = {
value match {
case s: JsString => sprayString(s)
case n: JsNumber => sprayNumber(n)
case b: JsBoolean => sprayBoolean(b)
case o: JsObject => sprayObject(o)
case a: JsArray => sprayArray(a)
case JsNull => JNull
}
}
implicit def forSString(string: JString): JsString = JsString(string.values)
implicit def forSDecimal(dec: JDecimal): JsNumber = JsNumber(dec.values)
//implicit def forSDouble(double: JDouble): JsNumber = JsNumber(double.values)
implicit def forSInt(int: JInt): JsNumber = JsNumber(int.values)
implicit def forSBool(bool: JBool): JsBoolean = JsBoolean(bool.values)
implicit def forSObject(obj: JObject): JsObject = {
val values = for ((field, value: JValue) <- obj.obj) yield { field -> valueToSpray(value) }
JsObject(values.toMap)
}
implicit def forSArray(array: JArray): JsArray = {
val values = for (value <- array.arr) yield { valueToSpray(value) }
JsArray(values.toVector)
}
implicit def valueToSpray(value: JValue): JsValue = {
value match {
case s: JString => forSString(s)
case d: JDecimal => forSDecimal(d)
//case dub: JDouble => forSDouble(dub)
case i: JInt => forSInt(i)
case b: JBool => forSBool(b)
case o: JObject => forSObject(o)
case JNothing => JsNull
case JNull => JsNull
case a: JArray => forSArray(a)
}
}
implicit def sprayToPlay(value: JsValue): play.api.libs.json.JsValue = jValueToJsValue(valueTo4S(value))
}
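// Hypothetical round-trip sketch (not part of the original file).
object SprayAdapterExample {
import sprayAdapter._
val spray: JsValue = JsObject("name" -> JsString("zoe"), "n" -> JsNumber(1))
val forS: JValue = valueTo4S(spray) // JObject(List("name" -> JString("zoe"), "n" -> JDecimal(1)))
val back: JsValue = valueToSpray(forS)
}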
|
zgagnon/scala-json-adapter
|
src/main/scala/com/zgagnon/jsonAdapter/sprayAdapter/package.scala
|
Scala
|
apache-2.0
| 2,336 |
/*
* Copyright (C) 2010 Romain Reuillon
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openmole.core.workflow.task
import org.openmole.core.eventdispatcher.{ Event, EventDispatcher, EventListener }
import org.openmole.core.exception.{ InternalProcessingError, UserBadDataError }
import org.openmole.core.workflow.builder.TaskBuilder
import org.openmole.core.workflow.data._
import org.openmole.core.workflow.mole._
import org.openmole.core.workflow.puzzle._
import scala.collection.mutable.ListBuffer
object MoleTask {
def apply(puzzle: Puzzle): MoleTaskBuilder =
apply(puzzle toMole, puzzle.lasts.head)
def apply(mole: Mole, last: Capsule) =
new MoleTaskBuilder { builder ⇒
addInput(mole.root.inputs(mole, Sources.empty, Hooks.empty).toSeq: _*)
addOutput(last.outputs(mole, Sources.empty, Hooks.empty).toSeq: _*)
def toTask = new MoleTask(mole, last, implicits) with builder.Built
}
trait MoleTaskBuilder extends TaskBuilder { builder ⇒
val implicits = ListBuffer[String]()
def addImplicit(p: String) = implicits += p
}
}
/**
* *
*
* @param mole the mole executed by this task.
* @param last the capsule which returns the results
* @param implicits the implicit values for the inputs
*/
sealed abstract class MoleTask(
val mole: Mole,
val last: Capsule,
val implicits: Iterable[String]) extends Task {
class ResultGathering extends EventListener[MoleExecution] {
@volatile var lastContext: Option[Context] = None
override def triggered(obj: MoleExecution, ev: Event[MoleExecution]) = synchronized {
ev match {
case ev: MoleExecution.JobFinished ⇒
if (ev.capsule == last) lastContext = Some(ev.moleJob.context)
case _ ⇒
}
}
}
override protected def process(context: Context) = {
val firstTaskContext = inputs.foldLeft(List.empty[Variable[_]]) {
(acc, input) ⇒
if (!(input.mode is Optional) || ((input.mode is Optional) && context.contains(input.prototype)))
context.variable(input.prototype).getOrElse(throw new InternalProcessingError("Bug: variable not found.")) :: acc
else acc
}.toContext
val implicitsValues = implicits.flatMap(i ⇒ context.get(i))
val execution = MoleExecution(mole, seed = context(Task.openMOLESeed), implicits = implicitsValues)
val resultGathering = new ResultGathering
EventDispatcher.listen(execution: MoleExecution, resultGathering, classOf[MoleExecution.JobFinished])
EventDispatcher.listen(execution: MoleExecution, resultGathering, classOf[MoleExecution.ExceptionRaised])
execution.start(firstTaskContext)
execution.waitUntilEnded
execution.exception.foreach(throw _)
context + resultGathering.lastContext.getOrElse(throw new UserBadDataError("Last capsule " + last + " has never been executed."))
}
}
|
ISCPIF/PSEExperiments
|
openmole-src/openmole/core/org.openmole.core.workflow/src/main/scala/org/openmole/core/workflow/task/MoleTask.scala
|
Scala
|
agpl-3.0
| 3,502 |
package rxgpio
import rx.lang.scala.Observable
import rx.lang.scala.subjects.PublishSubject
import rxgpio.pigpio.PigpioLibrary
import rxgpio.pigpio.PigpioLibrary.gpioAlertFunc_t
import scala.util.control.NonFatal
sealed trait GpioAlert {
def gpio: UserGpio
def level: Level
def tick: Long
}
object GpioAlert {
def apply(user_gpio: Int, gpio_level: Int, microtick: Int /*UINT32*/) = {
new GpioAlert {
lazy val gpio = UserGpio(user_gpio)
lazy val level = Level(gpio_level)
lazy val tick = Ticks.asUint(microtick)
}
}
}
object RxGpio {
private val rxpins = (0 to PigpioLibrary.PI_MAX_USER_GPIO).map(_ -> new RxGpio).toMap
def installAll()(implicit pigpio: PigpioLibrary) = rxpins.foreach(t => pigpio.gpioSetAlertFunc(t._1, t._2))
def apply(num: Int): Observable[GpioAlert] = {
require(rxpins.contains(num), s"invalid pin, $num")
Observable(o => rxpins(num).subject.subscribe(o))
}
}
private class RxGpio extends gpioAlertFunc_t {
private[rxgpio] val subject = PublishSubject[GpioAlert]
final def callback(gpio: Int, level: Int, tick: Int /*UINT32*/): Unit = {
try subject.onNext(GpioAlert(gpio, level, tick))
catch {
case NonFatal(e) => subject.onError(e)
}
}
}
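// Hypothetical usage sketch (not part of the original file): install the alert
// callbacks once, then observe a pin. Assumes an implicit PigpioLibrary is in
// scope and that the pin is a valid user GPIO.
object RxGpioExample {
def watch(pin: Int = 4)(implicit pigpio: PigpioLibrary): Unit = {
RxGpio.installAll()
RxGpio(pin).subscribe(alert => println(s"gpio=${alert.gpio} level=${alert.level} tick=${alert.tick}"))
}
}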
|
jw3/rxgpio
|
core/src/main/scala/rxgpio/Watcher.scala
|
Scala
|
apache-2.0
| 1,320 |
package org.liquidizer.view
import scala.xml._
import net.liftweb._
import net.liftweb.util._
import net.liftweb.http._
import net.liftweb.common._
import net.liftweb.mapper._
import Helpers._
import org.liquidizer.model._
object TimeseriesView {
val cache = new ResultCache[Box[XmlResponse]]
private def reformat(in : NodeSeq) : NodeSeq = {
for( node <- in ) yield reformat(node)
}
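// Repaints SVG <path> elements that carry an explicit stroke attribute or a
// "stroke:rgb..." style as thin black lines; all other nodes pass through.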
def reformat(in: Node) : Node = {
in match {
case Elem(prefix, "path", attribs, scope, _*) =>
if (attribs.get("stroke").isEmpty &&
attribs.get("style").map(!_.text.matches("stroke:rgb[^;]*")).getOrElse(true))
in
else {
Elem(prefix, "path",
attribs
.remove("stroke")
.remove("style")
.append(new UnprefixedAttribute("style", "stroke:#000000;stroke-width:0.5", Null)),
scope)
}
case Elem(prefix, label, attribs, scope, children @ _*) =>
Elem(prefix, label, attribs, scope, reformat(children) : _*)
case other => other
}
}
def createResponse(node: Node) = {
Full(XmlResponse(reformat(node), "image/svg+xml"))
}
def options() : Map[String,String] = {
Map(
"grid" -> S.param("grid").getOrElse("on"),
"axis" -> S.param("axis").getOrElse("log"),
"width" -> S.param("width").getOrElse("640"),
"height" -> S.param("height").getOrElse("400"),
"decay" -> S.param("room").map {
Room.get(_).get.decay.is.toString
}.getOrElse("0.01")
)
}
def userChart(userId : String) : Box[LiftResponse] = {
Votable.find(By(Votable.user, userId.toLong)) match {
case Full(nominee) => {
cache.get(S.uri, options, () => {
val ts= Tick.getTimeSeries(nominee)
val node= (new GnuplotAPI).plotTS(ts, options, false)
createResponse(node.first)
})
}
case _ => Empty
}
}
def queryChart(queryId : String) : Box[LiftResponse] = {
Votable.find(By(Votable.query, queryId.toLong)) match {
case Full(nominee) => {
cache.get(S.uri, options, () => {
val ts= Tick.getTimeSeries(nominee)
val node= (new GnuplotAPI).plotTS(ts, options, true)
createResponse(node.first)
})
}
case _ => Empty
}
}
}
|
liquidizer/liquidizer
|
src/main/scala/org/liquidizer/view/TimeseriesView.scala
|
Scala
|
mit
| 2,228 |
/***********************************************************************
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.stream.generic
import java.io.ByteArrayInputStream
import java.nio.charset.StandardCharsets
import java.util.Collections
import java.util.concurrent.{ExecutorService, Executors, LinkedBlockingQueue, TimeUnit}
import java.util.function.Function
import com.typesafe.config.Config
import org.apache.camel.CamelContext
import org.apache.camel.impl._
import org.apache.camel.scala.dsl.builder.RouteBuilder
import org.locationtech.geomesa.convert2.SimpleFeatureConverter
import org.locationtech.geomesa.stream.{SimpleFeatureStreamSource, SimpleFeatureStreamSourceFactory}
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.locationtech.geomesa.utils.io.WithClose
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
import org.slf4j.LoggerFactory
import scala.util.Try
object GenericSimpleFeatureStreamSourceFactory {
val contexts: java.util.Map[String, CamelContext] = Collections.synchronizedMap(new java.util.HashMap[String, CamelContext]())
def getContext(namespace: String): CamelContext = {
contexts.computeIfAbsent(namespace, new Function[String, CamelContext] {
override def apply(t: String): CamelContext = {
val context = new DefaultCamelContext()
context.start()
context
}
})
}
}
class GenericSimpleFeatureStreamSourceFactory extends SimpleFeatureStreamSourceFactory {
override def canProcess(conf: Config): Boolean =
conf.hasPath("type") && conf.getString("type") == "generic"
override def create(conf: Config, namespace: String): SimpleFeatureStreamSource = {
val sourceRoute = conf.getString("source-route")
val sft = SimpleFeatureTypes.createType(conf.getConfig("sft"))
val threads = Try(conf.getInt("threads")).getOrElse(1)
val converterConf = conf.getConfig("converter")
val fac = () => SimpleFeatureConverter(sft, converterConf)
new GenericSimpleFeatureStreamSource(GenericSimpleFeatureStreamSourceFactory.getContext(namespace), sourceRoute, sft, threads, fac)
}
}
class GenericSimpleFeatureStreamSource(val ctx: CamelContext,
sourceRoute: String,
val sft: SimpleFeatureType,
threads: Int,
parserFactory: () => SimpleFeatureConverter)
extends SimpleFeatureStreamSource {
private val logger = LoggerFactory.getLogger(classOf[GenericSimpleFeatureStreamSource])
var inQ: LinkedBlockingQueue[String] = _
var outQ: LinkedBlockingQueue[SimpleFeature] = _
var parsers: Seq[SimpleFeatureConverter] = _
var es: ExecutorService = _
override def init(): Unit = {
super.init()
inQ = new LinkedBlockingQueue[String]()
outQ = new LinkedBlockingQueue[SimpleFeature]()
val route = getProcessingRoute(inQ)
ctx.addRoutes(route)
parsers = List.fill(threads)(parserFactory())
es = Executors.newCachedThreadPool()
parsers.foreach { p => es.submit(getQueueProcessor(p)) }
}
def getProcessingRoute(inQ: LinkedBlockingQueue[String]): RouteBuilder = new RouteBuilder {
from(sourceRoute).process { e => inQ.put(e.getIn.getBody.asInstanceOf[String]) }
}
override def next: SimpleFeature = outQ.poll(500, TimeUnit.MILLISECONDS)
def getQueueProcessor(p: SimpleFeatureConverter) = {
new Runnable {
override def run(): Unit = {
var running = true
val input = new Iterator[String] {
override def hasNext: Boolean = running
override def next(): String = {
var res: String = null
while (res == null) {
res = inQ.take() // blocks
}
res
}
}
try {
input.foreach { i =>
val bytes = new ByteArrayInputStream(i.getBytes(StandardCharsets.UTF_8))
WithClose(p.process(bytes))(_.foreach(outQ.put))
}
} catch {
case t: InterruptedException => running = false
}
}
}
}
}
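// Hypothetical configuration sketch (not part of the original file). The Camel
// route and the sft/converter sub-configs are illustrative placeholders; the
// exact converter format depends on the GeoMesa converter in use.
object GenericSourceConfigExample {
import com.typesafe.config.ConfigFactory
val conf = ConfigFactory.parseString(
"""
|type = "generic"
|source-route = "netty4:tcp://localhost:5899?textline=true"
|threads = 2
|sft = { type-name = "example", attributes = [{ name = "name", type = "String" }] }
|converter = { type = "delimited-text", format = "CSV", fields = [] }
""".stripMargin)
// new GenericSimpleFeatureStreamSourceFactory().create(conf, "example-ns")
}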
|
aheyne/geomesa
|
geomesa-stream/geomesa-stream-generic/src/main/scala/org/locationtech/geomesa/stream/generic/GenericSimpleFeatureStreamSourceFactory.scala
|
Scala
|
apache-2.0
| 4,536 |
package org.http4s.client.oauth1
import cats.effect.IO
import org.http4s._
import org.http4s.client.oauth1
import org.http4s.util.CaseInsensitiveString
import org.specs2.mutable.Specification
class OAuthTest extends Specification {
// some params taken from http://oauth.net/core/1.0/#anchor30, others from
// http://tools.ietf.org/html/rfc5849
val Right(uri) = Uri.fromString("http://photos.example.net/photos")
val consumer = oauth1.Consumer("dpf43f3p2l4k3l03", "kd94hf93k423kf44")
val token = oauth1.Token("nnch734d00sl2jdk", "pfkkdhi9sl3r4s00")
val userParams = List(
"file" -> "vacation.jpg",
"size" -> "original"
)
val allParams = List(
"oauth_consumer_key" -> "dpf43f3p2l4k3l03",
"oauth_token" -> "nnch734d00sl2jdk",
"oauth_signature_method" -> "HMAC-SHA1",
"oauth_timestamp" -> "1191242096",
"oauth_nonce" -> "kllo9940pd9333jh",
"oauth_version" -> "1.0"
) ++ userParams
val params2 = List(
"b5" -> Some("=%3D"),
"a3" -> Some("a"),
"c@" -> None,
"a2" -> Some("r b"),
"oauth_consumer_key" -> Some("9djdj82h48djs9d2"),
"oauth_token" -> Some("kkk9d7dh3k39sjv7"),
"oauth_signature_method" -> Some("HMAC-SHA1"),
"oauth_timestamp" -> Some("137131201"),
"oauth_nonce" -> Some("7d8f3e4a"),
"c2" -> None,
"a3" -> Some("2 q")
)
val specBaseString = "GET&http%3A%2F%2Fphotos.example.net%2Fphotos&file%3Dvacation.jpg%26oauth_consumer_key%" +
"3Ddpf43f3p2l4k3l03%26oauth_nonce%3Dkllo9940pd9333jh%26oauth_signature_method%3DHMAC-SHA1%26oauth_timestamp%" +
"3D1191242096%26oauth_token%3Dnnch734d00sl2jdk%26oauth_version%3D1.0%26size%3Doriginal"
"OAuth support" should {
"generate a Base String" in {
oauth1.genBaseString(Method.GET, uri, allParams) must_== specBaseString
}
"Generate correct SHA1 signature" in {
oauth1.makeSHASig(specBaseString, consumer, Some(token)) must_== "tR3+Ty81lMeYAr/Fid0kMTYa/WM="
}
"generate a Authorization header" in {
val auth =
oauth1.genAuthHeader(Method.GET, uri, userParams, consumer, None, None, Some(token))
val creds = auth.credentials
creds.authScheme must_== CaseInsensitiveString("OAuth")
}
}
"RFC 5849 example" should {
implicit def urlFormEncoder: EntityEncoder[IO, UrlForm] =
UrlForm.entityEncoder(Charset.`US-ASCII`)
val Right(uri) = Uri.fromString("http://example.com/request?b5=%3D%253D&a3=a&c%40=&a2=r%20b")
val Right(body) = UrlForm.decodeString(Charset.`US-ASCII`)("c2&a3=2+q")
val req = Request[IO](method = Method.POST, uri = uri).withEntity(body)
"Collect proper params, pg 22" in {
oauth1.getUserParams(req).unsafeRunSync()._2.sorted must_== Seq(
"b5" -> "=%3D",
"a3" -> "a",
"c@" -> "",
"a2" -> "r b",
"c2" -> "",
"a3" -> "2 q"
).sorted
}
}
}
|
aeons/http4s
|
client/src/test/scala/org/http4s/client/oauth1/OAuthTest.scala
|
Scala
|
apache-2.0
| 2,875 |
package org.jetbrains.plugins.scala
package lang.refactoring.changeSignature
import com.intellij.refactoring.changeSignature.ParameterTableModelItemBase
import org.jetbrains.plugins.scala.debugger.evaluation.ScalaCodeFragment
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory
import scala.collection.mutable.ListBuffer
/**
* Nikolay.Tropin
* 2014-08-29
*/
class ScalaParameterTableModelItem(parameter: ScalaParameterInfo,
typeCodeFragment: ScalaCodeFragment,
defaultValue: ScalaCodeFragment,
var startsNewClause: Boolean = false)
extends ParameterTableModelItemBase[ScalaParameterInfo](parameter, typeCodeFragment, defaultValue) {
var typeText: String = generateTypeText(parameter)
def keywordsAndAnnotations = parameter.keywordsAndAnnotations
override def isEllipsisType: Boolean = parameter.isRepeatedParameter
def updateType(problems: ListBuffer[String] = ListBuffer()): Unit = {
if (parameter.scType != null && typeText == parameter.scType.presentableText) return
var trimmed = typeText.trim
if (trimmed.endsWith("*")) {
parameter.isRepeatedParameter = true
trimmed = trimmed.dropRight(1).trim
} else {
parameter.isRepeatedParameter = false
}
if (typeText.isEmpty) {
problems += ScalaBundle.message("change.signature.specify.type.for.parameter", parameter.getName)
return
}
val funArrow = ScalaPsiUtil.functionArrow(typeCodeFragment.getProject)
val arrow = if (trimmed.startsWith("=>")) "=>" else if (trimmed.startsWith(funArrow)) funArrow else ""
if (arrow != "") {
parameter.isByName = true
trimmed = trimmed.drop(arrow.length).trim
} else {
parameter.isByName = false
}
if (parameter.isByName && parameter.isRepeatedParameter) {
problems += "Parameter could not be repeated and by-name in the same time"
}
val typeElem = ScalaPsiElementFactory.createTypeElementFromText(trimmed, typeCodeFragment, typeCodeFragment)
if (typeElem == null || typeElem.getType().isEmpty) {
problems += s"Could not understand type $trimmed"
parameter.scType = null
}
else {
parameter.scType = typeElem.getType().getOrAny
}
}
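  // Illustrative examples of how the type text is interpreted:
  //   "Int*"    -> repeated parameter of type Int
  //   "=> Int"  -> by-name parameter of type Int
  //   "=> Int*" -> reported as a problem: a parameter cannot be both repeated and by-name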
private def generateTypeText(parameter: ScalaParameterInfo) = {
val arrow = if (parameter.isByName) ScalaPsiUtil.functionArrow(typeCodeFragment.getProject) else ""
val star = if (parameter.isRepeatedParameter) "*" else ""
val text = Option(parameter.scType).map(_.presentableText)
text.map(tpeText => s"$arrow $tpeText$star").getOrElse("")
}
}
|
loskutov/intellij-scala
|
src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ScalaParameterTableModelItem.scala
|
Scala
|
apache-2.0
| 2,738 |
package security
import com.google.inject.{Inject, Singleton}
import com.mohiva.play.silhouette.api.repositories.AuthInfoRepository
import com.mohiva.play.silhouette.api.util.PasswordHasherRegistry
import com.mohiva.play.silhouette.impl.providers.CredentialsProvider
import scala.concurrent.ExecutionContext
@Singleton
class EmailProvider @Inject()(authInfoRepository: AuthInfoRepository, passwordHasherRegistry: PasswordHasherRegistry)(implicit executionContext: ExecutionContext) extends CredentialsProvider(authInfoRepository, passwordHasherRegistry) {
override def id: String = EmailProvider.ID
}
object EmailProvider {
val ID = "email"
}
|
digitalinteraction/intake24
|
ApiPlayServer/app/security/EmailProvider.scala
|
Scala
|
apache-2.0
| 651 |
package vaadin.scala
object Units extends Enumeration {
import com.vaadin.terminal.Sizeable._
val px = Value(UNITS_PIXELS, "px")
val pt = Value(UNITS_POINTS, "pt")
val pc = Value(UNITS_PICAS, "pc")
val em = Value(UNITS_EM, "em")
val ex = Value(UNITS_EX, "ex")
val mm = Value(UNITS_MM, "mm")
val cm = Value(UNITS_CM, "cm")
val in = Value(UNITS_INCH, "in")
val pct = Value(UNITS_PERCENTAGE, "%")
}
case class Measure(value: Number, unit: Units.Value) {
override def toString = value + unit.toString
}
class MeasureExtent(value: Number) {
def px: Option[Measure] = Option(Measure(value, Units.px))
def percent: Option[Measure] = Option(new Measure(value, Units.pct))
def pct: Option[Measure] = Option(new Measure(value, Units.pct))
def em: Option[Measure] = Option(new Measure(value, Units.em))
def ex: Option[Measure] = Option(new Measure(value, Units.ex))
def in: Option[Measure] = Option(new Measure(value, Units.in))
def cm: Option[Measure] = Option(new Measure(value, Units.cm))
def mm: Option[Measure] = Option(new Measure(value, Units.mm))
def pt: Option[Measure] = Option(new Measure(value, Units.pt))
def pc: Option[Measure] = Option(new Measure(value, Units.pc))
}
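// Usage sketch (illustrative; assumes an implicit Number => MeasureExtent
// conversion is provided elsewhere in the library):
//   component.width = new MeasureExtent(100).px   // Some(Measure(100, Units.px))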
trait Sizeable extends Component {
def width: Option[Measure] = if (p.getWidth < 0) None else Option(Measure(p.getWidth, Units(p.getWidthUnits)))
def width_=(width: Option[Measure]) = p.setWidth(if (width.isDefined) width.get.toString else null)
def width_=(width: Measure) = p.setWidth(if (width != null) width.toString else null)
def height: Option[Measure] = if (p.getHeight() < 0) None else Option(Measure(p.getHeight(), Units(p.getHeightUnits)))
def height_=(height: Option[Measure]) = p.setHeight(if (height.isDefined) height.get.toString else null)
def height_=(height: Measure) = p.setHeight(if (height != null) height.toString else null)
def sizeFull() = p.setSizeFull
def sizeUndefined() = p.setSizeUndefined
def size(width: Measure, height: Measure) = {
this.width = width
this.height = height
}
def size(width: Option[Measure], height: Option[Measure]) = {
this.width = width
this.height = height
}
}
|
CloudInABox/scalavaadinutils
|
src/main/scala/vaadin/scala/Sizeable.scala
|
Scala
|
mit
| 2,176 |
/* NSC -- new Scala compiler
* Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package scala
package tools.nsc
import settings.FscSettings
import scala.tools.util.CompileOutputCommon
import scala.sys.SystemProperties.preferIPv4Stack
/** The client part of the fsc offline compiler. Instead of compiling
 * things itself, it sends requests to a CompileServer.
*/
class StandardCompileClient extends HasCompileSocket with CompileOutputCommon {
lazy val compileSocket: CompileSocket = CompileSocket
val versionMsg = "Fast " + Properties.versionMsg
var verbose = false
def process(args: Array[String]): Boolean = {
// Trying to get out in front of the log messages in case we're
// going from verbose to not verbose.
verbose = (args contains "-verbose")
val settings = new FscSettings(Console.println)
val command = new OfflineCompilerCommand(args.toList, settings)
val shutdown = settings.shutdown.value
val extraVmArgs = if (settings.preferIPv4) List("-D%s=true".format(preferIPv4Stack.key)) else Nil
val vmArgs = settings.jvmargs.unparse ++ settings.defines.unparse ++ extraVmArgs
val fscArgs = args.toList ++ command.extraFscArgs
if (settings.version) {
Console println versionMsg
return true
}
info(versionMsg)
info(args.mkString("[Given arguments: ", " ", "]"))
info(fscArgs.mkString("[Transformed arguments: ", " ", "]"))
info(vmArgs.mkString("[VM arguments: ", " ", "]"))
val socket =
if (settings.server.value == "") compileSocket.getOrCreateSocket(vmArgs mkString " ", !shutdown, settings.port.value)
else compileSocket.getSocket(settings.server.value)
socket match {
case Some(sock) => compileOnServer(sock, fscArgs)
case _ =>
echo(
if (shutdown) "[No compilation server running.]"
else "Compilation failed."
)
shutdown
}
}
}
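// Invocation sketch (hypothetical command lines): the client forwards its
// arguments to a running CompileServer, e.g.
//   fsc -verbose Foo.scala   // logs the given, transformed, and VM arguments
//   fsc -shutdown            // asks a running compile server to shut down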
object CompileClient extends StandardCompileClient {
def main(args: Array[String]): Unit = sys exit {
try { if (process(args)) 0 else 1 }
catch { case _: Exception => 1 }
}
}
|
felixmulder/scala
|
src/compiler/scala/tools/nsc/CompileClient.scala
|
Scala
|
bsd-3-clause
| 2,140 |
package org.jetbrains.plugins.scala
package codeInsight.generation
import com.intellij.lang.LanguageCodeInsightActionHandler
import com.intellij.openapi.editor.{Editor, ScrollType}
import com.intellij.openapi.project.Project
import com.intellij.psi.codeStyle.CodeStyleManager
import com.intellij.psi.{PsiDocumentManager, PsiFile}
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef._
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory
/**
* Nikolay.Tropin
* 8/17/13
*/
class ScalaGenerateCompanionObjectHandler extends LanguageCodeInsightActionHandler {
def isValidFor(editor: Editor, file: PsiFile): Boolean =
file != null && ScalaFileType.SCALA_FILE_TYPE == file.getFileType &&
GenerationUtil.classOrTraitAtCaret(editor, file).exists(canAddCompanionObject)
def invoke(project: Project, editor: Editor, file: PsiFile) {
val classOpt = GenerationUtil.classOrTraitAtCaret(editor, file)
for (clazz <- classOpt) {
val obj = createCompanionObject(clazz)
val parent = clazz.getParent
val addedObj = parent.addAfter(obj, clazz)
parent.addAfter(ScalaPsiElementFactory.createNewLine(clazz.getManager), clazz)
val document = editor.getDocument
PsiDocumentManager.getInstance(project).doPostponedOperationsAndUnblockDocument(document)
val offset = addedObj.getTextRange.getStartOffset
val lineInside = document.getLineNumber(offset) + 1
CodeStyleManager.getInstance(project).adjustLineIndent(document, document.getLineStartOffset(lineInside))
editor.getCaretModel.moveToOffset(document.getLineEndOffset(lineInside))
editor.getScrollingModel.scrollToCaret(ScrollType.MAKE_VISIBLE)
}
}
def startInWriteAction(): Boolean = true
private def canAddCompanionObject(clazz: ScTemplateDefinition): Boolean = clazz match {
case td: ScTypeDefinition if td.fakeCompanionModule.nonEmpty => false
case _: ScTrait | _: ScClass => ScalaPsiUtil.getBaseCompanionModule(clazz).isEmpty
case _ => false
}
private def createCompanionObject(clazz: ScTemplateDefinition): ScObject = {
if (canAddCompanionObject(clazz)) {
val name = clazz.name
val text = s"object $name {\n \n}"
ScalaPsiElementFactory.createObjectWithContext(text, clazz.getContext, clazz)
}
else throw new IllegalArgumentException("Cannot create companion object")
}
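  // For a definition `class Foo`, the generated text is "object Foo {\n \n}",
  // i.e. a companion with an empty body and the caret placed on the blank line.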
}
|
triggerNZ/intellij-scala
|
src/org/jetbrains/plugins/scala/codeInsight/generation/ScalaGenerateCompanionObjectHandler.scala
|
Scala
|
apache-2.0
| 2,455 |
object Test {
val x1: Int = if (true) 1 else 2
val x2: Long = if (true) 1 else 2
val x3: Long = if (true) 1 else 2L
val x4: AnyVal = if (true) 1 else 2
val x5: AnyVal = if (true) 1.0 else 2L
val x6: AnyVal = if (true) 1.0 else true
val x7: String = if (true) "1" else "2"
val x8: AnyRef = if (true) "1" else 'foo
val x9: Any = if (true) "1" else 1
}
|
JetBrains/intellij-scala
|
scala/scala-impl/testdata/annotator/literalTypes/literalTypesLubs.scala
|
Scala
|
apache-2.0
| 378 |
package smt
import sbt._
import sbt.Keys._
import java.io.File
import smt.db.Database
import smt.migration.Migration
import smt.report.Reporter
object SMT extends Plugin {
import MigrationHandling._
lazy val globalSmtSettings = Seq(
migrationsSource <<= (sourceDirectory in Compile) / "migrations",
scriptSource <<= baseDirectory,
allowRollback := false,
runTests := true,
initMigration := None
)
lazy val smtSettings = Seq(
transformedMigrations <<= (migrations, transformations, transformations) map transformedMigrationsImpl,
showHashes <<= (transformedMigrations, initMigration, streams) map showHashesImpl,
showDbState <<= (database, transformedMigrations, initMigration, streams) map SMTImpl.showDbState,
applyMigrations <<= inputTask((argTask: TaskKey[Seq[String]]) => (argTask, database, transformedMigrations, initMigration, allowRollback, runTests, reporters, user, streams) map SMTImpl.applyMigrations),
migrateTo <<= inputTask((argTask: TaskKey[Seq[String]]) => (argTask, database, transformedMigrations, initMigration, allowRollback, runTests, reporters, user, streams) map SMTImpl.migrateTo),
showLatestCommon <<= (database, transformedMigrations, initMigration, streams) map SMTImpl.showLatestCommon,
runScript <<= inputTask((argTask: TaskKey[Seq[String]]) => (argTask, scriptSource, database, streams) map SMTImpl.runScript),
reporters := Seq[Reporter](),
user := System.getProperty("user.name")
)
val migrationsSource = SettingKey[File]("migrations-source", "base-directory for migration files")
val scriptSource = SettingKey[File]("script-source", "base directory for migration-independent db-scripts")
  val initMigration = SettingKey[Option[(Int, String)]]("init-migration", "an optional initial migration to base the migrations in the repository on")
val migrations = TaskKey[Seq[Migration]]("migrations", "sequence of migrations")
val allowRollback = SettingKey[Boolean]("allow-rollback", "indicates if migrations can be rolled back")
val runTests = SettingKey[Boolean]("run-tests", "indicates if tests should be run after applying a migration")
val transformedMigrations = TaskKey[Seq[Migration]]("transformed-migrations", "transformed migrations")
val transformations = SettingKey[Seq[Transformation]]("transformations", "transformations of the migrations")
val showHashes = TaskKey[Unit]("show-hashes", "show the hash sums of the migrations")
val database = SettingKey[Database]("database", "implementation of db abstraction")
val showDbState = TaskKey[Unit]("show-db-state", "show the state of the db")
val showLatestCommon = TaskKey[Unit]("show-latest-common", "show the latest common migration")
val applyMigrations = InputKey[Unit]("apply-migrations", "apply the migrations to the DB")
val migrateTo = InputKey[Unit]("migrate-to", "move db to the specified migration")
val runScript = InputKey[Unit]("run-script", "run a script against the database")
val reporters = SettingKey[Seq[Reporter]]("reporters", "sequence of reporters to notify about db changes")
val user = SettingKey[String]("user", "the operating user")
}
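// Usage sketch for a build definition (hypothetical values; `MyDatabase` is a
// placeholder for a concrete smt.db.Database implementation):
//   smtSettings ++ globalSmtSettings ++ Seq(
//     database := new MyDatabase(...),
//     allowRollback := true
//   )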
|
davidpeklak/smt
|
src/main/scala/smt/SMT.scala
|
Scala
|
mit
| 3,169 |
package com.sksamuel.elastic4s.http.index.alias
import com.sksamuel.elastic4s.alias.{AddAliasActionDefinition, IndicesAliasesRequestDefinition, RemoveAliasActionDefinition}
import com.sksamuel.elastic4s.http.search.queries.QueryBuilderFn
import org.elasticsearch.common.bytes.BytesArray
import org.elasticsearch.common.xcontent.{XContentBuilder, XContentFactory, XContentType}
object AliasActionBuilder {
def apply(r: IndicesAliasesRequestDefinition): XContentBuilder = {
val source = XContentFactory.jsonBuilder().startObject().startArray("actions")
val actionsArray = r.actions.map {
case addAction: AddAliasActionDefinition => buildAddAction(addAction).string()
case removeAction: RemoveAliasActionDefinition => buildRemoveAction(removeAction).string()
}.mkString(",")
source.rawValue(new BytesArray(actionsArray), XContentType.JSON)
source.endArray().endObject()
}
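  // The resulting JSON has the shape (illustrative):
  //   {"actions":[{"add":{"index":"idx","alias":"a"}},{"remove":{"index":"idx","alias":"b"}}]}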
private def buildAddAction(addAction: AddAliasActionDefinition): XContentBuilder = {
val jsonBuilder = XContentFactory.jsonBuilder().startObject().startObject("add")
jsonBuilder.field("index", addAction.index)
jsonBuilder.field("alias", addAction.alias)
addAction.filter.map(QueryBuilderFn(_)).foreach { queryBuilder =>
jsonBuilder.rawField("filter", queryBuilder.bytes(), XContentType.JSON)
}
addAction.routing.foreach(jsonBuilder.field("routing", _))
addAction.searchRouting.foreach(jsonBuilder.field("search_routing", _))
addAction.indexRouting.foreach(jsonBuilder.field("index_routing", _))
jsonBuilder.endObject().endObject()
}
private def buildRemoveAction(removeAction: RemoveAliasActionDefinition): XContentBuilder = {
val jsonBuilder = XContentFactory.jsonBuilder().startObject().startObject("remove")
jsonBuilder.field("index", removeAction.index)
jsonBuilder.field("alias", removeAction.alias)
removeAction.filter.map(QueryBuilderFn(_)).foreach { queryBuilder =>
jsonBuilder.rawField("filter", queryBuilder.bytes(), XContentType.JSON)
}
removeAction.routing.foreach(jsonBuilder.field("routing", _))
removeAction.searchRouting.foreach(jsonBuilder.field("search_routing", _))
removeAction.indexRouting.foreach(jsonBuilder.field("index_routing", _))
jsonBuilder.endObject().endObject()
jsonBuilder
}
}
|
aroundus-inc/elastic4s
|
elastic4s-http/src/main/scala/com/sksamuel/elastic4s/http/index/alias/AliasActionBuilder.scala
|
Scala
|
apache-2.0
| 2,325 |
package dbtarzan.config.connections
import spray.json._
import dbtarzan.db.util.FileReadWrite
import java.nio.file.Path
/* writes the databases configuration file */
object ConnectionDataWriter {
import ConnectionDataJsonProtocol._
def write(path : Path, connections : List[ConnectionData]) : Unit = {
val text = toText(connections)
FileReadWrite.writeFile(path, text)
}
def toText(connections : List[ConnectionData]) : String = {
val result = connections.sortBy(_.name).toJson
result.prettyPrint
}
}
|
aferrandi/dbtarzan
|
src/main/scala/dbtarzan/config/connections/ConnectionDataWriter.scala
|
Scala
|
apache-2.0
| 522 |
// These are meant to be typed into the REPL. You can also run
// scala -Xnojline < repl-session.scala to run them all at once.
import scala.annotation._
:paste
@implicitNotFound(msg = "I am baffled why you give me ${From} when I want ${To}.")
abstract class <:<[-From, +To] extends Function1[From, To]
object <:< {
implicit def conforms[A] = new (A <:< A) { def apply(x: A) = x }
}
def firstLast[A, C](it: C)(implicit ev: C <:< Iterable[A]) =
(it.head, it.last)
firstLast("Fred")
|
yeahnoob/scala-impatient-2e-code
|
src/ch21/sec10/repl-session.scala
|
Scala
|
gpl-3.0
| 493 |
package com.twitter.finagle.service
import com.twitter.finagle._
import com.twitter.finagle.param.HighResTimer
import com.twitter.finagle.stats.StatsReceiver
import com.twitter.util.{Timer, Duration, Future}
/**
* Requeues service application failures that are encountered in modules below it.
* In addition to requeueing local failures, the filter re-issues remote requests
* that have been NACKd. The policy is inherited from `RetryPolicy.RetryableWriteException`.
* Requeues are also rate-limited according to our retry budget in the [[RetryBudget]].
*
* @param retryBudget Maintains our requeue budget.
*
* @param retryBackoffs Stream of backoffs to use before each retry. (e.g. the
* first element is used to delay the first retry, 2nd for
* the second retry and so on)
*
* @param statsReceiver for stats reporting, typically scoped to ".../retries/"
*
* @param canRetry Represents whether or not it is appropriate to issue a
* retry. This is separate from `retryBudget`.
*
* @param maxRetriesPerReq The maximum number of retries to make for a given request
* computed as a percentage of `retryBudget.balance`.
* Used to prevent a single request from using up a disproportionate amount of the budget.
* Must be non-negative.
*
* @param timer Timer used to schedule retries
* @note consider using a [[Timer]] with high resolution so there is
* less correlation between retries. For example [[HighResTimer.Default]]
*/
private[finagle] class RequeueFilter[Req, Rep](
retryBudget: RetryBudget,
retryBackoffs: Stream[Duration],
statsReceiver: StatsReceiver,
canRetry: () => Boolean,
maxRetriesPerReq: Double,
timer: Timer)
extends SimpleFilter[Req, Rep] {
require(maxRetriesPerReq >= 0,
s"maxRetriesPerReq must be non-negative: $maxRetriesPerReq")
private[this] val requeueCounter = statsReceiver.counter("requeues")
private[this] val budgetExhaustCounter = statsReceiver.counter("budget_exhausted")
private[this] def applyService(
req: Req,
service: Service[Req, Rep],
retriesRemaining: Int,
backoffs: Stream[Duration]
): Future[Rep] = {
service(req).rescue {
      case exc@RetryPolicy.RetryableWriteException(_) =>
if (!canRetry()) {
Future.exception(exc)
} else if (retriesRemaining > 0 && retryBudget.tryWithdraw()) {
backoffs match {
case Duration.Zero #:: rest =>
// no delay between retries. Retry immediately.
requeueCounter.incr()
applyService(req, service, retriesRemaining - 1, rest)
case delay #:: rest =>
// Delay and then retry.
timer.doLater(delay) {
requeueCounter.incr()
applyService(req, service, retriesRemaining - 1, rest)
}.flatten
case _ =>
// Schedule has run out of entries. Budget is empty.
budgetExhaustCounter.incr()
Future.exception(exc)
}
} else {
if (retriesRemaining > 0)
budgetExhaustCounter.incr()
Future.exception(exc)
}
}
}
def apply(req: Req, service: Service[Req, Rep]): Future[Rep] = {
retryBudget.deposit()
val maxRetries = Math.ceil(maxRetriesPerReq * retryBudget.balance).toInt
applyService(req, service, maxRetries, retryBackoffs)
}
}
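// Worked example (illustrative): with maxRetriesPerReq = 0.2 and a budget
// balance of 100 when the request arrives, a single request may be requeued at
// most ceil(0.2 * 100) = 20 times, and each requeue must also successfully
// withdraw from the shared RetryBudget.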
|
a-manumohan/finagle
|
finagle-core/src/main/scala/com/twitter/finagle/service/RequeueFilter.scala
|
Scala
|
apache-2.0
| 3,422 |
package example.doc
object FixPoint {
def Y[S,T](f: (S => T) => (S => T)): (S => T) = f(Y(f))(_:S)
// TODO by-name version
}
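// Usage sketch (illustrative, not part of the original file): Y closes a
// function written in open-recursion style over itself.
object FixPointExample {
  // Factorial: the first argument `self` stands for the recursive call.
  val fact: Int => Int =
    FixPoint.Y[Int, Int](self => n => if (n <= 1) 1 else n * self(n - 1))
  // fact(5) == 120
}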
|
epfldata/squid
|
example/src/main/scala/example/doc/FixPoint.scala
|
Scala
|
apache-2.0
| 130 |
package org.scalajs.jsenv.test
import org.scalajs.jsenv.rhino._
import org.scalajs.core.tools.sem._
class RhinoJSEnvTest extends TimeoutComTests {
protected def newJSEnv: RhinoJSEnv = new RhinoJSEnv(Semantics.Defaults)
}
|
doron123/scala-js
|
js-envs/src/test/scala/org/scalajs/jsenv/test/RhinoJSEnvTest.scala
|
Scala
|
bsd-3-clause
| 226 |
package com.joescii.sbtjs
import java.io.{InputStreamReader, BufferedReader}
import java.util.regex.Pattern
import com.gargoylesoftware.htmlunit.html.HtmlPage
import implicits._
import com.gargoylesoftware.htmlunit.WebClient
import net.sourceforge.htmlunit.corejs.javascript. { ScriptableObject, Function => JsFunction }
import org.webjars.WebJarAssetLocator
import sbt.{Logger, TestsFailedException, IO, File}
import sbt.Keys._
object SbtJsTestTasks extends SbtJsTestKeys {
private [this] def lsR(fs:Seq[File]):List[File] =
fs.flatMap(lsR).toList
private [this] def lsR(f:File):List[File] =
if(!f.exists()) List()
else if(!f.isDirectory) List(f)
else f.listFiles().toList.flatMap(lsR)
private [this] def doJsLs(log:Logger, main:Seq[File], test:Seq[File]):Unit = {
log.info("jsResources (assets loaded for every test)")
lsR(main).foreach(f => log.info(f.getCanonicalPath))
log.info("jsTestResources (assets defining tests)")
lsR(test).foreach(f => log.info(f.getCanonicalPath))
}
val jsLsTask = sbt.Def.task {
doJsLs(streams.value.log, jsResources.value, jsTestResources.value)
}
private [this] def locator(lib:String) = {
val regex = Pattern.compile(".*" + Pattern.quote(lib) + ".*")
val classLoader = this.getClass.getClassLoader
val ctxClassLoader = Thread.currentThread().getContextClassLoader
Thread.currentThread().setContextClassLoader(classLoader)
val l = new WebJarAssetLocator(WebJarAssetLocator.getFullPathIndex(regex, classLoader))
Thread.currentThread().setContextClassLoader(ctxClassLoader)
l
}
private [this] def cat(classpath:String) = {
val url = this.getClass.getClassLoader.getResource(classpath)
val r = new BufferedReader(new InputStreamReader(url.openStream()))
    echo(Iterator.continually(r.readLine()).takeWhile(_ != null).mkString("\n"))
}
private [this] def echo(s:String) = new {
def > (f:File):File = { IO.write(f, s); f }
}
private [this] val jasmineLocator = locator("jasmine")
private [this] def sbtJsTest(target:File) = target / "sbtJsTest.js"
private [this] def jasmine(target:File) = target / "jasmine" / "jasmine.js"
private [this] def jasmineHtmlUnitBoot(target:File) = target / "jasmine" / "htmlunit_boot.js"
private [this] def jasmineConsole(target:File) = target / "jasmine" / "console.js"
private [this] def writeJasmineAssets(log:Logger, target:File, color:Boolean):List[File] = {
log.info("Writing jasmine2 assets...")
val colorJs = s"""
|window.sbtJsTest = window.sbtJsTest || {};
|window.sbtJsTest.showColors = $color;
""".stripMargin
List(
echo(colorJs) > sbtJsTest(target),
cat(jasmineLocator.getFullPath("jasmine.js")) > jasmine(target),
cat(jasmineLocator.getFullPath("console.js")) > jasmineConsole(target),
cat("js/htmlunit_jasmine_boot.js") > jasmineHtmlUnitBoot(target)
)
}
private [this] def htmlFor(js:List[File]):String = {
val doctype = "<!DOCTYPE html>"
val scripts = js map ( f => <script type="application/javascript" language="javascript" src={f.toURI.toASCIIString}></script> )
val html =
<html>
<head>
{scripts}
</head>
<body>
</body>
</html>
doctype + "\\n" + html.toString
}
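  // The generated page is a bare script loader (illustrative):
  //   <!DOCTYPE html>
  //   <html><head><script type="application/javascript" language="javascript"
  //     src="file:/..."></script> ...</head><body></body></html>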
private [this] def writeConsoleHtml(log:Logger, rsrcs:Seq[File], html:File):Unit = {
log.info(s"Generating ${html.getCanonicalPath}...")
IO.write(html, htmlFor(lsR(rsrcs)))
}
private [this] def runJs(html:File, browser: Browser, asyncWait: Boolean, asyncWaitTimeout: Option[Long]):Boolean = {
val client = new WebClient(BrowserVersion(browser))
val options = client.getOptions()
options.setHomePage(WebClient.URL_ABOUT_BLANK.toString())
options.setJavaScriptEnabled(true)
client.getPage(html.toURI.toURL)
val window = client.getCurrentWindow().getTopWindow
val page:HtmlPage = window.getEnclosedPage().asInstanceOf[HtmlPage] // asInstanceOf because ... java...
def exec(js:String):String = {
val toRun = "function() {\\n"+js+"\\n};"
val result = page.executeJavaScript(toRun)
val func:JsFunction = result.getJavaScriptResult().asInstanceOf[JsFunction]
val exeResult = page.executeJavaScriptFunctionIfPossible(
func,
window.getScriptableObject(),
Array.empty,
page.getDocumentElement()
)
exeResult.getJavaScriptResult.toString
}
val timeout:Option[Long] = asyncWaitTimeout.map(System.currentTimeMillis() + _)
while(asyncWait
&& timeout.map(System.currentTimeMillis() < _).getOrElse(true)
&& exec("return window.sbtJsTest.readyForTestsToRun") != "true") {
Thread.sleep(250)
}
exec("jasmine.getEnv().execute();")
while(exec("return window.sbtJsTest.complete") != "true") {
Thread.sleep(250)
}
exec("return window.sbtJsTest.allPassed") == "true"
}
private [this] def runTests(log:Logger, rsrcs:Seq[File], target:File, color:Boolean, browsers:Seq[Browser],
frameworks:Seq[Framework], asyncWait: Boolean, asyncWaitTimeout: Option[Long]) = {
import SbtJsTestPlugin.autoImport.JsTestFrameworks._
LogAdapter.logger = log
val html = target / "console.html"
val frameworkAssets:List[File] =
if(frameworks contains Jasmine2) writeJasmineAssets(log, target / "assets", color)
else List()
writeConsoleHtml(log, frameworkAssets ++ rsrcs, html)
browsers.foreach { browser =>
log.info(s"Running JavaScript tests on $browser...")
val success = runJs(html, browser, asyncWait, asyncWaitTimeout)
if (!success) throw new TestsFailedException()
}
}
val jsTestTask = sbt.Def.task {
val resources = jsResources.value ++ jsTestResources.value
runTests(streams.value.log, resources, jsTestTargetDir.value, jsTestColor.value, jsTestBrowsers.value,
jsFrameworks.value, jsAsyncWait.value, jsAsyncWaitTimeout.value)
}
val jsTestOnlyTask = sbt.Def.inputTask {
val tests: Seq[String] = sbt.complete.DefaultParsers.spaceDelimited("<arg>").parsed
val testFiles = tests.map(name => lsR(jsTestResources.value).find(_.getCanonicalPath.endsWith(name))).flatten
val resources = jsResources.value ++ testFiles
runTests(streams.value.log, resources, jsTestTargetDir.value, jsTestColor.value, jsTestBrowsers.value,
jsFrameworks.value, jsAsyncWait.value, jsAsyncWaitTimeout.value)
}
}
private [sbtjs] object BrowserVersion {
import com.gargoylesoftware.htmlunit. { BrowserVersion => HUBrowserVersion }
import HUBrowserVersion._
import SbtJsTestPlugin.autoImport.JsTestBrowsers._
def apply(b:Browser):HUBrowserVersion = b match {
case Firefox38 => FIREFOX_38
// case InternetExplorer8 => INTERNET_EXPLORER_8
case InternetExplorer11 => INTERNET_EXPLORER_11
case Chrome => CHROME
// case Edge => EDGE
}
}
|
joescii/sbt-js-test
|
src/main/scala/com/joescii/sbtjs/SbtJsTestTasks.scala
|
Scala
|
apache-2.0
| 6,920 |
package spa.client.components
import japgolly.scalajs.react.ReactNode
import japgolly.scalajs.react.vdom.prefix_<^._
/**
* Provides type-safe access to Font Awesome icons
*/
object Icon {
type Icon = ReactNode
def apply(name: String): Icon = <.i(^.className := s"fa fa-$name")
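  // e.g. Icon("search"), or the alias `Icon.search` below, renders an
  // <i class="fa fa-search"></i> element.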
def adjust = apply("adjust")
def adn = apply("adn")
def alignCenter = apply("align-center")
def alignJustify = apply("align-justify")
def alignLeft = apply("align-left")
def alignRight = apply("align-right")
def ambulance = apply("ambulance")
def anchor = apply("anchor")
def android = apply("android")
def angellist = apply("angellist")
def angleDoubleDown = apply("angle-double-down")
def angleDoubleLeft = apply("angle-double-left")
def angleDoubleRight = apply("angle-double-right")
def angleDoubleUp = apply("angle-double-up")
def angleDown = apply("angle-down")
def angleLeft = apply("angle-left")
def angleRight = apply("angle-right")
def angleUp = apply("angle-up")
def apple = apply("apple")
def archive = apply("archive")
def areaChart = apply("area-chart")
def arrowCircleDown = apply("arrow-circle-down")
def arrowCircleLeft = apply("arrow-circle-left")
def arrowCircleODown = apply("arrow-circle-o-down")
def arrowCircleOLeft = apply("arrow-circle-o-left")
def arrowCircleORight = apply("arrow-circle-o-right")
def arrowCircleOUp = apply("arrow-circle-o-up")
def arrowCircleRight = apply("arrow-circle-right")
def arrowCircleUp = apply("arrow-circle-up")
def arrowDown = apply("arrow-down")
def arrowLeft = apply("arrow-left")
def arrowRight = apply("arrow-right")
def arrowUp = apply("arrow-up")
def arrows = apply("arrows")
def arrowsAlt = apply("arrows-alt")
def arrowsH = apply("arrows-h")
def arrowsV = apply("arrows-v")
def asterisk = apply("asterisk")
def at = apply("at")
def automobile = apply("automobile")
def backward = apply("backward")
def ban = apply("ban")
def bank = apply("bank")
def barChart = apply("bar-chart")
def barChartO = apply("bar-chart-o")
def barcode = apply("barcode")
def bars = apply("bars")
def bed = apply("bed")
def beer = apply("beer")
def behance = apply("behance")
def behanceSquare = apply("behance-square")
def bell = apply("bell")
def bellO = apply("bell-o")
def bellSlash = apply("bell-slash")
def bellSlashO = apply("bell-slash-o")
def bicycle = apply("bicycle")
def binoculars = apply("binoculars")
def birthdayCake = apply("birthday-cake")
def bitbucket = apply("bitbucket")
def bitbucketSquare = apply("bitbucket-square")
def bitcoin = apply("bitcoin")
def bold = apply("bold")
def bolt = apply("bolt")
def bomb = apply("bomb")
def book = apply("book")
def bookmark = apply("bookmark")
def bookmarkO = apply("bookmark-o")
def briefcase = apply("briefcase")
def btc = apply("btc")
def bug = apply("bug")
def building = apply("building")
def buildingO = apply("building-o")
def bullhorn = apply("bullhorn")
def bullseye = apply("bullseye")
def bus = apply("bus")
def buysellads = apply("buysellads")
def cab = apply("cab")
def calculator = apply("calculator")
def calendar = apply("calendar")
def calendarO = apply("calendar-o")
def camera = apply("camera")
def cameraRetro = apply("camera-retro")
def car = apply("car")
def caretDown = apply("caret-down")
def caretLeft = apply("caret-left")
def caretRight = apply("caret-right")
def caretSquareODown = apply("caret-square-o-down")
def caretSquareOLeft = apply("caret-square-o-left")
def caretSquareORight = apply("caret-square-o-right")
def caretSquareOUp = apply("caret-square-o-up")
def caretUp = apply("caret-up")
def cartArrowDown = apply("cart-arrow-down")
def cartPlus = apply("cart-plus")
def cc = apply("cc")
def ccAmex = apply("cc-amex")
def ccDiscover = apply("cc-discover")
def ccMastercard = apply("cc-mastercard")
def ccPaypal = apply("cc-paypal")
def ccStripe = apply("cc-stripe")
def ccVisa = apply("cc-visa")
def certificate = apply("certificate")
def chain = apply("chain")
def chainBroken = apply("chain-broken")
def check = apply("check")
def checkCircle = apply("check-circle")
def checkCircleO = apply("check-circle-o")
def checkSquare = apply("check-square")
def checkSquareO = apply("check-square-o")
def chevronCircleDown = apply("chevron-circle-down")
def chevronCircleLeft = apply("chevron-circle-left")
def chevronCircleRight = apply("chevron-circle-right")
def chevronCircleUp = apply("chevron-circle-up")
def chevronDown = apply("chevron-down")
def chevronLeft = apply("chevron-left")
def chevronRight = apply("chevron-right")
def chevronUp = apply("chevron-up")
def child = apply("child")
def circle = apply("circle")
def circleO = apply("circle-o")
def circleONotch = apply("circle-o-notch")
def circleThin = apply("circle-thin")
def clipboard = apply("clipboard")
def clockO = apply("clock-o")
def close = apply("close")
def cloud = apply("cloud")
def cloudDownload = apply("cloud-download")
def cloudUpload = apply("cloud-upload")
def cny = apply("cny")
def code = apply("code")
def codeFork = apply("code-fork")
def codepen = apply("codepen")
def coffee = apply("coffee")
def cog = apply("cog")
def cogs = apply("cogs")
def columns = apply("columns")
def comment = apply("comment")
def commentO = apply("comment-o")
def comments = apply("comments")
def commentsO = apply("comments-o")
def compass = apply("compass")
def compress = apply("compress")
def connectdevelop = apply("connectdevelop")
def copy = apply("copy")
def copyright = apply("copyright")
def creditCard = apply("credit-card")
def crop = apply("crop")
def crosshairs = apply("crosshairs")
def css3 = apply("css3")
def cube = apply("cube")
def cubes = apply("cubes")
def cut = apply("cut")
def cutlery = apply("cutlery")
def dashboard = apply("dashboard")
def dashcube = apply("dashcube")
def database = apply("database")
def dedent = apply("dedent")
def delicious = apply("delicious")
def desktop = apply("desktop")
def deviantart = apply("deviantart")
def diamond = apply("diamond")
def digg = apply("digg")
def dollar = apply("dollar")
def dotCircleO = apply("dot-circle-o")
def download = apply("download")
def dribbble = apply("dribbble")
def dropbox = apply("dropbox")
def drupal = apply("drupal")
def edit = apply("edit")
def eject = apply("eject")
def ellipsisH = apply("ellipsis-h")
def ellipsisV = apply("ellipsis-v")
def empire = apply("empire")
def envelope = apply("envelope")
def envelopeO = apply("envelope-o")
def envelopeSquare = apply("envelope-square")
def eraser = apply("eraser")
def eur = apply("eur")
def euro = apply("euro")
def exchange = apply("exchange")
def exclamation = apply("exclamation")
def exclamationCircle = apply("exclamation-circle")
def exclamationTriangle = apply("exclamation-triangle")
def expand = apply("expand")
def externalLink = apply("external-link")
def externalLinkSquare = apply("external-link-square")
def eye = apply("eye")
def eyeSlash = apply("eye-slash")
def eyedropper = apply("eyedropper")
def facebook = apply("facebook")
def facebookF = apply("facebook-f")
def facebookOfficial = apply("facebook-official")
def facebookSquare = apply("facebook-square")
def fastBackward = apply("fast-backward")
def fastForward = apply("fast-forward")
def fax = apply("fax")
def female = apply("female")
def fighterJet = apply("fighter-jet")
def file = apply("file")
def fileArchiveO = apply("file-archive-o")
def fileAudioO = apply("file-audio-o")
def fileCodeO = apply("file-code-o")
def fileExcelO = apply("file-excel-o")
def fileImageO = apply("file-image-o")
def fileMovieO = apply("file-movie-o")
def fileO = apply("file-o")
def filePdfO = apply("file-pdf-o")
def filePhotoO = apply("file-photo-o")
def filePictureO = apply("file-picture-o")
def filePowerpointO = apply("file-powerpoint-o")
def fileSoundO = apply("file-sound-o")
def fileText = apply("file-text")
def fileTextO = apply("file-text-o")
def fileVideoO = apply("file-video-o")
def fileWordO = apply("file-word-o")
def fileZipO = apply("file-zip-o")
def filesO = apply("files-o")
def film = apply("film")
def filter = apply("filter")
def fire = apply("fire")
def fireExtinguisher = apply("fire-extinguisher")
def flag = apply("flag")
def flagCheckered = apply("flag-checkered")
def flagO = apply("flag-o")
def flash = apply("flash")
def flask = apply("flask")
def flickr = apply("flickr")
def floppyO = apply("floppy-o")
def folder = apply("folder")
def folderO = apply("folder-o")
def folderOpen = apply("folder-open")
def folderOpenO = apply("folder-open-o")
def font = apply("font")
def forumbee = apply("forumbee")
def forward = apply("forward")
def foursquare = apply("foursquare")
def frownO = apply("frown-o")
def futbolO = apply("futbol-o")
def gamepad = apply("gamepad")
def gavel = apply("gavel")
def gbp = apply("gbp")
def ge = apply("ge")
def gear = apply("gear")
def gears = apply("gears")
def genderless = apply("genderless")
def gift = apply("gift")
def git = apply("git")
def gitSquare = apply("git-square")
def github = apply("github")
def githubAlt = apply("github-alt")
def githubSquare = apply("github-square")
def gittip = apply("gittip")
def glass = apply("glass")
def globe = apply("globe")
def google = apply("google")
def googlePlus = apply("google-plus")
def googlePlusSquare = apply("google-plus-square")
def googleWallet = apply("google-wallet")
def graduationCap = apply("graduation-cap")
def gratipay = apply("gratipay")
def group = apply("group")
def hSquare = apply("h-square")
def hackerNews = apply("hacker-news")
def handODown = apply("hand-o-down")
def handOLeft = apply("hand-o-left")
def handORight = apply("hand-o-right")
def handOUp = apply("hand-o-up")
def hddO = apply("hdd-o")
def header = apply("header")
def headphones = apply("headphones")
def heart = apply("heart")
def heartO = apply("heart-o")
def heartbeat = apply("heartbeat")
def history = apply("history")
def home = apply("home")
def hospitalO = apply("hospital-o")
def hotel = apply("hotel")
def html5 = apply("html5")
def ils = apply("ils")
def image = apply("image")
def inbox = apply("inbox")
def indent = apply("indent")
def info = apply("info")
def infoCircle = apply("info-circle")
def inr = apply("inr")
def instagram = apply("instagram")
def institution = apply("institution")
def ioxhost = apply("ioxhost")
def italic = apply("italic")
def joomla = apply("joomla")
def jpy = apply("jpy")
def jsfiddle = apply("jsfiddle")
def key = apply("key")
def keyboardO = apply("keyboard-o")
def krw = apply("krw")
def language = apply("language")
def laptop = apply("laptop")
def lastfm = apply("lastfm")
def lastfmSquare = apply("lastfm-square")
def leaf = apply("leaf")
def leanpub = apply("leanpub")
def legal = apply("legal")
def lemonO = apply("lemon-o")
def levelDown = apply("level-down")
def levelUp = apply("level-up")
def lifeBouy = apply("life-bouy")
def lifeBuoy = apply("life-buoy")
def lifeRing = apply("life-ring")
def lifeSaver = apply("life-saver")
def lightbulbO = apply("lightbulb-o")
def lineChart = apply("line-chart")
def link = apply("link")
def linkedin = apply("linkedin")
def linkedinSquare = apply("linkedin-square")
def linux = apply("linux")
def list = apply("list")
def listAlt = apply("list-alt")
def listOl = apply("list-ol")
def listUl = apply("list-ul")
def locationArrow = apply("location-arrow")
def lock = apply("lock")
def longArrowDown = apply("long-arrow-down")
def longArrowLeft = apply("long-arrow-left")
def longArrowRight = apply("long-arrow-right")
def longArrowUp = apply("long-arrow-up")
def magic = apply("magic")
def magnet = apply("magnet")
def mailForward = apply("mail-forward")
def mailReply = apply("mail-reply")
def mailReplyAll = apply("mail-reply-all")
def male = apply("male")
def mapMarker = apply("map-marker")
def mars = apply("mars")
def marsDouble = apply("mars-double")
def marsStroke = apply("mars-stroke")
def marsStrokeH = apply("mars-stroke-h")
def marsStrokeV = apply("mars-stroke-v")
def maxcdn = apply("maxcdn")
def meanpath = apply("meanpath")
def medium = apply("medium")
def medkit = apply("medkit")
def mehO = apply("meh-o")
def mercury = apply("mercury")
def microphone = apply("microphone")
def microphoneSlash = apply("microphone-slash")
def minus = apply("minus")
def minusCircle = apply("minus-circle")
def minusSquare = apply("minus-square")
def minusSquareO = apply("minus-square-o")
def mobile = apply("mobile")
def mobilePhone = apply("mobile-phone")
def money = apply("money")
def moonO = apply("moon-o")
def mortarBoard = apply("mortar-board")
def motorcycle = apply("motorcycle")
def music = apply("music")
def navicon = apply("navicon")
def neuter = apply("neuter")
def newspaperO = apply("newspaper-o")
def openid = apply("openid")
def outdent = apply("outdent")
def pagelines = apply("pagelines")
def paintBrush = apply("paint-brush")
def paperPlane = apply("paper-plane")
def paperPlaneO = apply("paper-plane-o")
def paperclip = apply("paperclip")
def paragraph = apply("paragraph")
def paste = apply("paste")
def pause = apply("pause")
def paw = apply("paw")
def paypal = apply("paypal")
def pencil = apply("pencil")
def pencilSquare = apply("pencil-square")
def pencilSquareO = apply("pencil-square-o")
def phone = apply("phone")
def phoneSquare = apply("phone-square")
def photo = apply("photo")
def pictureO = apply("picture-o")
def pieChart = apply("pie-chart")
def piedPiper = apply("pied-piper")
def piedPiperAlt = apply("pied-piper-alt")
def pinterest = apply("pinterest")
def pinterestP = apply("pinterest-p")
def pinterestSquare = apply("pinterest-square")
def plane = apply("plane")
def play = apply("play")
def playCircle = apply("play-circle")
def playCircleO = apply("play-circle-o")
def plug = apply("plug")
def plus = apply("plus")
def plusCircle = apply("plus-circle")
def plusSquare = apply("plus-square")
def plusSquareO = apply("plus-square-o")
def powerOff = apply("power-off")
def print = apply("print")
def puzzlePiece = apply("puzzle-piece")
def qq = apply("qq")
def qrcode = apply("qrcode")
def question = apply("question")
def questionCircle = apply("question-circle")
def quoteLeft = apply("quote-left")
def quoteRight = apply("quote-right")
def ra = apply("ra")
def random = apply("random")
def rebel = apply("rebel")
def recycle = apply("recycle")
def reddit = apply("reddit-alien")
def redditSquare = apply("reddit-square")
def refresh = apply("refresh")
def remove = apply("remove")
def renren = apply("renren")
def reorder = apply("reorder")
def repeat = apply("repeat")
def reply = apply("reply")
def replyAll = apply("reply-all")
def retweet = apply("retweet")
def rmb = apply("rmb")
def road = apply("road")
def rocket = apply("rocket")
def rotateLeft = apply("rotate-left")
def rotateRight = apply("rotate-right")
def rouble = apply("rouble")
def rss = apply("rss")
def rssSquare = apply("rss-square")
def rub = apply("rub")
def ruble = apply("ruble")
def rupee = apply("rupee")
def save = apply("save")
def scissors = apply("scissors")
def search = apply("search")
def searchMinus = apply("search-minus")
def searchPlus = apply("search-plus")
def sellsy = apply("sellsy")
def send = apply("send")
def sendO = apply("send-o")
def server = apply("server")
def share = apply("share")
def shareAlt = apply("share-alt")
def shareAltSquare = apply("share-alt-square")
def shareSquare = apply("share-square")
def shareSquareO = apply("share-square-o")
def shekel = apply("shekel")
def sheqel = apply("sheqel")
def shield = apply("shield")
def ship = apply("ship")
def shirtsinbulk = apply("shirtsinbulk")
def shoppingCart = apply("shopping-cart")
def signIn = apply("sign-in")
def signOut = apply("sign-out")
def signal = apply("signal")
def simplybuilt = apply("simplybuilt")
def sitemap = apply("sitemap")
def skyatlas = apply("skyatlas")
def skype = apply("skype")
def slack = apply("slack")
def sliders = apply("sliders")
def slideshare = apply("slideshare")
def smileO = apply("smile-o")
def soccerBallO = apply("soccer-ball-o")
def sort = apply("sort")
def sortAlphaAsc = apply("sort-alpha-asc")
def sortAlphaDesc = apply("sort-alpha-desc")
def sortAmountAsc = apply("sort-amount-asc")
def sortAmountDesc = apply("sort-amount-desc")
def sortAsc = apply("sort-asc")
def sortDesc = apply("sort-desc")
def sortDown = apply("sort-down")
def sortNumericAsc = apply("sort-numeric-asc")
def sortNumericDesc = apply("sort-numeric-desc")
def sortUp = apply("sort-up")
def soundcloud = apply("soundcloud")
def spaceShuttle = apply("space-shuttle")
def spinner = apply("spinner")
def spoon = apply("spoon")
def spotify = apply("spotify")
def square = apply("square")
def squareO = apply("square-o")
def stackExchange = apply("stack-exchange")
def stackOverflow = apply("stack-overflow")
def star = apply("star")
def starHalf = apply("star-half")
def starHalfEmpty = apply("star-half-empty")
def starHalfFull = apply("star-half-full")
def starHalfO = apply("star-half-o")
def starO = apply("star-o")
def steam = apply("steam")
def steamSquare = apply("steam-square")
def stepBackward = apply("step-backward")
def stepForward = apply("step-forward")
def stethoscope = apply("stethoscope")
def stop = apply("stop")
def streetView = apply("street-view")
def strikethrough = apply("strikethrough")
def stumbleupon = apply("stumbleupon")
def stumbleuponCircle = apply("stumbleupon-circle")
def subscript = apply("subscript")
def subway = apply("subway")
def suitcase = apply("suitcase")
def sunO = apply("sun-o")
def superscript = apply("superscript")
def support = apply("support")
def table = apply("table")
def tablet = apply("tablet")
def tachometer = apply("tachometer")
def tag = apply("tag")
def tags = apply("tags")
def tasks = apply("tasks")
def taxi = apply("taxi")
def tencentWeibo = apply("tencent-weibo")
def terminal = apply("terminal")
def textHeight = apply("text-height")
def textWidth = apply("text-width")
def th = apply("th")
def thLarge = apply("th-large")
def thList = apply("th-list")
def thumbTack = apply("thumb-tack")
def thumbsDown = apply("thumbs-down")
def thumbsODown = apply("thumbs-o-down")
def thumbsOUp = apply("thumbs-o-up")
def thumbsUp = apply("thumbs-up")
def ticket = apply("ticket")
def times = apply("times")
def timesCircle = apply("times-circle")
def timesCircleO = apply("times-circle-o")
def tint = apply("tint")
def toggleDown = apply("toggle-down")
def toggleLeft = apply("toggle-left")
def toggleOff = apply("toggle-off")
def toggleOn = apply("toggle-on")
def toggleRight = apply("toggle-right")
def toggleUp = apply("toggle-up")
def train = apply("train")
def transgender = apply("transgender")
def transgenderAlt = apply("transgender-alt")
def trash = apply("trash")
def trashO = apply("trash-o")
def tree = apply("tree")
def trello = apply("trello")
def trophy = apply("trophy")
def truck = apply("truck")
def `try` = apply("try")
def tty = apply("tty")
def tumblr = apply("tumblr")
def tumblrSquare = apply("tumblr-square")
def turkishLira = apply("turkish-lira")
def twitch = apply("twitch")
def twitter = apply("twitter")
def twitterSquare = apply("twitter-square")
def umbrella = apply("umbrella")
def underline = apply("underline")
def undo = apply("undo")
def university = apply("university")
def unlink = apply("unlink")
def unlock = apply("unlock")
def unlockAlt = apply("unlock-alt")
def unsorted = apply("unsorted")
def upload = apply("upload")
def usd = apply("usd")
def user = apply("user")
def userMd = apply("user-md")
def userPlus = apply("user-plus")
def userSecret = apply("user-secret")
def userTimes = apply("user-times")
def users = apply("users")
def venus = apply("venus")
def venusDouble = apply("venus-double")
def venusMars = apply("venus-mars")
def viacoin = apply("viacoin")
def videoCamera = apply("video-camera")
def vimeoSquare = apply("vimeo-square")
def vine = apply("vine")
def vk = apply("vk")
def volumeDown = apply("volume-down")
def volumeOff = apply("volume-off")
def volumeUp = apply("volume-up")
def warning = apply("warning")
def wechat = apply("wechat")
def weibo = apply("weibo")
def weixin = apply("weixin")
def whatsapp = apply("whatsapp")
def wheelchair = apply("wheelchair")
def wifi = apply("wifi")
def windows = apply("windows")
def won = apply("won")
def wordpress = apply("wordpress")
def wrench = apply("wrench")
def xing = apply("xing")
def xingSquare = apply("xing-square")
def yahoo = apply("yahoo")
def yCombinator = apply("y-combinator")
def yelp = apply("yelp")
def yen = apply("yen")
def youtube = apply("youtube")
def youtubePlay = apply("youtube-play")
def youtubeSquare = apply("youtube-square")
}
|
IceGiant/internet-abridged
|
client/src/main/scala/spa/client/components/Icon.scala
|
Scala
|
apache-2.0
| 21,519 |
package org.scalatest.tools
import java.io.File
import java.util.regex.Matcher.quoteReplacement
import scala.collection.mutable
import scala.xml.XML
import scala.xml.NodeSeq
private[scalatest] case class Durations(file: File) {
val suites = mutable.Set[Suite]()
if (file.exists) {
val durationsXml = XML.loadFile(file)
    for (suiteXml <- durationsXml \ "suite") {
      val suite = Suite("" + (suiteXml \ "@suiteId"),
                        "" + (suiteXml \ "@suiteName"))
      suites += suite
      for (testXml <- suiteXml \ "test") {
        val test = Test("" + (testXml \ "@testName"))
        suite.tests += test
        val previous = testXml \ "previous"
        test.previousNum = (previous \ "@num").toString.toInt
        test.previousAverage = (previous \ "@average").toString.toInt
        for (durationXml <- testXml \ "duration") {
          val duration = Duration((durationXml \ "@run").toString,
                                  (durationXml \ "@millis").toString.toInt)
test.durations = duration :: test.durations
}
test.durations = test.durations.reverse
}
}
}
//
// Adds test results from specified xml to this Duration. The xml is
// in the format of a run file.
//
// The 'run' parameter is the timestamp identifier for the run.
//
def addTests(run: String, runXml: NodeSeq) {
    for (suite <- runXml \\ "suite") {
      val suiteId = (suite \ "@id").toString
      val suiteName = (suite \ "@name").toString
      for (test <- suite \ "test") {
        val result = (test \ "@result").toString
        if (result == "succeeded") {
          val testName = (test \ "@name").toString
          val millis = (test \ "@duration").toString.toInt
addDuration(suiteId, suiteName, testName, run, millis)
}
}
}
}
def toXml: String = {
val DurationsTemplate =
"""|<durations>
|$suites$</durations>
|""".stripMargin
val buf = new StringBuilder
for (suite <- suites) buf.append(suite.toXml)
    DurationsTemplate.replaceFirst("""\$suites\$""",
                                   quoteReplacement(buf.toString))
}
def addDuration(suiteId: String, suiteName: String, testName: String,
run: String, millis: Int)
{
def getSuite(): Suite = {
val suiteOption = suites.find(suite => suite.suiteId == suiteId)
if (suiteOption.isDefined) {
suiteOption.get
}
else {
val newSuite = Suite(suiteId, suiteName)
suites += newSuite
newSuite
}
}
def getTest(): Test = {
val suite = getSuite
val testOption = suite.tests.find(test => test.name == testName)
if (testOption.isDefined) {
testOption.get
}
else {
val newTest = Test(testName)
suite.tests += newTest
newTest
}
}
def archiveOldestDuration(test: Test) {
val oldestDuration = test.durations.last
test.durations = test.durations.dropRight(1)
test.previousAverage =
(test.previousAverage * test.previousNum + oldestDuration.millis) /
(test.previousNum + 1)
test.previousNum += 1
}
val test = getTest()
test.durations ::= new Duration(run, millis)
if ((test.numberOfDurations * 0.8) > test.previousNum) {
archiveOldestDuration(test)
}
}
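  // Illustrative: whenever the explicit <duration> entries grow past roughly
  // 20% of all recorded runs (numberOfDurations * 0.8 > previousNum), the
  // oldest entry is folded into the running <previous num=... average=.../>
  // summary, keeping the per-test duration list bounded.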
case class Suite(suiteId: String, suiteName: String) {
val tests = mutable.Set[Test]()
def toXml: String = {
val SuiteTemplate =
"""| <suite suiteId="$suiteId$" suiteName="$suiteName$">
|$tests$ </suite>
|""".stripMargin
val buf = new StringBuilder
for (test <- tests) buf.append(test.toXml)
SuiteTemplate.
replaceFirst("""\\$suiteId\\$""", quoteReplacement(suiteId)).
replaceFirst("""\\$suiteName\\$""", quoteReplacement(suiteName)).
replaceFirst("""\\$tests\\$""", quoteReplacement(buf.toString))
}
}
case class Test(name: String) {
var previousNum = 0
var previousAverage = 0
var durations = List[Duration]()
def numberOfDurations = previousNum + durations.size
def toXml: String = {
val TestTemplate =
"""| <test testName="$testName$">
| <previous num="$previousNum$" average="$previousAverage$"/>
|$durations$ </test>
|""".stripMargin
val buf = new StringBuilder
for (duration <- durations) buf.append(duration.toXml)
TestTemplate.
replaceFirst("""\\$testName\\$""", quoteReplacement(name)).
replaceFirst("""\\$previousNum\\$""", previousNum.toString).
replaceFirst("""\\$previousAverage\\$""", previousAverage.toString).
replaceFirst("""\\$durations\\$""", quoteReplacement(buf.toString))
}
def runCount: Int = {
previousNum + durations.size
}
def computeNewAvg: Int = {
durations.foldLeft(0)(_ + _.millis) / durations.size
}
}
case class Duration(run: String, millis: Int) {
def toXml: String = {
val DurationTemplate =
"""| <duration run="$run$" millis="$millis$"/>
|""".stripMargin
DurationTemplate.
replaceFirst("""\\$run\\$""", run).
replaceFirst("""\\$millis\\$""", millis.toString)
}
}
}
|
hubertp/scalatest
|
src/main/scala/org/scalatest/tools/Durations.scala
|
Scala
|
apache-2.0
| 5,343 |
package org.eknet.spray.openid.consumer
import java.net.{HttpURLConnection, URL}
import scala.concurrent.{ExecutionContext, Future}
object ClaimedIdentifier {
/**
* Normalizes the given identifier to a valid OpenId identifier as described in
   * [[http://openid.net/specs/openid-authentication-2_0.html#normalization]]
*
* @param id
* @return
*/
def normalize(id: String)(implicit ec: ExecutionContext): Future[String] = {
def isRedirect(code: Int) = code > 300 && code < 399
//todo: check for infinite redirection
@scala.annotation.tailrec
def followRedirects(url: String): String = {
val conn = new URL(url).openConnection().asInstanceOf[HttpURLConnection]
conn.setInstanceFollowRedirects(false)
if (isRedirect(conn.getResponseCode)) {
val nextUrl = Option(conn.getHeaderField("Location")).filter(_.trim.nonEmpty) match {
case Some(n) => n
case _ => sys.error("Unable to follow url " + url)
}
conn.disconnect()
followRedirects(nextUrl)
} else {
conn.disconnect()
url
}
}
val urlString = (stripXri andThen isXri andThen toUrl)(id)
Future(followRedirects(urlString))
}
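  // Usage sketch (illustrative; performs blocking HTTP requests inside the Future):
  //   normalize("example.com")   // resolves to the final URL reached from
  //                              // "http://example.com" after following redirects
  //   normalize("=name")         // throws UnsupportedOperationException (XRI unsupported)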
private val stripXri = (id: String) => if (id startsWith "xri://") id.substring(6) else id
private val isXri = (id: String) => {
val xriChars = Set('@', '=', '+', '$', '!', '(')
if (id.isEmpty) throw new IllegalArgumentException("Empty id string")
else if (xriChars contains id.charAt(0)) throw new UnsupportedOperationException("XRI uris are currently not supported")
else id
}
private val toUrl = (id: String) => {
val withScheme = if (id startsWith "http") id else "http://" + id
withScheme.indexOf('#') match {
case i if i > 0 => withScheme.substring(0, i)
case _ => withScheme
}
}
}
|
eikek/spray-openid
|
src/main/scala/org/eknet/spray/openid/consumer/ClaimedIdentifier.scala
|
Scala
|
apache-2.0
| 1,860 |
package im.actor.server.models.voximplant
case class VoxUser(userId: Int, voxUserId: Long, userName: String, displayName: String, salt: String)
|
boneyao/actor-platform
|
actor-server/actor-models/src/main/scala/im/actor/server/models/voximplant/VoxUser.scala
|
Scala
|
mit
| 144 |
package core.payment
import java.net.{ URL, URLEncoder }
import java.util.Date
import javax.inject.Inject
import com.lvxingpai.inject.morphia.MorphiaMap
import com.lvxingpai.model.marketplace.order.{ Bounty, Prepay }
import core.api.BountyAPI
import core.exception.GeneralPaymentException
import core.formatter.marketplace.order.BountyFormatter
import core.misc.Utils
import core.payment.PaymentService.Provider
import core.service.ViaeGateway
import org.mongodb.morphia.Datastore
import play.api.Play.current
import play.api.inject.BindingKey
import play.api.{ Configuration, Play }
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
/**
* Created by topy on 2016/4/11.
*/
class BountyPayAli @Inject() (private val morphiaMap: MorphiaMap, implicit private val viaeGateway: ViaeGateway) extends BountyPay {
override lazy val datastore: Datastore = morphiaMap.map("k2")
override lazy val provider: Provider.Value = Provider.Alipay
override def createPrepay(bounty: Bounty): Future[Option[Prepay]] = {
val providerName = provider.toString
    // Create a new Prepay object
val prepay = new Prepay
prepay.provider = providerName
prepay.amount = bounty.bountyPrice
prepay.createTime = new Date
prepay.updateTime = new Date
prepay.prepayId = bounty.itemId.toString
val query = datastore.createQuery(classOf[Bounty]) field "itemId" equal bounty.itemId field
s"paymentInfo.$providerName" equal null
Future {
val ops = datastore.createUpdateOperations(classOf[Bounty]).set(s"paymentInfo.$providerName", prepay)
val updateResult = datastore.update(query, ops)
if (updateResult.getUpdatedExisting) Some(prepay)
else None
}
}
/**
   * Gets the payment details of the order for a specific channel (i.e. whether it has been paid).
   * Since Alipay provides no active query interface, the value stored in paymentInfo is returned as-is.
   * @param bounty the bounty order
* @return
*/
override def refreshPaymentStatus(bounty: Bounty): Future[Bounty] = Future(bounty)
/**
   * Gets the sidecar information (e.g. the signature is carried in it).
* @return
*/
override protected def createSidecar(bounty: Bounty, prepay: Prepay): Map[String, Any] = {
    // Return the request string carrying the signature
val requestMap = BountyPayAli.RequestMap(prepay.prepayId, bounty.consumerId.toString, bounty.itemId.toString + bounty.bountyPrice,
bounty.bountyPrice)
Map("requestString" -> requestMap.requestString)
}
/**
   * Handles the asynchronous callback sent by the payment channel's server.
* @param params
* @return
*/
override def handleCallback(params: Map[String, Any]): Future[Any] = {
    // Convert the Map[String, Seq[String]] into a Map[String, String]
val data = params mapValues {
case ss: Seq[_] => ss mkString ""
case s: String => s
}
implicit val ds = datastore
try {
      // Check that the signature is valid
if (!BountyPayAli.verifyAlipay(data, data("sign")))
throw GeneralPaymentException("Alipay signature check failed.")
      // Check the trade status
val tradeStatus = data.getOrElse("trade_status", "")
tradeStatus match {
case "WAIT_BUYER_PAY" | "TRADE_CLOSED" => Future("success") // 忽略该请求
case "TRADE_SUCCESS" | "TRADE_FINISHED" =>
          // The order was paid successfully
          // Get the order status
val tradeNumber = data.getOrElse("out_trade_no", "")
val bountyId = try {
tradeNumber.toLong
} catch {
case _: NumberFormatException => throw GeneralPaymentException(s"Invalid out_trade_no: $tradeNumber")
}
for {
_ <- BountyAPI.setBountyPaid(bountyId, PaymentService.Provider.Alipay)
} yield {
"success"
}
}
} catch {
case e: GeneralPaymentException => Future {
throw e
}
}
}
/**
   * Queries a refund.
* @param params
* @return
*/
override def refundQuery(params: Map[String, Any]): Future[Any] = ???
/**
   * Performs the refund operation.
* @param orderId
* @param refundPrice
* @return
*/
override def refund(userId: Long, orderId: Long, refundPrice: Option[Int], memo: String): Future[Unit] = null
/**
   * Computes the signature.
* @param data
* @return
*/
private def genSign(data: Map[String, String]): String = {
val stringA = (for ((k, v) <- data.toList.sorted) yield s"$k=$v").mkString("&")
val stringSignTemp = stringA + "&key=" + BountyPayAli.md5Key
Utils.MD5(stringSignTemp).toUpperCase
}
/**
   * Refund processing.
* @return
*/
override def refundProcess(bounty: Bounty, amount: Int): Future[Unit] = Future {
val viae = Play.application.injector instanceOf classOf[ViaeGateway]
val orderNode = BountyFormatter.instance.formatJsonNode(bounty)
// viae.sendTask("viae.job.marketplace.alipayRefund", kwargs = Some(Map("order" -> orderNode, "amount" -> amount)))
}
}
object BountyPayAli {
lazy val instance = Play.application.injector.instanceOf[BountyPayAli]
lazy private val conf = {
val key = BindingKey(classOf[Configuration]) qualifiedWith "default"
Play.current.injector instanceOf key
}
lazy private val partner = (conf getString "hanse.payment.alipay.partner").get
lazy private val sellerId = (conf getString "hanse.payment.alipay.id").get
lazy private val refundOrderUrl = (conf getString "hanse.payment.alipay.refundOrderUrl").get
lazy private val md5Key = (conf getString "hanse.payment.alipay.md5Key").get
lazy private val notifyUrl = {
val baseUrl = new URL(conf getString "hanse.baseUrl" getOrElse "http://localhost:9000")
val protocol = baseUrl.getProtocol
val host = baseUrl.getHost
val port = Some(baseUrl.getPort) flatMap (p => if (p == -1 || p == 80) None else Some(p))
val path1 = baseUrl.getPath
val path2 = controllers.routes.BountyCtrl.alipayCallback("bounties").url
s"$protocol://$host${port map (p => s":$p") getOrElse ""}$path1$path2"
}
  /**
   * Gets the private key string.
   * @return the private key string
   */
lazy private val privateKey = (conf getString "hanse.payment.alipay.privateKey").get
lazy val alipayPublicKey = (conf getString "hanse.payment.alipay.alipayPublicKey").get
  /**
   * Verifies an Alipay signature.
   * @param params the data to verify
   * @param sign the signature
   * @return whether the signature check passed
   */
def verifyAlipay(params: Map[String, String], sign: String): Boolean = {
    // Sort the received data by key (dictionary order)
val sortedKeys = params.keys.toSeq.sorted
    // Drop the "sign" and "sign_type" fields and assemble the remaining data into the string to verify
val contents = sortedKeys filterNot (Seq("sign", "sign_type") contains _) map
(key => s"$key=${params(key)}") mkString "&"
RSA.verify(contents, sign, alipayPublicKey, "utf-8")
}
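  // A minimal sketch of the verification flow (hypothetical values): "sign" and
  // "sign_type" are excluded from the signed content, so the string verified
  // below would be "out_trade_no=123&trade_status=TRADE_SUCCESS".
  //   verifyAlipay(Map("out_trade_no" -> "123", "trade_status" -> "TRADE_SUCCESS",
  //     "sign" -> sig, "sign_type" -> "RSA"), sig)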
  /**
   * An Alipay payment request.
   *
   * @param outTradeNo trade order number
   * @param subject item title
   * @param body item description
   * @param amount order amount
   */
case class RequestMap(outTradeNo: String, subject: String, body: String, amount: Float) {
val service = "mobile.securitypay.pay"
val charset = "utf-8"
val signType = "RSA"
val paymentType = "1"
lazy val requestString = {
val requestMap = Map("service" -> service, "partner" -> partner, "_input_charset" -> charset,
"seller_id" -> sellerId, "total_fee" -> (amount / 100.0).toString,
"notify_url" -> notifyUrl, "out_trade_no" -> (outTradeNo take 64), "payment_type" -> paymentType,
"subject" -> (subject take 128).replaceAllLiterally("\\"", ""),
"body" -> (body take 512).replaceAllLiterally("\\"", ""))
val parameters: Seq[String] = (requestMap map (entry => {
val key = entry._1
val value = entry._2
s"""$key=\\"$value\\""""
})).toSeq
      // Sign the parameters
val sign = URLEncoder.encode(RSA.sign(parameters mkString "&", privateKey, "utf-8"), "utf-8")
      parameters ++ Seq(s"""sign=\"$sign\"""", s"""sign_type=\"$signType\"""") mkString "&"
}
}
}
|
Lvxingpai/Hanse
|
app/core/payment/BountyPayAli.scala
|
Scala
|
apache-2.0
| 8,029 |
package enpassant
import core.Config
import akka.event.LoggingAdapter
import akka.http.scaladsl.server.Route
trait Dev {
val config: Config
def log: LoggingAdapter
def debug(route: Route): Route = {
config.mode match {
case Some("dev") =>
ctx =>
val start = System.currentTimeMillis
log.info(ctx.request.toString)
val result = route(ctx)
val runningTime = System.currentTimeMillis - start
log.info(s"Running time is ${runningTime} ms")
result
case _ => route
}
}
}
|
enpassant/psmith
|
src/main/scala/enpassant/Dev.scala
|
Scala
|
apache-2.0
| 565 |
package signal
import scala.annotation.tailrec
import scala.collection.generic.CanBuildFrom
import scala.collection.immutable.{ LinearSeq, Seq }
import scala.reflect.ClassTag
import breeze.linalg.Matrix
/** Single second order section (SOS) digital filter.
*
* The filter is described by the `z`-space transfer function:
* {{{
 *          b0 + b1*z^{-1} + b2*z^{-2}
 *   H(z) = --------------------------
 *           1 + a1*z^{-1} + a2*z^{-2}
* }}}
* This is a second order IIR filter with an `a0` coefficient of 1.
*
* ===Algorithm===
* The function uses the Direct Form II Transposed Structure of an IIR filter
* in order to evaluate the result. The filter is evaluated as a finite
* difference equation using a state variable '''z''':
* {{{
 *   y(m)  = b0 * x(m) + z0(m-1)
 *   z0(m) = b1 * x(m) + z1(m-1) - a1 * y(m)
 *   z1(m) = b2 * x(m) - a2 * y(m)
* }}}
* in which `m` is the sample number.
*
* @tparam T type of the filter (must be available as a `Numeric[T]`)
* @param b0 filter coefficient
* @param b1 filter coefficient
* @param b2 filter coefficient
* @param a1 filter coefficient
* @param a2 filter coefficient
* @author Jonathan Merritt <[email protected]>
*/
case class SOSFilt[@specialized(Float, Double) T]
(b0: T, b1: T, b2: T, a1: T, a2: T)
(implicit n: Numeric[T]) {
import n._
/** Applies the filter.
*
* The return type of the method is determined by the signal, `x`. If `x`
* is a lazy collection (for example, a `Stream`), then the filter is also
* evaluated lazily.
*
* @tparam Repr the type of the signal, which must be available as an
* `collection.immutable.Seq[T]`
* @tparam That the return type, which must have a
* `CanBuildFrom[Repr, T, That]`
* @param x signal to filter
* @return filtered signal
*/
def apply[Repr]
(x: Repr)
(implicit seqX: Repr => Seq[T],
bf: CanBuildFrom[Repr, T, Repr],
m: ClassTag[T]): Repr = {
// filter iterator
val filterIterator = new Iterator[T] {
private var (z0, z1) = (n.zero, n.zero) // initial state
private val xIterator = x.iterator // iterator over x signal
override def hasNext: Boolean = xIterator.hasNext
override def next(): T = {
val xm: T = xIterator.next
val ym: T = b0 * xm + z0
z0 = b1 * xm + z1 - a1 * ym
z1 = b2 * xm - a2 * ym
ym
}
}
// create a builder for the result
val builder = bf(x)
builder ++= filterIterator
builder.result
}
}
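// A minimal usage sketch (the coefficients are hypothetical, not taken from the
// library's tests); applying the filter to a lazy collection evaluates lazily:
//   val filt = SOSFilt(0.5, 0.5, 0.0, -0.2, 0.1) // b0, b1, b2, a1, a2
//   val y = filt(Vector(1.0, 0.0, 0.0, 0.0))     // head of the impulse response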
object SOSFilt {
/** Creates an instance of the filter with a non-unity `a0` coefficient.
*
* Normally, `SOSFilter` objects are created with an un-specified `a0`
* coefficient of 1. This method allows the user to specify the value
* of the `a0` coefficient. All other coefficients are divided by the
* `a0` value before being passed on to the normal constructor.
*/
def apply[T]
(b0: T, b1: T, b2: T, a0: T, a1: T, a2: T)
(implicit f: Fractional[T]): SOSFilt[T] = {
import f._
SOSFilt(b0 / a0, b1 / a0, b2 / a0, a1 / a0, a2 / a0)
}
/** Applies an `SOSFilt` to a signal.
*
* @tparam T the type of the signal and filter
* @tparam Repr the signal representation, which must be available as a
* `collection.immutable.Seq[T]`
* @param sos second order section filter to apply
* @param x signal to which the filter should be applied
* @return filtered signal
*/
def sosfilt[T, Repr]
(sos: SOSFilt[T], x: Repr)
(implicit seqX: Repr => Seq[T],
bf: CanBuildFrom[Repr, T, Repr],
m: ClassTag[T]): Repr = sos(x)
/** Applies a stack of `SOSFilt`s to a signal.
*
* @tparam T the type of the signal and filter
* @tparam SOSRepr the filter stack representation, which must be available
* as a `collection.immutable.LinearSeq[SOSFilt[T]]`
* @tparam XRepr the signal representation, which must be available as a
* `collection.immutable.Seq[T]`
* @param sos stack of second order section filters to apply
* @param x signal to which the filter stack should be applied
* @return filtered signal
*/
def sosfilt[T, SOSRepr, XRepr]
(sos: SOSRepr, x: XRepr)
(implicit xSeq: XRepr => Seq[T],
sosSeq: SOSRepr => LinearSeq[SOSFilt[T]],
bf: CanBuildFrom[XRepr, T, XRepr],
m: ClassTag[T]): XRepr = {
val builder = bf(x)
builder ++= sosfiltTailRec(sos, x)
builder.result
}
@tailrec
private def sosfiltTailRec[T]
(sos: LinearSeq[SOSFilt[T]], x: Seq[T])
(implicit m: ClassTag[T]): Seq[T] = {
val head = sos.head
val tail = sos.tail
if (tail.isEmpty) {
sosfilt(head, x)
} else {
sosfiltTailRec(tail, sosfilt(head, x))
}
}
/** Applies SOS filter(s), specified as a `Matrix`.
*
* The `sos` matrix of filters should be specified as:
* {{{
* [ b00 b01 b02 a00 a01 a02 ]
* [ b10 b11 b12 a10 a11 a12 ]
* [ ... ]
 * [ bN0 bN1 bN2 aN0 aN1 aN2 ]
* }}}
* where `N` is the number of stacked filters.
*
* @tparam T the type of the signal and filter
* @tparam Repr the signal representation, which must be available as a
* `collection.immutable.Seq[T]`
* @param sos matrix of second-order section filter(s) to apply
* @param x signal to which the SOS filter(s) should be applied
* @return filtered signal
*/
def sosfilt[T, Repr]
(sos: Matrix[T], x: Repr)
(implicit xSeq: Repr => Seq[T],
f: Fractional[T],
bf: CanBuildFrom[Repr, T, Repr],
m: ClassTag[T]): Repr = {
// check the matrix size
require(sos.cols == 6)
require(sos.rows >= 1)
// convert the matrix to a sequence of filters and apply them
val filterSeq = (for (j <- 0 until sos.rows) yield {
SOSFilt(sos(j, 0), sos(j, 1), sos(j, 2), sos(j, 3), sos(j, 4), sos(j, 5))
}).toList
sosfilt(filterSeq, x)
}
}
|
lancelet/scalasignal
|
src/main/scala/signal/SOSFilt.scala
|
Scala
|
lgpl-2.1
| 5,885 |
package at.logic.gapt.provers.viper
import at.logic.gapt.formats.ClasspathInputFile
import at.logic.gapt.formats.tip.TipSmtParser
import at.logic.gapt.proofs.{ Sequent, SequentMatchers }
import at.logic.gapt.provers.viper.grammars.TreeGrammarProver
import org.specs2.mutable.Specification
import org.specs2.specification.core.Fragments
class ViperTest extends Specification with SequentMatchers {
  val optionsRegex = """;.*viper\s+(.*)""".r.unanchored
def extractOptions( contents: String ): List[String] =
contents match {
case optionsRegex( opts ) =>
        opts.split( " " ).toList.map { case "\"\"" => "" case a => a }
case _ => Nil
}
"known to be working problems" in {
Fragments.foreach( Seq(
"appnil",
"comm", "comm1", "commsx", "comms0",
"general", "generaldiffconcl", "linear",
"linear2par",
"square",
"minus", "plus0",
"prod_prop_31", "prod_prop_31_monomorphic"
) ) { prob =>
prob in {
var opts0 = ViperOptions( fixup = false )
if ( prob == "linear2par" )
skipped( "needs careful choice of instance for canonical substitution" )
if ( prob == "prod_prop_31" ) {
if ( !TipSmtParser.isInstalled )
skipped( "tip tool required for preprocessing" )
opts0 = opts0.copy( fixup = true )
}
val file = ClasspathInputFile( s"induction/$prob.smt2" )
val ( Nil, options ) = ViperOptions.parse( extractOptions( file.read ), opts0 )
val problem = if ( options.fixup ) TipSmtParser.fixupAndParse( file ) else TipSmtParser.parse( file )
val lk = new TreeGrammarProver( problem.ctx, problem.toSequent, options.treeGrammarProverOptions ).solve()
problem.ctx check lk
lk.conclusion.distinct.diff( problem.toSequent ) must_== Sequent()
}
}
}
}
|
gebner/gapt
|
tests/src/test/scala/at/logic/gapt/provers/viper/ViperTest.scala
|
Scala
|
gpl-3.0
| 1,853 |
package net.resonious.sburb.game
import scala.collection.mutable.ArrayBuffer
object TimedEvent {
var timedEvents = new ArrayBuffer[TimedEvent]
private var toRemove = new ArrayBuffer[TimedEvent]
def tick() = {
timedEvents foreach { event =>
if (event.tick())
toRemove += event
}
toRemove foreach { event =>
timedEvents -= event
}
toRemove.clear()
}
}
trait TimedEvent {
def tick(): Boolean
}
/*
* Easy delayed execution:
*
 * After(5, 'seconds) execute { println("whoaaa") }
*/
object After {
class Event(target: Int, function: => Unit) extends TimedEvent {
var timer: Int = 0
def tick(): Boolean = {
timer += 1
if (timer >= target) {
function
true
}
else false
}
}
class Preliminary(amount:Int) {
def execute(f: => Unit) = TimedEvent.timedEvents += new Event(amount, f)
}
def apply(amount: Int, scale: Symbol) = {
val actualAmount = scale match {
case 'ticks | 'tick => amount
case 'seconds | 'second => amount * 20
case 'minutes | 'minute => amount * 20 * 60
case 'hours | 'hour => amount * 20 * 60 * 60
}
new Preliminary(actualAmount)
}
}
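// The queue only advances when TimedEvent.tick() is pumped externally; assuming
// it is called once per Minecraft server tick (20 per second), After(2, 'seconds)
// fires once 40 ticks have elapsed.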
|
Resonious/mcsburb
|
src/main/scala/net/resonious/sburb/game/After.scala
|
Scala
|
mit
| 1,210 |
/**
* Copyright 2015 Yahoo Inc. Licensed under the Apache License, Version 2.0
* See accompanying LICENSE file.
*/
import controllers.KafkaManagerContext
import kafka.manager.KafkaManager
import play.api._
/**
* @author hiral
*/
object GlobalKafkaManager extends GlobalSettings {
private[this] var kafkaManager: KafkaManager = null
override def beforeStart(app: Application): Unit = {
Logger.info("Init kafka manager...")
KafkaManagerContext.getKafkaManger
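    // Crude startup barrier (assumption from context): give the manager time to
    // initialize before the application starts serving requests.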
Thread.sleep(5000)
}
override def onStop(app: Application) {
KafkaManagerContext.shutdown()
Logger.info("Application shutdown...")
}
}
|
wking1986/kafka-manager
|
app/GlobalKafkaManager.scala
|
Scala
|
apache-2.0
| 634 |
//package com.sksamuel.avro4s.github
//
//import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}
//
//import com.sksamuel.avro4s.record.decoder.ScalaEnumClass
//import com.sksamuel.avro4s.schema.Colours
//import com.sksamuel.avro4s.{AvroSchema, Decoder, DefaultFieldMapper}
//import org.apache.avro.generic.GenericData
//import org.apache.avro.generic.GenericData.EnumSymbol
//import org.scalatest.funsuite.AnyFunSuite
//import org.scalatest.matchers.should.Matchers
//
//class GithubIssue484 extends AnyFunSuite with Matchers {
//
// test("Serializable Scala Enum Decoder #484") {
// val baos = new ByteArrayOutputStream()
// val oos = new ObjectOutputStream(baos)
// oos.writeObject(Decoder[ScalaEnumClass])
// oos.close()
//
// val decoder = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray))
// .readObject()
// .asInstanceOf[Decoder[ScalaEnumClass]]
//
// val schema = AvroSchema[ScalaEnumClass]
// val record = new GenericData.Record(schema)
// record.put("colour", new EnumSymbol(schema.getField("colour").schema(), "Green"))
// decoder.decode(record) shouldBe ScalaEnumClass(Colours.Green)
// }
//}
|
sksamuel/avro4s
|
avro4s-core/src/test/scala/com/sksamuel/avro4s/github/GithubIssue484.scala
|
Scala
|
apache-2.0
| 1,206 |
package de.hpi.asg.breezetestgen.domain.components.brzcomponents
import de.hpi.asg.breezetestgen.domain.components.{BrzComponent, BrzComponentBehaviour, HandshakeComponent}
import BrzComponent._
import de.hpi.asg.breezetestgen.domain.Data
class FalseVariable(id: HandshakeComponent.Id,
readerSpec: Variable.ReaderSpec,
write: PushSpec,
signal: SyncSpec,
reads: Seq[PullSpec]) extends BrzComponent(id) {
type Behaviour = FalseVariableBehaviour
type C = FalseVariableBehaviour.ControlState
type D = Option[Data]
def behaviour(state: Option[HandshakeComponent.State[C, D]]): Behaviour =
new FalseVariableBehaviour(state getOrElse FalseVariableBehaviour.freshState)
object FalseVariableBehaviour {
sealed trait ControlState
case object Idle extends ControlState
case object Active extends ControlState
val freshState: HandshakeComponent.State[C, D] = HandshakeComponent.State(Idle, None)
}
class FalseVariableBehaviour(initState: HandshakeComponent.State[C, D]) extends BrzComponentBehaviour[C, D](initState) {
import FalseVariableBehaviour._
info(s"$id: FalseVariableBehaviour created in state: $initState")
when(Idle) {
case DataReq(`write`, newData, _) =>
info(s"$id: Got Data: $newData!")
request(signal)
goto(Active) using Option(newData)
}
when(Active) {
case Ack(`signal`, _) =>
info(s"$id: isch over, will acknowledge")
acknowledge(write)
goto(Idle) using None
case Req(reader, Some(data)) if reads contains reader =>
info(s"$id: reader #${reads.indexOf(reader)} wants to read me")
val filteredData: Data = readerSpec(reads.indexOf(reader)) match {
case Some(range) if range.isEmpty => data
case Some(range) => data.selectBits(range)
case None =>
error("could not find specified range!")
data
}
info(s"$filteredData read from $id")
dataAcknowledge(reader, filteredData)
stay
}
initialize()
}
}
|
0x203/BreezeTestGen
|
src/main/scala/de/hpi/asg/breezetestgen/domain/components/brzcomponents/FalseVariable.scala
|
Scala
|
mit
| 2,123 |
package scavlink.test.map
import scavlink.coord.{ENU, Geo}
import scavlink.message.Command
import scavlink.message.common.NavWaypoint
import scala.annotation.tailrec
import scala.math._
abstract class PolygonMission(start: Geo, length: Int, sides: Int) extends MissionValue {
val mission: Vector[Command] = {
val polygon = Polygon.make(start, length, sides)
NavWaypoint(start) +: polygon.map(NavWaypoint.apply) :+ NavWaypoint(start)
}
}
abstract class PolygonGuided(start: Geo, length: Int, sides: Int) extends GuidedValue {
val points: Seq[Geo] = Polygon.make(start, length, sides)
}
object Polygon {
def make(start: Geo, length: Double, sides: Int): Vector[Geo] = {
val angle = 360D / sides.toDouble
@tailrec
def rec(prev: Geo, h: Double, s: Int, acc: Vector[Geo]): Vector[Geo] = {
if (s < 0) {
acc
} else {
val loc = prev.move(length, h)
rec(loc, h - angle, s - 1, acc :+ loc)
}
}
rec(start, 0, sides, Vector.empty)
}
}
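// For example (hypothetical start position): Polygon.make(home, 100, 4) yields
// the four corners of a 100 m square, turning 360/4 = 90 degrees after each leg.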
abstract class CorkscrewMission(start: Geo) extends MissionValue {
val mission: Vector[Command] = Corkscrew.make(start, .5, .1).map(NavWaypoint.apply)
}
object Corkscrew {
def make(start: Geo, coilRate: Double, heightRate: Double): Vector[Geo] = {
(1 to 30).map {
i =>
val t = i.toDouble
        start + ENU(t * cos(t * coilRate), t * sin(t * coilRate), t * heightRate)
}.toVector
}
}
|
nickolasrossi/scavlink
|
src/it/scala/scavlink/test/map/GenericFlights.scala
|
Scala
|
mit
| 1,433 |
package core.execution.tasks
import ch.qos.logback.classic.Logger
import com.martiansoftware.nailgun.NGContext
import core.config.FsbtModule
import core.config.compile.ExecutionConfig
import core.execution.Task
case class Stop() extends Task {
def perform(config: FsbtModule)(implicit ctx: NGContext, logger: Logger): Unit = {
ctx.getNGServer.shutdown(true)
logger.debug("fsbt server stopped")
}
  override def perform(module: FsbtModule, config: ExecutionConfig, moduleTaskCompleted: FsbtModule => Unit)(implicit ctx: NGContext, logger: Logger): Unit = {
    // no-op: shutting down the server is handled by the perform overload above
  }
}
|
Humblehound/fsbt
|
server/src/main/scala/core/execution/tasks/Stop.scala
|
Scala
|
mit
| 575 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.livy.test.framework
import java.util.regex.Pattern
import javax.servlet.http.HttpServletResponse
import scala.annotation.tailrec
import scala.concurrent.duration._
import scala.language.postfixOps
import scala.util.{Either, Left, Right}
import com.fasterxml.jackson.annotation.JsonIgnoreProperties
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import org.apache.hadoop.yarn.api.records.ApplicationId
import org.apache.hadoop.yarn.util.ConverterUtils
import org.apache.http.client.methods.HttpDelete
import org.apache.http.client.methods.HttpGet
import org.apache.http.client.methods.HttpPost
import org.apache.http.entity.StringEntity
import org.apache.http.impl.client.CloseableHttpClient
import org.apache.http.HttpResponse
import org.apache.http.StatusLine
import org.scalatest.concurrent.Eventually._
import org.apache.livy.server.batch.CreateBatchRequest
import org.apache.livy.server.interactive.CreateInteractiveRequest
import org.apache.livy.sessions.{Kind, SessionKindModule, SessionState}
import org.apache.livy.utils.AppInfo
object LivyRestClient {
private val BATCH_TYPE = "batches"
private val INTERACTIVE_TYPE = "sessions"
// TODO Define these in production code and share them with test code.
@JsonIgnoreProperties(ignoreUnknown = true)
private case class StatementResult(id: Int, state: String, output: Map[String, Any])
private case class CompletionResult(candidates: Seq[String])
@JsonIgnoreProperties(ignoreUnknown = true)
case class StatementError(ename: String, evalue: String, stackTrace: Seq[String])
@JsonIgnoreProperties(ignoreUnknown = true)
case class SessionSnapshot(
id: Int,
appId: Option[String],
state: String,
appInfo: AppInfo,
log: IndexedSeq[String])
}
class LivyRestClient(val httpClient: CloseableHttpClient, val livyEndpoint: String) {
import LivyRestClient._
val mapper = new ObjectMapper()
.registerModule(DefaultScalaModule)
.registerModule(new SessionKindModule())
class Session(val id: Int, sessionType: String) {
val url: String = s"$livyEndpoint/$sessionType/$id"
def appId(): ApplicationId = {
ConverterUtils.toApplicationId(snapshot().appId.get)
}
def snapshot(): SessionSnapshot = {
val httpGet = new HttpGet(url)
val r = httpClient.execute(httpGet)
val statusLine = r.getStatusLine()
val responseBody = r.getEntity().getContent
val sessionSnapshot = mapper.readValue(responseBody, classOf[SessionSnapshot])
r.close()
assertStatusCode(statusLine, HttpServletResponse.SC_OK)
sessionSnapshot
}
def stop(): Unit = {
val httpDelete = new HttpDelete(url)
val r = httpClient.execute(httpDelete)
r.close()
eventually(timeout(30 seconds), interval(1 second)) {
verifySessionDoesNotExist()
}
}
def verifySessionState(state: SessionState): Unit = {
verifySessionState(Set(state))
}
def verifySessionState(states: Set[SessionState]): Unit = {
      // Travis uses a very slow VM and needs a longer timeout. The original
      // timeout is kept otherwise to avoid slowing down local development.
      val t = if (Cluster.isRunningOnTravis) 5.minutes else 2.minutes
      val strStates = states.map(_.toString)
eventually(timeout(t), interval(1 second)) {
val s = snapshot().state
assert(strStates.contains(s), s"Session $id state $s doesn't equal one of $strStates")
}
}
def verifySessionDoesNotExist(): Unit = {
val httpGet = new HttpGet(url)
val r = httpClient.execute(httpGet)
val statusLine = r.getStatusLine()
r.close()
assertStatusCode(statusLine, HttpServletResponse.SC_NOT_FOUND)
}
}
class BatchSession(id: Int) extends Session(id, BATCH_TYPE) {
def verifySessionDead(): Unit = verifySessionState(SessionState.Dead())
def verifySessionKilled(): Unit = verifySessionState(SessionState.Killed())
def verifySessionRunning(): Unit = verifySessionState(SessionState.Running)
def verifySessionSuccess(): Unit = verifySessionState(SessionState.Success())
}
class InteractiveSession(id: Int) extends Session(id, INTERACTIVE_TYPE) {
class Statement(code: String, codeKind: Option[Kind] = None) {
val stmtId = {
val requestBody = if (codeKind.isDefined) {
Map("code" -> code, "kind" -> codeKind.get.toString())
} else {
Map("code" -> code)
}
val httpPost = new HttpPost(s"$url/statements")
val entity = new StringEntity(mapper.writeValueAsString(requestBody))
httpPost.setEntity(entity)
val r = httpClient.execute(httpPost)
val statusLine = r.getStatusLine()
val responseBody = r.getEntity().getContent
val newStmt = mapper.readValue(responseBody, classOf[StatementResult])
r.close()
assertStatusCode(statusLine, HttpServletResponse.SC_CREATED)
newStmt.id
}
final def result(): Either[String, StatementError] = {
eventually(timeout(1 minute), interval(1 second)) {
val httpGet = new HttpGet(s"$url/statements/$stmtId")
val r = httpClient.execute(httpGet)
val statusLine = r.getStatusLine()
val responseBody = r.getEntity().getContent
val newStmt = mapper.readValue(responseBody, classOf[StatementResult])
r.close()
assertStatusCode(statusLine, HttpServletResponse.SC_OK)
assert(newStmt.state == "available", s"Statement isn't available: ${newStmt.state}")
val output = newStmt.output
output.get("status") match {
case Some("ok") =>
val data = output("data").asInstanceOf[Map[String, Any]]
var rst = data.getOrElse("text/plain", "")
val magicRst = data.getOrElse("application/vnd.livy.table.v1+json", null)
val jsonRst = data.getOrElse("application/json", null)
if (magicRst != null) {
rst = mapper.writeValueAsString(magicRst)
} else if (jsonRst != null) {
rst = mapper.writeValueAsString(jsonRst)
}
Left(rst.asInstanceOf[String])
case Some("error") => Right(mapper.convertValue(output, classOf[StatementError]))
case Some(status) =>
throw new IllegalStateException(s"Unknown statement $stmtId status: $status")
case None =>
throw new IllegalStateException(s"Unknown statement $stmtId output: $newStmt")
}
}
}
def verifyResult(expectedRegex: String): Unit = {
result() match {
case Left(result) =>
if (expectedRegex != null) {
matchStrings(result, expectedRegex)
}
case Right(error) =>
assert(false, s"Got error from statement $stmtId $code: ${error.evalue}")
}
}
def verifyError(
ename: String = null, evalue: String = null, stackTrace: String = null): Unit = {
result() match {
case Left(result) =>
assert(false, s"Statement $stmtId `$code` expected to fail, but succeeded.")
case Right(error) =>
          val remoteStack = Option(error.stackTrace).getOrElse(Nil).mkString("\n")
Seq(error.ename -> ename, error.evalue -> evalue, remoteStack -> stackTrace).foreach {
case (actual, expected) if expected != null => matchStrings(actual, expected)
case _ =>
}
}
}
private def matchStrings(actual: String, expected: String): Unit = {
val regex = Pattern.compile(expected, Pattern.DOTALL)
assert(regex.matcher(actual).matches(), s"$actual did not match regex $expected")
}
}
class Completion(code: String, kind: String, cursor: Int) {
val completions = {
val requestBody = Map("code" -> code, "cursor" -> cursor, "kind" -> kind)
val httpPost = new HttpPost(s"$url/completion")
val entity = new StringEntity(mapper.writeValueAsString(requestBody))
httpPost.setEntity(entity)
val r = httpClient.execute(httpPost)
val statusLine = r.getStatusLine()
val responseBody = r.getEntity().getContent
val res = mapper.readValue(responseBody, classOf[CompletionResult])
r.close()
assertStatusCode(statusLine, HttpServletResponse.SC_OK)
res.candidates
}
final def result(): Seq[String] = completions
def verifyContaining(expected: List[String]): Unit = {
      assert(expected.forall(result().toList.contains), s"Expected $expected in ${result()}")
}
def verifyNone(): Unit = {
assert(result() == List(), s"Expected no completion proposals but found $completions")
}
}
def run(code: String, codeKind: Option[Kind] = None): Statement = {
new Statement(code, codeKind)
}
def complete(code: String, kind: String, cursor: Int): Completion = {
new Completion(code, kind, cursor)
}
def runFatalStatement(code: String): Unit = {
val requestBody = Map("code" -> code)
val requestEntity = new StringEntity(mapper.writeValueAsString(requestBody))
val httpPost = new HttpPost(s"$url/statements")
httpPost.setEntity(requestEntity)
val r = httpClient.execute(httpPost)
r.close()
verifySessionState(SessionState.Dead())
}
def verifySessionIdle(): Unit = {
verifySessionState(SessionState.Idle)
}
def verifySessionKilled(): Unit = {
verifySessionState(SessionState.Killed())
}
}
def startBatch(
name: Option[String],
file: String,
className: Option[String],
args: List[String],
sparkConf: Map[String, String]): BatchSession = {
val r = new CreateBatchRequest()
r.file = file
r.name = name
r.className = className
r.args = args
r.conf = Map("spark.yarn.maxAppAttempts" -> "1") ++ sparkConf
val id = start(BATCH_TYPE, mapper.writeValueAsString(r))
new BatchSession(id)
}
def startSession(
name: Option[String],
kind: Kind,
sparkConf: Map[String, String],
heartbeatTimeoutInSecond: Int): InteractiveSession = {
val r = new CreateInteractiveRequest()
r.kind = kind
r.conf = sparkConf
r.name = name
r.heartbeatTimeoutInSecond = heartbeatTimeoutInSecond
val id = start(INTERACTIVE_TYPE, mapper.writeValueAsString(r))
new InteractiveSession(id)
}
def connectSession(id: Int): InteractiveSession = { new InteractiveSession(id) }
private def start(sessionType: String, body: String): Int = {
val httpPost = new HttpPost(s"$livyEndpoint/$sessionType")
val entity = new StringEntity(body)
httpPost.setEntity(entity)
val r = httpClient.execute(httpPost)
val statusLine = r.getStatusLine()
val responseBody = r.getEntity().getContent
val newSession = mapper.readValue(responseBody, classOf[SessionSnapshot])
r.close()
assertStatusCode(statusLine, HttpServletResponse.SC_CREATED)
newSession.id
}
private def assertStatusCode(r: StatusLine, expected: Int): Unit = {
def pretty(r: StatusLine): String = {
s"${r.getStatusCode} ${r.getReasonPhrase}"
}
assert(r.getStatusCode() == expected, s"HTTP status code != $expected: ${pretty(r)}")
}
}
|
ajbozarth/incubator-livy
|
integration-test/src/main/scala/org/apache/livy/test/framework/LivyRestClient.scala
|
Scala
|
apache-2.0
| 12,237 |
package io.getquill.context.cassandra.encoding
/**
* Developers API.
*
* End-users should rely on MappedEncoding since it's more general.
*/
case class CassandraMapper[I, O](f: I => O)
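// A minimal sketch (hypothetical mapping, not part of the library's tests):
// encoding java.util.UUID columns via their string form.
//   val uuidToString = CassandraMapper[java.util.UUID, String](_.toString)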
|
mentegy/quill
|
quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/CassandraMapper.scala
|
Scala
|
apache-2.0
| 190 |
package slick.jdbc
import scala.language.existentials
import java.sql.{PreparedStatement, ResultSet}
import slick.relational._
import slick.SlickException
import slick.ast.ScalaBaseType
/** Specialized JDBC ResultConverter for non-`Option` values. */
class BaseResultConverter[@specialized(Byte, Short, Int, Long, Char, Float, Double, Boolean) T](val ti: JdbcType[T], val name: String, val idx: Int) extends ResultConverter[JdbcResultConverterDomain, T] {
def read(pr: ResultSet) = {
val v = ti.getValue(pr, idx)
if(ti.wasNull(pr, idx)) throw new SlickException("Read NULL value for ResultSet column "+name)
v
}
def update(value: T, pr: ResultSet) = ti.updateValue(value, pr, idx)
def set(value: T, pp: PreparedStatement) =
ti.setValue(value, pp, idx)
override def getDumpInfo = super.getDumpInfo.copy(mainInfo = s"idx=$idx, name=$name", attrInfo = ": " + ti)
def width = 1
}
/** Specialized JDBC ResultConverter for handling values of type `Option[T]`.
* Boxing is avoided when the result is `None`. */
class OptionResultConverter[@specialized(Byte, Short, Int, Long, Char, Float, Double, Boolean) T](val ti: JdbcType[T], val idx: Int) extends ResultConverter[JdbcResultConverterDomain, Option[T]] {
def read(pr: ResultSet) = {
val v = ti.getValue(pr, idx)
if(ti.wasNull(pr, idx)) None else Some(v)
}
def update(value: Option[T], pr: ResultSet) = value match {
case Some(v) => ti.updateValue(v, pr, idx)
case _ => ti.updateNull(pr, idx)
}
def set(value: Option[T], pp: PreparedStatement) = value match {
case Some(v) => ti.setValue(v, pp, idx)
case _ => ti.setNull(pp, idx)
}
override def getDumpInfo = super.getDumpInfo.copy(mainInfo = s"idx=$idx", attrInfo = ": " + ti)
def width = 1
def getOrElse(default: () => T): DefaultingResultConverter[T] =
if(ti.scalaType.isPrimitive) new DefaultingResultConverter[T](ti, default, idx)
else new DefaultingResultConverter[T](ti, default, idx) {
override def read(pr: ResultSet) = {
val v = ti.getValue(pr, idx)
if(v.asInstanceOf[AnyRef] eq null) default() else v
}
}
def isDefined = new IsDefinedResultConverter[T](ti, idx)
}
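// Sketch of how getOrElse bridges Option columns to non-Option reads, assuming
// a hypothetical `intType: JdbcType[Int]` instance:
//   val opt = new OptionResultConverter[Int](intType, 1)
//   val withDefault = opt.getOrElse(() => 0) // SQL NULL reads yield 0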
/** Specialized JDBC ResultConverter for handling non-`Option` values with a default.
* A (possibly specialized) function for the default value is used to translate SQL `NULL` values. */
class DefaultingResultConverter[@specialized(Byte, Short, Int, Long, Char, Float, Double, Boolean) T](val ti: JdbcType[T], val default: () => T, val idx: Int) extends ResultConverter[JdbcResultConverterDomain, T] {
def read(pr: ResultSet) = {
val v = ti.getValue(pr, idx)
if(ti.wasNull(pr, idx)) default() else v
}
def update(value: T, pr: ResultSet) = ti.updateValue(value, pr, idx)
def set(value: T, pp: PreparedStatement) = ti.setValue(value, pp, idx)
override def getDumpInfo = super.getDumpInfo.copy(mainInfo = s"idx=$idx, default=" +
{ try default() catch { case e: Throwable => "["+e.getClass.getName+"]" } },
attrInfo = ": " + ti)
def width = 1
}
/** Specialized JDBC ResultConverter for handling `isDefined` checks for `Option` values. */
class IsDefinedResultConverter[@specialized(Byte, Short, Int, Long, Char, Float, Double, Boolean) T](val ti: JdbcType[T], val idx: Int) extends ResultConverter[JdbcResultConverterDomain, Boolean] {
def read(pr: ResultSet) = {
ti.getValue(pr, idx)
!ti.wasNull(pr, idx)
}
def update(value: Boolean, pr: ResultSet) =
throw new SlickException("Cannot insert/update IsDefined check")
def set(value: Boolean, pp: PreparedStatement) =
throw new SlickException("Cannot insert/update IsDefined check")
def width = 1
override def getDumpInfo = super.getDumpInfo.copy(mainInfo = s"idx=$idx", attrInfo = ": " + ti)
}
|
AtkinsChang/slick
|
slick/src/main/scala/slick/jdbc/JdbcResultConverter.scala
|
Scala
|
bsd-2-clause
| 3,787 |
package name.abhijitsarkar.scala.scalaimpatient.operators
import name.abhijitsarkar.scala.scalaimpatient.UnitSpec
class RichFileSpec extends UnitSpec {
"Unapply" should "work for one path segment, name and extension" in {
"/a/b.c" match {
case RichFile("/a", "b", "c") =>
}
}
"Unapply" should "work for multiple path segments, name and extension" in {
"/a/b/c.d" match {
case RichFile("/a/b", "c", "d") =>
}
}
"Unapply" should "work for no path segment, only name and extension" in {
"b.c" match {
case RichFile("", "b", "c") =>
}
}
"Unapply" should "not work for no extension" in {
intercept[MatchError] {
"b/c" match {
case RichFile(path, name, extension) =>
}
}
}
}
|
abhijitsarkar/scala-impatient
|
src/test/scala/name/abhijitsarkar/scala/scalaimpatient/operators/RichFileSpec.scala
|
Scala
|
gpl-3.0
| 764 |
package webserviceclients.fakes
import pages.acquire.SetupTradeDetailsPage.PostcodeValid
import uk.gov.dvla.vehicles.presentation.common.model.AddressModel
object FakeAddressLookupService {
val addressWithoutUprn = AddressModel(address = Seq("44 Hythe Road", "White City", "London", PostcodeValid))
final val BuildingNameOrNumberValid = "123ABC"
final val Line2Valid = "line2 stub"
final val Line3Valid = "line3 stub"
final val PostTownValid = "postTown stub"
}
|
dvla/vehicles-acquire-online
|
test/webserviceclients/fakes/FakeAddressLookupService.scala
|
Scala
|
mit
| 474 |
/**
* Copyright 2015 Otto (GmbH & Co KG)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.schedoscope.scheduler.actors
import akka.actor.{ Actor, ActorRef, Props, actorRef2Scala }
import akka.event.{ Logging, LoggingReceive }
import org.apache.commons.lang.exception.ExceptionUtils
import org.schedoscope.{ DriverSettings, SchedoscopeSettings }
import org.schedoscope.dsl.transformations._
import org.schedoscope.scheduler.driver.{ Driver, RetryableDriverException, DriverRunFailed, DriverRunHandle, DriverRunOngoing, DriverRunState, DriverRunSucceeded, FileSystemDriver, HiveDriver, MapreduceDriver, MorphlineDriver, OozieDriver, PigDriver, ShellDriver }
import org.schedoscope.scheduler.messages._
import scala.concurrent.duration.{ DurationInt, FiniteDuration }
import scala.language.postfixOps
import org.schedoscope.scheduler.driver.RetryableDriverException
/**
* A driver actor manages the executions of transformations using hive, oozie etc. The actual
* execution is done using a driver trait implementation. The driver actor code itself is transformation
* type agnostic. Driver actors poll the transformation tasks they execute from the transformation manager actor
*
*/
class DriverActor[T <: Transformation](transformationManagerActor: ActorRef, ds: DriverSettings, driverConstructor: (DriverSettings) => Driver[T], pingDuration: FiniteDuration) extends Actor {
import context._
val log = Logging(system, this)
var driver: Driver[T] = _
var runningCommand: Option[CommandWithSender] = None
/**
* Start ticking upon start.
*/
override def preStart() {
try {
driver = driverConstructor(ds)
} catch {
case t: Throwable => throw RetryableDriverException("Driver actor could not initialize driver because driver constructor throws exception. Restarting driver actor...", t)
}
logStateInfo("idle", "DRIVER ACTOR: initialized actor")
tick()
}
/**
* If the driver actor is being restarted by the transformation manager actor, the currently running action is reenqueued so it does not get lost.
*/
override def preRestart(reason: Throwable, message: Option[Any]) {
if (runningCommand.isDefined)
transformationManagerActor ! runningCommand.get
}
/**
* Provide continuous ticking in default state
*/
def tick() {
system.scheduler.scheduleOnce(pingDuration, self, "tick")
}
/**
* Message handler for the default state.
* Transitions only to state running, keeps polling the action manager for new work
*/
def receive = LoggingReceive {
case CommandWithSender(command, sender) => toRunning(CommandWithSender(command, sender))
case "tick" => {
transformationManagerActor ! PollCommand(driver.transformationName)
tick()
}
}
/**
* Message handler for the running state
* @param runHandle reference to the running driver
* @param originalSender reference to the viewActor that requested the transformation (for sending back the result)
*/
def running(runHandle: DriverRunHandle[T], originalSender: ActorRef): Receive = LoggingReceive {
case KillCommand() => {
driver.killRun(runHandle)
toReceive()
}
    // If we receive a command while busy, reschedule it by sending it back to the
    // transformation manager. Should this ever happen?
case c: CommandWithSender => transformationManagerActor ! c
    // on every tick, check the state of the currently running driver
case "tick" => try {
driver.getDriverRunState(runHandle) match {
case _: DriverRunOngoing[T] => tick()
case success: DriverRunSucceeded[T] => {
log.info(s"DRIVER ACTOR: Driver run for handle=${runHandle} succeeded.")
try {
driver.driverRunCompleted(runHandle)
} catch {
case d: RetryableDriverException => throw d
case t: Throwable => {
log.error(s"DRIVER ACTOR: Driver run for handle=${runHandle} failed because completion handler threw exception ${t}, trace ${ExceptionUtils.getStackTrace(t)}")
              originalSender ! TransformationFailure(runHandle, DriverRunFailed[T](driver, "Completion handler failed", t))
toReceive()
tick()
}
}
originalSender ! TransformationSuccess(runHandle, success)
toReceive()
tick()
}
case failure: DriverRunFailed[T] => {
log.error(s"DRIVER ACTOR: Driver run for handle=${runHandle} failed. ${failure.reason}, cause ${failure.cause}, trace ${if (failure.cause != null) ExceptionUtils.getStackTrace(failure.cause) else "no trace available"}")
try {
driver.driverRunCompleted(runHandle)
} catch {
case d: RetryableDriverException => throw d
            case t: Throwable => {
              // deliberately swallowed: the original run failure reported below takes precedence
            }
}
originalSender ! TransformationFailure(runHandle, failure)
toReceive()
tick()
}
}
} catch {
case exception: RetryableDriverException => {
log.error(s"DRIVER ACTOR: Driver exception caught by driver actor in running state, rethrowing: ${exception.message}, cause ${exception.cause}, trace ${ExceptionUtils.getStackTrace(exception)}")
throw exception
}
case t: Throwable => {
log.error(s"DRIVER ACTOR: Unexpected exception caught by driver actor in running state, rethrowing: ${t.getMessage()}, cause ${t.getCause()}, trace ${ExceptionUtils.getStackTrace(t)}")
throw t
}
}
}
/**
* State transition to default state.
*/
def toReceive() {
runningCommand = None
logStateInfo("idle", "DRIVER ACTOR: becoming idle")
become(receive)
}
/**
* State transition to running state.
*
   * Includes special handling of "Deploy" commands: those are executed directly, with no state transition despite the name of the function.
   * Otherwise the transformation is run using the driver instance and the actor switches to the running state.
*
* @param commandToRun
*/
def toRunning(commandToRun: CommandWithSender) {
runningCommand = Some(commandToRun)
try {
if (commandToRun.command.isInstanceOf[DeployCommand]) {
logStateInfo("deploy", s"DRIVER ACTOR: Running Deploy command")
driver.deployAll(ds)
commandToRun.sender ! DeployCommandSuccess()
logStateInfo("idle", "DRIVER ACTOR: becoming idle")
runningCommand = None
} else {
val runHandle = driver.run(commandToRun.command.asInstanceOf[T])
logStateInfo("running", s"DRIVER ACTOR: Running command ${commandToRun}, runHandle=${runHandle}", runHandle, driver.getDriverRunState(runHandle))
become(running(runHandle, commandToRun.sender))
}
} catch {
case exception: RetryableDriverException => {
log.error(s"DRIVER ACTOR: Driver exception caught by driver actor in receive state, rethrowing: ${exception.message}, cause ${exception.cause}")
throw exception
}
case t: Throwable => {
log.error(s"DRIVER ACTOR: Unexpected exception caught by driver actor in receive state, rethrowing: ${t.getMessage()}, cause ${t.getCause()}")
throw t
}
}
}
def logStateInfo(state: String, message: String, runHandle: DriverRunHandle[T] = null, runState: DriverRunState[T] = null) {
transformationManagerActor ! TransformationStatusResponse(state, self, driver, runHandle, runState)
log.info(message)
}
}
/**
* Factory methods for driver actors.
*/
object DriverActor {
def props(settings: SchedoscopeSettings, driverName: String, transformationManager: ActorRef) = {
val ds = settings.getDriverSettings(driverName)
driverName match {
case "hive" => Props(
classOf[DriverActor[HiveTransformation]],
transformationManager, ds, (ds: DriverSettings) => HiveDriver(ds), 5 seconds).withDispatcher("akka.actor.driver-dispatcher")
case "mapreduce" => Props(
classOf[DriverActor[MapreduceTransformation]],
transformationManager, ds, (ds: DriverSettings) => MapreduceDriver(ds), 5 seconds).withDispatcher("akka.actor.driver-dispatcher")
case "pig" => Props(
classOf[DriverActor[PigTransformation]],
transformationManager, ds, (ds: DriverSettings) => PigDriver(ds), 5 seconds).withDispatcher("akka.actor.driver-dispatcher")
case "filesystem" => Props(
classOf[DriverActor[FilesystemTransformation]],
transformationManager, ds, (ds: DriverSettings) => FileSystemDriver(ds), 100 milliseconds).withDispatcher("akka.actor.driver-dispatcher")
case "oozie" => Props(
classOf[DriverActor[OozieTransformation]],
transformationManager, ds, (ds: DriverSettings) => OozieDriver(ds), 5 seconds).withDispatcher("akka.actor.driver-dispatcher")
case "morphline" => Props(
classOf[DriverActor[MorphlineTransformation]],
transformationManager, ds, (ds: DriverSettings) => MorphlineDriver(ds), 5 seconds).withDispatcher("akka.actor.driver-dispatcher")
case "shell" => Props(
classOf[DriverActor[ShellTransformation]],
transformationManager, ds, (ds: DriverSettings) => ShellDriver(ds), 5 seconds).withDispatcher("akka.actor.driver-dispatcher")
case _ => throw RetryableDriverException(s"Driver for ${driverName} not found")
}
}
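  // Example wiring (hypothetical actor system and settings values): a hive
  // driver actor that polls the transformation manager for work.
  //   val hiveDriver = system.actorOf(props(settings, "hive", transformationManager))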
}
|
hpzorn/schedoscope
|
schedoscope-core/src/main/scala/org/schedoscope/scheduler/actors/DriverActor.scala
|
Scala
|
apache-2.0
| 9,874 |
/**
* Copyright 2017 Alessandro Simi
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.exemplary.aws
import java.net.URI
import com.amazonaws.ClientConfiguration
import com.amazonaws.auth.{AWSCredentialsProvider, DefaultAWSCredentialsProviderChain}
import com.amazonaws.http.{Invoker, JsonErrorResponseHandlerV2}
import com.amazonaws.internal.DefaultServiceEndpointBuilder
import com.amazonaws.regions.{Region, Regions}
import com.amazonaws.services.dynamodbv2.model._
import com.amazonaws.services.dynamodbv2.model.transform._
import com.amazonaws.transform.JsonErrorUnmarshallerV2
import io.exemplary.aws.AmazonDynamoDBNioClient._
import scala.collection.JavaConverters._
import scala.concurrent.{ExecutionContext, Future}
class AmazonDynamoDBNioClient(private[aws] val awsCredentialsProvider: AWSCredentialsProvider = new DefaultAWSCredentialsProviderChain,
private[aws] val config: ClientConfiguration = new ClientConfiguration(),
private[aws] val endpoint: URI = DefaultEndpoint)(implicit executionContext: ExecutionContext)
extends AmazonDynamoDBNio[AmazonDynamoDBNioClient] {
def this(region: Regions)(implicit executionContext: ExecutionContext) = this(
awsCredentialsProvider = new DefaultAWSCredentialsProviderChain,
config = new ClientConfiguration(),
endpoint = convertEndpointFromRegion(Region.getRegion(region), new ClientConfiguration())
)
def this(region: Regions,
awsCredentialsProvider: AWSCredentialsProvider)(implicit executionContext: ExecutionContext) = this(
awsCredentialsProvider = awsCredentialsProvider,
config = new ClientConfiguration(),
endpoint = convertEndpointFromRegion(Region.getRegion(region), new ClientConfiguration())
)
def this(region: Regions,
awsCredentialsProvider: AWSCredentialsProvider,
config: ClientConfiguration)(implicit executionContext: ExecutionContext) = this(
awsCredentialsProvider = awsCredentialsProvider,
config = config,
endpoint = convertEndpointFromRegion(Region.getRegion(region), config)
)
private val errorResponseHandler = new JsonErrorResponseHandlerV2(List(
new JsonErrorUnmarshallerV2(classOf[ResourceInUseException], "ResourceInUseException"),
new JsonErrorUnmarshallerV2(classOf[ItemCollectionSizeLimitExceededException], "ItemCollectionSizeLimitExceededException"),
new JsonErrorUnmarshallerV2(classOf[LimitExceededException], "LimitExceededException"),
new JsonErrorUnmarshallerV2(classOf[ConditionalCheckFailedException], "ConditionalCheckFailedException"),
new JsonErrorUnmarshallerV2(classOf[ProvisionedThroughputExceededException], "ProvisionedThroughputExceededException"),
new JsonErrorUnmarshallerV2(classOf[InternalServerErrorException], "InternalServerError"),
new JsonErrorUnmarshallerV2(classOf[ResourceNotFoundException], "ResourceNotFoundException"),
JsonErrorUnmarshallerV2.DEFAULT_UNMARSHALLER
).asJava)
private val internalInvoker = new Invoker(
serviceName = ServiceName,
endpoint = endpoint,
awsCredentialsProvider = awsCredentialsProvider,
config = config,
errorResponseHandler = errorResponseHandler,
executionContext = executionContext
)
  private lazy val invoker = internalInvoker.start() // started lazily, the first time the client is used
override def setEndpoint(endpoint: String): AmazonDynamoDBNioClient = {
shutdown()
new AmazonDynamoDBNioClient(awsCredentialsProvider, config, URI.create(endpoint))
}
override def setRegion(region: Region): AmazonDynamoDBNioClient = {
shutdown()
new AmazonDynamoDBNioClient(Regions.fromName(region.getName), awsCredentialsProvider, config)
}
override def batchGetItem(batchGetItemRequest: BatchGetItemRequest): Future[BatchGetItemResult] = invoker.invoke(
req = batchGetItemRequest,
marshaller = new BatchGetItemRequestMarshaller,
unmarshaller = new BatchGetItemResultJsonUnmarshaller
)
override def batchGetItem(requestItems: Map[String, KeysAndAttributes],
returnConsumedCapacity: String): Future[BatchGetItemResult] = batchGetItem(
new BatchGetItemRequest()
.withRequestItems(requestItems.asJava)
.withReturnConsumedCapacity(returnConsumedCapacity)
)
override def batchGetItem(requestItems: Map[String, KeysAndAttributes]): Future[BatchGetItemResult] = batchGetItem(
new BatchGetItemRequest()
.withRequestItems(requestItems.asJava)
)
override def batchWriteItem(batchWriteItemRequest: BatchWriteItemRequest): Future[BatchWriteItemResult] = invoker.invoke(
req = batchWriteItemRequest,
marshaller = new BatchWriteItemRequestMarshaller,
unmarshaller = new BatchWriteItemResultJsonUnmarshaller
)
override def batchWriteItem(requestItems: Map[String, List[WriteRequest]]): Future[BatchWriteItemResult] = batchWriteItem(
new BatchWriteItemRequest()
.withRequestItems(requestItems.mapValues(_.asJava).asJava)
)
override def createTable(createTableRequest: CreateTableRequest): Future[CreateTableResult] = invoker.invoke(
req = createTableRequest,
marshaller = new CreateTableRequestMarshaller,
unmarshaller = new CreateTableResultJsonUnmarshaller
)
override def createTable(attributeDefinitions: List[AttributeDefinition],
tableName: String,
keySchema: List[KeySchemaElement],
provisionedThroughput: ProvisionedThroughput): Future[CreateTableResult] = createTable(
new CreateTableRequest()
.withAttributeDefinitions(attributeDefinitions.asJava)
.withTableName(tableName)
.withKeySchema(keySchema.asJava)
.withProvisionedThroughput(provisionedThroughput)
)
override def deleteItem(deleteItemRequest: DeleteItemRequest): Future[DeleteItemResult] = invoker.invoke(
req = deleteItemRequest,
marshaller = new DeleteItemRequestMarshaller,
unmarshaller = new DeleteItemResultJsonUnmarshaller
)
override def deleteItem(tableName: String, key: Map[String, AttributeValue]): Future[DeleteItemResult] = deleteItem(
new DeleteItemRequest()
.withTableName(tableName)
.withKey(key.asJava)
)
override def deleteItem(tableName: String,
key: Map[String, AttributeValue],
returnValues: String): Future[DeleteItemResult] = deleteItem(
new DeleteItemRequest()
.withTableName(tableName)
.withKey(key.asJava)
.withReturnValues(returnValues)
)
override def deleteTable(deleteTableRequest: DeleteTableRequest): Future[DeleteTableResult] = invoker.invoke(
req = deleteTableRequest,
marshaller = new DeleteTableRequestMarshaller,
unmarshaller = new DeleteTableResultJsonUnmarshaller
)
override def deleteTable(tableName: String): Future[DeleteTableResult] = deleteTable(
new DeleteTableRequest().withTableName(tableName)
)
override def describeTable(describeTableRequest: DescribeTableRequest): Future[DescribeTableResult] = invoker.invoke(
req = describeTableRequest,
marshaller = new DescribeTableRequestMarshaller,
unmarshaller = new DescribeTableResultJsonUnmarshaller
)
override def describeTable(tableName: String): Future[DescribeTableResult] = describeTable(
new DescribeTableRequest().withTableName(tableName)
)
override def getItem(getItemRequest: GetItemRequest): Future[GetItemResult] = invoker.invoke(
req = getItemRequest,
marshaller = new GetItemRequestMarshaller,
unmarshaller = new GetItemResultJsonUnmarshaller
)
override def getItem(tableName: String, key: Map[String, AttributeValue]): Future[GetItemResult] = getItem(
new GetItemRequest().withTableName(tableName).withKey(key.asJava)
)
override def getItem(tableName: String,
key: Map[String, AttributeValue],
consistentRead: Boolean): Future[GetItemResult] = getItem(
new GetItemRequest()
.withTableName(tableName)
.withKey(key.asJava)
.withConsistentRead(consistentRead)
)
override def listTables(listTablesRequest: ListTablesRequest): Future[ListTablesResult] = invoker.invoke(
req = listTablesRequest,
marshaller = new ListTablesRequestMarshaller,
unmarshaller = new ListTablesResultJsonUnmarshaller
)
override def listTables: Future[ListTablesResult] = listTables(new ListTablesRequest)
override def listTables(exclusiveStartTableName: String): Future[ListTablesResult] = listTables(
new ListTablesRequest().withExclusiveStartTableName(exclusiveStartTableName)
)
override def listTables(exclusiveStartTableName: String, limit: Integer): Future[ListTablesResult] = listTables(
new ListTablesRequest()
.withExclusiveStartTableName(exclusiveStartTableName)
.withLimit(limit)
)
override def listTables(limit: Integer): Future[ListTablesResult] = listTables(
new ListTablesRequest().withLimit(limit)
)
override def putItem(putItemRequest: PutItemRequest): Future[PutItemResult] = invoker.invoke(
req = putItemRequest,
marshaller = new PutItemRequestMarshaller,
unmarshaller = new PutItemResultJsonUnmarshaller
)
override def putItem(tableName: String, item: Map[String, AttributeValue]): Future[PutItemResult] = putItem(
new PutItemRequest().withTableName(tableName).withItem(item.asJava)
)
override def putItem(tableName: String,
item: Map[String, AttributeValue],
returnValues: String): Future[PutItemResult] = putItem(
new PutItemRequest()
.withTableName(tableName)
.withItem(item.asJava)
.withReturnValues(returnValues)
)
override def query(queryRequest: QueryRequest): Future[QueryResult] = invoker.invoke(
req = queryRequest,
marshaller = new QueryRequestMarshaller,
unmarshaller = new QueryResultJsonUnmarshaller
)
override def scan(scanRequest: ScanRequest): Future[ScanResult] = invoker.invoke(
req = scanRequest,
marshaller = new ScanRequestMarshaller,
unmarshaller = new ScanResultJsonUnmarshaller
)
override def scan(tableName: String, attributesToGet: List[String]): Future[ScanResult] = scan(
new ScanRequest()
.withTableName(tableName)
.withAttributesToGet(attributesToGet.asJava)
)
override def scan(tableName: String, scanFilter: Map[String, Condition]): Future[ScanResult] = scan(
new ScanRequest()
.withTableName(tableName)
.withScanFilter(scanFilter.asJava)
)
override def scan(tableName: String,
attributesToGet: List[String],
scanFilter: Map[String, Condition]): Future[ScanResult] = scan(
new ScanRequest()
.withTableName(tableName)
.withAttributesToGet(attributesToGet.asJava)
.withScanFilter(scanFilter.asJava)
)
override def updateItem(updateItemRequest: UpdateItemRequest): Future[UpdateItemResult] = invoker.invoke(
req = updateItemRequest,
marshaller = new UpdateItemRequestMarshaller,
unmarshaller = new UpdateItemResultJsonUnmarshaller
)
override def updateItem(tableName: String,
key: Map[String, AttributeValue],
attributeUpdates: Map[String, AttributeValueUpdate]): Future[UpdateItemResult] = updateItem(
new UpdateItemRequest()
.withTableName(tableName)
.withKey(key.asJava)
.withAttributeUpdates(attributeUpdates.asJava)
)
override def updateItem(tableName: String,
key: Map[String, AttributeValue],
attributeUpdates: Map[String, AttributeValueUpdate],
returnValues: String): Future[UpdateItemResult] = updateItem(
new UpdateItemRequest()
.withTableName(tableName)
.withKey(key.asJava)
.withAttributeUpdates(attributeUpdates.asJava)
.withReturnValues(returnValues)
)
override def updateTable(updateTableRequest: UpdateTableRequest): Future[UpdateTableResult] = invoker.invoke(
req = updateTableRequest,
marshaller = new UpdateTableRequestMarshaller,
unmarshaller = new UpdateTableResultJsonUnmarshaller
)
override def updateTable(tableName: String,
provisionedThroughput: ProvisionedThroughput): Future[UpdateTableResult] = updateTable(
new UpdateTableRequest()
.withTableName(tableName)
.withProvisionedThroughput(provisionedThroughput)
)
override def shutdown(): Unit = {
internalInvoker.stop()
}
}
object AmazonDynamoDBNioClient {
private val ServiceName = "dynamodb"
private[aws] val DefaultEndpoint = URI.create("https://dynamodb.us-east-1.amazonaws.com")
private def convertEndpointFromRegion(region: Region, clientConfiguration: ClientConfiguration): URI = {
new DefaultServiceEndpointBuilder(ServiceName, clientConfiguration.getProtocol.toString).withRegion(region).getServiceEndpoint
}
}
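// Example construction (hypothetical region; credentials come from the default
// provider chain):
//   implicit val ec = scala.concurrent.ExecutionContext.global
//   val client = new AmazonDynamoDBNioClient(Regions.US_EAST_1)
//   client.listTables.foreach(r => println(r.getTableNames))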
|
alessandrosimi/aws-dynamodb-nio
|
dynamodb/src/main/scala/io/exemplary/aws/AmazonDynamoDBNioClient.scala
|
Scala
|
apache-2.0
| 13,495 |
package im.actor.server.util
import scala.concurrent.forkjoin.ThreadLocalRandom
import scala.concurrent.{ ExecutionContext, Future }
import scala.util.{ Failure, Success }
import akka.actor.ActorSystem
import com.amazonaws.services.s3.transfer.TransferManager
import com.sksamuel.scrimage.{ AsyncImage, Format, Position }
import slick.dbio.DBIO
import slick.driver.PostgresDriver.api._
import im.actor.api.rpc.files
import im.actor.api.rpc.files.{ Avatar, AvatarImage, FileLocation }
import im.actor.server.models.AvatarData
import im.actor.server.{ models, persist }
object ImageUtils {
val AvatarSizeLimit = 1024 * 1024 // TODO: configurable
val SmallSize = 100
val LargeSize = 200
def avatar(ad: models.AvatarData) =
(ad.smallOpt, ad.largeOpt, ad.fullOpt) match {
case (None, None, None) ⇒ None
case (smallOpt, largeOpt, fullOpt) ⇒
Some(files.Avatar(
avatarImage(smallOpt, SmallSize, SmallSize),
avatarImage(largeOpt, LargeSize, LargeSize),
avatarImage(fullOpt)
))
}
def avatarImage(idhashsize: Option[(Long, Long, Int)], width: Int, height: Int): Option[files.AvatarImage] =
idhashsize map {
case (id, hash, size) ⇒ files.AvatarImage(files.FileLocation(id, hash), width, height, size)
}
def avatarImage(idhashsizewh: Option[(Long, Long, Int, Int, Int)]): Option[files.AvatarImage] =
idhashsizewh flatMap {
case (id, hash, size, w, h) ⇒ avatarImage(Some((id, hash, size)), w, h)
}
def resizeTo(aimg: AsyncImage, side: Int)(implicit ec: ExecutionContext): Future[AsyncImage] = {
for {
scaledImg ← scaleTo(aimg, side)
resizedImg ← scaledImg.resizeTo(side, side, Position.Center)
} yield resizedImg
}
def scaleTo(aimg: AsyncImage, side: Int)(implicit ec: ExecutionContext): Future[AsyncImage] = {
val scaleFactor = side.toDouble / math.min(aimg.width, aimg.height)
aimg.scale(scaleFactor)
}
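  // e.g. (hypothetical dimensions) a 400x300 image with side = 100 scales by
  // 100/300 to roughly 133x100, which resizeTo then center-crops to 100x100.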
def resizeToSmall(aimg: AsyncImage)(implicit ec: ExecutionContext): Future[AsyncImage] = resizeTo(aimg, SmallSize)
def resizeToLarge(aimg: AsyncImage)(implicit ec: ExecutionContext): Future[AsyncImage] = resizeTo(aimg, LargeSize)
def dimensions(aimg: AsyncImage)(implicit ec: ExecutionContext): (Int, Int) =
(aimg.width, aimg.height)
def scaleAvatar(
fullFileId: Long,
rnd: ThreadLocalRandom
)(
implicit
fsAdapter: FileStorageAdapter,
db: Database,
ec: ExecutionContext,
system: ActorSystem
) = {
val smallFileName = "small-avatar.jpg"
val largeFileName = "large-avatar.jpg"
persist.File.find(fullFileId) flatMap {
case Some(fullFileModel) ⇒
fsAdapter.downloadFile(fullFileId) flatMap {
case Some(fullFile) ⇒
val action = for {
fullAimg ← DBIO.from(AsyncImage(fullFile))
(fiw, fih) = dimensions(fullAimg)
smallAimg ← DBIO.from(resizeToSmall(fullAimg))
largeAimg ← DBIO.from(resizeToLarge(fullAimg))
smallFile = fullFile.getParentFile.toPath.resolve(smallFileName).toFile
largeFile = fullFile.getParentFile.toPath.resolve(largeFileName).toFile
_ ← DBIO.from(smallAimg.writer(Format.JPEG).write(smallFile))
_ ← DBIO.from(largeAimg.writer(Format.JPEG).write(largeFile))
smallFileLocation ← fsAdapter.uploadFile(smallFileName, smallFile)
largeFileLocation ← fsAdapter.uploadFile(largeFileName, largeFile)
} yield {
// TODO: #perf calculate file sizes efficiently
val smallImage = AvatarImage(
smallFileLocation,
smallAimg.width,
smallAimg.height,
smallFile.length().toInt
)
val largeImage = AvatarImage(
largeFileLocation,
largeAimg.width,
largeAimg.height,
largeFile.length().toInt
)
val fullImage = AvatarImage(
FileLocation(fullFileId, ACLUtils.fileAccessHash(fullFileId, fullFileModel.accessSalt)),
fullAimg.width,
fullAimg.height,
fullFile.length().toInt
)
Avatar(Some(smallImage), Some(largeImage), Some(fullImage))
}
action.asTry map {
case Success(res) ⇒ Right(res)
case Failure(e) ⇒ Left(e)
}
case None ⇒ DBIO.successful(Left(new Exception("Failed to download file")))
}
case None ⇒
DBIO.successful(Left(new Exception("Cannot find file model")))
}
}
def getAvatar(avatarModel: models.AvatarData): Avatar = {
val smallImageOpt = avatarModel.smallOpt map {
case (fileId, fileHash, fileSize) ⇒ AvatarImage(FileLocation(fileId, fileHash), SmallSize, SmallSize, fileSize)
}
val largeImageOpt = avatarModel.largeOpt map {
case (fileId, fileHash, fileSize) ⇒ AvatarImage(FileLocation(fileId, fileHash), LargeSize, LargeSize, fileSize)
}
val fullImageOpt = avatarModel.fullOpt map {
case (fileId, fileHash, fileSize, w, h) ⇒ AvatarImage(FileLocation(fileId, fileHash), w, h, fileSize)
}
Avatar(smallImageOpt, largeImageOpt, fullImageOpt)
}
def getAvatarData(entityType: models.AvatarData.TypeVal, entityId: Int, avatar: Avatar): AvatarData = {
models.AvatarData(
entityType = entityType,
entityId = entityId.toLong,
smallAvatarFileId = avatar.smallImage map (_.fileLocation.fileId),
smallAvatarFileHash = avatar.smallImage map (_.fileLocation.accessHash),
smallAvatarFileSize = avatar.smallImage map (_.fileSize),
largeAvatarFileId = avatar.largeImage map (_.fileLocation.fileId),
largeAvatarFileHash = avatar.largeImage map (_.fileLocation.accessHash),
largeAvatarFileSize = avatar.largeImage map (_.fileSize),
fullAvatarFileId = avatar.fullImage map (_.fileLocation.fileId),
fullAvatarFileHash = avatar.fullImage map (_.fileLocation.accessHash),
fullAvatarFileSize = avatar.fullImage map (_.fileSize),
fullAvatarWidth = avatar.fullImage map (_.width),
fullAvatarHeight = avatar.fullImage map (_.height)
)
}
}
|
boneyao/actor-platform
|
actor-server/actor-utils/src/main/scala/im/actor/server/util/ImageUtils.scala
|
Scala
|
mit
| 6,329 |
package scala.scalanative
package nir
import util.sh
import Shows._
import fastparse.all.Parsed
import org.scalatest._
class ValParserTest extends FlatSpec with Matchers {
val global = Global.Top("test")
val noTpe = Type.None
"The NIR parser" should "parse `Val.None`" in {
val none: Val = Val.None
val Parsed.Success(result, _) =
parser.Val.None.parse(sh"$none".toString)
result should be(none)
}
it should "parse `Val.True`" in {
val `true`: Val = Val.True
val Parsed.Success(result, _) =
parser.Val.True.parse(sh"${`true`}".toString)
result should be(`true`)
}
it should "parse `Val.False`" in {
val `false`: Val = Val.False
val Parsed.Success(result, _) =
parser.Val.False.parse(sh"${`false`}".toString)
result should be(`false`)
}
it should "parse `Val.Zero`" in {
val zero: Val = Val.Zero(noTpe)
val Parsed.Success(result, _) =
parser.Val.Zero.parse(sh"$zero".toString)
result should be(zero)
}
it should "parse `Val.Undef`" in {
val undef: Val = Val.Undef(noTpe)
val Parsed.Success(result, _) =
parser.Val.Undef.parse(sh"$undef".toString)
result should be(undef)
}
it should "parse `Val.I8`" in {
val i8: Val = Val.I8(1)
val Parsed.Success(result, _) = parser.Val.I8.parse(sh"$i8".toString)
result should be(i8)
}
it should "parse `Val.I16`" in {
val i16: Val = Val.I16(2)
val Parsed.Success(result, _) = parser.Val.I16.parse(sh"$i16".toString)
result should be(i16)
}
it should "parse `Val.I32`" in {
val i32: Val = Val.I32(3)
val Parsed.Success(result, _) = parser.Val.I32.parse(sh"$i32".toString)
result should be(i32)
}
it should "parse `Val.I64`" in {
val i64: Val = Val.I64(4)
val Parsed.Success(result, _) = parser.Val.I64.parse(sh"$i64".toString)
result should be(i64)
}
it should "parse `Val.F32`" in {
val f32: Val = Val.F32(5.6.toFloat)
val Parsed.Success(result, _) = parser.Val.F32.parse(sh"$f32".toString)
result should be(f32)
}
it should "parse `Val.F64`" in {
val f64: Val = Val.F64(7.8)
val Parsed.Success(result, _) = parser.Val.F64.parse(sh"$f64".toString)
result should be(f64)
}
it should "parse `Val.Struct`" in {
val struct: Val = Val.Struct(global, Seq.empty)
val Parsed.Success(result, _) =
parser.Val.Struct.parse(sh"$struct".toString)
result should be(struct)
}
it should "parse `Val.Array`" in {
val array: Val = Val.Array(noTpe, Seq.empty)
val Parsed.Success(result, _) =
parser.Val.Array.parse(sh"$array".toString)
result should be(array)
}
it should "parse `Val.Chars`" in {
val chars: Val = Val.Chars("test")
val Parsed.Success(result, _) =
parser.Val.Chars.parse(sh"$chars".toString)
result should be(chars)
}
it should "parse `Val.Local`" in {
val local: Val = Val.Local(Local("test", 1), noTpe)
val Parsed.Success(result, _) =
parser.Val.Local.parse(sh"$local".toString)
result should be(local)
}
it should "parse `Val.Global`" in {
val global: Val = Val.Global(this.global, noTpe)
val Parsed.Success(result, _) =
parser.Val.Global.parse(sh"$global".toString)
result should be(global)
}
it should "parse `Val.Unit`" in {
val unit: Val = Val.Unit
val Parsed.Success(result, _) =
parser.Val.Unit.parse(sh"$unit".toString)
result should be(unit)
}
it should "parse `Val.Const`" in {
val const: Val = Val.Const(Val.None)
val Parsed.Success(result, _) =
parser.Val.Const.parse(sh"$const".toString)
result should be(const)
}
it should "parse `Val.String`" in {
val string: Val = Val.String("test")
val Parsed.Success(result, _) =
parser.Val.String.parse(sh"$string".toString)
result should be(string)
}
}
|
cedricviaccoz/scala-native
|
tools/src/test/scala/scala/scalanative/nir/ValParserTest.scala
|
Scala
|
bsd-3-clause
| 3,955 |
package sbt
import org.scalacheck._
import org.scalacheck.Arbitrary._
import Prop._
import sbt.librarymanagement._
import sjsonnew.shaded.scalajson.ast.unsafe.JValue
class CacheIvyTest extends Properties("CacheIvy") {
import sbt.util.{ CacheStore, SingletonCache }
import SingletonCache._
import sjsonnew._
import sjsonnew.support.scalajson.unsafe.Converter
private class InMemoryStore(converter: SupportConverter[JValue]) extends CacheStore {
private var content: JValue = _
override def delete(): Unit = ()
override def close(): Unit = ()
override def read[T: JsonReader](): T =
try converter.fromJsonUnsafe[T](content)
catch { case t: Throwable => t.printStackTrace(); throw t }
override def read[T: JsonReader](default: => T): T =
try read[T]()
catch { case _: Throwable => default }
override def write[T: JsonWriter](value: T): Unit =
content = converter.toJsonUnsafe(value)
}
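  // Design note (added): the store holds a single JValue in memory, so every
  // write replaces the previous value — exactly the contract the SingletonCache
  // round-trip properties below rely on, with no filesystem I/O in tests.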
private def testCache[T: JsonFormat, U](f: (SingletonCache[T], CacheStore) => U)(
implicit cache: SingletonCache[T]): U = {
val store = new InMemoryStore(Converter)
f(cache, store)
}
private def cachePreservesEquality[T: JsonFormat](m: T,
eq: (T, T) => Prop,
str: T => String): Prop = testCache[T, Prop] {
(cache, store) =>
cache.write(store, m)
val out = cache.read(store)
eq(out, m) :| s"Expected: ${str(m)}" :| s"Got: ${str(out)}"
}
implicit val arbConfigRef: Arbitrary[ConfigRef] = Arbitrary(
for {
n <- Gen.alphaStr
} yield ConfigRef(n)
)
implicit val arbExclusionRule: Arbitrary[InclExclRule] = Arbitrary(
for {
o <- Gen.alphaStr
n <- Gen.alphaStr
a <- Gen.alphaStr
v <- arbCrossVersion.arbitrary
cs <- arbitrary[List[ConfigRef]]
} yield InclExclRule(o, n, a, cs.toVector, v)
)
implicit val arbCrossVersion: Arbitrary[CrossVersion] = Arbitrary {
// Actual functions don't matter, just Disabled vs Binary vs Full
Gen.oneOf(Disabled(), Binary(), Full())
}
implicit val arbArtifact: Arbitrary[Artifact] = Arbitrary {
for {
(n, t, e, cls) <- arbitrary[(String, String, String, String)]
} yield Artifact(n, t, e, cls) // keep it simple
}
implicit val arbModuleID: Arbitrary[ModuleID] = Arbitrary {
for {
o <- Gen.identifier
n <- Gen.identifier
r <- for { n <- Gen.numChar; ns <- Gen.numStr } yield n + ns
cs <- arbitrary[Option[String]]
branch <- arbitrary[Option[String]]
isChanging <- arbitrary[Boolean]
isTransitive <- arbitrary[Boolean]
isForce <- arbitrary[Boolean]
explicitArtifacts <- Gen.listOf(arbitrary[Artifact])
exclusions <- Gen.listOf(arbitrary[InclExclRule])
inclusions <- Gen.listOf(arbitrary[InclExclRule])
extraAttributes <- Gen.mapOf(arbitrary[(String, String)])
crossVersion <- arbitrary[CrossVersion]
} yield
ModuleID(
organization = o,
name = n,
revision = r,
configurations = cs,
isChanging = isChanging,
isTransitive = isTransitive,
isForce = isForce,
explicitArtifacts = explicitArtifacts.toVector,
inclusions = inclusions.toVector,
exclusions = exclusions.toVector,
extraAttributes = extraAttributes,
crossVersion = crossVersion,
branchName = branch
)
}
property("moduleIDFormat") = forAll { (m: ModuleID) =>
def str(m: ModuleID) = {
import m._
s"ModuleID($organization, ${m.name}, $revision, $configurations, $isChanging, $isTransitive, $isForce, $explicitArtifacts, $exclusions, " +
s"$inclusions, $extraAttributes, $crossVersion, $branchName)"
}
def eq(a: ModuleID, b: ModuleID): Prop = {
def rest = a.withCrossVersion(b.crossVersion) == b
(a.crossVersion, b.crossVersion) match {
case (_: Disabled, _: Disabled) => rest
case (_: Binary, _: Binary) => rest
case (_: Full, _: Full) => rest
case (a, b) => Prop(false) :| s"CrossVersions don't match: $a vs $b"
}
}
import sbt.librarymanagement.LibraryManagementCodec._
cachePreservesEquality(m, eq _, str)
}
}
|
Duhemm/sbt
|
main-actions/src/test/scala/sbt/CacheIvyTest.scala
|
Scala
|
bsd-3-clause
| 4,325 |
package api.hue.endpoint
import api.hue.Bridge
import api.hue.dao.Controller
import api.hue.dao.attribute.Attribute
import play.api.libs.json._
import scala.concurrent.Future
/**
* API endpoint (e.g. groups or lights)
* @todo Figure out a way to make Reads/Writes generic
* @author ddexter
*/
trait Endpoint[T <: Controller] {
def bridge: Bridge
def name: String
def get: Future[Map[String, T]]
def put(id: String, attributes: Attribute*): Future[JsValue] = {
validate(attributes)
val data = attributes.foldLeft(Json.obj())((l, r) => l.deepMerge(r.toJs))
bridge.put(path + "/" + id + "/action", data)
}
protected def path: String
protected def supportedPutAttributes: Set[String]
protected def validate(attributes: Seq[Attribute]): Unit = {
if (!attributes.map(_.name).forall(supportedPutAttributes(_)))
throw new IllegalStateException("Invalid attribute for " + path + " endpoint")
}
}
|
ddexter/HomeBackend
|
src/main/scala/api/hue/endpoint/Endpoint.scala
|
Scala
|
apache-2.0
| 945 |
package com.eevolution.context.dictionary.domain.api.service
import com.eevolution.context.dictionary._
import com.eevolution.context.dictionary.domain.model.Task
/**
* Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* Email: [email protected], http://www.e-evolution.com , http://github.com/e-Evolution
* Created by [email protected] , www.e-evolution.com
*/
/**
* Task Service
*/
trait TaskService extends api.Service[Task, Int] {
//Definition
}
|
adempiere/ADReactiveSystem
|
dictionary-api/src/main/scala/com/eevolution/context/dictionary/domain/api/service/TaskService.scala
|
Scala
|
gpl-3.0
| 1,175 |
package com.twitter.finagle
import com.twitter.finagle.stats._
import com.twitter.finagle.thrift._
import com.twitter.finagle.util.Showable
import com.twitter.scrooge.{ThriftMethod, ThriftStruct}
import com.twitter.util.NonFatal
import java.lang.reflect.{Constructor, Method}
import java.net.SocketAddress
import org.apache.thrift.protocol.TProtocolFactory
import scala.reflect.ClassTag
private[twitter] object ThriftUtil {
private type BinaryService = Service[Array[Byte], Array[Byte]]
private val thriftFinagleClientParamTypes =
Seq(classOf[Service[_, _]], classOf[TProtocolFactory])
private val scrooge2FinagleClientParamTypes =
Seq(
classOf[Service[_, _]],
classOf[TProtocolFactory],
classOf[Option[_]],
classOf[StatsReceiver])
private val scrooge3FinagleClientParamTypes =
Seq(
classOf[Service[_, _]],
classOf[TProtocolFactory],
classOf[String],
classOf[StatsReceiver])
def findClass1(name: String): Option[Class[_]] =
try Some(Class.forName(name)) catch {
case _: ClassNotFoundException => None
}
def findClass[A](name: String): Option[Class[A]] =
for {
cls <- findClass1(name)
} yield cls.asInstanceOf[Class[A]]
def findConstructor[A](clz: Class[A], paramTypes: Class[_]*): Option[Constructor[A]] =
try {
Some(clz.getConstructor(paramTypes: _*))
} catch {
case _: NoSuchMethodException => None
}
def findMethod(clz: Class[_], name: String, params: Class[_]*): Option[Method] =
try Some(clz.getMethod(name, params:_*)) catch {
case _: NoSuchMethodException => None
}
def findRootWithSuffix(str: String, suffix: String): Option[String] =
if (str.endsWith(suffix)) Some(str.dropRight(suffix.length)) else None
lazy val findSwiftClass: Class[_] => Option[Class[_]] = {
val f = for {
serviceSym <- findClass1("com.twitter.finagle.exp.swift.ServiceSym")
meth <- findMethod(serviceSym, "isService", classOf[Class[_]])
} yield {
k: Class[_] =>
try {
if (meth.invoke(null, k).asInstanceOf[Boolean]) Some(k)
else None
} catch {
case NonFatal(_) => None
}
}
f getOrElse Function.const(None)
}
/**
* Construct an `Iface` based on an underlying [[com.twitter.finagle.Service]]
* using whichever Thrift code-generation toolchain is available.
*/
def constructIface[Iface](
underlying: Service[ThriftClientRequest, Array[Byte]],
cls: Class[_],
protocolFactory: TProtocolFactory,
sr: StatsReceiver
): Iface = {
val clsName = cls.getName
def tryThriftFinagleClient: Option[Iface] =
for {
baseName <- findRootWithSuffix(clsName, "$ServiceIface")
clientCls <- findClass[Iface](baseName + "$ServiceToClient")
cons <- findConstructor(clientCls, thriftFinagleClientParamTypes: _*)
} yield cons.newInstance(underlying, protocolFactory)
def tryScrooge3FinagleClient: Option[Iface] =
for {
clientCls <- findClass[Iface](clsName + "$FinagleClient")
cons <- findConstructor(clientCls, scrooge3FinagleClientParamTypes: _*)
} yield cons.newInstance(underlying, protocolFactory, "", sr)
def tryScrooge3FinagledClient: Option[Iface] =
for {
baseName <- findRootWithSuffix(clsName, "$FutureIface")
clientCls <- findClass[Iface](baseName + "$FinagledClient")
cons <- findConstructor(clientCls, scrooge3FinagleClientParamTypes: _*)
} yield cons.newInstance(underlying, protocolFactory, "", sr)
def tryScrooge2Client: Option[Iface] =
for {
baseName <- findRootWithSuffix(clsName, "$FutureIface")
clientCls <- findClass[Iface](baseName + "$FinagledClient")
cons <- findConstructor(clientCls, scrooge2FinagleClientParamTypes: _*)
} yield cons.newInstance(underlying, protocolFactory, None, sr)
def trySwiftClient: Option[Iface] =
for {
swiftClass <- findSwiftClass(cls)
proxy <- findClass1("com.twitter.finagle.exp.swift.SwiftProxy")
meth <- findMethod(proxy, "newClient",
classOf[Service[_, _]], classOf[ClassTag[_]])
} yield {
val manifest = ClassTag(swiftClass).asInstanceOf[ClassTag[Iface]]
meth.invoke(null, underlying, manifest).asInstanceOf[Iface]
}
val iface =
tryThriftFinagleClient orElse
tryScrooge3FinagleClient orElse
tryScrooge3FinagledClient orElse
tryScrooge2Client orElse
trySwiftClient
iface getOrElse {
throw new IllegalArgumentException("Iface %s is not a valid thrift iface".format(clsName))
}
}
/**
* Construct a binary [[com.twitter.finagle.Service]] for a given Thrift
* interface using whichever Thrift code-generation toolchain is available.
*/
def serverFromIface(
impl: AnyRef,
protocolFactory: TProtocolFactory,
stats: StatsReceiver,
maxThriftBufferSize: Int
): BinaryService = {
def tryThriftFinagleService(iface: Class[_]): Option[BinaryService] =
for {
baseName <- findRootWithSuffix(iface.getName, "$ServiceIface")
serviceCls <- findClass[BinaryService](baseName + "$Service")
cons <- findConstructor(serviceCls, iface, classOf[TProtocolFactory])
} yield cons.newInstance(impl, protocolFactory)
def tryScroogeFinagleService(iface: Class[_]): Option[BinaryService] =
for {
baseName <- findRootWithSuffix(iface.getName, "$FutureIface") orElse
Some(iface.getName)
serviceCls <- findClass[BinaryService](baseName + "$FinagleService") orElse
findClass[BinaryService](baseName + "$FinagledService")
cons <- findConstructor(serviceCls, iface, classOf[TProtocolFactory], classOf[StatsReceiver], Integer.TYPE)
} yield cons.newInstance(impl, protocolFactory, stats, Int.box(maxThriftBufferSize))
// The legacy $FinagleService that doesn't take stats.
def tryLegacyScroogeFinagleService(iface: Class[_]): Option[BinaryService] =
for {
baseName <- findRootWithSuffix(iface.getName, "$FutureIface") orElse
Some(iface.getName)
serviceCls <- findClass[BinaryService](baseName + "$FinagleService") orElse
findClass[BinaryService](baseName + "$FinagledService")
cons <- findConstructor(serviceCls, iface, classOf[TProtocolFactory])
} yield cons.newInstance(impl, protocolFactory)
def trySwiftService(iface: Class[_]): Option[BinaryService] =
for {
_ <- findSwiftClass(iface)
swiftServiceCls <- findClass1("com.twitter.finagle.exp.swift.SwiftService")
const <- findConstructor(swiftServiceCls, classOf[Object])
} yield const.newInstance(impl).asInstanceOf[BinaryService]
def tryClass(cls: Class[_]): Option[BinaryService] =
tryThriftFinagleService(cls) orElse
tryScroogeFinagleService(cls) orElse
tryLegacyScroogeFinagleService(cls) orElse
trySwiftService(cls) orElse
(Option(cls.getSuperclass) ++ cls.getInterfaces).view.flatMap(tryClass).headOption
tryClass(impl.getClass).getOrElse {
throw new IllegalArgumentException("argument implements no candidate ifaces")
}
}
/**
* Construct a binary [[com.twitter.finagle.Service]] for a given Thrift
* interface using whichever Thrift code-generation toolchain is available.
* (Legacy version for backward-compatibility).
*/
def serverFromIface(impl: AnyRef, protocolFactory: TProtocolFactory): BinaryService = {
serverFromIface(impl, protocolFactory, LoadedStatsReceiver, Thrift.maxThriftBufferSize)
}
}
/**
* A mixin trait to provide a rich Thrift client API.
*
* @define clientExampleObject ThriftRichClient
*
* @define clientExample
*
* For example, this IDL:
*
* {{{
* service TestService {
* string query(1: string x)
* }
* }}}
*
* compiled with Scrooge, generates the interface
* `TestService.FutureIface`. This is then passed
* into `newIface`:
*
* {{{
* $clientExampleObject.newIface[TestService.FutureIface](
* addr, classOf[TestService.FutureIface])
* }}}
*
* However note that the Scala compiler can insert the latter
* `Class` for us, for which another variant of `newIface` is
* provided:
*
* {{{
* $clientExampleObject.newIface[TestService.FutureIface](addr)
* }}}
*
* In Java, we need to provide the class object:
*
* {{{
* TestService.FutureIface client =
* $clientExampleObject.newIface(addr, TestService.FutureIface.class);
* }}}
*
* @define clientUse
*
* Create a new client of type `Iface`, which must be generated
* by either [[https://github.com/twitter/scrooge Scrooge]] or
* [[https://github.com/mariusaeriksen/thrift-0.5.0-finagle thrift-finagle]].
*
* @define thriftUpgrade
*
* The client uses the standard thrift protocols, with support for
* both framed and buffered transports. Finagle attempts to upgrade
* the protocol in order to ship an extra envelope carrying trace IDs
* and client IDs associated with the request. These are used by
* Finagle's tracing facilities and may be collected via aggregators
* like [[http://twitter.github.com/zipkin/ Zipkin]].
*
* The negotiation is simple: on connection establishment, an
* improbably-named method is dispatched on the server. If that
* method isn't found, we are dealing with a legacy thrift server,
* and the standard protocol is used. If the remote server is also a
* finagle server (or any other supporting this extension), we reply
* to the request, and every subsequent request is dispatched with an
* envelope carrying trace metadata. The envelope itself is also a
* Thrift struct described [[https://github.com/twitter/finagle/blob/master/finagle-thrift/src/main/thrift/tracing.thrift here]].
*/
trait ThriftRichClient { self: Client[ThriftClientRequest, Array[Byte]] =>
import ThriftUtil._
protected val protocolFactory: TProtocolFactory
/** The client name used when group isn't named. */
protected val defaultClientName: String
protected lazy val stats: StatsReceiver = ClientStatsReceiver
/**
* $clientUse
*/
def newIface[Iface](dest: String, cls: Class[_]): Iface = {
val (n, l) = Resolver.evalLabeled(dest)
newIface(n, l, cls)
}
/**
* $clientUse
*/
def newIface[Iface](dest: String, label: String, cls: Class[_]): Iface =
newIface(Resolver.eval(dest), label, cls)
/**
* $clientUse
*/
def newIface[Iface: ClassTag](dest: String): Iface = {
val (n, l) = Resolver.evalLabeled(dest)
newIface[Iface](n, l)
}
def newIface[Iface: ClassTag](dest: String, label: String): Iface = {
val cls = implicitly[ClassTag[Iface]].runtimeClass
newIface[Iface](Resolver.eval(dest), label, cls)
}
def newIface[Iface: ClassTag](dest: Name, label: String): Iface = {
val cls = implicitly[ClassTag[Iface]].runtimeClass
newIface[Iface](dest, label, cls)
}
/**
* $clientUse
*/
@deprecated("Use destination names via newIface(String) or newIface(Name)", "6.7.x")
def newIface[Iface: ClassTag](group: Group[SocketAddress]): Iface = {
val cls = implicitly[ClassTag[Iface]].runtimeClass
newIface[Iface](group, cls)
}
/**
* $clientUse
*/
@deprecated("Use destination names via newIface(String) or newIface(Name)", "6.7.x")
def newIface[Iface](group: Group[SocketAddress], cls: Class[_]): Iface = group match {
case LabelledGroup(g, label) => newIface(Name.fromGroup(g), label, cls)
case _ => newIface(Name.fromGroup(group), "", cls)
}
/**
* $clientUse
*/
def newIface[Iface](name: Name, label: String, cls: Class[_]): Iface = {
lazy val underlying = newService(name, label)
lazy val clientLabel = (label, defaultClientName) match {
case ("", "") => Showable.show(name)
case ("", l1) => l1
case (l0, l1) => l0
}
lazy val sr = stats.scope(clientLabel)
constructIface(underlying, cls, protocolFactory, sr)
}
/**
* Construct a Finagle Service interface for a Scrooge-generated thrift object.
*
* E.g. given a thrift service
* {{{
* service Logger {
* string log(1: string message, 2: i32 logLevel);
* i32 getLogSize();
* }
* }}}
*
* you can construct a client interface with a Finagle Service per thrift method:
*
* {{{
* val loggerService = Thrift.newServiceIface(Logger, "localhost:8000")
* val response = loggerService.log(Logger.Log.Args("log message", 1))
* }}}
*
* @param builder The builder type is generated by Scrooge for a thrift service.
* @param dest Address of the service to connect to, in the format accepted by [[Resolver.eval]].
*/
def newServiceIface[ServiceIface](dest: String)(
implicit builder: ServiceIfaceBuilder[ServiceIface]
): ServiceIface = {
val thriftService = newService(dest, "")
val scopedStats = stats.scope(defaultClientName)
builder.newServiceIface(thriftService, protocolFactory, scopedStats)
}
def newServiceIface[ServiceIface](dest: Name)(
implicit builder: ServiceIfaceBuilder[ServiceIface]
): ServiceIface = {
val thriftService = newService(dest, "")
val scopedStats = stats.scope(defaultClientName)
builder.newServiceIface(thriftService, protocolFactory, scopedStats)
}
def newMethodIface[ServiceIface, FutureIface](serviceIface: ServiceIface)(
implicit builder: MethodIfaceBuilder[ServiceIface, FutureIface]
): FutureIface = builder.newMethodIface(serviceIface)
}
/**
* A mixin trait to provide a rich Thrift server API.
*
* @define serveIface
*
* Serve the interface implementation `iface`, which must be generated
* by either [[https://github.com/twitter/scrooge Scrooge]] or
* [[https://github.com/mariusaeriksen/thrift-0.5.0-finagle thrift-finagle]].
*
* Given the IDL:
*
* {{{
* service TestService {
* string query(1: string x)
* }
* }}}
*
* Scrooge will generate an interface, `TestService.FutureIface`,
* implementing the above IDL.
*
* $serverExample
*
* Note that this interface is discovered by reflection. Passing an
* invalid interface implementation will result in a runtime error.
*
* @define serverExample
*
* `TestService.FutureIface` must be implemented and passed
* into `serveIface`:
*
* {{{
* $serverExampleObject.serveIface(":*", new TestService.FutureIface {
* def query(x: String) = Future.value(x) // (echo service)
* })
* }}}
*
* @define serverExampleObject ThriftMuxRichServer
*/
trait ThriftRichServer { self: Server[Array[Byte], Array[Byte]] =>
import ThriftUtil._
protected val protocolFactory: TProtocolFactory
val maxThriftBufferSize: Int = 16 * 1024
protected val serverLabel = "thrift"
protected lazy val serverStats: StatsReceiver = ServerStatsReceiver.scope(serverLabel)
/**
* $serveIface
*/
def serveIface(addr: String, iface: AnyRef): ListeningServer =
serve(addr, serverFromIface(iface, protocolFactory, serverStats, maxThriftBufferSize))
/**
* $serveIface
*/
def serveIface(addr: SocketAddress, iface: AnyRef): ListeningServer =
serve(addr, serverFromIface(iface, protocolFactory, serverStats, maxThriftBufferSize))
}
|
lysu/finagle
|
finagle-thrift/src/main/scala/com/twitter/finagle/rich.scala
|
Scala
|
apache-2.0
| 15,212 |
package at.ac.tuwien.ifs.ir.evaluation.poolbias.estimators
import at.ac.tuwien.ifs.ir.evaluation.TRECEval
import at.ac.tuwien.ifs.ir.evaluation.pool.Pool
import at.ac.tuwien.ifs.ir.model._
class LipaniEstimator(pool: Pool, metric: String, descs: Descs = null) extends ScoreEstimator(pool, metric, descs) {
override def isMetricSupported(metric: String): Boolean =
metric.startsWith("P_")
implicit def shufflableRuns(runs: Runs) = new {
def ◦(sRuns: Runs, N: Int = 0): Runs = {
getNewRunBySelectedRuns(runs, sRuns, N)
}
}
protected def getAdjP(n: Int, ru: Runs, pool: Pool): Double = {
def M(ru: Runs, qRels: QRels = pool.qRels) =
TRECEval().computeMetric("P_" + n, ru, qRels)
def AM(ru: Runs, qRels: QRels = pool.qRels) =
TRECEval().computeAntiMetric("P_" + n, ru, qRels)
val sru = M(ru)
val asru = AM(ru)
val kru = 1d - (sru + asru)
    // optimization: if kru is 0, then no matter what, the correction is going to be 0
if (kru == 0d) return 0d
val vs = pool.lRuns.par.map(rp => {
val nrp = rp ◦ ru
val δsrp = M(nrp) - M(rp)
val δasrp = AM(nrp) - AM(rp)
val δkrp = -δsrp - δasrp
(δsrp, δasrp, δkrp)
}).seq
val (δss, δass, δks) = (vs.map(_._1), vs.map(_._2), vs.map(_._3))
val Δsru = avg(δss)
val Δasru = avg(δass)
val λ = Δsru * asru - Δasru * sru
if (λ > 0)
kru * Math.max(avg(δks), 0d)
else
0d
}
protected def getScoreP(ru: Runs, pool: Pool = this.pool): Score = {
def M(n:Int, ru: Runs, qRels: QRels = pool.qRels) =
TRECEval().computeMetric("P_"+n, ru, qRels)
val n = metric.split("_").last.toInt
val sru = M(n, ru, pool.qRels)
val a = getAdjP(n, ru, pool)
new Score(ru.id, sru + a, metric, pool.qRels)
}
override def getScore(ru: Runs): Score = {
if (metric.startsWith("P"))
getScoreP(ru)
else
null
}
protected def getNewRunBySelectedRuns(runs: Runs, sRuns: Runs, N: Int = 0): Runs = {
new Runs(runs.id + "_" + sRuns.id + ".N_" + N,
(for (id <- sRuns.topicIds.toList) yield {
val run = runs.selectByTopicId(id)
if (run != null) {
val sRun = sRuns.selectByTopicId(id)
new Run(id,
Run.normalizeRank(
run.runRecords.map(rR => {
new RunRecord(
rR.iteration,
rR.document,
0,
getNewScore(sRun, rR, N))
})))
} else
null
}).filter(_ != null))
}
protected def getNewScore(sRun: Run, runRecord: RunRecord, N: Int): Float = (sRun.runRecords.size + 1) - {
val alpha = 1d
val sRunRecord = sRun.getByDocument(runRecord.document)
if (sRunRecord != null && sRunRecord.rank > N) {
val step = sRunRecord.rank - runRecord.rank
val fix =
if (step == 0)
1d / 10000d
else if (step > 0)
(runRecord.rank.toDouble + 1d) / 10000d
else
runRecord.rank.toDouble / (10000d * 10000d)
sRunRecord.rank.toDouble * alpha + runRecord.rank.toDouble * (1d - alpha) + fix
} else runRecord.rank.toDouble + 1d / 10000d
}.toFloat
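  // Worked example (added): with alpha = 1 and N = 0, a document ranked 3rd in
  // sRun but 5th in the original run has step = 3 - 5 = -2 < 0, so
  // fix = 5 / 1e8 and the block evaluates to 3.00000005; the score becomes
  // (|sRun| + 1) - 3.00000005. The tiny fix term only breaks ties between
  // documents that share the same rank in sRun.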
override def getName = "Lipani"
override def getNewInstance(pool: Pool) = new LipaniEstimator(pool, metric, descs)
}
object LipaniEstimator {
def apply(pool: Pool, metric: String, descs: Descs = null) = new LipaniEstimator(pool, metric, descs)
}
|
aldolipani/PoolBiasEstimators
|
src/main/scala/at/ac/tuwien/ifs/ir/evaluation/poolbias/estimators/LipaniEstimator.scala
|
Scala
|
apache-2.0
| 3,469 |
package at.logic.gapt.integration_tests
import java.io.IOException
import at.logic.gapt.formats.llk.{ HybridLatexParser, HybridLatexExporter, toLLKString }
import at.logic.gapt.algorithms.rewriting.DefinitionElimination
import at.logic.gapt.expr._
import at.logic.gapt.expr.fol.{ reduceHolToFol, undoHol2Fol, replaceAbstractions }
import at.logic.gapt.expr.hol._
import at.logic.gapt.proofs.HOLClause
import at.logic.gapt.proofs.lk.{ AtomicExpansion, regularize, LKToLKsk }
import at.logic.gapt.proofs.lk.base._
import at.logic.gapt.proofs.lksk.sequentToLabelledSequent
import at.logic.gapt.proofs.resolution.RobinsonToRal
import at.logic.gapt.provers.prover9._
import at.logic.gapt.proofs.ceres.clauseSets.AlternativeStandardClauseSet
import at.logic.gapt.proofs.ceres.projections.Projections
import at.logic.gapt.proofs.ceres.struct.StructCreators
import at.logic.gapt.proofs.ceres.ceres_omega
import at.logic.gapt.proofs.lksk.LKskToExpansionProof
import at.logic.gapt.utils.testing.ClasspathFileCopier
import at.logic.gapt.proofs.expansionTrees.{ ETAnd, ETImp, ETWeakQuantifier, ETSkolemQuantifier, ExpansionTree, ExpansionSequent }
import org.specs2.mutable._
class nTapeTest extends Specification with ClasspathFileCopier {
  def show( s: String ): Unit = () //println( "+++++++++ " + s + " ++++++++++" )
  def show_detail( s: String ): Unit = () //println( "+++++++++ " + s + " ++++++++++" )
def f( e: LambdaExpression ): String = toLLKString( e )
class Robinson2RalAndUndoHOL2Fol(
sig_vars: Map[String, List[Var]],
sig_consts: Map[String, List[Const]],
cmap: replaceAbstractions.ConstantsMap
) extends RobinsonToRal {
val absmap = Map[String, LambdaExpression]() ++ ( cmap.toList.map( x => ( x._2.toString, x._1 ) ) )
val cache = Map[LambdaExpression, LambdaExpression]()
override def convert_formula( e: HOLFormula ): HOLFormula = {
BetaReduction.betaNormalize(
undoHol2Fol.backtranslate( e, sig_vars, sig_consts, absmap )
)
}
override def convert_substitution( s: Substitution ): Substitution = {
val mapping = s.map.toList.map {
case ( from, to ) =>
(
BetaReduction.betaNormalize( undoHol2Fol.backtranslate( from, sig_vars, sig_consts, absmap, None ) ).asInstanceOf[Var],
BetaReduction.betaNormalize( undoHol2Fol.backtranslate( to, sig_vars, sig_consts, absmap, None ) )
)
}
Substitution( mapping )
}
}
object Robinson2RalAndUndoHOL2Fol {
def apply(
sig_vars: Map[String, List[Var]],
sig_consts: Map[String, List[Const]],
cmap: replaceAbstractions.ConstantsMap
) =
new Robinson2RalAndUndoHOL2Fol( sig_vars, sig_consts, cmap )
}
def decompose( et: ExpansionTree ): List[ExpansionTree] = et match {
case ETAnd( x, y ) => decompose( x ) ++ decompose( y );
case _ => List( et )
}
//prints the interesting terms from the expansion sequent
def printStatistics( et: ExpansionSequent ) = {
val conjuncts = decompose( et.antecedent( 1 ) )
// FIXME: use a less fragile method to find the induction formula...
val indet = conjuncts( 19 )
val List( ind1, ind2 ): List[ExpansionTree] = indet match {
case ETWeakQuantifier( _, List(
( inst1, et1 ),
( inst2, et2 )
) ) =>
List( inst1, inst2 )
}
val ( ind1base, ind1step ) = ind1 match {
case ETImp( ETAnd(
ETWeakQuantifier( _, List( ( _, base ) ) ),
ETSkolemQuantifier( _, _,
ETImp( _, ETWeakQuantifier( f, List( ( inst, step ) ) ) )
)
), _ ) =>
( base, step )
}
val ( ind2base, ind2step ) = ind2 match {
case ETImp( ETAnd(
ETWeakQuantifier( _, List( ( _, base ) ) ),
ETSkolemQuantifier( _, _,
ETImp( _, ETWeakQuantifier( f, List( ( inst, step ) ) ) )
) ), _ ) =>
( base, step )
}
( ind1base, ind1step, ind2base, ind2step ) match {
case ( Abs( xb, sb ), Abs( xs, ss ), Abs( yb, tb ), Abs( ys, ts ) ) =>
val map = Map[LambdaExpression, StringSymbol]()
val counter = new { private var state = 0; def nextId = { state = state + 1; state } }
val ( map1, sb1 ) = replaceAbstractions( sb, map, counter )
val ( map2, ss1 ) = replaceAbstractions( ss, map1, counter )
val ( map3, tb1 ) = replaceAbstractions( tb, map2, counter )
val ( map4, ts1 ) = replaceAbstractions( ts, map3, counter )
show_detail( "base 1 simplified: " + f( Abs( xb, sb1 ) ) )
show_detail( "base 2 simplified: " + f( Abs( yb, tb1 ) ) )
show_detail( "step 1 simplified: " + f( Abs( xs, ss1 ) ) )
show_detail( "step 2 simplified: " + f( Abs( ys, ts1 ) ) )
show_detail( "With shortcuts:" )
for ( ( term, sym ) <- map4 ) {
show_detail( "Symbol: " + sym )
show_detail( "Term: " + f( term ) )
}
}
}
/**
* The actual cut-elimination procedure.
   * @param filename path of the LLK proof file to process
   * @return Some(errorMessage) if something breaks, None otherwise
*/
def doCutelim( filename: String ): Option[String] = {
show( "Loading file" )
val tokens = HybridLatexParser.parseFile( filename )
val pdb = HybridLatexParser.createLKProof( tokens )
show( "Eliminating definitions, expanding tautological axioms" )
val elp = AtomicExpansion( DefinitionElimination( pdb.Definitions, regularize( pdb.proof( "TAPEPROOF" ) ) ) )
show( "Skolemizing" )
val selp = LKToLKsk( elp )
show( "Extracting struct" )
val struct = StructCreators.extract( selp, x => containsQuantifierOnLogicalLevel( x ) || freeHOVariables( x ).nonEmpty )
show( "Computing projections" )
val proj = Projections( selp, x => containsQuantifierOnLogicalLevel( x ) || freeHOVariables( x ).nonEmpty )
show( "Computing clause set" )
val cl = AlternativeStandardClauseSet( struct )
show( "Exporting to prover 9" )
val ( cmap, folcl_ ) = replaceAbstractions( cl.toList )
show_detail( "Calculated cmap: " )
cmap.map( x => show_detail( x._1 + " := " + x._2 ) )
val folcl = reduceHolToFol( folcl_ ).asInstanceOf[List[HOLClause]]
folcl.map( x => show_detail( x.toString ) )
show( "Refuting clause set" )
Prover9.getRobinsonProof( folcl ) match {
case None =>
Some( "could not refute clause set" )
case Some( rp ) =>
show( "Getting formulas" )
val proofformulas = selp.nodes.flatMap( _.asInstanceOf[LKProof].root.toHOLSequent.formulas ).toList.distinct
show( "Extracting signature from " + proofformulas.size + " formulas" )
val ( sigc, sigv ) = undoHol2Fol.getSignature( proofformulas )
show( "Converting to Ral" )
val myconverter = Robinson2RalAndUndoHOL2Fol( sigv.map( x => ( x._1, x._2.toList ) ), sigc.map( x => ( x._1, x._2.toList ) ), cmap )
val ralp = myconverter( rp )
show( "Creating acnf" )
val ( acnf, endclause ) = ceres_omega( proj, ralp, sequentToLabelledSequent( selp.root ), struct )
show( "Compute expansion tree" )
val et = LKskToExpansionProof( acnf )
show( " HOORAY! " )
printStatistics( et )
None
}
}
args( skipAll = !Prover9.isInstalled )
"The higher-order tape proof" should {
"do cut-elimination on the 2 copies tape proof (tape3.llk)" in {
      //skipped("works but takes a bit of time")
doCutelim( tempCopyOfClasspathFile( "tape3.llk" ) ) match {
case Some( error ) => ko( error )
case None => ok
}
}
"do cut-elimination on the 1 copy tape proof (tape3ex.llk)" in {
doCutelim( tempCopyOfClasspathFile( "tape3ex.llk" ) ) match {
case Some( error ) => ko( error )
case None => ok
}
}
}
}
|
loewenheim/gapt
|
src/test/scala/at/logic/gapt/integration_tests/nTapeTest.scala
|
Scala
|
gpl-3.0
| 7,836 |
package scife.util.format
import java.io.Writer
case object DocNil extends Document
case object DocBreak extends Document
case class DocText(txt: String) extends Document
case class DocGroup(doc: Document) extends Document
case class DocNest(indent: Int, doc: Document) extends Document
case class DocCons(hd: Document, tl: Document) extends Document
/**
* A basic pretty-printing library, based on Lindig's strict version
* of Wadler's adaptation of Hughes' pretty-printer.
*
* Taken from Schinz's implementation from scala.text
*/
abstract class Document {
def ::(hd: Document): Document = DocCons(hd, this)
def ::(hd: String): Document = DocCons(DocText(hd), this)
def :/:(hd: Document): Document = hd :: DocBreak :: this
def :/:(hd: String): Document = hd :: DocBreak :: this
/**
* Format this document on `writer` and try to set line
* breaks so that the result fits in `width` columns.
*/
def format(width: Int, writer: Writer) {
type FmtState = (Int, Boolean, Document)
def fits(w: Int, state: List[FmtState]): Boolean = state match {
case _ if w < 0 =>
false
case List() =>
true
case (_, _, DocNil) :: z =>
fits(w, z)
case (i, b, DocCons(h, t)) :: z =>
fits(w, (i,b,h) :: (i,b,t) :: z)
case (_, _, DocText(t)) :: z =>
fits(w - t.length(), z)
case (i, b, DocNest(ii, d)) :: z =>
fits(w, (i + ii, b, d) :: z)
case (_, false, DocBreak) :: z =>
fits(w - 1, z)
case (_, true, DocBreak) :: z =>
true
case (i, _, DocGroup(d)) :: z =>
fits(w, (i, false, d) :: z)
}
def spaces(n: Int) {
var rem = n
while (rem >= 16) { writer write " "; rem -= 16 }
if (rem >= 8) { writer write " "; rem -= 8 }
if (rem >= 4) { writer write " "; rem -= 4 }
if (rem >= 2) { writer write " "; rem -= 2}
if (rem == 1) { writer write " " }
}
def fmt(k: Int, state: List[FmtState]): Unit = state match {
case List() => ()
case (_, _, DocNil) :: z =>
fmt(k, z)
case (i, b, DocCons(h, t)) :: z =>
fmt(k, (i, b, h) :: (i, b, t) :: z)
case (i, _, DocText(t)) :: z =>
writer write t
fmt(k + t.length(), z)
case (i, b, DocNest(ii, d)) :: z =>
fmt(k, (i + ii, b, d) :: z)
case (i, true, DocBreak) :: z =>
        writer write "\n"
spaces(i)
fmt(i, z)
case (i, false, DocBreak) :: z =>
writer write " "
fmt(k + 1, z)
case (i, b, DocGroup(d)) :: z =>
val fitsFlat = fits(width - k, (i, false, d) :: z)
fmt(k, (i, !fitsFlat, d) :: z)
case _ =>
()
}
fmt(0, (0, false, DocGroup(this)) :: Nil)
}
}
object Document {
/** The empty document */
def empty = DocNil
/** A break, which will either be turned into a space or a line break */
def break = DocBreak
/** A document consisting of some text literal */
def text(s: String): Document = DocText(s)
/**
* A group, whose components will either be printed with all breaks
* rendered as spaces, or with all breaks rendered as line breaks.
*/
def group(d: Document): Document = DocGroup(d)
/** A nested document, which will be indented as specified. */
def nest(i: Int, d: Document): Document = DocNest(i, d)
}
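// A minimal usage sketch (added, not part of the original file): composing a
// small document and rendering it at a caller-supplied width; format picks the
// line breaks, so the output shape depends entirely on the width.
object DocumentExample {
  import java.io.StringWriter
  import Document._
  def render(width: Int): String = {
    // The group either renders flat ("val xs = List(1, 2, 3)") or, when it
    // does not fit, with breaks turned into newlines indented by two via nest.
    val doc = group("val xs =" :/: nest(2, group(text("List(1,") :/: text("2, 3)"))))
    val out = new StringWriter
    doc.format(width, out)
    out.toString
  }
}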
|
kaptoxic/SciFe
|
src/main/scala/scife/util/format/Document.scala
|
Scala
|
gpl-2.0
| 3,375 |
package io.getquill.sqlserver
import io.getquill.PeopleMonixSpec
import org.scalatest.Matchers._
class PeopleMonixJdbcSpec extends PeopleMonixSpec {
val context = testContext
import testContext._
override def beforeAll = {
testContext.transaction {
for {
_ <- testContext.run(query[Couple].delete)
_ <- testContext.run(query[Person].filter(_.age > 0).delete)
_ <- testContext.run(liftQuery(peopleEntries).foreach(p => peopleInsert(p)))
_ <- testContext.run(liftQuery(couplesEntries).foreach(p => couplesInsert(p)))
} yield ()
}.runSyncUnsafe()
}
"Example 1 - differences" in {
testContext.run(`Ex 1 differences`).runSyncUnsafe() should contain theSameElementsAs `Ex 1 expected result`
}
"Example 2 - range simple" in {
testContext.run(`Ex 2 rangeSimple`(lift(`Ex 2 param 1`), lift(`Ex 2 param 2`))).runSyncUnsafe() should contain theSameElementsAs `Ex 2 expected result`
}
"Example 3 - satisfies" in {
testContext.run(`Ex 3 satisfies`).runSyncUnsafe() should contain theSameElementsAs `Ex 3 expected result`
}
"Example 4 - satisfies" in {
testContext.run(`Ex 4 satisfies`).runSyncUnsafe() should contain theSameElementsAs `Ex 4 expected result`
}
"Example 5 - compose" in {
testContext.run(`Ex 5 compose`(lift(`Ex 5 param 1`), lift(`Ex 5 param 2`))).runSyncUnsafe() mustEqual `Ex 5 expected result`
}
"Example 6 - predicate 0" in {
testContext.run(satisfies(eval(`Ex 6 predicate`))).runSyncUnsafe() mustEqual `Ex 6 expected result`
}
"Example 7 - predicate 1" in {
testContext.run(satisfies(eval(`Ex 7 predicate`))).runSyncUnsafe() mustEqual `Ex 7 expected result`
}
"Example 8 - contains empty" in {
testContext.run(`Ex 8 and 9 contains`(liftQuery(`Ex 8 param`))).runSyncUnsafe() mustEqual `Ex 8 expected result`
}
"Example 9 - contains non empty" in {
testContext.run(`Ex 8 and 9 contains`(liftQuery(`Ex 9 param`))).runSyncUnsafe() mustEqual `Ex 9 expected result`
}
"Example 10 - pagination" in {
testContext.run(`Ex 10 page 1 query`).runSyncUnsafe() mustEqual `Ex 10 page 1 expected`
testContext.run(`Ex 10 page 2 query`).runSyncUnsafe() mustEqual `Ex 10 page 2 expected`
}
"Example 11 - streaming" in {
collect(testContext.stream(`Ex 11 query`)) should contain theSameElementsAs `Ex 11 expected`
}
}
|
mentegy/quill
|
quill-jdbc-monix/src/test/scala/io/getquill/sqlserver/PeopleMonixJdbcSpec.scala
|
Scala
|
apache-2.0
| 2,372 |
package ylabs.play.common.test
import org.scalatest.{Matchers, OptionValues, WordSpec}
import org.scalatestplus.play.WsScalaTestClient
abstract class MyPlaySpec extends WordSpec with Matchers with OptionValues with WsScalaTestClient
|
springnz/play-app-base
|
src/main/scala/ylabs/play/common/test/MyPlaySpec.scala
|
Scala
|
mit
| 235 |
package org.cloudfun.hackworld.server.model;
/**
 * A continuous space, divided into one or more Areas.
*/
class Space { }
|
zzorn/cloudfun
|
src/main/scala/org/cloudfun/hackworld/server/model/Space.scala
|
Scala
|
lgpl-3.0
| 132 |
package com.github.mocyuto
import scala.collection.mutable
object MapUtils {
implicit class tuple2map[A, B](tupleSeq: Seq[(A, B)]) {
/**
     * Converts a Seq of tuples into a Map, grouping the values by key.
* {{{
* val seq = Seq((1,2),(2, 3),(1,3))
* seq.grouping() // Map(2 -> Seq(3), 1 -> Seq(2, 3))
* }}}
*
     * @return a new Map in which each key maps to the Seq of values grouped under it
*/
def grouping(): Map[A, Seq[B]] = {
val muteMap = mutable.Map[A, Seq[B]]()
tupleSeq.foreach {
case (t1, t2) =>
val seq = if (muteMap.contains(t1)) muteMap(t1) :+ t2 else Seq(t2)
muteMap += t1 -> seq
}
muteMap.result().toMap
}
}
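  // Equivalence note (added): per key this matches the standard library's
  // tupleSeq.groupBy(_._1).mapValues(_.map(_._2)).toMap; the hand-rolled fold
  // above instead appends element by element, avoiding the intermediate
  // Seq[(A, B)] groups that groupBy builds.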
}
|
moc-yuto/sansyo
|
src/main/scala/com/github/mocyuto/MapUtils.scala
|
Scala
|
apache-2.0
| 654 |
package im.tox.antox.tox
import java.sql.Timestamp
import im.tox.antox.data.State
import im.tox.antox.utils.{Friend, FriendInfo, FriendRequest}
import rx.lang.scala.subjects.BehaviorSubject
object Reactive {
val chatActive = BehaviorSubject[Boolean](false)
val chatActiveSub = chatActive.subscribe(x => State.chatActive(x))
val activeKey = BehaviorSubject[Option[String]](None)
val activeKeySub = activeKey.subscribe(x => State.activeKey(x))
val friendList = BehaviorSubject[Array[Friend]](new Array[Friend](0))
val friendRequests = BehaviorSubject[Array[FriendRequest]](new Array[FriendRequest](0))
val lastMessages = BehaviorSubject[Map[String, (String, Timestamp)]](Map.empty[String, (String, Timestamp)])
val unreadCounts = BehaviorSubject[Map[String, Integer]](Map.empty[String, Integer])
val typing = BehaviorSubject[Boolean](false)
val updatedMessages = BehaviorSubject[Boolean](true)
val friendInfoList = friendList
.combineLatestWith(lastMessages)((fl, lm) => (fl, lm))
.combineLatestWith(unreadCounts)((tup, uc) => {
tup match {
case (fl, lm) => {
fl.map(f => {
val lastMessageTup: Option[(String, Timestamp)] = lm.get(f.friendKey)
val unreadCount: Option[Integer] = uc.get(f.friendKey)
(lastMessageTup, unreadCount) match {
case (Some((lastMessage, lastMessageTimestamp)), Some(unreadCount)) => {
new FriendInfo(f.isOnline, f.friendName, f.friendStatus, f.personalNote, f.friendKey, lastMessage, lastMessageTimestamp, unreadCount, f.alias)
}
case (Some((lastMessage, lastMessageTimestamp)), None) => {
new FriendInfo(f.isOnline, f.friendName, f.friendStatus, f.personalNote, f.friendKey, lastMessage, lastMessageTimestamp, 0, f.alias)
}
case _ => {
new FriendInfo(f.isOnline, f.friendName, f.friendStatus, f.personalNote, f.friendKey, "", new Timestamp(0, 0, 0, 0, 0, 0, 0), 0, f.alias)
}
}
})
}
}
})
val friendListAndRequests = friendInfoList.combineLatestWith(friendRequests)((fi, fr) => (fi, fr))
}
|
ollieh/Antox
|
app/src/main/scala/im/tox/antox/tox/Reactive.scala
|
Scala
|
gpl-3.0
| 2,180 |
package ru.org.codingteam.horta.plugins
import ru.org.codingteam.horta.database.RepositoryFactory
/**
* Description of events.
 * @param messages true if the plugin wants to be notified when a message is received.
 * @param rooms true if the plugin wants to be notified on room entering / leaving.
 * @param participants true if the plugin wants to be notified when a participant enters or leaves the room.
*/
case class Notifications(messages: Boolean,
rooms: Boolean,
participants: Boolean)
/**
* A plugin definition.
* @param name plugin name.
* @param notifications description of events plugin want to be notified of.
* @param commands a list of commands supported by the plugin.
* @param repositoryFactory a factory of repositories used for data access. Data access is disabled if factory isn't
* defined.
*/
case class PluginDefinition(name: String,
notifications: Notifications,
commands: List[CommandDefinition],
repositoryFactory: Option[RepositoryFactory])
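// Hedged example (added): a minimal definition for a plugin that only listens
// to room messages, with no commands and no persistence; the name is invented.
//   PluginDefinition(
//     name = "echo",
//     notifications = Notifications(messages = true, rooms = false, participants = false),
//     commands = List(),
//     repositoryFactory = None)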
|
codingteam/horta-hell
|
src/main/scala/ru/org/codingteam/horta/plugins/PluginDefinition.scala
|
Scala
|
mit
| 1,115 |
object Test extends Application {
class X
class Y extends X
class C {
def apply(x: X, y: Y) = 1
def apply(x: Y, y: X) = 2
}
class A {
val foo: C = new C
def foo(x: X, y: X) = 3
}
val a = new A
val z = a./* line: 10 */foo(new X, new X)
a./* line: 9 */foo(new X, new Y)
print(z)
}
|
ilinum/intellij-scala
|
testdata/resolve2/bug3/ValueFunction11.scala
|
Scala
|
apache-2.0
| 315 |
// Copyright (C) 2019 MapRoulette contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
package org.maproulette.session
import java.util.Locale
import javax.crypto.{BadPaddingException, IllegalBlockSizeException}
import org.joda.time.DateTime
import org.joda.time.format.DateTimeFormat
import org.maproulette.Config
import org.maproulette.data.{ItemType, UserType}
import org.maproulette.models.BaseObject
import org.maproulette.utils.{Crypto, Utils}
import org.slf4j.LoggerFactory
import play.api.data.Form
import play.api.data.Forms._
import play.api.libs.json._
import play.api.libs.oauth.RequestToken
import play.api.libs.json.JodaWrites._
import play.api.libs.json.JodaReads._
import scala.xml.{Elem, XML}
/**
* Classes for the User object and the OSM Profile
*
* @author cuthbertm
*/
/**
* Basic Location case class defining longitude and latitude
*
* @param longitude longitude for location
* @param latitude latitude for location
*/
case class Location(latitude: Double, longitude: Double, name: Option[String] = None)
/**
* Information specific to the OSM profile of the user. All users in the system are based on
* OSM users.
*
* @param id The osm id
* @param displayName The display name for the osm user
* @param description The description of the OSM user as per their OSM profile
 * @param avatarURL The avatar URL enabling display of their avatar
* @param homeLocation Their home location
* @param created When their OSM account was created
* @param requestToken The key and secret (access token) used for authorization
*/
case class OSMProfile(
id: Long,
displayName: String,
description: String,
avatarURL: String,
homeLocation: Location,
created: DateTime,
requestToken: RequestToken
)
/**
* A user search result containing a few public fields from user's OSM Profile.
*
* @param osmId The osm id
* @param displayName The display name for the osm user
 * @param avatarURL The avatar URL enabling display of their avatar
*/
case class UserSearchResult(osmId: Long, displayName: String, avatarURL: String)
/**
* Information specific to a user managing a project. Includes the project id,
* a few basic fields about the user, and their group types for the project.
*
* @param projectId The project id
* @param userId The user's MapRoulette id
* @param osmId The user's osm id
* @param displayName The display name for the osm user
 * @param avatarURL The avatar URL enabling display of their avatar
* @param groupTypes List of the user's group types for the project
*/
case class ProjectManager(
projectId: Long,
userId: Long,
osmId: Long,
displayName: String,
avatarURL: String,
groupTypes: List[Int] = List()
)
/**
* Settings that are not defined by the OSM user profile, but specific to MapRoulette
*
* @param defaultEditor The default editor that the user wants to use
 * @param defaultBasemap The default basemap that the user wants to see; overridden by the challenge's default basemap if one is set
 * @param defaultBasemapId The string id of the default basemap that the user wants to see
 * @param customBasemap If the default basemap is custom, then this is the URL of the tile server
* @param locale The locale for the user, if not set will default to en
* @param email The user's email address
* @param emailOptIn If the user has opted in to receive emails
* @param leaderboardOptOut If the user has opted out of the public leaderboard
* @param needsReview If the user's work should be reviewed
 * @param isReviewer If this user can review others' work
* @param theme The theme to display in MapRoulette. Optionally - 0=skin-black, 1=skin-black-light,
* 2=skin-blue, 3=skin-blue-light, 4=skin-green, 5=skin-green-light,
* 6=skin-purple, 7=skin-purple-light, 8=skin-red, 9=skin-red-light, 10=skin-yellow, 11=skin-yellow-light
*/
case class UserSettings(
defaultEditor: Option[Int] = None,
defaultBasemap: Option[Int] = None,
defaultBasemapId: Option[String] = None,
customBasemap: Option[String] = None,
locale: Option[String] = None,
email: Option[String] = None,
emailOptIn: Option[Boolean] = None,
leaderboardOptOut: Option[Boolean] = None,
needsReview: Option[Int] = None,
isReviewer: Option[Boolean] = None,
theme: Option[Int] = None
) {
def getTheme: String = theme match {
case Some(t) =>
t match {
case User.THEME_BLACK => "skin-black"
case User.THEME_BLACK_LIGHT => "skin-black-light"
case User.THEME_BLUE => "skin-blue"
case User.THEME_BLUE_LIGHT => "skin-blue-light"
case User.THEME_GREEN => "skin-green"
case User.THEME_GREEN_LIGHT => "skin-green-light"
case User.THEME_PURPLE => "skin-purple"
case User.THEME_PURPLE_LIGHT => "skin-purple-light"
case User.THEME_RED => "skin-red"
case User.THEME_RED_LIGHT => "skin-red-light"
case User.THEME_YELLOW => "skin-yellow"
case User.THEME_YELLOW_LIGHT => "skin-yellow-light"
case _ => "skin-blue"
}
case None => "skin-blue"
}
}
/**
* Information specific to the MapRoulette user.
*
* @param id The id defined by the database
* @param created When their account was created
 * @param modified When their account was last updated. If the last update was more than a day ago,
 *                 their OSM information will be updated automatically
* @param osmProfile The osm profile information
* @param apiKey The current api key to validate requests
* @param guest Whether this is a guest account or not.
*/
case class User(
override val id: Long,
override val created: DateTime,
override val modified: DateTime,
osmProfile: OSMProfile,
groups: List[Group] = List(),
apiKey: Option[String] = None,
guest: Boolean = false,
settings: UserSettings = UserSettings(),
properties: Option[String] = None,
score: Option[Int] = None
) extends BaseObject[Long] {
// for users the display name is always retrieved from OSM
override def name: String = osmProfile.displayName
override val itemType: ItemType = UserType()
def homeLocation: String = osmProfile.homeLocation.name match {
case Some(name) => name
case None => "Unknown"
}
def formattedOSMCreatedDate: String =
DateTimeFormat.forPattern("MMMM. yyyy").print(osmProfile.created)
def formattedMPCreatedDate: String = DateTimeFormat.forPattern("MMMM. yyyy").print(created)
/**
* Checks to see if this user is part of the special super user group
*
* @return true if user is a super user
*/
def isSuperUser: Boolean = groups.exists(_.groupType == Group.TYPE_SUPER_USER)
def isAdmin: Boolean = groups.exists(_.groupType == Group.TYPE_ADMIN)
def adminForProject(projectId: Long): Boolean =
groups.exists(g => g.groupType == Group.TYPE_ADMIN && g.projectId == projectId)
def getUserLocale: Locale = new Locale(this.settings.locale.getOrElse("en"))
}
/**
* Static functions to easily create user objects
*/
object User {
implicit val tokenWrites: Writes[RequestToken] = Json.writes[RequestToken]
implicit val tokenReads: Reads[RequestToken] = Json.reads[RequestToken]
implicit val settingsWrites: Writes[UserSettings] = Json.writes[UserSettings]
implicit val settingsReads: Reads[UserSettings] = Json.reads[UserSettings]
implicit val userGroupWrites: Writes[Group] = Group.groupWrites
implicit val userGroupReads: Reads[Group] = Group.groupReads
implicit val locationWrites: Writes[Location] = Json.writes[Location]
implicit val locationReads: Reads[Location] = Json.reads[Location]
implicit val osmWrites: Writes[OSMProfile] = Json.writes[OSMProfile]
implicit val osmReads: Reads[OSMProfile] = Json.reads[OSMProfile]
implicit val searchResultWrites: Writes[UserSearchResult] = Json.writes[UserSearchResult]
implicit val projectManagerWrites: Writes[ProjectManager] = Json.writes[ProjectManager]
implicit object UserFormat extends Format[User] {
override def writes(o: User): JsValue = {
implicit val taskWrites: Writes[User] = Json.writes[User]
val original = Json.toJson(o)(Json.writes[User])
val updated = o.properties match {
case Some(p) => Utils.insertIntoJson(original, "properties", Json.parse(p), true)
case None => original
}
Utils.insertIntoJson(updated, "properties", Json.parse(o.properties.getOrElse("{}")), true)
}
override def reads(json: JsValue): JsResult[User] = {
      val modifiedJson: JsValue = (json \ "properties").toOption match {
case Some(p) =>
p match {
case props: JsString => json
case _ =>
json.as[JsObject] ++ Json.obj("properties" -> p.toString())
}
case None => json
}
Json.fromJson[User](modifiedJson)(Json.reads[User])
}
}
val DEFAULT_GUEST_USER_ID = -998
val DEFAULT_SUPER_USER_ID = -999
val DEFAULT_GROUP_ID = -999
val THEME_BLACK = 0
val THEME_BLACK_LIGHT = 1
val THEME_BLUE = 2
val THEME_BLUE_LIGHT = 3
val THEME_GREEN = 4
val THEME_GREEN_LIGHT = 5
val THEME_PURPLE = 6
val THEME_PURPLE_LIGHT = 7
val THEME_RED = 8
val THEME_RED_LIGHT = 9
val THEME_YELLOW = 10
val THEME_YELLOW_LIGHT = 11
val superGroup: Group = Group(DEFAULT_GROUP_ID, "SUPERUSERS", 0, Group.TYPE_SUPER_USER)
val settingsForm = Form(
mapping(
"defaultEditor" -> optional(number),
"defaultBasemap" -> optional(number),
"defaultBasemapId" -> optional(text),
"customBasemap" -> optional(text),
"locale" -> optional(text),
"email" -> optional(text),
"emailOptIn" -> optional(boolean),
"leaderboardOptOut" -> optional(boolean),
"needsReview" -> optional(number),
"isReviewer" -> optional(boolean),
"theme" -> optional(number)
)(UserSettings.apply)(UserSettings.unapply)
)
val REVIEW_NOT_NEEDED = 0
val REVIEW_NEEDED = 1
val REVIEW_MANDATORY = 2
/**
* Generates a User object based on the json details and request token
*
   * @param userXML An XML string originally queried from the OSM details API
* @param requestToken The access token used to retrieve the OSM details
* @return A user object based on the XML details provided
*/
def generate(userXML: String, requestToken: RequestToken, config: Config): User =
generate(XML.loadString(userXML), requestToken, config)
/**
* Generate a User object based on the XML details and request token
*
* @param root The XML details of the user based on OSM details API
* @param requestToken The access token used to retrieve the OSM details
* @return A user object based on the XML details provided
*/
def generate(root: Elem, requestToken: RequestToken, config: Config): User = {
    val userXML = (root \ "user").head
    val displayName = userXML \@ "display_name"
    val osmAccountCreated = userXML \@ "account_created"
    val osmId = userXML \@ "id"
    val description = (userXML \ "description").head.text
    val avatarURL = (userXML \ "img").headOption match {
      case Some(img) => img \@ "href"
      case None => "/assets/images/user_no_image.png"
    }
    val location = (userXML \ "home").headOption match {
      case Some(loc) => Location((loc \@ "lat").toDouble, (loc \@ "lon").toDouble)
case None => Location(47.608013, -122.335167)
}
// check whether this user is a super user
val groups =
if (config.superAccounts.contains(osmId) ||
(config.superAccounts.size == 1 && config.superAccounts.head.equals("*"))) {
List(superGroup)
} else {
List[Group]()
}
User(
-1,
new DateTime(),
new DateTime(),
OSMProfile(
osmId.toLong,
displayName,
description,
avatarURL,
location,
DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'Z'").parseDateTime(osmAccountCreated),
requestToken
),
groups,
settings =
UserSettings(theme = Some(THEME_BLUE), needsReview = Option(config.defaultNeedsReview))
)
}
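  // The parser above expects the OSM user-details XML shape, roughly:
  //   <osm>
  //     <user id="42" display_name="jane" account_created="2016-01-01T00:00:00Z">
  //       <description>...</description>
  //       <img href="https://example.org/avatar.png"/>
  //       <home lat="47.608013" lon="-122.335167"/>
  //     </user>
  //   </osm>
  // <img> and <home> are optional (defaults are substituted above); the
  // attribute values shown are illustrative.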
def superUser: User =
User(
DEFAULT_SUPER_USER_ID,
DateTime.now(),
DateTime.now(),
OSMProfile(
DEFAULT_SUPER_USER_ID,
"SuperUser",
"FULL ACCESS",
"/assets/images/user_no_image.png",
Location(47.608013, -122.335167),
DateTime.now(),
RequestToken("", "")
),
List(superGroup.copy()),
settings = UserSettings(theme = Some(THEME_BLACK))
)
def withDecryptedAPIKey(user: User)(implicit crypto: Crypto): User = {
user.apiKey match {
case Some(key) if key.nonEmpty =>
try {
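          // The exposed key takes the form "<userId>|<decryptedKey>",
          // presumably so the owning user can be identified from the token alone.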
val decryptedAPIKey = Some(s"${user.id}|${crypto.decrypt(key)}")
new User(
user.id,
user.created,
user.modified,
user.osmProfile,
user.groups,
decryptedAPIKey,
user.guest,
user.settings,
user.properties,
user.score
)
} catch {
case _: BadPaddingException | _: IllegalBlockSizeException =>
LoggerFactory
.getLogger(this.getClass)
.debug("Invalid key found, could be that the application secret on server changed.")
user
case e: Throwable => throw e
}
case _ => user
}
}
/**
   * Simple helper function that returns a guest user if the provided Option[User]
   * is None, otherwise returns the provided user
*
* @param user The user to check
* @return Guest user if none, otherwise simply the provided user.
*/
def userOrMocked(user: Option[User]): User = {
user match {
case Some(u) => u
case None => guestUser
}
}
/**
* Creates a guest user object with default information.
*/
def guestUser: User =
User(
DEFAULT_GUEST_USER_ID,
DateTime.now(),
DateTime.now(),
OSMProfile(
DEFAULT_GUEST_USER_ID,
"Guest",
"Sign in using your OSM account for more access to MapRoulette features.",
"/assets/images/user_no_image.png",
Location(-33.918861, 18.423300),
DateTime.now(),
RequestToken("", "")
),
List(),
None,
true,
UserSettings(theme = Some(THEME_GREEN))
)
}
|
Crashfreak/maproulette2
|
app/org/maproulette/session/User.scala
|
Scala
|
apache-2.0
| 15,120 |
package org.scalajs.jasmine
import scala.scalajs.js
@js.native
trait Spec extends js.Object {
def results(): SpecResults = js.native
val description: String = js.native
val suite: Suite = js.native
}
|
mdedetrich/scala-js
|
jasmine-test-framework/src/main/scala/org/scalajs/jasmine/Spec.scala
|
Scala
|
bsd-3-clause
| 208 |
package net.fwbrasil.zoot.spray
import scala.concurrent.Future
import akka.actor.Actor
import akka.actor.actorRef2Scala
import akka.util.Timeout
import net.fwbrasil.zoot.core.request.Request
import net.fwbrasil.zoot.core.response.Response
import net.fwbrasil.zoot.spray.request.requestFromSpray
import net.fwbrasil.zoot.spray.response.responseToSpray
import spray.can.Http
import spray.http.HttpRequest
import spray.http.HttpResponse
case class SprayServer(requestConsumer: Request => Future[Response[Array[Byte]]])(implicit timeout: Timeout)
extends Actor {
import context.dispatcher
def receive = {
case _: Http.Connected =>
sender ! Http.Register(self)
    case httpRequest: HttpRequest =>
      // Capture the sender eagerly: `sender` may point at a different actor
      // by the time the Future callbacks run.
      val sender = this.sender
      // Start from an already-completed Future so the request is handled on
      // the dispatcher rather than blocking the receive loop.
      Future.successful({}).flatMap { _ =>
        requestConsumer(requestFromSpray(httpRequest))
          .map(responseToSpray(_)).map(sender ! _)
      }
}
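  // Usage sketch with spray-can (illustrative names, standard spray/akka APIs):
  //   implicit val system = ActorSystem("zoot")
  //   implicit val timeout = Timeout(5.seconds)
  //   val handler = system.actorOf(Props(SprayServer(myRequestConsumer)))
  //   IO(Http) ! Http.Bind(handler, interface = "0.0.0.0", port = 8080)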
}
|
fwbrasil/zoot
|
zoot-spray/src/main/scala/net/fwbrasil/zoot/spray/SprayServer.scala
|
Scala
|
lgpl-2.1
| 1,000 |
package com.ubeeko.htalk.filter
import com.ubeeko.htalk.criteria._
import org.apache.hadoop.hbase.filter.{Filter => HFilter, SingleColumnValueFilter}
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp
/**
* Copyright Ubeeko
* @author Éric Le Blouc'h
*
*/
case class ColumnFilter(column: String, value: String) extends Filter {
  def getFilter: HFilter = {
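    // "d" is the column family and `column` the qualifier. HBase's
    // SingleColumnValueFilter expects byte arrays, so the String arguments
    // presumably rely on implicit conversions imported from
    // com.ubeeko.htalk.criteria._.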
new SingleColumnValueFilter("d", column, CompareOp.EQUAL, value)
}
}
|
eric-leblouch/htalk
|
src/main/scala/com/ubeeko/htalk/filter/ColumnFilter.scala
|
Scala
|
apache-2.0
| 459 |
// val cosmosEndpoint = "<inserted by environment>"
// val cosmosMasterKey = "<inserted by environment>"
val cosmosEndpoint = dbutils.widgets.get("cosmosEndpoint")
val cosmosMasterKey = dbutils.widgets.get("cosmosMasterKey")
val cosmosDatabaseName = "sampleDB"
val cosmosContainerName = "sampleContainer"
val cfg = Map("spark.cosmos.accountEndpoint" -> cosmosEndpoint,
"spark.cosmos.accountKey" -> cosmosMasterKey,
"spark.cosmos.database" -> cosmosDatabaseName,
"spark.cosmos.container" -> cosmosContainerName
)
val cfgWithAutoSchemaInference = Map("spark.cosmos.accountEndpoint" -> cosmosEndpoint,
"spark.cosmos.accountKey" -> cosmosMasterKey,
"spark.cosmos.database" -> cosmosDatabaseName,
"spark.cosmos.container" -> cosmosContainerName,
"spark.cosmos.read.inferSchema.enabled" -> "true"
)
// COMMAND ----------
// create Cosmos Database and Cosmos Container using Catalog APIs
spark.conf.set(s"spark.sql.catalog.cosmosCatalog", "com.azure.cosmos.spark.CosmosCatalog")
spark.conf.set(s"spark.sql.catalog.cosmosCatalog.spark.cosmos.accountEndpoint", cosmosEndpoint)
spark.conf.set(s"spark.sql.catalog.cosmosCatalog.spark.cosmos.accountKey", cosmosMasterKey)
// create a cosmos database
spark.sql(s"CREATE DATABASE IF NOT EXISTS cosmosCatalog.${cosmosDatabaseName};")
// create a cosmos container
spark.sql(s"CREATE TABLE IF NOT EXISTS cosmosCatalog.${cosmosDatabaseName}.${cosmosContainerName} using cosmos.oltp " +
s"TBLPROPERTIES(partitionKeyPath = '/id', manualThroughput = '1100')")
// COMMAND ----------
// ingestion
spark.createDataFrame(Seq(("cat-alive", "Schrodinger cat", 2, true), ("cat-dead", "Schrodinger cat", 2, false)))
.toDF("id","name","age","isAlive")
.write
.format("cosmos.oltp")
.options(cfg)
.mode("APPEND")
.save()
// COMMAND ----------
// Show the schema of the table and data without auto schema inference
val df = spark.read.format("cosmos.oltp").options(cfg).load()
df.printSchema()
df.show()
// COMMAND ----------
// Show the schema of the table and data with auto schema inference
val df = spark.read.format("cosmos.oltp").options(cfgWithAutoSchemaInference).load()
df.printSchema()
df.show()
// COMMAND ----------
import org.apache.spark.sql.functions.col
// Query to find the live cat and increment age of the alive cat
df.filter(col("isAlive") === true)
.withColumn("age", col("age") + 1)
.show()
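// COMMAND ----------
// A possible follow-up cell (sketch): write the incremented ages back. With the
// connector's default write strategy (ItemOverwrite), an APPEND save upserts
// documents that share the same id.
df.filter(col("isAlive") === true)
  .withColumn("age", col("age") + 1)
  .write
  .format("cosmos.oltp")
  .options(cfg)
  .mode("APPEND")
  .save()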
|
Azure/azure-sdk-for-java
|
sdk/cosmos/azure-cosmos-spark_3_2-12/test-databricks/notebooks/basicScenario.scala
|
Scala
|
mit
| 2,415 |