| code (string, 5-1M) | repo_name (string, 5-109) | path (string, 6-208) | language (1 class) | license (15 values) | size (int64, 5-1M) |
|---|---|---|---|---|---|
package net.bhardy.braintree.scala
import gw._
import search.{TransactionSearchRequest, SubscriptionSearchRequest, CustomerSearchRequest, CreditCardVerificationSearchRequest}
trait Pager[T] {
def getPage(ids: List[String]): List[T]
}
class CreditCardVerificationPager(gateway: CreditCardVerificationGateway, query: CreditCardVerificationSearchRequest)
extends Pager[CreditCardVerification] {
def getPage(ids: List[String]) = {
gateway.fetchCreditCardVerifications(query, ids)
}
}
object Pager {
def customer(gateway: CustomerGateway, query: CustomerSearchRequest) = new Pager[Customer] {
def getPage(ids: List[String]): List[Customer] = {
gateway.fetchCustomers(query, ids)
}
}
def expiredCreditCard(gateway: CreditCardGateway) = new Pager[CreditCard] {
def getPage(ids: List[String]): List[CreditCard] = {
gateway.fetchExpiredCreditCards(ids)
}
}
def expiringCreditCard(gateway: CreditCardGateway, queryString: String) = new Pager[CreditCard] {
def getPage(ids: List[String]): List[CreditCard] = {
gateway.fetchExpiringCreditCards(ids, queryString)
}
}
def subscription(gateway: SubscriptionGateway, search: SubscriptionSearchRequest) = new Pager[Subscription] {
def getPage(ids: List[String]): List[Subscription] = {
gateway.fetchSubscriptions(search, ids)
}
}
def transaction(gateway: TransactionGateway, query: TransactionSearchRequest) = new Pager[Transaction] {
def getPage(ids: List[String]): List[Transaction] = {
gateway.fetchTransactions(query, ids)
}
}
}
|
benhardy/braintree-scala
|
src/main/scala/Pager.scala
|
Scala
|
mit
| 1,583 |
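A hedged usage sketch (the driver object and echo pager below are hypothetical, not part of braintree-scala): a `Pager[T]` reduces each gateway search to a single `getPage(ids)` call, so a full id list can be fetched in fixed-size chunks like this:

```scala
// Minimal, self-contained sketch; the trait mirrors the Pager trait above.
trait Pager[T] {
  def getPage(ids: List[String]): List[T]
}

object PagerDemo extends App {
  // A toy pager that "fetches" by decorating the ids (a real one calls a gateway).
  val echoPager = new Pager[String] {
    def getPage(ids: List[String]): List[String] = ids.map("item-" + _)
  }
  val allIds = (1 to 10).map(_.toString).toList
  // Fetch in pages of 4 ids, as a network-backed pager would.
  val results = allIds.grouped(4).flatMap(echoPager.getPage).toList
  println(results) // List(item-1, ..., item-10)
}
```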
/*
* Copyright 2016 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600j.v3.retriever
import uk.gov.hmrc.ct.box.retriever.BoxValues
import uk.gov.hmrc.ct.ct600.v3.retriever.CT600BoxRetriever
import uk.gov.hmrc.ct.ct600j.v3._
object CT600JBoxRetriever extends BoxValues[CT600JBoxRetriever]
trait CT600JBoxRetriever {
self: CT600BoxRetriever =>
def retrieveB65(): B65
def retrieveJ1(): J1 = J1(retrieveB1())
def retrieveJ2(): J2 = J2(retrieveB3())
def retrieveJ3(): J3 = J3(retrieveB30())
def retrieveJ4(): J4 = J4(retrieveB35())
def retrieveJ5(): J5
def retrieveJ10(): J10
def retrieveJ15(): J15
def retrieveJ20(): J20
def retrieveJ25(): J25
def retrieveJ30(): J30
def retrieveJ35(): J35
def retrieveJ40(): J40
def retrieveJ45(): J45
def retrieveJ50(): J50
def retrieveJ5A(): J5A
def retrieveJ10A(): J10A
def retrieveJ15A(): J15A
def retrieveJ20A(): J20A
def retrieveJ25A(): J25A
def retrieveJ30A(): J30A
def retrieveJ35A(): J35A
def retrieveJ40A(): J40A
def retrieveJ45A(): J45A
def retrieveJ50A(): J50A
}
|
ahudspith-equalexperts/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/ct600j/v3/retriever/CT600JBoxRetriever.scala
|
Scala
|
apache-2.0
| 1,628 |
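The `self: CT600BoxRetriever =>` self-type above means the J-box trait can only be mixed into a CT600 retriever, whose B-boxes it reads. A minimal sketch of the same pattern (the types are illustrative, not the real HMRC boxes):

```scala
// BLike stands in for CT600BoxRetriever, JLike for CT600JBoxRetriever.
trait BLike {
  def retrieveB1(): Int
}

trait JLike { self: BLike =>
  // J1 is derived from B1, just as retrieveJ1() wraps retrieveB1() above.
  def retrieveJ1(): Int = retrieveB1()
}

object Retriever extends BLike with JLike {
  def retrieveB1(): Int = 42
}
// Retriever.retrieveJ1() == 42; a JLike without BLike does not compile.
```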
import scala.collection.mutable.Buffer
case class Buffer
println(/* */ Buffer.getClass)
println(classOf[/* file: this, type: org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScClass */ Buffer])
|
katejim/intellij-scala
|
testdata/resolve2/import/clash/TypeAndValue2.scala
|
Scala
|
apache-2.0
| 205 |
package models
import play.api.Play.current
import play.api.db.slick.Config.driver.simple._
import play.api.db.slick.DB
import play.api.libs.json._
import play.api.libs.functional.syntax._
class Worlds(tag: Tag) extends Table[World](tag, "World") {
def id = column[Int]("id", O.PrimaryKey)
def randomNumber = column[Long]("randomNumber")
def * = (id, randomNumber) <> ((World.apply _).tupled, World.unapply _)
}
class WorldsTableQuery extends TableQuery(new Worlds(_)) {
val byId = this.findBy(_.id)
def findById(id: Int)(implicit session: Session): World = {
byId(id).first
}
val updateQuery = Compiled{ (id: Column[Int]) => this.filter(_.id === id) }
def updateRandom(world: World)(implicit session: Session): Unit = {
updateQuery(world.id).update(world)
}
}
case class World(id: Int, randomNumber: Long)
object World {
implicit val toJson = new Writes[World] {
def writes(w: World): JsValue = {
Json.obj(
"id" -> w.id,
"randomNumber" -> w.randomNumber
)
}
}
}
|
denkab/FrameworkBenchmarks
|
frameworks/Scala/play2-scala/play2-scala-slick/app/models/World.scala
|
Scala
|
bsd-3-clause
| 1,028 |
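A hypothetical usage sketch (assumes the Play + Slick 2.x setup above; `WorldDemo` and `touch` are made up): read a row inside a session, bump its `randomNumber`, and persist it through the compiled update query:

```scala
import play.api.Play.current
import play.api.db.slick.Config.driver.simple._
import play.api.db.slick.DB
import models.{World, WorldsTableQuery}

object WorldDemo {
  val worlds = new WorldsTableQuery

  def touch(id: Int): World = DB.withSession { implicit session =>
    val w = worlds.findById(id)                      // uses the compiled byId query
    val updated = w.copy(randomNumber = scala.util.Random.nextLong())
    worlds.updateRandom(updated)                     // uses the compiled updateQuery
    updated
  }
}
```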
package com.thetestpeople.trt.demo
import com.thetestpeople.trt.model.Configuration
import com.thetestpeople.trt.json.RestApi
import com.github.nscala_time.time.Imports._
import scala.util.Random
import org.joda.time.Duration
import com.thetestpeople.trt.mother.{ IncomingFactory ⇒ F }
import com.thetestpeople.trt.utils.UriUtils._
import com.thetestpeople.trt.service.Incoming
import com.ning.http.client.AsyncHttpClientConfig
import play.api.libs.ws.ning.NingWSClient
object DemoPopulator extends App {
val wsClient = new NingWSClient(new AsyncHttpClientConfig.Builder().build())
val restApi = RestApi(uri("http://localhost:9000"), wsClient)
val startDate = 2.months.ago
val numberOfBatches = 100
val random = new Random
val batchGap: (Duration, Duration) = (1.hour, 24.hours)
def randomGap(): Duration = {
val (low, high) = batchGap
val spanMillis = high.getMillis - low.getMillis
val millis = low.getMillis + math.abs(random.nextLong) % spanMillis
Duration.millis(millis)
}
case class TestSpec(name: String, groupOpt: Option[String], passRate: Double) {
def test: Incoming.Test = Incoming.Test(name, groupOpt, categories = Seq())
def randomPassed(fudge: Double) = random.nextDouble <= passRate * fudge
def randomExecution(executionTime: DateTime, configuration: Configuration, fudge: Double) =
F.execution(test, passed = randomPassed(fudge), executionTimeOpt = Some(executionTime), configurationOpt = Some(configuration),
durationOpt = Some(Duration.millis(1000 + math.abs(random.nextInt) % 200)))
}
val configurations = Seq("Firefox", "IE", "Chrome", "Safari").map(Configuration.apply)
val testSpecs = Seq(
TestSpec("user_can_log_in_with_correct_credentials", Some("LoginTests"), 1),
TestSpec("user_cannot_log_in_with_incorrect_credentials", Some("LoginTests"), 0.85),
TestSpec("captcha_is_used_after_five_failed_logins", Some("LoginTests"), 0.75),
TestSpec("a_logged_in_user_can_log_out", Some("LogoutTests"), 0.65),
TestSpec("search_returns_matching_results", Some("SearchTests"), 0.55),
TestSpec("search_returns_no_results", Some("SearchTests"), 0.45))
for (configuration ← configurations) {
var currentDate = startDate
for (batchNumber ← 1 to numberOfBatches) {
val fudge =
if (40 <= batchNumber && batchNumber <= 60 && configuration == Configuration("IE"))
0.05
else
1
val batch = F.batch(
nameOpt = Some(s"Batch $batchNumber"),
urlOpt = Some(uri(s"http://www.example.com/batch/$batchNumber")),
executionTimeOpt = Some(currentDate),
executions = testSpecs.map(_.randomExecution(currentDate, configuration, fudge)).toList)
println(s"Adding '$configuration' batch $batchNumber on $currentDate")
restApi.addBatch(batch)
currentDate = currentDate + randomGap()
}
}
System.exit(0)
}
|
thetestpeople/trt
|
test/com/thetestpeople/trt/demo/DemoPopulator.scala
|
Scala
|
mit
| 2,918 |
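The `randomGap` helper above draws a pseudo-uniform duration between its two bounds by offsetting the lower bound with `|nextLong| % span`. A standalone sketch of the same arithmetic (the object name is made up; the slight abs-plus-modulo bias, and the rare `Long.MinValue` edge case, are acceptable for demo data):

```scala
import scala.util.Random

object GapDemo extends App {
  val random = new Random

  // Pseudo-uniform draw in [lowMs, highMs); small modulo bias is fine for demos.
  def randomMillisBetween(lowMs: Long, highMs: Long): Long =
    lowMs + math.abs(random.nextLong()) % (highMs - lowMs)

  // A value somewhere between 1 hour and 24 hours, in milliseconds.
  println(randomMillisBetween(3600000L, 86400000L))
}
```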
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.mxnet
import org.apache.mxnet.Base._
import org.apache.mxnet.DType.DType
import org.apache.mxnet.io.{MXDataIter, MXDataPack}
import org.slf4j.LoggerFactory
import scala.annotation.varargs
import scala.collection.immutable.ListMap
import scala.collection.mutable.ListBuffer
import scala.language.implicitConversions
/**
* IO iterators for loading training & validation data
*/
object IO {
type IterCreateFunc = (Map[String, String]) => DataIter
type PackCreateFunc = (Map[String, String]) => DataPack
private val logger = LoggerFactory.getLogger(classOf[DataIter])
private val iterCreateFuncs: Map[String, IterCreateFunc] = initIOModule()
def MNISTIter: IterCreateFunc = iterCreateFuncs("MNISTIter")
def ImageRecordIter: IterCreateFunc = iterCreateFuncs("ImageRecordIter")
def CSVIter: IterCreateFunc = iterCreateFuncs("CSVIter")
def MNISTPack: PackCreateFunc = createMXDataPack("MNISTIter")
def ImageRecordPack: PackCreateFunc = createMXDataPack("ImageRecordIter")
def CSVPack: PackCreateFunc = createMXDataPack("CSVIter")
/**
* Create an iterator via iterName and params.
* @param iterName name of the iterator, e.g. "MNISTIter" or "ImageRecordIter"
* @param params parameters for creating the iterator
* @return created data iterator
*/
def createIterator(iterName: String, params: Map[String, String]): DataIter = {
iterCreateFuncs(iterName)(params)
}
/**
* Create a DataPack for an iterator via iterName and params.
* @param iterName name of the iterator, e.g. "MNISTIter" or "ImageRecordIter"
* @param params parameters for creating the iterator
* @return created dataPack
*/
def createMXDataPack(iterName: String)(params: Map[String, String]): DataPack = {
new MXDataPack(iterName, params)
}
/**
* Initialize all IO creator functions.
* @return map from name to iterator creator function
*/
private def initIOModule(): Map[String, IterCreateFunc] = {
val iterCreators = new ListBuffer[DataIterCreator]
checkCall(_LIB.mxListDataIters(iterCreators))
iterCreators.map(makeIOIterator).toMap
}
private def makeIOIterator(handle: DataIterCreator): (String, IterCreateFunc) = {
val name = new RefString
val desc = new RefString
val argNames = new ListBuffer[String]
val argTypes = new ListBuffer[String]
val argDescs = new ListBuffer[String]
checkCall(_LIB.mxDataIterGetIterInfo(handle, name, desc, argNames, argTypes, argDescs))
val paramStr = Base.ctypes2docstring(argNames, argTypes, argDescs)
val docStr = s"${name.value}\n${desc.value}\n\n$paramStr\n"
logger.debug(docStr)
(name.value, creator(handle))
}
/**
* DataIter creator
* @param handle native memory ptr for the iterator
* @param params parameter passed to the iterator
* @return created DataIter
*/
private def creator(handle: DataIterCreator)(
params: Map[String, String]): DataIter = {
val out = new DataIterHandleRef
val keys = params.keys.toArray
val vals = params.values.toArray
checkCall(_LIB.mxDataIterCreateIter(handle, keys, vals, out))
val dataName = params.getOrElse("data_name", "data")
val labelName = params.getOrElse("label_name", "label")
new MXDataIter(out.value, dataName, labelName)
}
// Convert data into canonical form.
private[mxnet] def initDataDesc(data: IndexedSeq[NDArray],
allowEmpty: Boolean,
defaultName: String,
defaultDType: DType,
defaultLayout: String): IndexedSeq[(DataDesc, NDArray)] = {
require(data != null, "data is required.")
require(data != IndexedSeq.empty || allowEmpty,
s"data should not be empty when allowEmpty is false")
if (data == IndexedSeq.empty) {
IndexedSeq()
} else if (data.length == 1) {
IndexedSeq((new DataDesc(defaultName, data(0).shape,
defaultDType, defaultLayout), data(0)))
} else {
data.zipWithIndex.map(item => {
(new DataDesc(defaultName + "_" + item._2, item._1.shape,
defaultDType, defaultLayout), item._1)
}).toIndexedSeq
}
}
}
/**
* A batch of data.
*/
class DataBatch(val data: IndexedSeq[NDArray],
val label: IndexedSeq[NDArray],
val index: IndexedSeq[Long],
val pad: Int,
// the key for the bucket that should be used for this batch,
// for bucketing io only
val bucketKey: AnyRef = null,
// use DataDesc to indicate the order of data/label loading
// (must match the order of input data/label)
private val providedDataDesc: IndexedSeq[DataDesc] = null,
private val providedLabelDesc: IndexedSeq[DataDesc] = null) {
// TODO: change the data/label type into IndexedSeq[(NDArray, DataDesc)]
// However, since the data and label can be accessed publicly (no getter and setter)
// the change on this will break BC
@deprecated("Use provideDataDesc and provideDataLabel instead", "1.3.0")
def this(data: IndexedSeq[NDArray],
label: IndexedSeq[NDArray],
index: IndexedSeq[Long],
pad: Int,
// the key for the bucket that should be used for this batch,
// for bucketing io only
bucketKey: AnyRef,
// use ListMap to indicate the order of data/label loading
// (must match the order of input data/label)
providedData: ListMap[String, Shape]) {
this(data, label, index, pad, bucketKey,
DataDesc.ListMap2Descs(providedData))
}
@deprecated("Use provideDataDesc and provideDataLabel instead", "1.3.0")
def this(data: IndexedSeq[NDArray],
label: IndexedSeq[NDArray],
index: IndexedSeq[Long],
pad: Int,
// the key for the bucket that should be used for this batch,
// for bucketing io only
bucketKey: AnyRef,
// use ListMap to indicate the order of data/label loading
// (must match the order of input data/label)
providedData: ListMap[String, Shape],
providedLabel: ListMap[String, Shape]) {
this(data, label, index, pad, bucketKey,
DataDesc.ListMap2Descs(providedData), DataDesc.ListMap2Descs(providedLabel))
}
/**
* Dispose its data and labels
* The object shall never be used after it is disposed.
*/
def dispose(): Unit = {
if (data != null) {
data.foreach(arr => if (arr != null) arr.dispose())
}
if (label != null) {
label.foreach(arr => if (arr != null) arr.dispose())
}
}
// The name and shape of data
@deprecated("Use provideDataDesc instead", "1.3.0")
def provideData: ListMap[String, Shape] = {
var temp = ListMap[String, Shape]()
if (providedDataDesc == null) null
else {
providedDataDesc.foreach(ele => temp = temp + (ele.name -> ele.shape))
temp
}
}
// The name and shape of label
@deprecated("Use provideLabelDesc instead", "1.3.0")
def provideLabel: ListMap[String, Shape] = {
var temp = ListMap[String, Shape]()
if (providedLabelDesc == null) null
else {
providedLabelDesc.foreach(ele => temp = temp + (ele.name -> ele.shape))
temp
}
}
def provideDataDesc: IndexedSeq[DataDesc] = providedDataDesc
def provideLabelDesc: IndexedSeq[DataDesc] = providedLabelDesc
}
object DataBatch {
/**
* Builder class for DataBatch.
*/
class Builder() {
private var data: IndexedSeq[NDArray] = null
private var label: IndexedSeq[NDArray] = null
private var index: IndexedSeq[Long] = null
private var pad: Int = 0
private var bucketKey: AnyRef = null
private var dataDesc: IndexedSeq[DataDesc] = null
private var labelDesc: IndexedSeq[DataDesc] = null
/**
* Set the input data.
* @param data a list of data.
* @return this.
*/
@varargs def setData(data: NDArray*): Builder = {
this.data = data.toIndexedSeq
this
}
/**
* Set the labels in the same order of data.
* @param label a list of labels.
* @return this.
*/
@varargs def setLabel(label: NDArray*): Builder = {
this.label = label.toIndexedSeq
this
}
/**
* Set the example indices in this batch.
* @param index indices in the same order of data.
* @return this.
*/
@varargs def setIndex(index: Long*): Builder = {
this.index = index.toIndexedSeq
this
}
/**
* Set the pad.
* @param pad The number of examples padded at the end of a batch. It is used when the
* total number of examples read is not divisible by the `batch_size`.
* These extra padded examples are ignored in prediction.
* @return this
*/
def setPad(pad: Int): Builder = {
this.pad = pad
this
}
/**
* Set the bucket key, used for bucketing module.
* @param bucketKey the bucket key related to this batch.
* @return this.
*/
def setBucketKey(bucketKey: AnyRef): Builder = {
this.bucketKey = bucketKey
this
}
/**
* Provide the shape of a data.
* @param dataDesc DataDescriptor
* @return this.
*/
def provideDataDesc(dataDesc: IndexedSeq[DataDesc]): Builder = {
this.dataDesc = dataDesc
this
}
/**
* Provide the shape of a label.
* @param labelDesc LabelDescriptor
* @return this.
*/
def provideLabelDesc(labelDesc: IndexedSeq[DataDesc]): Builder = {
this.labelDesc = labelDesc
this
}
def build(): DataBatch = {
require(data != null, "data is required.")
new DataBatch(data, label, index, pad, bucketKey, dataDesc, labelDesc)
}
}
}
/**
* DataIter object in mxnet.
*/
abstract class DataIter extends Iterator[DataBatch] {
/**
* reset the iterator
*/
def reset(): Unit
def batchSize: Int
/**
* Get the next data batch from the iterator.
* @return the next DataBatch
*/
@throws(classOf[NoSuchElementException])
def next(): DataBatch = {
new DataBatch(getData(), getLabel(), getIndex(), getPad())
}
/**
* get data of current batch
* @return the data of current batch
*/
def getData(): IndexedSeq[NDArray]
/**
* Get label of current batch
* @return the label of current batch
*/
def getLabel(): IndexedSeq[NDArray]
/**
* Get the number of padding examples
* in current batch
* @return number of padding examples in current batch
*/
def getPad(): Int
/**
* Get the index of current batch
* @return the index of current batch
*/
def getIndex(): IndexedSeq[Long]
// The name and shape of data provided by this iterator
@deprecated("Use provideDataDesc instead", "1.3.0")
def provideData: ListMap[String, Shape]
// The name and shape of label provided by this iterator
@deprecated("Use provideLabelDesc instead", "1.3.0")
def provideLabel: ListMap[String, Shape]
// Provide the DataDesc of the data
def provideDataDesc: IndexedSeq[DataDesc]
// Provide the DataDesc of the label
def provideLabelDesc: IndexedSeq[DataDesc]
// For bucketing io only
// The bucket key for the default symbol.
def defaultBucketKey: AnyRef = null
}
/**
* A pack of DataIter, usable as an Iterable.
*/
abstract class DataPack() extends Iterable[DataBatch] {
/**
* get data iterator
* @return DataIter
*/
def iterator: DataIter
}
// A named data descriptor: contains name, shape, dtype and other extended attributes.
case class DataDesc(name: String, shape: Shape,
dtype: DType = DType.Float32, layout: String = Layout.UNDEFINED) {
require(layout == Layout.UNDEFINED || shape.length == layout.length,
s"number of dimensions in $shape should match the layout $layout")
override def toString(): String = {
s"DataDesc[$name,$shape,$dtype,$layout]"
}
}
object DataDesc {
private val logger = LoggerFactory.getLogger(classOf[DataDesc])
/**
* Get the dimension that corresponds to the batch size.
* @param layout layout string. For example, "NCHW".
* @return An axis indicating the batch_size dimension. When data-parallelism is used,
* the data will be automatically split and concatenated along the batch_size dimension.
* Axis can be -1, which means the whole array will be copied
* for each data-parallelism device.
*/
def getBatchAxis(layout: Option[String]): Int = {
if (layout.isEmpty || layout.get == Layout.UNDEFINED) {
logger.warn("Found Undefined Layout, will use default index 0 for batch axis")
0
} else {
if (layout.get.contains('N')) {
layout.get.indexOf("N")
} else {
throw new IllegalArgumentException("no Batch Axis('N') found in Layout!")
}
}
}
@deprecated("Please use DataDesc methods instead", "1.3.0")
implicit def ListMap2Descs(shapes: ListMap[String, Shape]): IndexedSeq[DataDesc] = {
if (shapes != null) {
shapes.map { case (k, s) => new DataDesc(k, s) }.toIndexedSeq
} else {
null
}
}
}
|
zhreshold/mxnet
|
scala-package/core/src/main/scala/org/apache/mxnet/IO.scala
|
Scala
|
apache-2.0
| 13,971 |
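A hedged usage sketch of `IO.createIterator` (the file paths are placeholders; the parameter keys follow MXNet's documented MNISTIter options): build a DataIter from a registered creator and walk its batches, disposing each one:

```scala
import org.apache.mxnet.IO

object IODemo extends App {
  val trainIter = IO.createIterator("MNISTIter", Map(
    "image"      -> "data/train-images-idx3-ubyte",   // placeholder path
    "label"      -> "data/train-labels-idx1-ubyte",   // placeholder path
    "batch_size" -> "128",
    "shuffle"    -> "1"))

  trainIter.reset()
  while (trainIter.hasNext) {
    val batch = trainIter.next()
    // ... feed batch.data / batch.label to a model here ...
    batch.dispose()                 // free native NDArray memory per batch
  }
}
```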
package com.thoughtworks.sbtApiMappings
import com.thoughtworks.Extractor._
import sbt._
/**
* @author 杨博 (Yang Bo) <[email protected]>
*/
object PlayApiMappingRule extends AutoPlugin {
import ApiMappings.autoImport._
override def requires = ApiMappings
override def trigger = allRequirements
private def moduleID: Attributed[File] => Option[(String, String, String)] = _.get(Keys.moduleID.key).map { moduleID =>
(moduleID.organization, moduleID.name, moduleID.revision)
}
private def playRule: PartialFunction[Attributed[File], URL] = {
case moduleID.extract("com.typesafe.play", libraryName, VersionNumber(Seq(majorVersion, minorVersion, _*), _, _))
if libraryName == "play" || libraryName.startsWith("play-") =>
url(s"https://playframework.com/documentation/$majorVersion.$minorVersion.x/api/scala/index.html")
}
override def projectSettings = {
apiMappingRules := playRule.orElse(apiMappingRules.value)
}
}
|
ThoughtWorksInc/sbt-api-mappings
|
src/main/scala/com/thoughtworks/sbtApiMappings/PlayApiMappingRule.scala
|
Scala
|
apache-2.0
| 979 |
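A standalone sketch of the rule's mapping (no sbt required; `PlayDocUrlDemo` is made up): given an artifact's (organization, name, revision) triple, produce the Play documentation URL the same way `playRule` does:

```scala
object PlayDocUrlDemo extends App {
  def playDocUrl(organization: String, name: String, revision: String): Option[String] =
    if (organization == "com.typesafe.play" && (name == "play" || name.startsWith("play-")))
      revision.split('.') match {
        case Array(major, minor, _*) =>
          Some(s"https://playframework.com/documentation/$major.$minor.x/api/scala/index.html")
        case _ => None
      }
    else None

  println(playDocUrl("com.typesafe.play", "play-json", "2.6.9"))
  // Some(https://playframework.com/documentation/2.6.x/api/scala/index.html)
}
```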
/**
* Global Sensor Networks (GSN) Source Code
* Copyright (c) 2006-2016, Ecole Polytechnique Federale de Lausanne (EPFL)
*
* This file is part of GSN.
*
* GSN is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* GSN is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with GSN. If not, see <http://www.gnu.org/licenses/>.
*
* File: src/ch/epfl/gsn/config/WatchServiceTask.scala
*
* @author Jean-Paul Calbimonte
*
*/
package ch.epfl.gsn.config
import akka.actor.ActorRef
import java.nio.file.FileSystems
import collection.JavaConversions._
import java.nio.file.Path
import java.nio.file.StandardWatchEventKinds._
import java.io.File
class WatchServiceTask(notifyActor: ActorRef) extends Runnable {
private val watchService = FileSystems.getDefault.newWatchService()
def watch(path: Path) =
path.register(watchService, ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY)
def run(): Unit = {
try {
while (!Thread.currentThread.isInterrupted) {
val key = watchService.take
key.pollEvents foreach {event =>
val relativePath = event.context.asInstanceOf[Path]
val path = key.watchable.asInstanceOf[Path].resolve(relativePath)
event.kind match {
case ENTRY_CREATE =>
notifyActor ! Created(path.toFile)
case ENTRY_DELETE =>
notifyActor ! Deleted(path.toFile)
case ENTRY_MODIFY =>
notifyActor ! Modified(path.toFile)
case x =>
//logger.warn(s"Unknown event $x")
}
}
key.reset()
}
} catch {
case e: InterruptedException =>
// logger.info("Interrupting, bye!")
} finally {
watchService.close()
}
}
}
sealed trait FileSystemChange
case class Created(fileOrDir: File) extends FileSystemChange
case class Deleted(fileOrDir: File) extends FileSystemChange
case class Modified(fileOrDir: File) extends FileSystemChange
case class MonitorDir(path: Path)
|
LSIR/gsn
|
gsn-tools/src/main/scala/ch/epfl/gsn/config/WatchServiceTask.scala
|
Scala
|
gpl-3.0
| 2,399 |
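A hypothetical wiring sketch (the listener actor, system name, and watched path are made up; assumes classic Akka actors): run the task on its own thread and react to the change messages it emits:

```scala
import akka.actor.{Actor, ActorSystem, Props}
import java.nio.file.Paths
import ch.epfl.gsn.config.{Created, Deleted, Modified, WatchServiceTask}

class ConfigListener extends Actor {
  def receive = {
    case Created(f)  => println(s"created:  $f")
    case Deleted(f)  => println(s"deleted:  $f")
    case Modified(f) => println(s"modified: $f")
  }
}

object WatchDemo extends App {
  val system   = ActorSystem("watch-demo")
  val listener = system.actorOf(Props[ConfigListener], "listener")
  val task     = new WatchServiceTask(listener)
  task.watch(Paths.get("conf"))                 // directory assumed to exist
  new Thread(task, "config-watcher").start()
}
```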
package ru.tmtool.math.arithmetic
import _root_.junit.framework.TestCase
import org.scalatest._
import org.junit.Test
import scala.math.E
/**
* User: skozlov
* E-mail: [email protected]
* Date: 24.08.2014
*/
class LnTest extends TestCase with MustMatchers {
@Test
def test1(){
ln(1) mustBe 0
}
@Test
def testE(){
ln(E) mustBe 1
}
@Test
def testE2(){
ln(E*E) mustBe 2
}
}
|
tmtool/math
|
src/test/scala/ru/tmtool/math/arithmetic/LnTest.scala
|
Scala
|
mit
| 403 |
package org.jetbrains.plugins.scala.codeInspection.typeChecking
import com.intellij.codeInspection.LocalInspectionTool
import org.jetbrains.plugins.scala.codeInspection.ScalaLightInspectionFixtureTestAdapter
/**
* Author: Svyatoslav Ilinskiy
* Date: 21.12.15.
*/
class PatternMayNeverMatchInspectionTest extends ScalaLightInspectionFixtureTestAdapter {
override protected def classOfInspection: Class[_ <: LocalInspectionTool] = classOf[PatternMayNeverMatchInspection]
override protected def annotation: String = PatternMayNeverMatchInspection.inspectionName
def testSCL9668(): Unit = {
val code =
s"""
|object Moo {
| (1, 2) match {
| case ${START}ScFunctionType(_, _)$END =>
| case _ =>
| }
|}
|class ScFunctionType(a: Foo, b: Seq[Foo])
|
|object ScFunctionType {
| def unapply(f: Foo): Option[(Foo, Seq[Foo])] = ???
|}
|trait Foo
""".stripMargin
checkTextHasError(code)
}
}
|
whorbowicz/intellij-scala
|
test/org/jetbrains/plugins/scala/codeInspection/typeChecking/PatternMayNeverMatchInspectionTest.scala
|
Scala
|
apache-2.0
| 1,021 |
/*
* Copyright 2017 by Eugene Yokota
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gigahorse
import scala.concurrent.Future
abstract class ReactiveHttpClient extends HttpClient {
/** Runs the request and return a Future of FullResponse. */
def runStream(request: Request): Future[StreamResponse]
/** Runs the request and return a Future of A. */
def runStream[A](request: Request, f: StreamResponse => Future[A]): Future[A]
/** Executes the request and return a Future of StreamResponse. Does not error on non-OK response. */
def processStream(request: Request): Future[StreamResponse]
/** Executes the request and return a Future of A. Does not error on non-OK response. */
def processStream[A](request: Request, f: StreamResponse => Future[A]): Future[A]
}
|
eed3si9n/gigahorse
|
core/src/main/scala/gigahorse/ReactiveHttpClient.scala
|
Scala
|
apache-2.0
| 1,302 |
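A minimal consumer sketch using only the abstract surface above (`fetch` and `fetchAnyStatus` are illustrative names): the `run*` variants fail the returned Future on a non-OK response, while the `process*` variants leave status handling to the caller:

```scala
import scala.concurrent.Future
import gigahorse.{ReactiveHttpClient, Request, StreamResponse}

object StreamCalls {
  def fetch(client: ReactiveHttpClient, req: Request): Future[StreamResponse] =
    client.runStream(req)        // Future fails on a non-OK status

  def fetchAnyStatus(client: ReactiveHttpClient, req: Request): Future[StreamResponse] =
    client.processStream(req)    // caller inspects the status itself
}
```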
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.rdd
import java.io._
import scala.Serializable
import scala.collection.Map
import scala.collection.immutable.NumericRange
import scala.collection.mutable.ArrayBuffer
import scala.reflect.ClassTag
import org.apache.spark._
import org.apache.spark.serializer.JavaSerializer
import org.apache.spark.util.Utils
/**
* A partition of a parallel collection.
*/
private[spark] class ParallelCollectionPartition[T: ClassTag](
var rddId: Long,
var slice: Int,
var values: Seq[T])
extends Partition with Serializable {
def iterator: Iterator[T] = values.iterator
override def hashCode(): Int = (41 * (41 + rddId) + slice).toInt
override def equals(other: Any): Boolean = other match {
case that: ParallelCollectionPartition[_] =>
this.rddId == that.rddId && this.slice == that.slice
case _ => false
}
override def index: Int = slice
@throws(classOf[IOException])
private def writeObject(out: ObjectOutputStream): Unit = Utils.tryOrIOException {
val sfactory = SparkEnv.get.serializer
// Treat java serializer with default action rather than going thru serialization, to avoid a
// separate serialization header.
sfactory match {
case js: JavaSerializer => out.defaultWriteObject()
case _ =>
out.writeLong(rddId)
out.writeInt(slice)
val ser = sfactory.newInstance()
Utils.serializeViaNestedStream(out, ser)(_.writeObject(values))
}
}
@throws(classOf[IOException])
private def readObject(in: ObjectInputStream): Unit = Utils.tryOrIOException {
val sfactory = SparkEnv.get.serializer
sfactory match {
case js: JavaSerializer => in.defaultReadObject()
case _ =>
rddId = in.readLong()
slice = in.readInt()
val ser = sfactory.newInstance()
Utils.deserializeViaNestedStream(in, ser)(ds => values = ds.readObject[Seq[T]]())
}
}
}
// An RDD backed by a parallel (in-memory) collection.
private[spark] class ParallelCollectionRDD[T: ClassTag](
@transient sc: SparkContext,
@transient data: Seq[T],
numSlices: Int,
locationPrefs: Map[Int, Seq[String]])
extends RDD[T](sc, Nil) {
// TODO: Right now, each split sends along its full data, even if later down the RDD chain it gets
// cached. It might be worthwhile to write the data to a file in the DFS and read it in the split
// instead.
// UPDATE: A parallel collection can be checkpointed to HDFS, which achieves this goal.
override def getPartitions: Array[Partition] = {
val slices = ParallelCollectionRDD.slice(data, numSlices).toArray
slices.indices.map(i => new ParallelCollectionPartition(id, i, slices(i))).toArray
}
override def compute(s: Partition, context: TaskContext): Iterator[T] = {
new InterruptibleIterator(context, s.asInstanceOf[ParallelCollectionPartition[T]].iterator)
}
//返回每个 partiton 都对应一组 hosts,这组 hosts 上往往存放着该 partition 的输入数据
override def getPreferredLocations(s: Partition): Seq[String] = {
locationPrefs.getOrElse(s.index, Nil)
}
}
private object ParallelCollectionRDD {
/**
* Slice a collection into numSlices sub-collections. One extra thing we do here is to treat Range
* collections specially, encoding the slices as other Ranges to minimize memory cost. This makes
* it efficient to run Spark over RDDs representing large sets of numbers. And if the collection
* is an inclusive Range, we use inclusive range for the last slice.
*/
def slice[T: ClassTag](seq: Seq[T], numSlices: Int): Seq[Seq[T]] = {
if (numSlices < 1) {
throw new IllegalArgumentException("Positive number of slices required")
}
// Sequences need to be sliced at the same set of index positions for operations
// like RDD.zip() to behave as expected
def positions(length: Long, numSlices: Int): Iterator[(Int, Int)] = {
(0 until numSlices).iterator.map(i => {
val start = ((i * length) / numSlices).toInt
val end = (((i + 1) * length) / numSlices).toInt
(start, end)
})
}
seq match {
case r: Range => {
positions(r.length, numSlices).zipWithIndex.map({ case ((start, end), index) =>
// If the range is inclusive, use inclusive range for the last slice
if (r.isInclusive && index == numSlices - 1) {
new Range.Inclusive(r.start + start * r.step, r.end, r.step)
}
else {
new Range(r.start + start * r.step, r.start + end * r.step, r.step)
}
}).toSeq.asInstanceOf[Seq[Seq[T]]]
}
case nr: NumericRange[_] => {
// For ranges of Long, Double, BigInteger, etc
val slices = new ArrayBuffer[Seq[T]](numSlices)
var r = nr
for ((start, end) <- positions(nr.length, numSlices)) {
val sliceSize = end - start
slices += r.take(sliceSize).asInstanceOf[Seq[T]]
r = r.drop(sliceSize)
}
slices
}
case _ => {
val array = seq.toArray // To prevent O(n^2) operations for List etc
positions(array.length, numSlices).map({
case (start, end) =>
array.slice(start, end).toSeq
}).toSeq
}
}
}
}
|
tophua/spark1.52
|
core/src/main/scala/org/apache/spark/rdd/ParallelCollectionRDD.scala
|
Scala
|
apache-2.0
| 6,832 |
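A standalone sketch of the `positions` arithmetic above (`SliceDemo` is made up): slice boundaries are `((i * length) / n, ((i + 1) * length) / n)`, so slice sizes never differ by more than one:

```scala
object SliceDemo extends App {
  def positions(length: Long, numSlices: Int): Iterator[(Int, Int)] =
    (0 until numSlices).iterator.map { i =>
      (((i * length) / numSlices).toInt, (((i + 1) * length) / numSlices).toInt)
    }

  // Split a length-10 sequence into 3 slices of sizes 3, 3 and 4.
  println(positions(10, 3).toList) // List((0,3), (3,6), (6,10))
}
```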
package com.datastax.examples.meetup.model
import org.joda.time.{DateTimeZone, DateTime}
import org.apache.spark.streaming.Time
/**
* Created by rustam on 29/12/2014.
*/
object EventInterval {
val All = "ALL"
def Seconds(time: Time): String =
"S" + new DateTime(time.milliseconds, DateTimeZone.UTC).toString("yyyyMMddHHmmss")
}
|
rstml/datastax-spark-streaming-demo
|
src/main/scala/com/datastax/examples/meetup/model/EventInterval.scala
|
Scala
|
apache-2.0
| 340 |
package org.jetbrains.plugins.scala
package annotator
package importsTracker
import com.intellij.psi.PsiElement
import org.jetbrains.plugins.scala.extensions.PsiElementExt
import org.jetbrains.plugins.scala.lang.psi.api.ScalaFile
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.imports.ScImportExpr
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.imports.usages.{ImportExprUsed, ImportSelectorUsed, ImportUsed}
import org.jetbrains.plugins.scala.lang.resolve.ScalaResolveResult
import org.jetbrains.plugins.scala.worksheet.ScalaScriptImportsUtil
import scala.collection.mutable
import scala.collection.Set
/**
* @author Alexander Podkhalyuzin
*/
object ImportTracker {
def registerUsedImports(elem: PsiElement, imports: Set[ImportUsed]): Unit = {
if (!elem.isValid) return
elem.getContainingFile match {
case scalaFile: ScalaFile =>
val refHolder = ScalaRefCountHolder.getInstance(scalaFile)
imports.foreach(refHolder.registerImportUsed)
case _ =>
}
}
def registerUsedImports(element: PsiElement, resolveResult: ScalaResolveResult): Unit = {
registerUsedImports(element, resolveResult.importsUsed)
}
def getUnusedImports(file: ScalaFile): Seq[ImportUsed] = {
val buff = new mutable.HashSet[ImportUsed]()
val imports = file.getAllImportUsed
val refHolder = ScalaRefCountHolder.getInstance(file)
refHolder.retrieveUnusedReferencesInfo { () =>
imports.foreach {
case used@ImportSelectorUsed(e) => //if the entire line is unused, highlight the entire line
if (refHolder.isRedundant(used)) {
e.parent.flatMap(_.parent) match {
case Some(expr: ScImportExpr) if expr.selectors.map(ImportSelectorUsed).forall(refHolder.isRedundant) =>
buff += ImportExprUsed(expr)
case _ => buff += used
}
}
case used =>
if (refHolder.isRedundant(used)) {
buff += used
}
}
}
ScalaScriptImportsUtil.filterScriptImportsInUnused(file, buff.toSeq)
}
}
|
triplequote/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/scala/annotator/importsTracker/ImportTracker.scala
|
Scala
|
apache-2.0
| 2,081 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources
import org.apache.spark.sql.catalyst.analysis.MultiInstanceRelation
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeMap, AttributeReference}
import org.apache.spark.sql.catalyst.plans.logical.{LeafNode, LogicalPlan, Statistics}
import org.apache.spark.sql.sources.BaseRelation
/**
* Used to link a [[BaseRelation]] into a logical query plan.
*
* Note that sometimes we need to use `LogicalRelation` to replace an existing leaf node without
* changing the output attributes' IDs. The `expectedOutputAttributes` parameter is used for
* this purpose. See https://issues.apache.org/jira/browse/SPARK-10741 for more details.
*/
private[sql] case class LogicalRelation(
relation: BaseRelation,
expectedOutputAttributes: Option[Seq[Attribute]] = None)
extends LeafNode with MultiInstanceRelation {
override val output: Seq[AttributeReference] = {
val attrs = relation.schema.toAttributes
expectedOutputAttributes.map { expectedAttrs =>
assert(expectedAttrs.length == attrs.length)
attrs.zip(expectedAttrs).map {
// We should respect the attribute names provided by base relation and only use the
// exprId in `expectedOutputAttributes`.
// The reason is that, some relations(like parquet) will reconcile attribute names to
// workaround case insensitivity issue.
case (attr, expected) => attr.withExprId(expected.exprId)
}
}.getOrElse(attrs)
}
// Logical Relations are distinct if they have different output for the sake of transformations.
override def equals(other: Any): Boolean = other match {
case l @ LogicalRelation(otherRelation, _) => relation == otherRelation && output == l.output
case _ => false
}
override def hashCode: Int = {
com.google.common.base.Objects.hashCode(relation, output)
}
override def sameResult(otherPlan: LogicalPlan): Boolean = otherPlan match {
case LogicalRelation(otherRelation, _) => relation == otherRelation
case _ => false
}
// When comparing two LogicalRelations from within LogicalPlan.sameResult, we only need
// LogicalRelation.cleanArgs to return Seq(relation), since expectedOutputAttribute's
// expId can be different but the relation is still the same.
override lazy val cleanArgs: Seq[Any] = Seq(relation)
@transient override lazy val statistics: Statistics = Statistics(
sizeInBytes = BigInt(relation.sizeInBytes)
)
/** Used to look up original attribute capitalization. */
val attributeMap: AttributeMap[AttributeReference] = AttributeMap(output.map(o => (o, o)))
def newInstance(): this.type = LogicalRelation(relation).asInstanceOf[this.type]
override def simpleString: String = s"Relation[${output.mkString(",")}] $relation"
}
|
tophua/spark1.52
|
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/LogicalRelation.scala
|
Scala
|
apache-2.0
| 4,145 |
object Test {
def f(i: => Int) = i + i
val res = List(42).map(f) // error
val g: (=> Int) => Int = f
val h: Int => Int = g // error
}
|
lampepfl/dotty
|
tests/neg/i5976.scala
|
Scala
|
apache-2.0
| 142 |
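A compiling sketch of why the two marked lines are errors (not part of the test itself): a `(=> Int) => Int` function may evaluate its argument more than once, so it cannot masquerade as a by-value `Int => Int` without changing evaluation semantics:

```scala
object Explain extends App {
  var evals = 0
  def f(i: => Int) = i + i            // forces the by-name argument twice

  val g: (=> Int) => Int = f          // fine: the types match exactly
  println(g { evals += 1; 21 })       // prints 42 ...
  println(evals)                      // ... but the block ran twice: prints 2
}
```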
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
// SKIP-SCALATESTJS,NATIVE-START
import org.scalatestplus.junit.JUnit3Suite
import org.scalatestplus.junit.JUnitSuite
import org.junit.Test
import org.testng.annotations.{Test => TestNG }
import org.scalatestplus.testng.TestNGSuite
import org.scalatest.refspec.RefSpec
// SKIP-SCALATESTJS,NATIVE-END
class TestNameProp extends AllSuiteProp {
type FixtureServices = TestNameFixtureServices
// SKIP-SCALATESTJS,NATIVE-START
def spec = new ExampleTestNameSpec
def junit3Suite = new ExampleTestNameJUnit3Suite
def junitSuite = new ExampleTestNameJUnitSuite
def testngSuite = new ExampleTestNameTestNGSuite
// SKIP-SCALATESTJS,NATIVE-END
def funSuite = new ExampleTestNameFunSuite
def fixtureFunSuite = new ExampleTestNameFixtureFunSuite
def funSpec = new ExampleTestNameFunSpec
def fixtureFunSpec = new ExampleTestNameFixtureFunSpec
def featureSpec = new ExampleTestNameFeatureSpec
def fixtureFeatureSpec = new ExampleTestNameFixtureFeatureSpec
def flatSpec = new ExampleTestNameFlatSpec
def fixtureFlatSpec = new ExampleTestNameFixtureFlatSpec
def freeSpec = new ExampleTestNameFreeSpec
def fixtureFreeSpec = new ExampleTestNameFixtureFreeSpec
def propSpec = new ExampleTestNamePropSpec
def fixturePropSpec = new ExampleTestNameFixturePropSpec
def wordSpec = new ExampleTestNameWordSpec
def fixtureWordSpec = new ExampleTestNameFixtureWordSpec
def pathFreeSpec = new ExampleTestNamePathFreeSpec
def pathFunSpec = new ExampleTestNamePathFunSpec
test("test name will be constructed by concatennating scopes, outer to inner, followed by the test text, separated by a space after each component is trimmed.") {
forAll(examples) { s => s.assertTestNames() }
}
}
trait TestNameFixtureServices { suite: Suite =>
val expectedTestNames: Set[String]
def assertTestNames(): Unit = {
val expectedSet = expectedTestNames
val testNameSet = testNames
assert(expectedSet.size === testNameSet.size)
expectedSet.foreach { tn =>
assert(testNameSet contains tn, "Unable to find test name: '" + tn + "', testNames is: \n" + testNameSet.map("'" + _ + "'").mkString("\n"))
}
}
}
// SKIP-SCALATESTJS,NATIVE-START
@DoNotDiscover
class ExampleTestNameSpec extends RefSpec with TestNameFixtureServices {
val expectedTestNames =
Set(
"Testing 1 Scala code should be fun",
"Testing 2 Scala code should be fun",
"Testing 3 Scala code should be fun",
"Testing 4 Scala code should be fun",
"Testing 5 Scala code should be fun",
"Testing 6 Scala code should be fun",
"Testing 7 Scala code should be fun",
"Testing 8 Scala code should be fun",
"Testing 9 Scala code should be fun"
)
object `Testing 1` {
object `Scala code` {
def `should be fun`: Unit = {}
}
}
object `Testing 2 ` {
object `Scala code` {
def `should be fun`: Unit = {}
}
}
object `Testing 3` {
object ` Scala code` {
def `should be fun`: Unit = {}
}
}
object `Testing 4` {
object `Scala code ` {
def `should be fun`: Unit = {}
}
}
object `Testing 5` {
object `Scala code` {
def ` should be fun`: Unit = {}
}
}
object ` Testing 6` {
object `Scala code` {
def `should be fun`: Unit = {}
}
}
object `Testing 7` {
object `Scala code` {
def `should be fun `: Unit = {}
}
}
object `Testing 8 ` {
object ` Scala code` {
def `should be fun`: Unit = {}
}
}
object `Testing 9 ` {
object `Scala code` {
def `should be fun`: Unit = {}
}
}
}
@DoNotDiscover
class ExampleTestNameJUnit3Suite extends JUnit3Suite with TestNameFixtureServices {
val expectedTestNames =
Set(
"testingShouldBeFun"
)
def testingShouldBeFun(): Unit = { }
}
@DoNotDiscover
class ExampleTestNameJUnitSuite extends JUnitSuite with TestNameFixtureServices {
val expectedTestNames =
Set(
"testingShouldBeFun"
)
@Test
def testingShouldBeFun(): Unit = {}
}
@DoNotDiscover
class ExampleTestNameTestNGSuite extends TestNGSuite with TestNameFixtureServices {
val expectedTestNames =
Set(
"testingShouldBeFun"
)
@TestNG
def testingShouldBeFun(): Unit = {}
}
// SKIP-SCALATESTJS,NATIVE-END
@DoNotDiscover
protected[scalatest] class ExampleTestNameFunSuite extends FunSuite with TestNameFixtureServices {
val expectedTestNames =
Set(
"Testing 1 should be fun",
"Testing 2 should be fun",
"Testing 3 should be fun",
"Testing 4 should be fun",
"Testing 5 should be fun"
)
test("Testing 1 should be fun") {}
test(" Testing 2 should be fun") {}
test("Testing 3 should be fun ") {}
test(" Testing 4 should be fun") {}
test("Testing 5 should be fun ") {}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameFixtureFunSuite extends fixture.FunSuite with TestNameFixtureServices with StringFixture {
val expectedTestNames =
Set(
"Testing 1 should be fun",
"Testing 2 should be fun",
"Testing 3 should be fun",
"Testing 4 should be fun",
"Testing 5 should be fun"
)
test("Testing 1 should be fun") { s => }
test(" Testing 2 should be fun") { s => }
test("Testing 3 should be fun ") { s => }
test(" Testing 4 should be fun") { s => }
test("Testing 5 should be fun ") { s => }
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameFunSpec extends FunSpec with TestNameFixtureServices {
val expectedTestNames =
Set(
"Testing 1 Scala code should be fun",
"Testing 2 Scala code should be fun",
"Testing 3 Scala code should be fun",
"Testing 4 Scala code should be fun",
"Testing 5 Scala code should be fun",
"Testing 6 Scala code should be fun",
"Testing 7 Scala code should be fun",
"Testing 8 Scala code should be fun",
"Testing 9 Scala code should be fun"
)
describe("Testing 1") {
describe("Scala code") {
it("should be fun") {}
}
}
describe("Testing 2 ") {
describe("Scala code") {
it("should be fun") {}
}
}
describe("Testing 3") {
describe(" Scala code") {
it("should be fun") {}
}
}
describe("Testing 4") {
describe("Scala code ") {
it("should be fun") {}
}
}
describe("Testing 5") {
describe("Scala code") {
it(" should be fun") {}
}
}
describe(" Testing 6") {
describe("Scala code") {
it("should be fun") {}
}
}
describe("Testing 7") {
describe("Scala code") {
it("should be fun ") {}
}
}
describe("Testing 8 ") {
describe(" Scala code") {
it("should be fun") {}
}
}
describe("Testing 9 ") {
describe("Scala code") {
it("should be fun") {}
}
}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameFixtureFunSpec extends fixture.FunSpec with TestNameFixtureServices with StringFixture {
val expectedTestNames =
Set(
"Testing 1 Scala code should be fun",
"Testing 2 Scala code should be fun",
"Testing 3 Scala code should be fun",
"Testing 4 Scala code should be fun",
"Testing 5 Scala code should be fun",
"Testing 6 Scala code should be fun",
"Testing 7 Scala code should be fun",
"Testing 8 Scala code should be fun",
"Testing 9 Scala code should be fun"
)
describe("Testing 1") {
describe("Scala code") {
it("should be fun") { s => }
}
}
describe("Testing 2 ") {
describe("Scala code") {
it("should be fun") { s => }
}
}
describe("Testing 3") {
describe(" Scala code") {
it("should be fun") { s => }
}
}
describe("Testing 4") {
describe("Scala code ") {
it("should be fun") { s => }
}
}
describe("Testing 5") {
describe("Scala code") {
it(" should be fun") { s => }
}
}
describe(" Testing 6") {
describe("Scala code") {
it("should be fun") { s => }
}
}
describe("Testing 7") {
describe("Scala code") {
it("should be fun ") { s => }
}
}
describe("Testing 8 ") {
describe(" Scala code") {
it("should be fun") { s => }
}
}
describe("Testing 9 ") {
describe("Scala code") {
it("should be fun") { s => }
}
}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameFeatureSpec extends FeatureSpec with TestNameFixtureServices {
val expectedTestNames =
Set(
"Feature: Testing 1 Scenario: Scala code should be fun",
"Feature: Testing 2 Scenario: Scala code should be fun",
"Feature: Testing 3 Scenario: Scala code should be fun",
"Feature: Testing 4 Scenario: Scala code should be fun",
"Feature: Testing 5 Scenario: Scala code should be fun",
"Feature: Testing 6 Scenario: Scala code should be fun",
"Feature: Testing 7 Scenario: Scala code should be fun"
)
Feature("Testing 1") {
Scenario("Scala code should be fun") {}
}
Feature("Testing 2 ") {
Scenario("Scala code should be fun") {}
}
Feature("Testing 3") {
Scenario(" Scala code should be fun") {}
}
Feature("Testing 4") {
Scenario("Scala code should be fun ") {}
}
Feature(" Testing 5") {
Scenario("Scala code should be fun") {}
}
Feature("Testing 6 ") {
Scenario(" Scala code should be fun") {}
}
Feature("Testing 7 ") {
Scenario("Scala code should be fun") {}
}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameFixtureFeatureSpec extends fixture.FeatureSpec with TestNameFixtureServices with StringFixture {
val expectedTestNames =
Set(
"Feature: Testing 1 Scenario: Scala code should be fun",
"Feature: Testing 2 Scenario: Scala code should be fun",
"Feature: Testing 3 Scenario: Scala code should be fun",
"Feature: Testing 4 Scenario: Scala code should be fun",
"Feature: Testing 5 Scenario: Scala code should be fun",
"Feature: Testing 6 Scenario: Scala code should be fun",
"Feature: Testing 7 Scenario: Scala code should be fun"
)
Feature("Testing 1") {
Scenario("Scala code should be fun") { s => }
}
Feature("Testing 2 ") {
Scenario("Scala code should be fun") { s => }
}
Feature("Testing 3") {
Scenario(" Scala code should be fun") { s => }
}
Feature("Testing 4") {
Scenario("Scala code should be fun ") { s => }
}
Feature(" Testing 5") {
Scenario("Scala code should be fun") { s => }
}
Feature("Testing 6 ") {
Scenario(" Scala code should be fun") { s => }
}
Feature("Testing 7 ") {
Scenario("Scala code should be fun") { s => }
}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameFlatSpec extends FlatSpec with TestNameFixtureServices {
val expectedTestNames =
Set(
"Testing 1 should be fun to code in Scala",
"Testing 2 should be fun to code in Scala",
"Testing 3 should be fun to code in Scala",
"Testing 4 should be fun to code in Scala",
"Testing 5 should be fun to code in Scala",
"Testing 6 should be fun to code in Scala",
"Testing 7 should be fun to code in Scala"
)
"Testing 1" should "be fun to code in Scala" in {
}
"Testing 2 " should "be fun to code in Scala" in {
}
"Testing 3" should " be fun to code in Scala" in {
}
"Testing 4" should "be fun to code in Scala " in {
}
" Testing 5" should "be fun to code in Scala" in {
}
"Testing 6 " should " be fun to code in Scala" in {
}
"Testing 7 " should "be fun to code in Scala" in {
}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameFixtureFlatSpec extends fixture.FlatSpec with TestNameFixtureServices with StringFixture {
val expectedTestNames =
Set(
"Testing 1 should be fun to code in Scala",
"Testing 2 should be fun to code in Scala",
"Testing 3 should be fun to code in Scala",
"Testing 4 should be fun to code in Scala",
"Testing 5 should be fun to code in Scala",
"Testing 6 should be fun to code in Scala",
"Testing 7 should be fun to code in Scala"
)
"Testing 1" should "be fun to code in Scala" in { s =>
}
"Testing 2 " should "be fun to code in Scala" in { s =>
}
"Testing 3" should " be fun to code in Scala" in { s =>
}
"Testing 4" should "be fun to code in Scala " in { s =>
}
" Testing 5" should "be fun to code in Scala" in { s =>
}
"Testing 6 " should " be fun to code in Scala" in { s =>
}
"Testing 7 " should "be fun to code in Scala" in { s =>
}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameFreeSpec extends FreeSpec with TestNameFixtureServices {
val expectedTestNames =
Set(
"Testing 1 Scala code should be fun",
"Testing 2 Scala code should be fun",
"Testing 3 Scala code should be fun",
"Testing 4 Scala code should be fun",
"Testing 5 Scala code should be fun",
"Testing 6 Scala code should be fun",
"Testing 7 Scala code should be fun",
"Testing 8 Scala code should be fun",
"Testing 9 Scala code should be fun"
)
"Testing 1" - {
"Scala code" - {
"should be fun" in {}
}
}
"Testing 2 " - {
"Scala code" - {
"should be fun" in {}
}
}
"Testing 3" - {
" Scala code" - {
"should be fun" in {}
}
}
"Testing 4" - {
"Scala code " - {
"should be fun" in {}
}
}
"Testing 5" - {
"Scala code" - {
" should be fun" in {}
}
}
" Testing 6" - {
"Scala code" - {
"should be fun" in {}
}
}
"Testing 7" - {
"Scala code" - {
"should be fun " in {}
}
}
"Testing 8 " - {
" Scala code" - {
"should be fun" in {}
}
}
"Testing 9 " - {
"Scala code" - {
"should be fun" in {}
}
}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameFixtureFreeSpec extends fixture.FreeSpec with TestNameFixtureServices with StringFixture {
val expectedTestNames =
Set(
"Testing 1 Scala code should be fun",
"Testing 2 Scala code should be fun",
"Testing 3 Scala code should be fun",
"Testing 4 Scala code should be fun",
"Testing 5 Scala code should be fun",
"Testing 6 Scala code should be fun",
"Testing 7 Scala code should be fun",
"Testing 8 Scala code should be fun",
"Testing 9 Scala code should be fun"
)
"Testing 1" - {
"Scala code" - {
"should be fun" in { s => }
}
}
"Testing 2 " - {
"Scala code" - {
"should be fun" in { s => }
}
}
"Testing 3" - {
" Scala code" - {
"should be fun" in { s => }
}
}
"Testing 4" - {
"Scala code " - {
"should be fun" in { s => }
}
}
"Testing 5" - {
"Scala code" - {
" should be fun" in { s => }
}
}
" Testing 6" - {
"Scala code" - {
"should be fun" in { s => }
}
}
"Testing 7" - {
"Scala code" - {
"should be fun " in { s => }
}
}
"Testing 8 " - {
" Scala code" - {
"should be fun" in { s => }
}
}
"Testing 9 " - {
"Scala code" - {
"should be fun" in { s => }
}
}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNamePropSpec extends PropSpec with TestNameFixtureServices {
val expectedTestNames =
Set(
"Testing 1 Scala code should be fun",
"Testing 2 Scala code should be fun",
"Testing 3 Scala code should be fun",
"Testing 4 Scala code should be fun",
"Testing 5 Scala code should be fun",
"Testing 6 Scala code should be fun"
)
property("Testing 1 Scala code should be fun") {}
property(" Testing 2 Scala code should be fun") {}
property("Testing 3 Scala code should be fun ") {}
property(" Testing 4 Scala code should be fun") {}
property("Testing 5 Scala code should be fun ") {}
property(" Testing 6 Scala code should be fun ") {}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameFixturePropSpec extends fixture.PropSpec with TestNameFixtureServices with StringFixture {
val expectedTestNames =
Set(
"Testing 1 Scala code should be fun",
"Testing 2 Scala code should be fun",
"Testing 3 Scala code should be fun",
"Testing 4 Scala code should be fun",
"Testing 5 Scala code should be fun",
"Testing 6 Scala code should be fun"
)
property("Testing 1 Scala code should be fun") { s => }
property(" Testing 2 Scala code should be fun") { s => }
property("Testing 3 Scala code should be fun ") { s => }
property(" Testing 4 Scala code should be fun") { s => }
property("Testing 5 Scala code should be fun ") { s => }
property(" Testing 6 Scala code should be fun ") { s => }
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameWordSpec extends WordSpec with TestNameFixtureServices {
val expectedTestNames =
Set(
"Testing 1 should test Scala code should be fun",
"Testing 2 should test Scala code should be fun",
"Testing 3 should test Scala code should be fun",
"Testing 4 should test Scala code should be fun",
"Testing 5 should test Scala code should be fun",
"Testing 6 should test Scala code should be fun",
"Testing 7 should test Scala code should be fun",
"Testing 8 should test Scala code should be fun",
"Testing 9 should test Scala code should be fun"
)
"Testing 1" should {
"test Scala code" should {
"be fun" in {}
}
}
"Testing 2 " should {
"test Scala code" should {
"be fun" in {}
}
}
"Testing 3" should {
" test Scala code" should {
"be fun" in {}
}
}
"Testing 4" should {
"test Scala code " should {
"be fun" in {}
}
}
"Testing 5" should {
"test Scala code" should {
" be fun" in {}
}
}
" Testing 6" should {
"test Scala code" should {
"be fun" in {}
}
}
"Testing 7" should {
"test Scala code" should {
"be fun " in {}
}
}
"Testing 8 " should {
" test Scala code" should {
"be fun" in {}
}
}
"Testing 9 " should {
"test Scala code" should {
"be fun" in {}
}
}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameFixtureWordSpec extends fixture.WordSpec with TestNameFixtureServices with StringFixture {
val expectedTestNames =
Set(
"Testing 1 should test Scala code should be fun",
"Testing 2 should test Scala code should be fun",
"Testing 3 should test Scala code should be fun",
"Testing 4 should test Scala code should be fun",
"Testing 5 should test Scala code should be fun",
"Testing 6 should test Scala code should be fun",
"Testing 7 should test Scala code should be fun",
"Testing 8 should test Scala code should be fun",
"Testing 9 should test Scala code should be fun"
)
"Testing 1" should {
"test Scala code" should {
"be fun" in { s => }
}
}
"Testing 2 " should {
"test Scala code" should {
"be fun" in { s => }
}
}
"Testing 3" should {
" test Scala code" should {
"be fun" in { s => }
}
}
"Testing 4" should {
"test Scala code " should {
"be fun" in { s => }
}
}
"Testing 5" should {
"test Scala code" should {
" be fun" in { s => }
}
}
" Testing 6" should {
"test Scala code" should {
"be fun" in { s => }
}
}
"Testing 7" should {
"test Scala code" should {
"be fun " in { s => }
}
}
"Testing 8 " should {
" test Scala code" should {
"be fun" in { s => }
}
}
"Testing 9 " should {
"test Scala code" should {
"be fun" in { s => }
}
}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNamePathFreeSpec extends path.FreeSpec with TestNameFixtureServices {
val expectedTestNames =
Set(
"Testing 1 Scala code should be fun",
"Testing 2 Scala code should be fun",
"Testing 3 Scala code should be fun",
"Testing 4 Scala code should be fun",
"Testing 5 Scala code should be fun",
"Testing 6 Scala code should be fun",
"Testing 7 Scala code should be fun",
"Testing 8 Scala code should be fun",
"Testing 9 Scala code should be fun"
)
"Testing 1" - {
"Scala code" - {
"should be fun" in {}
}
}
"Testing 2 " - {
"Scala code" - {
"should be fun" in {}
}
}
"Testing 3" - {
" Scala code" - {
"should be fun" in {}
}
}
"Testing 4" - {
"Scala code " - {
"should be fun" in {}
}
}
"Testing 5" - {
"Scala code" - {
" should be fun" in {}
}
}
" Testing 6" - {
"Scala code" - {
"should be fun" in {}
}
}
"Testing 7" - {
"Scala code" - {
"should be fun " in {}
}
}
"Testing 8 " - {
" Scala code" - {
"should be fun" in {}
}
}
"Testing 9 " - {
"Scala code" - {
"should be fun" in {}
}
}
override def newInstance: path.FreeSpecLike = new ExampleTestNamePathFreeSpec
}
@DoNotDiscover
protected[scalatest] class ExampleTestNamePathFunSpec extends path.FunSpec with TestNameFixtureServices {
val expectedTestNames =
Set(
"Testing 1 Scala code should be fun",
"Testing 2 Scala code should be fun",
"Testing 3 Scala code should be fun",
"Testing 4 Scala code should be fun",
"Testing 5 Scala code should be fun",
"Testing 6 Scala code should be fun",
"Testing 7 Scala code should be fun",
"Testing 8 Scala code should be fun",
"Testing 9 Scala code should be fun"
)
describe("Testing 1") {
describe("Scala code") {
it("should be fun") {}
}
}
describe("Testing 2 ") {
describe("Scala code") {
it("should be fun") {}
}
}
describe("Testing 3") {
describe(" Scala code") {
it("should be fun") {}
}
}
describe("Testing 4") {
describe("Scala code ") {
it("should be fun") {}
}
}
describe("Testing 5") {
describe("Scala code") {
it(" should be fun") {}
}
}
describe(" Testing 6") {
describe("Scala code") {
it("should be fun") {}
}
}
describe("Testing 7") {
describe("Scala code") {
it("should be fun ") {}
}
}
describe("Testing 8 ") {
describe(" Scala code") {
it("should be fun") {}
}
}
describe("Testing 9 ") {
describe("Scala code") {
it("should be fun") {}
}
}
override def newInstance: path.FunSpecLike = new ExampleTestNamePathFunSpec
}
|
dotty-staging/scalatest
|
scalatest-test/src/test/scala/org/scalatest/TestNameProp.scala
|
Scala
|
apache-2.0
| 23,341 |
package metaconfig.generic
import scala.annotation.StaticAnnotation
import metaconfig.annotation._
import metaconfig.internal.CliParser
final class Setting(val field: Field) {
def name: String = field.name
def withName(name: String): Setting = new Setting(field.withName(name))
def tpe: String = field.tpe
def annotations: List[StaticAnnotation] = field.annotations
def underlying: Option[Settings[Nothing]] =
if (field.underlying.isEmpty) None
else {
Some(
new Settings(field.underlying.flatten.map(new Setting(_)))
)
}
def flat: List[Setting] =
field.flat.map(new Setting(_))
override def toString: String = s"Setting($field)"
def description: Option[String] = field.annotations.collectFirst {
case Description(value) => value
}
def extraNames: List[String] = field.annotations.collect {
case ExtraName(value) => value
}
def deprecatedNames: List[DeprecatedName] = field.annotations.collect {
case d: DeprecatedName => d
}
def exampleValues: List[String] = field.annotations.collect {
case ExampleValue(value) => value
}
def sinceVersion: Option[String] = field.annotations.collectFirst {
case SinceVersion(value) => value
}
def deprecated: Option[Deprecated] = field.annotations.collectFirst {
case value: Deprecated => value
}
def isRepeated: Boolean =
field.annotations.exists(_.isInstanceOf[Repeated])
def isDynamic: Boolean =
annotations.exists(_.isInstanceOf[Dynamic])
def isHidden: Boolean =
annotations.exists(_.isInstanceOf[Hidden])
def isBoolean: Boolean =
annotations.exists(_.isInstanceOf[Flag])
def isTabCompleteAsPath: Boolean =
annotations.exists(_.isInstanceOf[TabCompleteAsPath])
def isCatchInvalidFlags: Boolean =
annotations.exists(_.isInstanceOf[CatchInvalidFlags])
def isPositionalArgument: Boolean =
annotations.exists {
case ExampleValue(CliParser.PositionalArgument) => true
case _ => false
}
def tabCompleteOneOf: Option[List[String]] =
annotations.collectFirst {
case oneof: TabCompleteAsOneOf => oneof.options.toList
}
@deprecated("Use isDynamic instead", "0.8.2")
def isMap: Boolean = field.tpe.startsWith("Map")
@deprecated("Use isDynamic instead", "0.8.2")
def isConf: Boolean = field.tpe.contains("Conf")
def alternativeNames: List[String] =
extraNames ::: deprecatedNames.map(_.name)
def allNames: List[String] = name :: alternativeNames
def matchesLowercase(name: String): Boolean =
allNames.exists(_.equalsIgnoreCase(name))
def deprecation(name: String): Option[DeprecatedName] =
deprecatedNames.find(_.name == name)
}
object Setting {
def apply[T](name: String, tpe: String): Setting =
new Setting(new Field(name, tpe, Nil, Nil))
}
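// Hedged usage sketch (not part of the original file): exercising name
// resolution on a minimal Setting built through the companion above. The
// field name "maxColumn" is a placeholder.
object SettingUsageExample {
  def main(args: Array[String]): Unit = {
    val setting = Setting("maxColumn", "Int")
    assert(setting.allNames == List("maxColumn")) // no extra or deprecated names attached
    assert(setting.matchesLowercase("MAXCOLUMN")) // matching is case-insensitive
    assert(setting.deprecation("maxColumn").isEmpty)
  }
}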
|
olafurpg/metaconfig
|
metaconfig-core/shared/src/main/scala/metaconfig/generic/Setting.scala
|
Scala
|
apache-2.0
| 2,779 |
package com.rasterfoundry.common.utils
import com.rasterfoundry.common.cache._
import com.rasterfoundry.common.cache.kryo._
import com.rasterfoundry.common.{Config => CommonConfig}
import com.rasterfoundry.datamodel.TiffWithMetadata
import geotrellis.vector._
import geotrellis.raster._
import geotrellis.raster.histogram._
import geotrellis.raster.io.geotiff._
import geotrellis.raster.io.geotiff.reader.{GeoTiffReader, TiffTagsReader}
import geotrellis.proj4._
import geotrellis.vector.Projected
import cats.data._
import cats.implicits._
import scala.concurrent._
object CogUtils {
lazy val cacheConfig = CommonConfig.memcached
lazy val memcachedClient = KryoMemcachedClient.default
lazy val rfCache = new CacheClient(memcachedClient)
/** Read GeoTiff from URI while caching the header bytes in memcache */
def fromUri(uri: String)(
implicit ec: ExecutionContext): OptionT[Future, TiffWithMetadata] = {
val cacheKey = s"cog-header-${URIUtils.withNoParams(uri)}"
val cacheSize = 1 << 18
rfCache
.cachingOptionT(cacheKey, doCache = cacheConfig.tool.enabled) {
OptionT {
Future {
RangeReaderUtils.fromUri(uri).map(_.readRange(0, cacheSize))
}
}
}
.mapFilter { headerBytes =>
RangeReaderUtils.fromUri(uri).map { rr =>
val crr = CacheRangeReader(rr, headerBytes)
TiffWithMetadata(GeoTiffReader.readMultiband(crr, streaming = true),
TiffTagsReader.read(crr))
}
}
}
def getTiffExtent(uri: String): Option[Projected[MultiPolygon]] = {
for {
rr <- RangeReaderUtils.fromUri(uri)
tiff = GeoTiffReader.readMultiband(rr, streaming = true)
} yield {
val crs = tiff.crs
Projected(
MultiPolygon(tiff.extent.reproject(crs, WebMercator).toPolygon()),
3857)
}
}
def geoTiffDoubleHistogram(tiff: GeoTiff[MultibandTile],
buckets: Int = 80,
size: Int = 128): Array[Histogram[Double]] = {
def diagonal(tiff: GeoTiff[MultibandTile]): Int =
math.sqrt(tiff.cols * tiff.cols + tiff.rows * tiff.rows).toInt
val goldyLocksOverviews = tiff.overviews.filter { tiff =>
val d = diagonal(tiff)
(d >= size && d <= size * 4)
}
if (goldyLocksOverviews.nonEmpty) {
// case: overview that is close enough to the size, not more than 4x larger
// -- read the overview and get histogram
val theOne = goldyLocksOverviews.minBy(diagonal)
val hists = Array.fill(tiff.bandCount)(DoubleHistogram(buckets))
theOne.tile.foreachDouble { (band, v) =>
if (!v.isNaN) {
hists(band).countItem(v, 1)
}
}
hists.toArray
} else {
      // case: no such overview can be found
      // -- take the smallest overview and sample a window from its center
val theOne = tiff.overviews.minBy(diagonal)
val sampleBounds = {
val side = math.sqrt(size * size / 2)
val centerCol = theOne.cols / 2
val centerRow = theOne.rows / 2
GridBounds(
colMin = math.max(0, centerCol - (side / 2)).toInt,
rowMin = math.max(0, centerRow - (side / 2)).toInt,
colMax = math.min(theOne.cols - 1, centerCol + (side / 2)).toInt,
rowMax = math.min(theOne.rows - 1, centerRow + (side / 2)).toInt
)
}
val sample = theOne.crop(List(sampleBounds)).next._2
val hists = Array.fill(tiff.bandCount)(DoubleHistogram(buckets))
sample.foreachDouble { (band, v) =>
if (!v.isNaN) {
hists(band).countItem(v, 1)
}
}
hists.toArray
}
}
}
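// Hedged usage sketch (not part of the original file): fetch a COG through the
// memcached-backed reader and print basic metadata. The URI is a placeholder,
// the global ExecutionContext is an assumption, and the pattern match assumes
// TiffWithMetadata is the two-field case class its construction in fromUri suggests.
object CogUtilsExample {
  import scala.concurrent.ExecutionContext.Implicits.global
  import scala.concurrent.duration._
  def describe(uri: String): Unit = {
    val tiffOpt = Await.result(CogUtils.fromUri(uri).value, 30.seconds)
    tiffOpt.foreach {
      case TiffWithMetadata(tiff, _) =>
        println(s"bands=${tiff.tile.bandCount}, extent=${tiff.extent}")
    }
  }
}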
|
azavea/raster-foundry
|
app-backend/common/src/main/scala/utils/CogUtils.scala
|
Scala
|
apache-2.0
| 3,676 |
package scalaxb.stockquote.server
import javax.jws.{ WebService, WebMethod, WebParam, WebResult }
import javax.jws.soap.SOAPBinding
import javax.jws.soap.SOAPBinding.{Style, Use, ParameterStyle}
import collection.mutable
import scala.concurrent._, duration.Duration
@WebService(name = "DocumentWrappedService", serviceName = "DocumentWrappedService")
@SOAPBinding(style = Style.DOCUMENT, use = Use.LITERAL, parameterStyle = ParameterStyle.WRAPPED)
class DocumentWrappedService(sleepTime: Duration) {
private val buffer = mutable.Map[String, Double]()
def price(symbol: String): Double = {
Thread.sleep(sleepTime.toMillis)
buffer.getOrElse(symbol, 42.0)
}
def update(symbol: String, price: Double): Unit =
buffer(symbol) = price
def infos: Array[String] = Array("x")
}
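// Hedged usage sketch (not part of the original test fixture): publishing the
// service over JAX-WS. The URL, port, and zero sleep time are placeholders.
object DocumentWrappedServicePublisher {
  def main(args: Array[String]): Unit = {
    val service = new DocumentWrappedService(Duration.Zero)
    javax.xml.ws.Endpoint.publish("http://localhost:8080/DocumentWrappedService", service)
  }
}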
|
eed3si9n/scalaxb
|
integration/src/test/scala/DocumentWrappedService.scala
|
Scala
|
mit
| 795 |
package com.socialthingy.plusf.z80
import com.socialthingy.plusf.ProcessorSpec
class BlockOperationSpec extends ProcessorSpec {
"ldi" should "operate correctly when bc decrements to a non-zero value" in new Machine {
// given
registerContainsValue("a", 0x80)
registerContainsValue("hl", 0x1000)
registerContainsValue("de", 0x2000)
registerContainsValue("bc", 0xbeef)
memory.set(0x1000, 0xba)
nextInstructionIs(0xed, 0xa0)
// when
processor.execute()
// then
memory.get(0x2000) shouldBe 0xba
registerValue("hl") shouldBe 0x1001
registerValue("de") shouldBe 0x2001
registerValue("bc") shouldBe 0xbeee
flag("h").value shouldBe false
flag("p").value shouldBe true
flag("n").value shouldBe false
flag("f3").value shouldBe true
flag("f5").value shouldBe true
}
"ldi" should "operate correctly when bc decrements to zero" in new Machine {
// given
registerContainsValue("a", 0x80)
registerContainsValue("hl", 0x1000)
registerContainsValue("de", 0x2000)
registerContainsValue("bc", 0x0001)
memory.set(0x1000, 0xba)
nextInstructionIs(0xed, 0xa0)
// when
processor.execute()
// then
memory.get(0x2000) shouldBe 0xba
registerValue("hl") shouldBe 0x1001
registerValue("de") shouldBe 0x2001
registerValue("bc") shouldBe 0x0000
flag("h").value shouldBe false
flag("p").value shouldBe false
flag("n").value shouldBe false
flag("f3").value shouldBe true
flag("f5").value shouldBe true
}
"ldir" should "operate correctly when bc is greater than 1" in new Machine {
// given
registerContainsValue("a", 0x08)
registerContainsValue("hl", 0x1000)
registerContainsValue("de", 0x2000)
registerContainsValue("bc", 0x000a)
memory.set(0x1000, 0xff)
nextInstructionIs(0xed, 0xb0)
// when
processor.execute()
// then
registerValue("pc") shouldBe 0x0000
memory.get(0x2000) shouldBe 0xff
registerValue("hl") shouldBe 0x1001
registerValue("de") shouldBe 0x2001
registerValue("bc") shouldBe 0x0009
flag("h").value shouldBe false
flag("p").value shouldBe false
flag("n").value shouldBe false
}
"ldir" should "operate correctly when bc is 1" in new Machine {
// given
registerContainsValue("hl", 0x1000)
registerContainsValue("de", 0x2000)
registerContainsValue("bc", 0x0001)
memory.set(0x1000, 0xff)
nextInstructionIs(0xed, 0xb0)
// when
processor.execute()
// then
registerValue("pc") shouldBe 0x0002
memory.get(0x2000) shouldBe 0xff
registerValue("hl") shouldBe 0x1001
registerValue("de") shouldBe 0x2001
registerValue("bc") shouldBe 0x0000
flag("h").value shouldBe false
flag("p").value shouldBe false
flag("n").value shouldBe false
}
"ldir" should "operate correctly when bc is 0" in new Machine {
// given
registerContainsValue("hl", 0x1000)
registerContainsValue("de", 0x2000)
registerContainsValue("bc", 0x0000)
memory.set(0x1000, 0xff)
nextInstructionIs(0xed, 0xb0)
// when
processor.execute()
// then
registerValue("pc") shouldBe 0x0000
memory.get(0x2000) shouldBe 0xff
registerValue("hl") shouldBe 0x1001
registerValue("de") shouldBe 0x2001
registerValue("bc") shouldBe 0xffff
flag("h").value shouldBe false
flag("p").value shouldBe false
flag("n").value shouldBe false
}
"ldd" should "operate correctly when bc decrements to a non-zero value" in new Machine {
// given
registerContainsValue("hl", 0x1000)
registerContainsValue("de", 0x2000)
registerContainsValue("bc", 0xbeef)
memory.set(0x1000, 0xba)
nextInstructionIs(0xed, 0xa8)
// when
processor.execute()
// then
memory.get(0x2000) shouldBe 0xba
registerValue("hl") shouldBe 0x0fff
registerValue("de") shouldBe 0x1fff
registerValue("bc") shouldBe 0xbeee
flag("h").value shouldBe false
flag("p").value shouldBe true
flag("n").value shouldBe false
}
"ldd" should "operate correctly when bc decrements to zero" in new Machine {
// given
registerContainsValue("hl", 0x1000)
registerContainsValue("de", 0x2000)
registerContainsValue("bc", 0x0001)
memory.set(0x1000, 0xba)
nextInstructionIs(0xed, 0xa8)
// when
processor.execute()
// then
memory.get(0x2000) shouldBe 0xba
registerValue("hl") shouldBe 0x0fff
registerValue("de") shouldBe 0x1fff
registerValue("bc") shouldBe 0x0000
flag("h").value shouldBe false
flag("p").value shouldBe false
flag("n").value shouldBe false
}
"lddr" should "operate correctly when bc is greater than 1" in new Machine {
// given
registerContainsValue("hl", 0x1000)
registerContainsValue("de", 0x2000)
registerContainsValue("bc", 0x000a)
memory.set(0x1000, 0xff)
nextInstructionIs(0xed, 0xb8)
// when
processor.execute()
// then
registerValue("pc") shouldBe 0x0000
memory.get(0x2000) shouldBe 0xff
registerValue("hl") shouldBe 0x0fff
registerValue("de") shouldBe 0x1fff
registerValue("bc") shouldBe 0x0009
flag("h").value shouldBe false
flag("p").value shouldBe false
flag("n").value shouldBe false
}
"lddr" should "operate correctly when bc is equal to 1" in new Machine {
// given
registerContainsValue("hl", 0x1000)
registerContainsValue("de", 0x2000)
registerContainsValue("bc", 0x0001)
memory.set(0x1000, 0xff)
nextInstructionIs(0xed, 0xb8)
// when
processor.execute()
// then
registerValue("pc") shouldBe 0x0002
memory.get(0x2000) shouldBe 0xff
registerValue("hl") shouldBe 0x0fff
registerValue("de") shouldBe 0x1fff
registerValue("bc") shouldBe 0x0000
flag("h").value shouldBe false
flag("p").value shouldBe false
flag("n").value shouldBe false
}
"lddr" should "operate correctly when bc is 0" in new Machine {
// given
registerContainsValue("hl", 0x1000)
registerContainsValue("de", 0x2000)
registerContainsValue("bc", 0x0000)
memory.set(0x1000, 0xff)
nextInstructionIs(0xed, 0xb8)
// when
processor.execute()
// then
registerValue("pc") shouldBe 0x0000
memory.get(0x2000) shouldBe 0xff
registerValue("hl") shouldBe 0x0fff
registerValue("de") shouldBe 0x1fff
registerValue("bc") shouldBe 0xffff
flag("h").value shouldBe false
flag("p").value shouldBe false
flag("n").value shouldBe false
}
"cpi" should "operate correctly with memory equal to value of a and bc greater than 1" in new Machine {
// given
registerContainsValue("hl", 0x1000)
memory.set(0x1000, 0xbe)
registerContainsValue("bc", 0x0090)
registerContainsValue("a", 0xbe)
nextInstructionIs(0xed, 0xa1)
// when
processor.execute()
// then
registerValue("hl") shouldBe 0x1001
registerValue("bc") shouldBe 0x008f
flag("s").value shouldBe false
flag("z").value shouldBe true
flag("h").value shouldBe false
flag("p").value shouldBe true
flag("n").value shouldBe true
flag("f3").value shouldBe false
flag("f5").value shouldBe false
}
"cpi" should "operate correctly with memory equal to a and bc equal to 1" in new Machine {
// given
registerContainsValue("hl", 0x1000)
memory.set(0x1000, 0xbe)
registerContainsValue("bc", 0x0001)
registerContainsValue("a", 0xbe)
nextInstructionIs(0xed, 0xa1)
// when
processor.execute()
// then
registerValue("hl") shouldBe 0x1001
registerValue("bc") shouldBe 0x0000
flag("s").value shouldBe false
flag("z").value shouldBe true
flag("h").value shouldBe false
flag("p").value shouldBe false
flag("n").value shouldBe true
}
"cpi" should "operate correctly with memory less than a and half borrow" in new Machine {
// given
registerContainsValue("hl", 0x1000)
memory.set(0x1000, binary("00001000"))
registerContainsValue("bc", 0x0001)
registerContainsValue("a", binary("10000000"))
nextInstructionIs(0xed, 0xa1)
// when
processor.execute()
// then
registerValue("hl") shouldBe 0x1001
registerValue("bc") shouldBe 0x0000
flag("s").value shouldBe false
flag("z").value shouldBe false
flag("h").value shouldBe true
flag("p").value shouldBe false
flag("n").value shouldBe true
flag("f3").value shouldBe false
flag("f5").value shouldBe true
}
"cpi" should "operate correctly with memory greater than a and half borrow" in new Machine {
// given
registerContainsValue("hl", 0x1000)
memory.set(0x1000, binary("00001000"))
registerContainsValue("bc", 0x0001)
registerContainsValue("a", binary("10000000"))
nextInstructionIs(0xed, 0xa1)
// when
processor.execute()
// then
registerValue("hl") shouldBe 0x1001
registerValue("bc") shouldBe 0x0000
flag("s").value shouldBe false
flag("z").value shouldBe false
flag("h").value shouldBe true
flag("p").value shouldBe false
flag("n").value shouldBe true
}
"cpi" should "operate correctly with memory greater than a and full borrow" in new Machine {
// given
registerContainsValue("hl", 0x1000)
memory.set(0x1000, binary("10000000"))
registerContainsValue("bc", 0x0001)
registerContainsValue("a", binary("00000001"))
nextInstructionIs(0xed, 0xa1)
// when
processor.execute()
// then
registerValue("hl") shouldBe 0x1001
registerValue("bc") shouldBe 0x0000
flag("s").value shouldBe true
flag("z").value shouldBe false
flag("h").value shouldBe false
flag("p").value shouldBe false
flag("n").value shouldBe true
}
"cpir" should "operate correctly with memory equal to a and bc greater than one" in new Machine {
// given
registerContainsValue("hl", 0x1000)
memory.set(0x1000, 0xbe)
registerContainsValue("bc", 0x0090)
registerContainsValue("a", 0xbe)
nextInstructionIs(0xed, 0xb1)
// when
processor.execute()
// then
registerValue("pc") shouldBe 0x0002
registerValue("hl") shouldBe 0x1001
registerValue("bc") shouldBe 0x008f
flag("s").value shouldBe false
flag("z").value shouldBe true
flag("h").value shouldBe false
flag("p").value shouldBe true
flag("n").value shouldBe true
}
"cpir" should "operate correctly with memory equal to a and bc equal to one" in new Machine {
// given
registerContainsValue("hl", 0x1000)
memory.set(0x1000, 0xbe)
registerContainsValue("bc", 0x0001)
registerContainsValue("a", 0xbe)
nextInstructionIs(0xed, 0xb1)
// when
processor.execute()
// then
registerValue("pc") shouldBe 0x0002
registerValue("hl") shouldBe 0x1001
registerValue("bc") shouldBe 0x0000
flag("s").value shouldBe false
flag("z").value shouldBe true
flag("h").value shouldBe false
flag("p").value shouldBe false
flag("n").value shouldBe true
}
"cpir" should "operate correctly with memory equal to a and bc equal to zero" in new Machine {
// given
registerContainsValue("hl", 0x1000)
memory.set(0x1000, 0xbe)
registerContainsValue("bc", 0x0000)
registerContainsValue("a", 0xbe)
nextInstructionIs(0xed, 0xb1)
// when
processor.execute()
// then
registerValue("pc") shouldBe 0x0002
registerValue("hl") shouldBe 0x1001
registerValue("bc") shouldBe 0xffff
flag("s").value shouldBe false
flag("z").value shouldBe true
flag("h").value shouldBe false
flag("p").value shouldBe true
flag("n").value shouldBe true
}
"cpd" should "operate correctly with memory equal to a and bc greater than one" in new Machine {
// given
registerContainsValue("hl", 0x1000)
memory.set(0x1000, 0xbe)
registerContainsValue("bc", 0x0090)
registerContainsValue("a", 0xbe)
nextInstructionIs(0xed, 0xa9)
// when
processor.execute()
// then
registerValue("hl") shouldBe 0x0fff
registerValue("bc") shouldBe 0x008f
flag("s").value shouldBe false
flag("z").value shouldBe true
flag("h").value shouldBe false
flag("p").value shouldBe true
flag("n").value shouldBe true
}
"cpd" should "operate correctly with memory equal to a and bc equal to one" in new Machine {
// given
registerContainsValue("hl", 0x1000)
memory.set(0x1000, 0xbe)
registerContainsValue("bc", 0x0001)
registerContainsValue("a", 0xbe)
nextInstructionIs(0xed, 0xa9)
// when
processor.execute()
// then
registerValue("hl") shouldBe 0x0fff
registerValue("bc") shouldBe 0x0000
flag("s").value shouldBe false
flag("z").value shouldBe true
flag("h").value shouldBe false
flag("p").value shouldBe false
flag("n").value shouldBe true
}
"cpd" should "operate correctly with memory less than a and half borrow" in new Machine {
// given
registerContainsValue("hl", 0x1000)
memory.set(0x1000, binary("00001000"))
registerContainsValue("bc", 0x0001)
registerContainsValue("a", binary("10000000"))
nextInstructionIs(0xed, 0xa9)
// when
processor.execute()
// then
registerValue("hl") shouldBe 0x0fff
registerValue("bc") shouldBe 0x0000
flag("s").value shouldBe false
flag("z").value shouldBe false
flag("h").value shouldBe true
flag("p").value shouldBe false
flag("n").value shouldBe true
}
"cpd" should "operate correctly with memory greater than a and half borrow" in new Machine {
// given
registerContainsValue("hl", 0x1000)
memory.set(0x1000, binary("00001000"))
registerContainsValue("bc", 0x0001)
registerContainsValue("a", binary("10000000"))
nextInstructionIs(0xed, 0xa9)
// when
processor.execute()
// then
registerValue("hl") shouldBe 0x0fff
registerValue("bc") shouldBe 0x0000
flag("s").value shouldBe false
flag("z").value shouldBe false
flag("h").value shouldBe true
flag("p").value shouldBe false
flag("n").value shouldBe true
}
"cpd" should "operate correctly with memory greater than a and full borrow" in new Machine {
// given
registerContainsValue("hl", 0x1000)
memory.set(0x1000, binary("10000000"))
registerContainsValue("bc", 0x0001)
registerContainsValue("a", binary("00000001"))
nextInstructionIs(0xed, 0xa9)
// when
processor.execute()
// then
registerValue("hl") shouldBe 0x0fff
registerValue("bc") shouldBe 0x0000
flag("s").value shouldBe true
flag("z").value shouldBe false
flag("h").value shouldBe false
flag("p").value shouldBe false
flag("n").value shouldBe true
}
"cpdr" should "operate correctly with memory equal to a and bc greater than one" in new Machine {
// given
registerContainsValue("hl", 0x1000)
memory.set(0x1000, 0xbe)
registerContainsValue("bc", 0x0090)
registerContainsValue("a", 0xbe)
nextInstructionIs(0xed, 0xb9)
// when
processor.execute()
// then
registerValue("pc") shouldBe 0x0002
registerValue("hl") shouldBe 0x0fff
registerValue("bc") shouldBe 0x008f
flag("s").value shouldBe false
flag("z").value shouldBe true
flag("h").value shouldBe false
flag("p").value shouldBe true
flag("n").value shouldBe true
}
"cpdr" should "operate correctly with memory equal to a and bc equal to one" in new Machine {
// given
registerContainsValue("hl", 0x1000)
memory.set(0x1000, 0xbe)
registerContainsValue("bc", 0x0001)
registerContainsValue("a", 0xbe)
nextInstructionIs(0xed, 0xb9)
// when
processor.execute()
// then
registerValue("pc") shouldBe 0x0002
registerValue("hl") shouldBe 0x0fff
registerValue("bc") shouldBe 0x0000
flag("s").value shouldBe false
flag("z").value shouldBe true
flag("h").value shouldBe false
flag("p").value shouldBe false
flag("n").value shouldBe true
}
"cpdr" should "operate correctly with memory equal to a and bc equal to zero" in new Machine {
// given
registerContainsValue("hl", 0x1000)
memory.set(0x1000, 0xbe)
registerContainsValue("bc", 0x0000)
registerContainsValue("a", 0xbe)
nextInstructionIs(0xed, 0xb9)
// when
processor.execute()
// then
registerValue("pc") shouldBe 0x0002
registerValue("hl") shouldBe 0x0fff
registerValue("bc") shouldBe 0xffff
flag("s").value shouldBe false
flag("z").value shouldBe true
flag("h").value shouldBe false
flag("p").value shouldBe true
flag("n").value shouldBe true
}
}
|
alangibson27/plus-f
|
plus-f/src/test/scala/com/socialthingy/plusf/z80/BlockOperationSpec.scala
|
Scala
|
mit
| 16,710 |
package com.github.mdr.mash.completions
import com.github.mdr.mash.inference._
import com.github.mdr.mash.lexer.Token
import com.github.mdr.mash.os.{ EnvironmentInteractions, FileSystem }
import com.github.mdr.mash.parser.AbstractSyntax._
import com.github.mdr.mash.utils.Region
class ArgCompleter(fileSystem: FileSystem, envInteractions: EnvironmentInteractions) {
private val pathCompleter = new PathCompleter(fileSystem, envInteractions)
def completeArg(text: String, stringRegion: Region, parser: CompletionParser): Option[CompletionResult] = {
val expr = parser.parse(text)
for {
      sourceInfo ← expr.sourceInfoOpt // reuse the expression parsed above rather than re-parsing
tokens = sourceInfo.node.tokens
literalToken ← tokens.find(_.region == stringRegion)
InvocationInfo(invocationExpr, argPos) ← InvocationFinder.findInvocationWithLiteralArg(expr, literalToken)
completionSpecs ← getCompletionSpecs(invocationExpr, argPos)
result ← completeFromSpecs(completionSpecs, literalToken)
} yield result
}
private def getCompletionSpecs(invocationExpr: InvocationExpr, argPos: Int): Option[Seq[CompletionSpec]] =
invocationExpr.function.typeOpt.collect {
case Type.BuiltinFunction(f) ⇒
f.getCompletionSpecs(argPos, TypedArguments.from(invocationExpr))
case Type.BoundBuiltinMethod(targetType, m) ⇒
m.getCompletionSpecs(argPos, Some(targetType), TypedArguments.from(invocationExpr))
}
private def completeFromSpecs(completionSpecs: Seq[CompletionSpec], literalToken: Token): Option[CompletionResult] =
completionSpecs.map(spec ⇒ completeFromSpec(spec, literalToken)).fold(None)(CompletionResult.merge)
private def completeFromSpec(spec: CompletionSpec, literalToken: Token): Option[CompletionResult] = {
val withoutQuotes = literalToken.text.filterNot(_ == '"')
import CompletionSpec._
spec match {
case Directory | File ⇒
def completePaths(substring: Boolean) =
pathCompleter.completePaths(withoutQuotes, literalToken.region, directoriesOnly = spec == Directory, substring = substring)
completePaths(substring = false) orElse completePaths(substring = true)
case Members(targetType) ⇒
val members = MemberCompleter.completeStringMember(targetType, withoutQuotes)
CompletionResult.of(members, literalToken.region)
case Items(items) ⇒
val matches = items.filter(_.startsWith(withoutQuotes))
val prefixResult = CompletionResult.of(matches.map(s ⇒ Completion(s, isQuoted = true)), literalToken.region)
prefixResult.orElse { // substring:
val matches = items.filter(_.contains(withoutQuotes))
def makeCompletion(item: String): Completion = {
val index = item.indexOf(withoutQuotes)
val location = CompletionLocation(index, index)
Completion(item, isQuoted = true, location = location)
}
CompletionResult.of(matches.map(makeCompletion), literalToken.region)
}
}
}
}
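// Standalone illustration (an assumption, not mash API): the two-pass matching
// strategy completeFromSpec applies to Items specs, preferring prefix matches
// and falling back to substring matches only when no prefix match exists.
object ItemsMatchingExample {
  def matchItems(items: Seq[String], typed: String): Seq[String] = {
    val prefixMatches = items.filter(_.startsWith(typed))
    if (prefixMatches.nonEmpty) prefixMatches
    else items.filter(_.contains(typed))
  }
  // matchItems(Seq("stdout", "stderr", "output"), "out") yields Seq("output")
}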
|
mdr/mash
|
src/main/scala/com/github/mdr/mash/completions/ArgCompleter.scala
|
Scala
|
mit
| 3,047 |
/*
* Copyright 2017 Datamountaineer.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datamountaineer.streamreactor.connect.elastic6
import com.datamountaineer.kcql.Kcql
import com.datamountaineer.streamreactor.connect.elastic6.config.ElasticSettings
import com.datamountaineer.streamreactor.connect.elastic6.indexname.CreateIndex.getIndexName
import com.datamountaineer.streamreactor.connect.elastic6.config.ClientType
import com.sksamuel.elastic4s.bulk.BulkDefinition
import com.sksamuel.elastic4s.http.HttpClient
import com.sksamuel.elastic4s.mappings.MappingDefinition
import com.sksamuel.elastic4s.xpack.security.XPackElasticClient
import com.sksamuel.elastic4s.{ElasticsearchClientUri, TcpClient}
import org.elasticsearch.common.settings.Settings
import org.elasticsearch.client.RestClientBuilder.{RequestConfigCallback, HttpClientConfigCallback}
import org.apache.http.client.config.RequestConfig.Builder
import org.apache.http.impl.client.BasicCredentialsProvider
import org.apache.http.impl.nio.client.HttpAsyncClientBuilder
import org.apache.http.auth.{UsernamePasswordCredentials, AuthScope}
import scala.concurrent.Future
trait KElasticClient extends AutoCloseable {
def index(kcql: Kcql)
def execute(definition: BulkDefinition): Future[Any]
}
object KElasticClient {
def getClient(settings: ElasticSettings, essettings: Settings, uri: ElasticsearchClientUri): KElasticClient = {
if (settings.clientType.equals(ClientType.HTTP)) {
if (settings.httpBasicAuthUsername.nonEmpty && settings.httpBasicAuthPassword.nonEmpty) {
lazy val provider = {
val provider = new BasicCredentialsProvider
val credentials = new UsernamePasswordCredentials(settings.httpBasicAuthUsername, settings.httpBasicAuthPassword)
provider.setCredentials(AuthScope.ANY, credentials)
provider
}
new HttpKElasticClient(HttpClient(uri, new RequestConfigCallback {
override def customizeRequestConfig(requestConfigBuilder: Builder) = {
requestConfigBuilder
}
}, new HttpClientConfigCallback {
override def customizeHttpClient(httpClientBuilder: HttpAsyncClientBuilder) = {
httpClientBuilder.setDefaultCredentialsProvider(provider)
}
}))
} else {
new HttpKElasticClient(HttpClient(uri))
}
}
else if (settings.xPackSettings.nonEmpty) {
new TcpKElasticClient(XPackElasticClient(essettings, uri, settings.xPackPlugins: _*))
} else {
new TcpKElasticClient(TcpClient.transport(essettings, uri))
}
}
}
class TcpKElasticClient(client: TcpClient) extends KElasticClient {
import com.sksamuel.elastic4s.ElasticDsl._
override def index(kcql: Kcql): Unit = {
require(kcql.isAutoCreate, s"Auto-creating indexes hasn't been enabled for target:${kcql.getTarget}")
val indexName = getIndexName(kcql)
client.execute {
Option(kcql.getDocType) match {
case None => createIndex(indexName)
case Some(documentType) => createIndex(indexName).mappings(MappingDefinition(documentType))
}
}
}
override def execute(definition: BulkDefinition): Future[Any] = client.execute(definition)
override def close(): Unit = client.close()
}
class HttpKElasticClient(client: HttpClient) extends KElasticClient {
import com.sksamuel.elastic4s.http.ElasticDsl._
override def index(kcql: Kcql): Unit = {
require(kcql.isAutoCreate, s"Auto-creating indexes hasn't been enabled for target:${kcql.getTarget}")
val indexName = getIndexName(kcql)
client.execute {
Option(kcql.getDocType) match {
case None => createIndex(indexName)
case Some(documentType) => createIndex(indexName).mappings(MappingDefinition(documentType))
}
}
}
override def execute(definition: BulkDefinition): Future[Any] = client.execute(definition)
override def close(): Unit = client.close()
}
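// Hedged wiring sketch (not part of the original file): building a client from
// settings. The transport URI and cluster name are placeholders.
object KElasticClientExample {
  def connect(settings: ElasticSettings): KElasticClient = {
    val uri = ElasticsearchClientUri("elasticsearch://localhost:9300")
    val esSettings = Settings.builder().put("cluster.name", "elasticsearch").build()
    KElasticClient.getClient(settings, esSettings, uri)
  }
}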
|
CodeSmell/stream-reactor
|
kafka-connect-elastic6/src/main/scala/com/datamountaineer/streamreactor/connect/elastic6/KElasticClient.scala
|
Scala
|
apache-2.0
| 4,460 |
package worker
import java.util.concurrent.Executor
import akka.actor.{Actor, ActorLogging, Status}
import akka.pattern.pipe
import com.cave.metrics.data._
import com.cave.metrics.data.evaluator.{CheckEvaluator, DataFetcher}
import init.Init
import scala.concurrent.{ExecutionContext, Future}
import scala.util.Try
object Checker {
type Result = Try[Boolean]
case class Done(alarm: Result)
case class Aborted(reason: String)
}
class Checker(check: Check) extends Actor with ActorLogging {
implicit val exec = context.dispatcher.asInstanceOf[Executor with ExecutionContext]
val evaluator = new CheckEvaluator(check)
def fetcher = new DataFetcher(Init.influxClientFactory)
this run check pipeTo self
def receive = {
case alarm: Checker.Result =>
context.parent ! Checker.Done(alarm)
stop()
case x: Status.Failure =>
context.parent ! Checker.Aborted(x.cause.getMessage)
stop()
}
def stop(): Unit = {
context stop self
}
private[worker] def run(check: Check)(implicit ec: ExecutionContext): Future[Try[Boolean]] = {
val result = evaluator.evaluate(fetcher)
result map { v =>
log.warning("Result of evaluation: " + v)
}
result
}
}
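import akka.actor.Props
// Hedged usage sketch, not part of the original worker: a parent actor that
// spawns one Checker per Check and logs the outcome messages defined above.
// The actor names and log levels are invented for illustration.
class CheckerSupervisorExample(check: Check) extends Actor with ActorLogging {
  context.actorOf(Props(new Checker(check)), "checker")
  def receive = {
    case Checker.Done(alarm)     => log.info("check finished: {}", alarm)
    case Checker.Aborted(reason) => log.warning("check aborted: {}", reason)
  }
}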
|
gilt/cave
|
worker/app/worker/Checker.scala
|
Scala
|
mit
| 1,222 |
/*
* Copyright (c) 2013-2014 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
import sbt._
import Keys._
object SnowplowStreamEnrichBuild extends Build {
import Dependencies._
import BuildSettings._
// Configure prompt to show current project
override lazy val settings = super.settings :+ {
shellPrompt := { s => Project.extract(s).currentProject.id + " > " }
}
// Define our project, with basic project information and library dependencies
lazy val project = Project("snowplow-stream-enrich", file("."))
.settings(buildSettings: _*)
.settings(
libraryDependencies ++= Seq(
Libraries.logging,
Libraries.httpCore,
Libraries.httpClient,
Libraries.jacksonCore,
Libraries.argot,
Libraries.config,
Libraries.scalaUtil,
Libraries.snowplowRawEvent,
Libraries.snowplowCommonEnrich,
Libraries.scalazon,
Libraries.scalaz7,
Libraries.specs2,
Libraries.scalazSpecs2,
Libraries.commonsLang3,
Libraries.thrift,
Libraries.slf4j,
Libraries.log4jOverSlf4j,
Libraries.awsSdk,
Libraries.kinesisClient,
Libraries.igluClient,
Libraries.snowplowTracker,
Libraries.kafkaClients
// Add your additional libraries here (comma-separated)...
)
)
}
|
haensel-ams/snowplow
|
3-enrich/stream-enrich/project/SnowplowKinesisEnrichBuild.scala
|
Scala
|
apache-2.0
| 1,979 |
object Overflow3 {
def foo3(x: Int): Int = {
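    // rejected under strict arithmetic: x - 1 underflows when x == Int.MinValue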
x - 1
}
}
|
epfl-lara/stainless
|
frontends/benchmarks/strictarithmetic/invalid/Overflow3.scala
|
Scala
|
apache-2.0
| 66 |
/*
* Copyright 2013 Akiyoshi Sugiki, University of Tsukuba
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kumoi.impl.rmi.hook
import java.lang.reflect.Method
import java.rmi.server.RemoteRef
import kumoi.core._
import kumoi.core.log._
import kumoi.core.rmi._
import kumoi.core.rmi.hook._
import kumoi.core.or.hook.ReflectInfo
/**
*
* @author Akiyoshi Sugiki
*/
object ClientSideInvoke {
private val logging = Logging("RMI")
private val snames = Config("impl.rmi.hook.client.invokers",
"kumoi.impl.rmi.hook.aaa.ClientSecuredInvoke," +
"kumoi.impl.rmi.hook.cache.ClientCachedInvoke," +
"kumoi.impl.rmi.hook.fob.ClientFobInvoke," +
"kumoi.impl.rmi.hook.cons.ClientConsistentInvoke," +
"kumoi.impl.rmi.hook.real.ClientRealInvoke").split(",").map(_.trim).toList
private val stack = snames.map(Class.forName(_).newInstance.asInstanceOf[ClientInvoke])
logging.debug("Client Invokers = " + stack)
}
class ClientSideInvoke extends ClientInvoke {
import ClientSideInvoke._
def start(ref: RemoteRef) {
for (s <- stack) s.start(ref)
}
  def invoke(ref: RemoteRef, proxy: Any, method: Method, args: Array[Object],
      nouse: List[ClientInvoke], nouseReflect: ReflectInfo) = {
    val (next :: rest) = stack
    val ifaces = proxy.getClass.getInterfaces
    // The incoming ReflectInfo is ignored here; a fresh one is rebuilt from the
    // proxy and method so the invoker stack sees the actual call site.
    val reflect = ReflectInfo(ifaces, if (ifaces.length > 0) ifaces(0).getName else null,
        method.getDeclaredAnnotations,
        method.getName, method.getParameterTypes, method.getReturnType,
        wrap(args))
    next.invoke(ref, proxy, method, args, rest, reflect)
}
private def wrap(a: Array[Object]) = a match {
case null => Array[Object]()
case _ => a
}
}
|
axi-sugiki/kumoi
|
src/kumoi/impl/rmi/hook/ClientSideInvoke.scala
|
Scala
|
apache-2.0
| 2,209 |
package vep.app
import akka.http.scaladsl.server.Route
import akka.http.scaladsl.server.Directives._
import vep.app.common.CommonIntegrationModule
import vep.app.production.ProductionIntegrationModule
import vep.app.seo.SeoIntegrationModule
import vep.app.user.UserIntegrationModule
import vep.framework.database.DatabaseConnection
import vep.{Configuration, Environment}
import vep.framework.mailer.{ConsoleMailer, DefaultMailer, Mailer}
import vep.framework.router.CorsHelper
import scala.concurrent.ExecutionContext
trait AppIntegrationModule
extends UserIntegrationModule
with CommonIntegrationModule
with ProductionIntegrationModule
with SeoIntegrationModule
with AppRouter
with DatabaseConnection {
def executionContext: ExecutionContext
lazy val configuration = new Configuration()
lazy val mailer: Mailer = configuration.environment match {
case Environment.prod => new DefaultMailer(configuration)
case _ => new ConsoleMailer
}
}
trait AppRouter {
self: AppIntegrationModule =>
lazy val route: Route = encodeResponse {
apiRoute ~ seoRoute ~ clientRouter.route
}
lazy val apiRoute: Route = {
val route = pathPrefix("api") {
commonRoute ~ userRoute ~ productionRoute
}
if (configuration.environment == Environment.dev) {
CorsHelper.withCors(route)
} else {
route
}
}
lazy val clientRouter = new ClientRouter
}
|
kneelnrise/vep
|
src/main/scala/vep/app/AppIntegrationModule.scala
|
Scala
|
mit
| 1,417 |
package akka.ainterface
import akka.ainterface.datatype.{ErlAtom, ErlPid, ErlReference, ErlTerm}
private[ainterface] sealed abstract class ControlMessage {
def target: Target
}
private[ainterface] object ControlMessage {
/**
* Cookie is currently unused.
* @see [[http://comments.gmane.org/gmane.comp.lang.erlang.general/74817]]
*/
val cookie: ErlAtom = ErlAtom("")
}
private[ainterface] sealed abstract class Target {
def nodeName: NodeName
}
private[ainterface] object Target {
final case class Pid(pid: ErlPid) extends Target {
override def nodeName: NodeName = NodeName(pid.nodeName)
}
final case class Name(name: ErlAtom, nodeName: NodeName) extends Target
}
/**
* {1, FromPid, ToPid}
*/
private[ainterface] final case class Link(from: ErlPid, to: ErlPid) extends ControlMessage {
override def target: Target = Target.Pid(to)
}
/**
* {2, Cookie, ToPid} and a message.
*/
private[ainterface] final case class Send(to: ErlPid, message: ErlTerm) extends ControlMessage {
def cookie: ErlAtom = ControlMessage.cookie
override def target: Target = Target.Pid(to)
}
/**
* {3, FromPid, ToPid, Reason}
* A process terminates, this message is sent to linked processes.
*/
private[ainterface] final case class Exit(from: ErlPid,
to: ErlPid,
reason: ErlTerm) extends ControlMessage {
override def target: Target = Target.Pid(to)
}
/**
* {4, FromPid, ToPid}
*/
private[ainterface] final case class Unlink(from: ErlPid, to: ErlPid) extends ControlMessage {
override def target: Target = Target.Pid(to)
}
/**
* {5}
* Currently not used.
*/
//private[ainterface] case object NodeLink extends ControlMessage
/**
* {6, FromPid, Cookie, ToName} and a message.
*/
private[ainterface] final case class RegSend(from: ErlPid,
toName: Target.Name,
message: ErlTerm) extends ControlMessage {
def cookie: ErlAtom = ControlMessage.cookie
override def target: Target = toName
}
/**
* {7, FromPid, ToPid}
*/
private[ainterface] final case class GroupLeader(from: ErlPid, to: ErlPid) extends ControlMessage {
override def target: Target = Target.Pid(to)
}
/**
* {8, FromPid, ToPid, Reason}
* erlang:exit/2 generates this message.
*/
private[ainterface] final case class Exit2(from: ErlPid,
to: ErlPid,
reason: ErlTerm) extends ControlMessage {
override def target: Target = Target.Pid(to)
}
/**
* {12, Cookie, ToPid, TraceToken} and a message.
*/
private[ainterface] final case class SendTT(to: ErlPid,
traceToken: ErlTerm,
message: ErlTerm) extends ControlMessage {
def cookie: ErlAtom = ControlMessage.cookie
override def target: Target = Target.Pid(to)
}
/**
* {13, FromPid, ToPid, TraceToken, Reason}
*/
private[ainterface] final case class ExitTT(from: ErlPid,
to: ErlPid,
traceToken: ErlTerm,
reason: ErlTerm) extends ControlMessage {
override def target: Target = Target.Pid(to)
}
/**
* {16, FromPid, Cookie, ToName, TraceToken} and a message.
*/
private[ainterface] case class RegSendTT(from: ErlPid,
toName: Target.Name,
traceToken: ErlTerm,
message: ErlTerm) extends ControlMessage {
def cookie: ErlAtom = ControlMessage.cookie
override def target: Target = toName
}
/**
* {18, FromPid, ToPid, TraceToken, Reason}
*/
private[ainterface] final case class Exit2TT(from: ErlPid,
to: ErlPid,
traceToken: ErlTerm,
reason: ErlTerm) extends ControlMessage {
override def target: Target = Target.Pid(to)
}
/**
* {19, FromPid, ToProc, Ref}
*/
private[ainterface] final case class MonitorP(from: ErlPid,
to: Target,
ref: ErlReference) extends ControlMessage {
override def target: Target = to
}
/**
* {20, FromPid, ToProc, Ref}
*/
private[ainterface] final case class DemonitorP(from: ErlPid,
to: Target,
ref: ErlReference) extends ControlMessage {
override def target: Target = to
}
/**
* {21, FromProc, ToPid, Ref, Reason}
*/
private[ainterface] final case class MonitorPExit(from: Target,
to: ErlPid,
ref: ErlReference,
reason: ErlTerm) extends ControlMessage {
override def target: Target = Target.Pid(to)
}
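/**
 * Illustrative sketch (not in the original file): resolving the node a control
 * message should be routed to, using the Target ADT defined above.
 */
private[ainterface] object ControlMessageRoutingExample {
  def destinationNode(message: ControlMessage): NodeName = message.target match {
    case Target.Pid(pid)      => NodeName(pid.nodeName) // node owning the destination pid
    case Target.Name(_, node) => node                   // registered-name delivery
  }
}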
|
ainterface/ainterface
|
ainterface/src/main/scala/akka/ainterface/ControlMessage.scala
|
Scala
|
apache-2.0
| 4,982 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import kafka.api.Request
import kafka.cluster.{BrokerEndPoint, Partition, Replica}
import kafka.log.LogManager
import kafka.server.AbstractFetcherThread.ResultWithPartitions
import kafka.server.epoch.LeaderEpochCache
import kafka.utils.{DelayedItem, TestUtils}
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.errors.{KafkaStorageException, ReplicaNotAvailableException}
import org.apache.kafka.common.protocol.Errors
import org.apache.kafka.common.requests.EpochEndOffset
import org.apache.kafka.common.requests.EpochEndOffset.{UNDEFINED_EPOCH_OFFSET, UNDEFINED_EPOCH}
import org.easymock.EasyMock._
import org.easymock.{Capture, CaptureType, EasyMock, IAnswer}
import org.junit.Assert._
import org.junit.Test
import scala.collection.JavaConverters._
import scala.collection.{Seq, Map}
class ReplicaAlterLogDirsThreadTest {
private val t1p0 = new TopicPartition("topic1", 0)
private val t1p1 = new TopicPartition("topic1", 1)
@Test
def issuesEpochRequestFromLocalReplica(): Unit = {
val config = KafkaConfig.fromProps(TestUtils.createBrokerConfig(1, "localhost:1234"))
//Setup all dependencies
val leaderEpochs = createNiceMock(classOf[LeaderEpochCache])
val replica = createNiceMock(classOf[Replica])
val futureReplica = createNiceMock(classOf[Replica])
val partition = createMock(classOf[Partition])
val replicaManager = createMock(classOf[ReplicaManager])
val leaderEpoch = 2
val leo = 13
//Stubs
expect(replica.epochs).andReturn(Some(leaderEpochs)).anyTimes()
expect(leaderEpochs.endOffsetFor(leaderEpoch)).andReturn((leaderEpoch, leo)).anyTimes()
stub(replica, replica, futureReplica, partition, replicaManager)
replay(leaderEpochs, replicaManager, replica)
val endPoint = new BrokerEndPoint(0, "localhost", 1000)
val thread = new ReplicaAlterLogDirsThread(
"alter-logs-dirs-thread-test1",
sourceBroker = endPoint,
brokerConfig = config,
replicaMgr = replicaManager,
quota = null,
brokerTopicStats = null)
val result = thread.fetchEpochsFromLeader(Map(t1p0 -> leaderEpoch, t1p1 -> leaderEpoch))
val expected = Map(
t1p0 -> new EpochEndOffset(Errors.NONE, leaderEpoch, leo),
t1p1 -> new EpochEndOffset(Errors.NONE, leaderEpoch, leo)
)
assertEquals("results from leader epoch request should have offset from local replica",
expected, result)
}
@Test
def fetchEpochsFromLeaderShouldHandleExceptionFromGetLocalReplica(): Unit = {
val config = KafkaConfig.fromProps(TestUtils.createBrokerConfig(1, "localhost:1234"))
//Setup all dependencies
val leaderEpochs = createNiceMock(classOf[LeaderEpochCache])
val replica = createNiceMock(classOf[Replica])
val partition = createMock(classOf[Partition])
val replicaManager = createMock(classOf[ReplicaManager])
val leaderEpoch = 2
val leo = 13
//Stubs
expect(replica.epochs).andReturn(Some(leaderEpochs)).anyTimes()
expect(leaderEpochs.endOffsetFor(leaderEpoch)).andReturn((leaderEpoch, leo)).anyTimes()
expect(replicaManager.getReplicaOrException(t1p0)).andReturn(replica).anyTimes()
expect(replicaManager.getPartition(t1p0)).andReturn(Some(partition)).anyTimes()
expect(replicaManager.getReplicaOrException(t1p1)).andThrow(new KafkaStorageException).once()
expect(replicaManager.getPartition(t1p1)).andReturn(Some(partition)).anyTimes()
replay(leaderEpochs, replicaManager, replica)
val endPoint = new BrokerEndPoint(0, "localhost", 1000)
val thread = new ReplicaAlterLogDirsThread(
"alter-logs-dirs-thread-test1",
sourceBroker = endPoint,
brokerConfig = config,
replicaMgr = replicaManager,
quota = null,
brokerTopicStats = null)
val result = thread.fetchEpochsFromLeader(Map(t1p0 -> leaderEpoch, t1p1 -> leaderEpoch))
val expected = Map(
t1p0 -> new EpochEndOffset(Errors.NONE, leaderEpoch, leo),
t1p1 -> new EpochEndOffset(Errors.KAFKA_STORAGE_ERROR, UNDEFINED_EPOCH, UNDEFINED_EPOCH_OFFSET)
)
assertEquals(expected, result)
}
@Test
def shouldTruncateToReplicaOffset(): Unit = {
//Create a capture to track what partitions/offsets are truncated
val truncateToCapture: Capture[Long] = newCapture(CaptureType.ALL)
// Setup all the dependencies
val config = KafkaConfig.fromProps(TestUtils.createBrokerConfig(1, "localhost:1234"))
val quotaManager = createNiceMock(classOf[ReplicationQuotaManager])
val leaderEpochsT1p0 = createMock(classOf[LeaderEpochCache])
val leaderEpochsT1p1 = createMock(classOf[LeaderEpochCache])
val futureReplicaLeaderEpochs = createMock(classOf[LeaderEpochCache])
val logManager = createMock(classOf[LogManager])
val replicaT1p0 = createNiceMock(classOf[Replica])
val replicaT1p1 = createNiceMock(classOf[Replica])
    // one future replica mock, because our mocking methods return the same values for both future replicas
val futureReplica = createNiceMock(classOf[Replica])
val partition = createMock(classOf[Partition])
val replicaManager = createMock(classOf[ReplicaManager])
val responseCallback: Capture[Seq[(TopicPartition, FetchPartitionData)] => Unit] = EasyMock.newCapture()
val leaderEpoch = 2
val futureReplicaLEO = 191
val replicaT1p0LEO = 190
val replicaT1p1LEO = 192
//Stubs
expect(partition.truncateTo(capture(truncateToCapture), anyBoolean())).anyTimes()
expect(replicaT1p0.epochs).andReturn(Some(leaderEpochsT1p0)).anyTimes()
expect(replicaT1p1.epochs).andReturn(Some(leaderEpochsT1p1)).anyTimes()
expect(futureReplica.epochs).andReturn(Some(futureReplicaLeaderEpochs)).anyTimes()
expect(futureReplica.logEndOffset).andReturn(new LogOffsetMetadata(futureReplicaLEO)).anyTimes()
expect(futureReplicaLeaderEpochs.latestEpoch).andReturn(leaderEpoch).anyTimes()
expect(leaderEpochsT1p0.endOffsetFor(leaderEpoch)).andReturn((leaderEpoch, replicaT1p0LEO)).anyTimes()
expect(leaderEpochsT1p1.endOffsetFor(leaderEpoch)).andReturn((leaderEpoch, replicaT1p1LEO)).anyTimes()
expect(futureReplicaLeaderEpochs.endOffsetFor(leaderEpoch)).andReturn((leaderEpoch, futureReplicaLEO)).anyTimes()
expect(replicaManager.logManager).andReturn(logManager).anyTimes()
stubWithFetchMessages(replicaT1p0, replicaT1p1, futureReplica, partition, replicaManager, responseCallback)
replay(leaderEpochsT1p0, leaderEpochsT1p1, futureReplicaLeaderEpochs, replicaManager,
logManager, quotaManager, replicaT1p0, replicaT1p1, futureReplica, partition)
//Create the thread
val endPoint = new BrokerEndPoint(0, "localhost", 1000)
val thread = new ReplicaAlterLogDirsThread(
"alter-logs-dirs-thread-test1",
sourceBroker = endPoint,
brokerConfig = config,
replicaMgr = replicaManager,
quota = quotaManager,
brokerTopicStats = null)
thread.addPartitions(Map(t1p0 -> 0, t1p1 -> 0))
//Run it
thread.doWork()
//We should have truncated to the offsets in the response
assertTrue(truncateToCapture.getValues.asScala.contains(replicaT1p0LEO))
assertTrue(truncateToCapture.getValues.asScala.contains(futureReplicaLEO))
}
@Test
def shouldTruncateToEndOffsetOfLargestCommonEpoch(): Unit = {
//Create a capture to track what partitions/offsets are truncated
val truncateToCapture: Capture[Long] = newCapture(CaptureType.ALL)
// Setup all the dependencies
val config = KafkaConfig.fromProps(TestUtils.createBrokerConfig(1, "localhost:1234"))
val quotaManager = createNiceMock(classOf[ReplicationQuotaManager])
val leaderEpochs = createMock(classOf[LeaderEpochCache])
val futureReplicaLeaderEpochs = createMock(classOf[LeaderEpochCache])
val logManager = createMock(classOf[LogManager])
val replica = createNiceMock(classOf[Replica])
    // one future replica mock, because our mocking methods return the same values for both future replicas
val futureReplica = createNiceMock(classOf[Replica])
val partition = createMock(classOf[Partition])
val replicaManager = createMock(classOf[ReplicaManager])
val responseCallback: Capture[Seq[(TopicPartition, FetchPartitionData)] => Unit] = EasyMock.newCapture()
val leaderEpoch = 5
val futureReplicaLEO = 195
val replicaLEO = 200
val replicaEpochEndOffset = 190
val futureReplicaEpochEndOffset = 191
//Stubs
expect(partition.truncateTo(capture(truncateToCapture), anyBoolean())).anyTimes()
expect(replica.epochs).andReturn(Some(leaderEpochs)).anyTimes()
expect(futureReplica.epochs).andReturn(Some(futureReplicaLeaderEpochs)).anyTimes()
expect(futureReplica.logEndOffset).andReturn(new LogOffsetMetadata(futureReplicaLEO)).anyTimes()
expect(futureReplicaLeaderEpochs.latestEpoch).andReturn(leaderEpoch).once()
expect(futureReplicaLeaderEpochs.latestEpoch).andReturn(leaderEpoch - 2).once()
// leader replica truncated and fetched new offsets with new leader epoch
expect(leaderEpochs.endOffsetFor(leaderEpoch)).andReturn((leaderEpoch - 1, replicaLEO)).anyTimes()
// but future replica does not know about this leader epoch, so returns a smaller leader epoch
expect(futureReplicaLeaderEpochs.endOffsetFor(leaderEpoch - 1)).andReturn((leaderEpoch - 2, futureReplicaLEO)).anyTimes()
// finally, the leader replica knows about the leader epoch and returns end offset
expect(leaderEpochs.endOffsetFor(leaderEpoch - 2)).andReturn((leaderEpoch - 2, replicaEpochEndOffset)).anyTimes()
expect(futureReplicaLeaderEpochs.endOffsetFor(leaderEpoch - 2)).andReturn((leaderEpoch - 2, futureReplicaEpochEndOffset)).anyTimes()
expect(replicaManager.logManager).andReturn(logManager).anyTimes()
stubWithFetchMessages(replica, replica, futureReplica, partition, replicaManager, responseCallback)
replay(leaderEpochs, futureReplicaLeaderEpochs, replicaManager, logManager, quotaManager, replica, futureReplica, partition)
//Create the thread
val endPoint = new BrokerEndPoint(0, "localhost", 1000)
val thread = new ReplicaAlterLogDirsThread(
"alter-logs-dirs-thread-test1",
sourceBroker = endPoint,
brokerConfig = config,
replicaMgr = replicaManager,
quota = quotaManager,
brokerTopicStats = null)
thread.addPartitions(Map(t1p0 -> 0))
// First run will result in another offset for leader epoch request
thread.doWork()
// Second run should actually truncate
thread.doWork()
//We should have truncated to the offsets in the response
assertTrue("Expected offset " + replicaEpochEndOffset + " in captured truncation offsets " + truncateToCapture.getValues,
truncateToCapture.getValues.asScala.contains(replicaEpochEndOffset))
}
@Test
def shouldTruncateToInitialFetchOffsetIfReplicaReturnsUndefinedOffset(): Unit = {
//Create a capture to track what partitions/offsets are truncated
val truncated: Capture[Long] = newCapture(CaptureType.ALL)
// Setup all the dependencies
val config = KafkaConfig.fromProps(TestUtils.createBrokerConfig(1, "localhost:1234"))
val quotaManager = createNiceMock(classOf[ReplicationQuotaManager])
val logManager = createMock(classOf[LogManager])
val replica = createNiceMock(classOf[Replica])
val futureReplica = createNiceMock(classOf[Replica])
val leaderEpochs = createNiceMock(classOf[LeaderEpochCache])
val futureReplicaLeaderEpochs = createMock(classOf[LeaderEpochCache])
val partition = createMock(classOf[Partition])
val replicaManager = createMock(classOf[ReplicaManager])
val responseCallback: Capture[Seq[(TopicPartition, FetchPartitionData)] => Unit] = EasyMock.newCapture()
val initialFetchOffset = 100
val futureReplicaLEO = 111
//Stubs
expect(partition.truncateTo(capture(truncated), anyBoolean())).anyTimes()
expect(futureReplica.logEndOffset).andReturn(new LogOffsetMetadata(futureReplicaLEO)).anyTimes()
expect(replicaManager.logManager).andReturn(logManager).anyTimes()
expect(replica.epochs).andReturn(Some(leaderEpochs)).anyTimes()
expect(futureReplica.epochs).andReturn(Some(futureReplicaLeaderEpochs)).anyTimes()
// pretend this is a completely new future replica, with no leader epochs recorded
expect(futureReplicaLeaderEpochs.latestEpoch).andReturn(UNDEFINED_EPOCH).anyTimes()
// since UNDEFINED_EPOCH is -1 which will be lower than any valid leader epoch, the method
// will return UNDEFINED_EPOCH_OFFSET if requested epoch is lower than the first epoch cached
expect(leaderEpochs.endOffsetFor(UNDEFINED_EPOCH)).andReturn((UNDEFINED_EPOCH, UNDEFINED_EPOCH_OFFSET)).anyTimes()
stubWithFetchMessages(replica, replica, futureReplica, partition, replicaManager, responseCallback)
replay(replicaManager, logManager, quotaManager, leaderEpochs, futureReplicaLeaderEpochs,
replica, futureReplica, partition)
//Create the thread
val endPoint = new BrokerEndPoint(0, "localhost", 1000)
val thread = new ReplicaAlterLogDirsThread(
"alter-logs-dirs-thread-test1",
sourceBroker = endPoint,
brokerConfig = config,
replicaMgr = replicaManager,
quota = quotaManager,
brokerTopicStats = null)
thread.addPartitions(Map(t1p0 -> initialFetchOffset))
//Run it
thread.doWork()
//We should have truncated to initial fetch offset
assertEquals("Expected future replica to truncate to initial fetch offset if replica returns UNDEFINED_EPOCH_OFFSET",
initialFetchOffset, truncated.getValue)
}
@Test
def shouldPollIndefinitelyIfReplicaNotAvailable(): Unit = {
//Create a capture to track what partitions/offsets are truncated
val truncated: Capture[Long] = newCapture(CaptureType.ALL)
// Setup all the dependencies
val config = KafkaConfig.fromProps(TestUtils.createBrokerConfig(1, "localhost:1234"))
val quotaManager = createNiceMock(classOf[kafka.server.ReplicationQuotaManager])
val leaderEpochs = createNiceMock(classOf[LeaderEpochCache])
val futureReplicaLeaderEpochs = createMock(classOf[LeaderEpochCache])
val logManager = createMock(classOf[kafka.log.LogManager])
val replica = createNiceMock(classOf[Replica])
val futureReplica = createNiceMock(classOf[Replica])
val partition = createMock(classOf[Partition])
val replicaManager = createMock(classOf[kafka.server.ReplicaManager])
val responseCallback: Capture[Seq[(TopicPartition, FetchPartitionData)] => Unit] = EasyMock.newCapture()
val futureReplicaLeaderEpoch = 1
val futureReplicaLEO = 290
val replicaLEO = 300
//Stubs
expect(partition.truncateTo(capture(truncated), anyBoolean())).anyTimes()
expect(replica.epochs).andReturn(Some(leaderEpochs)).anyTimes()
expect(futureReplica.epochs).andReturn(Some(futureReplicaLeaderEpochs)).anyTimes()
expect(futureReplicaLeaderEpochs.latestEpoch).andReturn(futureReplicaLeaderEpoch).anyTimes()
expect(leaderEpochs.endOffsetFor(futureReplicaLeaderEpoch)).andReturn((futureReplicaLeaderEpoch, replicaLEO)).anyTimes()
expect(futureReplicaLeaderEpochs.endOffsetFor(futureReplicaLeaderEpoch)).andReturn((futureReplicaLeaderEpoch, futureReplicaLEO)).anyTimes()
expect(futureReplica.logEndOffset).andReturn(new LogOffsetMetadata(futureReplicaLEO)).anyTimes()
expect(replicaManager.getReplica(t1p0)).andReturn(Some(replica)).anyTimes()
expect(replicaManager.getReplica(t1p0, Request.FutureLocalReplicaId)).andReturn(Some(futureReplica)).anyTimes()
expect(replicaManager.getReplicaOrException(t1p0, Request.FutureLocalReplicaId)).andReturn(futureReplica).anyTimes()
    // this will cause fetchEpochsFromLeader to return an error with an undefined offset
expect(replicaManager.getReplicaOrException(t1p0)).andThrow(new ReplicaNotAvailableException("")).times(3)
expect(replicaManager.getReplicaOrException(t1p0)).andReturn(replica).once()
expect(replicaManager.getPartition(t1p0)).andReturn(Some(partition)).anyTimes()
expect(replicaManager.logManager).andReturn(logManager).anyTimes()
expect(replicaManager.fetchMessages(
EasyMock.anyLong(),
EasyMock.anyInt(),
EasyMock.anyInt(),
EasyMock.anyInt(),
EasyMock.anyObject(),
EasyMock.anyObject(),
EasyMock.anyObject(),
EasyMock.capture(responseCallback),
EasyMock.anyObject()))
.andAnswer(new IAnswer[Unit] {
override def answer(): Unit = {
responseCallback.getValue.apply(Seq.empty[(TopicPartition, FetchPartitionData)])
}
}).anyTimes()
replay(leaderEpochs, futureReplicaLeaderEpochs, replicaManager, logManager, quotaManager,
replica, futureReplica, partition)
//Create the thread
val endPoint = new BrokerEndPoint(0, "localhost", 1000)
val thread = new ReplicaAlterLogDirsThread(
"alter-logs-dirs-thread-test1",
sourceBroker = endPoint,
brokerConfig = config,
replicaMgr = replicaManager,
quota = quotaManager,
brokerTopicStats = null)
thread.addPartitions(Map(t1p0 -> 0))
// Run the thread 3 times (exactly the number of times we mock an exception for getReplicaOrException)
(0 to 2).foreach { _ =>
thread.doWork()
}
// Nothing happened since the replica was not available
assertEquals(0, truncated.getValues.size())
// Next time we loop, getReplicaOrException will return replica
thread.doWork()
// Now the final call should have actually done a truncation (to offset futureReplicaLEO)
assertEquals(futureReplicaLEO, truncated.getValue)
}
@Test
def shouldFetchLeaderEpochOnFirstFetchOnly(): Unit = {
//Setup all dependencies
val config = KafkaConfig.fromProps(TestUtils.createBrokerConfig(1, "localhost:1234"))
val quotaManager = createNiceMock(classOf[ReplicationQuotaManager])
val leaderEpochs = createNiceMock(classOf[LeaderEpochCache])
val futureReplicaLeaderEpochs = createMock(classOf[LeaderEpochCache])
val logManager = createMock(classOf[LogManager])
val replica = createNiceMock(classOf[Replica])
val futureReplica = createNiceMock(classOf[Replica])
val partition = createMock(classOf[Partition])
val replicaManager = createMock(classOf[ReplicaManager])
val responseCallback: Capture[Seq[(TopicPartition, FetchPartitionData)] => Unit] = EasyMock.newCapture()
val leaderEpoch = 5
val futureReplicaLEO = 190
val replicaLEO = 213
//Stubs
expect(partition.truncateTo(futureReplicaLEO, true)).once()
expect(replica.epochs).andReturn(Some(leaderEpochs)).anyTimes()
expect(futureReplica.epochs).andReturn(Some(futureReplicaLeaderEpochs)).anyTimes()
expect(futureReplica.logEndOffset).andReturn(new LogOffsetMetadata(futureReplicaLEO)).anyTimes()
expect(futureReplicaLeaderEpochs.latestEpoch).andReturn(leaderEpoch)
expect(leaderEpochs.endOffsetFor(leaderEpoch)).andReturn((leaderEpoch, replicaLEO))
expect(futureReplicaLeaderEpochs.endOffsetFor(leaderEpoch)).andReturn((leaderEpoch, futureReplicaLEO))
expect(replicaManager.logManager).andReturn(logManager).anyTimes()
stubWithFetchMessages(replica, replica, futureReplica, partition, replicaManager, responseCallback)
replay(leaderEpochs, futureReplicaLeaderEpochs, replicaManager, logManager, quotaManager,
replica, futureReplica, partition)
//Create the fetcher thread
val endPoint = new BrokerEndPoint(0, "localhost", 1000)
val thread = new ReplicaAlterLogDirsThread(
"alter-logs-dirs-thread-test1",
sourceBroker = endPoint,
brokerConfig = config,
replicaMgr = replicaManager,
quota = quotaManager,
brokerTopicStats = null)
thread.addPartitions(Map(t1p0 -> 0))
// loop a few times
(0 to 3).foreach { _ =>
thread.doWork()
}
//Assert that truncate to is called exactly once (despite more loops)
verify(partition)
}
@Test
def shouldFetchOneReplicaAtATime(): Unit = {
//Setup all dependencies
val config = KafkaConfig.fromProps(TestUtils.createBrokerConfig(1, "localhost:1234"))
val quotaManager = createNiceMock(classOf[ReplicationQuotaManager])
val logManager = createMock(classOf[LogManager])
val replica = createNiceMock(classOf[Replica])
val futureReplica = createNiceMock(classOf[Replica])
val partition = createMock(classOf[Partition])
val replicaManager = createMock(classOf[ReplicaManager])
//Stubs
expect(futureReplica.logStartOffset).andReturn(123).anyTimes()
expect(replicaManager.logManager).andReturn(logManager).anyTimes()
stub(replica, replica, futureReplica, partition, replicaManager)
replay(replicaManager, logManager, quotaManager, replica, futureReplica, partition)
//Create the fetcher thread
val endPoint = new BrokerEndPoint(0, "localhost", 1000)
val thread = new ReplicaAlterLogDirsThread(
"alter-logs-dirs-thread-test1",
sourceBroker = endPoint,
brokerConfig = config,
replicaMgr = replicaManager,
quota = quotaManager,
brokerTopicStats = null)
thread.addPartitions(Map(t1p0 -> 0, t1p1 -> 0))
val ResultWithPartitions(fetchRequest, partitionsWithError) =
thread.buildFetchRequest(Seq((t1p0, new PartitionFetchState(150)), (t1p1, new PartitionFetchState(160))))
assertFalse(fetchRequest.isEmpty)
assertFalse(partitionsWithError.nonEmpty)
val request = fetchRequest.underlying.build()
assertEquals(0, request.minBytes)
val fetchInfos = request.fetchData.asScala.toSeq
assertEquals(1, fetchInfos.length)
assertEquals("Expected fetch request for largest partition", t1p1, fetchInfos.head._1)
assertEquals(160, fetchInfos.head._2.fetchOffset)
}
@Test
def shouldFetchNonDelayedAndNonTruncatingReplicas(): Unit = {
//Setup all dependencies
val config = KafkaConfig.fromProps(TestUtils.createBrokerConfig(1, "localhost:1234"))
val quotaManager = createNiceMock(classOf[ReplicationQuotaManager])
val logManager = createMock(classOf[LogManager])
val replica = createNiceMock(classOf[Replica])
val futureReplica = createNiceMock(classOf[Replica])
val partition = createMock(classOf[Partition])
val replicaManager = createMock(classOf[ReplicaManager])
//Stubs
expect(futureReplica.logStartOffset).andReturn(123).anyTimes()
expect(replicaManager.logManager).andReturn(logManager).anyTimes()
stub(replica, replica, futureReplica, partition, replicaManager)
replay(replicaManager, logManager, quotaManager, replica, futureReplica, partition)
//Create the fetcher thread
val endPoint = new BrokerEndPoint(0, "localhost", 1000)
val thread = new ReplicaAlterLogDirsThread(
"alter-logs-dirs-thread-test1",
sourceBroker = endPoint,
brokerConfig = config,
replicaMgr = replicaManager,
quota = quotaManager,
brokerTopicStats = null)
thread.addPartitions(Map(t1p0 -> 0, t1p1 -> 0))
// one partition is ready and one is truncating
val ResultWithPartitions(fetchRequest, partitionsWithError) =
thread.buildFetchRequest(Seq(
(t1p0, new PartitionFetchState(150)),
(t1p1, new PartitionFetchState(160, truncatingLog=true))))
assertFalse(fetchRequest.isEmpty)
assertFalse(partitionsWithError.nonEmpty)
val fetchInfos = fetchRequest.underlying.build().fetchData.asScala.toSeq
assertEquals(1, fetchInfos.length)
assertEquals("Expected fetch request for non-truncating partition", t1p0, fetchInfos.head._1)
assertEquals(150, fetchInfos.head._2.fetchOffset)
// one partition is ready and one is delayed
val ResultWithPartitions(fetchRequest2, partitionsWithError2) =
thread.buildFetchRequest(Seq(
(t1p0, new PartitionFetchState(140)),
(t1p1, new PartitionFetchState(160, delay=new DelayedItem(5000)))))
assertFalse(fetchRequest2.isEmpty)
assertFalse(partitionsWithError2.nonEmpty)
val fetchInfos2 = fetchRequest2.underlying.build().fetchData.asScala.toSeq
assertEquals(1, fetchInfos2.length)
assertEquals("Expected fetch request for non-delayed partition", t1p0, fetchInfos2.head._1)
assertEquals(140, fetchInfos2.head._2.fetchOffset)
// both partitions are delayed
val ResultWithPartitions(fetchRequest3, partitionsWithError3) =
thread.buildFetchRequest(Seq(
(t1p0, new PartitionFetchState(140, delay=new DelayedItem(5000))),
(t1p1, new PartitionFetchState(160, delay=new DelayedItem(5000)))))
assertTrue("Expected no fetch requests since all partitions are delayed", fetchRequest3.isEmpty)
assertFalse(partitionsWithError3.nonEmpty)
}
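// Shared EasyMock wiring: points replicaManager lookups for t1p0 and t1p1 at the
// given replicas and partition, so individual tests only stub what they vary.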
def stub(replicaT1p0: Replica, replicaT1p1: Replica, futureReplica: Replica, partition: Partition, replicaManager: ReplicaManager) = {
expect(replicaManager.getReplica(t1p0)).andReturn(Some(replicaT1p0)).anyTimes()
expect(replicaManager.getReplica(t1p0, Request.FutureLocalReplicaId)).andReturn(Some(futureReplica)).anyTimes()
expect(replicaManager.getReplicaOrException(t1p0)).andReturn(replicaT1p0).anyTimes()
expect(replicaManager.getReplicaOrException(t1p0, Request.FutureLocalReplicaId)).andReturn(futureReplica).anyTimes()
expect(replicaManager.getPartition(t1p0)).andReturn(Some(partition)).anyTimes()
expect(replicaManager.getReplica(t1p1)).andReturn(Some(replicaT1p1)).anyTimes()
expect(replicaManager.getReplica(t1p1, Request.FutureLocalReplicaId)).andReturn(Some(futureReplica)).anyTimes()
expect(replicaManager.getReplicaOrException(t1p1)).andReturn(replicaT1p1).anyTimes()
expect(replicaManager.getReplicaOrException(t1p1, Request.FutureLocalReplicaId)).andReturn(futureReplica).anyTimes()
expect(replicaManager.getPartition(t1p1)).andReturn(Some(partition)).anyTimes()
}
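// Like stub(), but also wires replicaManager.fetchMessages to invoke the captured
// response callback immediately with an empty result, so doWork() never blocks
// waiting on a fetch.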
def stubWithFetchMessages(replicaT1p0: Replica, replicaT1p1: Replica, futureReplica: Replica,
partition: Partition, replicaManager: ReplicaManager,
responseCallback: Capture[Seq[(TopicPartition, FetchPartitionData)] => Unit]) = {
stub(replicaT1p0, replicaT1p1, futureReplica, partition, replicaManager)
expect(replicaManager.fetchMessages(
EasyMock.anyLong(),
EasyMock.anyInt(),
EasyMock.anyInt(),
EasyMock.anyInt(),
EasyMock.anyObject(),
EasyMock.anyObject(),
EasyMock.anyObject(),
EasyMock.capture(responseCallback),
EasyMock.anyObject()))
.andAnswer(new IAnswer[Unit] {
override def answer(): Unit = {
responseCallback.getValue.apply(Seq.empty[(TopicPartition, FetchPartitionData)])
}
}).anyTimes()
}
}
|
ollie314/kafka
|
core/src/test/scala/unit/kafka/server/ReplicaAlterLogDirsThreadTest.scala
|
Scala
|
apache-2.0
| 27,578 |
package test
import akka.actor.ActorSystem
import akka.pattern.AskTimeoutException
import github.gphat.datadog._
import java.nio.charset.StandardCharsets
import org.json4s._
import org.json4s.native.JsonMethods._
import org.specs2.mutable.Specification
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.{Await,Future,Promise}
import scala.util.Try
import spray.http._
class MetricSpec extends Specification {
implicit val formats = DefaultFormats
// Sequential because it's less work to share the client instance
sequential
"Client" should {
val adapter = new OkHttpAdapter()
val client = new Client(
apiKey = "apiKey",
appKey = "appKey",
httpAdapter = adapter
)
"handle add metrics" in {
val res = Await.result(client.addMetrics(
series = Seq(
Metric(
name = "foo.bar.test",
points = Seq((1412183578, 12.0), (1412183579, 123.0)),
host = Some("poop.example.com"),
tags = Some(Seq("tag1", "tag2:foo")),
metricType = Some("gauge")
),
Metric(
name = "foo.bar.gorch",
points = Seq((1412183580, 12.0), (1412183581, 123.0)),
host = Some("poop2.example.com"),
tags = Some(Seq("tag3", "tag3:foo")),
metricType = Some("counter")
)
)
), Duration(5, "second"))
res.statusCode must beEqualTo(200)
adapter.getRequest must beSome.which(_.uri.toString == "https://app.datadoghq.com/api/v1/series?api_key=apiKey&application_key=appKey")
val body = parse(adapter.getRequest.get.entity.asString)
val names = for {
JObject(series) <- body
JField("metric", JString(name)) <- series
} yield name
names must have size(2)
names must contain(be_==("foo.bar.test")).exactly(1)
names must contain(be_==("foo.bar.gorch")).exactly(1)
val points = for {
JObject(series) <- body
JField("points", JArray(point)) <- series
} yield point
points must have size(2)
points must contain(be_==(Seq(JArray(List(JInt(1412183578), JDouble(12.0))), JArray(List(JInt(1412183579), JDouble(123.0)))))).exactly(1)
points must contain(be_==(Seq(JArray(List(JInt(1412183580), JDouble(12.0))), JArray(List(JInt(1412183581), JDouble(123.0)))))).exactly(1)
adapter.getRequest must beSome.which(_.method == HttpMethods.POST)
}
"handle query timeseries" in {
val res = Await.result(client.query(
query = "system.cpu.idle{*}by{host}",
from = 1470453155,
to = 1470539518
), Duration(5, "second"))
res.statusCode must beEqualTo(200)
val params = adapter.getRequest.get.uri.query.toMap
params must havePairs(
"api_key" -> "apiKey",
"application_key" -> "appKey",
"query" -> "system.cpu.idle{*}by{host}",
"from" -> "1470453155",
"to" -> "1470539518"
)
adapter.getRequest must beSome.which(_.method == HttpMethods.GET)
}
}
}
|
gphat/datadog-scala
|
src/test/scala/MetricSpec.scala
|
Scala
|
mit
| 3,115 |
/* __ __ *\
* / /____ ___ ____ ___ ___ _/ / lasius *
* / __/ -_) _ `/ _ \/ _ \/ _ `/ / contributed by tegonal *
* \__/\__/\_, /\___/_//_/\_,_/_/ http://tegonal.com/ *
* /___/ *
* *
* This program is free software: you can redistribute it and/or modify it *
* under the terms of the GNU General Public License as published by *
* the Free Software Foundation, either version 3 of the License, *
* or (at your option) any later version. *
* *
* This program is distributed in the hope that it will be useful, but *
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for *
* more details. *
* *
* You should have received a copy of the GNU General Public License along *
* with this program. If not, see http://www.gnu.org/licenses/ *
* *
\* */
package repositories
import org.junit.runner.RunWith
import org.specs2.runner.JUnitRunner
import org.specs2.mutable._
import models._
import org.joda.time.DateTime
import org.joda.time.Duration
import scala.concurrent.Await
import scala.concurrent.duration._
import play.api.libs.json._
import org.joda.time.format.DateTimeFormat
import mongo.EmbedMongo
import mongo.EmbedMongo.WithMongo
import BaseFormat._
@RunWith(classOf[JUnitRunner])
class BookingStatisticsRepositorySpec extends EmbedMongo {
val repository = new BookingByProjectMongoRepository
val user = UserId("user")
val day = DateTime.now.withTimeAtStartOfDay
def findByUserDayProject(user: UserId, day: DateTime, projectId: ProjectId) = {
Await.result(repository.find(Json.obj("userId" -> user, "day" -> day, "projectId" -> projectId), 1, 0).map(x => x.headOption.map(_._1)), DurationInt(15).seconds)
}
"BookingStatistic add" should {
"insert new record for new unique constraint" in new WithMongo {
val projectId = ProjectId("p1")
//initialize
val newDuration = Duration.standardHours(1)
val newValue = BookingByProject(BookingByProjectId(), user, day, projectId, newDuration)
//test
val resultFuture = repository.add(newValue)
//check
val result = Await.result(resultFuture, DurationInt(15).seconds)
result === true
val result2 = findByUserDayProject(user, day, projectId)
result2 !== None
result2.get.duration === newDuration
}
"add to correct previous unique constraint" in new WithMongo {
val existingDuration = Duration.standardHours(2)
val projectId = ProjectId("p2")
//initialize various statistics
val f = for {
id <- repository.insert(BookingByProject(BookingByProjectId(), user, day, projectId, existingDuration))
id2 <- repository.insert(BookingByProject(BookingByProjectId(), user, day, ProjectId("p2"), Duration.standardHours(3)))
id3 <- repository.insert(BookingByProject(BookingByProjectId(), UserId("user2"), day, projectId, Duration.standardHours(4)))
id4 <- repository.insert(BookingByProject(BookingByProjectId(), user, day.plusDays(1), projectId, Duration.standardHours(5)))
} yield {
id
}
Await.result(f, DurationInt(15).seconds)
val newDuration = Duration.standardHours(1)
val newValue = BookingByProject(BookingByProjectId(), user, day, projectId, newDuration)
//test
val resultFuture = repository.add(newValue)
//check
val result = Await.result(resultFuture, DurationInt(15).seconds)
result === true
val result2 = findByUserDayProject(user, day, projectId)
result2 !== None
result2.get.duration === existingDuration.plus(newDuration)
}
}
"BookingStatistic subtract" should {
"Remove negatvie value if no previous entry was found" in new WithMongo {
//initialize
val newDuration = Duration.standardHours(1)
val projectId = ProjectId("p3")
val newValue = BookingByProject(BookingByProjectId(), user, day, projectId, newDuration)
//test
val resultFuture = repository.subtract(newValue)
//check
val result = Await.result(resultFuture, DurationInt(15).seconds)
result === true
val result2 = findByUserDayProject(user, day, projectId)
result2 !== None
result2.get.duration === Duration.standardHours(-1)
}
"Remove from correct previous constraint" in new WithMongo {
val existingDuration = Duration.standardHours(2)
val projectId = ProjectId("p4")
//initialize various statistics
val f = for {
id <- repository.insert(BookingByProject(BookingByProjectId(), user, day, projectId, existingDuration))
id2 <- repository.insert(BookingByProject(BookingByProjectId(), user, day, ProjectId("p2"), Duration.standardHours(3)))
id3 <- repository.insert(BookingByProject(BookingByProjectId(), UserId("user2"), day, projectId, Duration.standardHours(4)))
id4 <- repository.insert(BookingByProject(BookingByProjectId(), user, day.plusDays(1), projectId, Duration.standardHours(5)))
} yield {
id
}
Await.result(f, DurationInt(15).seconds)
val newDuration = Duration.standardHours(1)
val newValue = BookingByProject(BookingByProjectId(), user, day, projectId, newDuration)
//test
val resultFuture = repository.subtract(newValue)
//check
val result = Await.result(resultFuture, DurationInt(15).seconds)
result === true
val result2 = findByUserDayProject(user, day, projectId)
result2 !== None
result2.get.duration === existingDuration.minus(newDuration)
}
}
val dateTimeFormat = DateTimeFormat.forPattern("dd.MM.yyyy")
def date(date: String): DateTime = {
DateTime.parse(date, dateTimeFormat)
}
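// Inserts a single one-hour booking on `day` for a fixed user/project, then runs
// `test` against the result of findByUserIdAndRange(user, from, to).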
def testFindByUserIdAndRange[T](from: DateTime, to: DateTime, day: DateTime)(test: Traversable[BookingByProject] => T)(implicit evidence$1: org.specs2.execute.AsResult[T]) = {
val user = UserId("user1")
//initialize
val b = BookingByProject(BookingByProjectId(), user, day, ProjectId("p1"), Duration.standardHours(1))
val f = repository.insert(b)
Await.result(f, DurationInt(15).seconds)
val find = repository.findByUserIdAndRange(user, from, to)
val findSync = Await.result(find, DurationInt(15).seconds)
test(findSync)
}
"findByUserIdAndRange" should {
"find BookingHistory Within range" in new WithMongo {
//initialize
val from = date("01.01.2000")
val to = date("01.01.2001")
val day = date("01.02.2000")
testFindByUserIdAndRange(from, to, day) { result =>
result must have size (1)
result.head.day must equalTo(day)
}
}
}
"Not find Booking outside of range" in new WithMongo {
//initialize
val from = date("01.01.2000")
val to = date("01.01.2001")
val day = date("01.02.2002")
testFindByUserIdAndRange(from, to, day) { result =>
result must have size (0)
}
}
}
|
tegonal/lasius
|
test/repositories/BookingStatisticsRepositorySpec.scala
|
Scala
|
gpl-3.0
| 7,672 |
import language.experimental.macros
import scala.reflect.macros.blackbox.Context
object M {
def impl(c: Context)(a: c.Expr[Any]) = c.Expr[Any](c.untypecheck(a.tree))
def m(a: Any) = macro impl
}
|
yusuke2255/dotty
|
tests/disabled/not-representable/pos/annotated-original/M_1.scala
|
Scala
|
bsd-3-clause
| 200 |
package chana.jpql
import akka.cluster.pubsub.DistributedPubSubMediator.{ Subscribe, SubscribeAck }
import chana.Entity
import chana.PutJPQL
import chana.avro.UpdateAction
import chana.avro.UpdateEvent
import chana.jpql
import org.apache.avro.generic.GenericData.Record
import scala.concurrent.duration._
import scala.concurrent.forkjoin.ThreadLocalRandom
import scala.util.Failure
import scala.util.Success
object JPQLBehavior {
val jpqlTopic = "chana_jpql_"
private case class ReportingTick(key: String)
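// Jitter the first report so entities created in bulk don't all report at once.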
private def reportingDelay(interval: Duration) = ThreadLocalRandom.current.nextLong(100, interval.toMillis).millis
}
/**
* TODO pass entity id to jpql evaluator
*/
trait JPQLBehavior extends Entity {
import JPQLBehavior._
private var scheduledJpqls = Set[String]()
private def isScheduled(jpqlKey: String) = scheduledJpqls.contains(jpqlKey)
mediator ! Subscribe(jpqlTopic + entityName, self)
import context.dispatcher
// TODO:
// 1. report once when newly created - Done, see onReady()
// 2. report when a new jpql is put - Done, see behavior when got PutJPQL
// 3. report only on update - Done, see onUpdated()
// 4. report once when deleted // in case of delete, should guarantee delivery via ACK etc?
def jpqlBehavior: Receive = {
case ReportingTick(jpqlKey) =>
DistributedJPQLBoard.keyToJPQL.get(jpqlKey) match {
case (meta: JPQLSelect, interval) if meta.entity == entityName =>
scheduleJpqlReport(jpqlKey, meta, interval, record)
case _ =>
}
case SubscribeAck(Subscribe(topic, None, `self`)) =>
log.debug("Subscribed " + topic)
case PutJPQL(_, key, jpqlQuery, interval) =>
jpql.parseJPQL(key, jpqlQuery) match {
case Success(meta: jpql.JPQLSelect) =>
if (meta.entity == entityName) {
scheduleJpqlReport(key, meta, interval, record)
}
case Success(meta: jpql.JPQLUpdate) =>
val commander = sender()
jpql.update(id, record, meta) match {
case Success(actions) =>
if (actions.nonEmpty) {
resetIdleTimeout()
commit(id, actions.flatten, commander)
}
case x @ Failure(ex) =>
log.error(ex, ex.getMessage)
commander ! x
}
case Success(meta: jpql.JPQLInsert) =>
val commander = sender()
jpql.insert(id, record, meta) match {
case Success(actions) =>
if (actions.nonEmpty) {
resetIdleTimeout()
commit(id, actions, commander)
}
case x @ Failure(ex) =>
log.error(ex, ex.getMessage)
commander ! x
}
case Success(meta: jpql.JPQLDelete) =>
val commander = sender()
jpql.delete(id, record, meta) match {
case Success(actions) =>
if (actions.collectFirst { case UpdateAction(null, _, binlog) => true }.isDefined) {
isDeleted(true) // TODO persist log
} else {
commit(id, actions, commander)
}
case failure @ Failure(ex) =>
log.error(ex, ex.getMessage)
commander ! failure
}
case failure @ Failure(ex) =>
val commander = sender()
log.error(ex, ex.getMessage)
commander ! failure
}
}
/**
* Schedule periodic reporting for every matching jpql select that is not yet scheduled (all of them when `force` is set)
*/
private def scheduleJpqlReportAll(force: Boolean) {
var newScheduledJpqls = Set[String]()
val jpqls = DistributedJPQLBoard.keyToJPQL.entrySet.iterator
while (jpqls.hasNext) {
val entry = jpqls.next
val key = entry.getKey
val (meta, interval) = entry.getValue
if (meta.entity == entityName) {
if (force || !isScheduled(key)) {
scheduleJpqlReport(key, meta.asInstanceOf[JPQLSelect], interval, record) // report right now
}
newScheduledJpqls += key
}
}
scheduledJpqls = newScheduledJpqls
}
private def scheduleJpqlReport(jpqlKey: String, meta: JPQLSelect, interval: FiniteDuration, record: Record) {
if (meta.entity == entityName) {
try {
val reducerProxy = DistributedJPQLBoard.keyToReducerProxy.get(jpqlKey)
if (reducerProxy ne null) {
val toSend = if (isDeleted) {
DeletedRecord(id)
} else {
new JPQLMapperSelect(id, meta).gatherProjection(record)
}
reducerProxy ! toSend
}
context.system.scheduler.scheduleOnce(interval, self, ReportingTick(jpqlKey))
} catch {
case ex: Throwable => log.error(ex, ex.getMessage)
}
}
}
override def onReady() {
scheduleJpqlReportAll(force = true)
}
override def onUpdated(event: UpdateEvent) {
scheduleJpqlReportAll(force = true)
}
}
|
wandoulabs/chana
|
src/main/scala/chana/jpql/JPQLBehavior.scala
|
Scala
|
apache-2.0
| 4,914 |
package dotty.tools
package backend.jvm
import dotc.ast.Trees.Select
import dotc.ast.tpd._
import dotc.core._
import Contexts.{Context, ctx}
import Names.TermName, StdNames._
import Types.{JavaArrayType, UnspecifiedErrorType, Type}
import Symbols.{Symbol, NoSymbol}
import scala.annotation.threadUnsafe
import scala.collection.immutable
/** Scala primitive operations are represented as methods in `Any` and
* `AnyVal` subclasses. Here we demultiplex them by providing a mapping
* from their symbols to integers. Different methods exist for
* different value types, but with the same meaning (like plus, minus,
* etc.). They will all be mapped to the same int.
*
* Note: The three equal methods have the following semantics:
* - `"=="` checks for `null`, and if non-null, calls
* `java.lang.Object.equals`
* `(class: Any; modifier: final)`. Primitive: `EQ`
* - `"eq"` usual reference comparison
* `(class: AnyRef; modifier: final)`. Primitive: `ID`
* - `"equals"` user-defined equality (Java semantics)
* `(class: Object; modifier: none)`. Primitive: `EQUALS`
*
* Inspired from the `scalac` compiler.
*/
class DottyPrimitives(ictx: Context) {
import dotty.tools.backend.ScalaPrimitivesOps._
@threadUnsafe private lazy val primitives: immutable.Map[Symbol, Int] = init
/** Return the code for the given symbol. */
def getPrimitive(sym: Symbol): Int = {
primitives(sym)
}
/**
* Return the primitive code of the given operation. If the
* operation is an array get/set, we inspect the type of the receiver
* to demux the operation.
*
* @param fun The method symbol
* @param tpe The type of the receiver object. It is used only for array
* operations
*/
def getPrimitive(app: Apply, tpe: Type)(using Context): Int = {
val fun = app.fun.symbol
val defn = ctx.definitions
val code = app.fun match {
case Select(_, nme.primitive.arrayLength) =>
LENGTH
case Select(_, nme.primitive.arrayUpdate) =>
UPDATE
case Select(_, nme.primitive.arrayApply) =>
APPLY
case _ => getPrimitive(fun)
}
def elementType: Type = tpe.widenDealias match {
case defn.ArrayOf(el) => el
case JavaArrayType(el) => el
case _ =>
ctx.error(s"expected Array $tpe")
UnspecifiedErrorType
}
code match {
case APPLY =>
defn.scalaClassName(elementType) match {
case tpnme.Boolean => ZARRAY_GET
case tpnme.Byte => BARRAY_GET
case tpnme.Short => SARRAY_GET
case tpnme.Char => CARRAY_GET
case tpnme.Int => IARRAY_GET
case tpnme.Long => LARRAY_GET
case tpnme.Float => FARRAY_GET
case tpnme.Double => DARRAY_GET
case _ => OARRAY_GET
}
case UPDATE =>
defn.scalaClassName(elementType) match {
case tpnme.Boolean => ZARRAY_SET
case tpnme.Byte => BARRAY_SET
case tpnme.Short => SARRAY_SET
case tpnme.Char => CARRAY_SET
case tpnme.Int => IARRAY_SET
case tpnme.Long => LARRAY_SET
case tpnme.Float => FARRAY_SET
case tpnme.Double => DARRAY_SET
case _ => OARRAY_SET
}
case LENGTH =>
defn.scalaClassName(elementType) match {
case tpnme.Boolean => ZARRAY_LENGTH
case tpnme.Byte => BARRAY_LENGTH
case tpnme.Short => SARRAY_LENGTH
case tpnme.Char => CARRAY_LENGTH
case tpnme.Int => IARRAY_LENGTH
case tpnme.Long => LARRAY_LENGTH
case tpnme.Float => FARRAY_LENGTH
case tpnme.Double => DARRAY_LENGTH
case _ => OARRAY_LENGTH
}
case _ =>
code
}
}
/** Initialize the primitive map */
private def init: immutable.Map[Symbol, Int] = {
given Context = ictx
import Symbols.defn
val primitives = Symbols.newMutableSymbolMap[Int]
/** Add a primitive operation to the map */
def addPrimitive(s: Symbol, code: Int): Unit = {
assert(!(primitives contains s), "Duplicate primitive " + s)
primitives(s) = code
}
def addPrimitives(cls: Symbol, method: TermName, code: Int)(using Context): Unit = {
val alts = cls.info.member(method).alternatives.map(_.symbol)
if (alts.isEmpty)
ctx.error(s"Unknown primitive method $cls.$method")
else alts foreach (s =>
addPrimitive(s,
s.info.paramInfoss match {
case List(tp :: _) if code == ADD && tp =:= ctx.definitions.StringType => CONCAT
case _ => code
}
)
)
}
// scala.Any
addPrimitive(defn.Any_==, EQ)
addPrimitive(defn.Any_!=, NE)
addPrimitive(defn.Any_isInstanceOf, IS)
addPrimitive(defn.Any_asInstanceOf, AS)
addPrimitive(defn.Any_##, HASH)
// java.lang.Object
addPrimitive(defn.Object_eq, ID)
addPrimitive(defn.Object_ne, NI)
/* addPrimitive(defn.Any_==, EQ)
addPrimitive(defn.Any_!=, NE)*/
addPrimitive(defn.Object_synchronized, SYNCHRONIZED)
/*addPrimitive(defn.Any_isInstanceOf, IS)
addPrimitive(defn.Any_asInstanceOf, AS)*/
// java.lang.String
addPrimitive(defn.String_+, CONCAT)
// scala.Array
lazy val ArrayClass = defn.ArrayClass
addPrimitives(ArrayClass, nme.length, LENGTH)
addPrimitives(ArrayClass, nme.apply, APPLY)
addPrimitives(ArrayClass, nme.update, UPDATE)
// scala.Boolean
lazy val BooleanClass = defn.BooleanClass
addPrimitives(BooleanClass, nme.EQ, EQ)
addPrimitives(BooleanClass, nme.NE, NE)
addPrimitives(BooleanClass, nme.UNARY_!, ZNOT)
addPrimitives(BooleanClass, nme.ZOR, ZOR)
addPrimitives(BooleanClass, nme.ZAND, ZAND)
addPrimitives(BooleanClass, nme.OR, OR)
addPrimitives(BooleanClass, nme.AND, AND)
addPrimitives(BooleanClass, nme.XOR, XOR)
// scala.Byte
lazy val ByteClass = defn.ByteClass
addPrimitives(ByteClass, nme.EQ, EQ)
addPrimitives(ByteClass, nme.NE, NE)
addPrimitives(ByteClass, nme.ADD, ADD)
addPrimitives(ByteClass, nme.SUB, SUB)
addPrimitives(ByteClass, nme.MUL, MUL)
addPrimitives(ByteClass, nme.DIV, DIV)
addPrimitives(ByteClass, nme.MOD, MOD)
addPrimitives(ByteClass, nme.LT, LT)
addPrimitives(ByteClass, nme.LE, LE)
addPrimitives(ByteClass, nme.GT, GT)
addPrimitives(ByteClass, nme.GE, GE)
addPrimitives(ByteClass, nme.XOR, XOR)
addPrimitives(ByteClass, nme.OR, OR)
addPrimitives(ByteClass, nme.AND, AND)
addPrimitives(ByteClass, nme.LSL, LSL)
addPrimitives(ByteClass, nme.LSR, LSR)
addPrimitives(ByteClass, nme.ASR, ASR)
// conversions
addPrimitives(ByteClass, nme.toByte, B2B)
addPrimitives(ByteClass, nme.toShort, B2S)
addPrimitives(ByteClass, nme.toChar, B2C)
addPrimitives(ByteClass, nme.toInt, B2I)
addPrimitives(ByteClass, nme.toLong, B2L)
// unary methods
addPrimitives(ByteClass, nme.UNARY_+, POS)
addPrimitives(ByteClass, nme.UNARY_-, NEG)
addPrimitives(ByteClass, nme.UNARY_~, NOT)
addPrimitives(ByteClass, nme.toFloat, B2F)
addPrimitives(ByteClass, nme.toDouble, B2D)
// scala.Short
lazy val ShortClass = defn.ShortClass
addPrimitives(ShortClass, nme.EQ, EQ)
addPrimitives(ShortClass, nme.NE, NE)
addPrimitives(ShortClass, nme.ADD, ADD)
addPrimitives(ShortClass, nme.SUB, SUB)
addPrimitives(ShortClass, nme.MUL, MUL)
addPrimitives(ShortClass, nme.DIV, DIV)
addPrimitives(ShortClass, nme.MOD, MOD)
addPrimitives(ShortClass, nme.LT, LT)
addPrimitives(ShortClass, nme.LE, LE)
addPrimitives(ShortClass, nme.GT, GT)
addPrimitives(ShortClass, nme.GE, GE)
addPrimitives(ShortClass, nme.XOR, XOR)
addPrimitives(ShortClass, nme.OR, OR)
addPrimitives(ShortClass, nme.AND, AND)
addPrimitives(ShortClass, nme.LSL, LSL)
addPrimitives(ShortClass, nme.LSR, LSR)
addPrimitives(ShortClass, nme.ASR, ASR)
// conversions
addPrimitives(ShortClass, nme.toByte, S2B)
addPrimitives(ShortClass, nme.toShort, S2S)
addPrimitives(ShortClass, nme.toChar, S2C)
addPrimitives(ShortClass, nme.toInt, S2I)
addPrimitives(ShortClass, nme.toLong, S2L)
// unary methods
addPrimitives(ShortClass, nme.UNARY_+, POS)
addPrimitives(ShortClass, nme.UNARY_-, NEG)
addPrimitives(ShortClass, nme.UNARY_~, NOT)
addPrimitives(ShortClass, nme.toFloat, S2F)
addPrimitives(ShortClass, nme.toDouble, S2D)
// scala.Char
lazy val CharClass = defn.CharClass
addPrimitives(CharClass, nme.EQ, EQ)
addPrimitives(CharClass, nme.NE, NE)
addPrimitives(CharClass, nme.ADD, ADD)
addPrimitives(CharClass, nme.SUB, SUB)
addPrimitives(CharClass, nme.MUL, MUL)
addPrimitives(CharClass, nme.DIV, DIV)
addPrimitives(CharClass, nme.MOD, MOD)
addPrimitives(CharClass, nme.LT, LT)
addPrimitives(CharClass, nme.LE, LE)
addPrimitives(CharClass, nme.GT, GT)
addPrimitives(CharClass, nme.GE, GE)
addPrimitives(CharClass, nme.XOR, XOR)
addPrimitives(CharClass, nme.OR, OR)
addPrimitives(CharClass, nme.AND, AND)
addPrimitives(CharClass, nme.LSL, LSL)
addPrimitives(CharClass, nme.LSR, LSR)
addPrimitives(CharClass, nme.ASR, ASR)
// conversions
addPrimitives(CharClass, nme.toByte, C2B)
addPrimitives(CharClass, nme.toShort, C2S)
addPrimitives(CharClass, nme.toChar, C2C)
addPrimitives(CharClass, nme.toInt, C2I)
addPrimitives(CharClass, nme.toLong, C2L)
// unary methods
addPrimitives(CharClass, nme.UNARY_+, POS)
addPrimitives(CharClass, nme.UNARY_-, NEG)
addPrimitives(CharClass, nme.UNARY_~, NOT)
addPrimitives(CharClass, nme.toFloat, C2F)
addPrimitives(CharClass, nme.toDouble, C2D)
// scala.Int
lazy val IntClass = defn.IntClass
addPrimitives(IntClass, nme.EQ, EQ)
addPrimitives(IntClass, nme.NE, NE)
addPrimitives(IntClass, nme.ADD, ADD)
addPrimitives(IntClass, nme.SUB, SUB)
addPrimitives(IntClass, nme.MUL, MUL)
addPrimitives(IntClass, nme.DIV, DIV)
addPrimitives(IntClass, nme.MOD, MOD)
addPrimitives(IntClass, nme.LT, LT)
addPrimitives(IntClass, nme.LE, LE)
addPrimitives(IntClass, nme.GT, GT)
addPrimitives(IntClass, nme.GE, GE)
addPrimitives(IntClass, nme.XOR, XOR)
addPrimitives(IntClass, nme.OR, OR)
addPrimitives(IntClass, nme.AND, AND)
addPrimitives(IntClass, nme.LSL, LSL)
addPrimitives(IntClass, nme.LSR, LSR)
addPrimitives(IntClass, nme.ASR, ASR)
// conversions
addPrimitives(IntClass, nme.toByte, I2B)
addPrimitives(IntClass, nme.toShort, I2S)
addPrimitives(IntClass, nme.toChar, I2C)
addPrimitives(IntClass, nme.toInt, I2I)
addPrimitives(IntClass, nme.toLong, I2L)
// unary methods
addPrimitives(IntClass, nme.UNARY_+, POS)
addPrimitives(IntClass, nme.UNARY_-, NEG)
addPrimitives(IntClass, nme.UNARY_~, NOT)
addPrimitives(IntClass, nme.toFloat, I2F)
addPrimitives(IntClass, nme.toDouble, I2D)
// scala.Long
lazy val LongClass = defn.LongClass
addPrimitives(LongClass, nme.EQ, EQ)
addPrimitives(LongClass, nme.NE, NE)
addPrimitives(LongClass, nme.ADD, ADD)
addPrimitives(LongClass, nme.SUB, SUB)
addPrimitives(LongClass, nme.MUL, MUL)
addPrimitives(LongClass, nme.DIV, DIV)
addPrimitives(LongClass, nme.MOD, MOD)
addPrimitives(LongClass, nme.LT, LT)
addPrimitives(LongClass, nme.LE, LE)
addPrimitives(LongClass, nme.GT, GT)
addPrimitives(LongClass, nme.GE, GE)
addPrimitives(LongClass, nme.XOR, XOR)
addPrimitives(LongClass, nme.OR, OR)
addPrimitives(LongClass, nme.AND, AND)
addPrimitives(LongClass, nme.LSL, LSL)
addPrimitives(LongClass, nme.LSR, LSR)
addPrimitives(LongClass, nme.ASR, ASR)
// conversions
addPrimitives(LongClass, nme.toByte, L2B)
addPrimitives(LongClass, nme.toShort, L2S)
addPrimitives(LongClass, nme.toChar, L2C)
addPrimitives(LongClass, nme.toInt, L2I)
addPrimitives(LongClass, nme.toLong, L2L)
// unary methods
addPrimitives(LongClass, nme.UNARY_+, POS)
addPrimitives(LongClass, nme.UNARY_-, NEG)
addPrimitives(LongClass, nme.UNARY_~, NOT)
addPrimitives(LongClass, nme.toFloat, L2F)
addPrimitives(LongClass, nme.toDouble, L2D)
// scala.Float
lazy val FloatClass = defn.FloatClass
addPrimitives(FloatClass, nme.EQ, EQ)
addPrimitives(FloatClass, nme.NE, NE)
addPrimitives(FloatClass, nme.ADD, ADD)
addPrimitives(FloatClass, nme.SUB, SUB)
addPrimitives(FloatClass, nme.MUL, MUL)
addPrimitives(FloatClass, nme.DIV, DIV)
addPrimitives(FloatClass, nme.MOD, MOD)
addPrimitives(FloatClass, nme.LT, LT)
addPrimitives(FloatClass, nme.LE, LE)
addPrimitives(FloatClass, nme.GT, GT)
addPrimitives(FloatClass, nme.GE, GE)
// conversions
addPrimitives(FloatClass, nme.toByte, F2B)
addPrimitives(FloatClass, nme.toShort, F2S)
addPrimitives(FloatClass, nme.toChar, F2C)
addPrimitives(FloatClass, nme.toInt, F2I)
addPrimitives(FloatClass, nme.toLong, F2L)
addPrimitives(FloatClass, nme.toFloat, F2F)
addPrimitives(FloatClass, nme.toDouble, F2D)
// unary methods
addPrimitives(FloatClass, nme.UNARY_+, POS)
addPrimitives(FloatClass, nme.UNARY_-, NEG)
// scala.Double
lazy val DoubleClass = defn.DoubleClass
addPrimitives(DoubleClass, nme.EQ, EQ)
addPrimitives(DoubleClass, nme.NE, NE)
addPrimitives(DoubleClass, nme.ADD, ADD)
addPrimitives(DoubleClass, nme.SUB, SUB)
addPrimitives(DoubleClass, nme.MUL, MUL)
addPrimitives(DoubleClass, nme.DIV, DIV)
addPrimitives(DoubleClass, nme.MOD, MOD)
addPrimitives(DoubleClass, nme.LT, LT)
addPrimitives(DoubleClass, nme.LE, LE)
addPrimitives(DoubleClass, nme.GT, GT)
addPrimitives(DoubleClass, nme.GE, GE)
// conversions
addPrimitives(DoubleClass, nme.toByte, D2B)
addPrimitives(DoubleClass, nme.toShort, D2S)
addPrimitives(DoubleClass, nme.toChar, D2C)
addPrimitives(DoubleClass, nme.toInt, D2I)
addPrimitives(DoubleClass, nme.toLong, D2L)
addPrimitives(DoubleClass, nme.toFloat, D2F)
addPrimitives(DoubleClass, nme.toDouble, D2D)
// unary methods
addPrimitives(DoubleClass, nme.UNARY_+, POS)
addPrimitives(DoubleClass, nme.UNARY_-, NEG)
primitives.toMap
}
def isPrimitive(fun: Tree): Boolean =
given Context = ictx
primitives.contains(fun.symbol)
|| (fun.symbol == NoSymbol // the only trees that do not have a symbol assigned are array.{update,select,length,clone}
&& {
fun match
case Select(_, StdNames.nme.clone_) => false // but array.clone is NOT a primitive op.
case _ => true
})
}
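// Illustrative semantics of the three equality methods described in the class
// comment above (plain Scala, shown for reference only; not part of this file):
//
//   val a = new String("x"); val b = new String("x")
//   a == b                 // true  -> EQ: null-safe, delegates to equals
//   a eq b                 // false -> ID: reference identity
//   a equals b             // true  -> EQUALS: user-defined equality
//   (null: String) == "x"  // false, without throwing a NullPointerException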
|
som-snytt/dotty
|
compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala
|
Scala
|
apache-2.0
| 15,045 |
package rescala.fullmv
object TurnPhase {
type Type = Int
val dummy: Type = 0
val Uninitialized: Type = 1
val Framing: Type = 2
val Executing: Type = 3
val Completed: Type = 4
def toString(phase: TurnPhase.Type) =
phase match {
case 1 => "Uninitialized"
case 2 => "Framing"
case 3 => "Executing"
case 4 => "Completed"
case _ => s"unkonwn($phase)"
}
}
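// Because phases are encoded as increasing Ints, they are totally ordered and can
// be compared directly, e.g. `phase < TurnPhase.Executing` tests whether a turn
// has not yet started executing (illustrative note, not from the original source).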
|
guidosalva/REScala
|
Code/Main/jvm/src/main/scala/rescala/fullmv/TurnPhase.scala
|
Scala
|
apache-2.0
| 420 |
package com.aesthetikx.android.canopy
import android.view.{LayoutInflater, View, ViewGroup}
import java.util.List
trait CanopyItem {
// Expansion
def setExpanded(expanded: Boolean): Unit
def toggleExpanded(): Unit
def isExpanded(): Boolean
def parentToggled(parentExpanded: Boolean, parentVisible: Boolean): Unit
// Visibility
def setVisible(visible: Boolean): Unit
def isVisible(): Boolean
// Tree
def getDepth(): Integer
def getChildCount(): Integer
def getChildren(): List[CanopyItem]
// Views
def getExpandedView(inflater: LayoutInflater, parent: ViewGroup): View
def getCollapsedView(inflater: LayoutInflater, parent: ViewGroup): View
}
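// Hedged sketch of a minimal leaf implementation (names and behavior below are
// illustrative assumptions, not part of the library):
//
//   class LeafItem(depth: Int, view: View) extends CanopyItem {
//     private var expanded = false
//     private var visible = true
//     def setExpanded(e: Boolean): Unit = expanded = e
//     def toggleExpanded(): Unit = expanded = !expanded
//     def isExpanded(): Boolean = expanded
//     def parentToggled(parentExpanded: Boolean, parentVisible: Boolean): Unit =
//       visible = parentExpanded && parentVisible
//     def setVisible(v: Boolean): Unit = visible = v
//     def isVisible(): Boolean = visible
//     def getDepth(): Integer = Integer.valueOf(depth)
//     def getChildCount(): Integer = Integer.valueOf(0)
//     def getChildren(): List[CanopyItem] = java.util.Collections.emptyList[CanopyItem]()
//     def getExpandedView(inflater: LayoutInflater, parent: ViewGroup): View = view
//     def getCollapsedView(inflater: LayoutInflater, parent: ViewGroup): View = view
//   }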
|
Aesthetikx/canopy
|
library/src/main/scala/com/aesthetikx/android/canopy/CanopyItem.scala
|
Scala
|
gpl-2.0
| 694 |
package se.culvertsoft.mnet.backend
import java.io.ByteArrayInputStream
import java.io.ByteArrayOutputStream
import se.culvertsoft.mgen.javapack.serialization.BinaryReader
import se.culvertsoft.mgen.javapack.serialization.BinaryWriter
import se.culvertsoft.mgen.javapack.serialization.JsonReader
import se.culvertsoft.mgen.javapack.serialization.JsonWriter
import se.culvertsoft.mnet.Message
class WebsockSerializer {
// Serialization state: shared buffers and readers/writers, guarded by synchronized below
private val classRegistry = new ClassRegistry
private val writeBuffer = new ByteArrayOutputStream
private val jsonWriter = new JsonWriter(writeBuffer, classRegistry)
private val binaryWriter = new BinaryWriter(writeBuffer, classRegistry)
private val jsonReader = new JsonReader(classRegistry)
private val binaryReader = new BinaryReader(new ByteArrayInputStream(Array[Byte]()), classRegistry)
def deserializeJson(msg: String): Message = synchronized {
jsonReader.readObject(msg, classOf[Message])
}
def deserializeBinary(msg: Array[Byte]): Message = synchronized {
binaryReader.setInput(new ByteArrayInputStream(msg)).readObject(classOf[Message])
}
def serializeJson(msg: Message): String = synchronized {
jsonWriter.writeObjectToString(msg)
}
def serializeBinary(msg: Message): Array[Byte] = synchronized {
binaryWriter.writeObject(msg)
val out = writeBuffer.toByteArray()
writeBuffer.reset()
out
}
}
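// Minimal usage sketch (`msg` is any generated mnet Message instance; illustrative
// only, not from the original source):
//
//   val serializer = new WebsockSerializer
//   val bytes = serializer.serializeBinary(msg)          // thread-safe: synchronized
//   val roundTripped = serializer.deserializeBinary(bytes)
//
// serializeBinary copies and then resets the shared writeBuffer, which is why all
// four methods synchronize on the instance.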
|
culvertsoft/mnet
|
mnet-backend/src/main/scala/se/culvertsoft/mnet/backend/WebsockSerializer.scala
|
Scala
|
gpl-2.0
| 1,413 |
package sbt
import complete.{ Completion, Completions, DefaultParsers, HistoryCommands, Parser, TokenCompletions }
import classpath.ClasspathUtilities.toLoader
import DefaultParsers._
import Types.{ const, idFun }
import Function.tupled
import Command.applyEffect
import HistoryCommands.{ Start => HistoryPrefix }
import BasicCommandStrings._
import CommandUtil._
import BasicKeys._
import java.io.File
import scala.util.control.NonFatal
object BasicCommands {
lazy val allBasicCommands = Seq(nop, ignore, help, completionsCommand, multi, ifLast, append, setOnFailure, clearOnFailure, stashOnFailure, popOnFailure, reboot, call, early, exit, continuous, history, shell, read, alias) ++ compatCommands
def nop = Command.custom(s => success(() => s))
def ignore = Command.command(FailureWall)(idFun)
def early = Command.arb(earlyParser, earlyHelp) { (s, other) => other :: s }
private[this] def earlyParser = (s: State) => token(EarlyCommand).flatMap(_ => otherCommandParser(s))
private[this] def earlyHelp = Help(EarlyCommand, EarlyCommandBrief, EarlyCommandDetailed)
def help = Command.make(HelpCommand, helpBrief, helpDetailed)(helpParser)
def helpParser(s: State) =
{
val h = (Help.empty /: s.definedCommands) { (a, b) =>
a ++
(try b.help(s) catch { case NonFatal(ex) => Help.empty })
}
val helpCommands = h.detail.keySet
val spacedArg = singleArgument(helpCommands).?
applyEffect(spacedArg)(runHelp(s, h))
}
def runHelp(s: State, h: Help)(arg: Option[String]): State =
{
val message = try
Help.message(h, arg)
catch {
case NonFatal(ex) =>
ex.toString
}
System.out.println(message)
s
}
@deprecated("Use Help.moreMessage", "0.13.0")
def moreHelp(more: Seq[String]): String = Help.moreMessage(more)
def completionsCommand = Command.make(CompletionsCommand, CompletionsBrief, CompletionsDetailed)(completionsParser)
def completionsParser(state: State) =
{
val notQuoted = (NotQuoted ~ any.*) map { case (nq, s) => (nq +: s).mkString }
val quotedOrUnquotedSingleArgument = Space ~> (StringVerbatim | StringEscapable | notQuoted)
applyEffect(token(quotedOrUnquotedSingleArgument ?? "" examples ("", " ")))(runCompletions(state))
}
def runCompletions(state: State)(input: String): State = {
Parser.completions(state.combinedParser, input, 9).get map {
c => if (c.isEmpty) input else input + c.append
} foreach { c =>
System.out.println("[completions] " + c.replaceAll("\\n", " "))
}
state
}
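// Parses input of the form ";cmd1 ;cmd2 ..." into the individual trimmed command
// strings; each segment must parse with the combined parser or be arbitrary
// non-semicolon text.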
def multiParser(s: State): Parser[Seq[String]] =
{
val nonSemi = token(charClass(_ != ';').+, hide = const(true))
(token(';' ~> OptSpace) flatMap { _ => matched((s.combinedParser & nonSemi) | nonSemi) <~ token(OptSpace) } map (_.trim)).+
}
def multiApplied(s: State) =
Command.applyEffect(multiParser(s))(_ ::: s)
def multi = Command.custom(multiApplied, Help(Multi, MultiBrief, MultiDetailed))
lazy val otherCommandParser = (s: State) => token(OptSpace ~> combinedLax(s, NotSpaceClass ~ any.*))
def combinedLax(s: State, any: Parser[_]): Parser[String] =
matched(s.combinedParser | token(any, hide = const(true)))
def ifLast = Command(IfLast, Help.more(IfLast, IfLastDetailed))(otherCommandParser) { (s, arg) =>
if (s.remainingCommands.isEmpty) arg :: s else s
}
def append = Command(AppendCommand, Help.more(AppendCommand, AppendLastDetailed))(otherCommandParser) { (s, arg) =>
s.copy(remainingCommands = s.remainingCommands :+ arg)
}
def setOnFailure = Command(OnFailure, Help.more(OnFailure, OnFailureDetailed))(otherCommandParser) { (s, arg) =>
s.copy(onFailure = Some(arg))
}
private[sbt] def compatCommands = Seq(
Command.command(Compat.ClearOnFailure) { s =>
s.log.warn(Compat.ClearOnFailureDeprecated)
s.copy(onFailure = None)
},
Command.arb(s => token(Compat.OnFailure, hide = const(true)).flatMap(x => otherCommandParser(s))) { (s, arg) =>
s.log.warn(Compat.OnFailureDeprecated)
s.copy(onFailure = Some(arg))
},
Command.command(Compat.FailureWall) { s =>
s.log.warn(Compat.FailureWallDeprecated)
s
}
)
def clearOnFailure = Command.command(ClearOnFailure)(s => s.copy(onFailure = None))
def stashOnFailure = Command.command(StashOnFailure)(s => s.copy(onFailure = None).update(OnFailureStack)(s.onFailure :: _.toList.flatten))
def popOnFailure = Command.command(PopOnFailure) { s =>
val stack = s.get(OnFailureStack).getOrElse(Nil)
val updated = if (stack.isEmpty) s.remove(OnFailureStack) else s.put(OnFailureStack, stack.tail)
updated.copy(onFailure = stack.headOption.flatten)
}
def reboot = Command(RebootCommand, Help.more(RebootCommand, RebootDetailed))(rebootParser) { (s, full) =>
s.reboot(full)
}
def rebootParser(s: State) = token(Space ~> "full" ^^^ true) ?? false
def call = Command(ApplyCommand, Help.more(ApplyCommand, ApplyDetailed))(_ => callParser) {
case (state, (cp, args)) =>
val parentLoader = getClass.getClassLoader
state.log.info("Applying State transformations " + args.mkString(", ") + (if (cp.isEmpty) "" else " from " + cp.mkString(File.pathSeparator)))
val loader = if (cp.isEmpty) parentLoader else toLoader(cp.map(f => new File(f)), parentLoader)
val loaded = args.map(arg => ModuleUtilities.getObject(arg, loader).asInstanceOf[State => State])
(state /: loaded)((s, obj) => obj(s))
}
def callParser: Parser[(Seq[String], Seq[String])] = token(Space) ~> ((classpathOptionParser ?? Nil) ~ rep1sep(className, token(Space)))
private[this] def className: Parser[String] =
{
val base = StringBasic & not('-' ~> any.*, "Class name cannot start with '-'.")
def single(s: String) = Completions.single(Completion.displayOnly(s))
val compl = TokenCompletions.fixed((seen, level) => if (seen.startsWith("-")) Completions.nil else single("<class name>"))
token(base, compl)
}
private[this] def classpathOptionParser: Parser[Seq[String]] =
token(("-cp" | "-classpath") ~> Space) ~> classpathStrings <~ token(Space)
private[this] def classpathStrings: Parser[Seq[String]] =
token(StringBasic.map(s => IO.pathSplit(s).toSeq), "<classpath>")
def exit = Command.command(TerminateAction, exitBrief, exitBrief)(_ exit true)
def continuous =
Command(ContinuousExecutePrefix, continuousBriefHelp, continuousDetail)(otherCommandParser) { (s, arg) =>
withAttribute(s, Watched.Configuration, "Continuous execution not configured.") { w =>
val repeat = ContinuousExecutePrefix + (if (arg.startsWith(" ")) arg else " " + arg)
Watched.executeContinuously(w, s, arg, repeat)
}
}
def history = Command.custom(historyParser, BasicCommandStrings.historyHelp)
def historyParser(s: State): Parser[() => State] =
Command.applyEffect(HistoryCommands.actionParser) { histFun =>
val logError = (msg: String) => s.log.error(msg)
val hp = s get historyPath getOrElse None
val lines = hp.toList.flatMap(p => IO.readLines(p)).toIndexedSeq
histFun(complete.History(lines, hp, logError)) match {
case Some(commands) =>
commands foreach println //printing is more appropriate than logging
(commands ::: s).continue
case None => s.fail
}
}
def shell = Command.command(Shell, Help.more(Shell, ShellDetailed)) { s =>
val history = (s get historyPath) getOrElse Some(new File(s.baseDir, ".history"))
val prompt = (s get shellPrompt) match { case Some(pf) => pf(s); case None => "> " }
val reader = new FullReader(history, s.combinedParser)
val line = reader.readLine(prompt)
line match {
case Some(line) =>
val newState = s.copy(onFailure = Some(Shell), remainingCommands = line +: Shell +: s.remainingCommands).setInteractive(true)
if (line.trim.isEmpty) newState else newState.clearGlobalLog
case None => s.setInteractive(false)
}
}
def read = Command.make(ReadCommand, Help.more(ReadCommand, ReadDetailed))(s => applyEffect(readParser(s))(doRead(s)))
def readParser(s: State) =
{
val files = (token(Space) ~> fileParser(s.baseDir)).+
val portAndSuccess = token(OptSpace) ~> Port
portAndSuccess || files
}
def doRead(s: State)(arg: Either[Int, Seq[File]]): State =
arg match {
case Left(portAndSuccess) =>
val port = math.abs(portAndSuccess)
val previousSuccess = portAndSuccess >= 0
readMessage(port, previousSuccess) match {
case Some(message) => (message :: (ReadCommand + " " + port) :: s).copy(onFailure = Some(ReadCommand + " " + (-port)))
case None =>
System.err.println("Connection closed.")
s.fail
}
case Right(from) =>
val notFound = notReadable(from)
if (notFound.isEmpty)
readLines(from) ::: s // this means that all commands from all files are loaded, parsed, and inserted before any are executed
else {
s.log.error("Command file(s) not readable: \\n\\t" + notFound.mkString("\\n\\t"))
s
}
}
private def readMessage(port: Int, previousSuccess: Boolean): Option[String] =
{
// split into two connections because this first connection ends the previous communication
xsbt.IPC.client(port) { _.send(previousSuccess.toString) }
// and this second connection starts the next communication
xsbt.IPC.client(port) { ipc =>
val message = ipc.receive
if (message eq null) None else Some(message)
}
}
def alias = Command.make(AliasCommand, Help.more(AliasCommand, AliasDetailed)) { s =>
val name = token(OpOrID.examples(aliasNames(s): _*))
val assign = token(OptSpace ~ '=' ~ OptSpace)
val sfree = removeAliases(s)
val to = matched(sfree.combinedParser, partial = true).failOnException | any.+.string
val base = (OptSpace ~> (name ~ (assign ~> to.?).?).?)
applyEffect(base)(t => runAlias(s, t))
}
def runAlias(s: State, args: Option[(String, Option[Option[String]])]): State =
args match {
case None =>
printAliases(s); s
case Some(x ~ None) if !x.isEmpty =>
printAlias(s, x.trim); s
case Some(name ~ Some(None)) => removeAlias(s, name.trim)
case Some(name ~ Some(Some(value))) => addAlias(s, name.trim, value.trim)
}
def addAlias(s: State, name: String, value: String): State =
if (Command validID name) {
val removed = removeAlias(s, name)
if (value.isEmpty) removed else addAlias0(removed, name, value)
} else {
System.err.println("Invalid alias name '" + name + "'.")
s.fail
}
private[this] def addAlias0(s: State, name: String, value: String): State =
s.copy(definedCommands = newAlias(name, value) +: s.definedCommands)
def removeAliases(s: State): State = removeTagged(s, CommandAliasKey)
def removeAlias(s: State, name: String): State = s.copy(definedCommands = s.definedCommands.filter(c => !isAliasNamed(name, c)))
def removeTagged(s: State, tag: AttributeKey[_]): State = s.copy(definedCommands = removeTagged(s.definedCommands, tag))
def removeTagged(as: Seq[Command], tag: AttributeKey[_]): Seq[Command] = as.filter(c => !(c.tags contains tag))
def isAliasNamed(name: String, c: Command): Boolean = isNamed(name, getAlias(c))
def isNamed(name: String, alias: Option[(String, String)]): Boolean = alias match { case None => false; case Some((n, _)) => name == n }
def getAlias(c: Command): Option[(String, String)] = c.tags get CommandAliasKey
def printAlias(s: State, name: String): Unit = printAliases(aliases(s, (n, v) => n == name))
def printAliases(s: State): Unit = printAliases(allAliases(s))
def printAliases(as: Seq[(String, String)]): Unit =
for ((name, value) <- as)
println("\\t" + name + " = " + value)
def aliasNames(s: State): Seq[String] = allAliases(s).map(_._1)
def allAliases(s: State): Seq[(String, String)] = aliases(s, (n, v) => true)
def aliases(s: State, pred: (String, String) => Boolean): Seq[(String, String)] =
s.definedCommands.flatMap(c => getAlias(c).filter(tupled(pred)))
def newAlias(name: String, value: String): Command =
Command.make(name, (name, "'" + value + "'"), "Alias of '" + value + "'")(aliasBody(name, value)).tag(CommandAliasKey, (name, value))
def aliasBody(name: String, value: String)(state: State): Parser[() => State] = {
val aliasRemoved = removeAlias(state, name)
// apply the alias value to the commands of `state` except for the alias to avoid recursion (#933)
val partiallyApplied = Parser(Command.combine(aliasRemoved.definedCommands)(aliasRemoved))(value)
val arg = matched(partiallyApplied & (success(()) | (SpaceClass ~ any.*)))
// by scheduling the expanded alias instead of directly executing, we get errors on the expanded string (#598)
arg.map(str => () => (value + str) :: state)
}
def delegateToAlias(name: String, orElse: Parser[() => State])(state: State): Parser[() => State] =
aliases(state, (nme, _) => nme == name).headOption match {
case None => orElse
case Some((n, v)) => aliasBody(n, v)(state)
}
val CommandAliasKey = AttributeKey[(String, String)]("is-command-alias", "Internal: marker for Commands created as aliases for another command.")
}
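// Illustrative alias expansion (not from the original source): after
// `alias full = ;clean ;compile`, entering `full test` schedules the expanded
// string ";clean ;compile test" instead of executing it directly, so parse errors
// are reported against the expanded command line (see the #598 note in aliasBody).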
|
jasonchaffee/sbt
|
main/command/src/main/scala/sbt/BasicCommands.scala
|
Scala
|
bsd-3-clause
| 13,448 |
package unfiltered.request
import org.specs2.mutable._
object JsonpSpec extends Specification with unfiltered.specs2.jetty.Served {
import unfiltered.response._
import unfiltered.request._
import unfiltered.request.{Path => UFPath}
class TestPlan extends unfiltered.filter.Plan {
def intent = {
case GET(UFPath("/jsonp") & Jsonp(callback)) => ResponseString(callback.wrap("[42]"))
case GET(UFPath("/jsonp.json") & Jsonp(callback)) => ResponseString(callback.wrap("[42]"))
case GET(UFPath("/jsonp/optional") & Jsonp.Optional(callback)) => ResponseString(callback.wrap("[42]"))
case GET(UFPath("/jsonp/lift-json") & Jsonp(callback)) => callback respond {
import org.json4s._
JArray(JInt(42) :: Nil)
}
case GET(UFPath("/jsonp/lift-json/optional") & Jsonp.Optional(callback)) => callback respond {
import org.json4s.JsonDSL._
"answer" -> Seq(42)
}
case _ => ResponseString("bad req")
}
}
def setup = { _.plan(new TestPlan) }
"Jsonp should" should {
"match an text/javascript accepts request with callback, wrapping response body in callback" in {
val resp = http(req(host / "jsonp" <<? Map("callback" -> "onResp")) <:< Map("Accept" -> "text/javascript")).as_string
resp must_== "onResp([42])"
}
"match an */* accepts request with path extension and callback, wrapping response body in callback" in {
val resp = http(req(host / "jsonp.json" <<? Map("callback" -> "onResp")) <:< Map("Accept" -> "*/*")).as_string
resp must_== "onResp([42])"
}
"not match an text/javascript accepts request without a callback" in {
val resp = http(req(host / "jsonp") <:< Map("Accept" -> "text/javascript")).as_string
resp must_== "bad req"
}
"optionally match an text/javascript accepts request with callback, wrapping response body in callback" in {
val resp = http(req(host / "jsonp" / "optional" <<? Map("callback" -> "onResp")) <:< Map("Accept" -> "text/javascript")).as_string
resp must_== "onResp([42])"
}
"optionaly match an application/json accepts request without a callback, return unwrapped response body" in {
val resp = http(req(host / "jsonp" / "optional") <:< Map("Accept" -> "application/json")).as_string
resp must_== "[42]"
}
"produce a jsonp response, wrapping response body in callback" in {
val resp = http(req(host / "jsonp" / "lift-json" <<? Map("callback" -> "onResp"))
<:< Map("Accept" -> "text/javascript"))
resp.as_string must_== """onResp([42])"""
val headers = resp.headers
headers("content-type") must_==(List("text/javascript; charset=utf-8"))
}
"optionally produce a json response when callback is missing" in {
val resp = http(req(host / "jsonp" / "lift-json" / "optional")
<:< Map("Accept" -> "application/json"))
val headers = resp.headers
resp.as_string must_== """{"answer":[42]}"""
headers("content-type") must_==(List("application/json;charset=utf-8"))
}
}
}
|
hamnis/unfiltered
|
json4s/src/test/scala/JsonpSpec.scala
|
Scala
|
mit
| 3,073 |
package collins.models
import play.api.libs.json.Json
import play.api.libs.json.JsObject
import play.api.libs.json.JsNumber
import play.api.libs.json.JsString
import org.squeryl.PrimitiveTypeMode._
import org.squeryl.Schema
import org.squeryl.Table
import collins.solr.SolrKey
import collins.solr.SolrSingleValue
import collins.solr.SolrIntValue
import collins.solr.SolrDoubleValue
import collins.solr.SolrBooleanValue
import collins.solr.SolrStringValue
import collins.models.shared.ValidatedEntity
import collins.models.shared.AnormAdapter
case class AssetMeta(
name: String,
priority: Int,
label: String,
description: String,
id: Long = 0,
value_type: Int = AssetMeta.ValueType.String.id
) extends ValidatedEntity[Long]
{
override def validate() {
require(name != null && name.toUpperCase == name && name.size > 0, "Name must be all upper case, length > 0")
require(AssetMeta.isValidName(name), "Name must be all upper case, alpha numeric (and hyphens): %s".format(name))
require(description != null && description.length > 0, "Need a description")
require(AssetMeta.ValueType.valIds(value_type), "Invalid value_type, must be one of [%s]".format(AssetMeta.ValueType.valStrings.mkString(",")))
}
override def asJson: String = {
Json.stringify(JsObject(Seq(
"ID" -> JsNumber(id),
"NAME" -> JsString(name),
"PRIORITY" -> JsNumber(priority),
"LABEL" -> JsString(label),
"DESCRIPTION" -> JsString(description)
)))
}
def getId(): Long = id
def getValueType(): AssetMeta.ValueType = AssetMeta.ValueType(value_type)
def valueType = getValueType
def getSolrKey(): SolrKey = SolrKey(name, valueType, true, true, false)
def validateValue(value: String): Boolean = typeStringValue(value).isDefined
def typeStringValue(value: String): Option[SolrSingleValue] = getValueType() match {
case AssetMeta.ValueType.Integer => try {
Some(SolrIntValue(Integer.parseInt(value)))
} catch {
case _: Throwable => None
}
case AssetMeta.ValueType.Boolean => try {
Some(SolrBooleanValue((new Truthy(value)).isTruthy))
} catch {
case _: Throwable => None
}
case AssetMeta.ValueType.Double => try {
Some(SolrDoubleValue(java.lang.Double.parseDouble(value)))
} catch {
case _: Throwable => None
}
case _ => Some(SolrStringValue(value))
}
}
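// Illustrative behavior of validateValue/typeStringValue (hypothetical meta, not
// seed data from this file):
//
//   val units = AssetMeta("RACK_UNITS", -1, "Rack Units", "rack units",
//     value_type = AssetMeta.ValueType.Integer.id)
//   units.validateValue("42")  // true  -> Some(SolrIntValue(42))
//   units.validateValue("abc") // false -> Integer.parseInt throws, yielding None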
object AssetMeta extends Schema with AnormAdapter[AssetMeta] {
private[this] val NameR = """[A-Za-z0-9\-_]+""".r.pattern.matcher(_)
override val tableDef = table[AssetMeta]("asset_meta")
on(tableDef)(a => declare(
a.id is(autoIncremented,primaryKey),
a.name is(unique),
a.priority is(indexed)
))
override def delete(a: AssetMeta): Int = inTransaction {
afterDeleteCallback(a) {
tableDef.deleteWhere(p => p.id === a.id)
}
}
def isValidName(name: String): Boolean = {
name != null && name.nonEmpty && NameR(name).matches
}
def findAll(): Seq[AssetMeta] = inTransaction {
from(tableDef)(s => select(s)).toList
}
def findById(id: Long) = inTransaction {
tableDef.lookup(id)
}
def findOrCreateFromName(name: String, valueType: ValueType = ValueType.String): AssetMeta = findByName(name).getOrElse {
create(AssetMeta(
name = name.toUpperCase,
priority = -1,
label = name.toLowerCase.capitalize,
description = name,
value_type = valueType.id
))
}
override def get(a: AssetMeta) = findById(a.id).get
def findByName(name: String): Option[AssetMeta] = inTransaction {
tableDef.where(a =>
a.name.toUpperCase === name.toUpperCase
).headOption
}
def getViewable(): Seq[AssetMeta] = inTransaction {
from(tableDef)(a =>
where(a.priority gt -1)
select(a)
orderBy(a.priority asc)
).toList
}
type ValueType = ValueType.Value
object ValueType extends Enumeration {
val String = Value(1,"STRING")
val Integer = Value(2,"INTEGER")
val Double = Value(3,"DOUBLE")
val Boolean = Value(4,"BOOLEAN")
def valStrings = values.map{_.toString}
def valIds = values.map{_.id}
val postFix = Map[ValueType,String](
String -> "_meta_s",
Integer -> "_meta_i",
Double -> "_meta_d",
Boolean -> "_meta_b"
)
}
// DO NOT ADD ANYTHING TO THIS
// DEPRECATED
type Enum = Enum.Value
object Enum extends Enumeration(1) {
val ServiceTag = Value(1, "SERVICE_TAG")
val ChassisTag = Value(2, "CHASSIS_TAG")
val RackPosition = Value(3, "RACK_POSITION")
val PowerPort = Value(4, "POWER_PORT")
    //val SwitchPort = Value(5, "SWITCH_PORT") // deprecated; superseded by LldpPortIdValue
val CpuCount = Value(6, "CPU_COUNT")
val CpuCores = Value(7, "CPU_CORES")
val CpuThreads = Value(8, "CPU_THREADS")
val CpuSpeedGhz = Value(9, "CPU_SPEED_GHZ")
val CpuDescription = Value(10, "CPU_DESCRIPTION")
val MemorySizeBytes = Value(11, "MEMORY_SIZE_BYTES")
val MemoryDescription = Value(12, "MEMORY_DESCRIPTION")
val MemorySizeTotal = Value(13, "MEMORY_SIZE_TOTAL")
val MemoryBanksTotal = Value(14, "MEMORY_BANKS_TOTAL")
val NicSpeed = Value(15, "NIC_SPEED") // in bits
val MacAddress = Value(16, "MAC_ADDRESS")
val NicDescription = Value(17, "NIC_DESCRIPTION")
val DiskSizeBytes = Value(18, "DISK_SIZE_BYTES")
val DiskType = Value(19, "DISK_TYPE")
val DiskDescription = Value(20, "DISK_DESCRIPTION")
val DiskStorageTotal = Value(21, "DISK_STORAGE_TOTAL")
val LldpInterfaceName = Value(22, "LLDP_INTERFACE_NAME")
val LldpChassisName = Value(23, "LLDP_CHASSIS_NAME")
val LldpChassisIdType = Value(24, "LLDP_CHASSIS_ID_TYPE")
val LldpChassisIdValue = Value(25, "LLDP_CHASSIS_ID_VALUE")
val LldpChassisDescription = Value(26, "LLDP_CHASSIS_DESCRIPTION")
val LldpPortIdType = Value(27, "LLDP_PORT_ID_TYPE")
val LldpPortIdValue = Value(28, "LLDP_PORT_ID_VALUE")
val LldpPortDescription = Value(29, "LLDP_PORT_DESCRIPTION")
val LldpVlanId = Value(30, "LLDP_VLAN_ID")
val LldpVlanName = Value(31, "LLDP_VLAN_NAME")
// DO NOT USE - Deprecated
val NicName = Value(32, "INTERFACE_NAME")
// DO NOT USE - Deprecated
val NicAddress = Value(33, "INTERFACE_ADDRESS")
}
// Post enum fields, enum is not safe to extend with new values
object DynamicEnum {
val BaseDescription = AssetMeta.findOrCreateFromName("BASE_DESCRIPTION")
val BaseProduct = AssetMeta.findOrCreateFromName("BASE_PRODUCT")
val BaseVendor = AssetMeta.findOrCreateFromName("BASE_VENDOR")
val BaseSerial = AssetMeta.findOrCreateFromName("BASE_SERIAL")
def getValues(): Seq[AssetMeta] = {
Seq(BaseDescription,BaseProduct,BaseVendor,BaseSerial)
}
}
}
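// Hypothetical usage sketch (not part of the original file): how a meta key
// could be created and a raw value validated against its declared type. The
// key name "RACK_UNITS" is illustrative only.
object AssetMetaUsageExample {
  def demo(): Unit = {
    val meta = AssetMeta.findOrCreateFromName("RACK_UNITS", AssetMeta.ValueType.Integer)
    assert(meta.validateValue("42"))          // "42" parses as an Integer-typed value
    assert(!meta.validateValue("forty-two"))  // non-numeric input fails validation
  }
}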
|
funzoneq/collins
|
app/collins/models/AssetMeta.scala
|
Scala
|
apache-2.0
| 6,751 |
/* Copyright 2009-2016 EPFL, Lausanne */
import leon.lang._
object Termination {
abstract class List
case class Cons(head: Int, tail: List) extends List
case class Nil() extends List
def f1(list: List) : Int = f2(list)
def f2(list: List) : Int = list match {
case Cons(head, tail) => f1(tail)
case Nil() => 0
}
def f3(list: List, b: Boolean) : Int = if(b) f4(list) else f1(list)
def f4(list: List) : Int = list match {
case Cons(head, tail) => f3(tail, true)
case Nil() => 0
}
}
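// Illustrative note (not part of the original test): termination holds because
// f2 and f4 recurse on the tail, so the list shrinks on every cycle. A measure
// such as
//   def size(l: List): BigInt = l match { case Cons(_, t) => 1 + size(t); case Nil() => 0 }
// decreases across each round of the f1/f2 and f3/f4 mutual recursions.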
// vim: set ts=4 sw=4 et:
|
regb/leon
|
src/test/resources/regression/termination/valid/Termination_passing1.scala
|
Scala
|
gpl-3.0
| 546 |
/*
* Copyright 2016-2017 original author or authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
package tap.nlp.old.openNlp
import tap.UnitSpec
/**
* Created by [email protected] on 12/7/17.
*/
class ParserSpec extends UnitSpec {
val input = "My first lecture started 13 March 2015. This is another sentence."
val sentences = List("My first lecture started 13 March 2015.","This is another sentence.")
val tokens = List("My", "first", "lecture", "started", "13", "March", "2015", ".")
val posTags = List("PRP$", "JJ", "NN", "VBD", "CD", "NNP", "CD", ".")
val constTree = "(TOP (S (NP (PRP$ My) (JJ first) (NN lecture)) (VP (VBD started) (NP (CD 13) (NNP March) (CD 2015))) (. .)))"
"sentence" should "parse text into sentences" in {
val res = Parsers.sentence(input)
assert(res==sentences)
}
"token" should "parse a sentence into tokens" in {
val res = Parsers.token(sentences.head)
assert(res==tokens)
}
"postag" should "return postags from a list of tokens" in {
val res = Parsers.posTag(tokens)
assert(res==posTags)
}
"parseTree" should "parse a sentence into tree output as a string" in {
val res = Parsers.parseTree(tokens.mkString(" "))
println(res)
assert(res==constTree)
}
}
*/
|
uts-cic/tap
|
src/test/scala/tap/nlp/old/openNlp/ParsersSpec.scala
|
Scala
|
apache-2.0
| 1,790 |
/*
* Copyright (C) 2016 University of Basel, Graphics and Vision Research Group
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package scalismo.ui.view.dialog
import java.io.{PrintWriter, StringWriter}
import javax.swing.{BorderFactory, JComponent, UIManager}
import scalismo.ui.resources.icons.ScalableIcon
import scalismo.ui.view.ScalismoFrame
import scalismo.ui.view.util.{MultiLineLabel, ScalableUI}
import scala.swing._
import scala.swing.event.Key
object ErrorDialog {
def show(exception: Throwable,
title: String = "Error",
additionalMessage: String = "",
iconOverride: Option[ScalableIcon] = None)(implicit frame: ScalismoFrame): Unit = {
val dialog = new ErrorDialog(exception, title, additionalMessage, iconOverride)
dialog.pack()
dialog.centerOnScreen()
dialog.okButton.requestFocus()
dialog.visible = true
}
}
class ErrorDialog(exception: Throwable, title: String, additionalMessage: String, iconOverride: Option[ScalableIcon])(
implicit
val frame: ScalismoFrame
) extends Dialog(frame) {
modal = true
peer.setTitle(title)
val main = new BorderPanel
private val icon = {
val fallback = UIManager.getIcon("OptionPane.errorIcon")
iconOverride.map(_.resize(fallback.getIconWidth, fallback.getIconHeight)).getOrElse(fallback)
}
private val iconLabel = new Label("", icon, Alignment.Center)
private val placeHolderMessageLabel = {
val textOption = Option(exception.getMessage)
val text = textOption.getOrElse(exception.getClass.getName)
new Label(text, icon, Alignment.Right)
}
private val placeHolderAdditionalLabelOption: Option[Label] = additionalMessage match {
case null => None
case s if s.trim.length == 0 => None
case text => Some(new Label(text, icon, Alignment.Right))
}
private val messageLabel = {
val fullText = placeHolderAdditionalLabelOption match {
case Some(label) => s"${label.text}\n\n${placeHolderMessageLabel.text}"
case None => ""
}
new MultiLineLabel(fullText)
}
private val messagePanel = new BorderPanel {
layout(iconLabel) = BorderPanel.Position.West
layout(messageLabel) = BorderPanel.Position.Center
}
private val stackTrace = {
val trace = new StringWriter()
exception.printStackTrace(new PrintWriter(trace))
val area = new TextArea(trace.toString) {
rows = 25
columns = 80
editable = false
}
new ScrollPane(area)
}
private val placeholder = Component.wrap(new JComponent {
override def getPreferredSize: Dimension = {
val size = new Dimension
size.height = 0
size.width = placeHolderMessageLabel.preferredSize.width
placeHolderAdditionalLabelOption.foreach { l =>
size.width = Math.max(size.width, l.preferredSize.width)
}
size.width += iconLabel.preferredSize.width
size.width = Math.min(stackTrace.preferredSize.width, size.width)
size
}
})
val detailsButton: Button = new Button(new Action("Show Details") {
mnemonic = Key.D.id
override def apply(): Unit = {
main.layout(stackTrace) = BorderPanel.Position.Center
detailsButton.visible = false
val dialog = ErrorDialog.this
dialog.pack()
dialog.centerOnScreen()
}
})
val okButton = new Button(new Action("OK") {
mnemonic = Key.O.id
override def apply(): Unit = dispose()
})
private val buttons = new BorderPanel {
layout(okButton) = BorderPanel.Position.East
layout(detailsButton) = BorderPanel.Position.West
}
main.layout(messagePanel) = BorderPanel.Position.North
main.layout(placeholder) = BorderPanel.Position.Center
main.layout(buttons) = BorderPanel.Position.South
main.border = {
val px = ScalableUI.scale(5)
BorderFactory.createEmptyBorder(px, px, px, px)
}
peer.getRootPane.setDefaultButton(okButton.peer)
contents = main
}
|
unibas-gravis/scalismo-ui
|
src/main/scala/scalismo/ui/view/dialog/ErrorDialog.scala
|
Scala
|
gpl-3.0
| 4,557 |
package ohnosequences.scalaguide.test.errors
import org.scalatest.FunSuite
import ohnosequences.scalaguide.errors._
import scalaz._
class ErrorHandlingExamples extends FunSuite {
val notPalindromic = "aaaca"
val notFiveChars = "012345"
test("fail-fast examples") {
val notOk = FailFastWonky(
fiveChars = notFiveChars,
palindrome = notPalindromic
)
assert{ notOk == Failure(MustHaveLengthFive(notFiveChars)) }
info(s"trying to create a Wonky with an error in both params yields the first one: ${notOk}")
}
test("accumulate errors examples") {
val notOk = AccumulativeWonky(
fiveChars = notFiveChars,
palindrome = notPalindromic
)
assert{ notOk == Failure(NonEmptyList(MustHaveLengthFive("012345"), MustBePalindromic("aaaca"))) }
info(s"trying to create a Wonky with an error in both params yields both errors: ${notOk}")
}
}
|
ohnosequences/scala-guide
|
src/test/scala/errors.scala
|
Scala
|
agpl-3.0
| 906 |
package scala.collection
import org.junit.Assert.{ assertThrows => _, _ }
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
import scala.tools.testkit.AssertUtil._
import Seq.empty
@RunWith(classOf[JUnit4])
class IteratorTest {
private def from0 = Iterator.from(0)
@Test def groupedIteratorShouldNotAskForUnneededElement(): Unit = {
var counter = 0
val it = new Iterator[Int] { var i = 0 ; def hasNext = { counter = i; true } ; def next() = { i += 1; i } }
val slidingIt = it sliding 2
slidingIt.next()
assertEquals("Counter should be one, that means we didn't look further than needed", 1, counter)
}
@Test def groupedIteratorIsLazyWhenPadded(): Unit = {
var counter = 0
def it = new Iterator[Int] { var i = 0 ; def hasNext = { counter = i; true } ; def next() = { i += 1; i } }
val slidingIt = it sliding 2 withPadding -1
slidingIt.next()
assertEquals("Counter should be one, that means we didn't look further than needed", 1, counter)
}
@Test def dropDoesNotGrowStack(): Unit = {
def it = new Iterator[Throwable] { def hasNext = true ; def next() = new Throwable }
assertEquals(it.drop(1).next().getStackTrace.length, it.drop(1).drop(1).next().getStackTrace.length)
}
@Test def dropIsChainable(): Unit = {
assertSameElements(1 to 4, Iterator from 0 take 5 drop 1)
assertSameElements(3 to 4, Iterator from 0 take 5 drop 3)
assertSameElements(empty, Iterator from 0 take 5 drop 5)
assertSameElements(empty, Iterator from 0 take 5 drop 10)
assertSameElements(0 to 4, Iterator from 0 take 5 drop 0)
assertSameElements(0 to 4, Iterator from 0 take 5 drop -1)
assertSameElements(2 to 8 by 2, Iterator from 0 take 5 drop 1 map (2 * _))
assertSameElements(2 to 8 by 2, Iterator from 0 take 5 map (2 * _) drop 1)
assertSameElements(3 to 4, Iterator from 0 take 5 drop 1 drop 2)
assertSameElements(3 to 4, Iterator from 0 take 5 drop 2 drop 1)
}
@Test def sliceIsChainable(): Unit = {
assertSameElements(3 to 6, from0.slice(3, 7))
assertSameElements(empty, from0.slice(3, 3))
assertSameElements(0 to 2, from0.slice(-1, 3))
assertSameElements(empty, from0.slice(3, -1))
assertSameElements(6 to 12 by 2, from0.slice(3, 7).map(2 * _))
assertSameElements(6 to 12 by 2, from0.map(2 * _).slice(3, 7))
assertSameElements(4 to 6, from0.slice(3, 7).drop(1))
assertSameElements(4 to 7, from0.drop(1).slice(3, 7))
assertSameElements(4 to 5, from0.slice(3, 7).slice(1, 3))
assertSameElements(4 to 6, from0.slice(3, 7).slice(1, 10))
}
// test/files/run/iterator-concat.scala
@Test def concatIsStackFriendly(): Unit = {
// Create `size` Function0s, each of which evaluates to an Iterator
// which produces 1. Then fold them over ++ to get a single iterator,
// which should sum to "size".
def mk(size: Int): Iterator[Int] = {
//val closures = (1 to size).toList.map(x => (() => Iterator(1)))
//closures.foldLeft(Iterator.empty: Iterator[Int])((res, f) => res ++ f())
List.fill(size)(() => Iterator(1)).foldLeft(Iterator.empty: Iterator[Int])((res, f) => res ++ f())
}
assertEquals(100, mk(100).sum)
assertEquals(1000, mk(1000).sum)
assertEquals(10000, mk(10000).sum)
assertEquals(100000, mk(100000).sum)
}
@Test def from(): Unit = {
val it1 = Iterator.from(-1)
val it2 = Iterator.from(0, -1)
assertEquals(-1, it1.next())
assertEquals(0, it2.next())
}
@Test def range(): Unit = {
assertEquals(5, Iterator.range(0, 10, 2).size)
assertEquals(0, Iterator.range(0, 10, -2).size)
assertEquals(5, Iterator.range(10, 0, -2).size)
assertEquals(0, Iterator.range(10, 0, 2).size)
assertEquals(1, Iterator.range(0, 10, 11).size)
assertEquals(10, Iterator.range(0, 10, 1).size)
assertEquals(10, Iterator.range(10, 0, -1).size)
}
@Test def range3(): Unit = {
val r1 = Iterator.range(0, 10)
assertTrue(r1 contains 5)
assertTrue(r1 contains 6)
assertFalse(r1 contains 4)
val r2a = Iterator.range(0, 10, 2)
assertFalse(r2a contains 5)
val r2b = Iterator.range(0, 10, 2)
assertTrue(r2b contains 6)
val r3 = Iterator.range(0, 10, 11)
assertFalse(r3 contains 5)
assertTrue(r3.isEmpty)
}
@Test def rangeOverflow(): Unit = {
val step = 100000000
val numExpectedSamples = 22
def createIterator = Iterator.range(0, Int.MaxValue, step)
assertEquals(numExpectedSamples, createIterator.count(_ => true))
assertEquals(0, createIterator.min)
assertEquals((numExpectedSamples - 1) * step, createIterator.max)
}
@Test def rangeOverflow2(): Unit = {
val step = (Int.MaxValue / 2) + 1
val numExpectedSamples = 2
def createIterator = Iterator.range(0, Int.MaxValue, step)
assertEquals(numExpectedSamples, createIterator.count(_ => true))
assertEquals(0, createIterator.min)
assertEquals(step, createIterator.max)
}
@Test def rangeOverflow3(): Unit = {
val step = 1000000000
val numExpectedSamples = 5
def createIterator = Iterator.range(Int.MinValue +10,Int.MaxValue - 10, step)
assertEquals(numExpectedSamples, createIterator.count(_ => true))
assertEquals(Int.MinValue + 10, createIterator.min)
assertEquals(Int.MinValue + 10 + (numExpectedSamples - 1) * step, createIterator.max)
}
@Test def rangeUnderflow(): Unit = {
val step = -100000000
val numExpectedSamples = 22
def createIterator = Iterator.range(0, -Int.MaxValue, step)
assertEquals(numExpectedSamples, createIterator.count(_ => true))
assertEquals((numExpectedSamples - 1) * step, createIterator.min)
assertEquals(0, createIterator.max)
}
@Test def rangeUnderflow2(): Unit = {
val step = -(Int.MaxValue / 2) - 1
val numExpectedSamples = 2
def createIterator = Iterator.range(0, -Int.MaxValue, step)
assertEquals(numExpectedSamples, createIterator.count(_ => true))
assertEquals(step, createIterator.min)
assertEquals(0, createIterator.max)
}
@Test def rangeUnderflow3(): Unit = {
val step = -1000000000
val numExpectedSamples = 5
def createIterator = Iterator.range(Int.MaxValue -10,Int.MinValue + 10,step)
assertEquals(numExpectedSamples, createIterator.count(_ => true))
assertEquals(Int.MaxValue - 10 + (numExpectedSamples - 1) * step, createIterator.min)
assertEquals(Int.MaxValue - 10, createIterator.max)
}
@Test def take(): Unit = {
assertEquals(10, (Iterator from 0 take 10).size)
}
@Test def foreach(): Unit = {
val it1 = Iterator.from(0) take 20
var n = 0
it1 foreach { n += _ }
assertEquals(190, n)
}
// ticket #429
@Test def fromArray(): Unit = {
val a = List(1, 2, 3, 4).toArray
    val xs0 = a.iterator.toList
val xs1 = a.slice(0, 1).iterator
val xs2 = a.slice(0, 2).iterator
val xs3 = a.slice(0, 3).iterator
val xs4 = a.slice(0, 4).iterator
assertEquals(14, xs0.size + xs1.size + xs2.size + xs3.size + xs4.size)
}
@Test def toSeq(): Unit = {
assertEquals("1x2x3x4x5", List(1, 2, 3, 4, 5).iterator.mkString("x"))
}
@Test def indexOf(): Unit = {
assertEquals(3, List(1, 2, 3, 4, 5).iterator.indexOf(4))
assertEquals(-1, List(1, 2, 3, 4, 5).iterator.indexOf(16))
}
@Test def indexWhere(): Unit = {
assertEquals(3, List(1, 2, 3, 4, 5).iterator.indexWhere { x: Int => x >= 4 })
assertEquals(-1, List(1, 2, 3, 4, 5).iterator.indexWhere { x: Int => x >= 16 })
}
@Test def indexOfFrom(): Unit = {
assertEquals(1, List(1, 2, 3, 4, 5).iterator.indexOf(2, 0))
assertEquals(1, List(1, 2, 3, 4, 5).iterator.indexOf(2, 1))
assertEquals(-1, List(1, 2, 3, 4, 5).iterator.indexOf(2, 2))
assertEquals(4, List(1, 2, 3, 2, 1).iterator.indexOf(1, 1))
assertEquals(1, List(1, 2, 3, 2, 1).iterator.indexOf(2, 1))
}
@Test def indexWhereFrom(): Unit = {
assertEquals(1, List(1, 2, 3, 4, 5).iterator.indexWhere(_ == 2, 0))
assertEquals(1, List(1, 2, 3, 4, 5).iterator.indexWhere(_ == 2, 1))
assertEquals(-1, List(1, 2, 3, 4, 5).iterator.indexWhere(_ == 2, 2))
assertEquals(4, List(1, 2, 3, 2, 1).iterator.indexWhere(_ < 2, 1))
assertEquals(1, List(1, 2, 3, 2, 1).iterator.indexWhere(_ <= 2, 1))
}
// iterator-iterate-lazy.scala
// was java.lang.UnsupportedOperationException: tail of empty list
@Test def iterateIsSufficientlyLazy(): Unit = {
//Iterator.iterate((1 to 5).toList)(_.tail).takeWhile(_.nonEmpty).toList // suffices
Iterator.iterate((1 to 5).toList)(_.tail).takeWhile(_.nonEmpty).map(_.head).toList
}
@Test def lazyListIsLazy(): Unit = {
val results = mutable.ListBuffer.empty[Int]
def mkIterator = Range.inclusive(1, 5).iterator map (x => { results += x ; x })
def mkInfinite = Iterator continually { results += 1 ; 1 }
LazyList.from(mkIterator): Unit
LazyList.from(mkInfinite): Unit
// back and forth without slipping into nontermination.
results += LazyList.from(1).iterator.drop(10).to(LazyList).drop(10).iterator.next()
assertTrue(List(21).sameElements(results))
}
// scala/bug#3516
@deprecated("Tests deprecated Stream", since="2.13")
@Test def toStreamIsSufficientlyLazy(): Unit = {
val results = collection.mutable.ListBuffer.empty[Int]
def mkIterator = (1 to 5).iterator map (x => { results += x ; x })
def mkInfinite = Iterator continually { results += 1 ; 1 }
// Stream is strict in its head so we should see 1 from each of them.
mkIterator.toStream: Unit
mkInfinite.toStream: Unit
// back and forth without slipping into nontermination.
results += (Stream from 1).toIterator.drop(10).toStream.drop(10).toIterator.next()
assertSameElements(List(1,1,21), results)
}
// scala/bug#8552
@Test def indexOfShouldWorkForTwoParams(): Unit = {
assertEquals(1, List(1, 2, 3).iterator.indexOf(2, 0))
assertEquals(-1, List(5 -> 0).iterator.indexOf(5, 0))
assertEquals(0, List(5 -> 0).iterator.indexOf((5, 0)))
assertEquals(-1, List(5 -> 0, 9 -> 2, 0 -> 3).iterator.indexOf(9, 2))
assertEquals(1, List(5 -> 0, 9 -> 2, 0 -> 3).iterator.indexOf(9 -> 2))
}
// scala/bug#9332
@Test def spanExhaustsLeadingIterator(): Unit = {
def it = Iterator.iterate(0)(_ + 1).take(6)
val (x, y) = it.span(_ != 1)
val z = x.toList
assertEquals(1, z.size)
assertFalse(x.hasNext)
assertEquals(1, y.next())
assertFalse(x.hasNext) // was true, after advancing underlying iterator
}
// scala/bug#9913
@Test def `span leading iterator finishes at state -1`(): Unit = {
val (yes, no) = Iterator(1, 2, 3).span(_ => true)
assertFalse(no.hasNext)
assertTrue(yes.hasNext)
}
// scala/bug#9623
@Test def noExcessiveHasNextInJoinIterator(): Unit = {
var counter = 0
val exp = List(1,2,3,1,2,3)
def it: Iterator[Int] = new Iterator[Int] {
val parent = List(1,2,3).iterator
def next(): Int = parent.next()
def hasNext: Boolean = { counter += 1; parent.hasNext }
}
// Iterate separately
val res = new mutable.ArrayBuffer[Int]
it.foreach(res += _)
it.foreach(res += _)
assertSameElements(exp, res)
assertEquals(8, counter)
// JoinIterator
counter = 0
res.clear()
(it ++ it).foreach(res += _)
assertSameElements(exp, res)
assertEquals(8, counter) // was 17
// ConcatIterator
counter = 0
res.clear()
(Iterator.empty ++ it ++ it).foreach(res += _)
assertSameElements(exp, res)
assertEquals(8, counter) // was 14
}
// scala/bug#9691
@Test def bufferedHeadOptionReturnsValueWithHeadOrNone(): Unit = {
// Checks BufferedIterator returns Some(value) when there is a value
val validHeadOption = List(1,2,3).iterator.buffered.headOption
assertEquals(Some(1), validHeadOption)
// Checks BufferedIterator returns None when there is no value
val invalidHeadOption = List(1,2,3).iterator.drop(10).buffered.headOption
assertEquals(None: Option[Int], invalidHeadOption)
// Checks BufferedIterator returns Some(value) in the last position with a value
val validHeadOptionAtTail = List(1,2,3).iterator.drop(2).buffered.headOption
assertEquals(Some(3), validHeadOptionAtTail)
// Checks BufferedIterator returns None at the first position without a value
val invalidHeadOptionOnePastTail = List(1,2,3).iterator.drop(3).buffered.headOption
assertEquals(None, invalidHeadOptionOnePastTail)
// Checks BufferedIterator returns Some(null) if the next value is null.
val nullHandingList = List(null, "yellow").iterator.buffered.headOption
assertEquals(Some(null), nullHandingList)
// Checks that BufferedIterator is idempotent. That the head is not
// changed by its invocation, nor the headOption by the next call to head.
val it = List(1,2,3).iterator.buffered
val v1 = it.head
val v2 = it.headOption
val v3 = it.head
val v4 = it.headOption
assertEquals(v1, v3)
assertEquals(v2, v4)
assertEquals(Some(v1), v2)
}
// scala/bug#11153
@Test def handleExhaustedConcatSubIterator(): Unit = {
val it = Iterator.empty ++ Iterator.empty
// exhaust and clear internal state
it.hasNext
val concat = Iterator.empty ++ it
while (concat.hasNext) concat.next()
}
@Test
def hasCorrectDistinct(): Unit = {
val result = List(1, 1, 2, 3, 3, 3, 4, 5, 5).iterator.distinct
assertTrue(result.hasNext)
assertEquals(1, result.next())
assertTrue(result.hasNext)
assertEquals(2, result.next())
assertTrue(result.hasNext)
assertEquals(3, result.next())
assertTrue(result.hasNext)
assertEquals(4, result.next())
assertTrue(result.hasNext)
assertEquals(5, result.next())
assertFalse(result.hasNext)
}
@Test
def hasCorrectDistinctBy(): Unit = {
val result = List("a", "aa", "aaa", "b", "bb", "bbb", "bbbb", "c").iterator.distinctBy(_.length)
assertTrue(result.hasNext)
assertEquals("a", result.next())
assertTrue(result.hasNext)
assertEquals("aa", result.next())
assertTrue(result.hasNext)
assertEquals("aaa", result.next())
assertTrue(result.hasNext)
assertEquals("bbbb", result.next())
assertFalse(result.hasNext)
}
@Test
def knownSize(): Unit = {
def indexedSeq[A](xs: IndexedSeq[A]): Unit = {
val it = xs.iterator
assertEquals(xs.size, it.knownSize)
it.next()
assertEquals(xs.size - 1, it.knownSize)
}
indexedSeq(Vector(1, 2, 3))
indexedSeq(mutable.ArrayBuffer(1, 2, 3))
indexedSeq(immutable.ArraySeq(1, 2, 3))
indexedSeq(Range(start = 1, end = 3, step = 1))
indexedSeq(Range(start = 9, end = 2, step = -2))
indexedSeq(immutable.NumericRange(start = 1, end = 3, step = 1))
indexedSeq(immutable.NumericRange(start = -10, end = -5, step = 1))
}
private def knownSizeDecreases[A](it: Iterator[A]): Unit = {
val size = it.knownSize
it.next()
assertEquals(size - 1, it.knownSize)
}
@Test
def knownSize2(): Unit = {
assertEquals(10, Iterator.fill(10)(1).knownSize)
assertEquals(0, Iterator.fill(-10)(1).knownSize)
knownSizeDecreases(Iterator.fill(10)(1))
assertEquals(10, Iterator.tabulate(10)(_.toString).knownSize)
assertEquals(0, Iterator.tabulate(-10)(_.toString).knownSize)
knownSizeDecreases(Iterator.tabulate(10)(_.toString))
assertEquals(10, Iterator.range(1, 11).knownSize)
knownSizeDecreases(Iterator.range(1, 11))
assertEquals(5, Iterator.range(1, 11, 2).knownSize)
assertEquals(4, Iterator.range(1, 11, 3).knownSize)
assertEquals(5, Iterator.range(1, 10, 2).knownSize)
assertEquals(3, Iterator.range(1, 10, 3).knownSize)
knownSizeDecreases(Iterator.range(1, 10, 3))
assertEquals(4, Iterator.range(-5, 5, 3).knownSize)
assertEquals(4, Iterator.range(-15, -5, 3).knownSize)
assertEquals(-1, Iterator.range(Int.MinValue, Int.MaxValue).knownSize)
assertEquals(-1, Iterator.range(Int.MinValue, Int.MaxValue, 2).knownSize)
assertEquals(1431655765, Iterator.range(Int.MinValue, Int.MaxValue, 3).knownSize)
assertEquals(Int.MaxValue, Iterator.range(Int.MinValue, Int.MaxValue - 1, 2).knownSize)
}
@Test
def knownSize3(): Unit = {
def it = Iterator.fill(10)(1)
val buf = it.buffered
assertEquals(10, buf.knownSize)
buf.head
assertEquals(10, buf.knownSize)
knownSizeDecreases(buf)
val buf2 = Iterator.continually(1).buffered
assertEquals(-1, buf2.knownSize)
buf2.head
assertEquals(-1, buf2.knownSize)
assertEquals(10, it.padTo(5, 0).knownSize)
assertEquals(15, it.padTo(15, 0).knownSize)
knownSizeDecreases(it.padTo(15, 0))
val sl = it.scanLeft(0)(_ + _)
assertEquals(11, sl.knownSize)
knownSizeDecreases(sl)
knownSizeDecreases(sl) // first element is special so check twice
assertEquals(10, it.map(_ + 1).knownSize)
knownSizeDecreases(it.map(_ + 1))
assertEquals(5, it.zip(it.take(5)).knownSize)
assertEquals(5, it.take(5).zip(it).knownSize)
knownSizeDecreases(it.zip(it.take(5)))
assertEquals(10, it.zipAll(it.take(5), 2, 3).knownSize)
assertEquals(10, it.take(5).zipAll(it, 2, 3).knownSize)
knownSizeDecreases(it.zipAll(it.take(5), 2, 3))
val (a, b) = it.duplicate
assertEquals(10, a.knownSize)
assertEquals(10, b.knownSize)
knownSizeDecreases(a)
assertEquals(10, b.knownSize)
knownSizeDecreases(b)
knownSizeDecreases(b)
assertEquals(9, a.knownSize)
assertEquals(10, it.zipWithIndex.knownSize)
knownSizeDecreases(it.zipWithIndex)
}
@Test
def sliceKnownSize(): Unit = {
def it = Iterator.fill(10)(1)
assertEquals(4, it.take(4).knownSize)
assertEquals(10, it.take(30).knownSize)
assertEquals(6, it.drop(4).knownSize)
assertEquals(0, it.drop(15).knownSize)
assertEquals(2, it.slice(4, 6).knownSize)
assertEquals(2, it.slice(8, 15).knownSize)
assertEquals(5, it.slice(-5, 5).knownSize)
assertEquals(-1, Iterator.continually(1).take(5).knownSize)
assertEquals(-1, List.fill(10)(1).take(5).knownSize)
assertEquals(3, new Iterator.SliceIterator(it, 7, -1).knownSize)
knownSizeDecreases(it.slice(2, 9))
knownSizeDecreases(new Iterator.SliceIterator(it, 7, -1))
}
@Test
def emptyKnownSize(): Unit = {
assertEquals(0, Iterator.empty.knownSize)
}
@Test
def mkString(): Unit = {
val it = List("a", null, "b", null, "c", null).iterator
assertEquals("a,null,b,null,c,null", it.mkString(","))
}
@Test
def emptyTypedIteratorsShouldBeEqual(): Unit = {
val emptyDoubleIterator = Iterator.empty[Double]
val emptyIntIterator = Iterator.empty[Int]
assertSame(emptyDoubleIterator, emptyIntIterator)
}
@Test
def emptyIteratorInHigherOrderFunctions(): Unit = {
val seqOfIterators = Seq(Seq(1, 2, 3).iterator, Seq(3, 2, 1).iterator, Seq(1, 3, 2).iterator)
val unified = seqOfIterators.foldLeft(Iterator.empty[Int])((a, b) => a ++ b)
assertEquals(List(1, 2, 3, 3, 2, 1, 1, 3, 2), List.from(unified))
}
@Test
def emptyIteratorBuilder(): Unit = {
assertSame(Iterator.empty[Int], Iterator.newBuilder[Int].result())
}
@Test
def nonEmptyIteratorBuilder(): Unit = {
val iteratorBuilder = Iterator.newBuilder[Int]
iteratorBuilder += 5
iteratorBuilder += 4
iteratorBuilder += 3
assertEquals(List(5, 4, 3), List.from(iteratorBuilder.result()))
}
@Test
def nonEmptyIteratorAndClearBuilder(): Unit = {
val iteratorBuilder = Iterator.newBuilder[Int]
iteratorBuilder += 1
iteratorBuilder.clear()
assertSame(Iterator.empty, iteratorBuilder.result())
}
@Test def partition(): Unit = {
val it = Iterator(1, 2, 3, 4, 5, 6, 7)
val (even, odd) = it.partition(n => (n & 1) == 0)
assertSameElements(List.from(even), List(2, 4, 6))
assertSameElements(List.from(odd), List(1, 3, 5, 7))
}
@Test def padTo(): Unit = {
val it = Iterator(2, 4, 6, 8)
val padded = it.padTo(7, 10)
assertSameElements(List.from(padded), List(2, 4, 6, 8, 10, 10, 10))
}
@Test def corresponds(): Unit = {
val it = Iterator(1, 2, 3, 4, 5)
assertTrue(it.corresponds(Seq(1, 4, 9, 16, 25)) { (a, b) => b == a*a })
}
@deprecated("Tests deprecated API", since="2.13")
@Test def aggregate(): Unit = {
val result = Iterator('a', 'b', 'c').aggregate(0)({ (sum, ch) => sum + ch.toInt }, { (p1, p2) => p1 + p2 })
assertEquals(result, 294)
}
@Test def copyToArray(): Unit = {
def check(a: Array[Int], copyTo: Array[Int] => Int, elemsWritten: Int, start: Int, end: Int): Unit = {
val copied = copyTo(a)
assertEquals(elemsWritten, copied)
var i = 0
while (i < start) {
assertEquals(a(i), 0)
i += 1
}
while (i < a.length && i < end) {
assertEquals(a(i), i - start)
i += 1
}
while (i < a.length) {
assertEquals(a(i), 0)
i += 1
}
}
val far = 100000
def l = Iterable.from(Range(0, 100)).iterator
check(new Array(100), l.copyToArray(_), 100, 0, far)
check(new Array(10), l.copyToArray(_), 10, 0, far)
check(new Array(1000), l.copyToArray(_), 100, 0, 100)
check(new Array(100), l.copyToArray(_, 5), 95, 5, 105)
check(new Array(10), l.copyToArray(_, 5), 5, 5, 10)
check(new Array(1000), l.copyToArray(_, 5), 100, 5, 105)
check(new Array(100), l.copyToArray(_, 5, 50), 50, 5, 55)
check(new Array(10), l.copyToArray(_, 5, 50), 5, 5, 10)
check(new Array(1000), l.copyToArray(_, 5, 50), 50, 5, 55)
assertThrows[ArrayIndexOutOfBoundsException](l.copyToArray(new Array(10), -1))
assertThrows[ArrayIndexOutOfBoundsException](l.copyToArray(new Array(10), -1, 10))
check(new Array(10), l.copyToArray(_, 10), 0, 0, 0)
check(new Array(10), l.copyToArray(_, 10, 10), 0, 0, 0)
check(new Array(10), l.copyToArray(_, 0, -1), 0, 0, 0)
}
// scala/bug#10709
@Test def `scan is lazy enough`(): Unit = {
val results = collection.mutable.ListBuffer.empty[Int]
val it = new AbstractIterator[Int] {
var cur = 1
val max = 3
override def hasNext = {
results += -cur
cur < max
}
override def next() = {
val res = cur
results += -res
cur += 1
res
}
}
val xy = it.scanLeft(10)((sum, x) => {
results += -(sum + x)
sum + x
})
val scan = collection.mutable.ListBuffer.empty[Int]
for (i <- xy) {
scan += i
results += i
}
assertSameElements(List(10,11,13), scan)
assertSameElements(List(10,-1,-1,-11,11,-2,-2,-13,13,-3), results)
}
@Test def unfoldCorrectness(): Unit = {
val it1 = Iterator.unfold(1)(i => if (i > 10) None else Some((i, i + 1)))
assertSameElements(1 to 10, it1)
val it2 = Iterator.unfold(0)(_ => None)
assertSameElements(Nil, it2)
}
@Test def unfoldLaziness(): Unit = {
var executed: Boolean = false
val it = Iterator.unfold(0)(_ => {executed = true; None})
assertFalse(executed)
it.toList
assertTrue(executed)
}
@Test def `flatMap is memory efficient in previous element`(): Unit = {
import java.lang.ref._
import scala.util.chaining._
// Array.iterator holds onto array reference; by contrast, iterating over List walks tail.
// Avoid reaching seq1 through test class. Avoid testing Array.iterator.
class C extends Iterable[String] {
val ss = Array("first", "second")
def iterator = new Iterator[String] {
var i = 0
def hasNext = i < ss.length
def next() = if (hasNext) ss(i).tap(_ => i += 1) else Iterator.empty.next()
}
def apply(i: Int) = ss(i)
}
val seq1 = new WeakReference(new C)
val seq2 = List("third")
val it0: Iterator[Int] = Iterator(1, 2)
lazy val it: Iterator[String] = it0.flatMap {
case 1 => Option(seq1.get).getOrElse(Nil)
case 2 => check(); seq2
case _ => ???
}
def noop(): Unit = ()
def check(): Unit = assertNotReachable(seq1.get, it)(noop())
def checkHasElement(): Unit = assertNotReachable(Option(seq1.get).map(_.apply(1)).orNull, it)(noop())
assert(it.hasNext)
assertEquals("first", it.next())
// verify that we're in the middle of seq1
assertThrows[AssertionError](checkHasElement(), _.contains("held reference"))
assertThrows[AssertionError](check(), _.contains("held reference"))
assert(it.hasNext)
assertEquals("second", it.next())
assert(it.hasNext)
assertNotReachable(seq1.get, it) {
assertEquals("third", it.next())
}
assert(!it.hasNext)
}
@Test def tapEach(): Unit = {
locally {
var i = 0
val tapped = Iterator(-1, -1, -1).tapEach(_ => i += 1)
assertEquals(true, tapped.hasNext)
assertEquals(0, i)
}
locally {
var i = 0
val tapped = Iterator(-1, -1, -1).tapEach(_ => i += 1)
assertEquals(-3, tapped.sum)
assertEquals(3, i)
}
locally {
var i = 0
val tapped = Iterator(-1, -1, -1).tapEach(_ => i += 1)
assertEquals(-1, tapped.next())
assertEquals(1, i)
}
}
@Test
def t11106(): Unit = {
var i = 0
Iterator.continually(0)
.map(_ => {i += 1; i})
.withFilter(_ < 10)
.take(3)
.foreach(_ => ())
assertEquals(3, i)
}
@Test
def flatMap(): Unit = {
def check[T](mkIterator: () => Iterator[T], expected: Array[T]): Unit = {
// tests that the iterator produces the expected array of elems, when alternating hasNext/next() calls
// then continues to be empty after repeated calls to hasNext/next after exhausted
// additional variants are included, where we:
// * avoid calls to hasNext, in this case `next()` should still work as expected
// * avoid calls to hasNext in the post-exhaustion check -- next() should still throw each time
// * avoid calls to next() in the post-exhaustion check -- hasNext should still return `false` each time
locally {
val iter = mkIterator()
var i = 0
while (i < expected.length) {
assert(iter.hasNext)
assertEquals(expected(i), iter.next())
i += 1
}
i = 0
while (i < 10) {
assertThrows[Exception](iter.next())
assert(!iter.hasNext)
i += 1
}
}
locally {
val iter = mkIterator()
var i = 0
while (i < expected.length) {
assertEquals(expected(i), iter.next())
i += 1
}
i = 0
while (i < 10) {
assertThrows[Exception](iter.next())
assert(!iter.hasNext)
i += 1
}
}
locally {
val iter = mkIterator()
var i = 0
while (i < expected.length) {
assertEquals(expected(i), iter.next())
i += 1
}
i = 0
while (i < 10) {
assertThrows[Exception](iter.next())
i += 1
}
}
locally {
val iter = mkIterator()
var i = 0
while (i < expected.length) {
assertEquals(expected(i), iter.next())
i += 1
}
i = 0
while (i < 10) {
i += 1
assert(!iter.hasNext)
}
}
}
check(() => Iterator.empty[Int].flatMap(_ => Iterator.empty[Int]), Array())
check(() => Iterator.empty[Int].flatMap(_ => 1 to 10), Array())
check(() => Iterator(1).flatMap(i => List(i + 1, i + 2)), Array(2, 3))
check(() => Iterator(1).flatMap(i => List(i + 1, i + 2)), Array(2, 3))
check(() => (0 to 100 by 10).iterator.flatMap(i => i to (i + 9)), (0 to 109).toArray)
check(() => Iterator.from(1 to 10).flatMap {
case 1 => Nil
case 2 => List(1,2,3)
case 3 => Nil
case 4 => List(4)
case 5 => List(5,6,7,8,9)
case 6 => List(10)
case 7 => List(11,12,13,14,15)
case 8 => Nil
case 9 => Nil
case 10 => Nil
case _ => Nil
}, Array.from(1 to 15))
}
@Test
def `t11807 multiply-merged concat iterators`(): Unit = {
val it0 = Array(1).iterator
val it1 = Array(2).iterator ++ Array(3).iterator
val it2 = it0 ++ it1
assertEquals(1, it2.next())
assertTrue(it2.hasNext)
val it3 = it2 ++ Array(4).iterator
assertEquals(2, it3.next())
assertEquals(3, it3.next())
assertTrue("concatted tail of it3 should be next", it3.hasNext)
}
}
|
lrytz/scala
|
test/junit/scala/collection/IteratorTest.scala
|
Scala
|
apache-2.0
| 28,472 |
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller
* @version 1.2
* @date Sun Jan 4 23:09:27 EST 2015
* @see LICENSE (MIT style license file).
*/
package scalation.analytics
import scalation.linalgebra.{MatrixD, VectoD, VectorD, VectorI}
import scalation.util.{Error, time}
import RegTechnique._
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `ANCOVA` class supports ANalysis of COVAriance 'ANCOVA'. It allows
* the addition of a categorical treatment variable 't' into a multiple linear
* regression. This is done by introducing dummy variables 'dj' to distinguish
* the treatment level. The problem is again to fit the parameter vector 'b'
* in the augmented regression equation
* <p>
* y = b dot x + e = b0 + b_1 * x_1 + b_2 * x_2 + ... b_k * x_k
 *                          + b_k+1 * d_1 + b_k+2 * d_2 + ... b_k+l * d_l + e
* <p>
* where 'e' represents the residuals (the part not explained by the model).
* Use Least-Squares (minimizing the residuals) to fit the parameter vector
* <p>
* b = x_pinv * y
* <p>
* where 'x_pinv' is the pseudo-inverse.
* @see see.stanford.edu/materials/lsoeldsee263/05-ls.pdf
* @param x_ the data/design matrix of continuous variables
* @param t the treatment/categorical variable vector
* @param y the response vector
* @param levels the number of treatment levels (1, ... levels)
* @param technique the technique used to solve for b in x.t*x*b = x.t*y
*/
class ANCOVA (x_ : MatrixD, t: VectorI, y: VectorD, levels: Int, technique: RegTechnique = QR)
extends Predictor with Error
{
if (x_.dim1 != y.dim) flaw ("constructor", "dimensions of x_ and y are incompatible")
if (t.dim != y.dim) flaw ("constructor", "dimensions of t and y are incompatible")
val x = new MatrixD (x_.dim1, x_.dim2 + levels - 1) // augmented design matrix
assignVars () // assign values for continuous variables
assignDummyVars () // assign values for dummy variables
val rg = new Regression (x, y, technique) // regular multiple linear regression
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Assign values for the continuous variables from the 'x' matrix.
*/
def assignVars ()
{
for (i <- 0 until x_.dim1; j <- 0 until x_.dim2) x(i, j) = x_(i, j)
    } // assignVars
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Assign values for the dummy variables based on the treatment vector 't'.
*/
def assignDummyVars ()
{
for (i <- 0 until x_.dim1) {
val lev = t(i) // treatment level for ith item
if (lev < 1 || lev > levels) flaw ("assignDummyVars", "treatment level is out of range")
            if (lev < levels) x(i, x_.dim2 + lev - 1) = 1.0   // dummy columns for levels 1 .. levels-1; level 'levels' is the baseline
} // for
} // assignDummyVars
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Train the predictor by fitting the parameter vector (b-vector) in the
* regression equation
* y = b dot x + e = [b_0, ... b_k+l] dot [1, x_1, ..., d_1, ...] + e
* using the least squares method.
*/
def train () { rg.train () }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Retrain the predictor by fitting the parameter vector (b-vector) in the
* multiple regression equation
* yy = b dot x + e = [b_0, ... b_k+l] dot [1, x_1, ..., d_1, ...] + e
* using the least squares method.
* @param yy the new response vector
*/
def train (yy: VectorD) { rg.train (yy) }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Return the quality of fit 'rSquared'.
*/
def fit: VectorD = rg.fit
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Return the vector of residuals/errors.
*/
override def residual: VectoD = rg.residual
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Predict the value of y = f(z) by evaluating the formula y = b dot z,
* e.g., (b0, b1, b2) dot (1, z1, z2).
* @param z the new vector to predict
*/
def predict (z: VectoD): Double = rg.predict (z)
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Perform backward elimination to remove the least predictive variable
* from the model, returning the variable to eliminate, the new parameter
     *  vector and the new quality-of-fit vector (including R-squared and the F statistic).
*/
def backElim (): Tuple3 [Int, VectoD, VectorD] = rg.backElim ()
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the Variance Inflation Factor (VIF) for each variable to test
* for multi-collinearity by regressing 'xj' against the rest of the variables.
* A VIF over 10 indicates that over 90% of the variance of 'xj' can be predicted
* from the other variables, so 'xj' is a candidate for removal from the model.
*/
def vif: VectorD = rg.vif
} // ANCOVA class
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `ANCOVATest` object tests the `ANCOVA` class using the following
* regression equation.
* <p>
* y = b dot x = b_0 + b_1*x_1 + b_2*x_2 + b_3*d_1 + b_4*d_2
* <p>
*/
object ANCOVATest extends App
{
// 5 data points: constant term, x_1 coordinate, x_2 coordinate
val x = new MatrixD ((5, 3), 1.0, 36.0, 66.0, // 5-by-3 matrix
1.0, 37.0, 68.0,
1.0, 47.0, 64.0,
1.0, 32.0, 53.0,
1.0, 1.0, 101.0)
    val t = VectorI (1, 1, 2, 2, 3)                          // treatment levels
val y = VectorD (745.0, 895.0, 442.0, 440.0, 1598.0) // response vector
val z = VectorD (1.0, 20.0, 80.0, 1.0)
println ("x = " + x)
println ("t = " + t)
println ("y = " + y)
val levels = 3
val anc = new ANCOVA (x, t, y, levels)
anc.train ()
println ("fit = " + anc.fit)
val yp = anc.predict (z)
println ("predict (" + z + ") = " + yp)
} // ANCOVATest object
|
NBKlepp/fda
|
scalation_1.2/src/main/scala/scalation/analytics/ANCOVA.scala
|
Scala
|
mit
| 6,567 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.fpm
import org.apache.spark.SparkFunSuite
import org.apache.spark.mllib.util.MLlibTestSparkContext
class FPGrowthSuite extends SparkFunSuite with MLlibTestSparkContext {
test("FP-Growth using String type") {
val transactions = Seq(
"r z h k p",
"z y x w v u t s",
"s x o n r",
"x z y m t s q e",
"z",
"x z y r q t p")
.map(_.split(" "))
val rdd = sc.parallelize(transactions, 2).cache()
val fpg = new FPGrowth()
val model6 = fpg
.setMinSupport(0.9)
.setNumPartitions(1)
.run(rdd)
assert(model6.freqItemsets.count() === 0)
val model3 = fpg
.setMinSupport(0.5)
.setNumPartitions(2)
.run(rdd)
val freqItemsets3 = model3.freqItemsets.collect().map { itemset =>
(itemset.items.toSet, itemset.freq)
}
val expected = Set(
(Set("s"), 3L), (Set("z"), 5L), (Set("x"), 4L), (Set("t"), 3L), (Set("y"), 3L),
(Set("r"), 3L),
(Set("x", "z"), 3L), (Set("t", "y"), 3L), (Set("t", "x"), 3L), (Set("s", "x"), 3L),
(Set("y", "x"), 3L), (Set("y", "z"), 3L), (Set("t", "z"), 3L),
(Set("y", "x", "z"), 3L), (Set("t", "x", "z"), 3L), (Set("t", "y", "z"), 3L),
(Set("t", "y", "x"), 3L),
(Set("t", "y", "x", "z"), 3L))
assert(freqItemsets3.toSet === expected)
val model2 = fpg
.setMinSupport(0.3)
.setNumPartitions(4)
.run(rdd)
assert(model2.freqItemsets.count() === 54)
val model1 = fpg
.setMinSupport(0.1)
.setNumPartitions(8)
.run(rdd)
assert(model1.freqItemsets.count() === 625)
}
test("FP-Growth using Int type") {
val transactions = Seq(
"1 2 3",
"1 2 3 4",
"5 4 3 2 1",
"6 5 4 3 2 1",
"2 4",
"1 3",
"1 7")
.map(_.split(" ").map(_.toInt).toArray)
val rdd = sc.parallelize(transactions, 2).cache()
val fpg = new FPGrowth()
val model6 = fpg
.setMinSupport(0.9)
.setNumPartitions(1)
.run(rdd)
assert(model6.freqItemsets.count() === 0)
val model3 = fpg
.setMinSupport(0.5)
.setNumPartitions(2)
.run(rdd)
assert(model3.freqItemsets.first().items.getClass === Array(1).getClass,
"frequent itemsets should use primitive arrays")
val freqItemsets3 = model3.freqItemsets.collect().map { itemset =>
(itemset.items.toSet, itemset.freq)
}
val expected = Set(
(Set(1), 6L), (Set(2), 5L), (Set(3), 5L), (Set(4), 4L),
(Set(1, 2), 4L), (Set(1, 3), 5L), (Set(2, 3), 4L),
(Set(2, 4), 4L), (Set(1, 2, 3), 4L))
assert(freqItemsets3.toSet === expected)
val model2 = fpg
.setMinSupport(0.3)
.setNumPartitions(4)
.run(rdd)
assert(model2.freqItemsets.count() === 15)
val model1 = fpg
.setMinSupport(0.1)
.setNumPartitions(8)
.run(rdd)
assert(model1.freqItemsets.count() === 65)
}
}
|
andrewor14/iolap
|
mllib/src/test/scala/org/apache/spark/mllib/fpm/FPGrowthSuite.scala
|
Scala
|
apache-2.0
| 3,721 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import SharedHelpers.thisLineNumber
import Matchers._
import exceptions.TestFailedException
import org.scalactic.Prettifier
class ShouldBeWritableStructuralSpec extends FunSpec {
private val prettifier = Prettifier.default
val fileName: String = "ShouldBeWritableStructuralSpec.scala"
def wasNotWritable(left: Any): String =
FailureMessages.wasNotWritable(prettifier, left)
def wasWritable(left: Any): String =
FailureMessages.wasWritable(prettifier, left)
describe("writable matcher") {
describe("when work with arbitrary object with isWritable() method") {
class MyWritability(value: Boolean) {
def isWritable(): Boolean = value
override def toString = "writability"
}
val objTrue = new MyWritability(true)
val objFalse = new MyWritability(false)
it("should do nothing for 'objTrue should be (writable)'") {
objTrue should be (writable)
}
it("should throw TFE with correct stack depth for 'objFalse should be (writable)'") {
val caught1 = intercept[TestFailedException] {
objFalse should be (writable)
}
assert(caught1.message === Some(wasNotWritable(objFalse)))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
}
it("should do nothing if for 'objFalse should not be writable'") {
objFalse should not be writable
}
it("should throw TFE with correct stack depth for 'objTrue should not be writable'") {
val caught1 = intercept[TestFailedException] {
objTrue should not be writable
}
assert(caught1.message === Some(wasWritable(objTrue)))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
describe("when work with arbitrary object with isWritable method") {
class MyWritability(value: Boolean) {
def isWritable: Boolean = value
override def toString = "writability"
}
val objTrue = new MyWritability(true)
val objFalse = new MyWritability(false)
it("should do nothing for 'objTrue should be (writable)'") {
objTrue should be (writable)
}
it("should throw TFE with correct stack depth for 'objFalse should be (writable)'") {
val caught1 = intercept[TestFailedException] {
objFalse should be (writable)
}
assert(caught1.message === Some(wasNotWritable(objFalse)))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
}
it("should do nothing if for 'objFalse should not be writable'") {
objFalse should not be writable
}
it("should throw TFE with correct stack depth for 'objTrue should not be writable'") {
val caught1 = intercept[TestFailedException] {
objTrue should not be writable
}
assert(caught1.message === Some(wasWritable(objTrue)))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
describe("when work with arbitrary object with isWritable val") {
class MyWritability(value: Boolean) {
val isWritable: Boolean = value
override def toString = "writability"
}
val objTrue = new MyWritability(true)
val objFalse = new MyWritability(false)
it("should do nothing for 'objTrue should be (writable)'") {
objTrue should be (writable)
}
it("should throw TFE with correct stack depth for 'objFalse should be (writable)'") {
val caught1 = intercept[TestFailedException] {
objFalse should be (writable)
}
assert(caught1.message === Some(wasNotWritable(objFalse)))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
}
it("should do nothing if for 'objFalse should not be writable'") {
objFalse should not be writable
}
it("should throw TFE with correct stack depth for 'objTrue should not be writable'") {
val caught1 = intercept[TestFailedException] {
objTrue should not be writable
}
assert(caught1.message === Some(wasWritable(objTrue)))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
}
}
|
dotty-staging/scalatest
|
scalatest-test/src/test/scala/org/scalatest/ShouldBeWritableStructuralSpec.scala
|
Scala
|
apache-2.0
| 5,339 |
package net.liftweb.markdown
/*
* Copyright 2013 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Based on https://github.com/chenkelmann/actuarius originally developed by
* Christoph Henkelmann http://henkelmann.eu/
*/
import org.junit.runner.RunWith
import org.scalatestplus.junit.JUnitRunner
import org.scalatest.{Matchers,FlatSpec}
import scala.xml.{Group, NodeSeq}
/**
* Tests the parsing on block level.
*/
@RunWith(classOf[JUnitRunner])
class BlockParsersTest extends FlatSpec with Matchers with BlockParsers{
"The BlockParsers" should "parse optional empty lines" in {
val p = optEmptyLines
        val el = new EmptyLine(" \n")
apply(p, Nil) should equal (Nil)
apply(p, List(el)) should equal (List(el))
apply(p, List(el, el)) should equal (List(el, el))
}
it should "accept empty documents" in {
val p = markdown
        val el = new EmptyLine(" \n")
apply(p, Nil) should equal (Nil)
apply(p, List(el)) should equal (Nil)
apply(p, List(el, el)) should equal (Nil)
}
it should "detect line types" in {
val p = line(classOf[CodeLine])
apply(p, List(new CodeLine(" ", "code"))) should equal (new CodeLine(" ", "code"))
an [IllegalArgumentException] should be thrownBy(apply(p, List(new OtherLine("foo"))))
}
it should "correctly override list items markup" in {
object MyDecorator extends Decorator {
override def decorateItemOpen(): String = "<foo>"
override def decorateItemClose(): String = "</foo>"
}
object MyTransformer extends Transformer {
override def deco(): Decorator = MyDecorator
}
MyTransformer.apply("* Content") should equal ("<ul>\\n<foo>Content</foo></ul>\\n")
}
}
|
lift/framework
|
core/markdown/src/test/scala/net/liftweb/markdown/BlockParsersTest.scala
|
Scala
|
apache-2.0
| 2,349 |
package com.sksamuel.elastic4s.searches.aggs
import com.sksamuel.elastic4s.searches.aggs.pipeline.PipelineAggregationDefinition
import com.sksamuel.exts.OptionImplicits._
case class GeoHashGridAggregationDefinition(name: String,
field: Option[String] = None,
shardSize: Option[Int] = None,
size: Option[Int] = None,
precision: Option[Int] = None,
pipelines: Seq[PipelineAggregationDefinition] = Nil,
subaggs: Seq[AggregationDefinition] = Nil,
metadata: Map[String, AnyRef] = Map.empty)
extends AggregationDefinition {
type T = GeoHashGridAggregationDefinition
def field(field: String): GeoHashGridAggregationDefinition = copy(field = field.some)
def precision(precision: Int): GeoHashGridAggregationDefinition = copy(precision = precision.some)
def shardSize(shardSize: Int): GeoHashGridAggregationDefinition = copy(shardSize = shardSize.some)
def size(size: Int): GeoHashGridAggregationDefinition = copy(size = size.some)
override def pipelines(pipelines: Iterable[PipelineAggregationDefinition]): T = copy(pipelines = pipelines.toSeq)
override def subAggregations(aggs: Iterable[AggregationDefinition]): T = copy(subaggs = aggs.toSeq)
  override def metadata(map: Map[String, AnyRef]): T = copy(metadata = map)
}
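// Hypothetical usage sketch (not part of the original file): the copy-based
// builder methods above chain naturally, each returning an updated definition.
// The aggregation and field names are illustrative only.
object GeoHashGridAggregationExample {
  val byLocation: GeoHashGridAggregationDefinition =
    GeoHashGridAggregationDefinition("byLocation")
      .field("location") // geo_point field to bucket on
      .precision(5)      // geohash cell precision
      .size(100)         // maximum number of buckets to return
}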
|
tyth/elastic4s
|
elastic4s-core/src/main/scala/com/sksamuel/elastic4s/searches/aggs/GeoHashGridAggregationDefinition.scala
|
Scala
|
apache-2.0
| 1,549 |
package io.plasmap
import io.plasmap.components.MainComponent
import japgolly.scalajs.react.{ReactDOM, React}
import org.scalajs.dom.ext.Ajax
import scala.scalajs.js
import scala.util.{Failure, Success}
import scalaz.effect.IO
import org.scalajs.dom
import scala.concurrent.ExecutionContext.Implicits.global
import upickle.default._
/**
* The main App.
*/
object App extends js.JSApp {
def main():Unit = {
val byId = dom.document.getElementById _
val containerNode = byId("react-container")
ReactDOM.render(
MainComponent.component("Winner winner winners"), containerNode)
}
}
|
i-am-the-slime/IntelliJLicenceWinner
|
client/src/main/scala/io/plasmap/App.scala
|
Scala
|
apache-2.0
| 605 |
/**
* Copyright 2013 Tully Ernst
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* KernelBase.scala
*/
package trysetnull.com.github.gpr.kernel
import org.apache.commons.math3.analysis.differentiation.DerivativeStructure
/** An abstract class for defining kernel functions to be used for GP regression. */
abstract class KernelBase {
  /** Returns a function that computes noise * Kronecker delta for a pair of
    * matrix indices; the result is intended to be added to a covariance matrix.
* @param noise is typically the noise variance, i.e. sigma_n^2.
* @param row index of the matrix
* @param column index of the matrix
*/
  final def kroneckerDelta(noise: Double): (Int, Int) => Double =
    (p, q) => if (p == q) noise else 0.0
/** Defines the covariance function as a kernel. */
def covariance(p: Int, q: Int): (Double, Double) => DerivativeStructure
}
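/** Hypothetical example (not part of the original file): a squared-exponential
  * kernel k(x1, x2) = sf2 * exp(-(x1 - x2)^2 / (2 * l^2)), sketched to show how
  * a concrete kernel could implement `covariance`. The hyperparameter names are
  * assumptions; p and q go unused because this kernel is stationary.
  */
class SquaredExponentialKernelSketch(sf2: Double, l: Double) extends KernelBase {
  def covariance(p: Int, q: Int): (Double, Double) => DerivativeStructure =
    (x1, x2) => {
      // Order-0 DerivativeStructure: only the value is carried, no derivatives.
      val diff = new DerivativeStructure(1, 0, x1 - x2)
      diff.pow(2).divide(-2.0 * l * l).exp().multiply(sf2)
    }
}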
|
trysetnull/calibris
|
src/main/scala/trysetnull/com/github/gpr/kernel/KernelBase.scala
|
Scala
|
apache-2.0
| 1,398 |
package wom
import cats.syntax.either._
import common.Checked
import common.validation.Checked._
import org.scalatest.prop.TableDrivenPropertyChecks
import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}
import shapeless.Coproduct
import wdl.{WdlNamespace, WdlNamespaceWithWorkflow}
import wom.executable.Executable.ResolvedExecutableInputs
import wom.graph.Graph.ResolvedExecutableInput
import wom.types._
import wom.values._
class WdlInputValidationSpec extends FlatSpec with Matchers with BeforeAndAfterAll with TableDrivenPropertyChecks {
behavior of "WDL Wom executable"
val wdlWorkflow: String =
"""
|task t {
| String t1
| Int? t2
| command { ... }
|}
|
|workflow w {
| File w1
| String? w2
|
| scatter(i in range(5)) {
| call t as u
| }
|
| call t
|}
""".stripMargin
val namespace = WdlNamespace.loadUsingSource(wdlWorkflow, None, None).get.asInstanceOf[WdlNamespaceWithWorkflow]
val graph = namespace.workflow.womDefinition
.valueOr(errors => fail(s"Failed to build a wom definition: ${errors.toList.mkString(", ")}"))
.graph
val w1OutputPort = graph.externalInputNodes.find(_.fullyQualifiedName == "w.w1").getOrElse(fail("Failed to find an input node for w1")).singleOutputPort
val w2OutputPort = graph.externalInputNodes.find(_.fullyQualifiedName == "w.w2").getOrElse(fail("Failed to find an input node for w2")).singleOutputPort
val t1OutputPort = graph.externalInputNodes.find(_.fullyQualifiedName == "w.t.t1").getOrElse(fail("Failed to find an input node for t1")).singleOutputPort
val t2OutputPort = graph.externalInputNodes.find(_.fullyQualifiedName == "w.t.t2").getOrElse(fail("Failed to find an input node for t2")).singleOutputPort
val u1OutputPort = graph.externalInputNodes.find(_.fullyQualifiedName == "w.u.t1").getOrElse(fail("Failed to find an input node for u1")).singleOutputPort
val u2OutputPort = graph.externalInputNodes.find(_.fullyQualifiedName == "w.u.t2").getOrElse(fail("Failed to find an input node for u2")).singleOutputPort
def validate(inputFile: String): Checked[ResolvedExecutableInputs] = {
namespace.womExecutable(Option(inputFile)) match {
case Left(errors) => Left(errors)
case Right(e) => e.resolvedExecutableInputs.validNelCheck
}
}
it should "validate workflow inputs" in {
val validations = Table(
("inputFile", "expectedResult"),
(
"""
|{
| "w.w1": "my_file.txt",
| "w.t.t1": "helloT",
| "w.u.t1": "helloU"
|}
""".stripMargin,
Map (
w1OutputPort -> Coproduct[ResolvedExecutableInput](WomFile("my_file.txt"): WomValue),
w2OutputPort -> Coproduct[ResolvedExecutableInput](WomOptionalValue.none(WomStringType): WomValue),
t1OutputPort -> Coproduct[ResolvedExecutableInput](WomString("helloT"): WomValue),
t2OutputPort -> Coproduct[ResolvedExecutableInput](WomOptionalValue.none(WomIntegerType): WomValue),
u1OutputPort -> Coproduct[ResolvedExecutableInput](WomString("helloU"): WomValue),
u2OutputPort -> Coproduct[ResolvedExecutableInput](WomOptionalValue.none(WomIntegerType): WomValue)
).validNelCheck
),
(
"""
|{
| "w.w1": "my_file.txt",
| "w.w2": "inputString",
| "w.t.t1": "helloT",
| "w.t.t2": 5,
| "w.u.t1": "helloU",
| "w.u.t2": 6
|}
""".stripMargin,
Map (
w1OutputPort -> Coproduct[ResolvedExecutableInput](WomFile("my_file.txt"): WomValue),
w2OutputPort -> Coproduct[ResolvedExecutableInput](WomOptionalValue(WomString("inputString")): WomValue),
t1OutputPort -> Coproduct[ResolvedExecutableInput](WomString("helloT"): WomValue),
t2OutputPort -> Coproduct[ResolvedExecutableInput](WomOptionalValue(WomInteger(5)): WomValue),
u1OutputPort -> Coproduct[ResolvedExecutableInput](WomString("helloU"): WomValue),
u2OutputPort -> Coproduct[ResolvedExecutableInput](WomOptionalValue(WomInteger(6)): WomValue)
).validNelCheck
),
(
"""
|{
|}
""".stripMargin,
Set(
"Required workflow input 'w.t.t1' not specified",
"Required workflow input 'w.u.t1' not specified",
"Required workflow input 'w.w1' not specified"
).asLeft[ResolvedExecutableInputs]
)
)
forAll(validations) { (inputSource, expectation) =>
// The order in the Nel is not important, so make it a Set to check that all the expected failure messages are here, regardless of their order
validate(inputSource).leftMap(_.toList.toSet) shouldBe expectation
}
}
}
|
ohsu-comp-bio/cromwell
|
wdl/src/test/scala/wom/WdlInputValidationSpec.scala
|
Scala
|
bsd-3-clause
| 4,871 |
/**
* Swaggy Jenkins
* Jenkins API clients generated from Swagger / Open API specification
*
* The version of the OpenAPI document: 1.1.2-pre.0
* Contact: [email protected]
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
package org.openapitools.client.api
import org.openapitools.client.model.ComputerSet
import org.openapitools.client.model.FreeStyleBuild
import org.openapitools.client.model.FreeStyleProject
import org.openapitools.client.model.Hudson
import org.openapitools.client.model.ListView
import org.openapitools.client.model.Queue
import org.openapitools.client.core.JsonSupport._
import sttp.client._
import sttp.model.Method
object RemoteAccessApi {
def apply(baseUrl: String = "http://localhost") = new RemoteAccessApi(baseUrl)
}
class RemoteAccessApi(baseUrl: String) {
/**
* Retrieve computer details
*
* Expected answers:
* code 200 : ComputerSet (Successfully retrieved computer details)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param depth Recursion depth in response model
*/
def getComputer(username: String, password: String)(depth: Int
): Request[Either[ResponseError[Exception], ComputerSet], Nothing] =
basicRequest
.method(Method.GET, uri"$baseUrl/computer/api/json?depth=${ depth }")
.contentType("application/json")
.auth.basic(username, password)
.response(asJson[ComputerSet])
/**
* Retrieve Jenkins details
*
* Expected answers:
* code 200 : Hudson (Successfully retrieved Jenkins details)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*/
def getJenkins(username: String, password: String)(
): Request[Either[ResponseError[Exception], Hudson], Nothing] =
basicRequest
.method(Method.GET, uri"$baseUrl/api/json")
.contentType("application/json")
.auth.basic(username, password)
.response(asJson[Hudson])
/**
* Retrieve job details
*
* Expected answers:
* code 200 : FreeStyleProject (Successfully retrieved job details)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
* code 404 : (Job cannot be found on Jenkins instance)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param name Name of the job
*/
def getJob(username: String, password: String)(name: String
): Request[Either[ResponseError[Exception], FreeStyleProject], Nothing] =
basicRequest
.method(Method.GET, uri"$baseUrl/job/${name}/api/json")
.contentType("application/json")
.auth.basic(username, password)
.response(asJson[FreeStyleProject])
/**
* Retrieve job configuration
*
* Expected answers:
* code 200 : String (Successfully retrieved job configuration in config.xml format)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
* code 404 : (Job cannot be found on Jenkins instance)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param name Name of the job
*/
def getJobConfig(username: String, password: String)(name: String
): Request[Either[ResponseError[Exception], String], Nothing] =
basicRequest
.method(Method.GET, uri"$baseUrl/job/${name}/config.xml")
.contentType("application/json")
.auth.basic(username, password)
.response(asJson[String])
/**
* Retrieve job's last build details
*
* Expected answers:
* code 200 : FreeStyleBuild (Successfully retrieved job's last build details)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
* code 404 : (Job cannot be found on Jenkins instance)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param name Name of the job
*/
def getJobLastBuild(username: String, password: String)(name: String
): Request[Either[ResponseError[Exception], FreeStyleBuild], Nothing] =
basicRequest
.method(Method.GET, uri"$baseUrl/job/${name}/lastBuild/api/json")
.contentType("application/json")
.auth.basic(username, password)
.response(asJson[FreeStyleBuild])
/**
* Retrieve job's build progressive text output
*
* Expected answers:
* code 200 : (Successfully retrieved job's build progressive text output)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
* code 404 : (Job cannot be found on Jenkins instance)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param name Name of the job
* @param number Build number
* @param start Starting point of progressive text output
*/
def getJobProgressiveText(username: String, password: String)(name: String, number: String, start: String
): Request[Either[ResponseError[Exception], Unit], Nothing] =
basicRequest
.method(Method.GET, uri"$baseUrl/job/${name}/${number}/logText/progressiveText?start=${ start }")
.contentType("application/json")
.auth.basic(username, password)
.response(asJson[Unit])
/**
* Retrieve queue details
*
* Expected answers:
* code 200 : Queue (Successfully retrieved queue details)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*/
def getQueue(username: String, password: String)(
): Request[Either[ResponseError[Exception], Queue], Nothing] =
basicRequest
.method(Method.GET, uri"$baseUrl/queue/api/json")
.contentType("application/json")
.auth.basic(username, password)
.response(asJson[Queue])
/**
* Retrieve queued item details
*
* Expected answers:
* code 200 : Queue (Successfully retrieved queued item details)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param number Queue number
*/
def getQueueItem(username: String, password: String)(number: String
): Request[Either[ResponseError[Exception], Queue], Nothing] =
basicRequest
.method(Method.GET, uri"$baseUrl/queue/item/${number}/api/json")
.contentType("application/json")
.auth.basic(username, password)
.response(asJson[Queue])
/**
* Retrieve view details
*
* Expected answers:
* code 200 : ListView (Successfully retrieved view details)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
* code 404 : (View cannot be found on Jenkins instance)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param name Name of the view
*/
def getView(username: String, password: String)(name: String
): Request[Either[ResponseError[Exception], ListView], Nothing] =
basicRequest
.method(Method.GET, uri"$baseUrl/view/${name}/api/json")
.contentType("application/json")
.auth.basic(username, password)
.response(asJson[ListView])
/**
* Retrieve view configuration
*
* Expected answers:
* code 200 : String (Successfully retrieved view configuration in config.xml format)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
* code 404 : (View cannot be found on Jenkins instance)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param name Name of the view
*/
def getViewConfig(username: String, password: String)(name: String
): Request[Either[ResponseError[Exception], String], Nothing] =
basicRequest
.method(Method.GET, uri"$baseUrl/view/${name}/config.xml")
.contentType("application/json")
.auth.basic(username, password)
.response(asJson[String])
/**
* Retrieve Jenkins headers
*
* Expected answers:
* code 200 : (Successfully retrieved Jenkins headers)
* Headers :
* x-jenkins - Jenkins version number
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*/
def headJenkins(username: String, password: String)(
): Request[Either[ResponseError[Exception], Unit], Nothing] =
basicRequest
.method(Method.HEAD, uri"$baseUrl/api/json")
.contentType("application/json")
.auth.basic(username, password)
.response(asJson[Unit])
/**
* Create a new job using job configuration, or copied from an existing job
*
* Expected answers:
* code 200 : (Successfully created a new job)
* code 400 : String (An error has occurred - error message is embedded inside the HTML response)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param name Name of the new job
* @param from Existing job to copy from
* @param mode Set to 'copy' for copying an existing job
* @param jenkinsCrumb CSRF protection token
* @param contentType Content type header application/xml
* @param body Job configuration in config.xml format
*/
def postCreateItem(username: String, password: String)(name: String, from: Option[String] = None, mode: Option[String] = None, jenkinsCrumb: Option[String] = None, contentType: Option[String] = None, body: Option[String] = None
): Request[Either[ResponseError[Exception], Unit], Nothing] =
basicRequest
.method(Method.POST, uri"$baseUrl/createItem?name=${ name }&from=${ from }&mode=${ mode }")
.contentType("application/json")
.header("Jenkins-Crumb", jenkinsCrumb)
.header("Content-Type", contentType)
.auth.basic(username, password)
.body(body)
.response(asJson[Unit])
/**
* Create a new view using view configuration
*
* Expected answers:
* code 200 : (Successfully created the view)
* code 400 : String (An error has occurred - error message is embedded inside the HTML response)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param name Name of the new view
* @param jenkinsCrumb CSRF protection token
* @param contentType Content type header application/xml
* @param body View configuration in config.xml format
*/
def postCreateView(username: String, password: String)(name: String, jenkinsCrumb: Option[String] = None, contentType: Option[String] = None, body: Option[String] = None
): Request[Either[ResponseError[Exception], Unit], Nothing] =
basicRequest
.method(Method.POST, uri"$baseUrl/createView?name=${ name }")
.contentType("application/json")
.header("Jenkins-Crumb", jenkinsCrumb)
.header("Content-Type", contentType)
.auth.basic(username, password)
.body(body)
.response(asJson[Unit])
/**
* Build a job
*
* Expected answers:
* code 200 : (Successfully built the job (backward compatibility for older versions of Jenkins))
* code 201 : (Successfully built the job)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
* code 404 : (Job cannot be found on Jenkins instance)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param name Name of the job
* @param json
* @param token
* @param jenkinsCrumb CSRF protection token
*/
def postJobBuild(username: String, password: String)(name: String, json: String, token: Option[String] = None, jenkinsCrumb: Option[String] = None
): Request[Either[ResponseError[Exception], Unit], Nothing] =
basicRequest
.method(Method.POST, uri"$baseUrl/job/${name}/build?json=${ json }&token=${ token }")
.contentType("application/json")
.header("Jenkins-Crumb", jenkinsCrumb)
.auth.basic(username, password)
.response(asJson[Unit])
/**
* Update job configuration
*
* Expected answers:
* code 200 : (Successfully retrieved job configuration in config.xml format)
* code 400 : String (An error has occurred - error message is embedded inside the HTML response)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
* code 404 : (Job cannot be found on Jenkins instance)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param name Name of the job
* @param body Job configuration in config.xml format
* @param jenkinsCrumb CSRF protection token
*/
def postJobConfig(username: String, password: String)(name: String, body: String, jenkinsCrumb: Option[String] = None
): Request[Either[ResponseError[Exception], Unit], Nothing] =
basicRequest
.method(Method.POST, uri"$baseUrl/job/${name}/config.xml")
.contentType("application/json")
.header("Jenkins-Crumb", jenkinsCrumb)
.auth.basic(username, password)
.body(body)
.response(asJson[Unit])
/**
* Delete a job
*
* Expected answers:
* code 200 : (Successfully deleted the job)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
* code 404 : (Job cannot be found on Jenkins instance)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param name Name of the job
* @param jenkinsCrumb CSRF protection token
*/
def postJobDelete(username: String, password: String)(name: String, jenkinsCrumb: Option[String] = None
): Request[Either[ResponseError[Exception], Unit], Nothing] =
basicRequest
.method(Method.POST, uri"$baseUrl/job/${name}/doDelete")
.contentType("application/json")
.header("Jenkins-Crumb", jenkinsCrumb)
.auth.basic(username, password)
.response(asJson[Unit])
/**
* Disable a job
*
* Expected answers:
* code 200 : (Successfully disabled the job)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
* code 404 : (Job cannot be found on Jenkins instance)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param name Name of the job
* @param jenkinsCrumb CSRF protection token
*/
def postJobDisable(username: String, password: String)(name: String, jenkinsCrumb: Option[String] = None
): Request[Either[ResponseError[Exception], Unit], Nothing] =
basicRequest
.method(Method.POST, uri"$baseUrl/job/${name}/disable")
.contentType("application/json")
.header("Jenkins-Crumb", jenkinsCrumb)
.auth.basic(username, password)
.response(asJson[Unit])
/**
* Enable a job
*
* Expected answers:
* code 200 : (Successfully enabled the job)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
* code 404 : (Job cannot be found on Jenkins instance)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param name Name of the job
* @param jenkinsCrumb CSRF protection token
*/
def postJobEnable(username: String, password: String)(name: String, jenkinsCrumb: Option[String] = None
): Request[Either[ResponseError[Exception], Unit], Nothing] =
basicRequest
.method(Method.POST, uri"$baseUrl/job/${name}/enable")
.contentType("application/json")
.header("Jenkins-Crumb", jenkinsCrumb)
.auth.basic(username, password)
.response(asJson[Unit])
/**
* Stop a job
*
* Expected answers:
* code 200 : (Successfully stopped the job)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
* code 404 : (Job cannot be found on Jenkins instance)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param name Name of the job
* @param jenkinsCrumb CSRF protection token
*/
def postJobLastBuildStop(username: String, password: String)(name: String, jenkinsCrumb: Option[String] = None
): Request[Either[ResponseError[Exception], Unit], Nothing] =
basicRequest
.method(Method.POST, uri"$baseUrl/job/${name}/lastBuild/stop")
.contentType("application/json")
.header("Jenkins-Crumb", jenkinsCrumb)
.auth.basic(username, password)
.response(asJson[Unit])
/**
* Update view configuration
*
* Expected answers:
* code 200 : (Successfully updated view configuration)
* code 400 : String (An error has occurred - error message is embedded inside the HTML response)
* code 401 : (Authentication failed - incorrect username and/or password)
* code 403 : (Jenkins requires authentication - please set username and password)
* code 404 : (View cannot be found on Jenkins instance)
*
* Available security schemes:
* jenkins_auth (http)
*
* @param name Name of the view
* @param body View configuration in config.xml format
* @param jenkinsCrumb CSRF protection token
*/
def postViewConfig(username: String, password: String)(name: String, body: String, jenkinsCrumb: Option[String] = None
): Request[Either[ResponseError[Exception], Unit], Nothing] =
basicRequest
.method(Method.POST, uri"$baseUrl/view/${name}/config.xml")
.contentType("application/json")
.header("Jenkins-Crumb", jenkinsCrumb)
.auth.basic(username, password)
.body(body)
.response(asJson[Unit])
}
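/** Hedged usage sketch, not part of the generated file: runs one of the
  * requests above on sttp's synchronous HttpURLConnectionBackend. The base
  * URL and credentials are placeholders.
  */
object RemoteAccessApiUsageSketch extends App {
  implicit val backend = HttpURLConnectionBackend()
  val api = RemoteAccessApi("http://localhost:8080")
  // getQueue builds an unsent Request; send() executes it on the implicit backend.
  val response = api.getQueue("admin", "changeme")().send()
  println(response.body)
}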
|
cliffano/swaggy-jenkins
|
clients/scala-sttp/generated/src/main/scala/org/openapitools/client/api/RemoteAccessApi.scala
|
Scala
|
mit
| 19,168 |
/*
* Copyright (c) 2016 SnappyData, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package org.apache.spark.sql.streaming
import scala.reflect._
import scala.util.{Failure, Success, Try}
import com.rabbitmq.client.QueueingConsumer.Delivery
import com.rabbitmq.client.{Address, Channel, Connection, ConnectionFactory, QueueingConsumer}
import org.apache.spark.Logging
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.SnappyStreamingContext
import org.apache.spark.streaming.dstream.ReceiverInputDStream
import org.apache.spark.streaming.receiver.Receiver
object RabbitMQUtils {
def createStream[T: ClassTag, D: ClassTag](snsc: SnappyStreamingContext,
options: Map[String, String]): ReceiverInputDStream[T] = {
new RabbitMQInputDStream[T, D](snsc, options)
}
}
trait RabbitMQDecoder[T] extends scala.AnyRef {
def fromBytes(bytes: scala.Array[scala.Byte]): T
}
final class RabbitMQStringDecoder extends RabbitMQDecoder[String] {
def fromBytes(bytes: scala.Array[scala.Byte]): String = {
new Predef.String(bytes)
}
}
final class RabbitMQInputDStream[T: ClassTag, D: ClassTag](
_snsc: SnappyStreamingContext,
options: Map[String, String])
extends ReceiverInputDStream[T](_snsc) {
override def getReceiver(): Receiver[T] = {
new RabbitMQReceiver[T, D](options)
}
}
final class RabbitMQReceiver[T: ClassTag, D: ClassTag](options: Map[String, String])
extends Receiver[T](StorageLevel.MEMORY_AND_DISK_SER_2) with Logging {
override def onStart() {
getConnectionAndChannel match {
case Success((connection: Connection, channel: Channel)) =>
new Thread() {
override def run() {
receive(connection, channel)
}
}.start()
case Failure(f) =>
restart("Failed to connect", f)
}
}
private def getConnectionAndChannel: Try[(Connection, Channel)] = {
for {
connection: Connection <- Try(
new ConnectionFactory()
.newConnection(Address.parseAddresses("localhost")))
channel: Channel <- Try(connection.createChannel)
} yield {
(connection, channel)
}
}
  private def receive(connection: Connection,
      channel: Channel): Unit = {
    val queueName: String = options("queuename")
    val consumer: QueueingConsumer = new QueueingConsumer(channel)
    channel.basicConsume(queueName, false, consumer)
    // Build the decoder once via reflection; the original instantiated a new
    // decoder for every delivery, which is needless reflective overhead.
    val decoder: RabbitMQDecoder[T] = classTag[D].runtimeClass.getConstructor()
        .newInstance().asInstanceOf[RabbitMQDecoder[T]]
    while (!isStopped()) {
      val delivery: Delivery = consumer.nextDelivery()
      store(decoder.fromBytes(delivery.getBody))
      channel.basicAck(delivery.getEnvelope.getDeliveryTag, false)
    }
  }
override def onStop() {
}
}
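/** Hedged usage sketch, not part of the original file: wires the receiver
  * defined above into a DStream of Strings; the "queuename" value is a
  * placeholder.
  */
object RabbitMQUtilsUsageSketch {
  def stringStream(snsc: SnappyStreamingContext): ReceiverInputDStream[String] =
    RabbitMQUtils.createStream[String, RabbitMQStringDecoder](
      snsc, Map("queuename" -> "events"))
}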
|
vjr/snappydata
|
core/src/main/scala/org/apache/spark/sql/streaming/RabbitMQUtils.scala
|
Scala
|
apache-2.0
| 3,416 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark.testsuite.filterexpr
import java.sql.Timestamp
import org.apache.spark.sql.Row
import org.apache.spark.sql.common.util.QueryTest
import org.scalatest.BeforeAndAfterAll
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.util.CarbonProperties
/**
 * Test class for filter expression queries across multiple data types
 * (int, String, timestamp, decimal and bigint).
 */
class FilterProcessorTestCase extends QueryTest with BeforeAndAfterAll {
override def beforeAll {
sql("drop table if exists filtertestTables")
sql("drop table if exists filtertestTablesWithDecimal")
sql("drop table if exists filtertestTablesWithNull")
sql("drop table if exists filterTimestampDataType")
sql("drop table if exists noloadtable")
sql("CREATE TABLE filtertestTables (ID int, date Timestamp, country String, " +
"name String, phonetype String, serialname String, salary int) " +
"STORED BY 'org.apache.carbondata.format'"
)
sql("CREATE TABLE noloadtable (ID int, date Timestamp, country String, " +
"name String, phonetype String, serialname String, salary int) " +
"STORED BY 'org.apache.carbondata.format'"
)
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "MM-dd-yyyy HH:mm:ss")
sql("CREATE TABLE filterTimestampDataType (ID int, date Timestamp, country String, " +
"name String, phonetype String, serialname String, salary int) " +
"STORED BY 'org.apache.carbondata.format'"
)
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "MM-dd-yyyy HH:mm:ss")
sql(
s"LOAD DATA LOCAL INPATH '$resourcesPath/data2_DiffTimeFormat.csv' INTO TABLE " +
s"filterTimestampDataType " +
s"OPTIONS('DELIMITER'= ',', " +
s"'FILEHEADER'= '')"
)
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd")
sql(
s"LOAD DATA local inpath '$resourcesPath/dataDiff.csv' INTO TABLE filtertestTables " +
s"OPTIONS('DELIMITER'= ',', " +
s"'FILEHEADER'= '')"
)
sql(
"CREATE TABLE filtertestTablesWithDecimal (ID decimal, date Timestamp, country " +
"String, " +
"name String, phonetype String, serialname String, salary int) " +
"STORED BY 'org.apache.carbondata.format'"
)
sql(
s"LOAD DATA LOCAL INPATH '$resourcesPath/dataDiff.csv' INTO TABLE " +
s"filtertestTablesWithDecimal " +
s"OPTIONS('DELIMITER'= ',', " +
s"'FILEHEADER'= '')"
)
sql("DROP TABLE IF EXISTS filtertestTablesWithNull")
sql(
"CREATE TABLE filtertestTablesWithNull (ID int, date Timestamp, country " +
"String, " +
"name String, phonetype String, serialname String,salary int) " +
"STORED BY 'org.apache.carbondata.format'"
)
sql("DROP TABLE IF EXISTS filtertestTablesWithNullJoin")
sql(
"CREATE TABLE filtertestTablesWithNullJoin (ID int, date Timestamp, country " +
"String, " +
"name String, phonetype String, serialname String,salary int) " +
"STORED BY 'org.apache.carbondata.format'"
)
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy-MM-dd HH:mm:ss")
sql(
s"LOAD DATA LOCAL INPATH '$resourcesPath/data2.csv' INTO TABLE " +
s"filtertestTablesWithNull " +
s"OPTIONS('DELIMITER'= ',', " +
s"'FILEHEADER'= '')"
)
sql(
s"LOAD DATA LOCAL INPATH '$resourcesPath/data2.csv' INTO TABLE " +
s"filtertestTablesWithNullJoin " +
s"OPTIONS('DELIMITER'= ',', " +
s"'FILEHEADER'= '')"
)
sql("DROP TABLE IF EXISTS big_int_basicc")
sql("DROP TABLE IF EXISTS big_int_basicc_1")
sql("DROP TABLE IF EXISTS big_int_basicc_Hive")
sql("DROP TABLE IF EXISTS big_int_basicc_Hive_1")
sql("CREATE TABLE big_int_basicc (imei string,age int,task bigint,name string,country string,city string,sale int,num double,level decimal(10,3),quest bigint,productdate timestamp,enddate timestamp,PointId double,score decimal(10,3))STORED BY 'org.apache.carbondata.format'")
sql("CREATE TABLE big_int_basicc_1 (imei string,age int,task bigint,name string,country string,city string,sale int,num double,level decimal(10,3),quest bigint,productdate timestamp,enddate timestamp,PointId double,score decimal(10,3))STORED BY 'org.apache.carbondata.format'")
sql("CREATE TABLE big_int_basicc_Hive (imei string,age int,task bigint,name string,country string,city string,sale int,num double,level decimal(10,3),quest bigint,productdate date,enddate date,PointId double,score decimal(10,3))row format delimited fields terminated by ',' " +
"tblproperties(\\"skip.header.line.count\\"=\\"1\\") ")
sql("CREATE TABLE big_int_basicc_Hive_1 (imei string,age int,task bigint,name string,country string,city string,sale int,num double,level decimal(10,3),quest bigint,productdate date,enddate date,PointId double,score decimal(10,3))row format delimited fields terminated by ',' " +
"tblproperties(\\"skip.header.line.count\\"=\\"1\\") ")
CarbonProperties.getInstance().addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy-MM-dd HH:mm:ss")
sql(s"""LOAD DATA INPATH '$resourcesPath/big_int_Decimal.csv' INTO TABLE big_int_basicc options ('DELIMITER'=',', 'QUOTECHAR'='\\"', 'COMPLEX_DELIMITER_LEVEL_1'='$$','COMPLEX_DELIMITER_LEVEL_2'=':', 'FILEHEADER'= '')""")
sql(s"""LOAD DATA INPATH '$resourcesPath/big_int_Decimal.csv' INTO TABLE big_int_basicc_1 options ('DELIMITER'=',', 'QUOTECHAR'='\\"', 'COMPLEX_DELIMITER_LEVEL_1'='$$','COMPLEX_DELIMITER_LEVEL_2'=':', 'FILEHEADER'= '')""")
sql(s"load data local inpath '$resourcesPath/big_int_Decimal.csv' into table big_int_basicc_Hive")
sql(s"load data local inpath '$resourcesPath/big_int_Decimal.csv' into table big_int_basicc_Hive_1")
}
test("Is not null filter") {
checkAnswer(
sql("select id from filtertestTablesWithNull " + "where id is not null"),
Seq(Row(4), Row(6))
)
}
test("join filter") {
checkAnswer(
sql("select b.name from filtertestTablesWithNull a join filtertestTablesWithNullJoin b " + "on a.name=b.name"),
Seq(Row("aaa4"), Row("aaa5"),Row("aaa6"))
)
}
test("Between filter") {
checkAnswer(
sql("select date from filtertestTablesWithNull " + " where date between '2014-01-20 00:00:00' and '2014-01-28 00:00:00'"),
Seq(Row(Timestamp.valueOf("2014-01-21 00:00:00")), Row(Timestamp.valueOf("2014-01-22 00:00:00")))
)
}
test("Multi column with invalid member filter") {
checkAnswer(
sql("select id from filtertestTablesWithNull " + "where id = salary"),
Seq()
)
}
test("Greater Than Filter") {
checkAnswer(
sql("select id from filtertestTables " + "where id >999"),
Seq(Row(1000))
)
}
test("Greater Than Filter with decimal") {
checkAnswer(
sql("select id from filtertestTablesWithDecimal " + "where id >999"),
Seq(Row(1000))
)
}
test("Greater Than equal to Filter") {
checkAnswer(
sql("select id from filtertestTables " + "where id >=999"),
Seq(Row(999), Row(1000))
)
}
test("Greater Than equal to Filter with limit") {
checkAnswer(
sql("select id from filtertestTables " + "where id >=999 order by id desc limit 1"),
Seq(Row(1000))
)
}
test("Greater Than equal to Filter with aggregation limit") {
checkAnswer(
sql("select count(id),country from filtertestTables " + "where id >=999 group by country limit 1"),
Seq(Row(2,"china"))
)
}
test("Greater Than equal to Filter with decimal") {
checkAnswer(
sql("select id from filtertestTables " + "where id >=999"),
Seq(Row(999), Row(1000))
)
}
test("Include Filter") {
checkAnswer(
sql("select id from filtertestTables " + "where id =999"),
Seq(Row(999))
)
}
test("In Filter") {
checkAnswer(
sql(
"select Country from filtertestTables where Country in ('china','france') group by Country"
),
Seq(Row("china"), Row("france"))
)
}
test("Logical condition") {
checkAnswer(
sql("select id,country from filtertestTables " + "where country='china' and name='aaa1'"),
Seq(Row(1, "china"))
)
}
test("filter query over table having no data") {
checkAnswer(
sql("select * from noloadtable " + "where country='china' and name='aaa1'"),
Seq()
)
}
test("Time stamp filter with diff time format for load greater") {
checkAnswer(
sql("select date from filterTimestampDataType where date > '2014-07-10 00:00:00'"),
Seq(Row(Timestamp.valueOf("2014-07-20 00:00:00.0")),
Row(Timestamp.valueOf("2014-07-25 00:00:00.0"))
)
)
}
test("Time stamp filter with diff time format for load less") {
checkAnswer(
sql("select date from filterTimestampDataType where date < '2014-07-20 00:00:00'"),
Seq(Row(Timestamp.valueOf("2014-07-10 00:00:00.0"))
)
)
}
test("Time stamp filter with diff time format for load less than equal") {
checkAnswer(
sql("select date from filterTimestampDataType where date <= '2014-07-20 00:00:00'"),
Seq(Row(Timestamp.valueOf("2014-07-10 00:00:00.0")),Row(Timestamp.valueOf("2014-07-20 00:00:00.0"))
)
)
}
test("Time stamp filter with diff time format for load greater than equal") {
checkAnswer(
sql("select date from filterTimestampDataType where date >= '2014-07-20 00:00:00'"),
Seq(Row(Timestamp.valueOf("2014-07-20 00:00:00.0")),Row(Timestamp.valueOf("2014-07-25 00:00:00.0"))
)
)
}
test("join query with bigdecimal filter") {
checkAnswer(
sql("select b.level from big_int_basicc_Hive a join big_int_basicc_Hive_1 b on a.level=b.level order by level"),
sql("select b.level from big_int_basicc a join big_int_basicc_1 b on a.level=b.level order by level")
)
}
test("join query with bigint filter") {
checkAnswer(
sql("select b.task from big_int_basicc_Hive a join big_int_basicc_Hive_1 b on a.task=b.task"),
sql("select b.task from big_int_basicc a join big_int_basicc_1 b on a.task=b.task")
)
}
override def afterAll {
sql("drop table if exists filtertestTables")
sql("drop table if exists filtertestTablesWithDecimal")
sql("drop table if exists filtertestTablesWithNull")
sql("drop table if exists filterTimestampDataType")
sql("drop table if exists noloadtable")
sql("DROP TABLE IF EXISTS big_int_basicc")
sql("DROP TABLE IF EXISTS big_int_basicc_1")
sql("DROP TABLE IF EXISTS big_int_basicc_Hive")
sql("DROP TABLE IF EXISTS big_int_basicc_Hive_1")
sql("DROP TABLE IF EXISTS filtertestTablesWithNull")
sql("DROP TABLE IF EXISTS filtertestTablesWithNullJoin")
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
}
}
|
JihongMA/incubator-carbondata
|
integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/filterexpr/FilterProcessorTestCase.scala
|
Scala
|
apache-2.0
| 11,936 |
/* ______
** | ___ \\
** ___ _ _ _ __ | |_/ /_ _ _ __ _ __
** / __| | | | '_ \\| ___ \\ | | | '__| '_ \\
** \\__ \\ |_| | | | | |_/ / |_| | | | | | |
** |___/\\__,_|_| |_\\____/ \\__,_|_| |_| |_|
**
** SunBurn RayTracer
** http://www.hsyl20.fr/sunburn
** GPLv3
*/
package fr.hsyl20.sunburn.samplers
import fr.hsyl20.sunburn.core._
/**
* This sampler shuffles samples from another sampler
* (set as an argument of the constructor).
* Shuffling is used because samples created with the same
* method may have the same spatial arrangement.
**/
class ShuffledSampler(sampler: Sampler) extends Sampler(sampler.sampleCount) {
private def randomInt(max: Int) : Int = (scala.math.random * max).toInt
  override def generate: Seq[Sample] = {
    val s = sampler.generate()
    //shuffled indices (Fisher-Yates: the original drew j over the full range,
    //which yields a biased permutation)
    val r = Array.range(0, s.size)
    for (i <- s.size - 1 to 1 by -1) {
      val j = randomInt(i + 1)
      val temp = r(i)
      r(i) = r(j)
      r(j) = temp
    }
//shuffled samples
val sh = new Array[Sample](s.size)
var i = 0
for (sa <- s) {
sh(r(i)) = sa
i += 1
}
sh
}
}
|
hsyl20/SunBurn
|
src/main/scala/samplers/ShuffledSampler.scala
|
Scala
|
gpl-3.0
| 1,314 |
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
package play.filters.csrf
import java.util.concurrent.CompletableFuture
import javax.inject.Inject
import akka.stream.scaladsl.Source
import akka.util.ByteString
import org.specs2.specification.core.Fragment
import play.api.ApplicationLoader.Context
import play.api.http.HttpEntity
import play.api.http.HttpErrorHandler
import play.api.http.HttpFilters
import play.api.inject.guice.GuiceApplicationBuilder
import play.api.inject.guice.GuiceApplicationLoader
import play.api.libs.json.Json
import play.api.libs.ws._
import play.api.mvc.Handler.Stage
import play.api.mvc._
import play.api.routing.HandlerDef
import play.api.routing.Router
import play.api.test._
import play.api.Environment
import play.api.Mode
import play.mvc.Http
import scala.compat.java8.OptionConverters._
import scala.concurrent.Future
import scala.util.Random
/**
* Specs for the global CSRF filter
*/
class CSRFFilterSpec extends CSRFCommonSpecs {
sequential
"a CSRF filter also" should {
// conditions for adding a token
"not add a token to non GET requests" in {
buildCsrfAddToken()(_.put(""))(_.status must_== NOT_FOUND)
}
"not add a token to GET requests that don't accept HTML" in {
buildCsrfAddToken()(_.addHttpHeaders(ACCEPT -> "application/json").get())(_.status must_== NOT_FOUND)
}
"not add a token to GET request when response might be cached by shared cache" in {
buildCsrfAddResponseHeaders(CACHE_CONTROL -> "public, max-age=3600")(_.get())(_.cookies must be empty)
}
"add a token to GET request when response is not cached by shared cache" in {
Fragment.foreach(
Seq(
"no-cache",
"no-store",
"NO-CACHE",
"NO-STORE ",
"no-cache, must-revalidate",
"private",
"PRIVATE ",
"must-revalidate, private"
)
) { directive =>
directive >> {
buildCsrfAddResponseHeaders(CACHE_CONTROL -> directive)(_.get())(_.cookies must not be empty)
}
}
}
"add a token to GET request when response does not have a Cache-Control header" in {
buildCsrfAddResponseHeaders()(_.get())(_.cookies must not be empty)
}
"not add a token to non GET request when response might be cached by shared cache" in {
Fragment.foreach(Seq("POST", "PUT", "DELETE")) { method =>
method >> {
buildCsrfAddResponseHeaders(CACHE_CONTROL -> "public, max-age=3600")(_.execute(method))(
_.cookies must be empty
)
}
}
}
"not add a token to non GET request when response is not cached by shared cache" in {
Fragment.foreach(Seq("POST", "PUT", "DELETE")) { method =>
method >> {
buildCsrfAddResponseHeaders(CACHE_CONTROL -> "no-cache")(_.execute(method))(
_.cookies must be empty
)
}
}
}
"not add a token to non GET request when response does not have a Cache-Control header" in {
Fragment.foreach(Seq("POST", "PUT", "DELETE")) { method =>
method >> {
buildCsrfAddResponseHeaders()(_.execute(method))(
_.cookies must be empty
)
}
}
}
"add a token to GET requests that accept HTML" in {
buildCsrfAddToken()(_.addHttpHeaders(ACCEPT -> "text/html").get())(_.status must_== OK)
}
"add a token to GET requests that accept XHTML" in {
buildCsrfAddToken()(_.addHttpHeaders(ACCEPT -> "application/xhtml+xml").get())(_.status must_== OK)
}
"not add a token to HEAD requests that don't accept HTML" in {
buildCsrfAddToken()(_.addHttpHeaders(ACCEPT -> "application/json").head())(_.status must_== NOT_FOUND)
}
"add a token to HEAD requests that accept HTML" in {
buildCsrfAddToken()(_.addHttpHeaders(ACCEPT -> "text/html").head())(_.status must_== OK)
}
// extra conditions for doing a check
"check non form bodies" in {
buildCsrfCheckRequest(sendUnauthorizedResult = false)(_.addCookie("foo" -> "bar").post(Json.obj("foo" -> "bar")))(
_.status must_== FORBIDDEN
)
}
"check all methods" in {
buildCsrfCheckRequest(sendUnauthorizedResult = false)(_.addCookie("foo" -> "bar").delete())(
_.status must_== FORBIDDEN
)
}
"not check safe methods" in {
buildCsrfCheckRequest(sendUnauthorizedResult = false)(_.addCookie("foo" -> "bar").options())(_.status must_== OK)
}
"not check requests with no cookies" in {
buildCsrfCheckRequest(sendUnauthorizedResult = false)(_.post(Map("foo" -> "bar")))(_.status must_== OK)
}
"not add a token when responding to GET requests that accept HTML and don't get the token" in {
buildCsrfAddTokenNoRender(false)(_.addHttpHeaders(ACCEPT -> "text/html").get())(_.cookies must be empty)
}
"not add a token when responding to GET requests that accept XHTML and don't get the token" in {
buildCsrfAddTokenNoRender(false)(_.addHttpHeaders(ACCEPT -> "application/xhtml+xml").get())(
_.cookies must be empty
)
}
"add a token when responding to GET requests that don't get the token, if using non-HTTPOnly session cookie" in {
buildCsrfAddTokenNoRender(
false,
"play.filters.csrf.cookie.name" -> null,
"play.http.session.httpOnly" -> "false"
)(_.addHttpHeaders(ACCEPT -> "text/html").get())(_.cookies must not be empty)
}
"add a token when responding to GET requests that don't get the token, if using non-HTTPOnly cookie" in {
buildCsrfAddTokenNoRender(
false,
"play.filters.csrf.cookie.name" -> "csrf",
"play.filters.csrf.cookie.httpOnly" -> "false"
)(_.addHttpHeaders(ACCEPT -> "text/html").get())(_.cookies must not be empty)
}
"add a token when responding to GET requests that don't get the token, if response is streamed" in {
buildCsrfAddTokenNoRender(true)(_.addHttpHeaders(ACCEPT -> "text/html").get())(_.cookies must not be empty)
}
// other
"feed the body once a check has been done and passes" in {
withActionServer(
Seq(
"play.http.filters" -> classOf[CsrfFilters].getName
)
)(implicit app => {
case _ =>
val Action = inject[DefaultActionBuilder]
Action(
_.body.asFormUrlEncoded
.flatMap(_.get("foo"))
.flatMap(_.headOption)
.map(Results.Ok(_))
.getOrElse(Results.NotFound)
)
}) { ws =>
val token = signedTokenProvider.generateToken
await(
ws.url("http://localhost:" + testServerPort)
.withSession(TokenName -> token)
.post(Map("foo" -> "bar", TokenName -> token))
).body must_== "bar"
}
}
"allow bypassing the CSRF filter using a route modifier tag" in {
withActionServer(
Seq(
"play.http.filters" -> classOf[CsrfFilters].getName
)
)(implicit app => {
case _ =>
val env = inject[Environment]
val Action = inject[DefaultActionBuilder]
new Stage {
override def apply(requestHeader: RequestHeader): (RequestHeader, Handler) = {
(
requestHeader.addAttr(
Router.Attrs.HandlerDef,
HandlerDef(
env.classLoader,
"routes",
"FooController",
"foo",
Seq.empty,
"POST",
"/foo",
"comments",
Seq("NOCSRF", "api")
)
),
Action { request =>
request.body.asFormUrlEncoded
.flatMap(_.get("foo"))
.flatMap(_.headOption)
.map(Results.Ok(_))
.getOrElse(Results.NotFound)
}
)
}
}
}) { ws =>
val token = signedTokenProvider.generateToken
await(
ws.url("http://localhost:" + testServerPort)
.withSession(TokenName -> token)
.post(Map("foo" -> "bar"))
).body must_== "bar"
}
}
val notBufferedFakeApp = GuiceApplicationBuilder()
.configure(
"play.http.secret.key" -> "ad31779d4ee49d5ad5162bf1429c32e2e9933f3b",
"play.filters.csrf.body.bufferSize" -> "200",
"play.http.filters" -> classOf[CsrfFilters].getName
)
.appRoutes(implicit app => {
case _ => {
val Action = inject[DefaultActionBuilder]
Action { req =>
(for {
body <- req.body.asFormUrlEncoded
foos <- body.get("foo")
foo <- foos.headOption
buffereds <- body.get("buffered")
buffered <- buffereds.headOption
} yield {
Results.Ok(foo + " " + buffered)
}).getOrElse(Results.NotFound)
}
}
})
.build()
"feed a not fully buffered body once a check has been done and passes" in new WithServer(
notBufferedFakeApp,
testServerPort
) {
val token = signedTokenProvider.generateToken
val ws = inject[WSClient]
val response = await(
ws.url("http://localhost:" + port)
.withSession(TokenName -> token)
.addHttpHeaders(CONTENT_TYPE -> "application/x-www-form-urlencoded")
.post(
Seq(
// Ensure token is first so that it makes it into the buffered part
TokenName -> token,
"buffered" -> "buffer",
// This value must go over the edge of csrf.body.bufferSize
"longvalue" -> Random.alphanumeric.take(1024).mkString(""),
"foo" -> "bar"
).map(f => f._1 + "=" + f._2).mkString("&")
)
)
response.status must_== OK
response.body must_== "bar buffer"
}
"work with a Java error handler" in {
def csrfCheckRequest = buildCsrfCheckRequestWithJavaHandler()
def csrfAddToken = buildCsrfAddToken("csrf.cookie.name" -> "csrf")
def generate = signedTokenProvider.generateToken
def addToken(req: WSRequest, token: String) = req.withCookies("csrf" -> token)
def getToken(response: WSResponse) = response.cookie("csrf").map(_.value)
def compareTokens(a: String, b: String) = signedTokenProvider.compareTokens(a, b) must beTrue
sharedTests(csrfCheckRequest, csrfAddToken, generate, addToken, getToken, compareTokens, UNAUTHORIZED)
}
}
"The CSRF module" should {
val environment = Environment(new java.io.File("."), getClass.getClassLoader, Mode.Test)
def fakeContext = Context.create(environment)
def loader = new GuiceApplicationLoader
"allow injecting CSRF filters" in {
implicit val app = loader.load(fakeContext)
inject[CSRFFilter] must beAnInstanceOf[CSRFFilter]
}
}
def buildCsrfCheckRequest(sendUnauthorizedResult: Boolean, configuration: (String, String)*) = new CsrfTester {
def apply[T](makeRequest: (WSRequest) => Future[WSResponse])(handleResponse: (WSResponse) => T) = {
val config = configuration ++ Seq("play.http.filters" -> classOf[CsrfFilters].getName) ++ {
if (sendUnauthorizedResult) Seq("play.filters.csrf.errorHandler" -> classOf[CustomErrorHandler].getName)
else Nil
}
withActionServer(config) { implicit app =>
{
case _ =>
val Action = inject[DefaultActionBuilder]
Action(Results.Ok)
}
} { ws =>
handleResponse(await(makeRequest(ws.url("http://localhost:" + testServerPort))))
}
}
}
def buildCsrfCheckRequestWithJavaHandler() = new CsrfTester {
def apply[T](makeRequest: (WSRequest) => Future[WSResponse])(handleResponse: (WSResponse) => T) = {
withActionServer(
Seq(
"play.http.filters" -> classOf[CsrfFilters].getName,
"play.filters.csrf.cookie.name" -> "csrf",
"play.filters.csrf.errorHandler" -> "play.filters.csrf.JavaErrorHandler"
)
) { implicit app =>
{
case _ =>
val Action = inject[DefaultActionBuilder]
Action(Results.Ok)
}
} { ws =>
handleResponse(await(makeRequest(ws.url("http://localhost:" + testServerPort))))
}
}
}
def buildCsrfAddToken(configuration: (String, String)*) = new CsrfTester {
def apply[T](makeRequest: (WSRequest) => Future[WSResponse])(handleResponse: (WSResponse) => T) = {
withActionServer(
configuration ++ Seq("play.http.filters" -> classOf[CsrfFilters].getName)
)(implicit app => {
case _ =>
val Action = inject[DefaultActionBuilder]
Action { implicit req =>
CSRF
.getToken(req)
.map { token =>
Results.Ok(token.value)
}
.getOrElse(Results.NotFound)
}
}) { ws =>
handleResponse(await(makeRequest(ws.url("http://localhost:" + testServerPort))))
}
}
}
def buildCsrfAddTokenNoRender(streamed: Boolean, configuration: (String, String)*) = new CsrfTester {
def apply[T](makeRequest: (WSRequest) => Future[WSResponse])(handleResponse: (WSResponse) => T) = {
withActionServer(
configuration ++ Seq("play.http.filters" -> classOf[CsrfFilters].getName)
)(implicit app => {
case _ =>
val Action = inject[DefaultActionBuilder]
if (streamed) {
Action(
Result(
header = ResponseHeader(200, Map.empty),
body = HttpEntity.Streamed(Source.single(ByteString("Hello world")), None, Some("text/html"))
)
)
} else {
Action(Results.Ok("Hello world!"))
}
}) { ws =>
handleResponse(await(makeRequest(ws.url("http://localhost:" + testServerPort))))
}
}
}
def buildCsrfAddResponseHeaders(responseHeaders: (String, String)*) = new CsrfTester {
def apply[T](makeRequest: (WSRequest) => Future[WSResponse])(handleResponse: (WSResponse) => T) = {
withActionServer(
Seq("play.http.filters" -> classOf[CsrfFilters].getName)
)(implicit app => {
case _ =>
val Action = inject[DefaultActionBuilder]
Action { implicit request: RequestHeader =>
Results.Ok(CSRF.getToken.fold("")(_.value)).withHeaders(responseHeaders: _*)
}
}) { ws =>
handleResponse(await(makeRequest(ws.url("http://localhost:" + testServerPort))))
}
}
}
}
class CustomErrorHandler extends CSRF.ErrorHandler {
import play.api.mvc.Results.Unauthorized
def handle(req: RequestHeader, msg: String) =
Future.successful(
Unauthorized(
"Origin: " + req.attrs
.get(HttpErrorHandler.Attrs.HttpErrorInfo)
.map(_.origin)
.getOrElse("<not set>") + " / " + msg
)
)
}
class JavaErrorHandler extends CSRFErrorHandler {
def handle(req: Http.RequestHeader, msg: String) =
CompletableFuture.completedFuture(
play.mvc.Results.unauthorized(
"Origin: " + req.attrs
.getOptional(play.http.HttpErrorHandler.Attrs.HTTP_ERROR_INFO)
.asScala
.map(_.origin)
.getOrElse("<not set>") + " / " + msg
)
)
}
class CsrfFilters @Inject() (filter: CSRFFilter) extends HttpFilters {
def filters = Seq(filter)
}
|
marcospereira/playframework
|
web/play-filters-helpers/src/test/scala/play/filters/csrf/CSRFFilterSpec.scala
|
Scala
|
apache-2.0
| 15,812 |
package toguru.play
import java.util.UUID
import play.api.mvc.RequestHeader
import toguru.api.Toggle.ToggleId
import toguru.api.{Activations, ToguruClient}
import toguru.impl.TogglesString.parse
import scala.util.Try
abstract class AbstractPlaySupport {
/**
* Creates a new toguru client based on the given client provider.
*
* @param clientProvider the client provider to create [[toguru.api.ClientInfo]]s from Play Requests.
* @param endpointUrl the toguru server to use, e.g. <code>http://localhost:9000</code>
* @return
*/
def toguruClient(clientProvider: PlayClientProvider, endpointUrl: String): PlayToguruClient =
new ToguruClient(clientProvider, Activations.fromEndpoint(endpointUrl))
/**
* Creates a new toguru client with forced test activations.
*
* @param clientProvider the client provider to create [[toguru.api.ClientInfo]]s from Play Requests.
   * @param testActivations the activations provider supplying the forced test toggle states
* @return
*/
def testToguruClient(clientProvider: PlayClientProvider, testActivations: Activations.Provider): PlayToguruClient =
new ToguruClient(clientProvider, testActivations)
def uuidFromCookieValue(cookieName: String)(implicit requestHeader: RequestHeader): Option[UUID] =
requestHeader.cookies
.get(cookieName)
.flatMap(c => Try(UUID.fromString(c.value)).toOption)
def fromCookie(name: String)(implicit requestHeader: RequestHeader): Option[(String, String)] =
requestHeader.cookies.get(name).map(name -> _.value)
def fromHeader(name: String)(implicit requestHeader: RequestHeader): Option[(String, String)] =
requestHeader.headers.get(name).map(name -> _)
def forcedToggle(toggleId: ToggleId)(implicit requestHeader: RequestHeader): Option[Boolean] = {
def lowerCaseKeys[T](m: Map[String, T]) =
m.map {
case (k, v) => (k.toLowerCase, v)
}
val headers = lowerCaseKeys(requestHeader.headers.toSimpleMap)
lazy val maybeForcedFromHeader = headers
.get("x-toguru")
.orElse(headers.get("toguru"))
.flatMap(togglesString => parse(togglesString)(toggleId))
    lazy val maybeForcedFromCookie = requestHeader.cookies
      .get("toguru")
      .flatMap(cookie => parse(cookie.value)(toggleId))
lazy val lowerCasedKeysQueryStringMap = lowerCaseKeys(requestHeader.queryString)
lazy val parseToggleQueryString: ToggleId => Option[Boolean] = {
val maybeToggleString: Option[List[String]] =
lowerCasedKeysQueryStringMap.get("toguru").map(_.toList)
toggleId =>
maybeToggleString
.map {
case Nil => None
case toggleString :: _ => // we ignore toggles defined twice by the client
parse(toggleString)(toggleId)
}
.getOrElse(None)
}
val maybeForcedFromQueryParam = parseToggleQueryString(toggleId)
maybeForcedFromQueryParam
.orElse(maybeForcedFromHeader)
.orElse(maybeForcedFromCookie)
}
}
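// Hedged usage sketch (comment only): forcing a toggle through the query
// string, assuming Play's FakeRequest and the "id=true|id2=false" toggle
// string format handled by TogglesString.parse above.
//   implicit val request = play.api.test.FakeRequest("GET", "/?toguru=feature-a%3Dtrue")
//   forcedToggle("feature-a")  // Some(true): the query param wins over header and cookie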
|
AutoScout24/toguru-scala-client
|
play/src/main/scala/toguru/play/AbstractPlaySupport.scala
|
Scala
|
mit
| 3,019 |
/*
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.flyberrycapital.slack
/**
* Class for representing a Slack IM channel.
*
* @param id The IM channel ID.
* @param user The user ID of the "calling user"
 * @param created A UNIX timestamp corresponding to the IM creation date/time.
* @param is_user_deleted Denotes if the other user's account has been disabled.
*/
case class SlackIM(id: String, user: String, created: Int, is_user_deleted: Boolean)
|
flyberry-capital/scala-slack
|
src/main/scala/com/flyberrycapital/slack/SlackIM.scala
|
Scala
|
mit
| 1,502 |
package org.jetbrains.sbt.shell.sbt13_7
import org.jetbrains.plugins.scala.PerfCycleTests
import org.jetbrains.sbt.shell.UseSbtTestRunTest
import org.junit.experimental.categories.Category
/**
* Created by Roman.Shein on 13.04.2017.
*/
@Category(Array(classOf[PerfCycleTests]))
class UseSbtTestRunTest_13_7 extends UseSbtTestRunTest {
override def getPath: String = "sbt/shell/sbtTestRunTest_07"
}
|
jastice/intellij-scala
|
scala/scala-impl/test/org/jetbrains/sbt/shell/sbt13_7/UseSbtTestRunTest_13_7.scala
|
Scala
|
apache-2.0
| 406 |
// Copyright 2016 Clément Bizeau
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package models
import java.time.ZonedDateTime
import play.api.libs.functional.syntax._
import play.api.libs.json.{JsPath, Reads}
case class Auth(active: Boolean, client_id: String, username: String, token_type: String, exp: ZonedDateTime)
object Auth {
implicit val authReads: Reads[Auth] = (
(JsPath \\ "active").read[Boolean] and
(JsPath \\ "client_id").read[String] and
(JsPath \\ "username").read[String] and
(JsPath \\ "token_type").read[String] and
(JsPath \\ "exp").read[ZonedDateTime]
)(Auth.apply _)
}
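// Hedged usage sketch (comment only): parsing an introspection payload with
// the Reads above; assumes `exp` arrives in a form the default ZonedDateTime
// Reads accepts (e.g. an ISO-8601 date-time string).
//   Json.parse("""{"active":true,"client_id":"c1","username":"u",
//                  "token_type":"bearer","exp":"2016-01-01T00:00:00Z"}""").validate[Auth]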
|
clebi/projecter
|
app/models/Auth.scala
|
Scala
|
apache-2.0
| 1,134 |
package nounou.elements.data
import nounou.elements.ranges.{SampleRangeValid}
import nounou.elements.traits.{NNDataTiming, NNDataScale}
import breeze.linalg.{DenseMatrix => DM, DenseVector => DV}
/** NNData subclass whose samples are fully preloaded in memory as an
  * array of per-channel vectors.
  */
class NNDataPreloaded( val data: Array[DV[Int]], timingEntry: NNDataTiming, scaleEntry: NNDataScale)
extends NNData {
setScale(scaleEntry)
setTiming(timingEntry)
override def getChannelCount: Int = data.length
override def readPointImpl(channel: Int, frame: Int, segment: Int) =
data(channel)(timing.segmentStartFrame(segment) + frame)
override def readTraceDVImpl(channel: Int, rangeFrValid: SampleRangeValid) = {
data( channel )(
rangeFrValid.toRangeInclusive( timing.segmentStartFrame( rangeFrValid.segment ))
)
}
}
|
ktakagaki/nounou.rebooted150527
|
src/main/scala/nounou/elements/data/NNDataPreloaded.scala
|
Scala
|
apache-2.0
| 1,859 |
package dispatch.as.stream
import dispatch._
object Lines {
def apply[U](f: String => U) =
new stream.StringsByLine[Unit] {
def onStringBy(string: String) {
f(string)
}
def onCompleted = ()
}
}
|
maiflai/reboot
|
core/src/main/scala/as/stream/lines.scala
|
Scala
|
lgpl-3.0
| 232 |
package autolift.test.algebird
import autolift._
import Algebird._
class `LiftM*Test` extends BaseSpec{
def intintF(x: Int, y: Int) = x + y
def anyanyF(x: Any, y: Any) = 1
"liftM2" should "work on a pair of List" in{
val lf = liftM2(intintF)
val out = lf(List(0, 1), List(1, 2))
same[List[Int]](out, List(1, 2, 2, 3))
}
"liftM2" should "work on a pair of Option List" in{
val lf = liftM2(intintF)
val out = lf(Option(List(0, 1)), Option(List(1, 2)))
same[Option[List[Int]]](out, Option(List(1, 2, 2, 3)))
val out2 = lf(Option(List(0, 1)), None: Option[List[Int]])
same[Option[List[Int]]](out2, None)
}
"liftM2" should "work with functions" in{
val lf = liftM2(anyanyF)
val out = lf(Option(2), Option('c'))
same[Option[Int]](out, Option(1))
}
"liftM2" should "map" in{
val lf = liftM2(intintF) map (_.toString)
val out = lf(List(1), List(2))
same[List[String]](out, List("3"))
}
"liftM3" should "work on Lists" in{
val lf = liftM3{ (x: Int, y: Int, z: Int) => x + y + z}
val out = lf(List(0, 1), List(0, 1), List(0, 1))
same[List[Int]](out, List(0, 1, 1, 2, 1, 2, 2, 3))
}
}
|
wheaties/AutoLifts
|
autolift-algebird/src/test/scala/autolift/algebird/LiftM*Test.scala
|
Scala
|
apache-2.0
| 1,133 |
package com.lucidchart.open.cashy.uploaders
import com.lucidchart.open.cashy.amazons3.S3Client
import com.lucidchart.open.cashy.models.{Asset, User, AssetModel}
import com.lucidchart.open.cashy.utils.{DownloadHelper, KrakenClient}
import scala.collection.mutable.MutableList
object KrakenImageUploader extends KrakenImageUploader
class KrakenImageUploader extends Uploader {
override def upload(bytes: Array[Byte], contentType: Option[String], user: User, data: UploadFormSubmission): UploadResult = {
val bucket = data.bucket
val assetName = data.assetName
val uploadedAssets = MutableList[Tuple2[String,Asset]]()
val existingAssets = MutableList[Tuple2[String,Asset]]()
val extension = getExtension(assetName)
// If the image is resized
val asset = if(data.resizeImage) {
val resizeWidth = data.imageWidth.get
val resizeHeight = data.imageHeight.get
// Get the bytes from the already resized image (when the user previewed it)
val resizedBytes = DownloadHelper.download(data.resizedImage.get).bytes
// Upload it to S3
val asset = uploadAndAudit(resizedBytes, bucket, assetName, contentType, user)
uploadedAssets += ((resizeWidth + "x" + resizeHeight, asset))
// If retina option is checked
if(data.uploadRetina) {
val retinaName = data.assetRetinaName.get
// Check if @2x name is taken
if(!S3Client.existsInS3(bucket, retinaName)) {
val tempUrl = tempUpload(bucket, bytes, contentType, extension)
// Send the resize request to Kraken
val retinaWidth = resizeWidth * 2
val retinaHeight = resizeHeight * 2
val retinaBytes = KrakenClient.resizeImage(tempUrl, retinaWidth, retinaHeight)
// Upload to S3
val retinaAsset = uploadAndAudit(retinaBytes, bucket, retinaName, contentType, user)
uploadedAssets += (("Retina", retinaAsset))
} else {
// Get that asset and return it
val retinaAsset = AssetModel.findByKey(bucket, retinaName).get
existingAssets += (("Retina", retinaAsset))
}
}
asset
} else {
val tempUrl = tempUpload(bucket, bytes, contentType, extension)
val compressedBytes = KrakenClient.compressImage(tempUrl)
// upload to s3
val asset = uploadAndAudit(compressedBytes, bucket, assetName, contentType, user)
uploadedAssets += (("Original", asset))
asset
}
UploadResult(
uploadedAssets.toList,
existingAssets.toList,
asset.bucket,
asset.parent
)
}
// Uploads an object to the temp location with a random name
private def tempUpload(bucket: String, bytes: Array[Byte], contentType: Option[String], extension: String): String = {
// Generate a name for the temp file
val tempName = java.util.UUID.randomUUID.toString + "." + extension
// Upload the original file to the temp location for kraken
S3Client.uploadTempFile(bucket, tempName, bytes, contentType)
}
}
|
lucidsoftware/cashy
|
app/com/lucidchart/open/cashy/uploaders/KrakenImageUploader.scala
|
Scala
|
apache-2.0
| 3,031 |
/**
* Copyright 2015 Yahoo Inc. Licensed under the Apache License, Version 2.0
* See accompanying LICENSE file.
*/
package kafka.manager.utils
import org.apache.curator.framework.{CuratorFrameworkFactory, CuratorFramework}
import org.apache.curator.retry.ExponentialBackoffRetry
import org.apache.curator.test.TestingServer
import org.scalatest.{BeforeAndAfterAll, FunSuite}
import scala.reflect.ClassTag
/**
* @author hiral
*/
trait CuratorAwareTest extends FunSuite with BeforeAndAfterAll with ZookeeperServerAwareTest {
private[this] var curator: Option[CuratorFramework] = None
override protected def beforeAll(): Unit = {
super.beforeAll()
val retryPolicy = new ExponentialBackoffRetry(1000, 3)
val curatorFramework = CuratorFrameworkFactory.newClient(testServer.getConnectString, retryPolicy)
curatorFramework.start
curator = Some(curatorFramework)
}
override protected def afterAll(): Unit = {
curator.foreach(_.close())
super.afterAll()
}
protected def withCurator(fn: CuratorFramework => Unit): Unit = {
curator.foreach(fn)
}
protected def produceWithCurator[T](fn: CuratorFramework => T) : T = {
require(curator.isDefined,"Cannot produce with no curator defined!")
fn(curator.get)
}
protected def checkError[T](fn: => Any)(implicit tag: ClassTag[T]): Unit = {
try {
fn
throw new RuntimeException(s"expected ${tag.runtimeClass} , but no exceptions were thrown!")
} catch {
case UtilException(caught) =>
if(!tag.runtimeClass.isAssignableFrom(caught.getClass)) {
throw new RuntimeException(s"expected ${tag.runtimeClass} , found ${caught.getClass}, value=$caught")
}
case throwable: Throwable =>
throw new RuntimeException(s"expected ${tag.runtimeClass} , found ${throwable.getClass}", throwable)
}
}
}
|
Flipkart/kafka-manager
|
test/kafka/manager/utils/CuratorAwareTest.scala
|
Scala
|
apache-2.0
| 1,857 |
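A possible usage sketch for the helpers above, assuming the `ZookeeperServerAwareTest` mix-in supplies the embedded `testServer`; the znode path and payload are illustrative:
class MyZkSuite extends CuratorAwareTest {
  test("writes and reads a znode") {
    withCurator { curator =>
      // Create /demo (and any missing parents) with a small payload.
      curator.create().creatingParentsIfNeeded().forPath("/demo", "hi".getBytes)
    }
    val data = produceWithCurator(_.getData.forPath("/demo"))
    assert(new String(data) == "hi")
  }
}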
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.thriftserver
import java.io._
import java.sql.Timestamp
import java.util.Date
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.duration._
import scala.concurrent.{Await, Promise}
import org.apache.spark.sql.test.ProcessTestUtils.ProcessOutputCapturer
import org.apache.hadoop.hive.conf.HiveConf.ConfVars
import org.scalatest.BeforeAndAfter
import org.apache.spark.util.Utils
import org.apache.spark.{Logging, SparkFunSuite}
/**
* A test suite for the `spark-sql` CLI tool. Note that all test cases share the same temporary
* Hive metastore and warehouse.
*/
class CliSuite extends SparkFunSuite with BeforeAndAfter with Logging {
val warehousePath = Utils.createTempDir()
val metastorePath = Utils.createTempDir()
val scratchDirPath = Utils.createTempDir()
before {
warehousePath.delete()
metastorePath.delete()
scratchDirPath.delete()
}
after {
warehousePath.delete()
metastorePath.delete()
scratchDirPath.delete()
}
/**
* Run a CLI operation and expect all the queries and expected answers to be returned.
* @param timeout maximum time for the commands to complete
* @param extraArgs any extra arguments
* @param errorResponses a sequence of strings whose presence in the stdout of the forked process
* is taken as an immediate error condition. That is: if a line beginning
* with one of these strings is found, fail the test immediately.
* The default value is `Seq("Error:")`
*
   * @param queriesAndExpectedAnswers one or more tuples of query + answer
*/
def runCliWithin(
timeout: FiniteDuration,
extraArgs: Seq[String] = Seq.empty,
errorResponses: Seq[String] = Seq("Error:"))(
queriesAndExpectedAnswers: (String, String)*): Unit = {
val (queries, expectedAnswers) = queriesAndExpectedAnswers.unzip
// Explicitly adds ENTER for each statement to make sure they are actually entered into the CLI.
    val queriesString = queries.map(_ + "\n").mkString
val command = {
val cliScript = "../../bin/spark-sql".split("/").mkString(File.separator)
val jdbcUrl = s"jdbc:derby:;databaseName=$metastorePath;create=true"
s"""$cliScript
| --master local
| --hiveconf ${ConfVars.METASTORECONNECTURLKEY}=$jdbcUrl
| --hiveconf ${ConfVars.METASTOREWAREHOUSE}=$warehousePath
| --hiveconf ${ConfVars.SCRATCHDIR}=$scratchDirPath
""".stripMargin.split("\\\\s+").toSeq ++ extraArgs
}
var next = 0
val foundAllExpectedAnswers = Promise.apply[Unit]()
val buffer = new ArrayBuffer[String]()
val lock = new Object
def captureOutput(source: String)(line: String): Unit = lock.synchronized {
// This test suite sometimes gets extremely slow out of unknown reason on Jenkins. Here we
// add a timestamp to provide more diagnosis information.
buffer += s"${new Timestamp(new Date().getTime)} - $source> $line"
// If we haven't found all expected answers and another expected answer comes up...
if (next < expectedAnswers.size && line.contains(expectedAnswers(next))) {
next += 1
// If all expected answers have been found...
if (next == expectedAnswers.size) {
foundAllExpectedAnswers.trySuccess(())
}
} else {
errorResponses.foreach { r =>
if (line.startsWith(r)) {
foundAllExpectedAnswers.tryFailure(
new RuntimeException(s"Failed with error line '$line'"))
}
}
}
}
val process = new ProcessBuilder(command: _*).start()
val stdinWriter = new OutputStreamWriter(process.getOutputStream)
stdinWriter.write(queriesString)
stdinWriter.flush()
stdinWriter.close()
new ProcessOutputCapturer(process.getInputStream, captureOutput("stdout")).start()
new ProcessOutputCapturer(process.getErrorStream, captureOutput("stderr")).start()
try {
Await.result(foundAllExpectedAnswers.future, timeout)
} catch { case cause: Throwable =>
val message =
s"""
|=======================
|CliSuite failure output
|=======================
|Spark SQL CLI command line: ${command.mkString(" ")}
|Exception: $cause
|Executed query $next "${queries(next)}",
|But failed to capture expected output "${expectedAnswers(next)}" within $timeout.
|
           |${buffer.mkString("\n")}
|===========================
|End CliSuite failure output
|===========================
""".stripMargin
logError(message, cause)
fail(message, cause)
} finally {
process.destroy()
}
}
test("Simple commands") {
val dataFilePath =
Thread.currentThread().getContextClassLoader.getResource("data/files/small_kv.txt")
runCliWithin(3.minute)(
"CREATE TABLE hive_test(key INT, val STRING);"
-> "OK",
"SHOW TABLES;"
-> "hive_test",
s"LOAD DATA LOCAL INPATH '$dataFilePath' OVERWRITE INTO TABLE hive_test;"
-> "OK",
"CACHE TABLE hive_test;"
-> "",
"SELECT COUNT(*) FROM hive_test;"
-> "5",
"DROP TABLE hive_test;"
-> "OK"
)
}
test("Single command with -e") {
runCliWithin(2.minute, Seq("-e", "SHOW DATABASES;"))("" -> "OK")
}
test("Single command with --database") {
runCliWithin(2.minute)(
"CREATE DATABASE hive_test_db;"
-> "OK",
"USE hive_test_db;"
-> "OK",
"CREATE TABLE hive_test(key INT, val STRING);"
-> "OK",
"SHOW TABLES;"
-> "hive_test"
)
runCliWithin(2.minute, Seq("--database", "hive_test_db", "-e", "SHOW TABLES;"))(
""
-> "OK",
""
-> "hive_test"
)
}
test("Commands using SerDe provided in --jars") {
val jarFile =
"../hive/src/test/resources/hive-hcatalog-core-0.13.1.jar"
.split("/")
.mkString(File.separator)
val dataFilePath =
Thread.currentThread().getContextClassLoader.getResource("data/files/small_kv.txt")
runCliWithin(3.minute, Seq("--jars", s"$jarFile"))(
"""CREATE TABLE t1(key string, val string)
|ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe';
""".stripMargin
-> "OK",
"CREATE TABLE sourceTable (key INT, val STRING);"
-> "OK",
s"LOAD DATA LOCAL INPATH '$dataFilePath' OVERWRITE INTO TABLE sourceTable;"
-> "OK",
"INSERT INTO TABLE t1 SELECT key, val FROM sourceTable;"
-> "",
"SELECT count(key) FROM t1;"
-> "5",
"DROP TABLE t1;"
-> "OK",
"DROP TABLE sourceTable;"
-> "OK"
)
}
}
|
pronix/spark
|
sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
|
Scala
|
apache-2.0
| 7,625 |
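The heart of `runCliWithin` above is an in-order scan: each output line can satisfy at most the next expected answer. A standalone sketch of that matching logic (names are illustrative, not Spark API):
// Returns true once every expected answer has been seen, in order.
def allAnswersSeen(output: Seq[String], expected: Seq[String]): Boolean = {
  var next = 0
  output.foreach { line =>
    if (next < expected.size && line.contains(expected(next))) next += 1
  }
  next == expected.size
}
// e.g. allAnswersSeen(Seq("OK", "hive_test", "5"), Seq("OK", "5")) == true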
package hotpepper4s.raw
import hotpepper4s.{CodeName, Area, BaseArea, Results}
/**
* @author ponkotuy
* date: 13/12/27
*/
case class MiddleAreaResults(
private val api_version: String,
private val results_available: Int,
private val results_returned: String,
private val results_start: Int,
private val middle_area: List[MiddleArea]) extends Results[MiddleArea]{
def apiVersion: String = api_version
def resultsAvailable: Int = results_available
def resultsReturned: String = results_returned
def resultsStart: Int = results_start
def data: List[MiddleArea] = middle_area
def middleArea = middle_area
}
case class MiddleArea(
code: String,
name: String,
private val large_area: BaseArea,
private val service_area: BaseArea,
private val large_service_area: BaseArea) extends Area with CodeName {
def largeArea = large_area
def serviceArea = service_area
def largeServiceArea = large_service_area
}
|
ponkotuy/hotpepper4s
|
src/main/scala/hotpepper4s/raw/MiddleAreaResults.scala
|
Scala
|
mit
| 961 |
package com.twitter.util
import java.util.logging.{Level, Logger}
/**
* Wraps an exception that happens when handling another exception in
* a monitor.
*/
case class MonitorException(
handlingExc: Throwable,
monitorExc: Throwable
) extends Exception(monitorExc) {
override def getMessage =
"threw exception \""+monitorExc+"\" while handling "+
"another exception \""+handlingExc+"\""
}
/**
* A Monitor is a composable exception handler. It is independent of
* position, divorced from the notion of a call stack. Monitors do
 * not recover values from failed computations: they handle only true
* exceptions that may require cleanup.
*/
trait Monitor { self =>
/**
* Attempt to handle the exception `exc`.
*
* @return whether the exception was handled by this Monitor
*/
def handle(exc: Throwable): Boolean
/**
* Run `f` inside of the monitor context. If `f` throws
* an exception - directly or not - it is handled by this
* monitor.
*/
def apply(f: => Unit) = Monitor.using(this) {
try f catch { case exc: Throwable => if (!handle(exc)) throw exc }
}
/**
* A new monitor which, if `this` fails to handle the exception,
* attempts to let `next` handle it.
*/
def orElse(next: Monitor) = new Monitor {
def handle(exc: Throwable): Boolean = {
self.tryHandle(exc).rescue { case exc1 =>
next.tryHandle(exc1)
}.isReturn
}
}
/**
* A new monitor which first handles the exception with `this`,
* then passes it onto `next` unconditionally. The new monitor
* handles the exception if either `this` or `next` does.
*/
def andThen(next: Monitor) = new Monitor {
def handle(exc: Throwable): Boolean =
self.tryHandle(exc) match {
case Return(_) =>
next.tryHandle(exc)
true
case Throw(exc1) =>
next.tryHandle(exc1).isReturn
}
}
/**
* An implementation widget: attempts to handle `exc` returning a
* `com.twitter.util.Try[Unit]`. If the exception is unhandled,
* we return `Throw(exc)`, if the handler throws an exception, we
* wrap it in a [[MonitorException]].
*/
protected def tryHandle(exc: Throwable): Try[Unit] =
Try { self.handle(exc) } rescue {
case monitorExc => Throw(MonitorException(exc, monitorExc))
} flatMap { ok =>
if (ok) Return.Unit
else Throw(exc): Try[Unit]
}
}
/**
* Defines the (Future)-`Local` monitor as well as some monitor
* utilities.
*/
object Monitor extends Monitor {
private[this] val local = new Local[Monitor]
/** Get the current `Local` monitor */
def get = local() getOrElse NullMonitor
/** Set the `Local` monitor */
def set(m: Monitor) {
require(m ne this, "Cannot set the monitor to the global Monitor")
local() = m
}
/** Compute `f` with the `Local` monitor set to `m` */
@inline
def using[T](m: Monitor)(f: => T): T = restoring {
set(m)
f
}
/** Restore the `Local` monitor after running computation `f` */
@inline
def restoring[T](f: => T): T = {
val saved = local()
try f finally local.set(saved)
}
/**
* An exception catcher that attempts to handle exceptions with
* the current monitor.
*/
val catcher: PartialFunction[Throwable, Unit] = {
case exc =>
if (!handle(exc))
throw exc
}
/**
* Run the computation `f` in the context of the current `Local`
* monitor.
*/
override def apply(f: => Unit) =
try f catch catcher
/**
* Handle `exc` with the current `Local` monitor. If the
* `Local` monitor fails to handle the exception, it is handled by
* the `RootMonitor`.
*/
def handle(exc: Throwable): Boolean =
(get orElse RootMonitor).handle(exc)
private[this] val AlwaysFalse = scala.Function.const(false) _
/**
* Create a new monitor from a partial function.
*/
def mk(f: PartialFunction[Throwable, Boolean]) = new Monitor {
def handle(exc: Throwable): Boolean = f.applyOrElse(exc, AlwaysFalse)
}
/**
* Checks whether or not monitoring is activated, meaning that the
* currently-set Monitor is non-null.
*
* @return true if currently-set Monitor is the NullMonitor. False otherwise.
*/
def isActive: Boolean = get != NullMonitor
}
/**
* A monitor that always fails to handle an exception. Combining this
* with any other Monitor will simply return the other Monitor effectively
* removing NullMonitor from the chain.
*/
object NullMonitor extends Monitor {
def handle(exc: Throwable) = false
override def orElse(next: Monitor) = next
override def andThen(next: Monitor) = next
}
object RootMonitor extends Monitor {
private[this] val log = Logger.getLogger("monitor")
def handle(exc: Throwable) = exc match {
case NonFatal(e) =>
log.log(Level.SEVERE, "Exception propagated to the root monitor!", e)
true /* Never propagate non fatal exception */
case e: VirtualMachineError =>
log.log(Level.SEVERE, "VM error", e)
System.err.println("VM error: %s".format(e.getMessage))
e.printStackTrace(System.err)
System.exit(1)
true /*NOTREACHED*/
case e: Throwable =>
log.log(Level.SEVERE, "Fatal exception propagated to the root monitor!", e)
false
}
}
|
stremlenye/util
|
util-core/src/main/scala/com/twitter/util/Monitor.scala
|
Scala
|
apache-2.0
| 5,267 |
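A small composition sketch using only the API defined above (`Monitor.mk`, `orElse`, `apply`); the handlers themselves are illustrative:
import com.twitter.util.Monitor
val logging = Monitor.mk {
  case e: IllegalStateException =>
    println(s"handled: ${e.getMessage}")
    true // claim the exception as handled
}
val swallowAll = Monitor.mk { case _ => true }
// If `logging` declines (or itself throws), `swallowAll` gets a chance.
val combined = logging orElse swallowAll
combined { throw new IllegalStateException("boom") } // handled, not rethrown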
package Interview
import Interview.QuantexaTestQuestion.get_max_difference
import org.scalatest.{FlatSpec, Matchers}
/**
* Created by dan.dixey on 06/07/2017.
*/
class TestQuantexa extends FlatSpec with Matchers {
"question one" should "find the max" in {
assert(get_max_difference(List(1, 2, 3, 5, 12, 50, 0, 39)) === 49)
assert(get_max_difference(List()) === Int.MinValue)
}
}
|
dandxy89/LearningScala
|
src/test/scala/Interview/TestQuantexa.scala
|
Scala
|
mit
| 401 |
//
// MicrosoftTranslator.scala -- Scala class MicrosoftTranslator
// Project OrcSites
//
// Created by amp on Oct 9, 2016.
//
// Copyright (c) 2017 The University of Texas at Austin. All rights reserved.
//
// Use and redistribution of this file is governed by the license terms in
// the LICENSE file found in the project's top-level directory and also found at
// URL: http://orc.csres.utexas.edu/license.shtml .
//
package orc.lib.net
import java.io.{ FileNotFoundException, OutputStreamWriter }
import java.net.{ HttpURLConnection, URL, URLEncoder }
import java.util.Properties
import scala.io.Source
import orc.types.FunctionType
import orc.util.ArrayExtensions.{ Array1, Array2 }
import orc.values.sites.{ TypedSite, SpecificArity }
import orc.values.sites.compatibility.{ ScalaPartialSite }
import org.codehaus.jettison.json.{ JSONArray, JSONObject }
class MicrosoftTranslatorFactoryPropertyFile extends ScalaPartialSite with SpecificArity with TypedSite {
val arity = 1
def orcType() = {
import orc.values.sites.compatibility.Types._
FunctionType(Nil, List(string),
BingSearch.orcType)
}
def loadProperties(file: String): (String, String) = {
val p = new Properties();
val stream = classOf[MicrosoftTranslator].getResourceAsStream("/" + file);
if (stream == null) {
throw new FileNotFoundException(file);
}
p.load(stream);
(p.getProperty("orc.lib.net.bing.username"),
p.getProperty("orc.lib.net.bing.key"))
}
def evaluate(args: Array[AnyRef]): Option[AnyRef] = {
val Array1(file: String) = args
val (user, key) = loadProperties(file)
Some(new MicrosoftTranslator(user, key))
}
}
class MicrosoftTranslatorFactoryUsernameKey extends ScalaPartialSite with SpecificArity with TypedSite {
val arity = 2
def orcType() = {
import orc.values.sites.compatibility.Types._
FunctionType(Nil, List(string, string),
BingSearch.orcType)
}
def evaluate(args: Array[AnyRef]): Option[AnyRef] = {
val Array2(user: String, key: String) = args
Some(new MicrosoftTranslator(user, key))
}
}
object MicrosoftTranslator {
import orc.values.sites.compatibility.Types._
val orcType = FunctionType(Nil, List(string, string), string)
}
/** @author amp
*/
class MicrosoftTranslator(user: String, key: String) extends ScalaPartialSite with SpecificArity with TypedSite {
val arity = 2
def orcType() = MicrosoftTranslator.orcType
val authUrl = new URL("https://datamarket.accesscontrol.windows.net/v2/OAuth2-13")
var tokenExpiration = Long.MaxValue
var token = ""
def getAccessToken() = {
if (token == "" || System.currentTimeMillis() >= tokenExpiration) {
val conn = authUrl.openConnection().asInstanceOf[HttpURLConnection]
conn.setConnectTimeout(10000) // 10 seconds is reasonable
conn.setReadTimeout(5000) // 5 seconds is reasonable
conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded")
conn.setRequestMethod("POST")
conn.setDoOutput(true)
conn.connect()
val outputWr = new OutputStreamWriter(conn.getOutputStream())
outputWr.write(s"grant_type=client_credentials&client_id=${URLEncoder.encode(user, "UTF-8")}&client_secret=${URLEncoder.encode(key, "UTF-8")}&scope=http://api.microsofttranslator.com")
outputWr.close()
val src = Source.fromInputStream(conn.getInputStream())
val s = src.mkString("", "", "")
val o = new JSONObject(s)
if (o.has("error") && o.getBoolean("error")) {
throw new RuntimeException(s"Error Authenticating with $authUrl: ${o.optString("error_description")}");
}
tokenExpiration = (o.getLong("expires_in") - 10) * 1000 + System.currentTimeMillis()
token = o.getString("access_token")
}
token
}
def evaluate(args: Array[AnyRef]): Option[AnyRef] = {
val Array2(text: String, target: String) = args
val params = s"text=${URLEncoder.encode(text, "UTF-8")}&to=${URLEncoder.encode(target, "UTF-8")}"
val url = new URL(s"http://api.microsofttranslator.com/V2/AJAX.svc/Translate?$params")
val conn = url.openConnection().asInstanceOf[HttpURLConnection]
conn.setConnectTimeout(10000) // 10 seconds is reasonable
conn.setReadTimeout(5000) // 5 seconds is reasonable
conn.setRequestProperty("accept", "*/*")
conn.addRequestProperty("Authorization", "Bearer " + getAccessToken())
conn.connect()
val resp = conn.getResponseCode()
val src = Source.fromInputStream(conn.getInputStream())
val s = src.mkString("", "", "")
if (resp == 200) {
// Hack to parse bare JSON string. The stripPrefix is to remove the Unicode BOM.
      val o = new JSONArray(s"[${s.stripPrefix("\uFEFF")}]")
Some(o.getString(0))
} else {
throw new RuntimeException(s"Error translating with $url: $resp $s");
}
}
}
|
orc-lang/orc
|
OrcSites/src/orc/lib/net/MicrosoftTranslator.scala
|
Scala
|
bsd-3-clause
| 4,859 |
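The token handling in `getAccessToken` above is an instance of a generic expiry-cached credential. A minimal sketch of the same pattern, with illustrative names:
// Caches a fetched value until shortly before it expires, then refetches.
final class ExpiringCache[T](fetch: () => (T, Long /* ttl millis */)) {
  private[this] var value: Option[T] = None
  private[this] var expiresAt = 0L
  def get(): T = synchronized {
    if (value.isEmpty || System.currentTimeMillis() >= expiresAt) {
      val (v, ttlMillis) = fetch()
      value = Some(v)
      // Renew 10 s early, mirroring the (expires_in - 10) margin above.
      expiresAt = System.currentTimeMillis() + ttlMillis - 10000L
    }
    value.get
  }
}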
class A {
var s = "
"
}
object Main { def main(args: Array[String]) { } }
|
tobast/compil-petitscala
|
tests/syntax/bad/testfile-newline_in_string-1.scala
|
Scala
|
gpl-3.0
| 79 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.rdd
import org.apache.hadoop.conf.{Configurable, Configuration}
import org.apache.hadoop.io.{Text, Writable}
import org.apache.hadoop.mapreduce.InputSplit
import org.apache.spark.{Partition, SparkContext}
import org.apache.spark.input.WholeTextFileInputFormat
/**
* An RDD that reads a bunch of text files in, and each text file becomes one record.
*/
private[spark] class WholeTextFileRDD(
sc : SparkContext,
inputFormatClass: Class[_ <: WholeTextFileInputFormat],
keyClass: Class[Text],
valueClass: Class[Text],
conf: Configuration,
minPartitions: Int)
extends NewHadoopRDD[Text, Text](sc, inputFormatClass, keyClass, valueClass, conf) {
override def getPartitions: Array[Partition] = {
val inputFormat = inputFormatClass.newInstance
val conf = getConf
inputFormat match {
case configurable: Configurable =>
configurable.setConf(conf)
case _ =>
}
val jobContext = newJobContext(conf, jobId)
inputFormat.setMinPartitions(jobContext, minPartitions)
val rawSplits = inputFormat.getSplits(jobContext).toArray
val result = new Array[Partition](rawSplits.size)
for (i <- 0 until rawSplits.size) {
result(i) = new NewHadoopPartition(id, i, rawSplits(i).asInstanceOf[InputSplit with Writable])
}
result
}
}
|
chenc10/Spark-PAF
|
core/src/main/scala/org/apache/spark/rdd/WholeTextFileRDD.scala
|
Scala
|
apache-2.0
| 2,134 |
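`WholeTextFileRDD` is the class behind `SparkContext.wholeTextFiles`, which is the usual entry point; a brief usage sketch (paths are hypothetical):
import org.apache.spark.{SparkConf, SparkContext}
val sc = new SparkContext(new SparkConf().setAppName("whole-files").setMaster("local[*]"))
// One record per file: (path, full contents), split into at least 4 partitions.
val files = sc.wholeTextFiles("hdfs:///data/docs", minPartitions = 4)
files.mapValues(_.split("\n").length).collect().foreach {
  case (path, lines) => println(s"$path: $lines lines")
}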
package spire
package algebra
/**
* A `RingAlgebra` is a module that is also a `Rng`. An example is the Gaussian
* numbers.
*/
trait RingAlgebra[V, @sp R] extends Any with Module[V, R] with Rng[V]
object RingAlgebra {
implicit def ZAlgebra[A](implicit vector0: Ring[A], scalar0: Ring[Int]): ZAlgebra[A] = new ZAlgebra[A] {
val vector: Ring[A] = vector0
val scalar: Ring[Int] = scalar0
}
}
/**
* Given any `Ring[A]` we can construct a `RingAlgebra[A, Int]`. This is
* possible since we can define `fromInt` on `Ring` generally.
*/
trait ZAlgebra[V] extends Any with RingAlgebra[V, Int] with Ring[V] {
implicit def vector: Ring[V]
implicit def scalar: Ring[Int]
def zero: V = vector.zero
def one: V = vector.one
def negate(v: V): V = vector.negate(v)
def plus(v: V, w: V): V = vector.plus(v, w)
override def minus(v: V, w: V): V = vector.minus(v, w)
def times(v: V, w: V): V = vector.times(v, w)
def timesl(r: Int, v: V): V = vector.times(vector.fromInt(r), v)
override def fromInt(n: Int): V = vector.fromInt(n)
}
/**
* A `FieldAlgebra` is a vector space that is also a `Ring`. An example is the
* complex numbers.
*/
trait FieldAlgebra[V, @sp(Float, Double) F] extends Any with RingAlgebra[V, F] with VectorSpace[V, F]
|
tixxit/spire
|
core/shared/src/main/scala/spire/algebra/RingAlgebra.scala
|
Scala
|
mit
| 1,268 |
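A usage sketch for the `ZAlgebra` construction above, assuming spire's standard instances (`spire.implicits._`) supply the `Ring[BigInt]` and `Ring[Int]` evidence:
import spire.algebra.RingAlgebra
import spire.implicits._
val alg = RingAlgebra.ZAlgebra[BigInt]
// timesl scales via fromInt, as defined above: 3 *: 7 == fromInt(3) * 7
assert(alg.timesl(3, BigInt(7)) == BigInt(21))
assert(alg.plus(BigInt(1), BigInt(2)) == BigInt(3))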
package com.sksamuel.elastic4s.searches.aggs
import com.sksamuel.elastic4s.script.ScriptDefinition
import com.sksamuel.elastic4s.searches.aggs.pipeline.PipelineAggregationDefinition
import com.sksamuel.exts.OptionImplicits._
case class AvgAggregationDefinition(name: String,
field: Option[String] = None,
missing: Option[AnyRef] = None,
script: Option[ScriptDefinition] = None,
pipelines: Seq[PipelineAggregationDefinition] = Nil,
subaggs: Seq[AbstractAggregation] = Nil,
metadata: Map[String, AnyRef] = Map.empty)
extends AggregationDefinition {
type T = AvgAggregationDefinition
def field(field: String): T = copy(field = field.some)
def missing(missing: AnyRef): T = copy(missing = missing.some)
def script(script: ScriptDefinition): T = copy(script = script.some)
override def subAggregations(aggs: Iterable[AbstractAggregation]): T = copy(subaggs = aggs.toSeq)
  override def metadata(map: Map[String, AnyRef]): T = copy(metadata = map)
}
|
aroundus-inc/elastic4s
|
elastic4s-core/src/main/scala/com/sksamuel/elastic4s/searches/aggs/AvgAggregationDefinition.scala
|
Scala
|
apache-2.0
| 1,182 |
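A brief builder sketch for the definition above; the aggregation and field names are illustrative:
import com.sksamuel.elastic4s.searches.aggs.AvgAggregationDefinition
val avgPrice = AvgAggregationDefinition("avg_price")
  .field("price")
  .missing(java.lang.Double.valueOf(0d)) // treat docs without "price" as 0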
/*
* Copyright 2007-2008 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*/
package net.liftweb.widgets.flot
import _root_.net.liftweb.http.js._
import JsCmds._
import JE._
import _root_.net.liftweb.util._
import Helpers._
/**
*
*/
case class JsFlot(idPlaceholder: String,datas: List[FlotSerie], options: FlotOptions) extends JsCmd
{
lazy val jsCmd: JsCmd = Flot.renderJs(idPlaceholder, datas, options, Noop)
lazy val toJsCmd: String = jsCmd.toJsCmd
}
/**
*
*/
case class JsFlotAppendData(idPlaceholder: String,
datas: List [FlotSerie],
newDatas: List [(Double, Double)], pop: Boolean) extends JsCmd
{
def toJsCmd: String = {
if (datas.size != newDatas.size) Noop.toJsCmd
else {
val newValuePush: String = newDatas.zipWithIndex.map
{case (newData, num) => {
val nameSerie = "data_" + idPlaceholder + "_" + (num + 1)
        val popjs = if (pop) {nameSerie + ".shift () ;\n"} else ""
        popjs + nameSerie + ".push ( [" + newData._1.toString + ", " + newData._2.toString + "]); \n"
}
}.reduceLeft (_ + _)
val flotShow = Flot.renderFlotShow (idPlaceholder, datas, new FlotOptions{}, Noop).toJsCmd
newValuePush + flotShow
}
}
}
/**
*
*/
case class JsFlotWithOverview(idPlaceholder: String,
datas: List [FlotSerie],
options: FlotOptions,
idOverview: String,
optionsOverview: FlotOptions) extends JsCmd
{
def toJsCmd: String = {
val jsClearLegend: JsCmd =
optionsOverview.legend.flatMap(_.container.
map(c => JsRaw("jQuery("+("#" + c).encJs
+ ").html ('')").cmd)).
openOr(Noop)
val overview = new FlotOverview (idOverview, optionsOverview)
jsClearLegend & Flot.renderJs(idPlaceholder, datas, options, Noop, overview)
}
}
|
beni55/liftweb
|
lift-widgets/src/main/scala/net/liftweb/widgets/flot/FlotAjax.scala
|
Scala
|
apache-2.0
| 2,521 |
package me.echen.scaldingale
import com.twitter.scalding._
/**
* Given a dataset of movies and their ratings by different
* users, how can we compute the similarity between pairs of
* movies?
*
* This class computes similarities between movies
* by representing each movie as a vector of ratings and
* computing similarity scores over these vectors.
*
* Similarity measures include correlation, cosine similarity,
* and Jaccard similarity.
*
* @author Edwin Chen
*/
class MovieSimilarities(args : Args) extends Job(args) {
/**
* Parameters to regularize correlation.
*/
val PRIOR_COUNT = 10
val PRIOR_CORRELATION = 0
/**
* The input is a TSV file with three columns: (user, movie, rating).
*/
val INPUT_FILENAME = "data/ratings.tsv"
// *************************
// * STEPS OF THE COMPUTATION
// *************************
/**
* Read in the input and give each field a type and name.
*/
val ratings =
Tsv(INPUT_FILENAME).read
.mapTo((0, 1, 2) -> ('user, 'movie, 'rating)) {
fields : (String, String, Double) => fields
// In practice, the user and movie would probably be ids (and thus Ints or Longs),
// but let's use Strings so we can easily print out human-readable names.
}
/**
* Also keep track of the total number of people who rated a movie.
*/
val ratingsWithSize =
ratings
// Put the size of each group in a field called "numRaters".
.groupBy('movie) { _.size('numRaters) }
// Rename, since Scalding currently requires both sides of a join to have distinctly named fields.
.rename('movie -> 'movieX)
.joinWithLarger('movieX -> 'movie, ratings).discard('movieX)
/**
* Make a dummy copy of the ratings, so we can do a self-join.
*/
val ratings2 =
ratingsWithSize
.rename(('user, 'movie, 'rating, 'numRaters) -> ('user2, 'movie2, 'rating2, 'numRaters2))
/**
* Join the two rating streams on their user fields,
* in order to find all pairs of movies that a user has rated.
*/
val ratingPairs =
ratingsWithSize
.joinWithSmaller('user -> 'user2, ratings2)
// De-dupe so that we don't calculate similarity of both (A, B) and (B, A).
.filter('movie, 'movie2) { movies : (String, String) => movies._1 < movies._2 }
.project('movie, 'rating, 'numRaters, 'movie2, 'rating2, 'numRaters2)
/**
* Compute dot products, norms, sums, and sizes of the rating vectors.
*/
val vectorCalcs =
ratingPairs
// Compute (x*y, x^2, y^2), which we need for dot products and norms.
.map(('rating, 'rating2) -> ('ratingProd, 'ratingSq, 'rating2Sq)) {
ratings : (Double, Double) =>
(ratings._1 * ratings._2, scala.math.pow(ratings._1, 2), scala.math.pow(ratings._2, 2))
}
.groupBy('movie, 'movie2) {
_
.size // length of each vector
.sum('ratingProd -> 'dotProduct)
.sum('rating -> 'ratingSum)
.sum('rating2 -> 'rating2Sum)
.sum('ratingSq -> 'ratingNormSq)
.sum('rating2Sq -> 'rating2NormSq)
.max('numRaters) // Just an easy way to make sure the numRaters field stays.
.max('numRaters2)
}
/**
* Calculate similarity between rating vectors using similarity measures
* like correlation, cosine similarity, and Jaccard similarity.
*/
val similarities =
vectorCalcs
.map(('size, 'dotProduct, 'ratingSum, 'rating2Sum, 'ratingNormSq, 'rating2NormSq, 'numRaters, 'numRaters2) ->
('correlation, 'regularizedCorrelation, 'cosineSimilarity, 'jaccardSimilarity)) {
fields : (Double, Double, Double, Double, Double, Double, Double, Double) =>
val (size, dotProduct, ratingSum, rating2Sum, ratingNormSq, rating2NormSq, numRaters, numRaters2) = fields
val corr = correlation(size, dotProduct, ratingSum, rating2Sum, ratingNormSq, rating2NormSq)
val regCorr = regularizedCorrelation(size, dotProduct, ratingSum, rating2Sum, ratingNormSq, rating2NormSq, PRIOR_COUNT, PRIOR_CORRELATION)
val cosSim = cosineSimilarity(dotProduct, scala.math.sqrt(ratingNormSq), scala.math.sqrt(rating2NormSq))
val jaccard = jaccardSimilarity(size, numRaters, numRaters2)
(corr, regCorr, cosSim, jaccard)
}
/**
* Output all similarities to a TSV file.
*/
similarities
.project('movie, 'movie2, 'correlation, 'regularizedCorrelation, 'cosineSimilarity, 'jaccardSimilarity, 'size, 'numRaters, 'numRaters2)
.write(Tsv("./output.tsv"))
// *************************
// * SIMILARITY MEASURES
// *************************
/**
* The correlation between two vectors A, B is
* cov(A, B) / (stdDev(A) * stdDev(B))
*
* This is equivalent to
* [n * dotProduct(A, B) - sum(A) * sum(B)] /
* sqrt{ [n * norm(A)^2 - sum(A)^2] [n * norm(B)^2 - sum(B)^2] }
*/
def correlation(size : Double, dotProduct : Double, ratingSum : Double,
rating2Sum : Double, ratingNormSq : Double, rating2NormSq : Double) = {
val numerator = size * dotProduct - ratingSum * rating2Sum
val denominator = scala.math.sqrt(size * ratingNormSq - ratingSum * ratingSum) * scala.math.sqrt(size * rating2NormSq - rating2Sum * rating2Sum)
numerator / denominator
}
/**
* Regularize correlation by adding virtual pseudocounts over a prior:
* RegularizedCorrelation = w * ActualCorrelation + (1 - w) * PriorCorrelation
* where w = # actualPairs / (# actualPairs + # virtualPairs).
*/
def regularizedCorrelation(size : Double, dotProduct : Double, ratingSum : Double,
rating2Sum : Double, ratingNormSq : Double, rating2NormSq : Double,
virtualCount : Double, priorCorrelation : Double) = {
val unregularizedCorrelation = correlation(size, dotProduct, ratingSum, rating2Sum, ratingNormSq, rating2NormSq)
val w = size / (size + virtualCount)
w * unregularizedCorrelation + (1 - w) * priorCorrelation
}
/**
* The cosine similarity between two vectors A, B is
* dotProduct(A, B) / (norm(A) * norm(B))
*/
def cosineSimilarity(dotProduct : Double, ratingNorm : Double, rating2Norm : Double) = {
dotProduct / (ratingNorm * rating2Norm)
}
/**
* The Jaccard Similarity between two sets A, B is
* |Intersection(A, B)| / |Union(A, B)|
*/
def jaccardSimilarity(usersInCommon : Double, totalUsers1 : Double, totalUsers2 : Double) = {
val union = totalUsers1 + totalUsers2 - usersInCommon
usersInCommon / union
}
}
|
echen/scaldingale
|
MovieSimilarities.scala
|
Scala
|
mit
| 6,579 |
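The similarity formulas above, re-derived on two toy rating vectors as a standalone sanity check (numbers are illustrative):
object SimilaritySketch extends App {
  val a = Vector(4.0, 5.0, 3.0) // ratings of movie A by users 1..3
  val b = Vector(5.0, 4.0, 4.0) // ratings of movie B by the same users
  val n = a.size.toDouble
  val dot = a.zip(b).map { case (x, y) => x * y }.sum
  val (sumA, sumB) = (a.sum, b.sum)
  val (normSqA, normSqB) = (a.map(x => x * x).sum, b.map(y => y * y).sum)
  val cosine = dot / (math.sqrt(normSqA) * math.sqrt(normSqB))
  val correlation =
    (n * dot - sumA * sumB) /
      (math.sqrt(n * normSqA - sumA * sumA) * math.sqrt(n * normSqB - sumB * sumB))
  println(f"cosine = $cosine%.4f, correlation = $correlation%.4f")
}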
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.play.filters
import org.mockito.ArgumentCaptor
import org.mockito.Matchers._
import org.mockito.Mockito._
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.mock.MockitoSugar
import org.scalatest.{Matchers, WordSpecLike}
import org.scalatestplus.play.OneAppPerTest
import play.api.http.HeaderNames
import play.api.inject.guice.GuiceApplicationBuilder
import play.api.mvc.{Result, _}
import play.api.test._
import scala.concurrent.Future
class CacheControlFilterSpec extends WordSpecLike with Matchers with MockitoSugar with ScalaFutures with OneAppPerTest {
private trait Setup extends Results {
val expectedCacheControlHeader = HeaderNames.CACHE_CONTROL -> "no-cache,no-store,max-age=0"
val resultFromAction: Result = Ok
val cacheControlFilter = new CacheControlFilter {
val cachableContentTypes: Seq[String] = Seq("image/", "text/css", "application/javascript")
}
lazy val action = {
val mockAction = mock[(RequestHeader) => Future[Result]]
val outgoingResponse = Future.successful(resultFromAction)
when(mockAction.apply(any())).thenReturn(outgoingResponse)
mockAction
}
def requestPassedToAction = {
val updatedRequest = ArgumentCaptor.forClass(classOf[RequestHeader])
verify(action).apply(updatedRequest.capture())
updatedRequest.getValue
}
}
"During request pre-processing, the filter" should {
"do nothing, just pass on the request" in new Setup {
cacheControlFilter(action)(FakeRequest())
requestPassedToAction should ===(FakeRequest())
}
}
"During result post-processing, the filter" should {
"add a cache-control header if there isn't one and the response has no content type" in new Setup {
cacheControlFilter(action)(FakeRequest()).futureValue should be(resultFromAction.withHeaders(expectedCacheControlHeader))
}
"add a cache-control header if there isn't one and the response does not have an excluded content type" in new Setup {
override val resultFromAction: Result = Ok.as("text/html")
cacheControlFilter(action)(FakeRequest()).futureValue should be(resultFromAction.withHeaders(expectedCacheControlHeader))
}
"not add a cache-control header if there isn't one but the response is an exact match for an excluded content type" in new Setup {
override val resultFromAction: Result = Ok.as("text/css")
cacheControlFilter(action)(FakeRequest()).futureValue should be(resultFromAction)
}
"not add a cache-control header if there isn't one but the response is an exact match for an mime part of an excluded content type" in new Setup {
override val resultFromAction: Result = Ok.as("text/css; charset=utf-8")
cacheControlFilter(action)(FakeRequest()).futureValue should be(resultFromAction)
}
"not add a cache-control header if there isn't one but the response is an exact match for an category of the mime part of an excluded content type" in new Setup {
override val resultFromAction: Result = Ok.as("image/png")
cacheControlFilter(action)(FakeRequest()).futureValue should be(resultFromAction)
}
"not add a cache-control header if there is no content type but the status is NOT MODIFIED" in new Setup {
override val resultFromAction: Result = NotModified
cacheControlFilter(action)(FakeRequest()).futureValue should be(resultFromAction)
}
"replace any existing cache-control header" in new Setup {
override val resultFromAction = Ok.withHeaders(HeaderNames.CACHE_CONTROL -> "someOtherValue")
cacheControlFilter(action)(FakeRequest()).futureValue should be(resultFromAction.withHeaders(expectedCacheControlHeader))
}
"leave any other headers alone" in new Setup {
override val resultFromAction = Ok.withHeaders(
"header1" -> "value1",
HeaderNames.CACHE_CONTROL -> "someOtherValue",
"header2" -> "value2")
cacheControlFilter(action)(FakeRequest()).futureValue should be(resultFromAction.withHeaders(expectedCacheControlHeader))
}
}
"Creating the filter from config" should {
"load the correct values" in new WithApplication(new GuiceApplicationBuilder().configure("caching" -> List("image/", "text/")).build()) {
CacheControlFilter.fromConfig("caching").cachableContentTypes should be(List("image/", "text/"))
}
}
}
|
cjwebb/play-filters
|
src/test/scala/uk/gov/hmrc/play/filters/CacheControlFilterSpec.scala
|
Scala
|
apache-2.0
| 4,999 |
/*
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.atlas.akka
import akka.actor.ActorRefFactory
import akka.actor.Props
import com.netflix.atlas.json.Json
import spray.http._
import spray.routing._
class TestApi(val actorRefFactory: ActorRefFactory) extends WebApi {
import CustomDirectives._
import spray.http.StatusCodes._
def routes: RequestContext => Unit = {
path("jsonparse") {
post { ctx =>
val parser = getJsonParser(ctx.request).get
try {
val v = Json.decode[String](parser)
ctx.responder ! HttpResponse(status = OK, entity = v)
} catch {
case e: Exception =>
e.printStackTrace()
ctx.responder ! HttpResponse(status = BadRequest, entity = e.getMessage)
} finally {
parser.close()
}
}
} ~
accessLog {
path("chunked") {
get { ctx =>
val ref = actorRefFactory.actorOf(Props(new ChunkResponseActor))
ref.tell("start", ctx.responder)
}
}
}
}
}
|
rspieldenner/atlas
|
atlas-akka/src/test/scala/com/netflix/atlas/akka/TestApi.scala
|
Scala
|
apache-2.0
| 1,607 |
// ScalaMenuTest.scala
object ScalaMenuTest {
def main(args: Array[String]): Unit = {
val v = new javax.swing.JMenu()
v.add(new javax.swing.JMenuItem())
//v.add(new java.awt.PopupMenu());
}
}
|
yusuke2255/dotty
|
tests/untried/pos/kinzer.scala
|
Scala
|
bsd-3-clause
| 208 |
package spinoco.protocol.sdp
/**
* Description of the Media transferred
* @param tpe Type of the media
* @param port Port where the media are available
* @param portCount Count of the ports
* @param protocol Media transport protocol
* @param format Media format ordered by their preference
 * @param information Optional title/description of the media (the SDP i= field)
 * @param connectionData Connection data for the media, if it differs from the one supplied at the session level
* @param bandwidth Bandwidth information for this media
* @param attributes Attributes for this media.
*/
case class MediaDescription(
tpe: MediaType.Value
, port: Int
, portCount: Option[Int]
, protocol: MediaProtocol.Value
, format: List[Int]
, information: Option[String]
, connectionData: List[ConnectionData]
, bandwidth: List[Bandwidth]
, attributes: List[Attribute]
)
/** type of the media **/
object MediaType extends Enumeration {
val Audio = Value("audio")
val Video = Value("video")
val Text = Value("text")
val Application = Value("application")
val Message = Value("message")
}
/** media transport protocol **/
object MediaProtocol extends Enumeration {
val UDP = Value("udp")
val `RTP/AVP` = Value("RTP/AVP")
val `RTP/SAVP` = Value("RTP/SAVP")
}
|
Spinoco/protocol
|
sdp/src/main/scala/spinoco/protocol/sdp/MediaDescription.scala
|
Scala
|
mit
| 1,333 |
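Constructing the model above for the classic SDP media line `m=audio 49170 RTP/AVP 0`, with the optional fields left empty:
import spinoco.protocol.sdp._
val audio = MediaDescription(
  tpe = MediaType.Audio,
  port = 49170,
  portCount = None,
  protocol = MediaProtocol.`RTP/AVP`,
  format = List(0), // static payload type 0 = PCMU/8000 (RFC 3551)
  information = None,
  connectionData = Nil,
  bandwidth = Nil,
  attributes = Nil
)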
package com.karasiq.shadowcloud.model.utils
import com.karasiq.common.memory.MemorySize
@SerialVersionUID(0L)
final case class StorageHealth(writableSpace: Long, totalSpace: Long, usedSpace: Long = 0L, online: Boolean = true) extends HealthStatus {
require(writableSpace >= 0 && totalSpace >= 0 && usedSpace >= 0, "Invalid sizes")
def freeSpace: Long = {
math.max(0L, totalSpace - usedSpace)
}
def +(h1: StorageHealth): StorageHealth = {
@inline
def safeSum(l1: Long, l2: Long): Long = {
val bigInt = BigInt(l1) + BigInt(l2)
if (bigInt.isValidLong) bigInt.longValue() else Long.MaxValue
}
copy(
writableSpace = safeSum(writableSpace, h1.writableSpace),
totalSpace = safeSum(totalSpace, h1.totalSpace),
usedSpace = safeSum(usedSpace, h1.usedSpace),
online = online && h1.online
)
}
def -(bytes: Long): StorageHealth = {
StorageHealth.normalized(writableSpace - bytes, totalSpace, usedSpace + bytes, online)
}
override def toString: String = {
s"StorageHealth(${if (online) "" else "Offline, "}${MemorySize(writableSpace)} available, ${MemorySize(usedSpace)}/${MemorySize(totalSpace)})"
}
}
object StorageHealth {
val empty = StorageHealth(0, 0)
val unlimited = StorageHealth(Long.MaxValue, Long.MaxValue)
def normalized(writableSpace: Long, totalSpace: Long, usedSpace: Long = 0L, online: Boolean = true): StorageHealth = {
val totalSpaceN = if (totalSpace >= 0) totalSpace else Long.MaxValue
val usedSpaceN = if (usedSpace >= 0) math.min(totalSpaceN, usedSpace) else totalSpaceN
val writableSpaceN = math.max(0L, math.min(totalSpaceN - usedSpaceN, writableSpace))
new StorageHealth(
writableSpaceN,
totalSpaceN,
usedSpaceN,
online
)
}
}
|
Karasiq/shadowcloud
|
model/src/main/scala/com/karasiq/shadowcloud/model/utils/StorageHealth.scala
|
Scala
|
apache-2.0
| 1,795 |
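A quick check of the overflow behaviour of `+` above: `safeSum` saturates at `Long.MaxValue` instead of wrapping. A sketch:
import com.karasiq.shadowcloud.model.utils.StorageHealth
val a = StorageHealth.unlimited          // writable = total = Long.MaxValue
val b = StorageHealth(100, 200, usedSpace = 50)
val sum = a + b
assert(sum.writableSpace == Long.MaxValue) // saturated rather than wrapped
assert(sum.usedSpace == 50)
assert(sum.online)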
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs102.boxes
import org.scalatest.{Matchers, WordSpec}
import uk.gov.hmrc.ct.accounts.frs102.BoxesFixture
import uk.gov.hmrc.ct.box.CtValidation
class ACQ5021Spec extends WordSpec with Matchers with BoxesFixture {
"ACQ5021" should {
"for Full Accounts pass validation" when {
"ac42, acq5021 have value" in {
ac42withValue
ac43noValue
acq5022noValue
ACQ5021(Some(false)).validate(boxRetriever) shouldBe empty
}
"ac42, acq5022 have value" in {
ac42withValue
ac43noValue
acq5022withValue
ACQ5021(None).validate(boxRetriever) shouldBe empty
}
"ac43, acq5021 have value" in {
ac42noValue
ac43withValue
acq5022noValue
ACQ5021(Some(false)).validate(boxRetriever) shouldBe empty
}
"ac43, acq5022 have value" in {
ac42noValue
ac43withValue
acq5022withValue
ACQ5021(None).validate(boxRetriever) shouldBe empty
}
"ac42, acq5022, acq5021 have value" in {
ac42withValue
ac43noValue
acq5022withValue
ACQ5021(Some(true)).validate(boxRetriever) shouldBe empty
}
"ac43, acq5022, acq5021 have value" in {
ac42noValue
ac43withValue
acq5022withValue
ACQ5021(Some(true)).validate(boxRetriever) shouldBe empty
}
"ac42, ac43, acq5022, acq5021 have value" in {
ac42withValue
ac43withValue
acq5022withValue
ACQ5021(Some(true)).validate(boxRetriever) shouldBe empty
}
"all no value" in {
ac42noValue
ac43noValue
acq5022noValue
ACQ5021(None).validate(boxRetriever) shouldBe empty
}
}
"for Full Accounts fail validation" when {
val errorAtLeastOne = Set(CtValidation(None,"error.balance.sheet.intangible.assets.one.box.required",None))
val cannotExist = Set(CtValidation(Some("ACQ5021"),"error.ACQ5021.cannot.exist",None))
"ac42 has value and acq5021,acq5022 have no value" in {
ac42withValue
ac43noValue
acq5022noValue
ACQ5021(None).validate(boxRetriever) shouldBe errorAtLeastOne
}
"ac43 has value and acq5021,acq5022 have no value" in {
ac42noValue
ac43withValue
acq5022noValue
ACQ5021(None).validate(boxRetriever) shouldBe errorAtLeastOne
}
"ac42,ac43 have value and acq5021,acq5022 have no value" in {
ac42withValue
ac43withValue
acq5022noValue
ACQ5021(None).validate(boxRetriever) shouldBe errorAtLeastOne
}
"ac42, ac43 has no value and acq5021 has value" in {
ac42noValue
ac43noValue
acq5022noValue
ACQ5021(Some(false)).validate(boxRetriever) shouldBe cannotExist
}
}
}
}
|
liquidarmour/ct-calculations
|
src/test/scala/uk/gov/hmrc/ct/accounts/frs102/boxes/ACQ5021Spec.scala
|
Scala
|
apache-2.0
| 3,480 |
/*
* Copyright 2013 - 2020 Outworkers Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.outworkers.phantom.builder.serializers
import java.util.concurrent.TimeUnit
import com.outworkers.phantom.builder.QueryBuilder
import com.outworkers.phantom.builder.query.{OptionPart, SerializationTest}
import com.outworkers.phantom.builder.syntax.CQLSyntax
import com.outworkers.phantom.dsl._
import com.outworkers.util.samplers._
import org.joda.time.Seconds
import org.scalatest.{FreeSpec, Matchers}
import scala.concurrent.duration._
class CreateQueryBuilderTest extends FreeSpec with Matchers with SerializationTest {
private[this] val BasicTable = db.basicTable
final val DefaultTtl = 500
final val OneDay = 86400
private[this] val root = BasicTable.create.qb.queryString
"The CREATE query builder" - {
"should correctly serialise primary key definitions" - {
"a simple single partition key definition" in {
val cols = List("test")
QueryBuilder.Create.primaryKey(cols).queryString shouldEqual "PRIMARY KEY (test)"
}
"a single partition key and a primary key" in {
val partitions = List("test")
val primaries = List("test2")
QueryBuilder.Create.primaryKey(partitions, primaries).queryString shouldEqual "PRIMARY KEY (test, test2)"
}
"a composite partition key" in {
val partitions = List("partition1", "partition2")
val primaries = List("primary1")
val key = QueryBuilder.Create.primaryKey(partitions, primaries).queryString
key shouldEqual "PRIMARY KEY ((partition1, partition2), primary1)"
}
"a compound primary key" in {
val partitions = List("partition1")
val primaries = List("primary1", "primary2")
val key = QueryBuilder.Create.primaryKey(partitions, primaries).queryString
key shouldEqual "PRIMARY KEY (partition1, primary1, primary2)"
}
"a composite and compound primary key" in {
val partitions = List("partition1", "partition2")
val primaries = List("primary1", "primary2")
val key = QueryBuilder.Create.primaryKey(partitions, primaries).queryString
key shouldEqual "PRIMARY KEY ((partition1, partition2), primary1, primary2)"
}
"a composite with clustering order" in {
val partitions = List("partition1", "partition2")
val primaries = List("primary1", "primary2")
val clustering = List("primary1 ASC", "primary2 ASC")
val key = QueryBuilder.Create.primaryKey(partitions, primaries, clustering).queryString
key shouldEqual "PRIMARY KEY ((partition1, partition2), primary1, primary2) WITH CLUSTERING ORDER BY (primary1 ASC, primary2 ASC)"
}
}
"should create a simple percentile clause" - {
"using the augmented number strings" in {
val num = gen[Int]
val qb = num.percentile.queryString
qb shouldEqual s"$num ${CQLSyntax.CreateOptions.percentile}"
}
}
"should allow using DateTieredCompactionStrategy and its options" - {
"serialise a create query with a DateTieredCompactionStrategy" in {
val qb = BasicTable.create.`with`(
compaction eqs DateTieredCompactionStrategy
).qb.queryString
qb shouldEqual s"$root WITH compaction = {'class': 'DateTieredCompactionStrategy'}"
}
"allow setting base_time_seconds" in {
val qb = BasicTable.create.`with`(
compaction eqs DateTieredCompactionStrategy.base_time_seconds(5L)
).qb.queryString
qb shouldEqual s"$root WITH compaction = {'class': 'DateTieredCompactionStrategy', 'base_time_seconds': 5}"
}
"allow setting max_threshold" in {
val qb = BasicTable.create.`with`(
compaction eqs DateTieredCompactionStrategy.max_threshold(5)
).qb.queryString
qb shouldEqual s"$root WITH compaction = {'class': 'DateTieredCompactionStrategy', 'max_threshold': 5}"
}
"allow setting min_threshold" in {
val qb = BasicTable.create.`with`(
compaction eqs DateTieredCompactionStrategy.min_threshold(5)
).qb.queryString
qb shouldEqual s"$root WITH compaction = {'class': 'DateTieredCompactionStrategy', 'min_threshold': 5}"
}
"allow setting timestamp_resolution" in {
val qb = BasicTable.create.`with`(
compaction eqs DateTieredCompactionStrategy.timestamp_resolution(TimeUnit.MILLISECONDS)
).qb.queryString
qb shouldEqual s"$root WITH compaction = {'class': 'DateTieredCompactionStrategy', 'timestamp_resolution': 'MILLISECONDS'}"
}
}
"should allow using TimeWindowCompactionStrategy and its options" - {
"serialise a create query with a TimeWindowCompactionStrategy" in {
val qb = BasicTable.create.`with`(compaction eqs TimeWindowCompactionStrategy).qb.queryString
qb shouldEqual s"$root WITH compaction = {'class': 'TimeWindowCompactionStrategy'}"
}
"serialise a create query with a TimeWindowCompactionStrategy and an option set" in {
val qb = BasicTable.create.`with`(compaction eqs TimeWindowCompactionStrategy.compaction_window_unit(TimeUnit.DAYS)
).qb.queryString
qb shouldEqual s"$root WITH compaction = {'class': 'TimeWindowCompactionStrategy', 'compaction_window_unit': 'DAYS'}"
}
"serialise a create query with a TimeWindowCompactionStrategy and two options set" in {
val qb = BasicTable.create.`with`(compaction eqs TimeWindowCompactionStrategy
.compaction_window_unit(TimeUnit.DAYS)
.compaction_window_size(5)
).qb.queryString
qb shouldEqual s"$root WITH compaction = {'class'" +
": 'TimeWindowCompactionStrategy', 'compaction_window_unit': 'DAYS', 'compaction_window_size': 5}"
}
"serialise a create query with a TimeWindowCompactionStrategy and three options set" in {
val qb = BasicTable.create.`with`(compaction eqs TimeWindowCompactionStrategy
.compaction_window_unit(TimeUnit.DAYS)
.compaction_window_size(5)
.timestamp_resolution(TimeUnit.MILLISECONDS)
).qb.queryString
qb shouldEqual s"$root WITH compaction = {'class'" +
": 'TimeWindowCompactionStrategy', 'compaction_window_unit': 'DAYS', 'compaction_window_size': 5, 'timestamp_resolution': 'MILLISECONDS'}"
}
}
"should allow specifying table creation options" - {
"serialise a simple create query with a SizeTieredCompactionStrategy and no compaction strategy options set" in {
val qb = BasicTable.create.`with`(compaction eqs SizeTieredCompactionStrategy).qb.queryString
qb shouldEqual s"$root WITH compaction = {'class': 'SizeTieredCompactionStrategy'}"
}
"serialise a create query with a TimeWindowCompactionStrategy" in {
val qb = BasicTable.create.`with`(compaction eqs TimeWindowCompactionStrategy).qb.queryString
qb shouldEqual s"$root WITH compaction = {'class': 'TimeWindowCompactionStrategy'}"
}
"serialise a create query with a TimeWindowCompactionStrategy and an option set" in {
val qb = BasicTable.create.`with`(compaction eqs TimeWindowCompactionStrategy.compaction_window_unit(TimeUnit.DAYS)
).qb.queryString
qb shouldEqual s"$root WITH compaction = {'class': 'TimeWindowCompactionStrategy', 'compaction_window_unit': 'DAYS'}"
}
"serialise a create query with a TimeWindowCompactionStrategy and both options set" in {
val qb = BasicTable.create.`with`(compaction eqs TimeWindowCompactionStrategy
.compaction_window_unit(TimeUnit.DAYS)
.compaction_window_size(5)
).qb.queryString
qb shouldEqual s"$root WITH compaction = {'class': 'TimeWindowCompactionStrategy', 'compaction_window_unit': 'DAYS', 'compaction_window_size': 5}"
}
"serialise a simple create query with a SizeTieredCompactionStrategy and 1 compaction strategy options set" in {
val qb = BasicTable.create.`with`(
compaction eqs LeveledCompactionStrategy.sstable_size_in_mb(50)
).qb.queryString
qb shouldEqual "CREATE TABLE phantom.basicTable (id uuid, id2 uuid, id3 uuid, placeholder text, PRIMARY KEY (id, id2, id3)) WITH compaction = {'class': 'LeveledCompactionStrategy', 'sstable_size_in_mb': 50}"
}
"serialise a simple create query with a SizeTieredCompactionStrategy and 1 compaction strategy options set and a compression strategy set" in {
val qb = BasicTable.create
.`with`(compaction eqs LeveledCompactionStrategy.sstable_size_in_mb(50))
.and(compression eqs LZ4Compressor.crc_check_chance(0.5))
.qb.queryString
qb shouldEqual s"""$root WITH compaction = {'class': 'LeveledCompactionStrategy', 'sstable_size_in_mb': 50} AND compression = {'sstable_compression': 'LZ4Compressor', 'crc_check_chance': 0.5}"""
}
"add a comment option to a create query" in {
val qb = BasicTable.create
.`with`(comment eqs "testing")
.qb.queryString
qb shouldEqual s"$root WITH comment = 'testing'"
}
"allow specifying a read_repair_chance clause" in {
val qb = BasicTable.create.`with`(read_repair_chance eqs 5D).qb.queryString
qb shouldEqual s"$root WITH read_repair_chance = 5.0"
}
"allow specifying a dclocal_read_repair_chance clause" in {
val qb = BasicTable.create.`with`(dclocal_read_repair_chance eqs 5D).qb.queryString
qb shouldEqual s"$root WITH dclocal_read_repair_chance = 5.0"
}
"allow specifying a replicate_on_write clause" in {
val qb = BasicTable.create.`with`(replicate_on_write eqs true).qb.queryString
qb shouldEqual s"$root WITH replicate_on_write = true"
}
"allow specifying a custom gc_grace_seconds clause" in {
val qb = BasicTable.create.`with`(gc_grace_seconds eqs 5.seconds).qb.queryString
qb shouldEqual s"$root WITH gc_grace_seconds = 5"
}
"allow specifying larger custom units as gc_grace_seconds" in {
val qb = BasicTable.create.`with`(gc_grace_seconds eqs 1.day).qb.queryString
qb shouldEqual s"$root WITH gc_grace_seconds = 86400"
}
"allow specifying custom gc_grade_seconds using the Joda Time ReadableInstant and Second API" in {
val qb = BasicTable.create.`with`(gc_grace_seconds eqs Seconds.seconds(OneDay)).qb.queryString
qb shouldEqual s"$root WITH gc_grace_seconds = 86400"
}
"allow specifying a bloom_filter_fp_chance using a Double param value" in {
val qb = BasicTable.create.`with`(bloom_filter_fp_chance eqs 5D).qb.queryString
qb shouldEqual s"$root WITH bloom_filter_fp_chance = 5.0"
}
}
"should allow specifying cache strategies " - {
"specify Cache.None as a cache strategy" in {
val qb = BasicTable.create.`with`(caching eqs Cache.None()).qb.queryString
if (session.v4orNewer) {
qb shouldEqual s"$root WITH caching = {'keys': 'none', 'rows_per_partition': 'none'}"
} else {
qb shouldEqual s"$root WITH caching = 'none'"
}
}
"specify Cache.KeysOnly as a caching strategy" in {
val qb = BasicTable.create.`with`(caching eqs Cache.KeysOnly()).qb.queryString
if (session.v4orNewer) {
qb shouldEqual s"$root WITH caching = {'keys': 'all', 'rows_per_partition': 'none'}"
} else {
qb shouldEqual s"$root WITH caching = 'keys_only'"
}
}
"specify Cache.RowsOnly as a caching strategy" in {
val qb = BasicTable.create.`with`(caching eqs Cache.RowsOnly()).qb.queryString
if (session.v4orNewer) {
qb shouldEqual s"$root WITH caching = {'rows_per_partition': 'all'}"
} else {
qb shouldEqual s"$root WITH caching = 'rows_only'"
}
}
"specify a Cache rows_per_partition as an integer value" in {
val qb = BasicTable.create.`with`(caching eqs Cache.All().rows_per_partition(5)).qb.queryString
qb shouldEqual s"$root WITH caching = {'keys': 'all', 'rows_per_partition': 'all', 'rows_per_partition': 5}"
}
"specify Cache.All as a caching strategy" in {
val qb = BasicTable.create.`with`(caching eqs Cache.All()).qb.queryString
if (session.v4orNewer) {
qb shouldEqual s"$root WITH caching = {'keys': 'all', 'rows_per_partition': 'all'}"
} else {
qb shouldEqual s"$root WITH caching = 'all'"
}
}
}
"should allow specifying a default_time_to_live" - {
"specify a default time to live using a Long value" in {
val qb = BasicTable.create.`with`(default_time_to_live eqs DefaultTtl.toLong).qb.queryString
qb shouldEqual s"$root WITH default_time_to_live = 500"
}
"specify a default time to live using a org.joda.time.Seconds value" in {
val qb = BasicTable.create.`with`(default_time_to_live eqs Seconds.seconds(DefaultTtl)).qb.queryString
qb shouldEqual s"$root WITH default_time_to_live = 500"
}
"specify a default time to live using a scala.concurrent.duration.FiniteDuration value" in {
val qb = BasicTable.create.`with`(
default_time_to_live eqs FiniteDuration(DefaultTtl, TimeUnit.SECONDS)
).qb.queryString
qb shouldEqual s"$root WITH default_time_to_live = 500"
}
}
"should allow specifying a clustering order" - {
"specify a single column clustering order with ascending ordering" in {
val column = ("test", CQLSyntax.Ordering.asc) :: Nil
val qb = QueryBuilder.Create.clusteringOrder(column).queryString
qb shouldEqual "CLUSTERING ORDER BY (test ASC)"
}
"specify a single column clustering order with descending ordering" in {
val column = ("test", CQLSyntax.Ordering.desc) :: Nil
val qb = QueryBuilder.Create.clusteringOrder(column).queryString
qb shouldEqual "CLUSTERING ORDER BY (test DESC)"
}
"specify multiple columns and preserve ordering" in {
val column1 = ("test", CQLSyntax.Ordering.asc)
val column2 = ("test2", CQLSyntax.Ordering.desc)
val columns = List(column1, column2)
val qb = QueryBuilder.Create.clusteringOrder(columns).queryString
qb shouldEqual "CLUSTERING ORDER BY (test ASC, test2 DESC)"
}
}
"should allow using SizeTieredCompaction and all its properties" - {
"specify a SizeTieredCompactionStrategy" in {
val qb = BasicTable.create
.option(compaction eqs SizeTieredCompactionStrategy).qb.queryString
qb shouldEqual s"$root WITH compaction = {'class': 'SizeTieredCompactionStrategy'}"
}
"specify a SizeTieredCompactionStrategy with a tombstone threshold" in {
val qb = BasicTable.create
.option(compaction eqs SizeTieredCompactionStrategy
.tombstone_threshold(5D)
).qb.queryString
qb shouldEqual s"$root WITH compaction = {'class': 'SizeTieredCompactionStrategy', 'tombstone_threshold': 5.0}"
}
"specify a SizeTieredCompactionStrategy with a tombstone compaction interval" in {
val qb = BasicTable.create
.option(compaction eqs SizeTieredCompactionStrategy
.tombstone_compaction_interval(5L)
).qb.queryString
qb shouldEqual s"$root WITH compaction = {'class': 'SizeTieredCompactionStrategy', 'tombstone_compaction_interval': 5}"
}
"specify a SizeTieredCompactionStrategy with an unchecked tombstone compaction option" in {
val qb = BasicTable.create
.option(compaction eqs SizeTieredCompactionStrategy
.unchecked_tombstone_compaction(5D)
).qb.queryString
qb shouldEqual s"$root WITH compaction = {'class': 'SizeTieredCompactionStrategy', 'unchecked_tombstone_compaction': 5.0}"
}
"specify a SizeTieredCompactionStrategy with a max_threshold option" in {
val qb = BasicTable.create
.option(compaction eqs SizeTieredCompactionStrategy
.max_threshold(5)
).qb.queryString
qb shouldEqual s"$root WITH compaction = {'class': 'SizeTieredCompactionStrategy', 'max_threshold': 5}"
}
"specify a SizeTieredCompactionStrategy with a min_threshold option" in {
val qb = BasicTable.create
.option(compaction eqs SizeTieredCompactionStrategy
.min_threshold(5)
).qb.queryString
qb shouldEqual s"$root WITH compaction = {'class': 'SizeTieredCompactionStrategy', 'min_threshold': 5}"
}
"specify a SizeTieredCompactionStrategy with a cold_reads_to_omit option" in {
val qb = BasicTable.create
.option(compaction eqs SizeTieredCompactionStrategy
.cold_reads_to_omit(5D)
).qb.queryString
qb shouldEqual s"$root WITH compaction = {'class': 'SizeTieredCompactionStrategy', 'cold_reads_to_omit': 5.0}"
}
"specify a SizeTieredCompactionStrategy with a bucket_low option" in {
val qb = BasicTable.create
.option(compaction eqs SizeTieredCompactionStrategy
.bucket_low(5D)
).qb.queryString
qb shouldEqual s"$root WITH compaction = {'class': 'SizeTieredCompactionStrategy', 'bucket_low': 5.0}"
}
"specify a SizeTieredCompactionStrategy with a bucket_high option" in {
val qb = BasicTable.create
.option(compaction eqs SizeTieredCompactionStrategy
.bucket_high(5D)
).qb.queryString
qb shouldEqual s"$root WITH compaction = {'class': 'SizeTieredCompactionStrategy', 'bucket_high': 5.0}"
}
"specify a SizeTieredCompactionStrategy with a min_sstable_size option" in {
val qb = BasicTable.create
.option(compaction eqs SizeTieredCompactionStrategy
.min_sstable_size(5)
).qb.queryString
qb shouldEqual s"$root WITH compaction = {'class': 'SizeTieredCompactionStrategy', 'min_sstable_size': 5}"
}
"specify a SizeTieredCompactionStrategy with an enabled flag set to true" in {
val qb = BasicTable.create
.option(compaction eqs SizeTieredCompactionStrategy
.enabled(true)
).qb.queryString
qb shouldEqual s"$root WITH compaction = {'class': 'SizeTieredCompactionStrategy', 'enabled': true}"
}
"specify a SizeTieredCompactionStrategy with an enabled flag set to false" in {
val qb = BasicTable.create
.option(compaction eqs SizeTieredCompactionStrategy
.enabled(false)
).qb.queryString
qb shouldEqual s"$root WITH compaction = {'class': 'SizeTieredCompactionStrategy', 'enabled': false}"
}
}
"should allow generating secondary indexes based on trait mixins" - {
"specify a secondary index on a non-map column" in {
val qb = QueryBuilder.Create.index("t", "k", "col").queryString
qb shouldEqual "CREATE INDEX IF NOT EXISTS t_col_idx ON k.t(col)"
}
"specify a secondary index on a map column for the keys of a map column" in {
val qb = QueryBuilder.Create.mapIndex("t", "k", "col").queryString
qb shouldEqual "CREATE INDEX IF NOT EXISTS t_col_idx ON k.t(keys(col))"
}
"specify a secondary index on a cased column" in {
val qb = QueryBuilder.Create.index("t", "k", "\\"col\\"").queryString
qb shouldEqual "CREATE INDEX IF NOT EXISTS \\"t_col_idx\\" ON k.t(\\"col\\")"
}
}
}
"should allow creating SASI indexes" - {
"create a basic index definition from two strings" in {
val qb = QueryBuilder.Create.sasiIndexName("table", "column")
qb.queryString shouldEqual s"table_column_${CQLSyntax.SASI.suffix}"
}
"create a full SASI index definition" in {
val index = QueryBuilder.Create.sasiIndexName("table", "column")
val qb = QueryBuilder.Create.createSASIIndex(KeySpace("keyspace"), "table", index, "column", OptionPart.empty.qb)
qb.queryString shouldEqual s"CREATE CUSTOM INDEX IF NOT EXISTS $index ON keyspace.table(column) USING 'org.apache.cassandra.index.sasi.SASIIndex' WITH {}"
}
}
}
|
outworkers/phantom
|
phantom-dsl/src/test/scala/com/outworkers/phantom/builder/serializers/CreateQueryBuilderTest.scala
|
Scala
|
apache-2.0
| 20,864 |
/*
* Copyright 2015 - 2016 Red Bull Media House GmbH <http://www.redbullmediahouse.com> - all rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rbmhtechnology.eventuate
import java.util.concurrent.TimeUnit
import akka.actor._
import akka.pattern.{ ask, pipe }
import akka.util.Timeout
import com.rbmhtechnology.eventuate.Acceptor.Recover
import com.rbmhtechnology.eventuate.EventsourcingProtocol._
import com.rbmhtechnology.eventuate.ReplicationFilter.NoFilter
import com.rbmhtechnology.eventuate.ReplicationProtocol._
import com.rbmhtechnology.eventuate.log.EventLogClock
import com.typesafe.config.Config
import scala.collection.immutable.Seq
import scala.concurrent._
import scala.concurrent.duration._
import scala.reflect.ClassTag
/**
* [[ReplicationEndpoint.recover]] completes with this exception if recovery fails.
*
* @param cause Recovery failure cause.
* @param partialUpdate Set to `true` if recovery already made partial updates, `false` if recovery
* failed without having made partial updates to replication partners.
*/
class RecoveryException(cause: Throwable, val partialUpdate: Boolean) extends RuntimeException(cause)
private class RecoverySettings(config: Config) {
val localReadTimeout: FiniteDuration =
config.getDuration("eventuate.log.read-timeout", TimeUnit.MILLISECONDS).millis
val localWriteTimeout: FiniteDuration =
config.getDuration("eventuate.log.write-timeout", TimeUnit.MILLISECONDS).millis
val remoteOperationRetryMax: Int =
config.getInt("eventuate.log.recovery.remote-operation-retry-max")
val remoteOperationRetryDelay: FiniteDuration =
config.getDuration("eventuate.log.recovery.remote-operation-retry-delay", TimeUnit.MILLISECONDS).millis
val remoteOperationTimeout: FiniteDuration =
config.getDuration("eventuate.log.recovery.remote-operation-timeout", TimeUnit.MILLISECONDS).millis
val snapshotDeletionTimeout: FiniteDuration =
config.getDuration("eventuate.log.recovery.snapshot-deletion-timeout", TimeUnit.MILLISECONDS).millis
}
/**
* Represents a link between a local and remote event log that are subject to disaster recovery.
*
 * @param replicationLink link used to recover events (through replication).
 * @param localSequenceNr sequence number of the local event log at the beginning of disaster recovery.
 * @param remoteSequenceNr current sequence number of the remote event log.
*/
private case class RecoveryLink(replicationLink: ReplicationLink, localSequenceNr: Long, remoteSequenceNr: Long)
/**
* Provides disaster recovery primitives.
*
* @param endpoint endpoint to be recovered.
*
* @see [[ReplicationEndpoint.recover()]]
*/
private class Recovery(endpoint: ReplicationEndpoint) {
private val settings = new RecoverySettings(endpoint.system.settings.config)
import settings._
import endpoint.system.dispatcher
private implicit val timeout = Timeout(remoteOperationTimeout)
private implicit val scheduler = endpoint.system.scheduler
/**
* Read [[ReplicationEndpointInfo]] from local [[ReplicationEndpoint]]
*/
def readEndpointInfo: Future[ReplicationEndpointInfo] =
readLogSequenceNrs.map(ReplicationEndpointInfo(endpoint.id, _))
private def readLogSequenceNrs: Future[Map[String, Long]] =
readEventLogClocks.map(_.mapValues(_.sequenceNr).view.force)
/**
* Reads the clocks from local event logs.
*/
def readEventLogClocks: Future[Map[String, EventLogClock]] =
Future.traverse(endpoint.logNames)(name => readEventLogClock(endpoint.logs(name)).map(name -> _)).map(_.toMap)
/**
* Synchronize sequence numbers of local logs with replication progress stored in remote replicas.
* @return a set of [[RecoveryLink]]s indicating the events that need to be recovered
*/
def synchronizeReplicationProgressesWithRemote(info: ReplicationEndpointInfo): Future[Set[RecoveryLink]] =
Future.traverse(endpoint.connectors) { connector =>
synchronizeReplicationProgressWithRemote(connector.remoteAcceptor, info).map { remoteInfo =>
connector.links(remoteInfo).map(toRecoveryLink(_, info, remoteInfo))
}
} map (_.flatten)
private def toRecoveryLink(replicationLink: ReplicationLink, localInfo: ReplicationEndpointInfo, remoteInfo: ReplicationEndpointInfo): RecoveryLink =
RecoveryLink(replicationLink, localInfo.logSequenceNrs(replicationLink.target.logName), remoteInfo.logSequenceNrs(replicationLink.target.logName))
private def synchronizeReplicationProgressWithRemote(remoteAcceptor: ActorSelection, info: ReplicationEndpointInfo): Future[ReplicationEndpointInfo] =
readResult[SynchronizeReplicationProgressSuccess, SynchronizeReplicationProgressFailure, ReplicationEndpointInfo](
Retry(remoteAcceptor.ask(SynchronizeReplicationProgress(info)), remoteOperationRetryDelay, remoteOperationRetryMax), _.info, _.cause)
/**
* Update the locally stored replication progress of remote replicas with the sequence numbers given in ``info``.
 * Replication progress that is greater than the corresponding sequence number in ``info`` is reset to that sequence number.
*/
def synchronizeReplicationProgress(info: ReplicationEndpointInfo): Future[Unit] = {
Future.traverse(endpoint.commonLogNames(info)) { name =>
val logActor = endpoint.logs(name)
val logId = info.logId(name)
val remoteSequenceNr = info.logSequenceNrs(name)
for {
currentProgress <- readReplicationProgress(logActor, logId)
_ <- if (currentProgress > remoteSequenceNr) updateReplicationMetadata(logActor, logId, remoteSequenceNr)
else Future.successful(currentProgress)
} yield ()
} map (_ => ())
}
private def readReplicationProgress(logActor: ActorRef, logId: String): Future[Long] =
readResult[GetReplicationProgressSuccess, GetReplicationProgressFailure, Long](
logActor.ask(GetReplicationProgress(logId))(localReadTimeout), _.storedReplicationProgress, _.cause)
/**
 * Sets the replication progress for the remote replica with id `logId` to `replicationProgress`
* and clears the cached version vector.
*/
private def updateReplicationMetadata(logActor: ActorRef, logId: String, replicationProgress: Long): Future[Long] = {
readResult[ReplicationWriteSuccess, ReplicationWriteFailure, Long](
logActor.ask(ReplicationWrite(Seq.empty, Map(logId -> ReplicationMetadata(replicationProgress, VectorTime.Zero))))(localWriteTimeout), _ => replicationProgress, _.cause)
}
/**
 * @return `true` if the source of the [[RecoveryLink]] did not receive all events before the disaster, i.e.
* the initial replication from the location to be recovered to the source of event recovery was filtered.
*/
def isFilteredLink(link: RecoveryLink): Boolean =
endpoint.endpointFilters.filterFor(link.replicationLink.source.logId, link.replicationLink.target.logName) != NoFilter
/**
* Initiates event recovery for the given [[ReplicationLink]]s. The returned [[Future]] completes when
* all events are successfully recovered.
*/
def recoverLinks(recoveryLinks: Set[RecoveryLink])(implicit ec: ExecutionContext): Future[Unit] = {
if (recoveryLinks.isEmpty) {
Future.successful(())
} else {
val recoveryFinishedPromise = Promise[Unit]()
deleteSnapshots(recoveryLinks).onSuccess {
case _ => endpoint.acceptor ! Recover(recoveryLinks, recoveryFinishedPromise)
}
recoveryFinishedPromise.future
}
}
/**
* Deletes all invalid snapshots from local event logs. A snapshot is invalid if it covers
* events that have been lost.
*/
private def deleteSnapshots(links: Set[RecoveryLink]): Future[Unit] =
Future.sequence(links.map(deleteSnapshots)).map(_ => ())
def readEventLogClock(targetLog: ActorRef): Future[EventLogClock] =
targetLog.ask(GetEventLogClock)(localReadTimeout).mapTo[GetEventLogClockSuccess].map(_.clock)
private def deleteSnapshots(link: RecoveryLink): Future[Unit] =
readResult[DeleteSnapshotsSuccess.type, DeleteSnapshotsFailure, Unit](
endpoint.logs(link.replicationLink.target.logName).ask(DeleteSnapshots(link.localSequenceNr + 1L))(Timeout(snapshotDeletionTimeout)), _ => (), _.cause)
/**
* In case disaster recovery was not able to recover all events (e.g. only through a single filtered connection)
 * the local sequence number must be adjusted to the log's version vector to avoid events being
* written in the causal past.
*/
def adjustEventLogClocks: Future[Unit] =
Future.traverse(endpoint.logs.values)(adjustEventLogClock).map(_ => ())
private def adjustEventLogClock(log: ActorRef): Future[Unit] = {
readResult[AdjustEventLogClockSuccess, AdjustEventLogClockFailure, Unit](
log ? AdjustEventLogClock, _ => (), _.cause)
}
private def readResult[S: ClassTag, F: ClassTag, R](f: Future[Any], result: S => R, cause: F => Throwable): Future[R] = f.flatMap {
case success: S => Future.successful(result(success))
case failure: F => Future.failed(cause(failure))
}
}
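// --- Hedged sketch (not part of the original file) ---------------------------
// Outlines one plausible order in which the primitives above combine during
// ReplicationEndpoint.recover(). Error handling, the Acceptor hand-shake and
// any special treatment of filtered links (see isFilteredLink) are omitted;
// the object and method names are illustrative only.
private object RecoverySequenceSketch {
  def run(recovery: Recovery)(implicit ec: ExecutionContext): Future[Unit] =
    for {
      // Read local clocks and exchange them with remote endpoints.
      info  <- recovery.readEndpointInfo
      links <- recovery.synchronizeReplicationProgressesWithRemote(info)
      // Recover lost events over the computed links, then adjust clocks.
      _     <- recovery.recoverLinks(links)
      _     <- recovery.adjustEventLogClocks
    } yield ()
}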
/**
* [[ReplicationEndpoint]]-scoped singleton that receives all requests from remote endpoints. These are
*
* - [[GetReplicationEndpointInfo]] requests.
* - [[ReplicationRead]] requests (inside [[ReplicationReadEnvelope]]s).
*
* This actor is also involved in disaster recovery and implements a state machine with the following
* possible transitions:
*
* - `initializing` -> `recovering` -> `processing` (when calling `endpoint.recover()`)
* - `initializing` -> `processing` (when calling `endpoint.activate()`)
*/
private class Acceptor(endpoint: ReplicationEndpoint) extends Actor {
import Acceptor._
import context.dispatcher
private val recovery = new Recovery(endpoint)
def initializing: Receive = recovering orElse {
case Process =>
context.become(processing)
}
def recovering: Receive = {
case Recover(links, promise) =>
endpoint.connectors.foreach(_.activate(Some(links.map(_.replicationLink))))
val recoveryManager = context.actorOf(Props(new RecoveryManager(endpoint.id, links)))
context.become(recoveringEvents(recoveryManager, promise) orElse processing)
case RecoveryCompleted =>
context.become(processing)
}
def recoveringEvents(recoveryManager: ActorRef, promise: Promise[Unit]): Receive = {
case writeSuccess: ReplicationWriteSuccess =>
recoveryManager forward writeSuccess
case EventRecoveryCompleted =>
promise.success(())
context.become(recovering orElse processing)
}
def processing: Receive = {
case re: ReplicationReadEnvelope if re.incompatibleWith(endpoint.applicationName, endpoint.applicationVersion) =>
sender ! ReplicationReadFailure(IncompatibleApplicationVersionException(endpoint.id, endpoint.applicationVersion, re.targetApplicationVersion), re.payload.targetLogId)
case ReplicationReadEnvelope(r, logName, _, _) =>
val r2 = r.copy(filter = endpoint.endpointFilters.filterFor(r.targetLogId, logName) and r.filter)
endpoint.logs(logName) forward r2
case _: ReplicationWriteSuccess =>
}
override def unhandled(message: Any): Unit = message match {
case GetReplicationEndpointInfo =>
recovery.readEndpointInfo.map(GetReplicationEndpointInfoSuccess).pipeTo(sender())
case SynchronizeReplicationProgress(remoteInfo) =>
val localInfo = for {
_ <- recovery.synchronizeReplicationProgress(remoteInfo)
localInfo <- recovery.readEndpointInfo.map(SynchronizeReplicationProgressSuccess)
} yield localInfo
localInfo.recover {
case ex: Throwable => SynchronizeReplicationProgressFailure(SynchronizeReplicationProgressSourceException(ex.getMessage))
} pipeTo sender()
case _ =>
super.unhandled(message)
}
def receive =
initializing
}
private object Acceptor {
val Name = "acceptor"
case object Process
case class Recover(links: Set[RecoveryLink], promise: Promise[Unit])
case object RecoveryCompleted
case class RecoveryStepCompleted(link: RecoveryLink)
case object MetadataRecoveryCompleted
case object EventRecoveryCompleted
}
/**
* If disaster recovery is initiated events are recovered until
* a [[ReplicationWriteSuccess]] sent as notification from the local [[Replicator]] is received indicating that all
* events, known to exist remotely at the beginning of recovery, are replicated.
*
* When all replication links have been processed this actor
* notifies [[Acceptor]] (= parent) that recovery completed and ends itself.
*/
private class RecoveryManager(endpointId: String, links: Set[RecoveryLink]) extends Actor with ActorLogging {
import Acceptor._
def receive = recoveringEvents(links)
private def recoveringEvents(active: Set[RecoveryLink]): Receive = {
case writeSuccess: ReplicationWriteSuccess if active.exists(link => writeSuccess.metadata.contains(link.replicationLink.source.logId)) =>
active.find(recoveryForLinkFinished(_, writeSuccess)).foreach { link =>
val updatedActive = removeLink(active, link)
if (updatedActive.isEmpty) {
context.parent ! EventRecoveryCompleted
self ! PoisonPill
} else
context.become(recoveringEvents(updatedActive))
}
}
private def recoveryForLinkFinished(link: RecoveryLink, writeSuccess: ReplicationWriteSuccess): Boolean =
writeSuccess.metadata.get(link.replicationLink.source.logId) match {
case Some(md) => link.remoteSequenceNr <= md.replicationProgress
case None => false
}
private def removeLink(active: Set[RecoveryLink], link: RecoveryLink): Set[RecoveryLink] = {
val updatedActive = active - link
val finished = links.size - updatedActive.size
val all = links.size
log.info("[recovery of {}] Event recovery finished for remote log {} ({} of {})", endpointId, link.replicationLink.source.logId, finished, all)
updatedActive
}
}
|
RBMHTechnology/eventuate
|
eventuate-core/src/main/scala/com/rbmhtechnology/eventuate/Recovery.scala
|
Scala
|
apache-2.0
| 14,457 |
/*
* -----------------------------------------------------------------------------
* - ScaNao is an open-source enabling Nao's control from Scala code. -
* - At the low level jNaoqi is used to bridge the C++ code with the JVM. -
* - -
* - CreatedBy: Nicolas Jorand -
* - Date: 3 Feb 2013 -
* - -
* - _______. ______ ___ .__ __. ___ ______ -
* - / | / | / \\ | \\ | | / \\ / __ \\ -
* - | (----`| ,----' / ^ \\ | \\| | / ^ \\ | | | | -
* - \\ \\ | | / /_\\ \\ | . ` | / /_\\ \\ | | | | -
* - .----) | | `----. / _____ \\ | |\\ | / _____ \\ | `--' | -
* - |_______/ \\______|/__/ \\__\\ |__| \\__| /__/ \\__\\ \\______/ -
* -----------------------------------------------------------------------------
*/
package io.nao.scanao.srv
import akka.actor.{Props, ActorLogging, Actor}
import io.nao.scanao.msg.motion.{Stiffness, OpenHand, CloseHand}
import com.github.levkhomich.akka.tracing.ActorTracing
/**
 * This package is used for syntactic sugar when sending a message to this actor
*/
class SNMotionActor extends Actor with ActorLogging with SNQIMessage with ActorTracing {
log.info("Creating instance of SNMemoryActor")
val moduleName = "ALMotion"
def receive = {
case data: Stiffness => {
srv.call("setStiffnesses", data.joint.name, data.joint.stiffness.asInstanceOf[java.lang.Float])
}
// case data: AngleInterpolation => {
// angleInterpolation(data.joints, data.absolute)
// sender ! Done
// }
// case data: AngleInterpolationBezier => {
// angleInterpolationBezier(data.joints)
// sender ! Done
// }
// case data: AngleInterpolationWithSpeed => {
// angleInterpolationWithSpeed(data.joints, data.maxSpeed)
// sender ! Done
// }
// case data: ChangeAngles => {
// changeAngles(data.joints, data.maxSpeed)
// sender ! Done
// }
// case data: ChangePosition => {
// changePosition(data.effectorName, data.space, data.fractionMaxSpeed)
// sender ! Done
// }
case data: CloseHand => {
srv.call("closeHand", data.hand.name)
}
case data: OpenHand => {
srv.call("openHand", data.hand.name)
}
// case data: AreResourcesAvailable => {
// sender ! areResourcesAvailable(data.resourceNames)
// }
case x@_ => {
log.error("Unknown Message " + x)
}
}
}
object SNMotionActor {
/**
* Create the Props for this actor
* @return a Props for creating this actor
*/
def props(): Props = Props(new SNMotionActor)
}
|
iPomme/scanao
|
server/src/main/scala/io/nao/scanao/srv/SNMotionActor.scala
|
Scala
|
mit
| 2,951 |
/*
* Copyright (C) 2013 Alcatel-Lucent.
*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
* Licensed to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package molecule
package process
package core
import module._
private[molecule] object Foreach extends ProcessType {
override def name = "Foreach"
def apply[A, B](f: A => B)(ichan: IChan[A]): Process[Unit] = new CoreProcess[Unit] {
def ptype = Foreach
def main(t: UThread, rchan: ROChan[Unit]): Unit = {
def _foreach(ich: IChan[A]): Unit = {
ich.read(t, { (seg, ich) =>
seg.foreach(f)
ich match {
case IChan(EOS) => rchan.success_!(())
case IChan(signal) => rchan.failure_!(signal)
case _ => _foreach(ich)
}
})
}
_foreach(ichan)
}
}
}
|
molecule-labs/molecule
|
molecule-core/src/main/scala/molecule/process/core/Foreach.scala
|
Scala
|
apache-2.0
| 1,381 |
package com.sksamuel.elastic4s
import org.elasticsearch.common.lucene.search.function.FieldValueFactorFunction
import org.elasticsearch.index.query.functionscore.exp.ExponentialDecayFunctionBuilder
import org.elasticsearch.index.query.functionscore.factor.FactorBuilder
import org.elasticsearch.index.query.functionscore.weight.WeightBuilder
import org.elasticsearch.index.query.functionscore.fieldvaluefactor.FieldValueFactorFunctionBuilder
import org.elasticsearch.index.query.functionscore.gauss.GaussDecayFunctionBuilder
import org.elasticsearch.index.query.functionscore.lin.LinearDecayFunctionBuilder
import org.elasticsearch.index.query.functionscore.random.RandomScoreFunctionBuilder
import org.elasticsearch.index.query.functionscore.script.ScriptScoreFunctionBuilder
import org.elasticsearch.index.query.functionscore.{ DecayFunctionBuilder, ScoreFunctionBuilder }
/** @author Stephen Samuel */
trait ScoreDsl {
def randomScore(seed: Int) = new RandomScoreDefinition(seed)
def scriptScore(script: String) = new ScriptScoreDefinition(script)
def gaussianScore(field: String, origin: String, scale: String) = new GaussianDecayScoreDefinition(field, origin, scale)
def linearScore(field: String, origin: String, scale: String) = new LinearDecayScoreDefinition(field, origin, scale)
def exponentialScore(field: String, origin: String, scale: String) = new ExponentialDecayScoreDefinition(field, origin, scale)
@deprecated("since 1.4.0", "1.4.0")
def factorScore(boost: Double) = new FactorScoreDefinition(boost)
def fieldFactorScore(fieldName: String) = new FieldValueFactorDefinition(fieldName)
def weightScore(boost: Double) = new WeightScoreDefinition(boost)
}
@deprecated("since 1.4.0", "1.4.0")
class FactorScoreDefinition(boost: Double) extends ScoreDefinition[FactorScoreDefinition] {
val builder = new FactorBuilder().boostFactor(boost.toFloat)
}
class WeightScoreDefinition(boost: Double) extends ScoreDefinition[WeightScoreDefinition] {
val builder = new WeightBuilder().setWeight(boost.toFloat)
}
trait ScoreDefinition[T] {
val builder: ScoreFunctionBuilder
var _filter: Option[FilterDefinition] = None
def filter(filter: FilterDefinition): T = {
this._filter = Option(filter)
this.asInstanceOf[T]
}
def weight(boost: Double): T = {
builder.setWeight(boost.toFloat)
this.asInstanceOf[T]
}
}
class FieldValueFactorDefinition(fieldName: String) extends ScoreDefinition[FieldValueFactorDefinition] {
override val builder = new FieldValueFactorFunctionBuilder(fieldName: String)
def factor(f: Double): this.type = {
builder.factor(f.toFloat)
this
}
def modifier(m: FieldValueFactorFunction.Modifier): this.type = {
builder.modifier(m)
this
}
}
class RandomScoreDefinition(seed: Int) extends ScoreDefinition[RandomScoreDefinition] {
val builder = new RandomScoreFunctionBuilder().seed(seed)
}
class ScriptScoreDefinition(script: String) extends ScoreDefinition[ScriptScoreDefinition] {
val builder = new ScriptScoreFunctionBuilder().script(script)
def param(key: String, value: String): ScriptScoreDefinition = {
builder.param(key, value)
this
}
def params(map: Map[String, String]): ScriptScoreDefinition = {
map.foreach(entry => param(entry._1, entry._2))
this
}
def lang(lang: String): ScriptScoreDefinition = {
builder.lang(lang)
this
}
}
abstract class DecayScoreDefinition[T] extends ScoreDefinition[T] {
val builder: DecayFunctionBuilder
def offset(offset: Any): T = {
builder.setOffset(offset.toString)
this.asInstanceOf[T]
}
def decay(decay: Double): T = {
builder.setDecay(decay)
this.asInstanceOf[T]
}
}
class GaussianDecayScoreDefinition(field: String, origin: String, scale: String)
extends DecayScoreDefinition[GaussianDecayScoreDefinition] {
val builder = new GaussDecayFunctionBuilder(field, origin, scale)
}
class LinearDecayScoreDefinition(field: String, origin: String, scale: String)
extends DecayScoreDefinition[LinearDecayScoreDefinition] {
val builder = new LinearDecayFunctionBuilder(field, origin, scale)
}
class ExponentialDecayScoreDefinition(field: String, origin: String, scale: String)
extends DecayScoreDefinition[ExponentialDecayScoreDefinition] {
val builder = new ExponentialDecayFunctionBuilder(field, origin, scale)
}
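// --- Hedged usage sketch (not part of the original file) ----------------------
// Composes the score definitions above via the ScoreDsl trait. The field
// names, origin/scale strings and numeric values are illustrative assumptions.
private object ScorerUsageSketch extends ScoreDsl {
  // A deterministic random score and a distance-based decay score.
  val random = randomScore(42)
  val decay  = gaussianScore("location", "11,12", "2km").offset("1km").decay(0.33)
  // Boost by a document field value, and by a custom script.
  val field  = fieldFactorScore("popularity").factor(1.2)
  val script = scriptScore("_score * doc['boost'].value").lang("groovy")
}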
|
alexander-svendsen/elastic4s
|
elastic4s-core/src/main/scala/com/sksamuel/elastic4s/scorers.scala
|
Scala
|
apache-2.0
| 4,353 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hbase
import java.io.File
import java.util.Date
import org.apache.hadoop.hbase.{HColumnDescriptor, HTableDescriptor, TableExistsException, TableName, TableNotFoundException}
import org.apache.spark.Logging
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.plans.logical
import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.{DataFrame, Row}
import org.scalatest.{BeforeAndAfterAll, FunSuite, Suite}
abstract class TestBase
extends FunSuite with BeforeAndAfterAll with Logging {
self: Suite =>
val startTime = (new Date).getTime
val hbaseHome = {
val loader = this.getClass.getClassLoader
val url = loader.getResource("loadData.txt")
val file = new File(url.getPath)
val parent = file.getParentFile
parent.getAbsolutePath
}
if (hbaseHome == null || hbaseHome.isEmpty)
logError("Spark Home is not defined; may lead to unexpected error!")
/**
* Runs the plan and makes sure the answer matches the expected result.
* @param rdd the [[DataFrame]] to be executed
   * @param expectedAnswer the expected result as a Seq[Row].
*/
protected def checkAnswer(rdd: DataFrame, expectedAnswer: Seq[Row]): Unit = {
val isSorted = rdd.logicalPlan.collect { case s: logical.Sort => s}.nonEmpty
def prepareAnswer(answer: Seq[Row]): Seq[Row] = {
// Converts data to types that we can do equality comparison using Scala collections.
// For BigDecimal type, the Scala type has a better definition of equality test (similar to
// Java's java.math.BigDecimal.compareTo).
val converted: Seq[Row] = answer.map { s =>
Row.fromSeq(s.toSeq.map {
case d: java.math.BigDecimal => BigDecimal(d)
case o => o
})
}
if (!isSorted) converted.sortBy(_.toString()) else converted
}
val sparkAnswer = try rdd.collect().toSeq catch {
case e: Exception =>
fail(
s"""
|Exception thrown while executing query:
|${rdd.queryExecution}
|== Exception ==
|$e
|${org.apache.spark.sql.catalyst.util.stackTraceToString(e)}
""".stripMargin)
}
if (prepareAnswer(expectedAnswer) != prepareAnswer(sparkAnswer)) {
fail( s"""
|Results do not match for query:
|${rdd.logicalPlan}
|== Analyzed Plan ==
|${rdd.queryExecution.analyzed}
|== Physical Plan ==
|${rdd.queryExecution.executedPlan}
|== Results ==
|${
sideBySide(
s"== Correct Answer - ${expectedAnswer.size} ==" +:
prepareAnswer(expectedAnswer).map(_.toString()),
s"== Spark Answer - ${sparkAnswer.size} ==" +:
            prepareAnswer(sparkAnswer).map(_.toString())).mkString("\n")
}
""".stripMargin)
}
}
protected def checkAnswer(rdd: DataFrame, expectedAnswer: Row): Unit = {
checkAnswer(rdd, Seq(expectedAnswer))
}
def runSql(sql: String) = {
logInfo(sql)
TestHbase.sql(sql).collect()
}
override protected def afterAll(): Unit = {
val msg = s"Test ${getClass.getName} completed at ${(new java.util.Date).toString} duration=${((new java.util.Date).getTime - startTime) / 1000}"
logInfo(msg)
}
val CompareTol = 1e-6
def compareWithTol(actarr: Seq[Any], exparr: Seq[Any], emsg: String): Boolean = {
actarr.zip(exparr).forall { case (aa, ee) =>
val eq = (aa, ee) match {
case (a: Double, e: Double) =>
Math.abs(a - e) <= CompareTol
case (a: Float, e: Float) =>
Math.abs(a - e) <= CompareTol
case (a: Byte, e) => true //For now, we assume it is ok
case (a, e) =>
if (a == null && e == null) {
logDebug(s"a=null e=null")
} else {
logDebug(s"atype=${a.getClass.getName} etype=${e.getClass.getName}")
}
a == e
}
if (!eq) {
logError(s"$emsg: Mismatch- act=$aa exp=$ee")
}
eq
}
}
def verify(testName: String, sql: String, result1: Seq[Seq[Any]], exparr: Seq[Seq[Any]]) = {
val res = {
for (rx <- exparr.indices)
yield compareWithTol(result1(rx), exparr(rx), s"Row$rx failed")
}.foldLeft(true) { case (res1, newres) => res1 && newres}
logInfo(s"$sql came back with ${result1.size} results")
logInfo(result1.mkString)
assert(res, "One or more rows did not match expected")
}
def createNativeHbaseTable(tableName: String, families: Seq[String]) = {
val hbaseAdmin = TestHbase.hbaseAdmin
val hdesc = new HTableDescriptor(TableName.valueOf(tableName))
families.foreach { f => hdesc.addFamily(new HColumnDescriptor(f))}
try {
hbaseAdmin.createTable(hdesc)
} catch {
case e: TableExistsException =>
logError(s"Table already exists $tableName", e)
}
}
def dropLogicalTable(tableName: String) = {
TestHbase.catalog.refreshTable(TableIdentifier(tableName))
TestHbase.catalog.client.runSqlHive(s"DROP TABLE $tableName")
TestHbase.catalog.unregisterTable(Seq(tableName))
}
def dropNativeHbaseTable(tableName: String) = {
try {
val hbaseAdmin = TestHbase.hbaseAdmin
hbaseAdmin.disableTable(tableName)
hbaseAdmin.deleteTable(tableName)
} catch {
      case e: TableNotFoundException =>
        logError(s"Table does not exist $tableName", e)
}
}
def loadData(tableName: String, loadFile: String) = {
// then load data into table
val loadSql = s"LOAD PARALL DATA LOCAL INPATH '$loadFile' INTO TABLE $tableName"
runSql(loadSql)
}
def printRows(rows: Array[Row]) = {
println("======= QUERY RESULTS ======")
for (i <- rows.indices) {
println(rows(i).mkString(" | "))
}
println("============================")
}
}
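// --- Hedged sketch (not part of the original file) ----------------------------
// Isolates the comparison rule implemented by compareWithTol above: Double and
// Float columns match within CompareTol (1e-6), everything else by equality.
private object ToleranceSketch {
  val CompareTol = 1e-6
  def approxEqual(a: Double, e: Double): Boolean = math.abs(a - e) <= CompareTol
  // approxEqual(1.0000004, 1.0) == true; approxEqual(1.1, 1.0) == false
}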
|
HuaweiBigData/astro
|
src/test/scala/org/apache/spark/sql/hbase/TestBase.scala
|
Scala
|
apache-2.0
| 6,728 |
//import com.sfxcode.nosql.mongo.gridfs.GridfsDatabaseFunctions
//import com.sfxcode.nosql.mongo.model.ImageMetadata
//import org.bson.types.ObjectId
//
//object GridfsDatabaseApp extends App with GridfsDatabaseFunctions {
//
// dropImages
//
// println(imagesCount)
// createIndexOnImages("name")
//
// val id: ObjectId = insertImage(SourcePath + "scala-logo.png", ImageMetadata("logo1"))
//
// println(id)
//
// val file = findImage(id.toString)
// println(file)
//
// val imageFile = findImage(file)
//
// println(imageFile.getChunkSize)
// println(imageFile.getMetadata.get("indexSet"))
//
// downloadImage(imageFile, TargetPath + file.getFilename)
//
// insertImage(SourcePath + "scala-logo.jpg", ImageMetadata("logo2", indexSet = Set(5, 6, 7)))
// println(imagesCount)
//
// val logos = findImages("group", "logos")
//
// println(logos)
//
//}
|
sfxcode/simple-mongo
|
docs/src/main/scala/GridfsDatabaseApp.scala
|
Scala
|
apache-2.0
| 865 |
package com.rrdinsights.scalabrine.models
import com.rrdinsights.scalabrine.models.Utils._
import java.{lang => jl}
final case class TeamGameLog(games: Seq[GameLog])
final case class TeamGameLogResponse(resource: String,
teamGameLog: TeamGameLog)
final case class GameLog(teamId: jl.Integer,
gameId: String,
gameDate: String,
matchup: String,
result: String, // TODO - convert to case objects
wins: jl.Integer,
losses: jl.Integer,
winPercentage: jl.Double,
minutes: jl.Double,
fieldGoalsMade: jl.Integer,
fieldGoalAttempts: jl.Integer,
fieldGoalPercentage: jl.Double,
threePointFieldGoalsMade: jl.Integer,
threePointFieldGoalAttempts: jl.Integer,
threePointFieldGoalPercentage: jl.Double,
freeThrowsMade: jl.Integer,
freeThrowAttempts: jl.Integer,
freeThrowPercentage: jl.Double,
offensiveRebounds: jl.Integer,
defensiveRebounds: jl.Integer,
totalRebounds: jl.Integer,
assists: jl.Integer,
steals: jl.Integer,
blocks: jl.Integer,
turnovers: jl.Integer,
personalFouls: jl.Integer,
points: jl.Integer) extends ConvertedResultSetResponse
private[rrdinsights] case object GameConverter extends ResultSetRawResponseConverter[GameLog] {
override val name: String = "TeamGameLog"
override def convertRaw(rows: Array[Array[Any]]): Seq[GameLog] =
rows.map(row => GameLog(
transformToInt(row(0)),
transformToString(row(1)),
transformToString(row(2)),
transformToString(row(3)),
transformToString(row(4)),
transformToInt(row(5)),
transformToInt(row(6)),
transformToDouble(row(7)),
minutesToDouble(transformToInt(row(8))),
transformToInt(row(9)),
transformToInt(row(10)),
transformToDouble(row(11)),
transformToInt(row(12)),
transformToInt(row(13)),
transformToDouble(row(14)),
transformToInt(row(15)),
      transformToInt(row(16)), // freeThrowAttempts
transformToDouble(row(17)),
transformToInt(row(18)),
transformToInt(row(19)),
transformToInt(row(20)),
transformToInt(row(21)),
transformToInt(row(22)),
transformToInt(row(23)),
transformToInt(row(24)),
transformToInt(row(25)),
transformToInt(row(26))))
private def minutesToDouble(minutes: jl.Integer): jl.Double = minutes.toDouble
}
final case class TeamGameLogRawResponse(override val resource: String,
override val resultSets: Array[ResultSetResponse]) extends RawResponse {
def toTeamGameLogResponse: TeamGameLogResponse =
TeamGameLogResponse(resource, toTeamGameLog)
def toTeamGameLog: TeamGameLog = TeamGameLogRawResponse.toTeamGameLog(resultSets)
}
private[rrdinsights] object TeamGameLogRawResponse extends ResultSetRawResponseConverters {
def toTeamGameLog(rawSummary: Array[ResultSetResponse]): TeamGameLog =
TeamGameLog(convert[GameLog](rawSummary, GameConverter))
override protected val converters: Seq[ResultSetRawResponseConverter[_]] =
Seq(GameConverter)
}
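// --- Hedged usage sketch (not part of the original file) ----------------------
// Shows the intended conversion path from a raw response to the typed model;
// `raw` is an illustrative placeholder for a parsed stats.nba.com payload.
private object TeamGameLogUsageSketch {
  def averagePoints(raw: TeamGameLogRawResponse): Double = {
    val games = raw.toTeamGameLog.games
    if (games.isEmpty) 0.0
    else games.map(_.points.intValue).sum.toDouble / games.size
  }
}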
|
rd11490/Scalabrine
|
src/main/scala/com/rrdinsights/scalabrine/models/TeamGameLog.scala
|
Scala
|
mit
| 3,634 |
package net.hearthstats.game.imageanalysis
import java.awt.image.BufferedImage
import grizzled.slf4j.Logging
import net.hearthstats.core.{Card, Deck, HeroClass}
import net.hearthstats.game.imageanalysis.CoordinateCacheBase.UniquePixelIdentifier
import net.hearthstats.game.imageanalysis.UniquePixel._
import net.hearthstats.game.ocr.{DeckCardOcr, DeckNameOcr}
import net.hearthstats.util.Coordinate
import org.apache.commons.lang3.StringUtils
import scala.collection.mutable.ListBuffer
/**
* Analyses screenshots of the deck screen in Hearthstone and returns a deck object.
*
* @author gtch
*/
class DeckAnalyser(val cardList: List[Card], val imgWidth: Int, val imgHeight: Int) extends CoordinateCacheBase with Logging {
val ocr = new DeckCardOcr
val individualPixelAnalyser = new IndividualPixelAnalyser
def identifyDeck(img1: BufferedImage, img2: BufferedImage): Option[Deck] = {
if (img1.getWidth != imgWidth || img1.getHeight() != imgHeight) throw new RuntimeException("Image 1 is not the expected size")
if (img2.getWidth != imgWidth || img2.getHeight() != imgHeight) throw new RuntimeException("Image 2 is not the expected size")
// Make a list of cards found on each screen; there may be some overlap
val cards1 = identifyCards(img1)
val cards2 = identifyCards(img2)
if (cards1.size < 3 || cards2.size < 3) {
// One of the images contained hardly any cards, which suggests the screen capture failed. This can happen if the image
      // was captured during a screen refresh. Return None to indicate that a new screen capture is needed.
debug(s"Deck identification failed because cards were not detected on a screen (img1 = ${cards1.size}, img2 = ${cards2.size})")
return None
}
// Combine the two lists, using the card with the larger count if the same card appears in both lists
val cards = (cards1 ++ cards2).groupBy(_.id).map(c => c._2.head.copy(count = c._2.foldLeft(0)((i, s) => i max s.count))).toList
cards.foreach(c => debug(s"Identified card ${c.count} ${c.name}"))
// Determine the deck name, checking both images and returning the longest name if they're not the same
val ocr = new DeckNameOcr()
val deckName1 = ocr.process(img1)
val deckName2 = ocr.process(img2)
    val deckName = List(deckName1, deckName2).maxBy(_.length)
val heroClass = identifyClass(img1) match {
case Some(str) => str
case None => ""
}
Some(new Deck(cards = cards, name = deckName, hero = HeroClass.byName(heroClass)))
}
private def identifyCards(img: BufferedImage): List[Card] = {
val cards = new ListBuffer[Card]
for (i <- 0 until DeckAnalyser.CardsVisibleOnScreen) {
val cardImg = extractCardImage(img, i)
      val roughName = ocr.process(cardImg)
val card = identifyCard(roughName)
card match {
case Some(c) => {
debug(s"Card $i: $roughName matched ${c.originalName}")
cards.append(c.copy(count = identifyCount(img, i)))
}
case _ => debug(s"Card $i: $roughName DID NOT MATCH")
}
}
// Group multiple cards together (golden and normal cards would otherwise be separate in the list)
cards.groupBy(_.id).map(c => c._2.head.copy(count = c._2.foldLeft(0)((i, s) => i + s.count))).toList
}
private def extractCardImage(img: BufferedImage, cardNo: Int): BufferedImage = {
val yoffset = (135f + (45.0f * cardNo.toFloat)).toInt
val topLeft = getCoordinate(1280, yoffset)
val bottomRight = getCoordinate(1470, yoffset + 24)
debug(s"Extracting ${topLeft.x}x${topLeft.y} to ${bottomRight.x}x${bottomRight.y}")
img.getSubimage(topLeft.x, topLeft.y,
bottomRight.x - topLeft.x, bottomRight.y - topLeft.y)
}
private def identifyCard(roughName: String): Option[Card] = {
val bestCard = cardList.maxBy(c => StringUtils.getJaroWinklerDistance(roughName, c.name))
// Only return the card if the distance score is higher than 0.6; lower scores mean the card wasn't found
if (StringUtils.getJaroWinklerDistance(roughName, bestCard.name) > 0.6) {
Some(bestCard)
} else {
None
}
}
private def identifyCount(img: BufferedImage, cardNo: Int): Int = {
val yoffset = (45.0f * cardNo.toFloat).toInt
// Look for a yellow number two in various offset positions
if (isCountTwo(img, yoffset) || isCountTwo(img, yoffset - 2) || isCountTwo(img, yoffset + 2)) {
2
} else {
1
}
}
private def isCountTwo(img: BufferedImage, offset: Int): Boolean = {
checkPixelIsYellow(img, 1487, 152 + offset) &&
checkPixelIsYellow(img, 1485, 144 + offset) &&
checkPixelIsYellow(img, 1492, 143 + offset) &&
!checkPixelIsYellow(img, 1481, 139 + offset) &&
!checkPixelIsYellow(img, 1496, 150 + offset)
}
private def checkPixelIsYellow(img: BufferedImage, x: Int, y: Int): Boolean = {
val pixel = getCoordinate(x, y)
val rgb = img.getRGB(pixel.x, pixel.y)
val red = (rgb >> 16) & 0xFF
val green = (rgb >> 8) & 0xFF
val blue = (rgb & 0xFF)
// debug(s"Pixel $x,$y: $red,$green,$blue yellow=$yellow")
red > 166 && red < 230 && green > 148 && green < 210 && blue < 20
}
private def identifyClassPixel(image: BufferedImage, pixelRules: Iterable[(Array[UniquePixel], String)]): Option[String] =
(for {
(pixels, result) <- pixelRules
if individualPixelAnalyser.testAllPixelsMatch(image, pixels)
} yield result).headOption
private def identifyClass(img: BufferedImage): Option[String] =
identifyClassPixel(img, Seq(
Array(DECK_DRUID_1, DECK_DRUID_2) -> "Druid",
Array(DECK_HUNTER_1, DECK_HUNTER_2) -> "Hunter",
Array(DECK_MAGE_1, DECK_MAGE_2) -> "Mage",
Array(DECK_PALADIN_1, DECK_PALADIN_2) -> "Paladin",
Array(DECK_PRIEST_1, DECK_PRIEST_2) -> "Priest",
Array(DECK_ROGUE_1, DECK_ROGUE_2) -> "Rogue",
Array(DECK_SHAMAN_1, DECK_SHAMAN_2) -> "Shaman",
Array(DECK_WARLOCK_1, DECK_WARLOCK_2) -> "Warlock",
Array(DECK_WARRIOR_1, DECK_WARRIOR_2) -> "Warrior"))
/**
* Calculates the correct pixel position for a given coordinate at the current image size.
*
* @param x The X position, relative to a 1600px width screen
* @param y The Y position, relative to a 1200px high screen
* @return The given coordinate converted into the current image size.
*/
private def getCoordinate(x: Int, y: Int): Coordinate = {
val upi: UniquePixelIdentifier = new UniquePixelIdentifier(x, y, imgWidth, imgHeight)
getCachedCoordinate(upi)
}
}
object DeckAnalyser {
val CardsVisibleOnScreen = 21
}
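// --- Hedged sketch (not part of the original file) -----------------------------
// Isolates the fuzzy-matching rule used by identifyCard above: take the
// candidate with the highest Jaro-Winkler similarity and accept it only above
// 0.6. Assumes a non-empty candidate list; names are illustrative.
private object FuzzyMatchSketch {
  def bestMatch(ocrName: String, candidates: List[String]): Option[String] = {
    val best = candidates.maxBy(c => StringUtils.getJaroWinklerDistance(ocrName, c))
    if (StringUtils.getJaroWinklerDistance(ocrName, best) > 0.6) Some(best) else None
  }
}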
|
HearthStats/HearthStats.net-Uploader
|
companion/src/main/scala/net/hearthstats/game/imageanalysis/DeckAnalyser.scala
|
Scala
|
bsd-3-clause
| 6,632 |
/*
* Copyright 2015 - 2016 Red Bull Media House GmbH <http://www.redbullmediahouse.com> - all rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rbmhtechnology.eventuate
import akka.actor._
/**
* A conditional request is a request to an actor in the [[EventsourcedView]]
* hierarchy whose delivery to the actor's command handler is delayed until
* the request's `condition` is in the causal past of that actor (i.e. if the
* `condition` is `<=` the actor's current version).
*/
case class ConditionalRequest(condition: VectorTime, req: Any)
/**
* Thrown by an actor in the [[EventsourcedView]] hierarchy if it receives
 * a [[ConditionalRequest]] but does not extend the [[ConditionalRequests]]
* trait.
*/
class ConditionalRequestException(msg: String) extends RuntimeException(msg)
/**
* Must be extended by actors in the [[EventsourcedView]] hierarchy if they
* want to support [[ConditionalRequest]] processing.
*/
trait ConditionalRequests extends EventsourcedView with EventsourcedVersion {
import ConditionalRequests._
private val requestManager = context.actorOf(Props(new RequestManager(self)))
/**
* Internal API.
*/
override private[eventuate] def conditionalSend(condition: VectorTime, req: Any): Unit =
requestManager ! Request(condition, req, sender())
/**
* Internal API.
*/
override private[eventuate] def versionChanged(condition: VectorTime): Unit =
requestManager ! condition
}
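// --- Hedged usage sketch (not part of the original file) ----------------------
// Shows how a client might achieve read-your-writes against an actor that
// mixes in ConditionalRequests. `viewActor`, `GetCount` and `updateTimestamp`
// are illustrative assumptions, not part of the API above.
private object ConditionalRequestUsageSketch {
  case object GetCount

  def readAfter(viewActor: ActorRef, updateTimestamp: VectorTime): Unit =
    // GetCount reaches the view's command handler only once updateTimestamp
    // is in the view's causal past (i.e. updateTimestamp <= current version).
    viewActor ! ConditionalRequest(updateTimestamp, GetCount)
}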
private object ConditionalRequests {
case class Request(condition: VectorTime, req: Any, sdr: ActorRef)
case class Send(olderThan: VectorTime)
case class Sent(olderThan: VectorTime, num: Int)
class RequestManager(owner: ActorRef) extends Actor {
val requestBuffer = context.actorOf(Props(new RequestBuffer(owner)))
var currentVersion: VectorTime = VectorTime.Zero
val idle: Receive = {
case cr: Request =>
process(cr)
case t: VectorTime =>
currentVersion = t
requestBuffer ! Send(t)
context.become(sending)
}
val sending: Receive = {
case cr: Request =>
process(cr)
case t: VectorTime =>
currentVersion = t
case Sent(olderThan, num) if olderThan == currentVersion =>
context.become(idle)
case Sent(olderThan, num) =>
requestBuffer ! Send(currentVersion)
}
def receive = idle
def process(cr: Request): Unit = {
if (cr.condition <= currentVersion) owner.tell(cr.req, cr.sdr)
else requestBuffer ! cr
}
}
class RequestBuffer(owner: ActorRef) extends Actor {
// TODO: cleanup requests older than threshold
var requests: Vector[Request] = Vector.empty
def receive = {
case Send(olderThan) =>
sender() ! Sent(olderThan, send(olderThan))
case cc: Request =>
requests = requests :+ cc
}
def send(olderThan: VectorTime): Int = {
val (older, other) = requests.partition(_.condition <= olderThan)
requests = other
older.foreach(cc => owner.tell(cc.req, cc.sdr))
older.length
}
}
}
|
RBMHTechnology/eventuate
|
eventuate-core/src/main/scala/com/rbmhtechnology/eventuate/ConditionalRequest.scala
|
Scala
|
apache-2.0
| 3,607 |
package ${package} {
package snippet {
import org.specs._
import org.specs.runner.JUnit3
import org.specs.runner.ConsoleRunner
import net.liftweb._
import http._
import net.liftweb.util._
import net.liftweb.common._
import org.specs.matcher._
import org.specs.specification._
import Helpers._
import lib._
class HelloWorldTestSpecsAsTest extends JUnit3(HelloWorldTestSpecs)
object HelloWorldTestSpecsRunner extends ConsoleRunner(HelloWorldTestSpecs)
object HelloWorldTestSpecs extends Specification {
val session = new LiftSession("", randomString(20), Empty)
val stableTime = now
override def executeExpectations(ex: Examples, t: =>Any): Any = {
S.initIfUninitted(session) {
DependencyFactory.time.doWith(stableTime) {
super.executeExpectations(ex, t)
}
}
}
"HelloWorld Snippet" should {
"Put the time in the node" in {
val hello = new HelloWorld
Thread.sleep(1000) // make sure the time changes
val str = hello.howdy(<span>Hello at <b:time/></span>).toString
str.indexOf(stableTime.toString) must be >= 0
str.indexOf("Hello at") must be >= 0
}
}
}
}
}
|
wsaccaco/lift
|
archetypes/lift-archetype-sbt/src/main/resources/archetype-resources/src/test/scala/snippet/HelloWorldTest.scala
|
Scala
|
apache-2.0
| 1,141 |
package onion.compiler
/* ************************************************************** *
* *
* Copyright (c) 2016-, Kota Mizushima, All rights reserved. *
* *
* *
* This software is distributed under the modified BSD License. *
* ************************************************************** */
import java.text.MessageFormat
import scala.collection.mutable.Buffer
import onion.compiler.toolbox.Message
import onion.compiler.exceptions.CompilationException
/**
* @author Kota Mizushima
*
*/
class SemanticErrorReporter(threshold: Int) {
private val problems = Buffer[CompileError]()
private var sourceFile: String = null
private var errorCount: Int = 0
private def format(string: String): String = {
MessageFormat.format(string)
}
private def format(string: String, arg: String): String = {
MessageFormat.format(string, arg)
}
private def format(string: String, arg1: String, arg2: String): String = {
MessageFormat.format(string, arg1, arg2)
}
private def format(string: String, arg1: String, arg2: String, arg3: String): String = {
MessageFormat.format(string, arg1, arg2, arg3)
}
private def format(string: String, arg1: String, arg2: String, arg3: String, arg4: String): String = {
MessageFormat.format(string, arg1, arg2, arg3, arg4)
}
private def format(string: String, args: Array[String]): String = {
MessageFormat.format(string, args.asInstanceOf[Array[AnyRef]]:_*)
}
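  // Example (standard java.text.MessageFormat behavior, not project-specific):
  //   format("expected {0} but found {1}", "Int", "String")
  //   evaluates to "expected Int but found String".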
private[this] def message(property: String): String = Message(property)
private def reportIllegalMethodCall(position: Location, items: Array[AnyRef]): Unit = {
val receiver = items(0).asInstanceOf[IRT.ClassType].name
val methodName= items(1).asInstanceOf[String]
problem(position, format(message("error.semantic.illegalMethodCall"), receiver, methodName))
}
private def reportIncompatibleType(position: Location, items: Array[AnyRef]): Unit = {
val expected: IRT.Type = items(0).asInstanceOf[IRT.Type]
val detected: IRT.Type = items(1).asInstanceOf[IRT.Type]
problem(position, format(message("error.semantic.incompatibleType"), expected.name, detected.name))
}
private def names(types: Array[IRT.Type]): String = {
val buffer = new StringBuffer
if (types.length > 0) {
buffer.append(types(0).name)
var i: Int = 1
while (i < types.length) {
buffer.append(", ")
buffer.append(types(i).name)
i += 1
}
}
new String(buffer)
}
private def reportIncompatibleOperandType(position: Location, items: Array[AnyRef]): Unit = {
val operator: String = items(0).asInstanceOf[String]
val operands: Array[IRT.Type] = items(1).asInstanceOf[Array[IRT.Type]]
problem(position, format(message("error.semantic.incompatibleOperandType"), items(0).asInstanceOf[String], names(operands)))
}
private def reportLValueRequired(position: Location, items: Array[AnyRef]): Unit = {
problem(position, format(message("error.semantic.lValueRequired")))
}
private def reportVariableNotFound(position: Location, items: Array[AnyRef]): Unit = {
problem(position, format(message("error.semantic.variableNotFound"), items(0).asInstanceOf[String]))
}
private def reportClassNotFound(position: Location, items: Array[AnyRef]): Unit = {
problem(position, format(message("error.semantic.classNotFound"), items(0).asInstanceOf[String]))
}
private def reportFieldNotFound(position: Location, items: Array[AnyRef]): Unit = {
problem(position, format(message("error.semantic.fieldNotFound"), (items(0).asInstanceOf[IRT.Type]).name, items(1).asInstanceOf[String]))
}
private def reportMethodNotFound(position: Location, items: Array[AnyRef]): Unit = {
problem(position, format(message("error.semantic.methodNotFound"), (items(0).asInstanceOf[IRT.Type]).name, items(1).asInstanceOf[String], names((items(2).asInstanceOf[Array[IRT.Type]]))))
}
private def reportAmbiguousMethod(position: Location, items: Array[AnyRef]): Unit = {
val item1: Array[AnyRef] = items(0).asInstanceOf[Array[AnyRef]]
val item2: Array[AnyRef] = items(1).asInstanceOf[Array[AnyRef]]
val target1: String = (item1(0).asInstanceOf[IRT.ObjectType]).name
val name1: String = item1(1).asInstanceOf[String]
val args1: String = names(item1(2).asInstanceOf[Array[IRT.Type]])
val target2: String = (item2(0).asInstanceOf[IRT.ObjectType]).name
val name2: String = item2(1).asInstanceOf[String]
val args2: String = names(item2(2).asInstanceOf[Array[IRT.Type]])
problem(position, format(message("error.semantic.ambiguousMethod"), Array[String](target1, name1, args2, target2, name2, args2)))
}
private def reportDuplicateLocalVariable(position: Location, items: Array[AnyRef]): Unit = {
problem(position, format(message("error.semantic.duplicatedVariable"), items(0).asInstanceOf[String]))
}
private def reportDuplicateClass(position: Location, items: Array[AnyRef]): Unit = {
problem(position, format(message("error.semantic.duplicatedClass"), items(0).asInstanceOf[String]))
}
private def reportDuplicateField(position: Location, items: Array[AnyRef]): Unit = {
problem(position, format(message("error.semantic.duplicatedField"), (items(0).asInstanceOf[IRT.Type]).name, items(1).asInstanceOf[String]))
}
private def reportDuplicateMethod(position: Location, items: Array[AnyRef]): Unit = {
problem(position, format(message("error.semantic.duplicatedMethod"), (items(0).asInstanceOf[IRT.Type]).name, items(1).asInstanceOf[String], names(items(2).asInstanceOf[Array[IRT.Type]])))
}
private def reportDuplicateGlobalVariable(position: Location, items: Array[AnyRef]): Unit = {
problem(position, format(message("error.semantic.duplicatedGlobalVariable"), items(0).asInstanceOf[String]))
}
  private def reportDuplicateFunction(position: Location, items: Array[AnyRef]): Unit = {
    problem(position, format(message("error.semantic.duplicatedFunction"), items(0).asInstanceOf[String], names(items(1).asInstanceOf[Array[IRT.Type]])))
  }
private def reportDuplicateConstructor(position: Location, items: Array[AnyRef]): Unit = {
problem(position, format(message("error.semantic.duplicatedConstructor"), (items(0).asInstanceOf[IRT.Type]).name, names(items(1).asInstanceOf[Array[IRT.Type]])))
}
private def reportMethodNotAccessible(position: Location, items: Array[AnyRef]): Unit = {
problem(position, format(message("error.semantic.methodNotAccessible"), (items(0).asInstanceOf[IRT.ObjectType]).name, items(1).asInstanceOf[String], names((items(2).asInstanceOf[Array[IRT.Type]])), (items(3).asInstanceOf[IRT.ClassType]).name))
}
private def reportFieldNotAccessible(position: Location, items: Array[AnyRef]): Unit = {
problem(position, format(message("error.semantic.fieldNotAccessible"), (items(0).asInstanceOf[IRT.ClassType]).name, items(1).asInstanceOf[String], (items(2).asInstanceOf[IRT.ClassType]).name))
}
private def reportClassNotAccessible(position: Location, items: Array[AnyRef]): Unit = {
problem(position, format(message("error.semantic.classNotAccessible"), (items(0).asInstanceOf[IRT.ClassType]).name, (items(1).asInstanceOf[IRT.ClassType]).name))
}
private def reportCyclicInheritance(position: Location, items: Array[AnyRef]): Unit = {
problem(position, format(message("error.semantic.cyclicInheritance"), items(0).asInstanceOf[String]))
}
private def reportCyclicDelegation(position: Location, items: Array[AnyRef]): Unit = {
problem(position, message("error.semantic.cyclicDelegation"))
}
private def reportIllegalInheritance(position: Location, items: Array[AnyRef]): Unit = {
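    // No diagnostic message is produced here: the error is counted toward the
    // threshold in report(), but no CompileError is appended to problems.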
}
private def reportCannotReturnValue(position: Location, items: Array[AnyRef]): Unit = {
problem(position, message("error.semantic.cannotReturnValue"))
}
private def reportConstructorNotFound(position: Location, items: Array[AnyRef]): Unit = {
val `type` : String = (items(0).asInstanceOf[IRT.Type]).name
val args: String = names((items(1).asInstanceOf[Array[IRT.Type]]))
problem(position, format(message("error.semantic.constructorNotFound"), `type`, args))
}
private def reportAmbiguousConstructor(position: Location, items: Array[AnyRef]): Unit = {
val item1: Array[AnyRef] = items(0).asInstanceOf[Array[AnyRef]]
val item2: Array[AnyRef] = items(1).asInstanceOf[Array[AnyRef]]
val target1: String = (item1(0).asInstanceOf[IRT.ObjectType]).name
val args1: String = names(item1(1).asInstanceOf[Array[IRT.Type]])
val target2: String = (item2(0).asInstanceOf[IRT.ObjectType]).name
val args2: String = names(item2(1).asInstanceOf[Array[IRT.Type]])
problem(position, format(message("error.semantic.ambiguousConstructor"), target1, args2, target2, args2))
}
  private def reportInterfaceRequired(position: Location, items: Array[AnyRef]): Unit = {
val `type` : IRT.Type = items(0).asInstanceOf[IRT.Type]
problem(position, format(message("error.semantic.interfaceRequired"), `type`.name))
}
private def reportUnimplementedFeature(position: Location, items: Array[AnyRef]): Unit = {
problem(position, message("error.semantic.unimplementedFeature"))
}
private def reportDuplicateGeneratedMethod(position: Location, items: Array[AnyRef]): Unit = {
problem(position, format(message("error.semantic.duplicateGeneratedMethod"), (items(0).asInstanceOf[IRT.Type]).name, items(1).asInstanceOf[String], names(items(2).asInstanceOf[Array[IRT.Type]])))
}
private def reportIsNotBoxableType(position: Location, items: Array[AnyRef]): Unit = {
problem(position, format(message("error.semantic.isNotBoxableType"), (items(0).asInstanceOf[IRT.Type]).name))
}
private def problem(position: Location, message: String): Unit = {
problems.append(new CompileError(sourceFile, position, message))
}
def report(error: SemanticError, position: Location, items: Array[AnyRef]): Unit = {
errorCount += 1
error match {
case SemanticError.ILLEGAL_METHOD_CALL =>
reportIllegalMethodCall(position, items)
case SemanticError.INCOMPATIBLE_TYPE =>
reportIncompatibleType(position, items)
case SemanticError.INCOMPATIBLE_OPERAND_TYPE =>
reportIncompatibleOperandType(position, items)
case SemanticError.VARIABLE_NOT_FOUND =>
reportVariableNotFound(position, items)
case SemanticError.CLASS_NOT_FOUND =>
reportClassNotFound(position, items)
case SemanticError.FIELD_NOT_FOUND =>
reportFieldNotFound(position, items)
case SemanticError.METHOD_NOT_FOUND =>
reportMethodNotFound(position, items)
case SemanticError.AMBIGUOUS_METHOD =>
reportAmbiguousMethod(position, items)
case SemanticError.DUPLICATE_LOCAL_VARIABLE =>
reportDuplicateLocalVariable(position, items)
case SemanticError.DUPLICATE_CLASS =>
reportDuplicateClass(position, items)
case SemanticError.DUPLICATE_FIELD =>
reportDuplicateField(position, items)
case SemanticError.DUPLICATE_METHOD =>
reportDuplicateMethod(position, items)
case SemanticError.DUPLICATE_GLOBAL_VARIABLE =>
reportDuplicateGlobalVariable(position, items)
case SemanticError.DUPLICATE_FUNCTION =>
reportDuplicateFunction(position, items)
case SemanticError.METHOD_NOT_ACCESSIBLE =>
reportMethodNotAccessible(position, items)
case SemanticError.FIELD_NOT_ACCESSIBLE =>
reportFieldNotAccessible(position, items)
case SemanticError.CLASS_NOT_ACCESSIBLE =>
reportClassNotAccessible(position, items)
case SemanticError.CYCLIC_INHERITANCE =>
reportCyclicInheritance(position, items)
case SemanticError.CYCLIC_DELEGATION =>
reportCyclicDelegation(position, items)
case SemanticError.ILLEGAL_INHERITANCE =>
reportIllegalInheritance(position, items)
case SemanticError.CANNOT_RETURN_VALUE =>
reportCannotReturnValue(position, items)
case SemanticError.CONSTRUCTOR_NOT_FOUND =>
reportConstructorNotFound(position, items)
case SemanticError.AMBIGUOUS_CONSTRUCTOR =>
reportAmbiguousConstructor(position, items)
case SemanticError.INTERFACE_REQUIRED =>
        reportInterfaceRequired(position, items)
case SemanticError.UNIMPLEMENTED_FEATURE =>
reportUnimplementedFeature(position, items)
case SemanticError.DUPLICATE_CONSTRUCTOR =>
reportDuplicateConstructor(position, items)
case SemanticError.DUPLICATE_GENERATED_METHOD =>
reportDuplicateGeneratedMethod(position, items)
case SemanticError.IS_NOT_BOXABLE_TYPE =>
reportIsNotBoxableType(position, items)
case SemanticError.LVALUE_REQUIRED =>
reportLValueRequired(position, items)
}
if (errorCount >= threshold) {
throw new CompilationException(problems.toSeq)
}
}
def getProblems: Array[CompileError] = problems.toArray
def setSourceFile(sourceFile: String): Unit = {
this.sourceFile = sourceFile
}
}
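
// Minimal usage sketch (hypothetical names; real Location values come from the parser):
//   val reporter = new SemanticErrorReporter(threshold = 10)
//   reporter.setSourceFile("Main.on")
//   reporter.report(SemanticError.CLASS_NOT_FOUND, pos, Array[AnyRef]("UnknownClass"))
//   val errors: Array[CompileError] = reporter.getProblems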
|
onionlang/onion
|
src/main/scala/onion/compiler/SemanticErrorReporter.scala
|
Scala
|
bsd-3-clause
| 13,531 |
/**
* Copyright (C) 2012 Inria, University Lille 1.
*
* This file is part of PowerAPI.
*
* PowerAPI is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* PowerAPI is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with PowerAPI. If not, see <http://www.gnu.org/licenses/>.
*
* Contact: [email protected].
*/
package fr.inria.powerapi.formula.mem.single
import scala.concurrent.duration.DurationInt
import org.scalatest.junit.ShouldMatchersForJUnit
import org.scalatest.FlatSpec
import fr.inria.powerapi.library.PowerAPI
import fr.inria.powerapi.core.Process
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import fr.inria.powerapi.core.Listener
import fr.inria.powerapi.formula.mem.api.MemFormulaMessage
import akka.actor.ActorSystem
import akka.testkit.TestActorRef
import fr.inria.powerapi.sensor.mem.api.MemSensorMessage
import fr.inria.powerapi.sensor.mem.sigar.MemSensor
class MemFormulaListener extends Listener {
def messagesToListen = Array(classOf[MemFormulaMessage])
def acquire = {
case memFormulaMessage: MemFormulaMessage => println(memFormulaMessage.energy.power)
case unknown => {
if (log.isWarningEnabled) log.warning("unknown message " + unknown)
}
}
}
@RunWith(classOf[JUnitRunner])
class MemFormulaSpec extends FlatSpec with ShouldMatchersForJUnit {
trait ConfigurationMock extends Configuration {
override lazy val readPower = 5.0
override lazy val writePower = 15.0
}
implicit val system = ActorSystem("mem-formula-single")
val memFormula = TestActorRef(new MemFormula with ConfigurationMock)
"A MemFormula" should "compute global memory power consumption" in {
memFormula.underlyingActor.power should equal ((5 + 15).doubleValue / 2)
}
"A MemFormula" should "compute process memory power consumption" in {
memFormula.underlyingActor.compute(MemSensorMessage(residentPerc = 0.5, tick = null)) should equal (memFormula.underlyingActor.power * 0.5)
}
"A MemFormula" should "react to Tick to compute process memory power consumption" in {
val currentPid = java.lang.management.ManagementFactory.getRuntimeMXBean.getName.split("@")(0).toInt
Array(classOf[MemSensor], classOf[MemFormula]).foreach(PowerAPI.startEnergyModule(_))
PowerAPI.startMonitoring(process = Process(currentPid), duration = 1.second, listener = classOf[MemFormulaListener])
Thread.sleep((5.seconds).toMillis)
PowerAPI.stopMonitoring(process = Process(currentPid), duration = 1.second, listener = classOf[MemFormulaListener])
Array(classOf[MemSensor], classOf[MemFormula]).foreach(PowerAPI.stopEnergyModule(_))
}
}
|
abourdon/powerapi-akka
|
formulae/formula-mem-single/src/test/scala/fr/inria/powerapi/formula/mem/single/MemFormulaSpec.scala
|
Scala
|
agpl-3.0
| 3,114 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.bwsw.sj.common.si
import java.util.UUID
import com.bwsw.sj.common.dal.model.instance.InstanceDomain
import com.bwsw.sj.common.dal.model.service.{ServiceDomain, ZKServiceDomain}
import com.bwsw.sj.common.dal.model.stream.StreamDomain
import com.bwsw.sj.common.dal.repository.{ConnectionRepository, GenericMongoRepository}
import com.bwsw.sj.common.si.model.service.{Service, ServiceCreator}
import com.bwsw.sj.common.si.result._
import com.bwsw.sj.common.utils.{MessageResourceUtils, MessageResourceUtilsMock}
import org.mockito.ArgumentMatchers.{any, anyString}
import org.mockito.Mockito.when
import org.mockito.invocation.InvocationOnMock
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{FlatSpec, Matchers}
import scaldi.{Injector, Module}
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
class ServiceSiTests extends FlatSpec with Matchers {
"ServiceSI" should "create correct service" in new ServiceMocks {
when(service.validate()).thenReturn(ArrayBuffer[String]())
serviceSI.create(service) shouldBe Created
serviceStorage.toSet shouldBe (initServiceStorage + serviceDomain)
}
it should "not create incorrect service" in new ServiceMocks {
val errors = ArrayBuffer("Not valid")
when(service.validate()).thenReturn(errors)
serviceSI.create(service) shouldBe NotCreated(errors)
serviceStorage.toSet shouldBe initServiceStorage
}
it should "give all services" in new ServiceMocks {
serviceSI.getAll().toSet shouldBe services.toSet
}
it should "give service when it exists" in new ServiceMocks {
serviceStorage += serviceDomain
services += service
serviceSI.get(serviceName) shouldBe Some(service)
}
it should "not give service when it does not exists" in new ServiceMocks {
serviceSI.get(nonExistsServiceName) shouldBe empty
}
it should "give empty arrays when service does not have related streams and instances" in
new ServiceMocksWithRelated {
serviceSI.getRelated(serviceWithoutRelatedName) shouldBe
Right((mutable.Buffer.empty[String], mutable.Buffer.empty[String]))
}
it should "give related streams when service has them" in new ServiceMocksWithRelated {
val expected = Right((onlyStreamsRelatedNames.toSet, mutable.Buffer.empty[String]))
val related = serviceSI.getRelated(serviceWithRelatedOnlyStreamsName)
.map { case (streams, instances) => (streams.toSet, instances) }
related shouldBe expected
}
it should "give related instances when service has them" in new ServiceMocksWithRelated {
val expected = Right((mutable.Buffer.empty[String], onlyInstancesRelatedNames.toSet))
val related = serviceSI.getRelated(serviceWithRelatedOnlyInstancesName)
.map { case (streams, instances) => (streams, instances.toSet) }
related shouldBe expected
}
it should "give related streams and instances when service has them" in new ServiceMocksWithRelated {
val expected = Right((bothStreamsRelatedNames.toSet, bothInstancesRelatedNames.toSet))
val related = serviceSI.getRelated(serviceWithRelatedBothName)
.map { case (streams, instances) => (streams.toSet, instances.toSet) }
related shouldBe expected
}
it should "tell that service does not exists in getRelated()" in new ServiceMocksWithRelated {
serviceSI.getRelated(nonExistsServiceName) shouldBe Left(false)
}
it should "delete service when it does not have related streams and instances" in new ServiceMocksWithRelated {
serviceSI.delete(serviceWithoutRelatedName) shouldBe Deleted
serviceStorage.toSet shouldBe (initServiceStorage - serviceWithoutRelatedDomain)
}
it should "not delete service when it does not exists" in new ServiceMocksWithRelated {
serviceSI.delete(nonExistsServiceName) shouldBe EntityNotFound
serviceStorage.toSet shouldBe initServiceStorage
}
it should "not delete service when it has related streams" in new ServiceMocksWithRelated {
val deletionError = s"rest.services.service.cannot.delete.due.to.streams:$serviceWithRelatedOnlyStreamsName"
serviceSI.delete(serviceWithRelatedOnlyStreamsName) shouldBe DeletionError(deletionError)
serviceStorage.toSet shouldBe initServiceStorage
}
it should "not delete service when it has related instances" in new ServiceMocksWithRelated {
val deletionError = s"rest.services.service.cannot.delete.due.to.instances:$serviceWithRelatedOnlyInstancesName"
serviceSI.delete(serviceWithRelatedOnlyInstancesName) shouldBe DeletionError(deletionError)
serviceStorage.toSet shouldBe initServiceStorage
}
it should "not delete service when it has related streams and instances" in new ServiceMocksWithRelated {
val deletionError = s"rest.services.service.cannot.delete.due.to.streams:$serviceWithRelatedBothName"
serviceSI.delete(serviceWithRelatedBothName) shouldBe DeletionError(deletionError)
serviceStorage.toSet shouldBe initServiceStorage
}
trait ServiceMocks extends MockitoSugar {
val nonExistsServiceName = "non-exist-service"
val serviceName = "service-name"
val serviceDomain = mock[ServiceDomain]
when(serviceDomain.name).thenReturn(serviceName)
val service = mock[Service]
when(service.name).thenReturn(serviceName)
when(service.to()).thenReturn(serviceDomain)
val initServiceStorageSize = 10
val serviceStorage: mutable.Buffer[ServiceDomain] = Range(0, initServiceStorageSize).map { _ =>
val serviceDomain = mock[ServiceDomain]
when(serviceDomain.name).thenReturn(UUID.randomUUID().toString)
serviceDomain
}.toBuffer
val initServiceStorage: Set[ServiceDomain] = serviceStorage.toSet
val services = serviceStorage.map { serviceDomain =>
val service = mock[Service]
val serviceName = serviceDomain.name
when(service.name).thenReturn(serviceName)
service
}
val serviceRepository = mock[GenericMongoRepository[ServiceDomain]]
    when(serviceRepository.getAll).thenReturn(serviceStorage)
when(serviceRepository.save(any[ServiceDomain]()))
.thenAnswer((invocationOnMock: InvocationOnMock) => {
serviceStorage += invocationOnMock.getArgument[ServiceDomain](0)
})
when(serviceRepository.delete(anyString()))
.thenAnswer((invocationOnMock: InvocationOnMock) => {
val serviceName = invocationOnMock.getArgument[String](0)
serviceStorage -= serviceStorage.find(_.name == serviceName).get
})
when(serviceRepository.get(anyString()))
.thenAnswer((invocationOnMock: InvocationOnMock) => {
val serviceName = invocationOnMock.getArgument[String](0)
serviceStorage.find(_.name == serviceName)
})
val connectionRepository = mock[ConnectionRepository]
when(connectionRepository.getServiceRepository).thenReturn(serviceRepository)
val createService = mock[ServiceCreator]
when(createService.from(any[ServiceDomain])(any[Injector]))
.thenAnswer((invocationOnMock: InvocationOnMock) => {
val serviceDomain = invocationOnMock.getArgument[ServiceDomain](0)
services.find(_.name == serviceDomain.name).get
})
val module = new Module {
bind[ConnectionRepository] to connectionRepository
bind[MessageResourceUtils] to MessageResourceUtilsMock.messageResourceUtils
bind[ServiceCreator] to createService
}
val injector = module.injector
val serviceSI = new ServiceSI()(injector)
}
trait ServiceMocksWithRelated extends ServiceMocks {
val serviceWithoutRelatedName = "service-without-related"
val serviceWithoutRelatedDomain = mock[ServiceDomain]
when(serviceWithoutRelatedDomain.name).thenReturn(serviceWithoutRelatedName)
val serviceWithRelatedOnlyStreamsName = "service-with-related-streams"
val serviceWithRelatedOnlyStreamsDomain = mock[ServiceDomain]
when(serviceWithRelatedOnlyStreamsDomain.name).thenReturn(serviceWithRelatedOnlyStreamsName)
val streamsForOneService = 5
val onlyStreamsRelated = Range(0, streamsForOneService).map { _ =>
val stream = mock[StreamDomain]
when(stream.name).thenReturn(UUID.randomUUID().toString)
when(stream.service).thenReturn(serviceWithRelatedOnlyStreamsDomain)
stream
}
val onlyStreamsRelatedNames = onlyStreamsRelated.map(_.name)
val serviceWithRelatedOnlyInstancesName = "service-with-related-instances"
val serviceWithRelatedOnlyInstancesDomain = mock[ZKServiceDomain]
when(serviceWithRelatedOnlyInstancesDomain.name).thenReturn(serviceWithRelatedOnlyInstancesName)
val instancesForOneService = 5
val onlyInstancesRelated = Range(0, instancesForOneService).map { _ =>
val instance = mock[InstanceDomain]
when(instance.name).thenReturn(UUID.randomUUID().toString)
when(instance.coordinationService).thenReturn(serviceWithRelatedOnlyInstancesDomain)
instance
}
val onlyInstancesRelatedNames = onlyInstancesRelated.map(_.name)
val serviceWithRelatedBothName = "service-with-both-related"
val serviceWithRelatedBothDomain = mock[ZKServiceDomain]
when(serviceWithRelatedBothDomain.name).thenReturn(serviceWithRelatedBothName)
val bothStreamsRelated = Range(0, streamsForOneService).map { _ =>
val stream = mock[StreamDomain]
when(stream.name).thenReturn(UUID.randomUUID().toString)
when(stream.service).thenReturn(serviceWithRelatedBothDomain)
stream
}
val bothStreamsRelatedNames = bothStreamsRelated.map(_.name)
val bothInstancesRelated = Range(0, instancesForOneService).map { _ =>
val instance = mock[InstanceDomain]
when(instance.name).thenReturn(UUID.randomUUID().toString)
when(instance.coordinationService).thenReturn(serviceWithRelatedBothDomain)
instance
}
val bothInstancesRelatedNames = bothInstancesRelated.map(_.name)
val allStreams = (onlyStreamsRelated ++ bothStreamsRelated).toBuffer
val streamRepository = mock[GenericMongoRepository[StreamDomain]]
when(streamRepository.getAll).thenReturn(allStreams)
when(connectionRepository.getStreamRepository).thenReturn(streamRepository)
val allInstances = (onlyInstancesRelated ++ bothInstancesRelated).toBuffer
val instanceRepository = mock[GenericMongoRepository[InstanceDomain]]
when(instanceRepository.getAll).thenReturn(allInstances)
when(connectionRepository.getInstanceRepository).thenReturn(instanceRepository)
serviceStorage ++= mutable.Buffer(
serviceWithoutRelatedDomain,
serviceWithRelatedOnlyStreamsDomain,
serviceWithRelatedOnlyInstancesDomain,
serviceWithRelatedBothDomain)
override val initServiceStorage: Set[ServiceDomain] = serviceStorage.toSet
override val serviceSI = new ServiceSI()(injector)
}
}
|
bwsw/sj-platform
|
core/sj-common/src/test/scala-2.12/com/bwsw/sj/common/si/ServiceSiTests.scala
|
Scala
|
apache-2.0
| 11,612 |
def mapF[B](f: A => B): Liist[B] =
foldRight(Niil: Liist[B]){ case (a,v) => f(a) ::: v }
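// mapF rebuilds the Liist via foldRight, consing each f(a) onto the mapped tail with :::.
// Hypothetical check (assumes the exercise's Liist companion provides an apply constructor):
//   Liist(1, 2, 3).mapF(_ * 2)  // => Liist(2, 4, 6)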
|
grzegorzbalcerek/scala-exercises
|
Liist/stepLiistMapF.scala
|
Scala
|
bsd-2-clause
| 91 |