code        stringlengths   5 - 1M
repo_name   stringlengths   5 - 109
path        stringlengths   6 - 208
language    stringclasses   1 value
license     stringclasses   15 values
size        int64           5 - 1M
package io.coding.me.m2p2.core.analyzer

import java.io.File
import java.io.FileInputStream

import scala.collection.JavaConversions.asScalaIterator
import scala.collection.mutable.MutableList
import scala.util.Try

import com.typesafe.scalalogging.LazyLogging

import io.coding.me.m2p2.core.internal.extension.StringExtensions.string2extension
import io.coding.me.m2p2.core.internal.resource.TryWithResource

import javax.xml.stream.XMLInputFactory
import javax.xml.stream.events.Attribute
import javax.xml.stream.events.EndElement
import javax.xml.stream.events.StartElement

/**
 * Naive representation of a P2 installable unit
 */
case class P2Unit(id: String, version: String) {

  require(id.isNotNullOrEmpty(), "Id of a P2 unit must not be empty")
  require(version.isNotNullOrEmpty(), "Version of a P2 unit must not be empty")
}

/**
 * Typical format:
 * {{{
 * <?xml version='1.0' encoding='UTF-8'?>
 * <units size='1'>
 *   <unit id='example-bundle' version='0.1.0.201507201658'>
 *     <update id='example-bundle' range='[0.0.0,0.1.0.201507201658)' severity='0'/>
 *     <properties size='5'>
 *       <property name='org.eclipse.equinox.p2.name' value='Bundle'/>
 *       <property name='org.eclipse.equinox.p2.provider' value='Eclipse.org'/>
 *       <property name='maven-groupId' value='example.group'/>
 *       <property name='maven-artifactId' value='example-bundle'/>
 *       <property name='maven-version' value='0.1.0-SNAPSHOT'/>
 *     </properties>
 *     <provides size='3'>
 *       <provided namespace='org.eclipse.equinox.p2.iu' name='example-bundle' version='0.1.0.201507201658'/>
 *       <provided namespace='osgi.bundle' name='example-bundle' version='0.1.0.201507201658'/>
 *       <provided namespace='org.eclipse.equinox.p2.eclipse.type' name='bundle' version='1.0.0'/>
 *     </provides>
 *     <requires size='3'>
 *       <required namespace='osgi.bundle' name='org.eclipse.core.runtime' range='0.0.0'/>
 *       <required namespace='osgi.bundle' name='org.eclipse.swt' range='3.6.0'/>
 *       <required namespace='osgi.bundle' name='org.eclipse.ui' range='3.6.0'/>
 *     </requires>
 *     <artifacts size='1'>
 *       <artifact classifier='osgi.bundle' id='example-bundle' version='0.1.0.201507201658'/>
 *     </artifacts>
 *     <touchpoint id='org.eclipse.equinox.p2.osgi' version='1.0.0'/>
 *     <touchpointData size='1'>
 *       <instructions size='1'>
 *         <instruction key='manifest'>
 *           Bundle-SymbolicName: example-bundle;singleton:=true&#xA;Bundle-Version: 0.1.0.201507201658&#xA;
 *         </instruction>
 *       </instructions>
 *     </touchpointData>
 *   </unit>
 * </units>
 * }}}
 */
object P2Metadata extends FileNameVersionExtractor with LazyLogging {

  import io.coding.me.m2p2.core.internal.extension.StringExtensions._

  /**
   * Creates a list of P2 unit representations based on a file, typically a p2content.xml file.
   */
  def apply(file: File): Try[Option[Set[P2Unit]]] = extractMavenVersion(file).flatMap { mavenVersion =>

    TryWithResource(new FileInputStream(file)).map { inputStream =>

      val p2units = MutableList.empty[P2Unit]
      val factory = XMLInputFactory.newInstance()
      val r = factory.createXMLEventReader(inputStream)

      var id: Option[String] = None
      var version: Option[String] = None

      while (r.hasNext()) {
        val event = r.nextEvent()

        event match {

          case s: StartElement if s.getName.getLocalPart == "unit" =>
            val attributes = s.getAttributes.toList.asInstanceOf[List[Attribute]]

            id = attributes.find(_.getName.getLocalPart == "id").map(_.getValue).headOption
            version = attributes.find(_.getName.getLocalPart == "version").map(_.getValue).headOption

          case s: EndElement if s.getName.getLocalPart == "unit" =>
            if (id.isDefined && version.isDefined) {
              p2units += P2Unit(id.get, version.get)
            } else {
              logger.warn(s"Content file ${file} seems to be invalid. Can't parse XML properly.")
            }
            id = None
            version = None

          case _ => // noop
        }
      }

      logger.debug(s"Found ${p2units.size} installable units in file ${file}")
      Some(p2units.toSet)
    }
  }
}
tssp/maven-p2-view
core/src/main/scala/io/coding/me/m2p2/core/analyzer/P2Metadata.scala
Scala
mit
4,287
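A hypothetical caller of the parser above (the file name and the printing are illustrative only, not part of the original source):

import java.io.File
import io.coding.me.m2p2.core.analyzer.P2Metadata

// Parse a p2content.xml and list the installable units it declares.
P2Metadata(new File("p2content.xml")) match {
  case scala.util.Success(Some(units)) => units.foreach(u => println(s"${u.id} ${u.version}"))
  case scala.util.Success(None)        => println("no units found")
  case scala.util.Failure(t)           => println(s"could not parse: ${t.getMessage}")
}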
// Starter Code for Exercise 4
// From "Parameterized Types" atom
import com.atomicscala.AtomicTest._

val weather = Vector(100, 80, 20, 100, 20)

historicalData(weather) is "Sunny=2, Mostly Sunny=1, Mostly Cloudy=2"
P7h/ScalaPlayground
Atomic Scala/atomic-scala-solutions/30_ParameterizedTypes/Starter-4.scala
Scala
apache-2.0
216
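The starter leaves historicalData undefined; one possible solution (an assumption, not the book's official answer) maps raw readings to labels and counts them in first-seen order, which reproduces the expected string for the sample vector:

def historicalData(data: Vector[Int]): String = {
  def label(n: Int) = n match {
    case 100 => "Sunny"
    case 80  => "Mostly Sunny"
    case _   => "Mostly Cloudy"
  }
  // Keep the first-seen order of labels, then count each one:
  val labels = data.map(label)
  labels.distinct.map(l => s"$l=${labels.count(_ == l)}").mkString(", ")
}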
import sbt._

object BuildTarget {
  private sealed trait DeploymentRuntime
  private case object ConductR extends DeploymentRuntime
  private case object Kubernetes extends DeploymentRuntime

  private val deploymentRuntime: DeploymentRuntime = sys.props.get("buildTarget") match {
    case Some(v) if v.toLowerCase == "conductr"   => ConductR
    case Some(v) if v.toLowerCase == "kubernetes" => Kubernetes
    case Some(v) =>
      sys.error(s"The build target $v is not supported. Only supports 'conductr' or 'kubernetes'")
    case None =>
      ConductR
  }

  val additionalLibraryDependencies: Seq[ModuleID] =
    if (deploymentRuntime == Kubernetes)
      Seq(Library.serviceLocatorDns)
    else
      Seq.empty
}
TimMoore/activator-lagom-java-chirper-jpa
project/BuildTarget.scala
Scala
apache-2.0
720
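Because the switch is read from a JVM system property, it is supplied on the sbt command line; for example (the task name is illustrative):

sbt -DbuildTarget=kubernetes publish

When the flag is absent, the match falls through to the None case and the build defaults to ConductR.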
package scodec.codecs import org.scalacheck.{Arbitrary, Gen} import scodec._ import scodec.bits._ class MultiplexedCodecTest extends CodecSuite { "listDelimited codec" should { val codec = listDelimited(BitVector(' '), ascii) val bits = ascii.encode("i am delimited").require val list = List("i", "am", "delimited") "encode a list" in { codec.encode(list).require shouldBe bits } "decode to a list" in { codec.decode(bits).require.value shouldBe list codec.decode(bits).require.remainder shouldBe BitVector.empty } } "vectorMultiplexed codec" should { "be performant" in { val delimiter = BitVector.empty // a simplistic example to test performance of mux/deMux // a realistic example would have a more complex deMux (potentially resulting in exponential time complexities) val codec = vectorMultiplexed( _ ++ delimiter ++ _, bits => (bits.compact, BitVector.empty), int32) val trials = 10 val sizes = List(10, 100, 1000, 10000, 100000) val results = (1 to trials).map { trial => sizes map { size => val vec = definedSamples(Gen.listOfN(size, Arbitrary.arbitrary[Int]).map { _.toVector }).head val (encoded, encodeTime) = time { codec.encode(vec).require } // info(s"$trial - encoding $size took $encodeTime") val (decoded, decodeTime) = time { codec.decode(encoded).require.value } // info(s"$trial - decoding $size took $decodeTime") decoded shouldBe vec encodeTime + decodeTime } }.drop(1) // drop first iteration to allow for JIT val averages = results.reduceLeft((x, y) => (x zip y) map { case (z, b) => z + b }).map { _ / results.size.toLong } info("Roundtrip averages:") (sizes zip averages).foreach { case (s, average) => info(s" $s - $average") } } } }
alissapajer/scodec
shared/src/test/scala/scodec/codecs/MultiplexedCodecTest.scala
Scala
bsd-3-clause
1,989
import sbt.Keys._
import sbt._

object DebuggerTest {
  /** Test-specific project settings. */
  val settings = Seq(
    libraryDependencies ++= Seq(
      "org.slf4j" % "slf4j-api" % "1.7.5",
      "org.slf4j" % "slf4j-log4j12" % "1.7.5" % "test,it",
      "log4j" % "log4j" % "1.2.17" % "test,it",
      "org.scalatest" %% "scalatest" % "3.0.0" % "test,it",
      "org.scalamock" %% "scalamock-scalatest-support" % "3.4.2" % "test,it"
    )
  )
}
ensime/scala-debugger
project/DebuggerTest.scala
Scala
apache-2.0
449
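These dependencies are scoped to test,it, which presupposes that the consuming project has the IntegrationTest configuration wired in; a minimal sketch of that wiring (the project name is assumed, not taken from the original build):

// build.sbt fragment (sbt auto-imports sbt._ and sbt.Keys._ here)
lazy val debugger = (project in file("."))
  .configs(IntegrationTest)           // enables the "it" configuration
  .settings(Defaults.itSettings: _*)  // default integration-test tasks
  .settings(DebuggerTest.settings: _*)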
package vegas.data

import java.text.SimpleDateFormat

/**
 * Base trait for transforming Any values into primitive types that are accepted by vega-lite. The default
 * implementation does a pass-through for primitives, converts dates to ISO8601, and uses toString for everything else.
 */
trait ValueTransformer {

  def transform(values: Map[String, Any]): Map[String, Any] =
    values.map { case (k, v) => (k, transformValue(v)) }

  /**
   * Transforms Any values into one of the supported primitive types
   */
  def transformValue(value: Any): Any
}

object DefaultValueTransformer extends ValueTransformer {
  val df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss")

  def transformValue(v: Any): Any = v match {
    case null => null
    case st if SimpleTypeUtils.isSimpleType(st) => st
    case d: java.sql.Date => d.toString
    case d: java.util.Date => df.format(d)
    case Some(x: Any) => transformValue(x)
    case None => null
    case _ => v.toString
  }
}
aishfenton/Vegas
core/src/main/scala/vegas/data/ValueTransformer.scala
Scala
mit
974
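A sketch of what the default transformer does to a mixed row (illustrative; assumes SimpleTypeUtils treats String and Int as simple types, and a UTC default time zone for the date rendering):

import vegas.data.DefaultValueTransformer

val row = Map(
  "name"  -> "n1",                   // simple type: passed through
  "count" -> Some(3),                // Option: unwrapped
  "when"  -> new java.util.Date(0L), // java.util.Date: formatted as ISO8601
  "extra" -> None)                   // None: becomes null

DefaultValueTransformer.transform(row)
// => Map(name -> n1, count -> 3, when -> 1970-01-01T00:00:00, extra -> null)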
/*
 * Copyright 2022 HM Revenue & Customs
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package iht.models.des.ihtReturn

import play.api.libs.json.Json

case class SpousesEstate(domiciledInUk: Option[Boolean] = None,
                         whollyExempt: Option[Boolean] = None,
                         jointAssetsPassingToOther: Option[Boolean] = None,
                         otherGifts: Option[Boolean] = None,
                         agriculturalOrBusinessRelief: Option[Boolean] = None,
                         giftsWithReservation: Option[Boolean] = None,
                         benefitFromTrust: Option[Boolean] = None,
                         unusedNilRateBand: Option[BigDecimal] = None)

object SpousesEstate {
  implicit val formats = Json.format[SpousesEstate]
}
hmrc/iht-frontend
app/iht/models/des/ihtReturn/SpousesEstate.scala
Scala
apache-2.0
1,293
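Json.format derives both Reads and Writes, so a round trip is one line each way; None fields are simply omitted from the output (values illustrative):

import play.api.libs.json.Json
import iht.models.des.ihtReturn.SpousesEstate

val estate = SpousesEstate(domiciledInUk = Some(true), unusedNilRateBand = Some(BigDecimal(100000)))
Json.toJson(estate)
// => {"domiciledInUk":true,"unusedNilRateBand":100000}
Json.toJson(estate).as[SpousesEstate] == estate
// => true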
package scalan.primitives import scala.annotation.unchecked.uncheckedVariance import scalan._ import scala.reflect.runtime.universe._ import scalan.common.OverloadHack.{Overloaded2, Overloaded1} trait StructItems extends ViewsDsl with Entities { self: StructsDsl with Scalan => trait StructItem[@uncheckedVariance +Val, Schema <: Struct] extends Def[StructItem[Val @uncheckedVariance, Schema]] { def eVal: Elem[Val @uncheckedVariance] def eSchema: Elem[Schema] def key: Rep[StructKey[Schema]] def value: Rep[Val] } abstract class StructItemBase[Val, Schema <: Struct] (val key: Rep[StructKey[Schema]], val value: Rep[Val]) (implicit val eVal: Elem[Val], val eSchema: Elem[Schema]) extends StructItem[Val, Schema] } trait StructItemsDsl extends impl.StructItemsAbs { self: StructsDsl with Scalan => def struct_getItem[S <: Struct](s: Rep[S], i: Rep[Int])(implicit eS: Elem[S]): Rep[StructItem[_,S]] def struct_getItem[S <: Struct](s: Rep[S], i: Int)(implicit eS: Elem[S], o1: Overloaded1): Rep[StructItem[_,S]] = { val names = eS.fieldNames val value = s(names(i)) val key = IndexStructKey[S](i) StructItemBase(key, value)(eS.fields(i)._2.asElem[Any], eS) } def struct_setItem[S <: Struct](s: Rep[S], i: Rep[Int], v: Rep[_])(implicit eS: Elem[S]): Rep[S] = { updateField(s, eS.fieldNames(i.asValue), v) } trait StructItemFunctor[S <: Struct] extends Functor[({type f[x] = StructItem[x,S]})#f] { implicit def eS: Elem[S] def tag[T](implicit tT: WeakTypeTag[T]) = weakTypeTag[StructItem[T,S]] def lift[T](implicit eT: Elem[T]) = element[StructItem[T,S]] def unlift[T](implicit eFT: Elem[StructItem[T,S]]) = eFT.asInstanceOf[StructItemElem[T,S,_]].eVal def getElem[T](fa: Rep[StructItem[T,S]]) = fa.selfType1 def unapply[T](e: Elem[_]) = e match { case e: StructItemElem[_, _, _] => Some(e.asElem[StructItem[T,S]]) case _ => None } def map[A:Elem,B:Elem](xs: Rep[StructItem[A,S]])(f: Rep[A] => Rep[B]) = StructItemBase(xs.key, f(xs.value)) } implicit def structItemContainer[S <: Struct : Elem]: Functor[({type f[x] = StructItem[x,S]})#f] = new StructItemFunctor[S] { def eS = element[S] } implicit class StructElemExtensionsForStructItem[S <: Struct](eS: Elem[S]) { def getItemElem[V](i: Int): Elem[StructItem[V, S]] = { val eV = eS(i).asElem[V] structItemElement(eV, eS) } def getItemElem[V](fieldName: String): Elem[StructItem[V, S]] = { val eV = eS(fieldName).asElem[V] structItemElement(eV, eS) } } implicit class StructExtensionsForStructItem[S <: Struct](s: Rep[S])(implicit eS: Elem[S]) { def getItem[A](i: Int): Rep[StructItem[A, S]] = struct_getItem(s, i).asRep[StructItem[A,S]] def getItem[A](i: Rep[Int]): Rep[StructItem[A, S]] = struct_getItem(s, i).asRep[StructItem[A,S]] def getItem[A](k: Rep[StructKey[S]])(implicit o: Overloaded2): Rep[StructItem[A,S]] = struct_getItem(s, k.index).asRep[StructItem[A,S]] def setItem(i: Rep[Int], v: Rep[_]): Rep[S] = struct_setItem(s, i, v) def setItem(k: Rep[StructKey[S]], v: Rep[_])(implicit o: Overloaded2): Rep[S] = struct_setItem(s, k.index, v) } } trait StructItemsDslStd extends impl.StructItemsStd {self: StructsDsl with ScalanStd => def struct_getItem[S <: Struct](s: Rep[S], i: Rep[Int])(implicit eS: Elem[S]): Rep[StructItem[_,S]] = struct_getItem(s, i) } trait StructItemsDslExp extends impl.StructItemsExp {self: StructsDsl with ScalanExp => def struct_getItem[S <: Struct](s: Rep[S], i: Rep[Int])(implicit eS: Elem[S]): Rep[StructItem[_,S]] = i match { case Def(Const(i: Int)) => struct_getItem(s, i) case _ => ??? } }
PCMNN/scalan-ce
core/src/main/scala/scalan/primitives/StructItems.scala
Scala
apache-2.0
3,724
package jp.co.bizreach.play2handlebars

import org.scalatest.FunSpec

class HelpersSpec extends FunSpec with FakePlayHelper {

  case class Who(who: String)

  describe("Handlebars Iterable aware helper") {
    describe("when the template has each and feed scala.Iterable") {
      it("should list values") {
        runApp(PlayApp()) {
          val sList = List(Who("Foo"), Who("Bar"))
          assert(HBS("test-template-each", "list" -> sList).toString === "Hello FooBar!")
        }
      }
    }

    describe("when the template has each and feed java.lang.Iterable") {
      it("should list values") {
        runApp(PlayApp()) {
          import scala.collection.JavaConverters._
          val jList: java.util.List[Who] = List(Who("Java"), Who("Mocha")).asJava
          assert(HBS("test-template-each", "list" -> jList).toString === "Hello JavaMocha!")
        }
      }
    }
  }
}
bizreach/play2-handlebars
src/test/scala/jp/co/bizreach/play2handlebars/HelpersSpec.scala
Scala
apache-2.0
895
package com.cloudwick.generator.logEvents

/**
 * Class for wrapping default command line options
 * @author ashrith
 */
case class OptionsConfig(
  awsAccessKey: String = "",
  awsSecretKey: String = "",
  awsEndPoint: String = "",
  eventsPerSec: Int = 0,
  destination: String = "file",
  kafkaBrokerList: String = "localhost:9092",
  kafkaTopicName: String = "logs",
  kinesisStreamName: String = "generator",
  kinesisShardCount: Int = 1,
  outputFormat: String = "string",
  filePath: String = "/tmp",
  fileRollSize: Int = Int.MaxValue, // in bytes
  totalEvents: Long = 1000,
  flushBatch: Int = 10000,
  ipSessionCount: Int = 25,
  ipSessionLength: Int = 50,
  threadsCount: Int = 1,
  threadPoolSize: Int = 10,
  logLevel: String = "INFO"
)
Arpit1286/generator
src/main/scala/com/cloudwick/generator/logEvents/OptionsConfig.scala
Scala
apache-2.0
750
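OptionsConfig is shaped like the config object of a scopt-style command-line parser; the pairing with scopt is an assumption here (this file alone does not confirm it), but a fragment of such a parser would look like:

val parser = new scopt.OptionParser[OptionsConfig]("generator") {
  opt[Int]("eventsPerSec")
    .action((x, c) => c.copy(eventsPerSec = x))
    .text("events generated per second (0 = unthrottled)")
  opt[String]("destination")
    .action((x, c) => c.copy(destination = x))
    .text("one of: file, kafka, kinesis")
}

// Anything not given on the command line keeps the defaults above:
parser.parse(args, OptionsConfig()).foreach { config =>
  // run the generator with `config`
}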
package vn.myfeed.parser.processor

import vn.myfeed.parser.model.{LinkElement, ArticleElement, MediaElement, Article}
import vn.myfeed.parser.util.VideoSitePattern

/**
 * This class marks a media element as potential when its previous or next element is potential.
 *
 * @author Nguyen Duc Dung
 * @since 12/24/12 9:07 AM
 */
class MediaBaseFilter(minTitleLength: Int = 2) extends Processor {

  def process(implicit article: Article) {
    var previous: Option[ArticleElement] = None
    var next: Option[ArticleElement] = None

    for (i <- 0 until article.elements.size) {
      if (i - 1 >= 0) previous = Some(article.elements(i - 1))
      if (i + 1 <= article.elements.size - 1) next = Some(article.elements(i + 1))
      val element = article.elements(i)

      if (element.isInstanceOf[MediaElement]) {
        previous.map(prevElement => {
          next.map(nextElement => {
            if (prevElement.isInstanceOf[MediaElement] && nextElement.isInstanceOf[MediaElement]) {
              prevElement.isPotential = true
              element.isPotential = true
              nextElement.isPotential = true
            }
          })
        })
      }

      if (!element.isPotential && element.isInstanceOf[MediaElement]) {
        previous.map(prevElement => if (prevElement.isPotential) element.isPotential = true)
        next.map(nextElement => if (nextElement.isPotential) element.isPotential = true)
      }

      if (element.isPotential && element.isInstanceOf[MediaElement]) {
        // The image title should be marked as a potential element.
        next.map(nextElement =>
          if (element.isPotential && !nextElement.isInstanceOf[LinkElement]
            && nextElement.text.length >= minTitleLength) {
            nextElement.isPotential = true
          })
      }
    }

    // Mark media elements from known video sites as potential.
    article.mediaElements.foreach(el => {
      val tagName = el.tagName
      val src = el.src
      if (tagName == "iframe" && VideoSitePattern.matches(src)) {
        el.isPotential = true
      }
    })
  }
}
SunriseSoftVN/hayhayblog
parser/app/vn/myfeed/parser/processor/MediaBaseFilter.scala
Scala
gpl-2.0
2,078
package utils

import scala.util.Random
import play.api.Play
import models._

/**
 * Adds some functionality to existing classes, such as List. Mainly
 * used by the TweetManager.
 */
object Enrichments {

  /** Enrich a list with some functionalities */
  implicit class RichList[T](lst: List[T]) {

    /** Shuffle the list */
    def shuffle: List[T] = {
      val rds = new Random(System.currentTimeMillis)
      lst.map(entry => (entry, rds.nextInt)).sortBy(x => x._2).map(entry => entry._1)
    }

    /** Split the list into 'nb' parts of equal size (or a bit less, depending on the size of the list) */
    def split(nb: Int) = {
      val partSize = Math.ceil(lst.size / nb.toDouble).toInt
      def loop(lst: List[T], out: List[List[T]]): List[List[T]] = lst match {
        case Nil => out
        case _   => loop(lst.drop(partSize), out :+ lst.take(partSize))
      }
      loop(lst, Nil)
    }
  }

  /**
   * Get a configuration value
   * @param key The key to get
   * @param error The error message to return in case the configuration is not found
   *
   * @return the String corresponding to the key
   */
  def getConfString(key: String, error: String) =
    Play.current.configuration.getString(key).getOrElse(sys.error(error))

  /**
   * Get a configuration value
   * @param key The key to get
   * @param error The error message to return in case the configuration is not found
   *
   * @return the Int corresponding to the key
   */
  def getConfInt(key: String, error: String) =
    Play.current.configuration.getInt(key).getOrElse(sys.error(error))

  /**
   * Get a configuration value
   * @param key The key to get
   * @param error The error message to return in case the configuration is not found
   *
   * @return the Double corresponding to the key
   */
  def getConfDouble(key: String, error: String) =
    Play.current.configuration.getDouble(key).getOrElse(sys.error(error))

  /**
   * Get a configuration value
   * @param key The key to get
   * @param error The error message to return in case the configuration is not found
   *
   * @return the Boolean corresponding to the key
   */
  def getConfBoolean(key: String, error: String) =
    Play.current.configuration.getBoolean(key).getOrElse(sys.error(error))

  /**
   * Multistep counter. The inner counter must be increased 'step' times in order to change
   * the output.
   * @param step The step of the multistep counter.
   */
  case class MultiCounter(step: Int) {
    var inCount = 0
    var outCount = 0

    /** @return the next count */
    def incr: Int =
      if ((inCount + 1) % step == 0) {
        val ret = outCount
        outCount = outCount + 1
        inCount = 0
        ret
      } else {
        inCount = inCount + 1
        outCount
      }
  }

  /** Enrich a list of clusters (HC) to return some JSON */
  implicit class RichClusterList(lst: List[Set[Cluster]]) {

    val maxOpacity = getConfDouble("clustHC.opacityCorrector",
      "RichClusterList: no opacity corrector defined in conf for ClustHC.")

    def toJson = {
      def setToJson(set: Set[Cluster]): String = {
        // Leverage the tweet meter for opacity
        val maxTweetMeter = if (!set.isEmpty) set.map(c => c.tweetMeter).max else 1
        def clustToJson(clust: Cluster) =
          s"""{"x": ${clust.center._2},"y": ${clust.center._1}, "r": ${clust.center._2 + clust.radius}, "d": ${clust.tweetMeter / maxTweetMeter * maxOpacity}}"""
        if (!set.isEmpty)
          s"""{"centers": [${set.tail.foldLeft(clustToJson(set.head))((acc, s) => acc + "," + clustToJson(s))}]}"""
        else
          s"""{"centers": []}"""
      }
      s"""{"clusters": [${lst.tail.foldLeft(setToJson(lst.head))((acc, s) => acc + ", " + setToJson(s))}]}"""
    }
  }

  /** Enrich a list of superpixels (SLIC) to return some JSON */
  implicit class RichPixelList(lst: List[SuperPixel]) {
    def toJson = {
      def pixelToJson(pix: SuperPixel) =
        s"""{"x": ${pix.pos._1}, "y": ${pix.pos._2}, "r": ${pix.d}}"""
      s"""{"clusters": [{"centers": [${lst.tail.foldLeft(pixelToJson(lst.head))((acc, s) => acc + ", " + pixelToJson(s))}]}]}"""
    }
  }
}
TweetAggregator/CrossTalk
app/utils/Enrichments.scala
Scala
gpl-2.0
4,097
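A quick REPL-style demonstration of the two generic helpers above; the expected values follow from partSize = ceil(10 / 3) = 4 and from the counter advancing once every 'step' calls:

import utils.Enrichments._

(1 to 10).toList.split(3)
// => List(List(1, 2, 3, 4), List(5, 6, 7, 8), List(9, 10))

val c = MultiCounter(step = 3)
(1 to 7).map(_ => c.incr)
// => Vector(0, 0, 0, 1, 1, 1, 2)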
import org.scalatest.{BeforeAndAfter, FeatureSpec, OneInstancePerTest, Matchers}

class FakeTestSpec extends FeatureSpec with Matchers {

  info("As a User")
  info("I want to be able to test 1+1=2")

  feature("Validate Math") {
    scenario("test 1+1 = 2") {
      (1 + 1) shouldBe 2
    }
  }
}
telegraph/sbt-pipeline.playframework.g8
src/main/g8/component-test/src/test/scala/$package$/FakeTestSpec.scala
Scala
apache-2.0
301
package spatial.models.characterization import spatial.metadata._ import spatial.dsl._ import spatial._ import virtualized._ trait RegFiles extends Benchmarks { self: SpatialCompiler => case class RegFile1DOp[T:Num:Type](depth: scala.Int, len: scala.Int, p: scala.Int)(val N: scala.Int) extends Benchmark { val prefix: JString = s"${depth}_${len}_${p}" def eval(): SUnit = { val outs = List.fill(N){ ArgOut[T] } Accel { val rfs = List.fill(N){ RegFile.buffer[T](len) } if (depth == 1) { Foreach(0 until 100 par p) { i => rfs.foreach{ rf => rf.update(i, i.to[T]) } } } Foreach(0 until 1000) { _ => List.tabulate(depth) { d => Foreach(0 until 100 par p) { i => rfs.zip(outs).foreach{case (rf,out) => if (d > 0) rf.update(i, i.to[T]) else out := rf(i) } } } () } } } } case class RegFile2DOp[T:Num:Type](depth: scala.Int, rows: scala.Int, cols: scala.Int, p0: scala.Int, p1: scala.Int)(val N: scala.Int) extends Benchmark { val prefix: JString = s"${depth}_${rows}_${cols}_${p0}_${p1}" def eval(): SUnit = { val outs = List.fill(N)(ArgOut[T]) Accel { val rfs = List.fill(N){ RegFile.buffer[T](rows, cols) } if (depth == 1) { Foreach(0 until 100 par p0) { i => Foreach(0 until 100 par p1) { j => rfs.foreach{rf => rf.update(i, j, i.to[T]) } } } } Foreach(0 until 1000) { _ => List.tabulate(depth) { d => Foreach(0 until 100 par p0) { i => Foreach(0 until 100 par p1) { j => rfs.zip(outs).foreach{case (rf,out) => if (d > 0) rf.update(i, j, i.to[T]) else out := rf(i, j) } } } } () } } } } private val dims1d = List(2, 4, 8, 16, 32, 64) val dims2d = List( (2,2), (4,2), (4,4), (8,2), (8,4), (8,8), (16,2), (16,4), (32,2), (64,1), (64,2) ) //gens ::= dims2d.flatMap{case (rows,cols) => List.tabulate(3){depth => MetaProgGen("Reg16", Seq(100,200), RegFile2DOp[Int16](depth, rows, cols)) } } gens :::= dims1d.flatMap{len => List.tabulate(3){depth => MetaProgGen("RegFile1D", Seq(100,200), RegFile1DOp[Int32](depth+1, len, 1)) } } gens :::= dims2d.flatMap{case (rows,cols) => List.tabulate(3){depth => MetaProgGen("RegFile2D", Seq(100,200), RegFile2DOp[Int32](depth+1, rows, cols, 1, 1)) } } }
stanford-ppl/spatial-lang
spatial/core/src/spatial/models/characterization/RegFiles.scala
Scala
mit
2,561
// Copyright (C) 2019 MapRoulette contributors (see CONTRIBUTORS.md). // Licensed under the Apache License, Version 2.0 (see LICENSE). package org.maproulette.models import org.joda.time.DateTime import play.api.libs.json.{JsValue, Json, Reads, Writes} import play.api.libs.json.JodaWrites._ import play.api.libs.json.JodaReads._ /** * @author cuthbertm */ case class Point(lat: Double, lng: Double) case class PointReview(reviewStatus: Option[Int], reviewRequestedBy: Option[Int], reviewedBy: Option[Int], reviewedAt: Option[DateTime], reviewStartedAt: Option[DateTime]) /** * This is the clustered point that will be displayed on the map. The popup will contain the title * of object with a blurb or description of said object. If the object is a challenge then below * that will be a start button so you can jump into editing tasks in the challenge * * @param id The id of the object for this clustered point * @param owner The osm id of the owner of the object * @param ownerName The name of the owner * @param title The title of the object (or name) * @param parentId The id of the parent, Challenge if Task, and Project if Challenge * @param parentName The name of the parent * @param point The latitude and longitude of the point * @param blurb A short descriptive text for the object * @param modified The last time this set of points was modified * @param difficulty The difficulty level of this ClusteredPoint (if a challenge) * @param type The type of this ClusteredPoint * @param status The status of the task, only used for task points, ie. not challenge points * @param mappedOn The date this task was mapped * @param pointReview a PointReview instance with review data * @param bundleId id of bundle task is member of, if any * @param isBundlePrimary whether task is primary task in bundle (if a member of a bundle) */ case class ClusteredPoint(id: Long, owner: Long, ownerName: String, title: String, parentId: Long, parentName: String, point: Point, bounding: JsValue, blurb: String, modified: DateTime, difficulty: Int, `type`: Int, status: Int, suggestedFix: Option[String] = None, mappedOn: Option[DateTime], pointReview: PointReview, priority: Int, bundleId: Option[Long]=None, isBundlePrimary: Option[Boolean]=None) object ClusteredPoint { implicit val pointWrites: Writes[Point] = Json.writes[Point] implicit val pointReads: Reads[Point] = Json.reads[Point] implicit val pointReviewWrites: Writes[PointReview] = Json.writes[PointReview] implicit val pointReviewReads: Reads[PointReview] = Json.reads[PointReview] implicit val clusteredPointWrites: Writes[ClusteredPoint] = Json.writes[ClusteredPoint] implicit val clusteredPointReads: Reads[ClusteredPoint] = Json.reads[ClusteredPoint] }
mvexel/maproulette2
app/org/maproulette/models/ClusteredPoint.scala
Scala
apache-2.0
2,946
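With the implicit formats above in scope, the smaller case classes serialize directly; None fields are omitted from the JSON (values illustrative):

import play.api.libs.json.Json
import org.maproulette.models.{Point, PointReview}

Json.toJson(Point(38.9, -77.0))
// => {"lat":38.9,"lng":-77.0}

Json.toJson(PointReview(Some(1), Some(42), None, None, None))
// => {"reviewStatus":1,"reviewRequestedBy":42}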
import java.io.File import scala.io.Source import org.fedoraproject.javadeptools._ import org.fedoraproject.javadeptools.DatabaseBuilder import org.scalatest.FlatSpec import org.scalatest.Matchers import com.zaxxer.hikari.HikariDataSource import anorm.SQL import anorm.SqlStringInterpolation import anorm.sqlToSimple import javax.sql.DataSource import org.postgresql.Driver import org.scalatest.BeforeAndAfterAll import java.net.URI class Test extends FlatSpec with Matchers with BeforeAndAfterAll { def createLocalTestDb = { { val ds = new HikariDataSource() ds.setDriverClassName("org.postgresql.Driver") ds.setJdbcUrl("jdbc:postgresql:postgres") ds.setUsername("postgres") implicit val conn = ds.getConnection() SQL"""DROP DATABASE IF EXISTS "java-deptools-test"""".execute() SQL"""CREATE DATABASE "java-deptools-test"""".execute() conn.close() ds.close() } { val ds = new HikariDataSource() ds.setDriverClassName("org.postgresql.Driver") ds.setJdbcUrl("jdbc:postgresql:java-deptools-test") ds.setUsername("java-deptools") implicit val conn = ds.getConnection conn.setAutoCommit(false) SQL(Source.fromInputStream(getClass.getResourceAsStream("schema.sql"), "UTF-8").mkString).execute() conn.commit() conn.close() ds } } implicit val hds = createLocalTestDb implicit val conn = hds.getConnection override def afterAll { conn.close() hds.close() } def compareClasses(name: String, fileId: Int) { } DatabaseBuilder.buildFromURLs("test1", Seq( "args4j-2.32-3.fc24.noarch.rpm", "sat4j-2.3.5-8.fc24.noarch.rpm" ).map(getClass.getResource(_))) DatabaseBuilder.buildFromURLs("test2", Seq( "sat4j-2.3.5-8.fc24.noarch.rpm" ).map(getClass.getResource(_))) it should "list all collections" in { val collections = DAO.findAllCollections collections(0).name shouldEqual "test1" collections(1).name shouldEqual "test2" } it should "find package by name" in { val pkg = DAO.findPackageByName(1, "args4j") pkg shouldBe defined pkg.get.name shouldEqual "args4j" } it should "find file by id" in { val file = DAO.findFileById(1) file shouldBe defined } it should "list jars in package" in { val pkg = DAO.findPackageByName(2, "sat4j").get val jars = DAO.findFilesForPackage(pkg.id) jars.map(_.path) should contain theSameElementsInOrderAs Seq( "/usr/share/java/org.sat4j.core.jar", "/usr/share/java/org.sat4j.pb.jar") jars.map(_.classCount) should contain theSameElementsInOrderAs Seq( 216, 129) jars.map(_.packageId) should contain only pkg.id } // it should "list all packages" in { // // } it should "list the class tree" in { val expected = Source.fromInputStream(getClass.getResourceAsStream("args4j-classes.txt")).getLines().toArray val pkg = DAO.findPackageByName(1, "args4j").get val jars = DAO.findFilesForPackage(pkg.id) jars.size shouldEqual 1 val classes = DAO.findClassesForFile(jars(0).id).map(_.qualifiedName) classes should contain theSameElementsAs expected } }
msimacek/java-deptools
core/src/test/scala/Test.scala
Scala
apache-2.0
3,198
package mesosphere.marathon import java.io.{ IOException, FileInputStream } import com.google.protobuf.ByteString import mesosphere.chaos.http.HttpConf import org.apache.mesos.Protos.{ Credential, FrameworkInfo, FrameworkID } import org.apache.mesos.{ MesosSchedulerDriver, SchedulerDriver } import org.slf4j.LoggerFactory object MarathonSchedulerDriver { private[this] val log = LoggerFactory.getLogger(getClass) def newDriver(config: MarathonConf, httpConfig: HttpConf, newScheduler: MarathonScheduler, frameworkId: Option[FrameworkID]): SchedulerDriver = { val frameworkInfoBuilder = FrameworkInfo.newBuilder() .setName(config.frameworkName()) .setFailoverTimeout(config.mesosFailoverTimeout().toDouble) .setUser(config.mesosUser()) .setCheckpoint(config.checkpoint()) // Set the role, if provided. config.mesosRole.get.foreach(frameworkInfoBuilder.setRole) // Set the ID, if provided frameworkId.foreach(frameworkInfoBuilder.setId) if (config.webuiUrl.isSupplied) { frameworkInfoBuilder.setWebuiUrl(config.webuiUrl()) } else if (httpConfig.sslKeystorePath.isDefined) { // ssl enabled, use https frameworkInfoBuilder.setWebuiUrl(s"https://${config.hostname()}:${httpConfig.httpsPort()}") } else { // ssl disabled, use http frameworkInfoBuilder.setWebuiUrl(s"http://${config.hostname()}:${httpConfig.httpPort()}") } // set the authentication principal, if provided config.mesosAuthenticationPrincipal.get.foreach(frameworkInfoBuilder.setPrincipal) val credential: Option[Credential] = config.mesosAuthenticationPrincipal.get.map { principal => val credentialBuilder = Credential.newBuilder() .setPrincipal(principal) config.mesosAuthenticationSecretFile.get.foreach { secretFile => try { val secretBytes = ByteString.readFrom(new FileInputStream(secretFile)) credentialBuilder.setSecret(secretBytes) } catch { case cause: Throwable => throw new IOException(s"Error reading authentication secret from file [$secretFile]", cause) } } credentialBuilder.build() } val frameworkInfo = frameworkInfoBuilder.build() log.debug("Start creating new driver") val newDriver: MesosSchedulerDriver = credential match { case Some(cred) => new MesosSchedulerDriver(newScheduler, frameworkInfo, config.mesosMaster(), cred) case None => new MesosSchedulerDriver(newScheduler, frameworkInfo, config.mesosMaster()) } log.debug("Finished creating new driver") newDriver } }
quamilek/marathon
src/main/scala/mesosphere/marathon/MarathonSchedulerDriver.scala
Scala
apache-2.0
2,732
/* * Copyright 2016 The BigDL Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intel.analytics.bigdl.dllib.integration.torch import com.intel.analytics.bigdl.dllib.nn._ import com.intel.analytics.bigdl._ import com.intel.analytics.bigdl.dllib.tensor.{Storage, Tensor} import com.intel.analytics.bigdl.dllib.utils.RandomGenerator._ import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers} import scala.math._ import scala.util.Random import com.intel.analytics.bigdl.dllib.feature.dataset.LocalArrayDataSet import com.intel.analytics.bigdl.dllib.feature.dataset.image.{ColorJitter, LabeledBGRImage} import com.intel.analytics.bigdl.dllib.utils.RandomGenerator @com.intel.analytics.bigdl.tags.Serial class ColorJitterSpec extends TorchSpec { "A ColorJitter" should "blend image correctly" in { if (!TH.hasTorch()) { cancel("Torch is not installed") } val seed = 1000 RNG.setSeed(seed) val image1 = new LabeledBGRImage((1 to 27).map(_.toFloat).toArray, 3, 3, 0) val image2 = new LabeledBGRImage((2 to 28).map(_.toFloat).toArray, 3, 3, 0) val image3 = new LabeledBGRImage((3 to 29).map(_.toFloat).toArray, 3, 3, 0) val expected = image1.clone() val labeledBGRImage = new LabeledBGRImage(image1.content, image1.width(), image1.height(), image1.label()) val colorJitter = ColorJitter() val iter = colorJitter.apply(Iterator.single(labeledBGRImage)) val test = iter.next() val torchInput = Tensor[Float](Storage(expected.content), storageOffset = 1, size = Array(3, 3, 3)) .transpose(1, 3).transpose(2, 3) println(s"torchInput = ${torchInput}") val code = "torch.setdefaulttensortype('torch.FloatTensor')" + "torch.manualSeed(" + seed + ")\\n" + """ |local function blend(img1, img2, alpha) | return img1:mul(alpha):add(1 - alpha, img2) |end | |local function grayscale(dst, img) | dst:resizeAs(img) | dst[1]:zero() | dst[1]:add(0.299, img[1]):add(0.587, img[2]):add(0.114, img[3]) | dst[2]:copy(dst[1]) | dst[3]:copy(dst[1]) | return dst |end | |function Saturation(var) | local gs | | return function(input) | gs = gs or input.new() | grayscale(gs, input) | local alpha = 1.0 + torch.uniform(-var, var) | blend(input, gs, alpha) | return input | end |end | |function Brightness(var) | local gs | | return function(input) | gs = gs or input.new() | gs:resizeAs(input):zero() | | local alpha = 1.0 + torch.uniform(-var, var) | blend(input, gs, alpha) | return input | end |end | |function Contrast(var) | local gs | | return function(input) | gs = gs or input.new() | grayscale(gs, input) | gs:fill(gs[1]:mean()) | local alpha = 1.0 + torch.uniform(-var, var) | --local alpha = 0.61087716 | blend(input, gs, alpha) | return input | end |end | |function RandomOrder(ts) | return function(input) | local img = input.img or input | local order = torch.randperm(#ts) | for i=1,#ts do | img = ts[order[i]](img) | end | return img | end |end | |function ColorJitter(opt) | local brightness = opt.brightness or 0 | local contrast = opt.contrast or 0 | local saturation = opt.saturation or 0 | | local ts = {} | if brightness ~= 0 then | 
table.insert(ts, Brightness(brightness)) | end | if contrast ~= 0 then | table.insert(ts, Contrast(contrast)) | end | if saturation ~= 0 then | table.insert(ts, Saturation(saturation)) | end | | if #ts == 0 then | return function(input) return input end | end | | return RandomOrder(ts) |end | |local transform = ColorJitter({ | brightness = 0.4, | contrast = 0.4, | saturation = 0.4, |}) |output = transform(input) """.stripMargin val (luaTime, torchResult) = TH.run(code, Map("input" -> torchInput), Array("output")) val luaOutput = torchResult("output").asInstanceOf[Tensor[Float]] val bigdlOutput = Tensor[Float](Storage(test.content), storageOffset = 1, size = Array(3, 3, 3)) .transpose(1, 3).transpose(2, 3) luaOutput.map(bigdlOutput, (v1, v2) => { assert(abs(v1 - v2) < 1e-5) v1 }) } }
intel-analytics/BigDL
scala/dllib/src/test/scala/com/intel/analytics/bigdl/dllib/integration/torch/ColorJitterSpec.scala
Scala
apache-2.0
5,483
import sbt._
import sbt.Keys._

object ProjectBuild extends Build {
  lazy val root = Project(
    id = "root",
    base = file("."),
    settings = Project.defaultSettings ++ Seq(
      name := "Sandoc",
      organization := "org.hocdoc",
      version := "0.1",
      scalaVersion := "2.10.0"
    )
  )
}
Hocdoc/sandoc
project/Build.scala
Scala
apache-2.0
311
package com.whitepages.framework.client.riak import java.net.UnknownHostException import java.util.concurrent.{LinkedBlockingQueue, ThreadPoolExecutor, TimeUnit} import akka.actor._ import com.basho.riak.client.api.cap.{ConflictResolverFactory, Quorum} import com.basho.riak.client.api.commands.kv.{DeleteValue, FetchValue, StoreValue} import com.basho.riak.client.api.convert.ConverterFactory import com.basho.riak.client.api.{RiakClient => RiakJavaClient} import com.basho.riak.client.core.RiakCluster import com.fasterxml.jackson.core.`type`.TypeReference import com.persist.JsonOps import com.persist.JsonOps._ import com.typesafe.config.Config import com.whitepages.framework.client.ExtendedClientLogging import com.whitepages.framework.client.riak.Riak2Client.{RiakDeleteElapsedTime, RiakGetElapsedTime, RiakPutElapsedTime} import com.whitepages.framework.logging.noId import com.whitepages.framework.util.ClassSupport import scala.concurrent.{ExecutionContext, Future} object Riak2Client { val DefaultRiakBucketType = "default" sealed trait RiakMonitorMessages case class RiakGetElapsedTime(t: Long) extends RiakMonitorMessages case class RiakPutElapsedTime(t: Long) extends RiakMonitorMessages case class RiakDeleteElapsedTime(t: Long) extends RiakMonitorMessages case class RiakValueSize(size: Long, opType: RiakOperationType) extends RiakMonitorMessages /* The following are used for monitoring fetch/put payload size */ sealed trait RiakOperationType case object RiakFetchOperation extends RiakOperationType case object RiakPutOperation extends RiakOperationType } case class Riak2Client(registration: (ConverterFactory, ConflictResolverFactory) => Unit , private val actorFactory: ActorRefFactory , private val clientConfig: Config , private val mapper: ExtendedClientLogging.Mapper = ExtendedClientLogging.defaultMapper ) extends ClassSupport with Riak2Support with TimeMeasurement with Riak2ClientConfig { import scala.collection.JavaConverters._ private[this] val hosts = clientConfig.getStringList(hostsKey).asScala private[this] val port = clientConfig.getInt(portKey) private[this] val maxProcessingThreads = clientConfig.getInt(maxProcessingThreadsKey) private[this] val maxConnections = clientConfig.getInt(maxConnectionsKey) private[this] val minConnectionsOpt = // do not require this key to keep it compatible with the other clients if (clientConfig.hasPath(minConnectionsKey)) Some(clientConfig.getInt(minConnectionsKey)) else None private[this] val idleTimeout = clientConfig.getDuration(timeoutKey, TimeUnit.MILLISECONDS) private[this] val connectionTimeout = clientConfig.getDuration(connectionTimeoutKey, TimeUnit.MILLISECONDS) case class RiakClusterAndClient(cluster: RiakCluster, client: RiakJavaClient) private[this] var riakOpt: Option[RiakClusterAndClient] = None private[this] implicit val dispatcher = actorFactory.dispatcher // TODO: pull the right execution context for map/recover registerConvertersAndResolvers() private def registerConvertersAndResolvers() = { val converterFactory = ConverterFactory.getInstance() val resolverFactory = ConflictResolverFactory.getInstance() registration(converterFactory, resolverFactory) } private def startClient(): Future[RiakClusterAndClient] = Future { scala.util.control.Exception.catching(classOf[UnknownHostException]).withApply { case t: UnknownHostException => log.error(noId, "Unknown host while starting the Riak cluster", t) throw t case t: Throwable => val msg = s"Unable to connect to Riak ${hosts.mkString(",")}:$port :: ${t.getMessage}" log.error(noId, msg, t) 
throw t } { log.info(noId, JsonObject("RiakClient" -> "CONNECTING TO", "hosts" -> hosts.mkString(","), "port" -> port)) val cluster = buildRiakCluster(hosts, port, maxConnections, minConnectionsOpt, idleTimeout.toInt, connectionTimeout.toInt) cluster.start() RiakClusterAndClient(cluster, new RiakJavaClient(cluster)) } } def start(): Future[Unit] = riakOpt match { case Some(_) => log.info(noId, "Riak client already started") Future.successful((): Unit) case None => startClient() .map { case rcc => riakOpt = Some(rcc) log.info(noId, "Riak client successfully started") (): Unit } .recover { case t: Throwable => log.error(noId, "Exception caught while starting the Riak client", t)} } def stop(): Future[Boolean] = { riakOpt match { case Some(cc) => javaFutureToScalaFuture[java.lang.Boolean](cc.client.shutdown()) .map { case jbol => java.lang.Boolean.TRUE == jbol} .recover { case t: Throwable => log.error(noId, "Caught exception while shutting down", t) false } case None => Future.successful(true) } } private def failClientNotSet(): Future[Nothing] = Future.failed(new IllegalArgumentException("Riak client not set")) /* When a fetch operation is executed the order of execution is as follows: 1. RawClient fetch 2. Siblings iterated and converted 3. Converted siblings passed to conflict resolver 4. Resolved value returned */ // http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/ThreadPoolExecutor.html // http://blog.jessitron.com/2014/01/choosing-executorservice.html val processingEc = ExecutionContext.fromExecutorService( new ThreadPoolExecutor(0, maxProcessingThreads, 60L, TimeUnit.SECONDS, new LinkedBlockingQueue[Runnable]())) def genericGet[T](key: String , bucketName: String , bucketType: String , typeReferenceOpt: Option[TypeReference[DomainObject[T]]] // required for generic types ): Future[Option[DomainObject[T]]] = riakOpt match { case Some(RiakClusterAndClient(cluster, client)) => val nanoTimer = buildNanoTimer /* The following code would be easily reusable across fetch/store/delete if the corresponding *Value.Response classes would have a common ancestor in the type hierarchy :( */ val fetchValueCommand = new FetchValue.Builder(buildLocation(bucketType, bucketName, key)) .build() //logThread val fetchedValueF = riakFutureToScalaFuture(client.executeAsync(fetchValueCommand)) val resultF = fetchedValueF .map { case (response, query) => //logThread Option(typeReferenceOpt match { case Some(tr: TypeReference[DomainObject[T]]) => response.getValue(tr) // generic type info removed by type erasure case None => response.getValue[DomainObject[T]](classOf[DomainObject[T]]) /* this path not tested */ }) }(processingEc) .recover { case t: Throwable => log.error(noId, JsonOps.JsonObject("message" -> "Exception caught in Riak FETCH", "key" -> key, "bucketName" -> bucketName), t) throw t } resultF.onComplete { case _ => require(monitor != null) monitor ! RiakGetElapsedTime(nanoTimer((): Unit).toNanos) // monitor converts to micros } resultF case None => failClientNotSet() } /* For a store operation 1. Fetch operation performed as above 2. The Mutation is applied to the fetched value 3. The mutated value is converted to RiakObject 4. The store is performed through the RawClient 5. 
if returnBody is true the siblings are iterated, converted and conflict resolved and the value is returned */ def genericPut[T](key: String , bucketName: String , value: DomainObject[T] , bucketType: String , typeReferenceOpt: Option[TypeReference[DomainObject[T]]] // required for generic types ): Future[Unit] = riakOpt match { case Some(RiakClusterAndClient(cluster, client)) => val nanoTimer = buildNanoTimer /* TODO: allow passing of options, such as RETURN_BODY or RETURN_HEAD */ val builder = typeReferenceOpt match { case Some(tr) => new StoreValue.Builder(value, tr) case None => new StoreValue.Builder(value) /* this path not tested */ } val storeValueCommand = builder .withLocation(buildLocation(bucketType, bucketName, key)) .withOption(StoreValue.Option.W, Quorum.defaultQuorum()) // monitor converts to micros .build() val storedValueF = riakFutureToScalaFuture(client.executeAsync(storeValueCommand)) val resultF = storedValueF .map(_ => (): Unit) .recover { case t: Throwable => log.error(noId, JsonOps.JsonObject("message" -> "Exception caught in Riak PUT", "key" -> key, "bucketName" -> bucketName), t) throw t } resultF.onComplete { case _ => require(monitor != null) monitor ! RiakPutElapsedTime(nanoTimer((): Unit).toNanos) // monitor converts to micros } resultF case None => failClientNotSet() } def delete(key: String , bucketName: String , bucketType: String ): Future[Unit] = riakOpt match { case Some(RiakClusterAndClient(cluster, client)) => val nanoTimer = buildNanoTimer val deleteValueCommand = new DeleteValue.Builder(buildLocation(bucketType, bucketName, key)) .build() val deletedValueF = riakFutureToScalaFuture(client.executeAsync(deleteValueCommand)) val resultF = deletedValueF .map { case (response, query) => None /* nothing to return */} .recover { case t: Throwable => log.error(noId, JsonOps.JsonObject("message" -> "Exception caught in Riak DELETE", "key" -> key, "bucketName" -> bucketName), t) throw t } .map(_ => (): Unit) resultF.onComplete { case _ => monitor ! RiakDeleteElapsedTime(nanoTimer((): Unit).toNanos)} // TODO: micros? resultF case None => failClientNotSet() } }
whitepages/scala-riak-client
src/main/scala/com/whitepages/framework/client/riak/Riak2Client.scala
Scala
apache-2.0
10,308
/* * Copyright 2018 HM Revenue & Customs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package connector import models._ import play.api.libs.json.{JsValue, Json} import uk.gov.hmrc.play.config.ServicesConfig import scala.concurrent.Future import play.api.libs.concurrent.Execution.Implicits._ import play.api.Play import play.api.Logger import scala.util.{Failure, Success, Try} import metrics._ import java.util.concurrent.TimeUnit import config.WSHttp import uk.gov.hmrc.http.{HeaderCarrier, HttpPost, HttpResponse} object CalculatorConnector extends CalculatorConnector with ServicesConfig with GraphiteMetrics { override def httpPostRequest = WSHttp override val serviceUrl = baseUrl("paac") } trait CalculatorConnector extends Metrics { this: Metrics => def httpPostRequest: HttpPost def serviceUrl: String def connectToPAACService(contributions:List[Contribution], notMemberInP1: Boolean = false)(implicit hc: HeaderCarrier): Future[List[TaxYearResults]] = { val startTime = System.currentTimeMillis() // should use cross-cutting concerns and wrap this val earliestYear = contributions.foldLeft(Integer.MAX_VALUE)((x,y)=>x.min(y.taxPeriodStart.year)) val calculationRequest = CalculationRequest(contributions, Some(earliestYear), Some(false), Some(notMemberInP1)) val endpoint = Play.current.configuration.getString("microservice.services.paac.endpoints.calculate").getOrElse("/paac/calculate") val body = Json.toJson(calculationRequest) Logger.info(s"""[CalculatorConnector]: Making calculation request:\\n${contributions.mkString("\\n")}\\nEarliest year =${earliestYear}""") httpPostRequest.POST[JsValue, HttpResponse](s"${serviceUrl}${endpoint}",body).map { (response)=> calculatorStatusCode(response.status) calculationTime(System.currentTimeMillis() - startTime, TimeUnit.MILLISECONDS) val received = (response.json \\ "results").as[List[TaxYearResults]] Logger.debug(s"""${received.mkString("\\n")}""") received } andThen { case Failure(e) => { failedCalculation Logger.error(s"[CalculatorConnector]: Backend failed to calculate: ${e.getMessage()}", e) throw e } case Success(results) => { successfulCalculation results } } } }
hmrc/paac-frontend
app/connector/CalculatorConnector.scala
Scala
apache-2.0
2,834
/** * Copyright (C) 2015 Orbeon, Inc. * * This program is free software; you can redistribute it and/or modify it under the terms of the * GNU Lesser General Public License as published by the Free Software Foundation; either version * 2.1 of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * See the GNU Lesser General Public License for more details. * * The full text of the license is available at http://www.gnu.org/copyleft/lesser.html */ package org.orbeon.oxf.xml import java.io.Writer import javax.xml.transform.stream.StreamResult import org.xml.sax.Attributes object HTMLFragmentSerializer { def create(writer: Writer, skipRootElement: Boolean) = { val identity = TransformerUtils.getIdentityTransformerHandler TransformerUtils.applyOutputProperties( identity.getTransformer, "html", null, null, null, "utf-8", true, null, false, 0 ) identity.setResult(new StreamResult(writer)) val htmlReceiver = new PlainHTMLOrXHTMLReceiver("", identity) if (skipRootElement) new SkipRootElement(htmlReceiver) else htmlReceiver } class SkipRootElement(receiver: XMLReceiver) extends ForwardingXMLReceiver(receiver) { var level = 0 override def startElement(uri: String, localname: String, qName: String, attributes: Attributes): Unit = { if (level > 0) super.startElement(uri, localname, qName, attributes) level += 1 } override def endElement(uri: String, localname: String, qName: String): Unit = { level -= 1 if (level > 0) super.endElement(uri, localname, qName) } } }
wesley1001/orbeon-forms
src/main/scala/org/orbeon/oxf/xml/HTMLFragmentSerializer.scala
Scala
lgpl-2.1
1,914
/* * Copyright 2007-2010 WorldWide Conferencing, LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.liftweb { package example { package snippet { /* import _root_.net.liftweb.example.model._ import _root_.scala.xml.{NodeSeq, Text, Group} import _root_.net.liftweb.http.{S, SHtml} import _root_.net.liftweb.mapper._ import _root_.net.liftweb.http.S._ import _root_.net.liftweb.util._ import _root_.net.liftweb.util.Helpers._ import _root_.net.liftweb.textile._ // show determines which one is used. bind hooks the content into the lift view case class BindChoice(show: Boolean, bind: () => NodeSeq) class Wiki extends MetaWikiEntry { def uriFor(path:String) = "/wiki/" + path /** * Display the Textile marked up wiki or an edit box */ def main: NodeSeq = { val pageName = S.param("wiki_page") openOr "HomePage" // set the name of the page def showAll = { findAll(OrderBy(WikiEntry.name, Ascending)).flatMap(entry => <div><a href={uriFor(entry.name)}>{entry.name}</a></div>) } if (pageName == "all") showAll // if the page is "all" display all the pages else { // find the entry in the database or create a new one val entry = find(By(WikiEntry.name, pageName)) openOr create.name(pageName) // is it a new entry? val isNew = !entry.saved_? // show edit or just display val edit = isNew || (S.param("param1").map(_ == "edit") openOr false) <span><a href={uriFor("all")}>Show All Pages</a><br/>{ if (edit) editEntry(entry, isNew, pageName) else TextileParser.toHtml(entry.entry, Some(TextileParser.DefaultRewriter("/wiki"))) ++ <br/><a href={uriFor(pageName+"/edit")}>Edit</a> // and add an "edit" link }</span> } } def choosebind(xhtml : NodeSeq) = { def pageName = S.param("wiki_page") openOr "HomePage" // set the name of the page def showAll = BindChoice((pageName == "all"), () => bind("pages", (xhtml \\\\ "showAll").filter(_.prefix == "wiki").toList.head.child, TheBindParam("all", findAll(OrderBy(WikiEntry.name, Ascending)).flatMap(entry => <div><a href={"/wikibind/"+entry.name}>{entry.name}</a></div>)))) // find the entry in the database or create a new one def entry = find(By(WikiEntry.name, pageName)) openOr create.name(pageName) // is it a new entry? def isNew = !entry.saved_? 
def toEdit = isNew || (S.param("param1").map(_ == "edit") openOr false) def edit = BindChoice(toEdit, () => bind("edit", (xhtml \\\\ "editting").filter(_.prefix == "wiki").toList.head.child, "form" -> editEntry(entry, isNew, pageName))) def view = BindChoice(!toEdit, () => bind("view", (xhtml \\\\ "displaying").filter(_.prefix == "wiki").toList.head.child, TheBindParam("name", Text(pageName)), TheBindParam("value", (TextileParser.toHtml(entry.entry, Some(TextileParser.DefaultRewriter("/wiki"))) ++ <br/><a href={uriFor(pageName+"/edit")}>Edit</a>)))) (showAll :: edit :: view :: Nil).find(_.show == true).map(_.bind()) match { case Some(x) => x case _ => <span /> } } private def editEntry(entry: WikiEntry, isNew: Boolean, pageName: String) = { val action = uriFor(pageName) val message = if (isNew) Text("Create Entry named "+pageName) else Text("Edit entry named "+pageName) val hobixLink = <span>&nbsp;<a href="http://hobix.com/textile/quick.html" target="_blank">Textile Markup Reference</a><br /></span> val cancelLink = <a href={uriFor(pageName)}>Cancel</a> val textarea = entry.entry.toForm val submitButton = SHtml.submit(isNew ? "Add" | "Edit", () => entry.save) <form method="GET" action={action}>{ // the form tag message ++ hobixLink ++ textarea ++ // display the form <br /> ++ cancelLink ++ Text(" ") ++ submitButton }</form> } } */ } } }
wsaccaco/lift
examples/example/src/main/scala/net/liftweb/example/snippet/Wiki.scala
Scala
apache-2.0
4,447
/* * Copyright 2012-2014 Comcast Cable Communications Management, LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.comcast.xfinity.sirius.uberstore.data import java.nio.ByteBuffer import com.comcast.xfinity.sirius.uberstore.common.Checksummer /** * Class providing UberStoreFileOps, storing entries in the following format: * * [len: Int][chksum: Long][data: Array[Byte]] */ class UberStoreBinaryFileOps extends UberStoreFileOps { this: Checksummer => final val HEADER_SIZE = 4 + 8 // int len + long checksum /** * @inheritdoc */ def put(writeHandle: UberDataFileWriteHandle, body: Array[Byte]): Long = { val len: Int = body.length val chksum: Long = checksum(body) val byteBuf = ByteBuffer.allocate(HEADER_SIZE + len) byteBuf.putInt(len).putLong(chksum).put(body) writeHandle.write(byteBuf.array) } /** * @inheritdoc */ def readNext(readHandle: UberDataFileReadHandle): Option[Array[Byte]] = if (readHandle.eof()) { None } else { val (bodyLen, chksum) = readHeader(readHandle) val body = readBody(readHandle, bodyLen) if (chksum == checksum(body)) { Some(body) // [that i used to know | to love] } else { throw new IllegalStateException("File corrupted at offset " + readHandle.offset()) } } // Helper jawns private def readHeader(readHandle: UberDataFileReadHandle): (Int, Long) = (readHandle.readInt(), readHandle.readLong()) private def readBody(readHandle: UberDataFileReadHandle, bodyLen: Int): Array[Byte] = { val entry = new Array[Byte](bodyLen) readHandle.readFully(entry) entry } }
Comcast/sirius
src/main/scala/com/comcast/xfinity/sirius/uberstore/data/UberStoreBinaryFileOps.scala
Scala
apache-2.0
2,188
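The [len: Int][chksum: Long][data] framing above is easy to demonstrate standalone. A sketch using CRC32 as a stand-in checksummer (the real Checksummer implementation is mixed in elsewhere and is not specified in this file):

import java.nio.ByteBuffer
import java.util.zip.CRC32

def checksum(bytes: Array[Byte]): Long = {
  val crc = new CRC32
  crc.update(bytes)
  crc.getValue
}

// Frame a record: [len: Int][chksum: Long][data: Array[Byte]]
def frame(body: Array[Byte]): Array[Byte] =
  ByteBuffer.allocate(4 + 8 + body.length)
    .putInt(body.length)
    .putLong(checksum(body))
    .put(body)
    .array()

// Unframe and verify, mirroring readNext's corruption check:
def unframe(record: Array[Byte]): Array[Byte] = {
  val buf  = ByteBuffer.wrap(record)
  val body = new Array[Byte](buf.getInt())
  val chk  = buf.getLong()
  buf.get(body)
  require(chk == checksum(body), "record corrupted")
  body
}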
package at.forsyte.apalache.tla.tooling.opt import java.io.File import org.backuity.clist.{Command, _} /** * This command initiates the 'check' command line. * * @author Igor Konnov */ class CheckCmd extends Command(name = "check", description = "Check a TLA+ specification") with General { var file: File = arg[File](description = "a file containing a TLA+ specification (.tla or .json)") var config: String = opt[String]( name = "config", default = "", description = "configuration file in TLC format,\n" + "default: <file>.cfg, or none if <file>.cfg not present") var search: String = opt[String]( name = "search", default = "bfs", description = "search type (dfs or bfs), default: bfs") var cinit: String = opt[String]( name = "cinit", default = "", description = "the name of an operator that initializes CONSTANTS,\n" + "default: None") var init: String = opt[String]( name = "init", default = "", description = "the name of an operator that initializes VARIABLES,\n" + "default: Init") var next: String = opt[String]( name = "next", default = "", description = "the name of a transition operator, default: Next") var inv: String = opt[String](name = "inv", default = "", description = "the name of an invariant operator, e.g., Inv") var length: Int = opt[Int](name = "length", default = 10, description = "the bound on the computation length, default: 10") var tuning: String = opt[String](name="tuning", default = "", description = "filename of the tuning options, see apalache/docs/tuning.md") var randomizeDfs: Boolean = opt[Boolean]( name = "randomizeDfs", default = true, description = "randomize the choice of the next transition in DFS,\n" + "default: true") var filter: String = opt[String](name = "filter", default = "", description = "A sequence of regexes over transition numbers\n" + "to filter transitions at every step, e.g., (0|1),(1|2),4") }
konnov/apalache
mod-tool/src/main/scala/at/forsyte/apalache/tla/tooling/opt/CheckCmd.scala
Scala
apache-2.0
2,022
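For context, clist commands such as CheckCmd are normally driven from a main method via Cli.parse(args).withCommands(...). The driver below is a hypothetical sketch, not apalache's actual entry point:

import org.backuity.clist.Cli

object CheckTool {
  def main(args: Array[String]): Unit = {
    // Hypothetical wiring; the real launcher may register more commands.
    Cli.parse(args).withCommands(new CheckCmd) match {
      case Some(cmd) =>
        println(s"checking ${cmd.file} (search=${cmd.search}, length=${cmd.length})")
      case None => // clist has already printed usage/help
    }
  }
}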
package org.elasticmq.actor import org.elasticmq.actor.reply._ import org.elasticmq._ import org.elasticmq.msg._ import org.elasticmq.actor.test.{DataCreationHelpers, QueueManagerForEachTest, ActorTest} import org.joda.time.{Duration, DateTime} class QueueActorMsgOpsTest extends ActorTest with QueueManagerForEachTest with DataCreationHelpers { waitTest("non-existent msg should not be found") { // Given val q1 = createQueueData("q1", MillisVisibilityTimeout(1L)) for { Right(queueActor) <- queueManagerActor ? CreateQueue(q1) // When lookupResult <- queueActor ? LookupMessage(MessageId("xyz")) } yield { // Then lookupResult should be (None) } } waitTest("after persisting a msg it should be found") { // Given val q1 = createQueueData("q1", MillisVisibilityTimeout(1L)) val message = createNewMessageData("xyz", "123", Map(), MillisNextDelivery(123L)) for { Right(queueActor) <- queueManagerActor ? CreateQueue(q1) _ <- queueActor ? SendMessage(message) // When lookupResult <- queueActor ? LookupMessage(MessageId("xyz")) } yield { // Then lookupResult.map(createNewMessageData(_)) should be (Some(message)) } } waitTest("sending msg with maximum size should succeed") { // Given val maxMessageContent = "x" * 65535 val q1 = createQueueData("q1", MillisVisibilityTimeout(1L)) val m = createNewMessageData("xyz", maxMessageContent, Map(), MillisNextDelivery(123L)) for { Right(queueActor) <- queueManagerActor ? CreateQueue(q1) _ <- queueActor ? SendMessage(m) // When lookupResult <- queueActor ? LookupMessage(MessageId("xyz")) } yield { // Then lookupResult.map(createNewMessageData(_)) should be (Some(m)) } } waitTest("undelivered msg should not be found in an empty queue") { // Given val q1 = createQueueData("q1", MillisVisibilityTimeout(1L)) val q2 = createQueueData("q2", MillisVisibilityTimeout(2L)) for { Right(queueActor1) <- queueManagerActor ? CreateQueue(q1) Right(queueActor2) <- queueManagerActor ? CreateQueue(q2) _ <- queueActor1 ? SendMessage(createNewMessageData("xyz", "123", Map(), MillisNextDelivery(50L))) // When lookupResult <- queueActor2 ? ReceiveMessages(DefaultVisibilityTimeout, 1, None) } yield { // Then lookupResult should be (Nil) } } waitTest("undelivered msg should be found in a non-empty queue") { // Given val q1 = createQueueData("q1", MillisVisibilityTimeout(1L)) val q2 = createQueueData("q2", MillisVisibilityTimeout(2L)) val m = createNewMessageData("xyz", "123", Map(), MillisNextDelivery(50L)) for { Right(queueActor1) <- queueManagerActor ? CreateQueue(q1) Right(queueActor2) <- queueManagerActor ? CreateQueue(q2) _ <- queueActor1 ? SendMessage(m) // When lookupResult <- queueActor1 ? ReceiveMessages(DefaultVisibilityTimeout, 1, None) } yield { // Then withoutDeliveryReceipt(lookupResult.headOption).map(createNewMessageData(_)) should be (Some(m.copy(nextDelivery = MillisNextDelivery(101L)))) } } waitTest("next delivery should be updated after receiving") { // Given val q1 = createQueueData("q1", MillisVisibilityTimeout(1L)) val m = createNewMessageData("xyz", "123", Map(), MillisNextDelivery(50L)) for { Right(queueActor) <- queueManagerActor ? CreateQueue(q1) _ <- queueActor ? SendMessage(m) // When _ <- queueActor ? ReceiveMessages(DefaultVisibilityTimeout, 1, None) lookupResult <- queueActor ? 
LookupMessage(MessageId("xyz")) } yield { // Then withoutDeliveryReceipt(lookupResult).map(createNewMessageData(_)) should be (Some(m.copy(nextDelivery = MillisNextDelivery(101L)))) } } waitTest("receipt handle should be filled when receiving") { // Given val q1 = createQueueData("q1", MillisVisibilityTimeout(1L)) for { Right(queueActor) <- queueManagerActor ? CreateQueue(q1) _ <- queueActor ? SendMessage(createNewMessageData("xyz", "123", Map(), MillisNextDelivery(50L))) // When lookupBeforeReceiving <- queueActor ? LookupMessage(MessageId("xyz")) received <- queueActor ? ReceiveMessages(DefaultVisibilityTimeout, 1, None) lookupAfterReceiving <- queueActor ? LookupMessage(MessageId("xyz")) } yield { // Then lookupBeforeReceiving.flatMap(_.deliveryReceipt) should be (None) val receivedReceipt = received.flatMap(_.deliveryReceipt) val lookedUpReceipt = lookupAfterReceiving.flatMap(_.deliveryReceipt) receivedReceipt.size should be > (0) lookedUpReceipt should be ('defined) receivedReceipt.headOption should be (lookedUpReceipt) } } waitTest("receipt handle should change on subsequent receives") { // Given val q1 = createQueueData("q1", MillisVisibilityTimeout(1L)) for { Right(queueActor) <- queueManagerActor ? CreateQueue(q1) _ <- queueActor ? SendMessage(createNewMessageData("xyz", "123", Map(), MillisNextDelivery(50L))) // When received1 <- queueActor ? ReceiveMessages(DefaultVisibilityTimeout, 1, None) _ = nowProvider.mutableNowMillis.set(101L) received2 <- queueActor ? ReceiveMessages(DefaultVisibilityTimeout, 1, None) } yield { // Then val received1Receipt = received1.flatMap(_.deliveryReceipt) val received2Receipt = received2.flatMap(_.deliveryReceipt) received1Receipt.size should be > (0) received2Receipt.size should be > (0) received1Receipt should not be (received2Receipt) } } waitTest("delivered msg should not be found in a non-empty queue when it is not visible") { // Given val q1 = createQueueData("q1", MillisVisibilityTimeout(1L)) for { Right(queueActor) <- queueManagerActor ? CreateQueue(q1) _ <- queueActor ? SendMessage(createNewMessageData("xyz", "123", Map(), MillisNextDelivery(123L))) // When receiveResult <- queueActor ? ReceiveMessages(DefaultVisibilityTimeout, 1, None) } yield { // Then receiveResult should be (Nil) } } waitTest("increasing next delivery of a msg") { // Given val q1 = createQueueData("q1", MillisVisibilityTimeout(1L)) val m = createNewMessageData("xyz", "1234", Map(), MillisNextDelivery(123L)) for { Right(queueActor) <- queueManagerActor ? CreateQueue(q1) _ <- queueActor ? SendMessage(m) // When _ <- queueActor ? UpdateVisibilityTimeout(m.id.get, MillisVisibilityTimeout(50L)) lookupResult <- queueActor ? LookupMessage(MessageId("xyz")) } yield { // Then lookupResult.map(createNewMessageData(_)) should be (Some(createNewMessageData("xyz", "1234", Map(), MillisNextDelivery(150L)))) } } waitTest("decreasing next delivery of a msg") { // Given val q1 = createQueueData("q1", MillisVisibilityTimeout(1L)) // Initially m2 should be delivered after m1 val m1 = createNewMessageData("xyz1", "1234", Map(), MillisNextDelivery(150L)) val m2 = createNewMessageData("xyz2", "1234", Map(), MillisNextDelivery(200L)) for { Right(queueActor) <- queueManagerActor ? CreateQueue(q1) _ <- queueActor ? SendMessage(m1) _ <- queueActor ? SendMessage(m2) // When _ <- queueActor ? UpdateVisibilityTimeout(m2.id.get, MillisVisibilityTimeout(10L)) _ = nowProvider.mutableNowMillis.set(110L) receiveResult <- queueActor ? 
ReceiveMessages(DefaultVisibilityTimeout, 1, None) } yield { // Then // This should find m2 first, as its visibility timeout was decreased. receiveResult.headOption.map(_.id) should be (m2.id) } } waitTest("msg should be deleted") { // Given val q1 = createQueueData("q1", MillisVisibilityTimeout(1L)) val m1 = createNewMessageData("xyz", "123", Map(), MillisNextDelivery(50L)) for { Right(queueActor) <- queueManagerActor ? CreateQueue(q1) _ <- queueActor ? SendMessage(m1) List(m1data) <- queueActor ? ReceiveMessages(DefaultVisibilityTimeout, 1, None) // When _ <- queueActor ? DeleteMessage(m1data.deliveryReceipt.get) lookupResult <- queueActor ? LookupMessage(MessageId("xyz")) } yield { // Then lookupResult should be (None) } } waitTest("msg statistics should be updated") { // Given val q1 = createQueueData("q1", MillisVisibilityTimeout(1L)) val m1 = createNewMessageData("xyz", "123", Map(), MillisNextDelivery(50L)) for { Right(queueActor) <- queueManagerActor ? CreateQueue(q1) _ <- queueActor ? SendMessage(m1) // When Some(lookupResult) <- queueActor ? LookupMessage(m1.id.get) List(receiveResult1) <- queueActor ? ReceiveMessages(DefaultVisibilityTimeout, 1, None) _ = nowProvider.mutableNowMillis.set(110L) List(receiveResult2) <- queueActor ? ReceiveMessages(DefaultVisibilityTimeout, 1, None) } yield { // Then lookupResult.statistics should be (MessageStatistics(NeverReceived, 0)) receiveResult1.statistics should be (MessageStatistics(OnDateTimeReceived(new DateTime(100L)), 1)) receiveResult2.statistics should be (MessageStatistics(OnDateTimeReceived(new DateTime(110L)), 2)) } } waitTest("should receive at most as many messages as given") { // Given val q1 = createQueueData("q1", MillisVisibilityTimeout(1L)) val msgs = (for { i <- 1 to 5 } yield createNewMessageData("xyz" + i, "123", Map(), MillisNextDelivery(100))).toList val List(m1, m2, m3, m4, m5) = msgs for { Right(queueActor) <- queueManagerActor ? CreateQueue(q1) _ <- queueActor ? SendMessage(m1) _ <- queueActor ? SendMessage(m2) _ <- queueActor ? SendMessage(m3) _ <- queueActor ? SendMessage(m4) _ <- queueActor ? SendMessage(m5) // When receiveResults1 <- queueActor ? ReceiveMessages(DefaultVisibilityTimeout, 3, None) receiveResults2 <- queueActor ? ReceiveMessages(DefaultVisibilityTimeout, 2, None) } yield { // Then receiveResults1.size should be (3) receiveResults2.size should be (2) (receiveResults1.map(_.id.id).toSet ++ receiveResults2.map(_.id.id).toSet) should be (msgs.map(_.id.get.id).toSet) } } waitTest("should receive as many messages as possible") { // Given val q1 = createQueueData("q1", MillisVisibilityTimeout(1L)) val msgs = (for { i <- 1 to 3 } yield createNewMessageData("xyz" + i, "123", Map(), MillisNextDelivery(100))).toList val List(m1, m2, m3) = msgs for { Right(queueActor) <- queueManagerActor ? CreateQueue(q1) _ <- queueActor ? SendMessage(m1) _ <- queueActor ? SendMessage(m2) _ <- queueActor ? SendMessage(m3) // When receiveResults <- queueActor ? ReceiveMessages(DefaultVisibilityTimeout, 5, None) } yield { // Then receiveResults.size should be (3) receiveResults.map(_.id.id).toSet should be (msgs.map(_.id.get.id).toSet) } } waitTest("should wait for messages to be received for the specified period of time") { // Given val q1 = createQueueData("q1", MillisVisibilityTimeout(1L)) val start = System.currentTimeMillis() for { Right(queueActor) <- queueManagerActor ? CreateQueue(q1) // When receiveResults <- queueActor ? 
ReceiveMessages(DefaultVisibilityTimeout, 5, Some(Duration.millis(500L))) } yield { // Then val end = System.currentTimeMillis() (end - start) should be >= (500L) receiveResults should be (Nil) } } waitTest("should wait until messages are available") { // Given val q1 = createQueueData("q1", MillisVisibilityTimeout(1L)) val msg = createNewMessageData("xyz", "123", Map(), MillisNextDelivery(200L)) for { Right(queueActor) <- queueManagerActor ? CreateQueue(q1) // When receiveResultsFuture = queueActor ? ReceiveMessages(DefaultVisibilityTimeout, 5, Some(Duration.millis(1000L))) _ <- { Thread.sleep(500); nowProvider.mutableNowMillis.set(200L); queueActor ? SendMessage(msg) } receiveResults <- receiveResultsFuture } yield { // Then receiveResults.size should be (1) receiveResults.map(_.id) should be (msg.id.toList) } } waitTest("multiple futures should wait until messages are available, and receive the message only once") { // Given val q1 = createQueueData("q1", MillisVisibilityTimeout(1L)) val msg = createNewMessageData("xyz", "123", Map(), MillisNextDelivery(100)) val start = System.currentTimeMillis() for { Right(queueActor) <- queueManagerActor ? CreateQueue(q1) // When receiveResults1Future = queueActor ? ReceiveMessages(DefaultVisibilityTimeout, 5, Some(Duration.millis(1000L))) receiveResults2Future = queueActor ? ReceiveMessages(DefaultVisibilityTimeout, 5, Some(Duration.millis(1000L))) _ <- { Thread.sleep(500); queueActor ? SendMessage(msg) } receiveResults1 <- receiveResults1Future receiveResults2 <- receiveResults2Future } yield { // Then val end = System.currentTimeMillis() (end - start) should be >= (1000L) // no reply for one of the futures Set(receiveResults1.size, receiveResults2.size) should be (Set(0, 1)) (receiveResults1 ++ receiveResults2).map(_.id) should be (msg.id.toList) } } waitTest("multiple futures should wait until messages are available, and receive all sent messages") { // Given val q1 = createQueueData("q1", MillisVisibilityTimeout(1L)) val msg1 = createNewMessageData("xyz1", "123a", Map(), MillisNextDelivery(100)) val msg2 = createNewMessageData("xyz2", "123b", Map(), MillisNextDelivery(100)) for { Right(queueActor) <- queueManagerActor ? CreateQueue(q1) // When receiveResults1Future = queueActor ? ReceiveMessages(DefaultVisibilityTimeout, 5, Some(Duration.millis(1000L))) receiveResults2Future = queueActor ? ReceiveMessages(DefaultVisibilityTimeout, 5, Some(Duration.millis(1000L))) receiveResults3Future = queueActor ? ReceiveMessages(DefaultVisibilityTimeout, 5, Some(Duration.millis(1000L))) _ <- { Thread.sleep(500); queueActor ? SendMessage(msg1); queueActor ? SendMessage(msg2) } receiveResults1 <- receiveResults1Future receiveResults2 <- receiveResults2Future receiveResults3 <- receiveResults3Future } yield { // Then List(receiveResults1.size, receiveResults2.size, receiveResults3.size).sum should be (2) (receiveResults1 ++ receiveResults2 ++ receiveResults3).map(_.id).toSet should be ((msg1.id.toList ++ msg2.id.toList).toSet) } } def withoutDeliveryReceipt(messageOpt: Option[MessageData]) = { messageOpt.map(_.copy(deliveryReceipt = None)) } }
kubek2k/elasticmq
core/src/test/scala/org/elasticmq/actor/QueueActorMsgOpsTest.scala
Scala
apache-2.0
14,976
package org.orbeon.oxf.xml.xerces import org.orbeon.oxf.xml.ParserConfiguration /* * An improvement over org.orbeon.apache.xerces.parsers.SAXParser. Every time * org.orbeon.apache.xerces.parsers.SAXParser is constructed it looks in * META-INF/services/orbeon.apache.xerces.xni.parser.XMLParserConfiguration to figure out what * config to use. Problems with this are * * - We only want our config to be used. While we have changed the above file, any work done * to read the file is really just a waste. * * - The contents of the file do not change at runtime so there is little point in repeatedly * reading it. * * - About 16.4K of garbage gets created with each read of the above file. At the frequency with * which OPS creates SAX parsers this accumulates quickly and consequently we start losing processor time to the * garbage collector. */ object XercesSAXParser { private val NOTIFY_BUILTIN_REFS = "http://apache.org/xml/features/scanner/notify-builtin-refs" private val SYMBOL_TABLE = "http://apache.org/xml/properties/internal/symbol-table" private val XMLGRAMMAR_POOL = "http://apache.org/xml/properties/internal/grammar-pool" private[xerces] val RECOGNIZED_FEATURES = Array(NOTIFY_BUILTIN_REFS) private[xerces] val RECOGNIZED_PROPERTIES = Array(SYMBOL_TABLE, XMLGRAMMAR_POOL) def makeConfig(parserConfiguration: ParserConfiguration): OrbeonParserConfiguration = { val result = new OrbeonParserConfiguration(parserConfiguration) result.addRecognizedFeatures(RECOGNIZED_FEATURES) result.setFeature(NOTIFY_BUILTIN_REFS, true) result.addRecognizedProperties(RECOGNIZED_PROPERTIES) result } } class XercesSAXParser(val parserConfiguration: ParserConfiguration) extends org.orbeon.apache.xerces.parsers.SAXParser(XercesSAXParser.makeConfig(parserConfiguration))
orbeon/orbeon-forms
src/main/scala/org/orbeon/oxf/xml/xerces/XercesSAXParser.scala
Scala
lgpl-2.1
1,813
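Since the resulting parser is a regular org.xml.sax.XMLReader, it is used with the standard SAX flow. In this sketch, parserConfiguration, handler and inputStream are placeholders rather than values from the Orbeon codebase:

// Sketch only: parserConfiguration, handler and inputStream are placeholders.
val parser = new XercesSAXParser(parserConfiguration)
parser.setContentHandler(handler)
parser.parse(new org.xml.sax.InputSource(inputStream))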
package com.github.ldaniels528.trifecta.sjs import io.scalajs.npm.angularjs._ import io.scalajs.util.ScalaJsHelper._ import scala.scalajs.js /** * Trifecta AngularJS Filters * @author [email protected] */ object Filters { private val timeUnits = Seq("min", "hour", "day", "month", "year") private val timeFactors = Seq(60, 24, 30, 12) /** * Capitalize: Returns the capitalized form of a given string */ val capitalize: js.Function = () => { (value: js.UndefOr[String]) => value map { s => if (s.nonEmpty) s.head.toUpper + s.tail else "" } }: js.Function /** * Duration: Converts a given time stamp to a more human-readable expression (e.g. "5 mins ago") */ val duration: js.Function = () => { (time: js.Dynamic) => toDuration(time) }: js.Function /** * Yes/No: Converts a boolean value into 'Yes' or 'No' */ val yesNo: js.Function = () => ((state: Boolean) => if (state) "Yes" else "No"): js.Function /** * Converts the given time expression to a textual duration * @param time the given time stamp (in milliseconds) * @return the duration (e.g. "10 mins ago") */ private def toDuration(time: js.UndefOr[js.Any], noFuture: Boolean = false) = { // get the time in milliseconds val myTime = time.toOption map { case value if angular.isDate(value) => value.asInstanceOf[js.Date].getTime() case value if angular.isNumber(value) => value.asInstanceOf[Double] case value if angular.isObject(value) => val obj = value.asInstanceOf[js.Dynamic] if (angular.isDefined(obj.$date)) obj.$date.asOpt[Double].getOrElse(js.Date.now()) else js.Date.now() case _ => js.Date.now() } getOrElse js.Date.now() // compute the elapsed time val elapsed = (js.Date.now() - myTime) / 60000 // compute the age var age = Math.abs(elapsed) var unit = 0 while (unit < timeFactors.length && age >= timeFactors(unit)) { age /= timeFactors(unit) unit += 1 } // make the age and unit names more readable val unitName = timeUnits(unit) + (if (age.toInt != 1) "s" else "") if (unit == 0 && (age >= 0 && age < 1)) "just now" else if (elapsed < 0) { if (noFuture) "moments ago" else f"$age%.0f $unitName from now" } else f"$age%.0f $unitName ago" } }
ldaniels528/trifecta
app-js/src/main/scala/com/github/ldaniels528/trifecta/sjs/Filters.scala
Scala
apache-2.0
2,345
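The unit-folding loop in toDuration divides the elapsed age through successive factors (60 minutes per hour, 24 hours per day, 30 days per month, 12 months per year) until the age drops below the next factor. The same reduction in plain Scala, detached from the AngularJS plumbing:

// Plain-Scala sketch of the reduction performed by toDuration above.
def humanize(elapsedMinutes: Double): String = {
  val units = Seq("min", "hour", "day", "month", "year")
  val factors = Seq(60, 24, 30, 12)
  var age = math.abs(elapsedMinutes)
  var unit = 0
  while (unit < factors.length && age >= factors(unit)) {
    age /= factors(unit) // promote to the next coarser unit
    unit += 1
  }
  val name = units(unit) + (if (age.toInt != 1) "s" else "")
  f"$age%.0f $name ago"
}
// humanize(30)  -> "30 mins ago"
// humanize(120) -> "2 hours ago"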
package acolyte.jdbc.play private[play] trait AcolyteDatabaseCompat { db: AcolyteDatabase ⇒ }
cchantep/acolyte
play-jdbc/src/main/play-2.6/AcolyteDatabaseCompat.scala
Scala
lgpl-2.1
97
package ar.com.pablitar import ar.com.pablitar.point.Point case class Bounds[P <: Point[P]](lowerBound: P, upperBound: P) { def bound(p: P) = p.max(lowerBound).min(upperBound) }
pablitar/fun-spaces
fun-spaces-alt2/src/ar/com/pablitar/Bounds.scala
Scala
mit
189
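bound clamps a point component-wise: max against the lower bound, then min against the upper. Assuming Point[P] only requires max and min for this (the trait's full contract is not shown here), a made-up one-dimensional point illustrates the behaviour:

// Hypothetical Point implementation, for illustration only.
case class P1(x: Double) extends Point[P1] {
  def max(other: P1): P1 = P1(math.max(x, other.x))
  def min(other: P1): P1 = P1(math.min(x, other.x))
}

val b = Bounds(P1(0), P1(10))
b.bound(P1(-3)) // P1(0.0)  -- raised to the lower bound
b.bound(P1(4))  // P1(4.0)  -- already inside, unchanged
b.bound(P1(12)) // P1(10.0) -- lowered to the upper bound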
package org.freeour.app.config import java.util.Properties import scala.io.Source /** * Created by Bill Lv on 2/25/15. */ class BaseConfig(propFileName: String) { // Always non-null; remains empty when the resource is missing. protected var properties: Properties = new Properties() val url = getClass.getResource(propFileName) if (url != null) { val source = Source.fromURL(url) try properties.load(source.bufferedReader()) finally source.close() } }
ideaalloc/freeour
src/main/scala/org/freeour/app/config/BaseConfig.scala
Scala
gpl-2.0
406
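A concrete configuration would subclass BaseConfig and read keys off the protected properties field. The resource name and key below are made up for illustration:

// Hypothetical subclass; "/app.properties" and "db.url" are illustrative.
object AppConfig extends BaseConfig("/app.properties") {
  def dbUrl: Option[String] = Option(properties.getProperty("db.url"))
}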
import sbt._ import sbt.Configuration import sbt.Keys._ import de.johoop.jacoco4sbt._ import JacocoPlugin._ import scala.util.Properties trait BuildSettings { import Dependencies._ val Organization = "be.wegenenverkeer" val Version = "0.1.0-SNAPSHOT" val ScalaVersion = "2.10.3" val ScalaBuildOptions = Seq("-unchecked", "-deprecation", "-feature", "-language:reflectiveCalls", "-language:implicitConversions", "-language:postfixOps") lazy val testSettings = Seq( libraryDependencies ++= mainTestDependencies, parallelExecution in Test := false ) def projectSettings(projectName:String, extraDependencies:Seq[ModuleID]) = Seq( organization := Organization, name := projectName, version := Version, scalaVersion := ScalaVersion, scalacOptions := ScalaBuildOptions, parallelExecution := false, resolvers += "Local Maven" at Path.userHome.asFile.toURI.toURL + ".m2/repository", resolvers += Resolver.typesafeRepo("releases"), libraryDependencies ++= extraDependencies ) val publishingCredentials = (for { username <- Option(System.getenv().get("SONATYPE_USERNAME")) password <- Option(System.getenv().get("SONATYPE_PASSWORD")) } yield Seq(Credentials( "Sonatype Nexus Repository Manager", "oss.sonatype.org", username, password) )).getOrElse(Seq()) val publishSettings = Seq( publishMavenStyle := true, pomIncludeRepository := { _ => false}, publishTo := { val nexus = "https://oss.sonatype.org/" if (isSnapshot.value) Some("snapshots" at nexus + "content/repositories/snapshots") else Some("releases" at nexus + "service/local/staging/deploy/maven2") }, pomExtra := pomInfo, credentials ++= publishingCredentials ) def buildSettings(projectName:String, extraDependencies:Seq[ModuleID] = Seq()) = { Defaults.defaultSettings ++ projectSettings(projectName, extraDependencies) ++ testSettings ++ publishSettings ++ jacoco.settings } lazy val pomInfo = <url>https://github.com/WegenenVerkeer/atomium</url> <licenses> <license> <name>MIT license</name> <url>http://opensource.org/licenses/MIT</url> <distribution>repo</distribution> </license> </licenses> <scm> <url>[email protected]:WegenenVerkeer/atomium.git</url> <connection>scm:git:[email protected]:WegenenVerkeer/atomium.git</connection> </scm> <developers> <developer> <id>AWV</id> <name>De ontwikkelaars van AWV</name> <url>http://www.wegenenverkeer.be</url> </developer> </developers> }
kwark/RxHttpClient
project/BuildSettings.scala
Scala
mit
2,714
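In the sbt 0.13-era project/Build.scala style this trait targets, a module pulls the settings in roughly as below; the build object, module id and layout are placeholders:

import sbt._

// Hypothetical usage of the trait above; names are made up.
object ApplicationBuild extends Build with BuildSettings {
  lazy val core = Project(
    id = "core",
    base = file("core"),
    settings = buildSettings("rxhttpclient-core")
  )
}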
package scala.meta package internal package prettyprinters import scala.meta.classifiers._ import scala.meta.prettyprinters._ import Show.{ sequence => s, repeat => r, indent => i, newline => n, meta => m, wrap => w, function => fn } import scala.meta.internal.trees.{root => _, branch => _, _} import scala.meta.internal.tokenizers.Chars._ import scala.meta.internal.tokenizers.keywords import org.scalameta.adt._ import org.scalameta.invariants._ import org.scalameta.unreachable import scala.compat.Platform.EOL object TreeSyntax { def apply[T <: Tree](dialect: Dialect): Syntax[T] = { object syntaxInstances { // NOTE: these groups closely follow non-terminals in the grammar spec from SLS, except for: // 1) we don't care about tracking non-terminals (with m() and/or p()) when that doesn't affect parenthesization // 2) `InfixType ::= CompoundType {id [nl] CompoundType}` is incorrect. Should be `CompoundType | InfixType {id [nl] InfixType}` // 3) `Pattern2 ::= varid ['@' Pattern3]` has become `Pattern2 ::= varid ['@' AnyPattern3]` due to implementational reasons // 4) `Type ::= ... | InfixType [ExistentialClause]` has become `Type ::= ... | AnyInfixType [ExistentialClause]` due to implementational reasons // 5) `FunctionArgTypes ::= InfixType | ...` has become `Type ::= AnyInfixType | ...` due to implementational reasons @root trait SyntacticGroup { def categories: List[String] def precedence: Double } object SyntacticGroup { @branch trait Type extends SyntacticGroup { def categories = List("Type") } object Type { @leaf object ParamTyp extends Type { def precedence = 0 } @leaf object Typ extends Type { def precedence = 1 } @leaf object AnyInfixTyp extends Type { def precedence = 1.5 } @leaf class InfixTyp(op: String) extends Type { def precedence = 2 } @leaf object RefineTyp extends Type { def precedence = 3 } @leaf object WithTyp extends Type { def precedence = 3.5 } @leaf object AnnotTyp extends Type { def precedence = 4 } @leaf object SimpleTyp extends Type { def precedence = 6 } } @branch trait Term extends SyntacticGroup { def categories = List("Term") } object Term { @leaf object Expr extends Term { def precedence = 0 } @leaf object Expr1 extends Term { def precedence = 1 } @leaf object PostfixExpr extends Term { def precedence = 2 } @leaf class InfixExpr(op: String) extends Term { def precedence = 3 } @leaf object PrefixExpr extends Term { def precedence = 4 } @leaf object SimpleExpr extends Term { def precedence = 5 } @leaf object SimpleExpr1 extends Term { def precedence = 6 } } @branch trait Pat extends SyntacticGroup { def categories = List("Pat") } object Pat { @leaf object Pattern extends Pat { def precedence = 0 } @leaf object Pattern1 extends Pat { def precedence = 1 } @leaf object Pattern2 extends Pat { def precedence = 2 } @leaf object AnyPattern3 extends Pat { def precedence = 2.5 } @leaf class Pattern3(op: String) extends Pat { def precedence = 3 } @leaf object SimplePattern extends Pat { def precedence = 6 } } @leaf object Literal extends Term with Pat { override def categories = List("Term", "Pat"); def precedence = 6 } require(Literal.precedence == Term.SimpleExpr1.precedence && Literal.precedence == Pat.SimplePattern.precedence) @leaf object Path extends Type with Term with Pat { override def categories = List("Type", "Term", "Pat"); def precedence = 6 } require(Path.precedence == Type.SimpleTyp.precedence && Path.precedence == Term.SimpleExpr1.precedence && Path.precedence == Pat.SimplePattern.precedence) } import SyntacticGroup.Type._, SyntacticGroup.Term._, 
SyntacticGroup.Pat._, SyntacticGroup.Literal, SyntacticGroup.Path def p(og: SyntacticGroup, t: Tree, left: Boolean = false, right: Boolean = false) = { def opNeedsParens(oo: String, io: String, customAssoc: Boolean, customPrecedence: Boolean): Boolean = { implicit class XtensionMySyntacticInfo(name: String) { def isleftassoc: Boolean = if (customAssoc) name.last != ':' else true def isrightassoc: Boolean = !isleftassoc def precedence: Int = if (customPrecedence) Term.Name(name).precedence else 0 } require(left != right) val (ol, il) = (oo.isleftassoc, io.isleftassoc) if (ol ^ il) true else { val (l, r) = (ol, !ol) val (op, ip) = (oo.precedence, io.precedence) if (op < ip) r else if (op > ip) l else l ^ left } } def groupNeedsParens(og: SyntacticGroup, ig: SyntacticGroup): Boolean = { val result = { require(og.categories.intersect(ig.categories).nonEmpty) (og, ig) match { case (InfixExpr(oo), InfixExpr(io)) => opNeedsParens(oo, io, customAssoc = true, customPrecedence = true) case (InfixTyp(oo), InfixTyp(io)) => opNeedsParens(oo, io, customAssoc = true, customPrecedence = false) case (Pattern3(oo), Pattern3(io)) => opNeedsParens(oo, io, customAssoc = true, customPrecedence = true) case _ => og.precedence > ig.precedence } } // println((og, ig, left, right) + " => " + result) result } s(t) match { case Show.Meta(ig: SyntacticGroup, res) if groupNeedsParens(og, ig) => s("(", res, ")") case res => res } } def kw(keyword: String) = fn(sb => { val prelast = if (sb.length > 1) sb.charAt(sb.length - 2) else ' ' val last = if (sb.length > 0) sb.charAt(sb.length - 1) else ' ' val next = if (keyword.length > 0) keyword(0) else ' ' val danger = { val opThenOp = isOperatorPart(last) && isOperatorPart(next) val underscoreThenOp = isIdentifierPart(prelast) && last == '_' && isOperatorPart(next) opThenOp || underscoreThenOp } if (danger) s(" " +keyword) else s(keyword) }) def templ(templ: Template) = if (templ.early.isEmpty && templ.inits.isEmpty && templ.self.name.is[Name.Anonymous] && templ.self.decltpe.isEmpty && templ.stats.isEmpty) s() else if (templ.inits.nonEmpty || templ.early.nonEmpty) s(" extends ", templ) else s(" ", templ) def guessIsBackquoted(t: Name): Boolean = { def cantBeWrittenWithoutBackquotes(t: Name): Boolean = { // Fold over codepoints for a given string def foldCodepoints[T](value: String, start: T)(f: (Int, T, Int) => T): T = { val length = value.length @annotation.tailrec def work(offset: Int, acc: T): T = { if (offset >= length) acc else { val codepoint = value.codePointAt(offset) work(offset + Character.charCount(codepoint), f(offset, acc, codepoint)) } } work(0, start) } // These rules are transcribed from // https://github.com/scala/scala/blob/2.13.x/spec/01-lexical-syntax.md#lexical-syntax def lexicalWhitespace(codepoint: Int): Boolean = Set[Int]('\\u0020', '\\u0009', '\\u000D', '\\u000A').contains(codepoint) def lexicalLetter(codepoint: Int): Boolean = ( Set[Int]('\\u0024', '\\u005F').contains(codepoint) || Set[Int](Character.LOWERCASE_LETTER, Character.UPPERCASE_LETTER, Character.TITLECASE_LETTER, Character.OTHER_LETTER, Character.LETTER_NUMBER).contains(Character.getType(codepoint)) ) def lexicalDigit(codepoint: Int): Boolean = Set[Int]('0', '1', '2', '3', '4', '5', '6', '7', '8', '9').contains(codepoint) def lexicalParentheses(codepoint: Int): Boolean = Set[Int]('(', ')', '[', ']', '{', '}').contains(codepoint) def lexicalDelimiter(codepoint: Int): Boolean = Set[Int]('`', '\\'', '"', '.', ';', ',').contains(codepoint) def lexicalOperator(codepoint: Int): Boolean = ( 
'\\u0020' <= codepoint && codepoint <= '\\u007E' && ( !lexicalWhitespace(codepoint) && !lexicalLetter(codepoint) && !lexicalDigit(codepoint) && !lexicalParentheses(codepoint) && !lexicalDelimiter(codepoint)) || Set[Int](Character.MATH_SYMBOL, Character.OTHER_SYMBOL).contains(Character.getType(codepoint)) ) sealed trait OperatorState case object Accepted extends OperatorState case object Required extends OperatorState case object Forbidden extends OperatorState sealed trait ValidityState case object Valid extends ValidityState case object Invalid extends ValidityState def validPlainid(string: String): Boolean = { val (_, validity) = foldCodepoints[(OperatorState, ValidityState)](string, (Accepted, Valid))({ // Any invalid state is invalid case (offset , (_ , Invalid), _ ) => (Forbidden, Invalid) // Must start with either a letter or an operator case (offset@0 , (Accepted , Valid), next) if lexicalLetter(next) => (Forbidden, Valid) case (offset@0 , (Accepted , Valid), next) if lexicalOperator(next) => (Required , Valid) // Non-leading underscores reset operator validity case (offset , (Forbidden, Valid), next) if next == '_' => (Accepted , Valid) // Non-leading operators are accepted only after underscores case (offset , (Accepted , Valid), next) if lexicalOperator(next) => (Required , Valid) // Operators must not be followed by non-operators case (offset , (Required , Valid), next) if lexicalOperator(next) => (Required , Valid) // Lexical letters and digits can follow underscores case (offset , (Accepted , Valid), next) if lexicalLetter(next) => (Forbidden, Valid) case (offset , (Accepted , Valid), next) if lexicalDigit(next) => (Forbidden, Valid) // Non-operators must not be followed by operators case (offset , (Forbidden, Valid), next) if lexicalLetter(next) => (Forbidden, Valid) case (offset , (Forbidden, Valid), next) if lexicalDigit(next) => (Forbidden, Valid) // Bail on anything not matched here case (_ , (_ , _ ), next) => (Forbidden, Invalid) }) validity == Valid } t.value != "this" && (keywords.contains(t.value) || t.value.contains("//") || t.value.contains("/*") || t.value.contains("*/") || !validPlainid(t.value)) } def isAmbiguousWithPatVarTerm(t: Term.Name, p: Tree): Boolean = { val looksLikePatVar = t.value.head.isLower && t.value.head.isLetter val thisLocationAlsoAcceptsPatVars = p match { case p: Term.Name => unreachable case p: Term.Select => false case p: Pat.Wildcard => unreachable case p: Pat.Var => false case p: Pat.Bind => unreachable case p: Pat.Alternative => true case p: Pat.Tuple => true case p: Pat.Extract => p.args.exists(_ eq t) case p: Pat.ExtractInfix => (p.lhs eq t) || p.rhs.exists(_ eq t) case p: Pat.Interpolate => p.args.exists(_ eq t) case p: Pat.Typed => unreachable case p: Pat => unreachable case p: Case => p.pat eq t case p: Defn.Val => p.pats.exists(_ eq t) case p: Defn.Var => p.pats.exists(_ eq t) case p: Enumerator.Generator => p.pat eq t case p: Enumerator.Val => p.pat eq t case _ => false } looksLikePatVar && thisLocationAlsoAcceptsPatVars } def isAmbiguousWithPatVarType(t: Type.Name, p: Tree): Boolean = { false } (t, t.parent) match { case (t: Term.Name, Some(p: Tree)) => isAmbiguousWithPatVarTerm(t, p) || cantBeWrittenWithoutBackquotes(t) case (t: Type.Name, Some(p: Tree)) => isAmbiguousWithPatVarType(t, p) || cantBeWrittenWithoutBackquotes(t) case _ => cantBeWrittenWithoutBackquotes(t) } } def guessIsPostfix(t: Term.Select): Boolean = false def guessHasExpr(t: Term.Return): Boolean = t.expr match { case Lit.Unit() => false; case _ => true } def 
guessHasElsep(t: Term.If): Boolean = t.elsep match { case Lit.Unit() => false; case e => true } def guessHasStats(t: Template): Boolean = t.stats.nonEmpty def guessHasBraces(t: Pkg): Boolean = { def isOnlyChildOfOnlyChild(t: Pkg): Boolean = t.parent match { case Some(pkg: Pkg) => isOnlyChildOfOnlyChild(pkg) && pkg.stats.length == 1 case Some(source: Source) => source.stats.length == 1 case None => true case _ => unreachable } !isOnlyChildOfOnlyChild(t) } // Branches implicit def syntaxTree[T <: Tree]: Syntax[T] = Syntax { // Bottom case t: Quasi => if (!dialect.allowUnquotes) throw new UnsupportedOperationException(s"$dialect doesn't support unquoting") if (t.rank > 0) { s("." * (t.rank + 1), w("{", t.tree, "}", !t.tree.is[Quasi])) } else { val allowBraceless = t.tree.is[Term.Name] || t.tree.is[Pat.Var] || t.tree.is[Term.This] || t.tree.is[Pat.Wildcard] implicit val syntaxDialect = dialect.copy(allowTermUnquotes = false, allowPatUnquotes = false, allowMultilinePrograms = true) s("$", w("{", t.tree.syntax, "}", !allowBraceless)) } // Name case t: Name.Anonymous => s("_") case t: Name.Indeterminate => if (guessIsBackquoted(t)) s("`", t.value, "`") else s(t.value) // Term case t: Term.This => val qual = if (t.qual.is[Name.Anonymous]) s() else s(t.qual, ".") m(Path, qual, kw("this")) case t: Term.Super => val thisqual = if (t.thisp.is[Name.Anonymous]) s() else s(t.thisp, ".") val superqual = if (t.superp.is[Name.Anonymous]) s() else s("[", t.superp, "]") m(Path, s(thisqual, kw("super"), superqual)) case t: Term.Name => m(Path, if (guessIsBackquoted(t)) s("`", t.value, "`") else s(t.value)) case t: Term.Select => m(Path, s(p(SimpleExpr, t.qual), if (guessIsPostfix(t)) " " else ".", t.name)) case t: Term.Interpolate => val parts = t.parts.map{ case Lit(part: String) => part } val zipped = parts.zip(t.args).map { case (part, id: Name) if !guessIsBackquoted(id) => s(part, "$", id.value) case (part, arg) => s(part, "${", p(Expr, arg), "}") } val quote = if (parts.exists(s => s.contains("\\n") || s.contains("\\""))) "\\"\\"\\"" else "\\"" m(SimpleExpr1, s(t.prefix, quote, r(zipped), parts.last, quote)) case t: Term.Xml => if (!dialect.allowXmlLiterals) throw new UnsupportedOperationException(s"$dialect doesn't support xml literals") val parts = t.parts.map{ case Lit(part: String) => part } val zipped = parts.zip(t.args).map{ case (part, arg) => s(part, "{", p(Expr, arg), "}") } m(SimpleExpr1, s(r(zipped), parts.last)) case t: Term.Apply => m(SimpleExpr1, s(p(SimpleExpr1, t.fun), t.args)) case t: Term.ApplyType => m(SimpleExpr1, s(p(SimpleExpr, t.fun), t.targs)) case t: Term.ApplyInfix => val args = t.args match { case (Lit.Unit()) :: Nil => s("(())") case (arg: Term) :: Nil => s(p(InfixExpr(t.op.value), arg, right = true)) case args => s(args) } m(InfixExpr(t.op.value), s(p(InfixExpr(t.op.value), t.lhs, left = true), " ", t.op, t.targs, " ", args)) case t: Term.ApplyUnary => m(PrefixExpr, s(t.op, p(SimpleExpr, t.arg))) case t: Term.Assign => m(Expr1, s(p(SimpleExpr1, t.lhs), " ", kw("="), " ", p(Expr, t.rhs))) case t: Term.Return => m(Expr1, s(kw("return"), if (guessHasExpr(t)) s(" ", p(Expr, t.expr)) else s())) case t: Term.Throw => m(Expr1, s(kw("throw"), " ", p(Expr, t.expr))) case t: Term.Ascribe => m(Expr1, s(p(PostfixExpr, t.expr), kw(":"), " ", t.tpe)) case t: Term.Annotate => m(Expr1, s(p(PostfixExpr, t.expr), kw(":"), " ", t.annots)) case t: Term.Tuple => m(SimpleExpr1, s("(", r(t.args, ", "), ")")) case t: Term.Block => import Term.{Block, Function} def pstats(s: List[Stat]) = 
r(s.map(i(_)), "") t match { case Block(Function(Term.Param(mods, name: Term.Name, tptopt, _) :: Nil, Block(stats)) :: Nil) if mods.exists(_.is[Mod.Implicit]) => m(SimpleExpr, s("{ ", kw("implicit"), " ", name, tptopt.map(s(kw(":"), " ", _)).getOrElse(s()), " ", kw("=>"), " ", pstats(stats), n("}"))) case Block(Function(Term.Param(mods, name: Term.Name, None, _) :: Nil, Block(stats)) :: Nil) => m(SimpleExpr, s("{ ", name, " ", kw("=>"), " ", pstats(stats), n("}"))) case Block(Function(Term.Param(_, _: Name.Anonymous, _, _) :: Nil, Block(stats)) :: Nil) => m(SimpleExpr, s("{ ", kw("_"), " ", kw("=>"), " ", pstats(stats), n("}"))) case Block(Function(params, Block(stats)) :: Nil) => m(SimpleExpr, s("{ (", r(params, ", "), ") => ", pstats(stats), n("}"))) case _ => m(SimpleExpr, if (t.stats.isEmpty) s("{}") else s("{", pstats(t.stats), n("}"))) } case t: Term.If => m(Expr1, s(kw("if"), " (", t.cond, ") ", p(Expr, t.thenp), if (guessHasElsep(t)) s(" ", kw("else"), " ", p(Expr, t.elsep)) else s())) case t: Term.Match => m(Expr1, s(p(PostfixExpr, t.expr), " ", kw("match"), " {", r(t.cases.map(i(_)), ""), n("}"))) case t: Term.Try => m(Expr1, s(kw("try"), " ", p(Expr, t.expr), if (t.catchp.nonEmpty) s(" ", kw("catch"), " {", r(t.catchp.map(i(_)), ""), n("}")) else s(""), t.finallyp.map { finallyp => s(" ", kw("finally"), " ", finallyp) }.getOrElse(s()))) case t: Term.TryWithHandler => m(Expr1, s(kw("try"), " ", p(Expr, t.expr), " ", kw("catch"), " ", t.catchp, t.finallyp.map { finallyp => s(" ", kw("finally"), " ", finallyp) }.getOrElse(s()))) case t: Term.Function => t match { case Term.Function(Term.Param(mods, name: Term.Name, tptopt, _) :: Nil, body) if mods.exists(_.is[Mod.Implicit]) => m(Expr, s(kw("implicit"), " ", name, tptopt.map(s(kw(":"), " ", _)).getOrElse(s()), " ", kw("=>"), " ", p(Expr, body))) case Term.Function(Term.Param(mods, name: Term.Name, None, _) :: Nil, body) => m(Expr, s(name, " ", kw("=>"), " ", p(Expr, body))) case Term.Function(Term.Param(_, _: Name.Anonymous, decltpeOpt, _) :: Nil, body) => val param = decltpeOpt match { case Some(decltpe) => s(kw("("), kw("_"), kw(":"), decltpe, kw(")")) case None => s(kw("_")) } m(Expr, param, " ", kw("=>"), " ", p(Expr, body)) case Term.Function(params, body) => m(Expr, s("(", r(params, ", "), ") ", kw("=>"), " ", p(Expr, body))) } case t: Term.PartialFunction => m(SimpleExpr, s("{", r(t.cases.map(i(_)), ""), n("}"))) case t: Term.While => m(Expr1, s(kw("while"), " (", t.expr, ") ", p(Expr, t.body))) case t: Term.Do => m(Expr1, s(kw("do"), " ", p(Expr, t.body), " ", kw("while"), " (", t.expr, ")")) case t: Term.For => m(Expr1, s(kw("for"), " (", r(t.enums, "; "), ") ", t.body)) case t: Term.ForYield => m(Expr1, s(kw("for"), " (", r(t.enums, "; "), ") ", kw("yield"), " ", t.body)) case t: Term.New => m(SimpleExpr, s(kw("new"), " ", t.init)) case t: Term.NewAnonymous => val needsExplicitBraces = { val selfIsEmpty = t.templ.self.name.is[Name.Anonymous] && t.templ.self.decltpe.isEmpty t.templ.early.isEmpty && t.templ.inits.length < 2 && selfIsEmpty && t.templ.stats.isEmpty } m(SimpleExpr, s(kw("new"), " ", t.templ), w(" {", "", "}", needsExplicitBraces)) case _: Term.Placeholder => m(SimpleExpr1, kw("_")) case t: Term.Eta => m(SimpleExpr, s(p(SimpleExpr1, t.expr), " ", kw("_"))) case t: Term.Repeated => s(p(PostfixExpr, t.expr), kw(":"), " ", kw("_*")) case t: Term.Param => val mods = t.mods.filter(!_.is[Mod.Implicit]) // NOTE: `implicit` in parameters is skipped in favor of `implicit` in the enclosing parameter list s(w(mods, " "), 
t.name, t.decltpe, t.default.map(s(" ", kw("="), " ", _)).getOrElse(s())) // Type case t: Type.Name => m(Path, if (guessIsBackquoted(t)) s("`", t.value, "`") else s(t.value)) case t: Type.Select => m(SimpleTyp, s(t.qual, kw("."), t.name)) case t: Type.Project => m(SimpleTyp, s(p(SimpleTyp, t.qual), kw("#"), t.name)) case t: Type.Singleton => m(SimpleTyp, s(p(SimpleExpr1, t.ref), ".", kw("type"))) case t: Type.Apply => m(SimpleTyp, s(p(SimpleTyp, t.tpe), kw("["), r(t.args.map(arg => p(Typ, arg)), ", "), kw("]"))) case t: Type.ApplyInfix => m(InfixTyp(t.op.value), s(p(InfixTyp(t.op.value), t.lhs, left = true), " ", t.op, " ", p(InfixTyp(t.op.value), t.rhs, right = true))) case t @ (_: Type.Function | _: Type.ImplicitFunction) => val (prefix, tParams, tRes) = t match { case Type.Function(params, res) => (s(), params, res) case Type.ImplicitFunction(params, res) => (s(kw("implicit"), " "), params, res) } val params = tParams match { case param +: Nil if !param.is[Type.Tuple] => s(p(AnyInfixTyp, param)) case params => s("(", r(params.map(param => p(ParamTyp, param)), ", "), ")") } m(Typ, s(prefix, params, " ", kw("=>"), " ", p(Typ, tRes))) case t: Type.Tuple => m(SimpleTyp, s("(", r(t.args, ", "), ")")) case t: Type.With => if (!dialect.allowWithTypes) throw new UnsupportedOperationException(s"$dialect doesn't support with types") m(WithTyp, s(p(WithTyp, t.lhs), " with ", p(WithTyp, t.rhs))) case t: Type.And => if (!dialect.allowAndTypes) throw new UnsupportedOperationException(s"$dialect doesn't support and types") m(InfixTyp("&"), s(p(InfixTyp("&"), t.lhs, left = true), " ", "&", " ", p(InfixTyp("&"), t.rhs, right = true))) case t: Type.Or => if (!dialect.allowOrTypes) throw new UnsupportedOperationException(s"$dialect doesn't support or types") m(InfixTyp("|"), s(p(InfixTyp("|"), t.lhs, left = true), " ", "|", " ", p(InfixTyp("|"), t.rhs, right = true))) case t: Type.Refine => m(RefineTyp, t.tpe.map(tpe => s(p(WithTyp, tpe), " ")).getOrElse(s("")), "{", w(" ", r(t.stats, "; "), " ", t.stats.nonEmpty), "}") case t: Type.Existential => m(Typ, s(p(AnyInfixTyp, t.tpe), " ", kw("forSome"), " { ", r(t.stats, "; "), " }")) case t: Type.Annotate => m(AnnotTyp, s(p(SimpleTyp, t.tpe), " ", t.annots)) case t: Type.Lambda => m(Typ, t.tparams, " ", kw("=>"), " ", p(Typ, t.tpe)) case t: Type.Method => m(Typ, t.paramss, kw(":"), " ", p(Typ, t.tpe)) case t: Type.Placeholder => m(SimpleTyp, s(kw("_"), t.bounds)) case t: Type.Bounds => s(t.lo.map(lo => s(" ", kw(">:"), " ", p(Typ, lo))).getOrElse(s()), t.hi.map(hi => s(" ", kw("<:"), " ", p(Typ, hi))).getOrElse(s())) case t: Type.Repeated => m(ParamTyp, s(p(Typ, t.tpe), kw("*"))) case t: Type.ByName => m(ParamTyp, s(kw("=>"), " ", p(Typ, t.tpe))) case t: Type.Var => m(SimpleTyp, s(t.name.value)) case t: Type.Param => val mods = t.mods.filter(m => !m.is[Mod.Covariant] && !m.is[Mod.Contravariant]) require(t.mods.length - mods.length <= 1) val variance = t.mods.foldLeft("")((curr, m) => if (m.is[Mod.Covariant]) "+" else if (m.is[Mod.Contravariant]) "-" else curr) val tbounds = s(t.tbounds) val vbounds = { if (t.vbounds.nonEmpty && !dialect.allowViewBounds) throw new UnsupportedOperationException(s"$dialect doesn't support view bounds") r(t.vbounds.map { s(" ", kw("<%"), " ", _) }) } val cbounds = r(t.cbounds.map { s(kw(":"), " ", _) }) s(w(mods, " "), variance, t.name, t.tparams, tbounds, vbounds, cbounds) // Pat case t: Pat.Var => m(SimplePattern, s(if (guessIsBackquoted(t.name)) s"`${t.name.value}`" else t.name.value)) case _: Pat.Wildcard => m(SimplePattern, 
kw("_")) case _: Pat.SeqWildcard => m(SimplePattern, kw("_*")) case t: Pat.Bind => val separator = t.rhs match { case Pat.SeqWildcard() => if (dialect.allowAtForExtractorVarargs) s(" ", kw("@")) else if (dialect.allowColonForExtractorVarargs) s(kw(":")) else throw new UnsupportedOperationException(s"$dialect doesn't support extractor varargs") case _ => s(" ", kw("@")) } m(Pattern2, s(p(SimplePattern, t.lhs), separator, " ", p(AnyPattern3, t.rhs))) case t: Pat.Alternative => m(Pattern, s(p(Pattern, t.lhs), " ", kw("|"), " ", p(Pattern, t.rhs))) case t: Pat.Tuple => m(SimplePattern, s("(", r(t.args, ", "), ")")) case t: Pat.Extract => m(SimplePattern, s(t.fun, t.args)) case t: Pat.ExtractInfix => m(Pattern3(t.op.value), s(p(Pattern3(t.op.value), t.lhs, left = true), " ", t.op, " ", t.rhs match { case pat :: Nil => s(p(Pattern3(t.op.value), pat, right = true)) case pats => s(pats) })) case t: Pat.Interpolate => val parts = t.parts.map{ case Lit(part: String) => part } val zipped = parts.zip(t.args).map { case (part, id: Name) if !guessIsBackquoted(id) => s(part, "$", id.value) case (part, arg) => s(part, "${", arg, "}") } m(SimplePattern, s(t.prefix, "\\"", r(zipped), parts.last, "\\"")) case t: Pat.Xml => if (!dialect.allowXmlLiterals) throw new UnsupportedOperationException(s"$dialect doesn't support xml literals") val parts = t.parts.map{ case Lit(part: String) => part } val zipped = parts.zip(t.args).map{ case (part, arg) => s(part, "{", arg, "}") } m(SimplePattern, s(r(zipped), parts.last)) case Pat.Typed(lhs, rhs : Lit) => if (dialect.allowLiteralTypes) m(Pattern1, s(p(SimplePattern, lhs), kw(":"), " ", p(Literal, rhs))) else throw new UnsupportedOperationException(s"$dialect doesn't support literal types") case t: Pat.Typed => m(Pattern1, s(p(SimplePattern, t.lhs), kw(":"), " ", p(RefineTyp, t.rhs))) // Lit case Lit.Boolean(value) => m(Literal, s(value.toString)) case Lit.Byte(value) => m(Literal, s("ByteLiterals.", if (value == 0) "Zero" else if (value > 0) "Plus" + value else "Minus" + value)) case Lit.Short(value) => m(Literal, s("ShortLiterals.", if (value == 0) "Zero" else if (value > 0) "Plus" + value else "Minus" + value)) case Lit.Int(value) => m(Literal, s(value.toString)) case Lit.Long(value) => m(Literal, s(value.toString + "L")) case t @ Lit.Float(value) => val n = value.toFloat if (java.lang.Float.isNaN(n)) s("Float.NaN") else { n match { case Float.PositiveInfinity => s("Float.PositiveInfinity") case Float.NegativeInfinity => s("Float.NegativeInfinity") case _ => s(value, "f") } } case t @ Lit.Double(value) => val n = value.toDouble if (java.lang.Double.isNaN(n)) s("Double.NaN") else { n match { case Double.PositiveInfinity => s("Double.PositiveInfinity") case Double.NegativeInfinity => s("Double.NegativeInfinity") case _ => s(value, "d") } } case Lit.Char(value) => m(Literal, s(enquote(value.toString, SingleQuotes))) // Strings should be triple-quoted regardless of what newline style is used. 
case Lit.String(value) => m(Literal, s(enquote(value.toString, if (value.contains("\\n")) TripleQuotes else DoubleQuotes))) case Lit.Symbol(value) => m(Literal, s("'", value.name)) case Lit.Null() => m(Literal, s(kw("null"))) case Lit.Unit() => m(Literal, s("()")) // Member case t: Decl.Val => s(w(t.mods, " "), kw("val"), " ", r(t.pats, ", "), kw(":"), " ", t.decltpe) case t: Decl.Var => s(w(t.mods, " "), kw("var"), " ", r(t.pats, ", "), kw(":"), " ", t.decltpe) case t: Decl.Type => s(w(t.mods, " "), kw("type"), " ", t.name, t.tparams, t.bounds) case t: Decl.Def => s(w(t.mods, " "), kw("def"), " ", t.name, t.tparams, t.paramss, kw(":"), " ", t.decltpe) case t: Defn.Val => s(w(t.mods, " "), kw("val"), " ", r(t.pats, ", "), t.decltpe, " ", kw("="), " ", t.rhs) case t: Defn.Var => s(w(t.mods, " "), kw("var"), " ", r(t.pats, ", "), t.decltpe, " ", kw("="), " ", t.rhs.map(s(_)).getOrElse(s(kw("_")))) case t: Defn.Type => s(w(t.mods, " "), kw("type"), " ", t.name, t.tparams, " ", kw("="), " ", t.body) case t: Defn.Class => s(w(t.mods, " "), kw("class"), " ", t.name, t.tparams, w(" ", t.ctor, t.ctor.mods.nonEmpty), templ(t.templ)) case t: Defn.Trait => if (dialect.allowTraitParameters || t.ctor.mods.isEmpty) { s(w(t.mods, " "), kw("trait"), " ", t.name, t.tparams, w(" ", t.ctor, t.ctor.mods.nonEmpty), templ(t.templ)) } else { throw new UnsupportedOperationException(s"$dialect doesn't support trait parameters") } case t: Defn.Object => s(w(t.mods, " "), kw("object"), " ", t.name, templ(t.templ)) case t: Defn.Def => s(w(t.mods, " "), kw("def"), " ", t.name, t.tparams, t.paramss, t.decltpe, " = ", t.body) case t: Defn.Macro => s(w(t.mods, " "), kw("def"), " ", t.name, t.tparams, t.paramss, t.decltpe, " ", kw("="), " ", kw("macro"), " ", t.body) case t: Pkg => if (guessHasBraces(t)) s(kw("package"), " ", t.ref, " {", r(t.stats.map(i(_)), ""), n("}")) else s(kw("package"), " ", t.ref, r(t.stats.map(n(_)))) case t: Pkg.Object => s(kw("package"), " ", w(t.mods, " "), kw("object"), " ", t.name, templ(t.templ)) case t: Ctor.Primary => s(w(t.mods, " ", t.mods.nonEmpty && t.paramss.nonEmpty), t.paramss) case t: Ctor.Secondary => if (t.stats.isEmpty) s(w(t.mods, " "), kw("def"), " ", kw("this"), t.paramss, " = ", t.init) else s(w(t.mods, " "), kw("def"), " ", kw("this"), t.paramss, " {", i(t.init), "", r(t.stats.map(i(_)), ""), n("}")) // Init case t: Init => s(if (t.tpe.is[Type.Singleton]) kw("this") else p(AnnotTyp, t.tpe), t.argss) // Self case t: Self => s(t.name, t.decltpe) // Template case t: Template => val isSelfEmpty = t.self.name.is[Name.Anonymous] && t.self.decltpe.isEmpty val isSelfNonEmpty = !isSelfEmpty val isBodyEmpty = isSelfEmpty && t.stats.isEmpty val isTemplateEmpty = t.early.isEmpty && t.inits.isEmpty && isBodyEmpty if (isTemplateEmpty) s() else { val pearly = if (!t.early.isEmpty) s("{ ", r(t.early, "; "), " } with ") else s() val pparents = w(r(t.inits, " with "), " ", !t.inits.isEmpty && !isBodyEmpty) val pbody = { val isOneLiner = t.stats.length == 0 || (t.stats.length == 1 && !s(t.stats.head).toString.contains(EOL)) (isSelfNonEmpty, t.stats) match { case (false, Nil) => s() case (false, List(stat)) if isOneLiner => s("{ ", stat, " }") case (false, stats) => s("{", r(stats.map(i(_)), ""), n("}")) case (true, Nil) => s("{ ", t.self, " => }") case (true, List(stat)) if isOneLiner => s("{ ", t.self, " => ", stat, " }") case (true, stats) => s("{ ", t.self, " =>", r(stats.map(i(_)), ""), n("}")) } } s(pearly, pparents, pbody) } // Mod case Mod.Annot(init) => s(kw("@"), p(SimpleTyp, 
init.tpe), init.argss) case Mod.Private(Name.Anonymous()) => s(kw("private")) case Mod.Private(within) => s(kw("private"), kw("["), within, kw("]")) case Mod.Protected(Name.Anonymous()) => s(kw("protected")) case Mod.Protected(within) => s(kw("protected"), kw("["), within, kw("]")) case _: Mod.Implicit => kw("implicit") case _: Mod.Final => kw("final") case _: Mod.Sealed => kw("sealed") case _: Mod.Override => kw("override") case _: Mod.Case => kw("case") case _: Mod.Abstract => kw("abstract") case _: Mod.Covariant => kw("+") case _: Mod.Contravariant => kw("-") case _: Mod.Lazy => kw("lazy") case _: Mod.ValParam => kw("val") case _: Mod.VarParam => kw("var") case _: Mod.Inline => if (!dialect.allowInlineMods) throw new UnsupportedOperationException(s"$dialect doesn't support inline modifiers") kw("inline") // Enumerator case t: Enumerator.Val => s(p(Pattern1, t.pat), " = ", p(Expr, t.rhs)) case t: Enumerator.Generator => s(p(Pattern1, t.pat), " <- ", p(Expr, t.rhs)) case t: Enumerator.Guard => s(kw("if"), " ", p(PostfixExpr, t.cond)) // Import case t: Importee.Name => s(t.name) case t: Importee.Rename => s(t.name, " ", kw("=>"), " ", t.rename) case t: Importee.Unimport => s(t.name, " ", kw("=>"), " ", kw("_")) case _: Importee.Wildcard => kw("_") case t: Importer => s(t.ref, ".", t.importees) case t: Import => s(kw("import"), " ", r(t.importers, ", ")) // Case case t: Case => val ppat = p(Pattern, t.pat) val pcond = t.cond.map(cond => s(" ", kw("if"), " ", p(PostfixExpr, cond))).getOrElse(s()) val isOneLiner = { def isOneLiner(t: Case) = t.stats match { case Nil => true case head :: Nil => head.is[Lit] || head.is[Term.Name] case _ => false } t.parent match { case Some(Term.Match(_, cases)) => cases.forall(isOneLiner) case Some(Term.PartialFunction(cases)) => cases.forall(isOneLiner) case _ => isOneLiner(t) } } val pbody = (t.stats, isOneLiner) match { case (Nil, true) => s("") case (List(stat), true) => s(" ", stat) case (stats, _) => r(stats.map(i(_)), "") } s("case ", ppat, pcond, " ", kw("=>"), pbody) // Source case t: Source => r(t.stats, EOL) } // Multiples and optionals implicit def syntaxArgs: Syntax[List[Term]] = Syntax { case (b: Term.Block) :: Nil => s(" ", b) case (f @ Term.Function(params, _)) :: Nil if !params.exists(_.decltpe.isEmpty) => s(" { ", f, " }") case args => s("(", r(args, ", "), ")") } implicit def syntaxArgss: Syntax[List[List[Term]]] = Syntax { r(_) } implicit def syntaxTargs: Syntax[List[Type]] = Syntax { targs => if (targs.isEmpty) s() else s("[", r(targs, ", "), "]") } implicit def syntaxPats: Syntax[List[Pat]] = Syntax { pats => s("(", r(pats, ", "), ")") } implicit def syntaxMods: Syntax[List[Mod]] = Syntax { mods => if (mods.nonEmpty) r(mods, " ") else s() } implicit def syntaxAnnots: Syntax[List[Mod.Annot]] = Syntax { annots => if (annots.nonEmpty) r(annots, " ") else s() } implicit def syntaxParams: Syntax[List[Term.Param]] = Syntax { params => s("(", r(params, ", "), ")") } implicit def syntaxParamss: Syntax[List[List[Term.Param]]] = Syntax { paramss => r(paramss.map(params => { s("(", w("implicit ", r(params, ", "), params.exists(_.mods.exists(_.is[Mod.Implicit]))), ")") }), "") } implicit def syntaxTparams: Syntax[List[Type.Param]] = Syntax { tparams => if (tparams.nonEmpty) s("[", r(tparams, ", "), "]") else s() } implicit def syntaxTypeOpt: Syntax[Option[Type]] = Syntax { _.map { t => s(kw(":"), " ", t) }.getOrElse(s()) } implicit def syntaxImportee: Syntax[List[Importee]] = Syntax { case (t: Importee.Name) :: Nil => s(t) case (t: Importee.Wildcard) 
:: Nil => s(t) case (t: Importee.Rename) :: Nil => s("{", t, "}") case importees => s("{ ", r(importees, ", "), " }") } } // NOTE: This is the current state of the art of smart prettyprinting. // If we prettyprint a tree that's just been parsed with the same dialect, // then we retain formatting. Otherwise, we don't, not even in the smallest detail. // I expect to improve on this in the near future, because we had it much better until recently. Syntax { (x: T) => x.origin match { // NOTE: Options don't really matter, // because if we've parsed a tree, it's not gonna contain lazy seqs anyway. // case Origin.Parsed(_, originalDialect, _) if dialect == originalDialect && options == Options.Eager => case Origin.Parsed(_, originalDialect, _) if dialect == originalDialect => s(x.pos.text) case _ => syntaxInstances.syntaxTree[T].apply(x) } } } }
olafurpg/scalameta
scalameta/trees/shared/src/main/scala/scala/meta/internal/prettyprinters/TreeSyntax.scala
Scala
bsd-3-clause
39,419
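The SyntacticGroup precedences above decide where the printer must insert parentheses. A small demonstration through the public API, using freshly built trees (which take the prettyprinting path rather than the retained-source shortcut at the end of the file):

import scala.meta._

// Quasiquote-built trees carry no parsed origin, so .syntax goes through TreeSyntax.
val sum  = q"a + b"
val prod = q"$sum * c"
println(prod.syntax) // "(a + b) * c" -- parens added because * binds tighter than +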
/* * Copyright (C) 2016 Nikos Katzouris * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package oled.mwua import akka.actor.{ActorSystem, Props} import app.runutils.CMDArgs import app.runutils.IOHandling.MongoSource import com.mongodb.casbah.{MongoClient, MongoCollection} import com.typesafe.scalalogging.LazyLogging import experiments.caviar.FullDatasetHoldOut import experiments.caviar.FullDatasetHoldOut.MongoDataOptions import logic.Examples.Example import oled.mwua.Runner.logger object Runner_Streaming extends LazyLogging { def main(args: Array[String]) = { val argsok = CMDArgs.argsOk(args) if (!argsok._1) { logger.error(argsok._2); System.exit(-1) } else { val runningOptions = CMDArgs.getOLEDInputArgs(args) val trainingDataOptions = new StreamingMongoDataOptions(dbName = runningOptions.train, targetConcept = runningOptions.targetHLE) val testingDataOptions = trainingDataOptions val trainingDataFunction: StreamingMongoDataOptions => Iterator[Example] = getMongoData val testingDataFunction: StreamingMongoDataOptions => Iterator[Example] = getMongoData val system = ActorSystem("HoeffdingLearningSystem") // use "start" instead to evaluate a hand-crafted theory; the whole thing is hard-coded in Learner_NEW val startMsg = "start-streaming" // alternatives: "start", or: if (runningOptions.evalth != "None") "eval" else "start" system.actorOf(Props(new Learner_NEW(runningOptions, trainingDataOptions, testingDataOptions, trainingDataFunction, testingDataFunction)), name = "Learner") ! startMsg } } class StreamingMongoDataOptions(val dbName: String, val chunkSize: Int = 1, val limit: Double = Double.PositiveInfinity.toInt, val targetConcept: String = "None", val sortDbByField: String = "time", val sort: String = "ascending") extends MongoSource def getMongoData(opts: StreamingMongoDataOptions): Iterator[Example] = { val mc = MongoClient() val collection: MongoCollection = mc(opts.dbName)("examples") val dataIterator = opts.allData(collection, opts.sort, opts.sortDbByField) map { x => val e = Example(x) opts.targetConcept match { case "None" => new Example(annot = e.annotation, nar = e.narrative, _time = e.time) case _ => new Example(annot = e.annotation filter (_.contains(opts.targetConcept)), nar = e.narrative, _time = e.time) } } dataIterator //.take(1000) } }
nkatzz/OLED
src/main/scala/oled/mwua/Runner_Streaming.scala
Scala
gpl-3.0
3,102
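For reference, the options class and provider above would be exercised roughly as below; the database name and target concept are placeholders:

// Hypothetical invocation; "caviar_train" and "meeting" are made-up values.
import logic.Examples.Example
import oled.mwua.Runner_Streaming._

val opts = new StreamingMongoDataOptions(dbName = "caviar_train", targetConcept = "meeting")
val examples: Iterator[Example] = getMongoData(opts)
examples.take(3).foreach(e => println(e.time))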
package org.jetbrains.plugins.scala.testingSupport.locationProvider import java.util.Collections import java.{util => ju} import com.intellij.execution.testframework.sm.runner.SMTestLocator import com.intellij.execution.{Location, PsiLocation} import com.intellij.openapi.editor.Document import com.intellij.openapi.project.Project import com.intellij.psi._ import com.intellij.psi.search.GlobalSearchScope import org.jetbrains.plugins.scala.caches.ScalaShortNamesCacheManager import org.jetbrains.plugins.scala.extensions._ import org.jetbrains.plugins.scala.lang.psi.ElementScope import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.{ScObject, ScTypeDefinition} import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiManager import org.jetbrains.plugins.scala.testingSupport.locationProvider.ScalaTestLocationProvider._ import scala.jdk.CollectionConverters.SeqHasAsJava /** * User: Alexander Podkhalyuzin * Date: 24.02.2009 * * For Specs, Specs2 and ScalaTest * * @see [[org.jetbrains.plugins.scala.testingSupport.test.ui.ScalaTestRunLineMarkerProvider]] */ class ScalaTestLocationProvider extends SMTestLocator { override def getLocation(protocolId: String, locationData: String, project: Project, scope: GlobalSearchScope): ju.List[Location[_ <: PsiElement]] = protocolId match { case ScalaProtocol => // TODO: do we even need this separation? why not use just scalatest://? getLocationForScalaProtocol(locationData, project, scope) case ScalaTestProtocol => getLocationForScalaTestProtocol(locationData, project, scope) case _ => Collections.emptyList() } } object ScalaTestLocationProvider { /** note: used not only in ScalaTest framework, but e.g. in uTest * * @see [[org.jetbrains.plugins.scala.testingSupport.uTest.UTestReporter]] */ private val ScalaTestProtocol = "scalatest" val ScalaTestProtocolPrefix = "scalatest://" // where is this protocol used? don't we only use the "scalatest" prefix for now? private val ScalaProtocol = "scala" private val SpecsHintPattern = """(\\S+)\\?filelocation=(.+):(.+)""".r private val ScalaTestTopOfClassPattern = """TopOfClass:(\\S+)TestName:(.+)""".r private val ScalaTestTopOfMethodPattern = """TopOfMethod:(\\S+):(\\S+)TestName:(.+)""".r private val ScalaTestLineInFilePattern = """LineInFile:(\\S+):(.+):(.+)TestName:(.+)""".r def isTestUrl(url: String): Boolean = url.startsWith(ScalaTestLocationProvider.ScalaTestProtocolPrefix) def getClassFqn(locationUrl: String): Option[String] = locationUrl.stripPrefix(ScalaTestLocationProvider.ScalaTestProtocolPrefix) match { case ScalaTestTopOfClassPattern(classFqn, _) => Some(classFqn) case ScalaTestTopOfMethodPattern(classFqn, _, _) => Some(classFqn) case ScalaTestLineInFilePattern(classFqn, _, _, _) => Some(classFqn) case _ => None } private def getLocationForScalaTestProtocol(locationData: String, project: Project, scope: GlobalSearchScope): ju.List[Location[_ <: PsiElement]] = { val res = new ju.ArrayList[Location[_ <: PsiElement]]() locationData match { case ScalaTestTopOfClassPattern(classFqn, testName) => val classes = ScalaShortNamesCacheManager.getInstance(project).getClassesByFQName(classFqn, scope) val clazz = classes.find(!_.isInstanceOf[ScObject]).orElse(classes.headOption) clazz.foreach(c => res.add(PsiLocationWithName(project, c, testName))) case ScalaTestTopOfMethodPattern(classFqn, methodName, testName) => val classes = ScalaShortNamesCacheManager.getInstance(project). 
getClassesByFQName(classFqn, GlobalSearchScope.allScope(project)) val methodOwner = classes.find(!_.isInstanceOf[ScObject]).orElse(classes.headOption) methodOwner match { case Some(td: ScTypeDefinition) => td.methodsByName(methodName).foreach { signature => res.add(PsiLocationWithName(project, signature.method, testName)) } case _ => } if (res.isEmpty && methodOwner.isDefined) { val methods = methodOwner.get.findMethodsByName(methodName, false) methods.foreach { method => res.add(PsiLocationWithName(project, method, testName)) } } case ScalaTestLineInFinePattern(classFqn, fileName, lineNumber, testName) => val classes = ScalaPsiManager.instance(project).getCachedClass(GlobalSearchScope.allScope(project), classFqn) val supers = classes.iterator.flatMap(_.allSupers) val found = (classes.iterator ++ supers).find(_.containingFile.exists(_.name == fileName)) found match { case Some(file) => res.add(createLocationFor(project, file.getContainingFile, lineNumber.toInt, Some(testName))) case _ => } case _ => } res } // TODO: fix SCL-8859 // Classname is not actually classFqn! // Spec2 reports some trash in it's location line // (see org.jetbrains.plugins.scala.testingSupport.specs2.Spec2Utils.parseLocation) // Suppose you have some test class `class MySpec1 extends Specification` in package `org.example` // Spec2 will report `Specification.s2(MySpec1.scala:7)` as a location line! // Notice that: // 1) class name is not actually test class name but the name of the base class // 2) class name is not fully qualified // 3) file name is not relevant to sources dir // So there is no possibility to distinguish between different test classes with same name in different packages! private def getLocationForScalaProtocol(locationData: String, project: Project, scope: GlobalSearchScope): ju.List[Location[_ <: PsiElement]] = locationData match { case SpecsHintPattern(className, fileName, lineNumber) => val classes = ScalaShortNamesCacheManager.getInstance(project).getClassesByFQName(className, scope) val found = classes.find(c => Option(c.getContainingFile).exists(_.name == fileName)) found match { case Some(file) => val res = new ju.ArrayList[Location[_ <: PsiElement]]() res.add(createLocationFor(project, file.getContainingFile, lineNumber.toInt)) res case _ => searchForClassByUnqualifiedName(project, className).toSeq.asJava } case _ => searchForClassByUnqualifiedName(project, locationData).toSeq.asJava } private def searchForClassByUnqualifiedName(project: Project, locationData: String): Option[Location[_ <: PsiElement]] = { val clazz = ElementScope(project).getCachedClass(locationData) val location = clazz.map(PsiLocation.fromPsiElement[PsiClass](project, _)) location } private def createLocationFor( project: Project, psiFile: PsiFile, lineNum: Int, withName: Option[String] = None ): Location[_ <: PsiElement] = { assert(lineNum > 0) val doc: Document = PsiDocumentManager.getInstance(project).getDocument(psiFile) if (doc == null) { return null } val lineCount: Int = doc.getLineCount var lineStartOffset: Int = 0 var endOffset: Int = 0 if (lineNum <= lineCount) { lineStartOffset = doc.getLineStartOffset(lineNum - 1) endOffset = doc.getLineEndOffset(lineNum - 1) } else { lineStartOffset = 0 endOffset = doc.getTextLength } var offset: Int = lineStartOffset var elementAtLine: PsiElement = null var found = false while (offset <= endOffset && !found && { elementAtLine = psiFile.findElementAt(offset) elementAtLine != null }) { if (!elementAtLine.isInstanceOf[PsiWhiteSpace]) { found = true } val length: Int = 
elementAtLine.getTextLength offset += (if (length > 1) length - 1 else 1) } withName match { case Some(testName) => PsiLocationWithName(project, if (elementAtLine != null) elementAtLine else psiFile, testName) case _ => PsiLocation.fromPsiElement(project, if (elementAtLine != null) elementAtLine else psiFile) } } }
JetBrains/intellij-scala
scala/scala-impl/src/org/jetbrains/plugins/scala/testingSupport/locationProvider/ScalaTestLocationProvider.scala
Scala
apache-2.0
8,141
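For reference, the scalatest:// location strings parsed above follow a small ad-hoc grammar (TopOfClass, TopOfMethod, and LineInFile variants). A standalone sketch of the extraction, with the two simpler patterns transcribed from the file (written with single backslashes, as they would appear in actual triple-quoted Scala source):

val TopOfClass  = """TopOfClass:(\S+)TestName:(.+)""".r
val TopOfMethod = """TopOfMethod:(\S+):(\S+)TestName:(.+)""".r

def classFqn(url: String): Option[String] =
  url.stripPrefix("scalatest://") match {
    case TopOfClass(fqn, _)     => Some(fqn)
    case TopOfMethod(fqn, _, _) => Some(fqn)
    case _                      => None
  }

classFqn("scalatest://TopOfClass:org.example.MySpecTestName:should work")
// Some("org.example.MySpec") -- the regex backtracks to the literal "TestName:" marker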
package womtool import wom.graph.GraphNodePort.{InputPort, OutputPort} import wom.graph._ import wom.graph.expression.ExpressionNode package object graph { private[graph] def dotSafe(s: String) = s""""${s.replaceAllLiterally("\\"", "\\\\\\"")}"""" private[graph] implicit class GraphNodeGraphics(val graphNode: GraphNode) extends AnyVal { def graphFillColor = graphNode match { case _: ConditionalNode | _: ScatterNode | _: WorkflowCallNode => "lightgray" case _: ExternalGraphInputNode => "lightskyblue1" case _: OuterGraphInputNode => "blueviolet" case _: PortBasedGraphOutputNode => "yellowgreen" case _: ExpressionBasedGraphOutputNode => "palegreen" case _ => "white" } def graphName: String = dotSafe(graphNode match { case c: CallNode => s"call ${c.localName}" case s: ScatterNode => s"scatter ${s.scatterCollectionExpressionNode.identifier.localName.value} in" case _: ConditionalNode => "conditional" case gin: OptionalGraphInputNodeWithDefault => s"${gin.womType.toDisplayString} ${gin.localName} = ..." case gin: GraphInputNode => s"${gin.womType.toDisplayString} ${gin.localName}" case gon: GraphOutputNode => s"${gon.womType.toDisplayString} ${gon.localName}" case expr: ExpressionNode => val inputNames = expr.womExpression.inputs.mkString(", ") s"${expr.womType.toDisplayString} ${expr.localName} = f($inputNames)" case other => throw new Exception(s"womgraph can't find a graphName for GraphNodes of type: ${other.getClass.getSimpleName}") }) def graphId: String = dotSafe("NODE" + graphObjectUniqueId(graphNode)) } private[graph] implicit class GraphNodePortGraphics(val graphNodePort: GraphNodePort) extends AnyVal { def graphShape = graphNodePort match { case _: InputPort => "oval" case _: OutputPort => "hexagon" } def graphName: String = dotSafe(graphNodePort.name) def graphId: String = dotSafe("PORT" + graphObjectUniqueId(graphNodePort)) } /** * Should be good enough to provide a unique ID based on object reference. * * In some cases this breaks down (cf. https://stackoverflow.com/questions/10645494/can-i-assume-two-objects-with-the-same-system-identityhashcode-are-the-same#10645567), * but I think that should be rare enough to ignore (since womgraph is mostly a "help the developers" kind of a feature!) */ private def graphObjectUniqueId(a: Any): Int = System.identityHashCode(a) }
ohsu-comp-bio/cromwell
womtool/src/main/scala/womtool/graph/package.scala
Scala
bsd-3-clause
2,509
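Two details of the DOT emission above deserve a note: dotSafe wraps names in quotes and escapes embedded quotes so arbitrary WDL identifiers survive in Graphviz output, and node IDs derive from System.identityHashCode, i.e. object identity rather than value equality. A toy illustration (not womtool API):

def dotSafe(s: String) = s""""${s.replaceAllLiterally("\"", "\\\"")}""""

dotSafe("call \"prepare\"") // "call \"prepare\"" -- usable as a quoted DOT identifier

case class Node(name: String)
val a = Node("x"); val b = Node("x")                     // equal by value...
System.identityHashCode(a) == System.identityHashCode(b) // ...but false here: distinct objects
// (collisions are theoretically possible, as the file's own comment concedes)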
package org.soic.eac import org.apache.spark.SparkConf import org.apache.spark.SparkContext import org.apache.spark.ml.Pipeline import org.apache.spark.ml.classification.RandomForestClassifier import org.apache.spark.ml.feature.{VectorAssembler, StringIndexer} import org.apache.spark.ml.tuning.{TrainValidationSplit, ParamGridBuilder, CrossValidator} import org.apache.spark.mllib.linalg._ import org.apache.spark.mllib.regression.LabeledPoint import org.apache.spark.mllib.tree.RandomForest import org.apache.spark.mllib.util.MLUtils import org.apache.spark.sql.types.{DoubleType, StringType, StructField, StructType} import org.apache.spark.{SparkConf, SparkContext} import org.apache.spark.sql.{Row, SQLContext} import org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator import org.apache.spark.rdd.RDD import org.apache.spark.sql.DataFrame /** * Created by vjalali on 3/20/16. */ class CreditReader extends Reader{ def Indexed(FilePath:String, sc: SparkContext): DataFrame= { val rawData = sc.textFile(FilePath) val sqlContext = new org.apache.spark.sql.SQLContext(sc) val schema = StructType(dataSchema.split(" ").zipWithIndex.map {case (fieldName, i) => StructField(fieldName, if (numericalFeaturesInfo.keySet.contains(i)) DoubleType else StringType, true)}) val rowRDD = rawData.map(_.split(",")).map(p => Row(p(0),p(1).toDouble, p(2).toDouble, p(3), p(4), p(5), p(6), p(7).toDouble, p(8), p(9), p(10).toDouble, p(11), p(12), p(13).toDouble, p(14).toDouble, p(15))) val adultDataFrame = sqlContext.createDataFrame(rowRDD, schema) var indexer = new StringIndexer().setInputCol("a1").setOutputCol("a1Index").fit(adultDataFrame) var indexed = indexer.transform(adultDataFrame) indexer = new StringIndexer().setInputCol("a4").setOutputCol("a4Index").fit(indexed) indexed = indexer.transform(indexed) indexer = new StringIndexer().setInputCol("a5").setOutputCol("a5Index").fit(indexed) indexed = indexer.transform(indexed) indexer = new StringIndexer().setInputCol("a6").setOutputCol("a6Index").fit(indexed) indexed = indexer.transform(indexed) indexer = new StringIndexer().setInputCol("a7").setOutputCol("a7Index").fit(indexed) indexed = indexer.transform(indexed) indexer = new StringIndexer().setInputCol("a9").setOutputCol("a9Index").fit(indexed) indexed = indexer.transform(indexed) indexer = new StringIndexer().setInputCol("a10").setOutputCol("a10Index").fit(indexed) indexed = indexer.transform(indexed) indexer = new StringIndexer().setInputCol("a12").setOutputCol("a12Index").fit(indexed) indexed = indexer.transform(indexed) indexer = new StringIndexer().setInputCol("a13").setOutputCol("a13Index").fit(indexed) indexed = indexer.transform(indexed) indexer = new StringIndexer().setInputCol("a16").setOutputCol("label").fit(indexed) indexed = indexer.transform(indexed) return indexed } def Output(indexed: DataFrame): DataFrame= { val transformedDf = indexed.drop("a1"). drop("a4"). drop("a5"). drop("a6"). drop("a7"). drop("a9"). drop("a10"). drop("a12"). drop("a13"). 
drop("a16") var assembler = new VectorAssembler().setInputCols(Array("a1Index","a2", "a3", "a4Index", "a5Index", "a6Index", "a7Index", "a8", "a9Index", "a10Index", "a11", "a12Index", "a13Index","a14","a15")) .setOutputCol("features") var output = assembler.transform(transformedDf) return output } def DFTransformed(indexed: DataFrame): RDD[LabeledPoint] = { val transformed = indexed.map(x => new LabeledPoint(x.get(25).asInstanceOf[Double], new DenseVector(Array(x.get(16).asInstanceOf[Double],x.get(1).asInstanceOf[Double],x.get(2).asInstanceOf[Double], x.get(17).asInstanceOf[Double], x.get(18).asInstanceOf[Double], x.get(19).asInstanceOf[Double], x.get(20).asInstanceOf[Double], x.get(7).asInstanceOf[Double], x.get(21).asInstanceOf[Double], x.get(22).asInstanceOf[Double], x.get(10).asInstanceOf[Double], x.get(23).asInstanceOf[Double], x.get(24).asInstanceOf[Double], x.get(13).asInstanceOf[Double], x.get(14).asInstanceOf[Double])))) return transformed } override def numberOfClasses: Int = 2 override def categoricalFeaturesInfo: Map[Int, Int] = Map[Int, Int]((0,2),(3,3),(4,3),(5,14),(6,9),(8,2),(9,2),(11,2),(12,3)) override def numericalFeaturesInfo: Map[Int, Double] = Map[Int, Double]((1,11.83827),(2,5.027077),(7,3.37112),(10,4.968497),(13,168.2968),(14,5253.279)) override def dataSchema: String = "a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 a12 a13 a14 a15 a16" override def inputFileName: String = "credit/crxCleaned.data" }
vahidj/eac
src/main/scala/org/soic/eac/CreditReader.scala
Scala
apache-2.0
4,744
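The ten near-identical StringIndexer blocks above invite a data-driven fold. A sketch of the same indexing with the column list pulled out (same Spark ML StringIndexer API as the file; the column names are taken from it, with a16 becoming the label):

import org.apache.spark.ml.feature.StringIndexer
import org.apache.spark.sql.DataFrame

// Categorical columns from the file; each gets a "<name>Index" output column.
val categorical = Seq("a1", "a4", "a5", "a6", "a7", "a9", "a10", "a12", "a13")

def indexAll(df: DataFrame): DataFrame = {
  val indexed = categorical.foldLeft(df) { (acc, col) =>
    new StringIndexer().setInputCol(col).setOutputCol(col + "Index").fit(acc).transform(acc)
  }
  new StringIndexer().setInputCol("a16").setOutputCol("label").fit(indexed).transform(indexed)
}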
package prototype import scalafx.application.JFXApp import scalafx.event.ActionEvent import scalafx.Includes._ import scalafx.application.JFXApp.PrimaryStage import scalafx.scene.Scene import scalafx.scene.control._ import scalafx.scene.layout.GridPane import scalafx.scene.paint.Color import scalafx.geometry.Insets object CheckBoxTest extends JFXApp { val check = new CheckBox { text = "WurstBox" } check.onAction = (event: ActionEvent) => { lblCheckState.text = if (check.indeterminate.get) "Indeterminate" else check.selected.get().toString } val lblCheckState = new Label { text = check.selected.get().toString } val btnAllowIndeterminate = new scalafx.scene.control.Button { text = "Allow Indeterminate" } btnAllowIndeterminate.onAction = (event: ActionEvent) => { check.allowIndeterminate = !check.allowIndeterminate.get() } val lblAllowIndeterminate = new Label { text <== when(check.allowIndeterminate) choose "Can be Indeterminate" otherwise "Can not be Indeterminate" } val btnFire = new Button { text = "Fire!" } btnFire.onAction = (event: ActionEvent) => check.fire() val txfText = new TextField txfText.delegate.textProperty.bindBidirectional(check.text) val grid = new GridPane { padding = Insets(10) hgap = 5 vgap = 5 } grid.add(check, 0, 0) grid.add(lblCheckState, 1, 0) grid.add(btnAllowIndeterminate, 0, 1) grid.add(lblAllowIndeterminate, 1, 1) grid.add(btnFire, 0, 2) grid.add(txfText, 1, 2) stage = new PrimaryStage { title = "CheckBox Test" scene = new Scene { fill = Color.LIGHTGRAY content = grid } } }
dadarakt/jamon
src/main/scala/prototype/Gui.scala
Scala
apache-2.0
1,659
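The one non-obvious line in the GUI demo is the reactive binding: text <== when(...) choose ... otherwise ... keeps the label in sync without any listener code. A minimal sketch of the same ScalaFX idiom on bare properties (an assumption of this sketch: plain property bindings do not require a JavaFX window to be shown):

import scalafx.Includes._
import scalafx.beans.property.{BooleanProperty, StringProperty}

val flag  = BooleanProperty(false)
val label = StringProperty("")
label <== when(flag) choose "on" otherwise "off" // conditional binding

label()       // "off"
flag() = true
label()       // "on" -- updated automatically by the binding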
// Licensed under the Apache License, Version 2.0 (the "License"); you may not // use this file except in compliance with the License. You may obtain a copy of // the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, WITHOUT // WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the // License for the specific language governing permissions and limitations under // the License. package com.cloudant.clouseau import org.apache.commons.configuration.SystemConfiguration import org.specs2.mutable.SpecificationWithJUnit import java.io.File import concurrent._ class IndexCleanupServiceSpec extends SpecificationWithJUnit { sequential "the index clean-up service" should { "rename index when database is deleted" in new cleanup_service { node.cast(service, RenamePathMsg("foo.1234567890")) must be equalTo 'ok Thread.sleep(1000) val indexdir = new File("target", "indexes") var subdirlist = List[String]() for (file <- indexdir.listFiles if file.getName contains ".deleted") { subdirlist = file.getName() +: subdirlist } subdirlist.length > 0 must be equalTo true } } } trait cleanup_service extends RunningNode { val config = new SystemConfiguration() val args = new ConfigurationArgs(config) val service = node.spawnService[IndexCleanupService, ConfigurationArgs](args) val mbox = node.spawnMbox val dir = new File("target", "indexes") if (dir.exists) { for (f <- dir.listFiles) { f.delete } } val foodir = new File(new File("target", "indexes"), "foo.1234567890") if (!foodir.exists) { foodir.mkdirs } }
cloudant-labs/clouseau
src/test/scala/com/cloudant/clouseau/IndexCleanupServiceSpec.scala
Scala
apache-2.0
1,790
package com.redhat.samples.camel import org.apache.camel.builder.RouteBuilder import org.apache.camel.support.builder.Namespaces import org.apache.camel.test.junit4.CamelTestSupport import org.junit.Test class CBRTest extends CamelTestSupport { override def createRouteBuilder = new RouteBuilder { override def configure: Unit = { val ns = new Namespaces("soap", "http://schemas.xmlsoap.org/soap/envelope/") .add("ns", "urn:samples-camel:basic:1.0") // @formatter:off from("direct:in") .choice .when.xpath("/soap:Envelope/soap:Body/ns:aaa", ns) .to("mock:a") .when.xpath("/soap:Envelope/soap:Body/ns:bbb", ns) .to("mock:b") .otherwise .to("mock:c") // @formatter:on } } private def test(uri: String, xml: String) = { val e = getMockEndpoint(uri) e.expectedMessageCount(1) e.expectedBodiesReceived(xml) template.sendBody("direct:in", xml) e.assertIsSatisfied() } @Test(timeout = 1000) def a: Unit = { val xml = <soap:Envelope xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/" xmlns:ns="urn:samples-camel:basic:1.0"> <soap:Header/> <soap:Body> <ns:aaa> <value>Test</value> </ns:aaa> </soap:Body> </soap:Envelope> test("mock:a", xml.toString) } @Test(timeout = 1000) def b: Unit = { val xml = <soap:Envelope xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/" xmlns:ns="urn:samples-camel:basic:1.0"> <soap:Header/> <soap:Body> <ns:bbb> <value>Test</value> </ns:bbb> </soap:Body> </soap:Envelope> test("mock:b", xml.toString) } @Test(timeout = 1000) def c: Unit = { val xml = <soap:Envelope xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/" xmlns:ns="urn:samples-camel:basic:1.0"> <soap:Header/> <soap:Body> <ns:ccc> <value>Test</value> </ns:ccc> </soap:Body> </soap:Envelope> test("mock:c", xml.toString) } }
tadayosi/samples-camel
basic/src/test/scala/com/redhat/samples/camel/CBRTest.scala
Scala
apache-2.0
2,116
package com.datastax.spark.connector.cql.sai import com.datastax.spark.connector.SparkCassandraITWordSpecBase import com.datastax.spark.connector.ccm.CcmConfig.V6_8_3 import com.datastax.spark.connector.cluster.DefaultCluster class IndexedSetSpec extends SparkCassandraITWordSpecBase with DefaultCluster with SaiCollectionBaseSpec { override def beforeClass { dseFrom(V6_8_3) { conn.withSessionDo { session => createKeyspace(session, ks) session.execute( s"""CREATE TABLE IF NOT EXISTS $ks.set_test ( | pk_1 frozen<set<int>>, | pk_2 int, | set_col set<int>, | frozen_set_col frozen<set<int>>, | PRIMARY KEY ((pk_1, pk_2)));""".stripMargin) session.execute( s"""CREATE CUSTOM INDEX pk_set_test_sai_idx ON $ks.set_test (full(pk_1)) USING 'StorageAttachedIndex';""".stripMargin) session.execute( s"""CREATE CUSTOM INDEX frozen_set_test_sai_idx ON $ks.set_test (set_col) USING 'StorageAttachedIndex';""".stripMargin) session.execute( s"""CREATE CUSTOM INDEX full_set_test_sai_idx ON $ks.set_test (full(frozen_set_col)) USING 'StorageAttachedIndex';""".stripMargin) for (i <- (0 to 9)) { session.execute(s"insert into $ks.set_test " + s"(pk_1, pk_2, set_col, frozen_set_col) values " + s"({10$i, 11$i}, $i, {10$i, 11$i}, {10$i, 11$i})") } } } } // TODO: SPARKC-630 "Index on a non-frozen set column" ignore { indexOnANonFrozenCollection("set_test", "set_col") } "Index on a frozen set column" should { indexOnAFrozenCollection("set_test", "frozen_set_col") } }
datastax/spark-cassandra-connector
connector/src/it/scala/com/datastax/spark/connector/cql/sai/IndexedSetSpec.scala
Scala
apache-2.0
1,708
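The Cassandra detail the spec exercises: SAI can index the elements of a non-frozen set directly, while a frozen set (here even one inside the partition key) must be indexed with full(...), i.e. on the whole collection value. A hedged sketch of a helper that builds the statements used above (the helper and its names are illustrative, not connector API):

// Illustrative helper, not part of the connector: renders the CQL used in the spec.
def saiIndex(ks: String, table: String, column: String, name: String, fullIndex: Boolean): String = {
  val target = if (fullIndex) s"full($column)" else column
  s"CREATE CUSTOM INDEX $name ON $ks.$table ($target) USING 'StorageAttachedIndex';"
}

saiIndex("ks", "set_test", "frozen_set_col", "full_set_test_sai_idx", fullIndex = true)
// CREATE CUSTOM INDEX full_set_test_sai_idx ON ks.set_test (full(frozen_set_col)) USING 'StorageAttachedIndex';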
package quizleague.web.site import scalajs.js import js.Dynamic.literal import js.JSConverters._ import com.felstar.scalajs.vue._ import java.time.format.{DateTimeFormatter, DateTimeFormatterBuilder} import quill.VueQuillEditor import quizleague.web.util.rx._ import rxscalajs.Observable import showdown.VueShowdown import scala.scalajs.js.annotation.JSExport import scala.scalajs.js.annotation.JSExportTopLevel @JSExportTopLevel("Site") object SiteApp{ val dateFormatter = new DateTimeFormatterBuilder() .append(DateTimeFormatter.ISO_LOCAL_DATE) .append(DateTimeFormatter.ISO_LOCAL_TIME) .append(DateTimeFormatter.ISO_LOCAL_DATE_TIME) .toFormatter @JSExport def main():Unit = { Vue.use(VueQuillEditor) Vue.use(VueShowdown, showdown.defaultOptions) Vue.filter("date", (date:String, format:String) => DateTimeFormatter.ofPattern(format).format(DateTimeFormatter.ISO_LOCAL_DATE.parse(date))) Vue.filter("time", (time:String, format:String) => DateTimeFormatter.ofPattern(format).format(DateTimeFormatter.ISO_LOCAL_TIME.parse(time))) Vue.filter("datetime", (datetime:String, format:String) => DateTimeFormatter.ofPattern(format).format(DateTimeFormatter.ISO_LOCAL_DATE_TIME.parse(datetime))) Vue.filter("combine", (obs:js.Array[RefObservable[Any]]) => Observable.combineLatest(obs.map(_.obs)).map(_.toJSArray)) Vue.filter("wrap", (obj:js.Any) => Observable.just(obj)) new Vue( literal(el="#app", router = Router(SiteModule(), scrollBehavior = () => js.Dynamic.literal(x=0,y=0) ), vuetify = new Vuetify() ) ) } }
gumdrop/quizleague-maintain
js/src/main/scala/quizleague/web/site/SiteApp.scala
Scala
mit
1,620
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.api.stream import org.apache.flink.api.scala._ import org.apache.flink.table.api._ import org.apache.flink.table.api.config.ExecutionConfigOptions import org.apache.flink.table.api.internal.TableEnvironmentInternal import org.apache.flink.table.planner.utils.TableTestBase import org.apache.flink.table.types.logical.{BigIntType, IntType, VarCharType} import org.junit.runner.RunWith import org.junit.runners.Parameterized import org.junit.{Before, Test} import java.sql.Timestamp import java.time.Duration @RunWith(classOf[Parameterized]) class ExplainTest(extended: Boolean) extends TableTestBase { private val extraDetails = if (extended) { Array(ExplainDetail.CHANGELOG_MODE, ExplainDetail.ESTIMATED_COST) } else { Array.empty[ExplainDetail] } private val util = streamTestUtil() util.addTableSource[(Int, Long, String)]("MyTable", 'a, 'b, 'c) util.addDataStream[(Int, Long, String)]("MyTable1", 'a, 'b, 'c) util.addDataStream[(Int, Long, String)]("MyTable2", 'd, 'e, 'f) val STRING = VarCharType.STRING_TYPE val LONG = new BigIntType() val INT = new IntType() @Before def before(): Unit = { util.tableEnv.getConfig.getConfiguration.setInteger( ExecutionConfigOptions.TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM, 4) } @Test def testExplainTableSourceScan(): Unit = { util.verifyExplain("SELECT * FROM MyTable", extraDetails:_*) } @Test def testExplainDataStreamScan(): Unit = { util.verifyExplain("SELECT * FROM MyTable1", extraDetails:_*) } @Test def testExplainWithFilter(): Unit = { util.verifyExplain("SELECT * FROM MyTable1 WHERE mod(a, 2) = 0", extraDetails:_*) } @Test def testExplainWithAgg(): Unit = { util.verifyExplain("SELECT COUNT(*) FROM MyTable1 GROUP BY a", extraDetails:_*) } @Test def testExplainWithJoin(): Unit = { util.verifyExplain("SELECT a, b, c, e, f FROM MyTable1, MyTable2 WHERE a = d", extraDetails:_*) } @Test def testExplainWithUnion(): Unit = { util.verifyExplain("SELECT * FROM MyTable1 UNION ALL SELECT * FROM MyTable2", extraDetails:_*) } @Test def testExplainWithSort(): Unit = { util.verifyExplain("SELECT * FROM MyTable1 ORDER BY a LIMIT 5", extraDetails:_*) } @Test def testExplainWithSingleSink(): Unit = { val table = util.tableEnv.sqlQuery("SELECT * FROM MyTable1 WHERE a > 10") val appendSink = util.createAppendTableSink(Array("a", "b", "c"), Array(INT, LONG, STRING)) util.verifyExplainInsert(table, appendSink, "appendSink", extraDetails: _*) } @Test def testExplainWithMultiSinks(): Unit = { val stmtSet = util.tableEnv.createStatementSet() val table = util.tableEnv.sqlQuery("SELECT a, COUNT(*) AS cnt FROM MyTable1 GROUP BY a") util.tableEnv.registerTable("TempTable", table) val table1 = util.tableEnv.sqlQuery("SELECT * FROM TempTable WHERE cnt > 10") val upsertSink1 = 
util.createUpsertTableSink(Array(0), Array("a", "cnt"), Array(INT, LONG)) util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( "upsertSink1", upsertSink1) stmtSet.addInsert("upsertSink1", table1) val table2 = util.tableEnv.sqlQuery("SELECT * FROM TempTable WHERE cnt < 10") val upsertSink2 = util.createUpsertTableSink(Array(0), Array("a", "cnt"), Array(INT, LONG)) util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( "upsertSink2", upsertSink2) stmtSet.addInsert("upsertSink2", table2) util.verifyExplain(stmtSet, extraDetails: _*) } @Test def testMiniBatchIntervalInfer(): Unit = { val stmtSet = util.tableEnv.createStatementSet() // Test emit latency propagate among RelNodeBlocks util.addDataStream[(Int, String, Timestamp)]("T1", 'id1, 'text, 'rowtime.rowtime) util.addDataStream[(Int, String, Int, String, Long, Timestamp)]( "T2", 'id2, 'cnt, 'name, 'goods, 'rowtime.rowtime) util.addTableWithWatermark("T3", util.tableEnv.from("T1"), "rowtime", 0) util.addTableWithWatermark("T4", util.tableEnv.from("T2"), "rowtime", 0) util.tableEnv.getConfig.getConfiguration.setBoolean( ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ENABLED, true) util.tableEnv.getConfig.getConfiguration.set( ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ALLOW_LATENCY, Duration.ofSeconds(3)) val table = util.tableEnv.sqlQuery( """ |SELECT id1, T3.rowtime AS ts, text | FROM T3, T4 |WHERE id1 = id2 | AND T3.rowtime > T4.rowtime - INTERVAL '5' MINUTE | AND T3.rowtime < T4.rowtime + INTERVAL '3' MINUTE """.stripMargin) util.tableEnv.registerTable("TempTable", table) val table1 = util.tableEnv.sqlQuery( """ |SELECT id1, LISTAGG(text, '#') |FROM TempTable |GROUP BY id1, TUMBLE(ts, INTERVAL '8' SECOND) """.stripMargin) val appendSink1 = util.createAppendTableSink(Array("a", "b"), Array(INT, STRING)) util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( "appendSink1", appendSink1) stmtSet.addInsert("appendSink1", table1) val table2 = util.tableEnv.sqlQuery( """ |SELECT id1, LISTAGG(text, '*') |FROM TempTable |GROUP BY id1, HOP(ts, INTERVAL '12' SECOND, INTERVAL '6' SECOND) """.stripMargin) val appendSink2 = util.createAppendTableSink(Array("a", "b"), Array(INT, STRING)) util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal( "appendSink2", appendSink2) stmtSet.addInsert("appendSink2", table2) util.verifyExplain(stmtSet, extraDetails: _*) } } object ExplainTest { @Parameterized.Parameters(name = "extended={0}") def parameters(): java.util.Collection[Boolean] = { java.util.Arrays.asList(true, false) } }
lincoln-lil/flink
flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/ExplainTest.scala
Scala
apache-2.0
6,638
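Outside the planner test harness, the same extended plans are reachable through the public API: both Table and StatementSet accept ExplainDetail varargs. A minimal sketch (recent Flink Table API; the table name is assumed to be registered beforehand):

import org.apache.flink.table.api.{EnvironmentSettings, ExplainDetail, TableEnvironment}

val env   = TableEnvironment.create(EnvironmentSettings.newInstance().inStreamingMode().build())
val table = env.sqlQuery("SELECT * FROM MyTable1 WHERE a > 10")

// Plain plan vs. plan annotated with changelog mode and cost estimates.
println(table.explain())
println(table.explain(ExplainDetail.CHANGELOG_MODE, ExplainDetail.ESTIMATED_COST))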
//package request-bodies import com.excilys.ebi.gatling.core.Predef._ import com.excilys.ebi.gatling.http.Predef._ import akka.util.duration._ import bootstrap._ import util.parsing.json.JSONObject class CreateRelationships extends Simulation { val httpConf = httpConfig .baseURL("http://localhost:7474") .acceptHeader("application/json") .requestInfoExtractor(request => { println(request.getStringData) Nil }) val rnd = new scala.util.Random val chooseRandomNodes = exec((session) => { session.setAttribute("params", JSONObject(Map("id1" -> rnd.nextInt(100000), "id2" -> rnd.nextInt(100000))).toString()) }) val createRelationship = """START node1=node({id1}), node2=node({id2}) CREATE UNIQUE node1-[:KNOWS]->node2""" val cypherQuery = """{"query": "%s", "params": %s }""".format(createRelationship, "${params}") val scn = scenario("Create Relationships") .during(30) { exec(chooseRandomNodes) .exec( http("create relationships") .post("/db/data/cypher") .header("X-Stream", "true") .body(cypherQuery) .asJSON .check(status.is(200))) .pause(0 milliseconds, 5 milliseconds) } setUp( scn.users(100).ramp(10).protocolConfig(httpConf) ) }
maxdemarzi/gatling_neo
src/test/resources/request-bodies/CreateRelationships.scala
Scala
mit
1,330
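The simulation's one moving part is the per-iteration session write: fresh node ids are rendered into a JSON object string, which the ${params} EL expression later splices into the Cypher request body. The rendering step in isolation (same scala.util.parsing.json.JSONObject the script uses):

import scala.util.parsing.json.JSONObject

val rnd    = new scala.util.Random
val params = JSONObject(Map("id1" -> rnd.nextInt(100000), "id2" -> rnd.nextInt(100000))).toString()
// renders as: {"id1" : <int>, "id2" : <int>}

val createRelationship = """START node1=node({id1}), node2=node({id2}) CREATE UNIQUE node1-[:KNOWS]->node2"""
val body = """{"query": "%s", "params": %s }""".format(createRelationship, params)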
case class C(d: Double) { }
case class D[T, U, V](bingo: Int, donkey: String, private val vegas: Set[A])(jehovah: Int) { }

class A {
  def f = (new C(5)) match {
    case C => true
    case _ => false
  }

  def g[T, U, V](x: D[T, U, V]) = x match {
    case D => true
    case _ => false
  }
}
yusuke2255/dotty
tests/untried/neg/t4879.scala
Scala
bsd-3-clause
301
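The file above sits under tests/untried/neg, i.e. it is expected to fail compilation: in pattern position a bare C or D denotes the companion object, whose singleton type is unrelated to the case-class type of the scrutinee. The accepted spellings, for contrast (illustrative):

case class C(d: Double)

def f(c: C): Boolean = c match {
  case C(_) => true  // extractor pattern: matches any C instance
  case _    => false
}

def typed(x: Any): Boolean = x match {
  case _: C => true  // type pattern works as well
  case _    => false
}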
/**
 * Copyright (C) 2015 DANS - Data Archiving and Networked Services ([email protected])
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package nl.knaw.dans.easy.ingest.command

import java.io.File
import java.nio.charset.StandardCharsets

import org.apache.commons.io.FileUtils._
import org.scalatest.matchers.{ MatchResult, Matcher }

/** See also <a href="http://www.scalatest.org/user_guide/using_matchers#usingCustomMatchers">CustomMatchers</a> */
trait CustomMatchers {
  class ContentMatcher(content: String) extends Matcher[File] {
    def apply(left: File): MatchResult = {
      def trimLines(s: String): String = s.split("\n").map(_.trim).mkString("\n")
      MatchResult(
        trimLines(readFileToString(left, StandardCharsets.UTF_8)).contains(trimLines(content)),
        s"$left did not contain: $content",
        s"$left contains $content"
      )
    }
  }

  /** usage example: new File(...) should containTrimmed("...") */
  def containTrimmed(content: String) = new ContentMatcher(content)
}

object CustomMatchers extends CustomMatchers
DANS-KNAW/easy-ingest
command/src/test/scala/nl.knaw.dans.easy.ingest.command/CustomMatchers.scala
Scala
apache-2.0
1,577
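Usage follows the matcher's own doc comment; a minimal spec-style sketch (the ScalaTest 3 import paths and the temp file are assumptions of this example, not taken from the project):

import java.nio.file.Files
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class NoticeSpec extends AnyFlatSpec with Matchers with CustomMatchers {
  "a generated file" should "contain the notice, ignoring per-line indentation" in {
    val f = Files.createTempFile("notice", ".txt").toFile
    Files.write(f.toPath, "   Licensed under the Apache License   ".getBytes("UTF-8"))
    f should containTrimmed("Licensed under the Apache License")
  }
}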
/*
 *  In6Out3Shape.scala
 *  (FScape)
 *
 *  Copyright (c) 2001-2022 Hanns Holger Rutz. All rights reserved.
 *
 *  This software is published under the GNU Affero General Public License v3+
 *
 *  For further information, please contact Hanns Holger Rutz at
 *  [email protected]
 */

package de.sciss.fscape.stream.impl.shapes

import akka.stream.{Inlet, Outlet, Shape}

import scala.collection.immutable.{Seq => ISeq}

final case class In6Out3Shape[In0, In1, In2, In3, In4, In5, Out0, Out1, Out2](in0 : Inlet [In0 ],
                                                                              in1 : Inlet [In1 ],
                                                                              in2 : Inlet [In2 ],
                                                                              in3 : Inlet [In3 ],
                                                                              in4 : Inlet [In4 ],
                                                                              in5 : Inlet [In5 ],
                                                                              out0: Outlet[Out0],
                                                                              out1: Outlet[Out1],
                                                                              out2: Outlet[Out2])
  extends Shape {

  val inlets : ISeq[Inlet [_]] = Vector(in0, in1, in2, in3, in4, in5)
  val outlets: ISeq[Outlet[_]] = Vector(out0, out1, out2)

  def deepCopy(): In6Out3Shape[In0, In1, In2, In3, In4, In5, Out0, Out1, Out2] =
    In6Out3Shape(in0.carbonCopy(), in1.carbonCopy(), in2.carbonCopy(),
      in3.carbonCopy(), in4.carbonCopy(), in5.carbonCopy(),
      out0.carbonCopy(), out1.carbonCopy(), out2.carbonCopy())
}
Sciss/FScape-next
core/shared/src/main/scala/de/sciss/fscape/stream/impl/shapes/In6Out3Shape.scala
Scala
agpl-3.0
1,725
package org.openapitools.server.model

/**
 * @param `class` for example: ''null''
 * @param displayName for example: ''null''
 * @param estimatedDurationInMillis for example: ''null''
 * @param fullDisplayName for example: ''null''
 * @param fullName for example: ''null''
 * @param name for example: ''null''
 * @param organization for example: ''null''
 * @param parameters for example: ''null''
 * @param permissions for example: ''null''
 * @param weatherScore for example: ''null''
 * @param pullRequest for example: ''null''
 * @param links for example: ''null''
 * @param latestRun for example: ''null''
 */
final case class BranchImpl(
  `class`: Option[String],
  displayName: Option[String],
  estimatedDurationInMillis: Option[Int],
  fullDisplayName: Option[String],
  fullName: Option[String],
  name: Option[String],
  organization: Option[String],
  parameters: Option[Seq[StringParameterDefinition]],
  permissions: Option[BranchImplpermissions],
  weatherScore: Option[Int],
  pullRequest: Option[String],
  links: Option[BranchImpllinks],
  latestRun: Option[PipelineRunImpl]
)
cliffano/swaggy-jenkins
clients/scala-akka-http-server/generated/src/main/scala/org/openapitools/server/model/BranchImpl.scala
Scala
mit
1,112
/******************************************************************************* Copyright (c) 2013, S-Core. All rights reserved. Use is subject to license terms. This distribution may include materials developed by third parties. ******************************************************************************/ package kr.ac.kaist.jsaf.analysis.typing.models.Tizen import kr.ac.kaist.jsaf.analysis.typing.AddressManager._ import kr.ac.kaist.jsaf.analysis.cfg.{CFG, CFGExpr, InternalError} import kr.ac.kaist.jsaf.analysis.typing.domain.{BoolFalse => F, BoolTrue => T, _} import kr.ac.kaist.jsaf.analysis.typing.models._ import kr.ac.kaist.jsaf.analysis.typing._ import kr.ac.kaist.jsaf.analysis.typing.models.AbsInternalFunc import kr.ac.kaist.jsaf.analysis.typing.models.AbsConstValue import kr.ac.kaist.jsaf.analysis.typing.domain.Heap import kr.ac.kaist.jsaf.analysis.typing.domain.UIntSingle import kr.ac.kaist.jsaf.analysis.typing.domain.Context object TIZENCalendarAttendee extends Tizen { val name = "CalendarAttendee" /* predefined locations */ val loc_cons = newSystemRecentLoc(name + "Cons") val loc_proto = newSystemRecentLoc(name + "Proto") /* constructor or object*/ private val prop_cons: List[(String, AbsProperty)] = List( ("@class", AbsConstValue(PropValue(AbsString.alpha("Function")))), ("@proto", AbsConstValue(PropValue(ObjectValue(Value(ObjProtoLoc), F, F, F)))), ("@extensible", AbsConstValue(PropValue(T))), ("@scope", AbsConstValue(PropValue(Value(NullTop)))), ("@construct", AbsInternalFunc("tizen.CalendarAttendee.constructor")), ("@hasinstance", AbsConstValue(PropValue(Value(NullTop)))), ("prototype", AbsConstValue(PropValue(ObjectValue(Value(loc_proto), F, F, F)))) ) /* prototype */ private val prop_proto: List[(String, AbsProperty)] = List( ("@class", AbsConstValue(PropValue(AbsString.alpha("CallbackObject")))), ("@proto", AbsConstValue(PropValue(ObjectValue(Value(ObjProtoLoc), F, F, F)))), ("@extensible", AbsConstValue(PropValue(T))) ) override def getInitList(): List[(Loc, List[(String, AbsProperty)])] = List( (loc_cons, prop_cons), (loc_proto, prop_proto) ) override def getSemanticMap(): Map[String, SemanticFun] = { Map( ("tizen.CalendarAttendee.constructor" -> ( (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => { val lset_this = h(SinglePureLocalLoc)("@this")._1._2._2 val lset_env = h(SinglePureLocalLoc)("@env")._1._2._2 val set_addr = lset_env.foldLeft[Set[Address]](Set())((a, l) => a + locToAddr(l)) if (set_addr.size > 1) throw new InternalError("API heap allocation: Size of env address is " + set_addr.size) val addr_env = set_addr.head val addr1 = cfg.getAPIAddress(addr_env, 0) val l_r1 = addrToLoc(addr1, Recent) val (h_1, ctx_1) = Helper.Oldify(h, ctx, addr1) val n_arglen = Operator.ToUInt32(getArgValue(h_1, ctx_1, args, "length")) val o_contRef = ObjEmpty. update("@class", PropValue(AbsString.alpha("Object"))). update("@proto", PropValue(ObjectValue(Value(TIZENContactRef.loc_proto), F, F, F))). update("@extensible", PropValue(T)). update("addressBookId", PropValue(ObjectValue(Value(AbsString.alpha("")), F, T, T))). update("contactId", PropValue(ObjectValue(Value(AbsString.alpha("")), F, T, T))) val h_2 = h_1.update(l_r1, o_contRef) val o_new = ObjEmpty. update("@class", PropValue(AbsString.alpha("Object"))). update("@proto", PropValue(ObjectValue(Value(TIZENCalendarAttendee.loc_proto), F, F, F))). 
update("@extensible", PropValue(T)) val (h_3, es_1) = n_arglen match { case UIntSingle(n) if n == 0 => val o_new2 = o_new. update("uri", PropValue(ObjectValue(Value(AbsString.alpha("")), F, T, T))). update("name", PropValue(ObjectValue(Value(AbsString.alpha("")), F, T, T))). update("role", PropValue(ObjectValue(Value(AbsString.alpha("REQ_PARTICIPANT")), F, T, T))). update("status", PropValue(ObjectValue(Value(AbsString.alpha("PENDING")), F, T, T))). update("RSVP", PropValue(ObjectValue(Value(F), F, T, T))). update("type", PropValue(ObjectValue(Value(AbsString.alpha("INDIVIDUAL")), F, T, T))). update("group", PropValue(ObjectValue(Value(AbsString.alpha("")), F, T, T))). update("delegatorURI", PropValue(ObjectValue(Value(AbsString.alpha("")), F, T, T))). update("delegateURI", PropValue(ObjectValue(Value(AbsString.alpha("")), F, T, T))). update("contactRef", PropValue(ObjectValue(Value(l_r1), F, T, T))) val h_3 = lset_this.foldLeft(h_2)((_h, l) => _h.update(l, o_new2)) (h_3, TizenHelper.TizenExceptionBot) case UIntSingle(n) if n == 1 => val v_1 = getArgValue(h_2, ctx_1, args, "0") val o_new2 = o_new. update("uri", PropValue(ObjectValue(Value(Helper.toString(v_1._1)), F, T, T))). update("name", PropValue(ObjectValue(Value(AbsString.alpha("")), F, T, T))). update("role", PropValue(ObjectValue(Value(AbsString.alpha("REQ_PARTICIPANT")), F, T, T))). update("status", PropValue(ObjectValue(Value(AbsString.alpha("PENDING")), F, T, T))). update("RSVP", PropValue(ObjectValue(Value(F), F, T, T))). update("type", PropValue(ObjectValue(Value(AbsString.alpha("INDIVIDUAL")), F, T, T))). update("group", PropValue(ObjectValue(Value(AbsString.alpha("")), F, T, T))). update("delegatorURI", PropValue(ObjectValue(Value(AbsString.alpha("")), F, T, T))). update("delegateURI", PropValue(ObjectValue(Value(AbsString.alpha("")), F, T, T))). update("contactRef", PropValue(ObjectValue(Value(l_r1), F, T, T))) val h_3 = lset_this.foldLeft(h_2)((_h, l) => _h.update(l, o_new2)) (h_3, TizenHelper.TizenExceptionBot) case UIntSingle(n) if n == 2 => val v_1 = getArgValue(h_2, ctx_1, args, "0") val v_2 = getArgValue(h_2, ctx_1, args, "1") val es = if (v_2._1 <= PValueTop) Set[WebAPIException](TypeMismatchError) else TizenHelper.TizenExceptionBot val (v_3, v_4, v_5, v_6, v_7, v_8, v_9, v_10, v_11) = v_2._2.foldLeft((ValueBot, ValueBot, ValueBot, ValueBot, ValueBot, ValueBot, ValueBot, ValueBot, ValueBot))((_v, l) => { (_v._1 + Helper.LookupL(h_2, l, "name"), _v._2 + Helper.LookupL(h_2, l, "role"), _v._3 + Helper.LookupL(h_2, l, "status"), _v._4 + Helper.LookupL(h_2, l, "RSVP"), _v._5 + Helper.LookupL(h_2, l, "type"), _v._6 + Helper.LookupL(h_2, l, "group"), _v._7 + Helper.LookupL(h_2, l, "delegatorURI"), _v._8 + Helper.LookupL(h_2, l, "delegateURI"), _v._9 + Helper.LookupL(h_2, l, "contactRef")) }) val o_new2 = o_new. update("uri", PropValue(ObjectValue(Value(Helper.toString(v_1._1)), F, T, T))). update("name", PropValue(ObjectValue(Value(v_3._1._5), F, T, T))). update("role", PropValue(ObjectValue(Value(v_4._1._5), F, T, T))). update("status", PropValue(ObjectValue(Value(v_5._1._5), F, T, T))). update("RSVP", PropValue(ObjectValue(Value(v_6._1._3), F, T, T))). update("type", PropValue(ObjectValue(Value(v_7._1._5), F, T, T))). update("group", PropValue(ObjectValue(Value(v_8._1), F, T, T))). update("delegatorURI", PropValue(ObjectValue(Value(v_9._1), F, T, T))). update("delegateURI", PropValue(ObjectValue(Value(v_10._1), F, T, T))). 
update("contactRef", PropValue(ObjectValue(Value(v_11._2), F, T, T))) val h_3 = lset_this.foldLeft(h_2)((_h, l) => _h.update(l, o_new2)) (h_3, es) case _ => { (h_2, TizenHelper.TizenExceptionBot) } } val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, es_1) ((Helper.ReturnStore(h_3, Value(lset_this)), ctx_1), (he + h_e, ctxe + ctx_e)) } )) ) } override def getPreSemanticMap(): Map[String, SemanticFun] = { Map() } override def getDefMap(): Map[String, AccessFun] = { Map() } override def getUseMap(): Map[String, AccessFun] = { Map() } }
daejunpark/jsaf
src/kr/ac/kaist/jsaf/analysis/typing/models/Tizen/TIZENCalendarAttendee.scala
Scala
bsd-3-clause
8,801
package taczombie.client.view.gui

import java.awt.Color

import scala.swing.Label

import javax.swing.border.CompoundBorder
import javax.swing.border.EmptyBorder
import javax.swing.border.EtchedBorder
import javax.swing.border.TitledBorder
import taczombie.client.model.ViewModel
import taczombie.client.util.Observer

class GameMessage(model: ViewModel) extends Label with Observer {
  focusable = false
  border = new CompoundBorder(new TitledBorder(new EtchedBorder, "Message"), new EmptyBorder(5, 5, 5, 10))
  text = "Welcome to TacZombie!"
  foreground = Color.red

  model.add(this)

  def update {
    text = model.gameMessage
  }
}
mahieke/TacZombie
gui/src/main/scala/taczombie/client/view/gui/GameMessage.scala
Scala
gpl-2.0
638
package reactivemongo.api.gridfs

private[gridfs] trait GridFSCompat {

  /* Concatenates two arrays - fast path */
  protected def concat[T](a1: Array[T], a2: Array[T])(implicit m: scala.reflect.ClassTag[T]): Array[T] = {
    var i, j = 0
    val result = new Array[T](a1.length + a2.length)

    while (i < a1.length) {
      result(i) = a1(i)
      i = i + 1
    }

    while (j < a2.length) {
      result(i + j) = a2(j)
      j = j + 1
    }

    result
  }
}
ReactiveMongo/ReactiveMongo
driver/src/main/scala-3/api/gridfs/GridFSCompat.scala
Scala
apache-2.0
454
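Behaviorally the helper is a two-array concatenation; here is the System.arraycopy formulation it hand-optimizes, plus a quick check of the contract (a sketch, not ReactiveMongo code):

import scala.reflect.ClassTag

// Same contract as the private helper above, written with System.arraycopy.
def concat2[T: ClassTag](a1: Array[T], a2: Array[T]): Array[T] = {
  val out = new Array[T](a1.length + a2.length)
  System.arraycopy(a1, 0, out, 0, a1.length)
  System.arraycopy(a2, 0, out, a1.length, a2.length)
  out
}

concat2(Array(1, 2), Array(3)).toList // List(1, 2, 3)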
package com.btcontract.wallet.utils

import android.content.{Context, Intent}
import com.btcontract.wallet.{ClassNames, R}
import android.app.{NotificationManager, PendingIntent, Service}
import androidx.core.app.NotificationCompat

object AwaitService {
  val awaitServiceClass: Class[AwaitService] = classOf[AwaitService]
  final val TITLE_TO_DISPLAY = "titleToDisplay"
  final val BODY_TO_DISPLAY = "bodyToDisplay"

  final val CHANNEL_ID = "awaitChannelId1"
  final val NOTIFICATION_ID = 14

  final val ACTION_CANCEL = "actionCancel"
  final val ACTION_SHOW = "actionShow"
}

class AwaitService extends Service { me =>
  override def onBind(intent: Intent): Null = null

  override def onDestroy: Unit = {
    val srv = getSystemService(Context.NOTIFICATION_SERVICE)
    srv.asInstanceOf[NotificationManager].cancel(AwaitService.NOTIFICATION_ID)
    super.onDestroy
  }

  override def onStartCommand(serviceIntent: Intent, flags: Int, id: Int): Int = {
    processServiceIntent(serviceIntent)
    Service.START_NOT_STICKY
  }

  def processServiceIntent(intent: Intent): Unit =
    if (intent.getAction != AwaitService.ACTION_CANCEL) {
      val awaitedBodyText = intent.getStringExtra(AwaitService.BODY_TO_DISPLAY)
      val awaitedTitleText = intent.getStringExtra(AwaitService.TITLE_TO_DISPLAY)

      val displayIntent = PendingIntent.getActivity(me, 0, new Intent(me, ClassNames.mainActivityClass), 0)
      val cancelIntent = PendingIntent.getService(me, 0, new Intent(me, AwaitService.awaitServiceClass).setAction(AwaitService.ACTION_CANCEL), 0)

      val notification =
        new NotificationCompat.Builder(me, AwaitService.CHANNEL_ID).setContentTitle(awaitedTitleText).setContentText(awaitedBodyText)
          .addAction(android.R.drawable.ic_menu_close_clear_cancel, getResources.getString(R.string.dialog_cancel), cancelIntent)
          .setSmallIcon(R.drawable.ic_history_white_24dp).setContentIntent(displayIntent).build

      startForeground(AwaitService.NOTIFICATION_ID, notification)
    } else {
      stopForeground(true)
      stopSelf
    }
}
btcontract/wallet
app/src/main/java/com/btcontract/wallet/utils/AwaitService.scala
Scala
apache-2.0
2,077
package api import akka.actor.ActorDSL._ import com.softwaremill.macwire.tagging.Tagger import core.UserActor._ import core.UserTag import core.model.{UserDeleted, PageParams, User, UserCreated} import spray.http.StatusCodes class UserEndpointTest extends BaseApiTest { "UserEndpoint" should { "return OK when getting list of users" in { // given val userActor = actor(new Act { become { case GetUsers(PageParams(Some(2), Some(1))) => sender() ! GetUsersResult(List(User("[email protected]", "pass"))) } }).taggedWith[UserTag] val userEndpoint = new UserEndpoint(userActor) // when Get("/users?skip=2&limit=1") ~> userEndpoint.route ~> check { // then status mustBe StatusCodes.OK responseAs[List[User]] must have size 1 } } "return Created when user successfully created" in { // given val userActor = actor(new Act { become { case CreateUser(User("[email protected]", "pass")) => sender() ! UserCreated("[email protected]") } }).taggedWith[UserTag] val userEndpoint = new UserEndpoint(userActor) // when Post("/users", User("[email protected]", "pass")) ~> userEndpoint.route ~> check { // then status mustBe StatusCodes.Created responseAs[String] must include("[email protected]") header("Location").value.value must endWith ("/users/[email protected]") } } "return OK when getting single user" in { // given val userActor = actor(new Act { become { case GetUser("[email protected]") => sender() ! GetUserResult(User("[email protected]", "pass")) } }).taggedWith[UserTag] val userEndpoint = new UserEndpoint(userActor) // when Get("/users/[email protected]") ~> userEndpoint.route ~> check { // then status mustBe StatusCodes.OK responseAs[User] mustBe User("[email protected]", "pass") } } "return No Content when deleting user" in { // given val userActor = actor(new Act { become { case DeleteUser("[email protected]") => sender() ! UserDeleted } }).taggedWith[UserTag] val userEndpoint = new UserEndpoint(userActor) // when Delete("/users/[email protected]") ~> userEndpoint.route ~> check { // then status mustBe StatusCodes.NoContent responseAs[String] mustBe empty } } } }
DamianJureczko/user-service
src/test/scala/api/UserEndpointTest.scala
Scala
apache-2.0
2,527
package com.mentatlabs.nsa
package scalac
package options

/* -Yinfer-debug
 * =============
 * 2.9.1 - 2.11.8: Trace type inference and implicit search.
 * 2.12.0:         !! missing !!
 */

case object ScalacYInferDebug
  extends ScalacOptionBoolean("-Yinfer-debug", ScalacVersions.`2.9.1`) {
  override val deprecated = Some(ScalacVersions.`2.11.0`)
  override val removed    = Some(ScalacVersions.`2.12.0`)
}
mentat-labs/sbt-nsa
nsa-core/src/main/scala/com/mentatlabs/nsa/scalac/options/private/deprecated/ScalacYInferDebug.scala
Scala
bsd-3-clause
417
package io.vamp.model.reader

import scala.reflect._
import scala.util.{ Failure, Try }

sealed trait UnitValue[T] {
  def value: T
  def normalized: String

  override def toString = normalized
}

object UnitValue {

  def of[V <: Any: ClassTag](value: Any): Try[V] = value match {
    case _ if classTag[V].runtimeClass == classOf[Percentage] ⇒ Try(Percentage.of(value).asInstanceOf[V])
    case _ if classTag[V].runtimeClass == classOf[MegaByte]   ⇒ Try(MegaByte.of(value).asInstanceOf[V])
    case _ if classTag[V].runtimeClass == classOf[Quantity]   ⇒ Try(Quantity.of(value).asInstanceOf[V])
    case _ if classTag[V].runtimeClass == classOf[Time]       ⇒ Try(Time.of(value).asInstanceOf[V])
    case _ ⇒ Failure(new IllegalArgumentException())
  }

  def illegal(value: Any) = throw new IllegalArgumentException(s"illegal value: $value")
}

object Percentage {

  private val percentagePattern = """^\s*(\d{1,3})\s*%\s*$""".r

  def of(source: Any): Percentage = source match {
    case string: String ⇒ string match {
      case percentagePattern(p) ⇒ Percentage(p.toInt)
      case _                    ⇒ throw new IllegalArgumentException()
    }
    case _ ⇒ UnitValue.illegal(source)
  }
}

case class Percentage(value: Int) extends UnitValue[Int] {
  { if (value < 0 || value > 100) UnitValue.illegal(value) }
  def normalized = s"$value%"
}

object MegaByte {

  private val kiloPattern1 = """^\s*(.\d+)\s*[K|k]\s*[B|b]{0,1}\s*$""".r
  private val kiloPattern2 = """^\s*(\d+.*\d*)\s*[K|k]\s*[B|b]{0,1}\s*$""".r
  private val kiloPattern3 = """^\s*(.\d*)\s*[K|k]\s*[I|i]{0,1}\s*$""".r
  private val kiloPattern4 = """^\s*(\d+.*\d*)\s*[K|k]\s*[I|i]{0,1}\s*$""".r
  private val megaPattern1 = """^\s*(.\d+)\s*[M|m]\s*[B|b]{0,1}\s*$""".r
  private val megaPattern2 = """^\s*(\d+.*\d*)\s*[M|m]\s*[B|b]{0,1}\s*$""".r
  private val megaPattern3 = """^\s*(.\d*)\s*[M|m]\s*[I|i]{0,1}\s*$""".r
  private val megaPattern4 = """^\s*(\d+.*\d*)\s*[M|m]\s*[I|i]{0,1}\s*$""".r
  private val gigaPattern1 = """^\s*(.\d+)\s*[G|g]\s*[B|b]{0,1}\s*$""".r
  private val gigaPattern2 = """^\s*(\d+.*\d*)\s*[G|g]\s*[B|b]{0,1}\s*$""".r
  private val gigaPattern3 = """^\s*(.\d*)\s*[G|g]\s*[I|i]{0,1}\s*$""".r
  private val gigaPattern4 = """^\s*(\d+.*\d*)\s*[G|g]\s*[I|i]{0,1}\s*$""".r

  def of(source: Any): MegaByte = source match {
    case string: String ⇒ string match {
      case kiloPattern1(kb) ⇒ MegaByte(kb.toDouble / 1000)
      case kiloPattern2(kb) ⇒ MegaByte(kb.toDouble / 1000)
      case kiloPattern3(kb) ⇒ MegaByte(kb.toDouble / 1024)
      case kiloPattern4(kb) ⇒ MegaByte(kb.toDouble / 1024)
      case megaPattern1(mb) ⇒ MegaByte(mb.toDouble)
      case megaPattern2(mb) ⇒ MegaByte(mb.toDouble)
      case megaPattern3(mb) ⇒ MegaByte(mb.toDouble * 1.024)
      case megaPattern4(mb) ⇒ MegaByte(mb.toDouble * 1.024)
      case gigaPattern1(gb) ⇒ MegaByte(gb.toDouble * 1000)
      case gigaPattern2(gb) ⇒ MegaByte(gb.toDouble * 1000)
      case gigaPattern3(gb) ⇒ MegaByte(gb.toDouble * 1024)
      case gigaPattern4(gb) ⇒ MegaByte(gb.toDouble * 1024)
      case _                ⇒ throw new IllegalArgumentException(s"String $string does not match any Memory Pattern")
    }
    case _ ⇒ UnitValue.illegal(source)
  }

  def gigaByte2MegaByte(gb: Double): Double = 1000 * gb
}

case class MegaByte(value: Double) extends UnitValue[Double] {
  if (value < 0) UnitValue.illegal(value)
  def normalized = f"$value%.2fMB"
}

object Quantity {

  private val pattern = """^\s*(.+)\s*$""".r
  private val milliPattern = """^\s*(.+?)\s*m\s*$""".r

  def of(source: Any): Quantity = source match {
    case string: String ⇒ string match {
      case milliPattern(m) ⇒ Quantity(m.toDouble / 1000)
      case pattern(m)      ⇒ Quantity(m.toDouble)
      case _               ⇒ throw new IllegalArgumentException()
    }
    case _ ⇒ Try(Quantity(source.toString.toDouble)).getOrElse(UnitValue.illegal(source))
  }
}

case class Quantity(value: Double) extends UnitValue[Double] {
  def normalized = f"$value%.2f"
}

object Time {

  private val secondPattern = "(\\d+)(s|sec|second|seconds)".r
  private val minutePattern = "(\\d+)(m|min|minute|minutes)".r
  private val hourPattern = "(\\d+)(h|hrs|hour|hours)".r

  def of(source: Any): Time = source match {
    case string: String ⇒ string match {
      case secondPattern(s, _) ⇒ Time(s.toInt)
      case minutePattern(m, _) ⇒ Time(m.toInt * 60)
      case hourPattern(h, _)   ⇒ Time(h.toInt * 3600)
      case s                   ⇒ throw new IllegalArgumentException(s)
    }
    case _ ⇒ Try(Time(source.toString.toInt)).getOrElse(UnitValue.illegal(source))
  }
}

/**
 * Time defines a UnitValue in seconds, parsed from second (s), minute (m) and hour (h) suffixes.
 */
case class Time(value: Int) extends UnitValue[Int] {
  override def normalized = s"${value}s"
}
magneticio/vamp
model/src/main/scala/io/vamp/model/reader/UnitValue.scala
Scala
apache-2.0
4,855
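Worked examples of the parsers above, with the normalized rendering in comments. They assume gigaPattern3/4 spelled with [G|g] as shown above; the upstream [M|m] spelling there duplicated megaPattern3/4 and left binary-giga inputs like "2Gi" unparseable:

Percentage.of("50 %").normalized // 50%
MegaByte.of("2GB").normalized    // 2000.00MB  (decimal giga)
MegaByte.of("2Gi").normalized    // 2048.00MB  (binary giga)
MegaByte.of("512k").normalized   // 0.51MB
Quantity.of("250m").normalized   // 0.25       (milli suffix divides by 1000)
Time.of("2h").normalized         // 7200s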
package com.typesafe.sbt package packager package rpm import sbt._ import linux._ import java.nio.charset.Charset import SbtNativePackager.Linux import sbt.Keys.{ name, version, sourceDirectory, target, packageBin, streams } import linux.LinuxPlugin.autoImport.{ linuxPackageMappings, linuxPackageSymlinks, serverLoading, packageArchitecture } import packager.Keys._ /** * Plugin containing all generic values used for packaging rpms. * * @example Enable the plugin in the `build.sbt` * {{{ * enablePlugins(RpmPlugin) * }}} */ object RpmPlugin extends AutoPlugin { override def requires = LinuxPlugin object autoImport extends RpmKeys { val Rpm = config("rpm") extend Linux val RpmConstants = Names } import autoImport._ private final def osPostInstallMacro: java.net.URL = getClass getResource "brpJavaRepackJar" /** RPM specific names */ object Names { val Scriptlets = "scriptlets" //maintainer script names /** `pretrans` */ val Pretrans = "pretrans" /** `postinst` */ val Post = "post" /** `preinst` */ val Pre = "pre" /** `postun` */ val Postun = "postun" /** `preun` */ val Preun = "preun" /** `verifyscript` */ val Verifyscript = "verifyscript" /** `posttrans` */ val Posttrans = "posttrans" } override def projectConfigurations: Seq[Configuration] = Seq(Rpm) override lazy val projectSettings = Seq( rpmOs := "Linux", // TODO - default to something else? rpmRelease := "1", rpmPrefix := None, rpmVendor := "", // TODO - Maybe pull in organization? rpmLicense := None, rpmDistribution := None, rpmUrl := None, rpmGroup := None, rpmPackager := None, rpmIcon := None, rpmAutoprov := "yes", rpmAutoreq := "yes", rpmProvides := Seq.empty, rpmRequirements := Seq.empty, rpmPrerequisites := Seq.empty, rpmObsoletes := Seq.empty, rpmConflicts := Seq.empty, rpmSetarch := None, rpmChangelogFile := None, rpmBrpJavaRepackJars := false, rpmPretrans := None, rpmPre := None, rpmPost := None, rpmVerifyscript := None, rpmPosttrans := None, rpmPreun := None, rpmPostun := None, rpmScriptsDirectory <<= sourceDirectory apply (_ / "rpm" / Names.Scriptlets), // Explicitly defer default settings to generic Linux Settings. 
maintainerScripts in Rpm <<= maintainerScripts in Linux, packageSummary in Rpm <<= packageSummary in Linux, packageDescription in Rpm <<= packageDescription in Linux, target in Rpm <<= target(_ / "rpm"), name in Rpm <<= name in Linux, packageName in Rpm <<= packageName in Linux, executableScriptName in Rpm <<= executableScriptName in Linux, rpmDaemonLogFile := s"${(packageName in Linux).value}.log", daemonStdoutLogFile in Rpm := Some((rpmDaemonLogFile).value), // override the linux sourceDirectory setting sourceDirectory in Rpm <<= sourceDirectory ) ++ inConfig(Rpm)(Seq( packageArchitecture := "noarch", rpmMetadata <<= (packageName, version, rpmRelease, rpmPrefix, packageArchitecture, rpmVendor, rpmOs, packageSummary, packageDescription, rpmAutoprov, rpmAutoreq) apply RpmMetadata, rpmDescription <<= (rpmLicense, rpmDistribution, rpmUrl, rpmGroup, rpmPackager, rpmIcon, rpmChangelogFile) apply RpmDescription, rpmDependencies <<= (rpmProvides, rpmRequirements, rpmPrerequisites, rpmObsoletes, rpmConflicts) apply RpmDependencies, maintainerScripts := { val scripts = maintainerScripts.value if (rpmBrpJavaRepackJars.value) { val pre = scripts.getOrElse(Names.Pre, Nil) val scriptBits = IO.readStream(RpmPlugin.osPostInstallMacro.openStream, Charset forName "UTF-8") scripts + (Names.Pre -> (pre :+ scriptBits)) } else { scripts } }, rpmScripts := RpmScripts.fromMaintainerScripts(maintainerScripts.value), rpmSpecConfig <<= (rpmMetadata, rpmDescription, rpmDependencies, rpmSetarch, rpmScripts, linuxPackageMappings, linuxPackageSymlinks, defaultLinuxInstallLocation) map RpmSpec, packageBin <<= (rpmSpecConfig, target, streams) map { (spec, dir, s) => spec.validate(s.log) RpmHelper.buildRpm(spec, dir, s.log) }, rpmLint <<= (packageBin, streams) map { (rpm, s) => (Process(Seq("rpmlint", "-v", rpm.getAbsolutePath)) ! s.log) match { case 0 => () case x => sys.error("Failed to run rpmlint, exit status: " + x) } } )) } object RpmDeployPlugin extends AutoPlugin { import RpmPlugin.autoImport._ override def requires = RpmPlugin override def projectSettings = SettingsHelper.makeDeploymentSettings(Rpm, packageBin in Rpm, "rpm") }
giabao/sbt-native-packager
src/main/scala/com/typesafe/sbt/packager/rpm/RpmPlugin.scala
Scala
bsd-2-clause
4,741
// Copyright 2014,2015,2016,2017,2018,2019,2020 Commonwealth Bank of Australia // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package commbank.grimlock.spark.transform import commbank.grimlock.framework.encoding.Value import commbank.grimlock.framework.position.{ Coordinates1, Position } import commbank.grimlock.spark.environment.Context import commbank.grimlock.library.transform.{ CutRules => FwCutRules } import shapeless.HList /** Implement cut rules for Spark. */ case object CutRules extends FwCutRules[Context.E] { def fixed[ K <: HList, V <: HList ]( ext: Context.E[Stats[K, V]], min: Position[V], max: Position[V], k: Long ): Context.E[Map[Position[K], List[Double]]] = fixedFromStats(ext, min, max, k) def squareRootChoice[ K <: HList, V <: HList ]( ext: Context.E[Stats[K, V]], count: Position[V], min: Position[V], max: Position[V] ): Context.E[Map[Position[K], List[Double]]] = squareRootChoiceFromStats(ext, count, min, max) def sturgesFormula[ K <: HList, V <: HList ]( ext: Context.E[Stats[K, V]], count: Position[V], min: Position[V], max: Position[V] ): Context.E[Map[Position[K], List[Double]]] = sturgesFormulaFromStats(ext, count, min, max) def riceRule[ K <: HList, V <: HList ]( ext: Context.E[Stats[K, V]], count: Position[V], min: Position[V], max: Position[V] ): Context.E[Map[Position[K], List[Double]]] = riceRuleFromStats(ext, count, min, max) def doanesFormula[ K <: HList, V <: HList ]( ext: Context.E[Stats[K, V]], count: Position[V], min: Position[V], max: Position[V], skewness: Position[V] ): Context.E[Map[Position[K], List[Double]]] = doanesFormulaFromStats(ext, count, min, max, skewness) def scottsNormalReferenceRule[ K <: HList, V <: HList ]( ext: Context.E[Stats[K, V]], count: Position[V], min: Position[V], max: Position[V], sd: Position[V] ): Context.E[Map[Position[K], List[Double]]] = scottsNormalReferenceRuleFromStats(ext, count, min, max, sd) def breaks[ T <% Value[T] ]( range: Map[T, List[Double]] ): Context.E[Map[Position[Coordinates1[T]], List[Double]]] = breaksFromMap(range) }
CommBank/grimlock
grimlock-core/src/main/scala/commbank/grimlock/spark/CutRules.scala
Scala
apache-2.0
2,758
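For reference, the rule names above are the standard histogram bin-count heuristics over the observation count n: square-root choice k = ceil(sqrt(n)), Sturges k = ceil(log2 n) + 1, and Rice k = ceil(2 * n^(1/3)); Doane's and Scott's variants additionally use skewness and standard deviation, which is why those methods take extra positions. A toy computation of the first three:

def sturges(n: Long): Int    = math.ceil(math.log(n.toDouble) / math.log(2)).toInt + 1
def riceRule(n: Long): Int   = math.ceil(2 * math.cbrt(n.toDouble)).toInt
def squareRoot(n: Long): Int = math.ceil(math.sqrt(n.toDouble)).toInt

(sturges(1000), riceRule(1000), squareRoot(1000)) // (11, 20, 32)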
/*
 * Scala (https://www.scala-lang.org)
 *
 * Copyright EPFL and Lightbend, Inc.
 *
 * Licensed under Apache License 2.0
 * (http://www.apache.org/licenses/LICENSE-2.0).
 *
 * See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 */

package scala.jdk

import org.junit.Test
import org.junit.Assert.{assertFalse, assertTrue}

import scala.collection.{AnyStepper, DoubleStepper, IntStepper, LongStepper, Stepper, concurrent => cc, immutable => ci, mutable => cm}
import scala.{collection => co}

@deprecated("Tests deprecated API", since="2.13")
class StepperConversionTest {
  def isAcc[X](x: X): Boolean = x match {
    case _: AnyAccumulatorStepper[_] => true
    case _: DoubleAccumulatorStepper => true
    case _: IntAccumulatorStepper => true
    case _: LongAccumulatorStepper => true
    case _ => false
  }

  trait SpecCheck {
    def check[X](x: X): Boolean
    def msg[X](x: X): String
    def assert(x: Any): Unit = if (!check(x)) assertTrue(msg(x), false)
  }
  object SpecCheck {
    def apply(f: Any => Boolean, err: Any => String = _ => "SpecCheck failed") = new SpecCheck {
      def check[X](x: X): Boolean = f(x)
      def msg[X](x: X): String = err(x)
    }
  }

  def yay(x: => Stepper[_])(implicit correctSpec: SpecCheck): Unit = {
    correctSpec.assert(x)
    assertTrue(isAcc(x))
  }

  def nay[X](x: => Stepper[_])(implicit correctSpec: SpecCheck): Unit = {
    correctSpec.assert(x)
    assertFalse(isAcc(x))
  }

  @Test
  def comprehensivelyGeneric(): Unit = {
    implicit val spec = SpecCheck(_.isInstanceOf[AnyStepper[_]])

    // Collection section
    nay( co.Iterator[String]("salmon").buffered.stepper )
    nay( co.IndexedSeq[String]("salmon").stepper )
    nay( co.Iterable[String]("salmon").stepper )
    nay( co.Iterable[String]("salmon").view.stepper )
    nay( co.Iterator[String]("salmon").stepper )
    nay( co.LinearSeq[String]("salmon").stepper )
    nay( co.Map[String, String]("fish" -> "salmon").stepper )
    nay( co.Map[String, String]("fish" -> "salmon").keyStepper )
    nay( co.Map[String, String]("fish" -> "salmon").valueStepper )
    nay( co.Seq[String]("salmon").stepper )
    nay( co.Seq[String]("salmon").view.stepper )
    nay( co.Set[String]("salmon").stepper )
    nay( co.SortedMap[String, String]("fish" -> "salmon").stepper )
    nay( co.SortedMap[String, String]("fish" -> "salmon").keyStepper )
    nay( co.SortedMap[String, String]("fish" -> "salmon").valueStepper )
    nay( co.SortedSet[String]("salmon").stepper )
    yay( co.Iterable[String]("salmon").to(Accumulator).stepper )
    yay( (co.Iterator[String]("salmon"): co.IterableOnce[String]).iterator.to(Accumulator).stepper )
    yay( co.Iterable[String]("salmon").view.to(Accumulator).stepper )

    // Immutable section
    nay( ci.::("salmon", Nil).stepper )
    nay( (ci.HashMap[String, String]("fish" -> "salmon"): ci.AbstractMap[String, String]).stepper )
    nay( (ci.HashMap[String, String]("fish" -> "salmon"): ci.AbstractMap[String, String]).keyStepper )
    nay( (ci.HashMap[String, String]("fish" -> "salmon"): ci.AbstractMap[String, String]).valueStepper )
    nay( ci.HashSet[String]("salmon").stepper )
    nay( ci.IndexedSeq[String]("salmon").stepper )
    nay( ci.IntMap[String](123456 -> "salmon").stepper )
    nay( ci.IntMap[String](123456 -> "salmon").valueStepper )
    nay( ci.Iterable[String]("salmon").stepper )
    nay( ci.LinearSeq[String]("salmon").stepper )
    nay( ci.List[String]("salmon").stepper )
    nay( ci.ListMap[String, String]("fish" -> "salmon").stepper )
    nay( ci.ListMap[String, String]("fish" -> "salmon").keyStepper )
    nay( ci.ListMap[String, String]("fish" -> "salmon").valueStepper )
    nay( ci.ListSet[String]("salmon").stepper )
    nay( ci.LongMap[String](9876543210L -> "salmon").stepper )
    nay( ci.LongMap[String](9876543210L -> "salmon").valueStepper )
    nay( ci.Map[String, String]("fish" -> "salmon").stepper )
    nay( ci.Map[String, String]("fish" -> "salmon").keyStepper )
    nay( ci.Map[String, String]("fish" -> "salmon").valueStepper )
    nay( ci.Queue[String]("salmon").stepper )
    nay( ci.Seq[String]("salmon").stepper )
    nay( ci.Set[String]("salmon").stepper )
    nay( ci.SortedMap[String, String]("fish" -> "salmon").stepper )
    nay( ci.SortedMap[String, String]("fish" -> "salmon").keyStepper )
    nay( ci.SortedMap[String, String]("fish" -> "salmon").valueStepper )
    nay( ci.SortedSet[String]("salmon").stepper )
    nay( ci.Stream[String]("salmon").stepper )
    nay( ci.Stream[String]("salmon").view.stepper )
    nay( ci.LazyList[String]("salmon").stepper )
    nay( ci.LazyList[String]("salmon").view.stepper )
    yay( ci.Iterable[String]("salmon").to(Accumulator).stepper )
    nay( ci.TreeMap[String, String]("fish" -> "salmon").stepper )
    nay( ci.TreeMap[String, String]("fish" -> "salmon").keyStepper )
    nay( ci.TreeMap[String, String]("fish" -> "salmon").valueStepper )
    nay( ci.TreeSet[String]("salmon").stepper )
    nay( ci.Vector[String]("salmon").stepper )

    // Mutable section
    nay( (cm.ArrayBuffer[String]("salmon"): cm.AbstractBuffer[String]).stepper )
    nay( (cm.PriorityQueue[String]("salmon"): cm.AbstractIterable[String]).stepper )
    nay( (cm.HashMap[String, String]("fish" -> "salmon"): cm.AbstractMap[String, String]).stepper )
    nay( (cm.HashMap[String, String]("fish" -> "salmon"): cm.AbstractMap[String, String]).keyStepper )
    nay( (cm.HashMap[String, String]("fish" -> "salmon"): cm.AbstractMap[String, String]).valueStepper )
    nay( (cm.ArrayBuffer[String]("salmon"): cm.AbstractSeq[String]).stepper )
    nay( (cm.HashSet[String]("salmon"): cm.AbstractSet[String]).stepper )
    nay( cm.AnyRefMap[String,String]("fish" -> "salmon").stepper )
    nay( cm.AnyRefMap[String,String]("fish" -> "salmon").keyStepper )
    nay( cm.AnyRefMap[String,String]("fish" -> "salmon").valueStepper )
    nay( cm.ArrayBuffer[String]("salmon").stepper )
    nay( (Array("salmon"): cm.ArraySeq[String]).stepper )
    nay( cm.ArraySeq[String]("salmon").stepper )
    nay( cm.ArrayStack[String]("salmon").stepper )
    nay( (cm.ArrayBuffer[String]("salmon"): cm.Buffer[String]).stepper )
    nay( cm.HashMap[String, String]("fish" -> "salmon").stepper )
    nay( cm.HashMap[String, String]("fish" -> "salmon").keyStepper )
    nay( cm.HashMap[String, String]("fish" -> "salmon").valueStepper )
    nay( cm.HashSet[String]("salmon").stepper )
    nay( cm.IndexedSeq[String]("salmon").stepper )
    nay( cm.IndexedSeq[String]("salmon").view.stepper )
    nay( cm.Iterable[String]("salmon").stepper )
    nay( cm.LinkedHashMap[String, String]("fish" -> "salmon").stepper )
    nay( cm.LinkedHashMap[String, String]("fish" -> "salmon").keyStepper )
    nay( cm.LinkedHashMap[String, String]("fish" -> "salmon").valueStepper )
    nay( cm.LinkedHashSet[String]("salmon").stepper )
    nay( cm.ListBuffer[String]("salmon").stepper )
    nay( cm.ListMap[String, String]("fish" -> "salmon").stepper )
    nay( cm.ListMap[String, String]("fish" -> "salmon").keyStepper )
    nay( cm.ListMap[String, String]("fish" -> "salmon").valueStepper )
    nay( cm.LongMap[String](9876543210L -> "salmon").stepper )
    nay( cm.LongMap[String](9876543210L -> "salmon").valueStepper )
    nay( cm.Map[String, String]("fish" -> "salmon").stepper )
    nay( cm.Map[String, String]("fish" -> "salmon").keyStepper )
    nay( cm.Map[String, String]("fish" -> "salmon").valueStepper )
    nay( cm.OpenHashMap[String, String]("fish" -> "salmon").stepper )
    nay( cm.OpenHashMap[String, String]("fish" -> "salmon").keyStepper )
    nay( cm.OpenHashMap[String, String]("fish" -> "salmon").valueStepper )
    nay( cm.PriorityQueue[String]("salmon").stepper )
    nay( cm.Queue[String]("salmon").stepper ) // Used to be `Good` in 2.12, in 2.13 `Queue` is no longer a `LinearSeq`
    nay( cm.Seq[String]("salmon").stepper )
    nay( cm.Set[String]("salmon").stepper )
    nay( cm.SortedSet[String]("salmon").stepper )
    nay( cm.Stack[String]("salmon").stepper ) // Used to be `Good` in 2.12, in 2.13 `Stack` is no longer a `LinearSeq`
    yay( cm.Iterable[String]("salmon").to(Accumulator).stepper )
    nay( cm.TreeSet[String]("salmon").stepper )
    nay( cm.UnrolledBuffer[String]("salmon").stepper )
    nay( cm.WeakHashMap[String, String]("fish" -> "salmon").stepper )
    nay( cm.WeakHashMap[String, String]("fish" -> "salmon").keyStepper )
    nay( cm.WeakHashMap[String, String]("fish" -> "salmon").valueStepper )

    // Java 6 converters section

    // Concurrent section
    nay( cc.TrieMap[String, String]("fish" -> "salmon").stepper )
    nay( cc.TrieMap[String, String]("fish" -> "salmon").keyStepper )
    nay( cc.TrieMap[String, String]("fish" -> "salmon").valueStepper )
    nay( (cc.TrieMap[String, String]("fish" -> "salmon"): cc.Map[String, String]).stepper )
    nay( (cc.TrieMap[String, String]("fish" -> "salmon"): cc.Map[String, String]).keyStepper )
    nay( (cc.TrieMap[String, String]("fish" -> "salmon"): cc.Map[String, String]).valueStepper )
  }

  @Test
  def comprehensivelyDouble(): Unit = {
    implicit val spec = SpecCheck(_.isInstanceOf[DoubleStepper])

    //Double-specific tests

    // Collection section
    nay( co.Iterator[Double](3.14159).buffered.stepper )
    nay( co.IndexedSeq[Double](3.14159).stepper )
    nay( co.Iterable[Double](3.14159).stepper )
    nay( co.Iterable[Double](3.14159).view.stepper )
    nay( co.Iterator[Double](3.14159).stepper )
    nay( co.LinearSeq[Double](3.14159).stepper )
    nay( co.Map[Double, Double](2.718281828 -> 3.14159).keyStepper )
    nay( co.Map[Double, Double](2.718281828 -> 3.14159).valueStepper )
    nay( co.Seq[Double](3.14159).stepper )
    nay( co.Seq[Double](3.14159).view.stepper )
    nay( co.Set[Double](3.14159).stepper )
    nay( co.SortedMap[Double, Double](2.718281828 -> 3.14159).keyStepper )
    nay( co.SortedMap[Double, Double](2.718281828 -> 3.14159).valueStepper )
    nay( co.SortedSet[Double](3.14159).stepper )
    yay( co.Iterable[Double](3.14159).to(Accumulator).stepper )
    yay( (co.Iterator[Double](3.14159): co.IterableOnce[Double]).iterator.to(Accumulator).stepper )
    yay( co.Iterable[Double](3.14159).view.to(Accumulator).stepper )

    // Immutable section
    nay( ci.::(3.14159, Nil).stepper )
    nay( (ci.HashMap[Double, Double](2.718281828 -> 3.14159): ci.AbstractMap[Double, Double]).keyStepper )
    nay( (ci.HashMap[Double, Double](2.718281828 -> 3.14159): ci.AbstractMap[Double, Double]).valueStepper )
    nay( ci.HashSet[Double](3.14159).stepper )
    nay( ci.IndexedSeq[Double](3.14159).stepper )
    nay( ci.IntMap[Double](123456 -> 3.14159).valueStepper )
    nay( ci.Iterable[Double](3.14159).stepper )
    nay( ci.LinearSeq[Double](3.14159).stepper )
    nay( ci.List[Double](3.14159).stepper )
    nay( ci.ListMap[Double, Double](2.718281828 -> 3.14159).keyStepper )
    nay( ci.ListMap[Double, Double](2.718281828 -> 3.14159).valueStepper )
    nay( ci.ListSet[Double](3.14159).stepper )
    nay( ci.LongMap[Double](9876543210L -> 3.14159).valueStepper )
    nay( ci.Map[Double, Double](2.718281828 -> 3.14159).keyStepper )
    nay( ci.Map[Double, Double](2.718281828 -> 3.14159).valueStepper )
    nay( ci.Queue[Double](3.14159).stepper )
    nay( ci.Seq[Double](3.14159).stepper )
    nay( ci.Set[Double](3.14159).stepper )
    nay( ci.SortedMap[Double, Double](2.718281828 -> 3.14159).keyStepper )
    nay( ci.SortedMap[Double, Double](2.718281828 -> 3.14159).valueStepper )
    nay( ci.SortedSet[Double](3.14159).stepper )
    nay( ci.Stream[Double](3.14159).stepper )
    nay( ci.Stream[Double](3.14159).view.stepper )
    nay( ci.LazyList[Double](3.14159).stepper )
    nay( ci.LazyList[Double](3.14159).view.stepper )
    yay( ci.Iterable[Double](3.14159).to(Accumulator).stepper )
    nay( ci.TreeMap[Double, Double](2.718281828 -> 3.14159).keyStepper )
    nay( ci.TreeMap[Double, Double](2.718281828 -> 3.14159).valueStepper )
    nay( ci.TreeSet[Double](3.14159).stepper )
    nay( ci.Vector[Double](3.14159).stepper )

    // Mutable section
    nay( (cm.ArrayBuffer[Double](3.14159): cm.AbstractBuffer[Double]).stepper )
    nay( (cm.PriorityQueue[Double](3.14159): cm.AbstractIterable[Double]).stepper )
    nay( (cm.HashMap[Double, Double](2.718281828 -> 3.14159): cm.AbstractMap[Double, Double]).keyStepper )
    nay( (cm.HashMap[Double, Double](2.718281828 -> 3.14159): cm.AbstractMap[Double, Double]).valueStepper )
    nay( (cm.ArrayBuffer[Double](3.14159): cm.AbstractSeq[Double]).stepper )
    nay( (cm.HashSet[Double](3.14159): cm.AbstractSet[Double]).stepper )
    nay( cm.AnyRefMap[String,Double]("fish" -> 3.14159).valueStepper )
    nay( cm.ArrayBuffer[Double](3.14159).stepper )
    nay( (Array(3.14159): cm.ArraySeq[Double]).stepper )
    nay( cm.ArraySeq[Double](3.14159).stepper )
    nay( cm.ArrayStack[Double](3.14159).stepper )
    nay( (cm.ArrayBuffer[Double](3.14159): cm.Buffer[Double]).stepper )
    nay( cm.HashMap[Double, Double](2.718281828 -> 3.14159).keyStepper )
    nay( cm.HashMap[Double, Double](2.718281828 -> 3.14159).valueStepper )
    nay( cm.HashSet[Double](3.14159).stepper )
    nay( cm.IndexedSeq[Double](3.14159).stepper )
    nay( cm.IndexedSeq[Double](3.14159).view.stepper )
    nay( cm.Iterable[Double](3.14159).stepper )
    nay( cm.LinkedHashMap[Double, Double](2.718281828 -> 3.14159).keyStepper )
    nay( cm.LinkedHashMap[Double, Double](2.718281828 -> 3.14159).valueStepper )
    nay( cm.LinkedHashSet[Double](3.14159).stepper )
    nay( cm.ListBuffer[Double](3.14159).stepper )
    nay( cm.ListMap[Double, Double](2.718281828 -> 3.14159).keyStepper )
    nay( cm.ListMap[Double, Double](2.718281828 -> 3.14159).valueStepper )
    nay( cm.LongMap[Double](9876543210L -> 3.14159).valueStepper )
    nay( cm.Map[Double, Double](2.718281828 -> 3.14159).keyStepper )
    nay( cm.Map[Double, Double](2.718281828 -> 3.14159).valueStepper )
    nay( cm.OpenHashMap[Double, Double](2.718281828 -> 3.14159).keyStepper )
    nay( cm.OpenHashMap[Double, Double](2.718281828 -> 3.14159).valueStepper )
    nay( cm.PriorityQueue[Double](3.14159).stepper )
    nay( cm.Queue[Double](3.14159).stepper ) // Used to be `Good` in 2.12, in 2.13 `Queue` is no longer a `LinearSeq`
    nay( cm.Seq[Double](3.14159).stepper )
    nay( cm.Set[Double](3.14159).stepper )
    nay( cm.SortedSet[Double](3.14159).stepper )
    nay( cm.Stack[Double](3.14159).stepper ) // Used to be `Good` in 2.12, in 2.13 `Stack` is no longer a `LinearSeq`
    yay( cm.Iterable[Double](3.14159).to(Accumulator).stepper )
    nay( cm.TreeSet[Double](3.14159).stepper )
    nay( cm.UnrolledBuffer[Double](3.14159).stepper )
    nay( cm.WeakHashMap[Double, Double](2.718281828 -> 3.14159).keyStepper )
    nay( cm.WeakHashMap[Double, Double](2.718281828 -> 3.14159).valueStepper )

    // Java 6 converters section

    // Concurrent section
    nay( cc.TrieMap[Double, Double](2.718281828 -> 3.14159).keyStepper )
    nay( cc.TrieMap[Double, Double](2.718281828 -> 3.14159).valueStepper )
    nay( (cc.TrieMap[Double, Double](2.718281828 -> 3.14159): cc.Map[Double, Double]).keyStepper )
    nay( (cc.TrieMap[Double, Double](2.718281828 -> 3.14159): cc.Map[Double, Double]).valueStepper )
  }

  @Test
  def comprehensivelyInt(): Unit = {
    implicit val spec = SpecCheck(_.isInstanceOf[IntStepper], x => s"$x should be an IntStepper")

    // Int-specific tests
    nay( co.BitSet(42).stepper )
    nay( ci.BitSet(42).stepper )
    nay( ci.NumericRange(123456, 123458, 1).stepper )
    nay( cm.BitSet(42).stepper )
    nay( (1 until 2).stepper )
    nay( ci.IntMap[String](123456 -> "salmon").keyStepper )
    nay( ci.IntMap[Double](123456 -> 3.14159).keyStepper )
    nay( ci.IntMap[Long](123456 -> 0x123456789L).keyStepper )

    // Collection section
    nay( co.Iterator[Int](654321).buffered.stepper )
    nay( co.IndexedSeq[Int](654321).stepper )
    nay( co.Iterable[Int](654321).stepper )
    nay( co.Iterable[Int](654321).view.stepper )
    nay( co.Iterator[Int](654321).stepper )
    nay( co.LinearSeq[Int](654321).stepper )
    nay( co.Map[Int, Int](0xDEEDED -> 654321).keyStepper )
    nay( co.Map[Int, Int](0xDEEDED -> 654321).valueStepper )
    nay( co.Seq[Int](654321).stepper )
    nay( co.Seq[Int](654321).view.stepper )
    nay( co.Set[Int](654321).stepper )
    nay( co.SortedMap[Int, Int](0xDEEDED -> 654321).keyStepper )
    nay( co.SortedMap[Int, Int](0xDEEDED -> 654321).valueStepper )
    nay( co.SortedSet[Int](654321).stepper )
    yay( co.Iterable[Int](654321).to(Accumulator).stepper )
    yay( (co.Iterator[Int](654321): co.IterableOnce[Int]).to(Accumulator).stepper )
    yay( co.Iterable[Int](654321).view.to(Accumulator).stepper )

    // Immutable section
    nay( ci.::(654321, Nil).stepper )
    nay( (ci.HashMap[Int, Int](0xDEEDED -> 654321): ci.AbstractMap[Int, Int]).keyStepper )
    nay( (ci.HashMap[Int, Int](0xDEEDED -> 654321): ci.AbstractMap[Int, Int]).valueStepper )
    nay( ci.HashSet[Int](654321).stepper )
    nay( ci.IndexedSeq[Int](654321).stepper )
    nay( ci.IntMap[Int](123456 -> 654321).keyStepper )
    nay( ci.IntMap[Int](123456 -> 654321).valueStepper )
    nay( ci.Iterable[Int](654321).stepper )
    nay( ci.LinearSeq[Int](654321).stepper )
    nay( ci.List[Int](654321).stepper )
    nay( ci.ListMap[Int, Int](0xDEEDED -> 654321).keyStepper )
    nay( ci.ListMap[Int, Int](0xDEEDED -> 654321).valueStepper )
    nay( ci.ListSet[Int](654321).stepper )
    nay( ci.LongMap[Int](9876543210L -> 654321).valueStepper )
    nay( ci.Map[Int, Int](0xDEEDED -> 654321).keyStepper )
    nay( ci.Map[Int, Int](0xDEEDED -> 654321).valueStepper )
    nay( ci.Queue[Int](654321).stepper )
    nay( ci.Seq[Int](654321).stepper )
    nay( ci.Set[Int](654321).stepper )
    nay( ci.SortedMap[Int, Int](0xDEEDED -> 654321).keyStepper )
    nay( ci.SortedMap[Int, Int](0xDEEDED -> 654321).valueStepper )
    nay( ci.SortedSet[Int](654321).stepper )
    nay( ci.Stream[Int](654321).stepper )
    nay( ci.Stream[Int](654321).view.stepper )
    nay( ci.LazyList[Int](654321).stepper )
    nay( ci.LazyList[Int](654321).view.stepper )
    yay( ci.Iterable[Int](654321).to(Accumulator).stepper )
    nay( ci.TreeMap[Int, Int](0xDEEDED -> 654321).keyStepper )
    nay( ci.TreeMap[Int, Int](0xDEEDED -> 654321).valueStepper )
    nay( ci.TreeSet[Int](654321).stepper )
    nay( ci.Vector[Int](654321).stepper )

    // Mutable section
    nay( (cm.ArrayBuffer[Int](654321): cm.AbstractBuffer[Int]).stepper )
    nay( (cm.PriorityQueue[Int](654321): cm.AbstractIterable[Int]).stepper )
    nay( (cm.HashMap[Int, Int](0xDEEDED -> 654321): cm.AbstractMap[Int, Int]).keyStepper )
    nay( (cm.HashMap[Int, Int](0xDEEDED -> 654321): cm.AbstractMap[Int, Int]).valueStepper )
    nay( (cm.ArrayBuffer[Int](654321): cm.AbstractSeq[Int]).stepper )
    nay( (cm.HashSet[Int](654321): cm.AbstractSet[Int]).stepper )
    nay( cm.AnyRefMap[String, Int]("fish" -> 654321).valueStepper )
    nay( cm.ArrayBuffer[Int](654321).stepper )
    nay( (Array(654321): cm.ArraySeq[Int]).stepper )
    nay( cm.ArraySeq[Int](654321).stepper )
    nay( cm.ArrayStack[Int](654321).stepper )
    nay( (cm.ArrayBuffer[Int](654321): cm.Buffer[Int]).stepper )
    nay( cm.HashMap[Int, Int](0xDEEDED -> 654321).keyStepper )
    nay( cm.HashMap[Int, Int](0xDEEDED -> 654321).valueStepper )
    nay( cm.HashSet[Int](654321).stepper )
    nay( cm.IndexedSeq[Int](654321).stepper )
    nay( cm.IndexedSeq[Int](654321).view.stepper )
    nay( cm.Iterable[Int](654321).stepper )
    nay( cm.LinkedHashMap[Int, Int](0xDEEDED -> 654321).keyStepper )
    nay( cm.LinkedHashMap[Int, Int](0xDEEDED -> 654321).valueStepper )
    nay( cm.LinkedHashSet[Int](654321).stepper )
    nay( cm.ListBuffer[Int](654321).stepper )
    nay( cm.ListMap[Int, Int](0xDEEDED -> 654321).keyStepper )
    nay( cm.ListMap[Int, Int](0xDEEDED -> 654321).valueStepper )
    nay( cm.LongMap[Int](9876543210L -> 654321).valueStepper )
    nay( cm.Map[Int, Int](0xDEEDED -> 654321).keyStepper )
    nay( cm.Map[Int, Int](0xDEEDED -> 654321).valueStepper )
    nay( cm.OpenHashMap[Int, Int](0xDEEDED -> 654321).keyStepper )
    nay( cm.OpenHashMap[Int, Int](0xDEEDED -> 654321).valueStepper )
    nay( cm.PriorityQueue[Int](654321).stepper )
    nay( cm.Queue[Int](654321).stepper ) // Used to be `Good` in 2.12, in 2.13 `Queue` is no longer a `LinearSeq`
    nay( cm.Seq[Int](654321).stepper )
    nay( cm.Set[Int](654321).stepper )
    nay( cm.SortedSet[Int](654321).stepper )
    nay( cm.Stack[Int](654321).stepper ) // Used to be `Good` in 2.12, in 2.13 `Stack` is no longer a `LinearSeq`
    yay( cm.Iterable[Int](654321).to(Accumulator).stepper )
    nay( cm.TreeSet[Int](654321).stepper )
    nay( cm.UnrolledBuffer[Int](654321).stepper )
    nay( cm.WeakHashMap[Int, Int](0xDEEDED -> 654321).keyStepper )
    nay( cm.WeakHashMap[Int, Int](0xDEEDED -> 654321).valueStepper )

    // Java 6 converters section

    // Concurrent section
    nay( cc.TrieMap[Int, Int](0xDEEDED -> 654321).keyStepper )
    nay( cc.TrieMap[Int, Int](0xDEEDED -> 654321).valueStepper )
    nay( (cc.TrieMap[Int, Int](0xDEEDED -> 654321): cc.Map[Int, Int]).keyStepper )
    nay( (cc.TrieMap[Int, Int](0xDEEDED -> 654321): cc.Map[Int, Int]).valueStepper )
  }

  @Test
  def shortWidening(): Unit = {
    implicit val spec = SpecCheck(_.isInstanceOf[IntStepper], x => s"$x should be an IntStepper")

    nay( Array[Short](654321.toShort).stepper )
    nay( (Array[Short](654321.toShort): cm.ArraySeq[Short]).stepper )

    //TODO: None of these currently work because there are no native Stepper implementations:

    //nay( ci.NumericRange(123456.toShort, 123458.toShort, 1.toShort).stepper )
    //nay( ((Array[Short](654321.toShort): cm.ArraySeq[Short]): cm.ArrayLike[Short, cm.ArraySeq[Short]]).stepper )
    //nay( (Array[Short](654321.toShort): cm.ArrayOps[Short]).stepper )
    //nay( cm.ResizableArray[Short](654321.toShort).stepper )
  }

  @Test
  def comprehensivelyLong(): Unit = {
    implicit val spec = SpecCheck(_.isInstanceOf[LongStepper])

    // Long-specific tests
    nay( ci.NumericRange(9876543210L, 9876543212L, 1L).stepper )
    nay( ci.LongMap[String](9876543210L -> "salmon").keyStepper )
    nay( cm.LongMap[String](9876543210L -> "salmon").keyStepper )
    nay( ci.LongMap[Double](9876543210L -> 3.14159).keyStepper )
    nay( cm.LongMap[Double](9876543210L -> 3.14159).keyStepper )
    nay( ci.LongMap[Int](9876543210L -> 654321).keyStepper )
    nay( cm.LongMap[Int](9876543210L -> 654321).keyStepper )

    // Collection section
    nay( co.Iterator[Long](0x123456789L).buffered.stepper )
    nay( co.IndexedSeq[Long](0x123456789L).stepper )
    nay( co.Iterable[Long](0x123456789L).stepper )
    nay( co.Iterable[Long](0x123456789L).view.stepper )
    nay( co.Iterator[Long](0x123456789L).stepper )
    nay( co.LinearSeq[Long](0x123456789L).stepper )
    nay( co.Map[Long, Long](1234567654321L -> 0x123456789L).keyStepper )
    nay( co.Map[Long, Long](1234567654321L -> 0x123456789L).valueStepper )
    nay( co.Seq[Long](0x123456789L).stepper )
    nay( co.Seq[Long](0x123456789L).view.stepper )
    nay( co.Set[Long](0x123456789L).stepper )
    nay( co.SortedMap[Long, Long](1234567654321L -> 0x123456789L).keyStepper )
    nay( co.SortedMap[Long, Long](1234567654321L -> 0x123456789L).valueStepper )
    nay( co.SortedSet[Long](0x123456789L).stepper )
    yay( co.Iterable[Long](0x123456789L).to(Accumulator).stepper )
    yay( (co.Iterator[Long](0x123456789L): co.IterableOnce[Long]).to(Accumulator).stepper )
    yay( co.Iterable[Long](0x123456789L).view.to(Accumulator).stepper )

    // Immutable section
    nay( ci.::(0x123456789L, Nil).stepper )
    nay( (ci.HashMap[Long, Long](1234567654321L -> 0x123456789L): ci.AbstractMap[Long, Long]).keyStepper )
    nay( (ci.HashMap[Long, Long](1234567654321L -> 0x123456789L): ci.AbstractMap[Long, Long]).valueStepper )
    nay( ci.HashSet[Long](0x123456789L).stepper )
    nay( ci.IndexedSeq[Long](0x123456789L).stepper )
    nay( ci.IntMap[Long](123456 -> 0x123456789L).valueStepper )
    nay( ci.Iterable[Long](0x123456789L).stepper )
    nay( ci.LinearSeq[Long](0x123456789L).stepper )
    nay( ci.List[Long](0x123456789L).stepper )
    nay( ci.ListMap[Long, Long](1234567654321L -> 0x123456789L).keyStepper )
    nay( ci.ListMap[Long, Long](1234567654321L -> 0x123456789L).valueStepper )
    nay( ci.ListSet[Long](0x123456789L).stepper )
    nay( ci.LongMap[Long](9876543210L -> 0x123456789L).keyStepper )
    nay( ci.LongMap[Long](9876543210L -> 0x123456789L).valueStepper )
    nay( ci.Map[Long, Long](1234567654321L -> 0x123456789L).keyStepper )
    nay( ci.Map[Long, Long](1234567654321L -> 0x123456789L).valueStepper )
    nay( ci.Queue[Long](0x123456789L).stepper )
    nay( ci.Seq[Long](0x123456789L).stepper )
    nay( ci.Set[Long](0x123456789L).stepper )
    nay( ci.SortedMap[Long, Long](1234567654321L -> 0x123456789L).keyStepper )
    nay( ci.SortedMap[Long, Long](1234567654321L -> 0x123456789L).valueStepper )
    nay( ci.SortedSet[Long](0x123456789L).stepper )
    nay( ci.Stream[Long](0x123456789L).stepper )
    nay( ci.Stream[Long](0x123456789L).view.stepper )
    nay( ci.LazyList[Long](0x123456789L).stepper )
    nay( ci.LazyList[Long](0x123456789L).view.stepper )
    yay( ci.Iterable[Long](0x123456789L).to(Accumulator).stepper )
    nay( ci.TreeMap[Long, Long](1234567654321L -> 0x123456789L).keyStepper )
    nay( ci.TreeMap[Long, Long](1234567654321L -> 0x123456789L).valueStepper )
    nay( ci.TreeSet[Long](0x123456789L).stepper )
    nay( ci.Vector[Long](0x123456789L).stepper )

    // Mutable section
    nay( (cm.ArrayBuffer[Long](0x123456789L): cm.AbstractBuffer[Long]).stepper )
    nay( (cm.PriorityQueue[Long](0x123456789L): cm.AbstractIterable[Long]).stepper )
    nay( (cm.HashMap[Long, Long](1234567654321L -> 0x123456789L): cm.AbstractMap[Long, Long]).keyStepper )
    nay( (cm.HashMap[Long, Long](1234567654321L -> 0x123456789L): cm.AbstractMap[Long, Long]).valueStepper )
    nay( (cm.ArrayBuffer[Long](0x123456789L): cm.AbstractSeq[Long]).stepper )
    nay( (cm.HashSet[Long](0x123456789L): cm.AbstractSet[Long]).stepper )
    nay( cm.AnyRefMap[String,Long]("fish" -> 0x123456789L).valueStepper )
    nay( cm.ArrayBuffer[Long](0x123456789L).stepper )
    nay( (Array(0x123456789L): cm.ArraySeq[Long]).stepper )
    nay( cm.ArraySeq[Long](0x123456789L).stepper )
    nay( cm.ArrayStack[Long](0x123456789L).stepper )
    nay( (cm.ArrayBuffer[Long](0x123456789L): cm.Buffer[Long]).stepper )
    nay( cm.HashMap[Long, Long](1234567654321L -> 0x123456789L).keyStepper )
    nay( cm.HashMap[Long, Long](1234567654321L -> 0x123456789L).valueStepper )
    nay( cm.HashSet[Long](0x123456789L).stepper )
    nay( cm.IndexedSeq[Long](0x123456789L).stepper )
    nay( cm.IndexedSeq[Long](0x123456789L).view.stepper )
    nay( cm.Iterable[Long](0x123456789L).stepper )
    nay( cm.LinkedHashMap[Long, Long](1234567654321L -> 0x123456789L).keyStepper )
    nay( cm.LinkedHashMap[Long, Long](1234567654321L -> 0x123456789L).valueStepper )
    nay( cm.LinkedHashSet[Long](0x123456789L).stepper )
    nay( cm.ListBuffer[Long](0x123456789L).stepper )
    nay( cm.ListMap[Long, Long](1234567654321L -> 0x123456789L).keyStepper )
    nay( cm.ListMap[Long, Long](1234567654321L -> 0x123456789L).valueStepper )
    nay( cm.LongMap[Long](9876543210L -> 0x123456789L).keyStepper )
    nay( cm.LongMap[Long](9876543210L -> 0x123456789L).valueStepper )
    nay( cm.Map[Long, Long](1234567654321L -> 0x123456789L).keyStepper )
    nay( cm.Map[Long, Long](1234567654321L -> 0x123456789L).valueStepper )
    nay( cm.OpenHashMap[Long, Long](1234567654321L -> 0x123456789L).keyStepper )
    nay( cm.OpenHashMap[Long, Long](1234567654321L -> 0x123456789L).valueStepper )
    nay( cm.PriorityQueue[Long](0x123456789L).stepper )
    nay( cm.Queue[Long](0x123456789L).stepper ) // Used to be `Good` in 2.12, in 2.13 `Queue` is no longer a `LinearSeq`
    nay( cm.Seq[Long](0x123456789L).stepper )
    nay( cm.Set[Long](0x123456789L).stepper )
    nay( cm.SortedSet[Long](0x123456789L).stepper )
    nay( cm.Stack[Long](0x123456789L).stepper ) // Used to be `Good` in 2.12, in 2.13 `Stack` is no longer a `LinearSeq`
    yay( cm.Iterable[Long](0x123456789L).to(Accumulator).stepper )
    nay( cm.TreeSet[Long](0x123456789L).stepper )
    nay( cm.UnrolledBuffer[Long](0x123456789L).stepper )
    nay( cm.WeakHashMap[Long, Long](1234567654321L -> 0x123456789L).keyStepper )
    nay( cm.WeakHashMap[Long, Long](1234567654321L -> 0x123456789L).valueStepper )

    // Java 6 converters section

    // Concurrent section
    nay( cc.TrieMap[Long, Long](1234567654321L -> 0x123456789L).keyStepper )
    nay( cc.TrieMap[Long, Long](1234567654321L -> 0x123456789L).valueStepper )
    nay( (cc.TrieMap[Long, Long](1234567654321L -> 0x123456789L): cc.Map[Long, Long]).keyStepper )
    nay( (cc.TrieMap[Long, Long](1234567654321L -> 0x123456789L): cc.Map[Long, Long]).valueStepper )
  }

  @Test
  def comprehensivelySpecific(): Unit = {
    implicit val spec = SpecCheck(_.isInstanceOf[IntStepper], x => s"$x should be an IntStepper")

    nay( ci.NumericRange(277: Short, 279: Short, 1: Short).stepper )
    nay( ("salmon": ci.WrappedString).stepper )
  }
}
lrytz/scala
test/junit/scala/jdk/StepperConversionTest.scala
Scala
apache-2.0
29,242
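A minimal standalone sketch (not part of the record above; assumes Scala 2.13) illustrating the property the suite asserts: steppers obtained directly from a collection are never accumulator-backed, while steppers obtained after `.to(Accumulator)` are.

import scala.jdk.Accumulator

object StepperDemo extends App {
  val xs = List(1, 2, 3)
  // Stepper obtained directly from the collection: specialized (an IntStepper
  // here), but not accumulator-backed, so the suite expects nay(...) for it.
  val direct = xs.stepper
  // Stepper obtained after converting to an Accumulator: the only path the
  // suite expects to satisfy isAcc(...), i.e. yay(...).
  val accumulated = xs.to(Accumulator).stepper
  println(direct.getClass.getName)
  println(accumulated.getClass.getName)
}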
/*
 * Contributions:
 * Jean-Francois GUENA: implement "suffixed collection name" feature (issue #39 partially fulfilled)
 * ...
 */
package akka.contrib.persistence.mongodb

import java.util.concurrent.ConcurrentHashMap

import akka.actor._
import akka.stream.Materializer
import com.codahale.metrics.MetricRegistry
import com.typesafe.config.{Config, ConfigFactory}

import scala.collection.JavaConverters._
import scala.concurrent.duration._
import scala.util.Try

object MongoPersistenceExtension extends ExtensionId[MongoPersistenceExtension] with ExtensionIdProvider {

  override def lookup: ExtensionId[MongoPersistenceExtension] = MongoPersistenceExtension

  override def createExtension(actorSystem: ExtendedActorSystem): MongoPersistenceExtension = {
    val settings = MongoSettings(actorSystem.settings)
    val implementation = settings.Implementation
    val implType =
      actorSystem.dynamicAccess.getClassFor[MongoPersistenceExtension](implementation)
        .getOrElse(Class.forName(implementation, true, Thread.currentThread.getContextClassLoader))
    val implCons = implType.getConstructor(classOf[ActorSystem])
    implCons.newInstance(actorSystem).asInstanceOf[MongoPersistenceExtension]
  }

  override def get(actorSystem: ActorSystem): MongoPersistenceExtension = super.get(actorSystem)
}

abstract class MongoPersistenceExtension(actorSystem: ActorSystem) extends Extension {

  implicit val materializer: Materializer = Materializer(actorSystem)

  private val configuredExtensions = new ConcurrentHashMap[Config, ConfiguredExtension].asScala

  def apply(config: Config): ConfiguredExtension = {
    configuredExtensions.putIfAbsent(config, configured(config))
    configuredExtensions(config)
  }

  def configured(config: Config): ConfiguredExtension
}

trait ConfiguredExtension {
  def journaler: MongoPersistenceJournallingApi
  def snapshotter: MongoPersistenceSnapshottingApi
  def readJournal: MongoPersistenceReadJournallingApi
  def registry: MetricRegistry = DropwizardMetrics.metricRegistry
}

object MongoSettings {
  def apply(systemSettings: ActorSystem.Settings): MongoSettings = {
    val fullName = s"${getClass.getPackage.getName}.mongo"
    val systemConfig = systemSettings.config
    systemConfig.checkValid(ConfigFactory.defaultReference(), fullName)
    new MongoSettings(systemConfig.getConfig(fullName))
  }
}

class MongoSettings(val config: Config) {

  def withOverride(by: Config): MongoSettings = {
    new MongoSettings(by.withFallback(config))
  }

  val Implementation: String = config.getString("driver")

  val MongoUri: String = Try(config.getString("mongouri")).toOption match {
    case Some(uri) => uri
    case None => // Use legacy approach
      val Urls = config.getStringList("urls").asScala.toList.mkString(",")
      val Username = Try(config.getString("username")).toOption
      val Password = Try(config.getString("password")).toOption
      val DbName = config.getString("db")
      (for {
        user <- Username
        password <- Password
      } yield {
        s"mongodb://$user:$password@$Urls/$DbName"
      }) getOrElse s"mongodb://$Urls/$DbName"
  }

  val Database: Option[String] = Try(config.getString("database")).toOption

  val JournalCollection: String = config.getString("journal-collection")
  val JournalIndex: String = config.getString("journal-index")
  val JournalSeqNrIndex: String = config.getString("journal-seq-nr-index")
  val JournalTagIndex: String = config.getString("journal-tag-index")
  val JournalWriteConcern: String = config.getString("journal-write-concern")
  val JournalWTimeout: FiniteDuration = config.getDuration("journal-wtimeout", MILLISECONDS).millis
  val JournalFSync: Boolean = config.getBoolean("journal-fsync")
  val JournalAutomaticUpgrade: Boolean = config.getBoolean("journal-automatic-upgrade")

  val SnapsCollection: String = config.getString("snaps-collection")
  val SnapsIndex: String = config.getString("snaps-index")
  val SnapsWriteConcern: String = config.getString("snaps-write-concern")
  val SnapsWTimeout: FiniteDuration = config.getDuration("snaps-wtimeout", MILLISECONDS).millis
  val SnapsFSync: Boolean = config.getBoolean("snaps-fsync")

  val realtimeEnablePersistence: Boolean = config.getBoolean("realtime-enable-persistence")
  val realtimeCollectionName: String = config.getString("realtime-collection")
  val realtimeCollectionSize: Long = config.getLong("realtime-collection-size")

  val MetadataCollection: String = config.getString("metadata-collection")

  val UseLegacyJournalSerialization: Boolean = config.getBoolean("use-legacy-serialization")

  val SuffixBuilderClass: String = config.getString("suffix-builder.class")
  val SuffixSeparator: String = config.getString("suffix-builder.separator")
  val SuffixDropEmptyCollections: Boolean = config.getBoolean("suffix-drop-empty-collections")

  val SuffixMigrationHeavyLoad: Boolean = Option(config.getBoolean("suffix-migration.heavy-load")).getOrElse(false)
  val SuffixMigrationEmptyMetadata: Boolean = Option(config.getBoolean("suffix-migration.empty-metadata")).getOrElse(false)
  val SuffixMigrationMaxInsertRetry: Int = Option(config.getInt("suffix-migration.max-insert-retry")).filter(_ >= 0).getOrElse(1)
  val SuffixMigrationMaxDeleteRetry: Int = Option(config.getInt("suffix-migration.max-delete-retry")).filter(_ >= 0).getOrElse(1)
  val SuffixMigrationMaxEmptyMetadataRetry: Int = Option(config.getInt("suffix-migration.max-empty-metadata-retry")).filter(_ >= 0).getOrElse(1)
  val SuffixMigrationParallelism: Int = Option(config.getInt("suffix-migration.parallelism")).filter(_ > 0).getOrElse(1)

  val MongoMetricsBuilderClass: String = config.getString("metrics-builder.class")

  val CollectionCache: Config = config.getConfig("collection-cache")
}
JeanFrancoisGuena/akka-persistence-mongo
common/src/main/scala/akka/contrib/persistence/mongodb/MongoPersistenceExtension.scala
Scala
apache-2.0
5,789
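A standalone re-statement (not the library's own code) of the MongoUri fallback above: when no "mongouri" key is present, the URI is assembled from the legacy urls/username/password/db keys, with credentials included only when both are set. Key names match the record; the values are invented for the demo.

import com.typesafe.config.ConfigFactory
import scala.collection.JavaConverters._
import scala.util.Try

object LegacyUriDemo extends App {
  // No "mongouri" key here, so the legacy assembly path is taken.
  val config = ConfigFactory.parseString(
    """urls = ["mongo1:27017", "mongo2:27017"]
      |username = jdoe
      |password = secret
      |db = events""".stripMargin)

  val uri = Try(config.getString("mongouri")).toOption.getOrElse {
    val urls = config.getStringList("urls").asScala.mkString(",")
    val creds = for {
      user <- Try(config.getString("username")).toOption
      pass <- Try(config.getString("password")).toOption
    } yield s"$user:$pass@"
    s"mongodb://${creds.getOrElse("")}$urls/${config.getString("db")}"
  }

  println(uri) // mongodb://jdoe:secret@mongo1:27017,mongo2:27017/events
}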
/** This file contains all HBase related types, instances, and functions (on
  * interim basis until it is extracted into its own plugin).
  */
package dsmigrator

trait HBaseTypes extends Classes with Types with Instances {
  // TODO: Create product type value constructor for Table type
  trait Table
  // TODO: Create product type value constructor for Column type
  trait Column
  // TODO: Create product type value constructor for ColumnGroup type
  trait ColumnGroup

  type TableAction = Action[Table]
  type ColumnAction = Action[Column]
  type ColumnGroupAction = Action[ColumnGroup]
}
mbbx6spp/dsmigrator
src/main/scala/dsmigrator/hbase.scala
Scala
bsd-3-clause
596
package forms

import models.Product
import play.api.data.Form
import play.api.data.Forms._
import play.api.data.format.Formats._

/**
 * The form which handles creating and editing a product.
 */
object ProductForm {

  /**
   * A play framework form.
   */
  val form = Form(
    mapping(
      "id" -> optional(longNumber),
      "name" -> nonEmptyText,
      "description" -> nonEmptyText,
      "price" -> of[Double]
    )(Product.apply)(Product.unapply)
  )
}
Wirwing/hello-conekta-play-framework
app/forms/ProductForm.scala
Scala
mit
457
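A hypothetical usage sketch for the record above (assumes Play's forms module on the classpath and a models.Product case class whose fields match the mapping). Binding from a plain Map exercises the form without an HTTP request.

object ProductFormDemo extends App {
  import forms.ProductForm

  // Note: "id" is optional in the mapping, so it may be omitted here.
  val bound = ProductForm.form.bind(Map(
    "name" -> "Widget",
    "description" -> "A fine widget",
    "price" -> "9.99"
  ))

  bound.fold(
    withErrors => println(s"binding failed: ${withErrors.errors}"),
    product => println(s"bound product: $product")
  )
}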
/*
 * Copyright 2016 Dennis Vriend
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package akka.persistence.jdbc.journal.dao

import akka.NotUsed
import akka.persistence.{ AtomicWrite, PersistentRepr }
import akka.stream.scaladsl._

import scala.collection.immutable.Seq
import scala.concurrent.Future
import scala.util.Try

trait JournalDao {

  /**
   * Deletes all persistent messages up to toSequenceNr (inclusive) for the persistenceId
   */
  def delete(persistenceId: String, toSequenceNr: Long): Future[Unit]

  /**
   * Returns the highest sequence number for the events that are stored for that `persistenceId`. When no events are
   * found for the `persistenceId`, 0L will be the highest sequence number
   */
  def highestSequenceNr(persistenceId: String, fromSequenceNr: Long): Future[Long]

  /**
   * Returns a Source of PersistentRepr for a certain persistenceId
   */
  def messages(persistenceId: String, fromSequenceNr: Long, toSequenceNr: Long, max: Long): Source[Try[PersistentRepr], NotUsed]

  /**
   * @see [[akka.persistence.journal.AsyncWriteJournal.asyncWriteMessages(messages)]]
   */
  def asyncWriteMessages(messages: Seq[AtomicWrite]): Future[Seq[Try[Unit]]]
}
gavares/akka-persistence-jdbc
src/main/scala/akka/persistence/jdbc/journal/dao/JournalDao.scala
Scala
apache-2.0
1,706
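A toy, single-threaded in-memory sketch of the contract documented above; it is not the project's JDBC implementation and skips concurrency control, but it shows the intended semantics of each method (0L when nothing is stored, inclusive bounds on delete/messages).

import akka.NotUsed
import akka.persistence.{ AtomicWrite, PersistentRepr }
import akka.stream.scaladsl.Source
import scala.collection.immutable.Seq
import scala.concurrent.Future
import scala.util.{ Success, Try }

class InMemoryJournalDao extends JournalDao {
  private var journal = Map.empty[String, Vector[PersistentRepr]].withDefaultValue(Vector.empty)

  def delete(persistenceId: String, toSequenceNr: Long): Future[Unit] = {
    // toSequenceNr is inclusive, so keep only strictly newer events.
    journal += persistenceId -> journal(persistenceId).filter(_.sequenceNr > toSequenceNr)
    Future.successful(())
  }

  def highestSequenceNr(persistenceId: String, fromSequenceNr: Long): Future[Long] =
    // 0L when no events are stored, per the scaladoc above.
    Future.successful(journal(persistenceId).map(_.sequenceNr).foldLeft(0L)(math.max))

  def messages(persistenceId: String, fromSequenceNr: Long, toSequenceNr: Long, max: Long): Source[Try[PersistentRepr], NotUsed] =
    Source(journal(persistenceId)
      .filter(r => r.sequenceNr >= fromSequenceNr && r.sequenceNr <= toSequenceNr)
      .take(math.min(max, Int.MaxValue).toInt)
      .map(Success(_)))

  def asyncWriteMessages(messages: Seq[AtomicWrite]): Future[Seq[Try[Unit]]] = {
    messages.foreach(aw => journal += aw.persistenceId -> (journal(aw.persistenceId) ++ aw.payload))
    Future.successful(messages.map(_ => Success(())))
  }
}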
import com.github.play2war.plugin.{Play2WarKeys, Play2WarPlugin}
import sbt._

object ApplicationBuild extends Build {

  val appName = "PlayingWithPlayJava"
  val appVersion = "1.0-SNAPSHOT"

  val appDependencies = Seq(
    // Add your project dependencies here,
  )

  val main = play.Project(appName, appVersion, appDependencies)
    .settings(Play2WarPlugin.play2WarSettings: _*)
    .settings(
      // Add your own project settings here
      Play2WarKeys.servletVersion := "3.1"
    )
}
bpupadhyaya/PlayingWithPlay
PlayingWithPlayJava/project/Build.scala
Scala
mit
509
/*
 Copyright The MITRE Corporation 2010. All rights reserved.
 */
package org.mitre.jcarafe.dparser

import org.mitre.jcarafe.crf.{AbstractInstance, Feature, DenseTrainable, CoreModel, AccessSeq, DecodingAlgorithm, InstanceSequence}
import org.mitre.jcarafe.util.FastLoops._
import org.mitre.jcarafe.jama._

abstract class MstCrf(val nfs: Int, val gPrior: Double = 100.0) extends DenseTrainable[AbstractInstance] {

  import org.mitre.jcarafe.util.FastLoops._

  /**
   * These are the model parameters
   */
  val lambdas: Array[Double] = Array.fill(nfs)(0.0)
  val numParams = nfs
  val gradient: Array[Double] = Array.fill(nfs)(0.0)
  val invSigSqr = 1.0 / gPrior

  def initialize() = {}

  def getCoreModel() = new CoreModel(lambdas, numParams, 0, 0, 0)

  def regularize() = {
    var i = 0
    var llMod = 0.0
    val llen = lambdas.length
    while (i < llen) {
      val li = lambdas(i)
      gradient(i) = li * invSigSqr
      llMod -= (li * li * invSigSqr) / 2.0
      i += 1
    }
    llMod
  }

  def printMatrix(m: Array[Array[Double]]) = {
    println("")
    m foreach { r => r foreach { e => printf(" %f", e) }; println("") }
    println("")
  }

  protected def inferGradientAndLL(iseq: Seq[AbstractInstance]) = {
    var sll = 0.0
    var i = 0
    val sl = iseq.length
    val kirchoff = Array.fill(sl, sl)(0.0)
    val sMat = Array.fill(sl, sl)(0.0)
    forIndex(sl) { i =>
      val instFeatures = iseq(i).getCompVec(0)
      val label = iseq(i).label
      updateScores(sMat, instFeatures, lambdas, i, true, label)
    }
    computeKirchoff(sl, kirchoff, sMat)
    val luDecomp = new org.mitre.jcarafe.jama.LUDecomposition(new Matrix(kirchoff))
    val ident = org.mitre.jcarafe.jama.Matrix.identity(sl, sl)
    val invK = luDecomp.solve(ident).getArray()
    val detK = luDecomp.det()
    val expectations = Array.fill(numParams)(0.0)
    forIndex(sl) { i =>
      val instFeatures = iseq(i).getCompVec(0)
      val klen = instFeatures.length
      val label = iseq(i).label
      forIndex(klen) { k =>
        val inst = instFeatures(k)
        val parent = inst.cur
        if (parent == label) sll += lambdas(inst.fid) * inst.value
        val off = if (parent == i) 0.0 else invK(i)(parent)
        expectations(inst.fid) += sMat(i)(parent) * inst.value * (invK(i)(i) - off)
        gradient(inst.fid) += sMat(i)(parent) * inst.value * (invK(i)(i) - off)
      }
    }
    sll -= math.log(detK)
    sll
  }

  protected def computeKirchoff(s: Int, km: Array[Array[Double]], sm: Array[Array[Double]]) = {
    forIndex(s) { i => // row index
      forIndex(s) { j => // column index
        val ss = sm(j)(i)
        if (i != j) km(i)(j) = -ss
        km(i)(i) += sm(i)(j)
      }
    }
  }

  def getGradient(seqAccessor: AccessSeq[AbstractInstance]): Option[Double] = {
    var logLi = regularize()
    for (j <- 0 until seqAccessor.length) {
      val seq = seqAccessor(j)
      if (seq.length > 0) logLi += inferGradientAndLL(seq)
    }
    Some(-logLi)
  }

  protected def updateScores(scoreMat: Array[Array[Double]], instFeatures: Array[Feature], lambdas: Array[Double], pos: Int, takeExp: Boolean, lab: Int) = {
    val klen = instFeatures.length
    forIndex(klen) { k =>
      val inst = instFeatures(k)
      if ((inst.cur) == lab) {
        gradient(inst.fid) -= inst.value // constraint
      }
      scoreMat(pos)(inst.cur) += lambdas(inst.fid) * inst.value
    }
    if (takeExp) {
      val scoreSize = scoreMat.length
      forIndex(scoreSize) { i =>
        scoreMat(pos)(i) = math.exp(scoreMat(pos)(i))
      }
    }
  }
}

class MstMAPInference(crf: CoreModel) extends DecodingAlgorithm {

  val lambdas = crf.params

  protected def updateScores(scoreVec: Array[Double], instFeatures: Array[Feature], lambdas: Array[Double], pos: Int) = {
    val klen = instFeatures.length
    val scoreSize = scoreVec.length
    forIndex(klen) { k =>
      val inst = instFeatures(k)
      val parent = inst.cur
      scoreVec(parent) += lambdas(inst.fid) * inst.value
    }
    forIndex(scoreSize) { i => scoreVec(i) = math.exp(scoreVec(i)) }
  }

  def getCopyOf = new MstMAPInference(this.crf)

  def assignBestSequence(iseq: collection.immutable.IndexedSeq[AbstractInstance]): Double = {
    val sl = iseq.length
    val sMat = Array.fill(sl, sl)(0.0)
    val t0 = System.nanoTime
    forIndex(sl) { i =>
      val instFeatures = iseq(i).getCompVec(0)
      val label = iseq(i).label
      var sVec = sMat(i)
      updateScores(sVec, instFeatures, lambdas, i)
    }
    //println("\n score assignment in " + ((System.nanoTime - t0) / 1000000000.0) + " seconds")
    //println("Edge scores: ")
    //forIndex(sMat.length){i => forIndex(sMat.length){j => println("("+i+","+j+") => " + sMat(i)(j))}}
    val t1 = System.nanoTime
    val cle = new ChuLiuEdmonds()
    //println("CLE instance created in " + ((System.nanoTime - t1) / 1000000000.0) + " seconds")
    val t2 = System.nanoTime
    val bstSeq = cle.cleInfer(sMat)
    //println("\n CLE inference performed in" + ((System.nanoTime - t2) / 1000000000.0) + " seconds")
    forIndex(sl) { i => iseq(i).label_=(bstSeq(i)) }
    0.0
  }
}

class ChuLiuEdmonds {

  case class PNode(val vl: Int, var onStack: Boolean = false, var index: Int = -1, var lowlink: Int = -1,
                   var nodes: Set[Int] = Set(), var bestIn: Map[Int, Int] = Map(), var parent: Int = -1,
                   var children: Set[Int] = Set()) {
    override def equals(other: Any) = other match {
      case other: PNode => this.vl == other.vl
      case _ => false
    }
    override val hashCode = vl
    override def toString = "PNode(" + vl + ")"
  }

  var idGensym = 0
  var origSize = 0
  var allNodes: Array[PNode] = Array()

  def cleInfer(sMat: Array[Array[Double]]) = {
    val smp = padSMat(sMat)
    initializeGraph(sMat)
    cleAlgorithm((0 until sMat.length).toSet, smp)
    val res = Array.fill(sMat.length)(0)
    forIndex(sMat.length) { i => res(i) = allNodes(i).parent }
    res
  }

  def getBestGraph(gr: Set[Int], sm: Array[Array[Double]]) = {
    gr foreach { ni =>
      val v = allNodes(ni)
      var bst = -1
      var bstV = -Double.MaxValue
      val pars = sm(ni)
      forIndex(pars.size) { j =>
        if ((pars(j) >= bstV)) { // subtle - this will ensure we pick the most recently introduced node
          bstV = pars(j)
          bst = j
        }
      }
      v.parent = bst
      val pnode = allNodes(bst)
      pnode.children += ni
    }
  }

  def initializeGraph(sMat: Array[Array[Double]]): Unit = {
    allNodes = Array.tabulate(sMat.length * 2) { i => PNode(i) }
    idGensym = sMat.length
    origSize = idGensym
  }

  def padSMat(sMat: Array[Array[Double]]) = {
    val sl = sMat.length
    Array.tabulate(sl * 2) { i => Array.tabulate(sl * 2) { j => if ((i < sl) && (j < sl)) sMat(i)(j) else 0.0 } }
  }

  def getCLESolution(gr: Set[Int], sMat: Array[Array[Double]]) = {
    println("looking for next best graph on: ")
    sMat foreach { row => row foreach { el => println(" " + el) }; println }
    val smp = padSMat(sMat)
    initializeGraph(sMat)
    cleAlgorithm(gr, smp)
    val res = Array.fill(sMat.length)(0)
    var sc = 0.0
    forIndex(gr.size) { i => sc += sMat(i)(allNodes(i).parent); res(i) = allNodes(i).parent }
    (res, sc)
  }

  def cleAlgorithm(gr: Set[Int], sMat: Array[Array[Double]]): Unit = {
    getBestGraph(gr, sMat)
    val sccs = findSCCs(gr)
    println("SCCS: " + sccs)
    val res = sccs.find(_.length > 1)
    res match {
      case None =>
      case Some(cyc) =>
        val lngth = gr.size
        var bestpars = Array.tabulate(cyc.length) { i => ((for (j <- 0 until cyc.length) yield { (sMat(i)(j), j) }).max._2, i) }
        var cbuf: Set[Int] = Set()
        val carr = new collection.mutable.ArrayBuffer[Int]
        bestpars.foreach { case (par, i) =>
          if (!cbuf.contains(par)) { cbuf += par; carr append par }
          if (!cbuf.contains(i)) { cbuf += i; carr append i }
        }
        val c1: Array[Int] = carr.toArray
        println("C1: ")
        c1 foreach { x => print(" " + x) }
        println
        val (ng, cycleNodes, cId) = contract(gr, c1, sMat)
        cleAlgorithm(ng, sMat)
        ng foreach { ni =>
          val n = allNodes(ni)
          if (ni == cId) { // if this node corresponds to the compacted node we created
            val par_i = n.parent // this is the parent in compact graph
            val par = allNodes(par_i)
            val ind = if (par_i == ni) -1 else par_i
            println("ind = " + ind)
            val ri = n.bestIn(ind)
            println("ri = " + ri)
            val toset = if (ind < 0) ri else ind
            println("toset = " + toset)
            allNodes(ri).parent = toset
          } else {
            if (n.parent == cId) {
              n.bestIn.get(ni) match {
                case Some(par) => n.parent = par
                case None =>
              }
            }
          }
        }
    }
  }

  def contract(graph: Set[Int], cycle: Array[Int], sMat: Array[Array[Double]]) = {
    val cycleSet = cycle.toSet
    var gc = graph.diff(cycleSet) // remove cycle nodes
    val origNodes = gc
    val clen = cycle.length
    val nnode = allNodes(idGensym)
    gc += idGensym
    idGensym += 1
    var totalS = 0.0
    // also set parents here
    forIndex(clen) { i =>
      nnode.nodes += cycle(i)
      val j = if (i > 0) i - 1 else clen - 1
      val n = allNodes(cycle(i))
      n.parent = cycle(j) // set parent here
      println("parent of " + cycle(i) + " set to " + cycle(j))
      totalS += sMat(cycle(j))(cycle(i))
    }
    // add edges out from collapsed cycle node
    origNodes foreach { v =>
      var nn = -Double.MaxValue
      var bst = -1
      forIndex(clen) { i =>
        val sc = sMat(v)(cycle(i))
        if (sc > nn) {
          nn = sc
          bst = i
        }
      }
      val vnode = allNodes(v)
      vnode.bestIn += (v -> cycle(bst))
      sMat(v)(nnode.vl) = nn
    }
    // add edges into collapsed cycle node
    var nn = -Double.MaxValue
    origNodes foreach { v =>
      var bst = -1
      var snn = -Double.MaxValue
      forIndex(clen) { i =>
        val j = if (i > 0) i - 1 else clen - 1
        val sc = sMat(cycle(i))(v) - sMat(cycle(i))(cycle(j))
        if (sc > snn) {
          bst = i
          snn = sc
        }
      }
      nnode.bestIn += (v -> cycle(bst))
      sMat(nnode.vl)(v) = snn + totalS
      if (snn > nn) nn = snn
    }
    // add in root to collapsed cycle node
    var snn = -Double.MaxValue
    var bst = -1
    forIndex(clen) { i =>
      val j = if (i > 0) i - 1 else clen - 1
      val sc = sMat(cycle(i))(cycle(i)) - sMat(cycle(i))(cycle(j))
      if (sc > snn) {
        snn = sc
        bst = i
      }
    }
    nnode.bestIn += ((-1) -> cycle(bst))
    sMat(nnode.vl)(nnode.vl) = (snn + totalS)
    (gc, nnode.nodes, (idGensym - 1))
  }

  def findSCCs(graph: Set[Int]): List[List[Int]] = {
    val sccs = new collection.mutable.ListBuffer[List[Int]]
    var c = 0
    val st = new collection.mutable.Stack[PNode]
    var ind = 0
    val sz = graph.size
    graph foreach { i => allNodes(i).index = -1 }
    graph foreach { i => if (allNodes(i).index < 0) tarjan(allNodes(i)) }

    def tarjan(v: PNode): Unit = {
      v.index = ind
      v.lowlink = ind
      ind += 1
      st.push(v)
      v.onStack = true // been visited
      v.children foreach { ci =>
        val vp = allNodes(ci)
        if (vp.index < 0) {
          tarjan(vp)
          v.lowlink = math.min(v.lowlink, vp.lowlink)
        } else if (vp.onStack) {
          v.lowlink = math.min(v.lowlink, vp.lowlink)
        }
      }
      if (v.lowlink == v.index) {
        val sc = new collection.mutable.ListBuffer[Int]
        var cont = true
        while (cont) {
          val vp = st.pop()
          vp.onStack = false
          sc append vp.vl
          if (vp.vl == v.vl) cont = false
        }
        sccs append sc.toList
      }
    }
    sccs.toList
  }
}

class KBestChuLiuEdmonds(val sOrig: Array[Array[Double]]) extends ChuLiuEdmonds {

  class Solution(val sc: Double, val edge: (Int, Int), val res: Array[Int],
                 val mustInclude: Set[(Int, Int)], val mustExclude: Set[(Int, Int)])

  implicit def orderedSolution(s: Solution): Ordered[Solution] = new Ordered[Solution] {
    def compare(other: Solution) = s.sc.compare(other.sc)
  }

  def getScore(sMat: Array[Array[Double]], pars: Array[Int]) = {
    var sc = 0.0
    forIndex(sMat.size) { i => sc += sMat(i)(pars(i)) }
    sc
  }

  def getLocalScore(sMat: Array[Array[Double]], gr: Set[Int]) = {
    var sc = 0.0
    gr foreach { i => sc += sMat(i)(allNodes(i).parent) }
    sc
  }

  def getNextBestGraph(sm: Array[Array[Double]], gr: Set[Int], r1: Array[Int], sc: Double) = {
    val sMat = padSMat(sm)
    var curBstScore = -Double.MaxValue
    var edge = (-1, -1) // edge to remove
    println("looking for next best graph on: ")
    sm foreach { row => row foreach { el => println(" " + el) }; println }
    gr foreach { ni =>
      val bst = r1(ni)
      sm(ni)(bst) = -1E300
      val (rr, ss) = (new ChuLiuEdmonds).getCLESolution(gr, sm)
      if ((ss < sc) && (ss > curBstScore)) {
        curBstScore = ss
        edge = (ni, bst)
      }
      sm(ni)(bst) = sOrig(ni)(bst) // set it back
    }
    println("After looking for next edge: ")
    sm foreach { row => row foreach { el => println(" " + el) }; println }
    (edge, (sc - curBstScore))
  }

  def maskWeights(sm: Array[Array[Double]], in: Set[(Int, Int)], ex: Set[(Int, Int)]) = {
    println("Applying mask: " + in + " AND " + ex)
    in foreach { case (i, j) => sm(i)(j) = 1E300 }
    ex foreach { case (i, j) => sm(i)(j) = -1E300 }
  }

  def unmaskWeights(sm: Array[Array[Double]]) = {
    var i = 0
    while (i < sm.length) {
      var j = 0
      while (j < sm.length) {
        sm(i)(j) = sOrig(i)(j)
        j += 1
      }
      i += 1
    }
  }

  def kBestCLEInfer(sMat: Array[Array[Double]], k: Int) = {
    val nnds = (0 until sMat.length).toSet
    val smp = padSMat(sMat)
    initializeGraph(sMat)
    val (bst, bstScore) = (new ChuLiuEdmonds).getCLESolution(nnds, sMat)
    val bstList = new collection.mutable.ListBuffer[Array[Int]]()
    val p_Queue = new collection.mutable.PriorityQueue[Solution]()
    val (e1, d1) = getNextBestGraph(sMat, nnds, bst, bstScore)
    p_Queue += new Solution(bstScore - d1, e1, bst, Set(), Set())
    bstList append bst
    for (i <- 2 to k) {
      val iSol = p_Queue.dequeue
      val yP = iSol.mustInclude + iSol.edge
      val zP = iSol.mustExclude + iSol.edge
      println("before masking: ")
      sMat foreach { row => row foreach { v => print(" " + v) }; println }
      maskWeights(sMat, iSol.mustInclude, zP)
      println("after masking: ")
      sMat foreach { row => row foreach { v => print(" " + v) }; println }
      val (bst_i, bstScore_i) = (new ChuLiuEdmonds).getCLESolution(nnds, sMat)
      bstList append bst_i
      val (e_j, d_j) = getNextBestGraph(sMat, nnds, bst_i, bstScore_i)
      p_Queue += new Solution(iSol.sc - d_j, e_j, bst_i, iSol.mustInclude, zP)
      unmaskWeights(sMat)
      maskWeights(sMat, yP, iSol.mustExclude)
      val (e_i, d_i) = getNextBestGraph(sMat, nnds, bst_i, bstScore_i)
      p_Queue += new Solution(iSol.sc - d_i, e_i, bst_i, yP, iSol.mustExclude)
      unmaskWeights(sMat)
    }
    bstList
  }
}

object KBestChuLiuEdmonds {
  def apply(sm: Array[Array[Double]]) = {
    val copy = Array.tabulate(sm.length) { i => Array.tabulate(sm.length) { j => sm(i)(j) } }
    new KBestChuLiuEdmonds(copy)
  }
}
wellner/jcarafe
jcarafe-ext/src/main/scala/org/mitre/jcarafe/dparser/MstInference.scala
Scala
bsd-3-clause
15,471
// Copyright (c) 2015 Contributors to Ars Terra
//
// This file is part of Ars Terrae.
//
// Ars Terrae is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Ars Terrae is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Ars Terrae. If not, see <http://www.gnu.org/licenses/>.

package mc.arsterra

import cpw.mods.fml.common.Mod.EventHandler
import cpw.mods.fml.common.event.FMLInitializationEvent
import cpw.mods.fml.common.{Mod, SidedProxy}
import net.minecraftforge.common.MinecraftForge
import org.apache.logging.log4j.{LogManager, Logger}

@Mod(modid = "arsterra", name = "Ars Terra", version = "0.0.2", modLanguage = "scala")
object ArsTerra {

  private val log: Logger = LogManager.getLogger("ArsTerra")

  @SidedProxy(clientSide = "mc.arsterra.ArsTerra$ClientProxy", serverSide = "mc.arsterra.ArsTerra$ServerProxy")
  var proxy: CommonProxy = null

  @EventHandler
  def init(e: FMLInitializationEvent): Unit = {
    proxy.init(e)
  }

  class CommonProxy {
    @EventHandler
    def init(e: FMLInitializationEvent): Unit = {
      log.info("Ars Terra loading")
      MinecraftForge.EVENT_BUS.register(TreeShake)
      MinecraftForge.EVENT_BUS.register(LogPile)
    }
  }

  class ClientProxy extends CommonProxy {
    @EventHandler
    override def init(e: FMLInitializationEvent): Unit = super.init(e)
  }

  class ServerProxy extends CommonProxy {
    @EventHandler
    override def init(e: FMLInitializationEvent): Unit = super.init(e)
  }
}
HarvestMoon/HarvestMoon
src/main/scala/mc/arsterra/ArsTerra.scala
Scala
mit
1,907
/*
 * Copyright 2001-2013 Artima, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.scalactic

import org.scalatest._
import scala.collection.GenSeq
import scala.collection.GenMap
import scala.collection.GenSet
import scala.collection.GenIterable
import scala.collection.GenTraversable
import scala.collection.GenTraversableOnce
import scala.collection.{mutable, immutable}

class FreshConversionCheckedSetEqualityConstraintsSpec extends Spec with NonImplicitAssertions with CheckedEquality {

  // TODO: Need to explicitly enable the implicit conversion equality

  case class Super(size: Int)
  class Sub(sz: Int) extends Super(sz)

  val super1: Super = new Super(1)
  val sub1: Sub = new Sub(1)
  val super2: Super = new Super(2)
  val sub2: Sub = new Sub(2)
  val nullSuper: Super = null

  case class Fruit(name: String)
  class Apple extends Fruit("apple")
  class Orange extends Fruit("orange")

  implicit class IntWrapper(val value: Int) {
    override def equals(o: Any): Boolean = o match {
      case that: IntWrapper => this.value == that.value
      case _ => false
    }
    override def hashCode: Int = value.hashCode
  }

  object `the SetEqualityConstraints trait` {

    def `should allow any Set to be compared with any other Set, so long as the element types of the two Sets have a recursive EqualityConstraint` {
      assert(mutable.HashSet(1, 2, 3) === immutable.HashSet(1, 2, 3))
      assert(mutable.HashSet(1, 2, 3) === immutable.HashSet(1L, 2L, 3L))
      assert(mutable.HashSet(1L, 2L, 3L) === immutable.HashSet(1, 2, 3))
      assert(immutable.HashSet(1, 2, 3) === mutable.HashSet(1L, 2L, 3L))
      assert(immutable.HashSet(1L, 2L, 3L) === mutable.HashSet(1, 2, 3))
      assertTypeError("immutable.HashSet(new IntWrapper(1), new IntWrapper(2), new IntWrapper(3)) === mutable.HashSet(1, 2, 3)")
      assertTypeError("immutable.HashSet(1, 2, 3) === mutable.HashSet(new IntWrapper(1), new IntWrapper(2), new IntWrapper(3))")
      assert(mutable.HashSet(new Apple, new Apple) === immutable.HashSet(new Fruit("apple"), new Fruit("apple")))
      assert(immutable.HashSet(new Fruit("apple"), new Fruit("apple")) === mutable.HashSet(new Apple, new Apple))
      assertTypeError("mutable.HashSet(new Apple, new Apple) === immutable.HashSet(new Orange, new Orange)")
      assertTypeError("immutable.HashSet(new Apple, new Apple) === mutable.HashSet(new Orange, new Orange)")
      assertTypeError("immutable.HashSet(new Orange, new Orange) === mutable.HashSet(new Apple, new Apple)")
      assertTypeError("mutable.HashSet(new Orange, new Orange) === immutable.HashSet(new Apple, new Apple)")
    }
  }
}
travisbrown/scalatest
src/test/scala/org/scalactic/FreshConversionCheckedSetEqualityConstraintsSpec.scala
Scala
apache-2.0
3,185
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.eagle.datastream.core

import com.typesafe.config.Config
import org.jgrapht.experimental.dag.DirectedAcyclicGraph

import scala.collection.JavaConversions._
import scala.collection.mutable.ListBuffer

/**
 * Replace GroupByProducer (Vertex) with StreamConnector (Edge)
 *
 * For example as to Storm, it's mainly for the grouping method
 *
 * @param config context configuration
 */
case class StreamGroupbyExpansion(config: Config) extends StreamDAGExpansion(config) {
  override def expand(dag: DirectedAcyclicGraph[StreamProducer[Any], StreamConnector[Any, Any]]) = {
    val iter = dag.iterator()
    var toBeAddedEdges = new ListBuffer[StreamConnector[Any, Any]]
    var toBeRemovedVertex = new ListBuffer[StreamProducer[Any]]
    while (iter.hasNext) {
      val current = iter.next()
      dag.outgoingEdgesOf(current).foreach(edge => {
        val child = edge.to
        child match {
          case p: GroupByProducer[Any] => {
            dag.outgoingEdgesOf(p).foreach(c2 => {
              p match {
                case GroupByFieldProducer(fields) =>
                  toBeAddedEdges += GroupbyFieldsConnector(current, c2.to, fields)
                case GroupByStrategyProducer(strategy) =>
                  toBeAddedEdges += GroupbyStrategyConnector(current, c2.to, strategy)
                case GroupByKeyProducer(keySelector) =>
                  current.outKeyed = true
                  current.keySelector = KeySelectorWrapper(keySelector)
                  c2.to.inKeyed = true
                  toBeAddedEdges += GroupbyKeyConnector(current, c2.to, keySelector)
                case _ => toBeAddedEdges += ShuffleConnector(current, c2.to)
              }
            })
            toBeRemovedVertex += p
          }
          case _ =>
        }
      })
    }

    // add back edges
    toBeAddedEdges.foreach(e => dag.addEdge(e.from, e.to, e))
    toBeRemovedVertex.foreach(v => dag.removeVertex(v))
  }
}

object StreamGroupbyExpansion {
  def apply()(implicit config: Config, dag: DirectedAcyclicGraph[StreamProducer[Any], StreamConnector[Any, Any]]): StreamGroupbyExpansion = {
    val e = StreamGroupbyExpansion(config)
    e.expand(dag)
    e
  }
}
pkuwm/incubator-eagle
eagle-core/eagle-data-process/eagle-stream-process-api/src/main/scala/org/apache/eagle/datastream/core/StreamGroupbyExpansion.scala
Scala
apache-2.0
2,987
package vggames.regex.task

import org.junit.runner.RunWith
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner

import vggames.regex.Escaper

@RunWith(classOf[JUnitRunner])
class EscaperSpec extends Specification {

  "the escaper" should {

    "transform space to space demarcation" in {
      new Escaper().apply(" ") must_== "-Espa&ccedil;o-"
    }

    "transform empty string to empty demarcation" in {
      new Escaper().apply("") must_== "-Vazio-"
    }

    "transform new line to new line demarcation" in {
      new Escaper().apply("\n") must_== "-Quebra-de-Linha-"
    }

    "transform tab to tab demarcation" in {
      new Escaper().apply("\t") must_== "-Tab-"
    }

    "transform return to return demarcation" in {
      new Escaper().apply("\r") must_== "-Retorno-"
    }

    "transform page feed to page feed demarcation" in {
      new Escaper().apply("\f") must_== "-Quebra-de-P&aacute;gina-"
    }

    "not escape a normal word" in {
      new Escaper().apply("asdrubal") must_== "asdrubal"
    }

    "transform multiple blanks to its demarcations" in {
      new Escaper().apply("The Spa\tce") must_== "The-Espa&ccedil;o-Spa-Tab-ce"
    }
  }
}
vidageek/games
games/regex/src/test/scala/vggames/regex/task/EscaperSpec.scala
Scala
gpl-3.0
1,227
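The real Escaper lives elsewhere in this repository; the following hypothetical reconstruction merely satisfies the spec above, mapping each whitespace character to its HTML-entity demarcation and leaving everything else untouched.

package vggames.regex

class Escaper {
  // Demarcations for characters that would otherwise be invisible in the UI.
  private val marks = Map(
    '\n' -> "-Quebra-de-Linha-",
    '\t' -> "-Tab-",
    '\r' -> "-Retorno-",
    '\f' -> "-Quebra-de-P&aacute;gina-",
    ' '  -> "-Espa&ccedil;o-"
  )

  def apply(s: String): String =
    if (s.isEmpty) "-Vazio-"
    else s.flatMap(c => marks.getOrElse(c, c.toString))
}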
/**
 * Magmanics Licensing. This web application allows for centralized control
 * of client application activation, with optional configuration parameters
 * to control licensable features, and storage of supplementary information
 * about the client machine. Client applications may interface with this
 * central server (for activation) using libraries licenced under an
 * alternative licence.
 *
 * Copyright (C) 2010 James Baxter <j.w.baxter(at)gmail.com>
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
package com.magmanics.licensing.ui.content.user

import com.magmanics.licensing.model.User
import com.magmanics.vaadin.component.TableWithCheckboxes

import scala.collection.JavaConversions._

/**
 * @author jbaxter - 18/06/11
 */
class UserSelectionTable(users: Seq[User]) extends TableWithCheckboxes {

  setSelectable(true)
  setMultiSelect(true)
  setImmediate(true)
  setPageLength(5)

  override def containerProperties = List(("username", classOf[String], "", "Username", null, null))

  override def itemRows = users.map(u => Array(u.name) -> u.name)

  // select all
  setValue(getItemIds())

  def getUsers = {
    val users = getValue.asInstanceOf[java.util.Set[String]]
    users.toSeq.sortBy(s => s)
  }
}
manicmonkey/licensing
Licensing-UI-Vaadin/src/main/scala/com/magmanics/licensing/ui/content/user/UserSelectionTable.scala
Scala
gpl-3.0
1,835
package com.sksamuel.elastic4s.requests.indexes

import com.sksamuel.elastic4s.fields.ElasticField
import com.sksamuel.elastic4s.requests.analyzers.{AnalyzerDefinition, TokenFilter, Tokenizer}
import com.sksamuel.elastic4s.requests.mappings.{FieldDefinition, MappingDefinition}

trait CreateIndexApi {

  def createIndex(name: String): CreateIndexRequest = CreateIndexRequest(name)

  @deprecated("use new analysis package", "7.0.1")
  def analyzers(analyzers: AnalyzerDefinition*) = new AnalyzersWrapper(analyzers)

  @deprecated("use new analysis package", "7.0.1")
  def tokenizers(tokenizers: Tokenizer*) = new TokenizersWrapper(tokenizers)

  @deprecated("use new analysis package", "7.0.1")
  def filters(filters: TokenFilter*) = new TokenFiltersWrapper(filters)

  @deprecated("Use of types is deprecated in 7; create the mapping without a type name by using properties, eg createIndex(\"foo\").mapping(properties(fielda, fieldb))", "7.0.0")
  def mapping(name: String): MappingDefinition = MappingDefinition(Some(name))

  val emptyMapping: MappingDefinition = MappingDefinition.empty

  def properties(field: FieldDefinition, tail: FieldDefinition*): MappingDefinition = mapping(field +: tail)
  def properties(fields: Seq[FieldDefinition] = Nil): MappingDefinition = MappingDefinition(fields)

  def properties(field: ElasticField, tail: ElasticField*): MappingDefinition =
    MappingDefinition(properties = field +: tail)

  @deprecated("This method is now called properties as types are deprecated in 7.0", "7.0.0")
  def mapping(field: FieldDefinition, tail: FieldDefinition*): MappingDefinition = mapping(field +: tail)

  @deprecated("This method is now called properties as types are deprecated in 7.0", "7.0.0")
  def mapping(fields: Seq[FieldDefinition] = Nil): MappingDefinition = MappingDefinition(fields)

  @deprecated("use new analysis package", "7.0.1")
  class AnalyzersWrapper(val analyzers: Iterable[AnalyzerDefinition])

  @deprecated("use new analysis package", "7.0.1")
  class TokenizersWrapper(val tokenizers: Iterable[Tokenizer])

  @deprecated("use new analysis package", "7.0.1")
  class TokenFiltersWrapper(val filters: Iterable[TokenFilter])
}
stringbean/elastic4s
elastic4s-core/src/main/scala/com/sksamuel/elastic4s/requests/indexes/CreateIndexApi.scala
Scala
apache-2.0
2,185
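A hedged usage sketch of the type-free 7.x style that the deprecation notices above steer towards: build the mapping with properties(...) instead of a named mapping. TextField and KeywordField are assumed to come from elastic4s 7's com.sksamuel.elastic4s.fields package; the index and field names are hypothetical:

import com.sksamuel.elastic4s.ElasticDsl._
import com.sksamuel.elastic4s.fields.{KeywordField, TextField}

object CreateIndexExample {
  // Hypothetical index and field names; the point is the type-free 7.x shape.
  val req = createIndex("users").mapping(properties(
    TextField("name"),
    KeywordField("country")
  ))
}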
object P1246 extends App {
  // 2D cross product of two vectors: positive when b lies counter-clockwise of a.
  def area(a: (Float, Float), b: (Float, Float)) = a._1 * b._2 - a._2 * b._1
  // Vector from point (ax, ay) to point (bx, by).
  def vec(ax: Float, ay: Float, bx: Float, by: Float) = (bx - ax, by - ay)

  // Read n vertices, one "x y" pair per line.
  val n = readLine.toInt
  val vs = (0 until n).map(i => readLine.split(" ").map(_.toFloat) ).view
  val xs = vs.map(_(0))
  val ys = vs.map(_(1))
  // Centroid of the vertices; the shoelace sum is translation-invariant,
  // so working with centroid-relative vectors does not change the sign.
  val px = xs.sum / xs.length
  val py = ys.sum / ys.length

  val toVec = (i: Int) => vec(px, py, xs(i), ys(i))

  // Shoelace formula: the sum of cross products over consecutive vertices is
  // twice the signed polygon area, positive for counter-clockwise order.
  val polyArea = (0 until n)
    .view
    .map(i => (i, (i + 1) % n))
    .map(p => (toVec(p._1), toVec(p._2)))
    .map(p => (area(p._1, p._2)))
    .sum

  println(if (polyArea > 0) "ccw" else "cw")
}
waterlink/sc-playground
1246.scala
Scala
mit
664
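The solution subtracts the centroid before taking cross products, but the shoelace sum is translation-invariant, so the plain form gives the same sign. A standalone check on a counter-clockwise unit square:

// Standalone check of the orientation logic above: for the unit square
// visited counter-clockwise, the summed cross products are positive.
object OrientationCheck extends App {
  val pts = Vector((0f, 0f), (1f, 0f), (1f, 1f), (0f, 1f)) // ccw unit square
  val signedTwiceArea = pts.indices.map { i =>
    val (x1, y1) = pts(i)
    val (x2, y2) = pts((i + 1) % pts.size)
    x1 * y2 - y1 * x2 // shoelace term (cross product)
  }.sum
  println(if (signedTwiceArea > 0) "ccw" else "cw") // prints ccw; sum is 2.0
}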
package org.jetbrains.plugins.scala package codeInsight package intention package matcher import com.intellij.codeInsight.intention.PsiElementBaseIntentionAction import com.intellij.openapi.editor.Editor import com.intellij.openapi.project.Project import com.intellij.psi.PsiElement import org.jetbrains.plugins.scala.extensions._ import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil import org.jetbrains.plugins.scala.lang.psi.api.base.ScStableCodeReferenceElement import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.ScConstructorPattern import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunctionDefinition import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.{ScClass, ScObject} import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory import org.jetbrains.plugins.scala.lang.psi.types.ScType import org.jetbrains.plugins.scala.lang.refactoring.namesSuggester.NameSuggester import org.jetbrains.plugins.scala.lang.resolve.ScalaResolveResult class ConvertToTypedPatternIntention extends PsiElementBaseIntentionAction { def getFamilyName = "Convert to typed pattern" override def getText = getFamilyName def isAvailable(project: Project, editor: Editor, element: PsiElement) = { element match { case e @ Parent(Both(ref: ScStableCodeReferenceElement, Parent(_: ScConstructorPattern))) => true case _ => false } } override def invoke(project: Project, editor: Editor, element: PsiElement) { val codeRef = element.getParent.asInstanceOf[ScStableCodeReferenceElement] val constrPattern = codeRef.getParent.asInstanceOf[ScConstructorPattern] val manager = codeRef.getManager val name = codeRef.bind() match { case Some( result @ ScalaResolveResult(fun: ScFunctionDefinition, _)) if fun.name == "unapply"=> // TODO follow aliases result.parentElement match { case Some(obj: ScObject) => ScalaPsiUtil.getCompanionModule(obj) match { case Some(cls: ScClass) => val tpe = ScType.designator(cls) val names = NameSuggester.suggestNamesByType(tpe) names.head case _ => "value" } case _ => "value" } case _ => "value" } // TODO replace references to the constructor pattern params with "value.param" val newPattern = ScalaPsiElementFactory.createPatternFromText("%s: %s".format(name, codeRef.getText), manager) constrPattern.replace(newPattern) } }
triggerNZ/intellij-scala
src/org/jetbrains/plugins/scala/codeInsight/intention/matcher/ConvertToTypedPatternIntention.scala
Scala
apache-2.0
2,523
package com.github.mdr.mash.compiler import com.github.mdr.mash.parser.AbstractSyntax._ import com.github.mdr.mash.parser.{ Abstractifier, MashParser, Provenance } import org.scalatest.{ FlatSpec, Matchers } class DesugarPipesTest extends FlatSpec with Matchers { "a | b" desugarsTo "b a" "a | b c" desugarsTo "b c a" "a | b | c" desugarsTo "c (b a)" "a | b c | d e" desugarsTo "d e (b c a)" "foo --bar=(a | b)" desugarsTo "foo --bar=(b a)" private implicit class RichString(s: String) { def desugarsTo(expected: String) { "DesugarPipes" should s"desugar pipes in '$s'" in { val actualExpr = removeSourceInfo(DesugarPipes.desugarPipes(parse(s))) val expectedExpr = removeSourceInfo(parse(expected)) actualExpr.sourceInfoOpt should equal(expectedExpr.sourceInfoOpt) actualExpr should equal(expectedExpr) } } } private def parse(s: String): Program = { val concreteProgram = MashParser.parseForgiving(s) val abstractProgram = new Abstractifier(Provenance.internal(s)).abstractify(concreteProgram) ParenRemover.removeParens(abstractProgram) } private def removeSourceInfo(program: Program): Program = program.transform { case e ⇒ e.withSourceInfoOpt(None) }.asInstanceOf[Program] }
mdr/mash
src/test/scala/com/github/mdr/mash/compiler/DesugarPipesTest.scala
Scala
mit
1,275
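Each expectation encodes the rule lhs | f args  =>  f args lhs, folded left to right, with the folded left side parenthesised when it contains a call. A toy string-level desugarer (it deliberately ignores the nested --bar=(a | b) case) reproduces the table:

// Toy model of the pipe rule the test table encodes: `lhs | f args` becomes
// `f args lhs`, applied left-to-right over the pipe chain.
object ToyDesugar extends App {
  def desugar(s: String): String =
    s.split('|').map(_.trim).reduceLeft { (lhs, call) =>
      val arg = if (lhs.contains(' ')) s"($lhs)" else lhs
      s"$call $arg"
    }

  println(desugar("a | b"))         // b a
  println(desugar("a | b c"))       // b c a
  println(desugar("a | b | c"))     // c (b a)
  println(desugar("a | b c | d e")) // d e (b c a)
}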
/* Copyright 2009-2016 EPFL, Lausanne */ package leon package genc package ir /* * Print an IR tree */ final class IRPrinter[S <: IR](val ir: S) { import ir._ case class Context(indent: Int) { val pad = " " * indent val newLine = s"\n$pad" def +(i: Int) = copy(indent = indent + i) } // Entry point for pretty printing final def apply(prog: ProgDef): String = rec(prog)(Context(0)) final def apply(tree: Tree)(implicit ptx: Context): String = tree match { case t: ProgDef => rec(t) case t: FunDef => rec(t) case t: ClassDef => rec(t) case t: ValDef => rec(t) case t: Expr => rec(t) case t: Type => rec(t) case t: ArrayAlloc => rec(t) case _ => ??? } private def rec(prog: ProgDef)(implicit ptx: Context): String = { val deps = new DependencyFinder(ir) deps(prog.asInstanceOf[deps.ir.ProgDef]) // Hugly cast we have to live with... val funs = deps.getFunctions map { _.asInstanceOf[FunDef] } map rec val classes = deps.getClasses map { _.asInstanceOf[ClassDef] } map rec (funs mkString ptx.newLine) + ptx.newLine + (classes mkString ptx.newLine) } private def rec(fd: FunDef)(implicit ptx: Context): String = { val ctx = fd.ctx map rec mkString ", " val params = (fd.params map rec).mkString(start = ", ", sep = ", ", end = "") val pre = fd.id + "(<" + ctx + ">" + params + "): " + rec(fd.returnType) + " = {" + ptx.newLine + " " val body = rec(fd.body)(ptx + 1) val post = ptx.newLine + "}" pre + body + post } private def rec(fb: FunBody)(implicit ptx: Context): String = { fb match { case FunBodyAST(body) => rec(body) case _ => "@cCode.function" } } private def rec(cd: ClassDef)(implicit ptx: Context): String = { val pre = if (cd.isAbstract) "abstract " else "" val fields = cd.fields map rec mkString ", " val parent = if (cd.parent.isDefined) " extends " + cd.parent.get.id else "" pre + "class " + cd.id + "(" + fields + ")" + parent } private def rec(vd: ValDef)(implicit ptx: Context): String = { vd.id + ": " + rec(vd.typ) } private def rec(alloc: ArrayAlloc)(implicit ptx: Context): String = { (alloc: @unchecked) match { case ArrayAllocStatic(arrayType, length, Right(values)) => "Array[" + rec(arrayType.base) + "](" + (values map rec mkString ", ") + ")" case ArrayAllocStatic(arrayType, length, Left(z)) => "Array[" + rec(arrayType.base) + "]( 0's " + length + " times )" case ArrayAllocVLA(arrayType, length, valueInit) => "Array[" + rec(arrayType.base) + "].fill(" + rec(length) + ")(" + rec(valueInit) + ")" } } private def rec(e: Expr)(implicit ptx: Context): String = (e: @unchecked) match { case Binding(vd) => "[[ " + vd.id + ": " + rec(vd.getType) + " ]]" case Block(exprs) => "{{ " + (exprs map rec mkString ptx.newLine) + " }}" case Decl(vd) => (if (vd.isVar) "var" else "val") + " " + rec(vd) + " // no value" case DeclInit(vd, value) => (if (vd.isVar) "var" else "val") + " " + rec(vd) + " = " + rec(value) case App(fd, extra, args) => fd.id + "(<" + (extra map rec mkString ", ") + ">" + (args map rec).mkString(start = ", ", sep = ", ", end = "") + ")" case Construct(cd, args) => cd.id + "(" + (args map rec mkString ", ") + ")" case ArrayInit(alloc) => rec(alloc) case FieldAccess(objekt, fieldId) => rec(objekt) + "." 
+ fieldId case ArrayAccess(array, index) => rec(array) + "[" + rec(index) + "]" case ArrayLength(array) => rec(array) + ".length" case Assign(lhs, rhs) => rec(lhs) + " = " + rec(rhs) case BinOp(op, lhs, rhs) => rec(lhs) + " " + op.symbol + " " + rec(rhs) case UnOp(op, expr) => op.symbol + rec(expr) case If(cond, thenn) => "if (" + rec(cond) + ") {" + ptx.newLine + " " + rec(thenn)(ptx + 1) + ptx.newLine + "}" case IfElse(cond, thenn, elze) => "if (" + rec(cond) + ") {" + ptx.newLine + " " + rec(thenn)(ptx + 1) + ptx.newLine + "} " + "else {" + ptx.newLine + " " + rec(elze)(ptx + 1) + ptx.newLine + "}" case While(cond, body) => "while (" + rec(cond) + ") {" + ptx.newLine + " " + rec(body)(ptx + 1) + ptx.newLine + "}" case IsA(expr, ct) => "¿" + ct.clazz.id + "?" + rec(expr) case AsA(expr, ct) => "(" + ct.clazz.id + ")" + rec(expr) case IntegralCast(expr, newType) => "(" + newType + ")" + rec(expr) case Lit(lit) => lit.toString case Ref(e) => "&" + rec(e) case Deref(e) => "*" + rec(e) case Return(e) => "return " + rec(e) case Break => "break" } private def rec(typ: Type)(implicit ptx: Context): String = (typ: @unchecked) match { case PrimitiveType(pt) => pt.toString case ClassType(clazz) => clazz.id case ArrayType(base) => "Array[" + rec(base) + "]" case ReferenceType(t) => "Ref[" + rec(t) + "]" case TypedefType(original, alias, _) => "typedef " + original + " -> " + alias case DroppedType => "DROPPED" case NoType => "NO-TYPE" } }
regb/leon
src/main/scala/leon/genc/ir/IRPrinter.scala
Scala
gpl-3.0
5,013
package cz.cvut.fit.palicand.vocloud.ssl.ml.classification import cz.cvut.fit.palicand.vocloud.ssl.utils.{MatrixUtils, DataframeUtils} import org.apache.spark.ml.PredictionModel import org.apache.spark.ml.classification.{RandomForestClassifier, ProbabilisticClassificationModel} import org.apache.spark.ml.feature.OneHotEncoder import org.apache.spark.ml.param.{ParamValidators, DoubleParam, Params, ParamMap} import org.apache.spark.ml.util.Identifiable import org.apache.spark.mllib.linalg.distributed._ import org.apache.spark.mllib.linalg._ import org.apache.spark.sql.{Row, DataFrame} import org.scalactic._ import Tolerance._ import TripleEquals._ import scala.annotation.tailrec /** * Created by palickaa on 16/03/16. */ trait LabelSpreadingParams extends Params with KnnKernel { val alpha = new DoubleParam(this, "alpha", "The weight of label retention vs label propagation", ParamValidators.inRange(0.0, 1.0)) def setAlpha(value: Double): LabelSpreadingParams = { set(alpha, value) } } final class LabelSpreadingClassifier(override val uid: String) extends GraphClassifier(uid) with LabelSpreadingParams { def this() = this(Identifiable.randomUID("lsc")) override protected def computeLabelProbabilities(distances: BlockMatrix, toLabel: BlockMatrix, labeledRows: Long) : BlockMatrix = { val mulLabels = new BlockMatrix(toLabel.blocks.map {case ((i, j), mat) => ((i, j), MatrixUtils.fromBreeze(MatrixUtils.toBreeze(mat) * (1.0d - $(alpha)))) }, toLabel.rowsPerBlock, toLabel.rowsPerBlock, toLabel.numRows, toLabel.numCols).cache() @tailrec def labelSpreadingRec(laplacian: BlockMatrix, labels: BlockMatrix, iteration: Int): BlockMatrix = { if (iteration > $(maxIterations)) { return labels } val newLabels = laplacian.multiply(labels).add(mulLabels).cache() //assert(MatrixUtils.hasOnlyValidElements(newLabels)) if (hasConverged(labels, newLabels, 0.001)) { return newLabels } labelSpreadingRec(laplacian, newLabels, iteration + 1) } val degrees = distances.toIndexedRowMatrix.rows.map { case row => (row.index, (row.vector, row.vector.toArray.sum)) }.cache val laplacian = new CoordinateMatrix(degrees.cartesian(degrees).filter { case ((i, (v1, d1)), (j, (v2, d2))) => (i, j) match { case (`j`, _) if v1.numNonzeros > 1 => true case (`i`, `j`) if (v1(j.toInt) !== 0.0 +- 0.0000001) && (v2(i.toInt) !== 0.0 +- 0.0000001) => true case _ => false } }.map {case ((i, (v1, d1)), (j, (v2, d2))) => new MatrixEntry(i, j, (i, j) match { case (`j`, _) if v1.numNonzeros > 1 => $(alpha) case (`i`, `j`) => $(alpha) / math.sqrt(d1 * d2) }) }, distances.numRows, distances.numCols).toBlockMatrix(toLabel.rowsPerBlock, toLabel.colsPerBlock).cache() labelSpreadingRec(laplacian, toLabel, 0) } override def copy(extra: ParamMap): LabelSpreadingClassifier = { defaultCopy(extra) } }
palicand/graph_ssl
src/main/scala/cz/cvut/fit/palicand/vocloud/ssl/ml/classification/LabelSpreadingClassifier.scala
Scala
mit
3,016
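computeLabelProbabilities iterates the Zhou et al. label-spreading fixed point F <- alpha * S * F + (1 - alpha) * Y with S = D^(-1/2) W D^(-1/2), on Spark BlockMatrix objects. The classifier's Laplacian construction differs in its diagonal handling, so this is a textbook sketch of the iteration it approximates, on a dense three-node path graph with one unlabeled node:

// Dense, plain-Scala sketch of the label-spreading iteration; sizes are tiny
// and illustrative. Node 0 is labeled class 0, node 2 class 1, node 1 unknown.
object LabelSpreadingSketch extends App {
  val W = Array(Array(0.0, 1.0, 0.0), Array(1.0, 0.0, 1.0), Array(0.0, 1.0, 0.0))
  val Y = Array(Array(1.0, 0.0), Array(0.0, 0.0), Array(0.0, 1.0))
  val alpha = 0.8

  val d = W.map(_.sum) // node degrees
  val S = Array.tabulate(3, 3)((i, j) =>
    if (W(i)(j) == 0.0) 0.0 else W(i)(j) / math.sqrt(d(i) * d(j)))

  var F = Y.map(_.clone)
  for (_ <- 1 to 50) { // converges since alpha < 1
    F = Array.tabulate(3, 2) { (i, k) =>
      val spread = (0 until 3).map(j => S(i)(j) * F(j)(k)).sum
      alpha * spread + (1 - alpha) * Y(i)(k)
    }
  }
  F.foreach(row => println(row.map(v => f"$v%.3f").mkString(" ")))
}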
package sgl.util import scala.concurrent.ExecutionContext.Implicits.global class FutureLoaderSuite extends LoaderAbstractSuite { override def makeLoader[A](body: => A): Loader[A] = FutureLoader(body) }
regb/scala-game-library
jvm-shared/src/test/scala/sgl/util/FutureLoaderSuite.scala
Scala
mit
208
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.scheduler.cluster.k8s import org.apache.spark._ import org.apache.spark.deploy.SparkHadoopUtil import org.apache.spark.deploy.worker.WorkerWatcher import org.apache.spark.executor.CoarseGrainedExecutorBackend import org.apache.spark.internal.Logging import org.apache.spark.internal.config._ import org.apache.spark.resource.ResourceProfile import org.apache.spark.resource.ResourceProfile.DEFAULT_RESOURCE_PROFILE_ID import org.apache.spark.rpc._ import org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages._ import org.apache.spark.util.Utils private[spark] object KubernetesExecutorBackend extends Logging { // Message used internally to start the executor when the driver successfully accepted the // registration request. case object RegisteredExecutor case class Arguments( driverUrl: String, executorId: String, bindAddress: String, hostname: String, cores: Int, appId: String, workerUrl: Option[String], resourcesFileOpt: Option[String], resourceProfileId: Int, podName: String) def main(args: Array[String]): Unit = { val createFn: (RpcEnv, Arguments, SparkEnv, ResourceProfile, String) => CoarseGrainedExecutorBackend = { case (rpcEnv, arguments, env, resourceProfile, execId) => new CoarseGrainedExecutorBackend(rpcEnv, arguments.driverUrl, execId, arguments.bindAddress, arguments.hostname, arguments.cores, env, arguments.resourcesFileOpt, resourceProfile) } run(parseArguments(args, this.getClass.getCanonicalName.stripSuffix("$")), createFn) System.exit(0) } def run( arguments: Arguments, backendCreateFn: (RpcEnv, Arguments, SparkEnv, ResourceProfile, String) => CoarseGrainedExecutorBackend): Unit = { Utils.initDaemon(log) SparkHadoopUtil.get.runAsSparkUser { () => // Debug code Utils.checkHost(arguments.hostname) // Bootstrap to fetch the driver's Spark properties. val executorConf = new SparkConf val fetcher = RpcEnv.create( "driverPropsFetcher", arguments.bindAddress, arguments.hostname, -1, executorConf, new SecurityManager(executorConf), numUsableCores = 0, clientMode = true) var driver: RpcEndpointRef = null val nTries = 3 for (i <- 0 until nTries if driver == null) { try { driver = fetcher.setupEndpointRefByURI(arguments.driverUrl) } catch { case e: Throwable => if (i == nTries - 1) { throw e } } } val cfg = driver.askSync[SparkAppConfig](RetrieveSparkAppConfig(arguments.resourceProfileId)) val props = cfg.sparkProperties ++ Seq[(String, String)](("spark.app.id", arguments.appId)) val execId: String = arguments.executorId match { case null | "EXECID" | "" => // We need to resolve the exec id dynamically driver.askSync[String](GenerateExecID(arguments.podName)) case id => id } fetcher.shutdown() // Create SparkEnv using properties we fetched from the driver. 
val driverConf = new SparkConf() for ((key, value) <- props) { // this is required for SSL in standalone mode if (SparkConf.isExecutorStartupConf(key)) { driverConf.setIfMissing(key, value) } else { driverConf.set(key, value) } } cfg.hadoopDelegationCreds.foreach { tokens => SparkHadoopUtil.get.addDelegationTokens(tokens, driverConf) } driverConf.set(EXECUTOR_ID, execId) val env = SparkEnv.createExecutorEnv(driverConf, execId, arguments.bindAddress, arguments.hostname, arguments.cores, cfg.ioEncryptionKey, isLocal = false) val backend = backendCreateFn(env.rpcEnv, arguments, env, cfg.resourceProfile, execId) env.rpcEnv.setupEndpoint("Executor", backend) arguments.workerUrl.foreach { url => env.rpcEnv.setupEndpoint("WorkerWatcher", new WorkerWatcher(env.rpcEnv, url, isChildProcessStopping = backend.stopping)) } env.rpcEnv.awaitTermination() } } def parseArguments(args: Array[String], classNameForEntry: String): Arguments = { var driverUrl: String = null var executorId: String = null var bindAddress: String = null var hostname: String = null var cores: Int = 0 var resourcesFileOpt: Option[String] = None var appId: String = null var workerUrl: Option[String] = None var resourceProfileId: Int = DEFAULT_RESOURCE_PROFILE_ID var podName: String = null var argv = args.toList while (!argv.isEmpty) { argv match { case ("--driver-url") :: value :: tail => driverUrl = value argv = tail case ("--executor-id") :: value :: tail => executorId = value argv = tail case ("--bind-address") :: value :: tail => bindAddress = value argv = tail case ("--hostname") :: value :: tail => hostname = value argv = tail case ("--cores") :: value :: tail => cores = value.toInt argv = tail case ("--resourcesFile") :: value :: tail => resourcesFileOpt = Some(value) argv = tail case ("--app-id") :: value :: tail => appId = value argv = tail case ("--worker-url") :: value :: tail => // Worker url is used in spark standalone mode to enforce fate-sharing with worker workerUrl = Some(value) argv = tail case ("--resourceProfileId") :: value :: tail => resourceProfileId = value.toInt argv = tail case ("--podName") :: value :: tail => podName = value argv = tail case Nil => case tail => // scalastyle:off println System.err.println(s"Unrecognized options: ${tail.mkString(" ")}") // scalastyle:on println printUsageAndExit(classNameForEntry) } } if (hostname == null) { hostname = Utils.localHostName() log.info(s"Executor hostname is not provided, will use '$hostname' to advertise itself") } if (driverUrl == null || executorId == null || cores <= 0 || appId == null) { printUsageAndExit(classNameForEntry) } if (bindAddress == null) { bindAddress = hostname } Arguments(driverUrl, executorId, bindAddress, hostname, cores, appId, workerUrl, resourcesFileOpt, resourceProfileId, podName) } private def printUsageAndExit(classNameForEntry: String): Unit = { // scalastyle:off println System.err.println( s""" |Usage: $classNameForEntry [options] | | Options are: | --driver-url <driverUrl> | --executor-id <executorId> | --bind-address <bindAddress> | --hostname <hostname> | --cores <cores> | --resourcesFile <fileWithJSONResourceInformation> | --app-id <appid> | --worker-url <workerUrl> | --resourceProfileId <id> | --podName <podName> |""".stripMargin) // scalastyle:on println System.exit(1) } }
shaneknapp/spark
resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/KubernetesExecutorBackend.scala
Scala
apache-2.0
8,041
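parseArguments consumes the argument vector with a while loop over list patterns: each case peels a flag and its value off the head and rebinds the tail. A minimal standalone version of the idiom (flag names illustrative):

// Minimal standalone version of the list-pattern argument loop used in
// parseArguments above; the flags here are illustrative only.
object ArgLoop extends App {
  var host: String = null
  var cores: Int = 0
  var argv = List("--hostname", "node-1", "--cores", "4")
  while (argv.nonEmpty) {
    argv match {
      case "--hostname" :: value :: tail => host = value; argv = tail
      case "--cores" :: value :: tail    => cores = value.toInt; argv = tail
      case other :: _ =>
        System.err.println(s"Unrecognized option: $other"); argv = Nil
      case Nil =>
    }
  }
  println(s"host=$host cores=$cores") // host=node-1 cores=4
}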
package progscala2.introscala.shapes

/**
  * Created by younggi on 7/7/16.
  */
object Messages {
  object Exit
  object Finished
  case class Response(message: String)
}

import akka.actor.Actor

class ShapesDrawingActor extends Actor {
  import Messages._

  override def receive: Receive = {
    case s: Shape =>
      s.draw(str => println(s"ShapesDrawingActor: $str"))
      sender ! Response(s"ShapesDrawingActor: $s drawn")
    case Exit =>
      println(s"ShapesDrawingActor: exiting...")
      sender ! Finished
    case unexpected =>  // default. Equivalent to "unexpected: Any"
      val response = Response(s"ERROR: Unknown message: $unexpected")
      println(s"ShapesDrawingActor: $response")
      sender ! response
  }
}
younggi/books
programming_scala/progscala2/src/main/scala/progscala2/introscala/shapes/ShapesDrawingActor.scala
Scala
mit
735
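A hedged driver for the actor, assuming the Shape, Circle and Point classes that sit next to it in the book's shapes package, plus an Akka ActorSystem on the classpath:

import akka.actor.{ActorSystem, Props}
import progscala2.introscala.shapes._

object DrawingMain extends App {
  val system = ActorSystem("DrawingActorSystem")
  val drawer = system.actorOf(Props(new ShapesDrawingActor), "drawingActor")

  // Sent from outside an actor, so the replies go to deadLetters; a real
  // driver would use another actor or the ask pattern to receive them.
  drawer ! Circle(Point(0.0, 0.0), 1.0)
  drawer ! Messages.Exit
}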
package uk.gov.dvla.vehicles.acquire.gatling object Headers { val headers_accept_html = Map( "Accept" -> """text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8""" ) val headers_accept_png = Map( "Accept" -> """image/png,image/*;q=0.8,*/*;q=0.5""" ) val headers_x_www_form_urlencoded = Map( "Accept" -> """text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8""", "Content-Type" -> """application/x-www-form-urlencoded""" ) }
dvla/vehicles-acquire-online
gatling-tests/src/test/scala/uk/gov/dvla/vehicles/acquire/gatling/Headers.scala
Scala
mit
479
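A hedged sketch of how one of these header maps would be attached to a request in a Gatling simulation in the same package; baseUrl is the Gatling 3 spelling (Gatling 2 uses baseURL), and the endpoint and user counts are illustrative:

import io.gatling.core.Predef._
import io.gatling.http.Predef._

class BrowseSimulation extends Simulation {
  // Illustrative base URL; `baseUrl` assumes Gatling 3 naming.
  val httpProtocol = http.baseUrl("http://example.invalid")

  val browse = scenario("browse home page").exec(
    http("home")
      .get("/")
      .headers(Headers.headers_accept_html) // map defined above
      .check(status.is(200))
  )

  setUp(browse.inject(atOnceUsers(1))).protocols(httpProtocol)
}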
package fr.ramiro.sfuzzy object FunctionsUtils { lazy val min: OperationFuzzy = (_: FuzzyEvaluationType).min(_) lazy val max: OperationFuzzy = (_: FuzzyEvaluationType).max(_) def cog(min: FuzzyEvaluationType, max: FuzzyEvaluationType, step: FuzzyEvaluationType)(func: (FuzzyEvaluationType) => FuzzyEvaluationType): FuzzyEvaluationType = { val range = min to max by step val sum = range.foldLeft(0: BigDecimal) { _ + func(_) } sum / range.size } }
rrramiro/sFuzzyLogic
src/main/scala/fr/ramiro/sfuzzy/FunctionsUtils.scala
Scala
apache-2.0
467
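A worked call of cog, assuming FuzzyEvaluationType is (an alias of) BigDecimal, which the 0: BigDecimal accumulator and the min to max by step range imply. Note that, as written, cog returns the plain average of func over the sampled points rather than a membership-weighted centroid; the sketch reproduces the value that cog(BigDecimal(0), BigDecimal(1), BigDecimal("0.1"))(tri) would return under that alias assumption:

object CogExample extends App {
  // Triangular membership function with peak 1.0 at x = 0.5.
  val tri: BigDecimal => BigDecimal =
    x => BigDecimal(1) - (x - BigDecimal("0.5")).abs * 2

  // cog(min, max, step)(func) as written averages func over the sample points.
  val samples = BigDecimal(0) to BigDecimal(1) by BigDecimal("0.1")
  val avg = samples.map(tri).sum / samples.size
  println(avg) // 5.0 / 11 = 0.4545..., the mean membership over 11 points
}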
/** * Copyright 2009 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS-IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.appjet.oui; class NoninheritedDynamicVariable[T](init: T) { private val tl = new ThreadLocal[T] { override def initialValue = init.asInstanceOf[T with AnyRef] } /** Retrieve the current value */ def value: T = tl.get.asInstanceOf[T] /** Set the value of the variable while executing the specified * thunk. * * @param newval The value to which to set the fluid * @param thunk The code to evaluate under the new setting */ def withValue[S](newval: T)(thunk: =>S): S = { val oldval = value tl.set(newval) try { thunk } finally { tl.set(oldval) } } /** Change the currently bound value, discarding the old value. * Usually <code>withValue()</code> gives better semantics. */ def value_=(newval: T) = { tl.set(newval) } override def toString: String = "NoninheritedDynamicVariable(" + value +")" }
OpeningDesign/SketchSpace
infrastructure/net.appjet.oui/dynamicvar.scala
Scala
apache-2.0
1,497
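The "noninherited" part is the contrast with scala.util.DynamicVariable, which is backed by an InheritableThreadLocal: here a thread spawned inside withValue sees the initial value, not the parent thread's binding. A small demo (the Runnable lambda assumes Scala 2.12+):

import net.appjet.oui.NoninheritedDynamicVariable

object NonInheritDemo extends App {
  val v = new NoninheritedDynamicVariable("init")
  v.withValue("bound") {
    println(v.value)                           // bound (same thread)
    val t = new Thread(() => println(v.value)) // init (binding not inherited)
    t.start(); t.join()
  }
  println(v.value)                             // init (binding restored)
}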
package courier object Compat { def asJava[T](set: Set[T]): java.util.Set[T] = { import scala.jdk.CollectionConverters._ set.asJava } }
softprops/courier
src/main/scala-3/compat.scala
Scala
mit
149
/** * Copyright 2014 Dropbox, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package djinni import java.io.StringWriter import djinni.ast._ import djinni.generatorTools._ import djinni.meta._ import djinni.writer.IndentWriter import scala.collection.mutable class ObjcppGenerator(spec: Spec) extends Generator(spec) { val objcMarshal = new ObjcMarshal(spec) val objcppMarshal = new ObjcppMarshal(spec) val cppMarshal = new CppMarshal(spec) class ObjcRefs() { var body = mutable.TreeSet[String]() var privHeader = mutable.TreeSet[String]() def find(ty: TypeRef) { find(ty.resolved) } def find(tm: MExpr) { tm.args.foreach(find) find(tm.base) } def find(m: Meta) = for(r <- objcppMarshal.references(m)) r match { case ImportRef(arg) => body.add("#import " + arg) case _ => } } private def arcAssert(w: IndentWriter) = w.wl("static_assert(__has_feature(objc_arc), " + q("Djinni requires ARC to be enabled for this file") + ");") override def generateEnum(origin: String, ident: Ident, doc: Doc, e: Enum) { // No generation required } def headerName(ident: String): String = idObjc.ty(ident) + "." + spec.objcHeaderExt def privateBodyName(ident: String): String = idObjc.ty(ident) + "+Private." + spec.objcppExt def nnCheck(expr: String): String = spec.cppNnCheckExpression.fold(expr)(check => s"$check($expr)") override def generateInterface(origin: String, ident: Ident, doc: Doc, typeParams: Seq[TypeParam], i: Interface) { val refs = new ObjcRefs() i.methods.map(m => { m.params.map(p => refs.find(p.ty)) m.ret.foreach(refs.find) }) i.consts.map(c => { refs.find(c.ty) }) val self = objcMarshal.typename(ident, i) val cppSelf = cppMarshal.fqTypename(ident, i) refs.privHeader.add("#include <memory>") refs.privHeader.add("!#include " + q(spec.objcppIncludeCppPrefix + spec.cppFileIdentStyle(ident) + "." 
+ spec.cppHeaderExt)) refs.body.add("!#import " + q(spec.objcppIncludeObjcPrefix + headerName(ident))) spec.cppNnHeader match { case Some(nnHdr) => refs.privHeader.add("#include " + nnHdr) case _ => } def writeObjcFuncDecl(method: Interface.Method, w: IndentWriter) { val label = if (method.static) "+" else "-" val ret = objcMarshal.fqReturnType(method.ret) val decl = s"$label ($ret)${idObjc.method(method.ident)}" writeAlignedObjcCall(w, decl, method.params, "", p => (idObjc.field(p.ident), s"(${objcMarshal.paramType(p.ty)})${idObjc.local(p.ident)}")) } val helperClass = objcppMarshal.helperClass(ident) writeObjcFile(objcppMarshal.privateHeaderName(ident.name), origin, refs.privHeader, w => { arcAssert(w) w.wl w.wl((if(i.ext.objc) "@protocol " else "@class ") + self + ";") w.wl wrapNamespace(w, spec.objcppNamespace, w => { w.wl(s"class $helperClass").bracedSemi { w.wlOutdent("public:") spec.cppNnType match { case Some(nnPtr) => w.wl(s"using CppType = ${nnPtr}<$cppSelf>;") w.wl(s"using CppOptType = std::shared_ptr<$cppSelf>;") case _ => w.wl(s"using CppType = std::shared_ptr<$cppSelf>;") w.wl(s"using CppOptType = std::shared_ptr<$cppSelf>;") } w.wl("using ObjcType = " + (if(i.ext.objc) s"id<$self>" else s"$self*") + ";"); w.wl w.wl(s"using Boxed = $helperClass;") w.wl w.wl(s"static CppType toCpp(ObjcType objc);") w.wl(s"static ObjcType fromCppOpt(const CppOptType& cpp);") w.wl(s"static ObjcType fromCpp(const CppType& cpp) { return fromCppOpt(cpp); }") w.wl w.wlOutdent("private:") w.wl("class ObjcProxy;") } }) w.wl }) if (i.ext.cpp) { refs.body.add("!#import " + q(spec.objcppIncludePrefix + objcppMarshal.privateHeaderName(ident.name))) refs.body.add("#import " + q(spec.objcBaseLibIncludePrefix + "DJICppWrapperCache+Private.h")) refs.body.add("#include <utility>") refs.body.add("#import " + q(spec.objcBaseLibIncludePrefix + "DJIError.h")) refs.body.add("#include <exception>") } if (i.ext.objc) { refs.body.add("#import " + q(spec.objcBaseLibIncludePrefix + "DJIObjcWrapperCache+Private.h")) refs.body.add("!#import " + q(spec.objcppIncludePrefix + objcppMarshal.privateHeaderName(ident.name))) } writeObjcFile(privateBodyName(ident.name), origin, refs.body, w => { arcAssert(w) val objcSelf = if (i.ext.objc && i.ext.cpp) self + "CppProxy" else self if (i.ext.cpp) { w.wl if (i.ext.objc) w.wl(s"@interface $objcSelf : NSObject<$self>") else w.wl(s"@interface $objcSelf ()") w.wl w.wl(s"- (id)initWithCpp:(const std::shared_ptr<$cppSelf>&)cppRef;") w.wl w.wl("@end") w.wl w.wl(s"@implementation $objcSelf {") w.wl(s" ::djinni::CppProxyCache::Handle<std::shared_ptr<$cppSelf>> _cppRefHandle;") w.wl("}") w.wl w.wl(s"- (id)initWithCpp:(const std::shared_ptr<$cppSelf>&)cppRef") w.braced { w.w("if (self = [super init])").braced { w.wl("_cppRefHandle.assign(cppRef);") } w.wl("return self;") } for (m <- i.methods) { w.wl writeObjcFuncDecl(m, w) w.braced { w.w("try").bracedEnd(" DJINNI_TRANSLATE_EXCEPTIONS()") { m.params.foreach(p => { if (isInterface(p.ty.resolved) && spec.cppNnCheckExpression.nonEmpty) { // We have a non-optional interface, assert that we're getting a non-null value val paramName = idObjc.local(p.ident) val stringWriter = new StringWriter() writeObjcFuncDecl(m, new IndentWriter(stringWriter)) val singleLineFunctionDecl = stringWriter.toString.replaceAll("\\n *", " ") val exceptionReason = s"Got unexpected null parameter '$paramName' to function $objcSelf $singleLineFunctionDecl" w.w(s"if ($paramName == nil)").braced { w.wl(s"""throw std::invalid_argument("$exceptionReason");""") } } }) val ret = 
m.ret.fold("")(_ => "auto r = ") val call = ret + (if (!m.static) "_cppRefHandle.get()->" else cppSelf + "::") + idCpp.method(m.ident) + "(" writeAlignedCall(w, call, m.params, ")", p => objcppMarshal.toCpp(p.ty, idObjc.local(p.ident.name))) w.wl(";") m.ret.fold()(r => w.wl(s"return ${objcppMarshal.fromCpp(r, "r")};")) } } } } if (i.ext.objc) { w.wl val objcExtSelf = objcppMarshal.helperClass("objc_proxy") wrapNamespace(w, spec.objcppNamespace, w => { w.wl(s"class $helperClass::ObjcProxy final") w.wl(s": public $cppSelf") w.wl(s", public ::djinni::ObjcProxyCache::Handle<ObjcType>") // Use base class to avoid name conflicts with user-defined methods having the same name as this new data member w.bracedSemi { w.wlOutdent("public:") w.wl("using Handle::Handle;") for (m <- i.methods) { val ret = cppMarshal.fqReturnType(m.ret) val params = m.params.map(p => cppMarshal.fqParamType(p.ty) + " c_" + idCpp.local(p.ident)) w.wl(s"$ret ${idCpp.method(m.ident)}${params.mkString("(", ", ", ")")} override").braced { w.w("@autoreleasepool").braced { val ret = m.ret.fold("")(_ => "auto r = ") val call = s"[Handle::get() ${idObjc.method(m.ident)}" writeAlignedObjcCall(w, ret + call, m.params, "]", p => (idObjc.field(p.ident), s"(${objcppMarshal.fromCpp(p.ty, "c_" + idCpp.local(p.ident))})")) w.wl(";") m.ret.fold()(ty => { if (spec.cppNnCheckExpression.nonEmpty && isInterface(ty.resolved)) { // We have a non-optional interface, so assert that we're getting a non-null value // before putting it into a non-null pointer val stringWriter = new StringWriter() writeObjcFuncDecl(m, new IndentWriter(stringWriter)) val singleLineFunctionDecl = stringWriter.toString.replaceAll("\\n *", " ") val exceptionReason = s"Got unexpected null return value from function $objcSelf $singleLineFunctionDecl" w.w(s"if (r == nil)").braced { w.wl(s"""throw std::invalid_argument("$exceptionReason");""") } } w.wl(s"return ${objcppMarshal.toCpp(ty, "r")};") }) } } } } }) } w.wl wrapNamespace(w, spec.objcppNamespace, w => { // ObjC-to-C++ coversion w.wl(s"auto $helperClass::toCpp(ObjcType objc) -> CppType").braced { // Handle null w.w("if (!objc)").braced { if (spec.cppNnType.isEmpty) { w.wl("return nullptr;") } else { w.wl(s"""throw std::invalid_argument("$helperClass::toCpp requires non-nil object");""") } } if (i.ext.cpp && !i.ext.objc) { // C++ only. In this case we generate a class instead of a protocol, so // we don't have to do any casting at all, just access cppRef directly. w.wl("return " + nnCheck("objc->_cppRefHandle.get()") + ";") //w.wl(s"return ${spec.cppNnCheckExpression.getOrElse("")}(objc->_cppRefHandle.get());") } else { // ObjC only, or ObjC and C++. val objcExtSelf = objcppMarshal.helperClass("objc_proxy") if (i.ext.cpp) { // If it could be implemented in C++, we might have to unwrap a proxy object. w.w(s"if ([(id)objc isKindOfClass:[$objcSelf class]])").braced { val getProxyExpr = s"(($objcSelf*)objc)->_cppRefHandle.get()" w.wl(s"return ${nnCheck(getProxyExpr)};") } } val getProxyExpr = s"::djinni::get_objc_proxy<$objcExtSelf>(objc)" w.wl(s"return ${nnCheck(getProxyExpr)};") } } w.wl w.wl(s"auto $helperClass::fromCppOpt(const CppOptType& cpp) -> ObjcType").braced { // Handle null w.w("if (!cpp)").braced { w.wl("return nil;") } if (i.ext.objc && !i.ext.cpp) { // ObjC only. In this case we *must* unwrap a proxy object - the dynamic_cast will // throw bad_cast if we gave it something of the wrong type. 
val objcExtSelf = objcppMarshal.helperClass("objc_proxy") w.wl(s"return dynamic_cast<$objcExtSelf&>(*cpp).Handle::get();") } else { // C++ only, or C++ and ObjC. if (i.ext.objc) { // If it could be implemented in ObjC, we might have to unwrap a proxy object. val objcExtSelf = objcppMarshal.helperClass("objc_proxy") w.w(s"if (auto cppPtr = dynamic_cast<$objcExtSelf*>(cpp.get()))").braced { w.wl("return cppPtr->Handle::get();") } } w.wl(s"return ::djinni::get_cpp_proxy<$objcSelf>(cpp);") } } }) if (i.ext.cpp) { w.wl w.wl("@end") } }) } override def generateRecord(origin: String, ident: Ident, doc: Doc, params: Seq[TypeParam], r: Record) { val refs = new ObjcRefs() for (c <- r.consts) refs.find(c.ty) for (f <- r.fields) refs.find(f.ty) val objcName = ident.name + (if (r.ext.objc) "_base" else "") val noBaseSelf = objcMarshal.typename(ident, r) // Used for constant names val cppSelf = cppMarshal.fqTypename(ident, r) refs.privHeader.add("!#import " + q(spec.objcppIncludeObjcPrefix + (if(r.ext.objc) "../" else "") + headerName(ident))) refs.privHeader.add("!#include " + q(spec.objcppIncludeCppPrefix + (if(r.ext.cpp) "../" else "") + spec.cppFileIdentStyle(ident) + "." + spec.cppHeaderExt)) refs.body.add("#include <cassert>") refs.body.add("!#import " + q(spec.objcppIncludePrefix + objcppMarshal.privateHeaderName(objcName))) def checkMutable(tm: MExpr): Boolean = tm.base match { case MOptional => checkMutable(tm.args.head) case MString => true case MBinary => true case _ => false } val helperClass = objcppMarshal.helperClass(ident) writeObjcFile(objcppMarshal.privateHeaderName(objcName), origin, refs.privHeader, w => { arcAssert(w) w.wl w.wl(s"@class $noBaseSelf;") w.wl wrapNamespace(w, spec.objcppNamespace, w => { w.wl(s"struct $helperClass") w.bracedSemi { w.wl(s"using CppType = $cppSelf;") w.wl(s"using ObjcType = $noBaseSelf*;") w.wl w.wl(s"using Boxed = $helperClass;") w.wl w.wl(s"static CppType toCpp(ObjcType objc);") w.wl(s"static ObjcType fromCpp(const CppType& cpp);") } }) }) writeObjcFile(privateBodyName(objcName), origin, refs.body, w => { wrapNamespace(w, spec.objcppNamespace, w => { w.wl(s"auto $helperClass::toCpp(ObjcType obj) -> CppType") w.braced { w.wl("assert(obj);") if(r.fields.isEmpty) w.wl("(void)obj; // Suppress warnings in relase builds for empty records") val call = "return CppType(" writeAlignedCall(w, "return {", r.fields, "}", f => objcppMarshal.toCpp(f.ty, "obj." + idObjc.field(f.ident))) w.wl(";") } w.wl w.wl(s"auto $helperClass::fromCpp(const CppType& cpp) -> ObjcType") w.braced { if(r.fields.isEmpty) w.wl("(void)cpp; // Suppress warnings in relase builds for empty records") val first = if(r.fields.isEmpty) "" else IdentStyle.camelUpper("with_" + r.fields.head.ident.name) val call = s"return [[$noBaseSelf alloc] init$first" writeAlignedObjcCall(w, call, r.fields, "]", f => (idObjc.field(f.ident), s"(${objcppMarshal.fromCpp(f.ty, "cpp." + idCpp.field(f.ident))})")) w.wl(";") } }) }) } def writeObjcFile(fileName: String, origin: String, refs: Iterable[String], f: IndentWriter => Unit) { createFile(spec.objcppOutFolder.get, fileName, (w: IndentWriter) => { w.wl("// AUTOGENERATED FILE - DO NOT MODIFY!") w.wl("// This file generated by Djinni from " + origin) w.wl if (refs.nonEmpty) { // Ignore the ! in front of each line; used to put own headers to the top // according to Objective-C style guide refs.foreach(s => w.wl(if (s.charAt(0) == '!') s.substring(1) else s)) w.wl } f(w) }) } }
mknejp/djinni
src/source/ObjcppGenerator.scala
Scala
apache-2.0
15,681
/* * Copyright 2021 HM Revenue & Customs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package uk.gov.hmrc.ct.ct600.v2.calculations import org.joda.time.LocalDate import org.scalatest.prop.TableDrivenPropertyChecks._ import org.scalatest.{Matchers, WordSpec} import uk.gov.hmrc.ct.computations.CP2 import uk.gov.hmrc.ct.ct600.v2.B80 import uk.gov.hmrc.ct.ct600a._ import uk.gov.hmrc.ct.ct600a.v2._ class LoansToParticipatorsCalculatorSpec extends WordSpec with Matchers { def someDate(value:String):Option[LocalDate] = Some(new LocalDate(value)) val lpq01Table = Table( ("lpq03", "lpq04", "lpq05", "expectedLpq01"), (Some(true), Some(true), None, true), (Some(true), Some(false), Some(true), true), (Some(true), Some(true), None, true), (Some(true), Some(false), Some(true), true), (None, None, None, false), (Some(false), None, None, false), (Some(true), Some(false), None, false) ) "LoansToParticipatorsCalculator" should { "correctly validate LPQ01 " in new LoansToParticipatorsCalculator { forAll(lpq01Table) { (lpq03: Option[Boolean], lpq04: Option[Boolean], lpq05: Option[Boolean], expected: Boolean) => { calculateLPQ01(LPQ03(lpq03), LPQ04(lpq04), LPQ05(lpq05)) shouldBe LPQ01(expected) } } } val a2Table = Table( ("expectedValue", "lp02"), (1, LP02(Some(Loan(id = "123", name = "", amount = 1, repaid = false, lastRepaymentDate = None, totalAmountRepaid = None) :: Nil))), (6, LP02(Some( Loan(id = "123", name = "", amount = 1, repaid = true, lastRepaymentDate = Some(new LocalDate("1939-09-02")), totalAmountRepaid = Some(99)) :: Loan(id = "123", name = "", amount = 2, repaid = true, lastRepaymentDate = Some(new LocalDate("1939-08-30")), totalAmountRepaid = Some(99)) :: Loan(id = "123", name = "", amount = 3, repaid = false, lastRepaymentDate = None, totalAmountRepaid = None) :: Nil))) ) "correctly calculate A2 using loans made during the accounting period and still outstanding at the end of the accounting period - this should be all in this filing" in new LoansToParticipatorsCalculator { forAll(a2Table) { (expectedValue: Int, lp02: LP02) => { calculateA2(lp02) shouldBe A2(Some(expectedValue)) } } } "correctly calculate A3 as 25% of A2" in new LoansToParticipatorsCalculator { calculateA3(A2(Some(1))) shouldBe A3(Some(0.25)) calculateA3(A2(Some(333))) shouldBe A3(Some(83.25)) } val reliefDueNowOnLoanTable = Table( ("expectedValue", "isRepaid", "repaymentDate"), (true, true, someDate("2014-09-30")), (false, true, someDate("2014-10-01")), (false, false, None) ) "correctly calculate whether relief is due now for loans repaid within 9 months of end of AP" in new LoansToParticipatorsCalculator { forAll(reliefDueNowOnLoanTable) { (expectedValue: Boolean, isRepaid:Boolean, repaymentDate:Option[LocalDate]) => { val aLoan = Loan(id = "123", name = "", amount = 10000, repaid = isRepaid, lastRepaymentDate = repaymentDate) val acctPeriodEnd = new LocalDate("2013-12-31") aLoan.isRepaymentReliefEarlierThanDue(acctPeriodEnd) shouldBe expectedValue } } } val reliefDueNowOnWriteOffTable = Table( ("expectedValue", "dateWrittenOff"), 
(true, "2014-09-30"), (false, "2014-10-01") ) "correctly calculate whether relief is due now for write offs made within 9 months of end of AP" in new LoansToParticipatorsCalculator { forAll(reliefDueNowOnWriteOffTable) { (expectedValue: Boolean, dateWrittenOff: String) => { val writeOff = WriteOff(loanId = "123", amountWrittenOff = 10, dateWrittenOff = new LocalDate(dateWrittenOff), endDateOfWriteOffAP = Some(new LocalDate("2050-12-31"))) val acctPeriodEnd = new LocalDate("2013-12-31") writeOff.isReliefEarlierThanDue(acctPeriodEnd) shouldBe expectedValue } } } val a4Table = Table( ("expectedValue", "lp02"), (None, LP02(Some(Loan(id = "123", name = "", amount = 123, repaid = true, lastRepaymentDate = Some(new LocalDate("1939-08-31")), totalAmountRepaid = Some(1)) :: Nil))), (Some(1), LP02(Some(Loan(id = "123", name = "", amount = 123, repaid = true, lastRepaymentDate = Some(new LocalDate("1939-09-01")), totalAmountRepaid = Some(1)) :: Nil))), (Some(1), LP02(Some(Loan(id = "123", name = "", amount = 123, repaid = true, lastRepaymentDate = Some(new LocalDate("1940-05-31")), totalAmountRepaid = Some(1)) :: Nil))), (None, LP02(Some(Loan(id = "123", name = "", amount = 123, repaid = true, lastRepaymentDate = Some(new LocalDate("1940-06-01")), totalAmountRepaid = Some(1)) :: Nil))), (None, LP02(Some(Loan(id = "123", name = "", amount = 123, repaid = false, lastRepaymentDate = Some(new LocalDate("1939-09-01")), totalAmountRepaid = Some(1)) :: Nil))), (Some(4), LP02(Some( Loan(id = "123", name = "", amount = 123, repaid = true, lastRepaymentDate = Some(new LocalDate("1939-09-02")), totalAmountRepaid = Some(1)) :: Loan(id = "456", name = "", amount = 456, repaid = true, lastRepaymentDate = Some(new LocalDate("1939-08-30")), totalAmountRepaid = Some(2)) :: Loan(id = "789", name = "", amount = 789, repaid = true, lastRepaymentDate = Some(new LocalDate("1940-05-31")), totalAmountRepaid = Some(3)) :: Nil))) ) "correctly calculate A4 using loan repayments made between the end of the accounting period and 9months and 1 day later" in new LoansToParticipatorsCalculator { forAll(a4Table) { (expectedValue: Option[Int], lp02: LP02) => { val cp2 = CP2(new LocalDate("1939-08-31")) calculateA4(cp2, lp02) shouldBe A4(expectedValue) } } } val a5Table = Table( ("expectedValue", "lp03"), (None, LP03(Some(List(WriteOff("123", 1, new LocalDate("1939-08-31")))))), (Some(1), LP03(Some(List(WriteOff("123", 1, new LocalDate("1939-09-01")))))), (Some(1), LP03(Some(List(WriteOff("123", 1, new LocalDate("1940-05-31")))))), (None, LP03(Some(List(WriteOff("123", 1, new LocalDate("1940-06-01"), Some(new LocalDate("1940-08-31"))))))), (Some(4), LP03(Some( List( WriteOff("123", 1, new LocalDate("1939-09-01")), WriteOff("456", 2, new LocalDate("1940-06-02"), Some(new LocalDate("1940-08-31"))), WriteOff("789", 3, new LocalDate("1940-05-31"))) ))) ) "correctly validate A5 using write offs made between the end of the accounting period and 9months and 1 day later" in new LoansToParticipatorsCalculator { forAll(a5Table) { (expectedValue: Option[Int], lp03: LP03) => { val cp2 = CP2(new LocalDate("1939-08-31")) calculateA5(cp2, lp03) shouldBe A5(expectedValue) } } } "correctly calculate A6 as A4+A5" in new LoansToParticipatorsCalculator { calculateA6(A4(Some(4)), A5(Some(5))) shouldBe A6(Some(9)) calculateA6(A4(None), A5(Some(5))) shouldBe A6(Some(5)) calculateA6(A4(Some(4)), A5(None)) shouldBe A6(Some(4)) } "correctly calculate A7 as 25% of A6" in new LoansToParticipatorsCalculator { calculateA7(A6(Some(1))) shouldBe A7(Some(0.25)) 
calculateA7(A6(Some(333))) shouldBe A7(Some(83.25)) } val a8Table = Table( ("expectedValue", "lp02", "filingDate"), (None, LP02(Some(Loan(id = "123", name = "", amount = 123, repaid = true, lastRepaymentDate = Some(new LocalDate("1940-05-31")), totalAmountRepaid = Some(1), endDateOfRepaymentAP = someDate("1940-08-31")) :: Nil)), LPQ07(someDate("1941-03-01"))), (Some(1), LP02(Some(Loan(id = "456", name = "", amount = 123, repaid = true, lastRepaymentDate = Some(new LocalDate("1940-07-01")), totalAmountRepaid = Some(1), endDateOfRepaymentAP = someDate("1940-08-31")) :: Nil)), LPQ07(someDate("1941-06-01"))), (Some(4), LP02(Some( Loan(id = "123", name = "", amount = 123, repaid = true, lastRepaymentDate = Some(new LocalDate("1940-06-01")), totalAmountRepaid = Some(1), endDateOfRepaymentAP = someDate("1940-08-31")) :: Loan(id = "456", name = "", amount = 456, repaid = true, lastRepaymentDate = Some(new LocalDate("1940-05-31")), totalAmountRepaid = Some(2), endDateOfRepaymentAP = someDate("1940-08-31")) :: Loan(id = "789", name = "", amount = 789, repaid = true, lastRepaymentDate = Some(new LocalDate("1941-01-31")), totalAmountRepaid = Some(3), endDateOfRepaymentAP = someDate("1941-08-31")) :: Nil)), LPQ07(someDate("1942-06-01"))) ) "correctly calculate A8 using loan repayments made more than 9 months after the end of the accounting period " in new LoansToParticipatorsCalculator { forAll(a8Table) { (expectedValue: Option[Int], lp02: LP02, filingDate: LPQ07) => { val cp2 = CP2(new LocalDate("1939-08-31")) calculateA8(cp2, lp02, filingDate) shouldBe A8(expectedValue) } } } val A8InverseTable = Table( ("expectedValue", "lp02", "filingDate"), (0, LP02(Some(Loan(id = "123", name = "", amount = 123, repaid = true, lastRepaymentDate = Some(new LocalDate("2014-05-31")), totalAmountRepaid = Some(1), endDateOfRepaymentAP = someDate("2014-12-31")) :: Nil)), LPQ07(someDate("2015-06-01"))), //repayment too early (0, LP02(Some(Loan(id = "456", name = "", amount = 123, repaid = true, lastRepaymentDate = Some(new LocalDate("2014-06-01")), totalAmountRepaid = Some(1), endDateOfRepaymentAP = someDate("2014-12-31")) :: Nil)), LPQ07(someDate("2015-10-01"))), // relief due now (1, LP02(Some(Loan(id = "457", name = "", amount = 123, repaid = true, lastRepaymentDate = Some(new LocalDate("2014-10-01")), totalAmountRepaid = Some(1), endDateOfRepaymentAP = someDate("2014-12-31")) :: Nil)), LPQ07(someDate("2015-09-29"))), // filing date early - relief not yet due (1, LP02(Some(Loan(id = "123", name = "", amount = 123, repaid = true, lastRepaymentDate = Some(new LocalDate("2014-11-01")), totalAmountRepaid = Some(1), endDateOfRepaymentAP = someDate("2014-12-31")) :: Nil)), LPQ07(None)), // no filing date - meaning LPQ06 == true ie filied within 9 months (2, LP02(Some( Loan(id = "123", name = "", amount = 123, repaid = true, lastRepaymentDate = Some(new LocalDate("2014-05-31")), totalAmountRepaid = Some(1), endDateOfRepaymentAP = someDate("2014-12-31")) :: Loan(id = "456", name = "", amount = 456, repaid = true, lastRepaymentDate = Some(new LocalDate("2014-10-01")), totalAmountRepaid = Some(2), endDateOfRepaymentAP = someDate("2014-12-31")) :: Loan(id = "789", name = "", amount = 789, repaid = true, lastRepaymentDate = Some(new LocalDate("2014-06-01")), totalAmountRepaid = Some(5), endDateOfRepaymentAP = someDate("2014-12-31")) :: Nil)), LPQ07(someDate("2015-09-30"))) ) "correctly calculate A8Inverse using loan repayments made more than 9 months after the end of the accounting period " in new LoansToParticipatorsCalculator { 
forAll(A8InverseTable) { (expectedValue: Int, lp02: LP02, filingDate: LPQ07) => { val cp2 = CP2(new LocalDate("2013-12-31")) calculateA8Inverse(cp2, lp02, filingDate) shouldBe A8Inverse(Some(expectedValue)) } } } val reliefLaterThanDueNowTable = Table( ("expectedValue", "isRepaid", "repaymentDate", "endDateOfAccountingPeriodDuringWhichRepaymentWasMade", "filingDate"), (false, true, someDate("2014-09-30"), someDate("2014-12-31"), someDate("2015-10-01")), (true, true, someDate("2014-10-01"), someDate("2014-12-31"), someDate("2015-10-01")), (false, true, someDate("2014-10-01"), someDate("2014-12-31"), someDate("2015-09-30")), (true, true, someDate("2014-10-01"), someDate("2014-12-31"), someDate("2015-10-01")), (false, false, None, someDate("2014-12-31"), someDate("2015-10-01")), (false, false, someDate("2014-10-01"), someDate("2014-12-31"), someDate("2015-10-01")), (false, true, someDate("2014-10-01"), someDate("2014-12-31"), None), (false, true, someDate("2014-09-30"), None, someDate("2015-10-01")) ) "correctly calculate isRepaymentLaterReliefNowDue using loan repayments made more than 9 months after the end of the accounting period" in new LoansToParticipatorsCalculator { forAll(reliefLaterThanDueNowTable) { (expectedValue: Boolean, isRepaid:Boolean, repaymentDate:Option[LocalDate], endDateOfAccountingPeriodDuringWhichRepaymentWasMade: Option[LocalDate], filingDate: Option[LocalDate]) => { val aLoan = Loan(id = "123", name = "", amount = 10000, repaid = isRepaid, lastRepaymentDate = repaymentDate, totalAmountRepaid = Some(5000), endDateOfRepaymentAP = endDateOfAccountingPeriodDuringWhichRepaymentWasMade) val acctPeriodEnd = new LocalDate("2013-12-31") aLoan.isRepaymentLaterReliefNowDue(acctPeriodEnd, LPQ07(filingDate)) shouldBe expectedValue } } } "throw exception when repayment date is missing" in { intercept[IllegalArgumentException] { val aLoan = Loan(id = "123", name = "", amount = 10000, repaid = true, lastRepaymentDate = someDate("2014-10-01"), totalAmountRepaid = Some(5000), endDateOfRepaymentAP = None) val acctPeriodEnd = new LocalDate("2013-12-31") aLoan.isRepaymentLaterReliefNowDue(acctPeriodEnd, LPQ07(someDate("2015-10-01"))) } } val repaymentReliefLaterThanNotYetDueTable = Table( ("expectedValue", "isRepaid", "repaymentDate", "endDateOfAccountingPeriodDuringWhichRepaymentWasMade", "filingDate"), (false, true, someDate("2014-09-30"), someDate("2014-12-31"), someDate("2015-10-01")), // repayment within 9 months (false, true, someDate("2014-10-01"), someDate("2014-12-31"), someDate("2015-10-01")), // relief due now (true, true, someDate("2014-10-01"), someDate("2014-12-31"), someDate("2015-09-30")), // filing date within 9 months - GOOD (false, true, someDate("2014-10-01"), someDate("2014-12-31"), someDate("2015-10-01")), // filing date more that 9 months (false, false, None, someDate("2014-12-31"), someDate("2015-10-01")), // not repaid (false, false, someDate("2014-10-01"), someDate("2014-12-31"), someDate("2015-10-01")), // not repaid (true, true, someDate("2014-10-01"), someDate("2014-12-31"), None), // no filing date - meaning LPQ06 == true ie filied within 9 months (false, true, someDate("2014-09-30"), None, someDate("2015-10-01")) // repayment within 9 months and no end of AP date ) "correctly calculate isRepaymentLaterReliefNotYetDue using loan repayments made more than 9 months after the end of the accounting period" in new LoansToParticipatorsCalculator { forAll(repaymentReliefLaterThanNotYetDueTable) { (expectedValue: Boolean, isRepaid:Boolean, 
repaymentDate:Option[LocalDate], endDateOfAccountingPeriodDuringWhichRepaymentWasMade: Option[LocalDate], filingDate: Option[LocalDate]) => { val aLoan = Loan(id = "123", name = "", amount = 10000, repaid = isRepaid, lastRepaymentDate = repaymentDate, totalAmountRepaid = Some(5000), endDateOfRepaymentAP = endDateOfAccountingPeriodDuringWhichRepaymentWasMade) val acctPeriodEnd = new LocalDate("2013-12-31") aLoan.isRepaymentLaterReliefNotYetDue(acctPeriodEnd, LPQ07(filingDate)) shouldBe expectedValue } } } "throw exception when repayment date is missing on call to isRepaymentLaterReliefNotYetDue" in { intercept[IllegalArgumentException] { val aLoan = Loan(id = "123", name = "", amount = 10000, repaid = true, lastRepaymentDate = someDate("2014-10-01"), totalAmountRepaid = Some(5000), endDateOfRepaymentAP = None) val acctPeriodEnd = new LocalDate("2013-12-31") aLoan.isRepaymentLaterReliefNotYetDue(acctPeriodEnd, LPQ07(someDate("2015-10-01"))) } } val writeOffReliefLaterThanNotYetDueTable = Table( ("expectedValue", "isRepaid", "dateWrittenOff", "endDateOfAccountingPeriodDuringWhichRepaymentWasMade", "filingDate"), (false, true, "2014-09-30", someDate("2014-12-31"), someDate("2015-10-01")), // repayment within 9 months (false, true, "2014-10-01", someDate("2014-12-31"), someDate("2015-10-01")), // relief due now (true, true, "2014-10-01", someDate("2014-12-31"), someDate("2015-09-30")), // filing date within 9 months - GOOD (false, true, "2014-10-01", someDate("2014-12-31"), someDate("2015-10-01")), // filing date more that 9 months (false, false, "2014-10-01", someDate("2014-12-31"), someDate("2015-10-01")) // not repaid ) "correctly calculate isWriteOffLaterReliefNotYetDue using loan writeOffs made more than 9 months after the end of the accounting period" in new LoansToParticipatorsCalculator { forAll(writeOffReliefLaterThanNotYetDueTable) { (expectedValue: Boolean, isRepaid:Boolean, dateWrittenOff:String, endDateOfWriteOffAP: Option[LocalDate], filingDate: Option[LocalDate]) => { val writeOff = WriteOff(loanId = "123", amountWrittenOff = 10, dateWrittenOff = new LocalDate(dateWrittenOff), endDateOfWriteOffAP = endDateOfWriteOffAP) val acctPeriodEnd = new LocalDate("2013-12-31") writeOff.isLaterReliefNotYetDue(acctPeriodEnd, LPQ07(filingDate)) shouldBe expectedValue } } } "throw exception on call to iswriteOffLaterReliefNotYetDue when endDateOfWriteOffAP date is missing, and dateWrittenOff > 9 months after end of AP" in { intercept[IllegalArgumentException] { val writeOff = WriteOff(loanId = "123", amountWrittenOff = 10, dateWrittenOff = new LocalDate("2014-10-01"), endDateOfWriteOffAP = None) val acctPeriodEnd = new LocalDate("2013-12-31") writeOff.isLaterReliefNotYetDue(acctPeriodEnd, LPQ07(someDate("2015-10-01"))) } } "return false when writeOff date is within 9 months of the end date of AP and endDateOfWriteOffAP is None" in { val writeOff = WriteOff(loanId = "123", amountWrittenOff = 10, dateWrittenOff = new LocalDate("2014-09-30"), endDateOfWriteOffAP = None) val acctPeriodEnd = new LocalDate("2013-12-31") writeOff.isLaterReliefNowDue(acctPeriodEnd, LPQ07(someDate("2015-10-01"))) shouldBe false } val a9Table = Table( ("expectedValue", "lp03", "filingDate"), (None, LP03(Some(List(WriteOff("123", 1, new LocalDate("1940-05-31"), Some(new LocalDate("1940-12-31")))))), someDate("1942-06-07")), (None, LP03(Some(List(WriteOff("123", 1, new LocalDate("1940-05-31"), None)))), someDate("1942-06-07")), (Some(1), LP03(Some(List(WriteOff("123", 1, new LocalDate("1940-06-01"), Some(new 
LocalDate("1940-12-31")))))), someDate("1941-10-01")), (None, LP03(Some(List(WriteOff("123", 1, new LocalDate("1940-06-01"), Some(new LocalDate("1940-12-31")))))), someDate("1941-09-30")), (Some(6), LP03(Some( List( WriteOff("123", 1, new LocalDate("1940-06-01"), someDate("1940-12-31")), WriteOff("456", 2, new LocalDate("1940-05-31"), someDate("1940-12-31")), WriteOff("789", 5, new LocalDate("1941-12-31"), someDate("1940-12-31"))))), someDate("1941-10-01")) ) "correctly calculate A9 using write offs made more than 9 months after the end of the accounting period" in new LoansToParticipatorsCalculator { forAll(a9Table) { (expectedValue: Option[Int], lp03: LP03, filingDate: Option[LocalDate]) => { val cp2 = CP2(new LocalDate("1939-08-31")) calculateA9(cp2, lp03, LPQ07(filingDate)) shouldBe A9(expectedValue) } } } val a9InverseTable = Table( ("A9InverseExpectedValue", "lp03", "filingDate"), (0, LP03(Some(List(WriteOff("123", 1, new LocalDate("2014-05-31"), someDate("2014-12-31"))))), someDate("2015-06-01")), (0, LP03(Some(List(WriteOff("123", 1, new LocalDate("2014-06-01"), None)))), someDate("2015-10-01")), (0, LP03(Some(List(WriteOff("123", 1, new LocalDate("2014-06-01"), someDate("2014-12-31"))))), someDate("2014-10-01")), (1, LP03(Some(List(WriteOff("123", 1, new LocalDate("2014-10-01"), someDate("2014-12-31"))))), someDate("2015-09-30")), (2, LP03(Some( List( WriteOff("123", 1, new LocalDate("2014-05-31"), someDate("2014-12-31")), WriteOff("456", 2, new LocalDate("2014-10-01"), someDate("2014-12-31")), WriteOff("789", 5, new LocalDate("2014-06-01"), someDate("2014-12-31"))))), someDate("2015-09-30")) ) "correctly calculate A9Inverse using write offs made more than 9 months after the end of the accounting period" in new LoansToParticipatorsCalculator { forAll(a9InverseTable) { (expectedValue: Int, lp03: LP03, filingDate: Option[LocalDate]) => { val cp2 = CP2(new LocalDate("2013-12-31")) calculateA9Inverse(cp2, lp03, LPQ07(filingDate)) shouldBe A9Inverse(Some(expectedValue)) } } } val writeOffRelief = Table( ("expectedValue", "dateWrittenOff", "endDateOfWriteOffAP", "filingDate"), (false, "1940-09-30", "1940-12-31", "1940-11-1"), (true, "1940-10-01", "1940-12-31", "1941-10-1"), (false, "1940-10-01", "1940-12-31", "1940-09-30") ) "correctly calculate if relief is due on write offs after 9 months" in new LoansToParticipatorsCalculator { forAll(writeOffRelief) { (expectedValue: Boolean, dateWrittenOff: String, endDateOfWriteOffAP: String, filingDate: String) => { val cp2 = CP2(new LocalDate("1939-12-31")) val writeOff = WriteOff(loanId = "123", amountWrittenOff = 10, dateWrittenOff = new LocalDate(dateWrittenOff), endDateOfWriteOffAP = someDate(endDateOfWriteOffAP)) writeOff.isLaterReliefNowDue(cp2.value, LPQ07(someDate(filingDate))) shouldBe expectedValue } } } "throw exception when writeOffDate is more than 9 months after AP end, but endDateOfWriteOffAP is None" in { intercept[IllegalArgumentException] { val writeOff = WriteOff(loanId = "123", amountWrittenOff = 10, dateWrittenOff = new LocalDate("1940-10-01"), endDateOfWriteOffAP = None) val cp2 = CP2(new LocalDate("1939-12-31")) writeOff.isLaterReliefNowDue(cp2.value, LPQ07(someDate("1941-10-1"))) } } "not throw exception when writeOffDate is more than 9 months after AP end, but filing date is not set (implying that filing is within 9 months of end of AP)" in new LoansToParticipatorsCalculator { val writeOff = WriteOff(loanId = "123", amountWrittenOff = 10, dateWrittenOff = new LocalDate("2014-10-02"), endDateOfWriteOffAP = Some(new 
LocalDate("2014-12-31"))) val cp2 = CP2(new LocalDate("2013-12-31")) // AP end date writeOff.isReliefEarlierThanDue(cp2.value) shouldBe false writeOff.isLaterReliefNowDue(cp2.value, LPQ07(None)) shouldBe false calculateA9(cp2, LP03(Some(List(writeOff))), LPQ07(None)) shouldBe A9(None) calculateA9Inverse(cp2, LP03(Some(List(writeOff))), LPQ07(None)) shouldBe A9Inverse(Some(10)) } "correctly calculate A10 as A8+A9" in new LoansToParticipatorsCalculator { calculateA10(A8(Some(4)), A9(Some(5))) shouldBe A10(Some(9)) calculateA10(A8(None), A9(Some(5))) shouldBe A10(Some(5)) calculateA10(A8(Some(4)), A9(None)) shouldBe A10(Some(4)) } "correctly calculate A10Inverse as A8Inverse+A9Inverse" in new LoansToParticipatorsCalculator { calculateA10Inverse(A8Inverse(Some(4)), A9Inverse(Some(5))) shouldBe A10Inverse(Some(9)) calculateA10Inverse(A8Inverse(None), A9Inverse(Some(5))) shouldBe A10Inverse(Some(5)) calculateA10Inverse(A8Inverse(Some(4)), A9Inverse(None)) shouldBe A10Inverse(Some(4)) } "correctly calculate A11 as 25% of A10" in new LoansToParticipatorsCalculator { calculateA11(A10(Some(1))) shouldBe A11(Some(0.25)) calculateA11(A10(Some(333))) shouldBe A11(Some(83.25)) } "correctly calculate A11Inverse as 25% of A10Inverse" in new LoansToParticipatorsCalculator { calculateA11Inverse(A10Inverse(Some(1))) shouldBe A11Inverse(Some(0.25)) calculateA11Inverse(A10Inverse(Some(333))) shouldBe A11Inverse(Some(83.25)) } "correctly calculate A12, total outstanding loans, as A2 + LP04" in new LoansToParticipatorsCalculator { calculateA12(A2(None), LP04(None)) shouldBe A12(Some(0)) calculateA12(A2(None), LP04(Some(4))) shouldBe A12(Some(4)) calculateA12(A2(Some(4)), LP04(None)) shouldBe A12(Some(4)) calculateA12(A2(Some(3)), LP04(Some(5))) shouldBe A12(Some(8)) } "correctly calculate A13 as A3 minus (the sum of Boxes A7 and A11)" in new LoansToParticipatorsCalculator { calculateA13(a3 = A3(Some(100)), a7 = A7(Some(7.99)), a11 = A11(Some(11))) shouldBe A13(Some(81.01)) calculateA13(a3 = A3(Some(100.30)), a7 = A7(Some(7.99)), a11 = A11(Some(11))) shouldBe A13(Some(81.31)) calculateA13(a3 = A3(Some(100)), a7 = A7(Some(7)), a11 = A11(Some(11))) shouldBe A13(Some(82)) calculateA13(a3 = A3(Some(45.75)), a7 = A7(Some(7.25)), a11 = A11(Some(11))) shouldBe A13(Some(27.5)) calculateA13(a3 = A3(Some(7.25)), a7 = A7(Some(7)), a11 = A11(Some(11))) shouldBe A13(Some(-10.75)) calculateA13(a3 = A3(None), a7 = A7(None), a11 = A11(None)) shouldBe A13(None) calculateA13(a3 = A3(Some(100)), a7 = A7(None), a11 = A11(None)) shouldBe A13(Some(100)) } "correctly calculate B80 as true when A11 > 0, otherwise None" in new LoansToParticipatorsCalculator { calculateB80(A11(None)) shouldBe B80(None) calculateB80(A11(Some(0))) shouldBe B80(None) calculateB80(A11(Some(-1))) shouldBe B80(None) calculateB80(A11(Some(1))) shouldBe B80(Some(true)) } } }
hmrc/ct-calculations
src/test/scala/uk/gov/hmrc/ct/ct600/v2/calculations/LoansToParticipatorsCalculatorSpec.scala
Scala
apache-2.0
28,062
/*********************************************************************** * Copyright (c) 2013-2019 Commonwealth Computer Research, Inc. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Apache License, Version 2.0 * which accompanies this distribution and is available at * http://www.opensource.org/licenses/apache2.0.php. ***********************************************************************/ package org.locationtech.geomesa.features.kryo.serialization import com.esotericsoftware.kryo.io.{Input, Output} import com.esotericsoftware.kryo.{Kryo, Serializer} import org.locationtech.geomesa.features.SerializationOption.SerializationOption import org.locationtech.geomesa.features.kryo.KryoFeatureSerializer import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType} /** * Kryo serialization implementation for simple features. This class shouldn't be used directly - * see [[KryoFeatureSerializer]] * * @param sft the type of simple feature * @param opts the encoding options (optional) */ class SimpleFeatureSerializer(sft: SimpleFeatureType, opts: Set[SerializationOption] = Set.empty) extends Serializer[SimpleFeature] { private val serializer = KryoFeatureSerializer(sft, opts) override def write(kryo: Kryo, output: Output, sf: SimpleFeature): Unit = { val bytes = serializer.serialize(sf) output.writeInt(bytes.length, true) output.write(bytes) } override def read(kryo: Kryo, input: Input, typ: Class[SimpleFeature]): SimpleFeature = { val bytes = Array.ofDim[Byte](input.readInt(true)) input.readBytes(bytes) /* readBytes fills the whole array; plain read may return fewer bytes */ serializer.deserialize(bytes) } }
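// --- Hypothetical usage sketch (editor's example, not part of the original file) ---
// Registering the serializer with a Kryo instance. The feature type spec string is an
// assumption for the example; SimpleFeatureTypes.createType is GeoMesa's utility for
// building a SimpleFeatureType from such a spec.
object SimpleFeatureSerializerExample {
  import com.esotericsoftware.kryo.Kryo
  import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes

  def newKryo(): Kryo = {
    val sft = SimpleFeatureTypes.createType("example", "name:String,*geom:Point:srid=4326")
    val kryo = new Kryo()
    // Kryo now delegates SimpleFeature (de)serialization to the class above
    kryo.register(classOf[SimpleFeature], new SimpleFeatureSerializer(sft))
    kryo
  }
}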
elahrvivaz/geomesa
geomesa-features/geomesa-feature-kryo/src/main/scala/org/locationtech/geomesa/features/kryo/serialization/SimpleFeatureSerializer.scala
Scala
apache-2.0
1,675
class DevelopApp { }
Rovak/ScalaWebscraper
scraper-demo/src/main/scala/DevelopApp.scala
Scala
mit
22
/* * Copyright 2014 Lars Edenbrandt * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package se.nimsa.sbx.dicom import se.nimsa.dicom.data.Tag case class DicomProperty(name: String, dicomTag: Int) object DicomProperty { object PatientName extends DicomProperty("patientName", Tag.PatientName) object PatientID extends DicomProperty("patientID", Tag.PatientID) object PatientBirthDate extends DicomProperty("patientBirthDate", Tag.PatientBirthDate) object PatientSex extends DicomProperty("patientSex", Tag.PatientSex) object StudyInstanceUID extends DicomProperty("studyInstanceUID", Tag.StudyInstanceUID) object StudyDescription extends DicomProperty("studyDescription", Tag.StudyDescription) object StudyDate extends DicomProperty("studyDate", Tag.StudyDate) object StudyID extends DicomProperty("studyID", Tag.StudyID) object AccessionNumber extends DicomProperty("accessionNumber", Tag.AccessionNumber) object PatientAge extends DicomProperty("patientAge", Tag.PatientAge) object SeriesInstanceUID extends DicomProperty("seriesInstanceUID", Tag.SeriesInstanceUID) object SeriesNumber extends DicomProperty("seriesNumber", Tag.SeriesNumber) object SeriesDescription extends DicomProperty("seriesDescription", Tag.SeriesDescription) object SeriesDate extends DicomProperty("seriesDate", Tag.SeriesDate) object Modality extends DicomProperty("modality", Tag.Modality) object ProtocolName extends DicomProperty("protocolName", Tag.ProtocolName) object BodyPartExamined extends DicomProperty("bodyPartExamined", Tag.BodyPartExamined) object SOPInstanceUID extends DicomProperty("sopInstanceUID", Tag.SOPInstanceUID) object ImageType extends DicomProperty("imageType", Tag.ImageType) object InstanceNumber extends DicomProperty("instanceNumber", Tag.InstanceNumber) object Manufacturer extends DicomProperty("manufacturer", Tag.Manufacturer) object StationName extends DicomProperty("stationName", Tag.StationName) object FrameOfReferenceUID extends DicomProperty("frameOfReferenceUID", Tag.FrameOfReferenceUID) }
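// --- Hypothetical usage sketch (editor's example, not part of the original file) ---
// Each DicomProperty pairs a camelCase attribute name with its DICOM tag, so a reverse
// lookup from tag to name is a simple linear scan. The `knownProperties` list here is
// an illustrative subset, not an exhaustive registry.
object DicomPropertyExample {
  import DicomProperty._

  val knownProperties: Seq[DicomProperty] = Seq(PatientName, PatientID, Modality)

  // e.g. nameForTag(Tag.PatientName) == Some("patientName")
  def nameForTag(tag: Int): Option[String] =
    knownProperties.find(_.dicomTag == tag).map(_.name)
}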
slicebox/slicebox
src/main/scala/se/nimsa/sbx/dicom/DicomProperty.scala
Scala
apache-2.0
2,605
/** * Copyright (c) 2012-2013, Tomasz Kaczmarzyk. * * This file is part of BeanDiff. * * BeanDiff is free software; you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation; either version 3 of the License, or * (at your option) any later version. * * BeanDiff is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with BeanDiff; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA */ package org.beandiff.core import org.beandiff.core.model.Path import org.beandiff.core.model.FieldProperty import org.beandiff.support.FieldSupport.RichField import org.beandiff.support.FieldSupport import org.beandiff.support.ClassSupport.RichClass import org.beandiff.core.model.Property class FieldRoutePlanner extends RoutePlanner { def guide(current: Path, o1: Any, o2: Any, walker: ObjectWalker): Unit = { getDeclaredFields(o1.getClass) foreach { f => f.setAccessible(true) val val1 = f.get(o1) val val2 = f.get(o2) val path = current.step(new FieldProperty(f.getName)) walker.walk(path, val1, val2) } } override def routes(o1: Any, o2: Any) = { // TODO too much responsibility? if (o1.getClass == o2.getClass) { o1.getClass.fieldsInHierarchy map { f => { (new FieldProperty(f.getName), (f.getFrom(o1), f.getFrom(o2))) } } } else { val o1fields = o1.getClass.fieldsInHierarchyByName val o2fields = o2.getClass.fieldsInHierarchyByName val fieldNames = o1fields.keySet ++ o2fields.keySet for (fname <- fieldNames) yield (o1fields.get(fname), o2fields.get(fname)) match { case (Some(f1), Some(f2)) => (Property(fname), (Some(f1.get(o1)), Some(f2.get(o2)))) case (None, Some(f2)) => (Property(fname), (None, Some(f2.get(o2)))) case (Some(f1), None) => (Property(fname), (Some(f1.get(o1)), None)) } } } protected def getDeclaredFields(c: Class[_]) = { // TODO get rid of it c.getDeclaredFields() } }
tkaczmarzyk/beandiff
src/main/scala/org/beandiff/core/FieldRoutePlanner.scala
Scala
lgpl-3.0
2,446
package au.id.cxd.math.count /** * ##import MathJax * Permutation operation (select without replacement) $P_r^n$ * * $$ * \\frac{n!}{(n-r)!} * $$ * * Created by cd on 6/09/2014. */ class Permutation { def op(n: Double)(r: Double) = { if (r <= n) { Factorial(n) / Factorial(n - r) } else 0.0 } } object Permutation { def apply(n: Double)(r: Double) = new Permutation().op(n)(r) }
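// --- Hypothetical usage sketch (editor's example, not part of the original file) ---
// Selecting 2 of 5 items without replacement gives 5!/(5-2)! = 120/6 = 20 ordered
// arrangements; asking for more items than are available yields 0.
object PermutationExample extends App {
  println(Permutation(5)(2)) // 20.0
  println(Permutation(2)(5)) // 0.0, since r > n
}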
cxd/scala-au.id.cxd.math
math/src/main/scala/au/id/cxd/math/count/Permutation.scala
Scala
mit
418
object Script { val rl = loopInduction( parseFormula( "(A() > 0 & b()>0 & B() > 0 & B() > b() & ~(f() = l()) & eps() > 0) &" + "(((b()*B()*x(l()) > b()*B()*x(f()) + " + "(1/2) * (B()*v(f())^2 - b()*v(l())^2) & " + "x(l()) > x(f()) &" + "v(f()) >= 0 &" + "v(l()) >= 0 ) ) )")) val cuttct = cutT( DirectedCut, parseFormula( "b()*B()*X1>b()*B()*X2+1/2*(B()*V1^2-b()*V2^2)+" + "B()*(A()+b())*(1/2*A()*eps()^2+eps()*V1)" ), parseFormula( "b()*B()*X1>b()*B()*X2+1/2*(B()*V1^2-b()*V2^2)+" + "B()*(A()+b())*(1/2*A()*s()^2+s()*V1)" ) ) val cuttct2 = cutT( StandardCut, parseFormula( "b()*B()*X1>b()*B()*X2+1/2*(B()*V1^2-b()*V2^2)+" + "B()*(A()+b())*(1/2*A()*eps()^2+eps()*V1)" ), parseFormula( "B()*(A()+b())*(1/2*A()*s()^2+s()*V1) <= " + "B()*(A()+b())*(1/2*A()*eps()^2+eps()*V1) " ) ) val mostthingsT = repeatT( eitherlistT(hpalphaT, alphaT, nonarithcloseT, betaT, substT)) val everythingT: Tactic = composeT( repeatT( eitherlistT(hpalphaT, alphaT, nonarithcloseT, betaT, substT)), eitherT(nonarithcloseT, hidethencloseT)) val ch_brake = composelistT(repeatT(hpalphaT), diffsolveT(RightP(1),Endpoint), repeatT(hpalphaT), instantiate0T(St("C")), repeatT(substT), hideunivsT(St("C")), repeatT(nullarizeT), repeatT(vacuousT), everythingT ) val keepfm1 = parseFormula("B() * (A() + b()) * C <= D") val keepfm2 = parseFormula("b() * B() * X > b() * B() * X1 + C + D") val hidefm3 = parseFormula("b() * B() * X > b() * B() * X1 + C ") val whatev_finish = composelistT( repeatT(nullarizeT), repeatT(substT), branchT(cuttct, List(branchT(cuttct2, List( composelistT( repeatT(hidematchT(List(keepfm2,hidefm3))), everythingT ), composeT(repeatT( hidenotmatchT(List(keepfm1,keepfm2))), arithT))), composelistT( mostthingsT, hidecantqeT, hidematchT(List(hidefm3)), everythingT ))) ) val ch_whatev = composelistT(repeatT(hpalphaT), diffsolveT(RightP(1),Endpoint), repeatT(hpalphaT), instantiate0T(St("C")), repeatT(substT), hideunivsT(St("C")), repeatT(hpalphaT), repeatT(vacuousT), branchT(tryruleT(impLeft), List(branchT(tryruleT(impLeft), List(whatev_finish, composelistT( tryruleT(not), alleasyT)) ), composelistT( tryruleT(not), tryruleT(close)))) ) val indtct = composeT( repeatT(eitherT(hpalphaT,alphaT)), branchT(tryruleT(andRight), List(ch_brake,ch_whatev))) val main = branchT(tryruleT(rl), List(tryruleatT(close)(RightP(0)), indtct, repeatT(trylistofrulesT(List(close,andLeft))) )) }
keymaerad/KeYmaeraD
examples/llcsimple.dl.scala
Scala
bsd-3-clause
3,883
package com.rcirka.play.dynamodb import java.util.UUID import scala.concurrent.{Await, Future} import scala.concurrent.duration._ package object util { def awaitResult[T](future: Future[T]) : T = Await.result(future, 10 seconds) def newKey() = UUID.randomUUID().toString.replace("-", "") }
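// --- Hypothetical usage sketch (editor's example, not part of the original file) ---
// awaitResult blocks the calling (test) thread for at most 10 seconds; newKey produces
// a 32-character identifier, i.e. a UUID with the dashes stripped.
object UtilExample extends App {
  import com.rcirka.play.dynamodb.util._
  import scala.concurrent.Future

  println(awaitResult(Future.successful(42))) // 42
  println(newKey())                           // e.g. "9f1c27d04bb84b5e8a3f0d6c2e7a1b45"
}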
rcirka/Play-DynamoDB
src/test/scala/com/rcirka/play/dynamodb/util/package.scala
Scala
mit
299
package com.peterpotts.snake.mapreduce import com.peterpotts.snake.coercion.Compare import com.peterpotts.snake.predicate.Extractor case class ExtractorOrdering[T](extractor: Extractor[T]) extends Ordering[T] { def compare(left: T, right: T): Int = Compare(extractor(left), extractor(right)) override def toString = s"$extractor order" }
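// --- Hypothetical usage sketch (editor's example, not part of the original file) ---
// Assuming an Extractor[Person] that pulls out the age field (how extractors are
// constructed is not shown here, so `ageExtractor` is an assumption), the ordering
// delegates the field comparison to Compare:
//
//   case class Person(name: String, age: Int)
//   val byAge: Ordering[Person] = ExtractorOrdering(ageExtractor)
//   people.sorted(byAge) // ascending by age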
peterpotts/snake
src/main/scala/com/peterpotts/snake/mapreduce/ExtractorOrdering.scala
Scala
mit
345
/* * Copyright 2011-2019 Asakusa Framework Team. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.asakusafw.spark.runtime.graph import java.io.{ OutputStream, OutputStreamWriter, PrintWriter } import java.util.concurrent.atomic.AtomicBoolean import scala.collection.mutable import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.Path import org.slf4j.LoggerFactory import com.asakusafw.runtime.directio.{ Counter, OutputTransactionContext } import com.asakusafw.runtime.directio.hadoop.HadoopDataSourceUtil import resource._ class TransactionManager( configuration: Configuration, transactionId: String, properties: Map[String, String]) { private val Logger = LoggerFactory.getLogger(getClass) private val transactionProperties = if (properties.isEmpty) { Map("Transaction ID" -> transactionId) } else { properties } private val running: mutable.Map[String, OutputTransactionContext] = mutable.Map.empty def acquire(id: String): OutputTransactionContext = synchronized { val acquired = running.get(id) if (acquired.isDefined) { throw new IllegalStateException() } val created = new OutputTransactionContext(transactionId, id, new Counter()) running.put(id, created) created } def release(context: OutputTransactionContext): Unit = synchronized { val id = context.getOutputId val acquired = running.get(id) if (acquired.isEmpty) { if (Logger.isWarnEnabled) { Logger.warn(s"invalid transaction output ID: ${id}") } } running.remove(id) } def begin(): Unit = { if (Logger.isDebugEnabled) { Logger.debug(s"starting transaction of Direct I/O file output: ${transactionId}") } setCommitted(true) setTransactionInfo(true) } def end(): Unit = { if (running.isEmpty) { if (Logger.isDebugEnabled) { Logger.debug(s"finishing transaction of Direct I/O file output: ${transactionId}") } if (isCommitted()) { setCommitted(false) setTransactionInfo(false) } } } def isCommitted(): Boolean = { val commitMark = getCommitMarkPath() val fs = commitMark.getFileSystem(configuration) fs.exists(commitMark) } private def setTransactionInfo(value: Boolean): Unit = { val transactionInfo = getTransactionInfoPath() val fs = transactionInfo.getFileSystem(configuration) if (value) { for { output <- managed( new TransactionManager.SafeOutputStream(fs.create(transactionInfo, false))) writer <- managed( new PrintWriter(new OutputStreamWriter(output, HadoopDataSourceUtil.COMMENT_CHARSET))) } { transactionProperties.foreach { case (key, value) if value != null => // scalastyle:ignore writer.println(s"${key}: ${value}") case _ => } } } else { fs.delete(transactionInfo, false) } } private def setCommitted(value: Boolean): Unit = { val commitMark = getCommitMarkPath() val fs = commitMark.getFileSystem(configuration) if (value) { fs.create(commitMark, false).close() } else { fs.delete(commitMark, false) } } private def getTransactionInfoPath(): Path = { HadoopDataSourceUtil.getTransactionInfoPath(configuration, transactionId) } private def getCommitMarkPath(): Path = { HadoopDataSourceUtil.getCommitMarkPath(configuration, transactionId) } } 
object TransactionManager { private[TransactionManager] class SafeOutputStream(delegate: OutputStream) extends OutputStream { private val closed = new AtomicBoolean() override def write(b: Int): Unit = { delegate.write(b) } override def write(b: Array[Byte], off: Int, len: Int): Unit = { delegate.write(b, off, len) } override def close(): Unit = { if (closed.compareAndSet(false, true)) { delegate.close() } } } }
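// --- Hypothetical usage sketch (editor's example, not part of the original file) ---
// The expected lifecycle is begin -> acquire/release one context per output id -> end.
// The transaction id and the property map below are assumptions for the example.
object TransactionManagerExample {
  def run(conf: Configuration): Unit = {
    val manager = new TransactionManager(conf, "tx-0001", Map("User Name" -> "example"))
    manager.begin()                        // writes the commit mark and transaction info
    val context = manager.acquire("out-0") // acquiring the same id twice throws
    try {
      // ... write Direct I/O output under `context` ...
    } finally {
      manager.release(context)
    }
    manager.end()                          // clears the marks once nothing is running
  }
}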
ueshin/asakusafw-spark
runtime/src/main/scala/com/asakusafw/spark/runtime/graph/TransactionManager.scala
Scala
apache-2.0
4,460
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.github.cloudml.zen.ml.neuralNetwork import java.util.Random import breeze.linalg.{DenseMatrix => BDM, DenseVector => BDV} import breeze.numerics.signum import com.github.cloudml.zen.ml.util.Logging import org.apache.spark.annotation.Experimental import org.apache.spark.mllib.util.Saveable import org.apache.spark.SparkContext @Experimental class MLPModel( val innerLayers: Array[Layer], val dropout: Array[Double]) extends Saveable with Logging with Serializable { def this(topology: Array[Int], inputDropout: Double, hiddenDropout: Double) { this(MLP.initLayers(topology), MLP.initDropout(topology.length - 1, Array(hiddenDropout, inputDropout))) } def this(topology: Array[Int]) { this(MLP.initLayers(topology), MLP.initDropout(topology.length - 1, Array(0.2, 0.5))) } require(innerLayers.length > 0) require(dropout.forall(t => t >= 0 && t < 1)) require(dropout.last == 0D) require(innerLayers.length == dropout.length) @transient protected lazy val rand: Random = new Random() def topology: Array[Int] = { val topology = new Array[Int](numLayer + 1) topology(0) = numInput for (i <- 1 to numLayer) { topology(i) = innerLayers(i - 1).numOut } topology } def numLayer: Int = innerLayers.length def numInput: Int = innerLayers.head.numIn def numOut: Int = innerLayers.last.numOut def predict(x: BDM[Double]): BDM[Double] = { var output = x for (layer <- 0 until numLayer) { output = innerLayers(layer).forward(output) val dropoutRate = dropout(layer) if (dropoutRate > 0D) { output :*= (1D - dropoutRate) } } output } protected[ml] def computeDelta( x: BDM[Double], label: BDM[Double]): (Array[BDM[Double]], Array[BDM[Double]]) = { val batchSize = x.cols val out = new Array[BDM[Double]](numLayer) val delta = new Array[BDM[Double]](numLayer) val dropOutMasks: Array[BDM[Double]] = dropOutMask(batchSize) for (layer <- 0 until numLayer) { val output = innerLayers(layer).forward(if (layer == 0) x else out(layer - 1)) if (dropOutMasks(layer) != null) { assert(output.rows == dropOutMasks(layer).rows) output :*= dropOutMasks(layer) } out(layer) = output } for (layer <- (0 until numLayer).reverse) { val output = out(layer) val currentLayer = innerLayers(layer) delta(layer) = if (layer == numLayer - 1) { currentLayer.outputError(output, label) } else { val nextLayer = innerLayers(layer + 1) val nextDelta = delta(layer + 1) nextLayer.previousError(output, currentLayer, nextDelta) } if (dropOutMasks(layer) != null) { delta(layer) :*= dropOutMasks(layer) } } (out, delta) } protected[ml] def computeGradient( x: BDM[Double], label: BDM[Double], epsilon: Double = 0.0): (Array[(BDM[Double], BDV[Double])], Double, Double) = { var input = x var (out, delta) = computeDelta(x, label) // Improving Back-Propagation by Adding an Adversarial Gradient // URL: 
http://arxiv.org/abs/1510.04189 if (epsilon > 0.0) { var sign: BDM[Double] = innerLayers.head.weight.t * delta.head sign = signum(sign) sign :*= epsilon sign :+= x val t = computeDelta(sign, label) out = t._1 delta = t._2 input = sign } val grads = computeGradientGivenDelta(input, out, delta) val cost = if (innerLayers.last.layerType == "SoftMax") { NNUtil.crossEntropy(out.last, label) } else { NNUtil.meanSquaredError(out.last, label) } (grads, cost, input.cols.toDouble) } protected[ml] def computeGradientGivenDelta( x: BDM[Double], out: Array[BDM[Double]], delta: Array[BDM[Double]]): Array[(BDM[Double], BDV[Double])] = { val grads = new Array[(BDM[Double], BDV[Double])](numLayer) for (i <- 0 until numLayer) { val input = if (i == 0) x else out(i - 1) grads(i) = innerLayers(i).backward(input, delta(i)) } grads } protected[ml] def dropOutMask(cols: Int): Array[BDM[Double]] = { val masks = new Array[BDM[Double]](numLayer) for (layer <- 0 until numLayer) { val dropoutRate = dropout(layer) masks(layer) = if (dropoutRate > 0) { val rows = innerLayers(layer).numOut val mask = BDM.zeros[Double](rows, cols) for (i <- 0 until rows) { for (j <- 0 until cols) { mask(i, j) = if (rand.nextDouble() > dropoutRate) 1D else 0D } } mask } else { null } } masks } protected[ml] def assign(newNN: MLPModel): MLPModel = { innerLayers.zip(newNN.innerLayers).foreach { case (oldLayer, newLayer) => oldLayer.weight := newLayer.weight oldLayer.bias := newLayer.bias } this } def setSeed(seed: Long): Unit = { rand.setSeed(seed) } protected def formatVersion: String = MLP.SaveLoadV1_0.formatVersionV1_0 override def save(sc: SparkContext, path: String): Unit = { MLP.SaveLoadV1_0.save(sc, path, this) } override def equals(other: Any): Boolean = other match { case m: MLPModel => innerLayers.sameElements(m.innerLayers) && dropout.sameElements(m.dropout) case _ => false } override def hashCode: Int = { var result: Int = 1 for (element <- innerLayers) { result = 31 * result + (if (element == null) 0 else element.hashCode) } for (element <- dropout) { result = 31 * result + element.hashCode } return result } }
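// --- Hypothetical usage sketch (editor's example, not part of the original file) ---
// A 4-8-3 network scoring a batch of 10 examples; predict expects one example per
// column, with numInput rows. The topology and batch shape are assumptions.
object MLPModelExample {
  def run(): BDM[Double] = {
    val model = new MLPModel(Array(4, 8, 3)) // default dropout settings
    val batch = BDM.rand(4, 10)              // 4 features x 10 examples
    model.predict(batch)                     // 3 x 10 output matrix
  }
}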
witgo/zen
ml/src/main/scala/com/github/cloudml/zen/ml/neuralNetwork/MLPModel.scala
Scala
apache-2.0
6,392
package kartograffel.server import java.time._ import eu.timepit.refined.api.Refined import eu.timepit.refined.collection.NonEmpty import eu.timepit.refined.scalacheck.any.arbitraryFromValidate import eu.timepit.refined.types.string.NonEmptyString import kartograffel.server.domain.model._ import kartograffel.shared.domain.model.Position import org.scalacheck.derive.MkArbitrary import org.scalacheck.{Arbitrary, Gen} import eu.timepit.refined.scalacheck.numeric._ import scala.annotation.tailrec object ArbitraryInstances { implicit lazy val localDateArbitrary: Arbitrary[LocalDate] = Arbitrary { for { year <- Gen.chooseNum(1, 9999) month <- Gen.chooseNum(1, 12) maxDaysInMonth = Month.of(month).length(Year.of(year).isLeap) day <- Gen.chooseNum(1, maxDaysInMonth) } yield LocalDate.of(year, month, day) } implicit lazy val localTimeArbitrary: Arbitrary[LocalTime] = Arbitrary { for { hour <- Gen.chooseNum(0, 23) minute <- Gen.chooseNum(0, 59) second <- Gen.chooseNum(0, 59) nanoOfSecond <- Gen.chooseNum(0, 999999999) } yield LocalTime.of(hour, minute, second, nanoOfSecond) } implicit lazy val localDateTimeArbitrary: Arbitrary[LocalDateTime] = Arbitrary { for { date <- localDateArbitrary.arbitrary time <- localTimeArbitrary.arbitrary } yield LocalDateTime.of(date, time) } implicit lazy val distanceUnit: Arbitrary[DistanceUnit] = Arbitrary { Gen.oneOf(meter, kilometer) } implicit lazy val nonEmptyStringArbitrary: Arbitrary[NonEmptyString] = arbitraryFromValidate[Refined, String, NonEmpty] implicit lazy val graffelIdArbitrary: Arbitrary[GraffelId] = MkArbitrary[GraffelId].arbitrary implicit lazy val graffelArbitrary: Arbitrary[Graffel] = MkArbitrary[Graffel].arbitrary implicit lazy val positionArbitrary: Arbitrary[Position] = MkArbitrary[Position].arbitrary implicit lazy val radiusArbitrary: Arbitrary[Radius] = MkArbitrary[Radius].arbitrary implicit lazy val tagIdArbitrary: Arbitrary[TagId] = MkArbitrary[TagId].arbitrary implicit lazy val tagArbitrary: Arbitrary[Tag] = MkArbitrary[Tag].arbitrary @tailrec def sampleOf[T](implicit ev: Arbitrary[T]): T = ev.arbitrary.sample match { case Some(t) => t case None => sampleOf[T] } }
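// --- Hypothetical usage sketch (editor's example, not part of the original file) ---
// Pulling concrete values out of the implicit Arbitrary instances defined above;
// sampleOf retries until the generator produces a sample.
object ArbitraryInstancesExample {
  import ArbitraryInstances._

  val date: LocalDate = sampleOf[LocalDate] // any valid calendar date, years 1-9999
  val tag: Tag = sampleOf[Tag]              // derived via MkArbitrary
}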
fthomas/kartograffel
modules/server/jvm/src/test/scala/kartograffel/server/ArbitraryInstances.scala
Scala
apache-2.0
2,388
package dotty.tools package dotc package typer import ast.{tpd, untpd} import ast.Trees._ import core._ import util.SimpleMap import Symbols._, Names._, Denotations._, Types._, Contexts._, StdNames._, Flags._ import Decorators.StringInterpolators object ImportInfo { /** The import info for a root import from given symbol `sym` */ def rootImport(sym: Symbol)(implicit ctx: Context) = { val expr = tpd.Ident(sym.valRef) val selectors = untpd.Ident(nme.WILDCARD) :: Nil val imp = tpd.Import(expr, selectors) new ImportInfo(imp.symbol, selectors, isRootImport = true) } } /** Info relating to an import clause * @param sym The import symbol defined by the clause * @param selectors The selector clauses * @param rootImport true if this is one of the implicit imports of scala, java.lang * or Predef in the start context, false otherwise. */ class ImportInfo(val sym: Symbol, val selectors: List[untpd.Tree], val isRootImport: Boolean = false)(implicit ctx: Context) { /** The (TermRef) type of the qualifier of the import clause */ def site(implicit ctx: Context): Type = { val ImportType(expr) = sym.info expr.tpe } /** The names that are excluded from any wildcard import */ def excluded: Set[TermName] = { ensureInitialized(); myExcluded } /** A mapping from renamed to original names */ def reverseMapping: SimpleMap[TermName, TermName] = { ensureInitialized(); myMapped } /** The original names imported by-name before renaming */ def originals: Set[TermName] = { ensureInitialized(); myOriginals } /** Does the import clause end with wildcard? */ def isWildcardImport = { ensureInitialized(); myWildcardImport } private var myExcluded: Set[TermName] = null private var myMapped: SimpleMap[TermName, TermName] = null private var myOriginals: Set[TermName] = null private var myWildcardImport: Boolean = false /** Compute info relating to the selector list */ private def ensureInitialized(): Unit = if (myExcluded == null) { myExcluded = Set() myMapped = SimpleMap.Empty myOriginals = Set() def recur(sels: List[untpd.Tree]): Unit = sels match { case sel :: sels1 => sel match { case Pair(Ident(name: TermName), Ident(nme.WILDCARD)) => myExcluded += name case Pair(Ident(from: TermName), Ident(to: TermName)) => myMapped = myMapped.updated(to, from) myOriginals += from case Ident(nme.WILDCARD) => myWildcardImport = true case Ident(name: TermName) => myMapped = myMapped.updated(name, name) myOriginals += name } recur(sels1) case nil => } recur(selectors) } /** The implicit references imported by this import clause */ def importedImplicits: List[TermRef] = { val pre = site if (isWildcardImport) { val refs = pre.implicitMembers if (excluded.isEmpty) refs else refs filterNot (ref => excluded contains ref.name.toTermName) } else for { renamed <- reverseMapping.keys denot <- pre.member(reverseMapping(renamed)).altsWith(_ is Implicit) } yield TermRef.withSigAndDenot(pre, renamed, denot.signature, denot) } /** The root import symbol hidden by this symbol, or NoSymbol if no such symbol is hidden. * Note: this computation needs to work even for un-initialized import infos, and * is not allowed to force initialization. 
*/ lazy val hiddenRoot: Symbol = { val sym = site.termSymbol def hasMaskingSelector = selectors exists { case Pair(_, Ident(nme.WILDCARD)) => true case _ => false } if ((defn.RootImports contains sym) && hasMaskingSelector) sym else NoSymbol } override def toString = { val siteStr = site.show val exprStr = if (siteStr endsWith ".type") siteStr dropRight 5 else siteStr val selectorStr = selectors match { case Ident(name) :: Nil => name.show case _ => "{...}" } i"import $exprStr.$selectorStr" } }
AlexSikia/dotty
src/dotty/tools/dotc/typer/ImportInfo.scala
Scala
bsd-3-clause
4,067
// Copyright 2012 Foursquare Labs Inc. All Rights Reserved package io.fsq.twofishes.server import io.fsq.twitter.ostrich.stats.Stats import io.fsq.twofishes.util.{NameNormalizer, TwofishesLogger} case class ParseParams( tokens: List[String] = Nil, originalTokens: List[String] = Nil, connectorStart: Int = 0, connectorEnd: Int = 0, hadConnector: Boolean = false, spaceAtEnd: Boolean = false ) class QueryParser(logger: TwofishesLogger) { def parseQuery(query: String): ParseParams = { val normalizedQuery = NameNormalizer.normalize(query) logger.ifDebug("%s --> %s", query, normalizedQuery) val originalTokens = NameNormalizer.tokenize(normalizedQuery) parseQueryTokens( originalTokens, spaceAtEnd = query.endsWith(" ") ) } def parseQueryTokens(originalTokens: List[String], spaceAtEnd: Boolean = false): ParseParams = { logger.ifDebug("--> %s", originalTokens.mkString("_|_")) // This is awful connector parsing val connectorStart = originalTokens.indexOf("near") val connectorEnd = connectorStart val hadConnector = connectorStart != -1 val tokens = if (hadConnector) { originalTokens.drop(connectorEnd + 1) } else { originalTokens } // if there are too many tokens, take as many as are allowed and continue Stats.addMetric("query_length", originalTokens.size) if (hadConnector) { Stats.addMetric("query_length_after_connector_parsing", tokens.size) } val finalTokens = if (tokens.size > GeocodeServerConfigSingleton.config.maxTokens) { Stats.incr("too_many_tokens", 1) tokens.take(GeocodeServerConfigSingleton.config.maxTokens) } else { tokens } if (originalTokens.contains("http")) { throw new Exception("don't support url queries") } ParseParams( tokens = finalTokens, originalTokens = originalTokens, connectorStart = connectorStart, connectorEnd = connectorEnd, hadConnector = hadConnector, spaceAtEnd = spaceAtEnd ) } }
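// --- Hypothetical usage sketch (editor's example, not part of the original file) ---
// The "near" connector splits the query, and only the tokens after it are kept for
// geocoding. Constructing a TwofishesLogger and initializing the server config
// singleton are omitted, so expected values are shown as comments only:
//
//   val params = new QueryParser(logger).parseQueryTokens(List("pizza", "near", "soho"))
//   // params.hadConnector == true, params.connectorStart == 1
//   // params.tokens == List("soho"); params.originalTokens keeps all three tokens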
foursquare/fsqio
src/jvm/io/fsq/twofishes/server/QueryParser.scala
Scala
apache-2.0
2,046
package co.ledger.wallet.web.ripple import biz.enef.angulate.ext.{Route, RouteProvider} /** * * Routes * ledger-wallet-ripple-chrome * * Created by Pierre Pollastri on 03/05/2016. * * The MIT License (MIT) * * Copyright (c) 2016 Ledger * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. * */ object Routes { def declare($routeProvider: RouteProvider) = { $routeProvider .when("/onboarding/launch/:discover/:animated?", Route(templateUrl = "/templates/onboarding/launch.html")) .when("/onboarding/linux/:animated?", Route(templateUrl = "/templates/onboarding/linux.html")) .when("/onboarding/opening/", Route(templateUrl = "/templates/onboarding/opening.html")) .when("/onboarding/split-disclaimer/:balance", Route(templateUrl = "/templates/onboarding/split-disclaimer.html")) .when("/onboarding/update", Route(templateUrl = "/templates/onboarding/update.html")) .when("/onboarding/download/:tag", Route(templateUrl = "/templates/onboarding/download.html")) .when("/account/:id", Route(templateUrl = "/templates/wallet/account.html")) .when("/send", Route(templateUrl = "/templates/wallet/send/index.html")) .when("/send/:amount/to/:recipient/from/:account_id/with/:fee/tag/:tag?", Route(templateUrl = "/templates/wallet/send/perform.html")) .when("/receive", Route(templateUrl = "/templates/wallet/receive.html")) .when("/help", Route(templateUrl = "/templates/wallet/help.html")) .otherwise(Route(redirectTo = "/onboarding/launch/0")) } }
LedgerHQ/ledger-wallet-ripple
src/main/scala/co/ledger/wallet/web/ripple/Routes.scala
Scala
mit
2,595
package hr.fer.ztel.thesis.spark import java.time.Instant import hr.fer.ztel.thesis.measure.ItemPairSimilarityMeasure import hr.fer.ztel.thesis.measure.ItemPairSimilarityMeasure._ import org.apache.spark.sql.SparkSession class SparkSessionHandler(args: Array[String]) extends Serializable { if (args.length != 8) { println("Wrong args, should be: [folder] [user-item] [item-item] [measure] [normalize] [output] [k] [blocksize]") System exit 1 } val folder: String = if (!args(0).endsWith("/")) args(0) + "/" else args(0) val userItemPath: String = folder + args(1) val itemItemPath: String = folder + args(2) val measureStr: String = args(3).toLowerCase val normalize: Boolean = args(4).toLowerCase.toBoolean val measure: ItemPairSimilarityMeasure = parseMeasure(measureStr).get val time: String = Instant.ofEpochMilli(System.currentTimeMillis) .toString.replace(":", "-").replace(".", "-") val recommendationsPath: String = folder + args(5) + "_" + time val topK: Int = args(6).toInt val usersSizePath: String = userItemPath + ".lookup.size" val itemsSizePath: String = itemItemPath + ".lookup.size" val blockSize: Int = args(7).toInt lazy val getSparkSession: SparkSession = { val sparkBuilder = if (folder.startsWith("hdfs:///")) SparkSession.builder else SparkSession.builder.master("local[*]") sparkBuilder .appName("SparkRecommenderThesis") .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config("spark.kryo.registrator", "hr.fer.ztel.thesis.spark.SparkKryoRegistrator") .getOrCreate } }
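// --- Hypothetical usage sketch (editor's example, not part of the original file) ---
// The eight positional arguments, in order. The measure name "cosine" is an
// assumption; valid names depend on ItemPairSimilarityMeasure.parseMeasure.
//
//   val handler = new SparkSessionHandler(Array(
//     "hdfs:///data", "user-item.csv", "item-item.csv",
//     "cosine", "true", "recommendations", "10", "1024"))
//   val spark = handler.getSparkSession // local[*] master unless the folder is on HDFS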
fpopic/master_thesis
src/main/scala/hr/fer/ztel/thesis/spark/SparkSessionHandler.scala
Scala
mit
1,614
package controllers import models.Address import org.jsoup.Jsoup import org.mockito.Matchers import org.mockito.Mockito.{reset, times, verify, when} import org.scalatest.BeforeAndAfterEach import org.scalatest.mock.MockitoSugar import org.scalatestplus.play.{OneServerPerSuite, PlaySpec} import play.api.i18n.Messages import play.api.mvc.Results import play.api.test.FakeRequest import play.api.test.Helpers._ import service.ReactiveMongoService import scala.concurrent.Future class AddressControllerSpec extends PlaySpec with OneServerPerSuite with MockitoSugar with BeforeAndAfterEach { trait TestAddressService extends ReactiveMongoService[Address] { override def collectionName: String = "address" override val formats = Address.formats } val mockAddressService: ReactiveMongoService[Address] = mock[TestAddressService] val testAddresses = List[Address](Address("Line_1", "line_2", None, None, "110006", "India")) object TestAddressController extends AddressController { override val addressService = mockAddressService } override def beforeEach: Unit = { reset(mockAddressService) } "AddressController" must { "respond to /address-details" in { val result = route(FakeRequest(GET, s"/play-scala/address-details")).get status(result) must not be (NOT_FOUND) } "addressDetails" must { "respond with OK" in { when(mockAddressService.fetchAll).thenReturn(Future.successful(testAddresses)) val result = TestAddressController.addressDetails().apply(FakeRequest()) status(result) must be(OK) verify(mockAddressService, times(1)).fetchAll } "must contain \"Hello World page\" as heading 1" in { when(mockAddressService.fetchAll).thenReturn(Future.successful(testAddresses)) val result = TestAddressController.addressDetails().apply(FakeRequest()) val document = Jsoup.parse(contentAsString(result)) document.title() must be(Messages("hello.title")) document.getElementById("header-1").text() must be("Hello World page") verify(mockAddressService, times(1)).fetchAll } } "submit" must { "form validation - for invalid data" must { "respond with bad request and respective errors on pages" in { when(mockAddressService.fetchAll).thenReturn(Future.successful(testAddresses)) val result = TestAddressController.submit.apply(FakeRequest()) status(result) must be(BAD_REQUEST) val document = Jsoup.parse(contentAsString(result)) document.select(".error").text() must include("This field is required") verify(mockAddressService, times(1)).fetchAll } } "form validation - for valid data" must { "respond with redirect for successful save in mongo" in { when(mockAddressService.create(Matchers.any())).thenReturn(Future.successful(Results.Status(CREATED))) when(mockAddressService.fetchAll).thenReturn(Future.successful(testAddresses)) val result = TestAddressController.submit.apply(FakeRequest() .withFormUrlEncodedBody("line1" -> "ABC", "line2" -> "line 2", "postcode" -> "110085", "country" -> "India")) status(result) must be(SEE_OTHER) redirectLocation(result).get must be("/play-scala/contact-details") verify(mockAddressService, times(1)).create(Matchers.any()) } "respond with redirect to address-details page for invalid json in mongo" in { when(mockAddressService.create(Matchers.any())).thenReturn(Future.successful(Results.Status(BAD_REQUEST))) when(mockAddressService.fetchAll).thenReturn(Future.successful(testAddresses)) val result = TestAddressController.submit.apply(FakeRequest() .withFormUrlEncodedBody("line1" -> "ABC", "line2" -> "line 2", "postcode" -> "110085", "country" -> "India")) status(result) must be(SEE_OTHER)
redirectLocation(result).get must be("/play-scala/address-details") verify(mockAddressService, times(1)).create(Matchers.any()) } "respond with redirect to address-details page for any other status" in { when(mockAddressService.create(Matchers.any())).thenReturn(Future.successful(Results.Status(BAD_GATEWAY))) when(mockAddressService.fetchAll).thenReturn(Future.successful(testAddresses)) val result = TestAddressController.submit.apply(FakeRequest() .withFormUrlEncodedBody("line1" -> "ABC", "line2" -> "line 2", "postcode" -> "110085", "country" -> "India")) status(result) must be(SEE_OTHER) redirectLocation(result).get must be("/play-scala/address-details") verify(mockAddressService, times(1)).create(Matchers.any()) } } } } }
avinash-anand/PlayScala1
test/controllers/AddressControllerSpec.scala
Scala
gpl-3.0
4,831