code | repo_name | path | language | license | size
---|---|---|---|---|---
package org.tensorframes
import org.apache.spark.sql.types._
import org.tensorframes.impl.{ScalarType, SupportedOperations}
class ColumnInformation private (
val field: StructField,
val stf: Option[SparkTFColInfo]) extends Serializable {
import MetadataConstants._
def columnName: String = field.name
def merged: StructField = {
val b = new MetadataBuilder().withMetadata(field.metadata)
for (info <- stf) {
b.putLongArray(shapeKey, info.shape.dims.toArray)
// Keep the SQL name, so that we do not leak internal details.
val dt = SupportedOperations.opsFor(info.dataType).sqlType
b.putString(tensorStructType, dt.toString)
}
val meta = b.build()
field.copy(metadata = meta)
}
override def equals(o: Any): Boolean = o match {
case ci: ColumnInformation => ci.field == field && ci.stf == stf
case _ => false
}
override def hashCode: Int = {
field.hashCode * 31 + stf.hashCode()
}
}
object ColumnInformation extends Logging {
import MetadataConstants._
import Shape.Unknown
/**
* Reads the metadata encoded in the field information. If that metadata is missing,
* it extracts what it can from the structure of the data type.
*/
def apply(field: StructField): ColumnInformation = {
val meta = extract(field.metadata).orElse {
// Do not support nullable for now.
if (field.nullable) {
// TODO switch back
// None
extractFromRow(field.dataType)
} else {
extractFromRow(field.dataType)
}
}
new ColumnInformation(field, meta)
}
def apply(field: StructField, info: SparkTFColInfo): ColumnInformation = {
new ColumnInformation(field, Some(info))
}
def apply(field: StructField, info: Option[SparkTFColInfo]): ColumnInformation = {
new ColumnInformation(field, info)
}
def unapply(x: ColumnInformation): Option[(StructField, Option[SparkTFColInfo])] = {
Some((x.field, x.stf))
}
/**
* Returns a struct field with all the relevant information about shape filled out in the
* metadata.
*
* @param name the name of the field
* @param scalarType the data type
* @param blockShape the shape of the block
*/
def structField(name: String, scalarType: ScalarType, blockShape: Shape): StructField = {
val i = SparkTFColInfo(blockShape, scalarType)
val f = StructField(name, sqlType(scalarType, blockShape.tail), nullable = false)
ColumnInformation(f, i).merged
}
private def sqlType(scalarType: ScalarType, shape: Shape): DataType = {
if (shape.dims.isEmpty) {
SupportedOperations.opsFor(scalarType).sqlType
} else {
ArrayType(sqlType(scalarType, shape.tail), containsNull = false)
}
}
private def extract(meta: Metadata): Option[SparkTFColInfo] = {
// Try to read the metadata information.
val shape = if (meta.contains(shapeKey)) {
Option(meta.getLongArray(shapeKey)).map(Shape.apply)
} else {
None
}
val tpe = if (meta.contains(tensorStructType)) {
Option(meta.getString(tensorStructType)).flatMap(getType)
} else {
None
}
for {
s <- shape
t <- tpe
ops <- SupportedOperations.getOps(t)
} yield SparkTFColInfo(s, ops.scalarType)
}
private def getType(s: String): Option[DataType] = {
val res = supportedTypes.find(_.toString == s)
logInfo(s"getType: $s -> $res")
res
}
/**
* Tries to extract information about the type from the data type.
*
* @return the extracted column information, or None if the data type is not supported.
*/
private def extractFromRow(dt: DataType): Option[SparkTFColInfo] = dt match {
case x: ArrayType =>
logTrace("arraytype: " + x)
// Look into the array to figure out the type.
extractFromRow(x.elementType).map { info =>
SparkTFColInfo(info.shape.prepend(Unknown), info.dataType)
}
case _ => SupportedOperations.getOps(dt) match {
case Some(ops) =>
logTrace("numerictype: " + ops.scalarType)
// It is a basic type that we understand
Some(SparkTFColInfo(Shape(Unknown), ops.scalarType))
case None => None
}
}
}
databricks/tensorframes | src/main/scala/org/tensorframes/ColumnInformation.scala | Scala | apache-2.0 | 4,125
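A minimal usage sketch of the metadata round-trip defined above. It assumes the varargs Shape factory seen in Shape(Unknown), and takes the ScalarType as a parameter rather than naming a concrete member; only structField and apply from this file are exercised.

// Sketch: encode a block shape into a StructField's metadata, then read it back.
// Shape(Shape.Unknown, 3L) marks the leading (row) dimension as variable,
// matching what extractFromRow produces for arrays.
def roundTrip(scalarType: ScalarType): Option[SparkTFColInfo] = {
  val field = ColumnInformation.structField("x", scalarType, Shape(Shape.Unknown, 3L))
  ColumnInformation(field).stf // Some(SparkTFColInfo(shape, scalarType))
}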
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import java.util.Properties
import kafka.utils.TestUtils._
import kafka.utils.{IntEncoder, CoreUtils, TestUtils}
import kafka.zk.ZooKeeperTestHarness
import kafka.common._
import kafka.producer.{KeyedMessage, Producer}
import kafka.serializer.StringEncoder
import java.io.File
import org.scalatest.junit.JUnit3Suite
import org.junit.Assert._
class LogRecoveryTest extends JUnit3Suite with ZooKeeperTestHarness {
val replicaLagTimeMaxMs = 5000L
val replicaLagMaxMessages = 10L
val replicaFetchWaitMaxMs = 1000
val replicaFetchMinBytes = 20
val overridingProps = new Properties()
overridingProps.put(KafkaConfig.ReplicaLagTimeMaxMsProp, replicaLagTimeMaxMs.toString)
overridingProps.put(KafkaConfig.ReplicaFetchWaitMaxMsProp, replicaFetchWaitMaxMs.toString)
overridingProps.put(KafkaConfig.ReplicaFetchMinBytesProp, replicaFetchMinBytes.toString)
var configs: Seq[KafkaConfig] = null
val topic = "new-topic"
val partitionId = 0
var server1: KafkaServer = null
var server2: KafkaServer = null
def configProps1 = configs.head
def configProps2 = configs.last
val message = "hello"
var producer: Producer[Int, String] = null
def hwFile1: OffsetCheckpoint = new OffsetCheckpoint(new File(configProps1.logDirs(0), ReplicaManager.HighWatermarkFilename))
def hwFile2: OffsetCheckpoint = new OffsetCheckpoint(new File(configProps2.logDirs(0), ReplicaManager.HighWatermarkFilename))
var servers: Seq[KafkaServer] = Seq.empty[KafkaServer]
// Some tests restart the brokers then produce more data. But since test brokers use random ports, we need
// to use a new producer that knows the new ports
def updateProducer() = {
if (producer != null)
producer.close()
producer = TestUtils.createProducer[Int, String](TestUtils.getBrokerListStrFromServers(servers),
encoder = classOf[StringEncoder].getName,
keyEncoder = classOf[IntEncoder].getName)
}
override def setUp() {
super.setUp()
configs = TestUtils.createBrokerConfigs(2, zkConnect, false).map(KafkaConfig.fromProps(_, overridingProps))
// start both servers
server1 = TestUtils.createServer(configProps1)
server2 = TestUtils.createServer(configProps2)
servers = List(server1, server2)
// create topic with 1 partition, 2 replicas, one on each broker
createTopic(zkClient, topic, partitionReplicaAssignment = Map(0->Seq(0,1)), servers = servers)
// create the producer
updateProducer()
}
override def tearDown() {
producer.close()
for(server <- servers) {
server.shutdown()
CoreUtils.rm(server.config.logDirs(0))
}
super.tearDown()
}
def testHWCheckpointNoFailuresSingleLogSegment {
val numMessages = 2L
sendMessages(numMessages.toInt)
// give some time for the follower 1 to record leader HW
TestUtils.waitUntilTrue(() =>
server2.replicaManager.getReplica(topic, 0).get.highWatermark.messageOffset == numMessages,
"Failed to update high watermark for follower after timeout")
servers.foreach(server => server.replicaManager.checkpointHighWatermarks())
val leaderHW = hwFile1.read.getOrElse(TopicAndPartition(topic, 0), 0L)
assertEquals(numMessages, leaderHW)
val followerHW = hwFile2.read.getOrElse(TopicAndPartition(topic, 0), 0L)
assertEquals(numMessages, followerHW)
}
def testHWCheckpointWithFailuresSingleLogSegment {
var leader = waitUntilLeaderIsElectedOrChanged(zkClient, topic, partitionId)
assertEquals(0L, hwFile1.read.getOrElse(TopicAndPartition(topic, 0), 0L))
sendMessages(1)
Thread.sleep(1000)
var hw = 1L
// kill the server hosting the preferred replica
server1.shutdown()
assertEquals(hw, hwFile1.read.getOrElse(TopicAndPartition(topic, 0), 0L))
// check if leader moves to the other server
leader = waitUntilLeaderIsElectedOrChanged(zkClient, topic, partitionId, oldLeaderOpt = leader)
assertEquals("Leader must move to broker 1", 1, leader.getOrElse(-1))
// bring the preferred replica back
server1.startup()
// Update producer with new server settings
updateProducer()
leader = waitUntilLeaderIsElectedOrChanged(zkClient, topic, partitionId)
assertTrue("Leader must remain on broker 1, in case of zookeeper session expiration it can move to broker 0",
leader.isDefined && (leader.get == 0 || leader.get == 1))
assertEquals(hw, hwFile1.read.getOrElse(TopicAndPartition(topic, 0), 0L))
// since server 2 was never shut down, the hw value of 1 is probably not checkpointed to disk yet
server2.shutdown()
assertEquals(hw, hwFile2.read.getOrElse(TopicAndPartition(topic, 0), 0L))
server2.startup()
updateProducer()
leader = waitUntilLeaderIsElectedOrChanged(zkClient, topic, partitionId, oldLeaderOpt = leader)
assertTrue("Leader must remain on broker 0, in case of zookeeper session expiration it can move to broker 1",
leader.isDefined && (leader.get == 0 || leader.get == 1))
sendMessages(1)
hw += 1
// give some time for follower 1 to record leader HW of 2
TestUtils.waitUntilTrue(() =>
server2.replicaManager.getReplica(topic, 0).get.highWatermark.messageOffset == hw,
"Failed to update high watermark for follower after timeout")
// shutdown the servers to allow the hw to be checkpointed
servers.foreach(server => server.shutdown())
assertEquals(hw, hwFile1.read.getOrElse(TopicAndPartition(topic, 0), 0L))
assertEquals(hw, hwFile2.read.getOrElse(TopicAndPartition(topic, 0), 0L))
}
def testHWCheckpointNoFailuresMultipleLogSegments {
sendMessages(20)
val hw = 20L
// give some time for follower 1 to record leader HW of 20
TestUtils.waitUntilTrue(() =>
server2.replicaManager.getReplica(topic, 0).get.highWatermark.messageOffset == hw,
"Failed to update high watermark for follower after timeout")
// shutdown the servers to allow the hw to be checkpointed
servers.foreach(server => server.shutdown())
val leaderHW = hwFile1.read.getOrElse(TopicAndPartition(topic, 0), 0L)
assertEquals(hw, leaderHW)
val followerHW = hwFile2.read.getOrElse(TopicAndPartition(topic, 0), 0L)
assertEquals(hw, followerHW)
}
def testHWCheckpointWithFailuresMultipleLogSegments {
var leader = waitUntilLeaderIsElectedOrChanged(zkClient, topic, partitionId)
sendMessages(2)
var hw = 2L
// allow some time for the follower to get the leader HW
TestUtils.waitUntilTrue(() =>
server2.replicaManager.getReplica(topic, 0).get.highWatermark.messageOffset == hw,
"Failed to update high watermark for follower after timeout")
// kill the server hosting the preferred replica
server1.shutdown()
server2.shutdown()
assertEquals(hw, hwFile1.read.getOrElse(TopicAndPartition(topic, 0), 0L))
assertEquals(hw, hwFile2.read.getOrElse(TopicAndPartition(topic, 0), 0L))
server2.startup()
updateProducer()
// check if leader moves to the other server
leader = waitUntilLeaderIsElectedOrChanged(zkClient, topic, partitionId, oldLeaderOpt = leader)
assertEquals("Leader must move to broker 1", 1, leader.getOrElse(-1))
assertEquals(hw, hwFile1.read.getOrElse(TopicAndPartition(topic, 0), 0L))
// bring the preferred replica back
server1.startup()
updateProducer()
assertEquals(hw, hwFile1.read.getOrElse(TopicAndPartition(topic, 0), 0L))
assertEquals(hw, hwFile2.read.getOrElse(TopicAndPartition(topic, 0), 0L))
sendMessages(2)
hw += 2
// allow some time for the follower to get the leader HW
TestUtils.waitUntilTrue(() =>
server1.replicaManager.getReplica(topic, 0).get.highWatermark.messageOffset == hw,
"Failed to update high watermark for follower after timeout")
// shutdown the servers to allow the hw to be checkpointed
servers.foreach(server => server.shutdown())
assertEquals(hw, hwFile1.read.getOrElse(TopicAndPartition(topic, 0), 0L))
assertEquals(hw, hwFile2.read.getOrElse(TopicAndPartition(topic, 0), 0L))
}
private def sendMessages(n: Int = 1) {
for(i <- 0 until n)
producer.send(new KeyedMessage[Int, String](topic, 0, message))
}
}
jhspaybar/kafka | core/src/test/scala/unit/kafka/server/LogRecoveryTest.scala | Scala | apache-2.0 | 9,013
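The high-watermark wait appears in every test above with the same shape; a helper like the following (a sketch using only APIs already present in this file) would factor it out:

// Sketch: block until the broker's replica of partition 0 reaches the expected HW.
def waitForHW(server: KafkaServer, expected: Long): Unit =
  TestUtils.waitUntilTrue(() =>
    server.replicaManager.getReplica(topic, 0).get.highWatermark.messageOffset == expected,
    s"High watermark did not reach $expected before timeout")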
package org.jetbrains.plugins.scala.failed.annotator
import org.jetbrains.plugins.scala.PerfCycleTests
import org.jetbrains.plugins.scala.javaHighlighting.JavaHighlitghtingTestBase
import org.junit.experimental.categories.Category
/**
* @author Alefas
* @since 23/03/16
*/
@Category(Array(classOf[PerfCycleTests]))
class JavaHighlightingTest extends JavaHighlitghtingTestBase {
def testSCL8982() = {
val scala =
"""
|object Foo {
| class Bar {
|
| }
|}
""".stripMargin
val java =
"""
|public class Main {
| public static void main(String[] args) {
| new Foo$Bar();
| }
|}
|
""".stripMargin
assertNothing(errorsFromJavaCode(scala, java, "Main"))
}
def testSCL9663B() = {
val scala =
"""
|class Foo(val cell: String) extends AnyVal {
| def foo(x: Int) = 123
|}
""".stripMargin
val java =
"""
|public class Test {
| public static void main(String[] args) {
| Foo$ foo = Foo$.MODULE$;
|
| foo.foo$extension("text", 1);
| }
|}
""".stripMargin
assertNothing(errorsFromJavaCode(scala, java, "Test"))
}
def testSCL7525() = {
val scala =
"""
|package SCL7525
|object Test {
| new Foo(new Foo.ArgsBar)
|}
""".stripMargin
val java =
"""
|package SCL7525;
|public class Foo {
| public Foo(Args a) { }
| public static class Args<T extends Args<T>> { }
| public static class ArgsBar extends Args<ArgsBar> { }
|}
""".stripMargin
assertNothing(errorsFromScalaCode(scala, java))
}
def testSCL9029() = {
val scala =
"""
|package scl9029
|import java.lang.invoke.{MethodHandles, MethodType}
|
|class SCL9029 {
| def a: Int = 5
|
| def b = {
| val mh = MethodHandles.publicLookup().findVirtual(
| classOf[A], "a", MethodType.methodType(classOf[Int])
| )
| val z: Int = mh.invokeExact(this)
| }
|}
""".stripMargin
val java =
"""
|package scl9029;
|public class Foo {
|}
""".stripMargin
assertNothing(errorsFromScalaCode(scala, java))
}
def testSCL6409() = {
val java =
"""
|public class JavaDummy<T> {
| public void method(JavaDummy<? super JavaDummy<?>> arg) {}
|}""".stripMargin
val scala =
"""
|class Inheritor extends JavaDummy[Int] {
| override def method(arg: JavaDummy[_ <: JavaDummy[_]]): Unit = super.method(arg)
|}""".stripMargin
assertNothing(errorsFromScalaCode(scala, java))
}
def testSCL7069() = {
val scala =
"""
|package z
|import z.Test.U
|
|class R {
| val u: U[Any] = new U[Any]
|
| Test.foo(u)
|}
""".stripMargin
val java =
"""
|package z;
|public class Test {
| public static class U<T> {
|
| }
|
| public static int foo(U<? extends Object> u) {
| return 1;
| }
|
| public static boolean foo(String s) {
| return false;
| }
|}
|
""".stripMargin
assertNothing(errorsFromScalaCode(scala, java))
}
def testSCL6114() = {
val scala =
"""
|package foo;
|
|package bar {
| class Test
|}
""".stripMargin
val java =
"""
|package foo;
|
|class A {
| public bar.Test something; // Test is red - cannot resolve symbol Test.
|}
""".stripMargin
assertNothing(errorsFromJavaCode(scala, java, "A"))
}
def testSCL9871(): Unit = {
val java =
"""
|package foo.object;
|
|public class Related
|{
| static String foo() { return "package scoped"; }
|}
""".stripMargin
val scala =
"""
|package foo.`object`
|
|import foo.`object`.Related // unneeded, but without this, Related is marked red below with message "cannot resolve symbol"
|
|object Escaping extends App {
| println(Related.foo) // foo is marked as red, with message "Symbol foo is inaccessible from this place"
|}
""".stripMargin
assertNothing(errorsFromScalaCode(scala, java))
}
def testSCL10150(): Unit = {
val java =
"""
|package jPack1.object;
|public class JClassFromObjPack { }
""".stripMargin
val scala =
"""
|package sPack1
|
|class SClass {
| def x: JClassFromObjPack = new JClassFromObjPack
|}
""".stripMargin
assertNothing(errorsFromScalaCode(scala, java))
}
def testSCL8639(): Unit = {
val java =
"""
|public abstract class Java<S> {
| public abstract class JavaInner {
| abstract void foo(S arg);
| }
|}
|
""".stripMargin
val scala =
"""
|class Scala extends Java[String]{
| val s = new JavaInner {
| override def foo(arg: String): Unit = {}
| }
|}
""".stripMargin
assertNothing(errorsFromScalaCode(scala, java))
}
def testSCL8759(): Unit = {
val java =
"""
|public class Foobar {
| public static void foo(Object something) {
| }
| public static <T extends Number> void foo(T something) {
| }
|}
""".stripMargin
val scala =
"""
|class ScClass {
| def method = {
| Foobar.foo("")
| Foobar.foo(java.lang.Integer.valueOf(1))
| }
|}
""".stripMargin
assertNothing(errorsFromScalaCode(scala, java))
}
def testSCL8666(): Unit = {
val java =
"""
|import scala.Function0;
|import scala.Function1;
|
|import java.util.concurrent.Callable;
|import java.util.function.Function;
|
|public class Lambdas {
|
| public static <A> A doIt(Callable<A> f) {
| System.out.println("callable");
| try {
| return f.call();
| } catch (final Exception ex) {
| throw new RuntimeException(ex);
| }
| }
|
| public static <A> A doIt(final Function0<A> f) {
| System.out.println("java_func");
| try {
| return f.apply();
| } catch (final Exception ex) {
| throw new RuntimeException(ex);
| }
| }
|
| public static void doIt(Runnable f) {
| System.out.println("runnable");
| try {
| f.run();
| } catch (final Exception ex) {
| throw new RuntimeException(ex);
| }
| }
|
| public static void main(final String... args) {
| final Lambdas l = new Lambdas();
| Lambdas.doIt(() -> {
| int x = 3;
| });
| Lambdas.doIt(() -> 24);
| }
|}
""".stripMargin
assertNothing(errorsFromJavaCode("", java, "Lambdas"))
}
def testClassParameterScala(): Unit = {
val scala =
"""
|class ScalaClass (var name: String, var surname: String)
|
|object Start {
| def main(args: Array[String]) {
| val scalaClassObj = new ScalaClass("Dom", "Sien")
| println(scalaClassObj.name)
| println(scalaClassObj.surname)
|
| val javaClassObj = new JavaClass("Dom2", "Sien2", 31)
| println(javaClassObj.name)
| println(javaClassObj.surname)
| println(javaClassObj.getAge)
| }
|}
""".stripMargin
val java =
"""
|public class JavaClass extends ScalaClass {
| private int age;
|
| public JavaClass(String name, String surname, int age) {
| super(name, surname);
| this.age = age;
| }
|
| public int getAge() {
| return age;
| }
|
| public void setAge(int age) {
| this.age = age;
| }
|}
""".stripMargin
assertNothing(errorsFromScalaCode(scala, java))
}
def testOptionApply(): Unit = {
val java =
"""
|import scala.Option;
|
|public abstract class OptionApply {
|
| public OptionApply() {
| setAction(Option.apply("importVCardFile"));
| }
|
| public abstract void setAction(Option<String> bar);
|}
""".stripMargin
assertNothing(errorsFromJavaCode(scalaFileText = "", java, javaClassName = "OptionApply"))
}
}
katejim/intellij-scala | test/org/jetbrains/plugins/scala/failed/annotator/JavaHighlightingTest.scala | Scala | apache-2.0 | 9,273
package report.donut.transformers.cucumber
import java.io.File
import org.json4s.DefaultFormats
import org.scalatest.{FlatSpec, Matchers}
import report.donut.DonutTestData
import report.donut.gherkin.model
import report.donut.gherkin.processors.JSONProcessor
import scala.collection.mutable.ListBuffer
class GroupByFeatureNameTest extends FlatSpec with Matchers {
implicit val formats = DefaultFormats
// BDD json files for same feature
private val sample4RootDir = List("src", "test", "resources", "samples-4").mkString("", File.separator, File.separator)
private val sample4Features = JSONProcessor.loadFrom(new File(sample4RootDir)).right.get.flatMap(f => f.extract[List[Feature]])
private val sample4DonutFeatures = CucumberTransformer.transform(sample4Features, DonutTestData.statusConfiguration).right.get
// Unit tests as BDD format json files
private val sample5RootDir = List("src", "test", "resources", "samples-5").mkString("", File.separator, File.separator)
private val sample5Features = JSONProcessor.loadFrom(new File(sample5RootDir)).right.get.flatMap(f => f.extract[List[Feature]])
// BDD and Unit test json files in BDD format, but with different feature names
private val sample6BDDRootDir = List("src", "test", "resources", "samples-6", "bdd").mkString("", File.separator, File.separator)
private val sample6BDDFeatures = JSONProcessor.loadFrom(new File(sample6BDDRootDir)).right.get.flatMap(f => f.extract[List[Feature]])
private val sample6BDDDonutFeatures = CucumberTransformer.transform(sample6BDDFeatures, DonutTestData.statusConfiguration).right.get
private val sample6UnitTestsRootDir = List("src", "test", "resources", "samples-6", "unit").mkString("", File.separator, File.separator)
private val sample6UnitTests = JSONProcessor.loadFrom(new File(sample6UnitTestsRootDir)).right.get.flatMap(f => f.extract[List[Feature]])
behavior of "Cucumber transformer - Group by feature name"
it should "group donut features by feature name while transforming the list of cucumber features" in {
sample4DonutFeatures.size shouldBe 1
sample4DonutFeatures.head.name shouldBe "Add numbers"
val expectedScenarioNames = List("Add two numbers: 1 and 2", "Only 1 number is provided", "Add four numbers: 1,2,5,10")
val scenarios = sample4DonutFeatures.head.scenarios
scenarios.size shouldBe 3
scenarios.map(s => s.name).sorted shouldBe expectedScenarioNames.sorted
}
it should "mapToDonutFeatures if a feature is split across multiple BDD json files" in {
val donutFeatures = CucumberTransformer.mapToDonutFeatures(sample4Features, new ListBuffer[model.Feature], DonutTestData.statusConfiguration)
val scenarios = donutFeatures.head.scenarios
sample4Features.size shouldBe 3
donutFeatures.size shouldBe 1
scenarios.size shouldBe 3
donutFeatures.head.index.toInt shouldBe 10000
for (o <- sample4Features) {
// actually assert the names match (a bare == here would discard its result)
o.name shouldBe donutFeatures.head.name
}
}
it should "mapToDonutFeatures if 1 feature is split across few BDD and unit test json files" in {
val generatedFeatures = CucumberTransformer.mapToDonutFeatures(sample5Features, sample4DonutFeatures, DonutTestData.statusConfiguration)
val scenarios = generatedFeatures.head.scenarios
generatedFeatures.size shouldBe 1
scenarios.size shouldBe 4
scenarios(3).keyword shouldBe "Unit Test"
scenarios(3).name should equal(sample5Features.head.elements.head.name)
}
it should "mapToDonutFeatures when there are few bdd json files and few unit test json files with a different feature name" in {
val generatedFeatures = CucumberTransformer.mapToDonutFeatures(sample6UnitTests, sample6BDDDonutFeatures, DonutTestData.statusConfiguration)
val nonBDDFeature = generatedFeatures(1)
val nonBDDScenario = nonBDDFeature.scenarios.head
val bddFeature = generatedFeatures.head
generatedFeatures.size shouldBe 2
bddFeature.name shouldBe "Add numbers"
bddFeature.index shouldBe "10000"
nonBDDFeature.name shouldBe "Without feature"
nonBDDFeature.index shouldBe "10001"
nonBDDScenario.name shouldBe "Add four numbers: 1,2,5,10"
nonBDDScenario.keyword shouldBe "Unit Test"
}
}
MagenTys/donut | src/test/scala/report/donut/transformers/cucumber/GroupByFeatureNameTest.scala | Scala | mit | 4,205
/*
* Copyright 2014 Kevin Herron
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.digitalpetri.ethernetip.cip.structs
import com.digitalpetri.ethernetip.cip.epath.PaddedEPath
import com.digitalpetri.ethernetip.util.Buffers
import io.netty.buffer.ByteBuf
import scala.util.Try
/**
* @param serviceCode Service code of the request.
* @param requestPath The request/application path.
* @param requestData Service specific data per object definition to be delivered in the Explicit Messaging Request.
* If no additional data is to be sent with the Explicit Messaging Request, then this array will be
* empty.
*/
case class MessageRouterRequest(serviceCode: Int, requestPath: PaddedEPath, requestData: ByteBuf)
object MessageRouterRequest {
def encode(request: MessageRouterRequest, buffer: ByteBuf = Buffers.unpooled()): ByteBuf = {
buffer.writeByte(request.serviceCode)
PaddedEPath.encode(request.requestPath, buffer)
buffer.writeBytes(request.requestData)
buffer
}
def decode(buffer: ByteBuf): MessageRouterRequest = {
MessageRouterRequest(
serviceCode = buffer.readUnsignedByte(),
requestPath = PaddedEPath.decode(buffer),
requestData = buffer.slice())
}
}
/**
* @param serviceCode The request service code + 0x80.
* @param generalStatus One of the general status codes defined in the CIP specification appendix B.
* @param additionalStatus Additional status codes.
* @param data Response data.
*/
case class MessageRouterResponse(serviceCode: Int,
generalStatus: Short,
additionalStatus: Seq[Short],
data: Option[ByteBuf])
object MessageRouterResponse {
def encode(response: MessageRouterResponse, buffer: ByteBuf = Buffers.unpooled()): ByteBuf = {
buffer.writeByte(response.serviceCode)
buffer.writeByte(0x00)
buffer.writeByte(response.generalStatus)
buffer.writeByte(response.additionalStatus.size)
response.additionalStatus.foreach(s => buffer.writeShort(s))
response.data.foreach(buffer.writeBytes)
buffer
}
def decode(buffer: ByteBuf): Try[MessageRouterResponse] = Try {
val replyService = buffer.readUnsignedByte()
val reserved = buffer.readByte()
val generalStatus = buffer.readUnsignedByte()
assert(reserved == 0)
def decodeAdditionalStatus(additional: List[Short], count: Int): List[Short] = {
if (count == 0) additional
else decodeAdditionalStatus(additional :+ buffer.readShort(), count - 1)
}
val additionalStatus = decodeAdditionalStatus(List.empty, buffer.readUnsignedByte())
val data: Option[ByteBuf] = {
if (buffer.readableBytes() == 0) None
else Some(buffer.readSlice(buffer.readableBytes()))
}
MessageRouterResponse(replyService, generalStatus, additionalStatus, data)
}
}
digitalpetri/scala-ethernet-ip | enip-core/src/main/scala/com/digitalpetri/ethernetip/cip/structs/MessageRouter.scala | Scala | apache-2.0 | 3,436
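A round-trip sketch for the request codec above. The service code 0x4C and the empty request data are illustrative assumptions; the request path is taken as a parameter because PaddedEPath construction lives outside this file.

// Sketch: encode a request into a fresh buffer, then decode it back.
def roundTrip(path: PaddedEPath): MessageRouterRequest = {
  val request = MessageRouterRequest(serviceCode = 0x4C, requestPath = path, requestData = Buffers.unpooled())
  val buffer = MessageRouterRequest.encode(request)
  MessageRouterRequest.decode(buffer)
}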
// scalac: -Xasync
import scala.concurrent._
import ExecutionContext.Implicits.global
import scala.tools.testkit.async.Async._
import scala.concurrent.duration.Duration
case class FixedFoo(foo: Int)
class Foobar(val foo: Int, val bar: Double) {
def getValue = async { 4.2 }
def func(f: Any) = async {
new Foobar(foo = f match {
case FixedFoo(x) => x
case _ => 2
},
bar = await(getValue))
}
}
object Test extends App { test()
def test() = Await.result(new Foobar(0, 0).func(4), Duration.Inf)
}
lrytz/scala | test/async/jvm/concurrent_NegativeArraySizeExceptionFine1.scala | Scala | apache-2.0 | 552
package breeze.integrate.quasimontecarlo
/*
Copyright 2016 Chris Stucchio
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import spire.implicits.cfor
import breeze.linalg._
object Halton {
val HALTON_MAX_DIMENSION = 1229
private def readClasspathFileToIntArray(filename: String): Array[Int] = {
/*
* Reads a file from the classpath into an array of Ints.
* The file should be stored as text, with integers separated by a ',' and perhaps arbitrary whitespace, including newlines.
*/
val fileStream = this.getClass().getClassLoader().getResourceAsStream(filename)
val lines = scala.io.Source.fromInputStream(fileStream).getLines()
val nums = lines.flatMap(x => x.split(',')).map(x => x.replaceAll("\\s+",""))
nums.map(x => x.toInt).toArray
}
lazy val PRIMES = readClasspathFileToIntArray("primes.txt")
lazy val EA_PERMS = readClasspathFileToIntArray("quasimontecarlo_halton_ea_perms.txt")
def integrate(func: Array[Double] => Double)(dimension: Int, numSamples: Long): Double = {
val gen = new BaseUniformHaltonGenerator(dimension)
var result: Double = 0
cfor(0)(i => i < numSamples, i => i+1)(i => {
result += func(gen.getNextUnsafe)
})
result / numSamples
}
}
class BaseUniformHaltonGenerator(val dimension: Int) extends QuasiMonteCarloGenerator {
/*
* Provides a generalized Halton sequence:
* https://en.wikipedia.org/wiki/Halton_sequence
* http://www.genetic-programming.org/hc2013/DeRainville-ACM-MCS-2012-Paper.pdf
*
* This is a fairly primitive, highly optimized class that probably shouldn't be used in user code.
*
* This generates a sequence of *uniform* quasi-random variables, i.e. variables that
* can be used to approximate \int f(x) dU(x), where dU(x) is the uniform distribution on
* [0,1].
*/
private val bases = java.util.Arrays.copyOfRange(Halton.PRIMES, 0, dimension)
private var count: Long = 0
private val counters: Array[UnboxedIntVector] = List.fill(dimension)({ new UnboxedIntVector(16) }).toArray
val permutations: Array[Array[Long]] =
(0 to dimension).map(i => {
val vv = new Array[Long](Halton.PRIMES(i))
cfor(0)(j => j < Halton.PRIMES(i), j => j+1)(j => {
vv(j) = j
})
shuffle(vv)
vv
}).toArray
private val currentValue = new Array[Double](dimension)
private var generatedCount: Long = 0
def numGenerated: Long = generatedCount
def getNextUnsafe = {
cfor(0)(j => j < dimension, j => j + 1)(j => {
var lIndex: Int = 0
while((lIndex < counters(j).size()) && (counters(j).get(lIndex) == (bases(j)-1))) {
counters(j).set(lIndex, 0)
lIndex += 1
}
if (lIndex == counters(j).size()) {
counters(j).add(1)
} else {
counters(j).set(lIndex, counters(j).get(lIndex) + 1)
}
var lCountSizeI: Int = counters(j).size()
var lBasesPow: Long = bases(j)
var lValue: Double = permutations(j)(counters(j).get(lCountSizeI-1))
cfor(lCountSizeI-1)(k => k >= 1, k => k - 1)(k => {
lValue += permutations(j)(counters(j).get(k-1)) * lBasesPow
lBasesPow *= bases(j)
})
currentValue(j) = lValue.toDouble / lBasesPow.toDouble
})
generatedCount += 1
currentValue
}
private class UnboxedIntVector(initialSize: Int = 256) {
/*
* This is totally unsafe to use anywhere besides here.
* I ran the code with bounds checks, they slowed things down,
* but the internal code here seems to make them unnecessary.
*/
private var storage: Array[Int] = new Array[Int](initialSize)
private var actualSize: Int = 0
def add(x: Int) = {
if (actualSize == storage.size) {
val oldStorage = storage
storage = new Array[Int](oldStorage.size*2)
// Preserve existing elements when doubling capacity; without this copy,
// every counter would be silently reset to zero on growth.
java.lang.System.arraycopy(oldStorage, 0, storage, 0, oldStorage.size)
}
storage(actualSize) = x
actualSize += 1
}
def size(): Int = actualSize
def get(i: Int): Int = storage(i)
def set(i: Int, x: Int) = {
storage(i) = x
}
}
}
crealytics/breeze | math/src/main/scala/breeze/integrate/quasimontecarlo/Halton.scala | Scala | apache-2.0 | 4,465
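A usage sketch for Halton.integrate above: estimating the integral of x*y over the unit square, whose exact value is 0.25 (the sample count is illustrative).

// Quasi-Monte Carlo estimate of the integral of x*y over [0,1]^2; converges to 0.25.
val estimate = Halton.integrate(x => x(0) * x(1))(dimension = 2, numSamples = 100000L)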
/*
* Copyright 2013 TeamNexus
*
* TeamNexus Licenses this file to you under the MIT License (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://opensource.org/licenses/mit-license.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License
*/
package com.nexus.traits
trait TLoader {
def load()
}
crvidya/nexus-scala | src/main/scala/com/nexus/traits/TLoader.scala | Scala | mit | 664
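A trivial, hypothetical implementation showing the contract:

// Sketch: a loader that performs its one-time initialization in load().
class ConfigLoader extends TLoader {
  def load(): Unit = println("configuration loaded")
}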
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources
import java.io.IOException
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.internal.io.FileCommitProtocol
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.catalog.{BucketSpec, CatalogTable, CatalogTablePartition}
import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.util.CaseInsensitiveMap
import org.apache.spark.sql.execution.SparkPlan
import org.apache.spark.sql.execution.command._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.SQLConf.PartitionOverwriteMode
import org.apache.spark.sql.util.SchemaUtils
/**
* A command for writing data to a [[HadoopFsRelation]]. Supports both overwriting and appending.
* Writing to dynamic partitions is also supported.
*
* @param staticPartitions partial partitioning spec for write. This defines the scope of partition
* overwrites: when the spec is empty, all partitions are overwritten.
* When it covers a prefix of the partition keys, only partitions matching
* the prefix are overwritten.
* @param ifPartitionNotExists If true, only write if the partition does not exist.
* Only valid for static partitions.
*/
case class InsertIntoHadoopFsRelationCommand(
outputPath: Path,
staticPartitions: TablePartitionSpec,
ifPartitionNotExists: Boolean,
partitionColumns: Seq[Attribute],
bucketSpec: Option[BucketSpec],
fileFormat: FileFormat,
options: Map[String, String],
query: LogicalPlan,
mode: SaveMode,
catalogTable: Option[CatalogTable],
fileIndex: Option[FileIndex],
outputColumnNames: Seq[String])
extends DataWritingCommand {
import org.apache.spark.sql.catalyst.catalog.ExternalCatalogUtils.escapePathName
private lazy val parameters = CaseInsensitiveMap(options)
private[sql] lazy val dynamicPartitionOverwrite: Boolean = {
val partitionOverwriteMode = parameters.get("partitionOverwriteMode")
// scalastyle:off caselocale
.map(mode => PartitionOverwriteMode.withName(mode.toUpperCase))
// scalastyle:on caselocale
.getOrElse(SQLConf.get.partitionOverwriteMode)
val enableDynamicOverwrite = partitionOverwriteMode == PartitionOverwriteMode.DYNAMIC
// This config only makes sense when we are overwriting a partitioned dataset with dynamic
// partition columns.
enableDynamicOverwrite && mode == SaveMode.Overwrite &&
staticPartitions.size < partitionColumns.length
}
override def run(sparkSession: SparkSession, child: SparkPlan): Seq[Row] = {
// Most formats don't do well with duplicate columns, so let's not allow that
SchemaUtils.checkColumnNameDuplication(
outputColumnNames,
s"when inserting into $outputPath",
sparkSession.sessionState.conf.caseSensitiveAnalysis)
val hadoopConf = sparkSession.sessionState.newHadoopConfWithOptions(options)
val fs = outputPath.getFileSystem(hadoopConf)
val qualifiedOutputPath = outputPath.makeQualified(fs.getUri, fs.getWorkingDirectory)
val partitionsTrackedByCatalog = sparkSession.sessionState.conf.manageFilesourcePartitions &&
catalogTable.isDefined &&
catalogTable.get.partitionColumnNames.nonEmpty &&
catalogTable.get.tracksPartitionsInCatalog
var initialMatchingPartitions: Seq[TablePartitionSpec] = Nil
var customPartitionLocations: Map[TablePartitionSpec, String] = Map.empty
var matchingPartitions: Seq[CatalogTablePartition] = Seq.empty
// When partitions are tracked by the catalog, compute all custom partition locations that
// may be relevant to the insertion job.
if (partitionsTrackedByCatalog) {
matchingPartitions = sparkSession.sessionState.catalog.listPartitions(
catalogTable.get.identifier, Some(staticPartitions))
initialMatchingPartitions = matchingPartitions.map(_.spec)
customPartitionLocations = getCustomPartitionLocations(
fs, catalogTable.get, qualifiedOutputPath, matchingPartitions)
}
val committer = FileCommitProtocol.instantiate(
sparkSession.sessionState.conf.fileCommitProtocolClass,
jobId = java.util.UUID.randomUUID().toString,
outputPath = outputPath.toString,
dynamicPartitionOverwrite = dynamicPartitionOverwrite)
val doInsertion = if (mode == SaveMode.Append) {
true
} else {
val pathExists = fs.exists(qualifiedOutputPath)
(mode, pathExists) match {
case (SaveMode.ErrorIfExists, true) =>
throw new AnalysisException(s"path $qualifiedOutputPath already exists.")
case (SaveMode.Overwrite, true) =>
if (ifPartitionNotExists && matchingPartitions.nonEmpty) {
false
} else if (dynamicPartitionOverwrite) {
// For dynamic partition overwrite, do not delete partition directories ahead.
true
} else {
deleteMatchingPartitions(fs, qualifiedOutputPath, customPartitionLocations, committer)
true
}
case (SaveMode.Overwrite, _) | (SaveMode.ErrorIfExists, false) =>
true
case (SaveMode.Ignore, exists) =>
!exists
case (s, exists) =>
throw new IllegalStateException(s"unsupported save mode $s ($exists)")
}
}
if (doInsertion) {
def refreshUpdatedPartitions(updatedPartitionPaths: Set[String]): Unit = {
val updatedPartitions = updatedPartitionPaths.map(PartitioningUtils.parsePathFragment)
if (partitionsTrackedByCatalog) {
val newPartitions = updatedPartitions -- initialMatchingPartitions
if (newPartitions.nonEmpty) {
AlterTableAddPartitionCommand(
catalogTable.get.identifier, newPartitions.toSeq.map(p => (p, None)),
ifNotExists = true).run(sparkSession)
}
// For dynamic partition overwrite, we never remove partitions but only update existing
// ones.
if (mode == SaveMode.Overwrite && !dynamicPartitionOverwrite) {
val deletedPartitions = initialMatchingPartitions.toSet -- updatedPartitions
if (deletedPartitions.nonEmpty) {
AlterTableDropPartitionCommand(
catalogTable.get.identifier, deletedPartitions.toSeq,
ifExists = true, purge = false,
retainData = true /* already deleted */).run(sparkSession)
}
}
}
}
val updatedPartitionPaths =
FileFormatWriter.write(
sparkSession = sparkSession,
plan = child,
fileFormat = fileFormat,
committer = committer,
outputSpec = FileFormatWriter.OutputSpec(
qualifiedOutputPath.toString, customPartitionLocations, outputColumns),
hadoopConf = hadoopConf,
partitionColumns = partitionColumns,
bucketSpec = bucketSpec,
statsTrackers = Seq(basicWriteJobStatsTracker(hadoopConf)),
options = options)
// update metastore partition metadata
if (updatedPartitionPaths.isEmpty && staticPartitions.nonEmpty
&& partitionColumns.length == staticPartitions.size) {
// Make sure an entirely-static, empty partition is still registered in the catalog.
val staticPathFragment =
PartitioningUtils.getPathFragment(staticPartitions, partitionColumns)
refreshUpdatedPartitions(Set(staticPathFragment))
} else {
refreshUpdatedPartitions(updatedPartitionPaths)
}
// refresh cached files in FileIndex
fileIndex.foreach(_.refresh())
// refresh data cache if table is cached
sparkSession.catalog.refreshByPath(outputPath.toString)
if (catalogTable.nonEmpty) {
CommandUtils.updateTableStats(sparkSession, catalogTable.get)
}
} else {
logInfo("Skipping insertion into a relation that already exists.")
}
Seq.empty[Row]
}
/**
* Deletes all partition files that match the specified static prefix. Partitions with custom
* locations are also cleared based on the custom locations map given to this class.
*/
private def deleteMatchingPartitions(
fs: FileSystem,
qualifiedOutputPath: Path,
customPartitionLocations: Map[TablePartitionSpec, String],
committer: FileCommitProtocol): Unit = {
val staticPartitionPrefix = if (staticPartitions.nonEmpty) {
"/" + partitionColumns.flatMap { p =>
staticPartitions.get(p.name) match {
case Some(value) =>
Some(escapePathName(p.name) + "=" + escapePathName(value))
case None =>
None
}
}.mkString("/")
} else {
""
}
// first clear the path determined by the static partition keys (e.g. /table/foo=1)
val staticPrefixPath = qualifiedOutputPath.suffix(staticPartitionPrefix)
if (fs.exists(staticPrefixPath) && !committer.deleteWithJob(fs, staticPrefixPath, true)) {
throw new IOException(s"Unable to clear output " +
s"directory $staticPrefixPath prior to writing to it")
}
// now clear all custom partition locations (e.g. /custom/dir/where/foo=2/bar=4)
for ((spec, customLoc) <- customPartitionLocations) {
assert(
(staticPartitions.toSet -- spec).isEmpty,
"Custom partition location did not match static partitioning keys")
val path = new Path(customLoc)
if (fs.exists(path) && !committer.deleteWithJob(fs, path, true)) {
throw new IOException(s"Unable to clear partition " +
s"directory $path prior to writing to it")
}
}
}
/**
* Given a set of input partitions, returns those that have locations that differ from the
* Hive default (e.g. /k1=v1/k2=v2). These partitions were manually assigned locations by
* the user.
*
* @return a mapping from partition specs to their custom locations
*/
private def getCustomPartitionLocations(
fs: FileSystem,
table: CatalogTable,
qualifiedOutputPath: Path,
partitions: Seq[CatalogTablePartition]): Map[TablePartitionSpec, String] = {
partitions.flatMap { p =>
val defaultLocation = qualifiedOutputPath.suffix(
"/" + PartitioningUtils.getPathFragment(p.spec, table.partitionSchema)).toString
val catalogLocation = new Path(p.location).makeQualified(
fs.getUri, fs.getWorkingDirectory).toString
if (catalogLocation != defaultLocation) {
Some(p.spec -> catalogLocation)
} else {
None
}
}.toMap
}
}
ConeyLiu/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/InsertIntoHadoopFsRelationCommand.scala | Scala | apache-2.0 | 11,580
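For reference, dynamicPartitionOverwrite above evaluates to true for writes like the following user-level sketch (spark and df are assumed to be a SparkSession and a DataFrame; the session key is SQLConf's partition overwrite mode, and the per-write option is the partitionOverwriteMode key read from parameters above):

// Session-wide: only partitions receiving new rows are overwritten.
spark.conf.set("spark.sql.sources.partitionOverwriteMode", "dynamic")
// Or per write, via the DataFrameWriter option read into `parameters` above:
df.write.mode("overwrite").option("partitionOverwriteMode", "dynamic").insertInto("partitioned_table")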
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.regression
import scala.collection.mutable.ArrayBuffer
import org.apache.spark.SparkFunSuite
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.util.LinearDataGenerator
import org.apache.spark.streaming.{LocalStreamingContext, TestSuiteBase}
import org.apache.spark.streaming.dstream.DStream
class StreamingLinearRegressionSuite
extends SparkFunSuite
with LocalStreamingContext
with TestSuiteBase {
// use longer wait time to ensure job completion
override def maxWaitTimeMillis: Int = 20000
// Assert that two values are equal within tolerance epsilon
def assertEqual(v1: Double, v2: Double, epsilon: Double): Unit = {
def errorMessage = v1.toString + " did not equal " + v2.toString
assert(math.abs(v1-v2) <= epsilon, errorMessage)
}
// Assert that model predictions are correct
def validatePrediction(predictions: Seq[Double], input: Seq[LabeledPoint]): Unit = {
val numOffPredictions = predictions.zip(input).count { case (prediction, expected) =>
// A prediction is off if the prediction is more than 0.5 away from expected value.
math.abs(prediction - expected.label) > 0.5
}
// At least 80% of the predictions should be on.
assert(numOffPredictions < input.length / 5)
}
// Test if we can accurately learn Y = 10*X1 + 10*X2 on streaming data
test("parameter accuracy") {
// create model
val model = new StreamingLinearRegressionWithSGD()
.setInitialWeights(Vectors.dense(0.0, 0.0))
.setStepSize(0.2)
.setNumIterations(25)
.setConvergenceTol(0.0001)
// generate sequence of simulated data
val numBatches = 10
val input = (0 until numBatches).map { i =>
LinearDataGenerator.generateLinearInput(0.0, Array(10.0, 10.0), 100, 42 * (i + 1))
}
// apply model training to input stream
ssc = setupStreams(input, (inputDStream: DStream[LabeledPoint]) => {
model.trainOn(inputDStream)
inputDStream.count()
})
runStreams(ssc, numBatches, numBatches)
// check accuracy of final parameter estimates
assertEqual(model.latestModel().intercept, 0.0, 0.1)
assertEqual(model.latestModel().weights(0), 10.0, 0.1)
assertEqual(model.latestModel().weights(1), 10.0, 0.1)
// check accuracy of predictions
val validationData = LinearDataGenerator.generateLinearInput(0.0, Array(10.0, 10.0), 100, 17)
validatePrediction(validationData.map(row => model.latestModel().predict(row.features)),
validationData)
}
// Test that parameter estimates improve when learning Y = 10*X1 on streaming data
test("parameter convergence") {
// create model
val model = new StreamingLinearRegressionWithSGD()
.setInitialWeights(Vectors.dense(0.0))
.setStepSize(0.2)
.setNumIterations(25)
// generate sequence of simulated data
val numBatches = 10
val input = (0 until numBatches).map { i =>
LinearDataGenerator.generateLinearInput(0.0, Array(10.0), 100, 42 * (i + 1))
}
// create buffer to store intermediate fits
val history = new ArrayBuffer[Double](numBatches)
// apply model training to input stream, storing the intermediate results
// (we add a count to ensure the result is a DStream)
ssc = setupStreams(input, (inputDStream: DStream[LabeledPoint]) => {
model.trainOn(inputDStream)
inputDStream.foreachRDD(x => history += math.abs(model.latestModel().weights(0) - 10.0))
inputDStream.count()
})
runStreams(ssc, numBatches, numBatches)
// compute change in error
val deltas = history.drop(1).zip(history.dropRight(1))
// check error stability (it always either shrinks, or increases with small tol)
assert(deltas.forall(x => (x._1 - x._2) <= 0.1))
// check that error shrunk on at least 2 batches
assert(deltas.map(x => if ((x._1 - x._2) < 0) 1 else 0).sum > 1)
}
// Test predictions on a stream
test("predictions") {
// create model initialized with true weights
val model = new StreamingLinearRegressionWithSGD()
.setInitialWeights(Vectors.dense(10.0, 10.0))
.setStepSize(0.2)
.setNumIterations(25)
// generate sequence of simulated data for testing
val numBatches = 10
val nPoints = 100
val testInput = (0 until numBatches).map { i =>
LinearDataGenerator.generateLinearInput(0.0, Array(10.0, 10.0), nPoints, 42 * (i + 1))
}
// apply model predictions to test stream
ssc = setupStreams(testInput, (inputDStream: DStream[LabeledPoint]) => {
model.predictOnValues(inputDStream.map(x => (x.label, x.features)))
})
// collect the output as (true, estimated) tuples
val output: Seq[Seq[(Double, Double)]] = runStreams(ssc, numBatches, numBatches)
// compute the mean absolute error and check that it's always less than 0.1
val errors = output.map(batch => batch.map(p => math.abs(p._1 - p._2)).sum / nPoints)
assert(errors.forall(x => x <= 0.1))
}
// Test training combined with prediction
test("training and prediction") {
// create model initialized with zero weights
val model = new StreamingLinearRegressionWithSGD()
.setInitialWeights(Vectors.dense(0.0, 0.0))
.setStepSize(0.2)
.setNumIterations(25)
// generate sequence of simulated data for testing
val numBatches = 10
val nPoints = 100
val testInput = (0 until numBatches).map { i =>
LinearDataGenerator.generateLinearInput(0.0, Array(10.0, 10.0), nPoints, 42 * (i + 1))
}
// train and predict
ssc = setupStreams(testInput, (inputDStream: DStream[LabeledPoint]) => {
model.trainOn(inputDStream)
model.predictOnValues(inputDStream.map(x => (x.label, x.features)))
})
val output: Seq[Seq[(Double, Double)]] = runStreams(ssc, numBatches, numBatches)
// assert that prediction error improves, ensuring that the updated model is being used
val error = output.map(batch => batch.map(p => math.abs(p._1 - p._2)).sum / nPoints).toList
assert((error.head - error.last) > 2)
}
// Test empty RDDs in a stream
test("handling empty RDDs in a stream") {
val model = new StreamingLinearRegressionWithSGD()
.setInitialWeights(Vectors.dense(0.0, 0.0))
.setStepSize(0.2)
.setNumIterations(25)
val numBatches = 10
val nPoints = 100
val emptyInput = Seq.empty[Seq[LabeledPoint]]
ssc = setupStreams(emptyInput,
(inputDStream: DStream[LabeledPoint]) => {
model.trainOn(inputDStream)
model.predictOnValues(inputDStream.map(x => (x.label, x.features)))
}
)
val output: Seq[Seq[(Double, Double)]] = runStreams(ssc, numBatches, numBatches)
}
}
jkbradley/spark | mllib/src/test/scala/org/apache/spark/mllib/regression/StreamingLinearRegressionSuite.scala | Scala | apache-2.0 | 7,502
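The "parameter convergence" test pairs each batch error with its predecessor via zip; a worked sketch with illustrative numbers makes the pairing direction explicit:

val history = Seq(5.0, 3.2, 2.1, 2.15, 1.0)                         // illustrative errors
val deltas = history.drop(1).zip(history.dropRight(1))              // (later, earlier) pairs
deltas.forall { case (later, earlier) => (later - earlier) <= 0.1 } // error never grows much
deltas.count { case (later, earlier) => (later - earlier) < 0 } > 1 // shrank on >= 2 batches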
package pattern
/*
+1>> This source code is licensed as GPLv3 if not stated otherwise.
>> NO responsibility taken for ANY harm, damage done
>> to you, your data, animals, etc.
>>
+2>>
>> Last modified: 2013-10-29 :: 20:37
>> Origin: patterns
>>
+3>>
>> Copyright (c) 2013:
>>
>> | | |
>> | ,---.,---|,---.|---.
>> | | || |`---.| |
>> `---'`---'`---'`---'`---'
>> // Niklas Klügel
>>
+4>>
>> Made in Bavaria by fat little elves - since 1983.
*/
//FIXME: disgusting!!!
//TODO: round to clock accuracy
class MusicalTime(protected val rep: Double) extends Ordered[MusicalTime] {
//def d256 = new MusicalTime(rep/256)
def d128 = new MusicalTime(rep/128)
def d64 = new MusicalTime(rep/64)
def d32 = new MusicalTime(rep/32)
def d16 = new MusicalTime(rep/16)
def d8 = new MusicalTime(rep/8)
def d4 = new MusicalTime(rep/4)
def d2 = new MusicalTime(rep/2)
def + (that: MusicalTime) = new MusicalTime(this.rep + that.rep)
def - (that: MusicalTime) = new MusicalTime(this.rep - that.rep)
def compare(that: MusicalTime): Int = this.rep.compareTo(that.rep)
def toDouble = rep
override def toString: String = "MusicalTime: "+rep.toString
// ??
//def toTicks
//stretch
//toTime
}
class MusicalDuration(rep: Double) extends MusicalTime(rep) {
override def d128 = new MusicalDuration(rep/128)
override def d64 = new MusicalDuration(rep/64)
override def d32 = new MusicalDuration(rep/32)
override def d16 = new MusicalDuration(rep/16)
override def d8 = new MusicalDuration(rep/8)
override def d4 = new MusicalDuration(rep/4)
override def d2 = new MusicalDuration(rep/2)
def stretch(factor: Double) = new MusicalDuration(factor*this.rep)
}
object MusicalDuration {
def apply(v: Int) = new MusicalDuration(v)
def apply(v: Double) = new MusicalDuration(v)
def apply(v: Float) = new MusicalDuration(v)
def min(l: MusicalDuration,r:MusicalDuration) : MusicalDuration= {
if (l >= r) {
r
} else {
l
}
}
// not sure whether this won't lead to double triggers?
def happens(ref: MusicalTime, pt: MusicalTime, accuracy: MusicalTime) : Boolean = {
scala.math.abs((ref-pt).toDouble) < accuracy.toDouble
}
def happensRepeatedly(ref: MusicalTime, pt: MusicalTime, accuracy: MusicalTime) : Boolean = {
scala.math.abs(ref.toDouble % pt.toDouble) < accuracy.toDouble
}
}
//musical point in time?
//object Time {
/*def every(time: Timing)(block: => ()) = {
Context.getContext()
} */
//logical time player and for clock?
//}
/*
class Timing {
}
class Timed[T](val value:T) {
def unapply() : T = value
//set get time
//sollte eigene clock haben, sodass man stretch events machen kann?
}
*/
lodsb/patterns | src/main/scala/pattern/MusicalTime.scala | Scala | gpl-3.0 | 2,835
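A small sketch of the subdivision arithmetic above: a dotted eighth expressed relative to a whole note, checked with happens at 1/128 accuracy (values are illustrative).

val whole = new MusicalTime(1.0)
val dottedEighth = whole.d8 + whole.d16 // 1/8 + 1/16 = 0.1875 of a whole note
MusicalDuration.happens(dottedEighth, new MusicalTime(0.1875), whole.d128) // true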
package jwtyped
import io.circe.generic.semiauto._
case class Payload(sub: String, name: String, admin: Boolean)
object Payload {
import io.circe.{Decoder, Encoder}
implicit val decoder: Decoder[Payload] = deriveDecoder
implicit val encoder: Encoder[Payload] = deriveEncoder
}
etaty/jwtyped | src/test/scala/jwtyped/Payload.scala | Scala | apache-2.0 | 288
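A sketch of the derived codecs in use; the field values mirror the canonical JWT example payload, and io.circe.parser (a separate circe module) is assumed to be on the classpath for decoding.

import io.circe.syntax._
val json = Payload("1234567890", "John Doe", admin = true).asJson.noSpaces
val back = io.circe.parser.decode[Payload](json) // Right(Payload("1234567890", "John Doe", true))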
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package spark.scheduler.cluster
import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet}
import akka.actor._
import akka.util.duration._
import akka.pattern.ask
import akka.util.Duration
import spark.{Utils, SparkException, Logging, TaskState}
import akka.dispatch.Await
import java.util.concurrent.atomic.AtomicInteger
import akka.remote.{RemoteClientShutdown, RemoteClientDisconnected, RemoteClientLifeCycleEvent}
/**
* A standalone scheduler backend, which waits for standalone executors to connect to it through
* Akka. These may be executed in a variety of ways, such as Mesos tasks for the coarse-grained
* Mesos mode or standalone processes for Spark's standalone deploy mode (spark.deploy.*).
*/
private[spark]
class StandaloneSchedulerBackend(scheduler: ClusterScheduler, actorSystem: ActorSystem)
extends SchedulerBackend with Logging {
// Use an atomic variable to track total number of cores in the cluster for simplicity and speed
var totalCoreCount = new AtomicInteger(0)
class DriverActor(sparkProperties: Seq[(String, String)]) extends Actor {
private val executorActor = new HashMap[String, ActorRef]
private val executorAddress = new HashMap[String, Address]
private val executorHostPort = new HashMap[String, String]
private val freeCores = new HashMap[String, Int]
private val actorToExecutorId = new HashMap[ActorRef, String]
private val addressToExecutorId = new HashMap[Address, String]
override def preStart() {
// Listen for remote client disconnection events, since they don't go through Akka's watch()
context.system.eventStream.subscribe(self, classOf[RemoteClientLifeCycleEvent])
}
def receive = {
case RegisterExecutor(executorId, hostPort, cores) =>
Utils.checkHostPort(hostPort, "Host port expected " + hostPort)
if (executorActor.contains(executorId)) {
sender ! RegisterExecutorFailed("Duplicate executor ID: " + executorId)
} else {
logInfo("Registered executor: " + sender + " with ID " + executorId)
sender ! RegisteredExecutor(sparkProperties)
context.watch(sender)
executorActor(executorId) = sender
executorHostPort(executorId) = hostPort
freeCores(executorId) = cores
executorAddress(executorId) = sender.path.address
actorToExecutorId(sender) = executorId
addressToExecutorId(sender.path.address) = executorId
totalCoreCount.addAndGet(cores)
makeOffers()
}
case StatusUpdate(executorId, taskId, state, data) =>
scheduler.statusUpdate(taskId, state, data.value)
if (TaskState.isFinished(state)) {
freeCores(executorId) += 1
makeOffers(executorId)
}
case ReviveOffers =>
makeOffers()
case StopDriver =>
sender ! true
context.stop(self)
case RemoveExecutor(executorId, reason) =>
removeExecutor(executorId, reason)
sender ! true
case Terminated(actor) =>
actorToExecutorId.get(actor).foreach(removeExecutor(_, "Akka actor terminated"))
case RemoteClientDisconnected(transport, address) =>
addressToExecutorId.get(address).foreach(removeExecutor(_, "remote Akka client disconnected"))
case RemoteClientShutdown(transport, address) =>
addressToExecutorId.get(address).foreach(removeExecutor(_, "remote Akka client shutdown"))
}
// Make fake resource offers on all executors
def makeOffers() {
launchTasks(scheduler.resourceOffers(
executorHostPort.toArray.map {case (id, hostPort) => new WorkerOffer(id, hostPort, freeCores(id))}))
}
// Make fake resource offers on just one executor
def makeOffers(executorId: String) {
launchTasks(scheduler.resourceOffers(
Seq(new WorkerOffer(executorId, executorHostPort(executorId), freeCores(executorId)))))
}
// Launch tasks returned by a set of resource offers
def launchTasks(tasks: Seq[Seq[TaskDescription]]) {
for (task <- tasks.flatten) {
freeCores(task.executorId) -= 1
executorActor(task.executorId) ! LaunchTask(task)
}
}
// Remove a disconnected slave from the cluster
def removeExecutor(executorId: String, reason: String) {
if (executorActor.contains(executorId)) {
logInfo("Executor " + executorId + " disconnected, so removing it")
val numCores = freeCores(executorId)
actorToExecutorId -= executorActor(executorId)
addressToExecutorId -= executorAddress(executorId)
executorActor -= executorId
executorHostPort -= executorId
freeCores -= executorId
executorAddress -= executorId
totalCoreCount.addAndGet(-numCores)
scheduler.executorLost(executorId, SlaveLost(reason))
}
}
}
var driverActor: ActorRef = null
val taskIdsOnSlave = new HashMap[String, HashSet[String]]
override def start() {
val properties = new ArrayBuffer[(String, String)]
val iterator = System.getProperties.entrySet.iterator
while (iterator.hasNext) {
val entry = iterator.next
val (key, value) = (entry.getKey.toString, entry.getValue.toString)
if (key.startsWith("spark.") && !key.equals("spark.hostPort")) {
properties += ((key, value))
}
}
driverActor = actorSystem.actorOf(
Props(new DriverActor(properties)), name = StandaloneSchedulerBackend.ACTOR_NAME)
}
private val timeout = Duration.create(System.getProperty("spark.akka.askTimeout", "10").toLong, "seconds")
override def stop() {
try {
if (driverActor != null) {
val future = driverActor.ask(StopDriver)(timeout)
Await.result(future, timeout)
}
} catch {
case e: Exception =>
throw new SparkException("Error stopping standalone scheduler's driver actor", e)
}
}
override def reviveOffers() {
driverActor ! ReviveOffers
}
override def defaultParallelism() = Option(System.getProperty("spark.default.parallelism"))
.map(_.toInt).getOrElse(math.max(totalCoreCount.get(), 2))
// Called by subclasses when notified of a lost worker
def removeExecutor(executorId: String, reason: String) {
try {
val future = driverActor.ask(RemoveExecutor(executorId, reason))(timeout)
Await.result(future, timeout)
} catch {
case e: Exception =>
throw new SparkException("Error notifying standalone scheduler's driver actor", e)
}
}
}
private[spark] object StandaloneSchedulerBackend {
val ACTOR_NAME = "StandaloneScheduler"
}
|
wgpshashank/spark
|
core/src/main/scala/spark/scheduler/cluster/StandaloneSchedulerBackend.scala
|
Scala
|
apache-2.0
| 7,423 |
/*
* Copyright 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package laika.parse.rst
import laika.io.Input
import laika.tree.Elements._
import laika.parse.rst.Directives._
import laika.parse.rst.TextRoles._
import scala.util.parsing.input.CharSequenceReader
import laika.parse.util.WhitespacePreprocessor
/** A parser for text written in reStructuredText markup. Instances of this class may be passed directly
* to the `Parse` or `Transform` APIs:
*
* {{{
* val document = Parse as ReStructuredText fromFile "hello.rst"
*
* Transform from ReStructuredText to HTML fromFile "hello.rst" toFile "hello.html"
* }}}
*
* reStructuredText has several types of extension points that are fully supported by Laika.
* In contrast to the original Python implementation, the API has been redesigned to be a more
* idiomatic, concise and type-safe Scala DSL.
*
* The following extension types are available:
*
* - Block Directives - an extension hook for adding new block level elements to
* reStructuredText markup. Use the `withBlockDirectives` method of this class to
* add directive implementations to the parser. Specification entry:
* [[http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html#directives]]
*
* - Substitution Definitions - an extension hook for adding new span level elements to
* reStructuredText markup that can be used by substitution references (like `|subst|`).
* Use the `withSpanDirectives` method of this class to
* add directive implementations to the parser that can be used as substitution definitions.
* Specification entry:
* [[http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html#substitution-definitions]]
*
* - Interpreted Text Roles - an extension hook for adding new dynamic span level elements to
* reStructuredText markup. In contrast to substitution definitions the implementation of a text
* role uses the text from the occurrences in the markup referring to the role as input.
* Use the `withTextRoles` method of this class to
* add custom text role implementations to the parser that can be referred to by interpreted text.
* Specification entry:
* [[http://docutils.sourceforge.net/docs/ref/rst/directives.html#custom-interpreted-text-roles]]
*
* @author Jens Halm
*/
class ReStructuredText private (
blockDirectives: List[Directive[Block]],
spanDirectives: List[Directive[Span]],
textRoles: List[TextRole]
) extends (Input => RawDocument) { self =>
/** Adds the specified directives and returns a new instance of the parser.
* These block directives may then be used anywhere in documents parsed by this instance.
*
* Example:
*
* {{{
* case class Note (title: String, content: Seq[Block]) extends Block with BlockContainer[Note]
*
* val rst = ReStructuredText withBlockDirectives (
* BlockDirective("note") {
* (argument() ~ blockContent)(Note)
* }
* )
*
* Transform from rst to HTML fromFile "hello.rst" toFile "hello.html"
* }}}
*
* For more details on implementing directives see [[laika.parse.rst.Directives]].
*/
def withBlockDirectives (directives: Directive[Block]*) = {
new ReStructuredText(blockDirectives ++ directives, spanDirectives, textRoles)
}
/** Adds the specified directives and returns a new instance of the parser.
* These span directives can then be referred to by substitution references.
*
* Example:
*
* {{{
* val rst = ReStructuredText withSpanDirectives (
* SpanDirective("replace") {
* spanContent map SpanSequence
* }
* )
*
* Transform from rst to HTML fromFile "hello.rst" toFile "hello.html"
* }}}
*
* For more details on implementing directives see [[laika.parse.rst.Directives]].
*/
def withSpanDirectives (directives: Directive[Span]*) = {
new ReStructuredText(blockDirectives, spanDirectives ++ directives, textRoles)
}
/** Adds the specified text roles and returns a new instance of the parser.
* These text roles may then be used in interpreted text spans.
*
* Example:
*
* {{{
* val rst = ReStructuredText withTextRoles (
* TextRole("link", "http://www.our-server.com/tickets/")(field("base-url")) {
* (base, text) => Link(List(Text(text)), base + text)
* }
* )
*
* Transform from rst to HTML fromFile "hello.rst" toFile "hello.html"
* }}}
*
* For more details on implementing directives see [[laika.parse.rst.TextRoles]].
*/
def withTextRoles (roles: TextRole*) = {
new ReStructuredText(blockDirectives, spanDirectives, textRoles ++ roles)
}
private lazy val parser = {
new BlockParsers with InlineParsers {
      val blockDirectives = self.blockDirectives.map { d => (d.name, d.part) }.toMap
      val spanDirectives = self.spanDirectives.map { d => (d.name, d.part) }.toMap
      val textRoles = self.textRoles.map { r => (r.name, r) }.toMap
}
}
/** The actual parser function, fully parsing the specified input and
* returning a document tree.
*/
def apply (input: Input) = {
val raw = input.asParserInput.source
val preprocessed = (new WhitespacePreprocessor)(raw.toString)
parser.parseDocument(new CharSequenceReader(preprocessed))
}
}
/** The default reStructuredText parser configuration, without any directives or text roles installed.
*
* @author Jens Halm
*/
object ReStructuredText extends ReStructuredText(Nil,Nil,Nil)
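// A combined sketch (assumptions: the `Note` directive and `link` role from
// the method docs above) showing that the three `withXxx` hooks compose,
// since each call returns a new immutable parser instance:
//
//   val rst = ReStructuredText
//     .withBlockDirectives(BlockDirective("note") { (argument() ~ blockContent)(Note) })
//     .withTextRoles(TextRole("link", "http://www.our-server.com/tickets/")(field("base-url")) {
//       (base, text) => Link(List(Text(text)), base + text)
//     })
//   Transform from rst to HTML fromFile "hello.rst" toFile "hello.html"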
|
Hocdoc/sandoc
|
src/main/scala/laika/parse/rst/ReStructuredText.scala
|
Scala
|
apache-2.0
| 6,203 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.testingUtils
import akka.actor.{ActorRef, Cancellable, Terminated}
import akka.pattern.{ask, pipe}
import org.apache.flink.api.common.JobID
import org.apache.flink.runtime.FlinkActor
import org.apache.flink.runtime.checkpoint.CheckpointOptions.CheckpointType
import org.apache.flink.runtime.checkpoint.CompletedCheckpoint
import org.apache.flink.runtime.checkpoint.savepoint.SavepointStore
import org.apache.flink.runtime.concurrent.BiFunction
import org.apache.flink.runtime.execution.ExecutionState
import org.apache.flink.runtime.jobgraph.JobStatus
import org.apache.flink.runtime.jobmanager.JobManager
import org.apache.flink.runtime.jobmanager.slots.ActorTaskManagerGateway
import org.apache.flink.runtime.messages.Acknowledge
import org.apache.flink.runtime.messages.ExecutionGraphMessages.JobStatusChanged
import org.apache.flink.runtime.messages.JobManagerMessages._
import org.apache.flink.runtime.messages.Messages.Disconnect
import org.apache.flink.runtime.messages.RegistrationMessages.RegisterTaskManager
import org.apache.flink.runtime.messages.TaskManagerMessages.Heartbeat
import org.apache.flink.runtime.testingUtils.TestingJobManagerMessages._
import org.apache.flink.runtime.testingUtils.TestingMessages._
import org.apache.flink.runtime.testingUtils.TestingTaskManagerMessages.AccumulatorsChanged
import scala.collection.mutable
import scala.concurrent.Future
import scala.concurrent.duration._
import scala.language.postfixOps
/** This mixin can be used to decorate a JobManager with messages for testing purposes. */
trait TestingJobManagerLike extends FlinkActor {
that: JobManager =>
import context._
import scala.collection.JavaConverters._
val waitForAllVerticesToBeRunning = scala.collection.mutable.HashMap[JobID, Set[ActorRef]]()
val waitForTaskManagerToBeTerminated = scala.collection.mutable.HashMap[String, Set[ActorRef]]()
val waitForAllVerticesToBeRunningOrFinished =
scala.collection.mutable.HashMap[JobID, Set[ActorRef]]()
var periodicCheck: Option[Cancellable] = None
val waitForJobStatus = scala.collection.mutable.HashMap[JobID,
collection.mutable.HashMap[JobStatus, Set[ActorRef]]]()
val waitForAccumulatorUpdate = scala.collection.mutable.HashMap[JobID, (Boolean, Set[ActorRef])]()
val waitForLeader = scala.collection.mutable.HashSet[ActorRef]()
val waitForNumRegisteredTaskManagers = mutable.PriorityQueue.newBuilder(
new Ordering[(Int, ActorRef)] {
override def compare(x: (Int, ActorRef), y: (Int, ActorRef)): Int = y._1 - x._1
})
val waitForClient = scala.collection.mutable.HashSet[ActorRef]()
val waitForShutdown = scala.collection.mutable.HashSet[ActorRef]()
var disconnectDisabled = false
var postStopEnabled = true
abstract override def postStop(): Unit = {
if (postStopEnabled) {
super.postStop()
} else {
// only stop leader election service to revoke the leadership of this JM so that a new JM
// can be elected leader
leaderElectionService.stop()
}
}
abstract override def handleMessage: Receive = {
handleTestingMessage orElse super.handleMessage
}
def handleTestingMessage: Receive = {
case Alive => sender() ! Acknowledge.get()
case RequestExecutionGraph(jobID) =>
currentJobs.get(jobID) match {
case Some((executionGraph, jobInfo)) => sender() ! decorateMessage(
ExecutionGraphFound(
jobID,
executionGraph)
)
case None => archive.tell(decorateMessage(RequestExecutionGraph(jobID)), sender())
}
case WaitForAllVerticesToBeRunning(jobID) =>
      if (checkIfAllVerticesRunning(jobID)) {
        sender() ! decorateMessage(AllVerticesRunning(jobID))
      } else {
val waiting = waitForAllVerticesToBeRunning.getOrElse(jobID, Set[ActorRef]())
waitForAllVerticesToBeRunning += jobID -> (waiting + sender())
if(periodicCheck.isEmpty){
periodicCheck =
Some(
context.system.scheduler.schedule(
0 seconds,
200 millis,
self,
decorateMessage(NotifyListeners)
)
)
}
}
case WaitForAllVerticesToBeRunningOrFinished(jobID) =>
      if (checkIfAllVerticesRunningOrFinished(jobID)) {
        sender() ! decorateMessage(AllVerticesRunning(jobID))
      } else {
val waiting = waitForAllVerticesToBeRunningOrFinished.getOrElse(jobID, Set[ActorRef]())
waitForAllVerticesToBeRunningOrFinished += jobID -> (waiting + sender())
if(periodicCheck.isEmpty){
periodicCheck =
Some(
context.system.scheduler.schedule(
0 seconds,
200 millis,
self,
decorateMessage(NotifyListeners)
)
)
}
}
case NotifyListeners =>
for(jobID <- currentJobs.keySet){
notifyListeners(jobID)
}
if(waitForAllVerticesToBeRunning.isEmpty && waitForAllVerticesToBeRunningOrFinished.isEmpty) {
periodicCheck foreach { _.cancel() }
periodicCheck = None
}
case NotifyWhenJobRemoved(jobID) =>
val gateways = instanceManager.getAllRegisteredInstances.asScala.map(_.getTaskManagerGateway)
val responses = gateways.map{
gateway => gateway match {
case actorGateway: ActorTaskManagerGateway =>
actorGateway.getActorGateway.ask(NotifyWhenJobRemoved(jobID), timeout).mapTo[Boolean]
case _ =>
throw new IllegalStateException("The task manager gateway is not of type " +
s"${classOf[ActorTaskManagerGateway].getSimpleName}")
}
}
val jobRemovedOnJobManager = (self ? CheckIfJobRemoved(jobID))(timeout).mapTo[Boolean]
val allFutures = responses ++ Seq(jobRemovedOnJobManager)
import context.dispatcher
Future.fold(allFutures)(true)(_ & _) map(decorateMessage(_)) pipeTo sender()
case CheckIfJobRemoved(jobID) =>
if(currentJobs.contains(jobID)) {
context.system.scheduler.scheduleOnce(
200 milliseconds,
self,
decorateMessage(CheckIfJobRemoved(jobID))
)(context.dispatcher, sender())
} else {
sender() ! decorateMessage(true)
}
case NotifyWhenTaskManagerTerminated(taskManager) =>
val waiting = waitForTaskManagerToBeTerminated.getOrElse(taskManager.path.name, Set())
waitForTaskManagerToBeTerminated += taskManager.path.name -> (waiting + sender)
case msg@Terminated(taskManager) =>
super.handleMessage(msg)
waitForTaskManagerToBeTerminated.remove(taskManager.path.name) foreach {
_ foreach {
listener =>
listener ! decorateMessage(TaskManagerTerminated(taskManager))
}
}
// see shutdown method for reply
case NotifyOfComponentShutdown =>
waitForShutdown += sender()
case NotifyWhenAccumulatorChange(jobID) =>
val (updated, registered) = waitForAccumulatorUpdate.
getOrElse(jobID, (false, Set[ActorRef]()))
waitForAccumulatorUpdate += jobID -> (updated, registered + sender)
sender ! true
    /**
     * Notification from the task manager that changed accumulators will be transferred
     * with the next Heartbeat. We keep this state so we can notify the listeners on the
     * next Heartbeat report.
     */
case AccumulatorsChanged(jobID: JobID) =>
waitForAccumulatorUpdate.get(jobID) match {
case Some((updated, registered)) =>
waitForAccumulatorUpdate.put(jobID, (true, registered))
case None =>
}
    /**
     * Handles the heartbeat and sends the accumulators to the listeners if we
     * previously received an [[AccumulatorsChanged]] message.
     */
case msg : Heartbeat =>
super.handleMessage(msg)
waitForAccumulatorUpdate foreach {
case (jobID, (updated, actors)) if updated =>
currentJobs.get(jobID) match {
case Some((graph, jobInfo)) =>
val userAccumulators = graph.aggregateUserAccumulators
actors foreach {
actor => actor ! UpdatedAccumulators(jobID, userAccumulators)
}
case None =>
}
waitForAccumulatorUpdate.put(jobID, (false, actors))
case _ =>
}
case RequestWorkingTaskManager(jobID) =>
currentJobs.get(jobID) match {
case Some((eg, _)) =>
if(eg.getAllExecutionVertices.asScala.isEmpty){
sender ! decorateMessage(WorkingTaskManager(None))
} else {
val resource = eg.getAllExecutionVertices.asScala.head.getCurrentAssignedResource
if(resource == null){
sender ! decorateMessage(WorkingTaskManager(None))
} else {
sender ! decorateMessage(
WorkingTaskManager(
Some(
resource.getTaskManagerGateway() match {
case actorTaskManagerGateway: ActorTaskManagerGateway =>
actorTaskManagerGateway.getActorGateway
case _ => throw new IllegalStateException(
"The task manager gateway is not of type " +
s"${classOf[ActorTaskManagerGateway].getSimpleName}")
}
)
)
)
}
}
case None => sender ! decorateMessage(WorkingTaskManager(None))
}
case NotifyWhenJobStatus(jobID, state) =>
val jobStatusListener = waitForJobStatus.getOrElseUpdate(jobID,
scala.collection.mutable.HashMap[JobStatus, Set[ActorRef]]())
val listener = jobStatusListener.getOrElse(state, Set[ActorRef]())
jobStatusListener += state -> (listener + sender)
case msg@JobStatusChanged(jobID, newJobStatus, _, _) =>
super.handleMessage(msg)
val cleanup = waitForJobStatus.get(jobID) match {
case Some(stateListener) =>
stateListener.remove(newJobStatus) match {
case Some(listeners) =>
listeners foreach {
_ ! decorateMessage(JobStatusIs(jobID, newJobStatus))
}
case _ =>
}
stateListener.isEmpty
case _ => false
}
if (cleanup) {
waitForJobStatus.remove(jobID)
}
case DisableDisconnect =>
disconnectDisabled = true
case DisablePostStop =>
postStopEnabled = false
case RequestSavepoint(savepointPath) =>
try {
//TODO user class loader ?
val savepoint = SavepointStore.loadSavepoint(
savepointPath,
Thread.currentThread().getContextClassLoader)
sender ! ResponseSavepoint(savepoint)
}
catch {
case e: Exception =>
sender ! ResponseSavepoint(null)
}
case msg: Disconnect =>
if (!disconnectDisabled) {
super.handleMessage(msg)
val taskManager = sender()
waitForTaskManagerToBeTerminated.remove(taskManager.path.name) foreach {
_ foreach {
listener =>
listener ! decorateMessage(TaskManagerTerminated(taskManager))
}
}
}
case CheckpointRequest(jobId, checkpointOptions) =>
currentJobs.get(jobId) match {
case Some((graph, _)) =>
val checkpointCoordinator = graph.getCheckpointCoordinator()
if (checkpointCoordinator != null) {
// Immutable copy for the future
val senderRef = sender()
try {
// Do this async, because checkpoint coordinator operations can
// contain blocking calls to the state backend or ZooKeeper.
val cpFuture = checkpointCoordinator.triggerCheckpoint(
System.currentTimeMillis(),
checkpointOptions)
cpFuture.handleAsync[Void](
new BiFunction[CompletedCheckpoint, Throwable, Void] {
override def apply(success: CompletedCheckpoint, cause: Throwable): Void = {
if (success != null) {
if (success.getExternalPointer == null &&
CheckpointType.SAVEPOINT.equals(checkpointOptions.getCheckpointType)) {
senderRef ! CheckpointRequestFailure(
jobId, new Exception("Savepoint has not been persisted.")
)
} else {
senderRef ! CheckpointRequestSuccess(
jobId,
success.getCheckpointID,
success.getExternalPointer,
success.getTimestamp)
}
} else {
senderRef ! CheckpointRequestFailure(
jobId, new Exception("Failed to complete checkpoint", cause))
}
null
}
},
context.dispatcher)
} catch {
case e: Exception =>
senderRef ! CheckpointRequestFailure(jobId, new Exception(
"Failed to trigger checkpoint", e))
}
} else {
sender() ! CheckpointRequestFailure(jobId, new IllegalStateException(
"Checkpointing disabled. You can enable it via the execution environment of " +
"your job."))
}
case None =>
sender() ! CheckpointRequestFailure(jobId, new IllegalArgumentException("Unknown job."))
}
case NotifyWhenLeader =>
if (leaderElectionService.hasLeadership) {
sender() ! true
} else {
waitForLeader += sender()
}
case msg: GrantLeadership =>
super.handleMessage(msg)
waitForLeader.foreach(_ ! true)
waitForLeader.clear()
case NotifyWhenClientConnects =>
waitForClient += sender()
sender() ! true
case msg: RegisterJobClient =>
super.handleMessage(msg)
waitForClient.foreach(_ ! ClientConnected)
case msg: RequestClassloadingProps =>
super.handleMessage(msg)
waitForClient.foreach(_ ! ClassLoadingPropsDelivered)
case NotifyWhenAtLeastNumTaskManagerAreRegistered(numRegisteredTaskManager) =>
if (that.instanceManager.getNumberOfRegisteredTaskManagers >= numRegisteredTaskManager) {
// there are already at least numRegisteredTaskManager registered --> send Acknowledge
sender() ! Acknowledge.get()
} else {
// wait until we see at least numRegisteredTaskManager being registered at the JobManager
waitForNumRegisteredTaskManagers += ((numRegisteredTaskManager, sender()))
}
    // A TaskManager may be registered via this message
case msg @ (_: RegisterTaskManager) =>
super.handleMessage(msg)
// dequeue all senders which wait for instanceManager.getNumberOfStartedTaskManagers or
// fewer registered TaskManagers
while (waitForNumRegisteredTaskManagers.nonEmpty &&
waitForNumRegisteredTaskManagers.head._1 <=
instanceManager.getNumberOfRegisteredTaskManagers) {
val receiver = waitForNumRegisteredTaskManagers.dequeue()._2
receiver ! Acknowledge.get()
}
}
def checkIfAllVerticesRunning(jobID: JobID): Boolean = {
currentJobs.get(jobID) match {
case Some((eg, _)) =>
eg.getAllExecutionVertices.asScala.forall( _.getExecutionState == ExecutionState.RUNNING)
case None => false
}
}
def checkIfAllVerticesRunningOrFinished(jobID: JobID): Boolean = {
currentJobs.get(jobID) match {
case Some((eg, _)) =>
eg.getAllExecutionVertices.asScala.forall {
case vertex =>
(vertex.getExecutionState == ExecutionState.RUNNING
|| vertex.getExecutionState == ExecutionState.FINISHED)
}
case None => false
}
}
def notifyListeners(jobID: JobID): Unit = {
if(checkIfAllVerticesRunning(jobID)) {
waitForAllVerticesToBeRunning.remove(jobID) match {
case Some(listeners) =>
for (listener <- listeners) {
listener ! decorateMessage(AllVerticesRunning(jobID))
}
case _ =>
}
}
if(checkIfAllVerticesRunningOrFinished(jobID)) {
waitForAllVerticesToBeRunningOrFinished.remove(jobID) match {
case Some(listeners) =>
for (listener <- listeners) {
listener ! decorateMessage(AllVerticesRunning(jobID))
}
case _ =>
}
}
}
  /**
   * Overridden so that the VM is not killed, for testing.
   */
override protected def shutdown(): Unit = {
log.info("Shutting down TestingJobManager.")
waitForShutdown.foreach(_ ! ComponentShutdown(self))
waitForShutdown.clear()
}
}
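// A minimal sketch (the concrete constructor arguments are elided as an
// assumption) of how this mixin is applied, given the `that: JobManager =>`
// self-type:
//
//   class TestingJobManager(/* JobManager constructor args */)
//     extends JobManager(/* same args */) with TestingJobManagerLike
//
// A test can then e.g. `(jobManager ? WaitForAllVerticesToBeRunning(jobID))`
// and expect an AllVerticesRunning(jobID) reply once the condition holds.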
|
oscarceballos/flink-1.3.2
|
flink-runtime/src/test/scala/org/apache/flink/runtime/testingUtils/TestingJobManagerLike.scala
|
Scala
|
apache-2.0
| 17,793 |
package com.lyrx.text
import scala.collection.immutable.HashMap
import scala.util.matching.Regex
/**
* Created by extawe on 4/5/17.
*/
object TextTypes {
type Par = Seq[String]
type Pars = Seq[Par]
type Replacement[S] = (String,S)
type Replacements[S] = Map[String,S]
type Replacer[S] = String => S
type RegexReplacers[S] = Map[Regex,Replacer[S]]
type EitherTag[S] = Either[String,S]
type EitherTagSeq[S] = Seq[EitherTag[S]]
}
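// A minimal usage sketch (not part of the original file; the replacer and
// values are made up) showing how the aliases compose:
//
//   import com.lyrx.text.TextTypes._
//   val replacements: Replacements[String] = HashMap("author" -> "lyrx")
//   val replacers: RegexReplacers[String] =
//     Map("""\d+""".r -> ((matched: String) => s"<num>$matched</num>"))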
|
lyrx/lyrxgenerator
|
src/main/scala/com/lyrx/text/TextTypes.scala
|
Scala
|
gpl-3.0
| 453 |
/*
* Copyright 2011-2021 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.spark.compiler
package graph
import scala.concurrent.{ ExecutionContext, Future }
import org.objectweb.asm.Opcodes
import org.objectweb.asm.signature.SignatureVisitor
import org.apache.spark.rdd.RDD
import com.asakusafw.spark.runtime.RoundContext
import com.asakusafw.spark.runtime.graph.{ CacheOnce => CacheOnceTrait }
import com.asakusafw.spark.runtime.rdd.BranchKey
import com.asakusafw.spark.tools.asm._
import com.asakusafw.spark.tools.asm4s._
import com.asakusafw.spark.tools.asm4s.MixIn._
trait CacheStrategy extends ClassBuilder with Mixing
trait CacheOnce extends CacheStrategy {
override val mixins = Seq(
MixIn(classOf[CacheOnceTrait[_, _]].asType,
Seq(
FieldDef(Opcodes.ACC_TRANSIENT,
"value",
classOf[AnyRef].asType)),
Seq(
MethodDef("getOrCache",
classOf[AnyRef].asType,
Seq(
classOf[AnyRef].asType,
classOf[Function0[_]].asType),
new MethodSignatureBuilder()
.newParameterType(classOf[AnyRef].asType)
.newParameterType {
_.newClassType(classOf[Function0[_]].asType) {
_.newTypeArgument()
}
}
.newReturnType(classOf[AnyRef].asType)))))
}
|
asakusafw/asakusafw-spark
|
compiler/src/main/scala/com/asakusafw/spark/compiler/graph/CacheStrategy.scala
|
Scala
|
apache-2.0
| 1,893 |
trait Bippy[@specialized(
scala.Char, scala.Boolean, scala.Byte,
scala.Short, scala.Int, scala.Long,
scala.Float, scala.Double, scala.Unit,
scala.AnyRef) T] { }
trait Bippy2[@specialized(Char, Boolean, Byte, Short, Int, Long, Float, Double, Unit, AnyRef) T] { }
|
yusuke2255/dotty
|
tests/untried/pos/specialize10.scala
|
Scala
|
bsd-3-clause
| 271 |
package org.mdpeg
import org.mdpeg.ast._
import org.mdpeg.parsers.{MultilineTablesRules, PrimitiveRules}
import org.parboiled2.{ParseError, Parser, ParserInput}
import org.scalatest.{FlatSpec, Matchers}
class MultilineTablesRulesSpec extends FlatSpec with Matchers {
class MultilineTablesRulesTestSpec(val input: ParserInput) extends Parser with PrimitiveRules with MultilineTablesRules {
}
def tableMock(bodyColumns: Vector[MultilineTableColumn],
head: Option[MultilineTableRow] = Some(Vector(
MultilineTableCell(Vector(Markdown(RawMarkdownContent(
"""Term 1
|Term cont""".stripMargin)))),
MultilineTableCell(Vector(Markdown(RawMarkdownContent(
"""Description 1
|Description cont""".stripMargin)))))),
widths: Vector[Float] = Vector(25.0f, 75.0f)) = MultilineTableBlock(
widths,
Some(MultilineTableCaption(Vector(Markdown(RawMarkdownContent("This is a table caption"))), Some("table:table_lable_name"))),
head,
bodyColumns
)
it should "parse table border" in {
val term =
"""-------------------------------------------------------------------------------
|""".stripMargin
val parsed = new MultilineTablesRulesTestSpec(term).tableBorder.run()
noException should be thrownBy {
parsed.get
}
}
it should "parse table caption" in {
val term =
"""-------------------------------------------------------------
|Table: This is a table caption\\\\label{table:table_lable_name}
|
|
|""".stripMargin
val parsed = new MultilineTablesRulesTestSpec(term).tableCaption.run()
parsed.get shouldEqual Some("""This is a table caption\\\\label{table:table_lable_name}""")
}
it should "parse table empty caption" in {
val term =
"""-------------------------------------------------------------
|
|
|""".stripMargin
val parsed = new MultilineTablesRulesTestSpec(term).tableCaption.run()
parsed.get shouldEqual None
}
it should "parse table's width separator" in {
val term =
"""----------- ---------------------------------
|""".stripMargin
val term2 =
"""----------- --- --- --- --- --- --- ------- ------ ---------------------------------
|""".stripMargin
val term3 =
"""-----------
|""".stripMargin
val parsed = new MultilineTablesRulesTestSpec(term).tableHeadWidthSeparator.run()
val parsed2 = new MultilineTablesRulesTestSpec(term2).tableHeadWidthSeparator.run()
val parsed3 = new MultilineTablesRulesTestSpec(term3).tableHeadWidthSeparator.run()
noException should be thrownBy {
parsed.get
}
noException should be thrownBy {
parsed2.get
}
noException should be thrownBy {
parsed3.get
}
}
it should "parse table a one-line tall heading" in {
val term =
"""--------------------------------------------------------------------------------
|Term 1 Description line 1
|---------------- ------------------------------------------------
|""".stripMargin
val parsed = new MultilineTablesRulesTestSpec(term).tableHeadRaw.run()
noException should be thrownBy {
parsed.get
}
}
it should "parse table a multi-line tall heading with blank lines" in {
val term =
"""--------------------------------------------------------------------------------
|Term 1 Description line 1
|Term 2 Description line 2
|
|Term 3 Description line 3
|
|Term 4 Description line 4
|---------------- ------------------------------------------------
|""".stripMargin
val parsed = new MultilineTablesRulesTestSpec(term).tableHeadRaw.run()
noException should be thrownBy {
parsed.get
}
}
it should "fail parsing table with no content lines" in {
val term =
"""--------------------------------------------------------------------------------
|---------------- ------------------------------------------------
|""".stripMargin
val parsed = new MultilineTablesRulesTestSpec(term).tableHeadRaw.run()
a[ParseError] should be thrownBy {
parsed.get
}
}
it should "parse table content for table without head" in {
val term =
"""---------------- ------------------------------------------------
|.It 1 is a long established fact that 1
|.It 2 is a long established fact that 2
|.It 3 is a long established fact that 3
|
|
|CAPSED WORD 1 The point of using Lorem Ipsum is 1
|CAPSED WORD 2 The point of using Lorem Ipsum is 2
|
|Many desktop publishing packages and""".stripMargin
val parsed = new MultilineTablesRulesTestSpec(term).tableBody.run()
noException should be thrownBy {
parsed.get
}
}
it should "parse table with header and caption" in {
val term =
"""--------------------------------------------------------------------------------
|Term 1 Description 1
|
|Term cont Description cont
|---------------- ------------------------------------------------
|.It is a long established fact that
|
|CAPSED WORD The point of using Lorem Ipsum is
|
|Many desktop publishing packages and
|--------------------------------------------------------------------------------
|Table: This is a table caption\\label{table:table_lable_name}""".stripMargin
val parser = new MultilineTablesRulesTestSpec(term)
parser.multiTable.run().get shouldEqual tableMock(Vector(
Vector(
MultilineTableCell(Vector(Markdown(RawMarkdownContent(".It")))),
MultilineTableCell(Vector(Markdown(RawMarkdownContent("CAPSED WORD")))),
MultilineTableCell(Vector(Markdown(RawMarkdownContent("Many"))))),
Vector(
MultilineTableCell(Vector(Markdown(RawMarkdownContent("is a long established fact that")))),
MultilineTableCell(Vector(Markdown(RawMarkdownContent("The point of using Lorem Ipsum is")))),
MultilineTableCell(Vector(Markdown(RawMarkdownContent("desktop publishing packages and"))))))
)
}
it should "separate table rows by blank line" in {
val term =
"""--------------------------------------------------------------------------------
|Term 1 Description 1
|
|Term cont Description cont
|---------------- ------------------------------------------------
|.It is a long established fact that
|
|CAPSED WORD The point of using Lorem Ipsum is
|Many desktop publishing packages and
|--------------------------------------------------------------------------------
|Table: This is a table caption\\label{table:table_lable_name}""".stripMargin
val parser = new MultilineTablesRulesTestSpec(term)
parser.multiTable.run().get shouldEqual tableMock(Vector(
Vector(
MultilineTableCell(Vector(Markdown(RawMarkdownContent(".It")))),
MultilineTableCell(Vector(Markdown(RawMarkdownContent(
"""CAPSED WORD
|Many""".stripMargin))))),
Vector(
MultilineTableCell(Vector(Markdown(RawMarkdownContent("is a long established fact that")))),
MultilineTableCell(Vector(Markdown(RawMarkdownContent(
"""The point of using Lorem Ipsum is
|desktop publishing packages and""".stripMargin)))))))
}
it should "eliminate trailing empty line in body row" in {
val term =
"""--------------------------------------------------------------------------------
|Term 1 Description 1
|
|Term cont Description cont
|---------------- ------------------------------------------------
|.It is a long established fact that
|
|
|
|--------------------------------------------------------------------------------
|Table: This is a table caption\\label{table:table_lable_name}""".stripMargin
val parser = new MultilineTablesRulesTestSpec(term)
parser.multiTable.run().get shouldEqual tableMock(Vector(
Vector(MultilineTableCell(Vector(Markdown(RawMarkdownContent(".It"))))),
Vector(MultilineTableCell(Vector(Markdown(RawMarkdownContent("is a long established fact that"))))
)))
}
it should "parse 1x1 table with header" in {
val term =
"""--------------------------------------------------------------------------------
|Term 1
|
|Term cont
|----------------
|.It
|
|--------------------------------------------------------------------------------
|Table: This is a table caption\\label{table:table_lable_name}""".stripMargin
val parser = new MultilineTablesRulesTestSpec(term)
parser.multiTable.run().get shouldEqual tableMock(
Vector(Vector(MultilineTableCell(Vector(Markdown(RawMarkdownContent(".It")))))),
Some(Vector(MultilineTableCell(Vector(Markdown(RawMarkdownContent(
"""Term 1
|Term cont""".stripMargin)))))),
Vector(100.0f))
}
it should "parse a rectangular table" in {
val term =
"""--------------------------------------------------------------------------------
|Term 1 Term 2 Term 3 Term 4 Term 5
|----------- ----------- ----------- ----------- -----------
|.It is a rectangular table
|--------------------------------------------------------------------------------
|Table: This is a table caption\\label{table:table_lable_name}""".stripMargin
val parser = new MultilineTablesRulesTestSpec(term)
parser.multiTable.run().get shouldEqual MultilineTableBlock(
Vector(20.0f, 20.0f, 20.0f, 20.0f, 20.0f),
Some(MultilineTableCaption(Vector(Markdown(RawMarkdownContent("This is a table caption"))), Some("table:table_lable_name"))),
Some(
Vector(
MultilineTableCell(Vector(Markdown(RawMarkdownContent("Term 1")))),
MultilineTableCell(Vector(Markdown(RawMarkdownContent("Term 2")))),
MultilineTableCell(Vector(Markdown(RawMarkdownContent("Term 3")))),
MultilineTableCell(Vector(Markdown(RawMarkdownContent("Term 4")))),
MultilineTableCell(Vector(Markdown(RawMarkdownContent("Term 5")))))),
Vector(
Vector(MultilineTableCell(Vector(Markdown(RawMarkdownContent(".It"))))),
Vector(MultilineTableCell(Vector(Markdown(RawMarkdownContent("is"))))),
Vector(MultilineTableCell(Vector(Markdown(RawMarkdownContent(" a"))))),
Vector(MultilineTableCell(Vector(Markdown(RawMarkdownContent("rectangular"))))),
Vector(MultilineTableCell(Vector(Markdown(RawMarkdownContent("table")))))))
}
it should "parse doesn't cut text that doesn't fit into width separator" in {
val term =
"""----------- ----------- ----------- ----------- -----------
|.It is longer than neccesary and it should be truncated :)
|--------------------------------------------------------------------------------
|""".stripMargin
val parser = new MultilineTablesRulesTestSpec(term)
parser.multiTable.run().get shouldEqual MultilineTableBlock(
Vector(20.0f, 20.0f, 20.0f, 20.0f, 20.0f),
None,
None,
Vector(
Vector(MultilineTableCell(Vector(Markdown(RawMarkdownContent(".It is longer"))))),
Vector(MultilineTableCell(Vector(Markdown(RawMarkdownContent("than neccesary"))))),
Vector(MultilineTableCell(Vector(Markdown(RawMarkdownContent(" and it should"))))),
Vector(MultilineTableCell(Vector(Markdown(RawMarkdownContent(" be truncated"))))),
Vector(MultilineTableCell(Vector(Markdown(RawMarkdownContent(" :)")))))))
}
it should "parse table with non equal number of lines in cells" in {
val term =
"""--------------------------------------------------------------------------------
|This header is longer than sep And this header is also longer than this separator
|----------- ---------------------------------
|**Why do we use it?**
|
|There-are It is a long established fact that a reader will be
| distracted by the readable content of a page when looking at
|
|**Where can I get some?**
|
|dummy It uses a dictionary of over
| Lorem Ipsum which looks reasonable
|
|text The generated Lorem Ipsum is
|
|printing or non-characteristic words etc
|
|**Where does it come from?**
|
|leap-into It uses a dictionary of over 200
| you need to be sure there
|
|variations-join anything embarrassing hidden
| you need to be sure there isn't
| within this period
|
|**What is Lorem Ipsum?**
|
|Lorem "There are many variations of passages.
| *randomised words which : 1597 z*
|
|anything but the majority have suffered alteration.
| *to use a passage: "" (empty string)*
|--------------------------------------------------------------------------------
|Table: This is a table caption\\label{table:table_lable_name}""".stripMargin
val parser = new MultilineTablesRulesTestSpec(term)
parser.multiTable.run().get shouldEqual MultilineTableBlock(
Vector(25.0f, 75.0f),
Some(MultilineTableCaption(Vector(Markdown(RawMarkdownContent("This is a table caption"))), Some("table:table_lable_name"))),
Some(Vector(
MultilineTableCell(Vector(Markdown(RawMarkdownContent("This header is longer than sep")))),
MultilineTableCell(Vector(Markdown(RawMarkdownContent("And this header is also longer than this separator")))))),
Vector(
Vector(
MultilineTableCell(Vector(Markdown(RawMarkdownContent("**Why do we use it?**")))),
MultilineTableCell(Vector(Markdown(RawMarkdownContent(
"""There-are
|""".stripMargin)))),
MultilineTableCell(Vector(Markdown(RawMarkdownContent("**Where can I get some?**")))),
MultilineTableCell(Vector(Markdown(RawMarkdownContent(
"""dummy
|""".stripMargin)))),
MultilineTableCell(Vector(Markdown(RawMarkdownContent("text")))),
MultilineTableCell(Vector(Markdown(RawMarkdownContent("printing")))),
MultilineTableCell(Vector(Markdown(RawMarkdownContent("**Where does it come from?**")))),
MultilineTableCell(Vector(Markdown(RawMarkdownContent(
"""leap-into
|""".stripMargin)))),
MultilineTableCell(Vector(Markdown(RawMarkdownContent(
"""variations-join
|
|""".stripMargin)))),
MultilineTableCell(Vector(Markdown(RawMarkdownContent("**What is Lorem Ipsum?**")))),
MultilineTableCell(Vector(Markdown(RawMarkdownContent(
"""Lorem
|""".stripMargin)))),
MultilineTableCell(Vector(Markdown(RawMarkdownContent(
"""anything
|""".stripMargin))))),
Vector(
MultilineTableCell(Vector(Markdown(RawMarkdownContent("")))),
MultilineTableCell(Vector(Markdown(RawMarkdownContent(
"""It is a long established fact that a reader will be
|distracted by the readable content of a page when looking at""".stripMargin)))),
MultilineTableCell(Vector(Markdown(RawMarkdownContent("")))),
MultilineTableCell(Vector(Markdown(RawMarkdownContent(
"""It uses a dictionary of over
|Lorem Ipsum which looks reasonable""".stripMargin)))),
MultilineTableCell(Vector(Markdown(RawMarkdownContent("The generated Lorem Ipsum is")))),
MultilineTableCell(Vector(Markdown(RawMarkdownContent("or non-characteristic words etc")))),
MultilineTableCell(Vector(Markdown(RawMarkdownContent("")))),
MultilineTableCell(Vector(Markdown(RawMarkdownContent(
"""It uses a dictionary of over 200
|you need to be sure there""".stripMargin)))),
MultilineTableCell(Vector(Markdown(RawMarkdownContent(
"""anything embarrassing hidden
|you need to be sure there isn't
|within this period""".stripMargin)))),
MultilineTableCell(Vector(Markdown(RawMarkdownContent("")))),
MultilineTableCell(Vector(Markdown(RawMarkdownContent(
""""There are many variations of passages.
|*randomised words which : 1597 z*""".stripMargin)))),
MultilineTableCell(Vector(Markdown(RawMarkdownContent(
"""but the majority have suffered alteration.
|*to use a passage: "" (empty string)*""".stripMargin)))))))
}
}
|
DRouh/mdpeg
|
src/test/scala/MultilineTablesRulesSpec.scala
|
Scala
|
apache-2.0
| 17,881 |
package actors
import actors.LunchbotActor.OutboundMessage
import akka.testkit.TestKitBase
import model.Statuses
import model.Statuses.Status
import org.scalatest.{Assertion, MustMatchers}
import scala.concurrent.duration._
import scala.reflect.ClassTag
trait MessageAssertions {
self: TestKitBase
with MustMatchers =>
def expectSuccess[T <: OutboundMessage: ClassTag]: Assertion = expectStatus[T](Statuses.Success)
def expectFailure[T <: OutboundMessage: ClassTag]: Assertion = expectStatus[T](Statuses.Failure)
private def expectStatus[T <: OutboundMessage: ClassTag](expected: Status): Assertion = {
expectMsgPF(5 seconds) {
case out: T => out.status must be(expected)
}
}
}
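// A minimal usage sketch (the actor and message types are hypothetical) from
// a test that mixes in TestKitBase with MustMatchers:
//
//   lunchbot ! SomeCommand(...)
//   expectSuccess[SomeReply]   // waits up to 5 seconds and asserts that
//                              // the reply's status is Statuses.Success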
|
mturlo/lunchbot
|
src/test/scala/actors/MessageAssertions.scala
|
Scala
|
mit
| 714 |
/*
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.bijection.avro
import com.twitter.bijection.{BaseProperties, CheckProperties, Injection}
import org.apache.avro.Schema
import org.apache.avro.generic.{GenericData, GenericRecord}
/**
* @author
* Muhammad Ashraf
* @since 7/5/13
*/
class GenericAvroCodecLaws extends CheckProperties with BaseProperties {
val testSchema = new Schema.Parser().parse("""{
"type":"record",
"name":"FiscalRecord",
"namespace":"avro",
"fields":[
{
"name":"calendarDate",
"type":"string"
},
{
"name":"fiscalWeek",
"type":[
"int",
"null"
]
},
{
"name":"fiscalYear",
"type":[
"int",
"null"
]
}
]
}""")
def buildGenericAvroRecord(i: (String, Int, Int)): GenericRecord = {
val fiscalRecord = new GenericData.Record(testSchema)
fiscalRecord.put("calendarDate", i._1)
fiscalRecord.put("fiscalWeek", i._2)
fiscalRecord.put("fiscalYear", i._3)
fiscalRecord
}
implicit val testGenericRecord = arbitraryViaFn { is: (String, Int, Int) =>
buildGenericAvroRecord(is)
}
def roundTripsGenericRecord(implicit injection: Injection[GenericRecord, Array[Byte]]) = {
isLooseInjection[GenericRecord, Array[Byte]]
}
def roundTripsGenericRecordToJson(implicit injection: Injection[GenericRecord, String]) = {
isLooseInjection[GenericRecord, String]
}
property("round trips Generic Record -> Array[Byte]") {
roundTripsGenericRecord(GenericAvroCodecs[GenericRecord](testSchema))
}
property("round trips Generic Record -> Array[Byte] using Binary Encoder/Decoder") {
roundTripsGenericRecord(GenericAvroCodecs.toBinary[GenericRecord](testSchema))
}
property("round trips Generic Record -> String using Json Encoder/Decoder") {
roundTripsGenericRecordToJson(GenericAvroCodecs.toJson[GenericRecord](testSchema))
}
}
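// A minimal sketch of exercising the codec outside the property checks
// (assumes the `testSchema` defined above and Injection's apply/invert API):
//
//   val codec = GenericAvroCodecs[GenericRecord](testSchema)
//   val bytes: Array[Byte] = codec(buildGenericAvroRecord(("2013-01-01", 1, 2013)))
//   val back: scala.util.Try[GenericRecord] = codec.invert(bytes)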
|
twitter/bijection
|
bijection-avro/src/test/scala/com/twitter/bijection/avro/GenericAvroCodecLaws.scala
|
Scala
|
apache-2.0
| 3,625 |
package reactivemongo.api.commands.bson
import reactivemongo.api.commands.{
MongodProcess,
ServerProcess,
ServerStatus,
ServerStatusAsserts,
ServerStatusBackgroundFlushing,
ServerStatusConnections,
ServerStatusExtraInfo,
ServerStatusGlobalLock,
ServerStatusJournaling,
ServerStatusJournalingTime,
ServerStatusResult,
ServerStatusLock,
ServerStatusNetwork
}
import reactivemongo.bson.{
BSONDocument,
BSONDocumentReader,
BSONDocumentWriter,
BSONNumberLike
}
@deprecated("Internal: will be made private", "0.16.0")
object BSONServerStatusImplicits {
object BSONServerStatusWriter
extends BSONDocumentWriter[ServerStatus.type] {
val bsonCmd = BSONDocument("serverStatus" -> 1)
def write(command: ServerStatus.type) = bsonCmd
}
object BSONServerStatusAssertsReader
extends BSONDocumentReader[ServerStatusAsserts] {
def read(doc: BSONDocument): ServerStatusAsserts = (for {
regular <- doc.getAsTry[BSONNumberLike]("regular").map(_.toInt)
warning <- doc.getAsTry[BSONNumberLike]("warning").map(_.toInt)
msg <- doc.getAsTry[BSONNumberLike]("msg").map(_.toInt)
user <- doc.getAsTry[BSONNumberLike]("user").map(_.toInt)
rollovers <- doc.getAsTry[BSONNumberLike]("rollovers").map(_.toInt)
} yield ServerStatusAsserts(regular, warning, msg, user, rollovers)).get
}
private implicit def assertsReader = BSONServerStatusAssertsReader
object BSONServerStatusBackgroundFlushingReader
extends BSONDocumentReader[ServerStatusBackgroundFlushing] {
def read(doc: BSONDocument): ServerStatusBackgroundFlushing = (for {
flushes <- doc.getAsTry[BSONNumberLike]("flushes").map(_.toInt)
totalMs <- doc.getAsTry[BSONNumberLike]("total_ms").map(_.toLong)
averageMs <- doc.getAsTry[BSONNumberLike]("average_ms").map(_.toLong)
lastMs <- doc.getAsTry[BSONNumberLike]("last_ms").map(_.toLong)
lastFinished <- doc.getAsTry[BSONNumberLike](
"last_finished").map(_.toLong)
} yield ServerStatusBackgroundFlushing(
flushes, totalMs, averageMs, lastMs, lastFinished)).get
}
private implicit def bgFlushingReader =
BSONServerStatusBackgroundFlushingReader
object BSONServerStatusConnections
extends BSONDocumentReader[ServerStatusConnections] {
def read(doc: BSONDocument): ServerStatusConnections = (for {
current <- doc.getAsTry[BSONNumberLike]("current").map(_.toInt)
available <- doc.getAsTry[BSONNumberLike]("available").map(_.toInt)
totalCreated <- doc.getAsTry[BSONNumberLike]("totalCreated").map(_.toLong)
} yield ServerStatusConnections(current, available, totalCreated)).get
}
private implicit def connectionsReader = BSONServerStatusConnections
object BSONServerStatusJournalingTime
extends BSONDocumentReader[ServerStatusJournalingTime] {
def read(doc: BSONDocument): ServerStatusJournalingTime = (for {
dt <- doc.getAsTry[BSONNumberLike]("dt").map(_.toLong)
prepLogBuffer <- doc.getAsTry[BSONNumberLike](
"prepLogBuffer").map(_.toLong)
writeToJournal <- doc.getAsTry[BSONNumberLike](
"writeToJournal").map(_.toLong)
writeToDataFiles <- doc.getAsTry[BSONNumberLike](
"writeToDataFiles").map(_.toLong)
remapPrivateView <- doc.getAsTry[BSONNumberLike](
"remapPrivateView").map(_.toLong)
commits <- doc.getAsTry[BSONNumberLike]("commits").map(_.toLong)
commitsInWriteLock <- doc.getAsTry[BSONNumberLike](
"commitsInWriteLock").map(_.toLong)
} yield ServerStatusJournalingTime(dt, prepLogBuffer, writeToJournal,
writeToDataFiles, remapPrivateView, commits, commitsInWriteLock)).get
}
private implicit def timeReader = BSONServerStatusJournalingTime
object BSONServerStatusJournaling
extends BSONDocumentReader[ServerStatusJournaling] {
def read(doc: BSONDocument): ServerStatusJournaling = (for {
commits <- doc.getAsTry[BSONNumberLike]("commits").map(_.toInt)
journaledMB <- doc.getAsTry[BSONNumberLike]("journaledMB").map(_.toDouble)
writeToDataFilesMB <- doc.getAsTry[BSONNumberLike](
"writeToDataFilesMB").map(_.toDouble)
compression <- doc.getAsTry[BSONNumberLike]("compression").map(_.toDouble)
commitsInWriteLock <- doc.getAsTry[BSONNumberLike](
"commitsInWriteLock").map(_.toInt)
earlyCommits <- doc.getAsTry[BSONNumberLike]("earlyCommits").map(_.toInt)
timeMs <- doc.getAsTry[ServerStatusJournalingTime]("timeMS")
} yield ServerStatusJournaling(commits, journaledMB, writeToDataFilesMB,
compression, commitsInWriteLock, earlyCommits, timeMs)).get
}
private implicit def journalingReader = BSONServerStatusJournaling
object BSONServerStatusNetwork
extends BSONDocumentReader[ServerStatusNetwork] {
def read(doc: BSONDocument): ServerStatusNetwork = (for {
bytesIn <- doc.getAsTry[BSONNumberLike]("bytesIn").map(_.toInt)
bytesOut <- doc.getAsTry[BSONNumberLike]("bytesOut").map(_.toInt)
numRequests <- doc.getAsTry[BSONNumberLike]("numRequests").map(_.toInt)
} yield ServerStatusNetwork(bytesIn, bytesOut, numRequests)).get
}
private implicit def networkReader = BSONServerStatusNetwork
object BSONServerStatusLock
extends BSONDocumentReader[ServerStatusLock] {
def read(doc: BSONDocument): ServerStatusLock = (for {
total <- doc.getAsTry[BSONNumberLike]("total").map(_.toInt)
readers <- doc.getAsTry[BSONNumberLike]("readers").map(_.toInt)
writers <- doc.getAsTry[BSONNumberLike]("writers").map(_.toInt)
} yield ServerStatusLock(total, readers, writers)).get
}
private implicit def statusLockReader = BSONServerStatusLock
object BSONServerStatusGlobalLock
extends BSONDocumentReader[ServerStatusGlobalLock] {
def read(doc: BSONDocument): ServerStatusGlobalLock = (for {
totalTime <- doc.getAsTry[BSONNumberLike]("totalTime").map(_.toInt)
currentQueue <- doc.getAsTry[ServerStatusLock]("currentQueue")
activeClients <- doc.getAsTry[ServerStatusLock]("activeClients")
} yield ServerStatusGlobalLock(totalTime, currentQueue, activeClients)).get
}
private implicit def globalLockReader = BSONServerStatusGlobalLock
object BSONServerStatusExtraInfo
extends BSONDocumentReader[ServerStatusExtraInfo] {
def read(doc: BSONDocument): ServerStatusExtraInfo = (for {
heapUsageBytes <- doc.getAsTry[BSONNumberLike](
"heap_usage_bytes").map(_.toInt)
pageFaults <- doc.getAsTry[BSONNumberLike]("page_faults").map(_.toInt)
} yield ServerStatusExtraInfo(heapUsageBytes, pageFaults)).get
}
private implicit def extraReader = BSONServerStatusExtraInfo
object BSONServerStatusResultReader
extends DealingWithGenericCommandErrorsReader[ServerStatusResult] {
def readResult(doc: BSONDocument): ServerStatusResult = (for {
host <- doc.getAsTry[String]("host")
version <- doc.getAsTry[String]("version")
process <- doc.getAsTry[String]("process").map[ServerProcess] {
ServerProcess.unapply(_).getOrElse(MongodProcess)
}
pid <- doc.getAsTry[BSONNumberLike]("pid").map(_.toLong)
uptime <- doc.getAsTry[BSONNumberLike]("uptime").map(_.toLong)
uptimeMillis <- doc.getAsTry[BSONNumberLike]("uptimeMillis").map(_.toLong)
uptimeEstimate <- doc.getAsTry[BSONNumberLike](
"uptimeEstimate").map(_.toLong)
localTime <- doc.getAsTry[BSONNumberLike]("localTime").map(_.toLong)
advisoryHostFQDNs = doc.getAs[List[String]](
"advisoryHostFQDNs").toList.flatten
asserts <- doc.getAsTry[ServerStatusAsserts]("asserts")
backgroundFlushing = doc.getAs[ServerStatusBackgroundFlushing](
"backgroundFlushing")
connections <- doc.getAsTry[ServerStatusConnections]("connections")
dur = doc.getAs[ServerStatusJournaling]("dur")
extraInfo = doc.getAs[ServerStatusExtraInfo]("extra_info")
globalLock <- doc.getAsTry[ServerStatusGlobalLock]("globalLock")
network <- doc.getAsTry[ServerStatusNetwork]("network")
} yield ServerStatusResult(host, version, process, pid,
uptime, uptimeMillis, uptimeEstimate, localTime, advisoryHostFQDNs,
asserts, backgroundFlushing, connections, dur, extraInfo,
globalLock, network)).get
}
}
|
cchantep/ReactiveMongo
|
driver/src/main/scala/api/commands/bson/serverstatus.scala
|
Scala
|
apache-2.0
| 8,276 |
package scala.slick.examples.test
class PlainSQLTest extends RecordedDoctest {
def run = scala.slick.examples.jdbc.PlainSQL.main(null)
}
|
boldradius/slick
|
slick-testkit/src/doctest/scala/PlainSQLTest.scala
|
Scala
|
bsd-2-clause
| 140 |
package com.typesafe.slick.testkit.tests
import scala.language.higherKinds
import com.typesafe.slick.testkit.util.{RelationalTestDB, AsyncTest}
class RelationalMiscTest extends AsyncTest[RelationalTestDB] {
import tdb.profile.api._
def isNotAndOrTest = {
class T(tag: Tag) extends Table[(String, String)](tag, "users") {
def a = column[String]("a")
def b = column[String]("b")
def * = (a, b)
}
val ts = TableQuery[T]
for {
_ <- ts.schema.create
_ <- ts ++= Seq(("1", "a"), ("2", "a"), ("3", "b"))
q1 = for(t <- ts if t.a === "1" || t.a === "2") yield t
_ <- q1.result.map(r => r.toSet shouldBe Set(("1", "a"), ("2", "a")))
q2 = for(t <- ts if (t.a =!= "1") || (t.b =!= "a")) yield t
_ <- q2.result.map(r => r.toSet shouldBe Set(("2", "a"), ("3", "b")))
// No need to test that the unexpected result is actually unexpected
// now that the compiler prints a warning about it
q4 = for(t <- ts if t.a =!= "1" || t.b =!= "a") yield t
_ <- q4.result.map(r => r.toSet shouldBe Set(("2", "a"), ("3", "b")))
} yield ()
}
def testLike = {
class T1(tag: Tag) extends Table[String](tag, "t1_2") {
def a = column[String]("a")
def * = a
}
val t1s = TableQuery[T1]
for {
_ <- t1s.schema.create
_ <- t1s ++= Seq("foo", "bar", "foobar", "foo%")
q1 = for { t1 <- t1s if t1.a like "foo" } yield t1.a
_ <- q1.result.map(_ shouldBe List("foo"))
q2 = for { t1 <- t1s if t1.a like "foo%" } yield t1.a
_ <- q2.to[Set].result.map(_ shouldBe Set("foo", "foobar", "foo%"))
_ <- ifCap(rcap.likeEscape) {
val q3 = for { t1 <- t1s if t1.a.like("foo^%", '^') } yield t1.a
q3.result.map(_ shouldBe List("foo%"))
}
} yield ()
}
def testSorting = {
import slick.lifted.{Shape, Ordered}
class T1(tag: Tag) extends Table[(String, String, String)](tag, "t1_3") {
def a = column[String]("a")
def b = column[String]("b")
def c = column[String]("c")
def * = (a, b, c)
}
val t1s = TableQuery[T1]
implicit class TupledQueryExtensionMethods[E1, E2, U1, U2, C[_]](q: Query[(E1, E2), (U1, U2), C]) {
def sortedValues(implicit ordered: (E1 => Ordered),
shape: Shape[FlatShapeLevel, E2, U2, E2]): Query[E2, U2, C] =
q.sortBy(_._1).map(_._2)
}
for {
_ <- t1s.schema.create
_ <- t1s ++= Seq(("a2", "b2", "c2"), ("a1", "b1", "c1"))
q1 = (for {
t1 <- t1s
} yield t1.c -> (t1.a, t1.b)).sortedValues
_ <- q1.result.map(_ shouldBe List(("a1", "b1"), ("a2", "b2")))
} yield ()
}
def testConditional = {
class T1(tag: Tag) extends Table[(Int, Option[Int])](tag, "t1_conditional") {
def a = column[Int]("a")
def b = column[Option[Int]]("b")
def * = (a, b)
}
val t1s = TableQuery[T1]
for {
_ <- t1s.schema.create
_ <- t1s ++= Seq((1, Some(11)), (2, None), (3, Some(33)), (4, None))
q1 = t1s.map { t1 => (t1.a, Case.If(t1.a < 3) Then 1 Else 0) }
_ <- q1.to[Set].result.map(_ shouldBe Set((1, 1), (2, 1), (3, 0), (4, 0)))
q2 = t1s.map { t1 => (t1.a, Case.If(t1.a < 3) Then 1) }
_ <- q2.to[Set].result.map(_ shouldBe Set((1, Some(1)), (2, Some(1)), (3, None), (4, None)))
q3 = t1s.map { t1 => (t1.a, Case.If(t1.a < 3) Then 1 If(t1.a < 4) Then 2 Else 0) }
_ <- q3.to[Set].result.map(_ shouldBe Set((1, 1), (2, 1), (3, 2), (4, 0)))
q4 = t1s.map { t1 => Case.If(t1.a < 3) Then t1.b Else t1.a.? }.to[Set]
_ <- mark("q4", q4.result).map(_ shouldBe Set(Some(11), None, Some(3), Some(4)))
} yield ()
}
def testCast = {
class T1(tag: Tag) extends Table[(String, Int, Double)](tag, "t1_4") {
def a = column[String]("a")
def b = column[Int]("b")
def c = column[Double]("c")
def * = (a, b, c)
}
val t1s = TableQuery[T1]
for {
_ <- t1s.schema.create
_ <- t1s ++= Seq(("foo", 1, 2.0), ("bar", 2, 2.0))
q1 = t1s.map(t1 => t1.a ++ t1.b.asColumnOf[String])
_ <- q1.to[Set].result.map(_ shouldBe Set("foo1", "bar2"))
q2 = t1s.map(t1 => t1.c * t1.b.asColumnOf[Double])
_ <- q2.to[Set].result.map(_ shouldBe Set(2.0, 4.0))
} yield ()
}
def testOptionConversions = {
class T1(tag: Tag) extends Table[(Int, Option[Int])](tag, "t1_optconv") {
def a = column[Int]("a")
def b = column[Option[Int]]("b")
def * = (a, b)
}
val t1s = TableQuery[T1]
for {
_ <- t1s.schema.create
_ <- t1s ++= Seq((1, Some(10)), (2, None))
// GetOrElse in ResultSetMapping on client side
q1 = for { t <- t1s } yield (t.a, t.b.getOrElse(0))
_ <- q1.result.map(r => r.toSet shouldBe Set((1, 10), (2, 0)))
// GetOrElse in query on the DB side
q2 = for { t <- t1s } yield (t.a, t.b.getOrElse(0) + 1)
_ <- q2.result.map(r => r.toSet shouldBe Set((1, 11), (2, 1)))
} yield ()
}
def testInitErrors = {
case class Id(toInt: Int)
case class Customer(id: Id)
// Before making `shaped` and `toNode` in `TableQuery` lazy,
// putting `Tables` before `A` caused a StackOverflowException
object Tables {
val as = TableQuery[A]
implicit val idMapper = MappedColumnType.base[Id, Int](_.toInt, Id)
}
class A(tag: Tag) extends Table[Customer](tag, "INIT_A") {
def id = column[Id]("ID", O.PrimaryKey, O.AutoInc)(Tables.idMapper)
import Tables.idMapper
def * = id.mapTo[Customer]
}
Tables.as.schema
case class Id2(toInt: Int)
implicit val id2Mapper = null.asInstanceOf[BaseColumnType[Id2]]
class B(tag: Tag) extends Table[Id2](tag, "INIT_A") {
def id = column[Id2]("ID", O.PrimaryKey, O.AutoInc)
def * = id
}
val bs = TableQuery[B]
try {
bs.map(_.id)
bs.schema
???
} catch {
case t: NullPointerException if (t.getMessage ne null) && (t.getMessage contains "initialization order") =>
// This is the expected error message from RelationalTableComponent.Table.column
}
try {
MappedColumnType.base[Id, Int](_.toInt, Id)(implicitly, null.asInstanceOf[BaseColumnType[Int]])
???
} catch {
case t: NullPointerException if (t.getMessage ne null) && (t.getMessage contains "initialization order") =>
// This is the expected error message from RelationalTypesComponent.MappedColumnTypeFactory.assertNonNullType
}
DBIO.successful(())
}
}
|
kwark/slick
|
slick-testkit/src/main/scala/com/typesafe/slick/testkit/tests/RelationalMiscTest.scala
|
Scala
|
bsd-2-clause
| 6,537 |
package scala
package reflect
package internal
package tpe
private[internal] trait CommonOwners {
self: SymbolTable =>
/** The most deeply nested owner that contains all the symbols
* of thistype or prefixless typerefs/singletype occurrences in given type.
*/
protected[internal] def commonOwner(t: Type): Symbol = commonOwner(t :: Nil)
/** The most deeply nested owner that contains all the symbols
   *  of `ThisType` or prefixless `TypeRef`/`SingleType` occurrences in the given
   *  list of types.
*/
protected[internal] def commonOwner(tps: List[Type]): Symbol = {
if (tps.isEmpty) NoSymbol
else {
commonOwnerMap.clear()
tps foreach (commonOwnerMap traverse _)
if (commonOwnerMap.result ne null) commonOwnerMap.result else NoSymbol
}
}
protected def commonOwnerMap: CommonOwnerMap = commonOwnerMapObj
protected class CommonOwnerMap extends TypeTraverserWithResult[Symbol] {
var result: Symbol = _
def clear() { result = null }
private def register(sym: Symbol) {
// First considered type is the trivial result.
if ((result eq null) || (sym eq NoSymbol))
result = sym
else
while ((result ne NoSymbol) && (result ne sym) && !(sym isNestedIn result))
result = result.owner
}
def traverse(tp: Type) = tp.normalize match {
case ThisType(sym) => register(sym)
case TypeRef(NoPrefix, sym, args) => register(sym.owner) ; args foreach traverse
case SingleType(NoPrefix, sym) => register(sym.owner)
case _ => mapOver(tp)
}
}
private lazy val commonOwnerMapObj = new CommonOwnerMap
}
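// Worked example (illustrative only): given symbols p.a.X and p.b.Y,
// registering X first sets result = X; registering Y then climbs X's owner
// chain (X -> a -> p) until Y isNestedIn result, so commonOwner yields `p`.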
|
felixmulder/scala
|
src/reflect/scala/reflect/internal/tpe/CommonOwners.scala
|
Scala
|
bsd-3-clause
| 1,680 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.sumologic.elasticsearch.restlastic
import akka.util.Timeout
import com.sumologic.elasticsearch.restlastic.dsl.Dsl._
import org.junit.runner.RunWith
import scala.concurrent.duration._
import org.scalatest._
import org.scalatestplus.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class RestlasticSearchClient2Test extends WordSpec with Matchers with BeforeAndAfterAll
with ElasticsearchIntegrationTest with OneInstancePerTest with RestlasticSearchClientTest {
override val restClient = RestlasticSearchClient2Test.restClient
val indices = createIndices(3)
"RestlasticSearchClient2" should {
behave like restlasticClient(
restClient,
IndexName,
indices,
StringType,
BasicFieldMapping(StringType, None, Some(Name("not_analyzed")), ignoreAbove = None, None))
}
}
object RestlasticSearchClient2Test {
val restClient = {
val endpointProvider = new EndpointProvider {
override def endpoint: Endpoint = Endpoint("127.0.0.1", 9200)
override def ready: Boolean = true
}
new RestlasticSearchClient2(endpointProvider)(timeout = Timeout(30 seconds))
}
}
|
SumoLogic/elasticsearch-client
|
elasticsearch-core/src/test/scala/com/sumologic/elasticsearch/restlastic/RestlasticSearchClient2Test.scala
|
Scala
|
apache-2.0
| 1,941 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst
object CurrentUserContext {
val CURRENT_USER: InheritableThreadLocal[String] = new InheritableThreadLocal[String] {
override protected def initialValue(): String = null
}
}
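// Usage sketch (not part of the Spark sources): InheritableThreadLocal makes
// the value visible to threads spawned after it is set, unlike a plain
// ThreadLocal.
//   CurrentUserContext.CURRENT.set("alice")
//   new Thread(() => assert(CurrentUserContext.CURRENT.get == "alice")).start()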
|
mahak/spark
|
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CurrentUserContext.scala
|
Scala
|
apache-2.0
| 1,022 |
package provingground.interface
import provingground.{Context, _}
import translation._
import Translator.unmatched
import upickle.default._
import cats._
import cats.implicits._
import provingground.induction.{ExstInducDefn, ExstInducStrucs}
import provingground.scalahott.NatRing
import scala.util.matching.Regex
trait JsFunc[F[_]] {
def encode(t: F[ujson.Value]): ujson.Value
def decode(js: ujson.Value): F[ujson.Value]
}
object JsFunc {
implicit val idJS: JsFunc[Id] = new JsFunc[Id] {
def encode(t: ujson.Value): ujson.Value = t
def decode(js: ujson.Value): ujson.Value = js
}
import Functors._
implicit val intJs: JsFunc[N] = new JsFunc[N] {
def encode(t: Int) = ujson.Num(t.toDouble)
def decode(js: ujson.Value): Int = js.num.toInt
}
implicit val strJs: JsFunc[S] = new JsFunc[S] {
def encode(t: String) = ujson.Str(t)
def decode(js: ujson.Value): String = js.str
}
implicit val unitJs: JsFunc[Un] = new JsFunc[Un] {
def encode(t: Unit): ujson.Null.type = ujson.Null
def decode(js: ujson.Value): Unit = ()
}
implicit val vecJs: JsFunc[Vector] = new JsFunc[Vector] {
def encode(t: Vector[ujson.Value]) = ujson.Arr(t: _*)
def decode(js: ujson.Value): Vector[ujson.Value] = js.arr.toVector
}
implicit def pairJS[X[_], Y[_]](
implicit xJs: JsFunc[X],
yJs: JsFunc[Y]
): JsFunc[({ type Z[A] = (X[A], Y[A]) })#Z] =
new JsFunc[({ type Z[A] = (X[A], Y[A]) })#Z] {
def encode(t: (X[ujson.Value], Y[ujson.Value])) =
ujson.Obj("first" -> xJs.encode(t._1), "second" -> yJs.encode(t._2))
def decode(js: ujson.Value): (X[ujson.Value], Y[ujson.Value]) =
(xJs.decode(js("first")), yJs.decode(js("second")))
}
import Translator._
def toJs[I, F[_]](pat: Pattern[I, F])(name: String, header: String = "intro")(
implicit jsF: JsFunc[F]
): Translator[I, ujson.Value] =
pat >>> { (js) =>
ujson.Obj(header -> ujson.Str(name), "tree" -> jsF.encode(js))
}
def jsToOpt[I, F[_]: Traverse](name: String, header: String = "intro")(
build: F[I] => Option[I]
)(implicit jsF: JsFunc[F]): Translator[ujson.Value, I] = {
val pat = Pattern[ujson.Value, F] { (js) =>
if (js(header) == ujson.Str(name)) Some(jsF.decode(js("tree"))) else None
}
pat >> build
}
def jsToBuild[I, F[_]: Traverse](name: String, header: String = "intro")(
build: F[I] => I
)(implicit jsF: JsFunc[F]): Translator[ujson.Value, I] = {
val pat = Pattern[ujson.Value, F] { (js) =>
if (js(header) == ujson.Str(name)) Some(jsF.decode(js("tree"))) else None
}
pat >>> build
}
}
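// Usage sketch (illustrative): the Vector instance round-trips through ujson.
//   import JsFunc._
//   val js   = vecJs.encode(Vector(ujson.Num(1), ujson.Num(2))) // ujson.Arr(1, 2)
//   val back = vecJs.decode(js)                                 // the same Vector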
import TermPatterns._
import HoTT._
import Functors._
object TermJson {
import JsFunc._
implicit val travNamed: Traverse[Named] = traversePair[S, Id]
val termToJson: Translator.OrElse[Term, ujson.Value] =
toJs(universe)("universe") ||
toJs(formalAppln)("appln") ||
toJs(lambdaTriple)("lambda") ||
toJs(sigmaTriple)("sigma") ||
toJs(piTriple)("pi") ||
toJs(prodTyp)("product-type") ||
toJs(absPair)("pair") ||
toJs(plusTyp)("plus-type") ||
toJs(funcTyp)("func-type") ||
toJs(star)("star") ||
toJs(unit)("unit-type") ||
toJs(zero)("zero-type") ||
toJs(prop)("prop-universe") ||
toJs(introIndInducFunc)("intro-indexed-induc-func") ||
toJs(introIndRecFunc)("intro-indexed-rec-func") ||
toJs(introInducFunc)("intro-induc-func") ||
toJs(introRecFunc)("intro-rec-func") ||
toJs(indInducFunc)("indexed-inductive-function") ||
toJs(indRecFunc)("indexed-recursive-function") ||
toJs(recFunc)("recursive-function") ||
toJs(inducFunc)("inductive-function") ||
toJs(hashSymbolic)("symbolic") ||
toJs(mereWitness)("witness") ||
toJs(firstIncl)("first-inclusion") ||
toJs(secondIncl)("second-inclusion") ||
toJs(identityTyp)("equality") ||
toJs(refl)("reflexivity") ||
toJs(natTyp)("nat-type") ||
toJs(natUniv)("nat-univ") ||
toJs(natZero)("nat-zero") ||
toJs(natSucc)("nat-succ") ||
toJs(natSum)("nat-sum") ||
toJs(natProd)("nat-prod") ||
toJs(natLiteral)("nat-literal") ||
toJs(natAddMorph)("nat-additive-morphism") ||
toJs(foldedTerm)("folded-term") ||
toJs(miscAppln)("appln")
  def termToJsonGet(t: Term): ujson.Value =
termToJson(t).getOrElse(throw new Exception(s"cannot serialize term $t"))
def fdJson(fd: FiniteDistribution[Term]): ujson.Arr = {
val pmf = for {
Weighted(elem, p) <- fd.pmf
tjs <- termToJson(elem)
} yield ujson.Obj("term" -> tjs, "weight" -> ujson.Num(p))
ujson.Arr(pmf: _*)
}
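  // Note: the for-comprehension above silently drops any term that fails to
  // serialize, so the emitted array can have fewer entries than fd.pmf. The
  // wire shape is [{"term": <tree>, "weight": <num>}, ...].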
import induction._
import TermLang.applyAll
val exstInduc = ExstInducStrucs.Base || NatRing
def jsonToTerm(
inds: Typ[Term] => Option[ConstructorSeqTL[_, Term, _]] = (_) => None,
indexedInds: Term => Option[IndexedConstructorSeqDom[_, Term, _, _, _]] =
(_) => None
): Translator.OrElse[ujson.Value, Term] =
jsonToTermBase ||
jsToOpt[Term, VIVIIV]("intro-indexed-rec-func") {
case (intros, (u, (w, (x, (y, v))))) =>
(buildIndRecDef()(w, (x, (y, v))))
.orElse(applyAll(ExstInducStrucs.getIndexed(x, w).recOpt(x, toTyp(y)), v))
} ||
jsToOpt[Term, VIVIIV]("intro-indexed-induc-func") {
case (intros, (u, (w, (x, (y, v))))) =>
(buildIndIndDef()(w, (x, (y, v))))
.orElse(applyAll(ExstInducStrucs.getIndexed(x, w).inducOpt(x, y), v))
} ||
jsToOpt[Term, VIIV]("intro-rec-func") {
case (w, (x, (y, v))) =>
(buildRecDef()(x, (y, v)))
.orElse(applyAll(exstInduc.recOpt(x, toTyp(y)), v))
.orElse(applyAll(ExstInducStrucs.get(x, w).recOpt(x, toTyp(y)), v))
} ||
jsToOpt[Term, VIIV]("intro-induc-func") {
case (w, (x, (y, v))) =>
(buildIndDef()(x, (y, v)))
.orElse {
val func = exstInduc.inducOpt(x, y)
applyAll(func, v)
}
.orElse {
val func = ExstInducStrucs.get(x, w).inducOpt(x, y)
applyAll(func, v)
}
} ||
jsToOpt[Term, IIV]("recursive-function") {
case (x, (y, v)) =>
buildRecDef(inds)(x, (y, v))
} ||
jsToOpt[Term, IIV]("inductive-function") {
case (x, (y, v)) => buildIndDef(inds)(x, (y, v))
} ||
jsToOpt[Term, IVIIV]("indexed-recursive-function") {
case (u, (w, (x, (y, v)))) =>
buildIndRecDef(indexedInds)(w, (x, (y, v)))
} ||
jsToOpt[Term, IVIIV]("indexed-inductive-function") {
case (u, (w, (x, (y, v)))) =>
buildIndIndDef(indexedInds)(w, (x, (y, v)))
}
  implicit val termRW: ReadWriter[Term] = readwriter[ujson.Value].bimap(
term => termToJsonGet(term),
js => jsonToTerm()(js).get
)
  implicit val fdTermRW: ReadWriter[FiniteDistribution[Term]] = readwriter[Vector[(Term, Double)]].bimap(
fd => fd.pmf.map{case Weighted(elem, x) => (elem, x)},
v => FiniteDistribution(v.map{case (t, p) => Weighted(t, p)})
)
  implicit val typRW: ReadWriter[Typ[Term]] = termRW.bimap(
t => t,
t => toTyp(t)
)
  implicit val fdTypRW: ReadWriter[FiniteDistribution[Typ[Term]]] = fdTermRW.bimap(
fd => fd.map(t => t),
fd => fd.map(toTyp(_))
)
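  // Usage sketch (assumes upickle's default API, imported at the top of this file):
  //   val js   = write(t)        // t: Term, serialized via termToJsonGet
  //   val back = read[Term](js)  // throws if no decoder pattern matches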
def jsToTermExst(
exst: ExstInducStrucs
): Translator.OrElse[ujson.Value, Term] =
jsonToTermBase ||
jsToOpt[Term, IIV]("recursive-function") {
case (dom: Term, (cod: Term, data: Vector[Term])) =>
for {
codom <- typOpt(cod)
fn <- exst.recOpt(dom, codom)
} yield data.foldLeft(fn)(fold(_)(_))
//buildRecDef(inds)(x, (y, v))
} ||
jsToOpt[Term, IIV]("inductive-function") {
case (dom: Term, (cod: Term, data: Vector[Term])) =>
for {
fn <- exst.inducOpt(dom, cod)
} yield data.foldLeft(fn)(fold(_)(_))
} ||
jsToOpt[Term, IVIIV]("indexed-recursive-function") {
case (
_,
(index: Vector[Term], (dom: Term, (cod: Term, data: Vector[Term])))
) =>
for {
codom <- typOpt(cod)
fn <- exst.recOpt(dom, codom)
} yield
(data ++ index).foldLeft(fn)(fold(_)(_)) //buildIndRecDef(indexedInds)(w, (x, (y, v)))
} ||
jsToOpt[Term, IVIIV]("indexed-inductive-function") {
case (
_,
(index: Vector[Term], (dom: Term, (cod: Term, data: Vector[Term])))
) =>
for {
fn <- exst.inducOpt(dom, cod)
} yield (data ++ index).foldLeft(fn)(fold(_)(_))
//buildIndIndDef(indexedInds)(w, (x, (y, v)))
}
def jsToFD(
exst: ExstInducStrucs
)(js: ujson.Value): FiniteDistribution[Term] = {
val pmf =
js.arr.toVector.map { wp =>
Weighted(
jsToTermExst(exst)(wp.obj("term")).get,
wp.obj("weight").num
)
}
FiniteDistribution(pmf)
}
val jsonToTermBase: Translator.OrElse[ujson.Value, Term] =
jsToBuild[Term, N]("universe")((n) => Universe(n)) ||
jsToBuild[Term, II]("appln") { case (func, arg) => fold(func)(arg) } ||
jsToBuild[Term, III]("lambda") {
case ((variable, typ), value) => variable :~> value
} ||
jsToBuild[Term, III]("equality") {
case ((dom, lhs), rhs) => lhs =:= rhs
} ||
jsToBuild[Term, III]("pi") {
case ((variable, typ), value: Typ[u]) => variable ~>: value
case (x, y) => unmatched(x, y)
} ||
jsToBuild[Term, III]("sigma") {
case ((variable, typ), value: Typ[u]) => sigma(variable)(value)
case (x, y) => unmatched(x, y)
} ||
jsToBuild[Term, II]("product-type") {
case (x: Typ[u], y: Typ[v]) => ProdTyp(x, y)
case (x, y) => unmatched(x, y)
} ||
jsToBuild[Term, II]("plus-type") {
case (x: Typ[u], y: Typ[v]) => PlusTyp(x, y)
case (x, y) => unmatched(x, y)
} ||
jsToBuild[Term, II]("pair") { case (x, y) => mkPair(x, y) } ||
jsToBuild[Term, II]("func-type") {
case (x: Typ[u], y: Typ[v]) => FuncTyp(x, y)
case (x, y) => unmatched(x, y)
} ||
jsToBuild[Term, II]("reflexivity") {
case (dom: Typ[u], value: Term) => Refl(dom, value)
case (x, y) => unmatched(x, y)
} ||
jsToBuild[Term, IV]("folded-term") {
case (op, v) =>
v.reduce[Term] {
case (a: Term, b: Term) => applyFunc(applyFunc(op, a), b)
}
} ||
jsToBuild[Term, Un]("star") { (_) =>
Star
} ||
jsToBuild[Term, Un]("unit-type") { (_) =>
Unit
} ||
jsToBuild[Term, Un]("zero-type") { (_) =>
Zero
} ||
jsToBuild[Term, Un]("prop-universe") { (_) =>
Prop
} ||
jsToBuild[Term, Un]("nat-type") { (_) =>
NatRing.NatTyp
} ||
jsToBuild[Term, Un]("nat-univ") { (_) =>
NatRing.NatTyp.typ
} ||
jsToBuild[Term, Un]("nat-zero") { (_) =>
NatRing.zero
} ||
jsToBuild[Term, Un]("nat-succ") { (_) =>
NatRing.succ
} ||
jsToBuild[Term, Un]("nat-sum") { (_) =>
NatRing.sum
} ||
jsToBuild[Term, Un]("nat-prod") { (_) =>
NatRing.prod
} ||
jsToBuild[Term, N]("nat-literal") { (n) =>
NatRing.Literal(n)
} ||
jsToBuild[Term, II]("nat-additive-morphism") {
case (base, op) =>
NatRing.AdditiveMorphism(
base.asInstanceOf[Func[NatRing.Nat, NatRing.Nat]],
op.asInstanceOf[(NatRing.Nat, NatRing.Nat) => NatRing.Nat]
)
} ||
jsToBuild[Term, II]("first-inclusion") {
case (tp: PlusTyp[u, v], x) => tp.incl1(x.asInstanceOf[u])
case (x, y) => unmatched(x, y)
} ||
jsToBuild[Term, II]("second-inclusion") {
case (tp: PlusTyp[u, v], x) => tp.incl2(x.asInstanceOf[v])
case (x, y) => unmatched(x, y)
} ||
jsToOpt[Term, IIV]("recursive-function") {
case (a, (b, v)) =>
// println(s"building base recursive type $a codomain $b data $v")
val fn = buildRecDef()
fn(a, (b, v))
} ||
jsToOpt[Term, IIV]("inductive-function") {
case (a, (b, v)) =>
val fn = buildIndDef()
fn(a, (b, v))
} ||
jsToOpt[Term, IVIIV]("indexed-recursive-function") {
case (u, (w, (a, (b, v)))) =>
// println(s"building indexed recursive:\\n index $w,\\n type $a,\\n codomain $b,\\n data $v\\n\\n")
val fn = buildIndRecDef()
val res = fn(w, (a, (b, v)))
println(s"result: $res")
res
} ||
jsToOpt[Term, IVIIV]("indexed-inductive-function") {
case (u, (w, (a, (b, v)))) =>
// println(s"building indexed inductive:\\n index $w,\\n type $a,\\n codomain $b,\\n data $v\\n\\n")
val fn = buildIndIndDef()
val res = fn(w, (a, (b, v)))
// println(s"result: $res")
res
} ||
jsToBuild[Term, Named]("symbolic") {
case (name, tp: Typ[u]) => deHash(name) :: tp
case (x, y) => unmatched(x, y)
}(travNamed, implicitly[JsFunc[Named]]) ||
jsToBuild[Term, II]("Witness") {
case (tp, value) => toTyp(tp).symbObj(MereWitness(value))
}
val hashReg: Regex = "_[0-9][0-9]+".r
def deHash(s: String): String = hashReg.replaceAllIn(s, "")
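  // deHash examples: "x_1234567890" -> "x", but "x_1" is unchanged because
  // the regex requires at least two digits after the underscore.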
}
object InducJson {
import TermJson._, ExstInducStrucs._
def toJson(exst: ExstInducStrucs): ujson.Value = exst match {
case Base => ujson.Obj("intro" -> "base")
case NatRing => ujson.Obj("intro" -> "nat-ring")
case OrElse(first, second) =>
ujson.Obj(
"intro" -> "or-else",
"first" -> toJson(first),
"second" -> toJson(second)
)
case LambdaInduc(x, struc) =>
ujson.Obj(
"intro" -> "lambda",
"variable" -> termToJsonGet(x),
"structure" -> toJson(struc)
)
case ConsSeqExst(cs, intros) =>
ujson.Obj(
"intro" -> "constructor-sequence",
"type" -> termToJsonGet(cs.typ),
"intros" -> ujson.Arr(intros.map { (t) =>
termToJsonGet(t)
}: _*)
)
case ind @ IndConsSeqExst(cs, intros) =>
ujson.Obj(
"intro" -> "indexed-constructor-sequence",
"type" -> termToJsonGet(ind.fmly),
"intros" -> ujson.Arr(intros.map { (t) =>
termToJsonGet(t)
}: _*)
)
}
def fdJson(fd: FiniteDistribution[ExstInducDefn]): ujson.Arr = {
val pmf = for {
Weighted(elem, p) <- fd.pmf
} yield
ujson.Obj(
"type-family" -> termToJsonGet(elem.typFamily),
"introduction-rules" -> ujson.Arr(
(elem.intros.map((t) => termToJsonGet(t))): _*
),
"structure" -> toJson(elem.ind),
"weight" -> ujson.Num(p)
)
ujson.Arr(pmf: _*)
}
def fromJson(init: ExstInducStrucs)(js: ujson.Value): ExstInducStrucs =
js.obj("intro").str match {
case "base" => Base
case "nat-ring" => NatRing
case "or-else" =>
OrElse(
fromJson(init)(js.obj("first")),
fromJson(init)(js.obj("second"))
)
case "lambda" =>
val x = jsToTermExst(init)(js.obj("variable")).get
val struc = fromJson(init)(js.obj("structure"))
LambdaInduc(x, struc)
case "constructor-sequence" =>
val typ = jsToTermExst(init)(js.obj("type")).flatMap(typOpt).get
val intros =
js.obj("intros").arr.map((t) => jsToTermExst(init)(t).get).toVector
get(typ, intros)
case "indexed-constructor-sequence" =>
val typF = jsToTermExst(init)(js.obj("type")).get
val intros =
js.obj("intros").arr.map((t) => jsToTermExst(init)(t).get).toVector
getIndexed(typF, intros)
}
implicit val rwInducStruct: ReadWriter[ExstInducStrucs] = readwriter[ujson.Value].bimap(
ind => toJson(ind),
js => fromJson(ExstInducStrucs.Base)(js)
)
import TermJson._
implicit val rwInducDefn: ReadWriter[ExstInducDefn] = macroRW
def jsToFD(
exst: ExstInducStrucs
)(js: ujson.Value): FiniteDistribution[ExstInducDefn] = {
val pmf =
js.arr.toVector.map { wp =>
val ind = fromJson(exst)(wp.obj("structure"))
val typFamily = jsToTermExst(exst)(wp.obj("type-family")).get
val intros = wp
.obj("introduction-rules")
.arr
.toVector
.map((t) => jsToTermExst(exst)(t).get)
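        // Note: "parameters" is never written by fdJson above and is unused
        // below, so decoding fdJson output with this method fails on the
        // missing key.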
val parameters = wp
.obj("parameters")
.arr
.toVector
.map((t) => jsToTermExst(exst)(t).get)
Weighted(
ExstInducDefn(typFamily, intros, ind),
wp.obj("weight").num
)
}
FiniteDistribution(pmf)
}
}
object ContextJson {
import Context._, TermJson._
def toJson(ctx: Context): ujson.Value = ctx match {
case Empty => ujson.Obj("intro" -> "empty")
case ac: AppendConstant[u] =>
ujson.Obj(
"intro" -> "append-constant",
"init" -> toJson(ac.init),
"constant" -> termToJsonGet(ac.constant)
)
case at: AppendTerm[u] =>
val rl = at.role match {
case Context.Assert => ujson.Str("assert")
case Context.Consider => ujson.Str("consider")
}
ujson.Obj(
"intro" -> "append-term",
"init" -> toJson(at.init),
"term" -> termToJsonGet(at.term),
"role" -> rl
)
case av: AppendVariable[u] =>
ujson.Obj(
"intro" -> "append-variable",
"init" -> toJson(av.init),
"expression" -> termToJsonGet(av.variable)
)
case AppendDefn(init, defn, global) =>
ujson.Obj(
"intro" -> "append-definition",
"name" -> termToJsonGet(defn.name),
"init" -> toJson(init),
"value" -> termToJsonGet(defn.valueTerm),
"global" -> ujson.Bool(global)
)
case AppendIndDef(init, defn) =>
ujson.Obj(
"intro" -> "append-inductive-definition",
"defn" -> InducJson.toJson(defn),
"init" -> toJson(init)
)
}
def fromJson(js: ujson.Value): Context =
js.obj("intro").str match {
case "empty" => Empty
case "append-term" =>
val init = fromJson(js.obj("init"))
val term = jsToTermExst(init.inducStruct)(js.obj("term")).get
val role = js.obj("role").str match {
case "assert" => Assert
case "consider" => Consider
}
AppendTerm(init, term, role)
case "append-constant" =>
val init = fromJson(js.obj("init"))
val term = jsToTermExst(init.inducStruct)(js.obj("constant")).get
AppendConstant(init, term)
case "append-variable" =>
val init = fromJson(js.obj("init"))
val term = jsToTermExst(init.inducStruct)(js.obj("variable")).get
AppendVariable(init, term)
case "append-definition" =>
val init = fromJson(js.obj("init"))
val name = jsToTermExst(init.inducStruct)(js.obj("name")).get
val value = jsToTermExst(init.inducStruct)(js.obj("value")).get
AppendDefn(init, Defn(name, value), js.obj("global").bool)
case "append-inductive-definition" =>
val init = fromJson(js.obj("init"))
val defn = InducJson.fromJson(init.inducStruct)(js.obj("defn"))
AppendIndDef(init, defn)
}
implicit val rwContext: ReadWriter[Context] =
readwriter[ujson.Value].bimap(toJson, fromJson(_))
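  // Round-trip sketch: fromJson(toJson(ctx)) reconstructs ctx provided every
  // embedded term serializes via termToJsonGet; each case keys on "intro".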
}
object ConciseTermJson {
import JsFunc._
implicit val travNamed: Traverse[Named] = traversePair[S, Id]
val termToJson: Translator.OrElse[Term, ujson.Value] =
toJs(universe)("U") ||
toJs(formalAppln)("Ap") ||
toJs(lambdaTriple)("\\u03bb") ||
toJs(sigmaTriple)("\\u03c3") ||
toJs(piTriple)("\\u03c0") ||
toJs(prodTyp)("\\u03a0") ||
toJs(absPair)("pair") ||
toJs(plusTyp)("\\u03a3") ||
toJs(funcTyp)(UnicodeSyms.Arrow) ||
toJs(star)("*") ||
toJs(unit)("One") ||
toJs(zero)("Void") ||
toJs(prop)("Prop") ||
toJs(indInducFunc)("IInd") ||
toJs(indRecFunc)("IRec") ||
toJs(recFunc)("Rec") ||
toJs(inducFunc)("Ind") ||
toJs(hashSymbolic)("Sym") ||
toJs(mereWitness)("Witness") ||
toJs(firstIncl)("i1") ||
toJs(secondIncl)("i2") ||
toJs(identityTyp)("=") ||
toJs(refl)("=") ||
toJs(natTyp)("Nat") ||
toJs(natUniv)("NatU") ||
toJs(natZero)("0N") ||
toJs(natSucc)("succN") ||
toJs(natSum)("+N") ||
toJs(natProd)("*N") ||
toJs(natLiteral)("NL") ||
toJs(natAddMorph)("nat-additive-morphism") ||
toJs(foldedTerm)("folded-term") ||
toJs(miscAppln)("Ap")
def termToJsonGet(t: Term): ujson.Value =
termToJson(t).getOrElse(throw new Exception(s"cannot serialize term $t"))
def fdJson(fd: FiniteDistribution[Term]): ujson.Arr = {
val pmf = for {
Weighted(elem, p) <- fd.pmf
tjs <- termToJson(elem)
} yield ujson.Obj("term" -> tjs, "weight" -> ujson.Num(p))
ujson.Arr(pmf: _*)
}
import induction._
def jsonToTerm(
inds: Typ[Term] => Option[ConstructorSeqTL[_, Term, _]] = (_) => None,
indexedInds: Term => Option[IndexedConstructorSeqDom[_, Term, _, _, _]] =
(_) => None
): Translator.OrElse[ujson.Value, Term] =
jsonToTermBase ||
jsToOpt[Term, IIV]("recursive-function") {
case (x, (y, v)) =>
buildRecDef(inds)(x, (y, v))
} ||
jsToOpt[Term, IIV]("inductive-function") {
case (x, (y, v)) => buildIndDef(inds)(x, (y, v))
} ||
jsToOpt[Term, IVIIV]("indexed-recursive-function") {
case (u, (w, (x, (y, v)))) =>
buildIndRecDef(indexedInds)(w, (x, (y, v)))
} ||
jsToOpt[Term, IVIIV]("indexed-inductive-function") {
case (u, (w, (x, (y, v)))) =>
buildIndIndDef(indexedInds)(w, (x, (y, v)))
}
def jsToTermExst(
exst: ExstInducStrucs
): Translator.OrElse[ujson.Value, Term] =
jsonToTermBase ||
jsToOpt[Term, IIV]("recursive-function") {
case (dom: Term, (cod: Term, data: Vector[Term])) =>
for {
codom <- typOpt(cod)
fn <- exst.recOpt(dom, codom)
} yield data.foldLeft(fn)(fold(_)(_))
//buildRecDef(inds)(x, (y, v))
} ||
jsToOpt[Term, IIV]("inductive-function") {
case (dom: Term, (cod: Term, data: Vector[Term])) =>
for {
fn <- exst.inducOpt(dom, cod)
} yield data.foldLeft(fn)(fold(_)(_))
} ||
jsToOpt[Term, IVIIV]("indexed-recursive-function") {
case (
_,
(index: Vector[Term], (dom: Term, (cod: Term, data: Vector[Term])))
) =>
for {
codom <- typOpt(cod)
fn <- exst.recOpt(dom, codom)
} yield
(data ++ index).foldLeft(fn)(fold(_)(_)) //buildIndRecDef(indexedInds)(w, (x, (y, v)))
} ||
jsToOpt[Term, IVIIV]("indexed-inductive-function") {
case (
_,
(index: Vector[Term], (dom: Term, (cod: Term, data: Vector[Term])))
) =>
for {
fn <- exst.inducOpt(dom, cod)
} yield (data ++ index).foldLeft(fn)(fold(_)(_))
//buildIndIndDef(indexedInds)(w, (x, (y, v)))
}
def jsToFD(
exst: ExstInducStrucs
)(js: ujson.Value): FiniteDistribution[Term] = {
val pmf =
js.arr.toVector.map { wp =>
Weighted(
jsToTermExst(exst)(wp.obj("term")).get,
wp.obj("weight").num
)
}
FiniteDistribution(pmf)
}
val jsonToTermBase: Translator.OrElse[ujson.Value, Term] =
jsToBuild[Term, N]("U")((n) => Universe(n)) ||
jsToBuild[Term, II]("Ap") { case (func, arg) => fold(func)(arg) } ||
jsToBuild[Term, III]("\\u03bb") {
case ((variable, typ), value) => variable :~> value
} ||
jsToBuild[Term, III]("=") {
case ((dom, lhs), rhs) => lhs =:= rhs
} ||
jsToBuild[Term, III]("\\u03c0") {
case ((variable, typ), value: Typ[u]) => variable ~>: value
case (x, y) => unmatched(x, y)
} ||
jsToBuild[Term, III]("\\u03c3") {
case ((variable, typ), value: Typ[u]) => sigma(variable)(value)
case (x, y) => unmatched(x, y)
} ||
jsToBuild[Term, II]("\\u03a0") {
case (x: Typ[u], y: Typ[v]) => ProdTyp(x, y)
case (x, y) => unmatched(x, y)
} ||
jsToBuild[Term, II]("\\u03a3") {
case (x: Typ[u], y: Typ[v]) => PlusTyp(x, y)
case (x, y) => unmatched(x, y)
} ||
jsToBuild[Term, II]("pair") { case (x, y) => mkPair(x, y) } ||
jsToBuild[Term, II](UnicodeSyms.Arrow) {
case (x: Typ[u], y: Typ[v]) => FuncTyp(x, y)
case (x, y) => unmatched(x, y)
} ||
jsToBuild[Term, II]("=") {
case (dom: Typ[u], value: Term) => Refl(dom, value)
case (x, y) => unmatched(x, y)
} ||
jsToBuild[Term, IV]("folded-term") {
case (op, v) =>
v.reduce[Term] {
case (a: Term, b: Term) => applyFunc(applyFunc(op, a), b)
}
} ||
jsToBuild[Term, Un]("*") { (_) =>
Star
} ||
jsToBuild[Term, Un]("One") { (_) =>
Unit
} ||
jsToBuild[Term, Un]("Void") { (_) =>
Zero
} ||
jsToBuild[Term, Un]("Prop") { (_) =>
Prop
} ||
jsToBuild[Term, Un]("Nat") { (_) =>
NatRing.NatTyp
} ||
jsToBuild[Term, Un]("NatU") { (_) =>
NatRing.NatTyp.typ
} ||
jsToBuild[Term, Un]("0N") { (_) =>
NatRing.zero
} ||
jsToBuild[Term, Un]("succN") { (_) =>
NatRing.succ
} ||
jsToBuild[Term, Un]("+N") { (_) =>
NatRing.sum
} ||
jsToBuild[Term, Un]("*N") { (_) =>
NatRing.prod
} ||
jsToBuild[Term, N]("NL") { (n) =>
NatRing.Literal(n)
} ||
jsToBuild[Term, II]("nat-additive-morphism") {
case (base, op) =>
NatRing.AdditiveMorphism(
base.asInstanceOf[Func[NatRing.Nat, NatRing.Nat]],
op.asInstanceOf[(NatRing.Nat, NatRing.Nat) => NatRing.Nat]
)
} ||
jsToBuild[Term, II]("i1") {
case (tp: PlusTyp[u, v], x) => tp.incl1(x.asInstanceOf[u])
case (x, y) => unmatched(x, y)
} ||
jsToBuild[Term, II]("i2") {
case (tp: PlusTyp[u, v], x) => tp.incl2(x.asInstanceOf[v])
case (x, y) => unmatched(x, y)
} ||
jsToOpt[Term, IIV]("Rec") {
case (a, (b, v)) =>
// println(s"building base recursive type $a codomain $b data $v")
val fn = buildRecDef()
fn(a, (b, v))
} ||
jsToOpt[Term, IIV]("Ind") {
case (a, (b, v)) =>
val fn = buildIndDef()
fn(a, (b, v))
} ||
jsToOpt[Term, IVIIV]("IRec") {
case (u, (w, (a, (b, v)))) =>
// println(s"building indexed recursive:\\n index $w,\\n type $a,\\n codomain $b,\\n data $v\\n\\n")
val fn = buildIndRecDef()
val res = fn(w, (a, (b, v)))
println(s"result: $res")
res
} ||
jsToOpt[Term, IVIIV]("IInd") {
case (u, (w, (a, (b, v)))) =>
// println(s"building indexed inductive:\\n index $w,\\n type $a,\\n codomain $b,\\n data $v\\n\\n")
val fn = buildIndIndDef()
val res = fn(w, (a, (b, v)))
// println(s"result: $res")
res
} ||
jsToBuild[Term, Named]("Sym") {
case (name, tp: Typ[u]) => deHash(name) :: tp
case (x, y) => unmatched(x, y)
}(travNamed, implicitly[JsFunc[Named]]) ||
jsToBuild[Term, II]("Witness") {
case (tp, value) => toTyp(tp).symbObj(MereWitness(value))
}
val hashReg: Regex = "_[0-9][0-9]+".r
def deHash(s: String): String = hashReg.replaceAllIn(s, "")
object ContextJson {
import Context._
def toJson(ctx: Context): ujson.Value = ctx match {
case Empty => ujson.Obj("intro" -> "empty")
case ac: AppendConstant[u] =>
ujson.Obj(
"intro" -> "append-constant",
"init" -> toJson(ac.init),
"constant" -> termToJsonGet(ac.constant)
)
case at: AppendTerm[u] =>
val rl = at.role match {
case Context.Assert => ujson.Str("assert")
case Context.Consider => ujson.Str("consider")
}
ujson.Obj(
"intro" -> "append-term",
"init" -> toJson(at.init),
"term" -> termToJsonGet(at.term),
"role" -> rl
)
case av: AppendVariable[u] =>
ujson.Obj(
"intro" -> "append-variable",
"init" -> toJson(av.init),
"expression" -> termToJsonGet(av.variable)
)
case AppendDefn(init, defn, global) =>
ujson.Obj(
"intro" -> "append-definition",
"name" -> termToJsonGet(defn.name),
"init" -> toJson(init),
"value" -> termToJsonGet(defn.valueTerm),
"global" -> ujson.Bool(global)
)
case AppendIndDef(init, defn) =>
ujson.Obj(
"intro" -> "append-inductive-definition",
"defn" -> InducJson.toJson(defn),
"init" -> toJson(init)
)
}
def fromJson(js: ujson.Value): Context =
js.obj("intro").str match {
case "empty" => Empty
case "append-term" =>
val init = fromJson(js.obj("init"))
val term = jsToTermExst(init.inducStruct)(js.obj("term")).get
val role = js.obj("role").str match {
case "assert" => Assert
case "consider" => Consider
}
AppendTerm(init, term, role)
case "append-constant" =>
val init = fromJson(js.obj("init"))
val term = jsToTermExst(init.inducStruct)(js.obj("constant")).get
AppendConstant(init, term)
case "append-variable" =>
val init = fromJson(js.obj("init"))
val term = jsToTermExst(init.inducStruct)(js.obj("variable")).get
AppendVariable(init, term)
case "append-definition" =>
val init = fromJson(js.obj("init"))
val name = jsToTermExst(init.inducStruct)(js.obj("name")).get
val value = jsToTermExst(init.inducStruct)(js.obj("value")).get
AppendDefn(init, Defn(name, value), js.obj("global").bool)
case "append-inductive-definition" =>
val init = fromJson(js.obj("init"))
val defn = InducJson.fromJson(init.inducStruct)(js.obj("defn"))
AppendIndDef(init, defn)
}
}
}
|
siddhartha-gadgil/ProvingGround
|
core/src/main/scala/provingground/interface/JsFunc.scala
|
Scala
|
mit
| 31,216 |
package controllers
import com.bryzek.apidoc.api.v0.Client
import com.bryzek.apidoc.generator.v0.models.File
import scala.concurrent.ExecutionContext.Implicits.global
import javax.inject.Inject
import play.api.i18n.{MessagesApi, I18nSupport}
import play.api.mvc.{Action, Controller, Result}
class Code @Inject() (val messagesApi: MessagesApi) extends Controller with I18nSupport {
def index(orgKey: String, applicationKey: String, version: String, generatorKey: String) = AnonymousOrg.async { implicit request =>
lib.ApiClient.callWith404(
request.api.Code.get(orgKey, applicationKey, version, generatorKey)
).map {
case None => Redirect(routes.Versions.show(orgKey, applicationKey, version)).flashing("warning" -> "Version not found")
case Some(code) => {
Ok(views.html.code.index(
request.mainTemplate().copy(title = Some(code.generator.generator.name + " - Files")),
orgKey = orgKey,
applicationKey = applicationKey,
version = version,
generatorKey = generatorKey,
files = code.files
))
}
}.recover {
case r: com.bryzek.apidoc.api.v0.errors.ErrorsResponse => {
Redirect(routes.Versions.show(orgKey, applicationKey, version)).flashing("warning" -> r.errors.map(_.message).mkString(", "))
}
}
}
def zipFile(orgKey: String, applicationKey: String, version: String, generatorKey: String, fileName: String) = AnonymousOrg.async { implicit request =>
withFiles(request.api, orgKey, applicationKey, version, generatorKey) { files =>
val file = lib.Zipfile.create(fileName, files)
Ok.sendFile(file, inline = true)
}
}
def tarballFile(orgKey: String, applicationKey: String, version: String, generatorKey: String, fileName: String) = AnonymousOrg.async { implicit request =>
withFiles(request.api, orgKey, applicationKey, version, generatorKey) { files =>
val file = lib.TarballFile.create(fileName, files)
Ok.sendFile(file, inline = true)
}
}
def file(orgKey: String, applicationKey: String, version: String, generatorKey: String, fileName: String) = AnonymousOrg.async { implicit request =>
withFiles(request.api, orgKey, applicationKey, version, generatorKey) { files =>
files.find(_.name == fileName) match {
case None => {
Redirect(routes.Versions.show(orgKey, applicationKey, version)).flashing("warning" -> s"File $fileName not found")
}
case Some(file) => {
Ok(file.contents)
}
}
}
}
private[this] def withFiles(
api: Client, orgKey: String, applicationKey: String, version: String, generatorKey: String
) (
f: Seq[File] => Result
) = {
lib.ApiClient.callWith404(
api.Code.get(orgKey, applicationKey, version, generatorKey)
).map {
case None => {
Redirect(routes.Versions.show(orgKey, applicationKey, version)).flashing("warning" -> "Version not found")
}
case Some(code) => {
f(code.files)
}
}.recover {
case r: com.bryzek.apidoc.api.v0.errors.ErrorsResponse => {
Redirect(routes.Versions.show(orgKey, applicationKey, version)).flashing("warning" -> r.errors.map(_.message).mkString(", "))
}
}
}
}
|
Seanstoppable/apidoc
|
www/app/controllers/Code.scala
|
Scala
|
mit
| 3,274 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.graphx.impl
import scala.reflect.ClassTag
import org.apache.spark.graphx._
import org.apache.spark.graphx.util.collection.GraphXPrimitiveKeyOpenHashMap
import org.apache.spark.util.collection.BitSet
/**
* A collection of edges, along with referenced vertex attributes and an optional active vertex set
* for filtering computation on the edges.
*
* The edges are stored in columnar format in `localSrcIds`, `localDstIds`, and `data`. All
* referenced global vertex ids are mapped to a compact set of local vertex ids according to the
* `global2local` map. Each local vertex id is a valid index into `vertexAttrs`, which stores the
* corresponding vertex attribute, and `local2global`, which stores the reverse mapping to global
* vertex id. The global vertex ids that are active are optionally stored in `activeSet`.
*
* The edges are clustered by source vertex id, and the mapping from global vertex id to the index
* of the corresponding edge cluster is stored in `index`.
*
* @tparam ED the edge attribute type
* @tparam VD the vertex attribute type
*
* @param localSrcIds the local source vertex id of each edge as an index into `local2global` and
* `vertexAttrs`
* @param localDstIds the local destination vertex id of each edge as an index into `local2global`
* and `vertexAttrs`
* @param data the attribute associated with each edge
* @param index a clustered index on source vertex id as a map from each global source vertex id to
* the offset in the edge arrays where the cluster for that vertex id begins
* @param global2local a map from referenced vertex ids to local ids which index into vertexAttrs
* @param local2global an array of global vertex ids where the offsets are local vertex ids
* @param vertexAttrs an array of vertex attributes where the offsets are local vertex ids
* @param activeSet an optional active vertex set for filtering computation on the edges
*/
private[graphx]
class EdgePartition[
@specialized(Char, Int, Boolean, Byte, Long, Float, Double) ED: ClassTag, VD: ClassTag](
localSrcIds: Array[Int],
localDstIds: Array[Int],
data: Array[ED],
index: GraphXPrimitiveKeyOpenHashMap[VertexId, Int],
global2local: GraphXPrimitiveKeyOpenHashMap[VertexId, Int],
local2global: Array[VertexId],
vertexAttrs: Array[VD],
activeSet: Option[VertexSet])
extends Serializable {
/** No-arg constructor for serialization. */
private def this() = this(null, null, null, null, null, null, null, null)
/** Return a new `EdgePartition` with the specified edge data. */
def withData[ED2: ClassTag](data: Array[ED2]): EdgePartition[ED2, VD] = {
new EdgePartition(
localSrcIds, localDstIds, data, index, global2local, local2global, vertexAttrs, activeSet)
}
/** Return a new `EdgePartition` with the specified active set, provided as an iterator. */
def withActiveSet(iter: Iterator[VertexId]): EdgePartition[ED, VD] = {
val activeSet = new VertexSet
while (iter.hasNext) { activeSet.add(iter.next()) }
new EdgePartition(
localSrcIds, localDstIds, data, index, global2local, local2global, vertexAttrs,
Some(activeSet))
}
/** Return a new `EdgePartition` with updates to vertex attributes specified in `iter`. */
def updateVertices(iter: Iterator[(VertexId, VD)]): EdgePartition[ED, VD] = {
val newVertexAttrs = new Array[VD](vertexAttrs.length)
System.arraycopy(vertexAttrs, 0, newVertexAttrs, 0, vertexAttrs.length)
while (iter.hasNext) {
val kv = iter.next()
newVertexAttrs(global2local(kv._1)) = kv._2
}
new EdgePartition(
localSrcIds, localDstIds, data, index, global2local, local2global, newVertexAttrs,
activeSet)
}
/** Return a new `EdgePartition` without any locally cached vertex attributes. */
def withoutVertexAttributes[VD2: ClassTag](): EdgePartition[ED, VD2] = {
val newVertexAttrs = new Array[VD2](vertexAttrs.length)
new EdgePartition(
localSrcIds, localDstIds, data, index, global2local, local2global, newVertexAttrs,
activeSet)
}
@inline private def srcIds(pos: Int): VertexId = local2global(localSrcIds(pos))
@inline private def dstIds(pos: Int): VertexId = local2global(localDstIds(pos))
@inline private def attrs(pos: Int): ED = data(pos)
/** Look up vid in activeSet, throwing an exception if it is None. */
def isActive(vid: VertexId): Boolean = {
activeSet.get.contains(vid)
}
/** The number of active vertices, if any exist. */
def numActives: Option[Int] = activeSet.map(_.size)
/**
* Reverse all the edges in this partition.
*
* @return a new edge partition with all edges reversed.
*/
def reverse: EdgePartition[ED, VD] = {
val builder = new ExistingEdgePartitionBuilder[ED, VD](
global2local, local2global, vertexAttrs, activeSet, size)
var i = 0
while (i < size) {
val localSrcId = localSrcIds(i)
val localDstId = localDstIds(i)
val srcId = local2global(localSrcId)
val dstId = local2global(localDstId)
val attr = data(i)
builder.add(dstId, srcId, localDstId, localSrcId, attr)
i += 1
}
builder.toEdgePartition
}
/**
* Construct a new edge partition by applying the function f to all
* edges in this partition.
*
* Be careful not to keep references to the objects passed to `f`.
* To improve GC performance the same object is re-used for each call.
*
* @param f a function from an edge to a new attribute
* @tparam ED2 the type of the new attribute
* @return a new edge partition with the result of the function `f`
* applied to each edge
*/
def map[ED2: ClassTag](f: Edge[ED] => ED2): EdgePartition[ED2, VD] = {
val newData = new Array[ED2](data.length)
val edge = new Edge[ED]()
val size = data.length
var i = 0
while (i < size) {
edge.srcId = srcIds(i)
edge.dstId = dstIds(i)
edge.attr = data(i)
newData(i) = f(edge)
i += 1
}
this.withData(newData)
}
/**
* Construct a new edge partition by using the edge attributes
* contained in the iterator.
*
* @note The input iterator should return edge attributes in the
* order of the edges returned by `EdgePartition.iterator` and
* should return attributes equal to the number of edges.
*
* @param iter an iterator for the new attribute values
* @tparam ED2 the type of the new attribute
* @return a new edge partition with the attribute values replaced
*/
def map[ED2: ClassTag](iter: Iterator[ED2]): EdgePartition[ED2, VD] = {
// Faster than iter.toArray, because the expected size is known.
val newData = new Array[ED2](data.length)
var i = 0
while (iter.hasNext) {
newData(i) = iter.next()
i += 1
}
assert(newData.length == i)
this.withData(newData)
}
/**
* Construct a new edge partition containing only the edges matching `epred` and where both
* vertices match `vpred`.
*/
def filter(
epred: EdgeTriplet[VD, ED] => Boolean,
vpred: (VertexId, VD) => Boolean): EdgePartition[ED, VD] = {
val builder = new ExistingEdgePartitionBuilder[ED, VD](
global2local, local2global, vertexAttrs, activeSet)
var i = 0
while (i < size) {
// The user sees the EdgeTriplet, so we can't reuse it and must create one per edge.
val localSrcId = localSrcIds(i)
val localDstId = localDstIds(i)
val et = new EdgeTriplet[VD, ED]
et.srcId = local2global(localSrcId)
et.dstId = local2global(localDstId)
et.srcAttr = vertexAttrs(localSrcId)
et.dstAttr = vertexAttrs(localDstId)
et.attr = data(i)
if (vpred(et.srcId, et.srcAttr) && vpred(et.dstId, et.dstAttr) && epred(et)) {
builder.add(et.srcId, et.dstId, localSrcId, localDstId, et.attr)
}
i += 1
}
builder.toEdgePartition
}
/**
* Apply the function f to all edges in this partition.
*
* @param f an external state mutating user defined function.
*/
def foreach(f: Edge[ED] => Unit) {
iterator.foreach(f)
}
  /**
   * Merge all the edges with the same src and dest id into a single
   * edge using the `merge` function.
   *
   * @note Duplicate edges must be adjacent, which holds because edges in a
   * partition are clustered by source id and sorted by destination id.
   *
   * @param merge a commutative associative merge operation
   * @return a new edge partition without duplicate edges
   */
def groupEdges(merge: (ED, ED) => ED): EdgePartition[ED, VD] = {
val builder = new ExistingEdgePartitionBuilder[ED, VD](
global2local, local2global, vertexAttrs, activeSet)
var currSrcId: VertexId = null.asInstanceOf[VertexId]
var currDstId: VertexId = null.asInstanceOf[VertexId]
var currLocalSrcId = -1
var currLocalDstId = -1
var currAttr: ED = null.asInstanceOf[ED]
// Iterate through the edges, accumulating runs of identical edges using the curr* variables and
// releasing them to the builder when we see the beginning of the next run
var i = 0
while (i < size) {
if (i > 0 && currSrcId == srcIds(i) && currDstId == dstIds(i)) {
// This edge should be accumulated into the existing run
currAttr = merge(currAttr, data(i))
} else {
// This edge starts a new run of edges
if (i > 0) {
// First release the existing run to the builder
builder.add(currSrcId, currDstId, currLocalSrcId, currLocalDstId, currAttr)
}
// Then start accumulating for a new run
currSrcId = srcIds(i)
currDstId = dstIds(i)
currLocalSrcId = localSrcIds(i)
currLocalDstId = localDstIds(i)
currAttr = data(i)
}
i += 1
}
// Finally, release the last accumulated run
if (size > 0) {
builder.add(currSrcId, currDstId, currLocalSrcId, currLocalDstId, currAttr)
}
builder.toEdgePartition
}
/**
* Apply `f` to all edges present in both `this` and `other` and return a new `EdgePartition`
* containing the resulting edges.
*
* If there are multiple edges with the same src and dst in `this`, `f` will be invoked once for
* each edge, but each time it may be invoked on any corresponding edge in `other`.
*
* If there are multiple edges with the same src and dst in `other`, `f` will only be invoked
* once.
*/
def innerJoin[ED2: ClassTag, ED3: ClassTag]
(other: EdgePartition[ED2, _])
(f: (VertexId, VertexId, ED, ED2) => ED3): EdgePartition[ED3, VD] = {
val builder = new ExistingEdgePartitionBuilder[ED3, VD](
global2local, local2global, vertexAttrs, activeSet)
var i = 0
var j = 0
// For i = index of each edge in `this`...
while (i < size && j < other.size) {
val srcId = this.srcIds(i)
val dstId = this.dstIds(i)
// ... forward j to the index of the corresponding edge in `other`, and...
while (j < other.size && other.srcIds(j) < srcId) { j += 1 }
if (j < other.size && other.srcIds(j) == srcId) {
while (j < other.size && other.srcIds(j) == srcId && other.dstIds(j) < dstId) { j += 1 }
if (j < other.size && other.srcIds(j) == srcId && other.dstIds(j) == dstId) {
// ... run `f` on the matching edge
builder.add(srcId, dstId, localSrcIds(i), localDstIds(i),
f(srcId, dstId, this.data(i), other.attrs(j)))
}
}
i += 1
}
builder.toEdgePartition
}
/**
* The number of edges in this partition
*
* @return size of the partition
*/
val size: Int = localSrcIds.length
/** The number of unique source vertices in the partition. */
def indexSize: Int = index.size
/**
* Get an iterator over the edges in this partition.
*
* Be careful not to keep references to the objects from this iterator.
* To improve GC performance the same object is re-used in `next()`.
*
* @return an iterator over edges in the partition
*/
def iterator: Iterator[Edge[ED]] = new Iterator[Edge[ED]] {
private[this] val edge = new Edge[ED]
private[this] var pos = 0
override def hasNext: Boolean = pos < EdgePartition.this.size
override def next(): Edge[ED] = {
edge.srcId = srcIds(pos)
edge.dstId = dstIds(pos)
edge.attr = data(pos)
pos += 1
edge
}
}
/**
* Get an iterator over the edge triplets in this partition.
*
* It is safe to keep references to the objects from this iterator.
*/
def tripletIterator(
includeSrc: Boolean = true, includeDst: Boolean = true)
: Iterator[EdgeTriplet[VD, ED]] = new Iterator[EdgeTriplet[VD, ED]] {
private[this] var pos = 0
override def hasNext: Boolean = pos < EdgePartition.this.size
override def next(): EdgeTriplet[VD, ED] = {
val triplet = new EdgeTriplet[VD, ED]
val localSrcId = localSrcIds(pos)
val localDstId = localDstIds(pos)
triplet.srcId = local2global(localSrcId)
triplet.dstId = local2global(localDstId)
if (includeSrc) {
triplet.srcAttr = vertexAttrs(localSrcId)
}
if (includeDst) {
triplet.dstAttr = vertexAttrs(localDstId)
}
triplet.attr = data(pos)
pos += 1
triplet
}
}
/**
* Send messages along edges and aggregate them at the receiving vertices. Implemented by scanning
* all edges sequentially.
*
* @param sendMsg generates messages to neighboring vertices of an edge
* @param mergeMsg the combiner applied to messages destined to the same vertex
* @param tripletFields which triplet fields `sendMsg` uses
* @param activeness criteria for filtering edges based on activeness
*
* @return iterator aggregated messages keyed by the receiving vertex id
*/
def aggregateMessagesEdgeScan[A: ClassTag](
sendMsg: EdgeContext[VD, ED, A] => Unit,
mergeMsg: (A, A) => A,
tripletFields: TripletFields,
activeness: EdgeActiveness): Iterator[(VertexId, A)] = {
val aggregates = new Array[A](vertexAttrs.length)
val bitset = new BitSet(vertexAttrs.length)
val ctx = new AggregatingEdgeContext[VD, ED, A](mergeMsg, aggregates, bitset)
var i = 0
while (i < size) {
val localSrcId = localSrcIds(i)
val srcId = local2global(localSrcId)
val localDstId = localDstIds(i)
val dstId = local2global(localDstId)
val edgeIsActive =
if (activeness == EdgeActiveness.Neither) true
else if (activeness == EdgeActiveness.SrcOnly) isActive(srcId)
else if (activeness == EdgeActiveness.DstOnly) isActive(dstId)
else if (activeness == EdgeActiveness.Both) isActive(srcId) && isActive(dstId)
else if (activeness == EdgeActiveness.Either) isActive(srcId) || isActive(dstId)
else throw new Exception("unreachable")
if (edgeIsActive) {
val srcAttr = if (tripletFields.useSrc) vertexAttrs(localSrcId) else null.asInstanceOf[VD]
val dstAttr = if (tripletFields.useDst) vertexAttrs(localDstId) else null.asInstanceOf[VD]
ctx.set(srcId, dstId, localSrcId, localDstId, srcAttr, dstAttr, data(i))
sendMsg(ctx)
}
i += 1
}
bitset.iterator.map { localId => (local2global(localId), aggregates(localId)) }
}
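  // Usage sketch (illustrative): out-degree counting via the edge scan amounts to
  //   aggregateMessagesEdgeScan[Int](ctx => ctx.sendToSrc(1), _ + _,
  //     TripletFields.None, EdgeActiveness.Neither)
  // visiting every edge once and merging per-vertex in the bitset-backed buffer.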
/**
* Send messages along edges and aggregate them at the receiving vertices. Implemented by
* filtering the source vertex index, then scanning each edge cluster.
*
* @param sendMsg generates messages to neighboring vertices of an edge
* @param mergeMsg the combiner applied to messages destined to the same vertex
* @param tripletFields which triplet fields `sendMsg` uses
* @param activeness criteria for filtering edges based on activeness
*
* @return iterator aggregated messages keyed by the receiving vertex id
*/
def aggregateMessagesIndexScan[A: ClassTag](
sendMsg: EdgeContext[VD, ED, A] => Unit,
mergeMsg: (A, A) => A,
tripletFields: TripletFields,
activeness: EdgeActiveness): Iterator[(VertexId, A)] = {
val aggregates = new Array[A](vertexAttrs.length)
val bitset = new BitSet(vertexAttrs.length)
val ctx = new AggregatingEdgeContext[VD, ED, A](mergeMsg, aggregates, bitset)
index.iterator.foreach { cluster =>
val clusterSrcId = cluster._1
val clusterPos = cluster._2
val clusterLocalSrcId = localSrcIds(clusterPos)
val scanCluster =
if (activeness == EdgeActiveness.Neither) true
else if (activeness == EdgeActiveness.SrcOnly) isActive(clusterSrcId)
else if (activeness == EdgeActiveness.DstOnly) true
else if (activeness == EdgeActiveness.Both) isActive(clusterSrcId)
else if (activeness == EdgeActiveness.Either) true
else throw new Exception("unreachable")
if (scanCluster) {
var pos = clusterPos
val srcAttr =
if (tripletFields.useSrc) vertexAttrs(clusterLocalSrcId) else null.asInstanceOf[VD]
ctx.setSrcOnly(clusterSrcId, clusterLocalSrcId, srcAttr)
while (pos < size && localSrcIds(pos) == clusterLocalSrcId) {
val localDstId = localDstIds(pos)
val dstId = local2global(localDstId)
val edgeIsActive =
if (activeness == EdgeActiveness.Neither) true
else if (activeness == EdgeActiveness.SrcOnly) true
else if (activeness == EdgeActiveness.DstOnly) isActive(dstId)
else if (activeness == EdgeActiveness.Both) isActive(dstId)
else if (activeness == EdgeActiveness.Either) isActive(clusterSrcId) || isActive(dstId)
else throw new Exception("unreachable")
if (edgeIsActive) {
val dstAttr =
if (tripletFields.useDst) vertexAttrs(localDstId) else null.asInstanceOf[VD]
ctx.setRest(dstId, localDstId, dstAttr, data(pos))
sendMsg(ctx)
}
pos += 1
}
}
}
bitset.iterator.map { localId => (local2global(localId), aggregates(localId)) }
}
}
private class AggregatingEdgeContext[VD, ED, A](
mergeMsg: (A, A) => A,
aggregates: Array[A],
bitset: BitSet)
extends EdgeContext[VD, ED, A] {
private[this] var _srcId: VertexId = _
private[this] var _dstId: VertexId = _
private[this] var _localSrcId: Int = _
private[this] var _localDstId: Int = _
private[this] var _srcAttr: VD = _
private[this] var _dstAttr: VD = _
private[this] var _attr: ED = _
def set(
srcId: VertexId, dstId: VertexId,
localSrcId: Int, localDstId: Int,
srcAttr: VD, dstAttr: VD,
attr: ED) {
_srcId = srcId
_dstId = dstId
_localSrcId = localSrcId
_localDstId = localDstId
_srcAttr = srcAttr
_dstAttr = dstAttr
_attr = attr
}
def setSrcOnly(srcId: VertexId, localSrcId: Int, srcAttr: VD) {
_srcId = srcId
_localSrcId = localSrcId
_srcAttr = srcAttr
}
def setRest(dstId: VertexId, localDstId: Int, dstAttr: VD, attr: ED) {
_dstId = dstId
_localDstId = localDstId
_dstAttr = dstAttr
_attr = attr
}
override def srcId: VertexId = _srcId
override def dstId: VertexId = _dstId
override def srcAttr: VD = _srcAttr
override def dstAttr: VD = _dstAttr
override def attr: ED = _attr
override def sendToSrc(msg: A) {
send(_localSrcId, msg)
}
override def sendToDst(msg: A) {
send(_localDstId, msg)
}
@inline private def send(localId: Int, msg: A) {
if (bitset.get(localId)) {
aggregates(localId) = mergeMsg(aggregates(localId), msg)
} else {
aggregates(localId) = msg
bitset.set(localId)
}
}
}
|
akopich/spark
|
graphx/src/main/scala/org/apache/spark/graphx/impl/EdgePartition.scala
|
Scala
|
apache-2.0
| 20,304 |
package com.alvrod.cryptopals.breakers
object FrequencyScore {
val frequencyMap = Map[Char, Double] (
'a' -> 8.167,
'b' -> 1.492,
'c' -> 2.782,
'd' -> 4.253,
'e' -> 13.0001,
'f' -> 2.228,
'g' -> 2.015,
'h' -> 6.094,
'i' -> 6.966,
'j' -> 0.153,
'k' -> 0.772,
'l' -> 4.025,
'm' -> 2.406,
'n' -> 6.749,
'o' -> 7.507,
'p' -> 1.929,
'q' -> 0.095,
'r' -> 5.987,
's' -> 6.327,
't' -> 9.056,
'u' -> 2.758,
'v' -> 0.978,
'w' -> 2.360,
'x' -> 0.150,
'y' -> 1.974,
'z' -> 0.074,
' ' -> 13.1, // use as wildcard for all whitespace
'?' -> 8.5 // use as wildcard for all punctuation
)
def getFrequency(bytes: Array[Byte]): Double = {
val ascii = new String(bytes)
ascii.foldLeft(0.0)((acc, item) =>
acc + frequencyMap.getOrElse(item, 0.0)
)
}
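  // Usage sketch: higher scores mean more English-like input.
  //   getFrequency("etaoin shrdlu".getBytes)  // high: common letters + spaces
  //   getFrequency(Array[Byte](1, 2, 3))      // ~0.0: no scoring characters
  // Uppercase letters score 0 here, so callers should lowercase first.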
}
|
alvrod/cryptopals
|
src/com/alvrod/cryptopals/breakers/FrequencyScore.scala
|
Scala
|
gpl-2.0
| 871 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.optim
import com.intel.analytics.bigdl.dataset.{LocalDataSet, MiniBatch}
import com.intel.analytics.bigdl._
import com.intel.analytics.bigdl.nn.Utils
import com.intel.analytics.bigdl.nn.abstractnn.Activity
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils._
import org.apache.log4j.Logger
import scala.reflect.ClassTag
object LocalOptimizer {
val logger = Logger.getLogger(getClass)
}
/**
* Optimize a model on a single machine
*
* @param model model to be optimized
* @param dataset data set
* @param criterion criterion to be used
*/
class LocalOptimizer[T: ClassTag] private[optim](
model: Module[T],
dataset: LocalDataSet[MiniBatch[T]],
criterion: Criterion[T]
)(implicit ev: TensorNumeric[T])
extends Optimizer[T, MiniBatch[T]](
model, dataset, criterion) {
import LocalOptimizer._
import Optimizer._
private val coreNumber = Engine.coreNumber()
private val subModelNumber = Engine.getEngineType match {
case MklBlas => coreNumber
case _ => throw new IllegalArgumentException
}
private val (weight, grad) = model.getParameters()
private val gradLength = grad.nElement()
private val syncGradTaskSize = gradLength / subModelNumber
private val syncGradExtraTask = gradLength % subModelNumber
private val syncGradParallelNum =
if (syncGradTaskSize == 0) syncGradExtraTask else subModelNumber
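  // Example: gradLength = 10 and subModelNumber = 4 give syncGradTaskSize = 2
  // and syncGradExtraTask = 2, so the four sync tasks own slices of sizes
  // 3, 3, 2, 2 (the first `extraTask` tasks each take one extra element),
  // matching the offset/length arithmetic in the gradient-copy loop below.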
private val workingModels = (1 to subModelNumber).map(i => {
logger.info(s"Clone $i model...")
model.cloneModule()
}).toArray
private val workingModelWAndG = workingModels.map(_.getParameters())
workingModelWAndG.foreach(_._1.storage().set(weight.storage()))
private val workingCriterion =
(1 to subModelNumber).map(_ => criterion.cloneCriterion()).toArray
override def optimize(): Module[T] = {
var wallClockTime = 0L
var count = 0
optimMethod.clearHistory()
optimMethod.loadFromTable(state)
state("epoch") = state.get[Int]("epoch").getOrElse(1)
state("neval") = state.get[Int]("neval").getOrElse(1)
state("isLayerwiseScaled") = Utils.isLayerwiseScaled(model)
dataset.shuffle()
var iter = dataset.data(train = true)
logger.info("model thread pool size is " + Engine.model.getPoolSize)
while (!endWhen(state)) {
val start = System.nanoTime()
// Fetch data and prepare tensors
val batch = iter.next()
var b = 0
val stackSize = batch.size() / subModelNumber
val extraSize = batch.size() % subModelNumber
val parallelism = if (stackSize == 0) extraSize else subModelNumber
val miniBatchBuffer = new Array[MiniBatch[T]](parallelism)
while (b < parallelism) {
val offset = b * stackSize + math.min(b, extraSize) + 1
val length = stackSize + (if (b < extraSize) 1 else 0)
miniBatchBuffer(b) = batch.slice(offset, length)
b += 1
}
val dataFetchTime = System.nanoTime()
val lossSum = Engine.default.invokeAndWait(
(0 until parallelism).map(i =>
() => {
val localModel = workingModels(i)
localModel.zeroGradParameters()
localModel.training()
val localCriterion = workingCriterion(i)
val input = miniBatchBuffer(i).getInput()
val target = miniBatchBuffer(i).getTarget()
val output = localModel.forward(input)
val _loss = ev.toType[Double](localCriterion.forward(output, target))
val errors = localCriterion.backward(output, target)
localModel.backward(input, errors)
_loss
})
).sum
// copy multi-model gradient to the buffer
Engine.default.invokeAndWait(
(0 until syncGradParallelNum).map(tid =>
() => {
val offset = tid * syncGradTaskSize + math.min(tid, syncGradExtraTask)
val length = syncGradTaskSize + (if (tid < syncGradExtraTask) 1 else 0)
var i = 0
while (i < parallelism) {
if (i == 0) {
grad.narrow(1, offset + 1, length)
.copy(workingModelWAndG(i)._2.narrow(1, offset + 1, length))
} else {
grad.narrow(1, offset + 1, length)
.add(workingModelWAndG(i)._2.narrow(1, offset + 1, length))
}
i += 1
}
})
)
val loss = lossSum / parallelism
grad.div(ev.fromType(parallelism))
optimMethod.state.update("epoch", state.get("epoch"))
optimMethod.state.update("neval", state.get("neval"))
optimMethod.optimize(_ => (ev.fromType(loss), grad), weight)
val end = System.nanoTime()
wallClockTime += end - start
count += batch.size()
val head =
header(state[Int]("epoch"), count, dataset.size(), state[Int]("neval"), wallClockTime)
logger.info(s"$head " +
s"loss is $loss, iteration time is ${(end - start) / 1e9}s " +
s"data fetch time is ${(dataFetchTime - start) / 1e9}s, " +
s"train time ${(end - dataFetchTime) / 1e9}s. " +
s"Throughput is ${batch.size().toDouble / (end - start) * 1e9} record / second. " +
optimMethod.getHyperParameter()
)
state("neval") = state[Int]("neval") + 1
if (count >= dataset.size()) {
state("epoch") = state[Int]("epoch") + 1
dataset.shuffle()
iter = dataset.toLocal().data(train = true)
count = 0
}
validate(wallClockTime)
checkpoint(wallClockTime)
}
// copy running status from workingModels to model
model.copyStatus(workingModels.head)
model
}
private def checkpoint(wallClockTime: Long): Unit = {
if (checkpointTrigger.isEmpty || checkpointPath.isEmpty) {
return
}
val trigger = checkpointTrigger.get
    if (trigger(state)) {
      // checkpointPath is guaranteed to be defined by the guard at the top of this method
      logger.info(s"[Wall Clock ${wallClockTime / 1e9}s] Save model to ${checkpointPath.get}")
saveModel(workingModels.head, checkpointPath, isOverWrite, s".${state[Int]("neval")}")
saveState(state, checkpointPath, isOverWrite, s".${state[Int]("neval")}")
}
}
private def validate(wallClockTime: Long): Unit = {
if (validationTrigger.isEmpty || validationDataSet.isEmpty) {
return
}
val trigger = validationTrigger.get
if (!trigger(state)) {
return
}
val vMethods = validationMethods.get
val vMethodsArr = (1 to subModelNumber).map(i => vMethods.map(_.clone())).toArray
val dataIter = validationDataSet.get.toLocal().data(train = false)
logger.info(s"[Wall Clock ${wallClockTime / 1e9}s] Validate model...")
workingModels.foreach(_.evaluate())
var count = 0
dataIter.map(batch => {
val stackSize = batch.size() / subModelNumber
val extraSize = batch.size() % subModelNumber
val parallelism = if (stackSize == 0) extraSize else subModelNumber
val start = System.nanoTime()
val result = Engine.default.invokeAndWait(
(0 until parallelism).map(b =>
() => {
val offset = b * stackSize + math.min(b, extraSize) + 1
val length = stackSize + (if (b < extraSize) 1 else 0)
val currentMiniBatch = batch.slice(offset, length)
val input = currentMiniBatch.getInput()
val target = currentMiniBatch.getTarget()
val output = workingModels(b).forward(input)
            val localVMethods = vMethodsArr(b)
            localVMethods.map(validation => {
validation(output, target)
})
}
)
).reduce((left, right) => {
left.zip(right).map { case (l, r) =>
l + r
}
})
count += batch.size()
logger.info(s"[Validation] $count/${validationDataSet.get.size()} Throughput is ${
batch.size() / ((System.nanoTime() - start) / 1e9)
} record / sec")
result
}).reduce((left, right) => {
left.zip(right).map { case (l, r) =>
l + r
}
}).zip(vMethods).foreach(r => {
logger.info(s"${r._2} is ${r._1}")
})
}
}
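// A standalone sketch (names hypothetical, not part of BigDL) of the
// range-partitioning scheme used above for both mini-batch slicing and
// gradient aggregation: the first `extra` tasks receive one extra element,
// so the resulting (offset, length) pairs tile the range exactly once.
object RangePartitionSketch {
  def slices(total: Int, subModelNumber: Int): Seq[(Int, Int)] = {
    val taskSize = total / subModelNumber
    val extra = total % subModelNumber
    val parallelism = if (taskSize == 0) extra else subModelNumber
    (0 until parallelism).map { tid =>
      val offset = tid * taskSize + math.min(tid, extra)
      val length = taskSize + (if (tid < extra) 1 else 0)
      (offset, length)
    }
  }

  def main(args: Array[String]): Unit = {
    val s = slices(10, 4)
    println(s) // Vector((0,3), (3,3), (6,2), (8,2))
    assert(s.map(_._2).sum == 10) // the slices cover all 10 elements exactly once
  }
}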
|
JerryYanWan/BigDL
|
spark/dl/src/main/scala/com/intel/analytics/bigdl/optim/LocalOptimizer.scala
|
Scala
|
apache-2.0
| 8,793 |
package com.xta.spark.color.converter
/**
* Created by Rex on 7/25/16.
*/
class Hex(v: String) {
val value = v
def red: String = rgb._1
def green: String = rgb._2
def blue: String = rgb._3
  private lazy val rgb: (String, String, String) = {
val startIndex = if (value.startsWith("#")) 1 else 0
val redChars = value slice(startIndex, startIndex+2)
val greenChars = value slice(startIndex+2, startIndex+4)
val blueChars = value slice(startIndex+4, startIndex+6)
(redChars, greenChars, blueChars)
}
//region equality
private def canEqual(a: Any) = a.isInstanceOf[Hex]
  override def equals(that: Any): Boolean =
    that match {
      // compare the underlying values directly; equal hash codes alone do not imply equality
      case that: Hex => that.canEqual(this) && this.value == that.value
      case _ => false
    }
override def hashCode:Int = {
value.hashCode()
}
//endregion
}
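// A hedged usage sketch: channel extraction works with or without a leading "#".
object HexExample {
  def main(args: Array[String]): Unit = {
    val hex = new Hex("#ff8800")
    println(hex.red)   // ff
    println(hex.green) // 88
    println(hex.blue)  // 00
    assert(new Hex("ff8800").red == hex.red) // the "#" prefix is skipped when slicing
  }
}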
|
xta/spark-color-converter
|
src/main/scala/com/xta/spark/color/converter/Hex.scala
|
Scala
|
mit
| 849 |
package models
import java.util.UUID
import utils.Global._
import utils.semantic.Vocabulary.{ owl, lwm, rdf }
import utils.semantic._
import scala.concurrent.{ Promise, Future }
import scala.util.{ Failure, Success }
case class ApplicationToken(student: Resource, labwork: Resource)
object ApplicationTokens {
import scala.concurrent.ExecutionContext.Implicits.global
def create(token: ApplicationToken): Future[Individual] = {
val id = UUID.randomUUID()
val tokenResource = ResourceUtils.createResource(lwmNamespace, id)
val statements = List(
Statement(tokenResource, rdf.typ, lwm.ApplicationToken),
Statement(tokenResource, rdf.typ, owl.NamedIndividual),
Statement(token.student, lwm.hasApplicationToken, tokenResource),
Statement(tokenResource, lwm.hasId, StringLiteral(id.toString)),
Statement(tokenResource, lwm.hasLabWork, token.labwork)
)
sparqlExecutionContext.executeUpdate(SPARQLBuilder.insertStatements(statements: _*)).map(b ⇒ Individual(tokenResource))
}
def delete(resource: Resource): Future[Resource] = {
val p = Promise[Resource]()
val individual = Individual(resource)
if (individual.props(rdf.typ).contains(lwm.ApplicationToken)) {
      // complete the promise on failure as well, so callers are never left hanging
      sparqlExecutionContext.executeUpdate(SPARQLBuilder.removeIndividual(resource)).onComplete {
        case Success(_) ⇒ p.success(resource)
        case Failure(t) ⇒ p.failure(t)
      }
} else {
p.failure(new IllegalArgumentException("Resource is not an ApplicationToken"))
}
p.future
}
}
|
FHK-ADV/lwm
|
app/models/ApplicationTokens.scala
|
Scala
|
mit
| 1,435 |
/***********************************************************************
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.features.serialization
import com.typesafe.scalalogging.LazyLogging
import com.vividsolutions.jts.geom._
import scala.reflect.ClassTag
/**
* Based on the method from geotools WKBWriter. This method is optimized for kryo and simplified from
* WKBWriter in the following ways:
*
* 1. Doesn't save SRID (geomesa didn't use that functionality in WKBWriter)
* 2. Doesn't worry about byte order (handled by kryo) TODO does avro handle byte order?
* 3. Doesn't use a precision model
*/
// noinspection LanguageFeature
trait GeometrySerialization[T <: NumericWriter, V <: NumericReader] extends LazyLogging {
// note: dimensions have to be determined from the internal coordinate sequence, not the geometry itself.
import GeometrySerialization._
private lazy val factory = new GeometryFactory()
private lazy val csFactory = factory.getCoordinateSequenceFactory
def serialize(out: T, geometry: Geometry): Unit = {
if (geometry == null) { out.writeByte(NULL_BYTE) } else {
out.writeByte(NOT_NULL_BYTE)
geometry match {
case g: Point => writePoint(out, g)
case g: LineString => writeLineString(out, g)
case g: Polygon => writePolygon(out, g)
case g: MultiPoint => writeGeometryCollection(out, GeometrySerialization.MultiPoint, g)
case g: MultiLineString => writeGeometryCollection(out, GeometrySerialization.MultiLineString, g)
case g: MultiPolygon => writeGeometryCollection(out, GeometrySerialization.MultiPolygon, g)
case g: GeometryCollection => writeGeometryCollection(out, GeometrySerialization.GeometryCollection, g)
}
}
}
def deserialize(in: V): Geometry = {
if (in.readByte() == NULL_BYTE) { null } else {
in.readInt(true) match {
case Point2d => readPoint(in, Some(2))
case LineString2d => readLineString(in, Some(2))
case Polygon2d => readPolygon(in, Some(2))
case Point => readPoint(in, None)
case LineString => readLineString(in, None)
case Polygon => readPolygon(in, None)
case MultiPoint => factory.createMultiPoint(readGeometryCollection[Point](in))
case MultiLineString => factory.createMultiLineString(readGeometryCollection[LineString](in))
case MultiPolygon => factory.createMultiPolygon(readGeometryCollection[Polygon](in))
case GeometryCollection => factory.createGeometryCollection(readGeometryCollection[Geometry](in))
}
}
}
private def writePoint(out: T, g: Point): Unit = {
val coords = g.getCoordinateSequence
val (flag, writeDims) = if (coords.getDimension == 2) { (Point2d, false) } else { (Point, true) }
out.writeInt(flag, optimizePositive = true)
writeCoordinateSequence(out, coords, writeLength = false, writeDims)
}
private def readPoint(in: V, dims: Option[Int]): Point =
factory.createPoint(readCoordinateSequence(in, Some(1), dims))
private def writeLineString(out: T, g: LineString): Unit = {
val coords = g.getCoordinateSequence
val (flag, writeDims) = if (coords.getDimension == 2) { (LineString2d, false) } else { (LineString, true) }
out.writeInt(flag, optimizePositive = true)
writeCoordinateSequence(out, coords, writeLength = true, writeDims)
}
private def readLineString(in: V, dims: Option[Int]): LineString =
factory.createLineString(readCoordinateSequence(in, None, dims))
private def writePolygon(out: T, g: Polygon): Unit = {
val exterior = g.getExteriorRing.getCoordinateSequence
val twoD = exterior.getDimension == 2 &&
(0 until g.getNumInteriorRing).forall(i => g.getInteriorRingN(i).getCoordinateSequence.getDimension == 2)
val (flag, writeDims) = if (twoD) { (Polygon2d, false) } else { (Polygon, true) }
out.writeInt(flag, optimizePositive = true)
writeCoordinateSequence(out, exterior, writeLength = true, writeDims)
out.writeInt(g.getNumInteriorRing, optimizePositive = true)
var i = 0
while (i < g.getNumInteriorRing) {
writeCoordinateSequence(out, g.getInteriorRingN(i).getCoordinateSequence, writeLength = true, writeDims)
i += 1
}
}
private def readPolygon(in: V, dims: Option[Int]): Polygon = {
val exteriorRing = factory.createLinearRing(readCoordinateSequence(in, None, dims))
val numInteriorRings = in.readInt(true)
if (numInteriorRings == 0) {
factory.createPolygon(exteriorRing)
} else {
val interiorRings = Array.ofDim[LinearRing](numInteriorRings)
var i = 0
while (i < numInteriorRings) {
interiorRings.update(i, factory.createLinearRing(readCoordinateSequence(in, None, dims)))
i += 1
}
factory.createPolygon(exteriorRing, interiorRings)
}
}
private def writeGeometryCollection(out: T, typ: Int, g: GeometryCollection): Unit = {
out.writeInt(typ, optimizePositive = true)
out.writeInt(g.getNumGeometries, optimizePositive = true)
var i = 0
while (i < g.getNumGeometries) {
serialize(out, g.getGeometryN(i))
i += 1
}
}
private def readGeometryCollection[U <: Geometry: ClassTag](in: V): Array[U] = {
val numGeoms = in.readInt(true)
val geoms = Array.ofDim[U](numGeoms)
var i = 0
while (i < numGeoms) {
geoms.update(i, deserialize(in).asInstanceOf[U])
i += 1
}
geoms
}
private def writeCoordinateSequence(out: T,
coords: CoordinateSequence,
writeLength: Boolean,
writeDimensions: Boolean): Unit = {
val dims = coords.getDimension
if (writeLength) {
out.writeInt(coords.size(), optimizePositive = true)
}
if (writeDimensions) {
out.writeInt(dims, optimizePositive = true)
}
var i = 0
while (i < coords.size()) {
val coord = coords.getCoordinate(i)
var j = 0
while (j < dims) {
out.writeDouble(coord.getOrdinate(j))
j += 1
}
i += 1
}
}
private def readCoordinateSequence(in: V, length: Option[Int], dimensions: Option[Int]): CoordinateSequence = {
val numCoords = length.getOrElse(in.readInt(true))
val numDims = dimensions.getOrElse(in.readInt(true))
val coords = csFactory.create(numCoords, numDims)
var i = 0
while (i < numCoords) {
var j = 0
while (j < numDims) {
coords.setOrdinate(i, j, in.readDouble())
j += 1
}
i += 1
}
coords
}
}
object GeometrySerialization {
// 2-d values - corresponds to com.vividsolutions.jts.io.WKBConstants
val Point2d: Int = 1
val LineString2d: Int = 2
val Polygon2d: Int = 3
val MultiPoint: Int = 4
val MultiLineString: Int = 5
val MultiPolygon: Int = 6
val GeometryCollection: Int = 7
// n-dimensional values
val Point: Int = 8
val LineString: Int = 9
val Polygon: Int = 10
}
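// A minimal sketch of the flag selection implemented above (the helper below is
// an illustration, not part of the serializer): 2-d coordinate sequences use the
// compact WKB-style flags and skip the per-geometry dimension count, while
// higher-dimensional sequences use the n-dimensional flags plus an explicit count.
object GeometryFlagSketch {
  import GeometrySerialization._

  def pointFlag(dimensions: Int): (Int, Boolean) =
    if (dimensions == 2) (Point2d, false) else (Point, true)

  def main(args: Array[String]): Unit = {
    println(pointFlag(2)) // (1,false): no dimension count is written
    println(pointFlag(3)) // (8,true): the dimension count is written explicitly
  }
}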
|
ronq/geomesa
|
geomesa-features/geomesa-feature-common/src/main/scala/org/locationtech/geomesa/features/serialization/GeometrySerialization.scala
|
Scala
|
apache-2.0
| 7,568 |
package postoffice
import java.time.LocalDateTime
import akka.actor.{ActorSystem, Props}
import akkaviz.serialization.{AkkaVizSerializer, SerializationContext}
import upickle.Js
import upickle.Js.Value
import scala.util.Random
case class Parcel(source: City, destination: City, weight: Weight)
case class PostOffice(city: City)
sealed trait ParcelAction
case class Pickup(date: LocalDateTime, parcel: Parcel) extends ParcelAction
case class Delivery(date: LocalDateTime, parcel: Parcel) extends ParcelAction
case class Rejected(date: LocalDateTime, parcel: Parcel) extends ParcelAction
sealed trait City
case object Wroclaw extends City
case object Lodz extends City
case object Warszawa extends City
case object Poznan extends City
case object Krakow extends City
case object GorzowWlkp extends City
case object Berlin extends City
object PostOfficeSerializer extends AkkaVizSerializer {
override def canSerialize(obj: scala.Any): Boolean = obj match {
case c: City => true
case _ => false
}
override def serialize(obj: scala.Any, context: SerializationContext): Value = obj match {
case c: City => Js.Str(c.getClass.getName.split('.').last.stripSuffix("$"))
}
}
object PostOffice {
val ParcelRoutes = Map[(City, City), List[City]](
(Wroclaw -> Warszawa) -> (Wroclaw :: Lodz :: Warszawa :: Nil),
(Krakow -> Warszawa) -> (Krakow :: Lodz :: Warszawa :: Nil),
(Berlin -> Warszawa) -> (Berlin :: GorzowWlkp :: Poznan :: Warszawa :: Nil),
(GorzowWlkp -> Wroclaw) -> (GorzowWlkp :: Poznan :: Wroclaw :: Nil)
)
def route: ((City, City)) => List[City] = {
case (src, dest) =>
ParcelRoutes
.get(src -> dest)
.orElse(ParcelRoutes.get(dest -> src).map(_.reverse))
.getOrElse(src :: dest :: Nil)
}
val WeightLimit = 5.00
val DelayRange = 1150 to 5500
  // DelayRange is inclusive, so use its length to make the maximum delay reachable
  def randomDelay = Random.nextInt(DelayRange.length) + DelayRange.min
val Cities: Vector[City] = Vector(
Wroclaw,
Lodz,
Warszawa,
Poznan,
Krakow,
GorzowWlkp,
Berlin
)
def run(implicit system: ActorSystem) = {
for (city <- PostOffice.Cities) {
system.actorOf(Props(classOf[PostOfficeActor], PostOffice(city)), city.toString)
}
}
}
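// A hedged usage sketch of PostOffice.route: a route defined in one direction is
// reused in reverse, and unknown pairs fall back to a direct two-city hop.
object RouteExample {
  def main(args: Array[String]): Unit = {
    println(PostOffice.route(Wroclaw -> Warszawa)) // List(Wroclaw, Lodz, Warszawa)
    println(PostOffice.route(Warszawa -> Wroclaw)) // List(Warszawa, Lodz, Wroclaw)
    println(PostOffice.route(Krakow -> Berlin))    // List(Krakow, Berlin)
  }
}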
|
blstream/akka-viz
|
demo/src/main/scala/postoffice/postoffice.scala
|
Scala
|
mit
| 2,249 |
/*
* Copyright 2014 Commonwealth Computer Research, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.locationtech.geomesa.feature
import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
import java.nio.ByteBuffer
import java.text.SimpleDateFormat
import java.util.UUID
import com.vividsolutions.jts.geom.{Point, Polygon}
import org.apache.avro.io.{BinaryDecoder, DecoderFactory, Encoder, EncoderFactory}
import org.geotools.factory.Hints
import org.geotools.filter.identity.FeatureIdImpl
import org.junit.runner.RunWith
import org.locationtech.geomesa.feature.EncodingOption.EncodingOptions
import org.locationtech.geomesa.feature.serialization.{HintKeySerialization, AbstractWriter}
import org.locationtech.geomesa.security.SecurityUtils
import org.locationtech.geomesa.utils.geohash.GeohashUtils
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.locationtech.geomesa.utils.text.WKTUtils
import org.opengis.feature.simple.SimpleFeature
import org.specs2.mock.Mockito
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
import scala.collection.mutable.ListBuffer
import scala.util.Random
@RunWith(classOf[JUnitRunner])
class AvroSimpleFeatureWriterTest extends Specification with Mockito {
  def createComplicatedFeatures(numFeatures: Int): List[Version2ASF] = {
val geoSchema = "f0:String,f1:Integer,f2:Double,f3:Float,f4:Boolean,f5:UUID,f6:Date,f7:Point:srid=4326,"+
"f8:Polygon:srid=4326,f9:Long,f10:String,f11:Integer,f12:Date,f13:Geometry,f14:UUID"
val sft = SimpleFeatureTypes.createType("test", geoSchema)
val r = new Random()
r.setSeed(0)
val list = new ListBuffer[Version2ASF]
for(i <- 0 until numFeatures){
val fid = new FeatureIdImpl(r.nextString(5))
val sf = new Version2ASF(fid, sft)
sf.setAttribute("f0", r.nextString(10).asInstanceOf[Object])
sf.setAttribute("f1", r.nextInt().asInstanceOf[Object])
sf.setAttribute("f2", r.nextDouble().asInstanceOf[Object])
sf.setAttribute("f3", r.nextFloat().asInstanceOf[Object])
sf.setAttribute("f4", r.nextBoolean().asInstanceOf[Object])
sf.setAttribute("f5", UUID.fromString("12345678-1234-1234-1234-123456789012"))
sf.setAttribute("f6", new SimpleDateFormat("yyyyMMdd").parse("20140102"))
sf.setAttribute("f7", GeohashUtils.wkt2geom("POINT(45.0 49.0)").asInstanceOf[Point])
sf.setAttribute("f8", GeohashUtils.wkt2geom("POLYGON((-80 30,-80 23,-70 30,-70 40,-80 40,-80 30))").asInstanceOf[Polygon])
sf.setAttribute("f9", r.nextLong().asInstanceOf[Object])
// test nulls on a few data types
"f10,f11,f12,f13,f14".split(",").foreach { id =>
sf.setAttribute(id, null.asInstanceOf[Object])
}
list += sf
}
list.toList
}
def createSimpleFeature: SimpleFeature = {
val sft = SimpleFeatureTypes.createType("AvroSimpleFeatureWriterTest", "name:String,*geom:Point,dtg:Date")
val builder = AvroSimpleFeatureFactory.featureBuilder(sft)
builder.reset()
builder.set("name", "test_feature")
builder.set("geom", WKTUtils.read("POINT(-110 30)"))
builder.set("dtg", "2012-01-02T05:06:07.000Z")
val sf = builder.buildFeature("fid")
sf.getUserData.put(Hints.USE_PROVIDED_FID, java.lang.Boolean.TRUE)
sf
}
"AvroSimpleFeatureWriter2" should {
"correctly serialize features compared to old integrated AvroSimpleFeature write() method" in {
val sft = SimpleFeatureTypes.createType("testType", "a:Integer,b:Date,*geom:Point:srid=4326")
val f = new AvroSimpleFeature(new FeatureIdImpl("fakeid"), sft)
f.setAttribute(0,"1")
f.setAttribute(1,"2013-01-02T00:00:00.000Z")
f.setAttribute(2,"POINT(45.0 49.0)")
val oldBaos = new ByteArrayOutputStream()
Version2ASF(f).write(oldBaos)
val oldBytes = oldBaos.toByteArray
val afw = new AvroSimpleFeatureWriter(sft)
val newBaos = new ByteArrayOutputStream()
val encoder = EncoderFactory.get().directBinaryEncoder(newBaos, null)
afw.write(f, encoder)
encoder.flush()
val newBytes = newBaos.toByteArray
newBytes mustEqual oldBytes
}
"correctly serialize all the datatypes provided in AvroSimpleFeature" in {
val features = createComplicatedFeatures(10)
val oldBaos = new ByteArrayOutputStream()
def serializeOld(sf: SimpleFeature) = {
oldBaos.reset()
Version2ASF(sf).write(oldBaos)
oldBaos.toByteArray
}
val afw = new AvroSimpleFeatureWriter(features(0).getFeatureType)
val newBaos = new ByteArrayOutputStream()
val encoder = EncoderFactory.get().directBinaryEncoder(newBaos, null)
def serializeNew(sf: SimpleFeature) = {
newBaos.reset()
afw.write(sf, encoder)
encoder.flush()
newBaos.toByteArray
}
var decoder: BinaryDecoder = null
val fsr = FeatureSpecificReader(features(0).getFeatureType)
def convert(bytes: Array[Byte]) = {
val bais = new ByteArrayInputStream(bytes)
decoder = DecoderFactory.get().directBinaryDecoder(bais, decoder)
fsr.read(null, decoder)
}
val oldFeatures = features.map(serializeOld).map(convert)
val newFeatures = features.map(serializeNew).map(convert)
newFeatures.zip(oldFeatures).foreach { case (n, o) =>
n.getID mustEqual o.getID
n.getAttributeCount mustEqual o.getAttributeCount
n.getAttributeCount mustEqual 15
n.getAttributes mustEqual o.getAttributes
}
newFeatures.zip(features).foreach { case (n, o) =>
n.getID mustEqual o.getID
n.getAttributeCount mustEqual o.getAttributeCount
n.getAttributeCount mustEqual 15
n.getAttributes mustEqual o.getAttributes
}
success
}
"serialize user data when requested" >> {
import org.locationtech.geomesa.security._
val sf = createSimpleFeature
val vis = "test&usa"
sf.visibility = vis
val userData = sf.getUserData
userData.put(Hints.USE_PROVIDED_FID, java.lang.Boolean.TRUE)
userData.put(java.lang.Integer.valueOf(5), null)
userData.put(null, "null key")
val afw = new AvroSimpleFeatureWriter(sf.getType, EncodingOptions.withUserData)
val encoder = mock[Encoder]
afw.write(sf, encoder)
there was one(encoder).writeArrayStart()
there was one(encoder).setItemCount(4)
there was 4.times(encoder).startItem()
// 1 key and 2 values have type String
there was three(encoder).writeString("java.lang.String")
// 1 key and 0 values have type Hints.Key
there was one(encoder).writeString(classOf[Hints.Key].getName)
// 0 keys and 1 value have type Boolean
there was one(encoder).writeString("java.lang.Boolean")
// 1 key and 0 values have type Integer
there was one(encoder).writeString("java.lang.Boolean")
// 1 key and 1 value are null
there was two(encoder).writeString(AbstractWriter.NULL_MARKER_STR)
// visibility data
there was one(encoder).writeString(SecurityUtils.FEATURE_VISIBILITY)
there was one(encoder).writeString(vis)
// hint data
there was one(encoder).writeString(HintKeySerialization.keyToId(Hints.USE_PROVIDED_FID))
there was one(encoder).writeBoolean(true)
// key = 5, value = null
there was one(encoder).writeInt(5)
// key = null, value = "null key"
there was one(encoder).writeString("null key")
there was one(encoder).writeArrayEnd()
}
}
}
|
jnh5y/geomesa
|
geomesa-feature/src/test/scala/org/locationtech/geomesa/feature/AvroSimpleFeatureWriterTest.scala
|
Scala
|
apache-2.0
| 8,103 |
package io.opencensus.scala.http4s
import cats.effect.Effect
import io.opencensus.trace.Span
import org.http4s.client.Client
object implicits {
implicit class ClientWithTracing[F[_]: Effect](client: Client[F]) {
/**
* Enriches the `Client[F]` by tracing and propagation of the SpanContext via http headers.
*
* @param parentSpan the current span which will act as parent of the new span
*/
def traced(parentSpan: Span): Client[F] =
TracingClient[F].trace(client, Some(parentSpan))
/**
* Enriches the `Client[F]` by tracing and propagation of the SpanContext via http headers.
*/
def traced: Client[F] =
TracingClient[F].trace(client, None)
}
}
|
census-ecosystem/opencensus-scala
|
http4s/src/main/scala/io/opencensus/scala/http4s/implicits.scala
|
Scala
|
apache-2.0
| 717 |
package at.logic.gapt.examples
import at.logic.gapt.expr._
import at.logic.gapt.expr.hol.CNFp
import at.logic.gapt.proofs.{ Context, Sequent }
import at.logic.gapt.proofs.gaptic._
import at.logic.gapt.proofs.lk.LKProofSchemata
object tautSchema extends TacticsProof {
ctx += Context.Sort( "i" )
ctx += hoc"P: i>o"
ctx += hoc"0:i"
ctx += hoc"s:i>i"
ctx += hoc"PAND:i>o"
ctx += hoc"taut: i>i"
val es = Sequent( Seq( hof"!x PAND(s(x)) = (P(s(x)) & PAND(x))", hof"PAND(0) = P(0)", hof"PAND(n)" ), Seq( hof"PAND(n)" ) )
ctx += Context.ProofNameDeclaration( le"taut n", es )
val esBc = Sequent(
Seq(
( "Ant_0" -> hof"!x PAND(s(x)) = (P(s(x)) & PAND(x))" ),
( "Ant_1" -> hof"PAND(0) = P(0)" ),
( "Ant_2" -> hof"PAND(0)" )
),
Seq(
( "Suc_0" -> hof"PAND(0)" )
)
)
val bc = Lemma( esBc ) {
rewrite ltr "Ant_1" in "Suc_0"
rewrite ltr "Ant_1" in "Ant_2"
trivial
}
ctx += Context.ProofDefinitionDeclaration( le"taut 0", bc )
val esSc = Sequent(
Seq(
( "Ant_0" -> hof"!x PAND(s(x)) = (P(s(x)) & PAND(x))" ),
( "Ant_1" -> hof"PAND(0) = P(0)" ),
( "Ant_2" -> hof"PAND(s(n))" )
),
Seq(
( "Suc_0" -> hof"PAND(s(n))" )
)
)
val sc = Lemma( esSc ) {
rewrite ltr "Ant_0" in "Suc_0"
rewrite ltr "Ant_0" in "Ant_2"
andL
andR
trivial
forget( "Ant_2_0" )
ref( "taut" )
}
ctx += Context.ProofDefinitionDeclaration( le"taut (s n)", sc )
}
|
gebner/gapt
|
examples/schema/TestSchema.scala
|
Scala
|
gpl-3.0
| 1,474 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.keras.layers.internal
import com.intel.analytics.bigdl.dllib.nn.abstractnn.TensorModule
import com.intel.analytics.bigdl.dllib.tensor.Tensor
import com.intel.analytics.bigdl.dllib.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.dllib.common.zooMKLBlas
import scala.reflect.ClassTag
private[bigdl] class InternalERF[T: ClassTag]()(
implicit ev: TensorNumeric[T]) extends TensorModule[T] {
  // d/dx erf(x) = 2 / sqrt(pi) * exp(-x^2); 1.1283791670955126 == 2 / sqrt(pi)
  val derivativeFactor = ev.fromType(1.1283791670955126)
override def updateOutput(input: Tensor[T]): Tensor[T] = {
output.resizeAs(input).copy(input)
zooMKLBlas.erf(output)
output
}
override def updateGradInput(input: Tensor[T], gradOutput: Tensor[T]): Tensor[T] = {
val tensor = Tensor().resizeAs(input).copy(input)
val derivative = (-tensor.pow(ev.fromType(2))).exp().mul(derivativeFactor)
gradInput = gradOutput.cmul(derivative)
gradInput
}
}
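// A numeric sanity sketch (plain Scala, independent of BigDL tensors) for the
// gradient above: d/dx erf(x) = 2 / sqrt(pi) * exp(-x^2), so at x = 0 the
// derivative equals the derivativeFactor used by InternalERF.
object ErfDerivativeSketch {
  def erfDerivative(x: Double): Double = 2.0 / math.sqrt(math.Pi) * math.exp(-x * x)

  def main(args: Array[String]): Unit = {
    println(erfDerivative(0.0)) // 1.1283791670955126
    println(erfDerivative(1.0)) // ~0.41510749742
  }
}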
|
intel-analytics/BigDL
|
scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/keras/layers/InternalERF.scala
|
Scala
|
apache-2.0
| 1,545 |
package barstools.macros
import mdf.macrolib
class SRAMCompiler extends MacroCompilerSpec with HasSRAMGenerator with HasSimpleWidthTestGenerator {
val compiler: macrolib.SRAMCompiler = generateSRAMCompiler("awesome", "A")
val verilog = s"v-SRAMCompiler.v"
override lazy val depth = BigInt(16)
override lazy val memWidth = 8
override lazy val libWidth = 8
override lazy val mem_name = "mymem"
override lazy val memPortPrefix = "X"
override lazy val lib_name = "mygroup_8x16_SVT"
override lazy val libPortPrefix = "A"
writeToLib(lib, Seq(compiler))
writeToMem(mem, Seq(generateSRAM("mymem", "X", 8, 16)))
compileExecuteAndTest(mem, Some(lib), verilog, output = output, useCompiler = true)
}
|
ucb-bar/barstools
|
src/test/scala/barstools/macros/SRAMCompiler.scala
|
Scala
|
bsd-3-clause
| 718 |
package org.apache.spark.mllib.generators
/**
* Abstract class for a Decision Tree
*/
sealed abstract class DecisionTree extends Serializable {
def decide(data: Array[Double]): Int
def stats: DTStats
}
/**
* Discriminative node
*/
case class Decider(
variable: Int,
threshold: Double,
leftDecision: DecisionTree,
rightDecision: DecisionTree)
extends DecisionTree{
  override def decide(data: Array[Double]): Int = {
    // DecisionTree is sealed, so delegating to the subtree's own decide
    // covers both Decision leaves and nested Deciders.
    if (data(variable) < threshold) leftDecision.decide(data)
    else rightDecision.decide(data)
  }
override def stats = {
val leftStats = leftDecision.stats
val rightStats = rightDecision.stats
DTStats(leftStats.leafs + rightStats.leafs,
math.max(leftStats.maxDepth, rightStats.maxDepth) + 1,
math.min(leftStats.minDepth, rightStats.minDepth) + 1,
leftStats.variables ++ rightStats.variables + variable)
}
}
/**
* Leaf node.
*/
case class Decision(label: Int) extends DecisionTree {
override def decide(data: Array[Double]): Int = label
override def stats = DTStats(1, 0, 0, Set.empty[Int])
}
/**
* Case class used for getting Decision Tree Statistics.
*/
case class DTStats(leafs: Long, maxDepth: Int, minDepth: Int, variables: Set[Int])
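// A hedged usage sketch: a depth-1 tree that splits on variable 0 at threshold 0.5.
object DecisionTreeExample {
  def main(args: Array[String]): Unit = {
    val tree = Decider(variable = 0, threshold = 0.5, leftDecision = Decision(0), rightDecision = Decision(1))
    println(tree.decide(Array(0.2))) // 0 (0.2 < 0.5 goes left)
    println(tree.decide(Array(0.9))) // 1
    println(tree.stats)              // DTStats(2,1,1,Set(0))
  }
}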
|
sramirez/spark-experiments
|
src/main/scala/org/apache/spark/mllib/generators/DecisionTree.scala
|
Scala
|
apache-2.0
| 1,474 |
package com.cloudera.hue.livy.repl.scala
import com.cloudera.hue.livy.repl.Session
import com.cloudera.hue.livy.repl.scala.interpreter._
import org.json4s.jackson.JsonMethods._
import org.json4s.jackson.Serialization.write
import org.json4s.{JValue, _}
import scala.collection.mutable
import scala.concurrent.{ExecutionContext, Future}
object SparkSession {
def create(): Session = new SparkSession()
}
private class SparkSession extends Session {
private implicit def executor: ExecutionContext = ExecutionContext.global
implicit val formats = DefaultFormats
  private val _history = new mutable.ArrayBuffer[JValue]
private val interpreter = new Interpreter()
interpreter.start()
override def state: Session.State = interpreter.state match {
case Interpreter.NotStarted() => Session.NotStarted()
case Interpreter.Starting() => Session.Starting()
case Interpreter.Idle() => Session.Idle()
case Interpreter.Busy() => Session.Busy()
case Interpreter.ShuttingDown() => Session.ShuttingDown()
}
override def history(): Seq[JValue] = _history
override def history(id: Int): Option[JValue] = synchronized {
if (id < _history.length) {
Some(_history(id))
} else {
None
}
}
override def execute(code: String): Future[JValue] = {
Future {
val content = interpreter.execute(code) match {
case ExecuteComplete(executeCount, output) =>
Map(
"status" -> "ok",
"execution_count" -> executeCount,
"data" -> Map(
"text/plain" -> output
)
)
        case ExecuteIncomplete(executeCount, output) =>
          Map(
            "status" -> "error",
            "execution_count" -> executeCount,
            "ename" -> "Error",
            "evalue" -> output,
            "traceback" -> List()
          )
        case ExecuteError(executeCount, output) =>
          Map(
            "status" -> "error",
            "execution_count" -> executeCount,
            "ename" -> "Error",
            "evalue" -> output,
            "traceback" -> List()
          )
}
val jsonContent = parse(write(content))
_history += jsonContent
jsonContent
}
}
override def close(): Future[Unit] = {
Future {
interpreter.shutdown()
}
}
}
|
erickt/hue
|
apps/spark/java/livy-repl/src/main/scala/com/cloudera/hue/livy/repl/scala/SparkSession.scala
|
Scala
|
apache-2.0
| 2,332 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.{expressions, InternalRow}
import org.apache.spark.sql.catalyst.expressions.{CreateNamedStruct, Expression, ExprId, InSet, ListQuery, Literal, PlanExpression}
import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, ExprCode}
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.{BooleanType, DataType, StructType}
/**
 * The base class for subqueries that are used in SparkPlan.
*/
abstract class ExecSubqueryExpression extends PlanExpression[BaseSubqueryExec] {
/**
* Fill the expression with collected result from executed plan.
*/
def updateResult(): Unit
/** Updates the expression with a new plan. */
override def withNewPlan(plan: BaseSubqueryExec): ExecSubqueryExpression
}
object ExecSubqueryExpression {
/**
* Returns true when an expression contains a subquery
*/
def hasSubquery(e: Expression): Boolean = {
e.find {
case _: ExecSubqueryExpression => true
case _ => false
}.isDefined
}
}
/**
* A subquery that will return only one row and one column.
*
* This is the physical copy of ScalarSubquery to be used inside SparkPlan.
*/
case class ScalarSubquery(
plan: BaseSubqueryExec,
exprId: ExprId)
extends ExecSubqueryExpression {
override def dataType: DataType = plan.schema.fields.head.dataType
override def children: Seq[Expression] = Nil
override def nullable: Boolean = true
override def toString: String = plan.simpleString(SQLConf.get.maxToStringFields)
override def withNewPlan(query: BaseSubqueryExec): ScalarSubquery = copy(plan = query)
override def semanticEquals(other: Expression): Boolean = other match {
case s: ScalarSubquery => plan.sameResult(s.plan)
case _ => false
}
  // The first column in the first row from `query`.
@volatile private var result: Any = _
@volatile private var updated: Boolean = false
def updateResult(): Unit = {
// Only return the first two rows as an array to avoid Driver OOM.
val rows = plan.executeTake(2)
if (rows.length > 1) {
sys.error(s"more than one row returned by a subquery used as an expression:\\n$plan")
}
if (rows.length == 1) {
assert(rows(0).numFields == 1,
s"Expects 1 field, but got ${rows(0).numFields}; something went wrong in analysis")
result = rows(0).get(0, dataType)
} else {
      // If no rows are returned, the result should be null.
result = null
}
updated = true
}
override def eval(input: InternalRow): Any = {
require(updated, s"$this has not finished")
result
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
require(updated, s"$this has not finished")
Literal.create(result, dataType).doGenCode(ctx, ev)
}
}
/**
* The physical node of in-subquery. This is for Dynamic Partition Pruning only, as in-subquery
* coming from the original query will always be converted to joins.
*/
case class InSubqueryExec(
child: Expression,
plan: BaseSubqueryExec,
exprId: ExprId,
private var resultBroadcast: Broadcast[Array[Any]] = null) extends ExecSubqueryExpression {
@transient private var result: Array[Any] = _
@transient private lazy val inSet = InSet(child, result.toSet)
override def dataType: DataType = BooleanType
override def children: Seq[Expression] = child :: Nil
override def nullable: Boolean = child.nullable
override def toString: String = s"$child IN ${plan.name}"
override def withNewPlan(plan: BaseSubqueryExec): InSubqueryExec = copy(plan = plan)
override def semanticEquals(other: Expression): Boolean = other match {
case in: InSubqueryExec => child.semanticEquals(in.child) && plan.sameResult(in.plan)
case _ => false
}
def updateResult(): Unit = {
val rows = plan.executeCollect()
result = if (plan.output.length > 1) {
rows.asInstanceOf[Array[Any]]
} else {
rows.map(_.get(0, child.dataType))
}
resultBroadcast = plan.sqlContext.sparkContext.broadcast(result)
}
def values(): Option[Array[Any]] = Option(resultBroadcast).map(_.value)
private def prepareResult(): Unit = {
require(resultBroadcast != null, s"$this has not finished")
if (result == null) {
result = resultBroadcast.value
}
}
override def eval(input: InternalRow): Any = {
prepareResult()
inSet.eval(input)
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
prepareResult()
inSet.doGenCode(ctx, ev)
}
override lazy val canonicalized: InSubqueryExec = {
copy(
child = child.canonicalized,
plan = plan.canonicalized.asInstanceOf[BaseSubqueryExec],
exprId = ExprId(0),
resultBroadcast = null)
}
}
/**
* Plans subqueries that are present in the given [[SparkPlan]].
*/
case class PlanSubqueries(sparkSession: SparkSession) extends Rule[SparkPlan] {
def apply(plan: SparkPlan): SparkPlan = {
plan.transformAllExpressions {
case subquery: expressions.ScalarSubquery =>
val executedPlan = QueryExecution.prepareExecutedPlan(sparkSession, subquery.plan)
ScalarSubquery(
SubqueryExec(s"scalar-subquery#${subquery.exprId.id}", executedPlan),
subquery.exprId)
case expressions.InSubquery(values, ListQuery(query, _, exprId, _)) =>
val expr = if (values.length == 1) {
values.head
} else {
CreateNamedStruct(
values.zipWithIndex.flatMap { case (v, index) =>
Seq(Literal(s"col_$index"), v)
}
)
}
val executedPlan = QueryExecution.prepareExecutedPlan(sparkSession, query)
InSubqueryExec(expr, SubqueryExec(s"subquery#${exprId.id}", executedPlan), exprId)
}
}
}
/**
 * Find duplicated subqueries in the Spark plan, then reuse the same subquery result for all
 * the references.
*/
object ReuseSubquery extends Rule[SparkPlan] {
def apply(plan: SparkPlan): SparkPlan = {
if (!conf.subqueryReuseEnabled) {
return plan
}
// Build a hash map using schema of subqueries to avoid O(N*N) sameResult calls.
val subqueries = mutable.HashMap[StructType, ArrayBuffer[BaseSubqueryExec]]()
plan transformAllExpressions {
case sub: ExecSubqueryExpression =>
val sameSchema =
subqueries.getOrElseUpdate(sub.plan.schema, ArrayBuffer[BaseSubqueryExec]())
val sameResult = sameSchema.find(_.sameResult(sub.plan))
if (sameResult.isDefined) {
sub.withNewPlan(ReusedSubqueryExec(sameResult.get))
} else {
sameSchema += sub.plan
sub
}
}
}
}
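// A standalone sketch (hypothetical data, not Spark classes) of the dedup strategy
// in ReuseSubquery above: bucket candidates by a cheap key (the schema) so that the
// expensive sameResult-style comparison only runs within a bucket. It reuses the
// mutable/ArrayBuffer imports already at the top of this file.
object ReuseByKeySketch {
  def main(args: Array[String]): Unit = {
    val plans = Seq(("schemaA", "plan1"), ("schemaA", "plan1"), ("schemaB", "plan2"), ("schemaB", "plan3"))
    var comparisons = 0
    val buckets = mutable.HashMap[String, ArrayBuffer[(String, String)]]()
    val reused = plans.map { p =>
      val bucket = buckets.getOrElseUpdate(p._1, ArrayBuffer())
      bucket.find { q => comparisons += 1; q._2 == p._2 } match {
        case Some(existing) => existing // reuse the earlier, equivalent plan
        case None => bucket += p; p
      }
    }
    println(reused(0) eq reused(1)) // true: the duplicate reuses the first instance
    println(comparisons)            // 2 bucketed comparisons instead of up to 6 pairwise
  }
}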
|
shuangshuangwang/spark
|
sql/core/src/main/scala/org/apache/spark/sql/execution/subquery.scala
|
Scala
|
apache-2.0
| 7,705 |
package ch.uzh.ifi.pdeboer.pplib.process.recombination
import ch.uzh.ifi.pdeboer.pplib.hcomp.{HComp, HCompPortalAdapter}
import ch.uzh.ifi.pdeboer.pplib.process.entities._
import ch.uzh.ifi.pdeboer.pplib.process.stdlib._
import ch.uzh.ifi.pdeboer.pplib.util.TestUtils
import org.junit.{Assert, Test}
import scala.reflect.runtime.universe._
/**
* Created by pdeboer on 27/03/15.
*/
class TypeRecombinatorTest {
TestUtils.ensureThereIsAtLeast1Portal()
def newDB = {
val db = new RecombinationDB
db.addClass(classOf[Collection])
db.addClass(classOf[CollectionWithSigmaPruning])
db
}
@Test
def testFixProcessRecombination: Unit = {
val db = new RecombinationDB
db.addClass(classOf[FixPatchProcess])
db.addClass(classOf[FindFixPatchProcess])
db.addClass(classOf[Collection])
db.addClass(classOf[ContestWithMultipleEqualWinnersProcess])
db.addClass(classOf[CollectDecideProcess])
db.addClass(classOf[Contest])
val recombinator = new TypeRecombinator(RecombinationHints.create(TypeRecombinatorTest.DEFAULT_TESTING_HINTS), db)
val results = recombinator.materialize[FindFixPatchProcess]
Assert.assertEquals(1, results.size)
}
@Test
def testApplicableTypes: Unit = {
val db = new RecombinationDB
class ApplicableType extends CreateProcess[List[IndexedPatch], List[IndexedPatch]](Map.empty) {
override protected def run(data: List[IndexedPatch]): List[IndexedPatch] = Nil
}
db.addClass(classOf[ApplicableType])
val recombinator = new TypeRecombinator(RecombinationHints.create(Map.empty), db)
Assert.assertEquals("default case", 1, recombinator.getApplicableTypesInDB(typeOf[CreateProcess[List[IndexedPatch], List[IndexedPatch]]]).size)
Assert.assertEquals("generic superclass", 1, recombinator.getApplicableTypesInDB(typeOf[CreateProcess[_ <: List[Patch], _ <: List[Patch]]]).size)
}
@Test
def testTrivialMaterialize: Unit = {
val db = newDB
val r = new TypeRecombinator(RecombinationHints.create(Map()), db)
val materialized = r.materialize[CreateProcess[Patch, List[Patch]]]
val processClasses = materialized.map(_.clazz).toSet
Assert.assertEquals(Set(classOf[Collection], classOf[CollectionWithSigmaPruning]), processClasses)
}
@Test
def testTypeConstrainedMaterialize: Unit = {
val db = newDB
db.addClass(classOf[Contest])
val r = new TypeRecombinator(RecombinationHints.create(Map()), db)
val materialized = r.materialize[DecideProcess[List[Patch], Patch]]
val processClasses = materialized.map(_.clazz).toSet
Assert.assertEquals(Set(classOf[Contest]), processClasses)
}
@Test
def testParameterSupplyingConstraints: Unit = {
val possibleValue1 = 17
val possibleValue2 = 23
val workerCountHint = new AddedParameterRecombinationHint[Int](DefaultParameters.WORKER_COUNT, List(possibleValue1, possibleValue2))
val settingsHint = new SettingsOnParamsRecombinationHint(addGeneralDefaultValuesForParam = Some(false), addLocalDefaultValuesForParam = Some(false))
val db = newDB
val r = new TypeRecombinator(RecombinationHints.create(Map(
RecombinationHints.DEFAULT_HINTS -> List(workerCountHint, settingsHint)
)), db)
val materialized = r.materialize[ProcessStub[Patch, List[Patch]]]
Assert.assertTrue(materialized.forall(p => {
val thisParam = p.getParam(workerCountHint.param.key)
val valueIsSet = thisParam.contains(possibleValue1) || thisParam.contains(possibleValue2)
valueIsSet && p.params.size == 1
}))
Assert.assertEquals(4, materialized.length)
}
@Test
def testCollectDecideProcessRecombinationWithSimpleDB: Unit = {
val db = newDB
db.addClass(classOf[Contest])
db.addClass(classOf[ContestWithBeatByKVotingProcess])
db.addClass(classOf[CollectDecideProcess])
val parametersToDisableDefaultValues: Map[Class[_ <: ProcessStub[_, _]], List[RecombinationHint]] = Map(
RecombinationHints.DEFAULT_HINTS -> List(new SettingsOnParamsRecombinationHint(addGeneralDefaultValuesForParam = Some(false), addLocalDefaultValuesForParam = Some(false))))
val r = new TypeRecombinator(RecombinationHints.create(parametersToDisableDefaultValues), db)
val materialized = r.materialize[CollectDecideProcess]
Assert.assertEquals(4, materialized.size)
}
@Test
def testTurningDefaultOffAndForceParamSetting: Unit = {
val materialized = new TextShorteningRecombinationTest().candidates
def containsSubProcessWithPortal(process: PassableProcessParam[_], targetPortal: HCompPortalAdapter): Boolean = {
			val oneOfChildrenContainsPortal = process.params.values.exists {
				case proc: PassableProcessParam[_] =>
					containsSubProcessWithPortal(proc, targetPortal)
				case _ => false
			}
			val thisProcessContainsPortal = process.getParam(DefaultParameters.PORTAL_PARAMETER).contains(targetPortal)
			// a portal counts as used if it appears on this process or on any nested sub-process
			thisProcessContainsPortal || oneOfChildrenContainsPortal
}
val otherPortals = HComp.allDefinedPortals.toSet - HComp.randomPortal
if (otherPortals.isEmpty) println(Thread.currentThread().getStackTrace()(1) + ": This test will only work if you have defined more than 1 portal.")
Assert.assertFalse("no crowd flower must be used, only mturk", materialized.exists(p =>
otherPortals.exists(portal => containsSubProcessWithPortal(p, portal))))
}
}
object TypeRecombinatorTest {
val DEFAULT_TESTING_HINTS: Map[Class[_ <: ProcessStub[_, _]], List[RecombinationHint]] = Map(RecombinationHints.DEFAULT_HINTS -> (List(
//disable default values for instruction values
new SettingsOnParamsRecombinationHint(List(DefaultParameters.INSTRUCTIONS.key), addGeneralDefaultValuesForParam = Some(false), addLocalDefaultValuesForParam = Some(false)),
new AddedParameterRecombinationHint[InstructionData](DefaultParameters.INSTRUCTIONS, List(
new InstructionData(actionName = "shorten the following paragraph", detailedDescription = "grammar (e.g. tenses), text-length")))
) ::: RecombinationHints.hcompPlatform(List(HComp.randomPortal))))
}
|
uzh/PPLib
|
src/test/scala/ch/uzh/ifi/pdeboer/pplib/process/recombination/TypeRecombinatorTest.scala
|
Scala
|
mit
| 5,890 |
/**
 * FILE: OwnedResourceTest.scala
 * PATH: /Codice/sgad/servertier/src/test/scala/sgad/servertier/dataaccess/data/userdata
 * CREATED: 20 February 2014
 * AUTHOR: ProTech
 * EMAIL: [email protected]
 *
 * This file is property of the ProTech group and is released under the Apache v2 license.
 *
 * CHANGE LOG:
 * 2014-02-20 - Class created - Segantin Fabio
 */
import org.scalatest._
import sgad.servertier.dataaccess.data.shareddata.Resource
import sgad.servertier.dataaccess.data.userdata.OwnedResource
/**
 * Test class for the OwnedResource class
 */
class OwnedResourceTest extends FlatSpec with PrivateMethodTester {
val gold = new Resource("oro")
val potion = new Resource("pozione")
val setQuantity = PrivateMethod[Unit]('setQuantity)
"Una OwnedResource" must "mantenere la stessa risorsa fornita durante il costruttore" in {
val ownedPotions = new OwnedResource(potion, 100)
val ownedGold = new OwnedResource(gold, 200)
assert(ownedPotions.getResource == potion)
assert(ownedGold.getResource == gold)
}
it must "mantenere la stessa quantità in maniera analoga al setter e al costruttore" in {
val ownedPotions = new OwnedResource(potion, 100)
val ownedGold = new OwnedResource(gold, 200)
assert(ownedGold.getQuantity == 200)
assert(ownedPotions.getQuantity == 100)
val goldQuantity = ownedGold.getQuantity
val potionQuantity = ownedPotions.getQuantity
ownedGold invokePrivate setQuantity(100)
assert(ownedGold.getQuantity == 100)
assert(!(goldQuantity != 100 && ownedGold.getQuantity == goldQuantity))
assert(ownedPotions.getQuantity == potionQuantity)
}
it must "essere uguale solo ad oggetti equivalenti" in {
val ownedPotions1 = new OwnedResource(potion, 100)
val ownedGold1 = new OwnedResource(gold, 200)
val ownedPotions2 = new OwnedResource(potion, 100)
val ownedGold2 = new OwnedResource(gold, 200)
val ownedPotions3 = new OwnedResource(potion, 200)
val ownedGold3 = new OwnedResource(gold, 100)
val resource1 = new Resource("panna")
assert(ownedPotions1 == ownedPotions2)
assert(ownedGold1 == ownedGold2)
assert(ownedGold3 != ownedGold2)
assert(ownedGold3 != ownedPotions1)
assert(ownedPotions3 != resource1)
}
it must "lanciare una eccezione se viene chiamato un metodo con parametri non adatti" in {
intercept[IllegalArgumentException] {
new OwnedResource(null, 200)
}
intercept[IllegalArgumentException] {
val ownedGold = new OwnedResource(gold, 200)
ownedGold invokePrivate setQuantity(-1)
}
}
it must "avere la stessa chiave della risorsa" in {
val ownedGold = new OwnedResource(gold, 200)
assert(gold.getKey == ownedGold.getKey)
}
}
|
protechunipd/SGAD
|
Codice/sgad/servertier/src/test/scala/sgad/servertier/dataaccess/data/userdata/OwnedResourceTest.scala
|
Scala
|
apache-2.0
| 2,695 |
package com.twitter.finagle.memcached.stress
import com.twitter.conversions.DurationOps._
import com.twitter.finagle.Address
import com.twitter.finagle.Memcached
import com.twitter.finagle.Name
import com.twitter.finagle.Service
import com.twitter.finagle.memcached.integration.external.InProcessMemcached
import com.twitter.finagle.memcached.protocol._
import com.twitter.io.Buf
import com.twitter.util.{Await, Awaitable, Time}
import java.net.{InetAddress, InetSocketAddress}
import org.scalatest.BeforeAndAfter
import org.scalatest.funsuite.AnyFunSuite
class InterpreterServiceTest extends AnyFunSuite with BeforeAndAfter {
val TimeOut = 15.seconds
private def awaitResult[T](awaitable: Awaitable[T]): T = Await.result(awaitable, TimeOut)
var server: InProcessMemcached = null
var client: Service[Command, Response] = null
before {
server = new InProcessMemcached(new InetSocketAddress(InetAddress.getLoopbackAddress, 0))
val address = Address(server.start().boundAddress.asInstanceOf[InetSocketAddress])
client = Memcached.client
.connectionsPerEndpoint(1)
.newService(Name.bound(address), "memcache")
}
after {
server.stop()
}
test("set & get") {
val _key = "key"
val value = Buf.Utf8("value")
val zero = Buf.Utf8("0")
val start = System.currentTimeMillis
(0 until 100) map { i =>
val key = _key + i
awaitResult(client(Delete(Buf.Utf8(key))))
awaitResult(client(Set(Buf.Utf8(key), 0, Time.epoch, value)))
assert(
awaitResult(client(Get(Seq(Buf.Utf8(key))))) == Values(
Seq(Value(Buf.Utf8(key), value, None, Some(zero)))
)
)
}
val end = System.currentTimeMillis
// println("%d ms".format(end - start))
}
}
|
twitter/finagle
|
finagle-memcached/src/test/scala/com/twitter/finagle/memcached/stress/InterpreterServiceTest.scala
|
Scala
|
apache-2.0
| 1,755 |
package org.jetbrains.plugins.scala
package lang.psi.types.nonvalue
import com.intellij.openapi.project.Project
import com.intellij.psi.search.GlobalSearchScope
import com.intellij.psi.{PsiParameter, PsiTypeParameter}
import org.jetbrains.plugins.scala.extensions.PsiParameterExt
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.{ScParameter, ScTypeParam}
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiManager
import org.jetbrains.plugins.scala.lang.psi.types._
import org.jetbrains.plugins.scala.lang.psi.types.result.TypingContext
import scala.collection.immutable.{HashMap, HashSet}
/**
* @author ilyas
*/
/**
 * This is an internal type; no expression can have such a type.
 */
trait NonValueType extends ScType {
def isValue = false
}
/**
 * Generalized parameter. It is not a PSI element, so it can be used anywhere.
 * There are some differences from the corresponding PSI parameter.
 */
case class Parameter(name: String, deprecatedName: Option[String], paramType: ScType, expectedType: ScType,
isDefault: Boolean, isRepeated: Boolean, isByName: Boolean,
index: Int = -1, psiParam: Option[PsiParameter] = None, defaultType: Option[ScType] = None) {
def this(name: String, deprecatedName: Option[String], paramType: ScType,
isDefault: Boolean, isRepeated: Boolean, isByName: Boolean, index: Int) {
this(name, deprecatedName, paramType, paramType, isDefault, isRepeated, isByName, index)
}
def this(param: ScParameter) {
this(param.name, param.deprecatedName, param.getType(TypingContext.empty).getOrNothing, param.getType(TypingContext.empty).getOrNothing,
param.isDefaultParam, param.isRepeatedParameter, param.isCallByNameParameter, param.index, Some(param), param.getDefaultExpression.flatMap(_.getType().toOption))
}
def this(param: PsiParameter) {
this(param.getName, None, param.paramType, param.paramType, false, param.isVarArgs, false, param.index, Some(param))
}
def paramInCode: Option[ScParameter] = psiParam match {
case Some(scParam: ScParameter) => Some(scParam)
case _ => None
}
def nameInCode = psiParam.map(_.getName)
}
/**
* Class representing type parameters in our type system. Can be constructed from psi.
* todo: lower and upper types will be reevaluated many times, is it good or bad? Seems bad. What other ways to fix SCL-7216?
* @param lowerType important to be lazy, see SCL-7216
* @param upperType important to be lazy, see SCL-7216
*/
class TypeParameter(val name: String, val typeParams: Seq[TypeParameter], val lowerType: () => ScType,
val upperType: () => ScType, val ptp: PsiTypeParameter) {
def this(ptp: PsiTypeParameter) {
this(ptp match {
case tp: ScTypeParam => tp.name
case _ => ptp.getName
}, ptp match {
case tp: ScTypeParam => tp.typeParameters.map(new TypeParameter(_))
case _ => Seq.empty
}, ptp match {
case tp: ScTypeParam => () => tp.lowerBound.getOrNothing
case _ => () => Nothing //todo: lower type?
}, ptp match {
case tp: ScTypeParam => () => tp.upperBound.getOrAny
case _ => () => Any //todo: upper type?
}, ptp)
}
def update(fun: ScType => ScType): TypeParameter = {
new TypeParameter(name, typeParams.map(_.update(fun)), {
val res = fun(lowerType())
() => res
}, {
val res = fun(upperType())
() => res
}, ptp)
}
def canEqual(other: Any): Boolean = other.isInstanceOf[TypeParameter]
override def equals(other: Any): Boolean = other match {
case that: TypeParameter =>
(that canEqual this) &&
name == that.name &&
typeParams == that.typeParams &&
lowerType() == that.lowerType() &&
upperType() == that.upperType() &&
ptp == that.ptp
case _ => false
}
override def hashCode(): Int = {
val state = Seq(name, typeParams, ptp)
state.map(_.hashCode()).foldLeft(0)((a, b) => 31 * a + b)
}
}
object TypeParameter {
def apply(name: String, typeParams: Seq[TypeParameter], lowerType: () => ScType, upperType: () => ScType,
ptp: PsiTypeParameter): TypeParameter = {
new TypeParameter(name, typeParams, lowerType, upperType, ptp)
}
def unapply(t: TypeParameter): Option[(String, Seq[TypeParameter], () => ScType, () => ScType, PsiTypeParameter)] = {
Some(t.name, t.typeParams, t.lowerType, t.upperType, t.ptp)
}
def fromArray(ptps: Array[PsiTypeParameter]): Array[TypeParameter] = {
if (ptps.length == 0) EMPTY_ARRAY
else ptps.map(new TypeParameter(_))
}
val EMPTY_ARRAY: Array[TypeParameter] = Array.empty
}
case class ScMethodType(returnType: ScType, params: Seq[Parameter], isImplicit: Boolean)
(val project: Project, val scope: GlobalSearchScope) extends NonValueType {
def visitType(visitor: ScalaTypeVisitor) {
visitor.visitMethodType(this)
}
override def typeDepth: Int = returnType.typeDepth
def inferValueType: ValueType = {
ScFunctionType(returnType.inferValueType, params.map(p => {
val inferredParamType = p.paramType.inferValueType
if (!p.isRepeated) inferredParamType
else {
val seqClass = ScalaPsiManager.instance(project).getCachedClass(scope, "scala.collection.Seq")
seqClass.fold(inferredParamType) { inferred =>
ScParameterizedType(ScDesignatorType(inferred), Seq(inferredParamType))
}
}
}))(project, scope)
}
override def removeAbstracts = new ScMethodType(returnType.removeAbstracts,
params.map(p => p.copy(paramType = p.paramType.removeAbstracts)), isImplicit)(project, scope)
override def recursiveUpdate(update: ScType => (Boolean, ScType), visited: HashSet[ScType]): ScType = {
if (visited.contains(this)) {
return update(this) match {
case (true, res) => res
case _ => this
}
}
val newVisited = visited + this
update(this) match {
case (true, res) => res
case _ =>
new ScMethodType(returnType.recursiveUpdate(update, newVisited),
params.map(p => p.copy(paramType = p.paramType.recursiveUpdate(update, newVisited))), isImplicit)(project, scope)
}
}
override def recursiveVarianceUpdateModifiable[T](data: T, update: (ScType, Int, T) => (Boolean, ScType, T),
variance: Int = 1): ScType = {
update(this, variance, data) match {
case (true, res, _) => res
case (_, _, newData) =>
new ScMethodType(returnType.recursiveVarianceUpdateModifiable(newData, update, variance),
params.map(p => p.copy(paramType = p.paramType.recursiveVarianceUpdateModifiable(newData, update, -variance))),
isImplicit)(project, scope)
}
}
override def equivInner(r: ScType, uSubst: ScUndefinedSubstitutor, falseUndef: Boolean): (Boolean, ScUndefinedSubstitutor) = {
var undefinedSubst = uSubst
r match {
case m: ScMethodType =>
if (m.params.length != params.length) return (false, undefinedSubst)
        var t = Equivalence.equivInner(m.returnType, returnType, undefinedSubst, falseUndef)
if (!t._1) return (false, undefinedSubst)
undefinedSubst = t._2
var i = 0
while (i < params.length) {
//todo: Seq[Type] instead of Type*
if (params(i).isRepeated != m.params(i).isRepeated) return (false, undefinedSubst)
t = Equivalence.equivInner(params(i).paramType, m.params(i).paramType, undefinedSubst, falseUndef)
if (!t._1) return (false, undefinedSubst)
undefinedSubst = t._2
i = i + 1
}
(true, undefinedSubst)
case _ => (false, undefinedSubst)
}
}
}
case class ScTypePolymorphicType(internalType: ScType, typeParameters: Seq[TypeParameter]) extends NonValueType {
if (internalType.isInstanceOf[ScTypePolymorphicType]) {
throw new IllegalArgumentException("Polymorphic type can't have wrong internal type")
}
def polymorphicTypeSubstitutor: ScSubstitutor = polymorphicTypeSubstitutor(inferValueType = false)
def polymorphicTypeSubstitutor(inferValueType: Boolean): ScSubstitutor =
new ScSubstitutor(new HashMap[(String, String), ScType] ++ typeParameters.map(tp => {
var contraVariant = 0
var coOrInVariant = 0
internalType.recursiveVarianceUpdate {
case (typez: ScType, i: Int) =>
val pair = typez match {
case tp: ScTypeParameterType => (tp.name, ScalaPsiUtil.getPsiElementId(tp.param))
case ScUndefinedType(tp) => (tp.name, ScalaPsiUtil.getPsiElementId(tp.param))
case ScAbstractType(tp, _, _) => (tp.name, ScalaPsiUtil.getPsiElementId(tp.param))
case _ => null
}
if (pair != null) {
if ((tp.name, ScalaPsiUtil.getPsiElementId(tp.ptp)) == pair) {
if (i == -1) contraVariant += 1
else coOrInVariant += 1
}
}
(false, typez)
}
if (coOrInVariant == 0 && contraVariant != 0)
((tp.name, ScalaPsiUtil.getPsiElementId(tp.ptp)), tp.upperType().inferValueType)
else
((tp.name, ScalaPsiUtil.getPsiElementId(tp.ptp)), tp.lowerType().inferValueType)
}), Map.empty, None)
def abstractTypeSubstitutor: ScSubstitutor = {
def hasRecursiveTypeParameters(typez: ScType): Boolean = {
var hasRecursiveTypeParameters = false
typez.recursiveUpdate {
case tpt: ScTypeParameterType =>
typeParameters.find(tp => (tp.name, ScalaPsiUtil.getPsiElementId(tp.ptp)) == (tpt.name, tpt.getId)) match {
case None => (true, tpt)
case _ =>
hasRecursiveTypeParameters = true
(true, tpt)
}
case tp: ScType => (hasRecursiveTypeParameters, tp)
}
hasRecursiveTypeParameters
}
new ScSubstitutor(new HashMap[(String, String), ScType] ++ typeParameters.map(tp => {
val lowerType: ScType = if (hasRecursiveTypeParameters(tp.lowerType())) Nothing else tp.lowerType()
val upperType: ScType = if (hasRecursiveTypeParameters(tp.upperType())) Any else tp.upperType()
((tp.name, ScalaPsiUtil.getPsiElementId(tp.ptp)),
new ScAbstractType(new ScTypeParameterType(tp.ptp, ScSubstitutor.empty), lowerType, upperType))
}), Map.empty, None)
}
def typeParameterTypeSubstitutor: ScSubstitutor =
new ScSubstitutor(new HashMap[(String, String), ScType] ++ typeParameters.map(tp => ((tp.name, ScalaPsiUtil.getPsiElementId(tp.ptp)),
new ScTypeParameterType(tp.ptp, ScSubstitutor.empty))), Map.empty, None)
def inferValueType: ValueType = {
polymorphicTypeSubstitutor(inferValueType = true).subst(internalType.inferValueType).asInstanceOf[ValueType]
}
override def removeAbstracts = ScTypePolymorphicType(internalType.removeAbstracts, typeParameters.map(tp => {
TypeParameter(tp.name, tp.typeParams /* todo: ? */, () => tp.lowerType().removeAbstracts,
() => tp.upperType().removeAbstracts, tp.ptp)
}))
override def recursiveUpdate(update: ScType => (Boolean, ScType), visited: HashSet[ScType]): ScType = {
if (visited.contains(this)) {
return update(this) match {
case (true, res) => res
case _ => this
}
}
val newVisited = visited + this
update(this) match {
case (true, res) => res
case _ =>
ScTypePolymorphicType(internalType.recursiveUpdate(update, newVisited), typeParameters.map(tp => {
TypeParameter(tp.name, tp.typeParams /* todo: ? */, {
val res = tp.lowerType().recursiveUpdate(update, newVisited)
() => res
}, {
val res = tp.upperType().recursiveUpdate(update, newVisited)
() => res
}, tp.ptp)
}))
}
}
override def recursiveVarianceUpdateModifiable[T](data: T, update: (ScType, Int, T) => (Boolean, ScType, T),
variance: Int = 1): ScType = {
update(this, variance, data) match {
case (true, res, _) => res
case (_, _, newData) =>
ScTypePolymorphicType(internalType.recursiveVarianceUpdateModifiable(newData, update, variance), typeParameters.map(tp => {
TypeParameter(tp.name, tp.typeParams /* todo: ? */,
() => tp.lowerType().recursiveVarianceUpdateModifiable(newData, update, -variance),
() => tp.upperType().recursiveVarianceUpdateModifiable(newData, update, variance), tp.ptp)
}))
}
}
override def equivInner(r: ScType, uSubst: ScUndefinedSubstitutor,
falseUndef: Boolean): (Boolean, ScUndefinedSubstitutor) = {
var undefinedSubst = uSubst
r match {
case p: ScTypePolymorphicType =>
if (typeParameters.length != p.typeParameters.length) return (false, undefinedSubst)
var i = 0
while (i < typeParameters.length) {
var t = Equivalence.equivInner(typeParameters(i).lowerType(),
p.typeParameters(i).lowerType(), undefinedSubst, falseUndef)
          if (!t._1) return (false, undefinedSubst)
undefinedSubst = t._2
t = Equivalence.equivInner(typeParameters(i).upperType(),
p.typeParameters(i).upperType(), undefinedSubst, falseUndef)
if (!t._1) return (false, undefinedSubst)
undefinedSubst = t._2
i = i + 1
}
val subst = new ScSubstitutor(new collection.immutable.HashMap[(String, String), ScType] ++
typeParameters.zip(p.typeParameters).map({
tuple => ((tuple._1.name, ScalaPsiUtil.getPsiElementId(tuple._1.ptp)), new ScTypeParameterType(tuple._2.name,
tuple._2.ptp match {
case p: ScTypeParam => p.typeParameters.toList.map{new ScTypeParameterType(_, ScSubstitutor.empty)}
case _ => Nil
}, new Suspension(tuple._2.lowerType), new Suspension(tuple._2.upperType), tuple._2.ptp))
}), Map.empty, None)
Equivalence.equivInner(subst.subst(internalType), p.internalType, undefinedSubst, falseUndef)
case _ => (false, undefinedSubst)
}
}
def visitType(visitor: ScalaTypeVisitor): Unit = {
visitor.visitTypePolymorphicType(this)
}
override def typeDepth: Int = {
if (typeParameters.nonEmpty) internalType.typeDepth.max(ScType.typeParamsDepth(typeParameters.toArray) + 1)
else internalType.typeDepth
}
}
|
LPTK/intellij-scala
|
src/org/jetbrains/plugins/scala/lang/psi/types/nonvalue/ScMethodType.scala
|
Scala
|
apache-2.0
| 14,441 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// scalastyle:off println
package org.apache.spark.examples
import java.util.Random
import breeze.linalg.{DenseVector, Vector}
/**
* Logistic regression based classification.
*
* This is an example implementation for learning how to use Spark. For more conventional use,
* please refer to org.apache.spark.ml.classification.LogisticRegression.
*/
object LocalFileLR {
val D = 10 // Number of dimensions
val rand = new Random(42)
case class DataPoint(x: Vector[Double], y: Double)
def parsePoint(line: String): DataPoint = {
val nums = line.split(' ').map(_.toDouble)
DataPoint(new DenseVector(nums.slice(1, D + 1)), nums(0))
}
def showWarning() {
System.err.println(
"""WARN: This is a naive implementation of Logistic Regression and is given as an example!
|Please use org.apache.spark.ml.classification.LogisticRegression
|for more conventional use.
""".stripMargin)
}
def main(args: Array[String]) {
showWarning()
val fileSrc = scala.io.Source.fromFile(args(0))
val lines = fileSrc.getLines().toArray
val points = lines.map(parsePoint)
val ITERATIONS = args(1).toInt
// Initialize w to a random value
val w = DenseVector.fill(D) {2 * rand.nextDouble - 1}
println(s"Initial w: $w")
for (i <- 1 to ITERATIONS) {
println(s"On iteration $i")
val gradient = DenseVector.zeros[Double](D)
for (p <- points) {
val scale = (1 / (1 + math.exp(-p.y * (w.dot(p.x)))) - 1) * p.y
gradient += p.x * scale
}
w -= gradient
}
fileSrc.close()
println(s"Final w: $w")
}
}
// scalastyle:on println
|
lhfei/spark-in-action
|
spark-2.x/src/main/scala/org/apache/spark/examples/LocalFileLR.scala
|
Scala
|
apache-2.0
| 2,530 |
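// A minimal standalone sketch of the per-point update in LocalFileLR above,
// shown on hand-made data; the vectors and the single step here are
// illustrative, not part of the original example.
object GradientStepSketch {
  import breeze.linalg.DenseVector
  def main(args: Array[String]): Unit = {
    val w = DenseVector(0.1, -0.2)
    val x = DenseVector(1.0, 2.0)
    val y = 1.0
    // Same formula as the inner loop: (sigmoid(y * w.x) - 1) * y
    val scale = (1 / (1 + math.exp(-y * (w dot x))) - 1) * y
    println(w - x * scale) // w after one gradient step
  }
}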
package requests
import play.api.data.Form
import play.api.data.Forms._
case class SearchRecipeForm(elems: Seq[Long])
object SearchRecipeForm {
val form = Form(
mapping(
"elem" -> seq(longNumber(min = 0L))
)(SearchRecipeForm.apply)(SearchRecipeForm.unapply)
)
}
|
ponkotuy/FactorioRecipe
|
app/requests/SearchRecipeForm.scala
|
Scala
|
apache-2.0
| 282 |
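// A hedged usage sketch for SearchRecipeForm above, assuming Play's standard
// Form#bind(Map[String, String]) API; the element values are illustrative.
val bound = SearchRecipeForm.form.bind(Map("elem[0]" -> "1", "elem[1]" -> "42"))
bound.fold(
  withErrors => println(s"invalid: ${withErrors.errors}"),
  ok => println(s"elems = ${ok.elems}") // Seq(1, 42)
)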
package reactivemongo.api.commands.bson
import reactivemongo.api.BSONSerializationPack
import reactivemongo.api.commands._
import reactivemongo.bson._
object BSONFindAndModifyCommand extends FindAndModifyCommand[BSONSerializationPack.type] {
val pack: BSONSerializationPack.type = BSONSerializationPack
}
object BSONFindAndModifyImplicits {
import BSONFindAndModifyCommand._
implicit object FindAndModifyResultReader extends DealingWithGenericCommandErrorsReader[FindAndModifyResult] {
def readResult(result: BSONDocument): FindAndModifyResult =
FindAndModifyResult(
result.getAs[BSONDocument]("lastErrorObject").map { doc =>
UpdateLastError(
updatedExisting = doc.getAs[Boolean]("updatedExisting").getOrElse(false),
n = doc.getAs[Int]("n").getOrElse(0),
err = doc.getAs[String]("err"),
upsertedId = doc.getAs[BSONValue]("upserted")
)
},
result.getAs[BSONDocument]("value")
)
}
implicit object FindAndModifyWriter extends BSONDocumentWriter[ResolvedCollectionCommand[FindAndModify]] {
def write(command: ResolvedCollectionCommand[FindAndModify]): BSONDocument =
BSONDocument(
"findAndModify" -> command.collection,
"query" -> command.command.query,
"sort" -> command.command.sort,
"fields" -> command.command.fields,
"upsert" -> (if(command.command.upsert) Some(true) else None)
) ++ (command.command.modify match {
case Update(document, fetchNewObject) =>
BSONDocument(
"update" -> document,
"new" -> fetchNewObject
)
case Remove =>
BSONDocument("remove" -> true)
})
}
}
|
reactific/ReactiveMongo
|
driver/src/main/scala/api/commands/bson/findandmodify.scala
|
Scala
|
apache-2.0
| 1,726 |
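// A hedged sketch of the command document FindAndModifyWriter emits for an
// Update: collection name and field values are illustrative, and "sort" /
// "fields" are assumed to be omitted when the corresponding options are empty.
import reactivemongo.bson._
val expectedShape = BSONDocument(
  "findAndModify" -> "people",
  "query"         -> BSONDocument("name" -> "Ada"),
  "update"        -> BSONDocument("$set" -> BSONDocument("age" -> 37)),
  "new"           -> true
)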
package com.bigchange.basic
import breeze.linalg
import org.apache.spark.graphx._
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
case class Person(name: String, occupation: String, age: Int)
/**
* Created by C.J.YOU on 2016/5/3.
 * This example demonstrates basic graph usage: constructing a graph and performing common operations on it.
*/
object GraphOperation {
def printGraph[VD, ED](graph: Graph[VD, ED], hint: String): Unit = {
println(hint)
graph.triplets.map(triplet => s"<${triplet.srcId},${triplet.srcAttr}> is the ${triplet.attr} of <${triplet.dstId},${triplet.dstAttr}>").collect.foreach(println)
}
// Define a reduce operation to compute the highest degree vertex
def max(a: (VertexId, Int), b: (VertexId, Int)): (VertexId, Int) = {
if (a._2 > b._2) a else b
}
def main(args: Array[String]): Unit = {
val sparkConf = new SparkConf().setAppName("GraphOperation")
.setMaster("local")
val ctx = new SparkContext(sparkConf)
    // We build a realistic graph containing both "advisor-student" and "family"
    // relationships; the members and their roles are:
    // (enhong.chen, prof)
    // (qi.liu, student)
    // (qifeng.dai, student)
    // (tianwei.dai, girl)
    // (alice, girl) # an isolated vertex
    // Define the vertex RDD
val users: RDD[(VertexId, Person)] = ctx.parallelize(Array(
(1L, Person("enhong.chen", "prof", 40)),
(2L, Person("qi.liu", "student", 30)),
(3L, Person("qifeng.dai", "student", 32)),
(4L, Person("tianwei.dai", "girl", 3)),
(5L, Person("alice", "girl", 3))
))
    // Define the edge RDD
    val relationships: RDD[Edge[String]] = ctx.parallelize(Array(
      Edge(1L, 2L, "advisor"),
Edge(1L, 3L, "advisor"),
Edge(2L, 3L, "friend"),
Edge(3L, 2L, "friend"),
Edge(3L, 4L, "father"),
Edge(0L, 4L, "mother")
))
    // Define a default user, used when a referenced vertex is missing
val defaultUser = Person("None", "Missing", 0)
// Build the initial Graph
val graph = Graph(users, relationships, defaultUser)
    // Find all professors
    graph.vertices.filter { case (id, e) => e.occupation == "prof" }.collect.foreach(println)
    // Find all edges where srcId > dstId
    graph.edges.filter { case Edge(src, dst, prop) => src > dst }.collect.foreach(println)
    // Inspect the triplet view
/**
* <0,Person(None,Missing,0)> is the mother of <4,Person(tianwei.dai,girl,3)>
<1,Person(enhong.chen,prof,40)> is the advisor of <2,Person(qi.liu,student,30)>
<1,Person(enhong.chen,prof,40)> is the advisor of <3,Person(qifeng.dai,student,32)>
<2,Person(qi.liu,student,30)> is the friend of <3,Person(qifeng.dai,student,32)>
<3,Person(qifeng.dai,student,32)> is the friend of <2,Person(qi.liu,student,30)>
<3,Person(qifeng.dai,student,32)> is the father of <4,Person(tianwei.dai,girl,3)>
*/
printGraph(graph, "===show graph===")
println("\n| Property Operators |\n")
    // Change one vertex's property, and the corresponding edge attribute
val graph2 = graph.
mapVertices { case (id, vd) => if (vd.name == "qi.liu") Person(vd.name, "prof", vd.age) else vd }.
mapEdges(e => if (e.srcId == 1 && e.dstId == 2) "colleague" else e.attr)
graph2.vertices.filter { case (id, vd) => vd.occupation == "prof" }.collect.foreach(println)
/**
* <0,Person(None,Missing,0)> is the mother of <4,Person(tianwei.dai,girl,3)>
<1,Person(enhong.chen,prof,40)> is the colleague of <2,Person(qi.liu,prof,30)>
<1,Person(enhong.chen,prof,40)> is the advisor of <3,Person(qifeng.dai,student,32)>
<2,Person(qi.liu,prof,30)> is the friend of <3,Person(qifeng.dai,student,32)>
<3,Person(qifeng.dai,student,32)> is the friend of <2,Person(qi.liu,prof,30)>
<3,Person(qifeng.dai,student,32)> is the father of <4,Person(tianwei.dai,girl,3)>
*/
printGraph(graph2, "===show graph after alter qi.liu status===")
println("\n| Structural Operators |\n")
    // Change the graph's topology
val ccGraph = graph.connectedComponents()
    val validGraph = graph.subgraph(vpred = (id, attr) => attr.occupation != "Missing") // take a subgraph
val validCCGraph = ccGraph.mask(validGraph)
printGraph(validCCGraph, "===show connected graph & graph without missing vertex===")
println("\n| Join Operators |\n")
val u2: RDD[(VertexId, String)] = ctx.parallelize(Array(
(2L, "prof"),
(3L, "staff")
))
val joinGraph = graph.joinVertices(u2)((id, v1, v2) => Person(v1.name, v2, v1.age))
println("===show graph after join===")
joinGraph.vertices.foreach(println)
val outerJoinGraph = graph.outerJoinVertices(u2)((id, v1, v2) => (v1.name, v2.getOrElse(v1.occupation)))
println("===show graph after outer join===")
outerJoinGraph.vertices.foreach(println)
println("\n| Neighborhood Aggregation |\n")
// Compute the number of older followers and their total age
val olderFollowers: VertexRDD[(Int, Double)] = graph.aggregateMessages[(Int, Double)](
triplet => {
// Map Function
if (triplet.srcAttr.age > triplet.dstAttr.age) {
// Send message to destination vertex containing counter and age
triplet.sendToDst(1, triplet.srcAttr.age)
}
},
// Add counter and age
(a, b) => (a._1 + b._1, a._2 + b._2) // Reduce Function
)
// Divide total age by number of older followers to get average age of older followers
val avgAgeOfOlderFollowers: VertexRDD[Double] =
olderFollowers.mapValues((id, value) => value match {
case (count, totalAge) => totalAge / count
})
println("===show average of followers===")
avgAgeOfOlderFollowers.collect.foreach(println(_))
println("\n| Computing Degree Information |\n")
// Compute the max degrees
val maxInDegree: (VertexId, Int) = graph.inDegrees.reduce(max)
val maxOutDegree: (VertexId, Int) = graph.outDegrees.reduce(max)
val maxDegrees: (VertexId, Int) = graph.degrees.reduce(max)
println("===max in-degree===")
println(maxInDegree)
println("===max out-degree===")
println(maxOutDegree)
println("===max degree===")
println(maxDegrees)
println("\n| Collecting Neighbors |\n")
println("===collect neighbors in===")
    graph.collectNeighbors(EdgeDirection.In).collect.foreach { case (id, neighbors) =>
      val s = for (nei <- neighbors) yield nei._1
      println(s"$id -- ${s.mkString(", ")}")
    }
    println("===collect neighbors out===")
    graph.collectNeighbors(EdgeDirection.Out).collect.foreach { case (id, neighbors) =>
      val s = for (nei <- neighbors) yield nei._1
      println(s"$id -- ${s.mkString(", ")}")
    }
    println("===collect neighbors either===")
    graph.collectNeighbors(EdgeDirection.Either).collect.foreach { case (id, neighbors) =>
      val s = for (nei <- neighbors) yield nei._1
      println(s"$id -- ${s.mkString(", ")}")
    }
println("\n| Pregel API |\n")
val relationships2: RDD[Edge[Int]] = ctx.parallelize(Array(
Edge(1L, 2L, 3),
Edge(1L, 3L, 3),
Edge(2L, 3L, 2),
Edge(2L, 4L, 50),
Edge(2L, 5L, 50),
Edge(3L, 2L, 2),
Edge(3L, 4L, 1),
Edge(3L, 5L, 4),
Edge(0L, 4L, 1)
))
val graph3 = Graph(users, relationships2, defaultUser)
val sourceId: VertexId = 2 // The ultimate source
// Initialize the graph such that all vertices except the root have distance infinity.
val initialGraph = graph3.mapVertices((id, _) => if (id == sourceId) 0.0 else Double.PositiveInfinity)
val sssp = initialGraph.pregel(Double.PositiveInfinity)(
(id, dist, newDist) => {
linalg.min(dist, newDist)
}, // Vertex Program
triplet => {
// Send Message
if (triplet.srcAttr + triplet.attr < triplet.dstAttr) {
Iterator((triplet.dstId, triplet.srcAttr + triplet.attr))
} else {
Iterator.empty
}
},
(a, b) => math.min(a, b) // Merge Message
)
println(sssp.vertices.collect.mkString("\n"))
val setA: VertexRDD[Int] = VertexRDD(ctx.parallelize(0L until 10L).map(id => (id, 1)))
val rddB: RDD[(VertexId, Double)] = ctx.parallelize(3L until 12L).flatMap(id => List((id, 3.0), (id, 2.0)))
println("\n| VertexRDDs |\n")
// There should be 20 entries in rddB
println(rddB.count)
val setB: VertexRDD[Double] = setA.aggregateUsingIndex(rddB, _ + _)
// There should be 10 entries in setB
println(setB.count)
setB.collect.foreach(println)
ctx.stop()
}
}
|
bigchange/AI
|
src/main/scala/com/bigchange/basic/GraphOperation.scala
|
Scala
|
apache-2.0
| 8,769 |
package org.jetbrains.plugins.scala
package lang
package parser
package parsing
package types
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.parser.parsing.builder.ScalaPsiBuilder
/**
* @author Alexander Podkhalyuzin
*/
/*
* Types ::= Type {',' Type}
*/
object Types extends ParserNode {
  def parse(builder: ScalaPsiBuilder): (Boolean, Boolean) = {
var isTuple = false
    def typesParse() = if (ParamType.parseInner(builder)) {
true
} else if (builder.getTokenType == ScalaTokenTypes.tUNDER) {
builder.advanceLexer()
true
} else {
false
}
val typesMarker = builder.mark
if (!typesParse) {
typesMarker.drop()
      return (false, isTuple)
}
    var continue = true
    while (continue && builder.getTokenType == ScalaTokenTypes.tCOMMA) {
      isTuple = true
      builder.advanceLexer() // ate the comma
      if (!typesParse) {
        continue = false
//builder error ScalaBundle.message("wrong.type",new Array[Object](0))
}
}
if (isTuple) typesMarker.done(ScalaElementTypes.TYPES)
else typesMarker.drop()
    (true, isTuple)
}
}
|
double-y/translation-idea-plugin
|
src/org/jetbrains/plugins/scala/lang/parser/parsing/types/Types.scala
|
Scala
|
apache-2.0
| 1,164 |
package org.http4s
package object booPickle extends BooPickleInstances
|
ChristopherDavenport/http4s
|
boopickle/src/main/scala/org/http4s/booPickle/package.scala
|
Scala
|
apache-2.0
| 72 |
package crudex.web
import scalaz._, Scalaz._
import scalaz.concurrent.Task
import org.http4s._, org.http4s.dsl._
import org.http4s.circe._
import crudex.persist.stm.ThingStm._
import io.circe._
import io.circe.syntax._
import crudex.web.Common._
import crudex.app.Common._
import io.circe.Encoder
/**
* Generic CRUD handler that works across entities and is agnostic to which DB monadic effect is used
*/
case class CrudHandler[K,D,E[_]](uri: String)(implicit evM: Monad[E],
                                                            evPersistAsTask: E ~> Task,
evConvertKey: IntId[K],
evPersist: PersistCrud[K,D,E],
evJsonK: Encoder[K],
evJsonD: Encoder[D],
evDecodeJsonD: Decoder[D]) {
import io.circe.generic.auto._ //needed for auto json instance of Entity
import crudex.model.instances._
val crudService = HttpService {
case GET -> Root / uri / IntVar(thingId) =>
renderJsonResponseOrNotFound (
evPersist.retrieveRecord(evConvertKey.fromInt(thingId))
)
case GET -> Root / uri =>
renderJsonResponse(
evPersist.retrieveAll
)
case req @ POST -> Root / uri =>
for {
thing <- req.as(jsonOf[D])
res <- renderJsonResponse(evPersist.create(thing))
} yield (res)
case req @ PUT -> Root / uri / IntVar(thingId) =>
for {
thing <- req.as(jsonOf[D])
res <- renderJsonResponseOrNotFound(evPersist.update(evConvertKey.fromInt(thingId))(thing))
} yield (res)
//TODO delete should return 201 not 200 on success
case DELETE -> Root / uri / IntVar(thingId) =>
renderJsonResponseOrNotFound(
evPersist.delete(evConvertKey.fromInt(thingId))
)
}
}
|
rpeszek/crud-ex-backent-http4s
|
src/main/scala/crudex/web/CrudHandler.scala
|
Scala
|
mit
| 1,911 |
package net.fwbrasil.bond
trait False
case object False extends Validator[Boolean, False] {
  def isValid(v: Boolean) =
    !v
}
trait True
case object True extends Validator[Boolean, True] {
  def isValid(v: Boolean) =
    v
}
|
fwbrasil/bond
|
src/main/scala/net/fwbrasil/bond/Booleans.scala
|
Scala
|
lgpl-2.1
| 245 |
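// A minimal sketch exercising the boolean validators above; expected output
// is noted inline.
object BooleansSketch {
  def main(args: Array[String]): Unit = {
    println(True.isValid(true))   // true
    println(True.isValid(false))  // false
    println(False.isValid(false)) // true
  }
}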
package controllers
import java.nio.ByteBuffer
import upickle.default
import play.api.mvc._
import services.ApiService
import shared.Api
import javax.inject.Inject
import scala.concurrent.ExecutionContext.Implicits.global
import slick.driver._
object Router extends autowire.Server[String, default.Reader, default.Writer] {
def read[Res: default.Reader](p: String) = default.read[Res](p)
def write[Res: default.Writer](r: Res) = default.write(r)
}
class Application @Inject() (dbConfigProvider: play.api.db.slick.DatabaseConfigProvider) extends Controller {
val dbConfig = dbConfigProvider.get[JdbcProfile]
val apiService = new ApiService()
def index = Action {
Ok(views.html.index("UNTITLED APP"))
}
def autowireApi(path: String) = Action.async(parse.json.map(_.toString())) {
implicit request =>
println(s"Request path: $path")
println(s"Request body: ${request.body}")
// call Autowire route
Router.route[Api](apiService)(
autowire.Core.Request(path.split("/"), default.read[Map[String, String]](request.body))
).map(Ok(_))
}
def logging = Action(parse.anyContent) {
implicit request =>
request.body.asJson.foreach { msg =>
println(s"CLIENT - $msg")
}
Ok("")
}
}
|
mitchdzugan/PSPSjsAR-stack-template
|
server/src/main/scala/controllers/Application.scala
|
Scala
|
apache-2.0
| 1,273 |
package com.wangc.fast.p9
/**
* Created by wangchao on 2017/6/20.
*/
object Study9_10_Reg {
def main(args: Array[String]): Unit = {
val p = """[0-9]""".r
// val p2 = "\s" //因为这种情况,所以推荐使用三个双引号
val target = "23kdf3hj6"
for (matchStr <- p.findAllIn(target)){
println( matchStr )
}
}
}
|
wang153723482/HelloWorld_my
|
HelloWorld_scala/src/com/wangc/fast/p9/Study9_10_Reg.scala
|
Scala
|
apache-2.0
| 364 |
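// A small follow-up sketch on the commented-out p2 above: triple-quoted
// strings avoid double-escaping backslashes in regex patterns.
object TripleQuoteRegexSketch {
  def main(args: Array[String]): Unit = {
    val ws = """\s+""".r // with "..." this would have to be written "\\s+"
    println(ws.split("a b\tc").mkString(",")) // a,b,c
  }
}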
package ps.tricerato.pureimage.filter
import ps.tricerato.pureimage._
object Lanczos {
val a = 2
import math._
  private def L(x: Float): Float = if (x == 0F) {
1F
} else if (abs(x) > a) {
0F
} else {
sinc(x) * sinc(x / a)
}
private def sinc(x: Float) = (sin(Pi * x) / (Pi * x)).toFloat
val RANGE = (-2 * a to 2 * a) map { _.toFloat / 2 }
val COORDS = for {
i <- RANGE
j <- RANGE
} yield (i, j)
val LANCZOS_UNNORMALIZED = COORDS map { case (i,j) => (i, j) -> L(0 - i) * L(0 - j) }
val SUM = LANCZOS_UNNORMALIZED.map(_._2).sum
val LANCZOS = LANCZOS_UNNORMALIZED map { case (k,v) => k -> v / SUM } filter { case (k, v) => abs(v) > 0.001 }
def apply[I : Pixel](scale: Float)(image: Image[I]) = {
val lanczosByCoord = LANCZOS groupBy { case ((i,j), v) => ((i * scale).toInt, (j * scale).toInt) }
val lanczos = lanczosByCoord.toSeq map { case ((i,j), vals) =>
(i, j) -> vals.map(_._2).sum
} filter { case (_, v) => abs(v) > 0.001 }
val (ij, ls) = lanczos.unzip
val (is, js) = ij.unzip
val (ia, ja, la) = (is.toArray, js.toArray, ls.toArray)
val mx = image.width - 1
val my = image.height - 1
val ops = image.ops
image map { (x, y) =>
var sum = ops.zero
var index = 0
val length = ia.length
while (index < length) {
val l = la(index)
val i = ia(index)
val j = ja(index)
val x0 = between(0, x + i, mx)
val y0 = between(0, y + j, my)
val p = image(x0, y0)
sum = ops.sum(sum, ops.fade(p, l))
index += 1
}
// println(sum)
sum
}
}
}
|
non/pureimage
|
src/main/scala/ps/tricerato/pureimage/filter/Lanczos.scala
|
Scala
|
mit
| 1,646 |
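// A standalone sketch of the windowed-sinc kernel L defined above (a = 2):
// L(0) = 1, L(x) = sinc(x) * sinc(x / a) for |x| <= a, and 0 beyond the window.
object LanczosKernelSketch {
  import math._
  def sinc(x: Float): Float = (sin(Pi * x) / (Pi * x)).toFloat
  def L(x: Float, a: Int = 2): Float =
    if (x == 0f) 1f else if (abs(x) > a) 0f else sinc(x) * sinc(x / a)
  def main(args: Array[String]): Unit =
    println((L(0f), L(0.5f), L(2.5f))) // (1.0, ~0.57, 0.0)
}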
package ycf.web
import akka.actor._
import akka.io.IO
import akka.pattern.ask
import akka.util.Timeout
import spray.can.Http
import spray.routing.HttpService
import scala.concurrent.duration._
import scala.language.postfixOps
object BusAlertApp extends App {
implicit val system = ActorSystem("on-spray-can")
val service = system.actorOf(Props[BusAlertRoutingActor], "bus-alert-router")
implicit val timeout = Timeout(5 seconds)
// start a new HTTP server on port 8080 with our service actor as the handler
IO(Http) ? Http.Bind(service, interface = "0.0.0.0", port = 8080)
}
class BusAlertRoutingActor extends Actor with ActorLogging with HttpService {
def actorRefFactory: ActorRefFactory = context
import context.dispatcher
val alertServiceActor = context.actorOf(Props[BusAlertServiceActor], "bus-time")
implicit val timeout = Timeout(5 seconds)
def receive = runRoute {
path("") {
get {
complete {
(alertServiceActor ? Start).mapTo[String]
}
}
}
}
}
/**
* Object for providing config to application
* Settings are contained in application.conf under ''bus-alerter''
*/
object BusAlertConfig {
import com.typesafe.config.ConfigFactory
private val config = ConfigFactory.load()
private val root = config.getConfig("bus-alerter")
val BUS_TIME_API_KEY = root.getString("BUS_TIME_API_KEY")
val BUS_STOP_ID = root.getString("BUS_STOP_ID")
val SMS_NUMBER = root.getString("SMS_NUMBER")
// Receive notice for express buses.
val EXPRESS_BUS_LINE = //"M15-SBS"
if(root.hasPath("EXPRESS_BUS_LINE"))
Some(root.getString("EXPRESS_BUS_LINE"))
else
None
// Range in meters that a bus must be within for a message to be sent
val minBusRange = 350
val maxBusRange = 750
}
|
frosforever/spray-bus-alerter
|
src/main/scala/ycf/web/BusAlertApp.scala
|
Scala
|
mit
| 1,788 |
/*
* Copyright (c) 2014-2020 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix
import cats.Contravariant
import scala.concurrent.ExecutionContext
package object execution {
/** Returns the associated Cats type class instances for the
* [[CancelableFuture]] data type.
*
* @param ec is the
* [[scala.concurrent.ExecutionContext ExecutionContext]]
* needed in order to create the needed type class instances,
* since future transformations rely on an `ExecutionContext`
* passed explicitly (by means of an implicit parameter)
* on each operation
*/
implicit def cancelableFutureCatsInstances(implicit ec: ExecutionContext): CancelableFutureCatsInstances =
new CancelableFutureCatsInstances()
/** Contravariant type class instance of [[Callback]] for Cats. */
implicit def contravariantCallback[E]: Contravariant[Callback[E, ?]] =
contravariantRef.asInstanceOf[Contravariant[Callback[E, ?]]]
private[this] val contravariantRef: Contravariant[Callback[Any, ?]] =
new Contravariant[Callback[Any, ?]] {
override def contramap[A, B](cb: Callback[Any, A])(f: B => A): Callback[Any, B] =
cb.contramap(f)
}
}
|
alexandru/monifu
|
monix-catnap/shared/src/main/scala/monix/execution/package.scala
|
Scala
|
apache-2.0
| 1,826 |
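// A small sketch of what the Contravariant instance above delegates to:
// Callback#contramap adapts a callback of Int into one of String. This
// assumes monix's Callback.fromTry constructor; the printlns are illustrative.
import monix.execution.Callback
import scala.util.{Failure, Success}

val intCb: Callback[Throwable, Int] = Callback.fromTry {
  case Success(i) => println(s"got $i")
  case Failure(e) => println(s"error: $e")
}
val strCb: Callback[Throwable, String] = intCb.contramap((s: String) => s.length)
// strCb.onSuccess("hello") would report "got 5"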
/*
* Copyright 2012 Eike Kettner
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.eknet.publet.web.template
import org.eknet.publet.vfs.Path
import org.eknet.publet.vfs.util.ClasspathContainer
import grizzled.slf4j.Logging
import org.eknet.publet.web.asset.{AssetCollection, AssetManager, Group}
import Path._
import org.eknet.publet.web.template.DefaultLayout.Assets
import org.eknet.publet.Publet
import com.google.inject.{Inject, Singleton}
import org.eknet.publet.web.guice.PubletStartedEvent
import com.google.common.eventbus.Subscribe
import org.eknet.publet.engine.scalate.ScalateEngine
/**
* Registers provided default assets and sets the default layout template.
*
* @author Eike Kettner [email protected]
* @since 19.05.12 18:25
*/
@Singleton
class DefaultLayout @Inject() (publet: Publet, assetMgr: AssetManager, scalateEngine: ScalateEngine) extends Logging {
@Subscribe
def onStartup(ev: PubletStartedEvent) {
//mustache
assetMgr setup Assets.mustache
//jquery
assetMgr setup (Assets.jquery, Assets.jqueryMigrate)
//highlightJs
publet.mountManager.mount("/publet/highlightjs/".p,
new ClasspathContainer(base = "/org/eknet/publet/web/includes/highlight"))
assetMgr setup Assets.highlightjs
//publet's resources
publet.mountManager.mount("/publet/".p,
new ClasspathContainer(base = "/org/eknet/publet/web/includes/publet"))
assetMgr setup Assets.publet
//jquery.loadmask.spin
assetMgr setup (Assets.spin, Assets.loadmask)
//bootstrap
publet.mountManager.mount(Path("/publet/bootstrap/"),
new ClasspathContainer(base = "/org/eknet/publet/web/includes/bootstrap"))
assetMgr setup Assets.bootstrap
scalateEngine.setDefaultLayoutUri("/publet/bootstrap/bootstrap.single.jade")
//add default asset groups
assetMgr.setup(Assets.default, Assets.defaultNoHighlightJs)
}
}
object DefaultLayout {
object Assets extends AssetCollection {
override def classPathBase = "/org/eknet/publet/web/includes"
val mustache = Group("mustache")
.add(resource("mustache/mustache.js"))
val jquery = Group("jquery")
.add(resource("jquery/jquery-1.9.1.min.js").noCompress)
.add(resource("jquery/jquery.form.js"))
val jqueryMigrate = Group("jquery.migrate")
.add(resource("jquery/jquery-migrate-1.1.1.min.js").noCompress)
.require(jquery.name)
val spin = Group("spinjs").add(resource("spin/spin.min.js").noCompress)
val loadmask = Group("loadmask.jquery")
.add(resource("loadmask/jquery.loadmask.spin.js"))
.add(resource("loadmask/jquery.loadmask.spin.css"))
.require(jquery.name, spin.name)
val publet = Group("publet")
.add(resource("publet/js/publet.js"))
.add(resource("publet/js/jquery.feedback-message.js"))
.require(jquery.name)
val highlightjs = Group("highlightjs")
.add(resource("highlight/highlight.pack.js").noCompress)
.add(resource("highlight/highlight-onload.js"))
val bootstrap = Group("bootstrap")
.add(resource("bootstrap/js/bootstrap.js"))
.add(resource("bootstrap/css/bootstrap.css"))
.add(resource("bootstrap/css/bootstrap.custom.css"))
.add(resource("bootstrap/img/glyphicons-halflings.png"))
.add(resource("bootstrap/img/glyphicons-halflings-white.png"))
.require(jquery.name)
val default = Group("default").use(mustache.name, jquery.name, spin.name, loadmask.name,
publet.name, highlightjs.name, bootstrap.name)
val defaultNoHighlightJs = Group("defaultNoHighlightJs")
.use(mustache.name, jquery.name, spin.name, loadmask.name, publet.name, bootstrap.name)
}
}
|
eikek/publet
|
web/src/main/scala/org/eknet/publet/web/template/DefaultLayout.scala
|
Scala
|
apache-2.0
| 4,199 |
package rpgboss.model
trait RpgEnum extends Enumeration {
def get(v: Int) = apply(v)
def findOrDefault(s: String) = {
values.find(_.toString == s).getOrElse(default)
}
def default: Value
}
trait BooleanRpgEnum extends RpgEnum {
def fromBoolean(x: Boolean): Value
def toBoolean(id: Int): Boolean
}
object DirectionMasks {
val NORTH = 1 << 0
val EAST = 1 << 1
val SOUTH = 1 << 2
val WEST = 1 << 3
val NE = 1 << 4
val SE = 1 << 5
val SW = 1 << 6
val NW = 1 << 7
val ALLCARDINAL = NORTH | EAST | SOUTH | WEST
val NONE = 0
def allBlocked(b: Byte) = (b & ALLCARDINAL) == ALLCARDINAL
def flagged(b: Byte, dir: Int) = (b & dir) == dir
}
object MusicSlots {
val BEGIN = 0
val WEATHER = 7
val NUM_SLOTS = 8
}
object PictureSlots {
val BEGIN = 0
val BELOW_MAP = 0
val ABOVE_MAP = 8
val MAP_END = 24
val BATTLE_BEGIN = 24
val BATTLE_BACKGROUND = 24
val BATTLE_SPRITES_ENEMIES = 28
val BATTLE_SPRITES_PARTY = 40
val GAME_OVER = 49
val BATTLE_END = 50
val ABOVE_WINDOW = 50
val WEATHER = 58
val NUM_SLOTS = 64
}
object WeatherEffects extends RpgEnum {
val RAIN = Value(1, "Rain")
val FOG = Value(2, "Fog")
def default = RAIN
}
object Transitions extends RpgEnum {
val BaseBehaviour = Value(-1, "BaseBehaviour")
val NONE = Value(0, "None")
val FADE = Value(1, "Fade_Out")
val Custom1 = Value(2, "Custom1")
val Custom2 = Value(3, "Custom2")
val Custom3 = Value(4, "Custom3")
def default = FADE
val fadeLength = 0.5f
}
object Origins extends RpgEnum {
val SCREEN_TOP_LEFT = Value(0, "Top_Left")
val SCREEN_CENTER = Value(1, "Screen_Center")
val ON_ENTITY = Value(2, "On_Event_Player")
def default = SCREEN_TOP_LEFT
}
object WeatherTypes extends RpgEnum {
val NONE = Value(0, "None")
val RAIN = Value(1, "Rain")
val SNOW = Value(2, "Snow")
def default = NONE
}
object Constants {
val MINLEVEL = 1
val MAXLEVEL = 9000
val MINPRICE = 0
val MAXPRICE = 999999
val MINEFFECTARG = -9999
val MAXEFFECTARG = 9999
val NUM_VEHICLES = 4
import DirectionMasks._
val DirectionOffsets = Map(
NORTH -> (0, -1),
EAST -> (1, 0),
SOUTH -> (0, 1),
WEST -> (-1, 0),
NE -> (1, -1),
SE -> (1, 1),
SW -> (-1, 1),
NW -> (-1, -1))
}
|
DrDub/rpgboss
|
core/src/main/scala/rpgboss/model/Constants.scala
|
Scala
|
agpl-3.0
| 2,278 |
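// A hedged sketch of the DirectionMasks bit-flag helpers above; the byte
// value below is illustrative.
object DirectionMasksSketch {
  import rpgboss.model.DirectionMasks._
  def main(args: Array[String]): Unit = {
    val b = (NORTH | EAST).toByte
    println(flagged(b, NORTH)) // true
    println(flagged(b, SOUTH)) // false
    println(allBlocked(b))     // false: SOUTH and WEST are still clear
  }
}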
/*
* Copyright 2011-2022 GatlingCorp (https://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.sbt
import sbt.testing.{ Runner, TaskDef }
/**
* As there is no further special handling needed or simulations to reject,
* [[GatlingRunner]] simply creates a [[GatlingTask]] for each discovered simulation.
*
* @param args the arguments for the new run.
* @param remoteArgs the arguments for the run in a forked JVM.
* @param testClassLoader the test ClassLoader, provided by SBT.
*/
class GatlingRunner(val args: Array[String], val remoteArgs: Array[String], testClassLoader: ClassLoader) extends Runner {
def tasks(taskDefs: Array[TaskDef]) = taskDefs.map(new GatlingTask(_, testClassLoader, args, remoteArgs))
def done = "Simulation(s) execution ended."
}
|
gatling/gatling
|
gatling-test-framework/src/main/scala/io/gatling/sbt/GatlingRunner.scala
|
Scala
|
apache-2.0
| 1,314 |
/*
* Copyright 2001-2015 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import scala.io.Source
import java.io.{BufferedWriter, File, FileWriter}
import sbt.IO
import GenCompatibleClasses.generatorSource
object GenScalacticJS {
private def uncommentJsExport(line: String): String =
if (line.trim.startsWith("//SCALATESTJS,NATIVE-ONLY "))
line.substring(line.indexOf("//SCALATESTJS,NATIVE-ONLY ") + 26)
else if (line.trim.startsWith("//SCALATESTJS-ONLY "))
line.substring(line.indexOf("//SCALATESTJS-ONLY ") + 19)
else
line
private def transformLine(line: String): String =
uncommentJsExport(line)
private def copyFile(sourceFile: File, destFile: File): File = {
val destWriter = new BufferedWriter(new FileWriter(destFile))
try {
val lines = Source.fromFile(sourceFile).getLines.toList
var skipMode = false
for (line <- lines) {
if (line.trim == "// SKIP-SCALATESTJS,NATIVE-START" || line.trim == "// SKIP-SCALATESTJS-START")
skipMode = true
else if (line.trim == "// SKIP-SCALATESTJS,NATIVE-END" || line.trim == "// SKIP-SCALATESTJS-END")
skipMode = false
else if (!skipMode) {
destWriter.write(transformLine(line))
destWriter.newLine()
}
}
destFile
}
finally {
destWriter.flush()
destWriter.close()
println("Copied " + destFile.getAbsolutePath)
}
}
def copyFiles(sourceDirName: String, packageDirName: String, files: List[String], targetDir: File): Seq[File] = {
val packageDir = new File(targetDir, packageDirName)
packageDir.mkdirs()
val sourceDir = new File(sourceDirName)
files.map { sourceFileName =>
val sourceFile = new File(sourceDir, sourceFileName)
val destFile = new File(packageDir, sourceFile.getName)
copyFile(sourceFile, destFile)
}
}
def copyDir(sourceDirName: String, packageDirName: String, targetDir: File, skipList: List[String]): Seq[File] = {
val packageDir = new File(targetDir, packageDirName)
packageDir.mkdirs()
val sourceDir = new File(sourceDirName)
sourceDir.listFiles.toList.filter(f => f.isFile && !skipList.contains(f.getName) && f.getName.endsWith(".scala")).map { sourceFile =>
val destFile = new File(packageDir, sourceFile.getName)
if (!destFile.exists || sourceFile.lastModified > destFile.lastModified) {
copyFile(sourceFile, destFile)
}
destFile
}
}
def copyResourceDir(sourceDirName: String, packageDirName: String, targetDir: File, skipList: List[String]): Seq[File] = {
val packageDir = new File(targetDir, packageDirName)
packageDir.mkdirs()
val sourceDir = new File(sourceDirName)
sourceDir.listFiles.toList.filter(f => f.isFile && !skipList.contains(f.getName)).map { sourceFile =>
val destFile = new File(packageDir, sourceFile.getName)
if (!destFile.exists || sourceFile.lastModified > destFile.lastModified)
IO.copyFile(sourceFile, destFile)
destFile
}
}
def genScala(targetDir: File, version: String, scalaVersion: String): Seq[File] =
copyDir("jvm/scalactic/src/main/scala/org/scalactic", "org/scalactic", targetDir, List.empty) ++
copyDir("jvm/scalactic/src/main/scala/org/scalactic/exceptions", "org/scalactic/exceptions", targetDir, List.empty) ++
copyDir("jvm/scalactic/src/main/scala/org/scalactic/source", "org/scalactic/source", targetDir, List("ObjectMeta.scala")) ++
copyDir("jvm/scalactic/src/main/scala/org/scalactic/anyvals", "org/scalactic/anyvals", targetDir, List.empty) ++
GenVersions.genScalacticVersions(new File(targetDir, "org/scalactic"), version, scalaVersion)
def genMacroScala(targetDir: File, version: String, scalaVersion: String): Seq[File] =
copyDir("jvm/scalactic-macro/src/main/scala/org/scalactic", "org/scalactic", targetDir, List.empty) ++
copyDir("jvm/scalactic-macro/src/main/scala/org/scalactic/anyvals", "org/scalactic/anyvals", targetDir, List.empty) ++
copyDir("jvm/scalactic-macro/src/main/scala/org/scalactic/source", "org/scalactic/source", targetDir, List.empty)
def genResource(targetDir: File, version: String, scalaVersion: String): Seq[File] = {
val sourceResourceFile = new File("jvm/scalactic-macro/src/main/resources/org/scalactic/ScalacticBundle.properties")
val destResourceDir = new File(targetDir, "org/scalactic")
destResourceDir.mkdirs()
val destResourceFile = new File(destResourceDir, "ScalacticBundle.properties")
if (!destResourceFile.exists || sourceResourceFile.lastModified > destResourceFile.lastModified)
copyFile(sourceResourceFile, destResourceFile)
List(destResourceFile)
}
def genHtml(targetDir: File, version: String, scalaVersion: String): Seq[File] = {
copyResourceDir("scalatest-doc/src/main/html", "html", targetDir, List.empty)
}
def genTest(targetDir: File, version: String, scalaVersion: String): Seq[File] =
copyDir("jvm/scalactic-test/src/test/scala/org/scalactic", "org/scalactic", targetDir,
List(
"TripleEqualsSpec.for210"
)) ++
copyDir("jvm/scalactic-test/src/test/scala/org/scalactic/anyvals", "org/scalactic/anyvals", targetDir, List.empty) ++
copyDir("jvm/scalactic-test/src/test/scala/org/scalactic/source", "org/scalactic/source", targetDir, List.empty)
}
|
scalatest/scalatest
|
project/GenScalacticJS.scala
|
Scala
|
apache-2.0
| 5,869 |
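// A standalone restatement of the marker-uncommenting rule in transformLine
// above (repeated here because the original helpers are private); the input
// line is illustrative.
object UncommentMarkerSketch {
  private val Marker = "//SCALATESTJS-ONLY "
  def uncomment(line: String): String =
    if (line.trim.startsWith(Marker)) line.substring(line.indexOf(Marker) + Marker.length)
    else line
  def main(args: Array[String]): Unit =
    println(uncomment("  //SCALATESTJS-ONLY val isJS = true")) // "val isJS = true"
}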
package client.handler
import client.rootmodel.SearchesRootModel
import client.modules.AppModule
import client.services.{CoreApi, LGCircuit}
import client.utils.PrologParser
import diode.{ActionHandler, ActionResult, ModelRW}
import shared.models.Label
import scala.scalajs.js.JSConverters._
import scala.scalajs.js.JSON
import scala.util.{Failure, Success}
import diode.AnyAction._
import shared.dtos.{Connection, LabelPost, SubscribeRequest}
object SearchesModelHandler {
def getSearchesModel(listOfLabels: Seq[String]): SearchesRootModel = {
try {
val labelsArray = PrologParser.StringToLabel(listOfLabels.toJSArray)
val model = upickle.default.read[Seq[Label]](JSON.stringify(labelsArray))
// logger.log.debug(s"searchesModel = ${model}")
SearchesRootModel(model)
} catch {
case e: Exception =>
SearchesRootModel(Nil)
}
}
def leaf(text: String/*, color: String = "#CC5C64"*/) = "leaf(text(\\"" + s"${text}" + "\\"),display(color(\\"\\"),image(\\"\\")))"
}
case class CreateLabels(labelStrSeq: Seq[String])
case class AddLabel(label: Label)
case class UpdatePrevSearchLabel(labelStr: String, viewName: String)
case class UpdatePrevSearchCnxn(cnxns: Seq[Connection], viewName: String)
case class PostLabelsAndMsg(labelNames: Seq[String], subscribeReq: SubscribeRequest)
case class UpdateLabel(label: Label)
case class UpdateLabels(labels: Seq[Label])
// scalastyle:off
class SearchesHandler[M](modelRW: ModelRW[M, SearchesRootModel]) extends ActionHandler(modelRW) {
override def handle: PartialFunction[Any, ActionResult[M]] = {
case CreateLabels(labelStrSeq: Seq[String]) =>
try {
updated(SearchesModelHandler.getSearchesModel(labelStrSeq))
} catch {
        case e: Exception =>
noChange
}
case AddLabel(label: Label) =>
updated(value.copy(searchesModel = value.searchesModel ++ Seq(label)))
case UpdatePrevSearchLabel(labelStr, viewName) =>
viewName match {
case AppModule.MESSAGES_VIEW => updated(value.copy(previousMsgSearchLabel = labelStr))
case AppModule.PROFILES_VIEW => updated(value.copy(previousProfileSearchLabel = labelStr))
case AppModule.PROJECTS_VIEW => updated(value.copy(previousProjectSearchLabel = labelStr))
}
case UpdatePrevSearchCnxn(cnxns, viewName) =>
viewName match {
case AppModule.MESSAGES_VIEW => updated(value.copy(previousMsgSearchCnxn = cnxns))
case AppModule.PROFILES_VIEW => updated(value.copy(previousProfileSearchCnxn = cnxns))
case AppModule.PROJECTS_VIEW => updated(value.copy(previousProjectSearchCnxn = cnxns))
}
case UpdateLabel(label) =>
updated(value.copy(searchesModel = value.searchesModel.map(e => if (e.uid == label.uid) label else e)))
// case UpdateLabels(labels: Seq[label])
}
}
|
LivelyGig/ProductWebUI
|
client/src/main/scala/client/handler/SearchesHandler.scala
|
Scala
|
apache-2.0
| 2,849 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.h2o
import org.apache.spark.SparkConf
/**
* Just simple configuration holder which is representing
* properties passed from user to H2O App.
*/
trait H2OConf {
  /* Required Spark config */
private[spark] def sparkConf:SparkConf
// Precondition
require(sparkConf != null, "sparkConf was null")
/* Initialize configuration */
// Collect configuration properties
def numH2OWorkers = sparkConf.getInt(PROP_CLUSTER_SIZE._1, PROP_CLUSTER_SIZE._2)
def useFlatFile = sparkConf.getBoolean(PROP_USE_FLATFILE._1, PROP_USE_FLATFILE._2)
def basePort = sparkConf.getInt(PROP_PORT_BASE._1, PROP_PORT_BASE._2)
def cloudTimeout = sparkConf.getInt(PROP_CLOUD_TIMEOUT._1, PROP_CLOUD_TIMEOUT._2)
def drddMulFactor = sparkConf.getInt(PROP_DUMMY_RDD_MUL_FACTOR._1, PROP_DUMMY_RDD_MUL_FACTOR._2)
def numRddRetries = sparkConf.getInt(PROP_SPREADRDD_RETRIES._1, PROP_SPREADRDD_RETRIES._2)
def cloudName = sparkConf.get(PROP_CLOUD_NAME._1, PROP_CLOUD_NAME._2)
def defaultCloudSize = sparkConf.getInt(PROP_DEFAULT_CLUSTER_SIZE._1, PROP_DEFAULT_CLUSTER_SIZE._2)
def h2oNodeLogLevel = sparkConf.get(PROP_NODE_LOG_LEVEL._1, PROP_NODE_LOG_LEVEL._2)
def h2oClientLogLevel = sparkConf.get(PROP_CLIENT_LOG_LEVEL._1, PROP_CLIENT_LOG_LEVEL._2)
def networkMask = sparkConf.getOption(PROP_NETWORK_MASK._1)
def nthreads = sparkConf.getInt(PROP_NTHREADS._1, PROP_NTHREADS._2)
def disableGA = sparkConf.getBoolean(PROP_DISABLE_GA._1, PROP_DISABLE_GA._2)
def clientWebPort = sparkConf.getInt(PROP_CLIENT_WEB_PORT._1, PROP_CLIENT_WEB_PORT._2)
def clientIcedDir = sparkConf.getOption(PROP_CLIENT_ICED_DIR._1)
def nodeIcedDir = sparkConf.getOption(PROP_NODE_ICED_DIR._1)
def jks = sparkConf.getOption(PROP_JKS._1)
def jksPass = sparkConf.getOption(PROP_JKS_PASS._1)
def hashLogin = sparkConf.getBoolean(PROP_HASH_LOGIN._1, PROP_HASH_LOGIN._2)
def ldapLogin = sparkConf.getBoolean(PROP_LDAP_LOGIN._1, PROP_LDAP_LOGIN._2)
def loginConf = sparkConf.getOption(PROP_LOGIN_CONF._1)
def userName = sparkConf.getOption(PROP_USER_NAME._1)
/* Configuration properties */
/** Configuration property - use flatfile for H2O cloud formation. */
val PROP_USE_FLATFILE = ( "spark.ext.h2o.flatfile", true)
/** Configuration property - expected number of workers of H2O cloud.
* Value -1 means automatic detection of cluster size.
*/
val PROP_CLUSTER_SIZE = ( "spark.ext.h2o.cluster.size", -1 )
/** Configuration property - base port used for individual H2O nodes configuration. */
val PROP_PORT_BASE = ( "spark.ext.h2o.port.base", 54321 )
/** Configuration property - timeout for cloud up. */
val PROP_CLOUD_TIMEOUT = ("spark.ext.h2o.cloud.timeout", 60*1000)
/** Configuration property - number of retries to create an RDD spreat over all executors */
val PROP_SPREADRDD_RETRIES = ("spark.ext.h2o.spreadrdd.retries", 10)
/** Configuration property - name of H2O cloud */
val PROP_CLOUD_NAME = ("spark.ext.h2o.cloud.name", "sparkling-water-")
  /** Starting size of cluster in case that size is not explicitly passed */
  val PROP_DEFAULT_CLUSTER_SIZE = ( "spark.ext.h2o.default.cluster.size", 20)
  /** H2O internal log level for launched remote nodes. */
val PROP_NODE_LOG_LEVEL = ("spark.ext.h2o.node.log.level", "INFO")
  /** H2O log level for the client running in the Spark driver */
val PROP_CLIENT_LOG_LEVEL = ("spark.ext.h2o.client.log.level", "WARN")
  /** Subnet selector for H2O if the IP guess fails - useful if 'spark.ext.h2o.flatfile' is false
    * and we are trying to guess the right IP on machines with multiple network interfaces. */
val PROP_NETWORK_MASK = ("spark.ext.h2o.network.mask", null.asInstanceOf[String])
/** Limit for number of threads used by H2O, default -1 means unlimited */
val PROP_NTHREADS = ("spark.ext.h2o.nthreads", -1)
/** Disable GA tracking */
val PROP_DISABLE_GA = ("spark.ext.h2o.disable.ga", false)
/** Exact client port to access web UI.
* The value `-1` means automatic search for free port starting at `spark.ext.h2o.port.base`. */
val PROP_CLIENT_WEB_PORT = ("spark.ext.h2o.client.web.port", -1)
/** Location of iced directory for the driver instance. */
val PROP_CLIENT_ICED_DIR = ("spark.ext.h2o.client.iced.dir", null.asInstanceOf[String])
/** Location of iced directory for Spark nodes */
val PROP_NODE_ICED_DIR = ("spark.ext.h2o.node.iced.dir", null.asInstanceOf[String])
/** Configuration property - multiplication factor for dummy RDD generation.
* Size of dummy RDD is PROP_CLUSTER_SIZE*PROP_DUMMY_RDD_MUL_FACTOR */
val PROP_DUMMY_RDD_MUL_FACTOR = ("spark.ext.h2o.dummy.rdd.mul.factor", 10)
/** Path to Java KeyStore file. */
val PROP_JKS = ("spark.ext.h2o.jks", null.asInstanceOf[String])
/** Password for Java KeyStore file. */
val PROP_JKS_PASS = ("spark.ext.h2o.jks.pass", null.asInstanceOf[String])
/** Enable hash login. */
val PROP_HASH_LOGIN = ("spark.ext.h2o.hash.login", false)
/** Enable LDAP login. */
val PROP_LDAP_LOGIN = ("spark.ext.h2o.ldap.login", false)
/** Login configuration file. */
val PROP_LOGIN_CONF = ("spark.ext.h2o.login.conf", null.asInstanceOf[String])
/** Override user name for cluster. */
val PROP_USER_NAME = ("spark.ext.h2o.user.name", null.asInstanceOf[String])
/**
* Produce arguments for H2O node based on this config.
* @return array of H2O launcher command line arguments
*/
def getH2ONodeArgs: Array[String] = (getH2OCommonOptions ++ Seq("-log_level", h2oNodeLogLevel)).toArray
/**
* Get arguments for H2O client.
* @return array of H2O client arguments.
*/
def getH2OClientArgs: Array[String] = (
getH2OCommonOptions
++ Seq("-quiet")
++ (if (hashLogin) Seq("-hash_login") else Nil)
++ (if (ldapLogin) Seq("-ldap_login") else Nil)
++ Seq("-log_level", h2oClientLogLevel)
++ Seq("-log_dir", clientLogDir)
++ Seq(
("-ice_root", clientIcedDir.orNull),
("-port", if (clientWebPort > 0) clientWebPort else null),
("-jks", jks.orNull),
("-jks_pass", jksPass.orNull),
("-login_conf", loginConf.orNull),
("-user_name", userName.orNull)
).filter(_._2 != null).flatMap(x => Seq(x._1, x._2.toString))
).toArray
private def getH2OCommonOptions:Seq[String] =
// Option in form key=value
Seq(
("-name", cloudName),
("-nthreads", if (nthreads > 0) nthreads else null),
("-network", networkMask.orNull),
("-baseport", basePort))
.filter(x => x._2 != null)
.flatMap(x => Seq(x._1, x._2.toString)) ++ // Append single boolean options
Seq(("-ga_opt_out", disableGA))
.filter(_._2).map(x => x._1)
private def clientLogDir: String = {
System.getProperty("user.dir") + java.io.File.separator + "h2ologs"
}
override def toString: String =
s"""Sparkling Water configuration:
| workers : $numH2OWorkers
| cloudName : $cloudName
| flatfile : $useFlatFile
| basePort : $basePort
| cloudTimeout : $cloudTimeout
| h2oNodeLog : $h2oNodeLogLevel
| h2oClientLog : $h2oClientLogLevel
| nthreads : $nthreads
| drddMulFactor: $drddMulFactor""".stripMargin
}
|
tromika/sparkling-water
|
core/src/main/scala/org/apache/spark/h2o/H2OConf.scala
|
Scala
|
apache-2.0
| 8,055 |
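// A hedged sketch of how the properties above surface as launcher arguments.
// Because sparkConf is private[spark], this sketch assumes it is compiled
// under the org.apache.spark.h2o package; the property values are illustrative.
package org.apache.spark.h2o

import org.apache.spark.SparkConf

object H2OConfSketch {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .set("spark.ext.h2o.cloud.name", "demo-cloud")
      .set("spark.ext.h2o.nthreads", "4")
    val h2oConf = new H2OConf {
      override private[spark] def sparkConf: SparkConf = conf
    }
    // Expected: -name demo-cloud -nthreads 4 -baseport 54321 -log_level INFO
    println(h2oConf.getH2ONodeArgs.mkString(" "))
  }
}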
package org.jetbrains.bsp.project.importing.setup
import com.intellij.notification.{Notification, NotificationType}
import com.intellij.openapi.actionSystem.{AnAction, AnActionEvent}
import com.intellij.openapi.application.{ApplicationManager, ModalityState}
import com.intellij.openapi.diagnostic.Logger
import com.intellij.openapi.ide.CopyPasteManager
import com.intellij.openapi.vfs.LocalFileSystem
import java.awt.datatransfer.StringSelection
import java.io.{BufferedReader, File, InputStreamReader}
import java.nio.file.Path
import java.util.concurrent.atomic.AtomicBoolean
import org.jetbrains.bsp.{BspBundle, BspErrorMessage}
import org.jetbrains.bsp.project.importing.FastpassProjectImportProvider
import org.jetbrains.bsp.project.importing.setup.FastpassConfigSetup.{FastpassProcessCheckTimeout, logger}
import org.jetbrains.plugins.scala.build.{BuildMessages, BuildReporter}
import scala.concurrent.duration.DurationInt
import scala.jdk.CollectionConverters._
import scala.util.{Failure, Success, Try}
object FastpassConfigSetup {
private val FastpassProcessCheckTimeout = 100.millis
private val logger = Logger.getInstance(classOf[FastpassConfigSetup])
val fastpassRelativePath = "fastpass/bin/fastpass"
def computeBspWorkspace(file: File): Path = {
val pantsRoot = FastpassProjectImportProvider.pantsRoot(LocalFileSystem.getInstance().findFileByIoFile(file))
val relativeDir = pantsRoot.get.toNioPath.relativize(file.toPath)
val projectName = relativeDir.toString.replace("/", ".")
val bspWorkspace = pantsRoot.get.getParent.toNioPath.resolve("bsp-projects").resolve(projectName)
bspWorkspace.toFile.toPath
}
def create(baseDir: File): Try[BspConfigSetup] = {
val bspWorkspace = FastpassConfigSetup.computeBspWorkspace(baseDir)
val baseDirVFile = LocalFileSystem.getInstance().findFileByIoFile(baseDir)
FastpassProjectImportProvider.pantsRoot(baseDirVFile) match {
case Some(_) if bspWorkspace.resolve(".bloop").toFile.exists()=> {
Success(new FastpassConfigSetupEmpty(bspWorkspace))
}
case Some(pantsRoot) =>
val relativeDir = pantsRoot.toNioPath.relativize(baseDirVFile.toNioPath)
val processBuilder = new ProcessBuilder(
fastpassRelativePath,
"create",
s"--name=${bspWorkspace.getFileName}",
relativeDir.toString + "::"
)
processBuilder.directory(new File(pantsRoot.toNioPath.toString))
logger.info(s"Creating BSP configuration with '${processBuilder.command().asScala.mkString(" ")}'")
Success(new FastpassConfigSetup(processBuilder))
      case None => Failure(new IllegalArgumentException(s"'$baseDir' is not a Pants directory"))
}
}
}
class FastpassConfigSetupEmpty(bspWorkspace: Path) extends BspConfigSetup {
override def cancel(): Unit = { }
override def run(implicit reporter: BuildReporter): Try[BuildMessages] = {
val realPath = bspWorkspace.toRealPath().toString
val title = BspBundle.message("bsp.fastpass.notification.reused.workspace.title")
val message = BspBundle.message("bsp.fastpass.notification.reused.workspace.message", realPath)
val notification = new Notification("Fastpass", title, message, NotificationType.WARNING)
notification.addAction(new AnAction(BspBundle.message("bsp.fastpass.notification.reused.workspace.button")) {
override def actionPerformed(e: AnActionEvent): Unit = {
CopyPasteManager.getInstance().setContents(new StringSelection(realPath))
}
})
notification.setImportant(true)
ApplicationManager.getApplication.invokeLater(new Runnable {
override def run(): Unit = notification.notify(null)
}, ModalityState.NON_MODAL)
Success(BuildMessages.empty)
}
}
class FastpassConfigSetup(processBuilder: ProcessBuilder) extends BspConfigSetup {
override def cancel(): Unit = cancellationFlag.set(true)
private val cancellationFlag: AtomicBoolean = new AtomicBoolean(false)
private def waitFinish(process: Process, reporter: BuildReporter): Try[BuildMessages] = {
val stdoutReader = new BufferedReader(new InputStreamReader(process.getInputStream))
val stderrReader = new BufferedReader(new InputStreamReader(process.getErrorStream))
var buildMessages = BuildMessages.empty
while(!process.waitFor(FastpassProcessCheckTimeout.length, FastpassProcessCheckTimeout.unit)){
val stderrLines = stderrReader.lines()
stderrLines.forEach{ line =>
buildMessages = buildMessages.addError(line)
reporter.log(line)
}
stdoutReader.lines().forEach{ line =>
buildMessages = buildMessages.message(line)
reporter.log(line)
}
if(cancellationFlag.get()){
process.destroy()
}
}
if(process.exitValue() == 0) {
Success(buildMessages.status(BuildMessages.OK))
} else {
Failure(BspErrorMessage(
s"""Command ${processBuilder.command.asScala} failed with:
           |${buildMessages.errors.mkString("\n")}""".stripMargin))
}
}
override def run(implicit reporter: BuildReporter): Try[BuildMessages] = {
reporter.start()
logger.info(s"Running '${processBuilder.command().asScala.mkString(" ")}' in ${processBuilder.directory()}")
val process = processBuilder.start()
val result = waitFinish(process, reporter)
result match {
case Failure(err) => {
// Log to ensure the error message is not lost. Current implementation of
// reporter.finishWithFailure ignores errors
logger.error(err)
reporter.finishWithFailure(err)
}
case Success(bm) => reporter.finish(bm)
}
result
}
}
|
JetBrains/intellij-scala
|
bsp/src/org/jetbrains/bsp/project/importing/setup/FastpassConfigSetup.scala
|
Scala
|
apache-2.0
| 5,664 |
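// A pure-path sketch of the workspace layout computeBspWorkspace derives,
// without the IntelliJ VFS machinery; the pants root and target paths are
// illustrative.
import java.nio.file.Paths

object BspWorkspaceSketch {
  def main(args: Array[String]): Unit = {
    val pantsRoot = Paths.get("/repo/pants")
    val target    = Paths.get("/repo/pants/src/scala/myapp")
    val projectName  = pantsRoot.relativize(target).toString.replace("/", ".")
    val bspWorkspace = pantsRoot.getParent.resolve("bsp-projects").resolve(projectName)
    println(bspWorkspace) // /repo/bsp-projects/src.scala.myapp
  }
}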
package io.eels.component.hbase
import io.eels._
import io.eels.schema.{DataType, StructType}
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp
import org.apache.hadoop.hbase.filter._
import org.apache.hadoop.hbase.util.Bytes
import scala.collection.JavaConversions._
// These are simply marker predicates used for pattern matching
case class ContainsPredicate(name: String, value: Any) extends NamedPredicate(name) {
override def eval(row: Row): Boolean = true
}
case class RegexPredicate(name: String, value: Any) extends NamedPredicate(name) {
override def eval(row: Row): Boolean = true
}
case class StartsWithPredicate(name: String, value: Any) extends NamedPredicate(name) {
override def eval(row: Row): Boolean = true
}
case class NotEqualsPredicate(name: String, value: Any) extends NamedPredicate(name) {
override def eval(row: Row): Boolean = row.get(name) != value
}
object HbasePredicate {
private val ByteComparableClazz = classOf[BinaryComparator]
private val StringComparableClazz = classOf[SubstringComparator]
private val RegexStringComparableClazz = classOf[RegexStringComparator]
private val BinaryPrefixComparableClazz = classOf[BinaryPrefixComparator]
def apply(pred: Predicate)(implicit schema: StructType, serializer: HbaseSerializer): FilterList = pred match {
    case EqualsPredicate(name, value) => new FilterList(hbaseFilter(name, CompareOp.EQUAL, value, ByteComparableClazz))
    case NotEqualsPredicate(name, value) => new FilterList(hbaseFilter(name, CompareOp.NOT_EQUAL, value, ByteComparableClazz))
    case ContainsPredicate(name, value) => new FilterList(hbaseFilter(name, CompareOp.EQUAL, value, StringComparableClazz))
    case StartsWithPredicate(name, value) => new FilterList(hbaseFilter(name, CompareOp.EQUAL, value, BinaryPrefixComparableClazz))
    case RegexPredicate(name, value) => new FilterList(hbaseFilter(name, CompareOp.EQUAL, value, RegexStringComparableClazz))
    case GtPredicate(name, value) => new FilterList(hbaseFilter(name, CompareOp.GREATER, value, ByteComparableClazz))
    case GtePredicate(name, value) => new FilterList(hbaseFilter(name, CompareOp.GREATER_OR_EQUAL, value, ByteComparableClazz))
    case LtPredicate(name, value) => new FilterList(hbaseFilter(name, CompareOp.LESS, value, ByteComparableClazz))
    case LtePredicate(name, value) => new FilterList(hbaseFilter(name, CompareOp.LESS_OR_EQUAL, value, ByteComparableClazz))
    case AndPredicate(predicates: Seq[Predicate]) => new FilterList(FilterList.Operator.MUST_PASS_ALL, predicates.map(apply).flatMap(_.getFilters))
    case OrPredicate(predicates: Seq[Predicate]) => new FilterList(FilterList.Operator.MUST_PASS_ONE, predicates.map(apply).flatMap(_.getFilters))
    case predicateType => sys.error(s"Predicate type '${predicateType.getClass.getSimpleName}' is not supported!")
}
  def hbaseFilter[T](name: String, compareOp: CompareOp, value: Any, comparableClass: Class[T])
(implicit schema: StructType, serializer: HbaseSerializer): Filter = {
val field = schema.fields.find(_.name == name).getOrElse(sys.error(s"Field '$name' in the predicate is not defined in the EEL schema"))
if (field.key) {
new RowFilter(compareOp, hbaseComparator(comparableClass, name, field.dataType, value))
} else {
new SingleColumnValueFilter(
Bytes.toBytes(field.columnFamily.getOrElse(sys.error(s"No Column Family defined for field '${field.name}'"))),
Bytes.toBytes(name),
compareOp,
hbaseComparator(comparableClass, name, field.dataType, value))
}
}
def hbaseComparator[T](comparableClass: Class[T], name: String, dataType: DataType, value: Any)
(implicit schema: StructType, serializer: HbaseSerializer): ByteArrayComparable = (comparableClass, value) match {
case (ByteComparableClazz, _) => new BinaryComparator(serializer.toBytes(value, name, dataType))
case (RegexStringComparableClazz, stringValue: String) => new RegexStringComparator(stringValue)
case (StringComparableClazz, stringValue: String) => new SubstringComparator(stringValue)
case (BinaryPrefixComparableClazz, _) => new BinaryPrefixComparator(serializer.toBytes(value, name, dataType))
}
// Shorthand predicate names
def or(left: Predicate, right: Predicate) = OrPredicate(Seq(left, right))
def and(left: Predicate, right: Predicate) = AndPredicate(Seq(left, right))
def equals(name: String, value: Any) = EqualsPredicate(name, value)
def notEquals(name: String, value: Any) = NotEqualsPredicate(name, value)
def gt(name: String, value: Any) = GtPredicate(name, value)
def gte(name: String, value: Any) = GtePredicate(name, value)
def lt(name: String, value: Any) = LtPredicate(name, value)
def lte(name: String, value: Any) = LtePredicate(name, value)
def regex(name: String, value: Any) = RegexPredicate(name, value)
def contains(name: String, value: Any) = ContainsPredicate(name, value)
def startsWith(name: String, value: Any) = StartsWithPredicate(name, value)
}
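// Usage sketch (the schema and serializer values are assumptions; both are implicit
// parameters of apply):
//
//   implicit val schema: StructType = ???
//   implicit val serializer: HbaseSerializer = ???
//   import HbasePredicate._
//   val filters: FilterList = HbasePredicate(and(gte("age", 18), startsWith("name", "Sam")))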
|
sksamuel/hadoop-streams
|
eel-hbase/src/main/scala/io/eels/component/hbase/HbasePredicate.scala
|
Scala
|
apache-2.0
| 5,053 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.v2.parquet
import org.apache.hadoop.mapreduce.{Job, OutputCommitter, TaskAttemptContext}
import org.apache.parquet.hadoop.{ParquetOutputCommitter, ParquetOutputFormat}
import org.apache.parquet.hadoop.ParquetOutputFormat.JobSummaryLevel
import org.apache.parquet.hadoop.codec.CodecConfig
import org.apache.parquet.hadoop.util.ContextUtil
import org.apache.spark.internal.Logging
import org.apache.spark.sql.Row
import org.apache.spark.sql.connector.write.LogicalWriteInfo
import org.apache.spark.sql.execution.datasources.{OutputWriter, OutputWriterFactory}
import org.apache.spark.sql.execution.datasources.parquet._
import org.apache.spark.sql.execution.datasources.v2.FileWrite
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
case class ParquetWrite(
paths: Seq[String],
formatName: String,
supportsDataType: DataType => Boolean,
info: LogicalWriteInfo) extends FileWrite with Logging {
override def prepareWrite(
sqlConf: SQLConf,
job: Job,
options: Map[String, String],
dataSchema: StructType): OutputWriterFactory = {
val parquetOptions = new ParquetOptions(options, sqlConf)
val conf = ContextUtil.getConfiguration(job)
val committerClass =
conf.getClass(
SQLConf.PARQUET_OUTPUT_COMMITTER_CLASS.key,
classOf[ParquetOutputCommitter],
classOf[OutputCommitter])
if (conf.get(SQLConf.PARQUET_OUTPUT_COMMITTER_CLASS.key) == null) {
logInfo("Using default output committer for Parquet: " +
classOf[ParquetOutputCommitter].getCanonicalName)
} else {
logInfo("Using user defined output committer for Parquet: " + committerClass.getCanonicalName)
}
conf.setClass(
SQLConf.OUTPUT_COMMITTER_CLASS.key,
committerClass,
classOf[OutputCommitter])
// We're not really using `ParquetOutputFormat[Row]` for writing data here, because we override
// it in `ParquetOutputWriter` to support appending and dynamic partitioning. The reason why
// we set it here is to setup the output committer class to `ParquetOutputCommitter`, which is
// bundled with `ParquetOutputFormat[Row]`.
job.setOutputFormatClass(classOf[ParquetOutputFormat[Row]])
ParquetOutputFormat.setWriteSupportClass(job, classOf[ParquetWriteSupport])
ParquetSchemaConverter.checkFieldNames(dataSchema)
// This metadata is useful for keeping UDTs like Vector/Matrix.
ParquetWriteSupport.setSchema(dataSchema, conf)
// Sets flags for `ParquetWriteSupport`, which converts Catalyst schema to Parquet
// schema and writes actual rows to Parquet files.
conf.set(SQLConf.PARQUET_WRITE_LEGACY_FORMAT.key, sqlConf.writeLegacyParquetFormat.toString)
conf.set(SQLConf.PARQUET_OUTPUT_TIMESTAMP_TYPE.key, sqlConf.parquetOutputTimestampType.toString)
// Sets compression scheme
conf.set(ParquetOutputFormat.COMPRESSION, parquetOptions.compressionCodecClassName)
// SPARK-15719: Disables writing Parquet summary files by default.
if (conf.get(ParquetOutputFormat.JOB_SUMMARY_LEVEL) == null
&& conf.get(ParquetOutputFormat.ENABLE_JOB_SUMMARY) == null) {
conf.setEnum(ParquetOutputFormat.JOB_SUMMARY_LEVEL, JobSummaryLevel.NONE)
}
    if (ParquetOutputFormat.getJobSummaryLevel(conf) != JobSummaryLevel.NONE
&& !classOf[ParquetOutputCommitter].isAssignableFrom(committerClass)) {
// output summary is requested, but the class is not a Parquet Committer
logWarning(s"Committer $committerClass is not a ParquetOutputCommitter and cannot" +
s" create job summaries. " +
s"Set Parquet option ${ParquetOutputFormat.JOB_SUMMARY_LEVEL} to NONE.")
}
new OutputWriterFactory {
override def newInstance(
path: String,
dataSchema: StructType,
context: TaskAttemptContext): OutputWriter = {
new ParquetOutputWriter(path, context)
}
override def getFileExtension(context: TaskAttemptContext): String = {
CodecConfig.from(context).getCodec.getExtension + ".parquet"
}
}
}
}
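// Usage note: this class is wired up internally by Spark's DataSource V2 file write
// framework; end users reach it indirectly, e.g. via df.write.parquet(path)
// (`df` and `path` here are assumptions for illustration).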
|
chuckchen/spark
|
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/parquet/ParquetWrite.scala
|
Scala
|
apache-2.0
| 4,941 |
package com.olaq.sbt
import java.util.Properties
import org.sonarsource.scanner.api.{EmbeddedScanner, LogOutput}
import sbt.{Def, _}
import Keys._
import scala.collection.JavaConverters._
object SonarScannerPlugin extends AutoPlugin {
object autoImport {
val sonar: TaskKey[Unit] = taskKey[Unit]("Runs sonar-scanner")
val printSonarProperties: TaskKey[Unit] = taskKey[Unit]("Prints Sonar properties for current project")
val sonarProperties: TaskKey[Map[String, String]] = taskKey[Map[String, String]]("Sonar Scanner properties")
}
import autoImport._
override def trigger: PluginTrigger = allRequirements
override def projectSettings = Seq(
sonar := sonarScanTask.value,
printSonarProperties := printSonarPropertiesTask.value,
sonarProperties := sonarPropertiesTask.value
)
private lazy val sonarPropertiesTask = Def.task {
Map(
"sonar.host.url" -> "http://localhost:9000",
"sonar.projectKey" -> s"${organization.value}:${name.value}",
"sonar.projectVersion" -> version.value,
"sonar.sources" -> sourceDirectory.in(Compile).value.absolutePath,
"sonar.tests" -> sourceDirectory.in(Test).value.absolutePath,
"sonar.java.binaries" -> classDirectory.in(Compile).value.absolutePath,
"sonar.java.test.binaries" -> classDirectory.in(Test).value.absolutePath,
"sonar.java.libraries" -> dependencyClasspath.in(Compile).value
.map(p => p.data.absolutePath)
.mkString(","),
"sonar.java.test.libraries" -> dependencyClasspath.in(Test).value
.map(p => p.data.absolutePath)
.filter(s => s.endsWith(".jar"))
.mkString(",")
)
}
private lazy val sonarScanTask = Def.task {
val properties = new Properties()
properties.putAll(sonarProperties.value.asJava)
val runner = EmbeddedScanner.create(new LogOutputImpl(sLog.value))
.addGlobalProperties(properties)
runner.start()
runner.runAnalysis(properties)
runner.stop()
}
private lazy val printSonarPropertiesTask = Def.task {
val logger = sLog.value
logger.info("Printing sonar properties:")
    logger.info(sonarProperties.value.mkString("\n"))
}
class LogOutputImpl(logger: Logger) extends LogOutput {
override def log(formattedMessage: String, level: LogOutput.Level): Unit = {
level match {
case LogOutput.Level.TRACE => logger.debug(formattedMessage)
case LogOutput.Level.DEBUG => logger.debug(formattedMessage)
case LogOutput.Level.INFO => logger.info(formattedMessage)
case LogOutput.Level.WARN => logger.warn(formattedMessage)
case LogOutput.Level.ERROR => logger.error(formattedMessage)
}
}
}
}
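// Usage sketch: the plugin auto-triggers (allRequirements), so from the sbt shell:
//
//   sbt printSonarProperties   // inspect the generated properties
//   sbt sonar                  // run sonar-scanner against sonar.host.url
//
// Properties can be overridden in build.sbt, e.g. (hypothetical host):
//
//   sonarProperties := sonarProperties.value + ("sonar.host.url" -> "https://sonar.example.com")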
|
olaq/sbt-sonar-scanner-plugin
|
src/main/scala/com/olaq/sbt/SonarScannerPlugin.scala
|
Scala
|
mit
| 2,700 |
/*
* Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package play.api.mvc.request
import javax.inject.Inject
import play.api.http.{ HttpConfiguration, SecretConfiguration }
import play.api.libs.crypto.CookieSignerProvider
import play.api.libs.typedmap.TypedMap
import play.api.mvc._
import play.core.system.RequestIdProvider
/**
* A `RequestFactory` provides logic for creating requests.
*/
trait RequestFactory {
/**
* Create a `RequestHeader`.
*/
def createRequestHeader(
connection: RemoteConnection,
method: String,
target: RequestTarget,
version: String,
headers: Headers,
attrs: TypedMap): RequestHeader
/**
* Creates a `RequestHeader` based on the values of an
* existing `RequestHeader`. The factory may modify the copied
* values to produce a modified `RequestHeader`.
*/
def copyRequestHeader(rh: RequestHeader): RequestHeader = {
createRequestHeader(rh.connection, rh.method, rh.target, rh.version, rh.headers, rh.attrs)
}
/**
* Create a `Request` with a body. By default this just calls
* `createRequestHeader(...).withBody(body)`.
*/
def createRequest[A](
connection: RemoteConnection,
method: String,
target: RequestTarget,
version: String,
headers: Headers,
attrs: TypedMap,
body: A): Request[A] =
createRequestHeader(connection, method, target, version, headers, attrs).withBody(body)
/**
* Creates a `Request` based on the values of an
* existing `Request`. The factory may modify the copied
* values to produce a modified `Request`.
*/
def copyRequest[A](r: Request[A]): Request[A] = {
createRequest[A](r.connection, r.method, r.target, r.version, r.headers, r.attrs, r.body)
}
}
object RequestFactory {
/**
* A `RequestFactory` that creates a request with the arguments given, without
* any additional modification.
*/
val plain = new RequestFactory {
override def createRequestHeader(
connection: RemoteConnection,
method: String,
target: RequestTarget,
version: String,
headers: Headers,
attrs: TypedMap): RequestHeader =
new RequestHeaderImpl(connection, method, target, version, headers, attrs)
}
}
/**
* The default [[RequestFactory]] used by a Play application. This
* `RequestFactory` adds the following typed attributes to requests:
* - request id
* - cookie
* - session cookie
* - flash cookie
*/
class DefaultRequestFactory @Inject() (
val cookieHeaderEncoding: CookieHeaderEncoding,
val sessionBaker: SessionCookieBaker,
val flashBaker: FlashCookieBaker) extends RequestFactory {
def this(config: HttpConfiguration) = this(
new DefaultCookieHeaderEncoding(config.cookies),
new DefaultSessionCookieBaker(config.session, config.secret, new CookieSignerProvider(config.secret).get),
new DefaultFlashCookieBaker(config.flash, config.secret, new CookieSignerProvider(config.secret).get)
)
override def createRequestHeader(
connection: RemoteConnection,
method: String,
target: RequestTarget,
version: String,
headers: Headers,
attrs: TypedMap): RequestHeader = {
val requestId: Long = RequestIdProvider.freshId()
val cookieCell = new LazyCell[Cookies] {
override protected def emptyMarker: Cookies = null
override protected def create: Cookies =
cookieHeaderEncoding.fromCookieHeader(headers.get(play.api.http.HeaderNames.COOKIE))
}
val sessionCell = new LazyCell[Session] {
override protected def emptyMarker: Session = null
override protected def create: Session = sessionBaker.decodeFromCookie(cookieCell.value.get(sessionBaker.COOKIE_NAME))
}
val flashCell = new LazyCell[Flash] {
override protected def emptyMarker: Flash = null
override protected def create: Flash = flashBaker.decodeFromCookie(cookieCell.value.get(flashBaker.COOKIE_NAME))
}
val updatedAttrMap = attrs + (
RequestAttrKey.Id -> requestId,
RequestAttrKey.Cookies -> cookieCell,
RequestAttrKey.Session -> sessionCell,
RequestAttrKey.Flash -> flashCell
)
new RequestHeaderImpl(connection, method, target, version, headers, updatedAttrMap)
}
}
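// Usage sketch: the cookie/session/flash attributes are LazyCells, so the parsing cost
// is only paid on first access. A factory can also be built straight from configuration
// (`httpConfiguration` and `rawRequestHeader` are assumed to be in scope):
//
//   val factory = new DefaultRequestFactory(httpConfiguration)
//   val enriched = factory.copyRequestHeader(rawRequestHeader)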
|
Shruti9520/playframework
|
framework/src/play/src/main/scala/play/api/mvc/request/RequestFactory.scala
|
Scala
|
apache-2.0
| 4,231 |
import org.apache.hadoop.conf.Configuration
import org.apache.spark.{SparkContext, SparkConf}
import org.apache.spark.rdd.RDD
import org.bson.BSONObject
import com.mongodb.hadoop.{
MongoInputFormat, MongoOutputFormat,
BSONFileInputFormat, BSONFileOutputFormat}
import com.mongodb.hadoop.io.MongoUpdateWritable
import java.io._
import com.cloudera.datascience.lsa._
import com.cloudera.datascience.lsa.ParseWikipedia._
import com.cloudera.datascience.lsa.RunLSA._
import org.apache.spark.rdd.EmptyRDD
import scala.collection.mutable.ListBuffer
import org.apache.spark.mllib.linalg._
import org.apache.spark.mllib.linalg.distributed.RowMatrix
import breeze.linalg.{DenseMatrix => BDenseMatrix, DenseVector => BDenseVector, SparseVector => BSparseVector}
import org.apache.spark.mllib.regression._
import org.apache.spark.rdd._
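// NOTE: this is a spark-shell script rather than a compiled source file (see the
// `:type` command and the trailing `exit` below); the repeated `val outputConfig`
// declarations are therefore legal REPL re-bindings.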
@transient val mongoConfig = new Configuration()
mongoConfig.set("mongo.input.uri",
"mongodb://localhost:27017/cordir.brief")
val documents = sc.newAPIHadoopRDD(
mongoConfig, // Configuration
classOf[MongoInputFormat], // InputFormat
classOf[Object], // Key type
classOf[BSONObject]) // Value type
:type documents
val stopWords = sc.broadcast(ParseWikipedia.loadStopWords("deps/lsa/src/main/resources/stopwords.txt")).value
var lemmatized = documents.map(s=> (s._2.get("_id").toString,ParseWikipedia.plainTextToLemmas(s._2.get("article").toString, stopWords, ParseWikipedia.createNLPPipeline())))
val numTerms = 1000
val k = 100 // number of singular values to keep
val nbConcept = 30
val filtered = lemmatized.filter(_._2.size > 1)
val documentSize = documents.count().toInt // count without collecting the whole RDD to the driver
println("Documents Size : "+documentSize)
println("Number of Terms : "+numTerms)
val (termDocMatrix, termIds, docIds, idfs) = ParseWikipedia.termDocumentMatrix(filtered, stopWords, numTerms, sc)
val outputConfig = new Configuration()
outputConfig.set("mongo.output.uri","mongodb://localhost:27017/cordir.briefIdfs")
sc.parallelize(idfs.toSeq).saveAsNewAPIHadoopFile("file:///this-is-completely-unused",classOf[Object],classOf[BSONObject],classOf[MongoOutputFormat[Object, BSONObject]],outputConfig)
outputConfig.set("mongo.output.uri","mongodb://localhost:27017/cordir.briefTermDocMatrix")
termDocMatrix.zipWithIndex().map(a => (a._2,a._1.toArray)).saveAsNewAPIHadoopFile("file:///this-is-completely-unused",classOf[Object],classOf[BSONObject],classOf[MongoOutputFormat[Object, BSONObject]],outputConfig)
outputConfig.set("mongo.output.uri","mongodb://localhost:27017/cordir.briefTermIds")
sc.parallelize(termIds.toSeq).saveAsNewAPIHadoopFile("file:///this-is-completely-unused",classOf[Object],classOf[BSONObject],classOf[MongoOutputFormat[Object, BSONObject]],outputConfig)
outputConfig.set("mongo.output.uri","mongodb://localhost:27017/cordir.briefDocIds")
sc.parallelize(docIds.toSeq).saveAsNewAPIHadoopFile("file:///this-is-completely-unused",classOf[Object],classOf[BSONObject],classOf[MongoOutputFormat[Object, BSONObject]],outputConfig)
val mat = new RowMatrix(termDocMatrix)
val svd = mat.computeSVD(k, computeU=true)
val topConceptTerms = RunLSA.topTermsInTopConcepts(svd, nbConcept, numTerms, termIds)
val topConceptDocs = RunLSA.topDocsInTopConcepts(svd, nbConcept, documentSize, docIds)
var all=sc.emptyRDD[(String,Double)]
import collection.mutable.HashMap
val docConcept = new HashMap[String,ListBuffer[Double]]()
var count=0
for ( a <- topConceptDocs) {
count+=1
for ( (b,c) <- a) {
if (!docConcept.contains(b)) {
docConcept.put(b, new ListBuffer[Double]())
}
docConcept(b) += c
}
for((k,v) <- docConcept){
while(v.size<count){
v+=0.0
}
}
}
// Persist the document-concept scores to MongoDB
var docConceptRDD=sc.parallelize(docConcept.toSeq)
var toWrite=docConceptRDD.map(a => (a._1, a._2.toArray))
val outputConfig = new Configuration()
outputConfig.set("mongo.output.uri",
"mongodb://localhost:27017/cordir.briefDocConcept")
toWrite.saveAsNewAPIHadoopFile(
"file:///this-is-completely-unused",
classOf[Object],
classOf[BSONObject],
classOf[MongoOutputFormat[Object, BSONObject]],
outputConfig)
// Build the term-concept scores
val termConcept = new HashMap[String,ListBuffer[Double]]()
count=0
for ( a <- topConceptTerms) {
count+=1
for ( (b,c) <- a) {
if (!termConcept.contains(b)) {
termConcept.put(b, new ListBuffer[Double]())
}
termConcept(b) += c
}
for((k,v) <- termConcept){
while(v.size<count){
v+=0.0
}
}
}
var parr=sc.parallelize(termConcept.toSeq)
val outputConfig = new Configuration()
outputConfig.set("mongo.output.uri","mongodb://localhost:27017/cordir.briefTermConcept")
parr.map(a => (a._1,a._2.toArray)).coalesce(1,true).saveAsNewAPIHadoopFile("file:///this-is-completely-unused",classOf[Object],classOf[BSONObject],classOf[MongoOutputFormat[Object, BSONObject]],outputConfig)
exit
|
StatisticalProject/CORDIR
|
sparkBrief.scala
|
Scala
|
apache-2.0
| 4,942 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.contrib.scalaz
import slamdata.Predef._
import java.lang.{Throwable, RuntimeException}
import scalaz._, Scalaz._, Leibniz.===
import scalaz.concurrent.Task
trait CatchableInstances {
implicit def injectableTaskCatchable[S[_]](implicit I: Task :<: S): Catchable[Free[S, ?]] =
catchable.freeCatchable[Task, S]
}
final class CatchableOps[F[_], A] private[scalaz] (self: F[A])(implicit F0: Catchable[F]) {
/** Reify thrown non-fatal exceptions as a value. */
  def attemptNonFatal(implicit F: Monad[F]): F[Throwable \/ A] =
new CatchableOps(self map (_.right[Throwable])).handleNonFatal {
case th => th.left[A]
}
/** Ensures `f` is sequenced after `fa`, whether the latter succeeded or not.
*
* Useful for releasing resources that may have been acquired in order to
* produce `fa`.
*/
def ensuring(f: Option[Throwable] => F[Unit])(implicit F: Bind[F]): F[A] =
F0.attempt(self) flatMap {
      case -\/(t) => f(some(t)) *> F0.fail(t)
      case \/-(a) => f(none) as a
}
/** Handle caught exceptions using the given partial function, reraising any
* unhandled exceptions.
*/
def handle[B >: A](pf: PartialFunction[Throwable, B])(implicit F: Monad[F]): F[B] =
handleWith[B](pf andThen (F.point(_)))
/** Handle caught non-fatal exceptions using the given partial function,
* reraising any unhandled exceptions.
*/
def handleNonFatal[B >: A](pf: PartialFunction[Throwable, B])(implicit F: Monad[F]): F[B] =
handleNonFatalWith[B](pf andThen (F.point(_)))
/** Handle caught non-fatal exceptions using the given effectful partial
* function, reraising any unhandled exceptions.
*/
def handleNonFatalWith[B >: A](pf: PartialFunction[Throwable, F[B]])(implicit F: Monad[F]): F[B] =
handleWith[B](nonFatal andThen pf)
/** Handle caught exceptions using the given effectful partial function,
* reraising any unhandled exceptions.
*/
def handleWith[B >: A](pf: PartialFunction[Throwable, F[B]])(implicit F: Monad[F]): F[B] =
F0.attempt(self) flatMap {
      case -\/(t) => pf.lift(t) getOrElse F0.fail(t)
      case \/-(a) => F.point(a)
}
////
private val nonFatal: PartialFunction[Throwable, Throwable] = {
case scala.util.control.NonFatal(t) => t
}
}
final class CatchableOfDisjunctionOps[F[_], A, B] private[scalaz] (self: F[A \/ B])(implicit F: Catchable[F]) {
def unattempt(implicit M: Monad[F], T: A === Throwable): F[B] =
self flatMap (_.fold(a => F.fail[B](T(a)), M.point(_)))
def unattemptRuntime(implicit M: Monad[F], S: Show[A]): F[B] =
new CatchableOfDisjunctionOps(
self map (_.leftMap[Throwable](a => new RuntimeException(a.shows)))
).unattempt
}
trait ToCatchableOps {
implicit def toCatchableOps[F[_]: Catchable, A](self: F[A]): CatchableOps[F, A] =
new CatchableOps(self)
  implicit def toCatchableOfDisjunctionOps[F[_]: Catchable, A, B](self: F[A \/ B]): CatchableOfDisjunctionOps[F, A, B] =
new CatchableOfDisjunctionOps(self)
}
object catchable extends CatchableInstances with ToCatchableOps {
def freeCatchable[F[_], S[_]](implicit F: Catchable[F], I: F :<: S): Catchable[Free[S, ?]] =
new Catchable[Free[S, ?]] {
type ExceptT[X[_], A] = EitherT[X, Throwable, A]
private val attemptT: S ~> ExceptT[Free[S, ?], ?] =
λ[S ~> ExceptT[Free[S, ?], ?]](sa =>
EitherT(I.prj(sa).fold(
Free.liftF(sa) map (_.right[Throwable]))
{ case fa => Free.liftF(I(F.attempt(fa))) }))
def attempt[A](fa: Free[S, A]) =
fa.foldMap(attemptT).run
def fail[A](t: Throwable) =
Free.liftF(I(F.fail[A](t)))
}
}
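// Usage sketch with scalaz Task, which has a Catchable instance
// (`readBytes` and `release` are hypothetical Task values):
//
//   import quasar.contrib.scalaz.catchable._
//
//   val attempted: Task[Throwable \/ Array[Byte]] = readBytes.attemptNonFatal
//   val guarded: Task[Array[Byte]] = readBytes.ensuring(_ => release)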
|
jedesah/Quasar
|
foundation/src/main/scala/quasar/contrib/scalaz/catchable.scala
|
Scala
|
apache-2.0
| 4,281 |
package com.pktippa
object StringMingling {
  def main(args: Array[String]) {
    // Read Pawel's string from standard input
    val pawelString = scala.io.StdIn.readLine()
    // Read Shaka's string from standard input
    val shakaString = scala.io.StdIn.readLine()
    // Mingled string
    val mingledString = getMingledString(pawelString, shakaString)
    println(mingledString)
  }
  // Method getMingledString
  // Accepts two strings of equal length and returns their characters interleaved
  // (a recursive variant is sketched below)
  def getMingledString(first: String, second: String): String = {
    // Initializing mingledString as a new StringBuilder
    val mingledString = StringBuilder.newBuilder
    // Looping through 0 until the length of the first string
    for (x <- 0 until first.length()) {
      mingledString.append(first.charAt(x).toString + second.charAt(x).toString) // Building the mingled string
    }
    // Returning the mingled string
    mingledString.toString
  }
}
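  // A recursive variant, sketching the TODO above; like the iterative version,
  // it assumes both strings are non-null and of equal length.
  def getMingledStringRec(first: String, second: String): String =
    if (first.isEmpty) ""
    else first.head.toString + second.head.toString + getMingledStringRec(first.tail, second.tail)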
}
|
pk-hackerrank/hr-funcprog
|
recursion/string-mingling/scala/src/com/pktippa/StringMingling.scala
|
Scala
|
mit
| 910 |
package com.twitter.server.filters
import com.twitter.concurrent.NamedPoolThreadFactory
import com.twitter.finagle.Service
import com.twitter.finagle.filter.OffloadFilter
import com.twitter.finagle.http.{Request, Response}
import com.twitter.server.AdminHttpServer.Route
import com.twitter.util.{ExecutorServiceFuturePool, FuturePool}
import java.util.concurrent.Executors
object AdminThreadPoolFilter {
private lazy val Pool: FuturePool =
new ExecutorServiceFuturePool(
Executors
.newCachedThreadPool(new NamedPoolThreadFactory("AdminFuturePool", makeDaemons = true))
) {
override def toString: String = "Admin.FuturePool"
}
/**
* Force the [[Route]] `r` to be handled by a dedicated admin thread pool.
*/
def isolateRoute(r: Route): Route =
r.copy(handler = isolateService(r.handler))
/**
   * Force the `Service[Request, Response]` `s` to be handled by a dedicated admin thread pool.
*/
def isolateService(s: Service[Request, Response]): Service[Request, Response] =
(new OffloadFilter.Server(Pool)).andThen(s)
}
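// Usage sketch: isolate an admin route so its handler runs on the dedicated pool
// (`pingRoute` is a hypothetical Route):
//
//   val isolated: Route = AdminThreadPoolFilter.isolateRoute(pingRoute)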
|
twitter/twitter-server
|
server/src/main/scala/com/twitter/server/filters/AdminThreadPoolFilter.scala
|
Scala
|
apache-2.0
| 1,080 |
/*
* Copyright 2020 Precog Data
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.qsu.mra
import slamdata.Predef.{Boolean, Vector}
import cats.{Applicative, Eq, Monoid, Reducible}
import cats.data.{Const, NonEmptyVector}
import cats.kernel.{BoundedSemilattice, CommutativeMonoid}
import cats.instances.vector._
import cats.syntax.apply._
import cats.syntax.eq._
import cats.syntax.reducible._
import scalaz.@@
import scalaz.Tags.{Disjunction, Conjunction}
import scalaz.syntax.tag._
trait Provenance[S, V, T] { self =>
type P
/** Apply `f` to cross product of the independent components of `ps`,
* 'or'-ing the results.
*
   * Each argument to `f` will have the same size as `ps`; if any of the `ps`
   * are empty, their corresponding position in the `NonEmptyVector` will also
   * be empty.
*/
def applyComponentsN[F[_]: Applicative, G[_]: Reducible](
f: NonEmptyVector[P] => F[P])(
ps: G[P])
: F[P] = {
def nev(p: P): NonEmptyVector[P] =
NonEmptyVector.fromVector(foldMapComponents(Vector(_))(p))
.getOrElse(NonEmptyVector.one(empty))
def cross(in: NonEmptyVector[NonEmptyVector[P]], p: P)
: NonEmptyVector[NonEmptyVector[P]] =
nev(p).flatMap(p => in.map(_ :+ p))
val crossed =
ps.reduceLeftTo(p => nev(p).map(NonEmptyVector.one(_)))(cross)
crossed.reduceLeftTo(f)((x, y) => (x, f(y)).mapN(or))
}
/** The set of identity comparisons describing an autojoin of `l` and `r`. */
def autojoin(l: P, r: P): AutoJoin[S, V]
/** The conjunction of two provenance, representing a dataset where values
* have identities from both inputs.
*/
def and(l: P, r: P): P
/** Provenance having a dimensionality of zero. */
def empty: P
/** Convert each component into `A` and combine using its monoid. */
def foldMapComponents[A: Monoid](f: P => A)(p: P): A =
traverseComponents[Const[A, ?]](p => Const(f(p)))(p).getConst
/** Convert each scalar id into `A` and combine using its monoid. */
def foldMapScalarIds[A: Monoid](f: (S, T) => A)(p: P): A =
traverseScalarIds[Const[A, ?]]((s, t) => Const(f(s, t)))(p).getConst
/** Convert each vector id into `A` and combine using its monoid. */
def foldMapVectorIds[A: Monoid](f: (V, T) => A)(p: P): A =
traverseVectorIds[Const[A, ?]]((v, t) => Const(f(v, t)))(p).getConst
/** Append an identity, maintaining current dimensionality. */
def inflateConjoin(vectorId: V, sort: T, p: P): P
/** Append an identity, increasing dimensionality by 1. */
def inflateExtend(vectorId: V, sort: T, p: P): P
/** "Submerge" an identity, making it the second-highest dimension,
* increasing dimensionality by 1.
*/
def inflateSubmerge(vectorId: V, sort: T, p: P): P
/** Inject into a structure at an unknown field, maintains dimensionality. */
def injectDynamic(p: P): P
/** Inject into a structure at the given field, maintains dimensionality. */
def injectStatic(scalarId: S, sort: T, p: P): P
/** The disjunction of two provenance, representing a dataset where values
* have identities from either of the inputs.
*/
def or(l: P, r: P): P
/** Project an unknown field, maintains dimensionality. */
def projectDynamic(p: P): P
/** Project a statically known field, maintains dimensionality. */
def projectStatic(scalarId: S, sort: T, p: P): P
/** Discard the highest dimension, reducing dimensionality by 1. */
def reduce(p: P): P
/** Conjoin all dimensions into a single one. */
def squash(p: P): P
/** Apply `f` to each independent component of `p`. */
def traverseComponents[F[_]: Applicative](f: P => F[P])(p: P): F[P]
/** Apply `f` to each scalar id of `p`. */
def traverseScalarIds[F[_]: Applicative](f: (S, T) => F[(S, T)])(p: P): F[P]
/** Apply `f` to each vector id of `p`. */
def traverseVectorIds[F[_]: Applicative](f: (V, T) => F[(V, T)])(p: P): F[P]
object instances {
implicit val pConjunctionCommutativeMonoid: CommutativeMonoid[P @@ Conjunction] =
new CommutativeMonoid[P @@ Conjunction] {
val empty = Conjunction(self.empty)
def combine(x: P @@ Conjunction, y: P @@ Conjunction) =
Conjunction(and(x.unwrap, y.unwrap))
}
implicit val pDisjunctionSemilattice: BoundedSemilattice[P @@ Disjunction] =
new BoundedSemilattice[P @@ Disjunction] {
val empty = Disjunction(self.empty)
def combine(x: P @@ Disjunction, y: P @@ Disjunction) =
Disjunction(or(x.unwrap, y.unwrap))
}
}
object syntax {
implicit final class ProvenanceOps(p: P) {
def ⋈ (that: P): AutoJoin[S, V] =
self.autojoin(p, that)
def ∧ (that: P): P =
self.and(p, that)
def ∨ (that: P): P =
self.or(p, that)
def foldMapComponents[A: Monoid](f: P => A): A =
self.foldMapComponents(f)(p)
def foldMapScalarIds[A: Monoid](f: (S, T) => A): A =
self.foldMapScalarIds(f)(p)
def foldMapVectorIds[A: Monoid](f: (V, T) => A): A =
self.foldMapVectorIds(f)(p)
def inflateConjoin(vectorId: V, sort: T): P =
self.inflateConjoin(vectorId, sort, p)
def inflateExtend(vectorId: V, sort: T): P =
self.inflateExtend(vectorId, sort, p)
def inflateSubmerge(vectorId: V, sort: T): P =
self.inflateSubmerge(vectorId, sort, p)
def injectDynamic: P =
self.injectDynamic(p)
def injectStatic(scalarId: S, sort: T): P =
self.injectStatic(scalarId, sort, p)
def isEmpty(implicit P: Eq[P]): Boolean =
p === self.empty
def projectDynamic: P =
self.projectDynamic(p)
def projectStatic(scalarId: S, sort: T): P =
self.projectStatic(scalarId, sort, p)
def reduce: P =
self.reduce(p)
def squash: P =
self.squash(p)
def traverseComponents[F[_]: Applicative](f: P => F[P]): F[P] =
self.traverseComponents(f)(p)
def traverseScalarIds[F[_]: Applicative](f: (S, T) => F[(S, T)]): F[P] =
self.traverseScalarIds(f)(p)
def traverseVectorIds[F[_]: Applicative](f: (V, T) => F[(V, T)]): F[P] =
self.traverseVectorIds(f)(p)
}
}
}
object Provenance {
type Aux[S, V, T, P0] = Provenance[S, V, T] { type P = P0 }
}
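// Usage sketch: given a concrete instance `prov: Provenance.Aux[S, V, T, P0]`,
// importing `prov.syntax._` enables the operators defined above, e.g.
//
//   import prov.syntax._
//   val joined = (l ∧ r).squash   // l, r: prov.P, assumed in scope
//   val keys = l ⋈ r              // identity comparisons for an autojoin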
|
quasar-analytics/quasar
|
qsu/src/main/scala/quasar/qsu/mra/Provenance.scala
|
Scala
|
apache-2.0
| 6,804 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive
import java.lang.{Boolean => JBoolean}
import java.nio.ByteBuffer
import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer
import org.apache.hadoop.hive.ql.exec._
import org.apache.hadoop.hive.ql.udf.{UDFType => HiveUDFType}
import org.apache.hadoop.hive.ql.udf.generic._
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationBuffer
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF._
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUtils.ConversionHelper
import org.apache.hadoop.hive.serde2.objectinspector.{ConstantObjectInspector, ObjectInspector, ObjectInspectorFactory}
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.ObjectInspectorOptions
import org.apache.spark.internal.Logging
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.aggregate._
import org.apache.spark.sql.catalyst.expressions.codegen.CodegenFallback
import org.apache.spark.sql.hive.HiveShim._
import org.apache.spark.sql.types._
import org.apache.spark.util.Utils
private[hive] case class HiveSimpleUDF(
name: String, funcWrapper: HiveFunctionWrapper, children: Seq[Expression])
extends Expression
with HiveInspectors
with CodegenFallback
with Logging
with UserDefinedExpression {
override lazy val deterministic: Boolean = isUDFDeterministic && children.forall(_.deterministic)
override def nullable: Boolean = true
@transient
lazy val function = funcWrapper.createFunction[UDF]()
@transient
private lazy val method =
function.getResolver.getEvalMethod(children.map(_.dataType.toTypeInfo).asJava)
@transient
private lazy val arguments = children.map(toInspector).toArray
@transient
private lazy val isUDFDeterministic = {
val udfType = function.getClass.getAnnotation(classOf[HiveUDFType])
udfType != null && udfType.deterministic() && !udfType.stateful()
}
override def foldable: Boolean = isUDFDeterministic && children.forall(_.foldable)
// Create parameter converters
@transient
private lazy val conversionHelper = new ConversionHelper(method, arguments)
override lazy val dataType = javaTypeToDataType(method.getGenericReturnType)
@transient
private lazy val wrappers = children.map(x => wrapperFor(toInspector(x), x.dataType)).toArray
@transient
lazy val unwrapper = unwrapperFor(ObjectInspectorFactory.getReflectionObjectInspector(
method.getGenericReturnType, ObjectInspectorOptions.JAVA))
@transient
private lazy val cached: Array[AnyRef] = new Array[AnyRef](children.length)
@transient
private lazy val inputDataTypes: Array[DataType] = children.map(_.dataType).toArray
// TODO: Finish input output types.
override def eval(input: InternalRow): Any = {
val inputs = wrap(children.map(_.eval(input)), wrappers, cached, inputDataTypes)
val ret = FunctionRegistry.invoke(
method,
function,
conversionHelper.convertIfNecessary(inputs : _*): _*)
unwrapper(ret)
}
override def toString: String = {
s"$nodeName#${funcWrapper.functionClassName}(${children.mkString(",")})"
}
override def prettyName: String = name
override def sql: String = s"$name(${children.map(_.sql).mkString(", ")})"
}
// Adapter from Catalyst ExpressionResult to Hive DeferredObject
private[hive] class DeferredObjectAdapter(oi: ObjectInspector, dataType: DataType)
extends DeferredObject with HiveInspectors {
private val wrapper = wrapperFor(oi, dataType)
private var func: () => Any = _
def set(func: () => Any): Unit = {
this.func = func
}
override def prepare(i: Int): Unit = {}
override def get(): AnyRef = wrapper(func()).asInstanceOf[AnyRef]
}
private[hive] case class HiveGenericUDF(
name: String, funcWrapper: HiveFunctionWrapper, children: Seq[Expression])
extends Expression
with HiveInspectors
with CodegenFallback
with Logging
with UserDefinedExpression {
override def nullable: Boolean = true
override lazy val deterministic: Boolean = isUDFDeterministic && children.forall(_.deterministic)
override def foldable: Boolean =
isUDFDeterministic && returnInspector.isInstanceOf[ConstantObjectInspector]
@transient
lazy val function = funcWrapper.createFunction[GenericUDF]()
@transient
private lazy val argumentInspectors = children.map(toInspector)
@transient
private lazy val returnInspector = {
function.initializeAndFoldConstants(argumentInspectors.toArray)
}
@transient
private lazy val unwrapper = unwrapperFor(returnInspector)
@transient
private lazy val isUDFDeterministic = {
val udfType = function.getClass.getAnnotation(classOf[HiveUDFType])
udfType != null && udfType.deterministic() && !udfType.stateful()
}
@transient
private lazy val deferredObjects = argumentInspectors.zip(children).map { case (inspect, child) =>
new DeferredObjectAdapter(inspect, child.dataType)
}.toArray[DeferredObject]
override lazy val dataType: DataType = inspectorToDataType(returnInspector)
override def eval(input: InternalRow): Any = {
returnInspector // Make sure initialized.
var i = 0
val length = children.length
while (i < length) {
val idx = i
deferredObjects(i).asInstanceOf[DeferredObjectAdapter]
.set(() => children(idx).eval(input))
i += 1
}
unwrapper(function.evaluate(deferredObjects))
}
override def prettyName: String = name
override def toString: String = {
s"$nodeName#${funcWrapper.functionClassName}(${children.mkString(",")})"
}
}
/**
* Converts a Hive Generic User Defined Table Generating Function (UDTF) to a
* `Generator`. Note that the semantics of Generators do not allow
* Generators to maintain state in between input rows. Thus UDTFs that rely on partitioning
* dependent operations like calls to `close()` before producing output will not operate the same as
* in Hive. However, in practice this should not affect compatibility for most sane UDTFs
* (e.g. explode or GenericUDTFParseUrlTuple).
*
* Operators that require maintaining state in between input rows should instead be implemented as
* user defined aggregations, which have clean semantics even in a partitioned execution.
*/
private[hive] case class HiveGenericUDTF(
name: String,
funcWrapper: HiveFunctionWrapper,
children: Seq[Expression])
extends Generator with HiveInspectors with CodegenFallback with UserDefinedExpression {
@transient
protected lazy val function: GenericUDTF = {
val fun: GenericUDTF = funcWrapper.createFunction()
fun.setCollector(collector)
fun
}
@transient
protected lazy val inputInspectors = children.map(toInspector)
@transient
protected lazy val outputInspector = function.initialize(inputInspectors.toArray)
@transient
protected lazy val udtInput = new Array[AnyRef](children.length)
@transient
protected lazy val collector = new UDTFCollector
override lazy val elementSchema = StructType(outputInspector.getAllStructFieldRefs.asScala.map {
field => StructField(field.getFieldName, inspectorToDataType(field.getFieldObjectInspector),
nullable = true)
}.toSeq)
@transient
private lazy val inputDataTypes: Array[DataType] = children.map(_.dataType).toArray
@transient
private lazy val wrappers = children.map(x => wrapperFor(toInspector(x), x.dataType)).toArray
@transient
private lazy val unwrapper = unwrapperFor(outputInspector)
override def eval(input: InternalRow): TraversableOnce[InternalRow] = {
outputInspector // Make sure initialized.
val inputProjection = new InterpretedProjection(children)
function.process(wrap(inputProjection(input), wrappers, udtInput, inputDataTypes))
collector.collectRows()
}
protected class UDTFCollector extends Collector {
var collected = new ArrayBuffer[InternalRow]
override def collect(input: java.lang.Object): Unit = {
// We need to clone the input here because implementations of
// GenericUDTF reuse the same object. Luckily they are always an array, so
// it is easy to clone.
collected += unwrapper(input).asInstanceOf[InternalRow]
}
def collectRows(): Seq[InternalRow] = {
val toCollect = collected
collected = new ArrayBuffer[InternalRow]
toCollect.toSeq
}
}
override def terminate(): TraversableOnce[InternalRow] = {
outputInspector // Make sure initialized.
function.close()
collector.collectRows()
}
override def toString: String = {
s"$nodeName#${funcWrapper.functionClassName}(${children.mkString(",")})"
}
override def prettyName: String = name
}
/**
* While being evaluated by Spark SQL, the aggregation state of a Hive UDAF may be in the following
* three formats:
*
* 1. An instance of some concrete `GenericUDAFEvaluator.AggregationBuffer` class
*
* This is the native Hive representation of an aggregation state. Hive `GenericUDAFEvaluator`
* methods like `iterate()`, `merge()`, `terminatePartial()`, and `terminate()` use this format.
* We call these methods to evaluate Hive UDAFs.
*
* 2. A Java object that can be inspected using the `ObjectInspector` returned by the
* `GenericUDAFEvaluator.init()` method.
*
* Hive uses this format to produce a serializable aggregation state so that it can shuffle
* partial aggregation results. Whenever we need to convert a Hive `AggregationBuffer` instance
* into a Spark SQL value, we have to convert it to this format first and then do the conversion
* with the help of `ObjectInspector`s.
*
* 3. A Spark SQL value
*
* We use this format for serializing Hive UDAF aggregation states on Spark side. To be more
* specific, we convert `AggregationBuffer`s into equivalent Spark SQL values, write them into
* `UnsafeRow`s, and then retrieve the byte array behind those `UnsafeRow`s as serialization
* results.
*
* We may use the following methods to convert the aggregation state back and forth:
*
* - `wrap()`/`wrapperFor()`: from 3 to 1
* - `unwrap()`/`unwrapperFor()`: from 1 to 3
* - `GenericUDAFEvaluator.terminatePartial()`: from 2 to 3
*
* Note that, Hive UDAF is initialized with aggregate mode, and some specific Hive UDAFs can't
* mix UPDATE and MERGE actions during its life cycle. However, Spark may do UPDATE on a UDAF and
* then do MERGE, in case of hash aggregate falling back to sort aggregate. To work around this
* issue, we track the ability to do MERGE in the Hive UDAF aggregate buffer. If Spark does
* UPDATE then MERGE, we can detect it and re-create the aggregate buffer with a different
* aggregate mode.
*/
private[hive] case class HiveUDAFFunction(
name: String,
funcWrapper: HiveFunctionWrapper,
children: Seq[Expression],
isUDAFBridgeRequired: Boolean = false,
mutableAggBufferOffset: Int = 0,
inputAggBufferOffset: Int = 0)
extends TypedImperativeAggregate[HiveUDAFBuffer]
with HiveInspectors
with UserDefinedExpression {
override def withNewMutableAggBufferOffset(newMutableAggBufferOffset: Int): ImperativeAggregate =
copy(mutableAggBufferOffset = newMutableAggBufferOffset)
override def withNewInputAggBufferOffset(newInputAggBufferOffset: Int): ImperativeAggregate =
copy(inputAggBufferOffset = newInputAggBufferOffset)
// Hive `ObjectInspector`s for all child expressions (input parameters of the function).
@transient
private lazy val inputInspectors = children.map(toInspector).toArray
// Spark SQL data types of input parameters.
@transient
private lazy val inputDataTypes: Array[DataType] = children.map(_.dataType).toArray
private def newEvaluator(): GenericUDAFEvaluator = {
val resolver = if (isUDAFBridgeRequired) {
new GenericUDAFBridge(funcWrapper.createFunction[UDAF]())
} else {
funcWrapper.createFunction[AbstractGenericUDAFResolver]()
}
val clazz = Utils.classForName(classOf[SimpleGenericUDAFParameterInfo].getName)
if (HiveUtils.isHive23) {
val ctor = clazz.getDeclaredConstructor(
classOf[Array[ObjectInspector]], JBoolean.TYPE, JBoolean.TYPE, JBoolean.TYPE)
val args = Array[AnyRef](inputInspectors, JBoolean.FALSE, JBoolean.FALSE, JBoolean.FALSE)
val parameterInfo = ctor.newInstance(args: _*).asInstanceOf[SimpleGenericUDAFParameterInfo]
resolver.getEvaluator(parameterInfo)
} else {
val ctor = clazz.getDeclaredConstructor(
classOf[Array[ObjectInspector]], JBoolean.TYPE, JBoolean.TYPE)
val args = Array[AnyRef](inputInspectors, JBoolean.FALSE, JBoolean.FALSE)
val parameterInfo = ctor.newInstance(args: _*).asInstanceOf[SimpleGenericUDAFParameterInfo]
resolver.getEvaluator(parameterInfo)
}
}
private case class HiveEvaluator(
evaluator: GenericUDAFEvaluator,
objectInspector: ObjectInspector)
// The UDAF evaluator used to consume raw input rows and produce partial aggregation results.
// Hive `ObjectInspector` used to inspect partial aggregation results.
@transient
private lazy val partial1HiveEvaluator = {
val evaluator = newEvaluator()
HiveEvaluator(evaluator, evaluator.init(GenericUDAFEvaluator.Mode.PARTIAL1, inputInspectors))
}
// The UDAF evaluator used to consume partial aggregation results and produce final results.
// Hive `ObjectInspector` used to inspect final results.
@transient
private lazy val finalHiveEvaluator = {
val evaluator = newEvaluator()
HiveEvaluator(
evaluator,
evaluator.init(GenericUDAFEvaluator.Mode.FINAL, Array(partial1HiveEvaluator.objectInspector)))
}
// Spark SQL data type of partial aggregation results
@transient
private lazy val partialResultDataType =
inspectorToDataType(partial1HiveEvaluator.objectInspector)
// Wrapper functions used to wrap Spark SQL input arguments into Hive specific format.
@transient
private lazy val inputWrappers = children.map(x => wrapperFor(toInspector(x), x.dataType)).toArray
// Unwrapper function used to unwrap final aggregation result objects returned by Hive UDAFs into
// Spark SQL specific format.
@transient
private lazy val resultUnwrapper = unwrapperFor(finalHiveEvaluator.objectInspector)
@transient
private lazy val cached: Array[AnyRef] = new Array[AnyRef](children.length)
@transient
private lazy val aggBufferSerDe: AggregationBufferSerDe = new AggregationBufferSerDe
override def nullable: Boolean = true
override lazy val dataType: DataType = inspectorToDataType(finalHiveEvaluator.objectInspector)
override def prettyName: String = name
override def sql(isDistinct: Boolean): String = {
val distinct = if (isDistinct) "DISTINCT " else " "
s"$name($distinct${children.map(_.sql).mkString(", ")})"
}
// The hive UDAF may create different buffers to handle different inputs: original data or
// aggregate buffer. However, the Spark UDAF framework does not expose this information when
// creating the buffer. Here we return null, and create the buffer in `update` and `merge`
// on demand, so that we can know what input we are dealing with.
override def createAggregationBuffer(): HiveUDAFBuffer = null
@transient
private lazy val inputProjection = UnsafeProjection.create(children)
override def update(buffer: HiveUDAFBuffer, input: InternalRow): HiveUDAFBuffer = {
// The input is original data, we create buffer with the partial1 evaluator.
val nonNullBuffer = if (buffer == null) {
HiveUDAFBuffer(partial1HiveEvaluator.evaluator.getNewAggregationBuffer, false)
} else {
buffer
}
assert(!nonNullBuffer.canDoMerge, "can not call `merge` then `update` on a Hive UDAF.")
partial1HiveEvaluator.evaluator.iterate(
nonNullBuffer.buf, wrap(inputProjection(input), inputWrappers, cached, inputDataTypes))
nonNullBuffer
}
override def merge(buffer: HiveUDAFBuffer, input: HiveUDAFBuffer): HiveUDAFBuffer = {
// The input is aggregate buffer, we create buffer with the final evaluator.
val nonNullBuffer = if (buffer == null) {
HiveUDAFBuffer(finalHiveEvaluator.evaluator.getNewAggregationBuffer, true)
} else {
buffer
}
// It's possible that we've called `update` of this Hive UDAF, and some specific Hive UDAF
// implementation can't mix the `update` and `merge` calls during its life cycle. To work
// around it, here we create a fresh buffer with final evaluator, and merge the existing buffer
// to it, and replace the existing buffer with it.
val mergeableBuf = if (!nonNullBuffer.canDoMerge) {
val newBuf = finalHiveEvaluator.evaluator.getNewAggregationBuffer
finalHiveEvaluator.evaluator.merge(
newBuf, partial1HiveEvaluator.evaluator.terminatePartial(nonNullBuffer.buf))
HiveUDAFBuffer(newBuf, true)
} else {
nonNullBuffer
}
// The 2nd argument of the Hive `GenericUDAFEvaluator.merge()` method is an input aggregation
// buffer in the 3rd format mentioned in the ScalaDoc of this class. Originally, Hive converts
// this `AggregationBuffer`s into this format before shuffling partial aggregation results, and
// calls `GenericUDAFEvaluator.terminatePartial()` to do the conversion.
finalHiveEvaluator.evaluator.merge(
mergeableBuf.buf, partial1HiveEvaluator.evaluator.terminatePartial(input.buf))
mergeableBuf
}
override def eval(buffer: HiveUDAFBuffer): Any = {
resultUnwrapper(finalHiveEvaluator.evaluator.terminate(
if (buffer == null) {
finalHiveEvaluator.evaluator.getNewAggregationBuffer
} else {
buffer.buf
}
))
}
override def serialize(buffer: HiveUDAFBuffer): Array[Byte] = {
// Serializes an `AggregationBuffer` that holds partial aggregation results so that we can
// shuffle it for global aggregation later.
aggBufferSerDe.serialize(if (buffer == null) null else buffer.buf)
}
override def deserialize(bytes: Array[Byte]): HiveUDAFBuffer = {
// Deserializes an `AggregationBuffer` from the shuffled partial aggregation phase to prepare
// for global aggregation by merging multiple partial aggregation results within a single group.
HiveUDAFBuffer(aggBufferSerDe.deserialize(bytes), false)
}
// Helper class used to de/serialize Hive UDAF `AggregationBuffer` objects
private class AggregationBufferSerDe {
private val partialResultUnwrapper = unwrapperFor(partial1HiveEvaluator.objectInspector)
private val partialResultWrapper =
wrapperFor(partial1HiveEvaluator.objectInspector, partialResultDataType)
private val projection = UnsafeProjection.create(Array(partialResultDataType))
private val mutableRow = new GenericInternalRow(1)
def serialize(buffer: AggregationBuffer): Array[Byte] = {
// The buffer may be null if there is no input. It's unclear if the hive UDAF accepts null
// buffer, for safety we create an empty buffer here.
val nonNullBuffer = if (buffer == null) {
partial1HiveEvaluator.evaluator.getNewAggregationBuffer
} else {
buffer
}
// `GenericUDAFEvaluator.terminatePartial()` converts an `AggregationBuffer` into an object
// that can be inspected by the `ObjectInspector` returned by `GenericUDAFEvaluator.init()`.
// Then we can unwrap it to a Spark SQL value.
mutableRow.update(0, partialResultUnwrapper(
partial1HiveEvaluator.evaluator.terminatePartial(nonNullBuffer)))
val unsafeRow = projection(mutableRow)
val bytes = ByteBuffer.allocate(unsafeRow.getSizeInBytes)
unsafeRow.writeTo(bytes)
bytes.array()
}
def deserialize(bytes: Array[Byte]): AggregationBuffer = {
// `GenericUDAFEvaluator` doesn't provide any method that is capable to convert an object
// returned by `GenericUDAFEvaluator.terminatePartial()` back to an `AggregationBuffer`. The
// workaround here is creating an initial `AggregationBuffer` first and then merge the
// deserialized object into the buffer.
val buffer = finalHiveEvaluator.evaluator.getNewAggregationBuffer
val unsafeRow = new UnsafeRow(1)
unsafeRow.pointTo(bytes, bytes.length)
val partialResult = unsafeRow.get(0, partialResultDataType)
finalHiveEvaluator.evaluator.merge(buffer, partialResultWrapper(partialResult))
buffer
}
}
}
case class HiveUDAFBuffer(buf: AggregationBuffer, canDoMerge: Boolean)
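// Usage note: these expression wrappers are constructed internally by Spark when a
// Hive function is registered, e.g. via
//   spark.sql("CREATE TEMPORARY FUNCTION my_udf AS 'com.example.MyUDF'")
// (`my_udf` and the class name are illustrative).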
|
dbtsai/spark
|
sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
|
Scala
|
apache-2.0
| 21,353 |
package io.questions.model.questionnaire
import java.time._
import cats.implicits._
import cats.{ Eq, Show }
import io.circe.{ Decoder, Encoder }
import io.circe.java8.time._
import io.questions.model._
import io.questions.util._
sealed trait PrimitiveAnswer extends Product with Serializable {
def blank: PrimitiveAnswer
def isAnswered: Boolean
}
object PrimitiveAnswer extends EncoderHelpers {
final case class StringAnswer(answer: Option[String] = None) extends PrimitiveAnswer {
override def blank: PrimitiveAnswer = copy(answer = None)
override def isAnswered: Boolean = answer.isDefined
}
final case class IntAnswer(answer: Option[Int] = None) extends PrimitiveAnswer {
override def blank: PrimitiveAnswer = copy(answer = None)
override def isAnswered: Boolean = answer.isDefined
}
final case class BigDecimalAnswer(answer: Option[BigDecimal] = None) extends PrimitiveAnswer {
override def blank: PrimitiveAnswer = copy(answer = None)
override def isAnswered: Boolean = answer.isDefined
}
// answer stores the key of the enumeration, not the display value!
final case class EnumerationAnswer(answer: Option[String] = None, enumeration: EnumerationName) extends PrimitiveAnswer {
override def blank: PrimitiveAnswer = copy(answer = None)
override def isAnswered: Boolean = answer.isDefined
}
final case class DateTimeAnswer(date: Option[LocalDate] = None, time: Option[LocalTime] = None, zone: Option[ZoneId] = None)
extends PrimitiveAnswer {
override def blank: PrimitiveAnswer = copy(date = None, time = None, zone = None)
override def isAnswered: Boolean = date.isDefined
def asLocalDate: Either[String, LocalDate] = date.toRight("No LocalDate available")
def asLocalDateTime: Either[String, LocalDateTime] =
(date, time) match {
case (Some(d), Some(t)) ⇒ LocalDateTime.of(d, t).asRight[String]
case _ ⇒ "Can't construct a LocalDateTime without a LocalDate or a LocalTime".asLeft[LocalDateTime]
}
def asLocalTime: Either[String, LocalTime] = time.toRight("No LocalTime available")
def asOffsetTime: Either[String, OffsetTime] =
(date, time, zone) match {
case (Some(d), Some(t), Some(z)) ⇒
val offset = z.getRules.getOffset(LocalDateTime.of(d, t))
OffsetTime.of(t, offset).asRight[String]
case _ ⇒ "Can't construct an OffsetTime without a LocalDate, LocalTime, or a ZoneId".asLeft[OffsetTime]
}
def asZonedDateTime: Either[String, ZonedDateTime] =
(date, time, zone) match {
case (Some(d), Some(t), Some(z)) ⇒
val zoneOffset = z.getRules.getOffset(LocalDateTime.of(d, t))
ZonedDateTime.of(d, t, zoneOffset).asRight[String]
case _ ⇒ "Can't construct a ZonedDateTime without a LocalDate, LocalTime, or a ZoneId".asLeft[ZonedDateTime]
}
}
// scalastyle:off cyclomatic.complexity
def valueEquals(a1: PrimitiveAnswer, a2: PrimitiveAnswer): Either[String, Boolean] =
(a1, a2) match {
case (s1: StringAnswer, s2: StringAnswer) ⇒ (s1.answer === s2.answer).asRight
case (s1: IntAnswer, s2: IntAnswer) ⇒ (s1.answer === s2.answer).asRight
case (s1: BigDecimalAnswer, s2: BigDecimalAnswer) ⇒ (s1.answer === s2.answer).asRight
case (s1: EnumerationAnswer, s2: EnumerationAnswer) ⇒ (s1.answer === s2.answer).asRight
case (s1: DateTimeAnswer, s2: DateTimeAnswer) ⇒
(s1.date === s2.date && s1.time === s2.time && s1.zone === s2.zone).asRight
// special case to treat string and enumeration as equivalent for predicates purposes
case (s1: StringAnswer, s2: EnumerationAnswer) ⇒ (s1.answer === s2.answer).asRight
case (s1: EnumerationAnswer, s2: StringAnswer) ⇒ (s1.answer === s2.answer).asRight
case (s1, s2) ⇒ s"Mismatched types in valueEquals: ${s1.getClass} && ${s2.getClass}".asLeft
}
// scalastyle:on cyclomatic.complexity
// scalastyle:off cyclomatic.complexity
def typeCheck(a1: PrimitiveAnswer, a2: PrimitiveAnswer): Either[String, Boolean] = (a1, a2) match {
case (_: StringAnswer, _: StringAnswer) ⇒ true.asRight
case (_: IntAnswer, _: IntAnswer) ⇒ true.asRight
case (_: BigDecimalAnswer, _: BigDecimalAnswer) ⇒ true.asRight
case (_: EnumerationAnswer, _: EnumerationAnswer) ⇒ true.asRight
case (_: DateTimeAnswer, _: DateTimeAnswer) ⇒ true.asRight
// Special cases for integrity checks in predicates, so we can work with enumerations as strings.
case (_: StringAnswer, _: EnumerationAnswer) ⇒ true.asRight
case (_: EnumerationAnswer, _: StringAnswer) ⇒ true.asRight
case (s1, s2) ⇒ s"Mismatched types in typeCheck: ${s1.getClass} && ${s2.getClass}".asLeft
}
// scalastyle:on cyclomatic.complexity
implicit val show: Show[PrimitiveAnswer] = {
case s: StringAnswer => s"StringAnswer[${s.answer}]"
case i: IntAnswer => s"IntAnswer[${i.answer}]"
case bd: BigDecimalAnswer => s"BigDecimalAnswer[${bd.answer}]"
case e: EnumerationAnswer => s"EnumerationAnswer[${e.answer}]"
case d: DateTimeAnswer => s"DateTimeAnswer[date=${d.date}][time=${d.time}][zone=${d.zone}]"
}
implicit val encoder: Encoder[PrimitiveAnswer] = deriveCustomEncoder
implicit val decoder: Decoder[PrimitiveAnswer] = deriveCustomDecoder
implicit val equal: Eq[PrimitiveAnswer] = Eq.fromUniversalEquals[PrimitiveAnswer]
}
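// Usage sketch: strings and enumerations deliberately compare by key
// (the `EnumerationName` construction is an assumption):
//
//   val a = PrimitiveAnswer.StringAnswer(Some("yes"))
//   val b = PrimitiveAnswer.EnumerationAnswer(Some("yes"), EnumerationName("yesNo"))
//   PrimitiveAnswer.valueEquals(a, b)  // Right(true)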
|
channingwalton/qanda
|
questionnaire/src/main/scala/io/questions/model/questionnaire/PrimitiveAnswer.scala
|
Scala
|
mit
| 5,474 |
package org.jetbrains.sbt.runner
import java.io.File
import java.util
import java.util.jar.JarFile
import com.intellij.execution.Executor
import com.intellij.execution.configuration.EnvironmentVariablesComponent
import com.intellij.execution.configurations._
import com.intellij.execution.filters.TextConsoleBuilderFactory
import com.intellij.execution.runners.ExecutionEnvironment
import com.intellij.execution.util.JavaParametersUtil
import com.intellij.openapi.module.Module
import com.intellij.openapi.options.SettingsEditor
import com.intellij.openapi.project.Project
import com.intellij.openapi.projectRoots.Sdk
import com.intellij.openapi.util.JDOMExternalizer
import com.intellij.openapi.util.text.StringUtil
import org.jdom.Element
import org.jetbrains.android.sdk.AndroidSdkType
import org.jetbrains.sbt.project.SbtProjectResolver
import org.jetbrains.sbt.settings.SbtSystemSettings
/**
* Run configuration of sbt tasks.
*/
class SbtRunConfiguration(val project: Project, val configurationFactory: ConfigurationFactory, val name: String)
extends ModuleBasedConfiguration[RunConfigurationModule](name, new RunConfigurationModule(project), configurationFactory) {
/**
   * List of tasks to execute, in sbt syntax.
*/
private var tasks = ""
/**
* Extra java options.
*/
private var javaOptions = "-Xms512M -Xmx1024M -Xss1M -XX:+CMSClassUnloadingEnabled"
/**
* Environment variables.
*/
private val environmentVariables: java.util.Map[String, String] = new java.util.HashMap[String, String]()
private var workingDirectory: String = defaultWorkingDirectory
private def defaultWorkingDirectory = Option(project.getBaseDir).fold("")(_.getPath)
override def getValidModules: util.Collection[Module] = new java.util.ArrayList
override def getState(executor: Executor, env: ExecutionEnvironment): RunProfileState = {
    val state: SbtCommandLineState = new SbtCommandLineState(this, env)
state.setConsoleBuilder(TextConsoleBuilderFactory.getInstance.createBuilder(getProject))
state
}
override def getConfigurationEditor: SettingsEditor[_ <: RunConfiguration] = new SbtRunConfigurationEditor(project, this)
override def writeExternal(element: Element) {
super.writeExternal(element)
JDOMExternalizer.write(element, "tasks", getTasks)
JDOMExternalizer.write(element, "vmparams", getJavaOptions)
JDOMExternalizer.write(element, "workingDir", getWorkingDir)
EnvironmentVariablesComponent.writeExternal(element, getEnvironmentVariables)
}
override def readExternal(element: Element) {
super.readExternal(element)
tasks = JDOMExternalizer.readString(element, "tasks")
javaOptions = JDOMExternalizer.readString(element, "vmparams")
workingDirectory = JDOMExternalizer.readString(element, "workingDir")
EnvironmentVariablesComponent.readExternal(element, environmentVariables)
}
override def isCompileBeforeLaunchAddedByDefault: Boolean = false
def apply(params: SbtRunConfigurationForm): Unit = {
tasks = params.getTasks
javaOptions = params.getJavaOptions
workingDirectory = params.getWorkingDir
environmentVariables.clear()
environmentVariables.putAll(params.getEnvironmentVariables)
}
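  /**
   * Resolves the main class from the launcher jar's manifest, falling back to
   * sbt's standard boot class when no Main-Class attribute is present.
   */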
  def determineMainClass(launcherPath: String): String = {
    val jf = new JarFile(new File(launcherPath))
    try Option(jf.getManifest.getMainAttributes.getValue("Main-Class")).getOrElse("xsbt.boot.Boot")
    finally jf.close() // release the jar handle once the manifest has been read
  }
def getTasks: String = tasks
def getJavaOptions: String = javaOptions
def getEnvironmentVariables: util.Map[String, String] = environmentVariables
def getWorkingDir: String = if (StringUtil.isEmpty(workingDirectory)) defaultWorkingDirectory else workingDirectory
  class SbtCommandLineState(configuration: SbtRunConfiguration, environment: ExecutionEnvironment)
extends JavaCommandLineState(environment) {
def createJavaParameters(): JavaParameters = {
val params: JavaParameters = new JavaParameters
val jdk: Sdk = JavaParametersUtil.createProjectJdk(configuration.getProject, null)
try {
jdk.getSdkType match {
case _: AndroidSdkType =>
environmentVariables.put("ANDROID_HOME", jdk.getSdkModificator.getHomePath)
case _ => // do nothing
}
} catch {
case _ : NoClassDefFoundError => // no android plugin, do nothing
}
params.setWorkingDirectory(workingDirectory)
params.configureByProject(configuration.getProject, JavaParameters.JDK_ONLY, jdk)
val sbtSystemSettings: SbtSystemSettings = SbtSystemSettings.getInstance(configuration.getProject)
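      // Prefer the user-configured launcher jar when one is set; otherwise use the bundled sbt launcher.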
if (sbtSystemSettings.getCustomLauncherEnabled) {
params.getClassPath.add(sbtSystemSettings.getCustomLauncherPath)
params.setMainClass(determineMainClass(sbtSystemSettings.getCustomLauncherPath))
} else {
val launcher = SbtProjectResolver.getDefaultLauncher
params.getClassPath.add(launcher)
params.setMainClass(determineMainClass(launcher.getAbsolutePath))
}
params.setEnv(environmentVariables)
params.getVMParametersList.addParametersString(javaOptions)
params.getProgramParametersList.addParametersString(tasks)
params
}
override def ansiColoringEnabled(): Boolean = true
}
}
|
triplequote/intellij-scala
|
scala/scala-impl/src/org/jetbrains/sbt/runner/SbtRunConfiguration.scala
|
Scala
|
apache-2.0
| 5,318 |
package io.youi.stream.watcher
import java.nio.file.Path
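/**
 * Immutable record of a single file-system change: the affected path, the watched
 * directory containing it, the kinds of events observed, and the last-modified
 * time in epoch milliseconds.
 */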
case class PathEvent(path: Path, directory: Path, kinds: Set[EventKind], lastModified: Long) {
override def toString: String = s"PathEvent(path: $path, directory: $directory, kinds: $kinds, lastModified: $lastModified)"
}
|
outr/youi
|
core/jvm/src/main/scala/io/youi/stream/watcher/PathEvent.scala
|
Scala
|
mit
| 282 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package unit.kafka.server
import java.util.Properties
import kafka.message._
import kafka.server.{Defaults, KafkaConfig}
import org.apache.kafka.common.config.ConfigException
import org.junit.{Assert, Test}
import org.scalatest.junit.JUnit3Suite
import scala.collection.Map
import scala.util.Random._
class KafkaConfigConfigDefTest extends JUnit3Suite {
@Test
def testFromPropsDefaults() {
val defaults = new Properties()
defaults.put(KafkaConfig.ZkConnectProp, "127.0.0.1:2181")
// some ordinary setting
defaults.put(KafkaConfig.AdvertisedPortProp, "1818")
val props = new Properties(defaults)
val config = KafkaConfig.fromProps(props)
Assert.assertEquals(1818, config.advertisedPort)
Assert.assertEquals("KafkaConfig defaults should be retained", Defaults.ConnectionsMaxIdleMs, config.connectionsMaxIdleMs)
}
@Test
def testFromPropsEmpty() {
// only required
val p = new Properties()
p.put(KafkaConfig.ZkConnectProp, "127.0.0.1:2181")
val actualConfig = KafkaConfig.fromProps(p)
val expectedConfig = new KafkaConfig(zkConnect = "127.0.0.1:2181")
Assert.assertEquals(expectedConfig.zkConnect, actualConfig.zkConnect)
Assert.assertEquals(expectedConfig.zkSessionTimeoutMs, actualConfig.zkSessionTimeoutMs)
Assert.assertEquals(expectedConfig.zkConnectionTimeoutMs, actualConfig.zkConnectionTimeoutMs)
Assert.assertEquals(expectedConfig.zkSyncTimeMs, actualConfig.zkSyncTimeMs)
Assert.assertEquals(expectedConfig.maxReservedBrokerId, actualConfig.maxReservedBrokerId)
Assert.assertEquals(expectedConfig.brokerId, actualConfig.brokerId)
Assert.assertEquals(expectedConfig.messageMaxBytes, actualConfig.messageMaxBytes)
Assert.assertEquals(expectedConfig.numNetworkThreads, actualConfig.numNetworkThreads)
Assert.assertEquals(expectedConfig.numIoThreads, actualConfig.numIoThreads)
Assert.assertEquals(expectedConfig.backgroundThreads, actualConfig.backgroundThreads)
Assert.assertEquals(expectedConfig.queuedMaxRequests, actualConfig.queuedMaxRequests)
Assert.assertEquals(expectedConfig.port, actualConfig.port)
Assert.assertEquals(expectedConfig.hostName, actualConfig.hostName)
Assert.assertEquals(expectedConfig.advertisedHostName, actualConfig.advertisedHostName)
Assert.assertEquals(expectedConfig.advertisedPort, actualConfig.advertisedPort)
Assert.assertEquals(expectedConfig.socketSendBufferBytes, actualConfig.socketSendBufferBytes)
Assert.assertEquals(expectedConfig.socketReceiveBufferBytes, actualConfig.socketReceiveBufferBytes)
Assert.assertEquals(expectedConfig.socketRequestMaxBytes, actualConfig.socketRequestMaxBytes)
Assert.assertEquals(expectedConfig.maxConnectionsPerIp, actualConfig.maxConnectionsPerIp)
Assert.assertEquals(expectedConfig.maxConnectionsPerIpOverrides, actualConfig.maxConnectionsPerIpOverrides)
Assert.assertEquals(expectedConfig.connectionsMaxIdleMs, actualConfig.connectionsMaxIdleMs)
Assert.assertEquals(expectedConfig.numPartitions, actualConfig.numPartitions)
Assert.assertEquals(expectedConfig.logDirs, actualConfig.logDirs)
Assert.assertEquals(expectedConfig.logSegmentBytes, actualConfig.logSegmentBytes)
Assert.assertEquals(expectedConfig.logRollTimeMillis, actualConfig.logRollTimeMillis)
Assert.assertEquals(expectedConfig.logRollTimeJitterMillis, actualConfig.logRollTimeJitterMillis)
Assert.assertEquals(expectedConfig.logRetentionTimeMillis, actualConfig.logRetentionTimeMillis)
Assert.assertEquals(expectedConfig.logRetentionBytes, actualConfig.logRetentionBytes)
Assert.assertEquals(expectedConfig.logCleanupIntervalMs, actualConfig.logCleanupIntervalMs)
Assert.assertEquals(expectedConfig.logCleanupPolicy, actualConfig.logCleanupPolicy)
Assert.assertEquals(expectedConfig.logCleanerThreads, actualConfig.logCleanerThreads)
Assert.assertEquals(expectedConfig.logCleanerIoMaxBytesPerSecond, actualConfig.logCleanerIoMaxBytesPerSecond, 0.0)
Assert.assertEquals(expectedConfig.logCleanerDedupeBufferSize, actualConfig.logCleanerDedupeBufferSize)
Assert.assertEquals(expectedConfig.logCleanerIoBufferSize, actualConfig.logCleanerIoBufferSize)
Assert.assertEquals(expectedConfig.logCleanerDedupeBufferLoadFactor, actualConfig.logCleanerDedupeBufferLoadFactor, 0.0)
Assert.assertEquals(expectedConfig.logCleanerBackoffMs, actualConfig.logCleanerBackoffMs)
Assert.assertEquals(expectedConfig.logCleanerMinCleanRatio, actualConfig.logCleanerMinCleanRatio, 0.0)
Assert.assertEquals(expectedConfig.logCleanerEnable, actualConfig.logCleanerEnable)
Assert.assertEquals(expectedConfig.logCleanerDeleteRetentionMs, actualConfig.logCleanerDeleteRetentionMs)
Assert.assertEquals(expectedConfig.logIndexSizeMaxBytes, actualConfig.logIndexSizeMaxBytes)
Assert.assertEquals(expectedConfig.logIndexIntervalBytes, actualConfig.logIndexIntervalBytes)
Assert.assertEquals(expectedConfig.logFlushIntervalMessages, actualConfig.logFlushIntervalMessages)
Assert.assertEquals(expectedConfig.logDeleteDelayMs, actualConfig.logDeleteDelayMs)
Assert.assertEquals(expectedConfig.logFlushSchedulerIntervalMs, actualConfig.logFlushSchedulerIntervalMs)
Assert.assertEquals(expectedConfig.logFlushIntervalMs, actualConfig.logFlushIntervalMs)
Assert.assertEquals(expectedConfig.logFlushOffsetCheckpointIntervalMs, actualConfig.logFlushOffsetCheckpointIntervalMs)
Assert.assertEquals(expectedConfig.numRecoveryThreadsPerDataDir, actualConfig.numRecoveryThreadsPerDataDir)
Assert.assertEquals(expectedConfig.autoCreateTopicsEnable, actualConfig.autoCreateTopicsEnable)
Assert.assertEquals(expectedConfig.minInSyncReplicas, actualConfig.minInSyncReplicas)
Assert.assertEquals(expectedConfig.controllerSocketTimeoutMs, actualConfig.controllerSocketTimeoutMs)
Assert.assertEquals(expectedConfig.controllerMessageQueueSize, actualConfig.controllerMessageQueueSize)
Assert.assertEquals(expectedConfig.defaultReplicationFactor, actualConfig.defaultReplicationFactor)
Assert.assertEquals(expectedConfig.replicaLagTimeMaxMs, actualConfig.replicaLagTimeMaxMs)
Assert.assertEquals(expectedConfig.replicaLagMaxMessages, actualConfig.replicaLagMaxMessages)
Assert.assertEquals(expectedConfig.replicaSocketTimeoutMs, actualConfig.replicaSocketTimeoutMs)
Assert.assertEquals(expectedConfig.replicaSocketReceiveBufferBytes, actualConfig.replicaSocketReceiveBufferBytes)
Assert.assertEquals(expectedConfig.replicaFetchMaxBytes, actualConfig.replicaFetchMaxBytes)
Assert.assertEquals(expectedConfig.replicaFetchWaitMaxMs, actualConfig.replicaFetchWaitMaxMs)
Assert.assertEquals(expectedConfig.replicaFetchMinBytes, actualConfig.replicaFetchMinBytes)
Assert.assertEquals(expectedConfig.replicaFetchBackoffMs, actualConfig.replicaFetchBackoffMs)
Assert.assertEquals(expectedConfig.numReplicaFetchers, actualConfig.numReplicaFetchers)
Assert.assertEquals(expectedConfig.replicaHighWatermarkCheckpointIntervalMs, actualConfig.replicaHighWatermarkCheckpointIntervalMs)
Assert.assertEquals(expectedConfig.fetchPurgatoryPurgeIntervalRequests, actualConfig.fetchPurgatoryPurgeIntervalRequests)
Assert.assertEquals(expectedConfig.producerPurgatoryPurgeIntervalRequests, actualConfig.producerPurgatoryPurgeIntervalRequests)
Assert.assertEquals(expectedConfig.autoLeaderRebalanceEnable, actualConfig.autoLeaderRebalanceEnable)
Assert.assertEquals(expectedConfig.leaderImbalancePerBrokerPercentage, actualConfig.leaderImbalancePerBrokerPercentage)
Assert.assertEquals(expectedConfig.leaderImbalanceCheckIntervalSeconds, actualConfig.leaderImbalanceCheckIntervalSeconds)
Assert.assertEquals(expectedConfig.uncleanLeaderElectionEnable, actualConfig.uncleanLeaderElectionEnable)
Assert.assertEquals(expectedConfig.controlledShutdownMaxRetries, actualConfig.controlledShutdownMaxRetries)
Assert.assertEquals(expectedConfig.controlledShutdownRetryBackoffMs, actualConfig.controlledShutdownRetryBackoffMs)
Assert.assertEquals(expectedConfig.controlledShutdownEnable, actualConfig.controlledShutdownEnable)
Assert.assertEquals(expectedConfig.offsetMetadataMaxSize, actualConfig.offsetMetadataMaxSize)
Assert.assertEquals(expectedConfig.offsetsLoadBufferSize, actualConfig.offsetsLoadBufferSize)
Assert.assertEquals(expectedConfig.offsetsTopicReplicationFactor, actualConfig.offsetsTopicReplicationFactor)
Assert.assertEquals(expectedConfig.offsetsTopicPartitions, actualConfig.offsetsTopicPartitions)
Assert.assertEquals(expectedConfig.offsetsTopicSegmentBytes, actualConfig.offsetsTopicSegmentBytes)
Assert.assertEquals(expectedConfig.offsetsTopicCompressionCodec, actualConfig.offsetsTopicCompressionCodec)
Assert.assertEquals(expectedConfig.offsetsRetentionMinutes, actualConfig.offsetsRetentionMinutes)
Assert.assertEquals(expectedConfig.offsetsRetentionCheckIntervalMs, actualConfig.offsetsRetentionCheckIntervalMs)
Assert.assertEquals(expectedConfig.offsetCommitTimeoutMs, actualConfig.offsetCommitTimeoutMs)
Assert.assertEquals(expectedConfig.offsetCommitRequiredAcks, actualConfig.offsetCommitRequiredAcks)
Assert.assertEquals(expectedConfig.deleteTopicEnable, actualConfig.deleteTopicEnable)
Assert.assertEquals(expectedConfig.compressionType, actualConfig.compressionType)
}
  // nextInt(Int.MaxValue - x) yields [0, Int.MaxValue - x - 1], so the sum stays within Int range.
  private def atLeastXIntProp(x: Int): String = (nextInt(Int.MaxValue - x) + x).toString
  private def atLeastOneIntProp: String = atLeastXIntProp(1)
  private def inRangeIntProp(fromInc: Int, toInc: Int): String = (nextInt(toInc + 1 - fromInc) + fromInc).toString
@Test
def testFromPropsToProps() {
import scala.util.Random._
val expected = new Properties()
KafkaConfig.configNames().foreach(name => {
name match {
case KafkaConfig.ZkConnectProp => expected.setProperty(name, "127.0.0.1:2181")
case KafkaConfig.ZkSessionTimeoutMsProp => expected.setProperty(name, atLeastOneIntProp)
case KafkaConfig.ZkConnectionTimeoutMsProp => expected.setProperty(name, atLeastOneIntProp)
case KafkaConfig.ZkSyncTimeMsProp => expected.setProperty(name, atLeastOneIntProp)
case KafkaConfig.NumNetworkThreadsProp => expected.setProperty(name, atLeastOneIntProp)
case KafkaConfig.NumIoThreadsProp => expected.setProperty(name, atLeastOneIntProp)
case KafkaConfig.BackgroundThreadsProp => expected.setProperty(name, atLeastOneIntProp)
case KafkaConfig.QueuedMaxRequestsProp => expected.setProperty(name, atLeastOneIntProp)
case KafkaConfig.PortProp => expected.setProperty(name, "1234")
case KafkaConfig.HostNameProp => expected.setProperty(name, nextString(10))
case KafkaConfig.AdvertisedHostNameProp => expected.setProperty(name, nextString(10))
case KafkaConfig.AdvertisedPortProp => expected.setProperty(name, "4321")
case KafkaConfig.SocketRequestMaxBytesProp => expected.setProperty(name, atLeastOneIntProp)
case KafkaConfig.MaxConnectionsPerIpProp => expected.setProperty(name, atLeastOneIntProp)
case KafkaConfig.MaxConnectionsPerIpOverridesProp => expected.setProperty(name, "127.0.0.1:2, 127.0.0.2:3")
case KafkaConfig.NumPartitionsProp => expected.setProperty(name, "2")
case KafkaConfig.LogDirsProp => expected.setProperty(name, "/tmp/logs,/tmp/logs2")
case KafkaConfig.LogDirProp => expected.setProperty(name, "/tmp/log")
case KafkaConfig.LogSegmentBytesProp => expected.setProperty(name, atLeastXIntProp(Message.MinHeaderSize))
case KafkaConfig.LogRollTimeMillisProp => expected.setProperty(name, atLeastOneIntProp)
case KafkaConfig.LogRollTimeHoursProp => expected.setProperty(name, atLeastOneIntProp)
case KafkaConfig.LogRetentionTimeMillisProp => expected.setProperty(name, atLeastOneIntProp)
case KafkaConfig.LogRetentionTimeMinutesProp => expected.setProperty(name, atLeastOneIntProp)
case KafkaConfig.LogRetentionTimeHoursProp => expected.setProperty(name, atLeastOneIntProp)
case KafkaConfig.LogCleanupIntervalMsProp => expected.setProperty(name, atLeastOneIntProp)
case KafkaConfig.LogCleanupPolicyProp => expected.setProperty(name, randFrom(Defaults.Compact, Defaults.Delete))
case KafkaConfig.LogCleanerIoMaxBytesPerSecondProp => expected.setProperty(name, "%.1f".format(nextDouble * .9 + .1))
case KafkaConfig.LogCleanerDedupeBufferLoadFactorProp => expected.setProperty(name, "%.1f".format(nextDouble * .9 + .1))
case KafkaConfig.LogCleanerMinCleanRatioProp => expected.setProperty(name, "%.1f".format(nextDouble * .9 + .1))
case KafkaConfig.LogCleanerEnableProp => expected.setProperty(name, randFrom("true", "false"))
case KafkaConfig.LogIndexSizeMaxBytesProp => expected.setProperty(name, atLeastXIntProp(4))
case KafkaConfig.LogFlushIntervalMessagesProp => expected.setProperty(name, atLeastOneIntProp)
case KafkaConfig.NumRecoveryThreadsPerDataDirProp => expected.setProperty(name, atLeastOneIntProp)
case KafkaConfig.AutoCreateTopicsEnableProp => expected.setProperty(name, randFrom("true", "false"))
case KafkaConfig.MinInSyncReplicasProp => expected.setProperty(name, atLeastOneIntProp)
case KafkaConfig.AutoLeaderRebalanceEnableProp => expected.setProperty(name, randFrom("true", "false"))
case KafkaConfig.UncleanLeaderElectionEnableProp => expected.setProperty(name, randFrom("true", "false"))
case KafkaConfig.ControlledShutdownEnableProp => expected.setProperty(name, randFrom("true", "false"))
case KafkaConfig.OffsetsLoadBufferSizeProp => expected.setProperty(name, atLeastOneIntProp)
case KafkaConfig.OffsetsTopicPartitionsProp => expected.setProperty(name, atLeastOneIntProp)
case KafkaConfig.OffsetsTopicSegmentBytesProp => expected.setProperty(name, atLeastOneIntProp)
case KafkaConfig.OffsetsTopicCompressionCodecProp => expected.setProperty(name, randFrom(GZIPCompressionCodec.codec.toString,
SnappyCompressionCodec.codec.toString, LZ4CompressionCodec.codec.toString, NoCompressionCodec.codec.toString))
case KafkaConfig.OffsetsRetentionMinutesProp => expected.setProperty(name, atLeastOneIntProp)
case KafkaConfig.OffsetsRetentionCheckIntervalMsProp => expected.setProperty(name, atLeastOneIntProp)
case KafkaConfig.OffsetCommitTimeoutMsProp => expected.setProperty(name, atLeastOneIntProp)
case KafkaConfig.DeleteTopicEnableProp => expected.setProperty(name, randFrom("true", "false"))
        // explicit, non-trivial validations or ones with dependencies on other properties
// require(brokerId >= -1 && brokerId <= maxReservedBrokerId)
case KafkaConfig.MaxReservedBrokerIdProp => expected.setProperty(name, "100")
case KafkaConfig.BrokerIdProp => expected.setProperty(name, inRangeIntProp(0, 100))
// require(logCleanerDedupeBufferSize / logCleanerThreads > 1024 * 1024)
case KafkaConfig.LogCleanerThreadsProp => expected.setProperty(name, "2")
case KafkaConfig.LogCleanerDedupeBufferSizeProp => expected.setProperty(name, (1024 * 1024 * 3 + 1).toString)
// require(replicaFetchWaitMaxMs <= replicaSocketTimeoutMs)
case KafkaConfig.ReplicaFetchWaitMaxMsProp => expected.setProperty(name, "321")
case KafkaConfig.ReplicaSocketTimeoutMsProp => expected.setProperty(name, atLeastXIntProp(321))
// require(replicaFetchMaxBytes >= messageMaxBytes)
case KafkaConfig.MessageMaxBytesProp => expected.setProperty(name, "1234")
case KafkaConfig.ReplicaFetchMaxBytesProp => expected.setProperty(name, atLeastXIntProp(1234))
// require(replicaFetchWaitMaxMs <= replicaLagTimeMaxMs)
case KafkaConfig.ReplicaLagTimeMaxMsProp => expected.setProperty(name, atLeastXIntProp(321))
//require(offsetCommitRequiredAcks >= -1 && offsetCommitRequiredAcks <= offsetsTopicReplicationFactor)
case KafkaConfig.OffsetCommitRequiredAcksProp => expected.setProperty(name, "-1")
case KafkaConfig.OffsetsTopicReplicationFactorProp => expected.setProperty(name, inRangeIntProp(-1, Short.MaxValue))
//BrokerCompressionCodec.isValid(compressionType)
case KafkaConfig.CompressionTypeProp => expected.setProperty(name, randFrom(BrokerCompressionCodec.brokerCompressionOptions))
        case _ => expected.setProperty(name, nextInt(Int.MaxValue).toString) // all remaining properties accept any non-negative int
}
})
val actual = KafkaConfig.fromProps(expected).toProps
Assert.assertEquals(expected, actual)
}
@Test
def testFromPropsInvalid() {
def getBaseProperties(): Properties = {
val validRequiredProperties = new Properties()
validRequiredProperties.put(KafkaConfig.ZkConnectProp, "127.0.0.1")
validRequiredProperties
}
// to ensure a basis is valid - bootstraps all needed validation
KafkaConfig.fromProps(getBaseProperties())
KafkaConfig.configNames().foreach(name => {
name match {
case KafkaConfig.ZkConnectProp => // ignore string
case KafkaConfig.ZkSessionTimeoutMsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
case KafkaConfig.ZkConnectionTimeoutMsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
case KafkaConfig.ZkSyncTimeMsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
case KafkaConfig.BrokerIdProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
case KafkaConfig.NumNetworkThreadsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
case KafkaConfig.NumIoThreadsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
case KafkaConfig.BackgroundThreadsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
case KafkaConfig.QueuedMaxRequestsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
case KafkaConfig.PortProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
case KafkaConfig.HostNameProp => // ignore string
case KafkaConfig.AdvertisedHostNameProp => //ignore string
case KafkaConfig.AdvertisedPortProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
case KafkaConfig.SocketSendBufferBytesProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
case KafkaConfig.SocketReceiveBufferBytesProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
case KafkaConfig.MaxConnectionsPerIpOverridesProp =>
assertPropertyInvalid(getBaseProperties(), name, "127.0.0.1:not_a_number")
case KafkaConfig.ConnectionsMaxIdleMsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
case KafkaConfig.NumPartitionsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
case KafkaConfig.LogDirsProp => // ignore string
case KafkaConfig.LogDirProp => // ignore string
case KafkaConfig.LogSegmentBytesProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", Message.MinHeaderSize - 1)
case KafkaConfig.LogRollTimeMillisProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
case KafkaConfig.LogRollTimeHoursProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
case KafkaConfig.LogRetentionTimeMillisProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
case KafkaConfig.LogRetentionTimeMinutesProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
case KafkaConfig.LogRetentionTimeHoursProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
case KafkaConfig.LogRetentionBytesProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
case KafkaConfig.LogCleanupIntervalMsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
case KafkaConfig.LogCleanupPolicyProp => assertPropertyInvalid(getBaseProperties(), name, "unknown_policy", "0")
case KafkaConfig.LogCleanerIoMaxBytesPerSecondProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
case KafkaConfig.LogCleanerDedupeBufferSizeProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "1024")
case KafkaConfig.LogCleanerDedupeBufferLoadFactorProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
case KafkaConfig.LogCleanerEnableProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_boolean")
case KafkaConfig.LogCleanerDeleteRetentionMsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
case KafkaConfig.LogCleanerMinCleanRatioProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
case KafkaConfig.LogIndexSizeMaxBytesProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "3")
case KafkaConfig.LogFlushIntervalMessagesProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
case KafkaConfig.LogFlushSchedulerIntervalMsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
case KafkaConfig.LogFlushIntervalMsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
case KafkaConfig.NumRecoveryThreadsPerDataDirProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
case KafkaConfig.AutoCreateTopicsEnableProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_boolean", "0")
case KafkaConfig.MinInSyncReplicasProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
case KafkaConfig.ControllerSocketTimeoutMsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
case KafkaConfig.ControllerMessageQueueSizeProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
case KafkaConfig.DefaultReplicationFactorProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
case KafkaConfig.ReplicaLagTimeMaxMsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
case KafkaConfig.ReplicaLagMaxMessagesProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
case KafkaConfig.ReplicaSocketTimeoutMsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "-2")
case KafkaConfig.ReplicaSocketReceiveBufferBytesProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
case KafkaConfig.ReplicaFetchMaxBytesProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
case KafkaConfig.ReplicaFetchWaitMaxMsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
case KafkaConfig.ReplicaFetchMinBytesProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
case KafkaConfig.NumReplicaFetchersProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
case KafkaConfig.ReplicaHighWatermarkCheckpointIntervalMsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
case KafkaConfig.FetchPurgatoryPurgeIntervalRequestsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
case KafkaConfig.ProducerPurgatoryPurgeIntervalRequestsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
case KafkaConfig.AutoLeaderRebalanceEnableProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_boolean", "0")
case KafkaConfig.LeaderImbalancePerBrokerPercentageProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
case KafkaConfig.LeaderImbalanceCheckIntervalSecondsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
case KafkaConfig.UncleanLeaderElectionEnableProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_boolean", "0")
case KafkaConfig.ControlledShutdownMaxRetriesProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
case KafkaConfig.ControlledShutdownRetryBackoffMsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
case KafkaConfig.ControlledShutdownEnableProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_boolean", "0")
case KafkaConfig.OffsetMetadataMaxSizeProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number")
case KafkaConfig.OffsetsLoadBufferSizeProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
case KafkaConfig.OffsetsTopicReplicationFactorProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
case KafkaConfig.OffsetsTopicPartitionsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
case KafkaConfig.OffsetsTopicSegmentBytesProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
case KafkaConfig.OffsetsTopicCompressionCodecProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "-1")
case KafkaConfig.OffsetsRetentionMinutesProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
case KafkaConfig.OffsetsRetentionCheckIntervalMsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
case KafkaConfig.OffsetCommitTimeoutMsProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "0")
case KafkaConfig.OffsetCommitRequiredAcksProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "-2")
case KafkaConfig.DeleteTopicEnableProp => assertPropertyInvalid(getBaseProperties(), name, "not_a_boolean", "0")
        case _ => assertPropertyInvalid(getBaseProperties(), name, "not_a_number", "-1") // remaining non-negative int properties reject negatives
}
})
}
@Test
def testSpecificProperties(): Unit = {
val defaults = new Properties()
defaults.put(KafkaConfig.ZkConnectProp, "127.0.0.1:2181")
    // ZkConnectionTimeoutMs defaults to ZkSessionTimeoutMs when unset
defaults.put(KafkaConfig.ZkSessionTimeoutMsProp, "1234")
defaults.put(KafkaConfig.MaxReservedBrokerIdProp, "1")
defaults.put(KafkaConfig.BrokerIdProp, "1")
defaults.put(KafkaConfig.HostNameProp, "127.0.0.1")
defaults.put(KafkaConfig.PortProp, "1122")
defaults.put(KafkaConfig.MaxConnectionsPerIpOverridesProp, "127.0.0.1:2, 127.0.0.2:3")
defaults.put(KafkaConfig.LogDirProp, "/tmp1,/tmp2")
defaults.put(KafkaConfig.LogRollTimeHoursProp, "12")
defaults.put(KafkaConfig.LogRollTimeJitterHoursProp, "11")
defaults.put(KafkaConfig.LogRetentionTimeHoursProp, "10")
    // LogFlushIntervalMs defaults to LogFlushSchedulerIntervalMs when unset
defaults.put(KafkaConfig.LogFlushSchedulerIntervalMsProp, "123")
defaults.put(KafkaConfig.OffsetsTopicCompressionCodecProp, SnappyCompressionCodec.codec.toString)
val config = KafkaConfig.fromProps(defaults)
Assert.assertEquals("127.0.0.1:2181", config.zkConnect)
Assert.assertEquals(1234, config.zkConnectionTimeoutMs)
Assert.assertEquals(1, config.maxReservedBrokerId)
Assert.assertEquals(1, config.brokerId)
Assert.assertEquals("127.0.0.1", config.hostName)
Assert.assertEquals(1122, config.advertisedPort)
Assert.assertEquals("127.0.0.1", config.advertisedHostName)
Assert.assertEquals(Map("127.0.0.1" -> 2, "127.0.0.2" -> 3), config.maxConnectionsPerIpOverrides)
Assert.assertEquals(List("/tmp1", "/tmp2"), config.logDirs)
Assert.assertEquals(12 * 60L * 1000L * 60, config.logRollTimeMillis)
Assert.assertEquals(11 * 60L * 1000L * 60, config.logRollTimeJitterMillis)
Assert.assertEquals(10 * 60L * 1000L * 60, config.logRetentionTimeMillis)
Assert.assertEquals(123L, config.logFlushIntervalMs)
Assert.assertEquals(SnappyCompressionCodec, config.offsetsTopicCompressionCodec)
}
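  /**
   * Applies each candidate value to a fresh copy of the valid base properties and
   * asserts that KafkaConfig.fromProps rejects it with an exception.
   */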
private def assertPropertyInvalid(validRequiredProps: => Properties, name: String, values: Any*) {
values.foreach((value) => {
val props = validRequiredProps
props.setProperty(name, value.toString)
intercept[Exception] {
KafkaConfig.fromProps(props)
}
})
}
private def randFrom[T](choices: T*): T = {
import scala.util.Random
choices(Random.nextInt(choices.size))
}
private def randFrom[T](choices: List[T]): T = {
import scala.util.Random
choices(Random.nextInt(choices.size))
}
}
|
WillCh/cs286A
|
dataMover/kafka/core/src/test/scala/unit/kafka/server/KafkaConfigConfigDefTest.scala
|
Scala
|
bsd-2-clause
| 29,344 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.approval.retriever
import uk.gov.hmrc.ct.accounts.approval.boxes.{CompaniesHouseAccountsApproval, HmrcAccountsApproval}
import uk.gov.hmrc.ct.accounts.retriever.AccountsBoxRetriever
import uk.gov.hmrc.ct.box.retriever.FilingAttributesBoxValueRetriever
trait CoHoAccountsApprovalBoxRetriever extends AccountsBoxRetriever {
self: FilingAttributesBoxValueRetriever =>
def companiesHouseAccountsApproval(): CompaniesHouseAccountsApproval
}
|
pncampbell/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/accounts/approval/retriever/CoHoAccountsApprovalBoxRetriever.scala
|
Scala
|
apache-2.0
| 1,080 |
package org.joda.time
import java.io.Serializable
import org.joda.time.base.BaseInterval
import org.joda.time.chrono.ISOChronology
object Interval {
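  /** Parses an interval from its ISO-8601 string form, e.g. `Interval.parse("2004-06-09/2004-07-10")`. */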
def parse(str: String): Interval = new Interval(str)
}
@SerialVersionUID(4922451897541386752L)
class Interval extends BaseInterval with ReadableInterval with Serializable {
def this(startInstant: Long, endInstant: Long) {
this()
super.auxConstructor(startInstant, endInstant, null)
}
def this(startInstant: Long, endInstant: Long, zone: DateTimeZone) {
this()
super.auxConstructor(startInstant,
endInstant,
ISOChronology.getInstance(zone))
}
def this(startInstant: Long, endInstant: Long, chronology: Chronology) {
this()
super.auxConstructor(startInstant, endInstant, chronology)
}
def this(start: ReadableInstant, end: ReadableInstant) {
this()
super.auxConstructor(start, end)
}
def this(start: ReadableInstant, duration: ReadableDuration) {
this()
super.auxConstructor(start, duration)
}
def this(duration: ReadableDuration, end: ReadableInstant) {
this()
super.auxConstructor(duration, end)
}
def this(start: ReadableInstant, period: ReadablePeriod) {
this()
super.auxConstructor(start, period)
}
def this(period: ReadablePeriod, end: ReadableInstant) {
this()
super.auxConstructor(period, end)
}
def this(interval: AnyRef) {
this()
super.auxConstructor(interval, null)
}
def this(interval: AnyRef, chronology: Chronology) {
this()
super.auxConstructor(interval, chronology)
}
override def toInterval(): Interval = this
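  /**
   * Returns the intersection of this interval with the given one, or `null`
   * when the two intervals do not overlap.
   */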
def overlap(interval: ReadableInterval): Interval = {
    val _interval = DateTimeUtils.getReadableInterval(interval)
    if (!overlaps(_interval)) {
return null
}
val start = Math.max(getStartMillis, _interval.getStartMillis)
val end = Math.min(getEndMillis, _interval.getEndMillis)
new Interval(start, end, getChronology)
}
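  /**
   * Returns the gap between this interval and the given one, or `null` when
   * they overlap or abut.
   */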
def gap(interval: ReadableInterval): Interval = {
    val _interval = DateTimeUtils.getReadableInterval(interval)
val otherStart = _interval.getStartMillis
val otherEnd = _interval.getEndMillis
val thisStart = getStartMillis
val thisEnd = getEndMillis
if (thisStart > otherEnd) {
new Interval(otherEnd, thisStart, getChronology)
} else if (otherStart > thisEnd) {
new Interval(thisEnd, otherStart, getChronology)
} else {
null
}
}
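  /**
   * Returns true when this interval and the given one meet end-to-start without
   * overlapping; a `null` argument is compared against the current instant.
   */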
def abuts(interval: ReadableInterval): Boolean = {
if (interval == null) {
val now = DateTimeUtils.currentTimeMillis()
(getStartMillis == now || getEndMillis == now)
} else {
(interval.getEndMillis == getStartMillis || getEndMillis == interval.getStartMillis)
}
}
def withChronology(chronology: Chronology): Interval = {
if (getChronology == chronology) {
return this
}
new Interval(getStartMillis, getEndMillis, chronology)
}
def withStartMillis(startInstant: Long): Interval = {
if (startInstant == getStartMillis) {
return this
}
new Interval(startInstant, getEndMillis, getChronology)
}
def withStart(start: ReadableInstant): Interval = {
val startMillis = DateTimeUtils.getInstantMillis(start)
withStartMillis(startMillis)
}
def withEndMillis(endInstant: Long): Interval = {
if (endInstant == getEndMillis) {
return this
}
new Interval(getStartMillis, endInstant, getChronology)
}
def withEnd(end: ReadableInstant): Interval = {
val endMillis = DateTimeUtils.getInstantMillis(end)
withEndMillis(endMillis)
}
def withDurationAfterStart(duration: ReadableDuration): Interval = {
val durationMillis = DateTimeUtils.getDurationMillis(duration)
if (durationMillis == toDurationMillis()) {
return this
}
val chrono = getChronology
val startMillis = getStartMillis
val endMillis = chrono.add(startMillis, durationMillis, 1)
new Interval(startMillis, endMillis, chrono)
}
def withDurationBeforeEnd(duration: ReadableDuration): Interval = {
val durationMillis = DateTimeUtils.getDurationMillis(duration)
if (durationMillis == toDurationMillis()) {
return this
}
val chrono = getChronology
val endMillis = getEndMillis
val startMillis = chrono.add(endMillis, durationMillis, -1)
new Interval(startMillis, endMillis, chrono)
}
def withPeriodAfterStart(period: ReadablePeriod): Interval = {
if (period == null) {
return withDurationAfterStart(null)
}
val chrono = getChronology
val startMillis = getStartMillis
val endMillis = chrono.add(period, startMillis, 1)
new Interval(startMillis, endMillis, chrono)
}
def withPeriodBeforeEnd(period: ReadablePeriod): Interval = {
if (period == null) {
return withDurationBeforeEnd(null)
}
val chrono = getChronology
val endMillis = getEndMillis
val startMillis = chrono.add(period, endMillis, -1)
new Interval(startMillis, endMillis, chrono)
}
}
|
mdedetrich/soda-time
|
shared/src/main/scala/org/joda/time/Interval.scala
|
Scala
|
bsd-2-clause
| 5,120 |
package de.windelknecht.stup.utils.io
import java.io.{OutputStream, InputStream}
import java.nio.channels.{Channels, WritableByteChannel, ReadableByteChannel}
import java.nio.ByteBuffer
object ChannelTools {
/**
* Copies the content of the src channel into the dest channel.
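   * At most `size` bytes are transferred, and both channels are left open.
   *
   * A minimal usage sketch (the file names here are hypothetical):
   * {{{
   * import java.nio.channels.Channels
   * val in  = Channels.newChannel(new java.io.FileInputStream("in.bin"))
   * val out = Channels.newChannel(new java.io.FileOutputStream("out.bin"))
   * ChannelTools.fastChannelCopy(src = in, dest = out)
   * in.close(); out.close()
   * }}}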
*/
def fastChannelCopy(
size: Int = Int.MaxValue,
src: ReadableByteChannel,
dest: WritableByteChannel
) {
val buffer = ByteBuffer.allocateDirect(Math.min(size, 16 * 1024))
var alreadyRead = 0L
while (src.read(buffer) != -1) {
// prepare the buffer to be drained
buffer.flip()
buffer.limit(math.min((size - alreadyRead).toInt, buffer.limit()))
alreadyRead += buffer.limit()
// write to the channel, may block
dest.write(buffer)
// If partial transfer, shift remainder down
// If buffer is empty, same as doing clear()
buffer.compact()
if(alreadyRead >= size)
return
}
// EOF will leave buffer in fill state
buffer.flip()
// make sure the buffer is fully drained.
while (buffer.hasRemaining) {
dest.write(buffer)
}
}
  /**
   * Copies the given src input stream into the given output stream, closing
   * both underlying channels when done.
   */
def fastStreamCopy(
src: InputStream,
dest: OutputStream
) {
val inputChannel = Channels.newChannel(src)
val outputChannel = Channels.newChannel(dest)
ChannelTools.fastChannelCopy(src = inputChannel, dest = outputChannel)
// cleanup
inputChannel.close()
outputChannel.close()
}
  /**
   * Copies at most `size` bytes of the given src input stream into the given
   * output stream, closing the output channel but leaving the input open.
   */
def fastStreamCopy_doNotCloseInput(
size: Long,
src: InputStream,
dest: OutputStream
) {
val inputChannel = Channels.newChannel(src)
val outputChannel = Channels.newChannel(dest)
    ChannelTools.fastChannelCopy(size = math.min(size, Int.MaxValue.toLong).toInt, src = inputChannel, dest = outputChannel)
// cleanup
outputChannel.close()
}
  /**
   * Copies the given src input stream into the given output stream, closing the
   * input channel but leaving the output open for the caller.
   */
def fastStreamCopy_doNotCloseOutput(
src: InputStream,
dest: OutputStream
) {
val inputChannel = Channels.newChannel(src)
val outputChannel = Channels.newChannel(dest)
ChannelTools.fastChannelCopy(src = inputChannel, dest = outputChannel)
// cleanup
inputChannel.close()
}
}
|
windelknecht/stup-utils
|
src/main/scala/de/windelknecht/stup/utils/io/ChannelTools.scala
|
Scala
|
mit
| 2,340 |
package breeze.optimize
/*
Copyright 2009 David Hall, Daniel Ramage
Licensed under the Apache License, Version 2.0 (the "License")
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import org.scalatest._
import org.scalatest.junit._
import org.scalatest.prop._
import org.scalacheck._
import org.junit.runner.RunWith
import breeze.linalg._
@RunWith(classOf[JUnitRunner])
class AdaptiveGradientTest extends OptimizeTestBase {
test("optimize a simple multivariate gaussian, l2") {
def optimizeThis(init2: DenseVector[Double], reg: Double) = {
val init = init2 % 100.0
      val sgd = new AdaptiveGradientDescent.L2Regularization[DenseVector[Double]]((reg % 1E3).abs, 1, 1000)
val f = new BatchDiffFunction[DenseVector[Double]] {
def calculate(x: DenseVector[Double], r: IndexedSeq[Int]) = {
(((x - 3.0) :^ 2.0).sum,(x * 2.0) - 6.0)
}
val fullRange = 0 to 1
}
val result = sgd.minimize(f,init)
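      // Coordinate-wise, minimizing (x - 3)^2 + (r/2) x^2 gives x = 6 / (2 + r) = 3 / (r/2 + 1).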
val targetValue = 3 / (sgd.regularizationConstant / 2 + 1)
val ok = norm(result :- DenseVector.ones[Double](init.size) * targetValue,2)/result.size < 2E-3
if(!ok) {
        sys.error("min " + init + " with reg: " + sgd.regularizationConstant + " gives " + result + " should be " + targetValue)
}
ok
}
check(Prop.forAll( optimizeThis _))
}
test("optimize a simple multivariate gaussian, l1") {
def optimizeThis(init2: DenseVector[Double], reg: Double) = {
val init = init2 % 100.0
      val sgd = new AdaptiveGradientDescent.L1Regularization[DenseVector[Double]](reg.abs % 10, 1E-7, 1, 600)
val f = new BatchDiffFunction[DenseVector[Double]] {
def calculate(x: DenseVector[Double], r: IndexedSeq[Int]) = {
(((x - 3.0) :^ 2.0).sum,(x * 2.0) - 6.0)
}
val fullRange = 0 to 1
}
val result = sgd.minimize(f,init)
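      // L1 soft-thresholding: the unregularized optimum 3 is shrunk by lambda/2, clamping at 0.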
val targetValue = if(sgd.lambda/2 > 3) 0.0 else 3 - sgd.lambda / 2
val ok = norm(result :- DenseVector.ones[Double](init.size) * targetValue,2)/result.size < 1E-3
if(!ok) {
        sys.error("min " + init + " with reg: " + sgd.lambda + " gives " + result + " should be " + targetValue)
}
ok
}
check(Prop.forAll( optimizeThis _))
}
}
|
ktakagaki/breeze
|
src/test/scala/breeze/optimize/AdaptiveGradientTest.scala
|
Scala
|
apache-2.0
| 2,679 |
/*
* Copyright 2001-2008 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.events
import org.scalatest._
import java.util.Date
/**
* A base class for the events that can be passed to the report function passed
* to the <code>execute</code> method of a <code>Suite</code>.
*
* @author Bill Venners
*/
sealed abstract class Event extends Ordered[Event] with java.io.Serializable {
/**
* An <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run.
*/
val ordinal: Ordinal
/**
* An optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user.
*/
val formatter: Option[Formatter]
/**
* An optional location that provides information indicating where in the source code an event originated.
* IDEs can use this information, for example, to allow the user to hop from an event report to the relevant
* line of source code.
*/
val location: Option[Location]
/**
* An optional object that can be used to pass custom information to the reporter about this event.
*/
val payload: Option[Any]
/**
* A name for the <code>Thread</code> about whose activity this event was reported.
*/
val threadName: String
/**
* A <code>Long</code> indicating the time this event was reported, expressed in terms of the
* number of milliseconds since the standard base time known as "the epoch":
* January 1, 1970, 00:00:00 GMT.
*/
val timeStamp: Long
/**
   * Compares <code>this</code> event with the event passed as <code>that</code>. Returns
   * x, where x < 0 iff this < that, x == 0 iff this == that, x > 0 iff this > that.
   *
   * @param that the event to compare to this event
   * @return an integer indicating whether this event is less than, equal to, or greater than
   * the passed event
   */
def compare(that: Event): Int = ordinal.compare(that.ordinal)
}
/**
* Event that indicates a suite (or other entity) is about to start running a test.
*
* <p>
* For example, trait <code>Suite</code> uses <code>TestStarting</code> to report
* that a test method of a <code>Suite</code> is about to be invoked.
* </p>
*
* <p>
* To create instances of this class you may
* use the factory method provided in its <a href="TestStarting$.html">companion object</a>. For example, given a
* report function named <code>report</code>, you could fire a <code>TestStarting</code> event like this:
* </p>
*
* <pre class="stHighlight">
* report(TestStarting(ordinal, userFriendlyName, suiteName, Some(thisSuite.getClass.getName), testName))
* </pre>
*
* <p>
* The suite class name parameter is optional, because suites in ScalaTest are an abstraction that
* need not necessarily correspond to one class. Nevertheless, it most cases each suite will correspond
* to a class, and when it does, the fully qualified name of that class should be reported by passing a
* <code>Some</code> for <code>suiteClassName</code>. One use for this bit of information is JUnit integration,
* because the "name" provided to a JUnit <code>org.junit.runner.Description</code> appears to usually include
* a fully qualified class name by convention.
* </p>
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param suiteName a localized name identifying the suite containing the test that is starting, suitable for presenting to the user
* @param suiteID a string ID for the suite containing the test that is starting, intended to be unique across all suites in a run
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name containing the test that is starting
* @param decodedSuiteName the decoded suite name, in case the suite name is put between backticks. None if it is same as suiteName.
* @param testName the name of the test that is starting
* @param testText the text of the test that is starting (may be the test name, or a suffix of the test name)
* @param decodedTestName the decoded name of the test, in case the name is put between backticks. None if it is same as testName.
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param location An optional location that provides information indicating where in the source code an event originated.
* @param rerunner an optional <code>Rerunner</code> that can be used to rerun the test that is starting (if <code>None</code>
* is passed, the test cannot be rerun)
* @param payload an optional object that can be used to pass custom information to the reporter about the <code>TestStarting</code> event
* @param threadName a name for the <code>Thread</code> about whose activity this event was reported
* @param timeStamp a <code>Long</code> indicating the time this event was reported, expressed in terms of the
* number of milliseconds since the standard base time known as "the epoch": January 1, 1970, 00:00:00 GMT
*
* @author Bill Venners
*/
final case class TestStarting (
ordinal: Ordinal,
suiteName: String,
suiteID: String,
suiteClassName: Option[String],
decodedSuiteName: Option[String],
testName: String,
testText: String,
decodedTestName: Option[String],
formatter: Option[Formatter] = None,
location: Option[Location] = None,
rerunner: Option[Rerunner] = None,
payload: Option[Any] = None,
threadName: String = Thread.currentThread.getName,
timeStamp: Long = (new Date).getTime
) extends Event {
if (ordinal == null)
throw new NullPointerException("ordinal was null")
if (suiteName == null)
throw new NullPointerException("suiteName was null")
if (suiteID == null)
throw new NullPointerException("suiteID was null")
if (suiteClassName == null)
throw new NullPointerException("suiteClassName was null")
if (testName == null)
throw new NullPointerException("testName was null")
if (testText == null)
throw new NullPointerException("testText was null")
if (formatter == null)
throw new NullPointerException("formatter was null")
if (location == null)
throw new NullPointerException("location was null")
if (rerunner == null)
throw new NullPointerException("rerunner was null")
if (payload == null)
throw new NullPointerException("payload was null")
if (threadName == null)
throw new NullPointerException("threadName was null")
}
/**
* Event that indicates a suite (or other entity) has completed running a test that succeeded.
*
* <p>
* For example, trait <code>Suite</code> uses <code>TestSucceeded</code> to report
* that a test method of a <code>Suite</code> returned normally
* (without throwing an <code>Exception</code>).
* </p>
*
* <p>
* To create instances of this class you may
* use the factory method provided in its <a href="TestSucceeded$.html">companion object</a>. For example, given a
* report function named <code>report</code>, you could fire a <code>TestSucceeded</code> event like this:
* </p>
*
* <pre class="stHighlight">
* report(TestSucceeded(ordinal, userFriendlyName, suiteName, Some(thisSuite.getClass.getName), testName))
* </pre>
*
* <p>
* The suite class name parameter is optional, because suites in ScalaTest are an abstraction that
 * need not necessarily correspond to one class. Nevertheless, in most cases each suite will correspond
* to a class, and when it does, the fully qualified name of that class should be reported by passing a
* <code>Some</code> for <code>suiteClassName</code>. One use for this bit of information is JUnit integration,
* because the "name" provided to a JUnit <code>org.junit.runner.Description</code> appears to usually include
* a fully qualified class name by convention.
* </p>
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param suiteName a localized name identifying the suite containing the test that has succeeded, suitable for presenting to the user
* @param suiteID a string ID for the suite containing the test that is starting, intended to be unique across all suites in a run
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name containing the test that has succeeded
* @param decodedSuiteName the decoded suite name, in case the suite name is put between backticks. None if it is same as suiteName.
* @param testName the name of the test that has succeeded
* @param testText the text of the test that has succeeded (may be the test name, or a suffix of the test name)
* @param decodedTestName the decoded name of the test, in case the name is put between backticks. None if it is same as testName.
* @param duration an optional amount of time, in milliseconds, that was required to run the test that has succeeded
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param location An optional location that provides information indicating where in the source code an event originated.
* @param rerunner an optional <code>Rerunner</code> that can be used to rerun the test that has succeeded (if <code>None</code>
* is passed, the test cannot be rerun)
* @param payload an optional object that can be used to pass custom information to the reporter about the <code>TestSucceeded</code> event
* @param threadName a name for the <code>Thread</code> about whose activity this event was reported
* @param timeStamp a <code>Long</code> indicating the time this event was reported, expressed in terms of the
* number of milliseconds since the standard base time known as "the epoch": January 1, 1970, 00:00:00 GMT
*
* @author Bill Venners
*/
final case class TestSucceeded (
ordinal: Ordinal,
suiteName: String,
suiteID: String,
suiteClassName: Option[String],
decodedSuiteName: Option[String],
testName: String,
testText: String,
decodedTestName: Option[String],
duration: Option[Long] = None,
formatter: Option[Formatter] = None,
location: Option[Location] = None,
rerunner: Option[Rerunner] = None,
payload: Option[Any] = None,
threadName: String = Thread.currentThread.getName,
timeStamp: Long = (new Date).getTime
) extends Event {
if (ordinal == null)
throw new NullPointerException("ordinal was null")
if (suiteName == null)
throw new NullPointerException("suiteName was null")
if (suiteID == null)
throw new NullPointerException("suiteID was null")
if (suiteClassName == null)
throw new NullPointerException("suiteClassName was null")
if (testName == null)
throw new NullPointerException("testName was null")
if (testText == null)
throw new NullPointerException("testText was null")
if (duration == null)
throw new NullPointerException("duration was null")
if (formatter == null)
throw new NullPointerException("formatter was null")
if (location == null)
throw new NullPointerException("location was null")
if (rerunner == null)
throw new NullPointerException("rerunner was null")
if (payload == null)
throw new NullPointerException("payload was null")
if (threadName == null)
throw new NullPointerException("threadName was null")
}
/**
* Event that indicates a suite (or other entity) has completed running a test that failed.
*
* <p>
* For example, trait <code>Suite</code> uses <code>TestFailed</code> to report
* that a test method of a <code>Suite</code> completed abruptly with an <code>Exception</code>.
* </p>
*
* <p>
* To create instances of this class you may
* use the factory method provided in its <a href="TestFailed$.html">companion object</a>. For example, given a
* report function named <code>report</code>, you could fire a <code>TestFailed</code> event like this:
* </p>
*
* <pre class="stHighlight">
* report(TestFailed(ordinal, userFriendlyName, message, suiteName, Some(thisSuite.getClass.getName), testName))
* </pre>
*
* <p>
* The suite class name parameter is optional, because suites in ScalaTest are an abstraction that
 * need not necessarily correspond to one class. Nevertheless, in most cases each suite will correspond
* to a class, and when it does, the fully qualified name of that class should be reported by passing a
* <code>Some</code> for <code>suiteClassName</code>. One use for this bit of information is JUnit integration,
* because the "name" provided to a JUnit <code>org.junit.runner.Description</code> appears to usually include
* a fully qualified class name by convention.
* </p>
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param message a localized message suitable for presenting to the user
* @param suiteName a localized name identifying the suite containing the test that has failed, suitable for presenting to the user
* @param suiteID a string ID for the suite containing the test that is starting, intended to be unique across all suites in a run
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name containing the test that has failed
* @param decodedSuiteName the decoded suite name, in case the suite name is put between backticks. None if it is same as suiteName.
* @param testName the name of the test that has failed
* @param testText the text of the test that has failed (may be the test name, or a suffix of the test name)
* @param decodedTestName the decoded name of the test, in case the name is put between backticks. None if it is same as testName.
* @param throwable an optional <code>Throwable</code> that, if a <code>Some</code>, indicates why the test has failed,
* or a <code>Throwable</code> created to capture stack trace information about the problem.
* @param duration an optional amount of time, in milliseconds, that was required to run the test that has failed
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param location An optional location that provides information indicating where in the source code an event originated.
* @param rerunner an optional <code>Rerunner</code> that can be used to rerun the test that has failed (if <code>None</code>
* is passed, the test cannot be rerun)
* @param payload an optional object that can be used to pass custom information to the reporter about the <code>TestFailed</code> event
* @param threadName a name for the <code>Thread</code> about whose activity this event was reported
* @param timeStamp a <code>Long</code> indicating the time this event was reported, expressed in terms of the
* number of milliseconds since the standard base time known as "the epoch": January 1, 1970, 00:00:00 GMT
*
* @author Bill Venners
*/
final case class TestFailed (
ordinal: Ordinal,
message: String,
suiteName: String,
suiteID: String,
suiteClassName: Option[String],
decodedSuiteName: Option[String],
testName: String,
testText: String,
decodedTestName: Option[String],
throwable: Option[Throwable] = None,
duration: Option[Long] = None,
formatter: Option[Formatter] = None,
location: Option[Location] = None,
rerunner: Option[Rerunner] = None,
payload: Option[Any] = None,
threadName: String = Thread.currentThread.getName,
timeStamp: Long = (new Date).getTime
) extends Event {
if (ordinal == null)
throw new NullPointerException("ordinal was null")
if (message == null)
throw new NullPointerException("message was null")
if (suiteName == null)
throw new NullPointerException("suiteName was null")
if (suiteID == null)
throw new NullPointerException("suiteID was null")
if (suiteClassName == null)
throw new NullPointerException("suiteClassName was null")
if (testName == null)
throw new NullPointerException("testName was null")
if (testText == null)
throw new NullPointerException("testText was null")
if (throwable == null)
throw new NullPointerException("throwable was null")
if (duration == null)
throw new NullPointerException("duration was null")
if (formatter == null)
throw new NullPointerException("formatter was null")
if (location == null)
throw new NullPointerException("location was null")
if (rerunner == null)
throw new NullPointerException("rerunner was null")
if (payload == null)
throw new NullPointerException("payload was null")
if (threadName == null)
throw new NullPointerException("threadName was null")
}
/**
* Event that indicates a suite (or other entity) has ignored a test.
*
* <p>
* For example, trait <code>Suite</code> uses <code>TestIgnored</code> to report
* that a test method of a <code>Suite</code> was ignored because it was annotated with <code>@Ignore</code>.
 * Ignored tests will not be run, but will usually be reported as a reminder to fix the broken test.
* </p>
*
* <p>
* To create instances of this class you may
* use the factory method provided in its <a href="TestIgnored$.html">companion object</a>. For example, given a
* report function named <code>report</code>, you could fire a <code>TestIgnored</code> event like this:
* </p>
*
* <pre class="stHighlight">
 * report(TestIgnored(ordinal, suiteName, suiteID, Some(thisSuite.getClass.getName), None, testName, testText, None))
* </pre>
*
* <p>
* The suite class name parameter is optional, because suites in ScalaTest are an abstraction that
 * need not necessarily correspond to one class. Nevertheless, in most cases each suite will correspond
* to a class, and when it does, the fully qualified name of that class should be reported by passing a
* <code>Some</code> for <code>suiteClassName</code>. One use for this bit of information is JUnit integration,
* because the "name" provided to a JUnit <code>org.junit.runner.Description</code> appears to usually include
* a fully qualified class name by convention.
* </p>
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param suiteName a localized name identifying the suite containing the test that was ignored, suitable for presenting to the user
 * @param suiteID a string ID for the suite containing the test that was ignored, intended to be unique across all suites in a run
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name containing the test that was ignored
 * @param decodedSuiteName the decoded suite name, in case the suite name is put between backticks. None if it is the same as suiteName.
* @param testName the name of the test that was ignored
* @param testText the text of the test that was ignored (may be the test name, or a suffix of the test name)
 * @param decodedTestName the decoded name of the test, in case the name is put between backticks. None if it is the same as testName.
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param location An optional location that provides information indicating where in the source code an event originated.
* @param payload an optional object that can be used to pass custom information to the reporter about the <code>TestIgnored</code> event
* @param threadName a name for the <code>Thread</code> about whose activity this event was reported
* @param timeStamp a <code>Long</code> indicating the time this event was reported, expressed in terms of the
* number of milliseconds since the standard base time known as "the epoch": January 1, 1970, 00:00:00 GMT
*
* @author Bill Venners
*/
final case class TestIgnored (
ordinal: Ordinal,
suiteName: String,
suiteID: String,
suiteClassName: Option[String],
decodedSuiteName: Option[String],
testName: String,
testText: String,
decodedTestName: Option[String],
formatter: Option[Formatter] = None,
location: Option[Location] = None,
payload: Option[Any] = None,
threadName: String = Thread.currentThread.getName,
timeStamp: Long = (new Date).getTime
) extends Event {
if (ordinal == null)
throw new NullPointerException("ordinal was null")
if (suiteName == null)
throw new NullPointerException("suiteName was null")
if (suiteID == null)
throw new NullPointerException("suiteID was null")
if (suiteClassName == null)
throw new NullPointerException("suiteClassName was null")
if (testName == null)
throw new NullPointerException("testName was null")
if (testText == null)
throw new NullPointerException("testText was null")
if (formatter == null)
throw new NullPointerException("formatter was null")
if (location == null)
throw new NullPointerException("location was null")
if (payload == null)
throw new NullPointerException("payload was null")
if (threadName == null)
throw new NullPointerException("threadName was null")
}
/**
* Event that indicates a test is pending, <em>i.e.</em>, it hasn't yet been implemented.
*
* <p>
* To create instances of this class you may
* use the factory method provided in its <a href="TestPending$.html">companion object</a>. For example, given a
* report function named <code>report</code>, you could fire a <code>TestPending</code> event like this:
* </p>
*
* <pre class="stHighlight">
 * report(TestPending(ordinal, suiteName, suiteID, Some(thisSuite.getClass.getName), None, testName, testText, None))
* </pre>
*
* <p>
* The suite class name parameter is optional, because suites in ScalaTest are an abstraction that
 * need not necessarily correspond to one class. Nevertheless, in most cases each suite will correspond
* to a class, and when it does, the fully qualified name of that class should be reported by passing a
* <code>Some</code> for <code>suiteClassName</code>. One use for this bit of information is JUnit integration,
* because the "name" provided to a JUnit <code>org.junit.runner.Description</code> appears to usually include
* a fully qualified class name by convention.
* </p>
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param suiteName a localized name identifying the suite containing the test that is pending, suitable for presenting to the user
 * @param suiteID a string ID for the suite containing the test that is pending, intended to be unique across all suites in a run
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name containing the test that is pending
 * @param decodedSuiteName the decoded suite name, in case the suite name is put between backticks. None if it is the same as suiteName.
* @param testName the name of the test that is pending
* @param testText the text of the test that is pending (may be the test name, or a suffix of the test name)
 * @param decodedTestName the decoded name of the test, in case the name is put between backticks. None if it is the same as testName.
* @param duration an optional amount of time, in milliseconds, that was required to run the test that is pending
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param location An optional location that provides information indicating where in the source code an event originated.
* @param payload an optional object that can be used to pass custom information to the reporter about the <code>TestPending</code> event
* @param threadName a name for the <code>Thread</code> about whose activity this event was reported
* @param timeStamp a <code>Long</code> indicating the time this event was reported, expressed in terms of the
* number of milliseconds since the standard base time known as "the epoch": January 1, 1970, 00:00:00 GMT
*
* @author Bill Venners
*/
final case class TestPending (
ordinal: Ordinal,
suiteName: String,
suiteID: String,
suiteClassName: Option[String],
decodedSuiteName: Option[String],
testName: String,
testText: String,
decodedTestName: Option[String],
duration: Option[Long] = None,
formatter: Option[Formatter] = None,
location: Option[Location] = None,
payload: Option[Any] = None,
threadName: String = Thread.currentThread.getName,
timeStamp: Long = (new Date).getTime
) extends Event {
if (ordinal == null)
throw new NullPointerException("ordinal was null")
if (suiteName == null)
throw new NullPointerException("suiteName was null")
if (suiteID == null)
throw new NullPointerException("suiteID was null")
if (suiteClassName == null)
throw new NullPointerException("suiteClassName was null")
if (testName == null)
throw new NullPointerException("testName was null")
if (testText == null)
throw new NullPointerException("testText was null")
if (duration == null)
throw new NullPointerException("duration was null")
if (formatter == null)
throw new NullPointerException("formatter was null")
if (location == null)
throw new NullPointerException("location was null")
if (payload == null)
throw new NullPointerException("payload was null")
if (threadName == null)
throw new NullPointerException("threadName was null")
}
/**
* Event that indicates a test was canceled, <em>i.e.</em>, it couldn't run because some precondition was not met.
*
* <p>
* To create instances of this class you may
* use the factory methods provided in its <a href="TestCanceled$.html">companion object</a>. For example, given a
* report function named <code>report</code>, you could fire a <code>TestCanceled</code> event like this:
* </p>
*
* <pre class="stHighlight">
 * report(TestCanceled(ordinal, message, suiteName, suiteID, Some(thisSuite.getClass.getName), None, testName, testText, None))
* </pre>
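 *
 * <p>
 * Within a test body, a cancellation is typically triggered via the <code>assume</code> or
 * <code>cancel</code> methods of <code>Assertions</code>. A brief sketch, where <code>database</code>
 * is a hypothetical resource the test depends on:
 * </p>
 *
 * <pre class="stHighlight">
 * assume(database.isAvailable, "the database was not available")
 * </pre>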
*
* <p>
* The suite class name parameter is optional, because suites in ScalaTest are an abstraction that
 * need not necessarily correspond to one class. Nevertheless, in most cases each suite will correspond
* to a class, and when it does, the fully qualified name of that class should be reported by passing a
* <code>Some</code> for <code>suiteClassName</code>. One use for this bit of information is JUnit integration,
* because the "name" provided to a JUnit <code>org.junit.runner.Description</code> appears to usually include
* a fully qualified class name by convention.
* </p>
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param suiteName a localized name identifying the suite containing the test that was canceled, suitable for presenting to the user
 * @param suiteID a string ID for the suite containing the test that was canceled, intended to be unique across all suites in a run
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name containing the test that was canceled
 * @param decodedSuiteName the decoded suite name, in case the suite name is put between backticks. None if it is the same as suiteName.
* @param testName the name of the test that was canceled
* @param testText the text of the test that was canceled (may be the test name, or a suffix of the test name)
 * @param decodedTestName the decoded name of the test, in case the name is put between backticks. None if it is the same as testName.
* @param throwable an optional <code>Throwable</code> that, if a <code>Some</code>, indicates why the test was canceled,
* or a <code>Throwable</code> created to capture stack trace information about the problem.
* @param duration an optional amount of time, in milliseconds, that was required to run the test that was canceled
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param location An optional location that provides information indicating where in the source code an event originated.
* @param payload an optional object that can be used to pass custom information to the reporter about the <code>TestCanceled</code> event
* @param threadName a name for the <code>Thread</code> about whose activity this event was reported
* @param timeStamp a <code>Long</code> indicating the time this event was reported, expressed in terms of the
* number of milliseconds since the standard base time known as "the epoch": January 1, 1970, 00:00:00 GMT
*
* @author Bill Venners
*/
// TODO: Probably add a rerunnable to TestCanceled
final case class TestCanceled (
ordinal: Ordinal,
message: String,
suiteName: String,
suiteID: String,
suiteClassName: Option[String],
decodedSuiteName: Option[String],
testName: String,
testText: String,
decodedTestName: Option[String],
throwable: Option[Throwable] = None,
duration: Option[Long] = None,
formatter: Option[Formatter] = None,
location: Option[Location] = None,
payload: Option[Any] = None,
threadName: String = Thread.currentThread.getName,
timeStamp: Long = (new Date).getTime
) extends Event {
if (ordinal == null)
throw new NullPointerException("ordinal was null")
if (message == null)
throw new NullPointerException("message was null")
if (suiteName == null)
throw new NullPointerException("suiteName was null")
if (suiteID == null)
throw new NullPointerException("suiteID was null")
if (suiteClassName == null)
throw new NullPointerException("suiteClassName was null")
if (testName == null)
throw new NullPointerException("testName was null")
if (testText == null)
throw new NullPointerException("testText was null")
  if (throwable == null)
    throw new NullPointerException("throwable was null")
  if (duration == null)
    throw new NullPointerException("duration was null")
if (formatter == null)
throw new NullPointerException("formatter was null")
if (location == null)
throw new NullPointerException("location was null")
if (payload == null)
throw new NullPointerException("payload was null")
if (threadName == null)
throw new NullPointerException("threadName was null")
}
/**
* Event that indicates a suite of tests is about to start executing.
*
* <p>
* For example, trait <code>Suite</code> and object <code>Runner</code> use <code>SuiteStarting</code> to report
* that the <code>execute</code> method of a <code>Suite</code> is about to be invoked.
* </p>
*
* <p>
* To create instances of this class you may
* use the factory method provided in its <a href="SuiteStarting$.html">companion object</a>. For example, given a
* report function named <code>report</code>, you could fire a <code>SuiteStarting</code> event like this:
* </p>
*
* <pre class="stHighlight">
 * report(SuiteStarting(ordinal, suiteName, suiteID, Some(thisSuite.getClass.getName), None))
* </pre>
*
* <p>
* The suite class name parameter is optional, because suites in ScalaTest are an abstraction that
 * need not necessarily correspond to one class. Nevertheless, in most cases each suite will correspond
* to a class, and when it does, the fully qualified name of that class should be reported by passing a
* <code>Some</code> for <code>suiteClassName</code>. One use for this bit of information is JUnit integration,
* because the "name" provided to a JUnit <code>org.junit.runner.Description</code> appears to usually include
* a fully qualified class name by convention.
* </p>
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param suiteName a localized name identifying the suite that is starting, suitable for presenting to the user
 * @param suiteID a string ID for the suite that is starting, intended to be unique across all suites in a run
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name of the suite that is starting
 * @param decodedSuiteName the decoded suite name, in case the suite name is put between backticks. None if it is the same as suiteName.
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param location An optional location that provides information indicating where in the source code an event originated.
* @param rerunner an optional <code>Rerunner</code> that can be used to rerun the suite that is starting (if <code>None</code>
* is passed, the suite cannot be rerun)
* @param payload an optional object that can be used to pass custom information to the reporter about the <code>SuiteStarting</code> event
* @param threadName a name for the <code>Thread</code> about whose activity this event was reported
* @param timeStamp a <code>Long</code> indicating the time this event was reported, expressed in terms of the
* number of milliseconds since the standard base time known as "the epoch": January 1, 1970, 00:00:00 GMT
*
* @author Bill Venners
*/
final case class SuiteStarting (
ordinal: Ordinal,
suiteName: String,
suiteID: String,
suiteClassName: Option[String],
decodedSuiteName: Option[String],
formatter: Option[Formatter] = None,
location: Option[Location] = None,
rerunner: Option[Rerunner] = None,
payload: Option[Any] = None,
threadName: String = Thread.currentThread.getName,
timeStamp: Long = (new Date).getTime
) extends Event {
if (ordinal == null)
throw new NullPointerException("ordinal was null")
if (suiteName == null)
throw new NullPointerException("suiteName was null")
if (suiteID == null)
throw new NullPointerException("suiteID was null")
if (suiteClassName == null)
throw new NullPointerException("suiteClassName was null")
if (formatter == null)
throw new NullPointerException("formatter was null")
if (location == null)
throw new NullPointerException("location was null")
if (rerunner == null)
throw new NullPointerException("rerunner was null")
if (payload == null)
throw new NullPointerException("payload was null")
if (threadName == null)
throw new NullPointerException("threadName was null")
}
/**
* Event that indicates a suite of tests has completed executing.
*
* <p>
* For example, trait <code>Suite</code> and object <code>Runner</code> use <code>SuiteCompleted</code> to report
* that the <code>execute</code> method of a <code>Suite</code>
* has returned normally (without throwing a <code>RuntimeException</code>).
* </p>
*
* <p>
* To create instances of this class you may
* use the factory method provided in its <a href="SuiteCompleted$.html">companion object</a>. For example, given a
* report function named <code>report</code>, you could fire a <code>SuiteCompleted</code> event like this:
* </p>
*
* <pre class="stHighlight">
 * report(SuiteCompleted(ordinal, suiteName, suiteID, Some(thisSuite.getClass.getName), None))
* </pre>
*
* <p>
* The suite class name parameter is optional, because suites in ScalaTest are an abstraction that
 * need not necessarily correspond to one class. Nevertheless, in most cases each suite will correspond
* to a class, and when it does, the fully qualified name of that class should be reported by passing a
* <code>Some</code> for <code>suiteClassName</code>. One use for this bit of information is JUnit integration,
* because the "name" provided to a JUnit <code>org.junit.runner.Description</code> appears to usually include
* a fully qualified class name by convention.
* </p>
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param suiteName a localized name identifying the suite that has completed, suitable for presenting to the user
* @param suiteID a string ID for the suite that has completed, intended to be unique across all suites in a run
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name containing the suite that has completed
 * @param decodedSuiteName the decoded suite name, in case the suite name is put between backticks. None if it is the same as suiteName.
* @param duration an optional amount of time, in milliseconds, that was required to execute the suite that has completed
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param location An optional location that provides information indicating where in the source code an event originated.
* @param rerunner an optional <code>Rerunner</code> that can be used to rerun the suite that has completed (if <code>None</code>
* is passed, the suite cannot be rerun)
* @param payload an optional object that can be used to pass custom information to the reporter about the <code>SuiteCompleted</code> event
* @param threadName a name for the <code>Thread</code> about whose activity this event was reported
* @param timeStamp a <code>Long</code> indicating the time this event was reported, expressed in terms of the
* number of milliseconds since the standard base time known as "the epoch": January 1, 1970, 00:00:00 GMT
*
* @author Bill Venners
*/
final case class SuiteCompleted (
ordinal: Ordinal,
suiteName: String,
suiteID: String,
suiteClassName: Option[String],
decodedSuiteName: Option[String],
duration: Option[Long] = None,
formatter: Option[Formatter] = None,
location: Option[Location] = None,
rerunner: Option[Rerunner] = None,
payload: Option[Any] = None,
threadName: String = Thread.currentThread.getName,
timeStamp: Long = (new Date).getTime
) extends Event {
if (ordinal == null)
throw new NullPointerException("ordinal was null")
if (suiteName == null)
throw new NullPointerException("suiteName was null")
if (suiteID == null)
throw new NullPointerException("suiteID was null")
if (suiteClassName == null)
throw new NullPointerException("suiteClassName was null")
if (duration == null)
throw new NullPointerException("duration was null")
if (formatter == null)
throw new NullPointerException("formatter was null")
if (location == null)
throw new NullPointerException("location was null")
if (rerunner == null)
throw new NullPointerException("rerunner was null")
if (payload == null)
throw new NullPointerException("payload was null")
if (threadName == null)
throw new NullPointerException("threadName was null")
}
/**
* Event that indicates the execution of a suite of tests has aborted, likely because of an error, prior
* to completion.
*
* <p>
* For example, trait <code>Suite</code> and object <code>Runner</code> use <code>SuiteAborted</code> to report
* that the <code>execute</code> method of a <code>Suite</code>
* has completed abruptly with a <code>RuntimeException</code>.
* </p>
*
* <p>
* To create instances of this class you may
* use the factory method provided in its <a href="SuiteAborted$.html">companion object</a>. For example, given a
* report function named <code>report</code>, you could fire a <code>SuiteAborted</code> event like this:
* </p>
*
* <pre class="stHighlight">
 * report(SuiteAborted(ordinal, message, suiteName, suiteID, Some(thisSuite.getClass.getName), None))
* </pre>
*
* <p>
* The suite class name parameter is optional, because suites in ScalaTest are an abstraction that
 * need not necessarily correspond to one class. Nevertheless, in most cases each suite will correspond
* to a class, and when it does, the fully qualified name of that class should be reported by passing a
* <code>Some</code> for <code>suiteClassName</code>. One use for this bit of information is JUnit integration,
* because the "name" provided to a JUnit <code>org.junit.runner.Description</code> appears to usually include
* a fully qualified class name by convention.
* </p>
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param message a localized message suitable for presenting to the user
* @param suiteName a localized name identifying the suite that has aborted, suitable for presenting to the user
* @param suiteID a string ID for the suite that has aborted, intended to be unique across all suites in a run
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name containing the suite that has aborted
 * @param decodedSuiteName the decoded suite name, in case the suite name is put between backticks. None if it is the same as suiteName.
* @param throwable an optional <code>Throwable</code> that, if a <code>Some</code>, indicates why the suite has aborted,
* or a <code>Throwable</code> created to capture stack trace information about the problem.
* @param duration an optional amount of time, in milliseconds, that was required to execute the suite that has aborted
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param location An optional location that provides information indicating where in the source code an event originated.
* @param rerunner an optional <code>Rerunner</code> that can be used to rerun the suite that has aborted (if <code>None</code>
* is passed, the suite cannot be rerun)
* @param payload an optional object that can be used to pass custom information to the reporter about the <code>SuiteAborted</code> event
* @param threadName a name for the <code>Thread</code> about whose activity this event was reported
* @param timeStamp a <code>Long</code> indicating the time this event was reported, expressed in terms of the
* number of milliseconds since the standard base time known as "the epoch": January 1, 1970, 00:00:00 GMT
*
* @author Bill Venners
*/
final case class SuiteAborted (
ordinal: Ordinal,
message: String,
suiteName: String,
suiteID: String,
suiteClassName: Option[String],
decodedSuiteName: Option[String],
throwable: Option[Throwable] = None,
duration: Option[Long] = None,
formatter: Option[Formatter] = None,
location: Option[Location] = None,
rerunner: Option[Rerunner] = None,
payload: Option[Any] = None,
threadName: String = Thread.currentThread.getName,
timeStamp: Long = (new Date).getTime
) extends Event {
if (ordinal == null)
throw new NullPointerException("ordinal was null")
if (message == null)
throw new NullPointerException("message was null")
if (suiteName == null)
throw new NullPointerException("suiteName was null")
if (suiteID == null)
throw new NullPointerException("suiteID was null")
if (suiteClassName == null)
throw new NullPointerException("suiteClassName was null")
if (throwable == null)
throw new NullPointerException("throwable was null")
if (duration == null)
throw new NullPointerException("duration was null")
if (formatter == null)
throw new NullPointerException("formatter was null")
if (location == null)
throw new NullPointerException("location was null")
if (rerunner == null)
throw new NullPointerException("rerunner was null")
if (payload == null)
throw new NullPointerException("payload was null")
if (threadName == null)
throw new NullPointerException("threadName was null")
}
// TODO: Put location as a val set to None
/**
 * Event that indicates a runner is about to run a suite of tests.
*
* <p>
* For example, object <code>Runner</code> reports <code>RunStarting</code> to indicate
* that the first <code>execute</code> method of a run's initial <code>Suite</code>
* is about to be invoked.
* </p>
*
* <p>
* To create instances of this class you may
* use the factory method provided in its <a href="RunStarting$.html">companion object</a>. For example, given a
* report function named <code>report</code>, you could fire a <code>RunStarting</code> event like this:
* </p>
*
* <pre class="stHighlight">
 * report(RunStarting(ordinal, testCount, configMap))
* </pre>
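 *
 * <p>
 * On the receiving side, a custom <code>Reporter</code>, whose <code>apply(event: Event)</code> method
 * is invoked for each fired event, might react to <code>RunStarting</code> as in this sketch:
 * </p>
 *
 * <pre class="stHighlight">
 * class ProgressReporter extends Reporter {
 *   def apply(event: Event) {
 *     event match {
 *       case e: RunStarting => println("expecting " + e.testCount + " tests")
 *       case _ =>
 *     }
 *   }
 * }
 * </pre>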
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param testCount the number of tests expected during this run
* @param configMap a <code>Map</code> of key-value pairs that can be used by custom <code>Reporter</code>s
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param location An optional location that provides information indicating where in the source code an event originated.
* @param payload an optional object that can be used to pass custom information to the reporter about the <code>RunStarting</code> event
* @param threadName a name for the <code>Thread</code> about whose activity this event was reported
* @param timeStamp a <code>Long</code> indicating the time this event was reported, expressed in terms of the
* number of milliseconds since the standard base time known as "the epoch": January 1, 1970, 00:00:00 GMT
*
* @throws IllegalArgumentException if <code>testCount</code> is less than zero.
*
* @author Bill Venners
*/
final case class RunStarting (
ordinal: Ordinal,
testCount: Int,
configMap: Map[String, Any],
formatter: Option[Formatter] = None,
location: Option[Location] = None,
payload: Option[Any] = None,
threadName: String = Thread.currentThread.getName,
timeStamp: Long = (new Date).getTime
) extends Event {
if (ordinal == null)
throw new NullPointerException("ordinal was null")
if (testCount < 0)
throw new IllegalArgumentException("testCount was less than zero: " + testCount)
if (configMap == null)
throw new NullPointerException("configMap was null")
if (formatter == null)
throw new NullPointerException("formatter was null")
if (location == null)
throw new NullPointerException("location was null")
if (payload == null)
throw new NullPointerException("payload was null")
if (threadName == null)
throw new NullPointerException("threadName was null")
}
/**
* Event that indicates a runner has completed running a suite of tests.
*
* <p>
* <code>Suite</code>'s <code>execute</code> method takes a <code>Stopper</code>, whose <code>stopRequested</code>
* method indicates a stop was requested. If <code>true</code> is returned by
* <code>stopRequested</code> while a suite of tests is running, the
* <code>execute</code> method should promptly
* return even if that suite hasn't finished running all of its tests.
* </p>
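 *
 * <p>
 * As a sketch of that contract, given <code>suites</code> and a <code>stopper</code> in scope, and a
 * hypothetical <code>runSuite</code> helper that invokes a suite's <code>execute</code> method:
 * </p>
 *
 * <pre class="stHighlight">
 * for (suite &lt;- suites)
 *   if (!stopper.stopRequested)
 *     runSuite(suite)
 * </pre>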
*
 * <p>If a stop was requested via the <code>Stopper</code>,
* <code>Runner</code> will report <code>RunStopped</code>
* when the <code>execute</code> method of the run's starting <code>Suite</code> returns.
* If a stop is not requested, <code>Runner</code> will report <code>RunCompleted</code>
* when the last <code>execute</code> method of the run's starting <code>Suite</code>s returns.
* </p>
*
* <p>
* ScalaTest's <code>Runner</code> fires a <code>RunCompleted</code> report with an empty <code>summary</code>, because
* the reporter is responsible for keeping track of the total number of tests reported as succeeded, failed, ignored, and pending.
 * ScalaTest's internal reporter replaces the <code>RunCompleted</code> with a new one that is identical except that it
* has a defined <code>summary</code>.
* </p>
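 *
 * <p>
 * Because <code>RunCompleted</code> is a case class, such a reporter can attach the counts it has
 * accumulated via <code>copy</code>. A sketch, where <code>runCompleted</code> is the received event
 * and <code>mySummary</code> is a <code>Summary</code> the reporter built up during the run:
 * </p>
 *
 * <pre class="stHighlight">
 * val completedWithSummary = runCompleted.copy(summary = Some(mySummary))
 * </pre>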
*
* <p>
* To create instances of this class you may
* use the factory method provided in its <a href="RunCompleted$.html">companion object</a>. For example, given a
* report function named <code>report</code>, you could fire a <code>RunCompleted</code> event like this:
* </p>
*
* <pre class="stHighlight">
* report(RunCompleted(ordinal))
* </pre>
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param duration an optional amount of time, in milliseconds, that was required by the run that has completed
* @param summary an optional summary of the number of tests that were reported as succeeded, failed, ignored, and pending
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param location An optional location that provides information indicating where in the source code an event originated.
* @param payload an optional object that can be used to pass custom information to the reporter about the <code>RunCompleted</code> event
* @param threadName a name for the <code>Thread</code> about whose activity this event was reported
* @param timeStamp a <code>Long</code> indicating the time this event was reported, expressed in terms of the
* number of milliseconds since the standard base time known as "the epoch": January 1, 1970, 00:00:00 GMT
*
* @author Bill Venners
*/
final case class RunCompleted (
ordinal: Ordinal,
duration: Option[Long] = None,
summary: Option[Summary] = None,
formatter: Option[Formatter] = None,
location: Option[Location] = None,
payload: Option[Any] = None,
threadName: String = Thread.currentThread.getName,
timeStamp: Long = (new Date).getTime
) extends Event {
if (ordinal == null)
throw new NullPointerException("ordinal was null")
if (duration == null)
throw new NullPointerException("duration was null")
if (summary == null)
throw new NullPointerException("summary was null")
if (formatter == null)
throw new NullPointerException("formatter was null")
if (location == null)
throw new NullPointerException("location was null")
if (payload == null)
throw new NullPointerException("payload was null")
if (threadName == null)
throw new NullPointerException("threadName was null")
}
/**
* Event that indicates a runner has stopped running a suite of tests prior to completion, likely
* because of a stop request.
*
* <p>
* <code>Suite</code>'s <code>execute</code> method takes a <code>Stopper</code>, whose <code>stopRequested</code>
* method indicates a stop was requested. If <code>true</code> is returned by
* <code>stopRequested</code> while a suite of tests is running, the
* <code>execute</code> method should promptly
* return even if that suite hasn't finished running all of its tests.
* </p>
*
 * <p>If a stop was requested via the <code>Stopper</code>,
* <code>Runner</code> will report <code>RunStopped</code>
* when the <code>execute</code> method of the run's starting <code>Suite</code> returns.
* If a stop is not requested, <code>Runner</code> will report <code>RunCompleted</code>
* when the last <code>execute</code> method of the run's starting <code>Suite</code>s returns.
* </p>
*
* <p>
* ScalaTest's <code>Runner</code> fires a <code>RunStopped</code> report with an empty <code>summary</code>, because
* the reporter is responsible for keeping track of the total number of tests reported as succeeded, failed, ignored, and pending.
 * ScalaTest's internal reporter replaces the <code>RunStopped</code> with a new one that is identical except that it
* has a defined <code>summary</code>.
* </p>
*
* <p>
* To create instances of this class you may
* use the factory method provided in its <a href="RunStopped$.html">companion object</a>. For example, given a
* report function named <code>report</code>, you could fire a <code>RunStopped</code> event like this:
* </p>
*
* <pre class="stHighlight">
* report(RunStopped(ordinal))
* </pre>
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param duration an optional amount of time, in milliseconds, that was required by the run that has stopped
* @param summary an optional summary of the number of tests that were reported as succeeded, failed, ignored, and pending
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param location An optional location that provides information indicating where in the source code an event originated.
* @param payload an optional object that can be used to pass custom information to the reporter about the <code>RunStopped</code> event
* @param threadName a name for the <code>Thread</code> about whose activity this event was reported
* @param timeStamp a <code>Long</code> indicating the time this event was reported, expressed in terms of the
* number of milliseconds since the standard base time known as "the epoch": January 1, 1970, 00:00:00 GMT
*
* @author Bill Venners
*/
final case class RunStopped (
ordinal: Ordinal,
duration: Option[Long] = None,
summary: Option[Summary] = None,
formatter: Option[Formatter] = None,
location: Option[Location] = None,
payload: Option[Any] = None,
threadName: String = Thread.currentThread.getName,
timeStamp: Long = (new Date).getTime
) extends Event {
if (ordinal == null)
throw new NullPointerException("ordinal was null")
if (duration == null)
throw new NullPointerException("duration was null")
if (summary == null)
throw new NullPointerException("summary was null")
if (formatter == null)
throw new NullPointerException("formatter was null")
if (location == null)
throw new NullPointerException("location was null")
if (payload == null)
throw new NullPointerException("payload was null")
if (threadName == null)
throw new NullPointerException("threadName was null")
}
/**
* Event that indicates a runner encountered an error while attempting to run a suite of tests.
*
* <p>
* For example, object <code>Runner</code> reports <code>RunAborted</code> if the
* <code>execute</code> method of any of the run's starting <code>Suite</code>s completes
* abruptly with a <code>Throwable</code>.
* </p>
*
* <p>
* ScalaTest's <code>Runner</code> fires a <code>RunAborted</code> report with an empty <code>summary</code>, because
* the reporter is responsible for keeping track of the total number of tests reported as succeeded, failed, ignored, and pending.
 * ScalaTest's internal reporter replaces the <code>RunAborted</code> with a new one that is identical except that it
* has a defined <code>summary</code>.
* </p>
*
* <p>
* To create instances of this class you may
* use the factory method provided in its <a href="RunAborted$.html">companion object</a>. For example, given a
* report function named <code>report</code>, you could fire a <code>RunAborted</code> event like this:
* </p>
*
* <pre class="stHighlight">
* report(RunAborted(ordinal, message, Some(exception)))
* </pre>
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param message a localized message suitable for presenting to the user
* @param throwable an optional <code>Throwable</code> that, if a <code>Some</code>, indicates why the suite has aborted,
* or a <code>Throwable</code> created to capture stack trace information about the problem.
* @param duration an optional amount of time, in milliseconds, that was required by the run that has aborted
* @param summary an optional summary of the number of tests that were reported as succeeded, failed, ignored, and pending
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param location An optional location that provides information indicating where in the source code an event originated.
* @param payload an optional object that can be used to pass custom information to the reporter about the <code>RunAborted</code> event
* @param threadName a name for the <code>Thread</code> about whose activity this event was reported
* @param timeStamp a <code>Long</code> indicating the time this event was reported, expressed in terms of the
* number of milliseconds since the standard base time known as "the epoch": January 1, 1970, 00:00:00 GMT
*
* @author Bill Venners
*/
final case class RunAborted (
ordinal: Ordinal,
message: String,
throwable: Option[Throwable],
duration: Option[Long] = None,
summary: Option[Summary] = None,
formatter: Option[Formatter] = None,
location: Option[Location] = None,
payload: Option[Any] = None,
threadName: String = Thread.currentThread.getName,
timeStamp: Long = (new Date).getTime
) extends Event {
if (ordinal == null)
throw new NullPointerException("ordinal was null")
if (message == null)
throw new NullPointerException("message was null")
if (throwable == null)
throw new NullPointerException("throwable was null")
if (duration == null)
throw new NullPointerException("duration was null")
if (summary == null)
throw new NullPointerException("summary was null")
if (formatter == null)
throw new NullPointerException("formatter was null")
if (location == null)
throw new NullPointerException("location was null")
if (payload == null)
throw new NullPointerException("payload was null")
if (threadName == null)
throw new NullPointerException("threadName was null")
}
/**
* Event used to provide information that is not appropriate to report via any other <code>Event</code>.
*
* <p>
* To create instances of this class you may
* use the factory method provided in its <a href="InfoProvided$.html">companion object</a>. For example, given a
 * report function named <code>report</code>, you could fire an <code>InfoProvided</code> event like this:
* </p>
*
* <pre class="stHighlight">
* report(InfoProvided(ordinal, message, Some(NameInfo(suiteName, Some(thisSuite.getClass.getName), Some(testName)))))
* </pre>
*
* <p>
* An <code>InfoProvided</code> event may be fired from anywhere. In this respect <code>InfoProvided</code> is different
* from the other events, for which it is defined whether they are fired in the context of a suite or test.
* If fired in the context of a test, the <code>InfoProvided</code> event should include a <code>NameInfo</code> in which
* <code>testName</code> is defined. If fired in the context of a suite, but not a test, the <code>InfoProvided</code> event
* should include a <code>NameInfo</code> in which <code>testName</code> is <em>not</em> defined. If fired within the context
* of neither a suite nor a test, the <code>nameInfo</code> of the <code>InfoProvided</code> event (an <code>Option[NameInfo]</code>) should be <code>None</code>.
* </p>
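 *
 * <p>
 * A sketch of the three cases, mirroring the constructor form used in the example above:
 * </p>
 *
 * <pre class="stHighlight">
 * // in the context of a test:
 * report(InfoProvided(ordinal, message, Some(NameInfo(suiteName, Some(suiteClassName), Some(testName)))))
 * // in the context of a suite, but not a test:
 * report(InfoProvided(ordinal, message, Some(NameInfo(suiteName, Some(suiteClassName), None))))
 * // in the context of neither a suite nor a test:
 * report(InfoProvided(ordinal, message, None))
 * </pre>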
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param message a localized message suitable for presenting to the user
* @param nameInfo an optional <code>NameInfo</code> that if defined, provides names for the suite and optionally the test
* in the context of which the information was provided
* @param aboutAPendingTest indicates whether the information being provided via this event is about a pending test
* @param aboutACanceledTest indicates whether the information being provided via this event is about a canceled test
* @param throwable an optional <code>Throwable</code>
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param location An optional location that provides information indicating where in the source code an event originated.
* @param payload an optional object that can be used to pass custom information to the reporter about the <code>InfoProvided</code> event
* @param threadName a name for the <code>Thread</code> about whose activity this event was reported
* @param timeStamp a <code>Long</code> indicating the time this event was reported, expressed in terms of the
* number of milliseconds since the standard base time known as "the epoch": January 1, 1970, 00:00:00 GMT
*
* @author Bill Venners
*/
final case class InfoProvided (
ordinal: Ordinal,
message: String,
nameInfo: Option[NameInfo],
aboutAPendingTest: Option[Boolean] = None,
aboutACanceledTest: Option[Boolean] = None,
throwable: Option[Throwable] = None,
formatter: Option[Formatter] = None,
location: Option[Location] = None,
payload: Option[Any] = None,
threadName: String = Thread.currentThread.getName,
timeStamp: Long = (new Date).getTime
) extends Event {
if (ordinal == null)
throw new NullPointerException("ordinal was null")
if (message == null)
throw new NullPointerException("message was null")
if (nameInfo == null)
throw new NullPointerException("nameInfo was null")
if (aboutAPendingTest == null)
throw new NullPointerException("aboutAPendingTest was null")
if (aboutACanceledTest == null)
throw new NullPointerException("aboutACanceledTest was null")
if (throwable == null)
throw new NullPointerException("throwable was null")
if (formatter == null)
throw new NullPointerException("formatter was null")
if (location == null)
throw new NullPointerException("location was null")
if (payload == null)
throw new NullPointerException("payload was null")
if (threadName == null)
throw new NullPointerException("threadName was null")
}
/**
* Event used to provide markup text for document-style reports.
*
* <p>
* To create instances of this class you may
* use the factory method provided in its <a href="MarkupProvided$.html">companion object</a>. For example, given a
* report function named <code>report</code>, you could fire a <code>MarkupProvided</code> event like this:
* </p>
*
* <pre class="stHighlight">
* report(MarkupProvided(ordinal, text, Some(NameInfo(suiteName, Some(thisSuite.getClass.getName), Some(testName)))))
* </pre>
*
* <p>
* A <code>MarkupProvided</code> event may be fired from anywhere. In this respect <code>MarkupProvided</code> is different
* from the other events, for which it is defined whether they are fired in the context of a suite or test.
* If fired in the context of a test, the <code>MarkupProvided</code> event should include a <code>NameInfo</code> in which
* <code>testName</code> is defined. If fired in the context of a suite, but not a test, the <code>MarkupProvided</code> event
* should include a <code>NameInfo</code> in which <code>testName</code> is <em>not</em> defined. If fired within the context
* of neither a suite nor a test, the <code>nameInfo</code> of the <code>MarkupProvided</code> event (an <code>Option[NameInfo]</code>) should be <code>None</code>.
* </p>
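 *
 * <p>
 * For instance, a test documenting its behavior might fire the following, where the
 * <code>text</code> argument is ordinary Markdown (a sketch, mirroring the example above):
 * </p>
 *
 * <pre class="stHighlight">
 * report(MarkupProvided(ordinal, "Given an *empty* stack", Some(NameInfo(suiteName, Some(suiteClassName), Some(testName)))))
 * </pre>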
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param text a snippet of markup text (in Markdown format)
* @param nameInfo an optional <code>NameInfo</code> that if defined, provides names for the suite and optionally the test
* in the context of which the information was provided
* @param aboutAPendingTest indicates whether the information being provided via this event is about a pending test
* @param aboutACanceledTest indicates whether the information being provided via this event is about a canceled test
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param location An optional location that provides information indicating where in the source code an event originated.
* @param payload an optional object that can be used to pass custom information to the reporter about the <code>MarkupProvided</code> event
* @param threadName a name for the <code>Thread</code> about whose activity this event was reported
* @param timeStamp a <code>Long</code> indicating the time this event was reported, expressed in terms of the
* number of milliseconds since the standard base time known as "the epoch": January 1, 1970, 00:00:00 GMT
*
* @author Bill Venners
*/
final case class MarkupProvided (
ordinal: Ordinal,
text: String,
nameInfo: Option[NameInfo],
aboutAPendingTest: Option[Boolean] = None,
aboutACanceledTest: Option[Boolean] = None,
formatter: Option[Formatter] = None,
location: Option[Location] = None,
payload: Option[Any] = None,
threadName: String = Thread.currentThread.getName,
timeStamp: Long = (new Date).getTime
) extends Event {
if (ordinal == null)
throw new NullPointerException("ordinal was null")
if (text == null)
    throw new NullPointerException("text was null")
if (nameInfo == null)
throw new NullPointerException("nameInfo was null")
if (aboutAPendingTest == null)
throw new NullPointerException("aboutAPendingTest was null")
if (aboutACanceledTest == null)
throw new NullPointerException("aboutACanceledTest was null")
if (formatter == null)
throw new NullPointerException("formatter was null")
if (location == null)
throw new NullPointerException("location was null")
if (payload == null)
throw new NullPointerException("payload was null")
if (threadName == null)
throw new NullPointerException("threadName was null")
}
/**
* Event that indicates a new scope has been opened.
*
* <p>
* To create instances of this class you may
* use the factory method provided in its <a href="ScopeOpened$.html">companion object</a>. For example, given a
* report function named <code>report</code>, you could fire a <code>ScopeOpened</code> event like this:
* </p>
*
* <pre class="stHighlight">
 * report(ScopeOpened(ordinal, message, NameInfo(suiteName, Some(thisSuite.getClass.getName), Some(testName))))
* </pre>
*
* <p>
* A <code>ScopeOpened</code> event may be fired from within suites or tests.
* If fired in the context of a test, the <code>ScopeOpened</code> event should include a <code>NameInfo</code> in which
* <code>testName</code> is defined. If fired in the context of a suite, but not a test, the <code>ScopeOpened</code> event
* should include a <code>NameInfo</code> in which <code>testName</code> is <em>not</em> defined.
* </p>
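 *
 * <p>
 * A <code>ScopeOpened</code> is normally balanced later by a <code>ScopeClosed</code> carrying the
 * same message. A sketch of the pairing, with <code>nextOrdinal</code> standing in for a later ordinal:
 * </p>
 *
 * <pre class="stHighlight">
 * report(ScopeOpened(ordinal, "when empty", NameInfo(suiteName, Some(suiteClassName), None)))
 * // ... events for the tests nested in this scope ...
 * report(ScopeClosed(nextOrdinal, "when empty", NameInfo(suiteName, Some(suiteClassName), None)))
 * </pre>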
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param message a localized message suitable for presenting to the user
* @param nameInfo a <code>NameInfo</code> that provides names for the suite and optionally the test
* in the context of which the scope was opened
* @param aboutAPendingTest indicates whether the scope was opened in the context of a pending test
* @param aboutACanceledTest indicates whether the scope was opened in the context of a canceled test
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param location An optional location that provides information indicating where in the source code an event originated.
* @param payload an optional object that can be used to pass custom information to the reporter about the <code>ScopeOpened</code> event
* @param threadName a name for the <code>Thread</code> about whose activity this event was reported
* @param timeStamp a <code>Long</code> indicating the time this event was reported, expressed in terms of the
* number of milliseconds since the standard base time known as "the epoch": January 1, 1970, 00:00:00 GMT
*
* @author Bill Venners
*/
final case class ScopeOpened (
ordinal: Ordinal,
message: String,
nameInfo: NameInfo,
aboutAPendingTest: Option[Boolean] = None,
aboutACanceledTest: Option[Boolean] = None,
formatter: Option[Formatter] = None,
location: Option[Location] = None,
payload: Option[Any] = None,
threadName: String = Thread.currentThread.getName,
timeStamp: Long = (new Date).getTime
) extends Event {
if (ordinal == null)
throw new NullPointerException("ordinal was null")
if (message == null)
throw new NullPointerException("message was null")
if (nameInfo == null)
throw new NullPointerException("nameInfo was null")
if (aboutAPendingTest == null)
throw new NullPointerException("aboutAPendingTest was null")
if (aboutACanceledTest == null)
throw new NullPointerException("aboutACanceledTest was null")
if (formatter == null)
throw new NullPointerException("formatter was null")
if (location == null)
throw new NullPointerException("location was null")
if (payload == null)
throw new NullPointerException("payload was null")
if (threadName == null)
throw new NullPointerException("threadName was null")
}
/**
* Event that indicates a scope has been closed.
*
* <p>
* To create instances of this class you may
* use the factory method provided in its <a href="ScopeClosed$.html">companion object</a>. For example, given a
* report function named <code>report</code>, you could fire a <code>ScopeClosed</code> event like this:
* </p>
*
* <pre class="stHighlight">
 * report(ScopeClosed(ordinal, message, NameInfo(suiteName, Some(thisSuite.getClass.getName), Some(testName))))
* </pre>
*
* <p>
* A <code>ScopeClosed</code> event may be fired from within suites or tests.
* If fired in the context of a test, the <code>ScopeClosed</code> event should include a <code>NameInfo</code> in which
* <code>testName</code> is defined. If fired in the context of a suite, but not a test, the <code>ScopeClosed</code> event
* should include a <code>NameInfo</code> in which <code>testName</code> is <em>not</em> defined.
* </p>
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param message a localized message suitable for presenting to the user
* @param nameInfo a <code>NameInfo</code> that provides names for the suite and optionally the test
* in the context of which the scope was closed
* @param aboutAPendingTest indicates whether the scope was closed in the context of a pending test
* @param aboutACanceledTest indicates whether the scope was closed in the context of a canceled test
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param location An optional location that provides information indicating where in the source code an event originated.
* @param payload an optional object that can be used to pass custom information to the reporter about the <code>ScopeClosed</code> event
* @param threadName a name for the <code>Thread</code> about whose activity this event was reported
* @param timeStamp a <code>Long</code> indicating the time this event was reported, expressed in terms of the
* number of milliseconds since the standard base time known as "the epoch": January 1, 1970, 00:00:00 GMT
*
* @author Bill Venners
*/
final case class ScopeClosed (
ordinal: Ordinal,
message: String,
nameInfo: NameInfo,
aboutAPendingTest: Option[Boolean] = None,
aboutACanceledTest: Option[Boolean] = None,
formatter: Option[Formatter] = None,
location: Option[Location] = None,
payload: Option[Any] = None,
threadName: String = Thread.currentThread.getName,
timeStamp: Long = (new Date).getTime
) extends Event {
if (ordinal == null)
throw new NullPointerException("ordinal was null")
if (message == null)
throw new NullPointerException("message was null")
if (nameInfo == null)
throw new NullPointerException("nameInfo was null")
if (aboutAPendingTest == null)
throw new NullPointerException("aboutAPendingTest was null")
if (aboutACanceledTest == null)
throw new NullPointerException("aboutACanceledTest was null")
if (formatter == null)
throw new NullPointerException("formatter was null")
if (location == null)
throw new NullPointerException("location was null")
if (payload == null)
throw new NullPointerException("payload was null")
if (threadName == null)
throw new NullPointerException("threadName was null")
}
/*
/**
 * Event that indicates a runner is about to run a suite of tests.
*
* <p>
* For example, object <code>Runner</code> reports <code>RunStarting</code> to indicate
* that the first <code>execute</code> method of a run's initial <code>Suite</code>
* is about to be invoked.
* </p>
*
* <p>
* To create instances of this class you may
* use the factory method provided in its <a href="RunStarting$.html">companion object</a>. For example, given a
* report function named <code>report</code>, you could fire a <code>RunStarting</code> event like this:
* </p>
*
* <pre class="stHighlight">
* report(RunStarting(ordinal, testCount))
* </pre>
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param testCount the number of tests expected during this run
* @param configMap a <code>Map</code> of key-value pairs that can be used by custom <code>Reporter</code>s
* @param payload an optional object that can be used to pass custom information to the reporter about the <code>RunStarting</code> event
* @param threadName a name for the <code>Thread</code> about whose activity this event was reported
* @param timeStamp a <code>Long</code> indicating the time this event was reported, expressed in terms of the
* number of milliseconds since the standard base time known as "the epoch": January 1, 1970, 00:00:00 GMT
*
* @throws IllegalArgumentException if <code>testCount</code> is less than zero.
*
* @author Bill Venners
*/
final case class DiscoveryStarting (
ordinal: Ordinal,
testCount: Int,
configMap: Map[String, Any],
payload: Option[Any] = None,
threadName: String = Thread.currentThread.getName,
timeStamp: Long = (new Date).getTime
) extends Event {
if (ordinal == null)
throw new NullPointerException("ordinal was null")
if (testCount < 0)
throw new IllegalArgumentException("testCount was less than zero: " + testCount)
if (configMap == null)
throw new NullPointerException("configMap was null")
if (payload == null)
throw new NullPointerException("payload was null")
if (threadName == null)
throw new NullPointerException("threadName was null")
/**
* Location in a <code>DiscoveryStarting</code> is always set to <code>None</code>.
*/
val location: Option[Location] = None
/**
* Formatter in a <code>DiscoveryStarting</code> is always set to <code>None</code>.
*/
val formatter: Option[Formatter] = None
}
/**
 * Event that indicates a runner has completed the discovery of suites to run.
*
* <p>
* <code>Suite</code>'s <code>execute</code> method takes a <code>Stopper</code>, whose <code>stopRequested</code>
* method indicates a stop was requested. If <code>true</code> is returned by
* <code>stopRequested</code> while a suite of tests is running, the
* <code>execute</code> method should promptly
* return even if that suite hasn't finished running all of its tests.
* </p>
*
 * <p>
 * If a stop was requested via the <code>Stopper</code>, <code>Runner</code> will report <code>RunStopped</code>
* when the <code>execute</code> method of the run's starting <code>Suite</code> returns.
* If a stop is not requested, <code>Runner</code> will report <code>RunCompleted</code>
* when the last <code>execute</code> method of the run's starting <code>Suite</code>s returns.
* </p>
*
* <p>
* ScalaTest's <code>Runner</code> fires a <code>RunCompleted</code> report with an empty <code>summary</code>, because
* the reporter is responsible for keeping track of the total number of tests reported as succeeded, failed, ignored, and pending.
 * ScalaTest's internal reporter replaces the <code>RunCompleted</code> with a new one that is identical except that it
* has a defined <code>summary</code>.
* </p>
*
* <p>
* To create instances of this class you may
* use the factory method provided in its <a href="RunCompleted$.html">companion object</a>. For example, given a
* report function named <code>report</code>, you could fire a <code>RunCompleted</code> event like this:
* </p>
*
* <pre class="stHighlight">
 * report(DiscoveryCompleted(ordinal))
* </pre>
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param duration an optional amount of time, in milliseconds, that was required by the run that has completed
* @param summary an optional summary of the number of tests that were reported as succeeded, failed, ignored, and pending
 * @param payload an optional object that can be used to pass custom information to the reporter about the <code>DiscoveryCompleted</code> event
* @param threadName a name for the <code>Thread</code> about whose activity this event was reported
* @param timeStamp a <code>Long</code> indicating the time this event was reported, expressed in terms of the
* number of milliseconds since the standard base time known as "the epoch": January 1, 1970, 00:00:00 GMT
*
* @author Bill Venners
*/
final case class DiscoveryCompleted (
ordinal: Ordinal,
duration: Option[Long] = None,
summary: Option[Summary] = None,
payload: Option[Any] = None,
threadName: String = Thread.currentThread.getName,
timeStamp: Long = (new Date).getTime
) extends Event {
if (ordinal == null)
throw new NullPointerException("ordinal was null")
if (duration == null)
throw new NullPointerException("duration was null")
if (summary == null)
throw new NullPointerException("summary was null")
if (payload == null)
throw new NullPointerException("payload was null")
if (threadName == null)
throw new NullPointerException("threadName was null")
/**
* Location in a <code>DiscoveryCompleted</code> is always set to <code>None</code>.
*/
val location: Option[Location] = None
/**
* Formatter in a <code>DiscoveryCompleted</code> is always set to <code>None</code>.
*/
val formatter: Option[Formatter] = None
}
*/
/**
* Deprecated singleton object for the <a href="TestStarting.html"><code>TestStarting</code></a> event, which contains overloaded factory methods
* and an extractor method to facilitate pattern matching on <code>TestStarting</code> objects.
* This object contains methods that were in the <code>TestStarting</code> companion object prior to ScalaTest 2.0. If you get a compiler error when upgrading
* to 2.0 for one of the methods formerly in the companion object, a quick way to fix it is to put <code>Deprecated</code> in front of your call.
* Eventually you will need to fix it properly, as this singleton object is deprecated and will be removed in a future version of ScalaTest, but
* this will work as a quick fix to get you compiling again.
*
* <p>
* All factory methods throw <code>NullPointerException</code> if any of the passed values are <code>null</code>.
* </p>
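 *
 * <p>
 * For example (a hedged sketch; <code>report</code>, <code>ordinal</code>, <code>suiteName</code>,
 * <code>suiteClassName</code>, and <code>testName</code> are assumed to be in scope with the types
 * the factory methods below expect), a pre-2.0 call such as
 * <code>TestStarting(ordinal, suiteName, suiteClassName, testName)</code> can be patched to compile
 * by prefixing the object name:
 * </p>
 *
 * <pre class="stHighlight">
 * report(DeprecatedTestStarting(ordinal, suiteName, suiteClassName, testName))
 * </pre>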
*
* @author Bill Venners
*/
@deprecated("Use TestStarting with named and/or default parameters instead.")
object DeprecatedTestStarting {
/**
* Constructs a new <code>TestStarting</code> event with the passed parameters, passing the current thread's
 * name as <code>threadName</code> and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param suiteName the name of the suite containing the test that is starting
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name containing the test that is starting
* @param testName the name of the test that is starting
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param rerunner an optional <code>Rerunner</code> that can be used to rerun the test that is starting (if <code>None</code>
* is passed, the test cannot be rerun)
* @param payload an optional object that can be used to pass custom information to the reporter about the <code>TestStarting</code> event
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>TestStarting</code> instance initialized with the passed and default values
*
*/
def apply(
ordinal: Ordinal,
suiteName: String,
suiteClassName: Option[String],
testName: String,
formatter: Option[Formatter],
rerunner: Option[Rerunner],
payload: Option[Any]
): TestStarting = {
TestStarting(ordinal, suiteName, suiteClassName getOrElse suiteName, suiteClassName, None, testName, testName, None, formatter, None, rerunner, payload, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>TestStarting</code> event with the passed parameters, passing <code>None</code> as the
 * <code>payload</code>, the current thread's name as <code>threadName</code>, and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param suiteName the name of the suite containing the test that is starting
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name containing the test that is starting
* @param testName the name of the test that is starting
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param rerunner an optional <code>Rerunner</code> that can be used to rerun the test that is starting (if <code>None</code>
* is passed, the test cannot be rerun)
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>TestStarting</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
suiteName: String,
suiteClassName: Option[String],
testName: String,
formatter: Option[Formatter],
rerunner: Option[Rerunner]
): TestStarting = {
TestStarting(ordinal, suiteName, suiteClassName getOrElse suiteName, suiteClassName, None, testName, testName, None, formatter, None, rerunner, None, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>TestStarting</code> event with the passed parameters, passing <code>None</code> as the
 * <code>rerunner</code>, <code>None</code> as the <code>payload</code>, the current thread's name as <code>threadName</code>,
* and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param suiteName the name of the suite containing the test that is starting
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name containing the test that is starting
* @param testName the name of the test that is starting
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>TestStarting</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
suiteName: String,
suiteClassName: Option[String],
testName: String,
formatter: Option[Formatter]
): TestStarting = {
TestStarting(ordinal, suiteName, suiteClassName getOrElse suiteName, suiteClassName, None, testName, testName, None, formatter, None, None, None, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>TestStarting</code> event with the passed parameters, passing <code>None</code> for
* <code>formatter</code>, <code>None</code> as the <code>rerunner</code>, <code>None</code> as the <code>payload</code>,
 * the current thread's name as <code>threadName</code>, and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param suiteName the name of the suite containing the test that is starting
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name containing the test that is starting
* @param testName the name of the test that is starting
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>TestStarting</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
suiteName: String,
suiteClassName: Option[String],
testName: String
): TestStarting = {
TestStarting(ordinal, suiteName, suiteClassName getOrElse suiteName, suiteClassName, None, testName, testName, None, None, None, None, None, Thread.currentThread.getName, (new Date).getTime)
}
}
/**
* Deprecated singleton object for the <a href="TestSucceeded.html"><code>TestSucceeded</code></a> event, which contains overloaded factory methods
* and an extractor method to facilitate pattern matching on <code>TestSucceeded</code> objects.
* This object contains methods that were in the <code>TestSucceeded</code> companion object prior to ScalaTest 2.0. If you get a compiler error when upgrading
* to 2.0 for one of the methods formerly in the companion object, a quick way to fix it is to put <code>Deprecated</code> in front of your call.
* Eventually you will need to fix it properly, as this singleton object is deprecated and will be removed in a future version of ScalaTest, but
* this will work as a quick fix to get you compiling again.
*
* <p>
* All factory methods throw <code>NullPointerException</code> if any of the passed values are <code>null</code>.
* </p>
*
* @author Bill Venners
*/
@deprecated("Use TestSucceeded with named and/or default parameters instead.")
object DeprecatedTestSucceeded {
/**
* Constructs a new <code>TestSucceeded</code> event with the passed parameters, passing the current thread's
 * name as <code>threadName</code> and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param suiteName the name of the suite containing the test that has succeeded
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name containing the test that has succeeded
* @param testName the name of the test that has succeeded
* @param duration an optional amount of time, in milliseconds, that was required to run the test that has succeeded
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param rerunner an optional <code>Rerunner</code> that can be used to rerun the test that has succeeded (if <code>None</code>
* is passed, the test cannot be rerun)
* @param payload an optional object that can be used to pass custom information to the reporter about the <code>TestSucceeded</code> event
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>TestSucceeded</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
suiteName: String,
suiteClassName: Option[String],
testName: String,
duration: Option[Long],
formatter: Option[Formatter],
rerunner: Option[Rerunner],
payload: Option[Any]
): TestSucceeded = {
TestSucceeded(ordinal, suiteName, suiteClassName getOrElse suiteName, suiteClassName, None, testName, testName, None, duration, formatter, None, rerunner, payload, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>TestSucceeded</code> event with the passed parameters, passing <code>None</code> as the
 * <code>payload</code>, the current thread's name as <code>threadName</code>, and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param suiteName the name of the suite containing the test that has succeeded
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name containing the test that has succeeded
* @param testName the name of the test that has succeeded
* @param duration an optional amount of time, in milliseconds, that was required to run the test that has succeeded
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param rerunner an optional <code>Rerunner</code> that can be used to rerun the test that has succeeded (if <code>None</code>
* is passed, the test cannot be rerun)
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>TestSucceeded</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
suiteName: String,
suiteClassName: Option[String],
testName: String,
duration: Option[Long],
formatter: Option[Formatter],
rerunner: Option[Rerunner]
): TestSucceeded = {
TestSucceeded(ordinal, suiteName, suiteClassName getOrElse suiteName, suiteClassName, None, testName, testName, None, duration, formatter, None, rerunner, None, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>TestSucceeded</code> event with the passed parameters, passing <code>None</code> as the
 * <code>rerunner</code>, <code>None</code> as the <code>payload</code>, the current thread's name as <code>threadName</code>,
* and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param suiteName the name of the suite containing the test that has succeeded
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name containing the test that has succeeded
* @param testName the name of the test that has succeeded
* @param duration an optional amount of time, in milliseconds, that was required to run the test that has succeeded
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>TestSucceeded</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
suiteName: String,
suiteClassName: Option[String],
testName: String,
duration: Option[Long],
formatter: Option[Formatter]
): TestSucceeded = {
TestSucceeded(ordinal, suiteName, suiteClassName getOrElse suiteName, suiteClassName, None, testName, testName, None, duration, formatter, None, None, None, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>TestSucceeded</code> event with the passed parameters, passing <code>None</code> for
* <code>formatter</code>, <code>None</code> as the <code>rerunner</code>, <code>None</code> as the <code>payload</code>,
 * the current thread's name as <code>threadName</code>, and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param suiteName the name of the suite containing the test that has succeeded
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name containing the test that has succeeded
* @param testName the name of the test that has succeeded
* @param duration an optional amount of time, in milliseconds, that was required to run the test that has succeeded
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>TestSucceeded</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
suiteName: String,
suiteClassName: Option[String],
testName: String,
duration: Option[Long]
): TestSucceeded = {
TestSucceeded(ordinal, suiteName, suiteClassName getOrElse suiteName, suiteClassName, None, testName, testName, None, duration, None, None, None, None, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>TestSucceeded</code> event with the passed parameters, passing <code>None</code> for <code>duration</code>,
* <code>None</code> for <code>formatter</code>, <code>None</code> as the <code>rerunner</code>, <code>None</code> as the <code>payload</code>,
 * the current thread's name as <code>threadName</code>, and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param suiteName the name of the suite containing the test that has succeeded
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name containing the test that has succeeded
* @param testName the name of the test that has succeeded
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>TestSucceeded</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
suiteName: String,
suiteClassName: Option[String],
testName: String
): TestSucceeded = {
TestSucceeded(ordinal, suiteName, suiteClassName getOrElse suiteName, suiteClassName, None, testName, testName, None, None, None, None, None, None, Thread.currentThread.getName, (new Date).getTime)
}
}
/**
* Deprecated singleton object for the <a href="TestFailed.html"><code>TestFailed</code></a> event, which contains overloaded factory methods
* and an extractor method to facilitate pattern matching on <code>TestFailed</code> objects.
* This object contains methods that were in the <code>TestFailed</code> companion object prior to ScalaTest 2.0. If you get a compiler error when upgrading
* to 2.0 for one of the methods formerly in the companion object, a quick way to fix it is to put <code>Deprecated</code> in front of your call.
* Eventually you will need to fix it properly, as this singleton object is deprecated and will be removed in a future version of ScalaTest, but
* this will work as a quick fix to get you compiling again.
*
* <p>
* All factory methods throw <code>NullPointerException</code> if any of the passed values are <code>null</code>.
* </p>
*
* @author Bill Venners
*/
@deprecated("Use TestFailed with named and/or default parameters instead.")
object DeprecatedTestFailed {
/**
* Constructs a new <code>TestFailed</code> event with the passed parameters, passing the current thread's
 * name as <code>threadName</code> and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param message a localized message suitable for presenting to the user
* @param suiteName the name of the suite containing the test that has failed
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name containing the test that has failed
* @param testName the name of the test that has failed
* @param throwable an optional <code>Throwable</code> that, if a <code>Some</code>, indicates why the test has failed,
* or a <code>Throwable</code> created to capture stack trace information about the problem.
* @param duration an optional amount of time, in milliseconds, that was required to run the test that has failed
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param rerunner an optional <code>Rerunner</code> that can be used to rerun the test that has failed (if <code>None</code>
* is passed, the test cannot be rerun)
* @param payload an optional object that can be used to pass custom information to the reporter about the <code>TestFailed</code> event
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>TestFailed</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
message: String,
suiteName: String,
suiteClassName: Option[String],
testName: String,
throwable: Option[Throwable],
duration: Option[Long],
formatter: Option[Formatter],
rerunner: Option[Rerunner],
payload: Option[Any]
): TestFailed = {
TestFailed(ordinal, message, suiteName, suiteClassName getOrElse suiteName, suiteClassName, None, testName, testName, None, throwable, duration, formatter, None, rerunner, payload, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>TestFailed</code> event with the passed parameters, passing <code>None</code> as the
 * <code>payload</code>, the current thread's name as <code>threadName</code>, and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param message a localized message suitable for presenting to the user
* @param suiteName the name of the suite containing the test that has failed
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name containing the test that has failed
* @param testName the name of the test that has failed
* @param throwable an optional <code>Throwable</code> that, if a <code>Some</code>, indicates why the test has failed,
* or a <code>Throwable</code> created to capture stack trace information about the problem.
* @param duration an optional amount of time, in milliseconds, that was required to run the test that has failed
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param rerunner an optional <code>Rerunner</code> that can be used to rerun the test that has failed (if <code>None</code>
* is passed, the test cannot be rerun)
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>TestFailed</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
message: String,
suiteName: String,
suiteClassName: Option[String],
testName: String,
throwable: Option[Throwable],
duration: Option[Long],
formatter: Option[Formatter],
rerunner: Option[Rerunner]
): TestFailed = {
TestFailed(ordinal, message, suiteName, suiteClassName getOrElse suiteName, suiteClassName, None, testName, testName, None, throwable, duration, formatter, None, rerunner, None, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>TestFailed</code> event with the passed parameters, passing <code>None</code> as the
 * <code>rerunner</code>, <code>None</code> as the <code>payload</code>, the current thread's name as <code>threadName</code>,
* and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param message a localized message suitable for presenting to the user
* @param suiteName the name of the suite containing the test that has failed
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name containing the test that has failed
* @param testName the name of the test that has failed
* @param throwable an optional <code>Throwable</code> that, if a <code>Some</code>, indicates why the test has failed,
* or a <code>Throwable</code> created to capture stack trace information about the problem.
* @param duration an optional amount of time, in milliseconds, that was required to run the test that has failed
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>TestFailed</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
message: String,
suiteName: String,
suiteClassName: Option[String],
testName: String,
throwable: Option[Throwable],
duration: Option[Long],
formatter: Option[Formatter]
): TestFailed = {
TestFailed(ordinal, message, suiteName, suiteClassName getOrElse suiteName, suiteClassName, None, testName, testName, None, throwable, duration, formatter, None, None, None, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>TestFailed</code> event with the passed parameters, passing <code>None</code> for
* <code>formatter</code>, <code>None</code> as the <code>rerunner</code>, <code>None</code> as the <code>payload</code>,
 * the current thread's name as <code>threadName</code>, and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param message a localized message suitable for presenting to the user
* @param suiteName the name of the suite containing the test that has failed
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name containing the test that has failed
* @param testName the name of the test that has failed
* @param throwable an optional <code>Throwable</code> that, if a <code>Some</code>, indicates why the test has failed,
* or a <code>Throwable</code> created to capture stack trace information about the problem.
* @param duration an optional amount of time, in milliseconds, that was required to run the test that has failed
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>TestFailed</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
message: String,
suiteName: String,
suiteClassName: Option[String],
testName: String,
throwable: Option[Throwable],
duration: Option[Long]
): TestFailed = {
TestFailed(ordinal, message, suiteName, suiteClassName getOrElse suiteName, suiteClassName, None, testName, testName, None, throwable, duration, None, None, None, None, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>TestFailed</code> event with the passed parameters, passing <code>None</code> for <code>duration</code>,
* <code>None</code> for <code>formatter</code>, <code>None</code> as the <code>rerunner</code>, <code>None</code> as the <code>payload</code>,
 * the current thread's name as <code>threadName</code>, and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param message a localized message suitable for presenting to the user
* @param suiteName the name of the suite containing the test that has failed
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name containing the test that has failed
* @param testName the name of the test that has failed
* @param throwable an optional <code>Throwable</code> that, if a <code>Some</code>, indicates why the test has failed,
* or a <code>Throwable</code> created to capture stack trace information about the problem.
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>TestFailed</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
message: String,
suiteName: String,
suiteClassName: Option[String],
testName: String,
throwable: Option[Throwable]
): TestFailed = {
TestFailed(ordinal, message, suiteName, suiteClassName getOrElse suiteName, suiteClassName, None, testName, testName, None, throwable, None, None, None, None, None, Thread.currentThread.getName, (new Date).getTime)
}
}
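// A hedged sketch of how a custom Reporter might consume the events built by the factory
// methods above. It assumes only that org.scalatest.Reporter declares
// `def apply(event: Event): Unit` and that TestFailed exposes the `suiteName`, `testName`,
// and `message` fields documented above:
//
//   class FailureOnlyReporter extends Reporter {
//     def apply(event: Event): Unit =
//       event match {
//         case e: TestFailed => println("FAILED " + e.suiteName + ": " + e.testName + " (" + e.message + ")")
//         case _ => () // ignore all other events
//       }
//   }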
/**
* Deprecated singleton object for the <a href="TestIgnored.html"><code>TestIgnored</code></a> event, which contains overloaded factory methods
* and an extractor method to facilitate pattern matching on <code>TestIgnored</code> objects.
* This object contains methods that were in the <code>TestIgnored</code> companion object prior to ScalaTest 2.0. If you get a compiler error when upgrading
* to 2.0 for one of the methods formerly in the companion object, a quick way to fix it is to put <code>Deprecated</code> in front of your call.
* Eventually you will need to fix it properly, as this singleton object is deprecated and will be removed in a future version of ScalaTest, but
* this will work as a quick fix to get you compiling again.
*
* <p>
* All factory methods throw <code>NullPointerException</code> if any of the passed values are <code>null</code>.
* </p>
*
* @author Bill Venners
*/
@deprecated("Use TestIgnored with named and/or default parameters instead.")
object DeprecatedTestIgnored {
/**
* Constructs a new <code>TestIgnored</code> event with the passed parameters, passing the current thread's
 * name as <code>threadName</code> and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param suiteName the name of the suite containing the test that was ignored
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name containing the test that was ignored
* @param testName the name of the test that was ignored
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param payload an optional object that can be used to pass custom information to the reporter about the <code>TestIgnored</code> event
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>TestIgnored</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
suiteName: String,
suiteClassName: Option[String],
testName: String,
formatter: Option[Formatter],
payload: Option[Any]
): TestIgnored = {
TestIgnored(ordinal, suiteName, suiteClassName getOrElse suiteName, suiteClassName, None, testName, testName, None, formatter, None, payload, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>TestIgnored</code> event with the passed parameters, passing <code>None</code> as the
 * <code>payload</code>, the current thread's name as <code>threadName</code>, and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param suiteName the name of the suite containing the test that was ignored
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name containing the test that was ignored
* @param testName the name of the test that was ignored
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>TestIgnored</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
suiteName: String,
suiteClassName: Option[String],
testName: String,
formatter: Option[Formatter]
): TestIgnored = {
TestIgnored(ordinal, suiteName, suiteClassName getOrElse suiteName, suiteClassName, None, testName, testName, None, formatter, None, None, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>TestIgnored</code> event with the passed parameters, passing <code>None</code> for
* <code>formatter</code>, <code>None</code> as the <code>payload</code>,
 * the current thread's name as <code>threadName</code>, and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param suiteName the name of the suite containing the test that was ignored
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name containing the test that was ignored
* @param testName the name of the test that was ignored
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>TestIgnored</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
suiteName: String,
suiteClassName: Option[String],
testName: String
): TestIgnored = {
TestIgnored(ordinal, suiteName, suiteClassName getOrElse suiteName, suiteClassName, None, testName, testName, None, None, None, None, Thread.currentThread.getName, (new Date).getTime)
}
}
/**
* Deprecated singleton object for the <a href="TestPending.html"><code>TestPending</code></a> event, which contains overloaded factory methods
* and an extractor method to facilitate pattern matching on <code>TestPending</code> objects.
* This object contains methods that were in the <code>TestPending</code> companion object prior to ScalaTest 2.0. If you get a compiler error when upgrading
* to 2.0 for one of the methods formerly in the companion object, a quick way to fix it is to put <code>Deprecated</code> in front of your call.
* Eventually you will need to fix it properly, as this singleton object is deprecated and will be removed in a future version of ScalaTest, but
* this will work as a quick fix to get you compiling again.
*
* <p>
* All factory methods throw <code>NullPointerException</code> if any of the passed values are <code>null</code>.
* </p>
*
* @author Bill Venners
*/
@deprecated("Use TestPending with named and/or default parameters instead.")
object DeprecatedTestPending {
/**
* Constructs a new <code>TestPending</code> event with the passed parameters, passing the current thread's
 * name as <code>threadName</code> and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param suiteName the name of the suite containing the test that is pending
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name containing the test that is pending
* @param testName the name of the test that is pending
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param payload an optional object that can be used to pass custom information to the reporter about the <code>TestPending</code> event
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>TestPending</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
suiteName: String,
suiteClassName: Option[String],
testName: String,
formatter: Option[Formatter],
payload: Option[Any]
): TestPending = {
TestPending(ordinal, suiteName, suiteClassName getOrElse suiteName, suiteClassName, None, testName, testName, None, None, formatter, None, payload, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>TestPending</code> event with the passed parameters, passing <code>None</code> as the
 * <code>payload</code>, the current thread's name as <code>threadName</code>, and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param suiteName the name of the suite containing the test that is pending
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name containing the test that is pending
* @param testName the name of the test that is pending
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>TestPending</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
suiteName: String,
suiteClassName: Option[String],
testName: String,
formatter: Option[Formatter]
): TestPending = {
TestPending(ordinal, suiteName, suiteClassName getOrElse suiteName, suiteClassName, None, testName, testName, None, None, formatter, None, None, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>TestPending</code> event with the passed parameters, passing <code>None</code> for
* <code>formatter</code>, <code>None</code> as the <code>payload</code>,
 * the current thread's name as <code>threadName</code>, and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param suiteName the name of the suite containing the test that is pending
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name containing the test that is pending
* @param testName the name of the test that is pending
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>TestPending</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
suiteName: String,
suiteClassName: Option[String],
testName: String
): TestPending = {
TestPending(ordinal, suiteName, suiteClassName getOrElse suiteName, suiteClassName, None, testName, testName, None, None, None, None, None, Thread.currentThread.getName, (new Date).getTime)
}
}
/**
* Deprecated singleton object for the <a href="SuiteStarting.html"><code>SuiteStarting</code></a> event, which contains overloaded factory methods
* and an extractor method to facilitate pattern matching on <code>SuiteStarting</code> objects.
* This object contains methods that were in the <code>SuiteStarting</code> companion object prior to ScalaTest 2.0. If you get a compiler error when upgrading
* to 2.0 for one of the methods formerly in the companion object, a quick way to fix it is to put <code>Deprecated</code> in front of your call.
* Eventually you will need to fix it properly, as this singleton object is deprecated and will be removed in a future version of ScalaTest, but
* this will work as a quick fix to get you compiling again.
*
* <p>
* All factory methods throw <code>NullPointerException</code> if any of the passed values are <code>null</code>.
* </p>
*
* @author Bill Venners
*/
@deprecated("Use SuiteStarting with named and/or default parameters instead.")
object DeprecatedSuiteStarting {
/**
* Constructs a new <code>SuiteStarting</code> event with the passed parameters, passing the current thread's
 * name as <code>threadName</code> and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
 * @param suiteName the name of the suite that is starting, suitable for presenting to the user
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name of the suite that is starting
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param rerunner an optional <code>Rerunner</code> that can be used to rerun the suite that is starting (if <code>None</code>
* is passed, the suite cannot be rerun)
* @param payload an optional object that can be used to pass custom information to the reporter about the <code>SuiteStarting</code> event
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>SuiteStarting</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
suiteName: String,
suiteClassName: Option[String],
formatter: Option[Formatter],
rerunner: Option[Rerunner],
payload: Option[Any]
): SuiteStarting = {
SuiteStarting(ordinal, suiteName, suiteClassName getOrElse suiteName, suiteClassName, None, formatter, None, rerunner, payload, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>SuiteStarting</code> event with the passed parameters, passing <code>None</code> as the
 * <code>payload</code>, the current thread's name as <code>threadName</code>, and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
 * @param suiteName the name of the suite that is starting, suitable for presenting to the user
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name of the suite that is starting
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param rerunner an optional <code>Rerunner</code> that can be used to rerun the suite that is starting (if <code>None</code>
* is passed, the suite cannot be rerun)
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>SuiteStarting</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
suiteName: String,
suiteClassName: Option[String],
formatter: Option[Formatter],
rerunner: Option[Rerunner]
): SuiteStarting = {
SuiteStarting(ordinal, suiteName, suiteClassName getOrElse suiteName, suiteClassName, None, formatter, None, rerunner, None, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>SuiteStarting</code> event with the passed parameters, passing <code>None</code> as the
 * <code>rerunner</code>, <code>None</code> as the <code>payload</code>, the current thread's name as <code>threadName</code>,
* and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
 * @param suiteName the name of the suite that is starting, suitable for presenting to the user
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name of the suite that is starting
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>SuiteStarting</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
suiteName: String,
suiteClassName: Option[String],
formatter: Option[Formatter]
): SuiteStarting = {
SuiteStarting(ordinal, suiteName, suiteClassName getOrElse suiteName, suiteClassName, None, formatter, None, None, None, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>SuiteStarting</code> event with the passed parameters, passing <code>None</code> for
* <code>formatter</code>, <code>None</code> as the <code>rerunner</code>, <code>None</code> as the <code>payload</code>,
 * the current thread's name as <code>threadName</code>, and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
 * @param suiteName the name of the suite that is starting, suitable for presenting to the user
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name of the suite that is starting
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>SuiteStarting</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
suiteName: String,
suiteClassName: Option[String]
): SuiteStarting = {
SuiteStarting(ordinal, suiteName, suiteClassName getOrElse suiteName, suiteClassName, None, None, None, None, None, Thread.currentThread.getName, (new Date).getTime)
}
}
/**
* Deprecated singleton object for the <a href="SuiteCompleted.html"><code>SuiteCompleted</code></a> event, which contains overloaded factory methods
* and an extractor method to facilitate pattern matching on <code>SuiteCompleted</code> objects.
* This object contains methods that were in the <code>SuiteCompleted</code> companion object prior to ScalaTest 2.0. If you get a compiler error when upgrading
* to 2.0 for one of the methods formerly in the companion object, a quick way to fix it is to put <code>Deprecated</code> in front of your call.
* Eventually you will need to fix it properly, as this singleton object is deprecated and will be removed in a future version of ScalaTest, but
* this will work as a quick fix to get you compiling again.
*
* <p>
* All factory methods throw <code>NullPointerException</code> if any of the passed values are <code>null</code>.
* </p>
*
* @author Bill Venners
*/
@deprecated("Use SuiteCompleted with named and/or default parameters instead.")
object DeprecatedSuiteCompleted {
/**
* Constructs a new <code>SuiteCompleted</code> event with the passed parameters, passing the current thread's
 * name as <code>threadName</code> and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
 * @param suiteName the name of the suite that has completed
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name of the suite that has completed
* @param duration an optional amount of time, in milliseconds, that was required to execute the suite that has completed
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param rerunner an optional <code>Rerunner</code> that can be used to rerun the suite that has completed (if <code>None</code>
* is passed, the suite cannot be rerun)
* @param payload an optional object that can be used to pass custom information to the reporter about the <code>SuiteCompleted</code> event
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>SuiteCompleted</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
suiteName: String,
suiteClassName: Option[String],
duration: Option[Long],
formatter: Option[Formatter],
rerunner: Option[Rerunner],
payload: Option[Any]
): SuiteCompleted = {
SuiteCompleted(ordinal, suiteName, suiteClassName getOrElse suiteName, suiteClassName, None, duration, formatter, None, rerunner, payload, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>SuiteCompleted</code> event with the passed parameters, passing <code>None</code> as the
 * <code>payload</code>, the current thread's name as <code>threadName</code>, and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
 * @param suiteName the name of the suite that has completed
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name of the suite that has completed
* @param duration an optional amount of time, in milliseconds, that was required to execute the suite that has completed
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param rerunner an optional <code>Rerunner</code> that can be used to rerun the suite that has completed (if <code>None</code>
* is passed, the suite cannot be rerun)
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>SuiteCompleted</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
suiteName: String,
suiteClassName: Option[String],
duration: Option[Long],
formatter: Option[Formatter],
rerunner: Option[Rerunner]
): SuiteCompleted = {
SuiteCompleted(ordinal, suiteName, suiteClassName getOrElse suiteName, suiteClassName, None, duration, formatter, None, rerunner, None, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>SuiteCompleted</code> event with the passed parameters, passing <code>None</code> as the
 * <code>rerunner</code>, <code>None</code> as the <code>payload</code>, the current thread's name as <code>threadName</code>,
* and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
 * @param suiteName the name of the suite that has completed
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name of the suite that has completed
* @param duration an optional amount of time, in milliseconds, that was required to execute the suite that has completed
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>SuiteCompleted</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
suiteName: String,
suiteClassName: Option[String],
duration: Option[Long],
formatter: Option[Formatter]
): SuiteCompleted = {
SuiteCompleted(ordinal, suiteName, suiteClassName getOrElse suiteName, suiteClassName, None, duration, formatter, None, None, None, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>SuiteCompleted</code> event with the passed parameters, passing <code>None</code> for
* <code>formatter</code>, <code>None</code> as the <code>rerunner</code>, <code>None</code> as the <code>payload</code>,
 * the current thread's name as <code>threadName</code>, and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
 * @param suiteName the name of the suite that has completed
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name of the suite that has completed
* @param duration an optional amount of time, in milliseconds, that was required to execute the suite that has completed
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>SuiteCompleted</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
suiteName: String,
suiteClassName: Option[String],
duration: Option[Long]
): SuiteCompleted = {
SuiteCompleted(ordinal, suiteName, suiteClassName getOrElse suiteName, suiteClassName, None, duration, None, None, None, None, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>SuiteCompleted</code> event with the passed parameters, passing <code>None</code> for <code>duration</code>,
* <code>None</code> for <code>formatter</code>, <code>None</code> as the <code>rerunner</code>, <code>None</code> as the <code>payload</code>,
 * the current thread's name as <code>threadName</code>, and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
 * @param suiteName the name of the suite that has completed
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name of the suite that has completed
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>SuiteCompleted</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
suiteName: String,
suiteClassName: Option[String]
): SuiteCompleted = {
SuiteCompleted(ordinal, suiteName, suiteClassName getOrElse suiteName, suiteClassName, None, None, None, None, None, None, Thread.currentThread.getName, (new Date).getTime)
}
}
/**
* Deprecated singleton object for the <a href="SuiteAborted.html"><code>SuiteAborted</code></a> event, which contains overloaded factory methods
* and an extractor method to facilitate pattern matching on <code>SuiteAborted</code> objects.
* This object contains methods that were in the <code>SuiteAborted</code> companion object prior to ScalaTest 2.0. If you get a compiler error when upgrading
* to 2.0 for one of the methods formerly in the companion object, a quick way to fix it is to put <code>Deprecated</code> in front of your call.
* Eventually you will need to fix it properly, as this singleton object is deprecated and will be removed in a future version of ScalaTest, but
* this will work as a quick fix to get you compiling again.
*
* <p>
* All factory methods throw <code>NullPointerException</code> if any of the passed values are <code>null</code>.
* </p>
*
* @author Bill Venners
*/
@deprecated("Use SuiteAborted with named and/or default parameters instead.")
object DeprecatedSuiteAborted {
/**
* Constructs a new <code>SuiteAborted</code> event with the passed parameters, passing the current thread's
 * name as <code>threadName</code> and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
 * @param message a localized message suitable for presenting to the user
 * @param suiteName the name of the suite that has aborted
 * @param suiteClassName an optional fully qualified <code>Suite</code> class name of the suite that has aborted
* @param throwable an optional <code>Throwable</code> that, if a <code>Some</code>, indicates why the suite has aborted,
* or a <code>Throwable</code> created to capture stack trace information about the problem.
* @param duration an optional amount of time, in milliseconds, that was required to execute the suite that has aborted
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param rerunner an optional <code>Rerunner</code> that can be used to rerun the suite that has aborted (if <code>None</code>
* is passed, the suite cannot be rerun)
* @param payload an optional object that can be used to pass custom information to the reporter about the <code>SuiteAborted</code> event
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>SuiteAborted</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
message: String,
suiteName: String,
suiteClassName: Option[String],
throwable: Option[Throwable],
duration: Option[Long],
formatter: Option[Formatter],
rerunner: Option[Rerunner],
payload: Option[Any]
): SuiteAborted = {
SuiteAborted(ordinal, message, suiteName, suiteClassName getOrElse suiteName, suiteClassName, None, throwable, duration, formatter, None, rerunner, payload, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>SuiteAborted</code> event with the passed parameters, passing <code>None</code> as the
   * <code>payload</code>, the current thread's name as <code>threadname</code>, and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param message a localized message suitable for presenting to the user
   * @param suiteName the name of the suite that has aborted
   * @param suiteClassName an optional fully qualified <code>Suite</code> class name of the suite that has aborted
* @param throwable an optional <code>Throwable</code> that, if a <code>Some</code>, indicates why the suite has aborted,
* or a <code>Throwable</code> created to capture stack trace information about the problem.
* @param duration an optional amount of time, in milliseconds, that was required to execute the suite that has aborted
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param rerunner an optional <code>Rerunner</code> that can be used to rerun the suite that has aborted (if <code>None</code>
* is passed, the suite cannot be rerun)
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>SuiteAborted</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
message: String,
suiteName: String,
suiteClassName: Option[String],
throwable: Option[Throwable],
duration: Option[Long],
formatter: Option[Formatter],
rerunner: Option[Rerunner]
): SuiteAborted = {
SuiteAborted(ordinal, message, suiteName, suiteClassName getOrElse suiteName, suiteClassName, None, throwable, duration, formatter, None, rerunner, None, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>SuiteAborted</code> event with the passed parameters, passing <code>None</code> as the
   * <code>rerunner</code>, <code>None</code> as the <code>payload</code>, the current thread's name as <code>threadname</code>,
* and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
   * @param message a localized message suitable for presenting to the user
   * @param suiteName the name of the suite that has aborted
   * @param suiteClassName an optional fully qualified <code>Suite</code> class name of the suite that has aborted
   * @param throwable an optional <code>Throwable</code> that, if a <code>Some</code>, indicates why the suite has aborted,
   *        or a <code>Throwable</code> created to capture stack trace information about the problem.
   * @param duration an optional amount of time, in milliseconds, that was required to execute the suite that has aborted
   * @param formatter an optional formatter that provides extra information that can be used by reporters in determining
   *        how to present this event to the user
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>SuiteAborted</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
message: String,
suiteName: String,
suiteClassName: Option[String],
throwable: Option[Throwable],
duration: Option[Long],
formatter: Option[Formatter]
): SuiteAborted = {
SuiteAborted(ordinal, message, suiteName, suiteClassName getOrElse suiteName, suiteClassName, None, throwable, duration, formatter, None, None, None, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>SuiteAborted</code> event with the passed parameters, passing <code>None</code> for
* <code>formatter</code>, <code>None</code> as the <code>rerunner</code>, <code>None</code> as the <code>payload</code>,
   * the current thread's name as <code>threadname</code>, and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param message a localized message suitable for presenting to the user
   * @param suiteName the name of the suite that has aborted
   * @param suiteClassName an optional fully qualified <code>Suite</code> class name of the suite that has aborted
* @param throwable an optional <code>Throwable</code> that, if a <code>Some</code>, indicates why the suite has aborted,
* or a <code>Throwable</code> created to capture stack trace information about the problem.
* @param duration an optional amount of time, in milliseconds, that was required to execute the suite that has aborted
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>SuiteAborted</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
message: String,
suiteName: String,
suiteClassName: Option[String],
throwable: Option[Throwable],
duration: Option[Long]
): SuiteAborted = {
SuiteAborted(ordinal, message, suiteName, suiteClassName getOrElse suiteName, suiteClassName, None, throwable, duration, None, None, None, None, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>SuiteAborted</code> event with the passed parameters, passing <code>None</code> for <code>duration</code>,
* <code>None</code> for <code>formatter</code>, <code>None</code> as the <code>rerunner</code>, <code>None</code> as the <code>payload</code>,
   * the current thread's name as <code>threadname</code>, and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param message a localized message suitable for presenting to the user
   * @param suiteName the name of the suite that has aborted
   * @param suiteClassName an optional fully qualified <code>Suite</code> class name of the suite that has aborted
* @param throwable an optional <code>Throwable</code> that, if a <code>Some</code>, indicates why the suite has aborted,
* or a <code>Throwable</code> created to capture stack trace information about the problem.
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>SuiteAborted</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
message: String,
suiteName: String,
suiteClassName: Option[String],
throwable: Option[Throwable]
): SuiteAborted = {
SuiteAborted(ordinal, message, suiteName, suiteClassName getOrElse suiteName, suiteClassName, None, throwable, None, None, None, None, None, Thread.currentThread.getName, (new Date).getTime)
}
}
/**
* Deprecated singleton object for the <a href="RunStarting.html"><code>RunStarting</code></a> event, which contains overloaded factory methods
* and an extractor method to facilitate pattern matching on <code>RunStarting</code> objects.
* This object contains methods that were in the <code>RunStarting</code> companion object prior to ScalaTest 2.0. If you get a compiler error when upgrading
* to 2.0 for one of the methods formerly in the companion object, a quick way to fix it is to put <code>Deprecated</code> in front of your call.
* Eventually you will need to fix it properly, as this singleton object is deprecated and will be removed in a future version of ScalaTest, but
* this will work as a quick fix to get you compiling again.
*
* <p>
* All factory methods throw <code>NullPointerException</code> if any of the passed values are <code>null</code>, and <code>IllegalArgumentException</code> if
* <code>testCount</code> is less than zero.
* </p>
*
* @author Bill Venners
*/
@deprecated("Use RunStarting with named and/or default parameters instead.")
object DeprecatedRunStarting {
/**
* Constructs a new <code>RunStarting</code> event with the passed parameters, passing the current thread's
* name as <code>threadname</code> and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param testCount the number of tests expected during this run
* @param configMap a <code>Map</code> of key-value pairs that can be used by custom <code>Reporter</code>s
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param payload an optional object that can be used to pass custom information to the reporter about the <code>RunStarting</code> event
*
* @throws IllegalArgumentException if <code>testCount</code> is less than zero.
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>RunStarting</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
testCount: Int,
configMap: Map[String, Any],
formatter: Option[Formatter],
payload: Option[Any]
): RunStarting = {
RunStarting(ordinal, testCount, configMap, formatter, None, payload, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>RunStarting</code> event with the passed parameters, passing <code>None</code> as the
   * <code>payload</code>, the current thread's name as <code>threadname</code>,
* and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param testCount the number of tests expected during this run
* @param configMap a <code>Map</code> of key-value pairs that can be used by custom <code>Reporter</code>s
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
*
   * @throws IllegalArgumentException if <code>testCount</code> is less than zero.
   * @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>RunStarting</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
testCount: Int,
configMap: Map[String, Any],
formatter: Option[Formatter]
): RunStarting = {
RunStarting(ordinal, testCount, configMap, formatter, None, None, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>RunStarting</code> event with the passed parameters, passing <code>None</code> for
* <code>formatter</code>, <code>None</code> as the <code>payload</code>,
   * the current thread's name as <code>threadname</code>, and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param testCount the number of tests expected during this run
* @param configMap a <code>Map</code> of key-value pairs that can be used by custom <code>Reporter</code>s
*
   * @throws IllegalArgumentException if <code>testCount</code> is less than zero.
   * @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>RunStarting</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
testCount: Int,
configMap: Map[String, Any]
): RunStarting = {
RunStarting(ordinal, testCount, configMap, None, None, None, Thread.currentThread.getName, (new Date).getTime)
}
}
/**
* Deprecated singleton object for the <a href="RunCompleted.html"><code>RunCompleted</code></a> event, which contains overloaded factory methods
* and an extractor method to facilitate pattern matching on <code>RunCompleted</code> objects.
* This object contains methods that were in the <code>RunCompleted</code> companion object prior to ScalaTest 2.0. If you get a compiler error when upgrading
* to 2.0 for one of the methods formerly in the companion object, a quick way to fix it is to put <code>Deprecated</code> in front of your call.
* Eventually you will need to fix it properly, as this singleton object is deprecated and will be removed in a future version of ScalaTest, but
* this will work as a quick fix to get you compiling again.
*
* <p>
* All factory methods throw <code>NullPointerException</code> if any of the passed values are <code>null</code>.
* </p>
*
* @author Bill Venners
*/
@deprecated("Use RunCompleted with named and/or default parameters instead.")
object DeprecatedRunCompleted {
/**
* Constructs a new <code>RunCompleted</code> event with the passed parameters, passing the current thread's
* name as <code>threadname</code> and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param duration an optional amount of time, in milliseconds, that was required by the run that has completed
* @param summary an optional summary of the number of tests that were reported as succeeded, failed, ignored, and pending
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param payload an optional object that can be used to pass custom information to the reporter about the <code>RunCompleted</code> event
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>RunCompleted</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
duration: Option[Long],
summary: Option[Summary],
formatter: Option[Formatter],
payload: Option[Any]
): RunCompleted = {
RunCompleted(ordinal, duration, summary, formatter, None, payload, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>RunCompleted</code> event with the passed parameters, passing <code>None</code> as the
   * <code>payload</code>, the current thread's name as <code>threadname</code>,
* and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param duration an optional amount of time, in milliseconds, that was required by the run that has completed
* @param summary an optional summary of the number of tests that were reported as succeeded, failed, ignored, and pending
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>RunCompleted</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
duration: Option[Long],
summary: Option[Summary],
formatter: Option[Formatter]
): RunCompleted = {
RunCompleted(ordinal, duration, summary, formatter, None, None, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>RunCompleted</code> event with the passed parameters, passing <code>None</code> for
* <code>formatter</code>, <code>None</code> as the <code>payload</code>,
   * the current thread's name as <code>threadname</code>, and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param duration an optional amount of time, in milliseconds, that was required by the run that has completed
* @param summary an optional summary of the number of tests that were reported as succeeded, failed, ignored, and pending
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>RunCompleted</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
duration: Option[Long],
summary: Option[Summary]
): RunCompleted = {
RunCompleted(ordinal, duration, summary, None, None, None, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>RunCompleted</code> event with the passed parameters, passing <code>None</code> for <code>summary</code>,
* <code>None</code> for <code>formatter</code>, <code>None</code> as the <code>payload</code>,
   * the current thread's name as <code>threadname</code>, and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param duration an optional amount of time, in milliseconds, that was required by the run that has completed
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>RunCompleted</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
duration: Option[Long]
): RunCompleted = {
RunCompleted(ordinal, duration, None, None, None, None, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>RunCompleted</code> event with the passed parameters, passing <code>None</code> for <code>duration</code>,
* <code>None</code> for <code>summary</code>, <code>None</code> for <code>formatter</code>, <code>None</code> as the <code>payload</code>,
   * the current thread's name as <code>threadname</code>, and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>RunCompleted</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal
): RunCompleted = {
RunCompleted(ordinal, None, None, None, None, None, Thread.currentThread.getName, (new Date).getTime)
}
}
/**
* Deprecated singleton object for the <a href="RunStopped.html"><code>RunStopped</code></a> event, which contains overloaded factory methods
* and an extractor method to facilitate pattern matching on <code>RunStopped</code> objects.
* This object contains methods that were in the <code>RunStopped</code> companion object prior to ScalaTest 2.0. If you get a compiler error when upgrading
* to 2.0 for one of the methods formerly in the companion object, a quick way to fix it is to put <code>Deprecated</code> in front of your call.
* Eventually you will need to fix it properly, as this singleton object is deprecated and will be removed in a future version of ScalaTest, but
* this will work as a quick fix to get you compiling again.
*
* <p>
* All factory methods throw <code>NullPointerException</code> if any of the passed values are <code>null</code>.
* </p>
*
* @author Bill Venners
*/
@deprecated("Use RunStopped with named and/or default parameters instead.")
object DeprecatedRunStopped {
/**
* Constructs a new <code>RunStopped</code> event with the passed parameters, passing the current thread's
* name as <code>threadname</code> and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param duration an optional amount of time, in milliseconds, that was required by the run that has stopped
* @param summary an optional summary of the number of tests that were reported as succeeded, failed, ignored, and pending
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param payload an optional object that can be used to pass custom information to the reporter about the <code>RunStopped</code> event
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>RunStopped</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
duration: Option[Long],
summary: Option[Summary],
formatter: Option[Formatter],
payload: Option[Any]
): RunStopped = {
RunStopped(ordinal, duration, summary, formatter, None, payload, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>RunStopped</code> event with the passed parameters, passing <code>None</code> as the
   * <code>payload</code>, the current thread's name as <code>threadname</code>,
* and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param duration an optional amount of time, in milliseconds, that was required by the run that has stopped
* @param summary an optional summary of the number of tests that were reported as succeeded, failed, ignored, and pending
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>RunStopped</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
duration: Option[Long],
summary: Option[Summary],
formatter: Option[Formatter]
): RunStopped = {
RunStopped(ordinal, duration, summary, formatter, None, None, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>RunStopped</code> event with the passed parameters, passing <code>None</code> for
* <code>formatter</code>, <code>None</code> as the <code>payload</code>,
   * the current thread's name as <code>threadname</code>, and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param duration an optional amount of time, in milliseconds, that was required by the run that has stopped
* @param summary an optional summary of the number of tests that were reported as succeeded, failed, ignored, and pending
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>RunStopped</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
duration: Option[Long],
summary: Option[Summary]
): RunStopped = {
RunStopped(ordinal, duration, summary, None, None, None, Thread.currentThread.getName, (new Date).getTime)
}
/**
   * Constructs a new <code>RunStopped</code> event with the passed parameters, passing <code>None</code> for <code>summary</code>,
   * <code>None</code> for <code>formatter</code>, <code>None</code> as the <code>payload</code>,
   * the current thread's name as <code>threadname</code>, and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param duration an optional amount of time, in milliseconds, that was required by the run that has stopped
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>RunStopped</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
duration: Option[Long]
): RunStopped = {
RunStopped(ordinal, duration, None, None, None, None, Thread.currentThread.getName, (new Date).getTime)
}
/**
   * Constructs a new <code>RunStopped</code> event with the passed parameters, passing <code>None</code> for <code>duration</code>,
   * <code>None</code> for <code>summary</code>, <code>None</code> for <code>formatter</code>, <code>None</code> as the <code>payload</code>,
   * the current thread's name as <code>threadname</code>, and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>RunStopped</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal
): RunStopped = {
RunStopped(ordinal, None, None, None, None, None, Thread.currentThread.getName, (new Date).getTime)
}
}
/**
* Deprecated singleton object for the <a href="RunAborted.html"><code>RunAborted</code></a> event, which contains overloaded factory methods
* and an extractor method to facilitate pattern matching on <code>RunAborted</code> objects.
* This object contains methods that were in the <code>RunAborted</code> companion object prior to ScalaTest 2.0. If you get a compiler error when upgrading
* to 2.0 for one of the methods formerly in the companion object, a quick way to fix it is to put <code>Deprecated</code> in front of your call.
* Eventually you will need to fix it properly, as this singleton object is deprecated and will be removed in a future version of ScalaTest, but
* this will work as a quick fix to get you compiling again.
*
* <p>
* All factory methods throw <code>NullPointerException</code> if any of the passed values are <code>null</code>.
* </p>
*
* @author Bill Venners
*/
@deprecated("Use RunAborted with named and/or default parameters instead.")
object DeprecatedRunAborted {
/**
* Constructs a new <code>RunAborted</code> event with the passed parameters, passing the current thread's
* name as <code>threadname</code> and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param message a localized message suitable for presenting to the user
* @param throwable an optional <code>Throwable</code> that, if a <code>Some</code>, indicates why the suite has aborted,
* or a <code>Throwable</code> created to capture stack trace information about the problem.
* @param duration an optional amount of time, in milliseconds, that was required by the run that has aborted
* @param summary an optional summary of the number of tests that were reported as succeeded, failed, ignored, and pending
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param payload an optional object that can be used to pass custom information to the reporter about the <code>RunAborted</code> event
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>RunAborted</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
message: String,
throwable: Option[Throwable],
duration: Option[Long],
summary: Option[Summary],
formatter: Option[Formatter],
payload: Option[Any]
): RunAborted = {
RunAborted(ordinal, message, throwable, duration, summary, formatter, None, payload, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>RunAborted</code> event with the passed parameters, passing <code>None</code> as the
   * <code>payload</code>, the current thread's name as <code>threadname</code>,
* and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param message a localized message suitable for presenting to the user
* @param throwable an optional <code>Throwable</code> that, if a <code>Some</code>, indicates why the suite has aborted,
* or a <code>Throwable</code> created to capture stack trace information about the problem.
* @param duration an optional amount of time, in milliseconds, that was required by the run that has aborted
* @param summary an optional summary of the number of tests that were reported as succeeded, failed, ignored, and pending
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>RunAborted</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
message: String,
throwable: Option[Throwable],
duration: Option[Long],
summary: Option[Summary],
formatter: Option[Formatter]
): RunAborted = {
RunAborted(ordinal, message, throwable, duration, summary, formatter, None, None, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>RunAborted</code> event with the passed parameters, passing <code>None</code> for
* <code>formatter</code>, <code>None</code> as the <code>payload</code>,
   * the current thread's name as <code>threadname</code>, and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param message a localized message suitable for presenting to the user
* @param throwable an optional <code>Throwable</code> that, if a <code>Some</code>, indicates why the suite has aborted,
* or a <code>Throwable</code> created to capture stack trace information about the problem.
* @param duration an optional amount of time, in milliseconds, that was required by the run that has aborted
* @param summary an optional summary of the number of tests that were reported as succeeded, failed, ignored, and pending
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>RunAborted</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
message: String,
throwable: Option[Throwable],
duration: Option[Long],
summary: Option[Summary]
): RunAborted = {
RunAborted(ordinal, message, throwable, duration, summary, None, None, None, Thread.currentThread.getName, (new Date).getTime)
}
/**
   * Constructs a new <code>RunAborted</code> event with the passed parameters, passing <code>None</code> for
   * <code>summary</code>, <code>None</code> for <code>formatter</code>, <code>None</code> as the <code>payload</code>,
   * the current thread's name as <code>threadname</code>, and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param message a localized message suitable for presenting to the user
* @param throwable an optional <code>Throwable</code> that, if a <code>Some</code>, indicates why the suite has aborted,
* or a <code>Throwable</code> created to capture stack trace information about the problem.
* @param duration an optional amount of time, in milliseconds, that was required by the run that has aborted
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>RunAborted</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
message: String,
throwable: Option[Throwable],
duration: Option[Long]
): RunAborted = {
RunAborted(ordinal, message, throwable, duration, None, None, None, None, Thread.currentThread.getName, (new Date).getTime)
}
/**
   * Constructs a new <code>RunAborted</code> event with the passed parameters, passing <code>None</code> for <code>duration</code>,
   * <code>None</code> for <code>summary</code>, <code>None</code> for <code>formatter</code>, <code>None</code> as the <code>payload</code>,
   * the current thread's name as <code>threadname</code>, and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param message a localized message suitable for presenting to the user
* @param throwable an optional <code>Throwable</code> that, if a <code>Some</code>, indicates why the suite has aborted,
* or a <code>Throwable</code> created to capture stack trace information about the problem.
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>RunAborted</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
message: String,
throwable: Option[Throwable]
): RunAborted = {
RunAborted(ordinal, message, throwable, None, None, None, None, None, Thread.currentThread.getName, (new Date).getTime)
}
}
/**
* Deprecated singleton object for the <a href="InfoProvided.html"><code>InfoProvided</code></a> event, which contains overloaded factory methods
* and an extractor method to facilitate pattern matching on <code>InfoProvided</code> objects.
* This object contains methods that were in the <code>InfoProvided</code> companion object prior to ScalaTest 2.0. If you get a compiler error when upgrading
* to 2.0 for one of the methods formerly in the companion object, a quick way to fix it is to put <code>Deprecated</code> in front of your call.
* Eventually you will need to fix it properly, as this singleton object is deprecated and will be removed in a future version of ScalaTest, but
* this will work as a quick fix to get you compiling again.
*
* <p>
* All factory methods throw <code>NullPointerException</code> if any of the passed values are <code>null</code>.
* </p>
*
* @author Bill Venners
*/
@deprecated("Use InfoProvided with named and/or default parameters instead.")
object DeprecatedInfoProvided {
/**
* Constructs a new <code>InfoProvided</code> event with the passed parameters, passing the current thread's
* name as <code>threadname</code> and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param message a localized message suitable for presenting to the user
* @param nameInfo an optional <code>NameInfo</code> that if defined, provides names for the suite and optionally the test
* in the context of which the information was provided
* @param aboutAPendingTest indicates whether the information being provided via this event is about a pending test
* @param throwable an optional <code>Throwable</code>
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
* @param payload an optional object that can be used to pass custom information to the reporter about the <code>InfoProvided</code> event
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>InfoProvided</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
message: String,
nameInfo: Option[NameInfo],
aboutAPendingTest: Option[Boolean],
throwable: Option[Throwable],
formatter: Option[Formatter],
payload: Option[Any]
): InfoProvided = {
InfoProvided(ordinal, message, nameInfo, aboutAPendingTest, Some(false), throwable, formatter, None, payload, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>InfoProvided</code> event with the passed parameters, passing <code>None</code> as the
   * <code>payload</code>, the current thread's name as <code>threadname</code>,
* and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param message a localized message suitable for presenting to the user
* @param nameInfo an optional <code>NameInfo</code> that if defined, provides names for the suite and optionally the test
* in the context of which the information was provided
* @param aboutAPendingTest indicates whether the information being provided via this event is about a pending test
* @param throwable an optional <code>Throwable</code>
* @param formatter an optional formatter that provides extra information that can be used by reporters in determining
* how to present this event to the user
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>InfoProvided</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
message: String,
nameInfo: Option[NameInfo],
aboutAPendingTest: Option[Boolean],
throwable: Option[Throwable],
formatter: Option[Formatter]
): InfoProvided = {
InfoProvided(ordinal, message, nameInfo, aboutAPendingTest, Some(false), throwable, formatter, None, None, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>InfoProvided</code> event with the passed parameters, passing <code>None</code> for
* <code>formatter</code>, <code>None</code> as the <code>payload</code>,
   * the current thread's name as <code>threadname</code>, and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param message a localized message suitable for presenting to the user
* @param nameInfo an optional <code>NameInfo</code> that if defined, provides names for the suite and optionally the test
* in the context of which the information was provided
* @param aboutAPendingTest indicates whether the information being provided via this event is about a pending test
* @param throwable an optional <code>Throwable</code>
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>InfoProvided</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
message: String,
nameInfo: Option[NameInfo],
aboutAPendingTest: Option[Boolean],
throwable: Option[Throwable]
): InfoProvided = {
InfoProvided(ordinal, message, nameInfo, aboutAPendingTest, Some(false), throwable, None, None, None, Thread.currentThread.getName, (new Date).getTime)
}
/**
* Constructs a new <code>InfoProvided</code> event with the passed parameters, passing <code>None</code> for
* the <code>throwable</code>, <code>None</code> for
* <code>formatter</code>, <code>None</code> as the <code>payload</code>,
   * the current thread's name as <code>threadname</code>, and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param message a localized message suitable for presenting to the user
* @param nameInfo an optional <code>NameInfo</code> that if defined, provides names for the suite and optionally the test
* in the context of which the information was provided
* @param aboutAPendingTest indicates whether the information being provided via this event is about a pending test
   *
   * @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>InfoProvided</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
message: String,
nameInfo: Option[NameInfo],
aboutAPendingTest: Option[Boolean]
): InfoProvided = {
InfoProvided(ordinal, message, nameInfo, aboutAPendingTest, Some(false), None, None, None, None, Thread.currentThread.getName, (new Date).getTime)
}
/**
   * Constructs a new <code>InfoProvided</code> event with the passed parameters, passing <code>None</code> for
   * <code>aboutAPendingTest</code>, <code>None</code> for the <code>throwable</code>, <code>None</code> for
   * <code>formatter</code>, <code>None</code> as the <code>payload</code>,
   * the current thread's name as <code>threadname</code>, and the current time as <code>timeStamp</code>.
*
* @param ordinal an <code>Ordinal</code> that can be used to place this event in order in the context of
* other events reported during the same run
* @param message a localized message suitable for presenting to the user
* @param nameInfo an optional <code>NameInfo</code> that if defined, provides names for the suite and optionally the test
* in the context of which the information was provided
*
* @throws NullPointerException if any of the passed values are <code>null</code>
*
* @return a new <code>InfoProvided</code> instance initialized with the passed and default values
*/
def apply(
ordinal: Ordinal,
message: String,
nameInfo: Option[NameInfo]
): InfoProvided = {
InfoProvided(ordinal, message, nameInfo, None, None, None, None, None, None, Thread.currentThread.getName, (new Date).getTime)
}
}
|
epishkin/scalatest-google-code
|
src/main/scala/org/scalatest/events/Event.scala
|
Scala
|
apache-2.0
| 180,277 |
package socialnetwork.server
import akka.actor._
import akka.routing.FromConfig
import com.typesafe.config.ConfigFactory
import org.mashupbots.socko.handlers.{StaticResourceRequest, StaticContentHandlerConfig, StaticContentHandler}
import org.mashupbots.socko.routes._
import org.mashupbots.socko.webserver.{WebServer, WebServerConfig}
import socialnetwork.actor.{ClientHandlerActor, SocialNetworkActor, ConnectedClientsActor, TweetActor}
import socialnetwork.common._
object Server extends App {
var clients = Map[String, ActorRef]()
val settings = ConfigFactory.load.getConfig("server")
val actorSystem = ActorSystem("socialnetwork", settings)
// TODO replication with routers
// val tweetActorPool = actorSystem.actorOf(FromConfig.props(Props[TweetActor]), "tweet-router")
val tweetActorPool = actorSystem.actorOf(Props[TweetActor])
// val connectedClientsActorPool = actorSystem.actorOf(FromConfig.props(Props[ConnectedClientsActor]), "connection-router")
val socialNetworkActorPool = actorSystem.actorOf(Props[SocialNetworkActor])
// val socialNetworkActorPool = actorSystem.actorOf(FromConfig.props(Props[SocialNetworkActor]), "social-network-router")
val connectedClientsActorPool = actorSystem.actorOf(Props(classOf[ConnectedClientsActor], socialNetworkActorPool))
// val clientHandlerPool = actorSystem.actorOf(FromConfig.props(Props(classOf[ClientHandlerActor], connectedClientsActorPool, socialNetworkActorPool, tweetActorPool)), "client-handler-router")
// static content handlers
val staticHandlerConfig = StaticContentHandlerConfig(actorSystem)
val staticContentHandlerRouter = actorSystem.actorOf(Props(new StaticContentHandler(staticHandlerConfig))
.withRouter(FromConfig()).withDispatcher("static-dispatcher"), "static-file-router")
object StaticContentHandlerConfig extends ExtensionId[StaticContentHandlerConfig] with ExtensionIdProvider {
override def lookup = StaticContentHandlerConfig
override def createExtension(system: ExtendedActorSystem) =
new StaticContentHandlerConfig(system.settings.config, "static-content-handler")
}
def onHandshakeComplete(socketId: String): Unit = {
System.out.println(s"Web Socket $socketId connected")
}
  def onSocketClose(socketId: String): Unit = {
    // The client handler is created lazily on the first frame, so it may not exist yet.
    clients.get(socketId).foreach(_ ! DisconnectUser)
    clients -= socketId
  }
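  // Routing overview: plain HTTP GETs are served by the static content handler
  // ("/" maps to index.html); WebSocket handshakes on /chat/ are authorized with
  // the callbacks above; each frame is forwarded to a per-socket handler actor,
  // created lazily when the first frame for an unknown socket id arrives.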
val routes = Routes({
case HttpRequest(request) => request match {
case GET(Path("/")) =>
staticContentHandlerRouter ! new StaticResourceRequest(request, settings.getString("static-dir") + "/index.html")
case GET(Path(path)) =>
staticContentHandlerRouter ! new StaticResourceRequest(request, settings.getString("static-dir") + path)
}
case WebSocketHandshake(handshake) => handshake match {
case Path("/chat/") =>
handshake.authorize(
onComplete = Some(onHandshakeComplete),
onClose = Some(onSocketClose))
}
case WebSocketFrame(wsFrame) =>
if (clients.contains(wsFrame.webSocketId)) {
clients(wsFrame.webSocketId) ! wsFrame
}
else {
val socketId = wsFrame.webSocketId
val handler = actorSystem.actorOf(Props(classOf[ClientHandlerActor], wsFrame, connectedClientsActorPool, socialNetworkActorPool, tweetActorPool))
clients += (socketId -> handler)
handler ! wsFrame
println(s"Created client handler for $socketId")
}
})
val webServer = new WebServer(WebServerConfig(port = settings.getInt("port")), routes, actorSystem)
webServer.start()
}
|
TheHipbot/SocialNetwork
|
src/main/scala/socialnetwork/server/Server.scala
|
Scala
|
mit
| 3,666 |
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
* */
package io.github.mandar2812.dynaml.kernels
import scala.reflect.ClassTag
import breeze.linalg.DenseMatrix
import io.github.mandar2812.dynaml.algebra.PartitionedPSDMatrix
import io.github.mandar2812.dynaml.pipes._
import spire.algebra.InnerProductSpace
/**
* Scalar Kernel defines algebraic behavior for kernels of the form
* K: Index x Index -> Double, i.e. kernel functions whose output
* is a scalar/double value. Generic behavior for these kernels
* is given by the ability to add and multiply valid kernels to
* create new valid scalar kernel functions.
*
* */
trait LocalScalarKernel[Index] extends
CovarianceFunction[Index, Double, DenseMatrix[Double]]
with Serializable { self =>
var (rowBlocking, colBlocking): (Int, Int) = (1000, 1000)
def setBlockSizes(s: (Int, Int)): Unit = {
rowBlocking = s._1
colBlocking = s._2
}
/**
* Create composite kernel k = k<sub>1</sub> + k<sub>2</sub>
*
    * @param otherKernel The kernel to add to the current one.
    * @return The kernel k defined above.
*
* */
def +[T <: LocalScalarKernel[Index]](otherKernel: T)(implicit ev: ClassTag[Index]): CompositeCovariance[Index] =
new AdditiveCovariance[Index](this, otherKernel)
/**
* Create composite kernel k = k<sub>1</sub> * k<sub>2</sub>
*
* @param otherKernel The kernel to multiply to the current one.
* @return The kernel k defined above.
*
* */
def *[T <: LocalScalarKernel[Index]](otherKernel: T)(implicit ev: ClassTag[Index]): CompositeCovariance[Index] =
new MultiplicativeCovariance[Index](this, otherKernel)
/**
* Returns the kernel multiplied by a positive constant: k_new = k*c
* */
def *(c: Double): LocalScalarKernel[Index] = {
require (c > 0, "Multiplicative constant applied on a kernel must be positive!")
new LocalScalarKernel[Index] {
override val hyper_parameters = self.hyper_parameters
state = self.state
blocked_hyper_parameters = self.blocked_hyper_parameters
override def setHyperParameters(h: Map[String, Double]) = {
self.setHyperParameters(h)
super.setHyperParameters(h)
}
override def gradientAt(config: Map[String, Double])(x: Index, y: Index) =
self.gradientAt(config)(x, y).map(co => (co._1, co._2*c))
override def evaluateAt(config: Map[String, Double])(x: Index, y: Index) =
self.evaluateAt(config)(x, y)*c
}
}
/**
* Construct a 2 layer kernel K = k1 > rbf
* */
def >[K <: GenericRBFKernel[Index]](otherKernel: K): CompositeCovariance[Index] = {
new CompositeCovariance[Index] {
override val hyper_parameters = self.hyper_parameters ++ otherKernel.hyper_parameters
override def evaluateAt(config: Map[String, Double])(x: Index, y: Index) = {
        val arg = self.evaluateAt(config)(x,x) +
self.evaluateAt(config)(y,y) -
2.0*self.evaluateAt(config)(x,y)
math.exp(-1.0*arg/(2.0*math.pow(config("bandwidth"), 2.0)))
}
state = self.state ++ otherKernel.state
override def gradientAt(config: Map[String, Double])(x: Index, y: Index): Map[String, Double] = {
        val arg = self.evaluateAt(config)(x,x) +
self.evaluateAt(config)(y,y) -
2.0*self.evaluateAt(config)(x,y)
val gradx = self.gradientAt(config)(x,x)
val grady = self.gradientAt(config)(y,y)
val gradxy = self.gradientAt(config)(x,y)
Map("bandwidth" ->
otherKernel.evaluateAt(config)(x,y)*arg/math.pow(math.abs(config("bandwidth")), 3)
) ++
gradxy.map((s) => {
          val ans = (-2.0*s._2 + gradx(s._1) + grady(s._1))/(2.0*math.pow(config("bandwidth"), 2.0))
(s._1, -1.0*otherKernel.evaluateAt(config)(x,y)*ans)
})
}
override def setHyperParameters(h: Map[String, Double]) = {
self.setHyperParameters(h)
otherKernel.setHyperParameters(h)
super.setHyperParameters(h)
}
override def buildKernelMatrix[S <: Seq[Index]](mappedData: S, length: Int) =
SVMKernel.buildSVMKernelMatrix(mappedData, length, this.evaluate)
override def buildCrossKernelMatrix[S <: Seq[Index]](dataset1: S, dataset2: S) =
SVMKernel.crossKernelMatrix(dataset1, dataset2, this.evaluate)
}
}
/**
* Construct the kronecker product kernel
* */
def :*[T1](otherKernel: LocalScalarKernel[T1]): KroneckerProductKernel[Index, T1] =
new KroneckerProductKernel[Index, T1](this, otherKernel)
def :+[T1](otherKernel: LocalScalarKernel[T1]): CompositeCovariance[(Index, T1)] =
new TensorCombinationKernel[Index, T1](this, otherKernel)(Reducer.:+:)
override def buildKernelMatrix[S <: Seq[Index]](mappedData: S, length: Int): KernelMatrix[DenseMatrix[Double]] =
SVMKernel.buildSVMKernelMatrix[S, Index](mappedData, length, this.evaluate)
override def buildCrossKernelMatrix[S <: Seq[Index]](dataset1: S, dataset2: S) =
SVMKernel.crossKernelMatrix(dataset1, dataset2, this.evaluate)
def buildBlockedKernelMatrix[S <: Seq[Index]](mappedData: S, length: Long): PartitionedPSDMatrix =
SVMKernel.buildPartitionedKernelMatrix(mappedData, length, rowBlocking, colBlocking, this.evaluate)
def buildBlockedCrossKernelMatrix[S <: Seq[Index]](dataset1: S, dataset2: S) =
SVMKernel.crossPartitonedKernelMatrix(dataset1, dataset2, rowBlocking, colBlocking, this.evaluate)
/**
* Get a pipeline which when given a particular
* configuration of hyper-parameters returns this kernel function
* set with that configuration.
* */
def asPipe: DataPipe[Map[String, Double], LocalScalarKernel[Index]] =
DataPipe((config: Map[String, Double]) => {
CovarianceFunction(evaluateAt _)(config)
})
}
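/*
 * Usage sketch for the kernel algebra above (illustrative only; RBFKernel and
 * PolynomialKernel stand for concrete kernels defined elsewhere in this
 * package, and their constructor arguments here are assumptions):
 *
 *   val k1 = new RBFKernel(1.5)
 *   val k2 = new PolynomialKernel(2, 0.5)
 *
 *   val sum     = k1 + k2    // AdditiveCovariance: (x, y) => k1(x, y) + k2(x, y)
 *   val product = k1 * k2    // MultiplicativeCovariance: (x, y) => k1(x, y) * k2(x, y)
 *   val scaled  = k1 * 2.0   // the same kernel scaled by a positive constant
 *   val stacked = k2 > k1    // two-layer kernel: an RBF applied on top of k2
 */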
abstract class CompositeCovariance[T] extends LocalSVMKernel[T]
object CompositeCovariance {
val truncateState = (kv: (String, Double)) => (kv._1.split("/").tail.mkString("/"), kv._2)
val truncateHyp = (k: String) => k.split("/").tail.mkString("/")
}
/**
* @author mandar2812 date: 22/01/2017
*
* A kernel represented as a dot product of an explicit feature mapping.
*
* @param p Feature map to be applied on input.
* */
class FeatureMapCovariance[T, U](p: DataPipe[T, U])(implicit e: InnerProductSpace[U, Double])
extends LocalSVMKernel[T] { self =>
val phi = p
override val hyper_parameters = List.empty[String]
override def evaluateAt(config: Map[String, Double])(x: T, y: T) = e.dot(phi(x), phi(y))
/**
* Construct a multi-layer kernel
* */
def >(other: LocalScalarKernel[U]): CompositeCovariance[T] =
new CompositeCovariance[T] {
override val hyper_parameters = other.hyper_parameters
blocked_hyper_parameters = other.blocked_hyper_parameters
state = other.state
override def evaluateAt(config: Map[String, Double])(x: T, y: T) =
other.evaluateAt(config)(self.phi(x), self.phi(y))
override def setHyperParameters(h: Map[String, Double]) = {
other.setHyperParameters(h)
super.setHyperParameters(h)
}
override def gradientAt(config: Map[String, Double])(x: T, y: T) =
other.gradientAt(config)(self.phi(x), self.phi(y))
}
/**
* Construct a multi-layer feature map kernel
* */
def >[V](other: FeatureMapCovariance[U, V])(
implicit e1: InnerProductSpace[V, Double]): FeatureMapCovariance[T, V] =
new FeatureMapCovariance[T, V](self.phi > other.phi)
}
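/*
 * Usage sketch (illustrative; assumes an InnerProductSpace[DenseVector[Double], Double]
 * instance is available in implicit scope, e.g. from DynaML's algebra implicits):
 *
 *   import breeze.linalg.DenseVector
 *
 *   val phi = DataPipe((x: Double) => DenseVector(x, x * x))
 *   val k   = new FeatureMapCovariance(phi)
 *   // k(x, y) = <phi(x), phi(y)> = x*y + (x*y)^2
 */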
|
transcendent-ai-labs/DynaML
|
dynaml-core/src/main/scala/io/github/mandar2812/dynaml/kernels/LocalScalarKernel.scala
|
Scala
|
apache-2.0
| 8,250 |
package test
object logTest extends App {
def f() = {
println(log.currentFileName)
println(log.currentLine)
println(log.currentPackage)
println(log.currentClassName)
println(log.currentFuncName)
}
f()
}
|
seraekim/srkim-lang-scala
|
src/main/java/test/logTest.scala
|
Scala
|
bsd-3-clause
| 228 |
/*
* Copyright 2017 PayPal
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.squbs.resolver
import java.net.URI
import akka.actor.ActorSystem
import org.squbs.env.{DEV, Default, Environment}
class DummyServiceResolver(implicit system: ActorSystem) extends Resolver[URI] {
override def resolve(svcName: String, env: Environment): Option[URI] = {
if (svcName == name) Some(URI.create("http://www.google.com"))
else None
}
override def name: String = "DummyService"
}
class DummyLocalhostResolver(implicit system: ActorSystem) extends Resolver[URI] {
override def resolve(svcName: String, env: Environment = Default): Option[URI] = {
require(svcName != null, "Service name cannot be null")
require(svcName.length > 0, "Service name must not be blank")
env match {
case Default | DEV => Some(URI.create("http://localhost:8080"))
case _ => throw new RuntimeException("DummyLocalhostResolver cannot support " + env + " environment")
}
}
override def name: String = "DummyLocalhostResolver"
}
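/*
 * Usage sketch (not part of the original source):
 *
 *   implicit val system = ActorSystem("test")
 *   new DummyLocalhostResolver().resolve("anyService", DEV) // Some(http://localhost:8080)
 *   new DummyLocalhostResolver().resolve("anyService")      // same result via the Default environment
 *   new DummyServiceResolver().resolve("DummyService", DEV) // Some(http://www.google.com)
 */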
|
akara/squbs
|
squbs-ext/src/test/scala/org/squbs/resolver/DummyEndpointResolver.scala
|
Scala
|
apache-2.0
| 1,575 |
/*
* Skylark
* http://skylark.io
*
* Copyright 2012-2017 Quantarray, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.quantarray.skylark.measure
import scala.annotation.tailrec
import scala.language.implicitConversions
/**
* Measure.
*
* The guiding principle(s) of design is and should be:
*
* 1. Construction of a measure should be fast, without any recursion/iteration to perform simplification.
* 2. Compute-intensive methods, such as simplify, perform simplification and should be called only when necessary.
*
* @author Araik Grigoryan
*/
trait Measure[Self <: Measure[Self]] extends AnyMeasure
{
self: Self =>
type D <: Dimension[D]
/**
* Gets dimension of this measure.
*/
def dimension: D
def base: Option[(Self, Double)]
lazy val immediateBase = base.map(_._2).getOrElse(1.0)
lazy val ultimateBase: Option[(Self, Double)] =
{
@tailrec
def descend(measure: Option[Self], parent: Option[Self], multiple: Double): Option[(Self, Double)] =
{
measure match
{
case None => parent.map((_, multiple))
case Some(x) => descend(x.base.map(_._1), measure, x.base.map(_._2).getOrElse(1.0) * multiple)
}
}
descend(Some(this), None, 1.0)
}
def composes(name: String, system: SystemOfUnits, multiple: Double): Self
def composes(name: String, multiple: Double): Self = composes(name, system, multiple)
/**
* Adds another measure. CanAdd instance allows addition of apples and oranges to obtain bananas.
*/
def +[M2 <: Measure[M2]](addend: M2)(implicit ca: CanAddMeasure[Self, M2]): ca.R = ca.plus(this, addend)
/**
 * Subtracts another measure. At the measure (unit) level the result combines exactly as in addition, hence the reuse of CanAddMeasure.plus.
*/
def -[M2 <: Measure[M2]](subtrahend: M2)(implicit ca: CanAddMeasure[Self, M2]): ca.R = ca.plus(this, subtrahend)
/**
* Divides by another measure.
*/
def /[M2 <: Measure[M2], R](denominator: M2)(implicit cd: CanDivideMeasure[Self, M2, R]): R = cd.divide(this, denominator)
/**
* Multiplies by another measure.
*/
def *[M2 <: Measure[M2], R](multiplier: M2)(implicit cm: CanMultiplyMeasure[Self, M2, R]): R = cm.times(this, multiplier)
/**
* Exponentiates this measure.
*/
def ^[R <: Measure[R]](exponent: Double)(implicit ce: CanExponentiateMeasure[Self, R]): R = ce.pow(this, exponent)
/**
* Gets an inverse of this measure.
*/
def inverse[R <: Measure[R]](implicit ce: CanExponentiateMeasure[Self, R]) = this ^ -exponent
/**
* Converts to target measure.
*/
def to[M2 <: Measure[M2]](target: M2)(implicit cc: CanConvert[Self, M2]): Option[Double] = cc.convert(this, target)
/**
* Converts to target measure with default value.
*/
def toOrElse[M2 <: Measure[M2], B >: Double](target: M2, default: B)(implicit cc: CanConvert[Self, M2]): B = to(target).getOrElse(default)
/**
* Attempts to simplify to target type.
*/
def simplify[R <: Measure[R]](implicit cs: CanSimplifyMeasure[Self, Option[R]]): Option[R] = cs.simplify(this)
}
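/*
 * A usage sketch of the operator API above. The measures `kg`, `m`, `s` and
 * `hour` are hypothetical stand-ins for concrete measures defined elsewhere in
 * this package, and the implicit Can* instances are assumed to be in scope.
 *
 *   val area = m * m              // ProductMeasure via CanMultiplyMeasure
 *   val density = kg / (m ^ 3.0)  // RatioMeasure via CanDivideMeasure
 *   val inHours = s.to(hour)      // Option[Double] via CanConvert
 *
 * Per design principle 1, none of these expressions simplify eagerly; call
 * `simplify` explicitly when a canonical form is needed.
 */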
/**
* Product measure.
*/
trait ProductMeasure[M1 <: Measure[M1], M2 <: Measure[M2]] extends Measure[ProductMeasure[M1, M2]] with AnyProductMeasure
{
val multiplicand: M1
val multiplier: M2
type D = ProductDimension[multiplicand.D, multiplier.D]
override lazy val dimension = ProductDimension(multiplicand.dimension, multiplier.dimension)
override val isStructuralAtom = false
}
object ProductMeasure
{
def apply[M1 <: Measure[M1], M2 <: Measure[M2]](multiplicand: M1, multiplier: M2): ProductMeasure[M1, M2] =
{
val params = (multiplicand, multiplier)
new ProductMeasure[M1, M2]
{
lazy val multiplicand: M1 = params._1
lazy val multiplier: M2 = params._2
lazy val name = s"${multiplicand.structuralName} * ${multiplier.structuralName}"
override def base: Option[(ProductMeasure[M1, M2], Double)] = None
override def composes(name: String, system: SystemOfUnits, multiple: Double): ProductMeasure[M1, M2] = this
override def equals(obj: scala.Any): Boolean = obj match
{
case that: ProductMeasure[_, _] => this.multiplicand == that.multiplicand && this.multiplier == that.multiplier
case _ => false
}
override def hashCode(): Int = 41 * multiplicand.hashCode() + multiplier.hashCode()
private val productElements = Seq(multiplicand, multiplier)
override def productElement(n: Int): Any = productElements(n)
val productArity: Int = productElements.size
override def canEqual(that: Any): Boolean = that.isInstanceOf[ProductMeasure[_, _]]
override def toString = name
}
}
def unapply[M1 <: Measure[M1], M2 <: Measure[M2]](pm: ProductMeasure[M1, M2]): Option[(M1, M2)] = Some((pm.multiplicand, pm.multiplier))
}
/**
* Ratio measure.
*/
trait RatioMeasure[M1 <: Measure[M1], M2 <: Measure[M2]] extends Measure[RatioMeasure[M1, M2]] with AnyRatioMeasure
{
val numerator: M1
val denominator: M2
type D = RatioDimension[numerator.D, denominator.D]
override lazy val dimension = RatioDimension(numerator.dimension, denominator.dimension)
override val isStructuralAtom = false
/**
* Converts to target measure.
*/
def to[M3 <: Measure[M3], M4 <: Measure[M4]](target: RatioMeasure[M3, M4])
(implicit ccn: CanConvert[M1, M3], ccd: CanConvert[M2, M4]): Option[Double] =
{
(numerator.to(target.numerator), denominator.to(target.denominator)) match
{
case (Some(n), Some(d)) => Some(n / d)
case _ => None
}
}
}
object RatioMeasure
{
def apply[M1 <: Measure[M1], M2 <: Measure[M2]](numerator: M1, denominator: M2): RatioMeasure[M1, M2] =
{
val params = (numerator, denominator)
new RatioMeasure[M1, M2]
{
lazy val numerator: M1 = params._1
lazy val denominator: M2 = params._2
lazy val name = s"${numerator.structuralName} / ${denominator.structuralName}"
override def base: Option[(RatioMeasure[M1, M2], Double)] = None
override def composes(name: String, system: SystemOfUnits, multiple: Double): RatioMeasure[M1, M2] = this
override def equals(obj: scala.Any): Boolean = obj match
{
case that: RatioMeasure[_, _] => this.numerator == that.numerator && this.denominator == that.denominator
case _ => false
}
override def hashCode(): Int = 41 * numerator.hashCode() + denominator.hashCode()
private val productElements = Seq(numerator, denominator)
override def productElement(n: Int): Any = productElements(n)
val productArity: Int = productElements.size
override def canEqual(that: Any): Boolean = that.isInstanceOf[RatioMeasure[_, _]]
override def toString = name
}
}
def unapply[M1 <: Measure[M1], M2 <: Measure[M2]](rm: RatioMeasure[M1, M2]): Option[(M1, M2)] = Some((rm.numerator, rm.denominator))
}
/**
* Exponential measure.
*/
trait ExponentialMeasure[B <: Measure[B]] extends Measure[ExponentialMeasure[B]] with AnyExponentialMeasure
{
val expBase: B
type D = ExponentialDimension[expBase.D]
override lazy val dimension = ExponentialDimension(expBase.dimension, exponent)
override val isStructuralAtom = false
val lift: Option[B] = if (exponent == 1.0) Some(expBase) else None
}
object ExponentialMeasure
{
def apply[B <: Measure[B]](expBase: B, exponent: Double, name: Option[String] = None): ExponentialMeasure[B] =
{
val params = (expBase, exponent, name)
new ExponentialMeasure[B]
{
lazy val expBase: B = params._1
override def exponent: Double = params._2
val name = params._3.getOrElse(baseName)
override def base: Option[(ExponentialMeasure[B], Double)] = None
override def composes(name: String, system: SystemOfUnits, multiple: Double): ExponentialMeasure[B] = ExponentialMeasure(expBase, exponent, Some(name))
override def equals(obj: scala.Any): Boolean = obj match
{
case that: ExponentialMeasure[_] => this.expBase == that.expBase && this.exponent == that.exponent
case _ => false
}
override def hashCode(): Int = 41 * expBase.hashCode() + exponent.hashCode()
private val productElements = Seq(expBase, exponent)
override def productElement(n: Int): Any = productElements(n)
val productArity: Int = productElements.size
override def canEqual(that: Any): Boolean = that.isInstanceOf[ExponentialMeasure[_]]
override def toString = name
}
}
def unapply[B <: Measure[B]](em: ExponentialMeasure[B]): Option[(B, Double)] = Some((em.expBase, em.exponent))
}
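/*
 * A construction/extraction sketch for the composite measures above, with
 * hypothetical leaf measures `m` and `s`:
 *
 *   val velocity = RatioMeasure(m, s)
 *   velocity match { case RatioMeasure(n, d) => println(s"$n per $d") }
 *
 *   val cubicMeter = ExponentialMeasure(m, 3.0)
 *   val ExponentialMeasure(base, exp) = cubicMeter   // base == m, exp == 3.0
 *
 * Equality is structural: RatioMeasure(m, s) == RatioMeasure(m, s) holds even
 * though apply returns a fresh anonymous instance each time.
 */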
|
quantarray/skylark
|
skylark-measure/src/main/scala/com/quantarray/skylark/measure/Measure.scala
|
Scala
|
apache-2.0
| 9,138 |
package scalan.primitives
import scalan._
import scala.reflect.runtime.universe._
import scalan.util.CollectionUtil._
import scalan.common.OverloadHack._
import scalan.compilation.{GraphVizConfig, GraphVizExport}
import scalan.staged.Expressions
/**
The code is inspired by LMS structs and is used in Scalan with the same semantics
in order to easily translate operations to the equivalents via LmsBridge.
Their usage in Scalan is limited to be consistent with functional semantics of Scalan.
Don't expect everything possible in LMS to be also possible in Scalan in the same way.
There are changes in the code:
- Sym -> Exp
- Manifest -> Elem
- infix -> implicit class
- no SourceContext, withPos
- mirroring implemented in Scalan way (though consistent with LMS)
*/
trait Structs extends Base { self: StructsDsl with Scalan =>
}
trait StructsDsl extends Structs with StructItemsDsl with StructKeysDsl { self: StructsDsl with Scalan =>
// TODO consider if T type parameter is needed here and for AbstractStruct
// It's only useful if we'll have some static typing on structs later (Shapeless' records?)
abstract class StructTag[T <: Struct](implicit val typeTag: TypeTag[T]) {
override def equals(other: Any): Boolean =
!!!("StructTag.equals must be overridden so that the outer instances aren't compared")
}
case class SimpleTag[T <: Struct](name: String)(implicit typeTag: TypeTag[T]) extends StructTag[T] {
override def equals(other: Any) = other match {
case tag: StructsDsl#SimpleTag[_] => name == tag.name && typeTag == tag.typeTag
case _ => false
}
}
object SimpleTag {
def apply[T <: Struct](implicit tag: TypeTag[T]): SimpleTag[T] = SimpleTag[T](tag.tpe.typeSymbol.name.toString)
}
val defaultStructTag = SimpleTag[Struct]
protected def baseStructName(tag: StructTag[_]) = tag match {
case `defaultStructTag` => ""
case SimpleTag(name) => s"$name "
// Intentionally no case _, add something here or override when extending StructTag!
}
type StructField = (String, Rep[Any])
trait Struct {
def tag: StructTag[_] // TODO add type argument?
// def keys: Rep[KeySet]
// def values: Rep[HList]
def fields: Seq[StructField]
}
case class StructElem[T <: Struct](structTag: StructTag[T], fields: Seq[(String, Elem[_])]) extends Elem[T] {
override def isEntityType = fields.exists(_._2.isEntityType)
lazy val tag = structTag.typeTag
protected def getDefaultRep =
struct(structTag, fields.map { case (fn,fe) => (fn, fe.defaultRepValue) }: _*)
def get(fieldName: String): Option[Elem[_]] = fields.find(_._1 == fieldName).map(_._2)
def apply(fieldIndex: Int): Elem[_] = fields(fieldIndex)._2
def apply(fieldName: String): Elem[_] = fields.find(_._1 == fieldName).map(_._2).get
def fieldNames = fields.map(_._1)
def fieldElems: Seq[Elem[_]] = fields.map(_._2)
def isEqualType(tuple: Seq[Elem[_]]) = {
fields.length == tuple.length && fields.zip(tuple).forall { case ((fn,fe), e) => fe == e }
}
override def getName = {
s"${baseStructName(structTag)}$fieldsString"
}
def fieldsString = s"{${fields.map { case (fn,fe) => s"$fn: ${fe.name}"}.mkString("; ")}}"
def findFieldIndex(fieldName: String): Int = fields.iterator.map(_._1).indexOf(fieldName)
}
implicit def StructElemExtensions[T <: Struct](e: Elem[T]): StructElem[T] = e.asInstanceOf[StructElem[T]]
def structElement[T <: Struct](tag: StructTag[T], fields: Seq[(String, Elem[_])]): StructElem[T] =
if (cacheElems)
cachedElem[StructElem[T]](tag, fields)
else
StructElem(tag, fields)
def structElement(fields: Seq[(String, Elem[_])]): StructElem[Struct] =
structElement(defaultStructTag, fields)
def structElementFor[T <: Struct : TypeTag](fields: Seq[(String, Elem[_])]): StructElem[T] =
structElement(SimpleTag[T], fields)
/**
* Get tuple field name by index
*/
def tupleFN(fieldIndex: Int) = s"_$fieldIndex"
def tupleStructElement(fieldElems: Elem[_]*)(implicit o: Overloaded1): StructElem[Struct] = {
val fields = fieldElems.zipWithIndex.map { case (f, i) => tupleFN(i + 1) -> f }
// TODO add tupleTag(n)?
structElement(defaultStructTag, fields)
}
def tuple2StructElement[A:Elem, B:Elem]: StructElem[Struct] =
tupleStructElement(element[A], element[B])
def tuple3StructElement[A:Elem, B:Elem, C:Elem]: StructElem[Struct] =
tupleStructElement(element[A], element[B], element[C])
case class StructToPairIso[A1, A2, B1, B2](iso1: Iso[A1, B1], iso2: Iso[A2, B2])
extends IsoUR[Struct, (B1, B2)] {
override def equals(other: Any) = other match {
case iso: StructsDsl#StructToPairIso[_, _, _, _] =>
(this eq iso) || (iso1 == iso.iso1 && iso2 == iso.iso2)
case _ => false
}
implicit def eA1 = iso1.eFrom
implicit def eA2 = iso2.eFrom
implicit def eB1 = iso1.eTo
implicit def eB2 = iso2.eTo
lazy val eFrom = tuple2StructElement(iso1.eFrom, iso2.eFrom)
lazy val eTo = element[(B1, B2)]
lazy val selfType = new ConcreteIsoElem[Struct, (B1, B2), StructToPairIso[A1, A2, B1, B2]](eFrom, eTo).
asElem[IsoUR[Struct, (B1, B2)]]
override def from(p: Rep[(B1, B2)]) =
struct(tupleFN(1) -> iso1.from(p._1), tupleFN(2) -> iso2.from(p._2))
override def to(struct: Rep[Struct]) = {
Pair(iso1.to(struct(1).asRep[A1]), iso2.to(struct(2).asRep[A2]))
}
}
def structToPairIso[A1, A2, B1, B2](iso1: Iso[A1, B1], iso2: Iso[A2, B2]): Iso[Struct, (B1, B2)] =
reifyObject(StructToPairIso[A1, A2, B1, B2](iso1, iso2))
def structToPairIso[A:Elem,B:Elem]: Iso[Struct, (A, B)] = structToPairIso[A,B,A,B](identityIso[A], identityIso[B])
def structToPairIso[A,B](pe: Elem[(A,B)]): Iso[Struct, (A, B)] = structToPairIso[A,B](pe.eFst, pe.eSnd)
case class StructIso[S <: Struct, T <: Struct](eFrom: StructElem[S], eTo: StructElem[T], itemIsos: Seq[Iso[_,_]])
extends IsoUR[S, T] {
assert(eFrom.isEqualType(itemIsos.map(_.eFrom)))
assert(eTo.isEqualType(itemIsos.map(_.eTo)))
override def equals(other: Any) = other match {
case iso: StructsDsl#StructIso[_, _] =>
(this eq iso) || (eFrom == iso.eFrom && eTo == iso.eTo)
case _ => false
}
override def from(y: Rep[T]) = {
val items = eFrom.fields.zip(eTo.fields).zip(itemIsos).map {
case (((fnS, feS), (fnT, feT)), iso: Iso[s,t] @unchecked) =>
fnS -> iso.from(y(fnT).asRep[t])
}
struct(items).asRep[S]
}
override def to(x: Rep[S]) = {
val items = eFrom.fields.zip(eTo.fields).zip(itemIsos).map {
case (((fnS, feS), (fnT, feT)), iso: Iso[s,t] @unchecked) =>
fnT -> iso.to(x(fnS).asRep[s])
}
struct(items).asRep[T]
}
lazy val selfType = new ConcreteIsoElem[S, T, StructIso[S, T]](eFrom, eTo).asElem[IsoUR[S, T]]
}
def structIso[S <: Struct, T <: Struct](eFrom: StructElem[S], eTo: StructElem[T], itemIsos: Seq[Iso[_,_]]): Iso[S, T] =
reifyObject(StructIso(eFrom, eTo, itemIsos))
implicit class StructOps(s: Rep[Struct]) {
def apply(iField: Int): Rep[_] = field(s, iField)
def apply(fieldName: String): Rep[_] = field(s, fieldName)
def getChar(fieldName: String): Rep[Char] = field(s, fieldName).asRep[Char]
def getFloat(fieldName: String): Rep[Float] = field(s, fieldName).asRep[Float]
def getDouble(fieldName: String): Rep[Double] = field(s, fieldName).asRep[Double]
def getInt(fieldName: String): Rep[Int] = field(s, fieldName).asRep[Int]
def getLong(fieldName: String): Rep[Long] = field(s, fieldName).asRep[Long]
def getString(fieldName: String): Rep[String] = field(s, fieldName).asRep[String]
def getBoolean(fieldName: String): Rep[Boolean] = field(s, fieldName).asRep[Boolean]
def getByte(fieldName: String): Rep[Byte] = field(s, fieldName).asRep[Byte]
def getUnit(fieldName: String): Rep[Unit] = field(s, fieldName).asRep[Unit]
def getShort(fieldName: String): Rep[Short] = field(s, fieldName).asRep[Short]
}
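/*
 * A small usage sketch of struct construction and field access in this staged
 * DSL (`x` and `y` are hypothetical Rep values already in scope):
 *
 *   val s = struct("width" -> x, "height" -> y)
 *   val w = s.getInt("width")   // typed accessor from StructOps
 *   val h = s("height")         // untyped Rep[_] by field name
 *
 *   val t = tupleStruct(x, y)   // fields named _1, _2 via tupleFN (1-based)
 *   val fst = t(1)              // same as t("_1")
 */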
def struct(fields: StructField*)(implicit o: Overloaded1): Rep[Struct] = struct(fields)
def struct(fields: Seq[StructField]): Rep[Struct] = struct(defaultStructTag, fields)
def struct[T <: Struct](tag: StructTag[T], fields: StructField*)(implicit o: Overloaded1): Rep[T] =
struct(tag, fields)
def struct[T <: Struct](tag: StructTag[T], fields: Seq[StructField]): Rep[T]
def tupleStruct(items: Rep[_]*): Rep[Struct] = {
val fields = items.zipWithIndex.map { case (f, i) => tupleFN(i + 1) -> f }
struct(defaultStructTag, fields)
}
def field(struct: Rep[Struct], field: String): Rep[_]
def updateField[S <: Struct](struct: Rep[S], fieldName: String, v: Rep[Any]): Rep[S]
def field(struct: Rep[Struct], fieldIndex: Int): Rep[_] = field(struct, tupleFN(fieldIndex))
def fields(struct: Rep[Struct], fields: Seq[String]): Rep[Struct]
case class Link(field: String, nestedField: String, nestedElem: Elem[_], flatName: String)
case class FlatteningIso[T <: Struct](eTo: StructElem[T], flatIsos: Map[String, Iso[_,_]], links: Seq[Link])
extends IsoUR[Struct,T] {
override def equals(other: Any) = other match {
case iso: StructsDsl#FlatteningIso[_] =>
(this eq iso) || (eFrom == iso.eFrom && eTo == iso.eTo)
case _ => false
}
val eFrom = tupleStructElement(links.map(_.nestedElem): _*)
lazy val selfType = new ConcreteIsoElem[Struct, T, FlatteningIso[T]](eFrom, eTo).asElem[IsoUR[Struct, T]]
val groups = links.groupBy(_.field)
def to(x: Rep[Struct]) = {
val items = eTo.fields.map { case (fn, fe) =>
val g = groups(fn)
flatIsos.get(fn) match {
case Some(iso: Iso[a, _] @unchecked) =>
val projectedStruct = struct(g.map(link => (link.nestedField -> x(link.flatName))): _*)
val s = iso.to(projectedStruct.asRep[a])
(fn -> s)
case _ =>
assert(g.length == 1, s"Many fields $g can't relate to the single field $fn without iso")
(fn -> x(g(0).flatName))
}
}
struct(eTo.structTag, items: _*)
}
def from(y: Rep[T]) = {
val items = eTo.fields.flatMap { case (fn, fe) =>
val g = groups(fn)
flatIsos.get(fn) match {
case Some(iso: Iso[_, a] @unchecked) =>
val nestedStruct = iso.from(y(fn).asRep[a]).asRep[Struct]
// nestedStruct is guaranteed to be a Rep[Struct], because iso can be either IdentityIso on a struct or FlatteningIso
g.map { link =>
link.flatName -> nestedStruct(link.nestedField)
}
case _ =>
List(g(0).flatName -> y(fn))
}
}
struct(items: _*)
}
}
/**
* Flattens all subtrees of structs in [[e]].
* Types other than structs are considered either as internal nodes or as leaves.
* @param e descriptor of struct type
* @return an isomorphism whose `eTo` is [[e]] and whose `eFrom` is the flattened form of [[e]],
* preserving the relative order of the components
*/
def getFlatteningIso[T](e: Elem[T]): Iso[_,T] = e match {
// a == T, but Scala can't infer the type bound if T is used below
case se: StructElem[a] @unchecked =>
val flatIso = flatteningIso(se).asInstanceOf[Iso[_, T]]
flatIso match {
case Def(_: IdentityIso[T] @unchecked) =>
flatIso
case Def(_: FlatteningIso[T] @unchecked) =>
flatIso.eFrom match {
// TODO Actually, we currently know s == Struct. Is extra complexity needed?
case eFrom: StructElem[s] =>
val isos = eFrom.fields.map { case (fn,fe) => (fn, buildIso(fe, flatteningBuilder)) }
val eFromNew = structElement(isos.map { case (fn, iso) => fn -> iso.eFrom })
val sIso = reifyObject(new StructIso(eFromNew, eFrom, isos.map(_._2)))
sIso >> flatIso.asInstanceOf[Iso[s,T]]
}
}
case _ =>
buildIso(e, flatteningBuilder)
}
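/*
 * A flattening sketch (hypothetical element shapes, for illustration only):
 * given e = {a: {x: Int; y: Double}; b: Boolean}, getFlatteningIso(e) yields
 * an iso whose eFrom is the flat tuple struct {_1: Int; _2: Double; _3: Boolean},
 * preserving the left-to-right order of the leaves. A non-struct field such as
 * `b` maps to a single flat field (the g.length == 1 case in FlatteningIso).
 */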
val flatteningBuilder = new IsoBuilder { def apply[S](e: Elem[S]) = getFlatteningIso(e) }
def flatteningIso[T <: Struct](eTo: StructElem[T]): Iso[_, T] = {
val flatIsos: Map[String, Iso[_, _]] = eTo.fields.collect {
case (fn, fe: StructElem[_]) => (fn, flatteningIso(fe))
}.toMap
if (flatIsos.isEmpty)
return identityIso(eTo)
// relate resulting field types by original field name
val fromFields = eTo.fields.flatMap {
case (fn, fe) =>
flatIsos.get(fn) match {
case Some(iso) =>
iso.eFrom match {
case flatElem: StructElem[_] =>
flatElem.fields.map { case (nestedName, nestedE) => (fn, nestedName -> nestedE) }
case _ => !!!(s"StructElem is expected as eFrom of flattened Iso $iso")
}
case None => List((fn, "" -> fe))
}
}
val links =
fromFields.zipWithIndex.map {
case ((fn, (nestedN, nestedE)), i) => Link(fn, nestedN, nestedE, tupleFN(i + 1))
}
val res: Iso[_, T] = reifyObject(FlatteningIso(eTo, flatIsos, links))
res
}
def getStructToPairsIso[T](implicit e: Elem[T]): Iso[_,T] = (e match {
case pe: PairElem[a,b] =>
val iso1 = getStructToPairsIso(pe.eFst)
val iso2 = getStructToPairsIso(pe.eSnd)
val res = structToPairIso(iso1, iso2)
res
case _ =>
buildIso(e, new IsoBuilder {
def apply[S](e: Elem[S]) = {
getStructToPairsIso(e)
}
})
}).asInstanceOf[Iso[_,T]]
def getStructWrapperIso[T](implicit e: Elem[T]): Iso[_,T] = {
getStructToPairsIso(e) match {
case iso: Iso[s,T] @unchecked =>
val flatIso = getFlatteningIso[s](iso.eFrom)
flatIso >> iso
}
}
case class MergeIso[T <: Struct](eTo: StructElem[T]) extends IsoUR[Struct,T] {
override def equals(other: Any) = other match {
case iso: MergeIso[_] =>
(this eq iso) || (eFrom == iso.eFrom && eTo == iso.eTo)
case _ => false
}
val eFrom = structElement(eTo.fields.flatMap { case (fn, fe: StructElem[_]) => fe.fields })
lazy val selfType = new ConcreteIsoElem[Struct, T, MergeIso[T]](eFrom, eTo).asElem[IsoUR[Struct, T]]
def to(x: Rep[Struct]) = {
val items = eTo.fields.map { case (outerN, outerE: StructElem[_]) =>
val s = struct(outerE.fields.map { case (innerN, innerE) => innerN -> x(innerN) })
outerN -> s
}
struct(eTo.structTag, items: _*)
}
def from(y: Rep[T]) = {
val items = eTo.fields.flatMap { case (outerN, outerE: StructElem[_]) =>
val s = y(outerN)
outerE.fields.map { case (innerN, innerE) => innerN -> s.asRep[Struct](innerN) }
}
struct(items: _*)
}
}
def getStructMergeIso[T](implicit e: Elem[T]): Iso[_,T] = (e match {
case se: StructElem[_] =>
reifyObject(MergeIso(se.asElem[Struct]))
case _ =>
!!!(s"Don't know how merge non struct $e")
}).asInstanceOf[Iso[_,T]]
def tuplifyStruct[A <: Struct](se: Elem[A]): Elem[_] = {
val res = foldRight[(String,Elem[_]), Elem[_]](se.fields)(_._2) { case ((fn,fe), e) => pairElement(fe, e) }
res
}
def unzipMany[T](tuple: Rep[_], list: List[T]): List[Rep[_]] = {
val pair = tuple.asRep[(Any, Any)]
list match {
case Nil => List(tuple)
case x :: Nil => List(tuple)
case x :: y :: Nil => List(pair._1, pair._2)
case x :: xs =>
pair._1 :: unzipMany(pair._2, xs)
}
}
case class PairifyIso[A, AS <: Struct](eTo: Elem[AS]) extends IsoUR[A, AS] {
val eFrom: Elem[A] = tuplifyStruct(eTo).asElem[A]
def from(y: Rep[AS]) = {
val res = foldRight[String, Rep[_]](eTo.fieldNames)(fn => y(fn)) {
case (fn, s) => Pair(y(fn), s)
}
res.asRep[A]
}
override def to(x: Rep[A]) = {
val items = unzipMany(x, eTo.fields.toList)
val fields = eTo.fieldNames.zip(items.map(_.asRep[Any]))
struct(fields).asRep[AS]
}
override def equals(other: Any) = other match {
case iso: PairifyIso[_,_] =>
(this eq iso) || (eFrom == iso.eFrom && eTo == iso.eTo)
case _ => false
}
lazy val selfType = new ConcreteIsoElem[A, AS, PairifyIso[A, AS]](eFrom, eTo).asElem[IsoUR[A, AS]]
}
def structWrapper[A:Elem,B:Elem](f: Rep[A => B]): Rep[Any => Any] = {
val wrapperFun = (getStructWrapperIso[A], getStructWrapperIso[B]) match {
case (inIso: Iso[a, A] @unchecked, outIso: Iso[b, B] @unchecked) =>
outIso.fromFun << f << inIso.toFun
}
wrapperFun.asRep[Any => Any]
}
def structWrapperIn[A:Elem,B:Elem](f: Rep[A => B]): Rep[Any => B] = {
val inIso = getStructWrapperIso[A]
val wrapperFun = inIso.toFun >> f
wrapperFun.asRep[Any => B]
}
def structWrapperOut[A:Elem,B:Elem](f: Rep[A => B]): Rep[A => Any] = {
val outIso = getStructWrapperIso[B]
val wrapperFun = f >> outIso.fromFun
wrapperFun.asRep[A => Any]
}
}
trait StructsDslStd extends StructsDsl with StructItemsDslStd with StructKeysDslStd { self: StructsDslStd with ScalanStd =>
case class StructSeq[T <: Struct](tag: StructTag[T], fields: Seq[StructField]) extends Struct {
override def equals(other: Any) = other match {
case ss: StructsDslStd#StructSeq[_] =>
tag == ss.tag && fields.sameElements(ss.fields)
case p: Product =>
val items = p.productIterator
fields.iterator.map(_._2).sameElements(items)
case _ => false
}
def findFieldIndex(fieldName: String): Int = fields.iterator.map(_._1).indexOf(fieldName)
}
def struct[T <: Struct](tag: StructTag[T], fields: Seq[StructField]): Rep[T] =
StructSeq(tag, fields).asRep[T]
def field(struct: Rep[Struct], field: String): Rep[_] =
struct.asInstanceOf[StructSeq[_]].fields.find(_._1 == field) match {
case Some((_, value)) => value
case None => !!!(s"Field $field not found in structure $struct", struct)
}
def updateField[S <: Struct](struct: Rep[S], fieldName: String, v: Rep[Any]): Rep[S] = {
val s = struct.asInstanceOf[StructSeq[S]]
val buf = new scala.collection.mutable.ArrayBuffer[StructField](s.fields.length)
s.fields.copyToBuffer(buf)
val i = s.findFieldIndex(fieldName)
buf(i) = (buf(i)._1, v)
StructSeq(s.tag, buf).asInstanceOf[S]
}
def fields(struct: Rep[Struct], fields: Seq[String]): Rep[Struct] = {
val StructSeq(tag, fieldsInStruct) = struct
StructSeq(tag, fieldsInStruct.filter(fields.contains))
}
}
trait StructsDslExp extends StructsDsl with Expressions with FunctionsExp with EffectsExp with ViewsDslExp with StructItemsDslExp
with StructKeysDslExp with GraphVizExport { self: StructsDslExp with ScalanExp =>
abstract class AbstractStruct[T <: Struct] extends Def[T] {
def tag: StructTag[T]
def fields: Seq[StructField]
lazy val selfType = structElement(tag, fields.map { case (name, value) => (name, value.elem) })
}
// should this just extend BaseDef? Having field[T] would simplify usage in some cases
abstract class AbstractField[T] extends Def[T] {
def struct: Rep[Struct]
def field: String
lazy val selfType = Struct.getFieldElem(struct, field).asElem[T]
}
object Struct {
def getFieldElem(struct: Rep[Struct], fn: String): Elem[_] = struct.elem match {
case se: StructElem[_] =>
se.get(fn).get
case _ => !!!(s"Symbol with StructElem expected but found ${struct.elem}", struct)
}
def unapply[T <: Struct](d: Def[T]) = unapplyStruct(d)
}
def unapplyStruct[T <: Struct](d: Def[T]): Option[(StructTag[T], Seq[StructField])] = d match {
case s: AbstractStruct[T] => Some((s.tag, s.fields))
case _ => None
}
object Field {
def unapply[T](d: Def[T]) = unapplyField(d)
}
def unapplyField[T](d: Def[T]): Option[(Rep[Struct], String)] = d match {
case f: AbstractField[T] => Some((f.struct, f.field))
case _ => None
}
case class SimpleStruct[T <: Struct](tag: StructTag[T], fields: Seq[StructField]) extends AbstractStruct[T]
case class FieldApply[T](struct: Rep[Struct], field: String) extends AbstractField[T]
case class FieldUpdate[S <: Struct, T](struct: Rep[S], fieldName: String, value: Rep[T]) extends AbstractStruct[S] {
val tag = struct.elem.structTag
val fields = struct.elem.fields.map { case (fn, _) =>
if (fn == fieldName)
(fieldName, value)
else
(fn, field(struct, fn))
}
}
case class ProjectionStruct(struct: Rep[Struct], outFields: Seq[String]) extends AbstractStruct[Struct] {
def tag = defaultStructTag
val fields = outFields.map(fn => (fn, field(struct, fn)))
}
def struct[T <: Struct](tag: StructTag[T], fields: Seq[StructField]): Rep[T] = SimpleStruct(tag, fields)
def field(struct: Rep[Struct], field: String): Rep[_] = FieldApply[Any](struct, field) // TODO Any?
def updateField[S <: Struct](struct: Rep[S], fieldName: String, v: Rep[Any]): Rep[S] = FieldUpdate[S,Any](struct, fieldName, v)
def fields(struct: Rep[Struct], fields: Seq[String]): Rep[Struct] = ProjectionStruct(struct, fields)
override def syms(e: Any): List[Exp[Any]] = e match {
case s: ProjectionStruct => syms(s.struct)
case FieldUpdate(s, _, v) => syms(s) :+ v
case s: AbstractStruct[_] => s.fields.flatMap(e => this.syms(e._2)).toList
case _ => super.syms(e)
}
override def symsFreq(e: Any): List[(Exp[Any], Double)] = e match {
case s: ProjectionStruct => symsFreq(s.struct)
case s: AbstractStruct[_] => s.fields.flatMap(e => symsFreq(e._2)).toList
case _ => super.symsFreq(e)
}
override def effectSyms(e: Any): List[Exp[Any]] = e match {
case s: ProjectionStruct => effectSyms(s.struct)
case s: AbstractStruct[_] => s.fields.flatMap(e => effectSyms(e._2)).toList
case _ => super.effectSyms(e)
}
override def readSyms(e: Any): List[Exp[Any]] = e match {
case s: AbstractStruct[_] => Nil //struct creation doesn't de-reference any of its inputs
case _ => super.readSyms(e)
}
override def aliasSyms(e: Any): List[Exp[Any]] = e match {
case SimpleStruct(tag,fields) => Nil
case FieldUpdate(s, fn, v) => Nil
case FieldApply(s,x) => Nil
case _ => super.aliasSyms(e)
}
override def containSyms(e: Any): List[Exp[Any]] = e match {
case SimpleStruct(tag,fields) => fields.collect { case (k, v: Exp[_]) => v }.toList
case FieldUpdate(s, fn, v) => List(v)
case FieldApply(s,x) => Nil
case _ => super.containSyms(e)
}
override def extractSyms(e: Any): List[Exp[Any]] = e match {
case SimpleStruct(tag,fields) => Nil
case FieldUpdate(_,_,_) => Nil
case FieldApply(s,x) => syms(s)
case _ => super.extractSyms(e)
}
override def copySyms(e: Any): List[Exp[Any]] = e match {
case SimpleStruct(tag,fields) => Nil
case FieldUpdate(_,_,_) => Nil
case FieldApply(s,x) => Nil
case _ => super.copySyms(e)
}
override protected def formatDef(d: Def[_])(implicit config: GraphVizConfig): String = d match {
case SimpleStruct(tag, fields) =>
s"${baseStructName(tag)}{${fields.map { case (fn, s) => s"$fn:$s" }.mkString("; ")}}"
case ProjectionStruct(struct, outs) => s"$struct.{${outs.mkString(",")}}"
case FieldUpdate(s, fn, v) => s"$s.$fn := $v"
case FieldApply(struct, fn) => s"$struct.$fn"
case _ => super.formatDef(d)
}
case class ViewStruct[A, B](source: Exp[A])(val iso: Iso[A, B])
extends View[A, B] {
override def toString = s"ViewStruct[${iso.eTo.name}]($source)"
override def equals(other: Any) = other match {
case v: ViewStruct[_, _] => source == v.source && iso.eTo == v.iso.eTo
case _ => false
}
}
override def unapplyViews[T](s: Exp[T]): Option[Unpacked[T]] = (s match {
case Def(view: ViewStruct[a, b]) =>
Some((view.source, view.iso))
case _ =>
super.unapplyViews(s)
}).asInstanceOf[Option[Unpacked[T]]]
object FieldGet {
def unapply[T](d: FieldApply[T]): Option[Exp[T]] = d match {
case FieldApply(Def(SimpleStruct(_, fs)), fn) =>
val optItem = fs.find { case (n, _) => n == fn }
optItem.map(_._2.asRep[T])
case _ => None
}
}
object IdentityStructMapping {
def unapply[A](d: Def[A]): Option[Rep[A]] = d match {
case Struct(_, fields) =>
val inputStructs = scala.collection.mutable.HashSet[Rep[A]]()
val okNames = fields.forall { case (fn, s) =>
s match {
case Def(Field(struct, name)) if name == fn =>
inputStructs += struct.asRep[A]
true
case _ => false
}
}
if (okNames && inputStructs.size == 1)
Some(inputStructs.head)
else
None
case _ => None
}
}
object IdentityStructLambda {
def unapply[A,B](lam: Lambda[A, B]): Boolean = lam.y match {
case Def(Struct(_, fields)) =>
fields.forall { case (fn, s) =>
s match {
case Def(Field(struct, name)) if struct == lam.x && name == fn => true
case _ => false
}
}
case _ => false
}
}
override def isIdentityLambda[A,B](lam: Lambda[A, B]): Boolean = {
super.isIdentityLambda(lam) || IdentityStructLambda.unapply(lam)
}
def shouldUnpackTuples = currentPass.config.shouldUnpackTuples
def shouldExtractFields = currentPass.config.shouldExtractFields
def shouldSlice = currentPass.config.shouldSlice
override def rewriteDef[T](d: Def[T]): Exp[_] = d match {
case FieldGet(v) if shouldExtractFields => v
case IdentityStructMapping(s) => s
case _ => super.rewriteDef(d)
}
object StructsRewriter extends Rewriter {
def apply[T](x: Exp[T]): Exp[T] = (x match {
case Def(FieldGet(v)) => v
case _ => x
}).asRep[T]
}
}
|
PCMNN/scalan-ce
|
core/src/main/scala/scalan/primitives/Structs.scala
|
Scala
|
apache-2.0
| 25,849 |
package uk.gov.gds.ier.validation.constraints
import uk.gov.gds.ier.validation.{FormKeys, ErrorMessages, Key}
import uk.gov.gds.ier.model.{OverseasParentName, Name, PreviousName}
import play.api.data.validation.{Invalid, Valid, Constraint}
import play.api.data.Mapping
import play.api.data.Forms._
import play.api.Logger
import uk.gov.gds.ier.transaction.overseas.InprogressOverseas
trait ParentNameConstraints extends CommonConstraints {
self: FormKeys
with ErrorMessages =>
lazy val parentNameNotOptional = Constraint[Option[Name]] (keys.overseasParentName.parentName.key) {
case Some(_) => Valid
case None => Invalid(
"Please enter their full name",
keys.overseasParentName.parentName.firstName,
keys.overseasParentName.parentName.lastName,
keys.overseasParentName.parentName)
}
lazy val parentPreviousNameNotOptionalIfHasPreviousIsTrue = Constraint[Option[PreviousName]] (
keys.overseasParentName.parentPreviousName.key) {
case Some(name) if name.hasPreviousName && name.previousName.isEmpty =>
Invalid("Please enter their previous full name",
keys.overseasParentName.parentPreviousName.previousName.firstName,
keys.overseasParentName.parentPreviousName.previousName.lastName)
case Some(_) => Valid
case None => Invalid("Please answer this question", keys.overseasParentName.parentPreviousName)
}
lazy val parentPrevNameOptionCheck = Constraint[InprogressOverseas] (keys.overseasParentName.parentPreviousName.key) {
application =>
if (application.overseasParentName.exists(_.previousName.isDefined)) Valid
else Invalid("Please answer this question", keys.overseasParentName.parentPreviousName)
}
lazy val parentFirstNameNotEmpty = Constraint[Option[Name]](keys.overseasParentName.parentName.firstName.key) {
case Some(Name("", _, _)) => Invalid(
"Please enter their first name",
keys.overseasParentName.parentName.firstName)
case _ => Valid
}
lazy val parentLastNameNotEmpty = Constraint[Option[Name]](keys.overseasParentName.parentName.lastName.key) {
case Some(Name(_, _, "")) => Invalid(
"Please enter their last name",
keys.overseasParentName.parentName.lastName)
case _ => Valid
}
lazy val parentFirstNameNotTooLong = fieldNotTooLong[Option[Name]](
fieldKey = keys.overseasParentName.parentName.firstName,
errorMessage = firstNameMaxLengthError,
maxLength = maxFirstLastNameLength) {
_.map { _.firstName } getOrElse ""
}
lazy val parentMiddleNamesNotTooLong = fieldNotTooLong[Option[Name]](
fieldKey = keys.overseasParentName.parentName.middleNames,
errorMessage = middleNameMaxLengthError,
maxLength = maxMiddleNameLength) {
_.map { _.middleNames.getOrElse("") } getOrElse("")
}
lazy val parentLastNameNotTooLong = fieldNotTooLong[Option[Name]](
fieldKey = keys.overseasParentName.parentName.lastName,
errorMessage = lastNameMaxLengthError,
maxLength = maxFirstLastNameLength) {
_.map { _.lastName } getOrElse("")
}
lazy val parentPreviousFirstNameNotEmpty = Constraint[Option[PreviousName]](
keys.overseasParentName.parentPreviousName.previousName.firstName.key) {
case Some(PreviousName(true, "true", None, Some(Name("", _, _)), _)) => Invalid(
"Please enter their previous first name",
keys.overseasParentName.parentPreviousName.previousName.firstName)
case _ => Valid
}
lazy val parentPreviousLastNameNotEmpty = Constraint[Option[PreviousName]](
keys.overseasParentName.parentPreviousName.previousName.lastName.key) {
case Some(PreviousName(true, "true", None, Some(Name(_, _, "")), _)) => Invalid(
"Please enter their previous last name",
keys.overseasParentName.parentPreviousName.previousName.lastName)
case _ => Valid
}
lazy val parentPrevFirstNameNotTooLong = fieldNotTooLong[Option[PreviousName]](
fieldKey = keys.overseasParentName.parentPreviousName.previousName.firstName,
errorMessage = previousFirstNameMaxLengthError,
maxLength = maxFirstLastNameLength) {
_.flatMap { _.previousName }
.map { _.firstName }
.getOrElse("")
}
lazy val parentPrevMiddleNamesNotTooLong = fieldNotTooLong[Option[PreviousName]](
fieldKey = keys.overseasParentName.parentPreviousName.previousName.middleNames,
errorMessage = previousMiddleNameMaxLengthError,
maxLength = maxMiddleNameLength) {
_.flatMap{ _.previousName }
.map{ _.middleNames.getOrElse("") }
.getOrElse("")
}
lazy val parentPrevLastNameNotTooLong = fieldNotTooLong[Option[PreviousName]](
fieldKey = keys.overseasParentName.parentPreviousName.previousName.lastName,
errorMessage = previousLastNameMaxLengthError,
maxLength = maxFirstLastNameLength) {
_.flatMap { _.previousName }
.map { _.lastName }
.getOrElse("")
}
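/*
 * A wiring sketch (hypothetical form definition; `optional` and a Name mapping
 * are assumed from the surrounding form code). Constraints defined above
 * attach to Play mappings via `verifying`, e.g.
 *
 *   "parentName" -> optional(Name.mapping).verifying(
 *     parentNameNotOptional, parentFirstNameNotEmpty, parentLastNameNotEmpty)
 */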
}
|
alphagov/ier-frontend
|
app/uk/gov/gds/ier/validation/constraints/ParentNameConstraints.scala
|
Scala
|
mit
| 5,075 |
package com.outr.arango.api.model
import io.circe.Json
case class PutAPISimpleReplaceByExample(collection: String,
example: Option[String] = None,
newValue: Option[String] = None,
options: Option[PutAPISimpleReplaceByExampleOptions] = None)
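/*
 * A construction sketch (field values are illustrative only). This model
 * mirrors the body of ArangoDB's PUT /_api/simple/replace-by-example, where
 * `example` selects matching documents and `newValue` is the replacement:
 *
 *   val req = PutAPISimpleReplaceByExample(
 *     collection = "users",
 *     example = Some("""{ "status": "inactive" }"""),
 *     newValue = Some("""{ "status": "archived" }"""))
 */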
|
outr/arangodb-scala
|
api/src/main/scala/com/outr/arango/api/model/PutAPISimpleReplaceByExample.scala
|
Scala
|
mit
| 363 |
object Test extends Application {
class A
class B extends A
def foo(x: A, y: B) = print(1)
def foo(x: A, y: A) = print(2)
/* line: 4 */foo(new B, new B)
}
|
LPTK/intellij-scala
|
testdata/resolve2/overloading/MoreSpecificRight.scala
|
Scala
|
apache-2.0
| 165 |
package lila.monitor
package actorApi
import lila.socket.SocketMember
case class Member(channel: JsChannel) extends SocketMember {
val userId = none
val troll = false
}
case object GetNbMoves
case object GetMoveLatency
case class Join(uid: String)
case class MonitorData(data: List[String])
|
bjhaid/lila
|
modules/monitor/src/main/actorApi.scala
|
Scala
|
mit
| 299 |
package org.scalatra.ssgi.servlet
import javax.servlet.ServletOutputStream
import java.io.ByteArrayOutputStream
class ByteArrayServletOutputStream(bufSize: Int) extends ServletOutputStream {
def this() = this(32)
val internal = new ByteArrayOutputStream
def toByteArray = internal.toByteArray
override def write(i: Int) { internal.write(i) }
override def write(bytes: Array[Byte]) { internal.write(bytes) }
override def write(bytes: Array[Byte], off: Int, len: Int) { internal.write(bytes, off, len) }
override def flush { }
override def close { }
private[ssgi] def reallyClose { internal.close }
def size = internal.size
def reset(): Unit = internal.reset()
}
|
scalatra/ssgi
|
servlet/src/main/scala/org/scalatra/ssgi/servlet/ByteArrayServletOutputStream.scala
|
Scala
|
bsd-2-clause
| 705 |
package com.github.libsml.aggregation.optimization
/**
* Created by huangyu on 15/9/5.
*/
object OptimizationMode extends Enumeration {
type Mode = Value
val SPARK, LOCAL = Value
}
|
libsml/libsml
|
aggregation/src/main/scala/com/github/libsml/aggregation/optimization/OptimizationMode.scala
|
Scala
|
apache-2.0
| 189 |
package konstructs.protocol
import scala.collection.JavaConverters._
import akka.actor.{Actor, Props, ActorRef, Stash, PoisonPill}
import akka.io.Tcp
import akka.util.{ByteString, ByteStringBuilder}
import konstructs.{PlayerActor, UniverseActor, DbActor}
import konstructs.api._
import konstructs.api.messages.Said
import konstructs.shard.ChunkPosition
class ClientActor(universe: ActorRef, factory: BlockFactory, textures: Array[Byte]) extends Actor with Stash {
import DbActor.BlockList
import UniverseActor.CreatePlayer
import ClientActor._
import PlayerActor._
implicit val bo = java.nio.ByteOrder.BIG_ENDIAN
private var readBuffer = ByteString.empty
private val writeBuffer = new ByteStringBuilder()
private var player: PlayerInfo = null
private var canWrite = true
private def readData[T](conv: String => T, data: String): List[T] = {
val comma = data.indexOf(',')
if (comma > 0) {
val i = conv(data.take(comma))
i :: readData(conv, data.drop(comma + 1))
} else {
val i = conv(data)
i :: Nil
}
}
private def handle(data: ByteString) = {
val command = data.decodeString("ascii")
if (command.startsWith("P,")) {
val floats = readData(_.toFloat, command.drop(2))
player.actor ! Position(floats(0), floats(1), floats(2), floats(3), floats(4))
} else if (command.startsWith("C,")) {
val ints = readData(_.toInt, command.drop(2))
player.actor ! DbActor.SendBlocks(ChunkPosition(ints(0), ints(1), ints(2)))
} else if (command.startsWith("M,")) {
val ints = readData(_.toInt, command.drop(2))
if (ints(0) != 0) {
player.actor ! Action(new konstructs.api.Position(ints(1), ints(2), ints(3)),
Orientation.get(Direction.get(ints(6)), Rotation.get(ints(7))),
ints(4),
ints(5))
} else {
player.actor ! Action(null, null, ints(4), ints(5))
}
} else if (command.startsWith("T,")) {
val message = command.substring(2)
player.actor ! Say(message)
} else if (command.startsWith("I")) {
player.actor ! CloseInventory
} else if (command.startsWith("R,")) {
val ints = readData(_.toInt, command.drop(2))
player.actor ! SelectItem(ints(0), ints(1))
} else if (command.startsWith("D,")) {
val ints = readData(_.toInt, command.drop(2))
player.actor ! SetViewDistance(ints(0))
}
true
}
private def read(data: ByteString)(handle: ByteString => Boolean) {
readBuffer = readBuffer ++ data
try {
while (!readBuffer.isEmpty) {
val size = readBuffer.iterator.getInt
val length = readBuffer.length - 4
if (size <= length) {
readBuffer = readBuffer.drop(4)
val result = handle(readBuffer.take(size))
readBuffer = readBuffer.drop(size)
if (!result)
return
} else {
return
}
}
} catch {
case _: java.util.NoSuchElementException =>
/* Packet was not complete yet */
}
}
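/*
 * Wire format sketch: each packet is a 4-byte big-endian length prefix
 * followed by that many bytes of ASCII payload. For example (illustrative
 * bytes), the position command "P,1.0,2.0,3.0,0.0,0.0" (21 bytes) arrives as
 *
 *   00 00 00 15 | 50 2C 31 2E 30 2C ...
 *
 * `read` buffers partial packets across Tcp.Received events and resumes once
 * the full length is available.
 */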
def handleAck(pipe: ActorRef) {
canWrite = true
send(pipe)
}
def receive = {
case Tcp.Received(data) =>
read(data) { data =>
val command = data.decodeString("ascii")
if (command.startsWith(s"V,$Version,")) {
val strings = readData(s => s, command.drop(2))
val auth = Authenticate(strings(0).toInt, strings(1), strings(2))
println(s"Player ${auth.name} connected with protocol version ${auth.version}")
universe ! CreatePlayer(auth.name, auth.token)
context.become(waitForPlayer(sender))
} else {
sendError(sender, s"This server only supports protocol version $Version")
}
false
}
case _: Tcp.ConnectionClosed =>
context.stop(self)
case Ack =>
handleAck(sender)
}
def waitForPlayer(pipe: ActorRef): Receive = {
case p: PlayerInfo =>
player = p
send(pipe, s"U,${p.pid},${p.pos.x},${p.pos.y},${p.pos.z},${p.pos.rx},${p.pos.ry}")
sendPlayerNick(pipe, p.pid, p.nick)
sendBlockTypes(pipe)
sendTextures(pipe)
unstashAll()
context.become(ready(pipe))
// Process any data left over from processing of version packet
read(ByteString.empty)(handle)
case Tcp.Received(data) =>
stash()
case Ack =>
handleAck(sender)
}
def ready(pipe: ActorRef): Receive = {
case Tcp.Received(data) =>
read(data)(handle)
case BlockList(chunk, data) =>
sendBlocks(pipe, chunk, data.data)
case ChunkUpdate(p, q, k) =>
sendChunkUpdate(pipe, p, q, k)
case b: SendBlock =>
sendBlock(pipe, b)
case BeltUpdate(items) =>
sendBelt(pipe, items)
case InventoryUpdate(view) =>
sendInventory(pipe, view.getItems.asScala.toMap)
case p: PlayerMovement =>
sendPlayerMovement(pipe, p)
case PlayerNick(pid, nick) =>
sendPlayerNick(pipe, pid, nick)
case PlayerLogout(pid) =>
sendPlayerLogout(pipe, pid)
case s: Said =>
sendSaid(pipe, s.getText)
case HeldStack(stack) =>
if (stack != null)
sendHeldStack(pipe, stack.size, factory.getW(stack), stack.getHead.getHealth.getHealth)
else
sendHeldStack(pipe, 0, -1, 0)
case Time(t) =>
sendTime(pipe, t)
case _: Tcp.ConnectionClosed =>
context.stop(self)
case Ack =>
handleAck(sender)
}
override def postStop {
if (player != null)
player.actor ! PoisonPill
}
def sendError(pipe: ActorRef, error: String) {
send(pipe, s"E,$error")
context.stop(self)
}
def sendPlayerNick(pipe: ActorRef, pid: Int, nick: String) {
send(pipe, s"N,$pid,$nick")
}
def sendTime(pipe: ActorRef, t: Long) {
send(pipe, s"T,$t")
}
def sendChunkUpdate(pipe: ActorRef, p: Int, q: Int, k: Int) {
send(pipe, s"c,$p,$q,$k")
}
def sendSaid(pipe: ActorRef, msg: String) {
send(pipe, s"t,$msg")
}
def sendPlayerLogout(pipe: ActorRef, pid: Int) {
send(pipe, s"D,$pid")
}
def sendPlayerMovement(pipe: ActorRef, p: PlayerMovement) {
send(pipe, s"P,${p.pid},${p.pos.x},${p.pos.y},${p.pos.z},${p.pos.rx},${p.pos.ry}")
}
def sendBelt(pipe: ActorRef, items: Array[Stack]) {
for ((stack, i) <- items.zipWithIndex) {
if (stack != null) {
send(pipe, s"G,${i},${stack.size},${factory.getW(stack)},${stack.getHead.getHealth.getHealth}")
} else {
send(pipe, s"G,${i},0,0,0")
}
}
}
def sendHeldStack(pipe: ActorRef, size: Int, w: Int, health: Int) {
send(pipe, s"i,$size,$w,$health")
}
def sendInventory(pipe: ActorRef, items: Map[Integer, Stack]) {
for ((p, stack) <- items) {
if (stack != null) {
send(pipe, s"I,${p},${stack.size},${factory.getW(stack)},${stack.getHead.getHealth.getHealth}")
} else {
send(pipe, s"I,${p},0,0,0")
}
}
}
def sendBlock(pipe: ActorRef, b: SendBlock) {
send(pipe, s"B,${b.p},${b.q},${b.x},${b.y},${b.z},${b.w}")
}
def sendBlocks(pipe: ActorRef, chunk: ChunkPosition, blocks: ByteString) {
val data =
ByteString.createBuilder.putByte(C).putInt(chunk.p).putInt(chunk.q).putInt(chunk.k).append(blocks).result
writeBuffer.putInt(data.length).append(data)
send(pipe)
}
def sendTextures(pipe: ActorRef) {
val data = ByteString.createBuilder.putByte(M).putBytes(textures).result
writeBuffer.putInt(data.length).append(data)
send(pipe)
}
def sendBlockTypes(pipe: ActorRef) {
val types = factory.getBlockTypes().asScala.map {
case (id, t) => factory.getW(id) -> t
}
for ((w, t) <- types) {
sendBlockType(pipe, w, t)
}
}
def booleanToInt(b: Boolean): Int = if (b) 1 else 0
def sendBlockType(pipe: ActorRef, w: Int, t: BlockType) {
val isObstacle = booleanToInt(t.isObstacle)
val isTransparent = booleanToInt(t.isTransparent)
val isOrientable = booleanToInt(t.isOrientable)
val faces = t.getFaces
send(pipe,
s"W,$w,${t.getBlockShape.getShape},${t.getBlockState.getState},$isObstacle,$isTransparent,${faces(0)},${faces(
1)},${faces(2)},${faces(3)},${faces(4)},${faces(5)},${isOrientable}")
}
def send(pipe: ActorRef, msg: String) {
val data = ByteString(msg, "ascii")
writeBuffer.putInt(data.length).append(data)
send(pipe)
}
def send(pipe: ActorRef) {
if (!writeBuffer.isEmpty) {
if (canWrite) {
pipe ! Tcp.Write(writeBuffer.result(), Ack)
writeBuffer.clear()
canWrite = false
}
}
}
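/*
 * Backpressure note: this is Akka I/O's ACK-based write pattern. At most one
 * Tcp.Write is in flight; further output accumulates in writeBuffer until the
 * connection replies with Ack (see handleAck), which flushes the buffer in a
 * single batched write and avoids overlapping-write failures.
 */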
}
object ClientActor {
val C = 'C'.toByte
val B = 'B'.toByte
val V = 'V'.toByte
val P = 'P'.toByte
val M = 'M'.toByte
val Version = 10
case object Ack extends Tcp.Event
def props(universe: ActorRef, factory: BlockFactory, textures: Array[Byte]) =
Props(classOf[ClientActor], universe, factory, textures)
}
|
konstructs/server
|
src/main/scala/konstructs/protocol/client.scala
|
Scala
|
mit
| 8,961 |