Dataset columns:
  code       string   length 5 to 1M
  repo_name  string   length 5 to 109
  path       string   length 6 to 208
  language   string   1 class
  license    string   15 classes
  size       int64    5 to 1M
// Copyright 2015,2016,2017,2018,2019,2020 Commonwealth Bank of Australia // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package commbank.grimlock.test import commbank.grimlock.framework._ import commbank.grimlock.framework.content._ import commbank.grimlock.framework.encoding._ import commbank.grimlock.framework.environment.implicits._ import commbank.grimlock.framework.environment.tuner._ import commbank.grimlock.framework.metadata._ import commbank.grimlock.framework.position._ trait TestMatrixSet extends TestMatrix { val dataA = List( Cell(Position("foo"), Content(ContinuousSchema[Double](), 1.0)), Cell(Position("quxx"), Content(ContinuousSchema[Double](), 2.0)) ) val dataB = List( Cell(Position("foo", 2), Content(ContinuousSchema[Double](), 1.0)), Cell(Position("quxx", 5), Content(ContinuousSchema[Double](), 2.0)) ) val dataC = List( Cell(Position("foo", 2, "xyz"), Content(ContinuousSchema[Double](), 1.0)), Cell(Position("quxx", 5, "abc"), Content(ContinuousSchema[Double](), 2.0)) ) val result1 = List( Cell(Position("bar"), Content(OrdinalSchema[String](), "6.28")), Cell(Position("baz"), Content(OrdinalSchema[String](), "9.42")), Cell(Position("foo"), Content(ContinuousSchema[Double](), 1.0)), Cell(Position("qux"), Content(OrdinalSchema[String](), "12.56")) ) val result2 = List( Cell(Position("bar"), Content(OrdinalSchema[String](), "6.28")), Cell(Position("baz"), Content(OrdinalSchema[String](), "9.42")), Cell(Position("foo"), Content(ContinuousSchema[Double](), 1.0)), Cell(Position("qux"), Content(OrdinalSchema[String](), "12.56")), Cell(Position("quxx"), Content(ContinuousSchema[Double](), 1.0)) ) val result3 = List( Cell(Position("bar"), Content(OrdinalSchema[String](), "6.28")), Cell(Position("baz"), Content(OrdinalSchema[String](), "9.42")), Cell(Position("foo"), Content(ContinuousSchema[Double](), 1.0)), Cell(Position("qux"), Content(OrdinalSchema[String](), "12.56")), Cell(Position("quxx"), Content(ContinuousSchema[Double](), 2.0)) ) val result4 = List( Cell(Position("bar", 1), Content(OrdinalSchema[String](), "6.28")), Cell(Position("bar", 2), Content(ContinuousSchema[Double](), 12.56)), Cell(Position("bar", 3), Content(OrdinalSchema[Long](), 19L)), Cell(Position("baz", 1), Content(OrdinalSchema[String](), "9.42")), Cell(Position("baz", 2), Content(DiscreteSchema[Long](), 19L)), Cell(Position("foo", 1), Content(OrdinalSchema[String](), "3.14")), Cell(Position("foo", 2), Content(ContinuousSchema[Double](), 1.0)), Cell(Position("foo", 3), Content(NominalSchema[String](), "9.42")), Cell( Position("foo", 4), Content( DateSchema[java.util.Date](), DateValue((new java.text.SimpleDateFormat("yyyy-MM-dd hh:mm:ss")).parse("2000-01-01 12:56:00")) ) ), Cell(Position("qux", 1), Content(OrdinalSchema[String](), "12.56")) ) val result5 = List( Cell(Position("bar", 1), Content(OrdinalSchema[String](), "6.28")), Cell(Position("bar", 2), Content(ContinuousSchema[Double](), 12.56)), Cell(Position("bar", 3), Content(OrdinalSchema[Long](), 19L)), Cell(Position("baz", 1), 
Content(OrdinalSchema[String](), "9.42")), Cell(Position("baz", 2), Content(DiscreteSchema[Long](), 19L)), Cell(Position("foo", 1), Content(OrdinalSchema[String](), "3.14")), Cell(Position("foo", 2), Content(ContinuousSchema[Double](), 1.0)), Cell(Position("foo", 3), Content(NominalSchema[String](), "9.42")), Cell( Position("foo", 4), Content( DateSchema[java.util.Date](), DateValue((new java.text.SimpleDateFormat("yyyy-MM-dd hh:mm:ss")).parse("2000-01-01 12:56:00")) ) ), Cell(Position("qux", 1), Content(OrdinalSchema[String](), "12.56")), Cell(Position("quxx", 5), Content(ContinuousSchema[Double](), 1.0)) ) val result6 = List( Cell(Position("bar", 1), Content(OrdinalSchema[String](), "6.28")), Cell(Position("bar", 2), Content(ContinuousSchema[Double](), 12.56)), Cell(Position("bar", 3), Content(OrdinalSchema[Long](), 19L)), Cell(Position("baz", 1), Content(OrdinalSchema[String](), "9.42")), Cell(Position("baz", 2), Content(DiscreteSchema[Long](), 19L)), Cell(Position("foo", 1), Content(OrdinalSchema[String](), "3.14")), Cell(Position("foo", 2), Content(ContinuousSchema[Double](), 1.0)), Cell(Position("foo", 3), Content(NominalSchema[String](), "9.42")), Cell( Position("foo", 4), Content( DateSchema[java.util.Date](), DateValue((new java.text.SimpleDateFormat("yyyy-MM-dd hh:mm:ss")).parse("2000-01-01 12:56:00")) ) ), Cell(Position("qux", 1), Content(OrdinalSchema[String](), "12.56")), Cell(Position("quxx", 5), Content(ContinuousSchema[Double](), 2.0)) ) val result7 = List( Cell(Position("bar", 1, "xyz"), Content(OrdinalSchema[String](), "6.28")), Cell(Position("bar", 2, "xyz"), Content(ContinuousSchema[Double](), 12.56)), Cell(Position("bar", 3, "xyz"), Content(OrdinalSchema[Long](), 19L)), Cell(Position("baz", 1, "xyz"), Content(OrdinalSchema[String](), "9.42")), Cell(Position("baz", 2, "xyz"), Content(DiscreteSchema[Long](), 19L)), Cell(Position("foo", 1, "xyz"), Content(OrdinalSchema[String](), "3.14")), Cell(Position("foo", 2, "xyz"), Content(ContinuousSchema[Double](), 1.0)), Cell(Position("foo", 3, "xyz"), Content(NominalSchema[String](), "9.42")), Cell( Position("foo", 4, "xyz"), Content( DateSchema[java.util.Date](), DateValue((new java.text.SimpleDateFormat("yyyy-MM-dd hh:mm:ss")).parse("2000-01-01 12:56:00")) ) ), Cell(Position("qux", 1, "xyz"), Content(OrdinalSchema[String](), "12.56")) ) val result8 = List( Cell(Position("bar", 1, "xyz"), Content(OrdinalSchema[String](), "6.28")), Cell(Position("bar", 2, "xyz"), Content(ContinuousSchema[Double](), 12.56)), Cell(Position("bar", 3, "xyz"), Content(OrdinalSchema[Long](), 19L)), Cell(Position("baz", 1, "xyz"), Content(OrdinalSchema[String](), "9.42")), Cell(Position("baz", 2, "xyz"), Content(DiscreteSchema[Long](), 19L)), Cell(Position("foo", 1, "xyz"), Content(OrdinalSchema[String](), "3.14")), Cell(Position("foo", 2, "xyz"), Content(ContinuousSchema[Double](), 1.0)), Cell(Position("foo", 3, "xyz"), Content(NominalSchema[String](), "9.42")), Cell( Position("foo", 4, "xyz"), Content( DateSchema[java.util.Date](), DateValue((new java.text.SimpleDateFormat("yyyy-MM-dd hh:mm:ss")).parse("2000-01-01 12:56:00")) ) ), Cell(Position("qux", 1, "xyz"), Content(OrdinalSchema[String](), "12.56")), Cell(Position("quxx", 5, "abc"), Content(ContinuousSchema[Double](), 1.0)) ) val result9 = List( Cell(Position("bar", 1, "xyz"), Content(OrdinalSchema[String](), "6.28")), Cell(Position("bar", 2, "xyz"), Content(ContinuousSchema[Double](), 12.56)), Cell(Position("bar", 3, "xyz"), Content(OrdinalSchema[Long](), 19L)), Cell(Position("baz", 1, "xyz"), 
Content(OrdinalSchema[String](), "9.42")), Cell(Position("baz", 2, "xyz"), Content(DiscreteSchema[Long](), 19L)), Cell(Position("foo", 1, "xyz"), Content(OrdinalSchema[String](), "3.14")), Cell(Position("foo", 2, "xyz"), Content(ContinuousSchema[Double](), 1.0)), Cell(Position("foo", 3, "xyz"), Content(NominalSchema[String](), "9.42")), Cell( Position("foo", 4, "xyz"), Content( DateSchema[java.util.Date](), DateValue((new java.text.SimpleDateFormat("yyyy-MM-dd hh:mm:ss")).parse("2000-01-01 12:56:00")) ) ), Cell(Position("qux", 1, "xyz"), Content(OrdinalSchema[String](), "12.56")), Cell(Position("quxx", 5, "abc"), Content(ContinuousSchema[Double](), 2.0)) ) } class TestScalaMatrixSet extends TestMatrixSet with TestScala { import commbank.grimlock.scala.environment.implicits._ "A Matrix.set" should "return its updated data in 1D" in { toU(data1) .set(Cell(Position("foo"), Content(ContinuousSchema[Double](), 1.0)), Default()) .toList.sortBy(_.position) shouldBe result1 } it should "return its updated and added data in 1D" in { toU(data1) .set(List("foo", "quxx").map(pos => Cell(Position(pos), Content(ContinuousSchema[Double](), 1.0))), Default()) .toList.sortBy(_.position) shouldBe result2 } it should "return its matrix updated data in 1D" in { toU(data1) .set(toU(dataA), Default()) .toList.sortBy(_.position) shouldBe result3 } it should "return its updated data in 2D" in { toU(data2) .set(Cell(Position("foo", 2), Content(ContinuousSchema[Double](), 1.0)), Default()) .toList.sortBy(_.position) shouldBe result4 } it should "return its updated and added data in 2D" in { toU(data2) .set( List(Position("foo", 2), Position("quxx", 5)).map(pos => Cell(pos, Content(ContinuousSchema[Double](), 1.0))), Default() ) .toList.sortBy(_.position) shouldBe result5 } it should "return its matrix updated data in 2D" in { toU(data2) .set(toU(dataB), Default()) .toList.sortBy(_.position) shouldBe result6 } it should "return its updated data in 3D" in { toU(data3) .set(Cell(Position("foo", 2, "xyz"), Content(ContinuousSchema[Double](), 1.0)), Default()) .toList.sortBy(_.position) shouldBe result7 } it should "return its updated and added data in 3D" in { toU(data3) .set( List(Position("foo", 2, "xyz"), Position("quxx", 5, "abc")) .map(pos => Cell(pos, Content(ContinuousSchema[Double](), 1.0))), Default() ) .toList.sortBy(_.position) shouldBe result8 } it should "return its matrix updated data in 3D" in { toU(data3) .set(toU(dataC), Default()) .toList.sortBy(_.position) shouldBe result9 } } class TestScaldingMatrixSet extends TestMatrixSet with TestScalding { import commbank.grimlock.scalding.environment.implicits._ "A Matrix.set" should "return its updated data in 1D" in { toU(data1) .set(Cell(Position("foo"), Content(ContinuousSchema[Double](), 1.0)), Default()) .toList.sortBy(_.position) shouldBe result1 } it should "return its updated and added data in 1D" in { toU(data1) .set(List("foo", "quxx").map(pos => Cell(Position(pos), Content(ContinuousSchema[Double](), 1.0))), Default(12)) .toList.sortBy(_.position) shouldBe result2 } it should "return its matrix updated data in 1D" in { toU(data1) .set(toU(dataA), Default()) .toList.sortBy(_.position) shouldBe result3 } it should "return its updated data in 2D" in { toU(data2) .set(Cell(Position("foo", 2), Content(ContinuousSchema[Double](), 1.0)), Default(12)) .toList.sortBy(_.position) shouldBe result4 } it should "return its updated and added data in 2D" in { toU(data2) .set( List(Position("foo", 2), Position("quxx", 5)).map(pos => Cell(pos, 
Content(ContinuousSchema[Double](), 1.0))), Default() ) .toList.sortBy(_.position) shouldBe result5 } it should "return its matrix updated data in 2D" in { toU(data2) .set(toU(dataB), Default(12)) .toList.sortBy(_.position) shouldBe result6 } it should "return its updated data in 3D" in { toU(data3) .set(Cell(Position("foo", 2, "xyz"), Content(ContinuousSchema[Double](), 1.0)), Default()) .toList.sortBy(_.position) shouldBe result7 } it should "return its updated and added data in 3D" in { toU(data3) .set( List(Position("foo", 2, "xyz"), Position("quxx", 5, "abc")) .map(pos => Cell(pos, Content(ContinuousSchema[Double](), 1.0))), Default(12) ) .toList.sortBy(_.position) shouldBe result8 } it should "return its matrix updated data in 3D" in { toU(data3) .set(toU(dataC), Default()) .toList.sortBy(_.position) shouldBe result9 } } class TestSparkMatrixSet extends TestMatrixSet with TestSpark { import commbank.grimlock.spark.environment.implicits._ "A Matrix.set" should "return its updated data in 1D" in { toU(data1) .set(Cell(Position("foo"), Content(ContinuousSchema[Double](), 1.0)), Default()) .toList.sortBy(_.position) shouldBe result1 } it should "return its updated and added data in 1D" in { toU(data1) .set(List("foo", "quxx").map(pos => Cell(Position(pos), Content(ContinuousSchema[Double](), 1.0))), Default(12)) .toList.sortBy(_.position) shouldBe result2 } it should "return its matrix updated data in 1D" in { toU(data1) .set(toU(dataA), Default()) .toList.sortBy(_.position) shouldBe result3 } it should "return its updated data in 2D" in { toU(data2) .set(Cell(Position("foo", 2), Content(ContinuousSchema[Double](), 1.0)), Default(12)) .toList.sortBy(_.position) shouldBe result4 } it should "return its updated and added data in 2D" in { toU(data2) .set( List(Position("foo", 2), Position("quxx", 5)).map(pos => Cell(pos, Content(ContinuousSchema[Double](), 1.0))), Default() ) .toList.sortBy(_.position) shouldBe result5 } it should "return its matrix updated data in 2D" in { toU(data2) .set(toU(dataB), Default(12)) .toList.sortBy(_.position) shouldBe result6 } it should "return its updated data in 3D" in { toU(data3) .set(Cell(Position("foo", 2, "xyz"), Content(ContinuousSchema[Double](), 1.0)), Default()) .toList.sortBy(_.position) shouldBe result7 } it should "return its updated and added data in 3D" in { toU(data3) .set( List(Position("foo", 2, "xyz"), Position("quxx", 5, "abc")) .map(pos => Cell(pos, Content(ContinuousSchema[Double](), 1.0))), Default(12) ) .toList.sortBy(_.position) shouldBe result8 } it should "return its matrix updated data in 3D" in { toU(data3) .set(toU(dataC), Default()) .toList.sortBy(_.position) shouldBe result9 } }
CommBank/grimlock
grimlock-core/src/test/scala/commbank/grimlock/matrix/TestMatrixSet.scala
Scala
apache-2.0
14,670
package temportalist.compression.main.client.model import java.util import com.google.common.collect.Lists import net.minecraft.block.state.IBlockState import net.minecraft.client.Minecraft import net.minecraft.client.renderer.block.model.SimpleBakedModel.Builder import net.minecraft.client.renderer.block.model._ import net.minecraft.client.renderer.texture.TextureAtlasSprite import net.minecraft.entity.EntityLivingBase import net.minecraft.item.{ItemBlock, ItemStack} import net.minecraft.util.EnumFacing import net.minecraft.util.math.BlockPos import net.minecraft.world.World import temportalist.compression.main.common.Compression import temportalist.compression.main.common.init.Compressed import temportalist.compression.main.common.lib.EnumTier import temportalist.origin.api.common.helper.Names /** * * Created by TheTemportalist on 4/14/2016. * * @author TheTemportalist */ class ItemListCompressed(private val overlays: Array[TextureAtlasSprite]) extends ItemOverrideList(Lists.newArrayList()) { def getMissingModel: IBakedModel = Minecraft.getMinecraft.getBlockRendererDispatcher.getBlockModelShapes.getModelManager.getMissingModel override def handleItemState(originalModel: IBakedModel, stack: ItemStack, world: World, entity: EntityLivingBase): IBakedModel = { if (!stack.hasTagCompound) return originalModel val sampleStack = Compressed.getSampleStack(stack) val isBlock = stack.getItem.isInstanceOf[ItemBlock] val sampleModel = if (isBlock) Minecraft.getMinecraft.getBlockRendererDispatcher.getBlockModelShapes. getModelForState(Compressed.getSampleState(stack)) else Minecraft.getMinecraft.getRenderItem.getItemModelMesher.getItemModel(sampleStack) if (sampleModel == null) return originalModel val size = Compressed.getSize(stack) val i = EnumTier.getTierForSize(size).ordinal() val overlay = overlays(i) new IBakedModel { override def getParticleTexture: TextureAtlasSprite = sampleModel.getParticleTexture override def isBuiltInRenderer: Boolean = sampleModel.isBuiltInRenderer override def getItemCameraTransforms: ItemCameraTransforms = sampleModel.getItemCameraTransforms override def isAmbientOcclusion: Boolean = true override def isGui3d: Boolean = isBlock override def getOverrides: ItemOverrideList = sampleModel.getOverrides override def getQuads(state: IBlockState, side: EnumFacing, rand: Long): util.List[BakedQuad] = { val quadList = new util.ArrayList[BakedQuad]() try { var quads = sampleModel.getQuads(state, side, rand) if (quads != null) quadList.addAll(quads) if (overlay != null) { val overlayModel = new Builder( state, sampleModel, overlay, BlockPos.ORIGIN).makeBakedModel() quads = overlayModel.getQuads(state, side, rand) if (quads != null) quadList.addAll(quads) } } catch { case e: Exception => Compression.log("Error merging render models. " + "Please report this to https://github.com/TheTemportalist/Compression/issues. " + "As a temporary fix, you can consider adding \\'" + Names.getName(sampleStack, hasID = true, hasMeta = false) + "\\' or \\'" + Names.getName(sampleStack, hasID = true, hasMeta = true) + "\\' to the blacklist configuration option.") e.printStackTrace() } quadList } } } }
TheTemportalist/Compression
src/main/scala/temportalist/compression/main/client/model/ItemListCompressed.scala
Scala
apache-2.0
3,396
package scala.collection.mutable

import org.junit.runner.RunWith
import org.junit.runners.JUnit4
import org.junit.Test

import scala.collection.mutable

@RunWith(classOf[JUnit4])
/* Test for scala/bug#8014 and ++ in general */
class VectorTest {
  val noVec = Vector.empty[Int]
  val smallVec = Vector.range(0, 3)
  val bigVec = Vector.range(0, 64)
  val smsm = Vector.tabulate(2 * smallVec.length)(i => (i % smallVec.length))
  val smbig = Vector.tabulate(smallVec.length + bigVec.length)(i =>
    if (i < smallVec.length) i else i - smallVec.length
  )
  val bigsm = Vector.tabulate(smallVec.length + bigVec.length)(i =>
    if (i < bigVec.length) i else i - bigVec.length
  )
  val bigbig = Vector.tabulate(2 * bigVec.length)(i => (i % bigVec.length))

  val vecs = List(noVec, smallVec, bigVec)
  val ans = List(
    vecs,
    List(smallVec, smsm, smbig),
    List(bigVec, bigsm, bigbig)
  )

  @Test
  def vectorCat(): Unit = {
    val cats = vecs.map(a => vecs.map(a ++ _))
    assert(cats == ans)
  }

  @Test
  def iteratorCat(): Unit = {
    def its = vecs.map(_.toList.toIterator)
    val cats = vecs.map(a => its.map(x => a ++ x.toList))
    assert(cats == ans)
  }

  @Test
  def arrayCat(): Unit = {
    val ars = vecs.map(_.toArray)
    val cats = vecs.map(a => ars.map(a ++ _))
    assert(cats == ans)
  }
}
martijnhoekstra/scala
test/junit/scala/collection/mutable/VectorTest.scala
Scala
apache-2.0
1,331
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.catalyst.expressions.aggregate

import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
import org.apache.spark.sql.catalyst.expressions.{Expression, ExpressionDescription, ImplicitCastInputTypes, UnevaluableAggregate}
import org.apache.spark.sql.types.{AbstractDataType, BooleanType, DataType, LongType}

@ExpressionDescription(
  usage = """
    _FUNC_(expr) - Returns the number of `TRUE` values for the expression.
  """,
  examples = """
    Examples:
      > SELECT _FUNC_(col % 2 = 0) FROM VALUES (NULL), (0), (1), (2), (3) AS tab(col);
       2
      > SELECT _FUNC_(col IS NULL) FROM VALUES (NULL), (0), (1), (2), (3) AS tab(col);
       1
  """,
  group = "agg_funcs",
  since = "3.0.0")
case class CountIf(predicate: Expression) extends UnevaluableAggregate with ImplicitCastInputTypes {

  override def prettyName: String = "count_if"

  override def children: Seq[Expression] = Seq(predicate)

  override def nullable: Boolean = false

  override def dataType: DataType = LongType

  override def inputTypes: Seq[AbstractDataType] = Seq(BooleanType)

  override def checkInputDataTypes(): TypeCheckResult = predicate.dataType match {
    case BooleanType =>
      TypeCheckResult.TypeCheckSuccess
    case _ =>
      TypeCheckResult.TypeCheckFailure(
        s"function $prettyName requires boolean type, not ${predicate.dataType.catalogString}"
      )
  }
}
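The expression above is exposed in SQL as count_if. A minimal, hedged usage sketch (not part of the Spark source above), assuming a local Spark 3.0+ session and an illustrative view name `events`:

// Sketch only: session setup and view name are assumptions for illustration.
import org.apache.spark.sql.SparkSession

object CountIfDemo extends App {
  val spark = SparkSession.builder().master("local[*]").appName("count_if-demo").getOrCreate()
  import spark.implicits._

  Seq(Some(0), Some(1), Some(2), Some(3), None).toDF("col").createOrReplaceTempView("events")

  // Counts only the rows for which the boolean predicate evaluates to TRUE (NULLs are not counted).
  spark.sql("SELECT count_if(col % 2 = 0) FROM events").show()
}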
ConeyLiu/spark
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/CountIf.scala
Scala
apache-2.0
2,209
package ir.bayan.academy.web

/**
 * @author S.Hosein Ayat
 */
case class HttpRes(statusCode: Int, message: String, content: String) {
}

object HttpRes {

  implicit def requestJavaToScala(req: HttpResult): HttpRes = {
    HttpRes(req.statusCode, req.message, req.content)
  }

  implicit def resultScalaToJava(req: HttpRes): HttpResult = {
    new HttpResult(req.statusCode, req.message, req.content)
  }
}
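A hedged sketch of how these implicit conversions are meant to be used. HttpResult is the Java class referenced above; its (statusCode, message, content) constructor and accessors are taken from the conversion code, and the demo object name is illustrative.

import ir.bayan.academy.web.{HttpRes, HttpResult}

object HttpResDemo extends App {
  // Java -> Scala: an HttpResult is accepted where an HttpRes is expected (companion implicit).
  val scalaRes: HttpRes = new HttpResult(200, "OK", "hello")

  // Scala -> Java: an HttpRes is accepted where an HttpResult is expected.
  val javaRes: HttpResult = HttpRes(404, "Not Found", "")

  println(scalaRes.statusCode)
}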
h-ayat/simple-web
src/main/scala/ir/bayan/academy/web/HttpRes.scala
Scala
gpl-3.0
413
package org.broadinstitute.sting.queue.util

import java.lang.IllegalArgumentException

object ShellUtils {

  /**
   * Escapes the String it's passed so that it will be interpreted literally when
   * parsed by sh/bash. Can correctly escape all characters except \0, \r, and \n
   *
   * Replaces all instances of ' with '\'', and then surrounds the resulting String
   * with single quotes.
   *
   * Examples:
   * ab -> 'ab'
   * a'b -> 'a'\''b'
   * '' -> ''\'''\'''
   *
   * Since \' is not supported inside single quotes in the shell (ie., '\'' does not work),
   * whenever we encounter a single quote we need to terminate the existing single-quoted
   * string, place the \' outside of single quotes, and then start a new single-quoted
   * string. As long as we don't insert spaces between the separate strings, the shell will
   * concatenate them together into a single argument value for us.
   *
   * @param str the String to escape
   * @return the same String quoted so that it will be interpreted literally when
   *         parsed by sh/bash
   */
  def escapeShellArgument(str: String): String = {
    if (str == null) {
      throw new IllegalArgumentException("escapeShellArgument() was passed a null String")
    }

    "'" + str.replaceAll("'", "'\\\\''") + "'"
  }
}
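A small, hedged usage sketch of the escaping behaviour documented above; the expected outputs mirror the examples in the scaladoc, and the demo object name is illustrative.

import org.broadinstitute.sting.queue.util.ShellUtils

object EscapeDemo extends App {
  println(ShellUtils.escapeShellArgument("ab"))        // 'ab'
  println(ShellUtils.escapeShellArgument("a'b"))       // 'a'\''b'
  println(ShellUtils.escapeShellArgument("rm -rf *"))  // quoted so the shell treats it as a literal argument
}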
iontorrent/Torrent-Variant-Caller-stable
public/scala/src/org/broadinstitute/sting/queue/util/ShellUtils.scala
Scala
mit
1,306
package models.blog

import java.sql.Timestamp

import com.sksamuel.scrimage.{ Image => ScrImage }

/**
 * Created by stanikol on 1/27/17.
 */
//object Data {

// Article
case class Article(
  id: Option[Long],
  sort_order: String,
  keywords: String,
  title: String,
  text: String,
  description: String,
  short_text: String
)

object Article {
  def empty: Article = Article(None, "", "", "", "", "", "")
}

case class Comment(id: Option[Long], userID: String, articleID: Long, text: String, added: Timestamp)

case class CommentInfo(id: Option[Long], userID: String, userEmail: Option[String], articleID: Long, articleTitle: String, text: String, added: Timestamp, fullName: String)

case class CommentsShow(order: Option[String], articleID: Option[String])

//}
stanikol/walnuts
server/app/models/blog/Data.scala
Scala
apache-2.0
768
//: ---------------------------------------------------------------------------- //: Copyright (C) 2015 Verizon. All Rights Reserved. //: //: Licensed under the Apache License, Version 2.0 (the "License"); //: you may not use this file except in compliance with the License. //: You may obtain a copy of the License at //: //: http://www.apache.org/licenses/LICENSE-2.0 //: //: Unless required by applicable law or agreed to in writing, software //: distributed under the License is distributed on an "AS IS" BASIS, //: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. //: See the License for the specific language governing permissions and //: limitations under the License. //: //: ---------------------------------------------------------------------------- package funnel package elastic import java.io.IOException import java.util.concurrent.{ThreadFactory, Executors, ExecutorService} import org.scalatest.{FlatSpec,Matchers} import org.scalatest.concurrent._ import org.scalatest.time._ import scala.collection.mutable import scala.concurrent.duration._ import scala.language.postfixOps import scalaz.Kleisli import scalaz.concurrent.Task import scalaz.stream.Process class FlattenedSpec extends FlatSpec with Matchers with Eventually { val F = ElasticFlattened(Monitoring.default, new Instruments(Monitoring.default)) def point = Datapoint[Any](Key[Double](s"now/${java.util.UUID.randomUUID.toString}", Units.Count, "some description", Map(AttributeKeys.source -> "http://10.0.10.10/stream/now", AttributeKeys.kind -> "gauge") ), 3.14) def point2 = Datapoint[Any](Key[Stats](s"sliding/${java.util.UUID.randomUUID.toString}", Units.Count, "some description", Map(AttributeKeys.source -> "http://10.0.10.10/stream/now", AttributeKeys.kind -> "gauge") ), Stats(3.14)) def point3 = Datapoint[Any](Key[Stats](s"sliding/file_system.%2F.use_percent", Units.Bytes(Units.Base.Kilo), "some description", Map(AttributeKeys.source -> "http://10.0.10.10/stream/now", AttributeKeys.kind -> "gauge") ), Stats(3.14)) //use batch size one to be able to predict how many http requests we will see val sharedCfg = new ElasticCfg("http://some-host", "idx", "a", "yyyy.MM.ww", "template", Some("version.sbt"), http = null, List("previous"), subscriptionTimeout = 300.millis, bufferSize = 4096, batchSize = 1) case class MonitoringEnv(M: Monitoring, I: Instruments, EF: ElasticFlattened, H: InMemoryHttpLayer) { private def taskRun[A](t: Duration)(op: Task[A]): java.util.concurrent.atomic.AtomicBoolean = { val b = new java.util.concurrent.atomic.AtomicBoolean(false) op.runAsyncInterruptibly(t => (), b) b } private def processRun[A](t: Duration)(p: Process[Task, A]): java.util.concurrent.atomic.AtomicBoolean = { val b = new java.util.concurrent.atomic.AtomicBoolean(false) p.run.runAsyncInterruptibly(_ => (), b) b } // starts publishing process and returns function to stop it def publish(): () => Unit = { val s1 = taskRun(10.seconds)(EF.publish("env", "a1", "a2")(sharedCfg)) val s2 = processRun(10.seconds)(M.dataDislodgement) () => { s1.set(true) s2.set(true) () } } } private def daemonThreads(name: String) = new ThreadFactory { def newThread(r: Runnable) = { val t = Executors.defaultThreadFactory.newThread(r) t.setDaemon(true) t.setName(name) t } } def monitoringEnv(rules: Seq[Rule] = Rule.defaultRules(sharedCfg))(testCode: (MonitoringEnv) => Any): Unit = monitoringEnv(InMemoryHttpLayer(rules))(testCode) def monitoringEnv(httpLayer: InMemoryHttpLayer)(testCode: (MonitoringEnv) => Any): Unit = { val pool: ExecutorService = 
Executors.newFixedThreadPool(8, daemonThreads("test-threads")) val M = Monitoring.instance(ES = pool, windowSize = 50.millis) val instruments = new Instruments(M, bufferTime = 20 millis) val ef = ElasticFlattened(M, instruments, httpLayer) val env = MonitoringEnv(M, instruments, ef, httpLayer) try { testCode(env) } finally { pool.shutdownNow() } } "toJson" should "correctly render documents to json in the happy case" in { println { F.toJson("dev", "127.0.0.1", "local")(point3) } } "template provisioning" should "provision template if missing" in monitoringEnv(Seq(Rule.failedPUT(s"/_template/${sharedCfg.templateName}", 999))) { (env: MonitoringEnv) => val thrown:HttpException = the [HttpException] thrownBy env.EF.publish("env", "a1", "a2")(sharedCfg).run thrown.code should equal (999) } it should "not try to provision template otherwise" in monitoringEnv() { (env: MonitoringEnv) => //wait until we see first metric, by that time all template check logic should be completed //send test metric val c = env.I.counter("m/test") c.increment val stopPublish = env.publish() eventually (timeout(Span(5, Seconds))) { env.H.requests.find(r => r.method == "POST") shouldBe defined } stopPublish() env.H.requests.find(r => r.method == "HEAD") shouldBe defined //expect lookup for template but no attempt to overwrite env.H.requests.find(r => r.method == "PUT") shouldBe empty } "publish" should "post first document" in monitoringEnv() { (env: MonitoringEnv) => val c = env.I.counter("m/test") c.increment val stopPublish = env.publish() eventually (timeout(Span(5, Seconds))) { env.H.requests.find(r => r.method == "POST" && r.contains("m.test")) shouldBe defined } stopPublish() } it should "continue posting documents periodically" in monitoringEnv() { (env: MonitoringEnv) => val c = env.I.counter("m/test") c.increment val stopPublish = env.publish() //update metric few times (waiting for longer than buffer window) => expect to see documents emitted (1 to 3) foreach { i => Thread.sleep(100) c.increment } eventually (timeout(Span(5, Seconds))) { env.H.requests.count(r => r.method == "POST" && r.contains("m.test")) should be > 3 } stopPublish() } //simulating errors on some requests private def failingHttpLayer(t: Throwable):InMemoryHttpLayer = new InMemoryHttpLayer(mutable.ListBuffer(Rule.defaultRules(sharedCfg): _*)) { var cnt = 0 override def http(v: HttpOp): Elastic.ES[String] = { cnt = cnt + 1 cnt match { case 4 => Kleisli.kleisli[Task, ElasticCfg, String]((cfg: ElasticCfg) => Task.fail(t)) case _ => super.http(v) } } } it should "not stop after http error" in monitoringEnv(failingHttpLayer(HttpException(500))) { (env: MonitoringEnv) => val c = env.I.counter("m/test") c.increment val stopPublish = env.publish() //update metric few times (waiting for longer than buffer window) => expect to see documents emitted (1 to 10) foreach { i => Thread.sleep(100) c.increment } eventually (timeout(Span(5, Seconds))) { env.H.requests.count(r => r.method == "POST" && r.contains("m.test")) should be > 9 } stopPublish() } it should "retry and proceed after non http error" in monitoringEnv(failingHttpLayer(new IOException("TestException"))) { (env: MonitoringEnv) => val c = env.I.counter("m/test") c.increment val stopPublish = env.publish() //update metric few times (waiting for longer than buffer window) => expect to see documents emitted (1 to 10) foreach { i => Thread.sleep(100) c.increment } eventually (timeout(Span(5, Seconds))) { env.H.requests.count(r => r.method == "POST" && r.contains("m.test")) should be > 10 } 
stopPublish() } }
neigor/funnel
elastic/src/test/scala/FlattendedSpec.scala
Scala
apache-2.0
7,984
package org.hibernate.cache.rediscala.regions

import java.util.Properties

import org.hibernate.cache.rediscala.client.RedisCache
import org.hibernate.cache.rediscala.strategy.RedisAccessStrategyFactory
import org.hibernate.cache.spi.TimestampsRegion

/**
 * RedisTimestampsRegion
 *
 * @author 배성혁 [email protected]
 * @since 2014. 2. 21. 오후 1:38
 */
class RedisTimestampsRegion(private[this] val _accessStrategyFactory: RedisAccessStrategyFactory,
                            private[this] val _cache: RedisCache,
                            private[this] val _regionName: String,
                            private[this] val _props: Properties)
  extends RedisGeneralDataRegion(_accessStrategyFactory, _cache, _regionName, _props)
  with TimestampsRegion {
}
debop/debop4s
hibernate-rediscala/src/main/scala/org/hibernate/cache/rediscala/regions/RedisTimestampsRegion.scala
Scala
apache-2.0
779
package truerss.api

import com.github.fntz.omhs.{CommonResponse, RoutingDSL}
import io.netty.handler.codec.http.HttpResponseStatus
import io.netty.util.CharsetUtil
import truerss.services.ApplicationPluginsService

class PluginsApi(private val pluginsService: ApplicationPluginsService) {

  import OMHSSupport._
  import RoutingDSL._
  import ZIOSupport.UIOImplicits._

  private val base = "api" / "v1" / "plugins"

  private val plugins = get(base / "all") ~> { () =>
    pluginsService.view
  }

  private val js = get(base / "js") ~> { () =>
    pluginsService.js.map { x =>
      CommonResponse(
        status = HttpResponseStatus.OK,
        contentType = "application/javascript",
        content = x.getBytes(CharsetUtil.UTF_8)
      )
    }
  }

  private val css = get(base / "css") ~> { () =>
    pluginsService.css.map { x =>
      CommonResponse(
        status = HttpResponseStatus.OK,
        contentType = "text/css",
        content = x.getBytes(CharsetUtil.UTF_8)
      )
    }
  }

  val route = plugins :: js :: css
}
truerss/truerss
src/main/scala/truerss/api/PluginsApi.scala
Scala
mit
1,042
import definiti.native._
import java.time.LocalDateTime

package object my {

  class IsNonEmpty[A](message: String = "This list should not be empty") extends SimpleVerification[Seq[A]](message) {
    override def isValid(list: Seq[A]): Boolean = ListExtension.nonEmpty(list)
  }

  case class MyType(attribute: Seq[String])

  object MyType {
    val verification: Verification[MyType] =
      Verification.all(Verification.all(NonEmptyStringList.verification).from[MyType](_.attribute, "attribute"))
  }

  object NonEmptyStringList {
    val verification: Verification[Seq[String]] = Verification.all(new my.IsNonEmpty[String]())
  }
}
definiti/definiti-scala-model
src/test/resources/samples/generics/attributeAsAliasTypeWithGeneric/output.scala
Scala
mit
628
/*******************************************************************************
 * Copyright (c) 2013 CWI
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 *
 *   * Michael Steindorfer - [email protected] - CWI
 ******************************************************************************/
package org.eclipse.imp.pdb.facts.impl.persistent.scala

import org.eclipse.imp.pdb.facts.IBool
import org.eclipse.imp.pdb.facts.`type`.TypeFactory
import org.eclipse.imp.pdb.facts.visitors.IValueVisitor

abstract sealed class BoolValue extends Value with IBool {

  override def t = TypeFactory.getInstance boolType

  def accept[T, E <: Throwable](v: IValueVisitor[T, E]): T = v visitBoolean this

  def getStringRepresentation: String = toString

  def equivalent(other: IBool): IBool = if (this eq other) TrueValue else FalseValue

}

case object TrueValue extends BoolValue {
  val getValue: Boolean = true;

  def and(other: IBool): IBool = other
  def or(other: IBool): IBool = this
  def xor(other: IBool): IBool = other.not
  def not: IBool = FalseValue
  def implies(other: IBool): IBool = other
}

case object FalseValue extends BoolValue {
  val getValue: Boolean = false;

  def and(other: IBool): IBool = this
  def or(other: IBool): IBool = other
  def xor(other: IBool): IBool = other
  def not: IBool = TrueValue
  def implies(other: IBool): IBool = TrueValue
}
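A hedged usage sketch of the boolean value objects defined above; it only exercises the operations shown in this file, and the demo object name is illustrative.

import org.eclipse.imp.pdb.facts.impl.persistent.scala.{TrueValue, FalseValue}

object BoolValueDemo extends App {
  println(TrueValue.and(FalseValue).getValue)     // false
  println(TrueValue.xor(FalseValue).getValue)     // true
  println(FalseValue.implies(TrueValue).getValue) // true
}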
msteindorfer/oopsla15-artifact
pdb.values.persistent.scala/src/main/scala/org/eclipse/imp/pdb/facts/impl/persistent/scala/BoolValue.scala
Scala
epl-1.0
1,595
package com.eharmony.aloha.models

import com.eharmony.aloha.audit.Auditor

/**
 * Created by ryan on 1/18/17.
 */
trait SubmodelBase[U, N, -A, +B <: U] extends Submodel[N, A, B] with Model[A, B] {

  def auditor: Auditor[U, N, B]

  final def apply(a: A): B = subvalue(a).audited

  protected[this] def failure(errorMsgs: => Seq[String] = Nil,
                              missingVarNames: => Set[String] = Set.empty,
                              subvalues: Seq[U] = Nil): Subvalue[B, N] =
    Subvalue(auditor.failure(modelId, errorMsgs, missingVarNames, subvalues), None)

  protected[this] def success(naturalValue: N,
                              errorMsgs: => Seq[String] = Nil,
                              missingVarNames: => Set[String] = Set.empty,
                              subvalues: Seq[U] = Nil,
                              prob: => Option[Float] = None): Subvalue[B, N] =
    Subvalue(auditor.success(modelId, naturalValue, errorMsgs, missingVarNames, subvalues, prob), Some(naturalValue))

  def close(): Unit = ()
}
eHarmony/aloha
aloha-core/src/main/scala/com/eharmony/aloha/models/SubmodelBase.scala
Scala
mit
1,085
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.zeppelin.display.angular import java.io.{ByteArrayOutputStream, PrintStream} import java.util import org.apache.zeppelin.display.{AngularObject, AngularObjectRegistry, GUI} import org.apache.zeppelin.interpreter._ import org.apache.zeppelin.user.AuthenticationInfo import org.scalatest.concurrent.Eventually import org.scalatest.time.{Seconds, Span} import org.scalatest.{BeforeAndAfter, BeforeAndAfterEach, FlatSpec, Matchers} /** * Test */ trait AbstractAngularElemTest extends FlatSpec with BeforeAndAfter with BeforeAndAfterEach with Eventually with Matchers { override def beforeEach() { val intpGroup = new InterpreterGroup() val context = new InterpreterContext("note", "paragraph", null, "title", "text", new AuthenticationInfo(), new util.HashMap[String, Object](), new GUI(), new GUI(), new AngularObjectRegistry(intpGroup.getId(), null), null, new util.LinkedList[InterpreterContextRunner](), new InterpreterOutput(null)); InterpreterContext.set(context) super.beforeEach() // To be stackable, must call super.beforeEach } def angularElem(elem: scala.xml.Elem): AbstractAngularElem; def angularModel(name: String): AbstractAngularModel; "AngularElem" should "provide onclick method" in { registrySize should be(0) var a = 0 val elem = angularElem(<div></div>).onClick(() => { a = a + 1 }) elem.angularObjects.get("ng-click") should not be(null) registrySize should be(1) // click create thread for callback function to run. So it'll may not immediately invoked // after click. 
therefore eventually should be click(elem) eventually (timeout(Span(5, Seconds))) { a should be(1) } click(elem) eventually (timeout(Span(5, Seconds))) { a should be(2) } // disassociate elem.disassociate() registrySize should be(0) } "AngularElem" should "print angular display directive only once in a paragraph" in { val out = new ByteArrayOutputStream() val printOut = new PrintStream(out) angularElem(<div></div>).display(printOut) out.toString should be("<div></div>") out.reset angularElem(<div></div>).display(printOut) out.toString should be("<div></div>") } "AngularElem" should "bind angularObject to ng-model directive " in { angularElem(<div></div>) .model("name", "value").toString should be("<div ng-model=\\"name\\"></div>") angularElem(<div></div>).model("name", "value").model() should be("value") angularElem(<div></div>).model() should be(None) } "AngularElem" should "able to disassociate AngularObjects" in { val elem1 = angularElem(<div></div>).model("name1", "value1") val elem2 = angularElem(<div></div>).model("name2", "value2") val elem3 = angularElem(<div></div>).model("name3", "value3") registrySize should be(3) elem1.disassociate() registrySize should be(2) elem2.disassociate() elem3.disassociate() registrySize should be(0) } "AngularElem" should "allow access to InterpreterContext inside of callback function" in { angularModel("name").value("value") var modelValue = "" val elem = angularElem(<div></div>).onClick(() => modelValue = angularModel("name")().toString ) click(elem) eventually (timeout(Span(5, Seconds))) { modelValue should be("value")} } def registry = { InterpreterContext.get().getAngularObjectRegistry } def registrySize = { registry.getAllWithGlobal("note").size() } def noteId = { InterpreterContext.get().getNoteId } def click(elem: org.apache.zeppelin.display.angular.AbstractAngularElem) = { fireEvent("ng-click", elem) } // simulate click def fireEvent(eventName: String, elem: org.apache.zeppelin.display.angular.AbstractAngularElem) = { val angularObject: AngularObject[Any] = elem.angularObjects(eventName); angularObject.set("event"); } }
vipul1409/zeppelin
zeppelin-display/src/test/scala/org/apache/zeppelin/display/angular/AbstractAngularElemTest.scala
Scala
apache-2.0
4,765
package com.asto.dmp.taobaowarn.util

import com.asto.dmp.taobaowarn.base.Constants
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.Logging
import org.apache.spark.rdd.RDD

/**
 * File-related utility object.
 */
object FileUtils extends Logging {

  private val conf = new Configuration()
  conf.set("fs.defaultFS", Constants.Hadoop.DEFAULT_FS)
  conf.set("mapreduce.jobtracker.address", Constants.Hadoop.JOBTRACKER_ADDRESS)

  def deleteFilesInHDFS(paths: String*) = {
    paths.foreach { path =>
      val filePath = new Path(path)
      val HDFSFilesSystem = filePath.getFileSystem(new Configuration())
      if (HDFSFilesSystem.exists(filePath)) {
        logInfo(Utils.logWrapper(s"Deleting directory: $filePath"))
        HDFSFilesSystem.delete(filePath, true)
      }
    }
  }

  def saveAsTextFile[T <: Product](rdd: RDD[T], savePath: String) = {
    deleteFilesInHDFS(savePath)
    logInfo(Utils.logWrapper(s"Writing data to $savePath"))
    rdd.map(_.productIterator.mkString(Constants.OutputPath.SEPARATOR)).coalesce(1).saveAsTextFile(savePath)
  }

  def saveAsTextFileForString(rdd: RDD[String], savePath: String) = {
    deleteFilesInHDFS(savePath)
    logInfo(Utils.logWrapper(s"Writing data to $savePath"))
    rdd.coalesce(1).saveAsTextFile(savePath)
  }

  def saveAsTextFile(text: String, savePath: String) = {
    deleteFilesInHDFS(savePath)
    logInfo(Utils.logWrapper(s"Writing data to $savePath"))
    val out = FileSystem.get(conf).create(new Path(savePath))
    out.write(text.getBytes)
    out.flush()
    out.close()
  }
}
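A hedged sketch of how a Spark job might call the helpers above; the SparkContext setup, output paths, and demo object name are illustrative assumptions, and actual behaviour depends on the Hadoop settings in Constants.

import org.apache.spark.{SparkConf, SparkContext}
import com.asto.dmp.taobaowarn.util.FileUtils

object FileUtilsDemo extends App {
  val sc = new SparkContext(new SparkConf().setAppName("demo").setMaster("local[*]"))

  // Tuple RDD: each element is joined with the configured separator and written as one file.
  FileUtils.saveAsTextFile(sc.parallelize(Seq(("a", 1), ("b", 2))), "/tmp/pairs")

  // String RDD and plain-string variants.
  FileUtils.saveAsTextFileForString(sc.parallelize(Seq("x", "y")), "/tmp/strings")
  FileUtils.saveAsTextFile("plain text", "/tmp/plain")

  sc.stop()
}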
zj-lingxin/dmp_taobao_warn
src/main/scala/com/asto/dmp/taobaowarn/util/FileUtils.scala
Scala
mit
1,624
/***********************************************************************
 * Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Apache License, Version 2.0
 * which accompanies this distribution and is available at
 * http://www.opensource.org/licenses/apache2.0.php.
 *************************************************************************/

package org.locationtech.geomesa.raster.iterators

import java.util.{Map => JMap}

import org.apache.accumulo.core.data.{Key, Value}
import org.apache.accumulo.core.iterators.{IteratorEnvironment, SortedKeyValueIterator}
import org.locationtech.geomesa.accumulo.iterators._
import org.locationtech.geomesa.accumulo.iterators.legacy._
import org.locationtech.geomesa.raster.index.RasterEntry

class RasterFilteringIterator
    extends GeomesaFilteringIterator
    with HasFeatureType
    with SetTopUnique
    with SetTopFilter
    with HasFilter {

  var setTopOptimized: (Key) => Unit = null

  override def init(source: SortedKeyValueIterator[Key, Value],
                    options: JMap[String, String],
                    env: IteratorEnvironment) = {
    super.init(source, options, env)
    initFeatureType(options)
    init(featureType, options)
    logger.debug(s"In RFI with $filter")
    setTopOptimized = if (filter == null) setTopInclude else setTopFilter
  }

  override def setTopFilter(key: Key): Unit = {
    val value = source.getTopValue
    val sf = RasterEntry.decodeIndexCQMetadataToSf(key.getColumnQualifierData.toArray)
    if (filter.evaluate(sf)) {
      topKey = key
      topValue = value
    }
  }

  override def setTopConditionally(): Unit = setTopOptimized(source.getTopKey)
}

object RasterFilteringIterator {
  val name: String = "raster-filtering-iterator"
  val priority: Int = 90
}
tkunicki/geomesa
geomesa-accumulo/geomesa-accumulo-raster/src/main/scala/org/locationtech/geomesa/raster/iterators/RasterFilteringIterator.scala
Scala
apache-2.0
1,876
package org.apache.spark.ml.tree.impl /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.IOException import org.apache.spark.internal.Logging import org.apache.spark.ml.classification.DecisionTreeClassificationModel import org.apache.spark.ml.feature.LabeledPoint import org.apache.spark.ml.regression.DecisionTreeRegressionModel import org.apache.spark.ml.tree._ import org.apache.spark.ml.util.Instrumentation import org.apache.spark.mllib.tree.configuration.{Algo => OldAlgo, Strategy => OldStrategy} import org.apache.spark.mllib.tree.impurity.ImpurityCalculator import org.apache.spark.mllib.tree.model.ImpurityStats import org.apache.spark.rdd.RDD import org.apache.spark.storage.StorageLevel import org.apache.spark.util.random.{SamplingUtils, XORShiftRandom} import scala.collection.{Map, mutable} import scala.util.Random /** * !!! --- !!! --- !!! --- !!! --- !!! --- !!! --- !!! --- !!! --- !!! --- !!! --- * Code from Apache Spark MLLib 2.1.0 https://github.com/apache/spark * The following has been slightly modified to perform evaluation about memory consumption. * !!! --- !!! --- !!! --- !!! --- !!! --- !!! --- !!! --- !!! --- !!! --- !!! --- * * ALGORITHM * * This is a sketch of the algorithm to help new developers. * * The algorithm partitions data by instances (rows). * On each iteration, the algorithm splits a set of nodes. In order to choose the best split * for a given node, sufficient statistics are collected from the distributed data. * For each node, the statistics are collected to some worker node, and that worker selects * the best split. * * This setup requires discretization of continuous features. This binning is done in the * findSplits() method during initialization, after which each continuous feature becomes * an ordered discretized feature with at most maxBins possible values. * * The main loop in the algorithm operates on a queue of nodes (nodeStack). These nodes * lie at the periphery of the tree being trained. If multiple trees are being trained at once, * then this queue contains nodes from all of them. Each iteration works roughly as follows: * On the master node: * - Some number of nodes are pulled off of the queue (based on the amount of memory * required for their sufficient statistics). * - For random forests, if featureSubsetStrategy is not "all," then a subset of candidate * features are chosen for each node. See method selectNodesToSplit(). * On worker nodes, via method findBestSplits(): * - The worker makes one pass over its subset of instances. * - For each (tree, node, feature, split) tuple, the worker collects statistics about * splitting. Note that the set of (tree, node) pairs is limited to the nodes selected * from the queue for this iteration. 
The set of features considered can also be limited * based on featureSubsetStrategy. * - For each node, the statistics for that node are aggregated to a particular worker * via reduceByKey(). The designated worker chooses the best (feature, split) pair, * or chooses to stop splitting if the stopping criteria are met. * On the master node: * - The master collects all decisions about splitting nodes and updates the model. * - The updated model is passed to the workers on the next iteration. * This process continues until the node queue is empty. * * Most of the methods in this implementation support the statistics aggregation, which is * the heaviest part of the computation. In general, this implementation is bound by either * the cost of statistics computation on workers or by communicating the sufficient statistics. */ object RandomForestRunner extends Logging { /** * Train a random forest. * * @param input Training data: RDD of `LabeledPoint` * @return an unweighted set of trees */ def run( input: RDD[LabeledPoint], strategy: OldStrategy, numTrees: Int, featureSubsetStrategy: String, seed: Long, instr: Option[Instrumentation], parentUID: Option[String] = None): Array[DecisionTreeModel] = { val timer = new TimeTracker() timer.start("total") timer.start("init") val retaggedInput = input.retag(classOf[LabeledPoint]) val metadata = DecisionTreeMetadata.buildMetadata(retaggedInput, strategy, numTrees, featureSubsetStrategy) input.foreachPartition(t => System.gc()) instr match { case Some(instrumentation) => instrumentation.logNumFeatures(metadata.numFeatures) instrumentation.logNumClasses(metadata.numClasses) case None => logInfo("numFeatures: " + metadata.numFeatures) logInfo("numClasses: " + metadata.numClasses) } // Find the splits and the corresponding bins (interval between the splits) using a sample // of the input data. timer.start("findSplits") input.foreachPartition(t => System.gc()) val splits = findSplits(retaggedInput, metadata, seed) input.foreachPartition(t => System.gc()) timer.stop("findSplits") logDebug("numBins: feature: number of bins") logDebug(Range(0, metadata.numFeatures).map { featureIndex => s"\\t$featureIndex\\t${metadata.numBins(featureIndex)}" }.mkString("\\n")) // Bin feature values (TreePoint representation). // Cache input RDD for speedup during multiple passes. val treeInput = TreePoint.convertToTreeRDD(retaggedInput, splits, metadata) val withReplacement = numTrees > 1 val baggedInput = BaggedPoint .convertToBaggedRDD(treeInput, strategy.subsamplingRate, numTrees, withReplacement, seed) .persist(StorageLevel.MEMORY_AND_DISK) input.foreachPartition(t => System.gc()) // depth of the decision tree val maxDepth = strategy.maxDepth require(maxDepth <= 30, s"DecisionTree currently only supports maxDepth <= 30, but was given maxDepth = $maxDepth.") // Max memory usage for aggregates // TODO: Calculate memory usage more precisely. val maxMemoryUsage: Long = strategy.maxMemoryInMB * 1024L * 1024L logDebug("max memory usage for aggregates = " + maxMemoryUsage + " bytes.") /* * The main idea here is to perform group-wise training of the decision tree nodes thus * reducing the passes over the data from (# nodes) to (# nodes / maxNumberOfNodesPerGroup). * Each data sample is handled by a particular node (or it reaches a leaf and is not used * in lower levels). */ // Create an RDD of node Id cache. // At first, all the rows belong to the root nodes (node Id == 1). 
val nodeIdCache = if (strategy.useNodeIdCache) { Some(NodeIdCache.init( data = baggedInput, numTrees = numTrees, checkpointInterval = strategy.checkpointInterval, initVal = 1)) } else { None } /* Stack of nodes to train: (treeIndex, node) The reason this is a stack is that we train many trees at once, but we want to focus on completing trees, rather than training all simultaneously. If we are splitting nodes from 1 tree, then the new nodes to split will be put at the top of this stack, so we will continue training the same tree in the next iteration. This focus allows us to send fewer trees to workers on each iteration; see topNodesForGroup below. */ val nodeStack = new mutable.Stack[(Int, LearningNode)] val rng = new Random() rng.setSeed(seed) // Allocate and queue root nodes. val topNodes = Array.fill[LearningNode](numTrees)(LearningNode.emptyNode(nodeIndex = 1)) Range(0, numTrees).foreach(treeIndex => nodeStack.push((treeIndex, topNodes(treeIndex)))) timer.stop("init") while (nodeStack.nonEmpty) { // Collect some nodes to split, and choose features for each node (if subsampling). // Each group of nodes may come from one or multiple trees, and at multiple levels. val (nodesForGroup, treeToNodeToIndexInfo) = RandomForestRunner.selectNodesToSplit(nodeStack, maxMemoryUsage, metadata, rng) // Sanity check (should never occur): assert(nodesForGroup.nonEmpty, s"RandomForest selected empty nodesForGroup. Error for unknown reason.") // Only send trees to worker if they contain nodes being split this iteration. val topNodesForGroup: Map[Int, LearningNode] = nodesForGroup.keys.map(treeIdx => treeIdx -> topNodes(treeIdx)).toMap // Choose node splits, and enqueue new nodes as needed. timer.start("findBestSplits") input.foreachPartition(t => System.gc()) RandomForestRunner.findBestSplits(baggedInput, metadata, topNodesForGroup, nodesForGroup, treeToNodeToIndexInfo, splits, nodeStack, timer, nodeIdCache) input.foreachPartition(t => System.gc()) timer.stop("findBestSplits") } baggedInput.unpersist() timer.stop("total") logInfo("Internal timing for DecisionTree:") logInfo(s"$timer") // Delete any remaining checkpoints used for node Id cache. if (nodeIdCache.nonEmpty) { try { nodeIdCache.get.deleteAllCheckpoints() } catch { case e: IOException => logWarning(s"delete all checkpoints failed. Error reason: ${e.getMessage}") } } val numFeatures = metadata.numFeatures parentUID match { case Some(uid) => if (strategy.algo == OldAlgo.Classification) { topNodes.map { rootNode => new DecisionTreeClassificationModel(uid, rootNode.toNode, numFeatures, strategy.getNumClasses) } } else { topNodes.map { rootNode => new DecisionTreeRegressionModel(uid, rootNode.toNode, numFeatures) } } case None => if (strategy.algo == OldAlgo.Classification) { topNodes.map { rootNode => new DecisionTreeClassificationModel(rootNode.toNode, numFeatures, strategy.getNumClasses) } } else { topNodes.map(rootNode => new DecisionTreeRegressionModel(rootNode.toNode, numFeatures)) } } } /** * Helper for binSeqOp, for data which can contain a mix of ordered and unordered features. * * For ordered features, a single bin is updated. * For unordered features, bins correspond to subsets of categories; either the left or right bin * for each subset is updated. * * @param agg Array storing aggregate calculation, with a set of sufficient statistics for * each (feature, bin). * @param treePoint Data point being aggregated. * @param splits possible splits indexed (numFeatures)(numSplits) * @param unorderedFeatures Set of indices of unordered features. 
* @param instanceWeight Weight (importance) of instance in dataset. */ private def mixedBinSeqOp( agg: DTStatsAggregator, treePoint: TreePoint, splits: Array[Array[Split]], unorderedFeatures: Set[Int], instanceWeight: Double, featuresForNode: Option[Array[Int]]): Unit = { val numFeaturesPerNode = if (featuresForNode.nonEmpty) { // Use subsampled features featuresForNode.get.length } else { // Use all features agg.metadata.numFeatures } // Iterate over features. var featureIndexIdx = 0 while (featureIndexIdx < numFeaturesPerNode) { val featureIndex = if (featuresForNode.nonEmpty) { featuresForNode.get.apply(featureIndexIdx) } else { featureIndexIdx } if (unorderedFeatures.contains(featureIndex)) { // Unordered feature val featureValue = treePoint.binnedFeatures(featureIndex) val leftNodeFeatureOffset = agg.getFeatureOffset(featureIndexIdx) // Update the left or right bin for each split. val numSplits = agg.metadata.numSplits(featureIndex) val featureSplits = splits(featureIndex) var splitIndex = 0 while (splitIndex < numSplits) { if (featureSplits(splitIndex).shouldGoLeft(featureValue, featureSplits)) { agg.featureUpdate(leftNodeFeatureOffset, splitIndex, treePoint.label, instanceWeight) } splitIndex += 1 } } else { // Ordered feature val binIndex = treePoint.binnedFeatures(featureIndex) agg.update(featureIndexIdx, binIndex, treePoint.label, instanceWeight) } featureIndexIdx += 1 } } /** * Helper for binSeqOp, for regression and for classification with only ordered features. * * For each feature, the sufficient statistics of one bin are updated. * * @param agg Array storing aggregate calculation, with a set of sufficient statistics for * each (feature, bin). * @param treePoint Data point being aggregated. * @param instanceWeight Weight (importance) of instance in dataset. */ private def orderedBinSeqOp( agg: DTStatsAggregator, treePoint: TreePoint, instanceWeight: Double, featuresForNode: Option[Array[Int]]): Unit = { val label = treePoint.label // Iterate over features. if (featuresForNode.nonEmpty) { // Use subsampled features var featureIndexIdx = 0 while (featureIndexIdx < featuresForNode.get.length) { val binIndex = treePoint.binnedFeatures(featuresForNode.get.apply(featureIndexIdx)) agg.update(featureIndexIdx, binIndex, label, instanceWeight) featureIndexIdx += 1 } } else { // Use all features val numFeatures = agg.metadata.numFeatures var featureIndex = 0 while (featureIndex < numFeatures) { val binIndex = treePoint.binnedFeatures(featureIndex) agg.update(featureIndex, binIndex, label, instanceWeight) featureIndex += 1 } } } /** * Given a group of nodes, this finds the best split for each node. * * @param input Training data: RDD of [[TreePoint]] * @param metadata Learning and dataset metadata * @param topNodesForGroup For each tree in group, tree index -> root node. * Used for matching instances with nodes. * @param nodesForGroup Mapping: treeIndex --> nodes to be split in tree * @param treeToNodeToIndexInfo Mapping: treeIndex --> nodeIndex --> nodeIndexInfo, * where nodeIndexInfo stores the index in the group and the * feature subsets (if using feature subsets). * @param splits possible splits for all features, indexed (numFeatures)(numSplits) * @param nodeStack Queue of nodes to split, with values (treeIndex, node). * Updated with new non-leaf nodes which are created. * @param nodeIdCache Node Id cache containing an RDD of Array[Int] where * each value in the array is the data point's node Id * for a corresponding tree. 
This is used to prevent the need * to pass the entire tree to the executors during * the node stat aggregation phase. */ private[tree] def findBestSplits( input: RDD[BaggedPoint[TreePoint]], metadata: DecisionTreeMetadata, topNodesForGroup: Map[Int, LearningNode], nodesForGroup: Map[Int, Array[LearningNode]], treeToNodeToIndexInfo: Map[Int, Map[Int, NodeIndexInfo]], splits: Array[Array[Split]], nodeStack: mutable.Stack[(Int, LearningNode)], timer: TimeTracker = new TimeTracker, nodeIdCache: Option[NodeIdCache] = None): Unit = { /* * The high-level descriptions of the best split optimizations are noted here. * * *Group-wise training* * We perform bin calculations for groups of nodes to reduce the number of * passes over the data. Each iteration requires more computation and storage, * but saves several iterations over the data. * * *Bin-wise computation* * We use a bin-wise best split computation strategy instead of a straightforward best split * computation strategy. Instead of analyzing each sample for contribution to the left/right * child node impurity of every split, we first categorize each feature of a sample into a * bin. We exploit this structure to calculate aggregates for bins and then use these aggregates * to calculate information gain for each split. * * *Aggregation over partitions* * Instead of performing a flatMap/reduceByKey operation, we exploit the fact that we know * the number of splits in advance. Thus, we store the aggregates (at the appropriate * indices) in a single array for all bins and rely upon the RDD aggregate method to * drastically reduce the communication overhead. */ // numNodes: Number of nodes in this group val numNodes = nodesForGroup.values.map(_.length).sum logDebug("numNodes = " + numNodes) logDebug("numFeatures = " + metadata.numFeatures) logDebug("numClasses = " + metadata.numClasses) logDebug("isMulticlass = " + metadata.isMulticlass) logDebug("isMulticlassWithCategoricalFeatures = " + metadata.isMulticlassWithCategoricalFeatures) logDebug("using nodeIdCache = " + nodeIdCache.nonEmpty.toString) /** * Performs a sequential aggregation over a partition for a particular tree and node. * * For each feature, the aggregate sufficient statistics are updated for the relevant * bins. * * @param treeIndex Index of the tree that we want to perform aggregation for. * @param nodeInfo The node info for the tree node. * @param agg Array storing aggregate calculation, with a set of sufficient statistics * for each (node, feature, bin). * @param baggedPoint Data point being aggregated. */ def nodeBinSeqOp( treeIndex: Int, nodeInfo: NodeIndexInfo, agg: Array[DTStatsAggregator], baggedPoint: BaggedPoint[TreePoint]): Unit = { if (nodeInfo != null) { val aggNodeIndex = nodeInfo.nodeIndexInGroup val featuresForNode = nodeInfo.featureSubset val instanceWeight = baggedPoint.subsampleWeights(treeIndex) if (metadata.unorderedFeatures.isEmpty) { orderedBinSeqOp(agg(aggNodeIndex), baggedPoint.datum, instanceWeight, featuresForNode) } else { mixedBinSeqOp(agg(aggNodeIndex), baggedPoint.datum, splits, metadata.unorderedFeatures, instanceWeight, featuresForNode) } agg(aggNodeIndex).updateParent(baggedPoint.datum.label, instanceWeight) } } /** * Performs a sequential aggregation over a partition. * * Each data point contributes to one node. For each feature, * the aggregate sufficient statistics are updated for the relevant bins. * * @param agg Array storing aggregate calculation, with a set of sufficient statistics for * each (node, feature, bin). 
* @param baggedPoint Data point being aggregated. * @return agg */ def binSeqOp( agg: Array[DTStatsAggregator], baggedPoint: BaggedPoint[TreePoint]): Array[DTStatsAggregator] = { treeToNodeToIndexInfo.foreach { case (treeIndex, nodeIndexToInfo) => val nodeIndex = topNodesForGroup(treeIndex).predictImpl(baggedPoint.datum.binnedFeatures, splits) nodeBinSeqOp(treeIndex, nodeIndexToInfo.getOrElse(nodeIndex, null), agg, baggedPoint) } agg } /** * Do the same thing as binSeqOp, but with nodeIdCache. */ def binSeqOpWithNodeIdCache( agg: Array[DTStatsAggregator], dataPoint: (BaggedPoint[TreePoint], Array[Int])): Array[DTStatsAggregator] = { treeToNodeToIndexInfo.foreach { case (treeIndex, nodeIndexToInfo) => val baggedPoint = dataPoint._1 val nodeIdCache = dataPoint._2 val nodeIndex = nodeIdCache(treeIndex) nodeBinSeqOp(treeIndex, nodeIndexToInfo.getOrElse(nodeIndex, null), agg, baggedPoint) } agg } /** * Get node index in group --> features indices map, * which is a short cut to find feature indices for a node given node index in group. */ def getNodeToFeatures( treeToNodeToIndexInfo: Map[Int, Map[Int, NodeIndexInfo]]): Option[Map[Int, Array[Int]]] = { if (!metadata.subsamplingFeatures) { None } else { val mutableNodeToFeatures = new mutable.HashMap[Int, Array[Int]]() treeToNodeToIndexInfo.values.foreach { nodeIdToNodeInfo => nodeIdToNodeInfo.values.foreach { nodeIndexInfo => assert(nodeIndexInfo.featureSubset.isDefined) mutableNodeToFeatures(nodeIndexInfo.nodeIndexInGroup) = nodeIndexInfo.featureSubset.get } } Some(mutableNodeToFeatures.toMap) } } // array of nodes to train indexed by node index in group val nodes = new Array[LearningNode](numNodes) nodesForGroup.foreach { case (treeIndex, nodesForTree) => nodesForTree.foreach { node => nodes(treeToNodeToIndexInfo(treeIndex)(node.id).nodeIndexInGroup) = node } } // Calculate best splits for all nodes in the group timer.start("chooseSplits") // In each partition, iterate all instances and compute aggregate stats for each node, // yield a (nodeIndex, nodeAggregateStats) pair for each node. // After a `reduceByKey` operation, // stats of a node will be shuffled to a particular partition and be combined together, // then best splits for nodes are found there. // Finally, only best Splits for nodes are collected to driver to construct decision tree. 
val nodeToFeatures = getNodeToFeatures(treeToNodeToIndexInfo) val nodeToFeaturesBc = input.sparkContext.broadcast(nodeToFeatures) val partitionAggregates: RDD[(Int, DTStatsAggregator)] = if (nodeIdCache.nonEmpty) { input.zip(nodeIdCache.get.nodeIdsForInstances).mapPartitions { points => // Construct a nodeStatsAggregators array to hold node aggregate stats, // each node will have a nodeStatsAggregator val nodeStatsAggregators = Array.tabulate(numNodes) { nodeIndex => val featuresForNode = nodeToFeaturesBc.value.map { nodeToFeatures => nodeToFeatures(nodeIndex) } new DTStatsAggregator(metadata, featuresForNode) } // iterator all instances in current partition and update aggregate stats points.foreach(binSeqOpWithNodeIdCache(nodeStatsAggregators, _)) // transform nodeStatsAggregators array to (nodeIndex, nodeAggregateStats) pairs, // which can be combined with other partition using `reduceByKey` System.gc() nodeStatsAggregators.view.zipWithIndex.map(_.swap).iterator } } else { input.mapPartitions { points => // Construct a nodeStatsAggregators array to hold node aggregate stats, // each node will have a nodeStatsAggregator val nodeStatsAggregators = Array.tabulate(numNodes) { nodeIndex => val featuresForNode = nodeToFeaturesBc.value.flatMap { nodeToFeatures => Some(nodeToFeatures(nodeIndex)) } new DTStatsAggregator(metadata, featuresForNode) } // iterator all instances in current partition and update aggregate stats points.foreach(binSeqOp(nodeStatsAggregators, _)) // transform nodeStatsAggregators array to (nodeIndex, nodeAggregateStats) pairs, // which can be combined with other partition using `reduceByKey` System.gc() nodeStatsAggregators.view.zipWithIndex.map(_.swap).iterator } } val nodeToBestSplits: Map[Int, (Split, ImpurityStats)] = partitionAggregates.reduceByKey((a, b) => a.merge(b)).map { case (nodeIndex, aggStats) => val featuresForNode = nodeToFeaturesBc.value.flatMap { nodeToFeatures => Some(nodeToFeatures(nodeIndex)) } // find best split for each node val (split: Split, stats: ImpurityStats) = binsToBestSplit(aggStats, splits, featuresForNode, nodes(nodeIndex)) (nodeIndex, (split, stats)) }.collectAsMap() partitionAggregates.foreachPartition(t => System.gc()) timer.stop("chooseSplits") val nodeIdUpdaters = if (nodeIdCache.nonEmpty) { Array.fill[mutable.Map[Int, NodeIndexUpdater]]( metadata.numTrees)(mutable.Map[Int, NodeIndexUpdater]()) } else { null } // Iterate over all nodes in this group. nodesForGroup.foreach { case (treeIndex, nodesForTree) => nodesForTree.foreach { node => val nodeIndex = node.id val nodeInfo = treeToNodeToIndexInfo(treeIndex)(nodeIndex) val aggNodeIndex = nodeInfo.nodeIndexInGroup val (split: Split, stats: ImpurityStats) = nodeToBestSplits(aggNodeIndex) logDebug("best split = " + split) // Extract info for this node. Create children if not leaf. 
val isLeaf = (stats.gain <= 0) || (LearningNode.indexToLevel(nodeIndex) == metadata.maxDepth) node.isLeaf = isLeaf node.stats = stats logDebug("Node = " + node) if (!isLeaf) { node.split = Some(split) val childIsLeaf = (LearningNode.indexToLevel(nodeIndex) + 1) == metadata.maxDepth val leftChildIsLeaf = childIsLeaf || (stats.leftImpurity == 0.0) val rightChildIsLeaf = childIsLeaf || (stats.rightImpurity == 0.0) node.leftChild = Some(LearningNode(LearningNode.leftChildIndex(nodeIndex), leftChildIsLeaf, ImpurityStats.getEmptyImpurityStats(stats.leftImpurityCalculator))) node.rightChild = Some(LearningNode(LearningNode.rightChildIndex(nodeIndex), rightChildIsLeaf, ImpurityStats.getEmptyImpurityStats(stats.rightImpurityCalculator))) if (nodeIdCache.nonEmpty) { val nodeIndexUpdater = NodeIndexUpdater( split = split, nodeIndex = nodeIndex) nodeIdUpdaters(treeIndex).put(nodeIndex, nodeIndexUpdater) } // enqueue left child and right child if they are not leaves if (!leftChildIsLeaf) { nodeStack.push((treeIndex, node.leftChild.get)) } if (!rightChildIsLeaf) { nodeStack.push((treeIndex, node.rightChild.get)) } logDebug("leftChildIndex = " + node.leftChild.get.id + ", impurity = " + stats.leftImpurity) logDebug("rightChildIndex = " + node.rightChild.get.id + ", impurity = " + stats.rightImpurity) } } } if (nodeIdCache.nonEmpty) { // Update the cache if needed. nodeIdCache.get.updateNodeIndices(input, nodeIdUpdaters, splits) } } /** * Calculate the impurity statistics for a given (feature, split) based upon left/right * aggregates. * * @param stats the recycle impurity statistics for this feature's all splits, * only 'impurity' and 'impurityCalculator' are valid between each iteration * @param leftImpurityCalculator left node aggregates for this (feature, split) * @param rightImpurityCalculator right node aggregate for this (feature, split) * @param metadata learning and dataset metadata for DecisionTree * @return Impurity statistics for this (feature, split) */ private def calculateImpurityStats( stats: ImpurityStats, leftImpurityCalculator: ImpurityCalculator, rightImpurityCalculator: ImpurityCalculator, metadata: DecisionTreeMetadata): ImpurityStats = { val parentImpurityCalculator: ImpurityCalculator = if (stats == null) { leftImpurityCalculator.copy.add(rightImpurityCalculator) } else { stats.impurityCalculator } val impurity: Double = if (stats == null) { parentImpurityCalculator.calculate() } else { stats.impurity } val leftCount = leftImpurityCalculator.count val rightCount = rightImpurityCalculator.count val totalCount = leftCount + rightCount // If left child or right child doesn't satisfy minimum instances per node, // then this split is invalid, return invalid information gain stats. if ((leftCount < metadata.minInstancesPerNode) || (rightCount < metadata.minInstancesPerNode)) { return ImpurityStats.getInvalidImpurityStats(parentImpurityCalculator) } val leftImpurity = leftImpurityCalculator.calculate() // Note: This equals 0 if count = 0 val rightImpurity = rightImpurityCalculator.calculate() val leftWeight = leftCount / totalCount.toDouble val rightWeight = rightCount / totalCount.toDouble val gain = impurity - leftWeight * leftImpurity - rightWeight * rightImpurity // if information gain doesn't satisfy minimum information gain, // then this split is invalid, return invalid information gain stats. 
if (gain < metadata.minInfoGain) { return ImpurityStats.getInvalidImpurityStats(parentImpurityCalculator) } new ImpurityStats(gain, impurity, parentImpurityCalculator, leftImpurityCalculator, rightImpurityCalculator) } /** * Find the best split for a node. * * @param binAggregates Bin statistics. * @return tuple for best split: (Split, information gain, prediction at node) */ private[tree] def binsToBestSplit( binAggregates: DTStatsAggregator, splits: Array[Array[Split]], featuresForNode: Option[Array[Int]], node: LearningNode): (Split, ImpurityStats) = { // Calculate InformationGain and ImpurityStats if current node is top node val level = LearningNode.indexToLevel(node.id) var gainAndImpurityStats: ImpurityStats = if (level == 0) { null } else { node.stats } val validFeatureSplits = Range(0, binAggregates.metadata.numFeaturesPerNode).view.map { featureIndexIdx => featuresForNode.map(features => (featureIndexIdx, features(featureIndexIdx))) .getOrElse((featureIndexIdx, featureIndexIdx)) }.withFilter { case (_, featureIndex) => binAggregates.metadata.numSplits(featureIndex) != 0 } // For each (feature, split), calculate the gain, and select the best (feature, split). val (bestSplit, bestSplitStats) = validFeatureSplits.map { case (featureIndexIdx, featureIndex) => val numSplits = binAggregates.metadata.numSplits(featureIndex) if (binAggregates.metadata.isContinuous(featureIndex)) { // Cumulative sum (scanLeft) of bin statistics. // Afterwards, binAggregates for a bin is the sum of aggregates for // that bin + all preceding bins. val nodeFeatureOffset = binAggregates.getFeatureOffset(featureIndexIdx) var splitIndex = 0 while (splitIndex < numSplits) { binAggregates.mergeForFeature(nodeFeatureOffset, splitIndex + 1, splitIndex) splitIndex += 1 } // Find best split. val (bestFeatureSplitIndex, bestFeatureGainStats) = Range(0, numSplits).map { splitIdx => val leftChildStats = binAggregates.getImpurityCalculator(nodeFeatureOffset, splitIdx) val rightChildStats = binAggregates.getImpurityCalculator(nodeFeatureOffset, numSplits) rightChildStats.subtract(leftChildStats) gainAndImpurityStats = calculateImpurityStats(gainAndImpurityStats, leftChildStats, rightChildStats, binAggregates.metadata) (splitIdx, gainAndImpurityStats) }.maxBy(_._2.gain) (splits(featureIndex)(bestFeatureSplitIndex), bestFeatureGainStats) } else if (binAggregates.metadata.isUnordered(featureIndex)) { // Unordered categorical feature val leftChildOffset = binAggregates.getFeatureOffset(featureIndexIdx) val (bestFeatureSplitIndex, bestFeatureGainStats) = Range(0, numSplits).map { splitIndex => val leftChildStats = binAggregates.getImpurityCalculator(leftChildOffset, splitIndex) val rightChildStats = binAggregates.getParentImpurityCalculator() .subtract(leftChildStats) gainAndImpurityStats = calculateImpurityStats(gainAndImpurityStats, leftChildStats, rightChildStats, binAggregates.metadata) (splitIndex, gainAndImpurityStats) }.maxBy(_._2.gain) (splits(featureIndex)(bestFeatureSplitIndex), bestFeatureGainStats) } else { // Ordered categorical feature val nodeFeatureOffset = binAggregates.getFeatureOffset(featureIndexIdx) val numCategories = binAggregates.metadata.numBins(featureIndex) /* Each bin is one category (feature value). * The bins are ordered based on centroidForCategories, and this ordering determines which * splits are considered. (With K categories, we consider K - 1 possible splits.) 
* * centroidForCategories is a list: (category, centroid) */ val centroidForCategories = Range(0, numCategories).map { featureValue => val categoryStats = binAggregates.getImpurityCalculator(nodeFeatureOffset, featureValue) val centroid = if (categoryStats.count != 0) { if (binAggregates.metadata.isMulticlass) { // multiclass classification // For categorical variables in multiclass classification, // the bins are ordered by the impurity of their corresponding labels. categoryStats.calculate() } else if (binAggregates.metadata.isClassification) { // binary classification // For categorical variables in binary classification, // the bins are ordered by the count of class 1. categoryStats.stats(1) } else { // regression // For categorical variables in regression and binary classification, // the bins are ordered by the prediction. categoryStats.predict } } else { Double.MaxValue } (featureValue, centroid) } logDebug("Centroids for categorical variable: " + centroidForCategories.mkString(",")) // bins sorted by centroids val categoriesSortedByCentroid = centroidForCategories.toList.sortBy(_._2) logDebug("Sorted centroids for categorical variable = " + categoriesSortedByCentroid.mkString(",")) // Cumulative sum (scanLeft) of bin statistics. // Afterwards, binAggregates for a bin is the sum of aggregates for // that bin + all preceding bins. var splitIndex = 0 while (splitIndex < numSplits) { val currentCategory = categoriesSortedByCentroid(splitIndex)._1 val nextCategory = categoriesSortedByCentroid(splitIndex + 1)._1 binAggregates.mergeForFeature(nodeFeatureOffset, nextCategory, currentCategory) splitIndex += 1 } // lastCategory = index of bin with total aggregates for this (node, feature) val lastCategory = categoriesSortedByCentroid.last._1 // Find best split. val (bestFeatureSplitIndex, bestFeatureGainStats) = Range(0, numSplits).map { splitIndex => val featureValue = categoriesSortedByCentroid(splitIndex)._1 val leftChildStats = binAggregates.getImpurityCalculator(nodeFeatureOffset, featureValue) val rightChildStats = binAggregates.getImpurityCalculator(nodeFeatureOffset, lastCategory) rightChildStats.subtract(leftChildStats) gainAndImpurityStats = calculateImpurityStats(gainAndImpurityStats, leftChildStats, rightChildStats, binAggregates.metadata) (splitIndex, gainAndImpurityStats) }.maxBy(_._2.gain) val categoriesForSplit = categoriesSortedByCentroid.map(_._1.toDouble).slice(0, bestFeatureSplitIndex + 1) val bestFeatureSplit = new CategoricalSplit(featureIndex, categoriesForSplit.toArray, numCategories) (bestFeatureSplit, bestFeatureGainStats) } }.maxBy(_._2.gain) (bestSplit, bestSplitStats) } /** * Returns splits for decision tree calculation. * Continuous and categorical features are handled differently. * * Continuous features: * For each feature, there are numBins - 1 possible splits representing the possible binary * decisions at each node in the tree. * This finds locations (feature values) for splits using a subsample of the data. * * Categorical features: * For each feature, there is 1 bin per split. * Splits and bins are handled in 2 ways: * (a) "unordered features" * For multiclass classification with a low-arity feature * (i.e., if isMulticlass && isSpaceSufficientForAllCategoricalSplits), * the feature is split based on subsets of categories. * (b) "ordered features" * For regression and binary classification, * and for multiclass classification with a high-arity feature, * there is one bin per category. 
* * @param input Training data: RDD of [[LabeledPoint]] * @param metadata Learning and dataset metadata * @param seed random seed * @return Splits, an Array of [[Split]] * of size (numFeatures, numSplits) */ protected[tree] def findSplits( input: RDD[LabeledPoint], metadata: DecisionTreeMetadata, seed: Long): Array[Array[Split]] = { logDebug("isMulticlass = " + metadata.isMulticlass) val numFeatures = metadata.numFeatures // Sample the input only if there are continuous features. val continuousFeatures = Range(0, numFeatures).filter(metadata.isContinuous) val sampledInput = if (continuousFeatures.nonEmpty) { // Calculate the number of samples for approximate quantile calculation. val requiredSamples = math.max(metadata.maxBins * metadata.maxBins, 10000) val fraction = if (requiredSamples < metadata.numExamples) { requiredSamples.toDouble / metadata.numExamples } else { 1.0 } logDebug("fraction of data used for calculating quantiles = " + fraction) input.sample(withReplacement = false, fraction, new XORShiftRandom(seed).nextInt()) } else { input.sparkContext.emptyRDD[LabeledPoint] } findSplitsBySorting(sampledInput, metadata, continuousFeatures) } private def findSplitsBySorting( input: RDD[LabeledPoint], metadata: DecisionTreeMetadata, continuousFeatures: IndexedSeq[Int]): Array[Array[Split]] = { val continuousSplits: scala.collection.Map[Int, Array[Split]] = { // reduce the parallelism for split computations when there are less // continuous features than input partitions. this prevents tasks from // being spun up that will definitely do no work. val numPartitions = math.min(continuousFeatures.length, input.partitions.length) input.foreachPartition(t => System.gc()) input .flatMap(point => continuousFeatures.map(idx => (idx, point.features(idx)))) .groupByKey(numPartitions) .map { case (idx, samples) => val thresholds = findSplitsForContinuousFeature(samples, metadata, idx) val splits: Array[Split] = thresholds.map(thresh => new ContinuousSplit(idx, thresh)) logDebug(s"featureIndex = $idx, numSplits = ${splits.length}") (idx, splits) }.collectAsMap() } val numFeatures = metadata.numFeatures val splits: Array[Array[Split]] = Array.tabulate(numFeatures) { case i if metadata.isContinuous(i) => val split = continuousSplits(i) metadata.setNumSplits(i, split.length) split case i if metadata.isCategorical(i) && metadata.isUnordered(i) => // Unordered features // 2^(maxFeatureValue - 1) - 1 combinations val featureArity = metadata.featureArity(i) Array.tabulate[Split](metadata.numSplits(i)) { splitIndex => val categories = extractMultiClassCategories(splitIndex + 1, featureArity) new CategoricalSplit(i, categories.toArray, featureArity) } case i if metadata.isCategorical(i) => // Ordered features // Splits are constructed as needed during training. Array.empty[Split] } splits } /** * Nested method to extract list of eligible categories given an index. It extracts the * position of ones in a binary representation of the input. If binary * representation of an number is 01101 (13), the output list should (3.0, 2.0, * 0.0). The maxFeatureValue depict the number of rightmost digits that will be tested for ones. */ private[tree] def extractMultiClassCategories( input: Int, maxFeatureValue: Int): List[Double] = { var categories = List[Double]() var j = 0 var bitShiftedInput = input while (j < maxFeatureValue) { if (bitShiftedInput % 2 != 0) { // updating the list of categories. 
categories = j.toDouble :: categories } // Right shift by one bitShiftedInput = bitShiftedInput >> 1 j += 1 } categories } /** * Find splits for a continuous feature * NOTE: Returned number of splits is set based on `featureSamples` and * could be different from the specified `numSplits`. * The `numSplits` attribute in the `DecisionTreeMetadata` class will be set accordingly. * * @param featureSamples feature values of each sample * @param metadata decision tree metadata * NOTE: `metadata.numbins` will be changed accordingly * if there are not enough splits to be found * @param featureIndex feature index to find splits * @return array of split thresholds */ private[tree] def findSplitsForContinuousFeature( featureSamples: Iterable[Double], metadata: DecisionTreeMetadata, featureIndex: Int): Array[Double] = { require(metadata.isContinuous(featureIndex), "findSplitsForContinuousFeature can only be used to find splits for a continuous feature.") val splits = if (featureSamples.isEmpty) { Array.empty[Double] } else { val numSplits = metadata.numSplits(featureIndex) // get count for each distinct value val (valueCountMap, numSamples) = featureSamples.foldLeft((Map.empty[Double, Int], 0)) { case ((m, cnt), x) => (m + ((x, m.getOrElse(x, 0) + 1)), cnt + 1) } // sort distinct values val valueCounts = valueCountMap.toSeq.sortBy(_._1).toArray // if possible splits is not enough or just enough, just return all possible splits val possibleSplits = valueCounts.length - 1 if (possibleSplits <= numSplits) { valueCounts.map(_._1).init } else { // stride between splits val stride: Double = numSamples.toDouble / (numSplits + 1) logDebug("stride = " + stride) // iterate `valueCount` to find splits val splitsBuilder = mutable.ArrayBuilder.make[Double] var index = 1 // currentCount: sum of counts of values that have been visited var currentCount = valueCounts(0)._2 // targetCount: target value for `currentCount`. // If `currentCount` is closest value to `targetCount`, // then current value is a split threshold. // After finding a split threshold, `targetCount` is added by stride. var targetCount = stride while (index < valueCounts.length) { val previousCount = currentCount currentCount += valueCounts(index)._2 val previousGap = math.abs(previousCount - targetCount) val currentGap = math.abs(currentCount - targetCount) // If adding count of current value to currentCount // makes the gap between currentCount and targetCount smaller, // previous value is a split threshold. if (previousGap < currentGap) { splitsBuilder += valueCounts(index - 1)._1 targetCount += stride } index += 1 } splitsBuilder.result() } } splits } private[tree] class NodeIndexInfo( val nodeIndexInGroup: Int, val featureSubset: Option[Array[Int]]) extends Serializable /** * Pull nodes off of the queue, and collect a group of nodes to be split on this iteration. * This tracks the memory usage for aggregates and stops adding nodes when too much memory * will be needed; this allows an adaptive number of nodes since different nodes may require * different amounts of memory (if featureSubsetStrategy is not "all"). * * @param nodeStack Queue of nodes to split. * @param maxMemoryUsage Bound on size of aggregate statistics. * @return (nodesForGroup, treeToNodeToIndexInfo). * nodesForGroup holds the nodes to split: treeIndex --> nodes in tree. * * treeToNodeToIndexInfo holds indices selected features for each node: * treeIndex --> (global) node index --> (node index in group, feature indices). 
* The (global) node index is the index in the tree; the node index in group is the * index in [0, numNodesInGroup) of the node in this group. * The feature indices are None if not subsampling features. */ private[tree] def selectNodesToSplit( nodeStack: mutable.Stack[(Int, LearningNode)], maxMemoryUsage: Long, metadata: DecisionTreeMetadata, rng: Random): (Map[Int, Array[LearningNode]], Map[Int, Map[Int, NodeIndexInfo]]) = { // Collect some nodes to split: // nodesForGroup(treeIndex) = nodes to split val mutableNodesForGroup = new mutable.HashMap[Int, mutable.ArrayBuffer[LearningNode]]() val mutableTreeToNodeToIndexInfo = new mutable.HashMap[Int, mutable.HashMap[Int, NodeIndexInfo]]() var memUsage: Long = 0L var numNodesInGroup = 0 // If maxMemoryInMB is set very small, we want to still try to split 1 node, // so we allow one iteration if memUsage == 0. while (nodeStack.nonEmpty && (memUsage < maxMemoryUsage || memUsage == 0)) { val (treeIndex, node) = nodeStack.top // Choose subset of features for node (if subsampling). val featureSubset: Option[Array[Int]] = if (metadata.subsamplingFeatures) { Some(SamplingUtils.reservoirSampleAndCount(Range(0, metadata.numFeatures).iterator, metadata.numFeaturesPerNode, rng.nextLong())._1) } else { None } // Check if enough memory remains to add this node to the group. val nodeMemUsage = RandomForestRunner.aggregateSizeForNode(metadata, featureSubset) * 8L if (memUsage + nodeMemUsage <= maxMemoryUsage || memUsage == 0) { nodeStack.pop() mutableNodesForGroup.getOrElseUpdate(treeIndex, new mutable.ArrayBuffer[LearningNode]()) += node mutableTreeToNodeToIndexInfo .getOrElseUpdate(treeIndex, new mutable.HashMap[Int, NodeIndexInfo]())(node.id) = new NodeIndexInfo(numNodesInGroup, featureSubset) } numNodesInGroup += 1 memUsage += nodeMemUsage } if (memUsage > maxMemoryUsage) { // If maxMemoryUsage is 0, we should still allow splitting 1 node. logWarning(s"Tree learning is using approximately $memUsage bytes per iteration, which" + s" exceeds requested limit maxMemoryUsage=$maxMemoryUsage. This allows splitting" + s" $numNodesInGroup nodes in this iteration.") } // Convert mutable maps to immutable ones. val nodesForGroup: Map[Int, Array[LearningNode]] = mutableNodesForGroup.mapValues(_.toArray).toMap val treeToNodeToIndexInfo = mutableTreeToNodeToIndexInfo.mapValues(_.toMap).toMap (nodesForGroup, treeToNodeToIndexInfo) } /** * Get the number of values to be stored for this node in the bin aggregates. * * @param featureSubset Indices of features which may be split at this node. * If None, then use all features. */ private def aggregateSizeForNode( metadata: DecisionTreeMetadata, featureSubset: Option[Array[Int]]): Long = { val totalBins = if (featureSubset.nonEmpty) { featureSubset.get.map(featureIndex => metadata.numBins(featureIndex).toLong).sum } else { metadata.numBins.map(_.toLong).sum } if (metadata.isClassification) { metadata.numClasses * totalBins } else { 3 * totalBins } } }
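Worth spelling out from the runner above: extractMultiClassCategories treats a split index as a bitmask over category values, so its set bits pick the categories sent to one side of an unordered categorical split. A minimal standalone sketch of that decoding follows; the object and method names are illustrative only, not part of the runner.

object CategorySubsetDemo {
  // Collect the positions of the set bits in the lowest `maxFeatureValue`
  // bits of `input`; mirrors the bit-shift loop in extractMultiClassCategories.
  def categoriesFor(input: Int, maxFeatureValue: Int): List[Double] = {
    var categories = List.empty[Double]
    var shifted = input
    var j = 0
    while (j < maxFeatureValue) {
      if (shifted % 2 != 0) categories = j.toDouble :: categories
      shifted = shifted >> 1
      j += 1
    }
    categories
  }

  def main(args: Array[String]): Unit =
    // 13 is binary 01101, so bits 0, 2 and 3 are set.
    println(categoriesFor(13, 5)) // List(3.0, 2.0, 0.0)
}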
alessandrolulli/reforest
src/main/scala/org/apache/spark/ml/tree/impl/RandomForestRunner.scala
Scala
apache-2.0
52,298
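The split quality used throughout the RandomForestRunner file above is the weighted impurity decrease computed in calculateImpurityStats: gain = parent impurity minus the count-weighted impurities of the two children. A self-contained sketch of that arithmetic with Gini impurity; the object and helper names here are illustrative, not part of the runner.

object ImpurityGainSketch {
  // Gini impurity for a vector of per-class counts.
  def gini(counts: Array[Double]): Double = {
    val total = counts.sum
    if (total == 0.0) 0.0
    else 1.0 - counts.map(c => (c / total) * (c / total)).sum
  }

  // gain = impurity(parent) - leftWeight * impurity(left) - rightWeight * impurity(right)
  def gain(left: Array[Double], right: Array[Double]): Double = {
    val parent = left.zip(right).map { case (l, r) => l + r }
    val leftCount = left.sum
    val rightCount = right.sum
    val total = leftCount + rightCount
    gini(parent) - (leftCount / total) * gini(left) - (rightCount / total) * gini(right)
  }

  def main(args: Array[String]): Unit =
    // A 40/40 parent split into a mostly-class-0 left child and a mostly-class-1 right child.
    println(gain(Array(30.0, 5.0), Array(10.0, 35.0)))
}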
/** * Copyright (C) 2009-2011 the original author or authors. * See the notice.md file distributed with this work for additional * information regarding copyright ownership. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.fusesource.scalate import java.io.File import org.scalatest.ConfigMap class TemplateEngineHelpersTest extends TemplateTestSupport { test("generate URI link for existing file") { assertResult(Some("/moustache.js/array_of_strings.js")) { context.uri(new File(baseDir, "src/test/resources/moustache.js/array_of_strings.js")) } } test("no link for file outside of source dir") { assertResult(None) { context.uri(new File("/does/not/exist/12345.xml")) } } def context = new DefaultRenderContext("/foo", engine) override protected def beforeAll(configMap: ConfigMap) = { super.beforeAll(configMap) engine.sourceDirectories = List(new File(baseDir, "src/test/resources")) } }
scalate/scalate
scalate-core/src/test/scala/org/fusesource/scalate/TemplateEngineHelpersTest.scala
Scala
apache-2.0
1,458
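The test above pins down the contract of context.uri: a file under a configured source directory maps to a root-relative URI, anything else maps to None. A rough standalone sketch of that contract using java.io paths; this only mirrors the behaviour the test asserts, it is not Scalate's actual implementation.

import java.io.File

object UriForFileSketch {
  // Return a "/"-rooted URI for `file` when it lives under one of `sourceDirs`.
  def uriFor(sourceDirs: List[File], file: File): Option[String] = {
    val filePath = file.getCanonicalFile.toPath
    sourceDirs.iterator
      .map(_.getCanonicalFile.toPath)
      .find(dir => filePath.startsWith(dir))
      .map(dir => "/" + dir.relativize(filePath).toString.replace(File.separatorChar, '/'))
  }

  def main(args: Array[String]): Unit = {
    val root = new File("src/test/resources")
    println(uriFor(List(root), new File(root, "moustache.js/array_of_strings.js"))) // Some(/moustache.js/array_of_strings.js)
    println(uriFor(List(root), new File("/does/not/exist/12345.xml")))              // None
  }
}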
package com.krrrr38.mackerel4s
package model

import com.ning.http.client.FluentCaseInsensitiveStringsMap

/**
 * When an invalid request is sent, Mackerel returns an error response.
 * This class wraps that error response.
 * @param statusCode
 * @param contentType
 * @param headers
 * @param body
 */
class MackerelResponseError(
  val statusCode: Int,
  val contentType: String,
  val headers: FluentCaseInsensitiveStringsMap,
  val body: String) extends Exception()

/**
 * This class represents a mackerel-client-scala error,
 * such as an invalid JSON serialization format.
 * @param body
 * @param cause
 */
class MackerelClientException(val message: String, val body: String, cause: Throwable)
  extends Exception(message, cause) {
  def this(message: String) {
    this(message, "", null)
  }
}
krrrr38/mackerel-client-scala
src/main/scala/com/krrrr38/mackerel4s/model/MackerelClientException.scala
Scala
mit
797
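A hypothetical caller-side sketch showing how the two failure types above might be told apart; the describe helper and its wording are assumptions for illustration, not part of the client.

import com.krrrr38.mackerel4s.model.{MackerelClientException, MackerelResponseError}

object MackerelErrorHandlingSketch {
  // Map a failure raised by the client onto a human-readable description.
  def describe(failure: Throwable): String = failure match {
    case e: MackerelResponseError =>
      s"Mackerel rejected the request: status=${e.statusCode}, body=${e.body}"
    case e: MackerelClientException =>
      s"client-side failure: ${e.message} (body=${e.body})"
    case other =>
      s"unexpected error: ${other.getMessage}"
  }
}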
/*
 * Copyright (c) 2012-2019 Snowplow Analytics Ltd. All rights reserved.
 *
 * This program is licensed to you under the Apache License Version 2.0,
 * and you may not use this file except in compliance with the Apache License Version 2.0.
 * You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the Apache License Version 2.0 is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
 */
package com.snowplowanalytics.snowplow.enrich.common.loaders

// Scala
import scala.annotation.tailrec

/**
 * Gets the true IP address events forwarded to the Scala Stream Collector.
 * See https://github.com/snowplow/snowplow/issues/1372
 */
object IpAddressExtractor {

  private val ipRegex = """\"?\[?(?:(?:(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}).*)|((?:[0-9a-f]|\.|\:+)+).*)\]?\"?""" // 1 group IPv4 and 1 IPv6
  private val XForwardedForRegex = s"""^x-forwarded-for: $ipRegex.*""".r
  private val ForwardedForRegex = s"""^forwarded: for=$ipRegex.*""".r
  private val CloudfrontRegex = s"""^$ipRegex.*""".r

  /**
   * If a request has been forwarded, extract the original client IP address;
   * otherwise return the standard IP address
   *
   * If both FORWARDED and X-FORWARDED-FOR are set,
   * the IP contained in X-FORWARDED-FOR will be used.
   *
   * @param headers List of headers potentially containing X-FORWARDED-FOR or FORWARDED
   * @param lastIp Fallback IP address if no X-FORWARDED-FOR or FORWARDED header exists
   * @return True client IP address
   */
  @tailrec
  def extractIpAddress(headers: List[String], lastIp: String, maybeForwardedForIp: Option[String] = None): String =
    headers match {
      case h :: t =>
        h.toLowerCase match {
          case XForwardedForRegex(ipv4, ipv6) => Option(ipv4).getOrElse(ipv6)
          case ForwardedForRegex(ipv4, ipv6) =>
            val ip = Option(ipv4).getOrElse(ipv6)
            extractIpAddress(t, lastIp, Some(ip))
          case _ => extractIpAddress(t, lastIp)
        }
      case Nil =>
        maybeForwardedForIp match {
          case Some(forwardedForIp) => forwardedForIp
          case _ => lastIp
        }
    }

  /**
   * If a request has been forwarded, extract the original client IP address;
   * otherwise return the standard IP address
   *
   * @param xForwardedFor x-forwarded-for field from the Cloudfront log
   * @param lastIp Fallback IP address if no X-FORWARDED-FOR header exists
   * @return True client IP address
   */
  def extractIpAddress(xForwardedFor: String, lastIp: String): String =
    xForwardedFor match {
      case CloudfrontRegex(ipv4, ipv6) => Option(ipv4).getOrElse(ipv6)
      case _ => lastIp
    }
}
RetentionGrid/snowplow
3-enrich/scala-common-enrich/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/loaders/IpAddressExtractor.scala
Scala
apache-2.0
2,982
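A small usage sketch of the two overloads above. The header strings and addresses are made up; the values in the comments are what the precedence rules in the scaladoc say should come out (X-Forwarded-For wins immediately, Forwarded is remembered until the list is exhausted, otherwise lastIp is returned).

import com.snowplowanalytics.snowplow.enrich.common.loaders.IpAddressExtractor

object IpAddressExtractorUsage {
  def main(args: Array[String]): Unit = {
    // X-Forwarded-For present: expect "203.0.113.7"
    println(IpAddressExtractor.extractIpAddress(List("X-Forwarded-For: 203.0.113.7, 10.0.0.1"), "10.0.0.1"))

    // Only a Forwarded header: its address is used once all headers have been scanned.
    println(IpAddressExtractor.extractIpAddress(List("Forwarded: for=198.51.100.17"), "10.0.0.1"))

    // No forwarding headers at all: fall back to lastIp.
    println(IpAddressExtractor.extractIpAddress(List("Accept: */*"), "10.0.0.1"))

    // Cloudfront-style single-field overload: expect "203.0.113.7"
    println(IpAddressExtractor.extractIpAddress("203.0.113.7", "10.0.0.1"))
  }
}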
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.bwsw.sj.engine.core.simulation.input import java.nio.charset.Charset import com.bwsw.sj.common.engine.core.entities.InputEnvelope import com.bwsw.sj.common.engine.core.input.{InputStreamingExecutor, InputStreamingResponse} import com.bwsw.sj.engine.input.eviction_policy.InputInstanceEvictionPolicy import io.netty.buffer.{ByteBuf, Unpooled} /** * Imitates behavior of InputTaskEngine for testing an * implementation of [[com.bwsw.sj.common.engine.core.input.InputStreamingExecutor]]. * * Usage example: * {{{ * val manager: InputEnvironmentManager * val executor = new SomeExecutor(manager) * * val hazelcastConfig = HazelcastConfig(600, 1, 1, EngineLiterals.lruDefaultEvictionPolicy, 100) * val hazelcast = new HazelcastMock(hazelcastConfig) * val evictionPolicy = InputInstanceEvictionPolicy(EngineLiterals.fixTimeEvictionPolicy, hazelcast) * * val simulator = new InputEngineSimulator(executor, evictionPolicy) * simulator.prepare(Seq("1", "2", "a", "3", "b")) // byte buffer in simulator will be contatins "1,2,a,3,b," * val outputDataList = simulator.process(duplicateCheck = true) * println(outputDataList) * }}} * * @param executor implementation of [[com.bwsw.sj.common.engine.core.input.InputStreamingExecutor]] under test * @param evictionPolicy eviction policy of duplicate envelopes * @param separator delimiter between data records * @param charset encoding of incoming data * @tparam T type of outgoing data * @author Pavel Tomskikh */ class InputEngineSimulator[T <: AnyRef](executor: InputStreamingExecutor[T], evictionPolicy: InputInstanceEvictionPolicy, separator: String = "", charset: Charset = Charset.forName("UTF-8")) { private val inputBuffer: ByteBuf = Unpooled.buffer() /** * Write data records in byte buffer * * @param records incoming data records */ def prepare(records: Seq[String]): Unit = records.foreach(prepare) /** * Write data record in byte buffer * * @param record incoming data record */ def prepare(record: String): Unit = inputBuffer.writeCharSequence(record + separator, charset) /** * Sends byte buffer to executor as long as it can tokenize the buffer. Method returns list of [[OutputData]]. 
* * @param duplicateCheck indicates that every envelope has to be checked on duplication * @param clearBuffer indicates that byte buffer must be cleared * @return list of [[OutputData]] */ def process(duplicateCheck: Boolean, clearBuffer: Boolean = true): Seq[OutputData[T]] = { def processOneInterval(outputDataList: Seq[OutputData[T]]): Seq[OutputData[T]] = { val maybeOutputData = executor.tokenize(inputBuffer).map { interval => val maybeInputEnvelope = executor.parse(inputBuffer, interval) val isNotDuplicate = maybeInputEnvelope.map(x => !isDuplicate(duplicateCheck)(x)) val response = executor.createProcessedMessageResponse(maybeInputEnvelope, isNotDuplicate.getOrElse(false)) inputBuffer.readerIndex(interval.finalValue + 1) inputBuffer.discardReadBytes() OutputData(maybeInputEnvelope, isNotDuplicate, response) } maybeOutputData match { case Some(outputData) => processOneInterval(outputDataList :+ outputData) case None => outputDataList } } val outputDataList = processOneInterval(Seq.empty) if (clearBuffer) clear() outputDataList } /** * Removes all data from byte buffer */ def clear(): Unit = inputBuffer.clear() private def isDuplicate(duplicateCheck: Boolean)(inputEnvelope: InputEnvelope[T]): Boolean = { if (inputEnvelope.duplicateCheck.isDefined) { if (inputEnvelope.duplicateCheck.get) evictionPolicy.isDuplicate(inputEnvelope.key) else false } else { if (duplicateCheck) evictionPolicy.isDuplicate(inputEnvelope.key) else false } } } /** * Contains data from outputs of an [[com.bwsw.sj.common.engine.core.input.InputStreamingExecutor]] * * @param inputEnvelope result of [[com.bwsw.sj.common.engine.core.input.InputStreamingExecutor.parse]] * @param isNotDuplicate indicates that [[inputEnvelope]] is not duplicate if inputEnvelope is defined or None otherwise * @param response response that will be sent to a client after an [[inputEnvelope]] has been processed * @tparam T type of outgoing data */ case class OutputData[T <: AnyRef](inputEnvelope: Option[InputEnvelope[T]], isNotDuplicate: Option[Boolean], response: InputStreamingResponse)
bwsw/sj-platform
core/sj-engine-simulators/src/main/scala/com/bwsw/sj/engine/core/simulation/input/InputEngineSimulator.scala
Scala
apache-2.0
5,567
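One detail of the simulator above that is easy to miss: an envelope-level duplicateCheck flag overrides the duplicateCheck argument given to process, and the eviction policy is consulted only when the effective flag is true. A tiny standalone sketch of that decision rule, deliberately not using the platform's types.

object DuplicateCheckDecision {
  // The envelope-level flag, when present, wins over the call-level flag.
  def consultEvictionPolicy(callLevel: Boolean, envelopeLevel: Option[Boolean]): Boolean =
    envelopeLevel.getOrElse(callLevel)

  def main(args: Array[String]): Unit =
    for {
      call <- Seq(true, false)
      env  <- Seq(Some(true), Some(false), None)
    } println(s"process(duplicateCheck = $call), envelope.duplicateCheck = $env -> " +
        s"check eviction policy = ${consultEvictionPolicy(call, env)}")
}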
package dsentric.operators import dsentric._ import dsentric.contracts._ object Sanitization { def sanitizeContract[D <: DObject](contract:BaseContract[D], value:RawObject):Option[RawObject] = { def getSanitizer[D2 <: DObject, T](property: Property[D2, T]): Option[Sanitizer[T]] = property._dataOperators.collectFirst { case s: Sanitizer[T]@unchecked => s } def applySanitizer[T](field: String, value: RawObject, sanitizer: Sanitizer[T]): Option[RawObject] = { val propertyValue = value.get(field) sanitizer.sanitize(propertyValue) match { case v if v == propertyValue => None case None => Some(value - field) case Some(raw) => Some(value + (field -> raw)) } } contract._fields.foldLeft[Option[RawObject]](None) { case (maybeObj, (field, property: BaseContract[DObject]@unchecked with ExpectedObjectProperty[D])) => getSanitizer(property) match { case None => val obj = value.get(field).collect { case rv: RawObject@unchecked => rv }.getOrElse(RawObject.empty) sanitizeContract(property, obj).map(r => maybeObj.getOrElse(value) + (field -> r)).orElse(maybeObj) case Some(sanitizer) => applySanitizer(field, maybeObj.getOrElse(value), sanitizer) .orElse(maybeObj) } case (maybeObj, (field, property: BaseContract[DObject]@unchecked with Property[D, _])) => getSanitizer(property) match { case None => value.get(field).collect { case rv: RawObject@unchecked => sanitizeContract(property, rv).map(r => maybeObj.getOrElse(value) + (field -> r)) }.flatten.orElse(maybeObj) case Some(sanitizer) => applySanitizer(field, maybeObj.getOrElse(value), sanitizer) .orElse(maybeObj) } case (maybeObj, (field, property)) => getSanitizer(property).flatMap { sanitizer => applySanitizer(field, maybeObj.getOrElse(value), sanitizer) .orElse(maybeObj) } } } }
HigherState/dsentric
maps/src/main/scala/dsentric/operators/Sanitization.scala
Scala
apache-2.0
2,124
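The traversal above applies any registered Sanitizer per field: an unchanged result leaves the field alone, None removes it, and Some(raw) replaces it, recursing into nested contracts. A simplified standalone analogue of that pattern over plain Maps; this only illustrates the shape of the algorithm, it is not dsentric's API.

object SanitizePatternSketch {
  type RawObject = Map[String, Any]

  // A sanitizer takes the current raw value (if any) and returns the value
  // that should remain; None means the field is dropped.
  type Sanitizer = Option[Any] => Option[Any]

  def sanitize(sanitizers: Map[String, Sanitizer], value: RawObject): RawObject =
    sanitizers.foldLeft(value) { case (obj, (field, sanitizer)) =>
      sanitizer(obj.get(field)) match {
        case v if v == obj.get(field) => obj                   // unchanged
        case None                     => obj - field           // drop the field
        case Some(raw)                => obj + (field -> raw)  // replace the value
      }
    }

  def main(args: Array[String]): Unit = {
    val mask: Sanitizer = _.map(_ => "****")
    val drop: Sanitizer = _ => None
    println(sanitize(
      Map("password" -> mask, "internalId" -> drop),
      Map("user" -> "jo", "password" -> "hunter2", "internalId" -> 42)))
    // expected: Map(user -> jo, password -> ****)
  }
}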
package com.jd.cluster import scala.concurrent.forkjoin.ThreadLocalRandom import akka.actor.Actor import akka.actor.ActorRef import akka.actor.ActorSelection.toScala import akka.actor.Address import akka.actor.RelativeActorPath import akka.actor.RootActorPath import akka.cluster.Cluster import akka.cluster.ClusterEvent.CurrentClusterState import akka.cluster.ClusterEvent.MemberEvent import akka.cluster.ClusterEvent.MemberRemoved import akka.cluster.ClusterEvent.MemberUp import akka.cluster.MemberStatus import com.jd.common.PerfectNumbers import com.jd.common.Find import akka.actor.actorRef2Scala import akka.cluster.ClusterEvent.MemberEvent class ClusterClient extends Actor { val cluster = Cluster(context.system) override def preStart(): Unit = cluster.subscribe(self, classOf[MemberEvent]) override def postStop(): Unit = cluster unsubscribe self var nodes = Set.empty[Address] val servicePath = "/user/listener" val servicePathElements = servicePath match { case RelativeActorPath(elements) => elements case _ => throw new IllegalArgumentException( "servicePath [%s] is not a valid relative actor path" format servicePath) } def receive = { case state: CurrentClusterState => nodes = state.members.collect { case m if m.status == MemberStatus.Up => m.address } case MemberUp(member) => nodes += member.address case MemberRemoved(member, _) => nodes -= member.address case _: MemberEvent => // ignore case PerfectNumbers(list: List[Int]) => println("\\nFound Perfect Numbers:" + list.mkString(",")) cluster.down(self.path.address) context.system.shutdown() case Find(start: Int, end: Int, resultTo: ActorRef) => println("node size:" + nodes.size) nodes.size match { case x: Int if x < 1 => Thread.sleep(1000) self ! Find(start, end, resultTo) case _ => val address = nodes.toIndexedSeq(ThreadLocalRandom.current.nextInt(nodes.size)) val service = context.actorSelection(RootActorPath(address) / servicePathElements) service ! Find(start, end, resultTo) println("send to :" + address) } } }
pengyanhong/demoAkka
src/com/jd/cluster/ClusterClientActor.scala
Scala
apache-2.0
2,203
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel package scala package dsl import org.apache.camel.Exchange import org.apache.camel.model._ import org.apache.camel.processor.aggregate.AggregationStrategy import org.apache.camel.scala.dsl.builder.RouteBuilder import spi.Policy import reflect.{ClassTag, classTag} import java.lang.String import java.util.Comparator abstract class SAbstractDefinition[P <: ProcessorDefinition[_]] extends DSL with Wrapper[P] with Block { val target: P val unwrap = target implicit val builder: RouteBuilder implicit def predicateBuilder(predicate: Exchange => Any) = new ScalaPredicate(predicate) implicit def expressionBuilder(expression: Exchange => Any) = new ScalaExpression(expression) def apply(block: => Unit) = { builder.build(this, block) this } /** * Helper method to return this Scala type instead of creating another wrapper type for the processor */ def wrap(block: => Unit): SAbstractDefinition[_] = { block this } // EIPs //----------------------------------------------------------------- def aggregate(expression: Exchange => Any, strategy: AggregationStrategy) = SAggregateDefinition(target.aggregate(expression, strategy)) def as[Target](toType: Class[Target], charset: String = null) = wrap(target.convertBodyTo(toType, charset)) def attempt: STryDefinition = STryDefinition(target.doTry()) def bean(bean: Any) = bean match { case cls: Class[_] => wrap(target.bean(cls)) case ref: String => wrap(target.beanRef(ref)) case obj: Any => wrap(target.bean(obj)) } def choice = SChoiceDefinition(target.choice) def convertBodyTo[Target](toType: Class[Target], charset: String = null) = wrap(target.convertBodyTo(toType, charset)) def delay(period: Period) = SDelayDefinition(target.delay(period.milliseconds)) def dynamicRouter(expression: Exchange => Any) = wrap(target.dynamicRouter(expression)) def enrich(uri: String, strategy: AggregationStrategy) = wrap(target.enrich(uri, strategy)) def enrich(uri: String, strategy: AggregationStrategy, aggregateOnException: Boolean) = wrap(target.enrich(uri, strategy, aggregateOnException)) def filter(predicate: Exchange => Any) = SFilterDefinition(target.filter(predicateBuilder(predicate))) def handle[E <: Throwable : ClassTag](block: => Unit) = SOnExceptionDefinition[E](target.onException(classTag[E].runtimeClass.asInstanceOf[Class[E]])).apply(block) def id(id : String) = wrap(target.id(id)) def idempotentConsumer(expression: Exchange => Any) = SIdempotentConsumerDefinition(target.idempotentConsumer(expression, null)) @Deprecated def inOnly = wrap(target.inOnly) @Deprecated def inOut = wrap(target.inOut) def loadbalance = SLoadBalanceDefinition(target.loadBalance) def log(message: String) = wrap(target.log(message)) def log(level: LoggingLevel, message: String) = wrap(target.log(level, 
message)) def log(level: LoggingLevel, logName: String, message: String) = wrap(target.log(level, logName, message)) def log(level: LoggingLevel, logName: String, marker: String, message: String) = wrap(target.log(level, logName, marker, message)) def loop(expression: Exchange => Any) = SLoopDefinition(target.loop(expression)) def marshal(format: DataFormatDefinition) = wrap(target.marshal(format)) def multicast = SMulticastDefinition(target.multicast) def onCompletion: SOnCompletionDefinition = { val completion = SOnCompletionDefinition(target.onCompletion) // let's end the block in the Java DSL, we have a better way of handling blocks here completion.target.end completion } def onCompletion(predicate: Exchange => Boolean) = onCompletion.when(predicate).asInstanceOf[SOnCompletionDefinition] def onCompletion(config: Config[SOnCompletionDefinition]) = { config.configure(onCompletion) onCompletion } def otherwise: SChoiceDefinition = throw new Exception("otherwise is only supported in a choice block or after a when statement") def pipeline = SPipelineDefinition(target.pipeline) def policy(policy: Policy) = wrap(target.policy(policy)) def pollEnrich(uri: String, strategy: AggregationStrategy = null, timeout: Long = -1) = wrap(target.pollEnrich(uri, timeout, strategy)) def pollEnrich(uri: String, strategy: AggregationStrategy, timeout: Long, aggregateOnException: Boolean) = wrap(target.pollEnrich(uri, timeout, strategy, aggregateOnException)) def process(function: Exchange => Unit) = wrap(target.process(new ScalaProcessor(function))) def process(processor: Processor) = wrap(target.process(processor)) def recipients(expression: Exchange => Any) = wrap(target.recipientList(expression)) def resequence(expression: Exchange => Any) = SResequenceDefinition(target.resequence(expression)) def removeHeader(name : String) = wrap(target.removeHeader(name)) def removeHeaders(pattern: String) = wrap(target.removeHeaders(pattern)) def removeHeaders(pattern: String, excludePatterns: String*) = wrap(target.removeHeaders(pattern, excludePatterns:_*)) def removeProperty(name: String) = wrap(target.removeProperty(name)) def removeProperties(pattern: String) = wrap(target.removeProperties(pattern)) def removeProperties(pattern: String, excludePatterns: String*) = wrap(target.removeProperties(pattern, excludePatterns:_*)) def rollback = wrap(target.rollback) def routeId(routeId: String) = wrap(target.routeId(routeId)) @Deprecated def routingSlip(header: String) = wrap(target.routingSlip(header)) @Deprecated def routingSlip(header: String, separator: String) = wrap(target.routingSlip(header, separator)) def routingSlip(expression: Exchange => Any, separator: String) = wrap(target.routingSlip(expression, separator)) def routingSlip(expression: Exchange => Any) = wrap(target.routingSlip(expression)) def setBody(expression: Exchange => Any) = wrap(target.setBody(expression)) def setFaultBody(expression: Exchange => Any) = wrap(target.setFaultBody(expression)) def setHeader(name: String, expression: Exchange => Any) = wrap(target.setHeader(name, expression)) def setExchangePattern(mep: ExchangePattern) = wrap(target.setExchangePattern(mep)) def setProperty(name: String, expression: Exchange => Any) = wrap(target.setProperty(name, expression)) def sort[T](expression: (Exchange) => Any, comparator: Comparator[T] = null) = wrap(target.sort(expression, comparator)) def split(expression: Exchange => Any) = SSplitDefinition(target.split(expression)) def startupOrder(startupOrder :Int) = 
wrap(target.startupOrder(startupOrder)) def stop = wrap(target.stop) def threads = SThreadsDefinition(target.threads) def throttle(frequency: Frequency) = SThrottleDefinition(target.throttle(frequency.count).timePeriodMillis(frequency.period.milliseconds)) def throwException(exception: Exception) = wrap(target.throwException(exception)) def transacted = wrap(target.transacted) def transacted(ref: String) = wrap(target.transacted(ref)) def transform(expression: Exchange => Any) = wrap(target.transform(expression)) def unmarshal(format: DataFormatDefinition) = wrap(target.unmarshal(format)) def validate(expression: Exchange => Any) = wrap(target.validate(predicateBuilder(expression))) def when(filter: Exchange => Any): DSL with Block = SChoiceDefinition(target.choice).when(filter) def wireTap(uri: String) = wrap(target.wireTap(uri)) def wireTap(uri: String, expression: Exchange => Any) = wrap(target.wireTap(uri).newExchangeBody(expression)) def -->(pattern: ExchangePattern, uri: String) = wrap(target.to(pattern, uri)) def -->(uris: String*) = to(uris:_*) def to(pattern: ExchangePattern, uri: String) = wrap(target.to(pattern, uri)) def to(uris: String*) = { uris.length match { case 1 => target.to(uris(0)) case _ => val multi = multicast uris.foreach(multi.to(_)) } this } }
logzio/camel
components/camel-scala/src/main/scala/org/apache/camel/scala/dsl/SAbstractDefinition.scala
Scala
apache-2.0
8,697
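The two implicit conversions at the top of the definition above are what let route blocks pass plain Scala functions where Camel expects Predicate or Expression objects. A stripped-down standalone sketch of that adapter idea; the Message and Check types below are stand-ins, not Camel classes.

import scala.language.implicitConversions

object FunctionAdapterSketch {
  // Stand-ins for the Java-style types a DSL has to satisfy.
  final case class Message(body: String)
  trait Check { def matches(m: Message): Boolean }

  // The adapter: wrap a Scala function so it can be passed where a Check is
  // expected, in the spirit of predicateBuilder / ScalaPredicate above.
  implicit def functionToCheck(f: Message => Boolean): Check =
    new Check { def matches(m: Message): Boolean = f(m) }

  def filter(messages: Seq[Message])(check: Check): Seq[Message] =
    messages.filter(check.matches)

  def main(args: Array[String]): Unit = {
    val msgs = Seq(Message("keep me"), Message("drop me"))
    // The function literal is lifted to a Check by the implicit conversion above.
    println(filter(msgs)((m: Message) => m.body.startsWith("keep")))
  }
}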
/* * Copyright (C) 2014 - 2016 Softwaremill <http://softwaremill.com> * Copyright (C) 2016 - 2019 Lightbend Inc. <http://www.lightbend.com> */ package akka.kafka.benchmarks import java.util.Locale import akka.kafka.benchmarks.app.RunTestCommand import org.apache.kafka.clients.consumer.{ConsumerConfig, KafkaConsumer} import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig} import org.apache.kafka.common.requests.IsolationLevel import org.apache.kafka.common.serialization.{ ByteArrayDeserializer, ByteArraySerializer, StringDeserializer, StringSerializer } import scala.jdk.CollectionConverters._ case class KafkaTransactionTestFixture(sourceTopic: String, sinkTopic: String, msgCount: Int, groupId: String, consumer: KafkaConsumer[Array[Byte], String], producer: KafkaProducer[Array[Byte], String]) { def close(): Unit = { consumer.close() producer.close() } } object KafkaTransactionFixtures extends PerfFixtureHelpers { def noopFixtureGen(c: RunTestCommand): FixtureGen[KafkaTransactionTestFixture] = FixtureGen[KafkaTransactionTestFixture](c, msgCount => { KafkaTransactionTestFixture("sourceTopic", "sinkTopic", msgCount, "groupId", consumer = null, producer = null) }) def initialize(c: RunTestCommand) = FixtureGen[KafkaTransactionTestFixture]( c, msgCount => { fillTopic(c.filledTopic, c.kafkaHost) val groupId = randomId() val sinkTopic = randomId() val consumerJavaProps = new java.util.Properties consumerJavaProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, c.kafkaHost) consumerJavaProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, classOf[ByteArrayDeserializer]) consumerJavaProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, classOf[StringDeserializer]) consumerJavaProps.put(ConsumerConfig.CLIENT_ID_CONFIG, randomId()) consumerJavaProps.put(ConsumerConfig.GROUP_ID_CONFIG, groupId) consumerJavaProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest") consumerJavaProps.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, IsolationLevel.READ_COMMITTED.toString.toLowerCase(Locale.ENGLISH)) val consumer = new KafkaConsumer[Array[Byte], String](consumerJavaProps) consumer.subscribe(Set(c.filledTopic.topic).asJava) val producerJavaProps = new java.util.Properties producerJavaProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[ByteArraySerializer]) producerJavaProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer]) producerJavaProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, c.kafkaHost) producerJavaProps.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, true.toString) producerJavaProps.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, randomId()) val producer = new KafkaProducer[Array[Byte], String](producerJavaProps) KafkaTransactionTestFixture(c.filledTopic.topic, sinkTopic, msgCount, groupId, consumer, producer) } ) }
softwaremill/reactive-kafka
benchmarks/src/main/scala/akka/kafka/benchmarks/KafkaTransactionFixtureGen.scala
Scala
apache-2.0
3,279
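The fixture above wires a transactional.id into the producer and read_committed isolation into the consumer; the benchmark driving it presumably runs the standard transactional produce cycle. A minimal sketch of that cycle against the plain Kafka client API; the topic argument and the one-shot helper are placeholders, not part of the fixture.

import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}

object TransactionalProduceSketch {
  // Assumes `producer` was built with TRANSACTIONAL_ID_CONFIG set, as in the fixture above.
  // initTransactions is a one-time call per producer, so this helper is single-shot.
  def copyBatchOnce(producer: KafkaProducer[Array[Byte], String],
                    sinkTopic: String,
                    values: Seq[String]): Unit = {
    producer.initTransactions()
    producer.beginTransaction()
    try {
      values.foreach(v => producer.send(new ProducerRecord[Array[Byte], String](sinkTopic, v)))
      // Records only become visible to read_committed consumers after the commit.
      producer.commitTransaction()
    } catch {
      case e: Exception =>
        producer.abortTransaction() // discard everything sent inside this transaction
        throw e
    }
  }
}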
package scalaxy.streams package test import org.junit._ import org.junit.runner.RunWith import org.junit.runners.Parameterized.Parameters import scala.collection.JavaConversions._ @RunWith(classOf[Parallelized]) class MacroIntegrationTest( name: String, source: String, expectedMessages: CompilerMessages) { import MacroIntegrationTest._ @Test def test = testMessages(source, expectedMessages)(strategy) } object MacroIntegrationTest extends StreamComponentsTestBase with StreamTransforms { scalaxy.streams.flags.logLevel = LogLevel.Verbose scalaxy.streams.flags.quietWarnings = true scalaxy.streams.flags.experimental = true implicit def strategy = scalaxy.streams.strategy.foolish @Parameters(name = "{0}") def data: java.util.Collection[Array[AnyRef]] = IntegrationTests.data.map(t => Array[AnyRef](t.name, t.source, t.expectedMessages)) }
nativelibs4java/scalaxy-streams
src/test/scala/MacroIntegrationTest.scala
Scala
bsd-3-clause
895
package de.maci.beanmodel.generator.testhelper import scala.collection.JavaConverters.asScalaBufferConverter import org.junit.runner.RunWith import org.scalatest.Finders import org.scalatest.FlatSpec import org.scalatest.Matchers import org.scalatest.junit.JUnitRunner import de.maci.beanmodel.generator.testhelper.VariableElementMocker.mockVariableElement import de.maci.beanmodel.generator.testhelper.ElementMocker.mockElement import de.maci.beanmodel.generator.testhelper.PackageElementMocker.mockPackageElement import de.maci.beanmodel.generator.testhelper.TypeElementMocker.mockTypeElement import javax.lang.model.element.ElementKind import javax.lang.model.element.Modifier import javax.lang.model.element.TypeElement /** * @author Daniel Götten <[email protected]> * @since 30.04.15 */ @RunWith(classOf[JUnitRunner]) class VariableElementMockerTest extends FlatSpec with Matchers { val typeElement = mockTypeElement withKind (ElementKind.CLASS) withEnclosingElement (() => mockPackageElement withQualifiedName ("de.maci") build) withSimpleName ("SomeName") build "The mocker" should "should return a valid instance if all required attributes are set." in { val variableElement = mockVariableElement withKind (ElementKind.FIELD) withEnclosingElement (() => typeElement) withSimpleName ("someVariable") withModifiers (Modifier.PUBLIC, Modifier.STATIC, Modifier.FINAL) build variableElement.getKind shouldBe ElementKind.FIELD variableElement.getEnclosingElement shouldBe typeElement variableElement.getSimpleName.toString shouldBe "someVariable" variableElement.getModifiers should contain theSameElementsAs Set(Modifier.PUBLIC, Modifier.STATIC, Modifier.FINAL) } it should "throw an AssertionError on build if ElementKind is not set." in { intercept[AssertionError] { mockVariableElement withEnclosingElement (() => typeElement) withSimpleName ("SomeName") build } } it should "throw an AssertionError on build if the enclosing element is not set." in { intercept[AssertionError] { mockVariableElement withKind (ElementKind.FIELD) withSimpleName ("SomeName") build } } it should "throw an AssertionError on build if the simple name is not set." in { intercept[AssertionError] { mockVariableElement withKind (ElementKind.FIELD) withEnclosingElement (() => typeElement) build } } it should "throw an IllegalArgumentException if the simple name is empty." in { intercept[IllegalArgumentException] { mockVariableElement withSimpleName ("") } } }
dangoe/maci-beanmodel
maci-beanmodel-gen/src/test/scala/de/maci/beanmodel/generator/testhelper/VariableElementMockerTest.scala
Scala
apache-2.0
2,572
/**
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.hyperledger.network.codecs

import org.hyperledger.common.{BID, Hash}
import scodec.Codec
import scodec.bits.ByteVector
import scodec.codecs.bytes

object HashCodec {
  implicit val hashCodec: Codec[Hash] = bytes(32).xmap(
    bytes => Hash.createFromSafeArray(bytes.toArray),
    hash => ByteVector(hash.unsafeGetArray))

  implicit val bidCodec: Codec[BID] = bytes(32).xmap(
    bytes => new BID(bytes.toArray),
    hash => ByteVector(hash.unsafeGetArray))
}
DigitalAssetCom/hlp-candidate
server/network/src/main/scala/org/hyperledger/network/codecs/HashCodec.scala
Scala
apache-2.0
1,041
/* * This file is part of Kiama. * * Copyright (C) 2008-2015 Anthony M Sloane, Macquarie University. * * Kiama is free software: you can redistribute it and/or modify it under * the terms of the GNU Lesser General Public License as published by the * Free Software Foundation, either version 3 of the License, or (at your * option) any later version. * * Kiama is distributed in the hope that it will be useful, but WITHOUT ANY * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS * FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for * more details. * * You should have received a copy of the GNU Lesser General Public License * along with Kiama. (See files COPYING and COPYING.LESSER.) If not, see * <http://www.gnu.org/licenses/>. */ package org.kiama package example.til import org.kiama.util.TransformerTests class TIL2_1Tests extends TIL2_1 with TransformerTests { import TILTree._ val x = Id ("x") val y = Id ("y") val n = Id ("n") test ("transform a single for loop") { val input = "for x := 1 to n do write x; end" val tree = Program (List ( Decl (x), For (x, Num (1), Var (n), List ( Write (Var (x)))))) assertTransformOk (input, parser, transform, tree) } test ("transform a for loop that occurs first in a sequence") { val input = "for x := 1 to n do write x; end write x;" val tree = Program (List ( Decl (x), For (x, Num (1), Var (n), List ( Write (Var (x)))), Write (Var (x)))) assertTransformOk (input, parser, transform, tree) } test ("transform a for loop that occurs last in a sequence") { val input = "write x; for x := 1 to n do write x; end" val tree = Program (List ( Write (Var (x)), Decl (x), For (x, Num (1), Var (n), List ( Write (Var (x)))))) assertTransformOk (input, parser, transform, tree) } test ("transform a for loop that occurs in the middle of a sequence") { val input = "write x; for x := 1 to n do write x; end write x;" val tree = Program (List ( Write (Var (x)), Decl (x), For (x, Num (1), Var (n), List ( Write (Var (x)))), Write (Var (x)))) assertTransformOk (input, parser, transform, tree) } test ("transform nested for loops") { val input = "for x := 1 to n do for y := 0 to x do write y; end end" val tree = Program (List ( Decl (x), For (x, Num (1), Var (n), List ( Decl (y), For (y, Num (0), Var (x), List ( Write (Var (y)))))))) assertTransformOk (input, parser, transform, tree) } }
solomono/kiama
library/src/org/kiama/example/til/TIL2_1Tests.scala
Scala
gpl-3.0
3,016
package equalitydemo

import scalaz._
import Scalaz.{ToEqualOps => _, _}
import org.scalactic._
import org.scalatest._
import TripleEqualsSupport.AToBEquivalenceConstraint
import TripleEqualsSupport.BToAEquivalenceConstraint

import scala.language.implicitConversions

trait LowPriorityStrictScalazConstraints extends TripleEquals {
  implicit def lowPriorityScalazConstraint[A, B](implicit equalOfB: Equal[B], ev: A <:< B): Constraint[A, B] =
    new AToBEquivalenceConstraint[A, B](new ScalazEquivalence(equalOfB), ev)
}

trait StrictScalazEquality extends LowPriorityStrictScalazConstraints {

  override def convertToEqualizer[T](left: T): Equalizer[T] = super.convertToEqualizer[T](left)

  implicit override def convertToCheckingEqualizer[T](left: T): CheckingEqualizer[T] = new CheckingEqualizer(left)

  override def unconstrainedEquality[A, B](implicit equalityOfA: Equality[A]): Constraint[A, B] =
    super.unconstrainedEquality[A, B]

  implicit def spireConstraint[A, B](implicit equalOfA: Equal[A], ev: B <:< A): Constraint[A, B] =
    new BToAEquivalenceConstraint[A, B](new ScalazEquivalence(equalOfA), ev)
}

object StrictScalazEquality extends StrictScalazEquality
bvenners/equality-integration-demo
src/main/scala/equalitydemo/StrictScalazEquality.scala
Scala
apache-2.0
1,176
package xyz.nabijaczleweli.lonning

import scala.annotation.elidable
import scala.annotation.elidable._
import org.joda.time.DateTime

/** @author Jędrzej
  * @since 16.04.14
  */
object Formatter {
  @inline final val format = System.getProperty("lonning.fromat", "dd.MM.yy HH:mm:ss")

  private final val `emaNyb.elbadile` = {
    var t = Map.empty[Int, String]
    val repl = Map[Int, String](FINEST -> "DEBUG", FINER -> "LOG")
    for(i <- elidable.byName) {
      val g = repl get i._2
      t += (if(g.isEmpty) i.swap else i._2 -> g.get)
    }
    t += 5000 -> "\\u00A1VERY VERY BAD!"
    t
  }

  def getPreStuffs(name: String, level: Int) =
    s"[${DateTime.now.toString(format)}] [$name] [${
      val g = `emaNyb.elbadile` get level
      if(!g.isEmpty) g.get else level
    }]"
}
nabijaczleweli/Scala-Game-of-Life
src/main/scala/xyz/nabijaczleweli/lonning/Formatter.scala
Scala
mit
791
//package com.sksamuel.avro4s.github
//
//import com.sksamuel.avro4s.SchemaFor
//import org.scalatest.funsuite.AnyFunSuite
//import org.scalatest.matchers.should.Matchers
//import shapeless.{:+:, CNil}
//
//case class Coproducts(cp: Int :+: String :+: Boolean :+: CNil)
//case class CoproductOfCoproductsField(cp: Coproducts :+: Boolean :+: CNil)
//
//class Github273 extends AnyFunSuite with Matchers {
//
//  test("Diverging implicit expansion for SchemaFor in Coproducts inside case classes #273") {
//    SchemaFor[CoproductOfCoproductsField]
//  }
//}
sksamuel/avro4s
avro4s-core/src/test/scala/com/sksamuel/avro4s/github/Github273.scala
Scala
apache-2.0
557
package controllers import java.math.{BigDecimal => JBD} import anorm._ import play.api._ import play.api.Play.current import play.api.db.DB import play.api.mvc._ import com.github.nscala_time.time.Imports._ import models._ object Trades extends Controller with SafeCast { def tradesNoLegs(und: String, year: String, month: String) = Action { val days = minMaxDays(safeInt(year), safeInt(month)) Redirect(routes.Screener.screener("all", und, None, days._1, days._2, None, None, None)) } def minMaxDays(year: Int, month: Int): (Option[Int], Option[Int]) = { val now = DateTime.now val curYear = now.getYear val year4d = if (year < 100) year + 2000 else year val minMax = if (year4d > curYear || (year4d == curYear && month >= now.getMonthOfYear)) { val startMonth = if (month > 0 && month <= 12) month else (if (year4d == curYear) now.getMonthOfYear else 1) val expRangeStart = new DateTime(year4d, startMonth, 1, 0, 0) val expRangeEnd = if (month > 0 && month <= 12) expRangeStart.plusMonths(1) else new DateTime(year4d, 12, 31, 0, 0) val millisInDay = 1000 * 60 * 60 * 24 val minDays = (expRangeStart.millis - now.millis) / millisInDay val maxDays = (expRangeEnd.millis - now.millis) / millisInDay (Some(minDays.toInt), Some(maxDays.toInt)) } else { (None, None) } return minMax } def trades(und: String, year: String, month: String, legs: String) = Action { val params = new TradeParams(und, year, month, legs) val trade = if (params.legs.size==2) twoLegTrade(params) else if (params.legs.size==4) fourLegTrade(params) else None Ok(views.html.trades(trade)) } def twoLegTrade(params: TradeParams): Option[TwoLegTrade] = { val longLeg = if (params.legs(0).isLong) params.legs(0) else params.legs(1) val shortLeg = if (params.legs(0).isLong) params.legs(1) else params.legs(0) return twoLegTrade(params, longLeg, shortLeg) } def twoLegTrade(params: TradeParams, longLeg: Leg, shortLeg: Leg): Option[TwoLegTrade] = { val callOrPut = if (longLeg.isCall) "C" else "P" val sql: SimpleSql[Row] = { SQL { "SELECT l.underlier, stocks.last_trade AS undLast, " + "l.exp_unixtime AS expires, l.symbol AS longSym, l.bid AS longBid, l.call_or_put AS callOrPut, " + "l.ask AS longAsk, l.strike AS longStrike, s.symbol AS shortSym, s.bid AS shortBid, " + "s.ask AS shortAsk, s.strike AS shortStrike " + "FROM options AS l JOIN options AS s ON l.underlier=s.underlier AND l.exp_unixtime=s.exp_unixtime " + "AND l.call_or_put=s.call_or_put JOIN stocks ON l.underlier=stocks.symbol " + "WHERE l.underlier={underlier} AND l.call_or_put={callOrPut} " + "AND l.exp_year={expYear} AND l.exp_month={expMonth} " + "AND l.ask>0 AND s.ask>0 " + "AND l.strike={longStrike} AND s.strike={shortStrike} LIMIT 1" }.on("underlier"->params.underlier, "callOrPut"->callOrPut, "expYear"->params.expiryYear, "expMonth"->params.expiryMonth, "longStrike"->new JBD(longLeg.strike.toString), "shortStrike"->new JBD(shortLeg.strike.toString)) } val rows = DB.withConnection(implicit c => sql().toList) return if (rows.nonEmpty) { val row = rows.head Some(if (longLeg.isCall) { if (longLeg.strike < shortLeg.strike) new BullCall(row) else new BearCall(row) } else { if (longLeg.strike < shortLeg.strike) new BullPut(row) else new BearPut(row) }) } else { None } } def fourLegTrade(params: TradeParams): Option[FourLegTrade] = { val longLegA = if (params.legs(0).isLong) params.legs(0) else params.legs(1) val shortLegA = if (params.legs(0).isLong) params.legs(1) else params.legs(0) val tradeA = twoLegTrade(params, longLegA, shortLegA) val longLegB = if (params.legs(2).isLong) params.legs(2) 
else params.legs(3) val shortLegB = if (params.legs(2).isLong) params.legs(3) else params.legs(2) val tradeB = twoLegTrade(params, longLegB, shortLegB) return if (tradeA.isDefined && tradeB.isDefined) { tradeA.get match { case a: BullCall => Some(new LongCallButterfly(a, tradeB.get.asInstanceOf[BearCall])) case a: BearCall => Some(new LongCallButterfly(tradeB.get.asInstanceOf[BullCall], a)) case a: BullPut => Some(new LongPutButterfly(a, tradeB.get.asInstanceOf[BearPut])) case a: BearPut => Some(new LongPutButterfly(tradeB.get.asInstanceOf[BullPut], a)) case _ => None } } else { None } } class TradeParams( und: String, year: String, month: String, legStr: String) { val underlier = und.toUpperCase val expiryYear: Int = safeInt(year) val expiryMonth: Int = safeInt(month) val legs: List[Leg] = parseLegs(legStr) private def parseLegs(str: String): List[Leg] = { return str.toUpperCase.split("-").filter(_.length>2).map { leg => val strike: BigDecimal = safeBigDecimal(leg.substring(1, leg.length-1)) Leg(leg.startsWith("L"), strike, leg.endsWith("C")) }.toList } } case class Leg(isLong: Boolean, strike: BigDecimal, isCall: Boolean) } trait SafeCast { def safeInt(str: String): Int = { try { str.toInt } catch { case e: NumberFormatException => 0 } } def safeBigDecimal(str: String): BigDecimal = { try { BigDecimal(str) } catch { case e: NumberFormatException => BigDecimal("0") } } }
Exupery/optionometer
app/controllers/Trades.scala
Scala
mit
5,572
package chessagents import akka.actor.Actor import akka.actor.Props import akka.actor.ActorRef import akka.actor.ActorPath import akka.actor.ActorSystem import akka.pattern.ask import akka.util.Timeout import scala.concurrent.{Await, Future} import scala.concurrent.duration._ import piecesActors._; import chessagents.protocols.ControllerPlayerProtocol._ import chessagents.protocols.PlayerPiecesProtocol._ import chessagents.protocols.ControllerPlayerProtocol import java.util.Arrays.ArrayList import scala.collection.mutable.ArrayBuffer import scala.util.control.Breaks._ class Player extends Actor { import context._ val color = self.path.name val BLACK = "black" val WHITE = "white" final val BOARD_SIZE: Int = 8 var board: ArrayBuffer[ArrayBuffer[PiecesNames.Value]] = ArrayBuffer( ArrayBuffer(PiecesNames.BLACK_ROOK, PiecesNames.BLACK_KNIGHT, PiecesNames.BLACK_BISHOP, PiecesNames.BLACK_QUEEN, PiecesNames.BLACK_KING, PiecesNames.BLACK_BISHOP, PiecesNames.BLACK_KNIGHT, PiecesNames.BLACK_ROOK), ArrayBuffer(PiecesNames.BLACK_PAWN, PiecesNames.BLACK_PAWN, PiecesNames.BLACK_PAWN, PiecesNames.BLACK_PAWN, PiecesNames.BLACK_PAWN, PiecesNames.BLACK_PAWN, PiecesNames.BLACK_PAWN, PiecesNames.BLACK_PAWN), ArrayBuffer(PiecesNames.NONE, PiecesNames.NONE, PiecesNames.NONE, PiecesNames.NONE, PiecesNames.NONE, PiecesNames.NONE, PiecesNames.NONE, PiecesNames.NONE), ArrayBuffer(PiecesNames.NONE, PiecesNames.NONE, PiecesNames.NONE, PiecesNames.NONE, PiecesNames.NONE, PiecesNames.NONE, PiecesNames.NONE, PiecesNames.NONE), ArrayBuffer(PiecesNames.NONE, PiecesNames.NONE, PiecesNames.NONE, PiecesNames.NONE, PiecesNames.NONE, PiecesNames.NONE, PiecesNames.NONE, PiecesNames.NONE), ArrayBuffer(PiecesNames.NONE, PiecesNames.NONE, PiecesNames.NONE, PiecesNames.NONE, PiecesNames.NONE, PiecesNames.NONE, PiecesNames.NONE, PiecesNames.NONE), ArrayBuffer(PiecesNames.WHITE_PAWN, PiecesNames.WHITE_PAWN, PiecesNames.WHITE_PAWN, PiecesNames.WHITE_PAWN, PiecesNames.WHITE_PAWN, PiecesNames.WHITE_PAWN, PiecesNames.WHITE_PAWN, PiecesNames.WHITE_PAWN), ArrayBuffer(PiecesNames.WHITE_ROOK, PiecesNames.WHITE_KNIGHT, PiecesNames.WHITE_BISHOP, PiecesNames.WHITE_QUEEN, PiecesNames.WHITE_KING, PiecesNames.WHITE_BISHOP, PiecesNames.WHITE_KNIGHT, PiecesNames.WHITE_ROOK) ) var pieces: ArrayBuffer[ActorRef] = new ArrayBuffer[ActorRef]() var proposals: ArrayBuffer[ProposalMove] = ArrayBuffer() var sendProposal: ProposalMove = null var lastMovePieceRef: ActorRef = null var GameCtrl: ActorRef = null def initPieces(color: String): Unit = { var yPos: Int = 0 var initArray: ArrayBuffer[ActorRef] = pieces if (color.equals(BLACK)) { yPos = 1 } else { yPos = BOARD_SIZE - 2 } for (i <- 0 to BOARD_SIZE - 1) { initArray += context.actorOf(Props(new PawnActor(color)), name = "pawn" + color + i) initArray(i) ! 
setPosition(i, yPos,false) } initArray += context.actorOf(Props(new RookActor(color)), name = "rook" + color + "1") initArray += context.actorOf(Props(new KnightActor(color)), name = "knight" + color + "1") initArray += context.actorOf(Props(new BishopActor(color)), name = "bishop" + color + "1") initArray += context.actorOf(Props(new QueenActor(color)), name = "queen" + color) initArray += context.actorOf(Props(new KingActor(color)), name = "king" + color) initArray += context.actorOf(Props(new BishopActor(color)), name = "bishop" + color + "2") initArray += context.actorOf(Props(new KnightActor(color)), name = "knight" + color + "2") initArray += context.actorOf(Props(new RookActor(color)), name = "rook" + color + "2") for (i <- BOARD_SIZE to 2 * BOARD_SIZE - 1) { if (color.equals(BLACK)) { initArray(i) ! setPosition(i - BOARD_SIZE, yPos - 1,false) } else initArray(i) ! setPosition(i - BOARD_SIZE, yPos + 1,false) } } def decideNextMove(): ProposalMove = { var retProposal: ProposalMove = new ProposalMove((-1, -1, -1, -1), -100, null); //best found proposal var sortedProposals = proposals.sortBy(- _.getPoints()) proposals.clear(); var sumPoints = 0 for (p <- sortedProposals) { println(scala.math.max(0, p.getPoints())) sumPoints += scala.math.max(0, p.getPoints()) } var rand = scala.util.Random.nextInt(sumPoints) breakable { for (p <- sortedProposals) { rand -= scala.math.max(0, p.getPoints()) if (rand <= 0) { retProposal = p break } } } return retProposal } def updateBoard(from_x: Int, from_y: Int, to_x: Int, to_y: Int): Unit = { println("Player "+color+" updateBoard") var actualPiece: PiecesNames.Value = board(from_x)(from_y) board(from_x)(from_y) = PiecesNames.NONE board(to_x)(to_y) = actualPiece } def awaitingMoveAcceptation: Receive = { case ControllerPlayerProtocol.Accepted => { println(s"Player $color: awaitingMoveAcceptation: Received Accepted") sendProposal.getRef() ! setPosition(sendProposal.getMove()._3, sendProposal.getMove()._4,true) // refresh game state based on own move updateBoard(sendProposal.getMove()._2, sendProposal.getMove()._1, sendProposal.getMove()._4, sendProposal.getMove()._3) unbecome() } case ControllerPlayerProtocol.Rejected => { println(s"Player $color: awaitingMoveAcceptation: Received Rejected") // recalculate move and send another proposal for (piece <- pieces) { piece ! callForProposal(board) } become(listenFromPieces) } } def listenFromPieces: Receive = { case chessagents.protocols.PlayerPiecesProtocol.Move(from_x, from_y, to_x, to_y, points) => { println(s"Player "+color+": receive: PlayerPiecesProtocol.Move") var move: (Int, Int, Int, Int) = (from_x, from_y, to_x, to_y) var proposal: ProposalMove = new ProposalMove(move, points, sender()) println(s"Player "+color+": proposals length: "+proposals.length) println(s"Player "+color+": pieces length: "+pieces.length) proposals += proposal; //move proposals from all pieces println(s"Player "+color+": proposals length: "+proposals.length) if (proposals.length == pieces.length) { var proposal = decideNextMove() lastMovePieceRef = proposal.getRef() GameCtrl ! ControllerPlayerProtocol.Move(proposal.getMove()._2, proposal.getMove()._1, proposal.getMove()._4, proposal.getMove()._3) sendProposal = proposal become(awaitingMoveAcceptation) } } } def receive = { case NewGame => println(s"Player $color: receive: Received NewGame") // reset game state initPieces(color) GameCtrl = sender() GameCtrl ! 
ControllerPlayerProtocol.Accepted case RequestMove => println(s"Player $color: receive: Received RequestMove") for (piece <- pieces) { piece ! callForProposal(board) } sender() ! ControllerPlayerProtocol.Accepted become(listenFromPieces) case ControllerPlayerProtocol.Move(from_x, from_y, to_x, to_y) => println(s"Player $color: receive: Received Move $from_x, $from_y, $to_x, $to_y") // send to all pieces info about opponent move for (piece <- pieces) { implicit val timeout = Timeout(5 seconds) var future: Future[Any] = piece ? chessagents.protocols.PlayerPiecesProtocol.OpponentMove( from_x, from_y, to_x, to_y); Await.result(future, timeout.duration) } updateBoard(from_x, from_y, to_x, to_y) sender() ! ControllerPlayerProtocol.Accepted // refresh game state based on opponent's move case Destroy => println(s"Player $color: receive: Destroy") pieces -= sender() case YouWin => println(s"Player $color: receive: Received YouWin") context.stop(self) case YouLose => println(s"Player $color: receive: Received YouLose") context.stop(self) case Checked => println(s"Player $color: receive: Received Checked") } def getName: String = { this.color.toString } }
m-kostrzewa/ChessAgents
src/chessagents/Player.scala
Scala
mit
8,153
package edu.washington.cs.knowitall.regex import scala.collection.JavaConverters._ import org.junit.runner.RunWith import org.specs2.mutable.Specification import org.specs2.runner.JUnitRunner import edu.washington.cs.knowitall.regex.Expression.BaseExpression @RunWith(classOf[JUnitRunner]) class WordRegularExpressionTest extends Specification { case class WordToken(string: String, postag: String, chunk: String) def compile(string: String): RegularExpression[WordToken] = { // create a parser for regular expression language that have // the same token representation val parser = new RegularExpressionParser[WordToken]() { // Translate an string "part=value" into a BaseExpression that // checks whether the part of a WordToken has value 'value'. override def factory(string: String): BaseExpression[WordToken] = { new BaseExpression[WordToken](string) { val Array(part, quotedValue) = string.split("=") val value = quotedValue.drop(1).take(quotedValue.size - 2) override def apply(entity: WordToken) = { part match { case "string" => entity.string equalsIgnoreCase value case "postag" => entity.postag equalsIgnoreCase value case "chunk" => entity.chunk equalsIgnoreCase value } } } } } parser.parse(string) } "README regex example one" should { "work" in { val sentence = "The US president Barack Obama is travelling to Mexico." val tokens = Seq( WordToken("The", "DT", null), WordToken("US", "NNP", null), WordToken("president", "NN", null), WordToken("Barack", "NNP", null), WordToken("Obama", "NNP", null), WordToken("is", "VB", null), WordToken("travelling", "VB", null), WordToken("to", "TO", null), WordToken("Mexico", "NN", null), WordToken(".", ".", null)) val regex = compile("""(?:<string='a'> | <string='an'> | <string='the'>)? <postag='JJ'>* <postag='NNP'>+ <postag='NN'>+ <postag='NNP'>+""") val found = Option(regex.find(tokens.asJava)) found.size must_== 1 found.get.groups.get(0).tokens.asScala.map(_.string).mkString(" ") must_== "The US president Barack Obama" } } }
knowitall/openregex
src/test/scala/edu/washington/cs/knowitall/regex/WordRegularExpressionTest.scala
Scala
lgpl-3.0
2,335
package scala.virtualization.lms
package common

import java.io.PrintWriter
import scala.virtualization.lms.internal.GenericNestedCodegen
import collection.mutable.ArrayBuffer
import scala.reflect.SourceContext

trait SynchronizedArrayBufferOps extends ArrayBufferOps {
/*
  object SynchronizedArrayBuffer {
    def apply[A:Manifest](xs: Rep[A]*)(implicit pos: SourceContext) = arraybuffer_new(xs)
  }
*/
}

trait SynchronizedArrayBufferOpsExp extends SynchronizedArrayBufferOps with ArrayBufferOpsExp {
  case class SyncArrayBufferNew[A:Manifest](xs: Seq[Exp[A]]) extends Def[ArrayBuffer[A]] {
    val mA = manifest[A]
  }

  // all array buffers are synchronized (backward compat). TODO: separate constructor
  override def arraybuffer_new[A:Manifest](xs: Seq[Exp[A]])(implicit pos: SourceContext) = reflectMutable(SyncArrayBufferNew(xs))
}

trait BaseGenSynchronizedArrayBufferOps extends BaseGenArrayBufferOps {
  val IR: SynchronizedArrayBufferOpsExp
  import IR._
}

trait ScalaGenSynchronizedArrayBufferOps extends BaseGenSynchronizedArrayBufferOps with ScalaGenArrayBufferOps {
  val IR: SynchronizedArrayBufferOpsExp
  import IR._

  override def emitNode(sym: Sym[Any], rhs: Def[Any]) = rhs match {
    case a@SyncArrayBufferNew(xs) =>
      emitValDef(sym, src"(new scala.collection.mutable.ArrayBuffer[${a.mA}] with scala.collection.mutable.SynchronizedBuffer[${a.mA}]) ++= List(${(xs map {quote}).mkString(",")})")
    case _ => super.emitNode(sym, rhs)
  }
}

trait CLikeGenSynchronizedArrayBufferOps extends BaseGenSynchronizedArrayBufferOps with CLikeGenArrayBufferOps {
  val IR: SynchronizedArrayBufferOpsExp
  import IR._

  override def emitNode(sym: Sym[Any], rhs: Def[Any]) = {
    rhs match {
      case _ => super.emitNode(sym, rhs)
    }
  }
}

trait CudaGenSynchronizedArrayBufferOps extends CudaGenEffect with CLikeGenSynchronizedArrayBufferOps
trait OpenCLGenSynchronizedArrayBufferOps extends OpenCLGenEffect with CLikeGenSynchronizedArrayBufferOps
trait CGenSynchronizedArrayBufferOps extends CGenEffect with CLikeGenSynchronizedArrayBufferOps
afernandez90/virtualization-lms-core
src/common/SynchronizedArrayBufferOps.scala
Scala
bsd-3-clause
2,081
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.aggregate import org.apache.spark.{SparkEnv, TaskContext} import org.apache.spark.internal.{config, Logging} import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.expressions._ import org.apache.spark.sql.catalyst.expressions.aggregate._ import org.apache.spark.sql.catalyst.expressions.codegen.GenerateOrdering import org.apache.spark.sql.execution.UnsafeKVExternalSorter import org.apache.spark.sql.execution.metric.SQLMetric import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.types.StructType import org.apache.spark.unsafe.KVIterator class ObjectAggregationIterator( partIndex: Int, outputAttributes: Seq[Attribute], groupingExpressions: Seq[NamedExpression], aggregateExpressions: Seq[AggregateExpression], aggregateAttributes: Seq[Attribute], initialInputBufferOffset: Int, resultExpressions: Seq[NamedExpression], newMutableProjection: (Seq[Expression], Seq[Attribute]) => MutableProjection, originalInputAttributes: Seq[Attribute], inputRows: Iterator[InternalRow], fallbackCountThreshold: Int, numOutputRows: SQLMetric, spillSize: SQLMetric, numTasksFallBacked: SQLMetric) extends AggregationIterator( partIndex, groupingExpressions, originalInputAttributes, aggregateExpressions, aggregateAttributes, initialInputBufferOffset, resultExpressions, newMutableProjection) with Logging { // Indicates whether we have fallen back to sort-based aggregation or not. private[this] var sortBased: Boolean = false private[this] var aggBufferIterator: Iterator[AggregationBufferEntry] = _ // Remember spill data size of this task before execute this operator so that we can // figure out how many bytes we spilled for this operator. private val spillSizeBefore = TaskContext.get().taskMetrics().memoryBytesSpilled // Hacking the aggregation mode to call AggregateFunction.merge to merge two aggregation buffers private val mergeAggregationBuffers: (InternalRow, InternalRow) => Unit = { val newExpressions = aggregateExpressions.map { case agg @ AggregateExpression(_, Partial, _, _, _) => agg.copy(mode = PartialMerge) case agg @ AggregateExpression(_, Complete, _, _, _) => agg.copy(mode = Final) case other => other } val newFunctions = initializeAggregateFunctions(newExpressions, 0) val newInputAttributes = newFunctions.flatMap(_.inputAggBufferAttributes) generateProcessRow(newExpressions, newFunctions, newInputAttributes) } /** * Start processing input rows. */ processInputs() TaskContext.get().addTaskCompletionListener[Unit](_ => { // At the end of the task, update the task's spill size. 
spillSize.set(TaskContext.get().taskMetrics().memoryBytesSpilled - spillSizeBefore) }) override final def hasNext: Boolean = { aggBufferIterator.hasNext } override final def next(): UnsafeRow = { val entry = aggBufferIterator.next() val res = generateOutput(entry.groupingKey, entry.aggregationBuffer) numOutputRows += 1 res } /** * Generate an output row when there is no input and there is no grouping expression. */ def outputForEmptyGroupingKeyWithoutInput(): UnsafeRow = { if (groupingExpressions.isEmpty) { val defaultAggregationBuffer = createNewAggregationBuffer() generateOutput(UnsafeRow.createFromByteArray(0, 0), defaultAggregationBuffer) } else { throw new IllegalStateException( "This method should not be called when groupingExpressions is not empty.") } } // Creates a new aggregation buffer and initializes buffer values. This function should only be // called under two cases: // // - when creating aggregation buffer for a new group in the hash map, and // - when creating the re-used buffer for sort-based aggregation private def createNewAggregationBuffer(): SpecificInternalRow = { val bufferFieldTypes = aggregateFunctions.flatMap(_.aggBufferAttributes.map(_.dataType)) val buffer = new SpecificInternalRow(bufferFieldTypes) initAggregationBuffer(buffer) buffer } private def initAggregationBuffer(buffer: SpecificInternalRow): Unit = { // Initializes declarative aggregates' buffer values expressionAggInitialProjection.target(buffer)(EmptyRow) // Initializes imperative aggregates' buffer values aggregateFunctions.collect { case f: ImperativeAggregate => f }.foreach(_.initialize(buffer)) } private def getAggregationBufferByKey( hashMap: ObjectAggregationMap, groupingKey: UnsafeRow): InternalRow = { var aggBuffer = hashMap.getAggregationBuffer(groupingKey) if (aggBuffer == null) { aggBuffer = createNewAggregationBuffer() hashMap.putAggregationBuffer(groupingKey.copy(), aggBuffer) } aggBuffer } // This function is used to read and process input rows. When processing input rows, it first uses // hash-based aggregation by putting groups and their buffers in `hashMap`. If `hashMap` grows too // large, it sorts the contents, spills them to disk, and creates a new map. At last, all sorted // spills are merged together for sort-based aggregation. private def processInputs(): Unit = { // In-memory map to store aggregation buffer for hash-based aggregation. val hashMap = new ObjectAggregationMap() // If in-memory map is unable to stores all aggregation buffer, fallback to sort-based // aggregation backed by sorted physical storage. var sortBasedAggregationStore: SortBasedAggregator = null if (groupingExpressions.isEmpty) { // If there is no grouping expressions, we can just reuse the same buffer over and over again. val groupingKey = groupingProjection.apply(null) val buffer: InternalRow = getAggregationBufferByKey(hashMap, groupingKey) while (inputRows.hasNext) { processRow(buffer, inputRows.next()) } } else { while (inputRows.hasNext && !sortBased) { val newInput = inputRows.next() val groupingKey = groupingProjection.apply(newInput) val buffer: InternalRow = getAggregationBufferByKey(hashMap, groupingKey) processRow(buffer, newInput) // The hash map gets too large, makes a sorted spill and clear the map. if (hashMap.size >= fallbackCountThreshold && inputRows.hasNext) { logInfo( s"Aggregation hash map size ${hashMap.size} reaches threshold " + s"capacity ($fallbackCountThreshold entries), spilling and falling back to sort" + " based aggregation. 
You may change the threshold by adjust option " + SQLConf.OBJECT_AGG_SORT_BASED_FALLBACK_THRESHOLD.key ) // Falls back to sort-based aggregation sortBased = true numTasksFallBacked += 1 } } if (sortBased) { val sortIteratorFromHashMap = hashMap .dumpToExternalSorter(groupingAttributes, aggregateFunctions) .sortedIterator() sortBasedAggregationStore = new SortBasedAggregator( sortIteratorFromHashMap, StructType.fromAttributes(originalInputAttributes), StructType.fromAttributes(groupingAttributes), processRow, mergeAggregationBuffers, createNewAggregationBuffer()) while (inputRows.hasNext) { // NOTE: The input row is always UnsafeRow val unsafeInputRow = inputRows.next().asInstanceOf[UnsafeRow] val groupingKey = groupingProjection.apply(unsafeInputRow) sortBasedAggregationStore.addInput(groupingKey, unsafeInputRow) } } } if (sortBased) { aggBufferIterator = sortBasedAggregationStore.destructiveIterator() } else { aggBufferIterator = hashMap.iterator } } } /** * A class used to handle sort-based aggregation, used together with [[ObjectHashAggregateExec]]. * * @param initialAggBufferIterator iterator that points to sorted input aggregation buffers * @param inputSchema The schema of input row * @param groupingSchema The schema of grouping key * @param processRow Function to update the aggregation buffer with input rows * @param mergeAggregationBuffers Function used to merge the input aggregation buffers into existing * aggregation buffers * @param makeEmptyAggregationBuffer Creates an empty aggregation buffer * * @todo Try to eliminate this class by refactor and reuse code paths in [[SortAggregateExec]]. */ class SortBasedAggregator( initialAggBufferIterator: KVIterator[UnsafeRow, UnsafeRow], inputSchema: StructType, groupingSchema: StructType, processRow: (InternalRow, InternalRow) => Unit, mergeAggregationBuffers: (InternalRow, InternalRow) => Unit, makeEmptyAggregationBuffer: => InternalRow) { // external sorter to sort the input (grouping key + input row) with grouping key. private val inputSorter = createExternalSorterForInput() private val groupingKeyOrdering: BaseOrdering = GenerateOrdering.create(groupingSchema) def addInput(groupingKey: UnsafeRow, inputRow: UnsafeRow): Unit = { inputSorter.insertKV(groupingKey, inputRow) } /** * Returns a destructive iterator of AggregationBufferEntry. * Notice: it is illegal to call any method after `destructiveIterator()` has been called. */ def destructiveIterator(): Iterator[AggregationBufferEntry] = { new Iterator[AggregationBufferEntry] { val inputIterator = inputSorter.sortedIterator() var hasNextInput: Boolean = inputIterator.next() var hasNextAggBuffer: Boolean = initialAggBufferIterator.next() private var result: AggregationBufferEntry = _ private var groupingKey: UnsafeRow = _ override def hasNext(): Boolean = { result != null || findNextSortedGroup() } override def next(): AggregationBufferEntry = { val returnResult = result result = null returnResult } // Two-way merges initialAggBufferIterator and inputIterator private def findNextSortedGroup(): Boolean = { if (hasNextInput || hasNextAggBuffer) { // Find smaller key of the initialAggBufferIterator and initialAggBufferIterator groupingKey = findGroupingKey() result = new AggregationBufferEntry(groupingKey, makeEmptyAggregationBuffer) // Firstly, update the aggregation buffer with input rows. 
while (hasNextInput && groupingKeyOrdering.compare(inputIterator.getKey, groupingKey) == 0) { processRow(result.aggregationBuffer, inputIterator.getValue) hasNextInput = inputIterator.next() } // Secondly, merge the aggregation buffer with existing aggregation buffers. // NOTE: the ordering of these two while-block matter, mergeAggregationBuffer() should // be called after calling processRow. while (hasNextAggBuffer && groupingKeyOrdering.compare(initialAggBufferIterator.getKey, groupingKey) == 0) { mergeAggregationBuffers(result.aggregationBuffer, initialAggBufferIterator.getValue) hasNextAggBuffer = initialAggBufferIterator.next() } true } else { false } } private def findGroupingKey(): UnsafeRow = { var newGroupingKey: UnsafeRow = null if (!hasNextInput) { newGroupingKey = initialAggBufferIterator.getKey } else if (!hasNextAggBuffer) { newGroupingKey = inputIterator.getKey } else { val compareResult = groupingKeyOrdering.compare(inputIterator.getKey, initialAggBufferIterator.getKey) if (compareResult <= 0) { newGroupingKey = inputIterator.getKey } else { newGroupingKey = initialAggBufferIterator.getKey } } if (groupingKey == null) { groupingKey = newGroupingKey.copy() } else { groupingKey.copyFrom(newGroupingKey) } groupingKey } } } private def createExternalSorterForInput(): UnsafeKVExternalSorter = { new UnsafeKVExternalSorter( groupingSchema, inputSchema, SparkEnv.get.blockManager, SparkEnv.get.serializerManager, TaskContext.get().taskMemoryManager().pageSizeBytes, SparkEnv.get.conf.get(config.SHUFFLE_SPILL_NUM_ELEMENTS_FORCE_SPILL_THRESHOLD), null ) } }
maropu/spark
sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/ObjectAggregationIterator.scala
Scala
apache-2.0
13,356
/* * Copyright 2013 - 2020 Outworkers Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.outworkers.phantom.builder.query import com.outworkers.phantom.builder.QueryBuilder import com.outworkers.phantom.builder.query.engine.{CQLQuery, MergeList, QueryPart} import com.outworkers.phantom.builder.syntax.CQLSyntax sealed abstract class CQLQueryPart[Part <: CQLQueryPart[Part]]( override val queries: Seq[CQLQuery] ) extends QueryPart[Part](queries) { override def mergeList(list: Seq[CQLQuery]): MergeList = new MergeList(list) } sealed class UsingPart(override val queries: Seq[CQLQuery] = Seq.empty) extends CQLQueryPart[UsingPart](queries) { override def qb: CQLQuery = { if (queries.nonEmpty) { QueryBuilder.Update.usingPart(queries) } else { CQLQuery.empty } } override def instance(l: Seq[CQLQuery]): UsingPart = new UsingPart(l) } object UsingPart { def empty: UsingPart = new UsingPart() } sealed class WherePart( override val queries: Seq[CQLQuery] = Seq.empty ) extends CQLQueryPart[WherePart](queries) { override def qb: CQLQuery = QueryBuilder.Update.clauses(queries) override def instance(list: Seq[CQLQuery]): WherePart = new WherePart(list) } object WherePart { def empty: WherePart = new WherePart(Nil) } sealed class LimitedPart(override val queries: Seq[CQLQuery] = Seq.empty) extends CQLQueryPart[LimitedPart](queries) { override def qb: CQLQuery = QueryBuilder.Update.clauses(queries) override def instance(l: Seq[CQLQuery]): LimitedPart = new LimitedPart(l) } object LimitedPart { def empty: LimitedPart = new LimitedPart(Nil) } sealed class OrderPart(override val queries: Seq[CQLQuery] = Seq.empty) extends CQLQueryPart[OrderPart](queries) { override def qb: CQLQuery = queries match { case Seq() => CQLQuery.empty case _ => QueryBuilder.Select.Ordering.orderBy(queries) } override def instance(l: Seq[CQLQuery]): OrderPart = new OrderPart(l) } object OrderPart { def empty: OrderPart = new OrderPart(Nil) } sealed class FilteringPart(override val queries: Seq[CQLQuery] = Seq.empty) extends CQLQueryPart[FilteringPart](queries) { override def qb: CQLQuery = QueryBuilder.Update.clauses(queries) override def instance(l: Seq[CQLQuery]): FilteringPart = new FilteringPart(l) } object FilteringPart { def empty: FilteringPart = new FilteringPart() } sealed class SetPart(override val queries: Seq[CQLQuery] = Seq.empty) extends CQLQueryPart[SetPart](queries) { def appendConditionally(qb: CQLQuery, flag: Boolean): SetPart = { if (flag) { append(qb) } else { this } } override def qb: CQLQuery = queries match { case Seq() => CQLQuery.empty case _ => QueryBuilder.Update.set(QueryBuilder.Update.chain(queries)) } override def instance(l: Seq[CQLQuery]): SetPart = new SetPart(l) } object SetPart { def empty: SetPart = new SetPart() } sealed class CompareAndSetPart(override val queries: Seq[CQLQuery] = Seq.empty) extends CQLQueryPart[CompareAndSetPart](queries) { override def qb: CQLQuery = QueryBuilder.Update.clauses(queries) override def instance(l: Seq[CQLQuery]): CompareAndSetPart = new CompareAndSetPart(l) } 
object CompareAndSetPart { def empty: CompareAndSetPart = new CompareAndSetPart() } sealed class ColumnsPart(override val queries: Seq[CQLQuery] = Seq.empty) extends CQLQueryPart[ColumnsPart](queries) { override def qb: CQLQuery = QueryBuilder.Insert.columns(queries) override def instance(l: Seq[CQLQuery]): ColumnsPart = new ColumnsPart(l) } object ColumnsPart { def empty: ColumnsPart = new ColumnsPart() } sealed class ValuePart(override val queries: Seq[CQLQuery] = Seq.empty) extends CQLQueryPart[ValuePart](queries) { override def qb: CQLQuery = QueryBuilder.Insert.values(queries) override def instance(l: Seq[CQLQuery]): ValuePart = new ValuePart(l) } object ValuePart { def empty: ValuePart = new ValuePart() } sealed class LightweightPart(override val queries: Seq[CQLQuery] = Seq.empty) extends CQLQueryPart[LightweightPart](queries) { override def qb: CQLQuery = QueryBuilder.Update.clauses(queries) override def instance(l: Seq[CQLQuery]): LightweightPart = new LightweightPart(l) } object LightweightPart { def empty: LightweightPart = new LightweightPart() } sealed class WithPart(override val queries: Seq[CQLQuery] = Seq.empty) extends CQLQueryPart[WithPart](queries) { override def qb: CQLQuery = QueryBuilder.Alter.withOptions(queries) override def instance(l: Seq[CQLQuery]): WithPart = new WithPart(l) } object WithPart { def empty: WithPart = new WithPart() } sealed class OptionPart(override val queries: Seq[CQLQuery] = Seq.empty) extends CQLQueryPart[OptionPart](queries) { override def qb: CQLQuery = QueryBuilder.Utils.options(queries) override def instance(l: Seq[CQLQuery]): OptionPart = new OptionPart(l) def option(key: String, value: String): OptionPart = { append { QueryBuilder.Utils.option( CQLQuery.escape(key), CQLSyntax.Symbols.colon, value ) } } def option(key: String, value: Boolean): OptionPart = { option(key, CQLQuery.escape(value.toString)) } } object OptionPart { def apply(qb: CQLQuery): OptionPart = new OptionPart(qb :: Nil) def empty: OptionPart = new OptionPart() } sealed class AlterPart( override val queries: Seq[CQLQuery] = Seq.empty ) extends CQLQueryPart[AlterPart](queries) { override def qb: CQLQuery = { if (queries.nonEmpty) { QueryBuilder.Alter.alter(queries) } else { CQLQuery.empty } } override def instance(list: Seq[CQLQuery]): AlterPart = new AlterPart(list) } object AlterPart { def empty: AlterPart = new AlterPart(Nil) } sealed class AddPart( override val queries: Seq[CQLQuery] = Seq.empty ) extends CQLQueryPart[AddPart](queries) { override def qb: CQLQuery = { if (queries.nonEmpty) { QueryBuilder.Alter.addAll(queries) } else { CQLQuery.empty } } override def instance(list: Seq[CQLQuery]): AddPart = new AddPart(list) } object AddPart { def empty: AddPart = new AddPart(Nil) } sealed class RenamePart( override val queries: Seq[CQLQuery] = Seq.empty ) extends CQLQueryPart[RenamePart](queries) { override def qb: CQLQuery = { if (queries.nonEmpty) { QueryBuilder.Alter.rename(queries) } else { CQLQuery.empty } } override def instance(list: Seq[CQLQuery]): RenamePart = new RenamePart(list) } object RenamePart { def empty: RenamePart = new RenamePart(Nil) } sealed class DropPart( override val queries: Seq[CQLQuery] = Seq.empty ) extends CQLQueryPart[DropPart](queries) { override def qb: CQLQuery = { if (queries.nonEmpty) { QueryBuilder.Alter.dropAll(queries) } else { CQLQuery.empty } } override def instance(list: Seq[CQLQuery]): DropPart = new DropPart(list) } object DropPart { def empty: DropPart = new DropPart(Nil) }
outworkers/phantom
phantom-dsl/src/main/scala/com/outworkers/phantom/builder/query/Parts.scala
Scala
apache-2.0
7,476
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.ml.ann import breeze.linalg.{*, DenseMatrix => BDM, DenseVector => BDV, Vector => BV, axpy => Baxpy, sum => Bsum} import breeze.numerics.{log => Blog, sigmoid => Bsigmoid} import org.apache.spark.mllib.linalg.{Vector, Vectors} import org.apache.spark.mllib.optimization._ import org.apache.spark.rdd.RDD import org.apache.spark.util.random.XORShiftRandom /** * Trait that holds Layer properties, that are needed to instantiate it. * 包含实例化所需的图层属性的特征 * Implements Layer instantiation. * */ private[ann] trait Layer extends Serializable { /** * Returns the instance of the layer based on weights provided * 根据提供的权重返回图层的实例 * @param weights vector with layer weights * @param position position of weights in the vector * @return the layer model */ def getInstance(weights: Vector, position: Int): LayerModel /** * Returns the instance of the layer with random generated weights * 返回具有随机生成权重的图层实例 * @param seed seed * @return the layer model */ def getInstance(seed: Long): LayerModel } /** * Trait that holds Layer weights (or parameters). * 保持图层权重(或参数)的特征。 * Implements functions needed for forward propagation, computing delta and gradient. * Can return weights in Vector format. 
* 实现前向传播,计算增量和梯度所需的功能,可以以Vector格式返回权重。 */ private[ann] trait LayerModel extends Serializable { /** * number of weights */ val size: Int /** * Evaluates the data (process the data through the layer) * 评估数据(通过图层处理数据) * @param data data * @return processed data */ def eval(data: BDM[Double]): BDM[Double] /** * Computes the delta for back propagation * 计算反向传播的增量 * @param nextDelta delta of the next layer * @param input input data * @return delta */ def prevDelta(nextDelta: BDM[Double], input: BDM[Double]): BDM[Double] /** * Computes the gradient计算渐变 * @param delta delta for this layer * @param input input data * @return gradient */ def grad(delta: BDM[Double], input: BDM[Double]): Array[Double] /** * Returns weights for the layer in a single vector * 返回单个向量中图层的权重 * @return layer weights */ def weights(): Vector } /** * Layer properties of affine transformations, that is y=A*x+b * 仿射变换的层属性,即y = A * x + b * @param numIn number of inputs * @param numOut number of outputs */ private[ann] class AffineLayer(val numIn: Int, val numOut: Int) extends Layer { override def getInstance(weights: Vector, position: Int): LayerModel = { AffineLayerModel(this, weights, position) } override def getInstance(seed: Long = 11L): LayerModel = { AffineLayerModel(this, seed) } } /** * Model of Affine layer y=A*x+b * @param w weights (matrix A) * @param b bias (vector b) */ private[ann] class AffineLayerModel private(w: BDM[Double], b: BDV[Double]) extends LayerModel { val size = w.size + b.length val gwb = new Array[Double](size) private lazy val gw: BDM[Double] = new BDM[Double](w.rows, w.cols, gwb) private lazy val gb: BDV[Double] = new BDV[Double](gwb, w.size) private var z: BDM[Double] = null private var d: BDM[Double] = null private var ones: BDV[Double] = null override def eval(data: BDM[Double]): BDM[Double] = { if (z == null || z.cols != data.cols) z = new BDM[Double](w.rows, data.cols) z(::, *) := b BreezeUtil.dgemm(1.0, w, data, 1.0, z) z } override def prevDelta(nextDelta: BDM[Double], input: BDM[Double]): BDM[Double] = { if (d == null || d.cols != nextDelta.cols) d = new BDM[Double](w.cols, nextDelta.cols) BreezeUtil.dgemm(1.0, w.t, nextDelta, 0.0, d) d } override def grad(delta: BDM[Double], input: BDM[Double]): Array[Double] = { BreezeUtil.dgemm(1.0 / input.cols, delta, input.t, 0.0, gw) if (ones == null || ones.length != delta.cols) ones = BDV.ones[Double](delta.cols) BreezeUtil.dgemv(1.0 / input.cols, delta, ones, 0.0, gb) gwb } override def weights(): Vector = AffineLayerModel.roll(w, b) } /** * Fabric for Affine layer models */ private[ann] object AffineLayerModel { /** * Creates a model of Affine layer * 创建仿射层模型 * @param layer layer properties * @param weights vector with weights * @param position position of weights in the vector * @return model of Affine layer */ def apply(layer: AffineLayer, weights: Vector, position: Int): AffineLayerModel = { val (w, b) = unroll(weights, position, layer.numIn, layer.numOut) new AffineLayerModel(w, b) } /** * Creates a model of Affine layer * 创建仿射层模型 * @param layer layer properties * @param seed seed * @return model of Affine layer */ def apply(layer: AffineLayer, seed: Long): AffineLayerModel = { val (w, b) = randomWeights(layer.numIn, layer.numOut, seed) new AffineLayerModel(w, b) } /** * Unrolls the weights from the vector * 从矢量中展开权重 * @param weights vector with weights * @param position position of weights for this layer * @param numIn number of layer inputs * @param numOut number of layer outputs * @return matrix A and vector b */ def unroll( weights: 
Vector, position: Int, numIn: Int, numOut: Int): (BDM[Double], BDV[Double]) = { val weightsCopy = weights.toArray // TODO: the array is not copied to BDMs, make sure this is OK! val a = new BDM[Double](numOut, numIn, weightsCopy, position) val b = new BDV[Double](weightsCopy, position + (numOut * numIn), 1, numOut) (a, b) } /** * Roll the layer weights into a vector * @param a matrix A * @param b vector b * @return vector of weights */ def roll(a: BDM[Double], b: BDV[Double]): Vector = { val result = new Array[Double](a.size + b.length) // TODO: make sure that we need to copy! System.arraycopy(a.toArray, 0, result, 0, a.size) System.arraycopy(b.toArray, 0, result, a.size, b.length) Vectors.dense(result) } /** * Generate random weights for the layer * @param numIn number of inputs * @param numOut number of outputs * @param seed seed * @return (matrix A, vector b) */ def randomWeights(numIn: Int, numOut: Int, seed: Long = 11L): (BDM[Double], BDV[Double]) = { val rand: XORShiftRandom = new XORShiftRandom(seed) val weights = BDM.fill[Double](numOut, numIn){ (rand.nextDouble * 4.8 - 2.4) / numIn } val bias = BDV.fill[Double](numOut){ (rand.nextDouble * 4.8 - 2.4) / numIn } (weights, bias) } } /** * Trait for functions and their derivatives for functional layers */ private[ann] trait ActivationFunction extends Serializable { /** * Implements a function * @param x input data * @param y output data */ def eval(x: BDM[Double], y: BDM[Double]): Unit /** * Implements a derivative of a function (needed for the back propagation) * @param x input data * @param y output data */ def derivative(x: BDM[Double], y: BDM[Double]): Unit /** * Implements a cross entropy error of a function. * Needed if the functional layer that contains this function is the output layer * of the network. 
* @param target target output * @param output computed output * @param result intermediate result * @return cross-entropy */ def crossEntropy(target: BDM[Double], output: BDM[Double], result: BDM[Double]): Double /** * Implements a mean squared error of a function * @param target target output * @param output computed output * @param result intermediate result * @return mean squared error */ def squared(target: BDM[Double], output: BDM[Double], result: BDM[Double]): Double } /** * Implements in-place application of functions */ private[ann] object ActivationFunction { def apply(x: BDM[Double], y: BDM[Double], func: Double => Double): Unit = { var i = 0 while (i < x.rows) { var j = 0 while (j < x.cols) { y(i, j) = func(x(i, j)) j += 1 } i += 1 } } def apply( x1: BDM[Double], x2: BDM[Double], y: BDM[Double], func: (Double, Double) => Double): Unit = { var i = 0 while (i < x1.rows) { var j = 0 while (j < x1.cols) { y(i, j) = func(x1(i, j), x2(i, j)) j += 1 } i += 1 } } } /** * Implements SoftMax activation function */ private[ann] class SoftmaxFunction extends ActivationFunction { override def eval(x: BDM[Double], y: BDM[Double]): Unit = { var j = 0 // find max value to make sure later that exponent is computable while (j < x.cols) { var i = 0 var max = Double.MinValue while (i < x.rows) { if (x(i, j) > max) { max = x(i, j) } i += 1 } var sum = 0.0 i = 0 while (i < x.rows) { val res = Math.exp(x(i, j) - max) y(i, j) = res sum += res i += 1 } i = 0 while (i < x.rows) { y(i, j) /= sum i += 1 } j += 1 } } override def crossEntropy( output: BDM[Double], target: BDM[Double], result: BDM[Double]): Double = { def m(o: Double, t: Double): Double = o - t ActivationFunction(output, target, result, m) -Bsum( target :* Blog(output)) / output.cols } override def derivative(x: BDM[Double], y: BDM[Double]): Unit = { def sd(z: Double): Double = (1 - z) * z ActivationFunction(x, y, sd) } override def squared(output: BDM[Double], target: BDM[Double], result: BDM[Double]): Double = { throw new UnsupportedOperationException("Sorry, squared error is not defined for SoftMax.") } } /** * Implements Sigmoid activation function */ private[ann] class SigmoidFunction extends ActivationFunction { override def eval(x: BDM[Double], y: BDM[Double]): Unit = { def s(z: Double): Double = Bsigmoid(z) ActivationFunction(x, y, s) } override def crossEntropy( output: BDM[Double], target: BDM[Double], result: BDM[Double]): Double = { def m(o: Double, t: Double): Double = o - t ActivationFunction(output, target, result, m) -Bsum(target :* Blog(output)) / output.cols } override def derivative(x: BDM[Double], y: BDM[Double]): Unit = { def sd(z: Double): Double = (1 - z) * z ActivationFunction(x, y, sd) } override def squared(output: BDM[Double], target: BDM[Double], result: BDM[Double]): Double = { // TODO: make it readable def m(o: Double, t: Double): Double = (o - t) ActivationFunction(output, target, result, m) val e = Bsum(result :* result) / 2 / output.cols def m2(x: Double, o: Double) = x * (o - o * o) ActivationFunction(result, output, result, m2) e } } /** * Functional layer properties, y = f(x) * @param activationFunction activation function */ private[ann] class FunctionalLayer (val activationFunction: ActivationFunction) extends Layer { override def getInstance(weights: Vector, position: Int): LayerModel = getInstance(0L) override def getInstance(seed: Long): LayerModel = FunctionalLayerModel(this) } /** * Functional layer model. Holds no weights. 
* @param activationFunction activation function */ private[ann] class FunctionalLayerModel private (val activationFunction: ActivationFunction) extends LayerModel { val size = 0 // matrices for in-place computations // outputs private var f: BDM[Double] = null // delta private var d: BDM[Double] = null // matrix for error computation private var e: BDM[Double] = null // delta gradient private lazy val dg = new Array[Double](0) override def eval(data: BDM[Double]): BDM[Double] = { if (f == null || f.cols != data.cols) f = new BDM[Double](data.rows, data.cols) activationFunction.eval(data, f) f } override def prevDelta(nextDelta: BDM[Double], input: BDM[Double]): BDM[Double] = { if (d == null || d.cols != nextDelta.cols) d = new BDM[Double](nextDelta.rows, nextDelta.cols) activationFunction.derivative(input, d) d :*= nextDelta d } override def grad(delta: BDM[Double], input: BDM[Double]): Array[Double] = dg override def weights(): Vector = Vectors.dense(new Array[Double](0)) def crossEntropy(output: BDM[Double], target: BDM[Double]): (BDM[Double], Double) = { if (e == null || e.cols != output.cols) e = new BDM[Double](output.rows, output.cols) val error = activationFunction.crossEntropy(output, target, e) (e, error) } def squared(output: BDM[Double], target: BDM[Double]): (BDM[Double], Double) = { if (e == null || e.cols != output.cols) e = new BDM[Double](output.rows, output.cols) val error = activationFunction.squared(output, target, e) (e, error) } def error(output: BDM[Double], target: BDM[Double]): (BDM[Double], Double) = { // TODO: allow user pick error activationFunction match { case sigmoid: SigmoidFunction => squared(output, target) case softmax: SoftmaxFunction => crossEntropy(output, target) } } } /** * Fabric of functional layer models */ private[ann] object FunctionalLayerModel { def apply(layer: FunctionalLayer): FunctionalLayerModel = new FunctionalLayerModel(layer.activationFunction) } /** * Trait for the artificial neural network (ANN) topology properties */ private[ann] trait Topology extends Serializable{ def getInstance(weights: Vector): TopologyModel def getInstance(seed: Long): TopologyModel } /** * Trait for ANN topology model */ private[ann] trait TopologyModel extends Serializable{ /** * Forward propagation * @param data input data * @return array of outputs for each of the layers */ def forward(data: BDM[Double]): Array[BDM[Double]] /** * Prediction of the model * @param data input data * @return prediction */ def predict(data: Vector): Vector /** * Computes gradient for the network * @param data input data * @param target target output * @param cumGradient cumulative gradient * @param blockSize block size * @return error */ def computeGradient(data: BDM[Double], target: BDM[Double], cumGradient: Vector, blockSize: Int): Double /** * Returns the weights of the ANN * @return weights */ def weights(): Vector } /** * Feed forward ANN * @param layers */ private[ann] class FeedForwardTopology private(val layers: Array[Layer]) extends Topology { override def getInstance(weights: Vector): TopologyModel = FeedForwardModel(this, weights) override def getInstance(seed: Long): TopologyModel = FeedForwardModel(this, seed) } /** * Factory for some of the frequently-used topologies */ private[ml] object FeedForwardTopology { /** * Creates a feed forward topology from the array of layers * @param layers array of layers * @return feed forward topology */ def apply(layers: Array[Layer]): FeedForwardTopology = { new FeedForwardTopology(layers) } /** * Creates a multi-layer perceptron 
* @param layerSizes sizes of layers including input and output size * @param softmax wether to use SoftMax or Sigmoid function for an output layer. * Softmax is default * @return multilayer perceptron topology */ def multiLayerPerceptron(layerSizes: Array[Int], softmax: Boolean = true): FeedForwardTopology = { val layers = new Array[Layer]((layerSizes.length - 1) * 2) for(i <- 0 until layerSizes.length - 1){ layers(i * 2) = new AffineLayer(layerSizes(i), layerSizes(i + 1)) layers(i * 2 + 1) = if (softmax && i == layerSizes.length - 2) { new FunctionalLayer(new SoftmaxFunction()) } else { new FunctionalLayer(new SigmoidFunction()) } } FeedForwardTopology(layers) } } /** * Model of Feed Forward Neural Network. * Implements forward, gradient computation and can return weights in vector format. * @param layerModels models of layers * @param topology topology of the network */ private[ml] class FeedForwardModel private( val layerModels: Array[LayerModel], val topology: FeedForwardTopology) extends TopologyModel { override def forward(data: BDM[Double]): Array[BDM[Double]] = { val outputs = new Array[BDM[Double]](layerModels.length) outputs(0) = layerModels(0).eval(data) for (i <- 1 until layerModels.length) { outputs(i) = layerModels(i).eval(outputs(i-1)) } outputs } override def computeGradient( data: BDM[Double], target: BDM[Double], cumGradient: Vector, realBatchSize: Int): Double = { val outputs = forward(data) val deltas = new Array[BDM[Double]](layerModels.length) val L = layerModels.length - 1 val (newE, newError) = layerModels.last match { case flm: FunctionalLayerModel => flm.error(outputs.last, target) case _ => throw new UnsupportedOperationException("Non-functional layer not supported at the top") } deltas(L) = new BDM[Double](0, 0) deltas(L - 1) = newE for (i <- (L - 2) to (0, -1)) { deltas(i) = layerModels(i + 1).prevDelta(deltas(i + 1), outputs(i + 1)) } val grads = new Array[Array[Double]](layerModels.length) for (i <- 0 until layerModels.length) { val input = if (i==0) data else outputs(i - 1) grads(i) = layerModels(i).grad(deltas(i), input) } // update cumGradient val cumGradientArray = cumGradient.toArray var offset = 0 // TODO: extract roll for (i <- 0 until grads.length) { val gradArray = grads(i) var k = 0 while (k < gradArray.length) { cumGradientArray(offset + k) += gradArray(k) k += 1 } offset += gradArray.length } newError } // TODO: do we really need to copy the weights? 
they should be read-only override def weights(): Vector = { // TODO: extract roll var size = 0 for (i <- 0 until layerModels.length) { size += layerModels(i).size } val array = new Array[Double](size) var offset = 0 for (i <- 0 until layerModels.length) { val layerWeights = layerModels(i).weights().toArray System.arraycopy(layerWeights, 0, array, offset, layerWeights.length) offset += layerWeights.length } Vectors.dense(array) } override def predict(data: Vector): Vector = { val size = data.size val result = forward(new BDM[Double](size, 1, data.toArray)) Vectors.dense(result.last.toArray) } } /** * Fabric for feed forward ANN models */ private[ann] object FeedForwardModel { /** * Creates a model from a topology and weights * @param topology topology * @param weights weights * @return model */ def apply(topology: FeedForwardTopology, weights: Vector): FeedForwardModel = { val layers = topology.layers val layerModels = new Array[LayerModel](layers.length) var offset = 0 for (i <- 0 until layers.length) { layerModels(i) = layers(i).getInstance(weights, offset) offset += layerModels(i).size } new FeedForwardModel(layerModels, topology) } /** * Creates a model given a topology and seed * @param topology topology * @param seed seed for generating the weights * @return model */ def apply(topology: FeedForwardTopology, seed: Long = 11L): FeedForwardModel = { val layers = topology.layers val layerModels = new Array[LayerModel](layers.length) var offset = 0 for(i <- 0 until layers.length){ layerModels(i) = layers(i).getInstance(seed) offset += layerModels(i).size } new FeedForwardModel(layerModels, topology) } } /** * Neural network gradient. Does nothing but calling Model's gradient * @param topology topology * @param dataStacker data stacker */ private[ann] class ANNGradient(topology: Topology, dataStacker: DataStacker) extends Gradient { override def compute(data: Vector, label: Double, weights: Vector): (Vector, Double) = { val gradient = Vectors.zeros(weights.size) val loss = compute(data, label, weights, gradient) (gradient, loss) } override def compute( data: Vector, label: Double, weights: Vector, cumGradient: Vector): Double = { val (input, target, realBatchSize) = dataStacker.unstack(data) val model = topology.getInstance(weights) model.computeGradient(input, target, cumGradient, realBatchSize) } } /** * Stacks pairs of training samples (input, output) in one vector allowing them to pass * through Optimizer/Gradient interfaces. If stackSize is more than one, makes blocks * or matrices of inputs and outputs and then stack them in one vector. * This can be used for further batch computations after unstacking. 
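 * For example (illustration of the layout used by stack/unstack below), with
 * stackSize = 2, inputSize = 3 and outputSize = 1, two samples
 * ([x11, x12, x13] -> [y1]) and ([x21, x22, x23] -> [y2]) are packed into the single
 * vector [x11, x12, x13, x21, x22, x23, y1, y2]; unstack then reshapes this back into
 * a 3x2 input matrix and a 1x2 output matrix (one column per sample).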
* @param stackSize stack size * @param inputSize size of the input vectors * @param outputSize size of the output vectors */ private[ann] class DataStacker(stackSize: Int, inputSize: Int, outputSize: Int) extends Serializable { /** * Stacks the data * @param data RDD of vector pairs * @return RDD of double (always zero) and vector that contains the stacked vectors */ def stack(data: RDD[(Vector, Vector)]): RDD[(Double, Vector)] = { val stackedData = if (stackSize == 1) { data.map { v => (0.0, Vectors.fromBreeze(BDV.vertcat( v._1.toBreeze.toDenseVector, v._2.toBreeze.toDenseVector)) ) } } else { data.mapPartitions { it => it.grouped(stackSize).map { seq => val size = seq.size val bigVector = new Array[Double](inputSize * size + outputSize * size) var i = 0 seq.foreach { case (in, out) => System.arraycopy(in.toArray, 0, bigVector, i * inputSize, inputSize) System.arraycopy(out.toArray, 0, bigVector, inputSize * size + i * outputSize, outputSize) i += 1 } (0.0, Vectors.dense(bigVector)) } } } stackedData } /** * Unstack the stacked vectors into matrices for batch operations * @param data stacked vector * @return pair of matrices holding input and output data and the real stack size */ def unstack(data: Vector): (BDM[Double], BDM[Double], Int) = { val arrData = data.toArray val realStackSize = arrData.length / (inputSize + outputSize) val input = new BDM(inputSize, realStackSize, arrData) val target = new BDM(outputSize, realStackSize, arrData, inputSize * realStackSize) (input, target, realStackSize) } } /** * Simple updater */ private[ann] class ANNUpdater extends Updater { override def compute( weightsOld: Vector, gradient: Vector, stepSize: Double, iter: Int, regParam: Double): (Vector, Double) = { val thisIterStepSize = stepSize val brzWeights: BV[Double] = weightsOld.toBreeze.toDenseVector Baxpy(-thisIterStepSize, gradient.toBreeze, brzWeights) (Vectors.fromBreeze(brzWeights), 0) } } /** * MLlib-style trainer class that trains a network given the data and topology * @param topology topology of ANN * @param inputSize input size * @param outputSize output size */ private[ml] class FeedForwardTrainer( topology: Topology, val inputSize: Int, val outputSize: Int) extends Serializable { // TODO: what if we need to pass random seed? 
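  // Defaults, documented here for readability (see the fields below): weights are
  // initialized from the topology with a fixed seed of 11L, the stack size is 128, and
  // the optimizer is LBFGS with a convergence tolerance of 1e-4 and 100 iterations; all
  // of these can be overridden through the setters provided by this class.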
private var _weights = topology.getInstance(11L).weights() private var _stackSize = 128 private var dataStacker = new DataStacker(_stackSize, inputSize, outputSize) private var _gradient: Gradient = new ANNGradient(topology, dataStacker) private var _updater: Updater = new ANNUpdater() private var optimizer: Optimizer = LBFGSOptimizer.setConvergenceTol(1e-4).setNumIterations(100) /** * Returns weights * @return weights */ def getWeights: Vector = _weights /** * Sets weights * @param value weights * @return trainer */ def setWeights(value: Vector): FeedForwardTrainer = { _weights = value this } /** * Sets the stack size * @param value stack size * @return trainer */ def setStackSize(value: Int): FeedForwardTrainer = { _stackSize = value dataStacker = new DataStacker(value, inputSize, outputSize) this } /** * Sets the SGD optimizer * @return SGD optimizer */ def SGDOptimizer: GradientDescent = { val sgd = new GradientDescent(_gradient, _updater) optimizer = sgd sgd } /** * Sets the LBFGS optimizer * @return LBGS optimizer */ def LBFGSOptimizer: LBFGS = { val lbfgs = new LBFGS(_gradient, _updater) optimizer = lbfgs lbfgs } /** * Sets the updater * @param value updater * @return trainer */ def setUpdater(value: Updater): FeedForwardTrainer = { _updater = value updateUpdater(value) this } /** * Sets the gradient * @param value gradient * @return trainer */ def setGradient(value: Gradient): FeedForwardTrainer = { _gradient = value updateGradient(value) this } private[this] def updateGradient(gradient: Gradient): Unit = { optimizer match { case lbfgs: LBFGS => lbfgs.setGradient(gradient) case sgd: GradientDescent => sgd.setGradient(gradient) case other => throw new UnsupportedOperationException( s"Only LBFGS and GradientDescent are supported but got ${other.getClass}.") } } private[this] def updateUpdater(updater: Updater): Unit = { optimizer match { case lbfgs: LBFGS => lbfgs.setUpdater(updater) case sgd: GradientDescent => sgd.setUpdater(updater) case other => throw new UnsupportedOperationException( s"Only LBFGS and GradientDescent are supported but got ${other.getClass}.") } } /** * Trains the ANN * @param data RDD of input and output vector pairs * @return model */ def train(data: RDD[(Vector, Vector)]): TopologyModel = { val newWeights = optimizer.optimize(dataStacker.stack(data), getWeights) topology.getInstance(newWeights) } }
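/*
 * Usage sketch (not part of the original file): how the classes above fit together.
 * This is only an illustration; it assumes it lives inside the org.apache.spark.ml.ann
 * package (the classes are package-private), that the mllib linalg Vector types used by
 * this file are on the classpath, and that the caller already has an RDD of
 * (features, one-hot label) vector pairs.
 */
object FeedForwardTrainerExample {
  import org.apache.spark.mllib.linalg.{Vector, Vectors}
  import org.apache.spark.rdd.RDD

  def trainExample(data: RDD[(Vector, Vector)]): Vector = {
    // A 4-input, 3-class network with one hidden layer of 5 units; the final
    // functional layer defaults to softmax, so cross-entropy error is used.
    val topology = FeedForwardTopology.multiLayerPerceptron(Array(4, 5, 3))

    // inputSize/outputSize must match the first and last layer sizes.
    val trainer = new FeedForwardTrainer(topology, inputSize = 4, outputSize = 3)
    trainer.setStackSize(64)                      // pack 64 samples per gradient call
    trainer.LBFGSOptimizer.setNumIterations(100)  // select and configure LBFGS

    val model = trainer.train(data)               // returns a TopologyModel
    model.predict(Vectors.dense(0.1, 0.2, 0.3, 0.4))
  }
}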
tophua/spark1.52
mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala
Scala
apache-2.0
26,995
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.streaming.api.scala import org.apache.flink.api.common.ExecutionConfig import org.apache.flink.api.common.functions._ import org.apache.flink.api.common.state.{AggregatingStateDescriptor, FoldingStateDescriptor, ListStateDescriptor, ReducingStateDescriptor} import org.apache.flink.api.common.typeinfo.{BasicTypeInfo, TypeInformation} import org.apache.flink.api.java.functions.KeySelector import org.apache.flink.streaming.api.TimeCharacteristic import org.apache.flink.streaming.api.operators.{OneInputStreamOperator, OutputTypeConfigurable} import org.apache.flink.streaming.api.scala.function.{ProcessWindowFunction, WindowFunction} import org.apache.flink.streaming.api.transformations.OneInputTransformation import org.apache.flink.streaming.api.windowing.assigners._ import org.apache.flink.streaming.api.windowing.evictors.CountEvictor import org.apache.flink.streaming.api.windowing.time.Time import org.apache.flink.streaming.api.windowing.triggers.{CountTrigger, EventTimeTrigger, ProcessingTimeTrigger, Trigger} import org.apache.flink.streaming.api.windowing.windows.{TimeWindow, Window} import org.apache.flink.streaming.runtime.operators.windowing._ import org.apache.flink.streaming.runtime.streamrecord.StreamRecord import org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness import org.apache.flink.util.Collector import org.junit.Assert._ import org.junit.Test /** * These tests verify that the api calls on [[WindowedStream]] instantiate the correct * window operator. * * We also create a test harness and push one element into the operator to verify * that we get some output. */ class WindowTranslationTest { // -------------------------------------------------------------------------- // rich function tests // -------------------------------------------------------------------------- /** * .reduce() does not support [[RichReduceFunction]], since the reduce function is used * internally in a [[org.apache.flink.api.common.state.ReducingState]]. */ @Test(expected = classOf[UnsupportedOperationException]) def testReduceWithRichReducerFails() { val env = StreamExecutionEnvironment.getExecutionEnvironment val source = env.fromElements(("hello", 1), ("hello", 2)) env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime) source .keyBy(0) .window(SlidingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .reduce(new RichReduceFunction[(String, Int)] { override def reduce(value1: (String, Int), value2: (String, Int)) = null }) fail("exception was not thrown") } /** * .reduce() does not support [[RichReduceFunction]], since the reduce function is used * internally in a [[org.apache.flink.api.common.state.ReducingState]]. 
*/ @Test(expected = classOf[UnsupportedOperationException]) def testAggregateWithRichFunctionFails() { val env = StreamExecutionEnvironment.getExecutionEnvironment val source = env.fromElements(("hello", 1), ("hello", 2)) env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime) source .keyBy(0) .window(SlidingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .aggregate(new DummyRichAggregator()) fail("exception was not thrown") } /** * .fold() does not support [[RichFoldFunction]], since the reduce function is used internally * in a [[org.apache.flink.api.common.state.FoldingState]]. */ @Test(expected = classOf[UnsupportedOperationException]) def testFoldWithRichFolderFails() { val env = StreamExecutionEnvironment.getExecutionEnvironment val source = env.fromElements(("hello", 1), ("hello", 2)) env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime) source .keyBy(0) .window(SlidingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .fold(("", 0), new RichFoldFunction[(String, Int), (String, Int)] { override def fold(accumulator: (String, Int), value: (String, Int)) = null }) fail("exception was not thrown") } // -------------------------------------------------------------------------- // merging window checks // -------------------------------------------------------------------------- @Test def testSessionWithFoldFails() { // verify that fold does not work with merging windows val env = StreamExecutionEnvironment.getExecutionEnvironment val windowedStream = env.fromElements("Hello", "Ciao") .keyBy(x => x) .window(EventTimeSessionWindows.withGap(Time.seconds(5))) try windowedStream.fold("", new FoldFunction[String, String]() { @throws[Exception] def fold(accumulator: String, value: String): String = accumulator }) catch { case _: UnsupportedOperationException => // expected // use a catch to ensure that the exception is thrown by the fold return } fail("The fold call should fail.") } @Test def testMergingAssignerWithNonMergingTriggerFails() { // verify that we check for trigger compatibility val env = StreamExecutionEnvironment.getExecutionEnvironment val windowedStream = env.fromElements("Hello", "Ciao") .keyBy(x => x) .window(EventTimeSessionWindows.withGap(Time.seconds(5))) try windowedStream.trigger(new Trigger[String, TimeWindow]() { def onElement( element: String, timestamp: Long, window: TimeWindow, ctx: Trigger.TriggerContext) = null def onProcessingTime(time: Long, window: TimeWindow, ctx: Trigger.TriggerContext) = null def onEventTime(time: Long, window: TimeWindow, ctx: Trigger.TriggerContext) = null override def canMerge = false def clear(window: TimeWindow, ctx: Trigger.TriggerContext) {} }) catch { case _: UnsupportedOperationException => // expected // use a catch to ensure that the exception is thrown by the fold return } fail("The trigger call should fail.") } @Test def testMergingWindowsWithEvictor() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(EventTimeSessionWindows.withGap(Time.seconds(1))) .evictor(CountEvictor.of(2)) .process(new TestProcessWindowFunction) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator 
.asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[EventTimeSessionWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[ListStateDescriptor[_]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } // -------------------------------------------------------------------------- // reduce() tests // -------------------------------------------------------------------------- @Test def testReduceEventTime() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(SlidingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .reduce(new DummyReducer) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[SlidingEventTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[ReducingStateDescriptor[_]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testReduceProcessingTime() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(SlidingProcessingTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .reduce(new DummyReducer) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[ProcessingTimeTrigger]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[SlidingProcessingTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[ReducingStateDescriptor[_]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testReduceEventTimeWithScalaFunction() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(SlidingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .reduce( (x, _) => x ) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: 
Window]] assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[SlidingEventTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[ReducingStateDescriptor[_]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testReduceWithWindowFunctionEventTime() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(TumblingEventTimeWindows.of(Time.seconds(1))) .reduce( new DummyReducer, new WindowFunction[(String, Int), (String, Int), String, TimeWindow] { override def apply( key: String, window: TimeWindow, input: Iterable[(String, Int)], out: Collector[(String, Int)]): Unit = input foreach ( x => out.collect(x)) }) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[ReducingStateDescriptor[_]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testReduceWithWindowFunctionProcessingTime() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(TumblingProcessingTimeWindows.of(Time.seconds(1))) .reduce( new DummyReducer, new WindowFunction[(String, Int), (String, Int), String, TimeWindow] { override def apply( key: String, window: TimeWindow, input: Iterable[(String, Int)], out: Collector[(String, Int)]): Unit = input foreach ( x => out.collect(x)) }) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[ProcessingTimeTrigger]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingProcessingTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[ReducingStateDescriptor[_]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testReduceWithProcessWindowFunctionEventTime() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(TumblingEventTimeWindows.of(Time.seconds(1))) .reduce( new DummyReducer, new ProcessWindowFunction[(String, Int), (String, Int), String, TimeWindow] { override def process( key: String, window: Context, 
input: Iterable[(String, Int)], out: Collector[(String, Int)]): Unit = input foreach ( x => out.collect(x)) }) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[ReducingStateDescriptor[_]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testReduceWithProcessWindowFunctionProcessingTime() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(TumblingProcessingTimeWindows.of(Time.seconds(1))) .reduce( new DummyReducer, new ProcessWindowFunction[(String, Int), (String, Int), String, TimeWindow] { override def process( key: String, window: Context, input: Iterable[(String, Int)], out: Collector[(String, Int)]): Unit = input foreach ( x => out.collect(x)) }) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[ProcessingTimeTrigger]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingProcessingTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[ReducingStateDescriptor[_]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testApplyWithPreReducerEventTime() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(TumblingEventTimeWindows.of(Time.seconds(1))) .apply( new DummyReducer, new WindowFunction[(String, Int), (String, Int), String, TimeWindow] { override def apply( key: String, window: TimeWindow, input: Iterable[(String, Int)], out: Collector[(String, Int)]): Unit = input foreach ( x => out.collect(x)) }) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[ReducingStateDescriptor[_]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def 
testApplyWithPreReducerAndEvictor() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(TumblingEventTimeWindows.of(Time.seconds(1))) .evictor(CountEvictor.of(100)) .apply( new DummyReducer, new WindowFunction[(String, Int), (String, Int), String, TimeWindow] { override def apply( key: String, window: TimeWindow, input: Iterable[(String, Int)], out: Collector[(String, Int)]): Unit = input foreach ( x => out.collect(x)) }) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[ListStateDescriptor[_]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testReduceWithWindowFunctionEventTimeWithScalaFunction() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(TumblingEventTimeWindows.of(Time.seconds(1))) .reduce( { (x, _) => x }, { (_, _, in, out: Collector[(String, Int)]) => in foreach { x => out.collect(x)} }) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[ReducingStateDescriptor[_]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } // -------------------------------------------------------------------------- // aggregate() tests // -------------------------------------------------------------------------- @Test def testAggregateEventTime() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(SlidingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .aggregate(new DummyAggregator()) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger]) 
assertTrue(winOperator.getWindowAssigner.isInstanceOf[SlidingEventTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[AggregatingStateDescriptor[_, _, _]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testAggregateProcessingTime() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(SlidingProcessingTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .aggregate(new DummyAggregator()) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[ProcessingTimeTrigger]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[SlidingProcessingTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[AggregatingStateDescriptor[_, _, _]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testAggregateWithWindowFunctionEventTime() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(TumblingEventTimeWindows.of(Time.seconds(1))) .aggregate(new DummyAggregator(), new TestWindowFunction()) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[AggregatingStateDescriptor[_, _, _]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testAggregateWithWindowFunctionProcessingTime() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(TumblingProcessingTimeWindows.of(Time.seconds(1))) .aggregate(new DummyAggregator(), new TestWindowFunction()) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[ProcessingTimeTrigger]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingProcessingTimeWindows]) 
assertTrue(winOperator.getStateDescriptor.isInstanceOf[AggregatingStateDescriptor[_, _, _]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testAggregateWithProcessWindowFunctionEventTime() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(TumblingEventTimeWindows.of(Time.seconds(1))) .aggregate(new DummyAggregator(), new TestProcessWindowFunction()) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[AggregatingStateDescriptor[_, _, _]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testAggregateWithProcessWindowFunctionProcessingTime() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(TumblingProcessingTimeWindows.of(Time.seconds(1))) .aggregate(new DummyAggregator(), new TestProcessWindowFunction()) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[ProcessingTimeTrigger]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingProcessingTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[AggregatingStateDescriptor[_, _, _]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testAggregateWithWindowFunctionEventTimeWithScalaFunction() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(TumblingEventTimeWindows.of(Time.seconds(1))) .aggregate(new DummyAggregator(), { (_, _, in: Iterable[(String, Int)], out: Collector[(String, Int)]) => { in foreach { x => out.collect(x)} } }) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows]) 
assertTrue(winOperator.getStateDescriptor.isInstanceOf[AggregatingStateDescriptor[_, _, _]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } // -------------------------------------------------------------------------- // fold() tests // -------------------------------------------------------------------------- @Test def testFoldEventTime() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(SlidingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .fold(("", "", 1), new DummyFolder) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[SlidingEventTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[FoldingStateDescriptor[_, _]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testFoldProcessingTime() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(SlidingProcessingTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .fold(("", "", 1), new DummyFolder) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[ProcessingTimeTrigger]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[SlidingProcessingTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[FoldingStateDescriptor[_, _]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testFoldEventTimeWithScalaFunction() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(SlidingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .fold(("", "", 1)) { (acc, _) => acc } val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[SlidingEventTimeWindows]) 
assertTrue(winOperator.getStateDescriptor.isInstanceOf[FoldingStateDescriptor[_, _]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testFoldWithWindowFunctionEventTime() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(TumblingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .fold( ("", "", 1), new DummyFolder, new WindowFunction[(String, String, Int), (String, Int), String, TimeWindow] { override def apply( key: String, window: TimeWindow, input: Iterable[(String, String, Int)], out: Collector[(String, Int)]): Unit = input foreach {x => out.collect((x._1, x._3))} }) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[FoldingStateDescriptor[_, _]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testFoldWithWindowFunctionProcessingTime() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(TumblingProcessingTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .fold( ("", "", 1), new DummyFolder, new WindowFunction[(String, String, Int), (String, Int), String, TimeWindow] { override def apply( key: String, window: TimeWindow, input: Iterable[(String, String, Int)], out: Collector[(String, Int)]): Unit = input foreach {x => out.collect((x._1, x._3))} }) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[ProcessingTimeTrigger]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingProcessingTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[FoldingStateDescriptor[_, _]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testFoldWithProcessWindowFunctionEventTime() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(TumblingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .fold( ("", "", 1), new DummyFolder, new ProcessWindowFunction[(String, String, Int), (String, Int), String, TimeWindow] { override def process( key: String, window: 
Context, input: Iterable[(String, String, Int)], out: Collector[(String, Int)]): Unit = input foreach {x => out.collect((x._1, x._3))} }) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[FoldingStateDescriptor[_, _]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testFoldWithProcessWindowFunctionProcessingTime() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(TumblingProcessingTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .fold( ("", "", 1), new DummyFolder, new ProcessWindowFunction[(String, String, Int), (String, Int), String, TimeWindow] { override def process( key: String, window: Context, input: Iterable[(String, String, Int)], out: Collector[(String, Int)]): Unit = input foreach {x => out.collect((x._1, x._3))} }) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[ProcessingTimeTrigger]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingProcessingTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[FoldingStateDescriptor[_, _]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testApplyWithPreFolderEventTime() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(TumblingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .apply( ("", "", 1), new DummyFolder, new WindowFunction[(String, String, Int), (String, String, Int), String, TimeWindow] { override def apply( key: String, window: TimeWindow, input: Iterable[(String, String, Int)], out: Collector[(String, String, Int)]): Unit = input foreach {x => out.collect((x._1, x._2, x._3))} }) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[FoldingStateDescriptor[_, _]]) 
processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testApplyWithPreFolderAndEvictor() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(TumblingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .evictor(CountEvictor.of(100)) .apply( ("", "", 1), new DummyFolder, new WindowFunction[(String, String, Int), (String, String, Int), String, TimeWindow] { override def apply( key: String, window: TimeWindow, input: Iterable[(String, String, Int)], out: Collector[(String, String, Int)]): Unit = input foreach {x => out.collect((x._1, x._2, x._3))} }) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[ListStateDescriptor[_]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testFoldWithWindowFunctionEventTimeWithScalaFunction() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(TumblingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .fold( ("", "", 1), { (acc: (String, String, Int), _) => acc }, { (_, _, in: Iterable[(String, String, Int)], out: Collector[(String, Int)]) => in foreach { x => out.collect((x._1, x._3)) } }) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[FoldingStateDescriptor[_, _]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } // -------------------------------------------------------------------------- // apply() tests // -------------------------------------------------------------------------- @Test def testApplyEventTime() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(TumblingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .apply( new WindowFunction[(String, Int), (String, Int), String, TimeWindow] { override def apply( key: String, window: TimeWindow, input: Iterable[(String, Int)], out: 
Collector[(String, Int)]): Unit = input foreach {x => out.collect((x._1, x._2))} }) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[ListStateDescriptor[_]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testApplyProcessingTime() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(TumblingProcessingTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .apply( new WindowFunction[(String, Int), (String, Int), String, TimeWindow] { override def apply( key: String, window: TimeWindow, input: Iterable[(String, Int)], out: Collector[(String, Int)]): Unit = input foreach {x => out.collect((x._1, x._2))} }) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[ProcessingTimeTrigger]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingProcessingTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[ListStateDescriptor[_]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testProcessEventTime() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(TumblingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .process( new ProcessWindowFunction[(String, Int), (String, Int), String, TimeWindow] { override def process( key: String, window: Context, input: Iterable[(String, Int)], out: Collector[(String, Int)]): Unit = input foreach {x => out.collect((x._1, x._2))} }) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[ListStateDescriptor[_]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testProcessProcessingTime() { val env = 
StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(TumblingProcessingTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .process( new ProcessWindowFunction[(String, Int), (String, Int), String, TimeWindow] { override def process( key: String, window: Context, input: Iterable[(String, Int)], out: Collector[(String, Int)]): Unit = input foreach {x => out.collect((x._1, x._2))} }) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[ProcessingTimeTrigger]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingProcessingTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[ListStateDescriptor[_]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testApplyEventTimeWithScalaFunction() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(TumblingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .apply { (key, window, in, out: Collector[(String, Int)]) => in foreach { x => out.collect(x)} } val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[ListStateDescriptor[_]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testReduceWithCustomTrigger() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(SlidingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .trigger(CountTrigger.of(1)) .reduce(new DummyReducer) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[CountTrigger[_]]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[SlidingEventTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[ReducingStateDescriptor[_]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, 
BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testFoldWithCustomTrigger() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(SlidingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .trigger(CountTrigger.of(1)) .fold(("", "", 1), new DummyFolder) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[CountTrigger[_]]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[SlidingEventTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[FoldingStateDescriptor[_, _]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testApplyWithCustomTrigger() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(TumblingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .trigger(CountTrigger.of(1)) .apply( new WindowFunction[(String, Int), (String, Int), String, TimeWindow] { override def apply( key: String, window: TimeWindow, input: Iterable[(String, Int)], out: Collector[(String, Int)]): Unit = input foreach {x => out.collect((x._1, x._2))} }) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[CountTrigger[_]]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[ListStateDescriptor[_]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testProcessWithCustomTrigger() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(TumblingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .trigger(CountTrigger.of(1)) .process( new ProcessWindowFunction[(String, Int), (String, Int), String, TimeWindow] { override def process( key: String, window: Context, input: Iterable[(String, Int)], out: Collector[(String, Int)]): Unit = input foreach {x => out.collect((x._1, x._2))} }) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] 
assertTrue(winOperator.getTrigger.isInstanceOf[CountTrigger[_]]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[ListStateDescriptor[_]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testReduceWithEvictor() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(SlidingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .evictor(CountEvictor.of(100)) .reduce(new DummyReducer) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[EvictingWindowOperator[_, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[ EvictingWindowOperator[String, (String, Int), (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger]) assertTrue(winOperator.getEvictor.isInstanceOf[CountEvictor[_]]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[SlidingEventTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[ListStateDescriptor[_]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testReduceWithEvictorAndProcessFunction() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(SlidingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .evictor(CountEvictor.of(100)) .reduce(new DummyReducer, new TestProcessWindowFunction) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[EvictingWindowOperator[_, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[ EvictingWindowOperator[String, (String, Int), (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger]) assertTrue(winOperator.getEvictor.isInstanceOf[CountEvictor[_]]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[SlidingEventTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[ListStateDescriptor[_]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testAggregateWithEvictor() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(SlidingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .evictor(CountEvictor.of(100)) .aggregate(new DummyAggregator()) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), 
_ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[SlidingEventTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[ListStateDescriptor[_]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testAggregateWithEvictorAndProcessFunction() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(SlidingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .evictor(CountEvictor.of(100)) .aggregate(new DummyAggregator(), new TestProcessWindowFunction) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[SlidingEventTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[ListStateDescriptor[_]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testFoldWithEvictor() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(SlidingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .evictor(CountEvictor.of(100)) .fold(("", "", 1), new DummyFolder) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[EvictingWindowOperator[_, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[ EvictingWindowOperator[String, (String, Int), (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger]) assertTrue(winOperator.getEvictor.isInstanceOf[CountEvictor[_]]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[SlidingEventTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[ListStateDescriptor[_]]) winOperator.setOutputType( window1.javaStream.getType.asInstanceOf[TypeInformation[(String, Int)]], new ExecutionConfig) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testFoldWithEvictorAndProcessFunction() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(SlidingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .evictor(CountEvictor.of(100)) .fold(("", "", 1), new DummyFolder, new TestFoldProcessWindowFunction) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator 
assertTrue(operator.isInstanceOf[EvictingWindowOperator[_, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[ EvictingWindowOperator[String, (String, Int), (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger]) assertTrue(winOperator.getEvictor.isInstanceOf[CountEvictor[_]]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[SlidingEventTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[ListStateDescriptor[_]]) winOperator.setOutputType( window1.javaStream.getType.asInstanceOf[TypeInformation[(String, Int)]], new ExecutionConfig) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testApplyWithEvictor() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(TumblingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .evictor(CountEvictor.of(100)) .apply( new WindowFunction[(String, Int), (String, Int), String, TimeWindow] { override def apply( key: String, window: TimeWindow, input: Iterable[(String, Int)], out: Collector[(String, Int)]): Unit = input foreach {x => out.collect((x._1, x._2))} }) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[EvictingWindowOperator[_, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[EvictingWindowOperator[String, (String, Int), (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger]) assertTrue(winOperator.getEvictor.isInstanceOf[CountEvictor[_]]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[ListStateDescriptor[_]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } @Test def testProcessWithEvictor() { val env = StreamExecutionEnvironment.getExecutionEnvironment env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime) val source = env.fromElements(("hello", 1), ("hello", 2)) val window1 = source .keyBy(_._1) .window(TumblingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100))) .evictor(CountEvictor.of(100)) .process( new ProcessWindowFunction[(String, Int), (String, Int), String, TimeWindow] { override def process( key: String, window: Context, input: Iterable[(String, Int)], out: Collector[(String, Int)]): Unit = input foreach {x => out.collect((x._1, x._2))} }) val transform = window1 .javaStream .getTransformation .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]] val operator = transform.getOperator assertTrue(operator.isInstanceOf[EvictingWindowOperator[_, _, _, _ <: Window]]) val winOperator = operator .asInstanceOf[EvictingWindowOperator[String, (String, Int), (String, Int), _ <: Window]] assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger]) assertTrue(winOperator.getEvictor.isInstanceOf[CountEvictor[_]]) assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows]) assertTrue(winOperator.getStateDescriptor.isInstanceOf[ListStateDescriptor[_]]) processElementAndEnsureOutput[String, (String, Int), (String, Int)]( winOperator, 
winOperator.getKeySelector, BasicTypeInfo.STRING_TYPE_INFO, ("hello", 1)) } /** * Ensure that we get some output from the given operator when pushing in an element and * setting watermark and processing time to `Long.MaxValue`. */ @throws[Exception] private def processElementAndEnsureOutput[K, IN, OUT]( operator: OneInputStreamOperator[IN, OUT], keySelector: KeySelector[IN, K], keyType: TypeInformation[K], element: IN) { val testHarness = new KeyedOneInputStreamOperatorTestHarness[K, IN, OUT](operator, keySelector, keyType) if (operator.isInstanceOf[OutputTypeConfigurable[String]]) { // use a dummy type since window functions just need the ExecutionConfig // this is also only needed for Fold, which we're getting rid off soon. operator.asInstanceOf[OutputTypeConfigurable[String]] .setOutputType(BasicTypeInfo.STRING_TYPE_INFO, new ExecutionConfig) } testHarness.open() testHarness.setProcessingTime(0) testHarness.processWatermark(Long.MinValue) testHarness.processElement(new StreamRecord[IN](element, 0)) // provoke any processing-time/event-time triggers testHarness.setProcessingTime(Long.MaxValue) testHarness.processWatermark(Long.MaxValue) // we at least get the two watermarks and should also see an output element assertTrue(testHarness.getOutput.size >= 3) testHarness.close() } } class DummyReducer extends ReduceFunction[(String, Int)] { override def reduce(value1: (String, Int), value2: (String, Int)): (String, Int) = { value1 } } class DummyFolder extends FoldFunction[(String, Int), (String, String, Int)] { override def fold(acc: (String, String, Int), in: (String, Int)): (String, String, Int) = { acc } } class DummyAggregator extends AggregateFunction[(String, Int), (String, Int), (String, Int)] { override def createAccumulator(): (String, Int) = ("", 0) override def merge(a: (String, Int), b: (String, Int)): (String, Int) = a override def getResult(accumulator: (String, Int)): (String, Int) = accumulator override def add(value: (String, Int), accumulator: (String, Int)): Unit = () } class DummyRichAggregator extends RichAggregateFunction[(String, Int), (String, Int), (String, Int)] { override def createAccumulator(): (String, Int) = ("", 0) override def merge(a: (String, Int), b: (String, Int)): (String, Int) = a override def getResult(accumulator: (String, Int)): (String, Int) = accumulator override def add(value: (String, Int), accumulator: (String, Int)): Unit = () } class TestWindowFunction extends WindowFunction[(String, Int), (String, String, Int), String, TimeWindow] { override def apply( key: String, window: TimeWindow, input: Iterable[(String, Int)], out: Collector[(String, String, Int)]): Unit = { input.foreach(e => out.collect((e._1, e._1, e._2))) } } class TestProcessWindowFunction extends ProcessWindowFunction[(String, Int), (String, String, Int), String, TimeWindow] { override def process( key: String, window: Context, input: Iterable[(String, Int)], out: Collector[(String, String, Int)]): Unit = { input.foreach(e => out.collect((e._1, e._1, e._2))) } } class TestFoldProcessWindowFunction extends ProcessWindowFunction[(String, String, Int), (String, Int), String, TimeWindow] { override def process( key: String, window: Context, input: Iterable[(String, String, Int)], out: Collector[(String, Int)]): Unit = { input.foreach(e => out.collect((e._1, e._3))) } }
WangTaoTheTonic/flink
flink-streaming-scala/src/test/scala/org/apache/flink/streaming/api/scala/WindowTranslationTest.scala
Scala
apache-2.0
75,694
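The tests in the file above all follow one pattern: build a keyed window pipeline, then unwrap the resulting OneInputTransformation and assert on the generated WindowOperator, its trigger, assigner and state descriptor. Below is a minimal standalone sketch of that pattern, modeled on the test bodies; the package and imports are assumptions chosen to match the test file (in some Flink versions `javaStream` is package-private to the Scala API package), so treat it as an illustration rather than part of the suite.

// Minimal sketch of the translation pattern exercised by WindowTranslationTest.
package org.apache.flink.streaming.api.scala

import org.apache.flink.streaming.api.transformations.OneInputTransformation
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.runtime.operators.windowing.WindowOperator

object WindowTranslationSketch {
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    val source = env.fromElements(("hello", 1), ("hello", 2))

    // keyBy/window/reduce only *describes* the pipeline; nothing executes here.
    val windowed = source
      .keyBy(_._1)
      .window(TumblingEventTimeWindows.of(Time.seconds(1)))
      .reduce((a, b) => (a._1, a._2 + b._2))

    // Unwrap the Java transformation and inspect the operator that was built,
    // exactly as the assertions in the test do.
    val transform = windowed.javaStream.getTransformation
      .asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]]
    println(transform.getOperator.isInstanceOf[WindowOperator[_, _, _, _, _]]) // expected: true
  }
}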
/* * Copyright 2001-2013 Artima, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.scalatest import scala.collection.immutable.TreeSet import org.scalatest.events._ import java.lang.annotation.AnnotationFormatError import java.nio.charset.CoderMalfunctionError import javax.xml.parsers.FactoryConfigurationError import javax.xml.transform.TransformerFactoryConfigurationError import java.awt.AWTError import SharedHelpers._ import tools.Runner.CHOSEN_STYLES import org.scalatest.Suite.formatterForSuiteStarting import PrivateMethodTester._ /* Uncomment after remove type aliases in org.scalatest package object import org.scalatest.exceptions.NotAllowedException import org.scalatest.exceptions.TestFailedException */ class SuiteSuite extends Suite with SeveredStackTraces { def `test: Suite should discover method names and tags using deprecated Informer form` { val a = new Suite { def testNames(info: Informer): Unit = () } assert(a.expectedTestCount(Filter()) === 1) val tnResult: Set[String] = a.testNames val gResult: Map[String, Set[String]] = a.tags assert(tnResult.size === 1) assert(gResult.keySet.size === 0) } def `test: Suite should discover method names and tags` { val a = new Suite { def testNames(r: Informer): Unit = () } assert(a.expectedTestCount(Filter()) === 1) val tnResult: Set[String] = a.testNames val gResult: Map[String, Set[String]] = a.tags assert(tnResult.size === 1) assert(gResult.keySet.size === 0) } def `test: test methods with no tags should not show up in tags map` { val a = new Suite { def `test: not tagged` = () } assert(a.tags.keySet.size === 0) } def `test: test methods that return non-Unit should be discovered using deprecated Informer form` { val a = new Suite { def testThis(): Int = 1 def testThat(info: Informer): String = "hi" } assert(a.expectedTestCount(Filter()) === 2) assert(a.testNames.size === 2) assert(a.tags.keySet.size === 0) } def `test: overloaded test methods should be discovered using deprecated Informer form` { val a = new Suite { def testThis() = () def testThis(info: Informer) = () } assert(a.expectedTestCount(Filter()) === 2) assert(a.testNames.size === 2) assert(a.tags.keySet.size === 0) } def testThatInterceptCatchesSubtypes() { class MyException extends RuntimeException class MyExceptionSubClass extends MyException intercept[MyException] { throw new MyException new AnyRef // This is needed because right now Nothing doesn't overload as an Any } intercept[MyException] { throw new MyExceptionSubClass new AnyRef // This is needed because right now Nothing doesn't overload as an Any } // Try with a trait trait MyTrait { def someRandomMethod() {} } class AnotherException extends RuntimeException with MyTrait val caught = intercept[MyTrait] { throw new AnotherException new AnyRef // This is needed because right now Nothing doesn't overload as an Any } // Make sure the result type is the type passed in, so I can // not cast and still invoke any method on it I want caught.someRandomMethod() } def testThatInterceptReturnsTheCaughtException() { val e = new 
RuntimeException val result = intercept[RuntimeException] { throw e new AnyRef // This is needed because right now Nothing doesn't overload as an Any } assert(result eq e) } def testStripDollars() { assertResult("MySuite") { Suite.stripDollars("line8$object$$iw$$iw$$iw$$iw$$iw$MySuite") } assertResult("MySuite") { Suite.stripDollars("MySuite") } assertResult("nested.MySuite") { Suite.stripDollars("nested.MySuite") } assertResult("$$$") { Suite.stripDollars("$$$") } assertResult("DollarAtEnd") { Suite.stripDollars("DollarAtEnd$") } assertResult("DollarAtEnd") { Suite.stripDollars("line8$object$$iw$$iw$$iw$$iw$$iw$DollarAtEnd$") } assertResult("MySuite$1") { Suite.stripDollars("MySuite$1") } assertResult("ExampleSuite") { Suite.stripDollars("$read$$iw$$iw$$iw$$iw$ExampleSuite") } assertResult("Fred") { Suite.stripDollars("$line19.$read$$iw$$iw$Fred$") } } def testDiffStrings() { assertResult(("[]", "[a]")) { Suite.diffStrings("", "a") } assertResult(("[a]", "[]")) { Suite.diffStrings("a", "") } assertResult(("a[]", "a[b]")) { Suite.diffStrings("a", "ab") } assertResult(("a[b]", "a[]")) { Suite.diffStrings("ab", "a") } assertResult(("[a]", "[b]")) { Suite.diffStrings("a", "b") } assertResult(("[a]big", "[]big")) { Suite.diffStrings("abig", "big") } assertResult(("[]big", "[a]big")) { Suite.diffStrings("big", "abig") } assertResult(("big[a]", "big[]")) { Suite.diffStrings("biga", "big") } assertResult(("big[]", "big[a]")) { Suite.diffStrings("big", "biga") } assertResult(("small[a]big", "small[]big")) { Suite.diffStrings("smallabig", "smallbig") } assertResult(("0123456789[]0123456789", "0123456789[a]0123456789")) { Suite.diffStrings("01234567890123456789", "0123456789a0123456789") } assertResult(("...01234567890123456789[]0123456789", "...01234567890123456789[a]0123456789")) { Suite.diffStrings("X012345678901234567890123456789", "X01234567890123456789a0123456789") } assertResult(("01234567890123456789[]01234567890123456789...", "01234567890123456789[a]01234567890123456789...")) { Suite.diffStrings("0123456789012345678901234567890123456789X", "01234567890123456789a01234567890123456789X") } assertResult(("...01234567890123456789[]01234567890123456789...", "...01234567890123456789[a]01234567890123456789...")) { Suite.diffStrings("XXXX0123456789012345678901234567890123456789XX", "XXXX01234567890123456789a01234567890123456789XX") } assertResult(("...01234567890123456789[]01234567890123456789...", "...01234567890123456789[a]01234567890123456789...")) { Suite.diffStrings("X0123456789012345678901234567890123456789X", "X01234567890123456789a01234567890123456789X") } } def testDecorateToStringValue() { val decorateToStringValue = PrivateMethod[String]('decorateToStringValue) assertResult("1") { FailureMessages invokePrivate decorateToStringValue(1.toByte) } assertResult("1") { FailureMessages invokePrivate decorateToStringValue(1.toShort) } assertResult("1") { FailureMessages invokePrivate decorateToStringValue(1) } assertResult("10") { FailureMessages invokePrivate decorateToStringValue(10L) } assertResult("1.0") { FailureMessages invokePrivate decorateToStringValue(1.0f) } assertResult("1.0") { FailureMessages invokePrivate decorateToStringValue(1.0) } assertResult("false") { FailureMessages invokePrivate decorateToStringValue(false) } assertResult("true") { FailureMessages invokePrivate decorateToStringValue(true) } assertResult("<(), the Unit value>") { FailureMessages invokePrivate decorateToStringValue(()) } assertResult("\"Howdy!\"") { FailureMessages invokePrivate 
decorateToStringValue("Howdy!") } assertResult("'c'") { FailureMessages invokePrivate decorateToStringValue('c') } assertResult("Hey!") { FailureMessages invokePrivate decorateToStringValue(new AnyRef { override def toString = "Hey!"}) } } def testTestDurations() { class MySuite extends Suite { def testSucceeds() = () def testFails() { fail() } } val mySuite = new MySuite val myReporter = new TestDurationReporter mySuite.run(None, Args(myReporter, Stopper.default, Filter(), ConfigMap.empty, None, new Tracker(new Ordinal(99)), Set.empty)) assert(myReporter.testSucceededWasFiredAndHadADuration) assert(myReporter.testFailedWasFiredAndHadADuration) } def testSuiteDurations() { // the suite duration is sent by runNestedSuites, so MySuite needs a // nested suite class MySuite extends Suite { override def nestedSuites = Vector(new Suite {}) def testSucceeds() = () def testFails() { fail() } } val mySuite = new MySuite val myReporter = new SuiteDurationReporter mySuite.run(None, Args(myReporter, Stopper.default, Filter(), ConfigMap.empty, None, new Tracker(new Ordinal(99)), Set.empty)) assert(myReporter.suiteCompletedWasFiredAndHadADuration) class SuiteThatAborts extends Suite { override def run(testName: Option[String], args: Args): Status = { throw new RuntimeException("Aborting for testing purposes") } } // the suite duration is sent by runNestedSuites, so MySuite needs a // nested suite class MyOtherSuite extends Suite { override def nestedSuites = Vector(new SuiteThatAborts) def testSucceeds() = () def testFails() { fail() } } val myOtherSuite = new MyOtherSuite val myOtherReporter = new SuiteDurationReporter myOtherSuite.run(None, Args(myOtherReporter, Stopper.default, Filter(), ConfigMap.empty, None, new Tracker(new Ordinal(99)), Set.empty)) assert(myOtherReporter.suiteAbortedWasFiredAndHadADuration) } def testPending() { class MySuite extends Suite { def testPending() { pending } } val mySuite = new MySuite val myReporter = new PendingReporter mySuite.run(None, Args(myReporter, Stopper.default, Filter(), ConfigMap.empty, None, new Tracker(new Ordinal(99)), Set.empty)) assert(myReporter.testPendingWasFired) } def testPrettifyArray() { // non arrays print just a toString assert(FailureMessages.decorateToStringValue(1) === "1") assert(FailureMessages.decorateToStringValue("hi") === "\"hi\"") assert(FailureMessages.decorateToStringValue(List(1, 2, 3)) === "List(1, 2, 3)") assert(FailureMessages.decorateToStringValue(Map("one" -> 1)) === "Map(\"one\" -> 1)") // arrays print pretty assert(FailureMessages.decorateToStringValue(Array(1, 2)) === "Array(1, 2)") // arrays of arrays print pretty assert(FailureMessages.decorateToStringValue(Array(Array(1, 2), Array(3, 4))) === "Array(Array(1, 2), Array(3, 4))") } class TestWasCalledSuite extends Suite { var theTestThisCalled = false var theTestThatCalled = false var theTestTheOtherCalled = false var theTestThisConfigMapWasEmpty = true var theTestThatConfigMapWasEmpty = true var theTestTheOtherConfigMapWasEmpty = true override def withFixture(test: NoArgTest): Outcome = { if (test.configMap.size > 0) test.name match { case "testThis" => theTestThisConfigMapWasEmpty = false case "testThat" => theTestThatConfigMapWasEmpty = false case "testTheOther" => theTestTheOtherConfigMapWasEmpty = false case _ => throw new Exception("Should never happen") } test() } def testThis() { theTestThisCalled = true } def testThat() { theTestThatCalled = true } def testTheOther() { theTestTheOtherCalled = true } } def testExecute() { val s1 = new TestWasCalledSuite 
s1.execute() assert(s1.theTestThisCalled) assert(s1.theTestThatCalled) assert(s1.theTestTheOtherCalled) assert(s1.theTestThisConfigMapWasEmpty) assert(s1.theTestThatConfigMapWasEmpty) assert(s1.theTestTheOtherConfigMapWasEmpty) val s2 = new TestWasCalledSuite s2.execute("testThis") assert(s2.theTestThisCalled) assert(!s2.theTestThatCalled) assert(!s2.theTestTheOtherCalled) assert(s2.theTestThisConfigMapWasEmpty) assert(s2.theTestThatConfigMapWasEmpty) assert(s2.theTestTheOtherConfigMapWasEmpty) val s3 = new TestWasCalledSuite s3.execute(configMap = ConfigMap("s" -> "s")) assert(s3.theTestThisCalled) assert(s3.theTestThatCalled) assert(s3.theTestTheOtherCalled) assert(!s3.theTestThisConfigMapWasEmpty) assert(!s3.theTestThatConfigMapWasEmpty) assert(!s3.theTestTheOtherConfigMapWasEmpty) val s4 = new TestWasCalledSuite s4.execute("testThis", ConfigMap("s" -> "s")) assert(s4.theTestThisCalled) assert(!s4.theTestThatCalled) assert(!s4.theTestTheOtherCalled) assert(!s4.theTestThisConfigMapWasEmpty) assert(s4.theTestThatConfigMapWasEmpty) assert(s4.theTestTheOtherConfigMapWasEmpty) val s5 = new TestWasCalledSuite s5.execute(testName = "testThis") assert(s5.theTestThisCalled) assert(!s5.theTestThatCalled) assert(!s5.theTestTheOtherCalled) assert(s5.theTestThisConfigMapWasEmpty) assert(s5.theTestThatConfigMapWasEmpty) assert(s5.theTestTheOtherConfigMapWasEmpty) val s6 = new TestWasCalledSuite s6.execute(testName = "testThis", configMap = ConfigMap("s" -> "s")) assert(s6.theTestThisCalled) assert(!s6.theTestThatCalled) assert(!s6.theTestTheOtherCalled) assert(!s6.theTestThisConfigMapWasEmpty) assert(s6.theTestThatConfigMapWasEmpty) assert(s6.theTestTheOtherConfigMapWasEmpty) } def `test: execute should use dynamic tagging to enable Doenitz wildcards for non-encoded test names` { val s1 = new TestWasCalledSuite s1.execute("Th") assert(s1.theTestThisCalled) assert(s1.theTestThatCalled) assert(s1.theTestTheOtherCalled) assert(s1.theTestThisConfigMapWasEmpty) assert(s1.theTestThatConfigMapWasEmpty) assert(s1.theTestTheOtherConfigMapWasEmpty) val s2 = new TestWasCalledSuite s2.execute("This") assert(s2.theTestThisCalled) assert(!s2.theTestThatCalled) assert(!s2.theTestTheOtherCalled) assert(s2.theTestThisConfigMapWasEmpty) assert(s2.theTestThatConfigMapWasEmpty) assert(s2.theTestTheOtherConfigMapWasEmpty) val s3 = new TestWasCalledSuite s3.execute("Th", configMap = ConfigMap("s" -> "s")) assert(s3.theTestThisCalled) assert(s3.theTestThatCalled) assert(s3.theTestTheOtherCalled) assert(!s3.theTestThisConfigMapWasEmpty) assert(!s3.theTestThatConfigMapWasEmpty) assert(!s3.theTestTheOtherConfigMapWasEmpty) val s4 = new TestWasCalledSuite s4.execute("Th", ConfigMap("s" -> "s")) assert(s4.theTestThisCalled) assert(s4.theTestThatCalled) assert(s4.theTestTheOtherCalled) assert(!s4.theTestThisConfigMapWasEmpty) assert(!s4.theTestThatConfigMapWasEmpty) assert(!s4.theTestTheOtherConfigMapWasEmpty) val s5 = new TestWasCalledSuite s5.execute(testName = "Th") assert(s5.theTestThisCalled) assert(s5.theTestThatCalled) assert(s5.theTestTheOtherCalled) assert(s5.theTestThisConfigMapWasEmpty) assert(s5.theTestThatConfigMapWasEmpty) assert(s5.theTestTheOtherConfigMapWasEmpty) val s6 = new TestWasCalledSuite s6.execute(testName = "This", configMap = ConfigMap("s" -> "s")) assert(s6.theTestThisCalled) assert(!s6.theTestThatCalled) assert(!s6.theTestTheOtherCalled) assert(!s6.theTestThisConfigMapWasEmpty) assert(s6.theTestThatConfigMapWasEmpty) assert(s6.theTestTheOtherConfigMapWasEmpty) } def `test: execute should use dynamic 
tagging to enable Doenitz wildcards for encoded test names` { class TestWasCalledSuite extends Suite { var theTestThisCalled = false var theTestThatCalled = false var theTestTheOtherCalled = false var theTestThisConfigMapWasEmpty = true var theTestThatConfigMapWasEmpty = true var theTestTheOtherConfigMapWasEmpty = true override def withFixture(test: NoArgTest): Outcome = { if (test.configMap.size > 0) test.name match { case "test$u0020this" => theTestThisConfigMapWasEmpty = false case "test$u0020that" => theTestThatConfigMapWasEmpty = false case "test$u0020the$u0020other" => theTestTheOtherConfigMapWasEmpty = false case _ => throw new Exception("Should never happen") } test() } def `test this` { theTestThisCalled = true } def `test that` { theTestThatCalled = true } def `test the other` { theTestTheOtherCalled = true } } val s1 = new TestWasCalledSuite s1.execute(" th") assert(s1.theTestThisCalled) assert(s1.theTestThatCalled) assert(s1.theTestTheOtherCalled) assert(s1.theTestThisConfigMapWasEmpty) assert(s1.theTestThatConfigMapWasEmpty) assert(s1.theTestTheOtherConfigMapWasEmpty) val s2 = new TestWasCalledSuite s2.execute(" this") assert(s2.theTestThisCalled) assert(!s2.theTestThatCalled) assert(!s2.theTestTheOtherCalled) assert(s2.theTestThisConfigMapWasEmpty) assert(s2.theTestThatConfigMapWasEmpty) assert(s2.theTestTheOtherConfigMapWasEmpty) val s3 = new TestWasCalledSuite s3.execute(" th", configMap = ConfigMap("s" -> "s")) assert(s3.theTestThisCalled) assert(s3.theTestThatCalled) assert(s3.theTestTheOtherCalled) assert(!s3.theTestThisConfigMapWasEmpty) assert(!s3.theTestThatConfigMapWasEmpty) assert(!s3.theTestTheOtherConfigMapWasEmpty) val s4 = new TestWasCalledSuite s4.execute(" th", ConfigMap("s" -> "s")) assert(s4.theTestThisCalled) assert(s4.theTestThatCalled) assert(s4.theTestTheOtherCalled) assert(!s4.theTestThisConfigMapWasEmpty) assert(!s4.theTestThatConfigMapWasEmpty) assert(!s4.theTestTheOtherConfigMapWasEmpty) val s5 = new TestWasCalledSuite s5.execute(testName = " th") assert(s5.theTestThisCalled) assert(s5.theTestThatCalled) assert(s5.theTestTheOtherCalled) assert(s5.theTestThisConfigMapWasEmpty) assert(s5.theTestThatConfigMapWasEmpty) assert(s5.theTestTheOtherConfigMapWasEmpty) val s6 = new TestWasCalledSuite s6.execute(testName = " this", configMap = ConfigMap("s" -> "s")) assert(s6.theTestThisCalled) assert(!s6.theTestThatCalled) assert(!s6.theTestTheOtherCalled) assert(!s6.theTestThisConfigMapWasEmpty) assert(s6.theTestThatConfigMapWasEmpty) assert(s6.theTestTheOtherConfigMapWasEmpty) } def `test: Suite should order encoded names in alphabetical decoded order` { // + comes before - // but $plus comes after $minus class ASuite extends Suite { def `test: the + operator should add` { val sum = 1 + 1 assert(sum === 2) } def `test: the - operator should subtract` { val diff = 4 - 1 assert(diff === 3) } } val a = new ASuite val expectedTestNames = List("" + "test$colon$u0020the$u0020$plus$u0020operator$u0020should$u0020add", "test$colon$u0020the$u0020$minus$u0020operator$u0020should$u0020subtract" ) assert(a.testNames.iterator.toList === expectedTestNames) } def testTestTags() { class TagSuite extends Suite { def testNoTagMethod() {} @SlowAsMolasses def testTagMethod() {} } val testTags = new TagSuite().tags assert(testTags.size === 1) val tagSet = testTags.getOrElse("testTagMethod", null) assert(tagSet != null) assert(tagSet.size === 1) assert(tagSet.toList(0) === classOf[SlowAsMolasses].getName) } def testRunNestedSuite() { class NoTagSuite extends Suite @Ignore class 
IgnoreSuite extends Suite { def testMethod1() {} def testMethod2() {} def testMethod3() {} } @SlowAsMolasses class SlowAsMolassesSuite extends Suite @FastAsLight class FastAsLightSuite extends Suite class MasterSuite extends Suite { override def nestedSuites = Vector(new NoTagSuite(), new IgnoreSuite(), new SlowAsMolassesSuite(), new FastAsLightSuite()) override def runNestedSuites(args: Args): Status = { super.runNestedSuites(args) } } class CounterDistributor extends Distributor { var count = 0 def apply(suite: Suite, args: Args): Status = { count += 1 SucceededStatus } def apply(suite: Suite, tracker: Tracker) { count += 1 } } val masterSuite = new MasterSuite() val defaultFilter = new Filter(None, Set.empty) val defaultReporter = new EventRecordingReporter masterSuite.runNestedSuites(Args(defaultReporter, Stopper.default, defaultFilter, ConfigMap.empty, None, new Tracker(new Ordinal(99)), Set.empty)) assert(defaultReporter.suiteStartingEventsReceived.size === 4) assert(defaultReporter.testIgnoredEventsReceived.size === 3) val defaultReporterDist = new EventRecordingReporter val defaultDistributor = new CounterDistributor masterSuite.runNestedSuites(Args(defaultReporterDist, Stopper.default, defaultFilter, ConfigMap.empty, Some(defaultDistributor), new Tracker(new Ordinal(99)), Set.empty)) assert(defaultDistributor.count === 4) val includeFilter = new Filter(Some(Set("org.scalatest.FastAsLight")), Set.empty) val includeReporter = new EventRecordingReporter masterSuite.runNestedSuites(Args(includeReporter, Stopper.default, includeFilter, ConfigMap.empty, None, new Tracker(new Ordinal(99)), Set.empty)) assert(includeReporter.suiteStartingEventsReceived.size === 4) assert(includeReporter.testIgnoredEventsReceived.size === 0) val includeReporterDist = new EventRecordingReporter val includeDistributor = new CounterDistributor masterSuite.runNestedSuites(Args(includeReporterDist, Stopper.default, includeFilter, ConfigMap.empty, Some(includeDistributor), new Tracker(new Ordinal(99)), Set.empty)) assert(includeDistributor.count === 4) val excludeFilter = new Filter(None, Set("org.scalatest.SlowAsMolasses")) val excludeReporter = new EventRecordingReporter masterSuite.runNestedSuites(Args(excludeReporter, Stopper.default, excludeFilter, ConfigMap.empty, None, new Tracker(new Ordinal(99)), Set.empty)) assert(excludeReporter.suiteStartingEventsReceived.size === 4) assert(excludeReporter.testIgnoredEventsReceived.size === 3) val excludeReporterDist = new EventRecordingReporter val excludeDistributor = new CounterDistributor masterSuite.runNestedSuites(Args(excludeReporterDist, Stopper.default, excludeFilter, ConfigMap.empty, Some(excludeDistributor), new Tracker(new Ordinal(99)), Set.empty)) assert(excludeDistributor.count === 4) } def testExpectedTestCount() { class NoTagSuite extends Suite { def testMethod1() {} def testMethod2() {} def testMethod3() {} } @Ignore class IgnoreSuite extends Suite { def testMethod1() {} def testMethod2() {} def testMethod3() {} } @SlowAsMolasses class SlowAsMolassesSuite extends Suite { def testMethod1() {} def testMethod2() {} def testMethod3() {} } @FastAsLight class FastAsLightSuite extends Suite { def testMethod1() {} def testMethod2() {} def testMethod3() {} } class MasterSuite extends Suite { override def nestedSuites = Vector(new NoTagSuite(), new IgnoreSuite(), new SlowAsMolassesSuite(), new FastAsLightSuite()) override def runNestedSuites(args: Args): Status = { super.runNestedSuites(args) } } val masterSuite = new MasterSuite() 
assert(masterSuite.expectedTestCount(new Filter(None, Set.empty)) === 9) assert(masterSuite.expectedTestCount(new Filter(Some(Set("org.scalatest.FastAsLight")), Set.empty)) === 3) assert(masterSuite.expectedTestCount(new Filter(None, Set("org.scalatest.FastAsLight"))) === 6) assert(masterSuite.expectedTestCount(new Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set.empty)) === 3) assert(masterSuite.expectedTestCount(new Filter(None, Set("org.scalatest.SlowAsMolasses"))) === 6) } def testSuiteRunner() { assert(new NormalSuite().rerunner.get === classOf[NormalSuite].getName) assert(new WrappedSuite(Map.empty).rerunner.get === classOf[WrappedSuite].getName) assert(new NotAccessibleSuite("test").rerunner === None) } def testCheckChosenStyles() { class SimpleSuite extends Suite { def testMethod1() {} def testMethod2() {} def testMethod3() {} } val simpleSuite = new SimpleSuite() simpleSuite.run(None, Args(SilentReporter)) simpleSuite.run(None, Args(SilentReporter, Stopper.default, Filter(), ConfigMap(CHOSEN_STYLES -> Set("org.scalatest.Suite")), None, new Tracker, Set.empty)) val caught = intercept[NotAllowedException] { simpleSuite.run(None, Args(SilentReporter, Stopper.default, Filter(), ConfigMap(CHOSEN_STYLES -> Set("org.scalatest.FunSpec")), None, new Tracker, Set.empty)) } import OptionValues._ assert(caught.message.value === Resources("notTheChosenStyle", "org.scalatest.Suite", "org.scalatest.FunSpec")) val caught2 = intercept[NotAllowedException] { simpleSuite.run(None, Args(SilentReporter, Stopper.default, Filter(), ConfigMap(CHOSEN_STYLES -> Set("org.scalatest.FunSpec", "org.scalatest.FreeSpec")), None, new Tracker, Set.empty)) } assert(caught2.message.value === Resources("notOneOfTheChosenStyles", "org.scalatest.Suite", Suite.makeListForHumans(Vector("org.scalatest.FunSpec", "org.scalatest.FreeSpec")))) val caught3 = intercept[NotAllowedException] { simpleSuite.run(None, Args(SilentReporter, Stopper.default, Filter(), ConfigMap(CHOSEN_STYLES -> Set("org.scalatest.FunSpec", "org.scalatest.FreeSpec", "org.scalatest.FlatSpec")), None, new Tracker, Set.empty)) } assert(caught3.message.value === Resources("notOneOfTheChosenStyles", "org.scalatest.Suite", Suite.makeListForHumans(Vector("org.scalatest.FunSpec", "org.scalatest.FreeSpec", "org.scalatest.FlatSpec")))) } def testMakeListForHumans() { assert(Suite.makeListForHumans(Vector.empty) === "<empty list>") assert(Suite.makeListForHumans(Vector("")) === "\"\"") assert(Suite.makeListForHumans(Vector(" ")) === "\" \"") assert(Suite.makeListForHumans(Vector("FunSuite FunSpec")) === "\"FunSuite FunSpec\"") assert(Suite.makeListForHumans(Vector("hi")) === "hi") assert(Suite.makeListForHumans(Vector("ho")) === "ho") assert(Suite.makeListForHumans(Vector("hi", "ho")) === Resources("leftAndRight", "hi", "ho")) assert(Suite.makeListForHumans(Vector("fee", "fie", "foe", "fum")) === "fee, fie, " + Resources("leftCommaAndRight", "foe", "fum")) assert(Suite.makeListForHumans(Vector("A", "stitch", "in", "time", "saves", "nine")) === "A, stitch, in, time, " + Resources("leftCommaAndRight", "saves", "nine")) assert(Suite.makeListForHumans(Vector("fee ", "fie", " foe", "fum")) === "\"fee \", fie, " + Resources("leftCommaAndRight", "\" foe\"", "fum")) } def testStackDepth() { class TestSpec extends Suite { def testFailure() { assert(1 === 2) } } val rep = new EventRecordingReporter val s1 = new TestSpec s1.run(None, Args(rep)) assert(rep.testFailedEventsReceived.size === 1) 
assert(rep.testFailedEventsReceived(0).throwable.get.asInstanceOf[TestFailedException].failedCodeFileName.get === "SuiteSuite.scala") assert(rep.testFailedEventsReceived(0).throwable.get.asInstanceOf[TestFailedException].failedCodeLineNumber.get === thisLineNumber - 8) } def testAnErrorThatShouldCauseAnAbort() { assertResult(true) { Suite.anExceptionThatShouldCauseAnAbort(new AnnotationFormatError("oops")) } assertResult(true) { Suite.anExceptionThatShouldCauseAnAbort(new AWTError("ouch")) } assertResult(true) { Suite.anExceptionThatShouldCauseAnAbort(new CoderMalfunctionError(new Exception)) } assertResult(true) { Suite.anExceptionThatShouldCauseAnAbort(new FactoryConfigurationError) } assertResult(true) { Suite.anExceptionThatShouldCauseAnAbort(new LinkageError) } assertResult(true) { Suite.anExceptionThatShouldCauseAnAbort(new ThreadDeath) } assertResult(true) { Suite.anExceptionThatShouldCauseAnAbort(new TransformerFactoryConfigurationError) } assertResult(true) { Suite.anExceptionThatShouldCauseAnAbort(new VirtualMachineError {}) } assertResult(true) { Suite.anExceptionThatShouldCauseAnAbort(new OutOfMemoryError) } assertResult(false) { Suite.anExceptionThatShouldCauseAnAbort(new AssertionError) } assertResult(false) { Suite.anExceptionThatShouldCauseAnAbort(new RuntimeException) } } // // Verify that Suites that don't contain any tests but do // contain nested Suites get a MotionToSuppress formatter for // SuiteStarting events. // def testFormatterForSuiteStarting() { val emptySuite = new Suite {} val emptySuiteContainingNestedSuites = new Suites(emptySuite, new NormalSuite) val nonEmptySuite = new Suite { def testFoo {} } val nonEmptySuiteContainingNestedSuites = new Suites(emptySuite, new NormalSuite) { def testFoo {} } assert( formatterForSuiteStarting(emptySuite) !== Some(MotionToSuppress)) assert( formatterForSuiteStarting(emptySuiteContainingNestedSuites) === Some(MotionToSuppress)) assert( formatterForSuiteStarting(nonEmptySuite) !== Some(MotionToSuppress)) assert( formatterForSuiteStarting(nonEmptySuiteContainingNestedSuites) !== Some(MotionToSuppress)) } } @DoNotDiscover class `My Test` extends Suite {} @DoNotDiscover class NormalSuite extends Suite @DoNotDiscover @WrapWith(classOf[ConfigMapWrapperSuite]) class WrappedSuite(configMap: Map[_, _]) extends Suite @DoNotDiscover class NotAccessibleSuite(name: String) extends Suite
travisbrown/scalatest
src/test/scala/org/scalatest/SuiteSuite.scala
Scala
apache-2.0
30,155
package net.nomadicalien.ch7 import java.util.concurrent.{Callable, CountDownLatch, ExecutorService} object Nonblocking { trait Future[+A] { private[ch7] def apply(k: A => Unit): Unit } type Par[+A] = ExecutorService => Future[A] object Par { def run[A](es: ExecutorService)(p: Par[A]): A = { val ref = new java.util.concurrent.atomic.AtomicReference[A] // A mutable, threadsafe reference, to use for storing the result val latch = new CountDownLatch(1) // A latch which, when decremented, implies that `ref` has the result p(es) { a => ref.set(a); latch.countDown } // Asynchronously set the result, and decrement the latch latch.await // Block until the `latch.countDown` is invoked asynchronously ref.get // Once we've passed the latch, we know `ref` has been set, and return its value } def unit[A](a: A): Par[A] = es => new Future[A] { def apply(cb: A => Unit): Unit = cb(a) } /** A non-strict version of `unit` */ def delay[A](a: => A): Par[A] = es => new Future[A] { def apply(cb: A => Unit): Unit = cb(a) } def fork[A](a: => Par[A]): Par[A] = es => new Future[A] { def apply(cb: A => Unit): Unit = eval(es)(a(es)(cb)) } /** * Helper function for constructing `Par` values out of calls to non-blocking continuation-passing-style APIs. * This will come in handy in Chapter 13. */ def async[A](f: (A => Unit) => Unit): Par[A] = es => new Future[A] { def apply(k: A => Unit) = f(k) } /** * Helper function, for evaluating an action * asynchronously, using the given `ExecutorService`. */ def eval(es: ExecutorService)(r: => Unit): Unit = es.submit(new Callable[Unit] { def call = r }) def map2[A,B,C](p: Par[A], p2: Par[B])(f: (A,B) => C): Par[C] = es => new Future[C] { def apply(cb: C => Unit): Unit = { var ar: Option[A] = None var br: Option[B] = None // this implementation is a little too liberal in forking of threads - // it forks a new logical thread for the actor and for stack-safety, // forks evaluation of the callback `cb` val combiner = Actor[Either[A,B]](es) { case Left(a) => if (br.isDefined) eval(es)(cb(f(a,br.get))) else ar = Some(a) case Right(b) => if (ar.isDefined) eval(es)(cb(f(ar.get,b))) else br = Some(b) } p(es)(a => combiner ! Left(a)) p2(es)(b => combiner ! Right(b)) } } // specialized version of `map` def map[A,B](p: Par[A])(f: A => B): Par[B] = es => new Future[B] { def apply(cb: B => Unit): Unit = p(es)(a => eval(es) { cb(f(a)) }) } def lazyUnit[A](a: => A): Par[A] = fork(unit(a)) def asyncF[A,B](f: A => B): A => Par[B] = a => lazyUnit(f(a)) def sequenceRight[A](as: List[Par[A]]): Par[List[A]] = as match { case Nil => unit(Nil) case h :: t => map2(h, fork(sequence(t)))(_ :: _) } def sequenceBalanced[A](as: IndexedSeq[Par[A]]): Par[IndexedSeq[A]] = fork { if (as.isEmpty) unit(Vector()) else if (as.length == 1) map(as.head)(a => Vector(a)) else { val (l,r) = as.splitAt(as.length/2) map2(sequenceBalanced(l), sequenceBalanced(r))(_ ++ _) } } def sequence[A](as: List[Par[A]]): Par[List[A]] = map(sequenceBalanced(as.toIndexedSeq))(_.toList) def parMap[A,B](as: List[A])(f: A => B): Par[List[B]] = sequence(as.map(asyncF(f))) def parMap[A,B](as: IndexedSeq[A])(f: A => B): Par[IndexedSeq[B]] = sequenceBalanced(as.map(asyncF(f))) // exercise answers /* * We can implement `choice` as a new primitive. * * `p(es)(result => ...)` for some `ExecutorService`, `es`, and * some `Par`, `p`, is the idiom for running `p`, and registering * a callback to be invoked when its result is available. The * result will be bound to `result` in the function passed to * `p(es)`. 
* * If you find this code difficult to follow, you may want to * write down the type of each subexpression and follow the types * through the implementation. What is the type of `p(es)`? What * about `t(es)`? What about `t(es)(cb)`? */ def choice[A](p: Par[Boolean])(t: Par[A], f: Par[A]): Par[A] = es => new Future[A] { def apply(cb: A => Unit): Unit = p(es) { b => if (b) eval(es) { t(es)(cb) } else eval(es) { f(es)(cb) } } } /* The code here is very similar. */ def choiceN[A](p: Par[Int])(ps: List[Par[A]]): Par[A] = es => new Future[A] { def apply(cb: A => Unit): Unit = p(es) { ind => eval(es) { ps(ind)(es)(cb) }} } def choiceViaChoiceN[A](a: Par[Boolean])(ifTrue: Par[A], ifFalse: Par[A]): Par[A] = choiceN(map(a)(b => if (b) 0 else 1))(List(ifTrue, ifFalse)) def choiceMap[K,V](p: Par[K])(ps: Map[K,Par[V]]): Par[V] = es => new Future[V] { def apply(cb: V => Unit): Unit = p(es)(k => ps(k)(es)(cb)) } /* `chooser` is usually called `flatMap` or `bind`. */ def chooser[A,B](p: Par[A])(f: A => Par[B]): Par[B] = flatMap(p)(f) def flatMap[A,B](p: Par[A])(f: A => Par[B]): Par[B] = es => new Future[B] { def apply(cb: B => Unit): Unit = p(es)(a => f(a)(es)(cb)) } def choiceViaFlatMap[A](p: Par[Boolean])(f: Par[A], t: Par[A]): Par[A] = flatMap(p)(b => if (b) t else f) def choiceNViaFlatMap[A](p: Par[Int])(choices: List[Par[A]]): Par[A] = flatMap(p)(i => choices(i)) def join[A](p: Par[Par[A]]): Par[A] = es => new Future[A] { def apply(cb: A => Unit): Unit = p(es)(p2 => eval(es) { p2(es)(cb) }) } def joinViaFlatMap[A](a: Par[Par[A]]): Par[A] = flatMap(a)(x => x) def flatMapViaJoin[A,B](p: Par[A])(f: A => Par[B]): Par[B] = join(map(p)(f)) /* Gives us infix syntax for `Par`. */ implicit def toParOps[A](p: Par[A]): ParOps[A] = new ParOps(p) // infix versions of `map`, `map2` and `flatMap` class ParOps[A](p: Par[A]) { def map[B](f: A => B): Par[B] = Par.map(p)(f) def map2[B,C](b: Par[B])(f: (A,B) => C): Par[C] = Par.map2(p,b)(f) def flatMap[B](f: A => Par[B]): Par[B] = Par.flatMap(p)(f) def zip[B](b: Par[B]): Par[(A,B)] = p.map2(b)((_,_)) } } }
BusyByte/func-prog-scala
exercises-and-notes/src/main/scala/net/nomadicalien/ch7/Nonblocking.scala
Scala
apache-2.0
6,588
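`Nonblocking.Par` above is only a description: nothing runs until `Par.run` submits work to an ExecutorService and blocks on a latch waiting for the callback. A minimal usage sketch follows, assuming only the file above is on the classpath; it sticks to `lazyUnit`/`map`/`run`, since `map2` and `parMap` additionally require the chapter's separate `Actor` helper, which is not shown here.

import java.util.concurrent.Executors
import net.nomadicalien.ch7.Nonblocking.Par

object ParSketch {
  def main(args: Array[String]): Unit = {
    val es = Executors.newFixedThreadPool(2)
    try {
      // Build the description; no work is done yet.
      val answer = Par.map(Par.lazyUnit(40 + 1))(_ + 1)
      // run registers a callback, blocks on the latch, and returns the result.
      println(Par.run(es)(answer)) // prints 42
    } finally {
      es.shutdown()
    }
  }
}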
class C {
  def f(a: Int, b: Int = 0) = 0
  f(z = 0, a = 1)
}
AlexSikia/dotty
tests/untried/neg/t4928.scala
Scala
bsd-3-clause
62
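The file above is a negative compiler test: `f` declares no parameter named `z`, so the named-argument call is expected to be rejected. For contrast, a hypothetical sketch (not part of the dotty test suite) of calls that do satisfy the signature:

class C2 {
  def f(a: Int, b: Int = 0) = 0
  f(b = 0, a = 1) // named arguments may appear in any order
  f(a = 1)        // b falls back to its default value 0
}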
package metaconfig.internal

object Levenshtein {

  def closestCandidate(
      query: String,
      candidates: Seq[String]
  ): Option[String] = {
    if (candidates.isEmpty) {
      None
    } else {
      val candidate = candidates.sortBy(distance(query)).head
      val maxLength = query.length() + candidate.length()
      val minDifference = math.abs(query.length() - candidate.length())
      val difference = distance(candidate)(query).toDouble - minDifference
      val ratio = difference.toDouble / math.min(query.length(), candidate.length())
      if (ratio < 0.4) Some(candidate)
      else None // Don't return candidate when difference is large.
    }
  }

  /** Levenshtein distance. Implementation based on Wikipedia's algorithm. */
  def distance(s1: String)(s2: String): Int = {
    val dist = Array.tabulate(s2.length + 1, s1.length + 1) { (j, i) =>
      if (j == 0) i
      else if (i == 0) j
      else 0
    }
    for (j <- 1 to s2.length; i <- 1 to s1.length)
      dist(j)(i) =
        if (s2(j - 1) == s1(i - 1)) dist(j - 1)(i - 1)
        else dist(j - 1)(i)
          .min(dist(j)(i - 1))
          .min(dist(j - 1)(i - 1)) + 1
    dist(s2.length)(s1.length)
  }
}
olafurpg/metaconfig
metaconfig-core/shared/src/main/scala/metaconfig/internal/Levenshtein.scala
Scala
apache-2.0
1,219
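A short usage sketch of the two methods above (the object name is hypothetical). The expected outputs follow from the code: "kitten" to "sitting" takes three edits, and for "maxCount" vs "maxColumn" the distance is 3, the minimum length difference is 1, so the ratio 2 / 8 = 0.25 clears the 0.4 cut-off.

import metaconfig.internal.Levenshtein

object LevenshteinSketch {
  def main(args: Array[String]): Unit = {
    // Plain edit distance.
    println(Levenshtein.distance("kitten")("sitting")) // 3

    // Typo suggestion: close enough, so a candidate is returned.
    println(Levenshtein.closestCandidate("maxCount", Seq("maxColumn", "includeFilters")))
    // Some(maxColumn)

    // Nothing plausible: the ratio is too large, so no suggestion is made.
    println(Levenshtein.closestCandidate("xyz", Seq("maxColumn")))
    // None
  }
}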
/* sbt -- Simple Build Tool * Copyright 2008, 2009 Mark Harrah, Vesa Vilhonen */ package sbt import java.io.File import java.net.{URL, URLClassLoader} import java.lang.reflect.{Method, Modifier} import Modifier.{isPublic, isStatic} import classpath.ClasspathUtilities trait ScalaRun { def run(mainClass: String, classpath: Seq[File], options: Seq[String], log: Logger): Option[String] } class ForkRun(config: ForkScalaRun) extends ScalaRun { def run(mainClass: String, classpath: Seq[File], options: Seq[String], log: Logger): Option[String] = { val scalaOptions = classpathOption(classpath) ::: mainClass :: options.toList val strategy = config.outputStrategy getOrElse LoggedOutput(log) val exitCode = Fork.scala(config.javaHome, config.runJVMOptions, config.scalaJars, scalaOptions, config.workingDirectory, strategy) processExitCode(exitCode, "runner") } private def classpathOption(classpath: Seq[File]) = "-cp" :: Path.makeString(classpath) :: Nil private def processExitCode(exitCode: Int, label: String) = { if(exitCode == 0) None else Some("Nonzero exit code returned from " + label + ": " + exitCode) } } class Run(instance: ScalaInstance, trapExit: Boolean) extends ScalaRun { /** Runs the class 'mainClass' using the given classpath and options using the scala runner.*/ def run(mainClass: String, classpath: Seq[File], options: Seq[String], log: Logger) = { log.info("Running " + mainClass + " " + options.mkString(" ")) def execute = try { run0(mainClass, classpath, options, log) } catch { case e: java.lang.reflect.InvocationTargetException => throw e.getCause } def directExecute = try { execute; None } catch { case e: Exception => log.trace(e); Some(e.toString) } if(trapExit) Run.executeTrapExit( execute, log ) else directExecute } private def run0(mainClassName: String, classpath: Seq[File], options: Seq[String], log: Logger) { log.debug(" Classpath:\\n\\t" + classpath.mkString("\\n\\t")) val loader = ClasspathUtilities.makeLoader(classpath, instance) val main = getMainMethod(mainClassName, loader) invokeMain(loader, main, options) } private def invokeMain(loader: ClassLoader, main: Method, options: Seq[String]) { val currentThread = Thread.currentThread val oldLoader = Thread.currentThread.getContextClassLoader() currentThread.setContextClassLoader(loader) try { main.invoke(null, options.toArray[String].asInstanceOf[Array[String]] ) } finally { currentThread.setContextClassLoader(oldLoader) } } def getMainMethod(mainClassName: String, loader: ClassLoader) = { val mainClass = Class.forName(mainClassName, true, loader) val method = mainClass.getMethod("main", classOf[Array[String]]) val modifiers = method.getModifiers if(!isPublic(modifiers)) throw new NoSuchMethodException(mainClassName + ".main is not public") if(!isStatic(modifiers)) throw new NoSuchMethodException(mainClassName + ".main is not static") method } } /** This module is an interface to starting the scala interpreter or runner.*/ object Run { def run(mainClass: String, classpath: Seq[File], options: Seq[String], log: Logger)(implicit runner: ScalaRun) = runner.run(mainClass, classpath, options, log) /** Executes the given function, trapping calls to System.exit. */ def executeTrapExit(f: => Unit, log: Logger): Option[String] = { val exitCode = TrapExit(f, log) if(exitCode == 0) { log.debug("Exited with code 0") None } else Some("Nonzero exit code: " + exitCode) } }
ornicar/xsbt
run/Run.scala
Scala
bsd-3-clause
3,501
package com.github.jeanadrien.gatling.mqtt.actions

import com.github.jeanadrien.gatling.mqtt.client.MqttQoS
import com.github.jeanadrien.gatling.mqtt.client.MqttQoS.MqttQoS
import com.softwaremill.quicklens._
import io.gatling.core.action.Action
import io.gatling.core.session._
import io.gatling.core.structure.ScenarioContext
import org.fusesource.mqtt.client.QoS

import scala.concurrent.duration._

/**
 *
 */
case class PublishAndWaitActionBuilder(
    topic : Expression[String],
    payload : Expression[Array[Byte]],
    payloadFeedback : Array[Byte] => Array[Byte] => Boolean = PayloadComparison.sameBytesContent,
    qos : MqttQoS = MqttQoS.AtMostOnce,
    retain : Boolean = false,
    timeout : FiniteDuration = 60 seconds
) extends MqttActionBuilder {

  def qos(newQos : MqttQoS) : PublishAndWaitActionBuilder = this.modify(_.qos).setTo(newQos)

  def qosAtMostOnce = qos(MqttQoS.AtMostOnce)

  def qosAtLeastOnce = qos(MqttQoS.AtLeastOnce)

  def qosExactlyOnce = qos(MqttQoS.ExactlyOnce)

  def retain(newRetain : Boolean) : PublishAndWaitActionBuilder = this.modify(_.retain).setTo(newRetain)

  def payloadFeedback(fn : Array[Byte] => Array[Byte] => Boolean) : PublishAndWaitActionBuilder = this
    .modify(_.payloadFeedback).setTo(fn)

  def timeout(duration : FiniteDuration) : PublishAndWaitActionBuilder = this.modify(_.timeout).setTo(duration)

  override def build(
    ctx : ScenarioContext, next : Action
  ) : Action = {
    new PublishAndWaitAction(
      mqttComponents(ctx), ctx.coreComponents, topic, payload, payloadFeedback, qos, retain, timeout, next
    )
  }
}
jeanadrien/gatling-mqtt-protocol
src/main/scala/com/github/jeanadrien/gatling/mqtt/actions/PublishAndWaitActionBuilder.scala
Scala
apache-2.0
1,777
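A small sketch of how the modifier methods declared in the case class above can be chained. The `base` builder is assumed to be produced elsewhere by the protocol's DSL (it is not constructed here), and the object name is hypothetical; only members shown in the file above are called.

import scala.concurrent.duration._

import com.github.jeanadrien.gatling.mqtt.actions.PublishAndWaitActionBuilder

object PublishTuningSketch {

  // Custom payload-feedback predicate: accept the round-trip only when the
  // broker echoes exactly the bytes that were published.
  val exactEcho: Array[Byte] => Array[Byte] => Boolean =
    sent => received => java.util.Arrays.equals(sent, received)

  // Tighten the settings of an existing builder via the modifiers above.
  def tuned(base: PublishAndWaitActionBuilder): PublishAndWaitActionBuilder =
    base
      .qosExactlyOnce             // instead of the default MqttQoS.AtMostOnce
      .retain(true)
      .payloadFeedback(exactEcho)
      .timeout(30.seconds)
}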
/*
 * Copyright (C) 2017 Fabrizio
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language
 * governing permissions and limitations under the License.
 */
package com.colofabrix.scala.gfx

import scala.annotation.tailrec
import com.colofabrix.scala.math.{ Vect, VectorField, XYVect }
import com.colofabrix.scala.physix.shapes._
import com.colofabrix.scala.tankwar.Configuration.World.{ Arena => ArenaConfig }

/**
 * Gateway to draw a variety of things, from simple to complex.
 */
object GenericRender {

  private val arena = ArenaConfig.asBox

  /** Draw a vector. */
  def draw(v: Vect, tail: Vect): Unit = Drawing.drawVector(v, tail)

  /** Draw the shapes of com.colofabrix.scala.physix.shapes */
  @tailrec
  def draw(s: Shape): Unit = s match {
    case c: Circle => Drawing.drawCircle(c.center, c.radius)
    case b: Box => Drawing.drawPolygon(b.vertices)
    case Segment(p0, p1) => Drawing.drawSegment(p0, p1)
    case l: Line => l.clip(arena) match {
      case Some(s: Segment) => draw(s)
      case None =>
    }
  }

  /** Draw a force field using vectors. */
  def draw(ff: VectorField): Unit = {
    val density = 4 // Number of vectors every 100 pixels
    for {
      i <- 0.0.to(arena.width + 0.01, 100 / density)
      j <- 0.0.to(arena.height + 0.01, 100 / density)
    } {
      val probePoint = XYVect(i, j)
      OpenGL.apply(colour = Some(Colour.DARK_GREY)) {
        GenericRender.draw(ff(probePoint), probePoint)
      }
    }
  }
}
ColOfAbRiX/tankwar
src/main/scala/com/colofabrix/scala/gfx/GenericRender.scala
Scala
apache-2.0
1,919
package x7c1.wheat.modern.database.selector.presets

import android.database.Cursor
import x7c1.wheat.macros.database.Query
import x7c1.wheat.modern.database.selector.{CanIdentify, CursorConverter, CursorReadable, CursorReifiable}

import scala.language.{higherKinds, reflectiveCalls}

abstract class CanFindEntity[
  I[T] <: CanIdentify[T],
  FROM: CursorReifiable: ({ type L[T] = CanFindBySelect[I, T] })#L,
  TO: ({ type L[T] = CursorReadable[FROM, T] })#L
] extends CanFindBySelect[I, TO]{

  override def reify(cursor: Cursor): Option[TO] = {
    new CursorConverter[FROM, TO](cursor) convertAt 0
  }

  override def queryAbout[X: I](target: X): Query = {
    implicitly[CanFindBySelect[I, FROM]] queryAbout target
  }
}
x7c1/Linen
wheat-modern/src/main/scala/x7c1/wheat/modern/database/selector/presets/CanFindEntity.scala
Scala
mit
725
package com.marcosgarciacasado.ssbatch

import org.joda.time.DateTime

/**
 * Key class for the quarter of hour measure aggregation.
 *
 * @author Marcos García Casado
 *
 */
class AggregatedMeasure (latitudec : String, longitudec : String, requestTimec : String, measurec : String) extends Serializable with Equals {

  override def toString() = measure+","+requestTime+","+latitude+","+longitude

  /**
   * Latitude coordinate of the sensor.
   */
  var latitude : String = latitudec

  /**
   * Longitude coordinate of the sensor.
   */
  var longitude : String = longitudec

  /**
   * Time of the measure value extraction.
   */
  var requestTime : String = requestTimec

  /**
   * Name of the magnitude.
   */
  var measure : String = measurec

  def canEqual(other: Any) = {
    other.isInstanceOf[com.marcosgarciacasado.ssbatch.AggregatedMeasure]
  }

  override def equals(other: Any) = {
    other match {
      case that: com.marcosgarciacasado.ssbatch.AggregatedMeasure =>
        that.canEqual(AggregatedMeasure.this) &&
          that.latitude == this.latitude &&
          that.longitude == this.longitude &&
          that.requestTime == this.requestTime &&
          that.measure == this.measure
      case _ => false
    }
  }

  override def hashCode() = {
    toString().hashCode()
  }
}
marcos-garcia/smartsantanderdataanalysis
ssbatch/src/main/scala/com/marcosgarciacasado/ssbatch/AggregatedMeasure.scala
Scala
gpl-2.0
1,350
package de.zalando.beard.performance

import java.io.{PrintWriter, StringWriter}

import com.github.mustachejava.DefaultMustacheFactory
import org.scalameter.api._

import scala.collection.JavaConverters._

/**
 * @author dpersa
 */
object MustacheJavaBenchmark extends Bench.LocalTime {

  val context = Map[String, AnyRef](
    "example" -> Map("title" -> "Mustache").asJava,
    "presentations" -> Seq(
      Map("title" -> "Title1", "speakerName" -> "Name1", "summary" -> "Summary1").asJava,
      Map("title" -> "Title2", "speakerName" -> "Name2", "summary" -> "Summary2").asJava
    ).asJava
  ).asJava

  val mf = new DefaultMustacheFactory()
  val mustache = mf.compile("mustache-java-benchmark/application.mustache")

  val sizes = Gen.range("size")(1, 100000, 20000)

  val ranges = for {
    size <- sizes
  } yield 0 until size

  performance of "Mustache" in {
    measure method "render" in {
      using(ranges) in { (r: Range) =>
        {
          r.foreach { _ =>
            mustache.execute(new PrintWriter(new StringWriter()), context).flush()
          }
        }
      }
    }
  }
}
danpersa/beard
src/test/scala/de/zalando/beard/performance/MustacheJavaBenchmark.scala
Scala
apache-2.0
1,087
/* Copyright (c) 2008 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.gdata.client

/**
 * Factory for creating authentication objects. It encapsulates the method by which they
 * are acquired. It supports ClientLogin. TODO: AuthSub.
 *
 * @see http://code.google.com/apis/gdata/auth.html
 * @author Iulian Dragos
 */
trait AuthTokenFactory {
  /** Return the current token. */
  def token: AuthToken

  /** Set the authentication token. */
  def token_=(t: AuthToken): Unit

  /**
   * Sets the user credentials, and tries to get an authentication token. If authentication
   * fails, it throws an AuthenticationException.
   *
   * @throws may throw any of AuthenticationException subclasses.
   */
  def setUserCredentials(username: String, passwd: String): Unit

  /**
   * Sets the user credentials, and tries to get an authentication token using the provided
   * captcha token and answer. If authentication fails, it throws an AuthenticationException.
   *
   * @throws may throw any of AuthenticationException subclasses.
   */
  def setUserCredentials(username: String, passwd: String, captchaToken: String, captchaAnswer: String): Unit
}
mjanson/gdata-scala-client
src/com/google/gdata/client/AuthTokenFactory.scala
Scala
apache-2.0
1,783
val s = "He"
println(s.product.toLong)
Gerhut/scala-for-the-impatient
Chapter2/7.scala
Scala
unlicense
39
package bita

import com.redis._
import akka.actor.{ ActorSystem, Actor, Props, ActorRef }
import akka.bita.{ RandomScheduleHelper, Scheduler }
import akka.bita.pattern.Patterns._
import akka.util.duration._
import akka.util.Timeout
import akka.dispatch.Await
import bita.util.{ FileHelper, TestHelper }
import bita.criteria._
import bita.ScheduleOptimization._
import org.scalatest._
import akka.testkit._
import akka.testkit.TestProbe
import com.typesafe.config.ConfigFactory

/**
 * Ported from net.fyrie.redis.KeysSpec
 * Test: renamenx should give
 */
class RenamenxSpec extends BitaTests {

  override def name = "Fyrie-renamenx"

  def run {
    system = ActorSystem("ActorSystem", ConfigFactory.parseString("""
      akka {
        event-handlers = ["akka.testkit.TestEventListener"]
        loglevel = "DEBUG"
        stdout-loglevel = "DEBUG"
        actor {
          default-dispatcher {
            core-pool-size-min = 4
            core-pool-size-factor = 2.0
            throughput = 10
          }
        }
      }
    """))

    if (random) {
      RandomScheduleHelper.setMaxDelay(250) // Increase the delay between messages to 250 ms
      RandomScheduleHelper.setSystem(system)
    }

    val probe = new TestProbe(system) // Use a testprobe to represent the tests.

    val r = new RedisClient("localhost", 6379)

    r.set("anshin-1", "debasish")
    r.set("anshin-2", "maulindu")

    val result1 = r.renamenx("anshin-2", "anshin-2-new")
    val result2 = r.renamenx("anshin-1", "anshin-2-new")

    if (result1 == true && result2 == false) {
      bugDetected = false
      println(Console.GREEN + Console.BOLD + "***SUCCESS***" + Console.RESET)
    } else {
      bugDetected = true
      println(Console.RED + Console.BOLD + "***FAILURE***" + Console.RESET)
    }

    r.flushdb // Empty the redis server
    r.flushall
  }
}
Tjoene/thesis
Case_Programs/scala-redis-2.9-pre-scala-2.10/src/test/scala/bita/RenamenxSpec.scala
Scala
gpl-2.0
1,927
package com.twitter.scrooge.android_generator import com.twitter.scrooge.ast._ import com.github.mustachejava.{Mustache, DefaultMustacheFactory} import com.twitter.scrooge.mustache.ScalaObjectHandler import java.io.{FileWriter, File, StringWriter} import com.twitter.scrooge.ast.SetType import com.twitter.scrooge.ast.MapType import com.twitter.scrooge.ast.ListType import com.twitter.scrooge.ast.ReferenceType import com.twitter.scrooge.java_generator.{ApacheJavaGenerator, TypeController} import scala.collection.concurrent.TrieMap import scala.collection.mutable import com.twitter.scrooge.backend.{GeneratorFactory, ThriftGenerator, ServiceOption} import com.twitter.scrooge.frontend.{ScroogeInternalException, ResolvedDocument} object AndroidGeneratorFactory extends GeneratorFactory { val lang = "android" private val templateCache = new TrieMap[String, Mustache] def apply( includeMap: Map[String, ResolvedDocument], defaultNamespace: String, experimentFlags: Seq[String] ): ThriftGenerator = new AndroidGenerator(includeMap, defaultNamespace, templateCache) } class AndroidGenerator( includeMap: Map[String, ResolvedDocument], defaultNamespace: String, templateCache: TrieMap[String, Mustache], genHashcode: Boolean = true ) extends ApacheJavaGenerator(includeMap, defaultNamespace, templateCache, genHashcode) { override def renderMustache(template: String, controller: Any = this) = { val sw = new StringWriter() val mustache = templateCache.getOrElseUpdate(template, { val mf = new DefaultMustacheFactory("androidgen/") mf.setObjectHandler(new ScalaObjectHandler) val m = mf.compile(template) m }) mustache.execute(sw, controller).flush() sw.toString } override def getNamespace(doc: Document): Identifier = doc.namespace("android") getOrElse SimpleID(defaultNamespace) override def getIncludeNamespace(includeFileName: String): Identifier = { val javaNamespace = includeMap.get(includeFileName).flatMap { doc: ResolvedDocument => doc.document.namespace("android") } javaNamespace.getOrElse(SimpleID(defaultNamespace)) } override def typeName( t: FunctionType, inContainer: Boolean = false, inInit: Boolean = false, skipGeneric: Boolean = false): String = { t match { case Void => if (inContainer) "Void" else "void" case OnewayVoid => if (inContainer) "Void" else "void" case TBool => if (inContainer) "Boolean" else "boolean" case TByte => if (inContainer) "Byte" else "byte" case TI16 => if (inContainer) "Short" else "short" case TI32 => if (inContainer) "Integer" else "int" case TI64 => if (inContainer) "Long" else "long" case TDouble => if (inContainer) "Double" else "double" case TString => "String" case TBinary => "ByteBuffer" case n: NamedType => qualifyNamedType(n.sid, n.scopePrefix).fullName case MapType(k, v, _) => { val prefix = if (inInit) "HashMap" else "Map" prefix + (if (skipGeneric) "" else "<" + typeName(k, inContainer = true) + "," + typeName(v, inContainer = true) + ">") } case SetType(x, _) => { val prefix = if (inInit) "HashSet" else "Set" prefix + (if (skipGeneric) "" else "<" + typeName(x, inContainer = true) + ">") } case ListType(x, _) => { val prefix = if (inInit) "ArrayList" else "List" prefix + (if (skipGeneric) "" else "<" + typeName(x, inContainer = true) + ">") } case r: ReferenceType => throw new ScroogeInternalException("ReferenceType should not appear in backend") case _ => throw new ScroogeInternalException("unknown type") } } def leftElementTypeName(t: FunctionType, skipGeneric: Boolean = false): String = { t match { case MapType(k, v, _) => typeName(k, inContainer = true, skipGeneric 
= skipGeneric) case SetType(x, _) => typeName(x, inContainer = true, skipGeneric = skipGeneric) case ListType(x, _) => typeName(x, inContainer = true, skipGeneric = skipGeneric) case _ => "" } } def rightElementTypeName(t: FunctionType, skipGeneric: Boolean = false): String = { t match { case MapType(k, v, _) => typeName(v, inContainer = true, skipGeneric = skipGeneric) case _ => "" } } def isListOrSetType(t: FunctionType) = { t match { case ListType(_,_) => true case SetType(_,_) => true case _ => false } } override def apply(doc: Document, serviceOptions: Set[ServiceOption], outputPath: File, dryRun: Boolean = false) = { val generatedFiles = new mutable.ListBuffer[File] val namespace = getNamespace(doc) val packageDir = namespacedFolder(outputPath, namespace.fullName, dryRun) def renderFile(templateName: String, controller: TypeController) = { val fileContent = renderMustache(templateName, controller) val file = new File(packageDir, controller.name + ".java") if (!dryRun) { val writer = new FileWriter(file) try { writer.write(fileContent) } finally { writer.close() } } file } if (doc.consts.nonEmpty) { generatedFiles += renderFile("consts.mustache", new ConstController(doc.consts, this, Some(namespace))) } doc.enums.foreach { enum => generatedFiles += renderFile("enum.mustache", new EnumController(enum, this, Some(namespace))) } doc.structs.foreach { struct => generatedFiles += renderFile("struct.mustache", new StructController(struct, false, this, Some(namespace))) } generatedFiles } }
nkhuyu/scrooge
scrooge-generator/src/main/scala/com/twitter/scrooge/android_generator/AndroidGenerator.scala
Scala
apache-2.0
5,636
package models

import models.daos.ProductDAO
import org.joda.time.DateTime

case class Product (
  id: Long,
  name: String,
  description: Option[String] = None,
  createdAt: DateTime,
  updatedAt: DateTime,
  deletedAt: Option[DateTime] = None,
  partsSeq: Seq[Parts] = Nil) {

  def this(product: Product) = this(
    product.id,
    product.name,
    product.description,
    product.createdAt,
    product.updatedAt,
    product.deletedAt,
    product.partsSeq
  )
}

object Product extends ProductDAO
KIWIKIGMBH/kiwierp
kiwierp-backend/app/models/Product.scala
Scala
mpl-2.0
500
package debop4s.data.slick.customtypes import debop4s.data.slick.AbstractSlickFunSuite import debop4s.data.slick.SlickExampleDatabase._ import debop4s.data.slick.SlickExampleDatabase.driver.simple._ import scala.reflect.ClassTag import scala.slick.lifted import scala.util.Try /** * DDD Component 또는 Record 형태의 정보를 매핑하는 테스트 * @author [email protected] */ class CustomRecordTypesFunSuite extends AbstractSlickFunSuite { // custom record type case class Pair[A, B](a: A, b: B) final class PairShape[Level <: ShapeLevel, M <: Pair[_, _], U <: Pair[_, _] : ClassTag, P <: Pair[_, _]](val shapes: Seq[Shape[_, _, _, _]]) extends MappedScalaProductShape[Level, Pair[_, _], M, U, P] { override def buildValue(elems: IndexedSeq[Any]): Any = Pair(elems(0), elems(1)) override def copy(shapes: Seq[lifted.Shape[_ <: ShapeLevel, _, _, _]]): lifted.Shape[Level, _, _, _] = new PairShape(shapes) } implicit def pairShape[Level <: ShapeLevel, M1, M2, U1, U2, P1, P2] (implicit s1: Shape[_ <: Level, M1, U1, P1], s2: Shape[_ <: Level, M2, U2, P2]): PairShape[Level, Pair[M1, M2], Pair[U1, U2], Pair[P1, P2]] = new PairShape[Level, Pair[M1, M2], Pair[U1, U2], Pair[P1, P2]](Seq(s1, s2)) class PairShapeTable(tag: Tag) extends Table[(String, Pair[Int, String])](tag, "pair_shape") { def name = column[String]("name") def shapeNo = column[Int]("shape_no") def shapeDesc = column[String]("shape_desc") def pair = Pair(shapeNo, shapeDesc) def * = (name, pair) } lazy val PairShapes = TableQuery[PairShapeTable] test("custom record type") { withSession { implicit session => Try { PairShapes.ddl.drop } PairShapes.ddl.create PairShapes +=("triangle", Pair(1, "a")) PairShapes +=("circle", Pair(2, "b")) PairShapes +=("rectangle", Pair(3, "c")) PairShapes.filter(_.name === "rectangle".bind).map(_.pair.a).run shouldEqual Vector(3) val q = PairShapes .map(_.pair) .map { case p => Pair(p.a, p.b ++ p.b) } .filter { case Pair(id, _) => id =!= 1 } .sortBy { case Pair(_, ss) => ss } .map { case Pair(id, ss) => Pair(id, Pair(42, ss)) } q.run shouldEqual Vector(Pair(2, Pair(42, "bb")), Pair(3, Pair(42, "cc"))) } } }
debop/debop4s
debop4s-data-slick/src/test/scala/debop4s/data/slick/customtypes/CustomRecordTypesFunSuite.scala
Scala
apache-2.0
2,321
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package kafka.server import kafka.api._ import kafka.utils._ import kafka.cluster.Replica import kafka.log.Log import kafka.message.{ByteBufferMessageSet, Message, MessageSet} import kafka.server.QuotaFactory.UnboundedQuota import org.apache.kafka.common.metrics.Metrics import org.apache.kafka.common.utils.{MockTime => JMockTime} import org.apache.kafka.common.requests.FetchRequest.PartitionData import org.junit.{After, Before, Test} import java.util.Properties import java.util.concurrent.atomic.AtomicBoolean import kafka.common.TopicAndPartition import org.apache.kafka.common.TopicPartition import org.easymock.EasyMock import org.junit.Assert._ class SimpleFetchTest { val replicaLagTimeMaxMs = 100L val replicaFetchWaitMaxMs = 100 val replicaLagMaxMessages = 10L val overridingProps = new Properties() overridingProps.put(KafkaConfig.ReplicaLagTimeMaxMsProp, replicaLagTimeMaxMs.toString) overridingProps.put(KafkaConfig.ReplicaFetchWaitMaxMsProp, replicaFetchWaitMaxMs.toString) val configs = TestUtils.createBrokerConfigs(2, TestUtils.MockZkConnect).map(KafkaConfig.fromProps(_, overridingProps)) // set the replica manager with the partition val time = new MockTime val jTime = new JMockTime val metrics = new Metrics val leaderLEO = 20L val followerLEO = 15L val partitionHW = 5 val fetchSize = 100 val messagesToHW = new Message("messageToHW".getBytes()) val messagesToLEO = new Message("messageToLEO".getBytes()) val topic = "test-topic" val partitionId = 0 val topicAndPartition = TopicAndPartition(topic, partitionId) val fetchInfo = Seq(new TopicPartition(topicAndPartition.topic, topicAndPartition.partition) -> new PartitionData(0, fetchSize)) var replicaManager: ReplicaManager = null @Before def setUp() { // create nice mock since we don't particularly care about zkclient calls val zkUtils = EasyMock.createNiceMock(classOf[ZkUtils]) EasyMock.replay(zkUtils) // create nice mock since we don't particularly care about scheduler calls val scheduler = EasyMock.createNiceMock(classOf[KafkaScheduler]) EasyMock.replay(scheduler) // create the log which takes read with either HW max offset or none max offset val log = EasyMock.createMock(classOf[Log]) EasyMock.expect(log.logEndOffset).andReturn(leaderLEO).anyTimes() EasyMock.expect(log.logEndOffsetMetadata).andReturn(new LogOffsetMetadata(leaderLEO)).anyTimes() EasyMock.expect(log.read(0, fetchSize, Some(partitionHW), true)).andReturn( new FetchDataInfo( new LogOffsetMetadata(0L, 0L, 0), new ByteBufferMessageSet(messagesToHW) )).anyTimes() EasyMock.expect(log.read(0, fetchSize, None, true)).andReturn( new FetchDataInfo( new LogOffsetMetadata(0L, 0L, 0), new ByteBufferMessageSet(messagesToLEO) )).anyTimes() EasyMock.replay(log) // create the log manager that is aware of this mock log val 
logManager = EasyMock.createMock(classOf[kafka.log.LogManager]) EasyMock.expect(logManager.getLog(topicAndPartition)).andReturn(Some(log)).anyTimes() EasyMock.replay(logManager) // create the replica manager replicaManager = new ReplicaManager(configs.head, metrics, time, jTime, zkUtils, scheduler, logManager, new AtomicBoolean(false), QuotaFactory.instantiate(configs.head, metrics, time).follower) // add the partition with two replicas, both in ISR val partition = replicaManager.getOrCreatePartition(topic, partitionId) // create the leader replica with the local log val leaderReplica = new Replica(configs.head.brokerId, partition, time, 0, Some(log)) leaderReplica.highWatermark = new LogOffsetMetadata(partitionHW) partition.leaderReplicaIdOpt = Some(leaderReplica.brokerId) // create the follower replica with defined log end offset val followerReplica= new Replica(configs(1).brokerId, partition, time) val leo = new LogOffsetMetadata(followerLEO, 0L, followerLEO.toInt) followerReplica.updateLogReadResult(new LogReadResult(FetchDataInfo(leo, MessageSet.Empty), -1L, -1, true)) // add both of them to ISR val allReplicas = List(leaderReplica, followerReplica) allReplicas.foreach(partition.addReplicaIfNotExists) partition.inSyncReplicas = allReplicas.toSet } @After def tearDown() { replicaManager.shutdown(false) metrics.close() } /** * The scenario for this test is that there is one topic that has one partition * with one leader replica on broker "0" and one follower replica on broker "1" * inside the replica manager's metadata. * * The leader replica on "0" has HW of "5" and LEO of "20". The follower on * broker "1" has a local replica with a HW matching the leader's ("5") and * LEO of "15", meaning it's not in-sync but is still in ISR (hasn't yet expired from ISR). * * When a fetch operation with read committed data turned on is received, the replica manager * should only return data up to the HW of the partition; when a fetch operation with read * committed data turned off is received, the replica manager could return data up to the LEO * of the local leader replica's log. * * This test also verifies counts of fetch requests recorded by the ReplicaManager */ @Test def testReadFromLog() { val initialTopicCount = BrokerTopicStats.getBrokerTopicStats(topic).totalFetchRequestRate.count() val initialAllTopicsCount = BrokerTopicStats.getBrokerAllTopicsStats().totalFetchRequestRate.count() assertEquals("Reading committed data should return messages only up to high watermark", messagesToHW, replicaManager.readFromLocalLog( replicaId = Request.OrdinaryConsumerId, fetchOnlyFromLeader = true, readOnlyCommitted = true, fetchMaxBytes = Int.MaxValue, hardMaxBytesLimit = false, readPartitionInfo = fetchInfo, quota = UnboundedQuota).find(_._1 == topicAndPartition).get._2.info.messageSet.head.message) assertEquals("Reading any data can return messages up to the end of the log", messagesToLEO, replicaManager.readFromLocalLog( replicaId = Request.OrdinaryConsumerId, fetchOnlyFromLeader = true, readOnlyCommitted = false, fetchMaxBytes = Int.MaxValue, hardMaxBytesLimit = false, readPartitionInfo = fetchInfo, quota = UnboundedQuota).find(_._1 == topicAndPartition).get._2.info.messageSet.head.message) assertEquals("Counts should increment after fetch", initialTopicCount+2, BrokerTopicStats.getBrokerTopicStats(topic).totalFetchRequestRate.count()) assertEquals("Counts should increment after fetch", initialAllTopicsCount+2, BrokerTopicStats.getBrokerAllTopicsStats().totalFetchRequestRate.count()) } }
geeag/kafka
core/src/test/scala/unit/kafka/server/SimpleFetchTest.scala
Scala
apache-2.0
7,596
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package kafka import java.util.Properties import java.util.concurrent.atomic._ import kafka.common._ import kafka.message._ import kafka.log._ import kafka.utils._ /** * A stress test that instantiates a log and then runs continual appends against it from one thread and continual reads against it * from another thread and checks a few basic assertions until the user kills the process. */ object StressTestLog { val running = new AtomicBoolean(true) def main(args: Array[String]) { val dir = TestUtils.randomPartitionLogDir(TestUtils.tempDir()) val time = new MockTime val logProprties = new Properties() logProprties.put(LogConfig.SegmentBytesProp, 64*1024*1024: java.lang.Integer) logProprties.put(LogConfig.MaxMessageBytesProp, Int.MaxValue: java.lang.Integer) logProprties.put(LogConfig.SegmentIndexBytesProp, 1024*1024: java.lang.Integer) val log = new Log(dir = dir, config = LogConfig(logProprties), recoveryPoint = 0L, scheduler = time.scheduler, time = time) val writer = new WriterThread(log) writer.start() val reader = new ReaderThread(log) reader.start() Runtime.getRuntime().addShutdownHook(new Thread() { override def run() = { running.set(false) writer.join() reader.join() CoreUtils.rm(dir) } }) while(running.get) { println("Reader offset = %d, writer offset = %d".format(reader.offset, writer.offset)) Thread.sleep(1000) } } abstract class WorkerThread extends Thread { override def run() { try { var offset = 0 while(running.get) work() } catch { case e: Exception => e.printStackTrace() running.set(false) } println(getClass.getName + " exiting...") } def work() } class WriterThread(val log: Log) extends WorkerThread { @volatile var offset = 0 override def work() { val logAppendInfo = log.append(TestUtils.singleMessageSet(offset.toString.getBytes)) require(logAppendInfo.firstOffset == offset && logAppendInfo.lastOffset == offset) offset += 1 if(offset % 1000 == 0) Thread.sleep(500) } } class ReaderThread(val log: Log) extends WorkerThread { @volatile var offset = 0 override def work() { try { log.read(offset, 1024, Some(offset+1)).messageSet match { case read: FileMessageSet if read.sizeInBytes > 0 => { val first = read.head require(first.offset == offset, "We should either read nothing or the message we asked for.") require(MessageSet.entrySize(first.message) == read.sizeInBytes, "Expected %d but got %d.".format(MessageSet.entrySize(first.message), read.sizeInBytes)) offset += 1 } case _ => } } catch { case e: OffsetOutOfRangeException => // this is okay } } } }
prashantbh/kafka
core/src/test/scala/other/kafka/StressTestLog.scala
Scala
apache-2.0
3,825
//
// Using Java varargs
//
// Compile and run:
// $ scalac varargs.scala
// $ scala varargs
//
object varargs extends App {

  val msg = java.text.MessageFormat.format(
    "At {1,time} on {1,date}, there was {2} on planet {0}.",
    "Hoth", new java.util.Date(), "a disturbance in the Force")

  println("Message=" + msg)
}
sergev/vak-opensource
languages/scala/varargs.scala
Scala
apache-2.0
346
package slash_actions

import models._

object Revote extends SlashAction {
  def execute(
    votingSession:Option[VotingSession],
    username:String,
    data:String):Option[String] = {

    if (votingSession.isDefined) {
      val session = votingSession.get
      val ticketDescription = session.ticket

      clearVotes(session)
      sendRevoteCalledMessage(ticketDescription)
    } else {
      // TODO warn that session hasn't started
    }

    None
  }

  def clearVotes(session:VotingSession) = {
    VotingSession.clearVotes(session)
  }

  def sendRevoteCalledMessage(ticketDescription:String) = {
    // Interpolate the ticket description ($ticketDescription); the literal "#ticketDescription" was never substituted.
    val message = s"<!here|here> Given the discussion above about $ticketDescription, " +
      "please place a new vote using `/chasm vote`. Maybe try coming to a " +
      "consensus this time so I can take a break."

    slack.IncomingWebhookClient.postInChannel(message)
  }
}
myrridin/chasm_bot
app/value_objects/slash_actions/Revote.scala
Scala
mit
891
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.openwhisk.core.controller.test import java.time.{Clock, Instant} import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._ import akka.http.scaladsl.model.StatusCodes._ import akka.http.scaladsl.server.Route import org.junit.runner.RunWith import org.scalatest.junit.JUnitRunner import spray.json.DefaultJsonProtocol._ import spray.json._ import org.apache.openwhisk.core.controller.WhiskActivationsApi import org.apache.openwhisk.core.entitlement.Collection import org.apache.openwhisk.core.entity._ import org.apache.openwhisk.core.entity.size._ import org.apache.openwhisk.http.{ErrorResponse, Messages} import org.apache.openwhisk.core.database.UserContext import scala.concurrent.duration._ /** * Tests Activations API. * * Unit tests of the controller service as a standalone component. * These tests exercise a fresh instance of the service object in memory -- these * tests do NOT communication with a whisk deployment. * * * @Idioglossia * "using Specification DSL to write unit tests, as in should, must, not, be" * "using Specs2RouteTest DSL to chain HTTP requests for unit testing, as in ~>" */ @RunWith(classOf[JUnitRunner]) class ActivationsApiTests extends ControllerTestCommon with WhiskActivationsApi { /** Activations API tests */ behavior of "Activations API" val creds = WhiskAuthHelpers.newIdentity() val context = UserContext(creds) val namespace = EntityPath(creds.subject.asString) val collectionPath = s"/${EntityPath.DEFAULT}/${collection.path}" val retriesOnTestFailures = 5 val waitBeforeRetry = 1.second def aname() = MakeName.next("activations_tests") def checkCount(filter: String, expected: Int, user: Identity = creds) = { implicit val tid = transid() withClue(s"count did not match for filter: $filter") { org.apache.openwhisk.utils.retry { Get(s"$collectionPath?count=true&$filter") ~> Route.seal(routes(user)) ~> check { status should be(OK) responseAs[JsObject] shouldBe JsObject(collection.path -> JsNumber(expected)) } } } } //// GET /activations it should "get summary activation by namespace" in { org.apache.openwhisk.utils .retry( { afterEach() implicit val tid = transid() // create two sets of activation records, and check that only one set is served back val creds1 = WhiskAuthHelpers.newAuth() val notExpectedActivations = (1 to 2).map { i => WhiskActivation( EntityPath(creds1.subject.asString), aname(), creds1.subject, ActivationId.generate(), start = Instant.now, end = Instant.now) } val actionName = aname() val activations = (1 to 2).map { i => WhiskActivation( namespace, actionName, creds.subject, ActivationId.generate(), start = Instant.now, end = Instant.now) }.toList try { (notExpectedActivations ++ activations).foreach(storeActivation(_, context)) 
waitOnListActivationsInNamespace(namespace, 2, context) org.apache.openwhisk.utils.retry { Get(s"$collectionPath") ~> Route.seal(routes(creds)) ~> check { status should be(OK) val response = responseAs[List[JsObject]] activations.length should be(response.length) response should contain theSameElementsAs activations.map(_.summaryAsJson) response forall { a => a.getFields("for") match { case Seq(JsString(n)) => n == actionName.asString case _ => false } } } } // it should "list activations with explicit namespace owned by subject" in { org.apache.openwhisk.utils.retry { Get(s"/$namespace/${collection.path}") ~> Route.seal(routes(creds)) ~> check { status should be(OK) val response = responseAs[List[JsObject]] activations.length should be(response.length) response should contain theSameElementsAs activations.map(_.summaryAsJson) response forall { a => a.getFields("for") match { case Seq(JsString(n)) => n == actionName.asString case _ => false } } } } // it should "reject list activations with explicit namespace not owned by subject" in { val auser = WhiskAuthHelpers.newIdentity() Get(s"/$namespace/${collection.path}") ~> Route.seal(routes(auser)) ~> check { status should be(Forbidden) } } finally { (notExpectedActivations ++ activations).foreach(activation => deleteActivation(ActivationId(activation.docid.asString), context)) } }, retriesOnTestFailures, Some(waitBeforeRetry), Some( s"${this.getClass.getName} > Activations API should get summary activation by namespace not successful, retrying..")) } //// GET /activations?docs=true it should "return empty list when no activations exist" in { org.apache.openwhisk.utils .retry( { afterEach() implicit val tid = transid() org.apache.openwhisk.utils .retry { // retry because view will be stale from previous test and result in null doc fields Get(s"$collectionPath?docs=true") ~> Route.seal(routes(creds)) ~> check { status should be(OK) responseAs[List[JsObject]] shouldBe 'empty } } }, retriesOnTestFailures, Some(waitBeforeRetry), Some( s"${this.getClass.getName} > Activations API should return empty list when no activations exist not successful, retrying..")) } it should "get full activation by namespace" in { org.apache.openwhisk.utils .retry( { afterEach() implicit val tid = transid() // create two sets of activation records, and check that only one set is served back val creds1 = WhiskAuthHelpers.newAuth() val notExpectedActivations = (1 to 2).map { i => WhiskActivation( EntityPath(creds1.subject.asString), aname(), creds1.subject, ActivationId.generate(), start = Instant.now, end = Instant.now) } val actionName = aname() val activations = (1 to 2).map { i => WhiskActivation( namespace, actionName, creds.subject, ActivationId.generate(), start = Instant.now, end = Instant.now, response = ActivationResponse.success(Some(JsNumber(5)))) }.toList try { (notExpectedActivations ++ activations).foreach(storeActivation(_, context)) waitOnListActivationsInNamespace(namespace, 2, context) checkCount("", 2) org.apache.openwhisk.utils.retry { Get(s"$collectionPath?docs=true") ~> Route.seal(routes(creds)) ~> check { status should be(OK) val response = responseAs[List[JsObject]] activations.length should be(response.length) response should contain theSameElementsAs activations.map(_.toExtendedJson()) } } } finally { (notExpectedActivations ++ activations).foreach(activation => deleteActivation(ActivationId(activation.docid.asString), context)) } }, retriesOnTestFailures, Some(waitBeforeRetry), Some( s"${this.getClass.getName} > Activations API should get full 
activation by namespace not successful, retrying..")) } //// GET /activations?docs=true&since=xxx&upto=yyy it should "get full activation by namespace within a date range" in { org.apache.openwhisk.utils .retry( { afterEach() implicit val tid = transid() // create two sets of activation records, and check that only one set is served back val creds1 = WhiskAuthHelpers.newAuth() val notExpectedActivations = (1 to 2).map { i => WhiskActivation( EntityPath(creds1.subject.asString), aname(), creds1.subject, ActivationId.generate(), start = Instant.now, end = Instant.now) } val actionName = aname() val now = Instant.now(Clock.systemUTC()) val since = now.plusSeconds(10) val upto = now.plusSeconds(30) val activations = Seq( WhiskActivation( namespace, actionName, creds.subject, ActivationId.generate(), start = now.plusSeconds(9), end = now.plusSeconds(9)), WhiskActivation( namespace, actionName, creds.subject, ActivationId.generate(), start = now.plusSeconds(20), end = now.plusSeconds(20)), // should match WhiskActivation( namespace, actionName, creds.subject, ActivationId.generate(), start = now.plusSeconds(10), end = now.plusSeconds(20)), // should match WhiskActivation( namespace, actionName, creds.subject, ActivationId.generate(), start = now.plusSeconds(31), end = now.plusSeconds(31)), WhiskActivation( namespace, actionName, creds.subject, ActivationId.generate(), start = now.plusSeconds(30), end = now.plusSeconds(30))) // should match try { (notExpectedActivations ++ activations).foreach(storeActivation(_, context)) waitOnListActivationsInNamespace(namespace, activations.length, context) { // get between two time stamps val filter = s"since=${since.toEpochMilli}&upto=${upto.toEpochMilli}" val expected = activations.filter { e => (e.start.equals(since) || e.start.equals(upto) || (e.start.isAfter(since) && e.start.isBefore(upto))) } checkCount(filter, expected.length) org.apache.openwhisk.utils.retry { Get(s"$collectionPath?docs=true&$filter") ~> Route.seal(routes(creds)) ~> check { status should be(OK) val response = responseAs[List[JsObject]] expected.length should be(response.length) response should contain theSameElementsAs expected.map(_.toExtendedJson()) } } } { // get 'upto' with no defined since value should return all activation 'upto' val expected = activations.filter(e => e.start.equals(upto) || e.start.isBefore(upto)) val filter = s"upto=${upto.toEpochMilli}" checkCount(filter, expected.length) org.apache.openwhisk.utils.retry { Get(s"$collectionPath?docs=true&$filter") ~> Route.seal(routes(creds)) ~> check { status should be(OK) val response = responseAs[List[JsObject]] expected.length should be(response.length) response should contain theSameElementsAs expected.map(_.toExtendedJson()) } } } { // get 'since' with no defined upto value should return all activation 'since' org.apache.openwhisk.utils.retry { val expected = activations.filter(e => e.start.equals(since) || e.start.isAfter(since)) val filter = s"since=${since.toEpochMilli}" checkCount(filter, expected.length) Get(s"$collectionPath?docs=true&$filter") ~> Route.seal(routes(creds)) ~> check { status should be(OK) val response = responseAs[List[JsObject]] expected.length should be(response.length) response should contain theSameElementsAs expected.map(_.toExtendedJson()) } } } } finally { (notExpectedActivations ++ activations).foreach(activation => deleteActivation(ActivationId(activation.docid.asString), context)) } }, retriesOnTestFailures, Some(waitBeforeRetry), Some( s"${this.getClass.getName} > Activations API should 
get full activation by namespace within a date range not successful, retrying..")) } //// GET /activations?name=xyz it should "accept valid name parameters and reject invalid ones" in { org.apache.openwhisk.utils .retry( { afterEach() implicit val tid = transid() Seq(("", OK), ("name=", OK), ("name=abc", OK), ("name=abc/xyz", OK), ("name=abc/xyz/123", BadRequest)) .foreach { case (p, s) => Get(s"$collectionPath?$p") ~> Route.seal(routes(creds)) ~> check { status should be(s) if (s == BadRequest) { responseAs[String] should include(Messages.badNameFilter(p.drop(5))) } } } }, retriesOnTestFailures, Some(waitBeforeRetry), Some( s"${this.getClass.getName} > Activations API should accept valid name parameters and reject invalid ones not successful, retrying..")) } it should "get summary activation by namespace and action name" in { org.apache.openwhisk.utils .retry( { afterEach() implicit val tid = transid() // create two sets of activation records, and check that only one set is served back val creds1 = WhiskAuthHelpers.newAuth() val notExpectedActivations = (1 to 2).map { i => WhiskActivation( EntityPath(creds1.subject.asString), aname(), creds1.subject, ActivationId.generate(), start = Instant.now, end = Instant.now) } val activations = (1 to 2).map { i => WhiskActivation( namespace, EntityName(s"xyz"), creds.subject, ActivationId.generate(), start = Instant.now, end = Instant.now) }.toList val activationsInPackage = (1 to 2).map { i => WhiskActivation( namespace, EntityName(s"xyz"), creds.subject, ActivationId.generate(), start = Instant.now, end = Instant.now, annotations = Parameters("path", s"${namespace.asString}/pkg/xyz")) }.toList try { (notExpectedActivations ++ activations ++ activationsInPackage).foreach(storeActivation(_, context)) waitOnListActivationsMatchingName(namespace, EntityPath("xyz"), activations.length, context) waitOnListActivationsMatchingName( namespace, EntityName("pkg").addPath(EntityName("xyz")), activations.length, context) checkCount("name=xyz", activations.length) org.apache.openwhisk.utils.retry { Get(s"$collectionPath?name=xyz") ~> Route.seal(routes(creds)) ~> check { status should be(OK) val response = responseAs[List[JsObject]] activations.length should be(response.length) response should contain theSameElementsAs activations.map(_.summaryAsJson) } } checkCount("name=pkg/xyz", activations.length) org.apache.openwhisk.utils.retry { Get(s"$collectionPath?name=pkg/xyz") ~> Route.seal(routes(creds)) ~> check { status should be(OK) val response = responseAs[List[JsObject]] activationsInPackage.length should be(response.length) response should contain theSameElementsAs activationsInPackage.map(_.summaryAsJson) } } } finally { (notExpectedActivations ++ activations ++ activationsInPackage).foreach(activation => deleteActivation(ActivationId(activation.docid.asString), context)) } }, retriesOnTestFailures, Some(waitBeforeRetry), Some( s"${this.getClass.getName} > Activations API should get summary activation by namespace and action name not successful, retrying..")) } it should "reject invalid query parameter combinations" in { org.apache.openwhisk.utils .retry( { afterEach() implicit val tid = transid() org.apache.openwhisk.utils .retry { // retry because view will be stale from previous test and result in null doc fields Get(s"$collectionPath?docs=true&count=true") ~> Route.seal(routes(creds)) ~> check { status should be(BadRequest) responseAs[ErrorResponse].error shouldBe Messages.docsNotAllowedWithCount } } }, retriesOnTestFailures, Some(waitBeforeRetry), Some( 
s"${this.getClass.getName} > Activations API should reject invalid query parameter combinations not successful, retrying..")) } it should "reject list when limit is greater than maximum allowed value" in { org.apache.openwhisk.utils .retry( { afterEach() implicit val tid = transid() val exceededMaxLimit = Collection.MAX_LIST_LIMIT + 1 val response = Get(s"$collectionPath?limit=$exceededMaxLimit") ~> Route.seal(routes(creds)) ~> check { status should be(BadRequest) responseAs[String] should include { Messages.listLimitOutOfRange(Collection.ACTIVATIONS, exceededMaxLimit, Collection.MAX_LIST_LIMIT) } } }, retriesOnTestFailures, Some(waitBeforeRetry), Some( s"${this.getClass.getName} > Activations API should reject list when limit is greater than maximum allowed value not successful, retrying..")) } it should "reject list when limit is not an integer" in { org.apache.openwhisk.utils .retry( { afterEach() implicit val tid = transid() val notAnInteger = "string" val response = Get(s"$collectionPath?limit=$notAnInteger") ~> Route.seal(routes(creds)) ~> check { status should be(BadRequest) responseAs[String] should include { Messages.argumentNotInteger(Collection.ACTIVATIONS, notAnInteger) } } }, retriesOnTestFailures, Some(waitBeforeRetry), Some( s"${this.getClass.getName} > Activations API should reject list when limit is not an integer not successful, retrying..")) } it should "reject list when skip is negative" in { org.apache.openwhisk.utils .retry( { afterEach() implicit val tid = transid() val negativeSkip = -1 val response = Get(s"$collectionPath?skip=$negativeSkip") ~> Route.seal(routes(creds)) ~> check { status should be(BadRequest) responseAs[String] should include { Messages.listSkipOutOfRange(Collection.ACTIVATIONS, negativeSkip) } } }, retriesOnTestFailures, Some(waitBeforeRetry), Some( s"${this.getClass.getName} > Activations API should reject list when skip is negative not successful, retrying..")) } it should "reject list when skip is not an integer" in { org.apache.openwhisk.utils .retry( { afterEach() implicit val tid = transid() val notAnInteger = "string" val response = Get(s"$collectionPath?skip=$notAnInteger") ~> Route.seal(routes(creds)) ~> check { status should be(BadRequest) responseAs[String] should include { Messages.argumentNotInteger(Collection.ACTIVATIONS, notAnInteger) } } }, retriesOnTestFailures, Some(waitBeforeRetry), Some( s"${this.getClass.getName} > Activations API should reject list when skip is not an integer not successful, retrying..")) } it should "reject get activation by namespace and action name when action name is not a valid name" in { org.apache.openwhisk.utils .retry( { afterEach() implicit val tid = transid() Get(s"$collectionPath?name=0%20") ~> Route.seal(routes(creds)) ~> check { status should be(BadRequest) } }, retriesOnTestFailures, Some(waitBeforeRetry), Some( s"${this.getClass.getName} > Activations API should reject get activation by namespace and action name when action name is not a valid name not successful, retrying..")) } it should "reject get activation with invalid since/upto value" in { org.apache.openwhisk.utils .retry( { afterEach() implicit val tid = transid() Get(s"$collectionPath?since=xxx") ~> Route.seal(routes(creds)) ~> check { status should be(BadRequest) } Get(s"$collectionPath?upto=yyy") ~> Route.seal(routes(creds)) ~> check { status should be(BadRequest) } }, retriesOnTestFailures, Some(waitBeforeRetry), Some( s"${this.getClass.getName} > Activations API should reject get activation with invalid since/upto value not 
successful, retrying..")) } it should "skip activations and return correct ones" in { org.apache.openwhisk.utils .retry( { afterEach() implicit val tid = transid() val activations: Seq[WhiskActivation] = (1 to 3).map { i => //make sure the time is different for each activation val time = Instant.now.plusMillis(i) WhiskActivation(namespace, aname(), creds.subject, ActivationId.generate(), start = time, end = time) }.toList try { activations.foreach(storeActivation(_, context)) waitOnListActivationsInNamespace(namespace, activations.size, context) Get(s"$collectionPath?skip=1") ~> Route.seal(routes(creds)) ~> check { status should be(OK) val resultActivationIds = responseAs[List[JsObject]].map(_.fields("name")) val expectedActivationIds = activations.map(_.toJson.fields("name")).reverse.drop(1) resultActivationIds should be(expectedActivationIds) } } finally { activations.foreach(a => deleteActivation(ActivationId(a.docid.asString), context)) waitOnListActivationsInNamespace(namespace, 0, context) } }, retriesOnTestFailures, Some(waitBeforeRetry), Some( s"${this.getClass.getName} > Activations API should skip activations and return correct ones not successful, retrying..")) } it should "return last activation" in { org.apache.openwhisk.utils .retry( { afterEach() implicit val tid = transid() val activations = (1 to 3).map { i => //make sure the time is different for each activation val time = Instant.now.plusMillis(i) WhiskActivation(namespace, aname(), creds.subject, ActivationId.generate(), start = time, end = time) }.toList try { activations.foreach(storeActivation(_, context)) waitOnListActivationsInNamespace(namespace, activations.size, context) Get(s"$collectionPath?limit=1") ~> Route.seal(routes(creds)) ~> check { status should be(OK) val activationsJson = activations.map(_.toJson) withClue(s"Original activations: ${activationsJson}") { val respNames = responseAs[List[JsObject]].map(_.fields("name")) val expectNames = activationsJson.map(_.fields("name")).drop(2) respNames should be(expectNames) } } } finally { activations.foreach(a => deleteActivation(ActivationId(a.docid.asString), context)) waitOnListActivationsInNamespace(namespace, 0, context) } }, retriesOnTestFailures, Some(waitBeforeRetry), Some(s"${this.getClass.getName} > Activations API should return last activation not successful, retrying..")) } //// GET /activations/id it should "get activation by id" in { org.apache.openwhisk.utils .retry( { afterEach() implicit val tid = transid() val activation = WhiskActivation( namespace, aname(), creds.subject, ActivationId.generate(), start = Instant.now, end = Instant.now) try { storeActivation(activation, context) Get(s"$collectionPath/${activation.activationId.asString}") ~> Route.seal(routes(creds)) ~> check { status should be(OK) val response = responseAs[JsObject] response should be(activation.toExtendedJson()) } // it should "get activation by name in explicit namespace owned by subject" in Get(s"/$namespace/${collection.path}/${activation.activationId.asString}") ~> Route .seal(routes(creds)) ~> check { status should be(OK) val response = responseAs[JsObject] response should be(activation.toExtendedJson()) } // it should "reject get activation by name in explicit namespace not owned by subject" in val auser = WhiskAuthHelpers.newIdentity() Get(s"/$namespace/${collection.path}/${activation.activationId.asString}") ~> Route .seal(routes(auser)) ~> check { status should be(Forbidden) } } finally { deleteActivation(ActivationId(activation.docid.asString), context) } }, 
retriesOnTestFailures, Some(waitBeforeRetry), Some(s"${this.getClass.getName} > Activations API should get activation by id not successful, retrying..")) } //// GET /activations/id/result it should "get activation result by id" in { org.apache.openwhisk.utils .retry( { afterEach() implicit val tid = transid() val activation = WhiskActivation( namespace, aname(), creds.subject, ActivationId.generate(), start = Instant.now, end = Instant.now) try { storeActivation(activation, context) Get(s"$collectionPath/${activation.activationId.asString}/result") ~> Route.seal(routes(creds)) ~> check { status should be(OK) val response = responseAs[JsObject] response should be(activation.response.toExtendedJson) } } finally { deleteActivation(ActivationId(activation.docid.asString), context) } }, retriesOnTestFailures, Some(waitBeforeRetry), Some( s"${this.getClass.getName} > Activations API should get activation result by id not successful, retrying..")) } //// GET /activations/id/logs it should "get activation logs by id" in { org.apache.openwhisk.utils .retry( { afterEach() implicit val tid = transid() val activation = WhiskActivation( namespace, aname(), creds.subject, ActivationId.generate(), start = Instant.now, end = Instant.now) try { storeActivation(activation, context) Get(s"$collectionPath/${activation.activationId.asString}/logs") ~> Route.seal(routes(creds)) ~> check { status should be(OK) val response = responseAs[JsObject] response should be(activation.logs.toJsonObject) } } finally { deleteActivation(ActivationId(activation.docid.asString), context) } }, retriesOnTestFailures, Some(waitBeforeRetry), Some(s"${this.getClass.getName} > Activations API should get activation logs by id not successful, retrying..")) } //// GET /activations/id/bogus it should "reject request to get invalid activation resource" in { org.apache.openwhisk.utils .retry( { afterEach() implicit val tid = transid() val activation = WhiskActivation( namespace, aname(), creds.subject, ActivationId.generate(), start = Instant.now, end = Instant.now) storeActivation(activation, context) try { Get(s"$collectionPath/${activation.activationId.asString}/bogus") ~> Route.seal(routes(creds)) ~> check { status should be(NotFound) } } finally { deleteActivation(ActivationId(activation.docid.asString), context) } }, retriesOnTestFailures, Some(waitBeforeRetry), Some( s"${this.getClass.getName} > Activations API should reject request to get invalid activation resource not successful, retrying..")) } it should "reject get requests with invalid activation ids" in { org.apache.openwhisk.utils .retry( { afterEach() implicit val tid = transid() val activationId = ActivationId.generate().toString val tooshort = activationId.substring(0, 31) val toolong = activationId + "xxx" val malformed = tooshort + "z" Get(s"$collectionPath/$tooshort") ~> Route.seal(routes(creds)) ~> check { status should be(BadRequest) responseAs[String] shouldBe Messages.activationIdLengthError( SizeError("Activation id", tooshort.length.B, 32.B)) } Get(s"$collectionPath/$toolong") ~> Route.seal(routes(creds)) ~> check { status should be(BadRequest) responseAs[String] shouldBe Messages.activationIdLengthError( SizeError("Activation id", toolong.length.B, 32.B)) } Get(s"$collectionPath/$malformed") ~> Route.seal(routes(creds)) ~> check { status should be(BadRequest) } }, retriesOnTestFailures, Some(waitBeforeRetry), Some( s"${this.getClass.getName} > Activations API should reject get requests with invalid activation ids not successful, retrying..")) } it should 
"reject request with put" in { org.apache.openwhisk.utils .retry( { afterEach() implicit val tid = transid() Put(s"$collectionPath/${ActivationId.generate()}") ~> Route.seal(routes(creds)) ~> check { status should be(MethodNotAllowed) } }, retriesOnTestFailures, Some(waitBeforeRetry), Some(s"${this.getClass.getName} > Activations API should reject request with put not successful, retrying..")) } it should "reject request with post" in { org.apache.openwhisk.utils .retry( { afterEach() implicit val tid = transid() Post(s"$collectionPath/${ActivationId.generate()}") ~> Route.seal(routes(creds)) ~> check { status should be(MethodNotAllowed) } }, retriesOnTestFailures, Some(waitBeforeRetry), Some(s"${this.getClass.getName} > Activations API should reject request with pos not successful, retrying..")) } it should "reject request with delete" in { org.apache.openwhisk.utils .retry( { afterEach() implicit val tid = transid() Delete(s"$collectionPath/${ActivationId.generate()}") ~> Route.seal(routes(creds)) ~> check { status should be(MethodNotAllowed) } }, retriesOnTestFailures, Some(waitBeforeRetry), Some( s"${this.getClass.getName} > Activations API should reject request with delete not successful, retrying..")) } it should "report proper error when record is corrupted on get" in { org.apache.openwhisk.utils .retry( { afterEach() implicit val tid = transid() //A bad activation type which breaks the deserialization by removing the subject entry class BadActivation(override val namespace: EntityPath, override val name: EntityName, override val subject: Subject, override val activationId: ActivationId, override val start: Instant, override val end: Instant) extends WhiskActivation(namespace, name, subject, activationId, start, end) { override def toJson = { val json = super.toJson JsObject(json.fields - "subject") } } val activation = new BadActivation(namespace, aname(), creds.subject, ActivationId.generate(), Instant.now, Instant.now) storeActivation(activation, context) Get(s"$collectionPath/${activation.activationId}") ~> Route.seal(routes(creds)) ~> check { status should be(InternalServerError) responseAs[ErrorResponse].error shouldBe Messages.corruptedEntity } }, retriesOnTestFailures, Some(waitBeforeRetry), Some( s"${this.getClass.getName} > Activations API should report proper error when record is corrupted on get not successful, retrying..")) } }
RSulzmann/openwhisk
tests/src/test/scala/org/apache/openwhisk/core/controller/test/ActivationsApiTests.scala
Scala
apache-2.0
35,813
package com.nyavro.manythanks.ws

import org.scalatest._
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.time.{Millis, Seconds, Span}

import scala.concurrent.ExecutionContext.Implicits.global

class UserRepositoryTest extends WordSpec with Matchers with ScalaFutures with Config {

  implicit val defaultPatience = PatienceConfig(timeout = Span(5, Seconds), interval = Span(500, Millis))

  "User repository" ignore {

    "create user" in {
      whenReady(
        for (
          created <- UserRepository.create(UserEntity(None, "user", "1POzF21mPfK", "123-321"));
          remove <- UserRepository.delete(created.id.get)
        ) yield created
      ) { result =>
        result.id.isDefined should === (true)
      }
    }

    "load user by id" in {
      whenReady(
        for (
          created <- UserRepository.create(UserEntity(None, "user3", "F21mPfK1POz", "4123-32"));
          loaded <- UserRepository.load(created.id.get);
          remove <- UserRepository.delete(created.id.get)
        ) yield loaded
      ) { result =>
        result.isDefined should === (true)
        result.get.login should === ("user3")
        result.get.gcmToken should === ("F21mPfK1POz")
        result.get.extId should === ("4123-32")
      }
    }
  }
}
nyavro/manythanks
repository/src/test/scala/com/nyavro/manythanks/ws/UserRepositoryTest.scala
Scala
apache-2.0
1,278
package redis.api

import akka.util.ByteString

trait Aggregate

case object SUM extends Aggregate

case object MIN extends Aggregate

case object MAX extends Aggregate

case class Limit(value: Double, inclusive: Boolean = true) {
  def toByteString: ByteString = ByteString(if (inclusive) value.toString else "(" + value.toString)
}

trait Order

case object ASC extends Order

case object DESC extends Order

case class LimitOffsetCount(offset: Long, count: Long) {
  def toByteString: Seq[ByteString] = Seq(ByteString("LIMIT"), ByteString(offset.toString), ByteString(count.toString))
}

sealed trait BitOperator

case object AND extends BitOperator

case object OR extends BitOperator

case object XOR extends BitOperator

case object NOT extends BitOperator

sealed trait ListPivot

case object AFTER extends ListPivot

case object BEFORE extends ListPivot

sealed trait ShutdownModifier

case object SAVE extends ShutdownModifier

case object NOSAVE extends ShutdownModifier
beni55/rediscala
src/main/scala/redis/api/api.scala
Scala
apache-2.0
984
/**
 * Copyright 2015, deepsense.io
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.deepsense.docgen

import java.util.Scanner

/**
 * This app generates Seahorse documentation for Spark ported operations.
 * It is capable of generating redirects and documentation pages automatically.
 * To achieve that, it uses CatalogRecorder and reflection.
 *
 * Link generation for operation menu and operation subpage is semi-automatic. The app
 * prints links to System.out and the user is responsible for inserting them
 * into proper places in operations.md and operationsmenu.html files.
 *
 * The typical use case consists of:
 * 1. Running the app in link generation mode for new operations.
 * 2. Manually inserting generated links to operations.md and operationsmenu.html files.
 * 3. Running the app in documentation page & redirect creation mode (for new operations).
 *
 * Order of operations is important, because link generation recognizes new operations
 * by non-existence of their documentation pages.
 */
object SparkOperationsDocGenerator
  extends DocPageCreator
  with SparkOperationsExtractor
  with RedirectCreator
  with LinkPrinter {

  val sparkVersion = org.apache.spark.SPARK_VERSION
  val scalaDocPrefix = s"https://spark.apache.org/docs/$sparkVersion/api/scala/index.html#"

  // scalastyle:off println
  def main(args: Array[String]): Unit = {
    val sc = new Scanner(System.in)
    println("==========================")
    println("= Seahorse doc generator =")
    println("==========================")
    println
    println("What do you want to do?")
    println("[P]rint links to new Spark operations")
    println("[C]reate documentation pages and redirects for Spark operations")
    print("> ")
    sc.nextLine().toLowerCase match {
      case "p" =>
        println("Do you want to print [A]ll links or only links to [N]ew operations?")
        print("> ")
        sc.nextLine().toLowerCase match {
          case "a" => printLinks(true)
          case "n" => printLinks(false)
          case _ => wrongInputExit()
        }
      case "c" =>
        println("Do you want to [R]ecreate all pages and redirects or [U]pdate for new operations?")
        print("> ")
        sc.nextLine().toLowerCase match {
          case "r" => createDocPagesAndRedirects(true)
          case "u" => createDocPagesAndRedirects(false)
          case _ => wrongInputExit()
        }
      case _ => wrongInputExit()
    }
  }

  private def wrongInputExit(): Unit = {
    println("Unexpected input. Exiting...")
    System.exit(1)
  }

  private def printLinks(printAll: Boolean): Unit = {
    val sparkOperationsByCategory = mapByCategory(sparkOperations())
    printOperationSiteLinks(sparkOperationsByCategory, printAll)
    printOperationMenuLinks(sparkOperationsByCategory, printAll)
  }

  private def createDocPagesAndRedirects(forceUpdate: Boolean): Unit = {
    val sparkOps = sparkOperations()
    val redirectCount = createRedirects(sparkOps, forceUpdate)
    val pageCount = createDocPages(sparkOps, forceUpdate)

    if(redirectCount == 0) {
      println("No redirects updated.")
    } else {
      println(s"Updated $redirectCount redirects.")
    }
    if(pageCount == 0) {
      println("No pages updated.")
    } else {
      println(s"Updated $pageCount pages.")
    }
  }
  // scalastyle:on println
}
deepsense-io/seahorse-workflow-executor
docgen/src/main/scala/io/deepsense/docgen/SparkOperationsDocGenerator.scala
Scala
apache-2.0
3,869
/**
 * MIT License
 *
 * Copyright (c) 2016-2018 James Sherwood-Jones <[email protected]>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
package com.jsherz.luskydive.core

import java.sql.Date
import java.util.UUID

/**
 * A (typically static line) course, booked on a particular day.
 */
case class Course(uuid: UUID,
                  date: Date,
                  organiserUuid: UUID,
                  secondaryOrganiserUuid: Option[UUID],
                  status: Int)

/**
 * Possible states that a course can be in.
 */
object CourseStatuses {
  val PENDING = 0
  val CONFIRMED = 1
}
jSherz/lsd-members
backend/src/main/scala/com/jsherz/luskydive/core/Course.scala
Scala
mit
1,594
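A minimal construction sketch for the Course model above; it is not taken from the jSherz/lsd-members repository, and the UUIDs, date, and status values are placeholders chosen only to illustrate the fields.

import java.sql.Date
import java.util.UUID

import com.jsherz.luskydive.core.{Course, CourseStatuses}

object CourseExample extends App {
  // Build a pending course with a single organiser and no secondary organiser.
  val course = Course(
    uuid = UUID.randomUUID(),
    date = Date.valueOf("2018-06-01"),
    organiserUuid = UUID.randomUUID(),
    secondaryOrganiserUuid = None,
    status = CourseStatuses.PENDING
  )

  println(course.status == CourseStatuses.CONFIRMED) // false until the course is confirmed
}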
/*
 * Copyright 2011-2018 GatlingCorp (http://gatling.io)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.gatling.core.stats.writer

object DataWriterType {

  private val AllTypes = Seq(ConsoleDataWriterType, FileDataWriterType, GraphiteDataWriterType, LeakReporterDataWriterType)
    .map(t => t.name -> t).toMap

  def findByName(name: String): Option[DataWriterType] = AllTypes.get(name)
}

sealed abstract class DataWriterType(val name: String, val className: String)

object ConsoleDataWriterType extends DataWriterType("console", "io.gatling.core.stats.writer.ConsoleDataWriter")

object FileDataWriterType extends DataWriterType("file", "io.gatling.core.stats.writer.LogFileDataWriter")

object GraphiteDataWriterType extends DataWriterType("graphite", "io.gatling.metrics.GraphiteDataWriter")

object LeakReporterDataWriterType extends DataWriterType("leak", "io.gatling.core.stats.writer.LeakReporterDataWriter")
wiacekm/gatling
gatling-core/src/main/scala/io/gatling/core/stats/writer/DataWriterType.scala
Scala
apache-2.0
1,442
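A small lookup sketch against the DataWriterType registry above; it assumes only that the Gatling file shown here is on the classpath, and the queried names are illustrative.

import io.gatling.core.stats.writer.DataWriterType

object DataWriterTypeLookup extends App {
  // Known names resolve to their singleton type; unknown names yield None.
  DataWriterType.findByName("console").foreach(t => println(s"${t.name} -> ${t.className}"))
  println(DataWriterType.findByName("nosuchwriter").isEmpty) // true
}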
package objsets import common._ import TweetReader._ /** * A class to represent tweets. */ class Tweet(val user: String, val text: String, val retweets: Int) { override def toString: String = "User: " + user + "\\n" + "Text: " + text + " [" + retweets + "]" } /** * This represents a set of objects of type `Tweet` in the form of a binary search * tree. Every branch in the tree has two children (two `TweetSet`s). There is an * invariant which always holds: for every branch `b`, all elements in the left * subtree are smaller than the tweet at `b`. The eleemnts in the right subtree are * larger. * * Note that the above structure requires us to be able to compare two tweets (we * need to be able to say which of two tweets is larger, or if they are equal). In * this implementation, the equality / order of tweets is based on the tweet's text * (see `def incl`). Hence, a `TweetSet` could not contain two tweets with the same * text from different users. * * * The advantage of representing sets as binary search trees is that the elements * of the set can be found quickly. If you want to learn more you can take a look * at the Wikipedia page [1], but this is not necessary in order to solve this * assignment. * * [1] http://en.wikipedia.org/wiki/Binary_search_tree */ abstract class TweetSet { /** * This method takes a predicate and returns a subset of all the elements * in the original set for which the predicate is true. * * Question: Can we implment this method here, or should it remain abstract * and be implemented in the subclasses? */ def filter(p: Tweet => Boolean): TweetSet /** * This is a helper method for `filter` that propagetes the accumulated tweets. */ /** * Returns a new `TweetSet` that is the union of `TweetSet`s `this` and `that`. * * Question: Should we implment this method here, or should it remain abstract * and be implemented in the subclasses? */ def union(that: TweetSet): TweetSet /** * Returns the tweet from this set which has the greatest retweet count. * * Calling `mostRetweeted` on an empty set should throw an exception of * type `java.util.NoSuchElementException`. * * Question: Should we implment this method here, or should it remain abstract * and be implemented in the subclasses? */ def mostRetweeted: Tweet def isEmpty : Boolean /** * Returns a list containing all tweets of this set, sorted by retweet count * in descending order. In other words, the head of the resulting list should * have the highest retweet count. * * Hint: the method `remove` on TweetSet will be very useful. * Question: Should we implment this method here, or should it remain abstract * and be implemented in the subclasses? */ def descendingByRetweet: TweetList /** * The following methods are already implemented */ /** * Returns a new `TweetSet` which contains all elements of this set, and the * the new element `tweet` in case it does not already exist in this set. * * If `this.contains(tweet)`, the current set is returned. */ def incl(tweet: Tweet): TweetSet /** * Returns a new `TweetSet` which excludes `tweet`. */ def remove(tweet: Tweet): TweetSet /** * Tests if `tweet` exists in this `TweetSet`. */ def contains(tweet: Tweet): Boolean /** * This method takes a function and applies it to every element in the set. 
*/ def foreach(f: Tweet => Unit): Unit } class Empty extends TweetSet { def filter(p: Tweet => Boolean): TweetSet = this def mostRetweeted: Tweet = throw new java.util.NoSuchElementException("") def union(that: TweetSet) : TweetSet = that def isEmpty : Boolean = true def descendingByRetweet: TweetList = Nil /** * The following methods are already implemented */ def contains(tweet: Tweet): Boolean = false def incl(tweet: Tweet): TweetSet = new NonEmpty(tweet, new Empty, new Empty) def remove(tweet: Tweet): TweetSet = this def foreach(f: Tweet => Unit): Unit = () } class NonEmpty(elem: Tweet, left: TweetSet, right: TweetSet) extends TweetSet { def filter(p: Tweet => Boolean): TweetSet = { val l = left filter p val r = right filter p if (p(elem)) ((l union r) incl elem) else (l union r) } def mostRetweeted: Tweet = { if (left isEmpty) { if (right isEmpty) elem else { val r = right mostRetweeted; if (r.retweets > elem.retweets) r else elem } } else { val l = left mostRetweeted; val elem_ = if (l.retweets > elem.retweets) l else elem; if (right isEmpty) elem_ else { val r = right mostRetweeted; if (r.retweets > elem_.retweets) r else elem_ } } } def union(that: TweetSet): TweetSet = { val that_ = that remove elem val l = left union that_ (right union l) incl elem } def isEmpty : Boolean = false def descendingByRetweet: TweetList = { val t = mostRetweeted new Cons(t, (this remove t) descendingByRetweet) } /** * The following methods are already implemented */ def contains(x: Tweet): Boolean = if (x.text < elem.text) left.contains(x) else if (elem.text < x.text) right.contains(x) else true def incl(x: Tweet): TweetSet = { if (x.text < elem.text) new NonEmpty(elem, left.incl(x), right) else if (elem.text < x.text) new NonEmpty(elem, left, right.incl(x)) else this } def remove(tw: Tweet): TweetSet = if (tw.text < elem.text) new NonEmpty(elem, left.remove(tw), right) else if (elem.text < tw.text) new NonEmpty(elem, left, right.remove(tw)) else left.union(right) def foreach(f: Tweet => Unit): Unit = { f(elem) left.foreach(f) right.foreach(f) } } trait TweetList { def head: Tweet def tail: TweetList def isEmpty: Boolean def foreach(f: Tweet => Unit): Unit = if (!isEmpty) { f(head) tail.foreach(f) } } object Nil extends TweetList { def head = throw new java.util.NoSuchElementException("head of EmptyList") def tail = throw new java.util.NoSuchElementException("tail of EmptyList") def isEmpty = true } class Cons(val head: Tweet, val tail: TweetList) extends TweetList { def isEmpty = false } object GoogleVsApple { val google = List("android", "Android", "galaxy", "Galaxy", "nexus", "Nexus") val apple = List("ios", "iOS", "iphone", "iPhone", "ipad", "iPad") lazy val googleTweets: TweetSet = this keywordListAux(google) lazy val appleTweets: TweetSet = this keywordListAux(apple) def keywordListAux(list : List[String]) : TweetSet = TweetReader.allTweets filter((t) => list exists ((kw) => t.text.contains(kw))) /** * A list of all tweets mentioning a keyword from either apple or google, * sorted by the number of retweets. */ lazy val trending: TweetList = (googleTweets union appleTweets) descendingByRetweet } object Main extends App { // Print the trending tweets GoogleVsApple.trending foreach println }
shouya/thinking-dumps
progfun/week3/objsets/src/main/scala/objsets/TweetSet.scala
Scala
mit
7,032
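A usage sketch for the TweetSet API above, assuming the objsets sources (including the common and TweetReader dependencies they import) compile as in the coursework repository; the tweets themselves are made up.

import objsets.{Empty, Tweet, TweetList, TweetSet}

object TweetSetExample extends App {
  // Insertion keys on tweet text, so three distinct texts give three nodes.
  val tweets: TweetSet = new Empty()
    .incl(new Tweet("alice", "scala is fun", 10))
    .incl(new Tweet("bob", "ios release day", 25))
    .incl(new Tweet("carol", "android update", 5))

  // Keep the popular tweets, then list them by descending retweet count.
  val popular: TweetList = tweets.filter(_.retweets >= 10).descendingByRetweet

  popular.foreach(t => println(t.user + ": " + t.retweets)) // bob: 25, then alice: 10
}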
package com.teambytes.inflatable.raft.cluster import akka.testkit.ImplicitSender import concurrent.duration._ import akka.cluster.ClusterEvent.{CurrentClusterState, MemberUp} import akka.cluster.Cluster import akka.actor.{RootActorPath, Props} import akka.util.Timeout import clusters._ import com.teambytes.inflatable.raft.protocol._ import com.teambytes.inflatable.raft.{RaftClientActor, ClusterConfiguration} import com.teambytes.inflatable.raft.example.WordConcatRaftActor import org.scalatest.time.{Millis, Span, Seconds} import com.teambytes.inflatable.raft.example.protocol._ abstract class ClusterRaftClientSpec extends RaftClusterSpec(ThreeNodesCluster) with ImplicitSender { implicit val defaultTimeout = { import concurrent.duration._ Timeout(5.seconds) } override implicit val patienceConfig = PatienceConfig( timeout = scaled(Span(2, Seconds)), interval = scaled(Span(1, Millis)) ) def initialParticipants = 3 behavior of s"${classOf[RaftClientActor].getSimpleName}" import ThreeNodesCluster._ it should "interact with cluster raft actors" in within(20.seconds) { Cluster(system).subscribe(testActor, classOf[MemberUp]) expectMsgClass(classOf[CurrentClusterState]) val firstAddress = node(first).address val secondAddress = node(second).address val thirdAddress = node(third).address Cluster(system) join firstAddress (1 to initialParticipants) map { idx => runOn(nodes(idx)) { val raftActor = system.actorOf(Props[WordConcatRaftActor], s"impl-raft-member-$idx") system.actorOf(ClusterRaftActor.props(raftActor, initialParticipants), s"raft-member-$idx") } } receiveN(3).collect { case MemberUp(m) => m.address }.toSet should be( Set(firstAddress, secondAddress, thirdAddress) ) Cluster(system).unsubscribe(testActor) testConductor.enter("all-nodes-up") val member1 = selectActorRef(firstAddress, 1) val member2 = selectActorRef(secondAddress, 2) val member3 = selectActorRef(thirdAddress, 3) val members = member1 :: member2 :: member3 :: Nil awaitLeaderElected(members) testConductor.enter("raft-up") // interact with cluster from each node runOn(second) { val client = system.actorOf(RaftClientActor.props( RootActorPath(firstAddress) / "user" / "raft-member-*", RootActorPath(secondAddress) / "user" / "raft-member-*", RootActorPath(thirdAddress) / "user" / "raft-member-*" ), "raft-client") client ! AppendWord("I") client ! AppendWord("like") client ! AppendWord("tea") client ! GetWords expectMsg("I") expectMsg("like") expectMsg("tea") expectMsg(List("I", "like", "tea")) } testConductor.enter("client-done") } } class ClusterRaftClientSpecMultiJvmNode1 extends ClusterRaftClientSpec class ClusterRaftClientSpecMultiJvmNode2 extends ClusterRaftClientSpec class ClusterRaftClientSpecMultiJvmNode3 extends ClusterRaftClientSpec
grahamar/inflatable
src/multi-jvm/scala/com/teambytes/inflatable/raft/cluster/ClusterRaftClientSpec.scala
Scala
apache-2.0
3,021
package com.jiffey.slick.additions

import scala.slick.session.{Database => SlickDatabase}

trait DbInfo {
  def database: SlickDatabase
  def driverName: String
}
dre1080/slick-additions
src/main/scala/com/jiffey/slick/additions/DbInfo.scala
Scala
apache-2.0
166
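A minimal implementation sketch of the DbInfo trait above; the H2 in-memory URL and driver name are placeholders, not values from the slick-additions project.

import com.jiffey.slick.additions.DbInfo
import scala.slick.session.{Database => SlickDatabase}

object InMemoryDbInfo extends DbInfo {
  // Hypothetical wiring: any JDBC URL/driver pair would do here.
  def database: SlickDatabase = SlickDatabase.forURL("jdbc:h2:mem:test", driver = "org.h2.Driver")
  def driverName: String = "org.h2.Driver"
}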
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.plan.stream.sql import org.apache.flink.api.scala._ import org.apache.flink.table.functions.TableFunction import org.apache.flink.table.plan.stream.sql.RelTimeIndicatorConverterTest.TableFunc import org.apache.flink.table.util.TableTestBase import org.junit.Test import java.sql.Timestamp /** * Tests for [[org.apache.flink.table.calcite.RelTimeIndicatorConverter]]. */ class RelTimeIndicatorConverterTest extends TableTestBase { private val util = streamTestUtil() util.addDataStream[(Long, Long, Int)]("MyTable", 'rowtime, 'long, 'int, 'proctime) util.addDataStream[(Long, Long, Int)]("MyTable1", 'rowtime, 'long, 'int) util.addDataStream[(Long, Int)]("MyTable2", 'long, 'int, 'proctime) @Test def testSimpleMaterialization(): Unit = { val sqlQuery = """ |SELECT rowtime FROM | (SELECT FLOOR(rowtime TO DAY) AS rowtime, long FROM MyTable WHERE long > 0) t """.stripMargin util.verifyPlan(sqlQuery) } @Test def testSelectAll(): Unit = { util.verifyPlan("SELECT * FROM MyTable") } @Test def testFilteringOnRowtime(): Unit = { val sqlQuery = "SELECT rowtime FROM MyTable1 WHERE rowtime > CAST('1990-12-02 12:11:11' AS TIMESTAMP)" util.verifyPlan(sqlQuery) } @Test def testGroupingOnRowtime(): Unit = { util.verifyPlan("SELECT COUNT(long) FROM MyTable GROUP BY rowtime") } @Test def testAggregationOnRowtime(): Unit = { util.verifyPlan("SELECT MIN(rowtime) FROM MyTable1 GROUP BY long") } @Test def testGroupingOnProctime(): Unit = { util.verifyPlan("SELECT COUNT(long) FROM MyTable2 GROUP BY proctime") } @Test def testAggregationOnProctime(): Unit = { util.verifyPlan("SELECT MIN(proctime) FROM MyTable2 GROUP BY long") } @Test def testTableFunction(): Unit = { util.addFunction("tableFunc", new TableFunc) val sqlQuery = """ |SELECT rowtime, proctime, s |FROM MyTable, LATERAL TABLE(tableFunc(rowtime, proctime, '')) AS T(s) """.stripMargin util.verifyPlan(sqlQuery) } @Test def testUnion(): Unit = { util.verifyPlan("SELECT rowtime FROM MyTable1 UNION ALL SELECT rowtime FROM MyTable1") } @Test def testWindow(): Unit = { val sqlQuery = """ |SELECT TUMBLE_END(rowtime, INTERVAL '10' SECOND), | long, | SUM(`int`) |FROM MyTable1 | GROUP BY TUMBLE(rowtime, INTERVAL '10' SECOND), long """.stripMargin util.verifyPlan(sqlQuery) } @Test def testWindow2(): Unit = { val sqlQuery = """ |SELECT TUMBLE_END(rowtime, INTERVAL '0.1' SECOND) AS `rowtime`, | `long`, | SUM(`int`) |FROM MyTable1 | GROUP BY `long`, TUMBLE(rowtime, INTERVAL '0.1' SECOND) | """.stripMargin util.verifyPlan(sqlQuery) } @Test def testMultiWindow(): Unit = { val sqlQuery = """ |SELECT TUMBLE_END(newrowtime, INTERVAL '30' SECOND), long, sum(`int`) FROM ( | SELECT | TUMBLE_ROWTIME(rowtime, INTERVAL '10' SECOND) AS newrowtime, | long, | sum(`int`) as `int` | FROM 
MyTable1 | GROUP BY TUMBLE(rowtime, INTERVAL '10' SECOND), long |) t GROUP BY TUMBLE(newrowtime, INTERVAL '30' SECOND), long """.stripMargin util.verifyPlan(sqlQuery) } @Test def testWindowWithAggregationOnRowtime(): Unit = { val sqlQuery = """ |SELECT MIN(rowtime), long FROM MyTable1 |GROUP BY long, TUMBLE(rowtime, INTERVAL '0.1' SECOND) """.stripMargin util.verifyPlan(sqlQuery) } @Test def testWindowWithAggregationOnRowtimeWithHaving(): Unit = { val result = """ |SELECT MIN(rowtime), long FROM MyTable1 |GROUP BY long, TUMBLE(rowtime, INTERVAL '1' SECOND) |HAVING QUARTER(TUMBLE_END(rowtime, INTERVAL '1' SECOND)) = 1 """.stripMargin util.verifyPlan(result) } // TODO add temporal table join case } object RelTimeIndicatorConverterTest { class TableFunc extends TableFunction[String] { val t = new Timestamp(0L) def eval(time1: Long, time2: Timestamp, string: String): Unit = { collect(time1.toString + time2.after(t) + string) } } }
shaoxuan-wang/flink
flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/sql/RelTimeIndicatorConverterTest.scala
Scala
apache-2.0
5,118
package org.thp.cortex.controllers

import javax.inject.{ Inject, Singleton }

import play.api.http.{ FileMimeTypes, HttpErrorHandler }
import play.api.mvc.{ Action, AnyContent }

import controllers.{ Assets, AssetsMetadata, ExternalAssets }
import play.api.Environment

import scala.concurrent.ExecutionContext

trait AssetCtrl {
  def get(file: String): Action[AnyContent]
}

@Singleton
class AssetCtrlProd @Inject() (errorHandler: HttpErrorHandler, meta: AssetsMetadata) extends Assets(errorHandler, meta) with AssetCtrl {
  def get(file: String): Action[AnyContent] = at("/www", file)
}

@Singleton
class AssetCtrlDev @Inject() (environment: Environment)(implicit ec: ExecutionContext, fileMimeTypes: FileMimeTypes) extends ExternalAssets(environment) with AssetCtrl {
  def get(file: String): Action[AnyContent] = at("www/dist", file)
}
CERT-BDF/Cortex
app/org/thp/cortex/controllers/AssetCtrl.scala
Scala
agpl-3.0
839
package tryp import scala.concurrent.duration._ import org.specs2.matcher.{Matcher, AnyMatchers, MatchResult, ValueChecksBase, ValueCheck, Expectable, TrypExpectable} import org.specs2.execute.NoDetails import org.specs2.SpecificationLike object PerformableMatcher { def result[A, B, F[_], S <: F[A]](check: ValueCheck[B], e: Expectable[S], a: Either[Throwable, B]) = a match { case l @ Left(t) => val trace = t.getStackTrace.toList Matcher.result(false, "performable succeeded", s"performable failed with $t", e, trace, NoDetails) case r @ Right(v) => Matcher.result(check.check(v), e) } } final class PerformableMatcher[A, F[_]](check: ValueCheck[A]) (implicit pa: Performable[F], timeout: FiniteDuration) extends Matcher[F[A]] { def apply[S <: F[A]](e: Expectable[S]): MatchResult[S] = { PerformableMatcher.result[A, A, F, S](check, e, pa.perform(e.value)) } } final class PerformableStreamMatcher[A, F[_]](check: ValueCheck[Vector[A]]) (implicit pa: Performable[F], timeout: FiniteDuration) extends Matcher[F[A]] { def apply[S <: F[A]](e: Expectable[S]): MatchResult[S] = { PerformableMatcher.result[A, Vector[A], F, S](check, e, pa.performAll(e.value)) } } final class PerformableExpectable[A, F[_]](fa: => F[A], retries: Int) (implicit pa: Performable[F], timeout: FiniteDuration) extends TrypExpectable(() => fa) with ValueChecksBase { def perform = pa.perform(value) def pm(m: => Matcher[A]) = new PerformableMatcher[A, F](m) def computes(m: => Matcher[A]) = applyMatcher(pm(m)) def computes_==(v: => A) = computes(AnyMatchers.be_==(v)) def computesF(f: => A => Matcher[A]): MatchResult[F[A]] = ??? def willCompute(m: => Matcher[A]): MatchResult[F[A]] = { applyMatcher(pm(m).eventually(retries, 100.millis)) } val will = willCompute _ def willCompute_==(v: => A) = { willCompute(AnyMatchers.be_==(v)) } val will_== = willCompute_== _ def psm(m: => Matcher[Vector[A]]) = new PerformableStreamMatcher[A, F](m) def streams(m: => Matcher[Vector[A]]) = applyMatcher(psm(m)) def streams_==(v: => Vector[A]) = streams(AnyMatchers.be_==(v)) } trait Matchers extends SpecificationLike { implicit def performableTimeout = 5.seconds def retries = 40 // triggers compiler bug type Aux[FA, F[_]] = Unapply[Performable, FA] { type M[A] = F[A] } implicit def ToPerformableExpectable[A, F[_]: Performable](fa: => F[A]): PerformableExpectable[A, F] = new PerformableExpectable[A, F](fa, retries) implicit def ToPerformableExpectableU[FA, F[_]](fa: => FA) (implicit U: Aux[FA, F], pf: Performable[F]): PerformableExpectable[U.A, U.M] = new PerformableExpectable[U.A, Aux[FA, F]#M](U.subst(fa), retries) implicit def streamToPerformableExpectable[A](fa: => Stream[IO, A]) = new PerformableExpectable[A, Stream[IO, ?]](fa, retries) }
tek/pulsar
unit/main/src/matchers.scala
Scala
mit
2,887
/* * Copyright 2011-2019 Asakusa Framework Team. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.asakusafw.spark.compiler package graph package branching import org.objectweb.asm.Opcodes import org.objectweb.asm.signature.SignatureVisitor import com.asakusafw.lang.compiler.planning.SubPlan import com.asakusafw.spark.runtime.rdd.BranchKey import com.asakusafw.spark.tools.asm._ import com.asakusafw.spark.tools.asm.MethodBuilder._ import com.asakusafw.spark.tools.asm4s._ trait BranchKeysField extends ClassBuilder { implicit def context: BranchKeysField.Context def subplanOutputs: Seq[SubPlan.Output] override def defFields(fieldDef: FieldDef): Unit = { super.defFields(fieldDef) fieldDef.newField( Opcodes.ACC_PRIVATE | Opcodes.ACC_TRANSIENT, "branchKeys", classOf[Set[_]].asType, new TypeSignatureBuilder() .newClassType(classOf[Set[_]].asType) { _.newTypeArgument(SignatureVisitor.INSTANCEOF, classOf[BranchKey].asType) }) } override def defMethods(methodDef: MethodDef): Unit = { super.defMethods(methodDef) methodDef.newMethod("branchKeys", classOf[Set[_]].asType, Seq.empty, new MethodSignatureBuilder() .newReturnType { _.newClassType(classOf[Set[_]].asType) { _.newTypeArgument(SignatureVisitor.INSTANCEOF, classOf[BranchKey].asType) } }) { implicit mb => val thisVar :: _ = mb.argVars thisVar.push().getField("branchKeys", classOf[Set[_]].asType).unlessNotNull { thisVar.push().putField("branchKeys", initBranchKeys()) } `return`(thisVar.push().getField("branchKeys", classOf[Set[_]].asType)) } } def getBranchKeysField()(implicit mb: MethodBuilder): Stack = { val thisVar :: _ = mb.argVars thisVar.push().invokeV("branchKeys", classOf[Set[_]].asType) } private def initBranchKeys()(implicit mb: MethodBuilder): Stack = { buildSet { builder => subplanOutputs.map(_.getOperator).sortBy(_.getSerialNumber).foreach { marker => builder += context.branchKeys.getField(marker) } } } } object BranchKeysField { trait Context { def branchKeys: BranchKeys } }
ueshin/asakusafw-spark
compiler/src/main/scala/com/asakusafw/spark/compiler/graph/branching/BranchKeysField.scala
Scala
apache-2.0
2,736
package com.twitter.finagle.thrift.transport.netty3

import com.twitter.finagle.thrift.ThriftClientFramedPipelineFactory
import org.apache.thrift.protocol.TProtocolFactory
import org.jboss.netty.channel.{ChannelPipeline, ChannelPipelineFactory}

private[finagle] case class ThriftClientBufferedPipelineFactory(protocolFactory: TProtocolFactory)
    extends ChannelPipelineFactory {

  def getPipeline(): ChannelPipeline = {
    val pipeline = ThriftClientFramedPipelineFactory.getPipeline()
    pipeline.replace(
      "thriftFrameCodec",
      "thriftBufferDecoder",
      new ThriftBufferDecoder(protocolFactory))
    pipeline
  }
}
BuoyantIO/finagle
finagle-thrift/src/main/scala/com/twitter/finagle/thrift/transport/netty3/ThriftClientBufferedPipelineFactory.scala
Scala
apache-2.0
631
package drt.shared

import java.util.UUID

import drt.shared.CrunchApi.MillisSinceEpoch
import drt.shared.Queues.Queue
import drt.shared.Terminals.Terminal
import upickle.default.{ReadWriter, macroRW}

trait HasExpireables[A] {
  def purgeExpired(expireBefore: () => SDateLike): A
}

trait Expireable {
  def isExpired(expireBeforeMillis: MillisSinceEpoch): Boolean
}

case class StaffMovement(terminal: Terminal,
                         reason: String,
                         time: MilliDate,
                         delta: Int,
                         uUID: UUID,
                         queue: Option[Queue] = None,
                         createdBy: Option[String]) extends Expireable {
  def isExpired(expiresBeforeMillis: MillisSinceEpoch): Boolean = time.millisSinceEpoch < expiresBeforeMillis
}

object StaffMovement {
  implicit val terminalRw: ReadWriter[Terminal] = drt.shared.Terminals.Terminal.rw
  implicit val queueRw: ReadWriter[Queue] = drt.shared.Queues.Queue.rw

  implicit val rw: ReadWriter[StaffMovement] = macroRW
}

case class StaffMovementList(movements: Seq[StaffMovement])
UKHomeOffice/drt-scalajs-spa-exploration
shared/src/main/scala/drt/shared/StaffMovement.scala
Scala
apache-2.0
1,107
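A usage sketch for the isExpired check above. Terminal("T1") and MilliDate(...) are assumed constructors from elsewhere in the drt codebase and are not defined in this file; the times and reason are placeholders.

import java.util.UUID

import drt.shared.{MilliDate, StaffMovement}
import drt.shared.Terminals.Terminal

object StaffMovementExample extends App {
  val movement = StaffMovement(
    terminal = Terminal("T1"),  // assumed factory for a terminal
    reason = "Briefing",
    time = MilliDate(1000000L), // assumed millis-since-epoch wrapper
    delta = -2,
    uUID = UUID.randomUUID(),
    createdBy = Some("rostering")
  )

  // A movement expires once its time falls before the purge cut-off.
  println(movement.isExpired(2000000L)) // true
}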
package extracells.item

import extracells.api.{ECApi, IWirelessGasTermHandler}
import extracells.item.ItemWirelessTerminalFluid.isInCreativeTab
import extracells.models.ModelManager
import net.minecraft.creativetab.CreativeTabs
import net.minecraft.entity.player.EntityPlayer
import net.minecraft.item.{Item, ItemStack}
import net.minecraft.util.{ActionResult, EnumActionResult, EnumHand}
import net.minecraft.world.World
import net.minecraftforge.fml.relauncher.{Side, SideOnly}

object ItemWirelessTerminalGas extends ItemECBase with IWirelessGasTermHandler with WirelessTermBase {

  def THIS = this

  ECApi.instance.registerWirelessTermHandler(this)

  override def getTranslationKey(itemStack: ItemStack): String =
    super.getTranslationKey(itemStack).replace("item.extracells", "extracells.item")

  def isItemNormalWirelessTermToo(is: ItemStack): Boolean = false

  override def onItemRightClick(world: World, entityPlayer: EntityPlayer, hand: EnumHand): ActionResult[ItemStack] =
    new ActionResult(EnumActionResult.SUCCESS, ECApi.instance.openWirelessGasTerminal(entityPlayer, hand, world))

  @SideOnly(Side.CLIENT)
  override def registerModel(item: Item, manager: ModelManager) =
    manager.registerItemModel(item, 0, "terminals/fluid_wireless")

  override def isInCreativeTab2(targetTab: CreativeTabs): Boolean = isInCreativeTab(targetTab)
}
ExtraCells/ExtraCells2
src/main/scala/extracells/item/ItemWirelessTerminalGas.scala
Scala
mit
1,367
package msgpack4z

import scalaprops._
import UnionGen._

object MsgpackUnionSpec extends Scalaprops {

  override def param = super.param.copy(
    minSuccessful = 10000
  )

  val orderLaws = scalazlaws.order.all[MsgpackUnion]
}
msgpack4z/msgpack4z-core
src/test/scala/msgpack4z/MsgpackUnionSpec.scala
Scala
mit
238
package aecor.example.transaction

import aecor.example.account.AccountId
import aecor.example.common.Amount
import aecor.example.transaction.TransactionRoute.ApiResult
import aecor.example.transaction.transaction.Transactions
import cats.effect.{ Concurrent, Timer }
import cats.implicits._

import scala.concurrent.duration._

final class DefaultTransactionService[F[_]](transactions: Transactions[F])(
  implicit F: Concurrent[F],
  timer: Timer[F]
) extends TransactionService[F] {

  def authorizePayment(transactionId: TransactionId,
                       from: From[AccountId],
                       to: To[AccountId],
                       amount: Amount): F[TransactionRoute.ApiResult] =
    transactions(transactionId)
      .create(from, to, amount)
      .flatMap { _ =>
        val getTransaction = transactions(transactionId).getInfo
          .flatMap {
            case Right(t) => t.pure[F]
            case _ =>
              F.raiseError[Algebra.TransactionInfo](new IllegalStateException("Something went bad"))
          }
        def loop: F[Boolean] = getTransaction.flatMap {
          case Algebra.TransactionInfo(_, _, _, Some(value)) => value.pure[F]
          case _                                             => timer.sleep(10.millis) >> loop
        }
        Concurrent.timeout(loop, 10.seconds)
      }
      .map { succeeded =>
        if (succeeded) {
          ApiResult.Authorized
        } else {
          ApiResult.Declined("You suck")
        }
      }
}

object DefaultTransactionService {
  def apply[F[_]](transactions: Transactions[F])(implicit F: Concurrent[F],
                                                 timer: Timer[F]): TransactionService[F] =
    new DefaultTransactionService[F](transactions)
}
notxcain/aecor
modules/example/src/main/scala/aecor/example/transaction/DefaultTransactionService.scala
Scala
mit
1,753
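The service above settles a payment by polling getInfo every 10 ms until the transaction reports a result, and bounds the wait with Concurrent.timeout. Below is a stripped-down sketch of that retry loop under cats-effect 2, with a hypothetical check effect standing in for the transaction lookup; it is not code from the aecor repository.

import cats.effect.{ Concurrent, ContextShift, IO, Timer }
import cats.implicits._

import scala.concurrent.ExecutionContext
import scala.concurrent.duration._

object PollUntilSettled {
  // Poll `check` every 10 ms until it yields a value, failing after `deadline`.
  def loop[F[_], A](check: F[Option[A]], deadline: FiniteDuration)
                   (implicit F: Concurrent[F], timer: Timer[F]): F[A] = {
    def go: F[A] = check.flatMap {
      case Some(a) => a.pure[F]
      case None    => timer.sleep(10.millis) >> go
    }
    Concurrent.timeout(go, deadline)
  }

  def main(args: Array[String]): Unit = {
    implicit val cs: ContextShift[IO] = IO.contextShift(ExecutionContext.global)
    implicit val timer: Timer[IO]     = IO.timer(ExecutionContext.global)

    // Hypothetical check that succeeds immediately.
    val result = loop(IO.pure(Option(true)), 1.second).unsafeRunSync()
    println(result) // true
  }
}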
package quizleague.web.model

import scala.scalajs.js
import scala.scalajs.js.annotation.ScalaJSDefined

class Venue(
  val id: String,
  val name: String,
  val address: String,
  val phone: String,
  val email: String,
  val website: String,
  val imageURL: String,
  val retired: Boolean = false
) extends Model

object Venue {
  def apply(
    id: String,
    name: String,
    address: String,
    phone: String,
    email: String,
    website: String,
    imageURL: String,
    retired: Boolean = false) = new Venue(id, name, address, phone, email, website, imageURL, retired)
}
gumdrop/quizleague-maintain
js/src/main/scala/quizleague/web/model/Venue.scala
Scala
mit
585
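A construction sketch for the Venue model above via its companion apply. All field values are placeholders, and since Venue is a Scala.js model the sketch assumes a Scala.js build with the Model trait from the same project available.

import quizleague.web.model.Venue

object VenueExample extends App {
  val venue = Venue(
    id = "v-001",
    name = "The Red Lion",
    address = "1 High Street",
    phone = "0161 000 0000",
    email = "[email protected]",
    website = "https://example.org",
    imageURL = ""
  )

  // retired defaults to false, so a newly created venue is active.
  println(venue.retired) // false
}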
/*
Copyright 2012 Twitter, Inc.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.scalding.serialization

import com.twitter.chill.{ Externalizer => ChillExtern }
import com.esotericsoftware.kryo.DefaultSerializer
import com.esotericsoftware.kryo.serializers.JavaSerializer
import com.twitter.chill.config.ScalaMapConfig

/**
 * We need to control the Kryo created
 */
object Externalizer {
  def apply[T](t: T): Externalizer[T] = {
    val e = new Externalizer[T]
    e.set(t)
    e
  }
}

@DefaultSerializer(classOf[JavaSerializer])
class Externalizer[T] extends ChillExtern[T] {
  protected override def kryo =
    new KryoHadoop(ScalaMapConfig(Map("scalding.kryo.setreferences" -> "true")))
}
vidyar/twitterscalding
scalding-core/src/main/scala/com/twitter/scalding/serialization/Externalizer.scala
Scala
apache-2.0
1,192
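A construction sketch for the scalding Externalizer above; the wrapped value is arbitrary, and get/set are assumed to come from the underlying chill Externalizer that this class extends.

import com.twitter.scalding.serialization.Externalizer

object ExternalizerExample extends App {
  // Wrap a value so it can survive Hadoop/Kryo serialization boundaries.
  val boxed = Externalizer(Map("threshold" -> 0.5))

  // get is inherited from chill's Externalizer and returns the wrapped value.
  println(boxed.get) // Map(threshold -> 0.5)
}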
package lang.lightweightjava.ast.statement

import lang.lightweightjava.ast._
import name.namegraph.{NameGraphExtended, NameGraph}
import name.{Identifier, Name, Renaming}

import scala.language.implicitConversions

trait TermVariable extends Identifier with AST {
  override def allNames = Set()

  protected def renameVariable(newName: Name) = {
    if (newName == "this") This
    else if (newName == "null") Null
    else {
      val renamed = VariableName(newName)
      renamed.id = this.id
      renamed.oName = this.oName
      renamed
    }
  }

  override def rename(renaming: Renaming): TermVariable = renameVariable(renaming(this).name)

  override def resolveNames(nameEnvironment: ClassNameEnvironment) = NameGraph(Set(), Map())

  def resolveVariableNames(methodEnvironment: VariableNameEnvironment): NameGraphExtended = NameGraph(Set(), Map())
}

object This extends Identifier("this") with TermVariable {
  override def fresh = throw new IllegalArgumentException("Can't create fresh instance of 'this' variable!")
}

object Null extends Identifier("null") with TermVariable {
  override def fresh = throw new IllegalArgumentException("Can't create fresh instance of 'null' variable!")
}

object VariableName {
  implicit def apply(name: Name): VariableName = {
    new VariableName(name)
  }
}

class VariableName(override val name: Name) extends Identifier(name) with TermVariable {
  require(AST.isLegalName(name), "Variable name '" + name + "' is no legal Java variable name")

  override def allNames = Set(this.name)

  override def resolveNames(nameEnvironment: ClassNameEnvironment) = resolveVariableNames(Map(name -> this))

  override def resolveVariableNames(methodEnvironment: VariableNameEnvironment) = {
    // If the variable is pointing to itself (because it is declared here), add only the node but no edges
    if (!methodEnvironment.contains(name) || methodEnvironment(name) == this)
      NameGraphExtended(Set(this), Map())
    // If the variable is pointing to another variable, add it and the edge to the name graph
    else
      NameGraphExtended(Set(this), Map(this -> Set(methodEnvironment(name))))
  }

  override def fresh = VariableName(name)
}
matthisk/hygienic-transformations
scala/src/main/scala/lang/lightweightjava/ast/statement/TermVariable.scala
Scala
lgpl-3.0
2,196
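A resolution sketch for the VariableName class above. It assumes VariableNameEnvironment is a Map from names to their declaring identifiers and that Name is a plain string alias, which is what the lookups in resolveVariableNames suggest; the variable names are made up.

import lang.lightweightjava.ast.statement.VariableName

object TermVariableExample extends App {
  val decl = VariableName("x") // the declaring occurrence
  val use  = VariableName("x") // a later use of the same name

  // A declaration resolves to itself: one node, no edges.
  val declGraph = decl.resolveVariableNames(Map("x" -> decl))

  // A use resolves to its declaration: one node plus an edge back to the declaration.
  val useGraph = use.resolveVariableNames(Map("x" -> decl))
}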
class i0 {
  val i1: (Any => String) = ???
}

object i0 {
  import Ordering.{ implicitly => }
  (true: Boolean) match { case _: i1 => true }
  def i1(erased i2: Int): Int = { i1: Set[Int] => }
}
som-snytt/dotty
tests/fuzzy/14b960e57195554a6a085eae8e039a949e8b106d.scala
Scala
apache-2.0
185
package org.mozartoz.bootcompiler.fastparse import java.io.File import org.mozartoz.bootcompiler.fastparse.Tokens.PreprocessorDirective import org.mozartoz.bootcompiler.fastparse.Tokens.PreprocessorDirectiveWithArg import org.mozartoz.bootcompiler.fastparse.Tokens.Token import org.mozartoz.bootcompiler.BootCompiler.Source import fastparse.core.Parsed import scala.collection.mutable.ArrayBuffer import org.mozartoz.bootcompiler.BootCompiler object Preprocessor { case class SourceMap(fromOffset: Int, sourceOffset: Int, source: Source, toOffset: Int = 0) { override def toString = fromOffset + "-" + toOffset + " " + sourceOffset + " @ " + source.getName def in(pos: Int) = { fromOffset <= pos && pos < toOffset } def length = toOffset - fromOffset } def preprocess(source: Source): (String, Seq[SourceMap]) = { val input = source.getCode val tokens = Parser.t(Parser.tokens(input), source.getPath) var defines: Set[String] = Set() var skipDepth = 0 var offset = 0 val buffer = new StringBuilder val sourceMap: ArrayBuffer[SourceMap] = new ArrayBuffer[SourceMap] def capture(until: Int) { buffer ++= input.substring(offset, until) } def restartAt(pos: Int) { offset = pos recordPosition } def ignore(token: Token) { capture(token.pB) restartAt(token.pE) } def recordPosition { sourceMap += SourceMap(buffer.length, offset, source) } recordPosition for (elem <- tokens) { if (elem.isInstanceOf[Token]) { val token = elem.asInstanceOf[Token] if (skipDepth > 0) { token match { case PreprocessorDirectiveWithArg("ifdef" | "ifndef", _) => skipDepth += 1 case PreprocessorDirective("else" | "endif") if skipDepth == 1 => skipDepth = 0 restartAt(token.pE) case PreprocessorDirective("endif") => skipDepth -= 1 } } else { token match { case PreprocessorDirectiveWithArg("define", name) => defines += name ignore(token) case PreprocessorDirectiveWithArg("undef", name) => defines -= name ignore(token) case PreprocessorDirectiveWithArg("ifdef", name) => if (defines contains name) { // next } else { skipDepth = 1 } ignore(token) case PreprocessorDirectiveWithArg("ifndef", name) => if (!(defines contains name)) { // next } else { skipDepth = 1 } ignore(token) case PreprocessorDirective("else") => skipDepth = 1 ignore(token) case PreprocessorDirective("endif") => ignore(token) case PreprocessorDirectiveWithArg("insert", fileName) => val file = resolve(new File(source.getPath), fileName) capture(token.pB) val subSource = BootCompiler.parserToVM.createSource(file.getPath) val (out, map) = preprocess(subSource) sourceMap ++= map.map { case SourceMap(fromOffset, sourceOffset, source, toOffset) => SourceMap(buffer.length + fromOffset, sourceOffset, source) } buffer ++= out restartAt(token.pE) } } } } capture(input.length) for (i <- 0 until sourceMap.size - 1) { sourceMap(i) = sourceMap(i).copy(toOffset = sourceMap(i + 1).fromOffset) } sourceMap(sourceMap.size - 1) = sourceMap.last.copy(toOffset = Int.MaxValue) (buffer.toString, sourceMap) } def resolve(currentFile: File, fileName: String) = { val file0 = if (new File(fileName).isAbsolute()) { new File(fileName) } else { new File(currentFile.getParentFile, fileName) } val file = { if (file0.exists()) file0 else { val altFile = new File(currentFile.getParentFile, fileName + ".oz") if (altFile.exists()) altFile else file0 } } file } }
eregon/mozart-graal
bootcompiler/src/main/scala/org/mozartoz/bootcompiler/fastparse/Preprocessor.scala
Scala
bsd-2-clause
4,245
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.status import org.apache.spark.{SparkConf, SparkFunSuite} import org.apache.spark.executor.TaskMetrics import org.apache.spark.internal.config.Status.LIVE_ENTITY_UPDATE_PERIOD import org.apache.spark.resource.ResourceProfile import org.apache.spark.scheduler.{SparkListenerStageSubmitted, SparkListenerTaskStart, StageInfo, TaskInfo, TaskLocality} import org.apache.spark.status.api.v1.SpeculationStageSummary import org.apache.spark.util.{Distribution, Utils} import org.apache.spark.util.kvstore._ class AppStatusStoreSuite extends SparkFunSuite { private val uiQuantiles = Array(0.0, 0.25, 0.5, 0.75, 1.0) private val stageId = 1 private val attemptId = 1 test("quantile calculation: 1 task") { compareQuantiles(1, uiQuantiles) } test("quantile calculation: few tasks") { compareQuantiles(4, uiQuantiles) } test("quantile calculation: more tasks") { compareQuantiles(100, uiQuantiles) } test("quantile calculation: lots of tasks") { compareQuantiles(4096, uiQuantiles) } test("quantile calculation: custom quantiles") { compareQuantiles(4096, Array(0.01, 0.33, 0.5, 0.42, 0.69, 0.99)) } test("quantile cache") { val store = new InMemoryStore() (0 until 4096).foreach { i => store.write(newTaskData(i)) } val appStore = new AppStatusStore(store) appStore.taskSummary(stageId, attemptId, Array(0.13d)) intercept[NoSuchElementException] { store.read(classOf[CachedQuantile], Array(stageId, attemptId, "13")) } appStore.taskSummary(stageId, attemptId, Array(0.25d)) val d1 = store.read(classOf[CachedQuantile], Array(stageId, attemptId, "25")) // Add a new task to force the cached quantile to be evicted, and make sure it's updated. 
store.write(newTaskData(4096)) appStore.taskSummary(stageId, attemptId, Array(0.25d, 0.50d, 0.73d)) val d2 = store.read(classOf[CachedQuantile], Array(stageId, attemptId, "25")) assert(d1.taskCount != d2.taskCount) store.read(classOf[CachedQuantile], Array(stageId, attemptId, "50")) intercept[NoSuchElementException] { store.read(classOf[CachedQuantile], Array(stageId, attemptId, "73")) } assert(store.count(classOf[CachedQuantile]) === 2) } private def createAppStore(disk: Boolean, live: Boolean): AppStatusStore = { val conf = new SparkConf() if (live) { return AppStatusStore.createLiveStore(conf) } // LevelDB doesn't support Apple Silicon yet if (Utils.isMacOnAppleSilicon && disk) { return null } val store: KVStore = if (disk) { val testDir = Utils.createTempDir() val diskStore = KVUtils.open(testDir, getClass.getName) new ElementTrackingStore(diskStore, conf) } else { new ElementTrackingStore(new InMemoryStore, conf) } new AppStatusStore(store) } Seq( "disk" -> createAppStore(disk = true, live = false), "in memory" -> createAppStore(disk = false, live = false), "in memory live" -> createAppStore(disk = false, live = true) ).foreach { case (hint, appStore) => test(s"SPARK-26260: summary should contain only successful tasks' metrics (store = $hint)") { assume(appStore != null) val store = appStore.store // Success and failed tasks metrics for (i <- 0 to 5) { if (i % 2 == 0) { writeTaskDataToStore(i, store, "FAILED") } else { writeTaskDataToStore(i, store, "SUCCESS") } } // Running tasks metrics (-1 = no metrics reported, positive = metrics have been reported) Seq(-1, 6).foreach { metric => writeTaskDataToStore(metric, store, "RUNNING") } /** * Following are the tasks metrics, * 1, 3, 5 => Success * 0, 2, 4 => Failed * -1, 6 => Running * * Task summary will consider (1, 3, 5) only */ val summary = appStore.taskSummary(stageId, attemptId, uiQuantiles).get val values = Array(1.0, 3.0, 5.0) val dist = new Distribution(values, 0, values.length).getQuantiles(uiQuantiles.sorted) dist.zip(summary.executorRunTime).foreach { case (expected, actual) => assert(expected === actual) } appStore.close() } } test("SPARK-36038: speculation summary") { val store = new InMemoryStore() val expectedSpeculationSummary = newSpeculationSummaryData(stageId, attemptId) store.write(expectedSpeculationSummary) val appStore = new AppStatusStore(store) val info = appStore.speculationSummary(stageId, attemptId) assert(info.isDefined) val expectedSpeculationSummaryInfo = expectedSpeculationSummary.info info.foreach { metric => assert(metric.numTasks == expectedSpeculationSummaryInfo.numTasks) assert(metric.numActiveTasks == expectedSpeculationSummaryInfo.numActiveTasks) assert(metric.numCompletedTasks == expectedSpeculationSummaryInfo.numCompletedTasks) assert(metric.numFailedTasks == expectedSpeculationSummaryInfo.numFailedTasks) assert(metric.numKilledTasks == expectedSpeculationSummaryInfo.numKilledTasks) } } test("SPARK-36038: speculation summary should not be present if there are no speculative tasks") { val conf = new SparkConf(false).set(LIVE_ENTITY_UPDATE_PERIOD, 0L) val statusStore = AppStatusStore.createLiveStore(conf) val listener = statusStore.listener.get // Simulate a stage in job progress listener val stageInfo = new StageInfo(stageId = 0, attemptId = 0, name = "dummy", numTasks = 1, rddInfos = Seq.empty, parentIds = Seq.empty, details = "details", resourceProfileId = ResourceProfile.DEFAULT_RESOURCE_PROFILE_ID) (1 to 2).foreach { taskId => val taskInfo = new TaskInfo(taskId, taskId, 0, 0, "0", "localhost", 
TaskLocality.ANY, false) listener.onStageSubmitted(SparkListenerStageSubmitted(stageInfo)) listener.onTaskStart(SparkListenerTaskStart(0, 0, taskInfo)) } assert(statusStore.speculationSummary(0, 0).isEmpty) } private def compareQuantiles(count: Int, quantiles: Array[Double]): Unit = { val store = new InMemoryStore() val values = (0 until count).map { i => val task = newTaskData(i) store.write(task) i.toDouble }.toArray val summary = new AppStatusStore(store).taskSummary(stageId, attemptId, quantiles).get val dist = new Distribution(values, 0, values.length).getQuantiles(quantiles.sorted) dist.zip(summary.executorRunTime).foreach { case (expected, actual) => assert(expected === actual) } } private def newTaskData(i: Int, status: String = "SUCCESS"): TaskDataWrapper = { new TaskDataWrapper( i.toLong, i, i, i, i, i, i.toString, i.toString, status, i.toString, false, Nil, None, true, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, i, stageId, attemptId) } private def writeTaskDataToStore(i: Int, store: KVStore, status: String): Unit = { val liveTask = new LiveTask(new TaskInfo( i.toLong, i, i, i.toLong, i.toString, i.toString, TaskLocality.ANY, false), stageId, attemptId, None) if (status == "SUCCESS") { liveTask.info.finishTime = 1L } else if (status == "FAILED") { liveTask.info.failed = true liveTask.info.finishTime = 1L } val taskMetrics = getTaskMetrics(i) liveTask.updateMetrics(taskMetrics) liveTask.write(store.asInstanceOf[ElementTrackingStore], 1L) } private def getTaskMetrics(i: Int): TaskMetrics = { val taskMetrics = new TaskMetrics() taskMetrics.setExecutorDeserializeTime(i) taskMetrics.setExecutorDeserializeCpuTime(i) taskMetrics.setExecutorRunTime(i) taskMetrics.setExecutorCpuTime(i) taskMetrics.setResultSize(i) taskMetrics.setJvmGCTime(i) taskMetrics.setResultSerializationTime(i) taskMetrics.incMemoryBytesSpilled(i) taskMetrics.incDiskBytesSpilled(i) taskMetrics.incPeakExecutionMemory(i) taskMetrics.inputMetrics.incBytesRead(i) taskMetrics.inputMetrics.incRecordsRead(i) taskMetrics.outputMetrics.setBytesWritten(i) taskMetrics.outputMetrics.setRecordsWritten(i) taskMetrics.shuffleReadMetrics.incRemoteBlocksFetched(i) taskMetrics.shuffleReadMetrics.incLocalBlocksFetched(i) taskMetrics.shuffleReadMetrics.incFetchWaitTime(i) taskMetrics.shuffleReadMetrics.incRemoteBytesRead(i) taskMetrics.shuffleReadMetrics.incRemoteBytesReadToDisk(i) taskMetrics.shuffleReadMetrics.incLocalBytesRead(i) taskMetrics.shuffleReadMetrics.incRecordsRead(i) taskMetrics.shuffleWriteMetrics.incBytesWritten(i) taskMetrics.shuffleWriteMetrics.incWriteTime(i) taskMetrics.shuffleWriteMetrics.incRecordsWritten(i) taskMetrics } private def newSpeculationSummaryData( stageId: Int, stageAttemptId: Int): SpeculationStageSummaryWrapper = { val speculationStageSummary = new SpeculationStageSummary(10, 2, 5, 1, 2) new SpeculationStageSummaryWrapper(stageId, stageAttemptId, speculationStageSummary) } }
shaneknapp/spark
core/src/test/scala/org/apache/spark/status/AppStatusStoreSuite.scala
Scala
apache-2.0
9,866
/* * The MIT License * * Copyright (c) 2016 Fulcrum Genomics LLC * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package com.fulcrumgenomics.vcf import java.nio.file.Paths import java.util import java.util.Comparator import com.fulcrumgenomics.FgBioDef._ import com.fulcrumgenomics.cmdline.{ClpGroups, FgBioTool} import com.fulcrumgenomics.commons.io.{Io, PathUtil} import com.fulcrumgenomics.commons.util.{LazyLogging, NumericCounter} import com.fulcrumgenomics.fasta.SequenceDictionary import com.fulcrumgenomics.sopt._ import com.fulcrumgenomics.util.{GenomicSpan, Metric, ProgressLogger} import com.fulcrumgenomics.vcf.PhaseCigarOp.PhaseCigarOp import htsjdk.samtools.util.{IntervalList, OverlapDetector} import htsjdk.variant.variantcontext.writer.{Options, VariantContextWriter, VariantContextWriterBuilder} import htsjdk.variant.variantcontext.{Genotype, GenotypeBuilder, VariantContext, VariantContextBuilder} import htsjdk.variant.vcf._ import scala.annotation.tailrec import scala.jdk.CollectionConverters._ import scala.collection.mutable.ListBuffer @clp( description = """ |Assess the accuracy of phasing for a set of variants. | |All phased genotypes should be annotated with the `PS` (phase set) `FORMAT` tag, which by convention is the |position of the first variant in the phase set (see the VCF specification). Furthermore, the alleles of a phased |genotype should use the `|` separator instead of the `/` separator, where the latter indicates the genotype is |unphased. | |The input VCFs are assumed to be single sample: the genotype from the first sample is used. | |Only bi-allelic heterozygous SNPs are considered. | |The input known phased variants can be subsetted using the known interval list, for example to keep only variants |from high-confidence regions. | |If the intervals argument is supplied, only the set of chromosomes specified will be analyzed. Note that the full |chromosome will be analyzed and start/stop positions will be ignored. 
""", group=ClpGroups.VcfOrBcf ) class AssessPhasing ( @arg(flag='c', doc="The VCF with called phased variants.") val calledVcf: PathToVcf, @arg(flag='t', doc="The VCF with known phased variants.") val truthVcf: PathToVcf, @arg(flag='o', doc="The output prefix for all output files.") val output: PathPrefix, @arg(flag='k', doc="The interval list over which known phased variants should be kept.") val knownIntervals: Option[PathToIntervals] = None, @arg(flag='m', doc="Allow missing fields in the VCF header.") val allowMissingFieldsInVcfHeader: Boolean = true, @arg(flag='s', doc="Skip sites where the truth and call are both called but do not share the same alleles.") val skipMismatchingAlleles: Boolean = true, @arg(flag='l', doc="Analyze only the given chromosomes in the interval list. The entire chromosome will be analyzed (start and end ignored).") val intervals: Option[PathToIntervals] = None, @arg(flag='b', doc="Remove enclosed phased blocks and truncate overlapping blocks.") val modifyBlocks: Boolean = true, @arg(flag='d', doc="Output a VCF with the called variants annotated by if their phase matches the truth") val debugVcf: Boolean = false ) extends FgBioTool with LazyLogging { import AssessPhasing.{CalledSampleName, TruthSampleName} Io.assertReadable(Seq(calledVcf, truthVcf)) knownIntervals.foreach(Io.assertReadable) Io.assertCanWriteFile(output) override def execute(): Unit = { ///////////////////////////////////////////////////////////////////////////////////////////////////////// // Setup ///////////////////////////////////////////////////////////////////////////////////////////////////////// // check the sequence dictionaries. val dict = { import com.fulcrumgenomics.fasta.Converters.FromSAMSequenceDictionary val calledReader = new VCFFileReader(calledVcf.toFile, true) val truthReader = new VCFFileReader(truthVcf.toFile, true) calledReader.getFileHeader.getSequenceDictionary.assertSameDictionary(truthReader.getFileHeader.getSequenceDictionary) calledReader.close() truthReader.close() calledReader.getFileHeader.getSequenceDictionary.fromSam } val knownIntervalList = knownIntervals.map { intv => IntervalList.fromFile(intv.toFile).uniqued() } val calledBlockLengthCounter = new NumericCounter[Long]() val truthBlockLengthCounter = new NumericCounter[Long]() val metric = new AssessPhasingMetric val writer = if (!debugVcf) None else { val path = Paths.get(output.toString + AssessPhasing.AnnotatedVcfExtension) val reader = new VCFFileReader(calledVcf.toFile, true) val header = reader.getFileHeader val builder = new VariantContextWriterBuilder() .setOutputFile(path.toFile) .setReferenceDictionary(header.getSequenceDictionary) .setOption(Options.INDEX_ON_THE_FLY) .modifyOption(Options.ALLOW_MISSING_FIELDS_IN_HEADER, true) val writer: VariantContextWriter = builder.build val headerLines: util.Set[VCFHeaderLine] = new util.HashSet[VCFHeaderLine](header.getMetaDataInSortedOrder) headerLines.add(AssessPhasing.PhaseConcordanceFormatHeaderLine) // add the new format header lines VCFStandardHeaderLines.addStandardFormatLines(headerLines, false, Genotype.PRIMARY_KEYS) // add standard header lines writer.writeHeader(new VCFHeader(headerLines, List("call", "truth").asJava)) reader.safelyClose() Some(writer) } val chromosomes = intervals.map { intv => val intervalList = IntervalList.fromFile(intv.toFile) // Developer Note: warn the user if the supplied intervals do not span entire chromosomes. 
intervalList.getIntervals.find { interval => interval.getStart != 1 || interval.getEnd != dict(interval.getContig).length }.foreach { interval => logger.warning(s"Interval list (--intervals) given with intervals that do not span entire chromosomes (ex. '$interval'). Start/end will be ignored and entire chromosome analyzed.") } intervalList.getIntervals.map { i => i.getContig }.toSet } // NB: could parallelize! dict .iterator .filter { sequence => chromosomes match { case Some(set) => set.contains(sequence.name) case None => true } } .foreach { sequence => executeContig( dict = dict, contig = sequence.name, contigLength = sequence.length, knownIntervalList = knownIntervalList, metric = metric, calledBlockLengthCounter = calledBlockLengthCounter, truthBlockLengthCounter = truthBlockLengthCounter, writer = writer ) } val calledBlockLengthMetrics = calledBlockLengthCounter.map { case (length, count) => new PhaseBlockLengthMetric(dataset=CalledSampleName, length=length, count=count) } val truthBlockLengthMetrics = truthBlockLengthCounter.map { case (length, count) => new PhaseBlockLengthMetric(dataset=TruthSampleName, length=length, count=count) } val blockLengthMetrics = (calledBlockLengthMetrics ++ truthBlockLengthMetrics).toSeq val calledAssemblyStats = AssemblyStatistics(calledBlockLengthCounter) val truthAssemblyStats = AssemblyStatistics(truthBlockLengthCounter) metric.mean_called_block_length = calledBlockLengthCounter.mean() metric.median_called_block_length = calledBlockLengthCounter.median() metric.stddev_called_block_length = calledBlockLengthCounter.stddev(m=metric.mean_called_block_length) metric.n50_called_block_length = calledAssemblyStats.n50.toDouble metric.n90_called_block_length = calledAssemblyStats.n90.toDouble metric.l50_called = calledAssemblyStats.l50.toDouble metric.mean_truth_block_length = truthBlockLengthCounter.mean() metric.median_truth_block_length = truthBlockLengthCounter.median() metric.stddev_truth_block_length = truthBlockLengthCounter.stddev(m=metric.mean_truth_block_length) metric.n50_truth_block_length = truthAssemblyStats.n50.toDouble metric.n90_truth_block_length = truthAssemblyStats.n90.toDouble metric.l50_truth = truthAssemblyStats.l50.toDouble metric.finalizeMetric() ///////////////////////////////////////////////////////////////////////////////////////////////////////// // Output the metrics and finish up ///////////////////////////////////////////////////////////////////////////////////////////////////////// logger.info("Outputting") Metric.write(path=PathUtil.pathTo(s"${output}${AssessPhasingMetric.MetricExtension}"), metric=metric) Metric.write(PathUtil.pathTo(s"${output}${PhaseBlockLengthMetric.MetricExtension}"), blockLengthMetrics) writer.foreach(_.close()) } private def executeContig(dict: SequenceDictionary, contig: String, contigLength: Int, knownIntervalList: Option[IntervalList], metric: AssessPhasingMetric, calledBlockLengthCounter: NumericCounter[Long], truthBlockLengthCounter: NumericCounter[Long], writer: Option[VariantContextWriter] = None ): Unit = { logger.info(s"Assessing $contig") val intervalListForContig = knownIntervalList.map { oldList => val newList = new IntervalList(oldList.getHeader) newList.addall(oldList.getIntervals.filter { _.getContig == contig }.toJavaList) newList } // get the phased blocks logger.info("Getting the called phase blocks") val calledPhaseBlockDetector = { val calledReader = new VCFFileReader(calledVcf.toFile, true) val detector = PhaseBlock.buildOverlapDetector( iterator = 
toVariantContextIterator(calledReader, contig, contigLength), dict = dict, modifyBlocks = modifyBlocks ) calledReader.close() detector } logger.info("Getting the known phase blocks") val truthPhaseBlockDetector = { val truthReader = new VCFFileReader(truthVcf.toFile, true) val detector = PhaseBlock.buildOverlapDetector( iterator = toVariantContextIterator(truthReader, contig, contigLength, intervalList=intervalListForContig), dict = dict, modifyBlocks = modifyBlocks ) truthReader.close() detector } // get an iterator of the pairs val calledReader = new VCFFileReader(calledVcf.toFile, true) val truthReader = new VCFFileReader(truthVcf.toFile, true) val pairedIterator = JointVariantContextIterator( iters = Seq(toVariantContextIterator(truthReader, contig, contigLength, intervalList=intervalListForContig), toVariantContextIterator(calledReader, contig, contigLength)), dict = dict ).map { case Seq(left, right) => (left, right) } ///////////////////////////////////////////////////////////////////////////////////////////////////////// // Create the phasing cigar ///////////////////////////////////////////////////////////////////////////////////////////////////////// logger.info("Creating the phasing CIGAR") val cigar = PhaseCigar( pairedIterator = pairedIterator, truthPhaseBlockDetector = truthPhaseBlockDetector, calledPhaseBlockDetector = calledPhaseBlockDetector, metric = metric, skipMismatchingAlleles = skipMismatchingAlleles, writer = writer ) ///////////////////////////////////////////////////////////////////////////////////////////////////////// // Get the number of short and long switch errors, and other metrics ///////////////////////////////////////////////////////////////////////////////////////////////////////// logger.info("Computing short switch errors") metric.num_short_switch_errors = cigar.toShortSwitchErrorIndices().length // To find the # of long switch errors, we need to ignore runs of consecutive indices. 
logger.info("Computing long switch errors") val (numLongSwitchErrors, numLongSwitchSites) = cigar.toLongSwitchErrorsAndSites() metric.num_long_switch_errors += numLongSwitchErrors metric.num_switch_sites += numLongSwitchSites // Use the calculation described here: http://dx.doi.org/10.1038%2Fng.3119 logger.info("Computing Illumina switch errors") val illuminaSwitchErrors = cigar.toIlluminaSwitchErrors() metric.num_illumina_point_switch_errors += illuminaSwitchErrors.numPointErrors metric.num_illumina_long_switch_errors += illuminaSwitchErrors.numLongSwitchErrors metric.num_illumina_switch_sites += illuminaSwitchErrors.numSites ///////////////////////////////////////////////////////////////////////////////////////////////////////// // Get the phase blocks ///////////////////////////////////////////////////////////////////////////////////////////////////////// logger.info("Computing block metrics") calledPhaseBlockDetector.getAll.iterator.foreach { block => calledBlockLengthCounter.count(block.length) } truthPhaseBlockDetector.getAll.iterator.foreach { block => truthBlockLengthCounter.count(block.length) } calledReader.safelyClose() truthReader.safelyClose() logger.info(s"Completed $contig") } private def toVariantContextIterator(reader: VCFFileReader, contig: String, contigLength: Int, intervalList: Option[IntervalList] = None): Iterator[VariantContext] = { import com.fulcrumgenomics.fasta.Converters.FromSAMSequenceDictionary val sampleName = reader.getFileHeader.getSampleNamesInOrder.iterator().next() val baseIter: Iterator[VariantContext] = intervalList match { case Some(intv) => ByIntervalListVariantContextIterator(reader.iterator(), intv, dict=reader.getFileHeader.getSequenceDictionary.fromSam) case None => reader.query(contig, 1, contigLength) } baseIter .map(_.subContextFromSample(sampleName)) .filter(v => v.isSNP && v.isBiallelic && v.getGenotype(sampleName).isHet) } } object AssessPhasing { /** The output sample name for the called variants in the debug VCF. */ val CalledSampleName = "call" /** The output sample name for the truth/known variants in the debug VCF. 
*/ val TruthSampleName = "truth" val AnnotatedVcfExtension = ".assess_phasing.vcf.gz" val PhaseConcordanceFormatTag = "PHASE_CONC" val PhaseConcordanceFormatDescription = "The phase concordance (Match or Mismatch) determined by fgbio's AssessPhasing" val PhaseConcordanceFormatHeaderLine = new VCFFormatHeaderLine(PhaseConcordanceFormatTag, 1, VCFHeaderLineType.Integer, PhaseConcordanceFormatDescription) def getPhasingSetId(ctx: VariantContext): Int = { Integer.valueOf(ctx.getGenotype(0).getExtendedAttribute("PS", "-1").toString) } } private object AssemblyStatistics { type BlockLength = Long def apply(blockLengthCounter: NumericCounter[BlockLength]): AssemblyStatistics = { if (blockLengthCounter.isEmpty) return AssemblyStatistics(0, 0, 0) val numBases = blockLengthCounter.map { case (length, count) => length * count }.sum val fiftyPercent = numBases / 2.0 val ninetyPercent = numBases * 0.9 val blockLengthsAndCounts = blockLengthCounter.toSeq.sortBy(_._1) var blockLengthSum = 0d var n50: BlockLength = 0L var n90: BlockLength = 0L var l50: Long = 0 forloop (blockLengthsAndCounts.length - 1) (0 <= _) (_ - 1) { i => val (length, count) = blockLengthsAndCounts(i) // L50 if (blockLengthSum < fiftyPercent) { // only if L50 has not been set yet val remainingSum = fiftyPercent - blockLengthSum val numBlocksRequired = Math.ceil(remainingSum / length).toInt if (numBlocksRequired < count) { l50 += numBlocksRequired } else { l50 += count } } // ** IMPORTANT ** add the sum of bases in blocks of this size *after* updating the L50. blockLengthSum += length * count // N50 if (n50 == 0 && blockLengthSum >= fiftyPercent) { n50 = length } // N90 if (n90 == 0 && blockLengthSum >= ninetyPercent) { n90 = length } } AssemblyStatistics(n50=n50, n90=n90, l50=l50) } } /** * @param n50 the longest block length such that the bases covered by all blocks this length and longer are at least * 50% of the # of bases covered by all blocks. * @param n90 the longest block length such that the bases covered by all blocks this length and longer are at least * 90% of the # of bases covered by all blocks. * @param l50 the smallest number of blocks such that the sum of the lengths of the blocks is >= 50% of the sum of * the lengths of all blocks. */ private case class AssemblyStatistics(n50: Long, n90: Long, l50: Long) object PhaseBlockLengthMetric { val MetricExtension = PathUtil.replaceExtension(Paths.get(AssessPhasingMetric.MetricExtension), ".block_lengths.txt").toString } /** Metrics produced by `AssessPhasing` describing the number of phased blocks of a given length. The output will have * multiple rows, one for each observed phased block length. * * @param dataset The name of the dataset being assessed (i.e. "truth" or "called"). * @param length The length of the phased block. * @param count The number of phased blocks of the given length. */ case class PhaseBlockLengthMetric ( dataset: String, length: Long = 0, count: Long = 0 ) extends Metric object PhaseBlock extends LazyLogging { import scala.collection.mutable /** Creates an overlap detector for blocks of phased variants. Variants from the same block are found using the * "PS" tag. 
The modify blocks option resolves overlapping blocks */ private[vcf] def buildOverlapDetector(iterator: Iterator[VariantContext], dict: SequenceDictionary, modifyBlocks: Boolean = true): OverlapDetector[PhaseBlock] = { val detector = new OverlapDetector[PhaseBlock](0, 0) val progress = new ProgressLogger(logger) // create the blocks val phaseBlocks = mutable.HashMap[Int, PhaseBlock]() while (iterator.hasNext) { val ctx = iterator.next() val phaseSetId: Int = AssessPhasing.getPhasingSetId(ctx) if (phaseSetId > 0) { val phaseBlock = phaseBlocks.get(phaseSetId) match { case Some(block) => require(block.getContig == ctx.getContig) require(block.getStart <= ctx.getStart) block.copy(end=Math.max(block.getEnd, ctx.getEnd)) // extend the block case None => new PhaseBlock(contig=ctx.getContig, start=ctx.getStart, end=ctx.getEnd) } phaseBlocks.put(phaseSetId, phaseBlock) } progress.record(ctx.getContig, ctx.getStart) } val blocksIn = new util.TreeSet[PhaseBlock](new Comparator[PhaseBlock] { /** Compares the two blocks based on start position, then returns the shorter block. */ def compare(a: PhaseBlock, b: PhaseBlock): Int = { if (a.start < b.start) -1 else if (a.start == b.start) b.length - a.length else 1 } }) phaseBlocks.values.foreach(blocksIn.add) logger.info(s"Found ${phaseBlocks.size} phase block") // Make sure we do not have any overlapping phase blocks // - if block #1 is enclosed in block #2, keep only block #2 // - otherwise, if block #1 and #2 overlap, truncate the smaller one // The loop below will compare two blocks at a time: the two blocks with the smallest start positions. If the do // not overlap, the first block (smaller position) is kept. If one is fully-contained in the other, then the // enclosed block is discarded. If they overlap, the smaller block is truncated such that they do not overlap. In // the case the start position is changed due to truncation, then that block must be re-inserted into the list of // input blocks to guarantee we compare the two blocks with the smallest start positions. val blocksOut = ListBuffer[PhaseBlock]() var left = blocksIn.pollFirst() while (!blocksIn.isEmpty) { val right = blocksIn.pollFirst() require(left.getStart <= right.getStart, s"left: $left right: $right") // At this point, left has the smallest start position, and right as the next smallest start position. if (left.overlaps(right)) { // do they have any overlap? require(modifyBlocks, s"Block $left overlaps $right") if (left.encloses(right)) { // the right is enclosed in the left, so keep the left block only logger.info(s"Removing $right enclosed in $left") // do nothing (keep left == left) because left may overlap subsequent blocks too! } else if (right.encloses(left)) { // the left is enclosed in the right, so keep the right block only logger.info(s"Removing $left enclosed in $right") left = right } else if (left.length < right.length) { // the left is smaller, so truncate left logger.info(s"Truncating $left which overlaps $right"); blocksOut.append(left.copy(end=right.getStart-1)) left = right } else { // the right is smaller, so truncate right logger.info(s"Truncating $right which overlaps $left"); blocksOut.append(left) // Since the start position of the right block is changed, and we are going to use it in the next iteration, // then re-insert it into the set and poll. 
blocksIn.add(right.copy(start=left.getEnd+1)) left = blocksIn.pollFirst() } } else { blocksOut.append(left) left = right } } if (left != null) blocksOut.append(left) blocksOut.foreach { block => logger.info(s"Keeping $block") } // add any remaining detector.addAll(blocksOut.toList.asJava, blocksOut.toList.asJava) phaseBlocks.clear() detector } } case class PhaseBlock private (contig: String, start: Int, end: Int) extends GenomicSpan object AssessPhasingMetric { val MetricExtension = ".assess_phasing_metrics.txt" } /** Metrics produced by `AssessPhasing` describing various statistics assessing the performance of phasing variants * relative to a known set of phased variant calls. Included are methods for assessing sensitivity and accuracy from * a number of previous papers (ex. http://dx.doi.org/10.1038%2Fng.3119). * * The N50, N90, and L50 statistics are defined as follows: * - The N50 is the longest block length such that the bases covered by all blocks this length and longer are at least * 50% of the # of bases covered by all blocks. * - The N90 is the longest block length such that the bases covered by all blocks this length and longer are at least * 90% of the # of bases covered by all blocks. * - The L50 is the smallest number of blocks such that the sum of the lengths of the blocks is `>=` 50% of the sum of * the lengths of all blocks. * * @param num_called The number of variants called. * @param num_phased The number of variants called with phase. * @param num_truth The number of variants with known truth genotypes. * @param num_truth_phased The number of variants with known truth genotypes with phase. * @param num_called_with_truth_phased The number of variants called that had a known phased genotype. * @param num_phased_with_truth_phased The number of variants called with phase that had a known phased genotype. * @param num_truth_phased_in_called_block The number of known phased variants that were in a called phased block. * @param num_both_phased_in_called_block The number of called phase variants that had a known phased genotype in a called phased block. * @param num_short_switch_errors The number of short switch errors (isolated switch errors). * @param num_long_switch_errors The number of long switch errors (# of runs of consecutive switch errors). * @param num_switch_sites The number of sites that could be (short or long) switch errors (i.e. the # of sites with both known and called phased variants). * @param num_illumina_point_switch_errors The number of point switch errors (defined in http://dx.doi.org/10.1038%2Fng.3119). * @param num_illumina_long_switch_errors The number of long switch errors (defined in http://dx.doi.org/10.1038%2Fng.3119). * @param num_illumina_switch_sites The number of sites that could be (point or long) switch errors (defined in http://dx.doi.org/10.1038%2Fng.3119). * @param frac_phased The fraction of called variants with phase. * @param frac_phased_with_truth_phased The fraction of known phased variants called with phase. * @param frac_truth_phased_in_called_block The fraction of phased known genotypes in a called phased block. * @param frac_phased_with_truth_phased_in_called_block The fraction of called phased variants that had a known phased genotype in a called phased block. * @param short_accuracy The fraction of switch sites without short switch errors (`1 - (num_short_switch_errors / num_switch_sites)`). * @param long_accuracy The fraction of switch sites without long switch errors (`1 - (num_long_switch_errors / num_switch_sites)`). 
* @param illumina_point_accuracy The fraction of switch sites without point switch errors according to the Illumina * method defining switch sites and errors (`1 - (num_illumina_point_switch_errors / num_illumina_switch_sites )`). * @param illumina_long_accuracy The fraction of switch sites without long switch errors according to the Illumina * method defining switch sites and errors (`1 - (num_illumina_long_switch_errors / num_illumina_switch_sites )`). * @param mean_called_block_length The mean phased block length in the callset. * @param median_called_block_length The median phased block length in the callset. * @param stddev_called_block_length The standard deviation of the phased block length in the callset. * @param n50_called_block_length The N50 of the phased block length in the callset. * @param n90_called_block_length The N90 of the phased block length in the callset. * @param l50_called The L50 of the phased block length in the callset. * @param mean_truth_block_length The mean phased block length in the truth. * @param median_truth_block_length The median phased block length in the truth. * @param stddev_truth_block_length The standard deviation of the phased block length in the truth. * @param n50_truth_block_length The N50 of the phased block length in the truth. * @param n90_truth_block_length The N90 of the phased block length in the truth. * @param l50_truth The L50 of the phased block length in the truth. */ case class AssessPhasingMetric ( var num_called: Long = 0, var num_phased: Long = 0, var num_truth: Long = 0, var num_truth_phased: Long = 0, var num_called_with_truth_phased: Long = 0, var num_phased_with_truth_phased: Long = 0, var num_truth_phased_in_called_block: Long = 0, var num_both_phased_in_called_block: Long = 0, var num_short_switch_errors: Long = 0, var num_long_switch_errors: Long = 0, var num_switch_sites: Long = 0, var num_illumina_point_switch_errors: Long = 0, var num_illumina_long_switch_errors: Long = 0, var num_illumina_switch_sites: Long = 0, var frac_phased: Double = 0, var frac_phased_with_truth_phased: Double = 0, var frac_truth_phased_in_called_block: Double = 0, var frac_phased_with_truth_phased_in_called_block: Double = 0, var short_accuracy: Double = 0, var long_accuracy: Double = 0, var illumina_point_accuracy: Double = 0, var illumina_long_accuracy: Double = 0, var mean_called_block_length: Double = 0, var median_called_block_length: Double = 0, var stddev_called_block_length: Double = 0, var n50_called_block_length: Double = 0, var n90_called_block_length: Double = 0, var l50_called: Double = 0, var mean_truth_block_length: Double = 0, var median_truth_block_length: Double = 0, var stddev_truth_block_length: Double = 0, var n50_truth_block_length: Double = 0, var n90_truth_block_length: Double = 0, var l50_truth: Double = 0 ) extends Metric { private def divide(a: Double, b: Double): Double = if (b == 0) 0 else a / b private def divide(a: Long, b: Long): Double = divide(a.toDouble, b.toDouble) def finalizeMetric(): this.type = { this.frac_phased = divide(this.num_phased, this.num_called) this.frac_phased_with_truth_phased = divide(this.num_phased_with_truth_phased, this.num_called_with_truth_phased) this.frac_truth_phased_in_called_block = divide(this.num_truth_phased_in_called_block, this.num_truth_phased) this.frac_phased_with_truth_phased_in_called_block = divide(this.num_both_phased_in_called_block, this.num_truth_phased_in_called_block) this.short_accuracy = 1.0 - divide(this.num_short_switch_errors, this.num_switch_sites) 
this.long_accuracy = 1.0 - divide(this.num_long_switch_errors, this.num_switch_sites) this.illumina_point_accuracy = 1.0 - divide(this.num_illumina_point_switch_errors, this.num_illumina_switch_sites ) this.illumina_long_accuracy = 1.0 - divide(this.num_illumina_long_switch_errors, this.num_illumina_switch_sites ) this } } /** The cigar Elements */ private object PhaseCigarOp extends Enumeration { type PhaseCigarOp = Value val Match, // 0 Mismatch, // 1 TruthOnly, // 2 CallOnly, // 3 TruthEnd, // 4 CallEnd, // 5 BothEnd = Value // 6 } private[vcf] object PhaseCigar { import AssessPhasing.{CalledSampleName, TruthSampleName} import PhaseCigarOp._ private type VCtx = VariantContext def apply(cigar: Seq[PhaseCigarOp]): PhaseCigar = new PhaseCigar(cigar) /** Creates a Cigar from an iterator over variant contexts from a truth and call sample. The variants should be on a * single chromosome only. * * If `skipMismatchingAlleles`, then it skips sites where both truth and call have a variant call, but the alleles * disagree between the two. * * Examples of short and long errors: * CALL : MMMMXXXMMM MMMM|FFFFFFF|MMMMMFMMMM|F|MMFFFMMM * TRUTH: MMMM MMMMMMMMMM-MMMMMMM|MMMMMMMMMM|F|MMMMMMMM * correct long short long * CIGAR: 00003330002220000511111116000001000060600111000 * shows one known block split */ def apply(pairedIterator: Iterator[(Option[VariantContext], Option[VariantContext])], truthPhaseBlockDetector: OverlapDetector[PhaseBlock], calledPhaseBlockDetector: OverlapDetector[PhaseBlock], metric: AssessPhasingMetric, skipMismatchingAlleles: Boolean, writer: Option[VariantContextWriter] = None, assumeFixedAlleleOrder: Boolean = false): PhaseCigar = { val iter = pairedIterator.filter { // ensure the alleles are the same when both truth and call are called case ((Some(t: VCtx), Some(c: VCtx))) => !skipMismatchingAlleles || t.getAlleles.toSet == c.getAlleles.toSet case _ => true }.flatMap { case (t, c) => // collect metrics but only keep sites where either variant context (i.e. truth or call) is phased. val ctxs = Seq(t, c).flatten // NB: can be 1 or 2 contexts here val inTruthPhaseBlock = ctxs.headOption.exists(truthPhaseBlockDetector.overlapsAny) val inCalledPhaseBlock = ctxs.headOption.exists(calledPhaseBlockDetector.overlapsAny) val isTruthVariant = t.isDefined val isCalledVariant = c.isDefined val isTruthVariantPhased = t.exists { ctx => AssessPhasing.getPhasingSetId(ctx) > 0 } val isCalledVariantPhased = c.exists { ctx => AssessPhasing.getPhasingSetId(ctx) > 0 } // Fill in the metrics // Basic metrics if (isCalledVariant) { metric.num_called += 1 if (isCalledVariantPhased) metric.num_phased += 1 } if (isTruthVariant) { metric.num_truth += 1 if (isTruthVariantPhased) metric.num_truth_phased += 1 } // Less fun compute metrics if (isCalledVariant && isTruthVariantPhased) { metric.num_called_with_truth_phased += 1 if (isCalledVariantPhased) metric.num_phased_with_truth_phased += 1 } if (inCalledPhaseBlock) { if (isTruthVariantPhased) { metric.num_truth_phased_in_called_block += 1 if (isCalledVariantPhased) metric.num_both_phased_in_called_block += 1 } } if (isCalledVariantPhased || isTruthVariantPhased) { Some((t, c)) } else None } // The assignment of maternal/paternal alleles in some cases is arbitrary (ex. without a pedigree). 
Therefore, // the first variant in any block (truth or called) that has a corresponding phased variant in the other sample // (called or truth) is assumed to match and therefore determines the order of the alleles in the subsequent // variants, until an end of block is reached for either sample. var applyInvertMatch: Boolean = false var invertMatch: Boolean = false val cigarOps = new ListBuffer[PhaseCigarOp]() while (iter.hasNext) { val (t, c) = iter.next() val truthPhasedVariant = t.filter { ctx => AssessPhasing.getPhasingSetId(ctx) > 0 } val calledPhasedVariant = c.filter { ctx => AssessPhasing.getPhasingSetId(ctx) > 0 } // Get the end-block operator if we encountered one. val blockEndOp = contextsToBlockEndOperator(truthPhasedVariant, calledPhasedVariant) // If we reach the end of a block, we need to infer if we should invert the match/mismatches based on the next // site where both called and truth have a phased variant. if (!assumeFixedAlleleOrder && blockEndOp.nonEmpty) { applyInvertMatch = false } // Get the cigar operator for the current variant site. val matchingOp = contextsToMatchingOperator(truthPhasedVariant, calledPhasedVariant).map { op => if (assumeFixedAlleleOrder || (op != Match && op != Mismatch)) { // nothing to do op } else { // Check if the invert status has been set, if not set it, otherwise, apply it if (!applyInvertMatch) { // the first element! invertMatch = op == Mismatch applyInvertMatch = true Match } else if (invertMatch) { // invert it if (op == Match) Mismatch else Match } else op // keep it the way it is } } // Add it to the current buffer of cigar operators if (cigarOps.isEmpty) { cigarOps.append(BothEnd) matchingOp.foreach(cigarOps.append(_)) } else { Seq(blockEndOp, matchingOp).flatten.foreach(cigarOps.append(_)) } // Write to the output debug variant file (matchingOp, t, c, writer) match { case (Some(op), Some(truthCtx), Some(calledCtx), Some(w)) => val calledGenotype = { val builder = new GenotypeBuilder(calledCtx.getGenotype(0)) builder.name(CalledSampleName) builder.attribute(AssessPhasing.PhaseConcordanceFormatTag, op.toString) builder.make() } val truthGenotype = { val builder = new GenotypeBuilder(truthCtx.getGenotype(0)) builder.name(TruthSampleName) builder.make() } val ctxBuilder = new VariantContextBuilder(calledCtx) ctxBuilder.genotypes(calledGenotype, truthGenotype) w.add(ctxBuilder.make()) case _ => () } } if (cigarOps.isEmpty) { cigarOps.append(BothEnd) } cigarOps.append(BothEnd) PhaseCigar(cigarOps.toIndexedSeq) } /** Returns an end operator if we have reached a new block. */ private[vcf] def contextsToBlockEndOperator(truth: Option[VariantContext], call: Option[VariantContext]): Option[PhaseCigarOp] = (truth, call) match { case (None, Some(c: VCtx)) => if (isStartOfPhaseBlock(c)) Some(CallEnd) else None case (Some(t: VCtx), None) => if (isStartOfPhaseBlock(t)) Some(TruthEnd) else None case (Some(t: VCtx), Some(c: VCtx)) => (isStartOfPhaseBlock(t), isStartOfPhaseBlock(c)) match { case (true, true) => Some(BothEnd) case (false, true) => Some(CallEnd) case (true, false) => Some(TruthEnd) case (false, false) => None } case _ => unreachable() } /** Returns the cigar for the two variant contexts. 
*/ private[vcf] def contextsToMatchingOperator(truth: Option[VariantContext], call: Option[VariantContext]): Option[PhaseCigarOp] = (truth, call) match { case (None, Some(c: VCtx)) => Some(CallOnly) case (Some(t: VCtx), None) => Some(TruthOnly) case (Some(t: VCtx), Some(c: VCtx)) => Some(cigarTypeForVariantContexts(t, c)) case _ => unreachable() } /** True if the phasing set id is the same as the start position of the given variant, false otherwise. */ def isStartOfPhaseBlock(ctx: VariantContext): Boolean = ctx.getStart == AssessPhasing.getPhasingSetId(ctx) /** Computes the cigar for two variant contexts. Returns [[Match]] if they share the same alleles in the same order, * [[Mismatch]] otherwise. */ private[vcf] def cigarTypeForVariantContexts(truth: VariantContext, call: VariantContext): PhaseCigarOp = { val truthAlleles = truth.getGenotype(0).getAlleles.toSeq val calledAlleles = call.getGenotype(0).getAlleles.toSeq require(truthAlleles.length == calledAlleles.length) require(truthAlleles.length == 2) if (truthAlleles.head != calledAlleles.head || truthAlleles.last != calledAlleles.last) PhaseCigarOp.Mismatch else PhaseCigarOp.Match } case class IlluminaSwitchErrors(var numPointErrors: Int, var numLongSwitchErrors: Int, var numSites: Int) { require(numPointErrors <= numSites) require(numLongSwitchErrors <= numSites) def add(other: IlluminaSwitchErrors): this.type = { this.numPointErrors += other.numPointErrors this.numLongSwitchErrors += other.numLongSwitchErrors this.numSites += other.numSites this } } } private[vcf] class PhaseCigar private(val cigar: Seq[PhaseCigarOp]) { import PhaseCigar._ import PhaseCigarOp._ /** Partitions the cigar into multiple cigars, with each cigar being a contiguous block of phased variants. */ private[vcf] def toPhasedBlocks(isTruth: Boolean = false): Seq[PhaseCigar] = { val endCigarTypes = if (isTruth) Set(TruthEnd, BothEnd) else Set(CallEnd, BothEnd) // Splits the cigar any time we reach a PhaseCigarOp in the endCigarTypes val (cigarsToReturn, lastCigar) = this.cigar.foldLeft((ListBuffer[Seq[PhaseCigarOp]](), ListBuffer[PhaseCigarOp]())) { case ((previousCigars: ListBuffer[Seq[PhaseCigarOp]], currentCigar: ListBuffer[PhaseCigarOp]), phaseCigarOp: PhaseCigarOp) => phaseCigarOp match { case tpe if endCigarTypes.contains(tpe) => // split! if (currentCigar.nonEmpty) previousCigars.append(currentCigar.toList) (previousCigars, new ListBuffer[PhaseCigarOp]()) case _ => // keep going currentCigar.append(phaseCigarOp) (previousCigars, currentCigar) } } // Make sure to ge the last one if (lastCigar.nonEmpty) cigarsToReturn.append(lastCigar.toList) cigarsToReturn.map(PhaseCigar(_)).toIndexedSeq } /** Gets all the indices in the cigar where there is a short switch error. * * Short switches are sites where the alleles are misphased (flipped), and both upstream and downstream, either the * haplotype block ends, or the next variant has the same phase. */ def toShortSwitchErrorIndices(): Seq[Int] = { // check if we can find a match or the end of a block at the given idx. If we cannot, move to the next idx. 
@tailrec def checkNext(cigar: Seq[PhaseCigarOp], idx: Int, by: Int): Boolean = { if (idx < 0 || cigar.length <= idx) return true // no more cigar(idx) match { case Match | CallEnd | BothEnd | TruthEnd => true case Mismatch => false case CallOnly | TruthOnly => checkNext(cigar=cigar, idx=idx+by, by=by) } } // find cigars that have a mismatch at the index, but then anchored on either side this.cigar.indices.filter { i => this.cigar(i) == Mismatch && checkNext(this.cigar, i-1, -1) && checkNext(this.cigar, i+1, 1) }.toList } /** Gets the number of long switch errors and the number of sites examined. * * @return a tuple of the number of long switch errors and the total number of sites examined. Long switches are a * stretch of two or more consecutive sites where the alleles are mis-phased (flipped), and both upstream and * downstream, either the haplotype block ends, or the next variant has the same phase. The number of sites * examined are places where there are both a phased truth and phased called variant. */ def toLongSwitchErrorsAndSites(): (Int, Int) = { // Gets all the indices in the cigar where there is a long switch error. Currently returns the index for all cigar // mismatches that contribute to a long switch error, and tries to maximize such a stretch. val iter = cigar.iterator.bufferBetter var numLongSwitchErrors = 0 while (iter.hasNext) { if (iter.head == Mismatch) { val len = iter.takeWhile(c => c == Mismatch || c == CallOnly || c == TruthOnly).count(_ == Mismatch) if (len > 1) numLongSwitchErrors += 1 } iter.dropWhile(_ != Mismatch) } val numSites = this.cigar.count { case Mismatch | Match => true case _ => false } (numLongSwitchErrors, numSites) } /** Computes the number of point and long switch error rates as described in http://dx.doi.org/10.1038%2Fng.3119 */ def toIlluminaSwitchErrors(pointPenalty: Int = -1, transitionPenalty: Int = -5): IlluminaSwitchErrors = { // 1. partition the cigar into call blocks. // 2. for each call block, run the HMM, and sum the # of long switch errors this.toPhasedBlocks(isTruth=false).map { subCigar => subCigar.toIlluminaSwitchErrorsHmm(pointPenalty=pointPenalty, transitionPenalty=transitionPenalty) }.foldLeft(IlluminaSwitchErrors(0, 0, 0)) { case (acc, cur) => acc.add(cur) } } /** Computes the number of point and long switch error rates as described in http://dx.doi.org/10.1038%2Fng.3119 * * The HMM uses a simple scoring system and two hidden states, one for each parental haplotype. The emission * probability is scored as follows: 0 for being on the correct haplotype, otherwise `pointPenalty`. The transition * probability is scored as follows: 0 for staying on the same haplotype, otherwise `transitionPenalty`. * * We prefer a long switch error over a sequence of point errors (i.e. when both produce the same score). */ private[vcf] def toIlluminaSwitchErrorsHmm(pointPenalty: Int = -1, transitionPenalty: Int = -5): IlluminaSwitchErrors = { if (this.cigar.isEmpty) throw new IllegalArgumentException("cigar was empty") // Get the initial phase: true is on the top haplotype, false is the bottom haplotype. 
val phase = this.cigar.filter { phaseCigarOp => phaseCigarOp == Match || phaseCigarOp == Mismatch }.map { phaseCigarOp => if (phaseCigarOp == Match) true else false } if (phase.isEmpty) return IlluminaSwitchErrors(0, 0, 0) // one score tuple for each site, and each tuple is the score for being on a given haplotype val scores = ListBuffer.range(0, phase.length, 1).map(_ => (0, 0)) val from = ListBuffer.range(0, phase.length, 1).map(_ => (true, true)) // true if we stayed def topEmission(idx: Int): Int = if (phase(idx)) 0 else pointPenalty def botEmission(idx: Int): Int = if (phase(idx)) pointPenalty else 0 // we arbitrarily assume we are on the top haplotype (?) // run viterbi for i == 0 scores(0) = (topEmission(0), botEmission(0)) from(0) = (true, true) // not actually needed // run Viterbi for i > 0 var i = 1 while (i < scores.length) { // top haplotype val (topScore, topFrom) = { val transition = scores(i - 1)._2 + transitionPenalty + topEmission(i) val stay = scores(i - 1)._1 + topEmission(i) // no penalty to stay :) if (transition < stay) (stay, true) // '<' makes it prefer transition else (transition, false) } // bottom haplotype val (botScore, botFrom) = { val transition = scores(i - 1)._1 + transitionPenalty + botEmission(i) val stay = scores(i - 1)._2 + botEmission(i) // no penalty to stay :) if (transition < stay) (stay, true) // '<' makes it prefer transition else (transition, false) } // update scores(i) = (topScore, botScore) from(i) = (topFrom, botFrom) //println(s"scores($i): " + scores(i) + s" from($i): " + from(i)) i += 1 } /** Computes the # of long switch errors in the HMM output phased list. */ def numLongSwitchErrors(states: List[Int]): Int = { if (states.length == 1) 0 else states.sliding(2).count { case Seq(first, last) => first != last } } /** Backtracks to find the haplotype states, in reverse order. */ def backtrack(haplotypeInit: Int, stayInit: Boolean): List[Int] = { var haplotype = haplotypeInit var stay = stayInit val states = new ListBuffer[Int]() i = scores.length - 2 while (0 <= i) { //println("i: " + i + " haplotype: " + haplotype + " stay: " + stay + s" from($i): " + from(i)) states.append(haplotype) if (stay) { stay = if (haplotype == 0) from(i)._1 else from(i)._2 } else { haplotype = 1 - haplotype stay = if (haplotype == 0) from(i)._1 else from(i)._2 } i -= 1 } states.append(haplotype) require(states.length == scores.length) // NB: states is the reverse states.reverse.toList } // backtrack val states = { if (scores.last._1 > scores.last._2) backtrack(0, from(scores.length-1)._1) else if (scores.last._1 == scores.last._2) { // choose the solution with the most # of long switch errors val statesTop = backtrack(0, from(scores.length-1)._1) val statesBot = backtrack(1, from(scores.length-1)._2) val topScore = numLongSwitchErrors(statesTop) val botScore = numLongSwitchErrors(statesBot) if (topScore >= botScore) statesTop else statesBot } else backtrack(1, from(scores.length-1)._2) } //println("haplotypes: " + states.toSeq.mkString(", ")) //println(s"states.length=${states.length} scores.length=${scores.length}") // count how many mismatches are accounted for by point switch errors val numPointErrors = { require(phase.length == states.length) phase.zip(states.map(_ == 0)).count { case (p, s) => p != s } } IlluminaSwitchErrors(numPointErrors, numLongSwitchErrors(states), states.length) } }
fulcrumgenomics/fgbio
src/main/scala/com/fulcrumgenomics/vcf/AssessPhasing.scala
Scala
mit
49,087
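// A minimal, self-contained sketch of the N50/N90/L50 definitions documented in AssessPhasing above,
// using a plain Map[Long, Long] of block length -> count in place of fgbio's NumericCounter.
// The object and method names below are illustrative only and are not part of fgbio.
object BlockLengthStatsSketch {
  /** Longest length L such that blocks of length >= L cover at least `fraction` of all covered bases. */
  def nxx(lengthCounts: Map[Long, Long], fraction: Double): Long = {
    val totalBases = lengthCounts.map { case (len, n) => len * n }.sum.toDouble
    var covered = 0d
    lengthCounts.toSeq
      .sortBy(-_._1) // longest block length first
      .find { case (len, n) => covered += len.toDouble * n; covered >= totalBases * fraction }
      .map(_._1)
      .getOrElse(0L)
  }

  /** Smallest number of blocks whose summed length reaches 50% of the total covered bases. */
  def l50(lengthCounts: Map[Long, Long]): Long = {
    val half = lengthCounts.map { case (len, n) => len * n }.sum / 2.0
    val lengthsDesc = lengthCounts.toSeq.sortBy(-_._1).flatMap { case (len, n) => Seq.fill(n.toInt)(len) }
    // running sums of block lengths, longest first; count how many blocks are needed to reach half
    val cumulative = lengthsDesc.scanLeft(0L)(_ + _).tail
    cumulative.indexWhere(_ >= half) + 1L
  }

  def main(args: Array[String]): Unit = {
    // three blocks of length 10 and two of length 5: 40 covered bases in total
    val counts = Map(10L -> 3L, 5L -> 2L)
    println(nxx(counts, 0.5)) // 10 (blocks of length >= 10 cover 30 of the 40 bases)
    println(nxx(counts, 0.9)) // 5  (the length-5 blocks are needed to reach 36 of 40 bases)
    println(l50(counts))      // 2  (two length-10 blocks already reach half the total)
  }
}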
package de.jannikarndt.datamover.monitor trait Monitoring { protected val monitor: Monitor = Monitor.getMonitor(getClass.getName) }
JannikArndt/DataMover
src/main/scala/de/jannikarndt/datamover/monitor/Monitoring.scala
Scala
mit
137
package cmwell.analytics.data import cmwell.analytics.util.Shard import scala.collection.mutable class XORSummaryFactory extends DataWriterFactory[IndexWithSourceHash]{ val shardSummaries = mutable.Map.empty[Shard, XORSummary] override def apply(shard: Shard): DataWriter[IndexWithSourceHash] = shardSummaries.synchronized { val xorSummary = XORSummary(index = shard.indexName, shard = shard.shard.toString) shardSummaries += shard -> xorSummary xorSummary } }
dudi3001/CM-Well
tools/dataConsistencyTool/extract-index-from-es/src/main/scala/cmwell/analytics/data/XORSummaryFactory.scala
Scala
apache-2.0
484
/* Copyright 2017-2018 EconomicSL Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.economicsl.mechanisms import cats._ import implicits._ /** Base trait defining a generic cardinal welfare function. * * A cardinal social welfare function is a function that takes as input * numeric representations of individual utilities (also known as cardinal * utility), and returns as output a numeric representation of the collective * welfare. The underlying assumption is that individuals' utilities can be put * on a common scale and compared. */ trait CardinalSocialWelfareFunction[-CC <: Iterable[ValuationFunction[A]], A] extends SocialWelfareFunction[CC, ValuationFunction[A], A] object CardinalSocialWelfareFunction { def average[A]: CardinalSocialWelfareFunction[Iterable[ValuationFunction[A]], A] = { new CardinalSocialWelfareFunction[Iterable[ValuationFunction[A]], A] { def apply(preferences: Iterable[ValuationFunction[A]]): ValuationFunction[A] = { new ValuationFunction[A] { def apply(a: A): Numeraire = { val (total, count) = preferences.map(v => (v(a), 1)).reduce(_ |+| _) total / count } } } } } /** Rawlsian social welfare function: society should maximize the minimum individual Numeraire. */ def min[A]: CardinalSocialWelfareFunction[Iterable[ValuationFunction[A]], A] = { reduce(ValuationFunction.min) } /** Nash bargaining maximizes the product of individual utilities. */ def prod[A]: CardinalSocialWelfareFunction[Iterable[ValuationFunction[A]], A] = { reduce(ValuationFunction.prod) } def reduce[A](implicit ev: Semigroup[ValuationFunction[A]]): CardinalSocialWelfareFunction[Iterable[ValuationFunction[A]], A] = { new CardinalSocialWelfareFunction[Iterable[ValuationFunction[A]], A] { def apply(preferences: Iterable[ValuationFunction[A]]): ValuationFunction[A] = { new ValuationFunction[A] { def apply(a: A): Numeraire = { preferences.reduce(ev.combine)(a) } } } } } /** Benthamite social welfare function: society should maximize the sum of individual Numeraire. */ def sum[A]: CardinalSocialWelfareFunction[Iterable[ValuationFunction[A]], A] = { reduce(ValuationFunction.sum) } }
EconomicSL/mechanisms
src/main/scala/org/economicsl/mechanisms/CardinalSocialWelfareFunction.scala
Scala
apache-2.0
2,802
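// A self-contained sketch of the aggregation rules documented above (Benthamite sum, Rawlsian minimum,
// Nash product), using plain Double utilities in place of the library's ValuationFunction/Numeraire types.
// The alternatives and utility numbers below are made-up assumptions for illustration only.
object WelfareSketch {
  sealed trait Alt; case object BuildPark extends Alt; case object BuildRoad extends Alt

  // each agent's cardinal utility for each alternative
  val utilities: Seq[Alt => Double] = Seq(
    { case BuildPark => 10.0; case BuildRoad => 1.0 },
    { case BuildPark => 2.0;  case BuildRoad => 3.0 },
    { case BuildPark => 2.0;  case BuildRoad => 3.0 }
  )

  def benthamite(a: Alt): Double = utilities.map(_(a)).sum     // sum of utilities
  def rawlsian(a: Alt): Double   = utilities.map(_(a)).min     // utility of the worst-off agent
  def nash(a: Alt): Double       = utilities.map(_(a)).product // product of utilities

  def main(args: Array[String]): Unit = {
    // the sum prefers the park (14 > 7), while the Rawlsian minimum prefers the road (3 > 2)
    println(Seq(BuildPark, BuildRoad).maxBy(benthamite)) // BuildPark
    println(Seq(BuildPark, BuildRoad).maxBy(rawlsian))   // BuildRoad
    println(Seq(BuildPark, BuildRoad).maxBy(nash))       // BuildPark (40 > 9)
  }
}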
package com.kalmanb.sbt.extra import org.scalatest.FunSpec class JenkinsGitPluginTest extends FunSpec { import JenkinsGitPlugin.Git._ val example = <project> <description/> <scm class="hudson.plugins.git.GitSCM" plugin="[email protected]"> <configVersion>2</configVersion> <userRemoteConfigs> <hudson.plugins.git.UserRemoteConfig> <name/> <refspec/> <url>/tmp</url> </hudson.plugins.git.UserRemoteConfig> </userRemoteConfigs> <branches> <hudson.plugins.git.BranchSpec> <name>**</name> </hudson.plugins.git.BranchSpec> </branches> <disableSubmodules>false</disableSubmodules> </scm> </project> describe("change job git branch") { it("should update the name") { val result = changeJobGitBranch("new-test")(example) assert((result \\ "_" \\ "_" \\ "hudson.plugins.git.BranchSpec" \\ "name").text === "new-test") } } }
kalmanb/sbt-jenkins-manager
src/test/scala/com/kalmanb/sbt/extra/JenkinsGitPluginTest.scala
Scala
apache-2.0
996
package BIDMach.models import BIDMat.{Mat,SBMat,CMat,DMat,FMat,IMat,HMat,GMat,GIMat,GSMat,SMat,SDMat} import BIDMat.MatFunctions._ import BIDMat.SciFunctions._ import BIDMat.Solvers._ import BIDMach.datasources._ import BIDMach.datasinks._ import BIDMach.updaters._ import BIDMach._ /** * A scalable approximate SVD (Singular Value Decomposition) using subspace iteration * * '''Parameters''' - dim(256): Model dimension * * Other key parameters inherited from the learner, datasource and updater: - blockSize: the number of samples processed in a block - npasses(10): number of complete passes over the dataset * */ class SVD(opts:SVD.Opts = new SVD.Options) extends Model(opts) { var Q:Mat = null; // (Left) Singular vectors var SV:Mat = null; // Singular values var P:Mat = null; var R:Mat = null def init() = { val nfeats = mats(0).nrows; if (refresh) { Q = normrnd(0, 1, nfeats, opts.dim); // Randomly initialize Q // QRdecompt(Q, Q, null); // Orthonormalize it Q ~ Q / sqrt(Q dot Q); SV = Q.zeros(1, opts.dim); // Holder for Singular values } else { Q = modelmats(0); SV = modelmats(1); } Q = convertMat(Q); // Move to GPU or double if needed SV = convertMat(SV); setmodelmats(Array(Q, SV)); P = Q.zeros(Q.nrows, Q.ncols); // Zero P R = Q.zeros(opts.dim, opts.dim) updatemats = Array(P); } def dobatch(mats:Array[Mat], ipass:Int, pos:Long):Unit = { val M = mats(0); val PP = (Q.t * M *^ M).t // Compute P = M * M^t * Q efficiently if (ipass < opts.miniBatchPasses) { P = PP; if (ipass > opts.powerWait) { subspaceIter; // Do minibatch subspace iterations } } else { P ~ P + PP; // Else accumulate P over the dataset } } def evalbatch(mat:Array[Mat], ipass:Int, pos:Long):FMat = { val M = mats(0); if (ogmats != null) { ogmats(0) = Q.t * M; // Save right singular vectors P <-- (ogmats(0) *^ M).t } SV ~ P ∙ Q; // Estimate the singular values max(SV, 1e-6f, SV); val diff = (P / SV) - Q; // residual row(-(math.sqrt(norm(diff) / diff.length))); // return the norm of the residual } override def updatePass(ipass:Int) = { if (ipass >= opts.miniBatchPasses) { if (ipass % 2 == 1) RayleighRitz; else subspaceIter; } P.clear; } def RayleighRitz = { R ~ P ^* Q; val (evals, evecs) = feig(cpu(R)); R <-- evecs(?, irow((R.ncols-1) to 0 by -1)); Q <-- Q * R; P <-- P * R; } def subspaceIter = { QRdecompt(P, Q, null); } } object SVD { trait Opts extends Model.Opts { var miniBatchPasses = 1; var powerWait = 10; } class Options extends Opts {} class MatOptions extends Learner.Options with SVD.Opts with MatSource.Opts with Batch.Opts def learner(mat:Mat):(Learner, MatOptions) = { val opts = new MatOptions; opts.batchSize = math.min(100000, mat.ncols/30 + 1); opts.updateAll = true; val nn = new Learner( new MatSource(Array(mat), opts), new SVD(opts), null, new Batch(opts), null, opts) (nn, opts) } class FileOptions extends Learner.Options with SVD.Opts with FileSource.Opts with Batch.Opts def learner(fnames:String):(Learner, FileOptions) = { val opts = new FileOptions; opts.batchSize = 10000; opts.fnames = List(FileSource.simpleEnum(fnames, 1, 0)); opts.updateAll = true; implicit val threads = threadPool(4); val nn = new Learner( new FileSource(opts), new SVD(opts), null, new Batch(opts), null, opts) (nn, opts) } class PredOptions extends Learner.Options with SVD.Opts with MatSource.Opts with MatSink.Opts; // This function constructs a predictor from an existing model def predictor(model:Model, mat1:Mat):(Learner, PredOptions) = { val nopts = new PredOptions; nopts.batchSize = math.min(10000, mat1.ncols/30 + 1) nopts.dim = 
model.opts.dim; nopts.miniBatchPasses = 0; val newmod = new SVD(nopts); newmod.refresh = false model.copyTo(newmod) val nn = new Learner( new MatSource(Array(mat1), nopts), newmod, null, null, new MatSink(nopts), nopts) (nn, nopts) } class FilePredOptions extends Learner.Options with SVD.Opts with FileSource.Opts with FileSink.Opts; // This function constructs a predictor from an existing model def predictor(model:Model, infnames:String, outfnames:String):(Learner, FilePredOptions) = { val nopts = new FilePredOptions; nopts.dim = model.opts.dim; nopts.fnames = List(FileSource.simpleEnum(infnames, 1, 0)); nopts.ofnames = List(FileSource.simpleEnum(outfnames, 1, 0)); val newmod = new SVD(nopts); newmod.refresh = false model.copyTo(newmod); implicit val threads = threadPool(4); val nn = new Learner( new FileSource(nopts), newmod, null, null, new FileSink(nopts), nopts) (nn, nopts) } }
jamesjia94/BIDMach
src/main/scala/BIDMach/models/SVD.scala
Scala
bsd-3-clause
5,725
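// A minimal sketch of the subspace-iteration idea described in the SVD model doc above, reduced to the
// rank-1 case: repeatedly apply M * M^t to a vector and re-normalise, which converges to the leading left
// singular vector. Plain Scala arrays stand in for BIDMat matrices; all names here are illustrative only.
object PowerIterationSketch {
  // y = M * (M^t * q), i.e. one application of M M^t to q (M given as an array of rows)
  def applyMMt(m: Array[Array[Double]], q: Array[Double]): Array[Double] = {
    val mtq = Array.tabulate(m(0).length) { j => m.indices.map(i => m(i)(j) * q(i)).sum }
    Array.tabulate(m.length) { i => mtq.indices.map(j => m(i)(j) * mtq(j)).sum }
  }

  def normalize(v: Array[Double]): Array[Double] = {
    val n = math.sqrt(v.map(x => x * x).sum)
    v.map(_ / n)
  }

  def leadingLeftSingularVector(m: Array[Array[Double]], iters: Int = 50): (Array[Double], Double) = {
    var q = normalize(Array.fill(m.length)(scala.util.Random.nextDouble()))
    for (_ <- 1 to iters) q = normalize(applyMMt(m, q))
    // the Rayleigh quotient q^t (M M^t) q approximates sigma_1^2
    val sigmaSq = q.zip(applyMMt(m, q)).map { case (a, b) => a * b }.sum
    (q, math.sqrt(sigmaSq))
  }

  def main(args: Array[String]): Unit = {
    val m = Array(Array(3.0, 0.0), Array(0.0, 1.0))
    val (_, sigma1) = leadingLeftSingularVector(m)
    println(f"leading singular value ~ $sigma1%.3f") // ~ 3.000 for this diagonal example
  }
}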
package com.datastax.spark.connector.cql import com.datastax.driver.core.{AuthProvider, PlainTextAuthProvider} import com.datastax.spark.connector.util.ReflectionUtil import org.apache.spark.SparkConf /** Stores credentials used to authenticate to a Cassandra cluster and uses them * to configure a Cassandra connection. * This driver provides implementations [[NoAuthConf]] for no authentication * and [[PasswordAuthConf]] for password authentication. Other authentication * configurators can be plugged in by setting `cassandra.authentication.conf.factory.class` * option. See [[AuthConfFactory]]. */ trait AuthConf extends Serializable { /** Returns auth provider to be passed to the `Cluster.Builder` object. */ def authProvider: AuthProvider /** Returns auth credentials to be set in the Thrift authentication request. */ def thriftCredentials: Map[String, String] } /** Performs no authentication. Use with `AllowAllAuthenticator` in Cassandra. */ case object NoAuthConf extends AuthConf { override def authProvider = AuthProvider.NONE override def thriftCredentials: Map[String, String] = Map.empty } /** Performs plain-text password authentication. Use with `PasswordAuthenticator` in Cassandra. */ case class PasswordAuthConf(user: String, password: String) extends AuthConf { override def authProvider = new PlainTextAuthProvider(user, password) override def thriftCredentials: Map[String, String] = Map("username" -> user, "password" -> password) } /** Obtains authentication configuration by reading `SparkConf` object. */ trait AuthConfFactory { def authConf(conf: SparkConf): AuthConf } /** Default `AuthConfFactory` that supports no authentication or password authentication. * Password authentication is enabled when both `spark.cassandra.auth.username` and `spark.cassandra.auth.password` * options are present in `SparkConf`.*/ object DefaultAuthConfFactory extends AuthConfFactory { val CassandraUserNameProperty = "spark.cassandra.auth.username" val CassandraPasswordProperty = "spark.cassandra.auth.password" def authConf(conf: SparkConf): AuthConf = { val credentials = for (username <- conf.getOption(CassandraUserNameProperty); password <- conf.getOption(CassandraPasswordProperty)) yield (username, password) credentials match { case Some((user, password)) => PasswordAuthConf(user, password) case None => NoAuthConf } } } /** Entry point for obtaining `AuthConf` object from `SparkConf`, used when establishing connections to Cassandra. * The actual `AuthConf` creation is delegated to the [[AuthConfFactory]] pointed by `spark.cassandra.auth.conf.factory` property. */ object AuthConf { val AuthConfFactoryProperty = "spark.cassandra.auth.conf.factory" def fromSparkConf(conf: SparkConf) = { val authConfFactory = conf .getOption(AuthConfFactoryProperty) .map(ReflectionUtil.findGlobalObject[AuthConfFactory]) .getOrElse(DefaultAuthConfFactory) authConfFactory.authConf(conf) } }
brkyvz/spark-cassandra-connector
spark-cassandra-connector/src/main/scala/com/datastax/spark/connector/cql/AuthConf.scala
Scala
apache-2.0
3,042
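// A short usage sketch for the authentication configuration described above: when both the username and
// password properties are set, DefaultAuthConfFactory yields a PasswordAuthConf, otherwise NoAuthConf.
// The property keys and AuthConf.fromSparkConf come straight from the code above; the credential values
// are placeholders.
import com.datastax.spark.connector.cql.{AuthConf, NoAuthConf, PasswordAuthConf}
import org.apache.spark.SparkConf

object AuthConfSketch {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .set("spark.cassandra.auth.username", "cassandra")
      .set("spark.cassandra.auth.password", "cassandra")

    AuthConf.fromSparkConf(conf) match {
      case PasswordAuthConf(user, _) => println(s"password auth as $user")
      case NoAuthConf                => println("no authentication")
      case other                     => println(s"custom auth conf: $other")
    }
  }
}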
/* * Copyright (c) 2016. Y Experiment (yexperiment.com) MIT License */ package states import com.jme3.app.Application import com.jme3.app.state.AppStateManager import com.jme3.asset.AssetManager import com.jme3.light.DirectionalLight import com.jme3.light.AmbientLight import com.jme3.math.ColorRGBA import com.jme3.math.Vector3f import com.jme3.scene.Node import com.jme3.util.SkyFactory class EnvironmentState(parentNode: Node) extends DefaultState(parentNode) { val ambientLight = new AmbientLight() val directionalLight = new DirectionalLight(Vector3f.UNIT_Z, ColorRGBA.White) var lightAdded = false def onAdd(node: Node): Unit = { } def onDel(node: Node): Unit = { } def onInit(stateManager: AppStateManager, app: Application): Unit = { loadSkybox("LW3D", app.getAssetManager) setAmbientColor(ColorRGBA.White) parentNode.addLight(directionalLight) app.getViewPort.setBackgroundColor(ColorRGBA.Green) } def onUpdate(node: Node, tpf: Float): Unit = { } def onClean(): Unit = { } def setAmbientColor(color: ColorRGBA) = { ambientLight.setColor(color) if (!lightAdded) { parentNode.addLight(ambientLight) lightAdded = true } } def loadSkybox(name: String, assetManager: AssetManager) = { rootNode.attachChild(SkyFactory.createSky(assetManager, assetManager.loadTexture("Skybox/" + name + "_left2.png"), assetManager.loadTexture("Skybox/" + name + "_right1.png"), assetManager.loadTexture("Skybox/" + name + "_front5.png"), assetManager.loadTexture("Skybox/" + name + "_back6.png"), assetManager.loadTexture("Skybox/" + name + "_top3.png"), assetManager.loadTexture("Skybox/" + name + "_bottom4.png") )) } }
Y-Experiment/LW3D
src/main/scala/states/EnvironmentState.scala
Scala
mit
1,742
package reactivemongo.api import scala.language.higherKinds import scala.collection.generic.CanBuildFrom import scala.collection.mutable.Builder import scala.concurrent.{ ExecutionContext, Future } private[api] trait CursorCompat[T] { _: DefaultCursor.Impl[T] with CursorCompatAPI[T] => import Cursor.{ Cont, ErrorHandler } def collect[M[_]](maxDocs: Int, err: ErrorHandler[M[T]])(implicit cbf: CanBuildFrom[M[_], T, M[T]], ec: ExecutionContext): Future[M[T]] = { if (maxDocs == 0 || maxDocs < -1) { Future(cbf().result()) } else { foldWhile[Builder[T, M[T]]](cbf(), maxDocs)( { (builder, a) => Cont(builder += a) }, { (b: Builder[T, M[T]], t: Throwable) => err(b.result(), t).map[Builder[T, M[T]]](_ => b) }).map(_.result()) } } override def peek[M[_]](maxDocs: Int)(implicit cbf: CanBuildFrom[M[_], T, M[T]], ec: ExecutionContext): Future[Cursor.Result[M[T]]] = { if (maxDocs == 0 || maxDocs < -1) { def ref = new Cursor.Reference( collectionName = fullCollectionName, cursorId = 0, numberToReturn = 0, tailable = this.tailable, pinnedNode = None) Future(new Cursor.Result[M[T]](cbf().result(), ref)) } else { makeRequest(maxDocs).map { resp => def builder = documentIterator(resp).foldLeft(cbf()) { _ += _ } println(s"numberToReturn = ${this.numberToReturn}") val ref = new Cursor.Reference( collectionName = fullCollectionName, cursorId = resp.reply.cursorID, numberToReturn = this.numberToReturn, tailable = this.tailable, pinnedNode = transaction.flatMap(_.pinnedNode)) new Cursor.Result[M[T]](builder.result(), ref) } } } }
cchantep/ReactiveMongo
driver/src/main/scala-2.13-/api/CursorCompat.scala
Scala
apache-2.0
1,771
package com.aquamentis.nwsr import android.content.ContentValues import android.content.Context import android.content.SharedPreferences import android.database.Cursor import android.database.sqlite.SQLiteDatabase import android.database.sqlite.SQLiteOpenHelper import android.preference.PreferenceManager import scala.collection.immutable.SortedSet import scala.math.exp import scala.util.Random import com.aquamentis.util.Feed import com.aquamentis.util.Story import com.aquamentis.util.RichDatabase._ object NWSRDatabaseHelper { val name = "nwsr.db" val version = 6 val createStories = ("create table story (" + "_id integer primary key, " + "title string, " + "title_hash blob, " + "link string, " + "weight real, " + "updated integer, " + // Status = 0 for hidden, 1 for "headlines", 2 for "saved" "status integer, " + "feed integer references feed);") val createFeeds = ("create table feed (" + "_id integer primary key, " + "title string, " + "link string, " + "display_link string, " + "etag string, " + "last_modified string);") var helper: Option[SQLiteOpenHelper] = None def apply(context: Context): SQLiteOpenHelper = synchronized { helper match { case Some(h) => h case None => { val h = new SQLiteOpenHelper(context, name, null, version) { override def onCreate(db: SQLiteDatabase) { db.exclusiveTransaction { db.execSQL(createStories) db.execSQL(createFeeds) } } override def onUpgrade(db: SQLiteDatabase, oldVer: Int, newVer: Int) { if (oldVer == 5) { db.exclusiveTransaction { db.execSQL("drop table story") db.execSQL(createStories) db.execSQL("drop table saved;") db.execSQL("drop table bigram;") db.execSQL("drop table word;") db.execSQL("drop table domain;") } } } } helper = Some(h) h } } } } object NWSRDatabase { def apply(context: Context): NWSRDatabase = { new NWSRDatabase(NWSRDatabaseHelper(context).getWritableDatabase(), PreferenceManager.getDefaultSharedPreferences(context)) } /** Construct a hash value for the story by concatenating the smallest * 3 hash values of character trigrams of the title. * * There are faster ways to find the minimum 3 than sorting, but we'll * never have thousands of items to sift through. 
*/ def titleHash(story: Story): String = { val processed = story.title.toLowerCase "%08x%08x%08x".format( (SortedSet(processed.sliding(3).map(_.hashCode).toSeq:_*) ++ Seq(Int.MaxValue, Int.MaxValue-1, Int.MaxValue-2)) .take(3).toSeq:_*) } } class NWSRDatabase (val db: SQLiteDatabase, val prefs: SharedPreferences) { import NWSRDatabase._ val rng: Random = new Random() def storyView(): Cursor = { val limit = prefs.getString("stories_per_page", "20") db.query("story", Array("_id", "title", "link"), "status = 1", null, null, null, "weight desc", limit) } def feedView(): Cursor = db.query( "feed", Array("_id", "title", "display_link"), null, null, null, null, "title asc") def savedView(): Cursor = db.query( "story", Array("_id", "title", "link"), "status = 2", null, null, null, "updated desc") def addFeed(feed: Feed, id: Option[Long]) { val values = new ContentValues() val now: Long = System.currentTimeMillis values.put("title", feed.title) values.put("link", feed.link) values.put("display_link", feed.displayLink) feed.etag match { case Some(e) => values.put("etag", e) case None => } feed.lastMod match { case Some(lm) => values.put("last_modified", lm) case None => } /* This might be a good place for a wrapped transaction, but addStory * involves a lot of processing for the transaction to be exclusive */ val feedId = id match { case None => db.insert("feed", null, values) case Some(i) => { db.update("feed", values, "_id = " + i, Array.empty[String]) i } } for (story <- feed.stories) { addStory(story, feedId) } } def deleteFeed(id: Long) { db.exclusiveTransaction { db.delete("story", "feed = " + id, null) db.delete("feed", "_id = " + id, null) } } def feedsToRefresh(req: FeedRequest): Cursor = req match { case FeedLink(link: String) => db.rawQuery( "select null", Array.empty[String]) case FeedId(id: Long) => db.rawQuery( "select _id, link, etag, last_modified from feed where _id = %d" .format(id), Array.empty[String]) case FeedAll => db.query( "feed", Array("_id", "link", "etag", "last_modified"), null, null, null, null, null) } def addStory(story: Story, id: Long) { val hash = titleHash(story) // Assume http, https links remain as they are val link = story.link.stripPrefix("http://") db.query("select 1 where exists (select null from story where title_hash = '%s' and link = '%s')" .format(hash, link)) .ifNotExists { val values = new ContentValues() values.put("title", story.title) values.put("title_hash", hash) values.put("link", link) values.put("weight", rng.nextDouble()) values.put("updated", java.lang.Long.valueOf(System.currentTimeMillis)) db.query("select weight, updated, status from story where title_hash = '%s' order by status asc".format(hash)) .ifExists { (c: Cursor) => val status = c.getInt(2) if (status == 0 || status == 1) { values.put("weight", c.getDouble(0)) values.put("updated", java.lang.Long.valueOf(c.getLong(1))) } } values.put("status", java.lang.Integer.valueOf(1)) values.put("feed", java.lang.Long.valueOf(id)) db.insert("story", null, values) } } def hideStories(ids: Array[Long]) { val values = new ContentValues() values.put("status", java.lang.Integer.valueOf(0)) db.update("story", values, "_id in (%s)".format(ids.mkString(", ")), null) } def purgeOld() { val rate: Double = prefs.getString("story_decay_rate", "8.88343e-09").toDouble val now = System.currentTimeMillis db.exclusiveTransaction { db.query("select _id, weight, updated from story where status = 1").foreach { (c: Cursor) => val timeAgo = now - c.getLong(2) if (timeAgo > 3600000) { val values = new 
ContentValues() values.put("weight", c.getDouble(1) * exp(-1.0*rate*timeAgo)) values.put("updated", java.lang.Long.valueOf(now)) db.update("story", values, "_id = ?", Array[String](c.getLong(0).toString)) } } } val stories = db.singleLongQuery("select count(*) from story where not status = 2") // Hard limit of 10000 stories for now, maybe make it a preference later if (stories > 10000) { db.execSQL("delete from story where _id in (select _id from story order by weight asc limit %d)" .format(stories - 10000)) } } def addSaved(id: Long) { val values = new ContentValues() values.put("status", java.lang.Integer.valueOf(2)) values.put("updated", java.lang.Long.valueOf(System.currentTimeMillis)) db.update("story", values, "_id = ?", Array(id.toString)) } def deleteSaved(id: Option[Long]) { val values = new ContentValues() values.put("status", java.lang.Integer.valueOf(0)) id match { case None => db.update("story", values, "status = 2", null) case Some(id) => db.update( "story", values, "_id = ?",Array(id.toString)) } } }
alexclare/nwsr
src/scala/nwsr/database.scala
Scala
gpl-3.0
8,193
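// A small worked sketch of the title-hash scheme documented above: lower-case the title, hash every
// 3-character window, keep the three smallest hash values and render them as fixed-width hex. This mirrors
// NWSRDatabase.titleHash for illustration; the example headlines are made up.
import scala.collection.immutable.SortedSet

object TitleHashSketch {
  def titleHash(title: String): String = {
    val processed = title.toLowerCase
    "%08x%08x%08x".format(
      (SortedSet(processed.sliding(3).map(_.hashCode).toSeq: _*) ++
        Seq(Int.MaxValue, Int.MaxValue - 1, Int.MaxValue - 2)) // padding so very short titles still yield 3 values
        .take(3).toSeq: _*)
  }

  def main(args: Array[String]): Unit = {
    // titles differing only in case collapse to the same hash, so they are treated as duplicates
    println(titleHash("Breaking News: Example Headline"))
    println(titleHash("BREAKING NEWS: Example Headline"))
  }
}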
/******************************************************************************* * Copyright 2010 Maxime Lévesque * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ***************************************************************************** */ package org.squeryl.internals import java.lang.annotation.Annotation import net.sf.cglib.proxy.{Factory, Callback, CallbackFilter, Enhancer, NoOp} import java.lang.reflect.{Member, Constructor, Method, Field, Modifier} import collection.mutable.{HashSet, ArrayBuffer} import org.squeryl.annotations._ import org.squeryl._ class PosoMetaData[T](val clasz: Class[T], val schema: Schema, val viewOrTable: View[T]) { override def toString = 'PosoMetaData + "[" + clasz.getSimpleName + "]" + fieldsMetaData.mkString("(",",",")") def findFieldMetaDataForProperty(name: String) = _fieldsMetaData.find(fmd => fmd.nameOfProperty == name) val isOptimistic = viewOrTable.ked.map(_.isOptimistic).getOrElse(false) val constructor = _const.headOption.orElse(org.squeryl.internals.Utils.throwError(clasz.getName + " must have a 0 param constructor or a constructor with only primitive types")).get def fieldsMetaData = _fieldsMetaData.filter(! _.isTransient) /** * fieldsMetaData the metadata of the persistent fields of this Poso * primaryKey None if this Poso is not a KeyedEntity[], Either[a persistedField, a composite key] */ val (_fieldsMetaData, primaryKey): (Iterable[FieldMetaData], Option[Either[FieldMetaData,Method]]) = { val isImplicitMode = _isImplicitMode val sampleInstance4OptionTypeDeduction = try { constructor._1.newInstance(constructor._2 :_*).asInstanceOf[AnyRef]; } catch { case e:IllegalArgumentException => throw new RuntimeException("invalid constructor choice " + constructor._1, e) case e:Exception => throw new RuntimeException("exception occurred while invoking constructor : " + constructor._1, e) } val members = new ArrayBuffer[(Member,HashSet[Annotation])] _fillWithMembers(clasz, members) val name2MembersMap = members.groupBy(m => { val n = m._1.getName val idx = n.indexOf("_$eq") if(idx != -1) n.substring(0, idx) else n }) val fmds = new ArrayBuffer[FieldMetaData]; for(e <- name2MembersMap) { val name = e._1 val v = e._2 var a:Set[Annotation] = Set.empty for(memberWithAnnotationTuple <- v) a = a.union(memberWithAnnotationTuple._2) val members = v.map(t => t._1) // here we do a filter and not a find, because there can be more than one setter/getter/field // with the same name, we want one that is not an erased type, excluding return and input type // of java.lang.Object does it. 
val o = classOf[java.lang.Object] val field = members.collectFirst{case f: Field if f.getType != o => f} val getter = members.collectFirst{case m: Method if (m.getName == name) && (m.getReturnType != o) => m} val setter = members.collectFirst{case m: Method if m.getName.endsWith("_$eq") && (m.getParameterTypes.apply(0) != o) => m} val property = (field, getter, setter, a) if(isImplicitMode && _groupOfMembersIsProperty(property)) { val isOptimisitcCounter = (for(k <- viewOrTable.ked; counterProp <- k.optimisticCounterPropertyName if counterProp == name) yield true).isDefined try { val r = FieldMetaData.factory.build(this, name, property, sampleInstance4OptionTypeDeduction, isOptimisitcCounter) fmds.append(r) } catch { case e:Exception => println(">>>" + clasz.getCanonicalName) println(">>>" + name) throw new RuntimeException( Utils.failSafeString( "error while reflecting on metadata for " + property + " of class " + this.clasz.getCanonicalName), e) } } } val k = fmds.find(fmd => fmd.isIdFieldOfKeyedEntity) val compositePrimaryKeyGetter: Option[Method] = if(k != None) // can't have both PK Field and CompositePK None else { // verify if we have an 'id' method that is a composite key, in this case we need to construct a // FieldMetaData that will become the 'primaryKey' field of this PosoMetaData viewOrTable.ked.map { ked => val pkMethod = clasz.getMethod(ked.idPropertyName) assert(pkMethod != null, "Could not get getter for " + ked.idPropertyName + " in " + clasz.getCanonicalName()) pkMethod } } val metaDataForPk: Option[Either[FieldMetaData,Method]] = if(k != None) Some(Left(k.get)) else if(compositePrimaryKeyGetter != None) Some(Right(compositePrimaryKeyGetter.get)) else None (fmds, metaDataForPk) //: (Iterable[FieldMetaData], Option[Either[FieldMetaData,Method]]) } def optimisticCounter = fieldsMetaData.find(fmd => fmd.isOptimisticCounter) if(isOptimistic) assert(optimisticCounter != None) def _const = { val r = new ArrayBuffer[(Constructor[_],Array[Object])] // for(ct <- clasz.getConstructors) // println("CT: " + ct.getParameterTypes.map(c=>c.getName).mkString(",")) for(ct <- clasz.getConstructors) _tryToCreateParamArray(r, ct) r.sortWith( (a:(Constructor[_],Array[Object]), b:(Constructor[_],Array[Object])) => a._2.length < b._2.length ) } def _tryToCreateParamArray( r: ArrayBuffer[(Constructor[_],Array[Object])], c: Constructor[_]): Unit = { val params: Array[Class[_]] = c.getParameterTypes if(params.length >= 1) { val cn = clasz.getName val test = params(0).getName + "$" + clasz.getSimpleName if(cn == test) org.squeryl.internals.Utils.throwError("inner classes are not supported, except when outer class is a singleton (object)\\ninner class is : " + cn) } val res = new Array[Object](params.length) for(i <- 0 to params.length -1) { val v = FieldMetaData.createDefaultValue(schema.fieldMapper, c, params(i), None, None) res(i) = v } r.append((c, res)) } //def createSamplePoso[T](vxn: ViewExpressionNode[T], classOfT: Class[T]): T = { //Enhancer.create(classOfT, new PosoPropertyAccessInterceptor(vxn)).asInstanceOf[T] //} def createSample(cb: Callback) = FieldReferenceLinker.executeAndRestoreLastAccessedFieldReference(_builder(cb)) private val _builder: (Callback) => T = { val e = new Enhancer e.setSuperclass(clasz) val pc: Array[Class[_]] = constructor._1.getParameterTypes e.setUseFactory(true) (callB:Callback) => { val cb = Array[Callback](callB, NoOp.INSTANCE) e.setCallbacks(cb) e.setCallbackFilter(PosoMetaData.finalizeFilter) //TODO : are we creating am unnecessary instance ? 
val fac = e.create(pc , constructor._2).asInstanceOf[Factory] fac.newInstance(pc, constructor._2, cb).asInstanceOf[T] } } private def _isImplicitMode = { val rowAnnotation = clasz.getAnnotation(classOf[Row]) rowAnnotation == null || rowAnnotation.fieldToColumnCorrespondanceMode == FieldToColumnCorrespondanceMode.IMPLICIT } private def _groupOfMembersIsProperty(property: (Option[Field], Option[Method], Option[Method], Set[Annotation])): Boolean = { if(property._4.find(an => an.isInstanceOf[Transient]) != None) return false val hasAField = property._1.exists { field => !Modifier.isStatic(field.getModifiers) } val hasGetter = property._2.exists { getter => !Modifier.isStatic(getter.getModifiers) && !classOf[java.lang.Void].isAssignableFrom(getter.getReturnType) && getter.getParameterTypes.length == 0 } val hasSetter = property._3.exists { setter => !Modifier.isStatic(setter.getModifiers) && property._3.get.getParameterTypes.length == 1 } val memberTypes = new ArrayBuffer[Class[_]] if(hasAField) memberTypes.append(property._1.get.getType) if(hasGetter) memberTypes.append(property._2.get.getReturnType) if(hasSetter) memberTypes.append(property._3.get.getParameterTypes.apply(0)) //not a property if it has no getter, setter or field if(memberTypes.isEmpty) return false //verify that all types are compatible : val c = memberTypes.remove(0) for(c0 <- memberTypes) { if((!c0.isAssignableFrom(c)) && (!c.isAssignableFrom(c0))) return false } (hasAField, hasGetter, hasSetter) match { case (true, false, false) => true case (false, true, true) => true case (true, true, true) => true case (true, true, false) => true case _ => false } } private def _includeAnnotation(a: Annotation) = a.isInstanceOf[ColumnBase] || a.isInstanceOf[Transient] || a.isInstanceOf[OptionType] private def _addAnnotations(m: Field, s: HashSet[Annotation]) = for(a <- m.getAnnotations if _includeAnnotation(a)) s.add(a) private def _addAnnotations(m: Method, s: HashSet[Annotation]) = for(a <- m.getAnnotations if _includeAnnotation(a)) s.add(a) private def _includeFieldOrMethodType(c: Class[_]) = schema.fieldMapper.isSupported(c) //! classOf[Query[_]].isAssignableFrom(c) private def _fillWithMembers(clasz: Class[_], members: ArrayBuffer[(Member,HashSet[Annotation])]): Unit = { for(m <-clasz.getMethods if(m.getDeclaringClass != classOf[Object]) && _includeFieldOrMethodType(m.getReturnType)) { m.setAccessible(true) val t = (m, new HashSet[Annotation]) _addAnnotations(m, t._2) members.append(t) } for(m <- clasz.getDeclaredFields if (m.getName.indexOf("$") == -1) && _includeFieldOrMethodType(m.getType)) { m.setAccessible(true) val t = (m, new HashSet[Annotation]) _addAnnotations(m, t._2) members.append(t) } val c = clasz.getSuperclass if(c != null) _fillWithMembers(c, members) } } object PosoMetaData { val finalizeFilter = new CallbackFilter { def accept(method: Method): Int = if (method.getName == "finalize") 1 else 0 } }
ccap/Squeryl
src/main/scala/org/squeryl/internals/PosoMetaData.scala
Scala
apache-2.0
10,881
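The member-grouping trick in PosoMetaData above — reflecting all members, then collecting a backing field, a getter and a setter (name suffixed with `_$eq`) under one property name — can be sketched on its own. This is a simplified, hypothetical illustration with no annotation tracking and no erased-type filtering; it is not Squeryl's actual API.

import java.lang.reflect.Member

object PropertyGroupingSketch {
  // Group a class's declared fields and methods under a common property name,
  // treating Scala setters (name ending in "_$eq") as belonging to the getter's name.
  def groupByProperty(clasz: Class[_]): Map[String, Seq[Member]] = {
    val members: Seq[Member] =
      (clasz.getDeclaredFields.toSeq: Seq[Member]) ++ clasz.getDeclaredMethods.toSeq
    members.groupBy { m =>
      val n = m.getName
      val idx = n.indexOf("_$eq")
      if (idx != -1) n.substring(0, idx) else n
    }
  }

  // Example class with a var, which compiles to a field, a getter and a setter.
  class Person { var name: String = "" }

  def main(args: Array[String]): Unit =
    groupByProperty(classOf[Person]).foreach { case (prop, ms) =>
      println(prop + " -> " + ms.map(_.getName).mkString(", "))
    }
}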
package example

import java.util.NoSuchElementException

object Lists {
  /**
   * This method computes the sum of all elements in the list xs. There are
   * multiple techniques that can be used for implementing this method, which
   * you will learn during the class.
   *
   * For this example assignment you can use the following methods in class
   * `List`:
   *
   *  - `xs.isEmpty: Boolean` returns `true` if the list `xs` is empty
   *  - `xs.head: Int` returns the head element of the list `xs`. If the list
   *    is empty an exception is thrown
   *  - `xs.tail: List[Int]` returns the tail of the list `xs`, i.e. the
   *    list `xs` without its `head` element
   *
   * ''Hint:'' instead of writing a `for` or `while` loop, think of a recursive
   * solution.
   *
   * @param xs A list of natural numbers
   * @return The sum of all elements in `xs`
   */
  def sum(xs: List[Int]): Int = {
    if (xs.isEmpty) 0
    else xs.head + sum(xs.tail)
  }

  /**
   * This method returns the largest element in a list of integers. If the
   * list `xs` is empty it throws a `java.util.NoSuchElementException`.
   *
   * You can use the same methods of the class `List` as mentioned above.
   *
   * ''Hint:'' Again, think of a recursive solution instead of using looping
   * constructs. You might need to define an auxiliary method.
   *
   * @param xs A list of natural numbers
   * @return The largest element in `xs`
   * @throws java.util.NoSuchElementException if `xs` is an empty list
   */
  def max(xs: List[Int]): Int = {
    def findMax(current: Int, lst: List[Int]): Int = {
      if (lst.isEmpty) current
      else if (current > lst.head) findMax(current, lst.tail)
      else findMax(lst.head, lst.tail)
    }

    if (xs.isEmpty) throw new NoSuchElementException("The list is empty")
    findMax(xs.head, xs.tail)
  }
}
guhemama/moocs
Parallel.Programming.in.Scala.Coursera/Assignment0/src/main/scala/example/Lists.scala
Scala
bsd-3-clause
1,855
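The ScalaDoc above asks for recursive solutions; as written, `sum` and `max` recurse on the list head and are not tail recursive. A possible accumulator-based variant is sketched below — an alternative illustration, not part of the assignment code.

import scala.annotation.tailrec

object ListsTailRec {
  // Accumulator-based versions of sum/max that run in constant stack space.
  def sum(xs: List[Int]): Int = {
    @tailrec
    def loop(acc: Int, rest: List[Int]): Int = rest match {
      case Nil          => acc
      case head :: tail => loop(acc + head, tail)
    }
    loop(0, xs)
  }

  def max(xs: List[Int]): Int = {
    if (xs.isEmpty) throw new java.util.NoSuchElementException("The list is empty")
    @tailrec
    def loop(best: Int, rest: List[Int]): Int = rest match {
      case Nil          => best
      case head :: tail => loop(if (head > best) head else best, tail)
    }
    loop(xs.head, xs.tail)
  }

  def main(args: Array[String]): Unit = {
    println(sum(List(1, 2, 3))) // 6
    println(max(List(3, 7, 2))) // 7
  }
}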
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.datasources import java.util.Locale import org.apache.spark.sql.{AnalysisException, SaveMode, SparkSession} import org.apache.spark.sql.catalyst.analysis._ import org.apache.spark.sql.catalyst.catalog._ import org.apache.spark.sql.catalyst.expressions.{Expression, InputFileBlockLength, InputFileBlockStart, InputFileName, RowOrdering} import org.apache.spark.sql.catalyst.plans.logical._ import org.apache.spark.sql.catalyst.rules.Rule import org.apache.spark.sql.connector.catalog.CatalogV2Util.assertNoNullTypeInSchema import org.apache.spark.sql.connector.expressions.{FieldReference, RewritableTransform} import org.apache.spark.sql.errors.QueryCompilationErrors import org.apache.spark.sql.execution.command.DDLUtils import org.apache.spark.sql.execution.datasources.v2.FileDataSourceV2 import org.apache.spark.sql.sources.InsertableRelation import org.apache.spark.sql.types.{AtomicType, StructType} import org.apache.spark.sql.util.PartitioningUtils.normalizePartitionSpec import org.apache.spark.sql.util.SchemaUtils /** * Replaces [[UnresolvedRelation]]s if the plan is for direct query on files. */ class ResolveSQLOnFile(sparkSession: SparkSession) extends Rule[LogicalPlan] { private def maybeSQLFile(u: UnresolvedRelation): Boolean = { conf.runSQLonFile && u.multipartIdentifier.size == 2 } def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperators { case u: UnresolvedRelation if maybeSQLFile(u) => try { val dataSource = DataSource( sparkSession, paths = u.multipartIdentifier.last :: Nil, className = u.multipartIdentifier.head) // `dataSource.providingClass` may throw ClassNotFoundException, then the outer try-catch // will catch it and return the original plan, so that the analyzer can report table not // found later. val isFileFormat = classOf[FileFormat].isAssignableFrom(dataSource.providingClass) if (!isFileFormat || dataSource.className.toLowerCase(Locale.ROOT) == DDLUtils.HIVE_PROVIDER) { throw QueryCompilationErrors.unsupportedDataSourceTypeForDirectQueryOnFilesError( dataSource.className) } LogicalRelation(dataSource.resolveRelation()) } catch { case _: ClassNotFoundException => u case e: Exception => // the provider is valid, but failed to create a logical plan u.failAnalysis(e.getMessage, e) } } } /** * Preprocess [[CreateTable]], to do some normalization and checking. 
*/ case class PreprocessTableCreation(sparkSession: SparkSession) extends Rule[LogicalPlan] { // catalog is a def and not a val/lazy val as the latter would introduce a circular reference private def catalog = sparkSession.sessionState.catalog def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperators { // When we CREATE TABLE without specifying the table schema, we should fail the query if // bucketing information is specified, as we can't infer bucketing from data files currently. // Since the runtime inferred partition columns could be different from what user specified, // we fail the query if the partitioning information is specified. case c @ CreateTable(tableDesc, _, None) if tableDesc.schema.isEmpty => if (tableDesc.bucketSpec.isDefined) { failAnalysis("Cannot specify bucketing information if the table schema is not specified " + "when creating and will be inferred at runtime") } if (tableDesc.partitionColumnNames.nonEmpty) { failAnalysis("It is not allowed to specify partition columns when the table schema is " + "not defined. When the table schema is not provided, schema and partition columns " + "will be inferred.") } c // When we append data to an existing table, check if the given provider, partition columns, // bucket spec, etc. match the existing table, and adjust the columns order of the given query // if necessary. case c @ CreateTable(tableDesc, SaveMode.Append, Some(query)) if query.resolved && catalog.tableExists(tableDesc.identifier) => // This is guaranteed by the parser and `DataFrameWriter` assert(tableDesc.provider.isDefined) val db = tableDesc.identifier.database.getOrElse(catalog.getCurrentDatabase) val tableIdentWithDB = tableDesc.identifier.copy(database = Some(db)) val tableName = tableIdentWithDB.unquotedString val existingTable = catalog.getTableMetadata(tableIdentWithDB) if (existingTable.tableType == CatalogTableType.VIEW) { throw QueryCompilationErrors.saveDataIntoViewNotAllowedError() } // Check if the specified data source match the data source of the existing table. val existingProvider = DataSource.lookupDataSource(existingTable.provider.get, conf) val specifiedProvider = DataSource.lookupDataSource(tableDesc.provider.get, conf) // TODO: Check that options from the resolved relation match the relation that we are // inserting into (i.e. using the same compression). // If the one of the provider is [[FileDataSourceV2]] and the other one is its corresponding // [[FileFormat]], the two providers are considered compatible. if (fallBackV2ToV1(existingProvider) != fallBackV2ToV1(specifiedProvider)) { throw QueryCompilationErrors.mismatchedTableFormatError( tableName, existingProvider, specifiedProvider) } tableDesc.storage.locationUri match { case Some(location) if location.getPath != existingTable.location.getPath => throw QueryCompilationErrors.mismatchedTableLocationError( tableIdentWithDB, existingTable, tableDesc) case _ => } if (query.schema.length != existingTable.schema.length) { throw QueryCompilationErrors.mismatchedTableColumnNumberError( tableName, existingTable, query) } val resolver = conf.resolver val tableCols = existingTable.schema.map(_.name) // As we are inserting into an existing table, we should respect the existing schema and // adjust the column order of the given dataframe according to it, or throw exception // if the column names do not match. 
val adjustedColumns = tableCols.map { col => query.resolve(Seq(col), resolver).getOrElse { val inputColumns = query.schema.map(_.name).mkString(", ") throw QueryCompilationErrors.cannotResolveColumnGivenInputColumnsError(col, inputColumns) } } // Check if the specified partition columns match the existing table. val specifiedPartCols = CatalogUtils.normalizePartCols( tableName, tableCols, tableDesc.partitionColumnNames, resolver) if (specifiedPartCols != existingTable.partitionColumnNames) { val existingPartCols = existingTable.partitionColumnNames.mkString(", ") throw QueryCompilationErrors.mismatchedTablePartitionColumnError( tableName, specifiedPartCols, existingPartCols) } // Check if the specified bucketing match the existing table. val specifiedBucketSpec = tableDesc.bucketSpec.map { bucketSpec => CatalogUtils.normalizeBucketSpec(tableName, tableCols, bucketSpec, resolver) } if (specifiedBucketSpec != existingTable.bucketSpec) { val specifiedBucketString = specifiedBucketSpec.map(_.toString).getOrElse("not bucketed") val existingBucketString = existingTable.bucketSpec.map(_.toString).getOrElse("not bucketed") throw QueryCompilationErrors.mismatchedTableBucketingError( tableName, specifiedBucketString, existingBucketString) } val newQuery = if (adjustedColumns != query.output) { Project(adjustedColumns, query) } else { query } c.copy( tableDesc = existingTable, query = Some(TableOutputResolver.resolveOutputColumns( tableDesc.qualifiedName, existingTable.schema.toAttributes, newQuery, byName = true, conf))) // Here we normalize partition, bucket and sort column names, w.r.t. the case sensitivity // config, and do various checks: // * column names in table definition can't be duplicated. // * partition, bucket and sort column names must exist in table definition. // * partition, bucket and sort column names can't be duplicated. // * can't use all table columns as partition columns. // * partition columns' type must be AtomicType. // * sort columns' type must be orderable. // * reorder table schema or output of query plan, to put partition columns at the end. 
case c @ CreateTable(tableDesc, _, query) if query.forall(_.resolved) => if (query.isDefined) { assert(tableDesc.schema.isEmpty, "Schema may not be specified in a Create Table As Select (CTAS) statement") val analyzedQuery = query.get val normalizedTable = normalizeCatalogTable(analyzedQuery.schema, tableDesc) DDLUtils.checkDataColNames(tableDesc.copy(schema = analyzedQuery.schema)) val output = analyzedQuery.output val partitionAttrs = normalizedTable.partitionColumnNames.map { partCol => output.find(_.name == partCol).get } val newOutput = output.filterNot(partitionAttrs.contains) ++ partitionAttrs val reorderedQuery = if (newOutput == output) { analyzedQuery } else { Project(newOutput, analyzedQuery) } c.copy(tableDesc = normalizedTable, query = Some(reorderedQuery)) } else { DDLUtils.checkDataColNames(tableDesc) val normalizedTable = normalizeCatalogTable(tableDesc.schema, tableDesc) val partitionSchema = normalizedTable.partitionColumnNames.map { partCol => normalizedTable.schema.find(_.name == partCol).get } val reorderedSchema = StructType(normalizedTable.schema.filterNot(partitionSchema.contains) ++ partitionSchema) c.copy(tableDesc = normalizedTable.copy(schema = reorderedSchema)) } case create: V2CreateTablePlan if create.childrenResolved => val schema = create.tableSchema val partitioning = create.partitioning val identifier = create.tableName val isCaseSensitive = conf.caseSensitiveAnalysis // Check that columns are not duplicated in the schema val flattenedSchema = SchemaUtils.explodeNestedFieldNames(schema) SchemaUtils.checkColumnNameDuplication( flattenedSchema, s"in the table definition of $identifier", isCaseSensitive) // Check that columns are not duplicated in the partitioning statement SchemaUtils.checkTransformDuplication( partitioning, "in the partitioning", isCaseSensitive) if (schema.isEmpty) { if (partitioning.nonEmpty) { throw QueryCompilationErrors.specifyPartitionNotAllowedWhenTableSchemaNotDefinedError() } create } else { // Resolve and normalize partition columns as necessary val resolver = conf.resolver val normalizedPartitions = partitioning.map { case transform: RewritableTransform => val rewritten = transform.references().map { ref => // Throws an exception if the reference cannot be resolved val position = SchemaUtils.findColumnPosition(ref.fieldNames(), schema, resolver) FieldReference(SchemaUtils.getColumnName(position, schema)) } transform.withReferences(rewritten) case other => other } create.withPartitioning(normalizedPartitions) } } private def fallBackV2ToV1(cls: Class[_]): Class[_] = cls.newInstance match { case f: FileDataSourceV2 => f.fallbackFileFormat case _ => cls } private def normalizeCatalogTable(schema: StructType, table: CatalogTable): CatalogTable = { SchemaUtils.checkSchemaColumnNameDuplication( schema, "in the table definition of " + table.identifier, conf.caseSensitiveAnalysis) assertNoNullTypeInSchema(schema) val normalizedPartCols = normalizePartitionColumns(schema, table) val normalizedBucketSpec = normalizeBucketSpec(schema, table) normalizedBucketSpec.foreach { spec => for (bucketCol <- spec.bucketColumnNames if normalizedPartCols.contains(bucketCol)) { throw QueryCompilationErrors.bucketingColumnCannotBePartOfPartitionColumnsError( bucketCol, normalizedPartCols) } for (sortCol <- spec.sortColumnNames if normalizedPartCols.contains(sortCol)) { throw QueryCompilationErrors.bucketSortingColumnCannotBePartOfPartitionColumnsError( sortCol, normalizedPartCols) } } table.copy(partitionColumnNames = normalizedPartCols, bucketSpec = 
normalizedBucketSpec) } private def normalizePartitionColumns(schema: StructType, table: CatalogTable): Seq[String] = { val normalizedPartitionCols = CatalogUtils.normalizePartCols( tableName = table.identifier.unquotedString, tableCols = schema.map(_.name), partCols = table.partitionColumnNames, resolver = conf.resolver) SchemaUtils.checkColumnNameDuplication( normalizedPartitionCols, "in the partition schema", conf.resolver) if (schema.nonEmpty && normalizedPartitionCols.length == schema.length) { if (DDLUtils.isHiveTable(table)) { // When we hit this branch, it means users didn't specify schema for the table to be // created, as we always include partition columns in table schema for hive serde tables. // The real schema will be inferred at hive metastore by hive serde, plus the given // partition columns, so we should not fail the analysis here. } else { failAnalysis("Cannot use all columns for partition columns") } } schema.filter(f => normalizedPartitionCols.contains(f.name)).map(_.dataType).foreach { case _: AtomicType => // OK case other => failAnalysis(s"Cannot use ${other.catalogString} for partition column") } normalizedPartitionCols } private def normalizeBucketSpec(schema: StructType, table: CatalogTable): Option[BucketSpec] = { table.bucketSpec match { case Some(bucketSpec) => val normalizedBucketSpec = CatalogUtils.normalizeBucketSpec( tableName = table.identifier.unquotedString, tableCols = schema.map(_.name), bucketSpec = bucketSpec, resolver = conf.resolver) SchemaUtils.checkColumnNameDuplication( normalizedBucketSpec.bucketColumnNames, "in the bucket definition", conf.resolver) SchemaUtils.checkColumnNameDuplication( normalizedBucketSpec.sortColumnNames, "in the sort definition", conf.resolver) normalizedBucketSpec.sortColumnNames.map(schema(_)).map(_.dataType).foreach { case dt if RowOrdering.isOrderable(dt) => // OK case other => failAnalysis(s"Cannot use ${other.catalogString} for sorting column") } Some(normalizedBucketSpec) case None => None } } private def failAnalysis(msg: String) = throw new AnalysisException(msg) } /** * Preprocess the [[InsertIntoStatement]] plan. Throws exception if the number of columns mismatch, * or specified partition columns are different from the existing partition columns in the target * table. It also does data type casting and field renaming, to make sure that the columns to be * inserted have the correct data type and fields have the correct names. 
*/ object PreprocessTableInsertion extends Rule[LogicalPlan] { private def preprocess( insert: InsertIntoStatement, tblName: String, partColNames: StructType, catalogTable: Option[CatalogTable]): InsertIntoStatement = { val normalizedPartSpec = normalizePartitionSpec( insert.partitionSpec, partColNames, tblName, conf.resolver) val staticPartCols = normalizedPartSpec.filter(_._2.isDefined).keySet val expectedColumns = insert.table.output.filterNot(a => staticPartCols.contains(a.name)) if (expectedColumns.length != insert.query.schema.length) { throw QueryCompilationErrors.mismatchedInsertedDataColumnNumberError( tblName, insert, staticPartCols) } val partitionsTrackedByCatalog = catalogTable.isDefined && catalogTable.get.partitionColumnNames.nonEmpty && catalogTable.get.tracksPartitionsInCatalog if (partitionsTrackedByCatalog && normalizedPartSpec.nonEmpty) { // empty partition column value if (normalizedPartSpec.values.flatten.exists(v => v != null && v.isEmpty)) { val spec = normalizedPartSpec.map(p => p._1 + "=" + p._2).mkString("[", ", ", "]") throw QueryCompilationErrors.invalidPartitionSpecError( s"The spec ($spec) contains an empty partition column value") } } val newQuery = TableOutputResolver.resolveOutputColumns( tblName, expectedColumns, insert.query, byName = false, conf) if (normalizedPartSpec.nonEmpty) { if (normalizedPartSpec.size != partColNames.length) { throw QueryCompilationErrors.requestedPartitionsMismatchTablePartitionsError( tblName, normalizedPartSpec, partColNames) } insert.copy(query = newQuery, partitionSpec = normalizedPartSpec) } else { // All partition columns are dynamic because the InsertIntoTable command does // not explicitly specify partitioning columns. insert.copy(query = newQuery, partitionSpec = partColNames.map(_.name).map(_ -> None).toMap) } } def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperators { case i @ InsertIntoStatement(table, _, _, query, _, _) if table.resolved && query.resolved => table match { case relation: HiveTableRelation => val metadata = relation.tableMeta preprocess(i, metadata.identifier.quotedString, metadata.partitionSchema, Some(metadata)) case LogicalRelation(h: HadoopFsRelation, _, catalogTable, _) => val tblName = catalogTable.map(_.identifier.quotedString).getOrElse("unknown") preprocess(i, tblName, h.partitionSchema, catalogTable) case LogicalRelation(_: InsertableRelation, _, catalogTable, _) => val tblName = catalogTable.map(_.identifier.quotedString).getOrElse("unknown") preprocess(i, tblName, new StructType(), catalogTable) case _ => i } } } /** * A rule to check whether the functions are supported only when Hive support is enabled */ object HiveOnlyCheck extends (LogicalPlan => Unit) { def apply(plan: LogicalPlan): Unit = { plan.foreach { case CreateTable(tableDesc, _, _) if DDLUtils.isHiveTable(tableDesc) => throw QueryCompilationErrors.ddlWithoutHiveSupportEnabledError( "CREATE Hive TABLE (AS SELECT)") case i: InsertIntoDir if DDLUtils.isHiveTable(i.provider) => throw QueryCompilationErrors.ddlWithoutHiveSupportEnabledError( "INSERT OVERWRITE DIRECTORY with the Hive format") case _ => // OK } } } /** * A rule to do various checks before reading a table. 
*/ object PreReadCheck extends (LogicalPlan => Unit) { def apply(plan: LogicalPlan): Unit = { plan.foreach { case operator: LogicalPlan => operator transformExpressionsUp { case e @ (_: InputFileName | _: InputFileBlockLength | _: InputFileBlockStart) => checkNumInputFileBlockSources(e, operator) e } } } private def checkNumInputFileBlockSources(e: Expression, operator: LogicalPlan): Int = { operator match { case _: HiveTableRelation => 1 case _ @ LogicalRelation(_: HadoopFsRelation, _, _, _) => 1 case _: LeafNode => 0 // UNION ALL has multiple children, but these children do not concurrently use InputFileBlock. case u: Union => if (u.children.map(checkNumInputFileBlockSources(e, _)).sum >= 1) 1 else 0 case o => val numInputFileBlockSources = o.children.map(checkNumInputFileBlockSources(e, _)).sum if (numInputFileBlockSources > 1) { e.failAnalysis(s"'${e.prettyName}' does not support more than one sources") } else { numInputFileBlockSources } } } } /** * A rule to do various checks before inserting into or writing to a data source table. */ object PreWriteCheck extends (LogicalPlan => Unit) { def failAnalysis(msg: String): Unit = { throw new AnalysisException(msg) } def apply(plan: LogicalPlan): Unit = { plan.foreach { case InsertIntoStatement(l @ LogicalRelation(relation, _, _, _), partition, _, query, _, _) => // Get all input data source relations of the query. val srcRelations = query.collect { case LogicalRelation(src, _, _, _) => src } if (srcRelations.contains(relation)) { failAnalysis("Cannot insert into table that is also being read from.") } else { // OK } relation match { case _: HadoopFsRelation => // OK // Right now, we do not support insert into a non-file-based data source table with // partition specs. case _: InsertableRelation if partition.nonEmpty => failAnalysis(s"Insert into a partition is not allowed because $l is not partitioned.") case _ => failAnalysis(s"$relation does not allow insertion.") } case InsertIntoStatement(t, _, _, _, _, _) if !t.isInstanceOf[LeafNode] || t.isInstanceOf[Range] || t.isInstanceOf[OneRowRelation] || t.isInstanceOf[LocalRelation] => failAnalysis(s"Inserting into an RDD-based table is not allowed.") case _ => // OK } } }
maropu/spark
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala
Scala
apache-2.0
22,782
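PreprocessTableCreation's append path above resolves each column of the existing table against the incoming query and wraps the query in a Project when the order differs, failing if a column cannot be resolved. A rough, plain-Scala sketch of that column-reordering check follows, detached from Spark's LogicalPlan/Attribute types; every name here is illustrative, not Spark API.

object ColumnReorderSketch {
  // Given the existing table's column order and the columns produced by the incoming
  // query, return the query columns reordered to match the table, or fail with a
  // message in the spirit of cannotResolveColumnGivenInputColumnsError.
  def adjustColumns(
      tableCols: Seq[String],
      queryCols: Seq[String],
      caseSensitive: Boolean = false): Seq[String] = {
    def matches(a: String, b: String): Boolean =
      if (caseSensitive) a == b else a.equalsIgnoreCase(b)

    tableCols.map { col =>
      queryCols.find(matches(col, _)).getOrElse {
        sys.error(s"cannot resolve '$col' given input columns: ${queryCols.mkString(", ")}")
      }
    }
  }

  def main(args: Array[String]): Unit = {
    // The query produced (B, a); the table was created as (a, b): reorder to table order.
    println(adjustColumns(Seq("a", "b"), Seq("B", "a"))) // List(a, B)
  }
}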
/* * Copyright 2021 HM Revenue & Customs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package models.eab import play.api.libs.json._ import utils.AmlsSpec class EabSpec extends AmlsSpec { val completeEstateAgencyActPenalty = Json.obj( "penalisedEstateAgentsAct" -> true, "penalisedEstateAgentsActDetail" -> "details" ) val incompleteEstateAgencyActPenalty = Json.obj( "penalisedEstateAgentsAct" -> true, "penalisedEstateAgentsActDetail" -> "" ) val completePenalisedProfessionalBody = Json.obj( "penalisedProfessionalBody" -> true, "penalisedProfessionalBodyDetail" -> "details" ) val incompletePenalisedProfessionalBody = Json.obj( "penalisedProfessionalBody" -> true, "penalisedProfessionalBodyDetail" -> "" ) val completeRedressScheme = Json.obj( "redressScheme" -> "propertyRedressScheme", "redressSchemeDetail" -> "null" ) val incompleteRedressScheme = Json.obj( "redressScheme" -> "", "redressSchemeDetail" -> "null" ) val completeMoneyProtectionScheme = Json.obj( "clientMoneyProtectionScheme" -> true ) val completeServiceList = Seq( "assetManagement", "auctioneering", "businessTransfer", "commercial", "developmentCompany", "landManagement", "lettings", "relocation", "residential", "socialHousingProvision") val completeServices = Json.obj("eabServicesProvided" -> completeServiceList ) val completeServicesWithoutResidential = Json.obj( "eabServicesProvided" -> completeServiceList.filter(s => !s.equals("residential")) ) val completeServicesWithoutLetting = Json.obj( "eabServicesProvided" -> completeServiceList.filter(s => !s.equals("lettings")) ) val completeDateOfChange = Json.obj( "dateOfChange" -> "2019-01-01" ) "A constructed Eab model" when { "data are complete" must { val completeData = completeServices ++ completeDateOfChange ++ completeEstateAgencyActPenalty ++ completePenalisedProfessionalBody ++ completeRedressScheme ++ completeMoneyProtectionScheme val constructedEab = Eab(completeData, hasAccepted = true) val completeEab = Json.obj( "data" -> completeData, "hasChanged" -> false, "hasAccepted" -> true ) "serialise correctly" in { val serialisedEab = Json.toJson(constructedEab) serialisedEab mustBe completeEab } "deserialise correctly" in { val deserialisedEab = completeEab.as[Eab] deserialisedEab mustBe constructedEab } checkIsComplete(constructedEab) } "data are incomplete" must { val incompleteData = completeServices ++ completeEstateAgencyActPenalty val constructedEab = Eab(incompleteData) val incompleteEab = Json.obj( "data" -> incompleteData, "hasChanged" -> false, "hasAccepted" -> false ) "serialise correctly" in { val serialisedEab = Json.toJson(constructedEab) serialisedEab mustBe incompleteEab } "deserialise correctly" in { val deserialisedEab = incompleteEab.as[Eab] deserialisedEab mustBe constructedEab } checkIsComplete(constructedEab, isProfessionalBodyPenaltyComplete = false, isRedressSchemeComplete = false, isProtectionSchemeComplete = false, isComplete = false) } } "Pre-existing data" when { val oldServices = Json.obj("services" -> Json.arr("08", "03", "07", "02", "05", "01", "06", 
"09", "04" )) val oldDateOfChange = Json.obj("dateOfChange" -> "2002-02-02") val oldProfessionalBody = Json.obj( "penalised" -> true, "professionalBody" -> "test10") val oldEstateAct = Json.obj( "penalisedUnderEstateAgentsAct" -> true, "penalisedUnderEstateAgentsActDetails" -> "test10") val oldRedressScheme = Json.obj( "isRedress" -> false, "propertyRedressScheme" -> "03") val oldRedressSchemeNoRedress = Json.obj( "isRedress" -> false) val oldClientProtection = Json.obj( "clientMoneyProtection" -> false) "data are complete" must { val completeOldEab = ( oldServices ++ oldDateOfChange ++ oldRedressScheme ++ oldProfessionalBody ++ oldEstateAct ++ oldClientProtection ++ Json.obj( "hasChanged" -> true, "hasAccepted" -> true)).as[Eab] checkIsComplete(completeOldEab) } "data are complete no redress" must { val completeOldEab = ( oldServices ++ oldDateOfChange ++ oldRedressSchemeNoRedress ++ oldProfessionalBody ++ oldEstateAct ++ Json.obj( "hasChanged" -> true, "hasAccepted" -> true)).as[Eab] checkIsComplete(completeOldEab) } "data are incomplete" must { val incompleteOldEab = ( oldServices ++ oldRedressScheme ++ Json.obj( "hasChanged" -> false, "hasAccepted" -> false)).as[Eab] checkIsComplete(incompleteOldEab, isEstateAgentActPenaltyComplete = false, isProfessionalBodyPenaltyComplete = false, isComplete = false) } } "An Eab model" when { "EstateAgentActPenalty is true but EstateAgentActPenaltyDetails is empty" must { val eab = Json.obj( "data" -> (completeServices ++ incompleteEstateAgencyActPenalty ++ completePenalisedProfessionalBody ++ completeRedressScheme ++ completeMoneyProtectionScheme), "hasChanged" -> false, "hasAccepted" -> true ).as[Eab] "return false for isEstateAgentActPenaltyComplete" in { eab.isEstateAgentActPenaltyComplete mustBe false } "return false for isComplete" in { eab.isComplete mustBe false } } "PenalisedProfessionalBody is true but PenalisedProfessionalBodyDetails is empty" must { val eab = Json.obj( "data" -> (completeServices ++ completeEstateAgencyActPenalty ++ incompletePenalisedProfessionalBody ++ completeRedressScheme ++ completeMoneyProtectionScheme), "hasChanged" -> false, "hasAccepted" -> true ).as[Eab] "return false for isProfessionalBodyPenaltyComplete" in { eab.isProfessionalBodyPenaltyComplete mustBe false } "return false for isComplete" in { eab.isComplete mustBe false } } "protection scheme is empty" when { "services contain 'lettings'" must { val eab = Json.obj( "data" -> (completeServices ++ completeEstateAgencyActPenalty ++ completePenalisedProfessionalBody ++ completeRedressScheme), "hasChanged" -> false, "hasAccepted" -> true ).as[Eab] "return false for isProtectionSchemeComplete" in { eab.isProtectionSchemeComplete mustBe false } "return false for isComplete" in { eab.isComplete mustBe false } } "services do not contain 'lettings'" must { val completeEab = Json.obj( "data" -> (completeServicesWithoutLetting ++ completeEstateAgencyActPenalty ++ completePenalisedProfessionalBody ++ completeRedressScheme ++ completeMoneyProtectionScheme), "hasChanged" -> false, "hasAccepted" -> true ) val eab = completeEab.as[Eab] "return true for isProtectionSchemeComplete" in { eab.isProtectionSchemeComplete mustBe true } "return true for isComplete" in { eab.isComplete mustBe true } } } "redress scheme is empty" when { "services contain 'residential'" must { val incompleteData = completeServices ++ completeEstateAgencyActPenalty ++ completePenalisedProfessionalBody ++ incompleteRedressScheme ++ completeMoneyProtectionScheme val incompleteEab = Json.obj( "data" -> 
incompleteData, "hasChanged" -> false, "hasAccepted" -> true ) val eab = incompleteEab.as[Eab] "return false for isRedressSchemeComplete" in { eab.isRedressSchemeComplete mustBe false } "return false for isComplete" in { eab.isComplete mustBe false } } "services do not contain 'residential'" must { val incompleteData = completeServicesWithoutResidential ++ completeEstateAgencyActPenalty ++ completePenalisedProfessionalBody ++ incompleteRedressScheme ++ completeMoneyProtectionScheme val incompleteEab = Json.obj( "data" -> incompleteData, "hasChanged" -> false, "hasAccepted" -> true ) val eab = incompleteEab.as[Eab] "return true for isRedressSchemeComplete" in { eab.isRedressSchemeComplete mustBe true } "return true for isComplete" in { eab.isComplete mustBe true } } } } def checkIsComplete(eab: Eab, isServicesComplete: Boolean = true, isRedressSchemeComplete: Boolean = true, isProtectionSchemeComplete: Boolean = true, isEstateAgentActPenaltyComplete: Boolean = true, isProfessionalBodyPenaltyComplete: Boolean = true, isComplete: Boolean = true) = { s"return $isServicesComplete for isServicesComplete" in { eab.isServicesComplete mustBe isServicesComplete } s"return $isRedressSchemeComplete for isRedressSchemeComplete" in { eab.isRedressSchemeComplete mustBe isRedressSchemeComplete } s"return $isProtectionSchemeComplete for isProtectionSchemeComplete" in { eab.isProtectionSchemeComplete mustBe isProtectionSchemeComplete } s"return $isEstateAgentActPenaltyComplete for isEstateAgentActPenaltyComplete" in { eab.isEstateAgentActPenaltyComplete mustBe isEstateAgentActPenaltyComplete } s"return $isProfessionalBodyPenaltyComplete for isProfessionalBodyPenaltyComplete" in { eab.isProfessionalBodyPenaltyComplete mustBe isProfessionalBodyPenaltyComplete } s"return $isComplete for isComplete" in { eab.accept.isComplete mustBe isComplete } } }
hmrc/amls-frontend
test/models/eab/EabSpec.scala
Scala
apache-2.0
11,017
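The spec above repeatedly asserts two conditional rules: a redress scheme only counts towards completeness when the services include "residential", and a client money protection answer only when they include "lettings". A standalone sketch of that rule shape is given below; the field names are illustrative, and the real Eab model reads these answers out of a JSON `data` object rather than a case class.

object EabCompletenessSketch {
  final case class EabData(
      services: Set[String],
      redressScheme: Option[String],
      clientMoneyProtectionScheme: Option[Boolean])

  // Redress scheme is only required when 'residential' is among the services.
  def isRedressSchemeComplete(d: EabData): Boolean =
    !d.services.contains("residential") || d.redressScheme.exists(_.nonEmpty)

  // Protection scheme is only required when 'lettings' is among the services.
  def isProtectionSchemeComplete(d: EabData): Boolean =
    !d.services.contains("lettings") || d.clientMoneyProtectionScheme.isDefined

  def main(args: Array[String]): Unit = {
    val lettingsOnly = EabData(Set("lettings"), None, None)
    println(isProtectionSchemeComplete(lettingsOnly)) // false: lettings needs a protection answer
    println(isRedressSchemeComplete(lettingsOnly))    // true: no residential service
  }
}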
package scalacookbook.chapter03

import util.control.Breaks._

/**
 * Created by liguodong on 2016/6/28.
 */
object ImplBreakAndContinue extends App {

  println("\n=== BREAK EXAMPLE ===")
  breakable {
    for (i <- 1 to 10) {
      println(i)
      if (i > 4) break // break out of the for loop
    }
  }

  println("\n=== CONTINUE EXAMPLE ===")
  val searchMe = "peter piper picked a peck of pickled peppers"
  var numPs = 0
  for (i <- 0 until searchMe.length) {
    breakable {
      if (searchMe.charAt(i) != 'p') {
        break // break out of the 'breakable', continue the outside loop
      } else {
        numPs += 1
      }
    }
  }
  println("Found " + numPs + " p's in the string.")

  // use the count method with a simple anonymous function
  val count = searchMe.count(_ == 'p')
  println("count : " + count)

  println("------------")

  import scala.util.control._
  val Inner = new Breaks
  val Outer = new Breaks
  Outer.breakable {
    for (i <- 1 to 5) {
      Inner.breakable {
        for (j <- 'a' to 'e') {
          if (i == 1 && j == 'c') Inner.break
          else println(s"i: $i, j: $j") // equivalent to continue
          if (i == 2 && j == 'b') Outer.break // equivalent to break
        }
      }
    }
  }

  // Use the same approach if you prefer labeled breaks.
  import scala.util.control._
  val Exit = new Breaks
  Exit.breakable {
    for (j <- 'a' to 'e') {
      if (j == 'c') Exit.break
      else println(s"j: $j")
    }
  }

  println("~~~~~~~~~~~~~~")

  // Other ways to break out of a loop:
  // If you don't like using break and continue,
  // there are several other ways to attack these problems.
  val monkeyCollection = 1.to(10)
  var barrelIsFull = false
  for (monkey <- monkeyCollection if !barrelIsFull) {
    println(monkey)
    barrelIsFull = checkIfEqualFive(monkey.toInt)
  }

  def checkIfEqualFive(n: Int): Boolean = {
    if (n == 5) true else false
  }

  // Another way is to end with a return once the condition is met.
  // Another approach is to place your algorithm inside a function, and then return from
  // the function when the desired condition is reached.
  // calculate a sum of numbers, but limit it to a 'max' value
  def sumToMax(arr: Array[Int], limit: Int): Int = {
    var sum = 0
    for (i <- arr) {
      sum += i
      if (sum > limit) return limit
    }
    sum
  }
  val a = Array.range(0, 10)
  println(sumToMax(a, 10))

  println("---------Recursion--------------")
  // Use a recursive algorithm.
  // A common approach in functional programming is to use recursive algorithms.
  // Note: Int overflows for n >= 13; use BigInt for exact results with large n.
  def factorial(n: Int): Int = {
    if (n == 1) 1 else n * factorial(n - 1)
  }

  import com.github.nscala_time.time.Imports._
  //println(factorial(1000))
  val start1 = DateTime.now()
  println(factorial(1000))
  val end1 = DateTime.now()
  // get the milliseconds
  println(start1.getMillis + " " + end1.getMillis)
  println("Without tail recursion: " + (end1.getMillis - start1.getMillis))

  // A better solution uses tail recursion, especially when the starting value n is very large.
  // Tail recursion differs from plain recursion in memory use:
  // plain recursion builds up stack frames before the computation collapses, while
  // tail recursion uses a constant amount of memory (like iteration).
  // Tail recursion passes the changing state along as parameters of the recursive call.
  // A more optimal solution takes advantage of tail recursion.
  import scala.annotation.tailrec
  def factorial2(n: Int): Int = {
    @tailrec
    def factorialAcc(acc: Int, n: Int): Int = {
      if (n <= 1) acc
      else factorialAcc(n * acc, n - 1)
    }
    factorialAcc(1, n)
  }
  // The @tailrec annotation guarantees the algorithm is tail recursive; if it is not, compilation fails.
  import com.github.nscala_time.time.Imports._
  val start2 = DateTime.now()
  println(factorial2(1000))
  val end2 = DateTime.now()
  println("With tail recursion: " + (end2.getMillis - start2.getMillis))
}
liguodongIOT/java-scala-mix-sbt
src/main/scala/scalacookbook/chapter03/ImplBreakAndContinue.scala
Scala
apache-2.0
3,858
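The timing comparison above calls factorial(1000) on Int, which overflows long before that (13! already exceeds Int.MaxValue). A tail-recursive BigInt variant is sketched below as one way the same comparison could be made with exact results; it is an illustration, not part of the cookbook example.

import scala.annotation.tailrec

object BigIntFactorialSketch {
  // Tail-recursive factorial over BigInt: constant stack usage and no Int overflow.
  def factorial(n: Int): BigInt = {
    @tailrec
    def loop(acc: BigInt, k: Int): BigInt =
      if (k <= 1) acc else loop(acc * k, k - 1)
    loop(BigInt(1), n)
  }

  def main(args: Array[String]): Unit = {
    println(factorial(5))                    // 120
    println(factorial(1000).toString.length) // 2568 digits
  }
}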
/*
 * Copyright 2015 HM Revenue & Customs
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package uk.gov.hmrc.ct.ct600.v2

import uk.gov.hmrc.ct.box.{CtBoxIdentifier, CtOptionalString, Input}

case class B150(value: Option[String]) extends CtBoxIdentifier("Branch sort code") with CtOptionalString with Input
keithhall/ct-calculations
src/main/scala/uk/gov/hmrc/ct/ct600/v2/B150.scala
Scala
apache-2.0
824
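B150 above is a plain value box wrapping an Option[String]. The sketch below shows hypothetical usage with stubbed-out versions of the ct-calculations traits; the real CtBoxIdentifier, CtOptionalString and Input live in uk.gov.hmrc.ct.box and are not reproduced here, so everything except the shape of B150 itself is an assumption.

object B150UsageSketch {
  // Stub traits, just enough to make the example compile on its own.
  abstract class CtBoxIdentifier(val name: String)
  trait CtOptionalString { def value: Option[String] }
  trait Input

  case class B150(value: Option[String])
      extends CtBoxIdentifier("Branch sort code") with CtOptionalString with Input

  def main(args: Array[String]): Unit = {
    val sortCode = B150(Some("40-47-84"))
    val missing  = B150(None)
    println(sortCode.name + ": " + sortCode.value.getOrElse("not provided")) // Branch sort code: 40-47-84
    println(missing.name + ": " + missing.value.getOrElse("not provided"))   // Branch sort code: not provided
  }
}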